Columns:
  code        stringlengths   3 to 1.05M
  repo_name   stringlengths   4 to 116
  path        stringlengths   4 to 991
  language    stringclasses   9 values
  license     stringclasses   15 values
  size        int32           3 to 1.05M
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.util; import java.io.ByteArrayOutputStream; import java.io.PrintStream; import org.junit.Assert; import org.apache.hadoop.util.FindClass; import org.apache.hadoop.util.ToolRunner; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Test the find class logic */ public class TestFindClass extends Assert { private static final Logger LOG = LoggerFactory.getLogger(TestFindClass.class); public static final String LOG4J_PROPERTIES = "log4j.properties"; /** * Run the tool runner instance * @param expected expected return code * @param args a list of arguments * @throws Exception on any falure that is not handled earlier */ private void run(int expected, String... args) throws Exception { int result = ToolRunner.run(new FindClass(), args); assertEquals(expected, result); } @Test public void testUsage() throws Throwable { run(FindClass.E_USAGE, "org.apache.hadoop.util.TestFindClass"); } @Test public void testFindsResource() throws Throwable { run(FindClass.SUCCESS, FindClass.A_RESOURCE, "org/apache/hadoop/util/TestFindClass.class"); } @Test public void testFailsNoSuchResource() throws Throwable { run(FindClass.E_NOT_FOUND, FindClass.A_RESOURCE, "org/apache/hadoop/util/ThereIsNoSuchClass.class"); } @Test public void testLoadFindsSelf() throws Throwable { run(FindClass.SUCCESS, FindClass.A_LOAD, "org.apache.hadoop.util.TestFindClass"); } @Test public void testLoadFailsNoSuchClass() throws Throwable { run(FindClass.E_NOT_FOUND, FindClass.A_LOAD, "org.apache.hadoop.util.ThereIsNoSuchClass"); } @Test public void testLoadWithErrorInStaticInit() throws Throwable { run(FindClass.E_LOAD_FAILED, FindClass.A_LOAD, "org.apache.hadoop.util.TestFindClass$FailInStaticInit"); } @Test public void testCreateHandlesBadToString() throws Throwable { run(FindClass.SUCCESS, FindClass.A_CREATE, "org.apache.hadoop.util.TestFindClass$BadToStringClass"); } @Test public void testCreatesClass() throws Throwable { run(FindClass.SUCCESS, FindClass.A_CREATE, "org.apache.hadoop.util.TestFindClass"); } @Test public void testCreateFailsInStaticInit() throws Throwable { run(FindClass.E_LOAD_FAILED, FindClass.A_CREATE, "org.apache.hadoop.util.TestFindClass$FailInStaticInit"); } @Test public void testCreateFailsInConstructor() throws Throwable { run(FindClass.E_CREATE_FAILED, FindClass.A_CREATE, "org.apache.hadoop.util.TestFindClass$FailInConstructor"); } @Test public void testCreateFailsNoEmptyConstructor() throws Throwable { run(FindClass.E_CREATE_FAILED, FindClass.A_CREATE, "org.apache.hadoop.util.TestFindClass$NoEmptyConstructor"); } @Test public void testLoadPrivateClass() throws Throwable { run(FindClass.SUCCESS, FindClass.A_LOAD, "org.apache.hadoop.util.TestFindClass$PrivateClass"); } @Test public void 
testCreateFailsPrivateClass() throws Throwable { run(FindClass.E_CREATE_FAILED, FindClass.A_CREATE, "org.apache.hadoop.util.TestFindClass$PrivateClass"); } @Test public void testCreateFailsInPrivateConstructor() throws Throwable { run(FindClass.E_CREATE_FAILED, FindClass.A_CREATE, "org.apache.hadoop.util.TestFindClass$PrivateConstructor"); } @Test public void testLoadFindsLog4J() throws Throwable { run(FindClass.SUCCESS, FindClass.A_RESOURCE, LOG4J_PROPERTIES); } @SuppressWarnings("UseOfSystemOutOrSystemErr") @Test public void testPrintLog4J() throws Throwable { ByteArrayOutputStream baos = new ByteArrayOutputStream(); PrintStream out = new PrintStream(baos); FindClass.setOutputStreams(out, System.err); run(FindClass.SUCCESS, FindClass.A_PRINTRESOURCE, LOG4J_PROPERTIES); //here the content should be done out.flush(); String body = baos.toString("UTF8"); LOG.info(LOG4J_PROPERTIES + " =\n" + body); assertTrue(body.contains("Apache")); } /** * trigger a divide by zero fault in the static init */ public static class FailInStaticInit { static { int x = 0; int y = 1 / x; } } /** * trigger a divide by zero fault in the constructor */ public static class FailInConstructor { public FailInConstructor() { int x = 0; int y = 1 / x; } } /** * A class with no parameterless constructor -expect creation to fail */ public static class NoEmptyConstructor { public NoEmptyConstructor(String text) { } } /** * This has triggers an NPE in the toString() method; checks the logging * code handles this. */ public static class BadToStringClass { public BadToStringClass() { } @Override public String toString() { throw new NullPointerException("oops"); } } /** * This has a private constructor * -creating it will trigger an IllegalAccessException */ public static class PrivateClass { private PrivateClass() { } } /** * This has a private constructor * -creating it will trigger an IllegalAccessException */ public static class PrivateConstructor { private PrivateConstructor() { } } }
dennishuo/hadoop
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestFindClass.java
Java
apache-2.0
6,169
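The TestFindClass row above drives org.apache.hadoop.util.FindClass through ToolRunner and asserts on its return codes. As a minimal sketch of the same invocation pattern outside JUnit (the target class org.apache.hadoop.io.Text and the main-method wrapper are illustrative assumptions, not taken from the row):

import org.apache.hadoop.util.FindClass;
import org.apache.hadoop.util.ToolRunner;

public class FindClassDemo {
    public static void main(String[] args) throws Exception {
        // Ask FindClass to load a class and report where it was found; the
        // return code mirrors the constants asserted in the test above
        // (SUCCESS, E_NOT_FOUND, E_LOAD_FAILED, ...).
        int rc = ToolRunner.run(new FindClass(),
                new String[] { FindClass.A_LOAD, "org.apache.hadoop.io.Text" });
        System.exit(rc);
    }
}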
/* * Copyright 2012-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.test.autoconfigure.web.servlet; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Inherited; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import com.gargoylesoftware.htmlunit.WebClient; import org.openqa.selenium.WebDriver; import org.springframework.boot.autoconfigure.ImportAutoConfiguration; import org.springframework.boot.test.autoconfigure.properties.PropertyMapping; import org.springframework.boot.test.autoconfigure.properties.SkipPropertyMapping; import org.springframework.test.web.servlet.MockMvc; import org.springframework.test.web.servlet.MvcResult; /** * Annotation that can be applied to a test class to enable and configure * auto-configuration of {@link MockMvc}. * * @author Phillip Webb * @since 1.4.0 * @see MockMvcAutoConfiguration * @see SpringBootMockMvcBuilderCustomizer */ @Target({ ElementType.TYPE, ElementType.METHOD }) @Retention(RetentionPolicy.RUNTIME) @Documented @Inherited @ImportAutoConfiguration @PropertyMapping("spring.test.mockmvc") public @interface AutoConfigureMockMvc { /** * If filters from the application context should be registered with MockMVC. Defaults * to {@code true}. * @return if filters should be added */ boolean addFilters() default true; /** * How {@link MvcResult} information should be printed after each MockMVC invocation. * @return how information is printed */ @PropertyMapping(skip = SkipPropertyMapping.ON_DEFAULT_VALUE) MockMvcPrint print() default MockMvcPrint.DEFAULT; /** * If {@link MvcResult} information should be printed only if the test fails. * @return {@code true} if printing only occurs on failure */ boolean printOnlyOnFailure() default true; /** * If a {@link WebClient} should be auto-configured when HtmlUnit is on the classpath. * Defaults to {@code true}. * @return if a {@link WebClient} is auto-configured */ @PropertyMapping("webclient.enabled") boolean webClientEnabled() default true; /** * If a {@link WebDriver} should be auto-configured when Selenium is on the classpath. * Defaults to {@code true}. * @return if a {@link WebDriver} is auto-configured */ @PropertyMapping("webdriver.enabled") boolean webDriverEnabled() default true; }
tiarebalbi/spring-boot
spring-boot-project/spring-boot-test-autoconfigure/src/main/java/org/springframework/boot/test/autoconfigure/web/servlet/AutoConfigureMockMvc.java
Java
apache-2.0
2,989
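The AutoConfigureMockMvc row defines the annotation and documents its attributes, but contains no consuming test. A minimal sketch of typical usage, assuming a JUnit 5 / Spring Boot 2.x style test and a hypothetical controller mapped to "/" (neither appears in the row):

import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.web.servlet.MockMvc;

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

@SpringBootTest
@AutoConfigureMockMvc   // registers servlet filters and auto-configures MockMvc
class HomePageTests {

    @Autowired
    private MockMvc mockMvc;   // injected because of @AutoConfigureMockMvc

    @Test
    void homePageReturnsOk() throws Exception {
        // MvcResult details are printed only on failure, since
        // printOnlyOnFailure() defaults to true.
        mockMvc.perform(get("/")).andExpect(status().isOk());
    }
}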
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright: (c) 2017, Ansible Project # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['stableinterface'], 'supported_by': 'core'} DOCUMENTATION = ''' --- module: acl version_added: "1.4" short_description: Sets and retrieves file ACL information. description: - Sets and retrieves file ACL information. options: path: required: true default: null description: - The full path of the file or object. aliases: ['name'] state: required: false default: query choices: [ 'query', 'present', 'absent' ] description: - defines whether the ACL should be present or not. The C(query) state gets the current acl without changing it, for use in 'register' operations. follow: required: false default: yes choices: [ 'yes', 'no' ] description: - whether to follow symlinks on the path if a symlink is encountered. default: version_added: "1.5" required: false default: no choices: [ 'yes', 'no' ] description: - if the target is a directory, setting this to yes will make it the default acl for entities created inside the directory. It causes an error if path is a file. entity: version_added: "1.5" required: false description: - actual user or group that the ACL applies to when matching entity types user or group are selected. etype: version_added: "1.5" required: false default: null choices: [ 'user', 'group', 'mask', 'other' ] description: - the entity type of the ACL to apply, see setfacl documentation for more info. permissions: version_added: "1.5" required: false default: null description: - Permissions to apply/remove can be any combination of r, w and x (read, write and execute respectively) entry: required: false default: null description: - DEPRECATED. The acl to set or remove. This must always be quoted in the form of '<etype>:<qualifier>:<perms>'. The qualifier may be empty for some types, but the type and perms are always required. '-' can be used as placeholder when you do not care about permissions. This is now superseded by entity, type and permissions fields. recursive: version_added: "2.0" required: false default: no choices: [ 'yes', 'no' ] description: - Recursively sets the specified ACL (added in Ansible 2.0). Incompatible with C(state=query). author: - "Brian Coca (@bcoca)" - "Jérémie Astori (@astorije)" notes: - The "acl" module requires that acls are enabled on the target filesystem and that the setfacl and getfacl binaries are installed. - As of Ansible 2.0, this module only supports Linux distributions. - As of Ansible 2.3, the I(name) option has been changed to I(path) as default, but I(name) still works as well. 
''' EXAMPLES = ''' # Grant user Joe read access to a file - acl: path: /etc/foo.conf entity: joe etype: user permissions: r state: present # Removes the acl for Joe on a specific file - acl: path: /etc/foo.conf entity: joe etype: user state: absent # Sets default acl for joe on foo.d - acl: path: /etc/foo.d entity: joe etype: user permissions: rw default: yes state: present # Same as previous but using entry shorthand - acl: path: /etc/foo.d entry: "default:user:joe:rw-" state: present # Obtain the acl for a specific file - acl: path: /etc/foo.conf register: acl_info ''' RETURN = ''' acl: description: Current acl on provided path (after changes, if any) returned: success type: list sample: [ "user::rwx", "group::rwx", "other::rwx" ] ''' import os from ansible.module_utils.basic import AnsibleModule, get_platform from ansible.module_utils.pycompat24 import get_exception def split_entry(entry): ''' splits entry and ensures normalized return''' a = entry.split(':') d = None if entry.lower().startswith("d"): d = True a.pop(0) if len(a) == 2: a.append(None) t, e, p = a t = t.lower() if t.startswith("u"): t = "user" elif t.startswith("g"): t = "group" elif t.startswith("m"): t = "mask" elif t.startswith("o"): t = "other" else: t = None return [d, t, e, p] def build_entry(etype, entity, permissions=None, use_nfsv4_acls=False): '''Builds and returns an entry string. Does not include the permissions bit if they are not provided.''' if use_nfsv4_acls: return ':'.join([etype, entity, permissions, 'allow']) if permissions: return etype + ':' + entity + ':' + permissions else: return etype + ':' + entity def build_command(module, mode, path, follow, default, recursive, entry=''): '''Builds and returns a getfacl/setfacl command.''' if mode == 'set': cmd = [module.get_bin_path('setfacl', True)] cmd.append('-m "%s"' % entry) elif mode == 'rm': cmd = [module.get_bin_path('setfacl', True)] cmd.append('-x "%s"' % entry) else: # mode == 'get' cmd = [module.get_bin_path('getfacl', True)] # prevents absolute path warnings and removes headers if get_platform().lower() == 'linux': cmd.append('--omit-header') cmd.append('--absolute-names') if recursive: cmd.append('--recursive') if not follow: if get_platform().lower() == 'linux': cmd.append('--physical') elif get_platform().lower() == 'freebsd': cmd.append('-h') if default: if mode == 'rm': cmd.insert(1, '-k') else: # mode == 'set' or mode == 'get' cmd.insert(1, '-d') cmd.append(path) return cmd def acl_changed(module, cmd): '''Returns true if the provided command affects the existing ACLs, false otherwise.''' # FreeBSD do not have a --test flag, so by default, it is safer to always say "true" if get_platform().lower() == 'freebsd': return True cmd = cmd[:] # lists are mutables so cmd would be overwritten without this cmd.insert(1, '--test') lines = run_acl(module, cmd) for line in lines: if not line.endswith('*,*'): return True return False def run_acl(module, cmd, check_rc=True): try: (rc, out, err) = module.run_command(' '.join(cmd), check_rc=check_rc) except Exception: e = get_exception() module.fail_json(msg=e.strerror) lines = [] for l in out.splitlines(): if not l.startswith('#'): lines.append(l.strip()) if lines and not lines[-1].split(): # trim last line only when it is empty return lines[:-1] else: return lines def main(): module = AnsibleModule( argument_spec=dict( path=dict(required=True, aliases=['name'], type='path'), entry=dict(required=False, type='str'), entity=dict(required=False, type='str', default=''), etype=dict( required=False, 
choices=['other', 'user', 'group', 'mask'], type='str' ), permissions=dict(required=False, type='str'), state=dict( required=False, default='query', choices=['query', 'present', 'absent'], type='str' ), follow=dict(required=False, type='bool', default=True), default=dict(required=False, type='bool', default=False), recursive=dict(required=False, type='bool', default=False), use_nfsv4_acls=dict(required=False, type='bool', default=False) ), supports_check_mode=True, ) if get_platform().lower() not in ['linux', 'freebsd']: module.fail_json(msg="The acl module is not available on this system.") path = module.params.get('path') entry = module.params.get('entry') entity = module.params.get('entity') etype = module.params.get('etype') permissions = module.params.get('permissions') state = module.params.get('state') follow = module.params.get('follow') default = module.params.get('default') recursive = module.params.get('recursive') use_nfsv4_acls = module.params.get('use_nfsv4_acls') if not os.path.exists(path): module.fail_json(msg="Path not found or not accessible.") if state == 'query' and recursive: module.fail_json(msg="'recursive' MUST NOT be set when 'state=query'.") if not entry: if state == 'absent' and permissions: module.fail_json(msg="'permissions' MUST NOT be set when 'state=absent'.") if state == 'absent' and not entity: module.fail_json(msg="'entity' MUST be set when 'state=absent'.") if state in ['present', 'absent'] and not etype: module.fail_json(msg="'etype' MUST be set when 'state=%s'." % state) if entry: if etype or entity or permissions: module.fail_json(msg="'entry' MUST NOT be set when 'entity', 'etype' or 'permissions' are set.") if state == 'present' and not entry.count(":") in [2, 3]: module.fail_json(msg="'entry' MUST have 3 or 4 sections divided by ':' when 'state=present'.") if state == 'absent' and not entry.count(":") in [1, 2]: module.fail_json(msg="'entry' MUST have 2 or 3 sections divided by ':' when 'state=absent'.") if state == 'query': module.fail_json(msg="'entry' MUST NOT be set when 'state=query'.") default_flag, etype, entity, permissions = split_entry(entry) if default_flag is not None: default = default_flag if get_platform().lower() == 'freebsd': if recursive: module.fail_json(msg="recursive is not supported on that platform.") changed = False msg = "" if state == 'present': entry = build_entry(etype, entity, permissions, use_nfsv4_acls) command = build_command( module, 'set', path, follow, default, recursive, entry ) changed = acl_changed(module, command) if changed and not module.check_mode: run_acl(module, command) msg = "%s is present" % entry elif state == 'absent': entry = build_entry(etype, entity, use_nfsv4_acls) command = build_command( module, 'rm', path, follow, default, recursive, entry ) changed = acl_changed(module, command) if changed and not module.check_mode: run_acl(module, command, False) msg = "%s is absent" % entry elif state == 'query': msg = "current acl" acl = run_acl( module, build_command(module, 'get', path, follow, default, recursive) ) module.exit_json(changed=changed, msg=msg, acl=acl) if __name__ == '__main__': main()
e-gob/plataforma-kioscos-autoatencion
scripts/ansible-play/.venv/lib/python2.7/site-packages/ansible/modules/files/acl.py
Python
bsd-3-clause
11,261
<?php /** * * This file is part of the phpBB Forum Software package. * * @copyright (c) phpBB Limited <https://www.phpbb.com> * @license GNU General Public License, version 2 (GPL-2.0) * * For full copyright and license information, please see * the docs/CREDITS.txt file. * */ namespace phpbb\template; interface template { /** * Clear the cache * * @return \phpbb\template\template */ public function clear_cache(); /** * Sets the template filenames for handles. * * @param array $filename_array Should be a hash of handle => filename pairs. * @return \phpbb\template\template $this */ public function set_filenames(array $filename_array); /** * Get the style tree of the style preferred by the current user * * @return array Style tree, most specific first */ public function get_user_style(); /** * Set style location based on (current) user's chosen style. * * @param array $style_directories The directories to add style paths for * E.g. array('ext/foo/bar/styles', 'styles') * Default: array('styles') (phpBB's style directory) * @return \phpbb\template\template $this */ public function set_style($style_directories = array('styles')); /** * Set custom style location (able to use directory outside of phpBB). * * Note: Templates are still compiled to phpBB's cache directory. * * @param string|array $names Array of names or string of name of template(s) in inheritance tree order, used by extensions. * @param string|array or string $paths Array of style paths, relative to current root directory * @return \phpbb\template\template $this */ public function set_custom_style($names, $paths); /** * Clears all variables and blocks assigned to this template. * * @return \phpbb\template\template $this */ public function destroy(); /** * Reset/empty complete block * * @param string $blockname Name of block to destroy * @return \phpbb\template\template $this */ public function destroy_block_vars($blockname); /** * Display a template for provided handle. * * The template will be loaded and compiled, if necessary, first. * * This function calls hooks. * * @param string $handle Handle to display * @return \phpbb\template\template $this */ public function display($handle); /** * Display the handle and assign the output to a template variable * or return the compiled result. * * @param string $handle Handle to operate on * @param string $template_var Template variable to assign compiled handle to * @param bool $return_content If true return compiled handle, otherwise assign to $template_var * @return \phpbb\template\template|string if $return_content is true return string of the compiled handle, otherwise return $this */ public function assign_display($handle, $template_var = '', $return_content = true); /** * Assign key variable pairs from an array * * @param array $vararray A hash of variable name => value pairs * @return \phpbb\template\template $this */ public function assign_vars(array $vararray); /** * Assign a single scalar value to a single key. * * Value can be a string, an integer or a boolean. * * @param string $varname Variable name * @param string $varval Value to assign to variable * @return \phpbb\template\template $this */ public function assign_var($varname, $varval); /** * Append text to the string value stored in a key. * * Text is appended using the string concatenation operator (.). 
* * @param string $varname Variable name * @param string $varval Value to append to variable * @return \phpbb\template\template $this */ public function append_var($varname, $varval); /** * Assign key variable pairs from an array to a specified block * @param string $blockname Name of block to assign $vararray to * @param array $vararray A hash of variable name => value pairs * @return \phpbb\template\template $this */ public function assign_block_vars($blockname, array $vararray); /** * Assign key variable pairs from an array to a whole specified block loop * @param string $blockname Name of block to assign $block_vars_array to * @param array $block_vars_array An array of hashes of variable name => value pairs * @return \phpbb\template\template $this */ public function assign_block_vars_array($blockname, array $block_vars_array); /** * Change already assigned key variable pair (one-dimensional - single loop entry) * * An example of how to use this function: * {@example alter_block_array.php} * * @param string $blockname the blockname, for example 'loop' * @param array $vararray the var array to insert/add or merge * @param mixed $key Key to search for * * array: KEY => VALUE [the key/value pair to search for within the loop to determine the correct position] * * int: Position [the position to change or insert at directly given] * * If key is false the position is set to 0 * If key is true the position is set to the last entry * * @param string $mode Mode to execute (valid modes are 'insert' and 'change') * * If insert, the vararray is inserted at the given position (position counting from zero). * If change, the current block gets merged with the vararray (resulting in new \key/value pairs be added and existing keys be replaced by the new \value). * * Since counting begins by zero, inserting at the last position will result in this array: array(vararray, last positioned array) * and inserting at position 1 will result in this array: array(first positioned array, vararray, following vars) * * @return bool false on error, true on success */ public function alter_block_array($blockname, array $vararray, $key = false, $mode = 'insert'); /** * Get path to template for handle (required for BBCode parser) * * @param string $handle Handle to retrieve the source file * @return string */ public function get_source_file_for_handle($handle); }
kivi8/ars-poetica
www/forum/phpbb/template/template.php
PHP
bsd-3-clause
5,881
//################################################################################################################## // #TOOLBAR# /* global CMS */ (function ($) { 'use strict'; // CMS.$ will be passed for $ $(document).ready(function () { /*! * Toolbar * Handles all features related to the toolbar */ CMS.Toolbar = new CMS.Class({ implement: [CMS.API.Helpers], options: { preventSwitch: false, preventSwitchMessage: 'Switching is disabled.', messageDelay: 2000 }, initialize: function (options) { this.container = $('#cms-toolbar'); this.options = $.extend(true, {}, this.options, options); this.config = CMS.config; this.settings = CMS.settings; // elements this.body = $('html'); this.toolbar = this.container.find('.cms-toolbar').hide(); this.toolbarTrigger = this.container.find('.cms-toolbar-trigger'); this.navigations = this.container.find('.cms-toolbar-item-navigation'); this.buttons = this.container.find('.cms-toolbar-item-buttons'); this.switcher = this.container.find('.cms-toolbar-item-switch'); this.messages = this.container.find('.cms-messages'); this.screenBlock = this.container.find('.cms-screenblock'); // states this.click = 'click.cms'; this.timer = function () {}; this.lockToolbar = false; // setup initial stuff this._setup(); // setup events this._events(); }, // initial methods _setup: function () { // setup toolbar visibility, we need to reverse the options to set the correct state (this.settings.toolbar === 'expanded') ? this._showToolbar(0, true) : this._hideToolbar(0, true); // hide publish button var publishBtn = $('.cms-btn-publish').parent(); publishBtn.hide(); if ($('.cms-btn-publish-active').length) { publishBtn.show(); } // check if debug is true if (CMS.config.debug) { this._debug(); } // check if there are messages and display them if (CMS.config.messages) { this.openMessage(CMS.config.messages); } // check if there are error messages and display them if (CMS.config.error) { this.showError(CMS.config.error); } // enforce open state if user is not logged in but requests the toolbar if (!CMS.config.auth || CMS.config.settings.version !== this.settings.version) { this.toggleToolbar(true); this.settings = this.setSettings(CMS.config.settings); } // should switcher indicate that there is an unpublished page? 
if (CMS.config.publisher) { this.openMessage(CMS.config.publisher, 'right'); setInterval(function () { CMS.$('.cms-toolbar-item-switch').toggleClass('cms-toolbar-item-switch-highlight'); }, this.options.messageDelay); } // open sideframe if it was previously opened if (this.settings.sideframe.url) { var sideframe = new CMS.Sideframe(); sideframe.open(this.settings.sideframe.url, false); } // if there is a screenblock, do some resize magic if (this.screenBlock.length) { this._screenBlock(); } // add toolbar ready class to body and fire event this.body.addClass('cms-ready'); $(document).trigger('cms-ready'); }, _events: function () { var that = this; // attach event to the trigger handler this.toolbarTrigger.bind(this.click, function (e) { e.preventDefault(); that.toggleToolbar(); }); // attach event to the navigation elements this.navigations.each(function () { var item = $(this); var lists = item.find('li'); var root = 'cms-toolbar-item-navigation'; var hover = 'cms-toolbar-item-navigation-hover'; var disabled = 'cms-toolbar-item-navigation-disabled'; var children = 'cms-toolbar-item-navigation-children'; // remove events from first level item.find('a').bind(that.click, function (e) { e.preventDefault(); if ($(this).attr('href') !== '' && $(this).attr('href') !== '#' && !$(this).parent().hasClass(disabled) && !$(this).parent().hasClass(disabled)) { that._delegate($(this)); reset(); return false; } }); // handle click states lists.bind(that.click, function (e) { e.stopPropagation(); var el = $(this); // close if el is first item if (el.parent().hasClass(root) && el.hasClass(hover) || el.hasClass(disabled)) { reset(); return false; } else { reset(); el.addClass(hover); } // activate hover selection item.find('> li').bind('mouseenter', function () { // cancel if item is already active if ($(this).hasClass(hover)) { return false; } $(this).trigger(that.click); }); // create the document event $(document).bind(that.click, reset); }); // attach hover lists.find('li').bind('mouseenter mouseleave', function () { var el = $(this); var parent = el.closest('.cms-toolbar-item-navigation-children') .add(el.parents('.cms-toolbar-item-navigation-children')); var hasChildren = el.hasClass(children) || parent.length; // do not attach hover effect if disabled // cancel event if element has already hover class if (el.hasClass(disabled) || el.hasClass(hover)) { return false; } // reset lists.find('li').removeClass(hover); // add hover effect el.addClass(hover); // handle children elements if (hasChildren) { el.find('> ul').show(); // add parent class parent.addClass(hover); } else { lists.find('ul ul').hide(); } // Remove stale submenus el.siblings().find('> ul').hide(); }); // fix leave event lists.find('> ul').bind('mouseleave', function () { lists.find('li').removeClass(hover); }); // removes classes and events function reset() { lists.removeClass(hover); lists.find('ul ul').hide(); item.find('> li').unbind('mouseenter'); $(document).unbind(that.click); } }); // attach event to the switcher elements this.switcher.each(function () { $(this).bind(that.click, function (e) { e.preventDefault(); that._setSwitcher($(e.currentTarget)); }); }); // attach event for first page publish this.buttons.each(function () { var btn = $(this); // in case the button has a data-rel attribute if (btn.find('a').attr('data-rel')) { btn.on('click', function (e) { e.preventDefault(); that._delegate($(this).find('a')); }); } // in case of the publish button btn.find('.cms-publish-page').bind(that.click, function (e) { if 
(!confirm(that.config.lang.publish)) { e.preventDefault(); } }); btn.find('.cms-btn-publish').bind(that.click, function (e) { e.preventDefault(); // send post request to prevent xss attacks $.ajax({ 'type': 'post', 'url': $(this).prop('href'), 'data': { 'csrfmiddlewaretoken': CMS.config.csrf }, 'success': function () { CMS.API.Helpers.reloadBrowser(); }, 'error': function (request) { throw new Error(request); } }); }); }); }, // public methods toggleToolbar: function (show) { // overwrite state when provided if (show) { this.settings.toolbar = 'collapsed'; } // toggle bar (this.settings.toolbar === 'collapsed') ? this._showToolbar(200) : this._hideToolbar(200); }, openMessage: function (msg, dir, delay, error) { // set toolbar freeze this._lock(true); // add content to element this.messages.find('.cms-messages-inner').html(msg); // clear timeout clearTimeout(this.timer); // determine width var that = this; var width = 320; var height = this.messages.outerHeight(true); var top = this.toolbar.outerHeight(true); var close = this.messages.find('.cms-messages-close'); close.hide(); close.bind(this.click, function () { that.closeMessage(); }); // set top to 0 if toolbar is collapsed if (this.settings.toolbar === 'collapsed') { top = 0; } // do we need to add debug styles? if (this.config.debug) { top = top + 5; } // set correct position and show this.messages.css('top', -height).show(); // error handling this.messages.removeClass('cms-messages-error'); if (error) { this.messages.addClass('cms-messages-error'); } // dir should be left, center, right dir = dir || 'center'; // set correct direction and animation switch (dir) { case 'left': this.messages.css({ 'top': top, 'left': -width, 'right': 'auto', 'margin-left': 0 }); this.messages.animate({ 'left': 0 }); break; case 'right': this.messages.css({ 'top': top, 'right': -width, 'left': 'auto', 'margin-left': 0 }); this.messages.animate({ 'right': 0 }); break; default: this.messages.css({ 'left': '50%', 'right': 'auto', 'margin-left': -(width / 2) }); this.messages.animate({ 'top': top }); } // cancel autohide if delay is 0 if (delay === 0) { close.show(); return false; } // add delay to hide this.timer = setTimeout(function () { that.closeMessage(); }, delay || this.options.messageDelay); }, closeMessage: function () { this.messages.fadeOut(300); // unlock toolbar this._lock(false); }, openAjax: function (url, post, text, callback, onSuccess) { var that = this; // check if we have a confirmation text var question = (text) ? confirm(text) : true; // cancel if question has been denied if (!question) { return false; } // set loader this._loader(true); $.ajax({ 'type': 'POST', 'url': url, 'data': (post) ? JSON.parse(post) : {}, 'success': function (response) { CMS.API.locked = false; if (callback) { callback(that, response); that._loader(false); } else if (onSuccess) { CMS.API.Helpers.reloadBrowser(onSuccess, false, true); } else { // reload CMS.API.Helpers.reloadBrowser(false, false, true); } }, 'error': function (jqXHR) { CMS.API.locked = false; that.showError(jqXHR.response + ' | ' + jqXHR.status + ' ' + jqXHR.statusText); } }); }, showError: function (msg, reload) { this.openMessage(msg, 'center', 0, true); // force reload if param is passed if (reload) { CMS.API.Helpers.reloadBrowser(false, this.options.messageDelay); } }, // private methods _showToolbar: function (speed, init) { this.toolbarTrigger.addClass('cms-toolbar-trigger-expanded'); this.toolbar.slideDown(speed); // animate html this.body.animate({ 'margin-top': (this.config.debug) ? 
35 : 30 }, (init) ? 0 : speed, function () { $(this).addClass('cms-toolbar-expanded'); }); // set messages top to toolbar height this.messages.css('top', 31); // set new settings this.settings.toolbar = 'expanded'; if (!init) { this.settings = this.setSettings(this.settings); } }, _hideToolbar: function (speed, init) { // cancel if sideframe is active if (this.lockToolbar) { return false; } this.toolbarTrigger.removeClass('cms-toolbar-trigger-expanded'); this.toolbar.slideUp(speed); // animate html this.body.removeClass('cms-toolbar-expanded') .animate({ 'margin-top': (this.config.debug) ? 5 : 0 }, speed); // set messages top to 0 this.messages.css('top', 0); // set new settings this.settings.toolbar = 'collapsed'; if (!init) { this.settings = this.setSettings(this.settings); } }, _setSwitcher: function (el) { // save local vars var active = el.hasClass('cms-toolbar-item-switch-active'); var anchor = el.find('a'); var knob = el.find('.cms-toolbar-item-switch-knob'); var duration = 300; // prevent if switchopstion is passed if (this.options.preventSwitch) { this.openMessage(this.options.preventSwitchMessage, 'right'); return false; } // determin what to trigger if (active) { knob.animate({ 'right': anchor.outerWidth(true) - (knob.outerWidth(true) + 2) }, duration); // move anchor behind the knob anchor.css('z-index', 1).animate({ 'padding-top': 6, 'padding-right': 14, 'padding-bottom': 4, 'padding-left': 28 }, duration); } else { knob.animate({ 'left': anchor.outerWidth(true) - (knob.outerWidth(true) + 2) }, duration); // move anchor behind the knob anchor.css('z-index', 1).animate({ 'padding-top': 6, 'padding-right': 28, 'padding-bottom': 4, 'padding-left': 14 }, duration); } // reload setTimeout(function () { window.location.href = anchor.attr('href'); }, duration); }, _delegate: function (el) { // save local vars var target = el.data('rel'); switch (target) { case 'modal': var modal = new CMS.Modal({'onClose': el.data('on-close')}); modal.open(el.attr('href'), el.data('name')); break; case 'message': this.openMessage(el.data('text')); break; case 'sideframe': var sideframe = new CMS.Sideframe({'onClose': el.data('on-close')}); sideframe.open(el.attr('href'), true); break; case 'ajax': this.openAjax(el.attr('href'), JSON.stringify( el.data('post')), el.data('text'), null, el.data('on-success') ); break; default: window.location.href = el.attr('href'); } }, _lock: function (lock) { if (lock) { this.lockToolbar = true; // make button look disabled this.toolbarTrigger.css('opacity', 0.2); } else { this.lockToolbar = false; // make button look disabled this.toolbarTrigger.css('opacity', 1); } }, _loader: function (loader) { if (loader) { this.toolbarTrigger.addClass('cms-toolbar-loader'); } else { this.toolbarTrigger.removeClass('cms-toolbar-loader'); } }, _debug: function () { var that = this; var timeout = 1000; var timer = function () {}; // bind message event var debug = this.container.find('.cms-debug-bar'); debug.bind('mouseenter mouseleave', function (e) { clearTimeout(timer); if (e.type === 'mouseenter') { timer = setTimeout(function () { that.openMessage(that.config.lang.debug); }, timeout); } }); }, _screenBlock: function () { var interval = 20; var blocker = this.screenBlock; var sideframe = $('.cms-sideframe'); // automatically resize screenblock window according to given attributes $(window).on('resize.cms.screenblock', function () { var width = $(this).width() - sideframe.width(); blocker.css({ 'width': width, 'height': $(window).height() }); }).trigger('resize'); // set update 
interval setInterval(function () { $(window).trigger('resize.cms.screenblock'); }, interval); } }); }); })(CMS.$);
josjevv/django-cms
cms/static/cms/js/modules/cms.toolbar.js
JavaScript
bsd-3-clause
22,476
from __future__ import absolute_import

import psycopg2 as Database

# Some of these imports are unused, but they are inherited from other engines
# and should be available as part of the backend ``base.py`` namespace.
from django.db.backends.postgresql_psycopg2.base import (  # NOQA
    DatabaseWrapper, DatabaseFeatures, DatabaseOperations, DatabaseClient,
    DatabaseCreation, DatabaseIntrospection
)

from .decorators import (
    capture_transaction_exceptions, auto_reconnect_cursor,
    auto_reconnect_connection, less_shitty_error_messages
)

__all__ = ('DatabaseWrapper', 'DatabaseFeatures', 'DatabaseOperations',
           'DatabaseClient', 'DatabaseCreation', 'DatabaseIntrospection')


class CursorWrapper(object):
    """
    A wrapper around the postgresql_psycopg2 backend which handles various
    events from cursors, such as auto reconnects and lazy time zone evaluation.
    """
    def __init__(self, db, cursor):
        self.db = db
        self.cursor = cursor

    def __getattr__(self, attr):
        return getattr(self.cursor, attr)

    def __iter__(self):
        return iter(self.cursor)

    @capture_transaction_exceptions
    @auto_reconnect_cursor
    @less_shitty_error_messages
    def execute(self, sql, params=None):
        if params is not None:
            return self.cursor.execute(sql, params)
        return self.cursor.execute(sql)

    @capture_transaction_exceptions
    @auto_reconnect_cursor
    @less_shitty_error_messages
    def executemany(self, sql, paramlist=()):
        return self.cursor.executemany(sql, paramlist)


class DatabaseWrapper(DatabaseWrapper):
    @auto_reconnect_connection
    def _set_isolation_level(self, level):
        return super(DatabaseWrapper, self)._set_isolation_level(level)

    @auto_reconnect_connection
    def _cursor(self, *args, **kwargs):
        cursor = super(DatabaseWrapper, self)._cursor()
        return CursorWrapper(self, cursor)

    def close(self, reconnect=False):
        """
        This ensures we don't error if the connection has already been closed.
        """
        if self.connection is not None:
            if not self.connection.closed:
                try:
                    self.connection.close()
                except Database.InterfaceError:
                    # connection was already closed by something
                    # like pgbouncer idle timeout.
                    pass
            self.connection = None


class DatabaseFeatures(DatabaseFeatures):
    can_return_id_from_insert = True

    def __init__(self, connection):
        self.connection = connection
Kryz/sentry
src/sentry/db/postgres/base.py
Python
bsd-3-clause
2,640
import foo from "foo";
import {default as foo2} from "foo";
hawkrives/6to5
test/fixtures/transformation/es6-modules-umd/imports-default/actual.js
JavaScript
mit
60
(function webpackUniversalModuleDefinition(root, factory) { if(typeof exports === 'object' && typeof module === 'object') module.exports = factory(require("react"), require("react-onclickoutside"), require("moment")); else if(typeof define === 'function' && define.amd) define(["react", "react-onclickoutside", "moment"], factory); else if(typeof exports === 'object') exports["DatePicker"] = factory(require("react"), require("react-onclickoutside"), require("moment")); else root["DatePicker"] = factory(root["React"], root["onClickOutside"], root["moment"]); })(this, function(__WEBPACK_EXTERNAL_MODULE_3__, __WEBPACK_EXTERNAL_MODULE_11__, __WEBPACK_EXTERNAL_MODULE_13__) { return /******/ (function(modules) { // webpackBootstrap /******/ // The module cache /******/ var installedModules = {}; /******/ // The require function /******/ function __webpack_require__(moduleId) { /******/ // Check if module is in cache /******/ if(installedModules[moduleId]) /******/ return installedModules[moduleId].exports; /******/ // Create a new module (and put it into the cache) /******/ var module = installedModules[moduleId] = { /******/ exports: {}, /******/ id: moduleId, /******/ loaded: false /******/ }; /******/ // Execute the module function /******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); /******/ // Flag the module as loaded /******/ module.loaded = true; /******/ // Return the exports of the module /******/ return module.exports; /******/ } /******/ // expose the modules object (__webpack_modules__) /******/ __webpack_require__.m = modules; /******/ // expose the module cache /******/ __webpack_require__.c = installedModules; /******/ // __webpack_public_path__ /******/ __webpack_require__.p = ""; /******/ // Load entry module and return exports /******/ return __webpack_require__(0); /******/ }) /************************************************************************/ /******/ ([ /* 0 */ /***/ (function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); var _calendar = __webpack_require__(1); var _calendar2 = _interopRequireDefault(_calendar); var _react = __webpack_require__(3); var _react2 = _interopRequireDefault(_react); var _propTypes = __webpack_require__(4); var _propTypes2 = _interopRequireDefault(_propTypes); var _popper_component = __webpack_require__(21); var _popper_component2 = _interopRequireDefault(_popper_component); var _classnames2 = __webpack_require__(10); var _classnames3 = _interopRequireDefault(_classnames2); var _date_utils = __webpack_require__(12); var _reactOnclickoutside = __webpack_require__(11); var _reactOnclickoutside2 = _interopRequireDefault(_reactOnclickoutside); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } var outsideClickIgnoreClass = 'react-datepicker-ignore-onclickoutside'; var WrappedCalendar = (0, _reactOnclickoutside2.default)(_calendar2.default); /** * General datepicker component. */ var DatePicker = function (_React$Component) { _inherits(DatePicker, _React$Component); _createClass(DatePicker, null, [{ key: 'defaultProps', get: function get() { return { allowSameDay: false, dateFormat: 'L', dateFormatCalendar: 'MMMM YYYY', onChange: function onChange() {}, disabled: false, disabledKeyboardNavigation: false, dropdownMode: 'scroll', onFocus: function onFocus() {}, onBlur: function onBlur() {}, onKeyDown: function onKeyDown() {}, onSelect: function onSelect() {}, onClickOutside: function onClickOutside() {}, onMonthChange: function onMonthChange() {}, monthsShown: 1, withPortal: false, shouldCloseOnSelect: true, showTimeSelect: false, timeIntervals: 30 }; } }]); function DatePicker(props) { _classCallCheck(this, DatePicker); var _this = _possibleConstructorReturn(this, (DatePicker.__proto__ || Object.getPrototypeOf(DatePicker)).call(this, props)); _this.getPreSelection = function () { return _this.props.openToDate ? (0, _date_utils.newDate)(_this.props.openToDate) : _this.props.selectsEnd && _this.props.startDate ? (0, _date_utils.newDate)(_this.props.startDate) : _this.props.selectsStart && _this.props.endDate ? (0, _date_utils.newDate)(_this.props.endDate) : (0, _date_utils.now)(_this.props.utcOffset); }; _this.calcInitialState = function () { var defaultPreSelection = _this.getPreSelection(); var minDate = (0, _date_utils.getEffectiveMinDate)(_this.props); var maxDate = (0, _date_utils.getEffectiveMaxDate)(_this.props); var boundedPreSelection = minDate && (0, _date_utils.isBefore)(defaultPreSelection, minDate) ? minDate : maxDate && (0, _date_utils.isAfter)(defaultPreSelection, maxDate) ? maxDate : defaultPreSelection; return { open: _this.props.startOpen || false, preventFocus: false, preSelection: _this.props.selected ? (0, _date_utils.newDate)(_this.props.selected) : boundedPreSelection }; }; _this.clearPreventFocusTimeout = function () { if (_this.preventFocusTimeout) { clearTimeout(_this.preventFocusTimeout); } }; _this.setFocus = function () { _this.input.focus(); }; _this.setOpen = function (open) { _this.setState({ open: open, preSelection: open && _this.state.open ? 
_this.state.preSelection : _this.calcInitialState().preSelection }); }; _this.handleFocus = function (event) { if (!_this.state.preventFocus) { _this.props.onFocus(event); _this.setOpen(true); } }; _this.cancelFocusInput = function () { clearTimeout(_this.inputFocusTimeout); _this.inputFocusTimeout = null; }; _this.deferFocusInput = function () { _this.cancelFocusInput(); _this.inputFocusTimeout = setTimeout(function () { return _this.setFocus(); }, 1); }; _this.handleDropdownFocus = function () { _this.cancelFocusInput(); }; _this.handleBlur = function (event) { if (_this.state.open) { _this.deferFocusInput(); } else { _this.props.onBlur(event); } }; _this.handleCalendarClickOutside = function (event) { if (!_this.props.inline) { _this.setOpen(false); } _this.props.onClickOutside(event); if (_this.props.withPortal) { event.preventDefault(); } }; _this.handleChange = function (event) { if (_this.props.onChangeRaw) { _this.props.onChangeRaw(event); if (event.isDefaultPrevented()) { return; } } _this.setState({ inputValue: event.target.value }); var date = (0, _date_utils.parseDate)(event.target.value, _this.props); if (date || !event.target.value) { _this.setSelected(date, event, true); } }; _this.handleSelect = function (date, event) { // Preventing onFocus event to fix issue // https://github.com/Hacker0x01/react-datepicker/issues/628 _this.setState({ preventFocus: true }, function () { _this.preventFocusTimeout = setTimeout(function () { return _this.setState({ preventFocus: false }); }, 50); return _this.preventFocusTimeout; }); _this.setSelected(date, event); if (!_this.props.shouldCloseOnSelect) { _this.setPreSelection(date); } else if (!_this.props.inline) { _this.setOpen(false); } }; _this.setSelected = function (date, event, keepInput) { var changedDate = date; if (changedDate !== null && (0, _date_utils.isDayDisabled)(changedDate, _this.props)) { return; } if (!(0, _date_utils.isSameDay)(_this.props.selected, changedDate) || _this.props.allowSameDay) { if (changedDate !== null) { if (_this.props.selected) { changedDate = (0, _date_utils.setTime)((0, _date_utils.newDate)(changedDate), { hour: (0, _date_utils.getHour)(_this.props.selected), minute: (0, _date_utils.getMinute)(_this.props.selected), second: (0, _date_utils.getSecond)(_this.props.selected) }); } _this.setState({ preSelection: changedDate }); } _this.props.onChange(changedDate, event); } _this.props.onSelect(changedDate, event); if (!keepInput) { _this.setState({ inputValue: null }); } }; _this.setPreSelection = function (date) { var isDateRangePresent = typeof _this.props.minDate !== 'undefined' && typeof _this.props.maxDate !== 'undefined'; var isValidDateSelection = isDateRangePresent && date ? (0, _date_utils.isDayInRange)(date, _this.props.minDate, _this.props.maxDate) : true; if (isValidDateSelection) { _this.setState({ preSelection: date }); } }; _this.handleTimeChange = function (time) { var selected = _this.props.selected ? 
_this.props.selected : _this.getPreSelection(); var changedDate = (0, _date_utils.setTime)((0, _date_utils.cloneDate)(selected), { hour: (0, _date_utils.getHour)(time), minute: (0, _date_utils.getMinute)(time) }); _this.setState({ preSelection: changedDate }); _this.props.onChange(changedDate); }; _this.onInputClick = function () { if (!_this.props.disabled) { _this.setOpen(true); } }; _this.onInputKeyDown = function (event) { _this.props.onKeyDown(event); var eventKey = event.key; if (!_this.state.open && !_this.props.inline) { if (eventKey !== 'Enter' && eventKey !== 'Escape' && eventKey !== 'Tab') { _this.onInputClick(); } return; } var copy = (0, _date_utils.newDate)(_this.state.preSelection); if (eventKey === 'Enter') { event.preventDefault(); if ((0, _date_utils.isMoment)(_this.state.preSelection) || (0, _date_utils.isDate)(_this.state.preSelection)) { _this.handleSelect(copy, event); !_this.props.shouldCloseOnSelect && _this.setPreSelection(copy); } else { _this.setOpen(false); } } else if (eventKey === 'Escape') { event.preventDefault(); _this.setOpen(false); } else if (eventKey === 'Tab') { _this.setOpen(false); } else if (!_this.props.disabledKeyboardNavigation) { var newSelection = void 0; switch (eventKey) { case 'ArrowLeft': event.preventDefault(); newSelection = (0, _date_utils.subtractDays)(copy, 1); break; case 'ArrowRight': event.preventDefault(); newSelection = (0, _date_utils.addDays)(copy, 1); break; case 'ArrowUp': event.preventDefault(); newSelection = (0, _date_utils.subtractWeeks)(copy, 1); break; case 'ArrowDown': event.preventDefault(); newSelection = (0, _date_utils.addWeeks)(copy, 1); break; case 'PageUp': event.preventDefault(); newSelection = (0, _date_utils.subtractMonths)(copy, 1); break; case 'PageDown': event.preventDefault(); newSelection = (0, _date_utils.addMonths)(copy, 1); break; case 'Home': event.preventDefault(); newSelection = (0, _date_utils.subtractYears)(copy, 1); break; case 'End': event.preventDefault(); newSelection = (0, _date_utils.addYears)(copy, 1); break; } _this.setPreSelection(newSelection); } }; _this.onClearClick = function (event) { event.preventDefault(); _this.props.onChange(null, event); _this.setState({ inputValue: null }); }; _this.renderCalendar = function () { if (!_this.props.inline && (!_this.state.open || _this.props.disabled)) { return null; } return _react2.default.createElement( WrappedCalendar, { ref: function ref(elem) { _this.calendar = elem; }, locale: _this.props.locale, dateFormat: _this.props.dateFormatCalendar, useWeekdaysShort: _this.props.useWeekdaysShort, dropdownMode: _this.props.dropdownMode, selected: _this.props.selected, preSelection: _this.state.preSelection, onSelect: _this.handleSelect, onWeekSelect: _this.props.onWeekSelect, openToDate: _this.props.openToDate, minDate: _this.props.minDate, maxDate: _this.props.maxDate, selectsStart: _this.props.selectsStart, selectsEnd: _this.props.selectsEnd, startDate: _this.props.startDate, endDate: _this.props.endDate, excludeDates: _this.props.excludeDates, filterDate: _this.props.filterDate, onClickOutside: _this.handleCalendarClickOutside, formatWeekNumber: _this.props.formatWeekNumber, highlightDates: _this.props.highlightDates, includeDates: _this.props.includeDates, inline: _this.props.inline, peekNextMonth: _this.props.peekNextMonth, showMonthDropdown: _this.props.showMonthDropdown, showWeekNumbers: _this.props.showWeekNumbers, showYearDropdown: _this.props.showYearDropdown, withPortal: _this.props.withPortal, forceShowMonthNavigation: 
_this.props.forceShowMonthNavigation, scrollableYearDropdown: _this.props.scrollableYearDropdown, todayButton: _this.props.todayButton, weekLabel: _this.props.weekLabel, utcOffset: _this.props.utcOffset, outsideClickIgnoreClass: outsideClickIgnoreClass, fixedHeight: _this.props.fixedHeight, monthsShown: _this.props.monthsShown, onDropdownFocus: _this.handleDropdownFocus, onMonthChange: _this.props.onMonthChange, dayClassName: _this.props.dayClassName, showTimeSelect: _this.props.showTimeSelect, onTimeChange: _this.handleTimeChange, timeFormat: _this.props.timeFormat, timeIntervals: _this.props.timeIntervals, minTime: _this.props.minTime, maxTime: _this.props.maxTime, excludeTimes: _this.props.excludeTimes, className: _this.props.calendarClassName, yearDropdownItemNumber: _this.props.yearDropdownItemNumber }, _this.props.children ); }; _this.renderDateInput = function () { var className = (0, _classnames3.default)(_this.props.className, _defineProperty({}, outsideClickIgnoreClass, _this.state.open)); var customInput = _this.props.customInput || _react2.default.createElement('input', { type: 'text' }); var inputValue = typeof _this.props.value === 'string' ? _this.props.value : typeof _this.state.inputValue === 'string' ? _this.state.inputValue : (0, _date_utils.safeDateFormat)(_this.props.selected, _this.props); return _react2.default.cloneElement(customInput, { ref: function ref(input) { _this.input = input; }, value: inputValue, onBlur: _this.handleBlur, onChange: _this.handleChange, onClick: _this.onInputClick, onFocus: _this.handleFocus, onKeyDown: _this.onInputKeyDown, id: _this.props.id, name: _this.props.name, autoFocus: _this.props.autoFocus, placeholder: _this.props.placeholderText, disabled: _this.props.disabled, autoComplete: _this.props.autoComplete, className: className, title: _this.props.title, readOnly: _this.props.readOnly, required: _this.props.required, tabIndex: _this.props.tabIndex }); }; _this.renderClearButton = function () { if (_this.props.isClearable && _this.props.selected != null) { return _react2.default.createElement('a', { className: 'react-datepicker__close-icon', href: '#', onClick: _this.onClearClick }); } else { return null; } }; _this.state = _this.calcInitialState(); return _this; } _createClass(DatePicker, [{ key: 'componentWillReceiveProps', value: function componentWillReceiveProps(nextProps) { var currentMonth = this.props.selected && (0, _date_utils.getMonth)(this.props.selected); var nextMonth = nextProps.selected && (0, _date_utils.getMonth)(nextProps.selected); if (this.props.inline && currentMonth !== nextMonth) { this.setPreSelection(nextProps.selected); } } }, { key: 'componentWillUnmount', value: function componentWillUnmount() { this.clearPreventFocusTimeout(); } }, { key: 'render', value: function render() { var calendar = this.renderCalendar(); if (this.props.inline && !this.props.withPortal) { return calendar; } if (this.props.withPortal) { return _react2.default.createElement( 'div', null, !this.props.inline ? _react2.default.createElement( 'div', { className: 'react-datepicker__input-container' }, this.renderDateInput(), this.renderClearButton() ) : null, this.state.open || this.props.inline ? 
_react2.default.createElement( 'div', { className: 'react-datepicker__portal' }, calendar ) : null ); } return _react2.default.createElement(_popper_component2.default, { className: this.props.popperClassName, hidePopper: !this.state.open || this.props.disabled, popperModifiers: this.props.popperModifiers, targetComponent: _react2.default.createElement( 'div', { className: 'react-datepicker__input-container' }, this.renderDateInput(), this.renderClearButton() ), popperContainer: this.props.popperContainer, popperComponent: calendar, popperPlacement: this.props.popperPlacement }); } }]); return DatePicker; }(_react2.default.Component); DatePicker.propTypes = { allowSameDay: _propTypes2.default.bool, autoComplete: _propTypes2.default.string, autoFocus: _propTypes2.default.bool, calendarClassName: _propTypes2.default.string, children: _propTypes2.default.node, className: _propTypes2.default.string, customInput: _propTypes2.default.element, dateFormat: _propTypes2.default.oneOfType([// eslint-disable-line react/no-unused-prop-types _propTypes2.default.string, _propTypes2.default.array]), dateFormatCalendar: _propTypes2.default.string, dayClassName: _propTypes2.default.func, disabled: _propTypes2.default.bool, disabledKeyboardNavigation: _propTypes2.default.bool, dropdownMode: _propTypes2.default.oneOf(['scroll', 'select']).isRequired, endDate: _propTypes2.default.object, excludeDates: _propTypes2.default.array, filterDate: _propTypes2.default.func, fixedHeight: _propTypes2.default.bool, formatWeekNumber: _propTypes2.default.func, highlightDates: _propTypes2.default.array, id: _propTypes2.default.string, includeDates: _propTypes2.default.array, inline: _propTypes2.default.bool, isClearable: _propTypes2.default.bool, locale: _propTypes2.default.string, maxDate: _propTypes2.default.object, minDate: _propTypes2.default.object, monthsShown: _propTypes2.default.number, name: _propTypes2.default.string, onBlur: _propTypes2.default.func, onChange: _propTypes2.default.func.isRequired, onSelect: _propTypes2.default.func, onWeekSelect: _propTypes2.default.func, onClickOutside: _propTypes2.default.func, onChangeRaw: _propTypes2.default.func, onFocus: _propTypes2.default.func, onKeyDown: _propTypes2.default.func, onMonthChange: _propTypes2.default.func, openToDate: _propTypes2.default.object, peekNextMonth: _propTypes2.default.bool, placeholderText: _propTypes2.default.string, popperContainer: _propTypes2.default.func, popperClassName: _propTypes2.default.string, // <PopperComponent/> props popperModifiers: _propTypes2.default.object, // <PopperComponent/> props popperPlacement: _propTypes2.default.oneOf(_popper_component.popperPlacementPositions), // <PopperComponent/> props readOnly: _propTypes2.default.bool, required: _propTypes2.default.bool, scrollableYearDropdown: _propTypes2.default.bool, selected: _propTypes2.default.object, selectsEnd: _propTypes2.default.bool, selectsStart: _propTypes2.default.bool, showMonthDropdown: _propTypes2.default.bool, showWeekNumbers: _propTypes2.default.bool, showYearDropdown: _propTypes2.default.bool, forceShowMonthNavigation: _propTypes2.default.bool, startDate: _propTypes2.default.object, startOpen: _propTypes2.default.bool, tabIndex: _propTypes2.default.number, title: _propTypes2.default.string, todayButton: _propTypes2.default.string, useWeekdaysShort: _propTypes2.default.bool, utcOffset: _propTypes2.default.number, value: _propTypes2.default.string, weekLabel: _propTypes2.default.string, withPortal: _propTypes2.default.bool, yearDropdownItemNumber: 
_propTypes2.default.number, shouldCloseOnSelect: _propTypes2.default.bool, showTimeSelect: _propTypes2.default.bool, timeFormat: _propTypes2.default.string, timeIntervals: _propTypes2.default.number, minTime: _propTypes2.default.object, maxTime: _propTypes2.default.object, excludeTimes: _propTypes2.default.array }; exports.default = DatePicker; /***/ }), /* 1 */ /***/ (function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); var _year_dropdown = __webpack_require__(2); var _year_dropdown2 = _interopRequireDefault(_year_dropdown); var _month_dropdown = __webpack_require__(14); var _month_dropdown2 = _interopRequireDefault(_month_dropdown); var _month = __webpack_require__(16); var _month2 = _interopRequireDefault(_month); var _time = __webpack_require__(20); var _time2 = _interopRequireDefault(_time); var _react = __webpack_require__(3); var _react2 = _interopRequireDefault(_react); var _propTypes = __webpack_require__(4); var _propTypes2 = _interopRequireDefault(_propTypes); var _classnames = __webpack_require__(10); var _classnames2 = _interopRequireDefault(_classnames); var _date_utils = __webpack_require__(12); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } var DROPDOWN_FOCUS_CLASSNAMES = ['react-datepicker__year-select', 'react-datepicker__month-select']; var isDropdownSelect = function isDropdownSelect() { var element = arguments.length > 0 && arguments[0] !== undefined ? 
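/*
 * Minimal consumer-side sketch of the DatePicker exported by module 0 above.
 * Illustrative only, not part of the bundle: it assumes the package is consumed
 * as `react-datepicker` and that date values are moment instances, since the
 * date utilities in module 12 below wrap moment.
 *
 *   import moment from 'moment';
 *   import DatePicker from 'react-datepicker';
 *
 *   // inside some component's render(), with `startDate` kept in state:
 *   <DatePicker
 *     selected={this.state.startDate}
 *     onChange={date => this.setState({ startDate: date })}
 *     showYearDropdown
 *     dropdownMode="select"
 *   />
 */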
arguments[0] : {}; var classNames = (element.className || '').split(/\s+/); return DROPDOWN_FOCUS_CLASSNAMES.some(function (testClassname) { return classNames.indexOf(testClassname) >= 0; }); }; var Calendar = function (_React$Component) { _inherits(Calendar, _React$Component); _createClass(Calendar, null, [{ key: 'defaultProps', get: function get() { return { onDropdownFocus: function onDropdownFocus() {}, monthsShown: 1, forceShowMonthNavigation: false }; } }]); function Calendar(props) { _classCallCheck(this, Calendar); var _this = _possibleConstructorReturn(this, (Calendar.__proto__ || Object.getPrototypeOf(Calendar)).call(this, props)); _this.handleClickOutside = function (event) { _this.props.onClickOutside(event); }; _this.handleDropdownFocus = function (event) { if (isDropdownSelect(event.target)) { _this.props.onDropdownFocus(); } }; _this.getDateInView = function () { var _this$props = _this.props, preSelection = _this$props.preSelection, selected = _this$props.selected, openToDate = _this$props.openToDate, utcOffset = _this$props.utcOffset; var minDate = (0, _date_utils.getEffectiveMinDate)(_this.props); var maxDate = (0, _date_utils.getEffectiveMaxDate)(_this.props); var current = (0, _date_utils.now)(utcOffset); var initialDate = openToDate || selected || preSelection; if (initialDate) { return initialDate; } else { if (minDate && (0, _date_utils.isBefore)(current, minDate)) { return minDate; } else if (maxDate && (0, _date_utils.isAfter)(current, maxDate)) { return maxDate; } } return current; }; _this.localizeDate = function (date) { return (0, _date_utils.localizeDate)(date, _this.props.locale); }; _this.increaseMonth = function () { _this.setState({ date: (0, _date_utils.addMonths)((0, _date_utils.cloneDate)(_this.state.date), 1) }, function () { return _this.handleMonthChange(_this.state.date); }); }; _this.decreaseMonth = function () { _this.setState({ date: (0, _date_utils.subtractMonths)((0, _date_utils.cloneDate)(_this.state.date), 1) }, function () { return _this.handleMonthChange(_this.state.date); }); }; _this.handleDayClick = function (day, event) { return _this.props.onSelect(day, event); }; _this.handleDayMouseEnter = function (day) { return _this.setState({ selectingDate: day }); }; _this.handleMonthMouseLeave = function () { return _this.setState({ selectingDate: null }); }; _this.handleMonthChange = function (date) { if (_this.props.onMonthChange) { _this.props.onMonthChange(date); } }; _this.changeYear = function (year) { _this.setState({ date: (0, _date_utils.setYear)((0, _date_utils.cloneDate)(_this.state.date), year) }); }; _this.changeMonth = function (month) { _this.setState({ date: (0, _date_utils.setMonth)((0, _date_utils.cloneDate)(_this.state.date), month) }, function () { return _this.handleMonthChange(_this.state.date); }); }; _this.header = function () { var date = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : _this.state.date; var startOfWeek = (0, _date_utils.getStartOfWeek)((0, _date_utils.cloneDate)(date)); var dayNames = []; if (_this.props.showWeekNumbers) { dayNames.push(_react2.default.createElement( 'div', { key: 'W', className: 'react-datepicker__day-name' }, _this.props.weekLabel || '#' )); } return dayNames.concat([0, 1, 2, 3, 4, 5, 6].map(function (offset) { var day = (0, _date_utils.addDays)((0, _date_utils.cloneDate)(startOfWeek), offset); var localeData = (0, _date_utils.getLocaleData)(day); var weekDayName = _this.props.useWeekdaysShort ? 
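/*
 * getDateInView above decides which month the calendar opens on: openToDate,
 * selected and preSelection are tried in that order; when none is set the
 * current date is used, pushed forward to minDate or back to maxDate if it
 * falls outside the allowed range. increaseMonth/decreaseMonth then move this
 * date and notify onMonthChange via handleMonthChange.
 */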
(0, _date_utils.getWeekdayShortInLocale)(localeData, day) : (0, _date_utils.getWeekdayMinInLocale)(localeData, day); return _react2.default.createElement( 'div', { key: offset, className: 'react-datepicker__day-name' }, weekDayName ); })); }; _this.renderPreviousMonthButton = function () { if (!_this.props.forceShowMonthNavigation && (0, _date_utils.allDaysDisabledBefore)(_this.state.date, 'month', _this.props)) { return; } return _react2.default.createElement('a', { className: 'react-datepicker__navigation react-datepicker__navigation--previous', onClick: _this.decreaseMonth }); }; _this.renderNextMonthButton = function () { if (!_this.props.forceShowMonthNavigation && (0, _date_utils.allDaysDisabledAfter)(_this.state.date, 'month', _this.props)) { return; } var classes = ['react-datepicker__navigation', 'react-datepicker__navigation--next']; if (_this.props.showTimeSelect) { classes.push('react-datepicker__navigation--next--with-time'); } if (_this.props.todayButton) { classes.push('react-datepicker__navigation--next--with-today-button'); } return _react2.default.createElement('a', { className: classes.join(' '), onClick: _this.increaseMonth }); }; _this.renderCurrentMonth = function () { var date = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : _this.state.date; var classes = ['react-datepicker__current-month']; if (_this.props.showYearDropdown) { classes.push('react-datepicker__current-month--hasYearDropdown'); } if (_this.props.showMonthDropdown) { classes.push('react-datepicker__current-month--hasMonthDropdown'); } return _react2.default.createElement( 'div', { className: classes.join(' ') }, (0, _date_utils.formatDate)(date, _this.props.dateFormat) ); }; _this.renderYearDropdown = function () { var overrideHide = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false; if (!_this.props.showYearDropdown || overrideHide) { return; } return _react2.default.createElement(_year_dropdown2.default, { dropdownMode: _this.props.dropdownMode, onChange: _this.changeYear, minDate: _this.props.minDate, maxDate: _this.props.maxDate, year: (0, _date_utils.getYear)(_this.state.date), scrollableYearDropdown: _this.props.scrollableYearDropdown, yearDropdownItemNumber: _this.props.yearDropdownItemNumber }); }; _this.renderMonthDropdown = function () { var overrideHide = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : false; if (!_this.props.showMonthDropdown) { return; } return _react2.default.createElement(_month_dropdown2.default, { dropdownMode: _this.props.dropdownMode, locale: _this.props.locale, dateFormat: _this.props.dateFormat, onChange: _this.changeMonth, month: (0, _date_utils.getMonth)(_this.state.date) }); }; _this.renderTodayButton = function () { if (!_this.props.todayButton) { return; } return _react2.default.createElement( 'div', { className: 'react-datepicker__today-button', onClick: function onClick(e) { return _this.props.onSelect((0, _date_utils.getStartOfDate)((0, _date_utils.now)(_this.props.utcOffset)), e); } }, _this.props.todayButton ); }; _this.renderMonths = function () { var monthList = []; for (var i = 0; i < _this.props.monthsShown; ++i) { var monthDate = (0, _date_utils.addMonths)((0, _date_utils.cloneDate)(_this.state.date), i); var monthKey = 'month-' + i; monthList.push(_react2.default.createElement( 'div', { key: monthKey, ref: function ref(div) { _this.monthContainer = div; }, className: 'react-datepicker__month-container' }, _react2.default.createElement( 'div', { className: 'react-datepicker__header' }, _this.renderCurrentMonth(monthDate), _react2.default.createElement( 'div', { className: 'react-datepicker__header__dropdown react-datepicker__header__dropdown--' + _this.props.dropdownMode, onFocus: _this.handleDropdownFocus }, _this.renderMonthDropdown(i !== 0), _this.renderYearDropdown(i !== 0) ), _react2.default.createElement( 'div', { className: 'react-datepicker__day-names' }, _this.header(monthDate) ) ), _react2.default.createElement(_month2.default, { day: monthDate, dayClassName: _this.props.dayClassName, onDayClick: _this.handleDayClick, onDayMouseEnter: _this.handleDayMouseEnter, onMouseLeave: _this.handleMonthMouseLeave, onWeekSelect: _this.props.onWeekSelect, formatWeekNumber: _this.props.formatWeekNumber, minDate: _this.props.minDate, maxDate: _this.props.maxDate, excludeDates: _this.props.excludeDates, highlightDates: _this.props.highlightDates, selectingDate: _this.state.selectingDate, includeDates: _this.props.includeDates, inline: _this.props.inline, fixedHeight: _this.props.fixedHeight, filterDate: _this.props.filterDate, preSelection: _this.props.preSelection, selected: _this.props.selected, selectsStart: _this.props.selectsStart, selectsEnd: _this.props.selectsEnd, showWeekNumbers: _this.props.showWeekNumbers, startDate: _this.props.startDate, endDate: _this.props.endDate, peekNextMonth: _this.props.peekNextMonth, utcOffset: _this.props.utcOffset }) )); } return monthList; }; _this.renderTimeSection = function () { if (_this.props.showTimeSelect) { return _react2.default.createElement(_time2.default, { selected: _this.props.selected, onChange: _this.props.onTimeChange, format: _this.props.timeFormat, intervals: _this.props.timeIntervals, minTime: _this.props.minTime, maxTime: _this.props.maxTime, excludeTimes: _this.props.excludeTimes, todayButton: _this.props.todayButton, showMonthDropdown: _this.props.showMonthDropdown, showYearDropdown: _this.props.showYearDropdown, withPortal: _this.props.withPortal, monthRef: _this.state.monthContainer }); } else { return; } }; _this.state = { date: _this.localizeDate(_this.getDateInView()), selectingDate: null, monthContainer: _this.monthContainer }; return _this; } _createClass(Calendar, [{ key: 'componentDidMount', value: function componentDidMount() { var _this2 = this; /* monthContainer height is needed in time component to determine the height for the ul in the time component. 
setState here so height is given after final component layout is rendered */ if (this.props.showTimeSelect) { this.assignMonthContainer = function () { _this2.setState({ monthContainer: _this2.monthContainer }); }(); } } }, { key: 'componentWillReceiveProps', value: function componentWillReceiveProps(nextProps) { if (nextProps.preSelection && !(0, _date_utils.isSameDay)(nextProps.preSelection, this.props.preSelection)) { this.setState({ date: this.localizeDate(nextProps.preSelection) }); } else if (nextProps.openToDate && !(0, _date_utils.isSameDay)(nextProps.openToDate, this.props.openToDate)) { this.setState({ date: this.localizeDate(nextProps.openToDate) }); } } }, { key: 'render', value: function render() { return _react2.default.createElement( 'div', { className: (0, _classnames2.default)('react-datepicker', this.props.className) }, _react2.default.createElement('div', { className: 'react-datepicker__triangle' }), this.renderPreviousMonthButton(), this.renderNextMonthButton(), this.renderMonths(), this.renderTodayButton(), this.renderTimeSection(), this.props.children ); } }]); return Calendar; }(_react2.default.Component); Calendar.propTypes = { className: _propTypes2.default.string, children: _propTypes2.default.node, dateFormat: _propTypes2.default.oneOfType([_propTypes2.default.string, _propTypes2.default.array]).isRequired, dayClassName: _propTypes2.default.func, dropdownMode: _propTypes2.default.oneOf(['scroll', 'select']).isRequired, endDate: _propTypes2.default.object, excludeDates: _propTypes2.default.array, filterDate: _propTypes2.default.func, fixedHeight: _propTypes2.default.bool, formatWeekNumber: _propTypes2.default.func, highlightDates: _propTypes2.default.array, includeDates: _propTypes2.default.array, inline: _propTypes2.default.bool, locale: _propTypes2.default.string, maxDate: _propTypes2.default.object, minDate: _propTypes2.default.object, monthsShown: _propTypes2.default.number, onClickOutside: _propTypes2.default.func.isRequired, onMonthChange: _propTypes2.default.func, forceShowMonthNavigation: _propTypes2.default.bool, onDropdownFocus: _propTypes2.default.func, onSelect: _propTypes2.default.func.isRequired, onWeekSelect: _propTypes2.default.func, showTimeSelect: _propTypes2.default.bool, timeFormat: _propTypes2.default.string, timeIntervals: _propTypes2.default.number, onTimeChange: _propTypes2.default.func, minTime: _propTypes2.default.object, maxTime: _propTypes2.default.object, excludeTimes: _propTypes2.default.array, openToDate: _propTypes2.default.object, peekNextMonth: _propTypes2.default.bool, scrollableYearDropdown: _propTypes2.default.bool, preSelection: _propTypes2.default.object, selected: _propTypes2.default.object, selectsEnd: _propTypes2.default.bool, selectsStart: _propTypes2.default.bool, showMonthDropdown: _propTypes2.default.bool, showWeekNumbers: _propTypes2.default.bool, showYearDropdown: _propTypes2.default.bool, startDate: _propTypes2.default.object, todayButton: _propTypes2.default.string, useWeekdaysShort: _propTypes2.default.bool, withPortal: _propTypes2.default.bool, utcOffset: _propTypes2.default.number, weekLabel: _propTypes2.default.string, yearDropdownItemNumber: _propTypes2.default.number }; exports.default = Calendar; /***/ }), /* 2 */ /***/ (function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable 
= descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); var _react = __webpack_require__(3); var _react2 = _interopRequireDefault(_react); var _propTypes = __webpack_require__(4); var _propTypes2 = _interopRequireDefault(_propTypes); var _year_dropdown_options = __webpack_require__(9); var _year_dropdown_options2 = _interopRequireDefault(_year_dropdown_options); var _reactOnclickoutside = __webpack_require__(11); var _reactOnclickoutside2 = _interopRequireDefault(_reactOnclickoutside); var _date_utils = __webpack_require__(12); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } var WrappedYearDropdownOptions = (0, _reactOnclickoutside2.default)(_year_dropdown_options2.default); var YearDropdown = function (_React$Component) { _inherits(YearDropdown, _React$Component); function YearDropdown() { var _ref; var _temp, _this, _ret; _classCallCheck(this, YearDropdown); for (var _len = arguments.length, args = Array(_len), _key = 0; _key < _len; _key++) { args[_key] = arguments[_key]; } return _ret = (_temp = (_this = _possibleConstructorReturn(this, (_ref = YearDropdown.__proto__ || Object.getPrototypeOf(YearDropdown)).call.apply(_ref, [this].concat(args))), _this), _this.state = { dropdownVisible: false }, _this.renderSelectOptions = function () { var minYear = _this.props.minDate ? (0, _date_utils.getYear)(_this.props.minDate) : 1900; var maxYear = _this.props.maxDate ? (0, _date_utils.getYear)(_this.props.maxDate) : 2100; var options = []; for (var i = minYear; i <= maxYear; i++) { options.push(_react2.default.createElement( 'option', { key: i, value: i }, i )); } return options; }, _this.onSelectChange = function (e) { _this.onChange(e.target.value); }, _this.renderSelectMode = function () { return _react2.default.createElement( 'select', { value: _this.props.year, className: 'react-datepicker__year-select', onChange: _this.onSelectChange }, _this.renderSelectOptions() ); }, _this.renderReadView = function (visible) { return _react2.default.createElement( 'div', { key: 'read', style: { visibility: visible ? 
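/*
 * YearDropdown (this module): in "select" mode the <select> lists every year
 * from minDate's year (default 1900) through maxDate's year (default 2100);
 * in "scroll" mode a read-only view toggles the WrappedYearDropdownOptions
 * list from module 9. onChange is ignored when the chosen year is unchanged.
 */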
'visible' : 'hidden' }, className: 'react-datepicker__year-read-view', onClick: _this.toggleDropdown }, _react2.default.createElement('span', { className: 'react-datepicker__year-read-view--down-arrow' }), _react2.default.createElement( 'span', { className: 'react-datepicker__year-read-view--selected-year' }, _this.props.year ) ); }, _this.renderDropdown = function () { return _react2.default.createElement(WrappedYearDropdownOptions, { key: 'dropdown', ref: 'options', year: _this.props.year, onChange: _this.onChange, onCancel: _this.toggleDropdown, minDate: _this.props.minDate, maxDate: _this.props.maxDate, scrollableYearDropdown: _this.props.scrollableYearDropdown, yearDropdownItemNumber: _this.props.yearDropdownItemNumber }); }, _this.renderScrollMode = function () { var dropdownVisible = _this.state.dropdownVisible; var result = [_this.renderReadView(!dropdownVisible)]; if (dropdownVisible) { result.unshift(_this.renderDropdown()); } return result; }, _this.onChange = function (year) { _this.toggleDropdown(); if (year === _this.props.year) return; _this.props.onChange(year); }, _this.toggleDropdown = function () { _this.setState({ dropdownVisible: !_this.state.dropdownVisible }); }, _temp), _possibleConstructorReturn(_this, _ret); } _createClass(YearDropdown, [{ key: 'render', value: function render() { var renderedDropdown = void 0; switch (this.props.dropdownMode) { case 'scroll': renderedDropdown = this.renderScrollMode(); break; case 'select': renderedDropdown = this.renderSelectMode(); break; } return _react2.default.createElement( 'div', { className: 'react-datepicker__year-dropdown-container react-datepicker__year-dropdown-container--' + this.props.dropdownMode }, renderedDropdown ); } }]); return YearDropdown; }(_react2.default.Component); YearDropdown.propTypes = { dropdownMode: _propTypes2.default.oneOf(['scroll', 'select']).isRequired, maxDate: _propTypes2.default.object, minDate: _propTypes2.default.object, onChange: _propTypes2.default.func.isRequired, scrollableYearDropdown: _propTypes2.default.bool, year: _propTypes2.default.number.isRequired, yearDropdownItemNumber: _propTypes2.default.number }; exports.default = YearDropdown; /***/ }), /* 3 */ /***/ (function(module, exports) { module.exports = __WEBPACK_EXTERNAL_MODULE_3__; /***/ }), /* 4 */ /***/ (function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. */ if (false) { var REACT_ELEMENT_TYPE = (typeof Symbol === 'function' && Symbol.for && Symbol.for('react.element')) || 0xeac7; var isValidElement = function(object) { return typeof object === 'object' && object !== null && object.$$typeof === REACT_ELEMENT_TYPE; }; // By explicitly using `prop-types` you are opting into new development behavior. // http://fb.me/prop-types-in-prod var throwOnDirectAccess = true; module.exports = require('./factoryWithTypeCheckers')(isValidElement, throwOnDirectAccess); } else { // By explicitly using `prop-types` you are opting into new production behavior. // http://fb.me/prop-types-in-prod module.exports = __webpack_require__(5)(); } /***/ }), /* 5 */ /***/ (function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. 
* * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. */ 'use strict'; var emptyFunction = __webpack_require__(6); var invariant = __webpack_require__(7); var ReactPropTypesSecret = __webpack_require__(8); module.exports = function() { function shim(props, propName, componentName, location, propFullName, secret) { if (secret === ReactPropTypesSecret) { // It is still safe when called from React. return; } invariant( false, 'Calling PropTypes validators directly is not supported by the `prop-types` package. ' + 'Use PropTypes.checkPropTypes() to call them. ' + 'Read more at http://fb.me/use-check-prop-types' ); }; shim.isRequired = shim; function getShim() { return shim; }; // Important! // Keep this list in sync with production version in `./factoryWithTypeCheckers.js`. var ReactPropTypes = { array: shim, bool: shim, func: shim, number: shim, object: shim, string: shim, symbol: shim, any: shim, arrayOf: getShim, element: shim, instanceOf: getShim, node: shim, objectOf: getShim, oneOf: getShim, oneOfType: getShim, shape: getShim }; ReactPropTypes.checkPropTypes = emptyFunction; ReactPropTypes.PropTypes = ReactPropTypes; return ReactPropTypes; }; /***/ }), /* 6 */ /***/ (function(module, exports) { "use strict"; /** * Copyright (c) 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * */ function makeEmptyFunction(arg) { return function () { return arg; }; } /** * This function accepts and discards inputs; it has no side effects. This is * primarily useful idiomatically for overridable function endpoints which * always need to be callable, since JS lacks a null-call idiom ala Cocoa. */ var emptyFunction = function emptyFunction() {}; emptyFunction.thatReturns = makeEmptyFunction; emptyFunction.thatReturnsFalse = makeEmptyFunction(false); emptyFunction.thatReturnsTrue = makeEmptyFunction(true); emptyFunction.thatReturnsNull = makeEmptyFunction(null); emptyFunction.thatReturnsThis = function () { return this; }; emptyFunction.thatReturnsArgument = function (arg) { return arg; }; module.exports = emptyFunction; /***/ }), /* 7 */ /***/ (function(module, exports, __webpack_require__) { /** * Copyright (c) 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; /** * Use invariant() to assert state which your program assumes to be true. * * Provide sprintf-style format (only %s is supported) and arguments * to provide information about what broke and what you were * expecting. * * The invariant message will be stripped in production, but the invariant * will remain to ensure logic does not differ in production. 
*/ var validateFormat = function validateFormat(format) {}; if (false) { validateFormat = function validateFormat(format) { if (format === undefined) { throw new Error('invariant requires an error message argument'); } }; } function invariant(condition, format, a, b, c, d, e, f) { validateFormat(format); if (!condition) { var error; if (format === undefined) { error = new Error('Minified exception occurred; use the non-minified dev environment ' + 'for the full error message and additional helpful warnings.'); } else { var args = [a, b, c, d, e, f]; var argIndex = 0; error = new Error(format.replace(/%s/g, function () { return args[argIndex++]; })); error.name = 'Invariant Violation'; } error.framesToPop = 1; // we don't care about invariant's own frame throw error; } } module.exports = invariant; /***/ }), /* 8 */ /***/ (function(module, exports) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. */ 'use strict'; var ReactPropTypesSecret = 'SECRET_DO_NOT_PASS_THIS_OR_YOU_WILL_BE_FIRED'; module.exports = ReactPropTypesSecret; /***/ }), /* 9 */ /***/ (function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); var _react = __webpack_require__(3); var _react2 = _interopRequireDefault(_react); var _propTypes = __webpack_require__(4); var _propTypes2 = _interopRequireDefault(_propTypes); var _classnames = __webpack_require__(10); var _classnames2 = _interopRequireDefault(_classnames); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? 
Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } function generateYears(year, noOfYear, minDate, maxDate) { var list = []; for (var i = 0; i < 2 * noOfYear + 1; i++) { var newYear = year + noOfYear - i; var isInRange = true; if (minDate) { isInRange = minDate.year() <= newYear; } if (maxDate && isInRange) { isInRange = maxDate.year() >= newYear; } if (isInRange) { list.push(newYear); } } return list; } var YearDropdownOptions = function (_React$Component) { _inherits(YearDropdownOptions, _React$Component); function YearDropdownOptions(props) { _classCallCheck(this, YearDropdownOptions); var _this = _possibleConstructorReturn(this, (YearDropdownOptions.__proto__ || Object.getPrototypeOf(YearDropdownOptions)).call(this, props)); _this.renderOptions = function () { var selectedYear = _this.props.year; var options = _this.state.yearsList.map(function (year) { return _react2.default.createElement( 'div', { className: 'react-datepicker__year-option', key: year, ref: year, onClick: _this.onChange.bind(_this, year) }, selectedYear === year ? _react2.default.createElement( 'span', { className: 'react-datepicker__year-option--selected' }, '\u2713' ) : '', year ); }); var minYear = _this.props.minDate ? _this.props.minDate.year() : null; var maxYear = _this.props.maxDate ? _this.props.maxDate.year() : null; if (!maxYear || !_this.state.yearsList.find(function (year) { return year === maxYear; })) { options.unshift(_react2.default.createElement( 'div', { className: 'react-datepicker__year-option', ref: 'upcoming', key: 'upcoming', onClick: _this.incrementYears }, _react2.default.createElement('a', { className: 'react-datepicker__navigation react-datepicker__navigation--years react-datepicker__navigation--years-upcoming' }) )); } if (!minYear || !_this.state.yearsList.find(function (year) { return year === minYear; })) { options.push(_react2.default.createElement( 'div', { className: 'react-datepicker__year-option', ref: 'previous', key: 'previous', onClick: _this.decrementYears }, _react2.default.createElement('a', { className: 'react-datepicker__navigation react-datepicker__navigation--years react-datepicker__navigation--years-previous' }) )); } return options; }; _this.onChange = function (year) { _this.props.onChange(year); }; _this.handleClickOutside = function () { _this.props.onCancel(); }; _this.shiftYears = function (amount) { var years = _this.state.yearsList.map(function (year) { return year + amount; }); _this.setState({ yearsList: years }); }; _this.incrementYears = function () { return _this.shiftYears(1); }; _this.decrementYears = function () { return _this.shiftYears(-1); }; var yearDropdownItemNumber = props.yearDropdownItemNumber, scrollableYearDropdown = props.scrollableYearDropdown; var noOfYear = yearDropdownItemNumber || (scrollableYearDropdown ? 
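/*
 * Worked example for generateYears above: it returns a descending window of
 * 2 * noOfYear + 1 years centred on `year`, dropping anything outside the
 * minDate/maxDate years, e.g. generateYears(2017, 2) yields
 * [2019, 2018, 2017, 2016, 2015]. The window size chosen here is
 * yearDropdownItemNumber when provided, otherwise 10 for a scrollable
 * dropdown and 5 for the plain one.
 */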
10 : 5); _this.state = { yearsList: generateYears(_this.props.year, noOfYear, _this.props.minDate, _this.props.maxDate) }; return _this; } _createClass(YearDropdownOptions, [{ key: 'render', value: function render() { var dropdownClass = (0, _classnames2.default)({ 'react-datepicker__year-dropdown': true, 'react-datepicker__year-dropdown--scrollable': this.props.scrollableYearDropdown }); return _react2.default.createElement( 'div', { className: dropdownClass }, this.renderOptions() ); } }]); return YearDropdownOptions; }(_react2.default.Component); YearDropdownOptions.propTypes = { minDate: _propTypes2.default.object, maxDate: _propTypes2.default.object, onCancel: _propTypes2.default.func.isRequired, onChange: _propTypes2.default.func.isRequired, scrollableYearDropdown: _propTypes2.default.bool, year: _propTypes2.default.number.isRequired, yearDropdownItemNumber: _propTypes2.default.number }; exports.default = YearDropdownOptions; /***/ }), /* 10 */ /***/ (function(module, exports, __webpack_require__) { var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;/*! Copyright (c) 2016 Jed Watson. Licensed under the MIT License (MIT), see http://jedwatson.github.io/classnames */ /* global define */ (function () { 'use strict'; var hasOwn = {}.hasOwnProperty; function classNames () { var classes = []; for (var i = 0; i < arguments.length; i++) { var arg = arguments[i]; if (!arg) continue; var argType = typeof arg; if (argType === 'string' || argType === 'number') { classes.push(arg); } else if (Array.isArray(arg)) { classes.push(classNames.apply(null, arg)); } else if (argType === 'object') { for (var key in arg) { if (hasOwn.call(arg, key) && arg[key]) { classes.push(key); } } } } return classes.join(' '); } if (typeof module !== 'undefined' && module.exports) { module.exports = classNames; } else if (true) { // register as 'classnames', consistent with npm package name !(__WEBPACK_AMD_DEFINE_ARRAY__ = [], __WEBPACK_AMD_DEFINE_RESULT__ = function () { return classNames; }.apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__), __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__)); } else { window.classNames = classNames; } }()); /***/ }), /* 11 */ /***/ (function(module, exports) { module.exports = __WEBPACK_EXTERNAL_MODULE_11__; /***/ }), /* 12 */ /***/ (function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); exports.newDate = newDate; exports.newDateWithOffset = newDateWithOffset; exports.now = now; exports.cloneDate = cloneDate; exports.parseDate = parseDate; exports.isMoment = isMoment; exports.isDate = isDate; exports.formatDate = formatDate; exports.safeDateFormat = safeDateFormat; exports.setTime = setTime; exports.setMonth = setMonth; exports.setYear = setYear; exports.setUTCOffset = setUTCOffset; exports.getSecond = getSecond; exports.getMinute = getMinute; exports.getHour = getHour; exports.getDay = getDay; exports.getWeek = getWeek; exports.getMonth = getMonth; exports.getYear = getYear; exports.getDate = getDate; exports.getUTCOffset = getUTCOffset; exports.getDayOfWeekCode = getDayOfWeekCode; exports.getStartOfDay = getStartOfDay; exports.getStartOfWeek = getStartOfWeek; exports.getStartOfMonth = getStartOfMonth; exports.getStartOfDate = getStartOfDate; exports.getEndOfWeek = getEndOfWeek; exports.getEndOfMonth = getEndOfMonth; exports.addMinutes = addMinutes; exports.addDays = addDays; exports.addWeeks = addWeeks; exports.addMonths = addMonths; exports.addYears = 
addYears; exports.subtractDays = subtractDays; exports.subtractWeeks = subtractWeeks; exports.subtractMonths = subtractMonths; exports.subtractYears = subtractYears; exports.isBefore = isBefore; exports.isAfter = isAfter; exports.equals = equals; exports.isSameMonth = isSameMonth; exports.isSameDay = isSameDay; exports.isSameUtcOffset = isSameUtcOffset; exports.isDayInRange = isDayInRange; exports.getDaysDiff = getDaysDiff; exports.localizeDate = localizeDate; exports.getDefaultLocale = getDefaultLocale; exports.getDefaultLocaleData = getDefaultLocaleData; exports.registerLocale = registerLocale; exports.getLocaleData = getLocaleData; exports.getLocaleDataForLocale = getLocaleDataForLocale; exports.getWeekdayMinInLocale = getWeekdayMinInLocale; exports.getWeekdayShortInLocale = getWeekdayShortInLocale; exports.getMonthInLocale = getMonthInLocale; exports.isDayDisabled = isDayDisabled; exports.isTimeDisabled = isTimeDisabled; exports.isTimeInDisabledRange = isTimeInDisabledRange; exports.allDaysDisabledBefore = allDaysDisabledBefore; exports.allDaysDisabledAfter = allDaysDisabledAfter; exports.getEffectiveMinDate = getEffectiveMinDate; exports.getEffectiveMaxDate = getEffectiveMaxDate; var _moment = __webpack_require__(13); var _moment2 = _interopRequireDefault(_moment); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } var dayOfWeekCodes = { 1: 'mon', 2: 'tue', 3: 'wed', 4: 'thu', 5: 'fri', 6: 'sat', 7: 'sun' // These functions are not exported so // that we avoid magic strings like 'days' };function set(date, unit, to) { return date.set(unit, to); } function add(date, amount, unit) { return date.add(amount, unit); } function subtract(date, amount, unit) { return date.subtract(amount, unit); } function get(date, unit) { return date.get(unit); } function getStartOf(date, unit) { return date.startOf(unit); } function getEndOf(date, unit) { return date.endOf(unit); } function getDiff(date1, date2, unit) { return date1.diff(date2, unit); } function isSame(date1, date2, unit) { return date1.isSame(date2, unit); } // ** Date Constructors ** function newDate(point) { return (0, _moment2.default)(point); } function newDateWithOffset(utcOffset) { return (0, _moment2.default)().utc().utcOffset(utcOffset); } function now(maybeFixedUtcOffset) { if (maybeFixedUtcOffset == null) { return newDate(); } return newDateWithOffset(maybeFixedUtcOffset); } function cloneDate(date) { return date.clone(); } function parseDate(value, _ref) { var dateFormat = _ref.dateFormat, locale = _ref.locale; var m = (0, _moment2.default)(value, dateFormat, locale || _moment2.default.locale(), true); return m.isValid() ? m : null; } // ** Date "Reflection" ** function isMoment(date) { return _moment2.default.isMoment(date); } function isDate(date) { return _moment2.default.isDate(date); } // ** Date Formatting ** function formatDate(date, format) { return date.format(format); } function safeDateFormat(date, _ref2) { var dateFormat = _ref2.dateFormat, locale = _ref2.locale; return date && date.clone().locale(locale || _moment2.default.locale()).format(Array.isArray(dateFormat) ? 
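/*
 * The helpers in this module are thin wrappers over moment. moment's
 * set/add/subtract/startOf mutate the instance they are called on, which is
 * why callers elsewhere in this bundle clone first, e.g.
 *
 *   addMonths(cloneDate(date), 1);   // advances a copy, leaves `date` alone
 *
 * safeDateFormat below returns '' when no date is given and uses the first
 * entry when dateFormat is an array of formats.
 */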
dateFormat[0] : dateFormat) || ''; } // ** Date Setters ** function setTime(date, _ref3) { var hour = _ref3.hour, minute = _ref3.minute, second = _ref3.second; date.set({ hour: hour, minute: minute, second: second }); return date; } function setMonth(date, month) { return set(date, 'month', month); } function setYear(date, year) { return set(date, 'year', year); } function setUTCOffset(date, offset) { return date.utcOffset(offset); } // ** Date Getters ** function getSecond(date) { return get(date, 'second'); } function getMinute(date) { return get(date, 'minute'); } function getHour(date) { return get(date, 'hour'); } // Returns day of week function getDay(date) { return get(date, 'day'); } function getWeek(date) { return get(date, 'week'); } function getMonth(date) { return get(date, 'month'); } function getYear(date) { return get(date, 'year'); } // Returns day of month function getDate(date) { return get(date, 'date'); } function getUTCOffset() { return (0, _moment2.default)().utcOffset(); } function getDayOfWeekCode(day) { return dayOfWeekCodes[day.isoWeekday()]; } // *** Start of *** function getStartOfDay(date) { return getStartOf(date, 'day'); } function getStartOfWeek(date) { return getStartOf(date, 'week'); } function getStartOfMonth(date) { return getStartOf(date, 'month'); } function getStartOfDate(date) { return getStartOf(date, 'date'); } // *** End of *** function getEndOfWeek(date) { return getEndOf(date, 'week'); } function getEndOfMonth(date) { return getEndOf(date, 'month'); } // ** Date Math ** // *** Addition *** function addMinutes(date, amount) { return add(date, amount, 'minutes'); } function addDays(date, amount) { return add(date, amount, 'days'); } function addWeeks(date, amount) { return add(date, amount, 'weeks'); } function addMonths(date, amount) { return add(date, amount, 'months'); } function addYears(date, amount) { return add(date, amount, 'years'); } // *** Subtraction *** function subtractDays(date, amount) { return subtract(date, amount, 'days'); } function subtractWeeks(date, amount) { return subtract(date, amount, 'weeks'); } function subtractMonths(date, amount) { return subtract(date, amount, 'months'); } function subtractYears(date, amount) { return subtract(date, amount, 'years'); } // ** Date Comparison ** function isBefore(date1, date2) { return date1.isBefore(date2); } function isAfter(date1, date2) { return date1.isAfter(date2); } function equals(date1, date2) { return date1.isSame(date2); } function isSameMonth(date1, date2) { return isSame(date1, date2, 'month'); } function isSameDay(moment1, moment2) { if (moment1 && moment2) { return moment1.isSame(moment2, 'day'); } else { return !moment1 && !moment2; } } function isSameUtcOffset(moment1, moment2) { if (moment1 && moment2) { return moment1.utcOffset() === moment2.utcOffset(); } else { return !moment1 && !moment2; } } function isDayInRange(day, startDate, endDate) { var before = startDate.clone().startOf('day').subtract(1, 'seconds'); var after = endDate.clone().startOf('day').add(1, 'seconds'); return day.clone().startOf('day').isBetween(before, after); } // *** Diffing *** function getDaysDiff(date1, date2) { return getDiff(date1, date2, 'days'); } // ** Date Localization ** function localizeDate(date, locale) { return date.clone().locale(locale || _moment2.default.locale()); } function getDefaultLocale() { return _moment2.default.locale(); } function getDefaultLocaleData() { return _moment2.default.localeData(); } function registerLocale(localeName, localeData) { 
_moment2.default.defineLocale(localeName, localeData); } function getLocaleData(date) { return date.localeData(); } function getLocaleDataForLocale(locale) { return _moment2.default.localeData(locale); } function getWeekdayMinInLocale(locale, date) { return locale.weekdaysMin(date); } function getWeekdayShortInLocale(locale, date) { return locale.weekdaysShort(date); } // TODO what is this format exactly? function getMonthInLocale(locale, date, format) { return locale.months(date, format); } // ** Utils for some components ** function isDayDisabled(day) { var _ref4 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, minDate = _ref4.minDate, maxDate = _ref4.maxDate, excludeDates = _ref4.excludeDates, includeDates = _ref4.includeDates, filterDate = _ref4.filterDate; return minDate && day.isBefore(minDate, 'day') || maxDate && day.isAfter(maxDate, 'day') || excludeDates && excludeDates.some(function (excludeDate) { return isSameDay(day, excludeDate); }) || includeDates && !includeDates.some(function (includeDate) { return isSameDay(day, includeDate); }) || filterDate && !filterDate(day.clone()) || false; } function isTimeDisabled(time, disabledTimes) { var l = disabledTimes.length; for (var i = 0; i < l; i++) { if (disabledTimes[i].get('hours') === time.get('hours') && disabledTimes[i].get('minutes') === time.get('minutes')) { return true; } } return false; } function isTimeInDisabledRange(time, _ref5) { var minTime = _ref5.minTime, maxTime = _ref5.maxTime; if (!minTime || !maxTime) { throw new Error('Both minTime and maxTime props required'); } var base = (0, _moment2.default)().hours(0).minutes(0).seconds(0); var baseTime = base.clone().hours(time.get('hours')).minutes(time.get('minutes')); var min = base.clone().hours(minTime.get('hours')).minutes(minTime.get('minutes')); var max = base.clone().hours(maxTime.get('hours')).minutes(maxTime.get('minutes')); return !(baseTime.isSameOrAfter(min) && baseTime.isSameOrBefore(max)); } function allDaysDisabledBefore(day, unit) { var _ref6 = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {}, minDate = _ref6.minDate, includeDates = _ref6.includeDates; var dateBefore = day.clone().subtract(1, unit); return minDate && dateBefore.isBefore(minDate, unit) || includeDates && includeDates.every(function (includeDate) { return dateBefore.isBefore(includeDate, unit); }) || false; } function allDaysDisabledAfter(day, unit) { var _ref7 = arguments.length > 2 && arguments[2] !== undefined ? 
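/*
 * isDayDisabled above OR-combines the individual checks: outside
 * minDate/maxDate, listed in excludeDates, missing from includeDates, or
 * rejected by filterDate. allDaysDisabledBefore/allDaysDisabledAfter step one
 * unit back or forward from the displayed date to decide whether the Calendar
 * in module 1 hides its previous/next navigation arrows.
 */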
arguments[2] : {}, maxDate = _ref7.maxDate, includeDates = _ref7.includeDates; var dateAfter = day.clone().add(1, unit); return maxDate && dateAfter.isAfter(maxDate, unit) || includeDates && includeDates.every(function (includeDate) { return dateAfter.isAfter(includeDate, unit); }) || false; } function getEffectiveMinDate(_ref8) { var minDate = _ref8.minDate, includeDates = _ref8.includeDates; if (includeDates && minDate) { return _moment2.default.min(includeDates.filter(function (includeDate) { return minDate.isSameOrBefore(includeDate, 'day'); })); } else if (includeDates) { return _moment2.default.min(includeDates); } else { return minDate; } } function getEffectiveMaxDate(_ref9) { var maxDate = _ref9.maxDate, includeDates = _ref9.includeDates; if (includeDates && maxDate) { return _moment2.default.max(includeDates.filter(function (includeDate) { return maxDate.isSameOrAfter(includeDate, 'day'); })); } else if (includeDates) { return _moment2.default.max(includeDates); } else { return maxDate; } } /***/ }), /* 13 */ /***/ (function(module, exports) { module.exports = __WEBPACK_EXTERNAL_MODULE_13__; /***/ }), /* 14 */ /***/ (function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); var _react = __webpack_require__(3); var _react2 = _interopRequireDefault(_react); var _propTypes = __webpack_require__(4); var _propTypes2 = _interopRequireDefault(_propTypes); var _month_dropdown_options = __webpack_require__(15); var _month_dropdown_options2 = _interopRequireDefault(_month_dropdown_options); var _reactOnclickoutside = __webpack_require__(11); var _reactOnclickoutside2 = _interopRequireDefault(_reactOnclickoutside); var _date_utils = __webpack_require__(12); var utils = _interopRequireWildcard(_date_utils); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) newObj[key] = obj[key]; } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? 
Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } var WrappedMonthDropdownOptions = (0, _reactOnclickoutside2.default)(_month_dropdown_options2.default); var MonthDropdown = function (_React$Component) { _inherits(MonthDropdown, _React$Component); function MonthDropdown() { var _ref; var _temp, _this, _ret; _classCallCheck(this, MonthDropdown); for (var _len = arguments.length, args = Array(_len), _key = 0; _key < _len; _key++) { args[_key] = arguments[_key]; } return _ret = (_temp = (_this = _possibleConstructorReturn(this, (_ref = MonthDropdown.__proto__ || Object.getPrototypeOf(MonthDropdown)).call.apply(_ref, [this].concat(args))), _this), _this.state = { dropdownVisible: false }, _this.renderSelectOptions = function (monthNames) { return monthNames.map(function (M, i) { return _react2.default.createElement( 'option', { key: i, value: i }, M ); }); }, _this.renderSelectMode = function (monthNames) { return _react2.default.createElement( 'select', { value: _this.props.month, className: 'react-datepicker__month-select', onChange: function onChange(e) { return _this.onChange(e.target.value); } }, _this.renderSelectOptions(monthNames) ); }, _this.renderReadView = function (visible, monthNames) { return _react2.default.createElement( 'div', { key: 'read', style: { visibility: visible ? 'visible' : 'hidden' }, className: 'react-datepicker__month-read-view', onClick: _this.toggleDropdown }, _react2.default.createElement( 'span', { className: 'react-datepicker__month-read-view--selected-month' }, monthNames[_this.props.month] ), _react2.default.createElement('span', { className: 'react-datepicker__month-read-view--down-arrow' }) ); }, _this.renderDropdown = function (monthNames) { return _react2.default.createElement(WrappedMonthDropdownOptions, { key: 'dropdown', ref: 'options', month: _this.props.month, monthNames: monthNames, onChange: _this.onChange, onCancel: _this.toggleDropdown }); }, _this.renderScrollMode = function (monthNames) { var dropdownVisible = _this.state.dropdownVisible; var result = [_this.renderReadView(!dropdownVisible, monthNames)]; if (dropdownVisible) { result.unshift(_this.renderDropdown(monthNames)); } return result; }, _this.onChange = function (month) { _this.toggleDropdown(); if (month !== _this.props.month) { _this.props.onChange(month); } }, _this.toggleDropdown = function () { return _this.setState({ dropdownVisible: !_this.state.dropdownVisible }); }, _temp), _possibleConstructorReturn(_this, _ret); } _createClass(MonthDropdown, [{ key: 'render', value: function render() { var _this2 = this; var localeData = utils.getLocaleDataForLocale(this.props.locale); var monthNames = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11].map(function (M) { return utils.getMonthInLocale(localeData, utils.newDate({ M: M }), _this2.props.dateFormat); }); var renderedDropdown = void 0; switch (this.props.dropdownMode) { case 'scroll': renderedDropdown = this.renderScrollMode(monthNames); break; case 'select': renderedDropdown = this.renderSelectMode(monthNames); break; } return _react2.default.createElement( 'div', { className: 'react-datepicker__month-dropdown-container react-datepicker__month-dropdown-container--' + this.props.dropdownMode }, renderedDropdown ); } }]); return MonthDropdown; }(_react2.default.Component); MonthDropdown.propTypes = { dropdownMode: _propTypes2.default.oneOf(['scroll', 'select']).isRequired, locale: _propTypes2.default.string, dateFormat: _propTypes2.default.string.isRequired, month: _propTypes2.default.number.isRequired, 
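/*
 * MonthDropdown (this module) mirrors YearDropdown: render() localizes the
 * twelve month names via getMonthInLocale for the configured locale, then
 * shows either a plain <select> or the scroll-mode read view that toggles
 * WrappedMonthDropdownOptions.
 */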
onChange: _propTypes2.default.func.isRequired }; exports.default = MonthDropdown; /***/ }), /* 15 */ /***/ (function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); var _react = __webpack_require__(3); var _react2 = _interopRequireDefault(_react); var _propTypes = __webpack_require__(4); var _propTypes2 = _interopRequireDefault(_propTypes); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } var MonthDropdownOptions = function (_React$Component) { _inherits(MonthDropdownOptions, _React$Component); function MonthDropdownOptions() { var _ref; var _temp, _this, _ret; _classCallCheck(this, MonthDropdownOptions); for (var _len = arguments.length, args = Array(_len), _key = 0; _key < _len; _key++) { args[_key] = arguments[_key]; } return _ret = (_temp = (_this = _possibleConstructorReturn(this, (_ref = MonthDropdownOptions.__proto__ || Object.getPrototypeOf(MonthDropdownOptions)).call.apply(_ref, [this].concat(args))), _this), _this.renderOptions = function () { return _this.props.monthNames.map(function (month, i) { return _react2.default.createElement( 'div', { className: 'react-datepicker__month-option', key: month, ref: month, onClick: _this.onChange.bind(_this, i) }, _this.props.month === i ? 
_react2.default.createElement( 'span', { className: 'react-datepicker__month-option--selected' }, '\u2713' ) : '', month ); }); }, _this.onChange = function (month) { return _this.props.onChange(month); }, _this.handleClickOutside = function () { return _this.props.onCancel(); }, _temp), _possibleConstructorReturn(_this, _ret); } _createClass(MonthDropdownOptions, [{ key: 'render', value: function render() { return _react2.default.createElement( 'div', { className: 'react-datepicker__month-dropdown' }, this.renderOptions() ); } }]); return MonthDropdownOptions; }(_react2.default.Component); MonthDropdownOptions.propTypes = { onCancel: _propTypes2.default.func.isRequired, onChange: _propTypes2.default.func.isRequired, month: _propTypes2.default.number.isRequired, monthNames: _propTypes2.default.arrayOf(_propTypes2.default.string.isRequired).isRequired }; exports.default = MonthDropdownOptions; /***/ }), /* 16 */ /***/ (function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); var _react = __webpack_require__(3); var _react2 = _interopRequireDefault(_react); var _propTypes = __webpack_require__(4); var _propTypes2 = _interopRequireDefault(_propTypes); var _classnames = __webpack_require__(10); var _classnames2 = _interopRequireDefault(_classnames); var _week = __webpack_require__(17); var _week2 = _interopRequireDefault(_week); var _date_utils = __webpack_require__(12); var utils = _interopRequireWildcard(_date_utils); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) newObj[key] = obj[key]; } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? 
Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } var FIXED_HEIGHT_STANDARD_WEEK_COUNT = 6; var Month = function (_React$Component) { _inherits(Month, _React$Component); function Month() { var _ref; var _temp, _this, _ret; _classCallCheck(this, Month); for (var _len = arguments.length, args = Array(_len), _key = 0; _key < _len; _key++) { args[_key] = arguments[_key]; } return _ret = (_temp = (_this = _possibleConstructorReturn(this, (_ref = Month.__proto__ || Object.getPrototypeOf(Month)).call.apply(_ref, [this].concat(args))), _this), _this.handleDayClick = function (day, event) { if (_this.props.onDayClick) { _this.props.onDayClick(day, event); } }, _this.handleDayMouseEnter = function (day) { if (_this.props.onDayMouseEnter) { _this.props.onDayMouseEnter(day); } }, _this.handleMouseLeave = function () { if (_this.props.onMouseLeave) { _this.props.onMouseLeave(); } }, _this.isWeekInMonth = function (startOfWeek) { var day = _this.props.day; var endOfWeek = utils.addDays(utils.cloneDate(startOfWeek), 6); return utils.isSameMonth(startOfWeek, day) || utils.isSameMonth(endOfWeek, day); }, _this.renderWeeks = function () { var weeks = []; var isFixedHeight = _this.props.fixedHeight; var currentWeekStart = utils.getStartOfWeek(utils.getStartOfMonth(utils.cloneDate(_this.props.day))); var i = 0; var breakAfterNextPush = false; while (true) { weeks.push(_react2.default.createElement(_week2.default, { key: i, day: currentWeekStart, month: utils.getMonth(_this.props.day), onDayClick: _this.handleDayClick, onDayMouseEnter: _this.handleDayMouseEnter, onWeekSelect: _this.props.onWeekSelect, formatWeekNumber: _this.props.formatWeekNumber, minDate: _this.props.minDate, maxDate: _this.props.maxDate, excludeDates: _this.props.excludeDates, includeDates: _this.props.includeDates, inline: _this.props.inline, highlightDates: _this.props.highlightDates, selectingDate: _this.props.selectingDate, filterDate: _this.props.filterDate, preSelection: _this.props.preSelection, selected: _this.props.selected, selectsStart: _this.props.selectsStart, selectsEnd: _this.props.selectsEnd, showWeekNumber: _this.props.showWeekNumbers, startDate: _this.props.startDate, endDate: _this.props.endDate, dayClassName: _this.props.dayClassName, utcOffset: _this.props.utcOffset })); if (breakAfterNextPush) break; i++; currentWeekStart = utils.addWeeks(utils.cloneDate(currentWeekStart), 1); // If one of these conditions is true, we will either break on this week // or break on the next week var isFixedAndFinalWeek = isFixedHeight && i >= FIXED_HEIGHT_STANDARD_WEEK_COUNT; var isNonFixedAndOutOfMonth = !isFixedHeight && !_this.isWeekInMonth(currentWeekStart); if (isFixedAndFinalWeek || isNonFixedAndOutOfMonth) { if (_this.props.peekNextMonth) { breakAfterNextPush = true; } else { break; } } } return weeks; }, _this.getClassNames = function () { var _this$props = _this.props, selectingDate = _this$props.selectingDate, selectsStart = _this$props.selectsStart, selectsEnd = _this$props.selectsEnd; return (0, _classnames2.default)('react-datepicker__month', { 'react-datepicker__month--selecting-range': selectingDate && (selectsStart || selectsEnd) }); }, _temp), _possibleConstructorReturn(_this, _ret); } _createClass(Month, [{ key: 'render', value: function render() { return _react2.default.createElement( 'div', { className: this.getClassNames(), onMouseLeave: this.handleMouseLeave, role: 'listbox' }, this.renderWeeks() ); } }]); return Month; }(_react2.default.Component); Month.propTypes = { day: 
_propTypes2.default.object.isRequired, dayClassName: _propTypes2.default.func, endDate: _propTypes2.default.object, excludeDates: _propTypes2.default.array, filterDate: _propTypes2.default.func, fixedHeight: _propTypes2.default.bool, formatWeekNumber: _propTypes2.default.func, highlightDates: _propTypes2.default.array, includeDates: _propTypes2.default.array, inline: _propTypes2.default.bool, maxDate: _propTypes2.default.object, minDate: _propTypes2.default.object, onDayClick: _propTypes2.default.func, onDayMouseEnter: _propTypes2.default.func, onMouseLeave: _propTypes2.default.func, onWeekSelect: _propTypes2.default.func, peekNextMonth: _propTypes2.default.bool, preSelection: _propTypes2.default.object, selected: _propTypes2.default.object, selectingDate: _propTypes2.default.object, selectsEnd: _propTypes2.default.bool, selectsStart: _propTypes2.default.bool, showWeekNumbers: _propTypes2.default.bool, startDate: _propTypes2.default.object, utcOffset: _propTypes2.default.number }; exports.default = Month; /***/ }), /* 17 */ /***/ (function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); var _react = __webpack_require__(3); var _react2 = _interopRequireDefault(_react); var _propTypes = __webpack_require__(4); var _propTypes2 = _interopRequireDefault(_propTypes); var _day = __webpack_require__(18); var _day2 = _interopRequireDefault(_day); var _week_number = __webpack_require__(19); var _week_number2 = _interopRequireDefault(_week_number); var _date_utils = __webpack_require__(12); var utils = _interopRequireWildcard(_date_utils); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) newObj[key] = obj[key]; } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? 
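// The Week component defined just below optionally prepends a WeekNumber cell
// (when `showWeekNumber` is set; clickable only if `onWeekSelect` is provided)
// and then renders seven Day cells starting from the start of the week that
// contains `props.day`.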
Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } var Week = function (_React$Component) { _inherits(Week, _React$Component); function Week() { var _ref; var _temp, _this, _ret; _classCallCheck(this, Week); for (var _len = arguments.length, args = Array(_len), _key = 0; _key < _len; _key++) { args[_key] = arguments[_key]; } return _ret = (_temp = (_this = _possibleConstructorReturn(this, (_ref = Week.__proto__ || Object.getPrototypeOf(Week)).call.apply(_ref, [this].concat(args))), _this), _this.handleDayClick = function (day, event) { if (_this.props.onDayClick) { _this.props.onDayClick(day, event); } }, _this.handleDayMouseEnter = function (day) { if (_this.props.onDayMouseEnter) { _this.props.onDayMouseEnter(day); } }, _this.handleWeekClick = function (day, weekNumber, event) { if (typeof _this.props.onWeekSelect === 'function') { _this.props.onWeekSelect(day, weekNumber, event); } }, _this.formatWeekNumber = function (startOfWeek) { if (_this.props.formatWeekNumber) { return _this.props.formatWeekNumber(startOfWeek); } return utils.getWeek(startOfWeek); }, _this.renderDays = function () { var startOfWeek = utils.getStartOfWeek(utils.cloneDate(_this.props.day)); var days = []; var weekNumber = _this.formatWeekNumber(startOfWeek); if (_this.props.showWeekNumber) { var onClickAction = _this.props.onWeekSelect ? _this.handleWeekClick.bind(_this, startOfWeek, weekNumber) : undefined; days.push(_react2.default.createElement(_week_number2.default, { key: 'W', weekNumber: weekNumber, onClick: onClickAction })); } return days.concat([0, 1, 2, 3, 4, 5, 6].map(function (offset) { var day = utils.addDays(utils.cloneDate(startOfWeek), offset); return _react2.default.createElement(_day2.default, { key: offset, day: day, month: _this.props.month, onClick: _this.handleDayClick.bind(_this, day), onMouseEnter: _this.handleDayMouseEnter.bind(_this, day), minDate: _this.props.minDate, maxDate: _this.props.maxDate, excludeDates: _this.props.excludeDates, includeDates: _this.props.includeDates, inline: _this.props.inline, highlightDates: _this.props.highlightDates, selectingDate: _this.props.selectingDate, filterDate: _this.props.filterDate, preSelection: _this.props.preSelection, selected: _this.props.selected, selectsStart: _this.props.selectsStart, selectsEnd: _this.props.selectsEnd, startDate: _this.props.startDate, endDate: _this.props.endDate, dayClassName: _this.props.dayClassName, utcOffset: _this.props.utcOffset }); })); }, _temp), _possibleConstructorReturn(_this, _ret); } _createClass(Week, [{ key: 'render', value: function render() { return _react2.default.createElement( 'div', { className: 'react-datepicker__week' }, this.renderDays() ); } }]); return Week; }(_react2.default.Component); Week.propTypes = { day: _propTypes2.default.object.isRequired, dayClassName: _propTypes2.default.func, endDate: _propTypes2.default.object, excludeDates: _propTypes2.default.array, filterDate: _propTypes2.default.func, formatWeekNumber: _propTypes2.default.func, highlightDates: _propTypes2.default.array, includeDates: _propTypes2.default.array, inline: _propTypes2.default.bool, maxDate: _propTypes2.default.object, minDate: _propTypes2.default.object, month: _propTypes2.default.number, onDayClick: _propTypes2.default.func, onDayMouseEnter: _propTypes2.default.func, onWeekSelect: _propTypes2.default.func, preSelection: _propTypes2.default.object, selected: _propTypes2.default.object, selectingDate: _propTypes2.default.object, selectsEnd: _propTypes2.default.bool, selectsStart: 
_propTypes2.default.bool, showWeekNumber: _propTypes2.default.bool, startDate: _propTypes2.default.object, utcOffset: _propTypes2.default.number }; exports.default = Week; /***/ }), /* 18 */ /***/ (function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); var _react = __webpack_require__(3); var _react2 = _interopRequireDefault(_react); var _propTypes = __webpack_require__(4); var _propTypes2 = _interopRequireDefault(_propTypes); var _classnames = __webpack_require__(10); var _classnames2 = _interopRequireDefault(_classnames); var _date_utils = __webpack_require__(12); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? 
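// The Day component defined just below computes its react-datepicker__day--*
// modifier classes (disabled, selected, in-range, weekend, outside-month, ...)
// in getClassNames(). getHighLightedClass() accepts two shapes of
// `highlightDates` entries: a plain moment (highlighted with the default
// class) or an object mapping a custom class name to an array of moments.
// Purely illustrative shape (not taken from the library's docs):
//   highlightDates={[moment(), { "custom-highlight": [moment().add(1, "day")] }]}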
Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } var Day = function (_React$Component) { _inherits(Day, _React$Component); function Day() { var _ref; var _temp, _this, _ret; _classCallCheck(this, Day); for (var _len = arguments.length, args = Array(_len), _key = 0; _key < _len; _key++) { args[_key] = arguments[_key]; } return _ret = (_temp = (_this = _possibleConstructorReturn(this, (_ref = Day.__proto__ || Object.getPrototypeOf(Day)).call.apply(_ref, [this].concat(args))), _this), _this.handleClick = function (event) { if (!_this.isDisabled() && _this.props.onClick) { _this.props.onClick(event); } }, _this.handleMouseEnter = function (event) { if (!_this.isDisabled() && _this.props.onMouseEnter) { _this.props.onMouseEnter(event); } }, _this.isSameDay = function (other) { return (0, _date_utils.isSameDay)(_this.props.day, other); }, _this.isKeyboardSelected = function () { return !_this.props.inline && !_this.isSameDay(_this.props.selected) && _this.isSameDay(_this.props.preSelection); }, _this.isDisabled = function () { return (0, _date_utils.isDayDisabled)(_this.props.day, _this.props); }, _this.getHighLightedClass = function (defaultClassName) { var _this$props = _this.props, day = _this$props.day, highlightDates = _this$props.highlightDates; if (!highlightDates) { return _defineProperty({}, defaultClassName, false); } var classNames = {}; for (var i = 0, len = highlightDates.length; i < len; i++) { var obj = highlightDates[i]; if ((0, _date_utils.isMoment)(obj)) { if ((0, _date_utils.isSameDay)(day, obj)) { classNames[defaultClassName] = true; } } else if ((typeof obj === 'undefined' ? 'undefined' : _typeof(obj)) === 'object') { var keys = Object.keys(obj); var arr = obj[keys[0]]; if (typeof keys[0] === 'string' && arr.constructor === Array) { for (var k = 0, _len2 = arr.length; k < _len2; k++) { if ((0, _date_utils.isSameDay)(day, arr[k])) { classNames[keys[0]] = true; } } } } } return classNames; }, _this.isInRange = function () { var _this$props2 = _this.props, day = _this$props2.day, startDate = _this$props2.startDate, endDate = _this$props2.endDate; if (!startDate || !endDate) { return false; } return (0, _date_utils.isDayInRange)(day, startDate, endDate); }, _this.isInSelectingRange = function () { var _this$props3 = _this.props, day = _this$props3.day, selectsStart = _this$props3.selectsStart, selectsEnd = _this$props3.selectsEnd, selectingDate = _this$props3.selectingDate, startDate = _this$props3.startDate, endDate = _this$props3.endDate; if (!(selectsStart || selectsEnd) || !selectingDate || _this.isDisabled()) { return false; } if (selectsStart && endDate && selectingDate.isSameOrBefore(endDate)) { return (0, _date_utils.isDayInRange)(day, selectingDate, endDate); } if (selectsEnd && startDate && selectingDate.isSameOrAfter(startDate)) { return (0, _date_utils.isDayInRange)(day, startDate, selectingDate); } return false; }, _this.isSelectingRangeStart = function () { if (!_this.isInSelectingRange()) { return false; } var _this$props4 = _this.props, day = _this$props4.day, selectingDate = _this$props4.selectingDate, startDate = _this$props4.startDate, selectsStart = _this$props4.selectsStart; if (selectsStart) { return (0, _date_utils.isSameDay)(day, selectingDate); } else { return (0, _date_utils.isSameDay)(day, startDate); } }, _this.isSelectingRangeEnd = function () { if (!_this.isInSelectingRange()) { return false; } var _this$props5 = _this.props, day = _this$props5.day, selectingDate = _this$props5.selectingDate, endDate = 
_this$props5.endDate, selectsEnd = _this$props5.selectsEnd; if (selectsEnd) { return (0, _date_utils.isSameDay)(day, selectingDate); } else { return (0, _date_utils.isSameDay)(day, endDate); } }, _this.isRangeStart = function () { var _this$props6 = _this.props, day = _this$props6.day, startDate = _this$props6.startDate, endDate = _this$props6.endDate; if (!startDate || !endDate) { return false; } return (0, _date_utils.isSameDay)(startDate, day); }, _this.isRangeEnd = function () { var _this$props7 = _this.props, day = _this$props7.day, startDate = _this$props7.startDate, endDate = _this$props7.endDate; if (!startDate || !endDate) { return false; } return (0, _date_utils.isSameDay)(endDate, day); }, _this.isWeekend = function () { var weekday = (0, _date_utils.getDay)(_this.props.day); return weekday === 0 || weekday === 6; }, _this.isOutsideMonth = function () { return _this.props.month !== undefined && _this.props.month !== (0, _date_utils.getMonth)(_this.props.day); }, _this.getClassNames = function (date) { var dayClassName = _this.props.dayClassName ? _this.props.dayClassName(date) : undefined; return (0, _classnames2.default)('react-datepicker__day', dayClassName, 'react-datepicker__day--' + (0, _date_utils.getDayOfWeekCode)(_this.props.day), { 'react-datepicker__day--disabled': _this.isDisabled(), 'react-datepicker__day--selected': _this.isSameDay(_this.props.selected), 'react-datepicker__day--keyboard-selected': _this.isKeyboardSelected(), 'react-datepicker__day--range-start': _this.isRangeStart(), 'react-datepicker__day--range-end': _this.isRangeEnd(), 'react-datepicker__day--in-range': _this.isInRange(), 'react-datepicker__day--in-selecting-range': _this.isInSelectingRange(), 'react-datepicker__day--selecting-range-start': _this.isSelectingRangeStart(), 'react-datepicker__day--selecting-range-end': _this.isSelectingRangeEnd(), 'react-datepicker__day--today': _this.isSameDay((0, _date_utils.now)(_this.props.utcOffset)), 'react-datepicker__day--weekend': _this.isWeekend(), 'react-datepicker__day--outside-month': _this.isOutsideMonth() }, _this.getHighLightedClass('react-datepicker__day--highlighted')); }, _temp), _possibleConstructorReturn(_this, _ret); } _createClass(Day, [{ key: 'render', value: function render() { return _react2.default.createElement( 'div', { className: this.getClassNames(this.props.day), onClick: this.handleClick, onMouseEnter: this.handleMouseEnter, 'aria-label': 'day-' + (0, _date_utils.getDate)(this.props.day), role: 'option' }, (0, _date_utils.getDate)(this.props.day) ); } }]); return Day; }(_react2.default.Component); Day.propTypes = { day: _propTypes2.default.object.isRequired, dayClassName: _propTypes2.default.func, endDate: _propTypes2.default.object, highlightDates: _propTypes2.default.array, inline: _propTypes2.default.bool, month: _propTypes2.default.number, onClick: _propTypes2.default.func, onMouseEnter: _propTypes2.default.func, preSelection: _propTypes2.default.object, selected: _propTypes2.default.object, selectingDate: _propTypes2.default.object, selectsEnd: _propTypes2.default.bool, selectsStart: _propTypes2.default.bool, startDate: _propTypes2.default.object, utcOffset: _propTypes2.default.number }; exports.default = Day; /***/ }), /* 19 */ /***/ (function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = 
descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); var _react = __webpack_require__(3); var _react2 = _interopRequireDefault(_react); var _propTypes = __webpack_require__(4); var _propTypes2 = _interopRequireDefault(_propTypes); var _classnames = __webpack_require__(10); var _classnames2 = _interopRequireDefault(_classnames); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } var WeekNumber = function (_React$Component) { _inherits(WeekNumber, _React$Component); function WeekNumber() { var _ref; var _temp, _this, _ret; _classCallCheck(this, WeekNumber); for (var _len = arguments.length, args = Array(_len), _key = 0; _key < _len; _key++) { args[_key] = arguments[_key]; } return _ret = (_temp = (_this = _possibleConstructorReturn(this, (_ref = WeekNumber.__proto__ || Object.getPrototypeOf(WeekNumber)).call.apply(_ref, [this].concat(args))), _this), _this.handleClick = function (event) { if (_this.props.onClick) { _this.props.onClick(event); } }, _temp), _possibleConstructorReturn(_this, _ret); } _createClass(WeekNumber, [{ key: 'render', value: function render() { var weekNumberClasses = { 'react-datepicker__week-number': true, 'react-datepicker__week-number--clickable': !!this.props.onClick }; return _react2.default.createElement( 'div', { className: (0, _classnames2.default)(weekNumberClasses), 'aria-label': 'week-' + this.props.weekNumber, onClick: this.handleClick }, this.props.weekNumber ); } }]); return WeekNumber; }(_react2.default.Component); WeekNumber.propTypes = { weekNumber: _propTypes2.default.number.isRequired, onClick: _propTypes2.default.func }; exports.default = WeekNumber; /***/ }), /* 20 */ /***/ (function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) 
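// The Time component defined just below renders the time-of-day list.
// renderTimes() builds 1440 / intervals entries by repeatedly adding
// `intervals` minutes to the start of the day, so the default 30-minute
// interval yields 48 rows. componentDidMount() scrolls the selected hour into
// view, apparently assuming a fixed 30px row height:
//   scrollTop = 30 * ((60 / intervals) * currentHour)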
defineProperties(Constructor, staticProps); return Constructor; }; }(); var _react = __webpack_require__(3); var _react2 = _interopRequireDefault(_react); var _propTypes = __webpack_require__(4); var _propTypes2 = _interopRequireDefault(_propTypes); var _date_utils = __webpack_require__(12); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } var Time = function (_React$Component) { _inherits(Time, _React$Component); function Time() { var _ref; var _temp, _this, _ret; _classCallCheck(this, Time); for (var _len = arguments.length, args = Array(_len), _key = 0; _key < _len; _key++) { args[_key] = arguments[_key]; } return _ret = (_temp = (_this = _possibleConstructorReturn(this, (_ref = Time.__proto__ || Object.getPrototypeOf(Time)).call.apply(_ref, [this].concat(args))), _this), _this.handleClick = function (time) { if ((_this.props.minTime || _this.props.maxTime) && (0, _date_utils.isTimeInDisabledRange)(time, _this.props) || _this.props.excludeTimes && (0, _date_utils.isTimeDisabled)(time, _this.props.excludeTimes)) { return; } _this.props.onChange(time); }, _this.liClasses = function (time, currH, currM) { var classes = ['react-datepicker__time-list-item']; if (currH === (0, _date_utils.getHour)(time) && currM === (0, _date_utils.getMinute)(time)) { classes.push('react-datepicker__time-list-item--selected'); } if ((_this.props.minTime || _this.props.maxTime) && (0, _date_utils.isTimeInDisabledRange)(time, _this.props) || _this.props.excludeTimes && (0, _date_utils.isTimeDisabled)(time, _this.props.excludeTimes)) { classes.push('react-datepicker__time-list-item--disabled'); } return classes.join(' '); }, _this.renderTimes = function () { var times = []; var format = _this.props.format ? _this.props.format : 'hh:mm A'; var intervals = _this.props.intervals; var activeTime = _this.props.selected ? 
_this.props.selected : (0, _date_utils.newDate)(); var currH = (0, _date_utils.getHour)(activeTime); var currM = (0, _date_utils.getMinute)(activeTime); var base = (0, _date_utils.getStartOfDay)((0, _date_utils.newDate)()); var multiplier = 1440 / intervals; for (var i = 0; i < multiplier; i++) { times.push((0, _date_utils.addMinutes)((0, _date_utils.cloneDate)(base), i * intervals)); } return times.map(function (time, i) { return _react2.default.createElement( 'li', { key: i, onClick: _this.handleClick.bind(_this, time), className: _this.liClasses(time, currH, currM) }, (0, _date_utils.formatDate)(time, format) ); }); }, _temp), _possibleConstructorReturn(_this, _ret); } _createClass(Time, [{ key: 'componentDidMount', value: function componentDidMount() { // code to ensure selected time will always be in focus within time window when it first appears var multiplier = 60 / this.props.intervals; var currH = this.props.selected ? (0, _date_utils.getHour)(this.props.selected) : (0, _date_utils.getHour)((0, _date_utils.newDate)()); this.list.scrollTop = 30 * (multiplier * currH); } }, { key: 'render', value: function render() { var _this2 = this; var height = null; if (this.props.monthRef) { height = this.props.monthRef.clientHeight - 39; } return _react2.default.createElement( 'div', { className: 'react-datepicker__time-container ' + (this.props.todayButton ? 'react-datepicker__time-container--with-today-button' : '') }, _react2.default.createElement( 'div', { className: 'react-datepicker__header react-datepicker__header--time' }, _react2.default.createElement( 'div', { className: 'react-datepicker-time__header' }, 'Time' ) ), _react2.default.createElement( 'div', { className: 'react-datepicker__time' }, _react2.default.createElement( 'div', { className: 'react-datepicker__time-box' }, _react2.default.createElement( 'ul', { className: 'react-datepicker__time-list', ref: function ref(list) { _this2.list = list; }, style: height ? 
{ height: height } : {} }, this.renderTimes.bind(this)() ) ) ) ); } }], [{ key: 'defaultProps', get: function get() { return { intervals: 30, onTimeChange: function onTimeChange() {}, todayButton: null }; } }]); return Time; }(_react2.default.Component); Time.propTypes = { format: _propTypes2.default.string, intervals: _propTypes2.default.number, selected: _propTypes2.default.object, onChange: _propTypes2.default.func, todayButton: _propTypes2.default.string, minTime: _propTypes2.default.object, maxTime: _propTypes2.default.object, excludeTimes: _propTypes2.default.array, monthRef: _propTypes2.default.object }; exports.default = Time; /***/ }), /* 21 */ /***/ (function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); exports.popperPlacementPositions = undefined; var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); var _classnames = __webpack_require__(10); var _classnames2 = _interopRequireDefault(_classnames); var _react = __webpack_require__(3); var _react2 = _interopRequireDefault(_react); var _propTypes = __webpack_require__(4); var _propTypes2 = _interopRequireDefault(_propTypes); var _reactPopper = __webpack_require__(22); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? 
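// PopperComponent, defined just below, positions the calendar relative to its
// target using react-popper's Manager/Target/Popper. By default the popper is
// hidden (`hidePopper: true`), placed 'bottom-start', and kept inside the
// viewport via the preventOverflow modifier; when a `popperContainer`
// component is supplied, the rendered popper is wrapped in it.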
Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } var popperPlacementPositions = exports.popperPlacementPositions = ['auto', 'auto-left', 'auto-right', 'bottom', 'bottom-end', 'bottom-start', 'left', 'left-end', 'left-start', 'right', 'right-end', 'right-start', 'top', 'top-end', 'top-start']; var PopperComponent = function (_React$Component) { _inherits(PopperComponent, _React$Component); function PopperComponent() { _classCallCheck(this, PopperComponent); return _possibleConstructorReturn(this, (PopperComponent.__proto__ || Object.getPrototypeOf(PopperComponent)).apply(this, arguments)); } _createClass(PopperComponent, [{ key: 'render', value: function render() { var _props = this.props, className = _props.className, hidePopper = _props.hidePopper, popperComponent = _props.popperComponent, popperModifiers = _props.popperModifiers, popperPlacement = _props.popperPlacement, targetComponent = _props.targetComponent; var popper = void 0; if (!hidePopper) { var classes = (0, _classnames2.default)('react-datepicker-popper', className); popper = _react2.default.createElement( _reactPopper.Popper, { className: classes, modifiers: popperModifiers, placement: popperPlacement }, popperComponent ); } if (this.props.popperContainer) { popper = _react2.default.createElement(this.props.popperContainer, {}, popper); } return _react2.default.createElement( _reactPopper.Manager, null, _react2.default.createElement( _reactPopper.Target, { className: 'react-datepicker-wrapper' }, targetComponent ), popper ); } }], [{ key: 'defaultProps', get: function get() { return { hidePopper: true, popperModifiers: { preventOverflow: { enabled: true, escapeWithReference: true, boundariesElement: 'viewport' } }, popperPlacement: 'bottom-start' }; } }]); return PopperComponent; }(_react2.default.Component); PopperComponent.propTypes = { className: _propTypes2.default.string, hidePopper: _propTypes2.default.bool, popperComponent: _propTypes2.default.element, popperModifiers: _propTypes2.default.object, // <datepicker/> props popperPlacement: _propTypes2.default.oneOf(popperPlacementPositions), // <datepicker/> props popperContainer: _propTypes2.default.func, targetComponent: _propTypes2.default.element }; exports.default = PopperComponent; /***/ }), /* 22 */ /***/ (function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); exports.Arrow = exports.Popper = exports.Target = exports.Manager = undefined; var _Manager2 = __webpack_require__(23); var _Manager3 = _interopRequireDefault(_Manager2); var _Target2 = __webpack_require__(24); var _Target3 = _interopRequireDefault(_Target2); var _Popper2 = __webpack_require__(25); var _Popper3 = _interopRequireDefault(_Popper2); var _Arrow2 = __webpack_require__(27); var _Arrow3 = _interopRequireDefault(_Arrow2); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } exports.Manager = _Manager3.default; exports.Target = _Target3.default; exports.Popper = _Popper3.default; exports.Arrow = _Arrow3.default; /***/ }), /* 23 */ /***/ (function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); var _react = __webpack_require__(3); var _react2 = _interopRequireDefault(_react); var _propTypes = __webpack_require__(4); var _propTypes2 = _interopRequireDefault(_propTypes); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _objectWithoutProperties(obj, keys) { var target = {}; for (var i in obj) { if (keys.indexOf(i) >= 0) continue; if (!Object.prototype.hasOwnProperty.call(obj, i)) continue; target[i] = obj[i]; } return target; } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? 
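// react-popper's Manager and Target are defined just below. Manager exposes a
// `popperManager` child context with setTargetNode/getTargetNode; Target
// registers its rendered DOM node with that manager (via `ref` for plain
// elements, `innerRef` for components, or a render-prop child).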
Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } var Manager = function (_Component) { _inherits(Manager, _Component); function Manager() { var _ref; var _temp, _this, _ret; _classCallCheck(this, Manager); for (var _len = arguments.length, args = Array(_len), _key = 0; _key < _len; _key++) { args[_key] = arguments[_key]; } return _ret = (_temp = (_this = _possibleConstructorReturn(this, (_ref = Manager.__proto__ || Object.getPrototypeOf(Manager)).call.apply(_ref, [this].concat(args))), _this), _this._setTargetNode = function (node) { _this._targetNode = node; }, _this._getTargetNode = function () { return _this._targetNode; }, _temp), _possibleConstructorReturn(_this, _ret); } _createClass(Manager, [{ key: 'getChildContext', value: function getChildContext() { return { popperManager: { setTargetNode: this._setTargetNode, getTargetNode: this._getTargetNode } }; } }, { key: 'render', value: function render() { var _props = this.props, tag = _props.tag, children = _props.children, restProps = _objectWithoutProperties(_props, ['tag', 'children']); if (tag !== false) { return (0, _react.createElement)(tag, restProps, children); } else { return children; } } }]); return Manager; }(_react.Component); Manager.childContextTypes = { popperManager: _propTypes2.default.object.isRequired }; Manager.propTypes = { tag: _propTypes2.default.oneOfType([_propTypes2.default.string, _propTypes2.default.bool]) }; Manager.defaultProps = { tag: 'div' }; exports.default = Manager; /***/ }), /* 24 */ /***/ (function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; var _react = __webpack_require__(3); var _react2 = _interopRequireDefault(_react); var _propTypes = __webpack_require__(4); var _propTypes2 = _interopRequireDefault(_propTypes); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _objectWithoutProperties(obj, keys) { var target = {}; for (var i in obj) { if (keys.indexOf(i) >= 0) continue; if (!Object.prototype.hasOwnProperty.call(obj, i)) continue; target[i] = obj[i]; } return target; } var Target = function Target(props, context) { var _props$component = props.component, component = _props$component === undefined ? 
'div' : _props$component, innerRef = props.innerRef, children = props.children, restProps = _objectWithoutProperties(props, ['component', 'innerRef', 'children']); var popperManager = context.popperManager; var targetRef = function targetRef(node) { popperManager.setTargetNode(node); if (typeof innerRef === 'function') { innerRef(node); } }; if (typeof children === 'function') { var targetProps = { ref: targetRef }; return children({ targetProps: targetProps, restProps: restProps }); } var componentProps = _extends({}, restProps); if (typeof component === 'string') { componentProps.ref = targetRef; } else { componentProps.innerRef = targetRef; } return (0, _react.createElement)(component, componentProps, children); }; Target.contextTypes = { popperManager: _propTypes2.default.object.isRequired }; Target.propTypes = { component: _propTypes2.default.oneOfType([_propTypes2.default.node, _propTypes2.default.func]), innerRef: _propTypes2.default.func, children: _propTypes2.default.oneOfType([_propTypes2.default.node, _propTypes2.default.func]) }; exports.default = Target; /***/ }), /* 25 */ /***/ (function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); var _react = __webpack_require__(3); var _react2 = _interopRequireDefault(_react); var _propTypes = __webpack_require__(4); var _propTypes2 = _interopRequireDefault(_propTypes); var _popper = __webpack_require__(26); var _popper2 = _interopRequireDefault(_popper); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } function _objectWithoutProperties(obj, keys) { var target = {}; for (var i in obj) { if (keys.indexOf(i) >= 0) continue; if (!Object.prototype.hasOwnProperty.call(obj, i)) continue; target[i] = obj[i]; } return target; } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? 
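// react-popper's Popper wrapper is defined just below. It instantiates
// Popper.js against the Manager's target node with `applyStyle` disabled and a
// custom `updateState` modifier (order 900) that copies the computed offsets
// into React state, so positioning is applied through render(); until the
// first update arrives, the element is kept invisible (position: absolute,
// pointer-events: none, opacity: 0) to avoid a flash of unpositioned content.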
call : self; } function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } var noop = function noop() { return null; }; var Popper = function (_Component) { _inherits(Popper, _Component); function Popper() { var _ref; var _temp, _this, _ret; _classCallCheck(this, Popper); for (var _len = arguments.length, args = Array(_len), _key = 0; _key < _len; _key++) { args[_key] = arguments[_key]; } return _ret = (_temp = (_this = _possibleConstructorReturn(this, (_ref = Popper.__proto__ || Object.getPrototypeOf(Popper)).call.apply(_ref, [this].concat(args))), _this), _this.state = {}, _this._setArrowNode = function (node) { _this._arrowNode = node; }, _this._getTargetNode = function () { return _this.context.popperManager.getTargetNode(); }, _this._getOffsets = function (data) { return Object.keys(data.offsets).map(function (key) { return data.offsets[key]; }); }, _this._isDataDirty = function (data) { if (_this.state.data) { return JSON.stringify(_this._getOffsets(_this.state.data)) !== JSON.stringify(_this._getOffsets(data)); } else { return true; } }, _this._updateStateModifier = { enabled: true, order: 900, fn: function fn(data) { if (_this._isDataDirty(data)) { _this.setState({ data: data }); } return data; } }, _this._getPopperStyle = function () { var data = _this.state.data; // If Popper isn't instantiated, hide the popperElement // to avoid flash of unstyled content if (!_this._popper || !data) { return { position: 'absolute', pointerEvents: 'none', opacity: 0 }; } var _data$offsets$popper = data.offsets.popper, top = _data$offsets$popper.top, left = _data$offsets$popper.left, position = _data$offsets$popper.position; return _extends({ position: position }, data.styles); }, _this._getPopperPlacement = function () { return !!_this.state.data ? _this.state.data.placement : undefined; }, _this._getPopperHide = function () { return !!_this.state.data && _this.state.data.hide ? 
'' : undefined; }, _this._getArrowStyle = function () { if (!_this.state.data || !_this.state.data.offsets.arrow) { return {}; } else { var _this$state$data$offs = _this.state.data.offsets.arrow, top = _this$state$data$offs.top, left = _this$state$data$offs.left; return { top: top, left: left }; } }, _temp), _possibleConstructorReturn(_this, _ret); } _createClass(Popper, [{ key: 'getChildContext', value: function getChildContext() { return { popper: { setArrowNode: this._setArrowNode, getArrowStyle: this._getArrowStyle } }; } }, { key: 'componentDidMount', value: function componentDidMount() { this._updatePopper(); } }, { key: 'componentDidUpdate', value: function componentDidUpdate(lastProps) { if (lastProps.placement !== this.props.placement || lastProps.eventsEnabled !== this.props.eventsEnabled) { this._updatePopper(); } if (this._popper && lastProps.children !== this.props.children) { this._popper.scheduleUpdate(); } } }, { key: 'componentWillUnmount', value: function componentWillUnmount() { this._destroyPopper(); } }, { key: '_updatePopper', value: function _updatePopper() { this._destroyPopper(); if (this._node) { this._createPopper(); } } }, { key: '_createPopper', value: function _createPopper() { var _props = this.props, placement = _props.placement, eventsEnabled = _props.eventsEnabled; var modifiers = _extends({}, this.props.modifiers, { applyStyle: { enabled: false }, updateState: this._updateStateModifier }); if (this._arrowNode) { modifiers.arrow = { element: this._arrowNode }; } this._popper = new _popper2.default(this._getTargetNode(), this._node, { placement: placement, eventsEnabled: eventsEnabled, modifiers: modifiers }); // schedule an update to make sure everything gets positioned correct // after being instantiated this._popper.scheduleUpdate(); } }, { key: '_destroyPopper', value: function _destroyPopper() { if (this._popper) { this._popper.destroy(); } } }, { key: 'render', value: function render() { var _this2 = this; var _props2 = this.props, component = _props2.component, innerRef = _props2.innerRef, placement = _props2.placement, eventsEnabled = _props2.eventsEnabled, modifiers = _props2.modifiers, children = _props2.children, restProps = _objectWithoutProperties(_props2, ['component', 'innerRef', 'placement', 'eventsEnabled', 'modifiers', 'children']); var popperRef = function popperRef(node) { _this2._node = node; if (typeof innerRef === 'function') { innerRef(node); } }; var popperStyle = this._getPopperStyle(); var popperPlacement = this._getPopperPlacement(); var popperHide = this._getPopperHide(); if (typeof children === 'function') { var _popperProps; var popperProps = (_popperProps = { ref: popperRef, style: popperStyle }, _defineProperty(_popperProps, 'data-placement', popperPlacement), _defineProperty(_popperProps, 'data-x-out-of-boundaries', popperHide), _popperProps); return children({ popperProps: popperProps, restProps: restProps, scheduleUpdate: this._popper && this._popper.scheduleUpdate }); } var componentProps = _extends({}, restProps, { style: _extends({}, restProps.style, popperStyle), 'data-placement': popperPlacement, 'data-x-out-of-boundaries': popperHide }); if (typeof component === 'string') { componentProps.ref = popperRef; } else { componentProps.innerRef = popperRef; } return (0, _react.createElement)(component, componentProps, children); } }]); return Popper; }(_react.Component); Popper.contextTypes = { popperManager: _propTypes2.default.object.isRequired }; Popper.childContextTypes = { popper: _propTypes2.default.object.isRequired 
}; Popper.propTypes = { component: _propTypes2.default.oneOfType([_propTypes2.default.node, _propTypes2.default.func]), innerRef: _propTypes2.default.func, placement: _propTypes2.default.oneOf(_popper2.default.placements), eventsEnabled: _propTypes2.default.bool, modifiers: _propTypes2.default.object, children: _propTypes2.default.oneOfType([_propTypes2.default.node, _propTypes2.default.func]) }; Popper.defaultProps = { component: 'div', placement: 'bottom', eventsEnabled: true, modifiers: {} }; exports.default = Popper; /***/ }), /* 26 */ /***/ (function(module, exports, __webpack_require__) { /* WEBPACK VAR INJECTION */(function(global) {/**! * @fileOverview Kickass library to create and place poppers near their reference elements. * @version 1.12.6 * @license * Copyright (c) 2016 Federico Zivolo and contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ (function (global, factory) { true ? module.exports = factory() : typeof define === 'function' && define.amd ? define(factory) : (global.Popper = factory()); }(this, (function () { 'use strict'; var isBrowser = typeof window !== 'undefined' && typeof window.document !== 'undefined'; var longerTimeoutBrowsers = ['Edge', 'Trident', 'Firefox']; var timeoutDuration = 0; for (var i = 0; i < longerTimeoutBrowsers.length; i += 1) { if (isBrowser && navigator.userAgent.indexOf(longerTimeoutBrowsers[i]) >= 0) { timeoutDuration = 1; break; } } function microtaskDebounce(fn) { var called = false; return function () { if (called) { return; } called = true; Promise.resolve().then(function () { called = false; fn(); }); }; } function taskDebounce(fn) { var scheduled = false; return function () { if (!scheduled) { scheduled = true; setTimeout(function () { scheduled = false; fn(); }, timeoutDuration); } }; } var supportsMicroTasks = isBrowser && window.Promise; /** * Create a debounced version of a method, that's asynchronously deferred * but called in the minimum time possible. * * @method * @memberof Popper.Utils * @argument {Function} fn * @returns {Function} */ var debounce = supportsMicroTasks ? microtaskDebounce : taskDebounce; /** * Check if the given variable is a function * @method * @memberof Popper.Utils * @argument {Any} functionToCheck - variable to check * @returns {Boolean} answer to: is a function? 
*/ function isFunction(functionToCheck) { var getType = {}; return functionToCheck && getType.toString.call(functionToCheck) === '[object Function]'; } /** * Get CSS computed property of the given element * @method * @memberof Popper.Utils * @argument {Eement} element * @argument {String} property */ function getStyleComputedProperty(element, property) { if (element.nodeType !== 1) { return []; } // NOTE: 1 DOM access here var css = window.getComputedStyle(element, null); return property ? css[property] : css; } /** * Returns the parentNode or the host of the element * @method * @memberof Popper.Utils * @argument {Element} element * @returns {Element} parent */ function getParentNode(element) { if (element.nodeName === 'HTML') { return element; } return element.parentNode || element.host; } /** * Returns the scrolling parent of the given element * @method * @memberof Popper.Utils * @argument {Element} element * @returns {Element} scroll parent */ function getScrollParent(element) { // Return body, `getScroll` will take care to get the correct `scrollTop` from it if (!element) { return window.document.body; } switch (element.nodeName) { case 'HTML': case 'BODY': return element.ownerDocument.body; case '#document': return element.body; } // Firefox want us to check `-x` and `-y` variations as well var _getStyleComputedProp = getStyleComputedProperty(element), overflow = _getStyleComputedProp.overflow, overflowX = _getStyleComputedProp.overflowX, overflowY = _getStyleComputedProp.overflowY; if (/(auto|scroll)/.test(overflow + overflowY + overflowX)) { return element; } return getScrollParent(getParentNode(element)); } /** * Returns the offset parent of the given element * @method * @memberof Popper.Utils * @argument {Element} element * @returns {Element} offset parent */ function getOffsetParent(element) { // NOTE: 1 DOM access here var offsetParent = element && element.offsetParent; var nodeName = offsetParent && offsetParent.nodeName; if (!nodeName || nodeName === 'BODY' || nodeName === 'HTML') { if (element) { return element.ownerDocument.documentElement; } return window.document.documentElement; } // .offsetParent will return the closest TD or TABLE in case // no offsetParent is present, I hate this job... 
if (['TD', 'TABLE'].indexOf(offsetParent.nodeName) !== -1 && getStyleComputedProperty(offsetParent, 'position') === 'static') { return getOffsetParent(offsetParent); } return offsetParent; } function isOffsetContainer(element) { var nodeName = element.nodeName; if (nodeName === 'BODY') { return false; } return nodeName === 'HTML' || getOffsetParent(element.firstElementChild) === element; } /** * Finds the root node (document, shadowDOM root) of the given element * @method * @memberof Popper.Utils * @argument {Element} node * @returns {Element} root node */ function getRoot(node) { if (node.parentNode !== null) { return getRoot(node.parentNode); } return node; } /** * Finds the offset parent common to the two provided nodes * @method * @memberof Popper.Utils * @argument {Element} element1 * @argument {Element} element2 * @returns {Element} common offset parent */ function findCommonOffsetParent(element1, element2) { // This check is needed to avoid errors in case one of the elements isn't defined for any reason if (!element1 || !element1.nodeType || !element2 || !element2.nodeType) { return window.document.documentElement; } // Here we make sure to give as "start" the element that comes first in the DOM var order = element1.compareDocumentPosition(element2) & Node.DOCUMENT_POSITION_FOLLOWING; var start = order ? element1 : element2; var end = order ? element2 : element1; // Get common ancestor container var range = document.createRange(); range.setStart(start, 0); range.setEnd(end, 0); var commonAncestorContainer = range.commonAncestorContainer; // Both nodes are inside #document if (element1 !== commonAncestorContainer && element2 !== commonAncestorContainer || start.contains(end)) { if (isOffsetContainer(commonAncestorContainer)) { return commonAncestorContainer; } return getOffsetParent(commonAncestorContainer); } // one of the nodes is inside shadowDOM, find which one var element1root = getRoot(element1); if (element1root.host) { return findCommonOffsetParent(element1root.host, element2); } else { return findCommonOffsetParent(element1, getRoot(element2).host); } } /** * Gets the scroll value of the given element in the given side (top and left) * @method * @memberof Popper.Utils * @argument {Element} element * @argument {String} side `top` or `left` * @returns {number} amount of scrolled pixels */ function getScroll(element) { var side = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'top'; var upperSide = side === 'top' ? 'scrollTop' : 'scrollLeft'; var nodeName = element.nodeName; if (nodeName === 'BODY' || nodeName === 'HTML') { var html = element.ownerDocument.documentElement; var scrollingElement = element.ownerDocument.scrollingElement || html; return scrollingElement[upperSide]; } return element[upperSide]; } /* * Sum or subtract the element scroll values (left and top) from a given rect object * @method * @memberof Popper.Utils * @param {Object} rect - Rect object you want to change * @param {HTMLElement} element - The element from the function reads the scroll values * @param {Boolean} subtract - set to true if you want to subtract the scroll values * @return {Object} rect - The modifier rect object */ function includeScroll(rect, element) { var subtract = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : false; var scrollTop = getScroll(element, 'top'); var scrollLeft = getScroll(element, 'left'); var modifier = subtract ? 
-1 : 1; rect.top += scrollTop * modifier; rect.bottom += scrollTop * modifier; rect.left += scrollLeft * modifier; rect.right += scrollLeft * modifier; return rect; } /* * Helper to detect borders of a given element * @method * @memberof Popper.Utils * @param {CSSStyleDeclaration} styles * Result of `getStyleComputedProperty` on the given element * @param {String} axis - `x` or `y` * @return {number} borders - The borders size of the given axis */ function getBordersSize(styles, axis) { var sideA = axis === 'x' ? 'Left' : 'Top'; var sideB = sideA === 'Left' ? 'Right' : 'Bottom'; return +styles['border' + sideA + 'Width'].split('px')[0] + +styles['border' + sideB + 'Width'].split('px')[0]; } /** * Tells if you are running Internet Explorer 10 * @method * @memberof Popper.Utils * @returns {Boolean} isIE10 */ var isIE10 = undefined; var isIE10$1 = function () { if (isIE10 === undefined) { isIE10 = navigator.appVersion.indexOf('MSIE 10') !== -1; } return isIE10; }; function getSize(axis, body, html, computedStyle) { return Math.max(body['offset' + axis], body['scroll' + axis], html['client' + axis], html['offset' + axis], html['scroll' + axis], isIE10$1() ? html['offset' + axis] + computedStyle['margin' + (axis === 'Height' ? 'Top' : 'Left')] + computedStyle['margin' + (axis === 'Height' ? 'Bottom' : 'Right')] : 0); } function getWindowSizes() { var body = window.document.body; var html = window.document.documentElement; var computedStyle = isIE10$1() && window.getComputedStyle(html); return { height: getSize('Height', body, html, computedStyle), width: getSize('Width', body, html, computedStyle) }; } var classCallCheck = function (instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }; var createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); var defineProperty = function (obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }; var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; /** * Given element offsets, generate an output similar to getBoundingClientRect * @method * @memberof Popper.Utils * @argument {Object} offsets * @returns {Object} ClientRect like output */ function getClientRect(offsets) { return _extends({}, offsets, { right: offsets.left + offsets.width, bottom: offsets.top + offsets.height }); } /** * Get bounding client rect of given element * @method * @memberof Popper.Utils * @param {HTMLElement} element * @return {Object} client rect */ function getBoundingClientRect(element) { var rect = {}; // IE10 10 FIX: Please, don't ask, the element isn't // considered in DOM in some circumstances... 
// This isn't reproducible in IE10 compatibility mode of IE11 if (isIE10$1()) { try { rect = element.getBoundingClientRect(); var scrollTop = getScroll(element, 'top'); var scrollLeft = getScroll(element, 'left'); rect.top += scrollTop; rect.left += scrollLeft; rect.bottom += scrollTop; rect.right += scrollLeft; } catch (err) {} } else { rect = element.getBoundingClientRect(); } var result = { left: rect.left, top: rect.top, width: rect.right - rect.left, height: rect.bottom - rect.top }; // subtract scrollbar size from sizes var sizes = element.nodeName === 'HTML' ? getWindowSizes() : {}; var width = sizes.width || element.clientWidth || result.right - result.left; var height = sizes.height || element.clientHeight || result.bottom - result.top; var horizScrollbar = element.offsetWidth - width; var vertScrollbar = element.offsetHeight - height; // if an hypothetical scrollbar is detected, we must be sure it's not a `border` // we make this check conditional for performance reasons if (horizScrollbar || vertScrollbar) { var styles = getStyleComputedProperty(element); horizScrollbar -= getBordersSize(styles, 'x'); vertScrollbar -= getBordersSize(styles, 'y'); result.width -= horizScrollbar; result.height -= vertScrollbar; } return getClientRect(result); } function getOffsetRectRelativeToArbitraryNode(children, parent) { var isIE10 = isIE10$1(); var isHTML = parent.nodeName === 'HTML'; var childrenRect = getBoundingClientRect(children); var parentRect = getBoundingClientRect(parent); var scrollParent = getScrollParent(children); var styles = getStyleComputedProperty(parent); var borderTopWidth = +styles.borderTopWidth.split('px')[0]; var borderLeftWidth = +styles.borderLeftWidth.split('px')[0]; var offsets = getClientRect({ top: childrenRect.top - parentRect.top - borderTopWidth, left: childrenRect.left - parentRect.left - borderLeftWidth, width: childrenRect.width, height: childrenRect.height }); offsets.marginTop = 0; offsets.marginLeft = 0; // Subtract margins of documentElement in case it's being used as parent // we do this only on HTML because it's the only element that behaves // differently when margins are applied to it. The margins are included in // the box of the documentElement, in the other cases not. if (!isIE10 && isHTML) { var marginTop = +styles.marginTop.split('px')[0]; var marginLeft = +styles.marginLeft.split('px')[0]; offsets.top -= borderTopWidth - marginTop; offsets.bottom -= borderTopWidth - marginTop; offsets.left -= borderLeftWidth - marginLeft; offsets.right -= borderLeftWidth - marginLeft; // Attach marginTop and marginLeft because in some circumstances we may need them offsets.marginTop = marginTop; offsets.marginLeft = marginLeft; } if (isIE10 ? 
parent.contains(scrollParent) : parent === scrollParent && scrollParent.nodeName !== 'BODY') { offsets = includeScroll(offsets, parent); } return offsets; } function getViewportOffsetRectRelativeToArtbitraryNode(element) { var html = element.ownerDocument.documentElement; var relativeOffset = getOffsetRectRelativeToArbitraryNode(element, html); var width = Math.max(html.clientWidth, window.innerWidth || 0); var height = Math.max(html.clientHeight, window.innerHeight || 0); var scrollTop = getScroll(html); var scrollLeft = getScroll(html, 'left'); var offset = { top: scrollTop - relativeOffset.top + relativeOffset.marginTop, left: scrollLeft - relativeOffset.left + relativeOffset.marginLeft, width: width, height: height }; return getClientRect(offset); } /** * Check if the given element is fixed or is inside a fixed parent * @method * @memberof Popper.Utils * @argument {Element} element * @argument {Element} customContainer * @returns {Boolean} answer to "isFixed?" */ function isFixed(element) { var nodeName = element.nodeName; if (nodeName === 'BODY' || nodeName === 'HTML') { return false; } if (getStyleComputedProperty(element, 'position') === 'fixed') { return true; } return isFixed(getParentNode(element)); } /** * Computed the boundaries limits and return them * @method * @memberof Popper.Utils * @param {HTMLElement} popper * @param {HTMLElement} reference * @param {number} padding * @param {HTMLElement} boundariesElement - Element used to define the boundaries * @returns {Object} Coordinates of the boundaries */ function getBoundaries(popper, reference, padding, boundariesElement) { // NOTE: 1 DOM access here var boundaries = { top: 0, left: 0 }; var offsetParent = findCommonOffsetParent(popper, reference); // Handle viewport case if (boundariesElement === 'viewport') { boundaries = getViewportOffsetRectRelativeToArtbitraryNode(offsetParent); } else { // Handle other cases based on DOM element used as boundaries var boundariesNode = void 0; if (boundariesElement === 'scrollParent') { boundariesNode = getScrollParent(getParentNode(popper)); if (boundariesNode.nodeName === 'BODY') { boundariesNode = popper.ownerDocument.documentElement; } } else if (boundariesElement === 'window') { boundariesNode = popper.ownerDocument.documentElement; } else { boundariesNode = boundariesElement; } var offsets = getOffsetRectRelativeToArbitraryNode(boundariesNode, offsetParent); // In case of HTML, we need a different computation if (boundariesNode.nodeName === 'HTML' && !isFixed(offsetParent)) { var _getWindowSizes = getWindowSizes(), height = _getWindowSizes.height, width = _getWindowSizes.width; boundaries.top += offsets.top - offsets.marginTop; boundaries.bottom = height + offsets.top; boundaries.left += offsets.left - offsets.marginLeft; boundaries.right = width + offsets.left; } else { // for all the other DOM elements, this one is good boundaries = offsets; } } // Add paddings boundaries.left += padding; boundaries.top += padding; boundaries.right -= padding; boundaries.bottom -= padding; return boundaries; } function getArea(_ref) { var width = _ref.width, height = _ref.height; return width * height; } /** * Utility used to transform the `auto` placement to the placement with more * available space. 
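 * Editorial note (illustrative, not from the upstream docs): an `auto`
 * placement is resolved to the side whose boundary rectangle best fits the
 * popper, keeping any variation; e.g. `auto-start` may become
 * `bottom-start` when most free space lies below the reference.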
* @method * @memberof Popper.Utils * @argument {Object} data - The data object generated by update method * @argument {Object} options - Modifiers configuration and options * @returns {Object} The data object, properly modified */ function computeAutoPlacement(placement, refRect, popper, reference, boundariesElement) { var padding = arguments.length > 5 && arguments[5] !== undefined ? arguments[5] : 0; if (placement.indexOf('auto') === -1) { return placement; } var boundaries = getBoundaries(popper, reference, padding, boundariesElement); var rects = { top: { width: boundaries.width, height: refRect.top - boundaries.top }, right: { width: boundaries.right - refRect.right, height: boundaries.height }, bottom: { width: boundaries.width, height: boundaries.bottom - refRect.bottom }, left: { width: refRect.left - boundaries.left, height: boundaries.height } }; var sortedAreas = Object.keys(rects).map(function (key) { return _extends({ key: key }, rects[key], { area: getArea(rects[key]) }); }).sort(function (a, b) { return b.area - a.area; }); var filteredAreas = sortedAreas.filter(function (_ref2) { var width = _ref2.width, height = _ref2.height; return width >= popper.clientWidth && height >= popper.clientHeight; }); var computedPlacement = filteredAreas.length > 0 ? filteredAreas[0].key : sortedAreas[0].key; var variation = placement.split('-')[1]; return computedPlacement + (variation ? '-' + variation : ''); } /** * Get offsets to the reference element * @method * @memberof Popper.Utils * @param {Object} state * @param {Element} popper - the popper element * @param {Element} reference - the reference element (the popper will be relative to this) * @returns {Object} An object containing the offsets which will be applied to the popper */ function getReferenceOffsets(state, popper, reference) { var commonOffsetParent = findCommonOffsetParent(popper, reference); return getOffsetRectRelativeToArbitraryNode(reference, commonOffsetParent); } /** * Get the outer sizes of the given element (offset size + margins) * @method * @memberof Popper.Utils * @argument {Element} element * @returns {Object} object containing width and height properties */ function getOuterSizes(element) { var styles = window.getComputedStyle(element); var x = parseFloat(styles.marginTop) + parseFloat(styles.marginBottom); var y = parseFloat(styles.marginLeft) + parseFloat(styles.marginRight); var result = { width: element.offsetWidth + y, height: element.offsetHeight + x }; return result; } /** * Get the opposite placement of the given one * @method * @memberof Popper.Utils * @argument {String} placement * @returns {String} flipped placement */ function getOppositePlacement(placement) { var hash = { left: 'right', right: 'left', bottom: 'top', top: 'bottom' }; return placement.replace(/left|right|bottom|top/g, function (matched) { return hash[matched]; }); } /** * Get offsets to the popper * @method * @memberof Popper.Utils * @param {Object} position - CSS position the Popper will get applied * @param {HTMLElement} popper - the popper element * @param {Object} referenceOffsets - the reference offsets (the popper will be relative to this) * @param {String} placement - one of the valid placement options * @returns {Object} popperOffsets - An object containing the offsets which will be applied to the popper */ function getPopperOffsets(popper, referenceOffsets, placement) { placement = placement.split('-')[0]; // Get popper node sizes var popperRect = getOuterSizes(popper); // Add position, width and height to our offsets object 
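// Editorial worked example (hypothetical numbers): for placement 'top', a
// reference at top: 200 / left: 300 with width 100 and a popper whose outer
// size is 80x40, the code below yields left = 300 + 100/2 - 80/2 = 310 and
// top = 200 - 40 = 160.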
var popperOffsets = { width: popperRect.width, height: popperRect.height }; // depending by the popper placement we have to compute its offsets slightly differently var isHoriz = ['right', 'left'].indexOf(placement) !== -1; var mainSide = isHoriz ? 'top' : 'left'; var secondarySide = isHoriz ? 'left' : 'top'; var measurement = isHoriz ? 'height' : 'width'; var secondaryMeasurement = !isHoriz ? 'height' : 'width'; popperOffsets[mainSide] = referenceOffsets[mainSide] + referenceOffsets[measurement] / 2 - popperRect[measurement] / 2; if (placement === secondarySide) { popperOffsets[secondarySide] = referenceOffsets[secondarySide] - popperRect[secondaryMeasurement]; } else { popperOffsets[secondarySide] = referenceOffsets[getOppositePlacement(secondarySide)]; } return popperOffsets; } /** * Mimics the `find` method of Array * @method * @memberof Popper.Utils * @argument {Array} arr * @argument prop * @argument value * @returns index or -1 */ function find(arr, check) { // use native find if supported if (Array.prototype.find) { return arr.find(check); } // use `filter` to obtain the same behavior of `find` return arr.filter(check)[0]; } /** * Return the index of the matching object * @method * @memberof Popper.Utils * @argument {Array} arr * @argument prop * @argument value * @returns index or -1 */ function findIndex(arr, prop, value) { // use native findIndex if supported if (Array.prototype.findIndex) { return arr.findIndex(function (cur) { return cur[prop] === value; }); } // use `find` + `indexOf` if `findIndex` isn't supported var match = find(arr, function (obj) { return obj[prop] === value; }); return arr.indexOf(match); } /** * Loop trough the list of modifiers and run them in order, * each of them will then edit the data object. * @method * @memberof Popper.Utils * @param {dataObject} data * @param {Array} modifiers * @param {String} ends - Optional modifier name used as stopper * @returns {dataObject} */ function runModifiers(modifiers, data, ends) { var modifiersToRun = ends === undefined ? modifiers : modifiers.slice(0, findIndex(modifiers, 'name', ends)); modifiersToRun.forEach(function (modifier) { if (modifier['function']) { // eslint-disable-line dot-notation console.warn('`modifier.function` is deprecated, use `modifier.fn`!'); } var fn = modifier['function'] || modifier.fn; // eslint-disable-line dot-notation if (modifier.enabled && isFunction(fn)) { // Add properties to offsets to make them a complete clientRect object // we do this before each modifier to make sure the previous one doesn't // mess with these values data.offsets.popper = getClientRect(data.offsets.popper); data.offsets.reference = getClientRect(data.offsets.reference); data = fn(data, modifier); } }); return data; } /** * Updates the position of the popper, computing the new offsets and applying * the new style.<br /> * Prefer `scheduleUpdate` over `update` because of performance reasons. 
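 * Editorial note: `scheduleUpdate` defers the recomputation to the next
 * animation frame and `update` is additionally debounced in the constructor,
 * so rapid successive calls (e.g. from scroll handlers) are coalesced into
 * one layout pass.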
* @method * @memberof Popper */ function update() { // if popper is destroyed, don't perform any further update if (this.state.isDestroyed) { return; } var data = { instance: this, styles: {}, arrowStyles: {}, attributes: {}, flipped: false, offsets: {} }; // compute reference element offsets data.offsets.reference = getReferenceOffsets(this.state, this.popper, this.reference); // compute auto placement, store placement inside the data object, // modifiers will be able to edit `placement` if needed // and refer to originalPlacement to know the original value data.placement = computeAutoPlacement(this.options.placement, data.offsets.reference, this.popper, this.reference, this.options.modifiers.flip.boundariesElement, this.options.modifiers.flip.padding); // store the computed placement inside `originalPlacement` data.originalPlacement = data.placement; // compute the popper offsets data.offsets.popper = getPopperOffsets(this.popper, data.offsets.reference, data.placement); data.offsets.popper.position = 'absolute'; // run the modifiers data = runModifiers(this.modifiers, data); // the first `update` will call `onCreate` callback // the other ones will call `onUpdate` callback if (!this.state.isCreated) { this.state.isCreated = true; this.options.onCreate(data); } else { this.options.onUpdate(data); } } /** * Helper used to know if the given modifier is enabled. * @method * @memberof Popper.Utils * @returns {Boolean} */ function isModifierEnabled(modifiers, modifierName) { return modifiers.some(function (_ref) { var name = _ref.name, enabled = _ref.enabled; return enabled && name === modifierName; }); } /** * Get the prefixed supported property name * @method * @memberof Popper.Utils * @argument {String} property (camelCase) * @returns {String} prefixed property (camelCase or PascalCase, depending on the vendor prefix) */ function getSupportedPropertyName(property) { var prefixes = [false, 'ms', 'Webkit', 'Moz', 'O']; var upperProp = property.charAt(0).toUpperCase() + property.slice(1); for (var i = 0; i < prefixes.length - 1; i++) { var prefix = prefixes[i]; var toCheck = prefix ? '' + prefix + upperProp : property; if (typeof window.document.body.style[toCheck] !== 'undefined') { return toCheck; } } return null; } /** * Destroy the popper * @method * @memberof Popper */ function destroy() { this.state.isDestroyed = true; // touch DOM only if `applyStyle` modifier is enabled if (isModifierEnabled(this.modifiers, 'applyStyle')) { this.popper.removeAttribute('x-placement'); this.popper.style.left = ''; this.popper.style.position = ''; this.popper.style.top = ''; this.popper.style[getSupportedPropertyName('transform')] = ''; } this.disableEventListeners(); // remove the popper if user explicity asked for the deletion on destroy // do not use `remove` because IE11 doesn't support it if (this.options.removeOnDestroy) { this.popper.parentNode.removeChild(this.popper); } return this; } /** * Get the window associated with the element * @argument {Element} element * @returns {Window} */ function getWindow(element) { var ownerDocument = element.ownerDocument; return ownerDocument ? ownerDocument.defaultView : window; } function attachToScrollParents(scrollParent, event, callback, scrollParents) { var isBody = scrollParent.nodeName === 'BODY'; var target = isBody ? 
scrollParent.ownerDocument.defaultView : scrollParent; target.addEventListener(event, callback, { passive: true }); if (!isBody) { attachToScrollParents(getScrollParent(target.parentNode), event, callback, scrollParents); } scrollParents.push(target); } /** * Setup needed event listeners used to update the popper position * @method * @memberof Popper.Utils * @private */ function setupEventListeners(reference, options, state, updateBound) { // Resize event listener on window state.updateBound = updateBound; getWindow(reference).addEventListener('resize', state.updateBound, { passive: true }); // Scroll event listener on scroll parents var scrollElement = getScrollParent(reference); attachToScrollParents(scrollElement, 'scroll', state.updateBound, state.scrollParents); state.scrollElement = scrollElement; state.eventsEnabled = true; return state; } /** * It will add resize/scroll events and start recalculating * position of the popper element when they are triggered. * @method * @memberof Popper */ function enableEventListeners() { if (!this.state.eventsEnabled) { this.state = setupEventListeners(this.reference, this.options, this.state, this.scheduleUpdate); } } /** * Remove event listeners used to update the popper position * @method * @memberof Popper.Utils * @private */ function removeEventListeners(reference, state) { // Remove resize event listener on window getWindow(reference).removeEventListener('resize', state.updateBound); // Remove scroll event listener on scroll parents state.scrollParents.forEach(function (target) { target.removeEventListener('scroll', state.updateBound); }); // Reset state state.updateBound = null; state.scrollParents = []; state.scrollElement = null; state.eventsEnabled = false; return state; } /** * It will remove resize/scroll events and won't recalculate popper position * when they are triggered. It also won't trigger onUpdate callback anymore, * unless you call `update` method manually. 
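 * Editorial sketch: pause and later resume position tracking with
 * `instance.disableEventListeners()` and `instance.enableEventListeners()`;
 * while tracking is paused, call `instance.update()` manually if needed.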
* @method * @memberof Popper */ function disableEventListeners() { if (this.state.eventsEnabled) { window.cancelAnimationFrame(this.scheduleUpdate); this.state = removeEventListeners(this.reference, this.state); } } /** * Tells if a given input is a number * @method * @memberof Popper.Utils * @param {*} input to check * @return {Boolean} */ function isNumeric(n) { return n !== '' && !isNaN(parseFloat(n)) && isFinite(n); } /** * Set the style to the given popper * @method * @memberof Popper.Utils * @argument {Element} element - Element to apply the style to * @argument {Object} styles * Object with a list of properties and values which will be applied to the element */ function setStyles(element, styles) { Object.keys(styles).forEach(function (prop) { var unit = ''; // add unit if the value is numeric and is one of the following if (['width', 'height', 'top', 'right', 'bottom', 'left'].indexOf(prop) !== -1 && isNumeric(styles[prop])) { unit = 'px'; } element.style[prop] = styles[prop] + unit; }); } /** * Set the attributes to the given popper * @method * @memberof Popper.Utils * @argument {Element} element - Element to apply the attributes to * @argument {Object} styles * Object with a list of properties and values which will be applied to the element */ function setAttributes(element, attributes) { Object.keys(attributes).forEach(function (prop) { var value = attributes[prop]; if (value !== false) { element.setAttribute(prop, attributes[prop]); } else { element.removeAttribute(prop); } }); } /** * @function * @memberof Modifiers * @argument {Object} data - The data object generated by `update` method * @argument {Object} data.styles - List of style properties - values to apply to popper element * @argument {Object} data.attributes - List of attribute properties - values to apply to popper element * @argument {Object} options - Modifiers configuration and options * @returns {Object} The same data object */ function applyStyle(data) { // any property present in `data.styles` will be applied to the popper, // in this way we can make the 3rd party modifiers add custom styles to it // Be aware, modifiers could override the properties defined in the previous // lines of this modifier! setStyles(data.instance.popper, data.styles); // any property present in `data.attributes` will be applied to the popper, // they will be set as HTML attributes of the element setAttributes(data.instance.popper, data.attributes); // if arrowElement is defined and arrowStyles has some properties if (data.arrowElement && Object.keys(data.arrowStyles).length) { setStyles(data.arrowElement, data.arrowStyles); } return data; } /** * Set the x-placement attribute before everything else because it could be used * to add margins to the popper margins needs to be calculated to get the * correct popper offsets. * @method * @memberof Popper.modifiers * @param {HTMLElement} reference - The reference element used to position the popper * @param {HTMLElement} popper - The HTML element used as popper. 
* @param {Object} options - Popper.js options */ function applyStyleOnLoad(reference, popper, options, modifierOptions, state) { // compute reference element offsets var referenceOffsets = getReferenceOffsets(state, popper, reference); // compute auto placement, store placement inside the data object, // modifiers will be able to edit `placement` if needed // and refer to originalPlacement to know the original value var placement = computeAutoPlacement(options.placement, referenceOffsets, popper, reference, options.modifiers.flip.boundariesElement, options.modifiers.flip.padding); popper.setAttribute('x-placement', placement); // Apply `position` to popper before anything else because // without the position applied we can't guarantee correct computations setStyles(popper, { position: 'absolute' }); return options; } /** * @function * @memberof Modifiers * @argument {Object} data - The data object generated by `update` method * @argument {Object} options - Modifiers configuration and options * @returns {Object} The data object, properly modified */ function computeStyle(data, options) { var x = options.x, y = options.y; var popper = data.offsets.popper; // Remove this legacy support in Popper.js v2 var legacyGpuAccelerationOption = find(data.instance.modifiers, function (modifier) { return modifier.name === 'applyStyle'; }).gpuAcceleration; if (legacyGpuAccelerationOption !== undefined) { console.warn('WARNING: `gpuAcceleration` option moved to `computeStyle` modifier and will not be supported in future versions of Popper.js!'); } var gpuAcceleration = legacyGpuAccelerationOption !== undefined ? legacyGpuAccelerationOption : options.gpuAcceleration; var offsetParent = getOffsetParent(data.instance.popper); var offsetParentRect = getBoundingClientRect(offsetParent); // Styles var styles = { position: popper.position }; // floor sides to avoid blurry text var offsets = { left: Math.floor(popper.left), top: Math.floor(popper.top), bottom: Math.floor(popper.bottom), right: Math.floor(popper.right) }; var sideA = x === 'bottom' ? 'top' : 'bottom'; var sideB = y === 'right' ? 'left' : 'right'; // if gpuAcceleration is set to `true` and transform is supported, // we use `translate3d` to apply the position to the popper we // automatically use the supported prefixed version if needed var prefixedProperty = getSupportedPropertyName('transform'); // now, let's make a step back and look at this code closely (wtf?) // If the content of the popper grows once it's been positioned, it // may happen that the popper gets misplaced because of the new content // overflowing its reference element // To avoid this problem, we provide two options (x and y), which allow // the consumer to define the offset origin. // If we position a popper on top of a reference element, we can set // `x` to `top` to make the popper grow towards its top instead of // its bottom. var left = void 0, top = void 0; if (sideA === 'bottom') { top = -offsetParentRect.height + offsets.bottom; } else { top = offsets.top; } if (sideB === 'right') { left = -offsetParentRect.width + offsets.right; } else { left = offsets.left; } if (gpuAcceleration && prefixedProperty) { styles[prefixedProperty] = 'translate3d(' + left + 'px, ' + top + 'px, 0)'; styles[sideA] = 0; styles[sideB] = 0; styles.willChange = 'transform'; } else { // othwerise, we use the standard `top`, `left`, `bottom` and `right` properties var invertTop = sideA === 'bottom' ? -1 : 1; var invertLeft = sideB === 'right' ? 
-1 : 1; styles[sideA] = top * invertTop; styles[sideB] = left * invertLeft; styles.willChange = sideA + ', ' + sideB; } // Attributes var attributes = { 'x-placement': data.placement }; // Update `data` attributes, styles and arrowStyles data.attributes = _extends({}, attributes, data.attributes); data.styles = _extends({}, styles, data.styles); data.arrowStyles = _extends({}, data.offsets.arrow, data.arrowStyles); return data; } /** * Helper used to know if the given modifier depends from another one.<br /> * It checks if the needed modifier is listed and enabled. * @method * @memberof Popper.Utils * @param {Array} modifiers - list of modifiers * @param {String} requestingName - name of requesting modifier * @param {String} requestedName - name of requested modifier * @returns {Boolean} */ function isModifierRequired(modifiers, requestingName, requestedName) { var requesting = find(modifiers, function (_ref) { var name = _ref.name; return name === requestingName; }); var isRequired = !!requesting && modifiers.some(function (modifier) { return modifier.name === requestedName && modifier.enabled && modifier.order < requesting.order; }); if (!isRequired) { var _requesting = '`' + requestingName + '`'; var requested = '`' + requestedName + '`'; console.warn(requested + ' modifier is required by ' + _requesting + ' modifier in order to work, be sure to include it before ' + _requesting + '!'); } return isRequired; } /** * @function * @memberof Modifiers * @argument {Object} data - The data object generated by update method * @argument {Object} options - Modifiers configuration and options * @returns {Object} The data object, properly modified */ function arrow(data, options) { // arrow depends on keepTogether in order to work if (!isModifierRequired(data.instance.modifiers, 'arrow', 'keepTogether')) { return data; } var arrowElement = options.element; // if arrowElement is a string, suppose it's a CSS selector if (typeof arrowElement === 'string') { arrowElement = data.instance.popper.querySelector(arrowElement); // if arrowElement is not found, don't run the modifier if (!arrowElement) { return data; } } else { // if the arrowElement isn't a query selector we must check that the // provided DOM node is child of its popper node if (!data.instance.popper.contains(arrowElement)) { console.warn('WARNING: `arrow.element` must be child of its popper element!'); return data; } } var placement = data.placement.split('-')[0]; var _data$offsets = data.offsets, popper = _data$offsets.popper, reference = _data$offsets.reference; var isVertical = ['left', 'right'].indexOf(placement) !== -1; var len = isVertical ? 'height' : 'width'; var sideCapitalized = isVertical ? 'Top' : 'Left'; var side = sideCapitalized.toLowerCase(); var altSide = isVertical ? 'left' : 'top'; var opSide = isVertical ? 
'bottom' : 'right'; var arrowElementSize = getOuterSizes(arrowElement)[len]; // // extends keepTogether behavior making sure the popper and its // reference have enough pixels in conjuction // // top/left side if (reference[opSide] - arrowElementSize < popper[side]) { data.offsets.popper[side] -= popper[side] - (reference[opSide] - arrowElementSize); } // bottom/right side if (reference[side] + arrowElementSize > popper[opSide]) { data.offsets.popper[side] += reference[side] + arrowElementSize - popper[opSide]; } // compute center of the popper var center = reference[side] + reference[len] / 2 - arrowElementSize / 2; // Compute the sideValue using the updated popper offsets // take popper margin in account because we don't have this info available var popperMarginSide = getStyleComputedProperty(data.instance.popper, 'margin' + sideCapitalized).replace('px', ''); var sideValue = center - getClientRect(data.offsets.popper)[side] - popperMarginSide; // prevent arrowElement from being placed not contiguously to its popper sideValue = Math.max(Math.min(popper[len] - arrowElementSize, sideValue), 0); data.arrowElement = arrowElement; data.offsets.arrow = {}; data.offsets.arrow[side] = Math.round(sideValue); data.offsets.arrow[altSide] = ''; // make sure to unset any eventual altSide value from the DOM node return data; } /** * Get the opposite placement variation of the given one * @method * @memberof Popper.Utils * @argument {String} placement variation * @returns {String} flipped placement variation */ function getOppositeVariation(variation) { if (variation === 'end') { return 'start'; } else if (variation === 'start') { return 'end'; } return variation; } /** * List of accepted placements to use as values of the `placement` option.<br /> * Valid placements are: * - `auto` * - `top` * - `right` * - `bottom` * - `left` * * Each placement can have a variation from this list: * - `-start` * - `-end` * * Variations are interpreted easily if you think of them as the left to right * written languages. Horizontally (`top` and `bottom`), `start` is left and `end` * is right.<br /> * Vertically (`left` and `right`), `start` is top and `end` is bottom. * * Some valid examples are: * - `top-end` (on top of reference, right aligned) * - `right-start` (on right of reference, top aligned) * - `bottom` (on bottom, centered) * - `auto-right` (on the side with more space available, alignment depends by placement) * * @static * @type {Array} * @enum {String} * @readonly * @method placements * @memberof Popper */ var placements = ['auto-start', 'auto', 'auto-end', 'top-start', 'top', 'top-end', 'right-start', 'right', 'right-end', 'bottom-end', 'bottom', 'bottom-start', 'left-end', 'left', 'left-start']; // Get rid of `auto` `auto-start` and `auto-end` var validPlacements = placements.slice(3); /** * Given an initial placement, returns all the subsequent placements * clockwise (or counter-clockwise). * * @method * @memberof Popper.Utils * @argument {String} placement - A valid placement (it accepts variations) * @argument {Boolean} counter - Set to true to walk the placements counterclockwise * @returns {Array} placements including their variations */ function clockwise(placement) { var counter = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false; var index = validPlacements.indexOf(placement); var arr = validPlacements.slice(index + 1).concat(validPlacements.slice(0, index)); return counter ? 
arr.reverse() : arr; } var BEHAVIORS = { FLIP: 'flip', CLOCKWISE: 'clockwise', COUNTERCLOCKWISE: 'counterclockwise' }; /** * @function * @memberof Modifiers * @argument {Object} data - The data object generated by update method * @argument {Object} options - Modifiers configuration and options * @returns {Object} The data object, properly modified */ function flip(data, options) { // if `inner` modifier is enabled, we can't use the `flip` modifier if (isModifierEnabled(data.instance.modifiers, 'inner')) { return data; } if (data.flipped && data.placement === data.originalPlacement) { // seems like flip is trying to loop, probably there's not enough space on any of the flippable sides return data; } var boundaries = getBoundaries(data.instance.popper, data.instance.reference, options.padding, options.boundariesElement); var placement = data.placement.split('-')[0]; var placementOpposite = getOppositePlacement(placement); var variation = data.placement.split('-')[1] || ''; var flipOrder = []; switch (options.behavior) { case BEHAVIORS.FLIP: flipOrder = [placement, placementOpposite]; break; case BEHAVIORS.CLOCKWISE: flipOrder = clockwise(placement); break; case BEHAVIORS.COUNTERCLOCKWISE: flipOrder = clockwise(placement, true); break; default: flipOrder = options.behavior; } flipOrder.forEach(function (step, index) { if (placement !== step || flipOrder.length === index + 1) { return data; } placement = data.placement.split('-')[0]; placementOpposite = getOppositePlacement(placement); var popperOffsets = data.offsets.popper; var refOffsets = data.offsets.reference; // using floor because the reference offsets may contain decimals we are not going to consider here var floor = Math.floor; var overlapsRef = placement === 'left' && floor(popperOffsets.right) > floor(refOffsets.left) || placement === 'right' && floor(popperOffsets.left) < floor(refOffsets.right) || placement === 'top' && floor(popperOffsets.bottom) > floor(refOffsets.top) || placement === 'bottom' && floor(popperOffsets.top) < floor(refOffsets.bottom); var overflowsLeft = floor(popperOffsets.left) < floor(boundaries.left); var overflowsRight = floor(popperOffsets.right) > floor(boundaries.right); var overflowsTop = floor(popperOffsets.top) < floor(boundaries.top); var overflowsBottom = floor(popperOffsets.bottom) > floor(boundaries.bottom); var overflowsBoundaries = placement === 'left' && overflowsLeft || placement === 'right' && overflowsRight || placement === 'top' && overflowsTop || placement === 'bottom' && overflowsBottom; // flip the variation if required var isVertical = ['top', 'bottom'].indexOf(placement) !== -1; var flippedVariation = !!options.flipVariations && (isVertical && variation === 'start' && overflowsLeft || isVertical && variation === 'end' && overflowsRight || !isVertical && variation === 'start' && overflowsTop || !isVertical && variation === 'end' && overflowsBottom); if (overlapsRef || overflowsBoundaries || flippedVariation) { // this boolean to detect any flip loop data.flipped = true; if (overlapsRef || overflowsBoundaries) { placement = flipOrder[index + 1]; } if (flippedVariation) { variation = getOppositeVariation(variation); } data.placement = placement + (variation ? 
'-' + variation : ''); // this object contains `position`, we want to preserve it along with // any additional property we may add in the future data.offsets.popper = _extends({}, data.offsets.popper, getPopperOffsets(data.instance.popper, data.offsets.reference, data.placement)); data = runModifiers(data.instance.modifiers, data, 'flip'); } }); return data; } /** * @function * @memberof Modifiers * @argument {Object} data - The data object generated by update method * @argument {Object} options - Modifiers configuration and options * @returns {Object} The data object, properly modified */ function keepTogether(data) { var _data$offsets = data.offsets, popper = _data$offsets.popper, reference = _data$offsets.reference; var placement = data.placement.split('-')[0]; var floor = Math.floor; var isVertical = ['top', 'bottom'].indexOf(placement) !== -1; var side = isVertical ? 'right' : 'bottom'; var opSide = isVertical ? 'left' : 'top'; var measurement = isVertical ? 'width' : 'height'; if (popper[side] < floor(reference[opSide])) { data.offsets.popper[opSide] = floor(reference[opSide]) - popper[measurement]; } if (popper[opSide] > floor(reference[side])) { data.offsets.popper[opSide] = floor(reference[side]); } return data; } /** * Converts a string containing value + unit into a px value number * @function * @memberof {modifiers~offset} * @private * @argument {String} str - Value + unit string * @argument {String} measurement - `height` or `width` * @argument {Object} popperOffsets * @argument {Object} referenceOffsets * @returns {Number|String} * Value in pixels, or original string if no values were extracted */ function toValue(str, measurement, popperOffsets, referenceOffsets) { // separate value from unit var split = str.match(/((?:\-|\+)?\d*\.?\d*)(.*)/); var value = +split[1]; var unit = split[2]; // If it's not a number it's an operator, I guess if (!value) { return str; } if (unit.indexOf('%') === 0) { var element = void 0; switch (unit) { case '%p': element = popperOffsets; break; case '%': case '%r': default: element = referenceOffsets; } var rect = getClientRect(element); return rect[measurement] / 100 * value; } else if (unit === 'vh' || unit === 'vw') { // if is a vh or vw, we calculate the size based on the viewport var size = void 0; if (unit === 'vh') { size = Math.max(document.documentElement.clientHeight, window.innerHeight || 0); } else { size = Math.max(document.documentElement.clientWidth, window.innerWidth || 0); } return size / 100 * value; } else { // if is an explicit pixel unit, we get rid of the unit and keep the value // if is an implicit unit, it's px, and we return just the value return value; } } /** * Parse an `offset` string to extrapolate `x` and `y` numeric offsets. 
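 * Editorial example (hypothetical values): with base placement `top`,
 * `parseOffset('10, -20%', popperOffsets, referenceOffsets, 'top')` resolves
 * to `[10, -0.2 * referenceHeight]`; the first entry is applied on the x
 * axis and the second on the y axis by the `offset` modifier below.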
* @function * @memberof {modifiers~offset} * @private * @argument {String} offset * @argument {Object} popperOffsets * @argument {Object} referenceOffsets * @argument {String} basePlacement * @returns {Array} a two cells array with x and y offsets in numbers */ function parseOffset(offset, popperOffsets, referenceOffsets, basePlacement) { var offsets = [0, 0]; // Use height if placement is left or right and index is 0 otherwise use width // in this way the first offset will use an axis and the second one // will use the other one var useHeight = ['right', 'left'].indexOf(basePlacement) !== -1; // Split the offset string to obtain a list of values and operands // The regex addresses values with the plus or minus sign in front (+10, -20, etc) var fragments = offset.split(/(\+|\-)/).map(function (frag) { return frag.trim(); }); // Detect if the offset string contains a pair of values or a single one // they could be separated by comma or space var divider = fragments.indexOf(find(fragments, function (frag) { return frag.search(/,|\s/) !== -1; })); if (fragments[divider] && fragments[divider].indexOf(',') === -1) { console.warn('Offsets separated by white space(s) are deprecated, use a comma (,) instead.'); } // If divider is found, we divide the list of values and operands to divide // them by ofset X and Y. var splitRegex = /\s*,\s*|\s+/; var ops = divider !== -1 ? [fragments.slice(0, divider).concat([fragments[divider].split(splitRegex)[0]]), [fragments[divider].split(splitRegex)[1]].concat(fragments.slice(divider + 1))] : [fragments]; // Convert the values with units to absolute pixels to allow our computations ops = ops.map(function (op, index) { // Most of the units rely on the orientation of the popper var measurement = (index === 1 ? !useHeight : useHeight) ? 'height' : 'width'; var mergeWithPrevious = false; return op // This aggregates any `+` or `-` sign that aren't considered operators // e.g.: 10 + +5 => [10, +, +5] .reduce(function (a, b) { if (a[a.length - 1] === '' && ['+', '-'].indexOf(b) !== -1) { a[a.length - 1] = b; mergeWithPrevious = true; return a; } else if (mergeWithPrevious) { a[a.length - 1] += b; mergeWithPrevious = false; return a; } else { return a.concat(b); } }, []) // Here we convert the string values into number values (in px) .map(function (str) { return toValue(str, measurement, popperOffsets, referenceOffsets); }); }); // Loop trough the offsets arrays and execute the operations ops.forEach(function (op, index) { op.forEach(function (frag, index2) { if (isNumeric(frag)) { offsets[index] += frag * (op[index2 - 1] === '-' ? 
-1 : 1); } }); }); return offsets; } /** * @function * @memberof Modifiers * @argument {Object} data - The data object generated by update method * @argument {Object} options - Modifiers configuration and options * @argument {Number|String} options.offset=0 * The offset value as described in the modifier description * @returns {Object} The data object, properly modified */ function offset(data, _ref) { var offset = _ref.offset; var placement = data.placement, _data$offsets = data.offsets, popper = _data$offsets.popper, reference = _data$offsets.reference; var basePlacement = placement.split('-')[0]; var offsets = void 0; if (isNumeric(+offset)) { offsets = [+offset, 0]; } else { offsets = parseOffset(offset, popper, reference, basePlacement); } if (basePlacement === 'left') { popper.top += offsets[0]; popper.left -= offsets[1]; } else if (basePlacement === 'right') { popper.top += offsets[0]; popper.left += offsets[1]; } else if (basePlacement === 'top') { popper.left += offsets[0]; popper.top -= offsets[1]; } else if (basePlacement === 'bottom') { popper.left += offsets[0]; popper.top += offsets[1]; } data.popper = popper; return data; } /** * @function * @memberof Modifiers * @argument {Object} data - The data object generated by `update` method * @argument {Object} options - Modifiers configuration and options * @returns {Object} The data object, properly modified */ function preventOverflow(data, options) { var boundariesElement = options.boundariesElement || getOffsetParent(data.instance.popper); // If offsetParent is the reference element, we really want to // go one step up and use the next offsetParent as reference to // avoid to make this modifier completely useless and look like broken if (data.instance.reference === boundariesElement) { boundariesElement = getOffsetParent(boundariesElement); } var boundaries = getBoundaries(data.instance.popper, data.instance.reference, options.padding, boundariesElement); options.boundaries = boundaries; var order = options.priority; var popper = data.offsets.popper; var check = { primary: function primary(placement) { var value = popper[placement]; if (popper[placement] < boundaries[placement] && !options.escapeWithReference) { value = Math.max(popper[placement], boundaries[placement]); } return defineProperty({}, placement, value); }, secondary: function secondary(placement) { var mainSide = placement === 'right' ? 'left' : 'top'; var value = popper[mainSide]; if (popper[placement] > boundaries[placement] && !options.escapeWithReference) { value = Math.min(popper[mainSide], boundaries[placement] - (placement === 'right' ? popper.width : popper.height)); } return defineProperty({}, mainSide, value); } }; order.forEach(function (placement) { var side = ['left', 'top'].indexOf(placement) !== -1 ? 
'primary' : 'secondary'; popper = _extends({}, popper, check[side](placement)); }); data.offsets.popper = popper; return data; } /** * @function * @memberof Modifiers * @argument {Object} data - The data object generated by `update` method * @argument {Object} options - Modifiers configuration and options * @returns {Object} The data object, properly modified */ function shift(data) { var placement = data.placement; var basePlacement = placement.split('-')[0]; var shiftvariation = placement.split('-')[1]; // if shift shiftvariation is specified, run the modifier if (shiftvariation) { var _data$offsets = data.offsets, reference = _data$offsets.reference, popper = _data$offsets.popper; var isVertical = ['bottom', 'top'].indexOf(basePlacement) !== -1; var side = isVertical ? 'left' : 'top'; var measurement = isVertical ? 'width' : 'height'; var shiftOffsets = { start: defineProperty({}, side, reference[side]), end: defineProperty({}, side, reference[side] + reference[measurement] - popper[measurement]) }; data.offsets.popper = _extends({}, popper, shiftOffsets[shiftvariation]); } return data; } /** * @function * @memberof Modifiers * @argument {Object} data - The data object generated by update method * @argument {Object} options - Modifiers configuration and options * @returns {Object} The data object, properly modified */ function hide(data) { if (!isModifierRequired(data.instance.modifiers, 'hide', 'preventOverflow')) { return data; } var refRect = data.offsets.reference; var bound = find(data.instance.modifiers, function (modifier) { return modifier.name === 'preventOverflow'; }).boundaries; if (refRect.bottom < bound.top || refRect.left > bound.right || refRect.top > bound.bottom || refRect.right < bound.left) { // Avoid unnecessary DOM access if visibility hasn't changed if (data.hide === true) { return data; } data.hide = true; data.attributes['x-out-of-boundaries'] = ''; } else { // Avoid unnecessary DOM access if visibility hasn't changed if (data.hide === false) { return data; } data.hide = false; data.attributes['x-out-of-boundaries'] = false; } return data; } /** * @function * @memberof Modifiers * @argument {Object} data - The data object generated by `update` method * @argument {Object} options - Modifiers configuration and options * @returns {Object} The data object, properly modified */ function inner(data) { var placement = data.placement; var basePlacement = placement.split('-')[0]; var _data$offsets = data.offsets, popper = _data$offsets.popper, reference = _data$offsets.reference; var isHoriz = ['left', 'right'].indexOf(basePlacement) !== -1; var subtractLength = ['top', 'left'].indexOf(basePlacement) === -1; popper[isHoriz ? 'left' : 'top'] = reference[basePlacement] - (subtractLength ? popper[isHoriz ? 'width' : 'height'] : 0); data.placement = getOppositePlacement(placement); data.offsets.popper = getClientRect(popper); return data; } /** * Modifier function, each modifier can have a function of this type assigned * to its `fn` property.<br /> * These functions will be called on each update, this means that you must * make sure they are performant enough to avoid performance bottlenecks. 
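 * Editorial sketch of a custom modifier entry (hypothetical name
 * `logPlacement`), passed through the `modifiers` option and executed by
 * `runModifiers` in `order`:
 *
 *   logPlacement: {
 *     order: 875,
 *     enabled: true,
 *     fn: function (data) { console.log(data.placement); return data; }
 *   }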
* * @function ModifierFn * @argument {dataObject} data - The data object generated by `update` method * @argument {Object} options - Modifiers configuration and options * @returns {dataObject} The data object, properly modified */ /** * Modifiers are plugins used to alter the behavior of your poppers.<br /> * Popper.js uses a set of 9 modifiers to provide all the basic functionalities * needed by the library. * * Usually you don't want to override the `order`, `fn` and `onLoad` props. * All the other properties are configurations that could be tweaked. * @namespace modifiers */ var modifiers = { /** * Modifier used to shift the popper on the start or end of its reference * element.<br /> * It will read the variation of the `placement` property.<br /> * It can be one either `-end` or `-start`. * @memberof modifiers * @inner */ shift: { /** @prop {number} order=100 - Index used to define the order of execution */ order: 100, /** @prop {Boolean} enabled=true - Whether the modifier is enabled or not */ enabled: true, /** @prop {ModifierFn} */ fn: shift }, /** * The `offset` modifier can shift your popper on both its axis. * * It accepts the following units: * - `px` or unitless, interpreted as pixels * - `%` or `%r`, percentage relative to the length of the reference element * - `%p`, percentage relative to the length of the popper element * - `vw`, CSS viewport width unit * - `vh`, CSS viewport height unit * * For length is intended the main axis relative to the placement of the popper.<br /> * This means that if the placement is `top` or `bottom`, the length will be the * `width`. In case of `left` or `right`, it will be the height. * * You can provide a single value (as `Number` or `String`), or a pair of values * as `String` divided by a comma or one (or more) white spaces.<br /> * The latter is a deprecated method because it leads to confusion and will be * removed in v2.<br /> * Additionally, it accepts additions and subtractions between different units. * Note that multiplications and divisions aren't supported. * * Valid examples are: * ``` * 10 * '10%' * '10, 10' * '10%, 10' * '10 + 10%' * '10 - 5vh + 3%' * '-10px + 5vh, 5px - 6%' * ``` * > **NB**: If you desire to apply offsets to your poppers in a way that may make them overlap * > with their reference element, unfortunately, you will have to disable the `flip` modifier. * > More on this [reading this issue](https://github.com/FezVrasta/popper.js/issues/373) * * @memberof modifiers * @inner */ offset: { /** @prop {number} order=200 - Index used to define the order of execution */ order: 200, /** @prop {Boolean} enabled=true - Whether the modifier is enabled or not */ enabled: true, /** @prop {ModifierFn} */ fn: offset, /** @prop {Number|String} offset=0 * The offset value as described in the modifier description */ offset: 0 }, /** * Modifier used to prevent the popper from being positioned outside the boundary. * * An scenario exists where the reference itself is not within the boundaries.<br /> * We can say it has "escaped the boundaries" — or just "escaped".<br /> * In this case we need to decide whether the popper should either: * * - detach from the reference and remain "trapped" in the boundaries, or * - if it should ignore the boundary and "escape with its reference" * * When `escapeWithReference` is set to`true` and reference is completely * outside its boundaries, the popper will overflow (or completely leave) * the boundaries in order to remain attached to the edge of the reference. 
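 * Editorial sketch: that behavior is opted into per instance, e.g.
 * `new Popper(reference, popper, { modifiers: { preventOverflow: { escapeWithReference: true } } })`.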
* * @memberof modifiers * @inner */ preventOverflow: { /** @prop {number} order=300 - Index used to define the order of execution */ order: 300, /** @prop {Boolean} enabled=true - Whether the modifier is enabled or not */ enabled: true, /** @prop {ModifierFn} */ fn: preventOverflow, /** * @prop {Array} [priority=['left','right','top','bottom']] * Popper will try to prevent overflow following these priorities by default, * then, it could overflow on the left and on top of the `boundariesElement` */ priority: ['left', 'right', 'top', 'bottom'], /** * @prop {number} padding=5 * Amount of pixel used to define a minimum distance between the boundaries * and the popper this makes sure the popper has always a little padding * between the edges of its container */ padding: 5, /** * @prop {String|HTMLElement} boundariesElement='scrollParent' * Boundaries used by the modifier, can be `scrollParent`, `window`, * `viewport` or any DOM element. */ boundariesElement: 'scrollParent' }, /** * Modifier used to make sure the reference and its popper stay near eachothers * without leaving any gap between the two. Expecially useful when the arrow is * enabled and you want to assure it to point to its reference element. * It cares only about the first axis, you can still have poppers with margin * between the popper and its reference element. * @memberof modifiers * @inner */ keepTogether: { /** @prop {number} order=400 - Index used to define the order of execution */ order: 400, /** @prop {Boolean} enabled=true - Whether the modifier is enabled or not */ enabled: true, /** @prop {ModifierFn} */ fn: keepTogether }, /** * This modifier is used to move the `arrowElement` of the popper to make * sure it is positioned between the reference element and its popper element. * It will read the outer size of the `arrowElement` node to detect how many * pixels of conjuction are needed. * * It has no effect if no `arrowElement` is provided. * @memberof modifiers * @inner */ arrow: { /** @prop {number} order=500 - Index used to define the order of execution */ order: 500, /** @prop {Boolean} enabled=true - Whether the modifier is enabled or not */ enabled: true, /** @prop {ModifierFn} */ fn: arrow, /** @prop {String|HTMLElement} element='[x-arrow]' - Selector or node used as arrow */ element: '[x-arrow]' }, /** * Modifier used to flip the popper's placement when it starts to overlap its * reference element. * * Requires the `preventOverflow` modifier before it in order to work. * * **NOTE:** this modifier will interrupt the current update cycle and will * restart it if it detects the need to flip the placement. * @memberof modifiers * @inner */ flip: { /** @prop {number} order=600 - Index used to define the order of execution */ order: 600, /** @prop {Boolean} enabled=true - Whether the modifier is enabled or not */ enabled: true, /** @prop {ModifierFn} */ fn: flip, /** * @prop {String|Array} behavior='flip' * The behavior used to change the popper's placement. It can be one of * `flip`, `clockwise`, `counterclockwise` or an array with a list of valid * placements (with optional variations). 
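 * Editorial example: `behavior: ['left', 'bottom', 'top']` makes the popper
 * try those placements in that order when it overflows, instead of only
 * flipping to the opposite side.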
*/ behavior: 'flip', /** * @prop {number} padding=5 * The popper will flip if it hits the edges of the `boundariesElement` */ padding: 5, /** * @prop {String|HTMLElement} boundariesElement='viewport' * The element which will define the boundaries of the popper position, * the popper will never be placed outside of the defined boundaries * (except if keepTogether is enabled) */ boundariesElement: 'viewport' }, /** * Modifier used to make the popper flow toward the inner of the reference element. * By default, when this modifier is disabled, the popper will be placed outside * the reference element. * @memberof modifiers * @inner */ inner: { /** @prop {number} order=700 - Index used to define the order of execution */ order: 700, /** @prop {Boolean} enabled=false - Whether the modifier is enabled or not */ enabled: false, /** @prop {ModifierFn} */ fn: inner }, /** * Modifier used to hide the popper when its reference element is outside of the * popper boundaries. It will set a `x-out-of-boundaries` attribute which can * be used to hide with a CSS selector the popper when its reference is * out of boundaries. * * Requires the `preventOverflow` modifier before it in order to work. * @memberof modifiers * @inner */ hide: { /** @prop {number} order=800 - Index used to define the order of execution */ order: 800, /** @prop {Boolean} enabled=true - Whether the modifier is enabled or not */ enabled: true, /** @prop {ModifierFn} */ fn: hide }, /** * Computes the style that will be applied to the popper element to gets * properly positioned. * * Note that this modifier will not touch the DOM, it just prepares the styles * so that `applyStyle` modifier can apply it. This separation is useful * in case you need to replace `applyStyle` with a custom implementation. * * This modifier has `850` as `order` value to maintain backward compatibility * with previous versions of Popper.js. Expect the modifiers ordering method * to change in future major versions of the library. * * @memberof modifiers * @inner */ computeStyle: { /** @prop {number} order=850 - Index used to define the order of execution */ order: 850, /** @prop {Boolean} enabled=true - Whether the modifier is enabled or not */ enabled: true, /** @prop {ModifierFn} */ fn: computeStyle, /** * @prop {Boolean} gpuAcceleration=true * If true, it uses the CSS 3d transformation to position the popper. * Otherwise, it will use the `top` and `left` properties. */ gpuAcceleration: true, /** * @prop {string} [x='bottom'] * Where to anchor the X axis (`bottom` or `top`). AKA X offset origin. * Change this if your popper should grow in a direction different from `bottom` */ x: 'bottom', /** * @prop {string} [x='left'] * Where to anchor the Y axis (`left` or `right`). AKA Y offset origin. * Change this if your popper should grow in a direction different from `right` */ y: 'right' }, /** * Applies the computed styles to the popper element. * * All the DOM manipulations are limited to this modifier. This is useful in case * you want to integrate Popper.js inside a framework or view library and you * want to delegate all the DOM manipulations to it. * * Note that if you disable this modifier, you must make sure the popper element * has its position set to `absolute` before Popper.js can do its work! * * Just disable this modifier and define you own to achieve the desired effect. 
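 * Editorial sketch (hypothetical `renderPopper` helper): disable this
 * modifier and hand the computed values to your own view layer, e.g.
 *   applyStyle: { enabled: false },
 *   customApplyStyle: { order: 900, enabled: true, fn: function (data) { renderPopper(data.styles, data.attributes); return data; } }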
* * @memberof modifiers * @inner */ applyStyle: { /** @prop {number} order=900 - Index used to define the order of execution */ order: 900, /** @prop {Boolean} enabled=true - Whether the modifier is enabled or not */ enabled: true, /** @prop {ModifierFn} */ fn: applyStyle, /** @prop {Function} */ onLoad: applyStyleOnLoad, /** * @deprecated since version 1.10.0, the property moved to `computeStyle` modifier * @prop {Boolean} gpuAcceleration=true * If true, it uses the CSS 3d transformation to position the popper. * Otherwise, it will use the `top` and `left` properties. */ gpuAcceleration: undefined } }; /** * The `dataObject` is an object containing all the informations used by Popper.js * this object get passed to modifiers and to the `onCreate` and `onUpdate` callbacks. * @name dataObject * @property {Object} data.instance The Popper.js instance * @property {String} data.placement Placement applied to popper * @property {String} data.originalPlacement Placement originally defined on init * @property {Boolean} data.flipped True if popper has been flipped by flip modifier * @property {Boolean} data.hide True if the reference element is out of boundaries, useful to know when to hide the popper. * @property {HTMLElement} data.arrowElement Node used as arrow by arrow modifier * @property {Object} data.styles Any CSS property defined here will be applied to the popper, it expects the JavaScript nomenclature (eg. `marginBottom`) * @property {Object} data.arrowStyles Any CSS property defined here will be applied to the popper arrow, it expects the JavaScript nomenclature (eg. `marginBottom`) * @property {Object} data.boundaries Offsets of the popper boundaries * @property {Object} data.offsets The measurements of popper, reference and arrow elements. * @property {Object} data.offsets.popper `top`, `left`, `width`, `height` values * @property {Object} data.offsets.reference `top`, `left`, `width`, `height` values * @property {Object} data.offsets.arrow] `top` and `left` offsets, only one of them will be different from 0 */ /** * Default options provided to Popper.js constructor.<br /> * These can be overriden using the `options` argument of Popper.js.<br /> * To override an option, simply pass as 3rd argument an object with the same * structure of this object, example: * ``` * new Popper(ref, pop, { * modifiers: { * preventOverflow: { enabled: false } * } * }) * ``` * @type {Object} * @static * @memberof Popper */ var Defaults = { /** * Popper's placement * @prop {Popper.placements} placement='bottom' */ placement: 'bottom', /** * Whether events (resize, scroll) are initially enabled * @prop {Boolean} eventsEnabled=true */ eventsEnabled: true, /** * Set to true if you want to automatically remove the popper when * you call the `destroy` method. * @prop {Boolean} removeOnDestroy=false */ removeOnDestroy: false, /** * Callback called when the popper is created.<br /> * By default, is set to no-op.<br /> * Access Popper.js instance with `data.instance`. * @prop {onCreate} */ onCreate: function onCreate() {}, /** * Callback called when the popper is updated, this callback is not called * on the initialization/creation of the popper, but only on subsequent * updates.<br /> * By default, is set to no-op.<br /> * Access Popper.js instance with `data.instance`. * @prop {onUpdate} */ onUpdate: function onUpdate() {}, /** * List of modifiers used to modify the offsets before they are applied to the popper. 
* They provide most of the functionalities of Popper.js * @prop {modifiers} */ modifiers: modifiers }; /** * @callback onCreate * @param {dataObject} data */ /** * @callback onUpdate * @param {dataObject} data */ // Utils // Methods var Popper = function () { /** * Create a new Popper.js instance * @class Popper * @param {HTMLElement|referenceObject} reference - The reference element used to position the popper * @param {HTMLElement} popper - The HTML element used as popper. * @param {Object} options - Your custom options to override the ones defined in [Defaults](#defaults) * @return {Object} instance - The generated Popper.js instance */ function Popper(reference, popper) { var _this = this; var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {}; classCallCheck(this, Popper); this.scheduleUpdate = function () { return requestAnimationFrame(_this.update); }; // make update() debounced, so that it only runs at most once-per-tick this.update = debounce(this.update.bind(this)); // with {} we create a new object with the options inside it this.options = _extends({}, Popper.Defaults, options); // init state this.state = { isDestroyed: false, isCreated: false, scrollParents: [] }; // get reference and popper elements (allow jQuery wrappers) this.reference = reference && reference.jquery ? reference[0] : reference; this.popper = popper && popper.jquery ? popper[0] : popper; // Deep merge modifiers options this.options.modifiers = {}; Object.keys(_extends({}, Popper.Defaults.modifiers, options.modifiers)).forEach(function (name) { _this.options.modifiers[name] = _extends({}, Popper.Defaults.modifiers[name] || {}, options.modifiers ? options.modifiers[name] : {}); }); // Refactoring modifiers' list (Object => Array) this.modifiers = Object.keys(this.options.modifiers).map(function (name) { return _extends({ name: name }, _this.options.modifiers[name]); }) // sort the modifiers by order .sort(function (a, b) { return a.order - b.order; }); // modifiers have the ability to execute arbitrary code when Popper.js get inited // such code is executed in the same order of its modifier // they could add new properties to their options configuration // BE AWARE: don't add options to `options.modifiers.name` but to `modifierOptions`! 
this.modifiers.forEach(function (modifierOptions) { if (modifierOptions.enabled && isFunction(modifierOptions.onLoad)) { modifierOptions.onLoad(_this.reference, _this.popper, _this.options, modifierOptions, _this.state); } }); // fire the first update to position the popper in the right place this.update(); var eventsEnabled = this.options.eventsEnabled; if (eventsEnabled) { // setup event listeners, they will take care of update the position in specific situations this.enableEventListeners(); } this.state.eventsEnabled = eventsEnabled; } // We can't use class properties because they don't get listed in the // class prototype and break stuff like Sinon stubs createClass(Popper, [{ key: 'update', value: function update$$1() { return update.call(this); } }, { key: 'destroy', value: function destroy$$1() { return destroy.call(this); } }, { key: 'enableEventListeners', value: function enableEventListeners$$1() { return enableEventListeners.call(this); } }, { key: 'disableEventListeners', value: function disableEventListeners$$1() { return disableEventListeners.call(this); } /** * Schedule an update, it will run on the next UI update available * @method scheduleUpdate * @memberof Popper */ /** * Collection of utilities useful when writing custom modifiers. * Starting from version 1.7, this method is available only if you * include `popper-utils.js` before `popper.js`. * * **DEPRECATION**: This way to access PopperUtils is deprecated * and will be removed in v2! Use the PopperUtils module directly instead. * Due to the high instability of the methods contained in Utils, we can't * guarantee them to follow semver. Use them at your own risk! * @static * @private * @type {Object} * @deprecated since version 1.8 * @member Utils * @memberof Popper */ }]); return Popper; }(); /** * The `referenceObject` is an object that provides an interface compatible with Popper.js * and lets you use it as replacement of a real DOM node.<br /> * You can use this method to position a popper relatively to a set of coordinates * in case you don't have a DOM node to use as reference. * * ``` * new Popper(referenceObject, popperNode); * ``` * * NB: This feature isn't supported in Internet Explorer 10 * @name referenceObject * @property {Function} data.getBoundingClientRect * A function that returns a set of coordinates compatible with the native `getBoundingClientRect` method. * @property {number} data.clientWidth * An ES6 getter that will return the width of the virtual reference element. * @property {number} data.clientHeight * An ES6 getter that will return the height of the virtual reference element. */ Popper.Utils = (typeof window !== 'undefined' ? window : global).PopperUtils; Popper.placements = placements; Popper.Defaults = Defaults; return Popper; }))); //# sourceMappingURL=popper.js.map /* WEBPACK VAR INJECTION */}.call(exports, (function() { return this; }()))) /***/ }), /* 27 */ /***/ (function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; var _react = __webpack_require__(3); var _react2 = _interopRequireDefault(_react); var _propTypes = __webpack_require__(4); var _propTypes2 = _interopRequireDefault(_propTypes); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } function _objectWithoutProperties(obj, keys) { var target = {}; for (var i in obj) { if (keys.indexOf(i) >= 0) continue; if (!Object.prototype.hasOwnProperty.call(obj, i)) continue; target[i] = obj[i]; } return target; } var Arrow = function Arrow(props, context) { var _props$component = props.component, component = _props$component === undefined ? 'span' : _props$component, innerRef = props.innerRef, children = props.children, restProps = _objectWithoutProperties(props, ['component', 'innerRef', 'children']); var popper = context.popper; var arrowRef = function arrowRef(node) { popper.setArrowNode(node); if (typeof innerRef === 'function') { innerRef(node); } }; var arrowStyle = popper.getArrowStyle(); if (typeof children === 'function') { var arrowProps = { ref: arrowRef, style: arrowStyle }; return children({ arrowProps: arrowProps, restProps: restProps }); } var componentProps = _extends({}, restProps, { style: _extends({}, arrowStyle, restProps.style) }); if (typeof component === 'string') { componentProps.ref = arrowRef; } else { componentProps.innerRef = arrowRef; } return (0, _react.createElement)(component, componentProps, children); }; Arrow.contextTypes = { popper: _propTypes2.default.object.isRequired }; Arrow.propTypes = { component: _propTypes2.default.oneOfType([_propTypes2.default.node, _propTypes2.default.func]), innerRef: _propTypes2.default.func, children: _propTypes2.default.oneOfType([_propTypes2.default.node, _propTypes2.default.func]) }; exports.default = Arrow; /***/ }) /******/ ]) }); ;
tholu/cdnjs
ajax/libs/react-datepicker/0.59.0/react-datepicker.js
JavaScript
mit
229,566
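The Popper constructor shown above deep-merges the default modifier settings with any user-supplied overrides one modifier at a time, then sorts the result by its `order` value before running `onLoad` hooks. As an illustration only (not part of the library, and using plain Java maps as stand-ins for the JavaScript option objects), a minimal sketch of that two-level merge:

import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class ModifierMerge {

    /** Per-modifier merge: user overrides win, defaults fill in everything else. */
    static Map<String, Map<String, Object>> merge(Map<String, Map<String, Object>> defaults,
                                                  Map<String, Map<String, Object>> overrides) {
        Set<String> names = new LinkedHashSet<>(defaults.keySet());
        names.addAll(overrides.keySet());

        Map<String, Map<String, Object>> merged = new HashMap<>();
        for (String name : names) {
            Map<String, Object> options = new HashMap<>();
            if (defaults.containsKey(name)) {
                options.putAll(defaults.get(name));
            }
            if (overrides.containsKey(name)) {
                options.putAll(overrides.get(name));
            }
            merged.put(name, options);
        }
        return merged;
    }

    public static void main(String[] args) {
        Map<String, Object> flipDefaults = new HashMap<>();
        flipDefaults.put("order", 600);
        flipDefaults.put("enabled", true);
        flipDefaults.put("padding", 5);

        Map<String, Object> hideDefaults = new HashMap<>();
        hideDefaults.put("order", 800);
        hideDefaults.put("enabled", true);

        Map<String, Map<String, Object>> defaults = new HashMap<>();
        defaults.put("flip", flipDefaults);
        defaults.put("hide", hideDefaults);

        Map<String, Object> flipOverride = new HashMap<>();
        flipOverride.put("enabled", false); // disable flip, keep its other defaults

        Map<String, Map<String, Object>> overrides = new HashMap<>();
        overrides.put("flip", flipOverride);

        Map<String, Map<String, Object>> merged = merge(defaults, overrides);

        // Like the constructor above, order the merged modifiers by their "order" value.
        List<Map.Entry<String, Map<String, Object>>> sorted = new ArrayList<>(merged.entrySet());
        sorted.sort(Comparator.comparingInt(e -> (Integer) e.getValue().get("order")));
        sorted.forEach(e -> System.out.println(e.getKey() + " -> " + e.getValue()));
    }
}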
// { dg-options "-std=gnu++0x" }
// { dg-do compile }

// Copyright (C) 2007-2013 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this library; see the file COPYING3.  If not see
// <http://www.gnu.org/licenses/>.

#include <unordered_set>

using namespace std;

template class unordered_multiset<int, hash<int>, equal_to<int>, allocator<char>>;
Xilinx/gcc
libstdc++-v3/testsuite/23_containers/unordered_multiset/requirements/explicit_instantiation/3.cc
C++
gpl-2.0
953
<?php /** * OG behavior handler. */ class OgBehaviorHandler extends EntityReference_BehaviorHandler_Abstract { /** * Implements EntityReference_BehaviorHandler_Abstract::access(). */ public function access($field, $instance) { return $field['settings']['handler'] == 'og' || strpos($field['settings']['handler'], 'og_') === 0; } /** * Implements EntityReference_BehaviorHandler_Abstract::load(). */ public function load($entity_type, $entities, $field, $instances, $langcode, &$items) { // Get the OG memberships from the field. $field_name = $field['field_name']; $target_type = $field['settings']['target_type']; foreach ($entities as $entity) { $wrapper = entity_metadata_wrapper($entity_type, $entity); if (empty($wrapper->{$field_name})) { // If the entity belongs to a bundle that was deleted, return early. continue; } $id = $wrapper->getIdentifier(); $items[$id] = array(); $gids = og_get_entity_groups($entity_type, $entity, array(OG_STATE_ACTIVE), $field_name); if (empty($gids[$target_type])) { continue; } foreach ($gids[$target_type] as $gid) { $items[$id][] = array( 'target_id' => $gid, ); } } } /** * Implements EntityReference_BehaviorHandler_Abstract::insert(). */ public function insert($entity_type, $entity, $field, $instance, $langcode, &$items) { if (!empty($entity->skip_og_membership)) { return; } $this->OgMembershipCrud($entity_type, $entity, $field, $instance, $langcode, $items); $items = array(); } /** * Implements EntityReference_BehaviorHandler_Abstract::access(). */ public function update($entity_type, $entity, $field, $instance, $langcode, &$items) { if (!empty($entity->skip_og_membership)) { return; } $this->OgMembershipCrud($entity_type, $entity, $field, $instance, $langcode, $items); $items = array(); } /** * Implements EntityReference_BehaviorHandler_Abstract::Delete() * * CRUD memberships from field, or if entity is marked for deleteing, * delete all the OG membership related to it. * * @see og_entity_delete(). */ public function delete($entity_type, $entity, $field, $instance, $langcode, &$items) { if (!empty($entity->skip_og_membership)) { return; } if (!empty($entity->delete_og_membership)) { // Delete all OG memberships related to this entity. $og_memberships = array(); foreach (og_get_entity_groups($entity_type, $entity) as $group_type => $ids) { $og_memberships = array_merge($og_memberships, array_keys($ids)); } if ($og_memberships) { og_membership_delete_multiple($og_memberships); } } else { $this->OgMembershipCrud($entity_type, $entity, $field, $instance, $langcode, $items); } } /** * Create, update or delete OG membership based on field values. */ public function OgMembershipCrud($entity_type, $entity, $field, $instance, $langcode, &$items) { if (!user_access('administer group') && !field_access('edit', $field, $entity_type, $entity)) { // User has no access to field. return; } if (!$diff = $this->groupAudiencegetDiff($entity_type, $entity, $field, $instance, $langcode, $items)) { return; } $field_name = $field['field_name']; $group_type = $field['settings']['target_type']; $diff += array('insert' => array(), 'delete' => array()); // Delete first, so we don't trigger cardinality errors. if ($diff['delete']) { og_membership_delete_multiple($diff['delete']); } if (!$diff['insert']) { return; } // Prepare an array with the membership state, if it was provided in the widget. $states = array(); foreach ($items as $item) { $gid = $item['target_id']; if (empty($item['state']) || !in_array($gid, $diff['insert'])) { // State isn't provided, or not an "insert" operation. 
continue; } $states[$gid] = $item['state']; } foreach ($diff['insert'] as $gid) { $values = array( 'entity_type' => $entity_type, 'entity' => $entity, 'field_name' => $field_name, ); if (!empty($states[$gid])) { $values['state'] = $states[$gid]; } og_group($group_type, $gid, $values); } } /** * Get the difference in group audience for a saved field. * * @return * Array with all the differences, or an empty array if none found. */ public function groupAudiencegetDiff($entity_type, $entity, $field, $instance, $langcode, $items) { $return = FALSE; $field_name = $field['field_name']; $wrapper = entity_metadata_wrapper($entity_type, $entity); $og_memberships = $wrapper->{$field_name . '__og_membership'}->value(); $new_memberships = array(); foreach ($items as $item) { $new_memberships[$item['target_id']] = TRUE; } foreach ($og_memberships as $og_membership) { $gid = $og_membership->gid; if (empty($new_memberships[$gid])) { // Membership was deleted. if ($og_membership->entity_type == 'user') { // Make sure this is not the group manager, if exists. $group = entity_load_single($og_membership->group_type, $og_membership->gid); if (!empty($group->uid) && $group->uid == $og_membership->etid) { continue; } } $return['delete'][] = $og_membership->id; unset($new_memberships[$gid]); } else { // Existing membership. unset($new_memberships[$gid]); } } if ($new_memberships) { // New memberships. $return['insert'] = array_keys($new_memberships); } return $return; } /** * Implements EntityReference_BehaviorHandler_Abstract::views_data_alter(). */ public function views_data_alter(&$data, $field) { // We need to override the default EntityReference table settings when OG // behavior is being used. if (og_is_group_audience_field($field['field_name'])) { $entity_types = array_keys($field['bundles']); // We need to join the base table for the entities // that this field is attached to. foreach ($entity_types as $entity_type) { $entity_info = entity_get_info($entity_type); $data['og_membership'] = array( 'table' => array( 'join' => array( $entity_info['base table'] => array( // Join entity base table on its id field with left_field. 'left_field' => $entity_info['entity keys']['id'], 'field' => 'etid', 'extra' => array( 0 => array( 'field' => 'entity_type', 'value' => $entity_type, ), ), ), ), ), // Copy the original config from the table definition. $field['field_name'] => $data['field_data_' . $field['field_name']][$field['field_name']], $field['field_name'] . '_target_id' => $data['field_data_' . $field['field_name']][$field['field_name'] . '_target_id'], ); // Change config with settings from og_membership table. foreach (array('filter', 'argument', 'sort') as $op) { $data['og_membership'][$field['field_name'] . '_target_id'][$op]['field'] = 'gid'; $data['og_membership'][$field['field_name'] . '_target_id'][$op]['table'] = 'og_membership'; unset($data['og_membership'][$field['field_name'] . '_target_id'][$op]['additional fields']); } } // Get rid of the original table configs. unset($data['field_data_' . $field['field_name']]); unset($data['field_revision_' . $field['field_name']]); } } /** * Implements EntityReference_BehaviorHandler_Abstract::validate(). * * Re-build $errors array to be keyed correctly by "default" and "admin" field * modes. * * @todo: Try to get the correct delta so we can highlight the invalid * reference. * * @see entityreference_field_validate(). 
*/ public function validate($entity_type, $entity, $field, $instance, $langcode, $items, &$errors) { $new_errors = array(); $values = array('default' => array(), 'admin' => array()); // If the widget type name starts with 'og_' we suppose it is separated // into an admin and default part. if (strpos($instance['widget']['type'], 'og_') === 0) { foreach ($items as $item) { $values[$item['field_mode']][] = $item['target_id']; } } else { foreach ($items as $item) { $values['default'][] = $item['target_id']; } } $field_name = $field['field_name']; foreach ($values as $field_mode => $ids) { if (!$ids) { continue; } if ($field_mode == 'admin' && !user_access('administer group')) { // No need to validate the admin, as the user has no access to it. continue; } $instance['field_mode'] = $field_mode; $valid_ids = entityreference_get_selection_handler($field, $instance, $entity_type, $entity)->validateReferencableEntities($ids); if ($invalid_entities = array_diff($ids, $valid_ids)) { foreach ($invalid_entities as $id) { $new_errors[$field_mode][] = array( 'error' => 'og_invalid_entity', 'message' => t('The referenced group (@type: @id) is invalid.', array('@type' => $field['settings']['target_type'], '@id' => $id)), ); } } } if ($new_errors) { og_field_widget_register_errors($field_name, $new_errors); } // Errors for this field now handled, removing from the referenced array. unset($errors[$field_name]); } }
johnlaine1/installer
sites/all/modules/og/plugins/entityreference/behavior/OgBehaviorHandler.class.php
PHP
gpl-2.0
9,888
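The groupAudiencegetDiff() method above is essentially a set difference between the group IDs already stored as OG memberships and the group IDs submitted in the field values: existing memberships missing from the new values are deleted, and submitted IDs with no existing membership are inserted. A rough Java sketch of that diff, purely illustrative (the OG entities and the group-manager exception are not modeled):

import java.util.LinkedHashSet;
import java.util.Set;

public class MembershipDiff {

    /** IDs present in {@code current} but absent from {@code submitted} should be deleted. */
    static Set<Long> toDelete(Set<Long> current, Set<Long> submitted) {
        Set<Long> delete = new LinkedHashSet<>(current);
        delete.removeAll(submitted);
        return delete;
    }

    /** IDs present in {@code submitted} but absent from {@code current} should be inserted. */
    static Set<Long> toInsert(Set<Long> current, Set<Long> submitted) {
        Set<Long> insert = new LinkedHashSet<>(submitted);
        insert.removeAll(current);
        return insert;
    }

    public static void main(String[] args) {
        Set<Long> current = Set.of(10L, 20L, 30L);   // existing OG memberships (gids)
        Set<Long> submitted = Set.of(20L, 30L, 40L); // gids coming from the field widget

        System.out.println("delete: " + toDelete(current, submitted)); // [10]
        System.out.println("insert: " + toInsert(current, submitted)); // [40]
    }
}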
// 2005-12-20 Paolo Carlini <pcarlini@suse.de> // Copyright (C) 2005-2013 Free Software Foundation, Inc. // // This file is part of the GNU ISO C++ Library. This library is free // software; you can redistribute it and/or modify it under the // terms of the GNU General Public License as published by the // Free Software Foundation; either version 3, or (at your option) // any later version. // This library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License along // with this library; see the file COPYING3. If not see // <http://www.gnu.org/licenses/>. // 23.2.1.3 deque::swap #include <deque> #include <testsuite_hooks.h> #include <testsuite_allocator.h> // uneq_allocator as a non-empty allocator. void test01() { bool test __attribute__((unused)) = true; using namespace std; typedef __gnu_test::uneq_allocator<char> my_alloc; typedef deque<char, my_alloc> my_deque; const char title01[] = "Rivers of sand"; const char title02[] = "Concret PH"; const char title03[] = "Sonatas and Interludes for Prepared Piano"; const char title04[] = "never as tired as when i'm waking up"; const size_t N1 = sizeof(title01); const size_t N2 = sizeof(title02); const size_t N3 = sizeof(title03); const size_t N4 = sizeof(title04); my_deque::size_type size01, size02; my_alloc alloc01(1); my_deque deq01(alloc01); size01 = deq01.size(); my_deque deq02(alloc01); size02 = deq02.size(); deq01.swap(deq02); VERIFY( deq01.size() == size02 ); VERIFY( deq01.empty() ); VERIFY( deq02.size() == size01 ); VERIFY( deq02.empty() ); my_deque deq03(alloc01); size01 = deq03.size(); my_deque deq04(title02, title02 + N2, alloc01); size02 = deq04.size(); deq03.swap(deq04); VERIFY( deq03.size() == size02 ); VERIFY( equal(deq03.begin(), deq03.end(), title02) ); VERIFY( deq04.size() == size01 ); VERIFY( deq04.empty() ); my_deque deq05(title01, title01 + N1, alloc01); size01 = deq05.size(); my_deque deq06(title02, title02 + N2, alloc01); size02 = deq06.size(); deq05.swap(deq06); VERIFY( deq05.size() == size02 ); VERIFY( equal(deq05.begin(), deq05.end(), title02) ); VERIFY( deq06.size() == size01 ); VERIFY( equal(deq06.begin(), deq06.end(), title01) ); my_deque deq07(title01, title01 + N1, alloc01); size01 = deq07.size(); my_deque deq08(title03, title03 + N3, alloc01); size02 = deq08.size(); deq07.swap(deq08); VERIFY( deq07.size() == size02 ); VERIFY( equal(deq07.begin(), deq07.end(), title03) ); VERIFY( deq08.size() == size01 ); VERIFY( equal(deq08.begin(), deq08.end(), title01) ); my_deque deq09(title03, title03 + N3, alloc01); size01 = deq09.size(); my_deque deq10(title04, title04 + N4, alloc01); size02 = deq10.size(); deq09.swap(deq10); VERIFY( deq09.size() == size02 ); VERIFY( equal(deq09.begin(), deq09.end(), title04) ); VERIFY( deq10.size() == size01 ); VERIFY( equal(deq10.begin(), deq10.end(), title03) ); my_deque deq11(title04, title04 + N4, alloc01); size01 = deq11.size(); my_deque deq12(title01, title01 + N1, alloc01); size02 = deq12.size(); deq11.swap(deq12); VERIFY( deq11.size() == size02 ); VERIFY( equal(deq11.begin(), deq11.end(), title01) ); VERIFY( deq12.size() == size01 ); VERIFY( equal(deq12.begin(), deq12.end(), title04) ); my_deque deq13(title03, title03 + N3, alloc01); size01 = deq13.size(); my_deque deq14(title03, title03 + N3, alloc01); size02 = deq14.size(); deq13.swap(deq14); 
VERIFY( deq13.size() == size02 ); VERIFY( equal(deq13.begin(), deq13.end(), title03) ); VERIFY( deq14.size() == size01 ); VERIFY( equal(deq14.begin(), deq14.end(), title03) ); } int main() { test01(); return 0; }
skristiansson/eco32-gcc
libstdc++-v3/testsuite/23_containers/deque/modifiers/swap/2.cc
C++
gpl-2.0
3,966
<?php

require_once($CFG->libdir.'/simpletest/testportfoliolib.php');
require_once($CFG->dirroot.'/portfolio/download/lib.php');

/*
 * TODO: The portfolio unit tests were obsolete and did not work.
 * They have been commented out so that they do not break the
 * unit tests in Moodle 2.
 *
 * At some point:
 * 1. These tests should be audited to see which ones were valuable.
 * 2. The useful ones should be rewritten using the current standards
 *    for writing test cases.
 *
 * This might be left until Moodle 2.1 when the test case framework
 * is due to change.

Mock::generate('boxclient', 'mock_boxclient');
Mock::generatePartial('portfolio_plugin_download', 'mock_downloadplugin', array('ensure_ticket', 'ensure_account_tree'));

*/

class testPortfolioPluginDownload extends portfoliolib_test {
    public static $includecoverage = array('lib/portfoliolib.php', 'portfolio/download/lib.php');

    public function setUp() {
        parent::setUp();
        // $this->plugin = new mock_boxnetplugin($this);
        // $this->plugin->boxclient = new mock_boxclient();
    }

    public function tearDown() {
        parent::tearDown();
    }
}
dhamma-dev/SEA
web/portfolio/download/simpletest/testportfolioplugindownload.php
PHP
gpl-3.0
1,151
/**********
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation; either version 2.1 of the License, or (at your
option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)

This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for
more details.

You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301  USA
**********/
// "liveMedia"
// Copyright (c) 1996-2012 Live Networks, Inc.  All rights reserved.
// RTP sink for VP8 video
// C++ header

#ifndef _VP8_VIDEO_RTP_SINK_HH
#define _VP8_VIDEO_RTP_SINK_HH

#ifndef _VIDEO_RTP_SINK_HH
#include "VideoRTPSink.hh"
#endif

class VP8VideoRTPSink: public VideoRTPSink {
public:
  static VP8VideoRTPSink* createNew(UsageEnvironment& env, Groupsock* RTPgs,
                                    unsigned char rtpPayloadFormat);

protected:
  VP8VideoRTPSink(UsageEnvironment& env, Groupsock* RTPgs,
                  unsigned char rtpPayloadFormat);
  // called only by createNew()

  virtual ~VP8VideoRTPSink();

private:
  // redefined virtual functions:
  virtual void doSpecialFrameHandling(unsigned fragmentationOffset,
                                      unsigned char* frameStart,
                                      unsigned numBytesInFrame,
                                      struct timeval framePresentationTime,
                                      unsigned numRemainingBytes);
  virtual Boolean frameCanAppearAfterPacketStart(unsigned char const* frameStart,
                                                 unsigned numBytesInFrame) const;
  virtual unsigned specialHeaderSize() const;
};

#endif
hungld87/live555-for-win32
liveMedia/include/VP8VideoRTPSink.hh
C++
lgpl-2.1
1,917
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.aggregations.bucket.histogram; import org.apache.lucene.document.Document; import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.search.aggregations.AggregatorTestCase; import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.function.Consumer; public class DateHistogramAggregatorTests extends AggregatorTestCase { private static final String DATE_FIELD = "date"; private static final String INSTANT_FIELD = "instant"; private static final List<String> dataset = Arrays.asList( "2010-03-12T01:07:45", "2010-04-27T03:43:34", "2012-05-18T04:11:00", "2013-05-29T05:11:31", "2013-10-31T08:24:05", "2015-02-13T13:09:32", "2015-06-24T13:47:43", "2015-11-13T16:14:34", "2016-03-04T17:09:50", "2017-12-12T22:55:46"); public void testMatchNoDocs() throws IOException { testBothCases(new MatchNoDocsQuery(), dataset, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(DATE_FIELD), histogram -> assertEquals(0, histogram.getBuckets().size()) ); } public void testMatchAllDocs() throws IOException { Query query = new MatchAllDocsQuery(); testSearchCase(query, dataset, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(DATE_FIELD), histogram -> assertEquals(6, histogram.getBuckets().size()) ); testSearchAndReduceCase(query, dataset, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(DATE_FIELD), histogram -> assertEquals(8, histogram.getBuckets().size()) ); testBothCases(query, dataset, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(DATE_FIELD).minDocCount(1L), histogram -> assertEquals(6, histogram.getBuckets().size()) ); } public void testNoDocs() throws IOException { Query query = new MatchNoDocsQuery(); List<String> dates = Collections.emptyList(); Consumer<DateHistogramAggregationBuilder> aggregation = agg -> agg.dateHistogramInterval(DateHistogramInterval.YEAR).field(DATE_FIELD); testSearchCase(query, dates, aggregation, histogram -> assertEquals(0, histogram.getBuckets().size()) ); testSearchAndReduceCase(query, dates, aggregation, histogram -> assertNull(histogram) ); } public void testAggregateWrongField() throws IOException { 
testBothCases(new MatchAllDocsQuery(), dataset, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field("wrong_field"), histogram -> assertEquals(0, histogram.getBuckets().size()) ); } public void testIntervalYear() throws IOException { testBothCases(LongPoint.newRangeQuery(INSTANT_FIELD, asLong("2015-01-01"), asLong("2017-12-31")), dataset, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(DATE_FIELD), histogram -> { List<Histogram.Bucket> buckets = histogram.getBuckets(); assertEquals(3, buckets.size()); Histogram.Bucket bucket = buckets.get(0); assertEquals("2015-01-01T00:00:00.000Z", bucket.getKeyAsString()); assertEquals(3, bucket.getDocCount()); bucket = buckets.get(1); assertEquals("2016-01-01T00:00:00.000Z", bucket.getKeyAsString()); assertEquals(1, bucket.getDocCount()); bucket = buckets.get(2); assertEquals("2017-01-01T00:00:00.000Z", bucket.getKeyAsString()); assertEquals(1, bucket.getDocCount()); } ); } public void testIntervalMonth() throws IOException { testBothCases(new MatchAllDocsQuery(), Arrays.asList("2017-01-01", "2017-02-02", "2017-02-03", "2017-03-04", "2017-03-05", "2017-03-06"), aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.MONTH).field(DATE_FIELD), histogram -> { List<Histogram.Bucket> buckets = histogram.getBuckets(); assertEquals(3, buckets.size()); Histogram.Bucket bucket = buckets.get(0); assertEquals("2017-01-01T00:00:00.000Z", bucket.getKeyAsString()); assertEquals(1, bucket.getDocCount()); bucket = buckets.get(1); assertEquals("2017-02-01T00:00:00.000Z", bucket.getKeyAsString()); assertEquals(2, bucket.getDocCount()); bucket = buckets.get(2); assertEquals("2017-03-01T00:00:00.000Z", bucket.getKeyAsString()); assertEquals(3, bucket.getDocCount()); } ); } public void testIntervalDay() throws IOException { testBothCases(new MatchAllDocsQuery(), Arrays.asList( "2017-02-01", "2017-02-02", "2017-02-02", "2017-02-03", "2017-02-03", "2017-02-03", "2017-02-05" ), aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.DAY).field(DATE_FIELD).minDocCount(1L), histogram -> { List<Histogram.Bucket> buckets = histogram.getBuckets(); assertEquals(4, buckets.size()); Histogram.Bucket bucket = buckets.get(0); assertEquals("2017-02-01T00:00:00.000Z", bucket.getKeyAsString()); assertEquals(1, bucket.getDocCount()); bucket = buckets.get(1); assertEquals("2017-02-02T00:00:00.000Z", bucket.getKeyAsString()); assertEquals(2, bucket.getDocCount()); bucket = buckets.get(2); assertEquals("2017-02-03T00:00:00.000Z", bucket.getKeyAsString()); assertEquals(3, bucket.getDocCount()); bucket = buckets.get(3); assertEquals("2017-02-05T00:00:00.000Z", bucket.getKeyAsString()); assertEquals(1, bucket.getDocCount()); } ); } public void testIntervalHour() throws IOException { testBothCases(new MatchAllDocsQuery(), Arrays.asList( "2017-02-01T09:02:00.000Z", "2017-02-01T09:35:00.000Z", "2017-02-01T10:15:00.000Z", "2017-02-01T13:06:00.000Z", "2017-02-01T14:04:00.000Z", "2017-02-01T14:05:00.000Z", "2017-02-01T15:59:00.000Z", "2017-02-01T16:06:00.000Z", "2017-02-01T16:48:00.000Z", "2017-02-01T16:59:00.000Z" ), aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.HOUR).field(DATE_FIELD).minDocCount(1L), histogram -> { List<Histogram.Bucket> buckets = histogram.getBuckets(); assertEquals(6, buckets.size()); Histogram.Bucket bucket = buckets.get(0); assertEquals("2017-02-01T09:00:00.000Z", bucket.getKeyAsString()); assertEquals(2, bucket.getDocCount()); bucket = buckets.get(1); 
assertEquals("2017-02-01T10:00:00.000Z", bucket.getKeyAsString()); assertEquals(1, bucket.getDocCount()); bucket = buckets.get(2); assertEquals("2017-02-01T13:00:00.000Z", bucket.getKeyAsString()); assertEquals(1, bucket.getDocCount()); bucket = buckets.get(3); assertEquals("2017-02-01T14:00:00.000Z", bucket.getKeyAsString()); assertEquals(2, bucket.getDocCount()); bucket = buckets.get(4); assertEquals("2017-02-01T15:00:00.000Z", bucket.getKeyAsString()); assertEquals(1, bucket.getDocCount()); bucket = buckets.get(5); assertEquals("2017-02-01T16:00:00.000Z", bucket.getKeyAsString()); assertEquals(3, bucket.getDocCount()); } ); } public void testIntervalMinute() throws IOException { testBothCases(new MatchAllDocsQuery(), Arrays.asList( "2017-02-01T09:02:35.000Z", "2017-02-01T09:02:59.000Z", "2017-02-01T09:15:37.000Z", "2017-02-01T09:16:04.000Z", "2017-02-01T09:16:42.000Z" ), aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.MINUTE).field(DATE_FIELD).minDocCount(1L), histogram -> { List<Histogram.Bucket> buckets = histogram.getBuckets(); assertEquals(3, buckets.size()); Histogram.Bucket bucket = buckets.get(0); assertEquals("2017-02-01T09:02:00.000Z", bucket.getKeyAsString()); assertEquals(2, bucket.getDocCount()); bucket = buckets.get(1); assertEquals("2017-02-01T09:15:00.000Z", bucket.getKeyAsString()); assertEquals(1, bucket.getDocCount()); bucket = buckets.get(2); assertEquals("2017-02-01T09:16:00.000Z", bucket.getKeyAsString()); assertEquals(2, bucket.getDocCount()); } ); } public void testIntervalSecond() throws IOException { testBothCases(new MatchAllDocsQuery(), Arrays.asList( "2017-02-01T00:00:05.015Z", "2017-02-01T00:00:11.299Z", "2017-02-01T00:00:11.074Z", "2017-02-01T00:00:37.688Z", "2017-02-01T00:00:37.210Z", "2017-02-01T00:00:37.380Z" ), aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.SECOND).field(DATE_FIELD).minDocCount(1L), histogram -> { List<Histogram.Bucket> buckets = histogram.getBuckets(); assertEquals(3, buckets.size()); Histogram.Bucket bucket = buckets.get(0); assertEquals("2017-02-01T00:00:05.000Z", bucket.getKeyAsString()); assertEquals(1, bucket.getDocCount()); bucket = buckets.get(1); assertEquals("2017-02-01T00:00:11.000Z", bucket.getKeyAsString()); assertEquals(2, bucket.getDocCount()); bucket = buckets.get(2); assertEquals("2017-02-01T00:00:37.000Z", bucket.getKeyAsString()); assertEquals(3, bucket.getDocCount()); } ); } public void testMinDocCount() throws IOException { Query query = LongPoint.newRangeQuery(INSTANT_FIELD, asLong("2017-02-01T00:00:00.000Z"), asLong("2017-02-01T00:00:30.000Z")); List<String> timestamps = Arrays.asList( "2017-02-01T00:00:05.015Z", "2017-02-01T00:00:11.299Z", "2017-02-01T00:00:11.074Z", "2017-02-01T00:00:13.688Z", "2017-02-01T00:00:21.380Z" ); // 5 sec interval with minDocCount = 0 testSearchAndReduceCase(query, timestamps, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5)).field(DATE_FIELD).minDocCount(0L), histogram -> { List<Histogram.Bucket> buckets = histogram.getBuckets(); assertEquals(4, buckets.size()); Histogram.Bucket bucket = buckets.get(0); assertEquals("2017-02-01T00:00:05.000Z", bucket.getKeyAsString()); assertEquals(1, bucket.getDocCount()); bucket = buckets.get(1); assertEquals("2017-02-01T00:00:10.000Z", bucket.getKeyAsString()); assertEquals(3, bucket.getDocCount()); bucket = buckets.get(2); assertEquals("2017-02-01T00:00:15.000Z", bucket.getKeyAsString()); assertEquals(0, bucket.getDocCount()); bucket = buckets.get(3); 
assertEquals("2017-02-01T00:00:20.000Z", bucket.getKeyAsString()); assertEquals(1, bucket.getDocCount()); } ); // 5 sec interval with minDocCount = 3 testSearchAndReduceCase(query, timestamps, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5)).field(DATE_FIELD).minDocCount(3L), histogram -> { List<Histogram.Bucket> buckets = histogram.getBuckets(); assertEquals(1, buckets.size()); Histogram.Bucket bucket = buckets.get(0); assertEquals("2017-02-01T00:00:10.000Z", bucket.getKeyAsString()); assertEquals(3, bucket.getDocCount()); } ); } private void testSearchCase(Query query, List<String> dataset, Consumer<DateHistogramAggregationBuilder> configure, Consumer<Histogram> verify) throws IOException { executeTestCase(false, query, dataset, configure, verify); } private void testSearchAndReduceCase(Query query, List<String> dataset, Consumer<DateHistogramAggregationBuilder> configure, Consumer<Histogram> verify) throws IOException { executeTestCase(true, query, dataset, configure, verify); } private void testBothCases(Query query, List<String> dataset, Consumer<DateHistogramAggregationBuilder> configure, Consumer<Histogram> verify) throws IOException { testSearchCase(query, dataset, configure, verify); testSearchAndReduceCase(query, dataset, configure, verify); } private void executeTestCase(boolean reduced, Query query, List<String> dataset, Consumer<DateHistogramAggregationBuilder> configure, Consumer<Histogram> verify) throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { Document document = new Document(); for (String date : dataset) { if (frequently()) { indexWriter.commit(); } long instant = asLong(date); document.add(new SortedNumericDocValuesField(DATE_FIELD, instant)); document.add(new LongPoint(INSTANT_FIELD, instant)); indexWriter.addDocument(document); document.clear(); } } try (IndexReader indexReader = DirectoryReader.open(directory)) { IndexSearcher indexSearcher = newSearcher(indexReader, true, true); DateHistogramAggregationBuilder aggregationBuilder = new DateHistogramAggregationBuilder("_name"); if (configure != null) { configure.accept(aggregationBuilder); } DateFieldMapper.Builder builder = new DateFieldMapper.Builder("_name"); DateFieldMapper.DateFieldType fieldType = builder.fieldType(); fieldType.setHasDocValues(true); fieldType.setName(aggregationBuilder.field()); InternalDateHistogram histogram; if (reduced) { histogram = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType); } else { histogram = search(indexSearcher, query, aggregationBuilder, fieldType); } verify.accept(histogram); } } } private static long asLong(String dateTime) { return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime(dateTime).getMillis(); } }
a2lin/elasticsearch
core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java
Java
apache-2.0
18,412
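The hourly-interval test above expects each timestamp to land in the bucket keyed by the start of its hour, which is what the date_histogram aggregation computes per document. A small java.time sketch (illustrative only, independent of the Elasticsearch and Lucene classes used in the test) showing that rounding and the resulting bucket counts:

import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

public class HourlyBuckets {
    public static void main(String[] args) {
        // A few of the timestamps from testIntervalHour() above.
        List<String> timestamps = List.of(
            "2017-02-01T09:02:00.000Z", "2017-02-01T09:35:00.000Z",
            "2017-02-01T10:15:00.000Z", "2017-02-01T16:59:00.000Z");

        // Count documents per hourly bucket, keyed by the start of the hour.
        Map<Instant, Integer> buckets = new TreeMap<>();
        for (String ts : timestamps) {
            Instant key = Instant.parse(ts).truncatedTo(ChronoUnit.HOURS);
            buckets.merge(key, 1, Integer::sum);
        }

        // Prints 2017-02-01T09:00:00Z -> 2, 2017-02-01T10:00:00Z -> 1, 2017-02-01T16:00:00Z -> 1
        buckets.forEach((k, v) -> System.out.println(k + " -> " + v));
    }
}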
/* * Copyright 2013 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.apps.dashclock.phone; import com.google.android.apps.dashclock.LogUtils; import com.google.android.apps.dashclock.api.DashClockExtension; import com.google.android.apps.dashclock.api.ExtensionData; import net.nurik.roman.dashclock.R; import android.annotation.TargetApi; import android.content.Intent; import android.content.pm.PackageManager; import android.database.Cursor; import android.net.Uri; import android.os.Build; import android.provider.ContactsContract; import android.provider.Telephony; import android.text.TextUtils; import java.util.HashSet; import java.util.Set; import static com.google.android.apps.dashclock.LogUtils.LOGD; import static com.google.android.apps.dashclock.LogUtils.LOGE; import static com.google.android.apps.dashclock.LogUtils.LOGW; /** * Unread SMS and MMS's extension. */ public class SmsExtension extends DashClockExtension { private static final String TAG = LogUtils.makeLogTag(SmsExtension.class); @Override protected void onInitialize(boolean isReconnect) { super.onInitialize(isReconnect); if (!isReconnect) { addWatchContentUris(new String[]{ TelephonyProviderConstants.MmsSms.CONTENT_URI.toString(), }); } } @Override protected void onUpdateData(int reason) { long lastUnreadThreadId = 0; Set<Long> unreadThreadIds = new HashSet<Long>(); Set<String> unreadThreadParticipantNames = new HashSet<String>(); boolean showingAllConversationParticipants = false; Cursor cursor = tryOpenSimpleThreadsCursor(); if (cursor != null) { while (cursor.moveToNext()) { if (cursor.getInt(SimpleThreadsQuery.READ) == 0) { long threadId = cursor.getLong(SimpleThreadsQuery._ID); unreadThreadIds.add(threadId); lastUnreadThreadId = threadId; // Some devices will fail on tryOpenMmsSmsCursor below, so // store a list of participants on unread threads as a fallback. String recipientIdsStr = cursor.getString(SimpleThreadsQuery.RECIPIENT_IDS); if (!TextUtils.isEmpty(recipientIdsStr)) { String[] recipientIds = TextUtils.split(recipientIdsStr, " "); for (String recipientId : recipientIds) { Cursor canonAddrCursor = tryOpenCanonicalAddressCursorById( Long.parseLong(recipientId)); if (canonAddrCursor == null) { continue; } if (canonAddrCursor.moveToFirst()) { String address = canonAddrCursor.getString( CanonicalAddressQuery.ADDRESS); String displayName = getDisplayNameForContact(0, address); if (!TextUtils.isEmpty(displayName)) { unreadThreadParticipantNames.add(displayName); } } canonAddrCursor.close(); } } } } cursor.close(); LOGD(TAG, "Unread thread IDs: [" + TextUtils.join(", ", unreadThreadIds) + "]"); } int unreadConversations = 0; StringBuilder names = new StringBuilder(); cursor = tryOpenMmsSmsCursor(); if (cursor != null) { // Most devices will hit this code path. while (cursor.moveToNext()) { // Get display name. SMS's are easy; MMS's not so much. 
long id = cursor.getLong(MmsSmsQuery._ID); long contactId = cursor.getLong(MmsSmsQuery.PERSON); String address = cursor.getString(MmsSmsQuery.ADDRESS); long threadId = cursor.getLong(MmsSmsQuery.THREAD_ID); if (unreadThreadIds != null && !unreadThreadIds.contains(threadId)) { // We have the list of all thread IDs (same as what the messaging app uses), and // this supposedly unread message's thread isn't in the list. This message is // likely an orphaned message whose thread was deleted. Not skipping it is // likely the cause of http://code.google.com/p/dashclock/issues/detail?id=8 LOGD(TAG, "Skipping probably orphaned message " + id + " with thread ID " + threadId); continue; } ++unreadConversations; lastUnreadThreadId = threadId; if (contactId == 0 && TextUtils.isEmpty(address) && id != 0) { // Try MMS addr query Cursor addrCursor = tryOpenMmsAddrCursor(id); if (addrCursor != null) { if (addrCursor.moveToFirst()) { contactId = addrCursor.getLong(MmsAddrQuery.CONTACT_ID); address = addrCursor.getString(MmsAddrQuery.ADDRESS); } addrCursor.close(); } } String displayName = getDisplayNameForContact(contactId, address); if (names.length() > 0) { names.append(", "); } names.append(displayName); } cursor.close(); } else { // In case the cursor is null (some Samsung devices like the Galaxy S4), use the // fall back on the list of participants in unread threads. unreadConversations = unreadThreadIds.size(); names.append(TextUtils.join(", ", unreadThreadParticipantNames)); showingAllConversationParticipants = true; } PackageManager pm = getPackageManager(); Intent clickIntent = null; if (unreadConversations == 1 && lastUnreadThreadId > 0) { clickIntent = new Intent(Intent.ACTION_VIEW, TelephonyProviderConstants.MmsSms.CONTENT_CONVERSATIONS_URI.buildUpon() .appendPath(Long.toString(lastUnreadThreadId)).build()); } // If the default SMS app doesn't support ACTION_VIEW on the conversation URI, // or if there are multiple unread conversations, try opening the app landing screen // by implicit intent. if (clickIntent == null || pm.resolveActivity(clickIntent, 0) == null) { clickIntent = Intent.makeMainSelectorActivity(Intent.ACTION_MAIN, Intent.CATEGORY_APP_MESSAGING); // If the default SMS app doesn't support CATEGORY_APP_MESSAGING, try KitKat's // new API to get the default package (if the API is available). if (pm.resolveActivity(clickIntent, 0) == null) { clickIntent = tryGetKitKatDefaultSmsActivity(); } } publishUpdate(new ExtensionData() .visible(unreadConversations > 0) .icon(R.drawable.ic_extension_sms) .status(Integer.toString(unreadConversations)) .expandedTitle( getResources().getQuantityString( R.plurals.sms_title_template, unreadConversations, unreadConversations)) .expandedBody(getString(showingAllConversationParticipants ? R.string.sms_body_all_participants_template : R.string.sms_body_template, names.toString())) .clickIntent(clickIntent)); } @TargetApi(Build.VERSION_CODES.KITKAT) private Intent tryGetKitKatDefaultSmsActivity() { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) { String smsPackage = Telephony.Sms.getDefaultSmsPackage(this); if (TextUtils.isEmpty(smsPackage)) { return null; } return new Intent() .setAction(Intent.ACTION_MAIN) .addCategory(Intent.CATEGORY_LAUNCHER) .setPackage(smsPackage); } return null; } /** * Returns the display name for the contact with the given ID and/or the given address * (phone number). One or both parameters should be provided. 
*/ private String getDisplayNameForContact(long contactId, String address) { String displayName = address; if (contactId > 0) { Cursor contactCursor = tryOpenContactsCursorById(contactId); if (contactCursor != null) { if (contactCursor.moveToFirst()) { displayName = contactCursor.getString(RawContactsQuery.DISPLAY_NAME); } else { contactId = 0; } contactCursor.close(); } } if (contactId <= 0) { Cursor contactCursor = tryOpenContactsCursorByAddress(address); if (contactCursor != null) { if (contactCursor.moveToFirst()) { displayName = contactCursor.getString(ContactsQuery.DISPLAY_NAME); } contactCursor.close(); } } return displayName; } private Cursor tryOpenMmsSmsCursor() { try { return getContentResolver().query( TelephonyProviderConstants.MmsSms.CONTENT_CONVERSATIONS_URI, MmsSmsQuery.PROJECTION, TelephonyProviderConstants.Mms.READ + "=0 AND " + TelephonyProviderConstants.Mms.THREAD_ID + "!=0 AND (" + TelephonyProviderConstants.Mms.MESSAGE_BOX + "=" + TelephonyProviderConstants.Mms.MESSAGE_BOX_INBOX + " OR " + TelephonyProviderConstants.Sms.TYPE + "=" + TelephonyProviderConstants.Sms.MESSAGE_TYPE_INBOX + ")", null, null); } catch (Exception e) { // Catch all exceptions because the SMS provider is crashy // From developer console: "SQLiteException: table spam_filter already exists" LOGE(TAG, "Error accessing conversations cursor in SMS/MMS provider", e); return null; } } private Cursor tryOpenSimpleThreadsCursor() { try { return getContentResolver().query( TelephonyProviderConstants.Threads.CONTENT_URI .buildUpon() .appendQueryParameter("simple", "true") .build(), SimpleThreadsQuery.PROJECTION, null, null, null); } catch (Exception e) { LOGW(TAG, "Error accessing simple SMS threads cursor", e); return null; } } private Cursor tryOpenCanonicalAddressCursorById(long id) { try { return getContentResolver().query( TelephonyProviderConstants.CanonicalAddresses.CONTENT_URI.buildUpon() .build(), CanonicalAddressQuery.PROJECTION, TelephonyProviderConstants.CanonicalAddresses._ID + "=?", new String[]{Long.toString(id)}, null); } catch (Exception e) { LOGE(TAG, "Error accessing canonical addresses cursor", e); return null; } } private Cursor tryOpenMmsAddrCursor(long mmsMsgId) { try { return getContentResolver().query( TelephonyProviderConstants.Mms.CONTENT_URI.buildUpon() .appendPath(Long.toString(mmsMsgId)) .appendPath("addr") .build(), MmsAddrQuery.PROJECTION, TelephonyProviderConstants.Mms.Addr.MSG_ID + "=?", new String[]{Long.toString(mmsMsgId)}, null); } catch (Exception e) { // Catch all exceptions because the SMS provider is crashy // From developer console: "SQLiteException: table spam_filter already exists" LOGE(TAG, "Error accessing MMS addresses cursor", e); return null; } } private Cursor tryOpenContactsCursorById(long contactId) { try { return getContentResolver().query( ContactsContract.RawContacts.CONTENT_URI.buildUpon() .appendPath(Long.toString(contactId)) .build(), RawContactsQuery.PROJECTION, null, null, null); } catch (Exception e) { LOGE(TAG, "Error accessing contacts provider", e); return null; } } private Cursor tryOpenContactsCursorByAddress(String phoneNumber) { try { return getContentResolver().query( ContactsContract.PhoneLookup.CONTENT_FILTER_URI.buildUpon() .appendPath(Uri.encode(phoneNumber)).build(), ContactsQuery.PROJECTION, null, null, null); } catch (Exception e) { // Can be called by the content provider (from Google Play crash/ANR console) // java.lang.IllegalArgumentException: URI: content://com.android.contacts/phone_lookup/ LOGW(TAG, "Error looking up contact 
name", e); return null; } } private interface SimpleThreadsQuery { String[] PROJECTION = { TelephonyProviderConstants.Threads._ID, TelephonyProviderConstants.Threads.READ, TelephonyProviderConstants.Threads.RECIPIENT_IDS, }; int _ID = 0; int READ = 1; int RECIPIENT_IDS = 2; } private interface CanonicalAddressQuery { String[] PROJECTION = { TelephonyProviderConstants.CanonicalAddresses._ID, TelephonyProviderConstants.CanonicalAddresses.ADDRESS, }; int _ID = 0; int ADDRESS = 1; } private interface MmsSmsQuery { String[] PROJECTION = { TelephonyProviderConstants.Sms._ID, TelephonyProviderConstants.Sms.ADDRESS, TelephonyProviderConstants.Sms.PERSON, TelephonyProviderConstants.Sms.THREAD_ID, }; int _ID = 0; int ADDRESS = 1; int PERSON = 2; int THREAD_ID = 3; } private interface MmsAddrQuery { String[] PROJECTION = { TelephonyProviderConstants.Mms.Addr.ADDRESS, TelephonyProviderConstants.Mms.Addr.CONTACT_ID, }; int ADDRESS = 0; int CONTACT_ID = 1; } private interface RawContactsQuery { String[] PROJECTION = { ContactsContract.RawContacts.DISPLAY_NAME_PRIMARY, }; int DISPLAY_NAME = 0; } private interface ContactsQuery { String[] PROJECTION = { ContactsContract.Contacts.DISPLAY_NAME, }; int DISPLAY_NAME = 0; } }
ITVlab/neodash
module-dashclock/src/main/java/com/google/android/apps/dashclock/phone/SmsExtension.java
Java
apache-2.0
16,299
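Each tryOpen*Cursor() helper above wraps ContentResolver.query() in a catch-all because several stock providers are known to throw, and callers then null-check and close the cursor manually. A hedged sketch of the same defensive pattern using try-with-resources (Cursor is Closeable on modern Android); the ContactLookup class, the lookupDisplayName name, and the projection are illustrative, not part of the extension:

import android.content.ContentResolver;
import android.database.Cursor;
import android.net.Uri;
import android.provider.ContactsContract;

public final class ContactLookup {
    private ContactLookup() {}

    /**
     * Returns the display name for a phone number, or the number itself if the
     * lookup fails or the provider throws (mirroring tryOpenContactsCursorByAddress()).
     */
    public static String lookupDisplayName(ContentResolver resolver, String phoneNumber) {
        Uri uri = ContactsContract.PhoneLookup.CONTENT_FILTER_URI.buildUpon()
                .appendPath(Uri.encode(phoneNumber))
                .build();
        String[] projection = { ContactsContract.PhoneLookup.DISPLAY_NAME };
        try (Cursor cursor = resolver.query(uri, projection, null, null, null)) {
            if (cursor != null && cursor.moveToFirst()) {
                return cursor.getString(0);
            }
        } catch (Exception e) {
            // Some devices' providers throw from query(); fall back to the raw number.
        }
        return phoneNumber;
    }
}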
/*
 * Copyright 2014-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.util;

import com.facebook.buck.log.Logger;

import java.io.IOException;
import java.io.InputStream;

public class ThriftWatcher {

  private static final Logger LOG = Logger.get(ThriftWatcher.class);

  private ThriftWatcher() {
  }

  /**
   * @return true if "thrift -version" can be executed successfully
   */
  public static boolean isThriftAvailable() throws InterruptedException {
    try {
      LOG.debug("Checking if Thrift is available..");
      InputStream output =
          new ProcessBuilder("thrift", "-version").start().getInputStream();
      byte[] bytes = new byte[7];
      output.read(bytes);
      boolean available = (new String(bytes)).equals("Thrift ");
      LOG.debug("Thrift available: %s", available);
      return available;
    } catch (IOException e) {
      return false; // Could not execute thrift.
    }
  }
}
mread/buck
src/com/facebook/buck/util/ThriftWatcher.java
Java
apache-2.0
1,473
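isThriftAvailable() above reads only the first seven bytes of the process output and ignores the return value of read(), so a short read could yield a false negative. A sketch of a slightly more defensive variant using only standard JDK APIs; the ThriftProbe class name is illustrative and this is not the Buck implementation:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

public final class ThriftProbe {
    private ThriftProbe() {}

    /** Returns true if "thrift -version" runs and its first output line starts with "Thrift". */
    public static boolean isThriftAvailable() throws InterruptedException {
        try {
            Process process = new ProcessBuilder("thrift", "-version").start();
            try (BufferedReader reader = new BufferedReader(
                    new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8))) {
                String firstLine = reader.readLine();
                process.waitFor();
                return firstLine != null && firstLine.startsWith("Thrift");
            }
        } catch (IOException e) {
            return false; // Could not execute thrift.
        }
    }
}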
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.netty4.http; import java.util.Map; import io.netty.channel.ChannelHandlerContext; import io.netty.handler.codec.http.FullHttpRequest; import org.apache.camel.AsyncEndpoint; import org.apache.camel.Consumer; import org.apache.camel.Exchange; import org.apache.camel.Message; import org.apache.camel.PollingConsumer; import org.apache.camel.Processor; import org.apache.camel.Producer; import org.apache.camel.component.netty4.NettyConfiguration; import org.apache.camel.component.netty4.NettyEndpoint; import org.apache.camel.http.common.cookie.CookieHandler; import org.apache.camel.impl.SynchronousDelegateProducer; import org.apache.camel.spi.HeaderFilterStrategy; import org.apache.camel.spi.HeaderFilterStrategyAware; import org.apache.camel.spi.UriEndpoint; import org.apache.camel.spi.UriParam; import org.apache.camel.util.ObjectHelper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Netty HTTP server and client using the Netty 4.x library. 
*/ @UriEndpoint(firstVersion = "2.14.0", scheme = "netty4-http", extendsScheme = "netty4", title = "Netty4 HTTP", syntax = "netty4-http:protocol:host:port/path", consumerClass = NettyHttpConsumer.class, label = "http", lenientProperties = true, excludeProperties = "textline,delimiter,autoAppendDelimiter,decoderMaxLineLength,encoding,allowDefaultCodec,udpConnectionlessSending,networkInterface" + ",clientMode,reconnect,reconnectInterval,useByteBuf,udpByteArrayCodec,broadcast") public class NettyHttpEndpoint extends NettyEndpoint implements AsyncEndpoint, HeaderFilterStrategyAware { private static final Logger LOG = LoggerFactory.getLogger(NettyHttpEndpoint.class); @UriParam private NettyHttpConfiguration configuration; @UriParam(label = "advanced", name = "configuration", javaType = "org.apache.camel.component.netty4.http.NettyHttpConfiguration", description = "To use a custom configured NettyHttpConfiguration for configuring this endpoint.") private Object httpConfiguration; // to include in component docs as NettyHttpConfiguration is a @UriParams class @UriParam(label = "advanced") private NettyHttpBinding nettyHttpBinding; @UriParam(label = "advanced") private HeaderFilterStrategy headerFilterStrategy; @UriParam(label = "consumer,advanced") private boolean traceEnabled; @UriParam(label = "consumer,advanced") private String httpMethodRestrict; @UriParam(label = "consumer,advanced") private NettySharedHttpServer nettySharedHttpServer; @UriParam(label = "consumer,security") private NettyHttpSecurityConfiguration securityConfiguration; @UriParam(label = "consumer,security", prefix = "securityConfiguration.", multiValue = true) private Map<String, Object> securityOptions; // to include in component docs @UriParam(label = "producer") private CookieHandler cookieHandler; public NettyHttpEndpoint(String endpointUri, NettyHttpComponent component, NettyConfiguration configuration) { super(endpointUri, component, configuration); } @Override public NettyHttpComponent getComponent() { return (NettyHttpComponent) super.getComponent(); } @Override public Consumer createConsumer(Processor processor) throws Exception { NettyHttpConsumer answer = new NettyHttpConsumer(this, processor, getConfiguration()); configureConsumer(answer); if (nettySharedHttpServer != null) { answer.setNettyServerBootstrapFactory(nettySharedHttpServer.getServerBootstrapFactory()); LOG.info("NettyHttpConsumer: {} is using NettySharedHttpServer on port: {}", answer, nettySharedHttpServer.getPort()); } else { // reuse pipeline factory for the same address HttpServerBootstrapFactory factory = getComponent().getOrCreateHttpNettyServerBootstrapFactory(answer); // force using our server bootstrap factory answer.setNettyServerBootstrapFactory(factory); LOG.debug("Created NettyHttpConsumer: {} using HttpServerBootstrapFactory: {}", answer, factory); } return answer; } @Override public Producer createProducer() throws Exception { Producer answer = new NettyHttpProducer(this, getConfiguration()); if (isSynchronous()) { return new SynchronousDelegateProducer(answer); } else { return answer; } } @Override public PollingConsumer createPollingConsumer() throws Exception { throw new UnsupportedOperationException("This component does not support polling consumer"); } @Override public Exchange createExchange(ChannelHandlerContext ctx, Object message) throws Exception { Exchange exchange = createExchange(); FullHttpRequest request = (FullHttpRequest) message; Message in = getNettyHttpBinding().toCamelMessage(request, exchange, getConfiguration()); 
exchange.setIn(in); // setup the common message headers updateMessageHeader(in, ctx); // honor the character encoding String contentType = in.getHeader(Exchange.CONTENT_TYPE, String.class); String charset = NettyHttpHelper.getCharsetFromContentType(contentType); if (charset != null) { exchange.setProperty(Exchange.CHARSET_NAME, charset); in.setHeader(Exchange.HTTP_CHARACTER_ENCODING, charset); } return exchange; } @Override public boolean isLenientProperties() { // true to allow dynamic URI options to be configured and passed to external system for eg. the HttpProducer return true; } @Override public void setConfiguration(NettyConfiguration configuration) { super.setConfiguration(configuration); this.configuration = (NettyHttpConfiguration) configuration; } @Override public NettyHttpConfiguration getConfiguration() { return (NettyHttpConfiguration) super.getConfiguration(); } public NettyHttpBinding getNettyHttpBinding() { return nettyHttpBinding; } /** * To use a custom org.apache.camel.component.netty4.http.NettyHttpBinding for binding to/from Netty and Camel Message API. */ public void setNettyHttpBinding(NettyHttpBinding nettyHttpBinding) { this.nettyHttpBinding = nettyHttpBinding; } public HeaderFilterStrategy getHeaderFilterStrategy() { return headerFilterStrategy; } /** * To use a custom org.apache.camel.spi.HeaderFilterStrategy to filter headers. */ public void setHeaderFilterStrategy(HeaderFilterStrategy headerFilterStrategy) { this.headerFilterStrategy = headerFilterStrategy; getNettyHttpBinding().setHeaderFilterStrategy(headerFilterStrategy); } public boolean isTraceEnabled() { return traceEnabled; } /** * Specifies whether to enable HTTP TRACE for this Netty HTTP consumer. By default TRACE is turned off. */ public void setTraceEnabled(boolean traceEnabled) { this.traceEnabled = traceEnabled; } public String getHttpMethodRestrict() { return httpMethodRestrict; } /** * To disable HTTP methods on the Netty HTTP consumer. You can specify multiple separated by comma. */ public void setHttpMethodRestrict(String httpMethodRestrict) { this.httpMethodRestrict = httpMethodRestrict; } public NettySharedHttpServer getNettySharedHttpServer() { return nettySharedHttpServer; } /** * To use a shared Netty HTTP server. See Netty HTTP Server Example for more details. */ public void setNettySharedHttpServer(NettySharedHttpServer nettySharedHttpServer) { this.nettySharedHttpServer = nettySharedHttpServer; } public NettyHttpSecurityConfiguration getSecurityConfiguration() { return securityConfiguration; } /** * Refers to a org.apache.camel.component.netty4.http.NettyHttpSecurityConfiguration for configuring secure web resources. 
*/ public void setSecurityConfiguration(NettyHttpSecurityConfiguration securityConfiguration) { this.securityConfiguration = securityConfiguration; } public Map<String, Object> getSecurityOptions() { return securityOptions; } /** * To configure NettyHttpSecurityConfiguration using key/value pairs from the map */ public void setSecurityOptions(Map<String, Object> securityOptions) { this.securityOptions = securityOptions; } public CookieHandler getCookieHandler() { return cookieHandler; } /** * Configure a cookie handler to maintain a HTTP session */ public void setCookieHandler(CookieHandler cookieHandler) { this.cookieHandler = cookieHandler; } @Override protected void doStart() throws Exception { super.doStart(); ObjectHelper.notNull(nettyHttpBinding, "nettyHttpBinding", this); ObjectHelper.notNull(headerFilterStrategy, "headerFilterStrategy", this); if (securityConfiguration != null) { ObjectHelper.notEmpty(securityConfiguration.getRealm(), "realm", securityConfiguration); ObjectHelper.notEmpty(securityConfiguration.getConstraint(), "restricted", securityConfiguration); if (securityConfiguration.getSecurityAuthenticator() == null) { // setup default JAAS authenticator if none was configured JAASSecurityAuthenticator jaas = new JAASSecurityAuthenticator(); jaas.setName(securityConfiguration.getRealm()); LOG.info("No SecurityAuthenticator configured, using JAASSecurityAuthenticator as authenticator: {}", jaas); securityConfiguration.setSecurityAuthenticator(jaas); } } } }
curso007/camel
components/camel-netty4-http/src/main/java/org/apache/camel/component/netty4/http/NettyHttpEndpoint.java
Java
apache-2.0
10,739
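A brief usage sketch (not part of the NettyHttpEndpoint.java source above): how a Camel route would typically reference this endpoint through its "netty4-http:protocol:host:port/path" URI syntax declared in the @UriEndpoint annotation. The route class name, hosts, ports and paths below are hypothetical placeholders; the options mentioned in the comments are the @UriParam options shown in the file.

import org.apache.camel.builder.RouteBuilder;

public class NettyHttpExampleRoute extends RouteBuilder {
    @Override
    public void configure() throws Exception {
        // Consumer side: Camel resolves this URI to a NettyHttpEndpoint and calls
        // createConsumer(), which wires the consumer to an HttpServerBootstrapFactory
        // (or to a NettySharedHttpServer when one is configured).
        from("netty4-http:http://0.0.0.0:8080/echo")
            // Producer side: createProducer() returns a NettyHttpProducer; options such as
            // traceEnabled or httpMethodRestrict could be appended as URI query parameters.
            .to("netty4-http:http://remotehost:9090/backend");
    }
}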
function getElements(className) { return Array.from(document.getElementsByClassName(className)); } window.onload = function() { // Force a reflow before any changes. document.body.clientWidth; getElements('remove').forEach(function(e) { e.remove(); }); getElements('remove-after').forEach(function(e) { e.parentNode.removeChild(e.nextSibling); }); };
nwjs/chromium.src
third_party/blink/web_tests/external/wpt/css/css-ruby/support/ruby-dynamic-removal.js
JavaScript
bsd-3-clause
374
// Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "ash/metrics/task_switch_time_tracker.h" #include <string> #include "ash/test/task_switch_time_tracker_test_api.h" #include "base/test/histogram_tester.h" #include "testing/gtest/include/gtest/gtest.h" namespace ash { namespace { // A dummy histogram name. const std::string kHistogramName = "Dummy.Histogram"; } // namespace class TaskSwitchTimeTrackerTest : public testing::Test { public: TaskSwitchTimeTrackerTest(); ~TaskSwitchTimeTrackerTest() override; // testing::Test: void SetUp() override; void TearDown() override; // Wrapper to the test target's OnTaskSwitch() method. void OnTaskSwitch(); TaskSwitchTimeTracker* time_tracker() { return time_tracker_test_api_->time_tracker(); } protected: // Used to verify recorded histogram data. scoped_ptr<base::HistogramTester> histogram_tester_; // A Test API that wraps the test target. scoped_ptr<test::TaskSwitchTimeTrackerTestAPI> time_tracker_test_api_; private: DISALLOW_COPY_AND_ASSIGN(TaskSwitchTimeTrackerTest); }; TaskSwitchTimeTrackerTest::TaskSwitchTimeTrackerTest() { } TaskSwitchTimeTrackerTest::~TaskSwitchTimeTrackerTest() { } void TaskSwitchTimeTrackerTest::SetUp() { testing::Test::SetUp(); histogram_tester_.reset(new base::HistogramTester()); time_tracker_test_api_.reset( new test::TaskSwitchTimeTrackerTestAPI(kHistogramName)); // The TaskSwitchTimeTracker interprets a value of base::TimeTicks() as if the // |last_action_time_| has not been set. time_tracker_test_api_->Advance(base::TimeDelta::FromMilliseconds(1)); } void TaskSwitchTimeTrackerTest::TearDown() { testing::Test::TearDown(); time_tracker_test_api_.reset(); histogram_tester_.reset(); } void TaskSwitchTimeTrackerTest::OnTaskSwitch() { time_tracker()->OnTaskSwitch(); } // Verifies TaskSwitchTimeTracker::HasLastActionTime() returns false after // construction. TEST_F(TaskSwitchTimeTrackerTest, HasLastActionTimeShouldBeFalseAfterConstruction) { EXPECT_FALSE(time_tracker_test_api_->HasLastActionTime()); } // Verifies TaskSwitchTimeTracker::HasLastActionTime() returns true after the // first call to TaskSwitchTimeTracker::OnTaskSwitch() and that no histogram data was // recorded. TEST_F(TaskSwitchTimeTrackerTest, HasLastActionTimeShouldBeTrueAfterOnTaskSwitch) { OnTaskSwitch(); histogram_tester_->ExpectTotalCount(kHistogramName, 0); } // Verifies that the histogram data is recorded in the correct buckets. TEST_F(TaskSwitchTimeTrackerTest, RecordAfterTwoTaskSwitches) { OnTaskSwitch(); time_tracker_test_api_->Advance(base::TimeDelta::FromMilliseconds(2)); OnTaskSwitch(); histogram_tester_->ExpectTotalCount(kHistogramName, 1); histogram_tester_->ExpectBucketCount(kHistogramName, 0, 1); time_tracker_test_api_->Advance(base::TimeDelta::FromSeconds(1)); OnTaskSwitch(); histogram_tester_->ExpectTotalCount(kHistogramName, 2); histogram_tester_->ExpectBucketCount(kHistogramName, 1, 1); } } // namespace ash
Chilledheart/chromium
ash/metrics/task_switch_time_tracker_unittest.cc
C++
bsd-3-clause
3,152
// { dg-do assemble { target fpic } } // { dg-options "-O0 -fpic" } // Origin: Jakub Jelinek <jakub@redhat.com> struct bar { bar() {} double x[3]; }; static bar y[4]; void foo(int z) { bar w; y[z] = w; }
shaotuanchen/sunflower_exp
tools/source/gcc-4.2.4/gcc/testsuite/g++.old-deja/g++.other/local-alloc1.C
C++
bsd-3-clause
215
/** * @license Copyright (c) 2003-2015, CKSource - Frederico Knabben. All rights reserved. * For licensing, see LICENSE.md or http://ckeditor.com/license */ 'use strict'; ( function() { var template = '<img alt="" src="" />', templateBlock = new CKEDITOR.template( '<figure class="{captionedClass}">' + template + '<figcaption>{captionPlaceholder}</figcaption>' + '</figure>' ), alignmentsObj = { left: 0, center: 1, right: 2 }, regexPercent = /^\s*(\d+\%)\s*$/i; CKEDITOR.plugins.add( 'image2', { // jscs:disable maximumLineLength lang: 'af,ar,bg,bn,bs,ca,cs,cy,da,de,el,en,en-au,en-ca,en-gb,eo,es,et,eu,fa,fi,fo,fr,fr-ca,gl,gu,he,hi,hr,hu,id,is,it,ja,ka,km,ko,ku,lt,lv,mk,mn,ms,nb,nl,no,pl,pt,pt-br,ro,ru,si,sk,sl,sq,sr,sr-latn,sv,th,tr,tt,ug,uk,vi,zh,zh-cn', // %REMOVE_LINE_CORE% // jscs:enable maximumLineLength requires: 'widget,dialog', icons: 'image', hidpi: true, onLoad: function() { CKEDITOR.addCss( '.cke_image_nocaption{' + // This is to remove unwanted space so resize // wrapper is displayed property. 'line-height:0' + '}' + '.cke_editable.cke_image_sw, .cke_editable.cke_image_sw *{cursor:sw-resize !important}' + '.cke_editable.cke_image_se, .cke_editable.cke_image_se *{cursor:se-resize !important}' + '.cke_image_resizer{' + 'display:none;' + 'position:absolute;' + 'width:10px;' + 'height:10px;' + 'bottom:-5px;' + 'right:-5px;' + 'background:#000;' + 'outline:1px solid #fff;' + // Prevent drag handler from being misplaced (#11207). 'line-height:0;' + 'cursor:se-resize;' + '}' + '.cke_image_resizer_wrapper{' + 'position:relative;' + 'display:inline-block;' + 'line-height:0;' + '}' + // Bottom-left corner style of the resizer. '.cke_image_resizer.cke_image_resizer_left{' + 'right:auto;' + 'left:-5px;' + 'cursor:sw-resize;' + '}' + '.cke_widget_wrapper:hover .cke_image_resizer,' + '.cke_image_resizer.cke_image_resizing{' + 'display:block' + '}' + // Expand widget wrapper when linked inline image. '.cke_widget_wrapper>a{' + 'display:inline-block' + '}' ); }, init: function( editor ) { // Adapts configuration from original image plugin. Should be removed // when we'll rename image2 to image. var config = editor.config, lang = editor.lang.image2, image = widgetDef( editor ); // Since filebrowser plugin discovers config properties by dialog (plugin?) // names (sic!), this hack will be necessary as long as Image2 is not named // Image. And since Image2 will never be Image, for sure some filebrowser logic // got to be refined. config.filebrowserImage2BrowseUrl = config.filebrowserImageBrowseUrl; config.filebrowserImage2UploadUrl = config.filebrowserImageUploadUrl; // Add custom elementspath names to widget definition. image.pathName = lang.pathName; image.editables.caption.pathName = lang.pathNameCaption; // Register the widget. editor.widgets.add( 'image', image ); // Add toolbar button for this plugin. editor.ui.addButton && editor.ui.addButton( 'Image', { label: editor.lang.common.image, command: 'image', toolbar: 'insert,10' } ); // Register context menu option for editing widget. if ( editor.contextMenu ) { editor.addMenuGroup( 'image', 10 ); editor.addMenuItem( 'image', { label: lang.menu, command: 'image', group: 'image' } ); } CKEDITOR.dialog.add( 'image2', this.path + 'dialogs/image2.js' ); }, afterInit: function( editor ) { // Integrate with align commands (justify plugin). var align = { left: 1, right: 1, center: 1, block: 1 }, integrate = alignCommandIntegrator( editor ); for ( var value in align ) integrate( value ); // Integrate with link commands (link plugin). 
linkCommandIntegrator( editor ); } } ); // Wiget states (forms) depending on alignment and configuration. // // Non-captioned widget (inline styles) // ┌──────┬───────────────────────────────┬─────────────────────────────┐ // │Align │Internal form │Data │ // ├──────┼───────────────────────────────┼─────────────────────────────┤ // │none │<wrapper> │<img /> │ // │ │ <img /> │ │ // │ │</wrapper> │ │ // ├──────┼───────────────────────────────┼─────────────────────────────┤ // │left │<wrapper style=”float:left”> │<img style=”float:left” /> │ // │ │ <img /> │ │ // │ │</wrapper> │ │ // ├──────┼───────────────────────────────┼─────────────────────────────┤ // │center│<wrapper> │<p style=”text-align:center”>│ // │ │ <p style=”text-align:center”> │ <img /> │ // │ │ <img /> │</p> │ // │ │ </p> │ │ // │ │</wrapper> │ │ // ├──────┼───────────────────────────────┼─────────────────────────────┤ // │right │<wrapper style=”float:right”> │<img style=”float:right” /> │ // │ │ <img /> │ │ // │ │</wrapper> │ │ // └──────┴───────────────────────────────┴─────────────────────────────┘ // // Non-captioned widget (config.image2_alignClasses defined) // ┌──────┬───────────────────────────────┬─────────────────────────────┐ // │Align │Internal form │Data │ // ├──────┼───────────────────────────────┼─────────────────────────────┤ // │none │<wrapper> │<img /> │ // │ │ <img /> │ │ // │ │</wrapper> │ │ // ├──────┼───────────────────────────────┼─────────────────────────────┤ // │left │<wrapper class=”left”> │<img class=”left” /> │ // │ │ <img /> │ │ // │ │</wrapper> │ │ // ├──────┼───────────────────────────────┼─────────────────────────────┤ // │center│<wrapper> │<p class=”center”> │ // │ │ <p class=”center”> │ <img /> │ // │ │ <img /> │</p> │ // │ │ </p> │ │ // │ │</wrapper> │ │ // ├──────┼───────────────────────────────┼─────────────────────────────┤ // │right │<wrapper class=”right”> │<img class=”right” /> │ // │ │ <img /> │ │ // │ │</wrapper> │ │ // └──────┴───────────────────────────────┴─────────────────────────────┘ // // Captioned widget (inline styles) // ┌──────┬────────────────────────────────────────┬────────────────────────────────────────┐ // │Align │Internal form │Data │ // ├──────┼────────────────────────────────────────┼────────────────────────────────────────┤ // │none │<wrapper> │<figure /> │ // │ │ <figure /> │ │ // │ │</wrapper> │ │ // ├──────┼────────────────────────────────────────┼────────────────────────────────────────┤ // │left │<wrapper style=”float:left”> │<figure style=”float:left” /> │ // │ │ <figure /> │ │ // │ │</wrapper> │ │ // ├──────┼────────────────────────────────────────┼────────────────────────────────────────┤ // │center│<wrapper style=”text-align:center”> │<div style=”text-align:center”> │ // │ │ <figure style=”display:inline-block” />│ <figure style=”display:inline-block” />│ // │ │</wrapper> │</p> │ // ├──────┼────────────────────────────────────────┼────────────────────────────────────────┤ // │right │<wrapper style=”float:right”> │<figure style=”float:right” /> │ // │ │ <figure /> │ │ // │ │</wrapper> │ │ // └──────┴────────────────────────────────────────┴────────────────────────────────────────┘ // // Captioned widget (config.image2_alignClasses defined) // ┌──────┬────────────────────────────────────────┬────────────────────────────────────────┐ // │Align │Internal form │Data │ // ├──────┼────────────────────────────────────────┼────────────────────────────────────────┤ // │none │<wrapper> │<figure /> │ // │ │ <figure /> │ │ // │ │</wrapper> │ │ // 
├──────┼────────────────────────────────────────┼────────────────────────────────────────┤ // │left │<wrapper class=”left”> │<figure class=”left” /> │ // │ │ <figure /> │ │ // │ │</wrapper> │ │ // ├──────┼────────────────────────────────────────┼────────────────────────────────────────┤ // │center│<wrapper class=”center”> │<div class=”center”> │ // │ │ <figure /> │ <figure /> │ // │ │</wrapper> │</p> │ // ├──────┼────────────────────────────────────────┼────────────────────────────────────────┤ // │right │<wrapper class=”right”> │<figure class=”right” /> │ // │ │ <figure /> │ │ // │ │</wrapper> │ │ // └──────┴────────────────────────────────────────┴────────────────────────────────────────┘ // // @param {CKEDITOR.editor} // @returns {Object} function widgetDef( editor ) { var alignClasses = editor.config.image2_alignClasses, captionedClass = editor.config.image2_captionedClass; function deflate() { if ( this.deflated ) return; // Remember whether widget was focused before destroyed. if ( editor.widgets.focused == this.widget ) this.focused = true; editor.widgets.destroy( this.widget ); // Mark widget was destroyed. this.deflated = true; } function inflate() { var editable = editor.editable(), doc = editor.document; // Create a new widget. This widget will be either captioned // non-captioned, block or inline according to what is the // new state of the widget. if ( this.deflated ) { this.widget = editor.widgets.initOn( this.element, 'image', this.widget.data ); // Once widget was re-created, it may become an inline element without // block wrapper (i.e. when unaligned, end not captioned). Let's do some // sort of autoparagraphing here (#10853). if ( this.widget.inline && !( new CKEDITOR.dom.elementPath( this.widget.wrapper, editable ).block ) ) { var block = doc.createElement( editor.activeEnterMode == CKEDITOR.ENTER_P ? 'p' : 'div' ); block.replace( this.widget.wrapper ); this.widget.wrapper.move( block ); } // The focus must be transferred from the old one (destroyed) // to the new one (just created). if ( this.focused ) { this.widget.focus(); delete this.focused; } delete this.deflated; } // If now widget was destroyed just update wrapper's alignment. // According to the new state. else { setWrapperAlign( this.widget, alignClasses ); } } return { allowedContent: getWidgetAllowedContent( editor ), requiredContent: 'img[src,alt]', features: getWidgetFeatures( editor ), styleableElements: 'img figure', // This widget converts style-driven dimensions to attributes. contentTransformations: [ [ 'img[width]: sizeToAttribute' ] ], // This widget has an editable caption. editables: { caption: { selector: 'figcaption', allowedContent: 'br em strong sub sup u s; a[!href]' } }, parts: { image: 'img', caption: 'figcaption' // parts#link defined in widget#init }, // The name of this widget's dialog. dialog: 'image2', // Template of the widget: plain image. template: template, data: function() { var features = this.features; // Image can't be captioned when figcaption is disallowed (#11004). if ( this.data.hasCaption && !editor.filter.checkFeature( features.caption ) ) this.data.hasCaption = false; // Image can't be aligned when floating is disallowed (#11004). if ( this.data.align != 'none' && !editor.filter.checkFeature( features.align ) ) this.data.align = 'none'; // Convert the internal form of the widget from the old state to the new one. 
this.shiftState( { widget: this, element: this.element, oldData: this.oldData, newData: this.data, deflate: deflate, inflate: inflate } ); // Update widget.parts.link since it will not auto-update unless widget // is destroyed and re-inited. if ( !this.data.link ) { if ( this.parts.link ) delete this.parts.link; } else { if ( !this.parts.link ) this.parts.link = this.parts.image.getParent(); } this.parts.image.setAttributes( { src: this.data.src, // This internal is required by the editor. 'data-cke-saved-src': this.data.src, alt: this.data.alt } ); // If shifting non-captioned -> captioned, remove classes // related to styles from <img/>. if ( this.oldData && !this.oldData.hasCaption && this.data.hasCaption ) { for ( var c in this.data.classes ) this.parts.image.removeClass( c ); } // Set dimensions of the image according to gathered data. // Do it only when the attributes are allowed (#11004). if ( editor.filter.checkFeature( features.dimension ) ) setDimensions( this ); // Cache current data. this.oldData = CKEDITOR.tools.extend( {}, this.data ); }, init: function() { var helpers = CKEDITOR.plugins.image2, image = this.parts.image, data = { hasCaption: !!this.parts.caption, src: image.getAttribute( 'src' ), alt: image.getAttribute( 'alt' ) || '', width: image.getAttribute( 'width' ) || '', height: image.getAttribute( 'height' ) || '', // Lock ratio is on by default (#10833). lock: this.ready ? helpers.checkHasNaturalRatio( image ) : true }; // If we used 'a' in widget#parts definition, it could happen that // selected element is a child of widget.parts#caption. Since there's no clever // way to solve it with CSS selectors, it's done like that. (#11783). var link = image.getAscendant( 'a' ); if ( link && this.wrapper.contains( link ) ) this.parts.link = link; // Depending on configuration, read style/class from element and // then remove it. Removed style/class will be set on wrapper in #data listener. // Note: Center alignment is detected during upcast, so only left/right cases // are checked below. if ( !data.align ) { var alignElement = data.hasCaption ? this.element : image; // Read the initial left/right alignment from the class set on element. if ( alignClasses ) { if ( alignElement.hasClass( alignClasses[ 0 ] ) ) { data.align = 'left'; } else if ( alignElement.hasClass( alignClasses[ 2 ] ) ) { data.align = 'right'; } if ( data.align ) { alignElement.removeClass( alignClasses[ alignmentsObj[ data.align ] ] ); } else { data.align = 'none'; } } // Read initial float style from figure/image and then remove it. else { data.align = alignElement.getStyle( 'float' ) || 'none'; alignElement.removeStyle( 'float' ); } } // Update data.link object with attributes if the link has been discovered. if ( editor.plugins.link && this.parts.link ) { data.link = CKEDITOR.plugins.link.parseLinkAttributes( editor, this.parts.link ); // Get rid of cke_widget_* classes in data. Otherwise // they might appear in link dialog. var advanced = data.link.advanced; if ( advanced && advanced.advCSSClasses ) { advanced.advCSSClasses = CKEDITOR.tools.trim( advanced.advCSSClasses.replace( /cke_\S+/, '' ) ); } } // Get rid of extra vertical space when there's no caption. // It will improve the look of the resizer. this.wrapper[ ( data.hasCaption ? 'remove' : 'add' ) + 'Class' ]( 'cke_image_nocaption' ); this.setData( data ); // Setup dynamic image resizing with mouse. // Don't initialize resizer when dimensions are disallowed (#11004). 
if ( editor.filter.checkFeature( this.features.dimension ) && editor.config.image2_disableResizer !== true ) setupResizer( this ); this.shiftState = helpers.stateShifter( this.editor ); // Add widget editing option to its context menu. this.on( 'contextMenu', function( evt ) { evt.data.image = CKEDITOR.TRISTATE_OFF; // Integrate context menu items for link. // Note that widget may be wrapped in a link, which // does not belong to that widget (#11814). if ( this.parts.link || this.wrapper.getAscendant( 'a' ) ) evt.data.link = evt.data.unlink = CKEDITOR.TRISTATE_OFF; } ); // Pass the reference to this widget to the dialog. this.on( 'dialog', function( evt ) { evt.data.widget = this; }, this ); }, // Overrides default method to handle internal mutability of Image2. // @see CKEDITOR.plugins.widget#addClass addClass: function( className ) { getStyleableElement( this ).addClass( className ); }, // Overrides default method to handle internal mutability of Image2. // @see CKEDITOR.plugins.widget#hasClass hasClass: function( className ) { return getStyleableElement( this ).hasClass( className ); }, // Overrides default method to handle internal mutability of Image2. // @see CKEDITOR.plugins.widget#removeClass removeClass: function( className ) { getStyleableElement( this ).removeClass( className ); }, // Overrides default method to handle internal mutability of Image2. // @see CKEDITOR.plugins.widget#getClasses getClasses: ( function() { var classRegex = new RegExp( '^(' + [].concat( captionedClass, alignClasses ).join( '|' ) + ')$' ); return function() { var classes = this.repository.parseElementClasses( getStyleableElement( this ).getAttribute( 'class' ) ); // Neither config.image2_captionedClass nor config.image2_alignClasses // do not belong to style classes. for ( var c in classes ) { if ( classRegex.test( c ) ) delete classes[ c ]; } return classes; }; } )(), upcast: upcastWidgetElement( editor ), downcast: downcastWidgetElement( editor ) }; } CKEDITOR.plugins.image2 = { stateShifter: function( editor ) { // Tag name used for centering non-captioned widgets. var doc = editor.document, alignClasses = editor.config.image2_alignClasses, captionedClass = editor.config.image2_captionedClass, editable = editor.editable(), // The order that stateActions get executed. It matters! shiftables = [ 'hasCaption', 'align', 'link' ]; // Atomic procedures, one per state variable. var stateActions = { align: function( shift, oldValue, newValue ) { var el = shift.element; // Alignment changed. if ( shift.changed.align ) { // No caption in the new state. if ( !shift.newData.hasCaption ) { // Changed to "center" (non-captioned). if ( newValue == 'center' ) { shift.deflate(); shift.element = wrapInCentering( editor, el ); } // Changed to "non-center" from "center" while caption removed. if ( !shift.changed.hasCaption && oldValue == 'center' && newValue != 'center' ) { shift.deflate(); shift.element = unwrapFromCentering( el ); } } } // Alignment remains and "center" removed caption. else if ( newValue == 'center' && shift.changed.hasCaption && !shift.newData.hasCaption ) { shift.deflate(); shift.element = wrapInCentering( editor, el ); } // Finally set display for figure. if ( !alignClasses && el.is( 'figure' ) ) { if ( newValue == 'center' ) el.setStyle( 'display', 'inline-block' ); else el.removeStyle( 'display' ); } }, hasCaption: function( shift, oldValue, newValue ) { // This action is for real state change only. if ( !shift.changed.hasCaption ) return; // Get <img/> or <a><img/></a> from widget. 
Note that widget element might itself // be what we're looking for. Also element can be <p style="text-align:center"><a>...</a></p>. var imageOrLink; if ( shift.element.is( { img: 1, a: 1 } ) ) imageOrLink = shift.element; else imageOrLink = shift.element.findOne( 'a,img' ); // Switching hasCaption always destroys the widget. shift.deflate(); // There was no caption, but the caption is to be added. if ( newValue ) { // Create new <figure> from widget template. var figure = CKEDITOR.dom.element.createFromHtml( templateBlock.output( { captionedClass: captionedClass, captionPlaceholder: editor.lang.image2.captionPlaceholder } ), doc ); // Replace element with <figure>. replaceSafely( figure, shift.element ); // Use old <img/> or <a><img/></a> instead of the one from the template, // so we won't lose additional attributes. imageOrLink.replace( figure.findOne( 'img' ) ); // Update widget's element. shift.element = figure; } // The caption was present, but now it's to be removed. else { // Unwrap <img/> or <a><img/></a> from figure. imageOrLink.replace( shift.element ); // Update widget's element. shift.element = imageOrLink; } }, link: function( shift, oldValue, newValue ) { if ( shift.changed.link ) { var img = shift.element.is( 'img' ) ? shift.element : shift.element.findOne( 'img' ), link = shift.element.is( 'a' ) ? shift.element : shift.element.findOne( 'a' ), // Why deflate: // If element is <img/>, it will be wrapped into <a>, // which becomes a new widget.element. // If element is <a><img/></a>, it will be unlinked // so <img/> becomes a new widget.element. needsDeflate = ( shift.element.is( 'a' ) && !newValue ) || ( shift.element.is( 'img' ) && newValue ), newEl; if ( needsDeflate ) shift.deflate(); // If unlinked the image, returned element is <img>. if ( !newValue ) newEl = unwrapFromLink( link ); else { // If linked the image, returned element is <a>. if ( !oldValue ) newEl = wrapInLink( img, shift.newData.link ); // Set and remove all attributes associated with this state. var attributes = CKEDITOR.plugins.link.getLinkAttributes( editor, newValue ); if ( !CKEDITOR.tools.isEmpty( attributes.set ) ) ( newEl || link ).setAttributes( attributes.set ); if ( attributes.removed.length ) ( newEl || link ).removeAttributes( attributes.removed ); } if ( needsDeflate ) shift.element = newEl; } } }; function wrapInCentering( editor, element ) { var attribsAndStyles = {}; if ( alignClasses ) attribsAndStyles.attributes = { 'class': alignClasses[ 1 ] }; else attribsAndStyles.styles = { 'text-align': 'center' }; // There's no gentle way to center inline element with CSS, so create p/div // that wraps widget contents and does the trick either with style or class. var center = doc.createElement( editor.activeEnterMode == CKEDITOR.ENTER_P ? 'p' : 'div', attribsAndStyles ); // Replace element with centering wrapper. replaceSafely( center, element ); element.move( center ); return center; } function unwrapFromCentering( element ) { var imageOrLink = element.findOne( 'a,img' ); imageOrLink.replace( element ); return imageOrLink; } // Wraps <img/> -> <a><img/></a>. // Returns reference to <a>. // // @param {CKEDITOR.dom.element} img // @param {Object} linkData // @returns {CKEDITOR.dom.element} function wrapInLink( img, linkData ) { var link = doc.createElement( 'a', { attributes: { href: linkData.url } } ); link.replace( img ); img.move( link ); return link; } // De-wraps <a><img/></a> -> <img/>. 
// Returns the reference to <img/> // // @param {CKEDITOR.dom.element} link // @returns {CKEDITOR.dom.element} function unwrapFromLink( link ) { var img = link.findOne( 'img' ); img.replace( link ); return img; } function replaceSafely( replacing, replaced ) { if ( replaced.getParent() ) { var range = editor.createRange(); range.moveToPosition( replaced, CKEDITOR.POSITION_BEFORE_START ); // Remove old element. Do it before insertion to avoid a case when // element is moved from 'replaced' element before it, what creates // a tricky case which insertElementIntorRange does not handle. replaced.remove(); editable.insertElementIntoRange( replacing, range ); } else { replacing.replace( replaced ); } } return function( shift ) { var name, i; shift.changed = {}; for ( i = 0; i < shiftables.length; i++ ) { name = shiftables[ i ]; shift.changed[ name ] = shift.oldData ? shift.oldData[ name ] !== shift.newData[ name ] : false; } // Iterate over possible state variables. for ( i = 0; i < shiftables.length; i++ ) { name = shiftables[ i ]; stateActions[ name ]( shift, shift.oldData ? shift.oldData[ name ] : null, shift.newData[ name ] ); } shift.inflate(); }; }, // Checks whether current ratio of the image match the natural one. // by comparing dimensions. // @param {CKEDITOR.dom.element} image // @returns {Boolean} checkHasNaturalRatio: function( image ) { var $ = image.$, natural = this.getNatural( image ); // The reason for two alternative comparisons is that the rounding can come from // both dimensions, e.g. there are two cases: // 1. height is computed as a rounded relation of the real height and the value of width, // 2. width is computed as a rounded relation of the real width and the value of heigh. return Math.round( $.clientWidth / natural.width * natural.height ) == $.clientHeight || Math.round( $.clientHeight / natural.height * natural.width ) == $.clientWidth; }, // Returns natural dimensions of the image. For modern browsers // it uses natural(Width|Height) for old ones (IE8), creates // a new image and reads dimensions. // @param {CKEDITOR.dom.element} image // @returns {Object} getNatural: function( image ) { var dimensions; if ( image.$.naturalWidth ) { dimensions = { width: image.$.naturalWidth, height: image.$.naturalHeight }; } else { var img = new Image(); img.src = image.getAttribute( 'src' ); dimensions = { width: img.width, height: img.height }; } return dimensions; } }; function setWrapperAlign( widget, alignClasses ) { var wrapper = widget.wrapper, align = widget.data.align, hasCaption = widget.data.hasCaption; if ( alignClasses ) { // Remove all align classes first. for ( var i = 3; i--; ) wrapper.removeClass( alignClasses[ i ] ); if ( align == 'center' ) { // Avoid touching non-captioned, centered widgets because // they have the class set on the element instead of wrapper: // // <div class="cke_widget_wrapper"> // <p class="center-class"> // <img /> // </p> // </div> if ( hasCaption ) { wrapper.addClass( alignClasses[ 1 ] ); } } else if ( align != 'none' ) { wrapper.addClass( alignClasses[ alignmentsObj[ align ] ] ); } } else { if ( align == 'center' ) { if ( hasCaption ) wrapper.setStyle( 'text-align', 'center' ); else wrapper.removeStyle( 'text-align' ); wrapper.removeStyle( 'float' ); } else { if ( align == 'none' ) wrapper.removeStyle( 'float' ); else wrapper.setStyle( 'float', align ); wrapper.removeStyle( 'text-align' ); } } } // Returns a function that creates widgets from all <img> and // <figure class="{config.image2_captionedClass}"> elements. 
// // @param {CKEDITOR.editor} editor // @returns {Function} function upcastWidgetElement( editor ) { var isCenterWrapper = centerWrapperChecker( editor ), captionedClass = editor.config.image2_captionedClass; // @param {CKEDITOR.htmlParser.element} el // @param {Object} data return function( el, data ) { var dimensions = { width: 1, height: 1 }, name = el.name, image; // #11110 Don't initialize on pasted fake objects. if ( el.attributes[ 'data-cke-realelement' ] ) return; // If a center wrapper is found, there are 3 possible cases: // // 1. <div style="text-align:center"><figure>...</figure></div>. // In this case centering is done with a class set on widget.wrapper. // Simply replace centering wrapper with figure (it's no longer necessary). // // 2. <p style="text-align:center"><img/></p>. // Nothing to do here: <p> remains for styling purposes. // // 3. <div style="text-align:center"><img/></div>. // Nothing to do here (2.) but that case is only possible in enterMode different // than ENTER_P. if ( isCenterWrapper( el ) ) { if ( name == 'div' ) { var figure = el.getFirst( 'figure' ); // Case #1. if ( figure ) { el.replaceWith( figure ); el = figure; } } // Cases #2 and #3 (handled transparently) // If there's a centering wrapper, save it in data. data.align = 'center'; // Image can be wrapped in link <a><img/></a>. image = el.getFirst( 'img' ) || el.getFirst( 'a' ).getFirst( 'img' ); } // No center wrapper has been found. else if ( name == 'figure' && el.hasClass( captionedClass ) ) { image = el.getFirst( 'img' ) || el.getFirst( 'a' ).getFirst( 'img' ); // Upcast linked image like <a><img/></a>. } else if ( isLinkedOrStandaloneImage( el ) ) { image = el.name == 'a' ? el.children[ 0 ] : el; } if ( !image ) return; // If there's an image, then cool, we got a widget. // Now just remove dimension attributes expressed with %. for ( var d in dimensions ) { var dimension = image.attributes[ d ]; if ( dimension && dimension.match( regexPercent ) ) delete image.attributes[ d ]; } return el; }; } // Returns a function which transforms the widget to the external format // according to the current configuration. // // @param {CKEDITOR.editor} function downcastWidgetElement( editor ) { var alignClasses = editor.config.image2_alignClasses; // @param {CKEDITOR.htmlParser.element} el return function( el ) { // In case of <a><img/></a>, <img/> is the element to hold // inline styles or classes (image2_alignClasses). var attrsHolder = el.name == 'a' ? el.getFirst() : el, attrs = attrsHolder.attributes, align = this.data.align; // De-wrap the image from resize handle wrapper. // Only block widgets have one. if ( !this.inline ) { var resizeWrapper = el.getFirst( 'span' ); if ( resizeWrapper ) resizeWrapper.replaceWith( resizeWrapper.getFirst( { img: 1, a: 1 } ) ); } if ( align && align != 'none' ) { var styles = CKEDITOR.tools.parseCssText( attrs.style || '' ); // When the widget is captioned (<figure>) and internally centering is done // with widget's wrapper style/class, in the external data representation, // <figure> must be wrapped with an element holding an style/class: // // <div style="text-align:center"> // <figure class="image" style="display:inline-block">...</figure> // </div> // or // <div class="some-center-class"> // <figure class="image">...</figure> // </div> // if ( align == 'center' && el.name == 'figure' ) { el = el.wrapWith( new CKEDITOR.htmlParser.element( 'div', alignClasses ? 
{ 'class': alignClasses[ 1 ] } : { style: 'text-align:center' } ) ); } // If left/right, add float style to the downcasted element. else if ( align in { left: 1, right: 1 } ) { if ( alignClasses ) attrsHolder.addClass( alignClasses[ alignmentsObj[ align ] ] ); else styles[ 'float' ] = align; } // Update element styles. if ( !alignClasses && !CKEDITOR.tools.isEmpty( styles ) ) attrs.style = CKEDITOR.tools.writeCssText( styles ); } return el; }; } // Returns a function that checks if an element is a centering wrapper. // // @param {CKEDITOR.editor} editor // @returns {Function} function centerWrapperChecker( editor ) { var captionedClass = editor.config.image2_captionedClass, alignClasses = editor.config.image2_alignClasses, validChildren = { figure: 1, a: 1, img: 1 }; return function( el ) { // Wrapper must be either <div> or <p>. if ( !( el.name in { div: 1, p: 1 } ) ) return false; var children = el.children; // Centering wrapper can have only one child. if ( children.length !== 1 ) return false; var child = children[ 0 ]; // Only <figure> or <img /> can be first (only) child of centering wrapper, // regardless of its type. if ( !( child.name in validChildren ) ) return false; // If centering wrapper is <p>, only <img /> can be the child. // <p style="text-align:center"><img /></p> if ( el.name == 'p' ) { if ( !isLinkedOrStandaloneImage( child ) ) return false; } // Centering <div> can hold <img/> or <figure>, depending on enterMode. else { // If a <figure> is the first (only) child, it must have a class. // <div style="text-align:center"><figure>...</figure><div> if ( child.name == 'figure' ) { if ( !child.hasClass( captionedClass ) ) return false; } else { // Centering <div> can hold <img/> or <a><img/></a> only when enterMode // is ENTER_(BR|DIV). // <div style="text-align:center"><img /></div> // <div style="text-align:center"><a><img /></a></div> if ( editor.enterMode == CKEDITOR.ENTER_P ) return false; // Regardless of enterMode, a child which is not <figure> must be // either <img/> or <a><img/></a>. if ( !isLinkedOrStandaloneImage( child ) ) return false; } } // Centering wrapper got to be... centering. If image2_alignClasses are defined, // check for centering class. Otherwise, check the style. if ( alignClasses ? el.hasClass( alignClasses[ 1 ] ) : CKEDITOR.tools.parseCssText( el.attributes.style || '', true )[ 'text-align' ] == 'center' ) return true; return false; }; } // Checks whether element is <img/> or <a><img/></a>. // // @param {CKEDITOR.htmlParser.element} function isLinkedOrStandaloneImage( el ) { if ( el.name == 'img' ) return true; else if ( el.name == 'a' ) return el.children.length == 1 && el.getFirst( 'img' ); return false; } // Sets width and height of the widget image according to current widget data. // // @param {CKEDITOR.plugins.widget} widget function setDimensions( widget ) { var data = widget.data, dimensions = { width: data.width, height: data.height }, image = widget.parts.image; for ( var d in dimensions ) { if ( dimensions[ d ] ) image.setAttribute( d, dimensions[ d ] ); else image.removeAttribute( d ); } } // Defines all features related to drag-driven image resizing. // // @param {CKEDITOR.plugins.widget} widget function setupResizer( widget ) { var editor = widget.editor, editable = editor.editable(), doc = editor.document, // Store the resizer in a widget for testing (#11004). 
resizer = widget.resizer = doc.createElement( 'span' ); resizer.addClass( 'cke_image_resizer' ); resizer.setAttribute( 'title', editor.lang.image2.resizer ); resizer.append( new CKEDITOR.dom.text( '\u200b', doc ) ); // Inline widgets don't need a resizer wrapper as an image spans the entire widget. if ( !widget.inline ) { var imageOrLink = widget.parts.link || widget.parts.image, oldResizeWrapper = imageOrLink.getParent(), resizeWrapper = doc.createElement( 'span' ); resizeWrapper.addClass( 'cke_image_resizer_wrapper' ); resizeWrapper.append( imageOrLink ); resizeWrapper.append( resizer ); widget.element.append( resizeWrapper, true ); // Remove the old wrapper which could came from e.g. pasted HTML // and which could be corrupted (e.g. resizer span has been lost). if ( oldResizeWrapper.is( 'span' ) ) oldResizeWrapper.remove(); } else { widget.wrapper.append( resizer ); } // Calculate values of size variables and mouse offsets. resizer.on( 'mousedown', function( evt ) { var image = widget.parts.image, // "factor" can be either 1 or -1. I.e.: For right-aligned images, we need to // subtract the difference to get proper width, etc. Without "factor", // resizer starts working the opposite way. factor = widget.data.align == 'right' ? -1 : 1, // The x-coordinate of the mouse relative to the screen // when button gets pressed. startX = evt.data.$.screenX, startY = evt.data.$.screenY, // The initial dimensions and aspect ratio of the image. startWidth = image.$.clientWidth, startHeight = image.$.clientHeight, ratio = startWidth / startHeight, listeners = [], // A class applied to editable during resizing. cursorClass = 'cke_image_s' + ( !~factor ? 'w' : 'e' ), nativeEvt, newWidth, newHeight, updateData, moveDiffX, moveDiffY, moveRatio; // Save the undo snapshot first: before resizing. editor.fire( 'saveSnapshot' ); // Mousemove listeners are removed on mouseup. attachToDocuments( 'mousemove', onMouseMove, listeners ); // Clean up the mousemove listener. Update widget data if valid. attachToDocuments( 'mouseup', onMouseUp, listeners ); // The entire editable will have the special cursor while resizing goes on. editable.addClass( cursorClass ); // This is to always keep the resizer element visible while resizing. resizer.addClass( 'cke_image_resizing' ); // Attaches an event to a global document if inline editor. // Additionally, if classic (`iframe`-based) editor, also attaches the same event to `iframe`'s document. function attachToDocuments( name, callback, collection ) { var globalDoc = CKEDITOR.document, listeners = []; if ( !doc.equals( globalDoc ) ) listeners.push( globalDoc.on( name, callback ) ); listeners.push( doc.on( name, callback ) ); if ( collection ) { for ( var i = listeners.length; i--; ) collection.push( listeners.pop() ); } } // Calculate with first, and then adjust height, preserving ratio. function adjustToX() { newWidth = startWidth + factor * moveDiffX; newHeight = Math.round( newWidth / ratio ); } // Calculate height first, and then adjust width, preserving ratio. function adjustToY() { newHeight = startHeight - moveDiffY; newWidth = Math.round( newHeight * ratio ); } // This is how variables refer to the geometry. // Note: x corresponds to moveOffset, this is the position of mouse // Note: o corresponds to [startX, startY]. 
// // +--------------+--------------+ // | | | // | I | II | // | | | // +------------- o -------------+ _ _ _ // | | | ^ // | VI | III | | moveDiffY // | | x _ _ _ _ _ v // +--------------+---------|----+ // | | // <-------> // moveDiffX function onMouseMove( evt ) { nativeEvt = evt.data.$; // This is how far the mouse is from the point the button was pressed. moveDiffX = nativeEvt.screenX - startX; moveDiffY = startY - nativeEvt.screenY; // This is the aspect ratio of the move difference. moveRatio = Math.abs( moveDiffX / moveDiffY ); // Left, center or none-aligned widget. if ( factor == 1 ) { if ( moveDiffX <= 0 ) { // Case: IV. if ( moveDiffY <= 0 ) adjustToX(); // Case: I. else { if ( moveRatio >= ratio ) adjustToX(); else adjustToY(); } } else { // Case: III. if ( moveDiffY <= 0 ) { if ( moveRatio >= ratio ) adjustToY(); else adjustToX(); } // Case: II. else { adjustToY(); } } } // Right-aligned widget. It mirrors behaviours, so I becomes II, // IV becomes III and vice-versa. else { if ( moveDiffX <= 0 ) { // Case: IV. if ( moveDiffY <= 0 ) { if ( moveRatio >= ratio ) adjustToY(); else adjustToX(); } // Case: I. else { adjustToY(); } } else { // Case: III. if ( moveDiffY <= 0 ) adjustToX(); // Case: II. else { if ( moveRatio >= ratio ) { adjustToX(); } else { adjustToY(); } } } } // Don't update attributes if less than 10. // This is to prevent images to visually disappear. if ( newWidth >= 15 && newHeight >= 15 ) { image.setAttributes( { width: newWidth, height: newHeight } ); updateData = true; } else { updateData = false; } } function onMouseUp() { var l; while ( ( l = listeners.pop() ) ) l.removeListener(); // Restore default cursor by removing special class. editable.removeClass( cursorClass ); // This is to bring back the regular behaviour of the resizer. resizer.removeClass( 'cke_image_resizing' ); if ( updateData ) { widget.setData( { width: newWidth, height: newHeight } ); // Save another undo snapshot: after resizing. editor.fire( 'saveSnapshot' ); } // Don't update data twice or more. updateData = false; } } ); // Change the position of the widget resizer when data changes. widget.on( 'data', function() { resizer[ widget.data.align == 'right' ? 'addClass' : 'removeClass' ]( 'cke_image_resizer_left' ); } ); } // Integrates widget alignment setting with justify // plugin's commands (execution and refreshment). // @param {CKEDITOR.editor} editor // @param {String} value 'left', 'right', 'center' or 'block' function alignCommandIntegrator( editor ) { var execCallbacks = [], enabled; return function( value ) { var command = editor.getCommand( 'justify' + value ); // Most likely, the justify plugin isn't loaded. if ( !command ) return; // This command will be manually refreshed along with // other commands after exec. execCallbacks.push( function() { command.refresh( editor, editor.elementPath() ); } ); if ( value in { right: 1, left: 1, center: 1 } ) { command.on( 'exec', function( evt ) { var widget = getFocusedWidget( editor ); if ( widget ) { widget.setData( 'align', value ); // Once the widget changed its align, all the align commands // must be refreshed: the event is to be cancelled. for ( var i = execCallbacks.length; i--; ) execCallbacks[ i ](); evt.cancel(); } } ); } command.on( 'refresh', function( evt ) { var widget = getFocusedWidget( editor ), allowed = { right: 1, left: 1, center: 1 }; if ( !widget ) return; // Cache "enabled" on first use. 
This is because filter#checkFeature may // not be available during plugin's afterInit in the future — a moment when // alignCommandIntegrator is called. if ( enabled === undefined ) enabled = editor.filter.checkFeature( editor.widgets.registered.image.features.align ); // Don't allow justify commands when widget alignment is disabled (#11004). if ( !enabled ) this.setState( CKEDITOR.TRISTATE_DISABLED ); else { this.setState( ( widget.data.align == value ) ? ( CKEDITOR.TRISTATE_ON ) : ( ( value in allowed ) ? CKEDITOR.TRISTATE_OFF : CKEDITOR.TRISTATE_DISABLED ) ); } evt.cancel(); } ); }; } function linkCommandIntegrator( editor ) { // Nothing to integrate with if link is not loaded. if ( !editor.plugins.link ) return; CKEDITOR.on( 'dialogDefinition', function( evt ) { var dialog = evt.data; if ( dialog.name == 'link' ) { var def = dialog.definition; var onShow = def.onShow, onOk = def.onOk; def.onShow = function() { var widget = getFocusedWidget( editor ); // Widget cannot be enclosed in a link, i.e. // <a>foo<inline widget/>bar</a> if ( widget && ( widget.inline ? !widget.wrapper.getAscendant( 'a' ) : 1 ) ) this.setupContent( widget.data.link || {} ); else onShow.apply( this, arguments ); }; // Set widget data if linking the widget using // link dialog (instead of default action). // State shifter handles data change and takes // care of internal DOM structure of linked widget. def.onOk = function() { var widget = getFocusedWidget( editor ); // Widget cannot be enclosed in a link, i.e. // <a>foo<inline widget/>bar</a> if ( widget && ( widget.inline ? !widget.wrapper.getAscendant( 'a' ) : 1 ) ) { var data = {}; // Collect data from fields. this.commitContent( data ); // Set collected data to widget. widget.setData( 'link', data ); } else { onOk.apply( this, arguments ); } }; } } ); // Overwrite default behaviour of unlink command. editor.getCommand( 'unlink' ).on( 'exec', function( evt ) { var widget = getFocusedWidget( editor ); // Override unlink only when link truly belongs to the widget. // If wrapped inline widget in a link, let default unlink work (#11814). if ( !widget || !widget.parts.link ) return; widget.setData( 'link', null ); // Selection (which is fake) may not change if unlinked image in focused widget, // i.e. if captioned image. Let's refresh command state manually here. this.refresh( editor, editor.elementPath() ); evt.cancel(); } ); // Overwrite default refresh of unlink command. editor.getCommand( 'unlink' ).on( 'refresh', function( evt ) { var widget = getFocusedWidget( editor ); if ( !widget ) return; // Note that widget may be wrapped in a link, which // does not belong to that widget (#11814). this.setState( widget.data.link || widget.wrapper.getAscendant( 'a' ) ? CKEDITOR.TRISTATE_OFF : CKEDITOR.TRISTATE_DISABLED ); evt.cancel(); } ); } // Returns the focused widget, if of the type specific for this plugin. // If no widget is focused, `null` is returned. // // @param {CKEDITOR.editor} // @returns {CKEDITOR.plugins.widget} function getFocusedWidget( editor ) { var widget = editor.widgets.focused; if ( widget && widget.name == 'image' ) return widget; return null; } // Returns a set of widget allowedContent rules, depending // on configurations like config#image2_alignClasses or // config#image2_captionedClass. // // @param {CKEDITOR.editor} // @returns {Object} function getWidgetAllowedContent( editor ) { var alignClasses = editor.config.image2_alignClasses, rules = { // Widget may need <div> or <p> centering wrapper. 
div: { match: centerWrapperChecker( editor ) }, p: { match: centerWrapperChecker( editor ) }, img: { attributes: '!src,alt,width,height' }, figure: { classes: '!' + editor.config.image2_captionedClass }, figcaption: true }; if ( alignClasses ) { // Centering class from the config. rules.div.classes = alignClasses[ 1 ]; rules.p.classes = rules.div.classes; // Left/right classes from the config. rules.img.classes = alignClasses[ 0 ] + ',' + alignClasses[ 2 ]; rules.figure.classes += ',' + rules.img.classes; } else { // Centering with text-align. rules.div.styles = 'text-align'; rules.p.styles = 'text-align'; rules.img.styles = 'float'; rules.figure.styles = 'float,display'; } return rules; } // Returns a set of widget feature rules, depending // on editor configuration. Note that the following may not cover // all the possible cases since requiredContent supports a single // tag only. // // @param {CKEDITOR.editor} // @returns {Object} function getWidgetFeatures( editor ) { var alignClasses = editor.config.image2_alignClasses, features = { dimension: { requiredContent: 'img[width,height]' }, align: { requiredContent: 'img' + ( alignClasses ? '(' + alignClasses[ 0 ] + ')' : '{float}' ) }, caption: { requiredContent: 'figcaption' } }; return features; } // Returns element which is styled, considering current // state of the widget. // // @see CKEDITOR.plugins.widget#applyStyle // @param {CKEDITOR.plugins.widget} widget // @returns {CKEDITOR.dom.element} function getStyleableElement( widget ) { return widget.data.hasCaption ? widget.element : widget.parts.image; } } )(); /** * A CSS class applied to the `<figure>` element of a captioned image. * * // Changes the class to "captionedImage". * config.image2_captionedClass = 'captionedImage'; * * @cfg {String} [image2_captionedClass='image'] * @member CKEDITOR.config */ CKEDITOR.config.image2_captionedClass = 'image'; /** * Determines whether dimension inputs should be automatically filled when the image URL changes in the Enhanced Image * plugin dialog window. * * config.image2_prefillDimensions = false; * * @since 4.5 * @cfg {Boolean} [image2_prefillDimensions=true] * @member CKEDITOR.config */ /** * Disables the image resizer. By default the resizer is enabled. * * config.image2_disableResizer = true; * * @since 4.5 * @cfg {Boolean} [image2_disableResizer=false] * @member CKEDITOR.config */ /** * CSS classes applied to aligned images. Useful to take control over the way * the images are aligned, i.e. to customize output HTML and integrate external stylesheets. * * Classes should be defined in an array of three elements, containing left, center, and right * alignment classes, respectively. For example: * * config.image2_alignClasses = [ 'align-left', 'align-center', 'align-right' ]; * * **Note**: Once this configuration option is set, the plugin will no longer produce inline * styles for alignment. It means that e.g. the following HTML will be produced: * * <img alt="My image" class="custom-center-class" src="foo.png" /> * * instead of: * * <img alt="My image" style="float:left" src="foo.png" /> * * **Note**: Once this configuration option is set, corresponding style definitions * must be supplied to the editor: * * * For [classic editor](#!/guide/dev_framed) it can be done by defining additional * styles in the {@link CKEDITOR.config#contentsCss stylesheets loaded by the editor}. The same * styles must be provided on the target page where the content will be loaded. 
* * For [inline editor](#!/guide/dev_inline) the styles can be defined directly * with `<style> ... <style>` or `<link href="..." rel="stylesheet">`, i.e. within the `<head>` * of the page. * * For example, considering the following configuration: * * config.image2_alignClasses = [ 'align-left', 'align-center', 'align-right' ]; * * CSS rules can be defined as follows: * * .align-left { * float: left; * } * * .align-right { * float: right; * } * * .align-center { * text-align: center; * } * * .align-center > figure { * display: inline-block; * } * * @since 4.4 * @cfg {String[]} [image2_alignClasses=null] * @member CKEDITOR.config */
quepasso/dashboard
web/bundles/ivoryckeditor/plugins/image2/plugin.js
JavaScript
mit
58,206
<?php /** * Zend Framework * * LICENSE * * This source file is subject to the new BSD license that is bundled * with this package in the file LICENSE.txt. * It is also available through the world-wide-web at this URL: * http://framework.zend.com/license/new-bsd * If you did not receive a copy of the license and are unable to * obtain it through the world-wide-web, please send an email * to license@zend.com so we can send you a copy immediately. * * @category Zend * @package Zend_Ldap * @subpackage Filter * @copyright Copyright (c) 2005-2015 Zend Technologies USA Inc. (http://www.zend.com) * @license http://framework.zend.com/license/new-bsd New BSD License * @version $Id$ */ /** * @see Zend_Ldap_Filter_Abstract */ #require_once 'Zend/Ldap/Filter/Abstract.php'; /** * @see Zend_Ldap_Filter_String */ #require_once 'Zend/Ldap/Filter/String.php'; /** * Zend_Ldap_Filter_Logical provides a base implementation for a grouping filter. * * @category Zend * @package Zend_Ldap * @subpackage Filter * @copyright Copyright (c) 2005-2015 Zend Technologies USA Inc. (http://www.zend.com) * @license http://framework.zend.com/license/new-bsd New BSD License */ abstract class Zend_Ldap_Filter_Logical extends Zend_Ldap_Filter_Abstract { const TYPE_AND = '&'; const TYPE_OR = '|'; /** * All the sub-filters for this grouping filter. * * @var array */ private $_subfilters; /** * The grouping symbol. * * @var string */ private $_symbol; /** * Creates a new grouping filter. * * @param array $subfilters * @param string $symbol */ protected function __construct(array $subfilters, $symbol) { foreach ($subfilters as $key => $s) { if (is_string($s)) $subfilters[$key] = new Zend_Ldap_Filter_String($s); else if (!($s instanceof Zend_Ldap_Filter_Abstract)) { /** * @see Zend_Ldap_Filter_Exception */ #require_once 'Zend/Ldap/Filter/Exception.php'; throw new Zend_Ldap_Filter_Exception('Only strings or Zend_Ldap_Filter_Abstract allowed.'); } } $this->_subfilters = $subfilters; $this->_symbol = $symbol; } /** * Adds a filter to this grouping filter. * * @param Zend_Ldap_Filter_Abstract $filter * @return Zend_Ldap_Filter_Logical */ public function addFilter(Zend_Ldap_Filter_Abstract $filter) { $new = clone $this; $new->_subfilters[] = $filter; return $new; } /** * Returns a string representation of the filter. * * @return string */ public function toString() { $return = '(' . $this->_symbol; foreach ($this->_subfilters as $sub) $return .= $sub->toString(); $return .= ')'; return $return; } }
hansbonini/cloud9-magento
www/lib/Zend/Ldap/Filter/Logical.php
PHP
mit
2,952
/* * Copyright (C) 2015 Actor LLC. <https://actor.im> */ package im.actor.core; import com.google.j2objc.annotations.ObjectiveCName; import org.jetbrains.annotations.NotNull; import java.util.ArrayList; import im.actor.runtime.mtproto.ConnectionEndpoint; /** * Configuration builder for starting up messenger object */ public class ConfigurationBuilder { private ArrayList<ConnectionEndpoint> endpoints = new ArrayList<ConnectionEndpoint>(); private PhoneBookProvider phoneBookProvider; private boolean enableContactsLogging = false; private boolean enableNetworkLogging = false; private boolean enableFilesLogging = false; private NotificationProvider notificationProvider; private ApiConfiguration apiConfiguration; private AnalyticsProvider analyticsProvider; private PlatformType platformType = PlatformType.GENERIC; private DeviceCategory deviceCategory = DeviceCategory.UNKNOWN; private int minDelay = 100; private int maxDelay = 15000; private int maxFailureCount = 50; /** * Set App Type * * @param platformType App Type * @return this */ @NotNull @ObjectiveCName("setPlatformType:") public ConfigurationBuilder setPlatformType(@NotNull PlatformType platformType) { this.platformType = platformType; return this; } /** * Setting Device Type * * @param deviceCategory Device Type * @return this */ @NotNull @ObjectiveCName("setDeviceCategory:") public ConfigurationBuilder setDeviceCategory(@NotNull DeviceCategory deviceCategory) { this.deviceCategory = deviceCategory; return this; } /** * Set Analytics Provider * * @param analyticsProvider the Analytics Provider * @return this */ @NotNull @ObjectiveCName("setAnalyticsProvider:") public ConfigurationBuilder setAnalyticsProvider(@NotNull AnalyticsProvider analyticsProvider) { this.analyticsProvider = analyticsProvider; return this; } /** * Set API Configuration * * @param apiConfiguration API Configuration * @return this */ @NotNull @ObjectiveCName("setApiConfiguration:") public ConfigurationBuilder setApiConfiguration(@NotNull ApiConfiguration apiConfiguration) { this.apiConfiguration = apiConfiguration; return this; } /** * Set Notification provider * * @param notificationProvider Notification provider * @return this */ @NotNull @ObjectiveCName("setNotificationProvider:") public ConfigurationBuilder setNotificationProvider(@NotNull NotificationProvider notificationProvider) { this.notificationProvider = notificationProvider; return this; } /** * Set Enable contacts logging * * @param enableContactsLogging Enable contacts logging flag * @return this */ @NotNull @ObjectiveCName("setEnableContactsLogging:") public ConfigurationBuilder setEnableContactsLogging(boolean enableContactsLogging) { this.enableContactsLogging = enableContactsLogging; return this; } /** * Set Enable Network logging * * @param enableNetworkLogging Enable network logging * @return this */ @NotNull @ObjectiveCName("setEnableNetworkLogging:") public ConfigurationBuilder setEnableNetworkLogging(boolean enableNetworkLogging) { this.enableNetworkLogging = enableNetworkLogging; return this; } /** * Set Enable file operations logging * * @param enableFilesLogging Enable files logging * @return this */ @NotNull @ObjectiveCName("setEnableFilesLogging:") public ConfigurationBuilder setEnableFilesLogging(boolean enableFilesLogging) { this.enableFilesLogging = enableFilesLogging; return this; } /** * Set Phone Book provider * * @param phoneBookProvider phone book provider * @return this */ @NotNull @ObjectiveCName("setPhoneBookProvider:") public ConfigurationBuilder setPhoneBookProvider(@NotNull 
PhoneBookProvider phoneBookProvider) { this.phoneBookProvider = phoneBookProvider; return this; } /** * Set min backoff delay * * @param minDelay min connection exponential backoff delay * @return this */ @ObjectiveCName("setMinDelay:") public ConfigurationBuilder setMinDelay(int minDelay) { this.minDelay = minDelay; return this; } /** * Set max backoff delay * * @param maxDelay max connection exponential backoff delay * @return this */ @ObjectiveCName("setMaxDelay:") public ConfigurationBuilder setMaxDelay(int maxDelay) { this.maxDelay = maxDelay; return this; } /** * Set max connection exponential backoff failure count * * @param maxFailureCount max connection exponential backoff failure count * @return this */ @ObjectiveCName("setMaxFailureCount:") public ConfigurationBuilder setMaxFailureCount(int maxFailureCount) { this.maxFailureCount = maxFailureCount; return this; } /** * Adding Endpoint for API * Valid URLs are: * tcp://[host]:[port] * tls://[host]:[port] * ws://[host]:[port] * wss://[host]:[port] * * @param url endpoint url * @return this */ @NotNull @ObjectiveCName("addEndpoint:") public ConfigurationBuilder addEndpoint(@NotNull String url) { // Manual baggy parsing for GWT // TODO: Correct URL parsing String scheme = url.substring(0, url.indexOf(":")).toLowerCase(); String host = url.substring(url.indexOf("://") + "://".length()); if (host.endsWith("/")) { host = host.substring(0, host.length() - 1); } int port = -1; if (host.contains(":")) { String[] parts = host.split(":"); host = parts[0]; port = Integer.parseInt(parts[1]); } if (scheme.equals("ssl") || scheme.equals("tls")) { if (port <= 0) { port = 443; } endpoints.add(new ConnectionEndpoint(host, port, ConnectionEndpoint.Type.TCP_TLS)); } else if (scheme.equals("tcp")) { if (port <= 0) { port = 80; } endpoints.add(new ConnectionEndpoint(host, port, ConnectionEndpoint.Type.TCP)); } else if (scheme.equals("ws")) { if (port <= 0) { port = 80; } endpoints.add(new ConnectionEndpoint(host, port, ConnectionEndpoint.Type.WS)); } else if (scheme.equals("wss")) { if (port <= 0) { port = 443; } endpoints.add(new ConnectionEndpoint(host, port, ConnectionEndpoint.Type.WS_TLS)); } else { throw new RuntimeException("Unknown scheme type: " + scheme); } return this; } /** * Build configuration * * @return result configuration */ @NotNull @ObjectiveCName("build") public Configuration build() { if (endpoints.size() == 0) { throw new RuntimeException("Endpoints not set"); } if (phoneBookProvider == null) { throw new RuntimeException("Phonebook Provider not set"); } if (apiConfiguration == null) { throw new RuntimeException("Api Configuration not set"); } if (deviceCategory == null) { throw new RuntimeException("Device Category not set"); } if (platformType == null) { throw new RuntimeException("App Category not set"); } return new Configuration(endpoints.toArray(new ConnectionEndpoint[endpoints.size()]), phoneBookProvider, notificationProvider, apiConfiguration, enableContactsLogging, enableNetworkLogging, enableFilesLogging, analyticsProvider, deviceCategory, platformType, minDelay, maxDelay, maxFailureCount); } }
luoxiaoshenghustedu/actor-platform
actor-apps/core/src/main/java/im/actor/core/ConfigurationBuilder.java
Java
mit
8,199
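A hedged, self-contained Java sketch of the scheme/default-port rule that ConfigurationBuilder.addEndpoint applies above (443 for tls/ssl/wss, 80 for tcp/ws); the class name and host below are illustrative, not part of the Actor SDK:

import java.util.Locale;

// Illustrative restatement of addEndpoint's scheme handling; EndpointSketch
// and example.host are hypothetical names used only for this demo.
public class EndpointSketch {

    static int defaultPort(String scheme) {
        switch (scheme) {
            case "tls":
            case "ssl":
            case "wss":
                return 443;   // TLS-style schemes default to 443
            case "tcp":
            case "ws":
                return 80;    // plain schemes default to 80
            default:
                throw new RuntimeException("Unknown scheme type: " + scheme);
        }
    }

    public static void main(String[] args) {
        String url = "tls://example.host";                    // hypothetical endpoint
        String scheme = url.substring(0, url.indexOf(":")).toLowerCase(Locale.ROOT);
        String host = url.substring(url.indexOf("://") + 3);
        int port = host.contains(":")
                ? Integer.parseInt(host.split(":")[1])        // explicit port wins
                : defaultPort(scheme);                        // otherwise scheme default
        System.out.println(scheme + " -> " + host.split(":")[0] + ":" + port);
    }
}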
using Microsoft.IdentityModel.Clients.ActiveDirectory; using Office365Api.Graph.Simple.MailAndFiles.Helpers; using Office365Api.Graph.Simple.MailAndFiles.Models; using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using System.Web; using System.Web.Mvc; namespace Office365Api.Graph.Simple.MailAndFiles.Controllers { public class HomeController : Controller { // The URL that auth should redirect to after a successful login. Uri loginRedirectUri => new Uri(Url.Action(nameof(Authorize), "Home", null, Request.Url.Scheme)); // The URL to redirect to after a logout. Uri logoutRedirectUri => new Uri(Url.Action(nameof(Index), "Home", null, Request.Url.Scheme)); public ActionResult Index() { // Let's get the user details from the session, stored when user was signed in. if (Session[Helpers.SessionKeys.Login.UserInfo] != null) { ViewBag.Name = (Session[Helpers.SessionKeys.Login.UserInfo] as UserInformation).Name; } return View(); } public ActionResult Logout() { Session.Clear(); return Redirect(Settings.LogoutAuthority + logoutRedirectUri.ToString()); } public ActionResult Login() { if (string.IsNullOrEmpty(Settings.ClientId) || string.IsNullOrEmpty(Settings.ClientSecret)) { ViewBag.Message = "Please set your client ID and client secret in the Web.config file"; return View(); } var authContext = new AuthenticationContext(Settings.AzureADAuthority); // Generate the parameterized URL for Azure login. Uri authUri = authContext.GetAuthorizationRequestURL( Settings.O365UnifiedAPIResource, Settings.ClientId, loginRedirectUri, UserIdentifier.AnyUser, null); // Redirect the browser to the login page, then come back to the Authorize method below. return Redirect(authUri.ToString()); } public async Task<ActionResult> Authorize() { var authContext = new AuthenticationContext(Settings.AzureADAuthority); // Get the token. var authResult = await authContext.AcquireTokenByAuthorizationCodeAsync( Request.Params["code"], // the auth 'code' parameter from the Azure redirect. loginRedirectUri, // same redirectUri as used before in Login method. new ClientCredential(Settings.ClientId, Settings.ClientSecret), // use the client ID and secret to establish app identity. Settings.O365UnifiedAPIResource); // Save the token in the session. Session[SessionKeys.Login.AccessToken] = authResult.AccessToken; // Get info about the current logged in user. Session[SessionKeys.Login.UserInfo] = await GraphHelper.GetUserInfoAsync(authResult.AccessToken); return RedirectToAction(nameof(Index), "PersonalData"); } } }
yagoto/PnP
Samples/MicrosoftGraph.Office365.Simple.MailAndFiles/Office365Api.Graph.Simple.MailAndFiles/Controllers/HomeController.cs
C#
mit
3,267
from random import shuffle def bogosort(arr): while not sorted(arr) == arr: shuffle(arr) return arr
warreee/Algorithm-Implementations
Bogosort/Python/jcla1/bogosort.py
Python
mit
116
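The bogosort entry above is the classic shuffle-until-sorted joke algorithm; a minimal Java restatement (illustrative only, not part of the repository above) looks like this:

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

// Illustrative Java bogosort: keep shuffling until the list happens to be sorted.
final class BogosortSketch {

    static <T extends Comparable<T>> void bogosort(List<T> list) {
        while (!isSorted(list)) {
            Collections.shuffle(list);
        }
    }

    private static <T extends Comparable<T>> boolean isSorted(List<T> list) {
        for (int i = 1; i < list.size(); i++) {
            if (list.get(i - 1).compareTo(list.get(i)) > 0) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        List<Integer> values = Arrays.asList(3, 1, 2);
        bogosort(values);                       // expected average cost is O(n * n!)
        System.out.println(values);             // [1, 2, 3]
    }
}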
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. /*============================================================================= ** ** ** ** Purpose: The exception class used when there is insufficient execution stack ** to allow most Framework methods to execute. ** ** =============================================================================*/ namespace System { using System; using System.Runtime.Serialization; [Serializable] public sealed class InsufficientExecutionStackException : SystemException { public InsufficientExecutionStackException() : base(Environment.GetResourceString("Arg_InsufficientExecutionStackException")) { SetErrorCode(__HResults.COR_E_INSUFFICIENTEXECUTIONSTACK); } public InsufficientExecutionStackException(String message) : base(message) { SetErrorCode(__HResults.COR_E_INSUFFICIENTEXECUTIONSTACK); } public InsufficientExecutionStackException(String message, Exception innerException) : base(message, innerException) { SetErrorCode(__HResults.COR_E_INSUFFICIENTEXECUTIONSTACK); } private InsufficientExecutionStackException(SerializationInfo info, StreamingContext context) : base(info, context) { } } }
bartonjs/coreclr
src/mscorlib/src/System/InsufficientExecutionStackException.cs
C#
mit
1,547
/* -*- Mode:C++; c-file-style:"gnu"; indent-tabs-mode:nil; -*- */ // // Copyright (c) 2009 INESC Porto // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License version 2 as // published by the Free Software Foundation; // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program; if not, write to the Free Software // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA // // Author: Pedro Fortuna <pedro.fortuna@inescporto.pt> <pedro.fortuna@gmail.com> // #include "ns3/histogram.h" #include "ns3/test.h" using namespace ns3; /** * \ingroup flow-monitor * \defgroup flow-monitor-test FlowMonitor module tests */ /** * \ingroup flow-monitor-test * \ingroup tests * * \brief FlowMonitor Histogram Test */ class HistogramTestCase : public ns3::TestCase { private: public: HistogramTestCase (); virtual void DoRun (void); }; HistogramTestCase::HistogramTestCase () : ns3::TestCase ("Histogram") { } void HistogramTestCase::DoRun (void) { Histogram h0 (3.5); // Testing floating-point bin widths { for (int i=1; i <= 10; i++) { h0.AddValue (3.4); } for (int i=1; i <= 5; i++) { h0.AddValue (3.6); } NS_TEST_EXPECT_MSG_EQ_TOL (h0.GetBinWidth (0), 3.5, 1e-6, ""); NS_TEST_EXPECT_MSG_EQ (h0.GetNBins (), 2, ""); NS_TEST_EXPECT_MSG_EQ_TOL (h0.GetBinStart (1), 3.5, 1e-6, ""); NS_TEST_EXPECT_MSG_EQ (h0.GetBinCount (0), 10, ""); NS_TEST_EXPECT_MSG_EQ (h0.GetBinCount (1), 5, ""); } { // Testing bin expansion h0.AddValue (74.3); NS_TEST_EXPECT_MSG_EQ (h0.GetNBins (), 22, ""); NS_TEST_EXPECT_MSG_EQ (h0.GetBinCount (21), 1, ""); } } /** * \ingroup flow-monitor-test * \ingroup tests * * \brief FlowMonitor Histogram TestSuite */ class HistogramTestSuite : public TestSuite { public: HistogramTestSuite (); }; HistogramTestSuite::HistogramTestSuite () : TestSuite ("histogram", UNIT) { AddTestCase (new HistogramTestCase, TestCase::QUICK); } static HistogramTestSuite g_HistogramTestSuite; //!< Static variable for test initialization
Viyom/Implementation-of-TCP-Delayed-Congestion-Response--DCR--in-ns-3
src/flow-monitor/test/histogram-test-suite.cc
C++
gpl-2.0
2,452
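The ns-3 test above depends on Histogram mapping a value v to bin floor(v / binWidth) and growing the bin array on demand (74.3 / 3.5 ≈ 21.2, hence bin 21 and 22 bins total). A small, hedged Java equivalent of that behaviour, not tied to ns-3:

import java.util.ArrayList;
import java.util.List;

// Minimal fixed-bin-width histogram with on-demand bin expansion,
// mirroring the behaviour the HistogramTestCase above exercises.
final class HistogramSketch {
    private final double binWidth;
    private final List<Integer> counts = new ArrayList<>();

    HistogramSketch(double binWidth) {
        this.binWidth = binWidth;
    }

    void addValue(double value) {
        int bin = (int) Math.floor(value / binWidth);
        while (counts.size() <= bin) {
            counts.add(0);                       // expand with empty bins as needed
        }
        counts.set(bin, counts.get(bin) + 1);
    }

    int getNBins()           { return counts.size(); }
    int getBinCount(int bin) { return counts.get(bin); }

    public static void main(String[] args) {
        HistogramSketch h = new HistogramSketch(3.5);
        h.addValue(3.4);                         // bin 0
        h.addValue(3.6);                         // bin 1
        h.addValue(74.3);                        // expands to 22 bins
        System.out.println(h.getNBins());        // 22
    }
}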
<?php // This file is part of Moodle - http://moodle.org/ // // Moodle is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // Moodle is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with Moodle. If not, see <http://www.gnu.org/licenses/>. require_once '../../../config.php'; require_once $CFG->dirroot.'/grade/export/lib.php'; require_once 'grade_export_ods.php'; $id = required_param('id', PARAM_INT); // course id $groupid = optional_param('groupid', 0, PARAM_INT); $itemids = required_param('itemids', PARAM_RAW); $export_feedback = optional_param('export_feedback', 0, PARAM_BOOL); $updatedgradesonly = optional_param('updatedgradesonly', false, PARAM_BOOL); $displaytype = optional_param('displaytype', $CFG->grade_export_displaytype, PARAM_INT); $decimalpoints = optional_param('decimalpoints', $CFG->grade_export_decimalpoints, PARAM_INT); if (!$course = $DB->get_record('course', array('id'=>$id))) { print_error('nocourseid'); } require_login($course); $context = get_context_instance(CONTEXT_COURSE, $id); require_capability('moodle/grade:export', $context); require_capability('gradeexport/ods:view', $context); if (groups_get_course_groupmode($COURSE) == SEPARATEGROUPS and !has_capability('moodle/site:accessallgroups', $context)) { if (!groups_is_member($groupid, $USER->id)) { print_error('cannotaccessgroup', 'grades'); } } // print all the exported data here $export = new grade_export_ods($course, $groupid, $itemids, $export_feedback, $updatedgradesonly, $displaytype, $decimalpoints); $export->print_grades();
dhamma-dev/SEA
web/grade/export/ods/export.php
PHP
gpl-3.0
2,057
/* * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.cloudformation.model; /** * Resource Status */ public enum ResourceStatus { CREATE_IN_PROGRESS("CREATE_IN_PROGRESS"), CREATE_FAILED("CREATE_FAILED"), CREATE_COMPLETE("CREATE_COMPLETE"), DELETE_IN_PROGRESS("DELETE_IN_PROGRESS"), DELETE_FAILED("DELETE_FAILED"), DELETE_COMPLETE("DELETE_COMPLETE"), DELETE_SKIPPED("DELETE_SKIPPED"), UPDATE_IN_PROGRESS("UPDATE_IN_PROGRESS"), UPDATE_FAILED("UPDATE_FAILED"), UPDATE_COMPLETE("UPDATE_COMPLETE"); private String value; private ResourceStatus(String value) { this.value = value; } @Override public String toString() { return this.value; } /** * Use this in place of valueOf. * * @param value * real value * @return ResourceStatus corresponding to the value */ public static ResourceStatus fromValue(String value) { if (value == null || "".equals(value)) { throw new IllegalArgumentException("Value cannot be null or empty!"); } else if ("CREATE_IN_PROGRESS".equals(value)) { return ResourceStatus.CREATE_IN_PROGRESS; } else if ("CREATE_FAILED".equals(value)) { return ResourceStatus.CREATE_FAILED; } else if ("CREATE_COMPLETE".equals(value)) { return ResourceStatus.CREATE_COMPLETE; } else if ("DELETE_IN_PROGRESS".equals(value)) { return ResourceStatus.DELETE_IN_PROGRESS; } else if ("DELETE_FAILED".equals(value)) { return ResourceStatus.DELETE_FAILED; } else if ("DELETE_COMPLETE".equals(value)) { return ResourceStatus.DELETE_COMPLETE; } else if ("DELETE_SKIPPED".equals(value)) { return ResourceStatus.DELETE_SKIPPED; } else if ("UPDATE_IN_PROGRESS".equals(value)) { return ResourceStatus.UPDATE_IN_PROGRESS; } else if ("UPDATE_FAILED".equals(value)) { return ResourceStatus.UPDATE_FAILED; } else if ("UPDATE_COMPLETE".equals(value)) { return ResourceStatus.UPDATE_COMPLETE; } else { throw new IllegalArgumentException("Cannot create enum from " + value + " value!"); } } }
xuzha/aws-sdk-java
aws-java-sdk-cloudformation/src/main/java/com/amazonaws/services/cloudformation/model/ResourceStatus.java
Java
apache-2.0
2,846
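A short, hypothetical usage sketch of the fromValue factory defined above: known strings map to enum constants, while unknown or empty strings raise IllegalArgumentException.

import com.amazonaws.services.cloudformation.model.ResourceStatus;

// Illustrative driver for ResourceStatus.fromValue; ResourceStatusDemo is a
// hypothetical class name, not part of the AWS SDK.
public class ResourceStatusDemo {
    public static void main(String[] args) {
        ResourceStatus ok = ResourceStatus.fromValue("CREATE_COMPLETE");
        System.out.println(ok);                          // CREATE_COMPLETE
        try {
            ResourceStatus.fromValue("NOT_A_STATUS");    // not handled by this enum
        } catch (IllegalArgumentException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}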
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.math3.optimization.general; import org.apache.commons.math3.random.RandomGenerator; import org.apache.commons.math3.random.Well44497b; import org.apache.commons.math3.util.MathUtils; import org.apache.commons.math3.util.FastMath; import org.apache.commons.math3.distribution.RealDistribution; import org.apache.commons.math3.distribution.UniformRealDistribution; import org.apache.commons.math3.distribution.NormalDistribution; import org.apache.commons.math3.geometry.euclidean.twod.Vector2D; /** * Factory for generating a cloud of points that approximate a circle. */ public class RandomCirclePointGenerator { /** RNG for the x-coordinate of the center. */ private final RealDistribution cX; /** RNG for the y-coordinate of the center. */ private final RealDistribution cY; /** RNG for the parametric position of the point. */ private final RealDistribution tP; /** Radius of the circle. */ private final double radius; /** * @param x Abscissa of the circle center. * @param y Ordinate of the circle center. * @param radius Radius of the circle. * @param xSigma Error on the x-coordinate of the circumference points. * @param ySigma Error on the y-coordinate of the circumference points. * @param seed RNG seed. */ public RandomCirclePointGenerator(double x, double y, double radius, double xSigma, double ySigma, long seed) { final RandomGenerator rng = new Well44497b(seed); this.radius = radius; cX = new NormalDistribution(rng, x, xSigma, NormalDistribution.DEFAULT_INVERSE_ABSOLUTE_ACCURACY); cY = new NormalDistribution(rng, y, ySigma, NormalDistribution.DEFAULT_INVERSE_ABSOLUTE_ACCURACY); tP = new UniformRealDistribution(rng, 0, MathUtils.TWO_PI, UniformRealDistribution.DEFAULT_INVERSE_ABSOLUTE_ACCURACY); } /** * Point generator. * * @param n Number of points to create. * @return the cloud of {@code n} points. */ public Vector2D[] generate(int n) { final Vector2D[] cloud = new Vector2D[n]; for (int i = 0; i < n; i++) { cloud[i] = create(); } return cloud; } /** * Create one point. * * @return a point. */ private Vector2D create() { final double t = tP.sample(); final double pX = cX.sample() + radius * FastMath.cos(t); final double pY = cY.sample() + radius * FastMath.sin(t); return new Vector2D(pX, pY); } }
tknandu/CommonsMath_Modifed
math (trunk)/src/test/java/org/apache/commons/math3/optimization/general/RandomCirclePointGenerator.java
Java
apache-2.0
3,637
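A brief usage sketch of the generator above, using only the constructor and generate method it defines (all numeric values here are arbitrary illustrations):

import org.apache.commons.math3.geometry.euclidean.twod.Vector2D;
import org.apache.commons.math3.optimization.general.RandomCirclePointGenerator;

// Illustrative driver: 100 noisy points around a circle of radius 2 centred
// at (1, 1), with small per-coordinate noise and a fixed seed.
public class CirclePointsDemo {
    public static void main(String[] args) {
        RandomCirclePointGenerator generator =
                new RandomCirclePointGenerator(1.0, 1.0, 2.0, 0.05, 0.05, 42L);
        Vector2D[] cloud = generator.generate(100);
        System.out.println("first point: " + cloud[0]);
    }
}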
# Copyright 2011 James McCauley # Copyright 2008 (C) Nicira, Inc. # # This file is part of POX. # # POX is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # POX is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with POX. If not, see <http://www.gnu.org/licenses/>. # This file is derived from the packet library in NOX, which was # developed by Nicira, Inc. #====================================================================== # # UDP Header Format # # 0 7 8 15 16 23 24 31 # +--------+--------+--------+--------+ # | Source | Destination | # | Port | Port | # +--------+--------+--------+--------+ # | | | # | Length | Checksum | # +--------+--------+--------+--------+ # | # | data octets ... # +---------------- ... #====================================================================== import struct from packet_utils import * from dhcp import * from dns import * from rip import * from packet_base import packet_base # We grab ipv4 later to prevent cyclic dependency #_ipv4 = None class udp(packet_base): "UDP packet struct" MIN_LEN = 8 def __init__(self, raw=None, prev=None, **kw): #global _ipv4 #if not _ipv4: # from ipv4 import ipv4 # _ipv4 = ipv4 packet_base.__init__(self) self.prev = prev self.srcport = 0 self.dstport = 0 self.len = 8 self.csum = 0 if raw is not None: self.parse(raw) self._init(kw) def __str__(self): s = '[UDP %s>%s l:%s c:%02x]' % (self.srcport, self.dstport, self.len, self.csum) return s def parse(self, raw): assert isinstance(raw, bytes) self.raw = raw dlen = len(raw) if dlen < udp.MIN_LEN: self.msg('(udp parse) warning UDP packet data too short to parse header: data len %u' % dlen) return (self.srcport, self.dstport, self.len, self.csum) \ = struct.unpack('!HHHH', raw[:udp.MIN_LEN]) self.hdr_len = udp.MIN_LEN self.payload_len = self.len - self.hdr_len self.parsed = True if self.len < udp.MIN_LEN: self.msg('(udp parse) warning invalid UDP len %u' % self.len) return if (self.dstport == dhcp.SERVER_PORT or self.dstport == dhcp.CLIENT_PORT): self.next = dhcp(raw=raw[udp.MIN_LEN:],prev=self) elif (self.dstport == dns.SERVER_PORT or self.srcport == dns.SERVER_PORT): self.next = dns(raw=raw[udp.MIN_LEN:],prev=self) elif ( (self.dstport == rip.RIP_PORT or self.srcport == rip.RIP_PORT) ): # and isinstance(self.prev, _ipv4) # and self.prev.dstip == rip.RIP2_ADDRESS ): self.next = rip(raw=raw[udp.MIN_LEN:],prev=self) elif dlen < self.len: self.msg('(udp parse) warning UDP packet data shorter than UDP len: %u < %u' % (dlen, self.len)) return else: self.payload = raw[udp.MIN_LEN:] def hdr(self, payload): self.len = len(payload) + udp.MIN_LEN self.csum = self.checksum() return struct.pack('!HHHH', self.srcport, self.dstport, self.len, self.csum) def checksum(self, unparsed=False): """ Calculates the checksum. If unparsed, calculates it on the raw, unparsed data. This is useful for validating that it is correct on an incoming packet. 
""" if self.prev.__class__.__name__ != 'ipv4': self.msg('packet not in ipv4, cannot calculate checksum ' + 'over psuedo-header' ) return 0 if unparsed: payload_len = len(self.raw) payload = self.raw else: if isinstance(self.next, packet_base): payload = self.next.pack() elif self.next is None: payload = bytes() else: payload = self.next payload_len = udp.MIN_LEN + len(payload) ippacket = struct.pack('!IIBBH', self.prev.srcip.toUnsigned(), self.prev.dstip.toUnsigned(), 0, self.prev.protocol, payload_len) if not unparsed: myhdr = struct.pack('!HHHH', self.srcport, self.dstport, payload_len, 0) payload = myhdr + payload r = checksum(ippacket + payload, 0, 9) return 0xffff if r == 0 else r
srijanmishra/RouteFlow
pox/pox/lib/packet/udp.py
Python
apache-2.0
5,386
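The checksum method above sums an IPv4 pseudo-header plus the UDP header and payload using the standard one's-complement Internet checksum. The 16-bit folding step, restated as a self-contained Java helper (illustrative, not part of POX):

// Illustrative one's-complement (Internet) checksum over a byte array; the
// same 16-bit folding underlies the pseudo-header sum computed above.
public class InternetChecksumSketch {

    static int checksum(byte[] data) {
        long sum = 0;
        for (int i = 0; i < data.length; i += 2) {
            int word = (data[i] & 0xff) << 8;              // high byte
            if (i + 1 < data.length) {
                word |= (data[i + 1] & 0xff);              // low byte (pad with 0 if odd length)
            }
            sum += word;
        }
        while ((sum >> 16) != 0) {                         // fold carries back into 16 bits
            sum = (sum & 0xffff) + (sum >> 16);
        }
        return (int) (~sum & 0xffff);                      // one's complement
    }

    public static void main(String[] args) {
        byte[] sample = {0x45, 0x00, 0x00, 0x1c};          // arbitrary sample bytes
        System.out.printf("0x%04x%n", checksum(sample));
    }
}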
cask "air-connect" do version "2.0.1,26526" sha256 "e8f93fbcb626241f9cbe0f934cf9dada319f3f80399ec83558aa696988575b2a" url "https://www.avatron.com/updates/software/airconnect_mac/acmac#{version.before_comma.no_dots}.zip" name "Air Connect" homepage "https://avatron.com/get-air-connect/" livecheck do url "https://avatron.com/updates/software/airconnect_mac/appcast.xml" strategy :sparkle end app "Air Connect.app" end
stephenwade/homebrew-cask
Casks/air-connect.rb
Ruby
bsd-2-clause
446
// Copyright 2017 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package iface_b import "testshared/iface_i" //go:noinline func F() interface{} { return (*iface_i.T)(nil) } //go:noinline func G() iface_i.I { return (*iface_i.T)(nil) }
akutz/go
misc/cgo/testshared/testdata/iface_b/b.go
Go
bsd-3-clause
335
// Copyright (c) 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/chromeos/login/hwid_checker.h" #include <cstdio> #include "base/chromeos/chromeos_version.h" #include "base/command_line.h" #include "base/logging.h" #include "base/strings/string_util.h" #include "chrome/browser/chromeos/system/statistics_provider.h" #include "chrome/common/chrome_switches.h" #include "chromeos/chromeos_switches.h" #include "third_party/re2/re2/re2.h" #include "third_party/zlib/zlib.h" namespace { unsigned CalculateCRC32(const std::string& data) { return static_cast<unsigned>(crc32( 0, reinterpret_cast<const Bytef*>(data.c_str()), data.length())); } std::string CalculateHWIDv2Checksum(const std::string& data) { unsigned crc32 = CalculateCRC32(data); // We take four least significant decimal digits of CRC-32. char checksum[5]; int snprintf_result = snprintf(checksum, 5, "%04u", crc32 % 10000); LOG_ASSERT(snprintf_result == 4); return checksum; } bool IsCorrectHWIDv2(const std::string& hwid) { std::string body; std::string checksum; if (!RE2::FullMatch(hwid, "([\\s\\S]*) (\\d{4})", &body, &checksum)) return false; return CalculateHWIDv2Checksum(body) == checksum; } bool IsExceptionalHWID(const std::string& hwid) { return RE2::PartialMatch(hwid, "^(SPRING [A-D])|(FALCO A)"); } std::string CalculateExceptionalHWIDChecksum(const std::string& data) { static const char base32_alphabet[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567"; unsigned crc32 = CalculateCRC32(data); // We take 10 least significant bits of CRC-32 and encode them in 2 characters // using Base32 alphabet. std::string checksum; checksum += base32_alphabet[(crc32 >> 5) & 0x1f]; checksum += base32_alphabet[crc32 & 0x1f]; return checksum; } bool IsCorrectExceptionalHWID(const std::string& hwid) { if (!IsExceptionalHWID(hwid)) return false; std::string bom; if (!RE2::FullMatch(hwid, "[A-Z0-9]+ ((?:[A-Z2-7]{4}-)*[A-Z2-7]{1,4})", &bom)) return false; if (bom.length() < 2) return false; std::string hwid_without_dashes; RemoveChars(hwid, "-", &hwid_without_dashes); LOG_ASSERT(hwid_without_dashes.length() >= 2); std::string not_checksum = hwid_without_dashes.substr(0, hwid_without_dashes.length() - 2); std::string checksum = hwid_without_dashes.substr(hwid_without_dashes.length() - 2); return CalculateExceptionalHWIDChecksum(not_checksum) == checksum; } std::string CalculateHWIDv3Checksum(const std::string& data) { static const char base8_alphabet[] = "23456789"; static const char base32_alphabet[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567"; unsigned crc32 = CalculateCRC32(data); // We take 8 least significant bits of CRC-32 and encode them in 2 characters. 
std::string checksum; checksum += base8_alphabet[(crc32 >> 5) & 0x7]; checksum += base32_alphabet[crc32 & 0x1f]; return checksum; } bool IsCorrectHWIDv3(const std::string& hwid) { if (IsExceptionalHWID(hwid)) return false; std::string regex = "([A-Z0-9]+ (?:[A-Z2-7][2-9][A-Z2-7]-)*[A-Z2-7])([2-9][A-Z2-7])"; std::string not_checksum, checksum; if (!RE2::FullMatch(hwid, regex, &not_checksum, &checksum)) return false; RemoveChars(not_checksum, "-", &not_checksum); return CalculateHWIDv3Checksum(not_checksum) == checksum; } } // anonymous namespace namespace chromeos { bool IsHWIDCorrect(const std::string& hwid) { return IsCorrectHWIDv2(hwid) || IsCorrectExceptionalHWID(hwid) || IsCorrectHWIDv3(hwid); } bool IsMachineHWIDCorrect() { #if !defined(GOOGLE_CHROME_BUILD) return true; #endif CommandLine* cmd_line = CommandLine::ForCurrentProcess(); if (cmd_line->HasSwitch(::switches::kTestType) || cmd_line->HasSwitch(chromeos::switches::kSkipHWIDCheck)) return true; if (!base::chromeos::IsRunningOnChromeOS()) return true; std::string hwid; chromeos::system::StatisticsProvider* stats = chromeos::system::StatisticsProvider::GetInstance(); if (!stats->GetMachineStatistic(chromeos::system::kHardwareClass, &hwid)) { LOG(ERROR) << "Couldn't get machine statistic 'hardware_class'."; return false; } if (!chromeos::IsHWIDCorrect(hwid)) { LOG(ERROR) << "Machine has malformed HWID '" << hwid << "'."; return false; } return true; } } // namespace chromeos
windyuuy/opera
chromium/src/chrome/browser/chromeos/login/hwid_checker.cc
C++
bsd-3-clause
4,448
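CalculateHWIDv2Checksum above keeps the four least significant decimal digits of a CRC-32 over the HWID body. A hedged Java illustration of that digit extraction, using java.util.zip.CRC32 rather than zlib:

import java.nio.charset.StandardCharsets;
import java.util.zip.CRC32;

// Illustrative HWIDv2-style checksum: CRC-32 of the body, then the four least
// significant decimal digits, zero-padded (the "%04u" in the C++ above).
public class HwidV2ChecksumSketch {

    static String checksum(String body) {
        CRC32 crc = new CRC32();
        crc.update(body.getBytes(StandardCharsets.UTF_8));
        return String.format("%04d", crc.getValue() % 10000);
    }

    public static void main(String[] args) {
        System.out.println(checksum("SOME HWID BODY"));   // hypothetical input
    }
}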
export { VolumeMuteFilled16 as default } from "../../";
markogresak/DefinitelyTyped
types/carbon__icons-react/es/volume--mute--filled/16.d.ts
TypeScript
mit
56
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System; using System.Collections.Generic; using System.Linq; using System.Linq.Expressions; using System.Runtime.CompilerServices; using Tests.ExpressionCompiler; namespace Tests.ExpressionCompiler { public interface I { void M(); } public class C : IEquatable<C>, I { void I.M() { } public override bool Equals(object o) { return o is C && Equals((C)o); } public bool Equals(C c) { return c != null; } public override int GetHashCode() { return 0; } } public class D : C, IEquatable<D> { public int Val; public string S; public D() { } public D(int val) : this(val, "") { } public D(int val, string s) { Val = val; S = s; } public override bool Equals(object o) { return o is D && Equals((D)o); } public bool Equals(D d) { return d != null && d.Val == Val; } public override int GetHashCode() { return Val; } } public enum E { A = 1, B = 2 } public enum El : long { A, B, C } public struct S : IEquatable<S> { public override bool Equals(object o) { return (o is S) && Equals((S)o); } public bool Equals(S other) { return true; } public override int GetHashCode() { return 0; } } public struct Sp : IEquatable<Sp> { public Sp(int i, double d) { I = i; D = d; } public int I; public double D; public override bool Equals(object o) { return (o is Sp) && Equals((Sp)o); } public bool Equals(Sp other) { return other.I == I && other.D.Equals(D); } public override int GetHashCode() { return I.GetHashCode() ^ D.GetHashCode(); } } public struct Ss : IEquatable<Ss> { public Ss(S s) { Val = s; } public S Val; public override bool Equals(object o) { return (o is Ss) && Equals((Ss)o); } public bool Equals(Ss other) { return other.Val.Equals(Val); } public override int GetHashCode() { return Val.GetHashCode(); } } public struct Sc : IEquatable<Sc> { public Sc(string s) { S = s; } public string S; public override bool Equals(object o) { return (o is Sc) && Equals((Sc)o); } public bool Equals(Sc other) { return other.S == S; } public override int GetHashCode() { return S.GetHashCode(); } } public struct Scs : IEquatable<Scs> { public Scs(string s, S val) { S = s; Val = val; } public string S; public S Val; public override bool Equals(object o) { return (o is Scs) && Equals((Scs)o); } public bool Equals(Scs other) { return other.S == S && other.Val.Equals(Val); } public override int GetHashCode() { return S.GetHashCode() ^ Val.GetHashCode(); } } public class BaseClass { } public class FC { public int II; public static int SI; public const int CI = 42; public static readonly int RI = 42; } public struct FS { public int II; public static int SI; public const int CI = 42; public static readonly int RI = 42; } public class PC { public int II { get; set; } public static int SI { get; set; } public int this[int i] { get { return 1; } set { } } } public struct PS { public int II { get; set; } public static int SI { get; set; } } }
comdiv/corefx
src/System.Linq.Expressions/tests/HelperTypes.cs
C#
mit
4,525
<?php require_once('HTML/QuickForm/submit.php'); /** * HTML class for a submit type element * * @author Jamie Pratt * @access public */ class MoodleQuickForm_cancel extends MoodleQuickForm_submit { // {{{ constructor /** * Class constructor * * @since 1.0 * @access public * @return void */ function MoodleQuickForm_cancel($elementName=null, $value=null, $attributes=null) { if ($elementName==null){ $elementName='cancel'; } if ($value==null){ $value=get_string('cancel'); } MoodleQuickForm_submit::MoodleQuickForm_submit($elementName, $value, $attributes); $this->updateAttributes(array('onclick'=>'skipClientValidation = true; return true;')); } //end constructor function onQuickFormEvent($event, $arg, &$caller) { switch ($event) { case 'createElement': $className = get_class($this); $this->$className($arg[0], $arg[1], $arg[2]); $caller->_registerCancelButton($this->getName()); return true; break; } return parent::onQuickFormEvent($event, $arg, $caller); } // end func onQuickFormEvent function getFrozenHtml(){ return HTML_QuickForm_submit::getFrozenHtml(); } function freeze(){ return HTML_QuickForm_submit::freeze(); } // }}} } //end class MoodleQuickForm_cancel ?>
feniix/moodle
lib/form/cancel.php
PHP
gpl-2.0
1,501
package teammates.test.pageobjects; public class EntityNotFoundPage extends AppPage { public EntityNotFoundPage(Browser browser) { super(browser); } @Override protected boolean containsExpectedPageContents() { return getPageSource().contains("TEAMMATES could not locate what you were trying to access."); } }
LiHaoTan/teammates
src/test/java/teammates/test/pageobjects/EntityNotFoundPage.java
Java
gpl-2.0
349
/* * Copyright (C) 2012-2013 Team XBMC * http://xbmc.org * * This Program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2, or (at your option) * any later version. * * This Program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with XBMC; see the file COPYING. If not, see * <http://www.gnu.org/licenses/>. * */ #include "FileItem.h" #include "epg/Epg.h" #include "guilib/GUIWindowManager.h" #include "input/Key.h" #include "view/ViewState.h" #include "pvr/PVRManager.h" #include "GUIDialogPVRGuideInfo.h" #include "GUIDialogPVRGuideOSD.h" using namespace PVR; #define CONTROL_LIST 11 CGUIDialogPVRGuideOSD::CGUIDialogPVRGuideOSD() : CGUIDialog(WINDOW_DIALOG_PVR_OSD_GUIDE, "DialogPVRGuideOSD.xml") { m_vecItems = new CFileItemList; } CGUIDialogPVRGuideOSD::~CGUIDialogPVRGuideOSD() { delete m_vecItems; } bool CGUIDialogPVRGuideOSD::OnMessage(CGUIMessage& message) { switch (message.GetMessage()) { case GUI_MSG_CLICKED: { int iControl = message.GetSenderId(); if (m_viewControl.HasControl(iControl)) // list/thumb control { int iItem = m_viewControl.GetSelectedItem(); int iAction = message.GetParam1(); if (iAction == ACTION_SELECT_ITEM || iAction == ACTION_MOUSE_LEFT_CLICK) { ShowInfo(iItem); return true; } } } break; } return CGUIDialog::OnMessage(message); } void CGUIDialogPVRGuideOSD::OnInitWindow() { /* Close dialog immediately if no TV or radio channel is playing */ if (!g_PVRManager.IsPlaying()) { Close(); return; } // lock our display, as this window is rendered from the player thread g_graphicsContext.Lock(); m_viewControl.SetCurrentView(DEFAULT_VIEW_LIST); // empty the list ready for population Clear(); g_PVRManager.GetCurrentEpg(*m_vecItems); m_viewControl.SetItems(*m_vecItems); g_graphicsContext.Unlock(); // call init CGUIDialog::OnInitWindow(); // select the active entry unsigned int iSelectedItem = 0; for (int iEpgPtr = 0; iEpgPtr < m_vecItems->Size(); ++iEpgPtr) { CFileItemPtr entry = m_vecItems->Get(iEpgPtr); if (entry->HasEPGInfoTag() && entry->GetEPGInfoTag()->IsActive()) { iSelectedItem = iEpgPtr; break; } } m_viewControl.SetSelectedItem(iSelectedItem); } void CGUIDialogPVRGuideOSD::OnDeinitWindow(int nextWindowID) { CGUIDialog::OnDeinitWindow(nextWindowID); Clear(); } void CGUIDialogPVRGuideOSD::Clear() { m_viewControl.Clear(); m_vecItems->Clear(); } void CGUIDialogPVRGuideOSD::ShowInfo(int item) { /* Check file item is in list range and get his pointer */ if (item < 0 || item >= (int)m_vecItems->Size()) return; CFileItemPtr pItem = m_vecItems->Get(item); /* Load programme info dialog */ CGUIDialogPVRGuideInfo* pDlgInfo = (CGUIDialogPVRGuideInfo*)g_windowManager.GetWindow(WINDOW_DIALOG_PVR_GUIDE_INFO); if (!pDlgInfo) return; /* inform dialog about the file item and open dialog window */ pDlgInfo->SetProgInfo(pItem->GetEPGInfoTag()); pDlgInfo->Open(); } void CGUIDialogPVRGuideOSD::OnWindowLoaded() { CGUIDialog::OnWindowLoaded(); m_viewControl.Reset(); m_viewControl.SetParentWindow(GetID()); m_viewControl.AddView(GetControl(CONTROL_LIST)); } void CGUIDialogPVRGuideOSD::OnWindowUnload() { CGUIDialog::OnWindowUnload(); m_viewControl.Reset(); } CGUIControl *CGUIDialogPVRGuideOSD::GetFirstFocusableControl(int id) 
{ if (m_viewControl.HasControl(id)) id = m_viewControl.GetCurrentControl(); return CGUIWindow::GetFirstFocusableControl(id); }
Shine-/xbmc
xbmc/pvr/dialogs/GUIDialogPVRGuideOSD.cpp
C++
gpl-2.0
3,993
/* Copyright (C) 2015 Daniel Preussker <f0o@devilcode.org> * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ /** * VictorOps Generic-API Transport - Based on PagerDuty transport * @author f0o <f0o@devilcode.org> * @author laf <neil@librenms.org> * @copyright 2015 f0o, laf, LibreNMS * @license GPL * @package LibreNMS * @subpackage Alerts */ $url = $opts['url']; $protocol = array( 'entity_id' => ($obj['id'] ? $obj['id'] : $obj['uid']), 'state_start_time' => strtotime($obj['timestamp']), 'monitoring_tool' => 'librenms', ); if( $obj['state'] == 0 ) { $protocol['message_type'] = 'recovery'; } elseif( $obj['state'] == 2 ) { $protocol['message_type'] = 'acknowledgement'; } elseif ($obj['state'] == 1) { $protocol['message_type'] = 'critical'; } foreach( $obj['faults'] as $fault=>$data ) { $protocol['state_message'] .= $data['string']; } $curl = curl_init(); set_curl_proxy($curl); curl_setopt($curl, CURLOPT_URL, $url ); curl_setopt($curl, CURLOPT_RETURNTRANSFER, 1); curl_setopt($curl, CURLOPT_HTTPHEADER, array('Content-type'=> 'application/json')); curl_setopt($curl, CURLOPT_POSTFIELDS, json_encode($protocol)); $ret = curl_exec($curl); $code = curl_getinfo($curl, CURLINFO_HTTP_CODE); if( $code != 200 ) { var_dump("VictorOps returned Error, retry later"); //FIXME: proper debugging return false; } return true;
NetworkNub/librenms
includes/alerts/transport.victorops.php
PHP
gpl-3.0
1,967
/***************************************************************************** * * PROJECT: Multi Theft Auto v1.0 * LICENSE: See LICENSE in the top level directory * FILE: mods/deathmatch/logic/packets/CPlayerStatsPacket.cpp * PURPOSE: Player statistics packet class * DEVELOPERS: Jax <> * * Multi Theft Auto is available from http://www.multitheftauto.com/ * *****************************************************************************/ #include "StdInc.h" CPlayerStatsPacket::~CPlayerStatsPacket ( void ) { Clear ( ); } bool CPlayerStatsPacket::Write ( NetBitStreamInterface& BitStream ) const { // Write the source player. if ( m_pSourceElement ) { ElementID ID = m_pSourceElement->GetID (); BitStream.Write ( ID ); // Write the stats unsigned short usNumStats = static_cast < unsigned short >( m_List.size () ); BitStream.WriteCompressed ( usNumStats ); map < unsigned short, sPlayerStat > ::const_iterator iter = m_List.begin (); for ( ; iter != m_List.end () ; ++iter ) { const sPlayerStat& playerStat = (*iter).second; BitStream.Write ( playerStat.id ); BitStream.Write ( playerStat.value ); } return true; } return false; } void CPlayerStatsPacket::Add ( unsigned short usID, float fValue ) { map < unsigned short, sPlayerStat > ::iterator iter = m_List.find ( usID ); if ( iter != m_List.end ( ) ) { if ( fValue == 0.0f ) { m_List.erase ( iter ); } else { sPlayerStat& stat = (*iter).second; stat.value = fValue; } } else { sPlayerStat stat; stat.id = usID; stat.value = fValue; m_List[ usID ] = stat; } } void CPlayerStatsPacket::Remove ( unsigned short usID, float fValue ) { map < unsigned short, sPlayerStat > ::iterator iter = m_List.find ( usID ); if ( iter != m_List.end ( ) ) { m_List.erase ( iter ); } } void CPlayerStatsPacket::Clear ( void ) { m_List.clear (); }
zneext/mtasa-blue
Server/mods/deathmatch/logic/packets/CPlayerStatsPacket.cpp
C++
gpl-3.0
2,217
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.protocol.datatransfer; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PacketHeaderProto; import org.apache.hadoop.hdfs.util.ByteBufferOutputStream; import com.google.common.base.Preconditions; import com.google.common.primitives.Shorts; import com.google.common.primitives.Ints; import com.google.protobuf.InvalidProtocolBufferException; /** * Header data for each packet that goes through the read/write pipelines. * Includes all of the information about the packet, excluding checksums and * actual data. * * This data includes: * - the offset in bytes into the HDFS block of the data in this packet * - the sequence number of this packet in the pipeline * - whether or not this is the last packet in the pipeline * - the length of the data in this packet * - whether or not this packet should be synced by the DNs. * * When serialized, this header is written out as a protocol buffer, preceded * by a 4-byte integer representing the full packet length, and a 2-byte short * representing the header length. */ @InterfaceAudience.Private @InterfaceStability.Evolving public class PacketHeader { private static final int MAX_PROTO_SIZE = PacketHeaderProto.newBuilder() .setOffsetInBlock(0) .setSeqno(0) .setLastPacketInBlock(false) .setDataLen(0) .setSyncBlock(false) .build().getSerializedSize(); public static final int PKT_LENGTHS_LEN = Ints.BYTES + Shorts.BYTES; public static final int PKT_MAX_HEADER_LEN = PKT_LENGTHS_LEN + MAX_PROTO_SIZE; private int packetLen; private PacketHeaderProto proto; public PacketHeader() { } public PacketHeader(int packetLen, long offsetInBlock, long seqno, boolean lastPacketInBlock, int dataLen, boolean syncBlock) { this.packetLen = packetLen; Preconditions.checkArgument(packetLen >= Ints.BYTES, "packet len %s should always be at least 4 bytes", packetLen); PacketHeaderProto.Builder builder = PacketHeaderProto.newBuilder() .setOffsetInBlock(offsetInBlock) .setSeqno(seqno) .setLastPacketInBlock(lastPacketInBlock) .setDataLen(dataLen); if (syncBlock) { // Only set syncBlock if it is specified. // This is wire-incompatible with Hadoop 2.0.0-alpha due to HDFS-3721 // because it changes the length of the packet header, and BlockReceiver // in that version did not support variable-length headers. 
builder.setSyncBlock(true); } proto = builder.build(); } public int getDataLen() { return proto.getDataLen(); } public boolean isLastPacketInBlock() { return proto.getLastPacketInBlock(); } public long getSeqno() { return proto.getSeqno(); } public long getOffsetInBlock() { return proto.getOffsetInBlock(); } public int getPacketLen() { return packetLen; } public boolean getSyncBlock() { return proto.getSyncBlock(); } @Override public String toString() { return "PacketHeader with packetLen=" + packetLen + " header data: " + proto.toString(); } public void setFieldsFromData( int packetLen, byte[] headerData) throws InvalidProtocolBufferException { this.packetLen = packetLen; proto = PacketHeaderProto.parseFrom(headerData); } public void readFields(ByteBuffer buf) throws IOException { packetLen = buf.getInt(); short protoLen = buf.getShort(); byte[] data = new byte[protoLen]; buf.get(data); proto = PacketHeaderProto.parseFrom(data); } public void readFields(DataInputStream in) throws IOException { this.packetLen = in.readInt(); short protoLen = in.readShort(); byte[] data = new byte[protoLen]; in.readFully(data); proto = PacketHeaderProto.parseFrom(data); } /** * @return the number of bytes necessary to write out this header, * including the length-prefixing of the payload and header */ public int getSerializedSize() { return PKT_LENGTHS_LEN + proto.getSerializedSize(); } /** * Write the header into the buffer. * This requires that PKT_HEADER_LEN bytes are available. */ public void putInBuffer(final ByteBuffer buf) { assert proto.getSerializedSize() <= MAX_PROTO_SIZE : "Expected " + (MAX_PROTO_SIZE) + " got: " + proto.getSerializedSize(); try { buf.putInt(packetLen); buf.putShort((short) proto.getSerializedSize()); proto.writeTo(new ByteBufferOutputStream(buf)); } catch (IOException e) { throw new RuntimeException(e); } } public void write(DataOutputStream out) throws IOException { assert proto.getSerializedSize() <= MAX_PROTO_SIZE : "Expected " + (MAX_PROTO_SIZE) + " got: " + proto.getSerializedSize(); out.writeInt(packetLen); out.writeShort(proto.getSerializedSize()); proto.writeTo(out); } public byte[] getBytes() { ByteBuffer buf = ByteBuffer.allocate(getSerializedSize()); putInBuffer(buf); return buf.array(); } /** * Perform a sanity check on the packet, returning true if it is sane. * @param lastSeqNo the previous sequence number received - we expect the * current sequence number to be larger by 1. */ public boolean sanityCheck(long lastSeqNo) { // We should only have a non-positive data length for the last packet if (proto.getDataLen() <= 0 && !proto.getLastPacketInBlock()) return false; // The last packet should not contain data if (proto.getLastPacketInBlock() && proto.getDataLen() != 0) return false; // Seqnos should always increase by 1 with each packet received return proto.getSeqno() == lastSeqNo + 1; } @Override public boolean equals(Object o) { if (!(o instanceof PacketHeader)) return false; PacketHeader other = (PacketHeader)o; return this.proto.equals(other.proto); } @Override public int hashCode() { return (int)proto.getSeqno(); } }
dennishuo/hadoop
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/PacketHeader.java
Java
apache-2.0
7,020
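The PacketHeader javadoc above describes the wire layout: a 4-byte packet length, a 2-byte header length, then the protobuf-encoded header. A minimal Java sketch of just that length-prefixed framing, with plain bytes standing in for the serialized PacketHeaderProto:

import java.nio.ByteBuffer;

// Illustrative framing only: 4-byte packet length + 2-byte header length +
// header bytes, as described in the PacketHeader javadoc above.
public class PacketFramingSketch {

    static ByteBuffer frame(int packetLen, byte[] headerBytes) {
        ByteBuffer buf = ByteBuffer.allocate(4 + 2 + headerBytes.length);
        buf.putInt(packetLen);                          // full packet length (>= 4)
        buf.putShort((short) headerBytes.length);       // header length
        buf.put(headerBytes);                           // stand-in for proto bytes
        buf.flip();
        return buf;
    }

    public static void main(String[] args) {
        byte[] fakeHeader = {1, 2, 3};                  // hypothetical header payload
        ByteBuffer framed = frame(4, fakeHeader);
        System.out.println("framed " + framed.remaining() + " bytes");
    }
}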
/** * Modules in this bundle * @license * * opentype.js: * license: MIT (http://opensource.org/licenses/MIT) * author: Frederik De Bleser <frederik@debleser.be> * version: 0.6.7 * * tiny-inflate: * license: MIT (http://opensource.org/licenses/MIT) * author: Devon Govett <devongovett@gmail.com> * maintainers: devongovett <devongovett@gmail.com> * homepage: https://github.com/devongovett/tiny-inflate * version: 1.0.2 * * This header is generated by licensify (https://github.com/twada/licensify) */ (function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.opentype = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){ var TINF_OK = 0; var TINF_DATA_ERROR = -3; function Tree() { this.table = new Uint16Array(16); /* table of code length counts */ this.trans = new Uint16Array(288); /* code -> symbol translation table */ } function Data(source, dest) { this.source = source; this.sourceIndex = 0; this.tag = 0; this.bitcount = 0; this.dest = dest; this.destLen = 0; this.ltree = new Tree(); /* dynamic length/symbol tree */ this.dtree = new Tree(); /* dynamic distance tree */ } /* --------------------------------------------------- * * -- uninitialized global data (static structures) -- * * --------------------------------------------------- */ var sltree = new Tree(); var sdtree = new Tree(); /* extra bits and base tables for length codes */ var length_bits = new Uint8Array(30); var length_base = new Uint16Array(30); /* extra bits and base tables for distance codes */ var dist_bits = new Uint8Array(30); var dist_base = new Uint16Array(30); /* special ordering of code length codes */ var clcidx = new Uint8Array([ 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 ]); /* used by tinf_decode_trees, avoids allocations every call */ var code_tree = new Tree(); var lengths = new Uint8Array(288 + 32); /* ----------------------- * * -- utility functions -- * * ----------------------- */ /* build extra bits and base tables */ function tinf_build_bits_base(bits, base, delta, first) { var i, sum; /* build bits table */ for (i = 0; i < delta; ++i) bits[i] = 0; for (i = 0; i < 30 - delta; ++i) bits[i + delta] = i / delta | 0; /* build base table */ for (sum = first, i = 0; i < 30; ++i) { base[i] = sum; sum += 1 << bits[i]; } } /* build the fixed huffman trees */ function tinf_build_fixed_trees(lt, dt) { var i; /* build fixed length tree */ for (i = 0; i < 7; ++i) lt.table[i] = 0; lt.table[7] = 24; lt.table[8] = 152; lt.table[9] = 112; for (i = 0; i < 24; ++i) lt.trans[i] = 256 + i; for (i = 0; i < 144; ++i) lt.trans[24 + i] = i; for (i = 0; i < 8; ++i) lt.trans[24 + 144 + i] = 280 + i; for (i = 0; i < 112; ++i) lt.trans[24 + 144 + 8 + i] = 144 + i; /* build fixed distance tree */ for (i = 0; i < 5; ++i) dt.table[i] = 0; dt.table[5] = 32; for (i = 0; i < 32; ++i) dt.trans[i] = i; } /* 
given an array of code lengths, build a tree */ var offs = new Uint16Array(16); function tinf_build_tree(t, lengths, off, num) { var i, sum; /* clear code length count table */ for (i = 0; i < 16; ++i) t.table[i] = 0; /* scan symbol lengths, and sum code length counts */ for (i = 0; i < num; ++i) t.table[lengths[off + i]]++; t.table[0] = 0; /* compute offset table for distribution sort */ for (sum = 0, i = 0; i < 16; ++i) { offs[i] = sum; sum += t.table[i]; } /* create code->symbol translation table (symbols sorted by code) */ for (i = 0; i < num; ++i) { if (lengths[off + i]) t.trans[offs[lengths[off + i]]++] = i; } } /* ---------------------- * * -- decode functions -- * * ---------------------- */ /* get one bit from source stream */ function tinf_getbit(d) { /* check if tag is empty */ if (!d.bitcount--) { /* load next tag */ d.tag = d.source[d.sourceIndex++]; d.bitcount = 7; } /* shift bit out of tag */ var bit = d.tag & 1; d.tag >>>= 1; return bit; } /* read a num bit value from a stream and add base */ function tinf_read_bits(d, num, base) { if (!num) return base; while (d.bitcount < 24) { d.tag |= d.source[d.sourceIndex++] << d.bitcount; d.bitcount += 8; } var val = d.tag & (0xffff >>> (16 - num)); d.tag >>>= num; d.bitcount -= num; return val + base; } /* given a data stream and a tree, decode a symbol */ function tinf_decode_symbol(d, t) { while (d.bitcount < 24) { d.tag |= d.source[d.sourceIndex++] << d.bitcount; d.bitcount += 8; } var sum = 0, cur = 0, len = 0; var tag = d.tag; /* get more bits while code value is above sum */ do { cur = 2 * cur + (tag & 1); tag >>>= 1; ++len; sum += t.table[len]; cur -= t.table[len]; } while (cur >= 0); d.tag = tag; d.bitcount -= len; return t.trans[sum + cur]; } /* given a data stream, decode dynamic trees from it */ function tinf_decode_trees(d, lt, dt) { var hlit, hdist, hclen; var i, num, length; /* get 5 bits HLIT (257-286) */ hlit = tinf_read_bits(d, 5, 257); /* get 5 bits HDIST (1-32) */ hdist = tinf_read_bits(d, 5, 1); /* get 4 bits HCLEN (4-19) */ hclen = tinf_read_bits(d, 4, 4); for (i = 0; i < 19; ++i) lengths[i] = 0; /* read code lengths for code length alphabet */ for (i = 0; i < hclen; ++i) { /* get 3 bits code length (0-7) */ var clen = tinf_read_bits(d, 3, 0); lengths[clcidx[i]] = clen; } /* build code length tree */ tinf_build_tree(code_tree, lengths, 0, 19); /* decode code lengths for the dynamic trees */ for (num = 0; num < hlit + hdist;) { var sym = tinf_decode_symbol(d, code_tree); switch (sym) { case 16: /* copy previous code length 3-6 times (read 2 bits) */ var prev = lengths[num - 1]; for (length = tinf_read_bits(d, 2, 3); length; --length) { lengths[num++] = prev; } break; case 17: /* repeat code length 0 for 3-10 times (read 3 bits) */ for (length = tinf_read_bits(d, 3, 3); length; --length) { lengths[num++] = 0; } break; case 18: /* repeat code length 0 for 11-138 times (read 7 bits) */ for (length = tinf_read_bits(d, 7, 11); length; --length) { lengths[num++] = 0; } break; default: /* values 0-15 represent the actual code lengths */ lengths[num++] = sym; break; } } /* build dynamic trees */ tinf_build_tree(lt, lengths, 0, hlit); tinf_build_tree(dt, lengths, hlit, hdist); } /* ----------------------------- * * -- block inflate functions -- * * ----------------------------- */ /* given a stream and two trees, inflate a block of data */ function tinf_inflate_block_data(d, lt, dt) { while (1) { var sym = tinf_decode_symbol(d, lt); /* check for end of block */ if (sym === 256) { return TINF_OK; } if (sym < 256) { 
d.dest[d.destLen++] = sym; } else { var length, dist, offs; var i; sym -= 257; /* possibly get more bits from length code */ length = tinf_read_bits(d, length_bits[sym], length_base[sym]); dist = tinf_decode_symbol(d, dt); /* possibly get more bits from distance code */ offs = d.destLen - tinf_read_bits(d, dist_bits[dist], dist_base[dist]); /* copy match */ for (i = offs; i < offs + length; ++i) { d.dest[d.destLen++] = d.dest[i]; } } } } /* inflate an uncompressed block of data */ function tinf_inflate_uncompressed_block(d) { var length, invlength; var i; /* unread from bitbuffer */ while (d.bitcount > 8) { d.sourceIndex--; d.bitcount -= 8; } /* get length */ length = d.source[d.sourceIndex + 1]; length = 256 * length + d.source[d.sourceIndex]; /* get one's complement of length */ invlength = d.source[d.sourceIndex + 3]; invlength = 256 * invlength + d.source[d.sourceIndex + 2]; /* check length */ if (length !== (~invlength & 0x0000ffff)) return TINF_DATA_ERROR; d.sourceIndex += 4; /* copy block */ for (i = length; i; --i) d.dest[d.destLen++] = d.source[d.sourceIndex++]; /* make sure we start next block on a byte boundary */ d.bitcount = 0; return TINF_OK; } /* inflate stream from source to dest */ function tinf_uncompress(source, dest) { var d = new Data(source, dest); var bfinal, btype, res; do { /* read final block flag */ bfinal = tinf_getbit(d); /* read block type (2 bits) */ btype = tinf_read_bits(d, 2, 0); /* decompress block */ switch (btype) { case 0: /* decompress uncompressed block */ res = tinf_inflate_uncompressed_block(d); break; case 1: /* decompress block with fixed huffman trees */ res = tinf_inflate_block_data(d, sltree, sdtree); break; case 2: /* decompress block with dynamic huffman trees */ tinf_decode_trees(d, d.ltree, d.dtree); res = tinf_inflate_block_data(d, d.ltree, d.dtree); break; default: res = TINF_DATA_ERROR; } if (res !== TINF_OK) throw new Error('Data error'); } while (!bfinal); if (d.destLen < d.dest.length) { if (typeof d.dest.slice === 'function') return d.dest.slice(0, d.destLen); else return d.dest.subarray(0, d.destLen); } return d.dest; } /* -------------------- * * -- initialization -- * * -------------------- */ /* build fixed huffman trees */ tinf_build_fixed_trees(sltree, sdtree); /* build extra bits and base tables */ tinf_build_bits_base(length_bits, length_base, 4, 3); tinf_build_bits_base(dist_bits, dist_base, 2, 1); /* fix a special case */ length_bits[28] = 0; length_base[28] = 258; module.exports = tinf_uncompress; },{}],2:[function(require,module,exports){ // Run-time checking of preconditions. 'use strict'; exports.fail = function(message) { throw new Error(message); }; // Precondition function that checks if the given predicate is true. // If not, it will throw an error. exports.argument = function(predicate, message) { if (!predicate) { exports.fail(message); } }; // Precondition function that checks if the given assertion is true. // If not, it will throw an error. exports.assert = exports.argument; },{}],3:[function(require,module,exports){ // Drawing utility functions. 'use strict'; // Draw a line on the given context from point `x1,y1` to point `x2,y2`. 
function line(ctx, x1, y1, x2, y2) { ctx.beginPath(); ctx.moveTo(x1, y1); ctx.lineTo(x2, y2); ctx.stroke(); } exports.line = line; },{}],4:[function(require,module,exports){ // Glyph encoding 'use strict'; var cffStandardStrings = [ '.notdef', 'space', 'exclam', 'quotedbl', 'numbersign', 'dollar', 'percent', 'ampersand', 'quoteright', 'parenleft', 'parenright', 'asterisk', 'plus', 'comma', 'hyphen', 'period', 'slash', 'zero', 'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine', 'colon', 'semicolon', 'less', 'equal', 'greater', 'question', 'at', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'bracketleft', 'backslash', 'bracketright', 'asciicircum', 'underscore', 'quoteleft', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'braceleft', 'bar', 'braceright', 'asciitilde', 'exclamdown', 'cent', 'sterling', 'fraction', 'yen', 'florin', 'section', 'currency', 'quotesingle', 'quotedblleft', 'guillemotleft', 'guilsinglleft', 'guilsinglright', 'fi', 'fl', 'endash', 'dagger', 'daggerdbl', 'periodcentered', 'paragraph', 'bullet', 'quotesinglbase', 'quotedblbase', 'quotedblright', 'guillemotright', 'ellipsis', 'perthousand', 'questiondown', 'grave', 'acute', 'circumflex', 'tilde', 'macron', 'breve', 'dotaccent', 'dieresis', 'ring', 'cedilla', 'hungarumlaut', 'ogonek', 'caron', 'emdash', 'AE', 'ordfeminine', 'Lslash', 'Oslash', 'OE', 'ordmasculine', 'ae', 'dotlessi', 'lslash', 'oslash', 'oe', 'germandbls', 'onesuperior', 'logicalnot', 'mu', 'trademark', 'Eth', 'onehalf', 'plusminus', 'Thorn', 'onequarter', 'divide', 'brokenbar', 'degree', 'thorn', 'threequarters', 'twosuperior', 'registered', 'minus', 'eth', 'multiply', 'threesuperior', 'copyright', 'Aacute', 'Acircumflex', 'Adieresis', 'Agrave', 'Aring', 'Atilde', 'Ccedilla', 'Eacute', 'Ecircumflex', 'Edieresis', 'Egrave', 'Iacute', 'Icircumflex', 'Idieresis', 'Igrave', 'Ntilde', 'Oacute', 'Ocircumflex', 'Odieresis', 'Ograve', 'Otilde', 'Scaron', 'Uacute', 'Ucircumflex', 'Udieresis', 'Ugrave', 'Yacute', 'Ydieresis', 'Zcaron', 'aacute', 'acircumflex', 'adieresis', 'agrave', 'aring', 'atilde', 'ccedilla', 'eacute', 'ecircumflex', 'edieresis', 'egrave', 'iacute', 'icircumflex', 'idieresis', 'igrave', 'ntilde', 'oacute', 'ocircumflex', 'odieresis', 'ograve', 'otilde', 'scaron', 'uacute', 'ucircumflex', 'udieresis', 'ugrave', 'yacute', 'ydieresis', 'zcaron', 'exclamsmall', 'Hungarumlautsmall', 'dollaroldstyle', 'dollarsuperior', 'ampersandsmall', 'Acutesmall', 'parenleftsuperior', 'parenrightsuperior', '266 ff', 'onedotenleader', 'zerooldstyle', 'oneoldstyle', 'twooldstyle', 'threeoldstyle', 'fouroldstyle', 'fiveoldstyle', 'sixoldstyle', 'sevenoldstyle', 'eightoldstyle', 'nineoldstyle', 'commasuperior', 'threequartersemdash', 'periodsuperior', 'questionsmall', 'asuperior', 'bsuperior', 'centsuperior', 'dsuperior', 'esuperior', 'isuperior', 'lsuperior', 'msuperior', 'nsuperior', 'osuperior', 'rsuperior', 'ssuperior', 'tsuperior', 'ff', 'ffi', 'ffl', 'parenleftinferior', 'parenrightinferior', 'Circumflexsmall', 'hyphensuperior', 'Gravesmall', 'Asmall', 'Bsmall', 'Csmall', 'Dsmall', 'Esmall', 'Fsmall', 'Gsmall', 'Hsmall', 'Ismall', 'Jsmall', 'Ksmall', 'Lsmall', 'Msmall', 'Nsmall', 'Osmall', 'Psmall', 'Qsmall', 'Rsmall', 'Ssmall', 'Tsmall', 'Usmall', 'Vsmall', 'Wsmall', 'Xsmall', 'Ysmall', 'Zsmall', 'colonmonetary', 'onefitted', 'rupiah', 'Tildesmall', 'exclamdownsmall', 
'centoldstyle', 'Lslashsmall', 'Scaronsmall', 'Zcaronsmall', 'Dieresissmall', 'Brevesmall', 'Caronsmall', 'Dotaccentsmall', 'Macronsmall', 'figuredash', 'hypheninferior', 'Ogoneksmall', 'Ringsmall', 'Cedillasmall', 'questiondownsmall', 'oneeighth', 'threeeighths', 'fiveeighths', 'seveneighths', 'onethird', 'twothirds', 'zerosuperior', 'foursuperior', 'fivesuperior', 'sixsuperior', 'sevensuperior', 'eightsuperior', 'ninesuperior', 'zeroinferior', 'oneinferior', 'twoinferior', 'threeinferior', 'fourinferior', 'fiveinferior', 'sixinferior', 'seveninferior', 'eightinferior', 'nineinferior', 'centinferior', 'dollarinferior', 'periodinferior', 'commainferior', 'Agravesmall', 'Aacutesmall', 'Acircumflexsmall', 'Atildesmall', 'Adieresissmall', 'Aringsmall', 'AEsmall', 'Ccedillasmall', 'Egravesmall', 'Eacutesmall', 'Ecircumflexsmall', 'Edieresissmall', 'Igravesmall', 'Iacutesmall', 'Icircumflexsmall', 'Idieresissmall', 'Ethsmall', 'Ntildesmall', 'Ogravesmall', 'Oacutesmall', 'Ocircumflexsmall', 'Otildesmall', 'Odieresissmall', 'OEsmall', 'Oslashsmall', 'Ugravesmall', 'Uacutesmall', 'Ucircumflexsmall', 'Udieresissmall', 'Yacutesmall', 'Thornsmall', 'Ydieresissmall', '001.000', '001.001', '001.002', '001.003', 'Black', 'Bold', 'Book', 'Light', 'Medium', 'Regular', 'Roman', 'Semibold']; var cffStandardEncoding = [ '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', 'space', 'exclam', 'quotedbl', 'numbersign', 'dollar', 'percent', 'ampersand', 'quoteright', 'parenleft', 'parenright', 'asterisk', 'plus', 'comma', 'hyphen', 'period', 'slash', 'zero', 'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine', 'colon', 'semicolon', 'less', 'equal', 'greater', 'question', 'at', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'bracketleft', 'backslash', 'bracketright', 'asciicircum', 'underscore', 'quoteleft', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'braceleft', 'bar', 'braceright', 'asciitilde', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', 'exclamdown', 'cent', 'sterling', 'fraction', 'yen', 'florin', 'section', 'currency', 'quotesingle', 'quotedblleft', 'guillemotleft', 'guilsinglleft', 'guilsinglright', 'fi', 'fl', '', 'endash', 'dagger', 'daggerdbl', 'periodcentered', '', 'paragraph', 'bullet', 'quotesinglbase', 'quotedblbase', 'quotedblright', 'guillemotright', 'ellipsis', 'perthousand', '', 'questiondown', '', 'grave', 'acute', 'circumflex', 'tilde', 'macron', 'breve', 'dotaccent', 'dieresis', '', 'ring', 'cedilla', '', 'hungarumlaut', 'ogonek', 'caron', 'emdash', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', 'AE', '', 'ordfeminine', '', '', '', '', 'Lslash', 'Oslash', 'OE', 'ordmasculine', '', '', '', '', '', 'ae', '', '', '', 'dotlessi', '', '', 'lslash', 'oslash', 'oe', 'germandbls']; var cffExpertEncoding = [ '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', 'space', 'exclamsmall', 'Hungarumlautsmall', '', 'dollaroldstyle', 'dollarsuperior', 'ampersandsmall', 'Acutesmall', 'parenleftsuperior', 'parenrightsuperior', 'twodotenleader', 'onedotenleader', 'comma', 'hyphen', 'period', 'fraction', 'zerooldstyle', 'oneoldstyle', 'twooldstyle', 'threeoldstyle', 
'fouroldstyle', 'fiveoldstyle', 'sixoldstyle', 'sevenoldstyle', 'eightoldstyle', 'nineoldstyle', 'colon', 'semicolon', 'commasuperior', 'threequartersemdash', 'periodsuperior', 'questionsmall', '', 'asuperior', 'bsuperior', 'centsuperior', 'dsuperior', 'esuperior', '', '', 'isuperior', '', '', 'lsuperior', 'msuperior', 'nsuperior', 'osuperior', '', '', 'rsuperior', 'ssuperior', 'tsuperior', '', 'ff', 'fi', 'fl', 'ffi', 'ffl', 'parenleftinferior', '', 'parenrightinferior', 'Circumflexsmall', 'hyphensuperior', 'Gravesmall', 'Asmall', 'Bsmall', 'Csmall', 'Dsmall', 'Esmall', 'Fsmall', 'Gsmall', 'Hsmall', 'Ismall', 'Jsmall', 'Ksmall', 'Lsmall', 'Msmall', 'Nsmall', 'Osmall', 'Psmall', 'Qsmall', 'Rsmall', 'Ssmall', 'Tsmall', 'Usmall', 'Vsmall', 'Wsmall', 'Xsmall', 'Ysmall', 'Zsmall', 'colonmonetary', 'onefitted', 'rupiah', 'Tildesmall', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', 'exclamdownsmall', 'centoldstyle', 'Lslashsmall', '', '', 'Scaronsmall', 'Zcaronsmall', 'Dieresissmall', 'Brevesmall', 'Caronsmall', '', 'Dotaccentsmall', '', '', 'Macronsmall', '', '', 'figuredash', 'hypheninferior', '', '', 'Ogoneksmall', 'Ringsmall', 'Cedillasmall', '', '', '', 'onequarter', 'onehalf', 'threequarters', 'questiondownsmall', 'oneeighth', 'threeeighths', 'fiveeighths', 'seveneighths', 'onethird', 'twothirds', '', '', 'zerosuperior', 'onesuperior', 'twosuperior', 'threesuperior', 'foursuperior', 'fivesuperior', 'sixsuperior', 'sevensuperior', 'eightsuperior', 'ninesuperior', 'zeroinferior', 'oneinferior', 'twoinferior', 'threeinferior', 'fourinferior', 'fiveinferior', 'sixinferior', 'seveninferior', 'eightinferior', 'nineinferior', 'centinferior', 'dollarinferior', 'periodinferior', 'commainferior', 'Agravesmall', 'Aacutesmall', 'Acircumflexsmall', 'Atildesmall', 'Adieresissmall', 'Aringsmall', 'AEsmall', 'Ccedillasmall', 'Egravesmall', 'Eacutesmall', 'Ecircumflexsmall', 'Edieresissmall', 'Igravesmall', 'Iacutesmall', 'Icircumflexsmall', 'Idieresissmall', 'Ethsmall', 'Ntildesmall', 'Ogravesmall', 'Oacutesmall', 'Ocircumflexsmall', 'Otildesmall', 'Odieresissmall', 'OEsmall', 'Oslashsmall', 'Ugravesmall', 'Uacutesmall', 'Ucircumflexsmall', 'Udieresissmall', 'Yacutesmall', 'Thornsmall', 'Ydieresissmall']; var standardNames = [ '.notdef', '.null', 'nonmarkingreturn', 'space', 'exclam', 'quotedbl', 'numbersign', 'dollar', 'percent', 'ampersand', 'quotesingle', 'parenleft', 'parenright', 'asterisk', 'plus', 'comma', 'hyphen', 'period', 'slash', 'zero', 'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine', 'colon', 'semicolon', 'less', 'equal', 'greater', 'question', 'at', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'bracketleft', 'backslash', 'bracketright', 'asciicircum', 'underscore', 'grave', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'braceleft', 'bar', 'braceright', 'asciitilde', 'Adieresis', 'Aring', 'Ccedilla', 'Eacute', 'Ntilde', 'Odieresis', 'Udieresis', 'aacute', 'agrave', 'acircumflex', 'adieresis', 'atilde', 'aring', 'ccedilla', 'eacute', 'egrave', 'ecircumflex', 'edieresis', 'iacute', 'igrave', 'icircumflex', 'idieresis', 'ntilde', 'oacute', 'ograve', 'ocircumflex', 'odieresis', 'otilde', 'uacute', 'ugrave', 'ucircumflex', 'udieresis', 'dagger', 'degree', 'cent', 'sterling', 'section', 'bullet', 'paragraph', 
'germandbls', 'registered', 'copyright', 'trademark', 'acute', 'dieresis', 'notequal', 'AE', 'Oslash', 'infinity', 'plusminus', 'lessequal', 'greaterequal', 'yen', 'mu', 'partialdiff', 'summation', 'product', 'pi', 'integral', 'ordfeminine', 'ordmasculine', 'Omega', 'ae', 'oslash', 'questiondown', 'exclamdown', 'logicalnot', 'radical', 'florin', 'approxequal', 'Delta', 'guillemotleft', 'guillemotright', 'ellipsis', 'nonbreakingspace', 'Agrave', 'Atilde', 'Otilde', 'OE', 'oe', 'endash', 'emdash', 'quotedblleft', 'quotedblright', 'quoteleft', 'quoteright', 'divide', 'lozenge', 'ydieresis', 'Ydieresis', 'fraction', 'currency', 'guilsinglleft', 'guilsinglright', 'fi', 'fl', 'daggerdbl', 'periodcentered', 'quotesinglbase', 'quotedblbase', 'perthousand', 'Acircumflex', 'Ecircumflex', 'Aacute', 'Edieresis', 'Egrave', 'Iacute', 'Icircumflex', 'Idieresis', 'Igrave', 'Oacute', 'Ocircumflex', 'apple', 'Ograve', 'Uacute', 'Ucircumflex', 'Ugrave', 'dotlessi', 'circumflex', 'tilde', 'macron', 'breve', 'dotaccent', 'ring', 'cedilla', 'hungarumlaut', 'ogonek', 'caron', 'Lslash', 'lslash', 'Scaron', 'scaron', 'Zcaron', 'zcaron', 'brokenbar', 'Eth', 'eth', 'Yacute', 'yacute', 'Thorn', 'thorn', 'minus', 'multiply', 'onesuperior', 'twosuperior', 'threesuperior', 'onehalf', 'onequarter', 'threequarters', 'franc', 'Gbreve', 'gbreve', 'Idotaccent', 'Scedilla', 'scedilla', 'Cacute', 'cacute', 'Ccaron', 'ccaron', 'dcroat']; /** * This is the encoding used for fonts created from scratch. * It loops through all glyphs and finds the appropriate unicode value. * Since it's linear time, other encodings will be faster. * @exports opentype.DefaultEncoding * @class * @constructor * @param {opentype.Font} */ function DefaultEncoding(font) { this.font = font; } DefaultEncoding.prototype.charToGlyphIndex = function(c) { var code = c.charCodeAt(0); var glyphs = this.font.glyphs; if (glyphs) { for (var i = 0; i < glyphs.length; i += 1) { var glyph = glyphs.get(i); for (var j = 0; j < glyph.unicodes.length; j += 1) { if (glyph.unicodes[j] === code) { return i; } } } } else { return null; } }; /** * @exports opentype.CmapEncoding * @class * @constructor * @param {Object} cmap - a object with the cmap encoded data */ function CmapEncoding(cmap) { this.cmap = cmap; } /** * @param {string} c - the character * @return {number} The glyph index. */ CmapEncoding.prototype.charToGlyphIndex = function(c) { return this.cmap.glyphIndexMap[c.charCodeAt(0)] || 0; }; /** * @exports opentype.CffEncoding * @class * @constructor * @param {string} encoding - The encoding * @param {Array} charset - The charcater set. */ function CffEncoding(encoding, charset) { this.encoding = encoding; this.charset = charset; } /** * @param {string} s - The character * @return {number} The index. 
*/ CffEncoding.prototype.charToGlyphIndex = function(s) { var code = s.charCodeAt(0); var charName = this.encoding[code]; return this.charset.indexOf(charName); }; /** * @exports opentype.GlyphNames * @class * @constructor * @param {Object} post */ function GlyphNames(post) { var i; switch (post.version) { case 1: this.names = exports.standardNames.slice(); break; case 2: this.names = new Array(post.numberOfGlyphs); for (i = 0; i < post.numberOfGlyphs; i++) { if (post.glyphNameIndex[i] < exports.standardNames.length) { this.names[i] = exports.standardNames[post.glyphNameIndex[i]]; } else { this.names[i] = post.names[post.glyphNameIndex[i] - exports.standardNames.length]; } } break; case 2.5: this.names = new Array(post.numberOfGlyphs); for (i = 0; i < post.numberOfGlyphs; i++) { this.names[i] = exports.standardNames[i + post.glyphNameIndex[i]]; } break; case 3: this.names = []; break; } } /** * Gets the index of a glyph by name. * @param {string} name - The glyph name * @return {number} The index */ GlyphNames.prototype.nameToGlyphIndex = function(name) { return this.names.indexOf(name); }; /** * @param {number} gid * @return {string} */ GlyphNames.prototype.glyphIndexToName = function(gid) { return this.names[gid]; }; /** * @alias opentype.addGlyphNames * @param {opentype.Font} */ function addGlyphNames(font) { var glyph; var glyphIndexMap = font.tables.cmap.glyphIndexMap; var charCodes = Object.keys(glyphIndexMap); for (var i = 0; i < charCodes.length; i += 1) { var c = charCodes[i]; var glyphIndex = glyphIndexMap[c]; glyph = font.glyphs.get(glyphIndex); glyph.addUnicode(parseInt(c)); } for (i = 0; i < font.glyphs.length; i += 1) { glyph = font.glyphs.get(i); if (font.cffEncoding) { glyph.name = font.cffEncoding.charset[i]; } else if (font.glyphNames.names) { glyph.name = font.glyphNames.glyphIndexToName(i); } } } exports.cffStandardStrings = cffStandardStrings; exports.cffStandardEncoding = cffStandardEncoding; exports.cffExpertEncoding = cffExpertEncoding; exports.standardNames = standardNames; exports.DefaultEncoding = DefaultEncoding; exports.CmapEncoding = CmapEncoding; exports.CffEncoding = CffEncoding; exports.GlyphNames = GlyphNames; exports.addGlyphNames = addGlyphNames; },{}],5:[function(require,module,exports){ // The Font object 'use strict'; var path = require('./path'); var sfnt = require('./tables/sfnt'); var encoding = require('./encoding'); var glyphset = require('./glyphset'); var Substitution = require('./substitution'); var util = require('./util'); /** * @typedef FontOptions * @type Object * @property {Boolean} empty - whether to create a new empty font * @property {string} familyName * @property {string} styleName * @property {string=} fullName * @property {string=} postScriptName * @property {string=} designer * @property {string=} designerURL * @property {string=} manufacturer * @property {string=} manufacturerURL * @property {string=} license * @property {string=} licenseURL * @property {string=} version * @property {string=} description * @property {string=} copyright * @property {string=} trademark * @property {Number} unitsPerEm * @property {Number} ascender * @property {Number} descender * @property {Number} createdTimestamp * @property {string=} weightClass * @property {string=} widthClass * @property {string=} fsSelection */ /** * A Font represents a loaded OpenType font file. * It contains a set of glyphs and methods to draw text on a drawing context, * or to get a path representing the text. 
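 *
 * A minimal construction sketch (hypothetical glyph variables, assumed to be
 * opentype.Glyph instances built elsewhere):
 *
 *     var font = new opentype.Font({
 *         familyName: 'MyFont',
 *         styleName: 'Medium',
 *         unitsPerEm: 1000,
 *         ascender: 800,
 *         descender: -200,        // must be negative, see the checks below
 *         glyphs: [notdefGlyph, aGlyph]
 *     });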
* @exports opentype.Font * @class * @param {FontOptions} * @constructor */ function Font(options) { options = options || {}; if (!options.empty) { // Check that we've provided the minimum set of names. util.checkArgument(options.familyName, 'When creating a new Font object, familyName is required.'); util.checkArgument(options.styleName, 'When creating a new Font object, styleName is required.'); util.checkArgument(options.unitsPerEm, 'When creating a new Font object, unitsPerEm is required.'); util.checkArgument(options.ascender, 'When creating a new Font object, ascender is required.'); util.checkArgument(options.descender, 'When creating a new Font object, descender is required.'); util.checkArgument(options.descender < 0, 'Descender should be negative (e.g. -512).'); // OS X will complain if the names are empty, so we put a single space everywhere by default. this.names = { fontFamily: {en: options.familyName || ' '}, fontSubfamily: {en: options.styleName || ' '}, fullName: {en: options.fullName || options.familyName + ' ' + options.styleName}, postScriptName: {en: options.postScriptName || options.familyName + options.styleName}, designer: {en: options.designer || ' '}, designerURL: {en: options.designerURL || ' '}, manufacturer: {en: options.manufacturer || ' '}, manufacturerURL: {en: options.manufacturerURL || ' '}, license: {en: options.license || ' '}, licenseURL: {en: options.licenseURL || ' '}, version: {en: options.version || 'Version 0.1'}, description: {en: options.description || ' '}, copyright: {en: options.copyright || ' '}, trademark: {en: options.trademark || ' '} }; this.unitsPerEm = options.unitsPerEm || 1000; this.ascender = options.ascender; this.descender = options.descender; this.createdTimestamp = options.createdTimestamp; this.tables = { os2: { usWeightClass: options.weightClass || this.usWeightClasses.MEDIUM, usWidthClass: options.widthClass || this.usWidthClasses.MEDIUM, fsSelection: options.fsSelection || this.fsSelectionValues.REGULAR } }; } this.supported = true; // Deprecated: parseBuffer will throw an error if font is not supported. this.glyphs = new glyphset.GlyphSet(this, options.glyphs || []); this.encoding = new encoding.DefaultEncoding(this); this.substitution = new Substitution(this); this.tables = this.tables || {}; } /** * Check if the font has a glyph for the given character. * @param {string} * @return {Boolean} */ Font.prototype.hasChar = function(c) { return this.encoding.charToGlyphIndex(c) !== null; }; /** * Convert the given character to a single glyph index. * Note that this function assumes that there is a one-to-one mapping between * the given character and a glyph; for complex scripts this might not be the case. * @param {string} * @return {Number} */ Font.prototype.charToGlyphIndex = function(s) { return this.encoding.charToGlyphIndex(s); }; /** * Convert the given character to a single Glyph object. * Note that this function assumes that there is a one-to-one mapping between * the given character and a glyph; for complex scripts this might not be the case. * @param {string} * @return {opentype.Glyph} */ Font.prototype.charToGlyph = function(c) { var glyphIndex = this.charToGlyphIndex(c); var glyph = this.glyphs.get(glyphIndex); if (!glyph) { // .notdef glyph = this.glyphs.get(0); } return glyph; }; /** * Convert the given text to a list of Glyph objects. * Note that there is no strict one-to-one mapping between characters and * glyphs, so the list of returned glyphs can be larger or smaller than the * length of the given string. 
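 *
 * For example (hypothetical, assuming `font` is a loaded opentype.Font):
 *
 *     var glyphs = font.stringToGlyphs('ffi');
 *     // This implementation maps characters one by one, so three glyphs are
 *     // returned here; shaping that applies ligatures could return fewer.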
* @param {string} * @return {opentype.Glyph[]} */ Font.prototype.stringToGlyphs = function(s) { var glyphs = []; for (var i = 0; i < s.length; i += 1) { var c = s[i]; glyphs.push(this.charToGlyph(c)); } return glyphs; }; /** * @param {string} * @return {Number} */ Font.prototype.nameToGlyphIndex = function(name) { return this.glyphNames.nameToGlyphIndex(name); }; /** * @param {string} * @return {opentype.Glyph} */ Font.prototype.nameToGlyph = function(name) { var glyphIndex = this.nameToGlyphIndex(name); var glyph = this.glyphs.get(glyphIndex); if (!glyph) { // .notdef glyph = this.glyphs.get(0); } return glyph; }; /** * @param {Number} * @return {String} */ Font.prototype.glyphIndexToName = function(gid) { if (!this.glyphNames.glyphIndexToName) { return ''; } return this.glyphNames.glyphIndexToName(gid); }; /** * Retrieve the value of the kerning pair between the left glyph (or its index) * and the right glyph (or its index). If no kerning pair is found, return 0. * The kerning value gets added to the advance width when calculating the spacing * between glyphs. * @param {opentype.Glyph} leftGlyph * @param {opentype.Glyph} rightGlyph * @return {Number} */ Font.prototype.getKerningValue = function(leftGlyph, rightGlyph) { leftGlyph = leftGlyph.index || leftGlyph; rightGlyph = rightGlyph.index || rightGlyph; var gposKerning = this.getGposKerningValue; return gposKerning ? gposKerning(leftGlyph, rightGlyph) : (this.kerningPairs[leftGlyph + ',' + rightGlyph] || 0); }; /** * @typedef GlyphRenderOptions * @type Object * @property {boolean} [kerning] - whether to include kerning values */ /** * Helper function that invokes the given callback for each glyph in the given text. * The callback gets `(glyph, x, y, fontSize, options)`.* @param {string} text * @param {string} text - The text to apply. * @param {number} [x=0] - Horizontal position of the beginning of the text. * @param {number} [y=0] - Vertical position of the *baseline* of the text. * @param {number} [fontSize=72] - Font size in pixels. We scale the glyph units by `1 / unitsPerEm * fontSize`. * @param {GlyphRenderOptions=} options * @param {Function} callback */ Font.prototype.forEachGlyph = function(text, x, y, fontSize, options, callback) { x = x !== undefined ? x : 0; y = y !== undefined ? y : 0; fontSize = fontSize !== undefined ? fontSize : 72; options = options || {}; var kerning = options.kerning === undefined ? true : options.kerning; var fontScale = 1 / this.unitsPerEm * fontSize; var glyphs = this.stringToGlyphs(text); for (var i = 0; i < glyphs.length; i += 1) { var glyph = glyphs[i]; callback(glyph, x, y, fontSize, options); if (glyph.advanceWidth) { x += glyph.advanceWidth * fontScale; } if (kerning && i < glyphs.length - 1) { var kerningValue = this.getKerningValue(glyph, glyphs[i + 1]); x += kerningValue * fontScale; } if (options.letterSpacing) { x += options.letterSpacing * fontSize; } else if (options.tracking) { x += (options.tracking / 1000) * fontSize; } } }; /** * Create a Path object that represents the given text. * @param {string} text - The text to create. * @param {number} [x=0] - Horizontal position of the beginning of the text. * @param {number} [y=0] - Vertical position of the *baseline* of the text. * @param {number} [fontSize=72] - Font size in pixels. We scale the glyph units by `1 / unitsPerEm * fontSize`. 
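 *
 * A usage sketch (hypothetical names; `ctx` is a canvas 2D rendering context):
 *
 *     var p = font.getPath('Hello, World!', 0, 150, 72);
 *     p.draw(ctx);
 *
 * Per glyph, the pen advances by advanceWidth * (1 / unitsPerEm * fontSize);
 * e.g. an advanceWidth of 600 at unitsPerEm 1000 and fontSize 72 moves x by 43.2px.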
* @param {GlyphRenderOptions=} options * @return {opentype.Path} */ Font.prototype.getPath = function(text, x, y, fontSize, options) { var fullPath = new path.Path(); this.forEachGlyph(text, x, y, fontSize, options, function(glyph, gX, gY, gFontSize) { var glyphPath = glyph.getPath(gX, gY, gFontSize); fullPath.extend(glyphPath); }); return fullPath; }; /** * Create an array of Path objects that represent the glyps of a given text. * @param {string} text - The text to create. * @param {number} [x=0] - Horizontal position of the beginning of the text. * @param {number} [y=0] - Vertical position of the *baseline* of the text. * @param {number} [fontSize=72] - Font size in pixels. We scale the glyph units by `1 / unitsPerEm * fontSize`. * @param {GlyphRenderOptions=} options * @return {opentype.Path[]} */ Font.prototype.getPaths = function(text, x, y, fontSize, options) { var glyphPaths = []; this.forEachGlyph(text, x, y, fontSize, options, function(glyph, gX, gY, gFontSize) { var glyphPath = glyph.getPath(gX, gY, gFontSize); glyphPaths.push(glyphPath); }); return glyphPaths; }; /** * Draw the text on the given drawing context. * @param {CanvasRenderingContext2D} ctx - A 2D drawing context, like Canvas. * @param {string} text - The text to create. * @param {number} [x=0] - Horizontal position of the beginning of the text. * @param {number} [y=0] - Vertical position of the *baseline* of the text. * @param {number} [fontSize=72] - Font size in pixels. We scale the glyph units by `1 / unitsPerEm * fontSize`. * @param {GlyphRenderOptions=} options */ Font.prototype.draw = function(ctx, text, x, y, fontSize, options) { this.getPath(text, x, y, fontSize, options).draw(ctx); }; /** * Draw the points of all glyphs in the text. * On-curve points will be drawn in blue, off-curve points will be drawn in red. * @param {CanvasRenderingContext2D} ctx - A 2D drawing context, like Canvas. * @param {string} text - The text to create. * @param {number} [x=0] - Horizontal position of the beginning of the text. * @param {number} [y=0] - Vertical position of the *baseline* of the text. * @param {number} [fontSize=72] - Font size in pixels. We scale the glyph units by `1 / unitsPerEm * fontSize`. * @param {GlyphRenderOptions=} options */ Font.prototype.drawPoints = function(ctx, text, x, y, fontSize, options) { this.forEachGlyph(text, x, y, fontSize, options, function(glyph, gX, gY, gFontSize) { glyph.drawPoints(ctx, gX, gY, gFontSize); }); }; /** * Draw lines indicating important font measurements for all glyphs in the text. * Black lines indicate the origin of the coordinate system (point 0,0). * Blue lines indicate the glyph bounding box. * Green line indicates the advance width of the glyph. * @param {CanvasRenderingContext2D} ctx - A 2D drawing context, like Canvas. * @param {string} text - The text to create. * @param {number} [x=0] - Horizontal position of the beginning of the text. * @param {number} [y=0] - Vertical position of the *baseline* of the text. * @param {number} [fontSize=72] - Font size in pixels. We scale the glyph units by `1 / unitsPerEm * fontSize`. 
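 *
 * For example (hypothetical): font.drawMetrics(ctx, 'g', 10, 150, 200) overlays,
 * for each glyph of 'g', the coordinate origin (black), the bounding box (blue)
 * and the advance width (green) described above.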
* @param {GlyphRenderOptions=} options */ Font.prototype.drawMetrics = function(ctx, text, x, y, fontSize, options) { this.forEachGlyph(text, x, y, fontSize, options, function(glyph, gX, gY, gFontSize) { glyph.drawMetrics(ctx, gX, gY, gFontSize); }); }; /** * @param {string} * @return {string} */ Font.prototype.getEnglishName = function(name) { var translations = this.names[name]; if (translations) { return translations.en; } }; /** * Validate */ Font.prototype.validate = function() { var warnings = []; var _this = this; function assert(predicate, message) { if (!predicate) { warnings.push(message); } } function assertNamePresent(name) { var englishName = _this.getEnglishName(name); assert(englishName && englishName.trim().length > 0, 'No English ' + name + ' specified.'); } // Identification information assertNamePresent('fontFamily'); assertNamePresent('weightName'); assertNamePresent('manufacturer'); assertNamePresent('copyright'); assertNamePresent('version'); // Dimension information assert(this.unitsPerEm > 0, 'No unitsPerEm specified.'); }; /** * Convert the font object to a SFNT data structure. * This structure contains all the necessary tables and metadata to create a binary OTF file. * @return {opentype.Table} */ Font.prototype.toTables = function() { return sfnt.fontToTable(this); }; /** * @deprecated Font.toBuffer is deprecated. Use Font.toArrayBuffer instead. */ Font.prototype.toBuffer = function() { console.warn('Font.toBuffer is deprecated. Use Font.toArrayBuffer instead.'); return this.toArrayBuffer(); }; /** * Converts a `opentype.Font` into an `ArrayBuffer` * @return {ArrayBuffer} */ Font.prototype.toArrayBuffer = function() { var sfntTable = this.toTables(); var bytes = sfntTable.encode(); var buffer = new ArrayBuffer(bytes.length); var intArray = new Uint8Array(buffer); for (var i = 0; i < bytes.length; i++) { intArray[i] = bytes[i]; } return buffer; }; /** * Initiate a download of the OpenType font. */ Font.prototype.download = function(fileName) { var familyName = this.getEnglishName('fontFamily'); var styleName = this.getEnglishName('fontSubfamily'); fileName = fileName || familyName.replace(/\s/g, '') + '-' + styleName + '.otf'; var arrayBuffer = this.toArrayBuffer(); if (util.isBrowser()) { window.requestFileSystem = window.requestFileSystem || window.webkitRequestFileSystem; window.requestFileSystem(window.TEMPORARY, arrayBuffer.byteLength, function(fs) { fs.root.getFile(fileName, {create: true}, function(fileEntry) { fileEntry.createWriter(function(writer) { var dataView = new DataView(arrayBuffer); var blob = new Blob([dataView], {type: 'font/opentype'}); writer.write(blob); writer.addEventListener('writeend', function() { // Navigating to the file will download it. 
                        location.href = fileEntry.toURL();
                    }, false);
                });
            });
        }, function(err) {
            throw new Error(err.name + ': ' + err.message);
        });
    } else {
        var fs = require('fs');
        var buffer = util.arrayBufferToNodeBuffer(arrayBuffer);
        fs.writeFileSync(fileName, buffer);
    }
};

/**
 * @private
 */
Font.prototype.fsSelectionValues = {
    ITALIC: 0x001, //1
    UNDERSCORE: 0x002, //2
    NEGATIVE: 0x004, //4
    OUTLINED: 0x008, //8
    STRIKEOUT: 0x010, //16
    BOLD: 0x020, //32
    REGULAR: 0x040, //64
    USER_TYPO_METRICS: 0x080, //128
    WWS: 0x100, //256
    OBLIQUE: 0x200 //512
};

/**
 * @private
 */
Font.prototype.usWidthClasses = {
    ULTRA_CONDENSED: 1,
    EXTRA_CONDENSED: 2,
    CONDENSED: 3,
    SEMI_CONDENSED: 4,
    MEDIUM: 5,
    SEMI_EXPANDED: 6,
    EXPANDED: 7,
    EXTRA_EXPANDED: 8,
    ULTRA_EXPANDED: 9
};

/**
 * @private
 */
Font.prototype.usWeightClasses = {
    THIN: 100,
    EXTRA_LIGHT: 200,
    LIGHT: 300,
    NORMAL: 400,
    MEDIUM: 500,
    SEMI_BOLD: 600,
    BOLD: 700,
    EXTRA_BOLD: 800,
    BLACK: 900
};

exports.Font = Font;

},{"./encoding":4,"./glyphset":7,"./path":11,"./substitution":12,"./tables/sfnt":31,"./util":33,"fs":undefined}],6:[function(require,module,exports){
// The Glyph object
'use strict';

var check = require('./check');
var draw = require('./draw');
var path = require('./path');

function getPathDefinition(glyph, path) {
    var _path = path || { commands: [] };
    return {
        configurable: true,

        get: function() {
            if (typeof _path === 'function') {
                _path = _path();
            }

            return _path;
        },

        set: function(p) {
            _path = p;
        }
    };
}

/**
 * @typedef GlyphOptions
 * @type Object
 * @property {string} [name] - The glyph name
 * @property {number} [unicode]
 * @property {Array} [unicodes]
 * @property {number} [xMin]
 * @property {number} [yMin]
 * @property {number} [xMax]
 * @property {number} [yMax]
 * @property {number} [advanceWidth]
 */

// A Glyph is an individual mark that often corresponds to a character.
// Some glyphs, such as ligatures, are a combination of many characters.
// Glyphs are the basic building blocks of a font.
//
// The `Glyph` class contains utility methods for drawing the path and its points.
/**
 * @exports opentype.Glyph
 * @class
 * @param {GlyphOptions}
 * @constructor
 */
function Glyph(options) {
    // By putting all the code on a prototype function (which is only declared once)
    // we reduce the memory requirements for larger fonts by some 2%
    this.bindConstructorValues(options);
}

/**
 * @param {GlyphOptions}
 */
Glyph.prototype.bindConstructorValues = function(options) {
    this.index = options.index || 0;

    // These three values cannot be deferred for memory optimization:
    this.name = options.name || null;
    this.unicode = options.unicode || undefined;
    // Keep an explicitly provided unicodes array; otherwise derive it from the single unicode value.
    this.unicodes = options.unicodes || (options.unicode !== undefined ? [options.unicode] : []);

    // But by binding these values only when necessary, we can reduce
    // the memory requirements by almost 3% for larger fonts.
    if (options.xMin) {
        this.xMin = options.xMin;
    }

    if (options.yMin) {
        this.yMin = options.yMin;
    }

    if (options.xMax) {
        this.xMax = options.xMax;
    }

    if (options.yMax) {
        this.yMax = options.yMax;
    }

    if (options.advanceWidth) {
        this.advanceWidth = options.advanceWidth;
    }

    // The path for a glyph is the most memory intensive, and is bound as a value
    // with a getter/setter to ensure we actually do path parsing only once the
    // path is actually needed by anything.
Object.defineProperty(this, 'path', getPathDefinition(this, options.path)); }; /** * @param {number} */ Glyph.prototype.addUnicode = function(unicode) { if (this.unicodes.length === 0) { this.unicode = unicode; } this.unicodes.push(unicode); }; /** * Convert the glyph to a Path we can draw on a drawing context. * @param {number} [x=0] - Horizontal position of the beginning of the text. * @param {number} [y=0] - Vertical position of the *baseline* of the text. * @param {number} [fontSize=72] - Font size in pixels. We scale the glyph units by `1 / unitsPerEm * fontSize`. * @param {Object=} options - xScale, yScale to strech the glyph. * @return {opentype.Path} */ Glyph.prototype.getPath = function(x, y, fontSize, options) { x = x !== undefined ? x : 0; y = y !== undefined ? y : 0; options = options !== undefined ? options : {xScale: 1.0, yScale: 1.0}; fontSize = fontSize !== undefined ? fontSize : 72; var scale = 1 / this.path.unitsPerEm * fontSize; var xScale = options.xScale * scale; var yScale = options.yScale * scale; var p = new path.Path(); var commands = this.path.commands; for (var i = 0; i < commands.length; i += 1) { var cmd = commands[i]; if (cmd.type === 'M') { p.moveTo(x + (cmd.x * xScale), y + (-cmd.y * yScale)); } else if (cmd.type === 'L') { p.lineTo(x + (cmd.x * xScale), y + (-cmd.y * yScale)); } else if (cmd.type === 'Q') { p.quadraticCurveTo(x + (cmd.x1 * xScale), y + (-cmd.y1 * yScale), x + (cmd.x * xScale), y + (-cmd.y * yScale)); } else if (cmd.type === 'C') { p.curveTo(x + (cmd.x1 * xScale), y + (-cmd.y1 * yScale), x + (cmd.x2 * xScale), y + (-cmd.y2 * yScale), x + (cmd.x * xScale), y + (-cmd.y * yScale)); } else if (cmd.type === 'Z') { p.closePath(); } } return p; }; /** * Split the glyph into contours. * This function is here for backwards compatibility, and to * provide raw access to the TrueType glyph outlines. * @return {Array} */ Glyph.prototype.getContours = function() { if (this.points === undefined) { return []; } var contours = []; var currentContour = []; for (var i = 0; i < this.points.length; i += 1) { var pt = this.points[i]; currentContour.push(pt); if (pt.lastPointOfContour) { contours.push(currentContour); currentContour = []; } } check.argument(currentContour.length === 0, 'There are still points left in the current contour.'); return contours; }; /** * Calculate the xMin/yMin/xMax/yMax/lsb/rsb for a Glyph. * @return {Object} */ Glyph.prototype.getMetrics = function() { var commands = this.path.commands; var xCoords = []; var yCoords = []; for (var i = 0; i < commands.length; i += 1) { var cmd = commands[i]; if (cmd.type !== 'Z') { xCoords.push(cmd.x); yCoords.push(cmd.y); } if (cmd.type === 'Q' || cmd.type === 'C') { xCoords.push(cmd.x1); yCoords.push(cmd.y1); } if (cmd.type === 'C') { xCoords.push(cmd.x2); yCoords.push(cmd.y2); } } var metrics = { xMin: Math.min.apply(null, xCoords), yMin: Math.min.apply(null, yCoords), xMax: Math.max.apply(null, xCoords), yMax: Math.max.apply(null, yCoords), leftSideBearing: this.leftSideBearing }; if (!isFinite(metrics.xMin)) { metrics.xMin = 0; } if (!isFinite(metrics.xMax)) { metrics.xMax = this.advanceWidth; } if (!isFinite(metrics.yMin)) { metrics.yMin = 0; } if (!isFinite(metrics.yMax)) { metrics.yMax = 0; } metrics.rightSideBearing = this.advanceWidth - metrics.leftSideBearing - (metrics.xMax - metrics.xMin); return metrics; }; /** * Draw the glyph on the given context. * @param {CanvasRenderingContext2D} ctx - A 2D drawing context, like Canvas. 
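 *
 * Note: glyph outlines use a y-axis that points up, while canvas y grows downward;
 * getPath() above therefore negates the y coordinates (e.g. `-cmd.y * yScale`)
 * so the glyph renders upright at the given baseline.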
* @param {number} [x=0] - Horizontal position of the beginning of the text. * @param {number} [y=0] - Vertical position of the *baseline* of the text. * @param {number} [fontSize=72] - Font size in pixels. We scale the glyph units by `1 / unitsPerEm * fontSize`. * @param {Object=} options - xScale, yScale to strech the glyph. */ Glyph.prototype.draw = function(ctx, x, y, fontSize, options) { this.getPath(x, y, fontSize, options).draw(ctx); }; /** * Draw the points of the glyph. * On-curve points will be drawn in blue, off-curve points will be drawn in red. * @param {CanvasRenderingContext2D} ctx - A 2D drawing context, like Canvas. * @param {number} [x=0] - Horizontal position of the beginning of the text. * @param {number} [y=0] - Vertical position of the *baseline* of the text. * @param {number} [fontSize=72] - Font size in pixels. We scale the glyph units by `1 / unitsPerEm * fontSize`. */ Glyph.prototype.drawPoints = function(ctx, x, y, fontSize) { function drawCircles(l, x, y, scale) { var PI_SQ = Math.PI * 2; ctx.beginPath(); for (var j = 0; j < l.length; j += 1) { ctx.moveTo(x + (l[j].x * scale), y + (l[j].y * scale)); ctx.arc(x + (l[j].x * scale), y + (l[j].y * scale), 2, 0, PI_SQ, false); } ctx.closePath(); ctx.fill(); } x = x !== undefined ? x : 0; y = y !== undefined ? y : 0; fontSize = fontSize !== undefined ? fontSize : 24; var scale = 1 / this.path.unitsPerEm * fontSize; var blueCircles = []; var redCircles = []; var path = this.path; for (var i = 0; i < path.commands.length; i += 1) { var cmd = path.commands[i]; if (cmd.x !== undefined) { blueCircles.push({x: cmd.x, y: -cmd.y}); } if (cmd.x1 !== undefined) { redCircles.push({x: cmd.x1, y: -cmd.y1}); } if (cmd.x2 !== undefined) { redCircles.push({x: cmd.x2, y: -cmd.y2}); } } ctx.fillStyle = 'blue'; drawCircles(blueCircles, x, y, scale); ctx.fillStyle = 'red'; drawCircles(redCircles, x, y, scale); }; /** * Draw lines indicating important font measurements. * Black lines indicate the origin of the coordinate system (point 0,0). * Blue lines indicate the glyph bounding box. * Green line indicates the advance width of the glyph. * @param {CanvasRenderingContext2D} ctx - A 2D drawing context, like Canvas. * @param {number} [x=0] - Horizontal position of the beginning of the text. * @param {number} [y=0] - Vertical position of the *baseline* of the text. * @param {number} [fontSize=72] - Font size in pixels. We scale the glyph units by `1 / unitsPerEm * fontSize`. */ Glyph.prototype.drawMetrics = function(ctx, x, y, fontSize) { var scale; x = x !== undefined ? x : 0; y = y !== undefined ? y : 0; fontSize = fontSize !== undefined ? fontSize : 24; scale = 1 / this.path.unitsPerEm * fontSize; ctx.lineWidth = 1; // Draw the origin ctx.strokeStyle = 'black'; draw.line(ctx, x, -10000, x, 10000); draw.line(ctx, -10000, y, 10000, y); // This code is here due to memory optimization: by not using // defaults in the constructor, we save a notable amount of memory. 
var xMin = this.xMin || 0; var yMin = this.yMin || 0; var xMax = this.xMax || 0; var yMax = this.yMax || 0; var advanceWidth = this.advanceWidth || 0; // Draw the glyph box ctx.strokeStyle = 'blue'; draw.line(ctx, x + (xMin * scale), -10000, x + (xMin * scale), 10000); draw.line(ctx, x + (xMax * scale), -10000, x + (xMax * scale), 10000); draw.line(ctx, -10000, y + (-yMin * scale), 10000, y + (-yMin * scale)); draw.line(ctx, -10000, y + (-yMax * scale), 10000, y + (-yMax * scale)); // Draw the advance width ctx.strokeStyle = 'green'; draw.line(ctx, x + (advanceWidth * scale), -10000, x + (advanceWidth * scale), 10000); }; exports.Glyph = Glyph; },{"./check":2,"./draw":3,"./path":11}],7:[function(require,module,exports){ // The GlyphSet object 'use strict'; var _glyph = require('./glyph'); // Define a property on the glyph that depends on the path being loaded. function defineDependentProperty(glyph, externalName, internalName) { Object.defineProperty(glyph, externalName, { get: function() { // Request the path property to make sure the path is loaded. glyph.path; // jshint ignore:line return glyph[internalName]; }, set: function(newValue) { glyph[internalName] = newValue; }, enumerable: true, configurable: true }); } /** * A GlyphSet represents all glyphs available in the font, but modelled using * a deferred glyph loader, for retrieving glyphs only once they are absolutely * necessary, to keep the memory footprint down. * @exports opentype.GlyphSet * @class * @param {opentype.Font} * @param {Array} */ function GlyphSet(font, glyphs) { this.font = font; this.glyphs = {}; if (Array.isArray(glyphs)) { for (var i = 0; i < glyphs.length; i++) { this.glyphs[i] = glyphs[i]; } } this.length = (glyphs && glyphs.length) || 0; } /** * @param {number} index * @return {opentype.Glyph} */ GlyphSet.prototype.get = function(index) { if (typeof this.glyphs[index] === 'function') { this.glyphs[index] = this.glyphs[index](); } return this.glyphs[index]; }; /** * @param {number} index * @param {Object} */ GlyphSet.prototype.push = function(index, loader) { this.glyphs[index] = loader; this.length++; }; /** * @alias opentype.glyphLoader * @param {opentype.Font} font * @param {number} index * @return {opentype.Glyph} */ function glyphLoader(font, index) { return new _glyph.Glyph({index: index, font: font}); } /** * Generate a stub glyph that can be filled with all metadata *except* * the "points" and "path" properties, which must be loaded only once * the glyph's path is actually requested for text shaping. 
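 *
 * A usage sketch (hypothetical; the glyf table parser is assumed to register one
 * loader per glyph index):
 *
 *     glyphs.push(i, ttfGlyphLoader(font, i, parseGlyph, data, offset, buildPath));
 *     // GlyphSet.get(i) later invokes the loader, which parses the outline once
 *     // and caches the resulting Glyph.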
* @alias opentype.ttfGlyphLoader * @param {opentype.Font} font * @param {number} index * @param {Function} parseGlyph * @param {Object} data * @param {number} position * @param {Function} buildPath * @return {opentype.Glyph} */ function ttfGlyphLoader(font, index, parseGlyph, data, position, buildPath) { return function() { var glyph = new _glyph.Glyph({index: index, font: font}); glyph.path = function() { parseGlyph(glyph, data, position); var path = buildPath(font.glyphs, glyph); path.unitsPerEm = font.unitsPerEm; return path; }; defineDependentProperty(glyph, 'xMin', '_xMin'); defineDependentProperty(glyph, 'xMax', '_xMax'); defineDependentProperty(glyph, 'yMin', '_yMin'); defineDependentProperty(glyph, 'yMax', '_yMax'); return glyph; }; } /** * @alias opentype.cffGlyphLoader * @param {opentype.Font} font * @param {number} index * @param {Function} parseCFFCharstring * @param {string} charstring * @return {opentype.Glyph} */ function cffGlyphLoader(font, index, parseCFFCharstring, charstring) { return function() { var glyph = new _glyph.Glyph({index: index, font: font}); glyph.path = function() { var path = parseCFFCharstring(font, glyph, charstring); path.unitsPerEm = font.unitsPerEm; return path; }; return glyph; }; } exports.GlyphSet = GlyphSet; exports.glyphLoader = glyphLoader; exports.ttfGlyphLoader = ttfGlyphLoader; exports.cffGlyphLoader = cffGlyphLoader; },{"./glyph":6}],8:[function(require,module,exports){ // The Layout object is the prototype of Substition objects, and provides utility methods to manipulate // common layout tables (GPOS, GSUB, GDEF...) 'use strict'; var check = require('./check'); function searchTag(arr, tag) { /* jshint bitwise: false */ var imin = 0; var imax = arr.length - 1; while (imin <= imax) { var imid = (imin + imax) >>> 1; var val = arr[imid].tag; if (val === tag) { return imid; } else if (val < tag) { imin = imid + 1; } else { imax = imid - 1; } } // Not found: return -1-insertion point return -imin - 1; } function binSearch(arr, value) { /* jshint bitwise: false */ var imin = 0; var imax = arr.length - 1; while (imin <= imax) { var imid = (imin + imax) >>> 1; var val = arr[imid]; if (val === value) { return imid; } else if (val < value) { imin = imid + 1; } else { imax = imid - 1; } } // Not found: return -1-insertion point return -imin - 1; } /** * @exports opentype.Layout * @class */ var Layout = { /** * Binary search an object by "tag" property * @instance * @function searchTag * @memberof opentype.Layout * @param {Array} arr * @param {string} tag * @return {number} */ searchTag: searchTag, /** * Binary search in a list of numbers * @instance * @function binSearch * @memberof opentype.Layout * @param {Array} arr * @param {number} value * @return {number} */ binSearch: binSearch, /** * Returns all scripts in the substitution table. * @instance * @return {Array} */ getScriptNames: function() { var gsub = this.getGsubTable(); if (!gsub) { return []; } return gsub.scripts.map(function(script) { return script.tag; }); }, /** * Returns all LangSysRecords in the given script. * @instance * @param {string} script - Use 'DFLT' for default script * @param {boolean} create - forces the creation of this script table if it doesn't exist. * @return {Object} An object with tag and script properties. 
*/ getScriptTable: function(script, create) { var gsub = this.getGsubTable(create); if (gsub) { var scripts = gsub.scripts; var pos = searchTag(gsub.scripts, script); if (pos >= 0) { return scripts[pos].script; } else { var scr = { tag: script, script: { defaultLangSys: { reserved: 0, reqFeatureIndex: 0xffff, featureIndexes: [] }, langSysRecords: [] } }; scripts.splice(-1 - pos, 0, scr.script); return scr; } } }, /** * Returns a language system table * @instance * @param {string} script - Use 'DFLT' for default script * @param {string} language - Use 'DFLT' for default language * @param {boolean} create - forces the creation of this langSysTable if it doesn't exist. * @return {Object} */ getLangSysTable: function(script, language, create) { var scriptTable = this.getScriptTable(script, create); if (scriptTable) { if (language === 'DFLT') { return scriptTable.defaultLangSys; } var pos = searchTag(scriptTable.langSysRecords, language); if (pos >= 0) { return scriptTable.langSysRecords[pos].langSys; } else if (create) { var langSysRecord = { tag: language, langSys: { reserved: 0, reqFeatureIndex: 0xffff, featureIndexes: [] } }; scriptTable.langSysRecords.splice(-1 - pos, 0, langSysRecord); return langSysRecord.langSys; } } }, /** * Get a specific feature table. * @instance * @param {string} script - Use 'DFLT' for default script * @param {string} language - Use 'DFLT' for default language * @param {string} feature - One of the codes listed at https://www.microsoft.com/typography/OTSPEC/featurelist.htm * @param {boolean} create - forces the creation of the feature table if it doesn't exist. * @return {Object} */ getFeatureTable: function(script, language, feature, create) { var langSysTable = this.getLangSysTable(script, language, create); if (langSysTable) { var featureRecord; var featIndexes = langSysTable.featureIndexes; var allFeatures = this.font.tables.gsub.features; // The FeatureIndex array of indices is in arbitrary order, // even if allFeatures is sorted alphabetically by feature tag. for (var i = 0; i < featIndexes.length; i++) { featureRecord = allFeatures[featIndexes[i]]; if (featureRecord.tag === feature) { return featureRecord.feature; } } if (create) { var index = allFeatures.length; // Automatic ordering of features would require to shift feature indexes in the script list. check.assert(index === 0 || feature >= allFeatures[index - 1].tag, 'Features must be added in alphabetical order.'); featureRecord = { tag: feature, feature: { params: 0, lookupListIndexes: [] } }; allFeatures.push(featureRecord); featIndexes.push(index); return featureRecord.feature; } } }, /** * Get the first lookup table of a given type for a script/language/feature. * @instance * @param {string} script - Use 'DFLT' for default script * @param {string} language - Use 'DFLT' for default language * @param {string} feature - 4-letter feature code * @param {number} lookupType - 1 to 8 * @param {boolean} create - forces the creation of the lookup table if it doesn't exist, with no subtables. * @return {Object} */ getLookupTable: function(script, language, feature, lookupType, create) { var featureTable = this.getFeatureTable(script, language, feature, create); if (featureTable) { var lookupTable; var lookupListIndexes = featureTable.lookupListIndexes; var allLookups = this.font.tables.gsub.lookups; // lookupListIndexes are in no particular order, so use naïve search. 
            for (var i = 0; i < lookupListIndexes.length; i++) {
                lookupTable = allLookups[lookupListIndexes[i]];
                if (lookupTable.lookupType === lookupType) {
                    return lookupTable;
                }
            }

            if (create) {
                lookupTable = {
                    lookupType: lookupType,
                    lookupFlag: 0,
                    subtables: [],
                    markFilteringSet: undefined
                };
                var index = allLookups.length;
                allLookups.push(lookupTable);
                lookupListIndexes.push(index);
                return lookupTable;
            }
        }
    },

    /**
     * Returns the list of glyph indexes of a coverage table.
     * Format 1: the list is stored raw
     * Format 2: compact list as range records.
     * @instance
     * @param {Object} coverageTable
     * @return {Array}
     */
    expandCoverage: function(coverageTable) {
        if (coverageTable.format === 1) {
            return coverageTable.glyphs;
        } else {
            var glyphs = [];
            var ranges = coverageTable.ranges;
            for (var i = 0; i < ranges.length; i++) {
                var range = ranges[i];
                var start = range.start;
                var end = range.end;
                for (var j = start; j <= end; j++) {
                    glyphs.push(j);
                }
            }

            return glyphs;
        }
    }
};

module.exports = Layout;

},{"./check":2}],9:[function(require,module,exports){
// opentype.js
// https://github.com/nodebox/opentype.js
// (c) 2015 Frederik De Bleser
// opentype.js may be freely distributed under the MIT license.

/* global DataView, Uint8Array, XMLHttpRequest */

'use strict';

var inflate = require('tiny-inflate');

var encoding = require('./encoding');
var _font = require('./font');
var glyph = require('./glyph');
var parse = require('./parse');
var path = require('./path');
var util = require('./util');

var cmap = require('./tables/cmap');
var cff = require('./tables/cff');
var fvar = require('./tables/fvar');
var glyf = require('./tables/glyf');
var gpos = require('./tables/gpos');
var gsub = require('./tables/gsub');
var head = require('./tables/head');
var hhea = require('./tables/hhea');
var hmtx = require('./tables/hmtx');
var kern = require('./tables/kern');
var ltag = require('./tables/ltag');
var loca = require('./tables/loca');
var maxp = require('./tables/maxp');
var _name = require('./tables/name');
var os2 = require('./tables/os2');
var post = require('./tables/post');
var meta = require('./tables/meta');

/**
 * The opentype library.
 * @namespace opentype
 */

// File loaders /////////////////////////////////////////////////////////
/**
 * Loads a font from a file. The callback gets an error message as the first parameter if it fails
 * and the font as an ArrayBuffer in the second parameter if it succeeds.
 * @param {string} path - The path of the file
 * @param {Function} callback - The function to call when the font load completes
 */
function loadFromFile(path, callback) {
    var fs = require('fs');
    fs.readFile(path, function(err, buffer) {
        if (err) {
            return callback(err.message);
        }

        callback(null, util.nodeBufferToArrayBuffer(buffer));
    });
}

/**
 * Loads a font from a URL. The callback gets an error message as the first parameter if it fails
 * and the font as an ArrayBuffer in the second parameter if it succeeds.
 * @param {string} url - The URL of the font file.
 * @param {Function} callback - The function to call when the font load completes
 */
function loadFromUrl(url, callback) {
    var request = new XMLHttpRequest();
    request.open('get', url, true);
    request.responseType = 'arraybuffer';
    request.onload = function() {
        if (request.status !== 200) {
            return callback('Font could not be loaded: ' + request.statusText);
        }

        return callback(null, request.response);
    };

    request.send();
}

// Table Directory Entries //////////////////////////////////////////////
/**
 * Parses OpenType table entries.
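 *
 * The sfnt table directory starts at byte 12, immediately after the 12-byte
 * offset table; each directory entry is 16 bytes (a 4-byte tag, then ULONG
 * checksum, offset and length), which is why the loop below advances p by 16.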
* @param {DataView} * @param {Number} * @return {Object[]} */ function parseOpenTypeTableEntries(data, numTables) { var tableEntries = []; var p = 12; for (var i = 0; i < numTables; i += 1) { var tag = parse.getTag(data, p); var checksum = parse.getULong(data, p + 4); var offset = parse.getULong(data, p + 8); var length = parse.getULong(data, p + 12); tableEntries.push({tag: tag, checksum: checksum, offset: offset, length: length, compression: false}); p += 16; } return tableEntries; } /** * Parses WOFF table entries. * @param {DataView} * @param {Number} * @return {Object[]} */ function parseWOFFTableEntries(data, numTables) { var tableEntries = []; var p = 44; // offset to the first table directory entry. for (var i = 0; i < numTables; i += 1) { var tag = parse.getTag(data, p); var offset = parse.getULong(data, p + 4); var compLength = parse.getULong(data, p + 8); var origLength = parse.getULong(data, p + 12); var compression; if (compLength < origLength) { compression = 'WOFF'; } else { compression = false; } tableEntries.push({tag: tag, offset: offset, compression: compression, compressedLength: compLength, originalLength: origLength}); p += 20; } return tableEntries; } /** * @typedef TableData * @type Object * @property {DataView} data - The DataView * @property {number} offset - The data offset. */ /** * @param {DataView} * @param {Object} * @return {TableData} */ function uncompressTable(data, tableEntry) { if (tableEntry.compression === 'WOFF') { var inBuffer = new Uint8Array(data.buffer, tableEntry.offset + 2, tableEntry.compressedLength - 2); var outBuffer = new Uint8Array(tableEntry.originalLength); inflate(inBuffer, outBuffer); if (outBuffer.byteLength !== tableEntry.originalLength) { throw new Error('Decompression error: ' + tableEntry.tag + ' decompressed length doesn\'t match recorded length'); } var view = new DataView(outBuffer.buffer, 0); return {data: view, offset: 0}; } else { return {data: data, offset: tableEntry.offset}; } } // Public API /////////////////////////////////////////////////////////// /** * Parse the OpenType file data (as an ArrayBuffer) and return a Font object. * Throws an error if the font could not be parsed. * @param {ArrayBuffer} * @return {opentype.Font} */ function parseBuffer(buffer) { var indexToLocFormat; var ltagTable; // Since the constructor can also be called to create new fonts from scratch, we indicate this // should be an empty font that we'll fill with our own data. var font = new _font.Font({empty: true}); // OpenType fonts use big endian byte ordering. // We can't rely on typed array view types, because they operate with the endianness of the host computer. // Instead we use DataViews where we can specify endianness. 
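    // For example, parse.getUShort(data, 4) below is DataView.getUint16(4, false):
    // it always reads bytes 4..5 as a big-endian uint16, regardless of the host
    // CPU's native byte order.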
var data = new DataView(buffer, 0); var numTables; var tableEntries = []; var signature = parse.getTag(data, 0); if (signature === String.fromCharCode(0, 1, 0, 0)) { font.outlinesFormat = 'truetype'; numTables = parse.getUShort(data, 4); tableEntries = parseOpenTypeTableEntries(data, numTables); } else if (signature === 'OTTO') { font.outlinesFormat = 'cff'; numTables = parse.getUShort(data, 4); tableEntries = parseOpenTypeTableEntries(data, numTables); } else if (signature === 'wOFF') { var flavor = parse.getTag(data, 4); if (flavor === String.fromCharCode(0, 1, 0, 0)) { font.outlinesFormat = 'truetype'; } else if (flavor === 'OTTO') { font.outlinesFormat = 'cff'; } else { throw new Error('Unsupported OpenType flavor ' + signature); } numTables = parse.getUShort(data, 12); tableEntries = parseWOFFTableEntries(data, numTables); } else { throw new Error('Unsupported OpenType signature ' + signature); } var cffTableEntry; var fvarTableEntry; var glyfTableEntry; var gposTableEntry; var gsubTableEntry; var hmtxTableEntry; var kernTableEntry; var locaTableEntry; var nameTableEntry; var metaTableEntry; for (var i = 0; i < numTables; i += 1) { var tableEntry = tableEntries[i]; var table; switch (tableEntry.tag) { case 'cmap': table = uncompressTable(data, tableEntry); font.tables.cmap = cmap.parse(table.data, table.offset); font.encoding = new encoding.CmapEncoding(font.tables.cmap); break; case 'fvar': fvarTableEntry = tableEntry; break; case 'head': table = uncompressTable(data, tableEntry); font.tables.head = head.parse(table.data, table.offset); font.unitsPerEm = font.tables.head.unitsPerEm; indexToLocFormat = font.tables.head.indexToLocFormat; break; case 'hhea': table = uncompressTable(data, tableEntry); font.tables.hhea = hhea.parse(table.data, table.offset); font.ascender = font.tables.hhea.ascender; font.descender = font.tables.hhea.descender; font.numberOfHMetrics = font.tables.hhea.numberOfHMetrics; break; case 'hmtx': hmtxTableEntry = tableEntry; break; case 'ltag': table = uncompressTable(data, tableEntry); ltagTable = ltag.parse(table.data, table.offset); break; case 'maxp': table = uncompressTable(data, tableEntry); font.tables.maxp = maxp.parse(table.data, table.offset); font.numGlyphs = font.tables.maxp.numGlyphs; break; case 'name': nameTableEntry = tableEntry; break; case 'OS/2': table = uncompressTable(data, tableEntry); font.tables.os2 = os2.parse(table.data, table.offset); break; case 'post': table = uncompressTable(data, tableEntry); font.tables.post = post.parse(table.data, table.offset); font.glyphNames = new encoding.GlyphNames(font.tables.post); break; case 'glyf': glyfTableEntry = tableEntry; break; case 'loca': locaTableEntry = tableEntry; break; case 'CFF ': cffTableEntry = tableEntry; break; case 'kern': kernTableEntry = tableEntry; break; case 'GPOS': gposTableEntry = tableEntry; break; case 'GSUB': gsubTableEntry = tableEntry; break; case 'meta': metaTableEntry = tableEntry; break; } } var nameTable = uncompressTable(data, nameTableEntry); font.tables.name = _name.parse(nameTable.data, nameTable.offset, ltagTable); font.names = font.tables.name; if (glyfTableEntry && locaTableEntry) { var shortVersion = indexToLocFormat === 0; var locaTable = uncompressTable(data, locaTableEntry); var locaOffsets = loca.parse(locaTable.data, locaTable.offset, font.numGlyphs, shortVersion); var glyfTable = uncompressTable(data, glyfTableEntry); font.glyphs = glyf.parse(glyfTable.data, glyfTable.offset, locaOffsets, font); } else if (cffTableEntry) { var cffTable = 
uncompressTable(data, cffTableEntry); cff.parse(cffTable.data, cffTable.offset, font); } else { throw new Error('Font doesn\'t contain TrueType or CFF outlines.'); } var hmtxTable = uncompressTable(data, hmtxTableEntry); hmtx.parse(hmtxTable.data, hmtxTable.offset, font.numberOfHMetrics, font.numGlyphs, font.glyphs); encoding.addGlyphNames(font); if (kernTableEntry) { var kernTable = uncompressTable(data, kernTableEntry); font.kerningPairs = kern.parse(kernTable.data, kernTable.offset); } else { font.kerningPairs = {}; } if (gposTableEntry) { var gposTable = uncompressTable(data, gposTableEntry); gpos.parse(gposTable.data, gposTable.offset, font); } if (gsubTableEntry) { var gsubTable = uncompressTable(data, gsubTableEntry); font.tables.gsub = gsub.parse(gsubTable.data, gsubTable.offset); } if (fvarTableEntry) { var fvarTable = uncompressTable(data, fvarTableEntry); font.tables.fvar = fvar.parse(fvarTable.data, fvarTable.offset, font.names); } if (metaTableEntry) { var metaTable = uncompressTable(data, metaTableEntry); font.tables.meta = meta.parse(metaTable.data, metaTable.offset); font.metas = font.tables.meta; } return font; } /** * Asynchronously load the font from a URL or a filesystem. When done, call the callback * with two arguments `(err, font)`. The `err` will be null on success, * the `font` is a Font object. * We use the node.js callback convention so that * opentype.js can integrate with frameworks like async.js. * @alias opentype.load * @param {string} url - The URL of the font to load. * @param {Function} callback - The callback. */ function load(url, callback) { var isNode = typeof window === 'undefined'; var loadFn = isNode ? loadFromFile : loadFromUrl; loadFn(url, function(err, arrayBuffer) { if (err) { return callback(err); } var font; try { font = parseBuffer(arrayBuffer); } catch (e) { return callback(e, null); } return callback(null, font); }); } /** * Synchronously load the font from a URL or file. * When done, returns the font object or throws an error. * @alias opentype.loadSync * @param {string} url - The URL of the font to load. * @return {opentype.Font} */ function loadSync(url) { var fs = require('fs'); var buffer = fs.readFileSync(url); return parseBuffer(util.nodeBufferToArrayBuffer(buffer)); } exports._parse = parse; exports.Font = _font.Font; exports.Glyph = glyph.Glyph; exports.Path = path.Path; exports.parse = parseBuffer; exports.load = load; exports.loadSync = loadSync; },{"./encoding":4,"./font":5,"./glyph":6,"./parse":10,"./path":11,"./tables/cff":14,"./tables/cmap":15,"./tables/fvar":16,"./tables/glyf":17,"./tables/gpos":18,"./tables/gsub":19,"./tables/head":20,"./tables/hhea":21,"./tables/hmtx":22,"./tables/kern":23,"./tables/loca":24,"./tables/ltag":25,"./tables/maxp":26,"./tables/meta":27,"./tables/name":28,"./tables/os2":29,"./tables/post":30,"./util":33,"fs":undefined,"tiny-inflate":1}],10:[function(require,module,exports){ // Parsing utility functions 'use strict'; var check = require('./check'); // Retrieve an unsigned byte from the DataView. exports.getByte = function getByte(dataView, offset) { return dataView.getUint8(offset); }; exports.getCard8 = exports.getByte; // Retrieve an unsigned 16-bit short from the DataView. // The value is stored in big endian. function getUShort(dataView, offset) { return dataView.getUint16(offset, false); } exports.getUShort = exports.getCard16 = getUShort; // Retrieve a signed 16-bit short from the DataView. // The value is stored in big endian. 
exports.getShort = function(dataView, offset) { return dataView.getInt16(offset, false); }; // Retrieve an unsigned 32-bit long from the DataView. // The value is stored in big endian. exports.getULong = function(dataView, offset) { return dataView.getUint32(offset, false); }; // Retrieve a 32-bit signed fixed-point number (16.16) from the DataView. // The value is stored in big endian. exports.getFixed = function(dataView, offset) { var decimal = dataView.getInt16(offset, false); var fraction = dataView.getUint16(offset + 2, false); return decimal + fraction / 65535; }; // Retrieve a 4-character tag from the DataView. // Tags are used to identify tables. exports.getTag = function(dataView, offset) { var tag = ''; for (var i = offset; i < offset + 4; i += 1) { tag += String.fromCharCode(dataView.getInt8(i)); } return tag; }; // Retrieve an offset from the DataView. // Offsets are 1 to 4 bytes in length, depending on the offSize argument. exports.getOffset = function(dataView, offset, offSize) { var v = 0; for (var i = 0; i < offSize; i += 1) { v <<= 8; v += dataView.getUint8(offset + i); } return v; }; // Retrieve a number of bytes from start offset to the end offset from the DataView. exports.getBytes = function(dataView, startOffset, endOffset) { var bytes = []; for (var i = startOffset; i < endOffset; i += 1) { bytes.push(dataView.getUint8(i)); } return bytes; }; // Convert the list of bytes to a string. exports.bytesToString = function(bytes) { var s = ''; for (var i = 0; i < bytes.length; i += 1) { s += String.fromCharCode(bytes[i]); } return s; }; var typeOffsets = { byte: 1, uShort: 2, short: 2, uLong: 4, fixed: 4, longDateTime: 8, tag: 4 }; // A stateful parser that changes the offset whenever a value is retrieved. // The data is a DataView. 
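// A minimal usage sketch (assuming `buffer` is an ArrayBuffer containing font data; offsets are illustrative):
//   var p = new Parser(new DataView(buffer), 0);
//   var version = p.parseVersion();   // reads 4 bytes, advances relativeOffset to 4
//   var numTables = p.parseUShort();  // reads 2 bytes, advances relativeOffset to 6
// Every parseXxx method reads at (offset + relativeOffset) and then advances relativeOffset.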
function Parser(data, offset) { this.data = data; this.offset = offset; this.relativeOffset = 0; } Parser.prototype.parseByte = function() { var v = this.data.getUint8(this.offset + this.relativeOffset); this.relativeOffset += 1; return v; }; Parser.prototype.parseChar = function() { var v = this.data.getInt8(this.offset + this.relativeOffset); this.relativeOffset += 1; return v; }; Parser.prototype.parseCard8 = Parser.prototype.parseByte; Parser.prototype.parseUShort = function() { var v = this.data.getUint16(this.offset + this.relativeOffset); this.relativeOffset += 2; return v; }; Parser.prototype.parseCard16 = Parser.prototype.parseUShort; Parser.prototype.parseSID = Parser.prototype.parseUShort; Parser.prototype.parseOffset16 = Parser.prototype.parseUShort; Parser.prototype.parseShort = function() { var v = this.data.getInt16(this.offset + this.relativeOffset); this.relativeOffset += 2; return v; }; Parser.prototype.parseF2Dot14 = function() { var v = this.data.getInt16(this.offset + this.relativeOffset) / 16384; this.relativeOffset += 2; return v; }; Parser.prototype.parseULong = function() { var v = exports.getULong(this.data, this.offset + this.relativeOffset); this.relativeOffset += 4; return v; }; Parser.prototype.parseFixed = function() { var v = exports.getFixed(this.data, this.offset + this.relativeOffset); this.relativeOffset += 4; return v; }; Parser.prototype.parseString = function(length) { var dataView = this.data; var offset = this.offset + this.relativeOffset; var string = ''; this.relativeOffset += length; for (var i = 0; i < length; i++) { string += String.fromCharCode(dataView.getUint8(offset + i)); } return string; }; Parser.prototype.parseTag = function() { return this.parseString(4); }; // LONGDATETIME is a 64-bit integer. // JavaScript and unix timestamps traditionally use 32 bits, so we // only take the last 32 bits. // Since those bits will be filled with zeros until 2038, we can safely ignore them. Parser.prototype.parseLongDateTime = function() { var v = exports.getULong(this.data, this.offset + this.relativeOffset + 4); // Subtract seconds between 01/01/1904 and 01/01/1970 // to convert Apple Mac timestamp to Standard Unix timestamp v -= 2082844800; this.relativeOffset += 8; return v; }; Parser.prototype.parseVersion = function() { var major = getUShort(this.data, this.offset + this.relativeOffset); // How to interpret the minor version is very vague in the spec. 0x5000 is 5, 0x1000 is 1 // This returns the correct number if minor = 0xN000 where N is 0-9 var minor = getUShort(this.data, this.offset + this.relativeOffset + 2); this.relativeOffset += 4; return major + minor / 0x1000 / 10; }; Parser.prototype.skip = function(type, amount) { if (amount === undefined) { amount = 1; } this.relativeOffset += typeOffsets[type] * amount; }; ///// Parsing lists and records /////////////////////////////// // Parse a list of 16 bit integers. The length of the list can be read from the stream // or provided as an argument. Parser.prototype.parseOffset16List = Parser.prototype.parseUShortList = function(count) { if (count === undefined) { count = this.parseUShort(); } var offsets = new Array(count); var dataView = this.data; var offset = this.offset + this.relativeOffset; for (var i = 0; i < count; i++) { offsets[i] = dataView.getUint16(offset); offset += 2; } this.relativeOffset += count * 2; return offsets; }; /** * Parse a list of items. * Record count is optional, if omitted it is read from the stream. * itemCallback is one of the Parser methods.
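 * For example (illustrative): parser.parseList(Parser.uShort) first reads a uShort count from the stream and then that many uShorts, while parser.parseList(3, Parser.tag) reads exactly three 4-character tags.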
*/ Parser.prototype.parseList = function(count, itemCallback) { if (!itemCallback) { itemCallback = count; count = this.parseUShort(); } var list = new Array(count); for (var i = 0; i < count; i++) { list[i] = itemCallback.call(this); } return list; }; /** * Parse a list of records. * Record count is optional, if omitted it is read from the stream. * Example of recordDescription: { sequenceIndex: Parser.uShort, lookupListIndex: Parser.uShort } */ Parser.prototype.parseRecordList = function(count, recordDescription) { // If the count argument is absent, read it from the stream. if (!recordDescription) { recordDescription = count; count = this.parseUShort(); } var records = new Array(count); var fields = Object.keys(recordDescription); for (var i = 0; i < count; i++) { var rec = {}; for (var j = 0; j < fields.length; j++) { var fieldName = fields[j]; var fieldType = recordDescription[fieldName]; rec[fieldName] = fieldType.call(this); } records[i] = rec; } return records; }; // Parse a data structure into an object // Example of description: { sequenceIndex: Parser.uShort, lookupListIndex: Parser.uShort } Parser.prototype.parseStruct = function(description) { if (typeof description === 'function') { return description.call(this); } else { var fields = Object.keys(description); var struct = {}; for (var j = 0; j < fields.length; j++) { var fieldName = fields[j]; var fieldType = description[fieldName]; struct[fieldName] = fieldType.call(this); } return struct; } }; Parser.prototype.parsePointer = function(description) { var structOffset = this.parseOffset16(); if (structOffset > 0) { // NULL offset => return undefined return new Parser(this.data, this.offset + structOffset).parseStruct(description); } }; /** * Parse a list of offsets to lists of 16-bit integers, * or a list of offsets to lists of offsets to any kind of items. * If itemCallback is not provided, a list of lists of UShorts is assumed. * If provided, itemCallback is called on each item and must parse the item. * See examples in tables/gsub.js */ Parser.prototype.parseListOfLists = function(itemCallback) { var offsets = this.parseOffset16List(); var count = offsets.length; var relativeOffset = this.relativeOffset; var list = new Array(count); for (var i = 0; i < count; i++) { var start = offsets[i]; if (start === 0) { // NULL offset list[i] = undefined; // Add i as owned property to list. Convenient with assert. continue; } this.relativeOffset = start; if (itemCallback) { var subOffsets = this.parseOffset16List(); var subList = new Array(subOffsets.length); for (var j = 0; j < subOffsets.length; j++) { this.relativeOffset = start + subOffsets[j]; subList[j] = itemCallback.call(this); } list[i] = subList; } else { list[i] = this.parseUShortList(); } } this.relativeOffset = relativeOffset; return list; }; ///// Complex tables parsing ////////////////////////////////// // Parse a coverage table in a GSUB, GPOS or GDEF table. // https://www.microsoft.com/typography/OTSPEC/chapter2.htm // parser.offset must point to the start of the table containing the coverage.
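// The returned object takes one of two shapes (glyph ids below are illustrative):
//   format 1: { format: 1, glyphs: [37, 38, 84] } (an explicit list of glyph ids)
//   format 2: { format: 2, ranges: [{ start: 37, end: 84, index: 0 }] } (ranges of consecutive glyph ids)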
Parser.prototype.parseCoverage = function() { var startOffset = this.offset + this.relativeOffset; var format = this.parseUShort(); var count = this.parseUShort(); if (format === 1) { return { format: 1, glyphs: this.parseUShortList(count) }; } else if (format === 2) { var ranges = new Array(count); for (var i = 0; i < count; i++) { ranges[i] = { start: this.parseUShort(), end: this.parseUShort(), index: this.parseUShort() }; } return { format: 2, ranges: ranges }; } check.assert(false, '0x' + startOffset.toString(16) + ': Coverage format must be 1 or 2.'); }; // Parse a Class Definition Table in a GSUB, GPOS or GDEF table. // https://www.microsoft.com/typography/OTSPEC/chapter2.htm Parser.prototype.parseClassDef = function() { var startOffset = this.offset + this.relativeOffset; var format = this.parseUShort(); if (format === 1) { return { format: 1, startGlyph: this.parseUShort(), classes: this.parseUShortList() }; } else if (format === 2) { return { format: 2, ranges: this.parseRecordList({ start: Parser.uShort, end: Parser.uShort, classId: Parser.uShort }) }; } check.assert(false, '0x' + startOffset.toString(16) + ': ClassDef format must be 1 or 2.'); }; ///// Static methods /////////////////////////////////// // These convenience methods can be used as callbacks and should be called with "this" context set to a Parser instance. Parser.list = function(count, itemCallback) { return function() { return this.parseList(count, itemCallback); }; }; Parser.recordList = function(count, recordDescription) { return function() { return this.parseRecordList(count, recordDescription); }; }; Parser.pointer = function(description) { return function() { return this.parsePointer(description); }; }; Parser.tag = Parser.prototype.parseTag; Parser.byte = Parser.prototype.parseByte; Parser.uShort = Parser.offset16 = Parser.prototype.parseUShort; Parser.uShortList = Parser.prototype.parseUShortList; Parser.struct = Parser.prototype.parseStruct; Parser.coverage = Parser.prototype.parseCoverage; Parser.classDef = Parser.prototype.parseClassDef; ///// Script, Feature, Lookup lists /////////////////////////////////////////////// // https://www.microsoft.com/typography/OTSPEC/chapter2.htm var langSysTable = { reserved: Parser.uShort, reqFeatureIndex: Parser.uShort, featureIndexes: Parser.uShortList }; Parser.prototype.parseScriptList = function() { return this.parsePointer(Parser.recordList({ tag: Parser.tag, script: Parser.pointer({ defaultLangSys: Parser.pointer(langSysTable), langSysRecords: Parser.recordList({ tag: Parser.tag, langSys: Parser.pointer(langSysTable) }) }) })); }; Parser.prototype.parseFeatureList = function() { return this.parsePointer(Parser.recordList({ tag: Parser.tag, feature: Parser.pointer({ featureParams: Parser.offset16, lookupListIndexes: Parser.uShortList }) })); }; Parser.prototype.parseLookupList = function(lookupTableParsers) { return this.parsePointer(Parser.list(Parser.pointer(function() { var lookupType = this.parseUShort(); check.argument(1 <= lookupType && lookupType <= 8, 'GSUB lookup type ' + lookupType + ' unknown.'); var lookupFlag = this.parseUShort(); var useMarkFilteringSet = lookupFlag & 0x10; return { lookupType: lookupType, lookupFlag: lookupFlag, subtables: this.parseList(Parser.pointer(lookupTableParsers[lookupType])), markFilteringSet: useMarkFilteringSet ? 
this.parseUShort() : undefined }; }))); }; exports.Parser = Parser; },{"./check":2}],11:[function(require,module,exports){ // Geometric objects 'use strict'; /** * A bézier path containing a set of path commands similar to a SVG path. * Paths can be drawn on a context using `draw`. * @exports opentype.Path * @class * @constructor */ function Path() { this.commands = []; this.fill = 'black'; this.stroke = null; this.strokeWidth = 1; } /** * @param {number} x * @param {number} y */ Path.prototype.moveTo = function(x, y) { this.commands.push({ type: 'M', x: x, y: y }); }; /** * @param {number} x * @param {number} y */ Path.prototype.lineTo = function(x, y) { this.commands.push({ type: 'L', x: x, y: y }); }; /** * Draws cubic curve * @function * curveTo * @memberof opentype.Path.prototype * @param {number} x1 - x of control 1 * @param {number} y1 - y of control 1 * @param {number} x2 - x of control 2 * @param {number} y2 - y of control 2 * @param {number} x - x of path point * @param {number} y - y of path point */ /** * Draws cubic curve * @function * bezierCurveTo * @memberof opentype.Path.prototype * @param {number} x1 - x of control 1 * @param {number} y1 - y of control 1 * @param {number} x2 - x of control 2 * @param {number} y2 - y of control 2 * @param {number} x - x of path point * @param {number} y - y of path point * @see curveTo */ Path.prototype.curveTo = Path.prototype.bezierCurveTo = function(x1, y1, x2, y2, x, y) { this.commands.push({ type: 'C', x1: x1, y1: y1, x2: x2, y2: y2, x: x, y: y }); }; /** * Draws quadratic curve * @function * quadraticCurveTo * @memberof opentype.Path.prototype * @param {number} x1 - x of control * @param {number} y1 - y of control * @param {number} x - x of path point * @param {number} y - y of path point */ /** * Draws quadratic curve * @function * quadTo * @memberof opentype.Path.prototype * @param {number} x1 - x of control * @param {number} y1 - y of control * @param {number} x - x of path point * @param {number} y - y of path point */ Path.prototype.quadTo = Path.prototype.quadraticCurveTo = function(x1, y1, x, y) { this.commands.push({ type: 'Q', x1: x1, y1: y1, x: x, y: y }); }; /** * Closes the path * @function closePath * @memberof opentype.Path.prototype */ /** * Close the path * @function close * @memberof opentype.Path.prototype */ Path.prototype.close = Path.prototype.closePath = function() { this.commands.push({ type: 'Z' }); }; /** * Add the given path or list of commands to the commands of this path. * @param {Array} */ Path.prototype.extend = function(pathOrCommands) { if (pathOrCommands.commands) { pathOrCommands = pathOrCommands.commands; } Array.prototype.push.apply(this.commands, pathOrCommands); }; /** * Draw the path to a 2D context. * @param {CanvasRenderingContext2D} ctx - A 2D drawing context. 
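 * For example (a sketch, assuming a canvas element named canvasElement is available): path.draw(canvasElement.getContext('2d')) renders the path using the current fill, stroke and strokeWidth settings.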
*/ Path.prototype.draw = function(ctx) { ctx.beginPath(); for (var i = 0; i < this.commands.length; i += 1) { var cmd = this.commands[i]; if (cmd.type === 'M') { ctx.moveTo(cmd.x, cmd.y); } else if (cmd.type === 'L') { ctx.lineTo(cmd.x, cmd.y); } else if (cmd.type === 'C') { ctx.bezierCurveTo(cmd.x1, cmd.y1, cmd.x2, cmd.y2, cmd.x, cmd.y); } else if (cmd.type === 'Q') { ctx.quadraticCurveTo(cmd.x1, cmd.y1, cmd.x, cmd.y); } else if (cmd.type === 'Z') { ctx.closePath(); } } if (this.fill) { ctx.fillStyle = this.fill; ctx.fill(); } if (this.stroke) { ctx.strokeStyle = this.stroke; ctx.lineWidth = this.strokeWidth; ctx.stroke(); } }; /** * Convert the Path to a string of path data instructions * See http://www.w3.org/TR/SVG/paths.html#PathData * @param {number} [decimalPlaces=2] - The number of decimal places for floating-point values * @return {string} */ Path.prototype.toPathData = function(decimalPlaces) { decimalPlaces = decimalPlaces !== undefined ? decimalPlaces : 2; function floatToString(v) { if (Math.round(v) === v) { return '' + Math.round(v); } else { return v.toFixed(decimalPlaces); } } function packValues() { var s = ''; for (var i = 0; i < arguments.length; i += 1) { var v = arguments[i]; if (v >= 0 && i > 0) { s += ' '; } s += floatToString(v); } return s; } var d = ''; for (var i = 0; i < this.commands.length; i += 1) { var cmd = this.commands[i]; if (cmd.type === 'M') { d += 'M' + packValues(cmd.x, cmd.y); } else if (cmd.type === 'L') { d += 'L' + packValues(cmd.x, cmd.y); } else if (cmd.type === 'C') { d += 'C' + packValues(cmd.x1, cmd.y1, cmd.x2, cmd.y2, cmd.x, cmd.y); } else if (cmd.type === 'Q') { d += 'Q' + packValues(cmd.x1, cmd.y1, cmd.x, cmd.y); } else if (cmd.type === 'Z') { d += 'Z'; } } return d; }; /** * Convert the path to an SVG <path> element, as a string. * @param {number} [decimalPlaces=2] - The number of decimal places for floating-point values * @return {string} */ Path.prototype.toSVG = function(decimalPlaces) { var svg = '<path d="'; svg += this.toPathData(decimalPlaces); svg += '"'; if (this.fill !== 'black') { if (this.fill === null) { svg += ' fill="none"'; } else { svg += ' fill="' + this.fill + '"'; } } if (this.stroke) { svg += ' stroke="' + this.stroke + '" stroke-width="' + this.strokeWidth + '"'; } svg += '/>'; return svg; }; exports.Path = Path; },{}],12:[function(require,module,exports){ // The Substitution object provides utility methods to manipulate // the GSUB substitution table. 'use strict'; var check = require('./check'); var Layout = require('./layout'); /** * @exports opentype.Substitution * @class * @extends opentype.Layout * @param {opentype.Font} font * @constructor */ var Substitution = function(font) { this.font = font; }; // Check if two arrays of primitives are equal. function arraysEqual(ar1, ar2) { var n = ar1.length; if (n !== ar2.length) { return false; } for (var i = 0; i < n; i++) { if (ar1[i] !== ar2[i]) { return false; } } return true; } // Find the first subtable of a lookup table in a particular format. function getSubstFormat(lookupTable, format, defaultSubtable) { var subtables = lookupTable.subtables; for (var i = 0; i < subtables.length; i++) { var subtable = subtables[i]; if (subtable.substFormat === format) { return subtable; } } if (defaultSubtable) { subtables.push(defaultSubtable); return defaultSubtable; } } Substitution.prototype = Layout; /** * Get or create the GSUB table. * @param {boolean} create - Whether to create a new one. * @return {Object} gsub - The GSUB table.
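 * For example (illustrative): substitution.getGsubTable(true) returns the font's existing GSUB table, or creates and returns an empty one containing only a DFLT script when the font has none.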
*/ Substitution.prototype.getGsubTable = function(create) { var gsub = this.font.tables.gsub; if (!gsub && create) { // Generate a default empty GSUB table with just a DFLT script and dflt lang sys. this.font.tables.gsub = gsub = { version: 1, scripts: [{ tag: 'DFLT', script: { defaultLangSys: { reserved: 0, reqFeatureIndex: 0xffff, featureIndexes: [] }, langSysRecords: [] } }], features: [], lookups: [] }; } return gsub; }; /** * List all single substitutions (lookup type 1) for a given script, language, and feature. * @param {string} script * @param {string} language * @param {string} feature - 4-character feature name ('aalt', 'salt', 'ss01'...) * @return {Array} substitutions - The list of substitutions. */ Substitution.prototype.getSingle = function(feature, script, language) { var substitutions = []; var lookupTable = this.getLookupTable(script, language, feature, 1); if (!lookupTable) { return substitutions; } var subtables = lookupTable.subtables; for (var i = 0; i < subtables.length; i++) { var subtable = subtables[i]; var glyphs = this.expandCoverage(subtable.coverage); var j; if (subtable.substFormat === 1) { var delta = subtable.deltaGlyphId; for (j = 0; j < glyphs.length; j++) { var glyph = glyphs[j]; substitutions.push({ sub: glyph, by: glyph + delta }); } } else { var substitute = subtable.substitute; for (j = 0; j < glyphs.length; j++) { substitutions.push({ sub: glyphs[j], by: substitute[j] }); } } } return substitutions; }; /** * List all alternates (lookup type 3) for a given script, language, and feature. * @param {string} script * @param {string} language * @param {string} feature - 4-character feature name ('aalt', 'salt'...) * @return {Array} alternates - The list of alternates */ Substitution.prototype.getAlternates = function(feature, script, language) { var alternates = []; var lookupTable = this.getLookupTable(script, language, feature, 3); if (!lookupTable) { return alternates; } var subtables = lookupTable.subtables; for (var i = 0; i < subtables.length; i++) { var subtable = subtables[i]; var glyphs = this.expandCoverage(subtable.coverage); var alternateSets = subtable.alternateSets; for (var j = 0; j < glyphs.length; j++) { alternates.push({ sub: glyphs[j], by: alternateSets[j] }); } } return alternates; }; /** * List all ligatures (lookup type 4) for a given script, language, and feature. * The result is an array of ligature objects like { sub: [ids], by: id } * @param {string} feature - 4-letter feature name ('liga', 'rlig', 'dlig'...) * @param {string} script * @param {string} language * @return {Array} ligatures - The list of ligatures. */ Substitution.prototype.getLigatures = function(feature, script, language) { var ligatures = []; var lookupTable = this.getLookupTable(script, language, feature, 4); if (!lookupTable) { return []; } var subtables = lookupTable.subtables; for (var i = 0; i < subtables.length; i++) { var subtable = subtables[i]; var glyphs = this.expandCoverage(subtable.coverage); var ligatureSets = subtable.ligatureSets; for (var j = 0; j < glyphs.length; j++) { var startGlyph = glyphs[j]; var ligSet = ligatureSets[j]; for (var k = 0; k < ligSet.length; k++) { var lig = ligSet[k]; ligatures.push({ sub: [startGlyph].concat(lig.components), by: lig.ligGlyph }); } } } return ligatures; }; /** * Add or modify a single substitution (lookup type 1) * Format 2, more flexible, is always used. * @param {string} feature - 4-letter feature name ('liga', 'rlig', 'dlig'...) 
* @param {Object} substitution - { sub: id, delta: number } for format 1 or { sub: id, by: id } for format 2. * @param {string} [script='DFLT'] * @param {string} [language='DFLT'] */ Substitution.prototype.addSingle = function(feature, substitution, script, language) { var lookupTable = this.getLookupTable(script, language, feature, 1, true); var subtable = getSubstFormat(lookupTable, 2, { // lookup type 1 subtable, format 2, coverage format 1 substFormat: 2, coverage: { format: 1, glyphs: [] }, substitute: [] }); check.assert(subtable.coverage.format === 1, 'Ligature: unable to modify coverage table format ' + subtable.coverage.format); var coverageGlyph = substitution.sub; var pos = this.binSearch(subtable.coverage.glyphs, coverageGlyph); if (pos < 0) { pos = -1 - pos; subtable.coverage.glyphs.splice(pos, 0, coverageGlyph); subtable.substitute.splice(pos, 0, 0); } subtable.substitute[pos] = substitution.by; }; /** * Add or modify an alternate substitution (lookup type 1) * @param {string} feature - 4-letter feature name ('liga', 'rlig', 'dlig'...) * @param {Object} substitution - { sub: id, by: [ids] } * @param {string} [script='DFLT'] * @param {string} [language='DFLT'] */ Substitution.prototype.addAlternate = function(feature, substitution, script, language) { var lookupTable = this.getLookupTable(script, language, feature, 3, true); var subtable = getSubstFormat(lookupTable, 1, { // lookup type 3 subtable, format 1, coverage format 1 substFormat: 1, coverage: { format: 1, glyphs: [] }, alternateSets: [] }); check.assert(subtable.coverage.format === 1, 'Ligature: unable to modify coverage table format ' + subtable.coverage.format); var coverageGlyph = substitution.sub; var pos = this.binSearch(subtable.coverage.glyphs, coverageGlyph); if (pos < 0) { pos = -1 - pos; subtable.coverage.glyphs.splice(pos, 0, coverageGlyph); subtable.alternateSets.splice(pos, 0, 0); } subtable.alternateSets[pos] = substitution.by; }; /** * Add a ligature (lookup type 4) * Ligatures with more components must be stored ahead of those with fewer components in order to be found * @param {string} feature - 4-letter feature name ('liga', 'rlig', 'dlig'...) * @param {Object} ligature - { sub: [ids], by: id } * @param {string} [script='DFLT'] * @param {string} [language='DFLT'] */ Substitution.prototype.addLigature = function(feature, ligature, script, language) { script = script || 'DFLT'; language = language || 'DFLT'; var lookupTable = this.getLookupTable(script, language, feature, 4, true); var subtable = lookupTable.subtables[0]; if (!subtable) { subtable = { // lookup type 4 subtable, format 1, coverage format 1 substFormat: 1, coverage: { format: 1, glyphs: [] }, ligatureSets: [] }; lookupTable.subtables[0] = subtable; } check.assert(subtable.coverage.format === 1, 'Ligature: unable to modify coverage table format ' + subtable.coverage.format); var coverageGlyph = ligature.sub[0]; var ligComponents = ligature.sub.slice(1); var ligatureTable = { ligGlyph: ligature.by, components: ligComponents }; var pos = this.binSearch(subtable.coverage.glyphs, coverageGlyph); if (pos >= 0) { // ligatureSet already exists var ligatureSet = subtable.ligatureSets[pos]; for (var i = 0; i < ligatureSet.length; i++) { // If ligature already exists, return. if (arraysEqual(ligatureSet[i].components, ligComponents)) { return; } } // ligature does not exist: add it. ligatureSet.push(ligatureTable); } else { // Create a new ligatureSet and add coverage for the first glyph. 
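// (binSearch appears to follow the usual convention of returning -(insertionPoint) - 1 when the glyph is not found,
// so -1 - pos below recovers the index at which to insert; this is an assumption based on how it is used here
// and in addSingle/addAlternate above.)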
pos = -1 - pos; subtable.coverage.glyphs.splice(pos, 0, coverageGlyph); subtable.ligatureSets.splice(pos, 0, [ligatureTable]); } }; /** * List all feature data for a given script and language. * @param {string} feature - 4-letter feature name * @param {string} [script='DFLT'] * @param {string} [language='DFLT'] * @return {Array} substitutions - The list of substitutions. */ Substitution.prototype.getFeature = function(feature, script, language) { script = script || 'DFLT'; language = language || 'DFLT'; if (/ss\d\d/.test(feature)) { // ss01 - ss20 return this.getSingle(feature, script, language); } switch (feature) { case 'aalt': case 'salt': return this.getSingle(feature, script, language) .concat(this.getAlternates(feature, script, language)); case 'dlig': case 'liga': case 'rlig': return this.getLigatures(feature, script, language); } }; /** * Add a substitution to a feature for a given script and language. * @param {string} feature - 4-letter feature name * @param {Object} sub - the substitution to add (an object like { sub: id or [ids], by: id or [ids] }) * @param {string} [script='DFLT'] * @param {string} [language='DFLT'] */ Substitution.prototype.add = function(feature, sub, script, language) { script = script || 'DFLT'; language = language || 'DFLT'; if (/ss\d\d/.test(feature)) { // ss01 - ss20 return this.addSingle(feature, sub, script, language); } switch (feature) { case 'aalt': case 'salt': if (typeof sub.by === 'number') { return this.addSingle(feature, sub, script, language); } return this.addAlternate(feature, sub, script, language); case 'dlig': case 'liga': case 'rlig': return this.addLigature(feature, sub, script, language); } }; module.exports = Substitution; },{"./check":2,"./layout":8}],13:[function(require,module,exports){ // Table metadata 'use strict'; var check = require('./check'); var encode = require('./types').encode; var sizeOf = require('./types').sizeOf; /** * @exports opentype.Table * @class * @param {string} tableName * @param {Array} fields * @param {Object} options * @constructor */ function Table(tableName, fields, options) { var i; for (i = 0; i < fields.length; i += 1) { var field = fields[i]; this[field.name] = field.value; } this.tableName = tableName; this.fields = fields; if (options) { var optionKeys = Object.keys(options); for (i = 0; i < optionKeys.length; i += 1) { var k = optionKeys[i]; var v = options[k]; if (this[k] !== undefined) { this[k] = v; } } } } /** * Encodes the table and returns an array of bytes * @return {Array} */ Table.prototype.encode = function() { return encode.TABLE(this); }; /** * Get the size of the table. 
* @return {number} */ Table.prototype.sizeOf = function() { return sizeOf.TABLE(this); }; /** * @private */ function ushortList(itemName, list, count) { if (count === undefined) { count = list.length; } var fields = new Array(list.length + 1); fields[0] = {name: itemName + 'Count', type: 'USHORT', value: count}; for (var i = 0; i < list.length; i++) { fields[i + 1] = {name: itemName + i, type: 'USHORT', value: list[i]}; } return fields; } /** * @private */ function tableList(itemName, records, itemCallback) { var count = records.length; var fields = new Array(count + 1); fields[0] = {name: itemName + 'Count', type: 'USHORT', value: count}; for (var i = 0; i < count; i++) { fields[i + 1] = {name: itemName + i, type: 'TABLE', value: itemCallback(records[i], i)}; } return fields; } /** * @private */ function recordList(itemName, records, itemCallback) { var count = records.length; var fields = []; fields[0] = {name: itemName + 'Count', type: 'USHORT', value: count}; for (var i = 0; i < count; i++) { fields = fields.concat(itemCallback(records[i], i)); } return fields; } // Common Layout Tables /** * @exports opentype.Coverage * @class * @param {opentype.Table} * @constructor * @extends opentype.Table */ function Coverage(coverageTable) { if (coverageTable.format === 1) { Table.call(this, 'coverageTable', [{name: 'coverageFormat', type: 'USHORT', value: 1}] .concat(ushortList('glyph', coverageTable.glyphs)) ); } else { check.assert(false, 'Can\'t create coverage table format 2 yet.'); } } Coverage.prototype = Object.create(Table.prototype); Coverage.prototype.constructor = Coverage; function ScriptList(scriptListTable) { Table.call(this, 'scriptListTable', recordList('scriptRecord', scriptListTable, function(scriptRecord, i) { var script = scriptRecord.script; var defaultLangSys = script.defaultLangSys; check.assert(!!defaultLangSys, 'Unable to write GSUB: script ' + scriptRecord.tag + ' has no default language system.'); return [ {name: 'scriptTag' + i, type: 'TAG', value: scriptRecord.tag}, {name: 'script' + i, type: 'TABLE', value: new Table('scriptTable', [ {name: 'defaultLangSys', type: 'TABLE', value: new Table('defaultLangSys', [ {name: 'lookupOrder', type: 'USHORT', value: 0}, {name: 'reqFeatureIndex', type: 'USHORT', value: defaultLangSys.reqFeatureIndex}] .concat(ushortList('featureIndex', defaultLangSys.featureIndexes)))} ].concat(recordList('langSys', script.langSysRecords, function(langSysRecord, i) { var langSys = langSysRecord.langSys; return [ {name: 'langSysTag' + i, type: 'TAG', value: langSysRecord.tag}, {name: 'langSys' + i, type: 'TABLE', value: new Table('langSys', [ {name: 'lookupOrder', type: 'USHORT', value: 0}, {name: 'reqFeatureIndex', type: 'USHORT', value: langSys.reqFeatureIndex} ].concat(ushortList('featureIndex', langSys.featureIndexes)))} ]; })))} ]; }) ); } ScriptList.prototype = Object.create(Table.prototype); ScriptList.prototype.constructor = ScriptList; /** * @exports opentype.FeatureList * @class * @param {opentype.Table} * @constructor * @extends opentype.Table */ function FeatureList(featureListTable) { Table.call(this, 'featureListTable', recordList('featureRecord', featureListTable, function(featureRecord, i) { var feature = featureRecord.feature; return [ {name: 'featureTag' + i, type: 'TAG', value: featureRecord.tag}, {name: 'feature' + i, type: 'TABLE', value: new Table('featureTable', [ {name: 'featureParams', type: 'USHORT', value: feature.featureParams}, ].concat(ushortList('lookupListIndex', feature.lookupListIndexes)))} ]; }) ); } 
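// The list helpers above build flat field arrays; for instance (illustrative values), ushortList('featureIndex', [0, 2]) yields
//   [{name: 'featureIndexCount', type: 'USHORT', value: 2},
//    {name: 'featureIndex0', type: 'USHORT', value: 0},
//    {name: 'featureIndex1', type: 'USHORT', value: 2}]
// which is the shape the Table/Record constructors expect.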
FeatureList.prototype = Object.create(Table.prototype); FeatureList.prototype.constructor = FeatureList; /** * @exports opentype.LookupList * @class * @param {opentype.Table} * @param {Object} * @constructor * @extends opentype.Table */ function LookupList(lookupListTable, subtableMakers) { Table.call(this, 'lookupListTable', tableList('lookup', lookupListTable, function(lookupTable) { var subtableCallback = subtableMakers[lookupTable.lookupType]; check.assert(!!subtableCallback, 'Unable to write GSUB lookup type ' + lookupTable.lookupType + ' tables.'); return new Table('lookupTable', [ {name: 'lookupType', type: 'USHORT', value: lookupTable.lookupType}, {name: 'lookupFlag', type: 'USHORT', value: lookupTable.lookupFlag} ].concat(tableList('subtable', lookupTable.subtables, subtableCallback))); })); } LookupList.prototype = Object.create(Table.prototype); LookupList.prototype.constructor = LookupList; // Record = same as Table, but inlined (a Table has an offset and its data is further in the stream) // Don't use offsets inside Records (probable bug), only in Tables. exports.Record = exports.Table = Table; exports.Coverage = Coverage; exports.ScriptList = ScriptList; exports.FeatureList = FeatureList; exports.LookupList = LookupList; exports.ushortList = ushortList; exports.tableList = tableList; exports.recordList = recordList; },{"./check":2,"./types":32}],14:[function(require,module,exports){ // The `CFF` table contains the glyph outlines in PostScript format. // https://www.microsoft.com/typography/OTSPEC/cff.htm // http://download.microsoft.com/download/8/0/1/801a191c-029d-4af3-9642-555f6fe514ee/cff.pdf // http://download.microsoft.com/download/8/0/1/801a191c-029d-4af3-9642-555f6fe514ee/type2.pdf 'use strict'; var encoding = require('../encoding'); var glyphset = require('../glyphset'); var parse = require('../parse'); var path = require('../path'); var table = require('../table'); // Custom equals function that can also check lists. function equals(a, b) { if (a === b) { return true; } else if (Array.isArray(a) && Array.isArray(b)) { if (a.length !== b.length) { return false; } for (var i = 0; i < a.length; i += 1) { if (!equals(a[i], b[i])) { return false; } } return true; } else { return false; } } // Parse a `CFF` INDEX array. // An index array consists of a list of offsets, then a list of objects at those offsets. function parseCFFIndex(data, start, conversionFn) { //var i, objectOffset, endOffset; var offsets = []; var objects = []; var count = parse.getCard16(data, start); var i; var objectOffset; var endOffset; if (count !== 0) { var offsetSize = parse.getByte(data, start + 2); objectOffset = start + ((count + 1) * offsetSize) + 2; var pos = start + 3; for (i = 0; i < count + 1; i += 1) { offsets.push(parse.getOffset(data, pos, offsetSize)); pos += offsetSize; } // The total size of the index array is 4 header bytes + the value of the last offset. endOffset = objectOffset + offsets[count]; } else { endOffset = start + 2; } for (i = 0; i < offsets.length - 1; i += 1) { var value = parse.getBytes(data, objectOffset + offsets[i], objectOffset + offsets[i + 1]); if (conversionFn) { value = conversionFn(value); } objects.push(value); } return {objects: objects, startOffset: start, endOffset: endOffset}; } // Parse a `CFF` DICT real value. 
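// The value is nibble-encoded; for example (illustrative), the bytes 0x14 0xa5 0xff decode nibble by nibble
// to '1', '4', '.', '5', end-of-number, i.e. the number 14.5.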
function parseFloatOperand(parser) { var s = ''; var eof = 15; var lookup = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '.', 'E', 'E-', null, '-']; while (true) { var b = parser.parseByte(); var n1 = b >> 4; var n2 = b & 15; if (n1 === eof) { break; } s += lookup[n1]; if (n2 === eof) { break; } s += lookup[n2]; } return parseFloat(s); } // Parse a `CFF` DICT operand. function parseOperand(parser, b0) { var b1; var b2; var b3; var b4; if (b0 === 28) { b1 = parser.parseByte(); b2 = parser.parseByte(); return b1 << 8 | b2; } if (b0 === 29) { b1 = parser.parseByte(); b2 = parser.parseByte(); b3 = parser.parseByte(); b4 = parser.parseByte(); return b1 << 24 | b2 << 16 | b3 << 8 | b4; } if (b0 === 30) { return parseFloatOperand(parser); } if (b0 >= 32 && b0 <= 246) { return b0 - 139; } if (b0 >= 247 && b0 <= 250) { b1 = parser.parseByte(); return (b0 - 247) * 256 + b1 + 108; } if (b0 >= 251 && b0 <= 254) { b1 = parser.parseByte(); return -(b0 - 251) * 256 - b1 - 108; } throw new Error('Invalid b0 ' + b0); } // Convert the entries returned by `parseDict` to a proper dictionary. // If a value is a list of one, it is unpacked. function entriesToObject(entries) { var o = {}; for (var i = 0; i < entries.length; i += 1) { var key = entries[i][0]; var values = entries[i][1]; var value; if (values.length === 1) { value = values[0]; } else { value = values; } if (o.hasOwnProperty(key)) { throw new Error('Object ' + o + ' already has key ' + key); } o[key] = value; } return o; } // Parse a `CFF` DICT object. // A dictionary contains key-value pairs in a compact tokenized format. function parseCFFDict(data, start, size) { start = start !== undefined ? start : 0; var parser = new parse.Parser(data, start); var entries = []; var operands = []; size = size !== undefined ? size : data.length; while (parser.relativeOffset < size) { var op = parser.parseByte(); // The first byte for each dict item distinguishes between operator (key) and operand (value). // Values <= 21 are operators. if (op <= 21) { // Two-byte operators have an initial escape byte of 12. if (op === 12) { op = 1200 + parser.parseByte(); } entries.push([op, operands]); operands = []; } else { // Since the operands (values) come before the operators (keys), we store all operands in a list // until we encounter an operator. operands.push(parseOperand(parser, op)); } } return entriesToObject(entries); } // Given a String Index (SID), return the value of the string. // Strings below index 392 are standard CFF strings and are not encoded in the font. function getCFFString(strings, index) { if (index <= 390) { index = encoding.cffStandardStrings[index]; } else { index = strings[index - 391]; } return index; } // Interpret a dictionary and return a new dictionary with readable keys and values for missing entries. // This function takes `meta` which is a list of objects containing `operand`, `name` and `default`. function interpretDict(dict, meta, strings) { var newDict = {}; // Because we also want to include missing values, we start out from the meta list // and lookup values in the dict. for (var i = 0; i < meta.length; i += 1) { var m = meta[i]; var value = dict[m.op]; if (value === undefined) { value = m.value !== undefined ? m.value : null; } if (m.type === 'SID') { value = getCFFString(strings, value); } newDict[m.name] = value; } return newDict; } // Parse the CFF header. 
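// In practice the four header bytes of a CFF table are typically (illustrative) 1, 0, 4, offSize,
// i.e. CFF version 1.0 with a 4-byte header; parseCFFHeader below simply records them.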
function parseCFFHeader(data, start) { var header = {}; header.formatMajor = parse.getCard8(data, start); header.formatMinor = parse.getCard8(data, start + 1); header.size = parse.getCard8(data, start + 2); header.offsetSize = parse.getCard8(data, start + 3); header.startOffset = start; header.endOffset = start + 4; return header; } var TOP_DICT_META = [ {name: 'version', op: 0, type: 'SID'}, {name: 'notice', op: 1, type: 'SID'}, {name: 'copyright', op: 1200, type: 'SID'}, {name: 'fullName', op: 2, type: 'SID'}, {name: 'familyName', op: 3, type: 'SID'}, {name: 'weight', op: 4, type: 'SID'}, {name: 'isFixedPitch', op: 1201, type: 'number', value: 0}, {name: 'italicAngle', op: 1202, type: 'number', value: 0}, {name: 'underlinePosition', op: 1203, type: 'number', value: -100}, {name: 'underlineThickness', op: 1204, type: 'number', value: 50}, {name: 'paintType', op: 1205, type: 'number', value: 0}, {name: 'charstringType', op: 1206, type: 'number', value: 2}, {name: 'fontMatrix', op: 1207, type: ['real', 'real', 'real', 'real', 'real', 'real'], value: [0.001, 0, 0, 0.001, 0, 0]}, {name: 'uniqueId', op: 13, type: 'number'}, {name: 'fontBBox', op: 5, type: ['number', 'number', 'number', 'number'], value: [0, 0, 0, 0]}, {name: 'strokeWidth', op: 1208, type: 'number', value: 0}, {name: 'xuid', op: 14, type: [], value: null}, {name: 'charset', op: 15, type: 'offset', value: 0}, {name: 'encoding', op: 16, type: 'offset', value: 0}, {name: 'charStrings', op: 17, type: 'offset', value: 0}, {name: 'private', op: 18, type: ['number', 'offset'], value: [0, 0]} ]; var PRIVATE_DICT_META = [ {name: 'subrs', op: 19, type: 'offset', value: 0}, {name: 'defaultWidthX', op: 20, type: 'number', value: 0}, {name: 'nominalWidthX', op: 21, type: 'number', value: 0} ]; // Parse the CFF top dictionary. A CFF table can contain multiple fonts, each with their own top dictionary. // The top dictionary contains the essential metadata for the font, together with the private dictionary. function parseCFFTopDict(data, strings) { var dict = parseCFFDict(data, 0, data.byteLength); return interpretDict(dict, TOP_DICT_META, strings); } // Parse the CFF private dictionary. We don't fully parse out all the values, only the ones we need. function parseCFFPrivateDict(data, start, size, strings) { var dict = parseCFFDict(data, start, size); return interpretDict(dict, PRIVATE_DICT_META, strings); } // Parse the CFF charset table, which contains internal names for all the glyphs. // This function will return a list of glyph names. // See Adobe TN #5176 chapter 13, "Charsets". function parseCFFCharset(data, start, nGlyphs, strings) { var i; var sid; var count; var parser = new parse.Parser(data, start); // The .notdef glyph is not included, so subtract 1. nGlyphs -= 1; var charset = ['.notdef']; var format = parser.parseCard8(); if (format === 0) { for (i = 0; i < nGlyphs; i += 1) { sid = parser.parseSID(); charset.push(getCFFString(strings, sid)); } } else if (format === 1) { while (charset.length <= nGlyphs) { sid = parser.parseSID(); count = parser.parseCard8(); for (i = 0; i <= count; i += 1) { charset.push(getCFFString(strings, sid)); sid += 1; } } } else if (format === 2) { while (charset.length <= nGlyphs) { sid = parser.parseSID(); count = parser.parseCard16(); for (i = 0; i <= count; i += 1) { charset.push(getCFFString(strings, sid)); sid += 1; } } } else { throw new Error('Unknown charset format ' + format); } return charset; } // Parse the CFF encoding data. Only one encoding can be specified per font. 
// See Adobe TN #5176 chapter 12, "Encodings". function parseCFFEncoding(data, start, charset) { var i; var code; var enc = {}; var parser = new parse.Parser(data, start); var format = parser.parseCard8(); if (format === 0) { var nCodes = parser.parseCard8(); for (i = 0; i < nCodes; i += 1) { code = parser.parseCard8(); enc[code] = i; } } else if (format === 1) { var nRanges = parser.parseCard8(); code = 1; for (i = 0; i < nRanges; i += 1) { var first = parser.parseCard8(); var nLeft = parser.parseCard8(); for (var j = first; j <= first + nLeft; j += 1) { enc[j] = code; code += 1; } } } else { throw new Error('Unknown encoding format ' + format); } return new encoding.CffEncoding(enc, charset); } // Take in charstring code and return a Glyph object. // The encoding is described in the Type 2 Charstring Format // https://www.microsoft.com/typography/OTSPEC/charstr2.htm function parseCFFCharstring(font, glyph, code) { var c1x; var c1y; var c2x; var c2y; var p = new path.Path(); var stack = []; var nStems = 0; var haveWidth = false; var width = font.defaultWidthX; var open = false; var x = 0; var y = 0; function newContour(x, y) { if (open) { p.closePath(); } p.moveTo(x, y); open = true; } function parseStems() { var hasWidthArg; // The number of stem operators on the stack is always even. // If the value is uneven, that means a width is specified. hasWidthArg = stack.length % 2 !== 0; if (hasWidthArg && !haveWidth) { width = stack.shift() + font.nominalWidthX; } nStems += stack.length >> 1; stack.length = 0; haveWidth = true; } function parse(code) { var b1; var b2; var b3; var b4; var codeIndex; var subrCode; var jpx; var jpy; var c3x; var c3y; var c4x; var c4y; var i = 0; while (i < code.length) { var v = code[i]; i += 1; switch (v) { case 1: // hstem parseStems(); break; case 3: // vstem parseStems(); break; case 4: // vmoveto if (stack.length > 1 && !haveWidth) { width = stack.shift() + font.nominalWidthX; haveWidth = true; } y += stack.pop(); newContour(x, y); break; case 5: // rlineto while (stack.length > 0) { x += stack.shift(); y += stack.shift(); p.lineTo(x, y); } break; case 6: // hlineto while (stack.length > 0) { x += stack.shift(); p.lineTo(x, y); if (stack.length === 0) { break; } y += stack.shift(); p.lineTo(x, y); } break; case 7: // vlineto while (stack.length > 0) { y += stack.shift(); p.lineTo(x, y); if (stack.length === 0) { break; } x += stack.shift(); p.lineTo(x, y); } break; case 8: // rrcurveto while (stack.length > 0) { c1x = x + stack.shift(); c1y = y + stack.shift(); c2x = c1x + stack.shift(); c2y = c1y + stack.shift(); x = c2x + stack.shift(); y = c2y + stack.shift(); p.curveTo(c1x, c1y, c2x, c2y, x, y); } break; case 10: // callsubr codeIndex = stack.pop() + font.subrsBias; subrCode = font.subrs[codeIndex]; if (subrCode) { parse(subrCode); } break; case 11: // return return; case 12: // flex operators v = code[i]; i += 1; switch (v) { case 35: // flex // |- dx1 dy1 dx2 dy2 dx3 dy3 dx4 dy4 dx5 dy5 dx6 dy6 fd flex (12 35) |- c1x = x + stack.shift(); // dx1 c1y = y + stack.shift(); // dy1 c2x = c1x + stack.shift(); // dx2 c2y = c1y + stack.shift(); // dy2 jpx = c2x + stack.shift(); // dx3 jpy = c2y + stack.shift(); // dy3 c3x = jpx + stack.shift(); // dx4 c3y = jpy + stack.shift(); // dy4 c4x = c3x + stack.shift(); // dx5 c4y = c3y + stack.shift(); // dy5 x = c4x + stack.shift(); // dx6 y = c4y + stack.shift(); // dy6 stack.shift(); // flex depth p.curveTo(c1x, c1y, c2x, c2y, jpx, jpy); p.curveTo(c3x, c3y, c4x, c4y, x, y); break; case 34: // hflex // |- dx1 dx2 dy2 
dx3 dx4 dx5 dx6 hflex (12 34) |- c1x = x + stack.shift(); // dx1 c1y = y; // dy1 c2x = c1x + stack.shift(); // dx2 c2y = c1y + stack.shift(); // dy2 jpx = c2x + stack.shift(); // dx3 jpy = c2y; // dy3 c3x = jpx + stack.shift(); // dx4 c3y = c2y; // dy4 c4x = c3x + stack.shift(); // dx5 c4y = y; // dy5 x = c4x + stack.shift(); // dx6 p.curveTo(c1x, c1y, c2x, c2y, jpx, jpy); p.curveTo(c3x, c3y, c4x, c4y, x, y); break; case 36: // hflex1 // |- dx1 dy1 dx2 dy2 dx3 dx4 dx5 dy5 dx6 hflex1 (12 36) |- c1x = x + stack.shift(); // dx1 c1y = y + stack.shift(); // dy1 c2x = c1x + stack.shift(); // dx2 c2y = c1y + stack.shift(); // dy2 jpx = c2x + stack.shift(); // dx3 jpy = c2y; // dy3 c3x = jpx + stack.shift(); // dx4 c3y = c2y; // dy4 c4x = c3x + stack.shift(); // dx5 c4y = c3y + stack.shift(); // dy5 x = c4x + stack.shift(); // dx6 p.curveTo(c1x, c1y, c2x, c2y, jpx, jpy); p.curveTo(c3x, c3y, c4x, c4y, x, y); break; case 37: // flex1 // |- dx1 dy1 dx2 dy2 dx3 dy3 dx4 dy4 dx5 dy5 d6 flex1 (12 37) |- c1x = x + stack.shift(); // dx1 c1y = y + stack.shift(); // dy1 c2x = c1x + stack.shift(); // dx2 c2y = c1y + stack.shift(); // dy2 jpx = c2x + stack.shift(); // dx3 jpy = c2y + stack.shift(); // dy3 c3x = jpx + stack.shift(); // dx4 c3y = jpy + stack.shift(); // dy4 c4x = c3x + stack.shift(); // dx5 c4y = c3y + stack.shift(); // dy5 if (Math.abs(c4x - x) > Math.abs(c4y - y)) { x = c4x + stack.shift(); } else { y = c4y + stack.shift(); } p.curveTo(c1x, c1y, c2x, c2y, jpx, jpy); p.curveTo(c3x, c3y, c4x, c4y, x, y); break; default: console.log('Glyph ' + glyph.index + ': unknown operator ' + 1200 + v); stack.length = 0; } break; case 14: // endchar if (stack.length > 0 && !haveWidth) { width = stack.shift() + font.nominalWidthX; haveWidth = true; } if (open) { p.closePath(); open = false; } break; case 18: // hstemhm parseStems(); break; case 19: // hintmask case 20: // cntrmask parseStems(); i += (nStems + 7) >> 3; break; case 21: // rmoveto if (stack.length > 2 && !haveWidth) { width = stack.shift() + font.nominalWidthX; haveWidth = true; } y += stack.pop(); x += stack.pop(); newContour(x, y); break; case 22: // hmoveto if (stack.length > 1 && !haveWidth) { width = stack.shift() + font.nominalWidthX; haveWidth = true; } x += stack.pop(); newContour(x, y); break; case 23: // vstemhm parseStems(); break; case 24: // rcurveline while (stack.length > 2) { c1x = x + stack.shift(); c1y = y + stack.shift(); c2x = c1x + stack.shift(); c2y = c1y + stack.shift(); x = c2x + stack.shift(); y = c2y + stack.shift(); p.curveTo(c1x, c1y, c2x, c2y, x, y); } x += stack.shift(); y += stack.shift(); p.lineTo(x, y); break; case 25: // rlinecurve while (stack.length > 6) { x += stack.shift(); y += stack.shift(); p.lineTo(x, y); } c1x = x + stack.shift(); c1y = y + stack.shift(); c2x = c1x + stack.shift(); c2y = c1y + stack.shift(); x = c2x + stack.shift(); y = c2y + stack.shift(); p.curveTo(c1x, c1y, c2x, c2y, x, y); break; case 26: // vvcurveto if (stack.length % 2) { x += stack.shift(); } while (stack.length > 0) { c1x = x; c1y = y + stack.shift(); c2x = c1x + stack.shift(); c2y = c1y + stack.shift(); x = c2x; y = c2y + stack.shift(); p.curveTo(c1x, c1y, c2x, c2y, x, y); } break; case 27: // hhcurveto if (stack.length % 2) { y += stack.shift(); } while (stack.length > 0) { c1x = x + stack.shift(); c1y = y; c2x = c1x + stack.shift(); c2y = c1y + stack.shift(); x = c2x + stack.shift(); y = c2y; p.curveTo(c1x, c1y, c2x, c2y, x, y); } break; case 28: // shortint b1 = code[i]; b2 = code[i + 1]; stack.push(((b1 << 24) | (b2 << 
16)) >> 16); i += 2; break; case 29: // callgsubr codeIndex = stack.pop() + font.gsubrsBias; subrCode = font.gsubrs[codeIndex]; if (subrCode) { parse(subrCode); } break; case 30: // vhcurveto while (stack.length > 0) { c1x = x; c1y = y + stack.shift(); c2x = c1x + stack.shift(); c2y = c1y + stack.shift(); x = c2x + stack.shift(); y = c2y + (stack.length === 1 ? stack.shift() : 0); p.curveTo(c1x, c1y, c2x, c2y, x, y); if (stack.length === 0) { break; } c1x = x + stack.shift(); c1y = y; c2x = c1x + stack.shift(); c2y = c1y + stack.shift(); y = c2y + stack.shift(); x = c2x + (stack.length === 1 ? stack.shift() : 0); p.curveTo(c1x, c1y, c2x, c2y, x, y); } break; case 31: // hvcurveto while (stack.length > 0) { c1x = x + stack.shift(); c1y = y; c2x = c1x + stack.shift(); c2y = c1y + stack.shift(); y = c2y + stack.shift(); x = c2x + (stack.length === 1 ? stack.shift() : 0); p.curveTo(c1x, c1y, c2x, c2y, x, y); if (stack.length === 0) { break; } c1x = x; c1y = y + stack.shift(); c2x = c1x + stack.shift(); c2y = c1y + stack.shift(); x = c2x + stack.shift(); y = c2y + (stack.length === 1 ? stack.shift() : 0); p.curveTo(c1x, c1y, c2x, c2y, x, y); } break; default: if (v < 32) { console.log('Glyph ' + glyph.index + ': unknown operator ' + v); } else if (v < 247) { stack.push(v - 139); } else if (v < 251) { b1 = code[i]; i += 1; stack.push((v - 247) * 256 + b1 + 108); } else if (v < 255) { b1 = code[i]; i += 1; stack.push(-(v - 251) * 256 - b1 - 108); } else { b1 = code[i]; b2 = code[i + 1]; b3 = code[i + 2]; b4 = code[i + 3]; i += 4; stack.push(((b1 << 24) | (b2 << 16) | (b3 << 8) | b4) / 65536); } } } } parse(code); glyph.advanceWidth = width; return p; } // Subroutines are encoded using the negative half of the number space. // See type 2 chapter 4.7 "Subroutine operators". function calcCFFSubroutineBias(subrs) { var bias; if (subrs.length < 1240) { bias = 107; } else if (subrs.length < 33900) { bias = 1131; } else { bias = 32768; } return bias; } // Parse the `CFF` table, which contains the glyph outlines in PostScript format. function parseCFFTable(data, start, font) { font.tables.cff = {}; var header = parseCFFHeader(data, start); var nameIndex = parseCFFIndex(data, header.endOffset, parse.bytesToString); var topDictIndex = parseCFFIndex(data, nameIndex.endOffset); var stringIndex = parseCFFIndex(data, topDictIndex.endOffset, parse.bytesToString); var globalSubrIndex = parseCFFIndex(data, stringIndex.endOffset); font.gsubrs = globalSubrIndex.objects; font.gsubrsBias = calcCFFSubroutineBias(font.gsubrs); var topDictData = new DataView(new Uint8Array(topDictIndex.objects[0]).buffer); var topDict = parseCFFTopDict(topDictData, stringIndex.objects); font.tables.cff.topDict = topDict; var privateDictOffset = start + topDict['private'][1]; var privateDict = parseCFFPrivateDict(data, privateDictOffset, topDict['private'][0], stringIndex.objects); font.defaultWidthX = privateDict.defaultWidthX; font.nominalWidthX = privateDict.nominalWidthX; if (privateDict.subrs !== 0) { var subrOffset = privateDictOffset + privateDict.subrs; var subrIndex = parseCFFIndex(data, subrOffset); font.subrs = subrIndex.objects; font.subrsBias = calcCFFSubroutineBias(font.subrs); } else { font.subrs = []; font.subrsBias = 0; } // Offsets in the top dict are relative to the beginning of the CFF data, so add the CFF start offset. 
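// (For instance, with illustrative values: if topDict.charStrings were 0x1234 and the CFF table began at
// byte 0x5000 of the file, the CharStrings INDEX below would be parsed starting at byte 0x6234.)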
var charStringsIndex = parseCFFIndex(data, start + topDict.charStrings); font.nGlyphs = charStringsIndex.objects.length; var charset = parseCFFCharset(data, start + topDict.charset, font.nGlyphs, stringIndex.objects); if (topDict.encoding === 0) { // Standard encoding font.cffEncoding = new encoding.CffEncoding(encoding.cffStandardEncoding, charset); } else if (topDict.encoding === 1) { // Expert encoding font.cffEncoding = new encoding.CffEncoding(encoding.cffExpertEncoding, charset); } else { font.cffEncoding = parseCFFEncoding(data, start + topDict.encoding, charset); } // Prefer the CMAP encoding to the CFF encoding. font.encoding = font.encoding || font.cffEncoding; font.glyphs = new glyphset.GlyphSet(font); for (var i = 0; i < font.nGlyphs; i += 1) { var charString = charStringsIndex.objects[i]; font.glyphs.push(i, glyphset.cffGlyphLoader(font, i, parseCFFCharstring, charString)); } } // Convert a string to a String ID (SID). // The list of strings is modified in place. function encodeString(s, strings) { var sid; // Is the string in the CFF standard strings? var i = encoding.cffStandardStrings.indexOf(s); if (i >= 0) { sid = i; } // Is the string already in the string index? i = strings.indexOf(s); if (i >= 0) { sid = i + encoding.cffStandardStrings.length; } else { sid = encoding.cffStandardStrings.length + strings.length; strings.push(s); } return sid; } function makeHeader() { return new table.Record('Header', [ {name: 'major', type: 'Card8', value: 1}, {name: 'minor', type: 'Card8', value: 0}, {name: 'hdrSize', type: 'Card8', value: 4}, {name: 'major', type: 'Card8', value: 1} ]); } function makeNameIndex(fontNames) { var t = new table.Record('Name INDEX', [ {name: 'names', type: 'INDEX', value: []} ]); t.names = []; for (var i = 0; i < fontNames.length; i += 1) { t.names.push({name: 'name_' + i, type: 'NAME', value: fontNames[i]}); } return t; } // Given a dictionary's metadata, create a DICT structure. function makeDict(meta, attrs, strings) { var m = {}; for (var i = 0; i < meta.length; i += 1) { var entry = meta[i]; var value = attrs[entry.name]; if (value !== undefined && !equals(value, entry.value)) { if (entry.type === 'SID') { value = encodeString(value, strings); } m[entry.op] = {name: entry.name, type: entry.type, value: value}; } } return m; } // The Top DICT houses the global font attributes. function makeTopDict(attrs, strings) { var t = new table.Record('Top DICT', [ {name: 'dict', type: 'DICT', value: {}} ]); t.dict = makeDict(TOP_DICT_META, attrs, strings); return t; } function makeTopDictIndex(topDict) { var t = new table.Record('Top DICT INDEX', [ {name: 'topDicts', type: 'INDEX', value: []} ]); t.topDicts = [{name: 'topDict_0', type: 'TABLE', value: topDict}]; return t; } function makeStringIndex(strings) { var t = new table.Record('String INDEX', [ {name: 'strings', type: 'INDEX', value: []} ]); t.strings = []; for (var i = 0; i < strings.length; i += 1) { t.strings.push({name: 'string_' + i, type: 'STRING', value: strings[i]}); } return t; } function makeGlobalSubrIndex() { // Currently we don't use subroutines. 
return new table.Record('Global Subr INDEX', [ {name: 'subrs', type: 'INDEX', value: []} ]); } function makeCharsets(glyphNames, strings) { var t = new table.Record('Charsets', [ {name: 'format', type: 'Card8', value: 0} ]); for (var i = 0; i < glyphNames.length; i += 1) { var glyphName = glyphNames[i]; var glyphSID = encodeString(glyphName, strings); t.fields.push({name: 'glyph_' + i, type: 'SID', value: glyphSID}); } return t; } function glyphToOps(glyph) { var ops = []; var path = glyph.path; ops.push({name: 'width', type: 'NUMBER', value: glyph.advanceWidth}); var x = 0; var y = 0; for (var i = 0; i < path.commands.length; i += 1) { var dx; var dy; var cmd = path.commands[i]; if (cmd.type === 'Q') { // CFF only supports bézier curves, so convert the quad to a bézier. var _13 = 1 / 3; var _23 = 2 / 3; // We're going to create a new command so we don't change the original path. cmd = { type: 'C', x: cmd.x, y: cmd.y, x1: _13 * x + _23 * cmd.x1, y1: _13 * y + _23 * cmd.y1, x2: _13 * cmd.x + _23 * cmd.x1, y2: _13 * cmd.y + _23 * cmd.y1 }; } if (cmd.type === 'M') { dx = Math.round(cmd.x - x); dy = Math.round(cmd.y - y); ops.push({name: 'dx', type: 'NUMBER', value: dx}); ops.push({name: 'dy', type: 'NUMBER', value: dy}); ops.push({name: 'rmoveto', type: 'OP', value: 21}); x = Math.round(cmd.x); y = Math.round(cmd.y); } else if (cmd.type === 'L') { dx = Math.round(cmd.x - x); dy = Math.round(cmd.y - y); ops.push({name: 'dx', type: 'NUMBER', value: dx}); ops.push({name: 'dy', type: 'NUMBER', value: dy}); ops.push({name: 'rlineto', type: 'OP', value: 5}); x = Math.round(cmd.x); y = Math.round(cmd.y); } else if (cmd.type === 'C') { var dx1 = Math.round(cmd.x1 - x); var dy1 = Math.round(cmd.y1 - y); var dx2 = Math.round(cmd.x2 - cmd.x1); var dy2 = Math.round(cmd.y2 - cmd.y1); dx = Math.round(cmd.x - cmd.x2); dy = Math.round(cmd.y - cmd.y2); ops.push({name: 'dx1', type: 'NUMBER', value: dx1}); ops.push({name: 'dy1', type: 'NUMBER', value: dy1}); ops.push({name: 'dx2', type: 'NUMBER', value: dx2}); ops.push({name: 'dy2', type: 'NUMBER', value: dy2}); ops.push({name: 'dx', type: 'NUMBER', value: dx}); ops.push({name: 'dy', type: 'NUMBER', value: dy}); ops.push({name: 'rrcurveto', type: 'OP', value: 8}); x = Math.round(cmd.x); y = Math.round(cmd.y); } // Contours are closed automatically. } ops.push({name: 'endchar', type: 'OP', value: 14}); return ops; } function makeCharStringsIndex(glyphs) { var t = new table.Record('CharStrings INDEX', [ {name: 'charStrings', type: 'INDEX', value: []} ]); for (var i = 0; i < glyphs.length; i += 1) { var glyph = glyphs.get(i); var ops = glyphToOps(glyph); t.charStrings.push({name: glyph.name, type: 'CHARSTRING', value: ops}); } return t; } function makePrivateDict(attrs, strings) { var t = new table.Record('Private DICT', [ {name: 'dict', type: 'DICT', value: {}} ]); t.dict = makeDict(PRIVATE_DICT_META, attrs, strings); return t; } function makeCFFTable(glyphs, options) { var t = new table.Table('CFF ', [ {name: 'header', type: 'RECORD'}, {name: 'nameIndex', type: 'RECORD'}, {name: 'topDictIndex', type: 'RECORD'}, {name: 'stringIndex', type: 'RECORD'}, {name: 'globalSubrIndex', type: 'RECORD'}, {name: 'charsets', type: 'RECORD'}, {name: 'charStringsIndex', type: 'RECORD'}, {name: 'privateDict', type: 'RECORD'} ]); var fontScale = 1 / options.unitsPerEm; // We use non-zero values for the offsets so that the DICT encodes them. 
// This is important because the size of the Top DICT plays a role in offset calculation, // and the size shouldn't change after we've written correct offsets. var attrs = { version: options.version, fullName: options.fullName, familyName: options.familyName, weight: options.weightName, fontBBox: options.fontBBox || [0, 0, 0, 0], fontMatrix: [fontScale, 0, 0, fontScale, 0, 0], charset: 999, encoding: 0, charStrings: 999, private: [0, 999] }; var privateAttrs = {}; var glyphNames = []; var glyph; // Skip first glyph (.notdef) for (var i = 1; i < glyphs.length; i += 1) { glyph = glyphs.get(i); glyphNames.push(glyph.name); } var strings = []; t.header = makeHeader(); t.nameIndex = makeNameIndex([options.postScriptName]); var topDict = makeTopDict(attrs, strings); t.topDictIndex = makeTopDictIndex(topDict); t.globalSubrIndex = makeGlobalSubrIndex(); t.charsets = makeCharsets(glyphNames, strings); t.charStringsIndex = makeCharStringsIndex(glyphs); t.privateDict = makePrivateDict(privateAttrs, strings); // Needs to come at the end, to encode all custom strings used in the font. t.stringIndex = makeStringIndex(strings); var startOffset = t.header.sizeOf() + t.nameIndex.sizeOf() + t.topDictIndex.sizeOf() + t.stringIndex.sizeOf() + t.globalSubrIndex.sizeOf(); attrs.charset = startOffset; // We use the CFF standard encoding; proper encoding will be handled in cmap. attrs.encoding = 0; attrs.charStrings = attrs.charset + t.charsets.sizeOf(); attrs.private[1] = attrs.charStrings + t.charStringsIndex.sizeOf(); // Recreate the Top DICT INDEX with the correct offsets. topDict = makeTopDict(attrs, strings); t.topDictIndex = makeTopDictIndex(topDict); return t; } exports.parse = parseCFFTable; exports.make = makeCFFTable; },{"../encoding":4,"../glyphset":7,"../parse":10,"../path":11,"../table":13}],15:[function(require,module,exports){ // The `cmap` table stores the mappings from characters to glyphs. // https://www.microsoft.com/typography/OTSPEC/cmap.htm 'use strict'; var check = require('../check'); var parse = require('../parse'); var table = require('../table'); function parseCmapTableFormat12(cmap, p) { var i; //Skip reserved. p.parseUShort(); // Length in bytes of the sub-tables. cmap.length = p.parseULong(); cmap.language = p.parseULong(); var groupCount; cmap.groupCount = groupCount = p.parseULong(); cmap.glyphIndexMap = {}; for (i = 0; i < groupCount; i += 1) { var startCharCode = p.parseULong(); var endCharCode = p.parseULong(); var startGlyphId = p.parseULong(); for (var c = startCharCode; c <= endCharCode; c += 1) { cmap.glyphIndexMap[c] = startGlyphId; startGlyphId++; } } } function parseCmapTableFormat4(cmap, p, data, start, offset) { var i; // Length in bytes of the sub-tables. cmap.length = p.parseUShort(); cmap.language = p.parseUShort(); // segCount is stored x 2. var segCount; cmap.segCount = segCount = p.parseUShort() >> 1; // Skip searchRange, entrySelector, rangeShift. p.skip('uShort', 3); // The "unrolled" mapping from character codes to glyph indices. 
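// Sketch of the format 4 rule implemented below: for a character code c inside a segment
// [startCount, endCount], if idRangeOffset is 0 the glyph index is (c + idDelta) & 0xFFFF;
// otherwise the index is fetched from the glyphIdArray that follows the idRangeOffset
// array, and idDelta is only added when that fetched value is non-zero.
// Hypothetical example: a segment with startCount 0x41 ('A') and idDelta -29 maps 'A'
// to glyph (0x41 - 29) & 0xFFFF = 36.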
cmap.glyphIndexMap = {}; var endCountParser = new parse.Parser(data, start + offset + 14); var startCountParser = new parse.Parser(data, start + offset + 16 + segCount * 2); var idDeltaParser = new parse.Parser(data, start + offset + 16 + segCount * 4); var idRangeOffsetParser = new parse.Parser(data, start + offset + 16 + segCount * 6); var glyphIndexOffset = start + offset + 16 + segCount * 8; for (i = 0; i < segCount - 1; i += 1) { var glyphIndex; var endCount = endCountParser.parseUShort(); var startCount = startCountParser.parseUShort(); var idDelta = idDeltaParser.parseShort(); var idRangeOffset = idRangeOffsetParser.parseUShort(); for (var c = startCount; c <= endCount; c += 1) { if (idRangeOffset !== 0) { // The idRangeOffset is relative to the current position in the idRangeOffset array. // Take the current offset in the idRangeOffset array. glyphIndexOffset = (idRangeOffsetParser.offset + idRangeOffsetParser.relativeOffset - 2); // Add the value of the idRangeOffset, which will move us into the glyphIndex array. glyphIndexOffset += idRangeOffset; // Then add the character index of the current segment, multiplied by 2 for USHORTs. glyphIndexOffset += (c - startCount) * 2; glyphIndex = parse.getUShort(data, glyphIndexOffset); if (glyphIndex !== 0) { glyphIndex = (glyphIndex + idDelta) & 0xFFFF; } } else { glyphIndex = (c + idDelta) & 0xFFFF; } cmap.glyphIndexMap[c] = glyphIndex; } } } // Parse the `cmap` table. This table stores the mappings from characters to glyphs. // There are many available formats, but we only support the Windows format 4 and 12. // This function returns a `CmapEncoding` object or null if no supported format could be found. function parseCmapTable(data, start) { var i; var cmap = {}; cmap.version = parse.getUShort(data, start); check.argument(cmap.version === 0, 'cmap table version should be 0.'); // The cmap table can contain many sub-tables, each with their own format. // We're only interested in a "platform 3" table. This is a Windows format. cmap.numTables = parse.getUShort(data, start + 2); var offset = -1; for (i = cmap.numTables - 1; i >= 0; i -= 1) { var platformId = parse.getUShort(data, start + 4 + (i * 8)); var encodingId = parse.getUShort(data, start + 4 + (i * 8) + 2); if (platformId === 3 && (encodingId === 0 || encodingId === 1 || encodingId === 10)) { offset = parse.getULong(data, start + 4 + (i * 8) + 4); break; } } if (offset === -1) { // There is no cmap table in the font that we support, so return null. // This font will be marked as unsupported. 
return null; } var p = new parse.Parser(data, start + offset); cmap.format = p.parseUShort(); if (cmap.format === 12) { parseCmapTableFormat12(cmap, p); } else if (cmap.format === 4) { parseCmapTableFormat4(cmap, p, data, start, offset); } else { throw new Error('Only format 4 and 12 cmap tables are supported.'); } return cmap; } function addSegment(t, code, glyphIndex) { t.segments.push({ end: code, start: code, delta: -(code - glyphIndex), offset: 0 }); } function addTerminatorSegment(t) { t.segments.push({ end: 0xFFFF, start: 0xFFFF, delta: 1, offset: 0 }); } function makeCmapTable(glyphs) { var i; var t = new table.Table('cmap', [ {name: 'version', type: 'USHORT', value: 0}, {name: 'numTables', type: 'USHORT', value: 1}, {name: 'platformID', type: 'USHORT', value: 3}, {name: 'encodingID', type: 'USHORT', value: 1}, {name: 'offset', type: 'ULONG', value: 12}, {name: 'format', type: 'USHORT', value: 4}, {name: 'length', type: 'USHORT', value: 0}, {name: 'language', type: 'USHORT', value: 0}, {name: 'segCountX2', type: 'USHORT', value: 0}, {name: 'searchRange', type: 'USHORT', value: 0}, {name: 'entrySelector', type: 'USHORT', value: 0}, {name: 'rangeShift', type: 'USHORT', value: 0} ]); t.segments = []; for (i = 0; i < glyphs.length; i += 1) { var glyph = glyphs.get(i); for (var j = 0; j < glyph.unicodes.length; j += 1) { addSegment(t, glyph.unicodes[j], i); } t.segments = t.segments.sort(function(a, b) { return a.start - b.start; }); } addTerminatorSegment(t); var segCount; segCount = t.segments.length; t.segCountX2 = segCount * 2; t.searchRange = Math.pow(2, Math.floor(Math.log(segCount) / Math.log(2))) * 2; t.entrySelector = Math.log(t.searchRange / 2) / Math.log(2); t.rangeShift = t.segCountX2 - t.searchRange; // Set up parallel segment arrays. var endCounts = []; var startCounts = []; var idDeltas = []; var idRangeOffsets = []; var glyphIds = []; for (i = 0; i < segCount; i += 1) { var segment = t.segments[i]; endCounts = endCounts.concat({name: 'end_' + i, type: 'USHORT', value: segment.end}); startCounts = startCounts.concat({name: 'start_' + i, type: 'USHORT', value: segment.start}); idDeltas = idDeltas.concat({name: 'idDelta_' + i, type: 'SHORT', value: segment.delta}); idRangeOffsets = idRangeOffsets.concat({name: 'idRangeOffset_' + i, type: 'USHORT', value: segment.offset}); if (segment.glyphId !== undefined) { glyphIds = glyphIds.concat({name: 'glyph_' + i, type: 'USHORT', value: segment.glyphId}); } } t.fields = t.fields.concat(endCounts); t.fields.push({name: 'reservedPad', type: 'USHORT', value: 0}); t.fields = t.fields.concat(startCounts); t.fields = t.fields.concat(idDeltas); t.fields = t.fields.concat(idRangeOffsets); t.fields = t.fields.concat(glyphIds); t.length = 14 + // Subtable header endCounts.length * 2 + 2 + // reservedPad startCounts.length * 2 + idDeltas.length * 2 + idRangeOffsets.length * 2 + glyphIds.length * 2; return t; } exports.parse = parseCmapTable; exports.make = makeCmapTable; },{"../check":2,"../parse":10,"../table":13}],16:[function(require,module,exports){ // The `fvar` table stores font variation axes and instances. 
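// Axis extremes and instance coordinates are written as 16.16 FIXED values: an integer v
// becomes v << 16, e.g. a weight-axis maximum of 700 is stored as 700 * 0x10000 = 0x02BC0000.
// (Sketch only; fractional coordinates would need Math.round(v * 0x10000) instead of a
// plain shift, which truncates.)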
// https://developer.apple.com/fonts/TrueType-Reference-Manual/RM06/Chap6fvar.html 'use strict'; var check = require('../check'); var parse = require('../parse'); var table = require('../table'); function addName(name, names) { var nameString = JSON.stringify(name); var nameID = 256; for (var nameKey in names) { var n = parseInt(nameKey); if (!n || n < 256) { continue; } if (JSON.stringify(names[nameKey]) === nameString) { return n; } if (nameID <= n) { nameID = n + 1; } } names[nameID] = name; return nameID; } function makeFvarAxis(n, axis, names) { var nameID = addName(axis.name, names); return [ {name: 'tag_' + n, type: 'TAG', value: axis.tag}, {name: 'minValue_' + n, type: 'FIXED', value: axis.minValue << 16}, {name: 'defaultValue_' + n, type: 'FIXED', value: axis.defaultValue << 16}, {name: 'maxValue_' + n, type: 'FIXED', value: axis.maxValue << 16}, {name: 'flags_' + n, type: 'USHORT', value: 0}, {name: 'nameID_' + n, type: 'USHORT', value: nameID} ]; } function parseFvarAxis(data, start, names) { var axis = {}; var p = new parse.Parser(data, start); axis.tag = p.parseTag(); axis.minValue = p.parseFixed(); axis.defaultValue = p.parseFixed(); axis.maxValue = p.parseFixed(); p.skip('uShort', 1); // reserved for flags; no values defined axis.name = names[p.parseUShort()] || {}; return axis; } function makeFvarInstance(n, inst, axes, names) { var nameID = addName(inst.name, names); var fields = [ {name: 'nameID_' + n, type: 'USHORT', value: nameID}, {name: 'flags_' + n, type: 'USHORT', value: 0} ]; for (var i = 0; i < axes.length; ++i) { var axisTag = axes[i].tag; fields.push({ name: 'axis_' + n + ' ' + axisTag, type: 'FIXED', value: inst.coordinates[axisTag] << 16 }); } return fields; } function parseFvarInstance(data, start, axes, names) { var inst = {}; var p = new parse.Parser(data, start); inst.name = names[p.parseUShort()] || {}; p.skip('uShort', 1); // reserved for flags; no values defined inst.coordinates = {}; for (var i = 0; i < axes.length; ++i) { inst.coordinates[axes[i].tag] = p.parseFixed(); } return inst; } function makeFvarTable(fvar, names) { var result = new table.Table('fvar', [ {name: 'version', type: 'ULONG', value: 0x10000}, {name: 'offsetToData', type: 'USHORT', value: 0}, {name: 'countSizePairs', type: 'USHORT', value: 2}, {name: 'axisCount', type: 'USHORT', value: fvar.axes.length}, {name: 'axisSize', type: 'USHORT', value: 20}, {name: 'instanceCount', type: 'USHORT', value: fvar.instances.length}, {name: 'instanceSize', type: 'USHORT', value: 4 + fvar.axes.length * 4} ]); result.offsetToData = result.sizeOf(); for (var i = 0; i < fvar.axes.length; i++) { result.fields = result.fields.concat(makeFvarAxis(i, fvar.axes[i], names)); } for (var j = 0; j < fvar.instances.length; j++) { result.fields = result.fields.concat(makeFvarInstance(j, fvar.instances[j], fvar.axes, names)); } return result; } function parseFvarTable(data, start, names) { var p = new parse.Parser(data, start); var tableVersion = p.parseULong(); check.argument(tableVersion === 0x00010000, 'Unsupported fvar table version.'); var offsetToData = p.parseOffset16(); // Skip countSizePairs. 
p.skip('uShort', 1); var axisCount = p.parseUShort(); var axisSize = p.parseUShort(); var instanceCount = p.parseUShort(); var instanceSize = p.parseUShort(); var axes = []; for (var i = 0; i < axisCount; i++) { axes.push(parseFvarAxis(data, start + offsetToData + i * axisSize, names)); } var instances = []; var instanceStart = start + offsetToData + axisCount * axisSize; for (var j = 0; j < instanceCount; j++) { instances.push(parseFvarInstance(data, instanceStart + j * instanceSize, axes, names)); } return {axes: axes, instances: instances}; } exports.make = makeFvarTable; exports.parse = parseFvarTable; },{"../check":2,"../parse":10,"../table":13}],17:[function(require,module,exports){ // The `glyf` table describes the glyphs in TrueType outline format. // http://www.microsoft.com/typography/otspec/glyf.htm 'use strict'; var check = require('../check'); var glyphset = require('../glyphset'); var parse = require('../parse'); var path = require('../path'); // Parse the coordinate data for a glyph. function parseGlyphCoordinate(p, flag, previousValue, shortVectorBitMask, sameBitMask) { var v; if ((flag & shortVectorBitMask) > 0) { // The coordinate is 1 byte long. v = p.parseByte(); // The `same` bit is re-used for short values to signify the sign of the value. if ((flag & sameBitMask) === 0) { v = -v; } v = previousValue + v; } else { // The coordinate is 2 bytes long. // If the `same` bit is set, the coordinate is the same as the previous coordinate. if ((flag & sameBitMask) > 0) { v = previousValue; } else { // Parse the coordinate as a signed 16-bit delta value. v = previousValue + p.parseShort(); } } return v; } // Parse a TrueType glyph. function parseGlyph(glyph, data, start) { var p = new parse.Parser(data, start); glyph.numberOfContours = p.parseShort(); glyph._xMin = p.parseShort(); glyph._yMin = p.parseShort(); glyph._xMax = p.parseShort(); glyph._yMax = p.parseShort(); var flags; var flag; if (glyph.numberOfContours > 0) { var i; // This glyph is not a composite. var endPointIndices = glyph.endPointIndices = []; for (i = 0; i < glyph.numberOfContours; i += 1) { endPointIndices.push(p.parseUShort()); } glyph.instructionLength = p.parseUShort(); glyph.instructions = []; for (i = 0; i < glyph.instructionLength; i += 1) { glyph.instructions.push(p.parseByte()); } var numberOfCoordinates = endPointIndices[endPointIndices.length - 1] + 1; flags = []; for (i = 0; i < numberOfCoordinates; i += 1) { flag = p.parseByte(); flags.push(flag); // If bit 3 is set, we repeat this flag n times, where n is the next byte. if ((flag & 8) > 0) { var repeatCount = p.parseByte(); for (var j = 0; j < repeatCount; j += 1) { flags.push(flag); i += 1; } } } check.argument(flags.length === numberOfCoordinates, 'Bad flags.'); if (endPointIndices.length > 0) { var points = []; var point; // X/Y coordinates are relative to the previous point, except for the first point which is relative to 0,0. 
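// The flag bits consumed by parseGlyphCoordinate (defined above, called in the loops below):
// 0x02 / 0x04 mean the x / y delta is a single unsigned byte whose sign is given by
// 0x10 / 0x20; when the short bit is clear, 0x10 / 0x20 instead mean "same as the previous
// point" (delta 0), otherwise a signed 16-bit delta follows.
// Example: flag 0x12 followed by the byte 25 decodes to x = previousX + 25.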
if (numberOfCoordinates > 0) { for (i = 0; i < numberOfCoordinates; i += 1) { flag = flags[i]; point = {}; point.onCurve = !!(flag & 1); point.lastPointOfContour = endPointIndices.indexOf(i) >= 0; points.push(point); } var px = 0; for (i = 0; i < numberOfCoordinates; i += 1) { flag = flags[i]; point = points[i]; point.x = parseGlyphCoordinate(p, flag, px, 2, 16); px = point.x; } var py = 0; for (i = 0; i < numberOfCoordinates; i += 1) { flag = flags[i]; point = points[i]; point.y = parseGlyphCoordinate(p, flag, py, 4, 32); py = point.y; } } glyph.points = points; } else { glyph.points = []; } } else if (glyph.numberOfContours === 0) { glyph.points = []; } else { glyph.isComposite = true; glyph.points = []; glyph.components = []; var moreComponents = true; while (moreComponents) { flags = p.parseUShort(); var component = { glyphIndex: p.parseUShort(), xScale: 1, scale01: 0, scale10: 0, yScale: 1, dx: 0, dy: 0 }; if ((flags & 1) > 0) { // The arguments are words if ((flags & 2) > 0) { // values are offset component.dx = p.parseShort(); component.dy = p.parseShort(); } else { // values are matched points component.matchedPoints = [p.parseUShort(), p.parseUShort()]; } } else { // The arguments are bytes if ((flags & 2) > 0) { // values are offset component.dx = p.parseChar(); component.dy = p.parseChar(); } else { // values are matched points component.matchedPoints = [p.parseByte(), p.parseByte()]; } } if ((flags & 8) > 0) { // We have a scale component.xScale = component.yScale = p.parseF2Dot14(); } else if ((flags & 64) > 0) { // We have an X / Y scale component.xScale = p.parseF2Dot14(); component.yScale = p.parseF2Dot14(); } else if ((flags & 128) > 0) { // We have a 2x2 transformation component.xScale = p.parseF2Dot14(); component.scale01 = p.parseF2Dot14(); component.scale10 = p.parseF2Dot14(); component.yScale = p.parseF2Dot14(); } glyph.components.push(component); moreComponents = !!(flags & 32); } } } // Transform an array of points and return a new array. function transformPoints(points, transform) { var newPoints = []; for (var i = 0; i < points.length; i += 1) { var pt = points[i]; var newPt = { x: transform.xScale * pt.x + transform.scale01 * pt.y + transform.dx, y: transform.scale10 * pt.x + transform.yScale * pt.y + transform.dy, onCurve: pt.onCurve, lastPointOfContour: pt.lastPointOfContour }; newPoints.push(newPt); } return newPoints; } function getContours(points) { var contours = []; var currentContour = []; for (var i = 0; i < points.length; i += 1) { var pt = points[i]; currentContour.push(pt); if (pt.lastPointOfContour) { contours.push(currentContour); currentContour = []; } } check.argument(currentContour.length === 0, 'There are still points left in the current contour.'); return contours; } // Convert the TrueType glyph outline to a Path. function getPath(points) { var p = new path.Path(); if (!points) { return p; } var contours = getContours(points); for (var i = 0; i < contours.length; i += 1) { var contour = contours[i]; var firstPt = contour[0]; var lastPt = contour[contour.length - 1]; var curvePt; var realFirstPoint; if (firstPt.onCurve) { curvePt = null; // The first point will be consumed by the moveTo command, // so skip it in the loop. realFirstPoint = true; } else { if (lastPt.onCurve) { // If the first point is off-curve and the last point is on-curve, // start at the last point. firstPt = lastPt; } else { // If both first and last points are off-curve, start at their middle. 
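// (TrueType quadratic outlines may omit on-curve points: two consecutive off-curve points
// imply an on-curve point at their midpoint, which is why synthesizing this midpoint gives
// a valid starting point, and why the loop below inserts midpoints between off-curve pairs.)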
firstPt = { x: (firstPt.x + lastPt.x) / 2, y: (firstPt.y + lastPt.y) / 2 }; } curvePt = firstPt; // The first point is synthesized, so don't skip the real first point. realFirstPoint = false; } p.moveTo(firstPt.x, firstPt.y); for (var j = realFirstPoint ? 1 : 0; j < contour.length; j += 1) { var pt = contour[j]; var prevPt = j === 0 ? firstPt : contour[j - 1]; if (prevPt.onCurve && pt.onCurve) { // This is a straight line. p.lineTo(pt.x, pt.y); } else if (prevPt.onCurve && !pt.onCurve) { curvePt = pt; } else if (!prevPt.onCurve && !pt.onCurve) { var midPt = { x: (prevPt.x + pt.x) / 2, y: (prevPt.y + pt.y) / 2 }; p.quadraticCurveTo(prevPt.x, prevPt.y, midPt.x, midPt.y); curvePt = pt; } else if (!prevPt.onCurve && pt.onCurve) { // Previous point off-curve, this point on-curve. p.quadraticCurveTo(curvePt.x, curvePt.y, pt.x, pt.y); curvePt = null; } else { throw new Error('Invalid state.'); } } if (firstPt !== lastPt) { // Connect the last and first points if (curvePt) { p.quadraticCurveTo(curvePt.x, curvePt.y, firstPt.x, firstPt.y); } else { p.lineTo(firstPt.x, firstPt.y); } } } p.closePath(); return p; } function buildPath(glyphs, glyph) { if (glyph.isComposite) { for (var j = 0; j < glyph.components.length; j += 1) { var component = glyph.components[j]; var componentGlyph = glyphs.get(component.glyphIndex); // Force the ttfGlyphLoader to parse the glyph. componentGlyph.getPath(); if (componentGlyph.points) { var transformedPoints; if (component.matchedPoints === undefined) { // component positioned by offset transformedPoints = transformPoints(componentGlyph.points, component); } else { // component positioned by matched points if ((component.matchedPoints[0] > glyph.points.length - 1) || (component.matchedPoints[1] > componentGlyph.points.length - 1)) { throw Error('Matched points out of range in ' + glyph.name); } var firstPt = glyph.points[component.matchedPoints[0]]; var secondPt = componentGlyph.points[component.matchedPoints[1]]; var transform = { xScale: component.xScale, scale01: component.scale01, scale10: component.scale10, yScale: component.yScale, dx: 0, dy: 0 }; secondPt = transformPoints([secondPt], transform)[0]; transform.dx = firstPt.x - secondPt.x; transform.dy = firstPt.y - secondPt.y; transformedPoints = transformPoints(componentGlyph.points, transform); } glyph.points = glyph.points.concat(transformedPoints); } } } return getPath(glyph.points); } // Parse all the glyphs according to the offsets from the `loca` table. function parseGlyfTable(data, start, loca, font) { var glyphs = new glyphset.GlyphSet(font); var i; // The last element of the loca table is invalid. for (i = 0; i < loca.length - 1; i += 1) { var offset = loca[i]; var nextOffset = loca[i + 1]; if (offset !== nextOffset) { glyphs.push(i, glyphset.ttfGlyphLoader(font, i, parseGlyph, data, start + offset, buildPath)); } else { glyphs.push(i, glyphset.glyphLoader(font, i)); } } return glyphs; } exports.parse = parseGlyfTable; },{"../check":2,"../glyphset":7,"../parse":10,"../path":11}],18:[function(require,module,exports){ // The `GPOS` table contains kerning pairs, among other things. // https://www.microsoft.com/typography/OTSPEC/gpos.htm 'use strict'; var check = require('../check'); var parse = require('../parse'); // Parse ScriptList and FeatureList tables of GPOS, GSUB, GDEF, BASE, JSTF tables. // These lists are unused by now, this function is just the basis for a real parsing. 
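// For illustration, the returned value is an object keyed by 4-character tags, e.g.
// (hypothetical) { kern: { offset: 26 }, liga: { offset: 40 } }; only the offsets are
// recorded, the referenced Script/Feature tables themselves are not followed here.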
function parseTaggedListTable(data, start) { var p = new parse.Parser(data, start); var n = p.parseUShort(); var list = []; for (var i = 0; i < n; i++) { list[p.parseTag()] = { offset: p.parseUShort() }; } return list; } // Parse a coverage table in a GSUB, GPOS or GDEF table. // Format 1 is a simple list of glyph ids, // Format 2 is a list of ranges. It is expanded in a list of glyphs, maybe not the best idea. function parseCoverageTable(data, start) { var p = new parse.Parser(data, start); var format = p.parseUShort(); var count = p.parseUShort(); if (format === 1) { return p.parseUShortList(count); } else if (format === 2) { var coverage = []; for (; count--;) { var begin = p.parseUShort(); var end = p.parseUShort(); var index = p.parseUShort(); for (var i = begin; i <= end; i++) { coverage[index++] = i; } } return coverage; } } // Parse a Class Definition Table in a GSUB, GPOS or GDEF table. // Returns a function that gets a class value from a glyph ID. function parseClassDefTable(data, start) { var p = new parse.Parser(data, start); var format = p.parseUShort(); if (format === 1) { // Format 1 specifies a range of consecutive glyph indices, one class per glyph ID. var startGlyph = p.parseUShort(); var glyphCount = p.parseUShort(); var classes = p.parseUShortList(glyphCount); return function(glyphID) { return classes[glyphID - startGlyph] || 0; }; } else if (format === 2) { // Format 2 defines multiple groups of glyph indices that belong to the same class. var rangeCount = p.parseUShort(); var startGlyphs = []; var endGlyphs = []; var classValues = []; for (var i = 0; i < rangeCount; i++) { startGlyphs[i] = p.parseUShort(); endGlyphs[i] = p.parseUShort(); classValues[i] = p.parseUShort(); } return function(glyphID) { var l = 0; var r = startGlyphs.length - 1; while (l < r) { var c = (l + r + 1) >> 1; if (glyphID < startGlyphs[c]) { r = c - 1; } else { l = c; } } if (startGlyphs[l] <= glyphID && glyphID <= endGlyphs[l]) { return classValues[l] || 0; } return 0; }; } } // Parse a pair adjustment positioning subtable, format 1 or format 2 // The subtable is returned in the form of a lookup function. function parsePairPosSubTable(data, start) { var p = new parse.Parser(data, start); // This part is common to format 1 and format 2 subtables var format = p.parseUShort(); var coverageOffset = p.parseUShort(); var coverage = parseCoverageTable(data, start + coverageOffset); // valueFormat 4: XAdvance only, 1: XPlacement only, 0: no ValueRecord for second glyph // Only valueFormat1=4 and valueFormat2=0 is supported. 
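// (valueFormat is a bit mask: 0x0001 = XPlacement, 0x0002 = YPlacement, 0x0004 = XAdvance,
// 0x0008 = YAdvance, plus device-table bits. So valueFormat1 === 4 below means each
// ValueRecord is a single signed XAdvance adjustment, and valueFormat2 === 0 means the
// second glyph carries no record at all.)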
var valueFormat1 = p.parseUShort(); var valueFormat2 = p.parseUShort(); var value1; var value2; if (valueFormat1 !== 4 || valueFormat2 !== 0) return; var sharedPairSets = {}; if (format === 1) { // Pair Positioning Adjustment: Format 1 var pairSetCount = p.parseUShort(); var pairSet = []; // Array of offsets to PairSet tables-from beginning of PairPos subtable-ordered by Coverage Index var pairSetOffsets = p.parseOffset16List(pairSetCount); for (var firstGlyph = 0; firstGlyph < pairSetCount; firstGlyph++) { var pairSetOffset = pairSetOffsets[firstGlyph]; var sharedPairSet = sharedPairSets[pairSetOffset]; if (!sharedPairSet) { // Parse a pairset table in a pair adjustment subtable format 1 sharedPairSet = {}; p.relativeOffset = pairSetOffset; var pairValueCount = p.parseUShort(); for (; pairValueCount--;) { var secondGlyph = p.parseUShort(); if (valueFormat1) value1 = p.parseShort(); if (valueFormat2) value2 = p.parseShort(); // We only support valueFormat1 = 4 and valueFormat2 = 0, // so value1 is the XAdvance and value2 is empty. sharedPairSet[secondGlyph] = value1; } } pairSet[coverage[firstGlyph]] = sharedPairSet; } return function(leftGlyph, rightGlyph) { var pairs = pairSet[leftGlyph]; if (pairs) return pairs[rightGlyph]; }; } else if (format === 2) { // Pair Positioning Adjustment: Format 2 var classDef1Offset = p.parseUShort(); var classDef2Offset = p.parseUShort(); var class1Count = p.parseUShort(); var class2Count = p.parseUShort(); var getClass1 = parseClassDefTable(data, start + classDef1Offset); var getClass2 = parseClassDefTable(data, start + classDef2Offset); // Parse kerning values by class pair. var kerningMatrix = []; for (var i = 0; i < class1Count; i++) { var kerningRow = kerningMatrix[i] = []; for (var j = 0; j < class2Count; j++) { if (valueFormat1) value1 = p.parseShort(); if (valueFormat2) value2 = p.parseShort(); // We only support valueFormat1 = 4 and valueFormat2 = 0, // so value1 is the XAdvance and value2 is empty. kerningRow[j] = value1; } } // Convert coverage list to a hash var covered = {}; for (i = 0; i < coverage.length; i++) covered[coverage[i]] = 1; // Get the kerning value for a specific glyph pair. return function(leftGlyph, rightGlyph) { if (!covered[leftGlyph]) return; var class1 = getClass1(leftGlyph); var class2 = getClass2(rightGlyph); var kerningRow = kerningMatrix[class1]; if (kerningRow) { return kerningRow[class2]; } }; } } // Parse a LookupTable (present in of GPOS, GSUB, GDEF, BASE, JSTF tables). function parseLookupTable(data, start) { var p = new parse.Parser(data, start); var lookupType = p.parseUShort(); var lookupFlag = p.parseUShort(); var useMarkFilteringSet = lookupFlag & 0x10; var subTableCount = p.parseUShort(); var subTableOffsets = p.parseOffset16List(subTableCount); var table = { lookupType: lookupType, lookupFlag: lookupFlag, markFilteringSet: useMarkFilteringSet ? p.parseUShort() : -1 }; // LookupType 2, Pair adjustment if (lookupType === 2) { var subtables = []; for (var i = 0; i < subTableCount; i++) { subtables.push(parsePairPosSubTable(data, start + subTableOffsets[i])); } // Return a function which finds the kerning values in the subtables. table.getKerningValue = function(leftGlyph, rightGlyph) { for (var i = subtables.length; i--;) { var value = subtables[i](leftGlyph, rightGlyph); if (value !== undefined) return value; } return 0; }; } return table; } // Parse the `GPOS` table which contains, among other things, kerning pairs. 
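// Assumed usage sketch (outside this module): after parsing, font.getGposKerningValue(left, right)
// returns the XAdvance adjustment in font units for a glyph-index pair, or 0 when no
// pair-adjustment subtable covers it.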
// https://www.microsoft.com/typography/OTSPEC/gpos.htm function parseGposTable(data, start, font) { var p = new parse.Parser(data, start); var tableVersion = p.parseFixed(); check.argument(tableVersion === 1, 'Unsupported GPOS table version.'); // ScriptList and FeatureList - ignored for now parseTaggedListTable(data, start + p.parseUShort()); // 'kern' is the feature we are looking for. parseTaggedListTable(data, start + p.parseUShort()); // LookupList var lookupListOffset = p.parseUShort(); p.relativeOffset = lookupListOffset; var lookupCount = p.parseUShort(); var lookupTableOffsets = p.parseOffset16List(lookupCount); var lookupListAbsoluteOffset = start + lookupListOffset; for (var i = 0; i < lookupCount; i++) { var table = parseLookupTable(data, lookupListAbsoluteOffset + lookupTableOffsets[i]); if (table.lookupType === 2 && !font.getGposKerningValue) font.getGposKerningValue = table.getKerningValue; } } exports.parse = parseGposTable; },{"../check":2,"../parse":10}],19:[function(require,module,exports){ // The `GSUB` table contains ligatures, among other things. // https://www.microsoft.com/typography/OTSPEC/gsub.htm 'use strict'; var check = require('../check'); var Parser = require('../parse').Parser; var subtableParsers = new Array(9); // subtableParsers[0] is unused var table = require('../table'); // https://www.microsoft.com/typography/OTSPEC/GSUB.htm#SS subtableParsers[1] = function parseLookup1() { var start = this.offset + this.relativeOffset; var substFormat = this.parseUShort(); if (substFormat === 1) { return { substFormat: 1, coverage: this.parsePointer(Parser.coverage), deltaGlyphId: this.parseUShort() }; } else if (substFormat === 2) { return { substFormat: 2, coverage: this.parsePointer(Parser.coverage), substitute: this.parseOffset16List() }; } check.assert(false, '0x' + start.toString(16) + ': lookup type 1 format must be 1 or 2.'); }; // https://www.microsoft.com/typography/OTSPEC/GSUB.htm#MS subtableParsers[2] = function parseLookup2() { var substFormat = this.parseUShort(); check.argument(substFormat === 1, 'GSUB Multiple Substitution Subtable identifier-format must be 1'); return { substFormat: substFormat, coverage: this.parsePointer(Parser.coverage), sequences: this.parseListOfLists() }; }; // https://www.microsoft.com/typography/OTSPEC/GSUB.htm#AS subtableParsers[3] = function parseLookup3() { var substFormat = this.parseUShort(); check.argument(substFormat === 1, 'GSUB Alternate Substitution Subtable identifier-format must be 1'); return { substFormat: substFormat, coverage: this.parsePointer(Parser.coverage), alternateSets: this.parseListOfLists() }; }; // https://www.microsoft.com/typography/OTSPEC/GSUB.htm#LS subtableParsers[4] = function parseLookup4() { var substFormat = this.parseUShort(); check.argument(substFormat === 1, 'GSUB ligature table identifier-format must be 1'); return { substFormat: substFormat, coverage: this.parsePointer(Parser.coverage), ligatureSets: this.parseListOfLists(function() { return { ligGlyph: this.parseUShort(), components: this.parseUShortList(this.parseUShort() - 1) }; }) }; }; var lookupRecordDesc = { sequenceIndex: Parser.uShort, lookupListIndex: Parser.uShort }; // https://www.microsoft.com/typography/OTSPEC/GSUB.htm#CSF subtableParsers[5] = function parseLookup5() { var start = this.offset + this.relativeOffset; var substFormat = this.parseUShort(); if (substFormat === 1) { return { substFormat: substFormat, coverage: this.parsePointer(Parser.coverage), ruleSets: this.parseListOfLists(function() { var glyphCount = 
this.parseUShort(); var substCount = this.parseUShort(); return { input: this.parseUShortList(glyphCount - 1), lookupRecords: this.parseRecordList(substCount, lookupRecordDesc) }; }) }; } else if (substFormat === 2) { return { substFormat: substFormat, coverage: this.parsePointer(Parser.coverage), classDef: this.parsePointer(Parser.classDef), classSets: this.parseListOfLists(function() { var glyphCount = this.parseUShort(); var substCount = this.parseUShort(); return { classes: this.parseUShortList(glyphCount - 1), lookupRecords: this.parseRecordList(substCount, lookupRecordDesc) }; }) }; } else if (substFormat === 3) { var glyphCount = this.parseUShort(); var substCount = this.parseUShort(); return { substFormat: substFormat, coverages: this.parseList(glyphCount, Parser.pointer(Parser.coverage)), lookupRecords: this.parseRecordList(substCount, lookupRecordDesc) }; } check.assert(false, '0x' + start.toString(16) + ': lookup type 5 format must be 1, 2 or 3.'); }; // https://www.microsoft.com/typography/OTSPEC/GSUB.htm#CC subtableParsers[6] = function parseLookup6() { var start = this.offset + this.relativeOffset; var substFormat = this.parseUShort(); if (substFormat === 1) { return { substFormat: 1, coverage: this.parsePointer(Parser.coverage), chainRuleSets: this.parseListOfLists(function() { return { backtrack: this.parseUShortList(), input: this.parseUShortList(this.parseShort() - 1), lookahead: this.parseUShortList(), lookupRecords: this.parseRecordList(lookupRecordDesc) }; }) }; } else if (substFormat === 2) { return { substFormat: 2, coverage: this.parsePointer(Parser.coverage), backtrackClassDef: this.parsePointer(Parser.classDef), inputClassDef: this.parsePointer(Parser.classDef), lookaheadClassDef: this.parsePointer(Parser.classDef), chainClassSet: this.parseListOfLists(function() { return { backtrack: this.parseUShortList(), input: this.parseUShortList(this.parseShort() - 1), lookahead: this.parseUShortList(), lookupRecords: this.parseRecordList(lookupRecordDesc) }; }) }; } else if (substFormat === 3) { return { substFormat: 3, backtrackCoverage: this.parseList(Parser.pointer(Parser.coverage)), inputCoverage: this.parseList(Parser.pointer(Parser.coverage)), lookaheadCoverage: this.parseList(Parser.pointer(Parser.coverage)), lookupRecords: this.parseRecordList(lookupRecordDesc) }; } check.assert(false, '0x' + start.toString(16) + ': lookup type 6 format must be 1, 2 or 3.'); }; // https://www.microsoft.com/typography/OTSPEC/GSUB.htm#ES subtableParsers[7] = function parseLookup7() { // Extension Substitution subtable var substFormat = this.parseUShort(); check.argument(substFormat === 1, 'GSUB Extension Substitution subtable identifier-format must be 1'); var extensionLookupType = this.parseUShort(); var extensionParser = new Parser(this.data, this.offset + this.parseULong()); return { substFormat: 1, lookupType: extensionLookupType, extension: subtableParsers[extensionLookupType].call(extensionParser) }; }; // https://www.microsoft.com/typography/OTSPEC/GSUB.htm#RCCS subtableParsers[8] = function parseLookup8() { var substFormat = this.parseUShort(); check.argument(substFormat === 1, 'GSUB Reverse Chaining Contextual Single Substitution Subtable identifier-format must be 1'); return { substFormat: substFormat, coverage: this.parsePointer(Parser.coverage), backtrackCoverage: this.parseList(Parser.pointer(Parser.coverage)), lookaheadCoverage: this.parseList(Parser.pointer(Parser.coverage)), substitutes: this.parseUShortList() }; }; // 
https://www.microsoft.com/typography/OTSPEC/gsub.htm function parseGsubTable(data, start) { start = start || 0; var p = new Parser(data, start); var tableVersion = p.parseVersion(); check.argument(tableVersion === 1, 'Unsupported GSUB table version.'); return { version: tableVersion, scripts: p.parseScriptList(), features: p.parseFeatureList(), lookups: p.parseLookupList(subtableParsers) }; } // GSUB Writing ////////////////////////////////////////////// var subtableMakers = new Array(9); subtableMakers[1] = function makeLookup1(subtable) { if (subtable.substFormat === 1) { return new table.Table('substitutionTable', [ {name: 'substFormat', type: 'USHORT', value: 1}, {name: 'coverage', type: 'TABLE', value: new table.Coverage(subtable.coverage)}, {name: 'deltaGlyphID', type: 'USHORT', value: subtable.deltaGlyphId} ]); } else { return new table.Table('substitutionTable', [ {name: 'substFormat', type: 'USHORT', value: 2}, {name: 'coverage', type: 'TABLE', value: new table.Coverage(subtable.coverage)} ].concat(table.ushortList('substitute', subtable.substitute))); } check.fail('Lookup type 1 substFormat must be 1 or 2.'); }; subtableMakers[3] = function makeLookup3(subtable) { check.assert(subtable.substFormat === 1, 'Lookup type 3 substFormat must be 1.'); return new table.Table('substitutionTable', [ {name: 'substFormat', type: 'USHORT', value: 1}, {name: 'coverage', type: 'TABLE', value: new table.Coverage(subtable.coverage)} ].concat(table.tableList('altSet', subtable.alternateSets, function(alternateSet) { return new table.Table('alternateSetTable', table.ushortList('alternate', alternateSet)); }))); }; subtableMakers[4] = function makeLookup4(subtable) { check.assert(subtable.substFormat === 1, 'Lookup type 4 substFormat must be 1.'); return new table.Table('substitutionTable', [ {name: 'substFormat', type: 'USHORT', value: 1}, {name: 'coverage', type: 'TABLE', value: new table.Coverage(subtable.coverage)} ].concat(table.tableList('ligSet', subtable.ligatureSets, function(ligatureSet) { return new table.Table('ligatureSetTable', table.tableList('ligature', ligatureSet, function(ligature) { return new table.Table('ligatureTable', [{name: 'ligGlyph', type: 'USHORT', value: ligature.ligGlyph}] .concat(table.ushortList('component', ligature.components, ligature.components.length + 1)) ); })); }))); }; function makeGsubTable(gsub) { return new table.Table('GSUB', [ {name: 'version', type: 'ULONG', value: 0x10000}, {name: 'scripts', type: 'TABLE', value: new table.ScriptList(gsub.scripts)}, {name: 'features', type: 'TABLE', value: new table.FeatureList(gsub.features)}, {name: 'lookups', type: 'TABLE', value: new table.LookupList(gsub.lookups, subtableMakers)} ]); } exports.parse = parseGsubTable; exports.make = makeGsubTable; },{"../check":2,"../parse":10,"../table":13}],20:[function(require,module,exports){ // The `head` table contains global information about the font. 
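// The LONGDATETIME fields count seconds since 1904-01-01. The constant 2082844800 used when
// writing is the distance to the Unix epoch: 66 years, 17 of them leap years, so
// (66 * 365 + 17) * 86400 = 2082844800 seconds.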
// https://www.microsoft.com/typography/OTSPEC/head.htm 'use strict'; var check = require('../check'); var parse = require('../parse'); var table = require('../table'); // Parse the header `head` table function parseHeadTable(data, start) { var head = {}; var p = new parse.Parser(data, start); head.version = p.parseVersion(); head.fontRevision = Math.round(p.parseFixed() * 1000) / 1000; head.checkSumAdjustment = p.parseULong(); head.magicNumber = p.parseULong(); check.argument(head.magicNumber === 0x5F0F3CF5, 'Font header has wrong magic number.'); head.flags = p.parseUShort(); head.unitsPerEm = p.parseUShort(); head.created = p.parseLongDateTime(); head.modified = p.parseLongDateTime(); head.xMin = p.parseShort(); head.yMin = p.parseShort(); head.xMax = p.parseShort(); head.yMax = p.parseShort(); head.macStyle = p.parseUShort(); head.lowestRecPPEM = p.parseUShort(); head.fontDirectionHint = p.parseShort(); head.indexToLocFormat = p.parseShort(); head.glyphDataFormat = p.parseShort(); return head; } function makeHeadTable(options) { // Apple Mac timestamp epoch is 01/01/1904 not 01/01/1970 var timestamp = Math.round(new Date().getTime() / 1000) + 2082844800; var createdTimestamp = timestamp; if (options.createdTimestamp) { createdTimestamp = options.createdTimestamp + 2082844800; } return new table.Table('head', [ {name: 'version', type: 'FIXED', value: 0x00010000}, {name: 'fontRevision', type: 'FIXED', value: 0x00010000}, {name: 'checkSumAdjustment', type: 'ULONG', value: 0}, {name: 'magicNumber', type: 'ULONG', value: 0x5F0F3CF5}, {name: 'flags', type: 'USHORT', value: 0}, {name: 'unitsPerEm', type: 'USHORT', value: 1000}, {name: 'created', type: 'LONGDATETIME', value: createdTimestamp}, {name: 'modified', type: 'LONGDATETIME', value: timestamp}, {name: 'xMin', type: 'SHORT', value: 0}, {name: 'yMin', type: 'SHORT', value: 0}, {name: 'xMax', type: 'SHORT', value: 0}, {name: 'yMax', type: 'SHORT', value: 0}, {name: 'macStyle', type: 'USHORT', value: 0}, {name: 'lowestRecPPEM', type: 'USHORT', value: 0}, {name: 'fontDirectionHint', type: 'SHORT', value: 2}, {name: 'indexToLocFormat', type: 'SHORT', value: 0}, {name: 'glyphDataFormat', type: 'SHORT', value: 0} ], options); } exports.parse = parseHeadTable; exports.make = makeHeadTable; },{"../check":2,"../parse":10,"../table":13}],21:[function(require,module,exports){ // The `hhea` table contains information for horizontal layout. 
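// numberOfHMetrics (parsed below) also drives the `hmtx` table: only the first
// numberOfHMetrics glyphs carry an explicit advance width and all remaining glyphs reuse
// the last one, which is how monospaced fonts keep their metrics compact.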
// https://www.microsoft.com/typography/OTSPEC/hhea.htm 'use strict'; var parse = require('../parse'); var table = require('../table'); // Parse the horizontal header `hhea` table function parseHheaTable(data, start) { var hhea = {}; var p = new parse.Parser(data, start); hhea.version = p.parseVersion(); hhea.ascender = p.parseShort(); hhea.descender = p.parseShort(); hhea.lineGap = p.parseShort(); hhea.advanceWidthMax = p.parseUShort(); hhea.minLeftSideBearing = p.parseShort(); hhea.minRightSideBearing = p.parseShort(); hhea.xMaxExtent = p.parseShort(); hhea.caretSlopeRise = p.parseShort(); hhea.caretSlopeRun = p.parseShort(); hhea.caretOffset = p.parseShort(); p.relativeOffset += 8; hhea.metricDataFormat = p.parseShort(); hhea.numberOfHMetrics = p.parseUShort(); return hhea; } function makeHheaTable(options) { return new table.Table('hhea', [ {name: 'version', type: 'FIXED', value: 0x00010000}, {name: 'ascender', type: 'FWORD', value: 0}, {name: 'descender', type: 'FWORD', value: 0}, {name: 'lineGap', type: 'FWORD', value: 0}, {name: 'advanceWidthMax', type: 'UFWORD', value: 0}, {name: 'minLeftSideBearing', type: 'FWORD', value: 0}, {name: 'minRightSideBearing', type: 'FWORD', value: 0}, {name: 'xMaxExtent', type: 'FWORD', value: 0}, {name: 'caretSlopeRise', type: 'SHORT', value: 1}, {name: 'caretSlopeRun', type: 'SHORT', value: 0}, {name: 'caretOffset', type: 'SHORT', value: 0}, {name: 'reserved1', type: 'SHORT', value: 0}, {name: 'reserved2', type: 'SHORT', value: 0}, {name: 'reserved3', type: 'SHORT', value: 0}, {name: 'reserved4', type: 'SHORT', value: 0}, {name: 'metricDataFormat', type: 'SHORT', value: 0}, {name: 'numberOfHMetrics', type: 'USHORT', value: 0} ], options); } exports.parse = parseHheaTable; exports.make = makeHheaTable; },{"../parse":10,"../table":13}],22:[function(require,module,exports){ // The `hmtx` table contains the horizontal metrics for all glyphs. // https://www.microsoft.com/typography/OTSPEC/hmtx.htm 'use strict'; var parse = require('../parse'); var table = require('../table'); // Parse the `hmtx` table, which contains the horizontal metrics for all glyphs. // This function augments the glyph array, adding the advanceWidth and leftSideBearing to each glyph. function parseHmtxTable(data, start, numMetrics, numGlyphs, glyphs) { var advanceWidth; var leftSideBearing; var p = new parse.Parser(data, start); for (var i = 0; i < numGlyphs; i += 1) { // If the font is monospaced, only one entry is needed. This last entry applies to all subsequent glyphs. if (i < numMetrics) { advanceWidth = p.parseUShort(); leftSideBearing = p.parseShort(); } var glyph = glyphs.get(i); glyph.advanceWidth = advanceWidth; glyph.leftSideBearing = leftSideBearing; } } function makeHmtxTable(glyphs) { var t = new table.Table('hmtx', []); for (var i = 0; i < glyphs.length; i += 1) { var glyph = glyphs.get(i); var advanceWidth = glyph.advanceWidth || 0; var leftSideBearing = glyph.leftSideBearing || 0; t.fields.push({name: 'advanceWidth_' + i, type: 'USHORT', value: advanceWidth}); t.fields.push({name: 'leftSideBearing_' + i, type: 'SHORT', value: leftSideBearing}); } return t; } exports.parse = parseHmtxTable; exports.make = makeHmtxTable; },{"../parse":10,"../table":13}],23:[function(require,module,exports){ // The `kern` table contains kerning pairs. // Note that some fonts use the GPOS OpenType layout table to specify kerning. 
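// The parsed result is a flat map keyed by "leftGlyphIndex,rightGlyphIndex"; for example
// (hypothetical values) pairs['36,57'] === -120 tightens that pair by 120 font units.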
// https://www.microsoft.com/typography/OTSPEC/kern.htm 'use strict'; var check = require('../check'); var parse = require('../parse'); function parseWindowsKernTable(p) { var pairs = {}; // Skip nTables. p.skip('uShort'); var subtableVersion = p.parseUShort(); check.argument(subtableVersion === 0, 'Unsupported kern sub-table version.'); // Skip subtableLength, subtableCoverage p.skip('uShort', 2); var nPairs = p.parseUShort(); // Skip searchRange, entrySelector, rangeShift. p.skip('uShort', 3); for (var i = 0; i < nPairs; i += 1) { var leftIndex = p.parseUShort(); var rightIndex = p.parseUShort(); var value = p.parseShort(); pairs[leftIndex + ',' + rightIndex] = value; } return pairs; } function parseMacKernTable(p) { var pairs = {}; // The Mac kern table stores the version as a fixed (32 bits) but we only loaded the first 16 bits. // Skip the rest. p.skip('uShort'); var nTables = p.parseULong(); //check.argument(nTables === 1, 'Only 1 subtable is supported (got ' + nTables + ').'); if (nTables > 1) { console.warn('Only the first kern subtable is supported.'); } p.skip('uLong'); var coverage = p.parseUShort(); var subtableVersion = coverage & 0xFF; p.skip('uShort'); if (subtableVersion === 0) { var nPairs = p.parseUShort(); // Skip searchRange, entrySelector, rangeShift. p.skip('uShort', 3); for (var i = 0; i < nPairs; i += 1) { var leftIndex = p.parseUShort(); var rightIndex = p.parseUShort(); var value = p.parseShort(); pairs[leftIndex + ',' + rightIndex] = value; } } return pairs; } // Parse the `kern` table which contains kerning pairs. function parseKernTable(data, start) { var p = new parse.Parser(data, start); var tableVersion = p.parseUShort(); if (tableVersion === 0) { return parseWindowsKernTable(p); } else if (tableVersion === 1) { return parseMacKernTable(p); } else { throw new Error('Unsupported kern table version (' + tableVersion + ').'); } } exports.parse = parseKernTable; },{"../check":2,"../parse":10}],24:[function(require,module,exports){ // The `loca` table stores the offsets to the locations of the glyphs in the font. // https://www.microsoft.com/typography/OTSPEC/loca.htm 'use strict'; var parse = require('../parse'); // Parse the `loca` table. This table stores the offsets to the locations of the glyphs in the font, // relative to the beginning of the glyphData table. // The number of glyphs stored in the `loca` table is specified in the `maxp` table (under numGlyphs) // The loca table has two versions: a short version where offsets are stored as uShorts, and a long // version where offsets are stored as uLongs. The `head` table specifies which version to use // (under indexToLocFormat). function parseLocaTable(data, start, numGlyphs, shortVersion) { var p = new parse.Parser(data, start); var parseFn = shortVersion ? p.parseUShort : p.parseULong; // There is an extra entry after the last index element to compute the length of the last glyph. // That's why we use numGlyphs + 1. var glyphOffsets = []; for (var i = 0; i < numGlyphs + 1; i += 1) { var glyphOffset = parseFn.call(p); if (shortVersion) { // The short table version stores the actual offset divided by 2. glyphOffset *= 2; } glyphOffsets.push(glyphOffset); } return glyphOffsets; } exports.parse = parseLocaTable; },{"../parse":10}],25:[function(require,module,exports){ // The `ltag` table stores IETF BCP-47 language tags. It allows supporting // languages for which TrueType does not assign a numeric code. 
// https://developer.apple.com/fonts/TrueType-Reference-Manual/RM06/Chap6ltag.html // http://www.w3.org/International/articles/language-tags/ // http://www.iana.org/assignments/language-subtag-registry/language-subtag-registry 'use strict'; var check = require('../check'); var parse = require('../parse'); var table = require('../table'); function makeLtagTable(tags) { var result = new table.Table('ltag', [ {name: 'version', type: 'ULONG', value: 1}, {name: 'flags', type: 'ULONG', value: 0}, {name: 'numTags', type: 'ULONG', value: tags.length} ]); var stringPool = ''; var stringPoolOffset = 12 + tags.length * 4; for (var i = 0; i < tags.length; ++i) { var pos = stringPool.indexOf(tags[i]); if (pos < 0) { pos = stringPool.length; stringPool += tags[i]; } result.fields.push({name: 'offset ' + i, type: 'USHORT', value: stringPoolOffset + pos}); result.fields.push({name: 'length ' + i, type: 'USHORT', value: tags[i].length}); } result.fields.push({name: 'stringPool', type: 'CHARARRAY', value: stringPool}); return result; } function parseLtagTable(data, start) { var p = new parse.Parser(data, start); var tableVersion = p.parseULong(); check.argument(tableVersion === 1, 'Unsupported ltag table version.'); // The 'ltag' specification does not define any flags; skip the field. p.skip('uLong', 1); var numTags = p.parseULong(); var tags = []; for (var i = 0; i < numTags; i++) { var tag = ''; var offset = start + p.parseUShort(); var length = p.parseUShort(); for (var j = offset; j < offset + length; ++j) { tag += String.fromCharCode(data.getInt8(j)); } tags.push(tag); } return tags; } exports.make = makeLtagTable; exports.parse = parseLtagTable; },{"../check":2,"../parse":10,"../table":13}],26:[function(require,module,exports){ // The `maxp` table establishes the memory requirements for the font. // We need it just to get the number of glyphs in the font. // https://www.microsoft.com/typography/OTSPEC/maxp.htm 'use strict'; var parse = require('../parse'); var table = require('../table'); // Parse the maximum profile `maxp` table. function parseMaxpTable(data, start) { var maxp = {}; var p = new parse.Parser(data, start); maxp.version = p.parseVersion(); maxp.numGlyphs = p.parseUShort(); if (maxp.version === 1.0) { maxp.maxPoints = p.parseUShort(); maxp.maxContours = p.parseUShort(); maxp.maxCompositePoints = p.parseUShort(); maxp.maxCompositeContours = p.parseUShort(); maxp.maxZones = p.parseUShort(); maxp.maxTwilightPoints = p.parseUShort(); maxp.maxStorage = p.parseUShort(); maxp.maxFunctionDefs = p.parseUShort(); maxp.maxInstructionDefs = p.parseUShort(); maxp.maxStackElements = p.parseUShort(); maxp.maxSizeOfInstructions = p.parseUShort(); maxp.maxComponentElements = p.parseUShort(); maxp.maxComponentDepth = p.parseUShort(); } return maxp; } function makeMaxpTable(numGlyphs) { return new table.Table('maxp', [ {name: 'version', type: 'FIXED', value: 0x00005000}, {name: 'numGlyphs', type: 'USHORT', value: numGlyphs} ]); } exports.parse = parseMaxpTable; exports.make = makeMaxpTable; },{"../parse":10,"../table":13}],27:[function(require,module,exports){ // The `GPOS` table contains kerning pairs, among other things. // https://www.microsoft.com/typography/OTSPEC/gpos.htm 'use strict'; var types = require('../types'); var decode = types.decode; var check = require('../check'); var parse = require('../parse'); var table = require('../table'); // Parse the metadata `meta` table. 
// https://developer.apple.com/fonts/TrueType-Reference-Manual/RM06/Chap6meta.html function parseMetaTable(data, start) { var p = new parse.Parser(data, start); var tableVersion = p.parseULong(); check.argument(tableVersion === 1, 'Unsupported META table version.'); p.parseULong(); // flags - currently unused and set to 0 p.parseULong(); // tableOffset var numDataMaps = p.parseULong(); var tags = {}; for (var i = 0; i < numDataMaps; i++) { var tag = p.parseTag(); var dataOffset = p.parseULong(); var dataLength = p.parseULong(); var text = decode.UTF8(data, start + dataOffset, dataLength); tags[tag] = text; } return tags; } function makeMetaTable(tags) { var numTags = Object.keys(tags).length; var stringPool = ''; var stringPoolOffset = 16 + numTags * 12; var result = new table.Table('meta', [ {name: 'version', type: 'ULONG', value: 1}, {name: 'flags', type: 'ULONG', value: 0}, {name: 'offset', type: 'ULONG', value: stringPoolOffset}, {name: 'numTags', type: 'ULONG', value: numTags} ]); for (var tag in tags) { var pos = stringPool.length; stringPool += tags[tag]; result.fields.push({name: 'tag ' + tag, type: 'TAG', value: tag}); result.fields.push({name: 'offset ' + tag, type: 'ULONG', value: stringPoolOffset + pos}); result.fields.push({name: 'length ' + tag, type: 'ULONG', value: tags[tag].length}); } result.fields.push({name: 'stringPool', type: 'CHARARRAY', value: stringPool}); return result; } exports.parse = parseMetaTable; exports.make = makeMetaTable; },{"../check":2,"../parse":10,"../table":13,"../types":32}],28:[function(require,module,exports){ // The `name` naming table. // https://www.microsoft.com/typography/OTSPEC/name.htm 'use strict'; var types = require('../types'); var decode = types.decode; var encode = types.encode; var parse = require('../parse'); var table = require('../table'); // NameIDs for the name table. 
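// The array index doubles as the numeric name ID (0 = copyright, 1 = fontFamily, ...).
// IDs from 256 upward are font-specific; the fvar code above allocates those for axis and
// instance names via addName().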
var nameTableNames = [ 'copyright', // 0 'fontFamily', // 1 'fontSubfamily', // 2 'uniqueID', // 3 'fullName', // 4 'version', // 5 'postScriptName', // 6 'trademark', // 7 'manufacturer', // 8 'designer', // 9 'description', // 10 'manufacturerURL', // 11 'designerURL', // 12 'license', // 13 'licenseURL', // 14 'reserved', // 15 'preferredFamily', // 16 'preferredSubfamily', // 17 'compatibleFullName', // 18 'sampleText', // 19 'postScriptFindFontName', // 20 'wwsFamily', // 21 'wwsSubfamily' // 22 ]; var macLanguages = { 0: 'en', 1: 'fr', 2: 'de', 3: 'it', 4: 'nl', 5: 'sv', 6: 'es', 7: 'da', 8: 'pt', 9: 'no', 10: 'he', 11: 'ja', 12: 'ar', 13: 'fi', 14: 'el', 15: 'is', 16: 'mt', 17: 'tr', 18: 'hr', 19: 'zh-Hant', 20: 'ur', 21: 'hi', 22: 'th', 23: 'ko', 24: 'lt', 25: 'pl', 26: 'hu', 27: 'es', 28: 'lv', 29: 'se', 30: 'fo', 31: 'fa', 32: 'ru', 33: 'zh', 34: 'nl-BE', 35: 'ga', 36: 'sq', 37: 'ro', 38: 'cz', 39: 'sk', 40: 'si', 41: 'yi', 42: 'sr', 43: 'mk', 44: 'bg', 45: 'uk', 46: 'be', 47: 'uz', 48: 'kk', 49: 'az-Cyrl', 50: 'az-Arab', 51: 'hy', 52: 'ka', 53: 'mo', 54: 'ky', 55: 'tg', 56: 'tk', 57: 'mn-CN', 58: 'mn', 59: 'ps', 60: 'ks', 61: 'ku', 62: 'sd', 63: 'bo', 64: 'ne', 65: 'sa', 66: 'mr', 67: 'bn', 68: 'as', 69: 'gu', 70: 'pa', 71: 'or', 72: 'ml', 73: 'kn', 74: 'ta', 75: 'te', 76: 'si', 77: 'my', 78: 'km', 79: 'lo', 80: 'vi', 81: 'id', 82: 'tl', 83: 'ms', 84: 'ms-Arab', 85: 'am', 86: 'ti', 87: 'om', 88: 'so', 89: 'sw', 90: 'rw', 91: 'rn', 92: 'ny', 93: 'mg', 94: 'eo', 128: 'cy', 129: 'eu', 130: 'ca', 131: 'la', 132: 'qu', 133: 'gn', 134: 'ay', 135: 'tt', 136: 'ug', 137: 'dz', 138: 'jv', 139: 'su', 140: 'gl', 141: 'af', 142: 'br', 143: 'iu', 144: 'gd', 145: 'gv', 146: 'ga', 147: 'to', 148: 'el-polyton', 149: 'kl', 150: 'az', 151: 'nn' }; // MacOS language ID → MacOS script ID // // Note that the script ID is not sufficient to determine what encoding // to use in TrueType files. For some languages, MacOS used a modification // of a mainstream script. For example, an Icelandic name would be stored // with smRoman in the TrueType naming table, but the actual encoding // is a special Icelandic version of the normal Macintosh Roman encoding. // As another example, Inuktitut uses an 8-bit encoding for Canadian Aboriginal // Syllables but MacOS had run out of available script codes, so this was // done as a (pretty radical) "modification" of Ethiopic. 
// // http://unicode.org/Public/MAPPINGS/VENDORS/APPLE/Readme.txt var macLanguageToScript = { 0: 0, // langEnglish → smRoman 1: 0, // langFrench → smRoman 2: 0, // langGerman → smRoman 3: 0, // langItalian → smRoman 4: 0, // langDutch → smRoman 5: 0, // langSwedish → smRoman 6: 0, // langSpanish → smRoman 7: 0, // langDanish → smRoman 8: 0, // langPortuguese → smRoman 9: 0, // langNorwegian → smRoman 10: 5, // langHebrew → smHebrew 11: 1, // langJapanese → smJapanese 12: 4, // langArabic → smArabic 13: 0, // langFinnish → smRoman 14: 6, // langGreek → smGreek 15: 0, // langIcelandic → smRoman (modified) 16: 0, // langMaltese → smRoman 17: 0, // langTurkish → smRoman (modified) 18: 0, // langCroatian → smRoman (modified) 19: 2, // langTradChinese → smTradChinese 20: 4, // langUrdu → smArabic 21: 9, // langHindi → smDevanagari 22: 21, // langThai → smThai 23: 3, // langKorean → smKorean 24: 29, // langLithuanian → smCentralEuroRoman 25: 29, // langPolish → smCentralEuroRoman 26: 29, // langHungarian → smCentralEuroRoman 27: 29, // langEstonian → smCentralEuroRoman 28: 29, // langLatvian → smCentralEuroRoman 29: 0, // langSami → smRoman 30: 0, // langFaroese → smRoman (modified) 31: 4, // langFarsi → smArabic (modified) 32: 7, // langRussian → smCyrillic 33: 25, // langSimpChinese → smSimpChinese 34: 0, // langFlemish → smRoman 35: 0, // langIrishGaelic → smRoman (modified) 36: 0, // langAlbanian → smRoman 37: 0, // langRomanian → smRoman (modified) 38: 29, // langCzech → smCentralEuroRoman 39: 29, // langSlovak → smCentralEuroRoman 40: 0, // langSlovenian → smRoman (modified) 41: 5, // langYiddish → smHebrew 42: 7, // langSerbian → smCyrillic 43: 7, // langMacedonian → smCyrillic 44: 7, // langBulgarian → smCyrillic 45: 7, // langUkrainian → smCyrillic (modified) 46: 7, // langByelorussian → smCyrillic 47: 7, // langUzbek → smCyrillic 48: 7, // langKazakh → smCyrillic 49: 7, // langAzerbaijani → smCyrillic 50: 4, // langAzerbaijanAr → smArabic 51: 24, // langArmenian → smArmenian 52: 23, // langGeorgian → smGeorgian 53: 7, // langMoldavian → smCyrillic 54: 7, // langKirghiz → smCyrillic 55: 7, // langTajiki → smCyrillic 56: 7, // langTurkmen → smCyrillic 57: 27, // langMongolian → smMongolian 58: 7, // langMongolianCyr → smCyrillic 59: 4, // langPashto → smArabic 60: 4, // langKurdish → smArabic 61: 4, // langKashmiri → smArabic 62: 4, // langSindhi → smArabic 63: 26, // langTibetan → smTibetan 64: 9, // langNepali → smDevanagari 65: 9, // langSanskrit → smDevanagari 66: 9, // langMarathi → smDevanagari 67: 13, // langBengali → smBengali 68: 13, // langAssamese → smBengali 69: 11, // langGujarati → smGujarati 70: 10, // langPunjabi → smGurmukhi 71: 12, // langOriya → smOriya 72: 17, // langMalayalam → smMalayalam 73: 16, // langKannada → smKannada 74: 14, // langTamil → smTamil 75: 15, // langTelugu → smTelugu 76: 18, // langSinhalese → smSinhalese 77: 19, // langBurmese → smBurmese 78: 20, // langKhmer → smKhmer 79: 22, // langLao → smLao 80: 30, // langVietnamese → smVietnamese 81: 0, // langIndonesian → smRoman 82: 0, // langTagalog → smRoman 83: 0, // langMalayRoman → smRoman 84: 4, // langMalayArabic → smArabic 85: 28, // langAmharic → smEthiopic 86: 28, // langTigrinya → smEthiopic 87: 28, // langOromo → smEthiopic 88: 0, // langSomali → smRoman 89: 0, // langSwahili → smRoman 90: 0, // langKinyarwanda → smRoman 91: 0, // langRundi → smRoman 92: 0, // langNyanja → smRoman 93: 0, // langMalagasy → smRoman 94: 0, // langEsperanto → smRoman 128: 0, // langWelsh → smRoman (modified) 129: 
0, // langBasque → smRoman 130: 0, // langCatalan → smRoman 131: 0, // langLatin → smRoman 132: 0, // langQuechua → smRoman 133: 0, // langGuarani → smRoman 134: 0, // langAymara → smRoman 135: 7, // langTatar → smCyrillic 136: 4, // langUighur → smArabic 137: 26, // langDzongkha → smTibetan 138: 0, // langJavaneseRom → smRoman 139: 0, // langSundaneseRom → smRoman 140: 0, // langGalician → smRoman 141: 0, // langAfrikaans → smRoman 142: 0, // langBreton → smRoman (modified) 143: 28, // langInuktitut → smEthiopic (modified) 144: 0, // langScottishGaelic → smRoman (modified) 145: 0, // langManxGaelic → smRoman (modified) 146: 0, // langIrishGaelicScript → smRoman (modified) 147: 0, // langTongan → smRoman 148: 6, // langGreekAncient → smRoman 149: 0, // langGreenlandic → smRoman 150: 0, // langAzerbaijanRoman → smRoman 151: 0 // langNynorsk → smRoman }; // While Microsoft indicates a region/country for all its language // IDs, we omit the region code if it's equal to the "most likely // region subtag" according to Unicode CLDR. For scripts, we omit // the subtag if it is equal to the Suppress-Script entry in the // IANA language subtag registry for IETF BCP 47. // // For example, Microsoft states that its language code 0x041A is // Croatian in Croatia. We transform this to the BCP 47 language code 'hr' // and not 'hr-HR' because Croatia is the default country for Croatian, // according to Unicode CLDR. As another example, Microsoft states // that 0x101A is Croatian (Latin) in Bosnia-Herzegovina. We transform // this to 'hr-BA' and not 'hr-Latn-BA' because Latin is the default script // for the Croatian language, according to IANA. // // http://www.unicode.org/cldr/charts/latest/supplemental/likely_subtags.html // http://www.iana.org/assignments/language-subtag-registry/language-subtag-registry var windowsLanguages = { 0x0436: 'af', 0x041C: 'sq', 0x0484: 'gsw', 0x045E: 'am', 0x1401: 'ar-DZ', 0x3C01: 'ar-BH', 0x0C01: 'ar', 0x0801: 'ar-IQ', 0x2C01: 'ar-JO', 0x3401: 'ar-KW', 0x3001: 'ar-LB', 0x1001: 'ar-LY', 0x1801: 'ary', 0x2001: 'ar-OM', 0x4001: 'ar-QA', 0x0401: 'ar-SA', 0x2801: 'ar-SY', 0x1C01: 'aeb', 0x3801: 'ar-AE', 0x2401: 'ar-YE', 0x042B: 'hy', 0x044D: 'as', 0x082C: 'az-Cyrl', 0x042C: 'az', 0x046D: 'ba', 0x042D: 'eu', 0x0423: 'be', 0x0845: 'bn', 0x0445: 'bn-IN', 0x201A: 'bs-Cyrl', 0x141A: 'bs', 0x047E: 'br', 0x0402: 'bg', 0x0403: 'ca', 0x0C04: 'zh-HK', 0x1404: 'zh-MO', 0x0804: 'zh', 0x1004: 'zh-SG', 0x0404: 'zh-TW', 0x0483: 'co', 0x041A: 'hr', 0x101A: 'hr-BA', 0x0405: 'cs', 0x0406: 'da', 0x048C: 'prs', 0x0465: 'dv', 0x0813: 'nl-BE', 0x0413: 'nl', 0x0C09: 'en-AU', 0x2809: 'en-BZ', 0x1009: 'en-CA', 0x2409: 'en-029', 0x4009: 'en-IN', 0x1809: 'en-IE', 0x2009: 'en-JM', 0x4409: 'en-MY', 0x1409: 'en-NZ', 0x3409: 'en-PH', 0x4809: 'en-SG', 0x1C09: 'en-ZA', 0x2C09: 'en-TT', 0x0809: 'en-GB', 0x0409: 'en', 0x3009: 'en-ZW', 0x0425: 'et', 0x0438: 'fo', 0x0464: 'fil', 0x040B: 'fi', 0x080C: 'fr-BE', 0x0C0C: 'fr-CA', 0x040C: 'fr', 0x140C: 'fr-LU', 0x180C: 'fr-MC', 0x100C: 'fr-CH', 0x0462: 'fy', 0x0456: 'gl', 0x0437: 'ka', 0x0C07: 'de-AT', 0x0407: 'de', 0x1407: 'de-LI', 0x1007: 'de-LU', 0x0807: 'de-CH', 0x0408: 'el', 0x046F: 'kl', 0x0447: 'gu', 0x0468: 'ha', 0x040D: 'he', 0x0439: 'hi', 0x040E: 'hu', 0x040F: 'is', 0x0470: 'ig', 0x0421: 'id', 0x045D: 'iu', 0x085D: 'iu-Latn', 0x083C: 'ga', 0x0434: 'xh', 0x0435: 'zu', 0x0410: 'it', 0x0810: 'it-CH', 0x0411: 'ja', 0x044B: 'kn', 0x043F: 'kk', 0x0453: 'km', 0x0486: 'quc', 0x0487: 'rw', 0x0441: 'sw', 0x0457: 'kok', 0x0412: 'ko', 0x0440: 'ky', 0x0454: 'lo', 0x0426: 
'lv', 0x0427: 'lt', 0x082E: 'dsb', 0x046E: 'lb', 0x042F: 'mk', 0x083E: 'ms-BN', 0x043E: 'ms', 0x044C: 'ml', 0x043A: 'mt', 0x0481: 'mi', 0x047A: 'arn', 0x044E: 'mr', 0x047C: 'moh', 0x0450: 'mn', 0x0850: 'mn-CN', 0x0461: 'ne', 0x0414: 'nb', 0x0814: 'nn', 0x0482: 'oc', 0x0448: 'or', 0x0463: 'ps', 0x0415: 'pl', 0x0416: 'pt', 0x0816: 'pt-PT', 0x0446: 'pa', 0x046B: 'qu-BO', 0x086B: 'qu-EC', 0x0C6B: 'qu', 0x0418: 'ro', 0x0417: 'rm', 0x0419: 'ru', 0x243B: 'smn', 0x103B: 'smj-NO', 0x143B: 'smj', 0x0C3B: 'se-FI', 0x043B: 'se', 0x083B: 'se-SE', 0x203B: 'sms', 0x183B: 'sma-NO', 0x1C3B: 'sms', 0x044F: 'sa', 0x1C1A: 'sr-Cyrl-BA', 0x0C1A: 'sr', 0x181A: 'sr-Latn-BA', 0x081A: 'sr-Latn', 0x046C: 'nso', 0x0432: 'tn', 0x045B: 'si', 0x041B: 'sk', 0x0424: 'sl', 0x2C0A: 'es-AR', 0x400A: 'es-BO', 0x340A: 'es-CL', 0x240A: 'es-CO', 0x140A: 'es-CR', 0x1C0A: 'es-DO', 0x300A: 'es-EC', 0x440A: 'es-SV', 0x100A: 'es-GT', 0x480A: 'es-HN', 0x080A: 'es-MX', 0x4C0A: 'es-NI', 0x180A: 'es-PA', 0x3C0A: 'es-PY', 0x280A: 'es-PE', 0x500A: 'es-PR', // Microsoft has defined two different language codes for // “Spanish with modern sorting” and “Spanish with traditional // sorting”. This makes sense for collation APIs, and it would be // possible to express this in BCP 47 language tags via Unicode // extensions (eg., es-u-co-trad is Spanish with traditional // sorting). However, for storing names in fonts, the distinction // does not make sense, so we give “es” in both cases. 0x0C0A: 'es', 0x040A: 'es', 0x540A: 'es-US', 0x380A: 'es-UY', 0x200A: 'es-VE', 0x081D: 'sv-FI', 0x041D: 'sv', 0x045A: 'syr', 0x0428: 'tg', 0x085F: 'tzm', 0x0449: 'ta', 0x0444: 'tt', 0x044A: 'te', 0x041E: 'th', 0x0451: 'bo', 0x041F: 'tr', 0x0442: 'tk', 0x0480: 'ug', 0x0422: 'uk', 0x042E: 'hsb', 0x0420: 'ur', 0x0843: 'uz-Cyrl', 0x0443: 'uz', 0x042A: 'vi', 0x0452: 'cy', 0x0488: 'wo', 0x0485: 'sah', 0x0478: 'ii', 0x046A: 'yo' }; // Returns a IETF BCP 47 language code, for example 'zh-Hant' // for 'Chinese in the traditional script'. function getLanguageCode(platformID, languageID, ltag) { switch (platformID) { case 0: // Unicode if (languageID === 0xFFFF) { return 'und'; } else if (ltag) { return ltag[languageID]; } break; case 1: // Macintosh return macLanguages[languageID]; case 3: // Windows return windowsLanguages[languageID]; } return undefined; } var utf16 = 'utf-16'; // MacOS script ID → encoding. This table stores the default case, // which can be overridden by macLanguageEncodings. var macScriptEncodings = { 0: 'macintosh', // smRoman 1: 'x-mac-japanese', // smJapanese 2: 'x-mac-chinesetrad', // smTradChinese 3: 'x-mac-korean', // smKorean 6: 'x-mac-greek', // smGreek 7: 'x-mac-cyrillic', // smCyrillic 9: 'x-mac-devanagai', // smDevanagari 10: 'x-mac-gurmukhi', // smGurmukhi 11: 'x-mac-gujarati', // smGujarati 12: 'x-mac-oriya', // smOriya 13: 'x-mac-bengali', // smBengali 14: 'x-mac-tamil', // smTamil 15: 'x-mac-telugu', // smTelugu 16: 'x-mac-kannada', // smKannada 17: 'x-mac-malayalam', // smMalayalam 18: 'x-mac-sinhalese', // smSinhalese 19: 'x-mac-burmese', // smBurmese 20: 'x-mac-khmer', // smKhmer 21: 'x-mac-thai', // smThai 22: 'x-mac-lao', // smLao 23: 'x-mac-georgian', // smGeorgian 24: 'x-mac-armenian', // smArmenian 25: 'x-mac-chinesesimp', // smSimpChinese 26: 'x-mac-tibetan', // smTibetan 27: 'x-mac-mongolian', // smMongolian 28: 'x-mac-ethiopic', // smEthiopic 29: 'x-mac-ce', // smCentralEuroRoman 30: 'x-mac-vietnamese', // smVietnamese 31: 'x-mac-extarabic' // smExtArabic }; // MacOS language ID → encoding. 
This table stores the exceptional // cases, which override macScriptEncodings. For writing MacOS naming // tables, we need to emit a MacOS script ID. Therefore, we cannot // merge macScriptEncodings into macLanguageEncodings. // // http://unicode.org/Public/MAPPINGS/VENDORS/APPLE/Readme.txt var macLanguageEncodings = { 15: 'x-mac-icelandic', // langIcelandic 17: 'x-mac-turkish', // langTurkish 18: 'x-mac-croatian', // langCroatian 24: 'x-mac-ce', // langLithuanian 25: 'x-mac-ce', // langPolish 26: 'x-mac-ce', // langHungarian 27: 'x-mac-ce', // langEstonian 28: 'x-mac-ce', // langLatvian 30: 'x-mac-icelandic', // langFaroese 37: 'x-mac-romanian', // langRomanian 38: 'x-mac-ce', // langCzech 39: 'x-mac-ce', // langSlovak 40: 'x-mac-ce', // langSlovenian 143: 'x-mac-inuit', // langInuktitut 146: 'x-mac-gaelic' // langIrishGaelicScript }; function getEncoding(platformID, encodingID, languageID) { switch (platformID) { case 0: // Unicode return utf16; case 1: // Apple Macintosh return macLanguageEncodings[languageID] || macScriptEncodings[encodingID]; case 3: // Microsoft Windows if (encodingID === 1 || encodingID === 10) { return utf16; } break; } return undefined; } // Parse the naming `name` table. // FIXME: Format 1 additional fields are not supported yet. // ltag is the content of the `ltag' table, such as ['en', 'zh-Hans', 'de-CH-1904']. function parseNameTable(data, start, ltag) { var name = {}; var p = new parse.Parser(data, start); var format = p.parseUShort(); var count = p.parseUShort(); var stringOffset = p.offset + p.parseUShort(); for (var i = 0; i < count; i++) { var platformID = p.parseUShort(); var encodingID = p.parseUShort(); var languageID = p.parseUShort(); var nameID = p.parseUShort(); var property = nameTableNames[nameID] || nameID; var byteLength = p.parseUShort(); var offset = p.parseUShort(); var language = getLanguageCode(platformID, languageID, ltag); var encoding = getEncoding(platformID, encodingID, languageID); if (encoding !== undefined && language !== undefined) { var text; if (encoding === utf16) { text = decode.UTF16(data, stringOffset + offset, byteLength); } else { text = decode.MACSTRING(data, stringOffset + offset, byteLength, encoding); } if (text) { var translations = name[property]; if (translations === undefined) { translations = name[property] = {}; } translations[language] = text; } } } var langTagCount = 0; if (format === 1) { // FIXME: Also handle Microsoft's 'name' table 1. langTagCount = p.parseUShort(); } return name; } // {23: 'foo'} → {'foo': 23} // ['bar', 'baz'] → {'bar': 0, 'baz': 1} function reverseDict(dict) { var result = {}; for (var key in dict) { result[dict[key]] = parseInt(key); } return result; } function makeNameRecord(platformID, encodingID, languageID, nameID, length, offset) { return new table.Record('NameRecord', [ {name: 'platformID', type: 'USHORT', value: platformID}, {name: 'encodingID', type: 'USHORT', value: encodingID}, {name: 'languageID', type: 'USHORT', value: languageID}, {name: 'nameID', type: 'USHORT', value: nameID}, {name: 'length', type: 'USHORT', value: length}, {name: 'offset', type: 'USHORT', value: offset} ]); } // Finds the position of needle in haystack, or -1 if not there. // Like String.indexOf(), but for arrays. 
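// Illustrative examples of the helper below (results follow from its definition):
//   findSubArray([3, 4], [1, 2, 3, 4, 5])   // → 2
//   findSubArray([9], [1, 2, 3])            // → -1
// addStringToPool() relies on this to share identical byte strings within the pool.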
function findSubArray(needle, haystack) { var needleLength = needle.length; var limit = haystack.length - needleLength + 1; loop: for (var pos = 0; pos < limit; pos++) { for (; pos < limit; pos++) { for (var k = 0; k < needleLength; k++) { if (haystack[pos + k] !== needle[k]) { continue loop; } } return pos; } } return -1; } function addStringToPool(s, pool) { var offset = findSubArray(s, pool); if (offset < 0) { offset = pool.length; for (var i = 0, len = s.length; i < len; ++i) { pool.push(s[i]); } } return offset; } function makeNameTable(names, ltag) { var nameID; var nameIDs = []; var namesWithNumericKeys = {}; var nameTableIds = reverseDict(nameTableNames); for (var key in names) { var id = nameTableIds[key]; if (id === undefined) { id = key; } nameID = parseInt(id); if (isNaN(nameID)) { throw new Error('Name table entry "' + key + '" does not exist, see nameTableNames for complete list.'); } namesWithNumericKeys[nameID] = names[key]; nameIDs.push(nameID); } var macLanguageIds = reverseDict(macLanguages); var windowsLanguageIds = reverseDict(windowsLanguages); var nameRecords = []; var stringPool = []; for (var i = 0; i < nameIDs.length; i++) { nameID = nameIDs[i]; var translations = namesWithNumericKeys[nameID]; for (var lang in translations) { var text = translations[lang]; // For MacOS, we try to emit the name in the form that was introduced // in the initial version of the TrueType spec (in the late 1980s). // However, this can fail for various reasons: the requested BCP 47 // language code might not have an old-style Mac equivalent; // we might not have a codec for the needed character encoding; // or the name might contain characters that cannot be expressed // in the old-style Macintosh encoding. In case of failure, we emit // the name in a more modern fashion (Unicode encoding with BCP 47 // language tags) that is recognized by MacOS 10.5, released in 2009. // If fonts were only read by operating systems, we could simply // emit all names in the modern form; this would be much easier. // However, there are many applications and libraries that read // 'name' tables directly, and these will usually only recognize // the ancient form (silently skipping the unrecognized names). 
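// A rough sketch of the fallback decision (the code below is authoritative):
//   encode.MACSTRING('Größe', 'macintosh')   // → byte array; keep platform 1 (Macintosh)
//   encode.MACSTRING('名前', 'macintosh')     // → undefined; fall back to platform 0,
//                                            //   UTF-16 text plus an 'ltag' language entry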
var macPlatform = 1; // Macintosh var macLanguage = macLanguageIds[lang]; var macScript = macLanguageToScript[macLanguage]; var macEncoding = getEncoding(macPlatform, macScript, macLanguage); var macName = encode.MACSTRING(text, macEncoding); if (macName === undefined) { macPlatform = 0; // Unicode macLanguage = ltag.indexOf(lang); if (macLanguage < 0) { macLanguage = ltag.length; ltag.push(lang); } macScript = 4; // Unicode 2.0 and later macName = encode.UTF16(text); } var macNameOffset = addStringToPool(macName, stringPool); nameRecords.push(makeNameRecord(macPlatform, macScript, macLanguage, nameID, macName.length, macNameOffset)); var winLanguage = windowsLanguageIds[lang]; if (winLanguage !== undefined) { var winName = encode.UTF16(text); var winNameOffset = addStringToPool(winName, stringPool); nameRecords.push(makeNameRecord(3, 1, winLanguage, nameID, winName.length, winNameOffset)); } } } nameRecords.sort(function(a, b) { return ((a.platformID - b.platformID) || (a.encodingID - b.encodingID) || (a.languageID - b.languageID) || (a.nameID - b.nameID)); }); var t = new table.Table('name', [ {name: 'format', type: 'USHORT', value: 0}, {name: 'count', type: 'USHORT', value: nameRecords.length}, {name: 'stringOffset', type: 'USHORT', value: 6 + nameRecords.length * 12} ]); for (var r = 0; r < nameRecords.length; r++) { t.fields.push({name: 'record_' + r, type: 'RECORD', value: nameRecords[r]}); } t.fields.push({name: 'strings', type: 'LITERAL', value: stringPool}); return t; } exports.parse = parseNameTable; exports.make = makeNameTable; },{"../parse":10,"../table":13,"../types":32}],29:[function(require,module,exports){ // The `OS/2` table contains metrics required in OpenType fonts. // https://www.microsoft.com/typography/OTSPEC/os2.htm 'use strict'; var parse = require('../parse'); var table = require('../table'); var unicodeRanges = [ {begin: 0x0000, end: 0x007F}, // Basic Latin {begin: 0x0080, end: 0x00FF}, // Latin-1 Supplement {begin: 0x0100, end: 0x017F}, // Latin Extended-A {begin: 0x0180, end: 0x024F}, // Latin Extended-B {begin: 0x0250, end: 0x02AF}, // IPA Extensions {begin: 0x02B0, end: 0x02FF}, // Spacing Modifier Letters {begin: 0x0300, end: 0x036F}, // Combining Diacritical Marks {begin: 0x0370, end: 0x03FF}, // Greek and Coptic {begin: 0x2C80, end: 0x2CFF}, // Coptic {begin: 0x0400, end: 0x04FF}, // Cyrillic {begin: 0x0530, end: 0x058F}, // Armenian {begin: 0x0590, end: 0x05FF}, // Hebrew {begin: 0xA500, end: 0xA63F}, // Vai {begin: 0x0600, end: 0x06FF}, // Arabic {begin: 0x07C0, end: 0x07FF}, // NKo {begin: 0x0900, end: 0x097F}, // Devanagari {begin: 0x0980, end: 0x09FF}, // Bengali {begin: 0x0A00, end: 0x0A7F}, // Gurmukhi {begin: 0x0A80, end: 0x0AFF}, // Gujarati {begin: 0x0B00, end: 0x0B7F}, // Oriya {begin: 0x0B80, end: 0x0BFF}, // Tamil {begin: 0x0C00, end: 0x0C7F}, // Telugu {begin: 0x0C80, end: 0x0CFF}, // Kannada {begin: 0x0D00, end: 0x0D7F}, // Malayalam {begin: 0x0E00, end: 0x0E7F}, // Thai {begin: 0x0E80, end: 0x0EFF}, // Lao {begin: 0x10A0, end: 0x10FF}, // Georgian {begin: 0x1B00, end: 0x1B7F}, // Balinese {begin: 0x1100, end: 0x11FF}, // Hangul Jamo {begin: 0x1E00, end: 0x1EFF}, // Latin Extended Additional {begin: 0x1F00, end: 0x1FFF}, // Greek Extended {begin: 0x2000, end: 0x206F}, // General Punctuation {begin: 0x2070, end: 0x209F}, // Superscripts And Subscripts {begin: 0x20A0, end: 0x20CF}, // Currency Symbol {begin: 0x20D0, end: 0x20FF}, // Combining Diacritical Marks For Symbols {begin: 0x2100, end: 0x214F}, // Letterlike Symbols {begin: 0x2150, 
end: 0x218F}, // Number Forms {begin: 0x2190, end: 0x21FF}, // Arrows {begin: 0x2200, end: 0x22FF}, // Mathematical Operators {begin: 0x2300, end: 0x23FF}, // Miscellaneous Technical {begin: 0x2400, end: 0x243F}, // Control Pictures {begin: 0x2440, end: 0x245F}, // Optical Character Recognition {begin: 0x2460, end: 0x24FF}, // Enclosed Alphanumerics {begin: 0x2500, end: 0x257F}, // Box Drawing {begin: 0x2580, end: 0x259F}, // Block Elements {begin: 0x25A0, end: 0x25FF}, // Geometric Shapes {begin: 0x2600, end: 0x26FF}, // Miscellaneous Symbols {begin: 0x2700, end: 0x27BF}, // Dingbats {begin: 0x3000, end: 0x303F}, // CJK Symbols And Punctuation {begin: 0x3040, end: 0x309F}, // Hiragana {begin: 0x30A0, end: 0x30FF}, // Katakana {begin: 0x3100, end: 0x312F}, // Bopomofo {begin: 0x3130, end: 0x318F}, // Hangul Compatibility Jamo {begin: 0xA840, end: 0xA87F}, // Phags-pa {begin: 0x3200, end: 0x32FF}, // Enclosed CJK Letters And Months {begin: 0x3300, end: 0x33FF}, // CJK Compatibility {begin: 0xAC00, end: 0xD7AF}, // Hangul Syllables {begin: 0xD800, end: 0xDFFF}, // Non-Plane 0 * {begin: 0x10900, end: 0x1091F}, // Phoenicia {begin: 0x4E00, end: 0x9FFF}, // CJK Unified Ideographs {begin: 0xE000, end: 0xF8FF}, // Private Use Area (plane 0) {begin: 0x31C0, end: 0x31EF}, // CJK Strokes {begin: 0xFB00, end: 0xFB4F}, // Alphabetic Presentation Forms {begin: 0xFB50, end: 0xFDFF}, // Arabic Presentation Forms-A {begin: 0xFE20, end: 0xFE2F}, // Combining Half Marks {begin: 0xFE10, end: 0xFE1F}, // Vertical Forms {begin: 0xFE50, end: 0xFE6F}, // Small Form Variants {begin: 0xFE70, end: 0xFEFF}, // Arabic Presentation Forms-B {begin: 0xFF00, end: 0xFFEF}, // Halfwidth And Fullwidth Forms {begin: 0xFFF0, end: 0xFFFF}, // Specials {begin: 0x0F00, end: 0x0FFF}, // Tibetan {begin: 0x0700, end: 0x074F}, // Syriac {begin: 0x0780, end: 0x07BF}, // Thaana {begin: 0x0D80, end: 0x0DFF}, // Sinhala {begin: 0x1000, end: 0x109F}, // Myanmar {begin: 0x1200, end: 0x137F}, // Ethiopic {begin: 0x13A0, end: 0x13FF}, // Cherokee {begin: 0x1400, end: 0x167F}, // Unified Canadian Aboriginal Syllabics {begin: 0x1680, end: 0x169F}, // Ogham {begin: 0x16A0, end: 0x16FF}, // Runic {begin: 0x1780, end: 0x17FF}, // Khmer {begin: 0x1800, end: 0x18AF}, // Mongolian {begin: 0x2800, end: 0x28FF}, // Braille Patterns {begin: 0xA000, end: 0xA48F}, // Yi Syllables {begin: 0x1700, end: 0x171F}, // Tagalog {begin: 0x10300, end: 0x1032F}, // Old Italic {begin: 0x10330, end: 0x1034F}, // Gothic {begin: 0x10400, end: 0x1044F}, // Deseret {begin: 0x1D000, end: 0x1D0FF}, // Byzantine Musical Symbols {begin: 0x1D400, end: 0x1D7FF}, // Mathematical Alphanumeric Symbols {begin: 0xFF000, end: 0xFFFFD}, // Private Use (plane 15) {begin: 0xFE00, end: 0xFE0F}, // Variation Selectors {begin: 0xE0000, end: 0xE007F}, // Tags {begin: 0x1900, end: 0x194F}, // Limbu {begin: 0x1950, end: 0x197F}, // Tai Le {begin: 0x1980, end: 0x19DF}, // New Tai Lue {begin: 0x1A00, end: 0x1A1F}, // Buginese {begin: 0x2C00, end: 0x2C5F}, // Glagolitic {begin: 0x2D30, end: 0x2D7F}, // Tifinagh {begin: 0x4DC0, end: 0x4DFF}, // Yijing Hexagram Symbols {begin: 0xA800, end: 0xA82F}, // Syloti Nagri {begin: 0x10000, end: 0x1007F}, // Linear B Syllabary {begin: 0x10140, end: 0x1018F}, // Ancient Greek Numbers {begin: 0x10380, end: 0x1039F}, // Ugaritic {begin: 0x103A0, end: 0x103DF}, // Old Persian {begin: 0x10450, end: 0x1047F}, // Shavian {begin: 0x10480, end: 0x104AF}, // Osmanya {begin: 0x10800, end: 0x1083F}, // Cypriot Syllabary {begin: 0x10A00, end: 0x10A5F}, // Kharoshthi 
{begin: 0x1D300, end: 0x1D35F}, // Tai Xuan Jing Symbols {begin: 0x12000, end: 0x123FF}, // Cuneiform {begin: 0x1D360, end: 0x1D37F}, // Counting Rod Numerals {begin: 0x1B80, end: 0x1BBF}, // Sundanese {begin: 0x1C00, end: 0x1C4F}, // Lepcha {begin: 0x1C50, end: 0x1C7F}, // Ol Chiki {begin: 0xA880, end: 0xA8DF}, // Saurashtra {begin: 0xA900, end: 0xA92F}, // Kayah Li {begin: 0xA930, end: 0xA95F}, // Rejang {begin: 0xAA00, end: 0xAA5F}, // Cham {begin: 0x10190, end: 0x101CF}, // Ancient Symbols {begin: 0x101D0, end: 0x101FF}, // Phaistos Disc {begin: 0x102A0, end: 0x102DF}, // Carian {begin: 0x1F030, end: 0x1F09F} // Domino Tiles ]; function getUnicodeRange(unicode) { for (var i = 0; i < unicodeRanges.length; i += 1) { var range = unicodeRanges[i]; if (unicode >= range.begin && unicode < range.end) { return i; } } return -1; } // Parse the OS/2 and Windows metrics `OS/2` table function parseOS2Table(data, start) { var os2 = {}; var p = new parse.Parser(data, start); os2.version = p.parseUShort(); os2.xAvgCharWidth = p.parseShort(); os2.usWeightClass = p.parseUShort(); os2.usWidthClass = p.parseUShort(); os2.fsType = p.parseUShort(); os2.ySubscriptXSize = p.parseShort(); os2.ySubscriptYSize = p.parseShort(); os2.ySubscriptXOffset = p.parseShort(); os2.ySubscriptYOffset = p.parseShort(); os2.ySuperscriptXSize = p.parseShort(); os2.ySuperscriptYSize = p.parseShort(); os2.ySuperscriptXOffset = p.parseShort(); os2.ySuperscriptYOffset = p.parseShort(); os2.yStrikeoutSize = p.parseShort(); os2.yStrikeoutPosition = p.parseShort(); os2.sFamilyClass = p.parseShort(); os2.panose = []; for (var i = 0; i < 10; i++) { os2.panose[i] = p.parseByte(); } os2.ulUnicodeRange1 = p.parseULong(); os2.ulUnicodeRange2 = p.parseULong(); os2.ulUnicodeRange3 = p.parseULong(); os2.ulUnicodeRange4 = p.parseULong(); os2.achVendID = String.fromCharCode(p.parseByte(), p.parseByte(), p.parseByte(), p.parseByte()); os2.fsSelection = p.parseUShort(); os2.usFirstCharIndex = p.parseUShort(); os2.usLastCharIndex = p.parseUShort(); os2.sTypoAscender = p.parseShort(); os2.sTypoDescender = p.parseShort(); os2.sTypoLineGap = p.parseShort(); os2.usWinAscent = p.parseUShort(); os2.usWinDescent = p.parseUShort(); if (os2.version >= 1) { os2.ulCodePageRange1 = p.parseULong(); os2.ulCodePageRange2 = p.parseULong(); } if (os2.version >= 2) { os2.sxHeight = p.parseShort(); os2.sCapHeight = p.parseShort(); os2.usDefaultChar = p.parseUShort(); os2.usBreakChar = p.parseUShort(); os2.usMaxContent = p.parseUShort(); } return os2; } function makeOS2Table(options) { return new table.Table('OS/2', [ {name: 'version', type: 'USHORT', value: 0x0003}, {name: 'xAvgCharWidth', type: 'SHORT', value: 0}, {name: 'usWeightClass', type: 'USHORT', value: 0}, {name: 'usWidthClass', type: 'USHORT', value: 0}, {name: 'fsType', type: 'USHORT', value: 0}, {name: 'ySubscriptXSize', type: 'SHORT', value: 650}, {name: 'ySubscriptYSize', type: 'SHORT', value: 699}, {name: 'ySubscriptXOffset', type: 'SHORT', value: 0}, {name: 'ySubscriptYOffset', type: 'SHORT', value: 140}, {name: 'ySuperscriptXSize', type: 'SHORT', value: 650}, {name: 'ySuperscriptYSize', type: 'SHORT', value: 699}, {name: 'ySuperscriptXOffset', type: 'SHORT', value: 0}, {name: 'ySuperscriptYOffset', type: 'SHORT', value: 479}, {name: 'yStrikeoutSize', type: 'SHORT', value: 49}, {name: 'yStrikeoutPosition', type: 'SHORT', value: 258}, {name: 'sFamilyClass', type: 'SHORT', value: 0}, {name: 'bFamilyType', type: 'BYTE', value: 0}, {name: 'bSerifStyle', type: 'BYTE', value: 0}, {name: 'bWeight', type: 
'BYTE', value: 0}, {name: 'bProportion', type: 'BYTE', value: 0}, {name: 'bContrast', type: 'BYTE', value: 0}, {name: 'bStrokeVariation', type: 'BYTE', value: 0}, {name: 'bArmStyle', type: 'BYTE', value: 0}, {name: 'bLetterform', type: 'BYTE', value: 0}, {name: 'bMidline', type: 'BYTE', value: 0}, {name: 'bXHeight', type: 'BYTE', value: 0}, {name: 'ulUnicodeRange1', type: 'ULONG', value: 0}, {name: 'ulUnicodeRange2', type: 'ULONG', value: 0}, {name: 'ulUnicodeRange3', type: 'ULONG', value: 0}, {name: 'ulUnicodeRange4', type: 'ULONG', value: 0}, {name: 'achVendID', type: 'CHARARRAY', value: 'XXXX'}, {name: 'fsSelection', type: 'USHORT', value: 0}, {name: 'usFirstCharIndex', type: 'USHORT', value: 0}, {name: 'usLastCharIndex', type: 'USHORT', value: 0}, {name: 'sTypoAscender', type: 'SHORT', value: 0}, {name: 'sTypoDescender', type: 'SHORT', value: 0}, {name: 'sTypoLineGap', type: 'SHORT', value: 0}, {name: 'usWinAscent', type: 'USHORT', value: 0}, {name: 'usWinDescent', type: 'USHORT', value: 0}, {name: 'ulCodePageRange1', type: 'ULONG', value: 0}, {name: 'ulCodePageRange2', type: 'ULONG', value: 0}, {name: 'sxHeight', type: 'SHORT', value: 0}, {name: 'sCapHeight', type: 'SHORT', value: 0}, {name: 'usDefaultChar', type: 'USHORT', value: 0}, {name: 'usBreakChar', type: 'USHORT', value: 0}, {name: 'usMaxContext', type: 'USHORT', value: 0} ], options); } exports.unicodeRanges = unicodeRanges; exports.getUnicodeRange = getUnicodeRange; exports.parse = parseOS2Table; exports.make = makeOS2Table; },{"../parse":10,"../table":13}],30:[function(require,module,exports){ // The `post` table stores additional PostScript information, such as glyph names. // https://www.microsoft.com/typography/OTSPEC/post.htm 'use strict'; var encoding = require('../encoding'); var parse = require('../parse'); var table = require('../table'); // Parse the PostScript `post` table function parsePostTable(data, start) { var post = {}; var p = new parse.Parser(data, start); var i; post.version = p.parseVersion(); post.italicAngle = p.parseFixed(); post.underlinePosition = p.parseShort(); post.underlineThickness = p.parseShort(); post.isFixedPitch = p.parseULong(); post.minMemType42 = p.parseULong(); post.maxMemType42 = p.parseULong(); post.minMemType1 = p.parseULong(); post.maxMemType1 = p.parseULong(); switch (post.version) { case 1: post.names = encoding.standardNames.slice(); break; case 2: post.numberOfGlyphs = p.parseUShort(); post.glyphNameIndex = new Array(post.numberOfGlyphs); for (i = 0; i < post.numberOfGlyphs; i++) { post.glyphNameIndex[i] = p.parseUShort(); } post.names = []; for (i = 0; i < post.numberOfGlyphs; i++) { if (post.glyphNameIndex[i] >= encoding.standardNames.length) { var nameLength = p.parseChar(); post.names.push(p.parseString(nameLength)); } } break; case 2.5: post.numberOfGlyphs = p.parseUShort(); post.offset = new Array(post.numberOfGlyphs); for (i = 0; i < post.numberOfGlyphs; i++) { post.offset[i] = p.parseChar(); } break; } return post; } function makePostTable() { return new table.Table('post', [ {name: 'version', type: 'FIXED', value: 0x00030000}, {name: 'italicAngle', type: 'FIXED', value: 0}, {name: 'underlinePosition', type: 'FWORD', value: 0}, {name: 'underlineThickness', type: 'FWORD', value: 0}, {name: 'isFixedPitch', type: 'ULONG', value: 0}, {name: 'minMemType42', type: 'ULONG', value: 0}, {name: 'maxMemType42', type: 'ULONG', value: 0}, {name: 'minMemType1', type: 'ULONG', value: 0}, {name: 'maxMemType1', type: 'ULONG', value: 0} ]); } exports.parse = parsePostTable; exports.make 
= makePostTable; },{"../encoding":4,"../parse":10,"../table":13}],31:[function(require,module,exports){ // The `sfnt` wrapper provides organization for the tables in the font. // It is the top-level data structure in a font. // https://www.microsoft.com/typography/OTSPEC/otff.htm // Recommendations for creating OpenType Fonts: // http://www.microsoft.com/typography/otspec140/recom.htm 'use strict'; var check = require('../check'); var table = require('../table'); var cmap = require('./cmap'); var cff = require('./cff'); var head = require('./head'); var hhea = require('./hhea'); var hmtx = require('./hmtx'); var ltag = require('./ltag'); var maxp = require('./maxp'); var _name = require('./name'); var os2 = require('./os2'); var post = require('./post'); var gsub = require('./gsub'); var meta = require('./meta'); function log2(v) { return Math.log(v) / Math.log(2) | 0; } function computeCheckSum(bytes) { while (bytes.length % 4 !== 0) { bytes.push(0); } var sum = 0; for (var i = 0; i < bytes.length; i += 4) { sum += (bytes[i] << 24) + (bytes[i + 1] << 16) + (bytes[i + 2] << 8) + (bytes[i + 3]); } sum %= Math.pow(2, 32); return sum; } function makeTableRecord(tag, checkSum, offset, length) { return new table.Record('Table Record', [ {name: 'tag', type: 'TAG', value: tag !== undefined ? tag : ''}, {name: 'checkSum', type: 'ULONG', value: checkSum !== undefined ? checkSum : 0}, {name: 'offset', type: 'ULONG', value: offset !== undefined ? offset : 0}, {name: 'length', type: 'ULONG', value: length !== undefined ? length : 0} ]); } function makeSfntTable(tables) { var sfnt = new table.Table('sfnt', [ {name: 'version', type: 'TAG', value: 'OTTO'}, {name: 'numTables', type: 'USHORT', value: 0}, {name: 'searchRange', type: 'USHORT', value: 0}, {name: 'entrySelector', type: 'USHORT', value: 0}, {name: 'rangeShift', type: 'USHORT', value: 0} ]); sfnt.tables = tables; sfnt.numTables = tables.length; var highestPowerOf2 = Math.pow(2, log2(sfnt.numTables)); sfnt.searchRange = 16 * highestPowerOf2; sfnt.entrySelector = log2(highestPowerOf2); sfnt.rangeShift = sfnt.numTables * 16 - sfnt.searchRange; var recordFields = []; var tableFields = []; var offset = sfnt.sizeOf() + (makeTableRecord().sizeOf() * sfnt.numTables); while (offset % 4 !== 0) { offset += 1; tableFields.push({name: 'padding', type: 'BYTE', value: 0}); } for (var i = 0; i < tables.length; i += 1) { var t = tables[i]; check.argument(t.tableName.length === 4, 'Table name' + t.tableName + ' is invalid.'); var tableLength = t.sizeOf(); var tableRecord = makeTableRecord(t.tableName, computeCheckSum(t.encode()), offset, tableLength); recordFields.push({name: tableRecord.tag + ' Table Record', type: 'RECORD', value: tableRecord}); tableFields.push({name: t.tableName + ' table', type: 'RECORD', value: t}); offset += tableLength; check.argument(!isNaN(offset), 'Something went wrong calculating the offset.'); while (offset % 4 !== 0) { offset += 1; tableFields.push({name: 'padding', type: 'BYTE', value: 0}); } } // Table records need to be sorted alphabetically. recordFields.sort(function(r1, r2) { if (r1.value.tag > r2.value.tag) { return 1; } else { return -1; } }); sfnt.fields = sfnt.fields.concat(recordFields); sfnt.fields = sfnt.fields.concat(tableFields); return sfnt; } // Get the metrics for a character. If the string has more than one character // this function returns metrics for the first available character. // You can provide optional fallback metrics if no characters are available. 
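// Illustrative call (mirrors how fontToSfntTable() uses it further below):
//   metricsForChar(font, 'xyvw', {yMax: 500})
// returns the metrics of the first of 'x', 'y', 'v', 'w' that exists in the font,
// or the {yMax: 500} fallback object if none of them do.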
function metricsForChar(font, chars, notFoundMetrics) { for (var i = 0; i < chars.length; i += 1) { var glyphIndex = font.charToGlyphIndex(chars[i]); if (glyphIndex > 0) { var glyph = font.glyphs.get(glyphIndex); return glyph.getMetrics(); } } return notFoundMetrics; } function average(vs) { var sum = 0; for (var i = 0; i < vs.length; i += 1) { sum += vs[i]; } return sum / vs.length; } // Convert the font object to a SFNT data structure. // This structure contains all the necessary tables and metadata to create a binary OTF file. function fontToSfntTable(font) { var xMins = []; var yMins = []; var xMaxs = []; var yMaxs = []; var advanceWidths = []; var leftSideBearings = []; var rightSideBearings = []; var firstCharIndex; var lastCharIndex = 0; var ulUnicodeRange1 = 0; var ulUnicodeRange2 = 0; var ulUnicodeRange3 = 0; var ulUnicodeRange4 = 0; for (var i = 0; i < font.glyphs.length; i += 1) { var glyph = font.glyphs.get(i); var unicode = glyph.unicode | 0; if (isNaN(glyph.advanceWidth)) { throw new Error('Glyph ' + glyph.name + ' (' + i + '): advanceWidth is not a number.'); } if (firstCharIndex > unicode || firstCharIndex === undefined) { // ignore .notdef char if (unicode > 0) { firstCharIndex = unicode; } } if (lastCharIndex < unicode) { lastCharIndex = unicode; } var position = os2.getUnicodeRange(unicode); if (position < 32) { ulUnicodeRange1 |= 1 << position; } else if (position < 64) { ulUnicodeRange2 |= 1 << position - 32; } else if (position < 96) { ulUnicodeRange3 |= 1 << position - 64; } else if (position < 123) { ulUnicodeRange4 |= 1 << position - 96; } else { throw new Error('Unicode ranges bits > 123 are reserved for internal usage'); } // Skip non-important characters. if (glyph.name === '.notdef') continue; var metrics = glyph.getMetrics(); xMins.push(metrics.xMin); yMins.push(metrics.yMin); xMaxs.push(metrics.xMax); yMaxs.push(metrics.yMax); leftSideBearings.push(metrics.leftSideBearing); rightSideBearings.push(metrics.rightSideBearing); advanceWidths.push(glyph.advanceWidth); } var globals = { xMin: Math.min.apply(null, xMins), yMin: Math.min.apply(null, yMins), xMax: Math.max.apply(null, xMaxs), yMax: Math.max.apply(null, yMaxs), advanceWidthMax: Math.max.apply(null, advanceWidths), advanceWidthAvg: average(advanceWidths), minLeftSideBearing: Math.min.apply(null, leftSideBearings), maxLeftSideBearing: Math.max.apply(null, leftSideBearings), minRightSideBearing: Math.min.apply(null, rightSideBearings) }; globals.ascender = font.ascender; globals.descender = font.descender; var headTable = head.make({ flags: 3, // 00000011 (baseline for font at y=0; left sidebearing point at x=0) unitsPerEm: font.unitsPerEm, xMin: globals.xMin, yMin: globals.yMin, xMax: globals.xMax, yMax: globals.yMax, lowestRecPPEM: 3, createdTimestamp: font.createdTimestamp }); var hheaTable = hhea.make({ ascender: globals.ascender, descender: globals.descender, advanceWidthMax: globals.advanceWidthMax, minLeftSideBearing: globals.minLeftSideBearing, minRightSideBearing: globals.minRightSideBearing, xMaxExtent: globals.maxLeftSideBearing + (globals.xMax - globals.xMin), numberOfHMetrics: font.glyphs.length }); var maxpTable = maxp.make(font.glyphs.length); var os2Table = os2.make({ xAvgCharWidth: Math.round(globals.advanceWidthAvg), usWeightClass: font.tables.os2.usWeightClass, usWidthClass: font.tables.os2.usWidthClass, usFirstCharIndex: firstCharIndex, usLastCharIndex: lastCharIndex, ulUnicodeRange1: ulUnicodeRange1, ulUnicodeRange2: ulUnicodeRange2, ulUnicodeRange3: ulUnicodeRange3, ulUnicodeRange4: 
ulUnicodeRange4, fsSelection: font.tables.os2.fsSelection, // REGULAR // See http://typophile.com/node/13081 for more info on vertical metrics. // We get metrics for typical characters (such as "x" for xHeight). // We provide some fallback characters if characters are unavailable: their // ordering was chosen experimentally. sTypoAscender: globals.ascender, sTypoDescender: globals.descender, sTypoLineGap: 0, usWinAscent: globals.yMax, usWinDescent: Math.abs(globals.yMin), ulCodePageRange1: 1, // FIXME: hard-code Latin 1 support for now sxHeight: metricsForChar(font, 'xyvw', {yMax: Math.round(globals.ascender / 2)}).yMax, sCapHeight: metricsForChar(font, 'HIKLEFJMNTZBDPRAGOQSUVWXY', globals).yMax, usDefaultChar: font.hasChar(' ') ? 32 : 0, // Use space as the default character, if available. usBreakChar: font.hasChar(' ') ? 32 : 0 // Use space as the break character, if available. }); var hmtxTable = hmtx.make(font.glyphs); var cmapTable = cmap.make(font.glyphs); var englishFamilyName = font.getEnglishName('fontFamily'); var englishStyleName = font.getEnglishName('fontSubfamily'); var englishFullName = englishFamilyName + ' ' + englishStyleName; var postScriptName = font.getEnglishName('postScriptName'); if (!postScriptName) { postScriptName = englishFamilyName.replace(/\s/g, '') + '-' + englishStyleName; } var names = {}; for (var n in font.names) { names[n] = font.names[n]; } if (!names.uniqueID) { names.uniqueID = {en: font.getEnglishName('manufacturer') + ':' + englishFullName}; } if (!names.postScriptName) { names.postScriptName = {en: postScriptName}; } if (!names.preferredFamily) { names.preferredFamily = font.names.fontFamily; } if (!names.preferredSubfamily) { names.preferredSubfamily = font.names.fontSubfamily; } var languageTags = []; var nameTable = _name.make(names, languageTags); var ltagTable = (languageTags.length > 0 ? ltag.make(languageTags) : undefined); var postTable = post.make(); var cffTable = cff.make(font.glyphs, { version: font.getEnglishName('version'), fullName: englishFullName, familyName: englishFamilyName, weightName: englishStyleName, postScriptName: postScriptName, unitsPerEm: font.unitsPerEm, fontBBox: [0, globals.yMin, globals.ascender, globals.advanceWidthMax] }); var metaTable = (font.metas && Object.keys(font.metas).length > 0) ? meta.make(font.metas) : undefined; // The order does not matter because makeSfntTable() will sort them. var tables = [headTable, hheaTable, maxpTable, os2Table, nameTable, cmapTable, postTable, cffTable, hmtxTable]; if (ltagTable) { tables.push(ltagTable); } // Optional tables if (font.tables.gsub) { tables.push(gsub.make(font.tables.gsub)); } if (metaTable) { tables.push(metaTable); } var sfntTable = makeSfntTable(tables); // Compute the font's checkSum and store it in head.checkSumAdjustment. 
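// The value stored below follows the OpenType rule for checkSumAdjustment: with the
// field still at 0, checksum the whole font and store 0xB1B0AFBA minus that sum,
// i.e. (in sketch form) head.checkSumAdjustment = 0xB1B0AFBA - computeCheckSum(bytes).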
var bytes = sfntTable.encode(); var checkSum = computeCheckSum(bytes); var tableFields = sfntTable.fields; var checkSumAdjusted = false; for (i = 0; i < tableFields.length; i += 1) { if (tableFields[i].name === 'head table') { tableFields[i].value.checkSumAdjustment = 0xB1B0AFBA - checkSum; checkSumAdjusted = true; break; } } if (!checkSumAdjusted) { throw new Error('Could not find head table with checkSum to adjust.'); } return sfntTable; } exports.computeCheckSum = computeCheckSum; exports.make = makeSfntTable; exports.fontToTable = fontToSfntTable; },{"../check":2,"../table":13,"./cff":14,"./cmap":15,"./gsub":19,"./head":20,"./hhea":21,"./hmtx":22,"./ltag":25,"./maxp":26,"./meta":27,"./name":28,"./os2":29,"./post":30}],32:[function(require,module,exports){ // Data types used in the OpenType font file. // All OpenType fonts use Motorola-style byte ordering (Big Endian) /* global WeakMap */ 'use strict'; var check = require('./check'); var LIMIT16 = 32768; // The limit at which a 16-bit number switches signs == 2^15 var LIMIT32 = 2147483648; // The limit at which a 32-bit number switches signs == 2 ^ 31 /** * @exports opentype.decode * @class */ var decode = {}; /** * @exports opentype.encode * @class */ var encode = {}; /** * @exports opentype.sizeOf * @class */ var sizeOf = {}; // Return a function that always returns the same value. function constant(v) { return function() { return v; }; } // OpenType data types ////////////////////////////////////////////////////// /** * Convert an 8-bit unsigned integer to a list of 1 byte. * @param {number} * @returns {Array} */ encode.BYTE = function(v) { check.argument(v >= 0 && v <= 255, 'Byte value should be between 0 and 255.'); return [v]; }; /** * @constant * @type {number} */ sizeOf.BYTE = constant(1); /** * Convert a 8-bit signed integer to a list of 1 byte. * @param {string} * @returns {Array} */ encode.CHAR = function(v) { return [v.charCodeAt(0)]; }; /** * @constant * @type {number} */ sizeOf.CHAR = constant(1); /** * Convert an ASCII string to a list of bytes. * @param {string} * @returns {Array} */ encode.CHARARRAY = function(v) { var b = []; for (var i = 0; i < v.length; i += 1) { b[i] = v.charCodeAt(i); } return b; }; /** * @param {Array} * @returns {number} */ sizeOf.CHARARRAY = function(v) { return v.length; }; /** * Convert a 16-bit unsigned integer to a list of 2 bytes. * @param {number} * @returns {Array} */ encode.USHORT = function(v) { return [(v >> 8) & 0xFF, v & 0xFF]; }; /** * @constant * @type {number} */ sizeOf.USHORT = constant(2); /** * Convert a 16-bit signed integer to a list of 2 bytes. * @param {number} * @returns {Array} */ encode.SHORT = function(v) { // Two's complement if (v >= LIMIT16) { v = -(2 * LIMIT16 - v); } return [(v >> 8) & 0xFF, v & 0xFF]; }; /** * @constant * @type {number} */ sizeOf.SHORT = constant(2); /** * Convert a 24-bit unsigned integer to a list of 3 bytes. * @param {number} * @returns {Array} */ encode.UINT24 = function(v) { return [(v >> 16) & 0xFF, (v >> 8) & 0xFF, v & 0xFF]; }; /** * @constant * @type {number} */ sizeOf.UINT24 = constant(3); /** * Convert a 32-bit unsigned integer to a list of 4 bytes. * @param {number} * @returns {Array} */ encode.ULONG = function(v) { return [(v >> 24) & 0xFF, (v >> 16) & 0xFF, (v >> 8) & 0xFF, v & 0xFF]; }; /** * @constant * @type {number} */ sizeOf.ULONG = constant(4); /** * Convert a 32-bit unsigned integer to a list of 4 bytes. 
* @param {number} * @returns {Array} */ encode.LONG = function(v) { // Two's complement if (v >= LIMIT32) { v = -(2 * LIMIT32 - v); } return [(v >> 24) & 0xFF, (v >> 16) & 0xFF, (v >> 8) & 0xFF, v & 0xFF]; }; /** * @constant * @type {number} */ sizeOf.LONG = constant(4); encode.FIXED = encode.ULONG; sizeOf.FIXED = sizeOf.ULONG; encode.FWORD = encode.SHORT; sizeOf.FWORD = sizeOf.SHORT; encode.UFWORD = encode.USHORT; sizeOf.UFWORD = sizeOf.USHORT; /** * Convert a 32-bit Apple Mac timestamp integer to a list of 8 bytes, 64-bit timestamp. * @param {number} * @returns {Array} */ encode.LONGDATETIME = function(v) { return [0, 0, 0, 0, (v >> 24) & 0xFF, (v >> 16) & 0xFF, (v >> 8) & 0xFF, v & 0xFF]; }; /** * @constant * @type {number} */ sizeOf.LONGDATETIME = constant(8); /** * Convert a 4-char tag to a list of 4 bytes. * @param {string} * @returns {Array} */ encode.TAG = function(v) { check.argument(v.length === 4, 'Tag should be exactly 4 ASCII characters.'); return [v.charCodeAt(0), v.charCodeAt(1), v.charCodeAt(2), v.charCodeAt(3)]; }; /** * @constant * @type {number} */ sizeOf.TAG = constant(4); // CFF data types /////////////////////////////////////////////////////////// encode.Card8 = encode.BYTE; sizeOf.Card8 = sizeOf.BYTE; encode.Card16 = encode.USHORT; sizeOf.Card16 = sizeOf.USHORT; encode.OffSize = encode.BYTE; sizeOf.OffSize = sizeOf.BYTE; encode.SID = encode.USHORT; sizeOf.SID = sizeOf.USHORT; // Convert a numeric operand or charstring number to a variable-size list of bytes. /** * Convert a numeric operand or charstring number to a variable-size list of bytes. * @param {number} * @returns {Array} */ encode.NUMBER = function(v) { if (v >= -107 && v <= 107) { return [v + 139]; } else if (v >= 108 && v <= 1131) { v = v - 108; return [(v >> 8) + 247, v & 0xFF]; } else if (v >= -1131 && v <= -108) { v = -v - 108; return [(v >> 8) + 251, v & 0xFF]; } else if (v >= -32768 && v <= 32767) { return encode.NUMBER16(v); } else { return encode.NUMBER32(v); } }; /** * @param {number} * @returns {number} */ sizeOf.NUMBER = function(v) { return encode.NUMBER(v).length; }; /** * Convert a signed number between -32768 and +32767 to a three-byte value. * This ensures we always use three bytes, but is not the most compact format. * @param {number} * @returns {Array} */ encode.NUMBER16 = function(v) { return [28, (v >> 8) & 0xFF, v & 0xFF]; }; /** * @constant * @type {number} */ sizeOf.NUMBER16 = constant(3); /** * Convert a signed number between -(2^31) and +(2^31-1) to a five-byte value. * This is useful if you want to be sure you always use four bytes, * at the expense of wasting a few bytes for smaller numbers. * @param {number} * @returns {Array} */ encode.NUMBER32 = function(v) { return [29, (v >> 24) & 0xFF, (v >> 16) & 0xFF, (v >> 8) & 0xFF, v & 0xFF]; }; /** * @constant * @type {number} */ sizeOf.NUMBER32 = constant(5); /** * @param {number} * @returns {Array} */ encode.REAL = function(v) { var value = v.toString(); // Some numbers use an epsilon to encode the value. (e.g. JavaScript will store 0.0000001 as 1e-7) // This code converts it back to a number without the epsilon. var m = /\.(\d*?)(?:9{5,20}|0{5,20})\d{0,2}(?:e(.+)|$)/.exec(value); if (m) { var epsilon = parseFloat('1e' + ((m[2] ? +m[2] : 0) + m[1].length)); value = (Math.round(v * epsilon) / epsilon).toString(); } var nibbles = ''; var i; var ii; for (i = 0, ii = value.length; i < ii; i += 1) { var c = value[i]; if (c === 'e') { nibbles += value[++i] === '-' ? 
'c' : 'b'; } else if (c === '.') { nibbles += 'a'; } else if (c === '-') { nibbles += 'e'; } else { nibbles += c; } } nibbles += (nibbles.length & 1) ? 'f' : 'ff'; var out = [30]; for (i = 0, ii = nibbles.length; i < ii; i += 2) { out.push(parseInt(nibbles.substr(i, 2), 16)); } return out; }; /** * @param {number} * @returns {number} */ sizeOf.REAL = function(v) { return encode.REAL(v).length; }; encode.NAME = encode.CHARARRAY; sizeOf.NAME = sizeOf.CHARARRAY; encode.STRING = encode.CHARARRAY; sizeOf.STRING = sizeOf.CHARARRAY; /** * @param {DataView} data * @param {number} offset * @param {number} numBytes * @returns {string} */ decode.UTF8 = function(data, offset, numBytes) { var codePoints = []; var numChars = numBytes; for (var j = 0; j < numChars; j++, offset += 1) { codePoints[j] = data.getUint8(offset); } return String.fromCharCode.apply(null, codePoints); }; /** * @param {DataView} data * @param {number} offset * @param {number} numBytes * @returns {string} */ decode.UTF16 = function(data, offset, numBytes) { var codePoints = []; var numChars = numBytes / 2; for (var j = 0; j < numChars; j++, offset += 2) { codePoints[j] = data.getUint16(offset); } return String.fromCharCode.apply(null, codePoints); }; /** * Convert a JavaScript string to UTF16-BE. * @param {string} * @returns {Array} */ encode.UTF16 = function(v) { var b = []; for (var i = 0; i < v.length; i += 1) { var codepoint = v.charCodeAt(i); b[b.length] = (codepoint >> 8) & 0xFF; b[b.length] = codepoint & 0xFF; } return b; }; /** * @param {string} * @returns {number} */ sizeOf.UTF16 = function(v) { return v.length * 2; }; // Data for converting old eight-bit Macintosh encodings to Unicode. // This representation is optimized for decoding; encoding is slower // and needs more memory. The assumption is that all opentype.js users // want to open fonts, but saving a font will be comperatively rare // so it can be more expensive. Keyed by IANA character set name. 
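// Each string below lists, in order, the Unicode characters for bytes 0x80..0xFF, so
// decoding is a simple index: for a byte c >= 0x80 the character is table[c & 0x7F].
// For example: eightBitMacEncodings['macintosh'][0x8A - 0x80]   // → 'ä' (MacRoman 0x8A)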
// // Python script for generating these strings: // // s = u''.join([chr(c).decode('mac_greek') for c in range(128, 256)]) // print(s.encode('utf-8')) /** * @private */ var eightBitMacEncodings = { 'x-mac-croatian': // Python: 'mac_croatian' 'ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®Š™´¨≠ŽØ∞±≤≥∆µ∂∑∏š∫ªºΩžø' + '¿¡¬√ƒ≈Ć«Č… ÀÃÕŒœĐ—“”‘’÷◊©⁄€‹›Æ»–·‚„‰ÂćÁčÈÍÎÏÌÓÔđÒÚÛÙıˆ˜¯πË˚¸Êæˇ', 'x-mac-cyrillic': // Python: 'mac_cyrillic' 'АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ†°Ґ£§•¶І®©™Ђђ≠Ѓѓ∞±≤≥іµґЈЄєЇїЉљЊњ' + 'јЅ¬√ƒ≈∆«»… ЋћЌќѕ–—“”‘’÷„ЎўЏџ№Ёёяабвгдежзийклмнопрстуфхцчшщъыьэю', 'x-mac-gaelic': // http://unicode.org/Public/MAPPINGS/VENDORS/APPLE/GAELIC.TXT 'ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ÆØḂ±≤≥ḃĊċḊḋḞḟĠġṀæø' + 'ṁṖṗɼƒſṠ«»… ÀÃÕŒœ–—“”‘’ṡẛÿŸṪ€‹›Ŷŷṫ·Ỳỳ⁊ÂÊÁËÈÍÎÏÌÓÔ♣ÒÚÛÙıÝýŴŵẄẅẀẁẂẃ', 'x-mac-greek': // Python: 'mac_greek' 'Ĺ²É³ÖÜ΅àâä΄¨çéèê룙î‰ôö¦€ùûü†ΓΔΘΛΞΠß®©ΣΪ§≠°·Α±≤≥¥ΒΕΖΗΙΚΜΦΫΨΩ' + 'άΝ¬ΟΡ≈Τ«»… ΥΧΆΈœ–―“”‘’÷ΉΊΌΎέήίόΏύαβψδεφγηιξκλμνοπώρστθωςχυζϊϋΐΰ\u00AD', 'x-mac-icelandic': // Python: 'mac_iceland' 'ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûüÝ°¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø' + '¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄€ÐðÞþý·‚„‰ÂÊÁËÈÍÎÏÌÓÔÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ', 'x-mac-inuit': // http://unicode.org/Public/MAPPINGS/VENDORS/APPLE/INUIT.TXT 'ᐃᐄᐅᐆᐊᐋᐱᐲᐳᐴᐸᐹᑉᑎᑏᑐᑑᑕᑖᑦᑭᑮᑯᑰᑲᑳᒃᒋᒌᒍᒎᒐᒑ°ᒡᒥᒦ•¶ᒧ®©™ᒨᒪᒫᒻᓂᓃᓄᓅᓇᓈᓐᓯᓰᓱᓲᓴᓵᔅᓕᓖᓗ' + 'ᓘᓚᓛᓪᔨᔩᔪᔫᔭ… ᔮᔾᕕᕖᕗ–—“”‘’ᕘᕙᕚᕝᕆᕇᕈᕉᕋᕌᕐᕿᖀᖁᖂᖃᖄᖅᖏᖐᖑᖒᖓᖔᖕᙱᙲᙳᙴᙵᙶᖖᖠᖡᖢᖣᖤᖥᖦᕼŁł', 'x-mac-ce': // Python: 'mac_latin2' 'ÄĀāÉĄÖÜáąČäčĆć鏟ĎíďĒēĖóėôöõúĚěü†°Ę£§•¶ß®©™ę¨≠ģĮįĪ≤≥īĶ∂∑łĻļĽľĹĺŅ' + 'ņѬ√ńŇ∆«»… ňŐÕőŌ–—“”‘’÷◊ōŔŕŘ‹›řŖŗŠ‚„šŚśÁŤťÍŽžŪÓÔūŮÚůŰűŲųÝýķŻŁżĢˇ', macintosh: // Python: 'mac_roman' 'ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø' + '¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄€‹›fifl‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ', 'x-mac-romanian': // Python: 'mac_romanian' 'ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ĂȘ∞±≤≥¥µ∂∑∏π∫ªºΩăș' + '¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄€‹›Țț‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ', 'x-mac-turkish': // Python: 'mac_turkish' 'ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø' + '¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸĞğİıŞş‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔÒÚÛÙˆ˜¯˘˙˚¸˝˛ˇ' }; /** * Decodes an old-style Macintosh string. Returns either a Unicode JavaScript * string, or 'undefined' if the encoding is unsupported. For example, we do * not support Chinese, Japanese or Korean because these would need large * mapping tables. * @param {DataView} dataView * @param {number} offset * @param {number} dataLength * @param {string} encoding * @returns {string} */ decode.MACSTRING = function(dataView, offset, dataLength, encoding) { var table = eightBitMacEncodings[encoding]; if (table === undefined) { return undefined; } var result = ''; for (var i = 0; i < dataLength; i++) { var c = dataView.getUint8(offset + i); // In all eight-bit Mac encodings, the characters 0x00..0x7F are // mapped to U+0000..U+007F; we only need to look up the others. if (c <= 0x7F) { result += String.fromCharCode(c); } else { result += table[c & 0x7F]; } } return result; }; // Helper function for encode.MACSTRING. Returns a dictionary for mapping // Unicode character codes to their 8-bit MacOS equivalent. This table // is not exactly a super cheap data structure, but we do not care because // encoding Macintosh strings is only rarely needed in typical applications. var macEncodingTableCache = typeof WeakMap === 'function' && new WeakMap(); var macEncodingCacheKeys; var getMacEncodingTable = function(encoding) { // Since we use encoding as a cache key for WeakMap, it has to be // a String object and not a literal. 
And at least on NodeJS 2.10.1, // WeakMap requires that the same String instance is passed for cache hits. if (!macEncodingCacheKeys) { macEncodingCacheKeys = {}; for (var e in eightBitMacEncodings) { /*jshint -W053 */ // Suppress "Do not use String as a constructor." macEncodingCacheKeys[e] = new String(e); } } var cacheKey = macEncodingCacheKeys[encoding]; if (cacheKey === undefined) { return undefined; } // We can't do "if (cache.has(key)) {return cache.get(key)}" here: // since garbage collection may run at any time, it could also kick in // between the calls to cache.has() and cache.get(). In that case, // we would return 'undefined' even though we do support the encoding. if (macEncodingTableCache) { var cachedTable = macEncodingTableCache.get(cacheKey); if (cachedTable !== undefined) { return cachedTable; } } var decodingTable = eightBitMacEncodings[encoding]; if (decodingTable === undefined) { return undefined; } var encodingTable = {}; for (var i = 0; i < decodingTable.length; i++) { encodingTable[decodingTable.charCodeAt(i)] = i + 0x80; } if (macEncodingTableCache) { macEncodingTableCache.set(cacheKey, encodingTable); } return encodingTable; }; /** * Encodes an old-style Macintosh string. Returns a byte array upon success. * If the requested encoding is unsupported, or if the input string contains * a character that cannot be expressed in the encoding, the function returns * 'undefined'. * @param {string} str * @param {string} encoding * @returns {Array} */ encode.MACSTRING = function(str, encoding) { var table = getMacEncodingTable(encoding); if (table === undefined) { return undefined; } var result = []; for (var i = 0; i < str.length; i++) { var c = str.charCodeAt(i); // In all eight-bit Mac encodings, the characters 0x00..0x7F are // mapped to U+0000..U+007F; we only need to look up the others. if (c >= 0x80) { c = table[c]; if (c === undefined) { // str contains a Unicode character that cannot be encoded // in the requested encoding. return undefined; } } result[i] = c; // result.push(c); } return result; }; /** * @param {string} str * @param {string} encoding * @returns {number} */ sizeOf.MACSTRING = function(str, encoding) { var b = encode.MACSTRING(str, encoding); if (b !== undefined) { return b.length; } else { return 0; } }; // Convert a list of values to a CFF INDEX structure. // The values should be objects containing name / type / value. /** * @param {Array} l * @returns {Array} */ encode.INDEX = function(l) { var i; //var offset, offsets, offsetEncoder, encodedOffsets, encodedOffset, data, // i, v; // Because we have to know which data type to use to encode the offsets, // we have to go through the values twice: once to encode the data and // calculate the offets, then again to encode the offsets using the fitting data type. var offset = 1; // First offset is always 1. 
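// Worked example (sketch): for two items that encode to 1 and 2 bytes, the offsets
// become [1, 2, 4], offSize is 1, and the INDEX is laid out as
// count (2 bytes) + offSize (1 byte) + three 1-byte offsets + 3 data bytes.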
var offsets = [offset]; var data = []; for (i = 0; i < l.length; i += 1) { var v = encode.OBJECT(l[i]); Array.prototype.push.apply(data, v); offset += v.length; offsets.push(offset); } if (data.length === 0) { return [0, 0]; } var encodedOffsets = []; var offSize = (1 + Math.floor(Math.log(offset) / Math.log(2)) / 8) | 0; var offsetEncoder = [undefined, encode.BYTE, encode.USHORT, encode.UINT24, encode.ULONG][offSize]; for (i = 0; i < offsets.length; i += 1) { var encodedOffset = offsetEncoder(offsets[i]); Array.prototype.push.apply(encodedOffsets, encodedOffset); } return Array.prototype.concat(encode.Card16(l.length), encode.OffSize(offSize), encodedOffsets, data); }; /** * @param {Array} * @returns {number} */ sizeOf.INDEX = function(v) { return encode.INDEX(v).length; }; /** * Convert an object to a CFF DICT structure. * The keys should be numeric. * The values should be objects containing name / type / value. * @param {Object} m * @returns {Array} */ encode.DICT = function(m) { var d = []; var keys = Object.keys(m); var length = keys.length; for (var i = 0; i < length; i += 1) { // Object.keys() return string keys, but our keys are always numeric. var k = parseInt(keys[i], 0); var v = m[k]; // Value comes before the key. d = d.concat(encode.OPERAND(v.value, v.type)); d = d.concat(encode.OPERATOR(k)); } return d; }; /** * @param {Object} * @returns {number} */ sizeOf.DICT = function(m) { return encode.DICT(m).length; }; /** * @param {number} * @returns {Array} */ encode.OPERATOR = function(v) { if (v < 1200) { return [v]; } else { return [12, v - 1200]; } }; /** * @param {Array} v * @param {string} * @returns {Array} */ encode.OPERAND = function(v, type) { var d = []; if (Array.isArray(type)) { for (var i = 0; i < type.length; i += 1) { check.argument(v.length === type.length, 'Not enough arguments given for type' + type); d = d.concat(encode.OPERAND(v[i], type[i])); } } else { if (type === 'SID') { d = d.concat(encode.NUMBER(v)); } else if (type === 'offset') { // We make it easy for ourselves and always encode offsets as // 4 bytes. This makes offset calculation for the top dict easier. d = d.concat(encode.NUMBER32(v)); } else if (type === 'number') { d = d.concat(encode.NUMBER(v)); } else if (type === 'real') { d = d.concat(encode.REAL(v)); } else { throw new Error('Unknown operand type ' + type); // FIXME Add support for booleans } } return d; }; encode.OP = encode.BYTE; sizeOf.OP = sizeOf.BYTE; // memoize charstring encoding using WeakMap if available var wmm = typeof WeakMap === 'function' && new WeakMap(); /** * Convert a list of CharString operations to bytes. * @param {Array} * @returns {Array} */ encode.CHARSTRING = function(ops) { // See encode.MACSTRING for why we don't do "if (wmm && wmm.has(ops))". if (wmm) { var cachedValue = wmm.get(ops); if (cachedValue !== undefined) { return cachedValue; } } var d = []; var length = ops.length; for (var i = 0; i < length; i += 1) { var op = ops[i]; d = d.concat(encode[op.type](op.value)); } if (wmm) { wmm.set(ops, d); } return d; }; /** * @param {Array} * @returns {number} */ sizeOf.CHARSTRING = function(ops) { return encode.CHARSTRING(ops).length; }; // Utility functions //////////////////////////////////////////////////////// /** * Convert an object containing name / type / value to bytes. 
* @param {Object} * @returns {Array} */ encode.OBJECT = function(v) { var encodingFunction = encode[v.type]; check.argument(encodingFunction !== undefined, 'No encoding function for type ' + v.type); return encodingFunction(v.value); }; /** * @param {Object} * @returns {number} */ sizeOf.OBJECT = function(v) { var sizeOfFunction = sizeOf[v.type]; check.argument(sizeOfFunction !== undefined, 'No sizeOf function for type ' + v.type); return sizeOfFunction(v.value); }; /** * Convert a table object to bytes. * A table contains a list of fields containing the metadata (name, type and default value). * The table itself has the field values set as attributes. * @param {opentype.Table} * @returns {Array} */ encode.TABLE = function(table) { var d = []; var length = table.fields.length; var subtables = []; var subtableOffsets = []; var i; for (i = 0; i < length; i += 1) { var field = table.fields[i]; var encodingFunction = encode[field.type]; check.argument(encodingFunction !== undefined, 'No encoding function for field type ' + field.type + ' (' + field.name + ')'); var value = table[field.name]; if (value === undefined) { value = field.value; } var bytes = encodingFunction(value); if (field.type === 'TABLE') { subtableOffsets.push(d.length); d = d.concat([0, 0]); subtables.push(bytes); } else { d = d.concat(bytes); } } for (i = 0; i < subtables.length; i += 1) { var o = subtableOffsets[i]; var offset = d.length; check.argument(offset < 65536, 'Table ' + table.tableName + ' too big.'); d[o] = offset >> 8; d[o + 1] = offset & 0xff; d = d.concat(subtables[i]); } return d; }; /** * @param {opentype.Table} * @returns {number} */ sizeOf.TABLE = function(table) { var numBytes = 0; var length = table.fields.length; for (var i = 0; i < length; i += 1) { var field = table.fields[i]; var sizeOfFunction = sizeOf[field.type]; check.argument(sizeOfFunction !== undefined, 'No sizeOf function for field type ' + field.type + ' (' + field.name + ')'); var value = table[field.name]; if (value === undefined) { value = field.value; } numBytes += sizeOfFunction(value); // Subtables take 2 more bytes for offsets. if (field.type === 'TABLE') { numBytes += 2; } } return numBytes; }; encode.RECORD = encode.TABLE; sizeOf.RECORD = sizeOf.TABLE; // Merge in a list of bytes. encode.LITERAL = function(v) { return v; }; sizeOf.LITERAL = function(v) { return v.length; }; exports.decode = decode; exports.encode = encode; exports.sizeOf = sizeOf; },{"./check":2}],33:[function(require,module,exports){ 'use strict'; exports.isBrowser = function() { return typeof window !== 'undefined'; }; exports.isNode = function() { return typeof window === 'undefined'; }; exports.nodeBufferToArrayBuffer = function(buffer) { var ab = new ArrayBuffer(buffer.length); var view = new Uint8Array(ab); for (var i = 0; i < buffer.length; ++i) { view[i] = buffer[i]; } return ab; }; exports.arrayBufferToNodeBuffer = function(ab) { var buffer = new Buffer(ab.byteLength); var view = new Uint8Array(ab); for (var i = 0; i < buffer.length; ++i) { buffer[i] = view[i]; } return buffer; }; exports.checkArgument = function(expression, message) { if (!expression) { throw message; } }; },{}]},{},[9])(9) });
joeyparrish/cdnjs
ajax/libs/opentype.js/0.6.7/opentype.js
JavaScript
mit
295,992
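The encode.INDEX routine in the opentype.js file above picks the smallest offset width (offSize) that can represent the largest data offset, then re-encodes every offset at that width. The following standalone sketch is not part of opentype.js (the offsetWidth helper name is made up for illustration); it only reproduces the library's size formula so the byte widths it selects are easy to check:

// Standalone illustration of the offSize rule used by encode.INDEX above:
// 1 + floor(log2(maxOffset)) / 8, truncated to an integer with "| 0".
function offsetWidth(maxOffset) {
  return (1 + Math.floor(Math.log(maxOffset) / Math.log(2)) / 8) | 0;
}

[255, 256, 65535, 65536, 16777215, 16777216].forEach(function (maxOffset) {
  console.log(maxOffset + ' -> ' + offsetWidth(maxOffset) + '-byte offsets');
});
// Offsets up to 255 fit in 1 byte, 256..65535 in 2, 65536..16777215 in 3,
// and anything larger falls through to the 4-byte ULONG encoder.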
/* * Copyright (c) 2015, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package jdk.nashorn.test.models; /** * Simple function interface with a varargs SAM method. */ @FunctionalInterface public interface VarArgConsumer { public void apply(Object... o); }
FauxFaux/jdk9-nashorn
test/src/jdk/nashorn/test/models/VarArgConsumer.java
Java
gpl-2.0
1,400
import { onBlur } from "../display/focus" import { setGuttersForLineNumbers, updateGutters } from "../display/gutters" import { alignHorizontally } from "../display/line_numbers" import { loadMode, resetModeState } from "../display/mode_state" import { initScrollbars, updateScrollbars } from "../display/scrollbars" import { updateSelection } from "../display/selection" import { regChange } from "../display/view_tracking" import { getKeyMap } from "../input/keymap" import { defaultSpecialCharPlaceholder } from "../line/line_data" import { Pos } from "../line/pos" import { findMaxLine } from "../line/spans" import { clearCaches, compensateForHScroll, estimateLineHeights } from "../measurement/position_measurement" import { replaceRange } from "../model/changes" import { mobile, windows } from "../util/browser" import { addClass, rmClass } from "../util/dom" import { off, on } from "../util/event" import { themeChanged } from "./utils" export let Init = {toString: function(){return "CodeMirror.Init"}} export let defaults = {} export let optionHandlers = {} export function defineOptions(CodeMirror) { let optionHandlers = CodeMirror.optionHandlers function option(name, deflt, handle, notOnInit) { CodeMirror.defaults[name] = deflt if (handle) optionHandlers[name] = notOnInit ? (cm, val, old) => {if (old != Init) handle(cm, val, old)} : handle } CodeMirror.defineOption = option // Passed to option handlers when there is no old value. CodeMirror.Init = Init // These two are, on init, called from the constructor because they // have to be initialized before the editor can start at all. option("value", "", (cm, val) => cm.setValue(val), true) option("mode", null, (cm, val) => { cm.doc.modeOption = val loadMode(cm) }, true) option("indentUnit", 2, loadMode, true) option("indentWithTabs", false) option("smartIndent", true) option("tabSize", 4, cm => { resetModeState(cm) clearCaches(cm) regChange(cm) }, true) option("lineSeparator", null, (cm, val) => { cm.doc.lineSep = val if (!val) return let newBreaks = [], lineNo = cm.doc.first cm.doc.iter(line => { for (let pos = 0;;) { let found = line.text.indexOf(val, pos) if (found == -1) break pos = found + val.length newBreaks.push(Pos(lineNo, found)) } lineNo++ }) for (let i = newBreaks.length - 1; i >= 0; i--) replaceRange(cm.doc, val, newBreaks[i], Pos(newBreaks[i].line, newBreaks[i].ch + val.length)) }) option("specialChars", /[\u0000-\u001f\u007f-\u009f\u00ad\u061c\u200b-\u200f\u2028\u2029\ufeff]/g, (cm, val, old) => { cm.state.specialChars = new RegExp(val.source + (val.test("\t") ? "" : "|\t"), "g") if (old != Init) cm.refresh() }) option("specialCharPlaceholder", defaultSpecialCharPlaceholder, cm => cm.refresh(), true) option("electricChars", true) option("inputStyle", mobile ? 
"contenteditable" : "textarea", () => { throw new Error("inputStyle can not (yet) be changed in a running editor") // FIXME }, true) option("spellcheck", false, (cm, val) => cm.getInputField().spellcheck = val, true) option("rtlMoveVisually", !windows) option("wholeLineUpdateBefore", true) option("theme", "default", cm => { themeChanged(cm) guttersChanged(cm) }, true) option("keyMap", "default", (cm, val, old) => { let next = getKeyMap(val) let prev = old != Init && getKeyMap(old) if (prev && prev.detach) prev.detach(cm, next) if (next.attach) next.attach(cm, prev || null) }) option("extraKeys", null) option("configureMouse", null) option("lineWrapping", false, wrappingChanged, true) option("gutters", [], cm => { setGuttersForLineNumbers(cm.options) guttersChanged(cm) }, true) option("fixedGutter", true, (cm, val) => { cm.display.gutters.style.left = val ? compensateForHScroll(cm.display) + "px" : "0" cm.refresh() }, true) option("coverGutterNextToScrollbar", false, cm => updateScrollbars(cm), true) option("scrollbarStyle", "native", cm => { initScrollbars(cm) updateScrollbars(cm) cm.display.scrollbars.setScrollTop(cm.doc.scrollTop) cm.display.scrollbars.setScrollLeft(cm.doc.scrollLeft) }, true) option("lineNumbers", false, cm => { setGuttersForLineNumbers(cm.options) guttersChanged(cm) }, true) option("firstLineNumber", 1, guttersChanged, true) option("lineNumberFormatter", integer => integer, guttersChanged, true) option("showCursorWhenSelecting", false, updateSelection, true) option("resetSelectionOnContextMenu", true) option("lineWiseCopyCut", true) option("pasteLinesPerSelection", true) option("readOnly", false, (cm, val) => { if (val == "nocursor") { onBlur(cm) cm.display.input.blur() } cm.display.input.readOnlyChanged(val) }) option("disableInput", false, (cm, val) => {if (!val) cm.display.input.reset()}, true) option("dragDrop", true, dragDropChanged) option("allowDropFileTypes", null) option("cursorBlinkRate", 530) option("cursorScrollMargin", 0) option("cursorHeight", 1, updateSelection, true) option("singleCursorHeightPerLine", true, updateSelection, true) option("workTime", 100) option("workDelay", 100) option("flattenSpans", true, resetModeState, true) option("addModeClass", false, resetModeState, true) option("pollInterval", 100) option("undoDepth", 200, (cm, val) => cm.doc.history.undoDepth = val) option("historyEventDelay", 1250) option("viewportMargin", 10, cm => cm.refresh(), true) option("maxHighlightLength", 10000, resetModeState, true) option("moveInputWithCursor", true, (cm, val) => { if (!val) cm.display.input.resetPosition() }) option("tabindex", null, (cm, val) => cm.display.input.getField().tabIndex = val || "") option("autofocus", null) option("direction", "ltr", (cm, val) => cm.doc.setDirection(val), true) } function guttersChanged(cm) { updateGutters(cm) regChange(cm) alignHorizontally(cm) } function dragDropChanged(cm, value, old) { let wasOn = old && old != Init if (!value != !wasOn) { let funcs = cm.display.dragFunctions let toggle = value ? 
on : off toggle(cm.display.scroller, "dragstart", funcs.start) toggle(cm.display.scroller, "dragenter", funcs.enter) toggle(cm.display.scroller, "dragover", funcs.over) toggle(cm.display.scroller, "dragleave", funcs.leave) toggle(cm.display.scroller, "drop", funcs.drop) } } function wrappingChanged(cm) { if (cm.options.lineWrapping) { addClass(cm.display.wrapper, "CodeMirror-wrap") cm.display.sizer.style.minWidth = "" cm.display.sizerWidth = null } else { rmClass(cm.display.wrapper, "CodeMirror-wrap") findMaxLine(cm) } estimateLineHeights(cm) regChange(cm) clearCaches(cm) setTimeout(() => updateScrollbars(cm), 100) }
AfrikaBurn/Main
web/libraries/codemirror/src/edit/options.js
JavaScript
gpl-2.0
6,874
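All of the calls in the CodeMirror file above go through the option() helper, which the library also exposes publicly as CodeMirror.defineOption. Below is a minimal sketch of registering a custom option through that same hook, assuming the bundled codemirror package; the option name, overlay and style class are invented for illustration, while defineOption, CodeMirror.Init, addOverlay and removeOverlay are the library's real APIs:

import CodeMirror from "codemirror"

// Hypothetical option: highlight trailing whitespace with a mode overlay.
// The handler receives (cm, newValue, oldValue); oldValue equals
// CodeMirror.Init on the first call, just like the handlers defined above.
CodeMirror.defineOption("showTrailingSpace", false, (cm, val, old) => {
  if (old && old != CodeMirror.Init) cm.removeOverlay("trailing-space")
  if (val) cm.addOverlay({
    name: "trailing-space",
    token: stream => {
      // Style the run of whitespace that ends the line, skip everything else
      if (stream.match(/^\s+$/)) return "trailing-space"
      stream.next()
      return null
    }
  })
})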
/** * @license Copyright (c) 2003-2015, CKSource - Frederico Knabben. All rights reserved. * For licensing, see LICENSE.md or http://ckeditor.com/license */ /** * @fileOverview Defines the {@link CKEDITOR.lang} object, for the * Serbian (Cyrillic) language. */ /**#@+ @type String @example */ /** * Contains the dictionary of language entries. * @namespace */ CKEDITOR.lang[ 'sr' ] = { // ARIA description. editor: 'Rich Text Editor', // MISSING editorPanel: 'Rich Text Editor panel', // MISSING // Common messages and labels. common: { // Screenreader titles. Please note that screenreaders are not always capable // of reading non-English words. So be careful while translating it. editorHelp: 'Press ALT 0 for help', // MISSING browseServer: 'Претражи сервер', url: 'УРЛ', protocol: 'Протокол', upload: 'Пошаљи', uploadSubmit: 'Пошаљи на сервер', image: 'Слика', flash: 'Флеш елемент', form: 'Форма', checkbox: 'Поље за потврду', radio: 'Радио-дугме', textField: 'Текстуално поље', textarea: 'Зона текста', hiddenField: 'Скривено поље', button: 'Дугме', select: 'Изборно поље', imageButton: 'Дугме са сликом', notSet: '<није постављено>', id: 'Ид', name: 'Назив', langDir: 'Смер језика', langDirLtr: 'С лева на десно (LTR)', langDirRtl: 'С десна на лево (RTL)', langCode: 'Kôд језика', longDescr: 'Пун опис УРЛ', cssClass: 'Stylesheet класе', advisoryTitle: 'Advisory наслов', cssStyle: 'Стил', ok: 'OK', cancel: 'Oткажи', close: 'Затвори', preview: 'Изглед странице', resize: 'Resize', // MISSING generalTab: 'Опште', advancedTab: 'Напредни тагови', validateNumberFailed: 'Ова вредност није цигра.', confirmNewPage: 'Any unsaved changes to this content will be lost. Are you sure you want to load new page?', // MISSING confirmCancel: 'You have changed some options. Are you sure you want to close the dialog window?', // MISSING options: 'Опције', target: 'Meтa', targetNew: 'New Window (_blank)', // MISSING targetTop: 'Topmost Window (_top)', // MISSING targetSelf: 'Same Window (_self)', // MISSING targetParent: 'Parent Window (_parent)', // MISSING langDirLTR: 'С лева на десно (LTR)', langDirRTL: 'С десна на лево (RTL)', styles: 'Стил', cssClasses: 'Stylesheet класе', width: 'Ширина', height: 'Висина', align: 'Равнање', alignLeft: 'Лево', alignRight: 'Десно', alignCenter: 'Средина', alignJustify: 'Обострано равнање', alignTop: 'Врх', alignMiddle: 'Средина', alignBottom: 'Доле', alignNone: 'None', // MISSING invalidValue : 'Invalid value.', // MISSING invalidHeight: 'Height must be a number.', // MISSING invalidWidth: 'Width must be a number.', // MISSING invalidCssLength: 'Value specified for the "%1" field must be a positive number with or without a valid CSS measurement unit (px, %, in, cm, mm, em, ex, pt, or pc).', // MISSING invalidHtmlLength: 'Value specified for the "%1" field must be a positive number with or without a valid HTML measurement unit (px or %).', // MISSING invalidInlineStyle: 'Value specified for the inline style must consist of one or more tuples with the format of "name : value", separated by semi-colons.', // MISSING cssLengthTooltip: 'Enter a number for a value in pixels or a number with a valid CSS unit (px, %, in, cm, mm, em, ex, pt, or pc).', // MISSING // Put the voice-only part of the label in the span. unavailable: '%1<span class="cke_accessibility">, unavailable</span>' // MISSING } };
noskill/Firesoft
src/main/webapp/ckeditor/lang/sr.js
JavaScript
gpl-3.0
3,935
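A short usage sketch for the locale file above, assuming a page that loads the standard CKEditor 4 bundle; the element id is made up for illustration, while CKEDITOR.replace and the language / defaultLanguage settings are the editor's documented configuration API:

// Force the Serbian (Cyrillic) UI for one editor instance...
CKEDITOR.replace('editor1', { language: 'sr' });

// ...or make it the fallback when the visitor's browser locale
// has no translation of its own.
CKEDITOR.config.defaultLanguage = 'sr';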
/* * DarkTooltip v0.4.0 * Simple customizable tooltip with confirm option and 3d effects * (c)2014 Rubén Torres - rubentdlh@gmail.com * Released under the MIT license */ (function($) { function DarkTooltip(element, options){ this.bearer = element; this.options = options; this.hideEvent; this.mouseOverMode=(this.options.trigger == "hover" || this.options.trigger == "mouseover" || this.options.trigger == "onmouseover"); } DarkTooltip.prototype = { show: function(){ var dt = this; if(this.options.modal){ this.modalLayer.css('display', 'block'); } //Close all other tooltips this.tooltip.css('display', 'block'); //Set event to prevent tooltip from closig when mouse is over the tooltip if(dt.mouseOverMode){ this.tooltip.mouseover( function(){ clearTimeout(dt.hideEvent); }); this.tooltip.mouseout( function(){ clearTimeout(dt.hideEvent); dt.hide(); }); } }, hide: function(){ var dt=this; this.hideEvent = setTimeout( function(){ dt.tooltip.hide(); }, 100); if(dt.options.modal){ dt.modalLayer.hide(); } this.options.onClose(); }, toggle: function(){ if(this.tooltip.is(":visible")){ this.hide(); }else{ this.show(); } }, addAnimation: function(){ switch(this.options.animation){ case 'none': break; case 'fadeIn': this.tooltip.addClass('animated'); this.tooltip.addClass('fadeIn'); break; case 'flipIn': this.tooltip.addClass('animated'); this.tooltip.addClass('flipIn'); break; } }, setContent: function(){ $(this.bearer).css('cursor', 'pointer'); //Get tooltip content if(this.options.content){ this.content = this.options.content; }else if(this.bearer.attr("data-tooltip")){ this.content = this.bearer.attr("data-tooltip"); }else{ // console.log("No content for tooltip: " + this.bearer.selector); return; } if(this.content.charAt(0) == '#'){ if (this.options.delete_content){ var content = $(this.content).html(); $(this.content).html(''); this.content = content; delete content; } else{ $(this.content).hide(); this.content = $(this.content).html(); } this.contentType='html'; }else{ this.contentType='text'; } tooltipId = ""; if(this.bearer.attr("id") != ""){ tooltipId = "id='darktooltip-" + this.bearer.attr("id") + "'"; } //Create modal layer this.modalLayer = $("<ins class='darktooltip-modal-layer'></ins>"); //Create tooltip container this.tooltip = $("<ins " + tooltipId + " class = 'dark-tooltip " + this.options.theme + " " + this.options.size + " " + this.options.gravity + "'><div>" + this.content + "</div><div class = 'tip'></div></ins>"); this.tip = this.tooltip.find(".tip"); $("body").append(this.modalLayer); $("body").append(this.tooltip); //Adjust size for html tooltip if(this.contentType == 'html'){ this.tooltip.css('max-width','none'); } this.tooltip.css('opacity', this.options.opacity); this.addAnimation(); if(this.options.confirm){ this.addConfirm(); } }, setPositions: function(){ var leftPos = this.bearer.offset().left; var topPos = this.bearer.offset().top; switch(this.options.gravity){ case 'south': leftPos += this.bearer.outerWidth()/2 - this.tooltip.outerWidth()/2; topPos += -this.tooltip.outerHeight() - this.tip.outerHeight()/2; break; case 'west': leftPos += this.bearer.outerWidth() + this.tip.outerWidth()/2; topPos += this.bearer.outerHeight()/2 - (this.tooltip.outerHeight()/2); break; case 'north': leftPos += this.bearer.outerWidth()/2 - (this.tooltip.outerWidth()/2); topPos += this.bearer.outerHeight() + this.tip.outerHeight()/2; break; case 'east': leftPos += -this.tooltip.outerWidth() - this.tip.outerWidth()/2; topPos += this.bearer.outerHeight()/2 - this.tooltip.outerHeight()/2; break; } 
if(this.options.autoLeft){ this.tooltip.css('left', leftPos); } if(this.options.autoTop){ this.tooltip.css('top', topPos); } }, setEvents: function(){ var dt = this; var delay = dt.options.hoverDelay; var setTimeoutConst; if(dt.mouseOverMode){ this.bearer.mouseenter( function(){ //Timeout for hover mouse delay setTimeoutConst = setTimeout( function(){ dt.setPositions(); dt.show(); }, delay); }).mouseleave( function(){ clearTimeout(setTimeoutConst ); dt.hide(); }); }else if(this.options.trigger == "click" || this.options.trigger == "onclik"){ this.tooltip.click( function(e){ e.stopPropagation(); }); this.bearer.click( function(e){ e.preventDefault(); dt.setPositions(); dt.toggle(); e.stopPropagation(); }); $('html').click(function(){ dt.hide(); }) } }, activate: function(){ this.setContent(); if(this.content){ this.setEvents(); } }, addConfirm: function(){ this.tooltip.append("<ul class = 'confirm'><li class = 'darktooltip-yes'>" + this.options.yes +"</li><li class = 'darktooltip-no'>"+ this.options.no +"</li></ul>"); this.setConfirmEvents(); }, setConfirmEvents: function(){ var dt = this; this.tooltip.find('li.darktooltip-yes').click( function(e){ dt.onYes(); e.stopPropagation(); }); this.tooltip.find('li.darktooltip-no').click( function(e){ dt.onNo(); e.stopPropagation(); }); }, finalMessage: function(){ if(this.options.finalMessage){ var dt = this; dt.tooltip.find('div:first').html(this.options.finalMessage); dt.tooltip.find('ul').remove(); dt.setPositions(); setTimeout( function(){ dt.hide(); dt.setContent(); }, dt.options.finalMessageDuration); }else{ this.hide(); } }, onYes: function(){ this.options.onYes(this.bearer); this.finalMessage(); }, onNo: function(){ this.options.onNo(this.bearer); this.hide(); } } $.fn.darkTooltip = function(options) { return this.each(function(){ options = $.extend({}, $.fn.darkTooltip.defaults, options); var tooltip = new DarkTooltip($(this), options); tooltip.activate(); }); } $.fn.darkTooltip.defaults = { animation: 'none', confirm: false, content:'', finalMessage: '', finalMessageDuration: 1000, gravity: 'south', hoverDelay: 0, modal: false, no: 'No', onNo: function(){}, onYes: function(){}, opacity: 0.9, size: 'medium', theme: 'dark', trigger: 'hover', yes: 'Yes', autoTop: true, autoLeft: true, onClose: function(){} }; })(jQuery);
if(this.options.autoLeft){ this.tooltip.css('left', leftPos); } if(this.options.autoTop){ this.tooltip.css('top', topPos); } }, setEvents: function(){ var dt = this; var delay = dt.options.hoverDelay; var setTimeoutConst; if(dt.mouseOverMode){ this.bearer.mouseenter( function(){ //Timeout for hover mouse delay setTimeoutConst = setTimeout( function(){ dt.setPositions(); dt.show(); }, delay); }).mouseleave( function(){ clearTimeout(setTimeoutConst ); dt.hide(); }); }else if(this.options.trigger == "click" || this.options.trigger == "onclick"){ this.tooltip.click( function(e){ e.stopPropagation(); }); this.bearer.click( function(e){ e.preventDefault(); dt.setPositions(); dt.toggle(); e.stopPropagation(); }); $('html').click(function(){ dt.hide(); }) } }, activate: function(){ this.setContent(); if(this.content){ this.setEvents(); } }, addConfirm: function(){ this.tooltip.append("<ul class = 'confirm'><li class = 'darktooltip-yes'>" + this.options.yes +"</li><li class = 'darktooltip-no'>"+ this.options.no +"</li></ul>"); this.setConfirmEvents(); }, setConfirmEvents: function(){ var dt = this; this.tooltip.find('li.darktooltip-yes').click( function(e){ dt.onYes(); e.stopPropagation(); }); this.tooltip.find('li.darktooltip-no').click( function(e){ dt.onNo(); e.stopPropagation(); }); }, finalMessage: function(){ if(this.options.finalMessage){ var dt = this; dt.tooltip.find('div:first').html(this.options.finalMessage); dt.tooltip.find('ul').remove(); dt.setPositions(); setTimeout( function(){ dt.hide(); dt.setContent(); }, dt.options.finalMessageDuration); }else{ this.hide(); } }, onYes: function(){ this.options.onYes(this.bearer); this.finalMessage(); }, onNo: function(){ this.options.onNo(this.bearer); this.hide(); } } $.fn.darkTooltip = function(options) { return this.each(function(){ options = $.extend({}, $.fn.darkTooltip.defaults, options); var tooltip = new DarkTooltip($(this), options); tooltip.activate(); }); } $.fn.darkTooltip.defaults = { animation: 'none', confirm: false, content:'', finalMessage: '', finalMessageDuration: 1000, gravity: 'south', hoverDelay: 0, modal: false, no: 'No', onNo: function(){}, onYes: function(){}, opacity: 0.9, size: 'medium', theme: 'dark', trigger: 'hover', yes: 'Yes', autoTop: true, autoLeft: true, onClose: function(){} }; })(jQuery);
gtison/kf
src/main/resources/static/js/jquery.darktooltip.js
JavaScript
apache-2.0
6,675
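A minimal usage sketch for the plugin above, assuming jQuery and this file are already loaded; the selector, labels and handler body are made up for illustration, and every option shown comes from the $.fn.darkTooltip.defaults list (the tooltip text may alternatively come from the bearer's data-tooltip attribute instead of the content option):

$('.delete-button').darkTooltip({
  content: 'Remove this row?',
  trigger: 'click',        // open on click instead of the default hover
  gravity: 'north',        // positioned relative to the bearer, see setPositions()
  animation: 'fadeIn',
  confirm: true,           // renders the Yes/No list built by addConfirm()
  yes: 'Delete',
  no: 'Keep',
  onYes: function (bearer) {
    // bearer is the jQuery-wrapped element the tooltip is attached to
    bearer.closest('tr').remove();
  },
  finalMessage: 'Deleted',
  finalMessageDuration: 800
});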
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.regionserver.compactions; import java.util.List; import org.apache.hadoop.hbase.regionserver.StoreFile; public abstract class StoreFileListGenerator extends MockStoreFileGenerator implements Iterable<List<StoreFile>> { public static final int MAX_FILE_GEN_ITERS = 10; public static final int NUM_FILES_GEN = 1000; StoreFileListGenerator(final Class klass) { super(klass); } }
Guavus/hbase
hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/StoreFileListGenerator.java
Java
apache-2.0
1,237
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.schema.validation; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import org.apache.nifi.serialization.record.validation.SchemaValidationResult; import org.apache.nifi.serialization.record.validation.ValidationError; public class StandardSchemaValidationResult implements SchemaValidationResult { private final List<ValidationError> validationErrors = new ArrayList<>(); @Override public boolean isValid() { return validationErrors.isEmpty(); } @Override public Collection<ValidationError> getValidationErrors() { return Collections.unmodifiableList(validationErrors); } public void addValidationError(final ValidationError validationError) { this.validationErrors.add(validationError); } }
mcgilman/nifi
nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-standard-record-utils/src/main/java/org/apache/nifi/schema/validation/StandardSchemaValidationResult.java
Java
apache-2.0
1,648
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.action.admin.indices.forcemerge; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.AbstractBroadcastResponseTestCase; import org.elasticsearch.common.xcontent.XContentParser; import java.util.List; public class ForceMergeResponseTests extends AbstractBroadcastResponseTestCase<ForceMergeResponse> { @Override protected ForceMergeResponse createTestInstance(int totalShards, int successfulShards, int failedShards, List<DefaultShardOperationFailedException> failures) { return new ForceMergeResponse(totalShards, successfulShards, failedShards, failures); } @Override protected ForceMergeResponse doParseInstance(XContentParser parser) { return ForceMergeResponse.fromXContent(parser); } }
gfyoung/elasticsearch
server/src/test/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeResponseTests.java
Java
apache-2.0
1,679
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.configuration; import org.apache.ignite.internal.util.typedef.internal.S; /** * ODBC configuration. * <p> * Deprecated as of Apache Ignite 2.1. Please use {@link ClientConnectorConfiguration} and * {@link IgniteConfiguration#setClientConnectorConfiguration(ClientConnectorConfiguration)} instead. */ @Deprecated public class OdbcConfiguration { /** Default TCP host. */ public static final String DFLT_TCP_HOST = "0.0.0.0"; /** Default minimum TCP port range value. */ public static final int DFLT_TCP_PORT_FROM = 10800; /** Default maximum TCP port range value. */ public static final int DFLT_TCP_PORT_TO = 10810; /** Default socket send and receive buffer size. */ public static final int DFLT_SOCK_BUF_SIZE = 0; /** Default max number of open cursors per connection. */ public static final int DFLT_MAX_OPEN_CURSORS = 128; /** Default size of thread pool. */ public static final int DFLT_THREAD_POOL_SIZE = IgniteConfiguration.DFLT_PUBLIC_THREAD_CNT; /** Endpoint address. */ private String endpointAddr; /** Socket send buffer size. */ private int sockSndBufSize = DFLT_SOCK_BUF_SIZE; /** Socket receive buffer size. */ private int sockRcvBufSize = DFLT_SOCK_BUF_SIZE; /** Max number of opened cursors per connection. */ private int maxOpenCursors = DFLT_MAX_OPEN_CURSORS; /** Thread pool size. */ private int threadPoolSize = DFLT_THREAD_POOL_SIZE; /** * Creates ODBC server configuration with all default values. */ public OdbcConfiguration() { // No-op. } /** * Creates ODBC server configuration by copying all properties from given configuration. * * @param cfg ODBC server configuration. */ public OdbcConfiguration(OdbcConfiguration cfg) { assert cfg != null; endpointAddr = cfg.getEndpointAddress(); maxOpenCursors = cfg.getMaxOpenCursors(); sockRcvBufSize = cfg.getSocketReceiveBufferSize(); sockSndBufSize = cfg.getSocketSendBufferSize(); threadPoolSize = cfg.getThreadPoolSize(); } /** * Get ODBC endpoint address. Ignite will listen for incoming TCP connections on this address. Either single port * or port range could be used. In the latter case Ignite will start listening on the first available port * form the range. * <p> * The following address formats are permitted: * <ul> * <li>{@code hostname} - will use provided hostname and default port range;</li> * <li>{@code hostname:port} - will use provided hostname and port;</li> * <li>{@code hostname:port_from..port_to} - will use provided hostname and port range.</li> * </ul> * <p> * When set to {@code null}, ODBC processor will be bound to {@link #DFLT_TCP_HOST} host and default port range. * <p> * Default port range is from {@link #DFLT_TCP_PORT_FROM} to {@link #DFLT_TCP_PORT_TO}. * * @return ODBC endpoint address. 
*/ public String getEndpointAddress() { return endpointAddr; } /** * Set ODBC endpoint address. See {@link #getEndpointAddress()} for more information. * * @param addr ODBC endpoint address. * @return This instance for chaining. */ public OdbcConfiguration setEndpointAddress(String addr) { this.endpointAddr = addr; return this; } /** * Gets maximum number of opened cursors per connection. * <p> * Defaults to {@link #DFLT_MAX_OPEN_CURSORS}. * * @return Maximum number of opened cursors. */ public int getMaxOpenCursors() { return maxOpenCursors; } /** * Sets maximum number of opened cursors per connection. See {@link #getMaxOpenCursors()}. * * @param maxOpenCursors Maximum number of opened cursors. * @return This instance for chaining. */ public OdbcConfiguration setMaxOpenCursors(int maxOpenCursors) { this.maxOpenCursors = maxOpenCursors; return this; } /** * Gets socket send buffer size. When set to zero, operation system default will be used. * <p> * Defaults to {@link #DFLT_SOCK_BUF_SIZE} * * @return Socket send buffer size in bytes. */ public int getSocketSendBufferSize() { return sockSndBufSize; } /** * Sets socket send buffer size. See {@link #getSocketSendBufferSize()} for more information. * * @param sockSndBufSize Socket send buffer size in bytes. * @return This instance for chaining. */ public OdbcConfiguration setSocketSendBufferSize(int sockSndBufSize) { this.sockSndBufSize = sockSndBufSize; return this; } /** * Gets socket receive buffer size. When set to zero, operation system default will be used. * <p> * Defaults to {@link #DFLT_SOCK_BUF_SIZE}. * * @return Socket receive buffer size in bytes. */ public int getSocketReceiveBufferSize() { return sockRcvBufSize; } /** * Sets socket receive buffer size. See {@link #getSocketReceiveBufferSize()} for more information. * * @param sockRcvBufSize Socket receive buffer size in bytes. * @return This instance for chaining. */ public OdbcConfiguration setSocketReceiveBufferSize(int sockRcvBufSize) { this.sockRcvBufSize = sockRcvBufSize; return this; } /** * Size of thread pool that is in charge of processing ODBC tasks. * <p> * Defaults {@link #DFLT_THREAD_POOL_SIZE}. * * @return Thread pool that is in charge of processing ODBC tasks. */ public int getThreadPoolSize() { return threadPoolSize; } /** * Sets thread pool that is in charge of processing ODBC tasks. See {@link #getThreadPoolSize()} for more * information. * * @param threadPoolSize Thread pool that is in charge of processing ODBC tasks. * @return This instance for chaining. */ public OdbcConfiguration setThreadPoolSize(int threadPoolSize) { this.threadPoolSize = threadPoolSize; return this; } /** {@inheritDoc} */ @Override public String toString() { return S.toString(OdbcConfiguration.class, this); } }
alexzaitzev/ignite
modules/core/src/main/java/org/apache/ignite/configuration/OdbcConfiguration.java
Java
apache-2.0
7,131
#!/usr/bin/env python3 # Copyright (c) 2017-2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test logic for setting nMinimumChainWork on command line. Nodes don't consider themselves out of "initial block download" until their active chain has more work than nMinimumChainWork. Nodes don't download blocks from a peer unless the peer's best known block has more work than nMinimumChainWork. While in initial block download, nodes won't relay blocks to their peers, so test that this parameter functions as intended by verifying that block relay only succeeds past a given node once its nMinimumChainWork has been exceeded. """ import time from test_framework.test_framework import BitcoinTestFramework from test_framework.util import connect_nodes, assert_equal # 2 hashes required per regtest block (with no difficulty adjustment) REGTEST_WORK_PER_BLOCK = 2 class MinimumChainWorkTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 3 self.extra_args = [[], ["-minimumchainwork=0x65"], ["-minimumchainwork=0x65"]] self.node_min_work = [0, 101, 101] def setup_network(self): # This test relies on the chain setup being: # node0 <- node1 <- node2 # Before leaving IBD, nodes prefer to download blocks from outbound # peers, so ensure that we're mining on an outbound peer and testing # block relay to inbound peers. self.setup_nodes() for i in range(self.num_nodes-1): connect_nodes(self.nodes[i+1], i) def run_test(self): # Start building a chain on node0. node2 shouldn't be able to sync until node1's # minchainwork is exceeded starting_chain_work = REGTEST_WORK_PER_BLOCK # Genesis block's work self.log.info("Testing relay across node %d (minChainWork = %d)", 1, self.node_min_work[1]) starting_blockcount = self.nodes[2].getblockcount() num_blocks_to_generate = int((self.node_min_work[1] - starting_chain_work) / REGTEST_WORK_PER_BLOCK) self.log.info("Generating %d blocks on node0", num_blocks_to_generate) hashes = self.nodes[0].generatetoaddress(num_blocks_to_generate, self.nodes[0].get_deterministic_priv_key().address) self.log.info("Node0 current chain work: %s", self.nodes[0].getblockheader(hashes[-1])['chainwork']) # Sleep a few seconds and verify that node2 didn't get any new blocks # or headers. We sleep, rather than sync_blocks(node0, node1) because # it's reasonable either way for node1 to get the blocks, or not get # them (since they're below node1's minchainwork). time.sleep(3) self.log.info("Verifying node 2 has no more blocks than before") self.log.info("Blockcounts: %s", [n.getblockcount() for n in self.nodes]) # Node2 shouldn't have any new headers yet, because node1 should not # have relayed anything. assert_equal(len(self.nodes[2].getchaintips()), 1) assert_equal(self.nodes[2].getchaintips()[0]['height'], 0) assert self.nodes[1].getbestblockhash() != self.nodes[0].getbestblockhash() assert_equal(self.nodes[2].getblockcount(), starting_blockcount) self.log.info("Generating one more block") self.nodes[0].generatetoaddress(1, self.nodes[0].get_deterministic_priv_key().address) self.log.info("Verifying nodes are all synced") # Because nodes in regtest are all manual connections (eg using # addnode), node1 should not have disconnected node0. If not for that, # we'd expect node1 to have disconnected node0 for serving an # insufficient work chain, in which case we'd need to reconnect them to # continue the test. 
self.sync_all() self.log.info("Blockcounts: %s", [n.getblockcount() for n in self.nodes]) if __name__ == '__main__': MinimumChainWorkTest().main()
afk11/bitcoin
test/functional/feature_minchainwork.py
Python
mit
4,122
import * as Boom from '@hapi/boom'; import * as http from 'http'; import * as https from 'https'; export interface Host { name: string; port: number; } export interface CustomRequest { authorization: string; contentType: string; host: string; method: string; port: number; url: string; } export interface ParseRequestOptions { host?: string | undefined; hostHeaderName?: string | undefined; name?: string | undefined; port?: number | undefined; } export const version: string; export const limits: { /** Limit the length of uris and headers to avoid a DoS attack on string matching */ maxMatchLength: number; }; export function now(localtimeOffsetMsec: number): number; export function nowSecs(localtimeOffsetMsec: number): number; export function parseAuthorizationHeader(header: string, keys?: string[]): Record<string, string>; export function parseContentType(header?: string): string; export function parseHost(req: http.RequestOptions | https.RequestOptions, hostHeaderName?: string): Host | null; export function parseRequest( req: http.RequestOptions | https.RequestOptions, options?: ParseRequestOptions, ): CustomRequest; export function unauthorized( message?: string, attributes?: string | Boom.unauthorized.Attributes, ): Boom.Boom & Boom.unauthorized.MissingAuth;
markogresak/DefinitelyTyped
types/hawk/lib/utils.d.ts
TypeScript
mit
1,362
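These are type declarations only; the runtime implementation ships with the hawk package itself. A hedged sketch of calling parseRequest from plain Node, assuming @hapi/hawk exposes this module as Hawk.utils with the signatures typed above (the port value and header name are illustrative):

const Http = require('http');
const Hawk = require('@hapi/hawk');

Http.createServer(function (req, res) {
  // Normalise the incoming request into the CustomRequest shape typed above,
  // preferring a proxy-supplied host header when one is present.
  const request = Hawk.utils.parseRequest(req, { hostHeaderName: 'x-forwarded-host', port: 443 });
  console.log(request.method, request.host, request.port, request.url);
  res.end('ok');
}).listen(8000);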
/* * Copyright (c) 1996, 2005, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ public class X11CNS11643P2 extends X11CNS11643 { public X11CNS11643P2() { super(2, "X11CNS11643P2"); } }
rokn/Count_Words_2015
testing/openjdk/jdk/test/sun/nio/cs/X11CNS11643P2.java
Java
mit
1,335
module.exports = function(client, test) { test.ok(typeof client == 'object'); this.testPageAction = function() { return this; }; };
miguelangel6/nightwatchbamboo
tests/extra/pageobjects/SimplePageFn.js
JavaScript
mit
142
package builder import ( "os" "github.com/spf13/cobra" kcmdutil "k8s.io/kubernetes/pkg/kubectl/cmd/util" "github.com/openshift/origin/pkg/build/builder/cmd" ocmd "github.com/openshift/origin/pkg/cmd/cli/cmd" "github.com/openshift/origin/pkg/cmd/templates" ) var ( s2iBuilderLong = templates.LongDesc(` Perform a Source-to-Image build This command executes a Source-to-Image build using arguments passed via the environment. It expects to be run inside of a container.`) dockerBuilderLong = templates.LongDesc(` Perform a Docker build This command executes a Docker build using arguments passed via the environment. It expects to be run inside of a container.`) ) // NewCommandS2IBuilder provides a CLI handler for S2I build type func NewCommandS2IBuilder(name string) *cobra.Command { cmd := &cobra.Command{ Use: name, Short: "Run a Source-to-Image build", Long: s2iBuilderLong, Run: func(c *cobra.Command, args []string) { err := cmd.RunS2IBuild(c.OutOrStderr()) kcmdutil.CheckErr(err) }, } cmd.AddCommand(ocmd.NewCmdVersion(name, nil, os.Stdout, ocmd.VersionOptions{})) return cmd } // NewCommandDockerBuilder provides a CLI handler for Docker build type func NewCommandDockerBuilder(name string) *cobra.Command { cmd := &cobra.Command{ Use: name, Short: "Run a Docker build", Long: dockerBuilderLong, Run: func(c *cobra.Command, args []string) { err := cmd.RunDockerBuild(c.OutOrStderr()) kcmdutil.CheckErr(err) }, } cmd.AddCommand(ocmd.NewCmdVersion(name, nil, os.Stdout, ocmd.VersionOptions{})) return cmd }
rawlingsj/gofabric8
vendor/github.com/openshift/origin/pkg/cmd/infra/builder/builder.go
GO
apache-2.0
1,590
/* * QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals. * Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ using System; using System.Threading; namespace QuantConnect.Util { /// <summary> /// Provides extension methods to make working with the <see cref="ReaderWriterLockSlim"/> class easier /// </summary> public static class ReaderWriterLockSlimExtensions { /// <summary> /// Opens the read lock /// </summary> /// <param name="readerWriterLockSlim">The lock to open for read</param> /// <returns>A disposable reference which will release the lock upon disposal</returns> public static IDisposable Read(this ReaderWriterLockSlim readerWriterLockSlim) { return new ReaderLockToken(readerWriterLockSlim); } /// <summary> /// Opens the write lock /// </summary> /// <param name="readerWriterLockSlim">The lock to open for write</param> /// <returns>A disposable reference which will release the lock upon disposal</returns> public static IDisposable Write(this ReaderWriterLockSlim readerWriterLockSlim) { return new WriteLockToken(readerWriterLockSlim); } private sealed class ReaderLockToken : ReaderWriterLockSlimToken { public ReaderLockToken(ReaderWriterLockSlim readerWriterLockSlim) : base(readerWriterLockSlim) { } protected override void EnterLock(ReaderWriterLockSlim readerWriterLockSlim) { readerWriterLockSlim.EnterReadLock(); } protected override void ExitLock(ReaderWriterLockSlim readerWriterLockSlim) { readerWriterLockSlim.ExitReadLock(); } } private sealed class WriteLockToken : ReaderWriterLockSlimToken { public WriteLockToken(ReaderWriterLockSlim readerWriterLockSlim) : base(readerWriterLockSlim) { } protected override void EnterLock(ReaderWriterLockSlim readerWriterLockSlim) { readerWriterLockSlim.EnterWriteLock(); } protected override void ExitLock(ReaderWriterLockSlim readerWriterLockSlim) { readerWriterLockSlim.ExitWriteLock(); } } private abstract class ReaderWriterLockSlimToken : IDisposable { private ReaderWriterLockSlim _readerWriterLockSlim; public ReaderWriterLockSlimToken(ReaderWriterLockSlim readerWriterLockSlim) { _readerWriterLockSlim = readerWriterLockSlim; // ReSharper disable once DoNotCallOverridableMethodsInConstructor -- we control the subclasses, this is fine EnterLock(_readerWriterLockSlim); } protected abstract void EnterLock(ReaderWriterLockSlim readerWriterLockSlim); protected abstract void ExitLock(ReaderWriterLockSlim readerWriterLockSlim); public void Dispose() { if (_readerWriterLockSlim != null) { ExitLock(_readerWriterLockSlim); _readerWriterLockSlim = null; } } } } }
young-zhang/Lean
Common/Util/ReaderWriterLockSlimExtensions.cs
C#
apache-2.0
3,963
/* * Copyright 2002-2014 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.security.test.web.servlet.showcase.secured; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder; import org.springframework.security.config.annotation.web.builders.HttpSecurity; import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter; import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; import org.springframework.security.core.userdetails.UserDetailsService; import org.springframework.security.test.context.support.WithUserDetails; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import org.springframework.test.context.web.WebAppConfiguration; import org.springframework.test.web.servlet.MockMvc; import org.springframework.test.web.servlet.setup.MockMvcBuilders; import org.springframework.web.context.WebApplicationContext; import org.springframework.web.servlet.config.annotation.EnableWebMvc; import static org.springframework.security.test.web.servlet.response.SecurityMockMvcResultMatchers.authenticated; import static org.springframework.security.test.web.servlet.setup.SecurityMockMvcConfigurers.springSecurity; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration(classes = WithUserDetailsClassLevelAuthenticationTests.Config.class) @WebAppConfiguration @WithUserDetails("admin") public class WithUserDetailsClassLevelAuthenticationTests { @Autowired private WebApplicationContext context; private MockMvc mvc; @Before public void setup() { mvc = MockMvcBuilders.webAppContextSetup(context).apply(springSecurity()).build(); } @Test public void requestRootUrlWithAdmin() throws Exception { mvc.perform(get("/")) // Ensure we got past Security .andExpect(status().isNotFound()) // Ensure it appears we are authenticated with user .andExpect( authenticated().withUsername("admin").withRoles("ADMIN", "USER")); } @Test public void requestProtectedUrlWithAdmin() throws Exception { mvc.perform(get("/admin")) // Ensure we got past Security .andExpect(status().isNotFound()) // Ensure it appears we are authenticated with user .andExpect( authenticated().withUsername("admin").withRoles("ADMIN", "USER")); } @EnableWebSecurity @EnableWebMvc static class Config extends WebSecurityConfigurerAdapter { // @formatter:off @Override protected void configure(HttpSecurity http) throws Exception { http .authorizeRequests() 
.antMatchers("/admin/**").hasRole("ADMIN") .anyRequest().authenticated() .and() .formLogin(); } // @formatter:on @Bean @Override public UserDetailsService userDetailsServiceBean() throws Exception { return super.userDetailsServiceBean(); } // @formatter:off @Autowired public void configureGlobal(AuthenticationManagerBuilder auth) throws Exception { auth .inMemoryAuthentication() .withUser("user").password("password").roles("USER").and() .withUser("admin").password("password").roles("USER","ADMIN"); } // @formatter:on } }
ractive/spring-security
test/src/test/java/org/springframework/security/test/web/servlet/showcase/secured/WithUserDetailsClassLevelAuthenticationTests.java
Java
apache-2.0
4,216
// runoutput // Copyright 2017 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package main import "fmt" // Check that expressions like (c*n + d*(n+k)) get correctly merged by // the compiler into (c+d)*n + d*k (with c+d and d*k computed at // compile time). // // The merging is performed by a combination of the multiplication // merge rules // (c*n + d*n) -> (c+d)*n // and the distributive multiplication rules // c * (d+x) -> c*d + c*x // Generate a MergeTest that looks like this: // // a8, b8 = m1*n8 + m2*(n8+k), (m1+m2)*n8 + m2*k // if a8 != b8 { // // print error msg and panic // } func makeMergeAddTest(m1, m2, k int, size string) string { model := " a" + size + ", b" + size model += fmt.Sprintf(" = %%d*n%s + %%d*(n%s+%%d), (%%d+%%d)*n%s + (%%d*%%d)", size, size, size) test := fmt.Sprintf(model, m1, m2, k, m1, m2, m2, k) test += fmt.Sprintf(` if a%s != b%s { fmt.Printf("MergeAddTest(%d, %d, %d, %s) failed\n") fmt.Printf("%%d != %%d\n", a%s, b%s) panic("FAIL") } `, size, size, m1, m2, k, size, size, size) return test + "\n" } // Check that expressions like (c*n - d*(n+k)) get correctly merged by // the compiler into (c-d)*n - d*k (with c-d and d*k computed at // compile time). // // The merging is performed by a combination of the multiplication // merge rules // (c*n - d*n) -> (c-d)*n // and the distributive multiplication rules // c * (d-x) -> c*d - c*x // Generate a MergeTest that looks like this: // // a8, b8 = m1*n8 - m2*(n8+k), (m1-m2)*n8 - m2*k // if a8 != b8 { // // print error msg and panic // } func makeMergeSubTest(m1, m2, k int, size string) string { model := " a" + size + ", b" + size model += fmt.Sprintf(" = %%d*n%s - %%d*(n%s+%%d), (%%d-%%d)*n%s - (%%d*%%d)", size, size, size) test := fmt.Sprintf(model, m1, m2, k, m1, m2, m2, k) test += fmt.Sprintf(` if a%s != b%s { fmt.Printf("MergeSubTest(%d, %d, %d, %s) failed\n") fmt.Printf("%%d != %%d\n", a%s, b%s) panic("FAIL") } `, size, size, m1, m2, k, size, size, size) return test + "\n" } func makeAllSizes(m1, m2, k int) string { var tests string tests += makeMergeAddTest(m1, m2, k, "8") tests += makeMergeAddTest(m1, m2, k, "16") tests += makeMergeAddTest(m1, m2, k, "32") tests += makeMergeAddTest(m1, m2, k, "64") tests += makeMergeSubTest(m1, m2, k, "8") tests += makeMergeSubTest(m1, m2, k, "16") tests += makeMergeSubTest(m1, m2, k, "32") tests += makeMergeSubTest(m1, m2, k, "64") tests += "\n" return tests } func main() { fmt.Println(`package main import "fmt" var n8 int8 = 42 var n16 int16 = 42 var n32 int32 = 42 var n64 int64 = 42 func main() { var a8, b8 int8 var a16, b16 int16 var a32, b32 int32 var a64, b64 int64 `) fmt.Println(makeAllSizes(03, 05, 0)) // 3*n + 5*n fmt.Println(makeAllSizes(17, 33, 0)) fmt.Println(makeAllSizes(80, 45, 0)) fmt.Println(makeAllSizes(32, 64, 0)) fmt.Println(makeAllSizes(7, 11, +1)) // 7*n + 11*(n+1) fmt.Println(makeAllSizes(9, 13, +2)) fmt.Println(makeAllSizes(11, 16, -1)) fmt.Println(makeAllSizes(17, 9, -2)) fmt.Println("}") }
codestation/go
test/mergemul.go
GO
bsd-3-clause
3,236
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Linq.Expressions; namespace System.Dynamic { /// <summary> /// Represents a dynamic object, that can have its operations bound at runtime. /// </summary> /// <remarks> /// Objects that want to participate in the binding process should implement an IDynamicMetaObjectProvider interface, /// and implement <see cref="IDynamicMetaObjectProvider.GetMetaObject" /> to return a <see cref="DynamicMetaObject" />. /// </remarks> public interface IDynamicMetaObjectProvider { /// <summary> /// Returns the <see cref="DynamicMetaObject" /> responsible for binding operations performed on this object. /// </summary> /// <param name="parameter">The expression tree representation of the runtime value.</param> /// <returns>The <see cref="DynamicMetaObject" /> to bind this object.</returns> DynamicMetaObject GetMetaObject(Expression parameter); } }
iamjasonp/corefx
src/System.Dynamic.Runtime/src/System/Dynamic/IDynamicMetaObjectProvider.cs
C#
mit
1,145
// tslint:disable:no-unnecessary-generics import { ComponentType } from 'react'; import { EditorColor } from '../../'; declare namespace withColorContext { interface Props { colors: EditorColor[]; disableCustomColors: boolean; hasColorsToChoose: boolean; } } // prettier-ignore declare function withColorContext< ProvidedProps extends Partial<withColorContext.Props>, OwnProps extends any = any, T extends ComponentType<ProvidedProps & OwnProps> = ComponentType<ProvidedProps & OwnProps> >(component: T): T extends ComponentType<infer U> ? ComponentType< Omit<U, 'colors' | 'disableCustomColors' | 'hasColorsToChoose'> & Omit<ProvidedProps, 'hasColorsToChoose'>> : never; export default withColorContext;
markogresak/DefinitelyTyped
types/wordpress__block-editor/components/color-palette/with-color-context.d.ts
TypeScript
mit
777
<?php /* $Id$ */ /** * translated by: Pietro Danesi <danone at users.sourceforge.net> 2002-03-29 * Revised by: "DPhantom" <dphantom at users.sourceforge.net> 2002-04-16 * Revised by: "Luca Rebellato" <rebeluca at users.sourceforge.net> 2007-07-26 */ $charset = 'iso-8859-1'; $text_dir = 'ltr'; $number_thousands_separator = '.'; $number_decimal_separator = ','; // shortcuts for Byte, Kilo, Mega, Giga, Tera, Peta, Exa $byteUnits = array('B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB'); $day_of_week = array('Dom', 'Lun', 'Mar', 'Mer', 'Gio', 'Ven', 'Sab'); //italian days $month = array('Gen', 'Feb', 'Mar', 'Apr', 'Mag', 'Giu', 'Lug', 'Ago', 'Set', 'Ott', 'Nov', 'Dic'); //italian months // See http://www.php.net/manual/en/function.strftime.php to define the // variable below $datefmt = '%d %B, %Y at %I:%M %p'; //italian time $timespanfmt = '%s giorni, %s ore, %s minuti e %s secondi'; $strAbortedClients = 'Fallito'; $strAccessDenied = 'Accesso negato'; $strAccessDeniedCreateConfig = 'La ragione di questo è che probabilmente non hai creato alcun file di configurazione. Potresti voler usare %1$ssetup script%2$s per crearne uno.'; $strAccessDeniedExplanation = 'phpMyAdmin ha provato a connettersi al server MySQL, e il server ha rifiutato la connessione. Si dovrebbe controllare il nome dell\'host, l\'username e la password nel file config.inc.php ed assicurarsi che corrispondano alle informazioni fornite dall\'amministratore del server MySQL.'; $strAction = 'Azione'; $strAddAutoIncrement = 'Aggiungi valore AUTO_INCREMENT'; $strAddClause = 'Aggiungi %s'; $strAddConstraints = 'Aggiungi vincoli'; $strAddDeleteColumn = 'Aggiungi/Cancella campo'; $strAddDeleteRow = 'Aggiungi/Cancella criterio'; $strAddFields = 'Aggiungi %s campo(i)'; $strAddHeaderComment = 'Aggiunge un commento personalizzato all\'header (\\n per tornare a capo)'; $strAddIntoComments = 'Aggiungi nei commenti'; $strAddNewField = 'Aggiungi un nuovo campo'; $strAddPrivilegesOnDb = 'Aggiungi privilegi sul seguente database'; $strAddPrivilegesOnTbl = 'Aggiungi privilegi sulla seguente tabella'; $strAddSearchConditions = 'Aggiungi condizioni di ricerca (corpo della clausola "where"):'; $strAddToIndex = 'Aggiungi all\'indice&nbsp;%s&nbsp;colonna/e'; $strAddUser = 'Aggiungi un nuovo utente'; $strAddUserMessage = 'Hai aggiunto un nuovo utente.'; $strAdministration = 'Amministrazione'; $strAffectedRows = 'Righe interessate:'; $strAfter = 'Dopo %s'; $strAfterInsertBack = 'Indietro'; $strAfterInsertNewInsert = 'Inserisci un nuovo record'; $strAfterInsertNext = 'Modifica il record successivo'; $strAfterInsertSame = 'Torna a questa pagina'; $strAllowInterrupt = 'Permette di interrompere il processo di importazione nel caso lo script rilevi che è troppo vicino al tempo limite. Questo potrebbe essere un buon modo di importare grandi file, tuttavia potrebbe interrompere la transazione.'; $strAllTableSameWidth = 'mostra tutte le Tabelle con la stessa larghezza?'; $strAll = 'Tutti'; $strAlterOrderBy = 'Altera tabella ordinata per'; $strAnalyzeTable = 'Analizza tabella'; $strAnd = 'e'; $strAndThen = 'e quindi'; $strAngularLinks = 'Link angolari'; $strAnIndex = 'Un indice è stato aggiunto in %s'; $strAnyHost = 'Qualsiasi host'; $strAny = 'Qualsiasi'; $strAnyUser = 'Qualsiasi utente'; $strApproximateCount = 'Può essere approssimato. 
Vedere FAQ 3.11'; $strAPrimaryKey = 'Una chiave primaria è stata aggiunta in %s'; $strArabic = 'Arabo'; $strArmenian = 'Armeno'; $strAscending = 'Crescente'; $strAtBeginningOfTable = 'All\'inizio della tabella'; $strAtEndOfTable = 'Alla fine della tabella'; $strAttr = 'Attributi'; $strAutomaticLayout = 'Impaginazione automatica'; $strBack = 'Indietro'; $strBaltic = 'Baltico'; $strBeginCut = 'INIZIO CUT'; $strBeginRaw = 'INIZIO RAW'; $strBinary = 'Binario'; $strBinaryDoNotEdit = 'Tipo di dato Binario - non modificare'; $strBinaryLog = 'Log binario'; $strBinLogEventType = 'Tipo di evento'; $strBinLogInfo = 'Informazioni'; $strBinLogName = 'Nome del Log'; $strBinLogOriginalPosition = 'Posizione originale'; $strBinLogPosition = 'Posizione'; $strBinLogServerId = 'ID del server'; $strBookmarkAllUsers = 'Permetti ad ogni utente di accedere a questo bookmark'; $strBookmarkCreated = 'Segnalibro %s creato'; $strBookmarkDeleted = 'Il bookmark è stato cancellato.'; $strBookmarkLabel = 'Etichetta'; $strBookmarkQuery = 'Query SQL aggiunte ai preferiti'; $strBookmarkReplace = 'Sostituzione dei segnalibri esistenti con lo stesso nome'; $strBookmarkThis = 'Aggiungi ai preferiti questa query SQL'; $strBookmarkView = 'Visualizza solo'; $strBrowseDistinctValues = 'Naviga tra i valori DISTINCT'; $strBrowseForeignValues = 'Sfoglia le opzioni straniere'; $strBrowse = 'Mostra'; $strBufferPoolActivity = 'Attività del Buffer Pool'; $strBufferPool = 'Buffer Pool'; $strBufferPoolUsage = 'Utilizzo del Buffer Pool'; $strBufferReadMissesInPercent = 'Non letto in %'; $strBufferReadMisses = 'Non letto'; $strBufferWriteWaits = 'In attesa di scrittura'; $strBufferWriteWaitsInPercent = 'In attesa di scrittura in %'; $strBulgarian = 'Bulgaro'; $strBusyPages = 'Pagine occupate'; $strBzError = 'phpMyAdmin non è capace di comprimere il dump a causa dell\'estensione Bz2 errata in questa versione di PHP. Vi raccomandiamo vivamente di settare il parametro <code>$cfg[\'BZipDump\']</code> nel vostro file di configurazione di phpMyAdmin a <code>FALSE</code>. Se volete utilizzare le capacità di compressione Bz2, dovreste aggiornare il PHP all\'ultima versione. Date un\'occhiata al bug report %s per uteriori dettagli.'; $strBzip = '"compresso con bzip2"'; $strCalendar = 'Calendario'; $strCancel = 'Annulla'; $strCanNotLoadExportPlugins = 'Non posso caricare i plugins di esportazione. Controlla l\'installazione!'; $strCanNotLoadImportPlugins = 'Non posso caricare i plugins di importazione, controlla la tua configurazione!'; $strCannotLogin = 'Impossibile eseguire il login nel server MySQL'; $strCantLoad = 'Impossibile caricare l\'estensione %s,<br />prego controllare la configurazione di PHP'; $strCantLoadRecodeIconv = 'Impossibile caricare l\'estensione iconv o recode necessaria per la conversione del set di caratteri, configurare il PHP per permettere di utilizzare queste estenzioni o disabilitare la conversione dei set di caratteri in phpMyAdmin.'; $strCantRenameIdxToPrimary = 'Impossibile rinominare l\'indice a PRIMARIO!'; $strCantUseRecodeIconv = 'Impossibile utilizzare le funzioni iconv o libiconv o recode_string in quanto l\'estensione deve essere caricata. Controllare la configurazione del PHP.'; $strCardinality = 'Cardinalità'; $strCaseInsensitive = 'case-insensitive'; $strCaseSensitive = 'case-sensitive'; $strCentralEuropean = 'Europeo Centrale'; $strChangeCopyModeCopy = '... mantieni quello vecchio.'; $strChangeCopyMode = 'Crea un nuovo utente con gli stessi privilegi e ...'; $strChangeCopyModeDeleteAndReload = ' ... 
cancella quello vecchio dalla tabella degli utenti e in seguito ricarica i privilegi.'; $strChangeCopyModeJustDelete = ' ... cancella quello vecchio dalla tabella degli utenti.'; $strChangeCopyModeRevoke = ' ... revoca tutti i privilegi attivi da quello vecchio e in seguito cancellalo.'; $strChangeCopyUser = 'Cambia le Informazioni di Login / Copia Utente'; $strChangeDisplay = 'Scegli il campo da mostrare'; $strChange = 'Modifica'; $strChangePassword = 'Cambia password'; $strCharsetOfFile = 'Set di caratteri del file:'; $strCharsetsAndCollations = 'Set di Caratteri e Collations'; $strCharset = 'Set di caratteri'; $strCharsets = 'Set di caratteri'; $strCheckAll = 'Seleziona tutti'; $strCheckOverhead = 'Controllo addizionale'; $strCheckPrivs = 'Controlla i privilegi'; $strCheckPrivsLong = 'Controlla i privilegi per il database &quot;%s&quot;.'; $strCheckTable = 'Controlla tabella'; $strChoosePage = 'Prego scegliere una Page da modificare'; $strColComFeat = 'Visualizzazione commenti delle colonne'; $strCollation = 'Collation'; $strColumnNames = 'Nomi delle colonne'; $strColumnPrivileges = 'Privilegi relativi alle colonne'; $strCommand = 'Comando'; $strComments = 'Commenti'; $strCommentsForTable = 'Commenti per la tabella'; $strCompatibleHashing = 'Compatibile con MySQL 4.0'; $strCompleteInserts = 'Inserimenti completi'; $strCompression = 'Compressione'; $strCompressionWillBeDetected = 'Il tipo di compressione del file importato sarà automaticamente rilevato da: %s'; $strConfigDefaultFileError = 'Non posso leggere la configurazione da: "%1$s"'; $strConfigFileError = 'phpMyAdmin non riesce a leggere il file di configurazione!<br />Questo può accadere se il php trova un parse error in esso oppure il php non trova il file.<br />Richiamate il file di configurazione direttamente utilizzando il link sotto e leggete il/i messaggio/i di errore del php che ricevete. Nella maggior parte dei casi ci sono un apostrofo o una virgoletta mancanti.<br />Se ricevete una pagina bianca, allora è tutto a posto.'; $strConfigureTableCoord = 'Prego, configurare le coordinate per la tabella %s'; $strConnectionError = 'Impossibile connettersi: impostazioni non valide.'; $strConnections = 'Connessioni'; $strConstraintsForDumped = 'Limiti per le tabelle scaricate'; $strConstraintsForTable = 'Limiti per la tabella'; $strControluserFailed = 'Connessione per controluser come definito nella configurazione fallita.'; $strCookiesRequired = 'Da questo punto in poi, i cookies devono essere abilitati.'; $strCopy = 'Copia'; $strCopyDatabaseOK = 'Il Database %s è stato copiato in %s'; $strCopyTable = 'Copia la tabella nel (database<b>.</b>tabella):'; $strCopyTableOK = 'La tabella %s è stata copiata su %s.'; $strCopyTableSameNames = 'Impossibile copiare la tabella su se stessa!'; $strCouldNotKill = 'phpMyAdmin non è in grado di terminare il thread %s. 
Probabilmente è già stato terminato.'; $strCreate = 'Crea'; $strCreateDatabaseBeforeCopying = 'CREATE DATABASE prima di copiare'; $strCreateIndex = 'Crea un indice su&nbsp;%s&nbsp;colonne'; $strCreateIndexTopic = 'Crea un nuovo indice'; $strCreateNewDatabase = 'Crea un nuovo database'; $strCreateNewTable = 'Crea una nuova tabella nel database %s'; $strCreatePage = 'Crea una nuova pagina'; $strCreatePdfFeat = 'Creazione di PDF'; $strCreateRelation = 'Crea relazioni'; $strCreateTable = 'Crea tabelle'; $strCreateUserDatabase = 'Database per l\'utente'; $strCreateUserDatabaseName = 'Crea un database con lo stesso nome e concedi tutti i privilegi'; $strCreateUserDatabaseNone = 'None'; $strCreateUserDatabaseWildcard = 'Concedi tutti i privilegi al nome con caratteri jolly (username\_%)'; $strCreationDates = 'Creazione/Aggiornamento/Controllo date'; $strCriteria = 'Criterio'; $strCroatian = 'Croato'; $strCSV = 'CSV'; $strCyrillic = 'Cirillico'; $strCzech = 'Ceco'; $strCzechSlovak = 'Ceco-Slovacco'; $strDanish = 'Danese'; $strDatabase = 'Database'; $strDatabaseEmpty = 'Il nome del DataBase è vuoto!'; $strDatabaseExportOptions = 'Opzioni di esportazione del database'; $strDatabaseHasBeenDropped = 'Il Database %s è stato eliminato.'; $strDatabases = 'Database'; $strDatabasesDropped = '%s databases sono stati cancellati correttamente.'; $strDatabasesStatsDisable = 'Disabilita le Statistiche'; $strDatabasesStatsEnable = 'Abilita le Statistiche'; $strDatabasesStatsHeavyTraffic = 'N.B.: Abilitare qui le statistiche del Database potrebbe causare del traffico intenso fra il server web e MySQL.'; $strDatabasesStats = 'Statistiche dei databases'; $strData = 'Dati'; $strDataDict = 'Data Dictionary'; $strDataOnly = 'Solo dati'; $strDataPages = 'Pagine contenenti dati'; $strDBComment = 'Commento al Database: '; $strDBCopy = 'Copia il Database in'; $strDbIsEmpty = 'Il database sembra essere vuoto!'; $strDbPrivileges = 'Privilegi specifici al database'; $strDBRename = 'Rinomina il DataBase in'; $strDbSpecific = 'specifico del database'; $strDefaultEngine = '%s è il motore di memorizzazione predefinito su questo server MySQL.'; $strDefault = 'Predefinito'; $strDefaultValueHelp = 'Per i valori predefiniti, prego inserire un singolo valore, senza backslashes escaping o virgolette, utilizzando questo formato: a'; $strDefragment = 'Deframmenta la tabella'; $strDelayedInserts = 'Utilizza inserimenti ritardati'; $strDeleteAndFlush = 'Cancella gli utenti e dopo ricarica i privilegi.'; $strDeleteAndFlushDescr = 'Questa è la via più giusta, ma il caricamento dei privilegi può durare qualche secondo.'; $strDeleted = 'La riga è stata cancellata'; $strDeletedRows = 'Righe cancellate:'; $strDelete = 'Elimina'; $strDeleteNoUsersSelected = 'Nessun utente selezionato per la cancellazione!'; $strDeleteRelation = 'Elimina relazione'; $strDeleting = 'Cancellazione in corso di %s'; $strDelimiter = 'Delimitatori'; $strDelOld = 'La Pagina corrente contiene Riferimenti a Tabelle che non esistono più. Volete cancellare questi Riferimenti?'; $strDescending = 'Decrescente'; $strDescription = 'Descrizione'; $strDesigner = 'Designer'; $strDesignerHelpDisplayField = 'I campi da mostrare sono in colore rosa. 
Per impostare/togliere un campo come campo da mostrare, clicca l\'icona "Scegli il campo da mostrare", e poi clicca sul nome appropriato del campo.'; $strDictionary = 'dizionario'; $strDirectLinks = 'Link diretti'; $strDirtyPages = 'Pagine sporche'; $strDisabled = 'Disabilitata'; $strDisableForeignChecks = 'Disabilita i controlli sulle chiavi straniere'; $strDisplayFeat = 'Mostra Caratteristiche'; $strDisplayOrder = 'Ordine di visualizzazione:'; $strDisplayPDF = 'Mostra lo schema del PDF'; $strDoAQuery = 'Esegui "query da esempio" (carattere jolly: "%")'; $strDocSQL = 'DocSQL'; $strDocu = 'Documentazione'; $strDoYouReally = 'Confermi: '; $strDropDatabaseStrongWarning = 'Si sta per DISTRUGGERE COMPLETAMENTE un intero DataBase!'; $strDrop = 'Elimina'; $strDropUsersDb = 'Elimina i databases gli stessi nomi degli utenti.'; $strDumpingData = 'Dump dei dati per la tabella'; $strDumpSaved = 'Il dump è stato salvato sul file %s.'; $strDumpXRows = 'Dump di %s righe a partire dalla riga %s.'; $strDynamic = 'dinamico'; $strEdit = 'Modifica'; $strEditPDFPages = 'Modifica pagine PDF'; $strEditPrivileges = 'Modifica Privilegi'; $strEffective = 'Effettivo'; $strEmptyResultSet = 'MySQL ha restituito un insieme vuoto (i.e. zero righe).'; $strEmpty = 'Svuota'; $strEnabled = 'Abilitata'; $strEncloseInTransaction = 'Includi export in una transazione'; $strEndCut = 'FINE CUT'; $strEnd = 'Fine'; $strEndRaw = 'FINE RAW'; $strEngineAvailable = '%s è disponibile su questo server MySQL.'; $strEngineDisabled = '%s è stato disabilitato su questo server MySQL.'; $strEngines = 'Motori'; $strEngineUnsupported = 'Questo server MySQL non supporta il motore di memorizzazione %s.'; $strEnglish = 'Inglese'; $strEnglishPrivileges = 'Nota: i nomi dei privilegi di MySQL sono in Inglese'; $strError = 'Errore'; $strErrorInZipFile = 'Errore nell\'archivio ZIP:'; $strErrorRelationAdded = 'Errore: relazione non aggiunta.'; $strErrorRelationExists = 'Errore: relazione già esistente.'; $strErrorRenamingTable = 'Errore nel rinominare la tabella %1$s in %2$s'; $strErrorSaveTable = 'Errore nel salvare le coordinate per il Designer.'; $strEscapeWildcards = 'I caratteri jolly _ e % dovrebbero essere preceduti da un \ per l\'utilizzo letterale'; $strEsperanto = 'Esperanto'; $strEstonian = 'Estone'; $strEvent = 'Eventi'; $strExcelEdition = 'Edizione Excel'; $strExecuteBookmarked = 'Esegue la query dalle preferite'; $strExplain = 'Spiega SQL'; $strExport = 'Esporta'; $strExportImportToScale = 'Importa/esporta alla dimensione'; $strExportMustBeFile = 'Il tipo di esportazione selezionato necessita di essere salvato in un file!'; $strExtendedInserts = 'Inserimenti estesi'; $strExtra = 'Extra'; $strFailedAttempts = 'Tentativi falliti'; $strField = 'Campo'; $strFieldHasBeenDropped = 'Il campo %s è stato eliminato'; $strFieldInsertFromFileTempDirNotExists = 'Errore nello spostare il file caricato, vedi FAQ 1.11'; $strFields = 'Campi'; $strFieldsEnclosedBy = 'Campo composto da'; $strFieldsEscapedBy = 'Campo impedito da'; $strFieldsTerminatedBy = 'Campo terminato da'; $strFileAlreadyExists = 'Il file %s esiste già sul server: prego, cambiare nome del file o selezionare l\'opzione "sovrascrivi".'; $strFileCouldNotBeRead = 'Il file non può essere letto'; $strFileNameTemplateDescriptionDatabase = 'nome database'; $strFileNameTemplateDescription = 'Questo valore è interpretato usando %1$sstrftime%2$s, in questo modo puoi usare stringhe di formattazione per le date/tempi. Verranno anche aggiunte le seguenti trasformazioni: %3$s. 
Il testo rimanente resterà invariato.'; $strFileNameTemplateDescriptionServer = 'nome server'; $strFileNameTemplateDescriptionTable = 'nome tabella'; $strFileNameTemplate = 'Nome file template'; $strFileNameTemplateRemember = 'ricorda il template'; $strFiles = 'File'; $strFileToImport = 'File importato'; $strFixed = 'fisso'; $strFlushPrivilegesNote = 'N.B.: phpMyAdmin legge i privilegi degli utenti direttamente nella tabella dei privilegi di MySQL. Il contenuto di questa tabella può differire dai privilegi usati dal server se sono stati fatti cambiamenti manuali. In questo caso, Si dovrebbero %srinfrescare i privilegi%s prima di continuare.'; $strFlushQueryCache = 'Rinfresca la cache delle query'; $strFlushTable = 'Inizializza ("FLUSH") la tabella'; $strFlushTables = 'Rinfresca (chiudi) tutte le tabelle'; $strFontSize = 'Dimensione font'; $strForeignKeyError = 'Errore creando una foreign key (controlla il tipo di dati)'; $strFormat = 'Formato'; $strFormEmpty = 'Valore mancante nel form!'; $strFreePages = 'Pagine libere'; $strFullText = 'Testo completo'; $strFunction = 'Funzione'; $strFunctions = 'Funzioni'; $strGenBy = 'Generato da'; $strGeneralRelationFeat = 'Caratteristiche Generali di Relazione'; $strGenerate = 'Genera'; $strGeneratePassword = 'Genera Password'; $strGenTime = 'Generato il'; $strGeorgian = 'Georgiano'; $strGerman = 'Tedesco'; $strGlobal = 'globale'; $strGlobalPrivileges = 'Privilegi globali'; $strGlobalValue = 'Valore globale'; $strGo = 'Esegui'; $strGrantOption = 'Grant'; $strGreek = 'Greco'; $strGzip = '"compresso con gzip"'; $strHandler = 'Handler'; $strHasBeenAltered = 'è stato modificato.'; $strHasBeenCreated = 'è stato creato.'; $strHaveToShow = 'Devi scegliere almeno una Colonna da mostrare'; $strHebrew = 'Ebreo'; $strHelp = 'Aiuto'; $strHexForBLOB = 'Usa dati esadecimali per BLOB'; $strHide = 'Nascondi'; $strHideShowAll = 'Mostra/nascondi tutto'; $strHideShowNoRelation = 'Mostra/nascondi tabella senza relazioni'; $strHome = 'Home'; $strHomepageOfficial = 'Home page ufficiale di phpMyAdmin'; $strHostEmpty = 'Il nome di host è vuoto!'; $strHost = 'Host'; $strHTMLExcel = 'Microsoft Excel 2000'; $strHTMLWord = 'Microsoft Word 2000'; $strHungarian = 'Ungherese'; $strIcelandic = 'Islandese'; $strId = 'ID'; $strIdxFulltext = 'Testo completo'; $strIEUnsupported = 'Internet explorer non supporta questa funzione.'; $strIgnoreDuplicates = 'Ignora le righe duplicate'; $strIgnore = 'Ignora'; $strIgnoreInserts = 'Utilizza gli IGNORE INSERTS'; $strImportExportCoords = 'Importa/esporta le coordinate per PDF schema'; $strImportFiles = 'Importa file'; $strImportFormat = 'Formato del file importato'; $strImport = 'Importa'; $strImportSuccessfullyFinished = 'Importazione eseguita con successo, %d query eseguite.'; $strIndexes = 'Indici'; $strIndexesSeemEqual = 'I seguenti indici sembrano essere uguali e uno di essi deve essere rimosso:'; $strIndexHasBeenDropped = 'L\'indice %s è stato eliminato'; $strIndex = 'Indice'; $strIndexName = 'Nome dell\'indice&nbsp;:'; $strIndexType = 'Tipo di indice&nbsp;:'; $strIndexWarningTable = 'Problemi con gli indici della tabella `%s`'; $strInnoDBAutoextendIncrementDesc = ' La dimensione di incremento per aumentare la dimensione di una tabella autoextending quando diventa piena.'; $strInnoDBAutoextendIncrement = 'Incremento autoextend'; $strInnoDBBufferPoolSizeDesc = 'La dimensione del buffer di memoria InnoDB cacha dati e indici delle proprie tabelle.'; $strInnoDBBufferPoolSize = 'Dimensione del Buffer pool'; $strInnoDBDataFilePath = 'File dati'; 
$strInnoDBDataHomeDirDesc = 'La parte comune del path della directory per tutti i file dati InnoDB.'; $strInnoDBDataHomeDir = 'Home directory dei dati'; $strInnoDBPages = 'pagine'; $strInnoDBRelationAdded = 'Aggiunta relazione InnoDB'; $strInnodbStat = 'Stato InnoDB'; $strInsecureMySQL = 'Il file di configurazione in uso contiene impostazioni (root con nessuna password) che corrispondono ai privilegi dell\'account MySQL predefinito. Un server MySQL funzionante con queste impostazioni è aperto a intrusioni, e si dovrebbe realmente riparare a questa falla nella sicurezza.'; $strInsertAsNewRow = 'Inserisci come nuova riga'; $strInsertedRowId = 'Inserito id riga:'; $strInsertedRows = 'Righe inserite:'; $strInsert = 'Inserisci'; $strInternalNotNecessary = '* Non è necessaria una relazione interna quando già esiste in InnoDB.'; $strInternalRelationAdded = 'Aggiunte relazioni interne'; $strInternalRelations = 'Relazioni interne'; $strInUse = 'in uso'; $strInvalidAuthMethod = 'Metodo di autenticazione settato nella configurazione non valido:'; $strInvalidColumn = 'Colonna specificata (%s) invalida!'; $strInvalidColumnCount = 'Il contatore delle colonne deve essere superiore a 0.'; $strInvalidCSVFieldCount = 'Contatore di campo non valido nell\'input CSV alla linea %d.'; $strInvalidCSVFormat = 'Formato non valido per l\'input CSV alla linea %d.'; $strInvalidCSVParameter = 'Parametro non valido per importazione CSV: %s'; $strInvalidDatabase = 'Database non valido'; $strInvalidFieldAddCount = 'Devi aggiungere come minimo un campo.'; $strInvalidFieldCount = 'La tabella deve avere come minimo un campo.'; $strInvalidLDIImport = 'Questo plugin non supporta importazioni di dati compressi!'; $strInvalidRowNumber = '%d non è un numero valido di righe.'; $strInvalidServerHostname = 'Nome host per il server %1$s non valido. 
Controlla la tua configurazione.'; $strInvalidServerIndex = 'Server index non valido: "%s"'; $strInvalidTableName = 'Nome tabella non valido'; $strJapanese = 'Giapponese'; $strJoins = 'Joins'; $strJumpToDB = 'Passa al database &quot;%s&quot;.'; $strJustDelete = 'Cancella soltanto gli utenti dalle tabelle dei privilegi.'; $strJustDeleteDescr = 'Gli utenti &quot;cancellati&quot; saranno ancora in grado di accedere al server come al solito finché i privilegi non verranno ricaricati.'; $strKeepPass = 'Non cambiare la password'; $strKeyCache = 'Key cache'; $strKeyname = 'Nome chiave'; $strKill = 'Rimuovi'; $strKnownExternalBug = 'La %s funzionalità è affetta da un bug noto, vedi %s'; $strKorean = 'Coreano'; $strLandscape = 'Orizzontale'; $strLanguage = 'Lingua'; $strLanguageUnknown = 'Lingua non conosciuta : %1$s.'; $strLatchedPages = 'Latched pages'; $strLatexCaption = 'Sottotitolo della tabella'; $strLatexContent = 'Contenuto della tabella __TABLE__'; $strLatexContinuedCaption = 'Sottotitolo della tabella continuato'; $strLatexContinued = '(continua)'; $strLatexIncludeCaption = 'Includi sottotitolo della tabella'; $strLatexLabel = 'Chiave etichetta'; $strLaTeX = 'LaTeX'; $strLatexStructure = 'Struttura della tabella __TABLE__'; $strLatvian = 'Lettone'; $strLDI = 'CSV usando LOAD DATA'; $strLDILocal = 'Usa LOCAL keyword'; $strLengthSet = 'Lunghezza/Set*'; $strLimitNumRows = 'record per pagina'; $strLinesTerminatedBy = 'Linee terminate da'; $strLinkNotFound = 'Link non trovato'; $strLinksTo = 'Collegamenti a'; $strLithuanian = 'Lituano'; $strLocalhost = 'Locale'; $strLocationTextfile = 'Percorso del file'; $strLogin = 'Connetti'; $strLoginInformation = 'Informazioni di Login'; $strLogout = 'Disconnetti'; $strLogPassword = 'Password:'; $strLogServer = 'Server'; $strLogUsername = 'Nome utente:'; $strLongOperation = 'Questa operazione potrebbe impiegare molto tempo. Procedere comunque?'; $strMaxConnects = 'max. connessioni contemporanee'; $strMaximalQueryLength = 'Lunghezza massima di una query creata'; $strMaximumSize = 'Dimensione massima: %s%s'; $strMbExtensionMissing = 'L\'estensione PHP mbstring non è stata trovata e sembra che si stia utilizzando un set di caratteri multibyte. Senza l\'estensione mbstring, phpMyAdmin non è in grado di dividere correttamente le stringhe di caratteri e questo può portare a risultati inaspettati.'; $strMbOverloadWarning = 'Avete abilitato mbstring.func_overload nella configurazione del PHP. Questa opzione è incompatibile con phpMyAdmin e potrebbe causare la corruzione di alcuni dati!'; $strMIME_available_mime = 'Tipi-MIME disponibili'; $strMIME_available_transform = 'Trasformazioni disponibili'; $strMIME_description = 'Descrizione'; $strMIME_MIMEtype = 'tipo MIME'; $strMIME_nodescription = 'Nessuna descrizione è disponibile per questa trasformazione.<br />Prego, chiedere all\'autore cosa %s faccia.'; $strMIME_transformation_note = 'Per una lista di opzioni di trasformazione disponibili e le loro rispettive trasformazioni di tipi-MIME, cliccate su %strasformazione descrizioni%s'; $strMIME_transformation_options_note = 'Prego, immettere i valori per le opzioni di trasformazione utilizzando questo formato: \'a\', 100, b,\'c\'...<br />Se c\'è la necessità di immettere un backslash ("\") o un apostrofo ("\'") tra questi valori, essi vanno backslashati (per es. 
\'\\\\xyz\' or \'a\\\'b\').'; $strMIME_transformation_options = 'Opzioni di Transformation'; $strMIME_transformation = 'Trasformazione del Browser'; $strMIMETypesForTable = 'MIME TYPES FOR TABLE'; $strMIME_without = 'Tipi-MIME stampati in italics non hanno una funzione di trasformazione separata'; $strModifications = 'Le modifiche sono state salvate'; $strModifyIndexTopic = 'Modifica un indice'; $strModify = 'Modifica'; $strMoveMenu = 'Muovi menù'; $strMoveTableOK = 'La tabella %s è stata spostata in %s.'; $strMoveTableSameNames = 'Impossibile spostare la tabella su se stessa!'; $strMoveTable = 'Sposta la tabella nel (database<b>.</b>tabella):'; $strMultilingual = 'multilingua'; $strMyISAMDataPointerSizeDesc = 'Dimensione del puntatore predefinito in Bytes, che deve essere usata da CREATE TABLE per le tabelle MyISAM quando non è stata specificata l\'opzione MAX_ROWS.'; $strMyISAMDataPointerSize = 'Domensione del puntatore dati'; $strMyISAMMaxExtraSortFileSizeDesc = 'Se il file temporaneo è usato per la creazione veloce di un indice MyISAM, occuperebbe più spazio dell\'utilizzo del metodo key cache con la quantità ivi specificata: perciò si deve prediligere il metodo key cache.'; $strMyISAMMaxExtraSortFileSize = 'Dimensione massima per i file temporanei nella creazione di un indice'; $strMyISAMMaxSortFileSizeDesc = 'La dimensione massima dei file temporanei MySQL può essere utilizzata nella rigenerazione di un indice MyISAM (durante un REPAIR TABLE, ALTER TABLE, o LOAD DATA INFILE).'; $strMyISAMMaxSortFileSize = 'Dimensione massima dei file temporanei di ordinamento'; $strMyISAMRecoverOptionsDesc = 'La modalità di irppristino automatico di tabelle MyISAM corrotte, come impostato tramite l\'opzione di lan cio del server --myisam-recover.'; $strMyISAMRecoverOptions = 'Modalità di ripristino automatico'; $strMyISAMRepairThreadsDesc = 'Se questo valore è maggiore di 1, gli indici della tabella MyISAM vengono creati in parallelo (ogni indice nel suo thread) durante il processo di ordinamento Repair by.'; $strMyISAMRepairThreads = 'Thread di riparazione'; $strMyISAMSortBufferSizeDesc = 'Il buffer che viene allocato nell\'ordinamento degli indici MyISAM durante un REPAIR TABLE o nella creazione degli indici con CREATE INDEX o ALTER TABLE.'; $strMyISAMSortBufferSize = 'Ordina la dimensione del buffer'; $strMySQLCharset = 'Set di caratteri MySQL'; $strMysqlClientVersion = 'Versione MySQL client'; $strMySQLConnectionCollation = 'collazione della connessione di MySQL'; $strMysqlLibDiffersServerVersion = 'Le tue librerie di PHP per MySQL versione %s sono diverse dalla versione di MySQL server %s. Potrebbe causare comportamenti imprevedibili.'; $strMySQLSaid = 'Messaggio di MySQL: '; $strMySQLShowProcess = 'Visualizza processi in esecuzione'; $strMySQLShowStatus = 'Visualizza informazioni di runtime di MySQL'; $strMySQLShowVars = 'Visualizza variabili di sistema di MySQL'; $strName = 'Nome'; $strNext = 'Prossimo'; $strNoActivity = 'Nessuna attività da %s secondi o più, si prega di autenticarsi nuovamente'; $strNoDatabases = 'Nessun database'; $strNoDatabasesSelected = 'Nessun database selezionato.'; $strNoDataReceived = 'Non sono stati ricevuti dati da importare. O non è stato indicato alcun nome file, oppure è stato superata la dimensione massima consentita per il file, impostata nella configurazione di PHP. 
Vedi FAQ 1.16.'; $strNoDescription = 'nessuna Description'; $strNoDetailsForEngine = 'Non è disponibile nessuna informazione dettagliata sullo stato di questo motore di memorizzazione.'; $strNoDropDatabases = 'I comandi "DROP DATABASE" sono disabilitati.'; $strNoExplain = 'Non Spiegare SQL'; $strNoFilesFoundInZip = 'Non sono stati trovati file ZIP all\'interno dell\'archivio!'; $strNoFrames = 'phpMyAdmin funziona meglio con browser che supportano frames'; $strNoIndex = 'Nessun indice definito!'; $strNoIndexPartsDefined = 'Nessuna parte di indice definita!'; $strNoModification = 'Nessun cambiamento'; $strNone = 'Nessuno'; $strNo = ' No '; $strNoOptions = 'Questo formato non ha opzioni'; $strNoPassword = 'Nessuna Password'; $strNoPermission = 'Il server web non possiede i privilegi per salvare il file %s.'; $strNoPhp = 'senza codice PHP'; $strNoPrivileges = 'Nessun Privilegio'; $strNoRights = 'Non hai i permessi per effettuare questa operazione!'; $strNoRowsSelected = 'Nessuna riga selezionata'; $strNoSpace = 'Spazio insufficiente per salvare il file %s.'; $strNoTablesFound = 'Non ci sono tabelle nel database.'; $strNoThemeSupport = 'Nessun supporto per i temi, si prega di controllare la configurazione e/o i temi nella cartella %s.'; $strNotNumber = 'Questo non è un numero!'; $strNotOK = 'non OK'; $strNotSet = '<b>%s</b> tabella non trovata o non settata in %s'; $strNoUsersFound = 'Nessun utente trovato.'; $strNoValidateSQL = 'Non Validare SQL'; $strNull = 'Null'; $strNumberOfFields = 'Numero di campi'; $strNumberOfTables = 'Numero di tabelle'; $strNumSearchResultsInTable = '%s corrisponde/ono nella tabella <i>%s</i>'; $strNumSearchResultsTotal = '<b>Totale:</b> <i>%s</i> corrispondenza/e'; $strNumTables = 'Tabelle'; $strOK = 'OK'; $strOpenDocumentSpreadsheet = 'Foglio di calcolo nel formato Open Document'; $strOpenDocumentText = 'Testo nel formato Open Document'; $strOpenNewWindow = 'Apri una nuova finestra di PhpMyAdmin'; $strOperations = 'Operazioni'; $strOperator = 'Operatore'; $strOptimizeTable = 'Ottimizza tabella'; $strOptions = 'Opzioni'; $strOr = 'Oppure'; $strOverhead = 'In eccesso'; $strOverwriteExisting = 'Sovrascrivi file(s) esistente/i'; $strPageNumber = 'Numero pagina:'; $strPagesToBeFlushed = 'Pagine che devono essere flushate'; $strPaperSize = 'Dimensioni carta'; $strPartialImport = 'Importazione parziale'; $strPartialText = 'Testo parziale'; $strPasswordChanged = 'La password per l\'utente %s è cambiata con successo.'; $strPasswordEmpty = 'La password è vuota!'; $strPasswordHashing = 'Password Hashing'; $strPasswordNotSame = 'La password non coincide!'; $strPassword = 'Password'; $strPdfDbSchema = 'Schema del database "%s" - Pagina %s'; $strPdfInvalidTblName = 'La tabella "%s" non esiste!'; $strPdfNoTables = 'Nessuna Tabella'; $strPDF = 'PDF'; $strPDFReportExplanation = '(Genera un report contenete i dati di una singola tabella)'; $strPDFReportTitle = 'Titolo del Report'; $strPerHour = 'all\'ora'; $strPerMinute = 'al minuto'; $strPerSecond = 'al secondo'; $strPersian = 'Persiano'; $strPhoneBook = 'rubrica'; $strPHP40203 = 'Si sta utilizzando PHP 4.2.3, che presenta un serio bug con le stringhe multi-byte (mbstring). Vedi report PHP 19404. 
Questa versione di PHP non è raccomandata per l\'utilizzo con phpMyAdmin.'; $strPhp = 'Crea il codice PHP'; $strPHPVersion = 'Versione PHP'; $strPleaseSelectPrimaryOrUniqueKey = 'Seleziona la chiave primaria o una chiave univoca'; $strPmaDocumentation = 'Documentazione di phpMyAdmin'; $strPmaUriError = 'La direttiva <tt>$cfg[\'PmaAbsoluteUri\']</tt> DEVE essere impostata nel file di configurazione!'; $strPmaWiki = 'phpMyAdmin wiki'; $strPolish = 'Polacco'; $strPortrait = 'Verticale'; $strPos1 = 'Inizio'; $strPrevious = 'Precedente'; $strPrimaryKeyHasBeenDropped = 'La chiave primaria è stata eliminata'; $strPrimaryKeyName = 'Il nome della chiave primaria deve essere... PRIMARY!'; $strPrimaryKeyWarning = '("PRIMARY" <b>deve</b> essere il nome di, e <b>solo di</b>, una chiave primaria!)'; $strPrimary = 'Primaria'; $strPrint = 'Stampa'; $strPrintViewFull = 'Vista stampa (con full text)'; $strPrintView = 'Visualizza per stampa'; $strPrivDescAllPrivileges = 'Comprende tutti i privilegi tranne GRANT.'; $strPrivDescAlter = 'Permette di alterare la struttura di tabelle esistenti.'; $strPrivDescAlterRoutine = 'Permette l\'alterazione e l\'eliminazione di routines memorizzate.'; $strPrivDescCreateDb = 'Permette di creare nuove tabelle e nuovi databases.'; $strPrivDescCreateRoutine = 'Permette la creazione di routines memorizzate.'; $strPrivDescCreateTbl = 'Permette di creare nuove tabelle.'; $strPrivDescCreateTmpTable = 'Permette di creare tabelle temporanee.'; $strPrivDescCreateUser = 'Permette di creare, cancellare e rinominare gli account utente.'; $strPrivDescCreateView = 'Permette la creazione di nuove viste.'; $strPrivDescDelete = 'Permette di cancellare dati.'; $strPrivDescDropDb = 'Permette di eliminare databases e tabelle.'; $strPrivDescDropTbl = 'Permette di eliminare tabelle.'; $strPrivDescExecute5 = 'Permette l\'esecuzione di routines memorizzate.'; $strPrivDescExecute = 'Permette di eseguire procedure memorizzate; Non ha effetto in questa versione di MySQL.'; $strPrivDescFile = 'Permette di importare dati da e esportare dati in file.'; $strPrivDescGrant = 'Permette di aggiungere utenti e privilegi senza ricaricare le tabelle dei privilegi.'; $strPrivDescIndex = 'Permette di creare ed eliminare gli indici.'; $strPrivDescInsert = 'Permette di inserire e sovrascrivere dati.'; $strPrivDescLockTables = 'Permette di bloccare le tabelle per il thread corrente.'; $strPrivDescMaxConnections = 'Limita il numero di nuove connessioni che un utente può aprire in un\'ora.'; $strPrivDescMaxQuestions = 'Limita il numero di query che un utente può mandare al server in un\'ora.'; $strPrivDescMaxUpdates = 'Limita il numero di comandi che possono cambiare una tabella o un database che un utente può eseguire in un\'ora.'; $strPrivDescMaxUserConnections = 'Limite di connessioni simultanee che un utente può fare.'; $strPrivDescProcess3 = 'Permette di killare i processi di altri utenti.'; $strPrivDescProcess4 = 'Permette di vedere le query complete nella lista dei processi.'; $strPrivDescReferences = 'Non ha alcun effetto in questa versione di MySQL.'; $strPrivDescReload = 'Permette di ricaricare i parametri del server e di resettare la cache del server.'; $strPrivDescReplClient = 'Accorda il diritto ad un utente di domandare dove sono i masters/slaves.'; $strPrivDescReplSlave = 'Necessario per la replicazione degli slaves.'; $strPrivDescSelect = 'Permette di leggere i dati.'; $strPrivDescShowDb = 'Accorda l\'accesso alla lista completa dei databases.'; $strPrivDescShowView = 'Permette di effettuare query del 
tipo SHOW CREATE VIEW.'; $strPrivDescShutdown = 'Permette di chiudere il server.'; $strPrivDescSuper = 'Permette altre connessioni, anche se è stato raggiunto il massimo numero di connessioni; Necessario per molte operazioni di amministrazione come il settaggio di variabili globali o la cancellazione dei threads di altri utenti.'; $strPrivDescUpdate = 'Permette di cambiare i dati.'; $strPrivDescUsage = 'Nessun privilegio.'; $strPrivileges = 'Privilegi'; $strPrivilegesReloaded = 'I privilegi sono stati ricaricati con successo.'; $strProcedures = 'Procedure'; $strProcesses = 'Processi'; $strProcesslist = 'Lista Processi'; $strProfiling = 'Profiling'; $strProtocolVersion = 'Versione protocollo'; $strPutColNames = 'Mette i nomi delle colonne alla prima riga'; $strQBEDel = 'Elimina'; $strQBEIns = 'Aggiungi'; $strQBE = 'Query da esempio'; $strQueryCache = 'Cache delle query'; $strQueryFrame = 'Finestra della Query'; $strQueryOnDb = 'SQL-query sul database <b>%s</b>:'; $strQueryResultsOperations = 'Risultato delle operazioni di Query'; $strQuerySQLHistory = 'Storico dell\'SQL'; $strQueryStatistics = '<b>Query delle Statistiche</b>: Dall\'avvio, %s query sono state effettuate sul server.'; $strQueryTime = 'La query ha impiegato %01.4f sec'; $strQueryType = 'Tipo di Query'; $strQueryWindowLock = 'Non sovrascrivere questa query da fuori della finestra'; $strReadRequests = 'Richieste di lettura'; $strReceived = 'Ricevuti'; $strRecommended = 'raccomandato'; $strRecords = 'Record'; $strReferentialIntegrity = 'Controlla l\'integrità delle referenze:'; $strRefresh = 'Aggiorna'; $strRelationalSchema = 'Schema relazionale'; $strRelationDeleted = 'Relazione cancellata'; $strRelationNotWorking = 'Le caratteristiche aggiuntive sono state disattivate per funzionare con le tabelle linkate. 
Per scoprire perché clicca %squi%s.'; $strRelationsForTable = 'RELATIONS FOR TABLE'; $strRelations = 'Relazioni'; $strRelationView = 'Vedi relazioni'; $strReloadingThePrivileges = 'Caricamento dei privilegi in corso'; $strReloadPrivileges = 'Ricarica i privilegi'; $strReload = 'Ricarica'; $strRemoveSelectedUsers = 'Rimuove gli utenti selezionati'; $strRenameDatabaseOK = 'Il DataBase %s è stato rinominato in %s'; $strRenameTableOK = 'La tabella %s è stata rinominata %s'; $strRenameTable = 'Rinomina la tabella in'; $strRepairTable = 'Ripara tabella'; $strReplaceNULLBy = 'Sostituisci NULL con'; $strReplaceTable = 'Sostituisci i dati della tabella col file'; $strReplication = 'Replicazione'; $strReset = 'Riavvia'; $strResourceLimits = 'Limiti di risorse'; $strRestartInsertion = 'Riprendi inserimento con la riga %s'; $strReType = 'Reinserisci'; $strRevokeAndDeleteDescr = 'Gli utenti UTILIZZERANNO comunque il privilegio finché i privilegi non saranno ricaricati.'; $strRevokeAndDelete = 'Revoca tutti i privilegi attivi agli utenti e dopo li cancella.'; $strRevokeMessage = 'Hai revocato i privilegi per %s'; $strRevoke = 'Revoca'; $strRomanian = 'Rumeno'; $strRoutineReturnType = 'Tipo di risultato'; $strRoutines = 'Routines'; $strRowLength = 'Lunghezza riga'; $strRowsFrom = 'righe a partire da'; $strRowSize = 'Dimensione riga'; $strRowsModeFlippedHorizontal = 'orizzontale (headers ruotati)'; $strRowsModeHorizontal = ' orizzontale '; $strRowsModeOptions = ' in modalità %s e ripeti gli headers dopo %s celle '; $strRowsModeVertical = ' verticale '; $strRows = 'Righe'; $strRowsStatistic = 'Statistiche righe'; $strRunning = 'in esecuzione su %s'; $strRunQuery = 'Invia Query'; $strRunSQLQuery = 'Esegui la/e query SQL sul database %s'; $strRunSQLQueryOnServer = 'Eseguendo query SQL sul server %s'; $strRussian = 'Russo'; $strSaveOnServer = 'Salva sul server nella directory %s'; $strSavePosition = 'Salva la posizione'; $strSave = 'Salva'; $strScaleFactorSmall = 'Il fattore di scala è troppo piccolo per riempire lo schema nella pagina'; $strSearch = 'Cerca'; $strSearchFormTitle = 'Cerca nel database'; $strSearchInTables = 'Nella/e tabella/e:'; $strSearchNeedle = 'parola/e o valore/i da cercare (carattere jolly: "%"):'; $strSearchOption1 = 'almeno una delle parole'; $strSearchOption2 = 'tutte le parole'; $strSearchOption3 = 'la frase esatta'; $strSearchOption4 = 'come espressione regolare'; $strSearchResultsFor = 'Cerca i risultati per "<i>%s</i>" %s:'; $strSearchType = 'Trova:'; $strSecretRequired = 'Adesso c\'è bisogno di una password per il file di configurazione (blowfish_secret).'; $strSelectADb = 'Prego, selezionare un database'; $strSelectAll = 'Seleziona Tutto'; $strSelectBinaryLog = 'Selezionare il log binario da visualizzare'; $strSelectFields = 'Seleziona campi (almeno uno):'; $strSelectForeignKey = 'Seleziona Foreign Key'; $strSelectNumRows = 'nella query'; $strSelectReferencedKey = 'Seleziona le chiavi referenziali'; $strSelectTables = 'Seleziona Tables'; $strSend = 'Salva con nome...'; $strSent = 'Spediti'; $strServerChoice = 'Scelta del server'; $strServerNotResponding = 'Il server non risponde'; $strServer = 'Server'; $strServers = 'Servers'; $strServerStatusDelayedInserts = 'Inserimento ritardato'; $strServerStatus = 'Informazioni di Runtime'; $strServerStatusUptime = 'Questo server MySQL sta girando da %s. 
E\' stato avviato il %s.'; $strServerTabVariables = 'Variabili'; $strServerTrafficNotes = '<b>Traffico del server</b>: Queste tabelle mostrano le statistiche del traffico di rete di questo server MySQL dal momento del suo avvio.'; $strServerVars = 'Variabili e parametri del Server'; $strServerVersion = 'Versione MySQL'; $strSessionStartupErrorGeneral = 'Non posso far partire la sessione senza errori, controlla gli errori nel log di PHP e/o del tuo server web e configura correttamente la tua installazione di PHP.'; $strSessionValue = 'Valore sessione'; $strSetEnumVal = 'Se il tipo di campo è "enum" o "set", immettere i valori usando il formato: \'a\',\'b\',\'c\'...<br />Se comunque dovete mettere dei backslashes ("\") o dei single quote ("\'") davanti a questi valori, backslashateli (per esempio \'\\\\xyz\' o \'a\\\'b\').'; $strShowAll = 'Mostra tutti'; $strShowColor = 'Mostra il colore'; $strShowDatadictAs = 'Formato del Data Dictionary'; $strShowFullQueries = 'Mostra query complete'; $strShowGrid = 'Mostra la griglia'; $strShowHideLeftMenu = 'Mostra/nascondi il menù di sinistra'; $strShowingBookmark = 'Mostrando i segnalibri'; $strShowingPhp = 'Mostrando il codice PHP'; $strShowingRecords = 'Visualizzazione record '; $strShowingSQL = 'Mostrando la query SQL'; $strShow = 'Mostra'; $strShowOpenTables = 'Mostra le tabelle aperte'; $strShowPHPInfo = 'Mostra le info sul PHP'; $strShowSlaveHosts = 'Mostra gli hosts slave'; $strShowSlaveStatus = 'Mostra lo stato degli slave'; $strShowStatusBinlog_cache_disk_useDescr = 'Il numero delle transazioni che usano la cache temporanea del log binario, ma che oltrepassano il valore di binlog_cache_size e usano un file temporaneo per salvare gli statements dalle transazioni.'; $strShowStatusBinlog_cache_useDescr = 'Il numero delle transazioni che usano la cache temporanea del log binario.'; $strShowStatusCreated_tmp_disk_tablesDescr = 'Il numero delle tabelle temporanee create automaticamente sul disco dal server mentre esegue i comandi. Se il valore Created_tmp_disk_tables è grande, potresti voler aumentare il valore tmp_table_size, per fare in modo che le tabelle temporanee siano memory-based anziché disk-based.'; $strShowStatusCreated_tmp_filesDescr = 'Numero di file temporanei che mysqld ha creato.'; $strShowStatusCreated_tmp_tablesDescr = 'Il numero di tabelle temporanee create automaticamente in memoria dal server durante l\'esecuzione dei comandi.'; $strShowStatusDelayed_errorsDescr = 'Numero di righe scritte con INSERT DELAYED in cui ci sono stati degli errori (probabilmente chiave duplicata).'; $strShowStatusDelayed_insert_threadsDescr = 'Il numero di processi INSERT DELAYED in uso. Ciascuna tabella su cui è usato INSERT DELAYED occupa un thread.'; $strShowStatusDelayed_writesDescr = 'Il numero di righe INSERT DELAYED scritte.'; $strShowStatusFlush_commandsDescr = 'Il numero di comandi FLUSH eseguiti.'; $strShowStatusHandler_commitDescr = 'Il numero di comandi interni COMMIT eseguiti.'; $strShowStatusHandler_deleteDescr = 'Il numero di volte in cui una riga è stata cancellata da una tabella.'; $strShowStatusHandler_discoverDescr = 'Il server MySQL può chiedere al motore di storage NDB Cluster se conosce una tabella sulla base di un nome dato. Questo è chiamato discovery. Handler_discover indica il numero di volte che una tabella è stata trovata.'; $strShowStatusHandler_read_firstDescr = 'Il numero di volte che il primo valore è stato letto da un indice. 
Se è troppo alto è probabile che il server stia facendo molte scansioni complete degli indici; per esempio, SELECT col1 FROM foo, assumendo che col1 sia indicizzata.'; $strShowStatusHandler_read_keyDescr = 'Il numero di richieste per leggere una riga basata su di una chiave. Se è alta, è una buona indicazione che le tue query e le tue tabelle sono correttamente indicizzate.'; $strShowStatusHandler_read_nextDescr = 'Il numero di richieste per leggere la riga successiva nell\'ordine delle chiavi. Questo valore è incrementato se stai facendo una query su di una colonna indice con un range costante, oppure se stai facendo una scansione degli indici.'; $strShowStatusHandler_read_prevDescr = 'Il numero di richieste per leggere la riga precedente nell\'ordine delle chiavi. Questo metodo di lettura è principalmente utilizzato per ottimizzare ORDER BY ... DESC.'; $strShowStatusHandler_read_rndDescr = 'Il numero di richieste per leggere una riga basata su una posizione fissa. Questo valore è alto se stai facendo molte richieste che richiedono un ordinamento dei risultati. Probabilmente hai molte query che richiedono a MySQL di leggere l\'intera tabella oppure ci sono dei joins che non usano le chiavi correttamente.'; $strShowStatusHandler_read_rnd_nextDescr = 'Il numero di richieste per leggere la riga successiva in un file di dati. Questo valore è alto se stai facendo molte scansioni della tabella. Generalmente è un segnale che le tue tabelle non sono correttamente indicizzate, o che le query non sono state scritte per trarre vantaggi dagli indici che hai.'; $strShowStatusHandler_rollbackDescr = 'Il numero di comandi ROLLBACK interni.'; $strShowStatusHandler_updateDescr = 'Il numero di richieste per aggiornare una riga in una tabella.'; $strShowStatusHandler_writeDescr = 'Il numero di richieste per inserire una riga in una tabella.'; $strShowStatusInnodb_buffer_pool_pages_dataDescr = 'Il numero di pagine che contengono dati (sporchi o puliti).'; $strShowStatusInnodb_buffer_pool_pages_dirtyDescr = 'Il numero di pagine attualmente sporche.'; $strShowStatusInnodb_buffer_pool_pages_flushedDescr = 'Il numero di buffer pool pages che hanno avuto richiesta di essere aggiornate.'; $strShowStatusInnodb_buffer_pool_pages_freeDescr = 'Il numero di pagine libere.'; $strShowStatusInnodb_buffer_pool_pages_latchedDescr = 'Il numero di pagine bloccate in un InnoDB buffer pool. Queste pagine sono attualmente in lettura o in scrittura e non possono essere aggiornate o rimosse per altre ragioni.'; $strShowStatusInnodb_buffer_pool_pages_miscDescr = 'Il numero di pagine occupate perché sono state allocate per amministrazione, come row locks o per hash index adattivi. Questo valore può essere calcolato come Innodb_buffer_pool_pages_total - Innodb_buffer_pool_pages_free - Innodb_buffer_pool_pages_data.'; $strShowStatusInnodb_buffer_pool_pages_totalDescr = 'Il numero totale di buffer pool, in pagine.'; $strShowStatusInnodb_buffer_pool_read_ahead_rndDescr = 'Il numero di read-aheads "random" InnoDB iniziate. Questo accade quando una query legge una porzione di una tabella, ma in ordine casuale.'; $strShowStatusInnodb_buffer_pool_read_ahead_seqDescr = 'Il numero di read-aheads InnoDB sequenziali. 
Questo accade quando InnoDB esegue una scansione completa sequenziale di una tabella.'; $strShowStatusInnodb_buffer_pool_read_requestsDescr = 'Il numero di richieste logiche che InnoDB ha fatto.'; $strShowStatusInnodb_buffer_pool_readsDescr = 'Il numero di richieste logiche che InnoDB non può soddisfare dal buffer pool e che devono fare una lettura di una pagina singola.'; $strShowStatusInnodb_buffer_pool_wait_freeDescr = 'Normalmente le scritture nel buffer pool InnoDB vengono effettuate in background. Tuttavia se è necessario leggere o creare una pagina, e non sono disponibili pagine pulite è necessario attendere che le pagine siano aggiornate prima. Questo contatore conta le istanze di queste attese. Se la dimensione del buffer pool è stata settata correttamente questo valore dovrebbe essere basso.'; $strShowStatusInnodb_buffer_pool_write_requestsDescr = 'Il numero di scritture effettuate nel buffer pool InnoDB.'; $strShowStatusInnodb_data_fsyncsDescr = 'Il numero delle operazioni fsync() fino ad ora.'; $strShowStatusInnodb_data_pending_fsyncsDescr = 'Il numero di operazioni fsync() in attesa.'; $strShowStatusInnodb_data_pending_readsDescr = 'Il numero di letture in attesa.'; $strShowStatusInnodb_data_pending_writesDescr = 'Il numero di scritture in attesa.'; $strShowStatusInnodb_data_readDescr = 'La quantità di dati letti fino ad ora, in bytes.'; $strShowStatusInnodb_data_readsDescr = 'Il numero totale di dati letti.'; $strShowStatusInnodb_data_writesDescr = 'Il numero totale di dati scritti.'; $strShowStatusInnodb_data_writtenDescr = 'La quantità di dati scritti fino ad ora, in bytes.'; $strShowStatusInnodb_dblwr_pages_writtenDescr = 'Il numero di scritture doublewrite che sono state eseguite ed il numero che sono state scritte a questo scopo.'; $strShowStatusInnodb_dblwr_writesDescr = 'Il numero di scritture doublewrite che sono state eseguite ed il numero che sono state scritte a questo scopo.'; $strShowStatusInnodb_log_waitsDescr = 'Il numero di attese che abbiamo avuto perché il buffer di log era troppo piccolo e abbiamo dovuto attendere che fosse aggiornato prima di continuare.'; $strShowStatusInnodb_log_write_requestsDescr = 'Il numero di richieste di scrittura dei log.'; $strShowStatusInnodb_log_writesDescr = 'Il numero di scritture fisiche del log file.'; $strShowStatusInnodb_os_log_fsyncsDescr = 'Il numero di scritture fsync fatte sul log file.'; $strShowStatusInnodb_os_log_pending_fsyncsDescr = 'Il numero degli fsyncs in sospeso sul log file.'; $strShowStatusInnodb_os_log_pending_writesDescr = 'Il numero di scritture in sospeso sul log file.'; $strShowStatusInnodb_os_log_writtenDescr = 'Il numero di bytes scritti sul log file.'; $strShowStatusInnodb_pages_createdDescr = 'Il numero di pagine create.'; $strShowStatusInnodb_page_sizeDescr = 'La dimensione, definita in fase di compilazione, delle pagine InnoDB (default 16KB). 
Molti valori sono conteggiati nelle pagine; la dimensione delle pagine permette di convertirli facilmente in bytes.'; $strShowStatusInnodb_pages_readDescr = 'Il numero di pagine lette.'; $strShowStatusInnodb_pages_writtenDescr = 'Il numero di pagine scritte.'; $strShowStatusInnodb_row_lock_current_waitsDescr = 'Il numero di row locks attualmente in attesa.'; $strShowStatusInnodb_row_lock_time_avgDescr = 'Il tempo medio per l\'acquisizione di un row lock, in millisecondi.'; $strShowStatusInnodb_row_lock_timeDescr = 'Il tempo totale per l\'acquisizione dei row locks, in millisecondi.'; $strShowStatusInnodb_row_lock_time_maxDescr = 'Il tempo massimo per l\'acquisizione di un row lock, in millisecondi.'; $strShowStatusInnodb_row_lock_waitsDescr = 'Il numero di volte che un row lock ha dovuto attendere.'; $strShowStatusInnodb_rows_deletedDescr = 'Il numero di righe cancellate da una tabella InnoDB.'; $strShowStatusInnodb_rows_insertedDescr = 'Il numero di righe inserite da una tabella InnoDB.'; $strShowStatusInnodb_rows_readDescr = 'Il numero di righe lette da una tabella InnoDB.'; $strShowStatusInnodb_rows_updatedDescr = 'Il numero di righe aggiornate da una tabella InnoDB.'; $strShowStatusKey_blocks_not_flushedDescr = 'Il numero di blocchi chiave aggiunti nella cache chiave che sono stati cambiati, ma che non sono stati aggiornati su disco. E\' conosciuto con il nome di Not_flushed_key_blocks.'; $strShowStatusKey_blocks_unusedDescr = 'Il numero di blocchi non usati nella cache chiave. Puoi usare questo valore per determinare quanta cache chiave è in uso.'; $strShowStatusKey_blocks_usedDescr = 'Il numero di blocchi usati nella cache chiave. Questo valore è un importante segnale che indica il numero massimo di blocchi che sono stati in uso contemporaneamente.'; $strShowStatusKey_read_requestsDescr = 'Il numero di richieste per leggere un blocco chiave dalla cache.'; $strShowStatusKey_readsDescr = 'Il numero di letture fisiche dal disco di un blocco chiave. Se Key_reads è grande allora il valore key_buffer_size è probabilmente troppo piccolo. Il rapporto di cache miss rate può essere calcolato come Key_reads/Key_read_requests.'; $strShowStatusKey_write_requestsDescr = 'Il numero di richieste per scrivere un blocco chiave nella cache.'; $strShowStatusKey_writesDescr = 'Il numero di scritture fisiche di un blocco chiave sul disco.'; $strShowStatusLast_query_costDescr = 'Il costo totale dell\'ultima query compilata così come computato dall\'ottimizzatore delle query. Utile per comparare il costo di differenti query per la stessa operazione di query. Il valore di default è 0, che significa che nessuna query è stata ancora compilata.'; $strShowStatusNot_flushed_delayed_rowsDescr = 'Il numero di righe in attesa di essere scritte nella coda INSERT DELAYED.'; $strShowStatusOpened_tablesDescr = 'Il numero di tabelle che sono state aperte. 
Se il valore opened_tables è grande, probabilmente il valore di table cache è troppo piccolo.'; $strShowStatusOpen_filesDescr = 'Il numero di file che sono aperti.'; $strShowStatusOpen_streamsDescr = 'il numero di stream che sono aperti (usato principalmente per il logging).'; $strShowStatusOpen_tablesDescr = 'Il numero di tabelle che sono aperte.'; $strShowStatusQcache_free_blocksDescr = 'Il numero di blocchi di memoria liberi nella cache delle query.'; $strShowStatusQcache_free_memoryDescr = 'L\'ammontare di memoria libera nella cache delle query.'; $strShowStatusQcache_hitsDescr = 'Il numero di cache hits.'; $strShowStatusQcache_insertsDescr = 'Il numero di query aggiunte alla cache.'; $strShowStatusQcache_lowmem_prunesDescr = 'Il numero di query che sono state rimosse dalla cache per liberare memoria per la cache di nuove query. Questa informazione può aiutarti per parametrare la dimensione della cache delle query. La cache delle query usa una strategia di "meno usate recentemente" (LRU - least recently used) per decidere quali query rimuovere dalla cache.'; $strShowStatusQcache_not_cachedDescr = 'Il numero di query non in cache (impossibilità di inserirle nella cache oppure non inserite per i settaggi del parametro query_cache_type).'; $strShowStatusQcache_queries_in_cacheDescr = 'Il numero di query registrate nella cache.'; $strShowStatusQcache_total_blocksDescr = 'Il numero totale di blocchi nella cache delle query.'; $strShowStatusReset = 'Reset'; $strShowStatusRpl_statusDescr = 'Lo sato delle repliche failsafe (non ancora implementato).'; $strShowStatusSelect_full_joinDescr = 'Il numero di joins che non usano gli indici. (Se questo valore non è 0, dovresti controllare attentamente gli indici delle tue tabelle.)'; $strShowStatusSelect_full_range_joinDescr = 'Il numero di joins che usano una ricerca limitata su di una tabella di riferimento.'; $strShowStatusSelect_range_checkDescr = 'Il numero di joins senza chiavi che controllano per l\'uso di una chiave dopo ogni riga. (Se questo valore non è 0, dovresti controllare attentamente gli indici delle tue tabelle.)'; $strShowStatusSelect_rangeDescr = 'Il numero di joins che usano un range sulla prima tabella. (Non è, solitamente, un valore critico anche se è grande.)'; $strShowStatusSelect_scanDescr = 'Il numero di join che hanno effettuato una scansione completa della prima tabella.'; $strShowStatusSlave_open_temp_tablesDescr = 'Il numero di tabelle temporaneamente aperte da processi SQL slave.'; $strShowStatusSlave_retried_transactionsDescr = 'Numero totale di volte (dalla partenza) in cui la replica slave SQL ha ritentato una transazione.'; $strShowStatusSlave_runningDescr = 'Questa chiave è ON se questo è un server slave connesso ad un server master.'; $strShowStatusSlow_launch_threadsDescr = 'Numero di processi che hanno impiegato più di "slow_launch_time" secondi per partire.'; $strShowStatusSlow_queriesDescr = 'Numero di query che hanno impiegato più di "long_query_time" seconds.'; $strShowStatusSort_merge_passesDescr = 'Il numero di fusioni passate all\'algoritmo di ordianemento che sono state fatte. 
Se questo valore è grande, dovresti incrementare la variabile di sistema sort_buffer_size.'; $strShowStatusSort_rangeDescr = 'Il numero di ordinamenti che sono stati eseguiti in un intervallo.'; $strShowStatusSort_rowsDescr = 'Il numero di righe ordinate.'; $strShowStatusSort_scanDescr = 'Il numero di ordinamenti che sono stati fatti leggendo la tabella.'; $strShowStatusTable_locks_immediateDescr = 'Il numero di volte che un table lock è stato eseguito immediatamente.'; $strShowStatusTable_locks_waitedDescr = 'Il numero di volte che un table lock è stato eseguito immediatamente ed era necessaria un\'attesa. Se è alto, potresti avere dei problemi con le performance, dovresti prima ottimizzare le query, oppure sia utilizzare le repliche, sia dividere le tabelle.'; $strShowStatusThreads_cachedDescr = 'Il numero dei processi nella cache dei processi. L\'hit rate della cache può essere calcolato come processi_creati/connessioni. Se questo valore è rosso devi aumentare la tua thread_cache_size.'; $strShowStatusThreads_connectedDescr = 'Il numero di connessioni correntemente aperte.'; $strShowStatusThreads_createdDescr = 'Il numero di processi creati per gestire le connessioni. Se Threads_created è grosso, devi probabilmente aumentare il valore thread_cache_size. (Normalmente questo non fornisce un significatico incremento delle performace se hai una buona implementazione dei processi.)'; $strShowStatusThreads_runningDescr = 'Il numero di processi non in attesa.'; $strShowTableDimension = 'Mostra la dimensione delle tabelle'; $strShowTables = 'Mostra le tabelle'; $strShowThisQuery = 'Mostra questa query di nuovo'; $strSimplifiedChinese = 'Cinese Semplificato'; $strSingly = '(singolarmente)'; $strSize = 'Dimensione'; $strSkipQueries = 'Numero di record (query) da saltare a partire dall\'inizio'; $strSlovak = 'Slovacco'; $strSlovenian = 'Sloveno'; $strSmallBigAll = 'Piccolo/grande'; $strSnapToGrid = 'Calamita alla griglia'; $strSocketProblem = '(o il socket del server locale MySQL non è correttamente configurato)'; $strSortByKey = 'Ordina per chiave'; $strSorting = 'Ordinando'; $strSort = 'Ordinamento'; $strSpaceUsage = 'Spazio utilizzato'; $strSpanish = 'Spagnolo'; $strSplitWordsWithSpace = 'Le parole sono spezzate sulle spaziature (" ").'; $strSQLCompatibility = 'Modo di compatibilità SQL'; $strSQLExportType = 'Tipo di esportazione'; $strSQLParserBugMessage = 'C\'è la possibilità che ci sia un bug nel parser SQL. Per favore, esaminate la query accuratamente, e controllate che le virgolette siano corrette e non sbagliate. Altre possibili cause d\'errori possono essere che si stia cercando di uploadare un file binario al di fuori di un\'area di testo virgolettata. Si può anche provare la query MySQL dalla riga di comando di MySQL. L\'errore qui sotto restituito dal server MySQL, se ce n\'è uno, può anche aiutare nella diagnostica del problema. Se ci sono ancora problemi, o se il parser SQL di phpMyAdmin sbaglia quando invece l\'interfaccia a riga di comando non mostra problemi, si può ridurre la query SQL in ingresso alla singola query che causa problemi, e inviare un bug report con i dati riportati nella sezione CUT qui sotto:'; $strSQLParserUserError = 'Pare che ci sia un errore nella query SQL immessa. 
L\'errore del server MySQL mostrato qui sotto, se c\'è, può anche aiutare nella risoluzione del problema'; $strSQLQuery = 'query SQL'; $strSQLResult = 'Risultato SQL'; $strSQL = 'SQL'; $strSQPBugInvalidIdentifer = 'Identificatore Non Valido'; $strSQPBugUnclosedQuote = 'Virgolette Non Chiuse'; $strSQPBugUnknownPunctuation = 'Stringa di Punctuation Sconosciuta'; $strStandInStructureForView = 'Struttura Stand-in per le viste'; $strStatCheckTime = 'Ultimo controllo'; $strStatCreateTime = 'Creazione'; $strStatement = 'Istruzioni'; $strStatisticsOverrun = 'Su di un server sovraccarico, il contatore dei bytes potrebbe incrementarsi, e per questa ragione le statistiche riportate dal server MySQL potrebbero non essere corrette.'; $strStatUpdateTime = 'Ultimo cambiamento'; $strStatus = 'Stato'; $strStorageEngine = 'Motore di Memorizzazione'; $strStorageEngines = 'Motori di Memorizzazione'; $strStrucCSV = 'dati CSV'; $strStrucData = 'Struttura e dati'; $strStrucExcelCSV = 'CSV per dati MS Excel'; $strStrucNativeExcel = 'Dati nativi di MS Excel'; $strStrucOnly = 'Solo struttura'; $strStructPropose = 'Proponi la struttura della tabella'; $strStructureForView = 'Struttura per la vista'; $strStructure = 'Struttura'; $strSubmit = 'Invia'; $strSuccess = 'La query è stata eseguita con successo'; $strSuhosin = 'Sul server è in esecuzione Suhosin. Controlla la documentazione: %sdocumentation%s per possibili problemi.'; $strSum = 'Totali'; $strSwedish = 'Svedese'; $strSwitchToDatabase = 'Passare al Database copiato'; $strSwitchToTable = 'Passa alla tabella copiata'; $strTableAlreadyExists = 'La tabella %s esiste già!'; $strTableComments = 'Commenti sulla tabella'; $strTableEmpty = 'Il nome della tabella è vuoto!'; $strTableHasBeenDropped = 'La tabella %s è stata eliminata'; $strTableHasBeenEmptied = 'La tabella %s è stata svuotata'; $strTableHasBeenFlushed = 'La tabella %s è stata inizializzata'; $strTableIsEmpty = 'La tabella sembra essere vuota!'; $strTableMaintenance = 'Amministrazione tabella'; $strTableName = 'Nome tabella'; $strTableOfContents = 'Tabella dei contenuti'; $strTableOptions = 'Opzioni della tabella'; $strTables = '%s tabella(e)'; $strTableStructure = 'Struttura della tabella'; $strTable = 'Tabella'; $strTakeIt = 'prendilo'; $strTblPrivileges = 'Privilegi relativi alle tabelle'; $strTempData = 'Dati temporanei'; $strTextAreaLength = ' A causa della sua lunghezza,<br /> questo campo non può essere modificato '; $strThai = 'Thai'; $strThemeDefaultNotFound = 'Tema di default %s non trovato!'; $strThemeNoPreviewAvailable = 'Nessuna preview disponibile.'; $strThemeNotFound = 'Tema %s non trovato!'; $strThemeNoValidImgPath = 'Nessun percorso per le immagini per il tema %s trovato!'; $strThemePathNotFound = 'Percorso per il tema non trovato %s!'; $strTheme = 'Tema / Stile'; $strThisHost = 'Questo Host'; $strThreads = 'Processi'; $strThreadSuccessfullyKilled = 'Il thread %s è stato terminato con successo.'; $strTimeoutInfo = 'Una precedente importazione è entrata in timeout, dopo un nuovo inoltro riprenderà dalla posizione: %d.'; $strTimeoutNothingParsed = 'Nell\'ultima esecuzione nessun dato è stato processato, questo, solitamente, vuole dire che che phpMyAdmin non è in grado di ultimare l\'operazione fino a che non verrà aumentato il parametro php time limits.'; $strTimeoutPassed = 'Superato il tempo limite dello script, se vuoi finire l\'importazione inoltra nuovamente il file e il processo riprenderà.'; $strTime = 'Tempo'; $strToFromPage = 'da/per pagina'; $strToggleScratchboard = '(dis)attiva 
scratchboard'; $strToggleSmallBig = 'Cambia grande/piccolo'; $strToSelectRelation = 'Per selezionare una relazione, click :'; $strTotal = 'Totali'; $strTotalUC = 'Totale'; $strTraditionalChinese = 'Cinese Tradizionale'; $strTraditionalSpanish = 'Spagnolo tradizionale'; $strTraffic = 'Traffico'; $strTransactionCoordinator = 'Coordinatore delle transazioni'; $strTransformation_application_octetstream__download = 'Visualizza un collegamento per trasferire i dati di un campo in formato binario. La prima opzione è il nome del file binario. La seconda opzione è un nome di campo possibile di una riga della tabella che contiene il nome di schedario. Se fornite una seconda opzione dovete avere la prima opzione settata ad una stringa vuota'; $strTransformation_application_octetstream__hex = 'Mostra una rappresentazione esadecimale dei dati. Il primo parametro, opzionale, specifica ogni quanto deve essere aggiunto uno spazio (default a 2 nibbles).'; $strTransformation_image_jpeg__inline = 'Mostra un thumbnalil cliccabile; opzioni: larghezza,altezza in pixel (mantiere la proporzione iniziale)'; $strTransformation_image_jpeg__link = 'Mostra un link a questa immagine (download blob diretto, i.e.).'; $strTransformation_image_png__inline = 'Vedi immagine/jpeg: inline'; $strTransformation_text_plain__dateformat = 'Mostra i campi TIME, TIMESTAMP, DATETIME o il TIMESTAMP UNIX come data formattata. La prima opzione è l\'offset (in ore) che verrà aggiunto all\'ora (Default: 0). Usare la seconda opzione per specificare un differente formato di data/ora. La terza opzione determina se vuoi vedere l\'ora locale o UTC (usa "local" o "utc" per questo). In relazione a questo, il formato data ha differenti valori - per "local" guarda la documentazione della funzione PHP strftime(); per "utc" viene usata la funzione gmdate().'; $strTransformation_text_plain__external = 'SOLO PER LINUX: Lancia un\'applicazione esterna e riempie i dati dei campi tramite lo standard input. Restituisce lo standard output dell\'applicazione. L\'impostazione predefinita è Tidy, per stampare in maniera corretta il codice HTML. Per motivi di sicurezza, dovete editare manualmente il file libraries/transformations/text_plain__external.inc.php e inserire gli strumenti che permettete di utilizzare. La prima opzione è così il numero del programma che volete utilizzare e la seconda sono i parametri per il programma. Il terzo parametro, se impostato a 1 convertirà l\'output utilizzando htmlspecialchars() (Predefinito: 1). Un quarto parametro, se impostato a 1 inserirà un NOWRAP al contenuto della cella così che l\'intero output sarà mostrato senza essere riformattato (Predefinito: 1)'; $strTransformation_text_plain__formatted = 'Preserva l\'originale formattazione del campo. Nessun Escaping viene applicato.'; $strTransformation_text_plain__imagelink = 'Mostra un collegamento ad una immagine esterna; il campo contiene il nome del file; la prima opzione è un prefisso come "http://tuodominio.com/", la seconda opzione è la larghezza in pixel, la terza è l\'altezza.'; $strTransformation_text_plain__link = 'Mostra un collegamento, il campo contiene il nome del file; la prima opzione è un prefisso come "http://tuodominio.com/", la seconda opzione è un titolo per il collegamento.'; $strTransformation_text_plain__sql = 'Formatta il testo come query SQL con evidenziazione della sintassi.'; $strTransformation_text_plain__substr = 'Mostra soltanto una parte della stringa. 
La prima opzione è l\'offset che serve a definire dove inizia l\'output del vostro testo (Prefinito: 0). La seconda opzione è un offset che indica quanto testo viene restituito. Se vuoto, restituisce tutto il testo rimanente. La terza opzione definisce quali caratteri saranno aggiunti in fondo all\'output quando una soptto-stringa viene restituita (Predefinito: ...) .'; $strTriggers = 'Triggers'; $strTruncateQueries = 'Tronca le Query Mostrate'; $strTurkish = 'Turco'; $strType = 'Tipo'; $strUkrainian = 'Ucraino'; $strUncheckAll = 'Deseleziona tutti'; $strUnicode = 'Unicode'; $strUnique = 'Unica'; $strUnknown = 'sconosciuto'; $strUnselectAll = 'Deseleziona Tutto'; $strUnsupportedCompressionDetected = 'Stai cercando di importare un file con un tipo di compressione non supportato. Altrimenti il supporto per questo tipo di compressione non è stato ancora implementato o è stato disabilitato dalla tua configurazione.'; $strUpdatePrivMessage = 'Hai aggiornato i permessi per %s.'; $strUpdateProfileMessage = 'Il profilo è stato aggiornato.'; $strUpdateQuery = 'Aggiorna Query'; $strUpdComTab = 'Prego leggere la documentazione su come aggiornare la vostra tabella Column_comments'; $strUpgrade = 'Si dovrebbe aggiornare %s alla versione %s o successiva.'; $strUploadErrorCantWrite = 'Non riesco a scrivere il file su disco.'; $strUploadErrorExtension = 'Caricamento del file interrotto per estensione errata.'; $strUploadErrorFormSize = 'Il file caricato eccede il parametro MAX_FILE_SIZE specificato nel form HTML.'; $strUploadErrorIniSize = 'Il file caricato eccede il parametro upload_max_filesize in php.ini.'; $strUploadErrorNoTempDir = 'Non trovo la cartella temporanea.'; $strUploadErrorPartial = 'Il file è stato solo parzialmente caricato.'; $strUploadErrorUnknown = 'Errore sconosciuto nel caricamento del file.'; $strUploadLimit = 'Stai probabilmente cercando di uplodare un file troppo grosso. Fai riferimento alla documentazione %sdocumentation%s Per i modi di aggirare questo limite.'; $strUploadsNotAllowed = 'Non è permesso l\'upload dei file su questo server.'; $strUsage = 'Utilizzo'; $strUseBackquotes = 'Usa i backquotes con i nomi delle tabelle e dei campi'; $strUsedPhpExtensions = 'Estensioni PHP usate'; $strUseHostTable = 'Utilizza la Tabella dell\'Host'; $strUserAlreadyExists = 'L\'utente %s esiste già!'; $strUserEmpty = 'Il nome utente è vuoto!'; $strUserName = 'Nome utente'; $strUserNotFound = 'L\'utente selezionato non è stato trovato nella tabella dei privilegi.'; $strUserOverview = 'Vista d\'insieme dell\'utente'; $strUsersDeleted = 'Gli utenti selezionati sono stati cancellati con successo.'; $strUsersHavingAccessToDb = 'Utenti che hanno accesso a &quot;%s&quot;'; $strUser = 'Utente'; $strUseTabKey = 'Usare il tasto TAB per spostare il cursore di valore in valore, o CTRL+frecce per spostarlo altrove'; $strUseTables = 'Utilizza tabelle'; $strUseTextField = 'Utilizza campo text'; $strUseThisValue = 'Usa questa opzione'; $strValidateSQL = 'Valida SQL'; $strValidatorError = 'L\' SQL validator non può essere inizializzato. 
Prego controllare di avere installato le estensioni php necessarie come descritto nella %sdocumentazione%s.'; $strValue = 'Valore'; $strVar = 'Variabile'; $strVersionInformation = 'Informazioni sulla versione'; $strViewDumpDatabases = 'Visualizza il dump (schema) dei databases'; $strViewDumpDB = 'Visualizza dump (schema) del database'; $strViewDump = 'Visualizza dump (schema) della tabella'; $strViewHasBeenDropped = 'La vista %s è stata eliminata'; $strViewMaxExactCount = 'Questa vista ha più di %d righe. Per informazioni fare riferimento a %sdocumentation%s.'; $strViewName = 'Nome VISTA'; $strView = 'Vista'; $strWebServerUploadDirectory = 'directory di upload del web-server'; $strWebServerUploadDirectoryError = 'La directory impostata per l\'upload non può essere trovata'; $strWelcome = 'Benvenuto in %s'; $strWestEuropean = 'Europeo Occidentale'; $strWildcard = 'wildcard'; $strWindowNotFound = 'La finestra destinataria del browser non può essere aggiornata. Può darsi che sia stata chiusa la finestra madre o che il vostro browser stia bloccando gli aggiornamenti fra browsers a causa di qualche impostazione di sicurezza'; $strWithChecked = 'Se selezionati:'; $strWriteRequests = 'Richieste di scrittura'; $strWrongUser = 'Nome utente o password errati. Accesso negato.'; $strXML = 'XML'; $strYes = 'Sì'; $strZeroRemovesTheLimit = 'N.B.: 0 (zero) significa nessun limite.'; $strZip = '"compresso con zip"'; ?>
mwhitlaw/openemr
phpmyadmin/lang/italian-iso-8859-1.inc.php
PHP
gpl-2.0
70,896
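As a concrete reading of the text_plain__substr options described in the record above (the sample value is made up for illustration): with offset 0, length 10 and the default suffix, a stored value of 'abcdefghijklmnop' would be displayed as 'abcdefghij...'.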
/* Copyright (c) 2003, 2005 MySQL AB Use is subject to license terms This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; version 2 of the License. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA */ /** * @file SQLTransactTest.cpp */ #include <common.hpp> #define STR_MESSAGE_LENGTH 200 #define STR_NAME_LEN 20 #define STR_PHONE_LEN 20 #define STR_ADDRESS_LEN 20 using namespace std; SQLHDBC STR_hdbc; SQLHSTMT STR_hstmt; SQLHENV STR_henv; SQLHDESC STR_hdesc; void Transact_DisplayError(SQLSMALLINT STR_HandleType, SQLHSTMT STR_InputHandle); int STR_Display_Result(SQLHSTMT EXDR_InputHandle); /** * Test: * -#Test to request a commit or a rollback operation for * all active transactions associated with a specific * environment or connection handle * * @return Zero, if test succeeded */ int SQLTransactTest() { SQLRETURN STR_ret; ndbout << endl << "Start SQLTransact Testing" << endl; //************************************ //** Allocate An Environment Handle ** //************************************ STR_ret = SQLAllocHandle(SQL_HANDLE_ENV, SQL_NULL_HANDLE, &STR_henv); if (STR_ret == SQL_SUCCESS || STR_ret == SQL_SUCCESS_WITH_INFO) ndbout << "Allocated an environment Handle!" << endl; //********************************************* //** Set the ODBC application Version to 3.x ** //********************************************* STR_ret = SQLSetEnvAttr(STR_henv, SQL_ATTR_ODBC_VERSION, (SQLPOINTER) SQL_OV_ODBC3, SQL_IS_UINTEGER); if (STR_ret == SQL_SUCCESS || STR_ret == SQL_SUCCESS_WITH_INFO) ndbout << "Set the ODBC application Version to 3.x!" << endl; //********************************** //** Allocate A Connection Handle ** //********************************** STR_ret = SQLAllocHandle(SQL_HANDLE_DBC, STR_henv, &STR_hdbc); if (STR_ret == SQL_SUCCESS || STR_ret == SQL_SUCCESS_WITH_INFO) ndbout << "Allocated a connection Handle!" << endl; // ******************* // ** Connect to DB ** // ******************* STR_ret = SQLConnect(STR_hdbc, (SQLCHAR *) connectString(), SQL_NTS, (SQLCHAR *) "", SQL_NTS, (SQLCHAR *) "", SQL_NTS); if (STR_ret == SQL_SUCCESS || STR_ret == SQL_SUCCESS_WITH_INFO) ndbout << "Connected to DB : OK!" << endl; else { ndbout << "Failure to Connect DB!" << endl; return NDBT_FAILED; } //******************************* //** Allocate statement handle ** //******************************* STR_ret = SQLAllocHandle(SQL_HANDLE_STMT, STR_hdbc, &STR_hstmt); if(STR_ret == SQL_SUCCESS || STR_ret == SQL_SUCCESS_WITH_INFO) ndbout << "Allocated a statement handle!" 
<< endl; //******************************** //** Turn Manual-Commit Mode On ** //******************************** STR_ret = SQLSetConnectOption(STR_hdbc, SQL_AUTOCOMMIT, (UDWORD) SQL_AUTOCOMMIT_OFF); //********************************************** //** Prepare and Execute a prepared statement ** //********************************************** STR_ret = SQLExecDirect(STR_hstmt, (SQLCHAR*)"SELECT * FROM Customers", SQL_NTS); if (STR_ret == SQL_INVALID_HANDLE) { ndbout << "Handle Type is SQL_HANDLE_STMT, but SQL_INVALID_HANDLE" << endl; ndbout << "still appeared. Please check program" << endl; } if (STR_ret == SQL_ERROR || STR_ret == SQL_SUCCESS_WITH_INFO) Transact_DisplayError(SQL_HANDLE_STMT, STR_hstmt); //************************* //** Display the results ** //************************* STR_Display_Result(STR_hstmt); //**************************** //** Commit the transaction ** //**************************** STR_ret = SQLTransact(STR_henv, STR_hdbc, SQL_COMMIT); //**************** // Free Handles ** //**************** SQLDisconnect(STR_hdbc); SQLFreeHandle(SQL_HANDLE_STMT, STR_hstmt); SQLFreeHandle(SQL_HANDLE_DBC, STR_hdbc); SQLFreeHandle(SQL_HANDLE_ENV, STR_henv); return NDBT_OK; } void Transact_DisplayError(SQLSMALLINT STR_HandleType, SQLHSTMT STR_InputHandle) { SQLCHAR STR_Sqlstate[5]; SQLINTEGER STR_NativeError; SQLSMALLINT STR_i, STR_MsgLen; SQLCHAR STR_Msg[STR_MESSAGE_LENGTH]; SQLRETURN SQLSTATEs; STR_i = 1; ndbout << "-------------------------------------------------" << endl; ndbout << "Error diagnostics:" << endl; while ((SQLSTATEs = SQLGetDiagRec(STR_HandleType, STR_InputHandle, STR_i, STR_Sqlstate, &STR_NativeError, STR_Msg, sizeof(STR_Msg), &STR_MsgLen)) != SQL_NO_DATA) { ndbout << "the HandleType is:" << STR_HandleType << endl; ndbout << "the InputHandle is :" << (long)STR_InputHandle << endl; ndbout << "the STR_Msg is: " << (char *) STR_Msg << endl; ndbout << "the output state is:" << (char *)STR_Sqlstate << endl; STR_i ++; // break; } ndbout << "-------------------------------------------------" << endl; } int STR_Display_Result(SQLHSTMT STR_InputHandle) { SQLRETURN STR_retcode; unsigned long STR_CustID; SQLCHAR STR_Name[STR_NAME_LEN], STR_Phone[STR_PHONE_LEN]; SQLCHAR STR_Address[STR_ADDRESS_LEN]; //********************* //** Bind columns 1 ** //********************* STR_retcode =SQLBindCol(STR_InputHandle, 1, SQL_C_ULONG, &STR_CustID, sizeof(STR_CustID), NULL); if (STR_retcode == SQL_ERROR) { ndbout << "Executing SQLBindCol, SQL_ERROR happened!" << endl; Transact_DisplayError(SQL_HANDLE_STMT, STR_InputHandle); return NDBT_FAILED; } //********************* //** Bind columns 2 ** //********************* STR_retcode =SQLBindCol(STR_InputHandle, 2, SQL_C_CHAR, &STR_Name, STR_NAME_LEN, NULL); if (STR_retcode == SQL_ERROR) { ndbout << "Executing SQLBindCol, SQL_ERROR happened!" << endl; Transact_DisplayError(SQL_HANDLE_STMT, STR_InputHandle); return NDBT_FAILED; } //********************* //** Bind columns 3 ** //********************* STR_retcode = SQLBindCol(STR_InputHandle, 3, SQL_C_CHAR, &STR_Address, STR_ADDRESS_LEN, NULL); if (STR_retcode == SQL_ERROR) { ndbout << "Executing SQLBindCol, SQL_ERROR happened!" << endl; Transact_DisplayError(SQL_HANDLE_STMT, STR_InputHandle); return NDBT_FAILED; } //********************* //** Bind columns 4 ** //********************* STR_retcode = SQLBindCol(STR_InputHandle, 4, SQL_C_CHAR, &STR_Phone, STR_PHONE_LEN, NULL); if (STR_retcode == SQL_ERROR) { ndbout << "Executing SQLBindCol, SQL_ERROR happened!" 
<< endl; Transact_DisplayError(SQL_HANDLE_STMT, STR_InputHandle); return NDBT_FAILED; } //***************************************** //* Fetch and print each row of data. On ** //* an error, display a message and exit ** //***************************************** if (STR_retcode != SQL_ERROR) STR_retcode = SQLFetch(STR_InputHandle); ndbout << endl << "STR_retcode = SQLFetch(STR_InputHandle) = " << STR_retcode << endl; if (STR_retcode == SQL_ERROR) { ndbout << "Executing SQLFetch, SQL_ERROR happened!" << endl; Transact_DisplayError(SQL_HANDLE_STMT, STR_InputHandle); return NDBT_FAILED; } else if (STR_retcode == SQL_SUCCESS_WITH_INFO) { ndbout << "CustID = " << (int)STR_CustID << endl; ndbout << "Name = " << (char *)STR_Name << endl; ndbout << "Address = " << (char *)STR_Address << endl; ndbout << "Phone = " << (char *)STR_Phone << endl; Transact_DisplayError(SQL_HANDLE_STMT, STR_InputHandle); } else { ndbout << "CustID = " << (int)STR_CustID << endl; ndbout << "Name = " << (char *)STR_Name << endl; ndbout << "Address = " << (char *)STR_Address << endl; ndbout << "Phone = " << (char *)STR_Phone << endl; } return 0; }
fengshao0907/mysql
storage/ndb/test/odbc/client/SQLTransactTest.cpp
C++
gpl-2.0
8,798
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.chaos.actions; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.chaos.monkies.PolicyBasedChaosMonkey; /** * Action that restarts a random HRegionServer */ public class RestartRandomRsAction extends RestartActionBaseAction { public RestartRandomRsAction(long sleepTime) { super(sleepTime); } @Override public void perform() throws Exception { LOG.info("Performing action: Restart random region server"); ServerName server = PolicyBasedChaosMonkey.selectRandomItem(getCurrentServers()); restartRs(server, sleepTime); } }
Guavus/hbase
hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RestartRandomRsAction.java
Java
apache-2.0
1,416
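The chaos action above is deliberately thin: RestartActionBaseAction provides the restart/sleep plumbing and PolicyBasedChaosMonkey picks the victim. As a minimal sketch of a sibling action (the class name is hypothetical and not part of the repository; it reuses only members already visible above, namely getCurrentServers(), selectRandomItem(), restartRs() and the inherited LOG and sleepTime):

package org.apache.hadoop.hbase.chaos.actions;

import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.chaos.monkies.PolicyBasedChaosMonkey;

/**
 * Hypothetical action that restarts two randomly chosen region servers in sequence.
 */
public class RestartTwoRandomRsAction extends RestartActionBaseAction {
  public RestartTwoRandomRsAction(long sleepTime) {
    super(sleepTime);
  }

  @Override
  public void perform() throws Exception {
    LOG.info("Performing action: Restart two random region servers");
    for (int i = 0; i < 2; i++) {
      // The same server may be selected twice; that is acceptable for a chaos test,
      // so no de-duplication is attempted.
      ServerName server = PolicyBasedChaosMonkey.selectRandomItem(getCurrentServers());
      restartRs(server, sleepTime);
    }
  }
}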
// Copyright JS Foundation and other contributors, http://js.foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

var a = 20;
var b = a >> "2";
assert(b == 5)
slaff/jerryscript
tests/jerry-test-suite/11/11.07/11.07.02/11.07.02-003.js
JavaScript
apache-2.0
674
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0 and the Server Side Public License, v 1; you may not use this file except * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ package org.elasticsearch.example.customsettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import static org.elasticsearch.example.customsettings.ExampleCustomSettingsConfig.VALIDATED_SETTING; /** * {@link ExampleCustomSettingsConfigTests} is a unit test class for {@link ExampleCustomSettingsConfig}. * <p> * It's a JUnit test class that extends {@link ESTestCase} which provides useful methods for testing. * <p> * The tests can be executed in the IDE or using the command: ./gradlew :example-plugins:custom-settings:test */ public class ExampleCustomSettingsConfigTests extends ESTestCase { public void testValidatedSetting() { final String expected = randomAlphaOfLengthBetween(1, 5); final String actual = VALIDATED_SETTING.get(Settings.builder().put(VALIDATED_SETTING.getKey(), expected).build()); assertEquals(expected, actual); final IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> VALIDATED_SETTING.get(Settings.builder().put("custom.validated", "it's forbidden").build())); assertEquals("Setting must not contain [forbidden]", exception.getMessage()); } }
GlenRSmith/elasticsearch
plugins/examples/custom-settings/src/test/java/org/elasticsearch/example/customsettings/ExampleCustomSettingsConfigTests.java
Java
apache-2.0
1,583
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ /** * Contains the action definitions for SLM. For the transport and rest action implementations, please see the {@code ilm} module's * {@code org.elasticsearch.xpack.slm} package. */ package org.elasticsearch.xpack.core.slm.action;
GlenRSmith/elasticsearch
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/package-info.java
Java
apache-2.0
490
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "base/basictypes.h" #include "base/strings/stringprintf.h" #include "chrome/browser/sync/profile_sync_service_harness.h" #include "chrome/browser/sync/test/integration/performance/sync_timing_helper.h" #include "chrome/browser/sync/test/integration/sync_test.h" #include "chrome/browser/sync/test/integration/typed_urls_helper.h" #include "sync/sessions/sync_session_context.h" using typed_urls_helper::AddUrlToHistory; using typed_urls_helper::AssertAllProfilesHaveSameURLsAsVerifier; using typed_urls_helper::DeleteUrlsFromHistory; using typed_urls_helper::GetTypedUrlsFromClient; // This number should be as far away from a multiple of // kDefaultMaxCommitBatchSize as possible, so that sync cycle counts // for batch operations stay the same even if some batches end up not // being completely full. static const int kNumUrls = 163; // This compile assert basically asserts that kNumUrls is right in the // middle between two multiples of kDefaultMaxCommitBatchSize. COMPILE_ASSERT( ((kNumUrls % syncer::kDefaultMaxCommitBatchSize) >= (syncer::kDefaultMaxCommitBatchSize / 2)) && ((kNumUrls % syncer::kDefaultMaxCommitBatchSize) <= ((syncer::kDefaultMaxCommitBatchSize + 1) / 2)), kNumUrlsShouldBeBetweenTwoMultiplesOfkDefaultMaxCommitBatchSize); class TypedUrlsSyncPerfTest : public SyncTest { public: TypedUrlsSyncPerfTest() : SyncTest(TWO_CLIENT), url_number_(0) {} // Adds |num_urls| new unique typed urls to |profile|. void AddURLs(int profile, int num_urls); // Update all typed urls in |profile| by visiting them once again. void UpdateURLs(int profile); // Removes all typed urls for |profile|. void RemoveURLs(int profile); // Returns the number of typed urls stored in |profile|. int GetURLCount(int profile); private: // Returns a new unique typed URL. GURL NextURL(); // Returns a unique URL according to the integer |n|. GURL IntToURL(int n); int url_number_; DISALLOW_COPY_AND_ASSIGN(TypedUrlsSyncPerfTest); }; void TypedUrlsSyncPerfTest::AddURLs(int profile, int num_urls) { for (int i = 0; i < num_urls; ++i) { AddUrlToHistory(profile, NextURL()); } } void TypedUrlsSyncPerfTest::UpdateURLs(int profile) { history::URLRows urls = GetTypedUrlsFromClient(profile); for (history::URLRows::const_iterator it = urls.begin(); it != urls.end(); ++it) { AddUrlToHistory(profile, it->url()); } } void TypedUrlsSyncPerfTest::RemoveURLs(int profile) { const history::URLRows& urls = GetTypedUrlsFromClient(profile); std::vector<GURL> gurls; for (history::URLRows::const_iterator it = urls.begin(); it != urls.end(); ++it) { gurls.push_back(it->url()); } DeleteUrlsFromHistory(profile, gurls); } int TypedUrlsSyncPerfTest::GetURLCount(int profile) { return GetTypedUrlsFromClient(profile).size(); } GURL TypedUrlsSyncPerfTest::NextURL() { return IntToURL(url_number_++); } GURL TypedUrlsSyncPerfTest::IntToURL(int n) { return GURL(base::StringPrintf("http://history%d.google.com/", n)); } IN_PROC_BROWSER_TEST_F(TypedUrlsSyncPerfTest, P0) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; // TCM ID - 7985716. AddURLs(0, kNumUrls); base::TimeDelta dt = SyncTimingHelper::TimeMutualSyncCycle(GetClient(0), GetClient(1)); ASSERT_EQ(kNumUrls, GetURLCount(1)); SyncTimingHelper::PrintResult("typed_urls", "add_typed_urls", dt); // TCM ID - 7981755. 
UpdateURLs(0); dt = SyncTimingHelper::TimeMutualSyncCycle(GetClient(0), GetClient(1)); ASSERT_EQ(kNumUrls, GetURLCount(1)); SyncTimingHelper::PrintResult("typed_urls", "update_typed_urls", dt); // TCM ID - 7651271. RemoveURLs(0); dt = SyncTimingHelper::TimeMutualSyncCycle(GetClient(0), GetClient(1)); ASSERT_EQ(0, GetURLCount(1)); SyncTimingHelper::PrintResult("typed_urls", "delete_typed_urls", dt); }
espadrine/opera
chromium/src/chrome/browser/sync/test/integration/performance/typed_urls_sync_perf_test.cc
C++
bsd-3-clause
4,017
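To make the COMPILE_ASSERT in the test above concrete: kDefaultMaxCommitBatchSize is defined elsewhere in the sync engine and is not shown in this file, but assuming a batch size of 25 purely for illustration, 163 % 25 = 13, which lies between 25 / 2 = 12 and (25 + 1) / 2 = 13, so kNumUrls sits as far as possible from a multiple of the batch size. The 163 URLs then commit as six full batches plus one roughly half-full batch, and the sync cycle count stays the same even when the last batch is not completely full.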
<?php namespace Sabre\VObject\Recur\EventIterator; use DateTime; use DateTimeZone; use Sabre\VObject\Reader; class ExpandFloatingTimesTest extends \PHPUnit_Framework_TestCase { use \Sabre\VObject\PHPUnitAssertions; function testExpand() { $input = <<<ICS BEGIN:VCALENDAR VERSION:2.0 BEGIN:VEVENT UID:foo DTSTART:20150109T090000 DTEND:20150109T100000 RRULE:FREQ=WEEKLY;INTERVAL=1;UNTIL=20191002T070000Z;BYDAY=FR END:VEVENT END:VCALENDAR ICS; $vcal = Reader::read($input); $this->assertInstanceOf('Sabre\\VObject\\Component\\VCalendar', $vcal); $vcal = $vcal->expand(new DateTime('2015-01-01'), new DateTime('2015-01-31')); $output = <<<ICS BEGIN:VCALENDAR VERSION:2.0 BEGIN:VEVENT UID:foo DTSTART:20150109T090000Z DTEND:20150109T100000Z RECURRENCE-ID:20150109T090000Z END:VEVENT BEGIN:VEVENT UID:foo DTSTART:20150116T090000Z DTEND:20150116T100000Z RECURRENCE-ID:20150116T090000Z END:VEVENT BEGIN:VEVENT UID:foo DTSTART:20150123T090000Z DTEND:20150123T100000Z RECURRENCE-ID:20150123T090000Z END:VEVENT BEGIN:VEVENT UID:foo DTSTART:20150130T090000Z DTEND:20150130T100000Z RECURRENCE-ID:20150130T090000Z END:VEVENT END:VCALENDAR ICS; $this->assertVObjectEqualsVObject($output, $vcal); } function testExpandWithReferenceTimezone() { $input = <<<ICS BEGIN:VCALENDAR VERSION:2.0 BEGIN:VEVENT UID:foo DTSTART:20150109T090000 DTEND:20150109T100000 RRULE:FREQ=WEEKLY;INTERVAL=1;UNTIL=20191002T070000Z;BYDAY=FR END:VEVENT END:VCALENDAR ICS; $vcal = Reader::read($input); $this->assertInstanceOf('Sabre\\VObject\\Component\\VCalendar', $vcal); $vcal = $vcal->expand( new DateTime('2015-01-01'), new DateTime('2015-01-31'), new DateTimeZone('Europe/Berlin') ); $output = <<<ICS BEGIN:VCALENDAR VERSION:2.0 BEGIN:VEVENT UID:foo DTSTART:20150109T080000Z DTEND:20150109T090000Z RECURRENCE-ID:20150109T080000Z END:VEVENT BEGIN:VEVENT UID:foo DTSTART:20150116T080000Z DTEND:20150116T090000Z RECURRENCE-ID:20150116T080000Z END:VEVENT BEGIN:VEVENT UID:foo DTSTART:20150123T080000Z DTEND:20150123T090000Z RECURRENCE-ID:20150123T080000Z END:VEVENT BEGIN:VEVENT UID:foo DTSTART:20150130T080000Z DTEND:20150130T090000Z RECURRENCE-ID:20150130T080000Z END:VEVENT END:VCALENDAR ICS; $this->assertVObjectEqualsVObject($output, $vcal); } }
ArcherSys/ArcherSys
vendor/sabre/vobject/tests/VObject/Recur/EventIterator/ExpandFloatingTimesTest.php
PHP
mit
2,389
/*
Copyright 2017 The Kubernetes Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package v1

const (
    // When kubelet is started with the "external" cloud provider, then
    // it sets this annotation on the node to denote an ip address set from the
    // cmd line flag. This ip is verified with the cloudprovider as valid by
    // the cloud-controller-manager
    AnnotationProvidedIPAddr = "alpha.kubernetes.io/provided-node-ip"
)
pragkent/aliyun-disk
vendor/k8s.io/kubernetes/staging/src/k8s.io/apimachinery/pkg/apis/meta/v1/well_known_annotations.go
GO
mit
913
import React from 'react'; import pure from 'recompose/pure'; import SvgIcon from '../../SvgIcon'; let PlacesRvHookup = (props) => ( <SvgIcon {...props}> <path d="M20 17v-6c0-1.1-.9-2-2-2H7V7l-3 3 3 3v-2h4v3H4v3c0 1.1.9 2 2 2h2c0 1.66 1.34 3 3 3s3-1.34 3-3h8v-2h-2zm-9 3c-.55 0-1-.45-1-1s.45-1 1-1 1 .45 1 1-.45 1-1 1zm7-6h-4v-3h4v3zM17 2v2H9v2h8v2l3-3z"/> </SvgIcon> ); PlacesRvHookup = pure(PlacesRvHookup); PlacesRvHookup.displayName = 'PlacesRvHookup'; PlacesRvHookup.muiName = 'SvgIcon'; export default PlacesRvHookup;
ichiohta/material-ui
src/svg-icons/places/rv-hookup.js
JavaScript
mit
534
/** * @abstract * @class Ext.chart.series.Cartesian * @extends Ext.chart.series.Series * * Common base class for series implementations which plot values using x/y coordinates. * * @constructor */ Ext.define('Ext.chart.series.Cartesian', { extend: 'Ext.chart.series.Series', config: { /** * The field used to access the x axis value from the items from the data * source. * * @cfg {String} xField */ xField: null, /** * The field used to access the y-axis value from the items from the data * source. * * @cfg {String} yField */ yField: null, /** * @cfg {Ext.chart.axis.Axis} xAxis The chart axis bound to the series on the x-axis. */ xAxis: null, /** * @cfg {Ext.chart.axis.Axis} yAxis The chart axis bound to the series on the y-axis. */ yAxis: null }, directions: ['X', 'Y'], fieldCategoryX: ['X'], fieldCategoryY: ['Y'], updateXAxis: function (axis) { axis.processData(this); }, updateYAxis: function (axis) { axis.processData(this); }, coordinateX: function () { return this.coordinate('X', 0, 2); }, coordinateY: function () { return this.coordinate('Y', 1, 2); }, getItemForPoint: function (x, y) { if (this.getSprites()) { var me = this, sprite = me.getSprites()[0], store = me.getStore(), item; if(me.getHidden()) { return null; } if (sprite) { var index = sprite.getIndexNearPoint(x, y); if (index !== -1) { item = { series: this, category: this.getItemInstancing() ? 'items' : 'markers', index: index, record: store.getData().items[index], field: this.getYField(), sprite: sprite }; return item; } } } }, createSprite: function () { var sprite = this.callSuper(), xAxis = this.getXAxis(); sprite.setFlipXY(this.getChart().getFlipXY()); if (sprite.setAggregator && xAxis && xAxis.getAggregator) { if (xAxis.getAggregator) { sprite.setAggregator({strategy: xAxis.getAggregator()}); } else { sprite.setAggregator({}); } } return sprite; }, getSprites: function () { var me = this, chart = this.getChart(), animation = chart && chart.getAnimate(), itemInstancing = me.getItemInstancing(), sprites = me.sprites, sprite; if (!chart) { return []; } if (!sprites.length) { sprite = me.createSprite(); } else { sprite = sprites[0]; } if (animation) { me.getLabel().getTemplate().fx.setConfig(animation); if (itemInstancing) { sprite.itemsMarker.getTemplate().fx.setConfig(animation); } sprite.fx.setConfig(animation); } return sprites; }, provideLegendInfo: function (target) { var style = this.getStyle(); target.push({ name: this.getTitle() || this.getYField() || this.getId(), mark: style.fillStyle || style.strokeStyle || 'black', disabled: false, series: this.getId(), index: 0 }); }, getXRange: function () { return [this.dataRange[0], this.dataRange[2]]; }, getYRange: function () { return [this.dataRange[1], this.dataRange[3]]; } }) ;
DawidMyslak/native-vs-html5_android-performance
www/TakePhoto/touch/src/chart/series/Cartesian.js
JavaScript
mit
3,929
package cloudhsm import ( "github.com/awslabs/aws-sdk-go/aws" "github.com/awslabs/aws-sdk-go/internal/protocol/jsonrpc" "github.com/awslabs/aws-sdk-go/internal/signer/v4" ) // CloudHSM is a client for CloudHSM. type CloudHSM struct { *aws.Service } // Used for custom service initialization logic var initService func(*aws.Service) // Used for custom request initialization logic var initRequest func(*aws.Request) // New returns a new CloudHSM client. func New(config *aws.Config) *CloudHSM { if config == nil { config = &aws.Config{} } service := &aws.Service{ Config: aws.DefaultConfig.Merge(config), ServiceName: "cloudhsm", APIVersion: "2014-05-30", JSONVersion: "1.1", TargetPrefix: "CloudHsmFrontendService", } service.Initialize() // Handlers service.Handlers.Sign.PushBack(v4.Sign) service.Handlers.Build.PushBack(jsonrpc.Build) service.Handlers.Unmarshal.PushBack(jsonrpc.Unmarshal) service.Handlers.UnmarshalMeta.PushBack(jsonrpc.UnmarshalMeta) service.Handlers.UnmarshalError.PushBack(jsonrpc.UnmarshalError) // Run custom service initialization if present if initService != nil { initService(service) } return &CloudHSM{service} } // newRequest creates a new request for a CloudHSM operation and runs any // custom request initialization. func (c *CloudHSM) newRequest(op *aws.Operation, params, data interface{}) *aws.Request { req := aws.NewRequest(c.Service, op, params, data) // Run custom request initialization if present if initRequest != nil { initRequest(req) } return req }
askreet/howiroll
vendor/src/github.com/awslabs/aws-sdk-go/service/cloudhsm/service.go
GO
mit
1,559
package registry import ( "fmt" "sort" "strings" "text/tabwriter" "golang.org/x/net/context" "github.com/docker/docker/api/types" registrytypes "github.com/docker/docker/api/types/registry" "github.com/docker/docker/cli" "github.com/docker/docker/cli/command" "github.com/docker/docker/opts" "github.com/docker/docker/pkg/stringutils" "github.com/docker/docker/registry" "github.com/spf13/cobra" ) type searchOptions struct { term string noTrunc bool limit int filter opts.FilterOpt // Deprecated stars uint automated bool } // NewSearchCommand creates a new `docker search` command func NewSearchCommand(dockerCli *command.DockerCli) *cobra.Command { opts := searchOptions{filter: opts.NewFilterOpt()} cmd := &cobra.Command{ Use: "search [OPTIONS] TERM", Short: "Search the Docker Hub for images", Args: cli.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { opts.term = args[0] return runSearch(dockerCli, opts) }, } flags := cmd.Flags() flags.BoolVar(&opts.noTrunc, "no-trunc", false, "Don't truncate output") flags.VarP(&opts.filter, "filter", "f", "Filter output based on conditions provided") flags.IntVar(&opts.limit, "limit", registry.DefaultSearchLimit, "Max number of search results") flags.BoolVar(&opts.automated, "automated", false, "Only show automated builds") flags.UintVarP(&opts.stars, "stars", "s", 0, "Only displays with at least x stars") flags.MarkDeprecated("automated", "use --filter=is-automated=true instead") flags.MarkDeprecated("stars", "use --filter=stars=3 instead") return cmd } func runSearch(dockerCli *command.DockerCli, opts searchOptions) error { indexInfo, err := registry.ParseSearchIndexInfo(opts.term) if err != nil { return err } ctx := context.Background() authConfig := command.ResolveAuthConfig(ctx, dockerCli, indexInfo) requestPrivilege := command.RegistryAuthenticationPrivilegedFunc(dockerCli, indexInfo, "search") encodedAuth, err := command.EncodeAuthToBase64(authConfig) if err != nil { return err } options := types.ImageSearchOptions{ RegistryAuth: encodedAuth, PrivilegeFunc: requestPrivilege, Filters: opts.filter.Value(), Limit: opts.limit, } clnt := dockerCli.Client() unorderedResults, err := clnt.ImageSearch(ctx, opts.term, options) if err != nil { return err } results := searchResultsByStars(unorderedResults) sort.Sort(results) w := tabwriter.NewWriter(dockerCli.Out(), 10, 1, 3, ' ', 0) fmt.Fprintf(w, "NAME\tDESCRIPTION\tSTARS\tOFFICIAL\tAUTOMATED\n") for _, res := range results { // --automated and -s, --stars are deprecated since Docker 1.12 if (opts.automated && !res.IsAutomated) || (int(opts.stars) > res.StarCount) { continue } desc := strings.Replace(res.Description, "\n", " ", -1) desc = strings.Replace(desc, "\r", " ", -1) if !opts.noTrunc { desc = stringutils.Ellipsis(desc, 45) } fmt.Fprintf(w, "%s\t%s\t%d\t", res.Name, desc, res.StarCount) if res.IsOfficial { fmt.Fprint(w, "[OK]") } fmt.Fprint(w, "\t") if res.IsAutomated { fmt.Fprint(w, "[OK]") } fmt.Fprint(w, "\n") } w.Flush() return nil } // searchResultsByStars sorts search results in descending order by number of stars. type searchResultsByStars []registrytypes.SearchResult func (r searchResultsByStars) Len() int { return len(r) } func (r searchResultsByStars) Swap(i, j int) { r[i], r[j] = r[j], r[i] } func (r searchResultsByStars) Less(i, j int) bool { return r[j].StarCount < r[i].StarCount }
Originate/exosphere
vendor/github.com/moby/moby/cli/command/registry/search.go
GO
mit
3,550
/* * Copyright (C) 2008-2017 TrinityCore <http://www.trinitycore.org/> * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by the * Free Software Foundation; either version 2 of the License, or (at your * option) any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for * more details. * * You should have received a copy of the GNU General Public License along * with this program. If not, see <http://www.gnu.org/licenses/>. */ #include "ScriptMgr.h" #include "ScriptedCreature.h" #include "blood_furnace.h" enum Yells { SAY_AGGRO = 0, SAY_KILL = 1, SAY_DIE = 2 }; enum Spells { SPELL_ACID_SPRAY = 38153, SPELL_EXPLODING_BREAKER = 30925, SPELL_KNOCKDOWN = 20276, SPELL_DOMINATION = 25772 }; enum Events { EVENT_ACID_SPRAY = 1, EVENT_EXPLODING_BREAKER, EVENT_DOMINATION, EVENT_KNOCKDOWN }; class boss_the_maker : public CreatureScript { public: boss_the_maker() : CreatureScript("boss_the_maker") { } struct boss_the_makerAI : public BossAI { boss_the_makerAI(Creature* creature) : BossAI(creature, DATA_THE_MAKER) { } void EnterCombat(Unit* /*who*/) override { _EnterCombat(); Talk(SAY_AGGRO); events.ScheduleEvent(EVENT_ACID_SPRAY, 15000); events.ScheduleEvent(EVENT_EXPLODING_BREAKER, 6000); events.ScheduleEvent(EVENT_DOMINATION, 120000); events.ScheduleEvent(EVENT_KNOCKDOWN, 10000); } void KilledUnit(Unit* who) override { if (who->GetTypeId() == TYPEID_PLAYER) Talk(SAY_KILL); } void JustDied(Unit* /*killer*/) override { _JustDied(); Talk(SAY_DIE); } void ExecuteEvent(uint32 eventId) override { switch (eventId) { case EVENT_ACID_SPRAY: DoCastVictim(SPELL_ACID_SPRAY); events.ScheduleEvent(EVENT_ACID_SPRAY, urand(15000, 23000)); break; case EVENT_EXPLODING_BREAKER: if (Unit* target = SelectTarget(SELECT_TARGET_RANDOM, 0, 30.0f, true)) DoCast(target, SPELL_EXPLODING_BREAKER); events.ScheduleEvent(EVENT_EXPLODING_BREAKER, urand(4000, 12000)); break; case EVENT_DOMINATION: if (Unit* target = SelectTarget(SELECT_TARGET_RANDOM, 0, 0.0f, true)) DoCast(target, SPELL_DOMINATION); events.ScheduleEvent(EVENT_DOMINATION, 120000); break; case EVENT_KNOCKDOWN: DoCastVictim(SPELL_KNOCKDOWN); events.ScheduleEvent(EVENT_KNOCKDOWN, urand(4000, 12000)); break; default: break; } } }; CreatureAI* GetAI(Creature* creature) const override { return GetBloodFurnaceAI<boss_the_makerAI>(creature); } }; void AddSC_boss_the_maker() { new boss_the_maker(); }
Keldo/TrinityCore
src/server/scripts/Outland/HellfireCitadel/BloodFurnace/boss_the_maker.cpp
C++
gpl-2.0
3,723
<?php /** * Test LocoAdmin::resolve_file_domain */ class ResolveFileDomainTest extends PHPUnit_Framework_TestCase { public function testDomainOnlySeparatesFromFileExtension(){ $domain = LocoAdmin::resolve_file_domain( '/foo.pot' ); $this->assertEquals( 'foo', $domain ); } public function testFullLocaleSeparatesFromDomainByHyphen(){ $domain = LocoAdmin::resolve_file_domain( '/foo-en_GB.po' ); $this->assertEquals( 'foo', $domain ); } public function testFullLocaleWithLongLanguageSeparatesFromDomainByHyphen(){ $domain = LocoAdmin::resolve_file_domain( '/foo-rup_MK.po' ); $this->assertEquals( 'foo', $domain ); } public function testLanguageCodeSeparatesFromDomainByHyphen(){ $domain = LocoAdmin::resolve_file_domain( '/foo-en.po' ); $this->assertEquals( 'foo', $domain ); } public function testValidLanguageCodeNotUsedAsDomain(){ $domain = LocoAdmin::resolve_file_domain( '/fr_FR.po' ); $this->assertSame( '', $domain ); } public function testInvalidLanguageCodeNotUsedAsDomain(){ $domain = LocoAdmin::resolve_file_domain( '/en_EN.po' ); $this->assertSame( '', $domain ); } public function testValidLanguageCodeNotUsedAsDomainWhenPot(){ $domain = LocoAdmin::resolve_file_domain( '/fr_FR.pot' ); $this->assertSame( '', $domain ); } public function testInvalidLanguageCodeNotUsedAsDomainWhenPot(){ $domain = LocoAdmin::resolve_file_domain( '/en_EN.pot' ); $this->assertSame( '', $domain ); } }
trungdovan87/bongda69
wp-content/plugins/loco-translate/lib/test/tests/ResolveFileDomainTest.php
PHP
gpl-2.0
1,625
/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package apiclient import ( "strings" "github.com/pkg/errors" v1 "k8s.io/api/core/v1" apierrors "k8s.io/apimachinery/pkg/api/errors" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime" "k8s.io/apimachinery/pkg/util/intstr" core "k8s.io/client-go/testing" netutils "k8s.io/utils/net" "k8s.io/kubernetes/cmd/kubeadm/app/constants" ) // InitDryRunGetter implements the DryRunGetter interface and can be used to GET/LIST values in the dryrun fake clientset // Need to handle these routes in a special manner: // - GET /default/services/kubernetes -- must return a valid Service // - GET /clusterrolebindings/system:nodes -- can safely return a NotFound error // - GET /kube-system/secrets/bootstrap-token-* -- can safely return a NotFound error // - GET /nodes/<node-name> -- must return a valid Node // - ...all other, unknown GETs/LISTs will be logged type InitDryRunGetter struct { controlPlaneName string serviceSubnet string } // InitDryRunGetter should implement the DryRunGetter interface var _ DryRunGetter = &InitDryRunGetter{} // NewInitDryRunGetter creates a new instance of the InitDryRunGetter struct func NewInitDryRunGetter(controlPlaneName string, serviceSubnet string) *InitDryRunGetter { return &InitDryRunGetter{ controlPlaneName: controlPlaneName, serviceSubnet: serviceSubnet, } } // HandleGetAction handles GET actions to the dryrun clientset this interface supports func (idr *InitDryRunGetter) HandleGetAction(action core.GetAction) (bool, runtime.Object, error) { funcs := []func(core.GetAction) (bool, runtime.Object, error){ idr.handleKubernetesService, idr.handleGetNode, idr.handleSystemNodesClusterRoleBinding, idr.handleGetBootstrapToken, } for _, f := range funcs { handled, obj, err := f(action) if handled { return handled, obj, err } } return false, nil, nil } // HandleListAction handles GET actions to the dryrun clientset this interface supports. // Currently there are no known LIST calls during kubeadm init this code has to take care of. func (idr *InitDryRunGetter) HandleListAction(action core.ListAction) (bool, runtime.Object, error) { return false, nil, nil } // handleKubernetesService returns a faked Kubernetes service in order to be able to continue running kubeadm init. 
// The CoreDNS addon code GETs the Kubernetes service in order to extract the service subnet func (idr *InitDryRunGetter) handleKubernetesService(action core.GetAction) (bool, runtime.Object, error) { if action.GetName() != "kubernetes" || action.GetNamespace() != metav1.NamespaceDefault || action.GetResource().Resource != "services" { // We can't handle this event return false, nil, nil } _, svcSubnet, err := netutils.ParseCIDRSloppy(idr.serviceSubnet) if err != nil { return true, nil, errors.Wrapf(err, "error parsing CIDR %q", idr.serviceSubnet) } internalAPIServerVirtualIP, err := netutils.GetIndexedIP(svcSubnet, 1) if err != nil { return true, nil, errors.Wrapf(err, "unable to get first IP address from the given CIDR (%s)", svcSubnet.String()) } // The only used field of this Service object is the ClusterIP, which CoreDNS uses to calculate its own IP return true, &v1.Service{ ObjectMeta: metav1.ObjectMeta{ Name: "kubernetes", Namespace: metav1.NamespaceDefault, Labels: map[string]string{ "component": "apiserver", "provider": "kubernetes", }, }, Spec: v1.ServiceSpec{ ClusterIP: internalAPIServerVirtualIP.String(), Ports: []v1.ServicePort{ { Name: "https", Port: 443, TargetPort: intstr.FromInt(6443), }, }, }, }, nil } // handleGetNode returns a fake node object for the purpose of moving kubeadm init forwards. func (idr *InitDryRunGetter) handleGetNode(action core.GetAction) (bool, runtime.Object, error) { if action.GetName() != idr.controlPlaneName || action.GetResource().Resource != "nodes" { // We can't handle this event return false, nil, nil } return true, &v1.Node{ ObjectMeta: metav1.ObjectMeta{ Name: idr.controlPlaneName, Labels: map[string]string{ "kubernetes.io/hostname": idr.controlPlaneName, }, Annotations: map[string]string{}, }, }, nil } // handleSystemNodesClusterRoleBinding handles the GET call to the system:nodes clusterrolebinding func (idr *InitDryRunGetter) handleSystemNodesClusterRoleBinding(action core.GetAction) (bool, runtime.Object, error) { if action.GetName() != constants.NodesClusterRoleBinding || action.GetResource().Resource != "clusterrolebindings" { // We can't handle this event return false, nil, nil } // We can safely return a NotFound error here as the code will just proceed normally and don't care about modifying this clusterrolebinding // This can only happen on an upgrade; and in that case the ClientBackedDryRunGetter impl will be used return true, nil, apierrors.NewNotFound(action.GetResource().GroupResource(), "clusterrolebinding not found") } // handleGetBootstrapToken handles the case where kubeadm init creates the default token; and the token code GETs the // bootstrap token secret first in order to check if it already exists func (idr *InitDryRunGetter) handleGetBootstrapToken(action core.GetAction) (bool, runtime.Object, error) { if !strings.HasPrefix(action.GetName(), "bootstrap-token-") || action.GetNamespace() != metav1.NamespaceSystem || action.GetResource().Resource != "secrets" { // We can't handle this event return false, nil, nil } // We can safely return a NotFound error here as the code will just proceed normally and create the Bootstrap Token return true, nil, apierrors.NewNotFound(action.GetResource().GroupResource(), "secret not found") }
lojies/kubernetes
cmd/kubeadm/app/util/apiclient/init_dryrun.go
GO
apache-2.0
6,278
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2017 by Hitachi Vantara : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.ui.spoon; import java.io.InputStream; import java.util.List; import java.util.Locale; import org.eclipse.swt.widgets.Composite; import org.pentaho.di.core.EngineMetaInterface; import org.pentaho.ui.xul.XulOverlay; import org.pentaho.ui.xul.impl.XulEventHandler; /** * A SpoonPerspective is able to modify the look of the application and display it's own UI. Only one perspective can be * active at a time though they can run concurrently. SpoonPerspectives are most likely to be registered as part of a * SpoonPlugin. * * @author nbaker */ public interface SpoonPerspective { /** * Returns a unique identifier for this perspective * * @return unique ID */ public String getId(); /** * Returns the main UI for the perspective. * * @return UI Composite */ public Composite getUI(); /** * Returns a localized name for the perspective * * @param l * current Locale * @return localized name */ public String getDisplayName( Locale l ); /** * Perspectives will be represented in spoon by an icon on the main toolbar. This method returns the InputStream for * that icon. * * @return icon InputStream */ public InputStream getPerspectiveIcon(); /** * Called by Spoon whenever the active state of a perspective changes. * * @param active */ public void setActive( boolean active ); /** * A list of Xul Overlays to be applied and removed when the perspective is loaded or unloaded * * @return List of XulOverlays. */ public List<XulOverlay> getOverlays(); /** * Returns a list of Xul Event Handlers (controllers) to be added to Xul Containers in Spoon. Perspectives may * overwrite existing event handlers by registering one with the same ID. * * @return list of XulEventHandlers */ public List<XulEventHandler> getEventHandlers(); /** * Allows outside code to register to for activation events for this perspective. * * @param listener */ public void addPerspectiveListener( SpoonPerspectiveListener listener ); /** * Return the active EngineMeta in the case of perspectives with save-able content. * * @return active EngineMetaInterface */ public EngineMetaInterface getActiveMeta(); }
wseyler/pentaho-kettle
ui/src/main/java/org/pentaho/di/ui/spoon/SpoonPerspective.java
Java
apache-2.0
3,197
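Because SpoonPerspective is a plain interface, a SpoonPlugin registers its own implementation. The following minimal sketch is illustrative only: the class name, icon resource name and empty-list returns are invented, while the method signatures come directly from the interface above.

package org.pentaho.di.ui.spoon; // hypothetical location, chosen only so the interface resolves

import java.io.InputStream;
import java.util.Collections;
import java.util.List;
import java.util.Locale;

import org.eclipse.swt.widgets.Composite;
import org.pentaho.di.core.EngineMetaInterface;
import org.pentaho.ui.xul.XulOverlay;
import org.pentaho.ui.xul.impl.XulEventHandler;

public class ExampleSpoonPerspective implements SpoonPerspective {

  private final Composite ui;
  private boolean active;

  public ExampleSpoonPerspective( Composite ui ) {
    this.ui = ui; // the composite is assumed to be built elsewhere by the plugin
  }

  public String getId() {
    return "example-perspective"; // must be unique across all registered perspectives
  }

  public Composite getUI() {
    return ui;
  }

  public String getDisplayName( Locale l ) {
    return "Example"; // a real plugin would look this up in a resource bundle for l
  }

  public InputStream getPerspectiveIcon() {
    // hypothetical resource name; Spoon reads the toolbar icon from this stream
    return getClass().getResourceAsStream( "example-perspective.png" );
  }

  public void setActive( boolean active ) {
    this.active = active; // called by Spoon whenever the perspective is (de)activated
  }

  public List<XulOverlay> getOverlays() {
    return Collections.emptyList(); // no XUL overlays applied on load/unload
  }

  public List<XulEventHandler> getEventHandlers() {
    return Collections.emptyList(); // no extra controllers registered
  }

  public void addPerspectiveListener( SpoonPerspectiveListener listener ) {
    // a real implementation would keep the listener and fire activation events
  }

  public EngineMetaInterface getActiveMeta() {
    return null; // nothing save-able in this perspective
  }
}

Returning empty lists for the overlays and event handlers is the simplest valid answer; a real perspective would usually contribute at least one XulEventHandler so its toolbar and menus can react to user input.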
package liquibase.change.core.supplier; import liquibase.change.Change; import liquibase.change.ColumnConfig; import liquibase.change.core.CreateTableChange; import liquibase.change.core.RenameTableChange; import liquibase.diff.DiffResult; import liquibase.sdk.supplier.change.AbstractChangeSupplier; import liquibase.structure.core.Table; import static junit.framework.TestCase.assertNotNull; public class RenameTableChangeSupplier extends AbstractChangeSupplier<RenameTableChange> { public RenameTableChangeSupplier() { super(RenameTableChange.class); } @Override public Change[] prepareDatabase(RenameTableChange change) throws Exception { CreateTableChange createTableChange = new CreateTableChange(); createTableChange.setCatalogName(change.getCatalogName()); createTableChange.setSchemaName(change.getSchemaName()); createTableChange.setTableName(change.getOldTableName()); createTableChange.addColumn(new ColumnConfig().setName("id").setType("int")); createTableChange.addColumn(new ColumnConfig().setName("other_column").setType("varchar(10)")); return new Change[] {createTableChange }; } @Override public void checkDiffResult(DiffResult diffResult, RenameTableChange change) { assertNotNull(diffResult.getMissingObject(new Table(change.getCatalogName(), change.getSchemaName(), change.getOldTableName()))); assertNotNull(diffResult.getUnexpectedObject(new Table(change.getCatalogName(), change.getSchemaName(), change.getNewTableName()))); } }
tjardo83/liquibase
liquibase-core/src/main/java/liquibase/change/core/supplier/RenameTableChangeSupplier.java
Java
apache-2.0
1,575
// Copyright (C) 2004-2014 Free Software Foundation, Inc. // // This file is part of the GNU ISO C++ Library. This library is free // software; you can redistribute it and/or modify it under the // terms of the GNU General Public License as published by the // Free Software Foundation; either version 3, or (at your option) // any later version. // This library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License along // with this library; see the file COPYING3. If not see // <http://www.gnu.org/licenses/>. // 27.6.1.3 unformatted input functions // NB: ostream has a particular "seeks" category. Adopt this for istreams too. // @require@ %-*.tst %-*.txt // @diff@ %-*.tst %-*.txt #include <istream> #include <sstream> #include <fstream> #include <testsuite_hooks.h> // stringstreams void test05(void) { typedef std::wistream::off_type off_type; bool test __attribute__((unused)) = true; std::wistream::pos_type pos01, pos02, pos03, pos04, pos05, pos06; std::ios_base::iostate state01, state02; const char str_lit01[] = "wistream_seeks-1.tst"; std::wifstream if01(str_lit01); std::wifstream if02(str_lit01); std::wifstream if03(str_lit01); VERIFY( if01.good() ); VERIFY( if02.good() ); VERIFY( if03.good() ); std::wstringbuf strbuf01(std::ios_base::in | std::ios_base::out); if01 >> &strbuf01; // initialize stringbufs that are ios_base::out std::wstringbuf strbuf03(strbuf01.str(), std::ios_base::out); // initialize stringbufs that are ios_base::in std::wstringbuf strbuf02(strbuf01.str(), std::ios_base::in); std::wistream is01(&strbuf01); std::wistream is02(&strbuf02); std::wistream is03(&strbuf03); // pos_type tellg() // in | out pos01 = is01.tellg(); pos02 = is01.tellg(); VERIFY( pos01 == pos02 ); // in pos03 = is02.tellg(); pos04 = is02.tellg(); VERIFY( pos03 == pos04 ); // out pos05 = is03.tellg(); pos06 = is03.tellg(); VERIFY( pos05 == pos06 ); // cur // NB: see library issues list 136. It's the v-3 interp that seekg // only sets the input buffer, or else istreams with buffers that // have _M_mode == ios_base::out will fail to have consistency // between seekg and tellg. state01 = is01.rdstate(); is01.seekg(10, std::ios_base::cur); state02 = is01.rdstate(); pos01 = is01.tellg(); VERIFY( pos01 == pos02 + off_type(10) ); VERIFY( state01 == state02 ); pos02 = is01.tellg(); VERIFY( pos02 == pos01 ); } int main() { test05(); return 0; }
xinchoubiology/gcc
libstdc++-v3/testsuite/27_io/basic_istream/tellg/wchar_t/sstream.cc
C++
gpl-2.0
2,726
'use strict'; var grunt = require('grunt'); exports.concat_sourcemap = { setUp: function(done) { // setup here if necessary done(); }, default_options: function(test) { test.expect(2); var actual = grunt.file.read('tmp/default_options.js'); var expected = grunt.file.read('test/expected/default_options.js'); test.equal(actual, expected, 'should join files with default separator.'); var actualMap = grunt.file.read('tmp/default_options.js.map'); var expectedMap = grunt.file.read('test/expected/default_options.js.map'); test.equal(actualMap, expectedMap, 'should write a source map file.'); test.done(); }, options_with_sourceRoot: function(test) { test.expect(2); var actual = grunt.file.read('tmp/options_with_sourceRoot.js'); var expected = grunt.file.read('test/expected/options_with_sourceRoot.js'); test.equal(actual, expected, 'should not affect a output joined file.'); var actualMap = grunt.file.read('tmp/options_with_sourceRoot.js.map'); var expectedMap = grunt.file.read('test/expected/options_with_sourceRoot.js.map'); test.equal(actualMap, expectedMap, 'should write a source map file including `sourceRoot` property.'); test.done(); }, options_with_sourcesContent: function(test) { test.expect(2); var actual = grunt.file.read('tmp/options_with_sourcesContent.js'); var expected = grunt.file.read('test/expected/options_with_sourcesContent.js'); test.equal(actual, expected, 'should not affect a output joined file.'); var actualMap = grunt.file.read('tmp/options_with_sourcesContent.js.map'); var expectedMap = grunt.file.read('test/expected/options_with_sourcesContent.js.map'); test.equal(actualMap, expectedMap, 'should write a source map file including `sourcesContent` property.'); test.done(); }, options_with_process: function(test) { test.expect(1); var actual = grunt.file.read('tmp/options_with_process.js'); var expected = grunt.file.read('test/expected/options_with_process.js'); test.equal(actual, expected, 'should use process function to modify concatenated file'); test.done(); }, with_coffee: function(test) { test.expect(1); var actualMap = grunt.file.read('tmp/with_coffee.js.map'); var expectedMap = grunt.file.read('test/expected/with_coffee.js.map'); test.equal(actualMap, expectedMap, 'should resolve combined source map.'); test.done(); }, css_files: function(test) { var actualContent, expectedContent, actualMap, expectedMap; test.expect(2); actualContent = grunt.file.read('tmp/css_files.css'); expectedContent = grunt.file.read('test/expected/css_files.css'); test.equal(actualContent, expectedContent, 'should output linking line as `/*# sourceMappingURL=<URL> */`.'); actualMap = grunt.file.read('tmp/css_files.css.map'); expectedMap = grunt.file.read('test/expected/css_files.css.map'); test.equal(actualMap, expectedMap, 'should write a source map.'); test.done(); }, css_files_with_sass_generated: function(test) { var actualContent, expectedContent, actualMap, expectedMap; test.expect(2); actualContent = grunt.file.read('tmp/css_files_with_sass_generated.css'); expectedContent = grunt.file.read('test/expected/css_files_with_sass_generated.css'); test.equal(actualContent, expectedContent, 'should concatenate contents except for linking lines.'); actualMap = grunt.file.read('tmp/css_files_with_sass_generated.css.map'); expectedMap = grunt.file.read('test/expected/css_files_with_sass_generated.css.map'); test.equal(actualMap, expectedMap, 'should write a source map resolving combined source map.'); test.done(); }, file_with_linking: function(test) { var actualContent, expectedContent, 
actualMap, expectedMap; test.expect(2); actualContent = grunt.file.read('tmp/file_with_linking.js'); expectedContent = grunt.file.read('test/expected/file_with_linking.js'); test.equal(actualContent, expectedContent, 'should concatenate contents and resolve the linking.'); actualMap = grunt.file.read('tmp/file_with_linking.js.map'); expectedMap = grunt.file.read('test/expected/file_with_linking.js.map'); test.equal(actualMap, expectedMap, 'should concatenate contents and resolve the linking.'); test.done(); }, file_with_old_linking: function(test) { var actualContent, expectedContent, actualMap, expectedMap; test.expect(2); actualContent = grunt.file.read('tmp/file_with_old_linking.js'); expectedContent = grunt.file.read('test/expected/file_with_old_linking.js'); test.equal(actualContent, expectedContent, 'should concatenate contents and resolve the old linking.'); actualMap = grunt.file.read('tmp/file_with_old_linking.js.map'); expectedMap = grunt.file.read('test/expected/file_with_old_linking.js.map'); test.equal(actualMap, expectedMap, 'should concatenate contents and resolve the old linking.'); test.done(); }, };
alexsmander/alexmattorr
wp-content/themes/portfolio/node_modules/grunt-concat-sourcemap/test/concat_sourcemap_test.js
JavaScript
gpl-2.0
5,041
#!/usr/bin/python """ Ansible module to manage the ssh known_hosts file. Copyright(c) 2014, Matthew Vernon <mcv21@cam.ac.uk> This module is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This module is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this module. If not, see <http://www.gnu.org/licenses/>. """ ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: known_hosts short_description: Add or remove a host from the C(known_hosts) file description: - The C(known_hosts) module lets you add or remove a host keys from the C(known_hosts) file. - Starting at Ansible 2.2, multiple entries per host are allowed, but only one for each key type supported by ssh. This is useful if you're going to want to use the M(git) module over ssh, for example. - If you have a very large number of host keys to manage, you will find the M(template) module more useful. version_added: "1.9" options: name: aliases: [ 'host' ] description: - The host to add or remove (must match a host specified in key) required: true default: null key: description: - The SSH public host key, as a string (required if state=present, optional when state=absent, in which case all keys for the host are removed). The key must be in the right format for ssh (see sshd(1), section "SSH_KNOWN_HOSTS FILE FORMAT") required: false default: null path: description: - The known_hosts file to edit required: no default: "(homedir)+/.ssh/known_hosts" hash_host: description: - Hash the hostname in the known_hosts file required: no default: no version_added: "2.3" state: description: - I(present) to add the host key, I(absent) to remove it. choices: [ "present", "absent" ] required: no default: present requirements: [ ] author: "Matthew Vernon (@mcv21)" ''' EXAMPLES = ''' - name: tell the host about our servers it might want to ssh to known_hosts: path: /etc/ssh/ssh_known_hosts name: foo.com.invalid key: "{{ lookup('file', 'pubkeys/foo.com.invalid') }}" ''' # Makes sure public host keys are present or absent in the given known_hosts # file. # # Arguments # ========= # name = hostname whose key should be added (alias: host) # key = line(s) to add to known_hosts file # path = the known_hosts file to edit (default: ~/.ssh/known_hosts) # hash_host = yes|no (default: no) hash the hostname in the known_hosts file # state = absent|present (default: present) import os import os.path import tempfile import errno import re from ansible.module_utils.pycompat24 import get_exception from ansible.module_utils.basic import AnsibleModule def enforce_state(module, params): """ Add or remove key. 
""" host = params["name"] key = params.get("key", None) port = params.get("port", None) path = params.get("path") hash_host = params.get("hash_host") state = params.get("state") # Find the ssh-keygen binary sshkeygen = module.get_bin_path("ssh-keygen", True) # Trailing newline in files gets lost, so re-add if necessary if key and key[-1] != '\n': key += '\n' if key is None and state != "absent": module.fail_json(msg="No key specified when adding a host") sanity_check(module, host, key, sshkeygen) found, replace_or_add, found_line, key = search_for_host_key(module, host, key, hash_host, path, sshkeygen) params['diff'] = compute_diff(path, found_line, replace_or_add, state, key) # We will change state if found==True & state!="present" # or found==False & state=="present" # i.e found XOR (state=="present") # Alternatively, if replace is true (i.e. key present, and we must change # it) if module.check_mode: module.exit_json(changed=replace_or_add or (state == "present") != found, diff=params['diff']) # Now do the work. # Only remove whole host if found and no key provided if found and key is None and state == "absent": module.run_command([sshkeygen, '-R', host, '-f', path], check_rc=True) params['changed'] = True # Next, add a new (or replacing) entry if replace_or_add or found != (state == "present"): try: inf = open(path, "r") except IOError: e = get_exception() if e.errno == errno.ENOENT: inf = None else: module.fail_json(msg="Failed to read %s: %s" % (path, str(e))) try: outf = tempfile.NamedTemporaryFile(mode='w+', dir=os.path.dirname(path)) if inf is not None: for line_number, line in enumerate(inf): if found_line == (line_number + 1) and (replace_or_add or state == 'absent'): continue # skip this line to replace its key outf.write(line) inf.close() if state == 'present': outf.write(key) outf.flush() module.atomic_move(outf.name, path) except (IOError, OSError): e = get_exception() module.fail_json(msg="Failed to write to file %s: %s" % (path, str(e))) try: outf.close() except: pass params['changed'] = True return params def sanity_check(module, host, key, sshkeygen): '''Check supplied key is sensible host and key are parameters provided by the user; If the host provided is inconsistent with the key supplied, then this function quits, providing an error to the user. sshkeygen is the path to ssh-keygen, found earlier with get_bin_path ''' # If no key supplied, we're doing a removal, and have nothing to check here. if key is None: return # Rather than parsing the key ourselves, get ssh-keygen to do it # (this is essential for hashed keys, but otherwise useful, as the # key question is whether ssh-keygen thinks the key matches the host). # The approach is to write the key to a temporary file, # and then attempt to look up the specified host in that file. 
try: outf = tempfile.NamedTemporaryFile(mode='w+') outf.write(key) outf.flush() except IOError: e = get_exception() module.fail_json(msg="Failed to write to temporary file %s: %s" % (outf.name, str(e))) sshkeygen_command = [sshkeygen, '-F', host, '-f', outf.name] rc, stdout, stderr = module.run_command(sshkeygen_command, check_rc=True) try: outf.close() except: pass if stdout == '': # host not found module.fail_json(msg="Host parameter does not match hashed host field in supplied key") def search_for_host_key(module, host, key, hash_host, path, sshkeygen): '''search_for_host_key(module,host,key,path,sshkeygen) -> (found,replace_or_add,found_line) Looks up host and keytype in the known_hosts file path; if it's there, looks to see if one of those entries matches key. Returns: found (Boolean): is host found in path? replace_or_add (Boolean): is the key in path different to that supplied by user? found_line (int or None): the line where a key of the same type was found if found=False, then replace is always False. sshkeygen is the path to ssh-keygen, found earlier with get_bin_path ''' if os.path.exists(path) is False: return False, False, None, key sshkeygen_command = [sshkeygen, '-F', host, '-f', path] # openssh >=6.4 has changed ssh-keygen behaviour such that it returns # 1 if no host is found, whereas previously it returned 0 rc, stdout, stderr = module.run_command(sshkeygen_command, check_rc=False) if stdout == '' and stderr == '' and (rc == 0 or rc == 1): return False, False, None, key # host not found, no other errors if rc != 0: # something went wrong module.fail_json(msg="ssh-keygen failed (rc=%d, stdout='%s',stderr='%s')" % (rc, stdout, stderr)) # If user supplied no key, we don't want to try and replace anything with it if key is None: return True, False, None, key lines = stdout.split('\n') new_key = normalize_known_hosts_key(key) sshkeygen_command.insert(1, '-H') rc, stdout, stderr = module.run_command(sshkeygen_command, check_rc=False) if rc not in (0, 1) or stderr != '': # something went wrong module.fail_json(msg="ssh-keygen failed to hash host (rc=%d, stdout='%s',stderr='%s')" % (rc, stdout, stderr)) hashed_lines = stdout.split('\n') for lnum, l in enumerate(lines): if l == '': continue elif l[0] == '#': # info output from ssh-keygen; contains the line number where key was found try: # This output format has been hardcoded in ssh-keygen since at least OpenSSH 4.0 # It always outputs the non-localized comment before the found key found_line = int(re.search(r'found: line (\d+)', l).group(1)) except IndexError: module.fail_json(msg="failed to parse output of ssh-keygen for line number: '%s'" % l) else: found_key = normalize_known_hosts_key(l) if hash_host is True: if found_key['host'][:3] == '|1|': new_key['host'] = found_key['host'] else: hashed_host = normalize_known_hosts_key(hashed_lines[lnum]) found_key['host'] = hashed_host['host'] key = key.replace(host, found_key['host']) if new_key == found_key: # found a match return True, False, found_line, key # found exactly the same key, don't replace elif new_key['type'] == found_key['type']: # found a different key for the same key type return True, True, found_line, key # No match found, return found and replace, but no line return True, True, None, key def normalize_known_hosts_key(key): ''' Transform a key, either taken from a known_host file or provided by the user, into a normalized form. The host part (which might include multiple hostnames or be hashed) gets replaced by the provided host. 
Also, any spurious information gets removed from the end (like the username@host tag usually present in hostkeys, but absent in known_hosts files) ''' k = key.strip() # trim trailing newline k = k.split() d = dict() # The optional "marker" field, used for @cert-authority or @revoked if k[0][0] == '@': d['options'] = k[0] d['host'] = k[1] d['type'] = k[2] d['key'] = k[3] else: d['host'] = k[0] d['type'] = k[1] d['key'] = k[2] return d def compute_diff(path, found_line, replace_or_add, state, key): diff = { 'before_header': path, 'after_header': path, 'before': '', 'after': '', } try: inf = open(path, "r") except IOError: e = get_exception() if e.errno == errno.ENOENT: diff['before_header'] = '/dev/null' else: diff['before'] = inf.read() inf.close() lines = diff['before'].splitlines(1) if (replace_or_add or state == 'absent') and found_line is not None and 1 <= found_line <= len(lines): del lines[found_line - 1] if state == 'present' and (replace_or_add or found_line is None): lines.append(key) diff['after'] = ''.join(lines) return diff def main(): module = AnsibleModule( argument_spec=dict( name=dict(required=True, type='str', aliases=['host']), key=dict(required=False, type='str'), path=dict(default="~/.ssh/known_hosts", type='path'), hash_host=dict(required=False, type='bool', default=False), state=dict(default='present', choices=['absent', 'present']), ), supports_check_mode=True ) results = enforce_state(module, module.params) module.exit_json(**results) if __name__ == '__main__': main()
andreaso/ansible
lib/ansible/modules/system/known_hosts.py
Python
gpl-3.0
12,710
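A minimal standalone sketch of two ideas from the known_hosts module above may help: (1) how a key line is reduced to comparable (host, type, key) fields, as in normalize_known_hosts_key(), and (2) the check-mode "changed" decision, replace_or_add or ((state == "present") != found), used by enforce_state(). The helper names and the key text below are hypothetical placeholders, not part of the module itself.

#!/usr/bin/env python
# Standalone illustration of two pieces of the known_hosts module above.
# All names here are hypothetical; the key material is a placeholder.

def normalize(line):
    """Reduce a known_hosts line to comparable fields, mirroring
    normalize_known_hosts_key(): optional @marker, host, type, key;
    anything after the key (e.g. a trailing user@host comment) is dropped."""
    parts = line.strip().split()
    fields = {}
    if parts[0].startswith('@'):      # @cert-authority / @revoked marker
        fields['options'] = parts[0]
        parts = parts[1:]
    fields['host'], fields['type'], fields['key'] = parts[0], parts[1], parts[2]
    return fields


def would_change(found, replace_or_add, state):
    """Check-mode 'changed' decision used by enforce_state(): change if the
    key must be replaced, or if presence in the file disagrees with the
    requested state (found XOR state == 'present')."""
    return replace_or_add or ((state == 'present') != found)


if __name__ == '__main__':
    supplied = 'foo.com.invalid ssh-ed25519 AAAAC3placeholder user@host'
    on_disk = 'foo.com.invalid ssh-ed25519 AAAAC3placeholder'
    print(normalize(supplied) == normalize(on_disk))                          # True: comment tag ignored
    print(would_change(found=True, replace_or_add=False, state='present'))   # False: already present
    print(would_change(found=False, replace_or_add=False, state='present'))  # True: needs adding
    print(would_change(found=True, replace_or_add=False, state='absent'))    # True: needs removing

Comparing on (host, type, key) rather than the raw line is what lets the module keep one entry per key type per host, as described in its documentation.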
// Package logging provides access to the Google Cloud Logging API. // // See https://cloud.google.com/logging/docs/ // // Usage example: // // import "google.golang.org/api/logging/v1beta3" // ... // loggingService, err := logging.New(oauthHttpClient) package logging // import "google.golang.org/api/logging/v1beta3" import ( "bytes" "encoding/json" "errors" "fmt" context "golang.org/x/net/context" ctxhttp "golang.org/x/net/context/ctxhttp" gensupport "google.golang.org/api/gensupport" googleapi "google.golang.org/api/googleapi" "io" "net/http" "net/url" "strconv" "strings" ) // Always reference these packages, just in case the auto-generated code // below doesn't. var _ = bytes.NewBuffer var _ = strconv.Itoa var _ = fmt.Sprintf var _ = json.NewDecoder var _ = io.Copy var _ = url.Parse var _ = gensupport.MarshalJSON var _ = googleapi.Version var _ = errors.New var _ = strings.Replace var _ = context.Canceled var _ = ctxhttp.Do const apiId = "logging:v1beta3" const apiName = "logging" const apiVersion = "v1beta3" const basePath = "https://logging.googleapis.com/" // OAuth2 scopes used by this API. const ( // View and manage your data across Google Cloud Platform services CloudPlatformScope = "https://www.googleapis.com/auth/cloud-platform" // View your data across Google Cloud Platform services CloudPlatformReadOnlyScope = "https://www.googleapis.com/auth/cloud-platform.read-only" // Administrate log data for your projects LoggingAdminScope = "https://www.googleapis.com/auth/logging.admin" // View log data for your projects LoggingReadScope = "https://www.googleapis.com/auth/logging.read" // Submit log data for your projects LoggingWriteScope = "https://www.googleapis.com/auth/logging.write" ) func New(client *http.Client) (*Service, error) { if client == nil { return nil, errors.New("client is nil") } s := &Service{client: client, BasePath: basePath} s.Projects = NewProjectsService(s) return s, nil } type Service struct { client *http.Client BasePath string // API endpoint base URL UserAgent string // optional additional User-Agent fragment Projects *ProjectsService } func (s *Service) userAgent() string { if s.UserAgent == "" { return googleapi.UserAgent } return googleapi.UserAgent + " " + s.UserAgent } func NewProjectsService(s *Service) *ProjectsService { rs := &ProjectsService{s: s} rs.LogServices = NewProjectsLogServicesService(s) rs.Logs = NewProjectsLogsService(s) rs.Metrics = NewProjectsMetricsService(s) rs.Sinks = NewProjectsSinksService(s) return rs } type ProjectsService struct { s *Service LogServices *ProjectsLogServicesService Logs *ProjectsLogsService Metrics *ProjectsMetricsService Sinks *ProjectsSinksService } func NewProjectsLogServicesService(s *Service) *ProjectsLogServicesService { rs := &ProjectsLogServicesService{s: s} rs.Indexes = NewProjectsLogServicesIndexesService(s) rs.Sinks = NewProjectsLogServicesSinksService(s) return rs } type ProjectsLogServicesService struct { s *Service Indexes *ProjectsLogServicesIndexesService Sinks *ProjectsLogServicesSinksService } func NewProjectsLogServicesIndexesService(s *Service) *ProjectsLogServicesIndexesService { rs := &ProjectsLogServicesIndexesService{s: s} return rs } type ProjectsLogServicesIndexesService struct { s *Service } func NewProjectsLogServicesSinksService(s *Service) *ProjectsLogServicesSinksService { rs := &ProjectsLogServicesSinksService{s: s} return rs } type ProjectsLogServicesSinksService struct { s *Service } func NewProjectsLogsService(s *Service) *ProjectsLogsService { rs := &ProjectsLogsService{s: s} 
rs.Entries = NewProjectsLogsEntriesService(s) rs.Sinks = NewProjectsLogsSinksService(s) return rs } type ProjectsLogsService struct { s *Service Entries *ProjectsLogsEntriesService Sinks *ProjectsLogsSinksService } func NewProjectsLogsEntriesService(s *Service) *ProjectsLogsEntriesService { rs := &ProjectsLogsEntriesService{s: s} return rs } type ProjectsLogsEntriesService struct { s *Service } func NewProjectsLogsSinksService(s *Service) *ProjectsLogsSinksService { rs := &ProjectsLogsSinksService{s: s} return rs } type ProjectsLogsSinksService struct { s *Service } func NewProjectsMetricsService(s *Service) *ProjectsMetricsService { rs := &ProjectsMetricsService{s: s} return rs } type ProjectsMetricsService struct { s *Service } func NewProjectsSinksService(s *Service) *ProjectsSinksService { rs := &ProjectsSinksService{s: s} return rs } type ProjectsSinksService struct { s *Service } // Empty: A generic empty message that you can re-use to avoid defining // duplicated empty messages in your APIs. A typical example is to use // it as the request or the response type of an API method. For // instance: service Foo { rpc Bar(google.protobuf.Empty) returns // (google.protobuf.Empty); } The JSON representation for `Empty` is // empty JSON object `{}`. type Empty struct { // ServerResponse contains the HTTP response code and headers from the // server. googleapi.ServerResponse `json:"-"` } // HttpRequest: A common proto for logging HTTP requests. type HttpRequest struct { // CacheHit: Whether or not an entity was served from cache (with or // without validation). CacheHit bool `json:"cacheHit,omitempty"` // Referer: Referer (a.k.a. referrer) URL of request, as defined in // http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html. Referer string `json:"referer,omitempty"` // RemoteIp: IP address of the client who issues the HTTP request. Could // be either IPv4 or IPv6. RemoteIp string `json:"remoteIp,omitempty"` // RequestMethod: Request method, such as `GET`, `HEAD`, `PUT` or // `POST`. RequestMethod string `json:"requestMethod,omitempty"` // RequestSize: Size of the HTTP request message in bytes, including // request headers and the request body. RequestSize int64 `json:"requestSize,omitempty,string"` // RequestUrl: Contains the scheme (http|https), the host name, the path // and the query portion of the URL that was requested. RequestUrl string `json:"requestUrl,omitempty"` // ResponseSize: Size of the HTTP response message in bytes sent back to // the client, including response headers and response body. ResponseSize int64 `json:"responseSize,omitempty,string"` // Status: A response code indicates the status of response, e.g., 200. Status int64 `json:"status,omitempty"` // UserAgent: User agent sent by the client, e.g., "Mozilla/4.0 // (compatible; MSIE 6.0; Windows 98; Q312461; .NET CLR 1.0.3705)". UserAgent string `json:"userAgent,omitempty"` // ValidatedWithOriginServer: Whether or not the response was validated // with the origin server before being served from cache. This field is // only meaningful if cache_hit is True. ValidatedWithOriginServer bool `json:"validatedWithOriginServer,omitempty"` // ForceSendFields is a list of field names (e.g. "CacheHit") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. 
This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "CacheHit") to include in // API requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *HttpRequest) MarshalJSON() ([]byte, error) { type noMethod HttpRequest raw := noMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // ListLogMetricsResponse: Result returned from ListLogMetrics. type ListLogMetricsResponse struct { // Metrics: The list of metrics that was requested. Metrics []*LogMetric `json:"metrics,omitempty"` // NextPageToken: If there are more results, then `nextPageToken` is // returned in the response. To get the next batch of entries, use the // value of `nextPageToken` as `pageToken` in the next call of // `ListLogMetrics`. If `nextPageToken` is empty, then there are no more // results. NextPageToken string `json:"nextPageToken,omitempty"` // ServerResponse contains the HTTP response code and headers from the // server. googleapi.ServerResponse `json:"-"` // ForceSendFields is a list of field names (e.g. "Metrics") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Metrics") to include in // API requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *ListLogMetricsResponse) MarshalJSON() ([]byte, error) { type noMethod ListLogMetricsResponse raw := noMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // ListLogServiceIndexesResponse: Result returned from // ListLogServiceIndexesRequest. type ListLogServiceIndexesResponse struct { // NextPageToken: If there are more results, then `nextPageToken` is // returned in the response. To get the next batch of indexes, use the // value of `nextPageToken` as `pageToken` in the next call of // `ListLogServiceIndexes`. If `nextPageToken` is empty, then there are // no more results. NextPageToken string `json:"nextPageToken,omitempty"` // ServiceIndexPrefixes: A list of log service index values. Each index // value has the form "/value1/value2/...", where `value1` is a value // in the primary index, `value2` is a value in the secondary index, and // so forth. ServiceIndexPrefixes []string `json:"serviceIndexPrefixes,omitempty"` // ServerResponse contains the HTTP response code and headers from the // server. googleapi.ServerResponse `json:"-"` // ForceSendFields is a list of field names (e.g. "NextPageToken") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. 
However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "NextPageToken") to include // in API requests with the JSON null value. By default, fields with // empty values are omitted from API requests. However, any field with // an empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *ListLogServiceIndexesResponse) MarshalJSON() ([]byte, error) { type noMethod ListLogServiceIndexesResponse raw := noMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // ListLogServiceSinksResponse: Result returned from // `ListLogServiceSinks`. type ListLogServiceSinksResponse struct { // Sinks: The requested log service sinks. If a returned `LogSink` // object has an empty `destination` field, the client can retrieve the // complete `LogSink` object by calling `logServices.sinks.get`. Sinks []*LogSink `json:"sinks,omitempty"` // ServerResponse contains the HTTP response code and headers from the // server. googleapi.ServerResponse `json:"-"` // ForceSendFields is a list of field names (e.g. "Sinks") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Sinks") to include in API // requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *ListLogServiceSinksResponse) MarshalJSON() ([]byte, error) { type noMethod ListLogServiceSinksResponse raw := noMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // ListLogServicesResponse: Result returned from // `ListLogServicesRequest`. type ListLogServicesResponse struct { // LogServices: A list of log services. LogServices []*LogService `json:"logServices,omitempty"` // NextPageToken: If there are more results, then `nextPageToken` is // returned in the response. To get the next batch of services, use the // value of `nextPageToken` as `pageToken` in the next call of // `ListLogServices`. If `nextPageToken` is empty, then there are no // more results. NextPageToken string `json:"nextPageToken,omitempty"` // ServerResponse contains the HTTP response code and headers from the // server. googleapi.ServerResponse `json:"-"` // ForceSendFields is a list of field names (e.g. "LogServices") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. 
ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "LogServices") to include // in API requests with the JSON null value. By default, fields with // empty values are omitted from API requests. However, any field with // an empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *ListLogServicesResponse) MarshalJSON() ([]byte, error) { type noMethod ListLogServicesResponse raw := noMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // ListLogSinksResponse: Result returned from `ListLogSinks`. type ListLogSinksResponse struct { // Sinks: The requested log sinks. If a returned `LogSink` object has an // empty `destination` field, the client can retrieve the complete // `LogSink` object by calling `log.sinks.get`. Sinks []*LogSink `json:"sinks,omitempty"` // ServerResponse contains the HTTP response code and headers from the // server. googleapi.ServerResponse `json:"-"` // ForceSendFields is a list of field names (e.g. "Sinks") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Sinks") to include in API // requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *ListLogSinksResponse) MarshalJSON() ([]byte, error) { type noMethod ListLogSinksResponse raw := noMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // ListLogsResponse: Result returned from ListLogs. type ListLogsResponse struct { // Logs: A list of log descriptions matching the criteria. Logs []*Log `json:"logs,omitempty"` // NextPageToken: If there are more results, then `nextPageToken` is // returned in the response. To get the next batch of logs, use the // value of `nextPageToken` as `pageToken` in the next call of // `ListLogs`. If `nextPageToken` is empty, then there are no more // results. NextPageToken string `json:"nextPageToken,omitempty"` // ServerResponse contains the HTTP response code and headers from the // server. googleapi.ServerResponse `json:"-"` // ForceSendFields is a list of field names (e.g. "Logs") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Logs") to include in API // requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. 
It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *ListLogsResponse) MarshalJSON() ([]byte, error) { type noMethod ListLogsResponse raw := noMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // ListSinksResponse: Result returned from `ListSinks`. type ListSinksResponse struct { // Sinks: The requested sinks. If a returned `LogSink` object has an // empty `destination` field, the client can retrieve the complete // `LogSink` object by calling `projects.sinks.get`. Sinks []*LogSink `json:"sinks,omitempty"` // ServerResponse contains the HTTP response code and headers from the // server. googleapi.ServerResponse `json:"-"` // ForceSendFields is a list of field names (e.g. "Sinks") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Sinks") to include in API // requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *ListSinksResponse) MarshalJSON() ([]byte, error) { type noMethod ListSinksResponse raw := noMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // Log: _Output only._ Describes a log, which is a named stream of log // entries. type Log struct { // DisplayName: _Optional._ The common name of the log. Example: // "request_log". DisplayName string `json:"displayName,omitempty"` // Name: The resource name of the log. Example: // "/projects/my-gcp-project-id/logs/LOG_NAME", where `LOG_NAME` is // the URL-encoded given name of the log. The log includes those log // entries whose `LogEntry.log` field contains this given name. To avoid // name collisions, it is a best practice to prefix the given log name // with the service name, but this is not required. Examples of log // given names: "appengine.googleapis.com/request_log", // "apache-access". Name string `json:"name,omitempty"` // PayloadType: _Optional_. A URI representing the expected payload type // for log entries. PayloadType string `json:"payloadType,omitempty"` // ForceSendFields is a list of field names (e.g. "DisplayName") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "DisplayName") to include // in API requests with the JSON null value. By default, fields with // empty values are omitted from API requests. However, any field with // an empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. 
// This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *Log) MarshalJSON() ([]byte, error) { type noMethod Log raw := noMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // LogEntry: An individual entry in a log. type LogEntry struct { // HttpRequest: Information about the HTTP request associated with this // log entry, if applicable. HttpRequest *HttpRequest `json:"httpRequest,omitempty"` // InsertId: A unique ID for the log entry. If you provide this field, // the logging service considers other log entries in the same log with // the same ID as duplicates which can be removed. InsertId string `json:"insertId,omitempty"` // Log: The log to which this entry belongs. When a log entry is // ingested, the value of this field is set by the logging system. Log string `json:"log,omitempty"` // Metadata: Information about the log entry. Metadata *LogEntryMetadata `json:"metadata,omitempty"` // ProtoPayload: The log entry payload, represented as a protocol buffer // that is expressed as a JSON object. You can only pass `protoPayload` // values that belong to a set of approved types. ProtoPayload LogEntryProtoPayload `json:"protoPayload,omitempty"` // StructPayload: The log entry payload, represented as a structure that // is expressed as a JSON object. StructPayload LogEntryStructPayload `json:"structPayload,omitempty"` // TextPayload: The log entry payload, represented as a Unicode string // (UTF-8). TextPayload string `json:"textPayload,omitempty"` // ForceSendFields is a list of field names (e.g. "HttpRequest") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "HttpRequest") to include // in API requests with the JSON null value. By default, fields with // empty values are omitted from API requests. However, any field with // an empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *LogEntry) MarshalJSON() ([]byte, error) { type noMethod LogEntry raw := noMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } type LogEntryProtoPayload interface{} type LogEntryStructPayload interface{} // LogEntryMetadata: Additional data that is associated with a log // entry, set by the service creating the log entry. type LogEntryMetadata struct { // Labels: A set of (key, value) data that provides additional // information about the log entry. 
If the log entry is from one of the // Google Cloud Platform sources listed below, the indicated (key, // value) information must be provided: Google App Engine, service_name // `appengine.googleapis.com`: "appengine.googleapis.com/module_id", // "appengine.googleapis.com/version_id", and one of: // "appengine.googleapis.com/replica_index", // "appengine.googleapis.com/clone_id", or else provide the following // Compute Engine labels: Google Compute Engine, service_name // `compute.googleapis.com`: "compute.googleapis.com/resource_type", // "instance" "compute.googleapis.com/resource_id", Labels map[string]string `json:"labels,omitempty"` // ProjectId: The project ID of the Google Cloud Platform service that // created the log entry. ProjectId string `json:"projectId,omitempty"` // Region: The region name of the Google Cloud Platform service that // created the log entry. For example, "us-central1". Region string `json:"region,omitempty"` // ServiceName: The API name of the Google Cloud Platform service that // created the log entry. For example, "compute.googleapis.com". ServiceName string `json:"serviceName,omitempty"` // Severity: The severity of the log entry. // // Possible values: // "DEFAULT" // "DEBUG" // "INFO" // "NOTICE" // "WARNING" // "ERROR" // "CRITICAL" // "ALERT" // "EMERGENCY" Severity string `json:"severity,omitempty"` // Timestamp: The time the event described by the log entry occurred. // Timestamps must be later than January 1, 1970. Timestamp string `json:"timestamp,omitempty"` // UserId: The fully-qualified email address of the authenticated user // that performed or requested the action represented by the log entry. // If the log entry does not apply to an action taken by an // authenticated user, then the field should be empty. UserId string `json:"userId,omitempty"` // Zone: The zone of the Google Cloud Platform service that created the // log entry. For example, "us-central1-a". Zone string `json:"zone,omitempty"` // ForceSendFields is a list of field names (e.g. "Labels") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Labels") to include in API // requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *LogEntryMetadata) MarshalJSON() ([]byte, error) { type noMethod LogEntryMetadata raw := noMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // LogError: Describes a problem with a logging resource or operation. type LogError struct { // Resource: A resource name associated with this error. For example, // the name of a Cloud Storage bucket that has insufficient permissions // to be a destination for log entries. Resource string `json:"resource,omitempty"` // Status: The error description, including a classification code, an // error message, and other details. 
Status *Status `json:"status,omitempty"` // TimeNanos: The time the error was observed, in nanoseconds since the // Unix epoch. TimeNanos int64 `json:"timeNanos,omitempty,string"` // ForceSendFields is a list of field names (e.g. "Resource") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Resource") to include in // API requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *LogError) MarshalJSON() ([]byte, error) { type noMethod LogError raw := noMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // LogLine: Application log line emitted while processing a request. type LogLine struct { // LogMessage: App provided log message. LogMessage string `json:"logMessage,omitempty"` // Severity: Severity of log. // // Possible values: // "DEFAULT" // "DEBUG" // "INFO" // "NOTICE" // "WARNING" // "ERROR" // "CRITICAL" // "ALERT" // "EMERGENCY" Severity string `json:"severity,omitempty"` // SourceLocation: Line of code that generated this log message. SourceLocation *SourceLocation `json:"sourceLocation,omitempty"` // Time: Time when log entry was made. May be inaccurate. Time string `json:"time,omitempty"` // ForceSendFields is a list of field names (e.g. "LogMessage") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "LogMessage") to include in // API requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *LogLine) MarshalJSON() ([]byte, error) { type noMethod LogLine raw := noMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // LogMetric: Describes a logs-based metric. The value of the metric is // the number of log entries in your project that match a logs filter. type LogMetric struct { // Description: A description of this metric. Description string `json:"description,omitempty"` // Filter: An [advanced logs // filter](/logging/docs/view/advanced_filters). Example: "log:syslog // AND metadata.severity>=ERROR". Filter string `json:"filter,omitempty"` // Name: The client-assigned name for this metric, such as // "severe_errors". 
Metric names are limited to 1000 characters and // can include only the following characters: `A-Z`, `a-z`, `0-9`, and // the special characters `_-.,+!*',()%/\`. The slash character (`/`) // denotes a hierarchy of name pieces, and it cannot be the first // character of the name. Name string `json:"name,omitempty"` // ServerResponse contains the HTTP response code and headers from the // server. googleapi.ServerResponse `json:"-"` // ForceSendFields is a list of field names (e.g. "Description") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Description") to include // in API requests with the JSON null value. By default, fields with // empty values are omitted from API requests. However, any field with // an empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *LogMetric) MarshalJSON() ([]byte, error) { type noMethod LogMetric raw := noMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // LogService: _Output only._ Describes a service that writes log // entries. type LogService struct { // IndexKeys: A list of the names of the keys used to index and label // individual log entries from this service. The first two keys are used // as the primary and secondary index, respectively. Additional keys may // be used to label the entries. For example, App Engine indexes its // entries by module and by version, so its `indexKeys` field is the // following: [ "appengine.googleapis.com/module_id", // "appengine.googleapis.com/version_id" ] IndexKeys []string `json:"indexKeys,omitempty"` // Name: The service's name. Example: "appengine.googleapis.com". Log // names beginning with this string are reserved for this service. This // value can appear in the `LogEntry.metadata.serviceName` field of log // entries associated with this log service. Name string `json:"name,omitempty"` // ForceSendFields is a list of field names (e.g. "IndexKeys") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "IndexKeys") to include in // API requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *LogService) MarshalJSON() ([]byte, error) { type noMethod LogService raw := noMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // LogSink: Describes where log entries are written outside of Cloud // Logging. 
type LogSink struct { // Destination: The resource name of the destination. Cloud Logging // writes designated log entries to this destination. For example, // "storage.googleapis.com/my-output-bucket". Destination string `json:"destination,omitempty"` // Errors: _Output only._ If any errors occur when invoking a sink // method, then this field contains descriptions of the errors. Errors []*LogError `json:"errors,omitempty"` // Filter: An advanced logs filter. If present, only log entries // matching the filter are written. Only project sinks use this field; // log sinks and log service sinks must not include a filter. Filter string `json:"filter,omitempty"` // Name: The client-assigned name of this sink. For example, // "my-syslog-sink". The name must be unique among the sinks of a // similar kind in the project. Name string `json:"name,omitempty"` // ServerResponse contains the HTTP response code and headers from the // server. googleapi.ServerResponse `json:"-"` // ForceSendFields is a list of field names (e.g. "Destination") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Destination") to include // in API requests with the JSON null value. By default, fields with // empty values are omitted from API requests. However, any field with // an empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *LogSink) MarshalJSON() ([]byte, error) { type noMethod LogSink raw := noMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // RequestLog: Complete log information about a single request to an // application. type RequestLog struct { // AppEngineRelease: App Engine release version string. AppEngineRelease string `json:"appEngineRelease,omitempty"` // AppId: Identifies the application that handled this request. AppId string `json:"appId,omitempty"` // Cost: An indication of the relative cost of serving this request. Cost float64 `json:"cost,omitempty"` // EndTime: Time at which request was known to end processing. EndTime string `json:"endTime,omitempty"` // Finished: If true, represents a finished request. Otherwise, the // request is active. Finished bool `json:"finished,omitempty"` // Host: The Internet host and port number of the resource being // requested. Host string `json:"host,omitempty"` // HttpVersion: HTTP version of request. HttpVersion string `json:"httpVersion,omitempty"` // InstanceId: An opaque identifier for the instance that handled the // request. InstanceId string `json:"instanceId,omitempty"` // InstanceIndex: If the instance that processed this request was // individually addressable (i.e. belongs to a manually scaled module), // this is the index of the instance. InstanceIndex int64 `json:"instanceIndex,omitempty"` // Ip: Origin IP address. Ip string `json:"ip,omitempty"` // Latency: Latency of the request. Latency string `json:"latency,omitempty"` // Line: List of log lines emitted by the application while serving this // request, if requested. 
Line []*LogLine `json:"line,omitempty"` // MegaCycles: Number of CPU megacycles used to process request. MegaCycles int64 `json:"megaCycles,omitempty,string"` // Method: Request method, such as `GET`, `HEAD`, `PUT`, `POST`, or // `DELETE`. Method string `json:"method,omitempty"` // ModuleId: Identifies the module of the application that handled this // request. ModuleId string `json:"moduleId,omitempty"` // Nickname: A string that identifies a logged-in user who made this // request, or empty if the user is not logged in. Most likely, this is // the part of the user's email before the '@' sign. The field value is // the same for different requests from the same user, but different // users may have a similar name. This information is also available to // the application via Users API. This field will be populated starting // with App Engine 1.9.21. Nickname string `json:"nickname,omitempty"` // PendingTime: Time this request spent in the pending request queue, if // it was pending at all. PendingTime string `json:"pendingTime,omitempty"` // Referrer: Referrer URL of request. Referrer string `json:"referrer,omitempty"` // RequestId: Globally unique identifier for a request, based on request // start time. Request IDs for requests which started later will compare // greater as strings than those for requests which started earlier. RequestId string `json:"requestId,omitempty"` // Resource: Contains the path and query portion of the URL that was // requested. For example, if the URL was // "http://example.com/app?name=val", the resource would be // "/app?name=val". Any trailing fragment (separated by a '#' character) // will not be included. Resource string `json:"resource,omitempty"` // ResponseSize: Size in bytes sent back to client by request. ResponseSize int64 `json:"responseSize,omitempty,string"` // SourceReference: Source code for the application that handled this // request. There can be more than one source reference per deployed // application if source code is distributed among multiple // repositories. SourceReference []*SourceReference `json:"sourceReference,omitempty"` // StartTime: Time at which request was known to have begun processing. StartTime string `json:"startTime,omitempty"` // Status: Response status of request. Status int64 `json:"status,omitempty"` // TaskName: Task name of the request (for an offline request). TaskName string `json:"taskName,omitempty"` // TaskQueueName: Queue name of the request (for an offline request). TaskQueueName string `json:"taskQueueName,omitempty"` // TraceId: Cloud Trace identifier of the trace for this request. TraceId string `json:"traceId,omitempty"` // UrlMapEntry: File or class within URL mapping used for request. // Useful for tracking down the source code which was responsible for // managing request. Especially for multiply mapped handlers. UrlMapEntry string `json:"urlMapEntry,omitempty"` // UserAgent: User agent used for making request. UserAgent string `json:"userAgent,omitempty"` // VersionId: Version of the application that handled this request. VersionId string `json:"versionId,omitempty"` // WasLoadingRequest: Was this request a loading request for this // instance? WasLoadingRequest bool `json:"wasLoadingRequest,omitempty"` // ForceSendFields is a list of field names (e.g. "AppEngineRelease") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. 
However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "AppEngineRelease") to // include in API requests with the JSON null value. By default, fields // with empty values are omitted from API requests. However, any field // with an empty value appearing in NullFields will be sent to the // server as null. It is an error if a field in this list has a // non-empty value. This may be used to include null fields in Patch // requests. NullFields []string `json:"-"` } func (s *RequestLog) MarshalJSON() ([]byte, error) { type noMethod RequestLog raw := noMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // SourceLocation: Specifies a location in a source file. type SourceLocation struct { // File: Source file name. May or may not be a fully qualified name, // depending on the runtime environment. File string `json:"file,omitempty"` // FunctionName: Human-readable name of the function or method being // invoked, with optional context such as the class or package name, for // use in contexts such as the logs viewer where file:line number is // less meaningful. This may vary by language, for example: in Java: // qual.if.ied.Class.method in Go: dir/package.func in Python: function // ... FunctionName string `json:"functionName,omitempty"` // Line: Line within the source file. Line int64 `json:"line,omitempty,string"` // ForceSendFields is a list of field names (e.g. "File") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "File") to include in API // requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *SourceLocation) MarshalJSON() ([]byte, error) { type noMethod SourceLocation raw := noMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // SourceReference: A reference to a particular snapshot of the source // tree used to build and deploy an application. type SourceReference struct { // Repository: Optional. A URI string identifying the repository. // Example: "https://github.com/GoogleCloudPlatform/kubernetes.git" Repository string `json:"repository,omitempty"` // RevisionId: The canonical (and persistent) identifier of the deployed // revision. Example (git): "0035781c50ec7aa23385dc841529ce8a4b70db1b" RevisionId string `json:"revisionId,omitempty"` // ForceSendFields is a list of field names (e.g. "Repository") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. 
This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Repository") to include in // API requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *SourceReference) MarshalJSON() ([]byte, error) { type noMethod SourceReference raw := noMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // Status: The `Status` type defines a logical error model that is // suitable for different programming environments, including REST APIs // and RPC APIs. It is used by [gRPC](https://github.com/grpc). The // error model is designed to be: - Simple to use and understand for // most users - Flexible enough to meet unexpected needs # Overview The // `Status` message contains three pieces of data: error code, error // message, and error details. The error code should be an enum value of // google.rpc.Code, but it may accept additional error codes if needed. // The error message should be a developer-facing English message that // helps developers *understand* and *resolve* the error. If a localized // user-facing error message is needed, put the localized message in the // error details or localize it in the client. The optional error // details may contain arbitrary information about the error. There is a // predefined set of error detail types in the package `google.rpc` // which can be used for common error conditions. # Language mapping The // `Status` message is the logical representation of the error model, // but it is not necessarily the actual wire format. When the `Status` // message is exposed in different client libraries and different wire // protocols, it can be mapped differently. For example, it will likely // be mapped to some exceptions in Java, but more likely mapped to some // error codes in C. # Other uses The error model and the `Status` // message can be used in a variety of environments, either with or // without APIs, to provide a consistent developer experience across // different environments. Example uses of this error model include: - // Partial errors. If a service needs to return partial errors to the // client, it may embed the `Status` in the normal response to indicate // the partial errors. - Workflow errors. A typical workflow has // multiple steps. Each step may have a `Status` message for error // reporting purpose. - Batch operations. If a client uses batch request // and batch response, the `Status` message should be used directly // inside batch response, one for each error sub-response. - // Asynchronous operations. If an API call embeds asynchronous operation // results in its response, the status of those operations should be // represented directly using the `Status` message. - Logging. If some // API errors are stored in logs, the message `Status` could be used // directly after any stripping needed for security/privacy reasons. type Status struct { // Code: The status code, which should be an enum value of // google.rpc.Code. Code int64 `json:"code,omitempty"` // Details: A list of messages that carry the error details. There will // be a common set of message types for APIs to use. 
Details []StatusDetails `json:"details,omitempty"` // Message: A developer-facing error message, which should be in // English. Any user-facing error message should be localized and sent // in the google.rpc.Status.details field, or localized by the client. Message string `json:"message,omitempty"` // ForceSendFields is a list of field names (e.g. "Code") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Code") to include in API // requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *Status) MarshalJSON() ([]byte, error) { type noMethod Status raw := noMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } type StatusDetails interface{} // WriteLogEntriesRequest: The parameters to WriteLogEntries. type WriteLogEntriesRequest struct { // CommonLabels: Metadata labels that apply to all log entries in this // request, so that you don't have to repeat them in each log entry's // `metadata.labels` field. If any of the log entries contains a (key, // value) with the same key that is in `commonLabels`, then the entry's // (key, value) overrides the one in `commonLabels`. CommonLabels map[string]string `json:"commonLabels,omitempty"` // Entries: Log entries to insert. Entries []*LogEntry `json:"entries,omitempty"` // ForceSendFields is a list of field names (e.g. "CommonLabels") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "CommonLabels") to include // in API requests with the JSON null value. By default, fields with // empty values are omitted from API requests. However, any field with // an empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *WriteLogEntriesRequest) MarshalJSON() ([]byte, error) { type noMethod WriteLogEntriesRequest raw := noMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // WriteLogEntriesResponse: Result returned from WriteLogEntries. empty type WriteLogEntriesResponse struct { // ServerResponse contains the HTTP response code and headers from the // server. googleapi.ServerResponse `json:"-"` } // method id "logging.projects.logServices.list": type ProjectsLogServicesListCall struct { s *Service projectsId string urlParams_ gensupport.URLParams ifNoneMatch_ string ctx_ context.Context } // List: Lists the log services that have log entries in this project. 
func (r *ProjectsLogServicesService) List(projectsId string) *ProjectsLogServicesListCall { c := &ProjectsLogServicesListCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.projectsId = projectsId return c } // PageSize sets the optional parameter "pageSize": The maximum number // of `LogService` objects to return in one operation. func (c *ProjectsLogServicesListCall) PageSize(pageSize int64) *ProjectsLogServicesListCall { c.urlParams_.Set("pageSize", fmt.Sprint(pageSize)) return c } // PageToken sets the optional parameter "pageToken": An opaque token, // returned as `nextPageToken` by a prior `ListLogServices` operation. // If `pageToken` is supplied, then the other fields of this request are // ignored, and instead the previous `ListLogServices` operation is // continued. func (c *ProjectsLogServicesListCall) PageToken(pageToken string) *ProjectsLogServicesListCall { c.urlParams_.Set("pageToken", pageToken) return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ProjectsLogServicesListCall) Fields(s ...googleapi.Field) *ProjectsLogServicesListCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // IfNoneMatch sets the optional parameter which makes the operation // fail if the object's ETag matches the given value. This is useful for // getting updates only after the object has changed since the last // request. Use googleapi.IsNotModified to check whether the response // error from Do is the result of In-None-Match. func (c *ProjectsLogServicesListCall) IfNoneMatch(entityTag string) *ProjectsLogServicesListCall { c.ifNoneMatch_ = entityTag return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. func (c *ProjectsLogServicesListCall) Context(ctx context.Context) *ProjectsLogServicesListCall { c.ctx_ = ctx return c } func (c *ProjectsLogServicesListCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) reqHeaders.Set("User-Agent", c.s.userAgent()) if c.ifNoneMatch_ != "" { reqHeaders.Set("If-None-Match", c.ifNoneMatch_) } var body io.Reader = nil c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1beta3/projects/{projectsId}/logServices") urls += "?" + c.urlParams_.Encode() req, _ := http.NewRequest("GET", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "projectsId": c.projectsId, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "logging.projects.logServices.list" call. // Exactly one of *ListLogServicesResponse or error will be non-nil. Any // non-2xx status code is an error. Response headers are in either // *ListLogServicesResponse.ServerResponse.Header or (if a response was // returned at all) in error.(*googleapi.Error).Header. Use // googleapi.IsNotModified to check whether the returned error was // because http.StatusNotModified was returned. func (c *ProjectsLogServicesListCall) Do(opts ...googleapi.CallOption) (*ListLogServicesResponse, error) { gensupport.SetOptions(c.urlParams_, opts...) 
res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &ListLogServicesResponse{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := json.NewDecoder(res.Body).Decode(target); err != nil { return nil, err } return ret, nil // { // "description": "Lists the log services that have log entries in this project.", // "httpMethod": "GET", // "id": "logging.projects.logServices.list", // "parameterOrder": [ // "projectsId" // ], // "parameters": { // "pageSize": { // "description": "The maximum number of `LogService` objects to return in one operation.", // "format": "int32", // "location": "query", // "type": "integer" // }, // "pageToken": { // "description": "An opaque token, returned as `nextPageToken` by a prior `ListLogServices` operation. If `pageToken` is supplied, then the other fields of this request are ignored, and instead the previous `ListLogServices` operation is continued.", // "location": "query", // "type": "string" // }, // "projectsId": { // "description": "Part of `projectName`. The resource name of the project whose services are to be listed.", // "location": "path", // "required": true, // "type": "string" // } // }, // "path": "v1beta3/projects/{projectsId}/logServices", // "response": { // "$ref": "ListLogServicesResponse" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform", // "https://www.googleapis.com/auth/cloud-platform.read-only", // "https://www.googleapis.com/auth/logging.admin", // "https://www.googleapis.com/auth/logging.read" // ] // } } // Pages invokes f for each page of results. // A non-nil error returned from f will halt the iteration. // The provided context supersedes any context provided to the Context method. func (c *ProjectsLogServicesListCall) Pages(ctx context.Context, f func(*ListLogServicesResponse) error) error { c.ctx_ = ctx defer c.PageToken(c.urlParams_.Get("pageToken")) // reset paging to original point for { x, err := c.Do() if err != nil { return err } if err := f(x); err != nil { return err } if x.NextPageToken == "" { return nil } c.PageToken(x.NextPageToken) } } // method id "logging.projects.logServices.indexes.list": type ProjectsLogServicesIndexesListCall struct { s *Service projectsId string logServicesId string urlParams_ gensupport.URLParams ifNoneMatch_ string ctx_ context.Context } // List: Lists the current index values for a log service. func (r *ProjectsLogServicesIndexesService) List(projectsId string, logServicesId string) *ProjectsLogServicesIndexesListCall { c := &ProjectsLogServicesIndexesListCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.projectsId = projectsId c.logServicesId = logServicesId return c } // Depth sets the optional parameter "depth": A non-negative integer // that limits the number of levels of the index hierarchy that are // returned. If `depth` is 1 (default), only the first index key value // is returned. If `depth` is 2, both primary and secondary key values // are returned. If `depth` is 0, the depth is the number of // slash-separators in the `indexPrefix` field, not counting a slash // appearing as the last character of the prefix. If the `indexPrefix` // field is empty, the default depth is 1. 
It is an error for `depth` to // be any positive value less than the number of components in // `indexPrefix`. func (c *ProjectsLogServicesIndexesListCall) Depth(depth int64) *ProjectsLogServicesIndexesListCall { c.urlParams_.Set("depth", fmt.Sprint(depth)) return c } // IndexPrefix sets the optional parameter "indexPrefix": Restricts the // index values returned to be those with a specified prefix for each // index key. This field has the form "/prefix1/prefix2/...", in order // corresponding to the `LogService indexKeys`. Non-empty prefixes must // begin with `/`. For example, App Engine's two keys are the module ID // and the version ID. Following is the effect of using various values // for `indexPrefix`: + "/Mod/" retrieves `/Mod/10` and `/Mod/11` but // not `/ModA/10`. + "/Mod` retrieves `/Mod/10`, `/Mod/11` and // `/ModA/10` but not `/XXX/33`. + "/Mod/1" retrieves `/Mod/10` and // `/Mod/11` but not `/ModA/10`. + "/Mod/10/" retrieves `/Mod/10` // only. + An empty prefix or "/" retrieves all values. func (c *ProjectsLogServicesIndexesListCall) IndexPrefix(indexPrefix string) *ProjectsLogServicesIndexesListCall { c.urlParams_.Set("indexPrefix", indexPrefix) return c } // PageSize sets the optional parameter "pageSize": The maximum number // of log service index resources to return in one operation. func (c *ProjectsLogServicesIndexesListCall) PageSize(pageSize int64) *ProjectsLogServicesIndexesListCall { c.urlParams_.Set("pageSize", fmt.Sprint(pageSize)) return c } // PageToken sets the optional parameter "pageToken": An opaque token, // returned as `nextPageToken` by a prior `ListLogServiceIndexes` // operation. If `pageToken` is supplied, then the other fields of this // request are ignored, and instead the previous `ListLogServiceIndexes` // operation is continued. func (c *ProjectsLogServicesIndexesListCall) PageToken(pageToken string) *ProjectsLogServicesIndexesListCall { c.urlParams_.Set("pageToken", pageToken) return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ProjectsLogServicesIndexesListCall) Fields(s ...googleapi.Field) *ProjectsLogServicesIndexesListCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // IfNoneMatch sets the optional parameter which makes the operation // fail if the object's ETag matches the given value. This is useful for // getting updates only after the object has changed since the last // request. Use googleapi.IsNotModified to check whether the response // error from Do is the result of In-None-Match. func (c *ProjectsLogServicesIndexesListCall) IfNoneMatch(entityTag string) *ProjectsLogServicesIndexesListCall { c.ifNoneMatch_ = entityTag return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. func (c *ProjectsLogServicesIndexesListCall) Context(ctx context.Context) *ProjectsLogServicesIndexesListCall { c.ctx_ = ctx return c } func (c *ProjectsLogServicesIndexesListCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) reqHeaders.Set("User-Agent", c.s.userAgent()) if c.ifNoneMatch_ != "" { reqHeaders.Set("If-None-Match", c.ifNoneMatch_) } var body io.Reader = nil c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1beta3/projects/{projectsId}/logServices/{logServicesId}/indexes") urls += "?" 
+ c.urlParams_.Encode() req, _ := http.NewRequest("GET", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "projectsId": c.projectsId, "logServicesId": c.logServicesId, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "logging.projects.logServices.indexes.list" call. // Exactly one of *ListLogServiceIndexesResponse or error will be // non-nil. Any non-2xx status code is an error. Response headers are in // either *ListLogServiceIndexesResponse.ServerResponse.Header or (if a // response was returned at all) in error.(*googleapi.Error).Header. Use // googleapi.IsNotModified to check whether the returned error was // because http.StatusNotModified was returned. func (c *ProjectsLogServicesIndexesListCall) Do(opts ...googleapi.CallOption) (*ListLogServiceIndexesResponse, error) { gensupport.SetOptions(c.urlParams_, opts...) res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &ListLogServiceIndexesResponse{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := json.NewDecoder(res.Body).Decode(target); err != nil { return nil, err } return ret, nil // { // "description": "Lists the current index values for a log service.", // "httpMethod": "GET", // "id": "logging.projects.logServices.indexes.list", // "parameterOrder": [ // "projectsId", // "logServicesId" // ], // "parameters": { // "depth": { // "description": "A non-negative integer that limits the number of levels of the index hierarchy that are returned. If `depth` is 1 (default), only the first index key value is returned. If `depth` is 2, both primary and secondary key values are returned. If `depth` is 0, the depth is the number of slash-separators in the `indexPrefix` field, not counting a slash appearing as the last character of the prefix. If the `indexPrefix` field is empty, the default depth is 1. It is an error for `depth` to be any positive value less than the number of components in `indexPrefix`.", // "format": "int32", // "location": "query", // "type": "integer" // }, // "indexPrefix": { // "description": "Restricts the index values returned to be those with a specified prefix for each index key. This field has the form `\"/prefix1/prefix2/...\"`, in order corresponding to the `LogService indexKeys`. Non-empty prefixes must begin with `/`. For example, App Engine's two keys are the module ID and the version ID. Following is the effect of using various values for `indexPrefix`: + `\"/Mod/\"` retrieves `/Mod/10` and `/Mod/11` but not `/ModA/10`. + `\"/Mod` retrieves `/Mod/10`, `/Mod/11` and `/ModA/10` but not `/XXX/33`. + `\"/Mod/1\"` retrieves `/Mod/10` and `/Mod/11` but not `/ModA/10`. + `\"/Mod/10/\"` retrieves `/Mod/10` only. + An empty prefix or `\"/\"` retrieves all values.", // "location": "query", // "type": "string" // }, // "logServicesId": { // "description": "Part of `serviceName`. 
See documentation of `projectsId`.", // "location": "path", // "required": true, // "type": "string" // }, // "pageSize": { // "description": "The maximum number of log service index resources to return in one operation.", // "format": "int32", // "location": "query", // "type": "integer" // }, // "pageToken": { // "description": "An opaque token, returned as `nextPageToken` by a prior `ListLogServiceIndexes` operation. If `pageToken` is supplied, then the other fields of this request are ignored, and instead the previous `ListLogServiceIndexes` operation is continued.", // "location": "query", // "type": "string" // }, // "projectsId": { // "description": "Part of `serviceName`. The resource name of a log service whose service indexes are requested. Example: `\"projects/my-project-id/logServices/appengine.googleapis.com\"`.", // "location": "path", // "required": true, // "type": "string" // } // }, // "path": "v1beta3/projects/{projectsId}/logServices/{logServicesId}/indexes", // "response": { // "$ref": "ListLogServiceIndexesResponse" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform", // "https://www.googleapis.com/auth/cloud-platform.read-only", // "https://www.googleapis.com/auth/logging.admin", // "https://www.googleapis.com/auth/logging.read" // ] // } } // Pages invokes f for each page of results. // A non-nil error returned from f will halt the iteration. // The provided context supersedes any context provided to the Context method. func (c *ProjectsLogServicesIndexesListCall) Pages(ctx context.Context, f func(*ListLogServiceIndexesResponse) error) error { c.ctx_ = ctx defer c.PageToken(c.urlParams_.Get("pageToken")) // reset paging to original point for { x, err := c.Do() if err != nil { return err } if err := f(x); err != nil { return err } if x.NextPageToken == "" { return nil } c.PageToken(x.NextPageToken) } } // method id "logging.projects.logServices.sinks.create": type ProjectsLogServicesSinksCreateCall struct { s *Service projectsId string logServicesId string logsink *LogSink urlParams_ gensupport.URLParams ctx_ context.Context } // Create: Creates a log service sink. All log entries from a specified // log service are written to the destination. func (r *ProjectsLogServicesSinksService) Create(projectsId string, logServicesId string, logsink *LogSink) *ProjectsLogServicesSinksCreateCall { c := &ProjectsLogServicesSinksCreateCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.projectsId = projectsId c.logServicesId = logServicesId c.logsink = logsink return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ProjectsLogServicesSinksCreateCall) Fields(s ...googleapi.Field) *ProjectsLogServicesSinksCreateCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. 
func (c *ProjectsLogServicesSinksCreateCall) Context(ctx context.Context) *ProjectsLogServicesSinksCreateCall { c.ctx_ = ctx return c } func (c *ProjectsLogServicesSinksCreateCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) reqHeaders.Set("User-Agent", c.s.userAgent()) var body io.Reader = nil body, err := googleapi.WithoutDataWrapper.JSONReader(c.logsink) if err != nil { return nil, err } reqHeaders.Set("Content-Type", "application/json") c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1beta3/projects/{projectsId}/logServices/{logServicesId}/sinks") urls += "?" + c.urlParams_.Encode() req, _ := http.NewRequest("POST", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "projectsId": c.projectsId, "logServicesId": c.logServicesId, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "logging.projects.logServices.sinks.create" call. // Exactly one of *LogSink or error will be non-nil. Any non-2xx status // code is an error. Response headers are in either // *LogSink.ServerResponse.Header or (if a response was returned at all) // in error.(*googleapi.Error).Header. Use googleapi.IsNotModified to // check whether the returned error was because http.StatusNotModified // was returned. func (c *ProjectsLogServicesSinksCreateCall) Do(opts ...googleapi.CallOption) (*LogSink, error) { gensupport.SetOptions(c.urlParams_, opts...) res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &LogSink{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := json.NewDecoder(res.Body).Decode(target); err != nil { return nil, err } return ret, nil // { // "description": "Creates a log service sink. All log entries from a specified log service are written to the destination.", // "httpMethod": "POST", // "id": "logging.projects.logServices.sinks.create", // "parameterOrder": [ // "projectsId", // "logServicesId" // ], // "parameters": { // "logServicesId": { // "description": "Part of `serviceName`. See documentation of `projectsId`.", // "location": "path", // "required": true, // "type": "string" // }, // "projectsId": { // "description": "Part of `serviceName`. The resource name of the log service to which the sink is bound.", // "location": "path", // "required": true, // "type": "string" // } // }, // "path": "v1beta3/projects/{projectsId}/logServices/{logServicesId}/sinks", // "request": { // "$ref": "LogSink" // }, // "response": { // "$ref": "LogSink" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform", // "https://www.googleapis.com/auth/logging.admin" // ] // } } // method id "logging.projects.logServices.sinks.delete": type ProjectsLogServicesSinksDeleteCall struct { s *Service projectsId string logServicesId string sinksId string urlParams_ gensupport.URLParams ctx_ context.Context } // Delete: Deletes a log service sink. After deletion, no new log // entries are written to the destination. 
func (r *ProjectsLogServicesSinksService) Delete(projectsId string, logServicesId string, sinksId string) *ProjectsLogServicesSinksDeleteCall { c := &ProjectsLogServicesSinksDeleteCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.projectsId = projectsId c.logServicesId = logServicesId c.sinksId = sinksId return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ProjectsLogServicesSinksDeleteCall) Fields(s ...googleapi.Field) *ProjectsLogServicesSinksDeleteCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. func (c *ProjectsLogServicesSinksDeleteCall) Context(ctx context.Context) *ProjectsLogServicesSinksDeleteCall { c.ctx_ = ctx return c } func (c *ProjectsLogServicesSinksDeleteCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) reqHeaders.Set("User-Agent", c.s.userAgent()) var body io.Reader = nil c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1beta3/projects/{projectsId}/logServices/{logServicesId}/sinks/{sinksId}") urls += "?" + c.urlParams_.Encode() req, _ := http.NewRequest("DELETE", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "projectsId": c.projectsId, "logServicesId": c.logServicesId, "sinksId": c.sinksId, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "logging.projects.logServices.sinks.delete" call. // Exactly one of *Empty or error will be non-nil. Any non-2xx status // code is an error. Response headers are in either // *Empty.ServerResponse.Header or (if a response was returned at all) // in error.(*googleapi.Error).Header. Use googleapi.IsNotModified to // check whether the returned error was because http.StatusNotModified // was returned. func (c *ProjectsLogServicesSinksDeleteCall) Do(opts ...googleapi.CallOption) (*Empty, error) { gensupport.SetOptions(c.urlParams_, opts...) res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &Empty{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := json.NewDecoder(res.Body).Decode(target); err != nil { return nil, err } return ret, nil // { // "description": "Deletes a log service sink. After deletion, no new log entries are written to the destination.", // "httpMethod": "DELETE", // "id": "logging.projects.logServices.sinks.delete", // "parameterOrder": [ // "projectsId", // "logServicesId", // "sinksId" // ], // "parameters": { // "logServicesId": { // "description": "Part of `sinkName`. See documentation of `projectsId`.", // "location": "path", // "required": true, // "type": "string" // }, // "projectsId": { // "description": "Part of `sinkName`. The resource name of the log service sink to delete.", // "location": "path", // "required": true, // "type": "string" // }, // "sinksId": { // "description": "Part of `sinkName`. 
See documentation of `projectsId`.", // "location": "path", // "required": true, // "type": "string" // } // }, // "path": "v1beta3/projects/{projectsId}/logServices/{logServicesId}/sinks/{sinksId}", // "response": { // "$ref": "Empty" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform", // "https://www.googleapis.com/auth/logging.admin" // ] // } } // method id "logging.projects.logServices.sinks.get": type ProjectsLogServicesSinksGetCall struct { s *Service projectsId string logServicesId string sinksId string urlParams_ gensupport.URLParams ifNoneMatch_ string ctx_ context.Context } // Get: Gets a log service sink. func (r *ProjectsLogServicesSinksService) Get(projectsId string, logServicesId string, sinksId string) *ProjectsLogServicesSinksGetCall { c := &ProjectsLogServicesSinksGetCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.projectsId = projectsId c.logServicesId = logServicesId c.sinksId = sinksId return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ProjectsLogServicesSinksGetCall) Fields(s ...googleapi.Field) *ProjectsLogServicesSinksGetCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // IfNoneMatch sets the optional parameter which makes the operation // fail if the object's ETag matches the given value. This is useful for // getting updates only after the object has changed since the last // request. Use googleapi.IsNotModified to check whether the response // error from Do is the result of In-None-Match. func (c *ProjectsLogServicesSinksGetCall) IfNoneMatch(entityTag string) *ProjectsLogServicesSinksGetCall { c.ifNoneMatch_ = entityTag return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. func (c *ProjectsLogServicesSinksGetCall) Context(ctx context.Context) *ProjectsLogServicesSinksGetCall { c.ctx_ = ctx return c } func (c *ProjectsLogServicesSinksGetCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) reqHeaders.Set("User-Agent", c.s.userAgent()) if c.ifNoneMatch_ != "" { reqHeaders.Set("If-None-Match", c.ifNoneMatch_) } var body io.Reader = nil c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1beta3/projects/{projectsId}/logServices/{logServicesId}/sinks/{sinksId}") urls += "?" + c.urlParams_.Encode() req, _ := http.NewRequest("GET", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "projectsId": c.projectsId, "logServicesId": c.logServicesId, "sinksId": c.sinksId, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "logging.projects.logServices.sinks.get" call. // Exactly one of *LogSink or error will be non-nil. Any non-2xx status // code is an error. Response headers are in either // *LogSink.ServerResponse.Header or (if a response was returned at all) // in error.(*googleapi.Error).Header. Use googleapi.IsNotModified to // check whether the returned error was because http.StatusNotModified // was returned. func (c *ProjectsLogServicesSinksGetCall) Do(opts ...googleapi.CallOption) (*LogSink, error) { gensupport.SetOptions(c.urlParams_, opts...) 
res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &LogSink{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := json.NewDecoder(res.Body).Decode(target); err != nil { return nil, err } return ret, nil // { // "description": "Gets a log service sink.", // "httpMethod": "GET", // "id": "logging.projects.logServices.sinks.get", // "parameterOrder": [ // "projectsId", // "logServicesId", // "sinksId" // ], // "parameters": { // "logServicesId": { // "description": "Part of `sinkName`. See documentation of `projectsId`.", // "location": "path", // "required": true, // "type": "string" // }, // "projectsId": { // "description": "Part of `sinkName`. The resource name of the log service sink to return.", // "location": "path", // "required": true, // "type": "string" // }, // "sinksId": { // "description": "Part of `sinkName`. See documentation of `projectsId`.", // "location": "path", // "required": true, // "type": "string" // } // }, // "path": "v1beta3/projects/{projectsId}/logServices/{logServicesId}/sinks/{sinksId}", // "response": { // "$ref": "LogSink" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform", // "https://www.googleapis.com/auth/cloud-platform.read-only", // "https://www.googleapis.com/auth/logging.admin", // "https://www.googleapis.com/auth/logging.read" // ] // } } // method id "logging.projects.logServices.sinks.list": type ProjectsLogServicesSinksListCall struct { s *Service projectsId string logServicesId string urlParams_ gensupport.URLParams ifNoneMatch_ string ctx_ context.Context } // List: Lists log service sinks associated with a log service. func (r *ProjectsLogServicesSinksService) List(projectsId string, logServicesId string) *ProjectsLogServicesSinksListCall { c := &ProjectsLogServicesSinksListCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.projectsId = projectsId c.logServicesId = logServicesId return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ProjectsLogServicesSinksListCall) Fields(s ...googleapi.Field) *ProjectsLogServicesSinksListCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // IfNoneMatch sets the optional parameter which makes the operation // fail if the object's ETag matches the given value. This is useful for // getting updates only after the object has changed since the last // request. Use googleapi.IsNotModified to check whether the response // error from Do is the result of In-None-Match. func (c *ProjectsLogServicesSinksListCall) IfNoneMatch(entityTag string) *ProjectsLogServicesSinksListCall { c.ifNoneMatch_ = entityTag return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. 
func (c *ProjectsLogServicesSinksListCall) Context(ctx context.Context) *ProjectsLogServicesSinksListCall { c.ctx_ = ctx return c } func (c *ProjectsLogServicesSinksListCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) reqHeaders.Set("User-Agent", c.s.userAgent()) if c.ifNoneMatch_ != "" { reqHeaders.Set("If-None-Match", c.ifNoneMatch_) } var body io.Reader = nil c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1beta3/projects/{projectsId}/logServices/{logServicesId}/sinks") urls += "?" + c.urlParams_.Encode() req, _ := http.NewRequest("GET", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "projectsId": c.projectsId, "logServicesId": c.logServicesId, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "logging.projects.logServices.sinks.list" call. // Exactly one of *ListLogServiceSinksResponse or error will be non-nil. // Any non-2xx status code is an error. Response headers are in either // *ListLogServiceSinksResponse.ServerResponse.Header or (if a response // was returned at all) in error.(*googleapi.Error).Header. Use // googleapi.IsNotModified to check whether the returned error was // because http.StatusNotModified was returned. func (c *ProjectsLogServicesSinksListCall) Do(opts ...googleapi.CallOption) (*ListLogServiceSinksResponse, error) { gensupport.SetOptions(c.urlParams_, opts...) res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &ListLogServiceSinksResponse{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := json.NewDecoder(res.Body).Decode(target); err != nil { return nil, err } return ret, nil // { // "description": "Lists log service sinks associated with a log service.", // "httpMethod": "GET", // "id": "logging.projects.logServices.sinks.list", // "parameterOrder": [ // "projectsId", // "logServicesId" // ], // "parameters": { // "logServicesId": { // "description": "Part of `serviceName`. See documentation of `projectsId`.", // "location": "path", // "required": true, // "type": "string" // }, // "projectsId": { // "description": "Part of `serviceName`. The log service whose sinks are wanted.", // "location": "path", // "required": true, // "type": "string" // } // }, // "path": "v1beta3/projects/{projectsId}/logServices/{logServicesId}/sinks", // "response": { // "$ref": "ListLogServiceSinksResponse" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform", // "https://www.googleapis.com/auth/cloud-platform.read-only", // "https://www.googleapis.com/auth/logging.admin", // "https://www.googleapis.com/auth/logging.read" // ] // } } // method id "logging.projects.logServices.sinks.update": type ProjectsLogServicesSinksUpdateCall struct { s *Service projectsId string logServicesId string sinksId string logsink *LogSink urlParams_ gensupport.URLParams ctx_ context.Context } // Update: Updates a log service sink. If the sink does not exist, it is // created. 
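//
// Illustrative sketch (not generated code): because Update creates the sink
// when it does not already exist, it can be used as an upsert. The accessor
// wiring (svc.Projects.LogServices.Sinks) and the LogSink fields are assumed
// from earlier in this file.
//
//	sink := &LogSink{
//		// populate the desired sink configuration here
//	}
//	upserted, err := svc.Projects.LogServices.Sinks.Update("my-project-id",
//		"appengine.googleapis.com", "my-sink", sink).Do()
//	_ = upserted
//	if err != nil {
//		// handle error
//	}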
func (r *ProjectsLogServicesSinksService) Update(projectsId string, logServicesId string, sinksId string, logsink *LogSink) *ProjectsLogServicesSinksUpdateCall { c := &ProjectsLogServicesSinksUpdateCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.projectsId = projectsId c.logServicesId = logServicesId c.sinksId = sinksId c.logsink = logsink return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ProjectsLogServicesSinksUpdateCall) Fields(s ...googleapi.Field) *ProjectsLogServicesSinksUpdateCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. func (c *ProjectsLogServicesSinksUpdateCall) Context(ctx context.Context) *ProjectsLogServicesSinksUpdateCall { c.ctx_ = ctx return c } func (c *ProjectsLogServicesSinksUpdateCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) reqHeaders.Set("User-Agent", c.s.userAgent()) var body io.Reader = nil body, err := googleapi.WithoutDataWrapper.JSONReader(c.logsink) if err != nil { return nil, err } reqHeaders.Set("Content-Type", "application/json") c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1beta3/projects/{projectsId}/logServices/{logServicesId}/sinks/{sinksId}") urls += "?" + c.urlParams_.Encode() req, _ := http.NewRequest("PUT", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "projectsId": c.projectsId, "logServicesId": c.logServicesId, "sinksId": c.sinksId, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "logging.projects.logServices.sinks.update" call. // Exactly one of *LogSink or error will be non-nil. Any non-2xx status // code is an error. Response headers are in either // *LogSink.ServerResponse.Header or (if a response was returned at all) // in error.(*googleapi.Error).Header. Use googleapi.IsNotModified to // check whether the returned error was because http.StatusNotModified // was returned. func (c *ProjectsLogServicesSinksUpdateCall) Do(opts ...googleapi.CallOption) (*LogSink, error) { gensupport.SetOptions(c.urlParams_, opts...) res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &LogSink{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := json.NewDecoder(res.Body).Decode(target); err != nil { return nil, err } return ret, nil // { // "description": "Updates a log service sink. If the sink does not exist, it is created.", // "httpMethod": "PUT", // "id": "logging.projects.logServices.sinks.update", // "parameterOrder": [ // "projectsId", // "logServicesId", // "sinksId" // ], // "parameters": { // "logServicesId": { // "description": "Part of `sinkName`. See documentation of `projectsId`.", // "location": "path", // "required": true, // "type": "string" // }, // "projectsId": { // "description": "Part of `sinkName`. 
The resource name of the log service sink to update.", // "location": "path", // "required": true, // "type": "string" // }, // "sinksId": { // "description": "Part of `sinkName`. See documentation of `projectsId`.", // "location": "path", // "required": true, // "type": "string" // } // }, // "path": "v1beta3/projects/{projectsId}/logServices/{logServicesId}/sinks/{sinksId}", // "request": { // "$ref": "LogSink" // }, // "response": { // "$ref": "LogSink" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform", // "https://www.googleapis.com/auth/logging.admin" // ] // } } // method id "logging.projects.logs.delete": type ProjectsLogsDeleteCall struct { s *Service projectsId string logsId string urlParams_ gensupport.URLParams ctx_ context.Context } // Delete: Deletes a log and all its log entries. The log will reappear // if it receives new entries. func (r *ProjectsLogsService) Delete(projectsId string, logsId string) *ProjectsLogsDeleteCall { c := &ProjectsLogsDeleteCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.projectsId = projectsId c.logsId = logsId return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ProjectsLogsDeleteCall) Fields(s ...googleapi.Field) *ProjectsLogsDeleteCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. func (c *ProjectsLogsDeleteCall) Context(ctx context.Context) *ProjectsLogsDeleteCall { c.ctx_ = ctx return c } func (c *ProjectsLogsDeleteCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) reqHeaders.Set("User-Agent", c.s.userAgent()) var body io.Reader = nil c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1beta3/projects/{projectsId}/logs/{logsId}") urls += "?" + c.urlParams_.Encode() req, _ := http.NewRequest("DELETE", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "projectsId": c.projectsId, "logsId": c.logsId, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "logging.projects.logs.delete" call. // Exactly one of *Empty or error will be non-nil. Any non-2xx status // code is an error. Response headers are in either // *Empty.ServerResponse.Header or (if a response was returned at all) // in error.(*googleapi.Error).Header. Use googleapi.IsNotModified to // check whether the returned error was because http.StatusNotModified // was returned. func (c *ProjectsLogsDeleteCall) Do(opts ...googleapi.CallOption) (*Empty, error) { gensupport.SetOptions(c.urlParams_, opts...) res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &Empty{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := json.NewDecoder(res.Body).Decode(target); err != nil { return nil, err } return ret, nil // { // "description": "Deletes a log and all its log entries. 
The log will reappear if it receives new entries.", // "httpMethod": "DELETE", // "id": "logging.projects.logs.delete", // "parameterOrder": [ // "projectsId", // "logsId" // ], // "parameters": { // "logsId": { // "description": "Part of `logName`. See documentation of `projectsId`.", // "location": "path", // "required": true, // "type": "string" // }, // "projectsId": { // "description": "Part of `logName`. The resource name of the log to be deleted.", // "location": "path", // "required": true, // "type": "string" // } // }, // "path": "v1beta3/projects/{projectsId}/logs/{logsId}", // "response": { // "$ref": "Empty" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform", // "https://www.googleapis.com/auth/logging.admin" // ] // } } // method id "logging.projects.logs.list": type ProjectsLogsListCall struct { s *Service projectsId string urlParams_ gensupport.URLParams ifNoneMatch_ string ctx_ context.Context } // List: Lists the logs in the project. Only logs that have entries are // listed. func (r *ProjectsLogsService) List(projectsId string) *ProjectsLogsListCall { c := &ProjectsLogsListCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.projectsId = projectsId return c } // PageSize sets the optional parameter "pageSize": The maximum number // of results to return. func (c *ProjectsLogsListCall) PageSize(pageSize int64) *ProjectsLogsListCall { c.urlParams_.Set("pageSize", fmt.Sprint(pageSize)) return c } // PageToken sets the optional parameter "pageToken": An opaque token, // returned as `nextPageToken` by a prior `ListLogs` operation. If // `pageToken` is supplied, then the other fields of this request are // ignored, and instead the previous `ListLogs` operation is continued. func (c *ProjectsLogsListCall) PageToken(pageToken string) *ProjectsLogsListCall { c.urlParams_.Set("pageToken", pageToken) return c } // ServiceIndexPrefix sets the optional parameter "serviceIndexPrefix": // The purpose of this field is to restrict the listed logs to those // with entries of a certain kind. If `serviceName` is the name of a log // service, then this field may contain values for the log service's // indexes. Only logs that have entries whose indexes include the values // are listed. The format for this field is "/val1/val2.../valN", // where `val1` is a value for the first index, `val2` for the second // index, etc. An empty value (a single slash) for an index matches all // values, and you can omit values for later indexes entirely. func (c *ProjectsLogsListCall) ServiceIndexPrefix(serviceIndexPrefix string) *ProjectsLogsListCall { c.urlParams_.Set("serviceIndexPrefix", serviceIndexPrefix) return c } // ServiceName sets the optional parameter "serviceName": If not empty, // this field must be a log service name such as // "compute.googleapis.com". Only logs associated with that that log // service are listed. func (c *ProjectsLogsListCall) ServiceName(serviceName string) *ProjectsLogsListCall { c.urlParams_.Set("serviceName", serviceName) return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ProjectsLogsListCall) Fields(s ...googleapi.Field) *ProjectsLogsListCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // IfNoneMatch sets the optional parameter which makes the operation // fail if the object's ETag matches the given value. This is useful for // getting updates only after the object has changed since the last // request. 
Use googleapi.IsNotModified to check whether the response // error from Do is the result of In-None-Match. func (c *ProjectsLogsListCall) IfNoneMatch(entityTag string) *ProjectsLogsListCall { c.ifNoneMatch_ = entityTag return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. func (c *ProjectsLogsListCall) Context(ctx context.Context) *ProjectsLogsListCall { c.ctx_ = ctx return c } func (c *ProjectsLogsListCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) reqHeaders.Set("User-Agent", c.s.userAgent()) if c.ifNoneMatch_ != "" { reqHeaders.Set("If-None-Match", c.ifNoneMatch_) } var body io.Reader = nil c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1beta3/projects/{projectsId}/logs") urls += "?" + c.urlParams_.Encode() req, _ := http.NewRequest("GET", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "projectsId": c.projectsId, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "logging.projects.logs.list" call. // Exactly one of *ListLogsResponse or error will be non-nil. Any // non-2xx status code is an error. Response headers are in either // *ListLogsResponse.ServerResponse.Header or (if a response was // returned at all) in error.(*googleapi.Error).Header. Use // googleapi.IsNotModified to check whether the returned error was // because http.StatusNotModified was returned. func (c *ProjectsLogsListCall) Do(opts ...googleapi.CallOption) (*ListLogsResponse, error) { gensupport.SetOptions(c.urlParams_, opts...) res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &ListLogsResponse{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := json.NewDecoder(res.Body).Decode(target); err != nil { return nil, err } return ret, nil // { // "description": "Lists the logs in the project. Only logs that have entries are listed.", // "httpMethod": "GET", // "id": "logging.projects.logs.list", // "parameterOrder": [ // "projectsId" // ], // "parameters": { // "pageSize": { // "description": "The maximum number of results to return.", // "format": "int32", // "location": "query", // "type": "integer" // }, // "pageToken": { // "description": "An opaque token, returned as `nextPageToken` by a prior `ListLogs` operation. If `pageToken` is supplied, then the other fields of this request are ignored, and instead the previous `ListLogs` operation is continued.", // "location": "query", // "type": "string" // }, // "projectsId": { // "description": "Part of `projectName`. The resource name of the project whose logs are requested. If both `serviceName` and `serviceIndexPrefix` are empty, then all logs with entries in this project are listed.", // "location": "path", // "required": true, // "type": "string" // }, // "serviceIndexPrefix": { // "description": "The purpose of this field is to restrict the listed logs to those with entries of a certain kind. If `serviceName` is the name of a log service, then this field may contain values for the log service's indexes. 
Only logs that have entries whose indexes include the values are listed. The format for this field is `\"/val1/val2.../valN\"`, where `val1` is a value for the first index, `val2` for the second index, etc. An empty value (a single slash) for an index matches all values, and you can omit values for later indexes entirely.", // "location": "query", // "type": "string" // }, // "serviceName": { // "description": "If not empty, this field must be a log service name such as `\"compute.googleapis.com\"`. Only logs associated with that that log service are listed.", // "location": "query", // "type": "string" // } // }, // "path": "v1beta3/projects/{projectsId}/logs", // "response": { // "$ref": "ListLogsResponse" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform", // "https://www.googleapis.com/auth/cloud-platform.read-only", // "https://www.googleapis.com/auth/logging.admin", // "https://www.googleapis.com/auth/logging.read" // ] // } } // Pages invokes f for each page of results. // A non-nil error returned from f will halt the iteration. // The provided context supersedes any context provided to the Context method. func (c *ProjectsLogsListCall) Pages(ctx context.Context, f func(*ListLogsResponse) error) error { c.ctx_ = ctx defer c.PageToken(c.urlParams_.Get("pageToken")) // reset paging to original point for { x, err := c.Do() if err != nil { return err } if err := f(x); err != nil { return err } if x.NextPageToken == "" { return nil } c.PageToken(x.NextPageToken) } } // method id "logging.projects.logs.entries.write": type ProjectsLogsEntriesWriteCall struct { s *Service projectsId string logsId string writelogentriesrequest *WriteLogEntriesRequest urlParams_ gensupport.URLParams ctx_ context.Context } // Write: Writes log entries to Cloud Logging. Each entry consists of a // `LogEntry` object. You must fill in all the fields of the object, // including one of the payload fields. You may supply a map, // `commonLabels`, that holds default (key, value) data for the // `entries[].metadata.labels` map in each entry, saving you the trouble // of creating identical copies for each entry. func (r *ProjectsLogsEntriesService) Write(projectsId string, logsId string, writelogentriesrequest *WriteLogEntriesRequest) *ProjectsLogsEntriesWriteCall { c := &ProjectsLogsEntriesWriteCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.projectsId = projectsId c.logsId = logsId c.writelogentriesrequest = writelogentriesrequest return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ProjectsLogsEntriesWriteCall) Fields(s ...googleapi.Field) *ProjectsLogsEntriesWriteCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. 
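//
// Illustrative sketch (not generated code): writing entries with shared
// labels and a deadline. The accessor wiring (svc.Projects.Logs.Entries) is
// assumed from earlier in this file; the LogEntry fields to populate are
// those defined earlier in this package, and the label key shown is only an
// example value.
//
//	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
//	defer cancel()
//	req := &WriteLogEntriesRequest{
//		CommonLabels: map[string]string{"example.com/environment": "prod"},
//		Entries: []*LogEntry{
//			// populate one LogEntry per record, including a payload field
//		},
//	}
//	_, err := svc.Projects.Logs.Entries.Write("my-project-id", "my-log", req).
//		Context(ctx).Do()
//	if err != nil {
//		// handle error
//	}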
func (c *ProjectsLogsEntriesWriteCall) Context(ctx context.Context) *ProjectsLogsEntriesWriteCall { c.ctx_ = ctx return c } func (c *ProjectsLogsEntriesWriteCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) reqHeaders.Set("User-Agent", c.s.userAgent()) var body io.Reader = nil body, err := googleapi.WithoutDataWrapper.JSONReader(c.writelogentriesrequest) if err != nil { return nil, err } reqHeaders.Set("Content-Type", "application/json") c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1beta3/projects/{projectsId}/logs/{logsId}/entries:write") urls += "?" + c.urlParams_.Encode() req, _ := http.NewRequest("POST", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "projectsId": c.projectsId, "logsId": c.logsId, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "logging.projects.logs.entries.write" call. // Exactly one of *WriteLogEntriesResponse or error will be non-nil. Any // non-2xx status code is an error. Response headers are in either // *WriteLogEntriesResponse.ServerResponse.Header or (if a response was // returned at all) in error.(*googleapi.Error).Header. Use // googleapi.IsNotModified to check whether the returned error was // because http.StatusNotModified was returned. func (c *ProjectsLogsEntriesWriteCall) Do(opts ...googleapi.CallOption) (*WriteLogEntriesResponse, error) { gensupport.SetOptions(c.urlParams_, opts...) res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &WriteLogEntriesResponse{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := json.NewDecoder(res.Body).Decode(target); err != nil { return nil, err } return ret, nil // { // "description": "Writes log entries to Cloud Logging. Each entry consists of a `LogEntry` object. You must fill in all the fields of the object, including one of the payload fields. You may supply a map, `commonLabels`, that holds default (key, value) data for the `entries[].metadata.labels` map in each entry, saving you the trouble of creating identical copies for each entry.", // "httpMethod": "POST", // "id": "logging.projects.logs.entries.write", // "parameterOrder": [ // "projectsId", // "logsId" // ], // "parameters": { // "logsId": { // "description": "Part of `logName`. See documentation of `projectsId`.", // "location": "path", // "required": true, // "type": "string" // }, // "projectsId": { // "description": "Part of `logName`. 
The resource name of the log that will receive the log entries.", // "location": "path", // "required": true, // "type": "string" // } // }, // "path": "v1beta3/projects/{projectsId}/logs/{logsId}/entries:write", // "request": { // "$ref": "WriteLogEntriesRequest" // }, // "response": { // "$ref": "WriteLogEntriesResponse" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform", // "https://www.googleapis.com/auth/logging.admin", // "https://www.googleapis.com/auth/logging.write" // ] // } } // method id "logging.projects.logs.sinks.create": type ProjectsLogsSinksCreateCall struct { s *Service projectsId string logsId string logsink *LogSink urlParams_ gensupport.URLParams ctx_ context.Context } // Create: Creates a log sink. All log entries for a specified log are // written to the destination. func (r *ProjectsLogsSinksService) Create(projectsId string, logsId string, logsink *LogSink) *ProjectsLogsSinksCreateCall { c := &ProjectsLogsSinksCreateCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.projectsId = projectsId c.logsId = logsId c.logsink = logsink return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ProjectsLogsSinksCreateCall) Fields(s ...googleapi.Field) *ProjectsLogsSinksCreateCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. func (c *ProjectsLogsSinksCreateCall) Context(ctx context.Context) *ProjectsLogsSinksCreateCall { c.ctx_ = ctx return c } func (c *ProjectsLogsSinksCreateCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) reqHeaders.Set("User-Agent", c.s.userAgent()) var body io.Reader = nil body, err := googleapi.WithoutDataWrapper.JSONReader(c.logsink) if err != nil { return nil, err } reqHeaders.Set("Content-Type", "application/json") c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1beta3/projects/{projectsId}/logs/{logsId}/sinks") urls += "?" + c.urlParams_.Encode() req, _ := http.NewRequest("POST", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "projectsId": c.projectsId, "logsId": c.logsId, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "logging.projects.logs.sinks.create" call. // Exactly one of *LogSink or error will be non-nil. Any non-2xx status // code is an error. Response headers are in either // *LogSink.ServerResponse.Header or (if a response was returned at all) // in error.(*googleapi.Error).Header. Use googleapi.IsNotModified to // check whether the returned error was because http.StatusNotModified // was returned. func (c *ProjectsLogsSinksCreateCall) Do(opts ...googleapi.CallOption) (*LogSink, error) { gensupport.SetOptions(c.urlParams_, opts...) 
res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &LogSink{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := json.NewDecoder(res.Body).Decode(target); err != nil { return nil, err } return ret, nil // { // "description": "Creates a log sink. All log entries for a specified log are written to the destination.", // "httpMethod": "POST", // "id": "logging.projects.logs.sinks.create", // "parameterOrder": [ // "projectsId", // "logsId" // ], // "parameters": { // "logsId": { // "description": "Part of `logName`. See documentation of `projectsId`.", // "location": "path", // "required": true, // "type": "string" // }, // "projectsId": { // "description": "Part of `logName`. The resource name of the log to which to the sink is bound.", // "location": "path", // "required": true, // "type": "string" // } // }, // "path": "v1beta3/projects/{projectsId}/logs/{logsId}/sinks", // "request": { // "$ref": "LogSink" // }, // "response": { // "$ref": "LogSink" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform", // "https://www.googleapis.com/auth/logging.admin" // ] // } } // method id "logging.projects.logs.sinks.delete": type ProjectsLogsSinksDeleteCall struct { s *Service projectsId string logsId string sinksId string urlParams_ gensupport.URLParams ctx_ context.Context } // Delete: Deletes a log sink. After deletion, no new log entries are // written to the destination. func (r *ProjectsLogsSinksService) Delete(projectsId string, logsId string, sinksId string) *ProjectsLogsSinksDeleteCall { c := &ProjectsLogsSinksDeleteCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.projectsId = projectsId c.logsId = logsId c.sinksId = sinksId return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ProjectsLogsSinksDeleteCall) Fields(s ...googleapi.Field) *ProjectsLogsSinksDeleteCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. func (c *ProjectsLogsSinksDeleteCall) Context(ctx context.Context) *ProjectsLogsSinksDeleteCall { c.ctx_ = ctx return c } func (c *ProjectsLogsSinksDeleteCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) reqHeaders.Set("User-Agent", c.s.userAgent()) var body io.Reader = nil c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1beta3/projects/{projectsId}/logs/{logsId}/sinks/{sinksId}") urls += "?" + c.urlParams_.Encode() req, _ := http.NewRequest("DELETE", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "projectsId": c.projectsId, "logsId": c.logsId, "sinksId": c.sinksId, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "logging.projects.logs.sinks.delete" call. // Exactly one of *Empty or error will be non-nil. Any non-2xx status // code is an error. Response headers are in either // *Empty.ServerResponse.Header or (if a response was returned at all) // in error.(*googleapi.Error).Header. 
Use googleapi.IsNotModified to // check whether the returned error was because http.StatusNotModified // was returned. func (c *ProjectsLogsSinksDeleteCall) Do(opts ...googleapi.CallOption) (*Empty, error) { gensupport.SetOptions(c.urlParams_, opts...) res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &Empty{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := json.NewDecoder(res.Body).Decode(target); err != nil { return nil, err } return ret, nil // { // "description": "Deletes a log sink. After deletion, no new log entries are written to the destination.", // "httpMethod": "DELETE", // "id": "logging.projects.logs.sinks.delete", // "parameterOrder": [ // "projectsId", // "logsId", // "sinksId" // ], // "parameters": { // "logsId": { // "description": "Part of `sinkName`. See documentation of `projectsId`.", // "location": "path", // "required": true, // "type": "string" // }, // "projectsId": { // "description": "Part of `sinkName`. The resource name of the log sink to delete.", // "location": "path", // "required": true, // "type": "string" // }, // "sinksId": { // "description": "Part of `sinkName`. See documentation of `projectsId`.", // "location": "path", // "required": true, // "type": "string" // } // }, // "path": "v1beta3/projects/{projectsId}/logs/{logsId}/sinks/{sinksId}", // "response": { // "$ref": "Empty" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform", // "https://www.googleapis.com/auth/logging.admin" // ] // } } // method id "logging.projects.logs.sinks.get": type ProjectsLogsSinksGetCall struct { s *Service projectsId string logsId string sinksId string urlParams_ gensupport.URLParams ifNoneMatch_ string ctx_ context.Context } // Get: Gets a log sink. func (r *ProjectsLogsSinksService) Get(projectsId string, logsId string, sinksId string) *ProjectsLogsSinksGetCall { c := &ProjectsLogsSinksGetCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.projectsId = projectsId c.logsId = logsId c.sinksId = sinksId return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ProjectsLogsSinksGetCall) Fields(s ...googleapi.Field) *ProjectsLogsSinksGetCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // IfNoneMatch sets the optional parameter which makes the operation // fail if the object's ETag matches the given value. This is useful for // getting updates only after the object has changed since the last // request. Use googleapi.IsNotModified to check whether the response // error from Do is the result of In-None-Match. func (c *ProjectsLogsSinksGetCall) IfNoneMatch(entityTag string) *ProjectsLogsSinksGetCall { c.ifNoneMatch_ = entityTag return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. 
func (c *ProjectsLogsSinksGetCall) Context(ctx context.Context) *ProjectsLogsSinksGetCall { c.ctx_ = ctx return c } func (c *ProjectsLogsSinksGetCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) reqHeaders.Set("User-Agent", c.s.userAgent()) if c.ifNoneMatch_ != "" { reqHeaders.Set("If-None-Match", c.ifNoneMatch_) } var body io.Reader = nil c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1beta3/projects/{projectsId}/logs/{logsId}/sinks/{sinksId}") urls += "?" + c.urlParams_.Encode() req, _ := http.NewRequest("GET", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "projectsId": c.projectsId, "logsId": c.logsId, "sinksId": c.sinksId, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "logging.projects.logs.sinks.get" call. // Exactly one of *LogSink or error will be non-nil. Any non-2xx status // code is an error. Response headers are in either // *LogSink.ServerResponse.Header or (if a response was returned at all) // in error.(*googleapi.Error).Header. Use googleapi.IsNotModified to // check whether the returned error was because http.StatusNotModified // was returned. func (c *ProjectsLogsSinksGetCall) Do(opts ...googleapi.CallOption) (*LogSink, error) { gensupport.SetOptions(c.urlParams_, opts...) res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &LogSink{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := json.NewDecoder(res.Body).Decode(target); err != nil { return nil, err } return ret, nil // { // "description": "Gets a log sink.", // "httpMethod": "GET", // "id": "logging.projects.logs.sinks.get", // "parameterOrder": [ // "projectsId", // "logsId", // "sinksId" // ], // "parameters": { // "logsId": { // "description": "Part of `sinkName`. See documentation of `projectsId`.", // "location": "path", // "required": true, // "type": "string" // }, // "projectsId": { // "description": "Part of `sinkName`. The resource name of the log sink to return.", // "location": "path", // "required": true, // "type": "string" // }, // "sinksId": { // "description": "Part of `sinkName`. See documentation of `projectsId`.", // "location": "path", // "required": true, // "type": "string" // } // }, // "path": "v1beta3/projects/{projectsId}/logs/{logsId}/sinks/{sinksId}", // "response": { // "$ref": "LogSink" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform", // "https://www.googleapis.com/auth/cloud-platform.read-only", // "https://www.googleapis.com/auth/logging.admin", // "https://www.googleapis.com/auth/logging.read" // ] // } } // method id "logging.projects.logs.sinks.list": type ProjectsLogsSinksListCall struct { s *Service projectsId string logsId string urlParams_ gensupport.URLParams ifNoneMatch_ string ctx_ context.Context } // List: Lists log sinks associated with a log. func (r *ProjectsLogsSinksService) List(projectsId string, logsId string) *ProjectsLogsSinksListCall { c := &ProjectsLogsSinksListCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.projectsId = projectsId c.logsId = logsId return c } // Fields allows partial responses to be retrieved. 
See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ProjectsLogsSinksListCall) Fields(s ...googleapi.Field) *ProjectsLogsSinksListCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // IfNoneMatch sets the optional parameter which makes the operation // fail if the object's ETag matches the given value. This is useful for // getting updates only after the object has changed since the last // request. Use googleapi.IsNotModified to check whether the response // error from Do is the result of In-None-Match. func (c *ProjectsLogsSinksListCall) IfNoneMatch(entityTag string) *ProjectsLogsSinksListCall { c.ifNoneMatch_ = entityTag return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. func (c *ProjectsLogsSinksListCall) Context(ctx context.Context) *ProjectsLogsSinksListCall { c.ctx_ = ctx return c } func (c *ProjectsLogsSinksListCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) reqHeaders.Set("User-Agent", c.s.userAgent()) if c.ifNoneMatch_ != "" { reqHeaders.Set("If-None-Match", c.ifNoneMatch_) } var body io.Reader = nil c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1beta3/projects/{projectsId}/logs/{logsId}/sinks") urls += "?" + c.urlParams_.Encode() req, _ := http.NewRequest("GET", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "projectsId": c.projectsId, "logsId": c.logsId, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "logging.projects.logs.sinks.list" call. // Exactly one of *ListLogSinksResponse or error will be non-nil. Any // non-2xx status code is an error. Response headers are in either // *ListLogSinksResponse.ServerResponse.Header or (if a response was // returned at all) in error.(*googleapi.Error).Header. Use // googleapi.IsNotModified to check whether the returned error was // because http.StatusNotModified was returned. func (c *ProjectsLogsSinksListCall) Do(opts ...googleapi.CallOption) (*ListLogSinksResponse, error) { gensupport.SetOptions(c.urlParams_, opts...) res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &ListLogSinksResponse{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := json.NewDecoder(res.Body).Decode(target); err != nil { return nil, err } return ret, nil // { // "description": "Lists log sinks associated with a log.", // "httpMethod": "GET", // "id": "logging.projects.logs.sinks.list", // "parameterOrder": [ // "projectsId", // "logsId" // ], // "parameters": { // "logsId": { // "description": "Part of `logName`. See documentation of `projectsId`.", // "location": "path", // "required": true, // "type": "string" // }, // "projectsId": { // "description": "Part of `logName`. The log whose sinks are wanted. 
For example, `\"compute.google.com/syslog\"`.", // "location": "path", // "required": true, // "type": "string" // } // }, // "path": "v1beta3/projects/{projectsId}/logs/{logsId}/sinks", // "response": { // "$ref": "ListLogSinksResponse" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform", // "https://www.googleapis.com/auth/cloud-platform.read-only", // "https://www.googleapis.com/auth/logging.admin", // "https://www.googleapis.com/auth/logging.read" // ] // } } // method id "logging.projects.logs.sinks.update": type ProjectsLogsSinksUpdateCall struct { s *Service projectsId string logsId string sinksId string logsink *LogSink urlParams_ gensupport.URLParams ctx_ context.Context } // Update: Updates a log sink. If the sink does not exist, it is // created. func (r *ProjectsLogsSinksService) Update(projectsId string, logsId string, sinksId string, logsink *LogSink) *ProjectsLogsSinksUpdateCall { c := &ProjectsLogsSinksUpdateCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.projectsId = projectsId c.logsId = logsId c.sinksId = sinksId c.logsink = logsink return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ProjectsLogsSinksUpdateCall) Fields(s ...googleapi.Field) *ProjectsLogsSinksUpdateCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. func (c *ProjectsLogsSinksUpdateCall) Context(ctx context.Context) *ProjectsLogsSinksUpdateCall { c.ctx_ = ctx return c } func (c *ProjectsLogsSinksUpdateCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) reqHeaders.Set("User-Agent", c.s.userAgent()) var body io.Reader = nil body, err := googleapi.WithoutDataWrapper.JSONReader(c.logsink) if err != nil { return nil, err } reqHeaders.Set("Content-Type", "application/json") c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1beta3/projects/{projectsId}/logs/{logsId}/sinks/{sinksId}") urls += "?" + c.urlParams_.Encode() req, _ := http.NewRequest("PUT", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "projectsId": c.projectsId, "logsId": c.logsId, "sinksId": c.sinksId, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "logging.projects.logs.sinks.update" call. // Exactly one of *LogSink or error will be non-nil. Any non-2xx status // code is an error. Response headers are in either // *LogSink.ServerResponse.Header or (if a response was returned at all) // in error.(*googleapi.Error).Header. Use googleapi.IsNotModified to // check whether the returned error was because http.StatusNotModified // was returned. func (c *ProjectsLogsSinksUpdateCall) Do(opts ...googleapi.CallOption) (*LogSink, error) { gensupport.SetOptions(c.urlParams_, opts...) 
res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &LogSink{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := json.NewDecoder(res.Body).Decode(target); err != nil { return nil, err } return ret, nil // { // "description": "Updates a log sink. If the sink does not exist, it is created.", // "httpMethod": "PUT", // "id": "logging.projects.logs.sinks.update", // "parameterOrder": [ // "projectsId", // "logsId", // "sinksId" // ], // "parameters": { // "logsId": { // "description": "Part of `sinkName`. See documentation of `projectsId`.", // "location": "path", // "required": true, // "type": "string" // }, // "projectsId": { // "description": "Part of `sinkName`. The resource name of the sink to update.", // "location": "path", // "required": true, // "type": "string" // }, // "sinksId": { // "description": "Part of `sinkName`. See documentation of `projectsId`.", // "location": "path", // "required": true, // "type": "string" // } // }, // "path": "v1beta3/projects/{projectsId}/logs/{logsId}/sinks/{sinksId}", // "request": { // "$ref": "LogSink" // }, // "response": { // "$ref": "LogSink" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform", // "https://www.googleapis.com/auth/logging.admin" // ] // } } // method id "logging.projects.metrics.create": type ProjectsMetricsCreateCall struct { s *Service projectsId string logmetric *LogMetric urlParams_ gensupport.URLParams ctx_ context.Context } // Create: Creates a logs-based metric. func (r *ProjectsMetricsService) Create(projectsId string, logmetric *LogMetric) *ProjectsMetricsCreateCall { c := &ProjectsMetricsCreateCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.projectsId = projectsId c.logmetric = logmetric return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ProjectsMetricsCreateCall) Fields(s ...googleapi.Field) *ProjectsMetricsCreateCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. func (c *ProjectsMetricsCreateCall) Context(ctx context.Context) *ProjectsMetricsCreateCall { c.ctx_ = ctx return c } func (c *ProjectsMetricsCreateCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) reqHeaders.Set("User-Agent", c.s.userAgent()) var body io.Reader = nil body, err := googleapi.WithoutDataWrapper.JSONReader(c.logmetric) if err != nil { return nil, err } reqHeaders.Set("Content-Type", "application/json") c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1beta3/projects/{projectsId}/metrics") urls += "?" + c.urlParams_.Encode() req, _ := http.NewRequest("POST", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "projectsId": c.projectsId, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "logging.projects.metrics.create" call. // Exactly one of *LogMetric or error will be non-nil. Any non-2xx // status code is an error. 
Response headers are in either // *LogMetric.ServerResponse.Header or (if a response was returned at // all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified // to check whether the returned error was because // http.StatusNotModified was returned. func (c *ProjectsMetricsCreateCall) Do(opts ...googleapi.CallOption) (*LogMetric, error) { gensupport.SetOptions(c.urlParams_, opts...) res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &LogMetric{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := json.NewDecoder(res.Body).Decode(target); err != nil { return nil, err } return ret, nil // { // "description": "Creates a logs-based metric.", // "httpMethod": "POST", // "id": "logging.projects.metrics.create", // "parameterOrder": [ // "projectsId" // ], // "parameters": { // "projectsId": { // "description": "Part of `projectName`. The resource name of the project in which to create the metric.", // "location": "path", // "required": true, // "type": "string" // } // }, // "path": "v1beta3/projects/{projectsId}/metrics", // "request": { // "$ref": "LogMetric" // }, // "response": { // "$ref": "LogMetric" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform", // "https://www.googleapis.com/auth/logging.admin", // "https://www.googleapis.com/auth/logging.write" // ] // } } // method id "logging.projects.metrics.delete": type ProjectsMetricsDeleteCall struct { s *Service projectsId string metricsId string urlParams_ gensupport.URLParams ctx_ context.Context } // Delete: Deletes a logs-based metric. func (r *ProjectsMetricsService) Delete(projectsId string, metricsId string) *ProjectsMetricsDeleteCall { c := &ProjectsMetricsDeleteCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.projectsId = projectsId c.metricsId = metricsId return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ProjectsMetricsDeleteCall) Fields(s ...googleapi.Field) *ProjectsMetricsDeleteCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. func (c *ProjectsMetricsDeleteCall) Context(ctx context.Context) *ProjectsMetricsDeleteCall { c.ctx_ = ctx return c } func (c *ProjectsMetricsDeleteCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) reqHeaders.Set("User-Agent", c.s.userAgent()) var body io.Reader = nil c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1beta3/projects/{projectsId}/metrics/{metricsId}") urls += "?" + c.urlParams_.Encode() req, _ := http.NewRequest("DELETE", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "projectsId": c.projectsId, "metricsId": c.metricsId, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "logging.projects.metrics.delete" call. // Exactly one of *Empty or error will be non-nil. Any non-2xx status // code is an error. 
Response headers are in either // *Empty.ServerResponse.Header or (if a response was returned at all) // in error.(*googleapi.Error).Header. Use googleapi.IsNotModified to // check whether the returned error was because http.StatusNotModified // was returned. func (c *ProjectsMetricsDeleteCall) Do(opts ...googleapi.CallOption) (*Empty, error) { gensupport.SetOptions(c.urlParams_, opts...) res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &Empty{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := json.NewDecoder(res.Body).Decode(target); err != nil { return nil, err } return ret, nil // { // "description": "Deletes a logs-based metric.", // "httpMethod": "DELETE", // "id": "logging.projects.metrics.delete", // "parameterOrder": [ // "projectsId", // "metricsId" // ], // "parameters": { // "metricsId": { // "description": "Part of `metricName`. See documentation of `projectsId`.", // "location": "path", // "required": true, // "type": "string" // }, // "projectsId": { // "description": "Part of `metricName`. The resource name of the metric to delete.", // "location": "path", // "required": true, // "type": "string" // } // }, // "path": "v1beta3/projects/{projectsId}/metrics/{metricsId}", // "response": { // "$ref": "Empty" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform", // "https://www.googleapis.com/auth/logging.admin", // "https://www.googleapis.com/auth/logging.write" // ] // } } // method id "logging.projects.metrics.get": type ProjectsMetricsGetCall struct { s *Service projectsId string metricsId string urlParams_ gensupport.URLParams ifNoneMatch_ string ctx_ context.Context } // Get: Gets a logs-based metric. func (r *ProjectsMetricsService) Get(projectsId string, metricsId string) *ProjectsMetricsGetCall { c := &ProjectsMetricsGetCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.projectsId = projectsId c.metricsId = metricsId return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ProjectsMetricsGetCall) Fields(s ...googleapi.Field) *ProjectsMetricsGetCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // IfNoneMatch sets the optional parameter which makes the operation // fail if the object's ETag matches the given value. This is useful for // getting updates only after the object has changed since the last // request. Use googleapi.IsNotModified to check whether the response // error from Do is the result of In-None-Match. func (c *ProjectsMetricsGetCall) IfNoneMatch(entityTag string) *ProjectsMetricsGetCall { c.ifNoneMatch_ = entityTag return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. 
func (c *ProjectsMetricsGetCall) Context(ctx context.Context) *ProjectsMetricsGetCall { c.ctx_ = ctx return c } func (c *ProjectsMetricsGetCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) reqHeaders.Set("User-Agent", c.s.userAgent()) if c.ifNoneMatch_ != "" { reqHeaders.Set("If-None-Match", c.ifNoneMatch_) } var body io.Reader = nil c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1beta3/projects/{projectsId}/metrics/{metricsId}") urls += "?" + c.urlParams_.Encode() req, _ := http.NewRequest("GET", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "projectsId": c.projectsId, "metricsId": c.metricsId, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "logging.projects.metrics.get" call. // Exactly one of *LogMetric or error will be non-nil. Any non-2xx // status code is an error. Response headers are in either // *LogMetric.ServerResponse.Header or (if a response was returned at // all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified // to check whether the returned error was because // http.StatusNotModified was returned. func (c *ProjectsMetricsGetCall) Do(opts ...googleapi.CallOption) (*LogMetric, error) { gensupport.SetOptions(c.urlParams_, opts...) res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &LogMetric{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := json.NewDecoder(res.Body).Decode(target); err != nil { return nil, err } return ret, nil // { // "description": "Gets a logs-based metric.", // "httpMethod": "GET", // "id": "logging.projects.metrics.get", // "parameterOrder": [ // "projectsId", // "metricsId" // ], // "parameters": { // "metricsId": { // "description": "Part of `metricName`. See documentation of `projectsId`.", // "location": "path", // "required": true, // "type": "string" // }, // "projectsId": { // "description": "Part of `metricName`. The resource name of the desired metric.", // "location": "path", // "required": true, // "type": "string" // } // }, // "path": "v1beta3/projects/{projectsId}/metrics/{metricsId}", // "response": { // "$ref": "LogMetric" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform", // "https://www.googleapis.com/auth/cloud-platform.read-only", // "https://www.googleapis.com/auth/logging.admin", // "https://www.googleapis.com/auth/logging.read" // ] // } } // method id "logging.projects.metrics.list": type ProjectsMetricsListCall struct { s *Service projectsId string urlParams_ gensupport.URLParams ifNoneMatch_ string ctx_ context.Context } // List: Lists the logs-based metrics associated with a project. func (r *ProjectsMetricsService) List(projectsId string) *ProjectsMetricsListCall { c := &ProjectsMetricsListCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.projectsId = projectsId return c } // PageSize sets the optional parameter "pageSize": The maximum number // of `LogMetric` objects to return in one operation. 
func (c *ProjectsMetricsListCall) PageSize(pageSize int64) *ProjectsMetricsListCall { c.urlParams_.Set("pageSize", fmt.Sprint(pageSize)) return c } // PageToken sets the optional parameter "pageToken": An opaque token, // returned as `nextPageToken` by a prior `ListLogMetrics` operation. If // `pageToken` is supplied, then the other fields of this request are // ignored, and instead the previous `ListLogMetrics` operation is // continued. func (c *ProjectsMetricsListCall) PageToken(pageToken string) *ProjectsMetricsListCall { c.urlParams_.Set("pageToken", pageToken) return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ProjectsMetricsListCall) Fields(s ...googleapi.Field) *ProjectsMetricsListCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // IfNoneMatch sets the optional parameter which makes the operation // fail if the object's ETag matches the given value. This is useful for // getting updates only after the object has changed since the last // request. Use googleapi.IsNotModified to check whether the response // error from Do is the result of In-None-Match. func (c *ProjectsMetricsListCall) IfNoneMatch(entityTag string) *ProjectsMetricsListCall { c.ifNoneMatch_ = entityTag return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. func (c *ProjectsMetricsListCall) Context(ctx context.Context) *ProjectsMetricsListCall { c.ctx_ = ctx return c } func (c *ProjectsMetricsListCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) reqHeaders.Set("User-Agent", c.s.userAgent()) if c.ifNoneMatch_ != "" { reqHeaders.Set("If-None-Match", c.ifNoneMatch_) } var body io.Reader = nil c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1beta3/projects/{projectsId}/metrics") urls += "?" + c.urlParams_.Encode() req, _ := http.NewRequest("GET", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "projectsId": c.projectsId, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "logging.projects.metrics.list" call. // Exactly one of *ListLogMetricsResponse or error will be non-nil. Any // non-2xx status code is an error. Response headers are in either // *ListLogMetricsResponse.ServerResponse.Header or (if a response was // returned at all) in error.(*googleapi.Error).Header. Use // googleapi.IsNotModified to check whether the returned error was // because http.StatusNotModified was returned. func (c *ProjectsMetricsListCall) Do(opts ...googleapi.CallOption) (*ListLogMetricsResponse, error) { gensupport.SetOptions(c.urlParams_, opts...) 
res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &ListLogMetricsResponse{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := json.NewDecoder(res.Body).Decode(target); err != nil { return nil, err } return ret, nil // { // "description": "Lists the logs-based metrics associated with a project.", // "httpMethod": "GET", // "id": "logging.projects.metrics.list", // "parameterOrder": [ // "projectsId" // ], // "parameters": { // "pageSize": { // "description": "The maximum number of `LogMetric` objects to return in one operation.", // "format": "int32", // "location": "query", // "type": "integer" // }, // "pageToken": { // "description": "An opaque token, returned as `nextPageToken` by a prior `ListLogMetrics` operation. If `pageToken` is supplied, then the other fields of this request are ignored, and instead the previous `ListLogMetrics` operation is continued.", // "location": "query", // "type": "string" // }, // "projectsId": { // "description": "Part of `projectName`. The resource name for the project whose metrics are wanted.", // "location": "path", // "required": true, // "type": "string" // } // }, // "path": "v1beta3/projects/{projectsId}/metrics", // "response": { // "$ref": "ListLogMetricsResponse" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform", // "https://www.googleapis.com/auth/cloud-platform.read-only", // "https://www.googleapis.com/auth/logging.admin", // "https://www.googleapis.com/auth/logging.read" // ] // } } // Pages invokes f for each page of results. // A non-nil error returned from f will halt the iteration. // The provided context supersedes any context provided to the Context method. func (c *ProjectsMetricsListCall) Pages(ctx context.Context, f func(*ListLogMetricsResponse) error) error { c.ctx_ = ctx defer c.PageToken(c.urlParams_.Get("pageToken")) // reset paging to original point for { x, err := c.Do() if err != nil { return err } if err := f(x); err != nil { return err } if x.NextPageToken == "" { return nil } c.PageToken(x.NextPageToken) } } // method id "logging.projects.metrics.update": type ProjectsMetricsUpdateCall struct { s *Service projectsId string metricsId string logmetric *LogMetric urlParams_ gensupport.URLParams ctx_ context.Context } // Update: Creates or updates a logs-based metric. func (r *ProjectsMetricsService) Update(projectsId string, metricsId string, logmetric *LogMetric) *ProjectsMetricsUpdateCall { c := &ProjectsMetricsUpdateCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.projectsId = projectsId c.metricsId = metricsId c.logmetric = logmetric return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ProjectsMetricsUpdateCall) Fields(s ...googleapi.Field) *ProjectsMetricsUpdateCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. 
func (c *ProjectsMetricsUpdateCall) Context(ctx context.Context) *ProjectsMetricsUpdateCall { c.ctx_ = ctx return c } func (c *ProjectsMetricsUpdateCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) reqHeaders.Set("User-Agent", c.s.userAgent()) var body io.Reader = nil body, err := googleapi.WithoutDataWrapper.JSONReader(c.logmetric) if err != nil { return nil, err } reqHeaders.Set("Content-Type", "application/json") c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1beta3/projects/{projectsId}/metrics/{metricsId}") urls += "?" + c.urlParams_.Encode() req, _ := http.NewRequest("PUT", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "projectsId": c.projectsId, "metricsId": c.metricsId, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "logging.projects.metrics.update" call. // Exactly one of *LogMetric or error will be non-nil. Any non-2xx // status code is an error. Response headers are in either // *LogMetric.ServerResponse.Header or (if a response was returned at // all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified // to check whether the returned error was because // http.StatusNotModified was returned. func (c *ProjectsMetricsUpdateCall) Do(opts ...googleapi.CallOption) (*LogMetric, error) { gensupport.SetOptions(c.urlParams_, opts...) res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &LogMetric{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := json.NewDecoder(res.Body).Decode(target); err != nil { return nil, err } return ret, nil // { // "description": "Creates or updates a logs-based metric.", // "httpMethod": "PUT", // "id": "logging.projects.metrics.update", // "parameterOrder": [ // "projectsId", // "metricsId" // ], // "parameters": { // "metricsId": { // "description": "Part of `metricName`. See documentation of `projectsId`.", // "location": "path", // "required": true, // "type": "string" // }, // "projectsId": { // "description": "Part of `metricName`. The resource name of the metric to update.", // "location": "path", // "required": true, // "type": "string" // } // }, // "path": "v1beta3/projects/{projectsId}/metrics/{metricsId}", // "request": { // "$ref": "LogMetric" // }, // "response": { // "$ref": "LogMetric" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform", // "https://www.googleapis.com/auth/logging.admin", // "https://www.googleapis.com/auth/logging.write" // ] // } } // method id "logging.projects.sinks.create": type ProjectsSinksCreateCall struct { s *Service projectsId string logsink *LogSink urlParams_ gensupport.URLParams ctx_ context.Context } // Create: Creates a project sink. A logs filter determines which log // entries are written to the destination. func (r *ProjectsSinksService) Create(projectsId string, logsink *LogSink) *ProjectsSinksCreateCall { c := &ProjectsSinksCreateCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.projectsId = projectsId c.logsink = logsink return c } // Fields allows partial responses to be retrieved. 
See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ProjectsSinksCreateCall) Fields(s ...googleapi.Field) *ProjectsSinksCreateCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. func (c *ProjectsSinksCreateCall) Context(ctx context.Context) *ProjectsSinksCreateCall { c.ctx_ = ctx return c } func (c *ProjectsSinksCreateCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) reqHeaders.Set("User-Agent", c.s.userAgent()) var body io.Reader = nil body, err := googleapi.WithoutDataWrapper.JSONReader(c.logsink) if err != nil { return nil, err } reqHeaders.Set("Content-Type", "application/json") c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1beta3/projects/{projectsId}/sinks") urls += "?" + c.urlParams_.Encode() req, _ := http.NewRequest("POST", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "projectsId": c.projectsId, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "logging.projects.sinks.create" call. // Exactly one of *LogSink or error will be non-nil. Any non-2xx status // code is an error. Response headers are in either // *LogSink.ServerResponse.Header or (if a response was returned at all) // in error.(*googleapi.Error).Header. Use googleapi.IsNotModified to // check whether the returned error was because http.StatusNotModified // was returned. func (c *ProjectsSinksCreateCall) Do(opts ...googleapi.CallOption) (*LogSink, error) { gensupport.SetOptions(c.urlParams_, opts...) res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &LogSink{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := json.NewDecoder(res.Body).Decode(target); err != nil { return nil, err } return ret, nil // { // "description": "Creates a project sink. A logs filter determines which log entries are written to the destination.", // "httpMethod": "POST", // "id": "logging.projects.sinks.create", // "parameterOrder": [ // "projectsId" // ], // "parameters": { // "projectsId": { // "description": "Part of `projectName`. The resource name of the project to which the sink is bound.", // "location": "path", // "required": true, // "type": "string" // } // }, // "path": "v1beta3/projects/{projectsId}/sinks", // "request": { // "$ref": "LogSink" // }, // "response": { // "$ref": "LogSink" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform", // "https://www.googleapis.com/auth/logging.admin" // ] // } } // method id "logging.projects.sinks.delete": type ProjectsSinksDeleteCall struct { s *Service projectsId string sinksId string urlParams_ gensupport.URLParams ctx_ context.Context } // Delete: Deletes a project sink. After deletion, no new log entries // are written to the destination. 
func (r *ProjectsSinksService) Delete(projectsId string, sinksId string) *ProjectsSinksDeleteCall { c := &ProjectsSinksDeleteCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.projectsId = projectsId c.sinksId = sinksId return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ProjectsSinksDeleteCall) Fields(s ...googleapi.Field) *ProjectsSinksDeleteCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. func (c *ProjectsSinksDeleteCall) Context(ctx context.Context) *ProjectsSinksDeleteCall { c.ctx_ = ctx return c } func (c *ProjectsSinksDeleteCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) reqHeaders.Set("User-Agent", c.s.userAgent()) var body io.Reader = nil c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1beta3/projects/{projectsId}/sinks/{sinksId}") urls += "?" + c.urlParams_.Encode() req, _ := http.NewRequest("DELETE", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "projectsId": c.projectsId, "sinksId": c.sinksId, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "logging.projects.sinks.delete" call. // Exactly one of *Empty or error will be non-nil. Any non-2xx status // code is an error. Response headers are in either // *Empty.ServerResponse.Header or (if a response was returned at all) // in error.(*googleapi.Error).Header. Use googleapi.IsNotModified to // check whether the returned error was because http.StatusNotModified // was returned. func (c *ProjectsSinksDeleteCall) Do(opts ...googleapi.CallOption) (*Empty, error) { gensupport.SetOptions(c.urlParams_, opts...) res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &Empty{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := json.NewDecoder(res.Body).Decode(target); err != nil { return nil, err } return ret, nil // { // "description": "Deletes a project sink. After deletion, no new log entries are written to the destination.", // "httpMethod": "DELETE", // "id": "logging.projects.sinks.delete", // "parameterOrder": [ // "projectsId", // "sinksId" // ], // "parameters": { // "projectsId": { // "description": "Part of `sinkName`. The resource name of the project sink to delete.", // "location": "path", // "required": true, // "type": "string" // }, // "sinksId": { // "description": "Part of `sinkName`. 
See documentation of `projectsId`.", // "location": "path", // "required": true, // "type": "string" // } // }, // "path": "v1beta3/projects/{projectsId}/sinks/{sinksId}", // "response": { // "$ref": "Empty" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform", // "https://www.googleapis.com/auth/logging.admin" // ] // } } // method id "logging.projects.sinks.get": type ProjectsSinksGetCall struct { s *Service projectsId string sinksId string urlParams_ gensupport.URLParams ifNoneMatch_ string ctx_ context.Context } // Get: Gets a project sink. func (r *ProjectsSinksService) Get(projectsId string, sinksId string) *ProjectsSinksGetCall { c := &ProjectsSinksGetCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.projectsId = projectsId c.sinksId = sinksId return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ProjectsSinksGetCall) Fields(s ...googleapi.Field) *ProjectsSinksGetCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // IfNoneMatch sets the optional parameter which makes the operation // fail if the object's ETag matches the given value. This is useful for // getting updates only after the object has changed since the last // request. Use googleapi.IsNotModified to check whether the response // error from Do is the result of In-None-Match. func (c *ProjectsSinksGetCall) IfNoneMatch(entityTag string) *ProjectsSinksGetCall { c.ifNoneMatch_ = entityTag return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. func (c *ProjectsSinksGetCall) Context(ctx context.Context) *ProjectsSinksGetCall { c.ctx_ = ctx return c } func (c *ProjectsSinksGetCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) reqHeaders.Set("User-Agent", c.s.userAgent()) if c.ifNoneMatch_ != "" { reqHeaders.Set("If-None-Match", c.ifNoneMatch_) } var body io.Reader = nil c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1beta3/projects/{projectsId}/sinks/{sinksId}") urls += "?" + c.urlParams_.Encode() req, _ := http.NewRequest("GET", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "projectsId": c.projectsId, "sinksId": c.sinksId, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "logging.projects.sinks.get" call. // Exactly one of *LogSink or error will be non-nil. Any non-2xx status // code is an error. Response headers are in either // *LogSink.ServerResponse.Header or (if a response was returned at all) // in error.(*googleapi.Error).Header. Use googleapi.IsNotModified to // check whether the returned error was because http.StatusNotModified // was returned. func (c *ProjectsSinksGetCall) Do(opts ...googleapi.CallOption) (*LogSink, error) { gensupport.SetOptions(c.urlParams_, opts...) 
res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &LogSink{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := json.NewDecoder(res.Body).Decode(target); err != nil { return nil, err } return ret, nil // { // "description": "Gets a project sink.", // "httpMethod": "GET", // "id": "logging.projects.sinks.get", // "parameterOrder": [ // "projectsId", // "sinksId" // ], // "parameters": { // "projectsId": { // "description": "Part of `sinkName`. The resource name of the project sink to return.", // "location": "path", // "required": true, // "type": "string" // }, // "sinksId": { // "description": "Part of `sinkName`. See documentation of `projectsId`.", // "location": "path", // "required": true, // "type": "string" // } // }, // "path": "v1beta3/projects/{projectsId}/sinks/{sinksId}", // "response": { // "$ref": "LogSink" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform", // "https://www.googleapis.com/auth/cloud-platform.read-only", // "https://www.googleapis.com/auth/logging.admin", // "https://www.googleapis.com/auth/logging.read" // ] // } } // method id "logging.projects.sinks.list": type ProjectsSinksListCall struct { s *Service projectsId string urlParams_ gensupport.URLParams ifNoneMatch_ string ctx_ context.Context } // List: Lists project sinks associated with a project. func (r *ProjectsSinksService) List(projectsId string) *ProjectsSinksListCall { c := &ProjectsSinksListCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.projectsId = projectsId return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ProjectsSinksListCall) Fields(s ...googleapi.Field) *ProjectsSinksListCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // IfNoneMatch sets the optional parameter which makes the operation // fail if the object's ETag matches the given value. This is useful for // getting updates only after the object has changed since the last // request. Use googleapi.IsNotModified to check whether the response // error from Do is the result of In-None-Match. func (c *ProjectsSinksListCall) IfNoneMatch(entityTag string) *ProjectsSinksListCall { c.ifNoneMatch_ = entityTag return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. func (c *ProjectsSinksListCall) Context(ctx context.Context) *ProjectsSinksListCall { c.ctx_ = ctx return c } func (c *ProjectsSinksListCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) reqHeaders.Set("User-Agent", c.s.userAgent()) if c.ifNoneMatch_ != "" { reqHeaders.Set("If-None-Match", c.ifNoneMatch_) } var body io.Reader = nil c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1beta3/projects/{projectsId}/sinks") urls += "?" 
+ c.urlParams_.Encode() req, _ := http.NewRequest("GET", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "projectsId": c.projectsId, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "logging.projects.sinks.list" call. // Exactly one of *ListSinksResponse or error will be non-nil. Any // non-2xx status code is an error. Response headers are in either // *ListSinksResponse.ServerResponse.Header or (if a response was // returned at all) in error.(*googleapi.Error).Header. Use // googleapi.IsNotModified to check whether the returned error was // because http.StatusNotModified was returned. func (c *ProjectsSinksListCall) Do(opts ...googleapi.CallOption) (*ListSinksResponse, error) { gensupport.SetOptions(c.urlParams_, opts...) res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &ListSinksResponse{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := json.NewDecoder(res.Body).Decode(target); err != nil { return nil, err } return ret, nil // { // "description": "Lists project sinks associated with a project.", // "httpMethod": "GET", // "id": "logging.projects.sinks.list", // "parameterOrder": [ // "projectsId" // ], // "parameters": { // "projectsId": { // "description": "Part of `projectName`. The project whose sinks are wanted.", // "location": "path", // "required": true, // "type": "string" // } // }, // "path": "v1beta3/projects/{projectsId}/sinks", // "response": { // "$ref": "ListSinksResponse" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform", // "https://www.googleapis.com/auth/cloud-platform.read-only", // "https://www.googleapis.com/auth/logging.admin", // "https://www.googleapis.com/auth/logging.read" // ] // } } // method id "logging.projects.sinks.update": type ProjectsSinksUpdateCall struct { s *Service projectsId string sinksId string logsink *LogSink urlParams_ gensupport.URLParams ctx_ context.Context } // Update: Updates a project sink. If the sink does not exist, it is // created. The destination, filter, or both may be updated. func (r *ProjectsSinksService) Update(projectsId string, sinksId string, logsink *LogSink) *ProjectsSinksUpdateCall { c := &ProjectsSinksUpdateCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.projectsId = projectsId c.sinksId = sinksId c.logsink = logsink return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ProjectsSinksUpdateCall) Fields(s ...googleapi.Field) *ProjectsSinksUpdateCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. 
func (c *ProjectsSinksUpdateCall) Context(ctx context.Context) *ProjectsSinksUpdateCall { c.ctx_ = ctx return c } func (c *ProjectsSinksUpdateCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) reqHeaders.Set("User-Agent", c.s.userAgent()) var body io.Reader = nil body, err := googleapi.WithoutDataWrapper.JSONReader(c.logsink) if err != nil { return nil, err } reqHeaders.Set("Content-Type", "application/json") c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1beta3/projects/{projectsId}/sinks/{sinksId}") urls += "?" + c.urlParams_.Encode() req, _ := http.NewRequest("PUT", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "projectsId": c.projectsId, "sinksId": c.sinksId, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "logging.projects.sinks.update" call. // Exactly one of *LogSink or error will be non-nil. Any non-2xx status // code is an error. Response headers are in either // *LogSink.ServerResponse.Header or (if a response was returned at all) // in error.(*googleapi.Error).Header. Use googleapi.IsNotModified to // check whether the returned error was because http.StatusNotModified // was returned. func (c *ProjectsSinksUpdateCall) Do(opts ...googleapi.CallOption) (*LogSink, error) { gensupport.SetOptions(c.urlParams_, opts...) res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &LogSink{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := json.NewDecoder(res.Body).Decode(target); err != nil { return nil, err } return ret, nil // { // "description": "Updates a project sink. If the sink does not exist, it is created. The destination, filter, or both may be updated.", // "httpMethod": "PUT", // "id": "logging.projects.sinks.update", // "parameterOrder": [ // "projectsId", // "sinksId" // ], // "parameters": { // "projectsId": { // "description": "Part of `sinkName`. The resource name of the project sink to update.", // "location": "path", // "required": true, // "type": "string" // }, // "sinksId": { // "description": "Part of `sinkName`. See documentation of `projectsId`.", // "location": "path", // "required": true, // "type": "string" // } // }, // "path": "v1beta3/projects/{projectsId}/sinks/{sinksId}", // "request": { // "$ref": "LogSink" // }, // "response": { // "$ref": "LogSink" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform", // "https://www.googleapis.com/auth/logging.admin" // ] // } }
BradErz/kops
vendor/google.golang.org/api/logging/v1beta3/logging-gen.go
GO
apache-2.0
174,915
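A minimal usage sketch for the generated logging/v1beta3 client above, assuming the conventional google-api-go-client entry points (logging.New, the Projects.Sinks and Projects.Metrics accessor chain, the CloudPlatformScope constant) and response fields (ListSinksResponse.Sinks, ListLogMetricsResponse.Metrics, LogSink.Name and LogSink.Destination) that are not visible in the excerpt: build a *Service, chain optional setters such as Context, PageSize and PageToken, and finish with Do or Pages. Pages drives PageToken/NextPageToken internally, so the caller only supplies a per-page callback.

// Sketch only: the identifiers called out above as assumptions are not confirmed by the excerpt.
package main

import (
	"fmt"
	"log"

	"golang.org/x/net/context"
	"golang.org/x/oauth2/google"
	logging "google.golang.org/api/logging/v1beta3"
)

func main() {
	ctx := context.Background()

	// Authenticated HTTP client via Application Default Credentials (scope constant assumed).
	client, err := google.DefaultClient(ctx, logging.CloudPlatformScope)
	if err != nil {
		log.Fatal(err)
	}
	svc, err := logging.New(client)
	if err != nil {
		log.Fatal(err)
	}

	// List project sinks; Context ties the HTTP request to ctx so it can be cancelled.
	resp, err := svc.Projects.Sinks.List("my-project").Context(ctx).Do()
	if err != nil {
		log.Fatal(err)
	}
	for _, s := range resp.Sinks {
		fmt.Println(s.Name, "->", s.Destination)
	}

	// Walk every page of logs-based metrics with the generated Pages helper;
	// Pages feeds NextPageToken back into PageToken between calls to Do.
	err = svc.Projects.Metrics.List("my-project").PageSize(100).
		Pages(ctx, func(page *logging.ListLogMetricsResponse) error {
			for _, m := range page.Metrics {
				fmt.Println(m.Name)
			}
			return nil // a non-nil error here stops the iteration
		})
	if err != nil {
		log.Fatal(err)
	}
}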
/* Copyright 2015 The Kubernetes Authors All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package thirdpartyresourcedata import ( "encoding/json" "reflect" "testing" "time" "k8s.io/kubernetes/pkg/api" "k8s.io/kubernetes/pkg/api/unversioned" "k8s.io/kubernetes/pkg/api/v1" "k8s.io/kubernetes/pkg/apis/experimental" "k8s.io/kubernetes/pkg/runtime" ) type Foo struct { unversioned.TypeMeta `json:",inline"` api.ObjectMeta `json:"metadata,omitempty" description:"standard object metadata"` SomeField string `json:"someField"` OtherField int `json:"otherField"` } type FooList struct { unversioned.TypeMeta `json:",inline"` unversioned.ListMeta `json:"metadata,omitempty" description:"standard list metadata; see http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#metadata"` items []Foo `json:"items"` } func TestCodec(t *testing.T) { tests := []struct { obj *Foo expectErr bool name string }{ { obj: &Foo{ObjectMeta: api.ObjectMeta{Name: "bar"}}, expectErr: true, name: "missing kind", }, { obj: &Foo{ObjectMeta: api.ObjectMeta{Name: "bar"}, TypeMeta: unversioned.TypeMeta{Kind: "Foo"}}, name: "basic", }, { obj: &Foo{ObjectMeta: api.ObjectMeta{Name: "bar", ResourceVersion: "baz"}, TypeMeta: unversioned.TypeMeta{Kind: "Foo"}}, name: "resource version", }, { obj: &Foo{ ObjectMeta: api.ObjectMeta{ Name: "bar", CreationTimestamp: unversioned.Time{time.Unix(100, 0)}, }, TypeMeta: unversioned.TypeMeta{Kind: "Foo"}, }, name: "creation time", }, { obj: &Foo{ ObjectMeta: api.ObjectMeta{ Name: "bar", ResourceVersion: "baz", Labels: map[string]string{"foo": "bar", "baz": "blah"}, }, TypeMeta: unversioned.TypeMeta{Kind: "Foo"}, }, name: "labels", }, } for _, test := range tests { codec := thirdPartyResourceDataCodec{kind: "Foo"} data, err := json.Marshal(test.obj) if err != nil { t.Errorf("[%s] unexpected error: %v", test.name, err) continue } obj, err := codec.Decode(data) if err != nil && !test.expectErr { t.Errorf("[%s] unexpected error: %v", test.name, err) continue } if test.expectErr { if err == nil { t.Errorf("[%s] unexpected non-error", test.name) } continue } rsrcObj, ok := obj.(*experimental.ThirdPartyResourceData) if !ok { t.Errorf("[%s] unexpected object: %v", test.name, obj) continue } if !reflect.DeepEqual(rsrcObj.ObjectMeta, test.obj.ObjectMeta) { t.Errorf("[%s]\nexpected\n%v\nsaw\n%v\n", test.name, rsrcObj.ObjectMeta, test.obj.ObjectMeta) } var output Foo if err := json.Unmarshal(rsrcObj.Data, &output); err != nil { t.Errorf("[%s] unexpected error: %v", test.name, err) continue } if !reflect.DeepEqual(&output, test.obj) { t.Errorf("[%s]\nexpected\n%v\nsaw\n%v\n", test.name, test.obj, &output) } data, err = codec.Encode(rsrcObj) if err != nil { t.Errorf("[%s] unexpected error: %v", test.name, err) } var output2 Foo if err := json.Unmarshal(data, &output2); err != nil { t.Errorf("[%s] unexpected error: %v", test.name, err) continue } if !reflect.DeepEqual(&output2, test.obj) { t.Errorf("[%s]\nexpected\n%v\nsaw\n%v\n", test.name, test.obj, &output2) } } } func TestCreater(t *testing.T) { creater := 
NewObjectCreator("creater version", api.Scheme) tests := []struct { name string version string kind string expectedObj runtime.Object expectErr bool }{ { name: "valid ThirdPartyResourceData creation", version: "creater version", kind: "ThirdPartyResourceData", expectedObj: &experimental.ThirdPartyResourceData{}, expectErr: false, }, { name: "invalid ThirdPartyResourceData creation", version: "invalid version", kind: "ThirdPartyResourceData", expectedObj: nil, expectErr: true, }, { name: "valid ListOptions creation", version: "v1", kind: "ListOptions", expectedObj: &v1.ListOptions{}, expectErr: false, }, } for _, test := range tests { out, err := creater.New(test.version, test.kind) if err != nil && !test.expectErr { t.Errorf("[%s] unexpected error: %v", test.name, err) } if err == nil && test.expectErr { t.Errorf("[%s] unexpected non-error", test.name) } if !reflect.DeepEqual(test.expectedObj, out) { t.Errorf("[%s] unexpected object: expect: %v, got: %v", test.name, test.expectedObj, out) } } }
socaa/kubernetes
pkg/registry/thirdpartyresourcedata/codec_test.go
Go
apache-2.0
5,038
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.codehaus.groovy.ast.stmt; import org.codehaus.groovy.ast.GroovyCodeVisitor; import org.codehaus.groovy.ast.expr.ConstantExpression; import org.codehaus.groovy.ast.expr.Expression; /** * A return statement * * @author <a href="mailto:james@coredevelopers.net">James Strachan</a> */ public class ReturnStatement extends Statement { /** * Only used for synthetic return statements emitted by the compiler. * For comparisons use isReturningNullOrVoid() instead. */ public static final ReturnStatement RETURN_NULL_OR_VOID = new ReturnStatement(ConstantExpression.NULL); private Expression expression; public ReturnStatement(ExpressionStatement statement) { this(statement.getExpression()); setStatementLabel(statement.getStatementLabel()); } public ReturnStatement(Expression expression) { this.expression = expression; } public void visit(GroovyCodeVisitor visitor) { visitor.visitReturnStatement(this); } public Expression getExpression() { return expression; } public String getText() { return "return " + expression.getText(); } public void setExpression(Expression expression) { this.expression = expression; } public boolean isReturningNullOrVoid() { return expression instanceof ConstantExpression && ((ConstantExpression)expression).isNullExpression(); } }
aaronzirbes/incubator-groovy
src/main/org/codehaus/groovy/ast/stmt/ReturnStatement.java
Java
apache-2.0
2,280
/** * Copyright (c) 2015-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. */ package com.facebook.react.views.scroll; import javax.annotation.Nullable; import android.graphics.Color; import com.facebook.react.bridge.ReadableArray; import com.facebook.react.uimanager.annotations.ReactProp; import com.facebook.react.uimanager.ThemedReactContext; import com.facebook.react.uimanager.ViewGroupManager; import com.facebook.react.views.view.ReactClippingViewGroupHelper; /** * View manager for {@link ReactHorizontalScrollView} components. * * <p>Note that {@link ReactScrollView} and {@link ReactHorizontalScrollView} are exposed to JS * as a single ScrollView component, configured via the {@code horizontal} boolean property. */ public class ReactHorizontalScrollViewManager extends ViewGroupManager<ReactHorizontalScrollView> implements ReactScrollViewCommandHelper.ScrollCommandHandler<ReactHorizontalScrollView> { private static final String REACT_CLASS = "AndroidHorizontalScrollView"; private @Nullable FpsListener mFpsListener = null; public ReactHorizontalScrollViewManager() { this(null); } public ReactHorizontalScrollViewManager(@Nullable FpsListener fpsListener) { mFpsListener = fpsListener; } @Override public String getName() { return REACT_CLASS; } @Override public ReactHorizontalScrollView createViewInstance(ThemedReactContext context) { return new ReactHorizontalScrollView(context, mFpsListener); } @ReactProp(name = "scrollEnabled", defaultBoolean = true) public void setScrollEnabled(ReactHorizontalScrollView view, boolean value) { view.setScrollEnabled(value); } @ReactProp(name = "showsHorizontalScrollIndicator") public void setShowsHorizontalScrollIndicator(ReactHorizontalScrollView view, boolean value) { view.setHorizontalScrollBarEnabled(value); } @ReactProp(name = ReactClippingViewGroupHelper.PROP_REMOVE_CLIPPED_SUBVIEWS) public void setRemoveClippedSubviews(ReactHorizontalScrollView view, boolean removeClippedSubviews) { view.setRemoveClippedSubviews(removeClippedSubviews); } /** * Computing momentum events is potentially expensive since we post a runnable on the UI thread * to see when it is done. We only do that if {@param sendMomentumEvents} is set to true. This * is handled automatically in js by checking if there is a listener on the momentum events. * * @param view * @param sendMomentumEvents */ @ReactProp(name = "sendMomentumEvents") public void setSendMomentumEvents(ReactHorizontalScrollView view, boolean sendMomentumEvents) { view.setSendMomentumEvents(sendMomentumEvents); } /** * Tag used for logging scroll performance on this scroll view. Will force momentum events to be * turned on (see setSendMomentumEvents). 
* * @param view * @param scrollPerfTag */ @ReactProp(name = "scrollPerfTag") public void setScrollPerfTag(ReactHorizontalScrollView view, String scrollPerfTag) { view.setScrollPerfTag(scrollPerfTag); } @ReactProp(name = "pagingEnabled") public void setPagingEnabled(ReactHorizontalScrollView view, boolean pagingEnabled) { view.setPagingEnabled(pagingEnabled); } @Override public void receiveCommand( ReactHorizontalScrollView scrollView, int commandId, @Nullable ReadableArray args) { ReactScrollViewCommandHelper.receiveCommand(this, scrollView, commandId, args); } @Override public void scrollTo( ReactHorizontalScrollView scrollView, ReactScrollViewCommandHelper.ScrollToCommandData data) { if (data.mAnimated) { scrollView.smoothScrollTo(data.mDestX, data.mDestY); } else { scrollView.scrollTo(data.mDestX, data.mDestY); } } /** * When set, fills the rest of the scrollview with a color to avoid setting a background and * creating unnecessary overdraw. * @param view * @param color */ @ReactProp(name = "endFillColor", defaultInt = Color.TRANSPARENT, customType = "Color") public void setBottomFillColor(ReactHorizontalScrollView view, int color) { view.setEndFillColor(color); } }
Helena-High/school-app
node_modules/react-native/ReactAndroid/src/main/java/com/facebook/react/views/scroll/ReactHorizontalScrollViewManager.java
Java
apache-2.0
4,366
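For orientation, the view manager above is selected from JS by the ScrollView `horizontal` prop; the following is a minimal, illustrative React Native sketch (component name and contents are invented, not taken from the source) of how its @ReactProp setters surface as props:

// Illustrative usage; each prop maps onto one of the @ReactProp setters in the manager above.
import React from 'react';
import { ScrollView, Text } from 'react-native';

export function HorizontalStrip() {
  // horizontal selects AndroidHorizontalScrollView; the remaining props are forwarded to
  // setShowsHorizontalScrollIndicator, setPagingEnabled and setScrollEnabled respectively.
  return (
    <ScrollView horizontal showsHorizontalScrollIndicator={false} pagingEnabled scrollEnabled>
      <Text>one</Text>
      <Text>two</Text>
    </ScrollView>
  );
}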
# encoding: utf-8 module Mongoid module Validatable # Validates whether or not a field is unique against the documents in the # database. # # @example Define the uniqueness validator. # # class Person # include Mongoid::Document # field :title # # validates_uniqueness_of :title # end class UniquenessValidator < ActiveModel::EachValidator include Queryable attr_reader :klass # Unfortunately, we have to tie Uniqueness validators to a class. # # @example Setup the validator. # UniquenessValidator.new.setup(Person) # # @param [ Class ] klass The class getting validated. # # @since 1.0.0 def setup(klass) @klass = klass end # Validate the document for uniqueness violations. # # @example Validate the document. # validate_each(person, :title, "Sir") # # @param [ Document ] document The document to validate. # @param [ Symbol ] attribute The field to validate on. # @param [ Object ] value The value of the field. # # @return [ Errors ] The errors. # # @since 1.0.0 def validate_each(document, attribute, value) with_query(document) do attrib, val = to_validate(document, attribute, value) return unless validation_required?(document, attrib) if document.embedded? validate_embedded(document, attrib, val) else validate_root(document, attrib, val) end end end private # Add the error to the document. # # @api private # # @example Add the error. # validator.add_error(doc, :name, "test") # # @param [ Document ] document The document to validate. # @param [ Symbol ] attribute The name of the attribute. # @param [ Object ] value The value of the object. # # @since 2.4.10 def add_error(document, attribute, value) document.errors.add( attribute, :taken, options.except(:case_sensitive, :scope).merge(value: value) ) end # Should the uniqueness validation be case sensitive? # # @api private # # @example Is the validation case sensitive? # validator.case_sensitive? # # @return [ true, false ] If the validation is case sensitive. # # @since 2.3.0 def case_sensitive? !(options[:case_sensitive] == false) end # Create the validation criteria. # # @api private # # @example Create the criteria. # validator.create_criteria(User, user, :name, "syd") # # @param [ Class, Proxy ] base The base to execute the criteria from. # @param [ Document ] document The document to validate. # @param [ Symbol ] attribute The name of the attribute. # @param [ Object ] value The value of the object. # # @return [ Criteria ] The criteria. # # @since 2.4.10 def create_criteria(base, document, attribute, value) criteria = scope(base.unscoped, document, attribute) criteria.selector.update(criterion(document, attribute, value.mongoize)) criteria end # Get the default criteria for checking uniqueness. # # @api private # # @example Get the criteria. # validator.criterion(person, :title, "Sir") # # @param [ Document ] document The document to validate. # @param [ Symbol ] attribute The name of the attribute. # @param [ Object ] value The value of the object. # # @return [ Criteria ] The uniqueness criteria. # # @since 2.3.0 def criterion(document, attribute, value) field = document.database_field_name(attribute) if localized?(document, field) conditions = value.inject([]) { |acc, (k,v)| acc << { "#{field}.#{k}" => filter(v) } } selector = { "$or" => conditions } else selector = { field => filter(value) } end if document.persisted? && !document.embedded? selector.merge!(_id: { "$ne" => document.id }) end selector end # Filter the value based on whether the check is case sensitive or not. # # @api private # # @example Filter the value. 
# validator.filter("testing") # # @param [ Object ] value The value to filter. # # @return [ Object, Regexp ] The value, filtered or not. # # @since 2.3.0 def filter(value) !case_sensitive? && value ? /\A#{Regexp.escape(value.to_s)}$/i : value end # Scope the criteria to the scope options provided. # # @api private # # @example Scope the criteria. # validator.scope(criteria, document) # # @param [ Criteria ] criteria The criteria to scope. # @param [ Document ] document The document being validated. # # @return [ Criteria ] The scoped criteria. # # @since 2.3.0 def scope(criteria, document, attribute) Array.wrap(options[:scope]).each do |item| name = document.database_field_name(item) criteria = criteria.where(item => document.attributes[name]) end criteria = criteria.with(document.persistence_options) criteria end # Should validation be skipped? # # @api private # # @example Should the validation be skipped? # validator.skip_validation?(doc) # # @param [ Document ] document The embedded document. # # @return [ true, false ] If the validation should be skipped. # # @since 2.3.0 def skip_validation?(document) !document._parent || document.embedded_one? end # Scope reference has changed? # # @api private # # @example Has scope reference changed? # validator.scope_value_changed?(doc) # # @param [ Document ] document The embedded document. # # @return [ true, false ] If the scope reference has changed. # # @since def scope_value_changed?(document) Array.wrap(options[:scope]).any? do |item| document.send("attribute_changed?", item.to_s) end end # Get the name of the field and the value to validate. This is for the # case when we validate a relation via the relation name and not the key, # we need to send the key name and value to the db, not the relation # object. # # @api private # # @example Get the name and key to validate. # validator.to_validate(doc, :parent, Parent.new) # # @param [ Document ] document The doc getting validated. # @param [ Symbol ] attribute The attribute getting validated. # @param [ Object ] value The value of the attribute. # # @return [ Array<Object, Object> ] The field and value. # # @since 2.4.4 def to_validate(document, attribute, value) metadata = document.relations[attribute.to_s] if metadata && metadata.stores_foreign_key? [ metadata.foreign_key, value.id ] else [ attribute, value ] end end # Validate an embedded document. # # @api private # # @example Validate the embedded document. # validator.validate_embedded(doc, :name, "test") # # @param [ Document ] document The document. # @param [ Symbol ] attribute The attribute name. # @param [ Object ] value The value. # # @since 2.4.10 def validate_embedded(document, attribute, value) return if skip_validation?(document) relation = document._parent.send(document.metadata_name) criteria = create_criteria(relation, document, attribute, value) add_error(document, attribute, value) if criteria.count > 1 end # Validate a root document. # # @api private # # @example Validate the root document. # validator.validate_root(doc, :name, "test") # # @param [ Document ] document The document. # @param [ Symbol ] attribute The attribute name. # @param [ Object ] value The value. # # @since 2.4.10 def validate_root(document, attribute, value) criteria = create_criteria(klass || document.class, document, attribute, value) if criteria.with(persistence_options(criteria)).exists? add_error(document, attribute, value) end end # Are we required to validate the document? # # @example Is validation needed? 
# validator.validation_required?(doc, :field) # # @param [ Document ] document The document getting validated. # @param [ Symbol ] attribute The attribute to validate. # # @return [ true, false ] If we need to validate. # # @since 2.4.4 def validation_required?(document, attribute) document.new_record? || document.send("attribute_changed?", attribute.to_s) || scope_value_changed?(document) end # Get the persistence options to perform to check, merging with any # existing. # # @api private # # @example Get the persistence options. # validator.persistence_options(criteria) # # @param [ Criteria ] criteria The criteria. # # @return [ Hash ] The persistence options. # # @since 3.0.23 def persistence_options(criteria) (criteria.persistence_options || {}).merge!(read: :primary) end # Is the attribute localized? # # @api private # # @example Is the attribute localized? # validator.localized?(doc, :field) # # @param [ Document ] document The document getting validated. # @param [ Symbol ] attribute The attribute to validate. # # @return [ true, false ] If the attribute is localized. # # @since 4.0.0 def localized?(document, attribute) document.fields[document.database_field_name(attribute)].try(:localized?) end end end end
charliehq/mongoid
lib/mongoid/validatable/uniqueness.rb
Ruby
mit
10,323
/* * Copyright (C) 2015 Actor LLC. <https://actor.im> */ package im.actor.runtime.js.crypto; import com.google.gwt.core.client.JavaScriptObject; public class RsaKey extends JavaScriptObject { protected RsaKey() { } public final native String getPrivateKey()/*-{ return this.privateKey; }-*/; public final native String getPublicKey()/*-{ return this.publicKey; }-*/; }
luoxiaoshenghustedu/actor-platform
actor-apps/runtime-js/src/main/java/im/actor/runtime/js/crypto/RsaKey.java
Java
mit
393
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. // /*============================================================================= ** ** ** ** Purpose: This class is a Delegate which defines the start method ** for starting a thread. That method must match this delegate. ** ** =============================================================================*/ namespace System.Threading { using System.Security.Permissions; using System.Threading; // Define the delegate // NOTE: If you change the signature here, there is code in COMSynchronization // that invokes this delegate in native. [System.Runtime.InteropServices.ComVisible(true)] public delegate void ThreadStart(); }
swgillespie/coreclr
src/mscorlib/src/System/Threading/ThreadStart.cs
C#
mit
862
<?php /** * @file * Contains \Drupal\migrate_drupal\Tests\Table\d7\FieldDataFieldInteger. * * THIS IS A GENERATED FILE. DO NOT EDIT. * * @see core/scripts/migrate-db.sh * @see https://www.drupal.org/sandbox/benjy/2405029 */ namespace Drupal\migrate_drupal\Tests\Table\d7; use Drupal\migrate_drupal\Tests\Dump\DrupalDumpBase; /** * Generated file to represent the field_data_field_integer table. */ class FieldDataFieldInteger extends DrupalDumpBase { public function load() { $this->createTable("field_data_field_integer", array( 'primary key' => array( 'entity_type', 'deleted', 'entity_id', 'language', 'delta', ), 'fields' => array( 'entity_type' => array( 'type' => 'varchar', 'not null' => TRUE, 'length' => '128', 'default' => '', ), 'bundle' => array( 'type' => 'varchar', 'not null' => TRUE, 'length' => '128', 'default' => '', ), 'deleted' => array( 'type' => 'int', 'not null' => TRUE, 'length' => '11', 'default' => '0', ), 'entity_id' => array( 'type' => 'int', 'not null' => TRUE, 'length' => '10', 'unsigned' => TRUE, ), 'revision_id' => array( 'type' => 'int', 'not null' => FALSE, 'length' => '10', 'unsigned' => TRUE, ), 'language' => array( 'type' => 'varchar', 'not null' => TRUE, 'length' => '32', 'default' => '', ), 'delta' => array( 'type' => 'int', 'not null' => TRUE, 'length' => '10', 'unsigned' => TRUE, ), 'field_integer_value' => array( 'type' => 'int', 'not null' => FALSE, 'length' => '11', ), ), 'mysql_character_set' => 'utf8', )); $this->database->insert("field_data_field_integer")->fields(array( 'entity_type', 'bundle', 'deleted', 'entity_id', 'revision_id', 'language', 'delta', 'field_integer_value', )) ->values(array( 'entity_type' => 'node', 'bundle' => 'test_content_type', 'deleted' => '0', 'entity_id' => '1', 'revision_id' => '1', 'language' => 'und', 'delta' => '0', 'field_integer_value' => '5', ))->execute(); } } #216fa7f54876d42ec80a9e6b2aa53991
komejo/d8demo-dev
web/core/modules/migrate_drupal/src/Tests/Table/d7/FieldDataFieldInteger.php
PHP
mit
2,545
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. namespace Microsoft.Build.Logging.StructuredLogger { /// <summary> /// Class representation of a logged item group entry. /// </summary> internal class ItemGroup : TaskParameter { /// <summary> /// Initializes a new instance of the <see cref="ItemGroup"/> class. /// </summary> /// <param name="message">The message from the logger.</param> /// <param name="prefix">The prefix string (e.g. 'Added item(s): ').</param> /// <param name="itemAttributeName">Name of the item attribute ('Include' or 'Remove').</param> public ItemGroup(string message, string prefix, string itemAttributeName) : base(message, prefix, false, itemAttributeName) { } } }
cdmihai/msbuild
src/Samples/XmlFileLogger/ObjectModel/ItemGroup.cs
C#
mit
909
// ---------------------------------------------------------------------------------- // // Copyright Microsoft Corporation // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // ---------------------------------------------------------------------------------- using System; using System.Collections.Generic; using Microsoft.Azure.Management.ResourceManager.Models; using Microsoft.Azure.Commands.ResourceManager.Cmdlets.SdkExtensions; namespace Microsoft.Azure.Commands.ResourceManager.Cmdlets.SdkModels { public class PSResourceGroupDeployment { public string DeploymentName { get; set; } public string CorrelationId { get; set; } public string ResourceGroupName { get; set; } public string ProvisioningState { get; set; } public DateTime Timestamp { get; set; } public DeploymentMode Mode { get; set; } public TemplateLink TemplateLink { get; set; } public string TemplateLinkString { get; set; } public string DeploymentDebugLogLevel { get; set; } public Dictionary<string, DeploymentVariable> Parameters { get; set; } public string ParametersString { get { return ResourcesExtensions.ConstructDeploymentVariableTable(Parameters); } } public Dictionary<string, DeploymentVariable> Outputs { get; set; } public string OutputsString { get { return ResourcesExtensions.ConstructDeploymentVariableTable(Outputs); } } } }
AzureAutomationTeam/azure-powershell
src/StackAdmin/Resources/Commands.ResourceManager/Cmdlets/SdkModels/Deployments/PSResourceGroupDeployment.cs
C#
apache-2.0
2,004
// Copyright 2012 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS-IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Usage instructions: Create a single array variable named 'activity'. This // represents explanatory text and one or more questions to present to the // student. Each element in the array should itself be either // // -- a string containing a set of complete HTML elements. That is, if the // string contains an open HTML tag (such as <form>), it must also have the // corresponding close tag (such as </form>). You put the actual question // text in a string. // // -- a JavaScript object representing the answer information for a question. // That is, the object contains properties such as the type of question, a // regular expression indicating the correct answer, a string to show in // case of either correct or incorrect answers or to show when the student // asks for help. For more information on how to specify the object, please // see http://code.google.com/p/course-builder/wiki/CreateActivities. var activity = [ '<table border="1"><tr><td><b>Search Tips:</b><p><ul><li>In the last video you learned how to select effective keywords. Remember to think about the words you think will be in your desired results page.<p> <li>Determine the most important words in your search as well as potential synonyms.</ul><p> </tr></td></table>', 'You received this letter from a friend. <p><font style="font-style:italic;">Hi, I am a chef and a food blogger. Recently, I wanted to write about this really yummy French sandwich with tuna and peppers and anchovies and stuff called a Pom Mignon, or something like that. For the life of me, I don’t know precisely what it is called. I spent half an hour last night typing every possible spelling I could think of into Google, but could not find it. What do I do now? <p>Thank you,<br>L.</font><p>Given what you know about this problem, what query would you use to solve it?<p>', { questionType: 'freetext', showAnswerPrompt: 'Compare with Expert', showAnswerOutput: 'Our expert says: Different people have different styles for searching for information. Here is how I identified the sandwich--though it is not the only way to arrive at an answer.\n\nI searched for [french sandwich tuna peppers anchovies]. \n\nRemember how Dan talked about thinking about what you want to find? What words will be on the kind of page you want to appear? \n\nSo, ask yourself what kind of page is likely to:\n\n1. Give the name of this sandwich?\n2. Be a common resource on the web?\n3. Make use of the other information you have about the sandwich--since the name was obviously a dead-end?\n\nI thought of a recipe! A recipe lists all of the ingredients. In this case, the chef knew several of the ingredients, but did not connect the fact that she knew them to the idea that she could use them in a basic web search.\n\nScroll down to continue. 
', outputHeight: '300px' }, '<br><br>Can you find the name of the sandwich in the results below?<br>', '<br><img src="assets/img/Image10.1.png"><p>', '<br>What\'s the name of the sandwich?<br>', { questionType: 'freetext', showAnswerPrompt: 'Check Answer', showAnswerOutput: 'Pan Bagnat!'}, ];
esacosta/u-mooc
edu-courses/assets/js/activity-1.4.js
JavaScript
apache-2.0
3,756
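The activity file above follows the array format described in its header comment; a minimal, hypothetical sketch of that shape (question text is invented; field names are taken only from the file itself):

// Illustrative 'activity' array: strings hold complete HTML, objects hold answer metadata.
var activity = [
  '<p>Explanatory HTML, given as a string of complete elements.</p>',
  'A question prompt goes here.<p>',
  { questionType: 'freetext',
    showAnswerPrompt: 'Compare with Expert',
    showAnswerOutput: 'Text shown after the student clicks the prompt.',
    outputHeight: '100px' },
];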
//// [noImplicitReturnsWithoutReturnExpression.ts] function isMissingReturnExpression(): number { return; } function isMissingReturnExpression2(): any { return; } function isMissingReturnExpression3(): number|void { return; } function isMissingReturnExpression4(): void { return; } function isMissingReturnExpression5(x) { if (x) { return 0; } else { return; } } //// [noImplicitReturnsWithoutReturnExpression.js] function isMissingReturnExpression() { return; } function isMissingReturnExpression2() { return; } function isMissingReturnExpression3() { return; } function isMissingReturnExpression4() { return; } function isMissingReturnExpression5(x) { if (x) { return 0; } else { return; } }
plantain-00/TypeScript
tests/baselines/reference/noImplicitReturnsWithoutReturnExpression.js
JavaScript
apache-2.0
820
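The baseline above exercises bare `return;` statements; the snippet below (not part of the baseline) illustrates what the related --noImplicitReturns compiler flag reports when a value-returning path coexists with a fall-through path:

// With "noImplicitReturns": true in tsconfig, 'pick' is reported because the
// path after the if-block falls off the end without returning a value.
function pick(x: boolean) {
    if (x) {
        return 1;
    }
    // error: Not all code paths return a value.
}

// Returning a value on every path (or returning no value anywhere) satisfies the check.
function pickSafe(x: boolean) {
    if (x) {
        return 1;
    }
    return 0;
}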
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "sync/util/cryptographer.h" #include <algorithm> #include "base/base64.h" #include "base/basictypes.h" #include "base/logging.h" #include "sync/protocol/nigori_specifics.pb.h" #include "sync/util/encryptor.h" namespace syncer { const char kNigoriTag[] = "google_chrome_nigori"; // We name a particular Nigori instance (ie. a triplet consisting of a hostname, // a username, and a password) by calling Permute on this string. Since the // output of Permute is always the same for a given triplet, clients will always // assign the same name to a particular triplet. const char kNigoriKeyName[] = "nigori-key"; Cryptographer::Cryptographer(Encryptor* encryptor) : encryptor_(encryptor) { DCHECK(encryptor); } Cryptographer::~Cryptographer() {} void Cryptographer::Bootstrap(const std::string& restored_bootstrap_token) { if (is_initialized()) { NOTREACHED(); return; } std::string serialized_nigori_key = UnpackBootstrapToken(restored_bootstrap_token); if (serialized_nigori_key.empty()) return; ImportNigoriKey(serialized_nigori_key); } bool Cryptographer::CanDecrypt(const sync_pb::EncryptedData& data) const { return nigoris_.end() != nigoris_.find(data.key_name()); } bool Cryptographer::CanDecryptUsingDefaultKey( const sync_pb::EncryptedData& data) const { return !default_nigori_name_.empty() && data.key_name() == default_nigori_name_; } bool Cryptographer::Encrypt( const ::google::protobuf::MessageLite& message, sync_pb::EncryptedData* encrypted) const { DCHECK(encrypted); if (default_nigori_name_.empty()) { LOG(ERROR) << "Cryptographer not ready, failed to encrypt."; return false; } std::string serialized; if (!message.SerializeToString(&serialized)) { LOG(ERROR) << "Message is invalid/missing a required field."; return false; } return EncryptString(serialized, encrypted); } bool Cryptographer::EncryptString( const std::string& serialized, sync_pb::EncryptedData* encrypted) const { if (CanDecryptUsingDefaultKey(*encrypted)) { const std::string& original_serialized = DecryptToString(*encrypted); if (original_serialized == serialized) { DVLOG(2) << "Re-encryption unnecessary, encrypted data already matches."; return true; } } NigoriMap::const_iterator default_nigori = nigoris_.find(default_nigori_name_); if (default_nigori == nigoris_.end()) { LOG(ERROR) << "Corrupt default key."; return false; } encrypted->set_key_name(default_nigori_name_); if (!default_nigori->second->Encrypt(serialized, encrypted->mutable_blob())) { LOG(ERROR) << "Failed to encrypt data."; return false; } return true; } bool Cryptographer::Decrypt(const sync_pb::EncryptedData& encrypted, ::google::protobuf::MessageLite* message) const { DCHECK(message); std::string plaintext = DecryptToString(encrypted); return message->ParseFromString(plaintext); } std::string Cryptographer::DecryptToString( const sync_pb::EncryptedData& encrypted) const { NigoriMap::const_iterator it = nigoris_.find(encrypted.key_name()); if (nigoris_.end() == it) { NOTREACHED() << "Cannot decrypt message"; return std::string(); // Caller should have called CanDecrypt(encrypt). } std::string plaintext; if (!it->second->Decrypt(encrypted.blob(), &plaintext)) { return std::string(); } return plaintext; } bool Cryptographer::GetKeys(sync_pb::EncryptedData* encrypted) const { DCHECK(encrypted); DCHECK(!nigoris_.empty()); // Create a bag of all the Nigori parameters we know about. 
sync_pb::NigoriKeyBag bag; for (NigoriMap::const_iterator it = nigoris_.begin(); it != nigoris_.end(); ++it) { const Nigori& nigori = *it->second; sync_pb::NigoriKey* key = bag.add_key(); key->set_name(it->first); nigori.ExportKeys(key->mutable_user_key(), key->mutable_encryption_key(), key->mutable_mac_key()); } // Encrypt the bag with the default Nigori. return Encrypt(bag, encrypted); } bool Cryptographer::AddKey(const KeyParams& params) { // Create the new Nigori and make it the default encryptor. scoped_ptr<Nigori> nigori(new Nigori); if (!nigori->InitByDerivation(params.hostname, params.username, params.password)) { NOTREACHED(); // Invalid username or password. return false; } return AddKeyImpl(nigori.Pass(), true); } bool Cryptographer::AddNonDefaultKey(const KeyParams& params) { DCHECK(is_initialized()); // Create the new Nigori and add it to the keybag. scoped_ptr<Nigori> nigori(new Nigori); if (!nigori->InitByDerivation(params.hostname, params.username, params.password)) { NOTREACHED(); // Invalid username or password. return false; } return AddKeyImpl(nigori.Pass(), false); } bool Cryptographer::AddKeyFromBootstrapToken( const std::string restored_bootstrap_token) { // Create the new Nigori and make it the default encryptor. std::string serialized_nigori_key = UnpackBootstrapToken( restored_bootstrap_token); return ImportNigoriKey(serialized_nigori_key); } bool Cryptographer::AddKeyImpl(scoped_ptr<Nigori> initialized_nigori, bool set_as_default) { std::string name; if (!initialized_nigori->Permute(Nigori::Password, kNigoriKeyName, &name)) { NOTREACHED(); return false; } nigoris_[name] = make_linked_ptr(initialized_nigori.release()); // Check if the key we just added can decrypt the pending keys and add them // too if so. if (pending_keys_.get() && CanDecrypt(*pending_keys_)) { sync_pb::NigoriKeyBag pending_bag; Decrypt(*pending_keys_, &pending_bag); InstallKeyBag(pending_bag); SetDefaultKey(pending_keys_->key_name()); pending_keys_.reset(); } // The just-added key takes priority over the pending keys as default. 
if (set_as_default) SetDefaultKey(name); return true; } void Cryptographer::InstallKeys(const sync_pb::EncryptedData& encrypted) { DCHECK(CanDecrypt(encrypted)); sync_pb::NigoriKeyBag bag; if (!Decrypt(encrypted, &bag)) return; InstallKeyBag(bag); } void Cryptographer::SetDefaultKey(const std::string& key_name) { DCHECK(nigoris_.end() != nigoris_.find(key_name)); default_nigori_name_ = key_name; } void Cryptographer::SetPendingKeys(const sync_pb::EncryptedData& encrypted) { DCHECK(!CanDecrypt(encrypted)); DCHECK(!encrypted.blob().empty()); pending_keys_.reset(new sync_pb::EncryptedData(encrypted)); } const sync_pb::EncryptedData& Cryptographer::GetPendingKeys() const { DCHECK(has_pending_keys()); return *(pending_keys_.get()); } bool Cryptographer::DecryptPendingKeys(const KeyParams& params) { Nigori nigori; if (!nigori.InitByDerivation(params.hostname, params.username, params.password)) { NOTREACHED(); return false; } std::string plaintext; if (!nigori.Decrypt(pending_keys_->blob(), &plaintext)) return false; sync_pb::NigoriKeyBag bag; if (!bag.ParseFromString(plaintext)) { NOTREACHED(); return false; } InstallKeyBag(bag); const std::string& new_default_key_name = pending_keys_->key_name(); SetDefaultKey(new_default_key_name); pending_keys_.reset(); return true; } bool Cryptographer::GetBootstrapToken(std::string* token) const { DCHECK(token); std::string unencrypted_token = GetDefaultNigoriKey(); if (unencrypted_token.empty()) return false; std::string encrypted_token; if (!encryptor_->EncryptString(unencrypted_token, &encrypted_token)) { NOTREACHED(); return false; } if (!base::Base64Encode(encrypted_token, token)) { NOTREACHED(); return false; } return true; } std::string Cryptographer::UnpackBootstrapToken( const std::string& token) const { if (token.empty()) return std::string(); std::string encrypted_data; if (!base::Base64Decode(token, &encrypted_data)) { DLOG(WARNING) << "Could not decode token."; return std::string(); } std::string unencrypted_token; if (!encryptor_->DecryptString(encrypted_data, &unencrypted_token)) { DLOG(WARNING) << "Decryption of bootstrap token failed."; return std::string(); } return unencrypted_token; } void Cryptographer::InstallKeyBag(const sync_pb::NigoriKeyBag& bag) { int key_size = bag.key_size(); for (int i = 0; i < key_size; ++i) { const sync_pb::NigoriKey key = bag.key(i); // Only use this key if we don't already know about it. if (nigoris_.end() == nigoris_.find(key.name())) { scoped_ptr<Nigori> new_nigori(new Nigori); if (!new_nigori->InitByImport(key.user_key(), key.encryption_key(), key.mac_key())) { NOTREACHED(); continue; } nigoris_[key.name()] = make_linked_ptr(new_nigori.release()); } } } bool Cryptographer::KeybagIsStale( const sync_pb::EncryptedData& encrypted_bag) const { if (!is_ready()) return false; if (encrypted_bag.blob().empty()) return true; if (!CanDecrypt(encrypted_bag)) return false; if (!CanDecryptUsingDefaultKey(encrypted_bag)) return true; sync_pb::NigoriKeyBag bag; if (!Decrypt(encrypted_bag, &bag)) { LOG(ERROR) << "Failed to decrypt keybag for stale check. 
" << "Assuming keybag is corrupted."; return true; } if (static_cast<size_t>(bag.key_size()) < nigoris_.size()) return true; return false; } std::string Cryptographer::GetDefaultNigoriKey() const { if (!is_initialized()) return std::string(); NigoriMap::const_iterator iter = nigoris_.find(default_nigori_name_); if (iter == nigoris_.end()) return std::string(); sync_pb::NigoriKey key; if (!iter->second->ExportKeys(key.mutable_user_key(), key.mutable_encryption_key(), key.mutable_mac_key())) return std::string(); return key.SerializeAsString(); } bool Cryptographer::ImportNigoriKey(const std::string serialized_nigori_key) { if (serialized_nigori_key.empty()) return false; sync_pb::NigoriKey key; if (!key.ParseFromString(serialized_nigori_key)) return false; scoped_ptr<Nigori> nigori(new Nigori); if (!nigori->InitByImport(key.user_key(), key.encryption_key(), key.mac_key())) { NOTREACHED(); return false; } if (!AddKeyImpl(nigori.Pass(), true)) return false; return true; } } // namespace syncer
KitKatXperience/platform_external_chromium_org
sync/util/cryptographer.cc
C++
bsd-3-clause
10,916
<?php /* * This file is part of the Sylius package. * * (c) Paweł Jędrzejewski * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ namespace Sylius\Bundle\ResourceBundle\Tests; use Matthias\SymfonyConfigTest\PhpUnit\ConfigurationTestCaseTrait; use Sylius\Bundle\ResourceBundle\DependencyInjection\Configuration; /** * @author Anna Walasek <anna.walasek@lakion.com> * @author Kamil Kokot <kamil.kokot@lakion.com> */ class ConfigurationTest extends \PHPUnit_Framework_TestCase { use ConfigurationTestCaseTrait; /** * @test */ public function it_does_not_break_if_not_customized() { $this->assertConfigurationIsValid( [ [] ] ); } /** * @test */ public function it_has_default_authorization_checker() { $this->assertProcessedConfigurationEquals( [ [] ], ['authorization_checker' => 'sylius.resource_controller.authorization_checker.disabled'], 'authorization_checker' ); } /** * @test */ public function its_authorization_checker_can_be_customized() { $this->assertProcessedConfigurationEquals( [ ['authorization_checker' => 'custom_service'] ], ['authorization_checker' => 'custom_service'], 'authorization_checker' ); } /** * @test */ public function its_authorization_checker_cannot_be_empty() { $this->assertPartialConfigurationIsInvalid( [ ['authorization_checker' => ''] ], 'authorization_checker' ); } /** * {@inheritdoc} */ protected function getConfiguration() { return new Configuration(); } }
Ejobs/Sylius
src/Sylius/Bundle/ResourceBundle/test/src/Tests/Configuration/ConfigurationTest.php
PHP
mit
1,925
/** * @author TristanVALCKE / https://github.com/Itee */ /* global QUnit */ import { PointLightHelper } from '../../../../src/helpers/PointLightHelper'; export default QUnit.module( 'Helpers', () => { QUnit.module( 'PointLightHelper', () => { // INHERITANCE QUnit.todo( "Extending", ( assert ) => { assert.ok( false, "everything's gonna be alright" ); } ); // INSTANCING QUnit.todo( "Instancing", ( assert ) => { assert.ok( false, "everything's gonna be alright" ); } ); // PUBLIC STUFF QUnit.todo( "dispose", ( assert ) => { assert.ok( false, "everything's gonna be alright" ); } ); QUnit.todo( "update", ( assert ) => { assert.ok( false, "everything's gonna be alright" ); } ); } ); } );
Aldrien-/three.js
test/unit/src/helpers/PointLightHelper.tests.js
JavaScript
mit
745
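The test stubs above reference dispose and update on PointLightHelper; a minimal usage sketch against the public three.js API (scene setup values are illustrative):

// Illustrative construction of the helper exercised by the stubs above.
import { Scene, PointLight, PointLightHelper } from 'three';

const scene = new Scene();
const light = new PointLight(0xff0000, 1, 100);
light.position.set(10, 10, 10);
scene.add(light);

// The second argument is the size of the wireframe sphere drawn around the light.
const helper = new PointLightHelper(light, 1);
scene.add(helper);

helper.update();   // re-sync the helper after the light moves
helper.dispose();  // release the helper's geometry and material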
// 2001-11-25 Phil Edwards <pme@gcc.gnu.org> // // Copyright (C) 2001-2014 Free Software Foundation, Inc. // // This file is part of the GNU ISO C++ Library. This library is free // software; you can redistribute it and/or modify it under the // terms of the GNU General Public License as published by the // Free Software Foundation; either version 3, or (at your option) // any later version. // // This library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License along // with this library; see the file COPYING3. If not see // <http://www.gnu.org/licenses/>. // 20.4.1.1 allocator members #include <cstdlib> #include <ext/mt_allocator.h> #include <replacement_memory_operators.h> int main() { typedef __gnu_cxx::__mt_alloc<unsigned int> allocator_type; __gnu_test::check_delete<allocator_type, false>(); return 0; }
xinchoubiology/gcc
libstdc++-v3/testsuite/ext/mt_allocator/check_delete.cc
C++
gpl-2.0
1,085
/** * @license * Copyright 2013 Palantir Technologies, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import * as ts from "typescript"; import * as Lint from "../index"; export declare class Rule extends Lint.Rules.AbstractRule { static metadata: Lint.IRuleMetadata; static FAILURE_STRING_FACTORY: (memberType: string, memberName: string | undefined, publicOnly: boolean) => string; apply(sourceFile: ts.SourceFile): Lint.RuleFailure[]; } export declare class MemberAccessWalker extends Lint.RuleWalker { visitConstructorDeclaration(node: ts.ConstructorDeclaration): void; visitMethodDeclaration(node: ts.MethodDeclaration): void; visitPropertyDeclaration(node: ts.PropertyDeclaration): void; visitGetAccessor(node: ts.AccessorDeclaration): void; visitSetAccessor(node: ts.AccessorDeclaration): void; private validateVisibilityModifiers(node); }
mo-norant/FinHeartBel
website/node_modules/tslint/lib/rules/memberAccessRule.d.ts
TypeScript
gpl-3.0
1,404
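The declaration above belongs to TSLint's member-access rule, which requires explicit visibility modifiers on class members; a short illustrative example (the class is invented) of what the walker flags:

// With "member-access": true in tslint.json, the first member is reported
// for lacking an explicit modifier; the second passes.
class Account {
    balance: number = 0;                    // flagged: no public/private/protected modifier
    public deposit(amount: number): void {  // ok: explicit 'public'
        this.balance += amount;
    }
}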
'use strict'; var fs = require('fs'); var path = require('path'); var readdirp = require('readdirp'); var handlebars = require('handlebars'); var async = require('./async'); /** * Regex pattern for layout directive. {{!< layout }} */ var layoutPattern = /{{!<\s+([A-Za-z0-9\._\-\/]+)\s*}}/; /** * Constructor */ var ExpressHbs = function() { this.handlebars = handlebars.create(); this.SafeString = this.handlebars.SafeString; this.Utils = this.handlebars.Utils; this.beautify = null; this.beautifyrc = null; }; /** * Defines a block into which content is inserted via `content`. * * @example * In layout.hbs * * {{{block "pageStylesheets"}}} */ ExpressHbs.prototype.block = function(name) { var val = (this.blocks[name] || []).join('\n'); // free mem this.blocks[name] = null; return val; }; /** * Defines content for a named block declared in layout. * * @example * * {{#contentFor "pageStylesheets"}} * <link rel="stylesheet" href='{{{URL "css/style.css"}}}' /> * {{/contentFor}} */ ExpressHbs.prototype.content = function(name, options, context) { var block = this.blocks[name] || (this.blocks[name] = []); block.push(options.fn(context)); }; /** * Returns the layout filepath given the template filename and layout used. * Backward compatible with specifying layouts in locals like 'layouts/foo', * but if you have specified a layoutsDir you can specify layouts in locals with just the layout name. * * @param {String} filename Path to template file. * @param {String} layout Layout path. */ ExpressHbs.prototype.layoutPath = function(filename, layout) { var layoutPath; if (layout[0] === '.') { layoutPath = path.resolve(path.dirname(filename), layout); } else if (this.layoutsDir) { layoutPath = path.resolve(this.layoutsDir, layout); } else { layoutPath = path.resolve(this.viewsDir, layout); } return layoutPath; } /** * Find the path of the declared layout in `str`, if any * * @param {String} str The template string to parse * @param {String} filename Path to template * @returns {String|undefined} Returns the path to layout. */ ExpressHbs.prototype.declaredLayoutFile = function(str, filename) { var matches = str.match(layoutPattern); if (matches) { var layout = matches[1]; // behave like `require`, if '.' then relative, else look in // usual location (layoutsDir) if (this.layoutsDir && layout[0] !== '.') { layout = path.resolve(this.layoutsDir, layout); } return path.resolve(path.dirname(filename), layout); } }; /** * Compiles a layout file. * * The function checks whether the layout file declares a parent layout. * If it does, the parent layout is loaded recursively and checked as well * for a parent layout, and so on, until the top layout is reached. * All layouts are then returned as a stack to the caller via the callback. * * @param {String} layoutFile The path to the layout file to compile * @param {Boolean} useCache Cache the compiled layout? 
* @param {Function} cb Callback called with layouts stack */ ExpressHbs.prototype.cacheLayout = function(layoutFile, useCache, cb) { var self = this; // assume hbs extension if (path.extname(layoutFile) === '') layoutFile += this._options.extname; // path is relative in directive, make it absolute var layoutTemplates = this.cache[layoutFile]; if (layoutTemplates) return cb(null, layoutTemplates); fs.readFile(layoutFile, 'utf8', function(err, str) { if (err) return cb(err); // File path of eventual declared parent layout var parentLayoutFile = self.declaredLayoutFile(str, layoutFile); // This function returns the current layout stack to the caller var _returnLayouts = function(layouts) { var currentLayout; layouts = layouts.slice(0); currentLayout = self.compile(str, layoutFile); layouts.push(currentLayout); if (useCache) { self.cache[layoutFile] = layouts.slice(0); } cb(null, layouts); }; if (parentLayoutFile) { // Recursively compile/cache parent layouts self.cacheLayout(parentLayoutFile, useCache, function(err, parentLayouts) { if (err) return cb(err); _returnLayouts(parentLayouts); }); } else { // No parent layout: return current layout with an empty stack _returnLayouts([]); } }); }; /** * Cache partial templates found under directories configure in partialsDir. */ ExpressHbs.prototype.cachePartials = function(cb) { var self = this; if (!(this.partialsDir instanceof Array)) { this.partialsDir = [this.partialsDir]; } // Use to iterate all folder in series var count = 0; function readNext() { readdirp({ root: self.partialsDir[count], fileFilter: '*.*' }) .on('warn', function(err) { console.warn('Non-fatal error in express-hbs cachePartials.', err); }) .on('error', function(err) { console.error('Fatal error in express-hbs cachePartials', err); return cb(err); }) .on('data', function(entry) { if (!entry) return; var source = fs.readFileSync(entry.fullPath, 'utf8'); var dirname = path.dirname(entry.path); dirname = dirname === '.' ? '' : dirname + '/'; var name = dirname + path.basename(entry.name, path.extname(entry.name)); self.registerPartial(name, source); }) .on('end', function() { count += 1; // If all directories aren't read, read the next directory if (count < self.partialsDir.length) { readNext() } else { self.isPartialCachingComplete = true; cb && cb(null, true); } }); } readNext(); }; /** * Express 3.x template engine compliance. 
* * @param {Object} options = { * handlebars: "override handlebars", * defaultLayout: "path to default layout", * partialsDir: "absolute path to partials (one path or an array of paths)", * layoutsDir: "absolute path to the layouts", * extname: "extension to use", * contentHelperName: "contentFor", * blockHelperName: "block", * beautify: "{Boolean} whether to pretty print HTML" * } * */ ExpressHbs.prototype.express3 = function(options) { var self = this; // Set defaults if (!options) options = {}; if (!options.extname) options.extname = '.hbs'; if (!options.contentHelperName) options.contentHelperName = 'contentFor'; if (!options.blockHelperName) options.blockHelperName = 'block'; if (!options.templateOptions) options.templateOptions = {}; if (options.handlebars) this.handlebars = options.handlebars; this._options = options; if (this._options.handlebars) this.handlebars = this._options.handlebars; if (options.i18n) { var i18n = options.i18n; this.handlebars.registerHelper('__', function() { return i18n.__.apply(this, arguments); }); this.handlebars.registerHelper('__n', function() { return i18n.__n.apply(this, arguments); }); } this.handlebars.registerHelper(this._options.blockHelperName, function(name, options) { var val = self.block(name); if (val == '' && (typeof options.fn === 'function')) { val = options.fn(this); } // blocks may have async helpers if (val.indexOf('__aSyNcId_') >= 0) { if (self.asyncValues) { Object.keys(self.asyncValues).forEach(function (id) { val = val.replace(id, self.asyncValues[id]); val = val.replace(self.Utils.escapeExpression(id), self.Utils.escapeExpression(self.asyncValues[id])); }); } } return val; }); // Pass 'this' as context of helper function to don't lose context call of helpers. this.handlebars.registerHelper(this._options.contentHelperName, function(name, options) { return self.content(name, options, this); }); // Absolute path to partials directory. this.partialsDir = this._options.partialsDir; // Absolute path to the layouts directory this.layoutsDir = this._options.layoutsDir; // express passes this through _express3 func, gulp pass in an option this.viewsDir = null this.viewsDirOpt = this._options.viewsDir; // Cache for templates, express 3.x doesn't do this for us this.cache = {}; // Blocks for layouts. Is this safe? What happens if the same block is used on multiple connections? // Isn't there a chance block and content are not in sync. The template and layout are processed asynchronously. this.blocks = {}; // Holds the default compiled layout if specified in options configuration. this.defaultLayoutTemplates = null; // Keep track of if partials have been cached already or not. this.isPartialCachingComplete = false; return _express3.bind(this); }; /** * Tries to load the default layout. * * @param {Boolean} useCache Whether to cache. */ ExpressHbs.prototype.loadDefaultLayout = function(useCache, cb) { var self = this; if (!this._options.defaultLayout) return cb(); if (useCache && this.defaultLayoutTemplates) return cb(null, this.defaultLayoutTemplates); this.cacheLayout(this._options.defaultLayout, useCache, function(err, templates) { if (err) return cb(err); self.defaultLayoutTemplates = templates.slice(0); return cb(null, templates); }); }; /** * express 3.x template engine compliance * * @param {String} filename Full path to template. * @param {Object} options Is the context or locals for templates. 
{ * {Object} settings - subset of Express settings, `settings.views` is * the views directory * } * @param {Function} cb The callback expecting the rendered template as a string. * * @example * * Example options from express * * { * settings: { * 'x-powered-by': true, * env: 'production', * views: '/home/coder/barc/code/express-hbs/example/views', * 'jsonp callback name': 'callback', * 'view cache': true, * 'view engine': 'hbs' * }, * cache: true, * * // the rest are app-defined locals * title: 'My favorite veggies', * layout: 'layout/veggie' * } */ function _express3(filename, source, options, cb) { // console.log('filename', filename); // console.log('options', options); // support running as a gulp/grunt filter outside of express if (arguments.length === 3) { cb = options; options = source; source = null; } this.viewsDir = options.settings.views || this.viewsDirOpt; var self = this; /** * Allow a layout to be declared as a handlebars comment to remain spec * compatible with handlebars. * * Valid directives * * {{!< foo}} # foo.hbs in same directory as template * {{!< ../layouts/default}} # default.hbs in parent layout directory * {{!< ../layouts/default.html}} # default.html in parent layout directory */ function parseLayout(str, filename, cb) { var layoutFile = self.declaredLayoutFile(str, filename); if (layoutFile) { self.cacheLayout(layoutFile, options.cache, cb); } else { cb(null, null); } } /** * Renders `template` with given `locals` and calls `cb` with the * resulting HTML string. * * @param template * @param locals * @param cb */ function renderTemplate(template, locals, cb) { var res; try { res = template(locals, self._options.templateOptions); } catch (err) { if (err.message) { err.message = '[' + template.__filename + '] ' + err.message; } else if (typeof err === 'string') { err = '[' + template.__filename + '] ' + err; } return cb(err, null); } // Wait for async helpers async.done(function (values) { // Save for layout. Block helpers are called within layout, not in the // current template. self.asyncValues = values; Object.keys(values).forEach(function (id) { res = res.replace(id, values[id]); res = res.replace(self.Utils.escapeExpression(id), self.Utils.escapeExpression(values[id])); }); cb(null, res); }); } /** * Renders `template` with an optional set of nested `layoutTemplates` using * data in `locals`. */ function render(template, locals, layoutTemplates, cb) { if (layoutTemplates == undefined) layoutTemplates = []; // We'll render templates from bottom to top of the stack, each template // being passed the rendered string of the previous ones as `body` var i = layoutTemplates.length - 1; var _stackRenderer = function(err, htmlStr) { if (err) return cb(err); if (i >= 0) { locals.body = htmlStr; renderTemplate(layoutTemplates[i--], locals, _stackRenderer); } else { cb(null, htmlStr); } }; // Start the rendering with the innermost page template renderTemplate(template, locals, _stackRenderer); } /** * Lazy loads js-beautify, which shouldn't be used in production env. */ function loadBeautify() { if (!self.beautify) { self.beautify = require('js-beautify').html; var rc = path.join(process.cwd(), '.jsbeautifyrc'); if (fs.existsSync(rc)) { self.beautifyrc = JSON.parse(fs.readFileSync(rc, 'utf8')); } } } /** * Compiles a file into a template and a layoutTemplate, then renders it above. 
*/ function compileFile(locals, cb) { var source, info, template; if (options.cache) { info = self.cache[filename]; if (info) { source = info.source; template = info.template; } } if (!info) { source = fs.readFileSync(filename, 'utf8'); template = self.compile(source, filename); if (options.cache) { self.cache[filename] = { source: source, template: template }; } } // Try to get the layout parseLayout(source, filename, function (err, layoutTemplates) { if (err) return cb(err); function renderIt(layoutTemplates) { if (self._options.beautify) { return render(template, locals, layoutTemplates, function(err, html) { if (err) return cb(err); loadBeautify(); return cb(null, self.beautify(html, self.beautifyrc)); }); } else { return render(template, locals, layoutTemplates, cb); } } // Determine which layout to use // If options.layout is falsy, behave as if no layout should be used - suppress defaults if ((typeof (options.layout) !== 'undefined') && !options.layout) { renderIt(null); } else { // 1. Layout specified in template if (layoutTemplates) { renderIt(layoutTemplates); } // 2. Layout specified by options from render else if ((typeof (options.layout) !== 'undefined') && options.layout) { var layoutFile = self.layoutPath(filename, options.layout); self.cacheLayout(layoutFile, options.cache, function (err, layoutTemplates) { if (err) return cb(err); renderIt(layoutTemplates); }); } // 3. Default layout specified when middleware was configured. else if (self.defaultLayoutTemplates) { renderIt(self.defaultLayoutTemplates); } // render without a template else renderIt(null); } }); } // kick it off by loading default template (if any) this.loadDefaultLayout(options.cache, function(err) { if (err) return cb(err); // Force reloading of all partials if caching is not used. Inefficient but there // is no loading partial event. if (self.partialsDir && (!options.cache || !self.isPartialCachingComplete)) { return self.cachePartials(function(err) { if (err) return cb(err); return compileFile(options, cb); }); } return compileFile(options, cb); }); } /** * Expose useful methods. */ ExpressHbs.prototype.registerHelper = function(name, fn) { this.handlebars.registerHelper(name, fn); }; /** * Registers a partial. * * @param {String} name The name of the partial as used in a template. * @param {String} source String source of the partial. */ ExpressHbs.prototype.registerPartial = function(name, source) { this.handlebars.registerPartial(name, this.compile(source)); }; /** * Compiles a string. * * @param {String} source The source to compile. * @param {String} filename The path used to embed into __filename for errors. */ ExpressHbs.prototype.compile = function(source, filename) { // Handlebars has a bug with comment only partial causes errors. This must // be a string so the block below can add a space. if (typeof source !== 'string') { throw new Error('registerPartial must be a string for empty comment workaround'); } if (source.indexOf('}}') === source.length - 2) { source += ' '; } var compiled = this.handlebars.compile(source); if (filename) { // track for error message compiled.__filename = path.relative(this.viewsDir, filename).replace(path.sep, '/'); } return compiled; } /** * Registers an asynchronous helper. * * @param {String} name The name of the partial as used in a template. 
* @param {String} fn The `function(options, cb)` */ ExpressHbs.prototype.registerAsyncHelper = function(name, fn) { this.handlebars.registerHelper(name, function(context) { return async.resolve(fn.bind(this), context); }); }; ExpressHbs.prototype.updateTemplateOptions = function(templateOptions) { this._options.templateOptions = templateOptions; }; /** * Creates a new instance of ExpressHbs. */ ExpressHbs.prototype.create = function() { return new ExpressHbs(); }; module.exports = new ExpressHbs();
vietpn/ghost-nodejs
node_modules/express-hbs/lib/hbs.js
JavaScript
mit
17,800
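A minimal Express wiring sketch for the engine above, using only options documented in the express3 comment (paths and the route are placeholders):

// Illustrative setup; hbs.express3 returns the Express 3.x-compatible engine function.
var express = require('express');
var hbs = require('express-hbs');   // the instance exported by this module
var app = express();

app.engine('hbs', hbs.express3({
  partialsDir: __dirname + '/views/partials',
  layoutsDir: __dirname + '/views/layouts',
  defaultLayout: __dirname + '/views/layouts/default.hbs',
  extname: '.hbs'
}));
app.set('view engine', 'hbs');
app.set('views', __dirname + '/views');

app.get('/', function (req, res) {
  // A 'layout' local overrides the default layout, as handled in compileFile above.
  res.render('index', { title: 'Hello', layout: 'layouts/other' });
});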