prompt (large_string, lengths 70 – 991k) | completion (large_string, lengths 0 – 1.02k) |
---|---|
<|file_name|>uniform.rs<|end_file_name|><|fim▁begin|>use crate::*;
#[derive(Clone, Debug, Default)]
pub struct UniformCrossover;
impl CrossoverMethod for UniformCrossover {
fn crossover(
&self,
rng: &mut dyn RngCore,
parent_a: &Chromosome,<|fim▁hole|> parent_b: &Chromosome,
) -> Chromosome {
assert_eq!(parent_a.len(), parent_b.len());
let parent_a = parent_a.iter();
let parent_b = parent_b.iter();
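// For each gene position, flip a fair coin to decide whether the child's
// gene comes from parent A or parent B.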
parent_a
.zip(parent_b)
.map(|(&a, &b)| if rng.gen_bool(0.5) { a } else { b })
.collect()
}
}
#[cfg(test)]
mod tests {
use super::*;
use rand::SeedableRng;
use rand_chacha::ChaCha8Rng;
#[allow(clippy::float_cmp)] // it's safe, because we're comparing hard-coded floats only
#[test]
fn test() {
let mut rng = ChaCha8Rng::from_seed(Default::default());
let parent_a: Chromosome = (1..=100).map(|n| n as f32).collect();
let parent_b: Chromosome = (1..=100).map(|n| -n as f32).collect();
let child = UniformCrossover::default().crossover(&mut rng, &parent_a, &parent_b);
// Number of genes different between `child` and `parent_a`
let diff_a = child.iter().zip(parent_a).filter(|(c, p)| *c != p).count();
// Number of genes different between `child` and `parent_b`
let diff_b = child.iter().zip(parent_b).filter(|(c, p)| *c != p).count();
// Roughly half the genes differ from each parent, consistent with a 50%
// chance of picking either gene
assert_eq!(diff_a, 49);
assert_eq!(diff_b, 51);
}
}<|fim▁end|> | |
<|file_name|>collapse-basic.ts<|end_file_name|><|fim▁begin|>import { Component } from '@angular/core';
@Component({
selector: 'ngbd-collapse-basic',
templateUrl: './collapse-basic.html'
})
export class NgbdCollapseBasic {<|fim▁hole|><|fim▁end|> | public isCollapsed = false;
} |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='mtg-deck-editor',
version='1.0.3',
url='https://github.com/buckket/mtg-deck-editor',
author='buckket',
author_email='buckket@cock.li',
packages=['mtgdeckeditor'],
package_dir={'mtgdeckeditor': 'mtgdeckeditor'},
package_data={
'mtgdeckeditor': ['Interface.GtkBuilder'],
},
zip_safe=True,
include_package_data=True,
platforms='any',
install_requires=[
'pyxdg',
'requests',
'requests-cache',
'html5lib',
'matplotlib',
'PyGObject',
'setuptools',
'pip'
],
entry_points={<|fim▁hole|> 'gui_scripts': [
'mtg-deck-editor = mtgdeckeditor.__main__:main',
]
},
description='A GUI deck editor for the card game Magic: The Gathering.',
long_description=open('./README.rst', 'r').read(),
keywords=['mtg', 'deck editor', 'card game'],
license='GPLv3',
classifiers=[
'Programming Language :: Python',
'Operating System :: OS Independent',
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
'Intended Audience :: End Users/Desktop',
'Topic :: Games/Entertainment',
],
)<|fim▁end|> | |
<|file_name|>24.d.ts<|end_file_name|><|fim▁begin|>import * as React from "react";
import { CarbonIconProps } from "../../";
declare const Laptop24: React.ForwardRefExoticComponent<
CarbonIconProps & React.RefAttributes<SVGSVGElement>
>;<|fim▁hole|><|fim▁end|> | export default Laptop24; |
<|file_name|>LabelWidget.java<|end_file_name|><|fim▁begin|>package edu.mit.blocks.codeblockutil;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Cursor;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.RenderingHints;
import java.awt.Toolkit;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import java.awt.event.InputEvent;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.event.MouseMotionListener;
import java.awt.geom.AffineTransform;
import java.awt.geom.GeneralPath;
import javax.swing.BorderFactory;
import javax.swing.JComponent;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JTextField;
import javax.swing.JToolTip;
import javax.swing.KeyStroke;
import javax.swing.border.Border;
import javax.swing.border.CompoundBorder;
import javax.swing.border.EmptyBorder;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import edu.mit.blocks.renderable.BlockLabel;
public class LabelWidget extends JComponent {
public static final int DROP_DOWN_MENU_WIDTH = 7;
private static final long serialVersionUID = 837647234895L;
/** Border of textfield*/
private static final Border textFieldBorder = new CompoundBorder(BorderFactory.createLoweredBevelBorder(), new EmptyBorder(1, 2, 1, 2));
/** Number formatter for this label */
private static final NumberFormatter nf = new NumberFormatter(NumberFormatter.MEDIUM_PRECISION);
/** Label that is visible iff editingText is false */
private final ShadowLabel textLabel = new ShadowLabel();
/** TextField that is visible iff editingText is true */
private final BlockLabelTextField textField = new BlockLabelTextField();
/** drop down menu icon */
private final LabelMenu menu = new LabelMenu();
/** The label text before user begins edit (applies only to editable labels)*/
private String labelBeforeEdit = "";
/** If this is a number, then only allow negative signs and periods at certain spots */
private boolean isNumber = false;
/** Is labelText editable by the user -- default false */
private boolean isEditable = false;
/** If focus is true, then show the combo pop up menu */
private boolean isFocused = false;
/** Has ComboPopup accessible selections */
private boolean hasSiblings = false;
/** True if TEXTFIELD is being edited by user. */
private boolean editingText;
/** the background color of the tooltip */
private Color tooltipBackground = new Color(255, 255, 225);
private double zoom = 1.0;
private BlockLabel blockLabel;
/**
* LabelWidget constructor. The associated BlockLabel is attached separately
* via setBlockLabel, so the label can redirect mouse actions to it.
*
*/
public LabelWidget(String initLabelText, Color fieldColor, Color tooltipBackground) {
if (initLabelText == null) {
initLabelText = "";
}
this.setFocusTraversalKeysEnabled(false);//REMOVE DEFAULT FOCUS TRAVERSAL KEYS SUCH AS TABS
this.setLayout(new BorderLayout());
this.tooltipBackground = tooltipBackground;
this.labelBeforeEdit = initLabelText;
//set up textfield colors
textField.setForeground(Color.WHITE);//white text
textField.setBackground(fieldColor);//background matching block color
textField.setCaretColor(Color.WHITE);//white caret
textField.setSelectionColor(Color.BLACK);//black highlight
textField.setSelectedTextColor(Color.WHITE);//white text when highlighted
textField.setBorder(textFieldBorder);
textField.setMargin(textFieldBorder.getBorderInsets(textField));
}
public void setBlockLabel(BlockLabel blockLabel) {
this.blockLabel = blockLabel;
}
protected void fireTextChanged(String text) {
blockLabel.textChanged(text);
}
protected void fireGenusChanged(String genus) {
blockLabel.genusChanged(genus);
}
protected void fireDimensionsChanged(Dimension value) {
blockLabel.dimensionsChanged(value);
}
protected boolean isTextValid(String text) {
return blockLabel.textValid(text);
}
public void addKeyListenerToTextField(KeyListener l) {
textField.addKeyListener(l);
}
public void addMouseListenerToLabel(MouseListener l) {
textLabel.addMouseListener(l);
}
public void addMouseMotionListenerToLabel(MouseMotionListener l) {
textLabel.addMouseMotionListener(l);
}
//////////////////////////////
//// LABEL CONFIGURATION /////
/////////////////////////////
public void showMenuIcon(boolean show) {
if (this.hasSiblings) {
isFocused = show;
// repaints the menu and items with the new zoom level
menu.popupmenu.setZoomLevel(zoom);
menu.repaint();
}
}
/**
* setEditingState sets the current editing state of the BlockLabel.
* Repaints BlockLabel to reflect the change.
*/
public void setEditingState(boolean editing) {
if (editing) {
editingText = true;
textField.setText(textLabel.getText().trim());
labelBeforeEdit = textLabel.getText();
this.removeAll();
this.add(textField);
textField.grabFocus();
} else {
//update to current textfield.text
//if text entered was not empty and if it was editing before
if (editingText) {
//make sure to remove leading and trailing spaces before testing if text is valid
//TODO if allow labels to have leading and trailing spaces, will need to modify this if statement
if (isTextValid(textField.getText().trim())) {
setText(textField.getText());
<|fim▁hole|> }
editingText = false;
}
}
/**
* editingText returns if BlockLabel is being edited
* @return editingText
*/
public boolean editingText() {
return editingText;
}
/**
* setEditable state of BlockLabel
* @param isEditable specifying editable state of BlockLabel
*/
public void setEditable(boolean isEditable) {
this.isEditable = isEditable;
}
/**
* isEditable returns if BlockLabel is editable
* @return isEditable
*/
public boolean isEditable() {
return isEditable;
}
public void setNumeric(boolean isNumber) {
this.isNumber = isNumber;
}
/**
* isNumeric returns if BlockLabel is numeric
* @return isNumber
*/
public boolean isNumeric() {
return isNumber;
}
public void setSiblings(boolean hasSiblings, String[][] siblings) {
this.hasSiblings = hasSiblings;
this.menu.setSiblings(siblings);
}
public boolean hasSiblings() {
return this.hasSiblings;
}
/**
* set up fonts
* @param font
*/
public void setFont(Font font) {
super.setFont(font);
textLabel.setFont(font);
textField.setFont(font);
menu.setFont(font);
}
/**
* sets the tool tip of the label
*/
public void assignToolTipToLabel(String text) {
this.textLabel.setToolTipText(text);
}
/**
* getText
* @return String of the current BlockLabel
*/
public String getText() {
return textLabel.getText().trim();
}
/**
* setText to a NumberFormatted double
* @param value
*/
public void setText(double value) {
//treat values at +/-Double.MAX_VALUE as +/- Infinity
if (Math.abs(value - Double.MAX_VALUE) < 1) {
updateLabelText("Infinity");
} else if (Math.abs(value + Double.MAX_VALUE) < 1) {
updateLabelText("-Infinity");
} else {
updateLabelText(nf.format(value));
}
}
/**
* setText to a String (trimmed to remove excess spaces)
* @param string
*/
public void setText(String string) {
if (string != null) {
updateLabelText(string.trim());
}
}
/**
* setText to a boolean
* @param bool
*/
public void setText(boolean bool) {
updateLabelText(bool ? "True" : "False");
}
/**
* updateLabelText updates labelText and synchronizes textField and textLabel to it
* @param text
*/
public void updateLabelText(String text) {
//leave some space to click on
if (text.equals("")) {
text = " ";
}
//update the text everywhere
textLabel.setText(text);
textField.setText(text);
//resize to new text
updateDimensions();
//the blockLabel needs to update the data in Block
this.fireTextChanged(text);
//show text label and additional ComboPopup if one exists
this.removeAll();
this.add(textLabel, BorderLayout.CENTER);
if (hasSiblings) {
this.add(menu, BorderLayout.EAST);
}
}
////////////////////
//// RENDERING /////
////////////////////
/**
* Updates the dimensions of the textLabel and textField to the minimum size needed
* to contain all of the text in the current font.
*/
private void updateDimensions() {
Dimension updatedDimension = new Dimension(
textField.getPreferredSize().width,
textField.getPreferredSize().height);
if (this.hasSiblings) {
updatedDimension.width += LabelWidget.DROP_DOWN_MENU_WIDTH;
}
textField.setSize(updatedDimension);
textLabel.setSize(updatedDimension);
this.setSize(updatedDimension);
this.fireDimensionsChanged(this.getSize());
}
/**
* Highlights the text of the editing text field from
* 0 to the end of the textfield
*/
public void highlightText() {
this.textField.setSelectionStart(0);
}
/**
* Toggles the visual suggestion that this label may be editable depending on the specified
* suggest flag and properties of the block and label. If suggest is true, the visual
* suggestion is displayed; otherwise, nothing is shown. For now, the visual suggestion
* is a simple white line border.
* Other requirements for indicator to show:
* - label type must be NAME
* - label must be editable
* - block can not be a factory block
* @param suggest
*/
protected void suggestEditable(boolean suggest) {
if (isEditable) {
if (suggest) {
setBorder(BorderFactory.createLineBorder(Color.white));//show white border
} else {
setBorder(null);//hide white border
}
}
}
public void setZoomLevel(double newZoom) {
this.zoom = newZoom;
Font renderingFont;// = new Font(font.getFontName(), font.getStyle(), (int)(font.getSize()*newZoom));
AffineTransform at = new AffineTransform();
at.setToScale(newZoom, newZoom);
renderingFont = this.getFont().deriveFont(at);
this.setFont(renderingFont);
this.repaint();
this.updateDimensions();
}
public String toString() {
return "Label at " + this.getLocation() + " with text: \"" + textLabel.getText() + "\"";
}
/**
* returns true if this block label can accept a negative sign
*/
public boolean canProcessNegativeSign() {
if (this.getText() != null && this.getText().contains("-")) {
//if it already has a negative sign,
//make sure we're highlighting it
if (textField.getSelectedText() != null && textField.getSelectedText().contains("-")) {
return true;
} else {
return false;
}
} else {
//if it does not have negative sign,
//make sure our highlight covers index 0
if (textField.getCaretPosition() == 0) {
return true;
} else {
if (textField.getSelectionStart() == 0) {
return true;
}
}
}
return false;
}
/**
* BlockLabelTextField is a Java JTextField that internally handles various events
* and provides the semantics to interface with the user. Unlike typical JTextFields,
* the BlockLabelTextField allows clients to enter only certain keyboard input.
* It also reacts to enter and escape by delegating the KeyEvent to the parent
* RenderableBlock.
*/
private class BlockLabelTextField extends JTextField implements MouseListener, DocumentListener, FocusListener, ActionListener {
private static final long serialVersionUID = 873847239234L;
/** These Key inputs are processed by this text field */
private final char[] validNumbers = {'1', '2', '3', '4', '5', '6', '7', '8', '9', '0', '.'};
/** These Key inputs are processed by this text field if NOT a number block*/
private final char[] validChar = {'1', '2', '3', '4', '5', '6', '7', '8', '9', '0',
'q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p', 'a', 's', 'd', 'f', 'g', 'h', 'j',
'k', 'l', 'z', 'x', 'c', 'v', 'b', 'n', 'm',
'Q', 'W', 'E', 'R', 'T', 'Y', 'U', 'I', 'O', 'P', 'A', 'S', 'D', 'F',
'G', 'H', 'J', 'K', 'L', 'Z', 'X', 'C', 'V', 'B', 'N', 'M',
'\'', '!', '@', '#', '$', '%', '^', '&', '*', '(', ')', '_', '+',
'-', '=', '{', '}', '|', '[', ']', '\\', ' ',
':', '"', ';', '\'', '<', '>', '?', ',', '.', '/', '`', '~'};
/** These key inputs are always processed by this text field */
private final int[] validMasks = {KeyEvent.VK_BACK_SPACE,
KeyEvent.VK_UP, KeyEvent.VK_DOWN, KeyEvent.VK_LEFT,
KeyEvent.VK_RIGHT, KeyEvent.VK_END, KeyEvent.VK_HOME,
'-', KeyEvent.VK_DELETE, KeyEvent.VK_SHIFT, KeyEvent.VK_CONTROL,
InputEvent.SHIFT_MASK, InputEvent.SHIFT_DOWN_MASK};
/**
* Constructs new block label text field
*/
private BlockLabelTextField() {
this.addActionListener(this);
this.getDocument().addDocumentListener(this);
this.addFocusListener(this);
this.addMouseListener(this);
/*
* Sets whether focus traversal keys are enabled
* for this Component. Components for which focus
* traversal keys are disabled receive key events
* for focus traversal keys.
*/
this.setFocusTraversalKeysEnabled(false);
}
public void mousePressed(MouseEvent e) {
}
public void mouseReleased(MouseEvent e) {
}
public void mouseEntered(MouseEvent e) {
}
public void mouseClicked(MouseEvent e) {
}
public void mouseExited(MouseEvent arg0) {
//remove the white line border
//note: make call here since text fields consume mouse events
//preventing parent from responding to mouse exited events
suggestEditable(false);
}
public void actionPerformed(ActionEvent e) {
setEditingState(false);
}
public void changedUpdate(DocumentEvent e) {
//listens for change in attributes
}
public void insertUpdate(DocumentEvent e) {
updateDimensions();
}
public void removeUpdate(DocumentEvent e) {
updateDimensions();
}
public void focusGained(FocusEvent e) {
}
public void focusLost(FocusEvent e) {
setEditingState(false);
}
/**
* For all user-generated AND/OR system-generated key inputs, either
* process the key if it is valid for this field, or ignore it.
*/
protected boolean processKeyBinding(KeyStroke ks, KeyEvent e, int condition, boolean pressed) {
if (isNumber) {
if (e.getKeyChar() == '-' && canProcessNegativeSign()) {
return super.processKeyBinding(ks, e, condition, pressed);
}
if (this.getText().contains(".") && e.getKeyChar() == '.') {
return false;
}
for (char c : validNumbers) {
if (e.getKeyChar() == c) {
return super.processKeyBinding(ks, e, condition, pressed);
}
}
} else {
for (char c : validChar) {
if (e.getKeyChar() == c) {
return super.processKeyBinding(ks, e, condition, pressed);
}
}
}
for (int i : validMasks) {
if (e.getKeyCode() == i) {
return super.processKeyBinding(ks, e, condition, pressed);
}
}
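//always pass platform menu shortcuts (Ctrl/Cmd combinations) through to super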
if ((e.getModifiers() & Toolkit.getDefaultToolkit().getMenuShortcutKeyMask()) != 0) {
return super.processKeyBinding(ks, e, condition, pressed);
}
return false;
}
}
private class LabelMenu extends JPanel implements MouseListener, MouseMotionListener {
private static final long serialVersionUID = 328149080240L;
private CPopupMenu popupmenu;
private GeneralPath triangle;
private LabelMenu() {
this.setOpaque(false);
this.addMouseListener(this);
this.addMouseMotionListener(this);
this.setCursor(new Cursor(Cursor.DEFAULT_CURSOR));
this.popupmenu = new CPopupMenu();
}
/**
* @param siblings = array of each sibling's genus and initial label
* { {genus, label}, {genus, label}, {genus, label} ....}
*/
private void setSiblings(String[][] siblings) {
popupmenu = new CPopupMenu();
//if connected to a block, add self and add siblings
for (int i = 0; i < siblings.length; i++) {
final String selfGenus = siblings[i][0];
CMenuItem selfItem = new CMenuItem(siblings[i][1]);
selfItem.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
fireGenusChanged(selfGenus);
showMenuIcon(false);
}
});
popupmenu.add(selfItem);
}
}
public boolean contains(Point p) {
return triangle != null && triangle.contains(p);
}
public boolean contains(int x, int y) {
return triangle != null && triangle.contains(x, y);
}
public void paint(Graphics g) {
super.paint(g);
if (isFocused) {
Graphics2D g2 = (Graphics2D) g;
g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
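//build the downward-pointing triangle that marks the drop-down menu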
triangle = new GeneralPath();
triangle.moveTo(0, this.getHeight() / 4);
triangle.lineTo(this.getWidth() - 1, this.getHeight() / 4);
triangle.lineTo(this.getWidth() / 2 - 1, this.getHeight() / 4 + LabelWidget.DROP_DOWN_MENU_WIDTH);
triangle.lineTo(0, this.getHeight() / 4);
triangle.closePath();
g2.setColor(new Color(255, 255, 255, 100));
g2.fill(triangle);
g2.setColor(Color.BLACK);
g2.draw(triangle);
}
}
public void mouseEntered(MouseEvent e) {
}
public void mouseExited(MouseEvent e) {
}
public void mousePressed(MouseEvent e) {
if (hasSiblings) {
popupmenu.show(this, 0, 0);
}
}
public void mouseReleased(MouseEvent e) {
}
public void mouseClicked(MouseEvent e) {
}
public void mouseDragged(MouseEvent e) {
}
public void mouseMoved(MouseEvent e) {
}
}
/**
* Much like a JLabel, except the text is displayed with a shadow-like outline
*/
private class ShadowLabel extends JLabel implements MouseListener, MouseMotionListener {
private static final long serialVersionUID = 90123787382L;
//To get the shadow effect the text must be displayed multiple times at
//multiple locations. x represents the center, white label.
// o is color values (0,0,0,0.5f) and b is black.
// o o
// o x b o
// o b o
// o
//offsetArrays representing the translation movement needed to get from
// the center location to a specific offset location given in {{x,y},{x,y}....}
//..........................................grey points.............................................black points
private final int[][] shadowPositionArray = {{0, -1}, {1, -1}, {-1, 0}, {2, 0}, {-1, 1}, {1, 1}, {0, 2}, {1, 0}, {0, 1}};
private final float[] shadowColorArray = {0.5f, 0.5f, 0.5f, 0.5f, 0.5f, 0.5f, 0.5f, 0, 0};
private double offsetSize = 1;
private ShadowLabel() {
this.addMouseListener(this);
this.addMouseMotionListener(this);
}
public void paint(Graphics g) {
Graphics2D g2 = (Graphics2D) g;
g2.addRenderingHints(new RenderingHints(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON));
//DO NOT DRAW SUPER's to prevent drawing of label's string.
//Implications: background not automatically drawn
//super.paint(g);
//draw shadows
for (int i = 0; i < shadowPositionArray.length; i++) {
int dx = shadowPositionArray[i][0];
int dy = shadowPositionArray[i][1];
g2.setColor(new Color(0, 0, 0, shadowColorArray[i]));
g2.drawString(this.getText(), (int) ((4 + dx) * offsetSize), this.getHeight() + (int) ((dy - 6) * offsetSize));
}
//draw main Text
g2.setColor(Color.white);
g2.drawString(this.getText(), (int) ((4) * offsetSize), this.getHeight() + (int) ((-6) * offsetSize));
}
public JToolTip createToolTip() {
return new CToolTip(tooltipBackground);
}
/**
* Set to editing state upon mouse click if this block label is editable
*/
public void mouseClicked(MouseEvent e) {
//if clicked and if the label is editable,
if ((e.getClickCount() == 1) && isEditable) {
//if clicked and if the label is editable,
//then set it to the editing state when the label is clicked on
setEditingState(true);
textField.setSelectionStart(0);
}
}
public void mousePressed(MouseEvent e) {
}
public void mouseReleased(MouseEvent e) {
}
public void mouseEntered(MouseEvent e) {
suggestEditable(true);
}
public void mouseExited(MouseEvent e) {
suggestEditable(false);
}
public void mouseDragged(MouseEvent e) {
suggestEditable(false);
}
public void mouseMoved(MouseEvent e) {
suggestEditable(true);
}
}
}<|fim▁end|> | } else {
setText(labelBeforeEdit);
}
|
<|file_name|>trustkey_test.go<|end_file_name|><|fim▁begin|>package daemon // import "github.com/tiborvass/docker/daemon"
import (
"io/ioutil"
"os"
"path/filepath"
"testing"
"gotest.tools/v3/assert"
is "gotest.tools/v3/assert/cmp"
"gotest.tools/v3/fs"
)
// LoadOrCreateTrustKey
func TestLoadOrCreateTrustKeyInvalidKeyFile(t *testing.T) {
tmpKeyFolderPath, err := ioutil.TempDir("", "api-trustkey-test")
assert.NilError(t, err)
defer os.RemoveAll(tmpKeyFolderPath)
tmpKeyFile, err := ioutil.TempFile(tmpKeyFolderPath, "keyfile")
assert.NilError(t, err)
_, err = loadOrCreateTrustKey(tmpKeyFile.Name())
assert.Check(t, is.ErrorContains(err, "Error loading key file"))
}
func TestLoadOrCreateTrustKeyCreateKeyWhenFileDoesNotExist(t *testing.T) {
tmpKeyFolderPath := fs.NewDir(t, "api-trustkey-test")
defer tmpKeyFolderPath.Remove()
// Without the need to create the folder hierarchy
tmpKeyFile := tmpKeyFolderPath.Join("keyfile")
key, err := loadOrCreateTrustKey(tmpKeyFile)
assert.NilError(t, err)<|fim▁hole|> assert.Check(t, key != nil)
_, err = os.Stat(tmpKeyFile)
assert.NilError(t, err, "key file doesn't exist")
}
func TestLoadOrCreateTrustKeyCreateKeyWhenDirectoryDoesNotExist(t *testing.T) {
tmpKeyFolderPath := fs.NewDir(t, "api-trustkey-test")
defer tmpKeyFolderPath.Remove()
tmpKeyFile := tmpKeyFolderPath.Join("folder/hierarchy/keyfile")
key, err := loadOrCreateTrustKey(tmpKeyFile)
assert.NilError(t, err)
assert.Check(t, key != nil)
_, err = os.Stat(tmpKeyFile)
assert.NilError(t, err, "key file doesn't exist")
}
func TestLoadOrCreateTrustKeyCreateKeyNoPath(t *testing.T) {
defer os.Remove("keyfile")
key, err := loadOrCreateTrustKey("keyfile")
assert.NilError(t, err)
assert.Check(t, key != nil)
_, err = os.Stat("keyfile")
assert.NilError(t, err, "key file doesn't exist")
}
func TestLoadOrCreateTrustKeyLoadValidKey(t *testing.T) {
tmpKeyFile := filepath.Join("testdata", "keyfile")
key, err := loadOrCreateTrustKey(tmpKeyFile)
assert.NilError(t, err)
expected := "AWX2:I27X:WQFX:IOMK:CNAK:O7PW:VYNB:ZLKC:CVAE:YJP2:SI4A:XXAY"
assert.Check(t, is.Contains(key.String(), expected))
}<|fim▁end|> | |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>#![crate_name = "tasksink"]
/// Task sink
/// Binds PULL socket to tcp://*:5558
/// Collects results from workers via that socket
extern crate zmq;
use std::time::Instant;
fn main() {
// Prepare our context and socket
let mut context = zmq::Context::new();
let mut receiver = context.socket(zmq::PULL).unwrap();
assert!(receiver.bind("tcp://*:5558").is_ok());
// Wait for start of batch
let mut msg = zmq::Message::new().unwrap();
receiver.recv(&mut msg, 0).unwrap();
// Start our clock now
let start = Instant::now();
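// Collect exactly 100 task results, printing one progress mark per result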
for i in 1..101 {
receiver.recv(&mut msg, 0).unwrap();
if i % 10 == 0 {
print!(":");
} else {
print!(".");
}<|fim▁hole|><|fim▁end|> | }
println!("\nTotal elapsed time: {:?}", start.elapsed());
} |
<|file_name|>changelg.rs<|end_file_name|><|fim▁begin|>extern crate git2;
extern crate regex;
use self::git2::{Repository, Commit};
use self::regex::Regex;
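// filter_option! unwraps a Result inside a closure that returns Option<Result<_>>,
// short-circuiting with Some(Err(e)) on failure; option_match! unwraps a Result,
// panicking with the error message on failure.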
macro_rules! filter_option {
($e:expr) => (match $e { Ok(t) => t, Err(e) => return Some(Err(e)) })
}
macro_rules! option_match {
($e:expr) => (match $e { Ok(t) => t, Err(e) => panic!("err! - {}", e) })
}
// Todo: Return revwalk and defer iterating over commits to another function
pub fn get_commits(args: Vec<String>) {
let repo = option_match!(Repository::open("./"));
let mut revwalk = option_match!(repo.revwalk());
option_match!(
revwalk.push_range(
&commit_range(&args[1][..], &args[2][..])[..]
)
);
let revwalk = revwalk.filter_map(|id| {
let id = option_match!(id);
let commit = filter_option!(repo.find_commit(id));
Some(Ok(commit))<|fim▁hole|> for commit in revwalk {
let commit = option_match!(commit);
print_to_stdout(&commit);
}
}
fn commit_range(commit_from: &str, commit_to: &str) -> String {
let mut commit_range = String::from("");
commit_range.push_str(commit_from);
commit_range.push_str("..");
commit_range.push_str(commit_to);
commit_range
}
fn print_to_stdout(commit: &Commit) {
for line in String::from_utf8_lossy(commit.message_bytes()).lines() {
if match_changelog_identifier(line) {
println!("{} by {}", strip_changelog_hashtag(line), commit.author());
};
};
}
fn match_changelog_identifier(line: &str) -> bool {
let re = Regex::new(r"^#changelog.*$").unwrap();
re.is_match(line)
}
fn strip_changelog_hashtag(commit_msg: &str) -> &str {
if commit_msg.to_lowercase().starts_with("#changelog ") {
return &commit_msg[11..].trim_left();
}
commit_msg
}
#[test]
#[should_panic]
fn it_should_panic_on_error_in_option_match() {
fn something_bad_happens<T>() -> Result<T, &'static str> {
Err("Fail")
};
match something_bad_happens() {
Ok(t) => t,
Err(e) => panic!("err! - {}", e)
}
}
#[test]
fn it_should_return_result_on_ok_in_option_match() {
fn something_good_happens() -> Result<&'static str, &'static str> {
Ok("Good")
};
let result = match something_good_happens() {
Ok(t) => t,
Err(e) => panic!("err! - {}", e)
};
assert_eq!(result, "Good");
}
#[test]
fn it_should_return_expected_commit_range_string(){
let commit_range = commit_range("377d686351969f27f288dec2fb09d0d5431fcde1", "3763e0e3ff218cbdfbf99c68109a04d666e81abeto");
assert_eq!(commit_range, "377d686351969f27f288dec2fb09d0d5431fcde1..3763e0e3ff218cbdfbf99c68109a04d666e81abeto");
}
#[test]
fn it_should_return_true_when_a_string_is_tagged_changelog_(){
let result = match_changelog_identifier("#changelog Hello World");
assert_eq!(result, true);
}
#[test]
fn it_should_return_false_when_a_string_is_not_tagged_changelog_(){
let result = match_changelog_identifier("Hello World");
assert_eq!(result, false);
}
#[test]
fn it_should_return_message_without_hashtag() {
let result = strip_changelog_hashtag("#changelog This is a test commit message");
assert_eq!(result, "This is a test commit message");
}
#[test]
fn it_should_return_message_without_hashtag_and_surplus_whitespace() {
let result = strip_changelog_hashtag("#changelog This is a test commit message");
assert_eq!(result, "This is a test commit message");
}
#[test]
fn it_should_return_message_without_changes_if_not_changelog() {
let result = strip_changelog_hashtag("This is a test commit message without a changelog hashtag");
assert_eq!(result, "This is a test commit message without a changelog hashtag");
}<|fim▁end|> | });
|
<|file_name|>unifont.rs<|end_file_name|><|fim▁begin|>use std::fs::File;
use std::io::{BufRead, BufReader, Read, Write};<|fim▁hole|> let mut output = File::create("unifont.font").unwrap();
let mut count = 0;
for line_res in BufReader::new(input).lines() {
let line = line_res.unwrap();
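// Each line of unifont.hex has the form "XXXX:<hex bitmap>"; this code
// assumes the 8x16 glyph form (16 bytes, i.e. 32 hex digits).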
let mut parts = line.split(":");
let num = u32::from_str_radix(parts.next().unwrap(), 16).unwrap();
assert_eq!(num, count);
let mut data = [0; 16];
let data_part = parts.next().unwrap();
for i in 0..data.len() {
data[i] = u8::from_str_radix(&data_part[i * 2 .. i * 2 + 2], 16).unwrap();
}
println!("{:>04X}:{:?}", num, data);
output.write_all(&data).unwrap();
count += 1;
}
}<|fim▁end|> |
fn main() {
let mut input = File::open("unifont.hex").unwrap(); |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># flake8: noqa
"""
PILKit image processors.
A processor accepts an image, does some stuff, and returns the result.
Processors can do anything with the image you want, but their responsibilities
should be limited to image manipulations--they should be completely decoupled
from the filesystem.
"""
from .base import *<|fim▁hole|>from .crop import *
from .overlay import *
from .resize import *<|fim▁end|> | |
<|file_name|>Query.js<|end_file_name|><|fim▁begin|>'use strict';
var pkg = require('../package');
var log = require('debug')(pkg.name + ':Query');
console.log.bind(log);
var error = require('debug')(pkg.name + ':Query');
console.error.bind(error);
var async = require('async');
var spawn = require('child_process').spawn;
var moment = require('moment');
var xml2js = require('xml2js');
var setClass = function(className, cb) {
if (typeof(className) === 'function') {
cb = className;
className = undefined;
}
log('Set class property for instance to %s.', className);
this._params.class = className;
if (typeof(cb) === 'function') {
log('setClass called with callback function. Execute query.');
this.exec(cb);
}
return this;
};
var setHost = function(host, cb) {
if (typeof(host) === 'function') {
cb = host;
host = undefined;
}
log('Set host property for instance to %s.', host);
this._params.host = host || 'localhost';
if (typeof(cb) === 'function') {
log('setHost called with callback function. Execute query.');
this.exec(cb);
}
return this;
};
var setNamespace = function(namespace, cb) {
if (typeof(namespace) === 'function') {
cb = namespace;
namespace = undefined;
}
if (!namespace) {
namespace = 'root\\CIMV2';
}
namespace = namespace.replace(/\//g, '\\');
log('Set namespace property for instance to %s.', namespace);
this._params.namespace = namespace;
if (typeof(cb) === 'function') {
log('setNamespace called with callback function. Execute query.');
this.exec(cb);
}
return this;
};
var setPassword = function(password, cb) {
if (typeof(password) === 'function') {
cb = password;
password = undefined;
}
log('Set password property for instance to %s.', password);
this._params.password = password;
if (typeof(cb) === 'function') {
log('setPassword called with callback function. Execute query.');
this.exec(cb);
}
return this;
};
var setProps = function(props, cb) {
if (typeof(props) === 'function') {
cb = props;
props = undefined;
}
if (Array.isArray(props)) {
props = props.join(',');
}
log('Set props property for instance to %s.', props);
this._params.props = props;
if (typeof(cb) === 'function') {
log('setProps called with callback function. Execute query.');
this.exec(cb);
}
return this;
};
var setUsername = function(username, cb) {
if (typeof(username) === 'function') {
cb = username;
username = undefined;
}
log('Set username property for instance to %s.', username);
this._params.username = username;
if (typeof(cb) === 'function') {
log('setUsername called with callback function. Execute query.');
this.exec(cb);
}
return this;
};
var setWhere = function(where, cb) {
if (typeof(where) === 'function') {
cb = where;
where = undefined;
}
log('Set where property for instance to %s.', where);
this._params.where = where;
if (typeof(cb) === 'function') {
log('setWhere called with callback function. Execute query.');
this.exec(cb);<|fim▁hole|> }
return this;
};
var getArgsArray = function(params) {
log('Create array of arguments.');
var args = [
'/NAMESPACE:\\\\' + params.namespace,
'/NODE:\'' + params.host + '\'',
];
if (params.username) {
args.push('/USER:\'' + params.username + '\'');
}
if (params.password) {
args.push('/PASSWORD:\'' + params.password + '\'');
}
args.push('path');
args.push(params.class);
if (params.where) {
if (typeof(params.where) === 'string' && params.where.length) {
args.push('Where');
if (params.where.substr(0, 1) !== '(') {
params.where = '(' + params.where + ')';
}
args.push(params.where);
} else if (Array.isArray(params.where) && params.where.length) {
var str = '';
for (var i = 0; i < params.where.length; i++) {
var tmp = params.where[i];
if (typeof(tmp) === 'string') {
str += ' And ' + tmp;
} else if (typeof(tmp) === 'object') {
str += ' And ' + params.where[i].property +
'=\'' + params.where[i].value + '\'';
}
}
str = '(' + str.replace(/^\sAnd\s/, '') + ')';
if (str !== '()') {
args.push('Where');
args.push(str);
}
}
}
args.push('get');
if (params.props) {
var props = params.props;
if (Array.isArray(props)) {
props = props.join(',');
}
args.push(props);
}
args.push('/FORMAT:rawxml');
log('Created array of arguments.', args);
return args;
};
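// Coerce a raw WMI string value into a native JavaScript type based on the
// CIM type reported in the XML output (integers, floats, booleans, dates).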
var typeValue = function(value, type) {
if (value !== undefined) {
if (['uint64', 'uint32', 'uint16', 'uint8', 'sint64',
'sint32', 'sint16', 'sint8'
].indexOf(type) !== -1) {
value = parseInt(value);
} else if (['real64', 'real32', 'real16', 'real8'].indexOf(type) !== -1) {
value = parseFloat(value);
} else if (type === 'boolean') {
if (value === 'TRUE') {
value = true;
} else {
value = false;
}
} else if (type === 'datetime') {
value = moment(value).toDate();
}
}
return value;
};
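// Extract name, type, and value from a parsed PROPERTY or PROPERTY.ARRAY
// node, handling both scalar values and value arrays.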
var extractProperty = function(prop) {
var name;
var type;
var value;
if ('$' in prop) {
name = prop.$.NAME;
type = prop.$.TYPE;
} else {
name = prop.NAME;
type = prop.TYPE;
}
if ('VALUE' in prop) {
value = prop.VALUE;
if (Array.isArray(value)) {
value = value[0];
}
value = typeValue(value, type);
} else if ('VALUE.ARRAY' in prop && prop['VALUE.ARRAY'].length > 0 &&
prop['VALUE.ARRAY'][0].VALUE) {
value = [];
for (var i = 0; i < prop['VALUE.ARRAY'][0].VALUE.length; i++) {
value.push(typeValue(prop['VALUE.ARRAY'][0].VALUE[i], type));
}
}
return {
name: name,
type: type,
value: value
};
};
var exec = function(cb) {
log('Execute query.');
if (typeof(cb) !== 'function') {
cb = function() {};
}
if (!this._params.class) {
log('Unable to execute query. Class is undefined.');
return cb(new Error('No class defined to query.'));
}
var args = getArgsArray(this._params);
var cp = spawn('wmic', args, {
stdio: ['ignore', 'pipe', 'pipe']
});
cp.on('error', function(err) {
error('Error while performing query.', err);
cb(err);
});
var stdout = '';
var stderr = '';
cp.stdout.on('data', function(data) {
stdout += data;
});
cp.stderr.on('data', function(data) {
stderr += data;
});
cp.on('close', function(code) {
if (code !== 0) {
stderr = stderr.toString().replace(/ERROR:\r\r\n/, '');
stderr = stderr.replace(/\r\r\n$/g, '').replace(/Description = /, '');
var err = new Error(stderr);
err.exitCode = code;
log('Query finished with error code.');
return cb(err);
}
stdout = stdout.toString();
if (!stdout) {
return cb();
}
var parser = new xml2js.Parser({
explicitArray: true
});
async.auto({
parse: function(cb) {
log('Parse results into xml.');
parser.parseString(stdout, cb);
},
mangle: ['parse', function(cb, result) {
if (!result.parse.COMMAND.RESULTS[0].CIM) {
log('No results from query.');
return cb();
}
log('Parse xml into formatted json.');
async.map(result.parse.COMMAND.RESULTS[0].CIM[0].INSTANCE,
function(instance, cb) {
var props = {};
async.auto({
nativeProperties: function(cb) {
async.each(instance.PROPERTY, function(prop, cb) {
var propInfo = extractProperty(prop);
props[propInfo.name] = propInfo.value;
cb();
}, cb);
},
relatedProperties: function(cb) {
async.each(instance['PROPERTY.ARRAY'], function(prop, cb) {
var propInfo = extractProperty(prop);
props[propInfo.name] = propInfo.value;
cb();
}, cb);
}
}, function() {
cb(null, props);
});
}, cb);
}]
}, function(err, result) {
log('Execution completed.');
cb(err, result.mangle);
});
});
};
var Query = function Query(options, cb) {
if (!(this instanceof Query)) {
log('Query class called without. Instantiate new instance automatically.');
return new Query(options, cb);
}
log('Create new instance of query class.');
if (typeof(options) === 'function') {
cb = options;
options = {};
} else if (typeof(options) !== 'object') {
options = {};
}
this._params = {};
setClass.call(this, options.class);
setHost.call(this, options.host || 'localhost');
setNamespace.call(this, options.namespace || 'root\\CIMV2');
setPassword.call(this, options.password);
setProps.call(this, options.properties || options.props);
setUsername.call(this, options.username);
setWhere.call(this, options.where);
log('Param values set during class creation.');
if (typeof(cb) === 'function') {
log('Class called with immediate function callback.');
this.exec(cb);
}
Query.prototype.exec = exec.bind(this);
return this;
};
Query.prototype.host = setHost;
Query.prototype.namespace = setNamespace;
Query.prototype.class = setClass;
Query.prototype.username = setUsername;
Query.prototype.password = setPassword;
Query.prototype.props = Query.prototype.properties = setProps;
Query.prototype.where = setWhere;
exports = module.exports = Query;<|fim▁end|> | |
<|file_name|>alertMessage.ts<|end_file_name|><|fim▁begin|>import IError from './error';
interface IAlertMessage {
id?: number;
status?: number;
message?: string;<|fim▁hole|> alertType: string;
}
export default IAlertMessage;<|fim▁end|> | errors?: IError[]; |
<|file_name|>PrivMgrComponentOperations.cpp<|end_file_name|><|fim▁begin|>//*****************************************************************************
// @@@ START COPYRIGHT @@@
//
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//
// @@@ END COPYRIGHT @@@
//*****************************************************************************
#include "PrivMgrComponentOperations.h"
#include "PrivMgrMD.h"
#include "PrivMgrMDTable.h"
#include "PrivMgrComponents.h"
#include "PrivMgrComponentPrivileges.h"
#include <string>
#include <cstdio>
#include "ComSmallDefs.h"
// sqlcli.h included because ExExeUtilCli.h needs it (and does not include it!)
#include "sqlcli.h"
#include "ExExeUtilCli.h"
#include "ComQueue.h"
#include "ComDiags.h"
// CmpCommon.h contains STMTHEAP declaration
#include "CmpCommon.h"
#include "CmpContext.h"
#include "CmpDDLCatErrorCodes.h"
#include "ComUser.h"
namespace ComponentOperations
{
// *****************************************************************************
// * Class: MyRow
// * Description: This class represents a component operations row which contains:
// * - UID of the component
// * - 2 character code of the component operation
// * - ANSI name of the component operation
// * - Description of the component operation
// *
// * A component operation can be uniquely identified by its component UID
// * and either its ANSI name or its 2 character code.
// *****************************************************************************
class MyRow : public PrivMgrMDRow
{
public:
// -------------------------------------------------------------------
// Constructors and destructors:
// -------------------------------------------------------------------
MyRow(std::string tableName)
: PrivMgrMDRow(tableName, COMPONENT_OPERATIONS_ENUM),
componentUID_(0)
{ };
MyRow(const MyRow &other)
: PrivMgrMDRow(other)
{
componentUID_ = other.componentUID_;
operationCode_ = other.operationCode_;
operationName_ = other.operationName_;
operationType_ = other.operationType_;
operationDescription_ = other.operationDescription_;
};
virtual ~MyRow() {};
inline void clear() {componentUID_ = 0;};
bool lookupByCode(
const int64_t componentUID,
const std::string & operationCode,
std::string & operationName,
PrivMgrComponentOperations::OperationType & operationType,
std::string & operationDescription);
bool lookupByName(
const int64_t componentUID,
const std::string & operationName,
std::string & operationCode,
PrivMgrComponentOperations::OperationType & operationType,
std::string & operationDescription);
// -------------------------------------------------------------------
// Data Members:
// -------------------------------------------------------------------
// From COMPONENT_OPERATIONS
int64_t componentUID_;
std::string operationCode_;
std::string operationName_;
PrivMgrComponentOperations::OperationType operationType_;
std::string operationDescription_;
private:
MyRow();
};
// *****************************************************************************
// * Class: MyTable
// * Description: This class represents the COMPONENT_OPERATIONS table containing:
// * - the fully qualified name of the table
// *
// * A component operation can be uniquely identified by a component UID and
// * either the name of the operation or the operation code.
// *****************************************************************************
class MyTable : public PrivMgrMDTable
{
public:
MyTable(
const std::string & tableName,
PrivMgrTableEnum myTableEnum,
ComDiagsArea * pDiags)
: PrivMgrMDTable(tableName,COMPONENT_OPERATIONS_ENUM, pDiags),
lastRowRead_(tableName)
{};
inline void clear() { lastRowRead_.clear(); };
PrivStatus fetchByCode(
const int64_t componentUID,
const std::string & operationCode,
MyRow & row);
PrivStatus fetchByName(
const std::string & componentUIDString,
const std::string & operationName,
MyRow & row);
PrivStatus fetchByName(
const int64_t componentUID,
const std::string & operationName,
MyRow & row);
virtual PrivStatus insert(const PrivMgrMDRow &row);
virtual PrivStatus selectWhereUnique(
const std::string & whereClause,
PrivMgrMDRow & row);
PrivStatus selectWhere(
const std::string & whereClause,
std::vector<MyRow *> &rowList);
PrivStatus update(
const std::string &setClause,
const std::string &whereClause);
private:
MyTable();
void setRow(OutputInfo *pCliRow, MyRow &rowOut);
MyRow lastRowRead_;
};
}//End namespace ComponentOperations
using namespace ComponentOperations;
// *****************************************************************************
// PrivMgrComponentOperations methods
// *****************************************************************************
// -----------------------------------------------------------------------
// Construct a PrivMgrComponentOperations object for a new component operation.
// -----------------------------------------------------------------------
PrivMgrComponentOperations::PrivMgrComponentOperations(
const std::string & metadataLocation,
ComDiagsArea * pDiags)
: PrivMgr(metadataLocation, pDiags),
fullTableName_(metadataLocation_ + "." + PRIVMGR_COMPONENT_OPERATIONS),
myTable_(*new MyTable(fullTableName_,COMPONENT_OPERATIONS_ENUM, pDiags))
{ };
// -----------------------------------------------------------------------
// Copy constructor
// -----------------------------------------------------------------------
PrivMgrComponentOperations::PrivMgrComponentOperations(const PrivMgrComponentOperations &other)
: PrivMgr(other),
myTable_(*new MyTable(fullTableName_,COMPONENT_OPERATIONS_ENUM, pDiags_))
{
fullTableName_ = other.fullTableName_;
}
// -----------------------------------------------------------------------
// Destructor.
// -----------------------------------------------------------------------
PrivMgrComponentOperations::~PrivMgrComponentOperations()
{
delete &myTable_;
}
// *****************************************************************************
// * *
// * Function: PrivMgrComponentOperations::clear *
// * *
// * This function clears any cache associated with this object. *
// * *
// *****************************************************************************
void PrivMgrComponentOperations::clear()
{
MyTable &myTable = static_cast<MyTable &>(myTable_);
myTable.clear();
}
//******************* End of PrivMgrComponentOperations::clear *****************
// *****************************************************************************
// * *
// * Function: PrivMgrComponentOperations::codeExists *
// * *
// * This function determines if a specific component operation code has *
// * been defined in Privilege Manager metadata. *
// * *
// *****************************************************************************
// * *
// * Parameters: *
// * *
// * <componentUID> const int64_t In *
// * is the unique ID associated with the component. *
// * *
// * <operationCode> const std::string & In *
// * is the two character code associated with the component operation. *
// * *
// *****************************************************************************
// * *
// * Returns: bool *
// * *
// * true: Operation has been created. *
// * false: Operation does not exist or error encountered. *
// * *
// *****************************************************************************
bool PrivMgrComponentOperations::codeExists(
const int64_t componentUID,
const std::string & operationCode)
{
MyRow row(fullTableName_);
MyTable &myTable = static_cast<MyTable &>(myTable_);
PrivStatus privStatus = myTable.fetchByCode(componentUID,operationCode,row);
if (privStatus == STATUS_GOOD || privStatus == STATUS_WARNING)
return true;
return false;
}
//********************* End of PrivMgrComponents::codeExists *******************
// *****************************************************************************
// * *
// * Function: PrivMgrComponentOperations::createOperation *
// * *
// * Add an operation for the specified component to the *
// * COMPONENT_OPERATIONS table. *
// * *
// *****************************************************************************
// * *
// * Parameters: *
// * *
// * <componentName> const std::string & In *
// * is the component name. *
// * *
// * <operationName> const std::string & In *
// * is the name of the operation to be added. *
// * *
// * <operationCode> const std::string & In *
// * is a 2 character code associated with the operation unique to the *
// * component. *
// * *
// * <isSystem> bool In *
// * is true if the operation is a system operation. *
// * *
// * <operationDescription> const std::string & In *
// * is a description of the operation. *
// * *
// * <existsErrorOK> const bool [In] *
// * if true, exists errors are silently ignored. *
// * *
// *****************************************************************************
// * *
// * Returns: PrivStatus *
// * *
// * STATUS_GOOD: Row added. *
// * *: Create failed. A CLI error is put into the diags area. *
// * *
// *****************************************************************************
PrivStatus PrivMgrComponentOperations::createOperation(
const std::string & componentName,
const std::string & operationName,
const std::string & operationCode,
bool isSystem,
const std::string & operationDescription,
const bool existsErrorOK)
{
PrivMgrComponentPrivileges componentPrivileges(metadataLocation_, pDiags_);
if (!ComUser::isRootUserID()&&
!componentPrivileges.hasSQLPriv(ComUser::getCurrentUser(), SQLOperation::MANAGE_COMPONENTS, true))
{
*pDiags_ << DgSqlCode(-CAT_NOT_AUTHORIZED);
return STATUS_ERROR;
}
if (operationCode.size() != 2 || (operationCode.size() == 2 &&
(operationCode[0] == ' ' || operationCode[1] == ' ')))
{
*pDiags_ << DgSqlCode(-CAT_CODE_MUST_CONTAIN_2_NONBLANKS);
return STATUS_ERROR;
}
// Determine if the component exists.
PrivMgrComponents component(metadataLocation_,pDiags_);
if (!component.exists(componentName))
{
*pDiags_ << DgSqlCode(-CAT_TABLE_DOES_NOT_EXIST_ERROR)
<< DgTableName(componentName.c_str());
return STATUS_ERROR;
}
// Component exists, fetch data for this component
std::string componentUIDString;
int64_t componentUID;
bool isSystemComponent;
std::string tempStr;
component.fetchByName(componentName,
componentUIDString,
componentUID,
isSystemComponent,
tempStr);
// OK, the component is defined, what about the operation? If it already is
// defined, return an error. Both the operation name and code must be
// unique within a component.
if (nameExists(componentUID,operationName))
{
if (existsErrorOK)
return STATUS_GOOD;
*pDiags_ << DgSqlCode(-CAT_COMPONENT_PRIVILEGE_NAME_EXISTS)
<< DgString0(operationName.c_str());
return STATUS_ERROR;
}
if (codeExists(componentUID,operationCode))
{
if (existsErrorOK)
return STATUS_GOOD;
*pDiags_ << DgSqlCode(-CAT_COMPONENT_PRIVILEGE_CODE_EXISTS)
<< DgString0(operationCode.c_str());
return STATUS_ERROR;
}
// An operation can only be a system operation if its component is a
// system component.
if (isSystem && !isSystemComponent)
{
*pDiags_ << DgSqlCode(-CAT_COMPONENT_NOT_SYSTEM);
return STATUS_ERROR;
}
// Name and code are not used, add an entry.
MyRow row(fullTableName_);
row.componentUID_ = componentUID;
row.operationCode_ = operationCode;
row.operationName_ = operationName;
row.operationType_ = (isSystem ? OP_TYPE_SYSTEM : OP_TYPE_USER);
row.operationDescription_ = operationDescription;
MyTable &myTable = static_cast<MyTable &>(myTable_);
PrivStatus privStatus = myTable.insert(row);
if (privStatus != STATUS_GOOD)
return privStatus;
// Grant authority to creator
PrivMgrComponentPrivileges componentPrivilege(metadataLocation_,pDiags_);
return componentPrivilege.grantPrivilegeToCreator(componentUID,
operationCode,
ComUser::getCurrentUser(),
ComUser::getCurrentUsername());
}
//************ End of PrivMgrComponentOperations::createOperation **************
// *****************************************************************************
// * *
// * Function: PrivMgrComponentOperations::createOperationInternal *
// * *
// * Add an operation for the specified component to the *
// * COMPONENT_OPERATIONS table. *
// * *
// *****************************************************************************
// * *
// * Parameters: *
// * *
// * <componentUID> const int64_t In *
// * is a unique ID for the component. *
// * *
// * <operationName> const std::string & In *
// * is the name of the operation to be added. *
// * *
// * <operationCode> const std::string & In *
// * is a 2 character code associated with the operation unique to the *
// * component. *
// * *
// * <operationTypeUnused> const bool In *
// * type of component, user, system, or unused. *
// * *
// * <operationDescription> const std::string & In *
// * is a description of the operation. *
// * *
// * <granteeID> const int32_t In *
// * is the the authID to be granted the privilege on the newly created *
// * component operation. *
// * *
// * <granteeName> const std::string & In *
// * is the name of the authID to be granted the privilege on the newly *
// * created component operation. *
// * *
// * <grantDepth> const int32_t In *
// * is the number of levels this privilege may be granted by the grantee. *
// * Initially this is either 0 or -1. *
// * *
// *****************************************************************************
// * *
// * Returns: PrivStatus *
// * *
// * STATUS_GOOD: Row added. *
// * *: Create failed. A CLI error is put into the diags area. *
// * *
// *****************************************************************************
PrivStatus PrivMgrComponentOperations::createOperationInternal(
const int64_t componentUID,
const std::string & operationName,
const std::string & operationCode,
const bool operationTypeUnused,
const std::string & operationDescription,
const int32_t granteeID,<|fim▁hole|> const std::string & granteeName,
const int32_t grantDepth,
const bool checkExistence)
{
PrivStatus privStatus = STATUS_GOOD;
// If operation already created, no need to create
if (checkExistence && nameExists(componentUID,operationName))
return STATUS_GOOD;
MyRow row(fullTableName_);
row.componentUID_ = componentUID;
row.operationCode_ = operationCode;
row.operationName_ = operationName;
row.operationType_ = (operationTypeUnused ? OP_TYPE_UNUSED : OP_TYPE_SYSTEM);
row.operationDescription_ = operationDescription;
MyTable &myTable = static_cast<MyTable &>(myTable_);
privStatus = myTable.insert(row);
if (privStatus != STATUS_GOOD)
return privStatus;
// Grant authority to creator
PrivMgrComponentPrivileges componentPrivileges(metadataLocation_,pDiags_);
std::vector<std::string> operationCodes;
operationCodes.push_back(operationCode);
privStatus = componentPrivileges.grantPrivilegeInternal(componentUID,
operationCodes,
SYSTEM_USER,
ComUser::getSystemUserName(),
granteeID,
granteeName,grantDepth,
checkExistence);
return privStatus;
}
//************ End of PrivMgrComponentOperations::createOperation **************
// *****************************************************************************
// * *
// * Function: PrivMgrComponentOperations::describeComponentOperations *
// * *
// * Reads all rows of componentUIDString from COMPONENT_OPERATIONS, *
// * for each row, *
// * generate a CREATE COMPONENT PRIVILEGE statement, *
// * and call PrivMgrComponentPrivileges::describeComponentPrivileges() *
// * to generate GRANT COMPONENT PRIVILEGE statements. *
// * *
// *****************************************************************************
// * *
// * Parameters: *
// * *
// * componentUIDString const std::string & In *
// * is the component unique ID as a numeric string. *
// * *
// * componentName const std::string & In *
// * is the name of the component *
// * *
// * outlines std::vector<std::string> & Out *
// * array of strings with CREATE and GRANT statements *
// * *
// * componentPrivileges PrivMgrComponentPrivileges * In *
// * if specified use PrivMgrComponentPrivileges object *
// * to generate GRANT statement for each component operation *
// * *
// *****************************************************************************
// * *
// * Returns: PrivStatus *
// * *
// * STATUS_GOOD: Row read successfully, data returned. *
// * *
// * STATUS_NOTFOUND: No rows that matched, or error encountered. *
// * *
// *****************************************************************************
PrivStatus PrivMgrComponentOperations::describeComponentOperations(
const std::string & componentUIDString,
const std::string & componentName,
std::vector<std::string> & outlines,
PrivMgrComponentPrivileges * componentPrivileges)
{
std::vector<MyRow *> rowList;
MyTable &myTable = static_cast<MyTable &>(myTable_);
std::string whereClause("WHERE COMPONENT_UID = ");
whereClause += componentUIDString;
whereClause += " and is_system <> 'U'";
PrivStatus privStatus = myTable.selectWhere(whereClause, rowList);
if (privStatus == STATUS_GOOD)
{
for(int i = 0; i < rowList.size(); i++)
{
MyRow* myRow = rowList[i];
if (myRow->operationType_ == OP_TYPE_UNUSED)
continue;
std::string componentText;
componentText += "CREATE COMPONENT PRIVILEGE ";
componentText += myRow->operationName_ + " AS ";
componentText += "'" + myRow->operationCode_ + "'";
componentText += " ON " + componentName;
if(myRow->operationType_ == OP_TYPE_SYSTEM)
componentText += " SYSTEM";
if(!myRow->operationDescription_.empty())
componentText += " DETAIL '" + myRow->operationDescription_ + "'";
componentText += ";";
outlines.push_back(componentText);
outlines.push_back("");
if(componentPrivileges)
componentPrivileges->describeComponentPrivileges(componentUIDString,
componentName,
myRow->operationCode_,
myRow->operationName_,
outlines);
delete myRow;
outlines.push_back("");
}
}
return privStatus;
}
//****** End of PrivMgrComponentOperations::describeComponentOperations ********
// *****************************************************************************
// * *
// * Function: PrivMgrComponentOperations::dropAll *
// * *
// * Deletes all rows in the COMPONENT_OPERATIONS table. *
// * *
// *****************************************************************************
// * *
// * Returns: PrivStatus *
// * *
// * STATUS_GOOD: Row(s) deleted. *
// * *: Delete failed. A CLI error is put into the diags area. *
// * *
// *****************************************************************************
PrivStatus PrivMgrComponentOperations::dropAll()
{
std::string whereClause (" ");
MyTable &myTable = static_cast<MyTable &>(myTable_);
PrivStatus privStatus = myTable.deleteWhere(whereClause);
if (privStatus != STATUS_GOOD)
return privStatus;
PrivMgrComponentPrivileges componentPrivileges(metadataLocation_,pDiags_);
return componentPrivileges.dropAll();
}
//**************** End of PrivMgrComponentOperations::dropAll ******************
// *****************************************************************************
// * *
// * Function: PrivMgrComponentOperations::dropAll *
// * *
// * Deletes all rows in the COMPONENT_OPERATIONS table that match the *
// * specified component unique ID. In addition, and granted privileges *
// * for the component are deleted. *
// * *
// *****************************************************************************
// * *
// * Parameters: *
// * *
// * <componentUID> const std::string & In *
// * is the component unique ID. All rows containing this UID will be *
// * deleted. *
// * *
// *****************************************************************************
// * *
// * Returns: PrivStatus *
// * *
// * STATUS_GOOD: Row(s) deleted. *
// * *: Delete failed. A CLI error is put into the diags area. *
// * *
// *****************************************************************************
PrivStatus PrivMgrComponentOperations::dropAll(const std::string & componentUID)
{
std::string whereClause ("WHERE COMPONENT_UID = ");
whereClause += componentUID;
MyTable &myTable = static_cast<MyTable &>(myTable_);
PrivStatus privStatus = myTable.deleteWhere(whereClause);
if (privStatus != STATUS_GOOD)
return privStatus;
PrivMgrComponentPrivileges componentPrivileges(metadataLocation_,pDiags_);
return componentPrivileges.dropAllForComponent(componentUID);
}
//**************** End of PrivMgrComponentOperations::dropAll *****************
// *****************************************************************************
// * *
// * Function: PrivMgrComponentOperations::dropOperation *
// * *
// * Deletes operation for the specified component from the *
// * COMPONENT_OPERATIONS table. *
// * *
// *****************************************************************************
// * *
// * Parameters: *
// * *
// * <componentName> const std::string & In *
// * is the component name. *
// * *
// * <operationName> const std::string & In *
// * is the operation name. *
// * *
// * <dropBehavior> PrivDropBehavior In *
// * indicates whether restrict or cascade behavior is requested. *
// * *
// *****************************************************************************
// * *
// * Returns: PrivStatus *
// * *
// * STATUS_GOOD: Row(s) deleted. *
// * *: Delete failed. A CLI error is put into the diags area. *
// * *
// *****************************************************************************
PrivStatus PrivMgrComponentOperations::dropOperation(
const std::string & componentName,
const std::string & operationName,
PrivDropBehavior dropBehavior)
{
PrivMgrComponentPrivileges componentPrivileges(metadataLocation_, pDiags_);
if (!ComUser::isRootUserID()&&
!componentPrivileges.hasSQLPriv(ComUser::getCurrentUser(), SQLOperation::MANAGE_COMPONENTS, true))
{
*pDiags_ << DgSqlCode(-CAT_NOT_AUTHORIZED);
return STATUS_ERROR;
}
// Determine if the component exists.
PrivMgrComponents component(metadataLocation_,pDiags_);
if (!component.exists(componentName))
{
*pDiags_ << DgSqlCode(-CAT_TABLE_DOES_NOT_EXIST_ERROR)
<< DgTableName(componentName.c_str());
return STATUS_ERROR;
}
// Component exists, fetch data for this component
std::string componentUIDString;
int64_t componentUID;
bool isSystemComponent;
std::string tempStr;
component.fetchByName(componentName,
componentUIDString,
componentUID,
isSystemComponent,
tempStr);
// OK, the component is defined, what about the operation?
if (!nameExists(componentUID,operationName))
{
*pDiags_ << DgSqlCode(-CAT_TABLE_DOES_NOT_EXIST_ERROR)
<< DgTableName(operationName.c_str());
return STATUS_ERROR;
}
//Operation exists, get the data.
std::string operationCode;
bool isSystemOperation = FALSE;
fetchByName(componentUIDString,operationName,operationCode,isSystemOperation,
tempStr);
//
// Has operation been granted to any authID?
//
if (dropBehavior == PrivDropBehavior::RESTRICT &&
componentPrivileges.isGranted(componentUIDString,operationCode,true))
{
*pDiags_ << DgSqlCode(-CAT_DEPENDENT_OBJECTS_EXIST);
return STATUS_ERROR;
}
// Either CASCADE, or RESTRICT and there are no user grants - drop away!
PrivStatus privStatus = componentPrivileges.dropAllForOperation(componentUIDString,
operationCode);
if (privStatus != STATUS_GOOD)
return privStatus;
// Delete row in COMPONENT_OPERATIONS table.
MyTable &myTable = static_cast<MyTable &>(myTable_);
std::string whereClause("WHERE COMPONENT_UID = ");
whereClause += componentUIDString;
whereClause += " AND OPERATION_NAME = '";
whereClause += operationName;
whereClause += "'";
return myTable.deleteWhere(whereClause);
}
//************* End of PrivMgrComponentOperations::dropOperation ***************
// *****************************************************************************
// * *
// * Function: PrivMgrComponentOperations::fetchByName *
// * *
// * This function reads the row in the COMPONENT_OPERATIONS tables for *
// * the specified component operation and returns the associated operation *
// * code and description. *
// * *
// *****************************************************************************
// * *
// * Parameters: *
// * *
// * <componentUIDString> const std::string & In *
// * is the component unique ID as a numeric string. *
// * *
// * <operationName> const std::string & In *
// * is the name of the component operation in upper case. *
// * *
// * <operationCode> std::string & Out *
// * passes back the code associated with the component operation. *
// * *
// * <isSystem> bool & Out *
// * passes back true if the component operation is a system level *
// * *
// * <operationDescription> std::string & Out *
// * passes back the description of the component operation. *
// * *
// *****************************************************************************
// * *
// * Returns: PrivStatus *
// * *
// * STATUS_GOOD: Row read successfully, data returned. *
// * *
// * STATUS_NOTFOUND: No rows that matched, or error encountered. *
// * *
// *****************************************************************************
PrivStatus PrivMgrComponentOperations::fetchByName(
const std::string & componentUIDString,
const std::string & operationName,
std::string & operationCode,
   bool & isSystem,
std::string & operationDescription)
{
MyRow row(fullTableName_);
MyTable &myTable = static_cast<MyTable &>(myTable_);
PrivStatus privStatus = myTable.fetchByName(componentUIDString,operationName,row);
if (privStatus == STATUS_NOTFOUND || privStatus == STATUS_ERROR)
return STATUS_NOTFOUND;
operationCode = row.operationCode_;
isSystem = (row.operationType_ == OP_TYPE_SYSTEM);
operationDescription = row.operationDescription_;
return STATUS_GOOD;
}
//*************** End of PrivMgrComponentOperations::fetchByName ***************
// *****************************************************************************
// * *
// * Function: PrivMgrComponentOperations::getCount *
// * *
// * Returns: *
// * the total number of operations *
// * the number of unused operations *
// * *
// *****************************************************************************
// * *
// * Returns: PrivStatus *
// * *
// * STATUS_GOOD : found operations *
// * STATUS_NOTFOUND : no operations were found *
// * STATUS_ERROR : unexpected error reading metadata *
// * *
// *****************************************************************************
PrivStatus PrivMgrComponentOperations::getCount(
const int64_t &componentUID,
int32_t &numOps,
int32_t &numUnusedOps)
{
char buf[getMetadataLocation().size() + 300];
snprintf (buf, sizeof(buf), "select distinct is_system, count(is_system) over "
"(partition by is_system) from %s.%s where component_uid = %ld",
getMetadataLocation().c_str(),PRIVMGR_COMPONENT_OPERATIONS,
componentUID);
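   // The query returns one row per distinct IS_SYSTEM value, paired with the
   // number of operations having that value (computed by the window function).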
// set pointer in diags area
int32_t diagsMark = pDiags_->mark();
ExeCliInterface cliInterface(STMTHEAP, 0, NULL,
CmpCommon::context()->sqlSession()->getParentQid());
Queue * tableQueue = NULL;
int32_t cliRC = cliInterface.fetchAllRows(tableQueue, buf, 0, false, false, true);
if (cliRC < 0)
{
cliInterface.retrieveSQLDiagnostics(CmpCommon::diags());
return STATUS_ERROR;
}
if (cliRC == 100) // did not find the row
{
pDiags_->rewind(diagsMark);
return STATUS_NOTFOUND;
}
numOps = 0;
numUnusedOps = 0;
char * ptr = NULL;
int32_t len = 0;
char value[3];
int32_t opTypeCount;
// column 0: operation type
// column 1: count of rows for operation type
tableQueue->position();
for (int idx = 0; idx < tableQueue->numEntries(); idx++)
{
OutputInfo * pCliRow = (OutputInfo*)tableQueue->getNext();
pCliRow->get(0,ptr,len);
strncpy(value,ptr,len);
value[len] = 0;
pCliRow->get(1,ptr,len);
opTypeCount = *(reinterpret_cast<int32_t*>(ptr));
numOps += opTypeCount;
if (value[0] == 'U')
numUnusedOps += opTypeCount;
}
return STATUS_GOOD;
}
//***************** End of PrivMgrComponentOperations::getCount ****************
// *****************************************************************************
// * *
// * Function: PrivMgrComponentOperations::isComponentUsed *
// * *
// * Determines if a component is used in the COMPONENT_OPERATIONS table. *
// * *
// *****************************************************************************
// * *
// * Parameters: *
// * *
// * <componentUIDString> const std::string & In *
// * is the component unique ID as a numeric string. *
// * *
// *****************************************************************************
// * *
// * Returns: bool *
// * *
// * true: Component is used in COMPONENT_OPERATIONS table. *
// *  false: Component is not used in COMPONENT_OPERATIONS table, or error      *
// *         trying to read from COMPONENT_OPERATIONS table.                    *
// * *
// *****************************************************************************
bool PrivMgrComponentOperations::isComponentUsed(const std::string & componentUIDString)
{
MyRow row(fullTableName_);
std::string whereClause("WHERE COMPONENT_UID = ");
whereClause += componentUIDString;
// set pointer in diags area
int32_t diagsMark = pDiags_->mark();
MyTable &myTable = static_cast<MyTable &>(myTable_);
PrivStatus privStatus = myTable.selectWhereUnique(whereClause,row);
if (privStatus == STATUS_GOOD || privStatus == STATUS_WARNING)
return true;
// If not found or any other error is returned, rewind the diagnostics and
// return false.
pDiags_->rewind(diagsMark);
return false;
}
//************* End of PrivMgrComponentOperations::isComponentUsed *************
// *****************************************************************************
// method: updateOperationCodes
//
// Goes through the ComponentOpStruct for the sql_operations component and
// creates two lists:
// list of unused operations
// list of system operations.
//
// Updates the component_operations table and
// sets is_system to "U" for unused operations
// sets is_system to "Y" for system operations
//
// TBD - add support for all components, not just sql_operations
// *****************************************************************************
PrivStatus PrivMgrComponentOperations::updateOperationCodes(
const int64_t & componentUID )
{
if (componentUID != SQL_OPERATIONS_COMPONENT_UID)
{
PRIVMGR_INTERNAL_ERROR("Invalid component UID in PrivMgrComponentOperations::updateOperationCodes");
return STATUS_ERROR;
}
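   // Build two IN-list predicates of the form
   //   where component_uid = <uid> and operation_code in ('<code>', ...)
   // one listing the codes to mark unused, the other the codes to mark system.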
std::string unusedItems ("where component_uid = ");
unusedItems += UIDToString(componentUID);
unusedItems += " and operation_code in (";
std::string systemItems(unusedItems);
size_t numOps = sizeof(sqlOpList)/sizeof(ComponentOpStruct);
bool firstUnusedOp = true;
bool firstSystemOp = true;
for (int i = 0; i < numOps; i++)
{
const ComponentOpStruct &opDefinition = sqlOpList[i];
if (opDefinition.unusedOp)
{
if (firstUnusedOp)
{
unusedItems += "'";
firstUnusedOp = false;
}
else
unusedItems += ", '";
unusedItems += opDefinition.operationCode;
unusedItems += "'";
}
else
{
if (firstSystemOp)
{
systemItems += "'";
firstSystemOp = false;
}
else
systemItems += ", '";
systemItems += opDefinition.operationCode;
systemItems += "'";
}
}
MyTable &myTable = static_cast<MyTable &>(myTable_);
// Change system components to unused components
if (!firstUnusedOp)
{
unusedItems += ")";
std::string setClause("set is_system = 'U' ");
if (myTable.update(setClause, unusedItems) == STATUS_ERROR)
return STATUS_ERROR;
}
// Change unused components to system components
if (!firstSystemOp)
{
systemItems += ")";
std::string setClause("set is_system = 'Y' ");
if (myTable.update(setClause, systemItems) == STATUS_ERROR)
return STATUS_ERROR;
}
return STATUS_GOOD;
}
// *****************************************************************************
// * *
// * Function: PrivMgrComponentOperations::nameExists *
// * *
// * This function determines if a specific component operation has been *
// * defined in Privilege Manager metadata. *
// * *
// *****************************************************************************
// * *
// * Parameters: *
// * *
// * <componentUID> const int64_t In *
// * is the unique ID associated with the component. *
// * *
// * <operationName> const std::string & In *
// * is the name of the operation in upper case. *
// * *
// *****************************************************************************
// * *
// * Returns: bool *
// * *
// * true: Operation has been created. *
// * false: Operation does not exist or error encountered. *
// * *
// *****************************************************************************
bool PrivMgrComponentOperations::nameExists(
const int64_t componentUID,
const std::string & operationName)
{
MyRow row(fullTableName_);
MyTable &myTable = static_cast<MyTable &>(myTable_);
PrivStatus privStatus = myTable.fetchByName(componentUID,operationName,row);
if (privStatus == STATUS_GOOD || privStatus == STATUS_WARNING)
return true;
return false;
}
//******************** End of PrivMgrComponents::nameExists ********************
// *****************************************************************************
// MyTable methods
// *****************************************************************************
// *****************************************************************************
// * *
// * Function: MyTable::fetchByCode *
// * *
// * Reads from the COMPONENT_OPERATIONS table and returns the row *
// * associated with the specified operation code. *
// * *
// *****************************************************************************
// * *
// * Parameters: *
// * *
// * <componentUID> const int64_t In *
// * is the unique ID associated with the component. *
// * *
// * <operationCode> const std::string & In *
// * is the two digit code associated with the operation. *
// * *
// * <row> MyRow & Out *
// * passes back a reference to MyRow, containing the data read. *
// * *
// *****************************************************************************
// * *
// * Returns: PrivStatus *
// * *
// * STATUS_GOOD: Found code, row returned. *
// * *: Code not found or error encountered. *
// * *
// *****************************************************************************
PrivStatus MyTable::fetchByCode(
const int64_t componentUID,
const std::string & operationCode,
MyRow & row)
{
// Check the last row read before reading metadata.
if (lastRowRead_.lookupByCode(componentUID,operationCode,
row.operationName_,row.operationType_,
row.operationDescription_))
{
row.componentUID_ = componentUID;
return STATUS_GOOD;
}
// Not found in cache, look for the component name in metadata.
std::string whereClause("WHERE COMPONENT_UID = ");
whereClause += PrivMgr::UIDToString(componentUID);
whereClause += " AND OPERATION_CODE = '";
whereClause += operationCode;
whereClause += "'";
PrivStatus privStatus = selectWhereUnique(whereClause,row);
switch (privStatus)
{
// Return status to caller to handle
case STATUS_NOTFOUND:
case STATUS_ERROR:
return privStatus;
break;
// Object exists
case STATUS_GOOD:
case STATUS_WARNING:
return STATUS_GOOD;
break;
// Should not occur, internal error
default:
PRIVMGR_INTERNAL_ERROR("Switch statement in PrivMgrComponentOperations::MyTable::fetchByCode()");
return STATUS_ERROR;
break;
}
return STATUS_GOOD;
}
//********************** End of MyTable::fetchByCode ***************************
// *****************************************************************************
// * *
// * Function: MyTable::fetchByName *
// * *
// * Reads from the COMPONENT_OPERATIONS table and returns the row *
// * associated with the specified operation name. *
// * *
// *****************************************************************************
// * *
// * Parameters: *
// * *
// * <componentUIDString> const std::string & In *
// * is the unique ID associated with the component in string format. *
// * *
// * <operationName> const std::string & In *
// * is the name of the operation. Name is assumed to be upper case. *
// * *
// * <row> MyRow & Out *
// * passes back a reference to MyRow, containing the data read. *
// * *
// *****************************************************************************
// * *
// * Returns: PrivStatus *
// * *
// * STATUS_GOOD: Found name, row returned. *
// * *: Name not found or error encountered. *
// * *
// *****************************************************************************
PrivStatus MyTable::fetchByName(
const std::string & componentUIDString,
const std::string & operationName,
MyRow & row)
{
int64_t componentUID = atol(componentUIDString.c_str());
return fetchByName(componentUID,operationName,row);
}
//********************** End of MyTable::fetchByName ***************************
// *****************************************************************************
// * *
// * Function: MyTable::fetchByName *
// * *
// * Reads from the COMPONENT_OPERATIONS table and returns the row *
// * associated with the specified operation name. *
// * *
// *****************************************************************************
// * *
// * Parameters: *
// * *
// * <componentUID> const int64_t In *
// * is the unique ID associated with the component. *
// * *
// * <operationName> const std::string & In *
// * is the name of the operation. Name is assumed to be upper case. *
// * *
// * <row> MyRow & Out *
// * passes back a reference to MyRow, containing the data read. *
// * *
// *****************************************************************************
// * *
// * Returns: PrivStatus *
// * *
// * STATUS_GOOD: Found name, row returned. *
// * *: Name not found or error encountered. *
// * *
// *****************************************************************************
PrivStatus MyTable::fetchByName(
const int64_t componentUID,
const std::string & operationName,
MyRow & row)
{
// Check the last row read before reading metadata.
if (lastRowRead_.lookupByName(componentUID,operationName,
row.operationCode_,row.operationType_,
row.operationDescription_))
{
row.componentUID_ = componentUID;
return STATUS_GOOD;
}
// Not found in cache, look for the component name in metadata.
std::string whereClause("WHERE COMPONENT_UID = ");
whereClause += PrivMgr::UIDToString(componentUID);
whereClause += " AND OPERATION_NAME = '";
whereClause += operationName;
whereClause += "'";
PrivStatus privStatus = selectWhereUnique(whereClause,row);
switch (privStatus)
{
// Return status to caller to handle
case STATUS_NOTFOUND:
case STATUS_ERROR:
return privStatus;
break;
// Object exists
case STATUS_GOOD:
case STATUS_WARNING:
return STATUS_GOOD;
break;
// Should not occur, internal error
default:
PRIVMGR_INTERNAL_ERROR("Switch statement in PrivMgrComponentOperations::MyTable::fetchByName()");
return STATUS_ERROR;
break;
}
return STATUS_GOOD;
}
//********************** End of MyTable::fetchByName ***************************
// *****************************************************************************
// * *
// * Function: MyTable::insert *
// * *
// * Inserts a row into the COMPONENT_OPERATIONS table. *
// * *
// *****************************************************************************
// * *
// * Parameters: *
// * *
// * <rowIn> const PrivMgrMDRow & In *
// * is a MyRow to be inserted. *
// * *
// *****************************************************************************
// * *
// * Returns: PrivStatus *
// * *
// * STATUS_GOOD: Row inserted. *
// * *: Insert failed. A CLI error is put into the diags area. *
// * *
// *****************************************************************************
PrivStatus MyTable::insert(const PrivMgrMDRow & rowIn)
{
char insertStatement[1000];
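   // Assumes the formatted statement fits in the 1000-byte buffer.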
const MyRow & row = static_cast<const MyRow &>(rowIn);
char operationType = PrivMgrComponentOperations::compTypeToLit(row.operationType_);
sprintf(insertStatement, "insert into %s values (%ld, '%s', '%s', '%c', '%s')",
tableName_.c_str(),
row.componentUID_,
row.operationCode_.c_str(),
row.operationName_.c_str(),
operationType,
row.operationDescription_.c_str());
return CLIImmediate(insertStatement);
}
//************************** End of MyTable::insert ****************************
// *****************************************************************************
// * *
// * Function: MyTable::selectWhereUnique *
// * *
// * Selects a row from the COMPONENT_OPERATIONS table based on the *
// * specified WHERE clause. *
// * *
// *****************************************************************************
// * *
// * Parameters: *
// * *
// * <whereClause> const std::string & In *
// * is the WHERE clause specifying a unique row. *
// * *
// * <rowOut> PrivMgrMDRow & Out *
// *    passes back a reference to a MyRow.                                     *
// * *
// *****************************************************************************
// * *
// * Returns: PrivStatus *
// * *
// * STATUS_GOOD: Row returned. *
// * *: Select failed. A CLI error is put into the diags area. *
// * *
// *****************************************************************************
PrivStatus MyTable::selectWhereUnique(
const std::string & whereClause,
PrivMgrMDRow & rowOut)
{
// Generate the select statement
std::string selectStmt ("SELECT COMPONENT_UID, OPERATION_CODE, OPERATION_NAME, IS_SYSTEM, OPERATION_DESCRIPTION FROM ");
selectStmt += tableName_;
selectStmt += " ";
selectStmt += whereClause;
ExeCliInterface cliInterface(STMTHEAP, 0, NULL,
CmpCommon::context()->sqlSession()->getParentQid());
PrivStatus privStatus = CLIFetch(cliInterface,selectStmt);
if (privStatus != STATUS_GOOD && privStatus != STATUS_WARNING)
return privStatus;
char * ptr = NULL;
Lng32 len = 0;
char value[500];
MyRow & row = static_cast<MyRow &>(rowOut);
// column 1: component_uid
cliInterface.getPtrAndLen(1,ptr,len);
row.componentUID_ = *(reinterpret_cast<int64_t*>(ptr));
// column 2: operation_code
cliInterface.getPtrAndLen(2,ptr,len);
strncpy(value,ptr,len);
value[len] = 0;
row.operationCode_ = value;
// column 3: operation_name
cliInterface.getPtrAndLen(3,ptr,len);
strncpy(value,ptr,len);
value[len] = 0;
row.operationName_ = value;
// column 4: is_system
cliInterface.getPtrAndLen(4,ptr,len);
strncpy(value,ptr,len);
value[len] = 0;
row.operationType_ = PrivMgrComponentOperations::compTypeToEnum(value[0]);
// column 5: operation_description
cliInterface.getPtrAndLen(5,ptr,len);
strncpy(value,ptr,len);
value[len] = 0;
row.operationDescription_ = value;
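   // Cache this row; fetchByName/fetchByCode consult lastRowRead_ before
   // querying metadata again.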
lastRowRead_ = row;
return STATUS_GOOD;
}
//********************* End of MyTable::selectWhereUnique **********************
// *****************************************************************************
// method: update
//
// Updates metadata based on the passed in set and where clauses.
// *****************************************************************************
PrivStatus MyTable::update(
const std::string & setClause,
const std::string & whereClause)
{
char updateStatement[setClause.size() + whereClause.size() + tableName_.size() + 100];
sprintf(updateStatement, "update %s %s %s",
tableName_.c_str(),
setClause.c_str(),
whereClause.c_str());
return CLIImmediate(updateStatement);
}
// *****************************************************************************
// MyRow methods
// *****************************************************************************
// *****************************************************************************
// * *
// * Function: MyRow::lookupByCode *
// * *
// * Looks for a specified component operation name in cache, and if found, *
// * returns the associated data. *
// * *
// *****************************************************************************
// * *
// * Parameters: *
// * *
// * <componentUID> const int64_t In *
// * is the unique ID associated with the component. *
// * *
// * <operationCode> const std::string & In *
// * is the code associated with the component operation in upper case. *
// * *
// * <operationName> std::string & Out *
// * passes back the name of the component operation. *
// * *
// * <operationType> OperationType & Out *
// * passes back the component type, system, user, or unused. *
// * *
// * <operationDescription> std::string & Out *
// * passes back the description of the component operation. *
// * *
// *****************************************************************************
// * *
// * Returns: bool *
// * *
// * true: Component name found in cache. *
// * false: Component name not found in cache. *
// * *
// *****************************************************************************
bool MyRow::lookupByCode(
const int64_t componentUID,
const std::string & operationCode,
std::string & operationName,
PrivMgrComponentOperations::OperationType & operationType,
std::string & operationDescription)
{
// If componentUID_ is zero, that means data is uninitialized.
if (componentUID_ == 0 ||
componentUID != componentUID_ ||
       operationCode != operationCode_)
return false;
operationType = operationType_;
operationName = operationName_;
operationDescription = operationDescription_;
return true;
}
//************************ End of MyRow::lookupByCode **************************
// *****************************************************************************
// * *
// * Function: MyRow::lookupByName *
// * *
// * Looks for a specified component operation name in cache, and if found, *
// * returns the associated data. *
// * *
// *****************************************************************************
// * *
// * Parameters: *
// * *
// *  <componentUID>                  const int64_t                      In       *
// * is the unique ID associated with the component. *
// * *
// * <operationName> const std::string & In *
// * is the name of the operation. Name is assumed to be upper case. *
// * *
// * <operationCode> std::string & Out *
// * passes back the code associated with the component operation. *
// * *
// *  <operationType>                 OperationType &                    Out      *
// * passes back the component type, system, user, or unused. *
// * *
// * <operationDescription> std::string & Out *
// * passes back the description of the component operation. *
// * *
// *****************************************************************************
// * *
// * Returns: bool *
// * *
// * true: Component name found in cache. *
// * false: Component name not found in cache. *
// * *
// *****************************************************************************
bool MyRow::lookupByName(
const int64_t componentUID,
const std::string & operationName,
std::string & operationCode,
PrivMgrComponentOperations::OperationType & operationType,
std::string & operationDescription)
{
// If componentUID_ is zero, that means data is uninitialized.
if (componentUID_ == 0 ||
componentUID != componentUID_ ||
operationName != operationName_)
return false;
operationType = operationType_;
operationCode = operationCode_;
operationDescription = operationDescription_;
return true;
}
//************************ End of MyRow::lookupByName **************************
// ****************************************************************************
// * method: MyTable::selectWhere
// *
// * Selects rows from the COMPONENT_OPERATIONS table based on the specified
// * WHERE clause.
// *
// * Parameters:
// *
// * whereClause is the WHERE clause
// * rowList passes back array of wanted COMPONENT_OPERATIONS rows
// *
// * Returns: PrivStatus
// *
// * STATUS_GOOD: Row returned.
// * *: Select failed. A CLI error is put into the diags area.
// *****************************************************************************
PrivStatus MyTable::selectWhere(
const std::string & whereClause,
std::vector<MyRow *> &rowList)
{
std::string selectStmt("SELECT COMPONENT_UID, OPERATION_CODE, OPERATION_NAME, IS_SYSTEM, TRIM(OPERATION_DESCRIPTION) FROM ");
selectStmt += tableName_;
selectStmt += " ";
selectStmt += whereClause;
// set pointer in diags area
int32_t diagsMark = pDiags_->mark();
ExeCliInterface cliInterface(STMTHEAP, 0, NULL,
CmpCommon::context()->sqlSession()->getParentQid());
Queue * tableQueue = NULL;
int32_t cliRC = cliInterface.fetchAllRows(tableQueue,
(char *)selectStmt.c_str(),
0, false, false, true);
if (cliRC < 0)
{
cliInterface.retrieveSQLDiagnostics(CmpCommon::diags());
return STATUS_ERROR;
}
if (cliRC == 100) // did not find the row
{
pDiags_->rewind(diagsMark);
return STATUS_NOTFOUND;
}
tableQueue->position();
for (int idx = 0; idx < tableQueue->numEntries(); idx++)
{
OutputInfo * pCliRow = (OutputInfo*)tableQueue->getNext();
MyRow *pRow = new MyRow(tableName_);
setRow(pCliRow, *pRow);
rowList.push_back(pRow);
}
// TBD: need code to delete the rowList
return STATUS_GOOD;
}
// *****************************************************************************
// * method: MyTable::setRow
// *
// * Extract information(OutputInfo) returned from cli,
// * and fill a COMPONENT_OPERATIONS row object with the information.
// *
// * Parameters:
// *
// *   pCliRow     row details from the cli
// * row passes back filled row object
// *
// * no errors should be generated
// *****************************************************************************
// Row read successfully. Extract the columns.
void MyTable::setRow(OutputInfo *pCliRow, MyRow &row)
{
char * ptr = NULL;
Int32 len = 0;
char value[500];
// column 1: COMPONENT_UID
pCliRow->get(0,ptr,len);
row.componentUID_ = *(reinterpret_cast<int64_t*>(ptr));
// column 2: OPERATION_CODE
pCliRow->get(1,ptr,len);
strncpy(value, ptr, len);
value[len] = 0;
row.operationCode_= value;
// column 3: OPERATION_NAME
pCliRow->get(2,ptr,len);
strncpy(value, ptr, len);
value[len] = 0;
row.operationName_= value;
// column 4: IS_SYSTEM
pCliRow->get(3,ptr,len);
strncpy(value,ptr,len);
value[len] = 0;
row.operationType_ = PrivMgrComponentOperations::compTypeToEnum(value[0]);
// column 5: OPERATION_DESCRIPTION
pCliRow->get(4,ptr,len);
strncpy(value, ptr, len);
value[len] = 0;
row.operationDescription_ = value;
}<|fim▁end|> | |
// File: struct-no-fields-5.rs
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

struct Foo;

fn i5() {
    let _end_of_block = { Foo { } };
    //~^ ERROR: structure literal must either have at least one field
}

fn main() {}
// File: ArtistDetailActivity.java
package com.leychina.activity;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.webkit.WebChromeClient;
import android.widget.ImageView;
import android.widget.TextView;
import com.leychina.R;
import com.leychina.model.Artist;
import com.leychina.value.Constant;
import com.leychina.widget.tabindicator.TouchyWebView;
import com.squareup.picasso.Picasso;
/**
* Created by yuandunlong on 11/21/15.
*/
public class ArtistDetailActivity extends AppCompatActivity {
TouchyWebView webview;
Artist artist;
ImageView imageView;
TextView weight, height, name, birthday;
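    // Convenience launcher that packs the selected Artist into the Intent extras.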
public static void start(Context from, Artist artist) {
Intent intent = new Intent(from, ArtistDetailActivity.class);
intent.putExtra("artist", artist);
from.startActivity(intent);
}
@Override
protected void onCreate(Bundle savedInstanceState) {
Intent intent = getIntent();
this.artist = (Artist) intent.getSerializableExtra("artist");
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_artist_detail);
webview = (TouchyWebView) findViewById(R.id.artist_webview);
imageView = (ImageView) findViewById(R.id.image_view_artist_cover);
Picasso.with(this).load(Constant.DOMAIN+"/static/upload/"+artist.getPhoto()).into(imageView);
name = (TextView) findViewById(R.id.text_view_name);
height = (TextView) findViewById(R.id.text_view_height);
weight = (TextView) findViewById(R.id.text_view_weight);
birthday= (TextView) findViewById(R.id.text_view_birthday);
// name.setText(artist.get);
weight.setText(artist.getWeight() + "");
height.setText(artist.getHeight() + "");
birthday.setText(artist.getBlood());
webview.setWebChromeClient(new WebChromeClient());
webview.getSettings().setDefaultTextEncodingName("utf-8");
webview.loadDataWithBaseURL(Constant.DOMAIN, artist.getDescription(), "text/html", "utf-8","");
}
}
// File: pcmp.rs
//! SSE4.2 (pcmpestri) accelerated substring search
//!
//! Using the two way substring search algorithm.
// wssm word size string matching<br>
// wslm word size lexicographical maximum suffix
//
#![allow(dead_code)]
extern crate unchecked_index;
extern crate memchr;
use std::cmp;
use std::mem;
use std::iter::Zip;
use self::unchecked_index::get_unchecked;
use TwoWaySearcher;
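/// Zip two iterables together (convenience wrapper around `Iterator::zip`).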
fn zip<I, J>(i: I, j: J) -> Zip<I::IntoIter, J::IntoIter>
where I: IntoIterator,
J: IntoIterator
{
i.into_iter().zip(j)
}
#[cfg(target_arch = "x86")]
use std::arch::x86::*;
#[cfg(target_arch = "x86_64")]
use std::arch::x86_64::*;
/// `pcmpestri`
///
/// “Packed compare explicit length strings (return index)”
///
/// PCMPESTRI xmm1, xmm2/m128, imm8
///
/// Return value: least index for start of (partial) match, (16 if no match).
///
/// Mask: `text` can be at at any point in valid memory, as long as `text_len`
/// bytes are readable.
#[target_feature(enable = "sse4.2")]
unsafe fn pcmpestri_16_mask(text: *const u8, offset: usize, text_len: usize,
needle: __m128i, needle_len: usize) -> u32 {
//debug_assert!(text_len + offset <= text.len()); // saturates at 16
//debug_assert!(needle_len <= 16); // saturates at 16
let text = mask_load(text.offset(offset as _) as *const _, text_len);
_mm_cmpestri(needle, needle_len as _, text, text_len as _, _SIDD_CMP_EQUAL_ORDERED) as _
}
/// `pcmpestri`
///
/// “Packed compare explicit length strings (return index)”
///
/// PCMPESTRI xmm1, xmm2/m128, imm8
///
/// Return value: least index for start of (partial) match, (16 if no match).
///
/// No mask: `text` must be at least 16 bytes from the end of a memory region.
#[target_feature(enable = "sse4.2")]
unsafe fn pcmpestri_16_nomask(text: *const u8, offset: usize, text_len: usize,
needle: __m128i, needle_len: usize) -> u32 {
//debug_assert!(text_len + offset <= text.len()); // saturates at 16
//debug_assert!(needle_len <= 16); // saturates at 16
let text = _mm_loadu_si128(text.offset(offset as _) as *const _);
_mm_cmpestri(needle, needle_len as _, text, text_len as _, _SIDD_CMP_EQUAL_ORDERED) as _
}
/// `pcmpestrm`
///
/// “Packed compare explicit length strings (return mask)”
///
/// PCMPESTRM xmm1, xmm2/m128, imm8
///
/// Return value: bitmask in the 16 lsb of the return value.
#[target_feature(enable = "sse4.2")]
unsafe fn pcmpestrm_eq_each(text: *const u8, offset: usize, text_len: usize,
needle: *const u8, noffset: usize, needle_len: usize) -> u64 {
// NOTE: text *must* be readable for 16 bytes
// NOTE: needle *must* be readable for 16 bytes
//debug_assert!(text_len + offset <= text.len()); // saturates at 16
//debug_assert!(needle_len <= 16); // saturates at 16
let needle = _mm_loadu_si128(needle.offset(noffset as _) as *const _);
let text = _mm_loadu_si128(text.offset(offset as _) as *const _);
let mask = _mm_cmpestrm(needle, needle_len as _, text, text_len as _, _SIDD_CMP_EQUAL_EACH);
#[cfg(target_arch = "x86")] {
        _mm_extract_epi32(mask, 0) as u64 | (_mm_extract_epi32(mask, 1) as u64) << 32
}
#[cfg(target_arch = "x86_64")] {
_mm_extract_epi64(mask, 0) as _
}
}
/// Search for first possible match of `pat` -- might be just a byte
/// Return `(pos, length)` length of match
#[cfg(test)]
fn first_start_of_match(text: &[u8], pat: &[u8]) -> Option<(usize, usize)> {
// not safe for text that is non aligned and ends at page boundary
let patl = pat.len();
assert!(patl <= 16);
unsafe { first_start_of_match_mask(text, pat.len(), pat128(pat)) }
}
/// Safe wrapper around pcmpestri to find first match of `p` in `text`.
/// safe to search unaligned for first start of match
///
/// the end of text can be close (within 16 bytes) to a page boundary
#[target_feature(enable = "sse4.2")]
unsafe fn first_start_of_match_mask(text: &[u8], pat_len: usize, p: __m128i) -> Option<(usize, usize)> {
let tp = text.as_ptr();
debug_assert!(pat_len <= 16);
let mut offset = 0;
while text.len() >= offset + pat_len {
let tlen = text.len() - offset;
let ret = pcmpestri_16_mask(tp, offset, tlen, p, pat_len) as usize;
if ret == 16 {
offset += 16;
} else {
let match_len = cmp::min(pat_len, 16 - ret);
return Some((offset + ret, match_len));
}
}
None
}
/// Safe wrapper around pcmpestri to find first match of `p` in `text`.
/// safe to search unaligned for first start of match
///
/// unsafe because the end of text must not be close (within 16 bytes) of a page boundary
#[target_feature(enable = "sse4.2")]
unsafe fn first_start_of_match_nomask(text: &[u8], pat_len: usize, p: __m128i) -> Option<(usize, usize)> {
let tp = text.as_ptr();
debug_assert!(pat_len <= 16);
debug_assert!(pat_len <= text.len());
let mut offset = 0;
while text.len() - pat_len >= offset {
let tlen = text.len() - offset;
let ret = pcmpestri_16_nomask(tp, offset, tlen, p, pat_len) as usize;
if ret == 16 {
offset += 16;
} else {
let match_len = cmp::min(pat_len, 16 - ret);
return Some((offset + ret, match_len));
}
}
None
}
#[test]
fn test_first_start_of_match() {
let text = b"abc";
let longer = "longer text and so on";
assert_eq!(first_start_of_match(text, b"d"), None);
assert_eq!(first_start_of_match(text, b"c"), Some((2, 1)));
assert_eq!(first_start_of_match(text, b"abc"), Some((0, 3)));
assert_eq!(first_start_of_match(text, b"T"), None);
assert_eq!(first_start_of_match(text, b"\0text"), None);
assert_eq!(first_start_of_match(text, b"\0"), None);
// test all windows
for wsz in 1..17 {
for window in longer.as_bytes().windows(wsz) {
let str_find = longer.find(::std::str::from_utf8(window).unwrap());
assert!(str_find.is_some());
let first_start = first_start_of_match(longer.as_bytes(), window);
assert!(first_start.is_some());
let (pos, len) = first_start.unwrap();
assert!(len <= wsz);
assert!(len == wsz && Some(pos) == str_find
|| pos <= str_find.unwrap());
}
}
}
fn find_2byte_pat(text: &[u8], pat: &[u8]) -> Option<(usize, usize)> {
debug_assert!(text.len() >= pat.len());
debug_assert!(pat.len() == 2);
// Search for the second byte of the pattern, not the first, better for
// scripts where we have two-byte encoded codepoints (the first byte will
// repeat much more often than the second).
let mut off = 1;
while let Some(i) = memchr::memchr(pat[1], &text[off..]) {
match text.get(off + i - 1) {
None => break,
Some(&c) if c == pat[0] => return Some((off + i - 1, off + i + 1)),
_ => off += i + 1,
}
}
None
}
/// Simd text search optimized for short patterns (<= 8 bytes)
#[target_feature(enable = "sse4.2")]
unsafe fn find_short_pat(text: &[u8], pat: &[u8]) -> Option<usize> {
debug_assert!(pat.len() <= 8);
/*
if pat.len() == 2 {
return find_2byte_pat(text, pat);
}
*/
let r = pat128(pat);
// safe part of text -- everything but the last 16 bytes
let safetext = &text[..cmp::max(text.len(), 16) - 16];
let mut pos = 0;
'search: loop {
if pos + pat.len() > safetext.len() {
break;
}
// find the next occurence
match first_start_of_match_nomask(&safetext[pos..], pat.len(), r) {
None => {
pos = cmp::max(pos, safetext.len() - pat.len());
break // no matches
}
Some((mpos, mlen)) => {
pos += mpos;
if mlen < pat.len() {
if pos > text.len() - pat.len() {
return None;
}
for (&a, &b) in zip(&text[pos + mlen..], &pat[mlen..]) {
if a != b {
pos += 1;
continue 'search;
}
}
}
return Some(pos);
}
}
}
'tail: loop {
if pos > text.len() - pat.len() {
return None;
}
// find the next occurence
match first_start_of_match_mask(&text[pos..], pat.len(), r) {
None => return None, // no matches
Some((mpos, mlen)) => {
pos += mpos;
if mlen < pat.len() {
if pos > text.len() - pat.len() {
return None;
}
for (&a, &b) in zip(&text[pos + mlen..], &pat[mlen..]) {
if a != b {
pos += 1;
continue 'tail;
}
}
}
return Some(pos);
}
}
}
}
/// `is_supported` checks whether necessary SSE 4.2 feature is supported on current CPU.
pub fn is_supported() -> bool {
#[cfg(feature = "use_std")]
return is_x86_feature_detected!("sse4.2");
#[cfg(not(feature = "use_std"))]
return cfg!(target_feature = "sse4.2");
}
/// `find` finds the first ocurrence of `pattern` in the `text`.
///
/// This is the SSE42 accelerated version.
pub fn find(text: &[u8], pattern: &[u8]) -> Option<usize> {
assert!(is_supported());
if pattern.is_empty() {
return Some(0);
} else if text.len() < pattern.len() {
return None;
} else if pattern.len() == 1 {
return memchr::memchr(pattern[0], text);
} else {
unsafe { find_inner(text, pattern) }
}
}
#[target_feature(enable = "sse4.2")]
pub(crate) unsafe fn find_inner(text: &[u8], pat: &[u8]) -> Option<usize> {
if pat.len() <= 6 {
return find_short_pat(text, pat);
}
// real two way algorithm
//
// `memory` is the number of bytes of the left half that we already know
let (crit_pos, mut period) = TwoWaySearcher::crit_params(pat);
let mut memory;
if &pat[..crit_pos] == &pat[period.. period + crit_pos] {
memory = 0; // use memory
} else {
memory = !0; // !0 means memory is unused
// approximation to the true period
period = cmp::max(crit_pos, pat.len() - crit_pos) + 1;
}
//println!("pat: {:?}, crit={}, period={}", pat, crit_pos, period);
let (left, right) = pat.split_at(crit_pos);
let (right16, _right17) = right.split_at(cmp::min(16, right.len()));
assert!(right.len() != 0);
let r = pat128(right);
// safe part of text -- everything but the last 16 bytes
let safetext = &text[..cmp::max(text.len(), 16) - 16];
let mut pos = 0;
if memory == !0 {
// Long period case -- no memory, period is an approximation
'search: loop {
if pos + pat.len() > safetext.len() {
break;
}
// find the next occurence of the right half
let start = crit_pos;
match first_start_of_match_nomask(&safetext[pos + start..], right16.len(), r) {
None => {
pos = cmp::max(pos, safetext.len() - pat.len());
break // no matches
}
Some((mpos, mlen)) => {
pos += mpos;
let mut pfxlen = mlen;
if pfxlen < right.len() {
pfxlen += shared_prefix_inner(&text[pos + start + mlen..], &right[mlen..]);
}
if pfxlen != right.len() {
// partial match
// skip by the number of bytes matched
pos += pfxlen + 1;
continue 'search;
} else {
// matches right part
}
}
}
// See if the left part of the needle matches
// XXX: Original algorithm compares from right to left here
if left != &text[pos..pos + left.len()] {
pos += period;
continue 'search;
}
return Some(pos);
}
} else {
// Short period case -- use memory, true period
'search_memory: loop {
if pos + pat.len() > safetext.len() {
break;
}
// find the next occurence of the right half
//println!("memory trace pos={}, memory={}", pos, memory);
let mut pfxlen = if memory == 0 {
let start = crit_pos;
match first_start_of_match_nomask(&safetext[pos + start..], right16.len(), r) {
None => {
pos = cmp::max(pos, safetext.len() - pat.len());
break // no matches
}
Some((mpos, mlen)) => {
pos += mpos;
mlen
}
}
} else {
memory - crit_pos
};
if pfxlen < right.len() {
pfxlen += shared_prefix_inner(&text[pos + crit_pos + pfxlen..], &right[pfxlen..]);
}
if pfxlen != right.len() {
// partial match
// skip by the number of bytes matched
pos += pfxlen + 1;
memory = 0;
continue 'search_memory;
} else {
// matches right part
}
// See if the left part of the needle matches
// XXX: Original algorithm compares from right to left here
if memory <= left.len() && &left[memory..] != &text[pos + memory..pos + left.len()] {
pos += period;
memory = pat.len() - period;
continue 'search_memory;
}
return Some(pos);
}
}
// no memory used for final part
'tail: loop {
if pos > text.len() - pat.len() {
return None;
}
// find the next occurence of the right half
let start = crit_pos;
match first_start_of_match_mask(&text[pos + start..], right16.len(), r) {
None => return None,
Some((mpos, mlen)) => {
pos += mpos;
let mut pfxlen = mlen;
if pfxlen < right.len() {
pfxlen += shared_prefix_inner(&text[pos + start + mlen..], &right[mlen..]);
}
if pfxlen != right.len() {
// partial match
// skip by the number of bytes matched
pos += pfxlen + 1;
continue 'tail;
} else {
// matches right part
}
}
}
// See if the left part of the needle matches
// XXX: Original algorithm compares from right to left here
if left != &text[pos..pos + left.len()] {
pos += period;
continue 'tail;
}
return Some(pos);
}
}
#[test]
fn test_find() {
let text = b"abc";
assert_eq!(find(text, b"d"), None);
assert_eq!(find(text, b"c"), Some(2));
let longer = "longer text and so on, a bit more";
// test all windows
for wsz in 1..longer.len() {
for window in longer.as_bytes().windows(wsz) {
let str_find = longer.find(::std::str::from_utf8(window).unwrap());
assert!(str_find.is_some());
assert_eq!(find(longer.as_bytes(), window), str_find, "{:?} {:?}",
longer, ::std::str::from_utf8(window));
}
}
let pat = b"ger text and so on";
assert!(pat.len() > 16);
assert_eq!(Some(3), find(longer.as_bytes(), pat));
// test short period case
let text = "cbabababcbabababab";
let n = "abababab";
assert_eq!(text.find(n), find(text.as_bytes(), n.as_bytes()));
// memoized case -- this is tricky
let text = "cbababababababababababababababab";
let n = "abababab";
assert_eq!(text.find(n), find(text.as_bytes(), n.as_bytes()));
}
/// Load the first 16 bytes of `pat` into a SIMD vector.
#[inline(always)]
fn pat128(pat: &[u8]) -> __m128i {
unsafe {
mask_load(pat.as_ptr() as *const _, pat.len())
}
}
/// Load the first len bytes (maximum 16) from ptr into a vector, safely
#[inline(always)]
unsafe fn mask_load(ptr: *const u8, mut len: usize) -> __m128i {
let mut data: __m128i = _mm_setzero_si128();
len = cmp::min(len, mem::size_of_val(&data));
::std::ptr::copy_nonoverlapping(ptr, &mut data as *mut _ as _, len);
return data;
}
/// Find longest shared prefix, return its length
///
/// Alignment safe: works for any text, pat.
pub fn shared_prefix(text: &[u8], pat: &[u8]) -> usize {
assert!(is_supported());
unsafe { shared_prefix_inner(text, pat) }
}
#[target_feature(enable = "sse4.2")]
unsafe fn shared_prefix_inner(text: &[u8], pat: &[u8]) -> usize {
let tp = text.as_ptr();
let tlen = text.len();
let pp = pat.as_ptr();
let plen = pat.len();
let len = cmp::min(tlen, plen);
// TODO: do non-aligned prefix manually too(?) aligned text or pat..
// all but the end we can process with pcmpestrm
let initial_part = len.saturating_sub(16);
let mut prefix_len = 0;
let mut offset = 0;
while offset < initial_part {
let initial_tail = initial_part - offset;
let mask = pcmpestrm_eq_each(tp, offset, initial_tail, pp, offset, initial_tail);
// find zero in the first 16 bits
if mask != 0xffff {
let first_bit_set = (mask ^ 0xffff).trailing_zeros() as usize;
prefix_len += first_bit_set;
return prefix_len;
} else {
prefix_len += cmp::min(initial_tail, 16);
}
offset += 16;
}
// so one block left, the last (up to) 16 bytes
// unchecked slicing .. we don't want panics in this function
let text_suffix = get_unchecked(text, prefix_len..len);
let pat_suffix = get_unchecked(pat, prefix_len..len);
for (&a, &b) in zip(text_suffix, pat_suffix) {
if a != b {
break;
}
prefix_len += 1;
}
prefix_len
}
#[test]
fn test_prefixlen() {
let text_long = b"0123456789abcdefeffect";
let text_long2 = b"9123456789abcdefeffect";
let text_long3 = b"0123456789abcdefgffect";
let plen = shared_prefix(text_long, text_long);
assert_eq!(plen, text_long.len());
let plen = shared_prefix(b"abcd", b"abc");
assert_eq!(plen, 3);
let plen = shared_prefix(b"abcd", b"abcf");
assert_eq!(plen, 3);
assert_eq!(0, shared_prefix(text_long, text_long2));
assert_eq!(0, shared_prefix(text_long, &text_long[1..]));
assert_eq!(16, shared_prefix(text_long, text_long3));
for i in 0..text_long.len() + 1 {
assert_eq!(text_long.len() - i, shared_prefix(&text_long[i..], &text_long[i..]));
}
let l1 = [7u8; 1024];
let mut l2 = [7u8; 1024];
let off = 1000;
l2[off] = 0;
for i in 0..off {
let plen = shared_prefix(&l1[i..], &l2[i..]);
assert_eq!(plen, off - i);
}
}
// File: cs.js
/*
Copyright (c) 2003-2017, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
CKEDITOR.plugins.setLang( 'link', 'cs', {
acccessKey: 'Přístupový klíč',
advanced: 'Rozšířené',
advisoryContentType: 'Pomocný typ obsahu',
advisoryTitle: 'Pomocný titulek',
anchor: {
toolbar: 'Záložka',
menu: 'Vlastnosti záložky',
title: 'Vlastnosti záložky',
name: 'Název záložky',
errorName: 'Zadejte prosím název záložky',
remove: 'Odstranit záložku'
},
anchorId: 'Podle Id objektu',
anchorName: 'Podle jména kotvy',
charset: 'Přiřazená znaková sada',
cssClasses: 'Třída stylu',
download: 'Force Download', // MISSING
displayText: 'Zobrazit text',
emailAddress: 'E-mailová adresa',
emailBody: 'Tělo zprávy',
emailSubject: 'Předmět zprávy',
	id: 'Id',
	info: 'Informace o odkazu',
	langCode: 'Kód jazyka',
langDir: 'Směr jazyka',
langDirLTR: 'Zleva doprava (LTR)',
langDirRTL: 'Zprava doleva (RTL)',
menu: 'Změnit odkaz',
name: 'Jméno',
noAnchors: '(Ve stránce není definována žádná kotva!)',
noEmail: 'Zadejte prosím e-mailovou adresu',
noUrl: 'Zadejte prosím URL odkazu',
other: '<jiný>',
popupDependent: 'Závislost (Netscape)',
popupFeatures: 'Vlastnosti vyskakovacího okna',
popupFullScreen: 'Celá obrazovka (IE)',
popupLeft: 'Levý okraj',
popupLocationBar: 'Panel umístění',
popupMenuBar: 'Panel nabídky',
popupResizable: 'Umožňující měnit velikost',
popupScrollBars: 'Posuvníky',
popupStatusBar: 'Stavový řádek',
popupToolbar: 'Panel nástrojů',
popupTop: 'Horní okraj',
rel: 'Vztah',
selectAnchor: 'Vybrat kotvu',
styles: 'Styl',
tabIndex: 'Pořadí prvku',
target: 'Cíl',
targetFrame: '<rámec>',
targetFrameName: 'Název cílového rámu',
targetPopup: '<vyskakovací okno>',
targetPopupName: 'Název vyskakovacího okna',
title: 'Odkaz',
toAnchor: 'Kotva v této stránce',
toEmail: 'E-mail',
toUrl: 'URL',
toolbar: 'Odkaz',
type: 'Typ odkazu',
unlink: 'Odstranit odkaz',
upload: 'Odeslat'
} );
// File: BoardDAO.java
package com.board;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.List;
public class BoardDAO {
private Connection conn;
public BoardDAO(Connection conn){
this.conn = conn;
}
	// 1. Get the maximum value of num
public int getMaxNum(){
int maxNum = 0;
PreparedStatement pstmt = null;
ResultSet rs = null;
String sql;
try {
sql = "select nvl(max(num),0) from board";
pstmt = conn.prepareStatement(sql);
rs = pstmt.executeQuery();
if(rs.next()){
maxNum = rs.getInt(1);
}
rs.close();
pstmt.close();
} catch (Exception e) {
System.out.println(e.toString());
}
return maxNum;
}
	// Insert (created.jsp -> created_ok.jsp)
public int insertData(BoardForm dto){
int result = 0;
/*
PreparedStatement pstmt = null;
StringBuffer sql = new StringBuffer();
*/
PreparedStatement pstmt = null;
String sql;
try {
/*
sql.append("insert into board");
sql.append("(num, name, pwd, email, subject, content,");
*/
sql = "insert into board" +
"(num, name, pwd, email, subject, content," +
"ipAddr, hitCount, created) " +
"values(?, ?, ?, ?, ?, ?, ?, 0, sysdate)";
pstmt = conn.prepareStatement(sql);
pstmt.setInt(1, dto.getNum());
pstmt.setString(2, dto.getName());
pstmt.setString(3, dto.getPwd());
pstmt.setString(4, dto.getEmail());
pstmt.setString(5, dto.getSubject());
pstmt.setString(6, dto.getContent());
pstmt.setString(7, dto.getIpAddr());
result = pstmt.executeUpdate();
pstmt.close();
} catch (Exception e) {
System.out.println("# insertData");
System.out.println(e.toString());
}
return result;
}
	// Fetch a page of all rows
public List<BoardForm> getList(int start, int end){
List<BoardForm> lists = new ArrayList<BoardForm>();
PreparedStatement pstmt = null;
ResultSet rs = null;
String sql;
try {
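			// Oracle-style pagination: number the ordered rows with ROWNUM in an
			// inner query, then keep only rows start..end in the outer query.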
sql = "select * from (";
sql += "select rownum rnum,data.* " +
" from (select num,name,subject,hitCount," +
" to_char(created, 'YYYY-MM-DD') created" +
" from board order by num desc) data )" +
" where rnum >= ? and rnum <= ? ";
pstmt = conn.prepareStatement(sql);
pstmt.setInt(1, start);
pstmt.setInt(2, end);
rs = pstmt.executeQuery();
while(rs.next()){
BoardForm dto = new BoardForm();
dto.setNum(rs.getInt(2));
dto.setName(rs.getString(3));
dto.setSubject(rs.getString(4));
dto.setHitCount(rs.getInt(5));
dto.setCreated(rs.getString(6));
lists.add(dto);
}
rs.close();
pstmt.close();
} catch (Exception e) {
System.out.println(e.toString());
}
		return lists;
	}
	// Get the total number of rows
public int getDataCount(){
int result= 0;
PreparedStatement pstmt = null;
ResultSet rs = null;
String sql;
try {
sql = "select nvl(count(*),0) from board";
pstmt = conn.prepareStatement(sql);
rs = pstmt.executeQuery();
if(rs.next()){
result = rs.getInt(1);
}
rs.close();
pstmt.close();
} catch (Exception e) {
System.out.println(e.toString());
}
return result;
}
	// Increment the hit count
public int updateHitCount(int num){
int result = 0;
PreparedStatement pstmt = null;
String sql;
try {
sql = "update board set hitCount=hitCount+1 where num=?" ;
pstmt = conn.prepareStatement(sql);
pstmt.setInt(1, num);
result = pstmt.executeUpdate();
pstmt.close();
} catch (Exception e) {
System.out.println(e.toString());
}
return result;
}
	// Fetch a single record
public BoardForm getReadData(int num){
BoardForm dto = null;
PreparedStatement pstmt = null;
ResultSet rs = null;
String sql;
try {
			// subject, author, line count, created date, hit count, content, IP address
sql = "select num, name, pwd, email, subject, content, ipaddr, created, hitCount " +
"from board where num=?";
pstmt = conn.prepareStatement(sql);
pstmt.setInt(1, num);
rs = pstmt.executeQuery();
if(rs.next()){
dto = new BoardForm();
dto.setNum(rs.getInt("num"));
dto.setName(rs.getString("name"));
dto.setPwd(rs.getString("pwd"));
dto.setEmail(rs.getString("email"));
dto.setSubject(rs.getString("subject"));
dto.setContent(rs.getString("content"));
dto.setIpAddr(rs.getString("ipAddr"));
dto.setHitCount(rs.getInt("hitCount"));
dto.setCreated(rs.getString("created"));
}
rs.close();
pstmt.close();
} catch (Exception e) {
System.out.println(e.toString());
}
return dto;
}
// 삭제
public int deleteData(int num){
int result = 0;
PreparedStatement pstmt = null;
String sql;
try {
sql = "delete board where num=?";
pstmt = conn.prepareStatement(sql);
pstmt.setInt(1, num);
result = pstmt.executeUpdate();
pstmt.close();
} catch (Exception e) {
System.out.println(e.toString());
}
return result;
}
// 수정
public int updateData(BoardForm dto){
int result = 0;
PreparedStatement pstmt = null;
String sql;
try {
sql = "update board set name=?, pwd=?, subject=?, content=?, email=? where num=? ";
pstmt = conn.prepareStatement(sql);
pstmt.setString(1, dto.getName());
pstmt.setString(2, dto.getPwd());
pstmt.setString(3, dto.getSubject());
pstmt.setString(4, dto.getContent());
pstmt.setString(5, dto.getEmail());
pstmt.setInt(6, dto.getNum());
result = pstmt.executeUpdate();
pstmt.close();
} catch (Exception e) {
System.out.println(e.toString());
}
return result;
}
//검색된 데이터수 구하기
public int getDataCount(String searchKey, String searchValue){
int result= 0;
PreparedStatement pstmt = null;
ResultSet rs = null;
String sql;
try {
searchValue = "%" + searchValue + "%";
sql = "select nvl(count(*),0) from board where "+searchKey + " like ?";
pstmt = conn.prepareStatement(sql);
pstmt.setString(1, searchValue);
rs = pstmt.executeQuery();
if(rs.next()){
result = rs.getInt(1);
}
rs.close();
pstmt.close();
} catch (Exception e) {
System.out.println(e.toString());
}
return result;
}
	// fetch the search results
public List<BoardForm> getList(int start, int end, String searchKey, String searchValue){
List<BoardForm> lists = new ArrayList<BoardForm>();
PreparedStatement pstmt = null;
ResultSet rs = null;
String sql;
try {
searchValue = "%" + searchValue + "%";
sql = "select * from (";
sql += "select rownum rnum,data.* " +
" from (select num,name,subject,hitCount," +
" to_char(created, 'YYYY-MM-DD') created" +
" from board where "+searchKey + " like ? order by num desc) data )" +
" where rnum >= ? and rnum <= ? ";
pstmt = conn.prepareStatement(sql);
pstmt.setString(1, searchValue);
pstmt.setInt(2, start);
pstmt.setInt(3, end);
rs = pstmt.executeQuery();
while(rs.next()){
BoardForm dto = new BoardForm();
dto.setNum(rs.getInt(2));
dto.setName(rs.getString(3));
dto.setSubject(rs.getString(4));
dto.setHitCount(rs.getInt(5));
dto.setCreated(rs.getString(6));
lists.add(dto);
}
rs.close();
pstmt.close();
} catch (Exception e) {
System.out.println(e.toString());
}
return lists;
}
}
/////////////////<|fim▁end|> | |
<|file_name|>widgets.py<|end_file_name|><|fim▁begin|>class Widget(object):
def __init__(self, options, *args, **kwargs):
super(Widget, self).__init__(*args, **kwargs)
self.options = options
<|fim▁hole|> def render(self, request):
raise NotImplementedError
def render_option_form(self):
raise NotImplementedError
def get_option_dict(self):
return self.options<|fim▁end|> | def get_display_name(self):
raise NotImplementedError
|
<|file_name|>fractions.js<|end_file_name|><|fim▁begin|>const multiples =
'(hundred|thousand|million|billion|trillion|quadrillion|quintillion|sextillion|septillion)'
const here = 'fraction-tagger'
// plural-ordinals like 'hundredths' are already tagged as Fraction by compromise
const tagFractions = function (doc) {
// hundred
doc.match(multiples).tag('#Multiple', here)
// half a penny
doc.match('[(half|quarter)] of? (a|an)', 0).tag('Fraction', 'millionth')
// nearly half
doc.match('#Adverb [half]', 0).tag('Fraction', 'nearly-half')
// half the
doc.match('[half] the', 0).tag('Fraction', 'half-the')
// two-halves
doc.match('#Value (halves|halfs|quarters)').tag('Fraction', 'two-halves')
// ---ordinals as fractions---
// a fifth
doc.match('a #Ordinal').tag('Fraction', 'a-quarter')
// seven fifths
doc.match('(#Fraction && /s$/)').lookBefore('#Cardinal+$').tag('Fraction')
// one third of ..
doc.match('[#Cardinal+ #Ordinal] of .', 0).tag('Fraction', 'ordinal-of')
// 100th of
doc.match('[(#NumericValue && #Ordinal)] of .', 0).tag('Fraction', 'num-ordinal-of')
// a twenty fifth
doc.match('(a|one) #Cardinal?+ #Ordinal').tag('Fraction', 'a-ordinal')
// doc.match('(a|one) [#Ordinal]', 0).tag('Fraction', 'a-ordinal')
// values.if('#Ordinal$').tag('Fraction', '4-fifths')
// seven quarters
// values.tag('Fraction', '4-quarters')
// doc.match('(#Value && !#Ordinal)+ (#Ordinal|#Fraction)').tag('Fraction', '4-fifths')
// 12 and seven fifths
// doc.match('#Value+ and #Value+ (#Ordinal|half|quarter|#Fraction)').tag('Fraction', 'val-and-ord')
// fixups
// doc.match('#Cardinal+? (second|seconds)').unTag('Fraction', '3 seconds')
// doc.match('#Ordinal (half|quarter)').unTag('Fraction', '2nd quarter')
// doc.match('#Ordinal #Ordinal+').unTag('Fraction')
// doc.match('[#Cardinal+? (second|seconds)] of (a|an)', 0).tag('Fraction', here)
// doc.match(multiples).tag('#Multiple', here)
// // '3 out of 5'<|fim▁hole|> // // one and a half
// doc.match('#Cardinal and a (#Fraction && #Value)').tag('Fraction', here)
// fraction - 'a third of a slice'
// TODO:fixme
// m = doc.match(`[(#Cardinal|a) ${ordinals}] of (a|an|the)`, 0).tag('Fraction', 'ord-of')
// tag 'thirds' as a ordinal
// m.match('.$').tag('Ordinal', 'plural-ordinal')
return doc
}
module.exports = tagFractions<|fim▁end|> | doc.match('#Cardinal+ out? of every? #Cardinal').tag('Fraction', here) |
<|file_name|>admin-videos-ctrl.js<|end_file_name|><|fim▁begin|>angular.module('streama').controller('adminVideosCtrl', ['$scope', 'apiService', 'modalService', '$state', function ($scope, apiService, modalService, $state) {
$scope.loading = true;
apiService.genericVideo.list().then(function (response) {
$scope.videos = response.data;
$scope.loading = false;
});
$scope.openGenericVideoModal = function () {
modalService.genericVideoModal(null, function (data) {<|fim▁hole|> $state.go('admin.video', {videoId: data.id});
});
};
$scope.addFromSuggested = function (movie, redirect) {
var tempMovie = angular.copy(movie);
var apiId = tempMovie.id;
delete tempMovie.id;
tempMovie.apiId = apiId;
apiService.movie.save(tempMovie).then(function (response) {
if(redirect){
$state.go('admin.movie', {movieId: response.data.id});
}else{
$scope.movies.push(response.data);
}
});
};
$scope.alreadyAdded = function (movie) {
console.log('%c movie', 'color: deeppink; font-weight: bold; text-shadow: 0 0 5px deeppink;', movie);
return movie.id && _.find($scope.movies, {apiId: movie.id.toString()});
};
}]);<|fim▁end|> | |
<|file_name|>JumpGameTest.cpp<|end_file_name|><|fim▁begin|><|fim▁hole|>
#include "JumpGame.hpp"
TEST_CASE("Jump Game") {
JumpGame s;
SECTION("Sample tests") {
vector<int> nums_1{2, 3, 1, 1, 4};
REQUIRE(s.canJump(nums_1));
vector<int> nums_2{3, 2, 1, 0, 4};
REQUIRE_FALSE(s.canJump(nums_2));
}
}<|fim▁end|> | #include "catch.hpp" |
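// Greedy sketch of the method under test (an assumed implementation, since
// JumpGame.hpp is not shown here): track the furthest reachable index and
// fail as soon as the cursor overtakes it.
//
//   bool canJump(vector<int>& nums) {
//       int reach = 0;
//       for (int i = 0; i < (int)nums.size(); ++i) {
//           if (i > reach) return false;          // stuck before the end
//           reach = std::max(reach, i + nums[i]); // extend the horizon
//       }
//       return true;
//   }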
<|file_name|>0003_auto_20170522_1154.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals<|fim▁hole|>from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('eventlog', '0002_auto_20170522_1134'),
]
operations = [
migrations.AddField(
model_name='fieldsightlog',
name='source',
field=models.ForeignKey(related_name='log', to=settings.AUTH_USER_MODEL, null=True),
),
migrations.AlterField(
model_name='fieldsightlog',
name='type',
field=models.IntegerField(default=0, choices=[(0, b'USER'), (1, b'FORM'), (2, b'SUBMISSION'), (3, b'Site')]),
),
]<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
pygments.lexers.sw
~~~~~~~~~~~~~~~~~~~~~
Lexers for semantic web languages.
:copyright: 2007 by Philip Cooper <philip.cooper@openvest.com>.
:license: BSD, see LICENSE for more details.
Modified and extended by Gerrit Niezen. (LICENSE file described above is missing, wasn't distributed with original file)
"""
import re
from pygments.lexer import RegexLexer, include, bygroups
from pygments.token import Error, Punctuation, \
Text, Comment, Operator, Keyword, Name, String, Number, Literal
from pygments.util import shebang_matches
__all__ = ['Notation3Lexer','SparqlLexer']
# The N3 lexer should be close to the not really correct grammar at
# http://www.w3.org/2000/10/swap/grammar/n3-ietf.txt
# Comments indicate to which grammar rule the various regular
# expressions correspond.
_explicit_uri = r'<[^>]*>'
_qname = r'((\w[-\w]*)?:)?\w[-\w]*|(\w[-\w]*)?:' #(([:letter:][-\w]*)?:)?[:letter:][.\w]*
_symbol = '(' + _qname + '|' + _explicit_uri +')'
_quickvariable = r'\?\w+'
def expression(symbolAction, nextState):
#expression ::= | pathitem pathtail
#pathitem ::= | "(" pathlist ")"
# | "[" propertylist "]"
# | "{" formulacontent "}"
# | boolean
# | literal
# | numericliteral
# | quickvariable
# | symbol
if not isinstance(nextState,tuple):
nextState = (nextState,)
nextState = nextState + ('pathtail',)
return [
#pathlist
(r'\(', Punctuation, nextState + ('list',)),
#properylist
(r'\[', Punctuation, nextState + ('propertyList',)),
#formulacontent
(r'\{', Punctuation, nextState + ('root',)),
#boolean
(r'@false|@true', Keyword.Constant, nextState),
#literal
(r'("""[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*""")|("[^"\\]*(?:\\.[^"\\]*)*")', String, nextState + ('dtlang',)),
#numericliteral ::= double|integer|rational
(r'[-+]?[0-9]+(\.[0-9]+)?([eE][-+]?[0-9]+)', Number.Float, nextState),
(r'[-+]?[0-9]+', Number.Integer, nextState),
(r'[-+]?[0-9]+/[0-9]+', Number, nextState),
#quickvariable
(_quickvariable, Name.Variable, nextState),
#symbol
(_symbol, symbolAction, nextState),
]
class Notation3Lexer(RegexLexer):
"""
Lexer for the N3 / Turtle / NT
"""
name = 'N3'
aliases = ['n3', 'turtle']
filenames = ['*.n3', '*.ttl', '*.NT']
mimetypes = ['text/rdf+n3','application/x-turtle','application/n3']
tokens = {
'whitespaces': [
(r'(#.*)', Comment),
(r'\s+', Text),
],
'pathtailExpression': expression(Name.Function, '#pop'),
'pathtail': [
# No whitespaces allowed in front!
(r'(^|!|\.)(?!\s)', Operator, 'pathtailExpression'),
(r'', Text, '#pop'),
],
# statement:
'root': [
include('whitespaces'),
# declaration ::= base|prefix|keywords
(r'(@(?:prefix|base)\s*)([^\!\"\#\$\&\'\(\)\*\,\+\/\;\<\=\>\?\@\[\\\]\^\`\{\|\}\~]*:\s+)?(<[^>]*>\s*\.)', bygroups(Keyword,Name.Variable,Name.Namespace)),
(r'(@keywords)(\s*\w+\s*,)*(\s*\w+)', bygroups(Keyword,Text,Text)),
# existential|universal
(r'@forSome|@forAll', Name.Class, 'symbol_csl'),
# Terminating a formula
(r'\}', Punctuation, '#pop'),
] + expression(Name.Class, 'propertyList'),
'propertyList': [
#predicate ::= | "<="
# | "="
# | "=>"
# | "@a"
# | "@has" expression
# | "@is" expression "@of"
# | expression
include('whitespaces'),
(r';', Punctuation),
(r'(<=|=>|=|@?a(?=\s))', Operator, 'objectList'),
(r'\.', Punctuation, '#pop'),
(r'\]', Punctuation, '#pop'),
(r'(?=\})', Text, '#pop'),
] + expression(Name.Function, 'objectList'),
'objectList': [
include('whitespaces'),
(r',', Punctuation),
(r'(?=;)', Text, '#pop'),
(r'(?=\.)', Text, '#pop'),
(r'(?=\])', Text, '#pop'),
(r'(?=\})', Text, '#pop'),
] + expression(Name.Attribute, ()),
'list': [
include('objectList'),
(r'\)', Punctuation, '#pop'),
],
'symbol_csl': [
include('whitespaces'),
(r',', Punctuation),
(_symbol, Name.Variable),
(r'.', Punctuation, '#pop'),
],
'dtlang': [
#dtlang ::= "@" langcode|"^^" symbol|void
(r'@[a-z]+(-[a-z0-9]+)*', Name.Attribute, '#pop'),
(r'\^\^'+_symbol, Name.Attribute, '#pop'),
(r'', Text, '#pop'),
],
}
class SparqlLexer(RegexLexer):
"""
Lexer for SPARQL Not Complete
"""
name = 'SPARQL'
aliases = ['sparql']
filenames = ['*.sparql']
mimetypes = ['text/x-sql']
flags = re.IGNORECASE
tokens = {
'comments': [
(r'(\s*#.*)', Comment)
],
'root': [
include('comments'),
(r'(\s*(?:PREFIX|BASE)\s+)([\w-]*:[\w-]*)?(\s*<[^> ]*>\s*)',bygroups(Keyword,Name.Variable,Name.Namespace)),
(r'(\s*#.*)', Comment),
(r'(\s*)(SELECT\s*(?:DISTINCT|REDUCED)?)(\s*)',bygroups(Text, Keyword,Text), 'selectVars'),
(r'(\s*)((?:ASK|CONSTRUCT|DESCRIBE)\s*(?:DISTINCT|REDUCED)?\s*)((?:\?[a-zA-Z0-9_-]+\s*)+|\*)(\s*)',
bygroups(Text, Keyword,Name.Variable,Text)),
(r'(\s*)((?:LOAD|CLEAR|DROP|CREATE)\s*(?:SILENT)?\s*)(\s*(?:GRAPH)?\s*)(\s*<[^> ]*>\s*)(;)(\s*)',
bygroups(Text, Keyword, Keyword, Name.Attribute, Text, Text)),
(r'(\s*)((?:ADD|MOVE|COPY)\s*(?:SILENT)?\s*)(\s*(?:GRAPH)?\s*)(\s*<[^> ]*>\s*)((?:TO)\s*)(\s*(?:GRAPH)?\s*)(\s*<[^> ]*>\s*)?(;)(\s*)',
bygroups(Text, Keyword, Keyword, Name.Attribute, Keyword, Keyword, Name.Attribute, Text, Text)),
(r'(\s*)((?:INSERT|DELETE)\s*(?:DATA)?)\s*',bygroups(Text, Keyword),'quaddata'),
(r'(\s*)(CONSTRUCT)?\s*({)',bygroups(Text, Keyword,Punctuation),'graph'),
(r'(\s*)(FROM\s*(?:NAMED)?)(\s*.*)', bygroups(Text, Keyword,Text)),
(r'(\s*)(WHERE\s?)?\s*({)',bygroups(Text, Keyword, Punctuation),'groupgraph'),
(r'(\s*)(LIMIT|OFFSET)(\s*[+-]?[0-9]+)',bygroups(Text, Keyword,Literal.String)),
(r'(ORDER BY (?:ASC|DESC)\s*)(\()\s*',bygroups(Text, Keyword,Punctuation),'bindgraph'),
(r'(\s*)(})', bygroups(Text, Punctuation)),
],
'selectVars':[
(r'(\s*)(\*)(\s*)',bygroups(Text,Keyword,Text), '#pop'),
(r'(?=\s*(FROM|WHERE|GROUP|HAVING|ORDER|LIMIT|OFFSET))', Text, '#pop'),
(r'(\s*)(\()(\s*)', bygroups(Text, Punctuation, Text), 'bindgraph'),
include('variable'),
(r'\n', Text),
(r'', Text, '#pop'),
],
'quaddata':[
(r'(\s*)({)(\s*)(GRAPH)(\s*<[^> ]*>\s*)', bygroups(Text, Punctuation, Text, Keyword, Name.Attribute), 'quads'),
(r'(\s*)({)(\s*)',bygroups(Text,Punctuation,Text), 'graph'),
(r'', Text, '#pop'),
],
'quads':[
(r'(\s*)({)(\s*)(GRAPH)(\s*<[^> ]*>\s*)', bygroups(Text, Punctuation, Text, Keyword, Name.Attribute), '#push'),
(r'(\s*)({)(\s*)', bygroups(Text,Punctuation,Text), 'graph'),
(r'(\s*)(})(\s*)', bygroups(Text,Punctuation,Text), '#pop'),
],
'groupgraph':[
(r'(\s*)(UNION)(\s*)({)(\s*)', bygroups(Text, Keyword, Text, Punctuation, Text), '#push'),
(r'(\s*)({)(\s*)',bygroups(Text, Punctuation, Text), '#push'),
include('graph'),
include('root'),
(r'', Text, '#pop'),
],
'graph':[
(r'(\s*)(<[^>]*\>)', bygroups(Text, Name.Class), ('triple','predObj')),
(r'(\s*[a-zA-Z_0-9\-]*:[a-zA-Z0-9\-_]*\s)', Name.Class, ('triple','predObj')),
(r'(\s*\?[a-zA-Z0-9_-]*)', Name.Variable, ('triple','predObj')),
(r'\s*\[\]\s*', Name.Class, ('triple','predObj')),
(r'(\s*)(FILTER)(\s*)',bygroups(Text, Keyword,Text),'filterConstraint'),
(r'(\s*)(BIND)(\s*)(\()(\s*)',bygroups(Text, Keyword, Text, Punctuation, Text),'bindgraph'),
(r'(\s*)(OPTIONAL)(\s*)({)',bygroups(Text, Keyword, Text, Punctuation), '#push'),
(r'(\s*)(})(\s*)(\.)(\s*)', bygroups(Text, Punctuation, Text, Punctuation, Text), '#pop'),
(r'(\s*)(})', bygroups(Text, Punctuation), '#pop'),
(r'(\s*)(\.)(\s*)', bygroups(Text, Punctuation, Text), '#pop'),
],
'triple' : [
(r'(?=\s*})', Text, '#pop'),
(r'(\s*)(\.)(\s*)', bygroups(Text, Punctuation, Text), '#pop'),
],
'predObj': [
include('comments'),
(r'(\s*\?[a-zA-Z0-9_-]*\b\s*)', Name.Variable,'object'),
(r'(\s*[a-zA-Z_:][a-zA-Z0-9\-_:]*\b\s*)', Operator, 'object'),
(r'\s*(<[^>]*\>)', Operator, 'object'),
(r'\s*\]\s*', Text, '#pop'),
(r'(?=\s*\.\s*)', Keyword, '#pop'),
],
'objList': [
(r'(\s*)(\))', bygroups(Text, Punctuation), '#pop'),
include('object'),
],
'object': [
include('variable'),
(r'\s*\[', Text, 'predObj'),
(r'\s*<[^> ]*>', Name.Attribute),
(r'(\s*)("""(?:.|\n)*?""")(\@[a-z]{2-4}|\^\^<?[a-zA-Z0-9\-\:_#/\.]*>?)?\s*', bygroups(Text, Literal.String,Text)),
(r'\s*".*?[^\\]"(?:\@[a-z]{2-4}|\^\^<?[a-zA-Z0-9\-\:_#/\.]*>?)?\s*', Literal.String),
(r'(\s*)((?:[+-])?\d+\.?\d*)(\s*)', bygroups(Text, Number, Text)),
(r'\s*[a-zA-Z0-9\-_\:]+\s*', Name.Attribute),
(r'(\s*)(\()', bygroups(Text, Punctuation), 'objList'),
(r',', Punctuation),
(r'(\s*)(;)(\s*)', bygroups(Text, Punctuation, Text), '#pop'),
(r'(?=\])', Text, '#pop'),
(r'\s*(?=\.)', Text, '#pop'),
],
'variable':[
(r'(\?[a-zA-Z0-9\-_]+\s*)', Name.Variable),
],
'filterConstraint':[
include('filterBuiltin'),
(r'(\s*)(\()(\s*)', bygroups(Text, Punctuation, Text), 'filterExp'),<|fim▁hole|> ],
#filterBuiltin is intended to be included, not pushed
'filterBuiltin':[
include('aggregate'),
(r'(str|lang|langmates|datatype|bound|iri|uri|bnode)(\s*)(\()', bygroups(Name.Builtin, Text, Punctuation), 'objList'),
(r'(abs|ceil|floor|round)(\s*)(\()', bygroups(Name.Builtin, Text, Punctuation), 'objList'),
(r'(strlen|ucase|lcase|encode_for_uri|contains|strstarts|strends|strbefore|strafter)(\s*)(\()', bygroups(Name.Builtin, Text, Punctuation), 'objList'),
(r'(year|month|day|hours|minutes|seconds|timezone|tz)(\s*)(\()', bygroups(Name.Builtin, Text, Punctuation), 'objList'),
(r'(md5|sha1|sha256|sha384|sha512)(\s*)(\()', bygroups(Name.Builtin, Text, Punctuation), 'objList'),
(r'(if|strlang|strdt)(\s*)(\()', bygroups(Name.Builtin, Text, Punctuation), 'objList'),
(r'(sameterm|isIRI|isURI|isBlank|isLiteral|isNumeric)(\s*)(\()', bygroups(Name.Builtin, Text, Punctuation), 'objList'),
(r'(regex)(\s*)(\()', bygroups(Name.Builtin, Text, Punctuation), 'objList'),
],
# aggregate is intended to be included, not pushed
'aggregate':[
(r'(\s*)(COUNT)(\s*)(\()(\s*)(DISTINCT)?(\s*)(\*)(\s*)',
bygroups(Text, Keyword, Text, Punctuation, Text, Keyword, Text, Keyword, Text)),
(r'(\s*)(COUNT|SUM|MIN|MAX|AVG|SAMPLE)(\s*)(\()(\s*)(DISTINCT)?(\s*)',
bygroups(Text, Keyword, Text, Punctuation, Text, Keyword, Text), 'filterExp'),
(r'(\s*)(GROUP_CONCAT)(\s*)(\()(\s*)(DISTINCT)?(\s*)',
bygroups(Text, Keyword, Text, Punctuation, Text, Keyword, Text), 'groupConcatExp'),
],
'groupConcatExp':[
(r'(\s*)(;)(\s*)(SEPARATOR)(\s*)(=)(\s*)',
bygroups(Text, Punctuation, Text, Keyword, Text, Operator, Text), 'string'),
include('filterExp'),
],
'filterExp':[
include('filterBuiltin'),
(r'(\s*)(\()(\s*)', bygroups(Text, Punctuation, Text), '#push'),
include('variable'),
include('object'),
(r'\s*[+*/<>=~!%&|-]+\s*', Operator),
(r'(\s*)(\))', bygroups(Text, Punctuation), '#pop'),
],
'bindgraph':[
(r'(\s*)(\()(\s*)', bygroups(Text, Punctuation, Text), '#push'),
(r'\s*AS\s*', Keyword),
(r'(\s*)(IRI)(\s*)(\()(\s*)',bygroups(Text, Keyword, Text, Punctuation, Text),'iri'),
(r'(\s*)(\))(\s*)', bygroups(Text, Punctuation, Text), '#pop'),
include('filterExp'),
include('variable'),
include('object'),
(r'', Text, '#pop'),
],
'iri':[
include('object'),
(r'(\s*)(\))', bygroups(Text, Punctuation), '#pop'),
],
'string':[
(r'(\s*)("""(?:.|\n)*?""")(\@[a-z]{2-4}|\^\^<?[a-zA-Z0-9\-\:_#/\.]*>?)?\s*', bygroups(Text,Literal.String,Text), '#pop'),
(r'\s*".*?[^\\]"(?:\@[a-z]{2-4}|\^\^<?[a-zA-Z0-9\-\:_#/\.]*>?)?\s*', Literal.String, '#pop'),
],
}<|fim▁end|> | (r'(\s*)(\.)(\s*)', bygroups(Text, Punctuation, Text), '#pop'), |
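# Illustrative usage (assumed, not part of this module): running the lexer
# over a small query via the standard pygments entry points.
#
#   from pygments import highlight
#   from pygments.formatters import NullFormatter
#   print(highlight('SELECT ?s WHERE { ?s ?p ?o }', SparqlLexer(), NullFormatter()))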
<|file_name|>shipping_line.py<|end_file_name|><|fim▁begin|>from ..base import HaravanResource
class ShippingLine(HaravanResource):<|fim▁hole|><|fim▁end|> | pass |
<|file_name|>topology.go<|end_file_name|><|fim▁begin|>/*
Copyright 2021 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v1alpha3
const (
TopologyPublic = "public"
TopologyPrivate = "private"
)
type TopologySpec struct {
// The environment to launch the Kubernetes masters in public|private
Masters string `json:"masters,omitempty"`
// The environment to launch the Kubernetes nodes in public|private
Nodes string `json:"nodes,omitempty"`
// Bastion provide an external facing point of entry into a network
// containing private network instances. This host can provide a single
// point of fortification or audit and can be started and stopped to enable
// or disable inbound SSH communication from the Internet, some call bastion
// as the "jump server".
Bastion *BastionSpec `json:"bastion,omitempty"`<|fim▁hole|> // DNS configures options relating to DNS, in particular whether we use a public or a private hosted zone
DNS *DNSSpec `json:"dns,omitempty"`
}
type DNSSpec struct {
Type DNSType `json:"type,omitempty"`
}
type DNSType string
const (
DNSTypePublic DNSType = "Public"
DNSTypePrivate DNSType = "Private"
)<|fim▁end|> | |
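// Illustrative only (assumed usage, not part of this package): a fully
// private cluster topology would be declared roughly like this.
//
//	spec := TopologySpec{
//		Masters: TopologyPrivate,
//		Nodes:   TopologyPrivate,
//		DNS:     &DNSSpec{Type: DNSTypePrivate},
//	}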
<|file_name|>createsigningcertificate.py<|end_file_name|><|fim▁begin|># Copyright 2009-2015 Eucalyptus Systems, Inc.
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
from requestbuilder import Arg
from euca2ools.commands.iam import IAMRequest, AS_ACCOUNT, arg_user
class CreateSigningCertificate(IAMRequest):
DESCRIPTION = '[Eucalyptus only] Create a new signing certificate'
ARGS = [arg_user(nargs='?', help='''user to create the signing
certificate for (default: current user)'''),
Arg('--out', metavar='FILE', route_to=None,
help='file to write the certificate to (default: stdout)'),<|fim▁hole|> Arg('--keyout', metavar='FILE', route_to=None,
help='file to write the private key to (default: stdout)'),
AS_ACCOUNT]
def postprocess(self, result):
if self.args['out']:
with open(self.args['out'], 'w') as certfile:
certfile.write(result['Certificate']['CertificateBody'])
if self.args['keyout']:
old_umask = os.umask(0o077)
with open(self.args['keyout'], 'w') as keyfile:
keyfile.write(result['Certificate']['PrivateKey'])
os.umask(old_umask)
def print_result(self, result):
print result['Certificate']['CertificateId']
if not self.args['out']:
print result['Certificate']['CertificateBody']
if not self.args['keyout']:
print result['Certificate']['PrivateKey']<|fim▁end|> | |
<|file_name|>componentFactory.ts<|end_file_name|><|fim▁begin|>import * as Debug from 'debug';
import * as Models from '../models';
const debug = Debug('neeo:device:ComponentFactory');
// TODO declare types in models/components to use here and in requestHandler.
const TYPE_BUTTON = 'button';
const TYPE_SWITCH = 'switch';
const TYPE_SLIDER = 'slider';
const TYPE_SENSOR = 'sensor';
const TYPE_TEXTLABEL = 'textlabel';
const TYPE_IMAGEURL = 'imageurl';
const TYPE_DIRECTORY = 'directory';
const TYPE_DISCOVER_ROUTE = 'discover';
const TYPE_REGISTER_ROUTE = 'register';
const TYPE_DEVICE_SUBSCRIPTION_ROUTE = 'devicesubscription';
const TYPE_FAVORITE_HANDLER_ROUTE = 'favoritehandler';
export const SENSOR_TYPE_ARRAY = 'array';
export const SENSOR_TYPE_BINARY = 'binary';
export const SENSOR_TYPE_CUSTOM = 'custom';
export const SENSOR_TYPE_POWER = 'power';
export const SENSOR_TYPE_RANGE = 'range';
export const SENSOR_TYPE_STRING = 'string';
const SENSOR_SUFFIX = '_SENSOR';
const SENSOR_DEFAULT_TYPE = SENSOR_TYPE_RANGE;
const SENSOR_TYPES = [
SENSOR_TYPE_ARRAY,
SENSOR_TYPE_BINARY,
SENSOR_TYPE_CUSTOM,
SENSOR_TYPE_POWER,
SENSOR_TYPE_RANGE,
SENSOR_TYPE_STRING,
];
const SLIDER_TYPE_RANGE = 'range';
const SLIDER_DEFAULT_RANGE = [0, 100];
const SLIDER_DEFAULT_UNIT = '%';
const VALID_IMAGEURL_SIZES = ['small', 'large'];
export function buildButton(
pathPrefix: string,
param: Models.ButtonDescriptor
) {
validateParameter(pathPrefix, param);
const name = encodeURIComponent(param.name);
const path = pathPrefix + name;
const label = param.label ? encodeURIComponent(param.label) : name;
return {
type: TYPE_BUTTON,
name,
label,
path,
};
}
export function buildDirectory(
pathPrefix: string,
param: Models.Directory.Descriptor
) {
validateParameter(pathPrefix, param);
const name = encodeURIComponent(param.name);
const path = pathPrefix + name;
return {
type: TYPE_DIRECTORY,
label: encodeURIComponent(param.label || ''),
name,
path,
role: param.role,
identifier: param.identifier,
};
}
export function buildSwitch(
pathPrefix: string,
param: Models.Descriptor
) {
validateParameter(pathPrefix, param);
const name = encodeURIComponent(param.name);
const path = pathPrefix + name;
return {
type: TYPE_SWITCH,
name,
label: encodeURIComponent(param.label || ''),
path,
sensor: getSensorNameIfNeeded(name),
};
}
function validateRange(param?: ReadonlyArray<number>) {
const range = param || SLIDER_DEFAULT_RANGE;
if (!range || !Array.isArray(range)) {
throw new Error(`INVALID_SLIDER_RANGE ${JSON.stringify(range)}, range must be an array`);
}
if (range.length !== 2 || typeof range[0] !== 'number' || typeof range[1] !== 'number') {
throw new Error(`INVALID_SLIDER_RANGE: ${JSON.stringify(range)}, range must include 2 numbers`);
}
return range;
}
function buildPowerSensor(
pathPrefix: string,
param: Models.Sensor.Descriptor
) {
const component = buildSensorHelper(pathPrefix, param, SENSOR_TYPE_POWER);
// Power state sensors are added by addPowerStateSensor with the name
// powerstate, for backward compatibility we need to avoid changing it
// to POWERSTATE_SENSOR.
const legacyNoSuffixName = encodeURIComponent(param.name);
component.name = legacyNoSuffixName;
component.path = pathPrefix + legacyNoSuffixName;
return component;
}
export function buildSensor(
pathPrefix: string,
param: Models.Sensor.Descriptor
) {
if (param.type === SENSOR_TYPE_POWER) {
return buildPowerSensor(pathPrefix, param);
}
if (param.type && SENSOR_TYPES.includes(param.type)) {
return buildSensorHelper(pathPrefix, param, param.type);
}
return buildLegacyFallbackSensor(pathPrefix, param);
}
export function buildRangeSlider(
pathPrefix: string,
param: Models.Slider.Descriptor
) {
validateParameter(pathPrefix, param);
const name = encodeURIComponent(param.name);
const path = pathPrefix + name;
const range = validateRange(param.range);
const unit = param.unit ? encodeURIComponent(param.unit) : SLIDER_DEFAULT_UNIT;
const label = param.label ? encodeURIComponent(param.label) : name;
return {
type: TYPE_SLIDER,
name,
label,
path,
slider: {
type: SLIDER_TYPE_RANGE,
sensor: getSensorNameIfNeeded(name),
range,
unit,
},
};
}
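// Illustrative expansion (assumed usage, not part of this module): with the
// defaults above, a minimal range-slider descriptor comes out roughly as
//
//   buildRangeSlider('/device/0/', { name: 'volume' });
//   // => { type: 'slider', name: 'volume', label: 'volume',
//   //      path: '/device/0/volume',
//   //      slider: { type: 'range', sensor: 'VOLUME_SENSOR',
//   //                range: [0, 100], unit: '%' } }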
function buildSensorHelper(
pathPrefix: string,
param: Models.Sensor.Descriptor,
type = SENSOR_DEFAULT_TYPE
) {
validateParameter(pathPrefix, param);
const name = getSensorNameIfNeeded(encodeURIComponent(param.name));
const path = pathPrefix + name;
const label = encodeURIComponent(param.sensorlabel || param.label || param.name);
if (type === SENSOR_TYPE_CUSTOM) {
debug('Warning: sensor of type custom is not recommended.', param.name);
}
const component = {
type: TYPE_SENSOR,
name,
label,
path,
sensor: {
type,
...(type === SENSOR_TYPE_RANGE
? {
range: validateRange(param.range),
unit: param.unit ? encodeURIComponent(param.unit) : SLIDER_DEFAULT_UNIT,
}
: {}),
},
};
return component;
}
function buildLegacyFallbackSensor(pathPrefix: string, param: Models.Sensor.Descriptor) {
debug(
'Warning: no type for sensor %s, using default. ' +
'This fallback will be removed in a future version.',
param.name
);
const component = buildSensorHelper(pathPrefix, param, SENSOR_TYPE_RANGE);
// To avoid breaking changes we keep the non standard no suffix name
const legacyNoSuffixName = encodeURIComponent(param.name);
component.name = legacyNoSuffixName;
component.path = pathPrefix + legacyNoSuffixName;
return component;
}
export function buildTextLabel(
pathPrefix: string,
param: Models.TextLabel.Descriptor
) {
validateParameter(pathPrefix, param);
const name = encodeURIComponent(param.name);
const path = pathPrefix + name;
const label = param.label ? encodeURIComponent(param.label) : name;
return {
type: TYPE_TEXTLABEL,
name,
label,
path,
sensor: getSensorNameIfNeeded(name),
isLabelVisible: param.isLabelVisible,
};
}
function validateImageSize(size: string) {
if (!VALID_IMAGEURL_SIZES.includes(size)) {
throw new Error('INVALID_IMAGEURL_SIZE');
}
}
export function buildImageUrl(
pathPrefix: string,
param: Models.Image.Descriptor
) {
validateParameter(pathPrefix, param);
const name = encodeURIComponent(param.name);
const path = pathPrefix + name;
const imageUri = param.uri || null;
const label = param.label ? encodeURIComponent(param.label) : name;
const size = param.size || 'large';
if (!param.size) {
debug('warning, no size definition found for image, use large');
}
validateImageSize(size);
return {
type: TYPE_IMAGEURL,
name,
label,
imageUri,
size,
path,
sensor: getSensorNameIfNeeded(name),
};
}
export function buildDiscovery(pathPrefix: string) {
return getRouteFor(pathPrefix, TYPE_DISCOVER_ROUTE);
}
export function buildRegister(pathPrefix: string) {<|fim▁hole|> return getRouteFor(pathPrefix, TYPE_REGISTER_ROUTE);
}
export function buildDeviceSubscription(pathPrefix: string) {
return getRouteFor(pathPrefix, TYPE_DEVICE_SUBSCRIPTION_ROUTE);
}
export function buildFavoritesHandler(pathPrefix: string) {
return getRouteFor(pathPrefix, TYPE_FAVORITE_HANDLER_ROUTE);
}
function validateParameter(pathPrefix: string, param: { name: string }) {
if (!pathPrefix) {
throw new Error('INVALID_PATHPREFIX');
}
if (!param || !param.name) {
throw new Error('INVALID_BUILD_PARAMETER');
}
}
function getRouteFor(pathPrefix: string, route: string) {
if (!pathPrefix) {
throw new Error('INVALID_PATHPREFIX');
}
const path = pathPrefix + route;
return {
type: route,
name: route,
path,
};
}
function getSensorNameIfNeeded(name: string) {
const alreadySensorName = name.endsWith(SENSOR_SUFFIX);
if (alreadySensorName) {
return name;
}
return name.toUpperCase() + SENSOR_SUFFIX;
}<|fim▁end|> | |
<|file_name|>SQLTransactionRollbackException.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package java.sql;
public class SQLTransactionRollbackException extends SQLTransientException {
private static final long serialVersionUID = 5246680841170837229L;
/**
* Creates an SQLTransactionRollbackException object. The Reason string is
* set to null, the SQLState string is set to null and the Error Code is set
* to 0.
*/
public SQLTransactionRollbackException() {
super();
}
/**
* Creates an SQLTransactionRollbackException object. The Reason string is
* set to the given reason string, the SQLState string is set to null and
* the Error Code is set to 0.
*
* @param reason
* the string to use as the Reason string
*/
public SQLTransactionRollbackException(String reason) {
super(reason, null, 0);
}
/**
* Creates an SQLTransactionRollbackException object. The Reason string is
* set to the given reason string, the SQLState string is set to the given
* SQLState string and the Error Code is set to 0.
*
* @param reason
* the string to use as the Reason string
* @param sqlState
* the string to use as the SQLState string
*/
public SQLTransactionRollbackException(String reason, String sqlState) {
super(reason, sqlState, 0);
}
/**
* Creates an SQLTransactionRollbackException object. The Reason string is
* set to the given reason string, the SQLState string is set to the given
* SQLState string and the Error Code is set to the given error code value.
*
* @param reason
* the string to use as the Reason string
* @param sqlState
* the string to use as the SQLState string
* @param vendorCode
* the integer value for the error code
*/
public SQLTransactionRollbackException(String reason, String sqlState,
int vendorCode) {
super(reason, sqlState, vendorCode);
}
/**
* Creates an SQLTransactionRollbackException object. The Reason string is
* set to the null if cause == null or cause.toString() if cause!=null,and
* the cause Throwable object is set to the given cause Throwable object.
*
* @param cause
* the Throwable object for the underlying reason this
* SQLException
*/
public SQLTransactionRollbackException(Throwable cause) {
super(cause);
}
/**
* Creates an SQLTransactionRollbackException object. The Reason string is
* set to the given and the cause Throwable object is set to the given cause
* Throwable object.
*
* @param reason
* the string to use as the Reason string
* @param cause
* the Throwable object for the underlying reason this
* SQLException
*/
public SQLTransactionRollbackException(String reason, Throwable cause) {
super(reason, cause);
}
/**
* Creates an SQLTransactionRollbackException object. The Reason string is
* set to the given reason string, the SQLState string is set to the given
* SQLState string and the cause Throwable object is set to the given cause
* Throwable object.
*
* @param reason
* the string to use as the Reason string
* @param sqlState
* the string to use as the SQLState string
* @param cause
* the Throwable object for the underlying reason this
* SQLException
*/
public SQLTransactionRollbackException(String reason, String sqlState,
Throwable cause) {
super(reason, sqlState, cause);
}
/**
* Creates an SQLTransactionRollbackException object. The Reason string is
* set to the given reason string, the SQLState string is set to the given
* SQLState string , the Error Code is set to the given error code value,
* and the cause Throwable object is set to the given cause Throwable
* object.
*
<|fim▁hole|> * @param sqlState
* the string to use as the SQLState string
* @param vendorCode
* the integer value for the error code
* @param cause
* the Throwable object for the underlying reason this
* SQLException
*/
public SQLTransactionRollbackException(String reason, String sqlState,
int vendorCode, Throwable cause) {
super(reason, sqlState, vendorCode, cause);
}
}<|fim▁end|> | * @param reason
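// Illustrative only (not part of the class): a driver signaling a
// serialization failure would typically chain the underlying cause, e.g.
//
//   throw new SQLTransactionRollbackException(
//           "could not serialize access", "40001", 0, deadlockCause);
//
// "40001" is the standard SQLSTATE for a rollback due to serialization
// failure; deadlockCause is a hypothetical Throwable from the example.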
* the string to use as the Reason string
|
<|file_name|>testTFIDF.py<|end_file_name|><|fim▁begin|>from tfidf import *
import psycopg2
import psycopg2.extensions
import math
def cos_sim(A,B):
def dot_product(a,b):
sum = 0.0
for key in a.keys():
if key in b:
sum += a[key]*b[key]
return sum
return dot_product(A,B)/(math.sqrt(dot_product(A,A)) * math.sqrt(dot_product(B,B)))
conn = psycopg2.connect("host=localhost dbname=SOFTFile user=AUREA password=AUREA")
conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
c = conn.cursor()
qry = "SELECT dataset_id, dataset_title, dataset_description \
FROM dataset"
#WHERE dataset_id < 20"
c.execute(qry)
documentList = []<|fim▁hole|>for id,title, description in c.fetchall():
documentList.append(title + description)
docMap.append(id)
c.close()
vectors = []
print "gotDocs"
for x in range(len(documentList)):
words = {}
for word in documentList[documentNumber].split(None):
words[word] = tfidf(word,documentList[documentNumber],documentList)
#for item in sorted(words.items(), key=itemgetter(1), reverse=True):
# print "%f <= %s" % (item[1], item[0])
vectors.append(words)
documentNumber = x+1
print "got vectors"
sim = []
for i in range(len(vectors[:-1])):
for j in range(i+1, len(vectors)):
sim = cos_sim(vectors[i], vectors[j])
db_id1 = docMap[i]
db_id2 = docMap[j]
qry = "INSERT into cosine_similarity(id1, id2, score) VALUES (%s, %s, %s)"
c = conn.cursor()
c.execute(qry, (db_id1, db_id2, sim))
c.close()<|fim▁end|> | documentNumber = 0
docMap = [] |
<|file_name|>deeper_ns_list.py<|end_file_name|><|fim▁begin|>from invoke import task, Collection
@task
def toplevel(ctx):
pass
@task
def subtask(ctx):
pass
ns = Collection(<|fim▁hole|>)<|fim▁end|> | toplevel,
Collection('a', subtask,
Collection('nother', subtask)
) |
<|file_name|>IDomFacade.ts<|end_file_name|><|fim▁begin|>import { Bucket } from '../expressions/util/Bucket';
import { Attr, CharacterData, Element, Node } from '../types/Types';
/**
* The base interface of a dom facade
*
* @public
*/
export default interface IDomFacade {
/**
* Get all attributes of this element.
* The bucket can be used to narrow down which attributes should be retrieved.
*
* @param node -
* @param bucket - The bucket that matches the attribute that will be used.
*/
getAllAttributes(node: Element, bucket?: Bucket | null): Attr[];
/**
* Get the value of specified attribute of this element.
*
* @param node -
* @param attributeName -
*/
getAttribute(node: Element, attributeName: string): string | null;
/**
* Get all child nodes of this element.
* The bucket can be used to narrow down which child nodes should be retrieved.
*
* @param node -
	 * @param bucket - The bucket that matches the child nodes that will be used.
*/
getChildNodes(node: Node, bucket?: Bucket | null): Node[];
/**
* Get the data of this element.
*
* @param node -
*/
getData(node: Attr | CharacterData): string;<|fim▁hole|>
/**
* Get the first child of this element.
* An implementation of IDomFacade is free to interpret the bucket to skip returning nodes that do not match the bucket, or use this information to its advantage.
*
* @param node -
	 * @param bucket - The bucket that matches the first child that will be used.
*/
getFirstChild(node: Node, bucket?: Bucket | null): Node | null;
/**
* Get the last child of this element.
* An implementation of IDomFacade is free to interpret the bucket to skip returning nodes that do not match the bucket, or use this information to its advantage.
*
* @param node -
	 * @param bucket - The bucket that matches the last child that will be used.
*/
getLastChild(node: Node, bucket?: Bucket | null): Node | null;
/**
* Get the next sibling of this node
* An implementation of IDomFacade is free to interpret the bucket to skip returning nodes that do not match the bucket, or use this information to its advantage.
*
* @param node -
* @param bucket - The bucket that matches the nextSibling that is requested.
*/
getNextSibling(node: Node, bucket?: Bucket | null): Node | null;
/**
* Get the parent of this element.
* An implementation of IDomFacade is free to interpret the bucket to skip returning nodes that do not match the bucket, or use this information to its advantage.
*
* @param node -
	 * @param bucket - The bucket that matches the parent node that will be used.
*/
getParentNode(node: Node, bucket?: Bucket | null): Node | null;
/**
* Get the previous sibling of this element.
* An implementation of IDomFacade is free to interpret the bucket to skip returning nodes that do not match the bucket, or use this information to its advantage.
*
* @param node -
	 * @param bucket - The bucket that matches the previous sibling that will be used.
*/
getPreviousSibling(node: Node, bucket?: Bucket | null): Node | null;
}<|fim▁end|> | |
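// Minimal sketch of an implementer (assumed shape, not part of this package):
// a facade that walks the live DOM directly and ignores the bucket hints,
// which the interface explicitly allows.
//
//   class PlainDomFacade implements IDomFacade {
//     getParentNode(node: Node) { return node.parentNode; }
//     getFirstChild(node: Node) { return node.firstChild; }
//     // ...the remaining methods delegate to the DOM in the same way
//   }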
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|>"""
WSGI config for board project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os<|fim▁hole|><|fim▁end|> | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "board.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application() |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>#[macro_use]
// has macros, must go first
mod utils;
pub mod annotation;
pub mod attribute;
pub mod attribute_group;
pub mod attributes;
pub mod choice;
pub mod common;
pub mod complex_content;
pub mod complex_type;
pub mod constants;
pub mod element;
pub mod extension;
pub mod group;
pub mod id;
pub mod import;
pub mod list;
pub mod primitives;
pub mod restriction;
pub mod sequence;
pub mod simple_content;
pub mod simple_type;
pub mod union;
use crate::error::Result;
use crate::xsd::annotation::Annotation;
use crate::xsd::attribute_group::AttributeGroup;
use crate::xsd::complex_type::ComplexType;
use crate::xsd::constants::{
ANNOTATION, ATTRIBUTE_GROUP, BASE, COMPLEX_TYPE, DEFAULT, ELEMENT, FIXED, GROUP, IMPORT,
MAX_OCCURS, MIN_OCCURS, NAME, NAMESPACE, REF, REQUIRED, SIMPLE_TYPE, TYPE, UNBOUNDED, USE,
VALUE,
};
use crate::xsd::element::Element;
use crate::xsd::group::GroupDefinition;
use crate::xsd::id::{Id, Lineage, RootNodeType};
use crate::xsd::import::Import;
use crate::xsd::simple_type::SimpleType;
use std::cmp::Ordering;
use std::collections::BTreeMap;
use std::fmt::{Display, Formatter};
use std::path::Path;
#[derive(Clone, Debug)]
pub struct Xsd {
entries: Vec<Entry>,
prefix: String,
}
impl Default for Xsd {
fn default() -> Self {
Self {
entries: Vec::new(),
prefix: "xs".to_owned(),
}
}
}
impl Xsd {
pub fn load<P: AsRef<Path>>(filepath: P) -> Result<Self> {
let xml_str = wrap!(
std::fs::read_to_string(filepath.as_ref()),
"unable to load '{}'",
filepath.as_ref().display()
)?;
let doc = exile::parse(&xml_str).unwrap();
Self::parse(doc.root())
}
pub fn parse(root: &exile::Element) -> Result<Self> {
if root.name != "schema" {
return raise!("expected the root node to be named 'schema'");
}
let mut prefix = "";
for (k, v) in root.attributes.map() {
if v.as_str() == "http://www.w3.org/2001/XMLSchema" {
if k.starts_with("xmlns:") {
let mut split = k.split(':');
let _ = split.next().ok_or(make_err!("expected to find xmlns:"))?;
let ns: &str = split
.next()
.ok_or(make_err!("expected to find xmlns prefix"))?;
prefix = ns;
break;
}
}
}
if prefix.is_empty() {
return raise!("xmlns prefix is empty");
}
let mut xsd = Xsd {
entries: Vec::new(),
prefix: prefix.to_owned(),
};
for (i, entry_node) in root.children().enumerate() {
let entry = Entry::from_xml(entry_node, Lineage::Index(i as u64), &xsd)?;
xsd.add_entry(entry)?;
}
Ok(xsd)
}
pub fn new<S: AsRef<str>>(prefix: S) -> Self {
Self {
entries: Vec::new(),
prefix: prefix.as_ref().into(),
}
}
pub fn prefix(&self) -> &str {
self.prefix.as_str()
}
pub fn add_entry(&mut self, entry: Entry) -> Result<()> {
// TODO - make an efficient storage
self.entries.push(entry);
Ok(())
}
pub fn find(&self, id: &Id) -> Result<&Entry> {
// TODO - make an efficient lookup
for entry in &self.entries {
if entry.id() == id {
return Ok(entry);
}
}
raise!("id '{}' not found", id)
}
pub fn remove(&mut self, id: &Id) -> Result<Entry> {
// TODO - efficient removal
let mut pos = None;
for (i, entry) in self.entries.iter().enumerate() {
if entry.id() == id {
pos = Some(i);
break;
}
}
if let Some(i) = pos {
// Note - this can panic, but shouldn't unless a data race occurs.
Ok(self.entries.remove(i))
} else {
raise!("entry '{}' not found", id)
}
}
// TODO - this should be an iterator so the underlying data structure can change.
pub fn entries(&self) -> &Vec<Entry> {
&self.entries
}
}
impl Display for Xsd {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
for entry in &self.entries {
writeln!(f, "{}", entry.id())?;
}
Ok(())
}
}
#[derive(Clone, Debug)]
pub enum Entry {
Annotation(Annotation),
AttributeGroup(AttributeGroup),
ComplexType(ComplexType),
Element(Element),
Group(GroupDefinition),
Import(Import),
SimpleType(SimpleType),
}
impl Entry {
pub fn from_xml(node: &exile::Element, lineage: Lineage, xsd: &Xsd) -> Result<Self> {
let n = node.name.as_str();
let t = RootNodeType::parse(n)?;
match t {
RootNodeType::Annotation => {
Ok(Entry::Annotation(Annotation::from_xml(node, lineage, xsd)?))
}
RootNodeType::AttributeGroup => Ok(Entry::AttributeGroup(AttributeGroup::from_xml(
node, lineage, xsd,
)?)),
RootNodeType::ComplexType => Ok(Entry::ComplexType(ComplexType::from_xml(
node, lineage, xsd,
)?)),
RootNodeType::Element => Ok(Entry::Element(Element::from_xml(node, lineage, xsd)?)),
RootNodeType::Group => Ok(Entry::Group(GroupDefinition::from_xml(node, lineage, xsd)?)),
RootNodeType::Import => Ok(Entry::Import(Import::from_xml(node, lineage, xsd)?)),
RootNodeType::SimpleType => {
Ok(Entry::SimpleType(SimpleType::from_xml(node, lineage, xsd)?))
}
}
}
pub fn id(&self) -> &Id {
match self {
Entry::Annotation(x) => &x.id,
Entry::AttributeGroup(x) => x.id(),
Entry::ComplexType(x) => &x.id,
Entry::Element(x) => x.id(),
Entry::Group(x) => &x.id,
Entry::Import(x) => &x.id,
Entry::SimpleType(x) => &x.id,
}
}
pub fn documentation(&self) -> String {
match self {
Entry::Annotation(x) => x.documentation(),
Entry::AttributeGroup(x) => x.documentation(),
Entry::ComplexType(x) => x.documentation(),
Entry::Element(x) => x.documentation(),
Entry::Group(x) => x.documentation(),
Entry::Import(x) => x.documentation(),
Entry::SimpleType(x) => x.documentation(),
}
}
}
pub(crate) fn get_attribute<S: AsRef<str>>(
node: &exile::Element,
attribute_name: S,
) -> Result<String> {
Ok(node
.attributes
.map()
.get(attribute_name.as_ref())
.ok_or(make_err!(
"'{}' attribute not found in '{}' node",
attribute_name.as_ref(),
node.name.as_str()
))?
.clone())
}
pub(crate) fn name_attribute(node: &exile::Element) -> Result<String> {
get_attribute(node, NAME)
}
pub(crate) fn namespace_attribute(node: &exile::Element) -> Result<String> {
get_attribute(node, NAMESPACE)
}
pub(crate) fn value_attribute(node: &exile::Element) -> Result<String> {
get_attribute(node, VALUE)
}
pub(crate) fn ref_attribute(node: &exile::Element) -> Result<String> {
get_attribute(node, REF)
}
pub(crate) fn type_attribute(node: &exile::Element) -> Result<String> {
get_attribute(node, TYPE)
}
pub(crate) fn use_required(node: &exile::Element) -> bool {
match get_attribute(node, USE) {
Ok(val) => val.as_str() == REQUIRED,
Err(_) => false,
}
}
pub(crate) fn default_attribute(node: &exile::Element) -> Option<String> {
node.attributes.map().get(DEFAULT).cloned()
}
pub(crate) fn fixed_attribute(node: &exile::Element) -> Option<String> {
node.attributes.map().get(FIXED).cloned()
}
pub(crate) fn is_ref(node: &exile::Element) -> bool {
node.attributes.map().get(REF).is_some()
}
pub(crate) fn base_attribute(node: &exile::Element) -> Result<String> {
get_attribute(node, BASE)
}
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
pub struct Occurs {
pub min_occurs: u64,
/// None means `unbounded`
pub max_occurs: Option<u64>,
}
impl Default for Occurs {
fn default() -> Self {
Self {
min_occurs: 1,
max_occurs: Some(1),
}
}
}
impl Occurs {
pub fn from_xml(node: &exile::Element) -> Result<Occurs> {
Ok(Self::from_map(node.attributes.map())?)
}
pub fn from_map(map: &BTreeMap<String, String>) -> Result<Occurs> {
let min_occurs: u64 = if let Some(sval) = map.get(MIN_OCCURS) {
wrap!(sval.parse::<u64>())?
} else {
1
};
let max_occurs: Option<u64> = if let Some(sval) = map.get(MAX_OCCURS) {
if sval.as_str() == UNBOUNDED {
None
} else {
Some(wrap!(sval.parse::<u64>())?)
}
} else {
Some(1)
};
if let Some(the_max) = max_occurs {
if min_occurs > the_max {
return raise!(
"{} cannot be greater than {}, in this case {} is {} and {} is {}",
MIN_OCCURS,
MAX_OCCURS,
MIN_OCCURS,
min_occurs,
MAX_OCCURS,
the_max
);
}
}
Ok(Self {
min_occurs,
max_occurs,
})
}
}
#[test]
fn parse_occurs() {
let test_cases = vec![
(
r#"<xyz minOccurs="1"/>"#,
Occurs {
min_occurs: 1,
max_occurs: Some(1),
},
),
(
r#"<xyz maxOccurs="unbounded"/>"#,
Occurs {
min_occurs: 1,
max_occurs: None,
},
),
(
r#"<xyz/>"#,
Occurs {
min_occurs: 1,
max_occurs: Some(1),
},
),
(
r#"<xyz minOccurs="2" maxOccurs="3"/>"#,
Occurs {
min_occurs: 2,
max_occurs: Some(3),
},
),
];
for (xml, want) in test_cases {
let doc = exile::parse(xml).unwrap();
let got = Occurs::from_xml(doc.root()).unwrap();
assert_eq!(got, want)
}
}
#[test]<|fim▁hole|> r#"<xyz minOccurs="10" maxOccurs="1"/>"#,
r#"<xyz maxOccurs="unexpectedString"/>"#,
];
for xml in test_cases {
let doc = exile::parse(xml).unwrap();
assert!(Occurs::from_xml(doc.root()).is_err());
}
}<|fim▁end|> | fn parse_occurs_err() {
let test_cases = vec![ |
<|file_name|>smooth.py<|end_file_name|><|fim▁begin|>'''<|fim▁hole|>@author: jiayu.zhou
'''
import numpy as np;
def least_squares(w, X, y):
'''
least squares loss.
MATLAB verified function.
f(x) = 1/2 * ||X * w - y||_F^2.
Parameters
----------
w: np.matrix
X: np.matrix
y: np.matrix
Returns
----------
'''
Xw_y = np.dot(X, w) - y;
f = 0.5 * np.linalg.norm(Xw_y, 'fro')**2;
g = np.dot(X.T, Xw_y);
g = g.reshape(g.shape[0] * g.shape[1] , 1, order = 'F');
return [f, g];<|fim▁end|> | A set of (smooth) loss functions.
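# Quick numerical check (illustrative, not part of the module): with X = I
# and y = 0 the loss reduces to 1/2 * ||w||^2 and the gradient to w itself.
#   w = np.matrix([[1.0], [2.0]])
#   X = np.matrix(np.eye(2)); y = np.matrix(np.zeros((2, 1)))
#   f, g = least_squares(w, X, y)   # f == 2.5, g == w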
Created on Oct 2, 2014
|
<|file_name|>cast-in-array-size.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.<|fim▁hole|>
// run-pass
// issues #10618 and #16382
// pretty-expanded FIXME #23616
const SIZE: isize = 25;
fn main() {
let _a: [bool; 1 as usize];
let _b: [isize; SIZE as usize] = [1; SIZE as usize];
let _c: [bool; '\n' as usize] = [true; '\n' as usize];
let _d: [bool; true as usize] = [true; true as usize];
}<|fim▁end|> | |
<|file_name|>Filter.py<|end_file_name|><|fim▁begin|>"""
Copyright (C) 2008 by Steven Wallace
snwallace@gmail.com
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of<|fim▁hole|> along with this program; if not, write to the
Free Software Foundation, Inc.,
59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""
from __future__ import with_statement
import struct
import threading
import sys, traceback, time
def cascadeSetIn(a, b):
a.setIn(b)
return b
class NetworkException(Exception):
pass
class Filter:
def __init__(self, *args):
self.input = None
self.output = None
self.server = False
self.master = None
self.initialized = threading.Event()
self.wlock = threading.Lock()
self.rlock = threading.Lock()
self.init_lock = threading.Lock()
self._init(*args)
def _init(self, *args):
pass
def disconnect(self):
if self.input:
self.input.disconnect()
def begin(self):
with self.init_lock:
if not self.initialized.isSet():
self._begin()
if self.input:
if not self.initialized.isSet():
self.initialized.wait()
self.input.begin()
def _begin(self):
self.initialized.set()
def end(self):
if self.output:
self.output.end()
def setIn(self, input = None):
self.input = input
if input:
input.setOut(self)
def setOut(self, output = None):
self.output = output
def readIn(self, data):
self.writeOut(data)
def readOut(self, data):
with self.rlock:
self._readOut(data)
def _readOut(self, data):
self.writeIn(data)
def writeIn(self, data):
if self.input:
self.input.readOut(data)
def writeOut(self, data):
self.initialized.wait()
with self.wlock:
self._writeOut(data)
def _writeOut(self, data):
if self.output:
self.output.readIn(data)
def error(self, error):
raise NetworkException(error)
class PacketizerFilter(Filter):
def _init(self):
self.received = ""
def _readOut(self, data):
self.received += data
while len(self.received) > 3:
length ,= struct.unpack("!i",self.received[:4])
if length + 4 <= len(self.received):
self.writeIn(self.received[4:length+4])
self.received = self.received[length+4:]
else:
return
def _writeOut(self, data):
Filter._writeOut(self, struct.pack("!i",len(data))+data)
class CompressionFilter(Filter):
def _init(self):
self.algorithms = {}
self.otherAlgorithms = []
try:
import zlib
self.algorithms['z'] = zlib
except:
pass
try:
import bz2
self.algorithms['b'] = bz2
except:
pass
try:
import noCompress
self.algorithms['n'] = noCompress
except:
pass
def _begin(self):
if self.server:
self._writeOut(''.join(self.algorithms.keys()))
def _readOut(self, data):
if not self.initialized.isSet():
if self.server:
self.otherAlgorithms = [i for i in data]
self.initialized.set()
self.begin()
else:
self.otherAlgorithms = [i for i in data]
self._writeOut(''.join(self.algorithms.keys()))
self.initialized.set()
self.begin()
else:
algorithm = data[0]
if algorithm not in self.algorithms:
self.error("UNKNOWN COMPRESSION ALGORITHM " + data)
self.writeIn(self.algorithms[algorithm].decompress(data[1:]))
def _writeOut(self, data):
        if not self.initialized.isSet():  # Event objects are always truthy; test the flag itself
Filter._writeOut(self, data)
else:
algorithm = 'n'
newData = data
for i in self.otherAlgorithms:
if i in self.algorithms:
tmpData = self.algorithms[i].compress(data, 9)
if len(tmpData) < len(newData):
newData = tmpData
algorithm = i
Filter._writeOut(self, ''.join((algorithm, newData)))
def EncryptionFilter(Filter):
pass #TODO
class TCPFilter(Filter):
def _init(self, connection = None):
self.connection = connection
def _writeOut(self, data):
if self.connection:
try:
self.connection.send(data)
except:
pass
def poll(self):
try:
data = self.connection.recv(4096)
if data:
self.readOut(data)
else:
self.disconnect()
except:
print "bleh!"
traceback.print_exc(file=sys.stdout)
self.disconnect()
def disconnect(self):
self.master.remove(self.connection)
if self.connection:
self.connection.close()
Filter.disconnect(self)
def end(self):
self.disconnect()<|fim▁end|> | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License |
<|file_name|>patrol.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Patrols the edges of the map going clockwise. Just drives around in circles,
taking pains not to hit a wall.
When we see a wall, turn right a little.
"""
from courier import RoboLink
robot = RoboLink.connect(name="Patroller", scanner=2, engine=5)<|fim▁hole|>
# Should probably be handled by RoboLink.connect, but whatever.
if not robot:
print "Error connecting"
exit(1)
# The below will be changed to 'while not robot.dead:' just as soon as I
# implement that.
while True:
# Full speed ahead! Doing this every tic just in case we hit a wall or stop
# or something. Setting the throttle doesn't take very long.
robot.throttle = 100
if robot.scan_wall() is not None:
# If we see a wall, turn right ten degrees.
robot.turn(10)<|fim▁end|> | # ^^^ by default, we get a 5-quality
# scanner and a 2-quality engine. well, we want to
# move faster, so we'll soup up our engine instead. |
<|file_name|>quotesAPI.ts<|end_file_name|><|fim▁begin|>import { AddQuoteParameters } from '@mymicds/sdk';
import { assertType } from 'typescript-is';
import * as api from '../libs/api';<|fim▁hole|>
import * as Random from 'random-js';
const engine = Random.engines.mt19937().autoSeed();
export default ((app, db) => {
app.get('/quote', async (req, res) => {
try {
const quotesResult = await quotes.get(db);
const quote = Random.pick(engine, quotesResult);
api.success(res, { quote });
} catch (err) {
api.error(res, err);
}
});
app.post('/quote', async (req, res) => {
try {
assertType<AddQuoteParameters>(req.body);
await quotes.insert(db, req.body.author, req.body.quote);
api.success(res);
} catch (err) {
api.error(res, err);
}
});
}) as RoutesFunction;<|fim▁end|> | import * as quotes from '../libs/quotes';
import RoutesFunction from './routesFunction'; |
<|file_name|>termination-trait-for-impl-termination.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![feature(termination_trait_lib)]
<|fim▁hole|><|fim▁end|> | fn main() -> impl std::process::Termination { } |
<|file_name|>0012_add_asset_uid_to_xform.py<|end_file_name|><|fim▁begin|># coding: utf-8
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('logger', '0011_add-index-to-instance-uuid_and_xform_uuid'),
]
operations = [
migrations.AddField(
model_name='xform',
name='kpi_asset_uid',<|fim▁hole|> ),
]<|fim▁end|> | field=models.CharField(max_length=32, null=True), |
<|file_name|>rome_fields_dict.py<|end_file_name|><|fim▁begin|><|fim▁hole|> 'ROME-FIELD-05':[ 268.35435 , -30.2578356389 , '17:53:25.044','-30:15:28.2083' ],
'ROME-FIELD-06':[ 268.356124833 , -29.7729819283 , '17:53:25.47','-29:46:22.7349' ],
'ROME-FIELD-07':[ 268.529571333 , -28.6937071111 , '17:54:07.0971','-28:41:37.3456' ],
'ROME-FIELD-08':[ 268.709737083 , -29.1867251944 , '17:54:50.3369','-29:11:12.2107' ],
'ROME-FIELD-09':[ 268.881108542 , -29.7704673333 , '17:55:31.4661','-29:46:13.6824' ],
'ROME-FIELD-10':[ 269.048498333 , -28.6440675 , '17:56:11.6396','-28:38:38.643' ],
'ROME-FIELD-11':[ 269.23883225 , -29.2716684211 , '17:56:57.3197','-29:16:18.0063' ],
'ROME-FIELD-12':[ 269.39478875 , -30.0992361667 , '17:57:34.7493','-30:05:57.2502' ],
'ROME-FIELD-13':[ 269.563719375 , -28.4422328996 , '17:58:15.2927','-28:26:32.0384' ],
'ROME-FIELD-14':[ 269.758843 , -29.1796030365 , '17:59:02.1223','-29:10:46.5709' ],
'ROME-FIELD-15':[ 269.78359875 , -29.63940425 , '17:59:08.0637','-29:38:21.8553' ],
'ROME-FIELD-16':[ 270.074981708 , -28.5375585833 , '18:00:17.9956','-28:32:15.2109' ],
'ROME-FIELD-17':[ 270.81 , -28.0978333333 , '18:03:14.4','-28:05:52.2' ],
'ROME-FIELD-18':[ 270.290886667 , -27.9986032778 , '18:01:09.8128','-27:59:54.9718' ],
'ROME-FIELD-19':[ 270.312763708 , -29.0084241944 , '18:01:15.0633','-29:00:30.3271' ],
'ROME-FIELD-20':[ 270.83674125 , -28.8431573889 , '18:03:20.8179','-28:50:35.3666' ]}<|fim▁end|> | field_dict={'ROME-FIELD-01':[ 267.835895375 , -30.0608178195 , '17:51:20.6149','-30:03:38.9442' ],
'ROME-FIELD-02':[ 269.636745458 , -27.9782661111 , '17:58:32.8189','-27:58:41.758' ],
'ROME-FIELD-03':[ 268.000049542 , -28.8195573333 , '17:52:00.0119','-28:49:10.4064' ],
'ROME-FIELD-04':[ 268.180171708 , -29.27851275 , '17:52:43.2412','-29:16:42.6459' ],
|
<|file_name|>remainingTimePipe.js<|end_file_name|><|fim▁begin|>"use strict";
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __metadata = (this && this.__metadata) || function (k, v) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
var core_1 = require("@angular/core");
var RemainingTimePipe = (function () {
function RemainingTimePipe() {
}
RemainingTimePipe.prototype.transform = function (date) {
var DaysInMonths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31];
var Months = "JanFebMarAprMayJunJulAugSepOctNovDec";
var padding = "in ";
//Input pattern example: 2017-01-02T09:23:00.000Z
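        // A sketch of expected outputs (illustrative only; the result depends on
        // the system clock at call time):
        //   transform('2017-01-02T09:23:00.000Z') -> 'in 2 years' (run in 2015)
        //   transform('2017-01-02T09:23:00.000Z') -> 'Started'    (run after the date)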
var input = date + "";
var splitted = input.split('T');
var today = new Date();
var year = +splitted[0].split('-')[0];
var month = +splitted[0].split('-')[1];
var day = +splitted[0].split('-')[2];
var splittedTime = splitted[1].split(':');
var hour = +splittedTime[0];
var minute = +splittedTime[1];
var second = +splittedTime[2].split('.')[0];
//Years
var currentYear = today.getFullYear();
var remaining = year - currentYear;
if (remaining < 0) {
return 'Started';
}
if (remaining > 0) {
if (remaining == 1) {
return padding + '1 year';
}
return padding + remaining + ' years';
}
//Months
var currentMonth = today.getMonth() + 1;
remaining = month - currentMonth;
if (remaining > 0) {
if (remaining == 1) {
//TODO Leap year
var currentDate = today.getDate();
var daysInPreviousMonth = (month != 0 ? DaysInMonths[month - 1] : DaysInMonths[11]);
var daysRemaining = (daysInPreviousMonth + day) - currentDate;
if (daysRemaining < 7) {
if (daysRemaining == 1) {
return padding + '1 day';
}
return padding + daysRemaining + ' days';
}
var weeksPassed = daysRemaining / 7;
weeksPassed = Math.round(weeksPassed);
if (weeksPassed == 1) {
return padding + '1 week';
}
return padding + weeksPassed + ' weeks';
}
return padding + remaining + ' months';
}
//Days
var currentDay = today.getDate();
var daysPassed = day - currentDay;
if (daysPassed > 0) {
if (daysPassed < 7) {
if (daysPassed == 1) {
return padding + '1 day';
}
return padding + daysPassed + ' days';
}
var weeksPassed = daysPassed / 7;
weeksPassed = Math.round(weeksPassed);
if (weeksPassed == 1) {
return padding + '1 week';
}
return padding + weeksPassed + ' weeks';
}
//Hours
var currentHour = today.getHours();
remaining = hour - currentHour;
if (remaining > 1) {
if (remaining == 2) {
return padding + '1 hour';
}
return padding + remaining + ' hours';
}
//Minutes
var currentMinute = today.getMinutes();
if (remaining == 1) {<|fim▁hole|> }
if (remaining > 0) {
if (remaining == 1) {
return padding + 'a minute';
}
return padding + remaining + ' minutes';
}
//Seconds
var currentSecond = today.getSeconds();
remaining = second - currentSecond;
if (remaining > 0) {
return padding + 'less than a minute';
}
return 'Started';
};
return RemainingTimePipe;
}());
RemainingTimePipe = __decorate([
core_1.Pipe({
name: 'remainingTimePipe'
}),
__metadata("design:paramtypes", [])
], RemainingTimePipe);
exports.RemainingTimePipe = RemainingTimePipe;
//# sourceMappingURL=remainingTimePipe.js.map<|fim▁end|> | remaining = 60 + minute - currentMinute;
}
else {
remaining = minute - currentMinute; |
<|file_name|>test_oneview_ethernet_network_facts.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
###
# Copyright (2016-2019) Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###
import pytest
from mock import mock
from hpe_test_utils import OneViewBaseFactsTest
from oneview_module_loader import EthernetNetworkFactsModule
ERROR_MSG = 'Fake message error'
PARAMS_GET_ALL = dict(<|fim▁hole|>)
PARAMS_GET_BY_NAME = dict(
config='config.json',
name="Test Ethernet Network",
options=[]
)
PARAMS_GET_BY_NAME_WITH_OPTIONS = dict(
config='config.json',
name="Test Ethernet Network",
options=['associatedProfiles', 'associatedUplinkGroups']
)
PRESENT_ENETS = [{
"name": "Test Ethernet Network",
"uri": "/rest/ethernet-networks/d34dcf5e-0d8e-441c-b00d-e1dd6a067188"
}]
ENET_ASSOCIATED_UPLINK_GROUP_URIS = [
"/rest/uplink-sets/c6bf9af9-48e7-4236-b08a-77684dc258a5",
"/rest/uplink-sets/e2f0031b-52bd-4223-9ac1-d91cb519d548"
]
ENET_ASSOCIATED_PROFILE_URIS = [
"/rest/server-profiles/83e2e117-59dc-4e33-9f24-462af951cbbe",
"/rest/server-profiles/57d3af2a-b6d2-4446-8645-f38dd808ea4d"
]
ENET_ASSOCIATED_UPLINK_GROUPS = [dict(uri=ENET_ASSOCIATED_UPLINK_GROUP_URIS[0], name='Uplink Set 1'),
dict(uri=ENET_ASSOCIATED_UPLINK_GROUP_URIS[1], name='Uplink Set 2')]
ENET_ASSOCIATED_PROFILES = [dict(uri=ENET_ASSOCIATED_PROFILE_URIS[0], name='Server Profile 1'),
dict(uri=ENET_ASSOCIATED_PROFILE_URIS[1], name='Server Profile 2')]
@pytest.mark.resource(TestEthernetNetworkFactsModule='ethernet_networks')
class TestEthernetNetworkFactsModule(OneViewBaseFactsTest):
def test_should_get_all_enets(self):
self.resource.get_all.return_value = PRESENT_ENETS
self.mock_ansible_module.params = PARAMS_GET_ALL
EthernetNetworkFactsModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=False,
ansible_facts=dict(ethernet_networks=(PRESENT_ENETS))
)
def test_should_get_enet_by_name(self):
self.resource.data = PRESENT_ENETS
self.mock_ansible_module.params = PARAMS_GET_BY_NAME
EthernetNetworkFactsModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=False,
ansible_facts=dict(ethernet_networks=(PRESENT_ENETS))
)
def test_should_get_enet_by_name_with_options(self):
self.resource.data = PRESENT_ENETS
self.resource.get_associated_profiles.return_value = ENET_ASSOCIATED_PROFILE_URIS
self.resource.get_associated_uplink_groups.return_value = ENET_ASSOCIATED_UPLINK_GROUP_URIS
profiles = []
for data in ENET_ASSOCIATED_PROFILES:
obj = mock.Mock()
obj.data = data
profiles.append(obj)
uplinks = []
for data in ENET_ASSOCIATED_UPLINK_GROUPS:
obj = mock.Mock()
obj.data = data
uplinks.append(obj)
self.mock_ov_client.server_profiles.get_by_uri.side_effect = profiles
self.mock_ov_client.uplink_sets.get_by_uri.side_effect = uplinks
self.mock_ansible_module.params = PARAMS_GET_BY_NAME_WITH_OPTIONS
EthernetNetworkFactsModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=False,
ansible_facts=dict(ethernet_networks=PRESENT_ENETS,
enet_associated_profiles=ENET_ASSOCIATED_PROFILES,
enet_associated_uplink_groups=ENET_ASSOCIATED_UPLINK_GROUPS)
)
if __name__ == '__main__':
pytest.main([__file__])<|fim▁end|> | config='config.json',
name=None |
<|file_name|>mapUtils.test.js<|end_file_name|><|fim▁begin|>import deepFreeze from 'deep-freeze';
import { arrayToMap, mapKeysToArray } from './mapUtils';
describe('arrayToMap', () => {
it('should create map from 2D array', () => {
const a = [
['key1', 'value1'],
['key2', 'value2']
];
deepFreeze(a);
const result = arrayToMap(a);
expect(result.size).toBe(2);
expect(result.get('key1')).toBe('value1');
expect(result.get('key2')).toBe('value2');
});
<|fim▁hole|>
expect(result.size).toBe(0);
});
});
describe('mapKeysToArray', () => {
it('should create array from map keys in order', () => {
const map = new Map();
map.set('a', 'value1');
map.set('c', 'value2');
map.set('1', 'value3');
map.set('b', 'value4');
map.set('2', 'value5');
const result = mapKeysToArray(map);
expect(result).toEqual(['a', 'c', '1', 'b', '2']);
});
it('should create empty array from new map', () => {
const map = new Map();
const result = mapKeysToArray(map);
expect(result).toEqual([]);
});
});<|fim▁end|> | it('should create empty map from empty array', () => {
const result = arrayToMap([]); |
<|file_name|>repr-c-issue-82792.rs<|end_file_name|><|fim▁begin|>// Regression test for #82792.
// run-pass
#![feature(const_generics_defaults)]
#[repr(C)]
pub struct Loaf<T: Sized, const N: usize = 1> {
head: [T; N],<|fim▁hole|>
fn main() {}<|fim▁end|> | slice: [T],
} |
<|file_name|>do_iter.py<|end_file_name|><|fim▁begin|>#!/bin/python3.5<|fim▁hole|>from collections import Iterable
d = {'a':1, 'b':2, 'c':3}
for key in d:
print(key)
for value in d.values():
print(value)
for ch in 'ABC':
print(ch)
print(isinstance('abc', Iterable) )
print(isinstance(123,Iterable))
for i, value in enumerate(['a', 'b', 'c']):
print(i, value)
for x,y in [(1,1), (2,4), (3,9)]:
print(x,y)
numbers = list(range(1, 11))
print(numbers)<|fim▁end|>
<|file_name|>tutorial_notes.py<|end_file_name|><|fim▁begin|># Copyright (c) 2020 Bartosz Szczesny <bszcz@bszcz.org>
# This program is free software under the MIT license.
print('\n# avoid new line at the beginning')
s = """\
test
"""
print(s)
print('\n# string are immutable')
s = 'string'
try:
s[1] = 'p'
except TypeError as e:
print(e)
print('\n# enumerate() function')
for n, c in enumerate(['a', 'b', 'c']):
print(n, c)
print('\n# list() is an iterator')
print(list(range(10)))
print('\n# else clause in loops')
for i in range(10):
    if i == 100:  # never true, so the loop completes and the else clause runs
break
else:
print('loop did not break')
print('\n# docstrings')
def documented():
"This function is documented."
pass
# now can run: help(documented)
print(documented.__doc__)
print('\n# unpacking arguments')
def unpack(n, c):
print('unpacked:', n, c)
arg_list = [1, 'a']
arg_dict = {'n': 1, 'c': 'a'}
unpack(*arg_list)
unpack(**arg_dict)
print('\n# function annotations')
def annotated(i: int, s: str) -> str:
return 's'
print(annotated.__annotations__)
print('\n# not feeling myself')
class NotSelf():
def __init__(o, n):
o.n = n
def get_n(o):
return o.n
ns = NotSelf(10)
print(ns.get_n())
print('\n# lists operations')
print("""\
a = list()
a.copy() => a[:] # return shallow copy
a.clear() => del a[:]
a.append(item) => a[len(a):] = [item]
a.extend(iterable) => a[len(a):] = iterable
""")
print('\n# set comprehension')
a = 'abracadabra'
s = {x for x in a}
print(a, '->', s)
print('\n# keys can be any immutable type')
d = dict()
d[('a', 1)] = 100
d[('b', 2)] = 200
print(d)
print('\n# dictionary comprehension')
d = {x: 'got ' + str(x) for x in range(3)}
print(d)
print('\n# simple strings as keys')
d = dict(a=1, b=2, c=3)
print(d)
print('\n# reversed() function')
a = reversed(range(10)) # iterator<|fim▁hole|>print('\n# reload import')
# reload a module without
# restarting the interpreter
# or an already running script
import math
import importlib
importlib.reload(math)
print('\n# dir() function')
import builtins
print(dir()) # currently defined
print()
print(dir(math)) # defined by the module
print()
print(dir(builtins)) # build-in objects
print('\n# string formatting')
c = 299_792_458
print(f'Speed of light is {c} m/s.')
print('Speed of light is {c:.0f} km/s.'.format(c=c/1000))
pi = 3.14159
print(f'Pi is {pi:.2f}.')
d = {'a': 1, 'b': 2}
print('A: {a}, B: {b}.'.format(**d))
print('\n# exceptions')
class E1(Exception):
pass
class E2(E1):
pass
for e in [E1, E2, Exception]:
try:
raise e # no need for ()
except E1: # will catch E2 as well
print('E1.')
except E2:
print('E2.')
except: # will catch anything
print('Exception.')
finally:
print('Finally.')
print()
try:
pass
except:
pass
else: # if not exception raised
print('No exception.')
finally:
print('Finally.')
print()
try:
try:
raise E1
except E2:
print('E2.')
except: # will catch anything
raise # re-raise
finally:
print('Finally (E2).')
except E1:
print('E1.')
finally:
print('Finally (E1).')
print('\n# global and nonlocal scope')
def scope_test():
def do_local():
s = 'local'
def do_nonlocal():
nonlocal s
s = 'nonlocal'
def do_global():
global s
s = 'global'
s = 's'
do_local()
print(s)
do_nonlocal()
print(s)
do_global()
print(s)
scope_test()
print(s)
print('\n# instance and subclass')
print(isinstance(1, int))
print(isinstance(1.0, int))
print(issubclass(bool, int))
print('\n# struct')
class Struct:
pass
s = Struct()
s.x = 1
s.y = 2
print(s.x, s.y)
print('\n# generator')
def rev(s):
for i in range(len(s) - 1, -1, -1):
yield s[i]
for c in rev('abc'):
print(c)
print('\n# generator expression')
# like list comprehension
# but with parentheses
s = sum(i * i for i in range(10))
print(s)
print('\n# regex')
import re
# can use \1 in regex string
r = re.sub(r'([0-9]) \1', r'\1', '1 2 2 3 3 3')
print(r)
print('\n# array')
# store numbers of the same type efficiently
import sys
from array import array
l = list([1, 2, 3, 4, 5])
a = array('B', [1, 2, 3, 4, 5]) # B - unsigned byte
print(sys.getsizeof(l))
print(sys.getsizeof(a))
print('\n# float as ratio')
pi = 3.14159
print(pi.as_integer_ratio())
print('\n# float as hex')
pi = 3.14159
print(pi.hex())
print(float.fromhex('0x1.921f9f01b866ep+1'))
print('\n# precise sum')
a = [0.3, 0.3, 0.3, 0.1]
print(sum(a) == 1)
print(math.fsum(a) == 1)<|fim▁end|> | print(list(a))
|
<|file_name|>DropdownToggle.d.ts<|end_file_name|><|fim▁begin|>import * as React from 'react';
declare namespace DropdownToggle {
export interface DropdownToggleProps extends React.HTMLProps<DropdownToggle> {
bsRole?: string;
noCaret?: boolean;
open?: boolean;
title?: string;
useAnchor?: boolean;
bsClass?:string; // Added since v0.30.0
bsStyle?:string | null;<|fim▁hole|>declare class DropdownToggle extends React.Component<DropdownToggle.DropdownToggleProps> { }
export = DropdownToggle;<|fim▁end|> | bsSize?:string;
}
} |
<|file_name|>jstraceable.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use syntax::ext::base::ExtCtxt;
use syntax::codemap::Span;
use syntax::ptr::P;
use syntax::ast::{Item, MetaItem, Expr};
use syntax::ast;
use syntax::attr;
use syntax::ext::build::AstBuilder;
use syntax::ext::deriving::generic::{combine_substructure, EnumMatching, FieldInfo, MethodDef, Struct, Substructure, TraitDef, ty};
use syntax::parse::token::InternedString;
pub fn expand_dom_struct(_: &mut ExtCtxt, _: Span, _: &MetaItem, item: P<Item>) -> P<Item> {
let mut item2 = (*item).clone();
{
let mut add_attr = |s| {
item2.attrs.push(attr::mk_attr_outer(attr::mk_attr_id(), attr::mk_word_item(InternedString::new(s))));
};
add_attr("must_root");<|fim▁hole|> // The following attributes are only for internal usage
add_attr("_generate_reflector");
// #[dom_struct] gets consumed, so this lets us keep around a residue
// Do NOT register a modifier/decorator on this attribute
add_attr("_dom_struct_marker");
}
P(item2)
}
/// Provides the hook to expand `#[jstraceable]` into an implementation of `JSTraceable`
///
/// The expansion basically calls `trace()` on all of the fields of the struct/enum, erroring if they do not implement the method.
pub fn expand_jstraceable(cx: &mut ExtCtxt, span: Span, mitem: &MetaItem, item: &Item, push: &mut FnMut(P<Item>)) {
let trait_def = TraitDef {
span: span,
attributes: Vec::new(),
path: ty::Path::new(vec!("dom","bindings","trace","JSTraceable")),
additional_bounds: Vec::new(),
generics: ty::LifetimeBounds::empty(),
methods: vec![
MethodDef {
name: "trace",
generics: ty::LifetimeBounds::empty(),
explicit_self: ty::borrowed_explicit_self(),
args: vec!(ty::Ptr(box ty::Literal(ty::Path::new(vec!("js","jsapi","JSTracer"))), ty::Raw(ast::MutMutable))),
ret_ty: ty::nil_ty(),
attributes: vec!(attr::mk_attr_outer(attr::mk_attr_id(),
attr::mk_name_value_item_str(InternedString::new("inline"),
InternedString::new("always")))),
combine_substructure: combine_substructure(box jstraceable_substructure)
}
],
associated_types: vec![],
};
trait_def.expand(cx, mitem, item, |a| push(a))
}
// Mostly copied from syntax::ext::deriving::hash
/// Defines how the implementation for `trace()` is to be generated
fn jstraceable_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) -> P<Expr> {
let state_expr = match substr.nonself_args {
[ref state_expr] => state_expr,
_ => cx.span_bug(trait_span, "incorrect number of arguments in `jstraceable`")
};
let trace_ident = substr.method_ident;
let call_trace = |span, thing_expr| {
let expr = cx.expr_method_call(span, thing_expr, trace_ident, vec!(state_expr.clone()));
cx.stmt_expr(expr)
};
let mut stmts = Vec::new();
let fields = match *substr.fields {
Struct(ref fs) | EnumMatching(_, _, ref fs) => fs,
_ => cx.span_bug(trait_span, "impossible substructure in `jstraceable`")
};
for &FieldInfo { ref self_, span, .. } in fields.iter() {
stmts.push(call_trace(span, self_.clone()));
}
cx.expr_block(cx.block(trait_span, stmts, None))
}<|fim▁end|> | add_attr("privatize");
add_attr("jstraceable");
|
<|file_name|>numeric.rs<|end_file_name|><|fim▁begin|>use std::char::{is_whitespace, to_digit};
use std::cmp::max;
use std::num::pow;
#[deriving(PartialOrd, Clone, Show, Eq)]
pub struct Numeric {
value: i64,
len: uint,
precision: uint
}
impl Numeric {
pub fn new(value: i64, len: uint, precision: uint) -> Numeric {
// TODO consistency check
Numeric {
value: value,
len: len,
precision: precision
}
}
pub fn from_str(s: &str, len: uint, precision: uint) -> Option<Numeric> {
let mut s = s.trim_chars(is_whitespace);
let mut value = 0i64;
let mut negative = false;
let mut fraction = false;
if s[0..1] == "-" {
negative = true;
s = s[1..];
}
if s.contains_char('.') {
while s.chars().last() == Some('0') {
s = s[..s.len() - 1]
}
}
let mut digits_seen = 0u;
let mut digits_seen_fraction = 0u;
for c in s.chars() {
if let Some(n) = to_digit(c, 10) {
value = value * 10 + n as i64;
if fraction {
digits_seen_fraction += 1;
} else {
digits_seen += 1;
}
} else if c == '.' {
fraction = match fraction {
true => return None,
false => true
};
} else {
return None;
}
}
if negative {
value *= -1;
}
if digits_seen > len - precision || digits_seen_fraction > precision {
None
} else {
Some(Numeric::new(value * pow(10, precision - digits_seen_fraction), len, precision))
}
}
}
impl PartialEq for Numeric {
fn eq(&self, other: &Numeric) -> bool {
self.value == other.value
&& self.precision == other.precision
}
}
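// A worked sketch of the cross-precision comparison below: comparing
// 1.5 {value: 15, precision: 1} with 1.50 {value: 150, precision: 2}
// scales the lower-precision value by 10^(2 - 1), so 15 becomes 150 and
// the two compare as Equal.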
impl Ord for Numeric {
fn cmp(&self, other: &Numeric) -> Ordering {
match self.precision.cmp(&other.precision) {
Equal => self.value.cmp(&other.value),
Less => (self.value * pow(10, other.precision - self.precision)).cmp(&other.value),
            Greater => self.value.cmp(&(other.value * pow(10, self.precision - other.precision))),
}<|fim▁hole|>impl Add<Numeric, Numeric> for Numeric {
fn add(&self, rhs: &Numeric) -> Numeric {
Numeric {
value: match self.precision.cmp(&rhs.precision) {
Equal => self.value + rhs.value,
Less => self.value * pow(10, rhs.precision - self.precision) + rhs.value,
Greater => rhs.value * pow(10, self.precision - rhs.precision) + self.value,
},
precision: max(self.precision, rhs.precision),
len: max(self.len, rhs.len)
}
}
}
impl Sub<Numeric, Numeric> for Numeric {
fn sub(&self, rhs: &Numeric) -> Numeric {
Numeric {
value: match self.precision.cmp(&rhs.precision) {
Equal => self.value - rhs.value,
Less => self.value * pow(10, rhs.precision - self.precision) - rhs.value,
Greater => self.value - rhs.value * pow(10, self.precision - rhs.precision),
},
precision: max(self.precision, rhs.precision),
len: max(self.len, rhs.len)
}
}
}
impl Mul<Numeric, Numeric> for Numeric {
fn mul(&self, rhs: &Numeric) -> Numeric {
Numeric {
value: match self.precision.cmp(&rhs.precision) {
Equal => self.value * rhs.value,
Less => self.value * pow(10, rhs.precision - self.precision) * rhs.value,
            Greater => self.value * rhs.value * pow(10, self.precision - rhs.precision),
},
precision: max(self.precision, rhs.precision),
len: max(self.len, rhs.len)
}
}
}
#[cfg(test)]
mod test {
use super::Numeric;
#[test]
fn test_from_str() {
assert_eq!(Numeric::from_str("50", 2, 0), Some(Numeric {value: 50, len: 2, precision: 0}))
assert_eq!(Numeric::from_str("-50", 2, 0), Some(Numeric {value: -50, len: 2, precision: 0}))
assert_eq!(Numeric::from_str("50.25", 4, 2), Some(Numeric {value: 5025, len: 4, precision: 2}))
assert_eq!(Numeric::from_str("-50.25", 4, 2), Some(Numeric {value: -5025, len: 4, precision: 2}))
assert_eq!(Numeric::from_str("-50.250", 4, 2), Some(Numeric {value: -5025, len: 4, precision: 2}))
assert_eq!(Numeric::from_str("-50.25", 5, 3), Some(Numeric {value: -50250, len: 5, precision: 3}))
assert_eq!(Numeric::from_str("10.2.1", 4, 0), None)
assert_eq!(Numeric::from_str("abc", 4, 0), None)
}
}<|fim▁end|> | }
}
|
<|file_name|>actions_it_test.go<|end_file_name|><|fim▁begin|>// +build integration
/*
Real-time Online/Offline Charging System (OCS) for Telecom & ISP environments
Copyright (C) ITsysCOM GmbH
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>
*/
package engine
import (
"flag"
"net/rpc"
"net/rpc/jsonrpc"
"path"
"strconv"
"testing"
"time"
"github.com/cgrates/cgrates/config"
"github.com/cgrates/cgrates/utils"
)
var actsLclCfg *config.CGRConfig
var actsLclRpc *rpc.Client
var actsLclCfgPath = path.Join(*dataDir, "conf", "samples", "actions")
var waitRater = flag.Int("wait_rater", 100, "Number of miliseconds to wait for rater to start and cache")
func TestActionsitInitCfg(t *testing.T) {
// Init config first
var err error<|fim▁hole|> if err != nil {
t.Error(err)
}
actsLclCfg.DataFolderPath = *dataDir // Share DataFolderPath through config towards StoreDb for Flush()
config.SetCgrConfig(actsLclCfg)
}
func TestActionsitInitCdrDb(t *testing.T) {
if err := InitStorDb(actsLclCfg); err != nil {
t.Fatal(err)
}
}
// Finds cgr-engine executable and starts it with default configuration
func TestActionsitStartEngine(t *testing.T) {
if _, err := StartEngine(actsLclCfgPath, *waitRater); err != nil {
t.Fatal(err)
}
}
// Connect rpc client to rater
func TestActionsitRpcConn(t *testing.T) {
var err error
time.Sleep(500 * time.Millisecond)
actsLclRpc, err = jsonrpc.Dial("tcp", actsLclCfg.RPCJSONListen) // We connect over JSON so we can also troubleshoot if needed
if err != nil {
t.Fatal(err)
}
}
func TestActionsitSetCdrlogDebit(t *testing.T) {
var reply string
attrsSetAccount := &utils.AttrSetAccount{Tenant: "cgrates.org", Account: "dan2904"}
if err := actsLclRpc.Call("ApierV1.SetAccount", attrsSetAccount, &reply); err != nil {
t.Error("Got error on ApierV1.SetAccount: ", err.Error())
} else if reply != utils.OK {
t.Errorf("Calling ApierV1.SetAccount received: %s", reply)
}
attrsAA := &utils.AttrSetActions{ActionsId: "ACTS_1", Actions: []*utils.TPAction{
&utils.TPAction{Identifier: DEBIT, BalanceType: utils.MONETARY, Units: "5", ExpiryTime: UNLIMITED, Weight: 20.0},
&utils.TPAction{Identifier: CDRLOG},
}}
if err := actsLclRpc.Call("ApierV2.SetActions", attrsAA, &reply); err != nil && err.Error() != utils.ErrExists.Error() {
t.Error("Got error on ApierV2.SetActions: ", err.Error())
} else if reply != utils.OK {
t.Errorf("Calling ApierV2.SetActions received: %s", reply)
}
attrsEA := &utils.AttrExecuteAction{Tenant: attrsSetAccount.Tenant, Account: attrsSetAccount.Account, ActionsId: attrsAA.ActionsId}
if err := actsLclRpc.Call("ApierV1.ExecuteAction", attrsEA, &reply); err != nil {
t.Error("Got error on ApierV1.ExecuteAction: ", err.Error())
} else if reply != utils.OK {
t.Errorf("Calling ApierV1.ExecuteAction received: %s", reply)
}
var rcvedCdrs []*ExternalCDR
if err := actsLclRpc.Call("ApierV2.GetCdrs", utils.RPCCDRsFilter{Sources: []string{CDRLOG},
Accounts: []string{attrsSetAccount.Account}}, &rcvedCdrs); err != nil {
t.Error("Unexpected error: ", err.Error())
} else if len(rcvedCdrs) != 1 {
t.Error("Unexpected number of CDRs returned: ", len(rcvedCdrs))
} else if rcvedCdrs[0].ToR != utils.MONETARY ||
rcvedCdrs[0].OriginHost != "127.0.0.1" ||
rcvedCdrs[0].Source != CDRLOG ||
rcvedCdrs[0].RequestType != utils.META_PREPAID ||
rcvedCdrs[0].Tenant != "cgrates.org" ||
rcvedCdrs[0].Account != "dan2904" ||
rcvedCdrs[0].Subject != "dan2904" ||
rcvedCdrs[0].Usage != "1" ||
rcvedCdrs[0].RunID != DEBIT ||
strconv.FormatFloat(rcvedCdrs[0].Cost, 'f', -1, 64) != attrsAA.Actions[0].Units {
t.Errorf("Received: %+v", rcvedCdrs[0])
}
}
func TestActionsitSetCdrlogTopup(t *testing.T) {
var reply string
attrsSetAccount := &utils.AttrSetAccount{Tenant: "cgrates.org", Account: "dan2905"}
if err := actsLclRpc.Call("ApierV1.SetAccount", attrsSetAccount, &reply); err != nil {
t.Error("Got error on ApierV1.SetAccount: ", err.Error())
} else if reply != utils.OK {
t.Errorf("Calling ApierV1.SetAccount received: %s", reply)
}
attrsAA := &utils.AttrSetActions{ActionsId: "ACTS_2", Actions: []*utils.TPAction{
&utils.TPAction{Identifier: TOPUP, BalanceType: utils.MONETARY, Units: "5", ExpiryTime: UNLIMITED, Weight: 20.0},
&utils.TPAction{Identifier: CDRLOG},
}}
if err := actsLclRpc.Call("ApierV2.SetActions", attrsAA, &reply); err != nil && err.Error() != utils.ErrExists.Error() {
t.Error("Got error on ApierV2.SetActions: ", err.Error())
} else if reply != utils.OK {
t.Errorf("Calling ApierV2.SetActions received: %s", reply)
}
attrsEA := &utils.AttrExecuteAction{Tenant: attrsSetAccount.Tenant, Account: attrsSetAccount.Account, ActionsId: attrsAA.ActionsId}
if err := actsLclRpc.Call("ApierV1.ExecuteAction", attrsEA, &reply); err != nil {
t.Error("Got error on ApierV1.ExecuteAction: ", err.Error())
} else if reply != utils.OK {
t.Errorf("Calling ApierV1.ExecuteAction received: %s", reply)
}
var rcvedCdrs []*ExternalCDR
if err := actsLclRpc.Call("ApierV2.GetCdrs", utils.RPCCDRsFilter{Sources: []string{CDRLOG},
Accounts: []string{attrsSetAccount.Account}}, &rcvedCdrs); err != nil {
t.Error("Unexpected error: ", err.Error())
} else if len(rcvedCdrs) != 1 {
t.Error("Unexpected number of CDRs returned: ", len(rcvedCdrs))
} else if rcvedCdrs[0].ToR != utils.MONETARY ||
rcvedCdrs[0].OriginHost != "127.0.0.1" ||
rcvedCdrs[0].Source != CDRLOG ||
rcvedCdrs[0].RequestType != utils.META_PREPAID ||
rcvedCdrs[0].Tenant != "cgrates.org" ||
rcvedCdrs[0].Account != "dan2905" ||
rcvedCdrs[0].Subject != "dan2905" ||
rcvedCdrs[0].Usage != "1" ||
rcvedCdrs[0].RunID != TOPUP ||
strconv.FormatFloat(rcvedCdrs[0].Cost, 'f', -1, 64) != attrsAA.Actions[0].Units {
t.Errorf("Received: %+v", rcvedCdrs[0])
}
}
func TestActionsitStopCgrEngine(t *testing.T) {
if err := KillEngine(*waitRater); err != nil {
t.Error(err)
}
}<|fim▁end|> | actsLclCfg, err = config.NewCGRConfigFromFolder(actsLclCfgPath) |
<|file_name|>signals.py<|end_file_name|><|fim▁begin|>from itertools import count
from typing import Union
from dataclasses import dataclass, field
from OnePy.constants import ActionType, OrderType
from OnePy.sys_module.components.exceptions import (OrderConflictError,
PctRangeError)
from OnePy.sys_module.metabase_env import OnePyEnvBase
@dataclass
class Signal(OnePyEnvBase):
counter = count(1)
strategy_name: str
action_type: ActionType
size: int
ticker: str
takeprofit: float = None
takeprofit_pct: float = None
stoploss: float = None
stoploss_pct: float = None
trailingstop: float = None
trailingstop_pct: float = None
price: float = None
price_pct: float = None
signal_id: int = None
datetime: str = field(init=False)
def __post_init__(self):
self.datetime = self.env.sys_date
self.next_datetime = self.env.feeds[self.ticker].next_ohlc['date']
self.signal_id = next(self.counter)
self._check_all_conflict()
self._save_signals()
def _save_signals(self):
self.env.signals_normal_cur.append(self)
if self.env.is_save_original:
self.env.signals_normal.append(self)
def _check_all_conflict(self):<|fim▁hole|> self._check_conflict(
self.takeprofit, self.takeprofit_pct, name='takeprofit')
self._check_conflict(self.stoploss, self.stoploss_pct, name='stoploss')
self._check_conflict(
self.trailingstop, self.trailingstop_pct, name='trailingstop')
def _check_size(self):
if self.size <= 0:
raise Exception("size should be Positive")
@staticmethod
def _check_conflict(obj: float, obj_pct: float, name: str):
if obj and obj_pct:
raise OrderConflictError("$ and pct can't be set together")
if obj_pct:
if not -1 < obj_pct < 1:
raise PctRangeError("pct should be -1 < pct < 1")
if name != 'price':
if obj:
if obj <= 0:
raise ValueError(f"{name.upper()} should be Positive")
if obj_pct:
if obj_pct <= 0:
raise ValueError(f"{name.upper()} should be Positive")
def get(self, name: str):
return getattr(self, name)
def set(self, name: str, value: float):
setattr(self, name, value)
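# Hypothetical usage sketch (field values are illustrative only):
#   Signal(strategy_name='demo', action_type=ActionType.BUY,
#          size=100, ticker='AAPL', takeprofit_pct=0.1)
# Setting both takeprofit and takeprofit_pct raises OrderConflictError.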
@dataclass
class SignalForPending(Signal):
price: float = None
price_pct: float = None
def _save_signals(self):
self.env.signals_pending_cur.append(self)
if self.env.is_save_original:
self.env.signals_pending.append(self)
@dataclass
class SignalByTrigger(SignalForPending):
counter = count(1)
order_type: OrderType = None
mkt_id: int = None
trigger_key: str = None
    execute_price: float = None  # used to decide whether this is a must-fill order; used for pending orders
    first_cur_price: float = None  # records the price at the moment the pending-order signal is generated
    parent_order: str = None  # actually not a str, but an order object
def _save_signals(self):
self.env.signals_trigger_cur.append(self)
if self.env.is_save_original:
self.env.signals_trigger.append(self)
@dataclass
class SignalCancelBase(OnePyEnvBase):
counter = None
action_type: ActionType
strategy_name: str
ticker: str
long_or_short: str
def __post_init__(self):
self.datetime = self.env.sys_date
self.signal_id = next(self.counter)
self._check_all_conflict()
self._save_signals()
def _save_signals(self):
self.env.signals_cancel_cur.append(self)
if self.env.is_save_original:
self.env.signals_cancel.append(self)
def _check_all_conflict(self):
raise NotImplementedError
@dataclass
class SignalCancelTST(SignalCancelBase):
counter = count(1)
takeprofit: bool
stoploss: bool
trailingstop: bool
def _check_all_conflict(self):
pass
@dataclass
class SignalCancelPending(SignalCancelBase):
counter = count(1)
below_price: float = None
above_price: float = None
def _check_all_conflict(self):
if self.below_price is not None and self.above_price is not None:
raise ValueError(f"below and above price can't be set together!")<|fim▁end|> | self._check_size()
self._check_conflict(self.price, self.price_pct, name='price') |
<|file_name|>code128.cpp<|end_file_name|><|fim▁begin|>/*
* OpenRPT report writer and rendering engine
* Copyright (C) 2001-2011 by OpenMFG, LLC
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
* Please contact info@openmfg.com with any questions on this license.
*/
/*
* This file contains the implementation of the Code 128 barcode renderer.
 * All this code assumes a 100dpi rendering surface for its calculations.
*/
#include <QString>
#include <QVector>
#include <QRect>
#include <QPainter>
#include <QPen>
#include <QBrush>
#include "parsexmlutils.h"
#include "renderobjects.h"
static const int SETA = 0;
static const int SETB = 1;
static const int SETC = 2;
static const char FNC1 = (char)130;
static const char FNC2 = (char)131;
static const char FNC3 = (char)132;
static const char FNC4 = (char)133;
static const char SHIFT = (char)134;
static const char CODEA = (char)135;
static const char CODEB = (char)136;
static const char CODEC = (char)137;
static const char STARTA = (char)138;
static const char STARTB = (char)139;
static const char STARTC = (char)140;
struct code128 {
char codea;
char codeb;
char codec;
int values[6];
bool _null;
};
static const struct code128 _128codes[] = {
// A , B , C , { B S B S B S }, NULL? },
{ ' ', ' ', 0, { 2, 1, 2, 2, 2, 2 }, false },
{ '!', '!', 1, { 2, 2, 2, 1, 2, 2 }, false },
{ '"', '"', 2, { 2, 2, 2, 2, 2, 1 }, false },
{ '#', '#', 3, { 1, 2, 1, 2, 2, 3 }, false },
{ '$', '$', 4, { 1, 2, 1, 3, 2, 2 }, false },
{ '%', '%', 5, { 1, 3, 1, 2, 2, 2 }, false },
{ '&', '&', 6, { 1, 2, 2, 2, 1, 3 }, false },
{ '\'', '\'', 7, { 1, 2, 2, 3, 1, 2 }, false },
{ '(', '(', 8, { 1, 3, 2, 2, 1, 2 }, false },
{ ')', ')', 9, { 2, 2, 1, 2, 1, 3 }, false },
{ '*', '*', 10, { 2, 2, 1, 3, 1, 2 }, false },
{ '+', '+', 11, { 2, 3, 1, 2, 1, 2 }, false },
{ ',', ',', 12, { 1, 1, 2, 2, 3, 2 }, false },
{ '-', '-', 13, { 1, 2, 2, 1, 3, 2 }, false },
{ '.', '.', 14, { 1, 2, 2, 2, 3, 1 }, false },
{ '/', '/', 15, { 1, 1, 3, 2, 2, 2 }, false },
{ '0', '0', 16, { 1, 2, 3, 1, 2, 2 }, false },
{ '1', '1', 17, { 1, 2, 3, 2, 2, 1 }, false },
{ '2', '2', 18, { 2, 2, 3, 2, 1, 1 }, false },
{ '3', '3', 19, { 2, 2, 1, 1, 3, 2 }, false },
{ '4', '4', 20, { 2, 2, 1, 2, 3, 1 }, false },
{ '5', '5', 21, { 2, 1, 3, 2, 1, 2 }, false },
{ '6', '6', 22, { 2, 2, 3, 1, 1, 2 }, false },
{ '7', '7', 23, { 3, 1, 2, 1, 3, 1 }, false },
{ '8', '8', 24, { 3, 1, 1, 2, 2, 2 }, false },
{ '9', '9', 25, { 3, 2, 1, 1, 2, 2 }, false },
{ ':', ':', 26, { 3, 2, 1, 2, 2, 1 }, false },
{ ';', ';', 27, { 3, 1, 2, 2, 1, 2 }, false },
{ '<', '<', 28, { 3, 2, 2, 1, 1, 2 }, false },
{ '=', '=', 29, { 3, 2, 2, 2, 1, 1 }, false },
{ '>', '>', 30, { 2, 1, 2, 1, 2, 3 }, false },
{ '?', '?', 31, { 2, 1, 2, 3, 2, 1 }, false },
{ '@', '@', 32, { 2, 3, 2, 1, 2, 1 }, false },
{ 'A', 'A', 33, { 1, 1, 1, 3, 2, 3 }, false },
{ 'B', 'B', 34, { 1, 3, 1, 1, 2, 3 }, false },
{ 'C', 'C', 35, { 1, 3, 1, 3, 2, 1 }, false },
{ 'D', 'D', 36, { 1, 1, 2, 3, 1, 3 }, false },
{ 'E', 'E', 37, { 1, 3, 2, 1, 1, 3 }, false },
{ 'F', 'F', 38, { 1, 3, 2, 3, 1, 1 }, false },
{ 'G', 'G', 39, { 2, 1, 1, 3, 1, 3 }, false },
{ 'H', 'H', 40, { 2, 3, 1, 1, 1, 3 }, false },
{ 'I', 'I', 41, { 2, 3, 1, 3, 1, 1 }, false },
{ 'J', 'J', 42, { 1, 1, 2, 1, 3, 3 }, false },
{ 'K', 'K', 43, { 1, 1, 2, 3, 3, 1 }, false },
{ 'L', 'L', 44, { 1, 3, 2, 1, 3, 1 }, false },
{ 'M', 'M', 45, { 1, 1, 3, 1, 2, 3 }, false },
{ 'N', 'N', 46, { 1, 1, 3, 3, 2, 1 }, false },
{ 'O', 'O', 47, { 1, 3, 3, 1, 2, 1 }, false },
{ 'P', 'P', 48, { 3, 1, 3, 1, 2, 1 }, false },
{ 'Q', 'Q', 49, { 2, 1, 1, 3, 3, 1 }, false },
{ 'R', 'R', 50, { 2, 3, 1, 1, 3, 1 }, false },
{ 'S', 'S', 51, { 2, 1, 3, 1, 1, 3 }, false },
{ 'T', 'T', 52, { 2, 1, 3, 3, 1, 1 }, false },
{ 'U', 'U', 53, { 2, 1, 3, 1, 3, 1 }, false },
{ 'V', 'V', 54, { 3, 1, 1, 1, 2, 3 }, false },
{ 'W', 'W', 55, { 3, 1, 1, 3, 2, 1 }, false },
{ 'X', 'X', 56, { 3, 3, 1, 1, 2, 1 }, false },
{ 'Y', 'Y', 57, { 3, 1, 2, 1, 1, 3 }, false },
{ 'Z', 'Z', 58, { 3, 1, 2, 3, 1, 1 }, false },
{ '[', '[', 59, { 3, 3, 2, 1, 1, 1 }, false },
{ '\\', '\\', 60, { 3, 1, 4, 1, 1, 1 }, false },
{ ']', ']', 61, { 2, 2, 1, 4, 1, 1 }, false },
{ '^', '^', 62, { 4, 3, 1, 1, 1, 1 }, false },
{ '_', '_', 63, { 1, 1, 1, 2, 2, 4 }, false },
{ 0x00, '`', 64, { 1, 1, 1, 4, 2, 2 }, false }, // NUL
{ 0x01, 'a', 65, { 1, 2, 1, 1, 2, 4 }, false }, // SOH
{ 0x02, 'b', 66, { 1, 2, 1, 4, 2, 1 }, false }, // STX
{ 0x03, 'c', 67, { 1, 4, 1, 1, 2, 2 }, false }, // ETX
{ 0x04, 'd', 68, { 1, 4, 1, 2, 2, 1 }, false }, // EOT
{ 0x05, 'e', 69, { 1, 1, 2, 2, 1, 4 }, false }, // ENQ
{ 0x06, 'f', 70, { 1, 1, 2, 4, 1, 2 }, false }, // ACK
{ 0x07, 'g', 71, { 1, 2, 2, 1, 1, 4 }, false }, // BEL
{ 0x08, 'h', 72, { 1, 2, 2, 4, 1, 1 }, false }, // BS
{ 0x09, 'i', 73, { 1, 4, 2, 1, 1, 2 }, false }, // HT
{ 0x0A, 'j', 74, { 1, 4, 2, 2, 1, 1 }, false }, // LF
{ 0x0B, 'k', 75, { 2, 4, 1, 2, 1, 1 }, false }, // VT
{ 0x0C, 'l', 76, { 2, 2, 1, 1, 1, 4 }, false }, // FF
{ 0x0D, 'm', 77, { 4, 1, 3, 1, 1, 1 }, false }, // CR
{ 0x0E, 'n', 78, { 2, 4, 1, 1, 1, 2 }, false }, // SO
{ 0x0F, 'o', 79, { 1, 3, 4, 1, 1, 1 }, false }, // SI
{ 0x10, 'p', 80, { 1, 1, 1, 2, 4, 2 }, false }, // DLE
{ 0x11, 'q', 81, { 1, 2, 1, 1, 4, 2 }, false }, // DC1
{ 0x12, 'r', 82, { 1, 2, 1, 2, 4, 1 }, false }, // DC2
{ 0x13, 's', 83, { 1, 1, 4, 2, 1, 2 }, false }, // DC3
{ 0x14, 't', 84, { 1, 2, 4, 1, 1, 2 }, false }, // DC4
{ 0x15, 'u', 85, { 1, 2, 4, 2, 1, 1 }, false }, // NAK
{ 0x16, 'v', 86, { 4, 1, 1, 2, 1, 2 }, false }, // SYN
{ 0x17, 'w', 87, { 4, 2, 1, 1, 1, 2 }, false }, // ETB
{ 0x18, 'x', 88, { 4, 2, 1, 2, 1, 1 }, false }, // CAN
{ 0x19, 'y', 89, { 2, 1, 2, 1, 4, 1 }, false }, // EM
{ 0x1A, 'z', 90, { 2, 1, 4, 1, 2, 1 }, false }, // SUB
{ 0x1B, '{', 91, { 4, 1, 2, 1, 2, 1 }, false }, // ESC
{ 0x1C, '|', 92, { 1, 1, 1, 1, 4, 3 }, false }, // FS
{ 0x1D, '}', 93, { 1, 1, 1, 3, 4, 1 }, false }, // GS
{ 0x1E, '~', 94, { 1, 3, 1, 1, 4, 1 }, false }, // RS
{ 0x1F, 0x7F, 95, { 1, 1, 4, 1, 1, 3 }, false }, // US DEL
{ FNC3, FNC3, 96, { 1, 1, 4, 3, 1, 1 }, false }, // FNC3 FNC3
{ FNC2, FNC2, 97, { 4, 1, 1, 1, 1, 3 }, false }, // FNC2 FNC2
{ SHIFT, SHIFT, 98, { 4, 1, 1, 3, 1, 1 }, false }, // SHIFT SHIFT
{ CODEC, CODEC, 99, { 1, 1, 3, 1, 4, 1 }, false }, // CODEC CODEC
{ CODEB, FNC4, CODEB, { 1, 1, 4, 1, 3, 1 }, false }, // CODEB FNC4 CODEB
{ FNC4, CODEA, CODEA, { 3, 1, 1, 1, 4, 1 }, false }, // FNC4 CODEA CODEA
{ FNC1, FNC1, FNC1, { 4, 1, 1, 1, 3, 1 }, false }, // FNC1 FNC1 FNC1
{ STARTA, STARTA, STARTA, { 2, 1, 1, 4, 1, 2 }, false }, // STARTA
{ STARTB, STARTB, STARTB, { 2, 1, 1, 2, 1, 4 }, false }, // STARTB
{ STARTC, STARTC, STARTC, { 2, 1, 1, 2, 3, 2 }, false }, // STARTC
  { '\0', '\0', '\0', { 0, 0, 0, 0, 0, 0 }, true } // null terminator of list
};
// STOP CHARACTER { 2 3 3 1 1 1 2 }
int code128Index(QChar code, int set) {
for(int idx = 0; _128codes[idx]._null == false; idx++) {
if(set == SETA && _128codes[idx].codea == code.toLatin1()) return idx;
if(set == SETB && _128codes[idx].codeb == code.toLatin1()) return idx;
if(set == SETC && _128codes[idx].codec == code.toLatin1()) return idx;
}
return -1; // couldn't find it
}
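// For example (a sketch): code128Index('A', SETA) returns 33, whose element
// widths { 1, 1, 1, 3, 2, 3 } get rendered by renderCode128() below; a
// character missing from the requested set yields -1 and is skipped.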
void renderCode128(OROPage * page, const QRectF & r, const QString & _str, ORBarcodeData * bc)
{
QVector<int> str;
int i = 0;
// create the list.. if the list is empty then just set a start code and move on
if(_str.isEmpty())
str.push_back(104);
else
{
int rank_a = 0;
int rank_b = 0;
int rank_c = 0;
QChar c;
for(i = 0; i < _str.length(); i++)
{
c = _str.at(i);
rank_a += (code128Index(c, SETA) != -1 ? 1 : 0);
rank_b += (code128Index(c, SETB) != -1 ? 1 : 0);
rank_c += (c >= '0' && c <= '9' ? 1 : 0);
}
if(rank_c == _str.length() && ((rank_c % 2) == 0 || rank_c > 4))
{
  // every character in the value is a digit so we are going to go with mode C
// and we have an even number or we have more than 4 values
i = 0;
if((rank_c % 2) == 1)
{
str.push_back(104); // START B
c = _str.at(0);
str.push_back(code128Index(c, SETB));
str.push_back(99); // MODE C
i = 1;
}
else
str.push_back(105); // START C
for(i = i; i < _str.length(); i+=2)
{
char a, b;
c = _str.at(i);
a = c.toLatin1();
a -= 48;
c = _str.at(i+1);
b = c.toLatin1();
b -= 48;
str.push_back(int((a * 10) + b));
}
}
else
{
// start in the mode that had the higher number of hits and then
// just shift into the opposite mode as needed
int set = ( rank_a > rank_b ? SETA : SETB );
str.push_back(( rank_a > rank_b ? 103 : 104 ));
int v = -1;
for(i = 0; i < _str.length(); i++)
{
c = _str.at(i);
v = code128Index(c, set);
if(v == -1)
{
v = code128Index(c, (set == SETA ? SETB : SETA));
if(v != -1)
{
str.push_back(98); // SHIFT
str.push_back(v);
}
}
else
str.push_back(v);
}
}
}
// calculate and append the checksum value to the list
int checksum = str.at(0);
for(i = 1; i < str.size(); i++)
checksum += (str.at(i) * i);
checksum = checksum % 103;
str.push_back(checksum);
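  // Worked sketch: for the value "A", encoded as [104 (START B), 33 ('A')],
  // checksum = (104 + 33*1) % 103 = 34, so 34 is appended before the stop bars.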
// lets determine some core attributes about this barcode
qreal bar_width = bc->narrowBarWidth;
  // this is our mandatory minimum quiet zone
qreal quiet_zone = bar_width * 10;
if(quiet_zone < 0.1)
quiet_zone = 0.1;
// what kind of area do we have to work with
qreal draw_width = r.width();
qreal draw_height = r.height();
// how long is the value we need to encode?
  int val_length = str.size() - 2; // we include start and checksum in our list so
// subtract them out for our calculations
// L = (11C + 35)X
  // L length of barcode (excluding quiet zone) in units same as X and I
// C the number of characters in the value excluding the start/stop and checksum characters
// X the width of a bar (pixels in our case)
qreal L;
qreal C = val_length;
qreal X = bar_width;
L = (((11.0 * C) + 35.0) * X);
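  // e.g. a sketch: 6 encoded characters at a 1px bar width give
  // L = (11*6 + 35) * 1 = 101px, before adding the quiet zones.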
  // now we have the actual width the barcode will be, so we can determine the
  // actual size of the quiet zone (we assume we center the barcode in the given area)
  // what should we do if the area is too small????
  // At the moment, the way the code is written, we will always start at the minimum
  // required quiet zone if we don't have enough space... I guess we'll just over-run
  // to the right
//
// calculate the starting position based on the alignment option
  // for left align we don't need to do anything as the values are already set up for it
if(bc->align == 1) // center
{
qreal nqz = (draw_width - L) / 2.0;
if(nqz > quiet_zone)
quiet_zone = nqz;
}
else if(bc->align > 1) // right
quiet_zone = draw_width - (L + quiet_zone);
// else if(align < 1) {} // left : do nothing
qreal pos = r.left() + quiet_zone;
qreal top = r.top();
QPen pen(Qt::NoPen);
QBrush brush(QColor("black"));
bool space = false;
int idx = 0, b = 0;
qreal w = 0.0;
for(i = 0; i < str.size(); i++)
{
// loop through each value and render the barcode
idx = str.at(i);
if(idx < 0 || idx > 105)
{
      qDebug("Encountered a non-compliant element while rendering a Code 128 barcode -- skipping");
continue;
}
space = false;
for(b = 0; b < 6; b++, space = !space)
{
w = _128codes[idx].values[b] * bar_width;<|fim▁hole|> {
ORORect * rect = new ORORect(bc);
rect->setPen(pen);
rect->setBrush(brush);
rect->setRect(QRectF(pos,top, w,draw_height));
rect->setRotationAxis(r.topLeft());
page->addPrimitive(rect);
}
pos += w;
}
}
// we have to do the stop character seperatly like this because it has
// 7 elements in it's bar sequence rather than 6 like the others
int STOP_CHARACTER[]={ 2, 3, 3, 1, 1, 1, 2 };
space = false;
for(b = 0; b < 7; b++, space = !space)
{
w = STOP_CHARACTER[b] * bar_width;
if(!space)
{
ORORect * rect = new ORORect(bc);
rect->setPen(pen);
rect->setBrush(brush);
rect->setRect(QRectF(pos,top, w,draw_height));
rect->setRotationAxis(r.topLeft());
page->addPrimitive(rect);
}
pos += w;
}
return;
}<|fim▁end|> | if(!space) |
<|file_name|>feature-gate-cfg-target-vendor.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[cfg(target_vendor = "x")] //~ ERROR `cfg(target_vendor)` is experimental
#[cfg_attr(target_vendor = "x", x)] //~ ERROR `cfg(target_vendor)` is experimental
struct Foo(u64, u64);
#[cfg(not(any(all(target_vendor = "x"))))] //~ ERROR `cfg(target_vendor)` is experimental
fn foo() {}
<|fim▁hole|>}<|fim▁end|> | fn main() {
cfg!(target_vendor = "x");
//~^ ERROR `cfg(target_vendor)` is experimental and subject to change |
<|file_name|>constants.py<|end_file_name|><|fim▁begin|>import math as mth
import numpy as np
#----------------------
# J Matthews, 21/02
# This is a file containing useful constants for python coding
#
# Units in CGS unless stated
#
#----------------------
#H=6.62606957E-27
HEV=4.13620e-15
#C=29979245800.0
#BOLTZMANN=1.3806488E-16
VERY_BIG=1e50
H=6.6262e-27
HC=1.98587e-16
HEV=4.13620e-15 # Planck's constant in eV
HRYD=3.04005e-16 # NSH 1204 Planck's constant in Rydberg
C =2.997925e10
G=6.670e-8
BOLTZMANN =1.38062e-16
WIEN= 5.879e10 # NSH 1208 Wien Disp Const in frequency units
H_OVER_K=4.799437e-11
STEFAN_BOLTZMANN =5.6696e-5
THOMPSON=0.66524e-24
PI = 3.1415927
MELEC = 9.10956e-28
E= 4.8035e-10 # Electric charge in esu
MPROT = 1.672661e-24
MSOL = 1.989e33
PC= 3.08e18<|fim▁hole|>PI_E2_OVER_M =7.96e8
ALPHA= 7.297351e-3 # Fine structure constant
BOHR= 0.529175e-8 # Bohr radius
CR= 3.288051e15 #Rydberg frequency for H != Ryd freq for infinite mass
ANGSTROM = 1.e-8 #Definition of an Angstrom in units of this code, e.g. cm
EV2ERGS =1.602192e-12
RADIAN= 57.29578
RYD2ERGS =2.1798741e-11
PARSEC=3.086E18<|fim▁end|> | YR = 3.1556925e7
PI_E2_OVER_MC=0.02655103 # Classical cross-section |
<|file_name|>apicoverage_test.go<|end_file_name|><|fim▁begin|>/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package main
import (
"bytes"
"io"
"regexp"
"testing"
)
//NOTE: This method is for avoiding flake tests instead of using reflect.DeepEqual()
func equalAPIArray(a, b apiArray) bool {
if a == nil && b == nil {
return true
}
if a == nil || b == nil {
return false
}
if len(a) != len(b) {
return false
}
for i := range a {
found := false
for j := range b {
if i == j {
found = true
break
}
}
if !found {
return false
}
}
return true
}
func TestParseOpenAPI(t *testing.T) {
testCases := []struct {
Rawdata []byte
Expected apiArray
}{
{
Rawdata: []byte(`{"paths": {"/resources": {
"get": {"description": "get available resources"}}}}`),
Expected: apiArray{
{Method: "GET", URL: "/resources"},
},
},
{
Rawdata: []byte(`{"paths": {"/resources": {
"get": {"description": "get available resources"},
"post": {"description": "create resource"}}}}`),
Expected: apiArray{
{Method: "GET", URL: "/resources"},
{Method: "POST", URL: "/resources"},
},
},
{
Rawdata: []byte(`{"paths": {
"/resources": {
"get": {"description": "get available resources"},
"post": {"description": "create resource"}},
"/foo": {
"get": {"description": "get avaiable foo"},
"post": {"description": "create foo"},
"parameters": [{"type": "string", "description": "This should be ignored", "name": "bar", "in": "query"}]}}}`),
Expected: apiArray{
{Method: "GET", URL: "/resources"},
{Method: "POST", URL: "/resources"},
{Method: "GET", URL: "/foo"},
{Method: "POST", URL: "/foo"},
},
},
}
for _, test := range testCases {
res := parseOpenAPI(test.Rawdata)
if !equalAPIArray(res, test.Expected) {
t.Errorf("OpenAPI did not match expected for test")
t.Errorf("%#v", res)
t.Errorf("%#v", test.Expected)
}
}
}
func TestParseAPILog(t *testing.T) {
testCases := []struct {
Rawdata io.Reader
Expected apiArray
}{
{
Rawdata: bytes.NewReader(
[]byte(`
I0919 15:34:14.943642 6611 round_trippers.go:414] GET https://k8s-api/api/v1/foo
I0919 15:34:16.943642 6611 round_trippers.go:414] POST https://k8s-api/api/v1/bar
`)),
Expected: apiArray{
{Method: "GET", URL: "/api/v1/foo"},
{Method: "POST", URL: "/api/v1/bar"},
},
},
{
Rawdata: bytes.NewReader(
[]byte(`
I0919 15:34:14.943642 6611 round_trippers.go:414] GET https://k8s-api/api/v1/foo?other
`)),
Expected: apiArray{
{Method: "GET", URL: "/api/v1/foo"},
},
},
}
for _, test := range testCases {
res := parseAPILog(test.Rawdata)
if !equalAPIArray(res, test.Expected) {
t.Errorf("APILog did not match expected for test")
t.Errorf("Actual: %#v", res)
t.Errorf("Expected: %#v", test.Expected)
}
}
}
func TestGetTestedAPIsByLevel(t *testing.T) {
testCases := []struct {
Negative bool
Reg *regexp.Regexp
apisOpenapi apiArray
apisTested apiArray
ExpectedTested apiArray
ExpectedAll apiArray
}{
{
//Test Alpha APIs are returned
Negative: false,
Reg: reAlphaAPI,
apisOpenapi: apiArray{
{Method: "GET", URL: "/apis/resources/v1/"},
{Method: "POST", URL: "/apis/resources/v1/"},
{Method: "GET", URL: "/apis/resources/v2alpha1/"},
{Method: "POST", URL: "/apis/resources/v2alpha1/"},
{Method: "GET", URL: "/apis/resources/v1beta1/"},
{Method: "POST", URL: "/apis/resources/v1beta1/"},
},
apisTested: apiArray{
{Method: "GET", URL: "/apis/resources/v1/"},
{Method: "GET", URL: "/apis/resources/v2alpha1/"},
{Method: "GET", URL: "/apis/resources/v1beta1/"},
},
ExpectedTested: apiArray{
{Method: "GET", URL: "/apis/resources/v2alpha1/"},
},
ExpectedAll: apiArray{
{Method: "GET", URL: "/apis/resources/v2alpha1/"},
{Method: "POST", URL: "/apis/resources/v2alpha1/"},
},
},
{
//Test Beta APIs are returned
Negative: false,
Reg: reBetaAPI,
apisOpenapi: apiArray{
{Method: "GET", URL: "/apis/resources/v1/"},
{Method: "POST", URL: "/apis/resources/v1/"},
{Method: "GET", URL: "/apis/resources/v2alpha1/"},
{Method: "POST", URL: "/apis/resources/v2alpha1/"},
{Method: "GET", URL: "/apis/resources/v1beta1/"},
{Method: "POST", URL: "/apis/resources/v1beta1/"},
},
apisTested: apiArray{
{Method: "GET", URL: "/apis/resources/v1/"},
{Method: "GET", URL: "/apis/resources/v2alpha1/"},
{Method: "GET", URL: "/apis/resources/v1beta1/"},
},
ExpectedTested: apiArray{
{Method: "GET", URL: "/apis/resources/v1beta1/"},
},
ExpectedAll: apiArray{
{Method: "GET", URL: "/apis/resources/v1beta1/"},
{Method: "POST", URL: "/apis/resources/v1beta1/"},
},
},
{
//Test Stable APIs are returned
Negative: true,
Reg: reNotStableAPI,
apisOpenapi: apiArray{
{Method: "GET", URL: "/apis/resources/v1/"},
{Method: "POST", URL: "/apis/resources/v1/"},
{Method: "GET", URL: "/apis/resources/v2alpha1/"},
{Method: "POST", URL: "/apis/resources/v2alpha1/"},
{Method: "GET", URL: "/apis/resources/v1beta1/"},
{Method: "POST", URL: "/apis/resources/v1beta1/"},
},
apisTested: apiArray{
{Method: "GET", URL: "/apis/resources/v1/"},
{Method: "GET", URL: "/apis/resources/v2alpha1/"},
{Method: "GET", URL: "/apis/resources/v1beta1/"},
},
ExpectedTested: apiArray{
{Method: "GET", URL: "/apis/resources/v1/"},
},
ExpectedAll: apiArray{
{Method: "GET", URL: "/apis/resources/v1/"},
{Method: "POST", URL: "/apis/resources/v1/"},
},
},
}
for _, test := range testCases {<|fim▁hole|> t.Errorf("Actual: %#v", resTested)
}
if !equalAPIArray(resAll, test.ExpectedAll) {
t.Errorf("resAll did not match expected for test")
t.Errorf("Expected: %#v", test.ExpectedAll)
t.Errorf("Actual: %#v", resAll)
}
}
}<|fim▁end|> | resTested, resAll := getTestedAPIsByLevel(test.Negative, test.Reg, test.apisOpenapi, test.apisTested)
if !equalAPIArray(resTested, test.ExpectedTested) {
t.Errorf("resTested did not match expected for test")
t.Errorf("Expected: %#v", test.ExpectedTested) |
<|file_name|>match.js<|end_file_name|><|fim▁begin|>const _transform = require('lodash/transform');
function MatchTransformer(match) {
if( !(this instanceof MatchTransformer) ) {
return this.transform(new MatchTransformer(match));
}
<|fim▁hole|> if( typeof match === 'string' ) {
// Escape string
match = match.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
match = new RegExp(match);
}
this._match = match;
}
MatchTransformer.prototype.parse = function(source) {
return _transform(source, (result, value, key) => {
if( this._match.test(key) )
result[key] = value;
}, {});
};
MatchTransformer.prototype.reverse = MatchTransformer.prototype.parse;
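// Usage sketch (hypothetical data): keys matching the pattern are kept.
//   new MatchTransformer('foo').parse({ foo1: 1, bar: 2 }); // -> { foo1: 1 }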
module.exports = MatchTransformer;<|fim▁end|> | |
<|file_name|>admin.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from django.contrib import admin
from article.models import Article
# Register your models here.
admin.site.register(Article)<|fim▁end|> | |
<|file_name|>tree.module.ts<|end_file_name|><|fim▁begin|>import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';<|fim▁hole|>import { TooltipModule } from '../tooltip';
@NgModule({
imports: [
CommonModule,
TooltipModule
],
declarations: [
TreeComponent,
TreeNodeComponent
],
providers: [],
exports: [
TreeComponent,
TreeNodeComponent
]
})
export class TreeModule { }<|fim▁end|> | import { TreeComponent } from './tree';
import { TreeNodeComponent } from './tree-node'; |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.http import HttpResponse
from django.shortcuts import render
from django.views import generic
import api.soql
import json
from api.soql import *
# Create your views here.
def indexView(request):
context = {
"vehicleAgencies": getUniqueValuesWithAggregate("gayt-taic", "agency", "max(postal_code)"),
"vehicleFuelTypes": getUniqueValues("gayt-taic", "fuel_type"),
"buildingAgencies": getUniqueValues("24pi-kxxa", "department_name")
}
return render(request,'TeamAqua/index.html', context=context)
def getUniqueValues(resource, column):
query = (
api.soql.SoQL(resource)
.select([column])<|fim▁hole|> .orderBy({column: "ASC"})
)
jsonString = query.execute()
return json.loads(jsonString)
def getUniqueValuesWithAggregate(resource, column, aggregate):
query = (
api.soql.SoQL(resource)
.select([column, aggregate])
.groupBy([column])
.orderBy({column: "ASC"})
)
jsonString = query.execute()
return json.loads(jsonString)<|fim▁end|> | .groupBy([column]) |
<|file_name|>0015_auto__add_field_hardwarerelease_medium.py<|end_file_name|><|fim▁begin|># encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Hardwarerelease.medium'
db.add_column('ashop_hardwarerelease', 'medium', self.gf('django.db.models.fields.CharField')(default=0, max_length=10), keep_default=False)
def backwards(self, orm):
# Deleting field 'Hardwarerelease.medium'
db.delete_column('ashop_hardwarerelease', 'medium')
models = {
'alibrary.artist': {
'Meta': {'ordering': "('name',)", 'object_name': 'Artist'},
'biography': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'excerpt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'artist_folder'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_image': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'artist_main_image'", 'null': 'True', 'to': "orm['filer.Image']"}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['alibrary.Artist']", 'through': "orm['alibrary.ArtistMembership']", 'symmetrical': 'False'}),
'multiple': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'professions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['alibrary.Profession']", 'through': "orm['alibrary.ArtistProfessions']", 'symmetrical': 'False'}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '100', 'db_index': 'True'}),
'updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'})
},
'alibrary.artistmembership': {
'Meta': {'object_name': 'ArtistMembership'},
'child': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'artist_child'", 'to': "orm['alibrary.Artist']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'artist_parent'", 'to': "orm['alibrary.Artist']"}),
'profession': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'artist_membership_profession'", 'null': 'True', 'to': "orm['alibrary.Profession']"})
},
'alibrary.artistprofessions': {
'Meta': {'object_name': 'ArtistProfessions'},
'artist': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Artist']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'profession': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Profession']"})
},
'alibrary.label': {
'Meta': {'ordering': "('name',)", 'object_name': 'Label'},
'address': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'email_main': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'first_placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'label_folder'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'labelcode': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'label_children'", 'null': 'True', 'to': "orm['alibrary.Label']"}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '100', 'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "'7437b6be-ab03-4a9d-af4e-dbdd430c819e'", 'max_length': '36'})
},
'alibrary.profession': {
'Meta': {'ordering': "('name',)", 'object_name': 'Profession'},
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'excerpt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_listing': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'})
},
'alibrary.release': {
'Meta': {'ordering': "('releasedate',)", 'object_name': 'Release'},
'catalognumber': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'excerpt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'extra_artists': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['alibrary.Profession']", 'null': 'True', 'through': "orm['alibrary.ReleaseExtraartists']", 'blank': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'release_folder'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'release_label'", 'to': "orm['alibrary.Label']"}),
'main_image': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'release_main_image'", 'null': 'True', 'to': "orm['filer.Image']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'placeholder_1': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'pressings': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'releasedate': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'releasetype': ('django.db.models.fields.CharField', [], {'default': "'other'", 'max_length': '12'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '100', 'db_index': 'True'}),
'updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'})
},
'alibrary.releaseextraartists': {
'Meta': {'object_name': 'ReleaseExtraartists'},
'artist': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'release_extraartist_artist'", 'to': "orm['alibrary.Artist']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'profession': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'release_extraartist_profession'", 'null': 'True', 'to': "orm['alibrary.Profession']"}),
'release': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'release_extraartist_release'", 'to': "orm['alibrary.Release']"})
},
'ashop.baseproduct': {
'Meta': {'ordering': "['name']", 'object_name': 'Baseproduct', '_ormbases': ['shop.Product']},
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'excerpt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'needs_shipping': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'picture': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'baseproduct_picture'", 'null': 'True', 'to': "orm['filer.Image']"}),
'picture_listing': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'baseproduct_picture_listing'", 'null': 'True', 'to': "orm['filer.Image']"}),
'product_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shop.Product']", 'unique': 'True', 'primary_key': 'True'}),
'subline': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'weight': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'ashop.downloadrelease': {
'Meta': {'ordering': "['name']", 'object_name': 'Downloadrelease', '_ormbases': ['ashop.Releaseproduct']},
'releaseproduct_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['ashop.Releaseproduct']", 'unique': 'True', 'primary_key': 'True'})
},
'ashop.hardwarerelease': {
'Meta': {'ordering': "['name']", 'object_name': 'Hardwarerelease', '_ormbases': ['ashop.Releaseproduct']},
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'medium': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'needs_shipping': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'releaseproduct_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['ashop.Releaseproduct']", 'unique': 'True', 'primary_key': 'True'}),
'weight': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'ashop.releaseproduct': {
'Meta': {'ordering': "['name']", 'object_name': 'Releaseproduct', '_ormbases': ['shop.Product']},
'product_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shop.Product']", 'unique': 'True', 'primary_key': 'True'}),
'release': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'releaseproduct_release'", 'to': "orm['alibrary.Release']"})
},
'ashop.singleproduct': {
'Meta': {'object_name': 'SingleProduct', 'db_table': "'cmsplugin_singleproduct'", '_ormbases': ['cms.CMSPlugin']},
'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['ashop.Baseproduct']"}),
'style': ('django.db.models.fields.CharField', [], {'default': "'l'", 'max_length': '24'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'filer.file': {
'Meta': {'object_name': 'File'},
'_file_size': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'all_files'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'has_all_mandatory_data': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),<|fim▁hole|> 'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_filer.file_set'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'sha1': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40', 'blank': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'filer.folder': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('parent', 'name'),)", 'object_name': 'Folder'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'filer_owned_folders'", 'null': 'True', 'to': "orm['auth.User']"}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'filer.image': {
'Meta': {'object_name': 'Image', '_ormbases': ['filer.File']},
'_height': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'_width': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'author': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'date_taken': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'default_alt_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'default_caption': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'file_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['filer.File']", 'unique': 'True', 'primary_key': 'True'}),
'must_always_publish_author_credit': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'must_always_publish_copyright': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'subject_location': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '64', 'null': 'True', 'blank': 'True'})
},
'shop.product': {
'Meta': {'object_name': 'Product'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_shop.product_set'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'unit_price': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '12', 'decimal_places': '2'})
},
'taggit.tag': {
'Meta': {'object_name': 'Tag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'})
},
'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_tagged_items'", 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_items'", 'to': "orm['taggit.Tag']"})
}
}
complete_apps = ['ashop']<|fim▁end|> | 'original_filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'owned_files'", 'null': 'True', 'to': "orm['auth.User']"}), |
<|file_name|>eventcardlist_component_test.js<|end_file_name|><|fim▁begin|>// Copyright 2015 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import eventsModule from 'events/events_module';
describe('Event Card List controller', () => {
/**
* Event Card List controller.
* @type {!events/eventcardlist_component.EventCardListController}
*/
let ctrl;
beforeEach(() => {
angular.mock.module(eventsModule.name);
angular.mock.inject(
($componentController) => { ctrl = $componentController('kdEventCardList', {}); });
});
it('should not filter any events if all option is selected', () => {
// given
let eventType = 'All';
let events = [
{
type: 'Warning',
message: 'event-1',
},
{
type: 'Normal',
message: 'event-2',
},
];
// when
let result = ctrl.filterByType(events, eventType);
// then
expect(result.length).toEqual(2);
});
it('should filter all non-warning events if warning option is selected', () => {
// given
let eventType = 'Warning';
let events = [
{
type: 'Warning',
message: 'event-1',
},
{
type: 'Normal',
message: 'event-2',
},
{
type: 'Normal',
message: 'event-3',
},
];
// when
let result = ctrl.filterByType(events, eventType);
// then
expect(result.length).toEqual(1);
});
it('should return true when there are events to display', () => {
// given
ctrl.filteredEvents = ['Some event'];
// when
let result = ctrl.hasEvents();
// then
expect(result).toBeTruthy();
});
it('should return false if there are no events to display', () => {
// when
let result = ctrl.hasEvents();
// then
expect(result).toBeFalsy();<|fim▁hole|>
it('should filter events and show only warnings', () => {
// given
ctrl.eventType = 'Warning';
ctrl.events = [
{
type: 'Warning',
message: 'event-1',
},
{
type: 'Normal',
message: 'event-2',
},
{
type: 'Normal',
message: 'event-3',
},
];
// when
ctrl.handleEventFiltering();
// then
expect(ctrl.filteredEvents.length).toEqual(1);
});
it('should not filter any events and show all', () => {
// given
ctrl.eventType = 'All';
ctrl.events = [
{
type: 'Warning',
message: 'event-1',
},
{
type: 'Normal',
message: 'event-2',
},
{
type: 'Normal',
message: 'event-3',
},
];
// when
ctrl.handleEventFiltering();
// then
expect(ctrl.filteredEvents.length).toEqual(3);
});
it('should return true when warning event', () => {
// given
let event = {
type: 'Warning',
message: 'event-1',
};
// when
let result = ctrl.isEventWarning(event);
// then
expect(result).toBeTruthy();
});
it('should return false when not warning event', () => {
// given
let event = {
type: 'Normal',
message: 'event-1',
};
// when
let result = ctrl.isEventWarning(event);
// then
expect(result).toBeFalsy();
});
});<|fim▁end|> | }); |
<|file_name|>mix299.py<|end_file_name|><|fim▁begin|>def Setup(Settings,DefaultModel):
# set1-test_of_models_against_datasets/osm299.py
Settings["experiment_name"] = "set1_Mix_model_versus_datasets_299px"
Settings["graph_histories"] = ['together'] #['all','together',[],[1,0],[0,0,0],[]]
# 5556x_minlen30_640px 5556x_minlen20_640px 5556x_reslen20_299px 5556x_reslen30_299px<|fim▁hole|>
n=0
Settings["models"][n]["dataset_name"] = "5556x_reslen30_299px"
Settings["models"][n]["dump_file_override"] = 'SegmentsData_marked_R100_4Tables.dump'
Settings["models"][n]["pixels"] = 299
Settings["models"][n]["model_type"] = 'img_osm_mix'
Settings["models"][n]["unique_id"] = 'mix_minlen30_299px'
Settings["models"][n]["top_repeat_FC_block"] = 2
Settings["models"][n]["epochs"] = 800
Settings["models"].append(DefaultModel.copy())
n+=1
Settings["models"][n]["dataset_pointer"] = -1
Settings["models"][n]["dataset_name"] = "5556x_reslen20_299px"
Settings["models"][n]["dump_file_override"] = 'SegmentsData_marked_R100_4Tables.dump'
Settings["models"][n]["pixels"] = 299
Settings["models"][n]["model_type"] = 'img_osm_mix'
Settings["models"][n]["unique_id"] = 'mix_minlen20_299px'
Settings["models"][n]["top_repeat_FC_block"] = 2
Settings["models"][n]["epochs"] = 800
Settings["models"].append(DefaultModel.copy())
n+=1
Settings["models"][n]["dataset_pointer"] = -1
Settings["models"][n]["dataset_name"] = "5556x_mark_res_299x299"
Settings["models"][n]["dump_file_override"] = 'SegmentsData_marked_R100_4Tables.dump'
Settings["models"][n]["pixels"] = 299
Settings["models"][n]["model_type"] = 'img_osm_mix'
Settings["models"][n]["unique_id"] = 'mix_nosplit_299px'
Settings["models"][n]["top_repeat_FC_block"] = 2
Settings["models"][n]["epochs"] = 800
return Settings<|fim▁end|> | |
<|file_name|>nuoc.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
from heapq import heapify, heappop, heappush
with open('NUOC.INP') as f:
m, n = map(int, f.readline().split())
height = [[int(i) for i in line.split()] for line in f]
queue = ([(h, 0, i) for i, h in enumerate(height[0])]<|fim▁hole|>heapify(queue)
visited = ([[True] * n]
+ [[True] + [False] * (n - 2) + [True] for _ in range(m - 2)]
+ [[True] * n])
result = 0
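# Border-inward flood fill over the heightmap: repeatedly pop the lowest cell
# on the current boundary; an unvisited neighbour below it traps
# (h - height) units of water, then joins the boundary with effective height
# max(height, h).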
while queue:
h, i, j = heappop(queue)
for x, y in (i + 1, j), (i - 1, j), (i, j + 1), (i, j - 1):
if 0 <= x < m and 0 <= y < n and not visited[x][y]:
result += max(0, h - height[x][y])
heappush(queue, (max(height[x][y], h), x, y))
visited[x][y] = True
with open('NUOC.OUT', 'w') as f: print(result, file=f)<|fim▁end|> | + [(h, m - 1, i) for i, h in enumerate(height[-1])]
+ [(height[i][0], i, 0) for i in range(m)]
+ [(height[i][-1], i, n - 1) for i in range(m)]) |
<|file_name|>emailutil.py<|end_file_name|><|fim▁begin|># Some useful functions to extract data out of emails
# Copyright (C) 2002-2012 John Goerzen & contributors
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import email
from email.Parser import Parser as MailParser
import time
def get_message_date(content, header='Date'):
"""
Parses mail and returns resulting timestamp.
:param header: the header to extract date from;
:returns: timestamp or `None` in the case of failure.
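    Example (a sketch with made-up message text):
    get_message_date("Date: Tue, 01 Jan 2013 09:00:00 +0100\n\nbody")
    returns 1357027200, the corresponding Unix timestamp.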
"""<|fim▁hole|> dateheader = message.get(header)
# parsedate_tz returns a 10-tuple that can be passed to mktime_tz
# Will be None if missing or not in a valid format. Note that
# indexes 6, 7, and 8 of the result tuple are not usable.
datetuple = email.utils.parsedate_tz(dateheader)
if datetuple is None:
return None
return email.utils.mktime_tz(datetuple)<|fim▁end|> | message = MailParser().parsestr(content, True) |
<|file_name|>tcp_socket.cc<|end_file_name|><|fim▁begin|>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
#include "tcp_socket.h"
using namespace easynet;
//TCPSocket: a client connection that the server has accepted
TCPSocket::TCPSocket(EPOLLSvrPtr s) {
socket_handler_ = nullptr;
svr_ = s;
msg_ = nullptr;
step_ = READ_HEAD;
}
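// Wire format handled by OnNetMessage() below (inferred from the parsing
// code): every packet is a fixed-size HEADER {version_, identify_, length_,
// type_} followed by length_ bytes of body; length_ and type_ arrive in
// network byte order and are converted with ntohs() once the header is
// complete. step_ tracks which half of the packet is being assembled.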
void TCPSocket::SetHandler(On_Socket_Handler h){
socket_handler_ = h;
}<|fim▁hole|>
void TCPSocket::OnNetMessage(){
if( nullptr == socket_handler_){
LOG(FATAL) << "tcpsocket handler is nullptr!";
return;
}
    //char *buff = svr_.get()->buff_; // be careful with raw array operations here - they can run out of bounds
for(;;){
char *buff = svr_.get()->buff_;
        // keep reading until an error occurs
int32_t ret = NetPackage::Read(peer_.fd_, buff, MAX_SOCK_BUFF);
//LOG(INFO) << "read ok ret[" << ret << "]";
if(0 == ret ){
LOG(INFO) << "connection closed by peer fd[" << peer_.fd_ << "]";
this->KickOut();
return;
}
if( ret < 0 ){
if( EAGAIN == errno || EWOULDBLOCK == errno ){
                // on the next read, buff is refilled from the beginning
//buff = svr_.get()->buff_;
//continue;
return;
}else{
LOG(INFO) << "read fail! fd[" << peer_.fd_ << "] errno[" << errno << "] msg[" << strerror(errno) << "]";
this->KickOut();
return;
}
}
int32_t more_data = ret;
while( more_data > 0){
if( nullptr == peer_.buff_ ){
peer_.buff_ = std::make_shared<DataBuffer>(peer_.fd_, HEADER_SZ);
}
auto data_buffer = peer_.buff_.get();
int32_t need_data = data_buffer->NeedData();
            // read the packet header
if( READ_HEAD == step_ ){
if( more_data < need_data ) {
                    // the header is not complete yet
data_buffer->AddData(more_data, buff);
return;
}
data_buffer->AddData(need_data, buff);
                // advance past the consumed bytes; buff now points at the body
buff += need_data;
more_data = (more_data - need_data) < 0 ? 0:(more_data - need_data);
msg_ = (MSG* )data_buffer->GetBuffPtr();
if(VERSION != msg_->header.version_ || IDENTIFY != msg_->header.identify_){
LOG(ERROR) << "version or identify is not fit! kick out client fd[" << peer_.fd_ << "] version["
<< (int)msg_->header.version_ << "] current version[" << (int)VERSION<<"]" << "identify["
<< (int)msg_->header.identify_ << "] current identify[" << (int)IDENTIFY << "]";
this->KickOut();
LOG(INFO) << "receive msg count[" << m.GetRecvPack() << "]";
return;
}
msg_->header.length_ = ntohs(msg_->header.length_);
msg_->header.type_ = ntohs(msg_->header.type_);
                // allocate memory for the body
data_buffer->Resize(msg_->header.length_ + HEADER_SZ);
                // after the resize the old msg_ pointer is invalid and must not be used
msg_ = (MSG* )data_buffer->GetBuffPtr();
need_data = data_buffer->NeedData();
step_ = READ_BODY;
}
            // at this point step_ must be READ_BODY
if( more_data > 0 ) {
                // read the body
if(more_data < need_data) {
data_buffer->AddData(more_data, buff);
return;
}
data_buffer->AddData(need_data, buff);
more_data = (more_data - need_data) < 0 ? 0:(more_data - need_data);
                // advance buff past the bytes just consumed
buff += need_data;
m.AddRecvPack();
                // client code only needs the payload, not the header, so strip the header off
char *pMsg = (char* )(data_buffer->GetBuffPtr());
pMsg += sizeof(HEADER);
auto f = socket_handler_;
try{
f(this->getID(), pMsg,msg_->header.length_, msg_->header.type_);
}catch(...){
LOG(ERROR) << "tcpsocket handler run fail!";
}
                // release the packet that has just been processed
auto tmp = std::move(peer_.buff_);
tmp = nullptr;
                peer_.buff_ = nullptr; // redundant: std::move already left peer_.buff_ as nullptr
                // go back to reading the next packet header
step_ = READ_HEAD;
}
}
}
}
void TCPSocket::KickOut() {
IPlayer::KickOut();
    //TODO: also remove this connection's entries from EPOLLSvr's events_map_ and player_map_
}<|fim▁end|> | |
<|file_name|>clienttest.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#
# Copyright (C) 2012 Oracle Corporation
#
# This file is part of VirtualBox Open Source Edition (OSE), as
# available from http://www.virtualbox.org. This file is free software;
# you can redistribute it and/or modify it under the terms of the GNU
# General Public License (GPL) as published by the Free Software
# Foundation, in version 2 as it comes in the "COPYING" file of the
# VirtualBox OSE distribution. VirtualBox OSE is distributed in the
# hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
#
# Things needed to be set up before running this sample:
# - Install Python and verify it works (2.7.2 will do, 3.x is untested yet)
# - On Windows: Install the PyWin32 extensions for your Python version
# (see http://sourceforge.net/projects/pywin32/)
# - If not already done, set the environment variable "VBOX_INSTALL_PATH"
# to point to your VirtualBox installation directory (which in turn must have
# the "sdk" subfolder")
# - Install the VirtualBox Python bindings by doing a
# "[python] vboxapisetup.py install"
# - Run this sample with "[python] clienttest.py"
import os,sys
import traceback
#
# Converts an enumeration to a printable string.
#
def enumToString(constants, enum, elem):
all = constants.all_values(enum)
for e in all.keys():
if str(elem) == str(all[e]):
return e
return "<unknown>"
def main(argv):
from vboxapi import VirtualBoxManager<|fim▁hole|> # Get the VirtualBox manager
mgr = wrapper.mgr
# Get the global VirtualBox object
vbox = wrapper.vbox
print "Running VirtualBox version %s" %(vbox.version)
# Get all constants through the Python wrapper code
vboxConstants = wrapper.constants
# Enumerate all defined machines
for mach in wrapper.getArray(vbox, 'machines'):
try:
# Be prepared for failures - the VM can be inaccessible
vmname = '<inaccessible>'
try:
vmname = mach.name
except Exception, e:
None
vmid = '';
try:
vmid = mach.id
except Exception, e:
None
# Print some basic VM information even if there were errors
print "Machine name: %s [%s]" %(vmname,vmid)
if vmname == '<inaccessible>' or vmid == '':
continue
# Print some basic VM information
print " State: %s" %(enumToString(vboxConstants, "MachineState", mach.state))
print " Session state: %s" %(enumToString(vboxConstants, "SessionState", mach.sessionState))
# Do some stuff which requires a running VM
if mach.state == vboxConstants.MachineState_Running:
# Get the session object
session = mgr.getSessionObject(vbox)
# Lock the current machine (shared mode, since we won't modify the machine)
mach.lockMachine(session, vboxConstants.LockType_Shared)
# Acquire the VM's console and guest object
console = session.console
guest = console.guest
# Retrieve the current Guest Additions runlevel and print
# the installed Guest Additions version
addRunLevel = guest.additionsRunLevel
print " Additions State: %s" %(enumToString(vboxConstants, "AdditionsRunLevelType", addRunLevel))
if addRunLevel != vboxConstants.AdditionsRunLevelType_None:
print " Additions Ver: %s" %(guest.additionsVersion)
# Get the VM's display object
display = console.display
# Get the VM's current display resolution + bit depth + position
screenNum = 0 # From first screen
(screenW, screenH, screenBPP, screenX, screenY, _) = display.getScreenResolution(screenNum)
print " Display (%d): %dx%d, %d BPP at %d,%d" %(screenNum, screenW, screenH, screenBPP, screenX, screenY)
# We're done -- don't forget to unlock the machine!
session.unlockMachine()
except Exception, e:
print "Errror [%s]: %s" %(mach.name, str(e))
traceback.print_exc()
# Call destructor and delete wrapper
del wrapper
if __name__ == '__main__':
main(sys.argv)<|fim▁end|> | # This is a VirtualBox COM/XPCOM API client, no data needed.
wrapper = VirtualBoxManager(None, None)
|
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># Test mocks and helpers
from __future__ import absolute_import
from webob import Request
from xblock.runtime import DictKeyValueStore, KvsFieldData
from xblock.test.tools import TestRuntime
def make_request(body, method='POST'):
"""
Helper method to make request<|fim▁hole|> """
request = Request.blank('/')
request.body = body.encode('utf-8')
request.method = method
return request
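# Example (sketch): make_request('{"selected": "2"}') returns a webob POST
# request whose body is the UTF-8 encoded payload, ready to hand to an
# XBlock handler.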
# pylint: disable=abstract-method
class MockRuntime(TestRuntime):
"""
Provides a mock XBlock runtime object.
"""
def __init__(self, **kwargs):
field_data = kwargs.get('field_data', KvsFieldData(DictKeyValueStore()))
super(MockRuntime, self).__init__(field_data=field_data)<|fim▁end|> | |
<|file_name|>azure_client_set.go<|end_file_name|><|fim▁begin|>package client
import (
"log"
"net/http"
"regexp"
"time"
"github.com/hashicorp/packer-plugin-sdk/useragent"
"github.com/Azure/azure-sdk-for-go/services/compute/mgmt/2019-12-01/compute"
"github.com/Azure/azure-sdk-for-go/services/compute/mgmt/2019-12-01/compute/computeapi"
"github.com/Azure/go-autorest/autorest"
version "github.com/hashicorp/packer/builder/azure/version"
)
type AzureClientSet interface {
MetadataClient() MetadataClientAPI
DisksClient() computeapi.DisksClientAPI
SnapshotsClient() computeapi.SnapshotsClientAPI
ImagesClient() computeapi.ImagesClientAPI
GalleryImagesClient() computeapi.GalleryImagesClientAPI
GalleryImageVersionsClient() computeapi.GalleryImageVersionsClientAPI
VirtualMachinesClient() computeapi.VirtualMachinesClientAPI
VirtualMachineImagesClient() VirtualMachineImagesClientAPI
<|fim▁hole|>}
var subscriptionPathRegex = regexp.MustCompile(`/subscriptions/([[:xdigit:]]{8}(-[[:xdigit:]]{4}){3}-[[:xdigit:]]{12})`)
var _ AzureClientSet = &azureClientSet{}
type azureClientSet struct {
sender autorest.Sender
authorizer autorest.Authorizer
subscriptionID string
PollingDelay time.Duration
}
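// Construction pattern shared by the typed clients below: create the SDK
// client for the subscription, wire in the shared authorizer and sender via
// configureAutorestClient, then apply PollingDelay (kept short so polling of
// long-running operations stays responsive, e.g. in tests).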
func New(c Config, say func(string)) (AzureClientSet, error) {
return new(c, say)
}
func new(c Config, say func(string)) (*azureClientSet, error) {
token, err := c.GetServicePrincipalToken(say, c.CloudEnvironment().ResourceManagerEndpoint)
if err != nil {
return nil, err
}
return &azureClientSet{
authorizer: autorest.NewBearerAuthorizer(token),
subscriptionID: c.SubscriptionID,
sender: http.DefaultClient,
PollingDelay: time.Second,
}, nil
}
func (s azureClientSet) SubscriptionID() string {
return s.subscriptionID
}
func (s azureClientSet) configureAutorestClient(c *autorest.Client) {
err := c.AddToUserAgent(useragent.String(version.AzurePluginVersion.FormattedVersion()))
if err != nil {
log.Printf("Error appending Packer plugin version to user agent.")
}
c.Authorizer = s.authorizer
c.Sender = s.sender
}
func (s azureClientSet) MetadataClient() MetadataClientAPI {
return metadataClient{
s.sender,
useragent.String(version.AzurePluginVersion.FormattedVersion()),
}
}
func (s azureClientSet) DisksClient() computeapi.DisksClientAPI {
c := compute.NewDisksClient(s.subscriptionID)
s.configureAutorestClient(&c.Client)
c.PollingDelay = s.PollingDelay
return c
}
func (s azureClientSet) SnapshotsClient() computeapi.SnapshotsClientAPI {
c := compute.NewSnapshotsClient(s.subscriptionID)
s.configureAutorestClient(&c.Client)
c.PollingDelay = s.PollingDelay
return c
}
func (s azureClientSet) ImagesClient() computeapi.ImagesClientAPI {
c := compute.NewImagesClient(s.subscriptionID)
s.configureAutorestClient(&c.Client)
c.PollingDelay = s.PollingDelay
return c
}
func (s azureClientSet) VirtualMachinesClient() computeapi.VirtualMachinesClientAPI {
c := compute.NewVirtualMachinesClient(s.subscriptionID)
s.configureAutorestClient(&c.Client)
c.PollingDelay = s.PollingDelay
return c
}
func (s azureClientSet) VirtualMachineImagesClient() VirtualMachineImagesClientAPI {
c := compute.NewVirtualMachineImagesClient(s.subscriptionID)
s.configureAutorestClient(&c.Client)
c.PollingDelay = s.PollingDelay
return VirtualMachineImagesClient{c}
}
func (s azureClientSet) GalleryImagesClient() computeapi.GalleryImagesClientAPI {
c := compute.NewGalleryImagesClient(s.subscriptionID)
s.configureAutorestClient(&c.Client)
c.PollingDelay = s.PollingDelay
return c
}
func (s azureClientSet) GalleryImageVersionsClient() computeapi.GalleryImageVersionsClientAPI {
c := compute.NewGalleryImageVersionsClient(s.subscriptionID)
s.configureAutorestClient(&c.Client)
c.PollingDelay = s.PollingDelay
return c
}
func (s azureClientSet) PollClient() autorest.Client {
c := autorest.NewClientWithUserAgent("Packer-Azure-ClientSet")
s.configureAutorestClient(&c)
c.PollingDelay = time.Second * 5
return c
}<|fim▁end|> | PollClient() autorest.Client
// SubscriptionID returns the subscription ID that this client set was created for
SubscriptionID() string |
<|file_name|>social.py<|end_file_name|><|fim▁begin|>"""
Social.py
Contains elements that enable connecting with external social sites.
Copyright (C) 2015 Timothy Edmund Crosley
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
import hashlib
import urllib
from . import ClientSide, Factory, Layout
from .Base import Node, TextNode
from .Buttons import Link
from .Display import Image
Factory = Factory.Factory("Social")
class Social(Node):
__slots__ = ('account')
properties = Node.properties.copy()
properties['account'] = {'action':'classAttribute'}
def _create(self, name=None, id=None, parent=None, html="", *kargs, **kwargs):
Node._create(self, None, None, parent, *kargs, **kwargs)
self.account = ""
class TwitterBadge(Social):
"""
Displays a clickable twitter badge.
"""
def toHTML(self, formatted=False, *args, **kwargs):
"""
Returns the twitter badge as defined by the api directly
"""
return ("""<a href="https://twitter.com/%(account)s" class="twitter-follow-button" """ + \
"""data-show-count="false">Follow @%(account)s</a><script>!function(d,s,id){""" + \
"""var js,fjs=d.getElementsByTagName(s)[0];if(!d.getElementById(id)){js=d.createElement""" + \
"""(s);js.id=id;js.src="//platform.twitter.com/widgets.js";fjs.parentNode.insertBefore(""" + \
"""js,fjs);}}(document,"script","twitter-wjs");</script>""") % {'account':self.account}
Factory.addProduct(TwitterBadge)
class TwitterAPI(Node):
__slots__ = ()
def _create(self, name=None, id=None, parent=None, html="", *kargs, **kwargs):
Node._create(self, name, id, parent, *kargs, **kwargs)
self.addScript('window.twttr = (function (d,s,id) {'
'var t, js, fjs = d.getElementsByTagName(s)[0];'
'if (d.getElementById(id)) return; js=d.createElement(s); js.id=id;'
'js.src="https://platform.twitter.com/widgets.js"; fjs.parentNode.insertBefore(js, fjs);'
'return window.twttr || (t = { _e: [], ready: function(f){ t._e.push(f) } });'
'}(document, "script", "twitter-wjs"));')
Factory.addProduct(TwitterAPI)
class Tweet(Link):
__slots__ = ()
properties = Link.properties.copy()
properties['hideCount'] = {'action':'hideCount', 'type':'bool', 'info':"Don't show the number of re-tweets"}
properties['largeButton'] = {'action':'useLargeButton', 'type':'bool', 'info':'User larger tweet button size'}
properties['url'] = {'action':'attribute', 'name':'data-url', 'info':'Set the url the tweet will link to'}
properties['hashtag'] = {'action':'attribute', 'name':'data-hashtags', 'info':'Associated a hashtag to the tweet'}
properties['via'] = {'action':'attribute', 'name':'data-via', 'info':'Associated with another twitter account'}
properties['message'] = {'action':'attribute', 'name':'data-text', 'info':'The tweet message text'}
def _create(self, name=None, id=None, parent=None, *kargs, **kwargs):
Link._create(self, name, id, parent, *kargs, **kwargs)
self.setText("Tweet")
self.addClass("twitter-share-button")
self.setDestination("https://twitter.com/share")
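        # widgets.js (loaded by TwitterAPI) scans for anchors with the
        # twitter-share-button class and upgrades them in place, reading the
        # data-* attributes declared in `properties` above.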
def hideCount(self, hide=True):
if hide:
self.attributes['data-count'] = 'none'
else:
self.attributes.pop('data-count', None)
def useLargeButton(self, use=True):
if use:
self.attributes['data-size'] = 'large'
else:
self.attributes.pop('data-size', None)
def toHTML(self, formatted=False, *args, **kwargs):
"""
        Returns the rendered tweet button HTML (the widgets.js script itself is provided by TwitterAPI)
"""
html = Link.toHTML(self, formatted, *args, **kwargs)
return html
Factory.addProduct(Tweet)
class GooglePlusAPI(Node):
__slots__ = ()
def _create(self, name=None, id=None, parent=None, html="", *kargs, **kwargs):
Node._create(self, name, id, parent, *kargs, **kwargs)
self.addScript("window.___gcfg = {lang:'en-US','parsetags':'explicit'};"
"(function() {var po = document.createElement('script');"
"po.type = 'text/javascript'; po.async = true;"
"po.src = 'https://apis.google.com/js/plusone.js';"
"var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(po, s);"
"})();")
Factory.addProduct(GooglePlusAPI)
class GooglePlusShare(Layout.Box):
__slots__ = ()
properties = Layout.Box.properties.copy()
properties['size'] = {'action':'attribute', 'name':'data-height', 'type':'int',
'info':"The Size of the of the button, 2 is large"}
properties['url'] = {'action':'attribute', 'name':'data-href', 'info':"The url the google plus button points to"}
def _create(self, name=None, id=None, parent=None, html="", *kargs, **kwargs):
Node._create(self, name, id, parent, *kargs, **kwargs)
self.addClass("g-plus")
self.attributes['data-action'] = "share"
self.attributes['data-annotation'] = "none"
Factory.addProduct(GooglePlusShare)
class GooglePlusBadge(Social):
"""
Displays a clickable google plus badge.
"""
__slots__ = ('link', )
def _create(self, name=None, id=None, parent=None, html="", *kargs, **kwargs):
Social._create(self, None, None, parent, *kargs, **kwargs)
self.link = self.add(Link())
self.link.attributes['rel'] = "publisher"
self.link.addClass("WGooglePlusBadge")
self.link += Image(src="https://ssl.gstatic.com/images/icons/gplus-32.png", alt="Google+")
def _render(self):
self.link.setDestination("https://plus.google.com/%s?prsrc=3" % self.account)
Factory.addProduct(GooglePlusBadge)
class FacebookLike(Social):
"""
Adds a facebook like link to your site
"""
def toHTML(self, formatted=False, *args, **kwargs):
return ("""<div class="fb-like" data-href="https://www.facebook.com/%s" data-send="false""" + \
"""data-layout="button_count" data-width="300" data-show-faces="false"></div>""") % self.account
Factory.addProduct(FacebookLike)
class FacebookAPI(Layout.Box):
"""
Adds facebook api support to your site and optionally calls the init method on it - only add once.
"""
__slots__ = ('loginURL', 'logoutURL', 'appId', 'init')
properties = Node.properties.copy()
properties['appId'] = {'action':'classAttribute'}
properties['init'] = {'action':'classAttribute', 'type':'bool'}
properties['loginURL'] = {'action':'classAttribute'}
properties['logoutURL'] = {'action':'classAttribute'}
class ClientSide(Layout.Box.ClientSide):
def feed(self, name, caption, description, link, picture=None, redirect=None, callback=None):
"""
Posts defined data to the users news feed.
"""
arguments = {'method':'feed', 'name':name, 'caption':caption, 'link':link}
if picture:
arguments['picture'] = picture
if redirect:
arguments['redirect_url'] = redirect
            if description:
                arguments['description'] = description
            if callback:
                return ClientSide.call("FB.ui", arguments, callback)
            return ClientSide.call("FB.ui", arguments)
def _create(self, id=None, name=None, parent=None, *kargs, **kwargs):
Layout.Box._create(self, "fb-root", name, parent, *kargs, **kwargs)
self.appId = ""
self.init = False
self.loginURL = None
self.logoutURL = None
def _render(self):
"""
Returns the api support code directly
"""
if self.init:
extra = ""
if self.loginURL:
extra += "FB.Event.subscribe('auth.login', function(response){window.location = '%s'});" % \
self.loginURL
if self.logoutURL:
extra += "FB.Event.subscribe('auth.logout', function(response){window.location = '%s'});" % \
self.logoutURL
self.addScript("""window.fbAsyncInit = function(){FB.init
({appId: '%s', status: true, cookie: true, xfbml: true});
%s
}""" % (self.appId, extra))
self.addScript("""(function(d, s, id){
var js, fjs = d.getElementsByTagName(s)[0];
if (d.getElementById(id)) {return;}
js = d.createElement(s); js.id = id;
js.async = true;
js.src = "//connect.facebook.net/en_US/all.js";
fjs.parentNode.insertBefore(js, fjs);
}(document, 'script', 'facebook-jssdk'));""")
Factory.addProduct(FacebookAPI)
class FacebookLogin(Node):
"""
Adds a facebook login button to the page
"""
__slots__ = ('text', )
tagName = "fb:login-button"
properties = Node.properties.copy()
properties['show-faces'] = {'action':'attribute', 'type':'bool',
'info':'Specifies whether to show faces underneath the Login button.'}
properties['width'] = {'action':'attribute', 'type':'int',
'info':'The width of the plugin in pixels. Default width: 200px.'}
properties['size'] = {'action':'attribute',
'info':'Different sized buttons: small, medium, large, xlarge (default: medium).'}
properties['max-rows'] = {'action':'attribute', 'type':'int',
'info':'The maximum number of rows of profile pictures to display. Default value: 1.'}
properties['scope'] = {'action':'attribute', 'info':'a comma separated list of extended permissions to request.'}
    properties['registration-url'] = {'action':'attribute',
'info':'URL to redirect to on initial registration.'}
properties['text'] = {'action':'classAttribute', 'info':'Set a custom label for the facebook connect button.'}
def _create(self, id=None, name=None, parent=None, *kargs, **kwargs):
Node._create(self, id, name, parent, *kargs, **kwargs)
self.text = None
def _render(self):
if self.text:
if not self.childElements:
self += TextNode()
self.childElements[0].setText(self.text)
elif self.childElements:
self.childElements[0].setText("")
class ClientSide(Node.ClientSide):
"""
Defines the client-side behavior of the facebook api.
"""
def logout(self):
return ClientSide.call("FB.logout")
Factory.addProduct(FacebookLogin)
class Gravatar(Image):
"""
A Gravatar user image based on an email id
"""
__slots__ = ('email', '_size', '_default', '_rating')
properties = Image.properties.copy()
properties['email'] = {'action':'classAttribute'}
properties['size'] = {'action':'setSize', 'type':'int'}
properties['rating'] = {'action':'setRating'}
properties['default'] = {'action':'setDefault'}
def _create(self, name=None, id=None, parent=None, html="", *kargs, **kwargs):
Image._create(self, None, None, parent, *kargs, **kwargs)
self.email = ""
self._size = 80
self._default = "mm"
self._rating = "g"
def _render(self):
self.attributes['src'] = "http://www.gravatar.com/avatar/%s?s=%s&r=%s&d=%s" % \
(hashlib.md5(self.email.encode('utf-8')).hexdigest(), self.size(),<|fim▁hole|> self.style['height'] = "%spx" % self.size()
def profileURL(self):
"""
Returns the associated profile URL that can be used to modify the provided image
"""
return "http://www.gravatar.com/%s" % hashlib.md5(self.email.encode('utf-8')).hexdigest()
def setSize(self, size):
"""
        Set the size of the requested Gravatar image in pixels (Gravatar allows 1 - 2048)
"""
size = int(size)
if size > 2048 or size < 1:
raise ValueError("Gravatar only supports requesting image sizes 1 - 2048")
self._size = size
def size(self):
"""
Returns the size of this gravatar
"""
return self._size
def setRating(self, rating):
"""
Sets the maximum rating of the returned image (g, pg, r, or x)
"""
rating = rating.lower()
if rating not in ('g', 'pg', 'r', 'x'):
raise ValueError("Gravatar only supports the ratings g, pg, r, and x")
self._rating = rating
def rating(self):
"""
Returns the maximum rating allowed for this image
"""
return self._rating
def setDefault(self, default):
"""
Sets the default image in the case the provided email does not have a gravatar
can be a direct url or one of the included defaults:
404, mm, identicon, monsterid, wavatar, retro, and blank
"""
        self._default = urllib.quote(default)
def default(self):
"""
Returns the image set to load if none is available for the specified email address
"""
return self._default
Factory.addProduct(Gravatar)<|fim▁end|> | self.rating(), self.default())
self.style['width'] = "%spx" % self.size() |
<|file_name|>guard.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Machinery to conditionally expose things.
use js::jsapi::JSContext;
use js::rust::HandleObject;
use servo_config::prefs;
/// A container with a condition.
pub struct Guard<T: Clone + Copy> {
condition: Condition,
value: T,
}
impl<T: Clone + Copy> Guard<T> {
/// Construct a new guarded value.
pub const fn new(condition: Condition, value: T) -> Self {
Guard {<|fim▁hole|>
/// Expose the value if the condition is satisfied.
///
/// The passed handle is the object on which the value may be exposed.
pub unsafe fn expose(&self, cx: *mut JSContext, obj: HandleObject) -> Option<T> {
if self.condition.is_satisfied(cx, obj) {
Some(self.value)
} else {
None
}
}
}
/// A condition to expose things.
pub enum Condition {
/// The condition is satisfied if the function returns true.
Func(unsafe fn(*mut JSContext, HandleObject) -> bool),
/// The condition is satisfied if the preference is set.
Pref(&'static str),
/// The condition is always satisfied.
Satisfied,
}
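// Usage sketch (the pref name and guarded value are illustrative, not from
// this crate): Guard::new(Condition::Pref("dom.bluetooth.enabled"), method)
// yields `method` from `expose()` only while that preference is set to true.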
impl Condition {
unsafe fn is_satisfied(&self, cx: *mut JSContext, obj: HandleObject) -> bool {
match *self {
Condition::Pref(name) => prefs::pref_map().get(name).as_bool().unwrap_or(false),
Condition::Func(f) => f(cx, obj),
Condition::Satisfied => true,
}
}
}<|fim▁end|> | condition: condition,
value: value,
}
} |
<|file_name|>api_scanner_main.cc<|end_file_name|><|fim▁begin|>/**
******************************************************************************
* api-scanner - Scan for API imports from a packaged 360 game *
******************************************************************************
* Copyright 2015 x1nixmzeng. All rights reserved. *
* Released under the BSD license - see LICENSE in the root for more details. *
******************************************************************************
*/
#include "api_scanner_loader.h"<|fim▁hole|>
DEFINE_string(target, "", "List of files to extract imports from");
int api_scanner_main(std::vector<std::wstring>& args) {
// XXX we need gflags to split multiple flags into arrays for us
if (args.size() == 2 || !FLAGS_target.empty()) {
apiscanner_loader loader_;
std::wstring target(cvars::target.empty() ? args[1]
: xe::to_wstring(cvars::target));
std::wstring target_abs = xe::to_absolute_path(target);
// XXX For each target?
if (loader_.LoadTitleImports(target)) {
for (const auto title : loader_.GetAllTitles()) {
printf("%08x\n", title.title_id);
for (const auto import : title.imports) {
printf("\t%s\n", import.c_str());
}
}
}
}
return 0;
}
} // namespace tools
} // namespace xe
DEFINE_ENTRY_POINT(L"api-scanner", L"api-scanner --target=<target file>",
xe::tools::api_scanner_main);<|fim▁end|> |
namespace xe {
namespace tools { |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>import hashlib
import json
import os
import uuid
from django import forms
from django.conf import settings
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.core.serializers.json import DjangoJSONEncoder
from django.db import models
from django.shortcuts import redirect
from django.template.response import TemplateResponse
from modelcluster.contrib.taggit import ClusterTaggableManager
from modelcluster.fields import ParentalKey, ParentalManyToManyField
from modelcluster.models import ClusterableModel
from taggit.managers import TaggableManager
from taggit.models import ItemBase, TagBase, TaggedItemBase
from wagtail.admin.edit_handlers import (
FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel,
TabbedInterface)
from wagtail.admin.forms import WagtailAdminPageForm
from wagtail.admin.mail import send_mail
from wagtail.contrib.forms.forms import FormBuilder
from wagtail.contrib.forms.models import (
FORM_FIELD_CHOICES, AbstractEmailForm, AbstractFormField, AbstractFormSubmission)
from wagtail.contrib.forms.views import SubmissionsListView
from wagtail.contrib.settings.models import BaseSetting, register_setting
from wagtail.contrib.sitemaps import Sitemap
from wagtail.contrib.table_block.blocks import TableBlock
from wagtail.core.blocks import CharBlock, RawHTMLBlock, RichTextBlock, StructBlock
from wagtail.core.fields import RichTextField, StreamField
from wagtail.core.models import Orderable, Page, PageManager, PageQuerySet, Task
from wagtail.documents.edit_handlers import DocumentChooserPanel
from wagtail.documents.models import AbstractDocument, Document
from wagtail.images.blocks import ImageChooserBlock
from wagtail.images.edit_handlers import ImageChooserPanel
from wagtail.images.models import AbstractImage, AbstractRendition, Image
from wagtail.search import index
from wagtail.snippets.edit_handlers import SnippetChooserPanel
from wagtail.snippets.models import register_snippet
from wagtail.utils.decorators import cached_classmethod
from .forms import FormClassAdditionalFieldPageForm, ValidatedPageForm
EVENT_AUDIENCE_CHOICES = (
('public', "Public"),
('private', "Private"),
)
COMMON_PANELS = (
FieldPanel('slug'),
FieldPanel('seo_title'),
FieldPanel('show_in_menus'),
FieldPanel('search_description'),
)
# Link fields
class LinkFields(models.Model):
link_external = models.URLField("External link", blank=True)
link_page = models.ForeignKey(
'wagtailcore.Page',
null=True,
blank=True,
related_name='+',
on_delete=models.CASCADE
)
link_document = models.ForeignKey(
'wagtaildocs.Document',
null=True,
blank=True,
related_name='+',
on_delete=models.CASCADE
)
@property
def link(self):
if self.link_page:
return self.link_page.url
elif self.link_document:
return self.link_document.url
else:
return self.link_external
panels = [
FieldPanel('link_external'),
PageChooserPanel('link_page'),
DocumentChooserPanel('link_document'),
]
class Meta:
abstract = True
# Carousel items
class CarouselItem(LinkFields):
image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
embed_url = models.URLField("Embed URL", blank=True)
caption = models.CharField(max_length=255, blank=True)
panels = [
ImageChooserPanel('image'),
FieldPanel('embed_url'),
FieldPanel('caption'),
MultiFieldPanel(LinkFields.panels, "Link"),
]
class Meta:
abstract = True
# Related links
class RelatedLink(LinkFields):
title = models.CharField(max_length=255, help_text="Link title")
panels = [
FieldPanel('title'),
MultiFieldPanel(LinkFields.panels, "Link"),
]
class Meta:
abstract = True
# Simple page
class SimplePage(Page):
content = models.TextField()
content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('content'),
]
def get_admin_display_title(self):
return "%s (simple page)" % super().get_admin_display_title()
# Page with Excluded Fields when copied
class PageWithExcludedCopyField(Page):
content = models.TextField()
# Exclude this field from being copied
special_field = models.CharField(
blank=True, max_length=255, default='Very Special')
exclude_fields_in_copy = ['special_field']
content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('special_field'),
FieldPanel('content'),
]
class PageWithOldStyleRouteMethod(Page):
"""
Prior to Wagtail 0.4, the route() method on Page returned an HttpResponse
rather than a Page instance. As subclasses of Page may override route,
we need to continue accepting this convention (albeit as a deprecated API).
"""
content = models.TextField()
template = 'tests/simple_page.html'
def route(self, request, path_components):
return self.serve(request)
# File page
class FilePage(Page):
file_field = models.FileField()
FilePage.content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('file_field'),
]
# Event page
class EventPageCarouselItem(Orderable, CarouselItem):
page = ParentalKey('tests.EventPage', related_name='carousel_items', on_delete=models.CASCADE)
class EventPageRelatedLink(Orderable, RelatedLink):
page = ParentalKey('tests.EventPage', related_name='related_links', on_delete=models.CASCADE)
class EventPageSpeakerAward(Orderable, models.Model):
speaker = ParentalKey('tests.EventPageSpeaker', related_name='awards', on_delete=models.CASCADE)
name = models.CharField("Award name", max_length=255)
date_awarded = models.DateField(null=True, blank=True)
panels = [
FieldPanel('name'),
FieldPanel('date_awarded'),
]
class EventPageSpeaker(Orderable, LinkFields, ClusterableModel):
page = ParentalKey('tests.EventPage', related_name='speakers', related_query_name='speaker', on_delete=models.CASCADE)
first_name = models.CharField("Name", max_length=255, blank=True)
last_name = models.CharField("Surname", max_length=255, blank=True)
image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
@property
def name_display(self):
return self.first_name + " " + self.last_name
panels = [
FieldPanel('first_name'),
FieldPanel('last_name'),
ImageChooserPanel('image'),
MultiFieldPanel(LinkFields.panels, "Link"),
InlinePanel('awards', label="Awards"),
]
class EventCategory(models.Model):
name = models.CharField("Name", max_length=255)
def __str__(self):
return self.name
# Override the standard WagtailAdminPageForm to add validation on start/end dates
# that appears as a non-field error
class EventPageForm(WagtailAdminPageForm):
def clean(self):
cleaned_data = super().clean()
# Make sure that the event starts before it ends
start_date = cleaned_data['date_from']
end_date = cleaned_data['date_to']
if start_date and end_date and start_date > end_date:
raise ValidationError('The end date must be after the start date')
return cleaned_data
class EventPage(Page):
date_from = models.DateField("Start date", null=True)
date_to = models.DateField(
"End date",
null=True,
blank=True,
help_text="Not required if event is on a single day"
)
time_from = models.TimeField("Start time", null=True, blank=True)
time_to = models.TimeField("End time", null=True, blank=True)
audience = models.CharField(max_length=255, choices=EVENT_AUDIENCE_CHOICES)
location = models.CharField(max_length=255)
body = RichTextField(blank=True)
cost = models.CharField(max_length=255)
signup_link = models.URLField(blank=True)
feed_image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
categories = ParentalManyToManyField(EventCategory, blank=True)
search_fields = [
index.SearchField('get_audience_display'),
index.SearchField('location'),
index.SearchField('body'),
index.FilterField('url_path'),
]
password_required_template = 'tests/event_page_password_required.html'
base_form_class = EventPageForm
EventPage.content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('date_from'),
FieldPanel('date_to'),
FieldPanel('time_from'),
FieldPanel('time_to'),
FieldPanel('location'),
FieldPanel('audience'),
FieldPanel('cost'),
FieldPanel('signup_link'),
InlinePanel('carousel_items', label="Carousel items"),
FieldPanel('body', classname="full"),
InlinePanel('speakers', label="Speakers", heading="Speaker lineup"),
InlinePanel('related_links', label="Related links"),
FieldPanel('categories'),
# InlinePanel related model uses `pk` not `id`
InlinePanel('head_counts', label='Head Counts'),
]
EventPage.promote_panels = [
MultiFieldPanel(COMMON_PANELS, "Common page configuration"),
ImageChooserPanel('feed_image'),
]
class HeadCountRelatedModelUsingPK(models.Model):
"""Related model that uses a custom primary key (pk) not id"""
custom_id = models.AutoField(primary_key=True)
event_page = ParentalKey(
EventPage,
on_delete=models.CASCADE,
related_name='head_counts'
)
head_count = models.IntegerField()
panels = [FieldPanel('head_count')]
# Override the standard WagtailAdminPageForm to add field that is not in model
# so that we can test additional potential issues like comparing versions
class FormClassAdditionalFieldPage(Page):
location = models.CharField(max_length=255)
body = RichTextField(blank=True)
content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('location'),
FieldPanel('body'),
        FieldPanel('code'),  # not in model; supplied by the base_form_class set below
]
base_form_class = FormClassAdditionalFieldPageForm
# Just to be able to test multi table inheritance
class SingleEventPage(EventPage):
excerpt = models.TextField(
max_length=255,
blank=True,
null=True,
        help_text="Short text to describe what this action is about"
)
# Give this page model a custom URL routing scheme
def get_url_parts(self, request=None):
url_parts = super().get_url_parts(request=request)
if url_parts is None:
return None
else:
site_id, root_url, page_path = url_parts
return (site_id, root_url, page_path + 'pointless-suffix/')
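    # Illustration (hypothetical slugs, added for clarity): if this page is
    # served at /events/my-event/, get_url_parts() above turns its public URL
    # into /events/my-event/pointless-suffix/, while route() below accepts a
    # request for that suffix and serves the page as if it were requested
    # directly.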
def route(self, request, path_components):
if path_components == ['pointless-suffix']:
# treat this as equivalent to a request for this page
return super().route(request, [])
else:
# fall back to default routing rules
return super().route(request, path_components)
def get_admin_display_title(self):
return "%s (single event)" % super().get_admin_display_title()
SingleEventPage.content_panels = [FieldPanel('excerpt')] + EventPage.content_panels
# "custom" sitemap object
class EventSitemap(Sitemap):
pass
# Event index (has a separate AJAX template, and a custom template context)
class EventIndex(Page):
intro = RichTextField(blank=True)
ajax_template = 'tests/includes/event_listing.html'
def get_events(self):
return self.get_children().live().type(EventPage)
def get_paginator(self):
return Paginator(self.get_events(), 4)
def get_context(self, request, page=1):
# Pagination
paginator = self.get_paginator()
try:
events = paginator.page(page)
except PageNotAnInteger:
events = paginator.page(1)
except EmptyPage:
events = paginator.page(paginator.num_pages)
# Update context
context = super().get_context(request)
context['events'] = events
return context
def route(self, request, path_components):
if self.live and len(path_components) == 1:
try:
return self.serve(request, page=int(path_components[0]))
except (TypeError, ValueError):
pass
return super().route(request, path_components)
def get_static_site_paths(self):
# Get page count
page_count = self.get_paginator().num_pages
# Yield a path for each page
for page in range(page_count):
yield '/%d/' % (page + 1)
# Yield from superclass
for path in super().get_static_site_paths():
yield path
def get_sitemap_urls(self, request=None):
# Add past events url to sitemap
return super().get_sitemap_urls(request=request) + [
{
'location': self.full_url + 'past/',
'lastmod': self.latest_revision_created_at
}
]
def get_cached_paths(self):
return super().get_cached_paths() + [
'/past/'
]
EventIndex.content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('intro', classname="full"),
]
class FormField(AbstractFormField):
page = ParentalKey('FormPage', related_name='form_fields', on_delete=models.CASCADE)
class FormPage(AbstractEmailForm):
def get_context(self, request):
context = super().get_context(request)
context['greeting'] = "hello world"
return context
# This is redundant (SubmissionsListView is the default view class), but importing
# SubmissionsListView in this models.py helps us to confirm that this recipe
# https://docs.wagtail.io/en/stable/reference/contrib/forms/customisation.html#customise-form-submissions-listing-in-wagtail-admin
# works without triggering circular dependency issues -
# see https://github.com/wagtail/wagtail/issues/6265
submissions_list_view_class = SubmissionsListView
FormPage.content_panels = [
FieldPanel('title', classname="full title"),
InlinePanel('form_fields', label="Form fields"),
MultiFieldPanel([
FieldPanel('to_address', classname="full"),
FieldPanel('from_address', classname="full"),
FieldPanel('subject', classname="full"),
], "Email")
]
# FormPage with a non-HTML extension
class JadeFormField(AbstractFormField):
page = ParentalKey('JadeFormPage', related_name='form_fields', on_delete=models.CASCADE)
class JadeFormPage(AbstractEmailForm):
template = "tests/form_page.jade"
JadeFormPage.content_panels = [
FieldPanel('title', classname="full title"),
InlinePanel('form_fields', label="Form fields"),
MultiFieldPanel([
FieldPanel('to_address', classname="full"),
FieldPanel('from_address', classname="full"),
FieldPanel('subject', classname="full"),
], "Email")
]
# Form page that redirects to a different page
class RedirectFormField(AbstractFormField):
page = ParentalKey('FormPageWithRedirect', related_name='form_fields', on_delete=models.CASCADE)
class FormPageWithRedirect(AbstractEmailForm):
thank_you_redirect_page = models.ForeignKey(
'wagtailcore.Page',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+',
)
def get_context(self, request):
context = super(FormPageWithRedirect, self).get_context(request)
context['greeting'] = "hello world"
return context
def render_landing_page(self, request, form_submission=None, *args, **kwargs):
"""
Renders the landing page OR if a receipt_page_redirect is chosen redirects to this page.
"""
if self.thank_you_redirect_page:
return redirect(self.thank_you_redirect_page.url, permanent=False)
return super(FormPageWithRedirect, self).render_landing_page(request, form_submission, *args, **kwargs)
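# Flow sketch for FormPageWithRedirect (defined above): on a valid POST,
# process_form_submission() runs, then render_landing_page() either renders
# the default landing template or, when thank_you_redirect_page is set,
# returns a 302 redirect to that page.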
FormPageWithRedirect.content_panels = [
FieldPanel('title', classname="full title"),
PageChooserPanel('thank_you_redirect_page'),
InlinePanel('form_fields', label="Form fields"),
MultiFieldPanel([
FieldPanel('to_address', classname="full"),
FieldPanel('from_address', classname="full"),
FieldPanel('subject', classname="full"),
], "Email")
]
# FormPage with a custom FormSubmission
class FormPageWithCustomSubmission(AbstractEmailForm):
"""
This Form page:
* Have custom submission model
* Have custom related_name (see `FormFieldWithCustomSubmission.page`)
* Saves reference to a user
* Doesn't render html form, if submission for current user is present
"""
intro = RichTextField(blank=True)
thank_you_text = RichTextField(blank=True)
def get_context(self, request, *args, **kwargs):
context = super().get_context(request)
context['greeting'] = "hello world"
return context
def get_form_fields(self):
return self.custom_form_fields.all()
def get_data_fields(self):
data_fields = [
('useremail', 'User email'),
]
data_fields += super().get_data_fields()
return data_fields
def get_submission_class(self):
return CustomFormPageSubmission
def process_form_submission(self, form):
form_submission = self.get_submission_class().objects.create(
form_data=json.dumps(form.cleaned_data, cls=DjangoJSONEncoder),
page=self, user=form.user
)
if self.to_address:
addresses = [x.strip() for x in self.to_address.split(',')]
content = '\n'.join([x[1].label + ': ' + str(form.data.get(x[0])) for x in form.fields.items()])
send_mail(self.subject, content, addresses, self.from_address,)
# process_form_submission should now return the created form_submission
return form_submission
def serve(self, request, *args, **kwargs):
if self.get_submission_class().objects.filter(page=self, user__pk=request.user.pk).exists():
return TemplateResponse(
request,
self.template,
self.get_context(request)
)<|fim▁hole|> return super().serve(request, *args, **kwargs)
FormPageWithCustomSubmission.content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('intro', classname="full"),
InlinePanel('custom_form_fields', label="Form fields"),
FieldPanel('thank_you_text', classname="full"),
MultiFieldPanel([
FieldPanel('to_address', classname="full"),
FieldPanel('from_address', classname="full"),
FieldPanel('subject', classname="full"),
], "Email")
]
class FormFieldWithCustomSubmission(AbstractFormField):
page = ParentalKey(FormPageWithCustomSubmission, on_delete=models.CASCADE, related_name='custom_form_fields')
class CustomFormPageSubmission(AbstractFormSubmission):
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
def get_data(self):
form_data = super().get_data()
form_data.update({
'useremail': self.user.email,
})
return form_data
# Custom form page with custom submission listing view and form submission
class FormFieldForCustomListViewPage(AbstractFormField):
page = ParentalKey(
'FormPageWithCustomSubmissionListView',
related_name='form_fields',
on_delete=models.CASCADE
)
class FormPageWithCustomSubmissionListView(AbstractEmailForm):
"""Form Page with customised submissions listing view"""
intro = RichTextField(blank=True)
thank_you_text = RichTextField(blank=True)
def get_submissions_list_view_class(self):
from .views import CustomSubmissionsListView
return CustomSubmissionsListView
def get_submission_class(self):
return CustomFormPageSubmission
def get_data_fields(self):
data_fields = [
('useremail', 'User email'),
]
data_fields += super().get_data_fields()
return data_fields
content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('intro', classname="full"),
InlinePanel('form_fields', label="Form fields"),
FieldPanel('thank_you_text', classname="full"),
MultiFieldPanel([
FieldPanel('to_address', classname="full"),
FieldPanel('from_address', classname="full"),
FieldPanel('subject', classname="full"),
], "Email")
]
# FormPage with custom FormBuilder
EXTENDED_CHOICES = FORM_FIELD_CHOICES + (('ipaddress', 'IP Address'),)
class ExtendedFormField(AbstractFormField):
"""Override the field_type field with extended choices."""
page = ParentalKey(
'FormPageWithCustomFormBuilder',
related_name='form_fields',
on_delete=models.CASCADE)
field_type = models.CharField(
verbose_name='field type', max_length=16, choices=EXTENDED_CHOICES)
class CustomFormBuilder(FormBuilder):
"""
A custom FormBuilder that has an 'ipaddress' field with
customised create_singleline_field with shorter max_length
"""
def create_singleline_field(self, field, options):
options['max_length'] = 120 # usual default is 255
return forms.CharField(**options)
def create_ipaddress_field(self, field, options):
return forms.GenericIPAddressField(**options)
class FormPageWithCustomFormBuilder(AbstractEmailForm):
"""
A Form page that has a custom form builder and uses a custom
form field model with additional field_type choices.
"""
form_builder = CustomFormBuilder
content_panels = [
FieldPanel('title', classname="full title"),
InlinePanel('form_fields', label="Form fields"),
MultiFieldPanel([
FieldPanel('to_address', classname="full"),
FieldPanel('from_address', classname="full"),
FieldPanel('subject', classname="full"),
], "Email")
]
# Snippets
class AdvertPlacement(models.Model):
page = ParentalKey('wagtailcore.Page', related_name='advert_placements', on_delete=models.CASCADE)
advert = models.ForeignKey('tests.Advert', related_name='+', on_delete=models.CASCADE)
colour = models.CharField(max_length=255)
class AdvertTag(TaggedItemBase):
content_object = ParentalKey('Advert', related_name='tagged_items', on_delete=models.CASCADE)
class Advert(ClusterableModel):
url = models.URLField(null=True, blank=True)
text = models.CharField(max_length=255)
tags = TaggableManager(through=AdvertTag, blank=True)
panels = [
FieldPanel('url'),
FieldPanel('text'),
FieldPanel('tags'),
]
def __str__(self):
return self.text
register_snippet(Advert)
class AdvertWithCustomPrimaryKey(ClusterableModel):
advert_id = models.CharField(max_length=255, primary_key=True)
url = models.URLField(null=True, blank=True)
text = models.CharField(max_length=255)
panels = [
FieldPanel('url'),
FieldPanel('text'),
]
def __str__(self):
return self.text
register_snippet(AdvertWithCustomPrimaryKey)
class AdvertWithCustomUUIDPrimaryKey(ClusterableModel):
advert_id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
url = models.URLField(null=True, blank=True)
text = models.CharField(max_length=255)
panels = [
FieldPanel('url'),
FieldPanel('text'),
]
def __str__(self):
return self.text
register_snippet(AdvertWithCustomUUIDPrimaryKey)
class AdvertWithTabbedInterface(models.Model):
url = models.URLField(null=True, blank=True)
text = models.CharField(max_length=255)
something_else = models.CharField(max_length=255)
advert_panels = [
FieldPanel('url'),
FieldPanel('text'),
]
other_panels = [
FieldPanel('something_else'),
]
edit_handler = TabbedInterface([
ObjectList(advert_panels, heading='Advert'),
ObjectList(other_panels, heading='Other'),
])
def __str__(self):
return self.text
class Meta:
ordering = ('text',)
register_snippet(AdvertWithTabbedInterface)
class StandardIndex(Page):
""" Index for the site """
parent_page_types = [Page]
# A custom panel setup where all Promote fields are placed in the Content tab instead;
# we use this to test that the 'promote' tab is left out of the output when empty
StandardIndex.content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('seo_title'),
FieldPanel('slug'),
InlinePanel('advert_placements', label="Adverts"),
]
StandardIndex.promote_panels = []
class StandardChild(Page):
pass
# Test overriding edit_handler with a custom one
StandardChild.edit_handler = TabbedInterface([
ObjectList(StandardChild.content_panels, heading='Content'),
ObjectList(StandardChild.promote_panels, heading='Promote'),
ObjectList(StandardChild.settings_panels, heading='Settings', classname='settings'),
ObjectList([], heading='Dinosaurs'),
], base_form_class=WagtailAdminPageForm)
class BusinessIndex(Page):
""" Can be placed anywhere, can only have Business children """
subpage_types = ['tests.BusinessChild', 'tests.BusinessSubIndex']
class BusinessSubIndex(Page):
""" Can be placed under BusinessIndex, and have BusinessChild children """
# BusinessNowherePage is 'incorrectly' added here as a possible child.
# The rules on BusinessNowherePage prevent it from being a child here though.
subpage_types = ['tests.BusinessChild', 'tests.BusinessNowherePage']
parent_page_types = ['tests.BusinessIndex', 'tests.BusinessChild']
class BusinessChild(Page):
""" Can only be placed under Business indexes, no children allowed """
subpage_types = []
parent_page_types = ['tests.BusinessIndex', BusinessSubIndex]
class BusinessNowherePage(Page):
""" Not allowed to be placed anywhere """
parent_page_types = []
class TaggedPageTag(TaggedItemBase):
content_object = ParentalKey('tests.TaggedPage', related_name='tagged_items', on_delete=models.CASCADE)
class TaggedPage(Page):
tags = ClusterTaggableManager(through=TaggedPageTag, blank=True)
TaggedPage.content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('tags'),
]
class SingletonPage(Page):
@classmethod
def can_create_at(cls, parent):
# You can only create one of these!
return super(SingletonPage, cls).can_create_at(parent) \
and not cls.objects.exists()
class SingletonPageViaMaxCount(Page):
max_count = 1
class PageChooserModel(models.Model):
page = models.ForeignKey('wagtailcore.Page', help_text='help text', on_delete=models.CASCADE)
class EventPageChooserModel(models.Model):
page = models.ForeignKey('tests.EventPage', help_text='more help text', on_delete=models.CASCADE)
class SnippetChooserModel(models.Model):
advert = models.ForeignKey(Advert, help_text='help text', on_delete=models.CASCADE)
panels = [
SnippetChooserPanel('advert'),
]
class SnippetChooserModelWithCustomPrimaryKey(models.Model):
advertwithcustomprimarykey = models.ForeignKey(AdvertWithCustomPrimaryKey, help_text='help text', on_delete=models.CASCADE)
panels = [
SnippetChooserPanel('advertwithcustomprimarykey'),
]
class CustomImage(AbstractImage):
caption = models.CharField(max_length=255, blank=True)
fancy_caption = RichTextField(blank=True)
not_editable_field = models.CharField(max_length=255, blank=True)
admin_form_fields = Image.admin_form_fields + (
'caption',
'fancy_caption',
)
class Meta:
unique_together = [
('title', 'collection')
]
class CustomRendition(AbstractRendition):
image = models.ForeignKey(CustomImage, related_name='renditions', on_delete=models.CASCADE)
class Meta:
unique_together = (
('image', 'filter_spec', 'focal_point_key'),
)
# Custom image model with a required field
class CustomImageWithAuthor(AbstractImage):
author = models.CharField(max_length=255)
admin_form_fields = Image.admin_form_fields + (
'author',
)
class CustomRenditionWithAuthor(AbstractRendition):
image = models.ForeignKey(CustomImageWithAuthor, related_name='renditions', on_delete=models.CASCADE)
class Meta:
unique_together = (
('image', 'filter_spec', 'focal_point_key'),
)
class CustomDocument(AbstractDocument):
description = models.TextField(blank=True)
fancy_description = RichTextField(blank=True)
admin_form_fields = Document.admin_form_fields + (
'description',
'fancy_description'
)
class Meta:
unique_together = [
('title', 'collection')
]
class StreamModel(models.Model):
body = StreamField([
('text', CharBlock()),
('rich_text', RichTextBlock()),
('image', ImageChooserBlock()),
])
class ExtendedImageChooserBlock(ImageChooserBlock):
"""
Example of Block with custom get_api_representation method.
If the request has an 'extended' query param, it returns a dict of id and title,
otherwise, it returns the default value.
"""
def get_api_representation(self, value, context=None):
image_id = super().get_api_representation(value, context=context)
if 'request' in context and context['request'].query_params.get('extended', False):
return {
'id': image_id,
'title': value.title
}
return image_id
class StreamPage(Page):
body = StreamField([
('text', CharBlock()),
('rich_text', RichTextBlock()),
('image', ExtendedImageChooserBlock()),
('product', StructBlock([
('name', CharBlock()),
('price', CharBlock()),
])),
('raw_html', RawHTMLBlock()),
])
api_fields = ('body',)
content_panels = [
FieldPanel('title'),
StreamFieldPanel('body'),
]
preview_modes = []
class DefaultStreamPage(Page):
body = StreamField([
('text', CharBlock()),
('rich_text', RichTextBlock()),
('image', ImageChooserBlock()),
], default='')
content_panels = [
FieldPanel('title'),
StreamFieldPanel('body'),
]
class MTIBasePage(Page):
is_creatable = False
class Meta:
verbose_name = "MTI Base page"
class MTIChildPage(MTIBasePage):
# Should be creatable by default, no need to set anything
pass
class AbstractPage(Page):
class Meta:
abstract = True
@register_setting
class TestSetting(BaseSetting):
title = models.CharField(max_length=100)
email = models.EmailField(max_length=50)
@register_setting
class ImportantPages(BaseSetting):
sign_up_page = models.ForeignKey(
'wagtailcore.Page', related_name="+", null=True, on_delete=models.SET_NULL)
general_terms_page = models.ForeignKey(
'wagtailcore.Page', related_name="+", null=True, on_delete=models.SET_NULL)
privacy_policy_page = models.ForeignKey(
'wagtailcore.Page', related_name="+", null=True, on_delete=models.SET_NULL)
@register_setting(icon="tag")
class IconSetting(BaseSetting):
pass
class NotYetRegisteredSetting(BaseSetting):
pass
@register_setting
class FileUploadSetting(BaseSetting):
file = models.FileField()
class BlogCategory(models.Model):
name = models.CharField(unique=True, max_length=80)
class BlogCategoryBlogPage(models.Model):
category = models.ForeignKey(BlogCategory, related_name="+", on_delete=models.CASCADE)
page = ParentalKey('ManyToManyBlogPage', related_name='categories', on_delete=models.CASCADE)
panels = [
FieldPanel('category'),
]
class ManyToManyBlogPage(Page):
"""
A page type with two different kinds of M2M relation.
We don't formally support these, but we don't want them to cause
hard breakages either.
"""
body = RichTextField(blank=True)
adverts = models.ManyToManyField(Advert, blank=True)
blog_categories = models.ManyToManyField(
BlogCategory, through=BlogCategoryBlogPage, blank=True)
# make first_published_at editable on this page model
settings_panels = Page.settings_panels + [
FieldPanel('first_published_at'),
]
class OneToOnePage(Page):
"""
A Page containing a O2O relation.
"""
body = RichTextBlock(blank=True)
page_ptr = models.OneToOneField(Page, parent_link=True,
related_name='+', on_delete=models.CASCADE)
class GenericSnippetPage(Page):
"""
A page containing a reference to an arbitrary snippet (or any model for that matter)
linked by a GenericForeignKey
"""
snippet_content_type = models.ForeignKey(ContentType, on_delete=models.SET_NULL, null=True)
snippet_object_id = models.PositiveIntegerField(null=True)
snippet_content_object = GenericForeignKey('snippet_content_type', 'snippet_object_id')
class CustomImageFilePath(AbstractImage):
def get_upload_to(self, filename):
"""Create a path that's file-system friendly.
By hashing the file's contents we guarantee an equal distribution
of files within our root directories. This also gives us a
better chance of uploading images with the same filename, but
different contents - this isn't guaranteed as we're only using
the first three characters of the checksum.
"""
original_filepath = super().get_upload_to(filename)
folder_name, filename = original_filepath.split(os.path.sep)
        # Ensure that we consume the entire file; we can't guarantee that
        # the stream has not been partially (or entirely) consumed by
        # another process
original_position = self.file.tell()
self.file.seek(0)
hash256 = hashlib.sha256()
while True:
data = self.file.read(256)
if not data:
break
hash256.update(data)
checksum = hash256.hexdigest()
self.file.seek(original_position)
return os.path.join(folder_name, checksum[:3], filename)
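    # Minimal sketch of the resulting layout (hypothetical filename and
    # checksum): get_upload_to('photo.jpg') -> 'original_images/3a7/photo.jpg',
    # where '3a7' is the first three hex characters of the SHA-256 of the file
    # contents, so same-named files with different contents land in different
    # subfolders.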
class CustomPageQuerySet(PageQuerySet):
def about_spam(self):
return self.filter(title__contains='spam')
CustomManager = PageManager.from_queryset(CustomPageQuerySet)
class CustomManagerPage(Page):
objects = CustomManager()
class MyBasePage(Page):
"""
A base Page model, used to set site-wide defaults and overrides.
"""
objects = CustomManager()
class Meta:
abstract = True
class MyCustomPage(MyBasePage):
pass
class ValidatedPage(Page):
foo = models.CharField(max_length=255)
base_form_class = ValidatedPageForm
content_panels = Page.content_panels + [
FieldPanel('foo'),
]
class DefaultRichTextFieldPage(Page):
body = RichTextField()
content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('body'),
]
class DefaultRichBlockFieldPage(Page):
body = StreamField([
('rich_text', RichTextBlock()),
])
content_panels = Page.content_panels + [
StreamFieldPanel('body')
]
class CustomRichTextFieldPage(Page):
body = RichTextField(editor='custom')
content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('body'),
]
class CustomRichBlockFieldPage(Page):
body = StreamField([
('rich_text', RichTextBlock(editor='custom')),
])
content_panels = [
FieldPanel('title', classname="full title"),
StreamFieldPanel('body'),
]
class RichTextFieldWithFeaturesPage(Page):
body = RichTextField(features=['quotation', 'embed', 'made-up-feature'])
content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('body'),
]
# a page that only contains RichTextField within an InlinePanel,
# to test that the inline child's form media gets pulled through
class SectionedRichTextPageSection(Orderable):
page = ParentalKey('tests.SectionedRichTextPage', related_name='sections', on_delete=models.CASCADE)
body = RichTextField()
panels = [
FieldPanel('body')
]
class SectionedRichTextPage(Page):
content_panels = [
FieldPanel('title', classname="full title"),
InlinePanel('sections')
]
class InlineStreamPageSection(Orderable):
page = ParentalKey('tests.InlineStreamPage', related_name='sections', on_delete=models.CASCADE)
body = StreamField([
('text', CharBlock()),
('rich_text', RichTextBlock()),
('image', ImageChooserBlock()),
])
panels = [
StreamFieldPanel('body')
]
class InlineStreamPage(Page):
content_panels = [
FieldPanel('title', classname="full title"),
InlinePanel('sections')
]
class TableBlockStreamPage(Page):
table = StreamField([('table', TableBlock())])
content_panels = [StreamFieldPanel('table')]
class UserProfile(models.Model):
# Wagtail's schema must be able to coexist alongside a custom UserProfile model
user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
favourite_colour = models.CharField(max_length=255)
class PanelSettings(TestSetting):
panels = [
FieldPanel('title')
]
class TabbedSettings(TestSetting):
edit_handler = TabbedInterface([
ObjectList([
FieldPanel('title')
], heading='First tab'),
ObjectList([
FieldPanel('email')
], heading='Second tab'),
])
class AlwaysShowInMenusPage(Page):
show_in_menus_default = True
# test for AddField migrations on StreamFields using various default values
class AddedStreamFieldWithoutDefaultPage(Page):
body = StreamField([
('title', CharBlock())
])
class AddedStreamFieldWithEmptyStringDefaultPage(Page):
body = StreamField([
('title', CharBlock())
], default='')
class AddedStreamFieldWithEmptyListDefaultPage(Page):
body = StreamField([
('title', CharBlock())
], default=[])
# test customising edit handler definitions on a per-request basis
class PerUserContentPanels(ObjectList):
def _replace_children_with_per_user_config(self):
self.children = self.instance.basic_content_panels
if self.request.user.is_superuser:
self.children = self.instance.superuser_content_panels
self.children = [
child.bind_to(model=self.model, instance=self.instance,
request=self.request, form=self.form)
for child in self.children]
def on_instance_bound(self):
# replace list of children when both instance and request are available
if self.request:
self._replace_children_with_per_user_config()
else:
super().on_instance_bound()
def on_request_bound(self):
# replace list of children when both instance and request are available
if self.instance:
self._replace_children_with_per_user_config()
else:
super().on_request_bound()
class PerUserPageMixin:
basic_content_panels = []
superuser_content_panels = []
@cached_classmethod
def get_edit_handler(cls):
tabs = []
if cls.basic_content_panels and cls.superuser_content_panels:
tabs.append(PerUserContentPanels(heading='Content'))
if cls.promote_panels:
tabs.append(ObjectList(cls.promote_panels,
heading='Promote'))
if cls.settings_panels:
tabs.append(ObjectList(cls.settings_panels,
heading='Settings',
classname='settings'))
edit_handler = TabbedInterface(tabs,
base_form_class=cls.base_form_class)
return edit_handler.bind_to(model=cls)
class SecretPage(PerUserPageMixin, Page):
boring_data = models.TextField()
secret_data = models.TextField()
basic_content_panels = Page.content_panels + [
FieldPanel('boring_data'),
]
superuser_content_panels = basic_content_panels + [
FieldPanel('secret_data'),
]
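# Illustration (behaviour defined above): a non-superuser editing a SecretPage
# sees only the title and boring_data fields; a superuser additionally gets
# secret_data, because PerUserContentPanels swaps in superuser_content_panels
# when the request is bound.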
class SimpleParentPage(Page):
# `BusinessIndex` has been added to bring it in line with other tests
subpage_types = ['tests.SimpleChildPage', BusinessIndex]
class SimpleChildPage(Page):
# `Page` has been added to bring it in line with other tests
parent_page_types = ['tests.SimpleParentPage', Page]
max_count_per_parent = 1
class PersonPage(Page):
first_name = models.CharField(
max_length=255,
verbose_name='First Name',
)
last_name = models.CharField(
max_length=255,
verbose_name='Last Name',
)
content_panels = Page.content_panels + [
MultiFieldPanel([
FieldPanel('first_name'),
FieldPanel('last_name'),
], 'Person'),
InlinePanel('addresses', label='Address'),
]
class Meta:
verbose_name = 'Person'
verbose_name_plural = 'Persons'
class Address(index.Indexed, ClusterableModel, Orderable):
address = models.CharField(
max_length=255,
verbose_name='Address',
)
tags = ClusterTaggableManager(
through='tests.AddressTag',
blank=True,
)
person = ParentalKey(
to='tests.PersonPage',
related_name='addresses',
verbose_name='Person'
)
panels = [
FieldPanel('address'),
FieldPanel('tags'),
]
class Meta:
verbose_name = 'Address'
verbose_name_plural = 'Addresses'
class AddressTag(TaggedItemBase):
content_object = ParentalKey(
to='tests.Address',
on_delete=models.CASCADE,
related_name='tagged_items'
)
class RestaurantPage(Page):
tags = ClusterTaggableManager(through='tests.TaggedRestaurant', blank=True)
content_panels = Page.content_panels + [
FieldPanel('tags'),
]
class RestaurantTag(TagBase):
free_tagging = False
class Meta:
verbose_name = "Tag"
verbose_name_plural = "Tags"
class TaggedRestaurant(ItemBase):
tag = models.ForeignKey(
RestaurantTag, related_name="tagged_restaurants", on_delete=models.CASCADE
)
content_object = ParentalKey(
to='tests.RestaurantPage',
on_delete=models.CASCADE,
related_name='tagged_items'
)
class SimpleTask(Task):
pass<|fim▁end|> | |
<|file_name|>base_model.py<|end_file_name|><|fim▁begin|>import numpy as np
import torch
import os
import sys
import functools
import torch.nn as nn
from torch.autograd import Variable
from torch.nn import init
import torch.nn.functional as F<|fim▁hole|>class GANLoss(nn.Module):
def __init__(self, target_real_label=1.0, target_fake_label=0.0,
tensor=torch.FloatTensor):
super(GANLoss, self).__init__()
self.real_label = target_real_label
self.fake_label = target_fake_label
self.real_label_var = None
self.fake_label_var = None
self.Tensor = tensor
self.loss = nn.MSELoss()
def get_target_tensor(self, input, target_is_real):
if target_is_real:
create_label = ((self.real_label_var is None) or
(self.real_label_var.numel() != input.numel()))
if create_label:
real_tensor = self.Tensor(input.size()).fill_(self.real_label)
self.real_label_var = Variable(real_tensor, requires_grad=False)
target_tensor = self.real_label_var
else:
create_label = ((self.fake_label_var is None) or
(self.fake_label_var.numel() != input.numel()))
if create_label:
fake_tensor = self.Tensor(input.size()).fill_(self.fake_label)
self.fake_label_var = Variable(fake_tensor, requires_grad=False)
target_tensor = self.fake_label_var
return target_tensor
def __call__(self, input, target_is_real):
target_tensor = self.get_target_tensor(input, target_is_real)
return self.loss(input, target_tensor)
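# Minimal usage sketch (illustrative only; `netD`, `fake`, `real` and
# `vgg_feat` are assumed to be defined elsewhere):
#
#   criterion_gan = GANLoss(tensor=torch.cuda.FloatTensor)
#   pred_fake = netD(fake, vgg_feat)
#   loss_g = criterion_gan(pred_fake, True)   # generator wants fakes scored real
#   loss_d = 0.5 * (criterion_gan(netD(real, vgg_feat), True)
#                   + criterion_gan(netD(fake.detach(), vgg_feat), False))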
def U_weight_init(ms):
for m in ms.modules():
classname = m.__class__.__name__
if classname.find('Conv2d') != -1:
m.weight.data = init.kaiming_normal(m.weight.data, a=0.2)
elif classname.find('ConvTranspose2d') != -1:
m.weight.data = init.kaiming_normal(m.weight.data)
elif classname.find('BatchNorm') != -1:
m.weight.data.normal_(1.0, 0.02)
m.bias.data.fill_(0)
elif classname.find('Linear') != -1:
m.weight.data = init.kaiming_normal(m.weight.data)
def LR_weight_init(ms):
for m in ms.modules():
classname = m.__class__.__name__
if classname.find('Conv') != -1:
m.weight.data = init.kaiming_normal(m.weight.data, a=0.2)
elif classname.find('BatchNorm') != -1:
m.weight.data.normal_(1.0, 0.02)
m.bias.data.fill_(0)
elif classname.find('Linear') != -1:
m.weight.data = init.kaiming_normal(m.weight.data, a=0.2)
def R_weight_init(ms):
for m in ms.modules():
classname = m.__class__.__name__
if classname.find('Conv') != -1:
m.weight.data = init.kaiming_normal(m.weight.data)
elif classname.find('BatchNorm') != -1:
m.weight.data.normal_(1.0, 0.02)
m.bias.data.fill_(0)
elif classname.find('Linear') != -1:
m.weight.data = init.kaiming_normal(m.weight.data)
############################
# G network
###########################
# custom weights initialization called on netG
def get_norm_layer(norm_type='instance'):
if norm_type == 'batch':
norm_layer = functools.partial(nn.BatchNorm2d, affine=True)
elif norm_type == 'instance':
norm_layer = functools.partial(nn.InstanceNorm2d, affine=False)
else:
raise NotImplementedError('normalization layer [%s] is not found' % norm_type)
return norm_layer
def def_netG(ngf=64, norm='instance'):
norm_layer = get_norm_layer(norm_type=norm)
netG = UnetGenerator(ngf, norm_layer=norm_layer)
return netG
# Defines the Unet generator.
# The encoder below hardcodes 8 stride-2 downsamplings, so a 256x256 input
# is reduced to 1x1 at the bottleneck, where a VGG feature vector is fused
# in before the mirrored decoder upsamples back to full resolution.
class UnetGenerator(nn.Module):
def __init__(self, ngf, norm_layer):
super(UnetGenerator, self).__init__()
################ downS
self.down1 = nn.Conv2d(1, ngf // 2, kernel_size=4, stride=2, padding=1)
down = [nn.Conv2d(ngf // 2, ngf, kernel_size=4, stride=2, padding=1), norm_layer(ngf)]
self.down2 = nn.Sequential(*down)
down = [nn.Conv2d(ngf, ngf * 2, kernel_size=4, stride=2, padding=1), norm_layer(ngf * 2)]
self.down3 = nn.Sequential(*down)
down = [nn.Conv2d(ngf * 2, ngf * 4, kernel_size=4, stride=2, padding=1), norm_layer(ngf * 4)]
self.down4 = nn.Sequential(*down)
down = [nn.Conv2d(ngf * 4, ngf * 4, kernel_size=4, stride=2, padding=1), norm_layer(ngf * 4)]
self.down5 = nn.Sequential(*down)
down = [nn.Conv2d(ngf * 4, ngf * 4, kernel_size=4, stride=2, padding=1), norm_layer(ngf * 4)]
self.down6 = nn.Sequential(*down)
down = [nn.Conv2d(ngf * 4, ngf * 4, kernel_size=4, stride=2, padding=1), norm_layer(ngf * 4)]
self.down7 = nn.Sequential(*down)
self.down8 = nn.Conv2d(ngf * 4, ngf * 8, kernel_size=4, stride=2, padding=1)
################ down--up
up = [nn.ConvTranspose2d(ngf * 8 + 2048, ngf * 8, kernel_size=4, stride=2, padding=1),
norm_layer(ngf * 8)]
self.up8 = nn.Sequential(*up)
up = [nn.ConvTranspose2d(ngf * 12, ngf * 8, kernel_size=4, stride=2, padding=1),
norm_layer(ngf * 8)]
self.up7 = nn.Sequential(*up)
up = [nn.ConvTranspose2d(ngf * 12, ngf * 8, kernel_size=4, stride=2, padding=1),
norm_layer(ngf * 8)]
self.up6 = nn.Sequential(*up)
up = [nn.ConvTranspose2d(ngf * 12, ngf * 8, kernel_size=4, stride=2, padding=1),
norm_layer(ngf * 8)]
self.up5 = nn.Sequential(*up)
up = [nn.ConvTranspose2d(ngf * 12, ngf * 4, kernel_size=4, stride=2, padding=1),
norm_layer(ngf * 4)]
self.up4 = nn.Sequential(*up)
up = [nn.ConvTranspose2d(ngf * 6, ngf * 2, kernel_size=4, stride=2, padding=1),
norm_layer(ngf * 2)]
self.up3 = nn.Sequential(*up)
up = [nn.ConvTranspose2d(ngf * 3, ngf, kernel_size=4, stride=2, padding=1), norm_layer(ngf)]
self.up2 = nn.Sequential(*up)
self.up1 = nn.ConvTranspose2d(int(ngf * 1.5), 3, kernel_size=4, stride=2, padding=1)
self.linear = nn.Linear(4096, 2048)
U_weight_init(self)
def forward(self, input, VGG):
x1 = F.leaky_relu(self.down1(input), 0.2, True)
x2 = F.leaky_relu(self.down2(x1), 0.2, True)
x3 = F.leaky_relu(self.down3(x2), 0.2, True)
x4 = F.leaky_relu(self.down4(x3), 0.2, True)
x5 = F.leaky_relu(self.down5(x4), 0.2, True)
x6 = F.leaky_relu(self.down6(x5), 0.2, True)
x7 = F.leaky_relu(self.down7(x6), 0.2, True)
x8 = F.relu(self.down8(x7), True)
VGG = F.relu(self.linear(VGG), True)
x = F.relu(self.up8(torch.cat([x8, VGG.view(-1, 2048, 1, 1)], 1)), True)
x = F.relu(self.up7(torch.cat([x, x7], 1)), True)
x = F.relu(self.up6(torch.cat([x, x6], 1)), True)
x = F.relu(self.up5(torch.cat([x, x5], 1)), True)
x = F.relu(self.up4(torch.cat([x, x4], 1)), True)
x = F.relu(self.up3(torch.cat([x, x3], 1)), True)
x = F.relu(self.up2(torch.cat([x, x2], 1)), True)
x = F.tanh(self.up1(torch.cat([x, x1], 1)))
return x
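# Shape sketch (an illustration, not asserted by the code): for a 1x256x256
# grayscale input, down1..down8 halve the spatial size at every step
# (256 -> 128 -> ... -> 1), so x8 is (N, ngf*8, 1, 1); the 4096-d VGG vector
# is projected to 2048 dims and concatenated at that bottleneck before the
# mirrored up-convolutions restore the 3x256x256 output.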
############################
# D network
###########################
def def_netD(ndf=64, norm='batch'):
norm_layer = get_norm_layer(norm_type=norm)
netD = NLayerDiscriminator(ndf, norm_layer=norm_layer)
return netD
class NLayerDiscriminator(nn.Module):
def __init__(self, ndf, norm_layer=nn.BatchNorm2d):
super(NLayerDiscriminator, self).__init__()
kw = 4
padw = 1
self.ndf = ndf
sequence = [
nn.Conv2d(4, ndf, kernel_size=kw, stride=2, padding=padw),
nn.LeakyReLU(0.2, True)
]
sequence += [
nn.Conv2d(ndf * 1, ndf * 2,
kernel_size=kw, stride=2, padding=padw),
norm_layer(ndf * 2),
nn.LeakyReLU(0.2, True)
]
sequence += [
nn.Conv2d(ndf * 2, ndf * 4,
kernel_size=kw, stride=2, padding=padw),
norm_layer(ndf * 4),
nn.LeakyReLU(0.2, True)
]
sequence += [
nn.Conv2d(ndf * 4, ndf * 8,
kernel_size=kw, stride=1, padding=padw), # stride 1
norm_layer(ndf * 8),
nn.LeakyReLU(0.2, True)
]
self.model = nn.Sequential(*sequence)
self.linear = nn.Linear(4096, ndf * 8)
sequence = [
nn.Conv2d(ndf * 8, ndf * 8, kernel_size=kw, stride=1, padding=padw),
norm_layer(ndf * 8),
nn.LeakyReLU(0.2, True),
nn.Conv2d(ndf * 8, 1, kernel_size=kw, stride=1, padding=padw),
]
self.final = nn.Sequential(*sequence)
LR_weight_init(self)
def forward(self, input, VGG):
x = self.model(input)
VGG = F.leaky_relu(self.linear(VGG), 0.2, True)
return self.final(x + VGG.view(-1, self.ndf * 8, 1, 1))
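# Note (interpretation, not asserted by the code): the final stride-1 convs
# keep spatial extent, so the discriminator emits a patch map of scores
# rather than a single scalar - a PatchGAN-style critic conditioned on the
# 4096-d VGG feature injected at the bottleneck.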
############################
# VGG feature
###########################
def def_netF():
vgg19 = M.vgg19()
vgg19.load_state_dict(torch.load('vgg19.pth'))
vgg19.classifier = nn.Sequential(
*list(vgg19.classifier.children())[:2]
)
for param in vgg19.parameters():
param.requires_grad = False
return vgg19<|fim▁end|> | import torchvision.models as M
|
<|file_name|>karma.conf.js<|end_file_name|><|fim▁begin|>var webpackConfig = require("./webpack.config.js");
webpackConfig.devtool = "inline-source-map";
delete webpackConfig.externals;
delete webpackConfig.entry;
delete webpackConfig.output;
module.exports = function (config) {
config.set({
basePath: ".",
frameworks: ["es6-shim", "chai", "mocha", "sinon"],
files: ["tests.bundle.js"],
preprocessors: {
"tests.bundle.js": ["webpack", "sourcemap"],
},
reporters: ["dots", "coverage"],
port: 9876,
colors: true,
logLevel: config.LOG_INFO,
autoWatch: true,
browsers: ["Chrome"],
singleRun: false,<|fim▁hole|> concurrency: Infinity,
webpack: webpackConfig,
webpackMiddleware: {
noInfo: false,
},
coverageReporter: {
type: "lcov",
dir: "coverage/",
},
});
};<|fim▁end|> | |
<|file_name|>benchmark.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import numpy as np
import pycuda.driver as drv
from neon.backends.nervanagpu import NervanaGPU
from openai_gemm import matmul
ng = NervanaGPU()
print drv.Context.get_current().get_device().name()
config = (
# m, n, k, AT, BT (row order)
( 16, 1760, 1760, False, False),
( 32, 1760, 1760, False, False),
( 64, 1760, 1760, False, False),
( 128, 1760, 1760, False, False),
( 7000, 1760, 1760, False, False),
( 16, 2048, 2048, False, False),
( 32, 2048, 2048, False, False),
( 64, 2048, 2048, False, False),
( 128, 2048, 2048, False, False),
( 7000, 2048, 2048, False, False),
( 16, 2560, 2560, False, False),
( 32, 2560, 2560, False, False),
( 64, 2560, 2560, False, False),
( 128, 2560, 2560, False, False),
( 7000, 2560, 2560, False, False),
( 16, 4096, 4096, False, False),
( 32, 4096, 4096, False, False),
( 64, 4096, 4096, False, False),
( 128, 4096, 4096, False, False),
( 7000, 4096, 4096, False, False),
( 16, 1760, 1760, False, True),
( 32, 1760, 1760, False, True),
( 64, 1760, 1760, False, True),
( 128, 1760, 1760, False, True),
( 7000, 1760, 1760, False, True),
( 16, 2048, 2048, False, True),
( 32, 2048, 2048, False, True),
( 64, 2048, 2048, False, True),
( 128, 2048, 2048, False, True),
( 7000, 2048, 2048, False, True),
( 16, 2560, 2560, False, True),
( 32, 2560, 2560, False, True),
( 64, 2560, 2560, False, True),
( 128, 2560, 2560, False, True),
( 7000, 2560, 2560, False, True),
( 16, 4096, 4096, False, True),
( 32, 4096, 4096, False, True),
( 64, 4096, 4096, False, True),
( 128, 4096, 4096, False, True),
( 7000, 4096, 4096, False, True),
( 7133, 1760, 1760, True , False),
( 7133, 2048, 2048, True , False),
( 7133, 2560, 2560, True , False),
( 7133, 4096, 4096, True , False),
( 9124, 5124, 1760, False, False),
( 9124, 5124, 2048, False, False),
( 9124, 5124, 2560, False, False),
( 9124, 5124, 4096, False, False),
( 9124, 5124, 1760, False, True),
( 9124, 5124, 2048, False, True),
( 9124, 5124, 2560, False, True),
( 9124, 5124, 4096, False, True),
( 8457, 35, 1760, False, False),
( 8457, 35, 2048, False, False),
( 8457, 35, 2560, False, False),
( 8457, 35, 4096, False, False),
( 8457, 35, 1760, False, True),
( 8457, 35, 2048, False, True),
( 8457, 35, 2560, False, True),
( 8457, 35, 4096, False, True),
( 16, 7680, 2560, False, False),
( 32, 7680, 2560, False, False),
( 64, 7680, 2560, False, False),
( 128, 7680, 2560, False, False),
( 16, 7680, 2560, False, True),
( 32, 7680, 2560, False, True),
( 64, 7680, 2560, False, True),<|fim▁hole|> ( 16, 3072, 1024, False, False),
( 32, 3072, 1024, False, False),
( 64, 3072, 1024, False, False),
( 128, 3072, 1024, False, False),
( 16, 3072, 1024, False, True),
( 32, 3072, 1024, False, True),
( 64, 3072, 1024, False, True),
( 128, 3072, 1024, False, True),
( 7435, 3072, 1024, True , False),
( 5481, 7680, 2560, True , False),
# (60000, 32, 32, True , False),
# (60000, 256, 256, True , False),
# ( 4096, 4096, 32, True , False),
# ( 3456, 3456, 32, True , False),
# ( 896, 896, 32, True , False),
)
print "| M| N| K| Op|OpenAI_32|cuBLAS_32|ratio_32|OpenAI_16|cuBLAS_16|ratio_16|"
print "|------|------|------|---|---------|---------|--------|---------|---------|--------|"
for m, n, k, at, bt in config:
dimA = (k,m) if at else (m,k)
dimB = (n,k) if bt else (k,n)
dimC = (m,n)
opA = 'T' if at else 'N'
opB = 'T' if bt else 'N'
op = opA + opB
dtype_data = list()
for dtype in ( np.float32, np.float16 ): #np.float32, np.float16,
A = ng.empty(dimA, dtype=dtype)
B = ng.empty(dimB, dtype=dtype)
C = ng.empty(dimC, dtype=dtype)
if at: A = A.T
if bt: B = B.T
data = matmul(A, B, C, bench=True)
# if dtype is np.float16:
# print ""
# for d in sorted(data):
# print "%7.3f %5.0f %22s %5d" % d
cublas = data.pop()
openai = sorted(data)[0]
text = "%9.0f|%9.0f|%8.1f" % (openai[1], cublas[1], openai[1] / cublas[1])
dtype_data.append(text)
print "|%6d|%6d|%6d|%3s|%s|" % (m, n, k, op, "|".join(dtype_data))<|fim▁end|> | ( 128, 7680, 2560, False, True), |
<|file_name|>ROLO_evaluation.py<|end_file_name|><|fim▁begin|># Copyright (c) <2016> <GUANGHAN NING>. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
Script File: ROLO_evaluation.py
Description:
ROLO is short for Recurrent YOLO, aimed at simultaneous object detection and tracking
Paper: http://arxiv.org/abs/1607.05781
Author: Guanghan Ning
Webpage: http://guanghan.info/
'''
import numpy
print numpy.__path__
import cv2
import os
import numpy as np
import sys
import ROLO_utils as utils
import matplotlib.pyplot as plot
import pickle
import scipy.io
import re
import h5py
import matlab.engine
''' -----------------------------Deal with benchmark results: matlab format-------------------------- '''
def choose_benchmark_method(id):
    # id -> tracker-name lookup (0: STRUCK ... 10: Staple)
    methods = ['STRUCK', 'CXT', 'TLD', 'OAB', 'CSK', 'RS',
               'LSK', 'VTD', 'VTS', 'CNN-SVM', 'Staple']
    return methods[id]
def choose_mat_file(method_id, sequence_id):
[wid, ht, sequence_name, dummy_1, dummy_2] = utils.choose_video_sequence(sequence_id)
method_name = choose_benchmark_method(method_id)
mat_file = sequence_name + '_' + method_name + '.mat'
return mat_file
def load_mat_results(mat_file, TRE, SRE, OPE, id):
if TRE is True:
fold = '/u03/Guanghan/dev/ROLO-dev/experiments/benchmark_results/pami15_TRE'
elif SRE is True:
fold = '/u03/Guanghan/dev/ROLO-dev/experiments/benchmark_results/pami15_SRE'
elif OPE is True:
fold = '/u03/Guanghan/dev/ROLO-dev/experiments/benchmark_results/pami15_TRE'
id = 0
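        # OPE uses the first TRE segment, which starts at frame 1 and spans
        # the full sequence (an interpretation of the benchmark's file layout)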
mat_path = os.path.join(fold, mat_file)
CNN_SVM = False
if CNN_SVM is True:
eng = matlab.engine.start_matlab()
content = eng.load(mat_path,nargout=1)
mat_results= content['results'][0]['res']#[0]
numbers= [0, content['results'][0]['len']]
eng.exit()
else:
mat = scipy.io.loadmat(mat_path)
mat_results = mat['results'][0][id][0][0][5]
mat_range_str = mat['results'][0][id][0][0][2]
numbers= re.findall(r'\d+', str(mat_range_str))
return [mat_results, int(numbers[0]), int(numbers[1])]
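# Assumed return contract (inferred from the callers below): `mat_results` is
# an (ed - st + 1) x 4 array of [x, y, w, h] boxes, and [st, ed] is the
# 1-based frame range the tracker was run on.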
def load_benchmark_results():
# 1. read mat file, output numpy file to: e.g., /u03/Guanghan/dev/ROLO-dev/benchmark/DATA/Car1/STRUCK/
# 2. convert to same format as yolo and rolo
# 3. evaluate AUC and avg_IOU score, for drawing the success plot
# 4. Compare with ROLO and YOLO's OPE (3 parts: TRE ,SRE, SRER)
return
def evaluate_benchmark_avg_IOU(method_id): # calculate average IOU of benchmark algorithms
''' PARAMETERS '''
evaluate_st = 0
evaluate_ed = 29
num_evaluate= evaluate_ed - evaluate_st + 1.0
avg_score= 0
method_name= choose_benchmark_method(method_id)
file_name= 'output/IOU/avgIOU_' + method_name + '.txt'
f= open(file_name, 'w')
for sequence_id in range(evaluate_st, evaluate_ed + 1):
[wid, ht, sequence_name, dummy_1, dummy_2] = utils.choose_video_sequence(sequence_id)
# Load benchmark detection loc
mat_file = choose_mat_file(method_id, sequence_id)
[locations, st_frame_num, ed_frame_num] = load_mat_results(mat_file, False, False, True, 0)
# Load ground truth detection loc
gt_file_path= os.path.join('benchmark/DATA', sequence_name, 'groundtruth_rect.txt')
lines = utils.load_dataset_gt(gt_file_path)
#
total= 0
total_score= 0
for id in range(0, ed_frame_num):
location= locations[id]
gt_location = utils.find_gt_location(lines, id)
score = utils.iou(location, gt_location)
total_score += score
total += 1.0
total_score /= total
[dummy, dummy, sequence_name, dummy, dummy]= utils.choose_video_sequence(sequence_id)
print(method_name, ',' ,sequence_name, ": avg_IOU = ", total_score)
f.write(method_name + ', ' + sequence_name + ": avg_IOU = " + str("{:.3f}".format(total_score)) + '\n')
avg_score += total_score
f.close()
avg_score /= num_evaluate
print('average score over all sequences:', avg_score)
def evaluate_benchmark_AUC_OPE(method_id): # calculate AUC (Area Under the Curve) of benchmark algorithms
''' PARAMETERS '''
evaluate_st = 0
evaluate_ed = 29
num_evaluate= evaluate_ed - evaluate_st + 1.0
AUC_score= []
for thresh_int in range(0, 100, 5):
thresh = thresh_int / 100.0 + 0.0001
print("thresh= ", thresh)
avg_score= 0
for sequence_id in range(evaluate_st, evaluate_ed + 1):
[wid, ht, sequence_name, dummy_1, dummy_2] = utils.choose_video_sequence(sequence_id)
# Load benchmark detection loc
mat_file = choose_mat_file(method_id, sequence_id)
[locations, st_frame_num, ed_frame_num] = load_mat_results(mat_file, False, False, True, 0)
#print(locations)
# Load ground truth detection loc
gt_file_path= os.path.join('benchmark/DATA', sequence_name, 'groundtruth_rect.txt')
lines = utils.load_dataset_gt(gt_file_path)
#
total= 0
total_score= 0
for id in range(0, ed_frame_num):
location= locations[id]
gt_location = utils.find_gt_location(lines, id)
score = utils.cal_benchmark_score(location, gt_location, thresh)
total_score += score
total += 1.0
total_score /= total
avg_score += total_score
AUC_score.append(avg_score/num_evaluate)
print("(thresh, AUC_score) = ", thresh, ' ', avg_score/num_evaluate)
method_name= choose_benchmark_method(method_id)
file_name= 'output/AUC_score_' + method_name + '.pickle'
with open(file_name, 'w') as f:
pickle.dump(AUC_score, f)
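# How the success plot is built (assuming utils.cal_benchmark_score returns
# 1.0 when iou(location, gt_location) >= thresh and 0.0 otherwise): averaging
# it over frames gives the success rate at one overlap threshold; sweeping
# thresh from 0 to 1 traces the success curve whose area is the AUC.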
def evaluate_benchmark_AUC_TRE(method_id): # calculate TRE of AUC (Area Under the Curve) of benchmark algorithms
''' PARAMETERS '''
evaluate_st = 0
evaluate_ed = 29
TRE_num = 20
num_evaluate= evaluate_ed - evaluate_st + 1.0
AUC_score= []
for thresh_int in range(0, 100, 5):
thresh = thresh_int / 100.0 + 0.0001
print("thresh= ", thresh)
avg_score= 0
for sequence_id in range(evaluate_st, evaluate_ed + 1):
[wid, ht, sequence_name, dummy_1, dummy_2] = utils.choose_video_sequence(sequence_id)
# Load ground truth detection loc
gt_file_path= os.path.join('benchmark/DATA', sequence_name, 'groundtruth_rect.txt')
lines = utils.load_dataset_gt(gt_file_path)
# Load benchmark detection loc
mat_file = choose_mat_file(method_id, sequence_id)
total_score_over_TREs= 0
for locations_id in range(0, TRE_num):
[locations, st_frame_num, ed_frame_num] = load_mat_results(mat_file, True, False, False, locations_id)
ct_frames= 0
total_score_over_frames= 0
for id in range(st_frame_num-1, ed_frame_num):
id_offset= id - st_frame_num + 1
location= locations[id_offset] # id_offset, not id
gt_location = utils.find_gt_location(lines, id) #id, not id_offset
score = utils.cal_benchmark_score(location, gt_location, thresh)
total_score_over_frames += score
ct_frames += 1.0
total_score_over_frames /= ct_frames
total_score_over_TREs += total_score_over_frames
total_score_over_TREs /= (TRE_num * 1.0)
avg_score += total_score_over_TREs
AUC_score.append(avg_score/num_evaluate)
print("(thresh, AUC_score) = ", thresh, ' ', avg_score/num_evaluate)
method_name= choose_benchmark_method(method_id)
file_name= 'output/TRE_score_' + method_name + '.pickle'
with open(file_name, 'w') as f:
pickle.dump(AUC_score, f)
def evaluate_benchmark_avg_IOU_TRE(method_id): # calculate TRE of average IOU of benchmark algorithms
''' PARAMETERS '''
evaluate_st = 0
evaluate_ed = 29
TRE_num = 20
num_evaluate= evaluate_ed - evaluate_st + 1.0
score_over_sequences= 0
method_name= choose_benchmark_method(method_id)
file_name= 'output/IOU/TRE_avgIOU_' + method_name + '.txt'
f= open(file_name, 'w')
for sequence_id in range(evaluate_st, evaluate_ed + 1):
[wid, ht, sequence_name, dummy_1, dummy_2] = utils.choose_video_sequence(sequence_id)
# Load ground truth detection loc
gt_file_path= os.path.join('benchmark/DATA', sequence_name, 'groundtruth_rect.txt')
lines = utils.load_dataset_gt(gt_file_path)
# Load benchmark detection loc
mat_file = choose_mat_file(method_id, sequence_id)
score_over_TREs= 0
for locations_id in range(0, TRE_num):
[locations, st_frame_num, ed_frame_num] = load_mat_results(mat_file, True, False, False, locations_id)
ct_frames= 0
score_over_frames= 0
for id in range(st_frame_num-1, ed_frame_num):
id_offset= id - st_frame_num + 1
location= locations[id_offset] # id_offset, not id
gt_location = utils.find_gt_location(lines, id) #id, not id_offset
score = utils.iou(location, gt_location)
score_over_frames += score
ct_frames += 1.0
score_over_frames /= ct_frames
score_over_TREs += score_over_frames
score_over_TREs /= (TRE_num * 1.0)
score_over_sequences += score_over_TREs
avg_IOU_TRE_score= score_over_sequences/num_evaluate
print("avg_IOU_TRE_score = ", avg_IOU_TRE_score)
f.write(method_name + ', ' + sequence_name + ": TRE_avg_IOU = " + str("{:.3f}".format(avg_IOU_TRE_score)) + '\n')
f.close()
return avg_IOU_TRE_score
def evaluate_benchmark_AUC_SRE(method_id): # calculate SRE of AUC (Area Under the Curve) of benchmark algorithms
''' PARAMETERS '''
evaluate_st = 0
evaluate_ed = 29
SRE_num = 12
num_evaluate= evaluate_ed - evaluate_st + 1.0
AUC_score= []
for thresh_int in range(0, 100, 5):
        thresh = thresh_int / 100.0 + 0.0001
print("thresh= ", thresh)
avg_score_over_sequences = 0
for sequence_id in range(evaluate_st, evaluate_ed + 1):
[wid, ht, sequence_name, dummy_1, dummy_2] = utils.choose_video_sequence(sequence_id)
# Load ground truth detection loc
gt_file_path= os.path.join('benchmark/DATA', sequence_name, 'groundtruth_rect.txt')
lines = utils.load_dataset_gt(gt_file_path)
# Load benchmark detection loc
mat_file = choose_mat_file(method_id, sequence_id)
total= 0
avg_score= 0
for locations_id in range(0, SRE_num):
[locations, st_frame_num, ed_frame_num] = load_mat_results(mat_file, False, True, False, locations_id)
total += 1.0
ct = 0
total_score= 0
for id in range(st_frame_num-1, ed_frame_num):
id_offset= id - st_frame_num + 1
location= locations[id_offset] # id_offset, not id
gt_location = utils.find_gt_location(lines, id) #id, not id_offset
score = utils.cal_benchmark_score(location, gt_location, thresh)
total_score += score
ct += 1.0
total_score /= ct
avg_score += total_score
avg_score /= total
avg_score_over_sequences += avg_score
AUC_score.append(avg_score_over_sequences/num_evaluate)
print("(thresh, AUC_score) = ", thresh, ' ', avg_score_over_sequences/num_evaluate)
method_name= choose_benchmark_method(method_id)
file_name= 'output/SRE_score_' + method_name + '.pickle'
with open(file_name, 'wb') as f:
pickle.dump(AUC_score, f)
def evaluate_benchmark_avg_IOU_SRE(method_id): # calculate average IoU over the SRE runs of benchmark algorithms
''' PARAMETERS '''
evaluate_st = 0
evaluate_ed = 29
SRE_num = 12
num_evaluate= evaluate_ed - evaluate_st + 1.0
method_name= choose_benchmark_method(method_id)
file_name= 'output/IOU/SRE_avgIOU_' + method_name + '.txt'
f= open(file_name, 'w')
avg_score= 0
for sequence_id in range(evaluate_st, evaluate_ed + 1):
[wid, ht, sequence_name, dummy_1, dummy_2] = utils.choose_video_sequence(sequence_id)
# Load ground truth detection loc
gt_file_path= os.path.join('benchmark/DATA', sequence_name, 'groundtruth_rect.txt')
lines = utils.load_dataset_gt(gt_file_path)
# Load benchmark detection loc
mat_file = choose_mat_file(method_id, sequence_id)
total= 0
total_score= 0
for locations_id in range(0, SRE_num):
[locations, st_frame_num, ed_frame_num] = load_mat_results(mat_file, False, True, False, locations_id)
for id in range(st_frame_num-1, ed_frame_num):
id_offset= id - st_frame_num + 1
location= locations[id_offset] # id_offset, not id
gt_location = utils.find_gt_location(lines, id) #id, not id_offset
score = utils.iou(location, gt_location)
total_score += score
total += 1.0
total_score /= total
avg_score += total_score
avg_IOU_SRE_score= avg_score/num_evaluate
print("avg_IOU_score_SRE: ", avg_IOU_SRE_score)
f.write(method_name + ', ' + sequence_name + ": SRE_avg_IOU = " + str("{:.3f}".format(avg_IOU_SRE_score)) + '\n')
f.close()
return avg_IOU_SRE_score
''' -----------------------------Deal with ROLO results: python format-----------------------------'''
def draw_AUC_OPE():
num_methods = 9 + 1
with open('output/AUC_score.pickle', 'rb') as f:  # binary mode to match the 'wb' writers
[yolo_AUC_score, rolo_AUC_score] = pickle.load(f)
yolo_AUC_score.append(0)
rolo_AUC_score.append(0)
yolo_AUC_score = np.asarray(yolo_AUC_score)
rolo_AUC_score = np.asarray(rolo_AUC_score)
with open('output/AUC_kalman_score.pickle', 'rb') as f:
[yolo_kalman_AUC_score] = pickle.load(f)
yolo_kalman_AUC_score.append(0)
yolo_kalman_AUC_score = np.asarray(yolo_kalman_AUC_score)
benchmark_AUC_score = []
for method_id in range(0, num_methods):
method_name= choose_benchmark_method(method_id)
file_name= 'output/AUC_score_' + method_name + '.pickle'
with open(file_name, 'rb') as f:
AUC_score = pickle.load(f)
AUC_score.append(0)
AUC_score = np.asarray(AUC_score)
benchmark_AUC_score.append(AUC_score)
x = [i/100.0 for i in range(0, 105, 5)]
print(len(x))
print(len(yolo_AUC_score))
print(x)
print(yolo_AUC_score)
print(rolo_AUC_score)
fig= plot.figure()
ax = fig.gca()
ax.set_xticks(np.arange(0, 1.1, 0.1))
ax.set_yticks(np.arange(0, 100, 10))
plot.title("Success Plot of OPE")
#plot.title("Success Plot of OPE30: AUC(Average Under Curve)")
plot.xlabel("overlap threshold")
plot.ylabel("success rate")
'''
plot.plot(x, rolo_AUC_score*100, color = 'g', label = "ROLO", linestyle='-', marker= "s", markersize= 5, linewidth= 1, markevery= 1)
plot.plot(x, yolo_AUC_score*100, color = 'g', label = "YOLO", linestyle='--', marker= "o", markersize= 5, linewidth= 1, markevery= 1)
plot.plot(x, benchmark_AUC_score[0]*100, color = 'r', label = "STRUCK", linestyle='-', marker= "o", markersize= 5, linewidth= 1, markevery= 1)
plot.plot(x, benchmark_AUC_score[1]*100, color = 'r', label = "CXT", linestyle='--', marker= "o", markersize= 5, linewidth= 1, markevery= 1)
plot.plot(x, benchmark_AUC_score[2]*100, color = 'b', label = "TLD", linestyle='-', marker= "o", markersize= 5, linewidth= 1, markevery= 1)
plot.plot(x, benchmark_AUC_score[3]*100, color = 'b', label = "OAB", linestyle='--', marker= "o", markersize= 5, linewidth= 1, markevery= 1)
plot.plot(x, benchmark_AUC_score[4]*100, color = 'c', label = "CSK", linestyle='-', marker= "o", markersize= 5, linewidth= 1, markevery= 1)
plot.plot(x, benchmark_AUC_score[5]*100, color = 'c', label = "RS", linestyle='--', marker= "o", markersize= 5, linewidth= 1, markevery= 1)
plot.plot(x, benchmark_AUC_score[6]*100, color = 'm', label = "LSK", linestyle='-', marker= "o", markersize= 5, linewidth= 1, markevery= 1)
plot.plot(x, benchmark_AUC_score[7]*100, color = 'm', label = "VTD", linestyle='--', marker= "o", markersize= 5, linewidth= 1, markevery= 1)
plot.plot(x, benchmark_AUC_score[8]*100, color = 'y', label = "VTS", linestyle='-', marker= "o", markersize= 5, linewidth= 1, markevery= 1)
'''
'''test all 30'''
# #plot.plot(x, rolo_AUC_score*100, color = 'g', label = "ROLO [0.564]", linestyle='-', markersize= 5, linewidth= 2, markevery= 1) #exp all frames
# plot.plot(x, rolo_AUC_score*100, color = 'g', label = "ROLO [0.458]", linestyle='-', markersize= 5, linewidth= 2, markevery= 1) #exp 1/3 frames
# #plot.plot(x, benchmark_AUC_score[9]*100, color = 'y', label = "CNN-SVM[0.520]", linestyle='--', markersize= 5, linewidth= 2, markevery= 1)
# #plot.plot(x, yolo_AUC_score*100, color = 'g', label = "YOLO [0.440]", linestyle='--', markersize= 5, linewidth= 2, markevery= 1)
# plot.plot(x, benchmark_AUC_score[0]*100, color = 'r', label = "STRUCK [0.410]", linestyle='-', markersize= 5, linewidth= 2, markevery= 1)
# plot.plot(x, benchmark_AUC_score[3]*100, color = 'b', label = "OAB [0.366]", linestyle='--', markersize= 5, linewidth= 2, markevery= 1)
# plot.plot(x, benchmark_AUC_score[6]*100, color = 'm', label = "LSK [0.356]", linestyle='-', markersize= 5, linewidth= 2, markevery= 1)
# plot.plot(x, benchmark_AUC_score[2]*100, color = 'b', label = "TLD [0.343]", linestyle='-', markersize= 5, linewidth= 2, markevery= 1)
#
# plot.plot(x, yolo_kalman_AUC_score*100, color = 'k', label = "YOLO+SORT [0.341]", linestyle='--', markersize= 5, linewidth= 2, markevery= 1)
#
# plot.plot(x, benchmark_AUC_score[1]*100, color = 'r', label = "CXT [0.333]", linestyle='--', markersize= 5, linewidth= 2, markevery= 1)
# plot.plot(x, benchmark_AUC_score[5]*100, color = 'c', label = "RS [0.325]", linestyle='--', markersize= 5, linewidth= 2, markevery= 1)
# plot.plot(x, benchmark_AUC_score[8]*100, color = 'y', label = "VTS [0.320]", linestyle='-', markersize= 5, linewidth= 2, markevery= 1)
# plot.plot(x, benchmark_AUC_score[7]*100, color = 'm', label = "VTD [0.315]", linestyle='--', markersize= 5, linewidth= 2, markevery= 1)
# plot.plot(x, benchmark_AUC_score[4]*100, color = 'c', label = "CSK [0.311]", linestyle='-', markersize= 5, linewidth= 2, markevery= 1)
'''test last 8'''
plot.plot(x, rolo_AUC_score*100, color = 'g', label = "ROLO [0.476]", linestyle='-', markersize= 5, linewidth= 2, markevery= 1)
#plot.plot(x, yolo_AUC_score*100, color = 'g', label = "YOLO [0.459]", linestyle='--', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, benchmark_AUC_score[6]*100, color = 'm', label = "LSK [0.454]", linestyle='-', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, benchmark_AUC_score[8]*100, color = 'y', label = "VTS [0.444]", linestyle='-', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, benchmark_AUC_score[7]*100, color = 'm', label = "VTD [0.433]", linestyle='--', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, benchmark_AUC_score[1]*100, color = 'r', label = "CXT [0.433]", linestyle='--', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, benchmark_AUC_score[0]*100, color = 'r', label = "STRUCK [0.428]", linestyle='-', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, yolo_kalman_AUC_score*100, color = 'k', label = "YOLO+SORT [0.406]", linestyle='--', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, benchmark_AUC_score[4]*100, color = 'c', label = "CSK [0.406]", linestyle='-', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, benchmark_AUC_score[5]*100, color = 'c', label = "RS [0.392]", linestyle='--', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, benchmark_AUC_score[3]*100, color = 'b', label = "OAB [0.366]", linestyle='--', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, benchmark_AUC_score[2]*100, color = 'b', label = "TLD [0.318]", linestyle='-', markersize= 5, linewidth= 2, markevery= 1)
#plot.plot(x, benchmark_AUC_score[9]*100, color = 'y', label = "VTS", linestyle='--', markersize= 5, linewidth= 2, markevery= 1)
plot.axis([0, 1, 0, 100])
plot.grid()
plot.legend(loc = 1, prop={'size':10})
plot.show()
def draw_AUC_TRE():
with open('output/AUC_score_TRE.pickle', 'rb') as f:
[yolo_AUC_score, rolo_AUC_score] = pickle.load(f)
yolo_AUC_score.append(0)
rolo_AUC_score.append(0)
yolo_AUC_score = np.asarray(yolo_AUC_score)
rolo_AUC_score = np.asarray(rolo_AUC_score)
with open('output/AUC_kalman_score_TRE.pickle', 'rb') as f:
[yolo_kalman_AUC_score] = pickle.load(f)
yolo_kalman_AUC_score.append(0)
yolo_kalman_AUC_score = np.asarray(yolo_kalman_AUC_score)
benchmark_AUC_score = []
for method_id in range(0, 9):
method_name= choose_benchmark_method(method_id)
file_name= 'output/TRE_score_' + method_name + '.pickle'
with open(file_name, 'rb') as f:
AUC_score = pickle.load(f)
AUC_score.append(0)
AUC_score = np.asarray(AUC_score)
benchmark_AUC_score.append(AUC_score)
x = [i/100.0 for i in range(0, 105, 5)]
print(len(x))
print(len(yolo_AUC_score))
print(x)
print(yolo_AUC_score)
print(rolo_AUC_score)
fig= plot.figure()
ax = fig.gca()
ax.set_xticks(np.arange(0, 1.1, 0.1))
ax.set_yticks(np.arange(0, 100, 10))
plot.title("Success Plot of TRE")
plot.xlabel("overlap threshold")
plot.ylabel("success rate")
'''test all 30'''
plot.plot(x, rolo_AUC_score*100, color = 'g', label = "ROLO [0.562]", linestyle='-', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, benchmark_AUC_score[0]*100, color = 'r', label = "STRUCK [0.548]", linestyle='-', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, benchmark_AUC_score[3]*100, color = 'b', label = "OAB [0.462]", linestyle='--', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, benchmark_AUC_score[4]*100, color = 'c', label = "CSK [0.459]", linestyle='-', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, benchmark_AUC_score[1]*100, color = 'r', label = "CXT [0.432]", linestyle='--', markersize= 5, linewidth= 2, markevery= 1)
#plot.plot(x, yolo_AUC_score*100, color = 'g', label = "YOLO [0.429]", linestyle='--', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, benchmark_AUC_score[6]*100, color = 'm', label = "LSK [0.427]", linestyle='-', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, benchmark_AUC_score[5]*100, color = 'c', label = "RS [0.425]", linestyle='--', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, benchmark_AUC_score[2]*100, color = 'b', label = "TLD [0.414]", linestyle='-', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, benchmark_AUC_score[7]*100, color = 'm', label = "VTD [0.414]", linestyle='--', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, benchmark_AUC_score[8]*100, color = 'y', label = "VTS [0.397]", linestyle= '-', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, yolo_kalman_AUC_score*100, color = 'k', label = "YOLO+SORT [0.322]", linestyle= '--', markersize= 5, linewidth= 2, markevery= 1)
plot.axis([0, 1, 0, 100])
plot.grid()
plot.legend(loc = 1, prop={'size':10})
plot.show()
def draw_AUC_SRE():
with open('output/AUC_score.pickle', 'rb') as f:
[yolo_AUC_score, rolo_AUC_score] = pickle.load(f)
yolo_AUC_score.append(0)
rolo_AUC_score.append(0)
yolo_AUC_score = np.asarray(yolo_AUC_score)
rolo_AUC_score = np.asarray(rolo_AUC_score)
with open('output/AUC_kalman_score.pickle', 'rb') as f:
[yolo_kalman_AUC_score] = pickle.load(f)
yolo_kalman_AUC_score.append(0)
yolo_kalman_AUC_score = np.asarray(yolo_kalman_AUC_score)
benchmark_AUC_score = []
for method_id in range(0, 9):
method_name= choose_benchmark_method(method_id)
file_name= 'output/SRE_score_' + method_name + '.pickle'
with open(file_name, 'rb') as f:
AUC_score = pickle.load(f)
AUC_score.append(0)
AUC_score = np.asarray(AUC_score)
benchmark_AUC_score.append(AUC_score)
x = [i/100.0 for i in range(0, 105, 5)]
print(len(x))
print(len(yolo_AUC_score))
print(x)
print(yolo_AUC_score)
print(rolo_AUC_score)
fig= plot.figure()
ax = fig.gca()
ax.set_xticks(np.arange(0, 1.1, 0.1))
ax.set_yticks(np.arange(0, 100, 10))
plot.title("Success Plot of SRE")
plot.xlabel("overlap threshold")
plot.ylabel("success rate")
plot.plot(x, rolo_AUC_score*100, color = 'g', label = "ROLO [0.564]", linestyle='-', markersize= 5, linewidth= 2, markevery= 1)
#plot.plot(x, yolo_AUC_score*100, color = 'g', label = "YOLO [0.440]", linestyle='--', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, benchmark_AUC_score[0]*100, color = 'r', label = "STRUCK [0.391]", linestyle='-', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, yolo_kalman_AUC_score*100, color = 'k', label = "YOLO+SORT [0.341]", linestyle='--', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, benchmark_AUC_score[3]*100, color = 'b', label = "OAB [0.341]", linestyle='--', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, benchmark_AUC_score[2]*100, color = 'b', label = "TLD [0.331]", linestyle='-', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, benchmark_AUC_score[5]*100, color = 'c', label = "RS [0.320]", linestyle='--', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, benchmark_AUC_score[6]*100, color = 'm', label = "LSK [0.302]", linestyle='-', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, benchmark_AUC_score[1]*100, color = 'r', label = "CXT [0.295]", linestyle='--', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, benchmark_AUC_score[4]*100, color = 'c', label = "CSK [0.295]", linestyle='-', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, benchmark_AUC_score[7]*100, color = 'm', label = "VTD [0.286]", linestyle='--', markersize= 5, linewidth= 2, markevery= 1)
plot.plot(x, benchmark_AUC_score[8]*100, color = 'y', label = "VTS [0.284]", linestyle='-', markersize= 5, linewidth= 2, markevery= 1)
plot.axis([0, 1, 0, 100])
plot.grid()
plot.legend(loc = 1, prop={'size':10})
plot.show()
def draw_step_IOU_curve():
#x = [i for i in range(3, 11, 3)]
x= np.asarray([1, 3, 6, 9])
avg_IOU = np.asarray([0.359, 0.434, 0.458, 0.427])
fig= plot.figure()
ax = fig.gca()
ax.set_xticks(np.arange(1, 11, 1))
ax.set_yticks(np.arange(0.35, 0.47, 0.02))
plot.title("Average accuracy over the number of steps")
plot.xlabel("step")
plot.ylabel("Accuracy [IoU]")
plot.plot(x, avg_IOU, color = 'g', linestyle='-', marker= "s", markersize= 10, linewidth= 2, markevery= 1)
plot.axis([1, 10, 0.35, 0.47])
plot.grid()
plot.legend(loc = 1, prop={'size':10})
plot.show()
def draw_step_fps_curve():
avg_fps = np.asarray([271, 110, 61, 42])
x= np.asarray([1, 3, 6, 9])
#x = [i for i in range(3, 11, 3)]
print(x)
fig= plot.figure()
ax = fig.gca()
ax.set_xticks(np.arange(1, 11, 1))
ax.set_yticks(np.arange(0, 275, 30))
plot.title("FPS of the tracking module over the number of steps")
plot.xlabel("step")
plot.ylabel("Frames Per Second (fps)")
plot.plot(x, avg_fps, color = 'r', linestyle='-', marker= "^", markersize= 10, linewidth= 2, markevery= 1)
plot.axis([1, 10, 20, 275])
plot.grid()
plot.legend(loc = 1, prop={'size':10})
plot.show()
def evaluate_AUC_TRE(): # calculate AUC(Average Under Curve) TRE
''' PARAMETERS '''
num_steps= 3
TRE_num = 20
evaluate_st = 0
evaluate_ed = 29
num_evaluate= evaluate_ed - evaluate_st + 1
yolo_AUC_score= []
rolo_AUC_score= []
for thresh_int in range(0, 100, 5):
thresh = thresh_int / 100.0 + 0.0001
#print("thresh= ", thresh)
rolo_avg_score= 0
yolo_avg_score= 0
for sequence_id in range(evaluate_st, evaluate_ed + 1):
[wid, ht, sequence_name, dummy_1, dummy_2] = utils.choose_video_sequence(sequence_id)
img_fold_path = os.path.join('benchmark/DATA', sequence_name, 'img/')
gt_file_path= os.path.join('benchmark/DATA', sequence_name, 'groundtruth_rect.txt')
yolo_out_path= os.path.join('benchmark/DATA', sequence_name, 'yolo_out/')
rolo_out_path= os.path.join('benchmark/DATA', sequence_name, 'rolo_out_test/')
paths_imgs = utils.load_folder( img_fold_path)
paths_rolo= utils.load_folder( rolo_out_path)
lines = utils.load_dataset_gt( gt_file_path)
# Define the codec and create VideoWriter object
rolo_total_score_over_TREs= 0
yolo_total_score_over_TREs= 0
# Load benchmark detection loc
mat_file = choose_mat_file(0, sequence_id)
for locations_id in range(0, TRE_num):
[locations, st_frame_num, ed_frame_num] = load_mat_results(mat_file, True, False, False, locations_id)
print(st_frame_num)
ct_frames= 0
rolo_total_score_over_frames= 0
yolo_total_score_over_frames= 0
for i in range(st_frame_num-1, len(paths_rolo)- num_steps):
id= i + 1
test_id= id + num_steps
yolo_location= utils.find_yolo_location(yolo_out_path, test_id)
yolo_location= utils.locations_normal(wid, ht, yolo_location)
rolo_location= utils.find_rolo_location( rolo_out_path, test_id)
rolo_location = utils.locations_normal( wid, ht, rolo_location)
gt_location = utils.find_gt_location( lines, test_id - 1)
rolo_score = utils.cal_rolo_score(rolo_location, gt_location, thresh)
rolo_total_score_over_frames += rolo_score
yolo_score = utils.cal_yolo_score(yolo_location, gt_location, thresh)
yolo_total_score_over_frames += yolo_score
ct_frames += 1.0
rolo_total_score_over_frames /= ct_frames
yolo_total_score_over_frames /= ct_frames
rolo_total_score_over_TREs += rolo_total_score_over_frames
yolo_total_score_over_TREs += yolo_total_score_over_frames
rolo_total_score_over_TREs /= (TRE_num * 1.0)
yolo_total_score_over_TREs /= (TRE_num * 1.0)
rolo_avg_score += rolo_total_score_over_TREs
yolo_avg_score += yolo_total_score_over_TREs
print('Sequence ID: ', sequence_id)
print("yolo_avg_score = ", yolo_total_score_over_TREs)
print("rolo_avg_score = ", rolo_total_score_over_TREs)
yolo_AUC_score.append(yolo_avg_score/num_evaluate)
rolo_AUC_score.append(rolo_avg_score/num_evaluate)
print("(thresh, yolo_AUC_score) = ", thresh, ' ', yolo_avg_score/num_evaluate)
print("(thresh, rolo_AUC_score) = ", thresh, ' ', rolo_avg_score/num_evaluate)
with open('output/AUC_score_TRE.pickle', 'wb') as f:
pickle.dump([yolo_AUC_score, rolo_AUC_score], f)
def evaluate_kalman_AUC_TRE(): # calculate AUC(Average Under Curve) TRE
''' PARAMETERS '''
num_steps= 3
TRE_num = 20
evaluate_st = 0
evaluate_ed = 29
num_evaluate= evaluate_ed - evaluate_st + 1
yolo_AUC_score= []
rolo_AUC_score= []
for thresh_int in range(0, 100, 5):
thresh = thresh_int / 100.0 + 0.0001
#print("thresh= ", thresh)
yolo_avg_score= 0
for sequence_id in range(evaluate_st, evaluate_ed + 1):
[wid, ht, sequence_name, dummy_1, dummy_2] = utils.choose_video_sequence(sequence_id)
img_fold_path = os.path.join('benchmark/DATA', sequence_name, 'img/')
gt_file_path= os.path.join('benchmark/DATA', sequence_name, 'groundtruth_rect.txt')
yolo_out_path= os.path.join('benchmark/DATA', sequence_name, 'yolo_output_kalman_txt/')
paths_imgs = utils.load_folder( img_fold_path)
paths_yolo= utils.load_folder( yolo_out_path)
lines = utils.load_dataset_gt( gt_file_path)
# Define the codec and create VideoWriter object
yolo_total_score_over_TREs= 0
# Load benchmark detection loc
mat_file = choose_mat_file(0, sequence_id)
for locations_id in range(0, TRE_num):
[locations, st_frame_num, ed_frame_num] = load_mat_results(mat_file, True, False, False, locations_id)
#print(st_frame_num)
ct_frames= 0
yolo_total_score_over_frames= 0
for i in range(st_frame_num-1, len(paths_yolo)- num_steps):
id= i + 1
test_id= id + num_steps
yolo_location= utils.find_yolo_kalman_location(yolo_out_path, test_id)
gt_location = utils.find_gt_location( lines, test_id - 1)
yolo_score = utils.cal_yolo_kalman_score(yolo_location, gt_location, thresh)
yolo_total_score_over_frames += yolo_score
ct_frames += 1.0
if ct_frames != 0: yolo_total_score_over_frames /= ct_frames
yolo_total_score_over_TREs += yolo_total_score_over_frames
yolo_total_score_over_TREs /= (TRE_num * 1.0)
yolo_avg_score += yolo_total_score_over_TREs
print('Sequence ID: ', sequence_id)
print("yolo_avg_score = ", yolo_total_score_over_TREs)
yolo_AUC_score.append(yolo_avg_score/num_evaluate)
print("(thresh, yolo_AUC_score) = ", thresh, ' ', yolo_avg_score/num_evaluate)
with open('output/AUC_kalman_score_TRE.pickle', 'wb') as f:
pickle.dump([yolo_AUC_score], f)
def evaluate_AUC(): # calculate AUC(Average Under Curve)
''' PARAMETERS '''
num_steps= 3
evaluate_st = 0
evaluate_ed = 29
num_evaluate= evaluate_ed - evaluate_st + 1
yolo_AUC_score= []
rolo_AUC_score= []
for thresh_int in range(0, 100, 5):
thresh = thresh_int / 100.0 + 0.0001
print("thresh= ", thresh)
rolo_avg_score= 0
yolo_avg_score= 0
for test in range(evaluate_st, evaluate_ed + 1):
[wid, ht, sequence_name, dummy_1, dummy_2] = utils.choose_video_sequence(test)
img_fold_path = os.path.join('benchmark/DATA', sequence_name, 'img/')
gt_file_path= os.path.join('benchmark/DATA', sequence_name, 'groundtruth_rect.txt')
yolo_out_path= os.path.join('benchmark/DATA', sequence_name, 'yolo_out/')
rolo_out_path= os.path.join('benchmark/DATA', sequence_name, 'rolo_out_test/')
print(rolo_out_path)
paths_imgs = utils.load_folder( img_fold_path)
paths_rolo= utils.load_folder( rolo_out_path)
lines = utils.load_dataset_gt( gt_file_path)
# Define the codec and create VideoWriter object
total= 0
rolo_total_score= 0
yolo_total_score= 0
for i in range(len(paths_rolo)- num_steps):
id= i + 1
test_id= id + num_steps
#path = paths_imgs[test_id]
#img = utils.file_to_img(None, path)
#if(img is None): break
yolo_location= utils.find_yolo_location(yolo_out_path, test_id)
yolo_location= utils.locations_normal(wid, ht, yolo_location)
rolo_location= utils.find_rolo_location( rolo_out_path, test_id)
rolo_location = utils.locations_normal( wid, ht, rolo_location)<|fim▁hole|>
rolo_score = utils.cal_rolo_score(rolo_location, gt_location, thresh)
#print('rolo_score', rolo_score)
rolo_total_score += rolo_score
#print('rolo_total_score', rolo_total_score)
yolo_score = utils.cal_yolo_score(yolo_location, gt_location, thresh)
yolo_total_score += yolo_score
total += 1.0
rolo_total_score /= total
yolo_total_score /= total
rolo_avg_score += rolo_total_score
yolo_avg_score += yolo_total_score
print('Sequence ID: ', test)
print("yolo_avg_score = ", yolo_total_score)
print("rolo_avg_score = ", rolo_total_score)
yolo_AUC_score.append(yolo_avg_score/num_evaluate)
rolo_AUC_score.append(rolo_avg_score/num_evaluate)
print("(thresh, yolo_AUC_score) = ", thresh, ' ', yolo_avg_score/num_evaluate)
print("(thresh, rolo_AUC_score) = ", thresh, ' ', rolo_avg_score/num_evaluate)
with open('output/AUC_score.pickle', 'wb') as f:
pickle.dump([yolo_AUC_score, rolo_AUC_score], f)
#draw_AUC()
def evaluate_kalman_AUC(): # calculate AUC(Average Under Curve)
''' PARAMETERS '''
num_steps= 3
evaluate_st = 20
evaluate_ed = 29
num_evaluate= evaluate_ed - evaluate_st + 1
yolo_AUC_score= []
for thresh_int in range(0, 100, 5):
thresh = thresh_int / 100.0 + 0.0001
print("thresh= ", thresh)
yolo_avg_score= 0
for test in range(evaluate_st, evaluate_ed + 1):
[wid, ht, sequence_name, dummy_1, dummy_2] = utils.choose_video_sequence(test)
img_fold_path = os.path.join('benchmark/DATA', sequence_name, 'img/')
gt_file_path= os.path.join('benchmark/DATA', sequence_name, 'groundtruth_rect.txt')
yolo_out_path= os.path.join('benchmark/DATA', sequence_name, 'yolo_output_kalman_txt/')
print(yolo_out_path)
paths_rolo= utils.load_folder( yolo_out_path)
lines = utils.load_dataset_gt( gt_file_path)
# Define the codec and create VideoWriter object
total= 0
yolo_total_score= 0
for i in range(len(paths_rolo)- num_steps):
id= i + 1
test_id= id + num_steps
#path = paths_imgs[test_id]
#img = utils.file_to_img(None, path)
#if(img is None): break
yolo_location= utils.find_yolo_kalman_location(yolo_out_path, test_id)
#yolo_location= utils.locations_normal(wid, ht, yolo_location)
gt_location = utils.find_gt_location( lines, test_id - 1)
yolo_score = utils.cal_yolo_kalman_score(yolo_location, gt_location, thresh)
yolo_total_score += yolo_score
total += 1.0
yolo_total_score /= total
yolo_avg_score += yolo_total_score
print('Sequence ID: ', test)
print("yolo_avg_score = ", yolo_total_score)
yolo_AUC_score.append(yolo_avg_score/num_evaluate)
print("(thresh, yolo_kalman_AUC_score) = ", thresh, ' ', yolo_avg_score/num_evaluate)
with open('output/AUC_kalman_score.pickle', 'wb') as f:
pickle.dump([yolo_AUC_score], f)
#draw_AUC()
def evaluate_avg_IOU(): # calculate AOS(Average Overlap Score) for each sequence
''' PARAMETERS '''
num_steps= 3
output_video = False
display_video = False
evaluate_st = 0
evaluate_ed = 29
yolo_ious = []
rolo_ious = []
for test in range(evaluate_st, evaluate_ed + 1):
[wid, ht, sequence_name, dummy_1, dummy_2] = utils.choose_video_sequence(test)
img_fold_path = os.path.join('benchmark/DATA', sequence_name, 'img/')
gt_file_path= os.path.join('benchmark/DATA', sequence_name, 'groundtruth_rect.txt')
yolo_out_path= os.path.join('benchmark/DATA', sequence_name, 'yolo_out/')
rolo_out_path= os.path.join('benchmark/DATA', sequence_name, 'rolo_out_test/')
print(rolo_out_path)
paths_imgs = utils.load_folder( img_fold_path)
paths_rolo= utils.load_folder( rolo_out_path)
lines = utils.load_dataset_gt( gt_file_path)
# Define the codec and create VideoWriter object
fourcc= cv2.cv.CV_FOURCC(*'DIVX')
video_name = sequence_name + '_test.avi'
video_path = os.path.join('output/videos/', video_name)
if output_video is True: video = cv2.VideoWriter(video_path, fourcc, 20, (wid, ht))
total= 0
rolo_avgloss= 0
yolo_avgloss= 0
for i in range(len(paths_rolo)- num_steps-1):
id= i + 1
test_id= id + num_steps #* num_steps + 1
path = paths_imgs[test_id]
img = utils.file_to_img( path)
if(img is None): break
yolo_location= utils.find_yolo_location( yolo_out_path, test_id)
yolo_location= utils.locations_normal(wid, ht, yolo_location)
#print(yolo_location)
rolo_location= utils.find_rolo_location(rolo_out_path, test_id)
rolo_location = utils.locations_normal(wid, ht, rolo_location)
#print(rolo_location)
gt_location = utils.find_gt_location(lines, test_id - 1)
#print('gt: ' + str(test_id))
#print(gt_location)
if display_video is True or output_video is True: frame = utils.debug_3_locations(img, gt_location, yolo_location, rolo_location)  # frame is also needed when only writing video
if output_video is True: video.write(frame)
#cv2.imshow('frame',frame)
#cv2.waitKey(100)
rolo_loss = utils.cal_rolo_IOU(rolo_location, gt_location)
rolo_avgloss += rolo_loss
yolo_loss= utils.cal_yolo_IOU(yolo_location, gt_location)
yolo_avgloss += yolo_loss
total += 1
rolo_avgloss /= total
yolo_avgloss /= total
print('Sequence ID: ', test)
print("yolo_avg_iou = ", yolo_avgloss)
print("rolo_avg_iou = ", rolo_avgloss)
yolo_ious.append(yolo_avgloss)
rolo_ious.append(rolo_avgloss)
if output_video is True: video.release()
#cv2.destroyAllWindows()
print('yolo_ious: ', yolo_ious)
print('rolo_ious: ', rolo_ious)
log_file = open("output/testing-log-final.txt", "a")
log_file.write('YOLO_avg_IOU: ')
for item in range(len(yolo_ious)):
log_file.write(str("{:.3f}".format(yolo_ious[item])) + ' ')
log_file.write('\nROLO_avg_IOU: ')
for item in range(len(rolo_ious)):
log_file.write(str("{:.3f}".format(rolo_ious[item])) + ' ')
log_file.write('\n\n')
yolo_avg_iou = np.mean(yolo_ious)
rolo_avg_iou = np.mean(rolo_ious)
log_file.write('YOLO_total_avg_IOU: ')
log_file.write(str("{:.3f}".format(yolo_avg_iou))+ ' ')
log_file.write('ROLO_total_avg_IOU: ')
log_file.write(str("{:.3f}".format(rolo_avg_iou)) + ' ')
log_file.close()
def evaluate_avg_IOU_kalman(): # calculate AOS(Average Overlap Score) for each sequence
''' PARAMETERS '''
num_steps= 3
output_video = False
display_video = False
evaluate_st = 20
evaluate_ed = 29
yolo_ious = []
for test in range(evaluate_st, evaluate_ed + 1):
[wid, ht, sequence_name, dummy_1, dummy_2] = utils.choose_video_sequence(test)
img_fold_path = os.path.join('benchmark/DATA', sequence_name, 'img/')
gt_file_path= os.path.join('benchmark/DATA', sequence_name, 'groundtruth_rect.txt')
yolo_kalman_path= os.path.join('benchmark/DATA', sequence_name, 'yolo_output_kalman_txt/')
paths_imgs = utils.load_folder( img_fold_path)
paths_yolo= utils.load_folder( yolo_kalman_path)
lines = utils.load_dataset_gt( gt_file_path)
# Define the codec and create VideoWriter object
fourcc= cv2.cv.CV_FOURCC(*'DIVX')
video_name = sequence_name + '_test.avi'
video_path = os.path.join('output/videos_kalman/', video_name)
if output_video is True: video = cv2.VideoWriter(video_path, fourcc, 20, (wid, ht))
total= 0
yolo_avgloss= 0
for i in range(len(paths_yolo)- num_steps-1):
id= i + 1
test_id= id + num_steps #* num_steps + 1
path = paths_imgs[test_id]
img = utils.file_to_img( path)
if(img is None): break
yolo_location= utils.find_yolo_kalman_location( yolo_kalman_path, test_id)
#yolo_location= utils.locations_normal(wid, ht, yolo_location)
#print(yolo_location)
gt_location = utils.find_gt_location(lines, test_id - 1)
#print('gt: ' + str(test_id))
#print(gt_location)
if display_video is True or output_video is True:
    frame = utils.debug_kalman_locations(img, gt_location, yolo_location)
    if display_video is True:
        cv2.imshow('frame', frame)
        cv2.waitKey(100)
if output_video is True: video.write(frame)
yolo_loss = utils.iou(yolo_location, gt_location)
#yolo_loss= utils.cal_yolo_IOU(yolo_location, gt_location)
yolo_avgloss += yolo_loss
total += 1
yolo_avgloss /= total
print('Sequence ID: ', test)
print("yolo_avg_iou = ", yolo_avgloss)
yolo_ious.append(yolo_avgloss)
if output_video is True: video.release()
#cv2.destroyAllWindows()
print('yolo_ious: ', yolo_ious)
log_file = open("output/yolo_kalman_log.txt", "a")
log_file.write('YOLO_avg_IOU: ')
for item in range(len(yolo_ious)):
log_file.write(str("{:.3f}".format(yolo_ious[item])) + ' ')
log_file.write('\n\n')
yolo_avg_iou = np.mean(yolo_ious)
log_file.write('YOLO_total_avg_IOU: ')
log_file.write(str("{:.3f}".format(yolo_avg_iou)) + ' ')
log_file.close()
def evaluate_avg_IOU_TRE(): # calculate average IoU over the TRE runs
''' PARAMETERS '''
num_steps= 3
TRE_num = 20
evaluate_st = 0
evaluate_ed = 29
num_evaluate= evaluate_ed - evaluate_st + 1
rolo_avg_score_over_sequences= 0
yolo_avg_score_over_sequences= 0
for sequence_id in range(evaluate_st, evaluate_ed + 1):
[wid, ht, sequence_name, dummy_1, dummy_2] = utils.choose_video_sequence(sequence_id)
img_fold_path = os.path.join('benchmark/DATA', sequence_name, 'img/')
gt_file_path= os.path.join('benchmark/DATA', sequence_name, 'groundtruth_rect.txt')
yolo_out_path= os.path.join('benchmark/DATA', sequence_name, 'yolo_out/')
rolo_out_path= os.path.join('benchmark/DATA', sequence_name, 'rolo_out_test/')
paths_imgs = utils.load_folder( img_fold_path)
paths_rolo= utils.load_folder( rolo_out_path)
lines = utils.load_dataset_gt( gt_file_path)
# Define the codec and create VideoWriter object
rolo_total_score_over_TREs= 0
yolo_total_score_over_TREs= 0
# Load benchmark detection loc
mat_file = choose_mat_file(0, sequence_id)
for locations_id in range(0, TRE_num):
#print(locations_id)
[locations, st_frame_num, ed_frame_num] = load_mat_results(mat_file, True, False, False, locations_id)
#print(ed_frame_num)
ct_frames = 0
rolo_score_over_interval= 0
yolo_score_over_interval= 0
for i in range(st_frame_num-1, len(paths_rolo)- num_steps):
id= i + 1
test_id= id + num_steps
yolo_location= utils.find_yolo_location(yolo_out_path, test_id)
yolo_location= utils.locations_normal(wid, ht, yolo_location)
rolo_location= utils.find_rolo_location( rolo_out_path, test_id)
rolo_location = utils.locations_normal( wid, ht, rolo_location)
gt_location = utils.find_gt_location( lines, test_id - 1)
rolo_score = utils.cal_rolo_IOU(rolo_location, gt_location)
rolo_score_over_interval += rolo_score
yolo_score = utils.cal_yolo_IOU(yolo_location, gt_location)
yolo_score_over_interval += yolo_score
ct_frames += 1.0
rolo_score_over_interval /= ct_frames
yolo_score_over_interval /= ct_frames
rolo_total_score_over_TREs += rolo_score_over_interval
yolo_total_score_over_TREs += yolo_score_over_interval
rolo_total_score_over_TREs /= (TRE_num * 1.0)
yolo_total_score_over_TREs /= (TRE_num * 1.0)
print('Sequence ID: ', sequence_id)
print("yolo_avg_score = ", yolo_total_score_over_TREs)
print("rolo_avg_score = ", rolo_total_score_over_TREs)
rolo_avg_score_over_sequences += rolo_total_score_over_TREs
yolo_avg_score_over_sequences += yolo_total_score_over_TREs
yolo_avg_IOU_TRE = yolo_avg_score_over_sequences/num_evaluate
rolo_avg_IOU_TRE = rolo_avg_score_over_sequences/num_evaluate
print("(yolo_avg_IOU_TRE) = ", yolo_avg_IOU_TRE)
print("(rolo_avg_IOU_TRE) = ", rolo_avg_IOU_TRE)
log_file = open("output/IOU/avg_IOU_TRE.txt", "a")
log_file.write('yolo_avg_IOU_TRE: ')
log_file.write(str("{:.3f}".format(yolo_avg_IOU_TRE)) + ' ')
log_file.write('\n rolo_avg_IOU_TRE: ')
log_file.write(str("{:.3f}".format(rolo_avg_IOU_TRE)) + ' ')
log_file.write('\n\n')
log_file.close()
def evaluate_avg_IOU_kalman_TRE(): # calculate average IoU over the TRE runs for YOLO+Kalman
''' PARAMETERS '''
num_steps= 3
TRE_num = 20
evaluate_st = 0
evaluate_ed = 29
num_evaluate= evaluate_ed - evaluate_st + 1
yolo_avg_score_over_sequences= 0
for sequence_id in range(evaluate_st, evaluate_ed + 1):
[wid, ht, sequence_name, dummy_1, dummy_2] = utils.choose_video_sequence(sequence_id)
img_fold_path = os.path.join('benchmark/DATA', sequence_name, 'img/')
gt_file_path= os.path.join('benchmark/DATA', sequence_name, 'groundtruth_rect.txt')
yolo_kalman_path= os.path.join('benchmark/DATA', sequence_name, 'yolo_output_kalman_txt/')
paths_imgs = utils.load_folder( img_fold_path)
paths_yolo= utils.load_folder( yolo_kalman_path)
lines = utils.load_dataset_gt( gt_file_path)
# Define the codec and create VideoWriter object
yolo_total_score_over_TREs= 0
# Load benchmark detection loc
mat_file = choose_mat_file(0, sequence_id)
for locations_id in range(0, TRE_num):
#print(locations_id)
[locations, st_frame_num, ed_frame_num] = load_mat_results(mat_file, True, False, False, locations_id)
#print(ed_frame_num)
ct_frames = 0
yolo_score_over_interval= 0
for i in range(st_frame_num-1, len(paths_yolo)- num_steps):
id= i + 1
test_id= id + num_steps
#print(test_id)
yolo_location= utils.find_yolo_kalman_location(yolo_kalman_path, test_id)
gt_location = utils.find_gt_location( lines, test_id - 1)
#yolo_score = utils.cal_yolo_kalman_IOU(yolo_location, gt_location)
yolo_score = utils.iou(yolo_location, gt_location)
#print(yolo_score)
yolo_score_over_interval += yolo_score
ct_frames += 1.0
if ct_frames != 0: yolo_score_over_interval /= ct_frames
yolo_total_score_over_TREs += yolo_score_over_interval
yolo_total_score_over_TREs /= (TRE_num * 1.0)
print('Sequence ID: ', sequence_id)
print("yolo_avg_score = ", yolo_total_score_over_TREs)
yolo_avg_score_over_sequences += yolo_total_score_over_TREs
yolo_avg_IOU_TRE = yolo_avg_score_over_sequences/num_evaluate
print("(yolo_avg_IOU_TRE) = ", yolo_avg_IOU_TRE)
log_file = open("output/IOU/avg_kalman_IOU_TRE.txt", "a")
log_file.write('yolo_kalman_avg_IOU_TRE: ')
log_file.write(str("{:.3f}".format(yolo_avg_IOU_TRE)) + ' ')
log_file.write('\n\n')
log_file.close()
'''----------------------------------------main-----------------------------------------------------'''
def main(argv):
#evaluate_avg_IOU()
#evaluate_avg_IOU_TRE()
#evaluate_avg_IOU_kalman()
#evaluate_avg_IOU_kalman_TRE()
#evaluate_AUC() #AUC_OPE and AUC_SRE is the same for ROLO and YOLO
#evaluate_AUC_TRE()
#evaluate_kalman_AUC()
#evaluate_kalman_AUC_TRE()
#for method_id in range(9, 10):
# evaluate_benchmark_avg_IOU(method_id)
#for method_id in range(0, 9):
# evaluate_benchmark_avg_IOU_TRE(method_id)
#for method_id in range(0, 9):
# evaluate_benchmark_avg_IOU_SRE(method_id)
#for method_id in range(9, 10):
# evaluate_benchmark_AUC_OPE(method_id)
#for method_id in range(0, 9):
# evaluate_benchmark_AUC_TRE(method_id)
#for method_id in range(0, 9):
# evaluate_benchmark_AUC_SRE(method_id)
draw_AUC_OPE()
#draw_AUC_TRE()
#draw_AUC_SRE()
#draw_step_IOU_curve()
#draw_step_fps_curve()
if __name__ == '__main__':
main(sys.argv)<|fim▁end|> |
gt_location = utils.find_gt_location( lines, test_id - 1) |
<|file_name|>clone.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from kafka.tools import log
from kafka.tools.assigner.actions import ActionModule
from kafka.tools.exceptions import ConfigurationException
class ActionClone(ActionModule):
name = "clone"
helpstr = "Copy partitions from some brokers to a new broker (increasing RF)"
def __init__(self, args, cluster):
super(ActionClone, self).__init__(args, cluster)
self.check_brokers()
if args.to_broker not in self.cluster.brokers:
raise ConfigurationException("Target broker is not in the brokers list for this cluster")
self.sources = args.brokers
self.to_broker = self.cluster.brokers[args.to_broker]
@classmethod
def _add_args(cls, parser):
parser.add_argument('-b', '--brokers', help="List of source broker IDs", required=True, type=int, nargs='*')
parser.add_argument('-t', '--to_broker', help="Broker ID to copy partitions to", required=True, type=int)
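    # Example invocation (hypothetical command line -- the exact entry-point
    # name depends on how kafka-tools is installed and configured):
    #
    #   kafka-assigner -z zookeeper.example.com:2181 clone -b 1 2 -t 5
    #
    # This would add broker 5 as a replica to every partition that currently
    # lists broker 1 or broker 2 in its replica set (see process_cluster below).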
def process_cluster(self):
source_set = set(self.sources)
for partition in self.cluster.partitions(self.args.exclude_topics):
if len(source_set & set([replica.id for replica in partition.replicas])) > 0:
if self.to_broker in partition.replicas:
log.warn("Target broker (ID {0}) is already in the replica list for {1}:{2}".format(self.to_broker.id, partition.topic.name, partition.num))
# If the broker is already in the replica list, it ALWAYS becomes the leader<|fim▁hole|> else:
# If one of the source brokers is currently the leader, the target broker is the leader. Otherwise, the target leader is in second place
if partition.replicas[0].id in self.sources:
partition.add_replica(self.to_broker, 0)
else:
partition.add_replica(self.to_broker, 1)<|fim▁end|> | if self.to_broker != partition.replicas[0]:
partition.swap_replica_positions(self.to_broker, partition.replicas[0]) |
<|file_name|>extendedDataDisplayBuilder.factory.js<|end_file_name|><|fim▁begin|>/**
* @ngdoc service
* @name merchello.models.extendedDataDisplayBuilder
*
* @description
* A utility service that builds ExtendedDataDisplay models
*/
angular.module('merchello.models')
.factory('extendedDataDisplayBuilder',
['genericModelBuilder', 'ExtendedDataDisplay', 'ExtendedDataItemDisplay',
function(genericModelBuilder, ExtendedDataDisplay, ExtendedDataItemDisplay) {
var Constructor = ExtendedDataDisplay;
<|fim▁hole|> transform: function(jsonResult) {
var extendedData = new Constructor();
if (jsonResult !== undefined) {
var items = genericModelBuilder.transform(jsonResult, ExtendedDataItemDisplay);
if(items.length > 0) {
extendedData.items = items;
}
}
return extendedData;
}
};
}]);<|fim▁end|> | return {
createDefault: function() {
return new Constructor();
}, |
<|file_name|>build_numeric.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Generate the body of ieee.numeric_std and numeric_bit from a template.
# The implementation is based only on the specification and on testing (as
# the specifications are often ambiguous).
# The algorithms are very simple: carry ripple adder, restoring division.
# This file is part of GHDL.
# Both this file and the outputs of this file are copyrighted.
# Copyright (C) 2015 Tristan Gingold
#
# GHDL is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2, or (at your option) any later
# version.
#
# GHDL is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License
# along with GCC; see the file COPYING2. If not see
# <http://www.gnu.org/licenses/>.
import re
import sys
# My python 'style' and knowledge is basic... Do not hesitate to comment.
binary_funcs = [ "and", "nand", "or", "nor", "xor" ]
compare_funcs = [ "=", "/=", ">", ">=", "<", "<=" ]
vec_types = ['UNSIGNED', 'SIGNED']
logics = ['bit', 'std']
logic_types = {'bit' : 'bit', 'std': 'sl_x01' }
logic_undefs = {'bit' : "'0'", 'std': "'X'" }
logic = 'xx' # Current logic, either bit or std
v93=False
# Stream to write.
out=sys.stdout
def w(s):
"Write S to the output"
out.write(s)
def logic_type():
return logic_types[logic]
def logic_undef():
return logic_undefs[logic]
def disp_vec_binary(func, typ):
"Generate the body of a vector binary logic function"
res = """
function "{0}" (l, r : {1}) return {1}
is
subtype res_type is {1} (l'length - 1 downto 0);
alias la : res_type is l;
alias ra : {1} (r'length - 1 downto 0) is r;
variable res : res_type;
begin
if la'left /= ra'left then
assert false
report "NUMERIC_STD.""{0}"": arguments are not of the same length"
severity failure;
res := (others => """ + logic_undef() + """);
else
for I in res_type'range loop
res (I) := la (I) {0} ra (I);
end loop;
end if;
return res;
end "{0}";\n"""
w (res.format(func, typ))
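# Quick illustrative driver (not part of the original generator): render one
# operator body to the current output stream so the template expansion can be
# eyeballed. The module-level `logic` global must be set before any disp_*
# helper is called, since logic_type()/logic_undef() key off it.
def sketch_demo_one_op():
    global logic
    logic = 'std'
    disp_vec_binary("and", "UNSIGNED")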
def disp_non_logical_warning(func):
return """
assert NO_WARNING
report "NUMERIC_STD.""{0}"": non logical value detected"
severity warning;""".format(func)
def conv_bit(expr):
if logic == 'std':
return "sl_to_x01 (" + expr + ")"
else:
return expr
def extract_bit(name):
res = "{0}b := " + conv_bit ("{0}a (i)") + ";"
return res.format(name)
def init_carry(func):
if func == '+':
return """
carry := '0';"""
else:
return """
carry := '1';"""
def extract_extend_bit(name,typ):
res = """
if i > {0}a'left then
{0}b := """
if typ == 'UNSIGNED':
res += "'0';"
else:
res += "{0} ({0}'left);"
res += """
else
""" + extract_bit(name) + """
end if;"""
return res.format(name)
def disp_vec_vec_binary(func, typ):
    "Generate the body of a vector/vector arithmetic function"
res = """
function "{0}" (l, r : {1}) return {1}
is
constant lft : integer := MAX (l'length, r'length) - 1;
subtype res_type is {1} (lft downto 0);
alias la : {1} (l'length - 1 downto 0) is l;
alias ra : {1} (r'length - 1 downto 0) is r;
variable res : res_type;
variable lb, rb, carry : """ + logic_type () + """;
begin
if la'left < 0 or ra'left < 0 then
return null_{1};
end if;"""
res += init_carry(func)
res += """
for i in 0 to lft loop"""
res += extract_extend_bit('l', typ)
res += extract_extend_bit('r', typ)
if logic == 'std':
res += """
if lb = 'X' or rb = 'X' then""" + \
disp_non_logical_warning(func) + """
res := (others => 'X');
exit;
end if;"""
if func == '-':
res += """
rb := not rb;"""
res += """
res (i) := compute_sum (carry, rb, lb);
carry := compute_carry (carry, rb, lb);
end loop;
return res;
end "{0}";
"""
w (res.format (func, typ))
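# The generated adders lean on two VHDL helpers, compute_sum and compute_carry,
# which are assumed to implement a one-bit full adder over the chosen logic
# type. For plain 0/1 inputs they behave like this Python model:
def sketch_full_adder(carry, a, b):
    s = carry ^ a ^ b                          # sum output of the full adder
    c = (carry & a) | (carry & b) | (a & b)    # carry output
    return s, c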
def declare_int_var(name, typ):
res = """
variable {0}1, {0}2 : {1};
variable {0}d : nat1;""";
if typ == "INTEGER":
res += """
constant {0}msb : nat1 := boolean'pos({0} < 0);"""
return res.format(name, typ)
def init_int_var(name, typ):
return """
{0}1 := {0};""".format(name)
def extract_int_lsb(name, typ):
res = """
{0}2 := {0}1 / 2;"""
if typ == "INTEGER":
res += """
if {0}1 < 0 then
{0}d := 2 * {0}2 - {0}1;
{0}1 := {0}2 - {0}d;
else
{0}d := {0}1 - 2 * {0}2;
{0}1 := {0}2;
end if;"""
else:
res += """
{0}d := {0}1 - 2 * {0}2;
{0}1 := {0}2;"""
res += """
{0}b := nat1_to_01 ({0}d);"""
return res.format(name,typ)
def check_int_truncated(func, name, typ):
if typ == "INTEGER":
v = "-{0}msb".format(name)
else:
v = "0"
return """
if {1}1 /= {2} then
assert NO_WARNING
report "NUMERIC_STD.""{0}"": vector is truncated"
severity warning;
end if;""".format(func, name, v)
def create_vec_int_dict(func, left, right):
if left in vec_types:
dic = {'vtype': left,
'itype': right,
'vparam': 'l',
'iparam': 'r'}
else:
dic = {'vtype': right,
'itype': left,
'vparam': 'r',
'iparam': 'l'}
dic.update({'ltype': left,
'rtype': right,
'func': func,
'logic': logic_type()})
return dic
def disp_vec_int_binary(func, left, right):
    "Generate the body of a vector/integer binary function"
dic = create_vec_int_dict(func, left, right)
res = """
function "{func}" (l : {ltype}; r : {rtype}) return {vtype}
is
subtype res_type is {vtype} ({vparam}'length - 1 downto 0);
alias {vparam}a : res_type is {vparam};""" + \
declare_int_var (dic["iparam"], dic["itype"]) + """
variable res : res_type;
variable lb, rb, carry : {logic};
begin
if res'length < 0 then
return null_{vtype};
end if;"""
# Initialize carry. For subtraction, use 2-complement.
res += init_carry(func)
res += init_int_var(dic['iparam'], dic['itype']) + """
for i in res'reverse_range loop
""" + extract_bit(dic['vparam']) + "\n" + \
extract_int_lsb(dic['iparam'], dic['itype'])
if logic == 'std':
res += """
if {vparam}b = 'X' then""" + \
disp_non_logical_warning(func) + """
res := (others => 'X');
{iparam}1 := 0;
exit;
end if;"""
# 2-complement for subtraction
if func == '-':
res += """
rb := not rb;"""
res += """
res (i) := compute_sum (carry, rb, lb);
carry := compute_carry (carry, rb, lb);
end loop;""" + \
check_int_truncated(func, dic['iparam'], dic['itype']) + """
return res;
end "{func}";\n"""
w(res.format (**dic))
def disp_vec_int_gcompare(func, left, right):
"Generate comparison function"
dic = create_vec_int_dict(func, left, right)
res = """
function {func} (l : {ltype}; r : {rtype}) return compare_type
is
subtype res_type is {vtype} ({vparam}'length - 1 downto 0);
alias la : res_type is l;""" + \
declare_int_var (dic['iparam'], dic['itype']) + """
variable lb, rb : {logic};
variable res : compare_type;
begin
res := compare_eq;"""
res += init_int_var(dic['iparam'], dic['itype']) + """
for i in {vparam}a'reverse_range loop
""" + extract_bit (dic['vparam']) + \
extract_int_lsb("r", right)
if logic == 'std':
res += """
if {vparam}b = 'X' then
return compare_unknown;
end if;"""
res += """
if lb = '1' and rb = '0' then
res := compare_gt;
elsif lb = '0' and rb = '1' then
res := compare_lt;
end if;
end loop;"""
if func == "ucompare":
res += """
if r1 /= 0 then
res := compare_lt;
end if;"""
else:
res += """
if """ + conv_bit ("l (l'left)") + """ = '1' then
if r >= 0 then
res := compare_lt;
end if;
else
if r < 0 then
res := compare_gt;
end if;
end if;"""
res += """
return res;
end {func};
"""
w(res.format (**dic))
def disp_vec_int_compare(func, left, right):
"Generate comparison function"
dic = create_vec_int_dict(func, left, right)
res = """
function "{func}" (l : {ltype}; r : {rtype}) return boolean
is
subtype res_type is {vtype} ({vparam}'length - 1 downto 0);
alias {vparam}a : res_type is {vparam};""" + \
declare_int_var (dic['iparam'], dic['itype']) + """
variable res : compare_type;
begin
if {vparam}'length = 0 then
assert NO_WARNING
report "NUMERIC_STD.""{func}"": null argument, returning FALSE"
severity warning;
return false;
end if;
res := """
if left == "SIGNED" or right == "SIGNED":
res += "scompare"
else:
res += "ucompare"
if left in vec_types:
res += " (l, r);"
else:
res += " (r, l);"
if logic == 'std':
res += """
if res = compare_unknown then""" + \
disp_non_logical_warning(func) + """
return false;
end if;"""
if left in vec_types:
res += """
return res {func} compare_eq;"""
else:
res += """
return compare_eq {func} res;"""
res += """
end "{func}";
"""
w(res.format (**dic))
def disp_vec_vec_gcompare(func, typ):
"Generate comparison function"
res = """
function {func} (l, r : {typ}) return compare_type
is
constant sz : integer := MAX (l'length, r'length) - 1;
alias la : {typ} (l'length - 1 downto 0) is l;
alias ra : {typ} (r'length - 1 downto 0) is r;
variable lb, rb : {logic};
variable res : compare_type;
begin"""
if typ == 'SIGNED':
res += """
-- Consider sign bit as S * -(2**N).
lb := """ + conv_bit ("la (la'left)") + """;
rb := """ + conv_bit ("ra (ra'left)") + """;
if lb = '1' and rb = '0' then
return compare_lt;
elsif lb = '0' and rb = '1' then
return compare_gt;
else
res := compare_eq;
end if;"""
else:
res += """
res := compare_eq;"""
if typ == 'SIGNED':
res += """
for i in 0 to sz - 1 loop"""
else:
res += """
for i in 0 to sz loop"""
res += extract_extend_bit('l', typ)
res += extract_extend_bit('r', typ)
if logic == 'std':
res += """
if lb = 'X' or rb = 'X' then
return compare_unknown;
end if;"""
res += """
if lb = '1' and rb = '0' then
res := compare_gt;
elsif lb = '0' and rb = '1' then
res := compare_lt;
end if;
end loop;
return res;
end {func};\n"""
w(res.format (func=func, typ=typ, logic=logic_type()))
def disp_vec_vec_compare(func, typ):
"Generate comparison function"
res = """
function "{func}" (l, r : {typ}) return boolean
is
variable res : compare_type;
begin
if l'length = 0 or r'length = 0 then
assert NO_WARNING
report "NUMERIC_STD.""{func}"": null argument, returning FALSE"
severity warning;
return false;
end if;
res := """
if typ == "SIGNED":
res += "scompare"
else:
res += "ucompare"
res += """ (l, r);"""
if logic == 'std':
res += """
if res = compare_unknown then""" + \
disp_non_logical_warning(func) + """
return false;
end if;"""
res += """
return res {func} compare_eq;
end "{func}";\n"""
w(res.format (func=func, typ=typ))
def disp_vec_not(typ):
"Generate vector binary function body"
w("""
function "not" (l : {0}) return {0}
is
subtype res_type is {0} (l'length - 1 downto 0);
alias la : res_type is l;
variable res : res_type;
begin
for I in res_type'range loop
res (I) := not la (I);
end loop;
return res;
end "not";\n""".format(typ))
def disp_resize(typ):
res = """
function resize (ARG : {0}; NEW_SIZE: natural) return {0}
is
alias arg1 : {0} (ARG'length - 1 downto 0) is arg;
variable res : {0} (new_size - 1 downto 0) := (others => '0');
begin
if new_size = 0 then
return null_{0};
end if;
if arg1'length = 0 then
return res;
end if;
if arg1'length > new_size then
-- Reduction."""
if typ == 'SIGNED':
res += """
res (res'left) := arg1 (arg1'left);
res (res'left - 1 downto 0) := arg1 (res'left - 1 downto 0);"""
else:
res += """
res := arg1 (res'range);"""
res += """
else
-- Expansion
res (arg1'range) := arg1;"""
if typ == 'SIGNED':
res += """
res (res'left downto arg1'length) := (others => arg1 (arg1'left));"""
res += """
end if;
return res;
end resize;\n"""
w(res.format(typ))
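# Behavioural sketch of the resize rule encoded by the template above, on
# plain Python bit lists (index 0 = LSB): UNSIGNED truncates or zero-extends
# the high bits, while SIGNED preserves the sign bit when truncating and
# sign-extends when widening.
def sketch_resize(bits, new_size, signed):
    if new_size <= len(bits):
        res = bits[:new_size]
        if signed and res:
            res[-1] = bits[-1]   # keep the original sign bit
        return res
    fill = bits[-1] if (signed and bits) else 0
    return bits + [fill] * (new_size - len(bits))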
def gen_shift(dir, inv):
if (dir == 'left') ^ inv:
res = """
res (res'left downto {opp}count) := arg1 (arg1'left {sub} count downto 0);"""
else:
res = """
res (res'left {sub} count downto 0) := arg1 (arg1'left downto {opp}count);"""
if inv:
return res.format(opp="-", sub="+")
else:
return res.format(opp="", sub="-")
def disp_shift_op(name, typ, dir):
res = """
function {0} (ARG : {1}; COUNT: INTEGER) return {1}
is
subtype res_type is {1} (ARG'length - 1 downto 0);
alias arg1 : res_type is arg;
variable res : res_type := (others => '0');
begin
if res'length = 0 then
return null_{1};
end if;
if count >= 0 and count <= arg1'left then"""
res += gen_shift(dir, False)
res += """
elsif count < 0 and count >= -arg1'left then"""
res += gen_shift(dir, True)
res += """
end if;
return res;
end {0};\n"""
w(res.format(name, typ))
def disp_shift(name, typ, dir):
res = """
function {0} (ARG : {1}; COUNT: NATURAL) return {1}
is
subtype res_type is {1} (ARG'length - 1 downto 0);
alias arg1 : res_type is arg;
variable res : res_type := (others => """
if typ == 'SIGNED' and dir == 'right':
res += "arg1 (arg1'left)"
else:
res += "'0'"
res += """);
begin
if res'length = 0 then
return null_{1};
end if;
if count <= arg1'left then"""
res += gen_shift(dir, False)
res += """
end if;
return res;
end {0};\n"""
w(res.format(name, typ))
def disp_rotate(name, typ, dir):
if 'rotate' in name:
count_type = 'natural'
op = 'rem'
else:
count_type = 'integer'
op = 'mod'
res = """
function {0} (ARG : {1}; COUNT: {2}) return {1}
is
subtype res_type is {1} (ARG'length - 1 downto 0);
alias arg1 : res_type is arg;
variable res : res_type := (others => '0');
variable cnt : natural;
begin
if res'length = 0 then
return null_{1};
end if;
cnt := count """ + op + " res'length;"
if dir == 'left':
res += """
res (res'left downto cnt) := arg1 (res'left - cnt downto 0);
res (cnt - 1 downto 0) := arg1 (res'left downto res'left - cnt + 1);"""
else:
res += """
res (res'left - cnt downto 0) := arg1 (res'left downto cnt);
res (res'left downto res'left - cnt + 1) := arg1 (cnt - 1 downto 0);"""
res += """
return res;
end {0};\n"""
w(res.format(name, typ, count_type))
def disp_vec_vec_mul(func, typ):
res = """
function "{0}" (L, R : {1}) return {1}
is
alias la : {1} (L'Length - 1 downto 0) is l;
alias ra : {1} (R'Length - 1 downto 0) is r;
variable res : {1} (L'length + R'Length -1 downto 0) := (others => '0');
variable rb, lb, vb, carry : """ + logic_type() + """;
begin
if la'length = 0 or ra'length = 0 then
return null_{1};
end if;
-- Shift and add L.
for i in natural range 0 to ra'left """
if typ == 'SIGNED':
res += "- 1 "
res += """loop
""" + extract_bit ('r') + """
if rb = '1' then
-- Compute res := res + shift_left (l, i).
carry := '0';
for j in la'reverse_range loop
lb := la (j);
vb := res (i + j);
res (i + j) := compute_sum (carry, vb, lb);
carry := compute_carry (carry, vb, lb);
end loop;"""
if typ == 'UNSIGNED':
res += """
-- Propagate carry.
for j in i + la'length to res'left loop
exit when carry = '0';
vb := res (j);
res (j) := carry xor vb;
carry := carry and vb;
end loop;"""
else:
res += """
-- Sign extend and propagate carry.
lb := la (la'left);
for j in i + l'length to res'left loop
vb := res (j);
res (j) := compute_sum (carry, vb, lb);
carry := compute_carry (carry, vb, lb);
end loop;"""
if logic == 'std':
res += """
elsif rb = 'X' then""" + \
disp_non_logical_warning (func)
res += """
end if;
end loop;"""
if typ == 'SIGNED':
res += """
if ra (ra'left) = '1' then
-- R is a negative number. It is considered as:
-- -2**n + (Rn-1 Rn-2 ... R0).
-- Compute res := res - 2**n * l.
carry := '1';
for i in la'reverse_range loop
vb := res (ra'length - 1 + i);
lb := not la (i);
res (ra'length - 1+ i) := compute_sum (carry, vb, lb);
carry := compute_carry (carry, vb, lb);
end loop;
vb := res (res'left);
lb := not la (la'left);
res (res'left) := compute_sum (carry, vb, lb);
end if;"""
res += """
return res;
end "{0}";\n"""
w(res.format(func,typ))
def disp_vec_int_mul(left, right):
res = """
function "*" (L : {0}; R : {1}) return {0}
is
constant size : natural := l'length;
begin
if size = 0 then
return null_{0};
end if;
return l * to_{0} (r, size);
end "*";\n"""
w (res.format(left,right))
def disp_int_vec_mul(left, right):
res = """
function "*" (L : {0}; R : {1}) return {1}
is
constant size : natural := r'length;
begin
if size = 0 then
return null_{1};
end if;
return r * to_{1} (l, size);
end "*";\n"""
w (res.format(left,right))
def disp_neg(func):
res = """
function "{func}" (ARG : SIGNED) return SIGNED
is
subtype arg_type is SIGNED (ARG'length - 1 downto 0);
alias arga : arg_type is arg;
variable res : arg_type;
variable carry, a : """ + logic_type() + """;
begin
if arga'length = 0 then
return null_signed;
end if;"""
if logic == 'std':
res += """
if has_0x (arga) = 'X' then""" + \
disp_non_logical_warning("-") + """
return arg_type'(others => 'X');
end if;"""
if func == 'abs':
res += """
if arga (arga'left) = '0' then
return arga;
end if;"""
res += """
carry := '1';
for i in arga'reverse_range loop
a := not arga (i);
res (i) := carry xor a;
carry := carry and a;
end loop;
return res;
end "{func}";\n"""
w(res.format(func=func))
def disp_has_0x(typ):
res = """
function has_0x (a : {0}) return {1}
is
variable res : {1} := '0';
begin
for i in a'range loop"""
if logic == 'std':
res += """
if a (i) = 'X' then
return 'X';
end if;"""
res += """
res := res or a (i);
end loop;
return res;
end has_0x;\n"""
w(res.format(typ, logic_type()))
def disp_size():
w("""<|fim▁hole|> -- At least one bit (even for 0).
variable res : natural := 1;
variable n1 : natural := n;
begin
while n1 > 1 loop
res := res + 1;
n1 := n1 / 2;
end loop;
return res;
end size_unsigned;\n""")
w("""
function size_signed (n : integer) return natural
is
variable res : natural := 1;
variable n1 : natural;
begin
if n >= 0 then
n1 := n;
else
-- Use /N = -X -1 = -(X + 1) (No overflow).
n1 := -(n + 1);
end if;
while n1 /= 0 loop
res := res + 1;
n1 := n1 / 2;
end loop;
return res;
end size_signed;\n""")
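# Python models of the two sizing helpers emitted above: the minimum number of
# bits needed to represent n as an unsigned value, respectively as a two's
# complement signed value. They mirror the VHDL loops exactly.
def sketch_size_unsigned(n):
    res, n1 = 1, n
    while n1 > 1:
        res += 1
        n1 //= 2
    return res

def sketch_size_signed(n):
    res = 1
    n1 = n if n >= 0 else -(n + 1)   # same overflow-safe trick as the VHDL
    while n1 != 0:
        res += 1
        n1 //= 2
    return res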
def disp_divmod():
w("""
-- All index range are normalized (N downto 0).
-- NUM and QUOT have the same range.
-- DEM and REMAIN have the same range.
-- No 'X'.
procedure divmod (num, dem : UNSIGNED; quot, remain : out UNSIGNED)
is
variable reg : unsigned (dem'left + 1 downto 0) := (others => '0');
variable sub : unsigned (dem'range) := (others => '0');
variable carry, d : """ + logic_type () + """;
begin
for i in num'range loop
-- Shift
reg (reg'left downto 1) := reg (reg'left - 1 downto 0);
reg (0) := num (i);
-- Substract
carry := '1';
for j in dem'reverse_range loop
d := not dem (j);
sub (j) := compute_sum (carry, reg (j), d);
carry := compute_carry (carry, reg (j), d);
end loop;
carry := compute_carry (carry, reg (reg'left), '1');
-- Test
if carry = '0' then
-- Greater than
quot (i) := '0';
else
quot (i) := '1';
reg (reg'left) := '0';
reg (sub'range) := sub;
end if;
end loop;
remain := reg (dem'range);
end divmod;
""")
def disp_vec_vec_udiv(func):
res = """
function "{func}" (L, R : UNSIGNED) return UNSIGNED
is
subtype l_type is UNSIGNED (L'length - 1 downto 0);
subtype r_type is UNSIGNED (R'length - 1 downto 0);
alias la : l_type is l;
alias ra : r_type is r;
variable quot : l_type;
variable rema : r_type;
variable r0 : """ + logic_type() + """ := has_0x (r);
begin
if la'length = 0 or ra'length = 0 then
return null_unsigned;
end if;"""
if logic == 'std':
res += """
if has_0x (l) = 'X' or r0 = 'X' then""" + \
disp_non_logical_warning ('/') + """
return l_type'(others => 'X');
end if;"""
res += """
assert r0 /= '0'
report "NUMERIC_STD.""{func}"": division by 0"
severity error;
divmod (la, ra, quot, rema);"""
if func == '/':
res += """
return quot;"""
else:
res += """
return rema;"""
res += """
end "{func}";\n"""
w(res.format(func=func))
def disp_vec_int_udiv(func):
res = """
function "{func}" (L : UNSIGNED; R : NATURAL) return UNSIGNED
is
constant r_size : natural := size_unsigned (r);
begin
if l'length = 0 then
return null_unsigned;
end if;"""
if func in ['mod', 'rem']:
res += """
return resize (l {func} to_unsigned (r, r_size), l'length);"""
else:
res += """
return l {func} to_unsigned (r, r_size);"""
res += """
end "{func}";\n"""
w(res.format(func=func))
res = """
function "{func}" (L : NATURAL; R : UNSIGNED) return UNSIGNED
is
constant l_size : natural := size_unsigned (l);
begin
if r'length = 0 then
return null_unsigned;
end if;"""
if func == '/':
res += """
return resize (to_unsigned (l, l_size) {func} r, r'length);"""
else:
res += """
return to_unsigned (l, l_size) {func} r;"""
res += """
end "{func}";\n"""
w(res.format(func=func))
def disp_vec_vec_sdiv(func):
res = """
function "{func}" (L, R : SIGNED) return SIGNED
is
subtype l_type is SIGNED (L'length - 1 downto 0);
subtype r_type is SIGNED (R'length - 1 downto 0);
alias la : l_type is l;
alias ra : r_type is r;
subtype l_utype is UNSIGNED (l_type'range);
subtype r_utype is UNSIGNED (r_type'range);
variable lu : l_utype;
variable ru : r_utype;
variable quot : l_utype;
variable rema : r_utype;
variable r0 : """ + logic_type() + """ := has_0x (r);
begin
if la'length = 0 or ra'length = 0 then
return null_signed;
end if;"""
if logic == 'std':
res += """
if has_0x (l) = 'X' or r0 = 'X' then""" + \
disp_non_logical_warning (func) + """
return l_type'(others => 'X');
end if;"""
res += """
assert r0 /= '0'
report "NUMERIC_STD.""{func}"": division by 0"
severity error;"""
res += """
if la (la'left) = '1' then
lu := unsigned (-la);
else
lu := unsigned (la);
end if;
if ra (ra'left) = '1' then
ru := unsigned (-ra);
else
ru := unsigned (ra);
end if;
divmod (lu, ru, quot, rema);"""
if func == '/':
res += """
if (ra (ra'left) xor la (la'left)) = '1' then
return -signed (quot);
else
return signed (quot);
end if;"""
elif func == 'rem':
res += """
-- Result of rem has the sign of the dividend.
if la (la'left) = '1' then
return -signed (rema);
else
return signed (rema);
end if;"""
elif func == 'mod':
res += """
-- Result of mod has the sign of the divisor.
if rema = r_utype'(others => '0') then
-- If the remainder is 0, then the modulus is 0.
return signed (rema);
else
if ra (ra'left) = '1' then
if la (la'left) = '1' then
return -signed (rema);
else
return ra + signed (rema);
end if;
else
if la (la'left) = '1' then
return ra - signed (rema);
else
return signed (rema);
end if;
end if;
end if;"""
res += """
end "{func}";\n"""
w(res.format(func=func))
def disp_vec_int_sdiv(func):
res = """
function "{func}" (L : SIGNED; R : INTEGER) return SIGNED
is
constant r_size : natural := size_signed (r);
begin
if l'length = 0 then
return null_signed;
end if;"""
if func == '/':
res += """
return l {func} to_signed (r, r_size);"""
else:
res += """
return resize (l {func} to_signed (r, r_size), l'length);"""
res += """
end "{func}";\n"""
w(res.format(func=func))
res = """
function "{func}" (L : INTEGER; R : SIGNED) return SIGNED
is
constant l_size : natural := size_signed (l);
begin
if r'length = 0 then
return null_signed;
end if;"""
if func == '/':
res += """
return resize (to_signed (l, max (l_size, r'length)) {func} r, r'length);"""
else:
res += """
return to_signed (l, l_size) {func} r;"""
res += """
end "{func}";\n"""
w(res.format(func=func))
def disp_all_log_funcs():
"Generate all function bodies for logic operators"
for t in vec_types:
disp_resize(t)
for v in vec_types:
disp_vec_not(v)
for f in binary_funcs:
for v in vec_types:
disp_vec_binary(f, v)
disp_vec_vec_gcompare("ucompare", "UNSIGNED")
disp_vec_vec_gcompare("scompare", "SIGNED")
disp_vec_int_gcompare("ucompare", "UNSIGNED", "NATURAL")
disp_vec_int_gcompare("scompare", "SIGNED", "INTEGER")
for f in compare_funcs:
disp_vec_vec_compare(f, "UNSIGNED")
disp_vec_vec_compare(f, "SIGNED")
disp_vec_int_compare(f, "UNSIGNED", "NATURAL")
disp_vec_int_compare(f, "NATURAL", "UNSIGNED")
disp_vec_int_compare(f, "SIGNED", "INTEGER")
disp_vec_int_compare(f, "INTEGER", "SIGNED")
for t in vec_types:
for d in ['left', 'right']:
disp_shift('shift_' + d, t, d);
for d in ['left', 'right']:
disp_rotate('rotate_' + d, t, d);
if v93:
disp_shift_op('"sll"', t, 'left')
disp_shift_op('"srl"', t, 'right')
disp_rotate('"rol"', t, 'left')
disp_rotate('"ror"', t, 'right')
def disp_match(typ):
res = """
function std_match (l, r : {0}) return boolean
is
alias la : {0} (l'length downto 1) is l;
alias ra : {0} (r'length downto 1) is r;
begin
if la'left = 0 or ra'left = 0 then
assert NO_WARNING
report "NUMERIC_STD.STD_MATCH: null argument, returning false"
severity warning;
return false;
elsif la'left /= ra'left then
assert NO_WARNING
report "NUMERIC_STD.STD_MATCH: args length mismatch, returning false"
severity warning;
return false;
else
for i in la'range loop
if not match_table (la (i), ra (i)) then
return false;
end if;
end loop;
return true;
end if;
end std_match;\n"""
w(res.format(typ))
def disp_all_match_funcs():
disp_match('std_ulogic_vector');
disp_match('std_logic_vector');
disp_match('UNSIGNED');
disp_match('SIGNED');
def disp_all_arith_funcs():
"Generate all function bodies for logic operators"
for op in ['+', '-']:
disp_vec_vec_binary(op, "UNSIGNED")
disp_vec_vec_binary(op, "SIGNED")
disp_vec_int_binary(op, "UNSIGNED", "NATURAL")
disp_vec_int_binary(op, "NATURAL", "UNSIGNED")
disp_vec_int_binary(op, "SIGNED", "INTEGER")
disp_vec_int_binary(op, "INTEGER", "SIGNED")
disp_vec_vec_mul('*', 'UNSIGNED')
disp_vec_vec_mul('*', 'SIGNED')
disp_vec_int_mul('UNSIGNED', 'NATURAL')
disp_vec_int_mul('SIGNED', 'INTEGER')
disp_int_vec_mul('NATURAL', 'UNSIGNED')
disp_int_vec_mul('INTEGER', 'SIGNED')
disp_has_0x('UNSIGNED')
disp_divmod()
disp_size()
disp_vec_vec_udiv('/')
disp_vec_int_udiv('/')
disp_vec_vec_udiv('rem')
disp_vec_int_udiv('rem')
disp_vec_vec_udiv('mod')
disp_vec_int_udiv('mod')
disp_has_0x('SIGNED')
disp_neg("-")
disp_neg("abs")
disp_vec_vec_sdiv('/')
disp_vec_int_sdiv('/')
disp_vec_vec_sdiv('rem')
disp_vec_int_sdiv('rem')
disp_vec_vec_sdiv('mod')
disp_vec_int_sdiv('mod')
# Patterns to replace
pats = {' @LOG\n' : disp_all_log_funcs,
' @ARITH\n' : disp_all_arith_funcs,
' @MATCH\n' : disp_all_match_funcs }
spec_file='numeric_std.vhdl'
#proto_file='numeric_std-body.proto'
def gen_body(proto_file):
w('-- This -*- vhdl -*- file was generated from ' + proto_file + '\n')
for line in open(proto_file):
if line in pats:
pats[line]()
continue
w(line)
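# gen_body streams the .proto template through unchanged except for the
# placeholder lines registered in pats: when a template line matches one of
# the keys (e.g. the '@LOG' line), the line itself is swallowed and the
# matching generator is invoked to write the expanded function bodies instead.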
# Copy spec
for log in logics:
for std in ['87', '93']:
out=open('numeric_' + log + '.v' + std, 'w')
for line in open('numeric_' + log + '.proto'):
if line == ' @COMMON\n':
for lcom in open('numeric_common.proto'):
if lcom[0:2] == '--':
pass
elif std == '87' and ('"xnor"' in lcom
or '"sll"' in lcom
or '"srl"' in lcom
or '"rol"' in lcom
or '"ror"' in lcom):
w("--" + lcom[2:])
else:
w(lcom)
else:
w(line)
out.close()
# Generate bodies
v93=False
for l in logics:
logic = l
out=open('numeric_{0}-body.v87'.format(l), 'w')
gen_body('numeric_{0}-body.proto'.format(l))
out.close()
v93=True
binary_funcs.append("xnor")
for l in logics:
logic = l
out=open('numeric_{0}-body.v93'.format(l), 'w')
gen_body('numeric_{0}-body.proto'.format(l))
out.close()<|fim▁end|> | function size_unsigned (n : natural) return natural
is |
<|file_name|>warm_starting_util.py<|end_file_name|><|fim▁begin|># Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities to warm-start TF.Learn Estimators."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import six
from tensorflow.python.framework import ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables as variables_lib
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import checkpoint_ops
from tensorflow.python.training import checkpoint_utils<|fim▁hole|>from tensorflow.python.util.tf_export import tf_export
@tf_export("train.VocabInfo", "estimator.VocabInfo")
class VocabInfo(
collections.namedtuple("VocabInfo", [
"new_vocab",
"new_vocab_size",
"num_oov_buckets",
"old_vocab",
"old_vocab_size",
"backup_initializer",
])):
"""Vocabulary information for warm-starting.
See @{tf.estimator.WarmStartSettings$WarmStartSettings} for examples of using
VocabInfo to warm-start.
Attributes:
new_vocab: [Required] A path to the new vocabulary file (used with the
model to be trained).
new_vocab_size: [Required] An integer indicating how many entries of the new
vocabulary will be used in training.
num_oov_buckets: [Required] An integer indicating how many OOV buckets are
associated with the vocabulary.
old_vocab: [Required] A path to the old vocabulary file (used with the
checkpoint to be warm-started from).
old_vocab_size: [Optional] An integer indicating how many entries of the old
vocabulary were used in the creation of the checkpoint. If not provided,
the entire old vocabulary will be used.
backup_initializer: [Optional] A variable initializer used for variables
corresponding to new vocabulary entries and OOV. If not provided, these
entries will be zero-initialized.
"""
def __new__(cls,
new_vocab,
new_vocab_size,
num_oov_buckets,
old_vocab,
old_vocab_size=-1,
backup_initializer=None):
return super(VocabInfo, cls).__new__(
cls,
new_vocab,
new_vocab_size,
num_oov_buckets,
old_vocab,
old_vocab_size,
backup_initializer,
)
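# A minimal usage sketch (file names and sizes below are made up for
# illustration):
#
#   vocab_info = VocabInfo(
#       new_vocab='new_vocab.txt',
#       new_vocab_size=100,
#       num_oov_buckets=1,
#       old_vocab='old_vocab.txt',
#       old_vocab_size=80)
#
# Mapping a variable name to this VocabInfo in warm_start() remaps checkpoint
# rows from the old vocabulary onto the new one; rows with no old counterpart
# fall back to backup_initializer (or zeros when it is None).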
def _is_variable(x):
return (isinstance(x, variables_lib.Variable) or
isinstance(x, resource_variable_ops.ResourceVariable))
def _infer_var_name(var):
"""Returns name of the `var`.
Args:
var: A list. The list can contain either of the following:
(i) A single `Variable`
(ii) A single `ResourceVariable`
(iii) Multiple `Variable` objects which must be slices of the same larger
variable.
(iv) A single `PartitionedVariable`
Returns:
Name of the `var`
"""
name_to_var_dict = saver.BaseSaverBuilder.OpListToDict(var)
if len(name_to_var_dict) > 1:
raise TypeError("`var` = %s passed as arg violates the constraints. "
"name_to_var_dict = %s" % (var, name_to_var_dict))
return list(name_to_var_dict.keys())[0]
def _warm_start_var(var, prev_ckpt, prev_tensor_name=None):
"""Warm-starts given variable from `prev_tensor_name` tensor in `prev_ckpt`.
Args:
var: Current graph's variable that needs to be warm-started (initialized).
Can be either of the following:
(i) `Variable`
(ii) `ResourceVariable`
(iii) list of `Variable`: The list must contain slices of the same larger
variable.
(iv) `PartitionedVariable`
prev_ckpt: A string specifying the directory with checkpoint file(s) or path
to checkpoint. The given checkpoint must have tensor with name
`prev_tensor_name` (if not None) or tensor with name same as given `var`.
prev_tensor_name: Name of the tensor to lookup in provided `prev_ckpt`. If
None, we lookup tensor with same name as given `var`.
"""
if _is_variable(var):
current_var_name = _infer_var_name([var])
elif isinstance(var, list) and all(_is_variable(v) for v in var):
current_var_name = _infer_var_name(var)
elif isinstance(var, variables_lib.PartitionedVariable):
current_var_name = _infer_var_name([var])
var = var._get_variable_list() # pylint: disable=protected-access
else:
raise TypeError(
"var MUST be one of the following: a Variable, list of Variable or "
"PartitionedVariable, but is {}".format(type(var)))
if not prev_tensor_name:
# Assume tensor name remains the same.
prev_tensor_name = current_var_name
checkpoint_utils.init_from_checkpoint(prev_ckpt, {prev_tensor_name: var})
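# For example (names illustrative), _warm_start_var(my_var, '/tmp/ckpt',
# prev_tensor_name='old_scope/my_var') initializes my_var from the checkpoint
# tensor 'old_scope/my_var'; with prev_tensor_name omitted, the variable's own
# name is looked up in the checkpoint instead.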
# pylint: disable=protected-access
# Accesses protected members of tf.Variable to reset the variable's internal
# state.
def _warm_start_var_with_vocab(var,
current_vocab_path,
current_vocab_size,
prev_ckpt,
prev_vocab_path,
previous_vocab_size=-1,
current_oov_buckets=0,
prev_tensor_name=None,
initializer=None):
"""Warm-starts given variable from `prev_tensor_name` tensor in `prev_ckpt`.
Use this method when the `var` is backed by vocabulary. This method stitches
the given `var` such that values corresponding to individual features in the
vocabulary remain consistent irrespective of changing order of the features
between old and new vocabularies.
Args:
var: Current graph's variable that needs to be warm-started (initialized).
Can be either of the following:
(i) `Variable`
(ii) `ResourceVariable`
(iii) list of `Variable`: The list must contain slices of the same larger
variable.
(iv) `PartitionedVariable`
current_vocab_path: Path to the vocab file used for the given `var`.
current_vocab_size: An `int` specifying the number of entries in the current
vocab.
prev_ckpt: A string specifying the directory with checkpoint file(s) or path
to checkpoint. The given checkpoint must have tensor with name
`prev_tensor_name` (if not None) or tensor with name same as given `var`.
prev_vocab_path: Path to the vocab file used for the tensor in `prev_ckpt`.
previous_vocab_size: If provided, will constrain previous vocab to the first
`previous_vocab_size` entries. -1 means use the entire previous vocab.
current_oov_buckets: An `int` specifying the number of out-of-vocabulary
buckets used for given `var`.
prev_tensor_name: Name of the tensor to lookup in provided `prev_ckpt`. If
None, we lookup tensor with same name as given `var`.
initializer: Variable initializer to be used for missing entries. If None,
missing entries will be zero-initialized.
Raises:
ValueError: If required args are not provided.
"""
if not (current_vocab_path and current_vocab_size and prev_ckpt and
prev_vocab_path):
raise ValueError("Invalid args: Must provide all of [current_vocab_path, "
"current_vocab_size, prev_ckpt, prev_vocab_path}.")
if _is_variable(var):
var = [var]
elif isinstance(var, list) and all(_is_variable(v) for v in var):
var = var
elif isinstance(var, variables_lib.PartitionedVariable):
var = var._get_variable_list()
else:
raise TypeError(
"var MUST be one of the following: a Variable, list of Variable or "
"PartitionedVariable, but is {}".format(type(var)))
if not prev_tensor_name:
# Assume tensor name remains the same.
prev_tensor_name = _infer_var_name(var)
for v in var:
v_shape = v.get_shape().as_list()
slice_info = v._get_save_slice_info()
partition_info = None
if slice_info:
partition_info = variable_scope._PartitionInfo(
full_shape=slice_info.full_shape,
var_offset=slice_info.var_offset)
# TODO(eddz): Support cases where class vocabularies need remapping too.
init = checkpoint_ops._load_and_remap_matrix_initializer(
ckpt_path=checkpoint_utils._get_checkpoint_filename(prev_ckpt),
old_tensor_name=prev_tensor_name,
new_row_vocab_size=current_vocab_size,
new_col_vocab_size=v_shape[1],
old_row_vocab_size=previous_vocab_size,
old_row_vocab_file=prev_vocab_path,
new_row_vocab_file=current_vocab_path,
old_col_vocab_file=None,
new_col_vocab_file=None,
num_row_oov_buckets=current_oov_buckets,
num_col_oov_buckets=0,
initializer=initializer)
new_init_val = ops.convert_to_tensor(
init(shape=v_shape, partition_info=partition_info))
v._initializer_op = state_ops.assign(v, new_init_val)
# pylint: enable=protected-access
@tf_export("train.warm_start")
def warm_start(ckpt_to_initialize_from,
vars_to_warm_start=".*",
var_name_to_vocab_info=None,
var_name_to_prev_var_name=None):
"""Warm-starts a model using the given settings.
If you are using a tf.estimator.Estimator, this will automatically be called
during training.
Args:
ckpt_to_initialize_from: [Required] A string specifying the directory with
checkpoint file(s) or path to checkpoint from which to warm-start the
model parameters.
vars_to_warm_start: [Optional] A regular expression that captures which
variables to warm-start (see tf.get_collection). Defaults to `'.*'`,
which warm-starts all variables. If `None` is explicitly given, only
variables specified in `var_name_to_vocab_info` will be warm-started.
var_name_to_vocab_info: [Optional] Dict of variable names (strings) to
VocabInfo. The variable names should be "full" variables, not the names
of the partitions. If not explicitly provided, the variable is assumed to
have no vocabulary.
var_name_to_prev_var_name: [Optional] Dict of variable names (strings) to
name of the previously-trained variable in `ckpt_to_initialize_from`. If
not explicitly provided, the name of the variable is assumed to be same
between previous checkpoint and current model.
Raises:
ValueError: If the WarmStartSettings contains prev_var_name or VocabInfo
configuration for variable names that are not used. This is to ensure
a stronger check for variable configuration than relying on users to
examine the logs.
"""
if var_name_to_vocab_info is None:
var_name_to_vocab_info = {}
if var_name_to_prev_var_name is None:
var_name_to_prev_var_name = {}
logging.info("Warm-starting from: %s", (ckpt_to_initialize_from,))
# We have to deal with partitioned variables, since get_collection flattens
# out the list.
grouped_variables = {}
# Both vars_to_warm_start = '.*' and
# vars_to_warm_start = None will match everything here.
for v in ops.get_collection(
# TODO(eddz): Allow for different collections here (to support
# warm-starting accumulators).
ops.GraphKeys.TRAINABLE_VARIABLES,
scope=vars_to_warm_start):
if not isinstance(v, list):
var_name = _infer_var_name([v])
else:
var_name = _infer_var_name(v)
grouped_variables.setdefault(var_name, []).append(v)
# Keep track of which var_names in var_name_to_prev_var_name and
# var_name_to_vocab_info have been used. Err on the safer side by throwing an
# exception if any are unused by the end of the loop. It is easy to misname
# a variable during this configuration, in which case without this check, we
# would fail to warm-start silently.
prev_var_name_used = set()
vocab_info_used = set()
for var_name, variable in six.iteritems(grouped_variables):
prev_var_name = var_name_to_prev_var_name.get(var_name)
if prev_var_name:
prev_var_name_used.add(var_name)
vocab_info = var_name_to_vocab_info.get(var_name)
if vocab_info:
vocab_info_used.add(var_name)
logging.info(
"Warm-starting variable: {}; current_vocab: {} current_vocab_size: {}"
" prev_vocab: {} prev_vocab_size: {} current_oov: {} prev_tensor: {}"
" initializer: {}".format(
var_name,
vocab_info.new_vocab,
vocab_info.new_vocab_size,
vocab_info.old_vocab,
(vocab_info.old_vocab_size if vocab_info.old_vocab_size > 0
else "All"),
vocab_info.num_oov_buckets,
prev_var_name or "Unchanged",
vocab_info.backup_initializer or "zero-initialized"))
_warm_start_var_with_vocab(
variable,
current_vocab_path=vocab_info.new_vocab,
current_vocab_size=vocab_info.new_vocab_size,
prev_ckpt=ckpt_to_initialize_from,
prev_vocab_path=vocab_info.old_vocab,
previous_vocab_size=vocab_info.old_vocab_size,
current_oov_buckets=vocab_info.num_oov_buckets,
prev_tensor_name=prev_var_name,
initializer=vocab_info.backup_initializer)
else:
# For the special value of vars_to_warm_start = None,
# we only warm-start variables with explicitly specified vocabularies.
if vars_to_warm_start:
logging.info("Warm-starting variable: {}; prev_var_name: {}".format(
var_name, prev_var_name or "Unchanged"))
# Because we use a default empty list in grouped_variables, single
# unpartitioned variables will be lists here, which we rectify in order
# for init_from_checkpoint logic to work correctly.
if len(variable) == 1:
variable = variable[0]
_warm_start_var(variable, ckpt_to_initialize_from, prev_var_name)
prev_var_name_not_used = set(
var_name_to_prev_var_name.keys()) - prev_var_name_used
vocab_info_not_used = set(var_name_to_vocab_info.keys()) - vocab_info_used
if prev_var_name_not_used:
raise ValueError(
"You provided the following variables in "
"var_name_to_prev_var_name that were not used: "
"{0}. Perhaps you misspelled them? Here is the list of viable "
"variable names: {1}".format(prev_var_name_not_used,
grouped_variables.keys()))
if vocab_info_not_used:
raise ValueError(
"You provided the following variables in "
"var_name_to_vocab_info that were not used: {0}. "
" Perhaps you misspelled them? Here is the list of viable variable "
"names: {1}".format(vocab_info_not_used, grouped_variables.keys()))<|fim▁end|> | from tensorflow.python.training import saver |
<|file_name|>utils_test.go<|end_file_name|><|fim▁begin|>package main
import (
"time"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
var _ = Describe("Utils", func() {
Describe("absPathToFile", func() {
It("should return provided absolute path as is", func() {
Expect(absPathToFile("/lorem/ipsum/dolor")).To(
Equal("/lorem/ipsum/dolor"),
)
})
It("should expand provided relative path", func() {
Expect(absPathToFile("./lorem/ipsum/dolor")).To(
Equal(appPath() + "/lorem/ipsum/dolor"),
)
})
})
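// A plausible implementation satisfying the expectations above
// (sketch only; the real absPathToFile lives in the package under test
// and this assumes a "path/filepath" import):
//
//	func absPathToFile(path string) string {
//		if filepath.IsAbs(path) {
//			return path
//		}
//		return filepath.Join(appPath(), path)
//	}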
Describe("checkTimeFormat", func() {
It("should return true for valid datetime format", func() {
Expect(checkTimeFormat("2014-08-08T01:02:03")).To(BeTrue())
Expect(checkTimeFormat("2014-08-08T01:02:03+06:00")).To(BeTrue())
Expect(checkTimeFormat("2014-08-08T01:02:03.123")).To(BeTrue())
Expect(checkTimeFormat("2014-08-08T01:02:03.123+06:00")).To(BeTrue())
})
It("should return false for invalid datetime format", func() {
Expect(checkTimeFormat("2014-08-08T01:02:033")).To(BeFalse())
})
})
Describe("checkDateTimeFormat", func() {<|fim▁hole|> Expect(checkDateTimeFormat("2014-08-08")).To(BeTrue())
})
It("should return true for valid datetime format", func() {
Expect(checkDateTimeFormat("2014-08-08T01:02:03")).To(BeTrue())
Expect(checkDateTimeFormat("2014-08-08T01:02:03+06:00")).To(BeTrue())
Expect(checkDateTimeFormat("2014-08-08T01:02:03.123")).To(BeTrue())
Expect(checkDateTimeFormat("2014-08-08T01:02:03.123+06:00")).To(BeTrue())
})
It("should return false for invalid date format", func() {
Expect(checkDateTimeFormat("2014-08-088")).To(BeFalse())
})
It("should return false for invalid datetime format", func() {
Expect(checkDateTimeFormat("2014-08-08T01:02:033")).To(BeFalse())
})
})
Describe("parseTime", func() {
It("should parse time", func() {
result, err := parseTime("2014-09-08T11:12:13.321")
Expect(result).To(
Equal(time.Date(2014, 9, 8, 11, 12, 13, 321000000, time.Local)),
)
Expect(err).NotTo(HaveOccurred())
})
Context("when provided string has invalid format", func() {
It("should return error", func() {
_, err := parseTime("2014-09-08T11:12:134.321")
Expect(err).To(HaveOccurred())
})
})
})
Describe("parseDateTime", func() {
It("should parse time", func() {
result, err := parseDateTime("2014-09-08T11:12:13.321", false)
Expect(result).To(
Equal(time.Date(2014, 9, 8, 11, 12, 13, 321000000, time.Local)),
)
Expect(err).NotTo(HaveOccurred())
})
It("should parse date and set time to the beginning of day", func() {
result, err := parseDateTime("2014-09-08", false)
Expect(result).To(Equal(time.Date(2014, 9, 8, 0, 0, 0, 0, time.Local)))
Expect(err).NotTo(HaveOccurred())
})
Context("when clockToEnd is true", func() {
It("should parse date and set time to the end of day", func() {
result, err := parseDateTime("2014-09-08", true)
Expect(result).To(Equal(time.Date(2014, 9, 8, 23, 59, 59, 999999999, time.Local)))
Expect(err).NotTo(HaveOccurred())
})
})
Context("when provided string has invalid format", func() {
It("should return error", func() {
_, err := parseDateTime("2014-09-088", false)
Expect(err).To(HaveOccurred())
})
})
})
Describe("uniqStrings", func() {
It("should remove dublicated items from array", func() {
input := []string{"fff", "fff"}
Expect(uniqStrings(input)).To(Equal([]string{"fff"}))
})
It("should return array with all elements from provided one", func() {
input := []string{"fff", "fff2"}
Expect(uniqStrings(input)).To(HaveLen(len(input)))
Expect(uniqStrings(input)).To(ConsistOf(input))
})
})
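// uniqStrings is expected to keep first occurrences and drop duplicates; a
// simple map-based sketch consistent with the specs above (hypothetical,
// for illustration only):
//
//	func uniqStrings(in []string) []string {
//		seen := make(map[string]struct{}, len(in))
//		out := make([]string, 0, len(in))
//		for _, s := range in {
//			if _, ok := seen[s]; !ok {
//				seen[s] = struct{}{}
//				out = append(out, s)
//			}
//		}
//		return out
//	}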
})<|fim▁end|> | It("should return true for valid date format", func() { |
<|file_name|>P4COMSTR.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Test setting the $P4COMSTR variable.
"""
import os.path
import TestSCons
_python_ = TestSCons._python_
test = TestSCons.TestSCons()
test.subdir('Perforce', ['Perforce', 'sub'], 'sub')
sub_Perforce = os.path.join('sub', 'Perforce')
sub_SConscript = os.path.join('sub', 'SConscript')
sub_all = os.path.join('sub', 'all')
sub_ddd_in = os.path.join('sub', 'ddd.in')
sub_ddd_out = os.path.join('sub', 'ddd.out')
sub_eee_in = os.path.join('sub', 'eee.in')
sub_eee_out = os.path.join('sub', 'eee.out')
sub_fff_in = os.path.join('sub', 'fff.in')
sub_fff_out = os.path.join('sub', 'fff.out')
test.write('my-p4.py', """
import shutil
import sys
for f in sys.argv[1:]:<|fim▁hole|> shutil.copy('Perforce/'+f, f)
""")
test.write('SConstruct', """
def cat(env, source, target):
target = str(target[0])
source = map(str, source)
f = open(target, "wb")
for src in source:
f.write(open(src, "rb").read())
f.close()
env = Environment(TOOLS = ['default', 'Perforce'],
BUILDERS={'Cat':Builder(action=cat)},
P4COM='%(_python_)s my-p4.py $TARGET',
P4COMSTR='Checking out $TARGET from our fake Perforce')
env.Cat('aaa.out', 'aaa.in')
env.Cat('bbb.out', 'bbb.in')
env.Cat('ccc.out', 'ccc.in')
env.Cat('all', ['aaa.out', 'bbb.out', 'ccc.out'])
env.SourceCode('.', env.Perforce())
SConscript('sub/SConscript', "env")
""" % locals())
test.write(['Perforce', 'sub', 'SConscript'], """\
Import("env")
env.Cat('ddd.out', 'ddd.in')
env.Cat('eee.out', 'eee.in')
env.Cat('fff.out', 'fff.in')
env.Cat('all', ['ddd.out', 'eee.out', 'fff.out'])
""")
test.write(['Perforce', 'aaa.in'], "Perforce/aaa.in\n")
test.write('bbb.in', "checked-out bbb.in\n")
test.write(['Perforce', 'ccc.in'], "Perforce/ccc.in\n")
test.write(['Perforce', 'sub', 'ddd.in'], "Perforce/sub/ddd.in\n")
test.write(['sub', 'eee.in'], "checked-out sub/eee.in\n")
test.write(['Perforce', 'sub', 'fff.in'], "Perforce/sub/fff.in\n")
test.run(arguments = '.',
stdout = test.wrap_stdout(read_str = """\
Checking out %(sub_SConscript)s from our fake Perforce
""" % locals(),
build_str = """\
Checking out aaa.in from our fake Perforce
cat(["aaa.out"], ["aaa.in"])
cat(["bbb.out"], ["bbb.in"])
Checking out ccc.in from our fake Perforce
cat(["ccc.out"], ["ccc.in"])
cat(["all"], ["aaa.out", "bbb.out", "ccc.out"])
Checking out %(sub_ddd_in)s from our fake Perforce
cat(["%(sub_ddd_out)s"], ["%(sub_ddd_in)s"])
cat(["%(sub_eee_out)s"], ["%(sub_eee_in)s"])
Checking out %(sub_fff_in)s from our fake Perforce
cat(["%(sub_fff_out)s"], ["%(sub_fff_in)s"])
cat(["%(sub_all)s"], ["%(sub_ddd_out)s", "%(sub_eee_out)s", "%(sub_fff_out)s"])
""" % locals()))
test.must_match('all',
"Perforce/aaa.in\nchecked-out bbb.in\nPerforce/ccc.in\n")
test.must_match(['sub', 'all'],
"Perforce/sub/ddd.in\nchecked-out sub/eee.in\nPerforce/sub/fff.in\n")
#
test.pass_test()<|fim▁end|> | |
<|file_name|>run_upgrade.py<|end_file_name|><|fim▁begin|>import os
from importlib import import_module
from django.core.management.base import BaseCommand
from django.utils import translation
from django.conf import settings
def get_modules():
path = os.path.join(settings.BASE_DIR, 'utils', 'upgrade')
root, dirs, files = next(os.walk(path))
return files
class Command(BaseCommand):
"""
Upgrades Janeway
"""
help = "Upgrades an install from one version to another."
def add_arguments(self, parser):
"""Adds arguments to Django's management command-line parser.
:param parser: the parser to which the required arguments will be added
:return: None
"""
parser.add_argument('--path', required=False)
def handle(self, *args, **options):
if not options.get('path'):
print('No upgrade selected. Available upgrade paths: ')
for file in get_modules():
module_name = file.split('.')[0]
print('- {module_name}'.format(module_name=module_name))<|fim▁hole|> translation.activate('en')
upgrade_module_name = options.get('path')
upgrade_module_path = 'utils.upgrade.{module_name}'.format(module_name=upgrade_module_name)
try:
upgrade_module = import_module(upgrade_module_path)
upgrade_module.execute()
except ImportError as e:
print('There was an error running the requested upgrade: ')
print(e)<|fim▁end|>
 print('To run an upgrade use the following: `python3 manage.py run_upgrade --path 12_13`')
else: |
<|file_name|>fdb-core.js<|end_file_name|><|fim▁begin|>(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(_dereq_,module,exports){
var Core = _dereq_('../lib/Core'),
ShimIE8 = _dereq_('../lib/Shim.IE8');
if (typeof window !== 'undefined') {
window.ForerunnerDB = Core;
}
module.exports = Core;
},{"../lib/Core":4,"../lib/Shim.IE8":27}],2:[function(_dereq_,module,exports){
"use strict";
var Shared = _dereq_('./Shared'),
Path = _dereq_('./Path');
var BinaryTree = function (data, compareFunc, hashFunc) {
this.init.apply(this, arguments);
};
BinaryTree.prototype.init = function (data, index, compareFunc, hashFunc) {
this._store = [];
this._keys = [];
if (index !== undefined) { this.index(index); }
if (compareFunc !== undefined) { this.compareFunc(compareFunc); }
if (hashFunc !== undefined) { this.hashFunc(hashFunc); }
if (data !== undefined) { this.data(data); }
};
Shared.addModule('BinaryTree', BinaryTree);
Shared.mixin(BinaryTree.prototype, 'Mixin.ChainReactor');
Shared.mixin(BinaryTree.prototype, 'Mixin.Sorting');
Shared.mixin(BinaryTree.prototype, 'Mixin.Common');
Shared.synthesize(BinaryTree.prototype, 'compareFunc');
Shared.synthesize(BinaryTree.prototype, 'hashFunc');
Shared.synthesize(BinaryTree.prototype, 'indexDir');
Shared.synthesize(BinaryTree.prototype, 'keys');
Shared.synthesize(BinaryTree.prototype, 'index', function (index) {
if (index !== undefined) {
// Convert the index object to an array of key val objects
this.keys(this.extractKeys(index));
}
return this.$super.call(this, index);
});
BinaryTree.prototype.extractKeys = function (obj) {
var i,
keys = [];
for (i in obj) {
if (obj.hasOwnProperty(i)) {
keys.push({
key: i,
val: obj[i]
});
}
}
return keys;
};
BinaryTree.prototype.data = function (val) {
if (val !== undefined) {
this._data = val;
if (this._hashFunc) { this._hash = this._hashFunc(val); }
return this;
}
return this._data;
};
/**
* Pushes an item to the binary tree node's store array.
* @param {*} val The item to add to the store.
* @returns {*}
*/
BinaryTree.prototype.push = function (val) {
if (val !== undefined) {
this._store.push(val);
return this;
}
return false;
};
/**
* Pulls an item from the binary tree node's store array.
* @param {*} val The item to remove from the store.
* @returns {*}
*/
BinaryTree.prototype.pull = function (val) {
if (val !== undefined) {
var index = this._store.indexOf(val);
if (index > -1) {
this._store.splice(index, 1);
return this;
}
}
return false;
};
/**
* Default compare method. Can be overridden.
* @param a
* @param b
* @returns {number}
* @private
*/
BinaryTree.prototype._compareFunc = function (a, b) {
// Loop the index array
var i,
indexData,
result = 0;
for (i = 0; i < this._keys.length; i++) {
indexData = this._keys[i];
if (indexData.val === 1) {
result = this.sortAsc(a[indexData.key], b[indexData.key]);
} else if (indexData.val === -1) {
result = this.sortDesc(a[indexData.key], b[indexData.key]);
}
if (result !== 0) {
return result;
}
}
return result;
};
/**
* Default hash function. Can be overridden.
* @param obj
* @private
*/
BinaryTree.prototype._hashFunc = function (obj) {
/*var i,
indexData,
hash = '';
for (i = 0; i < this._keys.length; i++) {
indexData = this._keys[i];
if (hash) { hash += '_'; }
hash += obj[indexData.key];
}
return hash;*/
return obj[this._keys[0].key];
};
BinaryTree.prototype.insert = function (data) {
var result,
inserted,
failed,
i;
if (data instanceof Array) {
// Insert array of data
inserted = [];
failed = [];
for (i = 0; i < data.length; i++) {
if (this.insert(data[i])) {
inserted.push(data[i]);
} else {
failed.push(data[i]);
}
}
return {
inserted: inserted,
failed: failed
};
}
if (!this._data) {
// Insert into this node (overwrite) as there is no data
this.data(data);
//this.push(data);
return true;
}
result = this._compareFunc(this._data, data);
if (result === 0) {
this.push(data);
// Less than this node
if (this._left) {
// Propagate down the left branch
this._left.insert(data);
} else {
// Assign to left branch
this._left = new BinaryTree(data, this._index, this._compareFunc, this._hashFunc);
}
return true;
}
if (result === -1) {
// Greater than this node
if (this._right) {
// Propagate down the right branch
this._right.insert(data);
} else {
// Assign to right branch
this._right = new BinaryTree(data, this._index, this._compareFunc, this._hashFunc);
}
return true;
}
if (result === 1) {
// Less than this node
if (this._left) {
// Propagate down the left branch
this._left.insert(data);
} else {
// Assign to left branch
this._left = new BinaryTree(data, this._index, this._compareFunc, this._hashFunc);
}
return true;
}
return false;
};
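// Usage sketch (hypothetical data): inserting {score: 5}, {score: 2} and
// {score: 8} into a tree indexed on {score: 1} sends 2 down the left branch
// and 8 down the right, while documents comparing equal are also pushed onto
// the matching node's store array.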
BinaryTree.prototype.lookup = function (data, resultArr) {
var result = this._compareFunc(this._data, data);
resultArr = resultArr || [];
if (result === 0) {
if (this._left) { this._left.lookup(data, resultArr); }
resultArr.push(this._data);
if (this._right) { this._right.lookup(data, resultArr); }
}
if (result === -1) {
if (this._right) { this._right.lookup(data, resultArr); }
}
if (result === 1) {
if (this._left) { this._left.lookup(data, resultArr); }
}
return resultArr;
};
BinaryTree.prototype.inOrder = function (type, resultArr) {
resultArr = resultArr || [];
if (this._left) {
this._left.inOrder(type, resultArr);
}
switch (type) {
case 'hash':
resultArr.push(this._hash);
break;
case 'data':
resultArr.push(this._data);
break;
default:
resultArr.push({
key: this._data,
arr: this._store
});
break;
}
if (this._right) {
this._right.inOrder(type, resultArr);
}
return resultArr;
};
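// Because the traversal is depth-first left-to-right, inOrder('data') yields
// every stored document already sorted by the tree's index -- e.g. ascending
// score for an index of {score: 1} (example index is illustrative).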
/*BinaryTree.prototype.find = function (type, search, resultArr) {
resultArr = resultArr || [];
if (this._left) {
this._left.find(type, search, resultArr);
}
// Check if this node's data is greater or less than the from value
var fromResult = this.sortAsc(this._data[key], from),
toResult = this.sortAsc(this._data[key], to);
if ((fromResult === 0 || fromResult === 1) && (toResult === 0 || toResult === -1)) {
// This data node is greater than or equal to the from value,
// and less than or equal to the to value so include it
switch (type) {
case 'hash':
resultArr.push(this._hash);
break;
case 'data':
resultArr.push(this._data);
break;
default:
resultArr.push({
key: this._data,
arr: this._store
});
break;
}
}
if (this._right) {
this._right.find(type, search, resultArr);
}
return resultArr;
};*/
/**
*
 * @param {String} type The type of result to collect: 'hash', 'data', or
 * any other value to collect key/store pair objects.
* @param {String} key The data key / path to range search against.
* @param {Number} from Range search from this value (inclusive)
* @param {Number} to Range search to this value (inclusive)
* @param {Array=} resultArr Leave undefined when calling (internal use),
* passes the result array between recursive calls to be returned when
* the recursion chain completes.
* @param {Path=} pathResolver Leave undefined when calling (internal use),
* caches the path resolver instance for performance.
* @returns {Array} Array of matching document objects
*/
BinaryTree.prototype.findRange = function (type, key, from, to, resultArr, pathResolver) {
resultArr = resultArr || [];
pathResolver = pathResolver || new Path(key);
if (this._left) {
this._left.findRange(type, key, from, to, resultArr, pathResolver);
}
// Check if this node's data is greater or less than the from value
var pathVal = pathResolver.value(this._data),
fromResult = this.sortAsc(pathVal, from),
toResult = this.sortAsc(pathVal, to);
if ((fromResult === 0 || fromResult === 1) && (toResult === 0 || toResult === -1)) {
// This data node is greater than or equal to the from value,
// and less than or equal to the to value so include it
switch (type) {
case 'hash':
resultArr.push(this._hash);
break;
case 'data':
resultArr.push(this._data);
break;
default:
resultArr.push({
key: this._data,
arr: this._store
});
break;
}
}
if (this._right) {
this._right.findRange(type, key, from, to, resultArr, pathResolver);
}
return resultArr;
};
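// For instance (hypothetical index on {age: 1}),
// tree.findRange('data', 'age', 18, 30) collects every document whose "age"
// path resolves to a value in the inclusive range [18, 30], in index order.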
/*BinaryTree.prototype.findRegExp = function (type, key, pattern, resultArr) {
resultArr = resultArr || [];
if (this._left) {
this._left.findRegExp(type, key, pattern, resultArr);
}
// Check if this node's data is greater or less than the from value
var fromResult = this.sortAsc(this._data[key], from),
toResult = this.sortAsc(this._data[key], to);
if ((fromResult === 0 || fromResult === 1) && (toResult === 0 || toResult === -1)) {
// This data node is greater than or equal to the from value,
// and less than or equal to the to value so include it
switch (type) {
case 'hash':
resultArr.push(this._hash);
break;
case 'data':
resultArr.push(this._data);
break;
default:
resultArr.push({
key: this._data,
arr: this._store
});
break;
}
}
if (this._right) {
this._right.findRegExp(type, key, pattern, resultArr);
}
return resultArr;
};*/
BinaryTree.prototype.match = function (query, options) {
// Check if the passed query has data in the keys our index
// operates on and if so, is the query sort matching our order
var pathSolver = new Path(),
indexKeyArr,
queryArr,
matchedKeys = [],
matchedKeyCount = 0,
i;
indexKeyArr = pathSolver.parseArr(this._index, {
verbose: true
});
queryArr = pathSolver.parseArr(query, {
ignore:/\$/,
verbose: true
});
// Loop the query array and check the order of keys against the
// index key array to see if this index can be used
for (i = 0; i < indexKeyArr.length; i++) {
if (queryArr[i] === indexKeyArr[i]) {
matchedKeyCount++;
matchedKeys.push(queryArr[i]);
}
}
return {
matchedKeys: matchedKeys,
totalKeyCount: queryArr.length,
score: matchedKeyCount
};
//return pathSolver.countObjectPaths(this._keys, query);
};
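// match() scores index coverage for the query planner: e.g. an index on
// {name: 1, age: 1} matched against the query {name: 'x', age: 5} reports
// score 2, while {age: 5} alone scores 0 because the leading key is missing
// (example queries are illustrative).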
Shared.finishModule('BinaryTree');
module.exports = BinaryTree;
},{"./Path":23,"./Shared":26}],3:[function(_dereq_,module,exports){
"use strict";
var Shared,
Db,
Metrics,
KeyValueStore,
Path,
IndexHashMap,
IndexBinaryTree,
Crc,
Overload,
ReactorIO;
Shared = _dereq_('./Shared');
/**
* Creates a new collection. Collections store multiple documents and
* handle CRUD against those documents.
* @constructor
*/
var Collection = function (name) {
this.init.apply(this, arguments);
};
Collection.prototype.init = function (name, options) {
this._primaryKey = '_id';
this._primaryIndex = new KeyValueStore('primary');
this._primaryCrc = new KeyValueStore('primaryCrc');
this._crcLookup = new KeyValueStore('crcLookup');
this._name = name;
this._data = [];
this._metrics = new Metrics();
this._options = options || {
changeTimestamp: false
};
// Create an object to store internal protected data
this._metaData = {};
this._deferQueue = {
insert: [],
update: [],
remove: [],
upsert: [],
async: []
};
this._deferThreshold = {
insert: 100,
update: 100,
remove: 100,
upsert: 100
};
this._deferTime = {
insert: 1,
update: 1,
remove: 1,
upsert: 1
};
this._deferredCalls = true;
// Set the subset to itself since it is the root collection
this.subsetOf(this);
};
Shared.addModule('Collection', Collection);
Shared.mixin(Collection.prototype, 'Mixin.Common');
Shared.mixin(Collection.prototype, 'Mixin.Events');
Shared.mixin(Collection.prototype, 'Mixin.ChainReactor');
Shared.mixin(Collection.prototype, 'Mixin.CRUD');
Shared.mixin(Collection.prototype, 'Mixin.Constants');
Shared.mixin(Collection.prototype, 'Mixin.Triggers');
Shared.mixin(Collection.prototype, 'Mixin.Sorting');
Shared.mixin(Collection.prototype, 'Mixin.Matching');
Shared.mixin(Collection.prototype, 'Mixin.Updating');
Shared.mixin(Collection.prototype, 'Mixin.Tags');
Metrics = _dereq_('./Metrics');
KeyValueStore = _dereq_('./KeyValueStore');
Path = _dereq_('./Path');
IndexHashMap = _dereq_('./IndexHashMap');
IndexBinaryTree = _dereq_('./IndexBinaryTree');
Crc = _dereq_('./Crc');
Db = Shared.modules.Db;
Overload = _dereq_('./Overload');
ReactorIO = _dereq_('./ReactorIO');
/**
* Returns a checksum of a string.
* @param {String} string The string to checksum.
* @return {String} The checksum generated.
*/
Collection.prototype.crc = Crc;
/**
* Gets / sets the deferred calls flag. If set to true (default)
* then operations on large data sets can be broken up and done
* over multiple CPU cycles (creating an async state). For purely
* synchronous behaviour set this to false.
* @param {Boolean=} val The value to set.
* @returns {Boolean}
*/
Shared.synthesize(Collection.prototype, 'deferredCalls');
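// e.g. db.collection('test').deferredCalls(false); forces large inserts to
// complete in one synchronous pass instead of being chunked over multiple
// CPU cycles (collection name here is illustrative).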
/**
* Gets / sets the current state.
* @param {String=} val The name of the state to set.
* @returns {*}
*/
Shared.synthesize(Collection.prototype, 'state');
/**
* Gets / sets the name of the collection.
* @param {String=} val The name of the collection to set.
* @returns {*}
*/
Shared.synthesize(Collection.prototype, 'name');
/**
* Gets / sets the metadata stored in the collection.
*/
Shared.synthesize(Collection.prototype, 'metaData');
/**
* Gets / sets boolean to determine if the collection should be
* capped or not.
*/
Shared.synthesize(Collection.prototype, 'capped');
/**
* Gets / sets capped collection size. This is the maximum number
* of records that the capped collection will store.
*/
Shared.synthesize(Collection.prototype, 'cappedSize');
Collection.prototype._asyncPending = function (key) {
this._deferQueue.async.push(key);
};
Collection.prototype._asyncComplete = function (key) {
// Remove async flag for this type
var index = this._deferQueue.async.indexOf(key);
while (index > -1) {
this._deferQueue.async.splice(index, 1);
index = this._deferQueue.async.indexOf(key);
}
if (this._deferQueue.async.length === 0) {
this.deferEmit('ready');
}
};
/**
* Get the data array that represents the collection's data.
* This data is returned by reference and should not be altered outside
* of the provided CRUD functionality of the collection as doing so
* may cause unstable index behaviour within the collection.
* @returns {Array}
*/
Collection.prototype.data = function () {
return this._data;
};
/**
* Drops a collection and all it's stored data from the database.
* @returns {boolean} True on success, false on failure.
*/
Collection.prototype.drop = function (callback) {
var key;
if (!this.isDropped()) {
if (this._db && this._db._collection && this._name) {
if (this.debug()) {
console.log(this.logIdentifier() + ' Dropping');
}
this._state = 'dropped';
this.emit('drop', this);
delete this._db._collection[this._name];
// Remove any reactor IO chain links
if (this._collate) {
for (key in this._collate) {
if (this._collate.hasOwnProperty(key)) {
this.collateRemove(key);
}
}
}
delete this._primaryKey;
delete this._primaryIndex;
delete this._primaryCrc;
delete this._crcLookup;
delete this._name;
delete this._data;
delete this._metrics;
delete this._listeners;
if (callback) { callback(false, true); }
return true;
}
} else {
if (callback) { callback(false, true); }
return true;
}
if (callback) { callback(false, true); }
return false;
};
/**
* Gets / sets the primary key for this collection.
* @param {String=} keyName The name of the primary key.
* @returns {*}
*/
Collection.prototype.primaryKey = function (keyName) {
if (keyName !== undefined) {
if (this._primaryKey !== keyName) {
var oldKey = this._primaryKey;
this._primaryKey = keyName;
// Set the primary key index primary key
this._primaryIndex.primaryKey(keyName);
// Rebuild the primary key index
this.rebuildPrimaryKeyIndex();
// Propagate change down the chain
this.chainSend('primaryKey', keyName, {oldData: oldKey});
}
return this;
}
return this._primaryKey;
};
/**
* Handles insert events and routes changes to binds and views as required.
* @param {Array} inserted An array of inserted documents.
* @param {Array} failed An array of documents that failed to insert.
* @private
*/
Collection.prototype._onInsert = function (inserted, failed) {
this.emit('insert', inserted, failed);
};
/**
* Handles update events and routes changes to binds and views as required.
* @param {Array} items An array of updated documents.
* @private
*/
Collection.prototype._onUpdate = function (items) {
this.emit('update', items);
};
/**
* Handles remove events and routes changes to binds and views as required.
* @param {Array} items An array of removed documents.
* @private
*/
Collection.prototype._onRemove = function (items) {
this.emit('remove', items);
};
/**
* Handles any change to the collection.
* @private
*/
Collection.prototype._onChange = function () {
if (this._options.changeTimestamp) {
// Record the last change timestamp
this._metaData.lastChange = new Date();
}
};
/**
* Gets / sets the db instance this class instance belongs to.
* @param {Db=} db The db instance.
* @returns {*}
*/
Shared.synthesize(Collection.prototype, 'db', function (db) {
if (db) {
if (this.primaryKey() === '_id') {
// Set primary key to the db's key by default
this.primaryKey(db.primaryKey());
// Apply the same debug settings
this.debug(db.debug());
}
}
return this.$super.apply(this, arguments);
});
/**
* Gets / sets mongodb emulation mode.
* @param {Boolean=} val True to enable, false to disable.
* @returns {*}
*/
Shared.synthesize(Collection.prototype, 'mongoEmulation');
/**
* Sets the collection's data to the array / documents passed. If any
* data already exists in the collection it will be removed before the
* new data is set.
* @param {Array|Object} data The array of documents or a single document
* that will be set as the collections data.
* @param options Optional options object.
* @param callback Optional callback function.
*/
Collection.prototype.setData = function (data, options, callback) {
if (this.isDropped()) {
throw(this.logIdentifier() + ' Cannot operate in a dropped state!');
}
if (data) {
var op = this._metrics.create('setData');
op.start();
options = this.options(options);
this.preSetData(data, options, callback);
if (options.$decouple) {
data = this.decouple(data);
}
if (!(data instanceof Array)) {
data = [data];
}
op.time('transformIn');
data = this.transformIn(data);
op.time('transformIn');
var oldData = [].concat(this._data);
this._dataReplace(data);
// Update the primary key index
op.time('Rebuild Primary Key Index');
this.rebuildPrimaryKeyIndex(options);
op.time('Rebuild Primary Key Index');
// Rebuild all other indexes
op.time('Rebuild All Other Indexes');
this._rebuildIndexes();
op.time('Rebuild All Other Indexes');
op.time('Resolve chains');
this.chainSend('setData', data, {oldData: oldData});
op.time('Resolve chains');
op.stop();
this._onChange();
this.emit('setData', this._data, oldData);
}
if (callback) { callback(false); }
return this;
};
/**
* Drops and rebuilds the primary key index for all documents in the collection.
* @param {Object=} options An optional options object.
* @private
*/
Collection.prototype.rebuildPrimaryKeyIndex = function (options) {
options = options || {
$ensureKeys: undefined,
$violationCheck: undefined
};
var ensureKeys = options && options.$ensureKeys !== undefined ? options.$ensureKeys : true,
violationCheck = options && options.$violationCheck !== undefined ? options.$violationCheck : true,
arr,
arrCount,
arrItem,
pIndex = this._primaryIndex,
crcIndex = this._primaryCrc,
crcLookup = this._crcLookup,
pKey = this._primaryKey,
jString;
// Drop the existing primary index
pIndex.truncate();
crcIndex.truncate();
crcLookup.truncate();
// Loop the data and check for a primary key in each object
arr = this._data;
arrCount = arr.length;
while (arrCount--) {
arrItem = arr[arrCount];
if (ensureKeys) {
// Make sure the item has a primary key
this.ensurePrimaryKey(arrItem);
}
if (violationCheck) {
// Check for primary key violation
if (!pIndex.uniqueSet(arrItem[pKey], arrItem)) {
// Primary key violation
throw(this.logIdentifier() + ' Call to setData on collection failed because your data violates the primary key unique constraint. One or more documents are using the same primary key: ' + arrItem[this._primaryKey]);
}
} else {
pIndex.set(arrItem[pKey], arrItem);
}
// Generate a CRC string
jString = this.jStringify(arrItem);
crcIndex.set(arrItem[pKey], jString);
crcLookup.set(jString, arrItem);
}
};
/**
* Checks for a primary key on the document and assigns one if none
* currently exists.
* @param {Object} obj The object to check a primary key against.
* @private
*/
Collection.prototype.ensurePrimaryKey = function (obj) {
if (obj[this._primaryKey] === undefined) {
// Assign a primary key automatically
obj[this._primaryKey] = this.objectId();
}
};
/**
* Clears all data from the collection.
* @returns {Collection}
*/
Collection.prototype.truncate = function () {
if (this.isDropped()) {
throw(this.logIdentifier() + ' Cannot operate in a dropped state!');
}
this.emit('truncate', this._data);
// Clear all the data from the collection
this._data.length = 0;
// Re-create the primary index data
this._primaryIndex = new KeyValueStore('primary');
this._primaryCrc = new KeyValueStore('primaryCrc');
this._crcLookup = new KeyValueStore('crcLookup');
this._onChange();
this.deferEmit('change', {type: 'truncate'});
return this;
};
/**
* Modifies an existing document or documents in a collection. This will update
* all matches for 'query' with the data held in 'update'. It will not overwrite
* the matched documents with the update document.
*
* @param {Object} obj The document object to upsert or an array containing
* documents to upsert.
*
* If the document contains a primary key field (based on the collections's primary
* key) then the database will search for an existing document with a matching id.
* If a matching document is found, the document will be updated. Any keys that
* match keys on the existing document will be overwritten with new data. Any keys
* that do not currently exist on the document will be added to the document.
*
* If the document does not contain an id or the id passed does not match an existing
* document, an insert is performed instead. If no id is present a new primary key
* id is provided for the item.
*
* @param {Function=} callback Optional callback method.
* @returns {Object} An object containing two keys, "op" contains either "insert" or
* "update" depending on the type of operation that was performed and "result"
* contains the return data from the operation used.
*/
Collection.prototype.upsert = function (obj, callback) {
if (this.isDropped()) {
throw(this.logIdentifier() + ' Cannot operate in a dropped state!');
}
if (obj) {
var queue = this._deferQueue.upsert,
deferThreshold = this._deferThreshold.upsert,
returnData = {},
query,
i;
// Determine if the object passed is an array or not
if (obj instanceof Array) {
if (this._deferredCalls && obj.length > deferThreshold) {
// Break up upsert into blocks
this._deferQueue.upsert = queue.concat(obj);
this._asyncPending('upsert');
// Fire off the insert queue handler
this.processQueue('upsert', callback);
return {};
} else {
// Loop the array and upsert each item
returnData = [];
for (i = 0; i < obj.length; i++) {
returnData.push(this.upsert(obj[i]));
}
if (callback) { callback(); }
return returnData;
}
}
// Determine if the operation is an insert or an update
if (obj[this._primaryKey]) {
// Check if an object with this primary key already exists
query = {};
query[this._primaryKey] = obj[this._primaryKey];
if (this._primaryIndex.lookup(query)[0]) {
// The document already exists with this id, this operation is an update
returnData.op = 'update';
} else {
// No document with this id exists, this operation is an insert
returnData.op = 'insert';
}
} else {
// The document passed does not contain an id, this operation is an insert
returnData.op = 'insert';
}
switch (returnData.op) {
case 'insert':
returnData.result = this.insert(obj);
break;
case 'update':
returnData.result = this.update(query, obj);
break;
default:
break;
}
return returnData;
} else {
if (callback) { callback(); }
}
return {};
};
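// Example: coll.upsert({_id: '1', name: 'Jo'}) returns {op: 'insert', ...}
// the first time and {op: 'update', ...} on later calls with the same _id
// (document shown is illustrative).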
/**
* Executes a method against each document that matches query and returns an
* array of documents that may have been modified by the method.
* @param {Object} query The query object.
* @param {Function} func The method that each document is passed to. If this method
* returns false for a particular document it is excluded from the results.
* @param {Object=} options Optional options object.
* @returns {Array}
*/
Collection.prototype.filter = function (query, func, options) {
return (this.find(query, options)).filter(func);
};
/**
* Executes a method against each document that matches query and then executes
* an update based on the return data of the method.
* @param {Object} query The query object.
* @param {Function} func The method that each document is passed to. If this method
* returns false for a particular document it is excluded from the update.
* @param {Object=} options Optional options object passed to the initial find call.
* @returns {Array}
*/
Collection.prototype.filterUpdate = function (query, func, options) {
var items = this.find(query, options),
results = [],
singleItem,
singleQuery,
singleUpdate,
pk = this.primaryKey(),
i;
for (i = 0; i < items.length; i++) {
singleItem = items[i];
singleUpdate = func(singleItem);
if (singleUpdate) {
singleQuery = {};
singleQuery[pk] = singleItem[pk];
results.push(this.update(singleQuery, singleUpdate));
}
}
return results;
};
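// Example (illustrative): coll.filterUpdate({age: {$gt: 17}}, function (doc) {
//     return {$inc: {loginCount: 1}};
// }); runs one update per matched document and returns the per-document
// update results.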
/**
* Modifies an existing document or documents in a collection. This will update
* all matches for 'query' with the data held in 'update'. It will not overwrite
* the matched documents with the update document.
*
* @param {Object} query The query that must be matched for a document to be
* operated on.
* @param {Object} update The object containing updated key/values. Any keys that
* match keys on the existing document will be overwritten with this data. Any
* keys that do not currently exist on the document will be added to the document.
* @param {Object=} options An options object.
* @returns {Array} The items that were updated.
*/
Collection.prototype.update = function (query, update, options) {
if (this.isDropped()) {
throw(this.logIdentifier() + ' Cannot operate in a dropped state!');
}
// Decouple the update data
update = this.decouple(update);
// Convert queries from mongo dot notation to forerunner queries
if (this.mongoEmulation()) {
this.convertToFdb(query);
this.convertToFdb(update);
}
// Handle transform
update = this.transformIn(update);
if (this.debug()) {
console.log(this.logIdentifier() + ' Updating some data');
}
var self = this,
op = this._metrics.create('update'),
dataSet,
updated,
updateCall = function (referencedDoc) {
var oldDoc = self.decouple(referencedDoc),
newDoc,
triggerOperation,
result;
if (self.willTrigger(self.TYPE_UPDATE, self.PHASE_BEFORE) || self.willTrigger(self.TYPE_UPDATE, self.PHASE_AFTER)) {
newDoc = self.decouple(referencedDoc);
triggerOperation = {
type: 'update',
query: self.decouple(query),
update: self.decouple(update),
options: self.decouple(options),
op: op
};
// Update newDoc with the update criteria so we know what the data will look
// like AFTER the update is processed
result = self.updateObject(newDoc, triggerOperation.update, triggerOperation.query, triggerOperation.options, '');
if (self.processTrigger(triggerOperation, self.TYPE_UPDATE, self.PHASE_BEFORE, referencedDoc, newDoc) !== false) {
// No triggers complained so let's execute the replacement of the existing
// object with the new one
result = self.updateObject(referencedDoc, newDoc, triggerOperation.query, triggerOperation.options, '');
// NOTE: If for some reason we would only like to fire this event if changes are actually going
// to occur on the object from the proposed update then we can add "result &&" to the if
self.processTrigger(triggerOperation, self.TYPE_UPDATE, self.PHASE_AFTER, oldDoc, newDoc);
} else {
// Trigger cancelled operation so tell result that it was not updated
result = false;
}
} else {
// No triggers complained so let's execute the replacement of the existing
// object with the new one
result = self.updateObject(referencedDoc, update, query, options, '');
}
// Inform indexes of the change
self._updateIndexes(oldDoc, referencedDoc);
return result;
};
op.start();
op.time('Retrieve documents to update');
dataSet = this.find(query, {$decouple: false});
op.time('Retrieve documents to update');
if (dataSet.length) {
op.time('Update documents');
updated = dataSet.filter(updateCall);
op.time('Update documents');
if (updated.length) {
op.time('Resolve chains');
this.chainSend('update', {
query: query,
update: update,
dataSet: updated
}, options);
op.time('Resolve chains');
this._onUpdate(updated);
this._onChange();
this.deferEmit('change', {type: 'update', data: updated});
}
}
op.stop();
// TODO: Should we decouple the updated array before return by default?
return updated || [];
};
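// Usage sketch (illustrative, assuming a `users` collection): update() merges
// the update object into every matching document rather than replacing it:
//
//   users.update({_id: 1}, {age: 30});                 // set / overwrite a field
//   users.update({active: true}, {$inc: {visits: 1}}); // operator form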
/**
* Replaces an existing object with data from the new object without
* breaking data references.
* @param {Object} currentObj The object to alter.
* @param {Object} newObj The new object to overwrite the existing one with.
* @returns {*} Chain.
* @private
*/
Collection.prototype._replaceObj = function (currentObj, newObj) {
var i;
	// Remove the existing item from the collection indexes before mutating it
this._removeFromIndexes(currentObj);
// Remove existing keys from current object
for (i in currentObj) {
if (currentObj.hasOwnProperty(i)) {
delete currentObj[i];
}
}
// Add new keys to current object
for (i in newObj) {
if (newObj.hasOwnProperty(i)) {
currentObj[i] = newObj[i];
}
}
// Update the item in the primary index
if (!this._insertIntoIndexes(currentObj)) {
throw(this.logIdentifier() + ' Primary key violation in update! Key violated: ' + currentObj[this._primaryKey]);
}
// Update the object in the collection data
//this._data.splice(this._data.indexOf(currentObj), 1, newObj);
return this;
};
/**
 * Helper method to update a document from its id.
* @param {String} id The id of the document.
* @param {Object} update The object containing the key/values to update to.
* @returns {Object} The document that was updated or undefined
* if no document was updated.
*/
Collection.prototype.updateById = function (id, update) {
var searchObj = {};
searchObj[this._primaryKey] = id;
return this.update(searchObj, update)[0];
};
/**
* Internal method for document updating.
* @param {Object} doc The document to update.
* @param {Object} update The object with key/value pairs to update the document with.
* @param {Object} query The query object that we need to match to perform an update.
* @param {Object} options An options object.
* @param {String} path The current recursive path.
* @param {String} opType The type of update operation to perform, if none is specified
* default is to set new data against matching fields.
* @returns {Boolean} True if the document was updated with new / changed data or
* false if it was not updated because the data was the same.
* @private
*/
Collection.prototype.updateObject = function (doc, update, query, options, path, opType) {
// TODO: This method is long, try to break it into smaller pieces
update = this.decouple(update);
// Clear leading dots from path
path = path || '';
	if (path.substr(0, 1) === '.') { path = path.substr(1, path.length - 1); }
//var oldDoc = this.decouple(doc),
var updated = false,
recurseUpdated = false,
operation,
tmpArray,
tmpIndex,
tmpCount,
tempIndex,
tempKey,
replaceObj,
pk,
pathInstance,
sourceIsArray,
updateIsArray,
i;
// Loop each key in the update object
for (i in update) {
if (update.hasOwnProperty(i)) {
// Reset operation flag
operation = false;
// Check if the property starts with a dollar (function)
if (i.substr(0, 1) === '$') {
// Check for commands
switch (i) {
case '$key':
case '$index':
case '$data':
case '$min':
case '$max':
// Ignore some operators
operation = true;
break;
case '$each':
operation = true;
// Loop over the array of updates and run each one
tmpCount = update.$each.length;
for (tmpIndex = 0; tmpIndex < tmpCount; tmpIndex++) {
recurseUpdated = this.updateObject(doc, update.$each[tmpIndex], query, options, path);
if (recurseUpdated) {
updated = true;
}
}
updated = updated || recurseUpdated;
break;
case '$replace':
operation = true;
replaceObj = update.$replace;
pk = this.primaryKey();
// Loop the existing item properties and compare with
// the replacement (never remove primary key)
for (tempKey in doc) {
if (doc.hasOwnProperty(tempKey) && tempKey !== pk) {
if (replaceObj[tempKey] === undefined) {
// The new document doesn't have this field, remove it from the doc
this._updateUnset(doc, tempKey);
updated = true;
}
}
}
// Loop the new item props and update the doc
for (tempKey in replaceObj) {
if (replaceObj.hasOwnProperty(tempKey) && tempKey !== pk) {
this._updateOverwrite(doc, tempKey, replaceObj[tempKey]);
updated = true;
}
}
break;
default:
operation = true;
// Now run the operation
recurseUpdated = this.updateObject(doc, update[i], query, options, path, i);
updated = updated || recurseUpdated;
break;
}
}
// Check if the key has a .$ at the end, denoting an array lookup
if (this._isPositionalKey(i)) {
operation = true;
// Modify i to be the name of the field
i = i.substr(0, i.length - 2);
pathInstance = new Path(path + '.' + i);
// Check if the key is an array and has items
if (doc[i] && doc[i] instanceof Array && doc[i].length) {
tmpArray = [];
// Loop the array and find matches to our search
for (tmpIndex = 0; tmpIndex < doc[i].length; tmpIndex++) {
if (this._match(doc[i][tmpIndex], pathInstance.value(query)[0], options, '', {})) {
tmpArray.push(tmpIndex);
}
}
// Loop the items that matched and update them
for (tmpIndex = 0; tmpIndex < tmpArray.length; tmpIndex++) {
recurseUpdated = this.updateObject(doc[i][tmpArray[tmpIndex]], update[i + '.$'], query, options, path + '.' + i, opType);
updated = updated || recurseUpdated;
}
}
}
if (!operation) {
if (!opType && typeof(update[i]) === 'object') {
if (doc[i] !== null && typeof(doc[i]) === 'object') {
// Check if we are dealing with arrays
sourceIsArray = doc[i] instanceof Array;
updateIsArray = update[i] instanceof Array;
if (sourceIsArray || updateIsArray) {
// Check if the update is an object and the doc is an array
if (!updateIsArray && sourceIsArray) {
// Update is an object, source is an array so match the array items
// with our query object to find the one to update inside this array
// Loop the array and find matches to our search
for (tmpIndex = 0; tmpIndex < doc[i].length; tmpIndex++) {
recurseUpdated = this.updateObject(doc[i][tmpIndex], update[i], query, options, path + '.' + i, opType);
updated = updated || recurseUpdated;
}
} else {
// Either both source and update are arrays or the update is
// an array and the source is not, so set source to update
if (doc[i] !== update[i]) {
this._updateProperty(doc, i, update[i]);
updated = true;
}
}
} else {
// The doc key is an object so traverse the
// update further
recurseUpdated = this.updateObject(doc[i], update[i], query, options, path + '.' + i, opType);
updated = updated || recurseUpdated;
}
} else {
if (doc[i] !== update[i]) {
this._updateProperty(doc, i, update[i]);
updated = true;
}
}
} else {
switch (opType) {
case '$inc':
var doUpdate = true;
// Check for a $min / $max operator
if (update[i] > 0) {
if (update.$max) {
// Check current value
if (doc[i] >= update.$max) {
// Don't update
doUpdate = false;
}
}
} else if (update[i] < 0) {
if (update.$min) {
// Check current value
if (doc[i] <= update.$min) {
// Don't update
doUpdate = false;
}
}
}
if (doUpdate) {
this._updateIncrement(doc, i, update[i]);
updated = true;
}
break;
case '$cast':
// Casts a property to the type specified if it is not already
// that type. If the cast is an array or an object and the property
// is not already that type a new array or object is created and
// set to the property, overwriting the previous value
switch (update[i]) {
case 'array':
if (!(doc[i] instanceof Array)) {
// Cast to an array
this._updateProperty(doc, i, update.$data || []);
updated = true;
}
break;
case 'object':
if (!(doc[i] instanceof Object) || (doc[i] instanceof Array)) {
// Cast to an object
this._updateProperty(doc, i, update.$data || {});
updated = true;
}
break;
case 'number':
if (typeof doc[i] !== 'number') {
// Cast to a number
this._updateProperty(doc, i, Number(doc[i]));
updated = true;
}
break;
case 'string':
if (typeof doc[i] !== 'string') {
// Cast to a string
this._updateProperty(doc, i, String(doc[i]));
updated = true;
}
break;
default:
throw(this.logIdentifier() + ' Cannot update cast to unknown type: ' + update[i]);
}
break;
case '$push':
// Check if the target key is undefined and if so, create an array
if (doc[i] === undefined) {
// Initialise a new array
this._updateProperty(doc, i, []);
}
// Check that the target key is an array
if (doc[i] instanceof Array) {
// Check for a $position modifier with an $each
if (update[i].$position !== undefined && update[i].$each instanceof Array) {
// Grab the position to insert at
tempIndex = update[i].$position;
// Loop the each array and push each item
tmpCount = update[i].$each.length;
for (tmpIndex = 0; tmpIndex < tmpCount; tmpIndex++) {
this._updateSplicePush(doc[i], tempIndex + tmpIndex, update[i].$each[tmpIndex]);
}
} else if (update[i].$each instanceof Array) {
// Do a loop over the each to push multiple items
tmpCount = update[i].$each.length;
for (tmpIndex = 0; tmpIndex < tmpCount; tmpIndex++) {
this._updatePush(doc[i], update[i].$each[tmpIndex]);
}
} else {
// Do a standard push
this._updatePush(doc[i], update[i]);
}
updated = true;
} else {
throw(this.logIdentifier() + ' Cannot push to a key that is not an array! (' + i + ')');
}
break;
case '$pull':
if (doc[i] instanceof Array) {
tmpArray = [];
// Loop the array and find matches to our search
for (tmpIndex = 0; tmpIndex < doc[i].length; tmpIndex++) {
if (this._match(doc[i][tmpIndex], update[i], options, '', {})) {
tmpArray.push(tmpIndex);
}
}
tmpCount = tmpArray.length;
// Now loop the pull array and remove items to be pulled
while (tmpCount--) {
this._updatePull(doc[i], tmpArray[tmpCount]);
updated = true;
}
}
break;
case '$pullAll':
if (doc[i] instanceof Array) {
if (update[i] instanceof Array) {
tmpArray = doc[i];
tmpCount = tmpArray.length;
if (tmpCount > 0) {
// Now loop the pull array and remove items to be pulled
while (tmpCount--) {
for (tempIndex = 0; tempIndex < update[i].length; tempIndex++) {
if (tmpArray[tmpCount] === update[i][tempIndex]) {
this._updatePull(doc[i], tmpCount);
tmpCount--;
updated = true;
}
}
if (tmpCount < 0) {
break;
}
}
}
} else {
throw(this.logIdentifier() + ' Cannot pullAll without being given an array of values to pull! (' + i + ')');
}
}
break;
case '$addToSet':
// Check if the target key is undefined and if so, create an array
if (doc[i] === undefined) {
// Initialise a new array
this._updateProperty(doc, i, []);
}
// Check that the target key is an array
if (doc[i] instanceof Array) {
// Loop the target array and check for existence of item
var targetArr = doc[i],
targetArrIndex,
targetArrCount = targetArr.length,
objHash,
addObj = true,
optionObj = (options && options.$addToSet),
hashMode,
pathSolver;
// Check if we have an options object for our operation
if (update[i].$key) {
hashMode = false;
pathSolver = new Path(update[i].$key);
objHash = pathSolver.value(update[i])[0];
// Remove the key from the object before we add it
delete update[i].$key;
} else if (optionObj && optionObj.key) {
hashMode = false;
pathSolver = new Path(optionObj.key);
objHash = pathSolver.value(update[i])[0];
} else {
objHash = this.jStringify(update[i]);
hashMode = true;
}
for (targetArrIndex = 0; targetArrIndex < targetArrCount; targetArrIndex++) {
if (hashMode) {
// Check if objects match via a string hash (JSON)
if (this.jStringify(targetArr[targetArrIndex]) === objHash) {
// The object already exists, don't add it
addObj = false;
break;
}
} else {
// Check if objects match based on the path
if (objHash === pathSolver.value(targetArr[targetArrIndex])[0]) {
// The object already exists, don't add it
addObj = false;
break;
}
}
}
if (addObj) {
this._updatePush(doc[i], update[i]);
updated = true;
}
} else {
throw(this.logIdentifier() + ' Cannot addToSet on a key that is not an array! (' + i + ')');
}
break;
case '$splicePush':
// Check if the target key is undefined and if so, create an array
if (doc[i] === undefined) {
// Initialise a new array
this._updateProperty(doc, i, []);
}
// Check that the target key is an array
if (doc[i] instanceof Array) {
tempIndex = update.$index;
if (tempIndex !== undefined) {
delete update.$index;
// Check for out of bounds index
if (tempIndex > doc[i].length) {
tempIndex = doc[i].length;
}
this._updateSplicePush(doc[i], tempIndex, update[i]);
updated = true;
} else {
throw(this.logIdentifier() + ' Cannot splicePush without a $index integer value!');
}
} else {
throw(this.logIdentifier() + ' Cannot splicePush with a key that is not an array! (' + i + ')');
}
break;
case '$move':
if (doc[i] instanceof Array) {
// Loop the array and find matches to our search
for (tmpIndex = 0; tmpIndex < doc[i].length; tmpIndex++) {
if (this._match(doc[i][tmpIndex], update[i], options, '', {})) {
var moveToIndex = update.$index;
if (moveToIndex !== undefined) {
delete update.$index;
this._updateSpliceMove(doc[i], tmpIndex, moveToIndex);
updated = true;
} else {
throw(this.logIdentifier() + ' Cannot move without a $index integer value!');
}
break;
}
}
} else {
throw(this.logIdentifier() + ' Cannot move on a key that is not an array! (' + i + ')');
}
break;
case '$mul':
this._updateMultiply(doc, i, update[i]);
updated = true;
break;
case '$rename':
this._updateRename(doc, i, update[i]);
updated = true;
break;
case '$overwrite':
this._updateOverwrite(doc, i, update[i]);
updated = true;
break;
case '$unset':
this._updateUnset(doc, i);
updated = true;
break;
case '$clear':
this._updateClear(doc, i);
updated = true;
break;
case '$pop':
if (doc[i] instanceof Array) {
if (this._updatePop(doc[i], update[i])) {
updated = true;
}
} else {
throw(this.logIdentifier() + ' Cannot pop from a key that is not an array! (' + i + ')');
}
break;
case '$toggle':
// Toggle the boolean property between true and false
this._updateProperty(doc, i, !doc[i]);
updated = true;
break;
default:
if (doc[i] !== update[i]) {
this._updateProperty(doc, i, update[i]);
updated = true;
}
break;
}
}
}
}
}
return updated;
};
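// Operator usage sketches (illustrative) for the update operators handled in
// the switch above, assuming `coll` is a collection whose matched document
// has an `arr` array, a `num` number and a `flag` boolean:
//
//   coll.update({_id: 1}, {$push: {arr: 'x'}});                  // append one
//   coll.update({_id: 1}, {$push: {arr: {$each: ['a', 'b']}}});  // append many
//   coll.update({_id: 1}, {$splicePush: {arr: 'y', $index: 0}}); // insert at index
//   coll.update({_id: 1}, {$pull: {arr: 'x'}});                  // remove matches
//   coll.update({_id: 1}, {$inc: {num: 2, $max: 10}});           // capped increment
//   coll.update({_id: 1}, {$toggle: {flag: 1}});                 // boolean flip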
/**
 * Determines if the passed key has an array positional mark (a '.$' suffix
 * on its name).
* @param {String} key The key to check.
* @returns {Boolean} True if it is a positional or false if not.
* @private
*/
Collection.prototype._isPositionalKey = function (key) {
return key.substr(key.length - 2, 2) === '.$';
};
/**
* Removes any documents from the collection that match the search query
* key/values.
* @param {Object} query The query object.
* @param {Object=} options An options object.
* @param {Function=} callback A callback method.
* @returns {Array} An array of the documents that were removed.
*/
Collection.prototype.remove = function (query, options, callback) {
if (this.isDropped()) {
throw(this.logIdentifier() + ' Cannot operate in a dropped state!');
}
var self = this,
dataSet,
index,
arrIndex,
returnArr,
removeMethod,
triggerOperation,
doc,
newDoc;
if (typeof(options) === 'function') {
callback = options;
options = {};
}
// Convert queries from mongo dot notation to forerunner queries
if (this.mongoEmulation()) {
this.convertToFdb(query);
}
if (query instanceof Array) {
returnArr = [];
for (arrIndex = 0; arrIndex < query.length; arrIndex++) {
returnArr.push(this.remove(query[arrIndex], {noEmit: true}));
}
if (!options || (options && !options.noEmit)) {
this._onRemove(returnArr);
}
if (callback) { callback(false, returnArr); }
return returnArr;
} else {
returnArr = [];
dataSet = this.find(query, {$decouple: false});
if (dataSet.length) {
removeMethod = function (dataItem) {
// Remove the item from the collection's indexes
self._removeFromIndexes(dataItem);
// Remove data from internal stores
index = self._data.indexOf(dataItem);
self._dataRemoveAtIndex(index);
returnArr.push(dataItem);
};
// Remove the data from the collection
for (var i = 0; i < dataSet.length; i++) {
doc = dataSet[i];
if (self.willTrigger(self.TYPE_REMOVE, self.PHASE_BEFORE) || self.willTrigger(self.TYPE_REMOVE, self.PHASE_AFTER)) {
triggerOperation = {
type: 'remove'
};
newDoc = self.decouple(doc);
if (self.processTrigger(triggerOperation, self.TYPE_REMOVE, self.PHASE_BEFORE, newDoc, newDoc) !== false) {
// The trigger didn't ask to cancel so execute the removal method
removeMethod(doc);
self.processTrigger(triggerOperation, self.TYPE_REMOVE, self.PHASE_AFTER, newDoc, newDoc);
}
} else {
// No triggers to execute
removeMethod(doc);
}
}
if (returnArr.length) {
//op.time('Resolve chains');
self.chainSend('remove', {
query: query,
dataSet: returnArr
}, options);
//op.time('Resolve chains');
if (!options || (options && !options.noEmit)) {
this._onRemove(returnArr);
}
this._onChange();
this.deferEmit('change', {type: 'remove', data: returnArr});
}
}
if (callback) { callback(false, returnArr); }
return returnArr;
}
};
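// Usage sketch (illustrative): remove() returns the removed documents and
// also accepts an array of queries to batch several removals:
//
//   var removed = users.remove({banned: true});
//   users.remove([{_id: 1}, {_id: 2}]);                 // one result per query
//   users.remove({}, {}, function (err, arr) { ... });  // callback form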
/**
* Helper method that removes a document that matches the given id.
* @param {String} id The id of the document to remove.
* @returns {Object} The document that was removed or undefined if
* nothing was removed.
*/
Collection.prototype.removeById = function (id) {
var searchObj = {};
searchObj[this._primaryKey] = id;
return this.remove(searchObj)[0];
};
/**
* Processes a deferred action queue.
* @param {String} type The queue name to process.
* @param {Function} callback A method to call when the queue has processed.
* @param {Object=} resultObj A temp object to hold results in.
*/
Collection.prototype.processQueue = function (type, callback, resultObj) {
var self = this,
queue = this._deferQueue[type],
deferThreshold = this._deferThreshold[type],
deferTime = this._deferTime[type],
dataArr,
result;
resultObj = resultObj || {
deferred: true
};
if (queue.length) {
// Process items up to the threshold
if (queue.length > deferThreshold) {
// Grab items up to the threshold value
dataArr = queue.splice(0, deferThreshold);
} else {
// Grab all the remaining items
dataArr = queue.splice(0, queue.length);
}
result = self[type](dataArr);
switch (type) {
case 'insert':
resultObj.inserted = resultObj.inserted || [];
resultObj.failed = resultObj.failed || [];
resultObj.inserted = resultObj.inserted.concat(result.inserted);
resultObj.failed = resultObj.failed.concat(result.failed);
break;
}
// Queue another process
setTimeout(function () {
self.processQueue.call(self, type, callback, resultObj);
}, deferTime);
} else {
if (callback) { callback(resultObj); }
this._asyncComplete(type);
}
// Check if all queues are complete
if (!this.isProcessingQueue()) {
this.deferEmit('queuesComplete');
}
};
/**
* Checks if any CRUD operations have been deferred and are still waiting to
* be processed.
* @returns {Boolean} True if there are still deferred CRUD operations to process
* or false if all queues are clear.
*/
Collection.prototype.isProcessingQueue = function () {
var i;
for (i in this._deferQueue) {
if (this._deferQueue.hasOwnProperty(i)) {
if (this._deferQueue[i].length) {
return true;
}
}
}
return false;
};
/**
* Inserts a document or array of documents into the collection.
 * @param {Object|Array} data Either a document object or an array of
 * document objects to insert into the collection.
 * @param {Number=} index Optional index to insert the record at.
 * @param {Function=} callback Optional callback called once action is complete.
*/
Collection.prototype.insert = function (data, index, callback) {
if (this.isDropped()) {
throw(this.logIdentifier() + ' Cannot operate in a dropped state!');
}
if (typeof(index) === 'function') {
callback = index;
index = this._data.length;
} else if (index === undefined) {
index = this._data.length;
}
data = this.transformIn(data);
return this._insertHandle(data, index, callback);
};
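// Usage sketch (illustrative): insert() accepts a single document, an array
// of documents, an optional index and an optional callback:
//
//   users.insert({_id: 1, name: 'Sam'});
//   users.insert([{_id: 2}, {_id: 3}], 0);  // insert at front of data array
//   users.insert({_id: 4}, function (result) {
//       // result: {deferred: Boolean, inserted: [...], failed: [...]}
//   });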
/**
 * Internal handler that inserts a document or array of documents into
 * the collection.
 * @param {Object|Array} data Either a document object or an array of
 * document objects to insert into the collection.
 * @param {Number=} index Optional index to insert the record at.
 * @param {Function=} callback Optional callback called once action is complete.
*/
Collection.prototype._insertHandle = function (data, index, callback) {
var //self = this,
queue = this._deferQueue.insert,
deferThreshold = this._deferThreshold.insert,
//deferTime = this._deferTime.insert,
inserted = [],
failed = [],
insertResult,
resultObj,
i;
if (data instanceof Array) {
// Check if there are more insert items than the insert defer
// threshold, if so, break up inserts so we don't tie up the
// ui or thread
if (this._deferredCalls && data.length > deferThreshold) {
// Break up insert into blocks
this._deferQueue.insert = queue.concat(data);
this._asyncPending('insert');
// Fire off the insert queue handler
this.processQueue('insert', callback);
return;
} else {
// Loop the array and add items
for (i = 0; i < data.length; i++) {
insertResult = this._insert(data[i], index + i);
if (insertResult === true) {
inserted.push(data[i]);
} else {
failed.push({
doc: data[i],
reason: insertResult
});
}
}
}
} else {
// Store the data item
insertResult = this._insert(data, index);
if (insertResult === true) {
inserted.push(data);
} else {
failed.push({
doc: data,
reason: insertResult
});
}
}
resultObj = {
deferred: false,
inserted: inserted,
failed: failed
};
this._onInsert(inserted, failed);
if (callback) { callback(resultObj); }
this._onChange();
this.deferEmit('change', {type: 'insert', data: inserted});
return resultObj;
};
/**
* Internal method to insert a document into the collection. Will
* check for index violations before allowing the document to be inserted.
* @param {Object} doc The document to insert after passing index violation
* tests.
* @param {Number=} index Optional index to insert the document at.
 * @returns {Boolean|String} True on success, or a string describing the
 * failure reason (no document passed, trigger cancelled or index violation).
* @private
*/
Collection.prototype._insert = function (doc, index) {
if (doc) {
var self = this,
indexViolation,
triggerOperation,
insertMethod,
newDoc,
capped = this.capped(),
cappedSize = this.cappedSize();
this.ensurePrimaryKey(doc);
// Check indexes are not going to be broken by the document
indexViolation = this.insertIndexViolation(doc);
insertMethod = function (doc) {
// Add the item to the collection's indexes
self._insertIntoIndexes(doc);
// Check index overflow
if (index > self._data.length) {
index = self._data.length;
}
// Insert the document
self._dataInsertAtIndex(index, doc);
// Check capped collection status and remove first record
// if we are over the threshold
if (capped && self._data.length > cappedSize) {
// Remove the first item in the data array
self.removeById(self._data[0][self._primaryKey]);
}
//op.time('Resolve chains');
self.chainSend('insert', doc, {index: index});
//op.time('Resolve chains');
};
if (!indexViolation) {
if (self.willTrigger(self.TYPE_INSERT, self.PHASE_BEFORE) || self.willTrigger(self.TYPE_INSERT, self.PHASE_AFTER)) {
triggerOperation = {
type: 'insert'
};
if (self.processTrigger(triggerOperation, self.TYPE_INSERT, self.PHASE_BEFORE, {}, doc) !== false) {
insertMethod(doc);
if (self.willTrigger(self.TYPE_INSERT, self.PHASE_AFTER)) {
// Clone the doc so that the programmer cannot update the internal document
// on the "after" phase trigger
newDoc = self.decouple(doc);
self.processTrigger(triggerOperation, self.TYPE_INSERT, self.PHASE_AFTER, {}, newDoc);
}
} else {
// The trigger just wants to cancel the operation
return 'Trigger cancelled operation';
}
} else {
// No triggers to execute
insertMethod(doc);
}
return true;
} else {
return 'Index violation in index: ' + indexViolation;
}
}
return 'No document passed to insert';
};
/**
 * Inserts a document into the internal collection data array at
* the specified index.
* @param {number} index The index to insert at.
* @param {object} doc The document to insert.
* @private
*/
Collection.prototype._dataInsertAtIndex = function (index, doc) {
this._data.splice(index, 0, doc);
};
/**
* Removes a document from the internal collection data array at
* the specified index.
* @param {number} index The index to remove from.
* @private
*/
Collection.prototype._dataRemoveAtIndex = function (index) {
this._data.splice(index, 1);
};
/**
* Replaces all data in the collection's internal data array with
* the passed array of data.
* @param {array} data The array of data to replace existing data with.
* @private
*/
Collection.prototype._dataReplace = function (data) {
// Clear the array - using a while loop with pop is by far the
// fastest way to clear an array currently
while (this._data.length) {
this._data.pop();
}
// Append new items to the array
this._data = this._data.concat(data);
};
/**
* Inserts a document into the collection indexes.
* @param {Object} doc The document to insert.
* @private
*/
Collection.prototype._insertIntoIndexes = function (doc) {
var arr = this._indexByName,
arrIndex,
violated,
jString = this.jStringify(doc);
// Insert to primary key index
violated = this._primaryIndex.uniqueSet(doc[this._primaryKey], doc);
this._primaryCrc.uniqueSet(doc[this._primaryKey], jString);
this._crcLookup.uniqueSet(jString, doc);
// Insert into other indexes
for (arrIndex in arr) {
if (arr.hasOwnProperty(arrIndex)) {
arr[arrIndex].insert(doc);
}
}
return violated;
};
/**
* Removes a document from the collection indexes.
* @param {Object} doc The document to remove.
* @private
*/
Collection.prototype._removeFromIndexes = function (doc) {
var arr = this._indexByName,
arrIndex,
jString = this.jStringify(doc);
// Remove from primary key index
this._primaryIndex.unSet(doc[this._primaryKey]);
this._primaryCrc.unSet(doc[this._primaryKey]);
this._crcLookup.unSet(jString);
// Remove from other indexes
for (arrIndex in arr) {
if (arr.hasOwnProperty(arrIndex)) {
arr[arrIndex].remove(doc);
}
}
};
/**
* Updates collection index data for the passed document.
* @param {Object} oldDoc The old document as it was before the update.
* @param {Object} newDoc The document as it now is after the update.
* @private
*/
Collection.prototype._updateIndexes = function (oldDoc, newDoc) {
this._removeFromIndexes(oldDoc);
this._insertIntoIndexes(newDoc);
};
/**
* Rebuild collection indexes.
* @private
*/
Collection.prototype._rebuildIndexes = function () {
var arr = this._indexByName,
arrIndex;
// Remove from other indexes
for (arrIndex in arr) {
if (arr.hasOwnProperty(arrIndex)) {
arr[arrIndex].rebuild();
}
}
};
/**
* Uses the passed query to generate a new collection with results
* matching the query parameters.
*
* @param {Object} query The query object to generate the subset with.
* @param {Object=} options An options object.
* @returns {*}
*/
Collection.prototype.subset = function (query, options) {
var result = this.find(query, options);
return new Collection()
.subsetOf(this)
.primaryKey(this._primaryKey)
.setData(result);
};
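// Usage sketch (illustrative): subset() materialises the query results as a
// new collection that keeps a reference to its parent:
//
//   var active = users.subset({active: true});
//   active.isSubsetOf(users);   // true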
/**
* Gets / sets the collection that this collection is a subset of.
* @param {Collection=} collection The collection to set as the parent of this subset.
* @returns {Collection}
*/
Shared.synthesize(Collection.prototype, 'subsetOf');
/**
* Checks if the collection is a subset of the passed collection.
* @param {Collection} collection The collection to test against.
* @returns {Boolean} True if the passed collection is the parent of
* the current collection.
*/
Collection.prototype.isSubsetOf = function (collection) {
return this._subsetOf === collection;
};
/**
* Find the distinct values for a specified field across a single collection and
* returns the results in an array.
* @param {String} key The field path to return distinct values for e.g. "person.name".
* @param {Object=} query The query to use to filter the documents used to return values from.
* @param {Object=} options The query options to use when running the query.
* @returns {Array}
*/
Collection.prototype.distinct = function (key, query, options) {
if (this.isDropped()) {
throw(this.logIdentifier() + ' Cannot operate in a dropped state!');
}
var data = this.find(query, options),
pathSolver = new Path(key),
valueUsed = {},
distinctValues = [],
value,
i;
// Loop the data and build array of distinct values
for (i = 0; i < data.length; i++) {
value = pathSolver.value(data[i])[0];
if (value && !valueUsed[value]) {
valueUsed[value] = true;
distinctValues.push(value);
}
}
return distinctValues;
};
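// Usage sketch (illustrative): distinct() resolves the key path against each
// matching document and de-duplicates the resulting values:
//
//   users.distinct('address.city');                  // e.g. ['London', 'Paris']
//   users.distinct('address.city', {active: true});  // filter documents first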
/**
 * Helper method to find a document by its id.
 * @param {String} id The id of the document.
 * @param {Object=} options The options object, allowed keys are sort and limit.
 * @returns {Object} The matching document or undefined if none was found.
*/
Collection.prototype.findById = function (id, options) {
var searchObj = {};
searchObj[this._primaryKey] = id;
return this.find(searchObj, options)[0];
};
/**
* Finds all documents that contain the passed string or search object
* regardless of where the string might occur within the document. This
* will match strings from the start, middle or end of the document's
* string (partial match).
 * @param {String|Object} search The string to search for (case sensitive) or
 * a query object to pass straight through to find().
 * @param {Object=} options A standard find() options object.
* @returns {Array} An array of documents that matched the search string.
*/
Collection.prototype.peek = function (search, options) {
// Loop all items
var arr = this._data,
arrCount = arr.length,
arrIndex,
arrItem,
tempColl = new Collection(),
typeOfSearch = typeof search;
if (typeOfSearch === 'string') {
for (arrIndex = 0; arrIndex < arrCount; arrIndex++) {
// Get json representation of object
arrItem = this.jStringify(arr[arrIndex]);
// Check if string exists in object json
if (arrItem.indexOf(search) > -1) {
// Add this item to the temp collection
tempColl.insert(arr[arrIndex]);
}
}
return tempColl.find({}, options);
} else {
return this.find(search, options);
}
};
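// Usage sketch (illustrative): peek() JSON-stringifies each document and runs
// a case-sensitive substring test, so the search can match keys as well as
// values anywhere in the document:
//
//   users.peek('Lond');          // partial string match
//   users.peek({name: 'Sam'});   // non-string input falls through to find()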
/**
* Provides a query plan / operations log for a query.
* @param {Object} query The query to execute.
* @param {Object=} options Optional options object.
* @returns {Object} The query plan.
*/
Collection.prototype.explain = function (query, options) {
var result = this.find(query, options);
return result.__fdbOp._data;
};
/**
* Generates an options object with default values or adds default
* values to a passed object if those values are not currently set
* to anything.
* @param {object=} obj Optional options object to modify.
* @returns {object} The options object.
*/
Collection.prototype.options = function (obj) {
obj = obj || {};
obj.$decouple = obj.$decouple !== undefined ? obj.$decouple : true;
obj.$explain = obj.$explain !== undefined ? obj.$explain : false;
return obj;
};
/**
* Queries the collection based on the query object passed.
* @param {Object} query The query key/values that a document must match in
* order for it to be returned in the result array.
* @param {Object=} options An optional options object.
* @param {Function=} callback !! DO NOT USE, THIS IS NON-OPERATIONAL !!
* Optional callback. If specified the find process
* will not return a value and will assume that you wish to operate under an
* async mode. This will break up large find requests into smaller chunks and
* process them in a non-blocking fashion allowing large datasets to be queried
* without causing the browser UI to pause. Results from this type of operation
* will be passed back to the callback once completed.
*
* @returns {Array} The results array from the find operation, containing all
* documents that matched the query.
*/
Collection.prototype.find = function (query, options, callback) {
// Convert queries from mongo dot notation to forerunner queries
if (this.mongoEmulation()) {
this.convertToFdb(query);
}
if (callback) {
// Check the size of the collection's data array
// Split operation into smaller tasks and callback when complete
callback('Callbacks for the find() operation are not yet implemented!', []);
return [];
}
return this._find.apply(this, arguments);
};
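// Usage sketch (illustrative) of the query options handled by _find() below:
//
//   var page = users.find({active: true}, {
//       $orderBy: {name: 1},   // sort ascending by name
//       $limit: 10,            // cap the number of results
//       $page: 2,              // with $limit, skip ahead to page 2
//       name: 1                // projection: keep only name (plus primary key)
//   });
//
// The returned array carries paging data on its $cursor property and the
// operation metrics on __fdbOp.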
Collection.prototype._find = function (query, options) {
if (this.isDropped()) {
throw(this.logIdentifier() + ' Cannot operate in a dropped state!');
}
// TODO: This method is quite long, break into smaller pieces
query = query || {};
options = this.options(options);
var op = this._metrics.create('find'),
pk = this.primaryKey(),
self = this,
analysis,
scanLength,
requiresTableScan = true,
resultArr,
joinCollectionIndex,
joinIndex,
joinCollection = {},
joinQuery,
joinPath,
joinCollectionName,
joinCollectionInstance,
joinMatch,
joinMatchIndex,
joinSearchQuery,
joinSearchOptions,
joinMulti,
joinRequire,
joinFindResults,
joinFindResult,
joinItem,
joinPrefix,
resultCollectionName,
resultIndex,
resultRemove = [],
index,
i, j, k, l,
fieldListOn = [],
fieldListOff = [],
elemMatchPathSolver,
elemMatchSubArr,
elemMatchSpliceArr,
matcherTmpOptions = {},
result,
cursor = {},
pathSolver,
//renameFieldMethod,
//renameFieldPath,
matcher = function (doc) {
return self._match(doc, query, options, 'and', matcherTmpOptions);
};
op.start();
if (query) {
// Get query analysis to execute best optimised code path
op.time('analyseQuery');
analysis = this._analyseQuery(self.decouple(query), options, op);
op.time('analyseQuery');
op.data('analysis', analysis);
if (analysis.hasJoin && analysis.queriesJoin) {
// The query has a join and tries to limit by it's joined data
// Get an instance reference to the join collections
op.time('joinReferences');
for (joinIndex = 0; joinIndex < analysis.joinsOn.length; joinIndex++) {
joinCollectionName = analysis.joinsOn[joinIndex];
joinPath = new Path(analysis.joinQueries[joinCollectionName]);
joinQuery = joinPath.value(query)[0];
joinCollection[analysis.joinsOn[joinIndex]] = this._db.collection(analysis.joinsOn[joinIndex]).subset(joinQuery);
// Remove join clause from main query
delete query[analysis.joinQueries[joinCollectionName]];
}
op.time('joinReferences');
}
// Check if an index lookup can be used to return this result
if (analysis.indexMatch.length && (!options || (options && !options.$skipIndex))) {
op.data('index.potential', analysis.indexMatch);
op.data('index.used', analysis.indexMatch[0].index);
// Get the data from the index
op.time('indexLookup');
resultArr = analysis.indexMatch[0].lookup || [];
op.time('indexLookup');
// Check if the index coverage is all keys, if not we still need to table scan it
if (analysis.indexMatch[0].keyData.totalKeyCount === analysis.indexMatch[0].keyData.score) {
// Don't require a table scan to find relevant documents
requiresTableScan = false;
}
} else {
op.flag('usedIndex', false);
}
if (requiresTableScan) {
if (resultArr && resultArr.length) {
scanLength = resultArr.length;
op.time('tableScan: ' + scanLength);
// Filter the source data and return the result
resultArr = resultArr.filter(matcher);
} else {
// Filter the source data and return the result
scanLength = this._data.length;
op.time('tableScan: ' + scanLength);
resultArr = this._data.filter(matcher);
}
op.time('tableScan: ' + scanLength);
}
// Order the array if we were passed a sort clause
if (options.$orderBy) {
op.time('sort');
resultArr = this.sort(options.$orderBy, resultArr);
op.time('sort');
}
if (options.$page !== undefined && options.$limit !== undefined) {
// Record paging data
cursor.page = options.$page;
cursor.pages = Math.ceil(resultArr.length / options.$limit);
cursor.records = resultArr.length;
// Check if we actually need to apply the paging logic
if (options.$page && options.$limit > 0) {
op.data('cursor', cursor);
// Skip to the page specified based on limit
resultArr.splice(0, options.$page * options.$limit);
}
}
if (options.$skip) {
cursor.skip = options.$skip;
// Skip past the number of records specified
resultArr.splice(0, options.$skip);
op.data('skip', options.$skip);
}
if (options.$limit && resultArr && resultArr.length > options.$limit) {
cursor.limit = options.$limit;
resultArr.length = options.$limit;
op.data('limit', options.$limit);
}
if (options.$decouple) {
// Now decouple the data from the original objects
op.time('decouple');
resultArr = this.decouple(resultArr);
op.time('decouple');
op.data('flag.decouple', true);
}
// Now process any joins on the final data
if (options.$join) {
for (joinCollectionIndex = 0; joinCollectionIndex < options.$join.length; joinCollectionIndex++) {
for (joinCollectionName in options.$join[joinCollectionIndex]) {
if (options.$join[joinCollectionIndex].hasOwnProperty(joinCollectionName)) {
// Set the key to store the join result in to the collection name by default
resultCollectionName = joinCollectionName;
// Get the join collection instance from the DB
if (joinCollection[joinCollectionName]) {
joinCollectionInstance = joinCollection[joinCollectionName];
} else {
joinCollectionInstance = this._db.collection(joinCollectionName);
}
// Get the match data for the join
joinMatch = options.$join[joinCollectionIndex][joinCollectionName];
// Loop our result data array
for (resultIndex = 0; resultIndex < resultArr.length; resultIndex++) {
// Loop the join conditions and build a search object from them
joinSearchQuery = {};
joinMulti = false;
joinRequire = false;
joinPrefix = '';
for (joinMatchIndex in joinMatch) {
if (joinMatch.hasOwnProperty(joinMatchIndex)) {
// Check the join condition name for a special command operator
if (joinMatchIndex.substr(0, 1) === '$') {
// Special command
switch (joinMatchIndex) {
case '$where':
if (joinMatch[joinMatchIndex].query) {
// Commented old code here, new one does dynamic reverse lookups
//joinSearchQuery = joinMatch[joinMatchIndex].query;
joinSearchQuery = self._resolveDynamicQuery(joinMatch[joinMatchIndex].query, resultArr[resultIndex]);
}
if (joinMatch[joinMatchIndex].options) { joinSearchOptions = joinMatch[joinMatchIndex].options; }
break;
case '$as':
// Rename the collection when stored in the result document
resultCollectionName = joinMatch[joinMatchIndex];
break;
case '$multi':
// Return an array of documents instead of a single matching document
joinMulti = joinMatch[joinMatchIndex];
break;
case '$require':
// Remove the result item if no matching join data is found
joinRequire = joinMatch[joinMatchIndex];
break;
case '$prefix':
// Add a prefix to properties mixed in
joinPrefix = joinMatch[joinMatchIndex];
break;
default:
break;
}
} else {
// Get the data to match against and store in the search object
// Resolve complex referenced query
joinSearchQuery[joinMatchIndex] = self._resolveDynamicQuery(joinMatch[joinMatchIndex], resultArr[resultIndex]);
}
}
}
// Do a find on the target collection against the match data
joinFindResults = joinCollectionInstance.find(joinSearchQuery, joinSearchOptions);
// Check if we require a joined row to allow the result item
if (!joinRequire || (joinRequire && joinFindResults[0])) {
// Join is not required or condition is met
if (resultCollectionName === '$root') {
// The property name to store the join results in is $root
// which means we need to mixin the results but this only
// works if joinMulti is disabled
if (joinMulti !== false) {
// Throw an exception here as this join is not physically possible!
throw(this.logIdentifier() + ' Cannot combine [$as: "$root"] with [$multi: true] in $join clause!');
}
// Mixin the result
joinFindResult = joinFindResults[0];
joinItem = resultArr[resultIndex];
for (l in joinFindResult) {
if (joinFindResult.hasOwnProperty(l) && joinItem[joinPrefix + l] === undefined) {
// Properties are only mixed in if they do not already exist
// in the target item (are undefined). Using a prefix denoted via
// $prefix is a good way to prevent property name conflicts
joinItem[joinPrefix + l] = joinFindResult[l];
}
}
} else {
resultArr[resultIndex][resultCollectionName] = joinMulti === false ? joinFindResults[0] : joinFindResults;
}
} else {
// Join required but condition not met, add item to removal queue
resultRemove.push(resultArr[resultIndex]);
}
}
}
}
}
op.data('flag.join', true);
}
// Process removal queue
if (resultRemove.length) {
op.time('removalQueue');
for (i = 0; i < resultRemove.length; i++) {
index = resultArr.indexOf(resultRemove[i]);
if (index > -1) {
resultArr.splice(index, 1);
}
}
op.time('removalQueue');
}
if (options.$transform) {
op.time('transform');
for (i = 0; i < resultArr.length; i++) {
resultArr.splice(i, 1, options.$transform(resultArr[i]));
}
op.time('transform');
op.data('flag.transform', true);
}
// Process transforms
if (this._transformEnabled && this._transformOut) {
op.time('transformOut');
resultArr = this.transformOut(resultArr);
op.time('transformOut');
}
op.data('results', resultArr.length);
} else {
resultArr = [];
}
// Check for an $as operator in the options object and if it exists
// iterate over the fields and generate a rename function that will
// operate over the entire returned data array and rename each object's
// fields to their new names
// TODO: Enable $as in collection find to allow renaming fields
/*if (options.$as) {
renameFieldPath = new Path();
renameFieldMethod = function (obj, oldFieldPath, newFieldName) {
renameFieldPath.path(oldFieldPath);
renameFieldPath.rename(newFieldName);
};
for (i in options.$as) {
if (options.$as.hasOwnProperty(i)) {
}
}
}*/
if (!options.$aggregate) {
// Generate a list of fields to limit data by
// Each property starts off being enabled by default (= 1) then
// if any property is explicitly specified as 1 then all switch to
// zero except _id.
//
// Any that are explicitly set to zero are switched off.
op.time('scanFields');
for (i in options) {
if (options.hasOwnProperty(i) && i.indexOf('$') !== 0) {
if (options[i] === 1) {
fieldListOn.push(i);
} else if (options[i] === 0) {
fieldListOff.push(i);
}
}
}
op.time('scanFields');
// Limit returned fields by the options data
if (fieldListOn.length || fieldListOff.length) {
op.data('flag.limitFields', true);
op.data('limitFields.on', fieldListOn);
op.data('limitFields.off', fieldListOff);
op.time('limitFields');
// We have explicit fields switched on or off
for (i = 0; i < resultArr.length; i++) {
result = resultArr[i];
for (j in result) {
if (result.hasOwnProperty(j)) {
if (fieldListOn.length) {
// We have explicit fields switched on so remove all fields
// that are not explicitly switched on
// Check if the field name is not the primary key
if (j !== pk) {
if (fieldListOn.indexOf(j) === -1) {
// This field is not in the on list, remove it
delete result[j];
}
}
}
if (fieldListOff.length) {
// We have explicit fields switched off so remove fields
// that are explicitly switched off
if (fieldListOff.indexOf(j) > -1) {
// This field is in the off list, remove it
delete result[j];
}
}
}
}
}
op.time('limitFields');
}
// Now run any projections on the data required
if (options.$elemMatch) {
op.data('flag.elemMatch', true);
op.time('projection-elemMatch');
for (i in options.$elemMatch) {
if (options.$elemMatch.hasOwnProperty(i)) {
elemMatchPathSolver = new Path(i);
// Loop the results array
for (j = 0; j < resultArr.length; j++) {
elemMatchSubArr = elemMatchPathSolver.value(resultArr[j])[0];
// Check we have a sub-array to loop
if (elemMatchSubArr && elemMatchSubArr.length) {
// Loop the sub-array and check for projection query matches
for (k = 0; k < elemMatchSubArr.length; k++) {
// Check if the current item in the sub-array matches the projection query
if (self._match(elemMatchSubArr[k], options.$elemMatch[i], options, '', {})) {
// The item matches the projection query so set the sub-array
// to an array that ONLY contains the matching item and then
// exit the loop since we only want to match the first item
elemMatchPathSolver.set(resultArr[j], i, [elemMatchSubArr[k]]);
break;
}
}
}
}
}
}
op.time('projection-elemMatch');
}
if (options.$elemsMatch) {
op.data('flag.elemsMatch', true);
op.time('projection-elemsMatch');
for (i in options.$elemsMatch) {
if (options.$elemsMatch.hasOwnProperty(i)) {
elemMatchPathSolver = new Path(i);
// Loop the results array
for (j = 0; j < resultArr.length; j++) {
elemMatchSubArr = elemMatchPathSolver.value(resultArr[j])[0];
// Check we have a sub-array to loop
if (elemMatchSubArr && elemMatchSubArr.length) {
elemMatchSpliceArr = [];
// Loop the sub-array and check for projection query matches
for (k = 0; k < elemMatchSubArr.length; k++) {
// Check if the current item in the sub-array matches the projection query
if (self._match(elemMatchSubArr[k], options.$elemsMatch[i], options, '', {})) {
// The item matches the projection query so add it to the final array
elemMatchSpliceArr.push(elemMatchSubArr[k]);
}
}
// Now set the final sub-array to the matched items
elemMatchPathSolver.set(resultArr[j], i, elemMatchSpliceArr);
}
}
}
}
op.time('projection-elemsMatch');
}
}
// Process aggregation
if (options.$aggregate) {
op.data('flag.aggregate', true);
op.time('aggregate');
pathSolver = new Path(options.$aggregate);
resultArr = pathSolver.value(resultArr);
op.time('aggregate');
}
op.stop();
resultArr.__fdbOp = op;
resultArr.$cursor = cursor;
return resultArr;
};
Collection.prototype._resolveDynamicQuery = function (query, item) {
var self = this,
newQuery,
propType,
propVal,
pathResult,
i;
if (typeof query === 'string') {
// Check if the property name starts with a back-reference
if (query.substr(0, 3) === '$$.') {
// Fill the query with a back-referenced value
pathResult = new Path(query.substr(3, query.length - 3)).value(item);
} else {
pathResult = new Path(query).value(item);
}
if (pathResult.length > 1) {
return {$in: pathResult};
} else {
return pathResult[0];
}
}
newQuery = {};
for (i in query) {
if (query.hasOwnProperty(i)) {
propType = typeof query[i];
propVal = query[i];
switch (propType) {
case 'string':
// Check if the property name starts with a back-reference
if (propVal.substr(0, 3) === '$$.') {
// Fill the query with a back-referenced value
newQuery[i] = new Path(propVal.substr(3, propVal.length - 3)).value(item)[0];
} else {
newQuery[i] = propVal;
}
break;
case 'object':
newQuery[i] = self._resolveDynamicQuery(propVal, item);
break;
default:
newQuery[i] = propVal;
break;
}
}
}
return newQuery;
};
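// Join usage sketch (illustrative): the '$$.' prefix resolved above lets a
// $join clause pull values from the current result document. Collection and
// field names here are assumptions for the example:
//
//   orders.find({}, {
//       $join: [{
//           user: {
//               _id: '$$.userId',  // back-reference into the order document
//               $as: 'customer',   // store the joined data under "customer"
//               $require: true,    // drop orders without a matching user
//               $multi: false      // single document rather than an array
//           }
//       }]
//   });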
/**
* Returns one document that satisfies the specified query criteria. If multiple
* documents satisfy the query, this method returns the first document to match
* the query.
* @returns {*}
*/
Collection.prototype.findOne = function () {
return (this.find.apply(this, arguments))[0];
};
/**
* Gets the index in the collection data array of the first item matched by
* the passed query object.
* @param {Object} query The query to run to find the item to return the index of.
* @param {Object=} options An options object.
* @returns {Number}
*/
Collection.prototype.indexOf = function (query, options) {
var item = this.find(query, {$decouple: false})[0],
sortedData;
if (item) {
		if (!options || (options && !options.$orderBy)) {
// Basic lookup from order of insert
return this._data.indexOf(item);
} else {
// Trying to locate index based on query with sort order
options.$decouple = false;
sortedData = this.find(query, options);
return sortedData.indexOf(item);
}
}
return -1;
};
/**
* Returns the index of the document identified by the passed item's primary key.
* @param {*} itemLookup The document whose primary key should be used to lookup
* or the id to lookup.
* @param {Object=} options An options object.
* @returns {Number} The index the item with the matching primary key is occupying.
*/
Collection.prototype.indexOfDocById = function (itemLookup, options) {
var item,
sortedData;
if (typeof itemLookup !== 'object') {
item = this._primaryIndex.get(itemLookup);
} else {
item = this._primaryIndex.get(itemLookup[this._primaryKey]);
}
if (item) {
		if (!options || (options && !options.$orderBy)) {
// Basic lookup
return this._data.indexOf(item);
} else {
// Sorted lookup
options.$decouple = false;
sortedData = this.find({}, options);
return sortedData.indexOf(item);
}
}
return -1;
};
/**
 * Removes a document from the collection by its index in the collection's
* data array.
* @param {Number} index The index of the document to remove.
* @returns {Object} The document that has been removed or false if none was
* removed.
*/
Collection.prototype.removeByIndex = function (index) {
var doc,
docId;
doc = this._data[index];
if (doc !== undefined) {
doc = this.decouple(doc);
docId = doc[this.primaryKey()];
return this.removeById(docId);
}
return false;
};
/**
* Gets / sets the collection transform options.
* @param {Object} obj A collection transform options object.
* @returns {*}
*/
Collection.prototype.transform = function (obj) {
if (obj !== undefined) {
if (typeof obj === "object") {
if (obj.enabled !== undefined) {
this._transformEnabled = obj.enabled;
}
if (obj.dataIn !== undefined) {
this._transformIn = obj.dataIn;
}
if (obj.dataOut !== undefined) {
this._transformOut = obj.dataOut;
}
} else {
this._transformEnabled = obj !== false;
}
return this;
}
return {
enabled: this._transformEnabled,
dataIn: this._transformIn,
dataOut: this._transformOut
};
};
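// Usage sketch (illustrative): transform() wires per-document converters that
// run on data entering (insert / update) and leaving (find) the collection:
//
//   coll.transform({
//       enabled: true,
//       dataIn: function (doc) { doc.stored = Date.now(); return doc; },
//       dataOut: function (doc) { delete doc.stored; return doc; }
//   });
//   coll.transform(false);   // disable without discarding the converters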
/**
* Transforms data using the set transformIn method.
* @param {Object} data The data to transform.
* @returns {*}
*/
Collection.prototype.transformIn = function (data) {
if (this._transformEnabled && this._transformIn) {
if (data instanceof Array) {
var finalArr = [], i;
for (i = 0; i < data.length; i++) {
finalArr[i] = this._transformIn(data[i]);
}
return finalArr;
} else {
return this._transformIn(data);
}
}
return data;
};
/**
* Transforms data using the set transformOut method.
* @param {Object} data The data to transform.
* @returns {*}
*/
Collection.prototype.transformOut = function (data) {
if (this._transformEnabled && this._transformOut) {
if (data instanceof Array) {
var finalArr = [], i;
for (i = 0; i < data.length; i++) {
finalArr[i] = this._transformOut(data[i]);
}
return finalArr;
} else {
return this._transformOut(data);
}
}
return data;
};
/**
* Sorts an array of documents by the given sort path.
 * @param {Object} sortObj An object whose keys are the paths to sort by and
 * whose values are the sort directions (1 ascending, -1 descending).
* @param {Array} arr The array of documents to sort.
* @returns {Array}
*/
Collection.prototype.sort = function (sortObj, arr) {
// Make sure we have an array object
arr = arr || [];
var sortArr = [],
sortKey,
sortSingleObj;
for (sortKey in sortObj) {
if (sortObj.hasOwnProperty(sortKey)) {
sortSingleObj = {};
sortSingleObj[sortKey] = sortObj[sortKey];
sortSingleObj.___fdbKey = String(sortKey);
sortArr.push(sortSingleObj);
}
}
if (sortArr.length < 2) {
// There is only one sort criteria, do a simple sort and return it
return this._sort(sortObj, arr);
} else {
return this._bucketSort(sortArr, arr);
}
};
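// Usage sketch (illustrative): multiple sort keys are resolved via the bucket
// sort below, later keys breaking ties left by earlier ones:
//
//   coll.sort({lastName: 1, age: -1}, docs);  // asc lastName, then desc age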
/**
* Takes array of sort paths and sorts them into buckets before returning final
* array fully sorted by multi-keys.
* @param keyArr
* @param arr
* @returns {*}
* @private
*/
Collection.prototype._bucketSort = function (keyArr, arr) {
var keyObj = keyArr.shift(),
arrCopy,
bucketData,
bucketOrder,
bucketKey,
buckets,
i,
finalArr = [];
if (keyArr.length > 0) {
// Sort array by bucket key
arr = this._sort(keyObj, arr);
// Split items into buckets
bucketData = this.bucket(keyObj.___fdbKey, arr);
bucketOrder = bucketData.order;
buckets = bucketData.buckets;
// Loop buckets and sort contents
for (i = 0; i < bucketOrder.length; i++) {
bucketKey = bucketOrder[i];
arrCopy = [].concat(keyArr);
finalArr = finalArr.concat(this._bucketSort(arrCopy, buckets[bucketKey]));
}
return finalArr;
} else {
return this._sort(keyObj, arr);
}
};
/**
* Sorts array by individual sort path.
* @param key
* @param arr
* @returns {Array|*}
* @private
*/
Collection.prototype._sort = function (key, arr) {
var self = this,
sorterMethod,
pathSolver = new Path(),
dataPath = pathSolver.parse(key, true)[0];
pathSolver.path(dataPath.path);
if (dataPath.value === 1) {
// Sort ascending
sorterMethod = function (a, b) {
var valA = pathSolver.value(a)[0],
valB = pathSolver.value(b)[0];
return self.sortAsc(valA, valB);
};
} else if (dataPath.value === -1) {
// Sort descending
sorterMethod = function (a, b) {
var valA = pathSolver.value(a)[0],
valB = pathSolver.value(b)[0];
return self.sortDesc(valA, valB);
};
} else {
throw(this.logIdentifier() + ' $orderBy clause has invalid direction: ' + dataPath.value + ', accepted values are 1 or -1 for ascending or descending!');
}
return arr.sort(sorterMethod);
};
/**
* Takes an array of objects and returns a new object with the array items
* split into buckets by the passed key.
* @param {String} key The key to split the array into buckets by.
* @param {Array} arr An array of objects.
* @returns {Object}
*/
Collection.prototype.bucket = function (key, arr) {
var i,
oldField,
field,
fieldArr = [],
buckets = {};
for (i = 0; i < arr.length; i++) {
field = String(arr[i][key]);
if (oldField !== field) {
fieldArr.push(field);
oldField = field;
}
buckets[field] = buckets[field] || [];
buckets[field].push(arr[i]);
}
return {
buckets: buckets,
order: fieldArr
};
};
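// Usage sketch (illustrative): bucket() expects the array to arrive already
// sorted by the key (as _bucketSort guarantees) so that each value forms one
// contiguous run:
//
//   coll.bucket('city', sortedDocs);
//   // -> {buckets: {LA: [...], NY: [...]}, order: ['LA', 'NY']}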
/**
* Internal method that takes a search query and options and returns an object
* containing details about the query which can be used to optimise the search.
*
* @param query
* @param options
* @param op
* @returns {Object}
* @private
*/
Collection.prototype._analyseQuery = function (query, options, op) {
var analysis = {
queriesOn: [this._name],
indexMatch: [],
hasJoin: false,
queriesJoin: false,
joinQueries: {},
query: query,
options: options
},
joinCollectionIndex,
joinCollectionName,
joinCollections = [],
joinCollectionReferences = [],
queryPath,
index,
indexMatchData,
indexRef,
indexRefName,
indexLookup,
pathSolver,
queryKeyCount,
i;
// Check if the query is a primary key lookup
op.time('checkIndexes');
pathSolver = new Path();
queryKeyCount = pathSolver.countKeys(query);
if (queryKeyCount) {
if (query[this._primaryKey] !== undefined) {
// Return item via primary key possible
op.time('checkIndexMatch: Primary Key');
analysis.indexMatch.push({
lookup: this._primaryIndex.lookup(query, options),
keyData: {
matchedKeys: [this._primaryKey],
totalKeyCount: queryKeyCount,
score: 1
},
index: this._primaryIndex
});
op.time('checkIndexMatch: Primary Key');
}
// Check if an index can speed up the query
for (i in this._indexById) {
if (this._indexById.hasOwnProperty(i)) {
indexRef = this._indexById[i];
indexRefName = indexRef.name();
op.time('checkIndexMatch: ' + indexRefName);
indexMatchData = indexRef.match(query, options);
if (indexMatchData.score > 0) {
// This index can be used, store it
indexLookup = indexRef.lookup(query, options);
analysis.indexMatch.push({
lookup: indexLookup,
keyData: indexMatchData,
index: indexRef
});
}
op.time('checkIndexMatch: ' + indexRefName);
if (indexMatchData.score === queryKeyCount) {
// Found an optimal index, do not check for any more
break;
}
}
}
op.time('checkIndexes');
// Sort array descending on index key count (effectively a measure of relevance to the query)
if (analysis.indexMatch.length > 1) {
op.time('findOptimalIndex');
analysis.indexMatch.sort(function (a, b) {
if (a.keyData.score > b.keyData.score) {
// This index has a higher score than the other
return -1;
}
if (a.keyData.score < b.keyData.score) {
// This index has a lower score than the other
return 1;
}
// The indexes have the same score but can still be compared by the number of records
// they return from the query. The fewer records they return the better so order by
// record count
				return a.lookup.length - b.lookup.length;
});
op.time('findOptimalIndex');
}
}
// Check for join data
if (options.$join) {
analysis.hasJoin = true;
// Loop all join operations
for (joinCollectionIndex = 0; joinCollectionIndex < options.$join.length; joinCollectionIndex++) {
// Loop the join collections and keep a reference to them
for (joinCollectionName in options.$join[joinCollectionIndex]) {
if (options.$join[joinCollectionIndex].hasOwnProperty(joinCollectionName)) {
joinCollections.push(joinCollectionName);
// Check if the join uses an $as operator
if ('$as' in options.$join[joinCollectionIndex][joinCollectionName]) {
joinCollectionReferences.push(options.$join[joinCollectionIndex][joinCollectionName].$as);
} else {
joinCollectionReferences.push(joinCollectionName);
}
}
}
}
// Loop the join collection references and determine if the query references
// any of the collections that are used in the join. If there no queries against
// joined collections the find method can use a code path optimised for this.
// Queries against joined collections requires the joined collections to be filtered
// first and then joined so requires a little more work.
for (index = 0; index < joinCollectionReferences.length; index++) {
// Check if the query references any collection data that the join will create
queryPath = this._queryReferencesCollection(query, joinCollectionReferences[index], '');
if (queryPath) {
analysis.joinQueries[joinCollections[index]] = queryPath;
analysis.queriesJoin = true;
}
}
analysis.joinsOn = joinCollections;
analysis.queriesOn = analysis.queriesOn.concat(joinCollections);
}
return analysis;
};
/**
* Checks if the passed query references this collection.
* @param query
* @param collection
* @param path
* @returns {*}
* @private
*/
Collection.prototype._queryReferencesCollection = function (query, collection, path) {
var i;
for (i in query) {
if (query.hasOwnProperty(i)) {
// Check if this key is a reference match
if (i === collection) {
if (path) { path += '.'; }
return path + i;
} else {
if (typeof(query[i]) === 'object') {
// Recurse
if (path) { path += '.'; }
path += i;
return this._queryReferencesCollection(query[i], collection, path);
}
}
}
}
return false;
};
/**
 * Returns the number of documents currently in the collection or, if a
 * query is passed, the number of documents that match the query.
 * @param {Object=} query Optional query to count matching documents for.
 * @param {Object=} options Optional options object passed through to find().
 * @returns {Number}
 */
Collection.prototype.count = function (query, options) {
if (!query) {
return this._data.length;
} else {
// Run query and return count
return this.find(query, options).length;
}
};
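// Usage sketch (illustrative; query values are hypothetical): count() without
// arguments reads the underlying array length, while a query delegates to find().
//   coll.count();                  // total documents
//   coll.count({age: {$gt: 21}});  // documents matching the query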
/**
* Finds sub-documents from the collection's documents.
* @param {Object} match The query object to use when matching parent documents
* from which the sub-documents are queried.
* @param {String} path The path string used to identify the key in which
* sub-documents are stored in parent documents.
* @param {Object=} subDocQuery The query to use when matching which sub-documents
* to return.
* @param {Object=} subDocOptions The options object to use when querying for
* sub-documents.
* @returns {*}
*/
Collection.prototype.findSub = function (match, path, subDocQuery, subDocOptions) {
var pathHandler = new Path(path),
docArr = this.find(match),
docCount = docArr.length,
docIndex,
subDocArr,
subDocCollection = this._db.collection('__FDB_temp_' + this.objectId()),
subDocResults,
resultObj = {
parents: docCount,
subDocTotal: 0,
subDocs: [],
pathFound: false,
err: ''
};
subDocOptions = subDocOptions || {};
for (docIndex = 0; docIndex < docCount; docIndex++) {
subDocArr = pathHandler.value(docArr[docIndex])[0];
if (subDocArr) {
subDocCollection.setData(subDocArr);
subDocResults = subDocCollection.find(subDocQuery, subDocOptions);
if (subDocOptions.returnFirst && subDocResults.length) {
return subDocResults[0];
}
if (subDocOptions.$split) {
resultObj.subDocs.push(subDocResults);
} else {
resultObj.subDocs = resultObj.subDocs.concat(subDocResults);
}
resultObj.subDocTotal += subDocResults.length;
resultObj.pathFound = true;
}
}
// Drop the sub-document collection
subDocCollection.drop();
		// Record an error if no parent documents contained the given path
		if (!resultObj.pathFound) {
			resultObj.err = 'No objects found in the parent documents with a matching path of: ' + path;
		}
		// Check if the call should return stats; if not, return only the subDocs array
		if (subDocOptions.$stats) {
			return resultObj;
		}
		return resultObj.subDocs;
};
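// Usage sketch (illustrative; the document shape below is hypothetical):
// query the "orders" sub-array of matching customer documents.
//   var subDocs = coll.findSub(
//       {type: 'customer'},        // match parent documents
//       'orders',                  // path to the sub-document array
//       {total: {$gt: 100}},       // match sub-documents
//       {$stats: false}            // return just the sub-document array
//   );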
/**
* Finds the first sub-document from the collection's documents that matches
* the subDocQuery parameter.
* @param {Object} match The query object to use when matching parent documents
* from which the sub-documents are queried.
* @param {String} path The path string used to identify the key in which
* sub-documents are stored in parent documents.
* @param {Object=} subDocQuery The query to use when matching which sub-documents
* to return.
* @param {Object=} subDocOptions The options object to use when querying for
* sub-documents.
* @returns {Object}
*/
Collection.prototype.findSubOne = function (match, path, subDocQuery, subDocOptions) {
return this.findSub(match, path, subDocQuery, subDocOptions)[0];
};
/**
* Checks that the passed document will not violate any index rules if
* inserted into the collection.
* @param {Object} doc The document to check indexes against.
* @returns {Boolean} Either false (no violation occurred) or true if
* a violation was detected.
*/
Collection.prototype.insertIndexViolation = function (doc) {
var indexViolated,
arr = this._indexByName,
arrIndex,
arrItem;
// Check the item's primary key is not already in use
if (this._primaryIndex.get(doc[this._primaryKey])) {
indexViolated = this._primaryIndex;
} else {
// Check violations of other indexes
for (arrIndex in arr) {
if (arr.hasOwnProperty(arrIndex)) {
arrItem = arr[arrIndex];
if (arrItem.unique()) {
if (arrItem.violation(doc)) {
indexViolated = arrItem;
break;
}
}
}
}
}
return indexViolated ? indexViolated.name() : false;
};
/**
* Creates an index on the specified keys.
* @param {Object} keys The object containing keys to index.
* @param {Object} options An options object.
* @returns {*}
*/
Collection.prototype.ensureIndex = function (keys, options) {
if (this.isDropped()) {
throw(this.logIdentifier() + ' Cannot operate in a dropped state!');
}
this._indexByName = this._indexByName || {};
this._indexById = this._indexById || {};
var index,
time = {
start: new Date().getTime()
};
if (options) {
switch (options.type) {
case 'hashed':
index = new IndexHashMap(keys, options, this);
break;
case 'btree':
index = new IndexBinaryTree(keys, options, this);
break;
default:
// Default
index = new IndexHashMap(keys, options, this);
break;
}
} else {
// Default
index = new IndexHashMap(keys, options, this);
}
// Check the index does not already exist
if (this._indexByName[index.name()]) {
// Index already exists
return {
err: 'Index with that name already exists'
};
}
if (this._indexById[index.id()]) {
// Index already exists
return {
err: 'Index with those keys already exists'
};
}
// Create the index
index.rebuild();
// Add the index
this._indexByName[index.name()] = index;
this._indexById[index.id()] = index;
time.end = new Date().getTime();
time.total = time.end - time.start;
this._lastOp = {
type: 'ensureIndex',
stats: {
time: time
}
};
return {
index: index,
id: index.id(),
name: index.name(),
state: index.state()
};
};
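// Usage sketch (illustrative; the key and option values are hypothetical):
//   var result = coll.ensureIndex({email: 1}, {unique: true, type: 'hashed'});
//   if (result.err) {
//       console.log(result.err);
//   } else {
//       console.log('Built index', result.name, result.state);
//   }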
/**
* Gets an index by it's name.
 * @param {String} name The name of the index to retrieve.
* @returns {*}
*/
Collection.prototype.index = function (name) {
if (this._indexByName) {
return this._indexByName[name];
}
};
/**
* Gets the last reporting operation's details such as run time.
* @returns {Object}
*/
Collection.prototype.lastOp = function () {
return this._metrics.list();
};
/**
* Generates a difference object that contains insert, update and remove arrays
* representing the operations to execute to make this collection have the same
* data as the one passed.
* @param {Collection} collection The collection to diff against.
* @returns {{}}
*/
Collection.prototype.diff = function (collection) {
var diff = {
insert: [],
update: [],
remove: []
};
var pm = this.primaryKey(),
arr,
arrIndex,
arrItem,
arrCount;
// Check if the primary key index of each collection can be utilised
if (pm !== collection.primaryKey()) {
throw(this.logIdentifier() + ' Diffing requires that both collections have the same primary key!');
}
// Use the collection primary key index to do the diff (super-fast)
arr = collection._data;
// Check if we have an array or another collection
while (arr && !(arr instanceof Array)) {
// We don't have an array, assign collection and get data
collection = arr;
arr = collection._data;
}
arrCount = arr.length;
// Loop the collection's data array and check for matching items
for (arrIndex = 0; arrIndex < arrCount; arrIndex++) {
arrItem = arr[arrIndex];
// Check for a matching item in this collection
if (this._primaryIndex.get(arrItem[pm])) {
// Matching item exists, check if the data is the same
if (this._primaryCrc.get(arrItem[pm]) !== collection._primaryCrc.get(arrItem[pm])) {
// The documents exist in both collections but data differs, update required
diff.update.push(arrItem);
}
} else {
// The document is missing from this collection, insert required
diff.insert.push(arrItem);
}
}
// Now loop this collection's data and check for matching items
arr = this._data;
arrCount = arr.length;
for (arrIndex = 0; arrIndex < arrCount; arrIndex++) {
arrItem = arr[arrIndex];
if (!collection._primaryIndex.get(arrItem[pm])) {
// The document does not exist in the other collection, remove required
diff.remove.push(arrItem);
}
}
return diff;
};
Collection.prototype.collateAdd = new Overload({
/**
* Adds a data source to collate data from and specifies the
* key name to collate data to.
* @func collateAdd
* @memberof Collection
* @param {Collection} collection The collection to collate data from.
* @param {String=} keyName Optional name of the key to collate data to.
	 * If none is provided the CRUD operations are applied directly to the
	 * root collection data.
*/
'object, string': function (collection, keyName) {
var self = this;
self.collateAdd(collection, function (packet) {
var obj1,
obj2;
switch (packet.type) {
case 'insert':
if (keyName) {
obj1 = {
$push: {}
};
obj1.$push[keyName] = self.decouple(packet.data);
self.update({}, obj1);
} else {
self.insert(packet.data);
}
break;
case 'update':
if (keyName) {
obj1 = {};
obj2 = {};
obj1[keyName] = packet.data.query;
obj2[keyName + '.$'] = packet.data.update;
self.update(obj1, obj2);
} else {
self.update(packet.data.query, packet.data.update);
}
break;
case 'remove':
if (keyName) {
obj1 = {
$pull: {}
};
obj1.$pull[keyName] = {};
obj1.$pull[keyName][self.primaryKey()] = packet.data.dataSet[0][collection.primaryKey()];
self.update({}, obj1);
} else {
self.remove(packet.data);
}
break;
default:
}
});
},
/**
* Adds a data source to collate data from and specifies a process
* method that will handle the collation functionality (for custom
* collation).
* @func collateAdd
* @memberof Collection
* @param {Collection} collection The collection to collate data from.
* @param {Function} process The process method.
*/
'object, function': function (collection, process) {
if (typeof collection === 'string') {
// The collection passed is a name, not a reference so get
// the reference from the name
collection = this._db.collection(collection, {
autoCreate: false,
throwError: false
});
}
if (collection) {
this._collate = this._collate || {};
this._collate[collection.name()] = new ReactorIO(collection, this, process);
return this;
} else {
throw('Cannot collate from a non-existent collection!');
}
}
});
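// Usage sketch (illustrative; collection and key names are hypothetical):
// mirror inserts/updates/removes from "source" into the "items" key of
// documents in this collection.
//   target.collateAdd(source, 'items');
//   // ...or supply a custom process function:
//   target.collateAdd(source, function (packet) {
//       if (packet.type === 'insert') { target.insert(packet.data); }
//   });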
Collection.prototype.collateRemove = function (collection) {
if (typeof collection === 'object') {
// We need to have the name of the collection to remove it
collection = collection.name();
}
if (collection) {
// Drop the reactor IO chain node
this._collate[collection].drop();
// Remove the collection data from the collate object
delete this._collate[collection];
return this;
} else {
throw('No collection name passed to collateRemove() or collection not found!');
}
};
Db.prototype.collection = new Overload({
/**
* Get a collection with no name (generates a random name). If the
* collection does not already exist then one is created for that
* name automatically.
* @func collection
* @memberof Db
* @param {String} collectionName The name of the collection.
* @returns {Collection}
*/
'': function () {
return this.$main.call(this, {
name: this.objectId()
});
},
/**
* Get a collection by name. If the collection does not already exist
* then one is created for that name automatically.
* @func collection
* @memberof Db
* @param {Object} data An options object or a collection instance.
* @returns {Collection}
*/
'object': function (data) {
// Handle being passed an instance
if (data instanceof Collection) {
		if (data.state() !== 'dropped') {
return data;
} else {
return this.$main.call(this, {
name: data.name()
});
}
}
return this.$main.call(this, data);
},
/**
* Get a collection by name. If the collection does not already exist
* then one is created for that name automatically.
* @func collection
* @memberof Db
* @param {String} collectionName The name of the collection.
* @returns {Collection}
*/
'string': function (collectionName) {
return this.$main.call(this, {
name: collectionName
});
},
/**
* Get a collection by name. If the collection does not already exist
* then one is created for that name automatically.
* @func collection
* @memberof Db
* @param {String} collectionName The name of the collection.
* @param {String} primaryKey Optional primary key to specify the primary key field on the collection
* objects. Defaults to "_id".
* @returns {Collection}
*/
'string, string': function (collectionName, primaryKey) {
return this.$main.call(this, {
name: collectionName,
primaryKey: primaryKey
});
},
/**
* Get a collection by name. If the collection does not already exist
* then one is created for that name automatically.
* @func collection
* @memberof Db
* @param {String} collectionName The name of the collection.
* @param {Object} options An options object.
* @returns {Collection}
*/
'string, object': function (collectionName, options) {
options.name = collectionName;
return this.$main.call(this, options);
},
/**
* Get a collection by name. If the collection does not already exist
* then one is created for that name automatically.
* @func collection
* @memberof Db
* @param {String} collectionName The name of the collection.
* @param {String} primaryKey Optional primary key to specify the primary key field on the collection
* objects. Defaults to "_id".
* @param {Object} options An options object.
* @returns {Collection}
*/
'string, string, object': function (collectionName, primaryKey, options) {
options.name = collectionName;
options.primaryKey = primaryKey;
return this.$main.call(this, options);
},
/**
* The main handler method. This gets called by all the other variants and
* handles the actual logic of the overloaded method.
* @func collection
* @memberof Db
* @param {Object} options An options object.
* @returns {*}
*/
'$main': function (options) {
var self = this,
name = options.name;
if (name) {
if (this._collection[name]) {
return this._collection[name];
} else {
if (options && options.autoCreate === false) {
if (options && options.throwError !== false) {
throw(this.logIdentifier() + ' Cannot get collection ' + name + ' because it does not exist and auto-create has been disabled!');
}
return undefined;
}
if (this.debug()) {
console.log(this.logIdentifier() + ' Creating collection ' + name);
}
}
this._collection[name] = this._collection[name] || new Collection(name, options).db(this);
this._collection[name].mongoEmulation(this.mongoEmulation());
if (options.primaryKey !== undefined) {
this._collection[name].primaryKey(options.primaryKey);
}
if (options.capped !== undefined) {
// Check we have a size
if (options.size !== undefined) {
this._collection[name].capped(options.capped);
this._collection[name].cappedSize(options.size);
} else {
throw(this.logIdentifier() + ' Cannot create a capped collection without specifying a size!');
}
}
// Listen for events on this collection so we can fire global events
// on the database in response to it
self._collection[name].on('change', function () {
self.emit('change', self._collection[name], 'collection', name);
});
self.emit('create', self._collection[name], 'collection', name);
return this._collection[name];
} else {
if (!options || (options && options.throwError !== false)) {
throw(this.logIdentifier() + ' Cannot get collection with undefined name!');
}
}
}
});
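// Usage sketch (illustrative; names are hypothetical): all variants funnel
// into $main, which auto-creates the collection unless disabled.
//   var users = db.collection('users');                    // auto-create
//   var byKey = db.collection('users', 'email');           // custom primary key
//   var maybe = db.collection('users', {autoCreate: false, throwError: false});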
/**
* Determine if a collection with the passed name already exists.
* @memberof Db
 * @param {String} collectionName The name of the collection to check for.
 * @returns {boolean}
 */
Db.prototype.collectionExists = function (collectionName) {
	return Boolean(this._collection[collectionName]);
};
/**
* Returns an array of collections the DB currently has.
* @memberof Db
* @param {String|RegExp=} search The optional search string or regular expression to use
* to match collection names against.
* @returns {Array} An array of objects containing details of each collection
* the database is currently managing.
*/
Db.prototype.collections = function (search) {
var arr = [],
collections = this._collection,
collection,
i;
if (search) {
if (!(search instanceof RegExp)) {
// Turn the search into a regular expression
search = new RegExp(search);
}
}
for (i in collections) {
if (collections.hasOwnProperty(i)) {
collection = collections[i];
if (search) {
if (search.exec(i)) {
arr.push({
name: i,
count: collection.count(),
linked: collection.isLinked !== undefined ? collection.isLinked() : false
});
}
} else {
arr.push({
name: i,
count: collection.count(),
linked: collection.isLinked !== undefined ? collection.isLinked() : false
});
}
}
}
arr.sort(function (a, b) {
return a.name.localeCompare(b.name);
});
return arr;
};
Shared.finishModule('Collection');
module.exports = Collection;
},{"./Crc":5,"./IndexBinaryTree":7,"./IndexHashMap":8,"./KeyValueStore":9,"./Metrics":10,"./Overload":22,"./Path":23,"./ReactorIO":24,"./Shared":26}],4:[function(_dereq_,module,exports){
/*
License
Copyright (c) 2015 Irrelon Software Limited
http://www.irrelon.com
http://www.forerunnerdb.com
Please visit the license page to see latest license information:
http://www.forerunnerdb.com/licensing.html
*/
"use strict";
var Shared,
Db,
Metrics,
Overload,
_instances = [];
Shared = _dereq_('./Shared');
Overload = _dereq_('./Overload');
/**
* Creates a new ForerunnerDB instance. Core instances handle the lifecycle of
* multiple database instances.
* @constructor
*/
var Core = function (name) {
this.init.apply(this, arguments);
};
Core.prototype.init = function (name) {
this._db = {};
this._debug = {};
this._name = name || 'ForerunnerDB';
_instances.push(this);
};
/**
* Returns the number of instantiated ForerunnerDB objects.
* @returns {Number} The number of instantiated instances.
*/
Core.prototype.instantiatedCount = function () {
return _instances.length;
};
/**
* Get all instances as an array or a single ForerunnerDB instance
* by it's array index.
* @param {Number=} index Optional index of instance to get.
* @returns {Array|Object} Array of instances or a single instance.
*/
Core.prototype.instances = function (index) {
if (index !== undefined) {
return _instances[index];
}
return _instances;
};
/**
* Get all instances as an array of instance names or a single ForerunnerDB
* instance by it's name.
* @param {String=} name Optional name of instance to get.
* @returns {Array|Object} Array of instance names or a single instance.
*/
Core.prototype.namedInstances = function (name) {
var i,
instArr;
if (name !== undefined) {
for (i = 0; i < _instances.length; i++) {
			if (_instances[i].name() === name) {
return _instances[i];
}
}
return undefined;
}
instArr = [];
for (i = 0; i < _instances.length; i++) {
		instArr.push(_instances[i].name());
}
return instArr;
};
Core.prototype.moduleLoaded = new Overload({
/**
* Checks if a module has been loaded into the database.
* @func moduleLoaded
* @memberof Core
* @param {String} moduleName The name of the module to check for.
* @returns {Boolean} True if the module is loaded, false if not.
*/
'string': function (moduleName) {
if (moduleName !== undefined) {
moduleName = moduleName.replace(/ /g, '');
var modules = moduleName.split(','),
index;
for (index = 0; index < modules.length; index++) {
if (!Shared.modules[modules[index]]) {
return false;
}
}
return true;
}
return false;
},
/**
* Checks if a module is loaded and if so calls the passed
* callback method.
* @func moduleLoaded
* @memberof Core
* @param {String} moduleName The name of the module to check for.
* @param {Function} callback The callback method to call if module is loaded.
*/
'string, function': function (moduleName, callback) {
if (moduleName !== undefined) {
moduleName = moduleName.replace(/ /g, '');
var modules = moduleName.split(','),
index;
for (index = 0; index < modules.length; index++) {
if (!Shared.modules[modules[index]]) {
return false;
}
}
if (callback) { callback(); }
}
},
/**
* Checks if an array of named modules are loaded and if so
* calls the passed callback method.
* @func moduleLoaded
* @memberof Core
	 * @param {Array} moduleNameArr The array of module names to check for.
* @param {Function} callback The callback method to call if modules are loaded.
*/
'array, function': function (moduleNameArr, callback) {
var moduleName,
i;
for (i = 0; i < moduleNameArr.length; i++) {
moduleName = moduleNameArr[i];
if (moduleName !== undefined) {
moduleName = moduleName.replace(/ /g, '');
var modules = moduleName.split(','),
index;
for (index = 0; index < modules.length; index++) {
if (!Shared.modules[modules[index]]) {
return false;
}
}
}
}
if (callback) { callback(); }
},
/**
* Checks if a module is loaded and if so calls the passed
* success method, otherwise calls the failure method.
* @func moduleLoaded
* @memberof Core
* @param {String} moduleName The name of the module to check for.
* @param {Function} success The callback method to call if module is loaded.
* @param {Function} failure The callback method to call if module not loaded.
*/
'string, function, function': function (moduleName, success, failure) {
if (moduleName !== undefined) {
moduleName = moduleName.replace(/ /g, '');
var modules = moduleName.split(','),
index;
for (index = 0; index < modules.length; index++) {
if (!Shared.modules[modules[index]]) {
failure();
return false;
}
}
success();
}
}
});
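// Usage sketch (illustrative; the module name is hypothetical): check for
// optional modules before using them.
//   fdb.moduleLoaded('Persist', function () {
//       // Persistence plugin is available
//   }, function () {
//       // Plugin missing - fall back to in-memory only
//   });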
/**
* Checks version against the string passed and if it matches (or partially matches)
* then the callback is called.
* @param {String} val The version to check against.
* @param {Function} callback The callback to call if match is true.
* @returns {Boolean}
*/
Core.prototype.version = function (val, callback) {
if (val !== undefined) {
if (Shared.version.indexOf(val) === 0) {
if (callback) { callback(); }
return true;
}
return false;
}
return Shared.version;
};
// Expose moduleLoaded() method to non-instantiated object ForerunnerDB
Core.moduleLoaded = Core.prototype.moduleLoaded;
// Expose version() method to non-instantiated object ForerunnerDB
Core.version = Core.prototype.version;
// Expose instances() method to non-instantiated object ForerunnerDB
Core.instances = Core.prototype.instances;
// Expose instantiatedCount() method to non-instantiated object ForerunnerDB
Core.instantiatedCount = Core.prototype.instantiatedCount;
// Provide public access to the Shared object
Core.shared = Shared;
Core.prototype.shared = Shared;
Shared.addModule('Core', Core);
Shared.mixin(Core.prototype, 'Mixin.Common');
Shared.mixin(Core.prototype, 'Mixin.Constants');
Db = _dereq_('./Db.js');
Metrics = _dereq_('./Metrics.js');
/**
* Gets / sets the name of the instance. This is primarily used for
* name-spacing persistent storage.
* @param {String=} val The name of the instance to set.
* @returns {*}
*/
Shared.synthesize(Core.prototype, 'name');
/**
* Gets / sets mongodb emulation mode.
* @param {Boolean=} val True to enable, false to disable.
* @returns {*}
*/
Shared.synthesize(Core.prototype, 'mongoEmulation');
// Set a flag to determine environment
Core.prototype._isServer = false;
/**
 * Checks if ForerunnerDB is running on a client (browser) or
 * a server (node.js).
 * @returns {Boolean} Returns true if running on a browser.
 */
Core.prototype.isClient = function () {
	return !this._isServer;
};
/**
 * Checks if ForerunnerDB is running on a client (browser) or
 * a server (node.js).
 * @returns {Boolean} Returns true if running on a server.
 */
Core.prototype.isServer = function () {
	return this._isServer;
};
/**
* Added to provide an error message for users who have not seen
* the new instantiation breaking change warning and try to get
* a collection directly from the core instance.
*/
Core.prototype.collection = function () {
throw("ForerunnerDB's instantiation has changed since version 1.3.36 to support multiple database instances. Please see the readme.md file for the minor change you have to make to get your project back up and running, or see the issue related to this change at https://github.com/Irrelon/ForerunnerDB/issues/44");
};
module.exports = Core;
},{"./Db.js":6,"./Metrics.js":10,"./Overload":22,"./Shared":26}],5:[function(_dereq_,module,exports){
"use strict";
/**
* @mixin
*/
var crcTable = (function () {
var crcTable = [],
c, n, k;
for (n = 0; n < 256; n++) {
c = n;
for (k = 0; k < 8; k++) {
c = ((c & 1) ? (0xEDB88320 ^ (c >>> 1)) : (c >>> 1)); // jshint ignore:line
}
crcTable[n] = c;
}
return crcTable;
}());
module.exports = function(str) {
var crc = 0 ^ (-1), // jshint ignore:line
i;
for (i = 0; i < str.length; i++) {
crc = (crc >>> 8) ^ crcTable[(crc ^ str.charCodeAt(i)) & 0xFF]; // jshint ignore:line
}
return (crc ^ (-1)) >>> 0; // jshint ignore:line
};
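// Usage sketch (illustrative): this CRC function is also exposed on database
// instances as db.crc (see the Db module below).
//   var checksum = db.crc('hello world'); // 32-bit unsigned CRC value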
},{}],6:[function(_dereq_,module,exports){
"use strict";
var Shared,
Core,
Collection,
Metrics,
Crc,
Overload;
Shared = _dereq_('./Shared');
Overload = _dereq_('./Overload');
/**
* Creates a new ForerunnerDB database instance.
* @constructor
*/
var Db = function (name, core) {
this.init.apply(this, arguments);
};
Db.prototype.init = function (name, core) {
this.core(core);
this._primaryKey = '_id';
this._name = name;
this._collection = {};
this._debug = {};
};
Db.prototype.moduleLoaded = new Overload({
/**
* Checks if a module has been loaded into the database.
* @func moduleLoaded
* @memberof Db
* @param {String} moduleName The name of the module to check for.
* @returns {Boolean} True if the module is loaded, false if not.
*/
'string': function (moduleName) {
if (moduleName !== undefined) {
moduleName = moduleName.replace(/ /g, '');
var modules = moduleName.split(','),
index;
for (index = 0; index < modules.length; index++) {
if (!Shared.modules[modules[index]]) {
return false;
}
}
return true;
}
return false;
},
/**
* Checks if a module is loaded and if so calls the passed
* callback method.
* @func moduleLoaded
* @memberof Db
* @param {String} moduleName The name of the module to check for.
* @param {Function} callback The callback method to call if module is loaded.
*/
'string, function': function (moduleName, callback) {
if (moduleName !== undefined) {
moduleName = moduleName.replace(/ /g, '');
var modules = moduleName.split(','),
index;
for (index = 0; index < modules.length; index++) {
if (!Shared.modules[modules[index]]) {
return false;
}
}
if (callback) { callback(); }
}
},
/**
* Checks if a module is loaded and if so calls the passed
* success method, otherwise calls the failure method.
* @func moduleLoaded
* @memberof Db
* @param {String} moduleName The name of the module to check for.
* @param {Function} success The callback method to call if module is loaded.
* @param {Function} failure The callback method to call if module not loaded.
*/
'string, function, function': function (moduleName, success, failure) {
if (moduleName !== undefined) {
moduleName = moduleName.replace(/ /g, '');
var modules = moduleName.split(','),
index;
for (index = 0; index < modules.length; index++) {
if (!Shared.modules[modules[index]]) {
failure();
return false;
}
}
success();
}
}
});
/**
* Checks version against the string passed and if it matches (or partially matches)
* then the callback is called.
* @param {String} val The version to check against.
* @param {Function} callback The callback to call if match is true.
* @returns {Boolean}
*/
Db.prototype.version = function (val, callback) {
if (val !== undefined) {
if (Shared.version.indexOf(val) === 0) {
if (callback) { callback(); }
return true;
}
return false;
}
return Shared.version;
};
// Expose moduleLoaded method to non-instantiated object ForerunnerDB
Db.moduleLoaded = Db.prototype.moduleLoaded;
// Expose version method to non-instantiated object ForerunnerDB
Db.version = Db.prototype.version;
// Provide public access to the Shared object
Db.shared = Shared;
Db.prototype.shared = Shared;
Shared.addModule('Db', Db);
Shared.mixin(Db.prototype, 'Mixin.Common');
Shared.mixin(Db.prototype, 'Mixin.ChainReactor');
Shared.mixin(Db.prototype, 'Mixin.Constants');
Shared.mixin(Db.prototype, 'Mixin.Tags');
Core = Shared.modules.Core;
Collection = _dereq_('./Collection.js');
Metrics = _dereq_('./Metrics.js');
Crc = _dereq_('./Crc.js');
Db.prototype._isServer = false;
/**
* Gets / sets the core object this database belongs to.
*/
Shared.synthesize(Db.prototype, 'core');
/**
* Gets / sets the default primary key for new collections.
* @param {String=} val The name of the primary key to set.
* @returns {*}
*/
Shared.synthesize(Db.prototype, 'primaryKey');
/**
* Gets / sets the current state.
* @param {String=} val The name of the state to set.
* @returns {*}
*/
Shared.synthesize(Db.prototype, 'state');
/**
* Gets / sets the name of the database.
* @param {String=} val The name of the database to set.
* @returns {*}
*/
Shared.synthesize(Db.prototype, 'name');
/**
* Gets / sets mongodb emulation mode.
* @param {Boolean=} val True to enable, false to disable.
* @returns {*}
*/
Shared.synthesize(Db.prototype, 'mongoEmulation');
/**
* Returns a checksum of a string.
* @param {String} string The string to checksum.
* @return {String} The checksum generated.
*/
Db.prototype.crc = Crc;
/**
* Checks if the database is running on a client (browser) or
* a server (node.js).
* @returns {Boolean} Returns true if running on a browser.
*/
Db.prototype.isClient = function () {
return !this._isServer;
};
/**
* Checks if the database is running on a client (browser) or
* a server (node.js).
* @returns {Boolean} Returns true if running on a server.
*/
Db.prototype.isServer = function () {
return this._isServer;
};
/**
* Converts a normal javascript array of objects into a DB collection.
* @param {Array} arr An array of objects.
* @returns {Collection} A new collection instance with the data set to the
* array passed.
*/
Db.prototype.arrayToCollection = function (arr) {
return new Collection().setData(arr);
};
/**
* Registers an event listener against an event name.
* @param {String} event The name of the event to listen for.
* @param {Function} listener The listener method to call when
* the event is fired.
* @returns {*}
*/
Db.prototype.on = function(event, listener) {
this._listeners = this._listeners || {};
this._listeners[event] = this._listeners[event] || [];
this._listeners[event].push(listener);
return this;
};
/**
* De-registers an event listener from an event name.
* @param {String} event The name of the event to stop listening for.
* @param {Function} listener The listener method passed to on() when
* registering the event listener.
* @returns {*}
*/
Db.prototype.off = function(event, listener) {
	this._listeners = this._listeners || {};
	if (event in this._listeners) {
var arr = this._listeners[event],
index = arr.indexOf(listener);
if (index > -1) {
arr.splice(index, 1);
}
}
return this;
};
/**
* Emits an event by name with the given data.
* @param {String} event The name of the event to emit.
* @param {*=} data The data to emit with the event.
* @returns {*}
*/
Db.prototype.emit = function(event, data) {
this._listeners = this._listeners || {};
if (event in this._listeners) {
var arr = this._listeners[event],
arrCount = arr.length,
arrIndex;
for (arrIndex = 0; arrIndex < arrCount; arrIndex++) {
arr[arrIndex].apply(this, Array.prototype.slice.call(arguments, 1));
}
}
return this;
};
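// Usage sketch (illustrative): the Db event emitter in use. The 'create'
// event is fired elsewhere in the library, e.g. by the collection() handler.
//   db.on('create', function (coll, type, name) {
//       console.log('Created', type, name);
//   });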
/**
* Find all documents across all collections in the database that match the passed
* string or search object.
* @param search String or search object.
* @returns {Array}
*/
Db.prototype.peek = function (search) {
var i,
coll,
arr = [],
typeOfSearch = typeof search;
// Loop collections
for (i in this._collection) {
if (this._collection.hasOwnProperty(i)) {
coll = this._collection[i];
if (typeOfSearch === 'string') {
arr = arr.concat(coll.peek(search));
} else {
arr = arr.concat(coll.find(search));
}
}
}
return arr;
};
/**
* Find all documents across all collections in the database that match the passed
* string or search object and return them in an object where each key is the name
* of the collection that the document was matched in.
* @param search String or search object.
* @returns {object}
*/
Db.prototype.peekCat = function (search) {
	var i,
		coll,
		cat = {},
		arr,
		typeOfSearch = typeof search;
// Loop collections
for (i in this._collection) {
if (this._collection.hasOwnProperty(i)) {
coll = this._collection[i];
if (typeOfSearch === 'string') {
arr = coll.peek(search);
if (arr && arr.length) {
cat[coll.name()] = arr;
}
} else {
arr = coll.find(search);
if (arr && arr.length) {
cat[coll.name()] = arr;
}
}
}
}
return cat;
};
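// Usage sketch (illustrative; data is hypothetical): peekCat() returns
// matches keyed by collection name rather than a flat array like peek().
//   var byCollection = db.peekCat('smith');
//   // e.g. {users: [...], orders: [...]}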
Db.prototype.drop = new Overload({
/**
* Drops the database.
* @func drop
* @memberof Db
*/
'': function () {
if (!this.isDropped()) {
var arr = this.collections(),
arrCount = arr.length,
arrIndex;
this._state = 'dropped';
for (arrIndex = 0; arrIndex < arrCount; arrIndex++) {
this.collection(arr[arrIndex].name).drop();
delete this._collection[arr[arrIndex].name];
}
this.emit('drop', this);
delete this._listeners;
delete this._core._db[this._name];
}
return true;
},
/**
* Drops the database with optional callback method.
* @func drop
* @memberof Db
* @param {Function} callback Optional callback method.
*/
'function': function (callback) {
if (!this.isDropped()) {
var arr = this.collections(),
arrCount = arr.length,
arrIndex,
finishCount = 0,
afterDrop = function () {
finishCount++;
if (finishCount === arrCount) {
if (callback) { callback(); }
}
};
this._state = 'dropped';
for (arrIndex = 0; arrIndex < arrCount; arrIndex++) {
this.collection(arr[arrIndex].name).drop(afterDrop);
delete this._collection[arr[arrIndex].name];
}
this.emit('drop', this);
delete this._listeners;
delete this._core._db[this._name];
}
return true;
},
/**
* Drops the database with optional persistent storage drop. Persistent
* storage is dropped by default if no preference is provided.
* @func drop
* @memberof Db
* @param {Boolean} removePersist Drop persistent storage for this database.
*/
'boolean': function (removePersist) {
if (!this.isDropped()) {
var arr = this.collections(),
arrCount = arr.length,
arrIndex;
this._state = 'dropped';
for (arrIndex = 0; arrIndex < arrCount; arrIndex++) {
this.collection(arr[arrIndex].name).drop(removePersist);
delete this._collection[arr[arrIndex].name];
}
this.emit('drop', this);
delete this._listeners;
delete this._core._db[this._name];
}
return true;
},
/**
* Drops the database and optionally controls dropping persistent storage
* and callback method.
* @func drop
* @memberof Db
* @param {Boolean} removePersist Drop persistent storage for this database.
* @param {Function} callback Optional callback method.
*/
'boolean, function': function (removePersist, callback) {
if (!this.isDropped()) {
var arr = this.collections(),
arrCount = arr.length,
arrIndex,
finishCount = 0,
afterDrop = function () {
finishCount++;
if (finishCount === arrCount) {
if (callback) { callback(); }
}
};
this._state = 'dropped';
for (arrIndex = 0; arrIndex < arrCount; arrIndex++) {
this.collection(arr[arrIndex].name).drop(removePersist, afterDrop);
delete this._collection[arr[arrIndex].name];
}
this.emit('drop', this);
delete this._listeners;
delete this._core._db[this._name];
}
return true;
}
});
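// Usage sketch (illustrative): the drop() overloads in use. Persistent
// storage is dropped by default when no preference is passed.
//   db.drop();                      // drop database and persistent storage
//   db.drop(false);                 // drop but keep persistent storage
//   db.drop(true, function () {     // drop storage, then callback
//       console.log('Database dropped');
//   });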
/**
* Gets a database instance by name.
* @memberof Core
* @param {String=} name Optional name of the database. If none is provided
* a random name is assigned.
* @returns {Db}
*/
Core.prototype.db = function (name) {
// Handle being passed an instance
if (name instanceof Db) {
return name;
}
if (!name) {
name = this.objectId();
}
this._db[name] = this._db[name] || new Db(name, this);
this._db[name].mongoEmulation(this.mongoEmulation());
return this._db[name];
};
/**
* Returns an array of databases that ForerunnerDB currently has.
* @memberof Core
* @param {String|RegExp=} search The optional search string or regular expression to use
* to match collection names against.
* @returns {Array} An array of objects containing details of each database
* that ForerunnerDB is currently managing and it's child entities.
*/
Core.prototype.databases = function (search) {
var arr = [],
tmpObj,
addDb,
i;
if (search) {
if (!(search instanceof RegExp)) {
// Turn the search into a regular expression
search = new RegExp(search);
}
}
for (i in this._db) {
if (this._db.hasOwnProperty(i)) {
addDb = true;
if (search) {
if (!search.exec(i)) {
addDb = false;
}
}
if (addDb) {
tmpObj = {
name: i,
children: []
};
if (this.shared.moduleExists('Collection')) {
tmpObj.children.push({
module: 'collection',
moduleName: 'Collections',
count: this._db[i].collections().length
});
}
if (this.shared.moduleExists('CollectionGroup')) {
tmpObj.children.push({
module: 'collectionGroup',
moduleName: 'Collection Groups',
count: this._db[i].collectionGroups().length
});
}
if (this.shared.moduleExists('Document')) {
tmpObj.children.push({
module: 'document',
moduleName: 'Documents',
count: this._db[i].documents().length
});
}
if (this.shared.moduleExists('Grid')) {
tmpObj.children.push({
module: 'grid',
moduleName: 'Grids',
count: this._db[i].grids().length
});
}
if (this.shared.moduleExists('Overview')) {
tmpObj.children.push({
module: 'overview',
moduleName: 'Overviews',
count: this._db[i].overviews().length
});
}
if (this.shared.moduleExists('View')) {
tmpObj.children.push({
module: 'view',
moduleName: 'Views',
count: this._db[i].views().length
});
}
arr.push(tmpObj);
}
}
}
arr.sort(function (a, b) {
return a.name.localeCompare(b.name);
});
return arr;
};
Shared.finishModule('Db');
module.exports = Db;
},{"./Collection.js":3,"./Crc.js":5,"./Metrics.js":10,"./Overload":22,"./Shared":26}],7:[function(_dereq_,module,exports){
"use strict";
/*
name
id
rebuild
state
match
lookup
*/
var Shared = _dereq_('./Shared'),
Path = _dereq_('./Path'),
BinaryTree = _dereq_('./BinaryTree'),
treeInstance = new BinaryTree(),
btree = function () {};
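// NOTE: btree above is an empty stub, so btree.create() as used by
// IndexBinaryTree.init()/rebuild() is undefined here; constructing this
// index type will fail unless a b-tree implementation exposing
// create(order, comparator) is wired in. treeInstance appears unused.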
treeInstance.inOrder('hash');
/**
* The index class used to instantiate hash map indexes that the database can
* use to speed up queries on collections and views.
* @constructor
*/
var IndexBinaryTree = function () {
this.init.apply(this, arguments);
};
IndexBinaryTree.prototype.init = function (keys, options, collection) {
this._btree = new (btree.create(2, this.sortAsc))();
this._size = 0;
this._id = this._itemKeyHash(keys, keys);
this.unique(options && options.unique ? options.unique : false);
if (keys !== undefined) {
this.keys(keys);
}
if (collection !== undefined) {
this.collection(collection);
}
this.name(options && options.name ? options.name : this._id);
};
Shared.addModule('IndexBinaryTree', IndexBinaryTree);
Shared.mixin(IndexBinaryTree.prototype, 'Mixin.ChainReactor');
Shared.mixin(IndexBinaryTree.prototype, 'Mixin.Sorting');
IndexBinaryTree.prototype.id = function () {
return this._id;
};
IndexBinaryTree.prototype.state = function () {
return this._state;
};
IndexBinaryTree.prototype.size = function () {
return this._size;
};
Shared.synthesize(IndexBinaryTree.prototype, 'data');
Shared.synthesize(IndexBinaryTree.prototype, 'name');
Shared.synthesize(IndexBinaryTree.prototype, 'collection');
Shared.synthesize(IndexBinaryTree.prototype, 'type');
Shared.synthesize(IndexBinaryTree.prototype, 'unique');
IndexBinaryTree.prototype.keys = function (val) {
if (val !== undefined) {
this._keys = val;
// Count the keys
this._keyCount = (new Path()).parse(this._keys).length;
return this;
}
return this._keys;
};
IndexBinaryTree.prototype.rebuild = function () {
// Do we have a collection?
if (this._collection) {
// Get sorted data
var collection = this._collection.subset({}, {
$decouple: false,
$orderBy: this._keys
}),
collectionData = collection.find(),
dataIndex,
dataCount = collectionData.length;
// Clear the index data for the index
this._btree = new (btree.create(2, this.sortAsc))();
if (this._unique) {
this._uniqueLookup = {};
}
// Loop the collection data
for (dataIndex = 0; dataIndex < dataCount; dataIndex++) {
this.insert(collectionData[dataIndex]);
}
}
this._state = {
name: this._name,
keys: this._keys,
indexSize: this._size,
built: new Date(),
updated: new Date(),
ok: true
};
};
IndexBinaryTree.prototype.insert = function (dataItem, options) {
var uniqueFlag = this._unique,
uniqueHash,
dataItemHash = this._itemKeyHash(dataItem, this._keys),
keyArr;
if (uniqueFlag) {
uniqueHash = this._itemHash(dataItem, this._keys);
this._uniqueLookup[uniqueHash] = dataItem;
}
// We store multiple items that match a key inside an array
// that is then stored against that key in the tree...
// Check if item exists for this key already
keyArr = this._btree.get(dataItemHash);
// Check if the array exists
if (keyArr === undefined) {
// Generate an array for this key first
keyArr = [];
// Put the new array into the tree under the key
this._btree.put(dataItemHash, keyArr);
}
// Push the item into the array
keyArr.push(dataItem);
this._size++;
};
IndexBinaryTree.prototype.remove = function (dataItem, options) {
var uniqueFlag = this._unique,
uniqueHash,
dataItemHash = this._itemKeyHash(dataItem, this._keys),
keyArr,
itemIndex;
if (uniqueFlag) {
uniqueHash = this._itemHash(dataItem, this._keys);
delete this._uniqueLookup[uniqueHash];
}
// Try and get the array for the item hash key
keyArr = this._btree.get(dataItemHash);
if (keyArr !== undefined) {
// The key array exits, remove the item from the key array
itemIndex = keyArr.indexOf(dataItem);
if (itemIndex > -1) {
// Check the length of the array
if (keyArr.length === 1) {
// This item is the last in the array, just kill the tree entry
this._btree.del(dataItemHash);
} else {
// Remove the item
keyArr.splice(itemIndex, 1);
}
this._size--;
}
}
};
IndexBinaryTree.prototype.violation = function (dataItem) {
// Generate item hash
var uniqueHash = this._itemHash(dataItem, this._keys);
// Check if the item breaks the unique constraint
return Boolean(this._uniqueLookup[uniqueHash]);
};
IndexBinaryTree.prototype.hashViolation = function (uniqueHash) {
// Check if the item breaks the unique constraint
return Boolean(this._uniqueLookup[uniqueHash]);
};
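// NOTE: lookup() below reads from this._data, which this binary-tree index
// never populates (insert() writes to this._btree), so it currently always
// returns an empty array; this appears to be leftover hash-map index code.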
IndexBinaryTree.prototype.lookup = function (query) {
return this._data[this._itemHash(query, this._keys)] || [];
};
IndexBinaryTree.prototype.match = function (query, options) {
// Check if the passed query has data in the keys our index
// operates on and if so, is the query sort matching our order
var pathSolver = new Path();
var indexKeyArr = pathSolver.parseArr(this._keys),
queryArr = pathSolver.parseArr(query),
matchedKeys = [],
matchedKeyCount = 0,
i;
// Loop the query array and check the order of keys against the
// index key array to see if this index can be used
for (i = 0; i < indexKeyArr.length; i++) {
if (queryArr[i] === indexKeyArr[i]) {
matchedKeyCount++;
matchedKeys.push(queryArr[i]);
} else {
// Query match failed - this is a hash map index so partial key match won't work
return {
matchedKeys: [],
totalKeyCount: queryArr.length,
score: 0
};
}
}
return {
matchedKeys: matchedKeys,
totalKeyCount: queryArr.length,
score: matchedKeyCount
};
//return pathSolver.countObjectPaths(this._keys, query);
};
IndexBinaryTree.prototype._itemHash = function (item, keys) {
var path = new Path(),
pathData,
hash = '',
k;
pathData = path.parse(keys);
for (k = 0; k < pathData.length; k++) {
if (hash) { hash += '_'; }
hash += path.value(item, pathData[k].path).join(':');
}
return hash;
};
IndexBinaryTree.prototype._itemKeyHash = function (item, keys) {
var path = new Path(),
pathData,
hash = '',
k;
pathData = path.parse(keys);
for (k = 0; k < pathData.length; k++) {
if (hash) { hash += '_'; }
hash += path.keyValue(item, pathData[k].path);
}
return hash;
};
IndexBinaryTree.prototype._itemHashArr = function (item, keys) {
var path = new Path(),
pathData,
//hash = '',
hashArr = [],
valArr,
i, k, j;
pathData = path.parse(keys);
for (k = 0; k < pathData.length; k++) {
valArr = path.value(item, pathData[k].path);
for (i = 0; i < valArr.length; i++) {
if (k === 0) {
// Setup the initial hash array
hashArr.push(valArr[i]);
} else {
// Loop the hash array and concat the value to it
for (j = 0; j < hashArr.length; j++) {
hashArr[j] = hashArr[j] + '_' + valArr[i];
}
}
}
}
return hashArr;
};
Shared.finishModule('IndexBinaryTree');
module.exports = IndexBinaryTree;
},{"./BinaryTree":2,"./Path":23,"./Shared":26}],8:[function(_dereq_,module,exports){
"use strict";
var Shared = _dereq_('./Shared'),
Path = _dereq_('./Path');
/**
* The index class used to instantiate hash map indexes that the database can
* use to speed up queries on collections and views.
* @constructor
*/
var IndexHashMap = function () {
this.init.apply(this, arguments);
};
IndexHashMap.prototype.init = function (keys, options, collection) {
this._crossRef = {};
this._size = 0;
this._id = this._itemKeyHash(keys, keys);
this.data({});
this.unique(options && options.unique ? options.unique : false);
if (keys !== undefined) {
this.keys(keys);
}
if (collection !== undefined) {
this.collection(collection);
}
this.name(options && options.name ? options.name : this._id);
};
Shared.addModule('IndexHashMap', IndexHashMap);
Shared.mixin(IndexHashMap.prototype, 'Mixin.ChainReactor');
IndexHashMap.prototype.id = function () {
return this._id;
};
IndexHashMap.prototype.state = function () {
return this._state;
};
IndexHashMap.prototype.size = function () {
return this._size;
};
Shared.synthesize(IndexHashMap.prototype, 'data');
Shared.synthesize(IndexHashMap.prototype, 'name');
Shared.synthesize(IndexHashMap.prototype, 'collection');
Shared.synthesize(IndexHashMap.prototype, 'type');
Shared.synthesize(IndexHashMap.prototype, 'unique');
IndexHashMap.prototype.keys = function (val) {
if (val !== undefined) {
this._keys = val;
// Count the keys
this._keyCount = (new Path()).parse(this._keys).length;
return this;
}
return this._keys;
};
IndexHashMap.prototype.rebuild = function () {
// Do we have a collection?
if (this._collection) {
// Get sorted data
var collection = this._collection.subset({}, {
$decouple: false,
$orderBy: this._keys
}),
collectionData = collection.find(),
dataIndex,
dataCount = collectionData.length;
// Clear the index data for the index
this._data = {};
if (this._unique) {
this._uniqueLookup = {};
}
// Loop the collection data
for (dataIndex = 0; dataIndex < dataCount; dataIndex++) {
this.insert(collectionData[dataIndex]);
}
}
this._state = {
name: this._name,
keys: this._keys,
indexSize: this._size,
built: new Date(),
updated: new Date(),
ok: true
};
};
IndexHashMap.prototype.insert = function (dataItem, options) {
var uniqueFlag = this._unique,
uniqueHash,
itemHashArr,
hashIndex;
if (uniqueFlag) {
uniqueHash = this._itemHash(dataItem, this._keys);
this._uniqueLookup[uniqueHash] = dataItem;
}
// Generate item hash
itemHashArr = this._itemHashArr(dataItem, this._keys);
// Get the path search results and store them
for (hashIndex = 0; hashIndex < itemHashArr.length; hashIndex++) {
this.pushToPathValue(itemHashArr[hashIndex], dataItem);
}
};
IndexHashMap.prototype.update = function (dataItem, options) {
// TODO: Write updates to work
// 1: Get uniqueHash for the dataItem primary key value (may need to generate a store for this)
// 2: Remove the uniqueHash as it currently stands
// 3: Generate a new uniqueHash for dataItem
// 4: Insert the new uniqueHash
};
IndexHashMap.prototype.remove = function (dataItem, options) {
var uniqueFlag = this._unique,
uniqueHash,
itemHashArr,
hashIndex;
if (uniqueFlag) {
uniqueHash = this._itemHash(dataItem, this._keys);
delete this._uniqueLookup[uniqueHash];
}
// Generate item hash
itemHashArr = this._itemHashArr(dataItem, this._keys);
// Get the path search results and store them
for (hashIndex = 0; hashIndex < itemHashArr.length; hashIndex++) {
this.pullFromPathValue(itemHashArr[hashIndex], dataItem);
}
};
IndexHashMap.prototype.violation = function (dataItem) {
// Generate item hash
var uniqueHash = this._itemHash(dataItem, this._keys);
// Check if the item breaks the unique constraint
return Boolean(this._uniqueLookup[uniqueHash]);
};
IndexHashMap.prototype.hashViolation = function (uniqueHash) {
// Check if the item breaks the unique constraint
return Boolean(this._uniqueLookup[uniqueHash]);
};
IndexHashMap.prototype.pushToPathValue = function (hash, obj) {
var pathValArr = this._data[hash] = this._data[hash] || [];
// Make sure we have not already indexed this object at this path/value
if (pathValArr.indexOf(obj) === -1) {
// Index the object
pathValArr.push(obj);
// Record the reference to this object in our index size
this._size++;
// Cross-reference this association for later lookup
this.pushToCrossRef(obj, pathValArr);
}
};
IndexHashMap.prototype.pullFromPathValue = function (hash, obj) {
var pathValArr = this._data[hash],
indexOfObject;
// Make sure we have already indexed this object at this path/value
indexOfObject = pathValArr.indexOf(obj);
if (indexOfObject > -1) {
// Un-index the object
pathValArr.splice(indexOfObject, 1);
// Record the reference to this object in our index size
this._size--;
// Remove object cross-reference
this.pullFromCrossRef(obj, pathValArr);
}
// Check if we should remove the path value array
if (!pathValArr.length) {
// Remove the array
delete this._data[hash];
}
};
IndexHashMap.prototype.pull = function (obj) {
// Get all places the object has been used and remove them
var id = obj[this._collection.primaryKey()],
crossRefArr = this._crossRef[id],
arrIndex,
arrCount = crossRefArr.length,
arrItem;
for (arrIndex = 0; arrIndex < arrCount; arrIndex++) {
arrItem = crossRefArr[arrIndex];
// Remove item from this index lookup array
this._pullFromArray(arrItem, obj);
}
// Record the reference to this object in our index size
this._size--;
// Now remove the cross-reference entry for this object
delete this._crossRef[id];
};
IndexHashMap.prototype._pullFromArray = function (arr, obj) {
var arrCount = arr.length;
while (arrCount--) {
if (arr[arrCount] === obj) {
arr.splice(arrCount, 1);
}
}
};
IndexHashMap.prototype.pushToCrossRef = function (obj, pathValArr) {
var id = obj[this._collection.primaryKey()],
crObj;
this._crossRef[id] = this._crossRef[id] || [];
// Check if the cross-reference to the pathVal array already exists
crObj = this._crossRef[id];
if (crObj.indexOf(pathValArr) === -1) {
// Add the cross-reference
crObj.push(pathValArr);
}
};
IndexHashMap.prototype.pullFromCrossRef = function (obj, pathValArr) {
var id = obj[this._collection.primaryKey()];
delete this._crossRef[id];
};
IndexHashMap.prototype.lookup = function (query) {
return this._data[this._itemHash(query, this._keys)] || [];
};
IndexHashMap.prototype.match = function (query, options) {
// Check if the passed query has data in the keys our index
// operates on and if so, is the query sort matching our order
var pathSolver = new Path();
var indexKeyArr = pathSolver.parseArr(this._keys),
queryArr = pathSolver.parseArr(query),
matchedKeys = [],
matchedKeyCount = 0,
i;
// Loop the query array and check the order of keys against the
// index key array to see if this index can be used
for (i = 0; i < indexKeyArr.length; i++) {
if (queryArr[i] === indexKeyArr[i]) {
matchedKeyCount++;
matchedKeys.push(queryArr[i]);
} else {
// Query match failed - this is a hash map index so partial key match won't work
return {
matchedKeys: [],
totalKeyCount: queryArr.length,
score: 0
};
}
}
return {
matchedKeys: matchedKeys,
totalKeyCount: queryArr.length,
score: matchedKeyCount
};
//return pathSolver.countObjectPaths(this._keys, query);
};
IndexHashMap.prototype._itemHash = function (item, keys) {
var path = new Path(),
pathData,
hash = '',
k;
pathData = path.parse(keys);
for (k = 0; k < pathData.length; k++) {
if (hash) { hash += '_'; }
hash += path.value(item, pathData[k].path).join(':');
}
return hash;
};
IndexHashMap.prototype._itemKeyHash = function (item, keys) {
var path = new Path(),
pathData,
hash = '',
k;
pathData = path.parse(keys);
for (k = 0; k < pathData.length; k++) {
if (hash) { hash += '_'; }
hash += path.keyValue(item, pathData[k].path);
}
return hash;
};
IndexHashMap.prototype._itemHashArr = function (item, keys) {
var path = new Path(),
pathData,
//hash = '',
hashArr = [],
valArr,
i, k, j;
pathData = path.parse(keys);
for (k = 0; k < pathData.length; k++) {
valArr = path.value(item, pathData[k].path);
for (i = 0; i < valArr.length; i++) {
if (k === 0) {
// Setup the initial hash array
hashArr.push(valArr[i]);
} else {
// Loop the hash array and concat the value to it
for (j = 0; j < hashArr.length; j++) {
hashArr[j] = hashArr[j] + '_' + valArr[i];
}
}
}
}
return hashArr;
};
Shared.finishModule('IndexHashMap');
module.exports = IndexHashMap;
},{"./Path":23,"./Shared":26}],9:[function(_dereq_,module,exports){
"use strict";
var Shared = _dereq_('./Shared');
/**
* The key value store class used when storing basic in-memory KV data,
* and can be queried for quick retrieval. Mostly used for collection
* primary key indexes and lookups.
* @param {String=} name Optional KV store name.
* @constructor
*/
var KeyValueStore = function (name) {
this.init.apply(this, arguments);
};
KeyValueStore.prototype.init = function (name) {
this._name = name;
this._data = {};
this._primaryKey = '_id';
};
Shared.addModule('KeyValueStore', KeyValueStore);
Shared.mixin(KeyValueStore.prototype, 'Mixin.ChainReactor');
/**
* Get / set the name of the key/value store.
* @param {String} val The name to set.
* @returns {*}
*/
Shared.synthesize(KeyValueStore.prototype, 'name');
/**
* Get / set the primary key.
* @param {String} key The key to set.
* @returns {*}
*/
KeyValueStore.prototype.primaryKey = function (key) {
if (key !== undefined) {
this._primaryKey = key;
return this;
}
return this._primaryKey;
};
/**
* Removes all data from the store.
* @returns {*}
*/
KeyValueStore.prototype.truncate = function () {
this._data = {};
return this;
};
/**
* Sets data against a key in the store.
* @param {String} key The key to set data for.
* @param {*} value The value to assign to the key.
* @returns {*}
*/
KeyValueStore.prototype.set = function (key, value) {
this._data[key] = value ? value : true;
return this;
};
/**
* Gets data stored for the passed key.
* @param {String} key The key to get data for.
* @returns {*}
*/
KeyValueStore.prototype.get = function (key) {
return this._data[key];
};
/**
 * Looks up data stored against the primary key value(s) contained in the
 * passed lookup object.
* @param {*} obj A lookup query, can be a string key, an array of string keys,
* an object with further query clauses or a regular expression that should be
* run against all keys.
* @returns {*}
*/
KeyValueStore.prototype.lookup = function (obj) {
var pKeyVal = obj[this._primaryKey],
arrIndex,
arrCount,
lookupItem,
result;
if (pKeyVal instanceof Array) {
// An array of primary keys, find all matches
arrCount = pKeyVal.length;
result = [];
for (arrIndex = 0; arrIndex < arrCount; arrIndex++) {
lookupItem = this._data[pKeyVal[arrIndex]];
if (lookupItem) {
result.push(lookupItem);
}
}
return result;
} else if (pKeyVal instanceof RegExp) {
// Create new data
result = [];
for (arrIndex in this._data) {
if (this._data.hasOwnProperty(arrIndex)) {
if (pKeyVal.test(arrIndex)) {
result.push(this._data[arrIndex]);
}
}
}
return result;
} else if (typeof pKeyVal === 'object') {
// The primary key clause is an object, now we have to do some
// more extensive searching
if (pKeyVal.$ne) {
// Create new data
result = [];
for (arrIndex in this._data) {
if (this._data.hasOwnProperty(arrIndex)) {
if (arrIndex !== pKeyVal.$ne) {
result.push(this._data[arrIndex]);
}
}
}
return result;
}
if (pKeyVal.$in && (pKeyVal.$in instanceof Array)) {
// Create new data
result = [];
for (arrIndex in this._data) {
if (this._data.hasOwnProperty(arrIndex)) {
if (pKeyVal.$in.indexOf(arrIndex) > -1) {
result.push(this._data[arrIndex]);
}
}
}
return result;
}
if (pKeyVal.$nin && (pKeyVal.$nin instanceof Array)) {
// Create new data
result = [];
for (arrIndex in this._data) {
if (this._data.hasOwnProperty(arrIndex)) {
if (pKeyVal.$nin.indexOf(arrIndex) === -1) {
result.push(this._data[arrIndex]);
}
}
}
return result;
}
if (pKeyVal.$or && (pKeyVal.$or instanceof Array)) {
// Create new data
result = [];
for (arrIndex = 0; arrIndex < pKeyVal.$or.length; arrIndex++) {
result = result.concat(this.lookup(pKeyVal.$or[arrIndex]));
}
return result;
}
} else {
// Key is a basic lookup from string
lookupItem = this._data[pKeyVal];
if (lookupItem !== undefined) {
return [lookupItem];
} else {
return [];
}
}
};
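// Usage sketch (illustrative; `kv` is a hypothetical KeyValueStore instance):
//   kv.set('1', {_id: '1', name: 'Jo'});
//   kv.set('2', {_id: '2', name: 'Sam'});
//   kv.lookup({_id: '1'});          // => [{_id: '1', name: 'Jo'}]
//   kv.lookup({_id: ['1', '2']});   // => both items
//   kv.lookup({_id: {$ne: '1'}});   // => [{_id: '2', name: 'Sam'}]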
/**
* Removes data for the given key from the store.
* @param {String} key The key to un-set.
* @returns {*}
*/
KeyValueStore.prototype.unSet = function (key) {
delete this._data[key];
return this;
};
/**
 * Sets data for the given key in the store only where the given key
* does not already have a value in the store.
* @param {String} key The key to set data for.
* @param {*} value The value to assign to the key.
* @returns {Boolean} True if data was set or false if data already
* exists for the key.
*/
KeyValueStore.prototype.uniqueSet = function (key, value) {
if (this._data[key] === undefined) {
this._data[key] = value;
return true;
}
return false;
};
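// Usage sketch: uniqueSet() only writes when the key is currently unset, which
// is what makes it useful for enforcing unique indexes:
//   kv.uniqueSet('3', {_id: '3'}); // => true (value stored)
//   kv.uniqueSet('3', {_id: '3'}); // => false (key already holds a value)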
Shared.finishModule('KeyValueStore');
module.exports = KeyValueStore;
},{"./Shared":26}],10:[function(_dereq_,module,exports){
"use strict";
var Shared = _dereq_('./Shared'),
Operation = _dereq_('./Operation');
/**
* The metrics class used to store details about operations.
* @constructor
*/
var Metrics = function () {
this.init.apply(this, arguments);
};
Metrics.prototype.init = function () {
this._data = [];
};
Shared.addModule('Metrics', Metrics);
Shared.mixin(Metrics.prototype, 'Mixin.ChainReactor');
/**
* Creates an operation within the metrics instance and if metrics
* are currently enabled (by calling the start() method) the operation
* is also stored in the metrics log.
* @param {String} name The name of the operation.
* @returns {Operation}
*/
Metrics.prototype.create = function (name) {
var op = new Operation(name);
if (this._enabled) {
this._data.push(op);
}
return op;
};
/**
* Starts logging operations.
* @returns {Metrics}
*/
Metrics.prototype.start = function () {
this._enabled = true;
return this;
};
/**
* Stops logging operations.
* @returns {Metrics}
*/
Metrics.prototype.stop = function () {
this._enabled = false;
return this;
};
/**
* Clears all logged operations.
* @returns {Metrics}
*/
Metrics.prototype.clear = function () {
this._data = [];
return this;
};
/**
* Returns an array of all logged operations.
* @returns {Array}
*/
Metrics.prototype.list = function () {
return this._data;
};
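// Usage sketch: operations are only recorded between start() and stop():
//   var metrics = new Metrics();
//   metrics.start();
//   var op = metrics.create('find'); // op is stored because logging is enabled
//   metrics.list();                  // => [op]
//   metrics.stop();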
Shared.finishModule('Metrics');
module.exports = Metrics;
},{"./Operation":21,"./Shared":26}],11:[function(_dereq_,module,exports){
"use strict";
var CRUD = {
preSetData: function () {
},
postSetData: function () {
}
};
module.exports = CRUD;
},{}],12:[function(_dereq_,module,exports){
"use strict";
/**
* The chain reactor mixin, provides methods to the target object that allow chain
* reaction events to propagate to the target and be handled, processed and passed
* on down the chain.
* @mixin
*/
var ChainReactor = {
/**
	 * Adds a target object to this instance's chain so that chain packets
	 * sent from this instance propagate to the target.
	 * @param {Object} obj The chain reactor target to add.
*/
chain: function (obj) {
if (this.debug && this.debug()) {
if (obj._reactorIn && obj._reactorOut) {
console.log(obj._reactorIn.logIdentifier() + ' Adding target "' + obj._reactorOut.instanceIdentifier() + '" to the chain reactor target list');
} else {
console.log(this.logIdentifier() + ' Adding target "' + obj.instanceIdentifier() + '" to the chain reactor target list');
}
}
this._chain = this._chain || [];
var index = this._chain.indexOf(obj);
if (index === -1) {
this._chain.push(obj);
}
},
unChain: function (obj) {
if (this.debug && this.debug()) {
if (obj._reactorIn && obj._reactorOut) {
console.log(obj._reactorIn.logIdentifier() + ' Removing target "' + obj._reactorOut.instanceIdentifier() + '" from the chain reactor target list');
} else {
console.log(this.logIdentifier() + ' Removing target "' + obj.instanceIdentifier() + '" from the chain reactor target list');
}
}
if (this._chain) {
var index = this._chain.indexOf(obj);
if (index > -1) {
this._chain.splice(index, 1);
}
}
},
chainSend: function (type, data, options) {
if (this._chain) {
var arr = this._chain,
arrItem,
count = arr.length,
index;
for (index = 0; index < count; index++) {
arrItem = arr[index];
if (!arrItem._state || (arrItem._state && !arrItem.isDropped())) {
if (this.debug && this.debug()) {
if (arrItem._reactorIn && arrItem._reactorOut) {
console.log(arrItem._reactorIn.logIdentifier() + ' Sending data down the chain reactor pipe to "' + arrItem._reactorOut.instanceIdentifier() + '"');
} else {
console.log(this.logIdentifier() + ' Sending data down the chain reactor pipe to "' + arrItem.instanceIdentifier() + '"');
}
}
arrItem.chainReceive(this, type, data, options);
} else {
console.log('Reactor Data:', type, data, options);
console.log('Reactor Node:', arrItem);
throw('Chain reactor attempting to send data to target reactor node that is in a dropped state!');
}
}
}
},
chainReceive: function (sender, type, data, options) {
var chainPacket = {
sender: sender,
type: type,
data: data,
options: options
};
if (this.debug && this.debug()) {
console.log(this.logIdentifier() + ' Received data from parent reactor node');
}
// Fire our internal handler
if (!this._chainHandler || (this._chainHandler && !this._chainHandler(chainPacket))) {
// Propagate the message down the chain
this.chainSend(chainPacket.type, chainPacket.data, chainPacket.options);
}
}
};
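// Usage sketch (illustrative; `nodeA` and `nodeB` are hypothetical objects that
// have had this mixin applied, e.g. via Shared.mixin):
//   nodeA.chain(nodeB);                  // nodeB now receives nodeA's packets
//   nodeA.chainSend('insert', {foo: 1}); // calls nodeB.chainReceive(nodeA, 'insert', {foo: 1})
//   nodeA.unChain(nodeB);                // stop propagating to nodeB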
module.exports = ChainReactor;
},{}],13:[function(_dereq_,module,exports){
"use strict";
var idCounter = 0,
Overload = _dereq_('./Overload'),
Serialiser = _dereq_('./Serialiser'),
Common,
serialiser = new Serialiser();
/**
* Provides commonly used methods to most classes in ForerunnerDB.
* @mixin
*/
Common = {
// Expose the serialiser object so it can be extended with new data handlers.
serialiser: serialiser,
/**
* Gets / sets data in the item store. The store can be used to set and
* retrieve data against a key. Useful for adding arbitrary key/value data
* to a collection / view etc and retrieving it later.
* @param {String|*} key The key under which to store the passed value or
* retrieve the existing stored value.
* @param {*=} val Optional value. If passed will overwrite the existing value
* stored against the specified key if one currently exists.
* @returns {*}
*/
store: function (key, val) {
if (key !== undefined) {
if (val !== undefined) {
// Store the data
this._store = this._store || {};
this._store[key] = val;
return this;
}
if (this._store) {
return this._store[key];
}
}
return undefined;
},
/**
* Removes a previously stored key/value pair from the item store, set previously
* by using the store() method.
* @param {String|*} key The key of the key/value pair to remove;
* @returns {Common} Returns this for chaining.
*/
unStore: function (key) {
if (key !== undefined) {
delete this._store[key];
}
return this;
},
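// Usage sketch: store() reads when passed only a key and writes when passed
// a key and a value:
//   this.store('cacheEnabled', true); // write, returns this for chaining
//   this.store('cacheEnabled');       // => true
//   this.unStore('cacheEnabled');     // remove the key/value pair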
/**
* Returns a non-referenced version of the passed object / array.
* @param {Object} data The object or array to return as a non-referenced version.
* @param {Number=} copies Optional number of copies to produce. If specified, the return
* value will be an array of decoupled objects, each distinct from the other.
* @returns {*}
*/
decouple: function (data, copies) {
if (data !== undefined) {
if (!copies) {
return this.jParse(this.jStringify(data));
} else {
var i,
json = this.jStringify(data),
copyArr = [];
for (i = 0; i < copies; i++) {
copyArr.push(this.jParse(json));
}
return copyArr;
}
}
return undefined;
},
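// Usage sketch: decouple() round-trips through the serialiser so the returned
// object shares no references with the source:
//   var a = {list: [1, 2]},
//       b = this.decouple(a);
//   b.list.push(3); // a.list is unaffected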
/**
* Parses and returns data from stringified version.
* @param {String} data The stringified version of data to parse.
* @returns {Object} The parsed JSON object from the data.
*/
jParse: function (data) {
return serialiser.parse(data);
//return JSON.parse(data);
},
/**
* Converts a JSON object into a stringified version.
* @param {Object} data The data to stringify.
* @returns {String} The stringified data.
*/
jStringify: function (data) {
return serialiser.stringify(data);
//return JSON.stringify(data);
},
/**
* Generates a new 16-character hexadecimal unique ID or
* generates a new 16-character hexadecimal ID based on
* the passed string. Will always generate the same ID
* for the same string.
* @param {String=} str A string to generate the ID from.
* @return {String}
*/
objectId: function (str) {
var id,
pow = Math.pow(10, 17);
if (!str) {
idCounter++;
id = (idCounter + (
Math.random() * pow +
Math.random() * pow +
Math.random() * pow +
Math.random() * pow
)).toString(16);
} else {
var val = 0,
count = str.length,
i;
for (i = 0; i < count; i++) {
val += str.charCodeAt(i) * pow;
}
id = val.toString(16);
}
return id;
},
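// Usage sketch: objectId() is deterministic for a given string and random
// otherwise:
//   this.objectId('users') === this.objectId('users'); // => true
//   this.objectId(); // => a new unique hex id on each call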
/**
* Gets / sets debug flag that can enable debug message output to the
* console if required.
* @param {Boolean} val The value to set debug flag to.
* @return {Boolean} True if enabled, false otherwise.
*/
/**
* Sets debug flag for a particular type that can enable debug message
* output to the console if required.
* @param {String} type The name of the debug type to set flag for.
* @param {Boolean} val The value to set debug flag to.
* @return {Boolean} True if enabled, false otherwise.
*/
debug: new Overload([
function () {
return this._debug && this._debug.all;
},
function (val) {
if (val !== undefined) {
if (typeof val === 'boolean') {
this._debug = this._debug || {};
this._debug.all = val;
this.chainSend('debug', this._debug);
return this;
} else {
return (this._debug && this._debug[val]) || (this._db && this._db._debug && this._db._debug[val]) || (this._debug && this._debug.all);
}
}
return this._debug && this._debug.all;
},
function (type, val) {
if (type !== undefined) {
if (val !== undefined) {
this._debug = this._debug || {};
this._debug[type] = val;
this.chainSend('debug', this._debug);
return this;
}
return (this._debug && this._debug[type]) || (this._db && this._db._debug && this._db._debug[type]);
}
return this._debug && this._debug.all;
}
]),
/**
* Returns a string describing the class this instance is derived from.
* @returns {string}
*/
classIdentifier: function () {
return 'ForerunnerDB.' + this.className;
},
/**
* Returns a string describing the instance by it's class name and instance
* object name.
* @returns {String} The instance identifier.
*/
instanceIdentifier: function () {
return '[' + this.className + ']' + this.name();
},
/**
* Returns a string used to denote a console log against this instance,
* consisting of the class identifier and instance identifier.
* @returns {string} The log identifier.
*/
logIdentifier: function () {
return this.classIdentifier() + ': ' + this.instanceIdentifier();
},
/**
* Converts a query object with MongoDB dot notation syntax
* to Forerunner's object notation syntax.
* @param {Object} obj The object to convert.
*/
convertToFdb: function (obj) {
var varName,
splitArr,
objCopy,
i;
for (i in obj) {
if (obj.hasOwnProperty(i)) {
objCopy = obj;
if (i.indexOf('.') > -1) {
// Replace .$ with a placeholder before splitting by . char
i = i.replace('.$', '[|$|]');
splitArr = i.split('.');
while ((varName = splitArr.shift())) {
// Replace placeholder back to original .$
varName = varName.replace('[|$|]', '.$');
if (splitArr.length) {
objCopy[varName] = {};
} else {
objCopy[varName] = obj[i];
}
objCopy = objCopy[varName];
}
delete obj[i];
}
}
}
},
/**
* Checks if the state is dropped.
* @returns {boolean} True when dropped, false otherwise.
*/
isDropped: function () {
return this._state === 'dropped';
}
};
module.exports = Common;
},{"./Overload":22,"./Serialiser":25}],14:[function(_dereq_,module,exports){
"use strict";
/**
* Provides some database constants.
* @mixin
*/
var Constants = {
TYPE_INSERT: 0,
TYPE_UPDATE: 1,
TYPE_REMOVE: 2,
PHASE_BEFORE: 0,
PHASE_AFTER: 1
};
module.exports = Constants;
},{}],15:[function(_dereq_,module,exports){
"use strict";
var Overload = _dereq_('./Overload');
/**
* Provides event emitter functionality including the methods: on, off, once, emit, deferEmit.
* @mixin
*/
var Events = {
on: new Overload({
/**
* Attach an event listener to the passed event.
* @param {String} event The name of the event to listen for.
* @param {Function} listener The method to call when the event is fired.
*/
'string, function': function (event, listener) {
this._listeners = this._listeners || {};
this._listeners[event] = this._listeners[event] || {};
this._listeners[event]['*'] = this._listeners[event]['*'] || [];
this._listeners[event]['*'].push(listener);
return this;
},
/**
* Attach an event listener to the passed event only if the passed
* id matches the document id for the event being fired.
* @param {String} event The name of the event to listen for.
* @param {*} id The document id to match against.
* @param {Function} listener The method to call when the event is fired.
*/
'string, *, function': function (event, id, listener) {
this._listeners = this._listeners || {};
this._listeners[event] = this._listeners[event] || {};
this._listeners[event][id] = this._listeners[event][id] || [];
this._listeners[event][id].push(listener);
return this;
}
}),
once: new Overload({
'string, function': function (eventName, callback) {
var self = this,
internalCallback = function () {
self.off(eventName, internalCallback);
callback.apply(self, arguments);
};
return this.on(eventName, internalCallback);
},
'string, *, function': function (eventName, id, callback) {
var self = this,
internalCallback = function () {
self.off(eventName, id, internalCallback);
callback.apply(self, arguments);
};
return this.on(eventName, id, internalCallback);
}
}),
off: new Overload({
'string': function (event) {
if (this._listeners && event in this._listeners) {
delete this._listeners[event];
}
return this;
},
'string, function': function (event, listener) {
var arr,
index;
if (typeof(listener) === 'string') {
if (this._listeners && this._listeners[event] && this._listeners[event][listener]) {
delete this._listeners[event][listener];
}
} else {
if (this._listeners && event in this._listeners) {
arr = this._listeners[event]['*'];
index = arr.indexOf(listener);
if (index > -1) {
arr.splice(index, 1);
}
}
}
return this;
},
'string, *, function': function (event, id, listener) {
if (this._listeners && event in this._listeners && id in this._listeners[event]) {
var arr = this._listeners[event][id],
index = arr.indexOf(listener);
if (index > -1) {
arr.splice(index, 1);
}
}
},
'string, *': function (event, id) {
if (this._listeners && event in this._listeners && id in this._listeners[event]) {
// Kill all listeners for this event id
delete this._listeners[event][id];
}
}
}),
emit: function (event, data) {
this._listeners = this._listeners || {};
if (event in this._listeners) {
var arrIndex,
arrCount,
tmpFunc,
arr,
listenerIdArr,
listenerIdCount,
listenerIdIndex;
// Handle global emit
if (this._listeners[event]['*']) {
arr = this._listeners[event]['*'];
arrCount = arr.length;
for (arrIndex = 0; arrIndex < arrCount; arrIndex++) {
// Check we have a function to execute
tmpFunc = arr[arrIndex];
if (typeof tmpFunc === 'function') {
tmpFunc.apply(this, Array.prototype.slice.call(arguments, 1));
}
}
}
// Handle individual emit
if (data instanceof Array) {
// Check if the array is an array of objects in the collection
if (data[0] && data[0][this._primaryKey]) {
// Loop the array and check for listeners against the primary key
listenerIdArr = this._listeners[event];
arrCount = data.length;
for (arrIndex = 0; arrIndex < arrCount; arrIndex++) {
if (listenerIdArr[data[arrIndex][this._primaryKey]]) {
// Emit for this id
listenerIdCount = listenerIdArr[data[arrIndex][this._primaryKey]].length;
for (listenerIdIndex = 0; listenerIdIndex < listenerIdCount; listenerIdIndex++) {
tmpFunc = listenerIdArr[data[arrIndex][this._primaryKey]][listenerIdIndex];
if (typeof tmpFunc === 'function') {
listenerIdArr[data[arrIndex][this._primaryKey]][listenerIdIndex].apply(this, Array.prototype.slice.call(arguments, 1));
}
}
}
}
}
}
}
return this;
},
/**
* Queues an event to be fired. This has automatic de-bouncing so that any
	 * events of the same type that occur before the deferred timeout fires
	 * will all be wrapped into a single emit rather than emitting tons of
* events for lots of chained inserts etc. Only the data from the last
* de-bounced event will be emitted.
* @param {String} eventName The name of the event to emit.
* @param {*=} data Optional data to emit with the event.
*/
deferEmit: function (eventName, data) {
var self = this,
args;
if (!this._noEmitDefer && (!this._db || (this._db && !this._db._noEmitDefer))) {
args = arguments;
// Check for an existing timeout
this._deferTimeout = this._deferTimeout || {};
if (this._deferTimeout[eventName]) {
clearTimeout(this._deferTimeout[eventName]);
}
// Set a timeout
this._deferTimeout[eventName] = setTimeout(function () {
if (self.debug()) {
console.log(self.logIdentifier() + ' Emitting ' + args[0]);
}
self.emit.apply(self, args);
}, 1);
} else {
this.emit.apply(this, arguments);
}
return this;
}
};
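// Usage sketch (illustrative; `coll` is a hypothetical object with this mixin
// applied and a _primaryKey of '_id'):
//   coll.on('change', function (data) { console.log('changed', data); });
//   coll.emit('change', [{_id: '1'}]);      // fires the global '*' listeners
//   coll.on('change', '1', function () {}); // fires only for document id '1'
//   coll.deferEmit('change', [{_id: '1'}]); // de-bounced emit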
module.exports = Events;
},{"./Overload":22}],16:[function(_dereq_,module,exports){
"use strict";
/**
* Provides object matching algorithm methods.
* @mixin
*/
var Matching = {
/**
* Internal method that checks a document against a test object.
* @param {*} source The source object or value to test against.
* @param {*} test The test object or value to test with.
* @param {Object} queryOptions The options the query was passed with.
* @param {String=} opToApply The special operation to apply to the test such
* as 'and' or an 'or' operator.
* @param {Object=} options An object containing options to apply to the
* operation such as limiting the fields returned etc.
* @returns {Boolean} True if the test was positive, false on negative.
* @private
*/
_match: function (source, test, queryOptions, opToApply, options) {
// TODO: This method is quite long, break into smaller pieces
var operation,
applyOp = opToApply,
recurseVal,
tmpIndex,
sourceType = typeof source,
testType = typeof test,
matchedAll = true,
opResult,
substringCache,
i;
options = options || {};
queryOptions = queryOptions || {};
// Check if options currently holds a root query object
if (!options.$rootQuery) {
// Root query not assigned, hold the root query
options.$rootQuery = test;
}
// Check if options currently holds a root source object
if (!options.$rootSource) {
// Root query not assigned, hold the root query
options.$rootSource = source;
}
// Assign current query data
options.$currentQuery = test;
options.$rootData = options.$rootData || {};
// Check if the comparison data are both strings or numbers
if ((sourceType === 'string' || sourceType === 'number') && (testType === 'string' || testType === 'number')) {
// The source and test data are flat types that do not require recursive searches,
// so just compare them and return the result
if (sourceType === 'number') {
// Number comparison
if (source !== test) {
matchedAll = false;
}
} else {
// String comparison
// TODO: We can probably use a queryOptions.$locale as a second parameter here
// TODO: to satisfy https://github.com/Irrelon/ForerunnerDB/issues/35
if (source.localeCompare(test)) {
matchedAll = false;
}
}
} else if ((sourceType === 'string' || sourceType === 'number') && (testType === 'object' && test instanceof RegExp)) {
if (!test.test(source)) {
matchedAll = false;
}
} else {
for (i in test) {
if (test.hasOwnProperty(i)) {
// Assign previous query data
options.$previousQuery = options.$parent;
// Assign parent query data
options.$parent = {
query: test[i],
key: i,
parent: options.$previousQuery
};
// Reset operation flag
operation = false;
// Grab first two chars of the key name to check for $
substringCache = i.substr(0, 2);
// Check if the property is a comment (ignorable)
if (substringCache === '//') {
// Skip this property
continue;
}
// Check if the property starts with a dollar (function)
if (substringCache.indexOf('$') === 0) {
// Ask the _matchOp method to handle the operation
opResult = this._matchOp(i, source, test[i], queryOptions, options);
// Check the result of the matchOp operation
// If the result is -1 then no operation took place, otherwise the result
// will be a boolean denoting a match (true) or no match (false)
if (opResult > -1) {
if (opResult) {
if (opToApply === 'or') {
return true;
}
} else {
// Set the matchedAll flag to the result of the operation
// because the operation did not return true
matchedAll = opResult;
}
// Record that an operation was handled
operation = true;
}
}
// Check for regex
if (!operation && test[i] instanceof RegExp) {
operation = true;
if (sourceType === 'object' && source[i] !== undefined && test[i].test(source[i])) {
if (opToApply === 'or') {
return true;
}
} else {
matchedAll = false;
}
}
if (!operation) {
// Check if our query is an object
if (typeof(test[i]) === 'object') {
// Because test[i] is an object, source must also be an object
// Check if our source data we are checking the test query against
// is an object or an array
if (source[i] !== undefined) {
if (source[i] instanceof Array && !(test[i] instanceof Array)) {
// The source data is an array, so check each item until a
// match is found
recurseVal = false;
for (tmpIndex = 0; tmpIndex < source[i].length; tmpIndex++) {
recurseVal = this._match(source[i][tmpIndex], test[i], queryOptions, applyOp, options);
if (recurseVal) {
// One of the array items matched the query so we can
// include this item in the results, so break now
break;
}
}
if (recurseVal) {
if (opToApply === 'or') {
return true;
}
} else {
matchedAll = false;
}
} else if (!(source[i] instanceof Array) && test[i] instanceof Array) {
// The test key data is an array and the source key data is not so check
// each item in the test key data to see if the source item matches one
// of them. This is effectively an $in search.
recurseVal = false;
for (tmpIndex = 0; tmpIndex < test[i].length; tmpIndex++) {
recurseVal = this._match(source[i], test[i][tmpIndex], queryOptions, applyOp, options);
if (recurseVal) {
// One of the array items matched the query so we can
// include this item in the results, so break now
break;
}
}
if (recurseVal) {
if (opToApply === 'or') {
return true;
}
} else {
matchedAll = false;
}
} else if (typeof(source) === 'object') {
// Recurse down the object tree
recurseVal = this._match(source[i], test[i], queryOptions, applyOp, options);
if (recurseVal) {
if (opToApply === 'or') {
return true;
}
} else {
matchedAll = false;
}
} else {
recurseVal = this._match(undefined, test[i], queryOptions, applyOp, options);
if (recurseVal) {
if (opToApply === 'or') {
return true;
}
} else {
matchedAll = false;
}
}
} else {
// First check if the test match is an $exists
if (test[i] && test[i].$exists !== undefined) {
// Push the item through another match recurse
recurseVal = this._match(undefined, test[i], queryOptions, applyOp, options);
if (recurseVal) {
if (opToApply === 'or') {
return true;
}
} else {
matchedAll = false;
}
} else {
matchedAll = false;
}
}
} else {
// Check if the prop matches our test value
if (source && source[i] === test[i]) {
if (opToApply === 'or') {
return true;
}
} else if (source && source[i] && source[i] instanceof Array && test[i] && typeof(test[i]) !== "object") {
// We are looking for a value inside an array
// The source data is an array, so check each item until a
// match is found
recurseVal = false;
for (tmpIndex = 0; tmpIndex < source[i].length; tmpIndex++) {
recurseVal = this._match(source[i][tmpIndex], test[i], queryOptions, applyOp, options);
if (recurseVal) {
// One of the array items matched the query so we can
// include this item in the results, so break now
break;
}
}
if (recurseVal) {
if (opToApply === 'or') {
return true;
}
} else {
matchedAll = false;
}
} else {
matchedAll = false;
}
}
}
if (opToApply === 'and' && !matchedAll) {
return false;
}
}
}
}
return matchedAll;
},
/**
* Internal method, performs a matching process against a query operator such as $gt or $nin.
* @param {String} key The property name in the test that matches the operator to perform
* matching against.
* @param {*} source The source data to match the query against.
* @param {*} test The query to match the source against.
* @param {Object} queryOptions The options the query was passed with.
* @param {Object=} options An options object.
* @returns {*}
* @private
*/
_matchOp: function (key, source, test, queryOptions, options) {
// Check for commands
switch (key) {
case '$gt':
// Greater than
return source > test;
case '$gte':
// Greater than or equal
return source >= test;
case '$lt':
// Less than
return source < test;
case '$lte':
// Less than or equal
return source <= test;
case '$exists':
// Property exists
return (source === undefined) !== test;
case '$eq': // Equals
return source == test; // jshint ignore:line
case '$eeq': // Equals equals
return source === test;
case '$ne': // Not equals
return source != test; // jshint ignore:line
case '$nee': // Not equals equals
return source !== test;
case '$or':
// Match true on ANY check to pass
for (var orIndex = 0; orIndex < test.length; orIndex++) {
if (this._match(source, test[orIndex], queryOptions, 'and', options)) {
return true;
}
}
return false;
case '$and':
// Match true on ALL checks to pass
for (var andIndex = 0; andIndex < test.length; andIndex++) {
if (!this._match(source, test[andIndex], queryOptions, 'and', options)) {
return false;
}
}
return true;
case '$in': // In
// Check that the in test is an array
if (test instanceof Array) {
var inArr = test,
inArrCount = inArr.length,
inArrIndex;
for (inArrIndex = 0; inArrIndex < inArrCount; inArrIndex++) {
if (this._match(source, inArr[inArrIndex], queryOptions, 'and', options)) {
return true;
}
}
return false;
} else if (typeof test === 'object') {
return this._match(source, test, queryOptions, 'and', options);
} else {
throw(this.logIdentifier() + ' Cannot use an $in operator on a non-array key: ' + key);
}
break;
case '$nin': // Not in
// Check that the not-in test is an array
if (test instanceof Array) {
var notInArr = test,
notInArrCount = notInArr.length,
notInArrIndex;
for (notInArrIndex = 0; notInArrIndex < notInArrCount; notInArrIndex++) {
if (this._match(source, notInArr[notInArrIndex], queryOptions, 'and', options)) {
return false;
}
}
return true;
} else if (typeof test === 'object') {
return this._match(source, test, queryOptions, 'and', options);
} else {
throw(this.logIdentifier() + ' Cannot use a $nin operator on a non-array key: ' + key);
}
break;
case '$distinct':
// Ensure options holds a distinct lookup
options.$rootData['//distinctLookup'] = options.$rootData['//distinctLookup'] || {};
for (var distinctProp in test) {
if (test.hasOwnProperty(distinctProp)) {
options.$rootData['//distinctLookup'][distinctProp] = options.$rootData['//distinctLookup'][distinctProp] || {};
// Check if the options distinct lookup has this field's value
if (options.$rootData['//distinctLookup'][distinctProp][source[distinctProp]]) {
// Value is already in use
return false;
} else {
// Set the value in the lookup
options.$rootData['//distinctLookup'][distinctProp][source[distinctProp]] = true;
// Allow the item in the results
return true;
}
}
}
break;
case '$count':
var countKey,
countArr,
countVal;
// Iterate the count object's keys
for (countKey in test) {
if (test.hasOwnProperty(countKey)) {
// Check the property exists and is an array. If the property being counted is not
// an array (or doesn't exist) then use a value of zero in any further count logic
countArr = source[countKey];
if (typeof countArr === 'object' && countArr instanceof Array) {
countVal = countArr.length;
} else {
countVal = 0;
}
// Now recurse down the query chain further to satisfy the query for this key (countKey)
if (!this._match(countVal, test[countKey], queryOptions, 'and', options)) {
return false;
}
}
}
// Allow the item in the results
return true;
case '$find':
case '$findOne':
case '$findSub':
var fromType = 'collection',
findQuery,
findOptions,
subQuery,
subOptions,
subPath,
result,
operation = {};
// Check we have a database object to work from
if (!this.db()) {
throw('Cannot operate a ' + key + ' sub-query on an anonymous collection (one with no db set)!');
}
// Check all parts of the $find operation exist
if (!test.$from) {
throw(key + ' missing $from property!');
}
if (test.$fromType) {
fromType = test.$fromType;
// Check the fromType exists as a method
if (!this.db()[fromType] || typeof this.db()[fromType] !== 'function') {
throw(key + ' cannot operate against $fromType "' + fromType + '" because the database does not recognise this type of object!');
}
}
// Perform the find operation
findQuery = test.$query || {};
findOptions = test.$options || {};
if (key === '$findSub') {
if (!test.$path) {
throw(key + ' missing $path property!');
}
subPath = test.$path;
subQuery = test.$subQuery || {};
subOptions = test.$subOptions || {};
result = this.db()[fromType](test.$from).findSub(findQuery, subPath, subQuery, subOptions);
} else {
result = this.db()[fromType](test.$from)[key.substr(1)](findQuery, findOptions);
}
operation[options.$parent.parent.key] = result;
return this._match(source, operation, queryOptions, 'and', options);
}
return -1;
}
};
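// Illustrative examples of queries the matcher above understands (`matcher` is
// a hypothetical object with this mixin applied; _match is internal API):
//   matcher._match({age: 30}, {age: {$gt: 18}}, {}, 'and', {});     // => true
//   matcher._match({name: 'Jo'}, {$or: [{name: 'Jo'}, {name: 'Al'}]}, {}, 'and', {}); // => true
//   matcher._match({tags: ['a', 'b']}, {tags: 'a'}, {}, 'and', {}); // => true (array scan)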
module.exports = Matching;
},{}],17:[function(_dereq_,module,exports){
"use strict";
/**
* Provides sorting methods.
* @mixin
*/
var Sorting = {
/**
* Sorts the passed value a against the passed value b ascending.
* @param {*} a The first value to compare.
* @param {*} b The second value to compare.
* @returns {*} 1 if a is sorted after b, -1 if a is sorted before b.
*/
sortAsc: function (a, b) {
if (typeof(a) === 'string' && typeof(b) === 'string') {
return a.localeCompare(b);
} else {
if (a > b) {
return 1;
} else if (a < b) {
return -1;
}
}
return 0;
},
/**
* Sorts the passed value a against the passed value b descending.
* @param {*} a The first value to compare.
* @param {*} b The second value to compare.
* @returns {*} 1 if a is sorted after b, -1 if a is sorted before b.
*/
sortDesc: function (a, b) {
if (typeof(a) === 'string' && typeof(b) === 'string') {
return b.localeCompare(a);
} else {
if (a > b) {
return -1;
} else if (a < b) {
return 1;
}
}
return 0;
}
};
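// Usage sketch: these comparators plug straight into Array.prototype.sort, and
// strings go through localeCompare so locale-aware ordering applies:
//   [3, 1, 2].sort(Sorting.sortAsc);   // => [1, 2, 3]
//   ['a', 'b'].sort(Sorting.sortDesc); // => ['b', 'a']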
module.exports = Sorting;
},{}],18:[function(_dereq_,module,exports){
"use strict";
var Tags,
tagMap = {};
/**
* Provides class instance tagging and tag operation methods.
* @mixin
*/
Tags = {
/**
* Tags a class instance for later lookup.
* @param {String} name The tag to add.
* @returns {boolean}
*/
tagAdd: function (name) {
var i,
self = this,
mapArr = tagMap[name] = tagMap[name] || [];
for (i = 0; i < mapArr.length; i++) {
if (mapArr[i] === self) {
return true;
}
}
mapArr.push(self);
// Hook the drop event for this so we can react
if (self.on) {
self.on('drop', function () {
// We've been dropped so remove ourselves from the tag map
self.tagRemove(name);
});
}
return true;
},
/**
* Removes a tag from a class instance.
* @param {String} name The tag to remove.
* @returns {boolean}
*/
tagRemove: function (name) {
var i,
mapArr = tagMap[name];
if (mapArr) {
for (i = 0; i < mapArr.length; i++) {
if (mapArr[i] === this) {
mapArr.splice(i, 1);
return true;
}
}
}
return false;
},
/**
* Gets an array of all instances tagged with the passed tag name.
* @param {String} name The tag to lookup.
* @returns {Array} The array of instances that have the passed tag.
*/
tagLookup: function (name) {
return tagMap[name] || [];
},
/**
* Drops all instances that are tagged with the passed tag name.
* @param {String} name The tag to lookup.
* @param {Function} callback Callback once dropping has completed
* for all instances that match the passed tag name.
* @returns {boolean}
*/
tagDrop: function (name, callback) {
var arr = this.tagLookup(name),
dropCb,
dropCount,
i;
dropCb = function () {
dropCount--;
if (callback && dropCount === 0) {
callback(false);
}
};
if (arr.length) {
dropCount = arr.length;
// Loop the array and drop all items
for (i = arr.length - 1; i >= 0; i--) {
arr[i].drop(dropCb);
}
}
return true;
}
};
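// Usage sketch (illustrative; `coll` is a hypothetical instance with this
// mixin applied and a drop(callback) method):
//   coll.tagAdd('temp');    // register coll under the 'temp' tag
//   coll.tagLookup('temp'); // => [coll]
//   coll.tagDrop('temp', function () { /* all 'temp' instances dropped */ });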
module.exports = Tags;
},{}],19:[function(_dereq_,module,exports){
"use strict";
var Overload = _dereq_('./Overload');
/**
* Provides trigger functionality methods.
* @mixin
*/
var Triggers = {
/**
* Add a trigger by id.
* @param {String} id The id of the trigger. This must be unique to the type and
* phase of the trigger. Only one trigger may be added with this id per type and
* phase.
* @param {Number} type The type of operation to apply the trigger to. See
* Mixin.Constants for constants to use.
* @param {Number} phase The phase of an operation to fire the trigger on. See
* Mixin.Constants for constants to use.
* @param {Function} method The method to call when the trigger is fired.
* @returns {boolean} True if the trigger was added successfully, false if not.
*/
addTrigger: function (id, type, phase, method) {
var self = this,
triggerIndex;
// Check if the trigger already exists
triggerIndex = self._triggerIndexOf(id, type, phase);
if (triggerIndex === -1) {
// The trigger does not exist, create it
self._trigger = self._trigger || {};
self._trigger[type] = self._trigger[type] || {};
self._trigger[type][phase] = self._trigger[type][phase] || [];
self._trigger[type][phase].push({
id: id,
method: method,
enabled: true
});
return true;
}
return false;
},
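// Usage sketch (illustrative; `coll` is a hypothetical object with the
// Triggers and Constants mixins applied):
//   coll.addTrigger('audit', coll.TYPE_INSERT, coll.PHASE_BEFORE,
//       function (operation, oldDoc, newDoc) {
//           // Returning false here would cancel the insert
//           return true;
//       });
//   coll.disableTrigger('audit'); // temporarily mute it
//   coll.removeTrigger('audit', coll.TYPE_INSERT, coll.PHASE_BEFORE);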
/**
	 * Removes a trigger by id for the given operation type and phase.
* @param {String} id The id of the trigger to remove.
* @param {Number} type The type of operation to remove the trigger from. See
* Mixin.Constants for constants to use.
* @param {Number} phase The phase of the operation to remove the trigger from.
* See Mixin.Constants for constants to use.
* @returns {boolean} True if removed successfully, false if not.
*/
removeTrigger: function (id, type, phase) {
var self = this,
triggerIndex;
// Check if the trigger already exists
triggerIndex = self._triggerIndexOf(id, type, phase);
		if (triggerIndex > -1) {
			// The trigger exists, remove it
			self._trigger[type][phase].splice(triggerIndex, 1);
			return true;
		}
		return false;
},
enableTrigger: new Overload({
'string': function (id) {
// Alter all triggers of this type
var self = this,
types = self._trigger,
phases,
triggers,
result = false,
i, k, j;
if (types) {
for (j in types) {
if (types.hasOwnProperty(j)) {
phases = types[j];
if (phases) {
for (i in phases) {
if (phases.hasOwnProperty(i)) {
triggers = phases[i];
// Loop triggers and set enabled flag
for (k = 0; k < triggers.length; k++) {
if (triggers[k].id === id) {
triggers[k].enabled = true;
result = true;
}
}
}
}
}
}
}
}
return result;
},
'number': function (type) {
// Alter all triggers of this type
var self = this,
phases = self._trigger[type],
triggers,
result = false,
i, k;
if (phases) {
for (i in phases) {
if (phases.hasOwnProperty(i)) {
triggers = phases[i];
// Loop triggers and set to enabled
for (k = 0; k < triggers.length; k++) {
triggers[k].enabled = true;
result = true;
}
}
}
}
return result;
},
'number, number': function (type, phase) {
// Alter all triggers of this type and phase
var self = this,
phases = self._trigger[type],
triggers,
result = false,
k;
if (phases) {
triggers = phases[phase];
if (triggers) {
// Loop triggers and set to enabled
for (k = 0; k < triggers.length; k++) {
triggers[k].enabled = true;
result = true;
}
}
}
return result;
},
'string, number, number': function (id, type, phase) {
// Check if the trigger already exists
var self = this,
triggerIndex = self._triggerIndexOf(id, type, phase);
if (triggerIndex > -1) {
// Update the trigger
self._trigger[type][phase][triggerIndex].enabled = true;
return true;
}
return false;
}
}),
disableTrigger: new Overload({
'string': function (id) {
// Alter all triggers of this type
var self = this,
types = self._trigger,
phases,
triggers,
result = false,
i, k, j;
if (types) {
for (j in types) {
if (types.hasOwnProperty(j)) {
phases = types[j];
if (phases) {
for (i in phases) {
if (phases.hasOwnProperty(i)) {
triggers = phases[i];
// Loop triggers and set enabled flag
for (k = 0; k < triggers.length; k++) {
if (triggers[k].id === id) {
triggers[k].enabled = false;
result = true;
}
}
}
}
}
}
}
}
return result;
},
'number': function (type) {
// Alter all triggers of this type
var self = this,
phases = self._trigger[type],
triggers,
result = false,
i, k;
if (phases) {
for (i in phases) {
if (phases.hasOwnProperty(i)) {
triggers = phases[i];
// Loop triggers and set to disabled
for (k = 0; k < triggers.length; k++) {
triggers[k].enabled = false;
result = true;
}
}
}
}
return result;
},
'number, number': function (type, phase) {
// Alter all triggers of this type and phase
var self = this,
phases = self._trigger[type],
triggers,
result = false,
k;
if (phases) {
triggers = phases[phase];
if (triggers) {
// Loop triggers and set to disabled
for (k = 0; k < triggers.length; k++) {
triggers[k].enabled = false;
result = true;
}
}
}
return result;
},
'string, number, number': function (id, type, phase) {
// Check if the trigger already exists
var self = this,
triggerIndex = self._triggerIndexOf(id, type, phase);
if (triggerIndex > -1) {
// Update the trigger
self._trigger[type][phase][triggerIndex].enabled = false;
return true;
}
return false;
}
}),
/**
* Checks if a trigger will fire based on the type and phase provided.
* @param {Number} type The type of operation. See Mixin.Constants for
* constants to use.
* @param {Number} phase The phase of the operation. See Mixin.Constants
* for constants to use.
* @returns {Boolean} True if the trigger will fire, false otherwise.
*/
willTrigger: function (type, phase) {
if (this._trigger && this._trigger[type] && this._trigger[type][phase] && this._trigger[type][phase].length) {
// Check if a trigger in this array is enabled
var arr = this._trigger[type][phase],
i;
for (i = 0; i < arr.length; i++) {
if (arr[i].enabled) {
return true;
}
}
}
return false;
},
/**
* Processes trigger actions based on the operation, type and phase.
* @param {Object} operation Operation data to pass to the trigger.
* @param {Number} type The type of operation. See Mixin.Constants for
* constants to use.
* @param {Number} phase The phase of the operation. See Mixin.Constants
* for constants to use.
* @param {Object} oldDoc The document snapshot before operations are
* carried out against the data.
* @param {Object} newDoc The document snapshot after operations are
* carried out against the data.
* @returns {boolean}
*/
processTrigger: function (operation, type, phase, oldDoc, newDoc) {
var self = this,
triggerArr,
triggerIndex,
triggerCount,
triggerItem,
response;
if (self._trigger && self._trigger[type] && self._trigger[type][phase]) {
triggerArr = self._trigger[type][phase];
triggerCount = triggerArr.length;
for (triggerIndex = 0; triggerIndex < triggerCount; triggerIndex++) {
triggerItem = triggerArr[triggerIndex];
// Check if the trigger is enabled
if (triggerItem.enabled) {
if (this.debug()) {
var typeName,
phaseName;
switch (type) {
case this.TYPE_INSERT:
typeName = 'insert';
break;
case this.TYPE_UPDATE:
typeName = 'update';
break;
case this.TYPE_REMOVE:
typeName = 'remove';
break;
default:
typeName = '';
break;
}
switch (phase) {
case this.PHASE_BEFORE:
phaseName = 'before';
break;
case this.PHASE_AFTER:
phaseName = 'after';
break;
default:
phaseName = '';
break;
}
//console.log('Triggers: Processing trigger "' + id + '" for ' + typeName + ' in phase "' + phaseName + '"');
}
// Run the trigger's method and store the response
response = triggerItem.method.call(self, operation, oldDoc, newDoc);
// Check the response for a non-expected result (anything other than
// undefined, true or false is considered a throwable error)
if (response === false) {
// The trigger wants us to cancel operations
return false;
}
if (response !== undefined && response !== true && response !== false) {
// Trigger responded with error, throw the error
throw('ForerunnerDB.Mixin.Triggers: Trigger error: ' + response);
}
}
}
// Triggers all ran without issue, return a success (true)
return true;
}
},
/**
* Returns the index of a trigger by id based on type and phase.
* @param {String} id The id of the trigger to find the index of.
* @param {Number} type The type of operation. See Mixin.Constants for
* constants to use.
* @param {Number} phase The phase of the operation. See Mixin.Constants
* for constants to use.
* @returns {number}
* @private
*/
_triggerIndexOf: function (id, type, phase) {
var self = this,
triggerArr,
triggerCount,
triggerIndex;
if (self._trigger && self._trigger[type] && self._trigger[type][phase]) {
triggerArr = self._trigger[type][phase];
triggerCount = triggerArr.length;
for (triggerIndex = 0; triggerIndex < triggerCount; triggerIndex++) {
if (triggerArr[triggerIndex].id === id) {
return triggerIndex;
}
}
}
return -1;
}
};
module.exports = Triggers;
},{"./Overload":22}],20:[function(_dereq_,module,exports){
"use strict";
/**
* Provides methods to handle object update operations.
* @mixin
*/
var Updating = {
/**
* Updates a property on an object.
* @param {Object} doc The object whose property is to be updated.
* @param {String} prop The property to update.
* @param {*} val The new value of the property.
* @private
*/
_updateProperty: function (doc, prop, val) {
doc[prop] = val;
if (this.debug()) {
console.log(this.logIdentifier() + ' Setting non-data-bound document property "' + prop + '"');
}
},
/**
* Increments a value for a property on a document by the passed number.
* @param {Object} doc The document to modify.
* @param {String} prop The property to modify.
* @param {Number} val The amount to increment by.
* @private
*/
_updateIncrement: function (doc, prop, val) {
doc[prop] += val;
},
/**
* Changes the index of an item in the passed array.
* @param {Array} arr The array to modify.
* @param {Number} indexFrom The index to move the item from.
* @param {Number} indexTo The index to move the item to.
* @private
*/
_updateSpliceMove: function (arr, indexFrom, indexTo) {
arr.splice(indexTo, 0, arr.splice(indexFrom, 1)[0]);
if (this.debug()) {
console.log(this.logIdentifier() + ' Moving non-data-bound document array index from "' + indexFrom + '" to "' + indexTo + '"');
}
},
/**
* Inserts an item into the passed array at the specified index.
* @param {Array} arr The array to insert into.
* @param {Number} index The index to insert at.
* @param {Object} doc The document to insert.
* @private
*/
_updateSplicePush: function (arr, index, doc) {
if (arr.length > index) {
arr.splice(index, 0, doc);
} else {
arr.push(doc);
}
},
/**
* Inserts an item at the end of an array.
* @param {Array} arr The array to insert the item into.
* @param {Object} doc The document to insert.
* @private
*/
_updatePush: function (arr, doc) {
arr.push(doc);
},
/**
* Removes an item from the passed array.
* @param {Array} arr The array to modify.
* @param {Number} index The index of the item in the array to remove.
* @private
*/
_updatePull: function (arr, index) {
arr.splice(index, 1);
},
/**
* Multiplies a value for a property on a document by the passed number.
* @param {Object} doc The document to modify.
* @param {String} prop The property to modify.
* @param {Number} val The amount to multiply by.
* @private
*/
_updateMultiply: function (doc, prop, val) {
doc[prop] *= val;
},
/**
* Renames a property on a document to the passed property.
* @param {Object} doc The document to modify.
* @param {String} prop The property to rename.
 * @param {String} val The new property name.
* @private
*/
_updateRename: function (doc, prop, val) {
doc[val] = doc[prop];
delete doc[prop];
},
/**
* Sets a property on a document to the passed value.
* @param {Object} doc The document to modify.
* @param {String} prop The property to set.
* @param {*} val The new property value.
* @private
*/
_updateOverwrite: function (doc, prop, val) {
doc[prop] = val;
},
/**
* Deletes a property on a document.
* @param {Object} doc The document to modify.
* @param {String} prop The property to delete.
* @private
*/
_updateUnset: function (doc, prop) {
delete doc[prop];
},
/**
* Removes all properties from an object without destroying
* the object instance, thereby maintaining data-bound linking.
* @param {Object} doc The parent object to modify.
* @param {String} prop The name of the child object to clear.
* @private
*/
_updateClear: function (doc, prop) {
var obj = doc[prop],
i;
if (obj && typeof obj === 'object') {
for (i in obj) {
if (obj.hasOwnProperty(i)) {
this._updateUnset(obj, i);
}
}
}
},
/**
* Pops an item or items from the array stack.
* @param {Object} doc The document to modify.
	 * @param {Number} val If set to a positive integer, pops the specified
	 * number of items from the end of the array; if set to a negative integer,
	 * shifts the specified number of items from the front of the array.
* @return {Boolean}
* @private
*/
_updatePop: function (doc, val) {
var updated = false,
i;
if (doc.length > 0) {
if (val > 0) {
for (i = 0; i < val; i++) {
doc.pop();
}
updated = true;
} else if (val < 0) {
for (i = 0; i > val; i--) {
doc.shift();
}
updated = true;
}
}
return updated;
}
};
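// Usage sketch: _updatePop() treats positive values as pops from the end of
// the array and negative values as shifts from the front:
//   var arr = [1, 2, 3, 4];
//   Updating._updatePop(arr, 2);  // arr => [1, 2], returns true
//   Updating._updatePop(arr, -1); // arr => [2], returns true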
module.exports = Updating;
},{}],21:[function(_dereq_,module,exports){
"use strict";
var Shared = _dereq_('./Shared'),
Path = _dereq_('./Path');
/**
* The operation class, used to store details about an operation being
* performed by the database.
* @param {String} name The name of the operation.
* @constructor
*/
var Operation = function (name) {
this.pathSolver = new Path();
this.counter = 0;
this.init.apply(this, arguments);
};
Operation.prototype.init = function (name) {
this._data = {
operation: name, // The name of the operation executed such as "find", "update" etc
index: {
potential: [], // Indexes that could have potentially been used
used: false // The index that was picked to use
},
		steps: [], // The steps taken to generate the query results
time: {
startMs: 0,
stopMs: 0,
totalMs: 0,
process: {}
},
flag: {}, // An object with flags that denote certain execution paths
log: [] // Any extra data that might be useful such as warnings or helpful hints
};
};
Shared.addModule('Operation', Operation);
Shared.mixin(Operation.prototype, 'Mixin.ChainReactor');
/**
* Starts the operation timer.
*/
Operation.prototype.start = function () {
this._data.time.startMs = new Date().getTime();
};
/**
* Adds an item to the operation log.
* @param {String} event The item to log.
* @returns {*}
*/
Operation.prototype.log = function (event) {
if (event) {
		var lastLogTime = this._data.log.length > 0 ? this._data.log[this._data.log.length - 1].time : 0,
logObj = {
event: event,
time: new Date().getTime(),
delta: 0
};
this._data.log.push(logObj);
if (lastLogTime) {
logObj.delta = logObj.time - lastLogTime;
}
return this;
}
return this._data.log;
};
/**
* Called when starting and ending a timed operation, used to time
* internal calls within an operation's execution.
* @param {String} section An operation name.
* @returns {*}
*/
Operation.prototype.time = function (section) {
if (section !== undefined) {
var process = this._data.time.process,
processObj = process[section] = process[section] || {};
if (!processObj.startMs) {
// Timer started
processObj.startMs = new Date().getTime();
processObj.stepObj = {
name: section
};
this._data.steps.push(processObj.stepObj);
} else {
processObj.stopMs = new Date().getTime();
processObj.totalMs = processObj.stopMs - processObj.startMs;
processObj.stepObj.totalMs = processObj.totalMs;
delete processObj.stepObj;
}
return this;
}
return this._data.time;
};
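// Usage sketch: call time() twice with the same section name - the first call
// starts that section's timer, the second stops it and records the delta:
//   var op = new Operation('find');
//   op.start();
//   op.time('indexScan'); // start section timer
//   // ... do work ...
//   op.time('indexScan'); // stop section timer, records totalMs in steps
//   op.stop();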
/**
* Used to set key/value flags during operation execution.
 * @param {String} key The flag key to get or set.
 * @param {String=} val Optional value to set against the flag key.
* @returns {*}
*/
Operation.prototype.flag = function (key, val) {
if (key !== undefined && val !== undefined) {
this._data.flag[key] = val;
} else if (key !== undefined) {
return this._data.flag[key];
} else {
return this._data.flag;
}
};
Operation.prototype.data = function (path, val, noTime) {
if (val !== undefined) {
// Assign value to object path
this.pathSolver.set(this._data, path, val);
return this;
}
return this.pathSolver.get(this._data, path);
};
Operation.prototype.pushData = function (path, val, noTime) {
// Assign value to object path
this.pathSolver.push(this._data, path, val);
};
/**
* Stops the operation timer.
*/
Operation.prototype.stop = function () {
this._data.time.stopMs = new Date().getTime();
this._data.time.totalMs = this._data.time.stopMs - this._data.time.startMs;
};
Shared.finishModule('Operation');
module.exports = Operation;
},{"./Path":23,"./Shared":26}],22:[function(_dereq_,module,exports){
"use strict";
/**
* Allows a method to accept overloaded calls with different parameters controlling
* which passed overload function is called.
* @param {Object} def
* @returns {Function}
* @constructor
*/
var Overload = function (def) {
if (def) {
var self = this,
index,
count,
tmpDef,
defNewKey,
sigIndex,
signatures;
if (!(def instanceof Array)) {
tmpDef = {};
// Def is an object, make sure all prop names are devoid of spaces
for (index in def) {
if (def.hasOwnProperty(index)) {
defNewKey = index.replace(/ /g, '');
// Check if the definition array has a * string in it
if (defNewKey.indexOf('*') === -1) {
// No * found
tmpDef[defNewKey] = def[index];
} else {
// A * was found, generate the different signatures that this
// definition could represent
signatures = this.generateSignaturePermutations(defNewKey);
for (sigIndex = 0; sigIndex < signatures.length; sigIndex++) {
if (!tmpDef[signatures[sigIndex]]) {
tmpDef[signatures[sigIndex]] = def[index];
}
}
}
}
}
def = tmpDef;
}
return function () {
var arr = [],
lookup,
type,
name;
// Check if we are being passed a key/function object or an array of functions
if (def instanceof Array) {
// We were passed an array of functions
count = def.length;
for (index = 0; index < count; index++) {
if (def[index].length === arguments.length) {
return self.callExtend(this, '$main', def, def[index], arguments);
}
}
} else {
// Generate lookup key from arguments
// Copy arguments to an array
for (index = 0; index < arguments.length; index++) {
type = typeof arguments[index];
// Handle detecting arrays
if (type === 'object' && arguments[index] instanceof Array) {
type = 'array';
}
// Handle being presented with a single undefined argument
if (arguments.length === 1 && type === 'undefined') {
break;
}
// Add the type to the argument types array
arr.push(type);
}
lookup = arr.join(',');
// Check for an exact lookup match
if (def[lookup]) {
return self.callExtend(this, '$main', def, def[lookup], arguments);
} else {
for (index = arr.length; index >= 0; index--) {
// Get the closest match
lookup = arr.slice(0, index).join(',');
if (def[lookup + ',...']) {
// Matched against arguments + "any other"
return self.callExtend(this, '$main', def, def[lookup + ',...'], arguments);
}
}
}
}
name = typeof this.name === 'function' ? this.name() : 'Unknown';
console.log('Overload: ', def);
throw('ForerunnerDB.Overload "' + name + '": Overloaded method does not have a matching signature for the passed arguments: ' + this.jStringify(arr));
};
}
return function () {};
};
/**
* Generates an array of all the different definition signatures that can be
* created from the passed string with a catch-all wildcard *. E.g. it will
* convert the signature: string,*,string to all potentials:
* string,string,string
* string,number,string
 * string,object,string
 * string,function,string
* string,undefined,string
*
* @param {String} str Signature string with a wildcard in it.
* @returns {Array} An array of signature strings that are generated.
*/
Overload.prototype.generateSignaturePermutations = function (str) {
var signatures = [],
newSignature,
types = ['string', 'object', 'number', 'function', 'undefined'],
index;
if (str.indexOf('*') > -1) {
// There is at least one "any" type, break out into multiple keys
// We could do this at query time with regular expressions but
// would be significantly slower
for (index = 0; index < types.length; index++) {
newSignature = str.replace('*', types[index]);
signatures = signatures.concat(this.generateSignaturePermutations(newSignature));
}
} else {
signatures.push(str);
}
return signatures;
};
Overload.prototype.callExtend = function (context, prop, propContext, func, args) {
var tmp,
ret;
if (context && propContext[prop]) {
tmp = context[prop];
context[prop] = propContext[prop];
ret = func.apply(context, args);
context[prop] = tmp;
return ret;
} else {
return func.apply(context, args);
}
};
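// Usage sketch: definition keys are comma-separated typeof signatures, '*'
// matches any single type and '...' matches any remaining arguments:
//   var greet = new Overload({
//       'string': function (name) { return 'Hi ' + name; },
//       'string, number': function (name, age) { return name + ' is ' + age; }
//   });
//   greet('Jo');     // => 'Hi Jo'
//   greet('Jo', 30); // => 'Jo is 30'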
module.exports = Overload;
},{}],23:[function(_dereq_,module,exports){
"use strict";
var Shared = _dereq_('./Shared');
/**
* Path object used to resolve object paths and retrieve data from
* objects by using paths.
* @param {String=} path The path to assign.
* @constructor
*/
var Path = function (path) {
this.init.apply(this, arguments);
};
Path.prototype.init = function (path) {
if (path) {
this.path(path);
}
};
Shared.addModule('Path', Path);
Shared.mixin(Path.prototype, 'Mixin.Common');
Shared.mixin(Path.prototype, 'Mixin.ChainReactor');
/**
* Gets / sets the given path for the Path instance.
* @param {String=} path The path to assign.
*/
Path.prototype.path = function (path) {
if (path !== undefined) {
this._path = this.clean(path);
this._pathParts = this._path.split('.');
return this;
}
return this._path;
};
/**
* Tests if the passed object has the paths that are specified and that
* a value exists in those paths.
* @param {Object} testKeys The object describing the paths to test for.
* @param {Object} testObj The object to test paths against.
* @returns {Boolean} True if the object paths exist.
*/
Path.prototype.hasObjectPaths = function (testKeys, testObj) {
var result = true,
i;
for (i in testKeys) {
if (testKeys.hasOwnProperty(i)) {
if (testObj[i] === undefined) {
return false;
}
if (typeof testKeys[i] === 'object') {
// Recurse object
result = this.hasObjectPaths(testKeys[i], testObj[i]);
// Should we exit early?
if (!result) {
return false;
}
}
}
}
return result;
};
/**
* Counts the total number of key endpoints in the passed object.
* @param {Object} testObj The object to count key endpoints for.
* @returns {Number} The number of endpoints.
*/
Path.prototype.countKeys = function (testObj) {
var totalKeys = 0,
i;
for (i in testObj) {
if (testObj.hasOwnProperty(i)) {
if (testObj[i] !== undefined) {
if (typeof testObj[i] !== 'object') {
totalKeys++;
} else {
totalKeys += this.countKeys(testObj[i]);
}
}
}
}
return totalKeys;
};
/**
* Tests if the passed object has the paths that are specified and that
* a value exists in those paths and if so returns the number matched.
* @param {Object} testKeys The object describing the paths to test for.
* @param {Object} testObj The object to test paths against.
* @returns {Object} Stats on the matched keys
*/
Path.prototype.countObjectPaths = function (testKeys, testObj) {
var matchData,
matchedKeys = {},
matchedKeyCount = 0,
totalKeyCount = 0,
i;
for (i in testObj) {
if (testObj.hasOwnProperty(i)) {
if (typeof testObj[i] === 'object') {
// The test / query object key is an object, recurse
matchData = this.countObjectPaths(testKeys[i], testObj[i]);
matchedKeys[i] = matchData.matchedKeys;
totalKeyCount += matchData.totalKeyCount;
matchedKeyCount += matchData.matchedKeyCount;
} else {
// The test / query object has a property that is not an object so add it as a key
totalKeyCount++;
// Check if the test keys also have this key and it is also not an object
if (testKeys && testKeys[i] && typeof testKeys[i] !== 'object') {
matchedKeys[i] = true;
matchedKeyCount++;
} else {
matchedKeys[i] = false;
}
}
}
}
return {
matchedKeys: matchedKeys,
matchedKeyCount: matchedKeyCount,
totalKeyCount: totalKeyCount
};
};
/**
* Takes a non-recursive object and converts the object hierarchy into
 * an array of objects, each describing a dot-notation path.
* @param {Object} obj The object to parse.
* @param {Boolean=} withValue If true will include a 'value' key in the returned
* object that represents the value the object path points to.
* @returns {Object}
*/
Path.prototype.parse = function (obj, withValue) {
var paths = [],
path = '',
resultData,
i, k;
for (i in obj) {
if (obj.hasOwnProperty(i)) {
// Set the path to the key
path = i;
if (typeof(obj[i]) === 'object') {
if (withValue) {
resultData = this.parse(obj[i], withValue);
for (k = 0; k < resultData.length; k++) {
paths.push({
path: path + '.' + resultData[k].path,
value: resultData[k].value
});
}
} else {
resultData = this.parse(obj[i]);
for (k = 0; k < resultData.length; k++) {
paths.push({
path: path + '.' + resultData[k].path
});
}
}
} else {
if (withValue) {
paths.push({
path: path,
value: obj[i]
});
} else {
paths.push({
path: path
});
}
}
}
}
return paths;
};
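// Usage sketch (illustrative, not in the original source):
//
//     new Path().parse({user: {name: 'Jim'}}, true);
//     // => [{path: 'user.name', value: 'Jim'}]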
/**
* Takes a non-recursive object and converts the object hierarchy into
* an array of path strings that allow you to target all possible paths
* in an object.
*
* The options object accepts an "ignore" field with a regular expression
* as the value. If any key matches the expression it is not included in
* the results.
*
* The options object accepts a boolean "verbose" field. If set to true
* the results will include all paths leading up to endpoints as well as
 * the endpoints themselves.
 *
 * @param {Object} obj The object to parse.
 * @param {Object=} options Optional options object ("ignore" and/or "verbose").
 * @returns {Array} An array of path strings.
*/
Path.prototype.parseArr = function (obj, options) {
options = options || {};
return this._parseArr(obj, '', [], options);
};
Path.prototype._parseArr = function (obj, path, paths, options) {
var i,
newPath = '';
path = path || '';
paths = paths || [];
for (i in obj) {
if (obj.hasOwnProperty(i)) {
if (!options.ignore || (options.ignore && !options.ignore.test(i))) {
if (path) {
newPath = path + '.' + i;
} else {
newPath = i;
}
if (typeof(obj[i]) === 'object') {
if (options.verbose) {
paths.push(newPath);
}
this._parseArr(obj[i], newPath, paths, options);
} else {
paths.push(newPath);
}
}
}
}
return paths;
};
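// Usage sketch (illustrative, not in the original source):
//
//     var doc = {_id: 1, user: {name: 'Jim'}};
//     new Path().parseArr(doc); // => ['_id', 'user.name']
//     new Path().parseArr(doc, {ignore: /^_/}); // => ['user.name']
//     new Path().parseArr(doc, {verbose: true}); // => ['_id', 'user', 'user.name']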
Path.prototype.valueOne = function (obj, path) {
return this.value(obj, path)[0];
};
/**
* Gets the value(s) that the object contains for the currently assigned path string.
* @param {Object} obj The object to evaluate the path against.
* @param {String=} path A path to use instead of the existing one passed in path().
* @param {Object=} options An optional options object.
* @returns {Array} An array of values for the given path.
*/
Path.prototype.value = function (obj, path, options) {
var pathParts,
arr,
arrCount,
objPart,
objPartParent,
valuesArr,
returnArr,
i, k;
if (obj !== undefined && typeof obj === 'object') {
if (!options || options && !options.skipArrCheck) {
// Check if we were passed an array of objects and if so,
// iterate over the array and return the value from each
// array item
if (obj instanceof Array) {
returnArr = [];
for (i = 0; i < obj.length; i++) {
returnArr.push(this.valueOne(obj[i], path));
}
return returnArr;
}
}
valuesArr = [];
if (path !== undefined) {
path = this.clean(path);
pathParts = path.split('.');
}
arr = pathParts || this._pathParts;
arrCount = arr.length;
objPart = obj;
for (i = 0; i < arrCount; i++) {
objPart = objPart[arr[i]];
if (objPartParent instanceof Array) {
// Search inside the array for the next key
for (k = 0; k < objPartParent.length; k++) {
valuesArr = valuesArr.concat(this.value(objPartParent, k + '.' + arr[i], {skipArrCheck: true}));
}
return valuesArr;
} else {
if (!objPart || typeof(objPart) !== 'object') {
break;
}
}
objPartParent = objPart;
}
return [objPart];
} else {
return [];
}
};
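// Usage sketch (illustrative, not in the original source):
//
//     var p = new Path();
//     p.value({user: {age: 30}}, 'user.age'); // => [30]
//     p.valueOne({user: {age: 30}}, 'user.age'); // => 30
//     // Arrays along the path are searched, so every matching endpoint
//     // is returned:
//     p.value({rows: [{n: 1}, {n: 2}]}, 'rows.n'); // => [1, 2]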
/**
* Sets a value on an object for the specified path.
* @param {Object} obj The object to update.
* @param {String} path The path to update.
* @param {*} val The value to set the object path to.
* @returns {*}
*/
Path.prototype.set = function (obj, path, val) {
if (obj !== undefined && path !== undefined) {
var pathParts,
part;
path = this.clean(path);
pathParts = path.split('.');
part = pathParts.shift();
if (pathParts.length) {
// Generate the path part in the object if it does not already exist
obj[part] = obj[part] || {};
// Recurse
this.set(obj[part], pathParts.join('.'), val);
} else {
// Set the value
obj[part] = val;
}
}
return obj;
};
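// Usage sketch (illustrative, not in the original source):
//
//     var obj = {};
//     new Path().set(obj, 'user.profile.name', 'Jim');
//     // obj is now {user: {profile: {name: 'Jim'}}}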
Path.prototype.get = function (obj, path) {
return this.value(obj, path)[0];
};
/**
* Push a value to an array on an object for the specified path.
* @param {Object} obj The object to update.
* @param {String} path The path to the array to push to.
* @param {*} val The value to push to the array at the object path.
* @returns {*}
*/
Path.prototype.push = function (obj, path, val) {
if (obj !== undefined && path !== undefined) {
var pathParts,
part;
path = this.clean(path);
pathParts = path.split('.');
part = pathParts.shift();
if (pathParts.length) {
// Generate the path part in the object if it does not already exist
obj[part] = obj[part] || {};
			// Recurse with push() (not set(), which would overwrite the
			// nested endpoint instead of appending to it)
			this.push(obj[part], pathParts.join('.'), val);
} else {
// Set the value
obj[part] = obj[part] || [];
if (obj[part] instanceof Array) {
obj[part].push(val);
} else {
throw('ForerunnerDB.Path: Cannot push to a path whose endpoint is not an array!');
}
}
}
return obj;
};
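// Usage sketch (illustrative, not in the original source):
//
//     var obj = {},
//         p = new Path();
//     p.push(obj, 'user.tags', 'admin');
//     p.push(obj, 'user.tags', 'dev');
//     // obj is now {user: {tags: ['admin', 'dev']}}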
/**
* Gets the value(s) that the object contains for the currently assigned path string
* with their associated keys.
* @param {Object} obj The object to evaluate the path against.
* @param {String=} path A path to use instead of the existing one passed in path().
 * @returns {String} A "key:value" hash string for the endpoint of the given path.
*/
Path.prototype.keyValue = function (obj, path) {
var pathParts,
arr,
arrCount,
objPart,
objPartParent,
objPartHash,
i;
if (path !== undefined) {
path = this.clean(path);
pathParts = path.split('.');
}
arr = pathParts || this._pathParts;
arrCount = arr.length;
objPart = obj;
for (i = 0; i < arrCount; i++) {
objPart = objPart[arr[i]];
if (!objPart || typeof(objPart) !== 'object') {
objPartHash = arr[i] + ':' + objPart;
break;
}
objPartParent = objPart;
}
return objPartHash;
};
/**
* Removes leading period (.) from string and returns it.
* @param {String} str The string to clean.
* @returns {*}
*/
Path.prototype.clean = function (str) {
if (str.substr(0, 1) === '.') {
str = str.substr(1, str.length -1);
}
return str;
};
Shared.finishModule('Path');
module.exports = Path;
},{"./Shared":26}],24:[function(_dereq_,module,exports){
"use strict";
var Shared = _dereq_('./Shared');
/**
* Provides chain reactor node linking so that a chain reaction can propagate
* down a node tree. Effectively creates a chain link between the reactorIn and
* reactorOut objects where a chain reaction from the reactorIn is passed through
* the reactorProcess before being passed to the reactorOut object. Reactor
* packets are only passed through to the reactorOut if the reactor IO method
* chainSend is used.
* @param {*} reactorIn An object that has the Mixin.ChainReactor methods mixed
* in to it. Chain reactions that occur inside this object will be passed through
* to the reactorOut object.
* @param {*} reactorOut An object that has the Mixin.ChainReactor methods mixed
* in to it. Chain reactions that occur in the reactorIn object will be passed
* through to this object.
* @param {Function} reactorProcess The processing method to use when chain
* reactions occur.
* @constructor
*/
var ReactorIO = function (reactorIn, reactorOut, reactorProcess) {
if (reactorIn && reactorOut && reactorProcess) {
this._reactorIn = reactorIn;
this._reactorOut = reactorOut;
this._chainHandler = reactorProcess;
if (!reactorIn.chain || !reactorOut.chainReceive) {
throw('ForerunnerDB.ReactorIO: ReactorIO requires passed in and out objects to implement the ChainReactor mixin!');
}
// Register the reactorIO with the input
reactorIn.chain(this);
// Register the output with the reactorIO
this.chain(reactorOut);
} else {
throw('ForerunnerDB.ReactorIO: ReactorIO requires in, out and process arguments to instantiate!');
}
};
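// Usage sketch (illustrative, not in the original source): wire two chain
// reactors together with a processing step in between. Here `sourceIn` and
// `targetOut` are hypothetical objects that already mix in Mixin.ChainReactor:
//
//     var io = new ReactorIO(sourceIn, targetOut, function (chainPacket) {
//         // Inspect / transform the packet, then pass it along
//         this.chainSend(chainPacket.type, chainPacket.data);
//     });
//     io.drop(); // break the link when finished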
Shared.addModule('ReactorIO', ReactorIO);
/**
* Drop a reactor IO object, breaking the reactor link between the in and out
* reactor nodes.
* @returns {boolean}
*/
ReactorIO.prototype.drop = function () {
if (!this.isDropped()) {
this._state = 'dropped';
// Remove links
if (this._reactorIn) {
this._reactorIn.unChain(this);
}
if (this._reactorOut) {
this.unChain(this._reactorOut);
}
delete this._reactorIn;
delete this._reactorOut;
delete this._chainHandler;
this.emit('drop', this);
delete this._listeners;
}
return true;
};
/**
* Gets / sets the current state.
* @param {String=} val The name of the state to set.
* @returns {*}
*/
Shared.synthesize(ReactorIO.prototype, 'state');
Shared.mixin(ReactorIO.prototype, 'Mixin.Common');
Shared.mixin(ReactorIO.prototype, 'Mixin.ChainReactor');
Shared.mixin(ReactorIO.prototype, 'Mixin.Events');
Shared.finishModule('ReactorIO');
module.exports = ReactorIO;
},{"./Shared":26}],25:[function(_dereq_,module,exports){
"use strict";
/**
* Provides functionality to encode and decode JavaScript objects to strings
* and back again. This differs from JSON.stringify and JSON.parse in that
* special objects such as dates can be encoded to strings and back again
 * so that the reconstituted object still contains a real JavaScript
 * date object.
* @constructor
*/
var Serialiser = function () {
this.init.apply(this, arguments);
};
Serialiser.prototype.init = function () {
this._encoder = [];
this._decoder = {};
// Register our handlers
this.registerEncoder('$date', function (data) {
if (data instanceof Date) {
return data.toISOString();
}
});
this.registerDecoder('$date', function (data) {
return new Date(data);
});
};
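// Usage sketch (illustrative, not in the original source): dates survive a
// stringify / parse round-trip via the "$date" handlers registered above:
//
//     var s = new Serialiser(),
//         str = s.stringify({when: new Date()});
//     s.parse(str).when instanceof Date; // => true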
/**
* Register an encoder that can handle encoding for a particular
* object type.
* @param {String} handles The name of the handler e.g. $date.
* @param {Function} method The encoder method.
*/
Serialiser.prototype.registerEncoder = function (handles, method) {
this._encoder.push(function (data) {
var methodVal = method(data),
returnObj;
if (methodVal !== undefined) {
returnObj = {};
returnObj[handles] = methodVal;
}
return returnObj;
});
};
/**
* Register a decoder that can handle decoding for a particular
* object type.
* @param {String} handles The name of the handler e.g. $date. When an object
 * has a field matching this handler name then this decoder will be invoked
 * to provide a decoded version of the data that was previously encoded by
 * its counterpart encoder method.
* @param {Function} method The decoder method.
*/
Serialiser.prototype.registerDecoder = function (handles, method) {
this._decoder[handles] = method;
};
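// Illustrative sketch (not in the original source): a matching encoder /
// decoder pair can be registered for other special types, e.g. RegExp.
// `serialiser` is a hypothetical Serialiser instance:
//
//     serialiser.registerEncoder('$regexp', function (data) {
//         if (data instanceof RegExp) {
//             return {
//                 source: data.source,
//                 params: (data.global ? 'g' : '') + (data.ignoreCase ? 'i' : '')
//             };
//         }
//     });
//     serialiser.registerDecoder('$regexp', function (data) {
//         return new RegExp(data.source, data.params);
//     });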
/**
* Loops the encoders and asks each one if it wants to handle encoding for
* the passed data object. If no value is returned (undefined) then the data
* will be passed to the next encoder and so on. If a value is returned the
* loop will break and the encoded data will be used.
* @param {Object} data The data object to handle.
* @returns {*} The encoded data.
* @private
*/
Serialiser.prototype._encode = function (data) {
// Loop the encoders and if a return value is given by an encoder
// the loop will exit and return that value.
var count = this._encoder.length,
retVal;
while (count-- && !retVal) {
retVal = this._encoder[count](data);
}
return retVal;
};
/**
* Converts a previously encoded string back into an object.
* @param {String} data The string to convert to an object.
* @returns {Object} The reconstituted object.
*/
Serialiser.prototype.parse = function (data) {
return this._parse(JSON.parse(data));
};
/**
* Handles restoring an object with special data markers back into
 * its original format.
* @param {Object} data The object to recurse.
* @param {Object=} target The target object to restore data to.
* @returns {Object} The final restored object.
* @private
*/
Serialiser.prototype._parse = function (data, target) {
var i;
if (typeof data === 'object' && data !== null) {
if (data instanceof Array) {
target = target || [];
} else {
target = target || {};
}
// Iterate through the object's keys and handle
// special object types and restore them
for (i in data) {
if (data.hasOwnProperty(i)) {
if (i.substr(0, 1) === '$' && this._decoder[i]) {
// This is a special object type and a handler
// exists, restore it
return this._decoder[i](data[i]);
}
// Not a special object or no handler, recurse as normal
target[i] = this._parse(data[i], target[i]);
}
}
} else {
target = data;
}
// The data is a basic type
return target;
};
/**
 * Converts an object to an encoded string representation.
 * @param {Object} data The object to encode.
 * @returns {String} The encoded JSON string.
 */
Serialiser.prototype.stringify = function (data) {
return JSON.stringify(this._stringify(data));
};
/**
* Recurse down an object and encode special objects so they can be
* stringified and later restored.
* @param {Object} data The object to parse.
* @param {Object=} target The target object to store converted data to.
* @returns {Object} The converted object.
* @private
*/
Serialiser.prototype._stringify = function (data, target) {
var handledData,
i;
if (typeof data === 'object' && data !== null) {
// Handle special object types so they can be encoded with
// a special marker and later restored by a decoder counterpart
handledData = this._encode(data);
if (handledData) {
// An encoder handled this object type so return it now
return handledData;
}
if (data instanceof Array) {
target = target || [];
} else {
target = target || {};
}
// Iterate through the object's keys and serialise
for (i in data) {
if (data.hasOwnProperty(i)) {
target[i] = this._stringify(data[i], target[i]);
}
}
} else {
target = data;
}
// The data is a basic type
return target;
};
module.exports = Serialiser;
},{}],26:[function(_dereq_,module,exports){
"use strict";
var Overload = _dereq_('./Overload');
/**
* A shared object that can be used to store arbitrary data between class
* instances, and access helper methods.
* @mixin
*/
var Shared = {
version: '1.3.505',
modules: {},
plugins: {},
_synth: {},
/**
* Adds a module to ForerunnerDB.
* @memberof Shared
* @param {String} name The name of the module.
* @param {Function} module The module class.
*/
addModule: function (name, module) {
// Store the module in the module registry
this.modules[name] = module;
// Tell the universe we are loading this module
this.emit('moduleLoad', [name, module]);
},
/**
* Called by the module once all processing has been completed. Used to determine
* if the module is ready for use by other modules.
* @memberof Shared
* @param {String} name The name of the module.
*/
finishModule: function (name) {
if (this.modules[name]) {
// Set the finished loading flag to true
this.modules[name]._fdbFinished = true;
// Assign the module name to itself so it knows what it
// is called
if (this.modules[name].prototype) {
this.modules[name].prototype.className = name;
} else {
this.modules[name].className = name;
}
this.emit('moduleFinished', [name, this.modules[name]]);
} else {
throw('ForerunnerDB.Shared: finishModule called on a module that has not been registered with addModule(): ' + name);
}
},
/**
* Will call your callback method when the specified module has loaded. If the module
* is already loaded the callback is called immediately.
* @memberof Shared
* @param {String} name The name of the module.
* @param {Function} callback The callback method to call when the module is loaded.
*/
moduleFinished: function (name, callback) {
if (this.modules[name] && this.modules[name]._fdbFinished) {
if (callback) { callback(name, this.modules[name]); }
} else {
this.on('moduleFinished', callback);
}
},
/**
* Determines if a module has been added to ForerunnerDB or not.
* @memberof Shared
* @param {String} name The name of the module.
* @returns {Boolean} True if the module exists or false if not.
*/
moduleExists: function (name) {
return Boolean(this.modules[name]);
},
/**
* Adds the properties and methods defined in the mixin to the passed object.
* @memberof Shared
* @param {Object} obj The target object to add mixin key/values to.
* @param {String} mixinName The name of the mixin to add to the object.
*/
mixin: new Overload({
'object, string': function (obj, mixinName) {
var mixinObj;
if (typeof mixinName === 'string') {
mixinObj = this.mixins[mixinName];
if (!mixinObj) {
throw('ForerunnerDB.Shared: Cannot find mixin named: ' + mixinName);
}
}
return this.$main.call(this, obj, mixinObj);
},
'object, *': function (obj, mixinObj) {
return this.$main.call(this, obj, mixinObj);
},
'$main': function (obj, mixinObj) {
if (mixinObj && typeof mixinObj === 'object') {
for (var i in mixinObj) {
if (mixinObj.hasOwnProperty(i)) {
obj[i] = mixinObj[i];
}
}
}
return obj;
}
}),
/**
* Generates a generic getter/setter method for the passed method name.
* @memberof Shared
* @param {Object} obj The object to add the getter/setter to.
* @param {String} name The name of the getter/setter to generate.
* @param {Function=} extend A method to call before executing the getter/setter.
* The existing getter/setter can be accessed from the extend method via the
* $super e.g. this.$super();
*/
synthesize: function (obj, name, extend) {
this._synth[name] = this._synth[name] || function (val) {
if (val !== undefined) {
this['_' + name] = val;
return this;
}
return this['_' + name];
};
if (extend) {
var self = this;
obj[name] = function () {
var tmp = this.$super,
ret;
this.$super = self._synth[name];
ret = extend.apply(this, arguments);
this.$super = tmp;
return ret;
};
} else {
obj[name] = this._synth[name];
}
},
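	// Usage sketch (illustrative, not in the original source):
	//
	//     Shared.synthesize(MyClass.prototype, 'name');
	//     instance.name('foo'); // set: stores this._name and returns the instance
	//     instance.name(); // get: returns 'foo'
	//
	//     // With an extender that wraps the generated accessor:
	//     Shared.synthesize(MyClass.prototype, 'state', function (val) {
	//         return this.$super.call(this, val);
	//     });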
/**
* Allows a method to be overloaded.
* @memberof Shared
 * @param arr An object mapping comma-separated type signatures to handler functions.
* @returns {Function}
* @constructor
*/
overload: Overload,
/**
* Define the mixins that other modules can use as required.
* @memberof Shared
*/
mixins: {
'Mixin.Common': _dereq_('./Mixin.Common'),
'Mixin.Events': _dereq_('./Mixin.Events'),
'Mixin.ChainReactor': _dereq_('./Mixin.ChainReactor'),
'Mixin.CRUD': _dereq_('./Mixin.CRUD'),
'Mixin.Constants': _dereq_('./Mixin.Constants'),
'Mixin.Triggers': _dereq_('./Mixin.Triggers'),
'Mixin.Sorting': _dereq_('./Mixin.Sorting'),
'Mixin.Matching': _dereq_('./Mixin.Matching'),
'Mixin.Updating': _dereq_('./Mixin.Updating'),
'Mixin.Tags': _dereq_('./Mixin.Tags')
}
};
// Add event handling to shared
Shared.mixin(Shared, 'Mixin.Events');
module.exports = Shared;
},{"./Mixin.CRUD":11,"./Mixin.ChainReactor":12,"./Mixin.Common":13,"./Mixin.Constants":14,"./Mixin.Events":15,"./Mixin.Matching":16,"./Mixin.Sorting":17,"./Mixin.Tags":18,"./Mixin.Triggers":19,"./Mixin.Updating":20,"./Overload":22}],27:[function(_dereq_,module,exports){
/* jshint strict:false */
if (!Array.prototype.filter) {
Array.prototype.filter = function(fun/*, thisArg*/) {
if (this === void 0 || this === null) {
throw new TypeError();
}
var t = Object(this);
var len = t.length >>> 0; // jshint ignore:line
if (typeof fun !== 'function') {
throw new TypeError();
}
var res = [];
var thisArg = arguments.length >= 2 ? arguments[1] : void 0;
for (var i = 0; i < len; i++) {
if (i in t) {
var val = t[i];
// NOTE: Technically this should Object.defineProperty at
// the next index, as push can be affected by
// properties on Object.prototype and Array.prototype.
// But that method's new, and collisions should be
// rare, so use the more-compatible alternative.
if (fun.call(thisArg, val, i, t)) {
res.push(val);
}
}
}
return res;
};
}
if (typeof Object.create !== 'function') {
Object.create = (function() {
var Temp = function() {};
return function (prototype) {
if (arguments.length > 1) {
throw Error('Second argument not supported');
}
if (typeof prototype !== 'object') {
throw TypeError('Argument must be an object');
}
Temp.prototype = prototype;
var result = new Temp();
Temp.prototype = null;
return result;
};
})();
}
// Production steps of ECMA-262, Edition 5, 15.4.4.14
// Reference: http://es5.github.io/#x15.4.4.14
if (!Array.prototype.indexOf) {
Array.prototype.indexOf = function(searchElement, fromIndex) {
var k;
// 1. Let O be the result of calling ToObject passing
// the this value as the argument.
		if (this == null) { // "==" also catches undefined, matching the error message
throw new TypeError('"this" is null or not defined');
}
var O = Object(this);
// 2. Let lenValue be the result of calling the Get
// internal method of O with the argument "length".
// 3. Let len be ToUint32(lenValue).
var len = O.length >>> 0; // jshint ignore:line
// 4. If len is 0, return -1.
if (len === 0) {
return -1;
}
// 5. If argument fromIndex was passed let n be
// ToInteger(fromIndex); else let n be 0.
var n = +fromIndex || 0;
if (Math.abs(n) === Infinity) {
n = 0;
}
// 6. If n >= len, return -1.
if (n >= len) {
return -1;
}
// 7. If n >= 0, then Let k be n.
// 8. Else, n<0, Let k be len - abs(n).
// If k is less than 0, then let k be 0.
k = Math.max(n >= 0 ? n : len - Math.abs(n), 0);
// 9. Repeat, while k < len
while (k < len) {
// a. Let Pk be ToString(k).
// This is implicit for LHS operands of the in operator
// b. Let kPresent be the result of calling the
// HasProperty internal method of O with argument Pk.
// This step can be combined with c
// c. If kPresent is true, then
// i. Let elementK be the result of calling the Get
// internal method of O with the argument ToString(k).
// ii. Let same be the result of applying the
// Strict Equality Comparison Algorithm to
// searchElement and elementK.
// iii. If same is true, return k.
if (k in O && O[k] === searchElement) {
return k;
}
k++;
}
return -1;
};
}
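// Illustrative check (not in the original source): the polyfill mirrors the
// native behaviour, e.g. [1, 2, 3].indexOf(2) === 1 and
// [1, 2, 3].indexOf(4) === -1, including negative fromIndex handling:
// [1, 2, 3].indexOf(3, -1) === 2.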
module.exports = {};
},{}]},{},[1]);<|fim▁end|> | coll,
cat = {},
arr, |
<|file_name|>present-to-all.js<|end_file_name|><|fim▁begin|><|fim▁hole|>
Object.defineProperty(exports, "__esModule", {
value: true
});
var _react = require('react');
var _react2 = _interopRequireDefault(_react);
var _pure = require('recompose/pure');
var _pure2 = _interopRequireDefault(_pure);
var _SvgIcon = require('../../SvgIcon');
var _SvgIcon2 = _interopRequireDefault(_SvgIcon);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
var CommunicationPresentToAll = function CommunicationPresentToAll(props) {
return _react2.default.createElement(
_SvgIcon2.default,
props,
_react2.default.createElement('path', { d: 'M21 3H3c-1.11 0-2 .89-2 2v14c0 1.11.89 2 2 2h18c1.11 0 2-.89 2-2V5c0-1.11-.89-2-2-2zm0 16.02H3V4.98h18v14.04zM10 12H8l4-4 4 4h-2v4h-4v-4z' })
);
};
CommunicationPresentToAll = (0, _pure2.default)(CommunicationPresentToAll);
CommunicationPresentToAll.displayName = 'CommunicationPresentToAll';
CommunicationPresentToAll.muiName = 'SvgIcon';
exports.default = CommunicationPresentToAll;<|fim▁end|> | 'use strict'; |
<|file_name|>configs-fn_args_density-vertical.rs<|end_file_name|><|fim▁begin|>// rustfmt-fn_args_density: Vertical
// Function arguments density
trait Lorem {
fn lorem(
ipsum: Ipsum,
dolor: Dolor,
sit: Sit,
amet: Amet,
);
fn lorem(
ipsum: Ipsum,
dolor: Dolor,
sit: Sit,
amet: Amet,
) {
// body
}
fn lorem(
ipsum: Ipsum,
dolor: Dolor,
sit: Sit,
amet: Amet,
consectetur: onsectetur,
adipiscing: Adipiscing,
elit: Elit,
);
fn lorem(
ipsum: Ipsum,
dolor: Dolor,
sit: Sit,
amet: Amet,
consectetur: onsectetur,
adipiscing: Adipiscing,
elit: Elit,
) {
// body
}<|fim▁hole|><|fim▁end|> | } |
<|file_name|>compile_utils_test.py<|end_file_name|><|fim▁begin|># Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for compile utitilies."""
import tensorflow.compat.v2 as tf
from keras import backend
from keras.testing_infra import test_combinations
from keras import losses as losses_mod
from keras import metrics as metrics_mod
from keras.engine import compile_utils
class LossesContainerTest(test_combinations.TestCase):
def test_single_loss(self):
loss_container = compile_utils.LossesContainer('mse')
y_t, y_p = tf.ones((10, 5)), tf.zeros((10, 5))
total_loss = loss_container(y_t, y_p)
self.assertTrue(loss_container._built)
self.assertLen(loss_container._losses, 1)
self.assertIsInstance(total_loss, tf.Tensor)
self.assertEqual(total_loss.numpy(), 1.)
self.assertLen(loss_container.metrics, 1)
loss_metric = loss_container.metrics[0]
self.assertEqual(loss_metric.name, 'loss')
self.assertEqual(loss_metric.result().numpy(), 1.)
loss_container.reset_state()
self.assertEqual(loss_metric.result().numpy(), 0.)
def test_loss_list(self):
loss_container = compile_utils.LossesContainer(['mse', 'mae'], [1, 0.5])
y_t = [tf.ones((10, 1)), tf.zeros((10, 1))]
y_p = [tf.ones((10, 1)), tf.ones((10, 1))]
sw = tf.convert_to_tensor([0, 0, 0, 0, 0, 1, 1, 1, 1, 1])
total_loss = loss_container(y_t, y_p, sample_weight=sw)
self.assertEqual(loss_container._output_names, ['output_1', 'output_2'])
self.assertLen(loss_container._losses, 2)
self.assertEqual(total_loss.numpy(), 0.25)
loss_metric = loss_container.metrics[0]
self.assertEqual(loss_metric.name, 'loss')
self.assertEqual(loss_metric.result().numpy(), 0.25)
output_1_metric = loss_container.metrics[1]
self.assertEqual(output_1_metric.name, 'output_1_loss')
self.assertEqual(output_1_metric.result().numpy(), 0)
output_2_metric = loss_container.metrics[2]
self.assertEqual(output_2_metric.name, 'output_2_loss')
self.assertEqual(output_2_metric.result().numpy(), 0.5)
loss_container.reset_state()
self.assertEqual(loss_metric.result().numpy(), 0)
self.assertEqual(output_1_metric.result().numpy(), 0)
self.assertEqual(output_2_metric.result().numpy(), 0)
def test_loss_dict(self):
loss_container = compile_utils.LossesContainer(
{
'out1': 'mse',
'out2': 'mae'
}, {
'out1': 1,
'out2': 0.5
})
y_t = {'out1': tf.ones((10, 1)), 'out2': tf.zeros((10, 1))}
y_p = {'out1': tf.ones((10, 1)), 'out2': tf.ones((10, 1))}
sw = tf.convert_to_tensor([0, 0, 0, 0, 0, 1, 1, 1, 1, 1])
total_loss = loss_container(y_t, y_p, sample_weight=sw)
self.assertLen(loss_container._losses, 2)
self.assertIsInstance(total_loss, tf.Tensor)
self.assertEqual(total_loss.numpy(), 0.25)
self.assertLen(loss_container.metrics, 3)
loss_metric = loss_container.metrics[0]
self.assertEqual(loss_metric.name, 'loss')
self.assertEqual(loss_metric.result().numpy(), 0.25)
out1_metric = loss_container.metrics[1]
self.assertEqual(out1_metric.name, 'out1_loss')
self.assertEqual(out1_metric.result().numpy(), 0)
out2_metric = loss_container.metrics[2]
self.assertEqual(out2_metric.name, 'out2_loss')
self.assertEqual(out2_metric.result().numpy(), 0.5)
loss_container.reset_state()
self.assertEqual(loss_metric.result().numpy(), 0)
self.assertEqual(out1_metric.result().numpy(), 0)
self.assertEqual(out2_metric.result().numpy(), 0)
def test_loss_partial_dict_with_output_names(self):
loss_container = compile_utils.LossesContainer(
{'out2': 'mae'}, {'out2': 1.}, output_names=['out1', 'out2'])
y_t = [tf.ones((10, 1)), tf.zeros((10, 1))]
y_p = [tf.ones((10, 1)), tf.ones((10, 1))]
sw = tf.convert_to_tensor([0, 0, 0, 0, 0, 1, 1, 1, 1, 1])
total_loss = loss_container(y_t, y_p, sample_weight=sw)
self.assertEqual(total_loss.numpy(), 0.5)
self.assertLen(loss_container.metrics, 2)
loss_metric = loss_container.metrics[0]
self.assertEqual(loss_metric.name, 'loss')
self.assertEqual(loss_metric.result().numpy(), 0.5)
out2_metric = loss_container.metrics[1]
self.assertEqual(out2_metric.name, 'out2_loss')
self.assertEqual(out2_metric.result().numpy(), 0.5)
def test_loss_dict_with_nones(self):
loss_container = compile_utils.LossesContainer({
'out1': None,
'out2': 'mae'
})
y_t = {'out1': tf.ones((10, 1)), 'out2': tf.zeros((10, 1))}
y_p = {'out1': tf.ones((10, 1)), 'out2': tf.ones((10, 1))}
sw = tf.convert_to_tensor([0, 0, 0, 0, 0, 1, 1, 1, 1, 1])
total_loss = loss_container(y_t, y_p, sample_weight=sw)
self.assertIsInstance(total_loss, tf.Tensor)
self.assertEqual(total_loss.numpy(), 0.5)
self.assertLen(loss_container.metrics, 2)
loss_metric = loss_container.metrics[0]
self.assertEqual(loss_metric.name, 'loss')
self.assertEqual(loss_metric.result().numpy(), 0.5)
out2_metric = loss_container.metrics[1]
self.assertEqual(out2_metric.name, 'out2_loss')
self.assertEqual(out2_metric.result().numpy(), 0.5)
def test_nested_structure(self):
loss_container = compile_utils.LossesContainer(
{
'b': ['mse', None],
'a': 'mae'
}, loss_weights={
'b': [0.5, 0],
'a': 1
})
y_t = {
'b': [tf.ones((10, 1)),
tf.zeros((10, 1))],
'a': tf.zeros((10, 1))
}
y_p = {
'b': [tf.zeros((10, 1)),
tf.zeros((10, 1))],
'a': tf.ones((10, 1))
}
sw = tf.convert_to_tensor([0, 0, 0, 0, 0, 1, 1, 1, 1, 1])
total_loss = loss_container(y_t, y_p, sample_weight=sw)
self.assertIsInstance(total_loss, tf.Tensor)
self.assertEqual(total_loss.numpy(), 0.75)
self.assertLen(loss_container.metrics, 3)
loss_metric = loss_container.metrics[0]
self.assertEqual(loss_metric.name, 'loss')
self.assertEqual(loss_metric.result().numpy(), 0.75)
a_metric = loss_container.metrics[1]
self.assertEqual(a_metric.name, 'a_loss')
self.assertEqual(a_metric.result().numpy(), 0.5)
b_1_metric = loss_container.metrics[2]
self.assertEqual(b_1_metric.name, 'b_1_loss')
self.assertEqual(b_1_metric.result().numpy(), 0.5)
def test_broadcast_single_loss(self):
loss_container = compile_utils.LossesContainer('mse')
y_t = [tf.ones((10, 1)), tf.zeros((10, 1))]
y_p = [tf.ones((10, 1)), tf.ones((10, 1))]
sw = tf.convert_to_tensor([0, 0, 0, 0, 0, 1, 1, 1, 1, 1])
total_loss = loss_container(y_t, y_p, sample_weight=sw)
self.assertEqual(total_loss.numpy(), 0.5)
self.assertLen(loss_container.metrics, 3)
loss_metric = loss_container.metrics[0]
self.assertEqual(loss_metric.name, 'loss')
self.assertEqual(loss_metric.result().numpy(), 0.5)
<|fim▁hole|> output_1_metric = loss_container.metrics[1]
self.assertEqual(output_1_metric.name, 'output_1_loss')
self.assertEqual(output_1_metric.result().numpy(), 0.)
output_2_metric = loss_container.metrics[2]
self.assertEqual(output_2_metric.name, 'output_2_loss')
self.assertEqual(output_2_metric.result().numpy(), 0.5)
def test_missing_label_with_no_loss(self):
# It's ok to exclude a label if that label has no
# losses or metrics associated with it.
loss_container = compile_utils.LossesContainer({
'output1': 'mse',
'output3': 'mae'
})
y_p = {
'output1': tf.convert_to_tensor([[0], [1], [2]]),
'output2': tf.convert_to_tensor([[3], [4], [5]]),
'output3': tf.convert_to_tensor([[6], [7], [8]])
}
y_t = {
'output1': tf.convert_to_tensor([[1], [2], [3]]),
'output3': tf.convert_to_tensor([[4], [5], [6]])
}
total_loss = loss_container(y_t, y_p)
self.assertEqual(total_loss.numpy(), 3.)
self.assertLen(loss_container.metrics, 3)
loss_metric = loss_container.metrics[0]
self.assertEqual(loss_metric.name, 'loss')
self.assertEqual(loss_metric.result().numpy(), 3.)
output_1_metric = loss_container.metrics[1]
self.assertEqual(output_1_metric.name, 'output1_loss')
self.assertEqual(output_1_metric.result().numpy(), 1.)
output_3_metric = loss_container.metrics[2]
self.assertEqual(output_3_metric.name, 'output3_loss')
self.assertEqual(output_3_metric.result().numpy(), 2.)
def test_mismatched_dtypes(self):
y_t = tf.constant([1, 9, 2, -5], shape=(2, 2))
y_p = tf.constant([4, 8, 12, 8],
shape=(2, 2),
dtype=tf.float32)
def my_mae(labels, preds):
self.assertEqual(labels.dtype, tf.int32)
self.assertEqual(preds.dtype, tf.float32)
labels = tf.cast(labels, preds.dtype)
return backend.mean(tf.abs(preds - labels), axis=-1)
loss_container = compile_utils.LossesContainer(my_mae)
total_loss = loss_container(y_t, y_p)
self.assertEqual(total_loss.dtype, tf.float32)
def test_integer_dtypes(self):
y_t = tf.constant([1, 9, 2, -5], shape=(2, 2))
y_p = tf.constant([4, 8, 12, 8], shape=(2, 2), dtype=tf.int64)
def my_mae(labels, preds):
self.assertEqual(labels.dtype, tf.int64)
self.assertEqual(preds.dtype, tf.int64)
return backend.mean(tf.abs(preds - labels), axis=-1)
loss_container = compile_utils.LossesContainer(my_mae)
total_loss = loss_container(y_t, y_p)
self.assertEqual(total_loss.dtype, tf.int64)
def test_float_dtypes(self):
y_t = tf.constant([1, 9, 2, -5],
shape=(2, 2),
dtype=tf.float32)
y_p = tf.constant([4, 8, 12, 8],
shape=(2, 2),
dtype=tf.float64)
def my_mae(labels, preds):
self.assertEqual(labels.dtype, tf.float64)
self.assertEqual(preds.dtype, tf.float64)
return backend.mean(tf.abs(preds - labels), axis=-1)
loss_container = compile_utils.LossesContainer(my_mae)
total_loss = loss_container(y_t, y_p)
self.assertIsInstance(total_loss, tf.Tensor)
self.assertEqual(total_loss.dtype, tf.float64)
def test_loss_masking(self):
loss_container = compile_utils.LossesContainer('mae')
y_p = tf.constant([[[1], [1]], [[0], [0]]], dtype=tf.float32)
y_t = tf.constant([[[1], [1]], [[1], [1]]], dtype=tf.float32)
y_p._keras_mask = tf.constant([[1, 0], [1, 0]],
dtype=tf.float32)
total_loss = loss_container(y_t, y_p)
self.assertAlmostEqual(total_loss.numpy(), .25) # sum over batch size
self.assertLen(loss_container.metrics, 1)
loss_metric = loss_container.metrics[0]
self.assertEqual(loss_metric.name, 'loss')
self.assertAlmostEqual(loss_metric.result().numpy(), .25)
def test_loss_sample_weight(self):
loss_container = compile_utils.LossesContainer('mae')
y_p = tf.constant([[[1], [1]], [[0], [0]]], dtype=tf.float32)
y_t = tf.constant([[[1], [1]], [[1], [1]]], dtype=tf.float32)
sw = tf.constant([[.2, .3], [.5, 0]], dtype=tf.float32)
total_loss = loss_container(y_t, y_p, sample_weight=sw)
# (0 * .2 + 0 * .3 + 1 * .5 + 1 * 0) / 4
self.assertAlmostEqual(total_loss.numpy(), .125)
self.assertLen(loss_container.metrics, 1)
loss_metric = loss_container.metrics[0]
self.assertEqual(loss_metric.name, 'loss')
self.assertAlmostEqual(loss_metric.result().numpy(), .125)
def test_loss_masking_sample_weight(self):
loss_container = compile_utils.LossesContainer('mae')
y_p = tf.constant([[[1], [1]], [[0], [0]]], dtype=tf.float32)
y_t = tf.constant([[[1], [1]], [[1], [1]]], dtype=tf.float32)
sw = tf.constant([[.2, .3], [.5, 0]], dtype=tf.float32)
y_p._keras_mask = tf.constant([[1, 0], [1, 0]],
dtype=tf.float32)
total_loss = loss_container(y_t, y_p, sample_weight=sw)
# (0 * .2 + 1 * .5) / 4
self.assertAlmostEqual(total_loss.numpy(), .125) # sum over batch size
self.assertLen(loss_container.metrics, 1)
loss_metric = loss_container.metrics[0]
self.assertEqual(loss_metric.name, 'loss')
self.assertAlmostEqual(loss_metric.result().numpy(), .125)
def test_custom_loss_callables(self):
def custom_loss_fn(y_true, y_pred):
return tf.reduce_sum(y_true - y_pred)
class CustomLossClass:
def __call__(self, y_true, y_pred):
return tf.reduce_sum(y_true - y_pred)
loss_container = compile_utils.LossesContainer(
[custom_loss_fn, CustomLossClass()])
y_t, y_p = tf.ones((10, 5)), tf.zeros((10, 5))
loss_container(y_t, y_p)
self.assertEqual(loss_container._losses[0].name, 'custom_loss_fn')
self.assertEqual(loss_container._losses[1].name, 'custom_loss_class')
def test_ragged_tensor_output(self):
"""Ensure that ragged tensors can be passed as targets and predictions."""
def custom_loss_fn(y_true, y_pred):
"""MSE supports RaggedTensors directly."""
return losses_mod.mse(y_true, y_pred)
class CustomLossClass(losses_mod.Loss):
"""User defined loss function must implement RaggedTensor support."""
def call(self, y_true, y_pred):
losses = tf.ragged.map_flat_values(
tf.math.squared_difference, y_true, y_pred)
return tf.reduce_mean(losses)
loss_container = compile_utils.LossesContainer(
[custom_loss_fn, CustomLossClass()])
v_t = tf.constant([[3., 4.], [1., 2.], [3., 5.]])
v_p = tf.constant([[3.1, 4.], [1., 2.], [3., 5.]])
y_t = tf.expand_dims(
tf.RaggedTensor.from_row_splits(v_t, [0, 2, 3]), 0)
y_p = tf.expand_dims(
tf.RaggedTensor.from_row_splits(v_p, [0, 2, 3]), 0)
total_loss = loss_container(y_t, y_p)
self.assertIsInstance(total_loss, tf.Tensor)
self.assertEqual(loss_container._losses[0].name, 'custom_loss_fn')
class MetricsContainerTest(test_combinations.TestCase):
def test_single_metric(self):
metric_container = compile_utils.MetricsContainer('mse')
y_t, y_p = tf.ones((10, 5)), tf.zeros((10, 5))
metric_container.update_state(y_t, y_p)
self.assertLen(metric_container.metrics, 1)
metric = metric_container.metrics[0]
self.assertEqual(metric.name, 'mse')
self.assertEqual(metric.result().numpy(), 1.)
metric_container.reset_state()
self.assertEqual(metric.result().numpy(), 0.)
def test_list_of_metrics_one_output(self):
metric_container = compile_utils.MetricsContainer(['mse', 'mae'])
y_t, y_p = 2 * tf.ones((10, 5)), tf.zeros((10, 5))
metric_container.update_state(y_t, y_p)
self.assertLen(metric_container.metrics, 2)
mse_metric = metric_container.metrics[0]
self.assertEqual(mse_metric.name, 'mse')
self.assertEqual(mse_metric.result().numpy(), 4.)
mae_metric = metric_container.metrics[1]
self.assertEqual(mae_metric.name, 'mae')
self.assertEqual(mae_metric.result().numpy(), 2.)
metric_container.reset_state()
self.assertEqual(mse_metric.result().numpy(), 0.)
self.assertEqual(mae_metric.result().numpy(), 0.)
def test_list_of_metrics_list_of_outputs(self):
metric_container = compile_utils.MetricsContainer(
metrics=['mse', 'mae'], # Should broadcast to both outputs.
weighted_metrics=['accuracy']) # Should broadcast to both outputs.
y_t = [tf.ones((10, 1)), tf.zeros((10, 1))]
y_p = [tf.ones((10, 1)), 2 * tf.ones((10, 1))]
sw = tf.convert_to_tensor([0, 0, 0, 0, 0, 1, 1, 1, 1, 1])
metric_container.update_state(y_t, y_p, sample_weight=sw)
self.assertLen(metric_container.metrics, 6)
mse_metric = metric_container.metrics[0]
self.assertEqual(mse_metric.name, 'output_1_mse')
self.assertEqual(mse_metric.result().numpy(), 0.)
    mae_metric = metric_container.metrics[1]
    self.assertEqual(mae_metric.name, 'output_1_mae')
    self.assertEqual(mae_metric.result().numpy(), 0.)
acc_metric_1 = metric_container.metrics[2]
self.assertEqual(acc_metric_1.name, 'output_1_accuracy')
self.assertEqual(acc_metric_1.result().numpy(), 1.)
self.assertEqual(acc_metric_1._fn, metrics_mod.binary_accuracy)
    mse_metric = metric_container.metrics[3]
    self.assertEqual(mse_metric.name, 'output_2_mse')
    self.assertEqual(mse_metric.result().numpy(), 4.)
mae_metric = metric_container.metrics[4]
self.assertEqual(mae_metric.name, 'output_2_mae')
self.assertEqual(mae_metric.result().numpy(), 2.)
acc_metric_2 = metric_container.metrics[5]
self.assertEqual(acc_metric_2.name, 'output_2_accuracy')
self.assertEqual(acc_metric_2.result().numpy(), 0.)
self.assertEqual(acc_metric_2._fn, metrics_mod.binary_accuracy)
weighted_metrics = metric_container.weighted_metrics
self.assertLen(weighted_metrics, 2)
self.assertEqual(weighted_metrics[0].name, 'output_1_accuracy')
self.assertEqual(weighted_metrics[1].name, 'output_2_accuracy')
unweighted_metrics = metric_container.unweighted_metrics
self.assertLen(unweighted_metrics, 4)
self.assertEqual(unweighted_metrics[0].name, 'output_1_mse')
self.assertEqual(unweighted_metrics[1].name, 'output_1_mae')
self.assertEqual(unweighted_metrics[2].name, 'output_2_mse')
self.assertEqual(unweighted_metrics[3].name, 'output_2_mae')
def test_metric_dict(self):
metric_container = compile_utils.MetricsContainer(
metrics={
'out1': 'mse',
'out2': 'mae'
},
weighted_metrics={
'out1': 'mse',
'out2': 'mae'
})
y_t = {'out1': tf.ones((10, 1)), 'out2': tf.zeros((10, 1))}
y_p = {'out1': tf.ones((10, 1)), 'out2': 2 * tf.ones((10, 1))}
sw = tf.convert_to_tensor([0, 0, 0, 0, 0, 1, 1, 1, 1, 1])
metric_container.update_state(y_t, y_p, sample_weight=sw)
mse_metric = metric_container.metrics[0]
self.assertEqual(mse_metric.name, 'out1_mse')
self.assertEqual(mse_metric.result().numpy(), 0.)
weighted_mse_metric = metric_container.metrics[1]
self.assertEqual(weighted_mse_metric.name, 'out1_weighted_mse')
self.assertEqual(weighted_mse_metric.result().numpy(), 0.)
mae_metric = metric_container.metrics[2]
self.assertEqual(mae_metric.name, 'out2_mae')
self.assertEqual(mae_metric.result().numpy(), 2.)
weighted_mae_metric = metric_container.metrics[3]
self.assertEqual(weighted_mae_metric.name, 'out2_weighted_mae')
self.assertEqual(weighted_mae_metric.result().numpy(), 2.)
metric_container.reset_state()
self.assertEqual(mse_metric.result().numpy(), 0.)
self.assertEqual(weighted_mse_metric.result().numpy(), 0.)
self.assertEqual(mae_metric.result().numpy(), 0.)
self.assertEqual(weighted_mae_metric.result().numpy(), 0.)
def test_metric_partial_dict_with_output_names(self):
metric_container = compile_utils.MetricsContainer(
{'out2': 'mae'}, output_names=['out1', 'out2'])
y_t = [tf.ones((10, 1)), tf.zeros((10, 1))]
y_p = [tf.ones((10, 1)), tf.ones((10, 1))]
sw = tf.convert_to_tensor([0, 0, 0, 0, 0, 1, 1, 1, 1, 1])
metric_container.update_state(y_t, y_p, sample_weight=sw)
self.assertLen(metric_container.metrics, 1)
mae_metric = metric_container.metrics[0]
self.assertEqual(mae_metric.name, 'out2_mae')
self.assertEqual(mae_metric.result().numpy(), 1.)
def test_metric_partial_dict_with_nones(self):
metric_container = compile_utils.MetricsContainer({
'out1': None,
'out2': 'mae'
})
y_t = {'out1': tf.ones((10, 1)), 'out2': tf.zeros((10, 1))}
y_p = {'out1': tf.ones((10, 1)), 'out2': tf.ones((10, 1))}
sw = tf.convert_to_tensor([0, 0, 0, 0, 0, 1, 1, 1, 1, 1])
metric_container.update_state(y_t, y_p, sample_weight=sw)
self.assertLen(metric_container.metrics, 1)
mae_metric = metric_container.metrics[0]
self.assertEqual(mae_metric.name, 'out2_mae')
self.assertEqual(mae_metric.result().numpy(), 1.)
def test_nested_structure(self):
metric_container = compile_utils.MetricsContainer(
metrics={
'b': ['mse', None],
'a': 'mae'
},
weighted_metrics={
'b': [None, None],
'a': 'mse'
})
y_t = {
'b': [2 * tf.ones((10, 1)),
tf.zeros((10, 1))],
'a': tf.zeros((10, 1))
}
y_p = {
'b': [tf.zeros((10, 1)),
tf.zeros((10, 1))],
'a': tf.ones((10, 1))
}
sw = tf.convert_to_tensor([0, 0, 0, 0, 0, 1, 1, 1, 1, 1])
metric_container.update_state(y_t, y_p, sample_weight=sw)
self.assertLen(metric_container.metrics, 3)
a_mae_metric = metric_container.metrics[0]
self.assertEqual(a_mae_metric.name, 'a_mae')
self.assertEqual(a_mae_metric.result().numpy(), 1.)
    weighted_a_mse_metric = metric_container.metrics[1]
    self.assertEqual(weighted_a_mse_metric.name, 'a_mse')
    self.assertEqual(weighted_a_mse_metric.result().numpy(), 1.)
b_1_mse_metric = metric_container.metrics[2]
self.assertEqual(b_1_mse_metric.name, 'b_1_mse')
self.assertEqual(b_1_mse_metric.result().numpy(), 4.)
def test_crossentropy(self):
metric_container = compile_utils.MetricsContainer('crossentropy')
y_t, y_p = tf.ones((10, 1)), tf.ones((10, 1))
metric_container.update_state(y_t, y_p)
self.assertEqual(metric_container.metrics[0]._fn,
metrics_mod.binary_crossentropy)
metric_container = compile_utils.MetricsContainer('crossentropy')
y_t, y_p = tf.ones((10, 1)), tf.ones((10, 20))
self.assertEqual(y_p.shape.as_list()[-1], 20)
metric_container.update_state(y_t, y_p)
self.assertEqual(metric_container.metrics[0]._fn,
metrics_mod.sparse_categorical_crossentropy)
metric_container = compile_utils.MetricsContainer('crossentropy')
y_t, y_p = tf.ones((10, 20)), tf.ones((10, 20))
metric_container.update_state(y_t, y_p)
self.assertEqual(metric_container.metrics[0]._fn,
metrics_mod.categorical_crossentropy)
def test_accuracy(self):
metric_container = compile_utils.MetricsContainer('accuracy')
y_t, y_p = tf.ones((10, 1)), tf.ones((10, 1))
metric_container.update_state(y_t, y_p)
self.assertEqual(metric_container.metrics[0]._fn,
metrics_mod.binary_accuracy)
metric_container = compile_utils.MetricsContainer('Accuracy')
y_t, y_p = tf.ones((10, 1)), tf.ones((10, 1))
metric_container.update_state(y_t, y_p)
self.assertEqual(metric_container.metrics[0]._fn,
metrics_mod.binary_accuracy)
metric_container = compile_utils.MetricsContainer('accuracy')
y_t, y_p = tf.ones((10, 1)), tf.ones((10, 20))
self.assertEqual(y_p.shape.as_list()[-1], 20)
metric_container.update_state(y_t, y_p)
self.assertEqual(metric_container.metrics[0]._fn,
metrics_mod.sparse_categorical_accuracy)
metric_container = compile_utils.MetricsContainer('accuracy')
y_t, y_p = tf.ones((10, 20)), tf.ones((10, 20))
metric_container.update_state(y_t, y_p)
self.assertEqual(metric_container.metrics[0]._fn,
metrics_mod.categorical_accuracy)
def test_metric_weighting(self):
metric_container = compile_utils.MetricsContainer(
metrics=['mae'], weighted_metrics=['mae'])
y_t = tf.convert_to_tensor([[0], [3], [0]])
y_p = tf.convert_to_tensor([[0], [0], [0]])
sw = tf.convert_to_tensor([[1], [0], [1]])
metric_container.update_state(y_t, y_p, sample_weight=sw)
self.assertLen(metric_container.metrics, 2)
mae_metric = metric_container.metrics[0]
self.assertEqual(mae_metric.name, 'mae')
self.assertEqual(mae_metric.result().numpy(), 1.)
weighted_mae_metric = metric_container.metrics[1]
self.assertEqual(weighted_mae_metric.name, 'weighted_mae')
self.assertEqual(weighted_mae_metric.result().numpy(), 0.)
def test_broadcast_metrics_to_dict(self):
metric_container = compile_utils.MetricsContainer(metrics=['mae'])
y_p = {'output': tf.convert_to_tensor([[0], [1], [2]])}
y_t = {'output': tf.convert_to_tensor([[1], [2], [3]])}
metric_container.update_state(y_t, y_p)
mae_metric = metric_container.metrics[0]
self.assertEqual(mae_metric.name, 'mae')
self.assertEqual(mae_metric.result().numpy(), 1.)
def test_broadcast_metrics_to_dict_with_output_names(self):
metric_container = compile_utils.MetricsContainer(
metrics=['mae'], output_names=['output'])
y_p = tf.convert_to_tensor([[0], [1], [2]])
y_t = {'output': tf.convert_to_tensor([[1], [2], [3]])}
metric_container.update_state(y_t, y_p)
mae_metric = metric_container.metrics[0]
self.assertEqual(mae_metric.name, 'mae')
self.assertEqual(mae_metric.result().numpy(), 1.)
def test_missing_label_with_no_metrics(self):
# It's ok to exclude a label if that label has no
# losses or metrics associated with it.
metric_container = compile_utils.MetricsContainer(metrics={
'output1': 'mae',
'output3': 'mse'
})
y_p = {
'output1': tf.convert_to_tensor([[0], [1], [2]]),
'output2': tf.convert_to_tensor([[3], [4], [5]]),
'output3': tf.convert_to_tensor([[6], [7], [8]])
}
y_t = {
'output1': tf.convert_to_tensor([[1], [2], [3]]),
'output3': tf.convert_to_tensor([[4], [5], [6]])
}
metric_container.update_state(y_t, y_p)
self.assertLen(metric_container.metrics, 2)
mae_metric = metric_container.metrics[0]
self.assertEqual(mae_metric.name, 'output1_mae')
self.assertEqual(mae_metric.result().numpy(), 1.)
mse_metric = metric_container.metrics[1]
self.assertEqual(mse_metric.name, 'output3_mse')
self.assertEqual(mse_metric.result().numpy(), 4.)
def test_metrics_masking(self):
metrics_container = compile_utils.MetricsContainer(
metrics=['mae'], weighted_metrics=['mse'])
y_p = tf.constant([[[1], [1]], [[0], [0]]], dtype=tf.float32)
y_t = tf.constant([[[1], [1]], [[1], [1]]], dtype=tf.float32)
y_p._keras_mask = tf.constant([[1, 1], [0, 0]],
dtype=tf.float32)
metrics_container.update_state(y_t, y_p)
self.assertLen(metrics_container.metrics, 2)
mae_metric = metrics_container.metrics[0]
self.assertEqual(mae_metric.name, 'mae')
self.assertAlmostEqual(mae_metric.result().numpy(), 0)
    weighted_mse_metric = metrics_container.metrics[1]
    self.assertEqual(weighted_mse_metric.name, 'mse')
    self.assertAlmostEqual(weighted_mse_metric.result().numpy(), 0)
def test_metrics_sample_weight(self):
metrics_container = compile_utils.MetricsContainer(
metrics=['mae'], weighted_metrics=['mse'])
y_p = tf.constant([[[1], [1]], [[0], [1]]], dtype=tf.float32)
y_t = tf.constant([[[1], [1]], [[1], [1]]], dtype=tf.float32)
sw = tf.constant([[.2, .3], [.5, 0]], dtype=tf.float32)
metrics_container.update_state(y_t, y_p, sample_weight=sw)
self.assertLen(metrics_container.metrics, 2)
mae_metric = metrics_container.metrics[0]
self.assertEqual(mae_metric.name, 'mae')
self.assertAlmostEqual(mae_metric.result().numpy(), .25) # 1 / 4
    weighted_mse_metric = metrics_container.metrics[1]
    self.assertEqual(weighted_mse_metric.name, 'mse')
    self.assertAlmostEqual(weighted_mse_metric.result().numpy(), .5)  # .5 / 1
def test_metrics_masking_sample_weight(self):
metrics_container = compile_utils.MetricsContainer(
metrics=['mae'], weighted_metrics=['mse'])
y_p = tf.constant([[[1], [1]], [[0], [1]]], dtype=tf.float32)
y_t = tf.constant([[[1], [1]], [[1], [1]]], dtype=tf.float32)
sw = tf.constant([[.3, .2], [.2, .3]], dtype=tf.float32)
y_p._keras_mask = tf.constant([[1, 0], [1, 0]],
dtype=tf.float32)
metrics_container.update_state(y_t, y_p, sample_weight=sw)
self.assertLen(metrics_container.metrics, 2)
mae_metric = metrics_container.metrics[0]
self.assertEqual(mae_metric.name, 'mae')
self.assertAlmostEqual(mae_metric.result().numpy(), .5) # 1 / .5
    weighted_mse_metric = metrics_container.metrics[1]
    self.assertEqual(weighted_mse_metric.name, 'mse')
    self.assertAlmostEqual(weighted_mse_metric.result().numpy(), .2 / .5)
def test_loss_class_as_metric_with_distribution(self):
distribution = tf.distribute.OneDeviceStrategy('/device:CPU:0')
with distribution.scope():
metric_container = compile_utils.MetricsContainer(
losses_mod.MeanSquaredError())
y_t, y_p = tf.ones((10, 5)), tf.zeros((10, 5))
metric_container.update_state(y_t, y_p)
self.assertLen(metric_container.metrics, 1)
metric = metric_container.metrics[0]
self.assertEqual(metric.name, 'mean_squared_error')
self.assertEqual(metric.result().numpy(), 1.)
def test_custom_metric_callables(self):
def custom_metric_fn(y_true, y_pred):
return tf.reduce_sum(y_true - y_pred)
class CustomMetricClass:
def __call__(self, y_true, y_pred):
return tf.reduce_sum(y_true - y_pred)
metric_container = compile_utils.MetricsContainer(
[custom_metric_fn, CustomMetricClass()])
y_t, y_p = tf.ones((10, 5)), tf.zeros((10, 5))
metric_container.update_state(y_t, y_p)
self.assertEqual(metric_container.metrics[0].name, 'custom_metric_fn')
self.assertEqual(metric_container.metrics[1].name, 'custom_metric_class')
def test_reset_state_existing_metric_before_built(self):
metric = metrics_mod.Mean()
metric.update_state([2.0, 4.0])
self.assertEqual(metric.result().numpy(), 3.0)
metric_container = compile_utils.MetricsContainer(metric)
metric_container.reset_state()
self.assertEqual(metric.result().numpy(), 0.0)
def test_duplicated_metric_instance(self):
mean_obj = metrics_mod.Mean()
metric = mean_obj
with self.assertRaisesRegex(ValueError, 'Found duplicated metrics'):
compile_utils.MetricsContainer(metrics=metric, weighted_metrics=metric)
# duplicated string should be fine
metric = 'acc'
compile_utils.MetricsContainer(metrics=metric, weighted_metrics=metric)
# complicated structure
metric = [mean_obj, 'acc']
weighted_metric = {'output1': mean_obj, 'output2': 'acc'}
with self.assertRaisesRegex(ValueError, 'Found duplicated metrics'):
compile_utils.MetricsContainer(
metrics=metric, weighted_metrics=weighted_metric)
if __name__ == '__main__':
tf.compat.v1.enable_eager_execution()
tf.test.main()<|fim▁end|> | |
<|file_name|>gaia_auth_util.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "google_apis/gaia/gaia_auth_util.h"
#include <vector>
#include "base/logging.h"
#include "base/string_util.h"
#include "base/strings/string_split.h"
#include "google_apis/gaia/gaia_urls.h"
#include "googleurl/src/gurl.h"
namespace gaia {
namespace {
const char kGmailDomain[] = "gmail.com";
}
std::string CanonicalizeEmail(const std::string& email_address) {
std::vector<std::string> parts;
char at = '@';
base::SplitString(email_address, at, &parts);
if (parts.size() != 2U)
    NOTREACHED() << "expecting exactly one @, but got " << parts.size() - 1;
else if (parts[1] == kGmailDomain) // only strip '.' for gmail accounts.
RemoveChars(parts[0], ".", &parts[0]);
std::string new_email = StringToLowerASCII(JoinString(parts, at));
VLOG(1) << "Canonicalized " << email_address << " to " << new_email;
return new_email;
}
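// Illustrative examples (not in the original source):
//   CanonicalizeEmail("Foo.Bar@gmail.com")   -> "foobar@gmail.com"
//   CanonicalizeEmail("Foo.Bar@example.com") -> "foo.bar@example.com"
// Dots in the local part are only stripped for gmail.com addresses.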
std::string CanonicalizeDomain(const std::string& domain) {
// Canonicalization of domain names means lower-casing them. Make sure to
// update this function in sync with Canonicalize if this ever changes.
return StringToLowerASCII(domain);
}
std::string SanitizeEmail(const std::string& email_address) {
std::string sanitized(email_address);
// Apply a default domain if necessary.
if (sanitized.find('@') == std::string::npos) {
sanitized += '@';
sanitized += kGmailDomain;
}
return sanitized;
}
bool AreEmailsSame(const std::string& email1, const std::string& email2) {
return gaia::CanonicalizeEmail(gaia::SanitizeEmail(email1)) ==
gaia::CanonicalizeEmail(gaia::SanitizeEmail(email2));
}
std::string ExtractDomainName(const std::string& email_address) {
// First canonicalize which will also verify we have proper domain part.
std::string email = CanonicalizeEmail(email_address);
size_t separator_pos = email.find('@');
if (separator_pos != email.npos && separator_pos < email.length() - 1)
return email.substr(separator_pos + 1);
else
NOTREACHED() << "Not a proper email address: " << email;
return std::string();
}
bool IsGaiaSignonRealm(const GURL& url) {
if (!url.SchemeIsSecure())
return false;
return url == GaiaUrls::GetInstance()->gaia_url();
}<|fim▁hole|>} // namespace gaia<|fim▁end|> | |
<|file_name|>tachybradycardia.py<|end_file_name|><|fim▁begin|>def bradycardia(hr, lowerthresh):
"""
    This function determines when in the ECG data there is bradycardia
:param hr: (ndarray) heart rate- used to determine when
bradycardia occurred in ECG trace
:param lowerthresh: (int or double) lower threshold for determining
bradycardia- user input or default 60 bpm
:return: brady: (ndarray) 1 for bradycardia, 0 otherwise
"""
<|fim▁hole|> # aka the heart is beating too slowly
brady[i] = 1
return brady
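# Usage sketch (illustrative, not part of the original module; assumes numpy
# is installed):
#     bradycardia([72, 55, 80], 60)  # -> [0, 1, 0]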
def tachycardia(hr, upperthresh):
"""
    This function determines when in the ECG data there is tachycardia
:param hr: (ndarray) heart rate-
used to determine when tachycardia occurred in ECG trace
:param upperthresh: (int or double) upper threshold for determining
tachycardia- user input or default 100 bpm
:return: tachy: (ndarray) 1 for tachycardia, 0 otherwise
"""
import numpy as np
tachy = [0] * np.size(hr)
for i in range(len(hr)):
if hr[i] >= int(upperthresh): # this indicates tachycardia,
# aka the heart is beating too quickly
tachy[i] = 1
return tachy<|fim▁end|> | import numpy as np
brady = [0] * np.size(hr)
for i in range(len(hr)):
if hr[i] <= int(lowerthresh): # this indicates bradycardia, |
<|file_name|>config.py<|end_file_name|><|fim▁begin|>__author__ = 'Fabrizio Lungo<fab@lungo.co.uk>'
import os
import yaml
from __exceptions__.FileNotFound import FileNotFound
from section import ConfigurationSection
class Configuration(ConfigurationSection):
def __init__(self, fn='config.yml', name=None, create=False):<|fim▁hole|> self._fn = fn
self._create = create
self.reload()
if name is None:
            name = fn
self._name = name
    def reload(self):
        """Load the configuration from disk; start empty if create=True and the file is missing."""
if self._create and not os.path.exists(self._fn):
self._config = {}
elif os.path.exists(self._fn):
with open(self._fn, "r") as f:
                self._config = yaml.safe_load(f)  # safe_load avoids executing arbitrary YAML tags
else:
raise FileNotFound(filename=self._fn)
    def save(self):
        """Write the current configuration back to disk as YAML."""
with open(self._fn, "w") as f:
yaml.dump(self._config, f)<|fim▁end|> | |
<|file_name|>test_course_locators.py<|end_file_name|><|fim▁begin|>"""
Tests of CourseKeys and CourseLocators
"""
import ddt
from bson.objectid import ObjectId
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from opaque_keys.edx.locator import BlockUsageLocator, CourseLocator
from opaque_keys.edx.tests import LocatorBaseTest, TestDeprecated
@ddt.ddt
class TestCourseKeys(LocatorBaseTest, TestDeprecated):
"""
Tests of :class:`.CourseKey` and :class:`.CourseLocator`
"""
@ddt.data(
"foo/bar/baz",
)
def test_deprecated_roundtrip(self, course_id):
self.assertEquals(
course_id,
unicode(CourseKey.from_string(course_id))
)
@ddt.data(
"foo!/bar/baz",
)
def test_invalid_chars_in_ssck_string(self, course_id):
with self.assertRaises(InvalidKeyError):
CourseKey.from_string(course_id)
@ddt.data(
"org/course/run/foo",
"org/course",
"org+course+run+foo",
"org+course",
)
def test_invalid_format_location(self, course_id):
with self.assertRaises(InvalidKeyError):
CourseLocator.from_string(course_id)
def test_make_usage_key(self):
depr_course = CourseKey.from_string('org/course/run')
self.assertEquals(
unicode(BlockUsageLocator(depr_course, 'category', 'name', deprecated=True)),
unicode(depr_course.make_usage_key('category', 'name'))
)
course = CourseKey.from_string('course-v1:org+course+run')
self.assertEquals(
unicode(BlockUsageLocator(course, 'block_type', 'block_id')),
unicode(course.make_usage_key('block_type', 'block_id'))
)
def test_convert_deprecation(self):
depr_course = CourseKey.from_string('org/course/run')
course = CourseKey.from_string('course-v1:org+course+run')
self.assertEquals(unicode(depr_course.replace(deprecated=False)), unicode(course))
self.assertEquals(unicode(course.replace(deprecated=True)), unicode(depr_course))
def test_course_constructor_underspecified(self):
with self.assertRaises(InvalidKeyError):
CourseLocator()
with self.assertRaises(InvalidKeyError):
CourseLocator(branch='published')
def test_course_constructor_bad_version_guid(self):
with self.assertRaises(ValueError):
CourseLocator(version_guid="012345")
with self.assertRaises(InvalidKeyError):
CourseLocator(version_guid=None)
def test_course_constructor_version_guid(self):
# pylint: disable=no-member,protected-access
# generate a random location
test_id_1 = ObjectId()
test_id_1_loc = str(test_id_1)
testobj_1 = CourseLocator(version_guid=test_id_1)
self.check_course_locn_fields(testobj_1, version_guid=test_id_1)
self.assertEqual(str(testobj_1.version_guid), test_id_1_loc)
testobj_1_string = u'@'.join((testobj_1.VERSION_PREFIX, test_id_1_loc))
self.assertEqual(testobj_1._to_string(), testobj_1_string)
self.assertEqual(str(testobj_1), u'course-v1:' + testobj_1_string)
self.assertEqual(testobj_1.html_id(), u'course-v1:' + testobj_1_string)
self.assertEqual(testobj_1.version, test_id_1)
# Test using a given string
test_id_2_loc = '519665f6223ebd6980884f2b'
test_id_2 = ObjectId(test_id_2_loc)
testobj_2 = CourseLocator(version_guid=test_id_2)
self.check_course_locn_fields(testobj_2, version_guid=test_id_2)
self.assertEqual(str(testobj_2.version_guid), test_id_2_loc)
testobj_2_string = u'@'.join((testobj_2.VERSION_PREFIX, test_id_2_loc))
self.assertEqual(testobj_2._to_string(), testobj_2_string)
self.assertEqual(str(testobj_2), u'course-v1:' + testobj_2_string)
self.assertEqual(testobj_2.html_id(), u'course-v1:' + testobj_2_string)
self.assertEqual(testobj_2.version, test_id_2)
@ddt.data(
' mit.eecs',
'mit.eecs ',
CourseLocator.VERSION_PREFIX + '@mit.eecs',
BlockUsageLocator.BLOCK_PREFIX + '@black+mit.eecs',
'mit.ee cs',
'mit.ee,cs',
'mit.ee+cs',
'mit.ee&cs',
'mit.ee()cs',
CourseLocator.BRANCH_PREFIX + '@this',
'mit.eecs+' + CourseLocator.BRANCH_PREFIX,
'mit.eecs+' + CourseLocator.BRANCH_PREFIX + '@this+' + CourseLocator.BRANCH_PREFIX + '@that',
'mit.eecs+' + CourseLocator.BRANCH_PREFIX + '@this+' + CourseLocator.BRANCH_PREFIX,
'mit.eecs+' + CourseLocator.BRANCH_PREFIX + '@this ',
'mit.eecs+' + CourseLocator.BRANCH_PREFIX + '@th%is ',
u'\ufffd',
)
def test_course_constructor_bad_package_id(self, bad_id):
"""
Test all sorts of badly-formed package_ids (and urls with those package_ids)
"""
with self.assertRaises(InvalidKeyError):
CourseLocator(org=bad_id, course='test', run='2014_T2')
with self.assertRaises(InvalidKeyError):
CourseLocator(org='test', course=bad_id, run='2014_T2')
with self.assertRaises(InvalidKeyError):
CourseLocator(org='test', course='test', run=bad_id)
with self.assertRaises(InvalidKeyError):
CourseKey.from_string(u'course-v1:test+{}+2014_T2'.format(bad_id))
@ddt.data(
'course-v1:',
'course-v1:/mit.eecs',
'http:mit.eecs',
'course-v1:mit+course+run{}@branch'.format(CourseLocator.BRANCH_PREFIX),
'course-v1:mit+course+run+',
)
def test_course_constructor_bad_url(self, bad_url):
with self.assertRaises(InvalidKeyError):
CourseKey.from_string(bad_url)
def test_course_constructor_url(self):
# Test parsing a url when it starts with a version ID and there is also a block ID.
# This hits the parsers parse_guid method.
test_id_loc = '519665f6223ebd6980884f2b'
testobj = CourseKey.from_string("course-v1:{}@{}+{}@hw3".format(
CourseLocator.VERSION_PREFIX, test_id_loc, CourseLocator.BLOCK_PREFIX
))
self.check_course_locn_fields(
testobj,
version_guid=ObjectId(test_id_loc)
)
def test_course_constructor_url_package_id_and_version_guid(self):
test_id_loc = '519665f6223ebd6980884f2b'
testobj = CourseKey.from_string(
'course-v1:mit.eecs+honors.6002x+2014_T2+{}@{}'.format(CourseLocator.VERSION_PREFIX, test_id_loc)
)
self.check_course_locn_fields(
testobj,
org='mit.eecs',
course='honors.6002x',
run='2014_T2',
version_guid=ObjectId(test_id_loc)
)
def test_course_constructor_url_package_id_branch_and_version_guid(self):
test_id_loc = '519665f6223ebd6980884f2b'
org = 'mit.eecs'
course = '~6002x'
run = '2014_T2'
testobj = CourseKey.from_string('course-v1:{}+{}+{}+{}@draft-1+{}@{}'.format(
org, course, run, CourseLocator.BRANCH_PREFIX, CourseLocator.VERSION_PREFIX, test_id_loc
))
self.check_course_locn_fields(
testobj,
org=org,
course=course,
run=run,
branch='draft-1',
version_guid=ObjectId(test_id_loc)
)
def test_course_constructor_package_id_no_branch(self):
org = 'mit.eecs'
course = '6002x'
run = '2014_T2'
testurn = '{}+{}+{}'.format(org, course, run)<|fim▁hole|> # Allow access to _to_string
# pylint: disable=protected-access
self.assertEqual(testobj._to_string(), testurn)
def test_course_constructor_package_id_separate_branch(self):
org = 'mit.eecs'
course = '6002x'
run = '2014_T2'
test_branch = 'published'
expected_urn = '{}+{}+{}+{}@{}'.format(org, course, run, CourseLocator.BRANCH_PREFIX, test_branch)
testobj = CourseLocator(org=org, course=course, run=run, branch=test_branch)
self.check_course_locn_fields(
testobj,
org=org,
course=course,
run=run,
branch=test_branch,
)
# pylint: disable=no-member,protected-access
self.assertEqual(testobj.branch, test_branch)
self.assertEqual(testobj._to_string(), expected_urn)
def test_course_constructor_deprecated_offering(self):
org = 'mit.eecs'
course = '6002x'
run = '2014_T2'
offering = '{}/{}'.format(course, run)
test_branch = 'published'
with self.assertDeprecationWarning(count=2):
testobj = CourseLocator(org=org, offering=offering, branch=test_branch)
with self.assertRaises(InvalidKeyError):
CourseLocator(org=org, offering='', branch=test_branch)
with self.assertRaises(InvalidKeyError):
CourseLocator(org=org, offering=course, branch=test_branch)
self.check_course_locn_fields(
testobj,
org=org,
course=course,
run=run,
branch=test_branch,
)
@ddt.data(
"i4x://org/course/category/name",
"i4x://org/course/category/name@revision"
)
def test_make_usage_key_from_deprecated_string_roundtrip(self, url):
course_key = CourseLocator('org', 'course', 'run')
with self.assertDeprecationWarning(count=2):
self.assertEquals(
url,
course_key.make_usage_key_from_deprecated_string(url).to_deprecated_string()
)
def test_empty_run(self):
with self.assertRaises(InvalidKeyError):
CourseLocator('org', 'course', '')
self.assertEquals(
'org/course/',
unicode(CourseLocator('org', 'course', '', deprecated=True))
)<|fim▁end|> | testobj = CourseLocator(org=org, course=course, run=run)
self.check_course_locn_fields(testobj, org=org, course=course, run=run) |
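# --- Editor's note: hypothetical round-trip sketch, not part of the test file. ---
# Illustrates the serialize/parse symmetry the tests above exercise; the
# org/course/run values are made up.
key = CourseLocator(org='mit.eecs', course='6002x', run='2014_T2')
assert unicode(key) == u'course-v1:mit.eecs+6002x+2014_T2'
assert unicode(CourseKey.from_string(unicode(key))) == unicode(key)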
<|file_name|>preparse.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Kay preparse management command.
:Copyright: (c) 2009 Accense Technology, Inc.
Takashi Matsuo <tmatsuo@candit.jp>,
All rights reserved.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
from os import listdir, path, mkdir
from werkzeug.utils import import_string
import kay
import kay.app
from kay.utils import local
from kay.utils.jinja2utils.compiler import compile_dir
from kay.management.utils import print_status
IGNORE_FILENAMES = {
'kay': ('debug', 'app_template'),
  'app': ('kay',),
}
def find_template_dir(target_path, ignore_filenames):
ret = []
for filename in listdir(target_path):
target_fullpath = path.join(target_path, filename)
if path.isdir(target_fullpath):
if filename.startswith(".") or filename in ignore_filenames:
continue
if filename == "templates":
ret.append(target_fullpath)
else:
ret = ret + find_template_dir(target_fullpath, ignore_filenames)
else:
continue
return ret
def do_preparse_bundle():
"""
Pre compile all the jinja2 templates in Kay itself.
"""
print_status("Compiling bundled templates...")
app = kay.app.get_application()
env = app.app.jinja2_env
for dir in find_template_dir(kay.KAY_DIR, ('debug','app_template')):
dest = prepare_destdir(dir)
print_status("Now compiling templates in %s to %s." % (dir, dest))
compile_dir(env, dir, dest)
print_status("Finished compiling bundled templates...")
def do_preparse_apps():
"""
Pre compile all the jinja2 templates in your applications.
"""
from kay.conf import LazySettings
print_status("Compiling templates...")
application = kay.app.get_application()
applications = [application]
settings_treated = []
for key, settings_name in \<|fim▁hole|> applications.append(kay.app.get_application(
settings=LazySettings(settings_module=settings_name)))
settings_treated.append(settings_name)
for app in applications:
compile_app_templates(app.app) # pass KayApp instance
for key, submount_app in app.mounts.iteritems():
if isinstance(submount_app, kay.app.KayApp):
compile_app_templates(submount_app)
print_status("Finished compiling templates...")
def prepare_destdir(dir):
def replace_dirname(orig):
if 'templates' in orig:
return orig.replace('templates', 'templates_compiled')
else:
return orig+'_compiled'
dest = replace_dirname(dir)
if path.isdir(dest):
for d, subdirs, files in os.walk(dest):
for f in files:
compiled_filename = "%s/%s" % (d, f)
orig_filename = compiled_filename.replace(dest, dir)
if not path.isfile(orig_filename):
os.unlink(compiled_filename)
print_status("%s does not exist. So, '%s' is removed." % (
orig_filename, compiled_filename))
else:
mkdir(dest)
return dest
def compile_app_templates(app):
env = app.jinja2_env
target_dirs = [dir for dir in app.app_settings.TEMPLATE_DIRS\
if os.path.isdir(dir)]
for app in app.app_settings.INSTALLED_APPS:
if app.startswith("kay."):
continue
mod = import_string(app)
target_dirs.extend(find_template_dir(os.path.dirname(mod.__file__),
                                         ('kay',)))
for dir in target_dirs:
dest = prepare_destdir(dir)
print_status("Now compiling templates in %s to %s." % (dir, dest))
compile_dir(env, dir, dest)<|fim▁end|> | application.app.app_settings.PER_DOMAIN_SETTINGS.iteritems():
if not settings_name in settings_treated: |
<|file_name|>TestFuzzyRowAndColumnRangeFilter.java<|end_file_name|><|fim▁begin|>/**
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.filter;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import com.google.common.collect.Lists;
/**
*/
@Category(MediumTests.class)
public class TestFuzzyRowAndColumnRangeFilter {
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private final Log LOG = LogFactory.getLog(this.getClass());
/**
* @throws java.lang.Exception
*/
@BeforeClass
public static void setUpBeforeClass() throws Exception {
TEST_UTIL.startMiniCluster();
}
/**
* @throws java.lang.Exception
*/
@AfterClass
public static void tearDownAfterClass() throws Exception {
TEST_UTIL.shutdownMiniCluster();
}
/**
* @throws java.lang.Exception
*/
@Before
public void setUp() throws Exception {
// Nothing to do.
}
/**
* @throws java.lang.Exception
*/
@After
public void tearDown() throws Exception {
// Nothing to do.
}
@Test
public void Test() throws Exception {
String cf = "f";
String table = "TestFuzzyAndColumnRangeFilterClient";
HTable ht = TEST_UTIL.createTable(Bytes.toBytes(table),
Bytes.toBytes(cf), Integer.MAX_VALUE);
// 10 byte row key - (2 bytes 4 bytes 4 bytes)
// 4 byte qualifier
// 4 byte value
for (int i1 = 0; i1 < 2; i1++) {
for (int i2 = 0; i2 < 5; i2++) {
byte[] rk = new byte[10];
ByteBuffer buf = ByteBuffer.wrap(rk);
buf.clear();
buf.putShort((short) 2);
buf.putInt(i1);
buf.putInt(i2);
for (int c = 0; c < 5; c++) {
byte[] cq = new byte[4];
Bytes.putBytes(cq, 0, Bytes.toBytes(c), 0, 4);<|fim▁hole|> Put p = new Put(rk);
p.setDurability(Durability.SKIP_WAL);
p.add(cf.getBytes(), cq, Bytes.toBytes(c));
ht.put(p);
LOG.info("Inserting: rk: " + Bytes.toStringBinary(rk) + " cq: "
+ Bytes.toStringBinary(cq));
}
}
}
TEST_UTIL.flush();
// test passes
runTest(ht, 0, 10);
// test fails
runTest(ht, 1, 8);
}
private void runTest(HTable hTable, int cqStart, int expectedSize) throws IOException {
// [0, 2, ?, ?, ?, ?, 0, 0, 0, 1]
byte[] fuzzyKey = new byte[10];
ByteBuffer buf = ByteBuffer.wrap(fuzzyKey);
buf.clear();
buf.putShort((short) 2);
for (int i = 0; i < 4; i++)
buf.put((byte)63);
buf.putInt((short)1);
byte[] mask = new byte[] {0 , 0, 1, 1, 1, 1, 0, 0, 0, 0};
Pair<byte[], byte[]> pair = new Pair<byte[], byte[]>(fuzzyKey, mask);
FuzzyRowFilter fuzzyRowFilter = new FuzzyRowFilter(Lists.newArrayList(pair));
ColumnRangeFilter columnRangeFilter = new ColumnRangeFilter(Bytes.toBytes(cqStart), true
, Bytes.toBytes(4), true);
//regular test
runScanner(hTable, expectedSize, fuzzyRowFilter, columnRangeFilter);
//reverse filter order test
runScanner(hTable, expectedSize, columnRangeFilter, fuzzyRowFilter);
}
private void runScanner(HTable hTable, int expectedSize, Filter... filters) throws IOException {
String cf = "f";
Scan scan = new Scan();
scan.addFamily(cf.getBytes());
FilterList filterList = new FilterList(filters);
scan.setFilter(filterList);
ResultScanner scanner = hTable.getScanner(scan);
List<Cell> results = new ArrayList<Cell>();
Result result;
long timeBeforeScan = System.currentTimeMillis();
while ((result = scanner.next()) != null) {
for (Cell kv : result.listCells()) {
LOG.info("Got rk: " + Bytes.toStringBinary(CellUtil.cloneRow(kv)) + " cq: "
+ Bytes.toStringBinary(CellUtil.cloneQualifier(kv)));
results.add(kv);
}
}
long scanTime = System.currentTimeMillis() - timeBeforeScan;
scanner.close();
LOG.info("scan time = " + scanTime + "ms");
LOG.info("found " + results.size() + " results");
assertEquals(expectedSize, results.size());
}
}<|fim▁end|> | |
<|file_name|>request_for_quotation.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, json
from frappe import _
from frappe.model.mapper import get_mapped_doc
from frappe.utils import get_url, cint
from frappe.utils.user import get_user_fullname
from frappe.utils.print_format import download_pdf
from frappe.desk.form.load import get_attachments
from frappe.core.doctype.communication.email import make
from erpnext.accounts.party import get_party_account_currency, get_party_details
from erpnext.stock.doctype.material_request.material_request import set_missing_values
from erpnext.controllers.buying_controller import BuyingController
STANDARD_USERS = ("Guest", "Administrator")
class RequestforQuotation(BuyingController):
def validate(self):
self.validate_duplicate_supplier()
self.validate_common()
self.update_email_id()
def validate_duplicate_supplier(self):
supplier_list = [d.supplier for d in self.suppliers]
if len(supplier_list) != len(set(supplier_list)):
frappe.throw(_("Same supplier has been entered multiple times"))
def validate_common(self):
pc = frappe.get_doc('Purchase Common')
pc.validate_for_items(self)
def update_email_id(self):
for rfq_supplier in self.suppliers:
if not rfq_supplier.email_id:
rfq_supplier.email_id = frappe.db.get_value("Contact", rfq_supplier.contact, "email_id")
def validate_email_id(self, args):
if not args.email_id:
frappe.throw(_("Row {0}: For supplier {0} Email Address is required to send email").format(args.idx, args.supplier))
def on_submit(self):
frappe.db.set(self, 'status', 'Submitted')
def on_cancel(self):
frappe.db.set(self, 'status', 'Cancelled')
def send_to_supplier(self):
for rfq_supplier in self.suppliers:
if rfq_supplier.send_email:
self.validate_email_id(rfq_supplier)
# make new user if required
update_password_link = self.update_supplier_contact(rfq_supplier, self.get_link())
self.update_supplier_part_no(rfq_supplier)
self.supplier_rfq_mail(rfq_supplier, update_password_link, self.get_link())
def get_link(self):
# RFQ link for supplier portal
return get_url("/rfq/" + self.name)
def update_supplier_part_no(self, args):
self.vendor = args.supplier
for item in self.items:
item.supplier_part_no = frappe.db.get_value('Item Supplier',
{'parent': item.item_code, 'supplier': args.supplier}, 'supplier_part_no')<|fim▁hole|> def update_supplier_contact(self, rfq_supplier, link):
'''Create a new user for the supplier if not set in contact'''
update_password_link = ''
if frappe.db.exists("User", rfq_supplier.email_id):
user = frappe.get_doc("User", rfq_supplier.email_id)
else:
user, update_password_link = self.create_user(rfq_supplier, link)
self.update_contact_of_supplier(rfq_supplier, user)
return update_password_link
def update_contact_of_supplier(self, rfq_supplier, user):
if rfq_supplier.contact:
contact = frappe.get_doc("Contact", rfq_supplier.contact)
else:
contact = frappe.new_doc("Contact")
contact.first_name = rfq_supplier.supplier_name or rfq_supplier.supplier
contact.supplier = rfq_supplier.supplier
if not contact.email_id and not contact.user:
contact.email_id = user.name
contact.user = user.name
contact.save(ignore_permissions=True)
def create_user(self, rfq_supplier, link):
user = frappe.get_doc({
'doctype': 'User',
'send_welcome_email': 0,
'email': rfq_supplier.email_id,
'first_name': rfq_supplier.supplier_name or rfq_supplier.supplier,
'user_type': 'Website User',
'redirect_url': link
})
user.save(ignore_permissions=True)
update_password_link = user.reset_password()
return user, update_password_link
def supplier_rfq_mail(self, data, update_password_link, rfq_link):
full_name = get_user_fullname(frappe.session['user'])
if full_name == "Guest":
full_name = "Administrator"
args = {
'update_password_link': update_password_link,
'message': frappe.render_template(self.message_for_supplier, data.as_dict()),
'rfq_link': rfq_link,
'user_fullname': full_name
}
subject = _("Request for Quotation")
template = "templates/emails/request_for_quotation.html"
sender = frappe.session.user not in STANDARD_USERS and frappe.session.user or None
message = frappe.get_template(template).render(args)
attachments = self.get_attachments()
self.send_email(data, sender, subject, message, attachments)
def send_email(self, data, sender, subject, message, attachments):
		make(subject=subject, content=message, recipients=data.email_id,
			sender=sender, attachments=attachments, send_email=True,
doctype=self.doctype, name=self.name)["name"]
frappe.msgprint(_("Email sent to supplier {0}").format(data.supplier))
def get_attachments(self):
attachments = [d.name for d in get_attachments(self.doctype, self.name)]
attachments.append(frappe.attach_print(self.doctype, self.name, doc=self))
return attachments
@frappe.whitelist()
def send_supplier_emails(rfq_name):
check_portal_enabled('Request for Quotation')
rfq = frappe.get_doc("Request for Quotation", rfq_name)
if rfq.docstatus==1:
rfq.send_to_supplier()
def check_portal_enabled(reference_doctype):
if not frappe.db.get_value('Portal Menu Item',
{'reference_doctype': reference_doctype}, 'enabled'):
frappe.throw(_("Request for Quotation is disabled to access from portal, for more check portal settings."))
def get_list_context(context=None):
from erpnext.controllers.website_list_for_contact import get_list_context
list_context = get_list_context(context)
list_context["show_sidebar"] = True
return list_context
# This method is used to make supplier quotation from material request form.
@frappe.whitelist()
def make_supplier_quotation(source_name, for_supplier, target_doc=None):
def postprocess(source, target_doc):
target_doc.supplier = for_supplier
args = get_party_details(for_supplier, party_type="Supplier", ignore_permissions=True)
target_doc.currency = args.currency or get_party_account_currency('Supplier', for_supplier, source.company)
target_doc.buying_price_list = args.buying_price_list or frappe.db.get_value('Buying Settings', None, 'buying_price_list')
set_missing_values(source, target_doc)
doclist = get_mapped_doc("Request for Quotation", source_name, {
"Request for Quotation": {
"doctype": "Supplier Quotation",
"validation": {
"docstatus": ["=", 1]
}
},
"Request for Quotation Item": {
"doctype": "Supplier Quotation Item",
"field_map": {
"name": "request_for_quotation_item",
"parent": "request_for_quotation"
},
}
}, target_doc, postprocess)
return doclist
# This method is used to make supplier quotation from supplier's portal.
@frappe.whitelist()
def create_supplier_quotation(doc):
if isinstance(doc, basestring):
doc = json.loads(doc)
try:
sq_doc = frappe.get_doc({
"doctype": "Supplier Quotation",
"supplier": doc.get('supplier'),
"terms": doc.get("terms"),
"company": doc.get("company"),
"currency": doc.get('currency') or get_party_account_currency('Supplier', doc.get('supplier'), doc.get('company')),
"buying_price_list": doc.get('buying_price_list') or frappe.db.get_value('Buying Settings', None, 'buying_price_list')
})
add_items(sq_doc, doc.get('supplier'), doc.get('items'))
sq_doc.flags.ignore_permissions = True
sq_doc.run_method("set_missing_values")
sq_doc.save()
frappe.msgprint(_("Supplier Quotation {0} created").format(sq_doc.name))
return sq_doc.name
except Exception:
return None
def add_items(sq_doc, supplier, items):
for data in items:
if data.get("qty") > 0:
if isinstance(data, dict):
data = frappe._dict(data)
create_rfq_items(sq_doc, supplier, data)
def create_rfq_items(sq_doc, supplier, data):
sq_doc.append('items', {
"item_code": data.item_code,
"item_name": data.item_name,
"description": data.description,
"qty": data.qty,
"rate": data.rate,
"supplier_part_no": frappe.db.get_value("Item Supplier", {'parent': data.item_code, 'supplier': supplier}, "supplier_part_no"),
"warehouse": data.warehouse or '',
"request_for_quotation_item": data.name,
"request_for_quotation": data.parent
})
@frappe.whitelist()
def get_pdf(doctype, name, supplier_idx):
doc = get_rfq_doc(doctype, name, supplier_idx)
if doc:
download_pdf(doctype, name, doc=doc)
def get_rfq_doc(doctype, name, supplier_idx):
if cint(supplier_idx):
doc = frappe.get_doc(doctype, name)
args = doc.get('suppliers')[cint(supplier_idx) - 1]
doc.update_supplier_part_no(args)
return doc
@frappe.whitelist()
def get_item_from_material_requests_based_on_supplier(source_name, target_doc = None):
mr_items_list = frappe.db.sql("""
SELECT
mr.name, mr_item.item_code
FROM
`tabItem` as item,
`tabItem Supplier` as item_supp,
`tabMaterial Request Item` as mr_item,
`tabMaterial Request` as mr
WHERE item_supp.supplier = %(supplier)s
AND item.name = item_supp.parent
AND mr_item.parent = mr.name
AND mr_item.item_code = item.name
AND mr.status != "Stopped"
AND mr.material_request_type = "Purchase"
AND mr.docstatus = 1
AND mr.per_ordered < 99.99""", {"supplier": source_name}, as_dict=1)
material_requests = {}
for d in mr_items_list:
material_requests.setdefault(d.name, []).append(d.item_code)
for mr, items in material_requests.items():
target_doc = get_mapped_doc("Material Request", mr, {
"Material Request": {
"doctype": "Request for Quotation",
"validation": {
"docstatus": ["=", 1],
"material_request_type": ["=", "Purchase"],
}
},
"Material Request Item": {
"doctype": "Request for Quotation Item",
"condition": lambda row: row.item_code in items,
"field_map": [
["name", "material_request_item"],
["parent", "material_request"],
["uom", "uom"]
]
}
}, target_doc)
return target_doc<|fim▁end|> | |
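# --- Editor's note: hypothetical payload sketch, not part of this module. ---
# The keys mirror what create_supplier_quotation() and create_rfq_items()
# read above; every value is made-up test data.
example = {
	'supplier': '_Test Supplier',
	'company': '_Test Company',
	'terms': 'Net 30',
	'items': [{
		'item_code': 'ITEM-001', 'item_name': 'Widget', 'description': 'Widget',
		'qty': 10, 'rate': 4.5, 'warehouse': '',
		'name': 'RFQI-00001', 'parent': 'RFQ-00001',
	}],
}
create_supplier_quotation(json.dumps(example))  # returns the new name, or None on failure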
<|file_name|>package.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
from spack import *
class Libidl(AutotoolsPackage):
"""libraries for Interface Definition Language files"""
homepage = "https://developer.gnome.org/"
url = "https://ftp.gnome.org/pub/gnome/sources/libIDL/0.8/libIDL-0.8.14.tar.bz2"
version('0.8.14', sha256='c5d24d8c096546353fbc7cedf208392d5a02afe9d56ebcc1cccb258d7c4d2220')
depends_on('pkgconfig', type='build')
depends_on('glib')<|fim▁end|> | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT) |
<|file_name|>0002_auto_20160331_1111.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-03-30 22:11
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('wordproject', '0001_initial'),
]<|fim▁hole|> name='description',
field=models.TextField(max_length=200, null=True),
),
]<|fim▁end|> |
operations = [
migrations.AlterField(
model_name='wordrecord', |
<|file_name|>main.py<|end_file_name|><|fim▁begin|>import logging, os
import psycopg2
# settings
database_name = 'postgres_database'
user = 'postgres_user'
password = 'some_password_here_lol'
port = 5432
host = 'postgres_host_normally_localhost'
path_to_gnaf_data = '/path/to/gnaf/data/'
# setup
logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s', level=logging.DEBUG)
def get_folder_path(support_text, absolute_path, search_path, search_name, test_name):
if not search_path and search_name in test_name:
logging.debug(support_text + absolute_path)
return absolute_path
else:
return search_path
def load_sql_file_into_db(file_path):
    with open(file_path, "r") as sql_file:
        db_cursor.execute(sql_file.read())
db_connection.commit()
<|fim▁hole|> db_connection = psycopg2.connect(database=database_name, user=user, password=password, host=host, port=port)
db_cursor = db_connection.cursor()
logging.info("Step 0 of 5 : Bootstrapping started...")
gnaf_parent_path = ''
extras_path = ''
table_creation_scripts_path = ''
example_view_creation_scripts_path = ''
table_creation_script_path = ''
foreign_key_script_path = ''
example_view_script_path = ''
authority_code_path = ''
standard_data_path = ''
gnaf_name = 'G-NAF '
table_creation_script_folder_name = 'GNAF_TableCreation_Scripts'
table_creation_script_name = 'create_tables_ansi.sql'
foreign_key_script_name = 'add_fk_constraints.sql'
authority_code_name = 'Authority Code'
standard_data_name = 'Standard'
psv_file_suffix = "_psv.psv"
views_script_folder_name = 'GNAF_View_Scripts'
example_view_script_name = 'address_view.sql'
SQL_STATEMENT = """ COPY %s FROM STDIN WITH CSV HEADER DELIMITER AS '|'"""
# find sub folders needed
for dirname, dirnames, filenames in os.walk(path_to_gnaf_data):
for subdirname in dirnames:
absolute_path = os.path.join(dirname, subdirname)
gnaf_parent_path = get_folder_path("G-NAF parent folder: ", absolute_path, gnaf_parent_path, gnaf_name, subdirname)
table_creation_scripts_path = get_folder_path("Table creation scripts folder: ", absolute_path, table_creation_scripts_path, table_creation_script_folder_name, subdirname)
example_view_creation_scripts_path = get_folder_path("Example View creation scripts folder: ", absolute_path, example_view_creation_scripts_path, views_script_folder_name, subdirname)
authority_code_path = get_folder_path("Authority Code folder: ", absolute_path, authority_code_path, authority_code_name, subdirname)
standard_data_path = get_folder_path("Standard data folder: ", absolute_path, standard_data_path, standard_data_name, subdirname)
# find table/fk creation scripts
for dirname, dirnames, filenames in os.walk(table_creation_scripts_path):
for filename in filenames:
absolute_path = os.path.join(table_creation_scripts_path, filename)
if not table_creation_script_path and table_creation_script_name in filename:
table_creation_script_path = absolute_path
logging.debug("Table creation script: " + table_creation_script_path)
if not foreign_key_script_path and foreign_key_script_name in filename:
foreign_key_script_path = absolute_path
logging.debug("Foreign key script: " + foreign_key_script_path)
# find views creation script
for dirname, dirnames, filenames in os.walk(example_view_creation_scripts_path):
for filename in filenames:
absolute_path = os.path.join(example_view_creation_scripts_path, filename)
if not example_view_script_path and example_view_script_name in filename:
example_view_script_path = absolute_path
logging.debug("Example views script: " + example_view_script_path)
logging.info("Step 0 of 5 : Bootstrapping finished!")
logging.info("Step 1 of 5 : Creating Schema started...")
load_sql_file_into_db(table_creation_script_path)
logging.info("Step 1 of 5 : Creating Schema finished!")
logging.info("Step 2 of 5 : Loading Authority Code data started...")
for dirname, dirnames, filenames in os.walk(authority_code_path):
num_files = str(len(filenames))
for index, filename in enumerate(filenames):
absolute_path = os.path.join(authority_code_path, filename)
authority_code_prefix = "Authority_Code_"
authority_code_suffix = psv_file_suffix
table_name = filename.replace(authority_code_prefix, "")
table_name = table_name.replace(authority_code_suffix, "")
logging.info("Importing file " + str(index + 1) + " of " + num_files + ": " + filename + " -> " + table_name)
db_cursor.copy_expert(sql=SQL_STATEMENT % table_name, file=open(absolute_path))
db_connection.commit()
logging.info("Step 2 of 5 : Loading Authority Code data finished!")
logging.info("Step 3 of 5 : Loading Standard data started...")
for dirname, dirnames, filenames in os.walk(standard_data_path):
num_files = str(len(filenames))
for index, filename in enumerate(filenames):
absolute_path = os.path.join(standard_data_path, filename)
standard_data_suffix = psv_file_suffix
table_name = filename.split('_', 1)[-1]
table_name = table_name.replace(standard_data_suffix, "")
logging.info("Importing file " + str(index + 1) + " of " + num_files + ": " + filename + " -> " + table_name)
db_cursor.copy_expert(sql=SQL_STATEMENT % table_name, file=open(absolute_path))
db_connection.commit()
logging.info("Step 3 of 5 : Loading Standard data finished!")
logging.info("Step 4 of 5 : Creating Foreign Key relationships creation started...")
load_sql_file_into_db(foreign_key_script_path)
logging.info("Step 4 of 5 : Creating Foreign Key relationships creation finished!")
logging.info("Step 5 of 5 : Creating example views creation started...")
load_sql_file_into_db(example_view_script_path)
logging.info("Step 5 of 5 : Creating example views creation finished!")
db_cursor.close()
db_connection.close()
except Exception as exception:
logging.error("Exception occurred: " + str(exception))<|fim▁end|> | try: |
<|file_name|>net.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2009-2010 Satoshi Nakamoto
// Copyright (c) 2009-2012 The Bitcoin developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "irc.h"
#include "db.h"
#include "net.h"
#include "init.h"
#include "addrman.h"
#include "ui_interface.h"
#include "script.h"
#ifdef WIN32
#include <string.h>
#endif
#ifdef USE_UPNP
#include <miniupnpc/miniwget.h>
#include <miniupnpc/miniupnpc.h>
#include <miniupnpc/upnpcommands.h>
#include <miniupnpc/upnperrors.h>
#endif
// Dump addresses to peers.dat every 15 minutes (900s)
#define DUMP_ADDRESSES_INTERVAL 900
using namespace std;
using namespace boost;
static const int MAX_OUTBOUND_CONNECTIONS = 8;
bool OpenNetworkConnection(const CAddress& addrConnect, CSemaphoreGrant *grantOutbound = NULL, const char *strDest = NULL, bool fOneShot = false);
struct LocalServiceInfo {
int nScore;
int nPort;
};
//
// Global state variables
//
bool fDiscover = true;
uint64 nLocalServices = NODE_NETWORK;
static CCriticalSection cs_mapLocalHost;
static map<CNetAddr, LocalServiceInfo> mapLocalHost;
static bool vfReachable[NET_MAX] = {};
static bool vfLimited[NET_MAX] = {};
static CNode* pnodeLocalHost = NULL;
static CNode* pnodeSync = NULL;
uint64 nLocalHostNonce = 0;
static std::vector<SOCKET> vhListenSocket;
CAddrMan addrman;
int nMaxConnections = 125;
vector<CNode*> vNodes;
CCriticalSection cs_vNodes;
map<CInv, CDataStream> mapRelay;
deque<pair<int64, CInv> > vRelayExpiration;
CCriticalSection cs_mapRelay;
limitedmap<CInv, int64> mapAlreadyAskedFor(MAX_INV_SZ);
static deque<string> vOneShots;
CCriticalSection cs_vOneShots;
set<CNetAddr> setservAddNodeAddresses;
CCriticalSection cs_setservAddNodeAddresses;
vector<std::string> vAddedNodes;
CCriticalSection cs_vAddedNodes;
static CSemaphore *semOutbound = NULL;
void AddOneShot(string strDest)
{
LOCK(cs_vOneShots);
vOneShots.push_back(strDest);
}
unsigned short GetListenPort()
{
return (unsigned short)(GetArg("-port", GetDefaultPort()));
}
void CNode::PushGetBlocks(CBlockIndex* pindexBegin, uint256 hashEnd)
{
// Filter out duplicate requests
if (pindexBegin == pindexLastGetBlocksBegin && hashEnd == hashLastGetBlocksEnd)
return;
pindexLastGetBlocksBegin = pindexBegin;
hashLastGetBlocksEnd = hashEnd;
PushMessage("getblocks", CBlockLocator(pindexBegin), hashEnd);
}
// find 'best' local address for a particular peer
bool GetLocal(CService& addr, const CNetAddr *paddrPeer)
{
if (fNoListen)
return false;
int nBestScore = -1;
int nBestReachability = -1;
{
LOCK(cs_mapLocalHost);
for (map<CNetAddr, LocalServiceInfo>::iterator it = mapLocalHost.begin(); it != mapLocalHost.end(); it++)
{
int nScore = (*it).second.nScore;
int nReachability = (*it).first.GetReachabilityFrom(paddrPeer);
if (nReachability > nBestReachability || (nReachability == nBestReachability && nScore > nBestScore))
{
addr = CService((*it).first, (*it).second.nPort);
nBestReachability = nReachability;
nBestScore = nScore;
}
}
}
return nBestScore >= 0;
}
// get best local address for a particular peer as a CAddress
CAddress GetLocalAddress(const CNetAddr *paddrPeer)
{
CAddress ret(CService("0.0.0.0",0),0);
CService addr;
if (GetLocal(addr, paddrPeer))
{
ret = CAddress(addr);
ret.nServices = nLocalServices;
ret.nTime = GetAdjustedTime();
}
return ret;
}
bool RecvLine(SOCKET hSocket, string& strLine)
{
strLine = "";
loop
{
char c;
int nBytes = recv(hSocket, &c, 1, 0);
if (nBytes > 0)
{
if (c == '\n')
continue;
if (c == '\r')
return true;
strLine += c;
if (strLine.size() >= 9000)
return true;
}
else if (nBytes <= 0)
{
boost::this_thread::interruption_point();
if (nBytes < 0)
{
int nErr = WSAGetLastError();
if (nErr == WSAEMSGSIZE)
continue;
if (nErr == WSAEWOULDBLOCK || nErr == WSAEINTR || nErr == WSAEINPROGRESS)
{
MilliSleep(10);
continue;
}
}
if (!strLine.empty())
return true;
if (nBytes == 0)
{
// socket closed
printf("socket closed\n");
return false;
}
else
{
// socket error
int nErr = WSAGetLastError();
printf("recv failed: %d\n", nErr);
return false;
}
}
}
}
// used when scores of local addresses may have changed
// pushes better local address to peers
void static AdvertizeLocal()
{
LOCK(cs_vNodes);
BOOST_FOREACH(CNode* pnode, vNodes)
{
if (pnode->fSuccessfullyConnected)
{
CAddress addrLocal = GetLocalAddress(&pnode->addr);
if (addrLocal.IsRoutable() && (CService)addrLocal != (CService)pnode->addrLocal)
{
pnode->PushAddress(addrLocal);
pnode->addrLocal = addrLocal;
}
}
}
}
void SetReachable(enum Network net, bool fFlag)
{
LOCK(cs_mapLocalHost);
vfReachable[net] = fFlag;
if (net == NET_IPV6 && fFlag)
vfReachable[NET_IPV4] = true;
}
// learn a new local address
bool AddLocal(const CService& addr, int nScore)
{
if (!addr.IsRoutable())
return false;
if (!fDiscover && nScore < LOCAL_MANUAL)
return false;
if (IsLimited(addr))
return false;
printf("AddLocal(%s,%i)\n", addr.ToString().c_str(), nScore);
{
LOCK(cs_mapLocalHost);
bool fAlready = mapLocalHost.count(addr) > 0;
LocalServiceInfo &info = mapLocalHost[addr];
if (!fAlready || nScore >= info.nScore) {
info.nScore = nScore + (fAlready ? 1 : 0);
info.nPort = addr.GetPort();
}
SetReachable(addr.GetNetwork());
}
AdvertizeLocal();
return true;
}
bool AddLocal(const CNetAddr &addr, int nScore)
{
return AddLocal(CService(addr, GetListenPort()), nScore);
}
/** Make a particular network entirely off-limits (no automatic connects to it) */
void SetLimited(enum Network net, bool fLimited)
{
if (net == NET_UNROUTABLE)
return;
LOCK(cs_mapLocalHost);
vfLimited[net] = fLimited;
}
bool IsLimited(enum Network net)
{
LOCK(cs_mapLocalHost);
return vfLimited[net];
}
bool IsLimited(const CNetAddr &addr)
{
return IsLimited(addr.GetNetwork());
}
/** vote for a local address */
bool SeenLocal(const CService& addr)
{
{
LOCK(cs_mapLocalHost);
if (mapLocalHost.count(addr) == 0)
return false;
mapLocalHost[addr].nScore++;
}
AdvertizeLocal();
return true;
}
/** check whether a given address is potentially local */
bool IsLocal(const CService& addr)
{
LOCK(cs_mapLocalHost);
return mapLocalHost.count(addr) > 0;
}
/** check whether a given address is in a network we can probably connect to */
bool IsReachable(const CNetAddr& addr)
{
LOCK(cs_mapLocalHost);
enum Network net = addr.GetNetwork();
return vfReachable[net] && !vfLimited[net];
}
bool GetMyExternalIP2(const CService& addrConnect, const char* pszGet, const char* pszKeyword, CNetAddr& ipRet)
{
SOCKET hSocket;
if (!ConnectSocket(addrConnect, hSocket))
return error("GetMyExternalIP() : connection to %s failed", addrConnect.ToString().c_str());
send(hSocket, pszGet, strlen(pszGet), MSG_NOSIGNAL);
string strLine;
while (RecvLine(hSocket, strLine))
{
if (strLine.empty()) // HTTP response is separated from headers by blank line
{
loop
{
if (!RecvLine(hSocket, strLine))
{
closesocket(hSocket);
return false;
}
if (pszKeyword == NULL)
break;
if (strLine.find(pszKeyword) != string::npos)
{
strLine = strLine.substr(strLine.find(pszKeyword) + strlen(pszKeyword));
break;
}
}
closesocket(hSocket);
if (strLine.find("<") != string::npos)
strLine = strLine.substr(0, strLine.find("<"));
strLine = strLine.substr(strspn(strLine.c_str(), " \t\n\r"));
while (strLine.size() > 0 && isspace(strLine[strLine.size()-1]))
strLine.resize(strLine.size()-1);
CService addr(strLine,0,true);
printf("GetMyExternalIP() received [%s] %s\n", strLine.c_str(), addr.ToString().c_str());
if (!addr.IsValid() || !addr.IsRoutable())
return false;
ipRet.SetIP(addr);
return true;
}
}
closesocket(hSocket);
return error("GetMyExternalIP() : connection closed");
}
bool GetMyExternalIP(CNetAddr& ipRet)
{
CService addrConnect;
const char* pszGet;
const char* pszKeyword;
for (int nLookup = 0; nLookup <= 1; nLookup++)
for (int nHost = 1; nHost <= 2; nHost++)
{
// We should be phasing out our use of sites like these. If we need
// replacements, we should ask for volunteers to put this simple
// php file on their web server that prints the client IP:
// <?php echo $_SERVER["REMOTE_ADDR"]; ?>
if (nHost == 1)
{
addrConnect = CService("91.198.22.70", 80); // checkip.dyndns.org
if (nLookup == 1)
{
CService addrIP("checkip.dyndns.org", 80, true);
if (addrIP.IsValid())
addrConnect = addrIP;
}
pszGet = "GET / HTTP/1.1\r\n"
"Host: checkip.dyndns.org\r\n"
"User-Agent: Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)\r\n"
"Connection: close\r\n"
"\r\n";
pszKeyword = "Address:";
}
else if (nHost == 2)
{
addrConnect = CService("74.208.43.192", 80); // www.showmyip.com
if (nLookup == 1)
{
CService addrIP("www.showmyip.com", 80, true);
if (addrIP.IsValid())
addrConnect = addrIP;
}
pszGet = "GET /simple/ HTTP/1.1\r\n"
"Host: www.showmyip.com\r\n"
"User-Agent: Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)\r\n"
"Connection: close\r\n"
"\r\n";
pszKeyword = NULL; // Returns just IP address
}
if (GetMyExternalIP2(addrConnect, pszGet, pszKeyword, ipRet))
return true;
}
return false;
}
void ThreadGetMyExternalIP(void* parg)
{
// Make this thread recognisable as the external IP detection thread
RenameThread("bitcoin-ext-ip");
CNetAddr addrLocalHost;
if (GetMyExternalIP(addrLocalHost))
{
printf("GetMyExternalIP() returned %s\n", addrLocalHost.ToStringIP().c_str());
AddLocal(addrLocalHost, LOCAL_HTTP);
}
}
void AddressCurrentlyConnected(const CService& addr)
{
addrman.Connected(addr);
}
CNode* FindNode(const CNetAddr& ip)
{
LOCK(cs_vNodes);
BOOST_FOREACH(CNode* pnode, vNodes)
if ((CNetAddr)pnode->addr == ip)
return (pnode);
return NULL;
}
CNode* FindNode(std::string addrName)
{
LOCK(cs_vNodes);
BOOST_FOREACH(CNode* pnode, vNodes)
if (pnode->addrName == addrName)
return (pnode);
return NULL;
}
CNode* FindNode(const CService& addr)
{
LOCK(cs_vNodes);
BOOST_FOREACH(CNode* pnode, vNodes)
if ((CService)pnode->addr == addr)
return (pnode);
return NULL;
}
CNode* ConnectNode(CAddress addrConnect, const char *pszDest)
{
if (pszDest == NULL) {
if (IsLocal(addrConnect))
return NULL;
// Look for an existing connection
CNode* pnode = FindNode((CService)addrConnect);
if (pnode)
{
pnode->AddRef();
return pnode;
}
}
/// debug print
printf("trying connection %s lastseen=%.1fhrs\n",
pszDest ? pszDest : addrConnect.ToString().c_str(),
pszDest ? 0 : (double)(GetAdjustedTime() - addrConnect.nTime)/3600.0);
// Connect
SOCKET hSocket;
if (pszDest ? ConnectSocketByName(addrConnect, hSocket, pszDest, GetDefaultPort()) : ConnectSocket(addrConnect, hSocket))
{
addrman.Attempt(addrConnect);
/// debug print
printf("connected %s\n", pszDest ? pszDest : addrConnect.ToString().c_str());
// Set to non-blocking
#ifdef WIN32
u_long nOne = 1;
if (ioctlsocket(hSocket, FIONBIO, &nOne) == SOCKET_ERROR)
printf("ConnectSocket() : ioctlsocket non-blocking setting failed, error %d\n", WSAGetLastError());
#else
if (fcntl(hSocket, F_SETFL, O_NONBLOCK) == SOCKET_ERROR)
printf("ConnectSocket() : fcntl non-blocking setting failed, error %d\n", errno);
#endif
// Add node
CNode* pnode = new CNode(hSocket, addrConnect, pszDest ? pszDest : "", false);
pnode->AddRef();
{
LOCK(cs_vNodes);
vNodes.push_back(pnode);
}
pnode->nTimeConnected = GetTime();
return pnode;
}
else
{
return NULL;
}
}
void CNode::CloseSocketDisconnect()
{
fDisconnect = true;
if (hSocket != INVALID_SOCKET)
{
printf("disconnecting node %s\n", addrName.c_str());
closesocket(hSocket);
hSocket = INVALID_SOCKET;
}
// in case this fails, we'll empty the recv buffer when the CNode is deleted
TRY_LOCK(cs_vRecvMsg, lockRecv);
if (lockRecv)
vRecvMsg.clear();
// if this was the sync node, we'll need a new one
if (this == pnodeSync)
pnodeSync = NULL;
}
void CNode::Cleanup()
{
}
void CNode::PushVersion()
{
/// when NTP implemented, change to just nTime = GetAdjustedTime()
int64 nTime = (fInbound ? GetAdjustedTime() : GetTime());
CAddress addrYou = (addr.IsRoutable() && !IsProxy(addr) ? addr : CAddress(CService("0.0.0.0",0)));
CAddress addrMe = GetLocalAddress(&addr);
RAND_bytes((unsigned char*)&nLocalHostNonce, sizeof(nLocalHostNonce));
printf("send version message: version %d, blocks=%d, us=%s, them=%s, peer=%s\n", PROTOCOL_VERSION, nBestHeight, addrMe.ToString().c_str(), addrYou.ToString().c_str(), addr.ToString().c_str());
PushMessage("version", PROTOCOL_VERSION, nLocalServices, nTime, addrYou, addrMe,
nLocalHostNonce, FormatSubVersion(CLIENT_NAME, CLIENT_VERSION, std::vector<string>()), nBestHeight);
}
std::map<CNetAddr, int64> CNode::setBanned;
CCriticalSection CNode::cs_setBanned;
void CNode::ClearBanned()
{
setBanned.clear();
}
bool CNode::IsBanned(CNetAddr ip)
{
bool fResult = false;
{
LOCK(cs_setBanned);
std::map<CNetAddr, int64>::iterator i = setBanned.find(ip);
if (i != setBanned.end())
{
int64 t = (*i).second;
if (GetTime() < t)
fResult = true;
}
}
return fResult;
}
bool CNode::Misbehaving(int howmuch)
{
if (addr.IsLocal())
{
printf("Warning: Local node %s misbehaving (delta: %d)!\n", addrName.c_str(), howmuch);
return false;
}
nMisbehavior += howmuch;
if (nMisbehavior >= GetArg("-banscore", 100))
{
int64 banTime = GetTime()+GetArg("-bantime", 60*60*24); // Default 24-hour ban
printf("Misbehaving: %s (%d -> %d) DISCONNECTING\n", addr.ToString().c_str(), nMisbehavior-howmuch, nMisbehavior);
{
LOCK(cs_setBanned);
if (setBanned[addr] < banTime)
setBanned[addr] = banTime;
}
CloseSocketDisconnect();
return true;
} else
printf("Misbehaving: %s (%d -> %d)\n", addr.ToString().c_str(), nMisbehavior-howmuch, nMisbehavior);
return false;
}
#undef X
#define X(name) stats.name = name
void CNode::copyStats(CNodeStats &stats)
{
X(nServices);
X(nLastSend);
X(nLastRecv);
X(nTimeConnected);
X(addrName);
X(nVersion);
X(cleanSubVer);
X(fInbound);
X(nStartingHeight);
X(nMisbehavior);
X(nSendBytes);
X(nRecvBytes);
X(nBlocksRequested);
stats.fSyncNode = (this == pnodeSync);
}
#undef X
// requires LOCK(cs_vRecvMsg)
bool CNode::ReceiveMsgBytes(const char *pch, unsigned int nBytes)
{
while (nBytes > 0) {
// get current incomplete message, or create a new one
if (vRecvMsg.empty() ||
vRecvMsg.back().complete())
vRecvMsg.push_back(CNetMessage(SER_NETWORK, nRecvVersion));
CNetMessage& msg = vRecvMsg.back();
// absorb network data
int handled;
if (!msg.in_data)
handled = msg.readHeader(pch, nBytes);
else
handled = msg.readData(pch, nBytes);
if (handled < 0)
return false;
pch += handled;
nBytes -= handled;
}
return true;
}
int CNetMessage::readHeader(const char *pch, unsigned int nBytes)
{
// copy data to temporary parsing buffer
unsigned int nRemaining = 24 - nHdrPos;
unsigned int nCopy = std::min(nRemaining, nBytes);
memcpy(&hdrbuf[nHdrPos], pch, nCopy);
nHdrPos += nCopy;
// if header incomplete, exit
if (nHdrPos < 24)
return nCopy;
// deserialize to CMessageHeader
try {
hdrbuf >> hdr;
}
catch (std::exception &e) {
return -1;
}
// reject messages larger than MAX_SIZE
if (hdr.nMessageSize > MAX_SIZE)
return -1;
// switch state to reading message data
in_data = true;
vRecv.resize(hdr.nMessageSize);
return nCopy;
}
int CNetMessage::readData(const char *pch, unsigned int nBytes)
{
unsigned int nRemaining = hdr.nMessageSize - nDataPos;
unsigned int nCopy = std::min(nRemaining, nBytes);
memcpy(&vRecv[nDataPos], pch, nCopy);
nDataPos += nCopy;
return nCopy;
}
// requires LOCK(cs_vSend)
void SocketSendData(CNode *pnode)
{
std::deque<CSerializeData>::iterator it = pnode->vSendMsg.begin();
while (it != pnode->vSendMsg.end()) {
const CSerializeData &data = *it;
assert(data.size() > pnode->nSendOffset);
int nBytes = send(pnode->hSocket, &data[pnode->nSendOffset], data.size() - pnode->nSendOffset, MSG_NOSIGNAL | MSG_DONTWAIT);
if (nBytes > 0) {
pnode->nLastSend = GetTime();
pnode->nSendBytes += nBytes;
pnode->nSendOffset += nBytes;
if (pnode->nSendOffset == data.size()) {
pnode->nSendOffset = 0;
pnode->nSendSize -= data.size();
it++;
} else {
// could not send full message; stop sending more
break;
}
} else {
if (nBytes < 0) {
// error
int nErr = WSAGetLastError();
if (nErr != WSAEWOULDBLOCK && nErr != WSAEMSGSIZE && nErr != WSAEINTR && nErr != WSAEINPROGRESS)
{
printf("socket send error %d\n", nErr);
pnode->CloseSocketDisconnect();
}
}
// couldn't send anything at all
break;
}
}
if (it == pnode->vSendMsg.end()) {
assert(pnode->nSendOffset == 0);
assert(pnode->nSendSize == 0);
}
pnode->vSendMsg.erase(pnode->vSendMsg.begin(), it);
}
static list<CNode*> vNodesDisconnected;
void ThreadSocketHandler()
{
unsigned int nPrevNodeCount = 0;
loop
{
//
// Disconnect nodes
//
{
LOCK(cs_vNodes);
// Disconnect unused nodes
vector<CNode*> vNodesCopy = vNodes;
BOOST_FOREACH(CNode* pnode, vNodesCopy)
{
if (pnode->fDisconnect ||
(pnode->GetRefCount() <= 0 && pnode->vRecvMsg.empty() && pnode->nSendSize == 0 && pnode->ssSend.empty()))
{
// remove from vNodes
vNodes.erase(remove(vNodes.begin(), vNodes.end(), pnode), vNodes.end());
// release outbound grant (if any)
pnode->grantOutbound.Release();
// close socket and cleanup
pnode->CloseSocketDisconnect();
pnode->Cleanup();
// hold in disconnected pool until all refs are released
if (pnode->fNetworkNode || pnode->fInbound)
pnode->Release();
vNodesDisconnected.push_back(pnode);
}
}
// Delete disconnected nodes
list<CNode*> vNodesDisconnectedCopy = vNodesDisconnected;
BOOST_FOREACH(CNode* pnode, vNodesDisconnectedCopy)
{
// wait until threads are done using it
if (pnode->GetRefCount() <= 0)
{
bool fDelete = false;
{
TRY_LOCK(pnode->cs_vSend, lockSend);
if (lockSend)
{
TRY_LOCK(pnode->cs_vRecvMsg, lockRecv);
if (lockRecv)
{
TRY_LOCK(pnode->cs_inventory, lockInv);
if (lockInv)
fDelete = true;
}
}
}
if (fDelete)
{
vNodesDisconnected.remove(pnode);
delete pnode;
}
}
}
}
if (vNodes.size() != nPrevNodeCount)
{
nPrevNodeCount = vNodes.size();
uiInterface.NotifyNumConnectionsChanged(vNodes.size());
}
//
// Find which sockets have data to receive
//
struct timeval timeout;
timeout.tv_sec = 0;
timeout.tv_usec = 50000; // frequency to poll pnode->vSend
fd_set fdsetRecv;
fd_set fdsetSend;
fd_set fdsetError;
FD_ZERO(&fdsetRecv);
FD_ZERO(&fdsetSend);
FD_ZERO(&fdsetError);
SOCKET hSocketMax = 0;
bool have_fds = false;
BOOST_FOREACH(SOCKET hListenSocket, vhListenSocket) {
FD_SET(hListenSocket, &fdsetRecv);
hSocketMax = max(hSocketMax, hListenSocket);
have_fds = true;
}
{
LOCK(cs_vNodes);
BOOST_FOREACH(CNode* pnode, vNodes)
{
if (pnode->hSocket == INVALID_SOCKET)
continue;
FD_SET(pnode->hSocket, &fdsetError);
hSocketMax = max(hSocketMax, pnode->hSocket);
have_fds = true;
// Implement the following logic:
// * If there is data to send, select() for sending data. As this only
// happens when optimistic write failed, we choose to first drain the
// write buffer in this case before receiving more. This avoids
// needlessly queueing received data, if the remote peer is not themselves
// receiving data. This means properly utilizing TCP flow control signalling.
// * Otherwise, if there is no (complete) message in the receive buffer,
// or there is space left in the buffer, select() for receiving data.
// * (if neither of the above applies, there is certainly one message
// in the receiver buffer ready to be processed).
// Together, that means that at least one of the following is always possible,
// so we don't deadlock:
// * We send some data.
// * We wait for data to be received (and disconnect after timeout).
// * We process a message in the buffer (message handler thread).
{
TRY_LOCK(pnode->cs_vSend, lockSend);
if (lockSend && !pnode->vSendMsg.empty()) {
FD_SET(pnode->hSocket, &fdsetSend);
continue;
}
}
{
TRY_LOCK(pnode->cs_vRecvMsg, lockRecv);
if (lockRecv && (
pnode->vRecvMsg.empty() || !pnode->vRecvMsg.front().complete() ||
pnode->GetTotalRecvSize() <= ReceiveFloodSize()))
FD_SET(pnode->hSocket, &fdsetRecv);
}
}
}
int nSelect = select(have_fds ? hSocketMax + 1 : 0,
&fdsetRecv, &fdsetSend, &fdsetError, &timeout);
boost::this_thread::interruption_point();
if (nSelect == SOCKET_ERROR)
{
if (have_fds)
{
int nErr = WSAGetLastError();
printf("socket select error %d\n", nErr);
for (unsigned int i = 0; i <= hSocketMax; i++)
FD_SET(i, &fdsetRecv);
}
FD_ZERO(&fdsetSend);
FD_ZERO(&fdsetError);
MilliSleep(timeout.tv_usec/1000);
}
//
// Accept new connections
//
BOOST_FOREACH(SOCKET hListenSocket, vhListenSocket)
if (hListenSocket != INVALID_SOCKET && FD_ISSET(hListenSocket, &fdsetRecv))
{
#ifdef USE_IPV6
struct sockaddr_storage sockaddr;
#else
struct sockaddr sockaddr;
#endif
socklen_t len = sizeof(sockaddr);
SOCKET hSocket = accept(hListenSocket, (struct sockaddr*)&sockaddr, &len);
CAddress addr;
int nInbound = 0;
if (hSocket != INVALID_SOCKET)
if (!addr.SetSockAddr((const struct sockaddr*)&sockaddr))
printf("Warning: Unknown socket family\n");
{
LOCK(cs_vNodes);
BOOST_FOREACH(CNode* pnode, vNodes)
if (pnode->fInbound)
nInbound++;
}
if (hSocket == INVALID_SOCKET)
{
int nErr = WSAGetLastError();
if (nErr != WSAEWOULDBLOCK)
printf("socket error accept failed: %d\n", nErr);
}
else if (nInbound >= nMaxConnections - MAX_OUTBOUND_CONNECTIONS)
{
{
LOCK(cs_setservAddNodeAddresses);
if (!setservAddNodeAddresses.count(addr))
closesocket(hSocket);
}
}
else if (CNode::IsBanned(addr))
{
printf("connection from %s dropped (banned)\n", addr.ToString().c_str());
closesocket(hSocket);
}
else
{
printf("accepted connection %s\n", addr.ToString().c_str());
CNode* pnode = new CNode(hSocket, addr, "", true);
pnode->AddRef();
{
LOCK(cs_vNodes);
vNodes.push_back(pnode);
}
}
}
//
// Service each socket
//
vector<CNode*> vNodesCopy;
{
LOCK(cs_vNodes);
vNodesCopy = vNodes;
BOOST_FOREACH(CNode* pnode, vNodesCopy)
pnode->AddRef();
}
BOOST_FOREACH(CNode* pnode, vNodesCopy)
{
boost::this_thread::interruption_point();
//
// Receive
//
if (pnode->hSocket == INVALID_SOCKET)
continue;
if (FD_ISSET(pnode->hSocket, &fdsetRecv) || FD_ISSET(pnode->hSocket, &fdsetError))
{
TRY_LOCK(pnode->cs_vRecvMsg, lockRecv);
if (lockRecv)
{
{
// typical socket buffer is 8K-64K
char pchBuf[0x10000];
int nBytes = recv(pnode->hSocket, pchBuf, sizeof(pchBuf), MSG_DONTWAIT);
if (nBytes > 0)
{
if (!pnode->ReceiveMsgBytes(pchBuf, nBytes))
pnode->CloseSocketDisconnect();
pnode->nLastRecv = GetTime();
pnode->nRecvBytes += nBytes;
}
else if (nBytes == 0)
{
// socket closed gracefully
if (!pnode->fDisconnect)
printf("socket closed\n");
pnode->CloseSocketDisconnect();
}
else if (nBytes < 0)
{
// error
int nErr = WSAGetLastError();
if (nErr != WSAEWOULDBLOCK && nErr != WSAEMSGSIZE && nErr != WSAEINTR && nErr != WSAEINPROGRESS)
{
if (!pnode->fDisconnect)
printf("socket recv error %d\n", nErr);
pnode->CloseSocketDisconnect();
}
}
}
}
}
//
// Send
//
if (pnode->hSocket == INVALID_SOCKET)
continue;
if (FD_ISSET(pnode->hSocket, &fdsetSend))
{
TRY_LOCK(pnode->cs_vSend, lockSend);
if (lockSend)
SocketSendData(pnode);
}
//
// Inactivity checking
//
if (pnode->vSendMsg.empty())
pnode->nLastSendEmpty = GetTime();
if (GetTime() - pnode->nTimeConnected > 60)
{
if (pnode->nLastRecv == 0 || pnode->nLastSend == 0)<|fim▁hole|> }
else if (GetTime() - pnode->nLastSend > 90*60 && GetTime() - pnode->nLastSendEmpty > 90*60)
{
printf("socket not sending\n");
pnode->fDisconnect = true;
}
else if (GetTime() - pnode->nLastRecv > 90*60)
{
printf("socket inactivity timeout\n");
pnode->fDisconnect = true;
}
}
}
{
LOCK(cs_vNodes);
BOOST_FOREACH(CNode* pnode, vNodesCopy)
pnode->Release();
}
MilliSleep(10);
}
}
#ifdef USE_UPNP
void ThreadMapPort()
{
std::string port = strprintf("%u", GetListenPort());
const char * multicastif = 0;
const char * minissdpdpath = 0;
struct UPNPDev * devlist = 0;
char lanaddr[64];
#ifndef UPNPDISCOVER_SUCCESS
/* miniupnpc 1.5 */
devlist = upnpDiscover(2000, multicastif, minissdpdpath, 0);
#else
/* miniupnpc 1.6 */
int error = 0;
devlist = upnpDiscover(2000, multicastif, minissdpdpath, 0, 0, &error);
#endif
struct UPNPUrls urls;
struct IGDdatas data;
int r;
r = UPNP_GetValidIGD(devlist, &urls, &data, lanaddr, sizeof(lanaddr));
if (r == 1)
{
if (fDiscover) {
char externalIPAddress[40];
r = UPNP_GetExternalIPAddress(urls.controlURL, data.first.servicetype, externalIPAddress);
if(r != UPNPCOMMAND_SUCCESS)
printf("UPnP: GetExternalIPAddress() returned %d\n", r);
else
{
if(externalIPAddress[0])
{
printf("UPnP: ExternalIPAddress = %s\n", externalIPAddress);
AddLocal(CNetAddr(externalIPAddress), LOCAL_UPNP);
}
else
printf("UPnP: GetExternalIPAddress failed.\n");
}
}
string strDesc = "CowCoin " + FormatFullVersion();
try {
loop {
#ifndef UPNPDISCOVER_SUCCESS
/* miniupnpc 1.5 */
r = UPNP_AddPortMapping(urls.controlURL, data.first.servicetype,
port.c_str(), port.c_str(), lanaddr, strDesc.c_str(), "TCP", 0);
#else
/* miniupnpc 1.6 */
r = UPNP_AddPortMapping(urls.controlURL, data.first.servicetype,
port.c_str(), port.c_str(), lanaddr, strDesc.c_str(), "TCP", 0, "0");
#endif
if(r!=UPNPCOMMAND_SUCCESS)
printf("AddPortMapping(%s, %s, %s) failed with code %d (%s)\n",
port.c_str(), port.c_str(), lanaddr, r, strupnperror(r));
else
printf("UPnP Port Mapping successful.\n");;
MilliSleep(20*60*1000); // Refresh every 20 minutes
}
}
catch (boost::thread_interrupted)
{
r = UPNP_DeletePortMapping(urls.controlURL, data.first.servicetype, port.c_str(), "TCP", 0);
printf("UPNP_DeletePortMapping() returned : %d\n", r);
freeUPNPDevlist(devlist); devlist = 0;
FreeUPNPUrls(&urls);
throw;
}
} else {
printf("No valid UPnP IGDs found\n");
freeUPNPDevlist(devlist); devlist = 0;
if (r != 0)
FreeUPNPUrls(&urls);
}
}
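// Start or stop the UPnP port-mapping worker thread, depending on fUseUPnP.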
void MapPort(bool fUseUPnP)
{
static boost::thread* upnp_thread = NULL;
if (fUseUPnP)
{
if (upnp_thread) {
upnp_thread->interrupt();
upnp_thread->join();
delete upnp_thread;
}
upnp_thread = new boost::thread(boost::bind(&TraceThread<boost::function<void()> >, "upnp", &ThreadMapPort));
}
else if (upnp_thread) {
upnp_thread->interrupt();
upnp_thread->join();
delete upnp_thread;
upnp_thread = NULL;
}
}
#else
void MapPort(bool)
{
// Intentionally left blank.
}
#endif
// DNS seeds
// Each pair gives a source name and a seed name.
// The first name is used as information source for addrman.
// The second name should resolve to a list of seed addresses.
static const char *strMainNetDNSSeed[][2] = {
{"cryptolife.net","wallet.cryptolife.net"},
{"cryptolife.net","explore.cryptolife.net"},
{"cryptolife.net","seed1.cryptolife.net"},
{"cryptolife.net","seed2.cryptolife.net"},
{NULL, NULL}
};
static const char *strTestNetDNSSeed[][2] = {
{NULL, NULL}
};
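// Resolve the DNS seeds above and feed the results into addrman; when a name
// proxy is in use, the seed hostnames are queued as one-shot connections instead.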
void ThreadDNSAddressSeed()
{
static const char *(*strDNSSeed)[2] = fTestNet ? strTestNetDNSSeed : strMainNetDNSSeed;
int found = 0;
printf("Loading addresses from DNS seeds (could take a while)\n");
for (unsigned int seed_idx = 0; strDNSSeed[seed_idx][0] != NULL; seed_idx++) {
if (HaveNameProxy()) {
AddOneShot(strDNSSeed[seed_idx][1]);
} else {
vector<CNetAddr> vaddr;
vector<CAddress> vAdd;
if (LookupHost(strDNSSeed[seed_idx][1], vaddr))
{
BOOST_FOREACH(CNetAddr& ip, vaddr)
{
int nOneDay = 24*3600;
CAddress addr = CAddress(CService(ip, GetDefaultPort()));
addr.nTime = GetTime() - 3*nOneDay - GetRand(4*nOneDay); // use a random age between 3 and 7 days old
vAdd.push_back(addr);
found++;
}
}
addrman.Add(vAdd, CNetAddr(strDNSSeed[seed_idx][0], true));
}
}
printf("%d addresses found from DNS seeds\n", found);
}
unsigned int pnSeed[] =
{
0x119caa6b
// 0x92B9B572, 0xA2F3716E, 0x5F551D90
};
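// Flush the address manager to peers.dat so known peers survive a restart.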
void DumpAddresses()
{
int64 nStart = GetTimeMillis();
CAddrDB adb;
adb.Write(addrman);
printf("Flushed %d addresses to peers.dat %"PRI64d"ms\n",
addrman.size(), GetTimeMillis() - nStart);
}
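// Take one queued one-shot destination and attempt a connection to it,
// re-queueing the destination if the attempt fails.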
void static ProcessOneShot()
{
string strDest;
{
LOCK(cs_vOneShots);
if (vOneShots.empty())
return;
strDest = vOneShots.front();
vOneShots.pop_front();
}
CAddress addr;
CSemaphoreGrant grant(*semOutbound, true);
if (grant) {
if (!OpenNetworkConnection(addr, &grant, strDest.c_str(), true))
AddOneShot(strDest);
}
}
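// Main outbound-connection loop: honours -connect when given, otherwise keeps
// selecting addresses from addrman (seeding it if empty) and dialling out.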
void ThreadOpenConnections()
{
// Connect to specific addresses
if (mapArgs.count("-connect") && mapMultiArgs["-connect"].size() > 0)
{
for (int64 nLoop = 0;; nLoop++)
{
ProcessOneShot();
BOOST_FOREACH(string strAddr, mapMultiArgs["-connect"])
{
CAddress addr;
OpenNetworkConnection(addr, NULL, strAddr.c_str());
for (int i = 0; i < 10 && i < nLoop; i++)
{
MilliSleep(500);
}
}
MilliSleep(500);
}
}
// Initiate network connections
int64 nStart = GetTime();
loop
{
ProcessOneShot();
MilliSleep(500);
CSemaphoreGrant grant(*semOutbound);
boost::this_thread::interruption_point();
// Add seed nodes if IRC isn't working
if (addrman.size()==0 && (GetTime() - nStart > 60) && !fTestNet)
{
std::vector<CAddress> vAdd;
for (unsigned int i = 0; i < ARRAYLEN(pnSeed); i++)
{
// It'll only connect to one or two seed nodes because once it connects,
// it'll get a pile of addresses with newer timestamps.
// Seed nodes are given a random 'last seen time' of between one and two
// weeks ago.
const int64 nOneWeek = 7*24*60*60;
struct in_addr ip;
memcpy(&ip, &pnSeed[i], sizeof(ip));
CAddress addr(CService(ip, GetDefaultPort()));
addr.nTime = GetTime()-GetRand(nOneWeek)-nOneWeek;
vAdd.push_back(addr);
}
addrman.Add(vAdd, CNetAddr("127.0.0.1"));
}
//
// Choose an address to connect to based on most recently seen
//
CAddress addrConnect;
// Only connect out to one peer per network group (/16 for IPv4).
// Do this here so we don't have to critsect vNodes inside mapAddresses critsect.
int nOutbound = 0;
set<vector<unsigned char> > setConnected;
{
LOCK(cs_vNodes);
BOOST_FOREACH(CNode* pnode, vNodes) {
if (!pnode->fInbound) {
setConnected.insert(pnode->addr.GetGroup());
nOutbound++;
}
}
}
int64 nANow = GetAdjustedTime();
int nTries = 0;
loop
{
// use an nUnkBias between 10 (no outgoing connections) and 90 (8 outgoing connections)
CAddress addr = addrman.Select(10 + min(nOutbound,8)*10);
// if we selected an invalid address, restart
if (!addr.IsValid() || setConnected.count(addr.GetGroup()) || IsLocal(addr))
break;
// If we didn't find an appropriate destination after trying 100 addresses fetched from addrman,
// stop this loop, and let the outer loop run again (which sleeps, adds seed nodes, recalculates
// already-connected network ranges, ...) before trying new addrman addresses.
nTries++;
if (nTries > 100)
break;
if (IsLimited(addr))
continue;
// only consider very recently tried nodes after 30 failed attempts
if (nANow - addr.nLastTry < 600 && nTries < 30)
continue;
// do not allow non-default ports, unless after 50 invalid addresses selected already
if (addr.GetPort() != GetDefaultPort() && nTries < 50)
continue;
addrConnect = addr;
break;
}
if (addrConnect.IsValid())
OpenNetworkConnection(addrConnect, &grant);
}
}
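// Keep connections open to every peer listed via -addnode, retrying every two minutes.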
void ThreadOpenAddedConnections()
{
{
LOCK(cs_vAddedNodes);
vAddedNodes = mapMultiArgs["-addnode"];
}
if (HaveNameProxy()) {
while(true) {
list<string> lAddresses(0);
{
LOCK(cs_vAddedNodes);
BOOST_FOREACH(string& strAddNode, vAddedNodes)
lAddresses.push_back(strAddNode);
}
BOOST_FOREACH(string& strAddNode, lAddresses) {
CAddress addr;
CSemaphoreGrant grant(*semOutbound);
OpenNetworkConnection(addr, &grant, strAddNode.c_str());
MilliSleep(500);
}
MilliSleep(120000); // Retry every 2 minutes
}
}
for (unsigned int i = 0; true; i++)
{
list<string> lAddresses(0);
{
LOCK(cs_vAddedNodes);
BOOST_FOREACH(string& strAddNode, vAddedNodes)
lAddresses.push_back(strAddNode);
}
list<vector<CService> > lservAddressesToAdd(0);
BOOST_FOREACH(string& strAddNode, lAddresses)
{
vector<CService> vservNode(0);
if(Lookup(strAddNode.c_str(), vservNode, GetDefaultPort(), fNameLookup, 0))
{
lservAddressesToAdd.push_back(vservNode);
{
LOCK(cs_setservAddNodeAddresses);
BOOST_FOREACH(CService& serv, vservNode)
setservAddNodeAddresses.insert(serv);
}
}
}
// Attempt to connect to each IP for each addnode entry until at least one is successful per addnode entry
// (keeping in mind that addnode entries can have many IPs if fNameLookup)
{
LOCK(cs_vNodes);
BOOST_FOREACH(CNode* pnode, vNodes)
                for (list<vector<CService> >::iterator it = lservAddressesToAdd.begin(); it != lservAddressesToAdd.end();)
                {
                    bool fFound = false;
                    BOOST_FOREACH(CService& addrNode, *(it))
                        if (pnode->addr == addrNode)
                        {
                            fFound = true;
                            break;
                        }
                    // erase() returns the next iterator; the old erase()/it-- pattern was
                    // undefined behaviour when the erased element was begin()
                    if (fFound)
                        it = lservAddressesToAdd.erase(it);
                    else
                        it++;
                }
}
BOOST_FOREACH(vector<CService>& vserv, lservAddressesToAdd)
{
CSemaphoreGrant grant(*semOutbound);
OpenNetworkConnection(CAddress(vserv[i % vserv.size()]), &grant);
MilliSleep(500);
}
MilliSleep(120000); // Retry every 2 minutes
}
}
// if successful, this moves the passed grant to the constructed node
bool OpenNetworkConnection(const CAddress& addrConnect, CSemaphoreGrant *grantOutbound, const char *strDest, bool fOneShot)
{
//
// Initiate outbound network connection
//
boost::this_thread::interruption_point();
if (!strDest)
if (IsLocal(addrConnect) ||
FindNode((CNetAddr)addrConnect) || CNode::IsBanned(addrConnect) ||
FindNode(addrConnect.ToStringIPPort().c_str()))
return false;
if (strDest && FindNode(strDest))
return false;
CNode* pnode = ConnectNode(addrConnect, strDest);
boost::this_thread::interruption_point();
if (!pnode)
return false;
if (grantOutbound)
grantOutbound->MoveTo(pnode->grantOutbound);
pnode->fNetworkNode = true;
if (fOneShot)
pnode->fOneShot = true;
return true;
}
// for now, use a very simple selection metric: the node from which we received
// most recently
double static NodeSyncScore(const CNode *pnode) {
return -pnode->nLastRecv;
}
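// Pick the best-scoring eligible peer and mark it as the node to sync blocks from.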
void static StartSync(const vector<CNode*> &vNodes) {
CNode *pnodeNewSync = NULL;
double dBestScore = 0;
// fImporting and fReindex are accessed out of cs_main here, but only
// as an optimization - they are checked again in SendMessages.
if (fImporting || fReindex)
return;
// Iterate over all nodes
BOOST_FOREACH(CNode* pnode, vNodes) {
// check preconditions for allowing a sync
if (!pnode->fClient && !pnode->fOneShot &&
!pnode->fDisconnect && pnode->fSuccessfullyConnected &&
(pnode->nStartingHeight > (nBestHeight - 144)) &&
(pnode->nVersion < NOBLKS_VERSION_START || pnode->nVersion >= NOBLKS_VERSION_END)) {
// if ok, compare node's score with the best so far
double dScore = NodeSyncScore(pnode);
if (pnodeNewSync == NULL || dScore > dBestScore) {
pnodeNewSync = pnode;
dBestScore = dScore;
}
}
}
// if a new sync candidate was found, start sync!
if (pnodeNewSync) {
pnodeNewSync->fStartSync = true;
pnodeSync = pnodeNewSync;
}
}
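// Per-node message pump: processes received messages and queues outgoing ones,
// sleeping briefly whenever there is nothing left to do.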
void ThreadMessageHandler()
{
SetThreadPriority(THREAD_PRIORITY_BELOW_NORMAL);
while (true)
{
bool fHaveSyncNode = false;
vector<CNode*> vNodesCopy;
{
LOCK(cs_vNodes);
vNodesCopy = vNodes;
BOOST_FOREACH(CNode* pnode, vNodesCopy) {
pnode->AddRef();
if (pnode == pnodeSync)
fHaveSyncNode = true;
}
}
if (!fHaveSyncNode)
StartSync(vNodesCopy);
// Poll the connected nodes for messages
CNode* pnodeTrickle = NULL;
if (!vNodesCopy.empty())
pnodeTrickle = vNodesCopy[GetRand(vNodesCopy.size())];
bool fSleep = true;
BOOST_FOREACH(CNode* pnode, vNodesCopy)
{
if (pnode->fDisconnect)
continue;
// Receive messages
{
TRY_LOCK(pnode->cs_vRecvMsg, lockRecv);
if (lockRecv)
{
if (!ProcessMessages(pnode))
pnode->CloseSocketDisconnect();
if (pnode->nSendSize < SendBufferSize())
{
if (!pnode->vRecvGetData.empty() || (!pnode->vRecvMsg.empty() && pnode->vRecvMsg[0].complete()))
{
fSleep = false;
}
}
}
}
boost::this_thread::interruption_point();
// Send messages
{
TRY_LOCK(pnode->cs_vSend, lockSend);
if (lockSend)
SendMessages(pnode, pnode == pnodeTrickle);
}
boost::this_thread::interruption_point();
}
{
LOCK(cs_vNodes);
BOOST_FOREACH(CNode* pnode, vNodesCopy)
pnode->Release();
}
if (fSleep)
MilliSleep(100);
}
}
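// Create, configure, bind and listen on a socket for addrBind; on failure,
// returns false and leaves a human-readable message in strError.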
bool BindListenPort(const CService &addrBind, string& strError)
{
strError = "";
int nOne = 1;
// Create socket for listening for incoming connections
#ifdef USE_IPV6
struct sockaddr_storage sockaddr;
#else
struct sockaddr sockaddr;
#endif
socklen_t len = sizeof(sockaddr);
if (!addrBind.GetSockAddr((struct sockaddr*)&sockaddr, &len))
{
strError = strprintf("Error: bind address family for %s not supported", addrBind.ToString().c_str());
printf("%s\n", strError.c_str());
return false;
}
SOCKET hListenSocket = socket(((struct sockaddr*)&sockaddr)->sa_family, SOCK_STREAM, IPPROTO_TCP);
if (hListenSocket == INVALID_SOCKET)
{
strError = strprintf("Error: Couldn't open socket for incoming connections (socket returned error %d)", WSAGetLastError());
printf("%s\n", strError.c_str());
return false;
}
#ifdef SO_NOSIGPIPE
// Different way of disabling SIGPIPE on BSD
setsockopt(hListenSocket, SOL_SOCKET, SO_NOSIGPIPE, (void*)&nOne, sizeof(int));
#endif
#ifndef WIN32
// Allow binding if the port is still in TIME_WAIT state after
// the program was closed and restarted. Not an issue on windows.
setsockopt(hListenSocket, SOL_SOCKET, SO_REUSEADDR, (void*)&nOne, sizeof(int));
#endif
#ifdef WIN32
// Set to non-blocking, incoming connections will also inherit this
if (ioctlsocket(hListenSocket, FIONBIO, (u_long*)&nOne) == SOCKET_ERROR)
#else
if (fcntl(hListenSocket, F_SETFL, O_NONBLOCK) == SOCKET_ERROR)
#endif
{
strError = strprintf("Error: Couldn't set properties on socket for incoming connections (error %d)", WSAGetLastError());
printf("%s\n", strError.c_str());
return false;
}
#ifdef USE_IPV6
// some systems don't have IPV6_V6ONLY but are always v6only; others do have the option
// and enable it by default or not. Try to enable it, if possible.
if (addrBind.IsIPv6()) {
#ifdef IPV6_V6ONLY
#ifdef WIN32
setsockopt(hListenSocket, IPPROTO_IPV6, IPV6_V6ONLY, (const char*)&nOne, sizeof(int));
#else
setsockopt(hListenSocket, IPPROTO_IPV6, IPV6_V6ONLY, (void*)&nOne, sizeof(int));
#endif
#endif
#ifdef WIN32
int nProtLevel = 10 /* PROTECTION_LEVEL_UNRESTRICTED */;
        int nParameterId = 23 /* IPV6_PROTECTION_LEVEL */;
// this call is allowed to fail
setsockopt(hListenSocket, IPPROTO_IPV6, nParameterId, (const char*)&nProtLevel, sizeof(int));
#endif
}
#endif
if (::bind(hListenSocket, (struct sockaddr*)&sockaddr, len) == SOCKET_ERROR)
{
int nErr = WSAGetLastError();
if (nErr == WSAEADDRINUSE)
strError = strprintf(_("Unable to bind to %s on this computer. CowCoin is probably already running."), addrBind.ToString().c_str());
else
strError = strprintf(_("Unable to bind to %s on this computer (bind returned error %d, %s)"), addrBind.ToString().c_str(), nErr, strerror(nErr));
printf("%s\n", strError.c_str());
return false;
}
printf("Bound to %s\n", addrBind.ToString().c_str());
// Listen for incoming connections
if (listen(hListenSocket, SOMAXCONN) == SOCKET_ERROR)
{
strError = strprintf("Error: Listening for incoming connections failed (listen returned error %d)", WSAGetLastError());
printf("%s\n", strError.c_str());
return false;
}
vhListenSocket.push_back(hListenSocket);
if (addrBind.IsRoutable() && fDiscover)
AddLocal(addrBind, LOCAL_BIND);
return true;
}
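// Enumerate local interface addresses, register the usable ones, and (unless
// IPv4 is limited) start external-IP discovery on its own thread.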
void static Discover()
{
if (!fDiscover)
return;
#ifdef WIN32
// Get local host IP
char pszHostName[1000] = "";
if (gethostname(pszHostName, sizeof(pszHostName)) != SOCKET_ERROR)
{
vector<CNetAddr> vaddr;
if (LookupHost(pszHostName, vaddr))
{
BOOST_FOREACH (const CNetAddr &addr, vaddr)
{
AddLocal(addr, LOCAL_IF);
}
}
}
#else
// Get local host ip
struct ifaddrs* myaddrs;
if (getifaddrs(&myaddrs) == 0)
{
for (struct ifaddrs* ifa = myaddrs; ifa != NULL; ifa = ifa->ifa_next)
{
if (ifa->ifa_addr == NULL) continue;
if ((ifa->ifa_flags & IFF_UP) == 0) continue;
if (strcmp(ifa->ifa_name, "lo") == 0) continue;
if (strcmp(ifa->ifa_name, "lo0") == 0) continue;
if (ifa->ifa_addr->sa_family == AF_INET)
{
struct sockaddr_in* s4 = (struct sockaddr_in*)(ifa->ifa_addr);
CNetAddr addr(s4->sin_addr);
if (AddLocal(addr, LOCAL_IF))
printf("IPv4 %s: %s\n", ifa->ifa_name, addr.ToString().c_str());
}
#ifdef USE_IPV6
else if (ifa->ifa_addr->sa_family == AF_INET6)
{
struct sockaddr_in6* s6 = (struct sockaddr_in6*)(ifa->ifa_addr);
CNetAddr addr(s6->sin6_addr);
if (AddLocal(addr, LOCAL_IF))
printf("IPv6 %s: %s\n", ifa->ifa_name, addr.ToString().c_str());
}
#endif
}
freeifaddrs(myaddrs);
}
#endif
    // Don't use external IPv4 discovery when -onlynet="IPv6"
if (!IsLimited(NET_IPV4))
NewThread(ThreadGetMyExternalIP, NULL);
}
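// Bring the network layer up: initialize shared state and spawn the worker threads.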
void StartNode(boost::thread_group& threadGroup)
{
if (semOutbound == NULL) {
// initialize semaphore
int nMaxOutbound = min(MAX_OUTBOUND_CONNECTIONS, nMaxConnections);
semOutbound = new CSemaphore(nMaxOutbound);
}
if (pnodeLocalHost == NULL)
pnodeLocalHost = new CNode(INVALID_SOCKET, CAddress(CService("127.0.0.1", 0), nLocalServices));
Discover();
//
// Start threads
//
if (!GetBoolArg("-dnsseed", true))
printf("DNS seeding disabled\n");
else
threadGroup.create_thread(boost::bind(&TraceThread<boost::function<void()> >, "dnsseed", &ThreadDNSAddressSeed));
#ifdef USE_UPNP
// Map ports with UPnP
MapPort(GetBoolArg("-upnp", USE_UPNP));
#endif
// Get addresses from IRC and advertise ours
threadGroup.create_thread(boost::bind(&TraceThread<void (*)()>, "irc", &ThreadIRCSeed));
// Send and receive from sockets, accept connections
threadGroup.create_thread(boost::bind(&TraceThread<void (*)()>, "net", &ThreadSocketHandler));
// Initiate outbound connections from -addnode
threadGroup.create_thread(boost::bind(&TraceThread<void (*)()>, "addcon", &ThreadOpenAddedConnections));
// Initiate outbound connections
threadGroup.create_thread(boost::bind(&TraceThread<void (*)()>, "opencon", &ThreadOpenConnections));
// Process messages
threadGroup.create_thread(boost::bind(&TraceThread<void (*)()>, "msghand", &ThreadMessageHandler));
// Dump network addresses
threadGroup.create_thread(boost::bind(&LoopForever<void (*)()>, "dumpaddr", &DumpAddresses, DUMP_ADDRESSES_INTERVAL * 1000));
}
bool StopNode()
{
printf("StopNode()\n");
GenerateBitcoins(false, NULL);
MapPort(false);
nTransactionsUpdated++;
if (semOutbound)
for (int i=0; i<MAX_OUTBOUND_CONNECTIONS; i++)
semOutbound->post();
MilliSleep(50);
DumpAddresses();
return true;
}
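// Static cleanup object whose destructor closes all sockets and frees
// networking globals when the process shuts down.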
class CNetCleanup
{
public:
CNetCleanup()
{
}
~CNetCleanup()
{
// Close sockets
BOOST_FOREACH(CNode* pnode, vNodes)
if (pnode->hSocket != INVALID_SOCKET)
closesocket(pnode->hSocket);
BOOST_FOREACH(SOCKET hListenSocket, vhListenSocket)
if (hListenSocket != INVALID_SOCKET)
if (closesocket(hListenSocket) == SOCKET_ERROR)
printf("closesocket(hListenSocket) failed with error %d\n", WSAGetLastError());
// clean up some globals (to help leak detection)
BOOST_FOREACH(CNode *pnode, vNodes)
delete pnode;
BOOST_FOREACH(CNode *pnode, vNodesDisconnected)
delete pnode;
vNodes.clear();
vNodesDisconnected.clear();
delete semOutbound;
semOutbound = NULL;
delete pnodeLocalHost;
pnodeLocalHost = NULL;
#ifdef WIN32
// Shutdown Windows Sockets
WSACleanup();
#endif
}
}
instance_of_cnetcleanup;
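// Serialize the transaction once, then relay it to all peers that accept
// transaction relay (respecting any bloom filters they have set).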
void RelayTransaction(const CTransaction& tx, const uint256& hash)
{
CDataStream ss(SER_NETWORK, PROTOCOL_VERSION);
ss.reserve(10000);
ss << tx;
RelayTransaction(tx, hash, ss);
}
void RelayTransaction(const CTransaction& tx, const uint256& hash, const CDataStream& ss)
{
CInv inv(MSG_TX, hash);
{
LOCK(cs_mapRelay);
// Expire old relay messages
while (!vRelayExpiration.empty() && vRelayExpiration.front().first < GetTime())
{
mapRelay.erase(vRelayExpiration.front().second);
vRelayExpiration.pop_front();
}
// Save original serialized message so newer versions are preserved
mapRelay.insert(std::make_pair(inv, ss));
vRelayExpiration.push_back(std::make_pair(GetTime() + 15 * 60, inv));
}
LOCK(cs_vNodes);
BOOST_FOREACH(CNode* pnode, vNodes)
{
if(!pnode->fRelayTxes)
continue;
LOCK(pnode->cs_filter);
if (pnode->pfilter)
{
if (pnode->pfilter->IsRelevantAndUpdate(tx, hash))
pnode->PushInventory(inv);
} else
pnode->PushInventory(inv);
}
}