code        string   lengths 3 to 1.05M
repo_name   string   lengths 4 to 116
path        string   lengths 4 to 991
language    string   9 classes
license     string   15 classes
size        int32    3 to 1.05M
package tests_dominio; import org.junit.Assert; import org.junit.Test; import dominio.Asesino; import dominio.Hechicero; import dominio.Humano; public class TestAsesino { @Test public void testRobar(){ } @Test public void testCritico(){ Humano h = new Humano("Nicolas",new Asesino(),1); Humano h2 = new Humano("Lautaro",new Hechicero(),2); Assert.assertEquals(105, h2.getSalud()); if (h.habilidadCasta1(h2)) Assert.assertTrue(93==h2.getSalud()); else Assert.assertEquals(105, h2.getSalud()); } @Test public void testProbEvasion(){ Humano h = new Humano("Nico",100, 100, 25, 20, 30, new Asesino(0.2, 0.3, 1.5), 0, 1, 1); Assert.assertTrue(0.3==h.getCasta().getProbabilidadEvitarDaño()); h.habilidadCasta2(null); Assert.assertEquals(0.45, h.getCasta().getProbabilidadEvitarDaño(), 0.01); h.habilidadCasta2(null); Assert.assertTrue(0.5==h.getCasta().getProbabilidadEvitarDaño()); } }
programacion-avanzada/jrpg-2017a-dominio
src/test/java/tests_dominio/TestAsesino.java
Java
mit
934
#include <iostream> #include <seqan/graph_types.h> #include <seqan/graph_algorithms.h> #include <seqan/find_motif.h> using namespace seqan; int main () { typedef unsigned int TCargo; typedef Graph<Undirected<TCargo> > TGraph; typedef VertexDescriptor<TGraph>::Type TVertexDescriptor; TGraph g; TVertexDescriptor edges[]={1,0,0,4,2,1,4,1,5,1,6,2,3,2,2,3,7,3,5,4,6,5,5,6,7,6,7,7}; addEdges(g,edges,14); char letters[]={'a','b','c','d','e','f','g','h'}; String<char> nameMap; assignVertexMap(g, nameMap, letters); TVertexDescriptor start = 0; typedef Iterator<TGraph, DfsPreorder>::Type TDfsIterator; TDfsIterator dfsIt(g, start); std::cout << "Iterate from '" << getProperty(nameMap, start) << "' in depth-first-search ordering: "; while(!atEnd(dfsIt)) { std::cout << getProperty(nameMap, getValue(dfsIt)) << ", "; goNext(dfsIt); } std::cout << std::endl; ::std::cout << g << ::std::endl; return 0; }
bkahlert/seqan-research
raw/pmsb13/pmsb13-data-20130530/sources/fjt74l9mlcqisdus/2013-04-12T11-39-06.554+0200/sandbox/my_sandbox/apps/tutorial_20/tutorial_20.cpp
C++
mit
989
๏ปฟ var report_test_url = "reports\\BSV_GC_n_08_du_22_octobre_2013.pdf"; var report_dir = "files/"; var report_extension = ".pdf"; /***************************************************************************************************************************/ /* report_panel */ function report_panel(panel, report_panel){//, on_search_report) { report_panel.report_list = report_panel.find("#report_list"); report_panel.report_list_count = report_panel.find("#report_list_count"); report_panel.report_total_count = report_panel.find("#report_total_count"); report_panel.report_filter = report_panel.find("#report_filter"); report_panel.report_filter_years = report_panel.find("#report_filter_years"); report_panel.report_filter_areas = report_panel.find("#report_filter_areas"); report_panel.report_filter_reset = report_panel.find(".filter_reset"); report_panel.report_sorter_panel = report_panel.find("#report_sorter").hide(); report_panel.report_sorters = report_panel.report_sorter_panel.find(".report_sorter_item"); report_panel.current_sort = "date"; report_panel.report_text_filter = report_panel.find("#report_text_filter").hide(); report_panel.btn_filter_text = report_panel.find("#btn_filter_text"); report_panel.opened_report_ids = new Array(); // Init filter reset report_panel.report_filter.hide(); report_panel.find("#filter_reset_years").click(function () { jQuery("#report_filter_years div").removeClass("selected"); report_panel.selected_year = null; report_panel.filter_on_change(); }); report_panel.find("#filter_reset_areas").click(function () { jQuery("#report_filter_areas div").removeClass("selected"); report_panel.selected_area = null; report_panel.filter_on_change(); }); // Sorters report_panel.report_sorters.click(function () { report_panel.sort_changed(jQuery(this)); }); report_panel.cover_up = panel.get_waiting_cover_up(report_panel, 100); /* List management *********************************************************/ // Search succeeded report_panel.search_succeeded = function (response) { console.time("[Report list] Create DOM on new search"); report_panel.opened_report_ids = new Array(); report_panel.selected_year = null; report_panel.selected_area = null; report_panel.report_sorter_panel.show(); report_panel.report_text_filter.show(); report_panel.clear_list(); report_panel.reports = response.Reports; if (report_panel.current_sort != "date") report_panel.sort_reports_array(report_panel.current_sort); report_panel.set_counts(); report_panel.create_list(response); report_panel.create_filters(response); console.timeEnd("[Report list] Create DOM on new search"); report_panel.cover_up.doFadeOut(); } /* Report list DOM creation *********************************************************/ // Show report list report_panel.set_counts = function () { report_panel.report_list_count.text(report_panel.reports.length); report_panel.report_total_count.text(report_panel.reports.length); } // Show report list report_panel.create_list = function () { var html = ""; for (i = 0; i < report_panel.reports.length; i++) { html += report_panel.create_report_item(report_panel.reports[i],i); } report_panel.report_list.html(html); jQuery("#report_list a").click(function () { var report_item = jQuery(this).parent().parent(); report_panel.opened_report_ids.push(report_item.attr("id_report")); report_item.addClass("opened"); }); } // Create report item list report_panel.create_report_item = function (data, index) { var opened = jQuery.inArray("" + data.Id, report_panel.opened_report_ids) != -1; var report_item = "<div 
class='report_item" + ( (index % 2 == 1) ? " alt" : "") + ((opened) ? " opened" : "") + "' year='" + data.Year + "' id_area='" + data.Id_Area + "' id_report='" + data.Id + "' >" + "<div class='report_area'>" + "<div class='cube'></div>" + "<div class='report_area_name'>" + data.AreaName + "</div>" + "<div class='report_date'>" + data.DateString + "</div>" + "</div>" + "<div class='report_name'>" + "<a href='" + report_dir + data.Name + report_extension + "' target='_blank' title='" + data.Name + "'>" + data.Name + "</a>" + "<div class='report_pdf'></div>" + "</div>" + "</div>" return report_item; } // Clear list report_panel.clear_list = function () { report_panel.report_list.empty(); report_panel.report_filter_areas.empty(); report_panel.report_filter_years.empty(); report_panel.report_list_count.text("0"); report_panel.report_total_count.text("0"); } /* Filter Methods *********************************************************/ // Filters creation report_panel.create_filters = function (response) { var reports_by_year = d3.nest() .key(function (d) { return d.Year; }) .rollup(function (g) { return g.length; }) .entries(response.Reports); for (i = 0; i < response.Years.length; i++) { var year_item = jQuery("<div year='" + reports_by_year[i].key + "'></div>") .append("<span class='filter_year_item_text'>" + reports_by_year[i].key + "</span>") .append("<span class='filter_year_item_count'>(" + reports_by_year[i].values + ")</span>") .click(function () { jQuery("#report_filter_years div").removeClass("selected"); jQuery(this).addClass("selected"); report_panel.selected_year = jQuery(this).attr("year"); report_panel.filter_on_change(); }) .appendTo(report_panel.report_filter_years); } report_panel.report_filter.show(); } report_panel.filter_area = function (id_area) { report_panel.selected_area = id_area; report_panel.filter_on_change(); } // On filter selection report_panel.filter_on_change = function () { report_panel.report_list.find(".report_item").hide(); var class_to_show = ".report_item"; if (report_panel.selected_area != null) class_to_show += "[id_area='" + report_panel.selected_area + "']"; if (report_panel.selected_year != null) class_to_show += "[year='" + report_panel.selected_year + "']"; var to_show = report_panel.report_list.find(class_to_show); to_show.show(); report_panel.report_list_count.text(to_show.length); } /* Sort Methods *********************************************************/ // on Sort report_panel.sort_changed = function (sorter) { report_panel.report_sorters.removeClass("selected"); sorter.addClass("selected") var previous_sort = report_panel.current_sort; report_panel.current_sort = sorter.attr("sort"); if (previous_sort == report_panel.current_sort) { if (report_panel.current_sort.indexOf("_desc") != -1) { report_panel.current_sort = report_panel.current_sort.replace("_desc", ""); } else { report_panel.current_sort = report_panel.current_sort + "_desc"; } } report_panel.cover_up.fadeIn(duration_fade_short, function () { report_panel.sort_list(report_panel.current_sort); report_panel.cover_up.fadeOut(duration_fade_short); }); } // Sort list report_panel.sort_list = function (sort_type) { report_panel.report_list.empty(); report_panel.sort_reports_array(report_panel.current_sort); report_panel.create_list(); report_panel.filter_on_change(); } // Data sorting function report_panel.sort_reports_array = function (sort_type) { var sort_func = null; if (sort_type == "name") { sort_func = report_panel.sort_name; } else if (sort_type == "name_desc") { sort_func = 
report_panel.sort_name_desc; } else if (sort_type == "area_name") { sort_func = report_panel.sort_area_name; } else if (sort_type == "area_name_desc") { sort_func = report_panel.sort_area_name_desc; } else if (sort_type == "date") { sort_func = report_panel.sort_date; } else if (sort_type == "date_desc") { sort_func = report_panel.sort_date_desc; } report_panel.reports.sort(sort_func); } // Date sort delegate report_panel.sort_date = function (e_1, e_2) { var a1 = parseInt(e_1.Date.substr(6)), b1 = parseInt(e_2.Date.substr(6)); if (a1 == b1) return 0; return a1 > b1 ? 1 : -1; } // Arean name sort delegate report_panel.sort_area_name = function (e_1, e_2) { var a1 = e_1.AreaName, b1 = e_2.AreaName; if (a1 == b1) return 0; return a1 > b1 ? 1 : -1; } // file name sort delegate report_panel.sort_name = function (e_1, e_2) { var a1 = e_1.Name.toLowerCase(), b1 = e_2.Name.toLowerCase(); if (a1 == b1) return 0; return a1 > b1 ? 1 : -1; } // Date sort delegate report_panel.sort_date_desc = function (e_1, e_2) { var a1 = parseInt(e_1.Date.substr(6)), b1 = parseInt(e_2.Date.substr(6)); if (a1 == b1) return 0; return a1 < b1 ? 1 : -1; } // Arean name sort delegate report_panel.sort_area_name_desc = function (e_1, e_2) { var a1 = e_1.AreaName, b1 = e_2.AreaName; if (a1 == b1) return 0; return a1 < b1 ? 1 : -1; } // file name sort delegate report_panel.sort_name_desc = function (e_1, e_2) { var a1 = e_1.Name.toLowerCase(), b1 = e_2.Name.toLowerCase(); if (a1 == b1) return 0; return a1 < b1 ? 1 : -1; } report_panel.open_report = function (id_report) { var report_item_anchor = report_panel.find("#report_list .report_item[id_report='" + id_report + "'] a"); report_item_anchor.click(); window.open(report_item_anchor.attr("href"), "_blank"); } return report_panel; }
win-stub/PestObserver
web/scripts/report_panel.js
JavaScript
mit
11,254
// LICENSE package com.forgedui.editor.edit; import java.beans.PropertyChangeEvent; import java.util.ArrayList; import java.util.List; import org.eclipse.draw2d.geometry.Rectangle; import org.eclipse.gef.commands.Command; import org.eclipse.gef.requests.CreateRequest; import com.forgedui.editor.GUIEditorPlugin; import com.forgedui.editor.edit.command.AddToTableViewElementCommand; import com.forgedui.editor.edit.policy.ContainerEditPolicy; import com.forgedui.editor.figures.TableViewFigure; import com.forgedui.model.titanium.SearchBar; import com.forgedui.model.titanium.TableView; import com.forgedui.model.titanium.TableViewRow; import com.forgedui.model.titanium.TableViewSection; import com.forgedui.model.titanium.TitaniumUIBoundedElement; import com.forgedui.model.titanium.TitaniumUIElement; /** * @author Dmitry {dmitry.grimm@gmail.com} * */ public class TableViewEditPart extends TitaniumContainerEditPart<TableView> { @SuppressWarnings({ "rawtypes", "unchecked" }) @Override public List<?> getModelChildren_() { List list = new ArrayList(super.getModelChildren_()); TableView model = (TableView)getModel(); if (model.getHeaderView() != null){ list.add(model.getHeaderView()); } if (model.getFooterView() != null){ list.add(model.getFooterView()); } if ((model.getSearchHidden() == null || !model.getSearchHidden()) && model.getSearch() != null){ list.add(model.getSearch()); } return list; } /** * Making sure to refresh things visual. */ @Override public void propertyChange(PropertyChangeEvent evt) { final String propName = evt.getPropertyName(); if (TableView.PROP_HEADER_VIEW.equals(propName) || TableView.PROP_FOOTER_VIEW.equals(propName) || TableView.PROP_SEARCH_VIEW.equals(propName) || TableView.PROP_SEARCH_VIEW_HIDDEN.equals(propName) || TableView.PROP_MIN_ROW_HEIGHT.equals(propName) || TableView.PROP_MAX_ROW_HEIGHT.equals(propName) ) { refresh(); } else { super.propertyChange(evt); } } @Override protected void createEditPolicies() { super.createEditPolicies(); installEditPolicy(ContainerEditPolicy.KEY, new TableViewEditPolicy()); } @Override protected void refreshVisuals() { TableView model = (TableView)getModel(); TableViewFigure figure = (TableViewFigure)getFigure(); figure.setHeaderTitle(model.getHeaderTitle()); figure.setFooterTitle(model.getFooterTitle()); figure.setHasHeaderView(model.getHeaderView() != null); figure.setHasFooterView(model.getFooterView() != null); super.refreshVisuals(); } } class TableViewEditPolicy extends ContainerEditPolicy { protected Command getCreateCommand(CreateRequest request) { // And then passed those to the validate facility. 
Object newObject = request.getNewObject(); Object container = getHost().getModel(); if (!GUIEditorPlugin.getComponentValidator().validate(newObject, container)) return null; if (!(newObject instanceof TableViewRow) && !(newObject instanceof TableViewSection) && newObject instanceof TitaniumUIElement){ Rectangle r = (Rectangle)getConstraintFor(request); if (r != null){ TitaniumUIBoundedElement child = (TitaniumUIBoundedElement) newObject; if (container instanceof TableView){ TableView view = (TableView) getHost().getModel(); if (child instanceof SearchBar && view.getSearch() == null){ return new AddToTableViewElementCommand(view, child, r, true); } else if (GUIEditorPlugin.getComponentValidator().isView(child)){ if (r.y <= view.getDimension().height / 2){ if (view.getHeaderView() == null){ return new AddToTableViewElementCommand(view, child, r, true); } } else if (view.getFooterView() == null){ return new AddToTableViewElementCommand(view, child, r, false); } } return null;//Can't drop } } } return super.getCreateCommand(request); } /*@Override protected Object getConstraintFor(CreateRequest request) { Rectangle r = (Rectangle) super.getConstraintFor(request); r.x = 0; return r; }*/ }
ShoukriKattan/ForgedUI-Eclipse
com.forgedui.editor/src/com/forgedui/editor/edit/TableViewEditPart.java
Java
mit
4,096
/* Copyright (c) 2003-2019, CKSource - Frederico Knabben. All rights reserved. For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license */ CKEDITOR.plugins.setLang( 'about', 'pt-br', { copy: 'Copyright &copy; $1. Todos os direitos reservados.', dlgTitle: 'Sobre o CKEditor 4', moreInfo: 'Para informações sobre a licença por favor visite o nosso site:' } );
otto-torino/gino
ckeditor/plugins/about/lang/pt-br.js
JavaScript
mit
400
package com.daviancorp.android.data.database; import android.database.Cursor; import android.database.CursorWrapper; import com.daviancorp.android.data.classes.Location; /** * A convenience class to wrap a cursor that returns rows from the "locations" * table. The {@link getLocation()} method will give you a Location instance * representing the current row. */ public class LocationCursor extends CursorWrapper { public LocationCursor(Cursor c) { super(c); } /** * Returns a Location object configured for the current row, or null if the * current row is invalid. */ public Location getLocation() { if (isBeforeFirst() || isAfterLast()) return null; Location location = new Location(); long locationId = getLong(getColumnIndex(S.COLUMN_LOCATIONS_ID)); String name = getString(getColumnIndex(S.COLUMN_LOCATIONS_NAME)); String fileLocation = getString(getColumnIndex(S.COLUMN_LOCATIONS_MAP)); location.setId(locationId); location.setName(name); location.setFileLocation(fileLocation); return location; } }
dbooga/MonsterHunter3UDatabase
MonsterHunter3UDatabase/src/com/daviancorp/android/data/database/LocationCursor.java
Java
mit
1,054
<div class="col-md-2"> </div> <div class="col-md-8" style="margin-top: 2%;"> <?php if(isset($editar)){ echo form_open('Pacientes/savePaciente'); }else{ echo form_open('Pacientes/addPaciente'); } ?> <div class="panel panel-info"> <div class="panel-heading"> <center> <li class="fa fa-user-plus"></li> <?php if(isset($editar)){ echo "Editar ".$editar->Nombre; }else{ echo "Agregar"; } ?> </center> </div> <div class="panel-body"> <div class="row" style="margin-top: 1%"> <div class="col-md-8"> <div class="form-group"> <?php if(isset($editar)){ echo "<input name='id' type='hidden' value='".$editar->idPacientes."' >"; } ?> <label><li class="fa fa-edit"></li> Nombre</label> <input autofocus type="text" name="nombre" class="form-control" placeholder="Nombre Completo" required value="<?php if(isset($editar)){ echo $editar->Nombre;}?>" > </div> </div> <div class="col-md-4"> <div class="from-group"> <label><li class="fa fa-calendar"></li> Fecha</label> <input type="date" name="fecha" class="form-control" value="<?php if(isset($editar)){ echo $editar->Registro;}else{ echo date("Y-m-d");} ?>" required> </div> </div> </div> <div class="row" style="margin-top: 1%;"> <div class="col-md-6"> <div class="form-group"> <label class="control-label"><li class="fa fa-home"></li> Domicilio</label> <input class="form-control" type="text" name="domicilio" required placeholder="Calle y Numero" value="<?php if(isset($editar)){ echo $editar->Domicilio;}?>"> </div> </div> <div class="col-md-6"> <div class="form-group"> <label class="control-label"><li class="fa fa-check"></li> Ocupacion</label> <input class="form-control" type="text" name="ocupacion" required placeholder="Ocupacion..." value="<?php if(isset($editar)){ echo $editar->Ocupacion;}?>"> </div> </div> </div> <div class="row" style="margin-top: 1%;"> <div class="col-md-6"> <div class="form-group"> <label class="control-label"><li class="fa fa-phone"></li> Telefono</label> <input class="form-control" type="tel" name="telefono" required placeholder="Telefono" value="<?php if(isset($editar)){ echo $editar->Tel;}?>"> </div> </div> <div class="col-md-6"> <div class="form-group"> <label class="control-label"><li class="fa fa-question"></li> Referencias</label> <input class="form-control" type="text" name="referencia" placeholder="Referencias..." value="<?php if(isset($editar)){ echo $editar->Referencia;}?>"> </div> </div> </div> <div class="row" style="margin-top: 1%;"> <div class="col-md-6"> <div class="form-group"> <label class="control-label"><li class="fa fa-venus-mars"></li> Sexo</label> <select class="form-control" name="sexo" required> <option></option> <option value="0" <?php if(isset($editar) && $editar->Sexo == 0){ echo 'selected';}?> >Hombre</option> <option value="1" <?php if(isset($editar) && $editar->Sexo == 1){ echo 'selected';}?> >Mujer</option> </select> </div> </div> <div class="col-md-6"> <div class="form-group"> <label class="control-label"><li class="fa fa-clock-o"></li> Edad</label> <input class="form-control" type="number" name="edad" placeholder="Edad" required min="1" value="<?php if(isset($editar)){ echo $editar->Edad;}?>"> </div> </div> </div> <div class="col-md-4"> </div> <div class="col-md-4"> <button class="btn btn-success btn-block" type="submit"><li class="fa fa-check-circle"></li> <?php if(isset($editar)){ echo "Guardar";}else{ echo "Agregar";}?></button> </div> <div class="col-md-4"> </div> </div> </div> </form> </div> </div> <div class="col-md-2"> </div>
nykteus/Consultorio
application/views/Contenido/frmAddPaciente.php
PHP
mit
4,871
var bind = require('bind'); var debug = require('debug')('uj:app'); var Entity = require('./entity'); var inherit = require('inherit'); /** * Initialize a new `App` with `options`. * * @param {Object} options */ function App (options) { this.defaults = {} this.debug = debug; Entity.call(this, options); } /** * Inherit `Entity` */ inherit(App, Entity); /** * Load saved app `id` or `traits` from storage. */ App.prototype.load = function () { Entity.prototype.load.call(this); }; /** * Expose the app singleton. */ module.exports = bind.all(new App()); /** * Expose the `App` constructor. */ module.exports.App = App;
prateekbhatt/userjoy
apps/cdn/lib/app.js
JavaScript
mit
655
//================================================================ // RS_ChangeWindowTextColorSafely.js // --------------------------------------------------------------- // The MIT License // Copyright (c) 2017 biud436 // --------------------------------------------------------------- // Free for commercial and non commercial use. //================================================================ /*:ko * @target MV * @plugindesc ํŠน์ • ์ฐฝ์˜ ํ…์ŠคํŠธ ์ƒ‰์ƒ์„ ์›ํ•˜๋Š” ์ƒ‰์ƒ์œผ๋กœ ๋ณ€๊ฒฝํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค <RS_ChangeWindowTextColorSafely> * @author biud436 * * @param windowList * @text ์‚ฌ์šฉ์ž ์ •์˜ ์ƒ‰์ƒ * @type note * @desc ๋„์›€๋ง์„ ์ฐธ๊ณ ํ•˜์„ธ์š”! * @default "" * * @help * ============================================================================= * ์‚ฌ์šฉ ๋ฐฉ๋ฒ• * ============================================================================= * ๊ฐ ์ฐฝ์— ์„œ๋กœ ๋‹ค๋ฅธ ํ…์ŠคํŠธ ์ƒ‰์ƒ์„ ์ ์šฉํ•˜๋ ค๋ฉด, * ์‚ฌ์šฉ์ž ์ •์˜ ์ƒ‰์ƒ ๋งค๊ฐœ๋ณ€์ˆ˜์— ๋‹ค์Œ ๋…ธํŠธ ํƒœ๊ทธ๋ฅผ ์ž…๋ ฅํ•ด์•ผ ํ•ฉ๋‹ˆ๋‹ค. * * <Window_ItemList normalColor #ff0000> * <Window_SkillList normalColor #ffff00> * <Window_SkillList crisisColor #ff0000> * * ๋…ธํŠธ ํƒœ๊ทธ๋Š” ํด๋ž˜์Šค ์ด๋ฆ„๊ณผ ํ•ด๋‹น ํด๋ž˜์Šค์˜ ๋ฉ”์†Œ๋“œ ์ด๋ฆ„ ๊ทธ๋ฆฌ๊ณ  ์ƒ‰์ƒ ๊ฐ’์„ ์ œ๊ณตํ•ด์•ผ ํ•˜๋ฏ€๋กœ, * ์ •ํ™•ํžˆ ์ž…๋ ฅํ•˜์‹œ๊ธฐ ๋ฐ”๋ž๋‹ˆ๋‹ค. * * ์ •๋ง ๋งŽ์€ ๋ฉ”์†Œ๋“œ๋ฅผ ๋ฐ”๊ฟ€ ์ˆ˜ ์žˆ์ง€๋งŒ ๋ชจ๋‘ ํ‘œ๊ธฐํ•˜์ง„ ์•Š์•˜์Šต๋‹ˆ๋‹ค. * * ๋ฐ”๋€ ์ƒ‰์ƒ์€ ๊ฒŒ์ž„ ๋‚ด์—์„œ ํ™•์ธํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. * * ============================================================================= * ๋ณ€๊ฒฝ ๊ธฐ๋ก * ============================================================================= * 2017.12.21 (v1.0.0) - First Release. */ /*: * @target MV * @plugindesc This plugin allows you to change the text color for window as you desired. <RS_ChangeWindowTextColorSafely> * @author biud436 * * @param windowList * @text Window List * @type note * @desc Refer to a help documentation * @default "" * * @help * * We're going to define each window a different special color. To quickly define, * We must use to define a notetag in the plugin parameter called 'Window List' * * <Window_ItemList normalColor #ff0000> * <Window_SkillList normalColor #ffff00> * <Window_SkillList crisisColor #ff0000> * * Note tags provide the information likes as a class name and method name, * color value for window. You can see how the text color for window that is * changed in the game. * * ============================================================================= * Change Log * ============================================================================= * 2017.12.21 (v1.0.0) - First Release. 
*/ var Imported = Imported || {}; Imported.RS_ChangeWindowTextColorSafely = true; var RS = RS || {}; RS.Utils = RS.Utils || {}; (() => { let parameters = $plugins.filter(function (i) { return i.description.contains("<RS_ChangeWindowTextColorSafely>"); }); parameters = parameters.length > 0 && parameters[0].parameters; RS.Utils.jsonParse = function (str) { const retData = JSON.parse(str, function (k, v) { try { return RS.Utils.jsonParse(v); } catch (e) { return v; } }); return retData; }; const defaultWindowClasses = RS.Utils.jsonParse(parameters["windowList"]); Utils.changeWindowTextColorSafely = function (NOTETAGS) { let clsName = ""; let funcName = ""; let color = ""; let done = false; const notetags = NOTETAGS.split(/[\r\n]+/); notetags.forEach((note) => { if (note.match(/<(.*)[ ](.*)[ ](.*)>/)) { clsName = String(RegExp.$1); funcName = String(RegExp.$2); color = String(RegExp.$3); done = true; } if (done) { const CLASS_NAME = window[clsName]; const FUNC_NAME = funcName.slice(0); const COLOR_NAME = color.slice(0); if (typeof CLASS_NAME === "function") { const prototypeName = CLASS_NAME.prototype[FUNC_NAME]; if (typeof prototypeName === "function") { CLASS_NAME.prototype[funcName] = function () { return COLOR_NAME; }; } } } }); }; Utils.changeWindowTextColorSafely(defaultWindowClasses); })();
biud436/MV
RS_ChangeWindowTextColorSafely.js
JavaScript
mit
4,574
// ----------------------------------------------------------- // // This file was generated, please do not modify. // // ----------------------------------------------------------- namespace EmptyKeys.UserInterface.Generated { using System; using System.CodeDom.Compiler; using System.Collections.ObjectModel; using EmptyKeys.UserInterface; using EmptyKeys.UserInterface.Charts; using EmptyKeys.UserInterface.Data; using EmptyKeys.UserInterface.Controls; using EmptyKeys.UserInterface.Controls.Primitives; using EmptyKeys.UserInterface.Input; using EmptyKeys.UserInterface.Interactions.Core; using EmptyKeys.UserInterface.Interactivity; using EmptyKeys.UserInterface.Media; using EmptyKeys.UserInterface.Media.Effects; using EmptyKeys.UserInterface.Media.Animation; using EmptyKeys.UserInterface.Media.Imaging; using EmptyKeys.UserInterface.Shapes; using EmptyKeys.UserInterface.Renderers; using EmptyKeys.UserInterface.Themes; [GeneratedCodeAttribute("Empty Keys UI Generator", "3.1.0.0")] public sealed class Dictionary : ResourceDictionary { private static Dictionary singleton = new Dictionary(); public Dictionary() { this.InitializeResources(); } public static Dictionary Instance { get { return singleton; } } private void InitializeResources() { // Resource - [buttonAnimStyle] Style var r_0_s_bo = this[typeof(Button)]; Style r_0_s = new Style(typeof(Button), r_0_s_bo as Style); Setter r_0_s_S_0 = new Setter(Button.WidthProperty, 200F); r_0_s.Setters.Add(r_0_s_S_0); Setter r_0_s_S_1 = new Setter(Button.MarginProperty, new Thickness(0F, 1F, 0F, 1F)); r_0_s.Setters.Add(r_0_s_S_1); Setter r_0_s_S_2 = new Setter(Button.SnapsToDevicePixelsProperty, false); r_0_s.Setters.Add(r_0_s_S_2); EventTrigger r_0_s_ET_0 = new EventTrigger(Button.MouseEnterEvent); r_0_s.Triggers.Add(r_0_s_ET_0); BeginStoryboard r_0_s_ET_0_AC_0 = new BeginStoryboard(); r_0_s_ET_0_AC_0.Name = "r_0_s_ET_0_AC_0"; r_0_s_ET_0.AddAction(r_0_s_ET_0_AC_0); Storyboard r_0_s_ET_0_AC_0_SB = new Storyboard(); r_0_s_ET_0_AC_0.Storyboard = r_0_s_ET_0_AC_0_SB; r_0_s_ET_0_AC_0_SB.Name = "r_0_s_ET_0_AC_0_SB"; ThicknessAnimation r_0_s_ET_0_AC_0_SB_TL_0 = new ThicknessAnimation(); r_0_s_ET_0_AC_0_SB_TL_0.Name = "r_0_s_ET_0_AC_0_SB_TL_0"; r_0_s_ET_0_AC_0_SB_TL_0.Duration = new Duration(new TimeSpan(0, 0, 0, 0, 500)); r_0_s_ET_0_AC_0_SB_TL_0.From = new Thickness(0F, 1F, 0F, 1F); r_0_s_ET_0_AC_0_SB_TL_0.To = new Thickness(0F, 5F, 0F, 5F); SineEase r_0_s_ET_0_AC_0_SB_TL_0_EA = new SineEase(); r_0_s_ET_0_AC_0_SB_TL_0.EasingFunction = r_0_s_ET_0_AC_0_SB_TL_0_EA; Storyboard.SetTargetProperty(r_0_s_ET_0_AC_0_SB_TL_0, Button.MarginProperty); r_0_s_ET_0_AC_0_SB.Children.Add(r_0_s_ET_0_AC_0_SB_TL_0); FloatAnimation r_0_s_ET_0_AC_0_SB_TL_1 = new FloatAnimation(); r_0_s_ET_0_AC_0_SB_TL_1.Name = "r_0_s_ET_0_AC_0_SB_TL_1"; r_0_s_ET_0_AC_0_SB_TL_1.Duration = new Duration(new TimeSpan(0, 0, 0, 0, 500)); r_0_s_ET_0_AC_0_SB_TL_1.To = 220F; SineEase r_0_s_ET_0_AC_0_SB_TL_1_EA = new SineEase(); r_0_s_ET_0_AC_0_SB_TL_1.EasingFunction = r_0_s_ET_0_AC_0_SB_TL_1_EA; Storyboard.SetTargetProperty(r_0_s_ET_0_AC_0_SB_TL_1, Button.WidthProperty); r_0_s_ET_0_AC_0_SB.Children.Add(r_0_s_ET_0_AC_0_SB_TL_1); EventTrigger r_0_s_ET_1 = new EventTrigger(Button.MouseLeaveEvent); r_0_s.Triggers.Add(r_0_s_ET_1); BeginStoryboard r_0_s_ET_1_AC_0 = new BeginStoryboard(); r_0_s_ET_1_AC_0.Name = "r_0_s_ET_1_AC_0"; r_0_s_ET_1.AddAction(r_0_s_ET_1_AC_0); Storyboard r_0_s_ET_1_AC_0_SB = new Storyboard(); r_0_s_ET_1_AC_0.Storyboard = r_0_s_ET_1_AC_0_SB; r_0_s_ET_1_AC_0_SB.Name = "r_0_s_ET_1_AC_0_SB"; 
ThicknessAnimation r_0_s_ET_1_AC_0_SB_TL_0 = new ThicknessAnimation(); r_0_s_ET_1_AC_0_SB_TL_0.Name = "r_0_s_ET_1_AC_0_SB_TL_0"; r_0_s_ET_1_AC_0_SB_TL_0.Duration = new Duration(new TimeSpan(0, 0, 0, 0, 500)); r_0_s_ET_1_AC_0_SB_TL_0.From = new Thickness(0F, 5F, 0F, 5F); r_0_s_ET_1_AC_0_SB_TL_0.To = new Thickness(0F, 1F, 0F, 1F); SineEase r_0_s_ET_1_AC_0_SB_TL_0_EA = new SineEase(); r_0_s_ET_1_AC_0_SB_TL_0.EasingFunction = r_0_s_ET_1_AC_0_SB_TL_0_EA; Storyboard.SetTargetProperty(r_0_s_ET_1_AC_0_SB_TL_0, Button.MarginProperty); r_0_s_ET_1_AC_0_SB.Children.Add(r_0_s_ET_1_AC_0_SB_TL_0); FloatAnimation r_0_s_ET_1_AC_0_SB_TL_1 = new FloatAnimation(); r_0_s_ET_1_AC_0_SB_TL_1.Name = "r_0_s_ET_1_AC_0_SB_TL_1"; r_0_s_ET_1_AC_0_SB_TL_1.Duration = new Duration(new TimeSpan(0, 0, 0, 0, 500)); r_0_s_ET_1_AC_0_SB_TL_1.To = 200F; SineEase r_0_s_ET_1_AC_0_SB_TL_1_EA = new SineEase(); r_0_s_ET_1_AC_0_SB_TL_1.EasingFunction = r_0_s_ET_1_AC_0_SB_TL_1_EA; Storyboard.SetTargetProperty(r_0_s_ET_1_AC_0_SB_TL_1, Button.WidthProperty); r_0_s_ET_1_AC_0_SB.Children.Add(r_0_s_ET_1_AC_0_SB_TL_1); this.Add("buttonAnimStyle", r_0_s); // Resource - [buttonStyle] Style var r_1_s_bo = this[typeof(Button)]; Style r_1_s = new Style(typeof(Button), r_1_s_bo as Style); Setter r_1_s_S_0 = new Setter(Button.BackgroundProperty, new SolidColorBrush(new ColorW(255, 140, 0, 255))); r_1_s.Setters.Add(r_1_s_S_0); Setter r_1_s_S_1 = new Setter(Button.WidthProperty, 200F); r_1_s.Setters.Add(r_1_s_S_1); Setter r_1_s_S_2 = new Setter(Button.PaddingProperty, new Thickness(2F)); r_1_s.Setters.Add(r_1_s_S_2); this.Add("buttonStyle", r_1_s); // Resource - [logoEmptyKeys] BitmapImage BitmapImage r_2_bm = new BitmapImage(); r_2_bm.TextureAsset = "Images/EmptyKeysLogoTextSmall"; this.Add("logoEmptyKeys", r_2_bm); // Resource - [MessageBoxButtonYes] String this.Add("MessageBoxButtonYes", "Yes!"); // Resource - [Sounds] SoundSourceCollection var r_4_sounds = new SoundSourceCollection(); SoundManager.Instance.AddSound("Click"); r_4_sounds.Add(new SoundSource { SoundType = SoundType.ButtonsClick, SoundAsset = "Click", Volume = 1f }); SoundManager.Instance.AddSound("KeyPress"); r_4_sounds.Add(new SoundSource { SoundType = SoundType.TextBoxKeyPress, SoundAsset = "KeyPress", Volume = 1f }); SoundManager.Instance.AddSound("Move"); r_4_sounds.Add(new SoundSource { SoundType = SoundType.TabControlMove, SoundAsset = "Move", Volume = 1f }); SoundManager.Instance.AddSound("Select"); r_4_sounds.Add(new SoundSource { SoundType = SoundType.TabControlSelect, SoundAsset = "Select", Volume = 1f }); this.Add("Sounds", r_4_sounds); // Resource - [TitleResource] String this.Add("TitleResource", "Basic UI Example"); // Resource - [ToolTipText] String this.Add("ToolTipText", "Click to open message box"); ImageManager.Instance.AddImage("Images/EmptyKeysLogoTextSmall"); } } }
EmptyKeys/UI_Examples
BasicUI_Xenko/BasicUI_Xenko/BasicUI_Xenko.UserInterface/GeneratedUI/Dictionary.xaml.cs
C#
mit
7,744
/* __ _____ _____ _____ __| | __| | | | JSON for Modern C++ (fuzz test support) | | |__ | | | | | | version 2.1.0 |_____|_____|_____|_|___| https://github.com/nlohmann/json This file implements a parser test suitable for fuzz testing. Given a byte array data, it performs the following steps: - j1 = parse(data) - s1 = serialize(j1) - j2 = parse(s1) - s2 = serialize(j2) - assert(s1 == s2) The provided function `LLVMFuzzerTestOneInput` can be used in different fuzzer drivers. Licensed under the MIT License <http://opensource.org/licenses/MIT>. */ #include <iostream> #include <sstream> #include <json.hpp> using json = nlohmann::json; // see http://llvm.org/docs/LibFuzzer.html extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) { try { // step 1: parse input json j1 = json::parse(data, data + size); try { // step 2: round trip // first serialization std::string s1 = j1.dump(); // parse serialization json j2 = json::parse(s1); // second serialization std::string s2 = j2.dump(); // serializations must match assert(s1 == s2); } catch (const std::invalid_argument&) { // parsing a JSON serialization must not fail assert(false); } } catch (const std::invalid_argument&) { // parse errors are ok, because input may be random bytes } // return 0 - non-zero return values are reserved for future use return 0; }
stanmihai4/json
test/src/fuzzer-parse_json.cpp
C++
mit
1,608
'use strict'; var defaultEnvConfig = require('./default'); module.exports = { db: { uri: process.env.MONGOHQ_URL || process.env.MONGODB_URI || 'mongodb://' + (process.env.DB_1_PORT_27017_TCP_ADDR || 'localhost') + '/flipflop-test', options: { user: '', pass: '' }, // Enable mongoose debug mode debug: process.env.MONGODB_DEBUG || false }, log: { // logging with Morgan - https://github.com/expressjs/morgan // Can specify one of 'combined', 'common', 'dev', 'short', 'tiny' // format: 'dev' // fileLogger: { // directoryPath: process.cwd(), // fileName: 'app.log', // maxsize: 10485760, // maxFiles: 2, // json: false // } }, port: process.env.PORT || 3001, app: { title: defaultEnvConfig.app.title + ' - Test Environment' }, uploads: { profile: { image: { dest: './modules/users/client/img/profile/uploads/', limits: { fileSize: 100000 // Limit filesize (100kb) for testing purposes } } } }, facebook: { clientID: process.env.FACEBOOK_ID || 'APP_ID', clientSecret: process.env.FACEBOOK_SECRET || 'APP_SECRET', callbackURL: '/api/auth/facebook/callback' }, twitter: { username: '@TWITTER_USERNAME', clientID: process.env.TWITTER_KEY || 'CONSUMER_KEY', clientSecret: process.env.TWITTER_SECRET || 'CONSUMER_SECRET', callbackURL: '/api/auth/twitter/callback' }, google: { clientID: process.env.GOOGLE_ID || 'APP_ID', clientSecret: process.env.GOOGLE_SECRET || 'APP_SECRET', callbackURL: '/api/auth/google/callback' }, linkedin: { clientID: process.env.LINKEDIN_ID || 'APP_ID', clientSecret: process.env.LINKEDIN_SECRET || 'APP_SECRET', callbackURL: '/api/auth/linkedin/callback' }, github: { clientID: process.env.GITHUB_ID || 'APP_ID', clientSecret: process.env.GITHUB_SECRET || 'APP_SECRET', callbackURL: '/api/auth/github/callback' }, paypal: { clientID: process.env.PAYPAL_ID || 'CLIENT_ID', clientSecret: process.env.PAYPAL_SECRET || 'CLIENT_SECRET', callbackURL: '/api/auth/paypal/callback', sandbox: true }, mailer: { from: process.env.MAILER_FROM || 'MAILER_FROM', options: { service: process.env.MAILER_SERVICE_PROVIDER || 'MAILER_SERVICE_PROVIDER', auth: { user: process.env.MAILER_EMAIL_ID || 'MAILER_EMAIL_ID', pass: process.env.MAILER_PASSWORD || 'MAILER_PASSWORD' } } }, seedDB: { seed: process.env.MONGO_SEED === 'true', options: { logResults: process.env.MONGO_SEED_LOG_RESULTS !== 'false', seedUser: { username: process.env.MONGO_SEED_USER_USERNAME || 'seeduser', provider: 'local', email: process.env.MONGO_SEED_USER_EMAIL || 'user@localhost.com', firstName: 'User', lastName: 'Local', displayName: 'User Local', roles: ['user'] }, seedAdmin: { username: process.env.MONGO_SEED_ADMIN_USERNAME || 'seedadmin', provider: 'local', email: process.env.MONGO_SEED_ADMIN_EMAIL || 'admin@localhost.com', firstName: 'Admin', lastName: 'Local', displayName: 'Admin Local', roles: ['user', 'admin'] } } } };
tonymullen/flipflop
config/env/test.js
JavaScript
mit
3,286
package bp.details; import javax.swing.JLabel; import javax.swing.JSpinner; import javax.swing.SpinnerModel; import javax.swing.SpinnerNumberModel; import javax.swing.event.ChangeEvent; import javax.swing.event.ChangeListener; import bp.model.data.Gateway; import bp.model.util.BPKeyWords; import bp.model.util.Controller; public class GatewayDetails extends ElementDetails { /** * */ private static final long serialVersionUID = -2243209273015769935L; public static final String MIN_INPUT = "Minimal input:"; private Gateway gateway = (Gateway) getElement(); private JLabel minInputLb; private JSpinner minInputSp; public GatewayDetails(Gateway element) { super(element); } @Override protected void initComponents() { super.initComponents(); this.minInputLb = new JLabel(MIN_INPUT); final SpinnerModel sm = new SpinnerNumberModel(0, 0, Integer.MAX_VALUE, 1); this.minInputSp = new JSpinner(sm); // Set the texts if available gateway = (Gateway) getElement(); if (gateway.getMinInput() != null) minInputSp.setValue(gateway.getMinInput()); } @Override protected void layoutComponents() { super.layoutComponents(); createAdvanced(); getAdvanced().add(this.minInputLb); getAdvanced().add(this.minInputSp); } @Override protected void addActions() { super.addActions(); this.minInputSp.addChangeListener(new ChangeListener() { @Override public void stateChanged(final ChangeEvent arg0) { GatewayDetails.this.gateway.updateMinInput((Integer) GatewayDetails.this.minInputSp.getValue(), Controller.DETAILS); } }); } @Override protected void dataAttributeChanged(final BPKeyWords keyWord, final Object value) { super.dataAttributeChanged(keyWord, value); if (value != null) { if (keyWord == BPKeyWords.MIN_INPUT) { this.minInputSp.setValue(value); } } } }
farkas-arpad/KROKI-mockup-tool
BusinessProcessModelingTool/src/bp/details/GatewayDetails.java
Java
mit
2,214
<?php /** * @file * Template para o tipo de conteรบdo relato dos profissionais do DAB relato_de_experi_ncia_dab_curado */ $relato_link = url(drupal_get_path_alias('node/' . $node->nid), array('absolute' => TRUE)); ?> <div id="node-<?php print $node->nid; ?>" class="<?php print $classes; ?> clearfix"<?php print $attributes; ?>> <div class="cabecalho row"> <?php if ($view_mode == 'teaser'): ?> <?php if ($display_submitted): ?> <div class="submitted col-md-8"> <?php print $user_picture; ?> <?php print $submitted; ?> <?php print render($content['og_group_ref']); ?> </div> <?php endif; ?> <?php else: ?> <?php if ($display_submitted): ?> <div class="submitted col-md-8"> <?php print $user_picture; ?> <?php print $submitted; ?> <?php print render($content['og_group_ref']); ?> </div> <?php endif; ?> <div class="node-relato-menu col-md-4"> <a href="#autores-atores" class="autores-relato-atores-experiencia"> Autores do relato e Atores da experiรชncia </a> </div> <?php endif; ?> </div> <div class="destacado clearfix"> <?php print render($content['field_imagem_de_destaque']); ?> <header> <?php print render($title_prefix); ?> <h2<?php print $title_attributes; ?>> <a rel="bookmark" href="<?php print $node_url; ?>"> <?php print $title; ?> </a> </h2> <?php print render($title_suffix); ?> <?php print render($content['field_descricao']); ?> </header> </div> <div class="dados-da-experiencia row clearfix"> <div class="dados-da-experiencia-header col-md-12"> <h3 class="dados-da-experiencia-subject"> Dados da Experiรชncia </h3> <?php print render($content['field_cidade']); ?> </div> <div class="esquerda col-md-6"> <?php print render($content['field_experiencia_ambito']); ?> <?php print render($content['field_experiencia_catespecificas']); ?> </div> <div class="direita col-md-6"> <?php print render($content['field_envolve_quais_pontos_equip']); ?> <?php print render($content['field_temas']); ?> </div> </div> <div class="content"<?php print $content_attributes; ?>> <?php // We hide the comments and links now so that we can render them later. hide($content['comments']); hide($content['links']); print render($content); ?> </div> <?php print render($content['links']); ?> <?php print render($content['comments']); ?> </div>
ABS-org/cdp_strap
theme/nodes/node--relato_de_experi_ncia_dab_curado.tpl.php
PHP
mit
2,642
๏ปฟ// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence. // See the LICENCE file in the repository root for full licence text. using osu.Framework.Allocation; using osu.Framework.Graphics; using osu.Framework.Graphics.Containers; using osu.Framework.Graphics.Shapes; using osu.Framework.Graphics.Sprites; using osu.Framework.Graphics.Textures; using osu.Game.Beatmaps; using osu.Game.Graphics; using osu.Game.Rulesets.Mods; using osu.Game.Rulesets.Osu.Objects; using osu.Game.Rulesets.Scoring; using osu.Game.Rulesets.UI; using osuTK; using osuTK.Graphics; namespace osu.Game.Rulesets.Osu.Mods { public class OsuModBlinds : Mod, IApplicableToRulesetContainer<OsuHitObject>, IApplicableToScoreProcessor { public override string Name => "Blinds"; public override string Description => "Play with blinds on your screen."; public override string Acronym => "BL"; public override FontAwesome Icon => FontAwesome.fa_adjust; public override ModType Type => ModType.DifficultyIncrease; public override bool Ranked => false; public override double ScoreMultiplier => 1.12; private DrawableOsuBlinds blinds; public void ApplyToRulesetContainer(RulesetContainer<OsuHitObject> rulesetContainer) { rulesetContainer.Overlays.Add(blinds = new DrawableOsuBlinds(rulesetContainer.Playfield.HitObjectContainer, rulesetContainer.Beatmap)); } public void ApplyToScoreProcessor(ScoreProcessor scoreProcessor) { scoreProcessor.Health.ValueChanged += health => { blinds.AnimateClosedness((float)health.NewValue); }; } /// <summary> /// Element for the Blinds mod drawing 2 black boxes covering the whole screen which resize inside a restricted area with some leniency. /// </summary> public class DrawableOsuBlinds : Container { /// <summary> /// Black background boxes behind blind panel textures. /// </summary> private Box blackBoxLeft, blackBoxRight; private Drawable panelLeft, panelRight, bgPanelLeft, bgPanelRight; private readonly Beatmap<OsuHitObject> beatmap; /// <summary> /// Value between 0 and 1 setting a maximum "closedness" for the blinds. /// Useful for animating how far the blinds can be opened while keeping them at the original position if they are wider open than this. /// </summary> private const float target_clamp = 1; private readonly float targetBreakMultiplier = 0; private readonly float easing = 1; private readonly CompositeDrawable restrictTo; /// <summary> /// <para> /// Percentage of playfield to extend blinds over. Basically moves the origin points where the blinds start. /// </para> /// <para> /// -1 would mean the blinds always cover the whole screen no matter health. /// 0 would mean the blinds will only ever be on the edge of the playfield on 0% health. /// 1 would mean the blinds are fully outside the playfield on 50% health. /// Infinity would mean the blinds are always outside the playfield except on 100% health. 
/// </para> /// </summary> private const float leniency = 0.1f; public DrawableOsuBlinds(CompositeDrawable restrictTo, Beatmap<OsuHitObject> beatmap) { this.restrictTo = restrictTo; this.beatmap = beatmap; } [BackgroundDependencyLoader] private void load() { RelativeSizeAxes = Axes.Both; Children = new[] { blackBoxLeft = new Box { Anchor = Anchor.TopLeft, Origin = Anchor.TopLeft, Colour = Color4.Black, RelativeSizeAxes = Axes.Y, }, blackBoxRight = new Box { Anchor = Anchor.TopRight, Origin = Anchor.TopRight, Colour = Color4.Black, RelativeSizeAxes = Axes.Y, }, bgPanelLeft = new ModBlindsPanel { Origin = Anchor.TopRight, Colour = Color4.Gray, }, panelLeft = new ModBlindsPanel { Origin = Anchor.TopRight, }, bgPanelRight = new ModBlindsPanel { Colour = Color4.Gray }, panelRight = new ModBlindsPanel() }; } private float calculateGap(float value) => MathHelper.Clamp(value, 0, target_clamp) * targetBreakMultiplier; // lagrange polinominal for (0,0) (0.6,0.4) (1,1) should make a good curve private static float applyAdjustmentCurve(float value) => 0.6f * value * value + 0.4f * value; protected override void Update() { float start = Parent.ToLocalSpace(restrictTo.ScreenSpaceDrawQuad.TopLeft).X; float end = Parent.ToLocalSpace(restrictTo.ScreenSpaceDrawQuad.TopRight).X; float rawWidth = end - start; start -= rawWidth * leniency * 0.5f; end += rawWidth * leniency * 0.5f; float width = (end - start) * 0.5f * applyAdjustmentCurve(calculateGap(easing)); // different values in case the playfield ever moves from center to somewhere else. blackBoxLeft.Width = start + width; blackBoxRight.Width = DrawWidth - end + width; panelLeft.X = start + width; panelRight.X = end - width; bgPanelLeft.X = start; bgPanelRight.X = end; } protected override void LoadComplete() { const float break_open_early = 500; const float break_close_late = 250; base.LoadComplete(); var firstObj = beatmap.HitObjects[0]; var startDelay = firstObj.StartTime - firstObj.TimePreempt; using (BeginAbsoluteSequence(startDelay + break_close_late, true)) leaveBreak(); foreach (var breakInfo in beatmap.Breaks) { if (breakInfo.HasEffect) { using (BeginAbsoluteSequence(breakInfo.StartTime - break_open_early, true)) { enterBreak(); using (BeginDelayedSequence(breakInfo.Duration + break_open_early + break_close_late, true)) leaveBreak(); } } } } private void enterBreak() => this.TransformTo(nameof(targetBreakMultiplier), 0f, 1000, Easing.OutSine); private void leaveBreak() => this.TransformTo(nameof(targetBreakMultiplier), 1f, 2500, Easing.OutBounce); /// <summary> /// 0 is open, 1 is closed. /// </summary> public void AnimateClosedness(float value) => this.TransformTo(nameof(easing), value, 200, Easing.OutQuint); public class ModBlindsPanel : Sprite { [BackgroundDependencyLoader] private void load(TextureStore textures) { Texture = textures.Get("Play/osu/blinds-panel"); } } } } }
naoey/osu
osu.Game.Rulesets.Osu/Mods/OsuModBlinds.cs
C#
mit
7,835
#!/hpf/largeprojects/ccmbio/naumenko/tools/bcbio/anaconda/bin/python """ Looks for a specific sample """ import re import sys import os import os.path sample = sys.argv[1] family,sample_only = sample.split("_") match = re.match('\d+',family) if match: prefix=str(int(match.group(0))//100) report_path = prefix+'x/'+family report=0 bam=0 errors = [] if os.path.isfile(report_path+'/'+family+'.csv'): #print("Report exists") report=1 else: errors.append('Error: no report') if os.path.isfile(report_path+'/'+sample+'.bam'): #print("Bam exists") bam=1 else: errors.append(' ERROR: no bam') if (bam==1 and report==1): print(sample+'\t'+os.getcwd()+"/"+report_path+"\t"+os.getcwd()+"/"+report_path+'/'+sample+'.bam') else: print(sample+'\t'+' '.join(errors)) else: print("Family ID does not start with a digit")
naumenko-sa/cre
cre.locate_sample.py
Python
mit
895
# -*- coding: utf-8 -*- """ Date: 2/2/2017 Team: Satoshi Nakamoto @Authors: Alex Levering and Hector Muro Non-standard dependencies: * Twython * NLTK * Folium * Geocoder * psycopg2 TO DO BEFOREHAND: The following steps are non-automatable and have to be performed manually. * Have the NLTK vader lexicon locally (nltk.download("vader_lexicon")) * Have PostGIS installed on PostgreSQL * Set the file paths specified below to wherever your folder is * Upgrade folium to the latest version (0.2.1) """ # Naming options for tables, intermediates and outputs are available in the wrapper. if __name__ == "__main__": """ The tool is not supplied with Tweets out-of-the-box. Set 'gather_data' to True and leave it running for a while. If loop is false it will terminate in a minute or so and create a map from the results automatically This tool was tested and intended for OSGeo Live installs used in the GeoScripting course. """ import tweetAnalysisWrapper tweetAnalysisWrapper.performTweetResearch(folder_path = r"/home/user/git/SatoshiNakamotoGeoscripting/Final_assignment", defaultdb = "postgres", # Making a new database requires connecting to an existing database user = "user", # PostgreSQL username (user is default value on OSGeo Live) password = "user", # PostgreSQL password (user is default on OSGeo Live) ouputdb = "tweet_research", # Specify the output database that is to be created tweet_table_name = "tweets", # Output table where the Tweets are stored gather_data = True, # When True: Will gather data from the Twitter stream search_terms = ["Trump"], # Twitter terms to search for loop_gathering = False, # When True: Will not stop gathering when terminated - use for prolonged gathering APP_KEY = "", # Get these from developer.twitter.com when you make your application APP_SECRET = "", OAUTH_TOKEN = "", OAUTH_TOKEN_SECRET = "")
SatoshiNakamotoGeoscripting/SatoshiNakamotoGeoscripting
Final_assignment/main.py
Python
mit
2,489
require 'swing_support/extensions' module SwingSupport # Class that implements ActionListener interface around a given block class ActionListener java_implements java.awt.event.ActionListener def initialize &block @listener_block = block end java_signature 'public void actionPerformed(ActionEvent event)' # from ActionListener interface: Invoked when an action event occurs. def actionPerformed event @listener_block.call event end end end
arvicco/swing
lib/swing_support/action_listener.rb
Ruby
mit
491
import { Component, OnInit, Input } from '@angular/core'; import { LoadingController, NavController } from 'ionic-angular'; import { Geolocation } from 'ionic-native'; import { Observable } from 'rxjs/Observable'; import { OriginLocationComponent } from '../origin-location/origin-location'; // import { AvailableProvidersComponent } from '../available-providers/available-providers'; @Component({ selector: 'google-map', templateUrl: 'google-map.html', entryComponents: [OriginLocationComponent] }) export class GoogleMapComponent implements OnInit { @Input() isServiceRequested: boolean; public location; map; public isMapIdle: boolean; constructor(public navCtrl: NavController, public loadingCtrl: LoadingController) {} ngOnInit(){ this.map = this.createMap(); this.addMapEventListeners(); this.getLocation().subscribe(location => { this.centerLocation(location) }) } addMapEventListeners(){ google.maps.event.addListener(this.map, 'dragstart', ()=>{ this.isMapIdle = false; }) google.maps.event.addListener(this.map, 'idle', ()=>{ this.isMapIdle = true; }) } getLocation() { let loading = this.loadingCtrl.create({ content: 'Locating...', spinner: 'bubbles' }); loading.present() setTimeout(() => { loading.dismiss(); }, 5000) let options = {timeout: 10000, enableHighAccuracy: true}; let locationObs = Observable.create(observable => { Geolocation.getCurrentPosition(options) .then(resp => { let lat = resp.coords.latitude; let lng = resp.coords.longitude; let location = new google.maps.LatLng(lat, lng); console.log(lat, lng) observable.next(location); }, (err) => { console.log('Geolocation err: ' + err); loading.dismiss(); }) }) return locationObs; } createMap(location = new google.maps.LatLng(39.1031, -84.5120)){ let mapOptions = { center: location, zoom: 13, mapTypeId: google.maps.MapTypeId.ROADMAP, disableDefaultUI: true } let mapEl = document.getElementById('map'); let map = new google.maps.Map(mapEl, mapOptions); return map; } centerLocation(location){ if (location){ this.map.panTo(location) } else { this.getLocation().subscribe(currentLocation => { this.map.panTo(currentLocation) }) } } }
RNATA/services-ionic
src/components/google-map/google-map.ts
TypeScript
mit
2,461
using System.Collections.Generic; using System.Linq; namespace NeuralNetwork { struct ForwardPropagationResult { public List<List<double>> Sums { get; } public List<List<double>> Activations { get; } public int Prediction { get; } public ForwardPropagationResult(List<List<double>> sums, List<List<double>> activations) { Sums = sums; Activations = activations; // The predicted label is the index of the node with the highest activation. Prediction = Activations.Last().IndexOfHighestValue(); } } }
andreimuntean/NeuralNetwork
NeuralNetwork/NeuralNetwork/ForwardPropagationResult.cs
C#
mit
616
'use strict'; const { messages, ruleName } = require('..'); testRule({ ruleName, config: [ { border: 2, '/^margin/': 1, }, ], accept: [ { code: 'a { margin: 0; }', }, { code: 'a { margin: 1px; }', }, { code: 'a { margin: var(--foo); }', description: 'deals with CSS variables', }, { code: 'a { margin: 1px /* 3px */; }', description: 'ignore values in comments', }, { code: 'a { margin-inline: 1px; }', }, { code: 'a { margin: ; }', }, { code: 'a { border: 1px; }', }, { code: 'a { border: 1px solid; }', }, { code: 'a { transition: margin-right 2s ease-in-out; }', description: 'irrelevant shorthand', }, ], reject: [ { code: 'a { margin: 1px 2px; }', message: messages.rejected('margin', 1), line: 1, column: 5, }, { code: 'a { margin-inline: 1px 2px; }', message: messages.rejected('margin-inline', 1), line: 1, column: 5, }, { code: 'a { margin: var(--foo) var(--bar); }', message: messages.rejected('margin', 1), line: 1, column: 5, description: 'deals with CSS variables', }, { code: 'a { margin: 1px 2px 3px 4px; }', message: messages.rejected('margin', 1), line: 1, column: 5, }, { code: 'a { margin: 0 0 0 0; }', message: messages.rejected('margin', 1), line: 1, column: 5, }, { code: 'a { border: 1px solid blue; }', message: messages.rejected('border', 2), line: 1, column: 5, }, ], });
stylelint/stylelint
lib/rules/declaration-property-max-values/__tests__/index.js
JavaScript
mit
1,491
<?php /* Copyright 2015 Lcf.vs - Released under the MIT license - https://github.com/Lcfvs/DOMArch */ namespace DOMArch\Url; use DOMArch\Config; use DOMArch\Constants; use DOMArch\Crypto; use DOMArch\Url; class Encrypted extends Url { public static function parse($str) { $url = parent::parse($str); $key = Config::global()->get('common')->get('encryptionKey'); $query = Crypto::decryptUri(substr($url->getPath(), 1), $key); return $url->reset($url->_parseParams($query)); } public function _getUri() { $key = Config::global()->get('common')->get('encryptionKey'); return '/' . Crypto::encryptUri(parent::_getUri(), $key); } }
dom-arch/dom-arch
lib/Url/Encrypted/Encrypted.php
PHP
mit
715
<?php namespace Rebase\BigvBundle\Entity; use Doctrine\ORM\Mapping as ORM; use Rebase\BigvBundle\Entity\Game; /** * Rebase\BigvBundle\Entity\Slot * * @ORM\Table() * @ORM\Entity */ class Slot { /** * @var integer $id * * @ORM\Column(name="id", type="integer") * @ORM\Id * @ORM\GeneratedValue(strategy="AUTO") */ private $id; /** * @ORM\ManyToOne(targetEntity="Court", inversedBy="slots") * @ORM\JoinColumn(name="court_id", referencedColumnName="id") */ protected $court; /** * @ORM\OneToOne(targetEntity="Game", inversedBy="slot") * @ORM\JoinColumn(name="game_id", referencedColumnName="id") */ protected $game; /** * @ORM\ManyToOne(targetEntity="Round", inversedBy="slot") * @ORM\JoinColumn(name="round_id", referencedColumnName="id") */ private $round; /** * @ORM\Column(type="integer") */ private $priority; /** * @ORM\Column(type="datetime") */ private $start; /** * @ORM\Column(type="datetime") */ private $end; /** * Get id * * @return integer */ public function getId() { return $this->id; } public function getShortDate() { return $this->start->format("y-m-d"); } public function getDate() { return new \DateTime($this->getStart()->format("y-m-d")); } /** * Set venue * * @param Rebase\BigvBundle\Entity\Court $court */ public function setCourt(\Rebase\BigvBundle\Entity\Court $court) { $this->court = $court; } /** * Get venue * * @return Rebase\BigvBundle\Entity\Venue */ public function getCourt() { return $this->court; } /** * Set priority * * @param integer $priority */ public function setPriority($priority) { $this->priority = $priority; } /** * Get priority * * @return integer */ public function getPriority() { return $this->priority; } public function getPriorityName() { switch($this->priority) { case 1: return "OK"; break; case 2: return "Good"; break; case 3: return "Great"; break; } return "??"; } /** * Set start * * @param datetime $start */ public function setStart($start) { $this->start = $start; } /** * Get start * * @return datetime */ public function getStart() { return $this->start; } /** * Set end * * @param datetime $end */ public function setEnd($end) { $this->end = $end; } /** * Get end * * @return datetime */ public function getEnd() { return $this->end; } /** * Set round * * @param Rebase\BigvBundle\Entity\Round $round */ public function setRound(\Rebase\BigvBundle\Entity\Round $round) { $this->round = $round; } /** * Get round * * @return Rebase\BigvBundle\Entity\Round */ public function getRound() { return $this->round; } /** * Set game * * @param Rebase\BigvBundle\Entity\Game $game */ public function setGame(\Rebase\BigvBundle\Entity\Game $game) { $this->game = $game; } public function removeGame() { if ($this->game) { $v = $this->game; $this->game = NULL; $v->removeSlot(); } } /** * Get game * * @return Rebase\BigvBundle\Entity\game */ public function getGame() { return $this->game; } }
daemonl/fxm
src/Rebase/BigvBundle/Entity/Slot.php
PHP
mit
3,896
// Regular expression that matches all symbols in the `Kaithi` script as per Unicode v6.0.0: /\uD804[\uDC80-\uDCC1]/;
mathiasbynens/unicode-data
6.0.0/scripts/Kaithi-regex.js
JavaScript
mit
117
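A minimal TypeScript usage sketch for the one-line regex record above; the sample code point (U+11083, KAITHI LETTER A, written as a surrogate pair) is an assumption chosen from inside the \uDC80-\uDCC1 range.

// Hypothetical usage of the Kaithi regex above.
const kaithi = /\uD804[\uDC80-\uDCC1]/;
console.log(kaithi.test("\uD804\uDC83")); // true  - U+11083 falls inside the Kaithi range
console.log(kaithi.test("abc"));          // false - plain ASCII never matches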
using School.Domain.Models; namespace School.Domain.Contracts.Repositories { public interface IUserRepository : IRepository<User> { User GetByIdWithProperties(int id); User Authenticate(string username, string password); User GetByRefreshTokenId(string refreshTokenId); } }
mersocarlin/school-web-api
School/School.Domain/Contracts/Repositories/IUserRepository.cs
C#
mit
314
package com.cmput402w2016.t1.webapi.handler; import com.cmput402w2016.t1.data.Segment; import com.cmput402w2016.t1.webapi.Helper; import com.cmput402w2016.t1.webapi.WebApi; import com.sun.net.httpserver.HttpExchange; import com.sun.net.httpserver.HttpHandler; import java.util.Map; /** * Handler for the /segment webservice route */ public class SegmentHandler implements HttpHandler { /** * Handle the web request to the server * * @param httpExchange HttpExchange object containing the request */ @Override public void handle(HttpExchange httpExchange) { // Get & parse query try { String requestMethod = httpExchange.getRequestMethod(); if (requestMethod.equalsIgnoreCase("GET")) { String query = httpExchange.getRequestURI().getRawQuery(); Map<String, String> stringStringMap = Helper.queryToMap(query); if (stringStringMap.containsKey("geohash")) { String geohash = stringStringMap.get("geohash"); String neighbors = Segment.getClosestSegmentFromGeohash(geohash, WebApi.get_segment_table()); Helper.requestResponse(httpExchange, 200, neighbors); httpExchange.close(); return; } else if (stringStringMap.containsKey("lat") && stringStringMap.containsKey("lon")) { String lat = stringStringMap.get("lat"); String lon = stringStringMap.get("lon"); String neighbors = Segment.getClosestSegmentFromLatLon(lat, lon, WebApi.get_segment_table()); Helper.requestResponse(httpExchange, 200, neighbors); httpExchange.close(); return; } } Helper.malformedRequestResponse(httpExchange, 400, "Invalid query to the segment api"); httpExchange.close(); } catch (Exception e) { // Wasn't returned earlier, something must be wrong e.printStackTrace(); Helper.malformedRequestResponse(httpExchange, 400, e.getMessage()); httpExchange.close(); } } }
cmput402w2016/CMPUT402W16T1
MapCore/src/main/java/com/cmput402w2016/t1/webapi/handler/SegmentHandler.java
Java
mit
2,216
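The handler above serves the /segment route and only looks at a geohash parameter or a lat/lon pair. A hedged TypeScript client sketch follows; the base URL and the plain-text response handling are assumptions, only the route and query-parameter names come from the handler.

// Hypothetical base URL; the handler only fixes the /segment route and
// the geohash / lat / lon query parameters.
async function closestSegment(lat: string, lon: string): Promise<string> {
  const res = await fetch(`http://localhost:8080/segment?lat=${lat}&lon=${lon}`);
  if (!res.ok) {
    throw new Error(`segment lookup failed with status ${res.status}`);
  }
  return res.text(); // the handler writes the closest-segment data straight into the response body
}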
<div class="row"> <div class="col-lg-12"> <form method="post" action="<?php echo site_url('admin/pages/save'); ?>"> <input type="hidden" name="id" value="<?php echo !empty($page) ? $page->id : ''; ?>"/> <section class="panel"> <header class="panel-heading"><span class="h4"><?php echo !empty($page) ? $page->title : 'Pagina noua'; ?></span></header> <div class="panel-body"> <div class="form-group"> <label>Title *</label> <input name="title" type="text" value="<?php echo !empty($page) ? $page->title : ''; ?>" required class="form-control"> </div> <div class="form-group"> <label>Image</label> <input name="file" type="file" class="form-control"> </div> <div class="form-group"> <label>Content *</label> <textarea name="content" class="form-control" required rows="10"><?php echo !empty($page) ? $page->content : ''; ?></textarea> </div> </div> <footer class="panel-footer text-right bg-light lter"> <a href="<?php echo site_url('admin/pages'); ?>" class="btn btn-s-xs">Cancel</a> <button type="submit" class="btn btn-success btn-s-xs">Save</button> </footer> </section> </form> </div> </div>
nguyentrannhatrang/thoitrangchobe
application/views/admin/page.php
PHP
mit
1,628
package com.swfarm.biz.chain.bo; import java.io.Serializable; import java.sql.Timestamp; import java.util.Random; import com.swfarm.pub.framework.FormNumberCache; public class JobExecutionState implements Serializable { private Long id; private String jobName; private String jobInstanceName; private String saleChannel; private String accountNumber; private Timestamp executionTime = new Timestamp(System.currentTimeMillis()); public Long getId() { return id; } public void setId(Long id) { this.id = id; } public String getJobName() { return jobName; } public void setJobName(String jobName) { this.jobName = jobName; } public String getJobInstanceName() { return jobInstanceName; } public void setJobInstanceName(String jobInstanceName) { this.jobInstanceName = jobInstanceName; } public String getSaleChannel() { return saleChannel; } public void setSaleChannel(String saleChannel) { this.saleChannel = saleChannel; } public String getAccountNumber() { return accountNumber; } public void setAccountNumber(String accountNumber) { this.accountNumber = accountNumber; } public Timestamp getExecutionTime() { return executionTime; } public void setExecutionTime(Timestamp executionTime) { this.executionTime = executionTime; } public String generateJobInstanceName() { StringBuffer jobInstanceNameBuffer = new StringBuffer(); jobInstanceNameBuffer.append(this.jobName); jobInstanceNameBuffer.append(System.currentTimeMillis()); Random random = new Random(); int i1 = FormNumberCache.getRandomInteger(1, 9, random); int i2 = FormNumberCache.getRandomInteger(1, 9, random); int i3 = FormNumberCache.getRandomInteger(1, 9, random); int i4 = FormNumberCache.getRandomInteger(1, 9, random); jobInstanceNameBuffer.append(i1); jobInstanceNameBuffer.append(i2); jobInstanceNameBuffer.append(i3); jobInstanceNameBuffer.append(i4); return jobInstanceNameBuffer.toString(); } public static void main(String[] args) { } }
zhangqiang110/my4j
pms/src/main/java/com/swfarm/biz/chain/bo/JobExecutionState.java
Java
mit
2,105
import React from 'react'; import HomeLayout from '../layouts/HomeLayout'; import BookEditor from '../components/BookEditor'; import { get } from '../utils/request'; class BookEdit extends React.Component { constructor(props) { super(props); this.state = { book: null }; } componentWillMount() { const bookId = this.context.router.params.id; get('http://localhost:3000/book/' + bookId) .then(res => { this.setState({ book: res }); }); } render() { const { book } = this.state; return book ? <BookEditor editTarget={book} /> : <span>加载中...</span>; } } BookEdit.contextTypes = { router: React.PropTypes.object.isRequired }; export default BookEdit;
prodigalyijun/demo-by-antd
src/pages/BookEdit.js
JavaScript
mit
829
package jasm const header = ` function jasm(stdlib, foreign, heap) { "use asm"; var pc = 0; // pseudo program counter var sp = 0; // stack pointer var ret = 0; // return address, for jal var r0 = 0, r1 = 0, r2 = 0, r3 = 0; // general purpose 32-bit registers var f0 = 0.0, f1 = 0.0, f2 = 0.0, f3 = 0.0; // temp floating point registers var err = 0; var memI32 = new stdlib.Int32Array(heap); var memU32 = new stdlib.Uint32Array(heap); var memI8 = new stdlib.Int8Array(heap); var memU8 = new stdlib.Uint8Array(heap); var memF64 = new stdlib.Float64Array(heap); function setpc(newpc) { newpc = newpc|0; pc = newpc|0; } function setsp(newsp) { newsp = newsp|0; sp = newsp|0; } function seterr(newerr) { newerr = newerr|0; err = newerr|0; } function setret(newret) { newret = newret|0; ret = newret|0; } function getpc() { return pc|0; } function getsp() { return sp|0; } function getret() { return ret|0; } function geterr() { return err|0; } function getr1() { return r1|0; } function getr2() { return r2|0; } function getr3() { return r3|0; } function getf0() { return +f0; } function getf1() { return +f1; } function getf2() { return +f2; } function getf3() { return +f3; } function clearRegs() { pc = 0|0; sp = 0|0; ret = 0|0; err = 0|0; r0 = 0|0; r1 = 0|0; r2 = 0|0; r3 = 0|0; f0 = 0.0; f1 = 0.0; f2 = 0.0; f3 = 0.0; } function step() { var pc = 0; pc_ = pc|0; pc = (pc + 4) | 0; switch (pc_|0) { ` const footer = ` default: err = 1|0; } } function run(ncycle) { ncycle = ncycle|0; while (ncycle|0 > 0) { step(); r0 = 0|0; ncycle = ((ncycle|0) + -1)|0; if ((err|0) != (0|0)) { break; } } } return { setpc: setpc, setsp: setsp, seterr: seterr, setret: setret, getpc: getpc, getsp: getsp, geterr: geterr, getret: getret, getr1: getr1, getr2: getr2, getr3: getr3, getf0: getf0, getf1: getf1, getf2: getf2, getf3: getf3, clearRegs: clearRegs, run: run, }; } `
h8liu/xlang
jasm/header.go
GO
mit
2,400
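The Go file above only stores the header and footer strings of a generated asm.js-style module, so the interesting surface is the JavaScript API those strings define (setpc, run, geterr, clearRegs, ...). A TypeScript sketch of driving such a module follows, assuming the emitted source has already been evaluated into a jasm function; the declared subset of the API and the 64 KiB heap size are assumptions.

// `jasm` is assumed to be the function obtained by evaluating the generated
// source (header + emitted instructions + footer); only part of its API is declared here.
declare function jasm(stdlib: unknown, foreign: unknown, heap: ArrayBuffer): {
  setpc(pc: number): void;
  run(ncycle: number): void;
  geterr(): number;
  getr1(): number;
  clearRegs(): void;
};

const heap = new ArrayBuffer(0x10000); // backing store for the Int32Array/Float64Array views
const vm = jasm(globalThis, {}, heap);

vm.clearRegs();   // zero pc, sp, ret, err and the general/float registers
vm.setpc(0);      // start executing at the first pseudo-instruction
vm.run(100);      // step at most 100 cycles, stopping early if err is set
console.log(vm.geterr(), vm.getr1());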
// Copyright (c) 2011-2014 The Bitcoin developers // Copyright (c) 2014-2015 The Gamblr developers // Distributed under the MIT/X11 software license, see the accompanying // file COPYING or http://www.opensource.org/licenses/mit-license.php. #include "overviewpage.h" #include "ui_overviewpage.h" #include "bitcoinunits.h" #include "clientmodel.h" #include "darksend.h" #include "darksendconfig.h" #include "guiconstants.h" #include "guiutil.h" #include "optionsmodel.h" #include "transactionfilterproxy.h" #include "transactiontablemodel.h" #include "walletmodel.h" #include "init.h" #include <QAbstractItemDelegate> #include <QPainter> #include <QTimer> #define DECORATION_SIZE 48 #define ICON_OFFSET 16 #define NUM_ITEMS 5 class TxViewDelegate : public QAbstractItemDelegate { Q_OBJECT public: TxViewDelegate(): QAbstractItemDelegate(), unit(BitcoinUnits::CHIP) { } inline void paint(QPainter *painter, const QStyleOptionViewItem &option, const QModelIndex &index ) const { painter->save(); QIcon icon = qvariant_cast<QIcon>(index.data(Qt::DecorationRole)); QRect mainRect = option.rect; mainRect.moveLeft(ICON_OFFSET); QRect decorationRect(mainRect.topLeft(), QSize(DECORATION_SIZE, DECORATION_SIZE)); int xspace = DECORATION_SIZE + 8; int ypad = 6; int halfheight = (mainRect.height() - 2*ypad)/2; QRect amountRect(mainRect.left() + xspace, mainRect.top()+ypad, mainRect.width() - xspace - ICON_OFFSET, halfheight); QRect addressRect(mainRect.left() + xspace, mainRect.top()+ypad+halfheight, mainRect.width() - xspace, halfheight); icon.paint(painter, decorationRect); QDateTime date = index.data(TransactionTableModel::DateRole).toDateTime(); QString address = index.data(Qt::DisplayRole).toString(); qint64 amount = index.data(TransactionTableModel::AmountRole).toLongLong(); bool confirmed = index.data(TransactionTableModel::ConfirmedRole).toBool(); QVariant value = index.data(Qt::ForegroundRole); QColor foreground = option.palette.color(QPalette::Text); if(value.canConvert<QBrush>()) { QBrush brush = qvariant_cast<QBrush>(value); foreground = brush.color(); } painter->setPen(foreground); painter->drawText(addressRect, Qt::AlignLeft|Qt::AlignVCenter, address); if(amount < 0) { foreground = COLOR_NEGATIVE; } else if(!confirmed) { foreground = COLOR_UNCONFIRMED; } else { foreground = option.palette.color(QPalette::Text); } painter->setPen(foreground); QString amountText = BitcoinUnits::formatWithUnit(unit, amount, true); if(!confirmed) { amountText = QString("[") + amountText + QString("]"); } painter->drawText(amountRect, Qt::AlignRight|Qt::AlignVCenter, amountText); painter->setPen(option.palette.color(QPalette::Text)); painter->drawText(amountRect, Qt::AlignLeft|Qt::AlignVCenter, GUIUtil::dateTimeStr(date)); painter->restore(); } inline QSize sizeHint(const QStyleOptionViewItem &option, const QModelIndex &index) const { return QSize(DECORATION_SIZE, DECORATION_SIZE); } int unit; }; #include "overviewpage.moc" OverviewPage::OverviewPage(QWidget *parent) : QWidget(parent), ui(new Ui::OverviewPage), clientModel(0), walletModel(0), currentBalance(-1), currentUnconfirmedBalance(-1), currentImmatureBalance(-1), txdelegate(new TxViewDelegate()), filter(0) { ui->setupUi(this); // Recent transactions ui->listTransactions->setItemDelegate(txdelegate); ui->listTransactions->setIconSize(QSize(DECORATION_SIZE, DECORATION_SIZE)); ui->listTransactions->setMinimumHeight(NUM_ITEMS * (DECORATION_SIZE + 2)); ui->listTransactions->setAttribute(Qt::WA_MacShowFocusRect, false); connect(ui->listTransactions, 
SIGNAL(clicked(QModelIndex)), this, SLOT(handleTransactionClicked(QModelIndex))); // init "out of sync" warning labels ui->labelWalletStatus->setText("(" + tr("out of sync") + ")"); ui->labelDarksendSyncStatus->setText("(" + tr("out of sync") + ")"); ui->labelTransactionsStatus->setText("(" + tr("out of sync") + ")"); showingDarkSendMessage = 0; darksendActionCheck = 0; lastNewBlock = 0; if(fLiteMode){ ui->frameDarksend->setVisible(false); } else if(!fMasterNode) { timer = new QTimer(this); connect(timer, SIGNAL(timeout()), this, SLOT(darkSendStatus())); timer->start(333); } if(fMasterNode){ ui->toggleDarksend->setText("(" + tr("Disabled") + ")"); ui->darksendAuto->setText("(" + tr("Disabled") + ")"); ui->darksendReset->setText("(" + tr("Disabled") + ")"); ui->frameDarksend->setEnabled(false); }else if(!fEnableDarksend){ ui->toggleDarksend->setText(tr("Start Darksend Mixing")); } else { ui->toggleDarksend->setText(tr("Stop Darksend Mixing")); } // start with displaying the "out of sync" warnings showOutOfSyncWarning(true); } void OverviewPage::handleTransactionClicked(const QModelIndex &index) { if(filter) emit transactionClicked(filter->mapToSource(index)); } OverviewPage::~OverviewPage() { if(!fLiteMode && !fMasterNode) disconnect(timer, SIGNAL(timeout()), this, SLOT(darkSendStatus())); delete ui; } void OverviewPage::setBalance(qint64 balance, qint64 unconfirmedBalance, qint64 immatureBalance, qint64 anonymizedBalance) { int unit = walletModel->getOptionsModel()->getDisplayUnit(); currentBalance = balance; currentUnconfirmedBalance = unconfirmedBalance; currentImmatureBalance = immatureBalance; currentAnonymizedBalance = anonymizedBalance; ui->labelBalance->setText(BitcoinUnits::formatWithUnit(unit, balance)); ui->labelUnconfirmed->setText(BitcoinUnits::formatWithUnit(unit, unconfirmedBalance)); ui->labelImmature->setText(BitcoinUnits::formatWithUnit(unit, immatureBalance)); ui->labelAnonymized->setText(BitcoinUnits::formatWithUnit(unit, anonymizedBalance)); ui->labelTotal->setText(BitcoinUnits::formatWithUnit(unit, balance + unconfirmedBalance + immatureBalance)); // only show immature (newly mined) balance if it's non-zero, so as not to complicate things // for the non-mining users bool showImmature = immatureBalance != 0; ui->labelImmature->setVisible(showImmature); ui->labelImmatureText->setVisible(showImmature); if(cachedTxLocks != nCompleteTXLocks){ cachedTxLocks = nCompleteTXLocks; ui->listTransactions->update(); } } void OverviewPage::setClientModel(ClientModel *model) { this->clientModel = model; if(model) { // Show warning if this is a prerelease version connect(model, SIGNAL(alertsChanged(QString)), this, SLOT(updateAlerts(QString))); updateAlerts(model->getStatusBarWarnings()); } } void OverviewPage::setWalletModel(WalletModel *model) { this->walletModel = model; if(model && model->getOptionsModel()) { // Set up transaction list filter = new TransactionFilterProxy(); filter->setSourceModel(model->getTransactionTableModel()); filter->setLimit(NUM_ITEMS); filter->setDynamicSortFilter(true); filter->setSortRole(Qt::EditRole); filter->setShowInactive(false); filter->sort(TransactionTableModel::Status, Qt::DescendingOrder); ui->listTransactions->setModel(filter); ui->listTransactions->setModelColumn(TransactionTableModel::ToAddress); // Keep up to date with wallet setBalance(model->getBalance(), model->getUnconfirmedBalance(), model->getImmatureBalance(), model->getAnonymizedBalance()); connect(model, SIGNAL(balanceChanged(qint64, qint64, qint64, qint64)), this, 
SLOT(setBalance(qint64, qint64, qint64, qint64))); connect(model->getOptionsModel(), SIGNAL(displayUnitChanged(int)), this, SLOT(updateDisplayUnit())); connect(ui->darksendAuto, SIGNAL(clicked()), this, SLOT(darksendAuto())); connect(ui->darksendReset, SIGNAL(clicked()), this, SLOT(darksendReset())); connect(ui->toggleDarksend, SIGNAL(clicked()), this, SLOT(toggleDarksend())); } // update the display unit, to not use the default ("CHIP") updateDisplayUnit(); } void OverviewPage::updateDisplayUnit() { if(walletModel && walletModel->getOptionsModel()) { if(currentBalance != -1) setBalance(currentBalance, currentUnconfirmedBalance, currentImmatureBalance, currentAnonymizedBalance); // Update txdelegate->unit with the current unit txdelegate->unit = walletModel->getOptionsModel()->getDisplayUnit(); ui->listTransactions->update(); } } void OverviewPage::updateAlerts(const QString &warnings) { this->ui->labelAlerts->setVisible(!warnings.isEmpty()); this->ui->labelAlerts->setText(warnings); } void OverviewPage::showOutOfSyncWarning(bool fShow) { ui->labelWalletStatus->setVisible(fShow); ui->labelDarksendSyncStatus->setVisible(fShow); ui->labelTransactionsStatus->setVisible(fShow); } void OverviewPage::updateDarksendProgress() { if(IsInitialBlockDownload()) return; int64_t nBalance = pwalletMain->GetBalance(); if(nBalance == 0) { ui->darksendProgress->setValue(0); QString s(tr("No inputs detected")); ui->darksendProgress->setToolTip(s); return; } //get denominated unconfirmed inputs if(pwalletMain->GetDenominatedBalance(true, true) > 0) { QString s(tr("Found unconfirmed denominated outputs, will wait till they confirm to recalculate.")); ui->darksendProgress->setToolTip(s); return; } //Get the anon threshold int64_t nMaxToAnonymize = nAnonymizeGamblrAmount*COIN; // If it's more than the wallet amount, limit to that. if(nMaxToAnonymize > nBalance) nMaxToAnonymize = nBalance; if(nMaxToAnonymize == 0) return; // calculate parts of the progress, each of them shouldn't be higher than 1: // mixing progress of denominated balance int64_t denominatedBalance = pwalletMain->GetDenominatedBalance(); float denomPart = 0; if(denominatedBalance > 0) { denomPart = (float)pwalletMain->GetNormalizedAnonymizedBalance() / denominatedBalance; denomPart = denomPart > 1 ? 1 : denomPart; if(denomPart == 1 && nMaxToAnonymize > denominatedBalance) nMaxToAnonymize = denominatedBalance; } // % of fully anonymized balance float anonPart = 0; if(nMaxToAnonymize > 0) { anonPart = (float)pwalletMain->GetAnonymizedBalance() / nMaxToAnonymize; // if anonPart is > 1 then we are done, make denomPart equal 1 too anonPart = anonPart > 1 ? 
(denomPart = 1, 1) : anonPart; } // apply some weights to them (sum should be <=100) and calculate the whole progress int progress = 80 * denomPart + 20 * anonPart; if(progress >= 100) progress = 100; ui->darksendProgress->setValue(progress); std::ostringstream convert; convert << "Progress: " << progress << "%, inputs have an average of " << pwalletMain->GetAverageAnonymizedRounds() << " of " << nDarksendRounds << " rounds"; QString s(convert.str().c_str()); ui->darksendProgress->setToolTip(s); } void OverviewPage::darkSendStatus() { int nBestHeight = chainActive.Tip()->nHeight; if(nBestHeight != darkSendPool.cachedNumBlocks) { //we we're processing lots of blocks, we'll just leave if(GetTime() - lastNewBlock < 10) return; lastNewBlock = GetTime(); updateDarksendProgress(); QString strSettings(" " + tr("Rounds")); strSettings.prepend(QString::number(nDarksendRounds)).prepend(" / "); strSettings.prepend(BitcoinUnits::formatWithUnit( walletModel->getOptionsModel()->getDisplayUnit(), nAnonymizeGamblrAmount * COIN) ); ui->labelAmountRounds->setText(strSettings); } if(!fEnableDarksend) { if(nBestHeight != darkSendPool.cachedNumBlocks) { darkSendPool.cachedNumBlocks = nBestHeight; ui->darksendEnabled->setText(tr("Disabled")); ui->darksendStatus->setText(""); ui->toggleDarksend->setText(tr("Start Darksend Mixing")); } return; } // check darksend status and unlock if needed if(nBestHeight != darkSendPool.cachedNumBlocks) { // Balance and number of transactions might have changed darkSendPool.cachedNumBlocks = nBestHeight; /* *******************************************************/ ui->darksendEnabled->setText(tr("Enabled")); } int state = darkSendPool.GetState(); int entries = darkSendPool.GetEntriesCount(); int accepted = darkSendPool.GetLastEntryAccepted(); /* ** @TODO this string creation really needs some clean ups ---vertoe ** */ std::ostringstream convert; if(state == POOL_STATUS_IDLE) { convert << tr("Darksend is idle.").toStdString(); } else if(state == POOL_STATUS_ACCEPTING_ENTRIES) { if(entries == 0) { if(darkSendPool.strAutoDenomResult.size() == 0){ convert << tr("Mixing in progress...").toStdString(); } else { convert << darkSendPool.strAutoDenomResult; } showingDarkSendMessage = 0; } else if (accepted == 1) { convert << tr("Darksend request complete: Your transaction was accepted into the pool!").toStdString(); if(showingDarkSendMessage % 10 > 8) { darkSendPool.lastEntryAccepted = 0; showingDarkSendMessage = 0; } } else { if(showingDarkSendMessage % 70 <= 40) convert << tr("Submitted following entries to masternode:").toStdString() << " " << entries << "/" << darkSendPool.GetMaxPoolTransactions(); else if(showingDarkSendMessage % 70 <= 50) convert << tr("Submitted to masternode, waiting for more entries").toStdString() << " (" << entries << "/" << darkSendPool.GetMaxPoolTransactions() << " ) ."; else if(showingDarkSendMessage % 70 <= 60) convert << tr("Submitted to masternode, waiting for more entries").toStdString() << " (" << entries << "/" << darkSendPool.GetMaxPoolTransactions() << " ) .."; else if(showingDarkSendMessage % 70 <= 70) convert << tr("Submitted to masternode, waiting for more entries").toStdString() << " (" << entries << "/" << darkSendPool.GetMaxPoolTransactions() << " ) ..."; } } else if(state == POOL_STATUS_SIGNING) { if(showingDarkSendMessage % 70 <= 10) convert << tr("Found enough users, signing ...").toStdString(); else if(showingDarkSendMessage % 70 <= 20) convert << tr("Found enough users, signing ( waiting").toStdString() << ". 
)"; else if(showingDarkSendMessage % 70 <= 30) convert << tr("Found enough users, signing ( waiting").toStdString() << ".. )"; else if(showingDarkSendMessage % 70 <= 40) convert << tr("Found enough users, signing ( waiting").toStdString() << "... )"; } else if(state == POOL_STATUS_TRANSMISSION) { convert << tr("Transmitting final transaction.").toStdString(); } else if (state == POOL_STATUS_IDLE) { convert << tr("Darksend is idle.").toStdString(); } else if (state == POOL_STATUS_FINALIZE_TRANSACTION) { convert << tr("Finalizing transaction.").toStdString(); } else if(state == POOL_STATUS_ERROR) { convert << tr("Darksend request incomplete:").toStdString() << " " << darkSendPool.lastMessage << ". " << tr("Will retry...").toStdString(); } else if(state == POOL_STATUS_SUCCESS) { convert << tr("Darksend request complete:").toStdString() << " " << darkSendPool.lastMessage; } else if(state == POOL_STATUS_QUEUE) { if(showingDarkSendMessage % 70 <= 50) convert << tr("Submitted to masternode, waiting in queue").toStdString() << " ."; else if(showingDarkSendMessage % 70 <= 60) convert << tr("Submitted to masternode, waiting in queue").toStdString() << " .."; else if(showingDarkSendMessage % 70 <= 70) convert << tr("Submitted to masternode, waiting in queue").toStdString() << " ..."; } else { convert << tr("Unknown state:").toStdString() << " id = " << state; } if(state == POOL_STATUS_ERROR || state == POOL_STATUS_SUCCESS) darkSendPool.Check(); QString s(convert.str().c_str()); s = tr("Last Darksend message:\n") + s; if(s != ui->darksendStatus->text()) LogPrintf("Last Darksend message: %s\n", convert.str().c_str()); ui->darksendStatus->setText(s); if(darkSendPool.sessionDenom == 0){ ui->labelSubmittedDenom->setText(tr("N/A")); } else { std::string out; darkSendPool.GetDenominationsToString(darkSendPool.sessionDenom, out); QString s2(out.c_str()); ui->labelSubmittedDenom->setText(s2); } showingDarkSendMessage++; darksendActionCheck++; // Get DarkSend Denomination Status } void OverviewPage::darksendAuto(){ darkSendPool.DoAutomaticDenominating(); } void OverviewPage::darksendReset(){ darkSendPool.Reset(); QMessageBox::warning(this, tr("Darksend"), tr("Darksend was successfully reset."), QMessageBox::Ok, QMessageBox::Ok); } void OverviewPage::toggleDarksend(){ if(!fEnableDarksend){ int64_t balance = pwalletMain->GetBalance(); float minAmount = 1.49 * COIN; if(balance < minAmount){ QString strMinAmount( BitcoinUnits::formatWithUnit( walletModel->getOptionsModel()->getDisplayUnit(), minAmount)); QMessageBox::warning(this, tr("Darksend"), tr("Darksend requires at least %1 to use.").arg(strMinAmount), QMessageBox::Ok, QMessageBox::Ok); return; } // if wallet is locked, ask for a passphrase if (walletModel->getEncryptionStatus() == WalletModel::Locked) { WalletModel::UnlockContext ctx(walletModel->requestUnlock(false)); if(!ctx.isValid()) { //unlock was cancelled darkSendPool.cachedNumBlocks = 0; QMessageBox::warning(this, tr("Darksend"), tr("Wallet is locked and user declined to unlock. Disabling Darksend."), QMessageBox::Ok, QMessageBox::Ok); if (fDebug) LogPrintf("Wallet is locked and user declined to unlock. 
Disabling Darksend.\n"); return; } } } darkSendPool.cachedNumBlocks = 0; fEnableDarksend = !fEnableDarksend; if(!fEnableDarksend){ ui->toggleDarksend->setText(tr("Start Darksend Mixing")); } else { ui->toggleDarksend->setText(tr("Stop Darksend Mixing")); /* show darksend configuration if client has defaults set */ if(nAnonymizeGamblrAmount == 0){ DarksendConfig dlg(this); dlg.setModel(walletModel); dlg.exec(); } darkSendPool.DoAutomaticDenominating(); } }
knolza/gamblr
src/qt/overviewpage.cpp
C++
mit
19,463
#!/usr/bin/python #coding: utf-8 from __future__ import unicode_literals import os import unittest import xlrd import msp.schedule_parser as schedule_parser __author__ = "Andrey Konovalov" __copyright__ = "Copyright (C) 2014 Andrey Konovalov" __license__ = "MIT" __version__ = "0.1" this_dir, this_filename = os.path.split(__file__) SCHEDULE_PATH = os.path.join(this_dir, "..", "data", "2013_fall", "4kurs.xls") class WeekdayRangeTest(unittest.TestCase): def setUp(self): self.schedule = schedule_parser.Schedule() self.schedule.Parse(SCHEDULE_PATH) def runTest(self): self.assertEqual(self.schedule.GetWeekdayRange(0), (4, 11)) self.assertEqual(self.schedule.GetWeekdayRange(1), (12, 19)) self.assertEqual(self.schedule.GetWeekdayRange(2), (20, 27)) self.assertEqual(self.schedule.GetWeekdayRange(3), (28, 37)) self.assertEqual(self.schedule.GetWeekdayRange(4), (38, 47)) self.assertEqual(self.schedule.GetWeekdayRange(5), (48, 57)) class DepartmentCountTest(unittest.TestCase): def setUp(self): self.schedule = schedule_parser.Schedule() self.schedule.Parse(SCHEDULE_PATH) def runTest(self): self.assertEqual(self.schedule.GetDepartmentCount(), 9) class DepartmentRangeTest(unittest.TestCase): def setUp(self): self.schedule = schedule_parser.Schedule() self.schedule.Parse(SCHEDULE_PATH) def runTest(self): self.assertEqual(self.schedule.GetDepartmentRange(0), (2, 11)) self.assertEqual(self.schedule.GetDepartmentRange(1), (13, 20)) self.assertEqual(self.schedule.GetDepartmentRange(2), (22, 32)) self.assertEqual(self.schedule.GetDepartmentRange(3), (34, 36)) self.assertEqual(self.schedule.GetDepartmentRange(4), (38, 43)) self.assertEqual(self.schedule.GetDepartmentRange(5), (45, 53)) self.assertEqual(self.schedule.GetDepartmentRange(6), (55, 62)) self.assertEqual(self.schedule.GetDepartmentRange(7), (64, 71)) self.assertEqual(self.schedule.GetDepartmentRange(8), (73, 77)) class DepartmentsRowTest(unittest.TestCase): def setUp(self): self.schedule = schedule_parser.Schedule() self.schedule.Parse(SCHEDULE_PATH) def runTest(self): self.assertEqual(self.schedule.GetDepartmentsRow(), 3) class HoursColumnTest(unittest.TestCase): def setUp(self): self.schedule = schedule_parser.Schedule() self.schedule.Parse(SCHEDULE_PATH) def runTest(self): self.assertEqual(self.schedule.GetHoursColumn(), 1) class HoursRangesTest(unittest.TestCase): def setUp(self): self.schedule = schedule_parser.Schedule() self.schedule.Parse(SCHEDULE_PATH) def runTest(self): self.assertEqual(self.schedule.GetHoursRanges(0), [(4, 5), (5, 6), (6, 7), (7, 8), (8, 9), (9, 10), (10, 11)]) self.assertEqual(self.schedule.GetHoursRanges(3), [(28, 30), (30, 31), (31, 32), (32, 34), (34, 35), (35, 36), (36, 37)]) self.assertEqual(self.schedule.GetHoursRanges(5), [(48, 49), (49, 50), (50, 52), (52, 53), (53, 54), (54, 56), (56, 57)]) class GroupCountTest(unittest.TestCase): def setUp(self): self.schedule = schedule_parser.Schedule() self.schedule.Parse(SCHEDULE_PATH) def runTest(self): self.assertEqual(self.schedule.GetGroupCount(0), 9) self.assertEqual(self.schedule.GetGroupCount(1), 7) self.assertEqual(self.schedule.GetGroupCount(2), 8) self.assertEqual(self.schedule.GetGroupCount(3), 2) self.assertEqual(self.schedule.GetGroupCount(4), 5) self.assertEqual(self.schedule.GetGroupCount(5), 8) self.assertEqual(self.schedule.GetGroupCount(6), 7) self.assertEqual(self.schedule.GetGroupCount(7), 7) self.assertEqual(self.schedule.GetGroupCount(8), 4) class GroupListTest(unittest.TestCase): def setUp(self): self.schedule = schedule_parser.Schedule() 
self.schedule.Parse(SCHEDULE_PATH) def runTest(self): self.assertEqual(self.schedule.GetGroupList(0), ['011', '012', '013', '014', '015', '016', '017', '018', '019']) self.assertEqual(self.schedule.GetGroupList(1), ['021', '022', '023', '024', '025', '026', '028']) self.assertEqual(self.schedule.GetGroupList(3), ['041', '042']) self.assertEqual(self.schedule.GetGroupList(8), ['0111', '0112', '0113', '0114']) class GroupRangeTest(unittest.TestCase): def setUp(self): self.schedule = schedule_parser.Schedule() self.schedule.Parse(SCHEDULE_PATH) def runTest(self): self.assertEqual(self.schedule.GetGroupRange(0, 0), (2, 3)) self.assertEqual(self.schedule.GetGroupRange(0, 1), (3, 4)) self.assertEqual(self.schedule.GetGroupRange(2, 1), (23, 25)) self.assertEqual(self.schedule.GetGroupRange(2, 2), (25, 26)) self.assertEqual(self.schedule.GetGroupRange(2, 3), (26, 28)) self.assertEqual(self.schedule.GetGroupRange(5, 3), (48, 49)) self.assertEqual(self.schedule.GetGroupRange(8, 0), (73, 74)) self.assertEqual(self.schedule.GetGroupRange(8, 3), (76, 77)) class WeekdayByRowTest(unittest.TestCase): def setUp(self): self.schedule = schedule_parser.Schedule() self.schedule.Parse(SCHEDULE_PATH) def runTest(self): self.assertEqual(self.schedule.GetWeekdayByRow(4), 0) self.assertEqual(self.schedule.GetWeekdayByRow(5), 0) self.assertEqual(self.schedule.GetWeekdayByRow(10), 0) self.assertEqual(self.schedule.GetWeekdayByRow(13), 1) self.assertEqual(self.schedule.GetWeekdayByRow(25), 2) self.assertEqual(self.schedule.GetWeekdayByRow(26), 2) self.assertEqual(self.schedule.GetWeekdayByRow(28), 3) self.assertEqual(self.schedule.GetWeekdayByRow(44), 4) self.assertEqual(self.schedule.GetWeekdayByRow(48), 5) self.assertEqual(self.schedule.GetWeekdayByRow(56), 5) class PairByRowTest(unittest.TestCase): def setUp(self): self.schedule = schedule_parser.Schedule() self.schedule.Parse(SCHEDULE_PATH) def runTest(self): self.assertEqual(self.schedule.GetPairByRow(4), (0, 0)) self.assertEqual(self.schedule.GetPairByRow(5), (1, 0)) self.assertEqual(self.schedule.GetPairByRow(10), (6, 0)) self.assertEqual(self.schedule.GetPairByRow(12), (0, 0)) self.assertEqual(self.schedule.GetPairByRow(28), (0, 0)) self.assertEqual(self.schedule.GetPairByRow(29), (0, 1)) self.assertEqual(self.schedule.GetPairByRow(30), (1, 0)) self.assertEqual(self.schedule.GetPairByRow(33), (3, 1)) self.assertEqual(self.schedule.GetPairByRow(56), (6, 0)) class DepartmentByColumnTest(unittest.TestCase): def setUp(self): self.schedule = schedule_parser.Schedule() self.schedule.Parse(SCHEDULE_PATH) def runTest(self): self.assertEqual(self.schedule.GetDepartmentIndexByColumn(2), 0) self.assertEqual(self.schedule.GetDepartmentIndexByColumn(3), 0) self.assertEqual(self.schedule.GetDepartmentIndexByColumn(10), 0) self.assertEqual(self.schedule.GetDepartmentIndexByColumn(13), 1) self.assertEqual(self.schedule.GetDepartmentIndexByColumn(18), 1) self.assertEqual(self.schedule.GetDepartmentIndexByColumn(19), 1) self.assertEqual(self.schedule.GetDepartmentIndexByColumn(22), 2) self.assertEqual(self.schedule.GetDepartmentIndexByColumn(24), 2) self.assertEqual(self.schedule.GetDepartmentIndexByColumn(31), 2) self.assertEqual(self.schedule.GetDepartmentIndexByColumn(39), 4) self.assertEqual(self.schedule.GetDepartmentIndexByColumn(64), 7) self.assertEqual(self.schedule.GetDepartmentIndexByColumn(70), 7) self.assertEqual(self.schedule.GetDepartmentIndexByColumn(73), 8) self.assertEqual(self.schedule.GetDepartmentIndexByColumn(76), 8) class 
GroupByColumnTest(unittest.TestCase): def setUp(self): self.schedule = schedule_parser.Schedule() self.schedule.Parse(SCHEDULE_PATH) def runTest(self): self.assertEqual(self.schedule.GetGroupIndexByColumn(2), (0, 0)) self.assertEqual(self.schedule.GetGroupIndexByColumn(3), (1, 0)) self.assertEqual(self.schedule.GetGroupIndexByColumn(10), (8, 0)) self.assertEqual(self.schedule.GetGroupIndexByColumn(23), (1, 0)) self.assertEqual(self.schedule.GetGroupIndexByColumn(24), (1, 1)) self.assertEqual(self.schedule.GetGroupIndexByColumn(25), (2, 0)) self.assertEqual(self.schedule.GetGroupIndexByColumn(26), (3, 0)) self.assertEqual(self.schedule.GetGroupIndexByColumn(27), (3, 1)) self.assertEqual(self.schedule.GetGroupIndexByColumn(76), (3, 0)) def suite(): loader = unittest.TestLoader() suite = unittest.TestSuite() suite.addTest(WeekdayRangeTest()) suite.addTest(DepartmentCountTest()) suite.addTest(DepartmentRangeTest()) suite.addTest(DepartmentsRowTest()) suite.addTest(HoursColumnTest()) suite.addTest(HoursRangesTest()) suite.addTest(GroupCountTest()) suite.addTest(GroupListTest()) suite.addTest(GroupRangeTest()) suite.addTest(WeekdayByRowTest()) suite.addTest(PairByRowTest()) suite.addTest(DepartmentByColumnTest()) suite.addTest(GroupByColumnTest()) return suite if __name__ == '__main__': unittest.TextTestRunner(verbosity=2).run(suite())
xairy/mipt-schedule-parser
msp/test/schedule_tests.py
Python
mit
8,974
var Peer = require('../lib/Peer'); var Connection = require('../lib/Connection'); var dns = require('dns'); // get a peer from dns seed dns.resolve('dnsseed.bluematt.me', function(err, seeds) { // use the first peer var peer = new Peer(seeds[0], 8608); //Custom peer: //var peer = new Peer('180.153.139.246', '8888'); // create a connection without an existing socket // but specify a socks5 proxy to create a socket // that's bound to that proxy in it's place var connection = new Connection(null, peer, { proxy: { host: '127.0.0.1', port: 9050 } }); connection.open(); connection.on('connect', function(data) { console.log('connected through socks5!'); }); connection.on('error', function(err) { console.log('There was an error running this example.'); console.log('Are you running Tor? Tor must running for this example to work.'); console.log('If you still get an error, you may need to use a different proxy from here:'); console.log('http://sockslist.net/'); //console.log(err); }); });
Bushstar/bitcore
examples/ConnectionTor.js
JavaScript
mit
1,072
require 'raven' # This file gets copied to shared/configs and linked from # config/initializers/raven.rb at deployment by capistrano. Raven.configure do |config| # Replace this URL with the one provided by getsentry. config.dsn = 'https://9ee5c448b2dc42be81448b502b164820:59004b5ea4b545f5af58d310a3802ea8@app.getsentry.com/12422' end
Swirrl/digitalsocial
config/initializers/raven_production_example.rb
Ruby
mit
340
import gzip import glob import numpy as np import pandas as pd from scipy.stats import pearsonr from scipy.stats import spearmanr def get_num_lines_gz(filename): num_lines = 0 with gzip.open(filename, "r") as fp: for line in fp: num_lines += 1 return num_lines def main(): """get stats from PAS-seq - num reads per file - gene quant level spearman correlations """ # files DATA_DIR = "/mnt/lab_data/kundaje/projects/skin/data/bds/processed.chipseq.2017-01-23.histones" # params marks = ["H3K27ac", "H3K4me1", "H3K27me3", "CTCF"] days = np.arange(0, 7, 3) days = ["d{}".format(day).replace(".", "") for day in days] reps = ["1", "2"] # results results = {} results["mark_or_tf"] = [] results["timepoint"] = [] results["replicate"] = [] #results["num_input_reads"] = [] results["num_nodup_reads"] = [] results["NRF"] = [] results["PBC1"] = [] results["PBC2"] = [] results["num_macs2_peaks"] = [] results["num_overlap_peaks"] = [] results["num_idr_peaks"] = [] for mark in marks: print mark for day in days: for rep in reps: # timepoint, rep results["mark_or_tf"].append(mark) results["timepoint"].append(day) results["replicate"].append(rep) # nodup reads nodup_log = glob.glob( "{}/*{}*{}*/qc/rep{}/*nodup.flagstat.qc".format( DATA_DIR, day, mark, rep))[0] with open(nodup_log, "r") as fp: for line in fp: if "in total" in line: num_nodup_reads = line.split("+")[0].strip() results["num_nodup_reads"].append(num_nodup_reads) # NRF/PBC1/PBC2 lib_log = glob.glob( "{}/*{}*{}*/qc/rep{}/*nodup.pbc.qc".format( DATA_DIR, day, mark, rep))[0] with open(lib_log, "r") as fp: # cols 5,6,7 is NRF/PBC1/PBC2 for line in fp: fields = line.strip().split() results["NRF"].append(fields[4]) results["PBC1"].append(fields[5]) results["PBC2"].append(fields[6]) # peak files macs2_peaks = glob.glob( "{}/*{}*{}*/peak/macs2/rep{}/*narrowPeak.gz".format( DATA_DIR, day, mark, rep))[0] num_macs2 = get_num_lines_gz(macs2_peaks) results["num_macs2_peaks"].append(num_macs2) if "CTCF" in mark: idr_peaks = glob.glob( "{}/*{}*{}*/peak/idr/true_reps/rep1-rep2/*filt.narrowPeak.gz".format( DATA_DIR, day, mark))[0] num_idr = get_num_lines_gz(idr_peaks) results["num_idr_peaks"].append(num_idr) results["num_overlap_peaks"].append("NA") else: results["num_idr_peaks"].append("NA") overlap_peaks = glob.glob( "{}/*{}*{}*/peak/macs2/overlap/*filt.narrowPeak.gz".format( DATA_DIR, day, mark, rep))[0] num_overlap = get_num_lines_gz(overlap_peaks) results["num_overlap_peaks"].append(num_overlap) # dataframe results = pd.DataFrame(results) ordered_headers = [ "mark_or_tf", "timepoint", "replicate", #"num_input_reads", "num_nodup_reads", "NRF", "PBC1", "PBC2", "num_macs2_peaks", "num_overlap_peaks", "num_idr_peaks"] results = results[ordered_headers] out_file = "ggr.ChIP-seq.QC.summary.txt" results.to_csv(out_file, sep="\t", header=True, index=False) return main()
vervacity/ggr-project
scripts/data_qc/summarize_chipseq_qc.py
Python
mit
4,085
interface IEmploymentSkill { name: string; } export default IEmploymentSkill;
borkovskij/scheduleGrsu
src/app/models/employmentSkill.ts
TypeScript
mit
81
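The interface above is a single required string field; a trivial TypeScript usage sketch follows (the import path and the skill values are made up).

import IEmploymentSkill from './employmentSkill';

// Hypothetical data: any object with a string `name` satisfies the interface.
const skills: IEmploymentSkill[] = [
  { name: 'TypeScript' },
  { name: 'Angular' },
];

console.log(skills.map(s => s.name).join(', ')); // "TypeScript, Angular"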
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // Code generated by Microsoft (R) AutoRest Code Generator. package com.azure.resourcemanager.avs.implementation; import com.azure.core.http.rest.PagedIterable; import com.azure.core.http.rest.Response; import com.azure.core.http.rest.SimpleResponse; import com.azure.core.util.Context; import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.avs.fluent.PlacementPoliciesClient; import com.azure.resourcemanager.avs.fluent.models.PlacementPolicyInner; import com.azure.resourcemanager.avs.models.PlacementPolicies; import com.azure.resourcemanager.avs.models.PlacementPolicy; import com.fasterxml.jackson.annotation.JsonIgnore; public final class PlacementPoliciesImpl implements PlacementPolicies { @JsonIgnore private final ClientLogger logger = new ClientLogger(PlacementPoliciesImpl.class); private final PlacementPoliciesClient innerClient; private final com.azure.resourcemanager.avs.AvsManager serviceManager; public PlacementPoliciesImpl( PlacementPoliciesClient innerClient, com.azure.resourcemanager.avs.AvsManager serviceManager) { this.innerClient = innerClient; this.serviceManager = serviceManager; } public PagedIterable<PlacementPolicy> list(String resourceGroupName, String privateCloudName, String clusterName) { PagedIterable<PlacementPolicyInner> inner = this.serviceClient().list(resourceGroupName, privateCloudName, clusterName); return Utils.mapPage(inner, inner1 -> new PlacementPolicyImpl(inner1, this.manager())); } public PagedIterable<PlacementPolicy> list( String resourceGroupName, String privateCloudName, String clusterName, Context context) { PagedIterable<PlacementPolicyInner> inner = this.serviceClient().list(resourceGroupName, privateCloudName, clusterName, context); return Utils.mapPage(inner, inner1 -> new PlacementPolicyImpl(inner1, this.manager())); } public PlacementPolicy get( String resourceGroupName, String privateCloudName, String clusterName, String placementPolicyName) { PlacementPolicyInner inner = this.serviceClient().get(resourceGroupName, privateCloudName, clusterName, placementPolicyName); if (inner != null) { return new PlacementPolicyImpl(inner, this.manager()); } else { return null; } } public Response<PlacementPolicy> getWithResponse( String resourceGroupName, String privateCloudName, String clusterName, String placementPolicyName, Context context) { Response<PlacementPolicyInner> inner = this .serviceClient() .getWithResponse(resourceGroupName, privateCloudName, clusterName, placementPolicyName, context); if (inner != null) { return new SimpleResponse<>( inner.getRequest(), inner.getStatusCode(), inner.getHeaders(), new PlacementPolicyImpl(inner.getValue(), this.manager())); } else { return null; } } public void delete( String resourceGroupName, String privateCloudName, String clusterName, String placementPolicyName) { this.serviceClient().delete(resourceGroupName, privateCloudName, clusterName, placementPolicyName); } public void delete( String resourceGroupName, String privateCloudName, String clusterName, String placementPolicyName, Context context) { this.serviceClient().delete(resourceGroupName, privateCloudName, clusterName, placementPolicyName, context); } public PlacementPolicy getById(String id) { String resourceGroupName = Utils.getValueFromIdByName(id, "resourceGroups"); if (resourceGroupName == null) { throw logger .logExceptionAsError( new IllegalArgumentException( String .format("The resource ID '%s' is not valid. 
Missing path segment 'resourceGroups'.", id))); } String privateCloudName = Utils.getValueFromIdByName(id, "privateClouds"); if (privateCloudName == null) { throw logger .logExceptionAsError( new IllegalArgumentException( String.format("The resource ID '%s' is not valid. Missing path segment 'privateClouds'.", id))); } String clusterName = Utils.getValueFromIdByName(id, "clusters"); if (clusterName == null) { throw logger .logExceptionAsError( new IllegalArgumentException( String.format("The resource ID '%s' is not valid. Missing path segment 'clusters'.", id))); } String placementPolicyName = Utils.getValueFromIdByName(id, "placementPolicies"); if (placementPolicyName == null) { throw logger .logExceptionAsError( new IllegalArgumentException( String .format( "The resource ID '%s' is not valid. Missing path segment 'placementPolicies'.", id))); } return this .getWithResponse(resourceGroupName, privateCloudName, clusterName, placementPolicyName, Context.NONE) .getValue(); } public Response<PlacementPolicy> getByIdWithResponse(String id, Context context) { String resourceGroupName = Utils.getValueFromIdByName(id, "resourceGroups"); if (resourceGroupName == null) { throw logger .logExceptionAsError( new IllegalArgumentException( String .format("The resource ID '%s' is not valid. Missing path segment 'resourceGroups'.", id))); } String privateCloudName = Utils.getValueFromIdByName(id, "privateClouds"); if (privateCloudName == null) { throw logger .logExceptionAsError( new IllegalArgumentException( String.format("The resource ID '%s' is not valid. Missing path segment 'privateClouds'.", id))); } String clusterName = Utils.getValueFromIdByName(id, "clusters"); if (clusterName == null) { throw logger .logExceptionAsError( new IllegalArgumentException( String.format("The resource ID '%s' is not valid. Missing path segment 'clusters'.", id))); } String placementPolicyName = Utils.getValueFromIdByName(id, "placementPolicies"); if (placementPolicyName == null) { throw logger .logExceptionAsError( new IllegalArgumentException( String .format( "The resource ID '%s' is not valid. Missing path segment 'placementPolicies'.", id))); } return this.getWithResponse(resourceGroupName, privateCloudName, clusterName, placementPolicyName, context); } public void deleteById(String id) { String resourceGroupName = Utils.getValueFromIdByName(id, "resourceGroups"); if (resourceGroupName == null) { throw logger .logExceptionAsError( new IllegalArgumentException( String .format("The resource ID '%s' is not valid. Missing path segment 'resourceGroups'.", id))); } String privateCloudName = Utils.getValueFromIdByName(id, "privateClouds"); if (privateCloudName == null) { throw logger .logExceptionAsError( new IllegalArgumentException( String.format("The resource ID '%s' is not valid. Missing path segment 'privateClouds'.", id))); } String clusterName = Utils.getValueFromIdByName(id, "clusters"); if (clusterName == null) { throw logger .logExceptionAsError( new IllegalArgumentException( String.format("The resource ID '%s' is not valid. Missing path segment 'clusters'.", id))); } String placementPolicyName = Utils.getValueFromIdByName(id, "placementPolicies"); if (placementPolicyName == null) { throw logger .logExceptionAsError( new IllegalArgumentException( String .format( "The resource ID '%s' is not valid. 
Missing path segment 'placementPolicies'.", id))); } this.delete(resourceGroupName, privateCloudName, clusterName, placementPolicyName, Context.NONE); } public void deleteByIdWithResponse(String id, Context context) { String resourceGroupName = Utils.getValueFromIdByName(id, "resourceGroups"); if (resourceGroupName == null) { throw logger .logExceptionAsError( new IllegalArgumentException( String .format("The resource ID '%s' is not valid. Missing path segment 'resourceGroups'.", id))); } String privateCloudName = Utils.getValueFromIdByName(id, "privateClouds"); if (privateCloudName == null) { throw logger .logExceptionAsError( new IllegalArgumentException( String.format("The resource ID '%s' is not valid. Missing path segment 'privateClouds'.", id))); } String clusterName = Utils.getValueFromIdByName(id, "clusters"); if (clusterName == null) { throw logger .logExceptionAsError( new IllegalArgumentException( String.format("The resource ID '%s' is not valid. Missing path segment 'clusters'.", id))); } String placementPolicyName = Utils.getValueFromIdByName(id, "placementPolicies"); if (placementPolicyName == null) { throw logger .logExceptionAsError( new IllegalArgumentException( String .format( "The resource ID '%s' is not valid. Missing path segment 'placementPolicies'.", id))); } this.delete(resourceGroupName, privateCloudName, clusterName, placementPolicyName, context); } private PlacementPoliciesClient serviceClient() { return this.innerClient; } private com.azure.resourcemanager.avs.AvsManager manager() { return this.serviceManager; } public PlacementPolicyImpl define(String name) { return new PlacementPolicyImpl(name, this.manager()); } }
Azure/azure-sdk-for-java
sdk/avs/azure-resourcemanager-avs/src/main/java/com/azure/resourcemanager/avs/implementation/PlacementPoliciesImpl.java
Java
mit
11,094
const WSP = require('../dist/ws') import axios from 'axios' import * as WS from 'ws' const turnOn = async (port: number = 8095) => { await axios.get('http://127.0.0.1:8085/on/' + port) return true } const shutDown = async (port: number = 8095) => { await axios.get('http://127.0.0.1:8085/off/' + port) return true } const createNew = async (config = {}, port = 8095): Promise<any> => { await turnOn(port) const ws = new WSP(Object.assign({ url: '127.0.0.1:' + port, // log: (...a) => console.log(...a), adapter: (host, protocols) => new (WS as any)(host, protocols) }, config)) return ws } export { createNew, turnOn, shutDown }
houd1ni/WebsocketPromisify
test/utils.ts
TypeScript
mit
668
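A sketch of how these test helpers would be used together; it assumes the control server on 127.0.0.1:8085 (which turnOn/shutDown call) is already running, and it only exercises the functions defined above.

import { createNew, shutDown } from './utils';

// Hypothetical smoke test: start the WebSocket server on the default port,
// get a WebsocketPromisify client back, then tear everything down.
async function smokeTest(): Promise<void> {
  const ws = await createNew();   // turnOn(8095) + construct the wrapped client
  console.log('client created:', typeof ws);
  await shutDown();               // ask the control server to stop port 8095
}

smokeTest().catch(err => {
  console.error('smoke test failed:', err);
  process.exit(1);
});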
// The MIT License (MIT) // // Copyright (c) Andrew Armstrong/FacticiusVir 2020 // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all // copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. // This file was automatically generated and should not be edited directly. using System; using System.Runtime.InteropServices; namespace SharpVk.Interop.Multivendor { /// <summary> /// /// </summary> [StructLayout(LayoutKind.Sequential)] public unsafe partial struct PhysicalDevicePCIBusInfoProperties { /// <summary> /// The type of this structure /// </summary> public SharpVk.StructureType SType; /// <summary> /// Null or a pointer to an extension-specific structure /// </summary> public void* Next; /// <summary> /// The PCI bus domain /// </summary> public uint PciDomain; /// <summary> /// The PCI bus identifier /// </summary> public uint PciBus; /// <summary> /// The PCI device identifier /// </summary> public uint PciDevice; /// <summary> /// The PCI device function identifier /// </summary> public uint PciFunction; } }
FacticiusVir/SharpVk
src/SharpVk/Interop/Multivendor/PhysicalDevicePCIBusInfoProperties.gen.cs
C#
mit
2,270
student_phoneNumber_name = {1: 'a', 3: 'c', 2: 'b'} def Handler() : while (1) : choice = eval(input("Enter :\t 1 - to search student name \n \t 2 - to insert new student record \n \t 0 - to quit\n")) print(choice) if (choice == 1) : if (student_phoneNumber_name) : phone_number = input("Enter student's phone number : ") name = SearchRecord(phone_number) if (name) : print("name : " + name ) else : print(str(phone_number) + "Does not exist in record" + str(name)) else : print("Record is empty ") elif (choice == 2) : phone_number = input("Enter student's phone number : ") name = input("Enter student's name : ") #best example to understand input() and raw_input() InsertRecord(phone_number, name) elif (choice == 0) : break else: print("Enter correct choice") def InsertRecord(x, y): student_phoneNumber_name[x] = y return; def SearchRecord(x): print(x) if (x in student_phoneNumber_name) : return student_phoneNumber_name[x] return False Handler() print(student_phoneNumber_name)
ajitghz582/PythonLearning
DAY_1_ASSIGNMENTS/1_name_phone_number.py
Python
mit
1,070
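For contrast with the exercise above, a TypeScript re-sketch of the same lookup/insert logic with string keys throughout; note that under Python 3 the original's integer keys can never match the strings returned by input(), which this sketch avoids. Everything here is illustrative, not part of the original record.

// Same seed data and operations as the Python exercise, using consistent string keys.
const phoneBook = new Map<string, string>([
  ['1', 'a'],
  ['3', 'c'],
  ['2', 'b'],
]);

function insertRecord(phone: string, name: string): void {
  phoneBook.set(phone, name);
}

function searchRecord(phone: string): string | undefined {
  return phoneBook.get(phone);
}

insertRecord('4', 'd');
console.log(searchRecord('3')); // "c"
console.log(searchRecord('9')); // undefined - no such entry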
import {Utils} from "../service/Utils"; Template.registerHelper( "displayHours", function (date) { return new moment(date).format("H[h]"); } ); Template.registerHelper( "displayHoursMinute", function (date) { return new moment(date).format("H[h]mm"); } ); Template.registerHelper( "displayHoursMinuteSeconde", function (date) { return new moment(date).format("H[h]mm ss[sec]"); } ); Template.registerHelper( "displayDateTime", function (date) { return new moment(date).format("ddd DD MMM HH[h]mm"); } ); Template.registerHelper( "displayDay", function (date) { return new moment(date).format("DD MMM"); } ); Template.registerHelper( "skillLabel", function () { return Skills.findOne({_id: this.toString()}).label; } ); Template.registerHelper( "onUpdateError", function (error) { return function (error) { Utils.onUpdateError(error.reason) } }); Template.registerHelper( "onUpdateSuccess", function (message) { return function (message) { Utils.onUpdateSuccess(message); } }); Template.registerHelper( "onDeleteError", function (error) { return function (error) { Utils.onUpdateError(error.reason) } }); Template.registerHelper( "onDeleteSuccess", function (message) { return function (message) { Utils.onUpdateSuccess(message); } }); Template.registerHelper( "allTeams", function () { return Teams.find(); } ); Template.registerHelper('equals', function (a, b) { return a === b; }); Template.registerHelper('adds', function (a, b) { return a + b; }); Template.registerHelper( "allOptionsTeams", function () { return Teams.find({ name: { $ne: ASSIGNMENTREADYTEAM } }); } ); Template.registerHelper( "allSkills", function (userId) { var userTeams = Meteor.users.findOne({_id: userId}).teams; return Skills.find({ teams: { $in: userTeams } }); } ); Template.registerHelper('ifNotEmpty', function (item, options) { if (item) { if (item instanceof Array) { if (item.length > 0) { return options.fn(this); } else { return options.inverse(this); } } else { if (item.fetch().length > 0) { return options.fn(this); } else { return options.inverse(this); } } } else { return options.inverse(this); } }); Template.registerHelper("equals", function (a, b) { return a === b; } ); Template.registerHelper("isMore", function (a, b) { return a > b; } ); Template.registerHelper("displayValidationState", function (state) { return DisplayedValidationState[state]; }); Template.registerHelper("RolesEnum", function () { return RolesEnum; }); Template.registerHelper( "currentUserId", function () { return Meteor.users.findOne({_id: Meteor.userId()})._id; } ); Template.registerHelper( "isCurrentUserTheOneLogged", function (currentUserId) { return currentUserId === Meteor.users.findOne({_id: Meteor.userId()})._id; } ) Template.registerHelper( "currentUserIdObject", function () { return { _id: Meteor.users.findOne({_id: Meteor.userId()})._id } } ); Template.registerHelper("cursorLength", function (array) { return array.fetch().length; } );
assomaker/manifmaker
app/client/helpers-events/global-helpers.js
JavaScript
mit
3,295
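The date helpers above are thin wrappers around moment's format(); a standalone TypeScript sketch of the same format strings follows, with an arbitrary example date (the value and the default English locale are assumptions).

import moment from 'moment';

const date = new Date(2017, 4, 3, 14, 30, 45); // hypothetical value: 3 May 2017, 14:30:45

console.log(moment(date).format('H[h]'));               // "14h"           (displayHours)
console.log(moment(date).format('H[h]mm'));             // "14h30"         (displayHoursMinute)
console.log(moment(date).format('H[h]mm ss[sec]'));     // "14h30 45sec"   (displayHoursMinuteSeconde)
console.log(moment(date).format('ddd DD MMM HH[h]mm')); // "Wed 03 May 14h30" (displayDateTime)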
// +build windows package sers // taken from https://github.com/tarm/goserial // and slightly modified // (C) 2011, 2012 Tarmigan Casebolt, Benjamin Siegert, Michael Meier // All rights reserved. // Use of this source code is governed by an MIT-style // license that can be found in the LICENSE file. import ( "fmt" "os" "sync" "syscall" "unsafe" ) type serialPort struct { f *os.File fd syscall.Handle rl sync.Mutex wl sync.Mutex ro *syscall.Overlapped wo *syscall.Overlapped } type structDCB struct { DCBlength, BaudRate uint32 flags [4]byte wReserved, XonLim, XoffLim uint16 ByteSize, Parity, StopBits byte XonChar, XoffChar, ErrorChar, EofChar, EvtChar byte wReserved1 uint16 } type structTimeouts struct { ReadIntervalTimeout uint32 ReadTotalTimeoutMultiplier uint32 ReadTotalTimeoutConstant uint32 WriteTotalTimeoutMultiplier uint32 WriteTotalTimeoutConstant uint32 } //func openPort(name string) (rwc io.ReadWriteCloser, err error) { // TODO func Open(name string) (rwc SerialPort, err error) { if len(name) > 0 && name[0] != '\\' { name = "\\\\.\\" + name } h, err := syscall.CreateFile(syscall.StringToUTF16Ptr(name), syscall.GENERIC_READ|syscall.GENERIC_WRITE, 0, nil, syscall.OPEN_EXISTING, syscall.FILE_ATTRIBUTE_NORMAL|syscall.FILE_FLAG_OVERLAPPED, 0) if err != nil { return nil, err } f := os.NewFile(uintptr(h), name) defer func() { if err != nil { f.Close() } }() /*if err = setCommState(h, baud); err != nil { return }*/ if err = setupComm(h, 64, 64); err != nil { return } if err = setCommTimeouts(h, 0.0); err != nil { return } if err = setCommMask(h); err != nil { return } ro, err := newOverlapped() if err != nil { return } wo, err := newOverlapped() if err != nil { return } port := new(serialPort) port.f = f port.fd = h port.ro = ro port.wo = wo return port, nil } func (p *serialPort) Close() error { return p.f.Close() } func (p *serialPort) Write(buf []byte) (int, error) { p.wl.Lock() defer p.wl.Unlock() if err := resetEvent(p.wo.HEvent); err != nil { return 0, err } var n uint32 err := syscall.WriteFile(p.fd, buf, &n, p.wo) //fmt.Printf("n %d err %v\n", n, err) _ = fmt.Printf if err != nil && err != syscall.ERROR_IO_PENDING { //fmt.Printf("returning...\n") return int(n), err } return getOverlappedResult(p.fd, p.wo) } func (p *serialPort) Read(buf []byte) (int, error) { //fmt.Printf("read(<%d bytes>)\n", len(buf)) if p == nil || p.f == nil { return 0, fmt.Errorf("Invalid port on read %v %v", p, p.f) } p.rl.Lock() defer p.rl.Unlock() if err := resetEvent(p.ro.HEvent); err != nil { return 0, err } var done uint32 //fmt.Printf("calling ReadFile... ") err := syscall.ReadFile(p.fd, buf, &done, p.ro) //fmt.Printf(" done. %d, %v\n", done, err) if err != nil && err != syscall.ERROR_IO_PENDING { return int(done), err } //fmt.Printf("getting OverlappedResult... ") n, err := getOverlappedResult(p.fd, p.ro) //fmt.Printf(" done. 
n %d err %v\n", n, err) if n == 0 && err == nil { return n, winSersTimeout{} } return n, err } var ( nSetCommState, nSetCommTimeouts, nSetCommMask, nSetupComm, nGetOverlappedResult, nCreateEvent, nResetEvent uintptr ) func init() { k32, err := syscall.LoadLibrary("kernel32.dll") if err != nil { panic("LoadLibrary " + err.Error()) } defer syscall.FreeLibrary(k32) nSetCommState = getProcAddr(k32, "SetCommState") nSetCommTimeouts = getProcAddr(k32, "SetCommTimeouts") nSetCommMask = getProcAddr(k32, "SetCommMask") nSetupComm = getProcAddr(k32, "SetupComm") nGetOverlappedResult = getProcAddr(k32, "GetOverlappedResult") nCreateEvent = getProcAddr(k32, "CreateEventW") nResetEvent = getProcAddr(k32, "ResetEvent") } func getProcAddr(lib syscall.Handle, name string) uintptr { addr, err := syscall.GetProcAddress(lib, name) if err != nil { panic(name + " " + err.Error()) } return addr } func setCommState(h syscall.Handle, baud, databits, parity, handshake int) error { var params structDCB params.DCBlength = uint32(unsafe.Sizeof(params)) params.flags[0] = 0x01 // fBinary params.flags[0] |= 0x10 // Assert DSR params.ByteSize = byte(databits) params.BaudRate = uint32(baud) //params.ByteSize = 8 switch parity { case N: params.flags[0] &^= 0x02 params.Parity = 0 // NOPARITY case E: params.flags[0] |= 0x02 params.Parity = 2 // EVENPARITY case O: params.flags[0] |= 0x02 params.Parity = 1 // ODDPARITY default: return StringError("invalid parity setting") } switch handshake { case NO_HANDSHAKE: // TODO: reset handshake default: return StringError("only NO_HANDSHAKE is supported on windows") } r, _, err := syscall.Syscall(nSetCommState, 2, uintptr(h), uintptr(unsafe.Pointer(&params)), 0) if r == 0 { return err } return nil } func setCommTimeouts(h syscall.Handle, constTimeout float64) error { var timeouts structTimeouts const MAXDWORD = 1<<32 - 1 timeouts.ReadIntervalTimeout = MAXDWORD timeouts.ReadTotalTimeoutMultiplier = MAXDWORD //timeouts.ReadTotalTimeoutConstant = MAXDWORD - 1 if constTimeout == 0 { timeouts.ReadTotalTimeoutConstant = MAXDWORD - 1 } else { timeouts.ReadTotalTimeoutConstant = uint32(constTimeout * 1000.0) } /* From http://msdn.microsoft.com/en-us/library/aa363190(v=VS.85).aspx For blocking I/O see below: Remarks: If an application sets ReadIntervalTimeout and ReadTotalTimeoutMultiplier to MAXDWORD and sets ReadTotalTimeoutConstant to a value greater than zero and less than MAXDWORD, one of the following occurs when the ReadFile function is called: If there are any bytes in the input buffer, ReadFile returns immediately with the bytes in the buffer. If there are no bytes in the input buffer, ReadFile waits until a byte arrives and then returns immediately. If no bytes arrive within the time specified by ReadTotalTimeoutConstant, ReadFile times out. 
*/ r, _, err := syscall.Syscall(nSetCommTimeouts, 2, uintptr(h), uintptr(unsafe.Pointer(&timeouts)), 0) if r == 0 { return err } return nil } func setupComm(h syscall.Handle, in, out int) error { r, _, err := syscall.Syscall(nSetupComm, 3, uintptr(h), uintptr(in), uintptr(out)) if r == 0 { return err } return nil } func setCommMask(h syscall.Handle) error { const EV_RXCHAR = 0x0001 r, _, err := syscall.Syscall(nSetCommMask, 2, uintptr(h), EV_RXCHAR, 0) if r == 0 { return err } return nil } func resetEvent(h syscall.Handle) error { r, _, err := syscall.Syscall(nResetEvent, 1, uintptr(h), 0, 0) if r == 0 { return err } return nil } func newOverlapped() (*syscall.Overlapped, error) { var overlapped syscall.Overlapped r, _, err := syscall.Syscall6(nCreateEvent, 4, 0, 1, 0, 0, 0, 0) if r == 0 { return nil, err } overlapped.HEvent = syscall.Handle(r) return &overlapped, nil } func getOverlappedResult(h syscall.Handle, overlapped *syscall.Overlapped) (int, error) { var n int r, _, err := syscall.Syscall6(nGetOverlappedResult, 4, uintptr(h), uintptr(unsafe.Pointer(overlapped)), uintptr(unsafe.Pointer(&n)), 1, 0, 0) if r == 0 { return n, err } //fmt.Printf("n %d err %v\n", n, err) return n, nil } func (sp *serialPort) SetMode(baudrate, databits, parity, stopbits, handshake int) error { if err := setCommState(syscall.Handle(sp.f.Fd()), baudrate, databits, parity, handshake); err != nil { return err } //return StringError("SetMode not implemented yet on Windows") return nil } func (sp *serialPort) SetReadParams(minread int, timeout float64) error { // TODO: minread is ignored! return setCommTimeouts(sp.fd, timeout) //return StringError("SetReadParams not implemented yet on Windows") } type winSersTimeout struct{} func (wst winSersTimeout) Error() string { return "a timeout has occured" } func (wst winSersTimeout) Timeout() bool { return true }
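// Editor's note: hypothetical usage sketch, not part of the original file.
// It only exercises the API visible above (Open, SetMode, SetReadParams, Read,
// Close). The constants N and NO_HANDSHAKE are defined elsewhere in the sers
// package (they are referenced by setCommState above); "COM3" and the stop-bit
// value 1 are placeholder assumptions.
//
//   port, err := sers.Open("COM3")
//   if err != nil {
//       log.Fatal(err)
//   }
//   defer port.Close()
//
//   // 115200 baud, 8 data bits, no parity, 1 stop bit, no handshake.
//   if err := port.SetMode(115200, 8, sers.N, 1, sers.NO_HANDSHAKE); err != nil {
//       log.Fatal(err)
//   }
//   // Block for at most 0.5 s per Read call (minread is ignored on Windows).
//   if err := port.SetReadParams(0, 0.5); err != nil {
//       log.Fatal(err)
//   }
//
//   buf := make([]byte, 128)
//   n, err := port.Read(buf)
//   if err != nil {
//       log.Fatal(err)
//   }
//   fmt.Printf("read %d bytes: %q\n", n, buf[:n])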
capnm/termzero
src/termzero/sers/sers_windows.go
Go
mit
8,070
<?php

namespace Soy\Phinx;

use League\CLImate\CLImate;
use Soy\Task\CliTask;

class CreateTask extends CliTask
{
    use ConfigTrait;

    /**
     * @var string
     */
    protected $name;

    /**
     * @param CLImate $climate
     * @param Config $config
     */
    public function __construct(CLImate $climate, Config $config)
    {
        parent::__construct($climate);
        $this->config = $config;
    }

    /**
     * @return string
     */
    public function getCommand()
    {
        $command = $this->getBinary() . ' create ' . $this->getName() . ' ' . $this->config->getDefaultArguments();

        if (count($this->getArguments()) > 0) {
            $command .= ' ' . implode($this->getArguments());
        }

        return $command;
    }

    /**
     * @return string
     */
    public function getName()
    {
        return $this->name;
    }

    /**
     * @param string $name
     * @return $this
     */
    public function setName($name)
    {
        $this->name = $name;

        return $this;
    }
}
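// Editor's note: hypothetical usage sketch, not part of the original file.
// It assumes a CLImate instance and a Phinx Config are already wired up by the
// surrounding soy-php recipe; the printed binary path is only an example of
// what getBinary() from ConfigTrait might return.
//
//     $task = new \Soy\Phinx\CreateTask($climate, $config);
//     $task->setName('CreateUsersTable');
//     echo $task->getCommand();
//     // e.g. "vendor/bin/phinx create CreateUsersTable <default arguments>"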
soy-php/phinx-task
src/Soy/Phinx/CreateTask.php
PHP
mit
1,041
"use strict"; /* * Copyright (c) 2013-2019 Bert Freudenberg * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ Object.extend(Squeak.Primitives.prototype, 'ScratchPluginAdditions', { // methods not handled by generated ScratchPlugin scratch_primitiveOpenURL: function(argCount) { var url = this.stackNonInteger(0).bytesAsString(); if (url == "") return false; if (/^\/SqueakJS\//.test(url)) { url = url.slice(10); // remove file root var path = Squeak.splitFilePath(url), template = Squeak.Settings["squeak-template:" + path.dirname]; if (template) url = JSON.parse(template).url + "/" + path.basename; } window.open(url, "_blank"); // likely blocked as pop-up, but what can we do? return this.popNIfOK(argCount); }, scratch_primitiveGetFolderPath: function(argCount) { var index = this.stackInteger(0); if (!this.success) return false; var path; switch (index) { case 1: path = '/'; break; // home dir // case 2: path = '/desktop'; break; // desktop // case 3: path = '/documents'; break; // documents // case 4: path = '/pictures'; break; // my pictures // case 5: path = '/music'; break; // my music } if (!path) return false; this.vm.popNandPush(argCount + 1, this.makeStString(this.filenameToSqueak(path))); return true; }, });
bertfreudenberg/SqueakJS
vm.plugins.scratch.browser.js
JavaScript
mit
2,534
<?php /* TwigBundle:Exception:traces.html.twig */ class __TwigTemplate_172db66ae695714ab527ff22427d903f9649fdf96e97dbc795a82cac4ba36f2f extends Twig_Template { public function __construct(Twig_Environment $env) { parent::__construct($env); $this->parent = false; $this->blocks = array( ); } protected function doDisplay(array $context, array $blocks = array()) { // line 1 echo "<div class=\"block\"> "; // line 2 if (($this->getContext($context, "count") > 0)) { // line 3 echo " <h2> <span><small>["; // line 4 echo twig_escape_filter($this->env, (($this->getContext($context, "count") - $this->getContext($context, "position")) + 1), "html", null, true); echo "/"; echo twig_escape_filter($this->env, ($this->getContext($context, "count") + 1), "html", null, true); echo "]</small></span> "; // line 5 echo $this->env->getExtension('code')->abbrClass($this->getAttribute($this->getContext($context, "exception"), "class")); echo ": "; echo $this->env->getExtension('code')->formatFileFromText(nl2br(twig_escape_filter($this->env, $this->getAttribute($this->getContext($context, "exception"), "message"), "html", null, true))); echo "&nbsp; "; // line 6 ob_start(); // line 7 echo " <a href=\"#\" onclick=\"toggle('traces-"; echo twig_escape_filter($this->env, $this->getContext($context, "position"), "html", null, true); echo "', 'traces'); switchIcons('icon-traces-"; echo twig_escape_filter($this->env, $this->getContext($context, "position"), "html", null, true); echo "-open', 'icon-traces-"; echo twig_escape_filter($this->env, $this->getContext($context, "position"), "html", null, true); echo "-close'); return false;\"> <img class=\"toggle\" id=\"icon-traces-"; // line 8 echo twig_escape_filter($this->env, $this->getContext($context, "position"), "html", null, true); echo "-close\" alt=\"-\" src=\"data:image/gif;base64,R0lGODlhEgASAMQSANft94TG57Hb8GS44ez1+mC24IvK6ePx+Wa44dXs92+942e54o3L6W2844/M6dnu+P/+/l614P///wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACH5BAEAABIALAAAAAASABIAQAVCoCQBTBOd6Kk4gJhGBCTPxysJb44K0qD/ER/wlxjmisZkMqBEBW5NHrMZmVKvv9hMVsO+hE0EoNAstEYGxG9heIhCADs=\" style=\"display: "; echo (((0 == $this->getContext($context, "count"))) ? ("inline") : ("none")); echo "\" /> <img class=\"toggle\" id=\"icon-traces-"; // line 9 echo twig_escape_filter($this->env, $this->getContext($context, "position"), "html", null, true); echo "-open\" alt=\"+\" src=\"data:image/gif;base64,R0lGODlhEgASAMQTANft99/v+Ga44bHb8ITG52S44dXs9+z1+uPx+YvK6WC24G+944/M6W28443L6dnu+Ge54v/+/l614P///wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACH5BAEAABMALAAAAAASABIAQAVS4DQBTiOd6LkwgJgeUSzHSDoNaZ4PU6FLgYBA5/vFID/DbylRGiNIZu74I0h1hNsVxbNuUV4d9SsZM2EzWe1qThVzwWFOAFCQFa1RQq6DJB4iIQA7\" style=\"display: "; echo (((0 == $this->getContext($context, "count"))) ? ("none") : ("inline")); echo "\" /> </a> "; echo trim(preg_replace('/>\s+</', '><', ob_get_clean())); // line 12 echo " </h2> "; } else { // line 14 echo " <h2>Stack Trace</h2> "; } // line 16 echo " <a id=\"traces-link-"; // line 17 echo twig_escape_filter($this->env, $this->getContext($context, "position"), "html", null, true); echo "\"></a> <ol class=\"traces list-exception\" id=\"traces-"; // line 18 echo twig_escape_filter($this->env, $this->getContext($context, "position"), "html", null, true); echo "\" style=\"display: "; echo (((0 == $this->getContext($context, "count"))) ? 
("block") : ("none")); echo "\"> "; // line 19 $context['_parent'] = (array) $context; $context['_seq'] = twig_ensure_traversable($this->getAttribute($this->getContext($context, "exception"), "trace")); foreach ($context['_seq'] as $context["i"] => $context["trace"]) { // line 20 echo " <li> "; // line 21 $this->env->loadTemplate("TwigBundle:Exception:trace.html.twig")->display(array("prefix" => $this->getContext($context, "position"), "i" => $this->getContext($context, "i"), "trace" => $this->getContext($context, "trace"))); // line 22 echo " </li> "; } $_parent = $context['_parent']; unset($context['_seq'], $context['_iterated'], $context['i'], $context['trace'], $context['_parent'], $context['loop']); $context = array_intersect_key($context, $_parent) + $_parent; // line 24 echo " </ol> </div> "; } public function getTemplateName() { return "TwigBundle:Exception:traces.html.twig"; } public function isTraitable() { return false; } public function getDebugInfo() { return array ( 94 => 22, 92 => 21, 89 => 20, 85 => 19, 79 => 18, 75 => 17, 72 => 16, 68 => 14, 64 => 12, 56 => 9, 50 => 8, 41 => 7, 27 => 4, 24 => 3, 22 => 2, 201 => 92, 199 => 91, 196 => 90, 187 => 84, 183 => 82, 173 => 74, 171 => 73, 168 => 72, 166 => 71, 163 => 70, 158 => 67, 156 => 66, 151 => 63, 142 => 59, 138 => 57, 136 => 56, 133 => 55, 123 => 47, 121 => 46, 117 => 44, 115 => 43, 112 => 42, 105 => 40, 101 => 24, 91 => 31, 86 => 28, 69 => 25, 66 => 24, 62 => 23, 51 => 20, 49 => 19, 39 => 6, 32 => 12, 19 => 1, 57 => 12, 54 => 21, 43 => 8, 40 => 7, 33 => 5, 30 => 3,); } }
Alexandrovic/WebSmartravel
app/cache/dev/twig/17/2d/b66ae695714ab527ff22427d903f9649fdf96e97dbc795a82cac4ba36f2f.php
PHP
mit
6,030
<?php namespace CallFire\Common\Resource; class RetryResults extends AbstractResource { }
CallFire/CallFire-PHP-SDK
src/CallFire/Common/Resource/RetryResults.php
PHP
mit
93
const React = require('react'); const { ViewPropTypes } = ReactNative = require('react-native'); const { View, Animated, StyleSheet, ScrollView, Text, Platform, Dimensions, I18nManager } = ReactNative; const Button = require('./Button'); //import { PropTypes } from 'react' const WINDOW_WIDTH = Dimensions.get('window').width; const ScrollableTabBar = React.createClass({ propTypes: { goToPage: React.PropTypes.func, activeTab: React.PropTypes.number, tabs: React.PropTypes.array, backgroundColor: React.PropTypes.string, activeTextColor: React.PropTypes.string, inactiveTextColor: React.PropTypes.string, scrollOffset: React.PropTypes.number, //style: ViewPropTypes.style, //tabStyle: ViewPropTypes.style, //tabsContainerStyle: ViewPropTypes.style, //tabStyle: ViewPropTypes.style, textStyle: Text.propTypes.style, renderTab: React.PropTypes.func, //underlineStyle: ViewPropTypes.style, onScroll:React.PropTypes.func, }, getDefaultProps() { return { scrollOffset: 52, activeTextColor: 'navy', inactiveTextColor: 'black', backgroundColor: null, style: {}, tabStyle: {}, tabsContainerStyle: {}, tabStyle: {}, underlineStyle: {}, }; }, getInitialState() { this._tabsMeasurements = []; return { _leftTabUnderline: new Animated.Value(0), _widthTabUnderline: new Animated.Value(0), _containerWidth: null, }; }, componentDidMount() { this.props.scrollValue.addListener(this.updateView); }, updateView(offset) { //console.log("updateView="+JSON.stringify(offset)); //console.log("updateView="+JSON.stringify(this.props)); const position = Math.floor(offset.value); const pageOffset = offset.value % 1; const tabCount = this.props.tabs.length; const lastTabPosition = tabCount - 1; if (tabCount === 0 || offset.value < 0 || offset.value > lastTabPosition) { return; } if (this.necessarilyMeasurementsCompleted(position, position === lastTabPosition)) { this.updateTabPanel(position, pageOffset); this.updateTabUnderline(position, pageOffset, tabCount); } }, necessarilyMeasurementsCompleted(position, isLastTab) { return this._tabsMeasurements[position] && (isLastTab || this._tabsMeasurements[position + 1]) && this._tabContainerMeasurements && this._containerMeasurements; }, updateTabPanel(position, pageOffset) { const containerWidth = this._containerMeasurements.width; const tabWidth = this._tabsMeasurements[position].width; //console.log("containerWidth="+containerWidth+" tabWidth="+tabWidth); const nextTabMeasurements = this._tabsMeasurements[position + 1]; const nextTabWidth = nextTabMeasurements && nextTabMeasurements.width || 0; const tabOffset = this._tabsMeasurements[position].left; const absolutePageOffset = pageOffset * tabWidth; let newScrollX = tabOffset + absolutePageOffset; // center tab and smooth tab change (for when tabWidth changes a lot between two tabs) newScrollX -= (containerWidth - (1 - pageOffset) * tabWidth - pageOffset * nextTabWidth) / 2; newScrollX = newScrollX >= 0 ? newScrollX : 0; if (Platform.OS === 'android') { this._scrollView.scrollTo({x: newScrollX, y: 0, animated: false, }); } else { const rightBoundScroll = this._tabContainerMeasurements.width - (this._containerMeasurements.width); newScrollX = newScrollX > rightBoundScroll ? 
rightBoundScroll : newScrollX; this._scrollView.scrollTo({x: newScrollX, y: 0, animated: false, }); } }, updateTabUnderline(position, pageOffset, tabCount) { const tabPad = this.props.underlineAlignText?this.props.tabPadding:0; const lineLeft = this._tabsMeasurements[position].left; const lineRight = this._tabsMeasurements[position].right; if (position < tabCount - 1) { const nextTabLeft = this._tabsMeasurements[position + 1].left; const nextTabRight = this._tabsMeasurements[position + 1].right; const newLineLeft = (pageOffset * nextTabLeft + (1 - pageOffset) * lineLeft); const newLineRight = (pageOffset * nextTabRight + (1 - pageOffset) * lineRight); this.state._leftTabUnderline.setValue(newLineLeft+tabPad); this.state._widthTabUnderline.setValue(newLineRight - newLineLeft -tabPad*2); } else { this.state._leftTabUnderline.setValue(lineLeft+tabPad); this.state._widthTabUnderline.setValue(lineRight - lineLeft-tabPad*2); } }, renderTab(name, page, isTabActive, onPressHandler, onLayoutHandler) { const { activeTextColor, inactiveTextColor, textStyle, } = this.props; const textColor = isTabActive ? activeTextColor : inactiveTextColor; const fontWeight = isTabActive ? 'bold' : 'normal'; return <Button key={`${name}_${page}`} accessible={true} accessibilityLabel={name} accessibilityTraits='button' onPress={() => onPressHandler(page)} onLayout={onLayoutHandler} > <View style={[this.props.tabStyle||styles.tab, ]}> <Text style={[{color: textColor, fontWeight, }, textStyle, ]}> {name} </Text> </View> </Button>; }, measureTab(page, event) { console.log("measureTab="+page+"layout "+JSON.stringify(event.nativeEvent.layout)); const { x, width, height, } = event.nativeEvent.layout; this._tabsMeasurements[page] = {left: x, right: x + width, width, height, }; this.updateView({value: this.props.scrollValue._value, }); }, render() { const tabUnderlineStyle = { position: 'absolute', height: 1, backgroundColor: 'navy', bottom: 0, }; const key = I18nManager.isRTL ? 
'right' : 'left'; const dynamicTabUnderline = { [`${key}`]: this.state._leftTabUnderline, width: this.state._widthTabUnderline } return <View style={[this.props.tabsContainerStyle||styles.container, ]} onLayout={this.onContainerLayout} > <ScrollView automaticallyAdjustContentInsets={false} ref={(scrollView) => { this._scrollView = scrollView; }} horizontal={true} showsHorizontalScrollIndicator={false} showsVerticalScrollIndicator={false} directionalLockEnabled={true} onScroll={this.props.onScroll} bounces={false} scrollsToTop={false} > <View style={[styles.tabs, {width: this.state._containerWidth, }, ]} ref={'tabContainer'} onLayout={this.onTabContainerLayout} > {this.props.tabs.map((name, page) => { const isTabActive = this.props.activeTab === page; const renderTab = this.props.renderTab || this.renderTab; return renderTab(name, page, isTabActive, this.props.goToPage, this.measureTab.bind(this, page)); })} <Animated.View style={[tabUnderlineStyle, dynamicTabUnderline, this.props.underlineStyle, ]} /> </View> </ScrollView> </View>; }, componentWillReceiveProps(nextProps) { // If the tabs change, force the width of the tabs container to be recalculated if (JSON.stringify(this.props.tabs) !== JSON.stringify(nextProps.tabs) && this.state._containerWidth) { this.setState({ _containerWidth: null, }); } }, onTabContainerLayout(e) { this._tabContainerMeasurements = e.nativeEvent.layout; let width = this._tabContainerMeasurements.width; if (width < WINDOW_WIDTH) { width = WINDOW_WIDTH; } this.setState({ _containerWidth: width, }); this.updateView({value: this.props.scrollValue._value, }); }, onContainerLayout(e) { this._containerMeasurements = e.nativeEvent.layout; this.updateView({value: this.props.scrollValue._value, }); }, }); module.exports = ScrollableTabBar; const styles = StyleSheet.create({ tab: { height: 49, alignItems: 'center', justifyContent: 'center', paddingLeft: 20, paddingRight: 20, }, container: { height: 50, borderWidth: 1, borderTopWidth: 0, borderLeftWidth: 0, borderRightWidth: 0, borderColor: '#ccc', }, tabs: { flexDirection: 'row', // justifyContent: 'space-around', may crash on android devices }, });
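// Editor's note: hypothetical usage sketch, not part of the original file.
// ScrollableTabBar follows the custom tab-bar contract of pager components that
// inject tabs/activeTab/goToPage/scrollValue props (react-native-scrollable-tab-view
// style); the pager package and tabLabel usage below are assumptions, not shown here.
//
//   const ScrollableTabView = require('react-native-scrollable-tab-view');
//   const ScrollableTabBar = require('./ScrollableTabBar');
//
//   <ScrollableTabView renderTabBar={() => <ScrollableTabBar underlineStyle={{ backgroundColor: 'navy' }} />}>
//     <View tabLabel="First" />
//     <View tabLabel="Second" />
//   </ScrollableTabView>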
jackuhan/react-native-viewpager-indicator
ScrollableTabBar.js
JavaScript
mit
8,319
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("eXpand.XafMVVM.Module.Win")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("-")]
[assembly: AssemblyProduct("eXpand.XafMVVM.Module.Win")]
[assembly: AssemblyCopyright("Copyright © - 2007")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]

// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]

// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Revision and Build Numbers
// by using the '*' as shown below:
[assembly: AssemblyVersion("1.0.*")]
biohazard999/XafMVVM
src/XMVVM/XMVVM.ExpressApp.Demos/XMVVM.ExpressApp.Demos.Module.Win/Properties/AssemblyInfo.cs
C#
mit
1,201
/*
   Licensed to the Apache Software Foundation (ASF) under one
   or more contributor license agreements. See the NOTICE file
   distributed with this work for additional information
   regarding copyright ownership. The ASF licenses this file
   to you under the Apache License, Version 2.0 (the
   "License"); you may not use this file except in compliance
   with the License. You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing,
   software distributed under the License is distributed on an
   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
   KIND, either express or implied. See the License for the
   specific language governing permissions and limitations
   under the License.
*/

package org.webodf;

import android.os.Bundle;

import org.apache.cordova.*;

public class WebODF extends CordovaActivity {
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        super.init();
        // Set by <content src="index.html" /> in config.xml
        super.loadUrl(Config.getStartUrl());
        //super.loadUrl("file:///android_asset/www/index.html");
    }
}
brandon-bailey/osdms
assets/webodf/programs/cordova/platforms/android/src/org/webodf/WebODF.java
Java
mit
1,303
๏ปฟusing System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Runtime.InteropServices.WindowsRuntime; using System.Threading.Tasks; using Windows.ApplicationModel; using Windows.ApplicationModel.Activation; using Windows.Foundation; using Windows.Foundation.Collections; using Windows.UI.Popups; using Windows.UI.Xaml; using Windows.UI.Xaml.Controls; using Windows.UI.Xaml.Controls.Primitives; using Windows.UI.Xaml.Data; using Windows.UI.Xaml.Input; using Windows.UI.Xaml.Media; using Windows.UI.Xaml.Navigation; namespace murlok_uwp { /// <summary> /// Provides application-specific behavior to supplement the default Application class. /// </summary> sealed partial class App : Application { /// <summary> /// Initializes the singleton application object. This is the first line of authored code /// executed, and as such is the logical equivalent of main() or WinMain(). /// </summary> public App() { this.InitializeComponent(); this.Suspending += OnSuspending; } /// <summary> /// Invoked when the application is launched normally by the end user. Other entry points /// will be used such as when the application is launched to open a specific file. /// </summary> /// <param name="e">Details about the launch request and process.</param> protected override void OnLaunched(LaunchActivatedEventArgs e) { #if DEBUG if (System.Diagnostics.Debugger.IsAttached) { this.DebugSettings.EnableFrameRateCounter = true; } #endif Frame rootFrame = Window.Current.Content as Frame; // Do not repeat app initialization when the Window already has content, // just ensure that the window is active if (rootFrame == null) { // Create a Frame to act as the navigation context and navigate to the first page rootFrame = new Frame(); rootFrame.NavigationFailed += OnNavigationFailed; if (e.PreviousExecutionState == ApplicationExecutionState.Terminated) { //TODO: Load state from previously suspended application } // Place the frame in the current Window Window.Current.Content = rootFrame; } if (e.PrelaunchActivated == false) { if (rootFrame.Content == null) { // When the navigation stack isn't restored navigate to the first page, // configuring the new page by passing required information as a navigation // parameter rootFrame.Navigate(typeof(MainPage), e.Arguments); } // Ensure the current window is active Window.Current.Activate(); } LaunchBackgroundProcess(); } /// <summary> /// Invoked when Navigation to a certain page fails /// </summary> /// <param name="sender">The Frame which failed navigation</param> /// <param name="e">Details about the navigation failure</param> void OnNavigationFailed(object sender, NavigationFailedEventArgs e) { throw new Exception("Failed to load Page " + e.SourcePageType.FullName); } /// <summary> /// Invoked when application execution is being suspended. Application state is saved /// without knowing whether the application will be terminated or resumed with the contents /// of memory still intact. 
/// </summary> /// <param name="sender">The source of the suspend request.</param> /// <param name="e">Details about the suspend request.</param> private void OnSuspending(object sender, SuspendingEventArgs e) { var deferral = e.SuspendingOperation.GetDeferral(); //TODO: Save application state and stop any background activity deferral.Complete(); } private async void LaunchBackgroundProcess() { try { await FullTrustProcessLauncher.LaunchFullTrustProcessForCurrentAppAsync(); } catch (Exception) { System.Diagnostics.Debug.WriteLine("BackgroundProcess failed to launch"); } } } }
murlokswarm/windows
native/murlok-uwp/App.xaml.cs
C#
mit
4,543
<?php namespace jk204\SIMalliance\ASN1\FileSystem; use FG\ASN1\Universal\OctetString; class filePath extends OctetString { }
jk204/sim-profile-asn1
src/jk204/SIMalliance/ASN1/FileSystem/filePath.php
PHP
mit
128
class ExercisesController < ApplicationController
  load_and_authorize_resource
  before_action :set_exercise, only: [:show, :edit, :update, :destroy]

  respond_to :html

  def index
    @exercises = Exercise.all
    respond_with(@exercises)
  end

  def show
    respond_with(@exercise)
  end

  def new
    @exercise = Exercise.new
    respond_with(@exercise)
  end

  def edit
  end

  def create
    @exercise = Exercise.new(exercise_params)
    @exercise.save
    respond_with(@exercise)
  end

  def update
    @exercise.update(exercise_params)
    respond_with(@exercise)
  end

  def destroy
    @exercise.destroy
    respond_with(@exercise)
  end

  private

  def set_exercise
    @exercise = Exercise.find(params[:id])
  end

  def exercise_params
    params.require(:exercise).permit(:title, :content)
  end
end
project-awesome/pa-skills-oauth-cancan
app/controllers/exercises_controller.rb
Ruby
mit
837
/*
Copyright (c) 2015 - 2021 Advanced Micro Devices, Inc. All rights reserved.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/

#include <hip/hip_runtime_api.h>
#include "test_common.h"

int main() {
    int val;
    hipDeviceAttribute_t attr = hipDeviceAttributeMaxThreadsPerBlock;  ///< Maximum number of threads per block.
    HIP_PRINT_STATUS(hipDeviceGetAttribute(NULL, attr, 0));
    HIP_PRINT_STATUS(hipDeviceGetAttribute(&val, attr, 0));
    HIP_PRINT_STATUS(hipDeviceGetAttribute(NULL, attr, -1));
    HIP_PRINT_STATUS(hipDeviceGetAttribute(&val, attr, -1));
    attr = hipDeviceAttribute_t(91);
    HIP_PRINT_STATUS(hipDeviceGetAttribute(NULL, attr, 0));
    HIP_PRINT_STATUS(hipDeviceGetAttribute(&val, attr, 0));
    HIP_PRINT_STATUS(hipDeviceGetAttribute(NULL, attr, -1));
    HIP_PRINT_STATUS(hipDeviceGetAttribute(&val, attr, -1));
}
ROCm-Developer-Tools/HIP
tests/src/nvcc/Device/hipDeviceGetAttribute.cpp
C++
mit
1,836
<?php namespace Oro\Bundle\FlexibleEntityBundle\AttributeType; use Oro\Bundle\FlexibleEntityBundle\AttributeType\AbstractAttributeType; use Oro\Bundle\FlexibleEntityBundle\Model\FlexibleValueInterface; /** * Datetime attribute type */ class DateTimeType extends AbstractAttributeType { /** * {@inheritdoc} */ protected function prepareValueFormOptions(FlexibleValueInterface $value) { $options = parent::prepareValueFormOptions($value); $options['widget'] = 'single_text'; $options['input'] = 'datetime'; return $options; } /** * {@inheritdoc} */ public function getName() { return 'oro_flexibleentity_datetime'; } }
umpirsky/platform
src/Oro/Bundle/FlexibleEntityBundle/AttributeType/DateTimeType.php
PHP
mit
716
// Copyright (c) .NET Foundation. All rights reserved. // Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Generic; using System.Linq; namespace T2rkus.Spark.Core.Cli { public class CommandOption { public CommandOption(string template, CommandOptionType optionType) { Template = template; OptionType = optionType; Values = new List<string>(); foreach (var part in Template.Split(new[] { ' ', '|' }, StringSplitOptions.RemoveEmptyEntries)) { if (part.StartsWith("--")) { LongName = part.Substring(2); } else if (part.StartsWith("-")) { var optName = part.Substring(1); // If there is only one char and it is not an English letter, it is a symbol option (e.g. "-?") if (optName.Length == 1 && !IsEnglishLetter(optName[0])) { SymbolName = optName; } else { ShortName = optName; } } else if (part.StartsWith("<") && part.EndsWith(">")) { ValueName = part.Substring(1, part.Length - 2); } else { throw new ArgumentException($"Invalid template pattern '{template}'", nameof(template)); } } if (string.IsNullOrEmpty(LongName) && string.IsNullOrEmpty(ShortName) && string.IsNullOrEmpty(SymbolName)) { throw new ArgumentException($"Invalid template pattern '{template}'", nameof(template)); } } public string Template { get; set; } public string ShortName { get; set; } public string LongName { get; set; } public string SymbolName { get; set; } public string ValueName { get; set; } public string Description { get; set; } public List<string> Values { get; private set; } public CommandOptionType OptionType { get; private set; } public bool ShowInHelpText { get; set; } = true; public bool Inherited { get; set; } public bool TryParse(string value) { switch (OptionType) { case CommandOptionType.MultipleValue: Values.Add(value); break; case CommandOptionType.SingleValue: if (Values.Any()) { return false; } Values.Add(value); break; case CommandOptionType.NoValue: if (value != null) { return false; } // Add a value to indicate that this option was specified Values.Add("on"); break; default: break; } return true; } public bool HasValue() { return Values.Any(); } public string Value() { return HasValue() ? Values[0] : null; } private bool IsEnglishLetter(char c) { return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z'); } } }
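// Editor's note: hypothetical usage sketch, not part of the original file.
// It only uses members defined above; CommandOptionType is assumed to be an enum
// with NoValue/SingleValue/MultipleValue, as referenced by TryParse.
//
//     var option = new CommandOption("-n|--name <NAME>", CommandOptionType.SingleValue);
//     // option.ShortName == "n", option.LongName == "name", option.ValueName == "NAME"
//     option.TryParse("build");          // true: first value accepted
//     option.TryParse("other");          // false: SingleValue options take exactly one value
//     Console.WriteLine(option.Value()); // "build"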
T2RKUS/Spark
T2rkus.Spark/T2rkus.Spark.Core.Cli/T2rkus.Spark.Core.Cli/Parser/CommandOption.cs
C#
mit
3,607
/* * This file is part of jGui API, licensed under the MIT License (MIT). * * Copyright (c) 2016 johni0702 <https://github.com/johni0702> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package de.johni0702.minecraft.gui.element; import de.johni0702.minecraft.gui.GuiRenderer; import de.johni0702.minecraft.gui.RenderInfo; import de.johni0702.minecraft.gui.container.GuiContainer; import de.johni0702.minecraft.gui.utils.lwjgl.Dimension; import de.johni0702.minecraft.gui.utils.lwjgl.Point; import de.johni0702.minecraft.gui.utils.lwjgl.ReadableDimension; import de.johni0702.minecraft.gui.versions.MCVer; import net.minecraft.client.MinecraftClient; import net.minecraft.util.Identifier; public abstract class AbstractGuiElement<T extends AbstractGuiElement<T>> implements GuiElement<T> { protected static final Identifier TEXTURE = new Identifier("jgui", "gui.png"); private final MinecraftClient minecraft = MCVer.getMinecraft(); private GuiContainer container; private GuiElement tooltip; private boolean enabled = true; protected Dimension minSize, maxSize; /** * The last size this element was render at layer 0. * May be {@code null} when this element has not yet been rendered. */ private ReadableDimension lastSize; public AbstractGuiElement() { } public AbstractGuiElement(GuiContainer container) { container.addElements(null, this); } protected abstract T getThis(); @Override public void layout(ReadableDimension size, RenderInfo renderInfo) { if (size == null) { if (getContainer() == null) { throw new RuntimeException("Any top containers must implement layout(null, ...) 
themselves!"); } getContainer().layout(size, renderInfo.layer(renderInfo.layer + getLayer())); return; } if (renderInfo.layer == 0) { lastSize = size; } } @Override public void draw(GuiRenderer renderer, ReadableDimension size, RenderInfo renderInfo) { } @Override public T setEnabled(boolean enabled) { this.enabled = enabled; return getThis(); } @Override public T setEnabled() { return setEnabled(true); } @Override public T setDisabled() { return setEnabled(false); } @Override public GuiElement getTooltip(RenderInfo renderInfo) { if (tooltip != null && lastSize != null) { Point mouse = new Point(renderInfo.mouseX, renderInfo.mouseY); if (container != null) { container.convertFor(this, mouse); } if (mouse.getX() > 0 && mouse.getY() > 0 && mouse.getX() < lastSize.getWidth() && mouse.getY() < lastSize.getHeight()) { return tooltip; } } return null; } @Override public T setTooltip(GuiElement tooltip) { this.tooltip = tooltip; return getThis(); } @Override public T setContainer(GuiContainer container) { this.container = container; return getThis(); } public T setMinSize(ReadableDimension minSize) { this.minSize = new Dimension(minSize); return getThis(); } public T setMaxSize(ReadableDimension maxSize) { this.maxSize = new Dimension(maxSize); return getThis(); } public T setSize(ReadableDimension size) { setMinSize(size); return setMaxSize(size); } public T setSize(int width, int height) { return setSize(new Dimension(width, height)); } public T setWidth(int width) { if (minSize == null) { minSize = new Dimension(width, 0); } else { minSize.setWidth(width); } if (maxSize == null) { maxSize = new Dimension(width, Integer.MAX_VALUE); } else { maxSize.setWidth(width); } return getThis(); } public T setHeight(int height) { if (minSize == null) { minSize = new Dimension(0, height); } else { minSize.setHeight(height); } if (maxSize == null) { maxSize = new Dimension(Integer.MAX_VALUE, height); } else { maxSize.setHeight(height); } return getThis(); } public int getLayer() { return 0; } @Override public ReadableDimension getMinSize() { ReadableDimension calcSize = calcMinSize(); if (minSize == null) { return calcSize; } else { if (minSize.getWidth() >= calcSize.getWidth() && minSize.getHeight() >= calcSize.getHeight()) { return minSize; } else { return new Dimension( Math.max(calcSize.getWidth(), minSize.getWidth()), Math.max(calcSize.getHeight(), minSize.getHeight()) ); } } } protected abstract ReadableDimension calcMinSize(); @Override public ReadableDimension getMaxSize() { return maxSize == null ? new Dimension(Integer.MAX_VALUE, Integer.MAX_VALUE) : maxSize; } public MinecraftClient getMinecraft() { return this.minecraft; } public GuiContainer getContainer() { return this.container; } public boolean isEnabled() { return this.enabled; } protected ReadableDimension getLastSize() { return this.lastSize; } }
ReplayMod/jGui
src/main/java/de/johni0702/minecraft/gui/element/AbstractGuiElement.java
Java
mit
6,656
#-- encoding: UTF-8
#-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2013 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2013 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See doc/COPYRIGHT.rdoc for more details.
#++

require File.expand_path('../../test_helper', __FILE__)

class EnabledModuleTest < ActiveSupport::TestCase
  def test_enabling_wiki_should_create_a_wiki
    CustomField.delete_all
    FactoryGirl.create(:type_standard)
    project = Project.create!(:name => 'Project with wiki', :identifier => 'wikiproject')
    assert_nil project.wiki
    project.enabled_module_names = ['wiki']
    wiki = FactoryGirl.create :wiki, :project => project
    project.reload
    assert_not_nil project.wiki
    assert_equal 'Wiki', project.wiki.start_page
  end

  def test_reenabling_wiki_should_not_create_another_wiki
    project = FactoryGirl.create :project
    wiki = FactoryGirl.create :wiki, :project => project
    project.reload
    assert_not_nil project.wiki
    project.enabled_module_names = []
    project.reload
    assert_no_difference 'Wiki.count' do
      project.enabled_module_names = ['wiki']
    end
    assert_not_nil project.wiki
  end
end
mximos/openproject-heroku
test/unit/enabled_module_test.rb
Ruby
mit
2,169
var expect = require('chai').expect, sinon = require('sinon'), EventEmitter = require('../src/EventEmitter'); describe('EventEmitter tests', function() { var emitter, foo, bar; beforeEach(function() { emitter = new EventEmitter(); foo = sinon.spy(); bar = sinon.spy(); }); describe('.on', function() { it('should throw error if foo is not a function', function() { var fn = emitter.on.bind(null, 'abc', 'abc'); expect(fn).to.throw(TypeError); }); it('should register event with emitter._events', function() { emitter.on('data', foo); expect(emitter._events.data[0]).to.equal(foo); }); it('should be able to register multiple foos', function() { emitter.on('data', foo); emitter.on('data', bar); expect(emitter._events.data[0]).to.equal(foo); expect(emitter._events.data[1]).to.equal(bar); }); it('should return itself', function() { expect(emitter.on('data', foo)).to.equal(emitter); }); it('emits newListener event with event name and listener args', function() { var emitSpy = sinon.spy(emitter, 'emit'); emitter.on('foo', foo); sinon.assert.calledOnce(emitSpy); sinon.assert.calledWith(emitSpy, 'newListener', 'foo', foo); }); }); describe('.emit', function() { beforeEach(function() { emitter.on('data', foo); emitter.on('data', bar); }); it('should trigger listeners bound to event', function() { emitter.emit('data'); expect(foo.calledOnce).to.be.true; expect(bar.calledOnce).to.be.true; }); it('should trigger listeners in order', function() { emitter.emit('data'); expect(foo.calledBefore(bar)).to.be.true; }); it('should apply arguments to each listener', function() { var arg1 = 1, arg2 = '2', arg3 = {}; emitter.emit('data', arg1, arg2, arg3); sinon.assert.calledWithExactly(foo, arg1, arg2, arg3); }); it('should bind "this" to the emitter in listener', function(done) { var fn = function() { expect(this).to.equal(emitter); done(); }; emitter.on('data', fn); emitter.emit('data'); }); it('should return true if listeners were fired', function() { expect(emitter.emit('data')).to.be.true; }); it('should return false if no listeners fired', function() { expect(emitter.emit('adf')).to.be.false; }); }); describe('.removeAllListeners', function() { beforeEach(function() { emitter.on('foo', foo); emitter.on('foo', function() {}); emitter.on('bar', bar); }); it('should remove all listeners if no parameter', function() { emitter.removeAllListeners(); expect(emitter._events).to.be.empty; }); it('should only remove listeners to specified event', function() { emitter.removeAllListeners('foo'); expect(emitter._events.foo).to.be.undefined; expect(emitter._events.bar).to.not.be.undefined; }); it('should return the emitter', function() { expect(emitter.removeAllListeners()).to.equal(emitter); }); }); describe('.removeListener', function() { var baz; beforeEach(function() { baz = sinon.spy(); emitter.on('foo', foo); emitter.on('foo', baz); emitter.on('bar', bar); }); it('should remove only one listener for event', function() { emitter.removeListener('foo', baz); expect(emitter._events.foo.length).to.equal(1); expect(emitter._events.foo[0]).to.equal(foo); }); it('should throw error if listener is not a function', function() { var fn = emitter.removeListener.bind(emitter, 'foo', 'foo'); expect(fn).to.throw(TypeError); }); it('should return the emitter', function() { expect(emitter.removeListener('foo', foo)).to.equal(emitter); }); it('should be able to remove listener added by .once', function() { var qux = sinon.spy(); emitter.once('bar', qux); emitter.removeListener('bar', qux); expect(emitter._events.bar.length).to.equal(1); 
expect(emitter._events.bar[0]).to.equal(bar); }); it('should emit removeListener event with event name and listener args', function() { var emitSpy = sinon.spy(emitter, 'emit'); emitter.removeListener('foo', foo); sinon.assert.calledOnce(emitSpy); sinon.assert.calledWith(emitSpy, 'removeListener', 'foo', foo); }); }); describe('.once', function() { it('should throw error if listener is not a function', function() { var fn = emitter.once.bind(null, 'abc', 'abc'); expect(fn).to.throw(TypeError); }); it('should register a listener', function() { emitter.once('foo', foo); expect(emitter._events.foo.length).to.equal(1); }); it('should run registered function', function() { emitter.once('foo', foo); emitter.emit('foo'); expect(foo.calledOnce).to.be.true; }); it('should remove listener after .emit', function() { emitter.once('foo', foo); emitter.emit('foo'); expect(emitter._events.foo).to.be.empty; }); it('should pass all parameters from listener', function() { var arg1 = 1, arg2 = '2', arg3 = {}; emitter.once('foo', foo); emitter.emit('foo', arg1, arg2, arg3); sinon.assert.calledWithExactly(foo, arg1, arg2, arg3); }); it('should return the emitter', function() { expect(emitter.once('foo', foo)).to.equal(emitter); }); it('emits newListener event with event name and listener args', function() { var emitSpy = sinon.spy(emitter, 'emit'); emitter.once('foo', foo); sinon.assert.calledOnce(emitSpy); sinon.assert.calledWith(emitSpy, 'newListener', 'foo', foo); }); }); describe('.listeners', function() { beforeEach(function() { emitter.on('foo', foo); emitter.on('bar', bar); }); it('should return an array of listeners for an event', function() { expect(emitter.listeners('foo')).to.deep.equal([foo]); }); it('should return an empty array for unregistered events', function() { expect(emitter.listeners('abcd')).to.deep.equal([]); }); }); describe('.addListener', function() { it('should be alias to .on', function() { expect(emitter.addListener).to.equal(emitter.on); }); }); describe('.off', function() { it('should alias to .removeListener', function() { expect(emitter.off).to.equal(emitter.removeListener); }); }); describe('EventEmitter.listenerCount', function() { beforeEach(function() { emitter.on('foo', foo); emitter.on('foo', function() {}); emitter.on('bar', bar); }); it('should return 0 for non emitters', function() { expect(EventEmitter.listenerCount(1)).to.equal(0); }); it('should return 0 for no listeners', function() { expect(EventEmitter.listenerCount(emitter, 'baz')).to.equal(0); }); it('should return number of listeners', function() { expect(EventEmitter.listenerCount(emitter, 'foo')).to.equal(2); }); }); });
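// Editor's note: hypothetical usage sketch, not part of the original test file.
// It exercises only the API covered by the specs above (on/once/emit/removeListener/
// listenerCount); '../src/EventEmitter' is the module under test.
//
//   var EventEmitter = require('../src/EventEmitter');
//   var emitter = new EventEmitter();
//
//   function onData(chunk) { console.log('got', chunk); }
//   emitter.on('data', onData);          // fires for every 'data' event
//   emitter.once('end', function () {    // fires once, then removes itself
//     console.log('done');
//   });
//
//   emitter.emit('data', 'hello');       // -> got hello
//   emitter.emit('end');                 // -> done
//   emitter.removeListener('data', onData);
//   console.log(EventEmitter.listenerCount(emitter, 'data')); // 0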
jimgswang/EventEmitter
test/EventEmitter.test.js
JavaScript
mit
8,204
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # In the development environment your application's code is reloaded on
  # every request. This slows down response time but is perfect for development
  # since you don't have to restart the web server when you make code changes.
  config.cache_classes = false

  # Do not eager load code on boot.
  config.eager_load = false

  # Show full error reports and disable caching.
  config.consider_all_requests_local = true
  config.action_controller.perform_caching = false

  config.action_mailer.default_url_options = { host: 'localhost', port: 3000 }

  # Don't care if the mailer can't send.
  config.action_mailer.raise_delivery_errors = false

  # Print deprecation notices to the Rails logger.
  config.active_support.deprecation = :log

  # Raise an error on page load if there are pending migrations.
  config.active_record.migration_error = :page_load

  # Debug mode disables concatenation and preprocessing of assets.
  # This option may cause significant delays in view rendering with a large
  # number of complex assets.
  config.assets.debug = true

  # Asset digests allow you to set far-future HTTP expiration dates on all assets,
  # yet still be able to expire them through the digest params.
  config.assets.digest = true

  # Adds additional error checking when serving assets at runtime.
  # Checks for improperly declared sprockets dependencies.
  # Raises helpful error messages.
  config.assets.raise_runtime_errors = true

  # Raises error for missing translations
  # config.action_view.raise_on_missing_translations = true

  config.logger = Logger.new(STDOUT)
end
mdibaiee/pack
config/environments/development.rb
Ruby
mit
1,718
/* * Copyright (C) 2008 Apple Inc. All Rights Reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ /** * @constructor * @extends {WebInspector.Object} * @param {string} id * @param {string} name */ WebInspector.ProfileType = function(id, name) { WebInspector.Object.call(this); this._id = id; this._name = name; /** @type {!Array.<!WebInspector.ProfileHeader>} */ this._profiles = []; /** @type {?WebInspector.ProfileHeader} */ this._profileBeingRecorded = null; this._nextProfileUid = 1; window.addEventListener("unload", this._clearTempStorage.bind(this), false); } /** * @enum {string} */ WebInspector.ProfileType.Events = { AddProfileHeader: "add-profile-header", ProfileComplete: "profile-complete", RemoveProfileHeader: "remove-profile-header", ViewUpdated: "view-updated" } WebInspector.ProfileType.prototype = { /** * @return {boolean} */ hasTemporaryView: function() { return false; }, /** * @return {?string} */ fileExtension: function() { return null; }, get statusBarItems() { return []; }, get buttonTooltip() { return ""; }, get id() { return this._id; }, get treeItemTitle() { return this._name; }, get name() { return this._name; }, /** * @return {boolean} */ buttonClicked: function() { return false; }, get description() { return ""; }, /** * @return {boolean} */ isInstantProfile: function() { return false; }, /** * @return {boolean} */ isEnabled: function() { return true; }, /** * @return {!Array.<!WebInspector.ProfileHeader>} */ getProfiles: function() { /** * @param {!WebInspector.ProfileHeader} profile * @return {boolean} * @this {WebInspector.ProfileType} */ function isFinished(profile) { return this._profileBeingRecorded !== profile; } return this._profiles.filter(isFinished.bind(this)); }, /** * @return {?Element} */ decorationElement: function() { return null; }, /** * @nosideeffects * @param {number} uid * @return {?WebInspector.ProfileHeader} */ getProfile: function(uid) { for (var i = 0; i < this._profiles.length; ++i) { if (this._profiles[i].uid === uid) return this._profiles[i]; } return null; }, /** * @param {!File} file */ loadFromFile: function(file) { var name = file.name; if (name.endsWith(this.fileExtension())) name = name.substr(0, name.length - this.fileExtension().length); var profile = this.createProfileLoadedFromFile(name); profile.setFromFile(); this.setProfileBeingRecorded(profile); 
this.addProfile(profile); profile.loadFromFile(file); }, /** * @param {string} title * @return {!WebInspector.ProfileHeader} */ createProfileLoadedFromFile: function(title) { throw new Error("Needs implemented."); }, /** * @param {!WebInspector.ProfileHeader} profile */ addProfile: function(profile) { this._profiles.push(profile); this.dispatchEventToListeners(WebInspector.ProfileType.Events.AddProfileHeader, profile); }, /** * @param {!WebInspector.ProfileHeader} profile */ removeProfile: function(profile) { var index = this._profiles.indexOf(profile); if (index === -1) return; this._profiles.splice(index, 1); this._disposeProfile(profile); }, _clearTempStorage: function() { for (var i = 0; i < this._profiles.length; ++i) this._profiles[i].removeTempFile(); }, /** * @nosideeffects * @return {?WebInspector.ProfileHeader} */ profileBeingRecorded: function() { return this._profileBeingRecorded; }, /** * @param {?WebInspector.ProfileHeader} profile */ setProfileBeingRecorded: function(profile) { if (this._profileBeingRecorded) this._profileBeingRecorded.target().profilingLock.release(); if (profile) profile.target().profilingLock.acquire(); this._profileBeingRecorded = profile; }, profileBeingRecordedRemoved: function() { }, _reset: function() { var profiles = this._profiles.slice(0); for (var i = 0; i < profiles.length; ++i) this._disposeProfile(profiles[i]); this._profiles = []; this._nextProfileUid = 1; }, /** * @param {!WebInspector.ProfileHeader} profile */ _disposeProfile: function(profile) { this.dispatchEventToListeners(WebInspector.ProfileType.Events.RemoveProfileHeader, profile); profile.dispose(); if (this._profileBeingRecorded === profile) { this.profileBeingRecordedRemoved(); this.setProfileBeingRecorded(null); } }, __proto__: WebInspector.Object.prototype } /** * @interface */ WebInspector.ProfileType.DataDisplayDelegate = function() { } WebInspector.ProfileType.DataDisplayDelegate.prototype = { /** * @param {?WebInspector.ProfileHeader} profile * @return {?WebInspector.View} */ showProfile: function(profile) { }, /** * @param {!HeapProfilerAgent.HeapSnapshotObjectId} snapshotObjectId * @param {string} perspectiveName */ showObject: function(snapshotObjectId, perspectiveName) { } } /** * @constructor * @extends {WebInspector.TargetAwareObject} * @param {!WebInspector.Target} target * @param {!WebInspector.ProfileType} profileType * @param {string} title */ WebInspector.ProfileHeader = function(target, profileType, title) { WebInspector.TargetAwareObject.call(this, target); this._profileType = profileType; this.title = title; this.uid = profileType._nextProfileUid++; this._fromFile = false; } /** * @constructor * @param {?string} subtitle * @param {boolean|undefined} wait */ WebInspector.ProfileHeader.StatusUpdate = function(subtitle, wait) { /** @type {?string} */ this.subtitle = subtitle; /** @type {boolean|undefined} */ this.wait = wait; } WebInspector.ProfileHeader.Events = { UpdateStatus: "UpdateStatus", ProfileReceived: "ProfileReceived" } WebInspector.ProfileHeader.prototype = { /** * @return {!WebInspector.ProfileType} */ profileType: function() { return this._profileType; }, /** * @param {?string} subtitle * @param {boolean=} wait */ updateStatus: function(subtitle, wait) { this.dispatchEventToListeners(WebInspector.ProfileHeader.Events.UpdateStatus, new WebInspector.ProfileHeader.StatusUpdate(subtitle, wait)); }, /** * Must be implemented by subclasses. 
* @param {!WebInspector.ProfileType.DataDisplayDelegate} dataDisplayDelegate * @return {!WebInspector.ProfileSidebarTreeElement} */ createSidebarTreeElement: function(dataDisplayDelegate) { throw new Error("Needs implemented."); }, /** * @param {!WebInspector.ProfileType.DataDisplayDelegate} dataDisplayDelegate * @return {!WebInspector.View} */ createView: function(dataDisplayDelegate) { throw new Error("Not implemented."); }, removeTempFile: function() { if (this._tempFile) this._tempFile.remove(); }, dispose: function() { }, /** * @param {!Function} callback */ load: function(callback) { }, /** * @return {boolean} */ canSaveToFile: function() { return false; }, saveToFile: function() { throw new Error("Needs implemented"); }, /** * @param {!File} file */ loadFromFile: function(file) { throw new Error("Needs implemented"); }, /** * @return {boolean} */ fromFile: function() { return this._fromFile; }, setFromFile: function() { this._fromFile = true; }, __proto__: WebInspector.TargetAwareObject.prototype } /** * @constructor * @implements {WebInspector.Searchable} * @implements {WebInspector.ProfileType.DataDisplayDelegate} * @extends {WebInspector.PanelWithSidebarTree} */ WebInspector.ProfilesPanel = function() { WebInspector.PanelWithSidebarTree.call(this, "profiles"); this.registerRequiredCSS("panelEnablerView.css"); this.registerRequiredCSS("heapProfiler.css"); this.registerRequiredCSS("profilesPanel.css"); this._target = /** @type {!WebInspector.Target} */ (WebInspector.targetManager.activeTarget()); this._target.profilingLock.addEventListener(WebInspector.Lock.Events.StateChanged, this._onProfilingStateChanged, this); this._searchableView = new WebInspector.SearchableView(this); var mainView = new WebInspector.VBox(); this._searchableView.show(mainView.element); mainView.show(this.mainElement()); this.profilesItemTreeElement = new WebInspector.ProfilesSidebarTreeElement(this); this.sidebarTree.appendChild(this.profilesItemTreeElement); this.profileViews = document.createElement("div"); this.profileViews.id = "profile-views"; this.profileViews.classList.add("vbox"); this._searchableView.element.appendChild(this.profileViews); var statusBarContainer = document.createElementWithClass("div", "profiles-status-bar"); mainView.element.insertBefore(statusBarContainer, mainView.element.firstChild); this._statusBarElement = statusBarContainer.createChild("div", "status-bar"); this.sidebarElement().classList.add("profiles-sidebar-tree-box"); var statusBarContainerLeft = document.createElementWithClass("div", "profiles-status-bar"); this.sidebarElement().insertBefore(statusBarContainerLeft, this.sidebarElement().firstChild); this._statusBarButtons = statusBarContainerLeft.createChild("div", "status-bar"); this.recordButton = new WebInspector.StatusBarButton("", "record-profile-status-bar-item"); this.recordButton.addEventListener("click", this.toggleRecordButton, this); this._statusBarButtons.appendChild(this.recordButton.element); this.clearResultsButton = new WebInspector.StatusBarButton(WebInspector.UIString("Clear all profiles."), "clear-status-bar-item"); this.clearResultsButton.addEventListener("click", this._reset, this); this._statusBarButtons.appendChild(this.clearResultsButton.element); this._profileTypeStatusBarItemsContainer = this._statusBarElement.createChild("div"); this._profileViewStatusBarItemsContainer = this._statusBarElement.createChild("div"); this._profileGroups = {}; this._launcherView = new WebInspector.MultiProfileLauncherView(this); 
this._launcherView.addEventListener(WebInspector.MultiProfileLauncherView.EventTypes.ProfileTypeSelected, this._onProfileTypeSelected, this); this._profileToView = []; this._typeIdToSidebarSection = {}; var types = WebInspector.ProfileTypeRegistry.instance.profileTypes(); for (var i = 0; i < types.length; i++) this._registerProfileType(types[i]); this._launcherView.restoreSelectedProfileType(); this.profilesItemTreeElement.select(); this._showLauncherView(); this._createFileSelectorElement(); this.element.addEventListener("contextmenu", this._handleContextMenuEvent.bind(this), true); this._registerShortcuts(); this._configureCpuProfilerSamplingInterval(); WebInspector.settings.highResolutionCpuProfiling.addChangeListener(this._configureCpuProfilerSamplingInterval, this); } /** * @constructor */ WebInspector.ProfileTypeRegistry = function() { this._profileTypes = []; this.cpuProfileType = new WebInspector.CPUProfileType(); this._addProfileType(this.cpuProfileType); this.heapSnapshotProfileType = new WebInspector.HeapSnapshotProfileType(); this._addProfileType(this.heapSnapshotProfileType); this.trackingHeapSnapshotProfileType = new WebInspector.TrackingHeapSnapshotProfileType(); this._addProfileType(this.trackingHeapSnapshotProfileType); HeapProfilerAgent.enable(); if (Capabilities.isMainFrontend && WebInspector.experimentsSettings.canvasInspection.isEnabled()) { this.canvasProfileType = new WebInspector.CanvasProfileType(); this._addProfileType(this.canvasProfileType); } } WebInspector.ProfileTypeRegistry.prototype = { /** * @param {!WebInspector.ProfileType} profileType */ _addProfileType: function(profileType) { this._profileTypes.push(profileType); }, /** * @return {!Array.<!WebInspector.ProfileType>} */ profileTypes: function() { return this._profileTypes; } } WebInspector.ProfilesPanel.prototype = { /** * @return {!WebInspector.SearchableView} */ searchableView: function() { return this._searchableView; }, _createFileSelectorElement: function() { if (this._fileSelectorElement) this.element.removeChild(this._fileSelectorElement); this._fileSelectorElement = WebInspector.createFileSelectorElement(this._loadFromFile.bind(this)); this.element.appendChild(this._fileSelectorElement); }, _findProfileTypeByExtension: function(fileName) { var types = WebInspector.ProfileTypeRegistry.instance.profileTypes(); for (var i = 0; i < types.length; i++) { var type = types[i]; var extension = type.fileExtension(); if (!extension) continue; if (fileName.endsWith(type.fileExtension())) return type; } return null; }, _registerShortcuts: function() { this.registerShortcuts(WebInspector.ShortcutsScreen.ProfilesPanelShortcuts.StartStopRecording, this.toggleRecordButton.bind(this)); }, _configureCpuProfilerSamplingInterval: function() { var intervalUs = WebInspector.settings.highResolutionCpuProfiling.get() ? 100 : 1000; ProfilerAgent.setSamplingInterval(intervalUs, didChangeInterval); function didChangeInterval(error) { if (error) WebInspector.messageSink.addErrorMessage(error, true); } }, /** * @param {!File} file */ _loadFromFile: function(file) { this._createFileSelectorElement(); var profileType = this._findProfileTypeByExtension(file.name); if (!profileType) { var extensions = []; var types = WebInspector.ProfileTypeRegistry.instance.profileTypes(); for (var i = 0; i < types.length; i++) { var extension = types[i].fileExtension(); if (!extension || extensions.indexOf(extension) !== -1) continue; extensions.push(extension); } WebInspector.messageSink.addMessage(WebInspector.UIString("Can't load file. 
Only files with extensions '%s' can be loaded.", extensions.join("', '"))); return; } if (!!profileType.profileBeingRecorded()) { WebInspector.messageSink.addMessage(WebInspector.UIString("Can't load profile while another profile is recording.")); return; } profileType.loadFromFile(file); }, /** * @return {boolean} */ toggleRecordButton: function() { if (!this.recordButton.enabled()) return true; var type = this._selectedProfileType; var isProfiling = type.buttonClicked(); this._updateRecordButton(isProfiling); if (isProfiling) { this._launcherView.profileStarted(); if (type.hasTemporaryView()) this.showProfile(type.profileBeingRecorded()); } else { this._launcherView.profileFinished(); } return true; }, _onProfilingStateChanged: function() { this._updateRecordButton(this.recordButton.toggled); }, /** * @param {boolean} toggled */ _updateRecordButton: function(toggled) { var enable = toggled || !this._target.profilingLock.isAcquired(); this.recordButton.setEnabled(enable); this.recordButton.toggled = toggled; if (enable) this.recordButton.title = this._selectedProfileType ? this._selectedProfileType.buttonTooltip : ""; else this.recordButton.title = WebInspector.UIString("Another profiler is already active"); if (this._selectedProfileType) this._launcherView.updateProfileType(this._selectedProfileType, enable); }, _profileBeingRecordedRemoved: function() { this._updateRecordButton(false); this._launcherView.profileFinished(); }, /** * @param {!WebInspector.Event} event */ _onProfileTypeSelected: function(event) { this._selectedProfileType = /** @type {!WebInspector.ProfileType} */ (event.data); this._updateProfileTypeSpecificUI(); }, _updateProfileTypeSpecificUI: function() { this._updateRecordButton(this.recordButton.toggled); this._profileTypeStatusBarItemsContainer.removeChildren(); var statusBarItems = this._selectedProfileType.statusBarItems; if (statusBarItems) { for (var i = 0; i < statusBarItems.length; ++i) this._profileTypeStatusBarItemsContainer.appendChild(statusBarItems[i]); } }, _reset: function() { WebInspector.Panel.prototype.reset.call(this); var types = WebInspector.ProfileTypeRegistry.instance.profileTypes(); for (var i = 0; i < types.length; i++) types[i]._reset(); delete this.visibleView; delete this.currentQuery; this.searchCanceled(); this._profileGroups = {}; this._updateRecordButton(false); this._launcherView.profileFinished(); this.sidebarTree.element.classList.remove("some-expandable"); this._launcherView.detach(); this.profileViews.removeChildren(); this._profileViewStatusBarItemsContainer.removeChildren(); this.removeAllListeners(); this.recordButton.visible = true; this._profileViewStatusBarItemsContainer.classList.remove("hidden"); this.clearResultsButton.element.classList.remove("hidden"); this.profilesItemTreeElement.select(); this._showLauncherView(); }, _showLauncherView: function() { this.closeVisibleView(); this._profileViewStatusBarItemsContainer.removeChildren(); this._launcherView.show(this.profileViews); this.visibleView = this._launcherView; }, _garbageCollectButtonClicked: function() { HeapProfilerAgent.collectGarbage(); }, /** * @param {!WebInspector.ProfileType} profileType */ _registerProfileType: function(profileType) { this._launcherView.addProfileType(profileType); var profileTypeSection = new WebInspector.ProfileTypeSidebarSection(this, profileType); this._typeIdToSidebarSection[profileType.id] = profileTypeSection this.sidebarTree.appendChild(profileTypeSection); profileTypeSection.childrenListElement.addEventListener("contextmenu", 
this._handleContextMenuEvent.bind(this), true); /** * @param {!WebInspector.Event} event * @this {WebInspector.ProfilesPanel} */ function onAddProfileHeader(event) { this._addProfileHeader(/** @type {!WebInspector.ProfileHeader} */ (event.data)); } /** * @param {!WebInspector.Event} event * @this {WebInspector.ProfilesPanel} */ function onRemoveProfileHeader(event) { this._removeProfileHeader(/** @type {!WebInspector.ProfileHeader} */ (event.data)); } /** * @param {!WebInspector.Event} event * @this {WebInspector.ProfilesPanel} */ function profileComplete(event) { this.showProfile(/** @type {!WebInspector.ProfileHeader} */ (event.data)); } profileType.addEventListener(WebInspector.ProfileType.Events.ViewUpdated, this._updateProfileTypeSpecificUI, this); profileType.addEventListener(WebInspector.ProfileType.Events.AddProfileHeader, onAddProfileHeader, this); profileType.addEventListener(WebInspector.ProfileType.Events.RemoveProfileHeader, onRemoveProfileHeader, this); profileType.addEventListener(WebInspector.ProfileType.Events.ProfileComplete, profileComplete, this); var profiles = profileType.getProfiles(); for (var i = 0; i < profiles.length; i++) this._addProfileHeader(profiles[i]); }, /** * @param {?Event} event */ _handleContextMenuEvent: function(event) { var element = event.srcElement; while (element && !element.treeElement && element !== this.element) element = element.parentElement; if (!element) return; if (element.treeElement && element.treeElement.handleContextMenuEvent) { element.treeElement.handleContextMenuEvent(event, this); return; } var contextMenu = new WebInspector.ContextMenu(event); if (this.visibleView instanceof WebInspector.HeapSnapshotView) { this.visibleView.populateContextMenu(contextMenu, event); } if (element !== this.element || event.srcElement === this.sidebarElement()) { contextMenu.appendItem(WebInspector.UIString("Load\u2026"), this._fileSelectorElement.click.bind(this._fileSelectorElement)); } contextMenu.show(); }, showLoadFromFileDialog: function() { this._fileSelectorElement.click(); }, /** * @param {!WebInspector.ProfileHeader} profile */ _addProfileHeader: function(profile) { var profileType = profile.profileType(); var typeId = profileType.id; this._typeIdToSidebarSection[typeId].addProfileHeader(profile); if (!this.visibleView || this.visibleView === this._launcherView) this.showProfile(profile); }, /** * @param {!WebInspector.ProfileHeader} profile */ _removeProfileHeader: function(profile) { if (profile.profileType()._profileBeingRecorded === profile) this._profileBeingRecordedRemoved(); var i = this._indexOfViewForProfile(profile); if (i !== -1) this._profileToView.splice(i, 1); var profileType = profile.profileType(); var typeId = profileType.id; var sectionIsEmpty = this._typeIdToSidebarSection[typeId].removeProfileHeader(profile); // No other item will be selected if there aren't any other profiles, so // make sure that view gets cleared when the last profile is removed. 
if (sectionIsEmpty) { this.profilesItemTreeElement.select(); this._showLauncherView(); } }, /** * @param {?WebInspector.ProfileHeader} profile * @return {?WebInspector.View} */ showProfile: function(profile) { if (!profile || (profile.profileType().profileBeingRecorded() === profile) && !profile.profileType().hasTemporaryView()) return null; var view = this._viewForProfile(profile); if (view === this.visibleView) return view; this.closeVisibleView(); view.show(this.profileViews); this.visibleView = view; var profileTypeSection = this._typeIdToSidebarSection[profile.profileType().id]; var sidebarElement = profileTypeSection.sidebarElementForProfile(profile); sidebarElement.revealAndSelect(); this._profileViewStatusBarItemsContainer.removeChildren(); var statusBarItems = view.statusBarItems; if (statusBarItems) for (var i = 0; i < statusBarItems.length; ++i) this._profileViewStatusBarItemsContainer.appendChild(statusBarItems[i]); return view; }, /** * @param {!HeapProfilerAgent.HeapSnapshotObjectId} snapshotObjectId * @param {string} perspectiveName */ showObject: function(snapshotObjectId, perspectiveName) { var heapProfiles = WebInspector.ProfileTypeRegistry.instance.heapSnapshotProfileType.getProfiles(); for (var i = 0; i < heapProfiles.length; i++) { var profile = heapProfiles[i]; // FIXME: allow to choose snapshot if there are several options. if (profile.maxJSObjectId >= snapshotObjectId) { this.showProfile(profile); var view = this._viewForProfile(profile); view.highlightLiveObject(perspectiveName, snapshotObjectId); break; } } }, /** * @param {!WebInspector.ProfileHeader} profile * @return {!WebInspector.View} */ _viewForProfile: function(profile) { var index = this._indexOfViewForProfile(profile); if (index !== -1) return this._profileToView[index].view; var view = profile.createView(this); view.element.classList.add("profile-view"); this._profileToView.push({ profile: profile, view: view}); return view; }, /** * @param {!WebInspector.ProfileHeader} profile * @return {number} */ _indexOfViewForProfile: function(profile) { for (var i = 0; i < this._profileToView.length; i++) { if (this._profileToView[i].profile === profile) return i; } return -1; }, closeVisibleView: function() { if (this.visibleView) this.visibleView.detach(); delete this.visibleView; }, /** * @param {string} query * @param {boolean} shouldJump * @param {boolean=} jumpBackwards */ performSearch: function(query, shouldJump, jumpBackwards) { this.searchCanceled(); var visibleView = this.visibleView; if (!visibleView) return; /** * @this {WebInspector.ProfilesPanel} */ function finishedCallback(view, searchMatches) { if (!searchMatches) return; this._searchableView.updateSearchMatchesCount(searchMatches); this._searchResultsView = view; if (shouldJump) { if (jumpBackwards) view.jumpToLastSearchResult(); else view.jumpToFirstSearchResult(); this._searchableView.updateCurrentMatchIndex(view.currentSearchResultIndex()); } } visibleView.currentQuery = query; visibleView.performSearch(query, finishedCallback.bind(this)); }, jumpToNextSearchResult: function() { if (!this._searchResultsView) return; if (this._searchResultsView !== this.visibleView) return; this._searchResultsView.jumpToNextSearchResult(); this._searchableView.updateCurrentMatchIndex(this._searchResultsView.currentSearchResultIndex()); }, jumpToPreviousSearchResult: function() { if (!this._searchResultsView) return; if (this._searchResultsView !== this.visibleView) return; this._searchResultsView.jumpToPreviousSearchResult(); 
this._searchableView.updateCurrentMatchIndex(this._searchResultsView.currentSearchResultIndex()); }, searchCanceled: function() { if (this._searchResultsView) { if (this._searchResultsView.searchCanceled) this._searchResultsView.searchCanceled(); this._searchResultsView.currentQuery = null; this._searchResultsView = null; } this._searchableView.updateSearchMatchesCount(0); }, /** * @param {!Event} event * @param {!WebInspector.ContextMenu} contextMenu * @param {!Object} target */ appendApplicableItems: function(event, contextMenu, target) { if (!(target instanceof WebInspector.RemoteObject)) return; if (WebInspector.inspectorView.currentPanel() !== this) return; var object = /** @type {!WebInspector.RemoteObject} */ (target); var objectId = object.objectId; if (!objectId) return; var heapProfiles = WebInspector.ProfileTypeRegistry.instance.heapSnapshotProfileType.getProfiles(); if (!heapProfiles.length) return; /** * @this {WebInspector.ProfilesPanel} */ function revealInView(viewName) { HeapProfilerAgent.getHeapObjectId(objectId, didReceiveHeapObjectId.bind(this, viewName)); } /** * @this {WebInspector.ProfilesPanel} */ function didReceiveHeapObjectId(viewName, error, result) { if (WebInspector.inspectorView.currentPanel() !== this) return; if (!error) this.showObject(result, viewName); } if (WebInspector.settings.showAdvancedHeapSnapshotProperties.get()) contextMenu.appendItem(WebInspector.UIString(WebInspector.useLowerCaseMenuTitles() ? "Reveal in Dominators view" : "Reveal in Dominators View"), revealInView.bind(this, "Dominators")); contextMenu.appendItem(WebInspector.UIString(WebInspector.useLowerCaseMenuTitles() ? "Reveal in Summary view" : "Reveal in Summary View"), revealInView.bind(this, "Summary")); }, __proto__: WebInspector.PanelWithSidebarTree.prototype } /** * @constructor * @extends {WebInspector.SidebarSectionTreeElement} * @param {!WebInspector.ProfileType.DataDisplayDelegate} dataDisplayDelegate * @param {!WebInspector.ProfileType} profileType */ WebInspector.ProfileTypeSidebarSection = function(dataDisplayDelegate, profileType) { WebInspector.SidebarSectionTreeElement.call(this, profileType.treeItemTitle, null, true); this._dataDisplayDelegate = dataDisplayDelegate; this._profileTreeElements = []; this._profileGroups = {}; this.hidden = true; } /** * @constructor */ WebInspector.ProfileTypeSidebarSection.ProfileGroup = function() { this.profileSidebarTreeElements = []; this.sidebarTreeElement = null; } WebInspector.ProfileTypeSidebarSection.prototype = { /** * @param {!WebInspector.ProfileHeader} profile */ addProfileHeader: function(profile) { this.hidden = false; var profileType = profile.profileType(); var sidebarParent = this; var profileTreeElement = profile.createSidebarTreeElement(this._dataDisplayDelegate); this._profileTreeElements.push(profileTreeElement); if (!profile.fromFile() && profileType.profileBeingRecorded() !== profile) { var profileTitle = profile.title; var group = this._profileGroups[profileTitle]; if (!group) { group = new WebInspector.ProfileTypeSidebarSection.ProfileGroup(); this._profileGroups[profileTitle] = group; } group.profileSidebarTreeElements.push(profileTreeElement); var groupSize = group.profileSidebarTreeElements.length; if (groupSize === 2) { // Make a group TreeElement now that there are 2 profiles. 
group.sidebarTreeElement = new WebInspector.ProfileGroupSidebarTreeElement(this._dataDisplayDelegate, profile.title); var firstProfileTreeElement = group.profileSidebarTreeElements[0]; // Insert at the same index for the first profile of the group. var index = this.children.indexOf(firstProfileTreeElement); this.insertChild(group.sidebarTreeElement, index); // Move the first profile to the group. var selected = firstProfileTreeElement.selected; this.removeChild(firstProfileTreeElement); group.sidebarTreeElement.appendChild(firstProfileTreeElement); if (selected) firstProfileTreeElement.revealAndSelect(); firstProfileTreeElement.small = true; firstProfileTreeElement.mainTitle = WebInspector.UIString("Run %d", 1); this.treeOutline.element.classList.add("some-expandable"); } if (groupSize >= 2) { sidebarParent = group.sidebarTreeElement; profileTreeElement.small = true; profileTreeElement.mainTitle = WebInspector.UIString("Run %d", groupSize); } } sidebarParent.appendChild(profileTreeElement); }, /** * @param {!WebInspector.ProfileHeader} profile * @return {boolean} */ removeProfileHeader: function(profile) { var index = this._sidebarElementIndex(profile); if (index === -1) return false; var profileTreeElement = this._profileTreeElements[index]; this._profileTreeElements.splice(index, 1); var sidebarParent = this; var group = this._profileGroups[profile.title]; if (group) { var groupElements = group.profileSidebarTreeElements; groupElements.splice(groupElements.indexOf(profileTreeElement), 1); if (groupElements.length === 1) { // Move the last profile out of its group and remove the group. var pos = sidebarParent.children.indexOf(group.sidebarTreeElement); this.insertChild(groupElements[0], pos); groupElements[0].small = false; groupElements[0].mainTitle = group.sidebarTreeElement.title; this.removeChild(group.sidebarTreeElement); } if (groupElements.length !== 0) sidebarParent = group.sidebarTreeElement; } sidebarParent.removeChild(profileTreeElement); profileTreeElement.dispose(); if (this.children.length) return false; this.hidden = true; return true; }, /** * @param {!WebInspector.ProfileHeader} profile * @return {?WebInspector.ProfileSidebarTreeElement} */ sidebarElementForProfile: function(profile) { var index = this._sidebarElementIndex(profile); return index === -1 ? 
null : this._profileTreeElements[index]; }, /** * @param {!WebInspector.ProfileHeader} profile * @return {number} */ _sidebarElementIndex: function(profile) { var elements = this._profileTreeElements; for (var i = 0; i < elements.length; i++) { if (elements[i].profile === profile) return i; } return -1; }, __proto__: WebInspector.SidebarSectionTreeElement.prototype } /** * @constructor * @implements {WebInspector.ContextMenu.Provider} */ WebInspector.ProfilesPanel.ContextMenuProvider = function() { } WebInspector.ProfilesPanel.ContextMenuProvider.prototype = { /** * @param {!Event} event * @param {!WebInspector.ContextMenu} contextMenu * @param {!Object} target */ appendApplicableItems: function(event, contextMenu, target) { WebInspector.inspectorView.panel("profiles").appendApplicableItems(event, contextMenu, target); } } /** * @constructor * @extends {WebInspector.SidebarTreeElement} * @param {!WebInspector.ProfileType.DataDisplayDelegate} dataDisplayDelegate * @param {!WebInspector.ProfileHeader} profile * @param {string} className */ WebInspector.ProfileSidebarTreeElement = function(dataDisplayDelegate, profile, className) { this._dataDisplayDelegate = dataDisplayDelegate; this.profile = profile; WebInspector.SidebarTreeElement.call(this, className, profile.title, "", profile, false); this.refreshTitles(); profile.addEventListener(WebInspector.ProfileHeader.Events.UpdateStatus, this._updateStatus, this); if (profile.canSaveToFile()) this._createSaveLink(); else profile.addEventListener(WebInspector.ProfileHeader.Events.ProfileReceived, this._onProfileReceived, this); } WebInspector.ProfileSidebarTreeElement.prototype = { _createSaveLink: function() { this._saveLinkElement = this.titleContainer.createChild("span", "save-link"); this._saveLinkElement.textContent = WebInspector.UIString("Save"); this._saveLinkElement.addEventListener("click", this._saveProfile.bind(this), false); }, _onProfileReceived: function(event) { this._createSaveLink(); }, /** * @param {!WebInspector.Event} event */ _updateStatus: function(event) { var statusUpdate = event.data; if (statusUpdate.subtitle !== null) this.subtitle = statusUpdate.subtitle; if (typeof statusUpdate.wait === "boolean") this.wait = statusUpdate.wait; this.refreshTitles(); }, dispose: function() { this.profile.removeEventListener(WebInspector.ProfileHeader.Events.UpdateStatus, this._updateStatus, this); this.profile.removeEventListener(WebInspector.ProfileHeader.Events.ProfileReceived, this._onProfileReceived, this); }, onselect: function() { this._dataDisplayDelegate.showProfile(this.profile); }, /** * @return {boolean} */ ondelete: function() { this.profile.profileType().removeProfile(this.profile); return true; }, /** * @param {!Event} event * @param {!WebInspector.ProfilesPanel} panel */ handleContextMenuEvent: function(event, panel) { var profile = this.profile; var contextMenu = new WebInspector.ContextMenu(event); // FIXME: use context menu provider contextMenu.appendItem(WebInspector.UIString("Load\u2026"), panel._fileSelectorElement.click.bind(panel._fileSelectorElement)); if (profile.canSaveToFile()) contextMenu.appendItem(WebInspector.UIString("Save\u2026"), profile.saveToFile.bind(profile)); contextMenu.appendItem(WebInspector.UIString("Delete"), this.ondelete.bind(this)); contextMenu.show(); }, _saveProfile: function(event) { this.profile.saveToFile(); }, __proto__: WebInspector.SidebarTreeElement.prototype } /** * @constructor * @extends {WebInspector.SidebarTreeElement} * @param {!WebInspector.ProfileType.DataDisplayDelegate} 
dataDisplayDelegate * @param {string} title * @param {string=} subtitle */ WebInspector.ProfileGroupSidebarTreeElement = function(dataDisplayDelegate, title, subtitle) { WebInspector.SidebarTreeElement.call(this, "profile-group-sidebar-tree-item", title, subtitle, null, true); this._dataDisplayDelegate = dataDisplayDelegate; } WebInspector.ProfileGroupSidebarTreeElement.prototype = { onselect: function() { if (this.children.length > 0) this._dataDisplayDelegate.showProfile(this.children[this.children.length - 1].profile); }, __proto__: WebInspector.SidebarTreeElement.prototype } /** * @constructor * @extends {WebInspector.SidebarTreeElement} * @param {!WebInspector.ProfilesPanel} panel */ WebInspector.ProfilesSidebarTreeElement = function(panel) { this._panel = panel; this.small = false; WebInspector.SidebarTreeElement.call(this, "profile-launcher-view-tree-item", WebInspector.UIString("Profiles"), "", null, false); } WebInspector.ProfilesSidebarTreeElement.prototype = { onselect: function() { this._panel._showLauncherView(); }, get selectable() { return true; }, __proto__: WebInspector.SidebarTreeElement.prototype } importScript("../sdk/CPUProfileModel.js"); importScript("CPUProfileDataGrid.js"); importScript("CPUProfileBottomUpDataGrid.js"); importScript("CPUProfileTopDownDataGrid.js"); importScript("CPUProfileFlameChart.js"); importScript("CPUProfileView.js"); importScript("HeapSnapshotCommon.js"); importScript("HeapSnapshotProxy.js"); importScript("HeapSnapshotDataGrids.js"); importScript("HeapSnapshotGridNodes.js"); importScript("HeapSnapshotView.js"); importScript("ProfileLauncherView.js"); importScript("CanvasProfileView.js"); importScript("CanvasReplayStateView.js"); WebInspector.ProfileTypeRegistry.instance = new WebInspector.ProfileTypeRegistry();
buglloc/ios-debug-proxy-devtools
profiler/ProfilesPanel.js
JavaScript
mit
42,236
//============================================================================= // Darken Region // LAX_DarkenRegion.js // v0.02 //============================================================================= //============================================================================= /*: * @plugindesc v0.02 Use regions to black out areas. * @author LuciusAxelrod * * * @help * Place regions on the map in the editor, then either add them to the default * list or add them to the dark region list using the add command listed below. * Note: Tiles without a region are in region 0. Adding region 0 to the dark * region list will black out every tile * * Plugin Commands: * DarkenRegion add [region list] # Adds the listed regions to the dark * region list. The list is space * separated. For example: * DarkenRegion add 1 3 5 78 * DarkenRegion remove [region list] # Removes the listed regions from the * dark region list. The list is space * separated. For example: * DarkenRegion remove 4 7 200 2 * DarkenRegion toggle [region list] # Toggle on/off each of the listed * regions. For example: * DarkenRegion toggle 1 5 7 112 250 * DarkenRegion clear # Clears the dark region list. */ //============================================================================= //============================================================================= // Parameter Variables //============================================================================= (function() { var _Game_Interpreter_pluginCommand = Game_Interpreter.prototype.pluginCommand; Game_Interpreter.prototype.pluginCommand = function(command, args) { _Game_Interpreter_pluginCommand.call(this, command, args); if (command === 'DarkenRegion') { if(args[0] === 'add') { for(var i = 1; i < args.length; i++) { $gameSystem.addToDarkList(args[i]); } } else if(args[0] === 'remove') { for(var i = 1; i < args.length; i++) { $gameSystem.removeFromDarkList(args[i]); } } else if(args[0] === 'toggle') { for(var i = 1; i < args.length; i++) { if($gameSystem.isDarkRegion(args[i])) { $gameSystem.removeFromDarkList(args[i]); } else { $gameSystem.addToDarkList(args[i]); } } } else if(args[0] === 'clear') { $gameSystem.clearDarkList(); } } }; Game_System.prototype.isDarkRegion = function(regionId) { if(this._darkList) { return !!this._darkList[regionId]; } } Game_System.prototype.addToDarkList = function(regionId) { if(!this._darkList) { this.clearDarkList(); } this._darkList[Number(regionId)] = true; } Game_System.prototype.removeFromDarkList = function(regionId) { if(this._darkList) { this._darkList[Number(regionId)] = false; } } Game_System.prototype.clearDarkList = function() { this._darkList = []; } Tilemap.prototype._paintTiles = function(startX, startY, x, y) { var tableEdgeVirtualId = 10000; var darkRegionVirtualId = 10000; var mx = startX + x; var my = startY + y; var dx = (mx * this._tileWidth).mod(this._layerWidth); var dy = (my * this._tileHeight).mod(this._layerHeight); var lx = dx / this._tileWidth; var ly = dy / this._tileHeight; var tileId0 = this._readMapData(mx, my, 0); var tileId1 = this._readMapData(mx, my, 1); var tileId2 = this._readMapData(mx, my, 2); var tileId3 = this._readMapData(mx, my, 3); var tileId5 = this._readMapData(mx, my, 5); var shadowBits = this._readMapData(mx, my, 4); var upperTileId1 = this._readMapData(mx, my - 1, 1); var lowerTiles = []; var upperTiles = []; if (this._isHigherTile(tileId0)) { upperTiles.push(tileId0); } else { lowerTiles.push(tileId0); } if (this._isHigherTile(tileId1)) { upperTiles.push(tileId1); } else { 
lowerTiles.push(tileId1); } lowerTiles.push(-shadowBits); if (this._isTableTile(upperTileId1) && !this._isTableTile(tileId1)) { if (!Tilemap.isShadowingTile(tileId0)) { lowerTiles.push(tableEdgeVirtualId + upperTileId1); } } if (this._isOverpassPosition(mx, my)) { upperTiles.push(tileId2); upperTiles.push(tileId3); } else { if (this._isHigherTile(tileId2)) { upperTiles.push(tileId2); } else { lowerTiles.push(tileId2); } if (this._isHigherTile(tileId3)) { upperTiles.push(tileId3); } else { lowerTiles.push(tileId3); } if($gameSystem.isDarkRegion(tileId5)){ upperTiles.push(darkRegionVirtualId + tileId5); } } var lastLowerTiles = this._readLastTiles(0, lx, ly); if (!lowerTiles.equals(lastLowerTiles) || (Tilemap.isTileA1(tileId0) && this._frameUpdated)) { this._lowerBitmap.clearRect(dx, dy, this._tileWidth, this._tileHeight); for (var i = 0; i < lowerTiles.length; i++) { var lowerTileId = lowerTiles[i]; if (lowerTileId < 0) { this._drawShadow(this._lowerBitmap, shadowBits, dx, dy); } else if (lowerTileId >= tableEdgeVirtualId) { this._drawTableEdge(this._lowerBitmap, upperTileId1, dx, dy); } else { this._drawTile(this._lowerBitmap, lowerTileId, dx, dy); } } this._writeLastTiles(0, lx, ly, lowerTiles); } var lastUpperTiles = this._readLastTiles(1, lx, ly); if (!upperTiles.equals(lastUpperTiles)) { this._upperBitmap.clearRect(dx, dy, this._tileWidth, this._tileHeight); for (var j = 0; j < upperTiles.length; j++) { if(upperTiles[j] >= darkRegionVirtualId) { this._drawDarkness(this._upperBitmap, dx, dy); } else { this._drawTile(this._upperBitmap, upperTiles[j], dx, dy); } } this._writeLastTiles(1, lx, ly, upperTiles); } }; Tilemap.prototype._drawDarkness = function(bitmap, dx, dy) { var w = this._tileWidth; var h = this._tileHeight; var color = 'rgba(0,0,0,1)'; bitmap.fillRect(dx, dy, w, h, color); }; })();
LuciusAxelrod/LAX_Plugins
LAX_DarkenRegion/v0.02/LAX_DarkenRegion.js
JavaScript
mit
6,112
"use strict"; (function() { function get_promise(endpoint) { return function($http) { return $http.get(endpoint); }; } angular.module('pagerbot-admin', ['ngRoute', 'ngTable', 'angular-loading-bar']) .config(function ($routeProvider) { $routeProvider .when('/intro', { templateUrl: 'views/intro.html', controller: 'PagerdutyCtrl', resolve: { pd: function(pagerduty_promise) { return pagerduty_promise; } } }) .when('/chatbot-settings', { templateUrl: 'views/bot.html', controller: 'BotSetupCtrl', resolve: { bot_info: get_promise('/api/bot') } }) .when('/plugin-setup', { templateUrl: 'views/plugins.html', controller: 'PluginSetupCtrl', resolve: { plugin_info: get_promise('/api/plugins') } }) .when('/user-aliases', { templateUrl: 'views/users.html', controller: 'UserAliasCtrl', resolve: { users: get_promise('/api/users') } }) .when('/schedule-aliases', { templateUrl: 'views/schedules.html', controller: 'ScheduleAliasCtrl', resolve: { schedules: get_promise('/api/schedules') } }) .when('/deploy', { templateUrl: 'views/deploy.html', controller: 'DeployCtrl' }) .otherwise({ redirectTo: '/intro' }); }); })();
stripe-contrib/pagerbot
public/js/app.js
JavaScript
mit
1,579
<?php

/**
 * AAddress filter form.
 *
 * @package alumni
 * @subpackage filter
 * @author E.R. Nurwijayadi
 * @version 1.0
 */
class AAddressFormFilter extends BaseAAddressFormFilter
{
  /**
   * @see AddressFormFilter
   */
  static protected $order_by_choices = array(
    null => '',
    6 => 'ID',
    21 => 'Name (Alumna/us)',
    '' => '----------',
    60 => 'Address',
    61 => 'Region',
    63 => 'Code: Country',
    64 => 'Code: Province',
    65 => 'Code: District',
    66 => 'Postal Code',
    67 => 'Street',
    68 => 'Area',
    69 => 'Building'
  );

  public function configure()
  {
    $this->widgetSchema->setFormFormatterName('list');
    $this->disableCSRFProtection();
    $this->addAllCommunityFields($this);

    $this->widgetSchema['order_by'] = new sfWidgetFormChoice(array(
      'choices' => self::$order_by_choices));
    $this->validatorSchema['order_by'] = new sfValidatorPass();

    $this->widgetSchema['department_id'] = new sfWidgetFormChoice(array(
      'choices' => array(null => '')
    ));

    $this->useFields(array(
      'department_id', 'faculty_id', 'program_id', 'class_year', 'decade', 'order_by'
    ));

    $query = Doctrine_Core::getTable('AAddress')
      ->createQuery('r')
      ->leftJoin('r.Country n')
      ->leftJoin('r.Province p')
      ->leftJoin('r.District w')
      ->leftJoin('r.Alumni a')
      ->leftJoin('a.ACommunities ac');

    $this->setQuery($query);
  }

  public function addOrderByColumnQuery(Doctrine_Query $query, $field, $values)
  {
    $order_by_choices = array(
      6 => 'r.lid',
      21 => 'a.name',
      60 => 'r.address',
      61 => 'r.region',
      63 => 'r.country_id',
      64 => 'r.province_id',
      65 => 'r.district_id',
      66 => 'r.postal_code',
      67 => 'r.street',
      68 => 'r.area',
      69 => 'r.building'
    );

    if ( array_key_exists($values, $order_by_choices) )
      $query->orderBy( $order_by_choices[$values] );
  }

  /* This parts needs Trait in PHP 5.4 */

  public function addDepartmentIdColumnQuery(Doctrine_Query $query, $field, $values)
  {
    if (!empty($values) )
      $query->andWhere('ac.department_id = ?', $values);
  }

  public function addFacultyIdColumnQuery(Doctrine_Query $query, $field, $values)
  {
    if (!empty($values) )
      $query->andWhere('ac.faculty_id = ?', $values);
  }

  public function addProgramIdColumnQuery(Doctrine_Query $query, $field, $values)
  {
    if (!empty($values) )
      $query->andWhere('ac.program_id = ?', $values);
  }

  public function addClassYearColumnQuery(Doctrine_Query $query, $field, $values)
  {
    if (!empty($values['text']) )
      $query->andWhere('ac.class_year = ?', $values['text']);
  }

  public function addDecadeColumnQuery(Doctrine_Query $query, $field, $values)
  {
    $decades = array(1960, 1970, 1980, 1990, 2000, 2010);
    if ( in_array( $values, $decades ) ) {
      $query->andWhere('ac.class_year >= ?', $values);
      $query->andWhere('ac.class_year <= ?', $values+9);
    }
  }
}
epsi-rns/AlumniBook-SF
lib/filter/doctrine/AAddressFormFilter.class.php
PHP
mit
2,921
<?php

use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;

class DailySeedAndCleanup extends Migration
{
    /**
     * Run the migrations.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('featured_games', function (Blueprint $table) {
            $table->increments('id');
            $table->date('day')->unique();
            $table->integer('seed_id');
            $table->string('description')->default('');
            $table->timestamps();
        });

        Schema::table('seeds', function (Blueprint $table) {
            $table->dropColumn(['patch']);
        });
    }

    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::table('seeds', function (Blueprint $table) {
            $table->json('patch');
        });

        Schema::dropIfExists('featured_games');
    }
}
sporchia/alttp_vt_randomizer
database/migrations/2017_11_17_174259_daily_seed_and_cleanup.php
PHP
mit
972
<?php namespace GEPedag\EntidadesBundle\Controller; use Symfony\Component\HttpFoundation\Request; use Symfony\Bundle\FrameworkBundle\Controller\Controller; use Sensio\Bundle\FrameworkExtraBundle\Configuration\Method; use Sensio\Bundle\FrameworkExtraBundle\Configuration\Template; use GEPedag\EntidadesBundle\Entity\Asignatura; use GEPedag\EntidadesBundle\Form\AsignaturaType; class AsignaturaController extends Controller { /** * @Method("GET") * @Template() */ public function indexAction() { $em = $this->getDoctrine()->getManager(); $entities = $em->getRepository('GEPedagEntidadesBundle:Asignatura')->findAll(); return [ 'entities' => $entities ]; } /** * @Method("POST") */ public function createAction(Request $request) { $asignatura = new Asignatura(); $form = $this->createForm(new AsignaturaType(), $asignatura); $form->handleRequest($request); if ($form->isValid()) { $em = $this->getDoctrine()->getManager(); $em->persist($asignatura); $em->flush(); $this->get('session')->getFlashBag()->add( 'success', 'La asignatura <i>' . $asignatura . '</i> se ha creado con รฉxito!'); return $this->redirect($this->generateUrl('ge_asign_homepage')); } // print_r($form->getErrors());die; // foreach ($form->getErrors() as $error) { // } $this->get('session')->getFlashBag()->add('error', 'Error al registrar la asignatura.'); return $this->redirect($this->generateUrl('ge_asign_homepage')); } /** * @Method("GET") * @Template("GEPedagEntidadesBundle:Asignatura:new_edit.html.twig") */ public function newAction() { $asignatura = new Asignatura(); $titulo = 'Crear'; $form = $this->createForm(new AsignaturaType(), $asignatura); $form->add('submit', 'submit', array('label' => $titulo)); return [ 'action' => $this->generateUrl('ge_asign_create'), 'entity' => $asignatura, 'form' => $form->createView(), 'titulo' => $titulo, ]; } /** * @Method("GET") */ public function editAction($id) { $titulo = 'Actualizar'; $em = $this->getDoctrine()->getManager(); $asignatura = $em->getRepository('GEPedagEntidadesBundle:Asignatura')->find($id); if (!$asignatura) { throw $this->createNotFoundException('No existe la Asignatura con id: ' . $id); } $editForm = $this->createForm(new AsignaturaType(), $asignatura); $editForm->add('submit', 'submit', array('label' => $titulo)); return $this->render('GEPedagEntidadesBundle:Asignatura:new_edit.html.twig', [ 'action' => $this->generateUrl('ge_asign_update', array('id' => $asignatura->getId())), 'entity' => $asignatura, 'titulo' => $titulo, 'form' => $editForm->createView() ]); } /** * @Method("POST") */ public function updateAction(Request $request, $id) { $em = $this->getDoctrine()->getManager(); $asignatura = $em->getRepository('GEPedagEntidadesBundle:Asignatura')->find($id); if (!$asignatura) { $this->get('session')->getFlashBag()->add( 'error', 'No existe la asignatura con id: ' . $id); return $this->redirect($this->generateUrl('ge_asign_homepage')); } $editForm = $this->createForm(new AsignaturaType(), $asignatura); $editForm->handleRequest($request); if ($editForm->isValid()) { $em->persist($asignatura); $em->flush(); $this->get('session')->getFlashBag()->add( 'success', 'Muy Bien! La asignatura <i>' . $asignatura . 
'</i> se ha actualizado con รฉxito!'); return $this->redirect($this->generateUrl('ge_asign_homepage')); } $this->get('session')->getFlashBag()->add( 'success', 'Error al crear la asignatura.'); return $this->redirect($this->generateUrl('ge_asign_homepage')); } /** * @Method("GET") */ public function deleteAction(Request $request, $id) { $em = $this->getDoctrine()->getManager(); $asignatura = $em->getRepository('GEPedagEntidadesBundle:Asignatura')->find($id); if (!$asignatura) { $this->get('session')->getFlashBag()->add( 'error', 'No existe la asignatura con id: ' . $id); return $this->redirect($this->generateUrl('ge_asign_homepage')); } $em->remove($asignatura); $em->flush(); $this->get('session')->getFlashBag()->add( 'success', 'La asignatura <i>' . $asignatura . '</i> se ha eliminado.'); return $this->redirect($this->generateUrl('ge_asign_homepage')); } }
yanpozka/management-system-symfony
src/GEPedag/EntidadesBundle/Controller/AsignaturaController.php
PHP
mit
4,996
""" Tests for Dynamo3 """ import sys import unittest from decimal import Decimal from pickle import dumps, loads from urllib.parse import urlparse from botocore.exceptions import ClientError from mock import ANY, MagicMock, patch from dynamo3 import ( Binary, Dynamizer, DynamoDBConnection, DynamoDBError, DynamoKey, GlobalIndex, Limit, Table, ThroughputException, ) from dynamo3.constants import STRING from dynamo3.result import Capacity, ConsumedCapacity, Count, ResultSet, add_dicts class BaseSystemTest(unittest.TestCase): """Base class for system tests""" dynamo: DynamoDBConnection = None # type: ignore def setUp(self): super(BaseSystemTest, self).setUp() # Clear out any pre-existing tables for tablename in self.dynamo.list_tables(): self.dynamo.delete_table(tablename) def tearDown(self): super(BaseSystemTest, self).tearDown() for tablename in self.dynamo.list_tables(): self.dynamo.delete_table(tablename) self.dynamo.clear_hooks() class TestMisc(BaseSystemTest): """Tests that don't fit anywhere else""" def tearDown(self): super(TestMisc, self).tearDown() self.dynamo.default_return_capacity = False def test_connection_host(self): """Connection can access host of endpoint""" urlparse(self.dynamo.host) def test_connection_region(self): """Connection can access name of connected region""" self.assertTrue(isinstance(self.dynamo.region, str)) def test_connect_to_region(self): """Can connect to a dynamo region""" conn = DynamoDBConnection.connect("us-west-1") self.assertIsNotNone(conn.host) def test_connect_to_region_creds(self): """Can connect to a dynamo region with credentials""" conn = DynamoDBConnection.connect( "us-west-1", access_key="abc", secret_key="12345" ) self.assertIsNotNone(conn.host) def test_connect_to_host_without_session(self): """Can connect to a dynamo host without passing in a session""" conn = DynamoDBConnection.connect("us-west-1", host="localhost") self.assertIsNotNone(conn.host) @patch("dynamo3.connection.time") def test_retry_on_throughput_error(self, time): """Throughput exceptions trigger a retry of the request""" def call(*_, **__): """Dummy service call""" response = { "ResponseMetadata": { "HTTPStatusCode": 400, }, "Error": { "Code": "ProvisionedThroughputExceededException", "Message": "Does not matter", }, } raise ClientError(response, "list_tables") with patch.object(self.dynamo, "client") as client: client.list_tables.side_effect = call with self.assertRaises(ThroughputException): self.dynamo.call("list_tables") self.assertEqual(len(time.sleep.mock_calls), self.dynamo.request_retries - 1) self.assertTrue(time.sleep.called) def test_describe_missing(self): """Describing a missing table returns None""" ret = self.dynamo.describe_table("foobar") self.assertIsNone(ret) def test_magic_table_props(self): """Table can look up properties on response object""" hash_key = DynamoKey("id") self.dynamo.create_table("foobar", hash_key=hash_key) ret = self.dynamo.describe_table("foobar") assert ret is not None self.assertEqual(ret.item_count, ret["ItemCount"]) with self.assertRaises(KeyError): self.assertIsNotNone(ret["Missing"]) def test_magic_index_props(self): """Index can look up properties on response object""" index = GlobalIndex.all("idx-name", DynamoKey("id")) index.response = {"FooBar": 2} self.assertEqual(index["FooBar"], 2) with self.assertRaises(KeyError): self.assertIsNotNone(index["Missing"]) def test_describe_during_delete(self): """Describing a table during a delete operation should not crash""" response = { "ItemCount": 0, "ProvisionedThroughput": { 
"NumberOfDecreasesToday": 0, "ReadCapacityUnits": 5, "WriteCapacityUnits": 5, }, "TableName": "myTableName", "TableSizeBytes": 0, "TableStatus": "DELETING", } table = Table.from_response(response) self.assertEqual(table.status, "DELETING") def test_delete_missing(self): """Deleting a missing table returns False""" ret = self.dynamo.delete_table("foobar") self.assertTrue(not ret) def test_re_raise_passthrough(self): """DynamoDBError can re-raise itself if missing original exception""" err = DynamoDBError(400, Code="ErrCode", Message="Ouch", args={}) caught = False try: err.re_raise() except DynamoDBError as e: caught = True self.assertEqual(err, e) self.assertTrue(caught) def test_re_raise(self): """DynamoDBError can re-raise itself with stacktrace of original exc""" caught = False try: try: raise Exception("Hello") except Exception as e1: err = DynamoDBError( 400, Code="ErrCode", Message="Ouch", args={}, exc_info=sys.exc_info(), ) err.re_raise() except DynamoDBError as e: caught = True import traceback tb = traceback.format_tb(e.__traceback__) self.assertIn("Hello", tb[-1]) self.assertEqual(e.status_code, 400) self.assertTrue(caught) def test_default_return_capacity(self): """When default_return_capacity=True, always return capacity""" self.dynamo.default_return_capacity = True with patch.object(self.dynamo, "call") as call: call().get.return_value = None rs = self.dynamo.scan("foobar") list(rs) call.assert_called_with( "scan", TableName="foobar", ReturnConsumedCapacity="INDEXES", ConsistentRead=False, ) def test_list_tables_page(self): """Call to ListTables should page results""" hash_key = DynamoKey("id") for i in range(120): self.dynamo.create_table("table%d" % i, hash_key=hash_key) tables = list(self.dynamo.list_tables(110)) self.assertEqual(len(tables), 110) def test_limit_complete(self): """A limit with item_capacity = 0 is 'complete'""" limit = Limit(item_limit=0) self.assertTrue(limit.complete) def test_wait_create_table(self): """Create table shall wait for the table to come online.""" tablename = "foobar_wait" hash_key = DynamoKey("id") self.dynamo.create_table(tablename, hash_key=hash_key, wait=True) self.assertIsNotNone(self.dynamo.describe_table(tablename)) def test_wait_delete_table(self): """Delete table shall wait for the table to go offline.""" tablename = "foobar_wait" hash_key = DynamoKey("id") self.dynamo.create_table(tablename, hash_key=hash_key, wait=True) result = self.dynamo.delete_table(tablename, wait=True) self.assertTrue(result) class TestDataTypes(BaseSystemTest): """Tests for Dynamo data types""" def make_table(self): """Convenience method for making a table""" hash_key = DynamoKey("id") self.dynamo.create_table("foobar", hash_key=hash_key) def test_string(self): """Store and retrieve a string""" self.make_table() self.dynamo.put_item("foobar", {"id": "abc"}) item = list(self.dynamo.scan("foobar"))[0] self.assertEqual(item["id"], "abc") self.assertTrue(isinstance(item["id"], str)) def test_int(self): """Store and retrieve an int""" self.make_table() self.dynamo.put_item("foobar", {"id": "a", "num": 1}) item = list(self.dynamo.scan("foobar"))[0] self.assertEqual(item["num"], 1) def test_float(self): """Store and retrieve a float""" self.make_table() self.dynamo.put_item("foobar", {"id": "a", "num": 1.1}) item = list(self.dynamo.scan("foobar"))[0] self.assertAlmostEqual(float(item["num"]), 1.1) def test_decimal(self): """Store and retrieve a Decimal""" self.make_table() self.dynamo.put_item("foobar", {"id": "a", "num": Decimal("1.1")}) item = 
list(self.dynamo.scan("foobar"))[0] self.assertEqual(item["num"], Decimal("1.1")) def test_binary(self): """Store and retrieve a binary""" self.make_table() self.dynamo.put_item("foobar", {"id": "a", "data": Binary("abc")}) item = list(self.dynamo.scan("foobar"))[0] self.assertEqual(item["data"].value, b"abc") def test_binary_bytes(self): """Store and retrieve bytes as a binary""" self.make_table() data = {"a": 1, "b": 2} self.dynamo.put_item("foobar", {"id": "a", "data": Binary(dumps(data))}) item = list(self.dynamo.scan("foobar"))[0] self.assertEqual(loads(item["data"].value), data) def test_string_set(self): """Store and retrieve a string set""" self.make_table() item = { "id": "a", "datas": set(["a", "b"]), } self.dynamo.put_item("foobar", item) ret = list(self.dynamo.scan("foobar"))[0] self.assertEqual(ret, item) def test_number_set(self): """Store and retrieve a number set""" self.make_table() item = { "id": "a", "datas": set([1, 2, 3]), } self.dynamo.put_item("foobar", item) ret = list(self.dynamo.scan("foobar"))[0] self.assertEqual(ret, item) def test_binary_set(self): """Store and retrieve a binary set""" self.make_table() item = { "id": "a", "datas": set([Binary("a"), Binary("b")]), } self.dynamo.put_item("foobar", item) ret = list(self.dynamo.scan("foobar"))[0] self.assertEqual(ret, item) def test_binary_equal(self): """Binary should eq other Binaries and also raw bytestrings""" self.assertEqual(Binary("a"), Binary("a")) self.assertEqual(Binary("a"), b"a") self.assertFalse(Binary("a") != Binary("a")) def test_binary_repr(self): """Binary repr should wrap the contained value""" self.assertEqual(repr(Binary("a")), "Binary(%r)" % b"a") def test_binary_converts_unicode(self): """Binary will convert unicode to bytes""" b = Binary("a") self.assertTrue(isinstance(b.value, bytes)) def test_binary_force_string(self): """Binary must wrap a string type""" with self.assertRaises(TypeError): Binary(2) # type: ignore def test_bool(self): """Store and retrieve a boolean""" self.make_table() self.dynamo.put_item("foobar", {"id": "abc", "b": True}) item = list(self.dynamo.scan("foobar"))[0] self.assertEqual(item["b"], True) self.assertTrue(isinstance(item["b"], bool)) def test_list(self): """Store and retrieve a list""" self.make_table() self.dynamo.put_item("foobar", {"id": "abc", "l": ["a", 1, False]}) item = list(self.dynamo.scan("foobar"))[0] self.assertEqual(item["l"], ["a", 1, False]) def test_dict(self): """Store and retrieve a dict""" self.make_table() data = { "i": 1, "s": "abc", "n": None, "l": ["a", 1, True], "b": False, } self.dynamo.put_item("foobar", {"id": "abc", "d": data}) item = list(self.dynamo.scan("foobar"))[0] self.assertEqual(item["d"], data) def test_nested_dict(self): """Store and retrieve a nested dict""" self.make_table() data = { "s": "abc", "d": { "i": 42, }, } self.dynamo.put_item("foobar", {"id": "abc", "d": data}) item = list(self.dynamo.scan("foobar"))[0] self.assertEqual(item["d"], data) def test_nested_list(self): """Store and retrieve a nested list""" self.make_table() data = [ 1, [ True, None, "abc", ], ] self.dynamo.put_item("foobar", {"id": "abc", "l": data}) item = list(self.dynamo.scan("foobar"))[0] self.assertEqual(item["l"], data) def test_unrecognized_type(self): """Dynamizer throws error on unrecognized type""" value = { "ASDF": "abc", } with self.assertRaises(TypeError): self.dynamo.dynamizer.decode(value) class TestDynamizer(unittest.TestCase): """Tests for the Dynamizer""" def test_register_encoder(self): """Can register a custom encoder""" from 
datetime import datetime dynamizer = Dynamizer() dynamizer.register_encoder(datetime, lambda d, v: (STRING, v.isoformat())) now = datetime.utcnow() self.assertEqual(dynamizer.raw_encode(now), (STRING, now.isoformat())) def test_encoder_missing(self): """If no encoder is found, raise ValueError""" from datetime import datetime dynamizer = Dynamizer() with self.assertRaises(ValueError): dynamizer.encode(datetime.utcnow()) class TestResultModels(unittest.TestCase): """Tests for the model classes in results.py""" def test_add_dicts_base_case(self): """add_dict where one argument is None returns the other""" f = object() self.assertEqual(add_dicts(f, None), f) self.assertEqual(add_dicts(None, f), f) def test_add_dicts(self): """Merge two dicts of values together""" a = { "a": 1, "b": 2, } b = { "a": 3, "c": 4, } ret = add_dicts(a, b) self.assertEqual( ret, { "a": 4, "b": 2, "c": 4, }, ) def test_count_repr(self): """Count repr""" count = Count(0, 0) self.assertEqual(repr(count), "Count(0)") def test_count_addition(self): """Count addition""" count = Count(4, 2) self.assertEqual(count + 5, 9) def test_count_subtraction(self): """Count subtraction""" count = Count(4, 2) self.assertEqual(count - 2, 2) def test_count_multiplication(self): """Count multiplication""" count = Count(4, 2) self.assertEqual(2 * count, 8) def test_count_division(self): """Count division""" count = Count(4, 2) self.assertEqual(count / 2, 2) def test_count_add_none_capacity(self): """Count addition with one None consumed_capacity""" cap = Capacity(3, 0) count = Count(4, 2) count2 = Count(5, 3, cap) ret = count + count2 self.assertEqual(ret, 9) self.assertEqual(ret.scanned_count, 5) self.assertEqual(ret.consumed_capacity, cap) def test_count_add_capacity(self): """Count addition with consumed_capacity""" count = Count(4, 2, Capacity(3, 0)) count2 = Count(5, 3, Capacity(2, 0)) ret = count + count2 self.assertEqual(ret, 9) self.assertEqual(ret.scanned_count, 5) self.assertEqual(ret.consumed_capacity.read, 5) def test_capacity_math(self): """Capacity addition and equality""" cap = Capacity(2, 4) s = set([cap]) self.assertIn(Capacity(2, 4), s) self.assertNotEqual(Capacity(1, 4), cap) self.assertEqual(Capacity(1, 1) + Capacity(2, 2), Capacity(3, 3)) def test_capacity_format(self): """String formatting for Capacity""" c = Capacity(1, 3) self.assertEqual(str(c), "R:1.0 W:3.0") c = Capacity(0, 0) self.assertEqual(str(c), "0") def test_total_consumed_capacity(self): """ConsumedCapacity can parse results with only Total""" response = { "TableName": "foobar", "ReadCapacityUnits": 4, "WriteCapacityUnits": 5, } cap = ConsumedCapacity.from_response(response) self.assertEqual(cap.total, (4, 5)) self.assertIsNone(cap.table_capacity) def test_consumed_capacity_equality(self): """ConsumedCapacity addition and equality""" cap = ConsumedCapacity( "foobar", Capacity(0, 10), Capacity(0, 2), { "l-index": Capacity(0, 4), }, { "g-index": Capacity(0, 3), }, ) c2 = ConsumedCapacity( "foobar", Capacity(0, 10), Capacity(0, 2), { "l-index": Capacity(0, 4), "l-index2": Capacity(0, 7), }, ) self.assertNotEqual(cap, c2) c3 = ConsumedCapacity( "foobar", Capacity(0, 10), Capacity(0, 2), { "l-index": Capacity(0, 4), }, { "g-index": Capacity(0, 3), }, ) self.assertIn(cap, set([c3])) combined = cap + c2 self.assertEqual( cap + c2, ConsumedCapacity( "foobar", Capacity(0, 20), Capacity(0, 4), { "l-index": Capacity(0, 8), "l-index2": Capacity(0, 7), }, { "g-index": Capacity(0, 3), }, ), ) self.assertIn(str(Capacity(0, 3)), str(combined)) def 
test_add_different_tables(self): """Cannot add ConsumedCapacity of two different tables""" c1 = ConsumedCapacity("foobar", Capacity(1, 28)) c2 = ConsumedCapacity("boofar", Capacity(3, 0)) with self.assertRaises(TypeError): c1 += c2 def test_always_continue_query(self): """Regression test. If result has no items but does have LastEvaluatedKey, keep querying. """ conn = MagicMock() conn.dynamizer.decode_keys.side_effect = lambda x: x items = ["a", "b"] results = [ {"Items": [], "LastEvaluatedKey": {"foo": 1, "bar": 2}}, {"Items": [], "LastEvaluatedKey": {"foo": 1, "bar": 2}}, {"Items": items}, ] conn.call.side_effect = lambda *_, **__: results.pop(0) rs = ResultSet(conn, Limit()) results = list(rs) self.assertEqual(results, items) class TestHooks(BaseSystemTest): """Tests for connection callback hooks""" def tearDown(self): super(TestHooks, self).tearDown() for hooks in self.dynamo._hooks.values(): while hooks: hooks.pop() def test_precall(self): """precall hooks are called before an API call""" hook = MagicMock() self.dynamo.subscribe("precall", hook) def throw(**_): """Throw an exception to terminate the request""" raise Exception() with patch.object(self.dynamo, "client") as client: client.describe_table.side_effect = throw with self.assertRaises(Exception): self.dynamo.describe_table("foobar") hook.assert_called_with(self.dynamo, "describe_table", {"TableName": "foobar"}) def test_postcall(self): """postcall hooks are called after API call""" hash_key = DynamoKey("id") self.dynamo.create_table("foobar", hash_key=hash_key) calls = [] def hook(*args): """Log the call into a list""" calls.append(args) self.dynamo.subscribe("postcall", hook) self.dynamo.describe_table("foobar") self.assertEqual(len(calls), 1) args = calls[0] self.assertEqual(len(args), 4) conn, command, kwargs, response = args self.assertEqual(conn, self.dynamo) self.assertEqual(command, "describe_table") self.assertEqual(kwargs["TableName"], "foobar") self.assertEqual(response["Table"]["TableName"], "foobar") def test_capacity(self): """capacity hooks are called whenever response has ConsumedCapacity""" hash_key = DynamoKey("id") self.dynamo.create_table("foobar", hash_key=hash_key) hook = MagicMock() self.dynamo.subscribe("capacity", hook) with patch.object(self.dynamo, "client") as client: client.scan.return_value = { "Items": [], "ConsumedCapacity": { "TableName": "foobar", "ReadCapacityUnits": 4, }, } rs = self.dynamo.scan("foobar") list(rs) cap = ConsumedCapacity("foobar", Capacity(4, 0)) hook.assert_called_with(self.dynamo, "scan", ANY, ANY, cap) def test_subscribe(self): """Can subscribe and unsubscribe from hooks""" hook = lambda: None self.dynamo.subscribe("precall", hook) self.assertEqual(len(self.dynamo._hooks["precall"]), 1) self.dynamo.unsubscribe("precall", hook) self.assertEqual(len(self.dynamo._hooks["precall"]), 0)
stevearc/dynamo3
tests/__init__.py
Python
mit
22,100
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("02.StaticReadOnlyField")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("02.StaticReadOnlyField")]
[assembly: AssemblyCopyright("Copyright © 2015")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]

// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]

// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("a881564f-3790-4077-a240-9edd76806251")]

// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
DimitarGaydardzhiev/TelerikAcademy
03. OOP/02. Defining-Classes-Part-2-Static-Members-Generics/02.StaticReadOnlyField/Properties/AssemblyInfo.cs
C#
mit
1,420
Alchemy sentiment analysis: fb12d2c55fff36e1e268584e261b6b010b37279f
Africa Is Talking: 676dbd926bbb04fa69ce90ee81d3f5ffee2692aaf80eb5793bd70fe93e77dc2e
crakama/bc_7_twitment
keys.py
Python
mit
156
module.exports = {
  project: {
    server: {
      basePath: '',
      ip: '0.0.0.0',
      request: {
        sesskey: 'sid',
        limit: 5000,
        parameters: 60
      },
      render: 'swig',
      path: {
        routes: 'app/routes',
        views: 'app/views',
        public: 'public/',
        docs: false
      },
      views: {
        extension: 'swig',
        errors: 'errors/'
      }
    }
  },
  environment: {
    server: {
      debug: true,
      host: 'localhost',
      port: 3000,
      request: {
        secret: new Date().getTime() + '' + Math.random(),
        cors: true,
        geolocation: false
      },
      views: {
        cache: false
      }
    }
  }
};
PearlVentures/Crux
boilerplate/server/config.js
JavaScript
mit
699
<?php

namespace RedMedica\ConsultasBundle\Entity;

use Doctrine\Common\Collections\ArrayCollection;
use Doctrine\ORM\Mapping as ORM;
use RedMedica\ConsultasBundle\Entity\Article;
use FOS\ElasticaBundle\Configuration\Search;

/**
 * Category
 *
 * @ORM\Table(name="category")
 * @ORM\Entity()
 * @Search(repositoryClass="RedMedica\ConsultasBundle\Entity\SearchRepository\CategoryRepository")
 */
class Category
{
    /**
     * @var integer
     *
     * @ORM\Column(name="id", type="integer", nullable=false)
     * @ORM\Id
     * @ORM\GeneratedValue(strategy="IDENTITY")
     */
    protected $id;

    /**
     * @var string
     *
     * @ORM\Column(name="label", type="string", length=250, nullable=false)
     */
    protected $label;

    /**
     * @var Doctrine\Common\Collections\ArrayCollection
     *
     * @ORM\OneToMany(targetEntity="RedMedica\ConsultasBundle\Entity\Article", mappedBy="category")
     */
    protected $articles;

    public function __construct()
    {
        $this->articles = new ArrayCollection();
    }

    public function __toString()
    {
        return $this->label;
    }

    public function getId()
    {
        return $this->id;
    }

    public function setLabel($label)
    {
        $this->label = $label;

        return $this;
    }

    public function getLabel()
    {
        return $this->label;
    }

    public function addArticle(Article $article)
    {
        $this->articles->add($article);

        return $this;
    }

    public function setArticles($articles)
    {
        $this->articles = $articles;

        return $this;
    }

    public function getArticles()
    {
        return $this->articles;
    }
}
dysan1376/hospi
src/RedMedica/ConsultasBundle/Entity/Category.php
PHP
mit
1,682
import React from "react";
import styled from 'styled-components'
import Link from './link';

const nextArrow = "/icons/next-arrow.png";
const prevArrow = "/icons/prev-arrow.png";

const PatternLink = styled.span`
  width: 100%;
  display: flex;
  flex-direction: column;
  padding: 1em;
  float: ${props => props.previous ? 'left' : 'right'}

  @media(min-width: $width-tablet) {
    width: auto;
  }
`;

const ImageContainer = styled.span`
  height: 50px;
`;

const Image = styled.img`
  height: 100%;
  background-color: white;
  float: ${props => props.previous ? 'right' : 'left'}
`;

const ArrowContainer = styled.div`
  display: flex;
  flex-direction: ${props => props.previous ? 'row-reverse' : 'row'};
  align-items: center;
`;

const Name = styled.p`
  padding: 10px 0;
`;

const Arrow = styled.img`
  height: 10px;
  flex-direction: row-reverse;
  padding: ${props => props.previous ? '0 10px 0 0' : '0 0 0 10px'};
`;

const NextPrevPattern = ({pattern, direction}) => {
  const previous = direction === "previous"
  return (
    <Link href={pattern.url}>
      <PatternLink previous={previous}>
        <ImageContainer>
          <Image previous={previous} src={pattern.painted || pattern.lineDrawing} />
        </ImageContainer>
        <ArrowContainer previous={previous}>
          <Name>{pattern.name}</Name>
          { (direction === "next") && <Arrow src={nextArrow}/> }
          { (direction === "previous") && <Arrow previous src={prevArrow} /> }
        </ArrowContainer>
      </PatternLink>
    </Link>
  )
};

export default NextPrevPattern;
redfieldstefan/kibaktile.com
src/components/next-prev-pattern.js
JavaScript
mit
1,640
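A brief usage sketch for the NextPrevPattern component above; the pattern object carries only the fields the component actually reads (name, url, painted/lineDrawing), and the concrete values are invented for illustration.

import NextPrevPattern from "./components/next-prev-pattern";

const pattern = {
  name: "Basket Weave",                      // illustrative values only
  url: "/patterns/basket-weave",
  lineDrawing: "/img/basket-weave-line.png"  // used because no `painted` image is given
};

// right-floated "next" variant: pattern name followed by the forward arrow
const element = <NextPrevPattern pattern={pattern} direction="next" />;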
#!/usr/bin/node --harmony 'use strict' const noble = require('noble'), program = require('commander') program .version('0.0.1') .option('-p, --prefix <integer>', 'Manufacturer identifier prefixed to all fan commands', parseInt) .option('-t, --target [mac]', 'MAC address of devices to target', function(val){ return val.toLowerCase() }) .option('-s, --service <uuid>', 'UUID of fan controller BLE service') .option('-w, --write <uuid>', 'UUID of fan controller BLE write characteristic') .option('-n, --notify <uuid>', 'UUID of fan controller BLE notify characteristic') class FanRequest { writeInto(buffer) { throw new TypeError('Must override method') } toBuffer() { var buffer if (program.prefix > 0) { buffer = new Buffer(13) buffer.writeUInt8(program.prefix) this.writeInto(buffer.slice(1)) } else { buffer = new Buffer(12) this.writeInto(buffer) } const checksum = buffer.slice(0, buffer.length - 1).reduce(function(a, b){ return a + b }, 0) & 255 buffer.writeUInt8(checksum, buffer.length - 1) return buffer } } class FanGetStateRequest extends FanRequest { writeInto(buffer) { buffer.fill(0) buffer.writeUInt8(160) } } Math.clamp = function(number, min, max) { return Math.max(min, Math.min(number, max)) } class FanUpdateLightRequest extends FanRequest { constructor(isOn, level) { super() this.on = isOn ? 1 : 0 this.level = Math.clamp(level, 0, 100) } writeInto(buffer) { buffer.fill(0) buffer.writeUInt8(161) buffer.writeUInt8(255, 4) buffer.writeUInt8(100, 5) buffer.writeUInt8((this.on << 7) | this.level, 6) buffer.fill(255, 7, 10) } } class FanUpdateLevelRequest extends FanRequest { constructor(level) { super() this.level = Math.clamp(level, 0, 3) } writeInto(buffer) { buffer.fill(0) buffer.writeUInt8(161) buffer.writeUInt8(this.level, 4) buffer.fill(255, 5, 10) } } class FanResponse { static fromBuffer(buffer) { if (program.prefix > 0) { buffer = buffer.slice(1) } if (buffer.readUInt8(0) != 176) { return null } const response = new FanResponse() const windVelocity = buffer.readUInt8(2) response.supportsFanReversal = (windVelocity & 0b00100000) != 0 response.maximumFanLevel = windVelocity & 0b00011111 const currentWindVelocity = buffer.readUInt8(4) response.isFanReversed = (currentWindVelocity & 0b10000000) != 0 response.fanLevel = currentWindVelocity & 0b00011111 const currentBrightness = buffer.readUInt8(6) response.lightIsOn = (currentBrightness & 0b10000000) != 0 response.lightBrightness = (currentBrightness & 0b01111111) return response } } // MARK: - var command program .command('current') .description('print current state') .action(function(env, options) { command = new FanGetStateRequest() }) program .command('fan') .description('adjusts the fan') .option('-l --level <size>', 'Fan speed', /^(off|low|medium|high)$/i, 'high') .action(function(env, options) { var level switch (env.level) { case 'low': level = 1 break case 'medium': level = 2 break case 'high': level = 3 break default: level = 0 break } command = new FanUpdateLevelRequest(level) }) program .command('light <on|off>') .description('adjusts the light') .option('-l, --level <percent>', 'Light brightness', parseInt, 100) .action(function(env, options) { command = new FanUpdateLightRequest(env !== 'off', options.level) }) program.parse(process.argv); if (!command) { program.help(); } if (!program.target) { throw new Error('MAC address required') } const serviceUUID = program.service || '539c681361a021374f79bf1a11984790' const writeUUID = program.write || '539c681361a121374f79bf1a11984790' const notifyUUID = program.notify || 
'539c681361a221374f79bf1a11984790' noble.on('stateChange', function(state) { if (state === 'poweredOn') { console.log('scanning.') noble.startScanning([ serviceUUID ], false) } else { noble.stopScanning() } }) noble.on('discover', function(peripheral) { console.log('found ' + peripheral.address) if (peripheral.address !== program.target) { return } noble.stopScanning() explore(peripheral) }); function bail(error) { console.log('failed: ' + error); process.exit(1) } function explore(peripheral) { console.log('connecting.') peripheral.once('disconnect', function() { peripheral.removeAllListeners() explore(peripheral) }) peripheral.connect(function(error) { if (error) { bail(error); } peripheral.discoverSomeServicesAndCharacteristics([ serviceUUID ], [ writeUUID, notifyUUID ], function(error, services, characteristics) { if (error) { bail(error); } var service = services[0] var write = characteristics[0], notify = characteristics[1] notify.on('data', function(data, isNotification) { const response = FanResponse.fromBuffer(data) if (response) { console.log(response) } else { console.log('sent') } process.exit() }) notify.subscribe(function(error) { if (error) { bail(error); } console.log('sending') const buffer = command.toBuffer() write.write(buffer, false, function(error){ if (error) { bail(error); } }) }) }) }) }
zwaldowski/homebridge-satellite-fan
test/poc.js
JavaScript
mit
5,557
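A small sketch of the request framing used by the script above, relying only on classes it defines; with no --prefix set the payload is 12 bytes and the final byte is the additive checksum computed in toBuffer(). The values and flow are read from the code, not verified against real hardware.

// e.g. `node poc.js --target aa:bb:cc:dd:ee:ff light on --level 75`
// the `light` command path effectively does this inside the script:
const request = new FanUpdateLightRequest(true, 75); // light on, 75% brightness
const payload = request.toBuffer();                  // 12 bytes, checksum in the last byte
// `payload` is later written to the BLE write characteristic once a peripheral
// advertising `serviceUUID` with the matching MAC address has been discovered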
const HEX_SHORT = /^#([a-fA-F0-9]{3})$/;
const HEX = /^#([a-fA-F0-9]{6})$/;

function roundColors(obj, round) {
  if (!round) return obj;

  const o = {};
  for (let k in obj) {
    o[k] = Math.round(obj[k]);
  }
  return o;
}

function hasProp(obj, key) {
  return obj.hasOwnProperty(key);
}

function isRgb(obj) {
  return hasProp(obj, "r") && hasProp(obj, "g") && hasProp(obj, "b");
}

export default class Color {
  static normalizeHex(hex) {
    if (HEX.test(hex)) {
      return hex;
    } else if (HEX_SHORT.test(hex)) {
      const r = hex.slice(1, 2);
      const g = hex.slice(2, 3);
      const b = hex.slice(3, 4);
      return `#${r + r}${g + g}${b + b}`;
    }
    return null;
  }

  static hexToRgb(hex) {
    const normalizedHex = this.normalizeHex(hex);
    if (normalizedHex == null) {
      return null;
    }
    const m = normalizedHex.match(HEX);
    const i = parseInt(m[1], 16);
    const r = (i >> 16) & 0xFF;
    const g = (i >> 8) & 0xFF;
    const b = i & 0xFF;
    return { r, g, b };
  }

  static rgbToHex(rgb) {
    const { r, g, b } = rgb;
    const i = ((Math.round(r) & 0xFF) << 16) + ((Math.round(g) & 0xFF) << 8) + (Math.round(b) & 0xFF);
    const s = i.toString(16).toLowerCase();
    return `#${"000000".substring(s.length) + s}`;
  }

  static rgbToHsv(rgb, round = true) {
    const { r, g, b } = rgb;
    const min = Math.min(r, g, b);
    const max = Math.max(r, g, b);
    const delta = max - min;
    const hsv = {};

    if (max === 0) {
      hsv.s = 0;
    } else {
      hsv.s = (delta / max * 1000) / 10;
    }

    if (max === min) {
      hsv.h = 0;
    } else if (r === max) {
      hsv.h = (g - b) / delta;
    } else if (g === max) {
      hsv.h = 2 + (b - r) / delta;
    } else {
      hsv.h = 4 + (r - g) / delta;
    }

    hsv.h = Math.min(hsv.h * 60, 360);
    hsv.h = hsv.h < 0 ? hsv.h + 360 : hsv.h;
    hsv.v = ((max / 255) * 1000) / 10;

    return roundColors(hsv, round);
  }

  static rgbToXyz(rgb, round = true) {
    const r = rgb.r / 255;
    const g = rgb.g / 255;
    const b = rgb.b / 255;

    const rr = r > 0.04045 ? Math.pow(((r + 0.055) / 1.055), 2.4) : r / 12.92;
    const gg = g > 0.04045 ? Math.pow(((g + 0.055) / 1.055), 2.4) : g / 12.92;
    const bb = b > 0.04045 ? Math.pow(((b + 0.055) / 1.055), 2.4) : b / 12.92;

    const x = (rr * 0.4124 + gg * 0.3576 + bb * 0.1805) * 100;
    const y = (rr * 0.2126 + gg * 0.7152 + bb * 0.0722) * 100;
    const z = (rr * 0.0193 + gg * 0.1192 + bb * 0.9505) * 100;

    return roundColors({ x, y, z }, round);
  }

  static rgbToLab(rgb, round = true) {
    const xyz = Color.rgbToXyz(rgb, false);
    let { x, y, z } = xyz;

    x /= 95.047;
    y /= 100;
    z /= 108.883;

    x = x > 0.008856 ? Math.pow(x, 1 / 3) : 7.787 * x + 16 / 116;
    y = y > 0.008856 ? Math.pow(y, 1 / 3) : 7.787 * y + 16 / 116;
    z = z > 0.008856 ? Math.pow(z, 1 / 3) : 7.787 * z + 16 / 116;

    const l = (116 * y) - 16;
    const a = 500 * (x - y);
    const b = 200 * (y - z);

    return roundColors({ l, a, b }, round);
  }

  constructor(value) {
    this.original = value;

    if (isRgb(value)) {
      this.rgb = value;
      this.hex = Color.rgbToHex(value);
    } else {
      this.hex = Color.normalizeHex(value);
      this.rgb = Color.hexToRgb(this.hex);
    }

    this.hsv = Color.rgbToHsv(this.rgb);
  }
}
tsuyoshiwada/color-classifier
src/utils/color.js
JavaScript
mit
3,342
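A short usage sketch for the Color class above; the hex value is arbitrary and the commented results follow directly from the conversion code in the file.

import Color from "./utils/color";

const c = new Color("#1e90ff");
c.rgb;                                      // { r: 30, g: 144, b: 255 }
c.hsv;                                      // rounded HSV from Color.rgbToHsv
Color.normalizeHex("#abc");                 // "#aabbcc" (short form expanded)
Color.rgbToHex({ r: 30, g: 144, b: 255 });  // "#1e90ff"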
export { default } from 'ember-validation/components/ember-validation-error-list';
ajile/ember-validation
app/components/ember-validation-error-list.js
JavaScript
mit
83
/*global window */ /** * @license countdown.js v2.5.2 http://countdownjs.org * Copyright (c)2006-2014 Stephen M. McKamey. * Licensed under The MIT License. */ /*jshint bitwise:false */ /** * @public * @type {Object|null} */ var module; /** * API entry * @public * @param {function(Object)|Date|number} start the starting date * @param {function(Object)|Date|number} end the ending date * @param {number} units the units to populate * @return {Object|number} */ var countdown = ( /** * @param {Object} module CommonJS Module */ function(module) { /*jshint smarttabs:true */ 'use strict'; /** * @private * @const * @type {number} */ var MILLISECONDS = 0x001; /** * @private * @const * @type {number} */ var SECONDS = 0x002; /** * @private * @const * @type {number} */ var MINUTES = 0x004; /** * @private * @const * @type {number} */ var HOURS = 0x008; /** * @private * @const * @type {number} */ var DAYS = 0x010; /** * @private * @const * @type {number} */ var WEEKS = 0x020; /** * @private * @const * @type {number} */ var MONTHS = 0x040; /** * @private * @const * @type {number} */ var YEARS = 0x080; /** * @private * @const * @type {number} */ var DECADES = 0x100; /** * @private * @const * @type {number} */ var CENTURIES = 0x200; /** * @private * @const * @type {number} */ var MILLENNIA = 0x400; /** * @private * @const * @type {number} */ var DEFAULTS = YEARS|MONTHS|DAYS|HOURS|MINUTES|SECONDS; /** * @private * @const * @type {number} */ var MILLISECONDS_PER_SECOND = 1000; /** * @private * @const * @type {number} */ var SECONDS_PER_MINUTE = 60; /** * @private * @const * @type {number} */ var MINUTES_PER_HOUR = 60; /** * @private * @const * @type {number} */ var HOURS_PER_DAY = 24; /** * @private * @const * @type {number} */ var MILLISECONDS_PER_DAY = HOURS_PER_DAY * MINUTES_PER_HOUR * SECONDS_PER_MINUTE * MILLISECONDS_PER_SECOND; /** * @private * @const * @type {number} */ var DAYS_PER_WEEK = 7; /** * @private * @const * @type {number} */ var MONTHS_PER_YEAR = 12; /** * @private * @const * @type {number} */ var YEARS_PER_DECADE = 10; /** * @private * @const * @type {number} */ var DECADES_PER_CENTURY = 10; /** * @private * @const * @type {number} */ var CENTURIES_PER_MILLENNIUM = 10; /** * @private * @param {number} x number * @return {number} */ var ceil = Math.ceil; /** * @private * @param {number} x number * @return {number} */ var floor = Math.floor; /** * @private * @param {Date} ref reference date * @param {number} shift number of months to shift * @return {number} number of days shifted */ function borrowMonths(ref, shift) { var prevTime = ref.getTime(); // increment month by shift ref.setMonth( ref.getMonth() + shift ); // this is the trickiest since months vary in length return Math.round( (ref.getTime() - prevTime) / MILLISECONDS_PER_DAY ); } /** * @private * @param {Date} ref reference date * @return {number} number of days */ function daysPerMonth(ref) { var a = ref.getTime(); // increment month by 1 var b = new Date(a); b.setMonth( ref.getMonth() + 1 ); // this is the trickiest since months vary in length return Math.round( (b.getTime() - a) / MILLISECONDS_PER_DAY ); } /** * @private * @param {Date} ref reference date * @return {number} number of days */ function daysPerYear(ref) { var a = ref.getTime(); // increment year by 1 var b = new Date(a); b.setFullYear( ref.getFullYear() + 1 ); // this is the trickiest since years (periodically) vary in length return Math.round( (b.getTime() - a) / MILLISECONDS_PER_DAY ); } /** * Applies the Timespan to the given date. 
* * @private * @param {Timespan} ts * @param {Date=} date * @return {Date} */ function addToDate(ts, date) { date = (date instanceof Date) || ((date !== null) && isFinite(date)) ? new Date(+date) : new Date(); if (!ts) { return date; } // if there is a value field, use it directly var value = +ts.value || 0; if (value) { date.setTime(date.getTime() + value); return date; } value = +ts.milliseconds || 0; if (value) { date.setMilliseconds(date.getMilliseconds() + value); } value = +ts.seconds || 0; // if (value) { date.setSeconds(date.getSeconds() + value); // } value = +ts.minutes || 0; if (value) { date.setMinutes(date.getMinutes() + value); } value = +ts.hours || 0; if (value) { date.setHours(date.getHours() + value); } value = +ts.weeks || 0; if (value) { value *= DAYS_PER_WEEK; } value += +ts.days || 0; if (value) { date.setDate(date.getDate() + value); } value = +ts.months || 0; if (value) { date.setMonth(date.getMonth() + value); } value = +ts.millennia || 0; if (value) { value *= CENTURIES_PER_MILLENNIUM; } value += +ts.centuries || 0; if (value) { value *= DECADES_PER_CENTURY; } value += +ts.decades || 0; if (value) { value *= YEARS_PER_DECADE; } value += +ts.years || 0; if (value) { date.setFullYear(date.getFullYear() + value); } return date; } /** * @private * @const * @type {number} */ var LABEL_MILLISECONDS = 0; /** * @private * @const * @type {number} */ var LABEL_SECONDS = 1; /** * @private * @const * @type {number} */ var LABEL_MINUTES = 2; /** * @private * @const * @type {number} */ var LABEL_HOURS = 3; /** * @private * @const * @type {number} */ var LABEL_DAYS = 4; /** * @private * @const * @type {number} */ var LABEL_WEEKS = 5; /** * @private * @const * @type {number} */ var LABEL_MONTHS = 6; /** * @private * @const * @type {number} */ var LABEL_YEARS = 7; /** * @private * @const * @type {number} */ var LABEL_DECADES = 8; /** * @private * @const * @type {number} */ var LABEL_CENTURIES = 9; /** * @private * @const * @type {number} */ var LABEL_MILLENNIA = 10; /** * @private * @type {Array} */ var LABELS_SINGLUAR; /** * @private * @type {Array} */ var LABELS_PLURAL; /** * @private * @type {string} */ var LABEL_LAST; /** * @private * @type {string} */ var LABEL_DELIM; /** * @private * @type {string} */ var LABEL_NOW; /** * Formats a number as a string * * @private * @param {number} value * @return {string} */ var formatNumber; /** * @private * @param {number} value * @param {number} unit unit index into label list * @return {string} */ function plurality(value, unit) { return formatNumber(value)+((value === 1) ? LABELS_SINGLUAR[unit] : LABELS_PLURAL[unit]); } /** * Formats the entries with singular or plural labels * * @private * @param {Timespan} ts * @return {Array} */ var formatList; /** * Timespan representation of a duration of time * * @private * @this {Timespan} * @constructor */ function Timespan() {} /** * Formats the Timespan as a sentence * * @param {string=} emptyLabel the string to use when no values returned * @return {string} */ Timespan.prototype.toString = function(emptyLabel) { var label = formatList(this); var count = label.length; if (!count) { return emptyLabel ? 
''+emptyLabel : LABEL_NOW; } if (count === 1) { return label[0]; } var last = LABEL_LAST+label.pop(); return label.join(LABEL_DELIM)+last; }; /** * Formats the Timespan as a sentence in HTML * * @param {string=} tag HTML tag name to wrap each value * @param {string=} emptyLabel the string to use when no values returned * @return {string} */ Timespan.prototype.toHTML = function(tag, emptyLabel) { tag = tag || 'span'; var label = formatList(this); var count = label.length; if (!count) { emptyLabel = emptyLabel || LABEL_NOW; return emptyLabel ? '<'+tag+'>'+emptyLabel+'</'+tag+'>' : emptyLabel; } for (var i=0; i<count; i++) { // wrap each unit in tag label[i] = '<'+tag+'>'+label[i]+'</'+tag+'>'; } if (count === 1) { return label[0]; } var last = LABEL_LAST+label.pop(); return label.join(LABEL_DELIM)+last; }; /** * Applies the Timespan to the given date * * @param {Date=} date the date to which the timespan is added. * @return {Date} */ Timespan.prototype.addTo = function(date) { return addToDate(this, date); }; /** * Formats the entries as English labels * * @private * @param {Timespan} ts * @return {Array} */ formatList = function(ts) { var list = []; var value = ts.millennia; if (value) { list.push(plurality(value, LABEL_MILLENNIA)); } value = ts.centuries; if (value) { list.push(plurality(value, LABEL_CENTURIES)); } value = ts.decades; if (value) { list.push(plurality(value, LABEL_DECADES)); } value = ts.years; if (value) { list.push(plurality(value, LABEL_YEARS)); } value = ts.months; if (value) { list.push(plurality(value, LABEL_MONTHS)); } value = ts.weeks; if (value) { list.push(plurality(value, LABEL_WEEKS)); } value = ts.days; if (value) { list.push(plurality(value, LABEL_DAYS)); } value = ts.hours; if (value) { list.push(plurality(value, LABEL_HOURS)); } value = ts.minutes; if (value) { list.push(plurality(value, LABEL_MINUTES)); } value = ts.seconds; // if (value) { list.push(plurality(value, LABEL_SECONDS)); // } value = ts.milliseconds; if (value) { list.push(plurality(value, LABEL_MILLISECONDS)); } return list; }; /** * Borrow any underflow units, carry any overflow units * * @private * @param {Timespan} ts * @param {string} toUnit */ function rippleRounded(ts, toUnit) { switch (toUnit) { case 'seconds': if (ts.seconds !== SECONDS_PER_MINUTE || isNaN(ts.minutes)) { return; } // ripple seconds up to minutes ts.minutes++; ts.seconds = 0; /* falls through */ case 'minutes': if (ts.minutes !== MINUTES_PER_HOUR || isNaN(ts.hours)) { return; } // ripple minutes up to hours ts.hours++; ts.minutes = 0; /* falls through */ case 'hours': if (ts.hours !== HOURS_PER_DAY || isNaN(ts.days)) { return; } // ripple hours up to days ts.days++; ts.hours = 0; /* falls through */ case 'days': if (ts.days !== DAYS_PER_WEEK || isNaN(ts.weeks)) { return; } // ripple days up to weeks ts.weeks++; ts.days = 0; /* falls through */ case 'weeks': if (ts.weeks !== daysPerMonth(ts.refMonth)/DAYS_PER_WEEK || isNaN(ts.months)) { return; } // ripple weeks up to months ts.months++; ts.weeks = 0; /* falls through */ case 'months': if (ts.months !== MONTHS_PER_YEAR || isNaN(ts.years)) { return; } // ripple months up to years ts.years++; ts.months = 0; /* falls through */ case 'years': if (ts.years !== YEARS_PER_DECADE || isNaN(ts.decades)) { return; } // ripple years up to decades ts.decades++; ts.years = 0; /* falls through */ case 'decades': if (ts.decades !== DECADES_PER_CENTURY || isNaN(ts.centuries)) { return; } // ripple decades up to centuries ts.centuries++; ts.decades = 0; /* falls through */ case 
'centuries': if (ts.centuries !== CENTURIES_PER_MILLENNIUM || isNaN(ts.millennia)) { return; } // ripple centuries up to millennia ts.millennia++; ts.centuries = 0; /* falls through */ } } /** * Ripple up partial units one place * * @private * @param {Timespan} ts timespan * @param {number} frac accumulated fractional value * @param {string} fromUnit source unit name * @param {string} toUnit target unit name * @param {number} conversion multiplier between units * @param {number} digits max number of decimal digits to output * @return {number} new fractional value */ function fraction(ts, frac, fromUnit, toUnit, conversion, digits) { if (ts[fromUnit] >= 0) { frac += ts[fromUnit]; delete ts[fromUnit]; } frac /= conversion; if (frac + 1 <= 1) { // drop if below machine epsilon return 0; } if (ts[toUnit] >= 0) { // ensure does not have more than specified number of digits ts[toUnit] = +(ts[toUnit] + frac).toFixed(digits); rippleRounded(ts, toUnit); return 0; } return frac; } /** * Ripple up partial units to next existing * * @private * @param {Timespan} ts * @param {number} digits max number of decimal digits to output */ function fractional(ts, digits) { var frac = fraction(ts, 0, 'milliseconds', 'seconds', MILLISECONDS_PER_SECOND, digits); if (!frac) { return; } frac = fraction(ts, frac, 'seconds', 'minutes', SECONDS_PER_MINUTE, digits); if (!frac) { return; } frac = fraction(ts, frac, 'minutes', 'hours', MINUTES_PER_HOUR, digits); if (!frac) { return; } frac = fraction(ts, frac, 'hours', 'days', HOURS_PER_DAY, digits); if (!frac) { return; } frac = fraction(ts, frac, 'days', 'weeks', DAYS_PER_WEEK, digits); if (!frac) { return; } frac = fraction(ts, frac, 'weeks', 'months', daysPerMonth(ts.refMonth)/DAYS_PER_WEEK, digits); if (!frac) { return; } frac = fraction(ts, frac, 'months', 'years', daysPerYear(ts.refMonth)/daysPerMonth(ts.refMonth), digits); if (!frac) { return; } frac = fraction(ts, frac, 'years', 'decades', YEARS_PER_DECADE, digits); if (!frac) { return; } frac = fraction(ts, frac, 'decades', 'centuries', DECADES_PER_CENTURY, digits); if (!frac) { return; } frac = fraction(ts, frac, 'centuries', 'millennia', CENTURIES_PER_MILLENNIUM, digits); // should never reach this with remaining fractional value if (frac) { throw new Error('Fractional unit overflow'); } } /** * Borrow any underflow units, carry any overflow units * * @private * @param {Timespan} ts */ function ripple(ts) { var x; if (ts.milliseconds < 0) { // ripple seconds down to milliseconds x = ceil(-ts.milliseconds / MILLISECONDS_PER_SECOND); ts.seconds -= x; ts.milliseconds += x * MILLISECONDS_PER_SECOND; } else if (ts.milliseconds >= MILLISECONDS_PER_SECOND) { // ripple milliseconds up to seconds ts.seconds += floor(ts.milliseconds / MILLISECONDS_PER_SECOND); ts.milliseconds %= MILLISECONDS_PER_SECOND; } if (ts.seconds < 0) { // ripple minutes down to seconds x = ceil(-ts.seconds / SECONDS_PER_MINUTE); ts.minutes -= x; ts.seconds += x * SECONDS_PER_MINUTE; } else if (ts.seconds >= SECONDS_PER_MINUTE) { // ripple seconds up to minutes ts.minutes += floor(ts.seconds / SECONDS_PER_MINUTE); ts.seconds %= SECONDS_PER_MINUTE; } if (ts.minutes < 0) { // ripple hours down to minutes x = ceil(-ts.minutes / MINUTES_PER_HOUR); ts.hours -= x; ts.minutes += x * MINUTES_PER_HOUR; } else if (ts.minutes >= MINUTES_PER_HOUR) { // ripple minutes up to hours ts.hours += floor(ts.minutes / MINUTES_PER_HOUR); ts.minutes %= MINUTES_PER_HOUR; } if (ts.hours < 0) { // ripple days down to hours x = ceil(-ts.hours / HOURS_PER_DAY); ts.days -= 
x; ts.hours += x * HOURS_PER_DAY; } else if (ts.hours >= HOURS_PER_DAY) { // ripple hours up to days ts.days += floor(ts.hours / HOURS_PER_DAY); ts.hours %= HOURS_PER_DAY; } while (ts.days < 0) { // NOTE: never actually seen this loop more than once // ripple months down to days ts.months--; ts.days += borrowMonths(ts.refMonth, 1); } // weeks is always zero here if (ts.days >= DAYS_PER_WEEK) { // ripple days up to weeks ts.weeks += floor(ts.days / DAYS_PER_WEEK); ts.days %= DAYS_PER_WEEK; } if (ts.months < 0) { // ripple years down to months x = ceil(-ts.months / MONTHS_PER_YEAR); ts.years -= x; ts.months += x * MONTHS_PER_YEAR; } else if (ts.months >= MONTHS_PER_YEAR) { // ripple months up to years ts.years += floor(ts.months / MONTHS_PER_YEAR); ts.months %= MONTHS_PER_YEAR; } // years is always non-negative here // decades, centuries and millennia are always zero here if (ts.years >= YEARS_PER_DECADE) { // ripple years up to decades ts.decades += floor(ts.years / YEARS_PER_DECADE); ts.years %= YEARS_PER_DECADE; if (ts.decades >= DECADES_PER_CENTURY) { // ripple decades up to centuries ts.centuries += floor(ts.decades / DECADES_PER_CENTURY); ts.decades %= DECADES_PER_CENTURY; if (ts.centuries >= CENTURIES_PER_MILLENNIUM) { // ripple centuries up to millennia ts.millennia += floor(ts.centuries / CENTURIES_PER_MILLENNIUM); ts.centuries %= CENTURIES_PER_MILLENNIUM; } } } } /** * Remove any units not requested * * @private * @param {Timespan} ts * @param {number} units the units to populate * @param {number} max number of labels to output * @param {number} digits max number of decimal digits to output */ function pruneUnits(ts, units, max, digits) { var count = 0; // Calc from largest unit to smallest to prevent underflow if (!(units & MILLENNIA) || (count >= max)) { // ripple millennia down to centuries ts.centuries += ts.millennia * CENTURIES_PER_MILLENNIUM; delete ts.millennia; } else if (ts.millennia) { count++; } if (!(units & CENTURIES) || (count >= max)) { // ripple centuries down to decades ts.decades += ts.centuries * DECADES_PER_CENTURY; delete ts.centuries; } else if (ts.centuries) { count++; } if (!(units & DECADES) || (count >= max)) { // ripple decades down to years ts.years += ts.decades * YEARS_PER_DECADE; delete ts.decades; } else if (ts.decades) { count++; } if (!(units & YEARS) || (count >= max)) { // ripple years down to months ts.months += ts.years * MONTHS_PER_YEAR; delete ts.years; } else if (ts.years) { count++; } if (!(units & MONTHS) || (count >= max)) { // ripple months down to days if (ts.months) { ts.days += borrowMonths(ts.refMonth, ts.months); } delete ts.months; if (ts.days >= DAYS_PER_WEEK) { // ripple day overflow back up to weeks ts.weeks += floor(ts.days / DAYS_PER_WEEK); ts.days %= DAYS_PER_WEEK; } } else if (ts.months) { count++; } if (!(units & WEEKS) || (count >= max)) { // ripple weeks down to days ts.days += ts.weeks * DAYS_PER_WEEK; delete ts.weeks; } else if (ts.weeks) { count++; } if (!(units & DAYS) || (count >= max)) { //ripple days down to hours ts.hours += ts.days * HOURS_PER_DAY; delete ts.days; } else if (ts.days) { count++; } if (!(units & HOURS) || (count >= max)) { // ripple hours down to minutes ts.minutes += ts.hours * MINUTES_PER_HOUR; delete ts.hours; } else if (ts.hours) { count++; } if (!(units & MINUTES) || (count >= max)) { // ripple minutes down to seconds ts.seconds += ts.minutes * SECONDS_PER_MINUTE; delete ts.minutes; } else if (ts.minutes) { count++; } if (!(units & SECONDS) || (count >= max)) { // ripple seconds down to 
milliseconds ts.milliseconds += ts.seconds * MILLISECONDS_PER_SECOND; delete ts.seconds; } else if (ts.seconds) { count++; } // nothing to ripple milliseconds down to // so ripple back up to smallest existing unit as a fractional value if (!(units & MILLISECONDS) || (count >= max)) { fractional(ts, digits); } } /** * Populates the Timespan object * * @private * @param {Timespan} ts * @param {?Date} start the starting date * @param {?Date} end the ending date * @param {number} units the units to populate * @param {number} max number of labels to output * @param {number} digits max number of decimal digits to output */ function populate(ts, start, end, units, max, digits) { var now = new Date(); ts.start = start = start || now; ts.end = end = end || now; ts.units = units; ts.value = end.getTime() - start.getTime(); if (ts.value < 0) { // swap if reversed var tmp = end; end = start; start = tmp; } // reference month for determining days in month ts.refMonth = new Date(start.getFullYear(), start.getMonth(), 15, 12, 0, 0); try { // reset to initial deltas ts.millennia = 0; ts.centuries = 0; ts.decades = 0; ts.years = end.getFullYear() - start.getFullYear(); ts.months = end.getMonth() - start.getMonth(); ts.weeks = 0; ts.days = end.getDate() - start.getDate(); ts.hours = end.getHours() - start.getHours(); ts.minutes = end.getMinutes() - start.getMinutes(); ts.seconds = end.getSeconds() - start.getSeconds(); ts.milliseconds = end.getMilliseconds() - start.getMilliseconds(); ripple(ts); pruneUnits(ts, units, max, digits); } finally { delete ts.refMonth; } return ts; } /** * Determine an appropriate refresh rate based upon units * * @private * @param {number} units the units to populate * @return {number} milliseconds to delay */ function getDelay(units) { if (units & MILLISECONDS) { // refresh very quickly return MILLISECONDS_PER_SECOND / 30; //30Hz } if (units & SECONDS) { // refresh every second return MILLISECONDS_PER_SECOND; //1Hz } if (units & MINUTES) { // refresh every minute return MILLISECONDS_PER_SECOND * SECONDS_PER_MINUTE; } if (units & HOURS) { // refresh hourly return MILLISECONDS_PER_SECOND * SECONDS_PER_MINUTE * MINUTES_PER_HOUR; } if (units & DAYS) { // refresh daily return MILLISECONDS_PER_SECOND * SECONDS_PER_MINUTE * MINUTES_PER_HOUR * HOURS_PER_DAY; } // refresh the rest weekly return MILLISECONDS_PER_SECOND * SECONDS_PER_MINUTE * MINUTES_PER_HOUR * HOURS_PER_DAY * DAYS_PER_WEEK; } /** * API entry point * * @public * @param {Date|number|Timespan|null|function(Timespan,number)} start the starting date * @param {Date|number|Timespan|null|function(Timespan,number)} end the ending date * @param {number=} units the units to populate * @param {number=} max number of labels to output * @param {number=} digits max number of decimal digits to output * @return {Timespan|number} */ function countdown(start, end, units, max, digits) { var callback; // ensure some units or use defaults units = +units || DEFAULTS; // max must be positive max = (max > 0) ? max : NaN; // clamp digits to an integer between [0, 20] digits = (digits > 0) ? (digits < 20) ? 
Math.round(digits) : 20 : 0; // ensure start date var startTS = null; if ('function' === typeof start) { callback = start; start = null; } else if (!(start instanceof Date)) { if ((start !== null) && isFinite(start)) { start = new Date(+start); } else { if ('object' === typeof startTS) { startTS = /** @type{Timespan} */(start); } start = null; } } // ensure end date var endTS = null; if ('function' === typeof end) { callback = end; end = null; } else if (!(end instanceof Date)) { if ((end !== null) && isFinite(end)) { end = new Date(+end); } else { if ('object' === typeof end) { endTS = /** @type{Timespan} */(end); } end = null; } } // must wait to interpret timespans until after resolving dates if (startTS) { start = addToDate(startTS, end); } if (endTS) { end = addToDate(endTS, start); } if (!start && !end) { // used for unit testing return new Timespan(); } if (!callback) { return populate(new Timespan(), /** @type{Date} */(start), /** @type{Date} */(end), /** @type{number} */(units), /** @type{number} */(max), /** @type{number} */(digits)); } // base delay off units var delay = getDelay(units), timerId, fn = function() { callback( populate(new Timespan(), /** @type{Date} */(start), /** @type{Date} */(end), /** @type{number} */(units), /** @type{number} */(max), /** @type{number} */(digits)), timerId ); }; fn(); return (timerId = setInterval(fn, delay)); } /** * @public * @const * @type {number} */ countdown.MILLISECONDS = MILLISECONDS; /** * @public * @const * @type {number} */ countdown.SECONDS = SECONDS; /** * @public * @const * @type {number} */ countdown.MINUTES = MINUTES; /** * @public * @const * @type {number} */ countdown.HOURS = HOURS; /** * @public * @const * @type {number} */ countdown.DAYS = DAYS; /** * @public * @const * @type {number} */ countdown.WEEKS = WEEKS; /** * @public * @const * @type {number} */ countdown.MONTHS = MONTHS; /** * @public * @const * @type {number} */ countdown.YEARS = YEARS; /** * @public * @const * @type {number} */ countdown.DECADES = DECADES; /** * @public * @const * @type {number} */ countdown.CENTURIES = CENTURIES; /** * @public * @const * @type {number} */ countdown.MILLENNIA = MILLENNIA; /** * @public * @const * @type {number} */ countdown.DEFAULTS = DEFAULTS; /** * @public * @const * @type {number} */ countdown.ALL = MILLENNIA|CENTURIES|DECADES|YEARS|MONTHS|WEEKS|DAYS|HOURS|MINUTES|SECONDS|MILLISECONDS; /** * Override the unit labels * @public * @param {string|Array=} singular a pipe ('|') delimited list of singular unit name overrides * @param {string|Array=} plural a pipe ('|') delimited list of plural unit name overrides * @param {string=} last a delimiter before the last unit (default: ' and ') * @param {string=} delim a delimiter to use between all other units (default: ', ') * @param {string=} empty a label to use when all units are zero (default: '') * @param {function(number):string=} formatter a function which formats numbers as a string */ countdown.setLabels = function(singular, plural, last, delim, empty, formatter) { singular = singular || []; if (singular.split) { singular = singular.split('|'); } plural = plural || []; if (plural.split) { plural = plural.split('|'); } for (var i=LABEL_MILLISECONDS; i<=LABEL_MILLENNIA; i++) { // override any specified units LABELS_SINGLUAR[i] = singular[i] || LABELS_SINGLUAR[i]; LABELS_PLURAL[i] = plural[i] || LABELS_PLURAL[i]; } LABEL_LAST = ('string' === typeof last) ? last : LABEL_LAST; LABEL_DELIM = ('string' === typeof delim) ? delim : LABEL_DELIM; LABEL_NOW = ('string' === typeof empty) ? 
empty : LABEL_NOW; formatNumber = ('function' === typeof formatter) ? formatter : formatNumber; }; /** * Revert to the default unit labels * @public */ var resetLabels = countdown.resetLabels = function() { LABELS_SINGLUAR = ' millisecond| second| minute| hour| day| week| month| year| decade| century| millennium'.split('|'); LABELS_PLURAL = ' milliseconds| seconds| minutes| hours| days| weeks| months| years| decades| centuries| millennia'.split('|'); LABEL_LAST = ' and '; LABEL_DELIM = ', '; LABEL_NOW = ''; formatNumber = function(value) { return '<span class="contest_timedelta">' + value + "</span>"; }; }; resetLabels(); if (module && module.exports) { module.exports = countdown; } else if (typeof window.define === 'function' && typeof window.define.amd !== 'undefined') { window.define('countdown', [], function() { return countdown; }); } return countdown; })(module);
entpy/beauty-and-pics
beauty_and_pics/website/static/website/js/vendor/countdown.js
JavaScript
mit
27,520
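A short usage sketch for the countdown() entry point documented above; the dates are arbitrary, and note that this copy of the library overrides formatNumber so every number in toString()/toHTML() output is wrapped in a <span class="contest_timedelta"> element.

// fixed span between two dates, limited to days, hours and minutes
var ts = countdown(new Date(2015, 0, 1), new Date(2015, 0, 3, 6, 30),
                   countdown.DAYS | countdown.HOURS | countdown.MINUTES);
ts.days;        // 2
ts.hours;       // 6
ts.minutes;     // 30
ts.toString();  // "2 days, 6 hours and 30 minutes", each number wrapped in a span

// live countdown: passing a callback returns a setInterval timer id
var timerId = countdown(new Date(2030, 0, 1), function (t) {
  document.getElementById('timer').innerHTML = t.toHTML('strong');
}, countdown.DEFAULTS);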
var formMode="detail"; /*formMode ้กต้ขๆจกๅผ ้กต้ขๆœ‰ไธ‰็งๆจกๅผ detail add modify*/ var panelType="form"; /*panelType ้ขๆฟ็ฑปๅž‹ form่กจๅ• search ๆŸฅ่ฏข child ไปŽ่กจๅฏน่ฑก*/ var editIndex = undefined; /*datagrid ็ผ–่พ‘ๅฏน่ฑก็š„่กŒๅท*/ var dg1EditIndex = undefined; var objName=label.objName; /*้กต้ข็ฎก็†ๅฏน่ฑกๅ็งฐ*/ var lblDetailStr=label.detailStr; /*ๅœจไธๅŒ็š„่ฏญ็งไธ‹ๅบ”่ฏฅไธๅŒ*/ var lblAddStr=label.addStr; /*ๅœจไธๅŒ็š„่ฏญ็งไธ‹ๅบ”่ฏฅไธๅŒ*/ var lblEditStr=label.editStr; /*ๅœจไธๅŒ็š„่ฏญ็งไธ‹ๅบ”่ฏฅไธๅŒ*/ var pageName=null; /*ๆ นๆฎpageName่ƒฝๅคŸๅ–ๅพ—ๆŒ‰้’ฎๅฎšไน‰*/ var pageHeight=0; /*pageHeight ้กต้ข้ซ˜ๅบฆ*/ var topHeight=366; /*datagrid้ซ˜ๅบฆ*/ var dgHeadHeight=28; /*datagrid ๆ”ถ็ผฉๅŽ้ซ˜ๅบฆ*/ var downHeight=30; /*ๅบ•้ƒจ้ซ˜ๅบฆ*/ var paddingHeight=11; /*้กต้ขๅ†…่กฅไธ้ซ˜ๅบฆ paddingTop+paddingBottom*/ var gridToolbar = null; /*ๆŒ‰้’ฎๅฎšไน‰ */ var dgConf=null; /*dgConf้…็ฝฎไฟกๆฏ*/ var dg1Conf=null; function initConf(){} /*ๅœจๆญคๅˆๅง‹ๅŒ–ๆœฌ้กต้ข็š„ๆ‰€ๆœ‰้…็ฝฎไฟกๆฏ*/ function initButton(){ for(var i=0;i<gridToolbar.length;i++){ var b=gridToolbar[i];/*้ฆ–ๆฌก่ฟ่กŒๆ—ถๆ‰€ๆœ‰ๆŒ‰้’ฎ้ƒฝๆ˜ฏdisable็Šถๆ€*/ $("#"+b.id).linkbutton({iconCls: b.iconCls,text:b.text,disabled:true,handler:b.handler,plain:1}); } } function initBtnDisabled() { var btnDisabled=[{"id":"btn_refresh"},{"id":"btn_search"}]; for(var i=0;i<btnDisabled.length;i++) { $('#'+btnDisabled[i].id).linkbutton('enable'); } } function component() { initConf(); if(window.innerHeight) pageHeight=window.innerHeight; else pageHeight=document.documentElement.clientHeight; $('#middle').css("height",pageHeight-topHeight-downHeight-paddingHeight); $('#tab').tabs({ onSelect:tab_select, fit:true }); /*่ฟ™ๆ—ถๅ€™ๅฏ่ƒฝ่ฟ˜ๆฒกๆœ‰key ๆ‰€ไปฅไธ่ƒฝ็›ดๆŽฅ็ป‘ๅฎšdomๅฏน่ฑก๏ผŒๅช่ƒฝไฝฟ็”จdom id*/ installKey("btn_collapse",Keys.f1,null,null,null); installKey("btn_edit",Keys.f2,null,null,null); installKey("btn_search",Keys.f3,null,null,null); installKey("btn_add",Keys.f4,null,null,null); installKey("btn_delete",Keys.del,null,null,null); installKey("btn2_save",Keys.s,true,null,null); installKey("btn2_search",Keys.q,true,null,null); installKey("btn2_edit",Keys.e,true,null,null); document.onhelp=function(){return false}; /*ไธบไบ†ๅฑ่”ฝIE็š„F1ๆŒ‰้”ฎ*/ window.onhelp=function(){return false}; /*ไธบไบ†ๅฑ่”ฝIE็š„F1ๆŒ‰้”ฎ*/ $('#btn2_save').linkbutton({iconCls: 'icon-save'}).click(btn2_save); $('#btn2_edit').linkbutton({iconCls: 'icon-save'}).click(btn2_update), $('#btn2_search').linkbutton({iconCls: 'icon-search'}).click(btn2_search); $('#btn2_addItem').linkbutton({iconCls: 'icon-add'}).click(btn2_addItem); $('#btn2_editItem').linkbutton({iconCls: 'icon-edit'}).click(btn2_editItem); $('#btn2_rmItem').linkbutton({iconCls: 'icon-remove'}).click(btn2_rmItem); $('#btn2_ok').linkbutton({iconCls: 'icon-ok'}).click(btn2_ok); dgConf.toolbar='#tb'; dgConf.onCollapse=dg_collapse; dgConf.onSelect=dg_select; dgConf.singleSelect=true; dgConf.onLoadSuccess=dg_load; dgConf.onClickRow=dg_click; dgConf.onDblClickRow=dg_dbl; dgConf.onExpand=dg_expand; dgConf.collapsible=true; dgConf.collapseID="btn_collapse"; dgConf.pagination=true; dgConf.fit=true; dgConf.rownumbers=true; dgConf.singleSelect=true; dg1Conf.onClickRow=dg1_click; dg1Conf.onDblClickRow=dg1_dbl; $("#dg").datagrid(dgConf); initButton(); initBtnDisabled(); $('#top').css("height","auto"); lov_init(); $(".formChild").height(pageHeight-topHeight-downHeight-paddingHeight-dgHeadHeight-1); //$("#ff1 input").attr("readonly",1); /*่ฏฆ็ป†่กจๅ•็š„่พ“ๅ…ฅๆก†ๅช่ฏป*/ } function showChildGrid(param){/*dg 
้€‰ไธญไบ‹ไปถ่งฆๅ‘*/ $("#dg1").datagrid(dg1Conf); } function showForm(row){/*dg ้€‰ไธญไบ‹ไปถ่งฆๅ‘*/ //$("#ff1").form("load",row); //$("#ff2").form("load",row);; } function dg_collapse(){/*ๆ”ถ็ผฉๅŽ ๆ€ปๆ˜ฏ่ฆไฟฎๆ”นtabs ไผš่งฆๅ‘tab_selectไบ‹ไปถ ้‚ฃไนˆๅ‰้ขๅฐฑ้œ€่ฆๅฐ†panel็š„selectedๅฑžๆ€ง่ฎพไธบtrue*/ var panel=$("#tab").tabs("getSelected"); /*ๅ…ˆ่Žทๅ–selectedๅฏน่ฑก*/ if(panel!=null) panel.panel({selected:1}); $('#middle').css("height",pageHeight-dgHeadHeight-downHeight-paddingHeight); $(".formChild").height(pageHeight-dgHeadHeight-downHeight-paddingHeight-dgHeadHeight-1); $("#tab").tabs({fit:true,stopSelect:true});/*tabๅ‘็”Ÿๅ˜ๅŒ–ไบ† ไผš่งฆๅ‘tab_selectไบ‹ไปถ */ if(panel!=null) panel.panel({selected:0}); } function dg_expand(){ var panel=$("#tab").tabs("getSelected"); if(panel!=null) panel.panel({selected:1}); $('#middle').css("height",pageHeight-topHeight-downHeight-paddingHeight); $(".formChild").height(pageHeight-topHeight-downHeight-paddingHeight-dgHeadHeight-1); $("#tab").tabs({fit:true,stopSelect:true}); if(panel!=null) panel.panel({selected:0}); } function dg_load(){/*้€‰ไธญ็ฌฌไธ€่กŒ*/ $('#mask').css('display', "none"); $('#dg').datagrid('selectRow', 0); } function dg_select(rowIndex, rowData){/*้€‰ไธญไบ‹ไปถ ๅกซๅ……ff1 ff2 dg1*/ showChildGrid(rowData);/*ๅญ่กจๆจกๅผไธ‹๏ผŒ้‡็ป˜ๅญ่กจๅˆ—่กจ*/ showForm(rowData,"add"); useDetailMode(); } function dg_add(){/*ๅˆ—่กจๆ–ฐๅขžๆŒ‰้’ฎไบ‹ไปถ*/ useAddMode(); } function dg_edit(){/*ๅˆ—่กจ็ผ–่พ‘ๆŒ‰้’ฎ่งฆๅ‘ไบ‹ไปถ*/ var row=$('#dg').datagrid('getSelected'); if(row){ useEditMode(); } else $.messager.alert('้€‰ๆ‹ฉๆ็คบ', '่ฏท้€‰ๆ‹ฉๆ‚จ็ผ–่พ‘็š„ๆ•ฐๆฎ!',"info"); } function dg_delete(){/*ๅˆ—่กจๅˆ ้™คๆŒ‰้’ฎ่งฆๅ‘ไบ‹ไปถ*/ var confirmBack=function(r){ if(!r) return; var p=$('#dg').datagrid('getRowIndex',$('#dg').datagrid('getSelected')); /*ๆ‰ง่กŒๆœๅŠกๅ™จ่ฏทๆฑ‚๏ผŒๅฎŒๆˆๆœๅŠก็ซฏๆ•ฐๆฎ็š„ๅˆ ้™ค ็„ถๅŽๅฎŒๆˆๅ‰็ซฏ็š„ๅˆ ้™ค*/ if (p == undefined){return} $('#dg').datagrid('cancelEdit', p) .datagrid('deleteRow', p); /*ๅˆ ้™คๆˆๅŠŸๅŽๅบ”่ฏฅๅˆทๆ–ฐ้กต้ข ๅนถๆŠŠไธ‹ไธ€ๆก้€‰ไธญ*/ var currRows=$('#dg').datagrid('getRows').length; if(p>=currRows) p--; if(p>=0) $('#dg').datagrid('selectRow', p);/*ๅฆ‚ๆžœๅทฒ็ปๅˆฐๆœซๅฐพๅˆ™ ้€‰ไธญp-1 */ } var row=$('#dg').datagrid('getSelected'); if(row) $.messager.confirm('็กฎ่ฎคๆ็คบ', 'ๆ‚จ็กฎ่ฎค่ฆๅˆ ้™ค่ฟ™ๆกๆ•ฐๆฎๅ—?', confirmBack); else $.messager.alert('้€‰ๆ‹ฉๆ็คบ', '่ฏท้€‰ๆ‹ฉๆ‚จ่ฆๅˆ ้™ค็š„ๆ•ฐๆฎ!',"info"); } function dg_refresh(){/*ๅˆ—่กจๅˆทๆ–ฐๆŒ‰้’ฎไบ‹ไปถ*/ } function dg_search(){/*ๅˆ—่กจๆœ็ดขไบ‹ไปถ searchๆจกๅผไธๅ†็ฆ็”จๅ…ถไป–้ขๆฟ*/ panelType="search"; $('#tab').tabs("select",1); } function dg_click(index){ /*ๅˆ‡ๆขๅ›ž่ฏฆ็ป†ไฟกๆฏๆจกๅผ ้ฆ–ๅ…ˆๅˆคๆ–ญtab็š„ๅฝ“ๅ‰้€‰้กน*/ if(panelType=="search"){ $('#tab').tabs("select",0); } } function dg_dbl(){/*ๅˆ—่กจๅŒๅ‡ปไบ‹ไปถ ๅŒๅ‡ป่ฟ›ๅ…ฅ็ผ–่พ‘ๆจกๅผ*/ document.getElementById("btn_edit").click();/*ๅŒๅ‡ป็ญ‰ๅŒไบŽ็‚นๅ‡ป็ผ–่พ‘ๆŒ‰้’ฎ*/ } function tab_select(title,index){/*้€‰้กนๅก็š„ๅˆ‡ๆข ้œ€่ฆๆ›ดๆ”นๆŒ‰้’ฎ็š„ๆ˜พ็คบ*/ $('#down a').css("display","none"); if(index==0){/*ๆ นๆฎgrid็š„็Šถๆ€ๆฅ็”ŸๆˆๆŒ‰้’ฎ add edit*/ $('#btn2_addItem').css("display","inline-block");/*ๆ–ฐๅขž่กŒๆŒ‰้’ฎ*/ $('#btn2_editItem').css("display","inline-block");/*ๅˆ ้™ค่กŒๆŒ‰้’ฎ*/ $('#btn2_rmItem').css("display","inline-block");/*ๅˆ ้™ค่กŒๆŒ‰้’ฎ*/ $('#btn2_ok').css("display","inline-block");/*commitๆŒ‰้’ฎ*/ } else if(index==1){/*ๆŸฅ่ฏข้€‰้กนๅก ๅˆ‡ๆขๅˆฐๆŸฅ่ฏข้กต็ญพ็ญ‰ๅŒไบŽๆŒ‰้’ฎ search่ขซ็‚นๅ‡ป*/ panelType="search"; $('#btn2_search').css("display","inline-block");/*ๆœ็ดขๆŒ‰้’ฎ*/ } } function 
useDetailMode(row){ //formMode="detail"; //$('#ff2').css("display","none"); //$('#ff1').css("display","block"); //if(panelType=="search") $('#tab').tabs("select",0); //else tab_select(); } function btn2_addItem(){ if(dg1_endEditing()){/*็ป“ๆŸ็ผ–่พ‘็Šถๆ€ๆˆๅŠŸ*/ var p=$('#dg1').datagrid('getRowIndex',$('#dg1').datagrid('getSelected')); /*ๆ‰ง่กŒๆœๅŠกๅ™จ่ฏทๆฑ‚๏ผŒๅฎŒๆˆๆœๅŠก็ซฏๆ•ฐๆฎ็š„ๅˆ ้™ค ็„ถๅŽๅฎŒๆˆๅ‰็ซฏ็š„ๅˆ ้™ค*/ if (p == undefined){return} $('#dg1').datagrid('unselectAll'); $('#dg1').datagrid('insertRow',{index:p+1,row:{}}) .datagrid('beginEdit', p+1) .datagrid('selectRow', p+1); dg1EditIndex=p+1; } else{ $('#dg1').datagrid('selectRow', dg1EditIndex); } } function btn2_editItem(){ var index=$('#dg1').datagrid('getRowIndex', $('#dg1').datagrid('getSelected')); if (dg1EditIndex != index){ if (dg1_endEditing()){ $('#dg1').datagrid('selectRow', index) .datagrid('beginEdit', index); dg1EditIndex = index; } else { $('#dg1').datagrid('selectRow', dg1EditIndex); } } } function btn2_rmItem(){ var confirmBack=function(r){ if(!r) return; var p=$('#dg1').datagrid('getRowIndex',$('#dg1').datagrid('getSelected')); if (p == undefined){return} $('#dg1').datagrid('cancelEdit', p) .datagrid('deleteRow', p); var currRows=$('#dg1').datagrid('getRows').length; if(p>=currRows) p--; if(p>=0) $('#dg1').datagrid('selectRow', p);/*ๅฆ‚ๆžœๅทฒ็ปๅˆฐๆœซๅฐพๅˆ™ ้€‰ไธญp-1 */ } var row=$('#dg1').datagrid('getSelected'); if(row) $.messager.confirm('็กฎ่ฎคๆ็คบ', 'ๆ‚จ็กฎ่ฎค่ฆๅˆ ้™ค่ฟ™ๆกๆ•ฐๆฎๅ—?', confirmBack); else $.messager.alert('้€‰ๆ‹ฉๆ็คบ', '่ฏท้€‰ๆ‹ฉๆ‚จ่ฆๅˆ ้™ค็š„ๆ•ฐๆฎ!',"info"); } function dg1_endEditing(){ if (dg1EditIndex == undefined){return true} var flag=$('#dg1').datagrid('validateRow',dg1EditIndex); if(flag){/*ๅฆ‚ๆžœๆ ก้ชŒ้€š่ฟ‡ ๅ…่ฎธ็ป“ๆŸ็ผ–่พ‘็Šถๆ€*/ $('#dg1').datagrid('endEdit', dg1EditIndex); dg1EditIndex = undefined; return true; } return false; } function dg1_click(index){/*ไปŽ่กจๅ•ๅ‡ปไบ‹ไปถ ๅœจ็ผ–่พ‘ๆจกๅผไธ‹ๆ‰“ๅผ€็ผ–่พ‘*/ if (dg1EditIndex != index){ dg1_endEditing(); } } function dg1_dbl(index){/*ไปŽ่กจๅŒๅ‡ปไบ‹ไปถ ๅŒๅ‡ป่ฟ›ๅ…ฅ็ผ–่พ‘ๆจกๅผ*/ document.getElementById("btn2_editItem").click();/*ๅŒๅ‡ป็ญ‰ๅŒไบŽ็‚นๅ‡ป็ผ–่พ‘ๆŒ‰้’ฎ*/ } function useAddMode(){}; function useEditMode(){}; function form_change(type){}/*type= add|edit*/ function removeValidate(){}/*type= enable|remove*/ function btn2_save(){} function btn2_update(){} function btn2_search(){} function btn2_ok(){} function lov_init(){}/*็ป‘ๅฎšๅ€ผๅˆ—่กจ*/
ldjking/wbscreen
web/wb/2tp/template/js/common/copy/a3.js
JavaScript
mit
9,914
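The page template above leaves initConf() empty and expects each concrete page to assign pageName, gridToolbar, dgConf and dg1Conf before component() runs; the sketch below shows one plausible override, with the URL, field names and handlers as placeholders rather than values taken from the wbscreen project.

function initConf() {
  pageName = "demoPage";                                     // placeholder page id
  gridToolbar = [
    { id: "btn_add",  iconCls: "icon-add",  text: "Add",  handler: dg_add  },
    { id: "btn_edit", iconCls: "icon-edit", text: "Edit", handler: dg_edit }
  ];
  // minimal EasyUI datagrid configs; real pages would add more columns and options
  dgConf  = { url: "/demo/list",  columns: [[ { field: "name", title: "Name", width: 120 } ]] };
  dg1Conf = { url: "/demo/items", columns: [[ { field: "qty",  title: "Qty",  width: 80, editor: "numberbox" } ]] };
}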
/*
 * Jermit
 *
 * The MIT License (MIT)
 *
 * Copyright (C) 2018 Kevin Lamonte
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 *
 * @author Kevin Lamonte [kevin.lamonte@gmail.com]
 * @version 1
 */
package jermit.protocol.zmodem;

/**
 * ZEofHeader represents the end of a file.
 */
class ZEofHeader extends Header {

    // ------------------------------------------------------------------------
    // Constructors -----------------------------------------------------------
    // ------------------------------------------------------------------------

    /**
     * Public constructor.
     */
    public ZEofHeader() {
        this(0);
    }

    /**
     * Public constructor.
     *
     * @param data the data field for this header
     */
    public ZEofHeader(final int data) {
        super(Type.ZEOF, (byte) 0x0B, "ZEOF", data);
    }

    // ------------------------------------------------------------------------
    // Header -----------------------------------------------------------------
    // ------------------------------------------------------------------------

    // ------------------------------------------------------------------------
    // ZEofHeader -------------------------------------------------------------
    // ------------------------------------------------------------------------

    /**
     * Get the file size value.
     *
     * @return the value
     */
    public int getFileSize() {
        return data;
    }

}
klamonte/jermit
src/jermit/protocol/zmodem/ZEofHeader.java
Java
mit
2,504
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

public class FormLoader {

    public static String connectionString = "jdbc:hsqldb:file:db-data/teamsandplayers";
    static Connection con;

    public static void main(String[] args) throws Exception {
        try {
            Class.forName("org.hsqldb.jdbc.JDBCDriver");
        } catch (ClassNotFoundException e) {
            throw e;
        }

        MainTeamForm form = new MainTeamForm();
        form.setVisible(true);

        try {
            // will create DB if does not exist
            // "SA" is default user with hypersql
            con = DriverManager.getConnection(connectionString, "SA", "");
        } catch (SQLException e) {
            throw e;
        } finally {
            con.close();
            System.out.println("Program complete");
        }
    }
}
a-r-d/java-1-class-demos
jframe-actionlistener-access-db-cxn/homework-start/Week13Assignment10/src/FormLoader.java
Java
mit
788
<?php
namespace Memento\Test;

use Memento;

class SingleTest extends Harness
{
    /** @dataProvider provideClients */
    public function testStoreMethod(Memento\Client $client)
    {
        $success = $client->store($this->getKey(), array('foo' => 'bar'), $this->getExpires());
        $this->assertTrue($success);
        $this->assertEquals($this->getExpires(), $client->getExpires($this->getKey()));
        $this->assertEquals($this->getExpires(), $client->getTtl($this->getKey())); // default should be the same as expires

        // store with ttl
        $success = $client->store($this->getKey(), array('foo' => 'bar'), $this->getExpires(), $this->getTtl());
        $this->assertTrue($success);
        $this->assertLessThanOrEqual($this->getExpires(), $client->getExpires($this->getKey()));
        $this->assertLessThanOrEqual($this->getTtl(), $client->getTtl($this->getKey()));
    }

    /** @dataProvider provideClients */
    public function testExists(Memento\Client $client)
    {
        $client->store($this->getKey(), true);
        $exists = $client->exists($this->getKey());
        $this->assertTrue($exists);
    }

    /** @dataProvider provideClients */
    public function testRetrieve(Memento\Client $client)
    {
        $client->store($this->getKey(), array('foo' => 'bar'));
        $data = $client->retrieve($this->getKey());
        $this->assertEquals($data, array('foo' => 'bar'));
    }

    /** @dataProvider provideClients */
    public function testInvalidRetrieve(Memento\Client $client)
    {
        $data = $client->retrieve(new Memento\Key(md5(time() . rand(0, 1000))));
        $this->assertEquals($data, null);
    }

    /** @dataProvider provideClients */
    public function testInvalidate(Memento\Client $client)
    {
        $client->store($this->getKey(), true);
        $invalid = $client->invalidate($this->getKey());
        $this->assertTrue($invalid);
        $exists = $client->exists($this->getKey());
        $this->assertFalse($exists);
    }

    /** @dataProvider provideClients */
    public function testTerminate(Memento\Client $client)
    {
        $client->store($this->getKey(), true);
        $terminated = $client->terminate($this->getKey());
        $this->assertTrue($terminated);
        $exists = $client->exists($this->getKey());
        $this->assertFalse($exists);
    }

    /** @dataProvider provideClients */
    public function testExpires(Memento\Client $client)
    {
        $client->store($this->getKey(), array('foo' => 'bar'), 1, $ttl = 5);
        sleep(3);
        $exists = $client->exists($this->getKey());
        $this->assertFalse($exists);

        // check if cache exists but include expired caches
        $exists = $client->exists($this->getKey(), true);
        $this->assertTrue($exists);

        $client->store($this->getKey(), array('foo' => 'bar'), $this->getExpires(), $this->getTtl());
        $this->assertTrue($client->exists($this->getKey()));
        $client->expire($this->getKey());
        sleep(1);
        $this->assertFalse($client->exists($this->getKey()));

        // check if cache exists but include expired caches
        $exists = $client->exists($this->getKey(), true);
        $this->assertTrue($exists);
    }
}
garyr/memento
test/Memento/Test/SingleTest.php
PHP
mit
3,252
#!/usr/bin/env python3 """ Categorize and analyze user sessions. Read in ecfs_obfuscated_filtered.gz file, output some fancy results. """ from collections import defaultdict from collections import Counter import sys import time import os import resource import json import fnmatch from pipes import Pipes import operator from operation import Operation KB = 1024 MB = KB * 1024 GB = MB * 1024 TB = GB * 1024 PB = TB * 1024 MONITOR_LINES = 100000 class UserSession(): def __init__(self, user_id): self.user_id = user_id self.from_ts = 0 self.till_ts = 0 self.get_requests = 0 self.reget_requests = 0 self.put_requests = 0 self.get_bytes = 0 self.put_bytes = 0 self.rename_requests = 0 self.del_requests = 0 self.get_dirs = 0 self.put_dirs = 0 self.put_files_per_dir = 0.0 self.get_files_per_dir = 0.0 self.window_seconds = 0 self.file_cnt_gets = Counter() self.file_cnt_puts = Counter() self.dir_cnt_gets = Counter() self.dir_cnt_puts = Counter() self.num_ops = 0 self.last_ts = 0 def add_op(self, op): self.num_ops += 1 if op.ts < self.last_ts: raise Exception("Timestamp too old") else: self.last_ts = op.ts if op.optype == 'g': self.get_requests += 1 self.get_bytes += op.size self.file_cnt_gets[op.obj_id] += 1 self.dir_cnt_gets[op.parent_dir_id] += 1 elif op.optype == 'p': self.put_requests += 1 self.put_bytes += op.size self.file_cnt_puts[op.obj_id] += 1 self.dir_cnt_puts[op.parent_dir_id] += 1 elif op.optype == 'd': self.del_requests += 1 elif op.optype == 'r': self.rename_requests += 1 #update last time stamp in the session self.till_ts = op.ts + op.execution_time def finish(self): self.get_dirs = len(self.dir_cnt_gets) if self.get_dirs > 0: self.get_files_per_dir = float(self.get_requests) / self.get_dirs self.put_dirs = len(self.dir_cnt_puts) if self.put_dirs > 0: self.put_files_per_dir = float(self.put_requests) / self.put_dirs """ set reget_counter :param counter: contains [ 1, 1, 5] counts of objects. value > 1 is a re-retrieval. :return: """ for c in self.file_cnt_gets.values(): if c > 1: self.reget_requests += (c - 1) # self.announce() return ";".join([str(x) for x in [ self.user_id, self.from_ts, self.till_ts, self.till_ts - self.from_ts, self.get_requests, self.reget_requests, self.put_requests, self.get_bytes, self.put_bytes, self.rename_requests, self.del_requests, self.get_dirs, self.put_dirs, self.put_files_per_dir, self.get_files_per_dir, self.window_seconds ]] ) def announce(self): print("closed session. gets: %r, regets: %r, puts: %r, dels: %r, renames: %r get_dirs: %r, put_dirs: %r, get_bytes: %r put_bytes: %r window_seconds: %d" % \ (self.get_requests, self.reget_requests, self.put_requests, self.del_requests, self.rename_requests, self.get_dirs, self.put_dirs, self.get_bytes, self.put_bytes, self.window_seconds)) def find_clusters(atimes): foo = Counter() bar = dict() for i in xrange(120, 3660, 10): clusters = get_clusters(atimes, i) cs = len(clusters) foo[cs] += 1 # note first occurance of this cluster size. 
if cs not in bar: bar[cs] = i # print(len(atimes), i, cs) return bar[foo.most_common()[0][0]] def get_clusters(data, maxgap): '''Arrange data into groups where successive elements differ by no more than *maxgap* >>> cluster([1, 6, 9, 100, 102, 105, 109, 134, 139], maxgap=10) [[1, 6, 9], [100, 102, 105, 109], [134, 139]] >>> cluster([1, 6, 9, 99, 100, 102, 105, 134, 139, 141], maxgap=10) [[1, 6, 9], [99, 100, 102, 105], [134, 139, 141]] ''' data.sort() groups = [[data[0]]] for x in data[1:]: if abs(x - groups[-1][-1]) <= maxgap: groups[-1].append(x) else: groups.append([x]) return groups def analyze_user_session(user_session_file, out_pipeline, target_file_name): with open(user_session_file, 'r') as sf: ops = list() atimes = list() for line in sf: op = Operation() op.init(line.strip()) ops.append(op) atimes.append(op.ts) ops.sort(key=operator.attrgetter('ts')) atimes.sort() window_seconds = find_clusters(atimes) session_counter = 1 uf = os.path.basename(user_session_file) user_id = uf[:uf.find(".user_session.csv")] session = UserSession(user_id) session.window_seconds = window_seconds for op in ops: if session.from_ts == 0: session.from_ts = op.ts session.till_ts = op.ts + op.execution_time if (session.till_ts + window_seconds) < op.ts: # this session is over, so archive it. out_pipeline.write_to(target_file_name, session.finish()) del session session = UserSession(user_id) session.window_seconds = window_seconds session_counter += 1 session.add_op(op) if session.num_ops > 0: out_pipeline.write_to(target_file_name, session.finish()) print("sessions: %d with window_seconds: %d" %(session_counter, window_seconds)) if __name__ == "__main__": source_dir = os.path.abspath(sys.argv[1]) result = os.path.abspath(sys.argv[2]) results_dir = os.path.dirname(result) target_file_name = os.path.basename(result) users_session_files = [os.path.join(dirpath, f) for dirpath, dirnames, files in os.walk(source_dir) for f in fnmatch.filter(files, '*.user_session.csv')] #remove the old log file, as outpipe is append only. if os.path.exists(os.path.join(results_dir, target_file_name)): os.remove(os.path.join(results_dir, target_file_name)) out_pipe = Pipes(results_dir) csv_header = ";".join(["user_id", "from_ts", "till_ts", "session_lifetime", "get_requests", "reget_requests", "put_requests", "get_bytes", "put_bytes", "rename_requests", "del_requests", "get_dirs", "put_dirs", "put_files_per_dir", "get_files_per_dir", "window_seconds" ]) out_pipe.write_to(target_file_name, csv_header) cnt = 0 for sf in users_session_files: cnt += 1 print ("working on %d/%d" % (cnt, len(users_session_files))) analyze_user_session(sf, out_pipe, target_file_name) # if cnt >=20: # break out_pipe.close() print("wrote results to %s: " % (os.path.join(results_dir, target_file_name)))
zdvresearch/fast15-paper-extras
ecfs_user_sessions/src/analyze_user_sessions.py
Python
mit
7,526
package esl import ( "io" "errors" "unicode/utf8" ) // Buffer ... type buffer []byte // MemoryReader ... type memReader [ ]byte // MemoryWriter ... type memWriter [ ]byte // ErrBufferSize indicates that memory cannot be allocated to store data in a buffer. var ErrBufferSize = errors.New(`could not allocate memory`) func newBuffer( size int ) *buffer { buf := make([ ]byte, 0, size ) return (*buffer)(&buf) } func ( buf *buffer ) reader( ) *memReader { n := len( *buf ) rbuf := ( *buf )[:n:n] return ( *memReader )( &rbuf ) } func ( buf *buffer ) writer( ) *memWriter { return ( *memWriter )( buf ) } func ( buf *buffer ) grow( n int ) error { if ( len( *buf )+ n ) > cap( *buf ) { // Not enough space to store [:+(n)]byte(s) mbuf, err := makebuf( cap( *buf )+ n ) if ( err != nil ) { return ( err ) } copy( mbuf, *buf ) *( buf ) = mbuf } return nil } // allocates a byte slice of size. // If the allocation fails, returns error // indicating that memory cannot be allocated to store data in a buffer. func makebuf( size int ) ( buf [ ]byte, memerr error ) { defer func( ) { // If the make fails, give a known error. if ( recover( ) != nil ) { ( memerr ) = ErrBufferSize } }( ) return make( [ ]byte, 0, size ), nil } func ( buf *memReader ) Read( b [ ]byte ) ( n int, err error ) { if len( *buf ) == 0 { return ( 0 ), io.EOF } n, *buf = copy( b, *buf ), ( *buf )[ n: ] return // n, nil } func ( buf *memReader ) ReadByte( ) ( c byte, err error ) { if len(*buf) == 0 { return ( 0 ), io.EOF } c, *buf = (*buf)[0], (*buf)[1:] return // c, nil } func ( buf *memReader ) ReadRune( ) ( r rune, size int, err error ) { if len(*buf) == 0 { return 0, 0, io.EOF } r, size = utf8.DecodeRune(*buf) *buf = (*buf)[size:] return // r, size, nil } func ( buf *memReader ) WriteTo( w io.Writer ) ( n int64, err error ) { for len( *buf ) > 0 { rw, err := w.Write( *buf ) if ( rw > 0 ) { n, *buf = n + int64( rw ), (*buf)[rw:] } if ( err != nil ) { return n, err } } return ( 0 ), io.EOF } func ( buf *memWriter ) Write( b []byte ) ( n int, err error ) { *buf = append( *buf, b...) return len( b ), nil } func ( buf *memWriter ) WriteByte( c byte ) error { *buf = append( *buf, c ) return ( nil ) } func ( buf *memWriter ) WriteRune( r rune ) error { if ( r < utf8.RuneSelf ) { return buf.WriteByte( byte( r )) } b := *buf n := len( b ) if ( n + utf8.UTFMax ) > cap( b ) { b = make( []byte, ( n + utf8.UTFMax )) copy( b, *buf ) } w := utf8.EncodeRune( b[ n:( n + utf8.UTFMax )], r ) *buf = b[ :( n + w )] return nil } func ( buf *memWriter ) WriteString( s string ) ( n int, err error ) { *buf = append( *buf, s...) return len( s ), nil } // func (buf *memWriter) ReadFrom(r io.Reader) (n int64, err error) { // // NOTE: indefinite allocation! Try to use io.WriterTo interface! // }
navrotskyj/acr
src/pkg/esl/io.go
Go
mit
2,905
package com.zimbra.cs.versioncheck; import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.Date; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import com.zimbra.common.util.ZimbraLog; import com.zimbra.common.account.Key; import com.zimbra.common.account.Key.ServerBy; import com.zimbra.common.service.ServiceException; import com.zimbra.common.soap.AdminConstants; import com.zimbra.common.soap.SoapFaultException; import com.zimbra.common.soap.SoapTransport; import com.zimbra.common.util.CliUtil; import com.zimbra.cs.account.Config; import com.zimbra.cs.account.Provisioning; import com.zimbra.cs.account.Server; import com.zimbra.cs.client.LmcSession; import com.zimbra.cs.client.soap.LmcSoapClientException; import com.zimbra.cs.client.soap.LmcVersionCheckRequest; import com.zimbra.cs.client.soap.LmcVersionCheckResponse; import com.zimbra.cs.util.BuildInfo; import com.zimbra.cs.util.SoapCLI; import com.zimbra.common.util.DateUtil; /** * @author Greg Solovyev */ public class VersionCheckUtil extends SoapCLI { private static final String OPT_CHECK_VERSION = "c"; private static final String OPT_MANUAL_CHECK_VERSION = "m"; private static final String SHOW_LAST_STATUS = "r"; protected VersionCheckUtil() throws ServiceException { super(); } public static void main(String[] args) { CliUtil.toolSetup(); SoapTransport.setDefaultUserAgent("zmcheckversion", BuildInfo.VERSION); VersionCheckUtil util = null; try { util = new VersionCheckUtil(); } catch (ServiceException e) { System.err.println(e.getMessage()); System.exit(1); } try { util.setupCommandLineOptions(); CommandLine cl = null; try { cl = util.getCommandLine(args); } catch (ParseException e) { System.out.println(e.getMessage()); util.usage(); System.exit(1); } if (cl == null) { System.exit(1); } if (cl.hasOption(OPT_CHECK_VERSION)) { //check schedule Provisioning prov = Provisioning.getInstance(); Config config; config = prov.getConfig(); String updaterServerId = config.getAttr(Provisioning.A_zimbraVersionCheckServer); if (updaterServerId != null) { Server server = prov.get(Key.ServerBy.id, updaterServerId); if (server != null) { Server localServer = prov.getLocalServer(); if (localServer!=null) { if(!localServer.getId().equalsIgnoreCase(server.getId())) { System.out.println("Wrong server"); System.exit(0); } } } } String versionInterval = config.getAttr(Provisioning.A_zimbraVersionCheckInterval); if(versionInterval == null || versionInterval.length()==0 || versionInterval.equalsIgnoreCase("0")) { System.out.println("Automatic updates are disabled"); System.exit(0); } else { long checkInterval = DateUtil.getTimeIntervalSecs(versionInterval,0); String lastAttempt = config.getAttr(Provisioning.A_zimbraVersionCheckLastAttempt); if(lastAttempt != null) { Date lastChecked = DateUtil.parseGeneralizedTime(config.getAttr(Provisioning.A_zimbraVersionCheckLastAttempt)); Date now = new Date(); if (now.getTime()/1000- lastChecked.getTime()/1000 >= checkInterval) { util.doVersionCheck(); } else { System.out.println("Too early"); System.exit(0); } } else { util.doVersionCheck(); } } } else if (cl.hasOption(OPT_MANUAL_CHECK_VERSION)) { util.doVersionCheck(); } else if (cl.hasOption(SHOW_LAST_STATUS)) { util.doResult(); System.exit(0); } else { util.usage(); System.exit(1); } } catch (Exception e) { System.err.println(e.getMessage()); ZimbraLog.extensions.error("Error in versioncheck util", e); util.usage(null); System.exit(1); } } private 
void doVersionCheck() throws SoapFaultException, IOException, ServiceException, LmcSoapClientException {
        LmcSession session = auth();
        LmcVersionCheckRequest req = new LmcVersionCheckRequest();
        req.setAction(AdminConstants.VERSION_CHECK_CHECK);
        req.setSession(session);
        req.invoke(getServerUrl());
    }

    private void doResult() throws SoapFaultException, IOException, ServiceException, LmcSoapClientException {
        try {
            LmcSession session = auth();
            LmcVersionCheckRequest req = new LmcVersionCheckRequest();
            req.setAction(AdminConstants.VERSION_CHECK_STATUS);
            req.setSession(session);
            LmcVersionCheckResponse res = (LmcVersionCheckResponse) req.invoke(getServerUrl());
            List<VersionUpdate> updates = res.getUpdates();
            for (Iterator<VersionUpdate> iter = updates.iterator(); iter.hasNext();) {
                VersionUpdate update = iter.next();
                String critical;
                if (update.isCritical()) {
                    critical = "critical";
                } else {
                    critical = "not critical";
                }
                System.out.println(
                        String.format("Found a %s update. Update is %s. Update version: %s. For more info visit: %s",
                                update.getType(), critical, update.getVersion(), update.getUpdateURL())
                        );
            }
        } catch (SoapFaultException soape) {
            System.out.println("Caught SoapFaultException");
            soape.printStackTrace();
            throw (soape);
        } catch (LmcSoapClientException lmce) {
            System.out.println("Caught LmcSoapClientException");
            lmce.printStackTrace();
            throw (lmce);
        } catch (ServiceException se) {
            System.out.println("Caught ServiceException");
            se.printStackTrace();
            throw (se);
        } catch (IOException ioe) {
            System.out.println("Caught IOException");
            ioe.printStackTrace();
            throw (ioe);
        }
    }

    protected void setupCommandLineOptions() {
        // super.setupCommandLineOptions();
        Options options = getOptions();
        Options hiddenOptions = getHiddenOptions();
        hiddenOptions.addOption(OPT_CHECK_VERSION, "autocheck", false, "Initiate version check request (exits if zimbraVersionCheckInterval==0)");
        options.addOption(SHOW_LAST_STATUS, "result", false, "Show results of last version check.");
        options.addOption(OPT_MANUAL_CHECK_VERSION, "manual", false, "Initiate version check request.");
    }

    protected String getCommandUsage() {
        return "zmcheckversion <options>";
    }
}
nico01f/z-pec
ZimbraAdminVersionCheck/src/java/com/zimbra/cs/versioncheck/VersionCheckUtil.java
Java
mit
7,334
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace EmployeeFinder.Models
{
    public enum Position
    {
        Bartender,
        Waiter,
        Bellboy,
        Receptionist,
        Manager,
        Housekeeper,
        Chef,
        Maintenance
    }
}
GeorgiNik/EmployeeFinder
EmployeeFinder.Models/Position.cs
C#
mit
340
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace Domain
{
    public class Meeting
    {
        public int ConsultantId { get; set; }
        public Consultant Consultant { get; set; }
        public int UserId { get; set; }
        public User User { get; set; }
        public DateTime BeginTime { get; set; }
        public DateTime EndTime { get; set; }

        public override string ToString()
        {
            return $"{BeginTime} -> {EndTime}";
        }
    }
}
rohansen/Code-Examples
Database/TransactionScopeWithGUI/Domain/Meeting.cs
C#
mit
559
module PiwikAnalytics module Helpers def piwik_tracking_tag config = PiwikAnalytics.configuration return if config.disabled? if config.use_async? file = "piwik_analytics/piwik_tracking_tag_async" else file = "piwik_analytics/piwik_tracking_tag" end render({ :file => file, :locals => {:url => config.url, :id_site => config.id_site} }) end end end
piwik/piwik-ruby-tracking
lib/piwik_analytics/helpers.rb
Ruby
mit
435
#!/usr/bin/env python # -*- coding: utf-8 -*- """ @author Stephan Reith @date 31.08.2016 This is a simple example to demonstrate how the ROS Spinnaker Interface can be used. You will also need a ROS Listener and a ROS Talker to send and receive data. Make sure they communicate over the same ROS topics and std_msgs.Int64 ROS Messages used in here. """ import spynnaker.pyNN as pynn from ros_spinnaker_interface import ROS_Spinnaker_Interface # import transfer_functions as tf from ros_spinnaker_interface import SpikeSourcePoisson from ros_spinnaker_interface import SpikeSinkSmoothing ts = 0.1 n_neurons = 1 simulation_time = 10000 # ms pynn.setup(timestep=ts, min_delay=ts, max_delay=2.0*ts) pop = pynn.Population(size=n_neurons, cellclass=pynn.IF_curr_exp, cellparams={}, label='pop') # The ROS_Spinnaker_Interface just needs to be initialised. The following parameters are possible: ros_interface = ROS_Spinnaker_Interface( n_neurons_source=n_neurons, # number of neurons of the injector population Spike_Source_Class=SpikeSourcePoisson, # the transfer function ROS Input -> Spikes you want to use. Spike_Sink_Class=SpikeSinkSmoothing, # the transfer function Spikes -> ROS Output you want to use. # You can choose from the transfer_functions module # or write one yourself. output_population=pop, # the pynn population you wish to receive the # live spikes from. ros_topic_send='to_spinnaker', # the ROS topic used for the incoming ROS values. ros_topic_recv='from_spinnaker', # the ROS topic used for the outgoing ROS values. clk_rate=1000, # mainloop clock (update) rate in Hz. ros_output_rate=10) # number of ROS messages send out per second. # Build your network, run the simulation and optionally record the spikes and voltages. pynn.Projection(ros_interface, pop, pynn.OneToOneConnector(weights=5, delays=1)) pop.record() pop.record_v() pynn.run(simulation_time) spikes = pop.getSpikes() pynn.end() # Plot import pylab spike_times = [spike[1] for spike in spikes] spike_ids = [spike[0] for spike in spikes] pylab.plot(spike_times, spike_ids, ".") pylab.xlabel('Time (ms)') pylab.ylabel('Neuron ID') pylab.title('Spike Plot') pylab.xlim(xmin=0) pylab.show()
reiths/ros_spinnaker_interface
examples/example_ros_spinnaker_interface.py
Python
mit
2,533
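The docstring above points out that a separate ROS talker and listener are needed, publishing and subscribing std_msgs.Int64 on the same 'to_spinnaker' and 'from_spinnaker' topics. Below is a minimal rospy sketch of such a node; the node name, publish rate, and counter values are illustrative choices, not part of the original example:

#!/usr/bin/env python
# Illustrative sketch only: a combined ROS talker/listener for the topics
# used by the interface above (std_msgs.Int64 in both directions).
import rospy
from std_msgs.msg import Int64


def on_spinnaker_value(msg):
    # Print whatever comes back from the SpiNNaker side.
    rospy.loginfo("from_spinnaker: %d", msg.data)


if __name__ == "__main__":
    rospy.init_node("spinnaker_io_demo")
    pub = rospy.Publisher("to_spinnaker", Int64, queue_size=10)
    rospy.Subscriber("from_spinnaker", Int64, on_spinnaker_value)

    rate = rospy.Rate(10)  # 10 messages per second
    value = 0
    while not rospy.is_shutdown():
        pub.publish(Int64(data=value))
        value = (value + 1) % 100
        rate.sleep()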
/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ import {CompileDirectiveMetadata, CompileStylesheetMetadata, CompileTemplateMetadata, templateSourceUrl} from './compile_metadata'; import {CompilerConfig, preserveWhitespacesDefault} from './config'; import {ViewEncapsulation} from './core'; import * as html from './ml_parser/ast'; import {HtmlParser} from './ml_parser/html_parser'; import {InterpolationConfig} from './ml_parser/interpolation_config'; import {ParseTreeResult as HtmlParseTreeResult} from './ml_parser/parser'; import {ResourceLoader} from './resource_loader'; import {extractStyleUrls, isStyleUrlResolvable} from './style_url_resolver'; import {PreparsedElementType, preparseElement} from './template_parser/template_preparser'; import {UrlResolver} from './url_resolver'; import {isDefined, stringify, SyncAsync, syntaxError} from './util'; export interface PrenormalizedTemplateMetadata { ngModuleType: any; componentType: any; moduleUrl: string; template: string|null; templateUrl: string|null; styles: string[]; styleUrls: string[]; interpolation: [string, string]|null; encapsulation: ViewEncapsulation|null; animations: any[]; preserveWhitespaces: boolean|null; } export class DirectiveNormalizer { private _resourceLoaderCache = new Map<string, SyncAsync<string>>(); constructor( private _resourceLoader: ResourceLoader, private _urlResolver: UrlResolver, private _htmlParser: HtmlParser, private _config: CompilerConfig) {} clearCache(): void { this._resourceLoaderCache.clear(); } clearCacheFor(normalizedDirective: CompileDirectiveMetadata): void { if (!normalizedDirective.isComponent) { return; } const template = normalizedDirective.template !; this._resourceLoaderCache.delete(template.templateUrl!); template.externalStylesheets.forEach((stylesheet) => { this._resourceLoaderCache.delete(stylesheet.moduleUrl!); }); } private _fetch(url: string): SyncAsync<string> { let result = this._resourceLoaderCache.get(url); if (!result) { result = this._resourceLoader.get(url); this._resourceLoaderCache.set(url, result); } return result; } normalizeTemplate(prenormData: PrenormalizedTemplateMetadata): SyncAsync<CompileTemplateMetadata> { if (isDefined(prenormData.template)) { if (isDefined(prenormData.templateUrl)) { throw syntaxError(`'${ stringify(prenormData .componentType)}' component cannot define both template and templateUrl`); } if (typeof prenormData.template !== 'string') { throw syntaxError(`The template specified for component ${ stringify(prenormData.componentType)} is not a string`); } } else if (isDefined(prenormData.templateUrl)) { if (typeof prenormData.templateUrl !== 'string') { throw syntaxError(`The templateUrl specified for component ${ stringify(prenormData.componentType)} is not a string`); } } else { throw syntaxError( `No template specified for component ${stringify(prenormData.componentType)}`); } if (isDefined(prenormData.preserveWhitespaces) && typeof prenormData.preserveWhitespaces !== 'boolean') { throw syntaxError(`The preserveWhitespaces option for component ${ stringify(prenormData.componentType)} must be a boolean`); } return SyncAsync.then( this._preParseTemplate(prenormData), (preparsedTemplate) => this._normalizeTemplateMetadata(prenormData, preparsedTemplate)); } private _preParseTemplate(prenomData: PrenormalizedTemplateMetadata): SyncAsync<PreparsedTemplate> { let template: SyncAsync<string>; let templateUrl: 
string; if (prenomData.template != null) { template = prenomData.template; templateUrl = prenomData.moduleUrl; } else { templateUrl = this._urlResolver.resolve(prenomData.moduleUrl, prenomData.templateUrl!); template = this._fetch(templateUrl); } return SyncAsync.then( template, (template) => this._preparseLoadedTemplate(prenomData, template, templateUrl)); } private _preparseLoadedTemplate( prenormData: PrenormalizedTemplateMetadata, template: string, templateAbsUrl: string): PreparsedTemplate { const isInline = !!prenormData.template; const interpolationConfig = InterpolationConfig.fromArray(prenormData.interpolation!); const templateUrl = templateSourceUrl( {reference: prenormData.ngModuleType}, {type: {reference: prenormData.componentType}}, {isInline, templateUrl: templateAbsUrl}); const rootNodesAndErrors = this._htmlParser.parse( template, templateUrl, {tokenizeExpansionForms: true, interpolationConfig}); if (rootNodesAndErrors.errors.length > 0) { const errorString = rootNodesAndErrors.errors.join('\n'); throw syntaxError(`Template parse errors:\n${errorString}`); } const templateMetadataStyles = this._normalizeStylesheet(new CompileStylesheetMetadata( {styles: prenormData.styles, moduleUrl: prenormData.moduleUrl})); const visitor = new TemplatePreparseVisitor(); html.visitAll(visitor, rootNodesAndErrors.rootNodes); const templateStyles = this._normalizeStylesheet(new CompileStylesheetMetadata( {styles: visitor.styles, styleUrls: visitor.styleUrls, moduleUrl: templateAbsUrl})); const styles = templateMetadataStyles.styles.concat(templateStyles.styles); const inlineStyleUrls = templateMetadataStyles.styleUrls.concat(templateStyles.styleUrls); const styleUrls = this ._normalizeStylesheet(new CompileStylesheetMetadata( {styleUrls: prenormData.styleUrls, moduleUrl: prenormData.moduleUrl})) .styleUrls; return { template, templateUrl: templateAbsUrl, isInline, htmlAst: rootNodesAndErrors, styles, inlineStyleUrls, styleUrls, ngContentSelectors: visitor.ngContentSelectors, }; } private _normalizeTemplateMetadata( prenormData: PrenormalizedTemplateMetadata, preparsedTemplate: PreparsedTemplate): SyncAsync<CompileTemplateMetadata> { return SyncAsync.then( this._loadMissingExternalStylesheets( preparsedTemplate.styleUrls.concat(preparsedTemplate.inlineStyleUrls)), (externalStylesheets) => this._normalizeLoadedTemplateMetadata( prenormData, preparsedTemplate, externalStylesheets)); } private _normalizeLoadedTemplateMetadata( prenormData: PrenormalizedTemplateMetadata, preparsedTemplate: PreparsedTemplate, stylesheets: Map<string, CompileStylesheetMetadata>): CompileTemplateMetadata { // Algorithm: // - produce exactly 1 entry per original styleUrl in // CompileTemplateMetadata.externalStylesheets with all styles inlined // - inline all styles that are referenced by the template into CompileTemplateMetadata.styles. 
// Reason: be able to determine how many stylesheets there are even without loading // the template nor the stylesheets, so we can create a stub for TypeScript always synchronously // (as resource loading may be async) const styles = [...preparsedTemplate.styles]; this._inlineStyles(preparsedTemplate.inlineStyleUrls, stylesheets, styles); const styleUrls = preparsedTemplate.styleUrls; const externalStylesheets = styleUrls.map(styleUrl => { const stylesheet = stylesheets.get(styleUrl)!; const styles = [...stylesheet.styles]; this._inlineStyles(stylesheet.styleUrls, stylesheets, styles); return new CompileStylesheetMetadata({moduleUrl: styleUrl, styles: styles}); }); let encapsulation = prenormData.encapsulation; if (encapsulation == null) { encapsulation = this._config.defaultEncapsulation; } if (encapsulation === ViewEncapsulation.Emulated && styles.length === 0 && styleUrls.length === 0) { encapsulation = ViewEncapsulation.None; } return new CompileTemplateMetadata({ encapsulation, template: preparsedTemplate.template, templateUrl: preparsedTemplate.templateUrl, htmlAst: preparsedTemplate.htmlAst, styles, styleUrls, ngContentSelectors: preparsedTemplate.ngContentSelectors, animations: prenormData.animations, interpolation: prenormData.interpolation, isInline: preparsedTemplate.isInline, externalStylesheets, preserveWhitespaces: preserveWhitespacesDefault( prenormData.preserveWhitespaces, this._config.preserveWhitespaces), }); } private _inlineStyles( styleUrls: string[], stylesheets: Map<string, CompileStylesheetMetadata>, targetStyles: string[]) { styleUrls.forEach(styleUrl => { const stylesheet = stylesheets.get(styleUrl)!; stylesheet.styles.forEach(style => targetStyles.push(style)); this._inlineStyles(stylesheet.styleUrls, stylesheets, targetStyles); }); } private _loadMissingExternalStylesheets( styleUrls: string[], loadedStylesheets: Map<string, CompileStylesheetMetadata> = new Map<string, CompileStylesheetMetadata>()): SyncAsync<Map<string, CompileStylesheetMetadata>> { return SyncAsync.then( SyncAsync.all(styleUrls.filter((styleUrl) => !loadedStylesheets.has(styleUrl)) .map( styleUrl => SyncAsync.then( this._fetch(styleUrl), (loadedStyle) => { const stylesheet = this._normalizeStylesheet(new CompileStylesheetMetadata( {styles: [loadedStyle], moduleUrl: styleUrl})); loadedStylesheets.set(styleUrl, stylesheet); return this._loadMissingExternalStylesheets( stylesheet.styleUrls, loadedStylesheets); }))), (_) => loadedStylesheets); } private _normalizeStylesheet(stylesheet: CompileStylesheetMetadata): CompileStylesheetMetadata { const moduleUrl = stylesheet.moduleUrl!; const allStyleUrls = stylesheet.styleUrls.filter(isStyleUrlResolvable) .map(url => this._urlResolver.resolve(moduleUrl, url)); const allStyles = stylesheet.styles.map(style => { const styleWithImports = extractStyleUrls(this._urlResolver, moduleUrl, style); allStyleUrls.push(...styleWithImports.styleUrls); return styleWithImports.style; }); return new CompileStylesheetMetadata( {styles: allStyles, styleUrls: allStyleUrls, moduleUrl: moduleUrl}); } } interface PreparsedTemplate { template: string; templateUrl: string; isInline: boolean; htmlAst: HtmlParseTreeResult; styles: string[]; inlineStyleUrls: string[]; styleUrls: string[]; ngContentSelectors: string[]; } class TemplatePreparseVisitor implements html.Visitor { ngContentSelectors: string[] = []; styles: string[] = []; styleUrls: string[] = []; ngNonBindableStackCount: number = 0; visitElement(ast: html.Element, context: any): any { const preparsedElement = 
preparseElement(ast); switch (preparsedElement.type) { case PreparsedElementType.NG_CONTENT: if (this.ngNonBindableStackCount === 0) { this.ngContentSelectors.push(preparsedElement.selectAttr); } break; case PreparsedElementType.STYLE: let textContent = ''; ast.children.forEach(child => { if (child instanceof html.Text) { textContent += child.value; } }); this.styles.push(textContent); break; case PreparsedElementType.STYLESHEET: this.styleUrls.push(preparsedElement.hrefAttr); break; default: break; } if (preparsedElement.nonBindable) { this.ngNonBindableStackCount++; } html.visitAll(this, ast.children); if (preparsedElement.nonBindable) { this.ngNonBindableStackCount--; } return null; } visitExpansion(ast: html.Expansion, context: any): any { html.visitAll(this, ast.cases); } visitExpansionCase(ast: html.ExpansionCase, context: any): any { html.visitAll(this, ast.expression); } visitComment(ast: html.Comment, context: any): any { return null; } visitAttribute(ast: html.Attribute, context: any): any { return null; } visitText(ast: html.Text, context: any): any { return null; } }
matsko/angular
packages/compiler/src/directive_normalizer.ts
TypeScript
mit
12,863
<?php /* TwigBundle:Exception:error.atom.twig */ class __TwigTemplate_405349459f7f2e8922747537b1c12aa2323bb61b0265aaf549db7e51eafd66f4 extends Twig_Template { public function __construct(Twig_Environment $env) { parent::__construct($env); $this->parent = false; $this->blocks = array( ); } protected function doDisplay(array $context, array $blocks = array()) { // line 1 $this->env->loadTemplate("TwigBundle:Exception:error.xml.twig")->display(array_merge($context, array("exception" => (isset($context["exception"]) ? $context["exception"] : $this->getContext($context, "exception"))))); } public function getTemplateName() { return "TwigBundle:Exception:error.atom.twig"; } public function isTraitable() { return false; } public function getDebugInfo() { return array ( 19 => 1, 79 => 21, 72 => 13, 69 => 12, 47 => 18, 40 => 11, 37 => 10, 22 => 1, 246 => 32, 157 => 56, 145 => 46, 139 => 45, 131 => 42, 123 => 41, 120 => 40, 115 => 39, 111 => 38, 108 => 37, 101 => 33, 98 => 32, 96 => 31, 83 => 25, 74 => 14, 66 => 11, 55 => 16, 52 => 21, 50 => 14, 43 => 9, 41 => 8, 35 => 9, 32 => 4, 29 => 6, 209 => 82, 203 => 78, 199 => 76, 193 => 73, 189 => 71, 187 => 70, 182 => 68, 176 => 64, 173 => 63, 168 => 62, 164 => 58, 162 => 57, 154 => 54, 149 => 51, 147 => 50, 144 => 49, 141 => 48, 133 => 42, 130 => 41, 125 => 38, 122 => 37, 116 => 36, 112 => 35, 109 => 34, 106 => 36, 103 => 32, 99 => 30, 95 => 28, 92 => 29, 86 => 24, 82 => 22, 80 => 24, 73 => 19, 64 => 19, 60 => 6, 57 => 12, 54 => 22, 51 => 10, 48 => 9, 45 => 17, 42 => 16, 39 => 6, 36 => 5, 33 => 4, 30 => 3,); } }
Mchichou/UEOptionnelles
app/cache/dev/twig/40/53/49459f7f2e8922747537b1c12aa2323bb61b0265aaf549db7e51eafd66f4.php
PHP
mit
1,789
/**
 * HTTP.test
 */

"use strict";

/* Node modules */

/* Third-party modules */
var steeplejack = require("steeplejack");

/* Files */

describe("HTTPError test", function () {

    var HTTPError;
    beforeEach(function () {
        injector(function (_HTTPError_) {
            HTTPError = _HTTPError_;
        });
    });

    describe("Instantiation tests", function () {

        it("should extend the steeplejack Fatal exception", function () {

            var obj = new HTTPError("text");

            expect(obj).to.be.instanceof(HTTPError)
                .to.be.instanceof(steeplejack.Exceptions.Fatal);

            expect(obj.type).to.be.equal("HTTPError");
            expect(obj.message).to.be.equal("text");

            expect(obj.httpCode).to.be.equal(500);
            expect(obj.getHttpCode()).to.be.equal(500);

        });

        it("should set the HTTP code in the first input", function () {

            var obj = new HTTPError(401);

            expect(obj.httpCode).to.be.equal(401);
            expect(obj.getHttpCode()).to.be.equal(401);

        });

    });

});
riggerthegeek/steeplejack-errors
test/unit/errors/HTTP.test.js
JavaScript
mit
1,103
require File.join(File.dirname(__FILE__), './scribd-carrierwave/version') require File.join(File.dirname(__FILE__), './scribd-carrierwave/config') require 'carrierwave' require 'rscribd' require 'configatron' module ScribdCarrierWave class << self def included(base) base.extend ClassMethods end def upload uploader file_path = full_path(uploader) args = { file: file_path, access: ( uploader.class.public? ? 'public' : 'private' )} type = File.extname(file_path) if type type = type.gsub(/^\./, '').gsub(/\?.*$/, '') args.merge!(type: type) if type != '' end scribd_user.upload(args) end def destroy uploader document = scribd_user.find_document(uploader.ipaper_id) rescue nil document.destroy if !document.nil? end def load_ipaper_document(id) scribd_user.find_document(id) rescue nil end def full_path uploader if uploader.url =~ /^http(s?):\/\// uploader.url else uploader.root + uploader.url end end module ClassMethods def public? @public end def has_ipaper(public = false) include InstanceMethods after :store, :upload_to_scribd before :remove, :delete_from_scribd @public = !!public end end module InstanceMethods def self.included(base) base.extend ClassMethods end def upload_to_scribd files res = ScribdCarrierWave::upload(self) set_params res end def delete_from_scribd ScribdCarrierWave::destroy(self) end def display_ipaper(options = {}) id = options.delete(:id) <<-END <script type="text/javascript" src="//www.scribd.com/javascripts/scribd_api.js"></script> <div id="embedded_doc#{id}">#{options.delete(:alt)}</div> <script type="text/javascript"> var scribd_doc = scribd.Document.getDoc(#{ipaper_id}, '#{ipaper_access_key}'); scribd_doc.addParam( 'jsapi_version', 2 ); #{options.map do |k,v| " scribd_doc.addParam('#{k.to_s}', #{v.is_a?(String) ? "'#{v.to_s}'" : v.to_s});" end.join("\n")} scribd_doc.write("embedded_doc#{id}"); </script> END end def fullscreen_url "http://www.scribd.com/fullscreen/#{ipaper_id}?access_key=#{ipaper_access_key}" end def ipaper_id self.model.send("#{self.mounted_as.to_s}_ipaper_id") end def ipaper_access_key self.model.send("#{self.mounted_as.to_s}_ipaper_access_key") end # Responds the Scribd::Document associated with this model, or nil if it does not exist. def ipaper_document @document ||= ScribdCarrierWave::load_ipaper_document(ipaper_id) end private def set_params res self.model.update_attributes({"#{self.mounted_as}_ipaper_id" => res.doc_id, "#{self.mounted_as}_ipaper_access_key" => res.access_key}) end end private def scribd_user Scribd::API.instance.key = ScribdCarrierWave.config.key Scribd::API.instance.secret = ScribdCarrierWave.config.secret @scribd_user = Scribd::User.login(ScribdCarrierWave.config.username, ScribdCarrierWave.config.password) end end end CarrierWave::Uploader::Base.send(:include, ScribdCarrierWave) if Object.const_defined?("CarrierWave")
milkfarm/scribd-carrierwave
lib/scribd-carrierwave.rb
Ruby
mit
3,507
var gulp = require('gulp'); var babel = require('gulp-babel'); var concat = require('gulp-concat'); var merge = require('merge-stream'); var stylus = require('gulp-stylus'); var rename = require("gulp-rename"); var uglify = require("gulp-uglify"); var cssmin = require("gulp-cssmin"); var ngAnnotate = require('gulp-ng-annotate'); var nib = require("nib"); var watch = require('gulp-watch'); function compileJs(devOnly) { var othersUmd = gulp.src(['src/**/*.js', '!src/main.js']) .pipe(babel({ modules: 'umdStrict', moduleRoot: 'angular-chatbar', moduleIds: true })), mainUmd = gulp.src('src/main.js') .pipe(babel({ modules: 'umdStrict', moduleIds: true, moduleId: 'angular-chatbar' })), stream = merge(othersUmd, mainUmd) .pipe(concat('angular-chatbar.umd.js')) .pipe(gulp.dest('dist')) ; if (!devOnly) { stream = stream .pipe(ngAnnotate()) .pipe(uglify()) .pipe(rename('angular-chatbar.umd.min.js')) .pipe(gulp.dest('dist')); } return stream; } function compileCss(name, devOnly) { var stream = gulp.src('styles/' + name + '.styl') .pipe(stylus({use: nib()})) .pipe(rename('angular-' + name + '.css')) .pipe(gulp.dest('dist')) ; if (!devOnly) { stream = stream.pipe(cssmin()) .pipe(rename('angular-' + name + '.min.css')) .pipe(gulp.dest('dist')); } return stream; } function compileAllCss(devOnly) { var streams = []; ['chatbar', 'chatbar.default-theme', 'chatbar.default-animations'].forEach(function (name) { streams.push(compileCss(name, devOnly)); }); return merge.apply(null, streams); } gulp.task('default', function() { return merge.apply(compileJs(), compileAllCss()); }); gulp.task('_watch', function() { watch('styles/**/*.styl', function () { compileAllCss(true); }); watch('src/**/*.js', function () { compileJs(true); }); }); gulp.task('watch', ['default', '_watch']);
jlowcs/angular-chatbar
gulpfile.js
JavaScript
mit
1,878
# -*- coding: utf-8 -*- """ Resource Import Tools @copyright: 2011-12 (c) Sahana Software Foundation @license: MIT Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ # @todo: remove all interactive error reporting out of the _private methods, and raise exceptions instead. __all__ = ["S3Importer", "S3ImportJob", "S3ImportItem"] import os import sys import cPickle import tempfile from datetime import datetime from copy import deepcopy try: from cStringIO import StringIO # Faster, where available except: from StringIO import StringIO try: from lxml import etree except ImportError: print >> sys.stderr, "ERROR: lxml module needed for XML handling" raise try: import json # try stdlib (Python 2.6) except ImportError: try: import simplejson as json # try external module except: import gluon.contrib.simplejson as json # fallback to pure-Python module from gluon import * from gluon.serializers import json as jsons from gluon.storage import Storage, Messages from gluon.tools import callback from s3utils import SQLTABLES3 from s3crud import S3CRUD from s3xml import S3XML from s3utils import s3_mark_required, s3_has_foreign_key, s3_get_foreign_key DEBUG = False if DEBUG: print >> sys.stderr, "S3IMPORTER: DEBUG MODE" def _debug(m): print >> sys.stderr, m else: _debug = lambda m: None # ============================================================================= class S3Importer(S3CRUD): """ Transformable formats (XML, JSON, CSV) import handler """ UPLOAD_TABLE_NAME = "s3_import_upload" # ------------------------------------------------------------------------- def apply_method(self, r, **attr): """ Apply CRUD methods @param r: the S3Request @param attr: dictionary of parameters for the method handler @returns: output object to send to the view Known means of communicating with this module: It expects a URL of the form: /prefix/name/import It will interpret the http requests as follows: GET will trigger the upload POST will trigger either commits or display the import details DELETE will trigger deletes It will accept one of the following control vars: item: to specify a single item in the import job job: to specify a job It should not receive both so job takes precedent over item For CSV imports, the calling controller can add extra fields to the upload form to add columns to each row in the CSV. 
To add the extra fields, pass a named parameter "csv_extra_fields" to the s3_rest_controller call (or the S3Request call, respectively): s3_rest_controller(module, resourcename, csv_extra_fields=[ dict(label="ColumnLabelInTheCSV", field=field_instance) ]) The Field instance "field" will be added to the upload form, and the user input will be added to each row of the CSV under the label as specified. If the "field" validator has options, the input value will be translated into the option representation, otherwise the value will be used as-is. Note that the "label" in the dict is the column label in the CSV, whereas the field label for the form is to be set in the Field instance passed as "field". You can add any arbitrary number of csv_extra_fields to the list. Additionally, you may want to allow the user to choose whether the import shall first remove all existing data in the target table. To do so, pass a label for the "replace_option" to the request: s3_rest_controller(module, resourcename, replace_option=T("Remove existing data before import")) This will add the respective checkbox to the upload form. You may also want to provide a link to download a CSV template from the upload form. To do that, add the resource name to the request attributes: s3_rest_controller(module, resourcename, csv_template="<resourcename>") This will provide a link to: - static/formats/s3csv/<controller>/<resourcename>.csv at the top of the upload form. """ _debug("S3Importer.apply_method(%s)" % r) # Messages T = current.T messages = self.messages = Messages(T) messages.download_template = "Download Template" messages.invalid_file_format = "Invalid File Format" messages.unsupported_file_type = "Unsupported file type of %s" messages.stylesheet_not_found = "No Stylesheet %s could be found to manage the import file." messages.no_file = "No file submitted" messages.file_open_error = "Unable to open the file %s" messages.file_not_found = "The file to upload is missing" messages.no_records_to_import = "No records to import" messages.no_job_to_delete = "No job to delete, maybe it has already been deleted." messages.title_job_read = "Details of the selected import job" messages.title_job_list = "List of import items" messages.file_uploaded = "Import file uploaded" messages.upload_submit_btn = "Upload Data File" messages.open_btn = "Open" messages.view_btn = "View" messages.delete_btn = "Delete" messages.item_show_details = "Display Details" messages.job_total_records = "Total records in the Import Job" messages.job_records_selected = "Records selected" messages.job_deleted = "Import job deleted" messages.job_completed = "Job run on %s. With result of (%s)" messages.import_file = "Import File" messages.import_file_comment = "Upload a file formatted according to the Template." messages.user_name = "User Name" messages.commit_total_records_imported = "%s records imported" messages.commit_total_records_ignored = "%s records ignored" messages.commit_total_errors = "%s records in error" try: self.uploadTitle = current.response.s3.crud_strings[self.tablename].title_upload except: self.uploadTitle = T("Upload a %s import file" % r.function) # @todo: correct to switch this off for the whole session? 
current.session.s3.ocr_enabled = False # Reset all errors/warnings self.error = None self.warning = None # CSV upload configuration if "csv_stylesheet" in attr: self.csv_stylesheet = attr["csv_stylesheet"] else: self.csv_stylesheet = None self.csv_extra_fields = None self.csv_extra_data = None # Environment self.controller = r.controller self.function = r.function # Target table for the data import self.controller_resource = self.resource self.controller_table = self.table self.controller_tablename = self.tablename # Table for uploads self.__define_table() self.upload_resource = None self.item_resource = None # XSLT Path self.xslt_path = os.path.join(r.folder, r.XSLT_PATH) self.xslt_extension = r.XSLT_EXTENSION # Check authorization authorised = self.permit("create", self.upload_tablename) and \ self.permit("create", self.controller_tablename) if not authorised: if r.method is not None: r.unauthorised() else: return dict(form=None) # @todo: clean this up source = None transform = None upload_id = None items = None # @todo get the data from either get_vars or post_vars appropriately # for post -> commit_items would need to add the uploadID if "transform" in r.get_vars: transform = r.get_vars["transform"] if "filename" in r.get_vars: source = r.get_vars["filename"] if "job" in r.post_vars: upload_id = r.post_vars["job"] elif "job" in r.get_vars: upload_id = r.get_vars["job"] items = self._process_item_list(upload_id, r.vars) if "delete" in r.get_vars: r.http = "DELETE" # If we have an upload ID, then get upload and import job self.upload_id = upload_id query = (self.upload_table.id == upload_id) self.upload_job = current.db(query).select(limitby=(0, 1)).first() if self.upload_job: self.job_id = self.upload_job.job_id else: self.job_id = None # Now branch off to the appropriate controller function if r.http == "GET": if source != None: self.commit(source, transform) output = self.upload(r, **attr) if upload_id != None: output = self.display_job(upload_id) else: output = self.upload(r, **attr) elif r.http == "POST": if items != None: output = self.commit_items(upload_id, items) else: output = self.generate_job(r, **attr) elif r.http == "DELETE": if upload_id != None: output = self.delete_job(upload_id) else: r.error(405, current.manager.ERROR.BAD_METHOD) return output # ------------------------------------------------------------------------- def upload(self, r, **attr): """ This will display the upload form It will ask for a file to be uploaded or for a job to be selected. If a file is uploaded then it will guess at the file type and ask for the transform file to be used. The transform files will be in a dataTable with the module specific files shown first and after those all other known transform files. Once the transform file is selected the import process can be started which will generate an importJob, and a "POST" method will occur If a job is selected it will have two actions, open and delete. Open will mean that a "GET" method will occur, with the job details passed in. Whilst the delete action will trigger a "DELETE" method. 
""" _debug("S3Importer.upload()") request = self.request form = self._upload_form(r, **attr) output = self._create_upload_dataTable() if request.representation == "aadata": return output output.update(form=form, title=self.uploadTitle) return output # ------------------------------------------------------------------------- def generate_job(self, r, **attr): """ Generate an ImportJob from the submitted upload form """ _debug("S3Importer.display()") response = current.response s3 = response.s3 db = current.db table = self.upload_table title=self.uploadTitle form = self._upload_form(r, **attr) r = self.request r.read_body() sfilename = form.vars.file try: ofilename = r.post_vars["file"].filename except: form.errors.file = self.messages.no_file if form.errors: response.flash = "" output = self._create_upload_dataTable() output.update(form=form, title=title) elif not sfilename or \ ofilename not in r.files or r.files[ofilename] is None: response.flash = "" response.error = self.messages.file_not_found output = self._create_upload_dataTable() output.update(form=form, title=title) else: output = dict() query = (table.file == sfilename) db(query).update(controller=self.controller, function=self.function, filename=ofilename, user_id=current.session.auth.user.id) # must commit here to separate this transaction from # the trial import phase which will be rolled back. db.commit() extension = ofilename.rsplit(".", 1).pop() if extension not in ("csv", "xls"): response.flash = None response.error = self.messages.invalid_file_format return self.upload(r, **attr) upload_file = r.files[ofilename] if extension == "xls": if "xls_parser" in s3: upload_file.seek(0) upload_file = s3.xls_parser(upload_file.read()) extension = "csv" if upload_file is None: response.flash = None response.error = self.messages.file_not_found return self.upload(r, **attr) else: upload_file.seek(0) row = db(query).select(table.id, limitby=(0, 1)).first() upload_id = row.id if "single_pass" in r.vars: single_pass = r.vars["single_pass"] else: single_pass = None self._generate_import_job(upload_id, upload_file, extension, commit_job = single_pass) if upload_id is None: row = db(query).update(status = 2) # in error if self.error != None: response.error = self.error if self.warning != None: response.warning = self.warning response.flash = "" return self.upload(r, **attr) else: if single_pass: current.session.flash = self.messages.file_uploaded # For a single pass retain the vars from the original URL next_URL = URL(r=self.request, f=self.function, args=["import"], vars=current.request.get_vars ) redirect(next_URL) s3.dataTable_vars = {"job" : upload_id} return self.display_job(upload_id) return output # ------------------------------------------------------------------------- def display_job(self, upload_id): """ @todo: docstring? 
""" _debug("S3Importer.display_job()") request = self.request response = current.response db = current.db table = self.upload_table job_id = self.job_id output = dict() if job_id == None: # redirect to the start page (removes all vars) query = (table.id == upload_id) row = db(query).update(status = 2) # in error current.session.warning = self.messages.no_records_to_import redirect(URL(r=request, f=self.function, args=["import"])) # Get the status of the upload job query = (table.id == upload_id) row = db(query).select(table.status, table.modified_on, table.summary_added, table.summary_error, table.summary_ignored, limitby=(0, 1)).first() status = row.status # completed display details if status == 3: # Completed # @todo currently this is an unnecessary server call, # change for completed records to be a display details # and thus avoid the round trip. # but keep this code to protect against hand-crafted URLs # (and the 'go back' syndrome on the browser) result = (row.summary_added, row.summary_error, row.summary_ignored, ) self._display_completed_job(result, row.modified_on) redirect(URL(r=request, f=self.function, args=["import"])) # otherwise display import items response.view = self._view(request, "list.html") output = self._create_import_item_dataTable(upload_id, job_id) if request.representation == "aadata": return output if response.s3.error_report: error_report = "Errors|" + "|".join(response.s3.error_report) error_tip = A("All Errors", _class="errortip", _title=error_report) else: # @todo: restore the error tree from all items? error_tip = "" rowcount = len(self._get_all_items(upload_id)) rheader = DIV(TABLE( TR( TH("%s: " % self.messages.job_total_records), TD(rowcount, _id="totalAvaliable"), TH("%s: " % self.messages.job_records_selected), TD(0, _id="totalSelected"), TH(error_tip) ), )) output["title"] = self.messages.title_job_read output["rheader"] = rheader output["subtitle"] = self.messages.title_job_list return output # ------------------------------------------------------------------------- def commit(self, source, transform): """ @todo: docstring? """ _debug("S3Importer.commit(%s, %s)" % (source, transform)) db = current.db session = current.session request = self.request try: openFile = open(source, "r") except: session.error = self.messages.file_open_error % source redirect(URL(r=request, f=self.function)) # @todo: manage different file formats # @todo: find file format from request.extension fileFormat = "csv" # insert data in the table and get the ID try: user = session.auth.user.id except: user = None upload_id = self.upload_table.insert(controller=self.controller, function=self.function, filename = source, user_id = user, status = 1) db.commit() # create the import job result = self._generate_import_job(upload_id, openFile, fileFormat, stylesheet=transform ) if result == None: if self.error != None: if session.error == None: session.error = self.error else: session.error += self.error if self.warning != None: if session.warning == None: session.warning = self.warning else: session.warning += self.warning else: items = self._get_all_items(upload_id, True) # commit the import job self._commit_import_job(upload_id, items) result = self._update_upload_job(upload_id) # get the results and display msg = "%s : %s %s %s" % (source, self.messages.commit_total_records_imported, self.messages.commit_total_errors, self.messages.commit_total_records_ignored) msg = msg % result if session.flash == None: session.flash = msg else: session.flash += msg # @todo: return the upload_id? 
# ------------------------------------------------------------------------- def commit_items(self, upload_id, items): """ @todo: docstring? """ _debug("S3Importer.commit_items(%s, %s)" % (upload_id, items)) # Save the import items self._commit_import_job(upload_id, items) # Update the upload table # change the status to completed # record the summary details # delete the upload file result = self._update_upload_job(upload_id) # redirect to the start page (removes all vars) self._display_completed_job(result) redirect(URL(r=self.request, f=self.function, args=["import"])) # ------------------------------------------------------------------------- def delete_job(self, upload_id): """ Delete an uploaded file and the corresponding import job @param upload_id: the upload ID """ _debug("S3Importer.delete_job(%s)" % (upload_id)) db = current.db request = self.request resource = request.resource # use self.resource? response = current.response # Get the import job ID job_id = self.job_id # Delete the import job (if any) if job_id: result = resource.import_xml(None, id = None, tree = None, job_id = job_id, delete_job = True) # @todo: check result # now delete the upload entry query = (self.upload_table.id == upload_id) count = db(query).delete() # @todo: check that the record has been deleted # Now commit the changes db.commit() result = count # return to the main import screen # @todo: check result properly if result == False: response.warning = self.messages.no_job_to_delete else: response.flash = self.messages.job_deleted # redirect to the start page (remove all vars) self.next = self.request.url(vars=dict()) return # ======================================================================== # Utility methods # ======================================================================== def _upload_form(self, r, **attr): """ Create and process the upload form, including csv_extra_fields """ EXTRA_FIELDS = "csv_extra_fields" TEMPLATE = "csv_template" REPLACE_OPTION = "replace_option" session = current.session response = current.response s3 = response.s3 request = self.request table = self.upload_table formstyle = s3.crud.formstyle response.view = self._view(request, "list_create.html") if REPLACE_OPTION in attr: replace_option = attr[REPLACE_OPTION] if replace_option is not None: table.replace_option.readable = True table.replace_option.writable = True table.replace_option.label = replace_option fields = [f for f in table if f.readable or f.writable and not f.compute] if EXTRA_FIELDS in attr: extra_fields = attr[EXTRA_FIELDS] if extra_fields is not None: fields.extend([f["field"] for f in extra_fields if "field" in f]) self.csv_extra_fields = extra_fields labels, required = s3_mark_required(fields) if required: s3.has_required = True form = SQLFORM.factory(table_name=self.UPLOAD_TABLE_NAME, labels=labels, formstyle=formstyle, upload = os.path.join(request.folder, "uploads", "imports"), separator = "", message=self.messages.file_uploaded, *fields) args = ["s3csv"] template = attr.get(TEMPLATE, True) if template is True: args.extend([self.controller, "%s.csv" % self.function]) elif isinstance(template, basestring): args.extend([self.controller, "%s.csv" % template]) elif isinstance(template, (tuple, list)): args.extend(template[:-1]) args.append("%s.csv" % template[-1]) else: template = None if template is not None: url = URL(r=request, c="static", f="formats", args=args) try: # only add the download link if the template can be opened open("%s/../%s" % (r.folder, url)) form[0][0].insert(0, 
TR(TD(A(self.messages.download_template, _href=url)), _id="template__row")) except: pass if form.accepts(r.post_vars, session, formname="upload_form"): upload_id = table.insert(**table._filter_fields(form.vars)) if self.csv_extra_fields: self.csv_extra_data = Storage() for f in self.csv_extra_fields: label = f.get("label", None) if not label: continue field = f.get("field", None) value = f.get("value", None) if field: if field.name in form.vars: data = form.vars[field.name] else: data = field.default value = data requires = field.requires if not isinstance(requires, (list, tuple)): requires = [requires] if requires: requires = requires[0] if isinstance(requires, IS_EMPTY_OR): requires = requires.other try: options = requires.options() except: pass else: for k, v in options: if k == str(data): value = v elif value is None: continue self.csv_extra_data[label] = value s3.no_formats = True return form # ------------------------------------------------------------------------- def _create_upload_dataTable(self): """ List of previous Import jobs """ db = current.db request = self.request controller = self.controller function = self.function s3 = current.response.s3 table = self.upload_table s3.filter = (table.controller == controller) & \ (table.function == function) fields = ["id", "filename", "created_on", "user_id", "replace_option", "status"] self._use_upload_table() # Hide the list of prior uploads for now #output = self._dataTable(fields, sort_by = [[2,"desc"]]) output = dict() self._use_controller_table() if request.representation == "aadata": return output query = (table.status != 3) # Status of Pending or in-Error rows = db(query).select(table.id) restrictOpen = [str(row.id) for row in rows] query = (table.status == 3) # Status of Completed rows = db(query).select(table.id) restrictView = [str(row.id) for row in rows] s3.actions = [ dict(label=str(self.messages.open_btn), _class="action-btn", url=URL(r=request, c=controller, f=function, args=["import"], vars={"job":"[id]"}), restrict = restrictOpen ), dict(label=str(self.messages.view_btn), _class="action-btn", url=URL(r=request, c=controller, f=function, args=["import"], vars={"job":"[id]"}), restrict = restrictView ), dict(label=str(self.messages.delete_btn), _class="delete-btn", url=URL(r=request, c=controller, f=function, args=["import"], vars={"job":"[id]", "delete":"True" } ) ), ] # Display an Error if no job is attached with this record query = (table.status == 1) # Pending rows = db(query).select(table.id) s3.dataTableStyleAlert = [str(row.id) for row in rows] query = (table.status == 2) # in error rows = db(query).select(table.id) s3.dataTableStyleWarning = [str(row.id) for row in rows] return output # ------------------------------------------------------------------------- def _create_import_item_dataTable(self, upload_id, job_id): """ @todo: docstring? 
""" s3 = current.response.s3 represent = {"element" : self._item_element_represent} self._use_import_item_table(job_id) # Add a filter to the dataTable query s3.filter = (self.table.job_id == job_id) & \ (self.table.tablename == self.controller_tablename) # Get a list of the records that have an error of None query = (self.table.job_id == job_id) & \ (self.table.tablename == self.controller_tablename) rows = current.db(query).select(self.table.id, self.table.error) select_list = [] error_list = [] for row in rows: if row.error: error_list.append(str(row.id)) else: select_list.append("%s" % row.id) select_id = ",".join(select_list) output = self._dataTable(["id", "element", "error"], sort_by = [[1, "asc"]], represent=represent) self._use_controller_table() if self.request.representation == "aadata": return output # Highlight rows in error in red s3.dataTableStyleWarning = error_list s3.dataTableSelectable = True s3.dataTablePostMethod = True table = output["items"] job = INPUT(_type="hidden", _id="importUploadID", _name="job", _value="%s" % upload_id) mode = INPUT(_type="hidden", _id="importMode", _name="mode", _value="Inclusive") # only select the rows with no errors selected = INPUT(_type="hidden", _id="importSelected", _name="selected", _value="[%s]" % select_id) form = FORM(table, job, mode, selected) output["items"] = form s3.dataTableSelectSubmitURL = "import?job=%s&" % upload_id s3.actions = [ dict(label= str(self.messages.item_show_details), _class="action-btn", _jqclick="$('.importItem.'+id).toggle();", ), ] return output # ------------------------------------------------------------------------- def _generate_import_job(self, upload_id, openFile, fileFormat, stylesheet=None, commit_job=False): """ This will take a s3_import_upload record and generate the importJob @param uploadFilename: The name of the uploaded file @todo: complete parameter descriptions """ _debug("S3Importer._generate_import_job(%s, %s, %s, %s)" % (upload_id, openFile, fileFormat, stylesheet ) ) db = current.db request = self.request resource = request.resource # --------------------------------------------------------------------- # CSV if fileFormat == "csv" or fileFormat == "comma-separated-values": fmt = "csv" src = openFile # --------------------------------------------------------------------- # XML # @todo: implement #elif fileFormat == "xml": # --------------------------------------------------------------------- # S3JSON # @todo: implement #elif fileFormat == "s3json": # --------------------------------------------------------------------- # PDF # @todo: implement #elif fileFormat == "pdf": # --------------------------------------------------------------------- # Unsupported Format else: msg = self.messages.unsupported_file_type % fileFormat self.error = msg _debug(msg) return None # Get the stylesheet if stylesheet == None: stylesheet = self._get_stylesheet() if stylesheet == None: return None # before calling import tree ensure the db.table is the controller_table self.table = self.controller_table self.tablename = self.controller_tablename # Pass stylesheet arguments args = Storage() mode = request.get_vars.get("xsltmode", None) if mode is not None: args.update(mode=mode) # Generate the import job resource.import_xml(src, format=fmt, extra_data=self.csv_extra_data, stylesheet=stylesheet, ignore_errors = True, commit_job = commit_job, **args) job = resource.job if job is None: if resource.error: # Error self.error = resource.error return None else: # Nothing to import self.warning = 
self.messages.no_records_to_import return None else: # Job created job_id = job.job_id errors = current.xml.collect_errors(job) if errors: current.response.s3.error_report = errors query = (self.upload_table.id == upload_id) result = db(query).update(job_id=job_id) # @todo: add check that result == 1, if not we are in error # Now commit the changes db.commit() self.job_id = job_id return True # ------------------------------------------------------------------------- def _get_stylesheet(self, file_format="csv"): """ Get the stylesheet for transformation of the import @param file_format: the import source file format """ if file_format == "csv": xslt_path = os.path.join(self.xslt_path, "s3csv") else: xslt_path = os.path.join(self.xslt_path, file_format, "import.xsl") return xslt_path # Use the "csv_stylesheet" parameter to override the CSV stylesheet subpath # and filename, e.g. # s3_rest_controller(module, resourcename, # csv_stylesheet=("inv", "inv_item.xsl")) if self.csv_stylesheet: if isinstance(self.csv_stylesheet, (tuple, list)): stylesheet = os.path.join(xslt_path, *self.csv_stylesheet) else: stylesheet = os.path.join(xslt_path, self.controller, self.csv_stylesheet) else: xslt_filename = "%s.%s" % (self.function, self.xslt_extension) stylesheet = os.path.join(xslt_path, self.controller, xslt_filename) if os.path.exists(stylesheet) is False: msg = self.messages.stylesheet_not_found % stylesheet self.error = msg _debug(msg) return None return stylesheet # ------------------------------------------------------------------------- def _commit_import_job(self, upload_id, items): """ This will save all of the selected import items @todo: parameter descriptions? """ _debug("S3Importer._commit_import_job(%s, %s)" % (upload_id, items)) db = current.db resource = self.request.resource # Load the items from the s3_import_item table self.importDetails = dict() table = self.upload_table query = (table.id == upload_id) row = db(query).select(table.job_id, table.replace_option, limitby=(0, 1)).first() if row is None: return False else: job_id = row.job_id current.response.s3.import_replace = row.replace_option itemTable = S3ImportJob.define_item_table() if itemTable != None: #**************************************************************** # EXPERIMENTAL # This doesn't delete related items # but import_tree will tidy it up later #**************************************************************** # get all the items selected for import rows = self._get_all_items(upload_id, as_string=True) # loop through each row and delete the items not required self._store_import_details(job_id, "preDelete") for id in rows: if str(id) not in items: # @todo: replace with a helper method from the API _debug("Deleting item.id = %s" % id) query = (itemTable.id == id) db(query).delete() #**************************************************************** # EXPERIMENTAL #**************************************************************** # set up the table we will import data into self.table = self.controller_table self.tablename = self.controller_tablename self._store_import_details(job_id, "preImportTree") # Now commit the remaining items msg = resource.import_xml(None, job_id = job_id, ignore_errors = True) return resource.error is None # ------------------------------------------------------------------------- def _store_import_details(self, job_id, key): """ This will store the details from an importJob @todo: parameter descriptions? 
""" _debug("S3Importer._store_import_details(%s, %s)" % (job_id, key)) itemTable = S3ImportJob.define_item_table() query = (itemTable.job_id == job_id) & \ (itemTable.tablename == self.controller_tablename) rows = current.db(query).select(itemTable.data, itemTable.error) items = [dict(data=row.data, error=row.error) for row in rows] self.importDetails[key] = items # ------------------------------------------------------------------------- def _update_upload_job(self, upload_id): """ This will record the results from the import, and change the status of the upload job @todo: parameter descriptions? @todo: report errors in referenced records, too """ _debug("S3Importer._update_upload_job(%s)" % (upload_id)) request = self.request resource = request.resource db = current.db totalPreDelete = len(self.importDetails["preDelete"]) totalPreImport = len(self.importDetails["preImportTree"]) totalIgnored = totalPreDelete - totalPreImport if resource.error_tree is None: totalErrors = 0 else: totalErrors = len(resource.error_tree.findall( "resource[@name='%s']" % resource.tablename)) totalRecords = totalPreImport - totalErrors if totalRecords < 0: totalRecords = 0 query = (self.upload_table.id == upload_id) result = db(query).update(summary_added=totalRecords, summary_error=totalErrors, summary_ignored = totalIgnored, status = 3) # Now commit the changes db.commit() return (totalRecords, totalErrors, totalIgnored) # ------------------------------------------------------------------------- def _display_completed_job(self, totals, timestmp=None): """ Generate a summary flash message for a completed import job @param totals: the job totals as tuple (total imported, total errors, total ignored) @param timestmp: the timestamp of the completion """ session = current.session msg = "%s - %s - %s" % \ (self.messages.commit_total_records_imported, self.messages.commit_total_errors, self.messages.commit_total_records_ignored) msg = msg % totals if timestmp != None: session.flash = self.messages.job_completed % \ (self.date_represent(timestmp), msg) elif totals[1] is not 0: session.error = msg elif totals[2] is not 0: session.warning = msg else: session.flash = msg # ------------------------------------------------------------------------- def _dataTable(self, list_fields = [], sort_by = [[1, "asc"]], represent={}, ): """ Method to get the data for the dataTable This can be either a raw html representation or and ajax call update Additional data will be cached to limit calls back to the server @param list_fields: list of field names @param sort_by: list of sort by columns @param represent: a dict of field callback functions used to change how the data will be displayed @return: a dict() In html representations this will be a table of the data plus the sortby instructions In ajax this will be a json response In addition the following values will be made available: totalRecords Number of records in the filtered data set totalDisplayRecords Number of records to display start Start point in the ordered data set limit Number of records in the ordered set NOTE: limit - totalDisplayRecords = total cached """ # ******************************************************************** # Common tasks # ******************************************************************** db = current.db session = current.session request = self.request response = current.response resource = self.resource s3 = response.s3 representation = request.representation table = self.table tablename = self.tablename vars = request.get_vars output = dict() # 
Check permission to read this table authorised = self.permit("read", tablename) if not authorised: request.unauthorised() # List of fields to select from # fields is a list of Field objects # list_field is a string list of field names if list_fields == []: fields = resource.readable_fields() else: fields = [table[f] for f in list_fields if f in table.fields] if not fields: fields = [] # attach any represent callbacks for f in fields: if f.name in represent: f.represent = represent[f.name] # Make sure that we have the table id as the first column if fields[0].name != table.fields[0]: fields.insert(0, table[table.fields[0]]) list_fields = [f.name for f in fields] # Filter if s3.filter is not None: self.resource.add_filter(s3.filter) # ******************************************************************** # ajax call # ******************************************************************** if representation == "aadata": start = vars.get("iDisplayStart", None) limit = vars.get("iDisplayLength", None) if limit is not None: try: start = int(start) limit = int(limit) except ValueError: start = None limit = None # use default else: start = None # use default # Using the sort variables sent from dataTables if vars.iSortingCols: orderby = self.ssp_orderby(resource, list_fields) # Echo sEcho = int(vars.sEcho or 0) # Get the list items = resource.sqltable(fields=list_fields, start=start, limit=limit, orderby=orderby, download_url=self.download_url, as_page=True) or [] # Ugly hack to change any occurrence of [id] with the true id # Needed because the represent doesn't know the id for i in range(len(items)): id = items[i][0] for j in range(len(items[i])): new = items[i][j].replace("[id]",id) items[i][j] = new totalrows = self.resource.count() result = dict(sEcho = sEcho, iTotalRecords = totalrows, iTotalDisplayRecords = totalrows, aaData = items) output = jsons(result) # ******************************************************************** # html 'initial' call # ******************************************************************** else: # catch all start = 0 limit = 1 # Sort by vars["iSortingCols"] = len(sort_by) # generate the dataTables.js variables for sorting index = 0 for col in sort_by: colName = "iSortCol_%s" % str(index) colValue = col[0] dirnName = "sSortDir_%s" % str(index) if len(col) > 1: dirnValue = col[1] else: dirnValue = "asc" vars[colName] = colValue vars[dirnName] = dirnValue # Now using these sort variables generate the order by statement orderby = self.ssp_orderby(resource, list_fields) del vars["iSortingCols"] for col in sort_by: del vars["iSortCol_%s" % str(index)] del vars["sSortDir_%s" % str(index)] # Get the first row for a quick up load items = resource.sqltable(fields=list_fields, start=start, limit=1, orderby=orderby, download_url=self.download_url) totalrows = resource.count() if items: if totalrows: if s3.dataTable_iDisplayLength: limit = 2 * s3.dataTable_iDisplayLength else: limit = 50 # Add a test on the first call here: # Now get the limit rows for ajax style update of table sqltable = resource.sqltable(fields=list_fields, start=start, limit=limit, orderby=orderby, download_url=self.download_url, as_page=True) aadata = dict(aaData = sqltable or []) # Ugly hack to change any occurrence of [id] with the true id # Needed because the represent doesn't know the id for i in range(len(aadata["aaData"])): id = aadata["aaData"][i][0] for j in range(len(aadata["aaData"][i])): new = aadata["aaData"][i][j].replace("[id]",id) aadata["aaData"][i][j] = new 
aadata.update(iTotalRecords=totalrows, iTotalDisplayRecords=totalrows) response.aadata = jsons(aadata) s3.start = 0 s3.limit = limit else: # No items in database # s3import tables don't have a delete field but kept for the record if "deleted" in table: available_records = db(table.deleted == False) else: available_records = db(table.id > 0) # check for any records on an unfiltered table if available_records.select(table.id, limitby=(0, 1)).first(): items = self.crud_string(tablename, "msg_no_match") else: items = self.crud_string(tablename, "msg_list_empty") output.update(items=items, sortby=sort_by) # Value to be added to the dataTable ajax call s3.dataTable_Method = "import" return output # ------------------------------------------------------------------------- def _item_element_represent(self, value): """ Represent the element in an import item for dataTable display @param value: the string containing the element """ T = current.T db = current.db value = S3XML.xml_decode(value) try: element = etree.fromstring(value) except: # XMLSyntaxError: return the element as-is return DIV(value) tablename = element.get("name") table = current.db[tablename] output = DIV() details = TABLE(_class="importItem [id]") header, rows = self._add_item_details(element.findall("data"), table) if header is not None: output.append(header) # Add components, if present components = element.findall("resource") for component in components: ctablename = component.get("name") ctable = db[ctablename] self._add_item_details(component.findall("data"), ctable, details=rows, prefix=True) if rows: details.append(TBODY(rows)) # Add error messages, if present errors = current.xml.collect_errors(element) if errors: details.append(TFOOT(TR(TH("%s:" % T("Errors")), TD(UL([LI(e) for e in errors]))))) if rows == [] and components == []: # At this stage we don't have anything to display to see if we can # find something to show. 
This could be the case when a table being # imported is a resolver for a many to many relationship refdetail = TABLE(_class="importItem [id]") references = element.findall("reference") for reference in references: tuid = reference.get("tuid") resource = reference.get("resource") refdetail.append(TR(TD(resource), TD(tuid))) output.append(refdetail) else: output.append(details) return str(output) # ------------------------------------------------------------------------- @staticmethod def _add_item_details(data, table, details=None, prefix=False): """ Add details of the item element @param data: the list of data elements in the item element @param table: the table for the data @param details: the existing details rows list (to append to) """ tablename = table._tablename if details is None: details = [] first = None firstString = None header = None for child in data: f = child.get("field", None) if f not in table.fields: continue elif f == "wkt": # Skip bulky WKT fields continue field = table[f] ftype = str(field.type) value = child.get("value", None) if not value: value = child.text try: value = S3Importer._decode_data(field, value) except: pass if value: value = S3XML.xml_encode(unicode(value)) else: value = "" if f != None and value != None: headerText = P(B("%s: " % f), value) if not first: first = headerText if ftype == "string" and not firstString: firstString = headerText if f == "name": header = headerText if prefix: details.append(TR(TH("%s.%s:" % (tablename, f)), TD(value))) else: details.append(TR(TH("%s:" % f), TD(value))) if not header: if firstString: header = firstString else: header = first return (header, details) # ------------------------------------------------------------------------- @staticmethod def _decode_data(field, value): """ Try to decode string data into their original type @param field: the Field instance @param value: the stringified value @todo: replace this by ordinary decoder """ if field.type == "string" or \ field.type == "string" or \ field.type == "password" or \ field.type == "upload" or \ field.type == "text": return value elif field.type == "integer" or field.type == "id": return int(value) elif field.type == "double" or field.type == "decimal": return double(value) elif field.type == 'boolean': if value and not str(value)[:1].upper() in ["F", "0"]: return "T" else: return "F" elif field.type == "date": return value # @todo fix this to get a date elif field.type == "time": return value # @todo fix this to get a time elif field.type == "datetime": return value # @todo fix this to get a datetime else: return value # ------------------------------------------------------------------------- @staticmethod def date_represent(date_obj): """ Represent a datetime object as string @param date_obj: the datetime object @todo: replace by S3DateTime method? 
""" return date_obj.strftime("%d %B %Y, %I:%M%p") # ------------------------------------------------------------------------- def _process_item_list(self, upload_id, vars): """ Get the list of IDs for the selected items from the "mode" and "selected" request variables @param upload_id: the upload_id @param vars: the request variables """ items = None if "mode" in vars: mode = vars["mode"] if "selected" in vars: selected = vars["selected"].split(",") else: selected = [] if mode == "Inclusive": items = selected elif mode == "Exclusive": all_items = self._get_all_items(upload_id, as_string=True) items = [i for i in all_items if i not in selected] return items # ------------------------------------------------------------------------- def _get_all_items(self, upload_id, as_string=False): """ Get a list of the record IDs of all import items for the the given upload ID @param upload_id: the upload ID @param as_string: represent each ID as string """ item_table = S3ImportJob.define_item_table() upload_table = self.upload_table query = (upload_table.id == upload_id) & \ (item_table.job_id == upload_table.job_id) & \ (item_table.tablename == self.controller_tablename) rows = current.db(query).select(item_table.id) if as_string: items = [str(row.id) for row in rows] else: items = [row.id for row in rows] return items # ------------------------------------------------------------------------- def _use_upload_table(self): """ Set the resource and the table to being s3_import_upload """ if self.upload_resource == None: from s3resource import S3Resource (prefix, name) = self.UPLOAD_TABLE_NAME.split("_",1) self.upload_resource = S3Resource(prefix, name) self.resource = self.upload_resource self.table = self.upload_table self.tablename = self.upload_tablename # ------------------------------------------------------------------------- def _use_controller_table(self): """ Set the resource and the table to be the imported resource """ self.resource = self.controller_resource self.table = self.controller_table self.tablename = self.controller_tablename # ------------------------------------------------------------------------- def _use_import_item_table(self, job_id): """ Set the resource and the table to being s3_import_item """ if self.item_resource == None: from s3resource import S3Resource (prefix, name) = S3ImportJob.ITEM_TABLE_NAME.split("_",1) self.item_resource = S3Resource(prefix, name) self.resource = self.item_resource self.tablename = S3ImportJob.ITEM_TABLE_NAME self.table = S3ImportJob.define_item_table() # ------------------------------------------------------------------------- def __define_table(self): """ Configures the upload table """ _debug("S3Importer.__define_table()") T = current.T db = current.db request = current.request self.upload_tablename = self.UPLOAD_TABLE_NAME import_upload_status = { 1: T("Pending"), 2: T("In error"), 3: T("Completed"), } def user_name_represent(id): # @todo: use s3_present_user? 
rep_str = "-" table = db.auth_user query = (table.id == id) row = db(query).select(table.first_name, table.last_name, limitby=(0, 1)).first() if row: rep_str = "%s %s" % (row.first_name, row.last_name) return rep_str def status_represent(index): if index == None: return "Unknown" # @todo: use messages (internationalize) else: return import_upload_status[index] now = request.utcnow table = self.define_upload_table() table.file.upload_folder = os.path.join(request.folder, "uploads", #"imports" ) table.file.comment = DIV(_class="tooltip", _title="%s|%s" % (self.messages.import_file, self.messages.import_file_comment)) table.file.label = self.messages.import_file table.status.requires = IS_IN_SET(import_upload_status, zero=None) table.status.represent = status_represent table.user_id.label = self.messages.user_name table.user_id.represent = user_name_represent table.created_on.default = now table.created_on.represent = self.date_represent table.modified_on.default = now table.modified_on.update = now table.modified_on.represent = self.date_represent table.replace_option.label = T("Replace") self.upload_table = db[self.UPLOAD_TABLE_NAME] # ------------------------------------------------------------------------- @classmethod def define_upload_table(cls): """ Defines the upload table """ db = current.db uploadfolder = os.path.join(current.request.folder, "uploads", ) if cls.UPLOAD_TABLE_NAME not in db: upload_table = db.define_table(cls.UPLOAD_TABLE_NAME, Field("controller", readable=False, writable=False), Field("function", readable=False, writable=False), Field("file", "upload", uploadfolder=os.path.join(current.request.folder, "uploads", "imports"), autodelete=True), Field("filename", readable=False, writable=False), Field("status", "integer", default=1, readable=False, writable=False), Field("extra_data", readable=False, writable=False), Field("replace_option", "boolean", default=False, readable=False, writable=False), Field("job_id", length=128, readable=False, writable=False), Field("user_id", "integer", readable=False, writable=False), Field("created_on", "datetime", readable=False, writable=False), Field("modified_on", "datetime", readable=False, writable=False), Field("summary_added", "integer", readable=False, writable=False), Field("summary_error", "integer", readable=False, writable=False), Field("summary_ignored", "integer", readable=False, writable=False), Field("completed_details", "text", readable=False, writable=False)) else: upload_table = db[cls.UPLOAD_TABLE_NAME] return upload_table # ============================================================================= class S3ImportItem(object): """ Class representing an import item (=a single record) """ METHOD = Storage( CREATE="create", UPDATE="update", DELETE="delete" ) POLICY = Storage( THIS="THIS", # keep local instance OTHER="OTHER", # update unconditionally NEWER="NEWER", # update if import is newer MASTER="MASTER" # update if import is master ) # ------------------------------------------------------------------------- def __init__(self, job): """ Constructor @param job: the import job this item belongs to """ self.job = job self.ERROR = current.manager.ERROR # Locking and error handling self.lock = False self.error = None # Identification import uuid self.item_id = uuid.uuid4() # unique ID for this item self.id = None self.uid = None # Data elements self.table = None self.tablename = None self.element = None self.data = None self.original = None self.components = [] self.references = [] self.load_components = [] 
self.load_references = [] self.parent = None self.skip = False # Conflict handling self.mci = 2 self.mtime = datetime.utcnow() self.modified = True self.conflict = False # Allowed import methods self.strategy = job.strategy # Update and conflict resolution policies self.update_policy = job.update_policy self.conflict_policy = job.conflict_policy # Actual import method self.method = None self.onvalidation = None self.onaccept = None # Item import status flags self.accepted = None self.permitted = False self.committed = False # Writeback hook for circular references: # Items which need a second write to update references self.update = [] # ------------------------------------------------------------------------- def __repr__(self): """ Helper method for debugging """ _str = "<S3ImportItem %s {item_id=%s uid=%s id=%s error=%s data=%s}>" % \ (self.table, self.item_id, self.uid, self.id, self.error, self.data) return _str # ------------------------------------------------------------------------- def parse(self, element, original=None, table=None, tree=None, files=None): """ Read data from a <resource> element @param element: the element @param table: the DB table @param tree: the import tree @param files: uploaded files @returns: True if successful, False if not (sets self.error) """ db = current.db xml = current.xml manager = current.manager validate = manager.validate s3db = current.s3db self.element = element if table is None: tablename = element.get(xml.ATTRIBUTE.name, None) try: table = s3db[tablename] except: self.error = self.ERROR.BAD_RESOURCE element.set(xml.ATTRIBUTE.error, self.error) return False self.table = table self.tablename = table._tablename if original is None: original = manager.original(table, element) data = xml.record(table, element, files=files, original=original, validate=validate) if data is None: self.error = self.ERROR.VALIDATION_ERROR self.accepted = False if not element.get(xml.ATTRIBUTE.error, False): element.set(xml.ATTRIBUTE.error, str(self.error)) return False self.data = data if original is not None: self.original = original self.id = original[table._id.name] if xml.UID in original: self.uid = original[xml.UID] self.data.update({xml.UID:self.uid}) elif xml.UID in data: self.uid = data[xml.UID] if xml.MTIME in data: self.mtime = data[xml.MTIME] if xml.MCI in data: self.mci = data[xml.MCI] _debug("New item: %s" % self) return True # ------------------------------------------------------------------------- def deduplicate(self): RESOLVER = "deduplicate" if self.id: return table = self.table if table is None: return if self.original is not None: original = self.original else: original = current.manager.original(table, self.data) if original is not None: self.original = original self.id = original[table._id.name] UID = current.xml.UID if UID in original: self.uid = original[UID] self.data.update({UID:self.uid}) self.method = self.METHOD.UPDATE else: resolve = current.s3db.get_config(self.tablename, RESOLVER) if self.data and resolve: resolve(self) return # ------------------------------------------------------------------------- def authorize(self): """ Authorize the import of this item, sets self.permitted """ db = current.db manager = current.manager authorize = manager.permit self.permitted = False if not self.table: return False prefix = self.tablename.split("_", 1)[0] if prefix in manager.PROTECTED: return False if not authorize: self.permitted = True self.method = self.METHOD.CREATE if self.id: if self.data.deleted is True: self.method = self.METHOD.DELETE 
self.accepted = True else: if not self.original: query = (self.table.id == self.id) self.original = db(query).select(limitby=(0, 1)).first() if self.original: self.method = self.METHOD.UPDATE if self.method == self.METHOD.CREATE: self.id = 0 if authorize: self.permitted = authorize(self.method, self.tablename, record_id=self.id) return self.permitted # ------------------------------------------------------------------------- def validate(self): """ Validate this item (=record onvalidation), sets self.accepted """ if self.accepted is not None: return self.accepted if self.data is None or not self.table: self.accepted = False return False form = Storage() form.method = self.method form.vars = self.data if self.id: form.vars.id = self.id form.errors = Storage() tablename = self.tablename key = "%s_onvalidation" % self.method s3db = current.s3db onvalidation = s3db.get_config(tablename, key, s3db.get_config(tablename, "onvalidation")) if onvalidation: try: callback(onvalidation, form, tablename=tablename) except: pass # @todo need a better handler here. self.accepted = True if form.errors: error = current.xml.ATTRIBUTE.error for k in form.errors: e = self.element.findall("data[@field='%s']" % k) if not e: e = self.element.findall("reference[@field='%s']" % k) if not e: e = self.element form.errors[k] = "[%s] %s" % (k, form.errors[k]) else: e = e[0] e.set(error, str(form.errors[k]).decode("utf-8")) self.error = self.ERROR.VALIDATION_ERROR self.accepted = False return self.accepted # ------------------------------------------------------------------------- def commit(self, ignore_errors=False): """ Commit this item to the database @param ignore_errors: skip invalid components (still reports errors) """ db = current.db s3db = current.s3db xml = current.xml manager = current.manager table = self.table # Check if already committed if self.committed: # already committed return True # If the parent item gets skipped, then skip this item as well if self.parent is not None and self.parent.skip: return True _debug("Committing item %s" % self) # Resolve references self._resolve_references() # Validate if not self.validate(): _debug("Validation error: %s (%s)" % (self.error, xml.tostring(self.element, pretty_print=True))) self.skip = True return ignore_errors elif self.components: for component in self.components: if not component.validate(): if hasattr(component, "tablename"): tn = component.tablename else: tn = None _debug("Validation error, component=%s" % tn) component.skip = True # Skip this item on any component validation errors # unless ignore_errors is True if ignore_errors: continue else: self.skip = True return False # De-duplicate self.deduplicate() # Log this item if manager.log is not None: manager.log(self) # Authorize item if not self.authorize(): _debug("Not authorized - skip") self.error = manager.ERROR.NOT_PERMITTED self.skip = True return ignore_errors _debug("Method: %s" % self.method) # Check if import method is allowed in strategy if not isinstance(self.strategy, (list, tuple)): self.strategy = [self.strategy] if self.method not in self.strategy: _debug("Method not in strategy - skip") self.error = manager.ERROR.NOT_PERMITTED self.skip = True return True this = self.original if not this and self.id and \ self.method in (self.METHOD.UPDATE, self.METHOD.DELETE): query = (table.id == self.id) this = db(query).select(limitby=(0, 1)).first() this_mtime = None this_mci = 0 if this: if xml.MTIME in table.fields: this_mtime = xml.as_utc(this[xml.MTIME]) if xml.MCI in table.fields: this_mci 
= this[xml.MCI] self.mtime = xml.as_utc(self.mtime) # Conflict detection this_modified = True self.modified = True self.conflict = False last_sync = xml.as_utc(self.job.last_sync) if last_sync: if this_mtime and this_mtime < last_sync: this_modified = False if self.mtime and self.mtime < last_sync: self.modified = False if self.modified and this_modified: self.conflict = True if self.conflict and \ self.method in (self.METHOD.UPDATE, self.METHOD.DELETE): _debug("Conflict: %s" % self) if self.job.onconflict: self.job.onconflict(self) if self.data is not None: data = Storage(self.data) else: data = Storage() # Update existing record if self.method == self.METHOD.UPDATE: if this: if "deleted" in this and this.deleted: policy = self._get_update_policy(None) if policy == self.POLICY.NEWER and \ this_mtime and this_mtime > self.mtime or \ policy == self.POLICY.MASTER and \ (this_mci == 0 or self.mci != 1): self.skip = True return True fields = data.keys() for f in fields: if f not in this: continue if isinstance(this[f], datetime): if xml.as_utc(data[f]) == xml.as_utc(this[f]): del data[f] continue else: if data[f] == this[f]: del data[f] continue remove = False policy = self._get_update_policy(f) if policy == self.POLICY.THIS: remove = True elif policy == self.POLICY.NEWER: if this_mtime and this_mtime > self.mtime: remove = True elif policy == self.POLICY.MASTER: if this_mci == 0 or self.mci != 1: remove = True if remove: del data[f] self.data.update({f:this[f]}) if "deleted" in this and this.deleted: # Undelete re-imported records: data.update(deleted=False) if "deleted_fk" in table: data.update(deleted_fk="") if "created_by" in table: data.update(created_by=table.created_by.default) if "modified_by" in table: data.update(modified_by=table.modified_by.default) if not self.skip and not self.conflict and \ (len(data) or self.components or self.references): if self.uid and xml.UID in table: data.update({xml.UID:self.uid}) if xml.MTIME in table: data.update({xml.MTIME: self.mtime}) if xml.MCI in data: # retain local MCI on updates del data[xml.MCI] query = (table._id == self.id) try: success = db(query).update(**dict(data)) except: self.error = sys.exc_info()[1] self.skip = True return False if success: self.committed = True else: # Nothing to update self.committed = True # Create new record elif self.method == self.METHOD.CREATE: # Do not apply field policy to UID and MCI UID = xml.UID if UID in data: del data[UID] MCI = xml.MCI if MCI in data: del data[MCI] for f in data: policy = self._get_update_policy(f) if policy == self.POLICY.MASTER and self.mci != 1: del data[f] if len(data) or self.components or self.references: # Restore UID and MCI if self.uid and UID in table.fields: data.update({UID:self.uid}) if MCI in table.fields: data.update({MCI:self.mci}) # Insert the new record try: success = table.insert(**dict(data)) except: self.error = sys.exc_info()[1] self.skip = True return False if success: self.id = success self.committed = True else: # Nothing to create self.skip = True return True # Delete local record elif self.method == self.METHOD.DELETE: if this: if this.deleted: self.skip = True policy = self._get_update_policy(None) if policy == self.POLICY.THIS: self.skip = True elif policy == self.POLICY.NEWER and \ (this_mtime and this_mtime > self.mtime): self.skip = True elif policy == self.POLICY.MASTER and \ (this_mci == 0 or self.mci != 1): self.skip = True else: self.skip = True if not self.skip and not self.conflict: prefix, name = self.tablename.split("_", 1) resource = 
manager.define_resource(prefix, name, id=self.id) ondelete = s3db.get_config(self.tablename, "ondelete") success = resource.delete(ondelete=ondelete, cascade=True) if resource.error: self.error = resource.error self.skip = True return ignore_errors _debug("Success: %s, id=%s %sd" % (self.tablename, self.id, self.skip and "skippe" or \ self.method)) return True # Audit + onaccept on successful commits if self.committed: form = Storage() form.method = self.method form.vars = self.data tablename = self.tablename prefix, name = tablename.split("_", 1) if self.id: form.vars.id = self.id if manager.audit is not None: manager.audit(self.method, prefix, name, form=form, record=self.id, representation="xml") s3db.update_super(table, form.vars) if self.method == self.METHOD.CREATE: current.auth.s3_set_record_owner(table, self.id) key = "%s_onaccept" % self.method onaccept = s3db.get_config(tablename, key, s3db.get_config(tablename, "onaccept")) if onaccept: callback(onaccept, form, tablename=self.tablename) # Update referencing items if self.update and self.id: for u in self.update: item = u.get("item", None) if not item: continue field = u.get("field", None) if isinstance(field, (list, tuple)): pkey, fkey = field query = table.id == self.id row = db(query).select(table[pkey], limitby=(0, 1)).first() if row: item._update_reference(fkey, row[pkey]) else: item._update_reference(field, self.id) _debug("Success: %s, id=%s %sd" % (self.tablename, self.id, self.skip and "skippe" or \ self.method)) return True # ------------------------------------------------------------------------- def _get_update_policy(self, field): """ Get the update policy for a field (if the item will update an existing record) @param field: the name of the field """ if isinstance(self.update_policy, dict): r = self.update_policy.get(field, self.update_policy.get("__default__", self.POLICY.THIS)) else: r = self.update_policy if not r in self.POLICY.values(): r = self.POLICY.THIS return r # ------------------------------------------------------------------------- def _resolve_references(self): """ Resolve the references of this item (=look up all foreign keys from other items of the same job). If a foreign key is not yet available, it will be scheduled for later update. 
""" if not self.table: return items = self.job.items for reference in self.references: item = None field = reference.field entry = reference.entry if not entry: continue # Resolve key tuples if isinstance(field, (list,tuple)): pkey, fkey = field else: pkey, fkey = ("id", field) # Resolve the key table name ktablename, key, multiple = s3_get_foreign_key(self.table[fkey]) if not ktablename: if self.tablename == "auth_user" and \ fkey == "organisation_id": ktablename = "org_organisation" else: continue if entry.tablename: ktablename = entry.tablename try: ktable = current.s3db[ktablename] except: continue # Resolve the foreign key (value) fk = entry.id if entry.item_id: item = items[entry.item_id] if item: fk = item.id if fk and pkey != "id": row = current.db(ktable._id == fk).select(ktable[pkey], limitby=(0, 1)).first() if not row: fk = None continue else: fk = row[pkey] # Update record data if fk: if multiple: val = self.data.get(fkey, []) if fk not in val: val.append(fk) self.data[fkey] = val else: self.data[fkey] = fk else: if fkey in self.data and not multiple: del self.data[fkey] if item: item.update.append(dict(item=self, field=fkey)) # ------------------------------------------------------------------------- def _update_reference(self, field, value): """ Helper method to update a foreign key in an already written record. Will be called by the referenced item after (and only if) it has been committed. This is only needed if the reference could not be resolved before commit due to circular references. @param field: the field name of the foreign key @param value: the value of the foreign key """ if not value or not self.table: return db = current.db if self.id and self.permitted: fieldtype = str(self.table[field].type) if fieldtype.startswith("list:reference"): query = (self.table.id == self.id) record = db(query).select(self.table[field], limitby=(0,1)).first() if record: values = record[field] if value not in values: values.append(value) db(self.table.id == self.id).update(**{field:values}) else: db(self.table.id == self.id).update(**{field:value}) # ------------------------------------------------------------------------- def store(self, item_table=None): """ Store this item in the DB """ _debug("Storing item %s" % self) if item_table is None: return None db = current.db query = item_table.item_id == self.item_id row = db(query).select(item_table.id, limitby=(0, 1)).first() if row: record_id = row.id else: record_id = None record = Storage(job_id = self.job.job_id, item_id = self.item_id, tablename = self.tablename, record_uid = self.uid, error = self.error) if self.element is not None: element_str = current.xml.tostring(self.element, xml_declaration=False) record.update(element=element_str) if self.data is not None: data = Storage() for f in self.data.keys(): table = self.table if f not in table.fields: continue fieldtype = str(self.table[f].type) if fieldtype == "id" or s3_has_foreign_key(self.table[f]): continue data.update({f:self.data[f]}) data_str = cPickle.dumps(data) record.update(data=data_str) ritems = [] for reference in self.references: field = reference.field entry = reference.entry store_entry = None if entry: if entry.item_id is not None: store_entry = dict(field=field, item_id=str(entry.item_id)) elif entry.uid is not None: store_entry = dict(field=field, tablename=entry.tablename, uid=str(entry.uid)) if store_entry is not None: ritems.append(json.dumps(store_entry)) if ritems: record.update(ritems=ritems) citems = [c.item_id for c in self.components] if citems: 
record.update(citems=citems) if self.parent: record.update(parent=self.parent.item_id) if record_id: db(item_table.id == record_id).update(**record) else: record_id = item_table.insert(**record) _debug("Record ID=%s" % record_id) return record_id # ------------------------------------------------------------------------- def restore(self, row): """ Restore an item from a item table row. This does not restore the references (since this can not be done before all items are restored), must call job.restore_references() to do that @param row: the item table row """ xml = current.xml self.item_id = row.item_id self.accepted = None self.permitted = False self.committed = False tablename = row.tablename self.id = None self.uid = row.record_uid if row.data is not None: self.data = cPickle.loads(row.data) else: self.data = Storage() data = self.data if xml.MTIME in data: self.mtime = data[xml.MTIME] if xml.MCI in data: self.mci = data[xml.MCI] UID = xml.UID if UID in data: self.uid = data[UID] self.element = etree.fromstring(row.element) if row.citems: self.load_components = row.citems if row.ritems: self.load_references = [json.loads(ritem) for ritem in row.ritems] self.load_parent = row.parent try: table = current.s3db[tablename] except: self.error = self.ERROR.BAD_RESOURCE return False else: self.table = table self.tablename = tablename original = current.manager.original(table, self.data) if original is not None: self.original = original self.id = original[table._id.name] if UID in original: self.uid = original[UID] self.data.update({UID:self.uid}) self.error = row.error if self.error and not self.data: # Validation error return False return True # ============================================================================= class S3ImportJob(): """ Class to import an element tree into the database """ JOB_TABLE_NAME = "s3_import_job" ITEM_TABLE_NAME = "s3_import_item" # ------------------------------------------------------------------------- def __init__(self, manager, table, tree=None, files=None, job_id=None, strategy=None, update_policy=None, conflict_policy=None, last_sync=None, onconflict=None): """ Constructor @param manager: the S3RequestManager instance performing this job @param tree: the element tree to import @param files: files attached to the import (for upload fields) @param job_id: restore job from database (record ID or job_id) @param strategy: the import strategy @param update_policy: the update policy @param conflict_policy: the conflict resolution policy @param last_sync: the last synchronization time stamp (datetime) @param onconflict: custom conflict resolver function """ self.error = None # the last error self.error_tree = etree.Element(current.xml.TAG.root) self.table = table self.tree = tree self.files = files self.directory = Storage() self.elements = Storage() self.items = Storage() self.references = [] self.job_table = None self.item_table = None self.count = 0 # total number of records imported self.created = [] # IDs of created records self.updated = [] # IDs of updated records self.deleted = [] # IDs of deleted records # Import strategy self.strategy = strategy if self.strategy is None: self.strategy = [S3ImportItem.METHOD.CREATE, S3ImportItem.METHOD.UPDATE, S3ImportItem.METHOD.DELETE] if not isinstance(self.strategy, (tuple, list)): self.strategy = [self.strategy] # Update policy (default=always update) self.update_policy = update_policy if not self.update_policy: self.update_policy = S3ImportItem.POLICY.OTHER # Conflict resolution policy (default=always update) 
self.conflict_policy = conflict_policy if not self.conflict_policy: self.conflict_policy = S3ImportItem.POLICY.OTHER # Synchronization settings self.mtime = None self.last_sync = last_sync self.onconflict = onconflict if job_id: self.__define_tables() jobtable = self.job_table if str(job_id).isdigit(): query = jobtable.id == job_id else: query = jobtable.job_id == job_id row = current.db(query).select(limitby=(0, 1)).first() if not row: raise SyntaxError("Job record not found") self.job_id = row.job_id if not self.table: tablename = row.tablename try: table = current.s3db[tablename] except: pass else: import uuid self.job_id = uuid.uuid4() # unique ID for this job # ------------------------------------------------------------------------- def add_item(self, element=None, original=None, components=None, parent=None, joinby=None): """ Parse and validate an XML element and add it as new item to the job. @param element: the element @param original: the original DB record (if already available, will otherwise be looked-up by this function) @param components: a dictionary of components (as in S3Resource) to include in the job (defaults to all defined components) @param parent: the parent item (if this is a component) @param joinby: the component join key(s) (if this is a component) @returns: a unique identifier for the new item, or None if there was an error. self.error contains the last error, and self.error_tree an element tree with all failing elements including error attributes. """ if element in self.elements: # element has already been added to this job return self.elements[element] # Parse the main element item = S3ImportItem(self) # Update lookup lists item_id = item.item_id self.items[item_id] = item if element is not None: self.elements[element] = item_id if not item.parse(element, original=original, files=self.files): self.error = item.error item.accepted = False if parent is None: self.error_tree.append(deepcopy(item.element)) else: # Now parse the components table = item.table components = current.s3db.get_components(table, names=components) cnames = Storage() cinfos = Storage() for alias in components: component = components[alias] pkey = component.pkey if component.linktable: ctable = component.linktable fkey = component.lkey else: ctable = component.table fkey = component.fkey ctablename = ctable._tablename if ctablename in cnames: cnames[ctablename].append(alias) else: cnames[ctablename] = [alias] cinfos[(ctablename, alias)] = Storage(component = component, ctable = ctable, pkey = pkey, fkey = fkey, original = None, uid = None) add_item = self.add_item xml = current.xml for celement in xml.components(element, names=cnames.keys()): # Get the component tablename ctablename = celement.get(xml.ATTRIBUTE.name, None) if not ctablename: continue # Get the component alias (for disambiguation) calias = celement.get(xml.ATTRIBUTE.alias, None) if calias is None: if ctablename not in cnames: continue aliases = cnames[ctablename] if len(aliases) == 1: calias = aliases[0] else: # ambiguous components *must* use alias continue if (ctablename, calias) not in cinfos: continue else: cinfo = cinfos[(ctablename, calias)] component = cinfo.component original = cinfo.original ctable = cinfo.ctable pkey = cinfo.pkey fkey = cinfo.fkey if not component.multiple: if cinfo.uid is not None: continue if original is None and item.id: query = (table.id == item.id) & \ (table[pkey] == ctable[fkey]) original = current.db(query).select(ctable.ALL, limitby=(0, 1)).first() if original: cinfo.uid = uid = 
original.get(xml.UID, None) celement.set(xml.UID, uid) cinfo.original = original item_id = add_item(element=celement, original=original, parent=item, joinby=(pkey, fkey)) if item_id is None: item.error = self.error self.error_tree.append(deepcopy(item.element)) else: citem = self.items[item_id] citem.parent = item item.components.append(citem) # Handle references table = item.table tree = self.tree if tree is not None: fields = [table[f] for f in table.fields] rfields = filter(s3_has_foreign_key, fields) item.references = self.lookahead(element, table=table, fields=rfields, tree=tree, directory=self.directory) for reference in item.references: entry = reference.entry if entry and entry.element is not None: item_id = add_item(element=entry.element) if item_id: entry.update(item_id=item_id) # Parent reference if parent is not None: entry = Storage(item_id=parent.item_id, element=parent.element, tablename=parent.tablename) item.references.append(Storage(field=joinby, entry=entry)) return item.item_id # ------------------------------------------------------------------------- def lookahead(self, element, table=None, fields=None, tree=None, directory=None): """ Find referenced elements in the tree @param element: the element @param table: the DB table @param fields: the FK fields in the table @param tree: the import tree @param directory: a dictionary to lookup elements in the tree (will be filled in by this function) """ db = current.db s3db = current.s3db xml = current.xml import_uid = xml.import_uid ATTRIBUTE = xml.ATTRIBUTE TAG = xml.TAG UID = xml.UID reference_list = [] root = None if tree is not None: if isinstance(tree, etree._Element): root = tree else: root = tree.getroot() references = element.findall("reference") for reference in references: field = reference.get(ATTRIBUTE.field, None) # Ignore references without valid field-attribute if not field or field not in fields: continue # Find the key table multiple = False fieldtype = str(table[field].type) if fieldtype.startswith("reference"): ktablename = fieldtype[10:] elif fieldtype.startswith("list:reference"): ktablename = fieldtype[15:] multiple = True else: # ignore if the field is not a reference type continue try: ktable = s3db[ktablename] except: # Invalid tablename - skip continue tablename = reference.get(ATTRIBUTE.resource, None) # Ignore references to tables without UID field: if UID not in ktable.fields: continue # Fall back to key table name if tablename is not specified: if not tablename: tablename = ktablename # Super-entity references must use the super-key: if tablename != ktablename: field = (ktable._id.name, field) # Ignore direct references to super-entities: if tablename == ktablename and ktable._id.name != "id": continue # Get the foreign key uids = reference.get(UID, None) attr = UID if not uids: uids = reference.get(ATTRIBUTE.tuid, None) attr = ATTRIBUTE.tuid if uids and multiple: uids = json.loads(uids) elif uids: uids = [uids] # Find the elements and map to DB records relements = [] # Create a UID<->ID map id_map = Storage() if attr == UID and uids: _uids = map(import_uid, uids) query = ktable[UID].belongs(_uids) records = db(query).select(ktable.id, ktable[UID]) id_map = dict([(r[UID], r.id) for r in records]) if not uids: # Anonymous reference: <resource> inside the element expr = './/%s[@%s="%s"]' % (TAG.resource, ATTRIBUTE.name, tablename) relements = reference.xpath(expr) if relements and not multiple: relements = [relements[0]] elif root is not None: for uid in uids: entry = None # Entry already in 
directory? if directory is not None: entry = directory.get((tablename, attr, uid), None) if not entry: expr = ".//%s[@%s='%s' and @%s='%s']" % ( TAG.resource, ATTRIBUTE.name, tablename, attr, uid) e = root.xpath(expr) if e: # Element in the source => append to relements relements.append(e[0]) else: # No element found, see if original record exists _uid = import_uid(uid) if _uid and _uid in id_map: _id = id_map[_uid] entry = Storage(tablename=tablename, element=None, uid=uid, id=_id, item_id=None) reference_list.append(Storage(field=field, entry=entry)) else: continue else: reference_list.append(Storage(field=field, entry=entry)) # Create entries for all newly found elements for relement in relements: uid = relement.get(attr, None) if attr == UID: _uid = import_uid(uid) id = _uid and id_map and id_map.get(_uid, None) or None else: _uid = None id = None entry = Storage(tablename=tablename, element=relement, uid=uid, id=id, item_id=None) # Add entry to directory if uid and directory is not None: directory[(tablename, attr, uid)] = entry # Append the entry to the reference list reference_list.append(Storage(field=field, entry=entry)) return reference_list # ------------------------------------------------------------------------- def load_item(self, row): """ Load an item from the item table (counterpart to add_item when restoring a job from the database) """ item = S3ImportItem(self) if not item.restore(row): self.error = item.error if item.load_parent is None: self.error_tree.append(deepcopy(item.element)) # Update lookup lists item_id = item.item_id self.items[item_id] = item return item_id # ------------------------------------------------------------------------- def resolve(self, item_id, import_list): """ Resolve the reference list of an item @param item_id: the import item UID @param import_list: the ordered list of items (UIDs) to import """ item = self.items[item_id] if item.lock or item.accepted is False: return False references = [] for reference in item.references: ritem_id = reference.entry.item_id if ritem_id and ritem_id not in import_list: references.append(ritem_id) for ritem_id in references: item.lock = True if self.resolve(ritem_id, import_list): import_list.append(ritem_id) item.lock = False return True # ------------------------------------------------------------------------- def commit(self, ignore_errors=False): """ Commit the import job to the DB @param ignore_errors: skip any items with errors (does still report the errors) """ ATTRIBUTE = current.xml.ATTRIBUTE # Resolve references import_list = [] for item_id in self.items: self.resolve(item_id, import_list) if item_id not in import_list: import_list.append(item_id) # Commit the items items = self.items count = 0 mtime = None created = [] cappend = created.append updated = [] deleted = [] tablename = self.table._tablename for item_id in import_list: item = items[item_id] error = None success = item.commit(ignore_errors=ignore_errors) error = item.error if error: self.error = error element = item.element if element is not None: if not element.get(ATTRIBUTE.error, False): element.set(ATTRIBUTE.error, str(self.error)) self.error_tree.append(deepcopy(element)) if not ignore_errors: return False elif item.tablename == tablename: count += 1 if mtime is None or item.mtime > mtime: mtime = item.mtime if item.id: if item.method == item.METHOD.CREATE: cappend(item.id) elif item.method == item.METHOD.UPDATE: updated.append(item.id) elif item.method == item.METHOD.DELETE: deleted.append(item.id) self.count = count self.mtime = 
mtime self.created = created self.updated = updated self.deleted = deleted return True # ------------------------------------------------------------------------- def __define_tables(self): """ Define the database tables for jobs and items """ self.job_table = self.define_job_table() self.item_table = self.define_item_table() # ------------------------------------------------------------------------- @classmethod def define_job_table(cls): db = current.db if cls.JOB_TABLE_NAME not in db: job_table = db.define_table(cls.JOB_TABLE_NAME, Field("job_id", length=128, unique=True, notnull=True), Field("tablename"), Field("timestmp", "datetime", default=datetime.utcnow())) else: job_table = db[cls.JOB_TABLE_NAME] return job_table # ------------------------------------------------------------------------- @classmethod def define_item_table(cls): db = current.db if cls.ITEM_TABLE_NAME not in db: item_table = db.define_table(cls.ITEM_TABLE_NAME, Field("item_id", length=128, unique=True, notnull=True), Field("job_id", length=128), Field("tablename", length=128), #Field("record_id", "integer"), Field("record_uid"), Field("error", "text"), Field("data", "text"), Field("element", "text"), Field("ritems", "list:string"), Field("citems", "list:string"), Field("parent", length=128)) else: item_table = db[cls.ITEM_TABLE_NAME] return item_table # ------------------------------------------------------------------------- def store(self): """ Store this job and all its items in the job table """ db = current.db _debug("Storing Job ID=%s" % self.job_id) self.__define_tables() jobtable = self.job_table query = jobtable.job_id == self.job_id row = db(query).select(jobtable.id, limitby=(0, 1)).first() if row: record_id = row.id else: record_id = None record = Storage(job_id=self.job_id) try: tablename = self.table._tablename except: pass else: record.update(tablename=tablename) for item in self.items.values(): item.store(item_table=self.item_table) if record_id: db(jobtable.id == record_id).update(**record) else: record_id = jobtable.insert(**record) _debug("Job record ID=%s" % record_id) return record_id # ------------------------------------------------------------------------- def get_tree(self): """ Reconstruct the element tree of this job """ if self.tree is not None: return tree else: xml = current.xml root = etree.Element(xml.TAG.root) for item in self.items.values(): if item.element is not None and not item.parent: if item.tablename == self.table._tablename or \ item.element.get(xml.UID, None) or \ item.element.get(xml.ATTRIBUTE.tuid, None): root.append(deepcopy(item.element)) return etree.ElementTree(root) # ------------------------------------------------------------------------- def delete(self): """ Delete this job and all its items from the job table """ db = current.db _debug("Deleting job ID=%s" % self.job_id) self.__define_tables() item_table = self.item_table query = item_table.job_id == self.job_id db(query).delete() job_table = self.job_table query = job_table.job_id == self.job_id db(query).delete() # ------------------------------------------------------------------------- def restore_references(self): """ Restore the job's reference structure after loading items from the item table """ db = current.db UID = current.xml.UID for item in self.items.values(): for citem_id in item.load_components: if citem_id in self.items: item.components.append(self.items[citem_id]) item.load_components = [] for ritem in item.load_references: field = ritem["field"] if "item_id" in ritem: item_id = 
ritem["item_id"] if item_id in self.items: _item = self.items[item_id] entry = Storage(tablename=_item.tablename, element=_item.element, uid=_item.uid, id=_item.id, item_id=item_id) item.references.append(Storage(field=field, entry=entry)) else: _id = None uid = ritem.get("uid", None) tablename = ritem.get("tablename", None) if tablename and uid: try: table = current.s3db[tablename] except: continue if UID not in table.fields: continue query = table[UID] == uid row = db(query).select(table._id, limitby=(0, 1)).first() if row: _id = row[table._id.name] else: continue entry = Storage(tablename = ritem["tablename"], element=None, uid = ritem["uid"], id = _id, item_id = None) item.references.append(Storage(field=field, entry=entry)) item.load_references = [] if item.load_parent is not None: item.parent = self.items[item.load_parent] item.load_parent = None # END =========================================================================
ashwyn/eden-message_parser
modules/s3/s3import.py
Python
mit
123,322
package br.ufrj.g2matricula.domain; import org.springframework.data.elasticsearch.annotations.Document; import javax.persistence.*; import javax.validation.constraints.*; import java.io.Serializable; import java.util.Objects; import br.ufrj.g2matricula.domain.enumeration.MatriculaStatus; /** * A Matricula. */ @Entity @Table(name = "matricula") @Document(indexName = "matricula") public class Matricula implements Serializable { private static final long serialVersionUID = 1L; @Id @GeneratedValue(strategy = GenerationType.IDENTITY) private Long id; @NotNull @Enumerated(EnumType.STRING) @Column(name = "status", nullable = false) private MatriculaStatus status; @ManyToOne private Aluno dreAluno; @ManyToOne private Curso curso; // jhipster-needle-entity-add-field - JHipster will add fields here, do not remove public Long getId() { return id; } public void setId(Long id) { this.id = id; } public MatriculaStatus getStatus() { return status; } public Matricula status(MatriculaStatus status) { this.status = status; return this; } public void setStatus(MatriculaStatus status) { this.status = status; } public Aluno getDreAluno() { return dreAluno; } public Matricula dreAluno(Aluno aluno) { this.dreAluno = aluno; return this; } public void setDreAluno(Aluno aluno) { this.dreAluno = aluno; } public Curso getCurso() { return curso; } public Matricula curso(Curso curso) { this.curso = curso; return this; } public void setCurso(Curso curso) { this.curso = curso; } // jhipster-needle-entity-add-getters-setters - JHipster will add getters and setters here, do not remove @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } Matricula matricula = (Matricula) o; if (matricula.getId() == null || getId() == null) { return false; } return Objects.equals(getId(), matricula.getId()); } @Override public int hashCode() { return Objects.hashCode(getId()); } @Override public String toString() { return "Matricula{" + "id=" + getId() + ", status='" + getStatus() + "'" + "}"; } }
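// Usage sketch (illustrative only, not part of the entity): the fluent setters
// above allow a Matricula to be built and compared like this. The enum constant
// and the aluno/curso instances are assumptions; equals() compares by id only.
//
// Matricula matricula = new Matricula()
//     .status(MatriculaStatus.MATRICULADO)   // assumed enum value
//     .dreAluno(aluno)
//     .curso(curso);
// matricula.setId(1L);
// boolean sameRecord = matricula.equals(otherMatricula);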
DamascenoRafael/cos482-qualidade-de-software
www/src/main/java/br/ufrj/g2matricula/domain/Matricula.java
Java
mit
2,535
<?php class Admin_GeneralModel extends CI_Model { public function GetAdminModuleCategoryList() { $this->db->select('CID, CategoryName'); $this->db->from('admin_module_category'); $this->db->order_by('Ordering'); $query = $this->db->get(); if($query->num_rows()) return $query; else return FALSE; } public function GetAdminModuleList() { $this->db->select('MID, CID, ModuleName, DisplayName'); $this->db->from('admin_module'); $this->db->order_by('Ordering'); $query = $this->db->get(); if($query->num_rows()) return $query; else return FALSE; } public function GetAdminModuleActions($MID = NULL) { $this->db->select('AID, MID, Action'); $this->db->from('admin_module_action'); if($MID != NULL) $this->db->where('MID', $MID); $query = $this->db->get(); if($query->num_rows()) return $query->result(); else return FALSE; } } ?>
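<?php
/*
 * Usage sketch (illustrative): in a CodeIgniter controller this model would
 * typically be loaded and queried as below. The controller context and the
 * echo are assumptions; the method and column names come from the model above.
 *
 * $this->load->model('Admin_GeneralModel');
 * $categories = $this->Admin_GeneralModel->GetAdminModuleCategoryList();
 * if ($categories !== FALSE) {
 *     foreach ($categories->result() as $category) {
 *         echo $category->CategoryName;
 *     }
 * }
 */
?>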
dernst91/deCMS
application/models/Admin_GeneralModel.php
PHP
mit
895
RSpec.describe("executables", skip_db_cleaner: true) do include SharedSpecSetup before do #migrations don't work if we are still connected to the db ActiveRecord::Base.remove_connection end it "extracts the schema" do output = `bin/extract #{config_filename} production #{schema_filename} 2>&1` expect(output).to match(/extracted to/) expect(output).to match(/#{schema_filename}/) end it "transfers the schema" do output = `bin/transfer-schema #{config_filename} production test config/include_tables.txt 2>&1` expect(output).to match(/transferred schema from production to test/) end end
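# Usage sketch (illustrative): these examples shell out to the bin/ scripts, so
# they are normally run from the project root with the config files in place,
# e.g. `bundle exec rspec spec/bin_spec.rb`. The config_filename and
# schema_filename helpers are assumed to be provided by SharedSpecSetup; they
# are not defined in this file.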
ifad/sybase-schema-extractor
spec/bin_spec.rb
Ruby
mit
636
// @flow import { StyleSheet } from 'react-native'; import { colors } from '../../themes'; const styles = StyleSheet.create({ divider: { height: 1, marginHorizontal: 0, backgroundColor: colors.darkDivider, }, }); export default styles;
Dennitz/Timetable
src/components/styles/HorizontalDividerList.styles.js
JavaScript
mit
254
require 'ffi' module ProcessShared module Posix module Errno extend FFI::Library ffi_lib FFI::Library::LIBC attach_variable :errno, :int # Replace methods in +syms+ with error checking wrappers that # invoke the original method and raise a {SystemCallError} with # the current errno if the return value is an error. # # Errors are detected if the block returns true when called with # the original method's return value. def error_check(*syms, &is_err) unless block_given? is_err = lambda { |v| (v == -1) } end syms.each do |sym| method = self.method(sym) new_method_body = proc do |*args| ret = method.call(*args) if is_err.call(ret) raise SystemCallError.new("error in #{sym}", Errno.errno) else ret end end define_singleton_method(sym, &new_method_body) define_method(sym, &new_method_body) end end end end end
pmahoney/process_shared
lib/process_shared/posix/errno.rb
Ruby
mit
1,066
'use strict'; const _ = require('lodash'); const co = require('co'); const Promise = require('bluebird'); const AWS = require('aws-sdk'); AWS.config.region = 'us-east-1'; const cloudwatch = Promise.promisifyAll(new AWS.CloudWatch()); const Lambda = new AWS.Lambda(); const START_TIME = new Date('2017-06-07T01:00:00.000Z'); const DAYS = 2; const ONE_DAY = 24 * 60 * 60 * 1000; let addDays = (startDt, n) => new Date(startDt.getTime() + ONE_DAY * n); let getFuncStats = co.wrap(function* (funcName) { let getStats = co.wrap(function* (startTime, endTime) { let req = { MetricName: 'Duration', Namespace: 'AWS/Lambda', Period: 60, Dimensions: [ { Name: 'FunctionName', Value: funcName } ], Statistics: [ 'Maximum' ], Unit: 'Milliseconds', StartTime: startTime, EndTime: endTime }; let resp = yield cloudwatch.getMetricStatisticsAsync(req); return resp.Datapoints.map(dp => { return { timestamp: dp.Timestamp, value: dp.Maximum }; }); }); let stats = []; for (let i = 0; i < DAYS; i++) { // CloudWatch only allows us to query 1440 data points per request, which // at 1 min period is 24 hours let startTime = addDays(START_TIME, i); let endTime = addDays(startTime, 1); let oneDayStats = yield getStats(startTime, endTime); stats = stats.concat(oneDayStats); } return _.sortBy(stats, s => s.timestamp); }); let listFunctions = co.wrap(function* (marker, acc) { acc = acc || []; let resp = yield Lambda.listFunctions({ Marker: marker, MaxItems: 100 }).promise(); let functions = resp.Functions .map(f => f.FunctionName) .filter(fn => fn.includes("aws-coldstart") && !fn.endsWith("run")); acc = acc.concat(functions); if (resp.NextMarker) { return yield listFunctions(resp.NextMarker, acc); } else { return acc; } }); listFunctions() .then(co.wrap(function* (funcs) { for (let func of funcs) { let stats = yield getFuncStats(func); stats.forEach(stat => console.log(`${func},${stat.timestamp},${stat.value}`)); } }));
theburningmonk/lambda-coldstart-comparison
download-stats.js
JavaScript
mit
2,153
from __future__ import absolute_import, division, print_function, unicode_literals

import string
import urllib

try:
    from urllib.parse import urlparse, urlencode, urljoin, parse_qsl, urlunparse
    from urllib.request import urlopen, Request
    from urllib.error import HTTPError
except ImportError:
    from urlparse import urlparse, urljoin, urlunparse, parse_qsl
    from urllib import urlencode
    from urllib2 import urlopen, Request, HTTPError

from random import SystemRandom

try:
    UNICODE_ASCII_CHARACTERS = (string.ascii_letters + string.digits)
except AttributeError:
    UNICODE_ASCII_CHARACTERS = (string.ascii_letters.decode('ascii') + string.digits.decode('ascii'))


def random_ascii_string(length):
    random = SystemRandom()
    return ''.join([random.choice(UNICODE_ASCII_CHARACTERS) for x in range(length)])


def url_query_params(url):
    """Return query parameters as a dict from the specified URL.

    :param url: URL.
    :type url: str
    :rtype: dict
    """
    return dict(parse_qsl(urlparse(url).query, True))


def url_dequery(url):
    """Return a URL with the query component removed.

    :param url: URL to dequery.
    :type url: str
    :rtype: str
    """
    url = urlparse(url)
    return urlunparse((url.scheme, url.netloc, url.path, url.params, '', url.fragment))


def build_url(base, additional_params=None):
    """Construct a URL based off of base containing all parameters in
    the query portion of base plus any additional parameters.

    :param base: Base URL
    :type base: str
    :param additional_params: Additional query parameters to include.
    :type additional_params: dict
    :rtype: str
    """
    url = urlparse(base)
    query_params = {}
    query_params.update(parse_qsl(url.query, True))
    if additional_params is not None:
        query_params.update(additional_params)
        for k, v in additional_params.items():
            if v is None:
                query_params.pop(k)

    return urlunparse((url.scheme, url.netloc, url.path, url.params, urlencode(query_params), url.fragment))
VulcanTechnologies/oauth2lib
oauth2lib/utils.py
Python
mit
2,411
/* The MIT License (MIT)

Copyright (c) 2014 Banbury & Play-Em

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/

using UnityEngine;

#if UNITY_EDITOR

using UnityEditor;
using System.IO;

#endif

namespace SpritesAndBones.Editor {

    [CustomEditor(typeof(Skin2D))]
    public class Skin2DEditor : UnityEditor.Editor {
        private Skin2D skin;

        private float baseSelectDistance = 0.1f;
        private float changedBaseSelectDistance = 0.1f;
        private int selectedIndex = -1;
        private Color handleColor = Color.green;

        private void OnEnable() {
            skin = (Skin2D)target;
        }

        public override void OnInspectorGUI() {
            DrawDefaultInspector();
            EditorGUILayout.Separator();

            if (GUILayout.Button("Toggle Mesh Outline")) {
                Skin2D.showMeshOutline = !Skin2D.showMeshOutline;
            }

            EditorGUILayout.Separator();

            if (skin.GetComponent<SkinnedMeshRenderer>().sharedMesh != null && GUILayout.Button("Save as Prefab")) {
                skin.SaveAsPrefab();
            }

            EditorGUILayout.Separator();

            if (skin.GetComponent<SkinnedMeshRenderer>().sharedMesh != null && GUILayout.Button("Recalculate Bone Weights")) {
                skin.RecalculateBoneWeights();
            }

            EditorGUILayout.Separator();

            handleColor = EditorGUILayout.ColorField("Handle Color", handleColor);
            changedBaseSelectDistance = EditorGUILayout.Slider("Handle Size", baseSelectDistance, 0, 1);
            if (baseSelectDistance != changedBaseSelectDistance) {
                baseSelectDistance = changedBaseSelectDistance;
                EditorUtility.SetDirty(this);
                SceneView.RepaintAll();
            }

            if (skin.GetComponent<SkinnedMeshRenderer>().sharedMesh != null && GUILayout.Button("Create Control Points")) {
                skin.CreateControlPoints(skin.GetComponent<SkinnedMeshRenderer>());
            }

            if (skin.GetComponent<SkinnedMeshRenderer>().sharedMesh != null && GUILayout.Button("Reset Control Points")) {
                skin.ResetControlPointPositions();
            }

            if (skin.points != null && skin.controlPoints != null && skin.controlPoints.Length > 0
            && selectedIndex != -1 && GUILayout.Button("Reset Selected Control Point")) {
                if (skin.controlPoints[selectedIndex].originalPosition != skin.GetComponent<MeshFilter>().sharedMesh.vertices[selectedIndex]) {
                    skin.controlPoints[selectedIndex].originalPosition = skin.GetComponent<MeshFilter>().sharedMesh.vertices[selectedIndex];
                }
                skin.controlPoints[selectedIndex].ResetPosition();
                skin.points.SetPoint(skin.controlPoints[selectedIndex]);
            }

            if (GUILayout.Button("Remove Control Points")) {
                skin.RemoveControlPoints();
            }

            EditorGUILayout.Separator();

            if (skin.GetComponent<SkinnedMeshRenderer>().sharedMesh != null && GUILayout.Button("Generate Mesh Asset")) {
#if UNITY_EDITOR
                // Check if the Meshes directory exists, if not, create it.
                if (!Directory.Exists("Assets/Meshes")) {
                    AssetDatabase.CreateFolder("Assets", "Meshes");
                    AssetDatabase.Refresh();
                }

                Mesh mesh = new Mesh();
                mesh.name = skin.GetComponent<SkinnedMeshRenderer>().sharedMesh.name.Replace(".SkinnedMesh", ".Mesh");
                mesh.vertices = skin.GetComponent<SkinnedMeshRenderer>().sharedMesh.vertices;
                mesh.triangles = skin.GetComponent<SkinnedMeshRenderer>().sharedMesh.triangles;
                mesh.normals = skin.GetComponent<SkinnedMeshRenderer>().sharedMesh.normals;
                mesh.uv = skin.GetComponent<SkinnedMeshRenderer>().sharedMesh.uv;
                mesh.uv2 = skin.GetComponent<SkinnedMeshRenderer>().sharedMesh.uv2;
                mesh.bounds = skin.GetComponent<SkinnedMeshRenderer>().sharedMesh.bounds;

                ScriptableObjectUtility.CreateAsset(mesh, "Meshes/" + skin.gameObject.name + ".Mesh");
#endif
            }

            if (skin.GetComponent<SkinnedMeshRenderer>().sharedMaterial != null && GUILayout.Button("Generate Material Asset")) {
#if UNITY_EDITOR
                Material material = new Material(skin.GetComponent<SkinnedMeshRenderer>().sharedMaterial);
                material.CopyPropertiesFromMaterial(skin.GetComponent<SkinnedMeshRenderer>().sharedMaterial);
                skin.GetComponent<SkinnedMeshRenderer>().sharedMaterial = material;

                if (!Directory.Exists("Assets/Materials")) {
                    AssetDatabase.CreateFolder("Assets", "Materials");
                    AssetDatabase.Refresh();
                }

                AssetDatabase.CreateAsset(material, "Assets/Materials/" + material.mainTexture.name + ".mat");

                Debug.Log("Created material " + material.mainTexture.name + " for " + skin.gameObject.name);
#endif
            }
        }

        private void OnSceneGUI() {
            if (skin != null && skin.GetComponent<SkinnedMeshRenderer>().sharedMesh != null
            && skin.controlPoints != null && skin.controlPoints.Length > 0 && skin.points != null) {
                Event e = Event.current;

                Handles.matrix = skin.transform.localToWorldMatrix;
                EditorGUI.BeginChangeCheck();
                Ray r = HandleUtility.GUIPointToWorldRay(e.mousePosition);
                Vector2 mousePos = r.origin;
                float selectDistance = HandleUtility.GetHandleSize(mousePos) * baseSelectDistance;

                #region Draw vertex handles

                Handles.color = handleColor;

                for (int i = 0; i < skin.controlPoints.Length; i++) {
                    if (Handles.Button(skin.points.GetPoint(skin.controlPoints[i]), Quaternion.identity, selectDistance, selectDistance, Handles.CircleCap)) {
                        selectedIndex = i;
                    }
                    if (selectedIndex == i) {
                        EditorGUI.BeginChangeCheck();
                        skin.controlPoints[i].position = Handles.DoPositionHandle(skin.points.GetPoint(skin.controlPoints[i]), Quaternion.identity);
                        if (EditorGUI.EndChangeCheck()) {
                            skin.points.SetPoint(skin.controlPoints[i]);
                            Undo.RecordObject(skin, "Changed Control Point");
                            Undo.RecordObject(skin.points, "Changed Control Point");
                            EditorUtility.SetDirty(this);
                        }
                    }
                }

                #endregion Draw vertex handles
            }
        }
    }
}
Apelsin/UnitySpritesAndBones
Assets/SpritesAndBones/Scripts/Editor/Skin2DEditor.cs
C#
mit
8,335