Dataset Preview
Viewer
The full dataset viewer is not available (click to read why). Only showing a preview of the rows.
Job manager was killed while running this job (job exceeded maximum duration).
Error code:   JobManagerExceededMaximumDurationError

Need help to make the dataset viewer work? Open a discussion for direct support.

code
string
repo_name
string
path
string
language
string
license
string
size
int32
# -*- coding: utf-8 -*- # © 2009 Pexego/Comunitea # © 2011-2012 Iker Coranti (www.avanzosc.es) # © 2014 Juanjo Algaz (gutierrezweb.es) # © 2014-2016 Pedro M. Baeza # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl-3.0). { "name": "Account balance reporting engine", "version": "8.0.1.2.0", "author": "Pexego, " "AvanzOSC, " "Tecnativa, " "Odoo Community Association (OCA)", "website": "http://www.pexego.es", "category": "Accounting & Finance", "contributors": [ "Juanjo Algaz <juanjoa@malagatic.com>", "Joaquín Gutierrez <joaquing.pedrosa@gmail.com>", "Pedro M. Baeza <pedro.baeza@tecnativa.com>", "Oihane Crucelaegui <oihanecrucelaegi@avanzosc.es>", ], "license": 'AGPL-3', "depends": [ "account", ], "data": [ "security/ir.model.access.csv", "views/account_account_view.xml", "views/account_balance_reporting_template_view.xml", "views/account_balance_reporting_report_view.xml", "views/account_balance_reporting_menu.xml", "report/account_balance_reporting_reports.xml", "report/report_generic.xml", "wizard/wizard_print_view.xml", ], "installable": True, }
Endika/l10n-spain
account_balance_reporting/__openerp__.py
Python
agpl-3.0
1,278
/* Copyright (C) 1999 Claude SIMON (http://q37.info/contact/). This file is part of the Epeios framework. The Epeios framework is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. The Epeios framework is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. You should have received a copy of the GNU Affero General Public License along with the Epeios framework. If not, see <http://www.gnu.org/licenses/> */ #include <stdio.h> #include <stdlib.h> #include <string.h> #include "rgstry.h" #include "err.h" #include "cio.h" using cio::CIn; using cio::COut; using cio::CErr; void Generic( int argc, char *argv[] ) { qRH qRB qRR qRT qRE } int main( int argc, char *argv[] ) { qRFH qRFB COut << "Test of library " << RGSTRY_NAME << ' ' << __DATE__" "__TIME__"\n"; qRFR qRFT qRFE return ERRExitValue; }
epeios-q37/epeios
devel/rgstry/rgstry_test.cpp
C++
agpl-3.0
1,175
/* * Tanaguru - Automated webpage assessment * Copyright (C) 2008-2015 Tanaguru.org * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * * Contact us by mail: tanaguru AT tanaguru DOT org */ package org.tanaguru.rules.rgaa42019; import org.tanaguru.entity.audit.TestSolution; import static org.tanaguru.rules.keystore.AttributeStore.ABSENT_ATTRIBUTE_VALUE; import static org.tanaguru.rules.keystore.AttributeStore.SRC_ATTR; import static org.tanaguru.rules.keystore.MarkerStore.DECORATIVE_IMAGE_MARKER; import static org.tanaguru.rules.keystore.MarkerStore.INFORMATIVE_IMAGE_MARKER; import org.apache.commons.lang3.tuple.ImmutablePair; import org.tanaguru.entity.audit.ProcessResult; import org.tanaguru.rules.keystore.HtmlElementStore; import org.tanaguru.rules.keystore.RemarkMessageStore; import org.tanaguru.rules.rgaa42019.test.Rgaa42019RuleImplementationTestCase; /** * Unit test class for the implementation of the rule 1-8-2 of the referential Rgaa 4-2019. 
* * @author edaconceicao */ public class Rgaa42019Rule010802Test extends Rgaa42019RuleImplementationTestCase { /** * Default constructor */ public Rgaa42019Rule010802Test (String testName){ super(testName); } @Override protected void setUpRuleImplementationClassName() { setRuleImplementationClassName( "org.tanaguru.rules.rgaa42019.Rgaa42019Rule010802"); } @Override protected void setUpWebResourceMap() { addWebResource("Rgaa42019.Test.01.08.02-3NMI-01", createParameter("Rules", INFORMATIVE_IMAGE_MARKER, "informative-image")); addWebResource("Rgaa42019.Test.01.08.02-4NA-01"); addWebResource("Rgaa42019.Test.01.08.02-4NA-02"); addWebResource("Rgaa42019.Test.01.08.02-4NA-03", createParameter("Rules", DECORATIVE_IMAGE_MARKER, "decorative-image")); } @Override protected void setProcess() { //---------------------------------------------------------------------- //------------------------------3NMI-01------------------------------ //---------------------------------------------------------------------- ProcessResult processResult = processPageTest("Rgaa42019.Test.01.08.02-3NMI-01"); checkResultIsPreQualified(processResult, 2, 2); checkRemarkIsPresent( processResult, TestSolution.NEED_MORE_INFO, RemarkMessageStore.CHECK_TEXT_STYLED_PRESENCE_OF_INFORMATIVE_IMG_MSG, HtmlElementStore.INPUT_ELEMENT, 1, new ImmutablePair(SRC_ATTR, ABSENT_ATTRIBUTE_VALUE)); checkRemarkIsPresent( processResult, TestSolution.NEED_MORE_INFO, RemarkMessageStore.CHECK_NATURE_OF_IMAGE_AND_TEXT_STYLED_PRESENCE_MSG, HtmlElementStore.INPUT_ELEMENT, 2, new ImmutablePair(SRC_ATTR, ABSENT_ATTRIBUTE_VALUE)); //---------------------------------------------------------------------- //------------------------------4NA-01------------------------------ //---------------------------------------------------------------------- checkResultIsNotApplicable(processPageTest("Rgaa42019.Test.01.08.02-4NA-01")); //---------------------------------------------------------------------- 
//------------------------------4NA-02------------------------------ //---------------------------------------------------------------------- checkResultIsNotApplicable(processPageTest("Rgaa42019.Test.01.08.02-4NA-02")); //---------------------------------------------------------------------- //------------------------------4NA-03---------------------------------- //---------------------------------------------------------------------- checkResultIsNotApplicable(processPageTest("Rgaa42019.Test.01.08.02-4NA-03")); } // @Override // protected void setConsolidate() { // // // The consolidate method can be removed when real implementation is done. // // The assertions are automatically tested regarding the file names by // // the abstract parent class // assertEquals(TestSolution.NOT_TESTED, // consolidate("Rgaa4-2019.Test.1.8.2-3NMI-01").getValue()); //} }
Tanaguru/Tanaguru
rules/rgaa4-2019/src/test/java/org/tanaguru/rules/rgaa42019/Rgaa42019Rule010802Test.java
Java
agpl-3.0
4,980
<?php namespace UpgradeDigital; /** * Resource tag count model represents a count of resources grouped by a * particular tag set e.g. room type. * PHP version 5 * * @package Client * @author Damien Allison <damien@upgrade-digital.com> * @copyright 2015 Upgrade Digital * @license https://github.com/Upgrade-Digital/client/blob/master/LICENSE.md * @link https://github.com/Upgrade-Digital/client */ class ResourceTagCount { /** * @var string */ public $urn; /** * The tag used to group the results, typcically resourceCode * @var Tag */ public $tag; /** * Tag list used to pass information on the resource tag grouping. * @var Tag[] */ /** * @var integer */ public $count; /** * @var ResourceRate[] */ public $rates; /** * @var ProductSelector[] */ public $products; }
Upgrade-Digital/client
php/UpgradeDigital/ResourceTagCount.php
PHP
agpl-3.0
864
# -*- coding:utf-8 -*- # # # Copyright (C) 2013 Michael Telahun Makonnen <mmakonnen@gmail.com>. # All Rights Reserved. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # # { 'name': 'Capture picture with webcam', 'version': '1.0', 'category': 'Generic Modules/Human Resources', 'description': """ TApplicant WebCam ========= Capture employee pictures with an attached web cam. """, 'author': "Michael Telahun Makonnen <mmakonnen@gmail.com>," "Odoo Community Association (OCA)", 'website': 'http://miketelahun.wordpress.com', 'license': 'AGPL-3', 'depends': [ 'hr', 'web', 'trip' ], 'js': [ 'static/src/js/jquery.webcam.js', 'static/src/js/tapplicant_webcam.js', ], 'css': [ 'static/src/css/tapplicant_webcam.css', ], 'qweb': [ 'static/src/xml/tapplicant_webcam.xml', ], 'data': [ 'tapplicant_webcam_data.xml', 'tapplicant_webcam_view.xml', ], 'installable': True, 'active': False, }
nishad-jobsglobal/odoo-marriot
openerp/addons/tapplicant_webcam/__openerp__.py
Python
agpl-3.0
1,685
/* * Copyright (C) 2000 - 2021 Silverpeas * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * As a special exception to the terms and conditions of version 3.0 of * the GPL, you may redistribute this Program in connection with Free/Libre * Open Source Software ("FLOSS") applications as described in Silverpeas's * FLOSS exception. You should have received a copy of the text describing * the FLOSS exception, and it is also available here: * "https://www.silverpeas.org/legal/floss_exception.html" * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.silverpeas.core.mylinks.dao; import org.silverpeas.core.persistence.jdbc.sql.JdbcSqlQuery; import java.sql.SQLException; import java.util.List; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.empty; /** * @author silveryocha */ public class MyLinksDAOITUtil { private MyLinksDAOITUtil() { } static void assertLinkIds(final List<Integer> actualIds, final Integer... expectedIds) { if (expectedIds.length == 0) { assertThat(actualIds, empty()); } else { assertThat(actualIds, contains(expectedIds)); } } static List<Integer> getAllLinkIds() throws SQLException { return JdbcSqlQuery.createSelect("linkid") .from("SB_MyLinks_Link") .orderBy("linkid") .execute(r -> r.getInt(1)); } static void assertCategoryIds(final List<Integer> actualIds, final Integer... 
expectedIds) { if (expectedIds.length == 0) { assertThat(actualIds, empty()); } else { assertThat(actualIds, contains(expectedIds)); } } static List<Integer> getAllCategoryIds() throws SQLException { return JdbcSqlQuery.createSelect("catid") .from("SB_MyLinks_Cat") .orderBy("catid") .execute(r -> r.getInt(1)); } static void assertOfCouples(final List<String> actualCouples, final String... expectedCouples) { if (expectedCouples.length == 0) { assertThat(actualCouples, empty()); } else { assertThat(actualCouples, contains(expectedCouples)); } } static List<String> getAllOfCouples() throws SQLException { return JdbcSqlQuery.createSelect("*") .from("SB_MyLinks_LinkCat") .orderBy("catid, linkid") .execute(r -> r.getInt("catid") + "/" + r.getInt("linkid")); } }
SilverDav/Silverpeas-Core
core-services/mylinks/src/integration-test/java/org/silverpeas/core/mylinks/dao/MyLinksDAOITUtil.java
Java
agpl-3.0
2,953
# ActivitySim # Copyright (C) 2014-2015 Synthicity, LLC # See full license in LICENSE.txt. import os.path import numpy as np import pandas as pd import pandas.util.testing as pdt import pytest from ..activitysim import eval_variables from .. import mnl # this is lifted straight from urbansim's test_mnl.py @pytest.fixture(scope='module', params=[ ('fish.csv', 'fish_choosers.csv', pd.DataFrame( [[-0.02047652], [0.95309824]], index=['price', 'catch'], columns=['Alt']), pd.DataFrame([ [0.2849598, 0.2742482, 0.1605457, 0.2802463], [0.1498991, 0.4542377, 0.2600969, 0.1357664]], columns=['beach', 'boat', 'charter', 'pier']))]) def test_data(request): data, choosers, spec, probabilities = request.param return { 'data': data, 'choosers': choosers, 'spec': spec, 'probabilities': probabilities } @pytest.fixture def choosers(test_data): filen = os.path.join( os.path.dirname(__file__), 'data', test_data['choosers']) return pd.read_csv(filen) @pytest.fixture def spec(test_data): return test_data['spec'] @pytest.fixture def choosers_dm(choosers, spec): return eval_variables(spec.index, choosers) @pytest.fixture def utilities(choosers_dm, spec, test_data): utils = choosers_dm.dot(spec).astype('float') return pd.DataFrame( utils.as_matrix().reshape(test_data['probabilities'].shape), columns=test_data['probabilities'].columns) def test_utils_to_probs(utilities, test_data): probs = mnl.utils_to_probs(utilities) pdt.assert_frame_equal(probs, test_data['probabilities']) def test_utils_to_probs_raises(): with pytest.raises(RuntimeError): mnl.utils_to_probs( pd.DataFrame([[1, 2, np.inf, 3]])) def test_make_choices_only_one(): probs = pd.DataFrame( [[1, 0, 0], [0, 1, 0]], columns=['a', 'b', 'c'], index=['x', 'y']) choices = mnl.make_choices(probs) pdt.assert_series_equal( choices, pd.Series([0, 1], index=['x', 'y'])) def test_make_choices_real_probs(random_seed, utilities): probs = mnl.utils_to_probs(utilities) choices = mnl.make_choices(probs) pdt.assert_series_equal( choices, 
pd.Series([1, 2], index=[0, 1])) @pytest.fixture(scope='module') def interaction_choosers(): return pd.DataFrame({ 'attr': ['a', 'b', 'c', 'b']}, index=['w', 'x', 'y', 'z']) @pytest.fixture(scope='module') def interaction_alts(): return pd.DataFrame({ 'prop': [10, 20, 30, 40]}, index=[1, 2, 3, 4]) def test_interaction_dataset_no_sample(interaction_choosers, interaction_alts): expected = pd.DataFrame({ 'attr': ['a'] * 4 + ['b'] * 4 + ['c'] * 4 + ['b'] * 4, 'prop': [10, 20, 30, 40] * 4, 'chooser_idx': ['w'] * 4 + ['x'] * 4 + ['y'] * 4 + ['z'] * 4}, index=[1, 2, 3, 4] * 4) interacted = mnl.interaction_dataset( interaction_choosers, interaction_alts) interacted, expected = interacted.align(expected, axis=1) pdt.assert_frame_equal(interacted, expected) def test_interaction_dataset_sampled( interaction_choosers, interaction_alts, random_seed): expected = pd.DataFrame({ 'attr': ['a'] * 2 + ['b'] * 2 + ['c'] * 2 + ['b'] * 2, 'prop': [30, 40, 10, 30, 40, 10, 20, 10], 'chooser_idx': ['w'] * 2 + ['x'] * 2 + ['y'] * 2 + ['z'] * 2}, index=[3, 4, 1, 3, 4, 1, 2, 1]) interacted = mnl.interaction_dataset( interaction_choosers, interaction_alts, sample_size=2) interacted, expected = interacted.align(expected, axis=1) pdt.assert_frame_equal(interacted, expected)
bhargavasana/activitysim
activitysim/tests/test_mnl.py
Python
agpl-3.0
3,724
/* * Funambol is a mobile platform developed by Funambol, Inc. * Copyright (C) 2008 Funambol, Inc. * * This program is free software; you can redistribute it and/or modify it under * the terms of the GNU Affero General Public License version 3 as published by * the Free Software Foundation with the addition of the following permission * added to Section 15 as permitted in Section 7(a): FOR ANY PART OF THE COVERED * WORK IN WHICH THE COPYRIGHT IS OWNED BY FUNAMBOL, FUNAMBOL DISCLAIMS THE * WARRANTY OF NON INFRINGEMENT OF THIRD PARTY RIGHTS. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS * FOR A PARTICULAR PURPOSE. See the GNU General Public License for more * details. * * You should have received a copy of the GNU Affero General Public License * along with this program; if not, see http://www.gnu.org/licenses or write to * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, * MA 02110-1301 USA. * * You can contact Funambol, Inc. headquarters at 643 Bair Island Road, Suite * 305, Redwood City, CA 94063, USA, or at email address info@funambol.com. * * The interactive user interfaces in modified source and object code versions * of this program must display Appropriate Legal Notices, as required under * Section 5 of the GNU Affero General Public License version 3. * * In accordance with Section 7(b) of the GNU Affero General Public License * version 3, these Appropriate Legal Notices must retain the display of the * "Powered by Funambol" logo. If the display of the logo is not reasonably * feasible for technical reasons, the Appropriate Legal Notices must display * the words "Powered by Funambol". 
*/ package com.funambol.syncml.client; import java.util.Enumeration; import java.util.Vector; import java.util.Date; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import com.funambol.sync.SyncItem; import com.funambol.sync.SourceConfig; import com.funambol.sync.SyncException; import com.funambol.sync.SyncAnchor; import com.funambol.sync.client.RawFileSyncSource; import com.funambol.sync.client.ChangesTracker; import com.funambol.syncml.protocol.SyncMLStatus; import com.funambol.platform.FileAdapter; import com.funambol.util.Log; import com.funambol.util.Base64; /** * An implementation of TrackableSyncSource, providing * the ability to sync briefcases (files). The source can handle both raw files * and OMA files (file objects). By default the source formats items according * to the OMA file object spec, but it is capable of receiving also raw files, * if their MIME type is not OMA file objects. */ public class FileSyncSource extends RawFileSyncSource { private static final String TAG_LOG = "FileSyncSource"; protected class FileSyncItem extends RawFileSyncItem { protected String prologue; protected String epilogue; public FileSyncItem(String fileName, String key) throws IOException { super(fileName, key, null, SyncItem.STATE_NEW, null); } public FileSyncItem(String fileName, String key, String type, char state, String parent) throws IOException { super(fileName, key, type, state, parent); FileAdapter file = new FileAdapter(fileName); if (SourceConfig.FILE_OBJECT_TYPE.equals(getType())) { // Initialize the prologue FileObject fo = new FileObject(); fo.setName(file.getName()); fo.setModified(new Date(file.lastModified())); prologue = fo.formatPrologue(); // Initialize the epilogue epilogue = fo.formatEpilogue(); // Compute the size of the FileObject int bodySize = Base64.computeEncodedSize((int)file.getSize()); // Set the size setObjectSize(prologue.length() + bodySize + epilogue.length()); } else { // The size is the raw file 
size setObjectSize(file.getSize()); } // Release the file object file.close(); } /** * Creates a new output stream to write to. If the item type is * FileDataObject, then the output stream takes care of parsing the XML * part of the object and it fills a FileObject that can be retrieved * later. @see FileObjectOutputStream for more details * Note that the output stream is unique, so that is can be reused * across different syncml messages. */ public OutputStream getOutputStream() throws IOException { if (os == null) { os = super.getOutputStream(); // If this item is a file object, we shall use the // FileObjectOutputStream if (SourceConfig.FILE_OBJECT_TYPE.equals(getType())) { FileObject fo = new FileObject(); os = new FileObjectOutputStream(fo, os); } } return os; } /** * Creates a new input stream to read from. If the source is configured * to handle File Data Object, then the stream returns the XML * description of the file. @see FileObjectInputStream for more details. */ public InputStream getInputStream() throws IOException { FileAdapter file = new FileAdapter(fileName); InputStream is = super.getInputStream(); // If this item is a file object, we shall use the // FileObjectOutputStream if (SourceConfig.FILE_OBJECT_TYPE.equals(getType())) { is = new FileObjectInputStream(prologue, is, epilogue, (int)file.getSize()); } return is; } // If we do not reimplement the getContent, it will return a null // content, but this is not used in the ss, so there's no need to // redefine it } protected String directory; protected String extensions[] = {}; //------------------------------------------------------------- Constructors /** * FileSyncSource constructor: initialize source config */ public FileSyncSource(SourceConfig config, ChangesTracker tracker, String directory) { super(config, tracker, directory); } protected void applyFileProperties(FileSyncItem fsi) throws IOException { OutputStream os = fsi.getOutputStream(); if (os instanceof FileObjectOutputStream) { 
FileObjectOutputStream foos = (FileObjectOutputStream)os; applyFileObjectProperties(fsi, foos); // The key for this item must be updated with the real // file name FileObject fo = foos.getFileObject(); String newName = fo.getName(); // The name is mandatory, but we try to be more robust here // and deal with items with no name if (newName != null) { fsi.setKey(directory + newName); } } } protected void applyFileObjectProperties(FileSyncItem fsi, FileObjectOutputStream foos) throws IOException { FileObject fo = foos.getFileObject(); String newName = fo.getName(); FileAdapter file = new FileAdapter(fsi.getFileName()); if (newName != null) { // Rename the file file.rename(directory + newName); } else { Log.error(TAG_LOG, "The received item does not have a valid name."); } file.close(); // Apply the modified date if present FileAdapter newFile = new FileAdapter(directory + newName); if (newFile != null) { Date lastModified = fo.getModified(); if (newFile.isSetLastModifiedSupported() && lastModified != null) { newFile.setLastModified(lastModified.getTime()); } newFile.close(); } } }
zjujunge/funambol
externals/java-sdk/syncml/src/main/java/com/funambol/syncml/client/FileSyncSource.java
Java
agpl-3.0
8,195
<?php class order_CancelOrderAction extends f_action_BaseJSONAction { /** * @param Context $context * @param Request $request */ public function _execute($context, $request) { $labels = array(""); foreach ($this->getDocumentInstanceArrayFromRequest($request) as $order) { if ($order instanceof order_persistentdocument_order) { $order->getDocumentService()->cancelOrder($order); $this->logAction($order); $labels[] = $order->getOrderNumber(); } } return $this->sendJSON(array('message' => LocaleService::getInstance()->transBO('m.order.bo.actions.cancel-order-success', array(), array('OrderNumbers' => implode("\n ", $labels))))); } }
RBSChange/modules.order
actions/CancelOrderAction.class.php
PHP
agpl-3.0
697
#!/bin/sh echo "Source Statistics:" wc `find . -regextype posix-basic -iregex '.*\.\(scm\|h\|c\|y\|l\)' | grep -v 'test\/test_eyeball\|CMake\|build\|vendor'` echo echo "Commit Count: " `git log | grep '^commit' | wc -l` echo
arlaneenalra/Bootstrap-Scheme
count.sh
Shell
agpl-3.0
227
// // KPFairPlayAssetResourceLoaderHandler.h // KALTURAPlayerSDK // // Created by Noam Tamim on 09/08/2016. // Copyright © 2016 Kaltura. All rights reserved. // @import AVFoundation; @interface KPFairPlayAssetResourceLoaderHandler : NSObject <AVAssetResourceLoaderDelegate> @property (nonatomic, copy) NSString* licenseUri; @property (nonatomic, copy) NSData* certificate; +(dispatch_queue_t)globalNotificationQueue; @end
kaltura/player-sdk-native-ios
KALTURAPlayerSDK/KPFairPlayAssetResourceLoaderHandler.h
C
agpl-3.0
429
/* * Copyright (C) 2010 Romain Reuillon * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.openmole.core.batch.control trait UsageControl { def waitAToken: AccessToken def tryGetToken: Option[AccessToken] def releaseToken(token: AccessToken) def available: Int def tryWithToken[B](f: Option[AccessToken] ⇒ B) = { val t = tryGetToken try f(t) finally t.foreach(releaseToken) } def withToken[B](f: (AccessToken ⇒ B)): B = { val token = waitAToken try f(token) finally releaseToken(token) } }
ISCPIF/PSEExperiments
openmole-src/openmole/core/org.openmole.core.batch/src/main/scala/org/openmole/core/batch/control/UsageControl.scala
Scala
agpl-3.0
1,174
DELETE FROM `weenie` WHERE `class_Id` = 42123; INSERT INTO `weenie` (`class_Id`, `class_Name`, `type`, `last_Modified`) VALUES (42123, 'ace42123-warden', 10, '2019-02-10 00:00:00') /* Creature */; INSERT INTO `weenie_properties_int` (`object_Id`, `type`, `value`) VALUES (42123, 1, 16) /* ItemType - Creature */ , (42123, 2, 31) /* CreatureType - Human */ , (42123, 6, -1) /* ItemsCapacity */ , (42123, 7, -1) /* ContainersCapacity */ , (42123, 16, 32) /* ItemUseable - Remote */ , (42123, 25, 150) /* Level */ , (42123, 93, 6292504) /* PhysicsState - ReportCollisions, IgnoreCollisions, Gravity, ReportCollisionsAsEnvironment, EdgeSlide */ , (42123, 95, 8) /* RadarBlipColor - Yellow */ , (42123, 113, 2) /* Gender - Female */ , (42123, 133, 4) /* ShowableOnRadar - ShowAlways */ , (42123, 134, 16) /* PlayerKillerStatus - RubberGlue */ , (42123, 188, 3) /* HeritageGroup - Sho */ , (42123, 8007, 0) /* PCAPRecordedAutonomousMovement */; INSERT INTO `weenie_properties_bool` (`object_Id`, `type`, `value`) VALUES (42123, 1, True ) /* Stuck */ , (42123, 19, False) /* Attackable */; INSERT INTO `weenie_properties_float` (`object_Id`, `type`, `value`) VALUES (42123, 54, 3) /* UseRadius */; INSERT INTO `weenie_properties_string` (`object_Id`, `type`, `value`) VALUES (42123, 1, 'Warden') /* Name */ , (42123, 5, 'Portal Warden') /* Template */ , (42123, 8006, 'AAA9AAAAAAA=') /* PCAPRecordedCurrentMotionState */; INSERT INTO `weenie_properties_d_i_d` (`object_Id`, `type`, `value`) VALUES (42123, 1, 0x0200004E) /* Setup */ , (42123, 2, 0x09000001) /* MotionTable */ , (42123, 3, 0x20000002) /* SoundTable */ , (42123, 6, 0x0400007E) /* PaletteBase */ , (42123, 8, 0x06001036) /* Icon */ , (42123, 9, 0x05001058) /* EyesTexture */ , (42123, 10, 0x05001073) /* NoseTexture */ , (42123, 11, 0x050010B5) /* MouthTexture */ , (42123, 15, 0x04002010) /* HairPalette */ , (42123, 16, 0x040004AF) /* EyesPalette */ , (42123, 17, 0x040004A1) /* SkinPalette */ , (42123, 8001, 9437238) /* 
PCAPRecordedWeenieHeader - ItemsCapacity, ContainersCapacity, Usable, UseRadius, RadarBlipColor, RadarBehavior */ , (42123, 8003, 4) /* PCAPRecordedObjectDesc - Stuck */ , (42123, 8005, 100355) /* PCAPRecordedPhysicsDesc - CSetup, MTable, STable, Position, Movement */; INSERT INTO `weenie_properties_position` (`object_Id`, `position_Type`, `obj_Cell_Id`, `origin_X`, `origin_Y`, `origin_Z`, `angles_W`, `angles_X`, `angles_Y`, `angles_Z`) VALUES (42123, 8040, 0x8A0201A7, 36.3116, -51.5629, 0.005, 0.900447, 0, 0, -0.434966) /* PCAPRecordedLocation */ /* @teleloc 0x8A0201A7 [36.311600 -51.562900 0.005000] 0.900447 0.000000 0.000000 -0.434966 */; INSERT INTO `weenie_properties_i_i_d` (`object_Id`, `type`, `value`) VALUES (42123, 8000, 0xDBF00AD7) /* PCAPRecordedObjectIID */; INSERT INTO `weenie_properties_attribute` (`object_Id`, `type`, `init_Level`, `level_From_C_P`, `c_P_Spent`) VALUES (42123, 1, 180, 0, 0) /* Strength */ , (42123, 2, 190, 0, 0) /* Endurance */ , (42123, 3, 170, 0, 0) /* Quickness */ , (42123, 4, 170, 0, 0) /* Coordination */ , (42123, 5, 150, 0, 0) /* Focus */ , (42123, 6, 160, 0, 0) /* Self */; INSERT INTO `weenie_properties_attribute_2nd` (`object_Id`, `type`, `init_Level`, `level_From_C_P`, `c_P_Spent`, `current_Level`) VALUES (42123, 1, 80, 0, 0, 175) /* MaxHealth */ , (42123, 3, 110, 0, 0, 300) /* MaxStamina */ , (42123, 5, 40, 0, 0, 200) /* MaxMana */; INSERT INTO `weenie_properties_palette` (`object_Id`, `sub_Palette_Id`, `offset`, `length`) VALUES (42123, 67109969, 92, 4) , (42123, 67110049, 0, 24) , (42123, 67110063, 32, 8) , (42123, 67110349, 64, 8) , (42123, 67110539, 72, 8) , (42123, 67111245, 40, 24) , (42123, 67115003, 84, 12) , (42123, 67115003, 136, 8) , (42123, 67115003, 144, 16) , (42123, 67115003, 96, 12) , (42123, 67115003, 174, 12) , (42123, 67115003, 216, 24) , (42123, 67115003, 168, 6) , (42123, 67115003, 160, 8) , (42123, 67115003, 240, 10) , (42123, 67115010, 186, 30) , (42123, 67115020, 72, 12) , (42123, 67115020, 108, 28) 
, (42123, 67115020, 250, 6) , (42123, 67117072, 24, 8); INSERT INTO `weenie_properties_texture_map` (`object_Id`, `index`, `old_Id`, `new_Id`) VALUES (42123, 0, 83889072, 83886685) , (42123, 0, 83889342, 83889386) , (42123, 1, 83887064, 83886241) , (42123, 2, 83887066, 83887055) , (42123, 5, 83887064, 83886241) , (42123, 6, 83887066, 83887055) , (42123, 9, 83887070, 83886781) , (42123, 9, 83887062, 83886686) , (42123, 10, 83887069, 83886782) , (42123, 11, 83887067, 83891213) , (42123, 13, 83887069, 83886782) , (42123, 14, 83887067, 83891213) , (42123, 16, 83886232, 83890685) , (42123, 16, 83886668, 83890264) , (42123, 16, 83886837, 83890291) , (42123, 16, 83886684, 83890357); INSERT INTO `weenie_properties_anim_part` (`object_Id`, `index`, `animation_Id`) VALUES (42123, 0, 16789976) , (42123, 1, 16789977) , (42123, 2, 16789980) , (42123, 3, 16789983) , (42123, 4, 16789981) , (42123, 5, 16789978) , (42123, 6, 16789979) , (42123, 7, 16789982) , (42123, 8, 16789987) , (42123, 9, 16789969) , (42123, 10, 16789972) , (42123, 11, 16789974) , (42123, 12, 16789986) , (42123, 13, 16789971) , (42123, 14, 16789973) , (42123, 15, 16789984) , (42123, 16, 16789985);
ACEmulator/ACE-World
Database/3-Core/9 WeenieDefaults/SQL/Creature/Human/42123 Warden.sql
SQL
agpl-3.0
5,777
DELETE FROM `weenie` WHERE `class_Id` = 8946; INSERT INTO `weenie` (`class_Id`, `class_Name`, `type`, `last_Modified`) VALUES (8946, 'scrolllightningstreak6', 34, '2019-02-10 00:00:00') /* Scroll */; INSERT INTO `weenie_properties_int` (`object_Id`, `type`, `value`) VALUES (8946, 1, 8192) /* ItemType - Writable */ , (8946, 5, 30) /* EncumbranceVal */ , (8946, 16, 8) /* ItemUseable - Contained */ , (8946, 19, 1000) /* Value */ , (8946, 93, 1044) /* PhysicsState - Ethereal, IgnoreCollisions, Gravity */ , (8946, 8041, 101) /* PCAPRecordedPlacement - Resting */; INSERT INTO `weenie_properties_bool` (`object_Id`, `type`, `value`) VALUES (8946, 22, True ) /* Inscribable */; INSERT INTO `weenie_properties_float` (`object_Id`, `type`, `value`) VALUES (8946, 39, 1.5) /* DefaultScale */; INSERT INTO `weenie_properties_string` (`object_Id`, `type`, `value`) VALUES (8946, 1, 'Scroll of Lightning Streak VI') /* Name */ , (8946, 14, 'Use this item to attempt to learn its spell.') /* Use */ , (8946, 16, 'Inscribed spell: Lightning Streak VI Sends a bolt of lighting streaking towards the target. The bolt does 36-71 points of electrical damage to the first thing it hits.') /* LongDesc */; INSERT INTO `weenie_properties_d_i_d` (`object_Id`, `type`, `value`) VALUES (8946, 1, 0x0200018A) /* Setup */ , (8946, 8, 0x06003595) /* Icon */ , (8946, 22, 0x3400002B) /* PhysicsEffectTable */ , (8946, 28, 1819) /* Spell - LightningStreak6 */ , (8946, 8001, 6307864) /* PCAPRecordedWeenieHeader - Value, Usable, Container, Burden, Spell */ , (8946, 8003, 18) /* PCAPRecordedObjectDesc - Inscribable, Attackable */ , (8946, 8005, 135297) /* PCAPRecordedPhysicsDesc - CSetup, ObjScale, PeTable, AnimationFrame */; INSERT INTO `weenie_properties_i_i_d` (`object_Id`, `type`, `value`) VALUES (8946, 8000, 0xDB6B84DE) /* PCAPRecordedObjectIID */;
ACEmulator/ACE-World
Database/3-Core/9 WeenieDefaults/SQL/Scroll/Writable/08946 Scroll of Lightning Streak VI.sql
SQL
agpl-3.0
1,993
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2013-2014 OpenERP (<http://www.openerp.com>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## """ High-level objects for fields. """ from collections import OrderedDict from datetime import date, datetime from functools import partial from operator import attrgetter from types import NoneType import logging import pytz import xmlrpclib from openerp.tools import float_round, frozendict, html_sanitize, ustr, OrderedSet from openerp.tools import DEFAULT_SERVER_DATE_FORMAT as DATE_FORMAT from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT as DATETIME_FORMAT DATE_LENGTH = len(date.today().strftime(DATE_FORMAT)) DATETIME_LENGTH = len(datetime.now().strftime(DATETIME_FORMAT)) EMPTY_DICT = frozendict() _logger = logging.getLogger(__name__) class SpecialValue(object): """ Encapsulates a value in the cache in place of a normal value. """ def __init__(self, value): self.value = value def get(self): return self.value class FailedValue(SpecialValue): """ Special value that encapsulates an exception instead of a value. 
""" def __init__(self, exception): self.exception = exception def get(self): raise self.exception def _check_value(value): """ Return ``value``, or call its getter if ``value`` is a :class:`SpecialValue`. """ return value.get() if isinstance(value, SpecialValue) else value def resolve_all_mro(cls, name, reverse=False): """ Return the (successively overridden) values of attribute ``name`` in ``cls`` in mro order, or inverse mro order if ``reverse`` is true. """ klasses = reversed(cls.__mro__) if reverse else cls.__mro__ for klass in klasses: if name in klass.__dict__: yield klass.__dict__[name] class MetaField(type): """ Metaclass for field classes. """ by_type = {} def __new__(meta, name, bases, attrs): """ Combine the ``_slots`` dict from parent classes, and determine ``__slots__`` for them on the new class. """ base_slots = {} for base in reversed(bases): base_slots.update(getattr(base, '_slots', ())) slots = dict(base_slots) slots.update(attrs.get('_slots', ())) attrs['__slots__'] = set(slots) - set(base_slots) attrs['_slots'] = slots return type.__new__(meta, name, bases, attrs) def __init__(cls, name, bases, attrs): super(MetaField, cls).__init__(name, bases, attrs) if cls.type and cls.type not in MetaField.by_type: MetaField.by_type[cls.type] = cls # compute class attributes to avoid calling dir() on fields cls.column_attrs = [] cls.related_attrs = [] cls.description_attrs = [] for attr in dir(cls): if attr.startswith('_column_'): cls.column_attrs.append((attr[8:], attr)) elif attr.startswith('_related_'): cls.related_attrs.append((attr[9:], attr)) elif attr.startswith('_description_'): cls.description_attrs.append((attr[13:], attr)) class Field(object): """ The field descriptor contains the field definition, and manages accesses and assignments of the corresponding field on records. 
The following attributes may be provided when instanciating a field: :param string: the label of the field seen by users (string); if not set, the ORM takes the field name in the class (capitalized). :param help: the tooltip of the field seen by users (string) :param readonly: whether the field is readonly (boolean, by default ``False``) :param required: whether the value of the field is required (boolean, by default ``False``) :param index: whether the field is indexed in database (boolean, by default ``False``) :param default: the default value for the field; this is either a static value, or a function taking a recordset and returning a value :param states: a dictionary mapping state values to lists of UI attribute-value pairs; possible attributes are: 'readonly', 'required', 'invisible'. Note: Any state-based condition requires the ``state`` field value to be available on the client-side UI. This is typically done by including it in the relevant views, possibly made invisible if not relevant for the end-user. :param groups: comma-separated list of group xml ids (string); this restricts the field access to the users of the given groups only :param bool copy: whether the field value should be copied when the record is duplicated (default: ``True`` for normal fields, ``False`` for ``one2many`` and computed fields, including property fields and related fields) :param string oldname: the previous name of this field, so that ORM can rename it automatically at migration .. _field-computed: .. rubric:: Computed fields One can define a field whose value is computed instead of simply being read from the database. The attributes that are specific to computed fields are given below. To define such a field, simply provide a value for the attribute ``compute``. 
:param compute: name of a method that computes the field :param inverse: name of a method that inverses the field (optional) :param search: name of a method that implement search on the field (optional) :param store: whether the field is stored in database (boolean, by default ``False`` on computed fields) :param compute_sudo: whether the field should be recomputed as superuser to bypass access rights (boolean, by default ``False``) The methods given for ``compute``, ``inverse`` and ``search`` are model methods. Their signature is shown in the following example:: upper = fields.Char(compute='_compute_upper', inverse='_inverse_upper', search='_search_upper') @api.depends('name') def _compute_upper(self): for rec in self: rec.upper = rec.name.upper() if rec.name else False def _inverse_upper(self): for rec in self: rec.name = rec.upper.lower() if rec.upper else False def _search_upper(self, operator, value): if operator == 'like': operator = 'ilike' return [('name', operator, value)] The compute method has to assign the field on all records of the invoked recordset. The decorator :meth:`openerp.api.depends` must be applied on the compute method to specify the field dependencies; those dependencies are used to determine when to recompute the field; recomputation is automatic and guarantees cache/database consistency. Note that the same method can be used for several fields, you simply have to assign all the given fields in the method; the method will be invoked once for all those fields. By default, a computed field is not stored to the database, and is computed on-the-fly. Adding the attribute ``store=True`` will store the field's values in the database. The advantage of a stored field is that searching on that field is done by the database itself. The disadvantage is that it requires database updates when the field must be recomputed. 
The inverse method, as its name says, does the inverse of the compute method: the invoked records have a value for the field, and you must apply the necessary changes on the field dependencies such that the computation gives the expected value. Note that a computed field without an inverse method is readonly by default. The search method is invoked when processing domains before doing an actual search on the model. It must return a domain equivalent to the condition: ``field operator value``. .. _field-related: .. rubric:: Related fields The value of a related field is given by following a sequence of relational fields and reading a field on the reached model. The complete sequence of fields to traverse is specified by the attribute :param related: sequence of field names Some field attributes are automatically copied from the source field if they are not redefined: ``string``, ``help``, ``readonly``, ``required`` (only if all fields in the sequence are required), ``groups``, ``digits``, ``size``, ``translate``, ``sanitize``, ``selection``, ``comodel_name``, ``domain``, ``context``. All semantic-free attributes are copied from the source field. By default, the values of related fields are not stored to the database. Add the attribute ``store=True`` to make it stored, just like computed fields. Related fields are automatically recomputed when their dependencies are modified. .. _field-company-dependent: .. rubric:: Company-dependent fields Formerly known as 'property' fields, the value of those fields depends on the company. In other words, users that belong to different companies may see different values for the field on a given record. :param company_dependent: whether the field is company-dependent (boolean) .. _field-incremental-definition: .. rubric:: Incremental definition A field is defined as class attribute on a model class. 
If the model is extended (see :class:`~openerp.models.Model`), one can also extend the field definition by redefining a field with the same name and same type on the subclass. In that case, the attributes of the field are taken from the parent class and overridden by the ones given in subclasses. For instance, the second class below only adds a tooltip on the field ``state``:: class First(models.Model): _name = 'foo' state = fields.Selection([...], required=True) class Second(models.Model): _inherit = 'foo' state = fields.Selection(help="Blah blah blah") """ __metaclass__ = MetaField type = None # type of the field (string) relational = False # whether the field is a relational one _slots = { '_attrs': EMPTY_DICT, # dictionary of field attributes; it contains: # - all attributes after __init__() # - free attributes only after set_class_name() 'automatic': False, # whether the field is automatically created ("magic" field) 'inherited': False, # whether the field is inherited (_inherits) 'column': None, # the column corresponding to the field 'setup_done': False, # whether the field has been set up 'name': None, # name of the field 'model_name': None, # name of the model of this field 'comodel_name': None, # name of the model of values (if relational) 'store': True, # whether the field is stored in database 'index': False, # whether the field is indexed in database 'manual': False, # whether the field is a custom field 'copy': True, # whether the field is copied over by BaseModel.copy() 'depends': (), # collection of field dependencies 'recursive': False, # whether self depends on itself 'compute': None, # compute(recs) computes field on recs 'compute_sudo': False, # whether field should be recomputed as admin 'inverse': None, # inverse(recs) inverses field on recs 'search': None, # search(recs, operator, value) searches on self 'related': None, # sequence of field names, for related fields 'related_sudo': True, # whether related fields should be read as admin 
'company_dependent': False, # whether ``self`` is company-dependent (property field) 'default': None, # default(recs) returns the default value 'string': None, # field label 'help': None, # field tooltip 'readonly': False, # whether the field is readonly 'required': False, # whether the field is required 'states': None, # set readonly and required depending on state 'groups': None, # csv list of group xml ids 'change_default': False, # whether the field may trigger a "user-onchange" 'deprecated': None, # whether the field is deprecated 'inverse_fields': (), # collection of inverse fields (objects) 'computed_fields': (), # fields computed with the same method as self 'related_field': None, # corresponding related field '_triggers': (), # invalidation and recomputation triggers } def __init__(self, string=None, **kwargs): kwargs['string'] = string attrs = {key: val for key, val in kwargs.iteritems() if val is not None} self._attrs = attrs or EMPTY_DICT def __getattr__(self, name): """ Access non-slot field attribute. """ try: return self._attrs[name] except KeyError: raise AttributeError(name) def __setattr__(self, name, value): """ Set slot or non-slot field attribute. """ try: object.__setattr__(self, name, value) except AttributeError: if self._attrs: self._attrs[name] = value else: self._attrs = {name: value} # replace EMPTY_DICT def __delattr__(self, name): """ Remove non-slot field attribute. """ try: del self._attrs[name] except KeyError: raise AttributeError(name) def new(self, **kwargs): """ Return a field of the same type as ``self``, with its own parameters. """ return type(self)(**kwargs) def set_class_name(self, cls, name): """ Assign the model class and field name of ``self``. 
""" self_attrs = self._attrs for attr, value in self._slots.iteritems(): setattr(self, attr, value) self.model_name = cls._name self.name = name # determine all inherited field attributes attrs = {} for field in resolve_all_mro(cls, name, reverse=True): if isinstance(field, type(self)): attrs.update(field._attrs) else: attrs.clear() attrs.update(self_attrs) # necessary in case self is not in cls # initialize ``self`` with ``attrs`` if attrs.get('compute'): # by default, computed fields are not stored, not copied and readonly attrs['store'] = attrs.get('store', False) attrs['copy'] = attrs.get('copy', False) attrs['readonly'] = attrs.get('readonly', not attrs.get('inverse')) if attrs.get('related'): # by default, related fields are not stored and not copied attrs['store'] = attrs.get('store', False) attrs['copy'] = attrs.get('copy', False) # fix for function fields overridden by regular columns if not isinstance(attrs.get('column'), (NoneType, fields.function)): attrs.pop('store', None) for attr, value in attrs.iteritems(): setattr(self, attr, value) if not self.string and not self.related: # related fields get their string from their parent field self.string = name.replace('_', ' ').capitalize() # determine self.default and cls._defaults in a consistent way self._determine_default(cls, name) def _determine_default(self, cls, name): """ Retrieve the default value for ``self`` in the hierarchy of ``cls``, and determine ``self.default`` and ``cls._defaults`` accordingly. 
""" self.default = None # traverse the class hierarchy upwards, and take the first field # definition with a default or _defaults for self for klass in cls.__mro__: if name in klass.__dict__: field = klass.__dict__[name] if not isinstance(field, type(self)): # klass contains another value overridden by self return if 'default' in field._attrs: # take the default in field, and adapt it for cls._defaults value = field._attrs['default'] if callable(value): from openerp import api self.default = value cls._defaults[name] = api.model( lambda recs: self.convert_to_write(value(recs)) ) else: self.default = lambda recs: value cls._defaults[name] = value return defaults = klass.__dict__.get('_defaults') or {} if name in defaults: # take the value from _defaults, and adapt it for self.default value = defaults[name] if callable(value): func = lambda recs: value(recs._model, recs._cr, recs._uid, recs._context) else: func = lambda recs: value self.default = lambda recs: self.convert_to_cache( func(recs), recs, validate=False, ) cls._defaults[name] = value return def __str__(self): return "%s.%s" % (self.model_name, self.name) def __repr__(self): return "%s.%s" % (self.model_name, self.name) ############################################################################ # # Field setup # def setup(self, env): """ Make sure that ``self`` is set up, except for recomputation triggers. """ if not self.setup_done: if self.related: self._setup_related(env) else: self._setup_regular(env) self.setup_done = True # # Setup of non-related fields # def _setup_regular(self, env): """ Setup the attributes of a non-related field. 
""" recs = env[self.model_name] def make_depends(deps): return tuple(deps(recs) if callable(deps) else deps) # convert compute into a callable and determine depends if isinstance(self.compute, basestring): # if the compute method has been overridden, concatenate all their _depends self.depends = () for method in resolve_all_mro(type(recs), self.compute, reverse=True): self.depends += make_depends(getattr(method, '_depends', ())) self.compute = getattr(type(recs), self.compute) else: self.depends = make_depends(getattr(self.compute, '_depends', ())) # convert inverse and search into callables if isinstance(self.inverse, basestring): self.inverse = getattr(type(recs), self.inverse) if isinstance(self.search, basestring): self.search = getattr(type(recs), self.search) # # Setup of related fields # def _setup_related(self, env): """ Setup the attributes of a related field. """ # fix the type of self.related if necessary if isinstance(self.related, basestring): self.related = tuple(self.related.split('.')) # determine the chain of fields, and make sure they are all set up recs = env[self.model_name] fields = [] for name in self.related: field = recs._fields[name] field.setup(env) recs = recs[name] fields.append(field) self.related_field = field # check type consistency if self.type != field.type: raise Warning("Type of related field %s is inconsistent with %s" % (self, field)) # determine dependencies, compute, inverse, and search self.depends = ('.'.join(self.related),) self.compute = self._compute_related if not (self.readonly or field.readonly): self.inverse = self._inverse_related if field._description_searchable: # allow searching on self only if the related field is searchable self.search = self._search_related # copy attributes from field to self (string, help, etc.) 
for attr, prop in self.related_attrs: if not getattr(self, attr): setattr(self, attr, getattr(field, prop)) for attr, value in field._attrs.iteritems(): if attr not in self._attrs: setattr(self, attr, value) # special case for states: copy it only for inherited fields if not self.states and self.inherited: self.states = field.states # special case for required: check if all fields are required if not self.store and not self.required: self.required = all(field.required for field in fields) def _compute_related(self, records): """ Compute the related field ``self`` on ``records``. """ # when related_sudo, bypass access rights checks when reading values others = records.sudo() if self.related_sudo else records for record, other in zip(records, others): if not record.id: # draft record, do not switch to another environment other = record # traverse the intermediate fields; follow the first record at each step for name in self.related[:-1]: other = other[name][:1] record[self.name] = other[self.related[-1]] def _inverse_related(self, records): """ Inverse the related field ``self`` on ``records``. """ # store record values, otherwise they may be lost by cache invalidation! record_value = {record: record[self.name] for record in records} for record in records: other = record # traverse the intermediate fields, and keep at most one record for name in self.related[:-1]: other = other[name][:1] if other: other[self.related[-1]] = record_value[record] def _search_related(self, records, operator, value): """ Determine the domain to search on field ``self``. 
""" return [('.'.join(self.related), operator, value)] # properties used by _setup_related() to copy values from related field _related_comodel_name = property(attrgetter('comodel_name')) _related_string = property(attrgetter('string')) _related_help = property(attrgetter('help')) _related_readonly = property(attrgetter('readonly')) _related_groups = property(attrgetter('groups')) @property def base_field(self): """ Return the base field of an inherited field, or ``self``. """ return self.related_field.base_field if self.inherited else self # # Setup of field triggers # # The triggers is a collection of pairs (field, path) of computed fields # that depend on ``self``. When ``self`` is modified, it invalidates the cache # of each ``field``, and registers the records to recompute based on ``path``. # See method ``modified`` below for details. # def add_trigger(self, trigger): """ Add a recomputation trigger on ``self``. """ if trigger not in self._triggers: self._triggers += (trigger,) def setup_triggers(self, env): """ Add the necessary triggers to invalidate/recompute ``self``. """ model = env[self.model_name] for path in self.depends: self._setup_dependency([], model, path.split('.')) def _setup_dependency(self, path0, model, path1): """ Make ``self`` depend on ``model``; `path0 + path1` is a dependency of ``self``, and ``path0`` is the sequence of field names from ``self.model`` to ``model``. 
""" env = model.env head, tail = path1[0], path1[1:] if head == '*': # special case: add triggers on all fields of model (except self) fields = set(model._fields.itervalues()) - set([self]) else: fields = [model._fields[head]] for field in fields: if field == self: _logger.debug("Field %s is recursively defined", self) self.recursive = True continue #_logger.debug("Add trigger on %s to recompute %s", field, self) field.add_trigger((self, '.'.join(path0 or ['id']))) # add trigger on inverse fields, too for invf in field.inverse_fields: #_logger.debug("Add trigger on %s to recompute %s", invf, self) invf.add_trigger((self, '.'.join(path0 + [head]))) # recursively traverse the dependency if tail: comodel = env[field.comodel_name] self._setup_dependency(path0 + [head], comodel, tail) @property def dependents(self): """ Return the computed fields that depend on ``self``. """ return (field for field, path in self._triggers) ############################################################################ # # Field description # def get_description(self, env): """ Return a dictionary that describes the field ``self``. 
""" desc = {'type': self.type} for attr, prop in self.description_attrs: value = getattr(self, prop) if callable(value): value = value(env) if value is not None: desc[attr] = value return desc # properties used by get_description() _description_store = property(attrgetter('store')) _description_manual = property(attrgetter('manual')) _description_depends = property(attrgetter('depends')) _description_related = property(attrgetter('related')) _description_company_dependent = property(attrgetter('company_dependent')) _description_readonly = property(attrgetter('readonly')) _description_required = property(attrgetter('required')) _description_states = property(attrgetter('states')) _description_groups = property(attrgetter('groups')) _description_change_default = property(attrgetter('change_default')) _description_deprecated = property(attrgetter('deprecated')) @property def _description_searchable(self): return bool(self.store or self.search or (self.column and self.column._fnct_search)) @property def _description_sortable(self): return self.store or (self.inherited and self.related_field._description_sortable) def _description_string(self, env): if self.string and env.lang: field = self.base_field name = "%s,%s" % (field.model_name, field.name) trans = env['ir.translation']._get_source(name, 'field', env.lang) return trans or self.string return self.string def _description_help(self, env): if self.help and env.lang: name = "%s,%s" % (self.model_name, self.name) trans = env['ir.translation']._get_source(name, 'help', env.lang) return trans or self.help return self.help ############################################################################ # # Conversion to column instance # def to_column(self): """ Return a column object corresponding to ``self``, or ``None``. 
""" if not self.store and self.compute: # non-stored computed fields do not have a corresponding column self.column = None return None # determine column parameters #_logger.debug("Create fields._column for Field %s", self) args = {} for attr, prop in self.column_attrs: args[attr] = getattr(self, prop) for attr, value in self._attrs.iteritems(): args[attr] = value if self.company_dependent: # company-dependent fields are mapped to former property fields args['type'] = self.type args['relation'] = self.comodel_name self.column = fields.property(**args) elif self.column: # let the column provide a valid column for the given parameters self.column = self.column.new(_computed_field=bool(self.compute), **args) else: # create a fresh new column of the right type self.column = getattr(fields, self.type)(**args) return self.column # properties used by to_column() to create a column instance _column_copy = property(attrgetter('copy')) _column_select = property(attrgetter('index')) _column_manual = property(attrgetter('manual')) _column_string = property(attrgetter('string')) _column_help = property(attrgetter('help')) _column_readonly = property(attrgetter('readonly')) _column_required = property(attrgetter('required')) _column_states = property(attrgetter('states')) _column_groups = property(attrgetter('groups')) _column_change_default = property(attrgetter('change_default')) _column_deprecated = property(attrgetter('deprecated')) ############################################################################ # # Conversion of values # def null(self, env): """ return the null value for this field in the given environment """ return False def convert_to_cache(self, value, record, validate=True): """ convert ``value`` to the cache level in ``env``; ``value`` may come from an assignment, or have the format of methods :meth:`BaseModel.read` or :meth:`BaseModel.write` :param record: the target record for the assignment, or an empty recordset :param bool validate: when True, 
field-specific validation of ``value`` will be performed """ return value def convert_to_read(self, value, use_name_get=True): """ convert ``value`` from the cache to a value as returned by method :meth:`BaseModel.read` :param bool use_name_get: when True, value's diplay name will be computed using :meth:`BaseModel.name_get`, if relevant for the field """ return False if value is None else value def convert_to_write(self, value, target=None, fnames=None): """ convert ``value`` from the cache to a valid value for method :meth:`BaseModel.write`. :param target: optional, the record to be modified with this value :param fnames: for relational fields only, an optional collection of field names to convert """ return self.convert_to_read(value) def convert_to_onchange(self, value): """ convert ``value`` from the cache to a valid value for an onchange method v7. """ return self.convert_to_write(value) def convert_to_export(self, value, env): """ convert ``value`` from the cache to a valid value for export. The parameter ``env`` is given for managing translations. """ if not value: return '' return value if env.context.get('export_raw_data') else ustr(value) def convert_to_display_name(self, value, record=None): """ convert ``value`` from the cache to a suitable display name. 
""" return ustr(value) ############################################################################ # # Descriptor methods # def __get__(self, record, owner): """ return the value of field ``self`` on ``record`` """ if record is None: return self # the field is accessed through the owner class if not record: # null record -> return the null value for this field return self.null(record.env) # only a single record may be accessed record.ensure_one() try: return record._cache[self] except KeyError: pass # cache miss, retrieve value if record.id: # normal record -> read or compute value for this field self.determine_value(record) else: # draft record -> compute the value or let it be null self.determine_draft_value(record) # the result should be in cache now return record._cache[self] def __set__(self, record, value): """ set the value of field ``self`` on ``record`` """ env = record.env # only a single record may be updated record.ensure_one() # adapt value to the cache level value = self.convert_to_cache(value, record) if env.in_draft or not record.id: # determine dependent fields spec = self.modified_draft(record) # set value in cache, inverse field, and mark record as dirty record._cache[self] = value if env.in_onchange: for invf in self.inverse_fields: invf._update(value, record) record._set_dirty(self.name) # determine more dependent fields, and invalidate them if self.relational: spec += self.modified_draft(record) env.invalidate(spec) else: # simply write to the database, and update cache record.write({self.name: self.convert_to_write(value)}) record._cache[self] = value ############################################################################ # # Computation of field values # def _compute_value(self, records): """ Invoke the compute method on ``records``. 
""" # initialize the fields to their corresponding null value in cache for field in self.computed_fields: records._cache[field] = field.null(records.env) records.env.computed[field].update(records._ids) self.compute(records) for field in self.computed_fields: records.env.computed[field].difference_update(records._ids) def compute_value(self, records): """ Invoke the compute method on ``records``; the results are in cache. """ with records.env.do_in_draft(): try: self._compute_value(records) except (AccessError, MissingError): # some record is forbidden or missing, retry record by record for record in records: try: self._compute_value(record) except Exception as exc: record._cache[self.name] = FailedValue(exc) def determine_value(self, record): """ Determine the value of ``self`` for ``record``. """ env = record.env if self.column and not (self.depends and env.in_draft): # this is a stored field or an old-style function field if self.depends: # this is a stored computed field, check for recomputation recs = record._recompute_check(self) if recs: # recompute the value (only in cache) self.compute_value(recs) # HACK: if result is in the wrong cache, copy values if recs.env != env: for source, target in zip(recs, recs.with_env(env)): try: values = target._convert_to_cache({ f.name: source[f.name] for f in self.computed_fields }, validate=False) except MissingError as e: values = FailedValue(e) target._cache.update(values) # the result is saved to database by BaseModel.recompute() return # read the field from database record._prefetch_field(self) elif self.compute: # this is either a non-stored computed field, or a stored computed # field in draft mode if self.recursive: self.compute_value(record) else: recs = record._in_cache_without(self) self.compute_value(recs) else: # this is a non-stored non-computed field record._cache[self] = self.null(env) def determine_draft_value(self, record): """ Determine the value of ``self`` for the given draft ``record``. 
""" if self.compute: self._compute_value(record) else: record._cache[self] = SpecialValue(self.null(record.env)) def determine_inverse(self, records): """ Given the value of ``self`` on ``records``, inverse the computation. """ if self.inverse: self.inverse(records) def determine_domain(self, records, operator, value): """ Return a domain representing a condition on ``self``. """ if self.search: return self.search(records, operator, value) else: return [(self.name, operator, value)] ############################################################################ # # Notification when fields are modified # def modified(self, records): """ Notify that field ``self`` has been modified on ``records``: prepare the fields/records to recompute, and return a spec indicating what to invalidate. """ # invalidate the fields that depend on self, and prepare recomputation spec = [(self, records._ids)] for field, path in self._triggers: if path and field.store: # don't move this line to function top, see log env = records.env(user=SUPERUSER_ID, context={'active_test': False}) target = env[field.model_name].search([(path, 'in', records.ids)]) if target: spec.append((field, target._ids)) # recompute field on target in the environment of records, # and as user admin if required if field.compute_sudo: target = target.with_env(records.env(user=SUPERUSER_ID)) else: target = target.with_env(records.env) target._recompute_todo(field) else: spec.append((field, None)) return spec def modified_draft(self, records): """ Same as :meth:`modified`, but in draft mode. 
""" env = records.env # invalidate the fields on the records in cache that depend on # ``records``, except fields currently being computed spec = [] for field, path in self._triggers: target = env[field.model_name] computed = target.browse(env.computed[field]) if path == 'id': target = records - computed elif path: target = (target.browse(env.cache[field]) - computed).filtered( lambda rec: rec._mapped_cache(path) & records ) else: target = target.browse(env.cache[field]) - computed if target: spec.append((field, target._ids)) return spec class Boolean(Field): type = 'boolean' def convert_to_cache(self, value, record, validate=True): return bool(value) def convert_to_export(self, value, env): if env.context.get('export_raw_data'): return value return ustr(value) class Integer(Field): type = 'integer' _slots = { 'group_operator': None, # operator for aggregating values 'group_expression': None, # advance expression for aggregating values } _related_group_operator = property(attrgetter('group_operator')) _column_group_operator = property(attrgetter('group_operator')) _related_group_expression = property(attrgetter('group_expression')) _column_group_expression = property(attrgetter('group_expression')) def convert_to_cache(self, value, record, validate=True): if isinstance(value, dict): # special case, when an integer field is used as inverse for a one2many return value.get('id', False) return int(value or 0) def convert_to_read(self, value, use_name_get=True): # Integer values greater than 2^31-1 are not supported in pure XMLRPC, # so we have to pass them as floats :-( if value and value > xmlrpclib.MAXINT: return float(value) return value def _update(self, records, value): # special case, when an integer field is used as inverse for a one2many records._cache[self] = value.id or 0 def convert_to_export(self, value, env): if value or value == 0: return value if env.context.get('export_raw_data') else ustr(value) return '' class Float(Field): """ The precision digits 
are given by the attribute :param digits: a pair (total, decimal), or a function taking a database cursor and returning a pair (total, decimal) """ type = 'float' _slots = { '_digits': None, # digits argument passed to class initializer 'group_operator': None, # operator for aggregating values 'group_expression': None, # advance expression for aggregating values } def __init__(self, string=None, digits=None, **kwargs): super(Float, self).__init__(string=string, _digits=digits, **kwargs) @property def digits(self): if callable(self._digits): with fields._get_cursor() as cr: return self._digits(cr) else: return self._digits def _setup_digits(self, env): """ Setup the digits for ``self`` and its corresponding column """ pass def _setup_regular(self, env): super(Float, self)._setup_regular(env) self._setup_digits(env) _related__digits = property(attrgetter('_digits')) _related_group_operator = property(attrgetter('group_operator')) _related_group_expression = property(attrgetter('group_expression')) _description_digits = property(attrgetter('digits')) _column_digits = property(lambda self: not callable(self._digits) and self._digits) _column_digits_compute = property(lambda self: callable(self._digits) and self._digits) _column_group_operator = property(attrgetter('group_operator')) _column_group_expression = property(attrgetter('group_expression')) def convert_to_cache(self, value, record, validate=True): # apply rounding here, otherwise value in cache may be wrong! value = float(value or 0.0) digits = self.digits return float_round(value, precision_digits=digits[1]) if digits else value def convert_to_export(self, value, env): if value or value == 0.0: return value if env.context.get('export_raw_data') else ustr(value) return '' class _String(Field): """ Abstract class for string fields. 
""" _slots = { 'translate': False, # whether the field is translated } _column_translate = property(attrgetter('translate')) _related_translate = property(attrgetter('translate')) _description_translate = property(attrgetter('translate')) class Char(_String): """ Basic string field, can be length-limited, usually displayed as a single-line string in clients :param int size: the maximum size of values stored for that field :param bool translate: whether the values of this field can be translated """ type = 'char' _slots = { 'size': None, # maximum size of values (deprecated) } _column_size = property(attrgetter('size')) _related_size = property(attrgetter('size')) _description_size = property(attrgetter('size')) def _setup_regular(self, env): super(Char, self)._setup_regular(env) assert isinstance(self.size, (NoneType, int)), \ "Char field %s with non-integer size %r" % (self, self.size) def convert_to_cache(self, value, record, validate=True): if value is None or value is False: return False return ustr(value)[:self.size] class Text(_String): """ Very similar to :class:`~.Char` but used for longer contents, does not have a size and usually displayed as a multiline text box. 
:param translate: whether the value of this field can be translated """ type = 'text' def convert_to_cache(self, value, record, validate=True): if value is None or value is False: return False return ustr(value) class Html(_String): type = 'html' _slots = { 'sanitize': True, # whether value must be sanitized 'strip_style': False, # whether to strip style attributes } _column_sanitize = property(attrgetter('sanitize')) _related_sanitize = property(attrgetter('sanitize')) _description_sanitize = property(attrgetter('sanitize')) _column_strip_style = property(attrgetter('strip_style')) _related_strip_style = property(attrgetter('strip_style')) _description_strip_style = property(attrgetter('strip_style')) def convert_to_cache(self, value, record, validate=True): if value is None or value is False: return False if validate and self.sanitize: return html_sanitize(value, strip_style=self.strip_style) return value class Date(Field): type = 'date' @staticmethod def today(*args): """ Return the current day in the format expected by the ORM. This function may be used to compute default values. """ return date.today().strftime(DATE_FORMAT) @staticmethod def context_today(record, timestamp=None): """ Return the current date as seen in the client's timezone in a format fit for date fields. This method may be used to compute default values. :param datetime timestamp: optional datetime value to use instead of the current date and time (must be a datetime, regular dates can't be converted between timezones.) 
:rtype: str """ today = timestamp or datetime.now() context_today = None tz_name = record._context.get('tz') or record.env.user.tz if tz_name: try: today_utc = pytz.timezone('UTC').localize(today, is_dst=False) # UTC = no DST context_today = today_utc.astimezone(pytz.timezone(tz_name)) except Exception: _logger.debug("failed to compute context/client-specific today date, using UTC value for `today`", exc_info=True) return (context_today or today).strftime(DATE_FORMAT) @staticmethod def from_string(value): """ Convert an ORM ``value`` into a :class:`date` value. """ if not value: return None value = value[:DATE_LENGTH] return datetime.strptime(value, DATE_FORMAT).date() @staticmethod def to_string(value): """ Convert a :class:`date` value into the format expected by the ORM. """ return value.strftime(DATE_FORMAT) if value else False def convert_to_cache(self, value, record, validate=True): if not value: return False if isinstance(value, basestring): if validate: # force parsing for validation self.from_string(value) return value[:DATE_LENGTH] return self.to_string(value) def convert_to_export(self, value, env): if not value: return '' return self.from_string(value) if env.context.get('export_raw_data') else ustr(value) class Datetime(Field): type = 'datetime' @staticmethod def now(*args): """ Return the current day and time in the format expected by the ORM. This function may be used to compute default values. """ return datetime.now().strftime(DATETIME_FORMAT) @staticmethod def context_timestamp(record, timestamp): """Returns the given timestamp converted to the client's timezone. This method is *not* meant for use as a _defaults initializer, because datetime fields are automatically converted upon display on client side. For _defaults you :meth:`fields.datetime.now` should be used instead. 
:param datetime timestamp: naive datetime value (expressed in UTC) to be converted to the client timezone :rtype: datetime :return: timestamp converted to timezone-aware datetime in context timezone """ assert isinstance(timestamp, datetime), 'Datetime instance expected' tz_name = record._context.get('tz') or record.env.user.tz utc_timestamp = pytz.utc.localize(timestamp, is_dst=False) # UTC = no DST if tz_name: try: context_tz = pytz.timezone(tz_name) return utc_timestamp.astimezone(context_tz) except Exception: _logger.debug("failed to compute context/client-specific timestamp, " "using the UTC value", exc_info=True) return utc_timestamp @staticmethod def from_string(value): """ Convert an ORM ``value`` into a :class:`datetime` value. """ if not value: return None value = value[:DATETIME_LENGTH] if len(value) == DATE_LENGTH: value += " 00:00:00" return datetime.strptime(value, DATETIME_FORMAT) @staticmethod def to_string(value): """ Convert a :class:`datetime` value into the format expected by the ORM. """ return value.strftime(DATETIME_FORMAT) if value else False def convert_to_cache(self, value, record, validate=True): if not value: return False if isinstance(value, basestring): if validate: # force parsing for validation self.from_string(value) value = value[:DATETIME_LENGTH] if len(value) == DATE_LENGTH: value += " 00:00:00" return value return self.to_string(value) def convert_to_export(self, value, env): if not value: return '' return self.from_string(value) if env.context.get('export_raw_data') else ustr(value) def convert_to_display_name(self, value, record=None): assert record, 'Record expected' return Datetime.to_string(Datetime.context_timestamp(record, Datetime.from_string(value))) class Binary(Field): type = 'binary' class Selection(Field): """ :param selection: specifies the possible values for this field. It is given as either a list of pairs (``value``, ``string``), or a model method, or a method name. 
:param selection_add: provides an extension of the selection in the case of an overridden field. It is a list of pairs (``value``, ``string``). The attribute ``selection`` is mandatory except in the case of :ref:`related fields <field-related>` or :ref:`field extensions <field-incremental-definition>`. """ type = 'selection' _slots = { 'selection': None, # [(value, string), ...], function or method name } def __init__(self, selection=None, string=None, **kwargs): if callable(selection): from openerp import api selection = api.expected(api.model, selection) super(Selection, self).__init__(selection=selection, string=string, **kwargs) def _setup_regular(self, env): super(Selection, self)._setup_regular(env) assert self.selection is not None, "Field %s without selection" % self def _setup_related(self, env): super(Selection, self)._setup_related(env) # selection must be computed on related field field = self.related_field self.selection = lambda model: field._description_selection(model.env) def set_class_name(self, cls, name): super(Selection, self).set_class_name(cls, name) # determine selection (applying 'selection_add' extensions) for field in resolve_all_mro(cls, name, reverse=True): if isinstance(field, type(self)): # We cannot use field.selection or field.selection_add here # because those attributes are overridden by ``set_class_name``. 
if 'selection' in field._attrs: self.selection = field._attrs['selection'] if 'selection_add' in field._attrs: # use an OrderedDict to update existing values selection_add = field._attrs['selection_add'] self.selection = OrderedDict(self.selection + selection_add).items() else: self.selection = None def _description_selection(self, env): """ return the selection list (pairs (value, label)); labels are translated according to context language """ selection = self.selection if isinstance(selection, basestring): return getattr(env[self.model_name], selection)() if callable(selection): return selection(env[self.model_name]) # translate selection labels if env.lang: name = "%s,%s" % (self.model_name, self.name) translate = partial( env['ir.translation']._get_source, name, 'selection', env.lang) return [(value, translate(label) if label else label) for value, label in selection] else: return selection @property def _column_selection(self): if isinstance(self.selection, basestring): method = self.selection return lambda self, *a, **kw: getattr(self, method)(*a, **kw) else: return self.selection def get_values(self, env): """ return a list of the possible values """ selection = self.selection if isinstance(selection, basestring): selection = getattr(env[self.model_name], selection)() elif callable(selection): selection = selection(env[self.model_name]) return [value for value, _ in selection] def convert_to_cache(self, value, record, validate=True): if not validate: return value or False if value in self.get_values(record.env): return value elif not value: return False raise ValueError("Wrong value for %s: %r" % (self, value)) def convert_to_export(self, value, env): if not isinstance(self.selection, list): # FIXME: this reproduces an existing buggy behavior! 
return value if value else '' for item in self._description_selection(env): if item[0] == value: return item[1] return False class Reference(Selection): type = 'reference' _slots = { 'size': None, # maximum size of values (deprecated) } _related_size = property(attrgetter('size')) _column_size = property(attrgetter('size')) def _setup_regular(self, env): super(Reference, self)._setup_regular(env) assert isinstance(self.size, (NoneType, int)), \ "Reference field %s with non-integer size %r" % (self, self.size) def convert_to_cache(self, value, record, validate=True): if isinstance(value, BaseModel): if ((not validate or value._name in self.get_values(record.env)) and len(value) <= 1): return value.with_env(record.env) or False elif isinstance(value, basestring): res_model, res_id = value.split(',') return record.env[res_model].browse(int(res_id)) elif not value: return False raise ValueError("Wrong value for %s: %r" % (self, value)) def convert_to_read(self, value, use_name_get=True): return "%s,%s" % (value._name, value.id) if value else False def convert_to_export(self, value, env): return value.name_get()[0][1] if value else '' def convert_to_display_name(self, value, record=None): return ustr(value and value.display_name) class _Relational(Field): """ Abstract class for relational fields. """ relational = True _slots = { 'domain': [], # domain for searching values 'context': {}, # context for searching values } def _setup_regular(self, env): super(_Relational, self)._setup_regular(env) if self.comodel_name not in env.registry: _logger.warning("Field %s with unknown comodel_name %r" % (self, self.comodel_name)) self.comodel_name = '_unknown' @property def _related_domain(self): if callable(self.domain): # will be called with another model than self's return lambda recs: self.domain(recs.env[self.model_name]) else: # maybe not correct if domain is a string... 
return self.domain _related_context = property(attrgetter('context')) _description_relation = property(attrgetter('comodel_name')) _description_context = property(attrgetter('context')) def _description_domain(self, env): return self.domain(env[self.model_name]) if callable(self.domain) else self.domain _column_obj = property(attrgetter('comodel_name')) _column_domain = property(attrgetter('domain')) _column_context = property(attrgetter('context')) def null(self, env): return env[self.comodel_name] def modified(self, records): # Invalidate cache for self.inverse_fields, too. Note that recomputation # of fields that depend on self.inverse_fields is already covered by the # triggers (see above). spec = super(_Relational, self).modified(records) for invf in self.inverse_fields: spec.append((invf, None)) return spec class Many2one(_Relational): """ The value of such a field is a recordset of size 0 (no record) or 1 (a single record). :param comodel_name: name of the target model (string) :param domain: an optional domain to set on candidate values on the client side (domain or string) :param context: an optional context to use on the client side when handling that field (dictionary) :param ondelete: what to do when the referred record is deleted; possible values are: ``'set null'``, ``'restrict'``, ``'cascade'`` :param auto_join: whether JOINs are generated upon search through that field (boolean, by default ``False``) :param delegate: set it to ``True`` to make fields of the target model accessible from the current model (corresponds to ``_inherits``) The attribute ``comodel_name`` is mandatory except in the case of related fields or field extensions. 
""" type = 'many2one' _slots = { 'ondelete': 'set null', # what to do when value is deleted 'auto_join': False, # whether joins are generated upon search 'delegate': False, # whether self implements delegation } def __init__(self, comodel_name=None, string=None, **kwargs): super(Many2one, self).__init__(comodel_name=comodel_name, string=string, **kwargs) def set_class_name(self, cls, name): super(Many2one, self).set_class_name(cls, name) # determine self.delegate if not self.delegate: self.delegate = name in cls._inherits.values() _column_ondelete = property(attrgetter('ondelete')) _column_auto_join = property(attrgetter('auto_join')) def _update(self, records, value): """ Update the cached value of ``self`` for ``records`` with ``value``. """ records._cache[self] = value def convert_to_cache(self, value, record, validate=True): if isinstance(value, (NoneType, int, long)): return record.env[self.comodel_name].browse(value) if isinstance(value, BaseModel): if value._name == self.comodel_name and len(value) <= 1: return value.with_env(record.env) raise ValueError("Wrong value for %s: %r" % (self, value)) elif isinstance(value, tuple): return record.env[self.comodel_name].browse(value[0]) elif isinstance(value, dict): return record.env[self.comodel_name].new(value) else: return self.null(record.env) def convert_to_read(self, value, use_name_get=True): if use_name_get and value: # evaluate name_get() as superuser, because the visibility of a # many2one field value (id and name) depends on the current record's # access rights, and not the value's access rights. try: value_sudo = value.sudo() # performance trick: make sure that all records of the same # model as value in value.env will be prefetched in value_sudo.env value_sudo.env.prefetch[value._name].update(value.env.prefetch[value._name]) return value_sudo.name_get()[0] except MissingError: # Should not happen, unless the foreign key is missing. 
return False else: return value.id def convert_to_write(self, value, target=None, fnames=None): return value.id def convert_to_onchange(self, value): return value.id def convert_to_export(self, value, env): return value.name_get()[0][1] if value else '' def convert_to_display_name(self, value, record=None): return ustr(value.display_name) class UnionUpdate(SpecialValue): """ Placeholder for a value update; when this value is taken from the cache, it returns ``record[field.name] | value`` and stores it in the cache. """ def __init__(self, field, record, value): self.args = (field, record, value) def get(self): field, record, value = self.args # in order to read the current field's value, remove self from cache del record._cache[field] # read the current field's value, and update it in cache only record._cache[field] = new_value = record[field.name] | value return new_value class _RelationalMulti(_Relational): """ Abstract class for relational fields *2many. """ def _update(self, records, value): """ Update the cached value of ``self`` for ``records`` with ``value``. 
""" for record in records: if self in record._cache: record._cache[self] = record[self.name] | value else: record._cache[self] = UnionUpdate(self, record, value) def convert_to_cache(self, value, record, validate=True): if isinstance(value, BaseModel): if value._name == self.comodel_name: return value.with_env(record.env) elif isinstance(value, list): # value is a list of record ids or commands comodel = record.env[self.comodel_name] ids = OrderedSet(record[self.name].ids) # modify ids with the commands for command in value: if isinstance(command, (tuple, list)): if command[0] == 0: ids.add(comodel.new(command[2]).id) elif command[0] == 1: comodel.browse(command[1]).update(command[2]) ids.add(command[1]) elif command[0] == 2: # note: the record will be deleted by write() ids.discard(command[1]) elif command[0] == 3: ids.discard(command[1]) elif command[0] == 4: ids.add(command[1]) elif command[0] == 5: ids.clear() elif command[0] == 6: ids = OrderedSet(command[2]) elif isinstance(command, dict): ids.add(comodel.new(command).id) else: ids.add(command) # return result as a recordset return comodel.browse(list(ids)) elif not value: return self.null(record.env) raise ValueError("Wrong value for %s: %s" % (self, value)) def convert_to_read(self, value, use_name_get=True): return value.ids def convert_to_write(self, value, target=None, fnames=None): # remove/delete former records if target is None: set_ids = [] result = [(6, 0, set_ids)] add_existing = lambda id: set_ids.append(id) else: tag = 2 if self.type == 'one2many' else 3 result = [(tag, record.id) for record in target[self.name] - value] add_existing = lambda id: result.append((4, id)) if fnames is None: # take all fields in cache, except the inverses of self fnames = set(value._fields) - set(MAGIC_COLUMNS) for invf in self.inverse_fields: fnames.discard(invf.name) # add new and existing records for record in value: if not record.id: values = {k: v for k, v in record._cache.iteritems() if k in fnames} values = 
record._convert_to_write(values) result.append((0, 0, values)) elif record._is_dirty(): values = {k: record._cache[k] for k in record._get_dirty() if k in fnames} values = record._convert_to_write(values) result.append((1, record.id, values)) else: add_existing(record.id) return result def convert_to_export(self, value, env): return ','.join(name for id, name in value.name_get()) if value else '' def convert_to_display_name(self, value, record=None): raise NotImplementedError() def _compute_related(self, records): """ Compute the related field ``self`` on ``records``. """ for record in records: value = record # traverse the intermediate fields, and keep at most one record for name in self.related[:-1]: value = value[name][:1] record[self.name] = value[self.related[-1]] class One2many(_RelationalMulti): """ One2many field; the value of such a field is the recordset of all the records in ``comodel_name`` such that the field ``inverse_name`` is equal to the current record. :param comodel_name: name of the target model (string) :param inverse_name: name of the inverse ``Many2one`` field in ``comodel_name`` (string) :param domain: an optional domain to set on candidate values on the client side (domain or string) :param context: an optional context to use on the client side when handling that field (dictionary) :param auto_join: whether JOINs are generated upon search through that field (boolean, by default ``False``) :param limit: optional limit to use upon read (integer) The attributes ``comodel_name`` and ``inverse_name`` are mandatory except in the case of related fields or field extensions. 
""" type = 'one2many' _slots = { 'inverse_name': None, # name of the inverse field 'auto_join': False, # whether joins are generated upon search 'limit': None, # optional limit to use upon read 'copy': False, # o2m are not copied by default } def __init__(self, comodel_name=None, inverse_name=None, string=None, **kwargs): super(One2many, self).__init__( comodel_name=comodel_name, inverse_name=inverse_name, string=string, **kwargs ) def _setup_regular(self, env): super(One2many, self)._setup_regular(env) if self.inverse_name: # link self to its inverse field and vice-versa comodel = env[self.comodel_name] invf = comodel._fields[self.inverse_name] # In some rare cases, a ``One2many`` field can link to ``Int`` field # (res_model/res_id pattern). Only inverse the field if this is # a ``Many2one`` field. if isinstance(invf, Many2one): self.inverse_fields += (invf,) invf.inverse_fields += (self,) _description_relation_field = property(attrgetter('inverse_name')) _column_fields_id = property(attrgetter('inverse_name')) _column_auto_join = property(attrgetter('auto_join')) _column_limit = property(attrgetter('limit')) class Many2many(_RelationalMulti): """ Many2many field; the value of such a field is the recordset. :param comodel_name: name of the target model (string) The attribute ``comodel_name`` is mandatory except in the case of related fields or field extensions. :param relation: optional name of the table that stores the relation in the database (string) :param column1: optional name of the column referring to "these" records in the table ``relation`` (string) :param column2: optional name of the column referring to "those" records in the table ``relation`` (string) The attributes ``relation``, ``column1`` and ``column2`` are optional. If not given, names are automatically generated from model names, provided ``model_name`` and ``comodel_name`` are different! 
:param domain: an optional domain to set on candidate values on the client side (domain or string) :param context: an optional context to use on the client side when handling that field (dictionary) :param limit: optional limit to use upon read (integer) """ type = 'many2many' _slots = { 'relation': None, # name of table 'column1': None, # column of table referring to model 'column2': None, # column of table referring to comodel 'limit': None, # optional limit to use upon read } def __init__(self, comodel_name=None, relation=None, column1=None, column2=None, string=None, **kwargs): super(Many2many, self).__init__( comodel_name=comodel_name, relation=relation, column1=column1, column2=column2, string=string, **kwargs ) def _setup_regular(self, env): super(Many2many, self)._setup_regular(env) if not self.relation and self.store: # retrieve self.relation from the corresponding column column = self.to_column() if isinstance(column, fields.many2many): self.relation, self.column1, self.column2 = \ column._sql_names(env[self.model_name]) if self.relation: m2m = env.registry._m2m # if inverse field has already been setup, it is present in m2m invf = m2m.get((self.relation, self.column2, self.column1)) if invf: self.inverse_fields += (invf,) invf.inverse_fields += (self,) else: # add self in m2m, so that its inverse field can find it m2m[(self.relation, self.column1, self.column2)] = self _column_rel = property(attrgetter('relation')) _column_id1 = property(attrgetter('column1')) _column_id2 = property(attrgetter('column2')) _column_limit = property(attrgetter('limit')) class Serialized(Field): """ Minimal support for existing sparse and serialized fields. """ type = 'serialized' def convert_to_cache(self, value, record, validate=True): return value or {} class Id(Field): """ Special case for field 'id'. 
""" type = 'integer' _slots = { 'string': 'ID', 'store': True, 'readonly': True, } def to_column(self): self.column = fields.integer(self.string) return self.column def __get__(self, record, owner): if record is None: return self # the field is accessed through the class owner if not record: return False return record.ensure_one()._ids[0] def __set__(self, record, value): raise TypeError("field 'id' cannot be assigned") # imported here to avoid dependency cycle issues from openerp import SUPERUSER_ID, registry from .exceptions import Warning, AccessError, MissingError from .models import BaseModel, MAGIC_COLUMNS from .osv import fields
Antiun/odoo
openerp/fields.py
Python
agpl-3.0
75,603
package lila.game import scala.concurrent.duration._ import lila.db.dsl._ import lila.memo.AsyncCache import lila.user.UserRepo final class CrosstableApi( coll: Coll, gameColl: Coll, system: akka.actor.ActorSystem) { import Crosstable.Result private val maxGames = 20 def apply(game: Game): Fu[Option[Crosstable]] = game.userIds.distinct match { case List(u1, u2) => apply(u1, u2) case _ => fuccess(none) } def apply(u1: String, u2: String): Fu[Option[Crosstable]] = coll.uno[Crosstable](select(u1, u2)) orElse createFast(u1, u2) def nbGames(u1: String, u2: String): Fu[Int] = coll.find( select(u1, u2), $doc("s1" -> true, "s2" -> true) ).uno[Bdoc] map { res => ~(for { o <- res s1 <- o.getAs[Int]("s1") s2 <- o.getAs[Int]("s2") } yield (s1 + s2) / 10) } def add(game: Game): Funit = game.userIds.distinct.sorted match { case List(u1, u2) => val result = Result(game.id, game.winnerUserId) val bsonResult = Crosstable.crosstableBSONHandler.writeResult(result, u1) def incScore(userId: String) = $int(game.winnerUserId match { case Some(u) if u == userId => 10 case None => 5 case _ => 0 }) val bson = $doc( "$inc" -> $doc( "s1" -> incScore(u1), "s2" -> incScore(u2)) ) ++ $doc("$push" -> $doc( Crosstable.BSONFields.results -> $doc( "$each" -> List(bsonResult), "$slice" -> -maxGames ))) coll.update(select(u1, u2), bson).void case _ => funit } private def exists(u1: String, u2: String) = coll.exists(select(u1, u2)) private def createFast(u1: String, u2: String) = creationCache(u1 -> u2).withTimeoutDefault(1 second, none)(system) // to avoid creating it twice during a new matchup private val creationCache = AsyncCache[(String, String), Option[Crosstable]]( name = "crosstable", f = (create _).tupled, timeToLive = 20 seconds) private val winnerProjection = $doc(Game.BSONFields.winnerId -> true) private def create(x1: String, x2: String): Fu[Option[Crosstable]] = { UserRepo.orderByGameCount(x1, x2) map (_ -> List(x1, x2).sorted) flatMap { case (Some((u1, u2)), List(su1, su2)) => val 
selector = $doc( Game.BSONFields.playerUids $all List(u1, u2), Game.BSONFields.status $gte chess.Status.Mate.id) import reactivemongo.api.ReadPreference gameColl.find(selector, winnerProjection) .sort($doc(Game.BSONFields.createdAt -> -1)) .cursor[Bdoc](readPreference = ReadPreference.secondary) .gather[List]().map { docs => val (s1, s2) = docs.foldLeft(0 -> 0) { case ((s1, s2), doc) => doc.getAs[String](Game.BSONFields.winnerId) match { case Some(u) if u == su1 => (s1 + 10, s2) case Some(u) if u == su2 => (s1, s2 + 10) case _ => (s1 + 5, s2 + 5) } } Crosstable( Crosstable.User(su1, s1), Crosstable.User(su2, s2), results = docs.take(maxGames).flatMap { doc => doc.getAs[String](Game.BSONFields.id).map { id => Result(id, doc.getAs[String](Game.BSONFields.winnerId)) } }.reverse ) } flatMap { crosstable => coll insert crosstable inject crosstable.some } case _ => fuccess(none) } } recoverWith lila.db.recoverDuplicateKey { _ => coll.uno[Crosstable](select(x1, x2)) } recover { case e: Exception => logger.error("CrosstableApi.create", e) none } private def select(u1: String, u2: String) = $id(Crosstable.makeKey(u1, u2)) }
clarkerubber/lila
modules/game/src/main/CrosstableApi.scala
Scala
agpl-3.0
3,891
package io.fidelcoria.ayfmap.controller;

import java.util.HashMap;
import java.util.Map;

import org.springframework.stereotype.Component;

import javafx.fxml.FXML;
import javafx.scene.control.Label;
import javafx.scene.control.Tab;
import javafx.scene.control.TabPane;

/**
 * Top-level controller for the main window: keeps the header label in
 * sync with whichever tab of the action pane is currently selected.
 */
@Component
public class MainController {

	// Injected by the FXML loader; field names must match the fx:id values.
	@FXML
	Label actionHeaderBar;
	@FXML
	TabPane actionTabPane;

	// Nested tab controllers, also wired up by the FXML loader.
	@FXML
	private GenerateTabController generateTabController;
	@FXML
	private ImportTabController importTabController;
	@FXML
	private DataTabController dataTabController;

	// Maps each tab's fx:id to the header text shown when it is active.
	private static final Map<String, String> TAB_TITLES;

	static {
		TAB_TITLES = new HashMap<>();
		TAB_TITLES.put("generate-tab", "Generate Documents");
		TAB_TITLES.put("import-tab", "Import Documents");
		TAB_TITLES.put("edit-tab", "Edit Data");
	}

	/**
	 * Update the actionHeaderBar to reflect the selected tab.
	 */
	public void tabClicked() {
		for (Tab tab : actionTabPane.getTabs()) {
			if (!tab.isSelected()) {
				continue;
			}
			// Only one tab can be selected; set the header and stop looking.
			actionHeaderBar.setText(TAB_TITLES.get(tab.getId()));
			return;
		}
	}
}
fidelcoria/AYFM-Scheduling
AssignmentPlanner/src/main/java/io/fidelcoria/ayfmap/controller/MainController.java
Java
agpl-3.0
1,099
$(function() { function FirmwareUpdaterViewModel(parameters) { var self = this; self.settingsViewModel = parameters[0]; self.loginState = parameters[1]; self.connection = parameters[2]; self.printerState = parameters[3]; self.configPathAvrdude = ko.observable(); self.hexFileName = ko.observable(undefined); self.hexFileURL = ko.observable(undefined); self.alertMessage = ko.observable(""); self.alertType = ko.observable("alert-warning"); self.showAlert = ko.observable(false); self.missingParamToFlash = ko.observable(false); self.progressBarText = ko.observable(); self.isBusy = ko.observable(false); self.updateAvailable = ko.observable(false); self.pathBroken = ko.observable(false); self.pathOk = ko.observable(false); self.pathText = ko.observable(); self.pathHelpVisible = ko.computed(function() { return self.pathBroken() || self.pathOk(); }); self.inSettingsDialog = false; self.selectHexPath = $("#settings_firmwareupdater_selectHexPath"); self.configurationDialog = $("#settings_plugin_firmwareupdater_configurationdialog"); self.selectHexPath.fileupload({ dataType: "hex", maxNumberOfFiles: 1, autoUpload: false, add: function(e, data) { if (data.files.length == 0) { return false; } self.hexData = data; self.hexFileName(data.files[0].name); } }) self.startFlashFromFile = function() { if (!self.loginState.isAdmin()){ self.alertType("alert-warning") self.alertMessage(gettext("Administrator privileges are needed to flash firmware.")); self.showAlert(true); return false; } if (self.printerState.isPrinting() || self.printerState.isPaused()){ self.alertType("alert-warning") self.alertMessage(gettext("Printer is printing. 
Please wait for the print to be finished.")); self.showAlert(true); return false; } if (!self.settingsViewModel.settings.plugins.firmwareupdater.avrdude_path()) { self.alertType("alert-warning") self.alertMessage(gettext("AVRDUDE path not configured")); self.showAlert(true); return false; } if (!self.hexFileName()) { self.alertType("alert-warning") self.alertMessage(gettext("Hex file path not specified")); self.showAlert(true); return false; } if (!self.connection.selectedPort()) { self.alertType("alert-warning") self.alertMessage(gettext("Port not selected")); self.showAlert(true); return false; } self.progressBarText("Flashing firmware..."); self.isBusy(true); self.showAlert(false); var form = { selected_port: self.connection.selectedPort() }; self.hexData.formData = form; self.hexData.submit(); } self.startFlashFromURL = function() { if (!self.loginState.isAdmin()){ self.alertType("alert-warning") self.alertMessage(gettext("Administrator privileges are needed to flash firmware.")); self.showAlert(true); return false; } if (self.printerState.isPrinting() || self.printerState.isPaused()){ self.alertType("alert-warning") self.alertMessage(gettext("Printer is printing. 
Please wait for the print to be finished.")); self.showAlert(true); return false; } if (!self.settingsViewModel.settings.plugins.firmwareupdater.avrdude_path()) { self.alertType("alert-warning") self.alertMessage(gettext("AVRDUDE path not configured")); self.showAlert(true); return false; } if (!self.hexFileURL()) { self.alertType("alert-warning") self.alertMessage(gettext("Hex file URL not specified")); self.showAlert(true); return false; } if (!self.connection.selectedPort()) { self.alertType("alert-warning") self.alertMessage(gettext("Port not selected")); self.showAlert(true); return false; } self.isBusy(true); self.showAlert(false); self.progressBarText("Flashing firmware..."); $.ajax({ url: PLUGIN_BASEURL + "firmwareupdater/flashFirmwareWithURL", type: "POST", dataType: "json", data: JSON.stringify({ selected_port: self.connection.selectedPort(), hex_url: self.hexFileURL() }), contentType: "application/json; charset=UTF-8" }) } self.checkForUpdates = function() { if (self.printerState.isPrinting() || self.printerState.isPaused()){ self.alertType("alert-warning") self.alertMessage(gettext("Printer is printing. Please wait for the print to be finished.")); self.showAlert(true); return false; } if (!self.connection.selectedPort()) { self.alertType("alert-warning") self.alertMessage(gettext("Port not selected")); self.showAlert(true); return false; } self.isBusy(true); self.showAlert(false); $.ajax({ url: PLUGIN_BASEURL + "firmwareupdater/checkForUpdates", type: "POST", dataType: "json", data: JSON.stringify({ selected_port: self.connection.selectedPort(), }), contentType: "application/json; charset=UTF-8" }); } self.flashUpdate = function() { if (self.printerState.isPrinting() || self.printerState.isPaused()){ self.alertType("alert-warning") self.alertMessage(gettext("Printer is printing. 
Please wait for the print to be finished.")); self.showAlert(true); return false; } if (!self.settingsViewModel.settings.plugins.firmwareupdater.avrdude_path()) { self.alertType("alert-warning") self.alertMessage(gettext("AVRDUDE path not configured")); self.showAlert(true); return false; } if (!self.connection.selectedPort()) { self.alertType("alert-warning") self.alertMessage(gettext("Port not selected")); self.showAlert(true); return false; } self.isBusy(true); self.showAlert(false); self.progressBarText("Flashing firmware..."); console.log(AJAX_BASEURL + "system"); $.ajax({ url: PLUGIN_BASEURL + "firmwareupdater/flashUpdate", type: "POST", dataType: "json", data: JSON.stringify({ selected_port: self.connection.selectedPort() }), contentType: "application/json; charset=UTF-8" }); } self.onDataUpdaterPluginMessage = function(plugin, data) { if (plugin != "firmwareupdater") { return; } if (data.type == "status" && data.status_type == "check_update_status") { if (data.status_value == "progress") { self.progressBarText(data.status_description); return; } if (data.status_value == "update_available") { if (!self.inSettingsDialog) { self.showUpdateAvailablePopup(data.status_description); } self.updateAvailable(true); self.isBusy(false); return; } if (data.status_value == "up_to_date") { self.updateAvailable(false); self.isBusy(false); self.showAlert(false); if (self.inSettingsDialog) { self.alertType("alert-success"); self.alertMessage(data.status_description); self.showAlert(true); } return; } if (data.status_value == "error") { self.updateAvailable(false); self.isBusy(false); self.alertType("alert-danger"); self.alertMessage(data.status_description); self.showAlert(true); return; } } if (data.type == "status" && data.status_type == "flashing_status") { if (data.status_value == "starting_flash") { self.isBusy(true); } else if (data.status_value == "progress") { self.progressBarText(data.status_description); } else if (data.status_value == "info") { 
self.alertType("alert-info"); self.alertMessage(data.status_description); self.showAlert(true); } else if (data.status_value == "successful") { self.showPopup("success", "Flashing Successful", ""); self.isBusy(false); self.showAlert(false); self.hexFileName(undefined); self.hexFileURL(undefined); } else if (data.status_value == "error") { self.showPopup("error", "Flashing Failed", data.status_description); self.isBusy(false); self.showAlert(false); } } } self.showPluginConfig = function() { self.configPathAvrdude(self.settingsViewModel.settings.plugins.firmwareupdater.avrdude_path()); self.configurationDialog.modal(); } self.onConfigClose = function() { self._saveAvrdudePath(); self.configurationDialog.modal("hide"); self.onConfigHidden(); if (self.configPathAvrdude()) { self.showAlert(false); } } self._saveAvrdudePath = function() { var data = { plugins: { firmwareupdater: { avrdude_path: self.configPathAvrdude(), } } } self.settingsViewModel.saveData(data); } self.onConfigHidden = function() { self.pathBroken(false); self.pathOk(false); self.pathText(""); } self.testAvrdudePath = function() { $.ajax({ url: API_BASEURL + "util/test", type: "POST", dataType: "json", data: JSON.stringify({ command: "path", path: self.configPathAvrdude(), check_type: "file", check_access: "x" }), contentType: "application/json; charset=UTF-8", success: function(response) { if (!response.result) { if (!response.exists) { self.pathText(gettext("The path doesn't exist")); } else if (!response.typeok) { self.pathText(gettext("The path is not a file")); } else if (!response.access) { self.pathText(gettext("The path is not an executable")); } } else { self.pathText(gettext("The path is valid")); } self.pathOk(response.result); self.pathBroken(!response.result); } }) } self.isReadyToFlashFromFile = function() { if (self.printerState.isPrinting() || self.printerState.isPaused()){ return false; } if (!self.settingsViewModel.settings.plugins.firmwareupdater.avrdude_path()) { return false; } if 
(!self.connection.selectedPort()) { return false; } if (!self.hexFileName()) { return false; } self.showAlert(false); return true; } self.isReadyToFlashFromURL = function() { if (self.printerState.isPrinting() || self.printerState.isPaused()){ return false; } if (!self.settingsViewModel.settings.plugins.firmwareupdater.avrdude_path()) { return false; } if (!self.connection.selectedPort()) { return false; } if (!self.hexFileURL()) { return false; } self.showAlert(false); return true; } self.isReadyToCheck = function() { if (self.printerState.isPrinting() || self.printerState.isPaused()){ return false; } if (!self.connection.selectedPort()) { return false; } return true; } self.isReadyToUpdate = function() { if (self.printerState.isPrinting() || self.printerState.isPaused()){ return false; } if (!self.settingsViewModel.settings.plugins.firmwareupdater.avrdude_path()) { return false; } if (!self.connection.selectedPort() || self.connection.selectedPort() == "AUTO") { return false; } return true; } self.onSettingsShown = function() { self.inSettingsDialog = true; } self.onSettingsHidden = function() { self.inSettingsDialog = false; self.showAlert(false); } // Popup Messages self.showUpdateAvailablePopup = function(new_fw_version) { self.updateAvailablePopup = new PNotify({ title: gettext('Firmware Update Available'), text: gettext('Version ') + new_fw_version, icon: true, hide: false, type: 'success', buttons: { closer: true, sticker: false, }, history: { history: false } }); }; self.showPopup = function(message_type, title, text){ if (self.popup !== undefined){ self.closePopup(); } self.popup = new PNotify({ title: gettext(title), text: text, type: message_type, hide: false }); } self.closePopup = function() { if (self.popup !== undefined) { self.popup.remove(); } }; } OCTOPRINT_VIEWMODELS.push([ FirmwareUpdaterViewModel, ["settingsViewModel", "loginStateViewModel", "connectionViewModel", "printerStateViewModel"], 
[document.getElementById("settings_plugin_firmwareupdater")] ]); });
mcecchi/SuperOcto
OctoPrint-FirmwareUpdater/octoprint_firmwareupdater/static/js/firmwareupdater.js
JavaScript
agpl-3.0
16,233
/* ************************************************************************ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * * (c) 2009. (c) 2009. * Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 * All rights reserved Tous droits réservés * * NRC disclaims any warranties, Le CNRC dénie toute garantie * expressed, implied, or énoncée, implicite ou légale, * statutory, of any kind with de quelque nature que ce * respect to the software, soit, concernant le logiciel, * including without limitation y compris sans restriction * any warranty of merchantability toute garantie de valeur * or fitness for a particular marchande ou de pertinence * purpose. NRC shall not be pour un usage particulier. * liable in any event for any Le CNRC ne pourra en aucun cas * damages, whether direct or être tenu responsable de tout * indirect, special or general, dommage, direct ou indirect, * consequential or incidental, particulier ou général, * arising from the use of the accessoire ou fortuit, résultant * software. Neither the name de l'utilisation du logiciel. Ni * of the National Research le nom du Conseil National de * Council of Canada nor the Recherches du Canada ni les noms * names of its contributors may de ses participants ne peuvent * be used to endorse or promote être utilisés pour approuver ou * products derived from this promouvoir les produits dérivés * software without specific prior de ce logiciel sans autorisation * written permission. préalable et particulière * par écrit. * * This file is part of the Ce fichier fait partie du projet * OpenCADC project. OpenCADC. 
* * OpenCADC is free software: OpenCADC est un logiciel libre ; * you can redistribute it and/or vous pouvez le redistribuer ou le * modify it under the terms of modifier suivant les termes de * the GNU Affero General Public la “GNU Affero General Public * License as published by the License” telle que publiée * Free Software Foundation, par la Free Software Foundation * either version 3 of the : soit la version 3 de cette * License, or (at your option) licence, soit (à votre gré) * any later version. toute version ultérieure. * * OpenCADC is distributed in the OpenCADC est distribué * hope that it will be useful, dans l’espoir qu’il vous * but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE * without even the implied GARANTIE : sans même la garantie * warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ * or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF * PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence * General Public License for Générale Publique GNU Affero * more details. pour plus de détails. * * You should have received Vous devriez avoir reçu une * a copy of the GNU Affero copie de la Licence Générale * General Public License along Publique GNU Affero avec * with OpenCADC. If not, see OpenCADC ; si ce n’est * <http://www.gnu.org/licenses/>. pas le cas, consultez : * <http://www.gnu.org/licenses/>. 
* * $Revision: 4 $ * ************************************************************************ */ package ca.nrc.cadc.vos.server; import java.io.IOException; import java.io.InputStream; import java.io.StringWriter; import java.net.URL; import java.util.ArrayList; import java.util.List; import org.apache.log4j.Logger; import ca.nrc.cadc.io.ByteCountInputStream; import ca.nrc.cadc.uws.JobInfo; import ca.nrc.cadc.uws.Parameter; import ca.nrc.cadc.uws.web.InlineContentException; import ca.nrc.cadc.uws.web.InlineContentHandler; import ca.nrc.cadc.uws.web.UWSInlineContentHandler; import ca.nrc.cadc.vos.Transfer; import ca.nrc.cadc.vos.TransferParsingException; import ca.nrc.cadc.vos.TransferReader; import ca.nrc.cadc.vos.TransferWriter; import ca.nrc.cadc.vos.VOSURI; public class TransferInlineContentHandler implements UWSInlineContentHandler { private static Logger log = Logger.getLogger(TransferInlineContentHandler.class); // 6Kb XML Doc size limit private static final long DOCUMENT_SIZE_MAX = 6144L; private static final String TEXT_XML = "text/xml"; public TransferInlineContentHandler() { } public Content accept(String name, String contentType, InputStream inputStream) throws InlineContentException, IOException { if (!contentType.equals(TEXT_XML)) throw new IllegalArgumentException("Transfer document expected Content-Type is " + TEXT_XML + " not " + contentType); if (inputStream == null) throw new IOException("The InputStream is closed"); // wrap the input stream in a byte counter to limit bytes read ByteCountInputStream sizeLimitInputStream = new ByteCountInputStream(inputStream, DOCUMENT_SIZE_MAX); try { TransferReader reader = new TransferReader(true); Transfer transfer = reader.read(sizeLimitInputStream, VOSURI.SCHEME); log.debug("Transfer: read " + sizeLimitInputStream.getByteCount() + " bytes."); TransferWriter tw = new TransferWriter(); StringWriter sw = new StringWriter(); tw.write(transfer, sw); Content content = new Content(); content.name = CONTENT_JOBINFO; 
content.value = new JobInfo(sw.toString(), contentType, true);; return content; } catch (TransferParsingException e) { throw new InlineContentException("Unable to create JobInfo from Transfer Document", e); } } }
opencadc/vos
cadc-vos-server/src/main/java/ca/nrc/cadc/vos/server/TransferInlineContentHandler.java
Java
agpl-3.0
6,452
<!DOCTYPE html> <html> <head> <meta charset="utf-8"> <title>Keyword noodle soup</title> <link rel="stylesheet" href="show.css"> </head> <body> <div id="app"> <h1>Loading…</h1> </div> <script src="underscore-min.js"></script> <script src="d3.v3.min.js" charset="utf-8"></script> <script src="show.js"></script> </body> </html>
overview/overview-apps
keyword-noodle-soup/public/show.html
HTML
agpl-3.0
375
# -*- coding: utf-8 -*- import io import os from dlstats.fetchers.bea import BEA as Fetcher import httpretty from dlstats.tests.base import RESOURCES_DIR as BASE_RESOURCES_DIR from dlstats.tests.fetchers.base import BaseFetcherTestCase import unittest from unittest import mock RESOURCES_DIR = os.path.abspath(os.path.join(BASE_RESOURCES_DIR, "bea")) DATA_BEA_10101_An = { "filepath": os.path.abspath(os.path.join(RESOURCES_DIR, "nipa-section1.xls.zip")), "DSD": { "provider": "BEA", "filepath": None, "dataset_code": "nipa-section1-10101-a", "dsd_id": "nipa-section1-10101-a", "is_completed": True, "categories_key": "nipa-section1", "categories_parents": ["national", "nipa"], "categories_root": ["national", "nipa", "nipa-fa2004", "nipa-underlying"], "concept_keys": ['concept', 'frequency'], "codelist_keys": ['concept', 'frequency'], "codelist_count": { "concept": 25, "frequency": 1 }, "dimension_keys": ['concept', 'frequency'], "dimension_count": { "concept": 25, "frequency": 1 }, "attribute_keys": [], "attribute_count": None, }, "series_accept": 25, "series_reject_frequency": 0, "series_reject_empty": 0, "series_all_values": 1175, "series_key_first": "A191RL1-A", "series_key_last": "A191RP1-A", "series_sample": { 'provider_name': 'BEA', 'dataset_code': 'nipa-section1-10101-a', 'key': 'A191RL1-A', 'name': 'Gross domestic product - Annually', 'frequency': 'A', 'last_update': None, 'first_value': { 'value': '3.1', 'period': '1969', 'attributes': None, }, 'last_value': { 'value': '2.4', 'period': '2015', 'attributes': None, }, 'dimensions': { 'concept': 'a191rl1', "frequency": 'a' }, 'attributes': None, } } def _get_datasets_settings(self): return { "nipa-section1-10101-a": { 'dataset_code': 'nipa-section1-10101-a', 'name': 'Table 1.1.1. 
Percent Change From Preceding Period in Real Gross Domestic Product - Annually', 'last_update': None, 'metadata': { 'filename': 'nipa-section1.xls.zip', 'sheet_name': '10101 Ann', 'url': 'http://www.bea.gov/national/nipaweb/GetCSV.asp?GetWhat=SS_Data/Section1All_xls.zip&Section=2' }, } } class FetcherTestCase(BaseFetcherTestCase): # nosetests -s -v dlstats.tests.fetchers.test_bea:FetcherTestCase FETCHER_KLASS = Fetcher DATASETS = { 'nipa-section1-10101-a': DATA_BEA_10101_An } DATASET_FIRST = "nipa-fa2004-section1-101-a" DATASET_LAST = "nipa-underlying-section9-90500U-a" DEBUG_MODE = False def _load_files(self, dataset_code): url = "http://www.bea.gov/national/nipaweb/GetCSV.asp?GetWhat=SS_Data/Section1All_xls.zip&Section=2" self.register_url(url, self.DATASETS[dataset_code]["filepath"]) @httpretty.activate @unittest.skipUnless('FULL_TEST' in os.environ, "Skip - no full test") def test_load_datasets_first(self): dataset_code = "nipa-section1-10101-a" self._load_files(dataset_code) self.assertLoadDatasetsFirst([dataset_code]) @httpretty.activate @unittest.skipUnless('FULL_TEST' in os.environ, "Skip - no full test") def test_load_datasets_update(self): dataset_code = "nipa-section1-10101-a" self._load_files(dataset_code) self.assertLoadDatasetsUpdate([dataset_code]) #@httpretty.activate @unittest.skipIf(True, "TODO") def test_build_data_tree(self): dataset_code = "nipa-section1-10101-a" self.assertDataTree(dataset_code) @httpretty.activate @mock.patch("dlstats.fetchers.bea.BEA._get_datasets_settings", _get_datasets_settings) def test_upsert_dataset_10101(self): # nosetests -s -v dlstats.tests.fetchers.test_bea:FetcherTestCase.test_upsert_dataset_10101 dataset_code = "nipa-section1-10101-a" self._load_files(dataset_code) self.assertProvider() dataset = self.assertDataset(dataset_code) names = { 'a191rl1': 'Gross domestic product', 'dpcerl1': 'Personal consumption expenditures', 'dgdsrl1': 'Personal consumption expenditures - Goods', 'ddurrl1': 'Personal consumption 
expenditures - Goods - Durable goods', 'dndgrl1': 'Personal consumption expenditures - Goods - Nondurable goods', 'dserrl1': 'Personal consumption expenditures - Services', 'a006rl1': 'Gross private domestic investment', 'a007rl1': 'Gross private domestic investment - Fixed investment', 'a008rl1': 'Gross private domestic investment - Fixed investment - Nonresidential', 'y033rl1': 'Gross private domestic investment - Fixed investment - Nonresidential - Equipment', 'a011rl1': 'Gross private domestic investment - Fixed investment - Residential', 'a020rl1': 'Net exports of goods and services - Exports', 'a191rp1': 'Addendum: - Gross domestic product, current dollars' } for k, v in names.items(): self.assertTrue(k in dataset["codelists"]["concept"]) self.assertEquals(dataset["codelists"]["concept"][k], v) series_list = self.assertSeries(dataset_code) series_keys = {s["key"].lower(): s for s in series_list} for k, v in names.items(): search_k = "%s-a" % k search_name = "%s - Annually" % v self.assertTrue(search_k in series_keys, "%s not in series_keys" % search_k) self.assertEquals(series_keys[search_k]["name"], search_name) for series in series_list: self.assertEquals(series["last_update_ds"], dataset["last_update"])
Widukind/dlstats
dlstats/tests/fetchers/test_bea.py
Python
agpl-3.0
6,159
<!DOCTYPE html> <html lang="en" > <head> <title>第14課 - forRange with Lists forRange與列表 - 2017Spring 協同產品設計實習 (虎尾科大MDE)</title> <!-- Using the latest rendering mode for IE --> <meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <link rel="canonical" href="./Lesson 14.html"> <meta name="author" content="40423222" /> <meta name="keywords" content="notes,Lesson 14" /> <meta name="description" content="關於for的range( )配合列表的介紹 參考資訊: 網站: https://automatetheboringstuff.com/chapter4/ 影片: https://www.youtube.com/watch?v=umTnflPbYww" /> <meta property="og:site_name" content="2017Spring 協同產品設計實習 (虎尾科大MDE)" /> <meta property="og:type" content="article"/> <meta property="og:title" content="第14課 - forRange with Lists forRange與列表"/> <meta property="og:url" content="./Lesson 14.html"/> <meta property="og:description" content="關於for的range( )配合列表的介紹 參考資訊: 網站: https://automatetheboringstuff.com/chapter4/ 影片: https://www.youtube.com/watch?v=umTnflPbYww"/> <meta property="article:published_time" content="2017-09-10" /> <meta property="article:section" content="Python" /> <meta property="article:tag" content="notes" /> <meta property="article:tag" content="Lesson 14" /> <meta property="article:author" content="40423222" /> <!-- Bootstrap --> <link rel="stylesheet" href="./theme/css/bootstrap.united.min.css" type="text/css"/> <link href="./theme/css/font-awesome.min.css" rel="stylesheet"> <link href="./theme/css/pygments/monokai.css" rel="stylesheet"> <link href="./theme/tipuesearch/tipuesearch.css" rel="stylesheet"> <link rel="stylesheet" href="./theme/css/style.css" type="text/css"/> <link href="./feeds/all.atom.xml" type="application/atom+xml" rel="alternate" title="2017Spring 協同產品設計實習 (虎尾科大MDE) ATOM Feed"/> <script type="text/javascript" src="https://coursemdetw.github.io/project_site_files/files/syntaxhighlighter/shCore.js"></script> <script type="text/javascript" 
src="https://coursemdetw.github.io/project_site_files/files/syntaxhighlighter/shBrushJScript.js"></script> <script type="text/javascript" src="https://coursemdetw.github.io/project_site_files/files/syntaxhighlighter/shBrushJava.js"></script> <script type="text/javascript" src="https://coursemdetw.github.io/project_site_files/files/syntaxhighlighter/shBrushPython.js"></script> <script type="text/javascript" src="https://coursemdetw.github.io/project_site_files/files/syntaxhighlighter/shBrushSql.js"></script> <script type="text/javascript" src="https://coursemdetw.github.io/project_site_files/files/syntaxhighlighter/shBrushXml.js"></script> <script type="text/javascript" src="https://coursemdetw.github.io/project_site_files/files/syntaxhighlighter/shBrushPhp.js"></script> <script type="text/javascript" src="https://coursemdetw.github.io/project_site_files/files/syntaxhighlighter/shBrushCpp.js"></script> <script type="text/javascript" src="https://coursemdetw.github.io/project_site_files/files/syntaxhighlighter/shBrushCss.js"></script> <script type="text/javascript" src="https://coursemdetw.github.io/project_site_files/files/syntaxhighlighter/shBrushCSharp.js"></script> <script type="text/javascript" src="https://coursemdetw.github.io/project_site_files/files/syntaxhighlighter/shBrushBash.js"></script> <script type='text/javascript'> (function(){ var corecss = document.createElement('link'); var themecss = document.createElement('link'); var corecssurl = "https://chiamingyen.github.io/kmolab_data/files/syntaxhighlighter/css/shCore.css"; if ( corecss.setAttribute ) { corecss.setAttribute( "rel", "stylesheet" ); corecss.setAttribute( "type", "text/css" ); corecss.setAttribute( "href", corecssurl ); } else { corecss.rel = "stylesheet"; corecss.href = corecssurl; } document.getElementsByTagName("head")[0].insertBefore( corecss, document.getElementById("syntaxhighlighteranchor") ); var themecssurl = 
"https://chiamingyen.github.io/kmolab_data/files/syntaxhighlighter/css/shThemeDefault.css?ver=3.0.9b"; if ( themecss.setAttribute ) { themecss.setAttribute( "rel", "stylesheet" ); themecss.setAttribute( "type", "text/css" ); themecss.setAttribute( "href", themecssurl ); } else { themecss.rel = "stylesheet"; themecss.href = themecssurl; } //document.getElementById("syntaxhighlighteranchor").appendChild(themecss); document.getElementsByTagName("head")[0].insertBefore( themecss, document.getElementById("syntaxhighlighteranchor") ); })(); SyntaxHighlighter.config.strings.expandSource = '+ expand source'; SyntaxHighlighter.config.strings.help = '?'; SyntaxHighlighter.config.strings.alert = 'SyntaxHighlighter\n\n'; SyntaxHighlighter.config.strings.noBrush = 'Can\'t find brush for: '; SyntaxHighlighter.config.strings.brushNotHtmlScript = 'Brush wasn\'t configured for html-script option: '; SyntaxHighlighter.defaults['pad-line-numbers'] = false; SyntaxHighlighter.defaults['toolbar'] = false; SyntaxHighlighter.all(); </script> </head> <body> <div class="navbar navbar-default navbar-fixed-top" role="navigation"> <div class="container"> <div class="navbar-header"> <button type="button" class="navbar-toggle" data-toggle="collapse" data-target=".navbar-ex1-collapse"> <span class="sr-only">Toggle navigation</span> <span class="icon-bar"></span> <span class="icon-bar"></span> <span class="icon-bar"></span> </button> <a href="./" class="navbar-brand"> 2017Spring 協同產品設計實習 (虎尾科大MDE) </a> </div> <div class="collapse navbar-collapse navbar-ex1-collapse"> <ul class="nav navbar-nav"> <li><a href="./pages/about/"> About </a></li> <li class="active"> <a href="./category/python.html">Python</a> </li> </ul> <ul class="nav navbar-nav navbar-right"> <li><span> <form class="navbar-search" action="./search.html"> <input type="text" class="search-query" placeholder="Search" name="q" id="tipue_search_input" required> </form></span> </li> <li><a href="./archives.html"><i class="fa 
fa-th-list"></i><span class="icon-label">Archives</span></a></li> </ul> </div> <!-- /.navbar-collapse --> </div> </div> <!-- /.navbar --> <!-- Banner --> <!-- End Banner --> <div class="container"> <div class="row"> <div class="col-sm-9"> <section id="content"> <article> <header class="page-header"> <h1> <a href="./Lesson 14.html" rel="bookmark" title="Permalink to 第14課 - forRange with Lists forRange與列表"> 第14課 - forRange with Lists forRange與列表 </a> </h1> </header> <div class="entry-content"> <div class="panel"> <div class="panel-body"> <footer class="post-info"> <span class="label label-default">Date</span> <span class="published"> <i class="fa fa-calendar"></i><time datetime="2017-09-10T18:44:00+08:00"> Sun 10 September 2017</time> </span> <span class="label label-default">By</span> <a href="./author/40423222.html"><i class="fa fa-user"></i> 40423222</a> <span class="label label-default">Tags</span> <a href="./tag/notes.html">notes</a> / <a href="./tag/lesson-14.html">Lesson 14</a> </footer><!-- /.post-info --> </div> </div> <p>關於for的range( )配合列表的介紹<br> 參考資訊: 網站: <a href="https://automatetheboringstuff.com/chapter4/">https://automatetheboringstuff.com/chapter4/</a> 影片: <a href="https://www.youtube.com/watch?v=umTnflPbYww">https://www.youtube.com/watch?v=umTnflPbYww</a></p> <hr> <!-- 關於for的range與列表功用的介紹 --> <h3>說明:</h3> <p>forRange with Lists forRange與列表 <ul> <li>for i in range(4) 等於 for i in [0, 1, 2, 3],所以在L-13的Lists能使用for i in ListsName <li>常見的Python所使用的range範圍為 range(len(someList)),也就是看列表長度 <li>可利用List來建立operators(運算符號),利用值in列表得出True or False <li>更多功能在下方圖片介紹 </ul></p> <hr> <!-- 因為圖片字太小所以要更改平時的圖片格式,需要改回來 可以去參考第11課 --> <h3>forRange跟Lists的用法:</h3> <p><img src="./../data/L-14/img/forRange with Lists.png" width="850"></p> </div> <!-- /.entry-content --> </article> </section> </div> <div class="col-sm-3" id="sidebar"> <aside> <section class="well well-sm"> <ul class="list-group list-group-flush"> <li class="list-group-item"><h4><i class="fa fa-home fa-lg"></i><span 
class="icon-label">Recent Posts</span></h4> <ul class="list-group" id="recentposts"> <li class="list-group-item"> <a href="./Lesson 16.html"> 第16課 - type( ), mutable and Immutable, copy( ) 列表更多應用 </a> </li> <li class="list-group-item"> <a href="./Lesson 15.html"> 第15課 - index( ), append( ), insert( ), remove( ), sort( ) 用於列表的指令 </a> </li> <li class="list-group-item"> <a href="./Lesson 14.html"> 第14課 - forRange with Lists forRange與列表 </a> </li> <li class="list-group-item"> <a href="./Lesson 13.html"> 第13課 - Lists 列表 </a> </li> <li class="list-group-item"> <a href="./Flowchart.html"> Python flowchart 目錄 </a> </li> </ul> </li> <li class="list-group-item"><a href="./categories.html"><h4><i class="fa fa-home fa-lg"></i><span class="icon-label">Categories</span></h4></a> <ul class="list-group" id="categories"> <li class="list-group-item"> <a href="./category/python.html"> <i class="fa fa-folder-open fa-lg"></i> Python </a> </li> </ul> </li> <li class="list-group-item"><a href="./tags.html"><h4><i class="fa fa-tags fa-lg"></i><span class="icon-label">Tags</span></h4></a> <ul class="list-group list-inline tagcloud" id="tags"> </ul> </li> <li class="list-group-item"><h4><i class="fa fa-external-link-square fa-lg"></i><span class="icon-label">Links</span></h4> <ul class="list-group" id="links"> <li class="list-group-item"> <a href="http://getpelican.com/" target="_blank"> Pelican </a> </li> <li class="list-group-item"> <a href="https://github.com/DandyDev/pelican-bootstrap3/" target="_blank"> pelican-bootstrap3 </a> </li> <li class="list-group-item"> <a href="https://github.com/getpelican/pelican-plugins" target="_blank"> pelican-plugins </a> </li> <li class="list-group-item"> <a href="https://github.com/Tipue/Tipue-Search" target="_blank"> Tipue search </a> </li> </ul> </li> </ul> </section> </aside> </div> </div> </div> <footer> <div class="container"> <hr> <div class="row"> <div class="col-xs-10">&copy; 2017 KMOL &middot; Powered by <a 
href="https://github.com/DandyDev/pelican-bootstrap3" target="_blank">pelican-bootstrap3</a>, <a href="http://docs.getpelican.com/" target="_blank">Pelican</a>, <a href="http://getbootstrap.com" target="_blank">Bootstrap</a> </div> <div class="col-xs-2"><p class="pull-right"><i class="fa fa-arrow-up"></i> <a href="#">Back to top</a></p></div> </div> </div> </footer> <script src="./theme/js/jquery.min.js"></script> <!-- Include all compiled plugins (below), or include individual files as needed --> <script src="./theme/js/bootstrap.min.js"></script> <!-- for https://github.com/jsor/lity lightbox video popup --> <link href="./theme/css/lity.css" rel="stylesheet"> <script src="./theme/js/lity.js"></script> <!-- Enable responsive features in IE8 with Respond.js (https://github.com/scottjehl/Respond) --> <script src="./theme/js/respond.min.js"></script> </body> </html>
40423222/Python
blog/Lesson 14.html
HTML
agpl-3.0
13,619
# frozen_string_literal: true require 'spec_helper' describe 'SharedStrings' do it 'custom_shared_strings_name.xlsx' do xlsx = OoxmlParser::Parser.parse('spec/workbook/shared_strings/custom_shared_strings_name.xlsx') expect(xlsx.shared_strings_table.count).to eq(1) end end
ONLYOFFICE/ooxml_parser
spec/workbook/shared_strings_spec.rb
Ruby
agpl-3.0
288
exports.main = function(env){ var capsule = env.capsule; var mtests = capsule.tests.modules; var thsocket = capsule.tests.modules.transport.http.socket_srv; // mtests.http_responder.test(capsule); // thsocket.test({ 'url' : 'http://localhost:8810/sockethh.js'}, capsule); var thttp = capsule.tests.modules.transport.http.server; thttp.test({ 'url' : 'http://localhost:8810/krevetk/o'}, capsule); }
ixdu/capsule
tests/deployer/nodejs_srv/capsulated.js
JavaScript
agpl-3.0
445
/******************************************************************************* * This file is part of Termitaria, a project management tool * Copyright (C) 2008-2013 CodeSphere S.R.L., www.codesphere.ro * * Termitaria is free software; you can redistribute it and/or * modify it under the terms of the GNU Affero General Public License * as published by the Free Software Foundation; either version 3 of * the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with Termitaria. If not, see <http://www.gnu.org/licenses/> . ******************************************************************************/ package ro.cs.logaudit.entity; /** * @author matti_joona * */ public class Role { private int roleId; private String name; private String description; private String observation; private Module module; /** * @return the module */ public Module getModule() { return module; } /** * @param module the module to set */ public void setModule(Module module) { this.module = module; } /** * @return the roleId */ public int getRoleId() { return roleId; } /** * @param roleId the roleId to set */ public void setRoleId(int roleId) { this.roleId = roleId; } /** * @return the name */ public String getName() { return name; } /** * @param name the name to set */ public void setName(String name) { this.name = name; } /** * @return the description */ public String getDescription() { return description; } /** * @param description the description to set */ public void setDescription(String description) { this.description = description; } /** * @return the observation */ public String getObservation() { return observation; } /** * @param observation the observation to 
set */ public void setObservation(String observation) { this.observation = observation; } /* (non-Javadoc) * @see java.lang.Object#toString() */ @Override public String toString() { StringBuffer sb = new StringBuffer("["); sb.append(this.getClass().getSimpleName()); sb.append(": "); sb.append("roleId = ") .append(roleId) .append(", "); sb.append("name = ") .append(name) .append(", "); sb.append("description = ") .append(description).append(", "); sb.append("observation = ") .append(observation).append(", "); sb.append("module = ") .append(module) .append("]"); return sb.toString(); } }
CodeSphere/termitaria
TermitariaAudit/JavaSource/ro/cs/logaudit/entity/Role.java
Java
agpl-3.0
2,881
--[[ catfact.lua Returns cat facts. Based on a plugin by matthewhesketh. Copyright 2016 topkecleon <drew@otou.to> This code is licensed under the GNU AGPLv3. See /LICENSE for details. ]]-- local JSON = require('dkjson') local HTTP = require('socket.http') local utilities = require('otouto.utilities') local catfact = {name = 'catfact'} function catfact:init() catfact.triggers = utilities.triggers(self.info.username, self.config.cmd_pat) :t('catfact', true).table catfact.command = 'catfact' catfact.doc = 'Returns a cat fact.' catfact.url = 'http://catfacts-api.appspot.com/api/facts' end function catfact:action(msg) local jstr, code = HTTP.request(catfact.url) if code ~= 200 then utilities.send_reply(msg, self.config.errors.connection) return end local data = JSON.decode(jstr) local output = '*Cat Fact*\n_' .. data.facts[1] .. '_' utilities.send_message(msg.chat.id, output, true, nil, true) end return catfact
bb010g/otouto
otouto/plugins/catfact.lua
Lua
agpl-3.0
1,011
<?php include_once '../../../../../../lib/defaults.php'; include_once '../fns/place_point_method_page.php'; include_once '../../../../../fns/ApiDoc/trueResult.php'; place_point_method_page('delete', [ [ 'name' => 'id', 'description' => 'The ID of the point to delete.', ], ], ApiDoc\trueResult(), [ 'POINT_NOT_FOUND' => "A point with the ID doesn't exist.", ]);
zvini/website
www/help/api-doc/place/point/delete/index.php
PHP
agpl-3.0
392
package de.dvdb.domain.model.social; import java.io.Serializable; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.Table; @Entity @Table(name = "dvdb2_fbsession") public class FacebookSession implements Serializable { private static final long serialVersionUID = -8753714944734959457L; private Long id; private String sessionKey; private Long user; @Id @GeneratedValue(strategy = GenerationType.AUTO) public Long getId() { return id; } public void setId(Long id) { this.id = id; } @Column(name = "user_id") public Long getUser() { return user; } public void setUser(Long user) { this.user = user; } @Column(name = "sessionkey") public String getSessionKey() { return sessionKey; } public void setSessionKey(String sessionKey) { this.sessionKey = sessionKey; } }
chris-dvdb/dvdb.de
dvdb-ejb/src/main/java/de/dvdb/domain/model/social/FacebookSession.java
Java
agpl-3.0
961
/* Copyright (c) 2006-2012 by OpenLayers Contributors (see authors.txt for * full list of contributors). Published under the 2-clause BSD license. * See license.txt in the OpenLayers distribution or repository for the * full text of the license. */ /** * @requires OpenLayers/Format/SLD/v1.js * @requires OpenLayers/Format/Filter/v1_0_0.js */ /** * Class: OpenLayers.Format.SLD.v1_0_0 * Write SLD version 1.0.0. * * Inherits from: * - <OpenLayers.Format.SLD.v1> */ OpenLayers.Format.SLD.v1_0_0 = OpenLayers.Class( OpenLayers.Format.SLD.v1, { /** * Constant: VERSION * {String} 1.0.0 */ VERSION: "1.0.0", /** * Property: schemaLocation * {String} http://www.opengis.net/sld * http://schemas.opengis.net/sld/1.0.0/StyledLayerDescriptor.xsd */ schemaLocation: "http://www.opengis.net/sld http://schemas.opengis.net/sld/1.0.0/StyledLayerDescriptor.xsd", /** * Constructor: OpenLayers.Format.SLD.v1_0_0 * Instances of this class are not created directly. Use the * <OpenLayers.Format.SLD> constructor instead. * * Parameters: * options - {Object} An optional object whose properties will be set on * this instance. */ CLASS_NAME: "OpenLayers.Format.SLD.v1_0_0" });
B3Partners/geo-ov
src/main/webapp/openlayers/lib/OpenLayers/Format/SLD/v1_0_0.js
JavaScript
agpl-3.0
1,351
// =================================================================================================== // _ __ _ _ // | |/ /__ _| | |_ _ _ _ _ __ _ // | ' </ _` | | _| || | '_/ _` | // |_|\_\__,_|_|\__|\_,_|_| \__,_| // // This file is part of the Kaltura Collaborative Media Suite which allows users // to do with audio, video, and animation what Wiki platfroms allow them to do with // text. // // Copyright (C) 2006-2015 Kaltura Inc. // // This program is free software: you can redistribute it and/or modify // it under the terms of the GNU Affero General Public License as // published by the Free Software Foundation, either version 3 of the // License, or (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. // // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see <http://www.gnu.org/licenses/>. // // @ignore // =================================================================================================== package com.kaltura.client.types; import org.w3c.dom.Element; import com.kaltura.client.KalturaParams; import com.kaltura.client.KalturaApiException; /** * This class was generated using generate.php * against an XML schema provided by Kaltura. * * MANUAL CHANGES TO THIS CLASS WILL BE OVERWRITTEN. */ @SuppressWarnings("serial") public class KalturaUiConfAdminFilter extends KalturaUiConfAdminBaseFilter { public KalturaUiConfAdminFilter() { } public KalturaUiConfAdminFilter(Element node) throws KalturaApiException { super(node); } public KalturaParams toParams() throws KalturaApiException { KalturaParams kparams = super.toParams(); kparams.add("objectType", "KalturaUiConfAdminFilter"); return kparams; } }
moskiteau/KalturaGeneratedAPIClientsJava
src/main/java/com/kaltura/client/types/KalturaUiConfAdminFilter.java
Java
agpl-3.0
2,117
# -*- coding: utf-8 -*- import os import shutil import sys import datetime from invoke import task from invoke.util import cd from pelican.server import ComplexHTTPRequestHandler, RootedHTTPServer CONFIG = { # Local path configuration (can be absolute or relative to tasks.py) 'deploy_path': '..', # Github Pages configuration 'github_pages_branch': 'gh-pages', 'commit_message': "'Publish site on {}'".format(datetime.date.today().isoformat()), # Port for `serve` 'port': 8000, } @task def clean(c): """Remove generated files""" if os.path.isdir(CONFIG['deploy_path']): shutil.rmtree(CONFIG['deploy_path']) os.makedirs(CONFIG['deploy_path']) @task def build(c): """Build local version of site""" c.run('pelican -s pelicanconf.py') @task def rebuild(c): """`build` with the delete switch""" c.run('pelican -d -s pelicanconf.py') @task def regenerate(c): """Automatically regenerate site upon file modification""" c.run('pelican -r -s pelicanconf.py') @task def serve(c): """Serve site at http://localhost:8000/""" class AddressReuseTCPServer(RootedHTTPServer): allow_reuse_address = True server = AddressReuseTCPServer( CONFIG['deploy_path'], ('', CONFIG['port']), ComplexHTTPRequestHandler) sys.stderr.write('Serving on port {port} ...\n'.format(**CONFIG)) server.serve_forever() @task def reserve(c): """`build`, then `serve`""" build(c) serve(c) @task def preview(c): """Build production version of site""" c.run('pelican -s publishconf.py') @task def publish(c): """Publish to production via rsync""" c.run('pelican -s publishconf.py') c.run( 'rsync --delete --exclude ".DS_Store" -pthrvz -c ' '{} {production}:{dest_path}'.format( CONFIG['deploy_path'].rstrip('/') + '/', **CONFIG)) @task def gh_pages(c): """Publish to GitHub Pages""" preview(c) c.run('ghp-import -b {github_pages_branch} ' '-m {commit_message} ' '{deploy_path} -p'.format(**CONFIG))
webcamoid/webcamoid.github.io
internal/tasks.py
Python
agpl-3.0
2,105
<?php /** * This file is part of the Checkbook NYC financial transparency software. * * Copyright (C) 2012, 2013 New York City * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ ?> <?php echo eval($node->widgetConfig->header); ?> <a class="trends-export" href="/export/download/trends_pledged_rev_cov_nyc_trans_csv?dataUrl=/node/<?php echo $node->nid ?>">Export</a> <h5>(in thousands)<br/>New York City Transitional Finance Authority</h5> <table id="table_<?php echo widget_unique_identifier($node) ?>" style='display:none' class="trendsShowOnLoad <?php echo $node->widgetConfig->html_class ?>"> <?php if (isset($node->widgetConfig->caption_column)) { echo '<caption>' . $node->data[0][$node->widgetConfig->caption_column] . '</caption>'; } else if (isset($node->widgetConfig->caption)) { echo '<caption>' . $node->widgetConfig->caption . 
'</caption>'; } ?> <thead> <tr> <th class="number rowspan2Top"><div class="trendCen" >Fiscal</div></th> <th rowspan="2" class="number"><div class="trendCen">PIT<br>Revenue<sup>(1)</sup></div></th> <th rowspan="2" class="number"><div class="trendCen">Sales Tax<br>Revenue<sup>(2)</sup></div></th> <th rowspan="2" class="number"><div class="trendCen">Total<br>Receipt</div></th> <th rowspan="2" class="number"><div class="trendCen">Other<sup>(3)</sup></div></th> <th rowspan="2" class="number"><div class="trendCen">Investment<br>Earnings</div></th> <th rowspan="2" class="number"><div class="trendCen">Total<br>Revenue</div></th> <th colspan="3" class="centrig"><div class="trendCen">Future Tax Secured<br>Bonds Debt Service</div></th> <th rowspan="2" class="number"><div class="trendCen">Operating<br>Expenses</div></th> <th rowspan="2" class="number"><div class="trendCen">Total to be<br>Covered</div></th> <th rowspan="2">&nbsp;</th> </tr> <tr> <th class="number rowspan2Bot"><div class="trendCen">Year</div></th> <th class="number"><div class="trendCen">Interest</div></th> <th class="number"><div class="trendCen">Principal</div></th> <th class="number"><div class="trendCen">Total</div></th> </tr> </thead> <tbody> <?php $count = 1; foreach( $node->data as $row){ $dollar_sign = ($count == 1)? '<div class="dollarItem" >$</div>':''; $count++; echo "<tr><td class='number'><div class='tdCen'>" . $row['fiscal_year'] . "</div></td>"; echo "<td class='number'>" . $dollar_sign . "<div class='tdCen'>" . (($row['pit_revenue']>0)?number_format($row['pit_revenue']):'-') . "</div></td>"; echo "<td class='number'>" . $dollar_sign . "<div class='tdCen'>" . (($row['sales_tax_revenue']>0)?number_format($row['sales_tax_revenue']):'-') . "</div></td>"; echo "<td class='number'>" . $dollar_sign . "<div class='tdCen'>" . (($row['total_receipt']>0)?number_format($row['total_receipt']):'-') . "</div></td>"; echo "<td class='number'>" . $dollar_sign . "<div class='tdCen'>" . 
(($row['other']>0)?number_format($row['other']):'-') . "</div></td>"; echo "<td class='number'>" . $dollar_sign . "<div class='tdCen'>" . (($row['investment_earnings']>0)?number_format($row['investment_earnings']):'-') . "</div></td>"; echo "<td class='number'>" . $dollar_sign . "<div class='tdCen'>" . (($row['total_revenue']>0)?number_format($row['total_revenue']):'-') . "</div></td>"; echo "<td class='number'>" . $dollar_sign . "<div class='tdCen'>" . (($row['interest']>0)?number_format($row['interest']):'-') . "</div></td>"; echo "<td class='number'>" . $dollar_sign . "<div class='tdCen'>" . (($row['pricipal']>0)?number_format($row['pricipal']):'-') . "</div></td>"; echo "<td class='number'>" . $dollar_sign . "<div class='tdCen'>" . (($row['total']>0)?number_format($row['total']):'-') . "</div></td>"; echo "<td class='number'>" . $dollar_sign . "<div class='tdCen'>" . (($row['operating_expenses']>0)?number_format($row['operating_expenses']):'-') . "</div></td>"; echo "<td class='number'>" . $dollar_sign . "<div class='tdCen'>" . (($row['total_to_be_covered']>0)?number_format($row['total_to_be_covered']):'-') . "</div></td>"; echo "<td>&nbsp;</td>"; echo "</tr>"; } ?> </tbody> </table> <div class="footnote"> <p>(1) Personal income tax (PIT).</p> <p>(2) Sales tax revenue has not been required by the TFA. This amount is available to cover debt service if required.</p> <p>(3) Grant from City and Federal Subsidy.</p></div> <?php widget_data_tables_add_js($node); ?>
MomixSolutions/MyGovCenter
source/webapp/sites/all/modules/custom/checkbook_trends/templates/debt_capacity_trends/pledged_rev_cov_nyc_trans.tpl.php
PHP
agpl-3.0
5,320
/* * _Bullseye.cpp * * Created on: Aug 21, 2015 * Author: yankai */ #include "_Bullseye.h" #ifdef USE_OPENCV #ifdef USE_CUDA namespace kai { _Bullseye::_Bullseye() { m_abs = 90; m_scale1 = 0.25; m_scale2 = 0.0625; m_thr1 = 200; m_thr2 = 255; } _Bullseye::~_Bullseye() { } bool _Bullseye::init(void* pKiss) { IF_F(!this->_DetectorBase::init(pKiss)); Kiss* pK = (Kiss*)pKiss; pK->v("abs", &m_abs); pK->v("scale1", &m_scale1); pK->v("scale2", &m_scale2); pK->v("thr1", &m_thr1); pK->v("thr2", &m_thr2); m_nClass = 1; return true; } bool _Bullseye::start(void) { NULL_F(m_pT); return m_pT->start(getUpdate, this); } void _Bullseye::update(void) { while(m_pT->bRun()) { m_pT->autoFPSfrom(); if(check() >= 0) { detect(); if(m_bGoSleep) m_pU->clear(); } m_pT->autoFPSto(); } } int _Bullseye::check(void) { NULL__(m_pU,-1); NULL__(m_pV,-1); IF__(m_pV->BGR()->bEmpty(),-1); return this->_DetectorBase::check(); } void _Bullseye::detect(void) { GpuMat mBGR = *(m_pV->BGR()->gm()); GpuMat mHSV; cuda::cvtColor(mBGR, mHSV, COLOR_BGR2HSV); vector<GpuMat> vmHSV(3); split(mHSV, vmHSV); GpuMat mH = vmHSV[0]; GpuMat mS = vmHSV[1]; GpuMat mV = vmHSV[2]; GpuMat gHred; GpuMat gScaleHred; GpuMat gScaleS; GpuMat gBulleye; GpuMat gThr; cuda::absdiff(mH, Scalar(m_abs), gHred); cuda::multiply(gHred, Scalar(m_scale1), gScaleHred); cuda::multiply(mS, Scalar(m_scale2), gScaleS); cuda::multiply(gScaleHred, gScaleS, gBulleye); cuda::threshold(gBulleye, gThr, m_thr1, m_thr2, THRESH_BINARY); //THRESH_BINARY_INV); Mat mThr; gThr.download(mThr); vector< vector< Point > > vvContours; findContours(mThr, vvContours, RETR_EXTERNAL, CHAIN_APPROX_NONE); float kx = 1.0/mBGR.cols; float ky = 1.0/mBGR.rows; _Object o; vector<Point> vPoly; for (unsigned int i=0; i<vvContours.size(); i++) { vPoly.clear(); approxPolyDP( vvContours[i], vPoly, 3, true ); Rect r = boundingRect(vPoly); o.init(); o.setTstamp(m_pT->getTfrom()); o.setBB2D(rect2BB<vFloat4>(r)); o.scale(kx,ky); o.setTopClass(0, o.area()); m_pU->add(o); LOG_I("ID: "+ 
i2str(o.getTopClass())); } m_pU->swap(); } } #endif #endif
yankailab/OpenKAI
src/Detector/_Bullseye.cpp
C++
agpl-3.0
2,195
DELETE FROM `weenie` WHERE `class_Id` = 35548; INSERT INTO `weenie` (`class_Id`, `class_Name`, `type`, `last_Modified`) VALUES (35548, 'ace35548-assassinsdagger', 6, '2019-02-10 00:00:00') /* MeleeWeapon */; INSERT INTO `weenie_properties_int` (`object_Id`, `type`, `value`) VALUES (35548, 1, 1) /* ItemType - MeleeWeapon */ , (35548, 5, 100) /* EncumbranceVal */ , (35548, 9, 1048576) /* ValidLocations - MeleeWeapon */ , (35548, 10, 1048576) /* CurrentWieldedLocation - MeleeWeapon */ , (35548, 16, 1) /* ItemUseable - No */ , (35548, 51, 1) /* CombatUse - Melee */ , (35548, 93, 1044) /* PhysicsState - Ethereal, IgnoreCollisions, Gravity */ , (35548, 8041, 1) /* PCAPRecordedPlacement - RightHandCombat */; INSERT INTO `weenie_properties_bool` (`object_Id`, `type`, `value`) VALUES (35548, 22, True ) /* Inscribable */; INSERT INTO `weenie_properties_string` (`object_Id`, `type`, `value`) VALUES (35548, 1, 'Assassin''s Dagger') /* Name */; INSERT INTO `weenie_properties_d_i_d` (`object_Id`, `type`, `value`) VALUES (35548, 1, 33558325) /* Setup */ , (35548, 3, 536870932) /* SoundTable */ , (35548, 8, 100674287) /* Icon */ , (35548, 22, 872415275) /* PhysicsEffectTable */ , (35548, 52, 100689403) /* IconUnderlay */ , (35548, 8001, 2327056) /* PCAPRecordedWeenieHeader - Usable, CombatUse, Wielder, ValidLocations, CurrentlyWieldedLocation, Burden */ , (35548, 8002, 1) /* PCAPRecordedWeenieHeader2 - IconUnderlay */ , (35548, 8003, 67108882) /* PCAPRecordedObjectDesc - Inscribable, Attackable, IncludesSecondHeader */ , (35548, 8005, 170017) /* PCAPRecordedPhysicsDesc - CSetup, Parent, STable, PeTable, Position, AnimationFrame */ , (35548, 8009, 1) /* PCAPRecordedParentLocation - RightHand */; INSERT INTO `weenie_properties_position` (`object_Id`, `position_Type`, `obj_Cell_Id`, `origin_X`, `origin_Y`, `origin_Z`, `angles_W`, `angles_X`, `angles_Y`, `angles_Z`) VALUES (35548, 8040, 23855631, 93.55366, -21.31657, -0.071, -0.5565441, -0.5565441, -0.4361865, -0.4361865) /* 
PCAPRecordedLocation */ /* @teleloc 0x016C020F [93.553660 -21.316570 -0.071000] -0.556544 -0.556544 -0.436187 -0.436187 */; INSERT INTO `weenie_properties_i_i_d` (`object_Id`, `type`, `value`) VALUES (35548, 8000, 3358114065) /* PCAPRecordedObjectIID */ , (35548, 8008, 1342620634) /* PCAPRecordedParentIID */;
LtRipley36706/ACE-World
Database/3-Core/9 WeenieDefaults/SQL/MeleeWeapon/MeleeWeapon/35548 Assassin's Dagger.sql
SQL
agpl-3.0
2,502
End of preview.

This is a cleaner version of Github-code dataset, we add the following filters:

  • Average line length < 100
  • Alpha numeric characters fraction > 0.25
  • Remove auto-generated files (keyword search)

3.39M files are removed making up 2.94% of the dataset.

Downloads last month
10,170
Edit dataset card

Models trained or fine-tuned on codeparrot/github-code-clean