code
stringlengths 3
1.05M
| repo_name
stringlengths 4
116
| path
stringlengths 4
991
| language
stringclasses 9
values | license
stringclasses 15
values | size
int32 3
1.05M
|
---|---|---|---|---|---|
"use strict";
var _classCallCheck = function (instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } };
var Test = function Test() {
_classCallCheck(this, Test);
arr.map(x => x * x);
}; | lydell/babel | test/fixtures/transformation/api/blacklist/expected.js | JavaScript | mit | 264 |
CKEDITOR.plugins.setLang("imagebase","en-au",{captionPlaceholder:"Enter image caption"}); | cdnjs/cdnjs | ajax/libs/ckeditor/4.17.2/plugins/imagebase/lang/en-au.min.js | JavaScript | mit | 89 |
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// Empty abstract-style base class for data sources.
// NOTE(review): this is TypeScript compiler output (hence the IIFE wrapper
// pattern); presumably concrete sources subclass it — confirm against the
// .ts source before editing by hand.
var DataSource = (function () {
    function DataSource() {
    }
    return DataSource;
}());
exports.DataSource = DataSource;
| cdnjs/cdnjs | ajax/libs/deeplearn/0.6.0-alpha6/contrib/data/datasource.js | JavaScript | mit | 205 |
package multilanguage;
import org.eclipse.jface.action.IMenuManager;
import org.eclipse.jface.action.MenuManager;
import org.eclipse.ui.IWorkbenchActionConstants;
import org.eclipse.ui.IWorkbenchWindow;
import org.eclipse.ui.actions.ActionFactory;
import org.eclipse.ui.actions.ActionFactory.IWorkbenchAction;
import org.eclipse.ui.application.ActionBarAdvisor;
import org.eclipse.ui.application.IActionBarConfigurer;
/**
* An action bar advisor is responsible for creating, adding, and disposing of
* the actions added to a workbench window. Each window will be populated with
* new actions.
*/
public class ApplicationActionBarAdvisor extends ActionBarAdvisor {

    // Actions - important to allocate these only in makeActions, and then use
    // them in the fill methods. This ensures that the actions aren't recreated
    // when fillActionBars is called with FILL_PROXY.

    /** Workbench "Quit" action; created once in {@link #makeActions} and reused. */
    private IWorkbenchAction exitAction;

    /**
     * @param configurer the action bar configurer supplied by the workbench
     */
    public ApplicationActionBarAdvisor(IActionBarConfigurer configurer) {
        super(configurer);
    }

    /**
     * Creates the actions and registers them.
     * Registering is needed to ensure that key bindings work.
     * The corresponding commands keybindings are defined in the plugin.xml
     * file.
     * Registering also provides automatic disposal of the actions when
     * the window is closed.
     *
     * @param window the workbench window the actions belong to
     */
    protected void makeActions(final IWorkbenchWindow window) {
        exitAction = ActionFactory.QUIT.create(window);
        register(exitAction);
    }

    /**
     * Populates the menu bar with a single "File" menu containing the
     * Exit action created in {@link #makeActions}.
     *
     * @param menuBar the menu manager for the window's menu bar
     */
    protected void fillMenuBar(IMenuManager menuBar) {
        MenuManager fileMenu = new MenuManager("&File",
                IWorkbenchActionConstants.M_FILE);
        menuBar.add(fileMenu);
        fileMenu.add(exitAction);
    }
}
| NounVannakGitHub/Java-Tutorial-Vollegen | MultiLanguage/src/multilanguage/ApplicationActionBarAdvisor.java | Java | epl-1.0 | 1,642 |
<?php
namespace Drupal\Tests\language\Kernel;
use Drupal\language\Entity\ConfigurableLanguage;
use Drupal\KernelTests\KernelTestBase;
/**
* Tests \Drupal\language\Config\LanguageConfigFactoryOverride.
*
* @group language
*/
class LanguageConfigFactoryOverrideTest extends KernelTestBase {

  /**
   * Modules to enable.
   *
   * @var array
   */
  protected static $modules = ['system', 'language'];

  /**
   * Tests language.config_factory_override service has the default language.
   */
  public function testLanguageConfigFactoryOverride() {
    $this->installConfig('system');
    $this->installConfig('language');

    // With only default config installed, the override language is English.
    /** @var \Drupal\language\Config\LanguageConfigFactoryOverride $config_factory_override */
    $config_factory_override = \Drupal::service('language.config_factory_override');
    $this->assertEquals('en', $config_factory_override->getLanguage()->getId());

    ConfigurableLanguage::createFromLangcode('de')->save();

    // Invalidate the container. The service is re-fetched after the rebuild
    // so that it reflects the new site default language.
    $this->config('system.site')->set('default_langcode', 'de')->save();
    $this->container->get('kernel')->rebuildContainer();
    $config_factory_override = \Drupal::service('language.config_factory_override');
    $this->assertEquals('de', $config_factory_override->getLanguage()->getId());
  }

}
| tobiasbuhrer/tobiasb | web/core/modules/language/tests/src/Kernel/LanguageConfigFactoryOverrideTest.php | PHP | gpl-2.0 | 1,290 |
#!/usr/bin/env python
"""
Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
from lib.core.agent import agent
from lib.core.common import arrayizeValue
from lib.core.common import Backend
from lib.core.common import filterPairValues
from lib.core.common import getLimitRange
from lib.core.common import isInferenceAvailable
from lib.core.common import isNoneValue
from lib.core.common import isNumPosStrValue
from lib.core.common import isTechniqueAvailable
from lib.core.common import readInput
from lib.core.common import safeSQLIdentificatorNaming
from lib.core.common import safeStringFormat
from lib.core.common import unArrayizeValue
from lib.core.common import unsafeSQLIdentificatorNaming
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.data import paths
from lib.core.data import queries
from lib.core.enums import CHARSET_TYPE
from lib.core.enums import DBMS
from lib.core.enums import EXPECTED
from lib.core.enums import PAYLOAD
from lib.core.exception import SqlmapMissingMandatoryOptionException
from lib.core.exception import SqlmapUserQuitException
from lib.core.settings import CURRENT_DB
from lib.core.settings import METADB_SUFFIX
from lib.request import inject
from lib.techniques.brute.use import columnExists
from lib.techniques.brute.use import tableExists
class Search:
    """
    This class defines search functionalities for plugins.

    It is used as a mixin: methods reference helpers such as
    self.likeOrExact(), self.getColumns() and self.excludeDbsList that are
    provided by the enumeration classes it is combined with.
    """

    def __init__(self):
        pass
    def searchDb(self):
        """
        Search the back-end DBMS for databases whose name matches (LIKE or
        exact, per user choice) each name given in conf.db, first via inband
        (UNION/error/query) techniques and, failing that, via blind inference.
        Results are printed with conf.dumper.lister().
        """
        foundDbs = []
        rootQuery = queries[Backend.getIdentifiedDbms()].search_db
        dbList = conf.db.split(",")

        # MySQL < 5.0 has no information_schema, hence the alternate condition.
        if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema:
            dbCond = rootQuery.inband.condition2
        else:
            dbCond = rootQuery.inband.condition

        # dbConsider == "1" means LIKE match, "2" means exact match.
        dbConsider, dbCondParam = self.likeOrExact("database")

        for db in dbList:
            values = []
            db = safeSQLIdentificatorNaming(db)

            # These DBMSes store identifiers upper-cased in the catalog.
            if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2):
                db = db.upper()

            infoMsg = "searching database"
            if dbConsider == "1":
                infoMsg += "s like"
            infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(db)
            logger.info(infoMsg)

            # Optionally filter out system databases from the results.
            if conf.excludeSysDbs:
                exclDbsQuery = "".join(" AND '%s' != %s" % (unsafeSQLIdentificatorNaming(db), dbCond) for db in self.excludeDbsList)
                infoMsg = "skipping system database%s '%s'" % ("s" if len(self.excludeDbsList) > 1 else "", ", ".join(db for db in self.excludeDbsList))
                logger.info(infoMsg)
            else:
                exclDbsQuery = ""

            dbQuery = "%s%s" % (dbCond, dbCondParam)
            dbQuery = dbQuery % unsafeSQLIdentificatorNaming(db)

            # Inband path: one query retrieves all matching database names.
            if any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct:
                if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema:
                    query = rootQuery.inband.query2
                else:
                    query = rootQuery.inband.query
                query = query % (dbQuery + exclDbsQuery)
                values = inject.getValue(query, blind=False, time=False)

                if not isNoneValue(values):
                    values = arrayizeValue(values)
                    for value in values:
                        value = safeSQLIdentificatorNaming(value)
                        foundDbs.append(value)

            # Blind path: count the matches, then fetch them one by one.
            if not values and isInferenceAvailable() and not conf.direct:
                infoMsg = "fetching number of database"
                if dbConsider == "1":
                    infoMsg += "s like"
                infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(db)
                logger.info(infoMsg)

                if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema:
                    query = rootQuery.blind.count2
                else:
                    query = rootQuery.blind.count
                query = query % (dbQuery + exclDbsQuery)
                count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS)

                if not isNumPosStrValue(count):
                    warnMsg = "no database"
                    if dbConsider == "1":
                        warnMsg += "s like"
                    warnMsg += " '%s' found" % unsafeSQLIdentificatorNaming(db)
                    logger.warn(warnMsg)
                    continue

                indexRange = getLimitRange(count)

                for index in indexRange:
                    if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema:
                        query = rootQuery.blind.query2
                    else:
                        query = rootQuery.blind.query
                    query = query % (dbQuery + exclDbsQuery)
                    query = agent.limitQuery(index, query, dbCond)
                    value = unArrayizeValue(inject.getValue(query, union=False, error=False))
                    value = safeSQLIdentificatorNaming(value)
                    foundDbs.append(value)

        conf.dumper.lister("found databases", foundDbs)
    def searchTable(self):
        """
        Search the back-end DBMS for tables whose name matches (LIKE or
        exact) each name given in conf.tbl, optionally restricted to the
        databases in conf.db. On MySQL < 5.0 (no information_schema) falls
        back to dictionary-based brute forcing via tableExists(). Results go
        through conf.dumper.dbTables() and self.dumpFoundTables().
        """
        bruteForce = False

        if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema:
            errMsg = "information_schema not available, "
            errMsg += "back-end DBMS is MySQL < 5.0"
            bruteForce = True

        if bruteForce:
            # Ask before the (noisy) common-table brute force; default answer
            # depends on the DBMS (Access has no catalog, so default to yes).
            message = "do you want to use common table existence check? %s" % ("[Y/n/q]" if Backend.getIdentifiedDbms() in (DBMS.ACCESS,) else "[y/N/q]")
            test = readInput(message, default="Y" if "Y" in message else "N")

            if test[0] in ("n", "N"):
                return
            elif test[0] in ("q", "Q"):
                raise SqlmapUserQuitException
            else:
                regex = "|".join(conf.tbl.split(","))
                return tableExists(paths.COMMON_TABLES, regex)

        # Maps database name -> list of matching table names.
        foundTbls = {}
        tblList = conf.tbl.split(",")
        rootQuery = queries[Backend.getIdentifiedDbms()].search_table
        tblCond = rootQuery.inband.condition
        dbCond = rootQuery.inband.condition2
        # tblConsider == "1" means LIKE match, "2" means exact match.
        tblConsider, tblCondParam = self.likeOrExact("table")

        for tbl in tblList:
            values = []
            tbl = safeSQLIdentificatorNaming(tbl, True)

            # These DBMSes store identifiers upper-cased in the catalog.
            if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.FIREBIRD):
                tbl = tbl.upper()

            infoMsg = "searching table"
            if tblConsider == "1":
                infoMsg += "s like"
            infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(tbl)

            # Build the database-scoping WHERE fragment (explicit -D list,
            # system-database exclusion, or no restriction).
            if dbCond and conf.db and conf.db != CURRENT_DB:
                _ = conf.db.split(",")
                whereDbsQuery = " AND (" + " OR ".join("%s = '%s'" % (dbCond, unsafeSQLIdentificatorNaming(db)) for db in _) + ")"
                infoMsg += " for database%s '%s'" % ("s" if len(_) > 1 else "", ", ".join(db for db in _))
            elif conf.excludeSysDbs:
                whereDbsQuery = "".join(" AND '%s' != %s" % (unsafeSQLIdentificatorNaming(db), dbCond) for db in self.excludeDbsList)
                infoMsg2 = "skipping system database%s '%s'" % ("s" if len(self.excludeDbsList) > 1 else "", ", ".join(db for db in self.excludeDbsList))
                logger.info(infoMsg2)
            else:
                whereDbsQuery = ""

            logger.info(infoMsg)

            tblQuery = "%s%s" % (tblCond, tblCondParam)
            tblQuery = tblQuery % unsafeSQLIdentificatorNaming(tbl)

            # Inband path: one query retrieves (db, table) pairs directly.
            if any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct:
                query = rootQuery.inband.query
                query = query % (tblQuery + whereDbsQuery)
                values = inject.getValue(query, blind=False, time=False)

                # SQLite/Firebird have a single, unnamed database: fake a
                # db name ("SQLite"/"Firebird" + METADB_SUFFIX) per value.
                if values and Backend.getIdentifiedDbms() in (DBMS.SQLITE, DBMS.FIREBIRD):
                    newValues = []

                    if isinstance(values, basestring):
                        values = [values]
                    for value in values:
                        dbName = "SQLite" if Backend.isDbms(DBMS.SQLITE) else "Firebird"
                        newValues.append(["%s%s" % (dbName, METADB_SUFFIX), value])

                    values = newValues

                for foundDb, foundTbl in filterPairValues(values):
                    foundDb = safeSQLIdentificatorNaming(foundDb)
                    foundTbl = safeSQLIdentificatorNaming(foundTbl, True)

                    if foundDb is None or foundTbl is None:
                        continue

                    if foundDb in foundTbls:
                        foundTbls[foundDb].append(foundTbl)
                    else:
                        foundTbls[foundDb] = [foundTbl]

            # Blind path: first find candidate databases, then count/fetch
            # the matching tables per database.
            if not values and isInferenceAvailable() and not conf.direct:
                if Backend.getIdentifiedDbms() not in (DBMS.SQLITE, DBMS.FIREBIRD):
                    if len(whereDbsQuery) == 0:
                        infoMsg = "fetching number of databases with table"
                        if tblConsider == "1":
                            infoMsg += "s like"
                        infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(tbl)
                        logger.info(infoMsg)

                        query = rootQuery.blind.count
                        query = query % (tblQuery + whereDbsQuery)
                        count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS)

                        if not isNumPosStrValue(count):
                            warnMsg = "no databases have table"
                            if tblConsider == "1":
                                warnMsg += "s like"
                            warnMsg += " '%s'" % unsafeSQLIdentificatorNaming(tbl)
                            logger.warn(warnMsg)
                            continue

                        indexRange = getLimitRange(count)

                        for index in indexRange:
                            query = rootQuery.blind.query
                            query = query % (tblQuery + whereDbsQuery)
                            query = agent.limitQuery(index, query)
                            foundDb = unArrayizeValue(inject.getValue(query, union=False, error=False))
                            foundDb = safeSQLIdentificatorNaming(foundDb)

                            if foundDb not in foundTbls:
                                foundTbls[foundDb] = []

                            # Exact match: the searched name IS the result.
                            if tblConsider == "2":
                                foundTbls[foundDb].append(tbl)

                        # Exact match needs no per-database enumeration below.
                        if tblConsider == "2":
                            continue
                    else:
                        for db in conf.db.split(","):
                            db = safeSQLIdentificatorNaming(db)
                            if db not in foundTbls:
                                foundTbls[db] = []
                else:
                    dbName = "SQLite" if Backend.isDbms(DBMS.SQLITE) else "Firebird"
                    foundTbls["%s%s" % (dbName, METADB_SUFFIX)] = []

                for db in foundTbls.keys():
                    db = safeSQLIdentificatorNaming(db)

                    infoMsg = "fetching number of table"
                    if tblConsider == "1":
                        infoMsg += "s like"
                    infoMsg += " '%s' in database '%s'" % (unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(db))
                    logger.info(infoMsg)

                    query = rootQuery.blind.count2
                    if Backend.getIdentifiedDbms() not in (DBMS.SQLITE, DBMS.FIREBIRD):
                        query = query % unsafeSQLIdentificatorNaming(db)
                    query += " AND %s" % tblQuery

                    count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS)

                    if not isNumPosStrValue(count):
                        warnMsg = "no table"
                        if tblConsider == "1":
                            warnMsg += "s like"
                        warnMsg += " '%s' " % unsafeSQLIdentificatorNaming(tbl)
                        warnMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(db)
                        logger.warn(warnMsg)
                        continue

                    indexRange = getLimitRange(count)

                    for index in indexRange:
                        query = rootQuery.blind.query2

                        # Splice the table condition inside a trailing "('%s')"
                        # parenthesis when the template ends with one.
                        if query.endswith("'%s')"):
                            query = query[:-1] + " AND %s)" % tblQuery
                        else:
                            query += " AND %s" % tblQuery

                        if Backend.isDbms(DBMS.FIREBIRD):
                            query = safeStringFormat(query, index)
                        if Backend.getIdentifiedDbms() not in (DBMS.SQLITE, DBMS.FIREBIRD):
                            query = safeStringFormat(query, unsafeSQLIdentificatorNaming(db))
                        if not Backend.isDbms(DBMS.FIREBIRD):
                            query = agent.limitQuery(index, query)

                        foundTbl = unArrayizeValue(inject.getValue(query, union=False, error=False))
                        if not isNoneValue(foundTbl):
                            kb.hintValue = foundTbl
                            foundTbl = safeSQLIdentificatorNaming(foundTbl, True)
                            foundTbls[db].append(foundTbl)

        # Drop databases for which nothing usable was retrieved.
        for db in foundTbls.keys():
            if isNoneValue(foundTbls[db]):
                del foundTbls[db]

        if not foundTbls:
            warnMsg = "no databases contain any of the provided tables"
            logger.warn(warnMsg)
            return

        conf.dumper.dbTables(foundTbls)
        self.dumpFoundTables(foundTbls)
    def searchColumn(self):
        """
        Search the back-end DBMS for columns whose name matches (LIKE or
        exact) each name given in conf.col, optionally restricted to the
        tables/databases in conf.tbl/conf.db. On MySQL < 5.0 falls back to
        dictionary-based brute forcing via columnExists(). Results go
        through conf.dumper.dbColumns() and self.dumpFoundColumn().

        NOTE: conf.db/conf.tbl/conf.col are temporarily mutated while
        delegating to self.getColumns(); the originals are restored from
        origDb/origTbl on each loop iteration.
        """
        bruteForce = False

        if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema:
            errMsg = "information_schema not available, "
            errMsg += "back-end DBMS is MySQL < 5.0"
            bruteForce = True

        if bruteForce:
            # Ask before the (noisy) common-column brute force; default answer
            # depends on the DBMS (Access has no catalog, so default to yes).
            message = "do you want to use common column existence check? %s" % ("[Y/n/q]" if Backend.getIdentifiedDbms() in (DBMS.ACCESS,) else "[y/N/q]")
            test = readInput(message, default="Y" if "Y" in message else "N")

            if test[0] in ("n", "N"):
                return
            elif test[0] in ("q", "Q"):
                raise SqlmapUserQuitException
            else:
                regex = "|".join(conf.col.split(","))
                conf.dumper.dbTableColumns(columnExists(paths.COMMON_COLUMNS, regex))

                message = "do you want to dump entries? [Y/n] "
                output = readInput(message, default="Y")

                if output and output[0] not in ("n", "N"):
                    self.dumpAll()

                return

        rootQuery = queries[Backend.getIdentifiedDbms()].search_column
        # foundCols maps column name -> {db -> [tables]};
        # dbs maps db -> {table -> {column metadata}}.
        foundCols = {}
        dbs = {}
        whereDbsQuery = ""
        whereTblsQuery = ""
        infoMsgTbl = ""
        infoMsgDb = ""
        colList = conf.col.split(",")
        origTbl = conf.tbl
        origDb = conf.db
        colCond = rootQuery.inband.condition
        dbCond = rootQuery.inband.condition2
        tblCond = rootQuery.inband.condition3
        # colConsider == "1" means LIKE match, "2" means exact match.
        colConsider, colCondParam = self.likeOrExact("column")

        for column in colList:
            values = []
            column = safeSQLIdentificatorNaming(column)
            # Restore scoping options possibly clobbered by getColumns().
            conf.db = origDb
            conf.tbl = origTbl

            # These DBMSes store identifiers upper-cased in the catalog.
            if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2):
                column = column.upper()

            infoMsg = "searching column"
            if colConsider == "1":
                infoMsg += "s like"
            infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(column)

            foundCols[column] = {}

            # Build the table- and database-scoping WHERE fragments.
            if conf.tbl:
                _ = conf.tbl.split(",")
                whereTblsQuery = " AND (" + " OR ".join("%s = '%s'" % (tblCond, unsafeSQLIdentificatorNaming(tbl)) for tbl in _) + ")"
                infoMsgTbl = " for table%s '%s'" % ("s" if len(_) > 1 else "", ", ".join(unsafeSQLIdentificatorNaming(tbl) for tbl in _))

            if conf.db and conf.db != CURRENT_DB:
                _ = conf.db.split(",")
                whereDbsQuery = " AND (" + " OR ".join("%s = '%s'" % (dbCond, unsafeSQLIdentificatorNaming(db)) for db in _) + ")"
                infoMsgDb = " in database%s '%s'" % ("s" if len(_) > 1 else "", ", ".join(unsafeSQLIdentificatorNaming(db) for db in _))
            elif conf.excludeSysDbs:
                whereDbsQuery = "".join(" AND %s != '%s'" % (dbCond, unsafeSQLIdentificatorNaming(db)) for db in self.excludeDbsList)
                infoMsg2 = "skipping system database%s '%s'" % ("s" if len(self.excludeDbsList) > 1 else "", ", ".join(unsafeSQLIdentificatorNaming(db) for db in self.excludeDbsList))
                logger.info(infoMsg2)
            else:
                infoMsgDb = " across all databases"

            logger.info("%s%s%s" % (infoMsg, infoMsgTbl, infoMsgDb))

            colQuery = "%s%s" % (colCond, colCondParam)
            colQuery = colQuery % unsafeSQLIdentificatorNaming(column)

            # Inband path.
            if any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct:
                if not all((conf.db, conf.tbl)):
                    # Enumerate tables containing the column provided if
                    # either of database(s) or table(s) is not provided
                    query = rootQuery.inband.query
                    query = query % (colQuery + whereDbsQuery + whereTblsQuery)
                    values = inject.getValue(query, blind=False, time=False)
                else:
                    # Assume provided databases' tables contain the
                    # column(s) provided
                    values = []

                    for db in conf.db.split(","):
                        for tbl in conf.tbl.split(","):
                            values.append([safeSQLIdentificatorNaming(db), safeSQLIdentificatorNaming(tbl, True)])

                for db, tbl in filterPairValues(values):
                    db = safeSQLIdentificatorNaming(db)
                    tbls = tbl.split(",") if not isNoneValue(tbl) else []

                    for tbl in tbls:
                        tbl = safeSQLIdentificatorNaming(tbl, True)

                        if db is None or tbl is None:
                            continue

                        # Delegate actual column-name retrieval to
                        # getColumns(), which fills kb.data.cachedColumns.
                        conf.db = db
                        conf.tbl = tbl
                        conf.col = column

                        self.getColumns(onlyColNames=True, colTuple=(colConsider, colCondParam), bruteForce=False)

                        if db in kb.data.cachedColumns and tbl in kb.data.cachedColumns[db]:
                            if db not in dbs:
                                dbs[db] = {}

                            if tbl not in dbs[db]:
                                dbs[db][tbl] = {}

                            dbs[db][tbl].update(kb.data.cachedColumns[db][tbl])

                        if db in foundCols[column]:
                            foundCols[column][db].append(tbl)
                        else:
                            foundCols[column][db] = [tbl]

                kb.data.cachedColumns = {}

            # Blind path: first collect the candidate databases.
            if not values and isInferenceAvailable() and not conf.direct:
                if not conf.db:
                    infoMsg = "fetching number of databases with tables containing column"
                    if colConsider == "1":
                        infoMsg += "s like"
                    infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(column)
                    logger.info("%s%s%s" % (infoMsg, infoMsgTbl, infoMsgDb))

                    query = rootQuery.blind.count
                    query = query % (colQuery + whereDbsQuery + whereTblsQuery)
                    count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS)

                    if not isNumPosStrValue(count):
                        warnMsg = "no databases have tables containing column"
                        if colConsider == "1":
                            warnMsg += "s like"
                        warnMsg += " '%s'" % unsafeSQLIdentificatorNaming(column)
                        logger.warn("%s%s" % (warnMsg, infoMsgTbl))
                        continue

                    indexRange = getLimitRange(count)

                    for index in indexRange:
                        query = rootQuery.blind.query
                        query = query % (colQuery + whereDbsQuery + whereTblsQuery)
                        query = agent.limitQuery(index, query)

                        db = unArrayizeValue(inject.getValue(query, union=False, error=False))
                        db = safeSQLIdentificatorNaming(db)

                        if db not in dbs:
                            dbs[db] = {}

                        if db not in foundCols[column]:
                            foundCols[column][db] = []
                else:
                    for db in conf.db.split(","):
                        db = safeSQLIdentificatorNaming(db)

                        if db not in foundCols[column]:
                            foundCols[column][db] = []

        origDb = conf.db
        origTbl = conf.tbl

        # Blind path, second stage: per (column, db) count and fetch the
        # tables that contain the column, then pull the column metadata.
        for column, dbData in foundCols.items():
            colQuery = "%s%s" % (colCond, colCondParam)
            colQuery = colQuery % unsafeSQLIdentificatorNaming(column)

            for db in dbData:
                conf.db = origDb
                conf.tbl = origTbl

                infoMsg = "fetching number of tables containing column"
                if colConsider == "1":
                    infoMsg += "s like"
                infoMsg += " '%s' in database '%s'" % (unsafeSQLIdentificatorNaming(column), unsafeSQLIdentificatorNaming(db))
                logger.info(infoMsg)

                query = rootQuery.blind.count2
                query = query % unsafeSQLIdentificatorNaming(db)
                query += " AND %s" % colQuery
                query += whereTblsQuery

                count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS)

                if not isNumPosStrValue(count):
                    warnMsg = "no tables contain column"
                    if colConsider == "1":
                        warnMsg += "s like"
                    warnMsg += " '%s' " % unsafeSQLIdentificatorNaming(column)
                    warnMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(db)
                    logger.warn(warnMsg)
                    continue

                indexRange = getLimitRange(count)

                for index in indexRange:
                    query = rootQuery.blind.query2

                    # Splice the column condition inside a trailing "('%s')"
                    # parenthesis when the template ends with one.
                    if query.endswith("'%s')"):
                        query = query[:-1] + " AND %s)" % (colQuery + whereTblsQuery)
                    else:
                        query += " AND %s" % (colQuery + whereTblsQuery)

                    query = safeStringFormat(query, unsafeSQLIdentificatorNaming(db))
                    query = agent.limitQuery(index, query)

                    tbl = unArrayizeValue(inject.getValue(query, union=False, error=False))
                    kb.hintValue = tbl
                    tbl = safeSQLIdentificatorNaming(tbl, True)

                    conf.db = db
                    conf.tbl = tbl
                    conf.col = column

                    self.getColumns(onlyColNames=True, colTuple=(colConsider, colCondParam), bruteForce=False)

                    if db in kb.data.cachedColumns and tbl in kb.data.cachedColumns[db]:
                        if db not in dbs:
                            dbs[db] = {}

                        if tbl not in dbs[db]:
                            dbs[db][tbl] = {}

                        dbs[db][tbl].update(kb.data.cachedColumns[db][tbl])

                    kb.data.cachedColumns = {}

                    if db in foundCols[column]:
                        foundCols[column][db].append(tbl)
                    else:
                        foundCols[column][db] = [tbl]

        if dbs:
            conf.dumper.dbColumns(foundCols, colConsider, dbs)
            self.dumpFoundColumn(dbs, foundCols, colConsider)
        else:
            warnMsg = "no databases have tables containing any of the "
            warnMsg += "provided columns"
            logger.warn(warnMsg)
def search(self):
if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2):
for item in ('db', 'tbl', 'col'):
if getattr(conf, item, None):
setattr(conf, item, getattr(conf, item).upper())
if conf.col:
self.searchColumn()
elif conf.tbl:
self.searchTable()
elif conf.db:
self.searchDb()
else:
errMsg = "missing parameter, provide -D, -T or -C along "
errMsg += "with --search"
raise SqlmapMissingMandatoryOptionException(errMsg)
| golismero/golismero | tools/sqlmap/plugins/generic/search.py | Python | gpl-2.0 | 26,113 |
/*
* Copyright (c) 1999
* Silicon Graphics Computer Systems, Inc.
*
* Copyright (c) 1999
* Boris Fomitchev
*
* This material is provided "as is", with absolutely no warranty expressed
* or implied. Any use is at your own risk.
*
* Permission to use or copy this software for any purpose is hereby granted
* without fee, provided the above notices are retained on all copies.
* Permission to modify the code and to distribute modified code is granted,
* provided the above notices are retained, and a notice that the code was
* modified is included with the above copyright notice.
*
*/
#include "stlport_prefix.h"
#include <cmath>
#include <ios>
#include <locale>
#if defined (__DECCXX)
# define NDIG 400
#else
# define NDIG 82
#endif
#if defined (_STLP_NO_LONG_DOUBLE)
# define MAXECVT 17
# define MAXFCVT 18
typedef double max_double_type;
#else
# define MAXECVT 35
# define MAXFCVT 36
typedef long double max_double_type;
#endif
#define MAXFSIG MAXECVT
#define MAXESIZ 5
#define todigit(x) ((x)+'0')
#if defined (_STLP_UNIX)
# if defined (__sun)
# include <floatingpoint.h>
# endif
# if defined (__sun) || defined (__digital__) || defined (__sgi) || defined (_STLP_SCO_OPENSERVER) || defined (__NCR_SVR)
// DEC, SGI & Solaris need this
# include <values.h>
# include <nan.h>
# endif
# if defined (__QNXNTO__) || ( defined(__GNUC__) && defined(__APPLE__) ) || defined(_STLP_USE_UCLIBC) /* 0.9.26 */ || \
defined(__FreeBSD__)
# define USE_SPRINTF_INSTEAD
# endif
# if defined( _AIX ) // JFA 3-Aug-2000
# include <math.h>
# include <float.h>
# endif
#endif
#include <cstdio>
#include <cstdlib>
//#if defined(_CRAY)
//# include <stdlib.h>
//#endif
#if defined (_STLP_MSVC_LIB) || defined (__MINGW32__) || defined (__BORLANDC__) || defined (__DJGPP) || \
defined (_STLP_SCO_OPENSERVER) || defined (__NCR_SVR)
# include <float.h>
#endif
#if defined(__MRC__) || defined(__SC__) || defined(_CRAY) //*TY 02/24/2000 - added support for MPW
# include <fp.h>
#endif
#if defined (__CYGWIN__)
# include <ieeefp.h>
#endif
#if defined (__MSL__)
# include <cstdlib> // for atoi
# include <cstdio> // for snprintf
# include <algorithm>
# include <cassert>
#endif
#if defined (__ISCPP__)
# include <cfloat>
#endif
#include <algorithm>
#if defined (__DMC__)
# define snprintf _snprintf
#endif
#if defined (__SYMBIAN32__)
# include <e32math.h>
/* Minimal ecvt() replacement for Symbian built on e32math.h primitives.
 *
 * Renders |x| as exactly n decimal digits into buf (NUL terminated at
 * buf[n]) and returns buf. On success *pt receives the decimal point
 * position relative to the first digit and *sign receives 1 for negative
 * input, 0 otherwise; either pointer may be null. If any Math:: call
 * fails, the (possibly partially filled) buffer is returned as-is.
 */
static char* _symbian_ecvt(double x, int n, int* pt, int* sign, char* buf)
{
  // normalize sign and set sign bit
  if (x < 0)
  {
    if (sign) *sign = 1;
    x = -x;
  }
  else
    if (sign) *sign = 0;
  // initialize end-of-buffer
  char* end = buf+n;
  *end = 0;
  // if buffer will be empty anyway, return now
  if (n == 0)
    return buf;
  // normalize number into [1, 10) and set point position
  if (x != 0.0)
  {
    double fex;
    if (Math::Log(fex, x) != KErrNone)
      return buf;
    int ex = (int)fex;
    if (x < 1.0)
      --ex;  // truncation of a negative log rounds the wrong way
    if (ex != 0)
    {
      double temp;
      if (Math::Pow10(temp, ex) != KErrNone)
        return buf;
      x /= temp;
    }
    if (pt) *pt = ex + 1;
  }
  else
    if (pt) *pt = 1;
  const double dbl_epsilon = 2.2204460492503131e-16;
  // render digits (except for last digit); the epsilon nudge compensates
  // for the representation error accumulated by the repeated scaling
  char* ptr = buf;
  for (; (ptr+1)!=end; ++ptr)
  {
    char digit = (char)x;
    *ptr = '0' + digit;
    x = (x - (double)digit) * 10.0 * (1.0 + dbl_epsilon);
  }
  // render last digit, rounded
  double rx;
  if (Math::Round(rx, x, 0) != KErrNone)
    return buf;
  *ptr = '0' + (char)rx;
  // detect carry on last digit (a rounded digit of 10 renders as ':',
  // the character after '9') and propagate it back
  for (; ptr!=buf && *ptr==':'; --ptr)
  {
    *ptr = '0';
    ++*(ptr-1);
  }
  // detect overflow on first digit and, in case, shift
  // the sequence forward.  The source and destination regions overlap,
  // so memmove must be used here: the original memcpy is undefined
  // behavior for overlapping copies.
  if (*buf == ':')
  {
    *buf = '0';
    memmove(buf+1, buf, n-1);
    *buf = '1';
    if (pt) ++*pt;
  }
  return buf;
}
/* Minimal fcvt() replacement for Symbian.
 *
 * Renders x with n digits after the decimal point into buf and returns
 * buf. *sign receives 1 for negative input, 0 otherwise; *pt receives
 * the number of digits before the decimal point. Integer and fractional
 * parts are each rendered via _symbian_ecvt(). If any Math:: call
 * fails, the buffer is returned as-is.
 */
static char* _symbian_fcvt(double x, int n, int* pt, int* sign, char* buf)
{
  *buf = 0;
  // normalize sign
  if (x < 0.0)
  {
    *sign = 1;
    x = -x;
  }
  else
    *sign = 0;
  // split off the integer part
  double fx;
  if (Math::Int(fx, x) != KErrNone)
    return buf;
  if (fx != 0.0 || x == 0.0 || n == 0)
  {
    // render the integer part with exactly as many digits as it has
    // (1 + floor(log10(fx)) for non-zero values)
    int fn = 1;
    if (fx != 0.0)
    {
      double temp;
      if (Math::Log(temp, fx) != KErrNone)
        return buf;
      fn += (int)temp;
    }
    _symbian_ecvt(fx, fn, pt, 0, buf);
  }
  else
    *pt = 0;
  // render the fractional part immediately after the integer digits
  if (n != 0)
  {
    const double dx = x - fx;
    _symbian_ecvt(dx, n, 0, 0, buf+*pt);
  }
  return buf;
}
#endif
#if defined(__hpux) && (!defined(_INCLUDE_HPUX_SOURCE) || defined(__GNUC__))
extern "C" double erf(double);
extern "C" double erfc(double);
extern "C" double gamma(double); /* obsolescent */
extern "C" double hypot(double, double);
extern "C" int isnan(double);
extern "C" double j0(double);
extern "C" double j1(double);
extern "C" double jn(int, double);
extern "C" double lgamma(double);
extern "C" double y0(double);
extern "C" double y1(double);
extern "C" double yn(int, double);
# define HUGE_VALF _SINFINITY
# define INFINITY _SINFINITY
# define NAN _SQNAN
# define isnan(x) _ISNAN(x)
# define isinf(x) _ISINF(x)
# define signbit(x) _SIGNBIT(x)
# define isfinite(x) _ISFINITE(x)
# define isnormal(x) _ISNORMAL(x)
# define fpclassify(x) _FPCLASSIFY(x)
# define isunordered(x,y) _ISUNORDERED(x,y)
# define isgreater(x,y) _ISGREATER(x,y)
# define isgreaterequal(x,y) _ISGREATEREQUAL(x,y)
# define isless(x,y) _ISLESS(x,y)
# define islessequal(x,y) _ISLESSEQUAL(x,y)
# define islessgreater(x,y) _ISLESSGREATER(x,y)
# define FP_NORMAL 0
# define FP_ZERO 1
# define FP_INFINITE 2
# define FP_SUBNORMAL 3
# define FP_NAN 4
# define DECIMAL_DIG 17
# define _IS64(x) (sizeof(x) == sizeof(double))
# define _IS32(x) (sizeof(x) == sizeof(float))
extern "C" {
extern double copysign(double, double);
extern const float _SINFINITY;
extern const float _SQNAN;
//# if defined (_PA_RISC)
# define _ISNAN(x) (_IS32(x)?_Isnanf(x):(isnan)(x))
# define _ISINF(x) (_IS32(x)?_Isinff(x):_Isinf(x))
# define _SIGNBIT(x) (_IS32(x)?_Signbitf(x):_Signbit(x))
# define _ISFINITE(x) (_IS32(x)?_Isfinitef(x):_Isfinite(x))
# define _ISNORMAL(x) (_IS32(x)?_Isnormalf(x):_Isnormal(x))
# define _FPCLASSIFY(x) (_IS32(x)?_Fpclassifyf(x)>>1:_Fpclassify(x)>>1)
# define _ISUNORDERED(x,y) (_IS32(x)&&_IS32(y)?_Isunorderedf(x,y):_Isunordered(x,y))
extern int _Signbit(double);
extern int _Signbitf(float);
extern int _Isnanf(float);
extern int _Isfinite(double);
extern int _Isfinitef(float);
extern int _Isinf(double);
extern int _Isinff(float);
extern int _Isnormal(double);
extern int _Isnormalf(float);
extern int _Isunordered(double, double);
extern int _Isunorderedf(float, float);
extern int _Fpclassify(double);
extern int _Fpclassifyf(float);
//# else
//# include "math_ia64_internal.h"
//# define _FPCLASSIFY(x) (_IS32(x)?_Fpclassf(x):_Fpclass(x))
// extern int _Fpclass(double);
// extern int _Fpclassf(float);
//# endif
}
# if !defined (_INCLUDE_XOPEN_SOURCE_EXTENDED)
extern "C" char *fcvt(double, int, int *, int *);
extern "C" char *ecvt(double, int, int *, int *);
# endif
# if !defined (_INCLUDE_HPUX_SOURCE)
# if !defined (_LONG_DOUBLE)
# define _LONG_DOUBLE
typedef struct {
uint32_t word1, word2, word3, word4;
} long_double;
# endif /* _LONG_DOUBLE */
extern "C" char *_ldecvt(long_double, int, int *, int *);
extern "C" char *_ldfcvt(long_double, int, int *, int *);
# endif
#endif /* __hpux */
_STLP_BEGIN_NAMESPACE
_STLP_MOVE_TO_PRIV_NAMESPACE
#if defined (__MWERKS__) || defined(__BEOS__)
# define USE_SPRINTF_INSTEAD
#endif
#if defined (_AIX) || defined(__FreeBSD__) || defined(__NetBSD__) || defined(__OpenBSD__)
// Some OS'es only provide non-reentrant primitives, so we have to use additional synchronization here
# if !defined(_REENTRANT) && !defined(_THREAD_SAFE) && !(defined(_POSIX_THREADS) && defined(__OpenBSD__))
# define LOCK_CVT
# define RETURN_CVT(ecvt, x, n, pt, sign, buf) return ecvt(x, n, pt, sign);
# else
static _STLP_STATIC_MUTEX __put_float_mutex _STLP_MUTEX_INITIALIZER;
# define LOCK_CVT _STLP_auto_lock lock(__put_float_mutex);
# define RETURN_CVT(ecvt, x, n, pt, sign, buf) strcpy(buf, ecvt(x, n, pt, sign)); return buf;
# endif // !_REENTRANT
#endif // _AIX || __FreeBSD__ || __NetBSD__ || __OpenBSD__
// Tests for infinity and NaN differ on different OSs. We encapsulate
// these differences here.
#if !defined (USE_SPRINTF_INSTEAD)
// Platform dispatch for classifying special floating-point values.
// Every branch defines the same four predicates in terms of whatever
// the host C library provides:
//   _Stl_is_nan_or_inf(x) - x is NaN or an infinity
//   _Stl_is_inf(x)        - x is an infinity (either sign)
//   _Stl_is_neg_inf(x)    - x is negative infinity
//   _Stl_is_neg_nan(x)    - x is a NaN whose sign bit is set
// Hosts providing none of the needed primitives fall through to the
// final #else, which defines USE_SPRINTF_INSTEAD so formatting is done
// with sprintf/snprintf instead (see __write_float below).
#  if defined (__hpux) || defined (__DJGPP) || (defined (_STLP_USE_GLIBC) && ! defined (__MSL__)) || \
      defined (__FreeBSD__) || defined (__NetBSD__) || defined (__OpenBSD__)
static inline bool _Stl_is_nan_or_inf(double x)
#    if defined (isfinite)
{ return !isfinite(x); }
#    else
{ return !finite(x); }
#    endif
static inline bool _Stl_is_neg_nan(double x)    { return isnan(x) && ( copysign(1., x) < 0 ); }
static inline bool _Stl_is_inf(double x)        { return isinf(x); }
// inline bool _Stl_is_neg_inf(double x)    { return isinf(x) < 0; }
static inline bool _Stl_is_neg_inf(double x)    { return isinf(x) && x < 0; }
#  elif (defined (__unix) || defined (__unix__)) && \
        !defined (__APPLE__) && !defined (__DJGPP) && !defined(__osf__) && \
        !defined (_CRAY)
static inline bool _Stl_is_nan_or_inf(double x) { return IsNANorINF(x); }
static inline bool _Stl_is_inf(double x)        { return IsNANorINF(x) && IsINF(x); }
static inline bool _Stl_is_neg_inf(double x)    { return (IsINF(x)) && (x < 0.0); }
static inline bool _Stl_is_neg_nan(double x)    { return IsNegNAN(x); }
#  elif defined (__BORLANDC__) && ( __BORLANDC__ < 0x540 )
static inline bool _Stl_is_nan_or_inf(double x)  { return !_finite(x); }
static inline bool _Stl_is_inf(double x)         { return _Stl_is_nan_or_inf(x) && ! _isnan(x);}
static inline bool _Stl_is_neg_inf(double x)     { return _Stl_is_inf(x) && x < 0 ; }
static inline bool _Stl_is_neg_nan(double x)     { return _isnan(x) && x < 0 ; }
#  elif defined (_STLP_MSVC_LIB) || defined (__MINGW32__) || defined (__BORLANDC__)
static inline bool _Stl_is_nan_or_inf(double x)  { return !_finite(x); }
static inline bool _Stl_is_inf(double x)         {
  // _fpclass distinguishes the two infinities explicitly.
  int fclass = _fpclass(x);
  return fclass == _FPCLASS_NINF || fclass == _FPCLASS_PINF;
}
static inline bool _Stl_is_neg_inf(double x)     { return _fpclass(x) == _FPCLASS_NINF; }
static inline bool _Stl_is_neg_nan(double x)     { return _isnan(x) && _copysign(1., x) < 0 ; }
#  elif defined (__MRC__) || defined (__SC__)        //*TY 02/24/2000 - added support for MPW
static bool _Stl_is_nan_or_inf(double x) { return isnan(x) || !isfinite(x); }
static bool _Stl_is_inf(double x)        { return !isfinite(x); }
static bool _Stl_is_neg_inf(double x)    { return !isfinite(x) && signbit(x); }
static bool _Stl_is_neg_nan(double x)    { return isnan(x) && signbit(x); }
#  elif /* defined(__FreeBSD__) || defined(__OpenBSD__) || */ (defined(__GNUC__) && defined(__APPLE__))
static inline bool _Stl_is_nan_or_inf(double x)  { return !finite(x); }
static inline bool _Stl_is_inf(double x)         { return _Stl_is_nan_or_inf(x) && ! isnan(x); }
static inline bool _Stl_is_neg_inf(double x)     { return _Stl_is_inf(x) && x < 0 ; }
static inline bool _Stl_is_neg_nan(double x)     { return isnan(x) && copysign(1., x) < 0 ; }
#  elif defined( _AIX ) // JFA 11-Aug-2000
static bool _Stl_is_nan_or_inf(double x) { return isnan(x) || !finite(x); }
static bool _Stl_is_inf(double x)        { return !finite(x); }
// bool _Stl_is_neg_inf(double x)    { return _class(x) == FP_MINUS_INF; }
static bool _Stl_is_neg_inf(double x)    { return _Stl_is_inf(x) && ( copysign(1., x) < 0 ); }
static bool _Stl_is_neg_nan(double x)    { return isnan(x) && ( copysign(1., x) < 0 ); }
#  elif defined (__ISCPP__)
static inline bool _Stl_is_nan_or_inf (double x) { return _fp_isINF(x) || _fp_isNAN(x); }
static inline bool _Stl_is_inf (double x)        { return _fp_isINF(x); }
static inline bool _Stl_is_neg_inf (double x)    { return _fp_isINF(x) && x < 0; }
static inline bool _Stl_is_neg_nan (double x)    { return _fp_isNAN(x) && x < 0; }
#  elif defined (_CRAY)
#    if defined (_CRAYIEEE)
static inline bool _Stl_is_nan_or_inf(double x) { return isnan(x) || isinf(x); }
static inline bool _Stl_is_inf(double x)        { return isinf(x); }
static inline bool _Stl_is_neg_inf(double x)    { return isinf(x) && signbit(x); }
static inline bool _Stl_is_neg_nan(double x)    { return isnan(x) && signbit(x); }
#    else
// Non-IEEE Cray arithmetic has no NaN/infinity representations.
static inline bool _Stl_is_nan_or_inf(double x) { return false; }
static inline bool _Stl_is_inf(double x)        { return false; }
static inline bool _Stl_is_neg_inf(double x)    { return false; }
static inline bool _Stl_is_neg_nan(double x)    { return false; }
#    endif
#  elif defined (__SYMBIAN32__)
static inline bool _Stl_is_nan_or_inf(double x) { return Math::IsNaN(x) || Math::IsInfinite(x); }
static inline bool _Stl_is_inf(double x)        { return Math::IsInfinite(x); }
static inline bool _Stl_is_neg_inf(double x)    { return Math::IsInfinite(x) && x < 0; }
static inline bool _Stl_is_neg_nan(double x)    { return Math::IsNaN(x) && x < 0; }
#  else // nothing from above
#    define USE_SPRINTF_INSTEAD
#  endif
#endif // !USE_SPRINTF_INSTEAD
#if !defined (USE_SPRINTF_INSTEAD)
// Reentrant versions of floating-point conversion functions.  The argument
// lists look slightly different on different operating systems, so we're
// encapsulating the differences here.
//
// _Stl_ecvtR/_Stl_fcvtR convert a double in %e / %f style respectively;
// _Stl_qecvtR/_Stl_qfcvtR are the long double counterparts (only compiled
// when _STLP_NO_LONG_DOUBLE is not defined).  All of them return a pointer
// to a NUL-terminated digit string, store the decimal point position in
// *pt and a nonzero sign flag in *sign.  'buf' is caller-provided scratch
// space on hosts whose native conversion routine needs it; on other hosts
// it is ignored (see the _STLP_CVT_BUFFER machinery at the end).
#  if defined (__CYGWIN__) || defined(__DJGPP)
static inline char* _Stl_ecvtR(double x, int n, int* pt, int* sign, char* buf)
{ return ecvtbuf(x, n, pt, sign, buf); }
static inline char* _Stl_fcvtR(double x, int n, int* pt, int* sign, char* buf)
{ return fcvtbuf(x, n, pt, sign, buf); }
#    if !defined (_STLP_NO_LONG_DOUBLE)
static inline char* _Stl_qecvtR(long double x, int n, int* pt, int* sign, char* buf)
{ return ecvtbuf(x, n, pt, sign, buf); }
static inline char* _Stl_qfcvtR(long double x, int n, int* pt, int* sign, char* buf)
{ return fcvtbuf(x, n, pt, sign, buf); }
#    endif
#  elif defined (__SYMBIAN32__)
static inline char* _Stl_ecvtR(long double x, int n, int* pt, int* sign, char* buf)
{ return _symbian_ecvt(x, n, pt, sign, buf); }
static inline char* _Stl_fcvtR(long double x, int n, int* pt, int* sign, char* buf)
{ return _symbian_fcvt(x, n, pt, sign, buf); }
#  elif defined (_STLP_USE_GLIBC)
// glibc's *cvt_r return 0 on success and fill 'buf'; the "buf +" trick
// yields 'buf' itself on success.
static inline char* _Stl_ecvtR(double x, int n, int* pt, int* sign, char* buf)
{ return buf + ecvt_r(x, n, pt, sign, buf, NDIG+2); }
static inline char* _Stl_fcvtR(double x, int n, int* pt, int* sign, char* buf)
{ return buf + fcvt_r(x, n, pt, sign, buf, NDIG+2); }
#    if !defined (_STLP_NO_LONG_DOUBLE)
static inline char* _Stl_qecvtR(long double x, int n, int* pt, int* sign, char* buf)
{ return buf + qecvt_r(x, n, pt, sign, buf, NDIG+2); }
static inline char* _Stl_qfcvtR(long double x, int n, int* pt, int* sign, char* buf)
{ return buf + qfcvt_r(x, n, pt, sign, buf, NDIG+2); }
#    endif
#  elif defined (_STLP_SCO_OPENSERVER) || defined (__NCR_SVR)
static inline char* _Stl_ecvtR(double x, int n, int* pt, int* sign, char* buf)
{ return ecvt(x, n, pt, sign); }
static inline char* _Stl_fcvtR(double x, int n, int* pt, int* sign, char* buf)
{ return fcvt(x, n, pt, sign); }
#    if !defined (_STLP_NO_LONG_DOUBLE)
static inline char* _Stl_qecvtR(long double x, int n, int* pt, int* sign, char* buf)
{ return ecvtl(x, n, pt, sign); }
static inline char* _Stl_qfcvtR(long double x, int n, int* pt, int* sign, char* buf)
{ return fcvtl(x, n, pt, sign); }
#    endif
#  elif defined (__sun)
static inline char* _Stl_ecvtR(double x, int n, int* pt, int* sign, char* buf)
{ return econvert(x, n, pt, sign, buf); }
static inline char* _Stl_fcvtR(double x, int n, int* pt, int* sign, char* buf)
{ return fconvert(x, n, pt, sign, buf); }
#    if !defined (_STLP_NO_LONG_DOUBLE)
static inline char* _Stl_qecvtR(long double x, int n, int* pt, int* sign, char* buf)
{ return qeconvert(&x, n, pt, sign, buf); }
static inline char* _Stl_qfcvtR(long double x, int n, int* pt, int* sign, char* buf)
{ return qfconvert(&x, n, pt, sign, buf); }
#    endif
#  elif defined (__DECCXX)
static inline char* _Stl_ecvtR(double x, int n, int* pt, int* sign, char* buf)
{ return (ecvt_r(x, n, pt, sign, buf, NDIG)==0 ? buf : 0); }
static inline char* _Stl_fcvtR(double x, int n, int* pt, int* sign, char* buf)
{ return (fcvt_r(x, n, pt, sign, buf, NDIG)==0 ? buf : 0); }
#    if !defined (_STLP_NO_LONG_DOUBLE)
// fbp : no "long double" conversions !
static inline char* _Stl_qecvtR(long double x, int n, int* pt, int* sign, char* buf)
{ return (ecvt_r((double)x, n, pt, sign, buf, NDIG)==0 ? buf : 0) ; }
static inline char* _Stl_qfcvtR(long double x, int n, int* pt, int* sign, char* buf)
{ return (fcvt_r((double)x, n, pt, sign, buf, NDIG)==0 ? buf : 0); }
#    endif
#  elif defined (__hpux)
static inline char* _Stl_ecvtR(double x, int n, int* pt, int* sign, char* buf)
{ return ecvt(x, n, pt, sign); }
static inline char* _Stl_fcvtR(double x, int n, int* pt, int* sign, char* buf)
{ return fcvt(x, n, pt, sign); }
#    if !defined (_STLP_NO_LONG_DOUBLE)
#      if defined( _REENTRANT ) && (defined(_PTHREADS_DRAFT4) || defined(PTHREAD_THREADS_MAX))
static inline char* _Stl_qecvtR(long double x, int n, int* pt, int* sign, char* buf)
{ return (_ldecvt_r(*(long_double*)&x, n, pt, sign, buf, NDIG+2)==0 ? buf : 0); }
static inline char* _Stl_qfcvtR(long double x, int n, int* pt, int* sign, char* buf)
{ return (_ldfcvt_r(*(long_double*)&x, n, pt, sign, buf, NDIG+2)==0 ? buf : 0); }
#      else
static inline char* _Stl_qecvtR(long double x, int n, int* pt, int* sign, char* buf)
{ return _ldecvt(*(long_double*)&x, n, pt, sign); }
static inline char* _Stl_qfcvtR(long double x, int n, int* pt, int* sign, char* buf)
{ return _ldfcvt(*(long_double*)&x, n, pt, sign); }
#      endif
#    endif
#  elif defined (_AIX) || defined (__FreeBSD__) || defined (__NetBSD__) || defined (__OpenBSD__)
// LOCK_CVT / RETURN_CVT are defined elsewhere in this file; presumably they
// serialize access to the non-reentrant ecvt/fcvt and copy the result out.
static inline char* _Stl_ecvtR(double x, int n, int* pt, int* sign, char* buf)
{ LOCK_CVT RETURN_CVT(ecvt, x, n, pt, sign, buf) }
static inline char* _Stl_fcvtR(double x, int n, int* pt, int* sign, char* buf)
{ LOCK_CVT RETURN_CVT(fcvt, x, n, pt, sign, buf) }
#    if !defined (_STLP_NO_LONG_DOUBLE)
static inline char* _Stl_qecvtR(long double x, int n, int* pt, int* sign, char* buf)
{ LOCK_CVT RETURN_CVT(ecvt, x, n, pt, sign, buf) }
static inline char* _Stl_qfcvtR(long double x, int n, int* pt, int* sign, char* buf)
{ LOCK_CVT RETURN_CVT(fcvt, x, n, pt, sign, buf) }
#    endif
#  elif defined (__unix) && !defined (__APPLE__) && !defined (_CRAY)
static inline char* _Stl_ecvtR(double x, int n, int* pt, int* sign, char* buf)
{ return ecvt_r(x, n, pt, sign, buf); }
static inline char* _Stl_fcvtR(double x, int n, int* pt, int* sign, char* buf)
{ return fcvt_r(x, n, pt, sign, buf); }
#    if !defined (_STLP_NO_LONG_DOUBLE)
static inline char* _Stl_qecvtR(long double x, int n, int* pt, int* sign, char* buf)
{ return qecvt_r(x, n, pt, sign, buf); }
static inline char* _Stl_qfcvtR(long double x, int n, int* pt, int* sign, char* buf)
{ return qfcvt_r(x, n, pt, sign, buf); }
#    endif
#  elif defined (_STLP_MSVC_LIB) || defined (__MINGW32__) || defined (__BORLANDC__)
// those guys claim _cvt functions being reentrant.
#    if defined (_STLP_USE_SAFE_STRING_FUNCTIONS)
#      define _STLP_APPEND(a, b) a##b
#      define _STLP_BUF_PARAMS , char* buf, size_t bsize
#      define _STLP_SECURE_FUN(F, X, N, PT, SIGN) _STLP_APPEND(F, _s)(buf, bsize, X, N, PT, SIGN); return buf
#    else
#      define _STLP_CVT_DONT_NEED_BUF
#      define _STLP_BUF_PARAMS
#      define _STLP_SECURE_FUN(F, X, N, PT, SIGN) return F(X, N, PT, SIGN)
#    endif
static inline char* _Stl_ecvtR(double x, int n, int* pt, int* sign _STLP_BUF_PARAMS)
{ _STLP_SECURE_FUN(_ecvt, x, n, pt, sign); }
static inline char* _Stl_fcvtR(double x, int n, int* pt, int* sign _STLP_BUF_PARAMS)
{ _STLP_SECURE_FUN(_fcvt, x, n, pt, sign); }
#    if !defined (_STLP_NO_LONG_DOUBLE)
static inline char* _Stl_qecvtR(long double x, int n, int* pt, int* sign _STLP_BUF_PARAMS)
{ _STLP_SECURE_FUN(_ecvt, (double)x, n, pt, sign); }
static inline char* _Stl_qfcvtR(long double x, int n, int* pt, int* sign _STLP_BUF_PARAMS)
{ _STLP_SECURE_FUN(_fcvt, (double)x, n, pt, sign); }
#    endif
#    undef _STLP_SECURE_FUN
#    undef _STLP_BUF_PARAMS
#    undef _STLP_APPEND
#  elif defined (__ISCPP__)
static inline char* _Stl_ecvtR(double x, int n, int* pt, int* sign, char* buf)
{ return _fp_ecvt( x, n, pt, sign, buf); }
static inline char* _Stl_fcvtR(double x, int n, int* pt, int* sign, char* buf)
{ return _fp_fcvt(x, n, pt, sign, buf); }
#    if !defined (_STLP_NO_LONG_DOUBLE)
static inline char* _Stl_qecvtR(long double x, int n, int* pt, int* sign, char* buf)
{ return _fp_ecvt( x, n, pt, sign, buf); }
static inline char* _Stl_qfcvtR(long double x, int n, int* pt, int* sign, char* buf)
{ return _fp_fcvt(x, n, pt, sign, buf); }
#    endif
#  elif defined (__MRC__) || defined (__SC__) || defined (_CRAY)
static inline char* _Stl_ecvtR(double x, int n, int* pt, int* sign, char* )
{ return ecvt( x, n, pt, sign ); }
static inline char* _Stl_fcvtR(double x, int n, int* pt, int* sign, char* )
{ return fcvt(x, n, pt, sign); }
#    if !defined (_STLP_NO_LONG_DOUBLE)
static inline char* _Stl_qecvtR(long double x, int n, int* pt, int* sign, char* )
{ return ecvt( x, n, pt, sign ); }
static inline char* _Stl_qfcvtR(long double x, int n, int* pt, int* sign, char* )
{ return fcvt(x, n, pt, sign); }
#    endif
#  endif
// _STLP_CVT_BUFFER expands to the trailing scratch-buffer argument of the
// _Stl_*cvtR calls above: nothing when the host conversion routine needs no
// buffer, the bare buffer when it takes plain 'char*', or buffer-plus-size
// when the "secure" _s variants are in use.
#  if defined (_STLP_CVT_DONT_NEED_BUF)
#    define _STLP_CVT_BUFFER(B)
#  elif !defined (_STLP_USE_SAFE_STRING_FUNCTIONS)
#    define _STLP_CVT_BUFFER(B) , B
#  else
#    define _STLP_CVT_BUFFER(B) , _STLP_ARRAY_AND_SIZE(B)
#  endif
// _STLP_BUFFER likewise adapts a raw buffer argument for snprintf-style
// callers.
#  if !defined (_STLP_USE_SAFE_STRING_FUNCTIONS)
#    define _STLP_BUFFER(B) B
#  else
#    define _STLP_BUFFER(B) _STLP_ARRAY_AND_SIZE(B)
#  endif
//----------------------------------------------------------------------
// num_put
// __format_float formats a mantissa and exponent as returned by
// one of the conversion functions (ecvt_r, fcvt_r, qecvt_r, qfcvt_r)
// according to the specified precision and format flags. This is
// based on doprnt but is much simpler since it is concerned only
// with floating point input and does not consider all formats. It
// also does not deal with blank padding, which is handled by
// __copy_float_and_fill.
// Formats in %e style the digit string 'bp' (as produced by one of the
// _Stl_*cvtR conversion routines) into 'buf'.  'decpt' is the position of
// the decimal point relative to the start of the digit string, 'sign' is
// nonzero for negative values and 'is_zero' suppresses the exponent digits.
// Returns the offset in 'buf' at which digit grouping would start; no
// grouping actually occurs in scientific notation, the value is returned
// for coherency with __format_float_fixed.
static size_t __format_float_scientific( __iostring& buf, const char *bp,
                                         int decpt, int sign, bool is_zero,
                                         ios_base::fmtflags flags,
                                         int precision, bool /* islong */)
{
  // sign if required
  if (sign)
    buf += '-';
  else if (flags & ios_base::showpos)
    buf += '+';

  // first digit of mantissa
  buf += *bp++;

  // start of grouping position, grouping won't occur in scientific notation
  // as it is impossible to have something like 1234.0e04 but we return a correct
  // group position for coherency with __format_float_fixed.
  size_t __group_pos = buf.size();

  // decimal point if required
  if (precision != 0 || flags & ios_base::showpoint) {
    buf += '.';
  }

  // rest of mantissa: at most 'precision' digits after the point
  int rz = precision;
  while (rz-- > 0 && *bp != 0)
    buf += *bp++;

  // exponent is decpt - 1 because one mantissa digit precedes the point;
  // its decimal digits are generated backwards into expbuf.
  char expbuf[MAXESIZ + 2];
  char *suffix = expbuf + MAXESIZ;
  *suffix = 0;
  if (!is_zero) {
    int nn = decpt - 1;
    if (nn < 0)
      nn = -nn;
    for (; nn > 9; nn /= 10)
      *--suffix = (char) todigit(nn % 10);
    *--suffix = (char) todigit(nn);
  }

  // prepend leading zeros to exponent so it has at least two digits
  while (suffix > &expbuf[MAXESIZ - 2])
    *--suffix = '0';

  // put in the exponent sign
  *--suffix = (char) ((decpt > 0 || is_zero ) ? '+' : '-');

  // put in the e
  *--suffix = flags & ios_base::uppercase ? 'E' : 'e';

  // copy the suffix
  buf += suffix;
  return __group_pos;
}
// Formats in %f style the digit string 'bp' into 'buf'.  'decpt' is the
// decimal point position within the digit string, 'sign' is nonzero for
// negative values.  Returns the offset in 'buf' where the integral part
// starts ending, i.e. where digit grouping should be applied.
static size_t __format_float_fixed( __iostring &buf, const char *bp,
                                    int decpt, int sign, bool /* x */,
                                    ios_base::fmtflags flags,
                                    int precision, bool islong )
{
  // Emit the minus sign only if a nonzero digit will actually be printed
  // (the decpt/precision/*bp tests suppress "-" for values rounding to 0).
  if ( sign && (decpt > -precision) && (*bp != 0) )
    buf += '-';
  else if ( flags & ios_base::showpos )
    buf += '+';

  // k counts significant digits consumed; beyond maxfsig we pad with '0'.
  int k = 0;
  int maxfsig = islong ? 2*MAXFSIG : MAXFSIG;

  // digits before decimal point
  int nnn = decpt;
  do {
    buf += ((nnn <= 0 || *bp == 0 || k >= maxfsig) ? '0' : (++k, *bp++));
  } while ( --nnn > 0 );

  // start of grouping position
  size_t __group_pos = buf.size();

  // decimal point if needed
  if ( flags & ios_base::showpoint || precision > 0 ) {
    buf += '.';
  }

  // digits after decimal point if any (capped at MAXFCVT converted digits)
  nnn = (min) (precision, MAXFCVT);
  while ( --nnn >= 0 ) {
    buf += (++decpt <= 0 || *bp == 0 || k >= maxfsig) ? '0' : (++k, *bp++);
  }

  // trailing zeros if needed
  if ( precision > MAXFCVT ) {
    buf.append( precision - MAXFCVT, '0' );
  }

  return __group_pos;
}
// Appends the textual representation of an infinity or NaN to 'buf',
// honouring the showpos and uppercase format flags.
static void __format_nan_or_inf(__iostring& buf, double x, ios_base::fmtflags flags)
{
  static const char* inf[2] = { "inf", "Inf" };
  static const char* nan[2] = { "nan", "NaN" };

  // Select the label set and determine the sign of the special value.
  const bool __is_inf = _Stl_is_inf(x);
  const bool __negative = __is_inf ? _Stl_is_neg_inf(x) : _Stl_is_neg_nan(x);

  if (__negative)
    buf += '-';
  else if (flags & ios_base::showpos)
    buf += '+';

  const char** inf_or_nan = __is_inf ? inf : nan;
  buf += inf_or_nan[flags & ios_base::uppercase ? 1 : 0];
}
// Dispatches formatting of a converted digit string according to the
// floatfield flags: scientific (%e), fixed (%f), or the default %g-style
// rules that choose between the two.  'bp', 'decpt' and 'sign' come from
// one of the _Stl_*cvtR routines; 'x' is only inspected for special
// values and for being exactly zero.  Returns the grouping start offset
// reported by the underlying formatter (0 for NaN/infinity).
template <class max_double_type>
static inline size_t __format_float( __iostring &buf, const char * bp,
                                     int decpt, int sign, max_double_type x,
                                     ios_base::fmtflags flags,
                                     int precision, bool islong)
{
  size_t __group_pos = 0;
  // Output of infinities and NANs does not depend on the format flags
  if (_Stl_is_nan_or_inf((double)x)) {  // Infinity or NaN
    __format_nan_or_inf(buf, (double)x, flags);
  } else {                        // representable number
    switch (flags & ios_base::floatfield) {
      case ios_base::scientific:
        __group_pos = __format_float_scientific( buf, bp, decpt, sign, x == 0.0,
                                                 flags, precision, islong);
        break;
      case ios_base::fixed:
        __group_pos = __format_float_fixed( buf, bp, decpt, sign, true,
                                            flags, precision, islong);
        break;
      default: // g format
        // establish default precision
        if (flags & ios_base::showpoint || precision > 0) {
          if (precision == 0) precision = 1;
        } else
          precision = 6;

        // reset exponent if value is zero
        if (x == 0)
          decpt = 1;

        // kk = number of significant digits to keep; without showpoint,
        // trailing zeros are stripped.
        int kk = precision;
        if (!(flags & ios_base::showpoint)) {
          size_t n = strlen(bp);
          if (n < (size_t)kk)
            kk = (int)n;
          while (kk >= 1 && bp[kk-1] == '0')
            --kk;
        }

        // Standard %g rule: use scientific notation when the exponent is
        // below -4 (decpt < -3) or at least the precision.
        if (decpt < -3 || decpt > precision) {
          precision = kk - 1;
          __group_pos = __format_float_scientific( buf, bp, decpt, sign, x == 0,
                                                   flags, precision, islong);
        } else {
          precision = kk - decpt;
          __group_pos = __format_float_fixed( buf, bp, decpt, sign, true,
                                              flags, precision, islong);
        }
        break;
    } /* switch */
  } /* else is_nan_or_inf */
  return __group_pos;
}
#else /* USE_SPRINTF_INSTEAD */
// Predicate: true for the characters that end the integral (groupable)
// part of a printed float - the radix point or an exponent marker.
struct GroupPos {
  bool operator () (char __c) const {
    switch (__c) {
      case '.':
      case 'e':
      case 'E':
        return true;
      default:
        return false;
    }
  }
};
// Creates a printf-style format string for sprintf()/snprintf(), e.g.
// "%+.*Le", reflecting the showpos, showpoint, uppercase and floatfield
// flags.  'long_modifier' (e.g. 'L'), when nonzero, is inserted before
// the conversion character.  Returns the length of the format string.
static int __fill_fmtbuf(char* fmtbuf, ios_base::fmtflags flags, char long_modifier) {
  int i = 0;
  fmtbuf[i++] = '%';
  if (flags & ios_base::showpos)
    fmtbuf[i++] = '+';
  if (flags & ios_base::showpoint)
    fmtbuf[i++] = '#';
  // Precision is supplied by the caller as a '*' argument.
  fmtbuf[i++] = '.';
  fmtbuf[i++] = '*';
  if (long_modifier)
    fmtbuf[i++] = long_modifier;

  char conv;
  switch (flags & ios_base::floatfield)
  {
  case ios_base::scientific:
    conv = (flags & ios_base::uppercase) ? 'E' : 'e';
    break;
  case ios_base::fixed:
# if defined (__FreeBSD__)
    conv = 'f';
# else
    conv = (flags & ios_base::uppercase) ? 'F' : 'f';
# endif
    break;
  default:
    conv = (flags & ios_base::uppercase) ? 'G' : 'g';
    break;
  }
  fmtbuf[i++] = conv;
  fmtbuf[i] = 0;
  return i;
}
#endif /* USE_SPRINTF_INSTEAD */
// Formats 'x' with the requested precision and format flags and stores the
// result in 'buf'.  Returns the offset at which digit grouping starts (the
// position of the radix point or exponent marker).
size_t _STLP_CALL
__write_float(__iostring &buf, ios_base::fmtflags flags, int precision,
              double x) {
#if defined (USE_SPRINTF_INSTEAD)
  /* If we want 'arbitrary' precision, we should use 'arbitrary' buffer size
   * below. - ptr
   */
  char static_buf[128];
  // char *static_buf = new char [128+precision];
  char fmtbuf[32];
  __fill_fmtbuf(fmtbuf, flags, 0);
  // snprintf(static_buf, 128+precision, fmtbuf, precision, x);
# if !defined (N_PLAT_NLM)
  snprintf(_STLP_ARRAY_AND_SIZE(static_buf), fmtbuf, precision, x);
# else
  sprintf(static_buf, fmtbuf, precision, x);
# endif
  buf = static_buf;
  // delete [] static_buf;
  // Grouping starts at the first '.', 'e' or 'E' (see GroupPos).
  return find_if(buf.begin(), buf.end(), GroupPos()) - buf.begin();
#else
# if !defined (_STLP_CVT_DONT_NEED_BUF)
  char cvtbuf[NDIG + 2];
# endif
  char * bp;
  int decpt, sign;
  // Convert to a digit string first; the digit count depends on the format
  // (for %e one extra digit precedes the decimal point).
  switch (flags & ios_base::floatfield) {
    case ios_base::fixed:
      bp = _Stl_fcvtR(x, (min) (precision, MAXFCVT), &decpt, &sign _STLP_CVT_BUFFER(cvtbuf));
      break;
    case ios_base::scientific :
      bp = _Stl_ecvtR(x, (min) (precision + 1, MAXECVT), &decpt, &sign _STLP_CVT_BUFFER(cvtbuf));
      break;
    default :
      bp = _Stl_ecvtR(x, (min) (precision, MAXECVT), &decpt, &sign _STLP_CVT_BUFFER(cvtbuf));
      break;
  }
  return __format_float(buf, bp, decpt, sign, x, flags, precision, false);
#endif
}
#if !defined (_STLP_NO_LONG_DOUBLE)
// long double overload of __write_float; identical structure to the double
// version above, but uses the q*cvtR conversions / the 'L' printf modifier.
size_t _STLP_CALL
__write_float(__iostring &buf, ios_base::fmtflags flags, int precision,
              long double x) {
# if defined (USE_SPRINTF_INSTEAD)
  /* If we want 'arbitrary' precision, we should use 'arbitrary' buffer size
   * below. - ptr
   */
  char static_buf[128];
  // char *static_buf = new char [128+precision];
  char fmtbuf[64];
  int i = __fill_fmtbuf(fmtbuf, flags, 'L');
  // snprintf(static_buf, 128+precision, fmtbuf, precision, x);
# if !defined (N_PLAT_NLM)
  snprintf(_STLP_ARRAY_AND_SIZE(static_buf), fmtbuf, precision, x);
# else
  sprintf(static_buf, fmtbuf, precision, x);
# endif
  // we should be able to return buf + sprintf(), but we do not trust'em...
  buf = static_buf;
  // delete [] static_buf;
  return find_if(buf.begin(), buf.end(), GroupPos()) - buf.begin();
# else
#  if !defined (_STLP_CVT_DONT_NEED_BUF)
  char cvtbuf[NDIG + 2];
#  endif
  char * bp;
  int decpt, sign;
  switch (flags & ios_base::floatfield) {
    case ios_base::fixed:
      bp = _Stl_qfcvtR(x, (min) (precision, MAXFCVT), &decpt, &sign _STLP_CVT_BUFFER(cvtbuf));
      break;
    case ios_base::scientific:
      bp = _Stl_qecvtR(x, (min) (precision + 1, MAXECVT), &decpt, &sign _STLP_CVT_BUFFER(cvtbuf));
      break;
    default :
      bp = _Stl_qecvtR(x, (min) (precision, MAXECVT), &decpt, &sign _STLP_CVT_BUFFER(cvtbuf));
      break;
  }
  return __format_float(buf, bp, decpt, sign, x, flags, precision, true);
# endif /* USE_SPRINTF_INSTEAD */
}
#endif /* _STLP_NO_LONG_DOUBLE */
// Appends to 'out' the decimal digits of the integral part of __x (with a
// leading '-' for negative values); the fractional part is discarded.
void _STLP_CALL __get_floor_digits(__iostring &out, _STLP_LONGEST_FLOAT_TYPE __x) {
#if defined (USE_SPRINTF_INSTEAD)
  char cvtbuf[128];
# if !defined (_STLP_NO_LONG_DOUBLE)
#  if !defined (N_PLAT_NLM)
  snprintf(_STLP_ARRAY_AND_SIZE(cvtbuf), "%Lf", __x); // check for 1234.56!
#  else
  sprintf(cvtbuf, "%Lf", __x); // check for 1234.56!
#  endif
# else
  snprintf(_STLP_ARRAY_AND_SIZE(cvtbuf), "%f", __x); // check for 1234.56!
# endif
  // Keep only the characters before the decimal point, if there is one.
  char *p = strchr( cvtbuf, '.' );
  if ( p == 0 ) {
    out.append( cvtbuf );
  } else {
    out.append( cvtbuf, p );
  }
#else
# if !defined (_STLP_CVT_DONT_NEED_BUF)
  char cvtbuf[NDIG + 2];
# endif
  char * bp;
  int decpt, sign;
  // Convert with 0 digits after the decimal point; the first 'decpt'
  // characters of the digit string are the integral part.
# if !defined (_STLP_NO_LONG_DOUBLE)
  bp = _Stl_qfcvtR(__x, 0, &decpt, &sign _STLP_CVT_BUFFER(cvtbuf));
# else
  bp = _Stl_fcvtR(__x, 0, &decpt, &sign _STLP_CVT_BUFFER(cvtbuf));
# endif
  if (sign) {
    out += '-';
  }
  out.append(bp, bp + decpt);
#endif // USE_SPRINTF_INSTEAD
}
#if !defined (_STLP_NO_WCHAR_T)
// Widens the narrow formatted number 'str' into 'out'.  When __check_dot
// is true, the first '.' encountered is replaced by the locale radix
// character 'dot'; every other character (and everything after the
// substitution) is widened with the supplied ctype facet.
void _STLP_CALL __convert_float_buffer( __iostring const& str, __iowstring &out,
                                        const ctype<wchar_t>& ct, wchar_t dot, bool __check_dot)
{
  string::const_iterator __cur(str.begin()), __last(str.end());
  bool __replace_dot = __check_dot;
  for (; __cur != __last; ++__cur) {
    if (__replace_dot && (*__cur == '.')) {
      // Substitute the locale-specific radix character exactly once.
      out += dot;
      __replace_dot = false;
    } else {
      out += ct.widen(*__cur);
    }
  }
}
#endif
// Replaces the '.' radix character in 'str' by the locale-specific one,
// if it differs; at most one occurrence is changed.
void _STLP_CALL
__adjust_float_buffer(__iostring &str, char dot) {
  if (dot == '.')
    return;
  const size_t __dot_pos = str.find('.');
  if (__dot_pos != string::npos)
    str[__dot_pos] = dot;
}
_STLP_MOVE_TO_STD_NAMESPACE
_STLP_END_NAMESPACE
// Local Variables:
// mode:C++
// End:
| yeKcim/warmux | trunk/build/symbian/lib/stlport/src/num_put_float.cpp | C++ | gpl-2.0 | 34,762 |
// { dg-do compile }
// 2007-09-20 Benjamin Kosnik <bkoz@redhat.com>
// Copyright (C) 2007-2017 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library. This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License along
// with this library; see the file COPYING3. If not see
// <http://www.gnu.org/licenses/>.
#include <algorithm>
#include <functional>
#include <testsuite_api.h>
namespace std
{
  using __gnu_test::NonDefaultConstructible;

  // Explicitly instantiate std::for_each with a value type that cannot be
  // default-constructed.  This is a compile-only check that the algorithm
  // places no default-construction requirement on its arguments.
  typedef NonDefaultConstructible 		value_type;
  typedef value_type* 			iterator_type;
  typedef std::pointer_to_unary_function<value_type, void> function_type;

  template function_type for_each(iterator_type, iterator_type,
				  function_type);
}
| mickael-guene/gcc | libstdc++-v3/testsuite/25_algorithms/for_each/requirements/explicit_instantiation/2.cc | C++ | gpl-2.0 | 1,221 |
// g2o - General Graph Optimization
// Copyright (C) 2011 R. Kuemmerle, G. Grisetti, W. Burgard
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
// TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
// PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "vertex_pointxyz.h"
#include <stdio.h>
#ifdef G2O_HAVE_OPENGL
#include "../../stuff/opengl_wrapper.h"
#endif
#include <typeinfo>
namespace g2o {
// Reads the three estimate coordinates from the stream and installs them.
bool VertexPointXYZ::read(std::istream& is) {
  Vector3d lv;
  is >> lv[0] >> lv[1] >> lv[2];
  setEstimate(lv);
  return true;
}
bool VertexPointXYZ::write(std::ostream& os) const {
Vector3d lv=estimate();
for (int i=0; i<3; i++){
os << lv[i] << " ";
}
return os.good();
}
#ifdef G2O_HAVE_OPENGL
// Registers this draw action under the mangled type name of VertexPointXYZ.
VertexPointXYZDrawAction::VertexPointXYZDrawAction(): DrawAction(typeid(VertexPointXYZ).name()){
}
// Refreshes cached property pointers; the point-size property is only
// available while parameters are cached by the base class.
bool VertexPointXYZDrawAction::refreshPropertyPtrs(HyperGraphElementAction::Parameters* params_){
  if (!DrawAction::refreshPropertyPtrs(params_))
    return false;
  _pointSize = _previousParams
    ? _previousParams->makeProperty<FloatProperty>(_typeName + "::POINT_SIZE", 1.)
    : 0;
  return true;
}
// Draws the vertex as a single OpenGL point at its current estimate.
// Returns 0 when the element is of a different registered type; otherwise
// returns this (also when drawing is disabled via the _show property).
HyperGraphElementAction* VertexPointXYZDrawAction::operator()(HyperGraph::HyperGraphElement* element,
                                                              HyperGraphElementAction::Parameters* params ){
  // Only handle elements whose dynamic type name matches the registered one.
  if (typeid(*element).name()!=_typeName)
    return 0;
  refreshPropertyPtrs(params);
  if (! _previousParams)
    return this;
  // Drawing can be toggled off through the _show property.
  if (_show && !_show->value())
    return this;
  VertexPointXYZ* that = static_cast<VertexPointXYZ*>(element);

  // Save GL state, draw an unlit colored point at the estimate, restore.
  glPushAttrib(GL_ENABLE_BIT | GL_POINT_BIT);
  glDisable(GL_LIGHTING);
  glColor3f(0.8f,0.5f,0.3f);
  if (_pointSize) {
    glPointSize(_pointSize->value());
  }
  glBegin(GL_POINTS);
  glVertex3f((float)that->estimate()(0),(float)that->estimate()(1),(float)that->estimate()(2));
  glEnd();
  glPopAttrib();
  return this;
}
#endif
// Registers this gnuplot writer under the mangled type name of VertexPointXYZ.
VertexPointXYZWriteGnuplotAction::VertexPointXYZWriteGnuplotAction() :
  WriteGnuplotAction(typeid(VertexPointXYZ).name())
{
}
// Writes the vertex estimate as "x y z " plus newline to the stream given
// in the action parameters.  Returns 0 on a type mismatch or missing
// stream, otherwise this.
HyperGraphElementAction* VertexPointXYZWriteGnuplotAction::operator()(HyperGraph::HyperGraphElement* element, HyperGraphElementAction::Parameters* params_ )
{
  if (typeid(*element).name() != _typeName)
    return 0;

  WriteGnuplotAction::Parameters* params = static_cast<WriteGnuplotAction::Parameters*>(params_);
  if (!params->os) {
    std::cerr << __PRETTY_FUNCTION__ << ": warning, no valid os specified" << std::endl;
    return 0;
  }

  VertexPointXYZ* v = static_cast<VertexPointXYZ*>(element);
  std::ostream& os = *(params->os);
  os << v->estimate().x() << " " << v->estimate().y() << " " << v->estimate().z() << " " << std::endl;
  return this;
}
}
| CreativeCimmons/ORB-SLAM-Android-app | slam_ext/Thirdparty/g2o/g2o/types/slam3d/vertex_pointxyz.cpp | C++ | gpl-2.0 | 3,993 |
<?php
/**
* Floating Social Bar is the best social media plugin for WordPress
* that adds a floating bar with share buttons to your content
* without slowing down your site.
*
* @package Floating Social Bar
* @author Syed Balkhi
* @author Thomas Griffin
* @license GPL-2.0+
* @link http://wpbeginner.com/floating-social-bar/
* @copyright 2013 WPBeginner. All rights reserved.
*
* @wordpress-plugin
* Plugin Name: Floating Social Bar
* Plugin URI: http://wpbeginner.com/floating-social-bar/
* Description: Floating Social Bar is the best social media plugin for WordPress that adds a floating bar with share buttons to your content without slowing down your site.
* Version: 1.1.7
* Author: Syed Balkhi and Thomas Griffin
* Author URI: http://wpbeginner.com/
* Text Domain: fsb
* Contributors: smub, griffinjt
* License: GPL-2.0+
* License URI: http://www.gnu.org/licenses/gpl-2.0.txt
* Domain Path: /lang
*/
// If this file is called directly, abort.
if ( ! defined( 'WPINC' ) ) die;
// Load the main plugin class and widget class.
require_once( plugin_dir_path( __FILE__ ) . 'class-floating-social-bar.php' );
// Register hooks for activation, deactivation and uninstall instances.
// The callables reference methods on the floating_social_bar class loaded above.
register_activation_hook( __FILE__, array( 'floating_social_bar', 'activate' ) );
register_deactivation_hook( __FILE__, array( 'floating_social_bar', 'deactivate' ) );
register_uninstall_hook( __FILE__, array( 'floating_social_bar', 'uninstall' ) );
// Initialize the plugin via its get_instance() accessor.
$floating_social_bar = floating_social_bar::get_instance();
// Generate a template tag for use in template files.
// Generate a template tag for use in template files.
if ( ! function_exists( 'floating_social_bar' ) ) {
    /**
     * Floating Social Bar template tag.
     *
     * Outputs (or returns) a floating social bar anywhere in a template file
     * by delegating to the [fsb-social-bar] shortcode. The keys currently
     * available are 'facebook', 'twitter', 'google', 'linkedin', and
     * 'pinterest'. Set a key to true to display that social service in the
     * bar. Services will be output in the order that you specify in $args.
     *
     * @package Floating Social Bar
     * @param array $args   Args used for the floating social bar.
     * @param bool  $return Flag for returning or echoing the bar content.
     */
    function floating_social_bar( $args = array(), $return = false ) {
        // Convert the args into the query-string style the shortcode expects.
        $pairs = array();
        foreach ( $args as $key => $value ) {
            $pairs[] = $key . '=' . $value;
        }
        $shortcode = '[fsb-social-bar ' . implode( ' ', $pairs ) . ']';

        // Return or echo the content via shortcode.
        if ( $return ) {
            return do_shortcode( $shortcode );
        }

        echo do_shortcode( $shortcode );
    }
}
//=set test "Test" | AlexanderDolgan/juliawp | wp-content/themes/node_modules/gulp-rigger/node_modules/rigger/test/input-settings/stringval.js | JavaScript | gpl-2.0 | 18 |
// license:BSD-3-Clause
// copyright-holders:Vas Crabb
#include "emu.h"
#include "ti8x.h"
#define LOG_GENERAL (1U << 0)
#define LOG_BITPROTO (1U << 1)
#define LOG_BYTEPROTO (1U << 2)
//#define VERBOSE (LOG_GENERAL | LOG_BITPROTO | LOG_BYTEPROTO)
#define LOG_OUTPUT_FUNC device().logerror
#include "logmacro.h"
#define LOGBITPROTO(...) LOGMASKED(LOG_BITPROTO, __VA_ARGS__)
#define LOGBYTEPROTO(...) LOGMASKED(LOG_BYTEPROTO, __VA_ARGS__)
DEFINE_DEVICE_TYPE(TI8X_LINK_PORT, ti8x_link_port_device, "ti8x_link_port", "TI-8x Link Port")
// Convenience constructor: forwards to the main constructor using the
// concrete TI8X_LINK_PORT device type.
ti8x_link_port_device::ti8x_link_port_device(
		machine_config const &mconfig,
		char const *tag,
		device_t *owner,
		uint32_t clock)
	: ti8x_link_port_device(mconfig, TI8X_LINK_PORT, tag, owner, clock)
{
}
// Main constructor.  Both link lines (tip and ring) start out high, which
// is the idle state for the port; m_dev is filled in later from the slot
// (see device_config_complete).
ti8x_link_port_device::ti8x_link_port_device(
		machine_config const &mconfig,
		device_type type,
		char const *tag,
		device_t *owner,
		uint32_t clock)
	: device_t(mconfig, type, tag, owner, clock)
	, device_single_card_slot_interface<device_ti8x_link_port_interface>(mconfig, *this)
	, m_tip_handler(*this)
	, m_ring_handler(*this)
	, m_dev(nullptr)
	, m_tip_in(true)
	, m_tip_out(true)
	, m_ring_in(true)
	, m_ring_out(true)
{
}
// Host writes the tip line; forward level changes to the plugged-in
// device (if any), ignoring writes that don't change the line state.
WRITE_LINE_MEMBER(ti8x_link_port_device::tip_w)
{
	bool const level(0 != state);
	if (level == m_tip_out)
		return;

	m_tip_out = level;
	if (m_dev)
		m_dev->input_tip(level ? 1 : 0);
}
// Host writes the ring line; forward level changes to the plugged-in
// device (if any), ignoring writes that don't change the line state.
WRITE_LINE_MEMBER(ti8x_link_port_device::ring_w)
{
	bool const level(0 != state);
	if (level == m_ring_out)
		return;

	m_ring_out = level;
	if (m_dev)
		m_dev->input_ring(level ? 1 : 0);
}
void ti8x_link_port_device::device_start()
{
	// Resolve output callbacks so unconnected lines are safe to drive.
	m_tip_handler.resolve_safe();
	m_ring_handler.resolve_safe();

	// Register line states for save states.
	save_item(NAME(m_tip_in));
	save_item(NAME(m_tip_out));
	save_item(NAME(m_ring_in));
	save_item(NAME(m_ring_out));

	// Both lines idle high.
	m_tip_in = m_tip_out = true;
	m_ring_in = m_ring_out = true;
}
void ti8x_link_port_device::device_config_complete()
{
	// Cache the plugged-in card device; nullptr when the slot is empty.
	m_dev = get_card_device();
}
// Interface mix-in for devices that plug into a TI-8x link port.  Caches
// the owning port (nullptr if the owner is not a ti8x_link_port_device).
device_ti8x_link_port_interface::device_ti8x_link_port_interface(
		machine_config const &mconfig,
		device_t &device)
	: device_interface(device, "ti8xlink")
	, m_port(dynamic_cast<ti8x_link_port_device *>(device.owner()))
{
}
// Bit-level protocol helper.  Starts with no timeout timer allocated, the
// protocol idle, an empty transmit buffer, and both input lines high.
device_ti8x_link_port_bit_interface::device_ti8x_link_port_bit_interface(
		machine_config const &mconfig,
		device_t &device)
	: device_ti8x_link_port_interface(mconfig, device)
	, m_error_timer(nullptr)
	, m_bit_phase(IDLE)
	, m_tx_bit_buffer(EMPTY)
	, m_tip_in(true)
	, m_ring_in(true)
{
}
void device_ti8x_link_port_bit_interface::interface_pre_start()
{
	device_ti8x_link_port_interface::interface_pre_start();

	// Allocate the bit-timeout timer once (pre_start can run again on reset
	// ordering); then put the protocol machine into a clean idle state.
	if (!m_error_timer)
		m_error_timer = device().machine().scheduler().timer_alloc(timer_expired_delegate(FUNC(device_ti8x_link_port_bit_interface::bit_timeout), this));

	m_bit_phase = IDLE;
	m_tx_bit_buffer = EMPTY;
	m_tip_in = m_ring_in = true;
}
void device_ti8x_link_port_bit_interface::interface_post_start()
{
	device_ti8x_link_port_interface::interface_post_start();

	// Register bit-protocol state with the save-state system.
	device().save_item(NAME(m_bit_phase));
	device().save_item(NAME(m_tx_bit_buffer));
	device().save_item(NAME(m_tip_in));
	device().save_item(NAME(m_ring_in));
}
void device_ti8x_link_port_bit_interface::interface_pre_reset()
{
	device_ti8x_link_port_interface::interface_pre_reset();

	// Abandon any transfer in progress.  If either input line is still low,
	// wait for the bus to go idle before allowing a new transmission.
	m_error_timer->reset();
	m_bit_phase = (m_tip_in && m_ring_in) ? IDLE : WAIT_IDLE;
	m_tx_bit_buffer = EMPTY;

	// Release both output lines.
	output_tip(1);
	output_ring(1);
}
// Queue a single bit for transmission.  If the bus is idle the transfer
// starts immediately; if we are waiting for the bus, (re)arm the timeout.
void device_ti8x_link_port_bit_interface::send_bit(bool data)
{
	LOGBITPROTO("queue %d bit\n", data ? 1 : 0);

	if (m_tx_bit_buffer != EMPTY)
		device().logerror("device_ti8x_link_port_bit_interface: warning: transmit buffer overrun\n");

	m_tx_bit_buffer = data ? PENDING_1 : PENDING_0;

	switch (m_bit_phase)
	{
	case IDLE:
		check_tx_bit_buffer();
		break;
	case WAIT_IDLE:
		m_error_timer->reset(attotime(1, 0)); // TODO: configurable timeout
		break;
	default:
		break;
	}
}
// Called by the implementation after it has consumed a received bit that is
// being held (HOLD_0/HOLD_1): releases the acknowledgement line and either
// resumes normal operation or, if the line fails to rise, treats it as a
// collision and waits for the bus to go idle again.
void device_ti8x_link_port_bit_interface::accept_bit()
{
	switch (m_bit_phase)
	{
	// can't accept a bit that isn't being held
	case IDLE:
	case WAIT_ACK_0:
	case WAIT_ACK_1:
	case WAIT_REL_0:
	case WAIT_REL_1:
	case ACK_0:
	case ACK_1:
	case WAIT_IDLE:
		fatalerror("device_ti8x_link_port_bit_interface: attempt to accept bit when not holding");
		break;

	// release the acknowledgement - if the ring doesn't rise we've lost sync
	case HOLD_0:
		assert(m_tip_in);

		output_ring(1);
		if (m_ring_in)
		{
			LOGBITPROTO("accepted 0 bit\n");
			check_tx_bit_buffer();
		}
		else
		{
			LOGBITPROTO("accepted 0 bit, ring low (collision) - waiting for bus idle\n");
			m_error_timer->reset((EMPTY == m_tx_bit_buffer) ? attotime::never : attotime(1, 0)); // TODO: configurable timeout
			m_bit_phase = WAIT_IDLE;
			bit_collision();
		}
		break;

	// release the acknowledgement - if the tip doesn't rise we've lost sync
	case HOLD_1:
		assert(m_ring_in);

		output_tip(1);
		if (m_tip_in)
		{
			LOGBITPROTO("accepted 1 bit\n");
			check_tx_bit_buffer();
		}
		else
		{
			LOGBITPROTO("accepted 1 bit, tip low (collision) - waiting for bus idle\n");
			m_error_timer->reset((EMPTY == m_tx_bit_buffer) ? attotime::never : attotime(1, 0)); // TODO: configurable timeout
			m_bit_phase = WAIT_IDLE;
			bit_collision();
		}
		break;

	// something very bad happened (heap smash?)
	default:
		throw false;
	}
}
// Edge handler for the tip line driven by the other side; advances the
// bit-level handshake state machine (mirror image of input_ring).
WRITE_LINE_MEMBER(device_ti8x_link_port_bit_interface::input_tip)
{
	m_tip_in = bool(state);
	switch (m_bit_phase)
	{
	// if tip falls while idle, it's the beginning of an incoming 0
	case IDLE:
		if (!m_tip_in)
		{
			LOGBITPROTO("falling edge on tip, acknowledging 0 bit\n");
			m_error_timer->reset(attotime(1, 0)); // TODO: configurable timeout
			m_bit_phase = ACK_0;
			output_ring(0);
		}
		break;

	// we're driving tip low in this state, ignore it
	case WAIT_ACK_0:
	case ACK_1:
	case HOLD_1:
		break;

	// tip must fall to acknowledge outgoing 1
	case WAIT_ACK_1:
		if (!m_tip_in)
		{
			LOGBITPROTO("falling edge on tip, 1 bit acknowledged, confirming\n");
			m_error_timer->reset(attotime(1, 0)); // TODO: configurable timeout
			m_bit_phase = WAIT_REL_1;
			output_ring(1);
		}
		break;

	// if tip falls now, we've lost sync
	case WAIT_REL_0:
	case HOLD_0:
		if (!m_tip_in)
		{
			LOGBITPROTO("falling edge on tip, lost sync, waiting for bus idle\n");
			m_error_timer->reset((EMPTY == m_tx_bit_buffer) ? attotime::never : attotime(1, 0)); // TODO: configurable timeout
			m_bit_phase = WAIT_IDLE;
			output_ring(1);
			bit_collision();
		}
		break;

	// tip must rise to complete outgoing 1 sequence
	case WAIT_REL_1:
		if (m_tip_in)
		{
			assert(!m_ring_in);

			LOGBITPROTO("rising edge on tip, 1 bit sent\n");
			check_tx_bit_buffer();
			bit_sent();
		}
		break;

	// tip must rise to accept our acknowledgement
	case ACK_0:
		if (m_tip_in)
		{
			LOGBITPROTO("rising edge on tip, 0 bit acknowledge confirmed, holding\n");
			m_error_timer->reset();
			m_bit_phase = HOLD_0;
			bit_received(false);
		}
		break;

	// if the bus is available, check for bit to send
	case WAIT_IDLE:
		if (m_tip_in && m_ring_in)
		{
			LOGBITPROTO("rising edge on tip, bus idle detected\n");
			check_tx_bit_buffer();
		}
		break;

	// something very bad happened (heap smash?)
	default:
		throw false;
	}
}
// Edge handler for the ring line driven by the other side; advances the
// bit-level handshake state machine (mirror image of input_tip).
WRITE_LINE_MEMBER(device_ti8x_link_port_bit_interface::input_ring)
{
	m_ring_in = bool(state);
	switch (m_bit_phase)
	{
	// if ring falls while idle, it's the beginning of an incoming 1
	case IDLE:
		if (!m_ring_in)
		{
			LOGBITPROTO("falling edge on ring, acknowledging 1 bit\n");
			m_error_timer->reset(attotime(1, 0)); // TODO: configurable timeout
			m_bit_phase = ACK_1;
			output_tip(0);
		}
		break;

	// ring must fall to acknowledge outgoing 0
	case WAIT_ACK_0:
		if (!m_ring_in)
		{
			LOGBITPROTO("falling edge on ring, 0 bit acknowledged, confirming\n");
			m_error_timer->reset(attotime(1, 0)); // TODO: configurable timeout
			m_bit_phase = WAIT_REL_0;
			output_tip(1);
		}
		break;

	// we're driving ring low in this state, ignore it
	case WAIT_ACK_1:
	case ACK_0:
	case HOLD_0:
		break;

	// ring must rise to complete outgoing 0 sequence
	case WAIT_REL_0:
		if (m_ring_in)
		{
			assert(!m_tip_in);

			LOGBITPROTO("rising edge on ring, 0 bit sent\n");
			check_tx_bit_buffer();
			bit_sent();
		}
		break;

	// if ring falls now, we've lost sync
	case WAIT_REL_1:
	case HOLD_1:
		if (!m_ring_in)
		{
			LOGBITPROTO("falling edge on ring, lost sync, waiting for bus idle\n");
			m_error_timer->reset((EMPTY == m_tx_bit_buffer) ? attotime::never : attotime(1, 0)); // TODO: configurable timeout
			m_bit_phase = WAIT_IDLE;
			output_tip(1);
			bit_collision();
		}
		break;

	// ring must rise to accept our acknowledgement
	case ACK_1:
		if (m_ring_in)
		{
			LOGBITPROTO("rising edge on ring, 1 bit acknowledge confirmed, holding\n");
			m_error_timer->reset();
			m_bit_phase = HOLD_1;
			bit_received(true);
		}
		break;

	// if the bus is available, check for bit to send
	case WAIT_IDLE:
		if (m_tip_in && m_ring_in)
		{
			// fixed copy/paste in the log text: this handler reacts to ring edges
			LOGBITPROTO("rising edge on ring, bus idle detected\n");
			check_tx_bit_buffer();
		}
		break;

	// something very bad happened (heap smash?)
	default:
		throw false;
	}
}
// Protocol watchdog: fires when the other side fails to respond within the
// armed timeout.  Distinguishes receive timeouts (we were acknowledging)
// from send timeouts (we were waiting for an acknowledge/release).
TIMER_CALLBACK_MEMBER(device_ti8x_link_port_bit_interface::bit_timeout)
{
	switch (m_bit_phase)
	{
	// something very bad happened (heap smash?)
	case IDLE:
	case HOLD_0:
	case HOLD_1:
	default:
		throw false;

	// receive timeout
	case ACK_0:
	case ACK_1:
		LOGBITPROTO("timeout acknowledging %d bit\n", (ACK_0 == m_bit_phase) ? 0 : 1);
		// release both lines; if the bus is idle we may start a queued send
		output_tip(1);
		output_ring(1);
		if (m_tip_in && m_ring_in)
		{
			check_tx_bit_buffer();
		}
		else
		{
			LOGBITPROTO("waiting for bus idle\n");
			m_error_timer->reset((EMPTY == m_tx_bit_buffer) ? attotime::never : attotime(1, 0)); // TODO: configurable timeout
			m_bit_phase = WAIT_IDLE;
		}
		bit_receive_timeout();
		break;

	// send timeout:
	case WAIT_IDLE:
		// only armed in WAIT_IDLE when a bit is queued for transmission
		assert(EMPTY != m_tx_bit_buffer);
		[[fallthrough]];
	case WAIT_ACK_0:
	case WAIT_ACK_1:
	case WAIT_REL_0:
	case WAIT_REL_1:
		LOGBITPROTO("timeout sending bit\n");
		// drop the queued bit and release both lines
		m_error_timer->reset();
		m_bit_phase = (m_tip_in && m_ring_in) ? IDLE : WAIT_IDLE;
		m_tx_bit_buffer = EMPTY;
		output_tip(1);
		output_ring(1);
		bit_send_timeout();
		break;
	}
}
// With the bus idle (both input lines high - enforced by the asserts),
// start transmitting a queued bit if there is one, otherwise go idle.
void device_ti8x_link_port_bit_interface::check_tx_bit_buffer()
{
	assert(m_tip_in);
	assert(m_ring_in);

	switch (m_tx_bit_buffer)
	{
	// nothing to do
	case EMPTY:
		LOGBITPROTO("no pending bit, entering idle state\n");
		m_error_timer->reset();
		m_bit_phase = IDLE;
		break;

	// pull tip low and wait for acknowledgement
	case PENDING_0:
		LOGBITPROTO("sending 0 bit, pulling tip low\n");
		m_error_timer->reset(attotime(1, 0)); // TODO: configurable timeout
		m_bit_phase = WAIT_ACK_0;
		m_tx_bit_buffer = EMPTY;
		output_tip(0);
		break;

	// pull ring low and wait for acknowledgement
	case PENDING_1:
		LOGBITPROTO("sending 1 bit, pulling ring low\n");
		m_error_timer->reset(attotime(1, 0)); // TODO: configurable timeout
		m_bit_phase = WAIT_ACK_1;
		m_tx_bit_buffer = EMPTY;
		output_ring(0);
		break;

	// something very bad happened (heap smash?)
	default:
		throw false;
	}
}
// Byte-level protocol helper built on top of the bit-level interface;
// both shift buffers start empty (0 means "no transfer in progress").
device_ti8x_link_port_byte_interface::device_ti8x_link_port_byte_interface(
		machine_config const &mconfig,
		device_t &device)
	: device_ti8x_link_port_bit_interface(mconfig, device)
	, m_tx_byte_buffer(0U)
	, m_rx_byte_buffer(0U)
{
}
void device_ti8x_link_port_byte_interface::interface_pre_start()
{
	device_ti8x_link_port_bit_interface::interface_pre_start();

	// Begin with no byte transfer in progress in either direction.
	m_tx_byte_buffer = m_rx_byte_buffer = 0U;
}
void device_ti8x_link_port_byte_interface::interface_post_start()
{
	device_ti8x_link_port_bit_interface::interface_post_start();

	// Register byte shift buffers with the save-state system.
	device().save_item(NAME(m_tx_byte_buffer));
	device().save_item(NAME(m_rx_byte_buffer));
}
void device_ti8x_link_port_byte_interface::interface_pre_reset()
{
	device_ti8x_link_port_bit_interface::interface_pre_reset();

	// Discard any partially transferred byte on reset.
	m_tx_byte_buffer = m_rx_byte_buffer = 0U;
}
void device_ti8x_link_port_byte_interface::send_byte(u8 data)
{
	if (m_tx_byte_buffer)
		device().logerror("device_ti8x_link_port_byte_interface: warning: transmit buffer overrun\n");

	LOGBYTEPROTO("sending byte 0x%02X\n", data);
	// Keep bits 1-7 in the shift buffer with a guard bit at 0x0080.  The
	// guard marks the end of the byte in bit_sent() and is never transmitted.
	m_tx_byte_buffer = 0x0080 | u16(data >> 1);
	// Bytes go out least significant bit first.
	send_bit(BIT(data, 0));
}
void device_ti8x_link_port_byte_interface::accept_byte()
{
	// Only valid once bit 8 flags a complete received byte (see bit_received).
	assert(BIT(m_rx_byte_buffer, 8));

	LOGBYTEPROTO("accepting final bit of byte\n");
	m_rx_byte_buffer = 0U;
	accept_bit();
}
// Bus collision at the bit level: any partially sent/received byte is lost,
// so clear both shift buffers before notifying the byte-level handler.
void device_ti8x_link_port_byte_interface::bit_collision()
{
	LOGBYTEPROTO("bit collision, clearing byte buffers\n"); // was "bit collection" (typo)
	m_tx_byte_buffer = m_rx_byte_buffer = 0U;
	byte_collision();
}
// A bit-level send timed out: the outgoing byte cannot complete, so drop it.
void device_ti8x_link_port_byte_interface::bit_send_timeout()
{
	LOGBYTEPROTO("bit send timeout, clearing send byte buffer\n");
	m_tx_byte_buffer = 0U;
	byte_send_timeout();
}
// A bit-level receive timed out: the incoming byte cannot complete, so drop it.
void device_ti8x_link_port_byte_interface::bit_receive_timeout()
{
	LOGBYTEPROTO("bit receive timeout, clearing receive byte buffer\n");
	m_rx_byte_buffer = 0U;
	byte_receive_timeout();
}
// A bit finished transmitting: shift out the next one, or report byte
// completion when only the guard bit (always 1) remains - see send_byte.
void device_ti8x_link_port_byte_interface::bit_sent()
{
	assert(m_tx_byte_buffer);

	bool const data(BIT(m_tx_byte_buffer, 0));
	if (m_tx_byte_buffer >>= 1)
	{
		LOGBYTEPROTO("bit sent, sending next bit of byte\n");
		send_bit(data);
	}
	else
	{
		// the last bit popped must be the guard bit planted by send_byte
		assert(data);

		LOGBYTEPROTO("final bit of byte sent\n");
		byte_sent();
	}
}
// A bit arrived (bytes arrive least significant bit first).  A sentinel is
// planted at 0x8000 with the first bit; after eight shifts it reaches bit 8,
// flagging a complete byte in the low eight bits of the buffer.
void device_ti8x_link_port_byte_interface::bit_received(bool data)
{
	assert(!BIT(m_rx_byte_buffer, 8));

	m_rx_byte_buffer = (!m_rx_byte_buffer ? 0x8000 : (m_rx_byte_buffer >> 1)) | (data ? 0x0080U : 0x0000U);
	if (BIT(m_rx_byte_buffer, 8))
	{
		LOGBYTEPROTO("received final bit of byte 0x%02X\n", u8(m_rx_byte_buffer));
		byte_received(u8(m_rx_byte_buffer));
	}
	else
	{
		LOGBYTEPROTO("bit received, accepting\n");
		accept_bit();
	}
}
#include "bitsocket.h"
#include "graphlinkhle.h"
#include "teeconn.h"
#include "tispeaker.h"
// Registers the standard set of devices that can be plugged into a TI-8x
// link port slot (implementations come from the headers included above).
void default_ti8x_link_devices(device_slot_interface &device)
{
	device.option_add("bitsock", TI8X_BIT_SOCKET);
	device.option_add("glinkhle", TI8X_GRAPH_LINK_HLE);
	device.option_add("tee", TI8X_TEE_CONNECTOR);
	device.option_add("monospkr", TI8X_SPEAKER_MONO);
	device.option_add("stereospkr", TI8X_SPEAKER_STEREO);
}
| johnparker007/mame | src/devices/bus/ti8x/ti8x.cpp | C++ | gpl-2.0 | 14,113 |
#!/usr/bin/env python
"""
Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
from lib.core.enums import PRIORITY
__priority__ = PRIORITY.LOWEST
def dependencies():
    # This tamper script has no external dependencies or settings to check.
    pass
def tamper(payload, **kwargs):
    """
    Replaces apostrophe character with its illegal double unicode counterpart

    >>> tamper("1 AND '1'='1")
    '1 AND %00%271%00%27=%00%271'
    """

    # Empty or None payloads pass through untouched.
    if not payload:
        return payload

    return payload.replace("'", "%00%27")
| golismero/golismero | tools/sqlmap/tamper/apostrophenullencode.py | Python | gpl-2.0 | 503 |
<?php
//============================================================+
// File name : example_2d_png.php
// Version : 1.0.000
// Begin : 2011-07-21
// Last Update : 2013-03-17
// Author : Nicola Asuni - Tecnick.com LTD - www.tecnick.com - info@tecnick.com
// License : GNU-LGPL v3 (http://www.gnu.org/copyleft/lesser.html)
// -------------------------------------------------------------------
// Copyright (C) 2009-2013 Nicola Asuni - Tecnick.com LTD
//
// This file is part of TCPDF software library.
//
// TCPDF is free software: you can redistribute it and/or modify it
// under the terms of the GNU Lesser General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// TCPDF is distributed in the hope that it will be useful, but
// WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
// See the GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with TCPDF. If not, see <http://www.gnu.org/licenses/>.
//
// See LICENSE.TXT file for more information.
// -------------------------------------------------------------------
//
// Description : Example for tcpdf_barcodes_2d.php class
//
//============================================================+
/**
* @file
* Example for tcpdf_barcodes_2d.php class
* @package com.tecnick.tcpdf
* @author Nicola Asuni
* @version 1.0.009
*/
// include 2D barcode class
require_once(dirname(__FILE__).'/../../tcpdf_barcodes_2d.php');

// set the barcode content (a URL) and type (PDF417 symbology)
$barcodeobj = new TCPDF2DBarcode('http://www.tcpdf.org', 'PDF417');

// output the barcode as PNG image (scale factors 4x4, RGB colour 0,0,0 = black)
$barcodeobj->getBarcodePNG(4, 4, array(0,0,0));
//============================================================+
// END OF FILE
//============================================================+
| siteslab/profile | sites/all/libraries/tcpdf/examples/barcodes/example_2d_pdf417_png.php | PHP | gpl-2.0 | 1,979 |
/***************************************************************************
tag: The SourceWorks Tue Sep 7 00:55:18 CEST 2010 ServiceRequester.hpp
ServiceRequester.hpp - description
-------------------
begin : Tue September 07 2010
copyright : (C) 2010 The SourceWorks
email : peter@thesourceworks.com
***************************************************************************
* This library is free software; you can redistribute it and/or *
* modify it under the terms of the GNU General Public *
* License as published by the Free Software Foundation; *
* version 2 of the License. *
* *
* As a special exception, you may use this file as part of a free *
* software library without restriction. Specifically, if other files *
* instantiate templates or use macros or inline functions from this *
* file, or you compile this file and link it with other files to *
* produce an executable, this file does not by itself cause the *
* resulting executable to be covered by the GNU General Public *
* License. This exception does not however invalidate any other *
* reasons why the executable file might be covered by the GNU General *
* Public License. *
* *
* This library is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU *
* Lesser General Public License for more details. *
* *
* You should have received a copy of the GNU General Public *
* License along with this library; if not, write to the Free Software *
* Foundation, Inc., 59 Temple Place, *
* Suite 330, Boston, MA 02111-1307 USA *
* *
***************************************************************************/
#ifndef ORO_SERVICEREQUESTER_HPP_
#define ORO_SERVICEREQUESTER_HPP_
#include "rtt-config.h"
#include "rtt-fwd.hpp"
#include "base/OperationCallerBaseInvoker.hpp"
#include "Service.hpp"
#include <map>
#include <vector>
#include <string>
#include <boost/enable_shared_from_this.hpp>
#if BOOST_VERSION >= 104000 && BOOST_VERSION < 105300
#include <boost/smart_ptr/enable_shared_from_this2.hpp>
#endif
#if BOOST_VERSION >= 105300
#include <boost/smart_ptr/enable_shared_from_raw.hpp>
#endif
namespace RTT
{
/**
* An object that expresses you wish to use a service.
* The ServiceRequester is symmetrical to the Service.
* Where a Service registers operations that a component can
* execute ('provides'), the ServiceRequester registers the methods that a caller
* wishes to call ('requires'). One method in a ServiceRequester maps
* to one operation in a Service.
*
* Typical use is to inherit from ServiceRequester and add named OperationCaller objects
* to it using addOperationCaller. @see RTT::Scripting for an example.
* @ingroup Services
*/
class RTT_API ServiceRequester :
#if BOOST_VERSION >= 104000
#if BOOST_VERSION < 105300
    public boost::enable_shared_from_this2<ServiceRequester>
#else
    public boost::enable_shared_from_raw
#endif
#else
    public boost::enable_shared_from_this<ServiceRequester>
#endif
{
public:
    typedef std::vector<std::string> RequesterNames;
    typedef std::vector<std::string> OperationCallerNames;
    typedef boost::shared_ptr<ServiceRequester> shared_ptr;
    typedef boost::shared_ptr<const ServiceRequester> shared_constptr;

#if BOOST_VERSION >= 105300
    // enable_shared_from_raw does not provide shared_from_this() itself, so
    // provide the equivalent helpers on top of boost::shared_from_raw.
    ServiceRequester::shared_ptr shared_from_this() { return boost::shared_from_raw(this); }
    ServiceRequester::shared_constptr shared_from_this() const { return boost::shared_from_raw(this); }
#endif
    ServiceRequester(const std::string& name, TaskContext* owner = 0);
    virtual ~ServiceRequester();

    /**
     * Returns the name of the service this object requires.
     */
    const std::string& getRequestName() const { return mrname; }

    /**
     * Returns the names of all sub-service requesters added to this object.
     */
    RequesterNames getRequesterNames() const;

    /**
     * The owner is the top-level TaskContext owning this service
     * (indirectly).
     */
    TaskContext* getServiceOwner() const { return mrowner; }

    /**
     * Sets the owning TaskContext that is considered as the
     * caller of requested operations.
     */
    void setOwner(TaskContext* new_owner);

    /**
     * Returns the service we're referencing.
     * In case you used connectTo to more than one service,
     * this returns the service which was used when connectTo
     * first returned true.
     */
    Service::shared_ptr getReferencedService() { return mprovider; }

    /**
     * Registers an OperationCaller with this requester under its own name.
     */
    bool addOperationCaller( base::OperationCallerBaseInvoker& mbi);

    /**
     * Returns the names of all registered OperationCaller objects.
     */
    OperationCallerNames getOperationCallerNames() const;

    /**
     * Looks up a registered OperationCaller by name; see addOperationCaller.
     */
    base::OperationCallerBaseInvoker* getOperationCaller(const std::string& name);

    /**
     * Returns this requester itself (top-level 'requires' interface).
     */
    ServiceRequester::shared_ptr requires();

    /**
     * Returns the sub-service requester with the given name.
     */
    ServiceRequester::shared_ptr requires(const std::string& service_name);

    /**
     * Add a new ServiceRequester to this TaskContext.
     *
     * @param obj This object becomes owned by this TaskContext.
     *
     * @return true if it could be added, false if such
     * service requester already exists.
     */
    bool addServiceRequester(shared_ptr obj);

    /**
     * Query if this service requires certain sub-services.
     * @param service_name
     * @return
     */
    bool requiresService(const std::string& service_name) {
        return mrequests.find(service_name) != mrequests.end();
    }

    /**
     * Connects this service's methods to the operations provided by op.
     * This method tries to match as many as possible method-operation pairs.
     *
     * You may call this function with different instances of sp to 'resolve'
     * missing functions, only the non-connected methods will be further filled in.
     * @param sp An interface-compatible Service.
     *
     * @return true if all methods of that are required are provided, false
     * if not all methods could yet be matched.
     */
    virtual bool connectTo(Service::shared_ptr sp);

    /**
     * Returns true when all methods were resolved.
     * @return
     */
    virtual bool ready() const;

    /**
     * Disconnects all methods from their implementation.
     */
    virtual void disconnect();

    /**
     * Remove all operation callers from this service requester.
     */
    virtual void clear();

protected:
    typedef std::map< std::string, ServiceRequester::shared_ptr > Requests;
    /// the services we implement.
    Requests mrequests;

    /// Our methods
    typedef std::map<std::string, base::OperationCallerBaseInvoker*> OperationCallers;
    OperationCallers mmethods;

    // Name of the required service.
    std::string mrname;
    // Owning TaskContext (caller of requested operations).
    TaskContext* mrowner;
    // Service used when connectTo first succeeded.
    Service::shared_ptr mprovider;
};
}
#endif /* ORO_SERVICEREQUESTER_HPP_ */
| shyamalschandra/rtt | rtt/ServiceRequester.hpp | C++ | gpl-2.0 | 7,747 |
<?php
/**
* @package Joomla.Site
* @subpackage com_users
*
* @copyright Copyright (C) 2005 - 2018 Open Source Matters, Inc. All rights reserved.
* @license GNU General Public License version 2 or later; see LICENSE.txt
*/
defined('_JEXEC') or die;
JLoader::register('UsersController', JPATH_COMPONENT . '/controller.php');
/**
* Registration controller class for Users.
*
* @since 1.6
*/
class UsersControllerRegistration extends UsersController
{
	/**
	 * Method to activate a user.
	 *
	 * @return  boolean  True on success, false on failure.
	 *
	 * @since   1.6
	 */
	public function activate()
	{
		$user = JFactory::getUser();
		$input = JFactory::getApplication()->input;
		$uParams = JComponentHelper::getParams('com_users');

		// Check for admin activation. Don't allow non-super-admin to delete a super admin
		if ($uParams->get('useractivation') != 2 && $user->get('id'))
		{
			// A logged-in user needs no self-activation: bounce to the home page.
			$this->setRedirect('index.php');

			return true;
		}

		// If user registration or account activation is disabled, throw a 403.
		if ($uParams->get('useractivation') == 0 || $uParams->get('allowUserRegistration') == 0)
		{
			JError::raiseError(403, JText::_('JLIB_APPLICATION_ERROR_ACCESS_FORBIDDEN'));

			return false;
		}

		$model = $this->getModel('Registration', 'UsersModel');
		$token = $input->getAlnum('token');

		// Check that the token is in a valid format (activation tokens are 32 alphanumeric characters).
		if ($token === null || strlen($token) !== 32)
		{
			JError::raiseError(403, JText::_('JINVALID_TOKEN'));

			return false;
		}

		// Get the User ID
		$userIdToActivate = $model->getUserIdFromToken($token);

		if (!$userIdToActivate)
		{
			JError::raiseError(403, JText::_('COM_USERS_ACTIVATION_TOKEN_NOT_FOUND'));

			return false;
		}

		// Get the user we want to activate
		$userToActivate = JFactory::getUser($userIdToActivate);

		// Admin activation is on and admin is activating the account
		if (($uParams->get('useractivation') == 2) && $userToActivate->getParam('activate', 0))
		{
			// If a user admin is not logged in, redirect them to the login page with a error message
			if (!$user->authorise('core.create', 'com_users'))
			{
				// Preserve the activation URL so the admin lands back here after logging in.
				$activationUrl = 'index.php?option=com_users&task=registration.activate&token=' . $token;
				$loginUrl = 'index.php?option=com_users&view=login&return=' . base64_encode($activationUrl);

				// In case we still run into this in the second step the user does not have the right permissions
				$message = JText::_('COM_USERS_REGISTRATION_ACL_ADMIN_ACTIVATION_PERMISSIONS');

				// When we are not logged in we should login
				if ($user->guest)
				{
					$message = JText::_('COM_USERS_REGISTRATION_ACL_ADMIN_ACTIVATION');
				}

				$this->setMessage($message);
				$this->setRedirect(JRoute::_($loginUrl, false));

				return false;
			}
		}

		// Attempt to activate the user.
		$return = $model->activate($token);

		// Check for errors.
		if ($return === false)
		{
			// Redirect back to the home page.
			$this->setMessage(JText::sprintf('COM_USERS_REGISTRATION_SAVE_FAILED', $model->getError()), 'error');
			$this->setRedirect('index.php');

			return false;
		}

		$useractivation = $uParams->get('useractivation');

		// Redirect to the login screen.
		if ($useractivation == 0)
		{
			$this->setMessage(JText::_('COM_USERS_REGISTRATION_SAVE_SUCCESS'));
			$this->setRedirect(JRoute::_('index.php?option=com_users&view=login', false));
		}
		elseif ($useractivation == 1)
		{
			$this->setMessage(JText::_('COM_USERS_REGISTRATION_ACTIVATE_SUCCESS'));
			$this->setRedirect(JRoute::_('index.php?option=com_users&view=login', false));
		}
		elseif ($return->getParam('activate'))
		{
			// Admin activation pending: tell the user their address was verified.
			$this->setMessage(JText::_('COM_USERS_REGISTRATION_VERIFY_SUCCESS'));
			$this->setRedirect(JRoute::_('index.php?option=com_users&view=registration&layout=complete', false));
		}
		else
		{
			$this->setMessage(JText::_('COM_USERS_REGISTRATION_ADMINACTIVATE_SUCCESS'));
			$this->setRedirect(JRoute::_('index.php?option=com_users&view=registration&layout=complete', false));
		}

		return true;
	}

	/**
	 * Method to register a user.
	 *
	 * @return  boolean  True on success, false on failure.
	 *
	 * @since   1.6
	 */
	public function register()
	{
		// Check for request forgeries.
		$this->checkToken();

		// If registration is disabled - Redirect to login page.
		if (JComponentHelper::getParams('com_users')->get('allowUserRegistration') == 0)
		{
			$this->setRedirect(JRoute::_('index.php?option=com_users&view=login', false));

			return false;
		}

		$app = JFactory::getApplication();
		$model = $this->getModel('Registration', 'UsersModel');

		// Get the user data.
		$requestData = $this->input->post->get('jform', array(), 'array');

		// Validate the posted data.
		$form = $model->getForm();

		if (!$form)
		{
			JError::raiseError(500, $model->getError());

			return false;
		}

		$data = $model->validate($form, $requestData);

		// Check for validation errors.
		if ($data === false)
		{
			// Get the validation messages.
			$errors = $model->getErrors();

			// Push up to three validation messages out to the user.
			for ($i = 0, $n = count($errors); $i < $n && $i < 3; $i++)
			{
				if ($errors[$i] instanceof Exception)
				{
					$app->enqueueMessage($errors[$i]->getMessage(), 'error');
				}
				else
				{
					$app->enqueueMessage($errors[$i], 'error');
				}
			}

			// Save the data in the session so the form can be re-populated.
			$app->setUserState('com_users.registration.data', $requestData);

			// Redirect back to the registration screen.
			$this->setRedirect(JRoute::_('index.php?option=com_users&view=registration', false));

			return false;
		}

		// Attempt to save the data.
		$return = $model->register($data);

		// Check for errors.
		if ($return === false)
		{
			// Save the data in the session.
			$app->setUserState('com_users.registration.data', $data);

			// Redirect back to the edit screen.
			$this->setMessage($model->getError(), 'error');
			$this->setRedirect(JRoute::_('index.php?option=com_users&view=registration', false));

			return false;
		}

		// Flush the data from the session.
		$app->setUserState('com_users.registration.data', null);

		// Redirect to the profile screen.
		if ($return === 'adminactivate')
		{
			$this->setMessage(JText::_('COM_USERS_REGISTRATION_COMPLETE_VERIFY'));
			$this->setRedirect(JRoute::_('index.php?option=com_users&view=registration&layout=complete', false));
		}
		elseif ($return === 'useractivate')
		{
			$this->setMessage(JText::_('COM_USERS_REGISTRATION_COMPLETE_ACTIVATE'));
			$this->setRedirect(JRoute::_('index.php?option=com_users&view=registration&layout=complete', false));
		}
		else
		{
			$this->setMessage(JText::_('COM_USERS_REGISTRATION_SAVE_SUCCESS'));
			$this->setRedirect(JRoute::_('index.php?option=com_users&view=login', false));
		}

		return true;
	}
}
| tkaniowski/bj25 | components/com_users/controllers/registration.php | PHP | gpl-2.0 | 6,815 |
# -*- coding: utf-8 -*-
"""
This script initializes the plugin, making it known to QGIS.
"""
def classFactory(iface):
    """Entry point called by QGIS to instantiate the plugin.

    :param iface: presumably the QGIS interface instance handed over by the
        plugin loader (TODO confirm) - forwarded unchanged to HelloWorld.
    :return: a new HelloWorld plugin instance.
    """
    from HelloWorld import HelloWorld
    return HelloWorld(iface)
| qgis/QGIS-Django | qgis-app/plugins/tests/HelloWorld/2.3-full-changed-repository/HelloWorld/__init__.py | Python | gpl-2.0 | 183 |
<?php
/**
* @author JoomlaShine.com http://www.joomlashine.com
* @copyright Copyright (C) 2008 - 2011 JoomlaShine.com. All rights reserved.
* @license GNU/GPL v2 http://www.gnu.org/licenses/gpl-2.0.html
*/
// No direct access
defined('_JEXEC') or die('Restricted index access');
// Load template framework
// Load template framework
if (!defined('JSN_PATH_TPLFRAMEWORK')) {
	require_once JPATH_ROOT . '/plugins/system/jsntplframework/jsntplframework.defines.php';
	require_once JPATH_ROOT . '/plugins/system/jsntplframework/libraries/joomlashine/loader.php';
}

define('YOURBASEPATH', dirname(__FILE__));

// Ensure an error object is always available, even when this template is
// rendered outside of Joomla's error handling.
if (!isset($this->error))
{
	$this->error = JError::raiseWarning(404, JText::_('JERROR_ALERTNOAUTHOR'));
	$this->debug = false;
}

// Preparing template parameters
JSNTplTemplateHelper::prepare(false, false);

// Retrieve document object
$document = JFactory::getDocument();

/* URL where logo image should link to (! without preceding slash !)
   Leave this box empty if you don't want your logo to link anywhere. */
$logoLink = $document->logoLink;
if (strpos($logoLink, "http")=== false && $logoLink != '')
{
	// Relative link: strip any preceding slash and prefix the site base URL.
	$utils = JSNTplUtils::getInstance();
	$logoLink = $utils->trimPreceddingSlash($logoLink);
	$logoLink = $this->baseurl . '/' . $logoLink;
}
?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<!-- <?php echo $document->template; ?> <?php echo $document->version ?> -->
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="<?php echo $this->language; ?>" lang="<?php echo $this->language; ?>" dir="<?php echo $this->direction; ?>">
<head>
<title><?php echo $this->error->getCode(); ?>-<?php echo $this->title; ?></title>
<link rel="stylesheet" href="<?php echo $this->baseurl . '/templates/' . $this->template ?>/css/error.css" type="text/css" />
</head>
<body id="jsn-master" class="jsn-color-<?php echo $document->templateColor ?>">
<div id="jsn-page">
<div id="jsn-page_inner">
<div id="jsn-header">
<div id="jsn-logo">
<a href="<?php echo $logoLink ?>" title="<?php echo $document->logoSlogan; ?>">
<?php
if ($document->logoFile != "")
$logo_path = $document->logoFile;
else
$logo_path = $this->baseurl . '/templates/' . $this->template . "/images/logo.png";
?>
<img src="<?php echo $logo_path; ?>" alt="<?php echo $document->logoSlogan; ?>" />
</a>
</div>
</div>
<div id="jsn-body" class="clearafter">
<div id="jsn-error-heading">
<h1><?php echo $this->error->getCode(); ?> <span class="heading-medium"><?php echo JText::_('JERROR_ERROR'); ?></span></h1>
</div>
<div id="jsn-error-content" class="jsn-error-page">
<div id="jsn-error-content_inner">
<h1><span class="heading-small"><?php echo $this->error->getMessage(); ?></span></h1>
<hr />
<h3><?php echo JText::_('JERROR_LAYOUT_NOT_ABLE_TO_VISIT'); ?></h3>
<ul>
<li><?php echo JText::_('JERROR_LAYOUT_AN_OUT_OF_DATE_BOOKMARK_FAVOURITE'); ?></li>
<li><?php echo JText::_('JERROR_LAYOUT_SEARCH_ENGINE_OUT_OF_DATE_LISTING'); ?></li>
<li><?php echo JText::_('JERROR_LAYOUT_MIS_TYPED_ADDRESS'); ?></li>
<li><?php echo JText::_('JERROR_LAYOUT_YOU_HAVE_NO_ACCESS_TO_THIS_PAGE'); ?></li>
<li><?php echo JText::_('JERROR_LAYOUT_REQUESTED_RESOURCE_WAS_NOT_FOUND'); ?></li>
<li><?php echo JText::_('JERROR_LAYOUT_ERROR_HAS_OCCURRED_WHILE_PROCESSING_YOUR_REQUEST'); ?></li>
</ul>
<hr />
<h3><?php echo JText::_('JSN_TPLFW_ERROR_LAYOUT_SEARCH_ON_THE_WEBSITE'); ?></h3>
<form id="search-form" method="post" action="index.php">
<div class="search">
<input type="text" onfocus="if(this.value=='search...') this.value='';" onblur="if(this.value=='') this.value='search...';" value="" size="20" class="inputbox" alt="Search" maxlength="20" id="mod-search-searchword" name="searchword">
<input type="submit" onclick="this.form.searchword.focus();" class="button link-button" value="Search">
</div>
<input type="hidden" value="search" name="task">
<input type="hidden" value="com_search" name="option">
<input type="hidden" value="435" name="Itemid">
</form>
<p id="link-goback">or <a href="<?php echo $this->baseurl; ?>/index.php" class="link-action" title="<?php echo JText::_('JERROR_LAYOUT_GO_TO_THE_HOME_PAGE'); ?>"><?php echo JText::_('JERROR_LAYOUT_GO_TO_THE_HOME_PAGE'); ?></a></p>
</div>
</div>
</div>
</div>
</div>
</body>
</html> | Galene/svitanok.ck.ua | templates/jsn_yoyo_pro/error.php | PHP | gpl-2.0 | 4,569 |
/*
* Copyright (C) 2012-2013 Team XBMC
* http://kodi.tv
*
* This Program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2, or (at your option)
* any later version.
*
* This Program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with XBMC; see the file COPYING. If not, see
* <http://www.gnu.org/licenses/>.
*
*/
#include "EventLoop.h"
#include "XBMCApp.h"
#include "AndroidExtra.h"
#include <dlfcn.h>
#define IS_FROM_SOURCE(v, s) ((v & s) == s)
CEventLoop::CEventLoop(android_app* application)
	: m_enabled(false),
	m_application(application),
	m_activityHandler(NULL), m_inputHandler(NULL)
{
	if (m_application == NULL)
		return;

	// Register with the native glue so the static callbacks can recover
	// this instance through the userData pointer.
	m_application->userData = this;
	m_application->onAppCmd = activityCallback;
	m_application->onInputEvent = inputCallback;
}
// Main loop: blocks on the ALooper and dispatches activity/input events to
// the supplied handlers until the application requests destruction.
void CEventLoop::run(IActivityHandler &activityHandler, IInputHandler &inputHandler)
{
	int ident;
	int events;
	struct android_poll_source* source;

	m_activityHandler = &activityHandler;
	m_inputHandler = &inputHandler;

	CXBMCApp::android_printf("CEventLoop: starting event loop");
	while (1)
	{
		// We will block forever waiting for events (-1 == no timeout).
		while ((ident = ALooper_pollAll(-1, NULL, &events, (void**)&source)) >= 0)
		{
			// Process this event.
			if (source != NULL)
				source->process(m_application, source);

			// Check if we are exiting.
			if (m_application->destroyRequested)
			{
				CXBMCApp::android_printf("CEventLoop: we are being destroyed");
				return;
			}
		}
	}
}
// Dispatches one native activity lifecycle command to the registered
// IActivityHandler. Runs on the event-loop thread via activityCallback.
void CEventLoop::processActivity(int32_t command)
{
  switch (command)
  {
    case APP_CMD_CONFIG_CHANGED:
      // Device configuration (orientation, locale, ...) changed.
      m_activityHandler->onConfigurationChanged();
      break;

    case APP_CMD_INIT_WINDOW:
      // The window is being shown, get it ready.
      m_activityHandler->onCreateWindow(m_application->window);

      // set the proper DPI value
      m_inputHandler->setDPI(CXBMCApp::GetDPI());
      break;

    case APP_CMD_WINDOW_RESIZED:
      // The window has been resized
      m_activityHandler->onResizeWindow();
      break;

    case APP_CMD_TERM_WINDOW:
      // The window is being hidden or closed, clean it up.
      m_activityHandler->onDestroyWindow();
      break;

    case APP_CMD_GAINED_FOCUS:
      m_activityHandler->onGainFocus();
      break;

    case APP_CMD_LOST_FOCUS:
      m_activityHandler->onLostFocus();
      break;

    case APP_CMD_LOW_MEMORY:
      // The system is low on memory; handlers should release caches.
      m_activityHandler->onLowMemory();
      break;

    case APP_CMD_START:
      m_activityHandler->onStart();
      break;

    case APP_CMD_RESUME:
      m_activityHandler->onResume();
      break;

    case APP_CMD_SAVE_STATE:
      // The system has asked us to save our current state. Do so.
      m_activityHandler->onSaveState(&m_application->savedState, &m_application->savedStateSize);
      break;

    case APP_CMD_PAUSE:
      m_activityHandler->onPause();
      break;

    case APP_CMD_STOP:
      m_activityHandler->onStop();
      break;

    case APP_CMD_DESTROY:
      m_activityHandler->onDestroy();
      break;

    default:
      // Unknown/unhandled commands are silently ignored.
      break;
  }
}
// Routes one input event to the appropriate IInputHandler callback.
// Returns non-zero when the event was consumed.
int32_t CEventLoop::processInput(AInputEvent* event)
{
  const int32_t source = AInputEvent_getSource(event);

  // Gamepad/joystick sources get first refusal, regardless of event type.
  if (IS_FROM_SOURCE(source, AINPUT_SOURCE_GAMEPAD) || IS_FROM_SOURCE(source, AINPUT_SOURCE_JOYSTICK))
  {
    if (m_inputHandler->onJoyStickEvent(event))
      return 1;
  }

  int32_t handled = 0;

  switch (AInputEvent_getType(event))
  {
    case AINPUT_EVENT_TYPE_KEY:
      handled = m_inputHandler->onKeyboardEvent(event);
      break;

    case AINPUT_EVENT_TYPE_MOTION:
      // Motion events split by origin: touchscreen vs. mouse.
      if (IS_FROM_SOURCE(source, AINPUT_SOURCE_TOUCHSCREEN))
        handled = m_inputHandler->onTouchEvent(event);
      else if (IS_FROM_SOURCE(source, AINPUT_SOURCE_MOUSE))
        handled = m_inputHandler->onMouseEvent(event);
      break;
  }

  return handled;
}
// Static trampoline for lifecycle commands: recovers the CEventLoop instance
// stashed in android_app::userData and forwards the command to it.
void CEventLoop::activityCallback(android_app* application, int32_t command)
{
  if (application == NULL || application->userData == NULL)
    return;

  static_cast<CEventLoop*>(application->userData)->processActivity(command);
}

// Static trampoline for input events; returns 0 (unhandled) when the loop
// instance or event is missing.
int32_t CEventLoop::inputCallback(android_app* application, AInputEvent* event)
{
  if (application == NULL || application->userData == NULL || event == NULL)
    return 0;

  return static_cast<CEventLoop*>(application->userData)->processInput(event);
}
| ironman771/xbmc | xbmc/platform/android/activity/EventLoop.cpp | C++ | gpl-2.0 | 4,826 |
<?php
/**
 * The template for displaying posts in the Audio post format
 *
 * @package WordPress
 * @subpackage Twenty_Fourteen
 * @since Twenty Fourteen 1.0
 */
?>

<article id="post-<?php the_ID(); ?>" <?php post_class(); ?>>
	<?php twentyfourteen_post_thumbnail(); ?>

	<header class="entry-header">
		<?php if ( in_array( 'category', get_object_taxonomies( get_post_type() ) ) && twentyfourteen_categorized_blog() ) : ?>
		<div class="entry-meta">
			<span class="cat-links"><?php echo get_the_category_list( _x( ', ', 'Used between list items, there is a space after the comma.', 'twentyfourteen' ) ); ?></span>
		</div><!-- .entry-meta -->
		<?php
			endif;

			// Single views show a plain title; index/archive views link it to the post.
			if ( is_single() ) :
				the_title( '<h1 class="entry-title">', '</h1>' );
			else :
				the_title( '<h1 class="entry-title"><a href="' . esc_url( get_permalink() ) . '" rel="bookmark">', '</a></h1>' );
			endif;
		?>

		<div class="entry-meta">
			<span class="post-format">
				<a class="entry-format" href="<?php echo esc_url( get_post_format_link( 'audio' ) ); ?>"><?php echo get_post_format_string( 'audio' ); ?></a>
			</span>
			<?php twentyfourteen_posted_on(); ?>

			<?php if ( ! post_password_required() && ( comments_open() || get_comments_number() ) ) : ?>
			<span class="comments-link"><?php comments_popup_link( __( 'Leave a comment', 'twentyfourteen' ), __( '1 Comment', 'twentyfourteen' ), __( '% Comments', 'twentyfourteen' ) ); ?></span>
			<?php endif; ?>

			<?php edit_post_link( __( 'Edit', 'twentyfourteen' ), '<span class="edit-link">', '</span>' ); ?>
		</div><!-- .entry-meta -->
	</header><!-- .entry-header -->

	<div class="entry-content">
		<?php
			/* translators: %s: Name of current post */
			// Post body with a localized "Continue reading" teaser link appended.
			the_content( sprintf(
				esc_html__( 'Continue reading %s', 'twentyfourteen' ),
				the_title( '<span class="screen-reader-text">', '</span> <span class="meta-nav">→</span>', false )
			) );

			// Page links for posts split with <!--nextpage-->.
			wp_link_pages( array(
				'before'      => '<div class="page-links"><span class="page-links-title">' . __( 'Pages:', 'twentyfourteen' ) . '</span>',
				'after'       => '</div>',
				'link_before' => '<span>',
				'link_after'  => '</span>',
			) );
		?>
	</div><!-- .entry-content -->

	<?php the_tags( '<footer class="entry-meta"><span class="tag-links">', '', '</span></footer>' ); ?>
</article><!-- #post-## -->
| di0fref/wordpress_fahlslstad | wp-content/themes/twentyfourteen/content-audio.php | PHP | gpl-2.0 | 2,316 |
<?php
namespace Drupal\Tests\node\Kernel\Migrate\d6;
use Drupal\Tests\migrate_drupal\Kernel\d6\MigrateDrupal6TestBase;
/**
* Test D6NodeDeriver.
*
* @group migrate_drupal_6
*/
class MigrateNodeDeriverTest extends MigrateDrupal6TestBase {

  /**
   * The migration plugin manager.
   *
   * @var \Drupal\migrate\Plugin\MigrationPluginManagerInterface
   */
  protected $pluginManager;

  /**
   * {@inheritdoc}
   */
  public function setUp(): void {
    parent::setUp();
    // The D6 node deriver is exercised through the migration plugin manager.
    $this->pluginManager = $this->container->get('plugin.manager.migration');
  }

  /**
   * Tests node translation migrations with translation disabled.
   */
  public function testNoTranslations() {
    // Without content_translation, there should be no translation migrations.
    $migrations = $this->pluginManager->createInstances('d6_node_translation');
    $this->assertSame([], $migrations,
      "No node translation migrations without content_translation");
  }

  /**
   * Tests node translation migrations with translation enabled.
   */
  public function testTranslations() {
    // With content_translation, there should be translation migrations for
    // each content type.
    // 'story' is a node type provided by the D6 test fixture.
    $this->enableModules(['language', 'content_translation']);
    $this->assertTrue($this->container->get('plugin.manager.migration')->hasDefinition('d6_node_translation:story'), "Node translation migrations exist after content_translation installed");
  }

}
| tobiasbuhrer/tobiasb | web/core/modules/node/tests/src/Kernel/Migrate/d6/MigrateNodeDeriverTest.php | PHP | gpl-2.0 | 1,430 |
@extends('layouts.loggedout')

@section('content')
	{{-- Flash any error message left over from the previous reset attempt. --}}
	@if(Session::has('error'))
	<div class="clearfix">
		<div class="alert alert-danger">
			{{ Session::get('error') }}
		</div>
	</div>
	@endif
	<h1 class="col-sm-12">{{ trans('reminders.password_reset') }}</h1>
	{{-- Reset form: posts email, new password + confirmation, and the token
	     from the emailed reset link to the password.update route. --}}
	{{ Form::open(array('route' => array('password.update'))) }}
		<p>{{ Form::label('email', 'Email') }}
		{{ Form::text('email','',array('class' => 'form-control', 'required' => true)) }}</p>
		<p>{{ Form::label('password', 'Password') }}
		{{ Form::password('password',array('class' => 'form-control', 'required' => true)) }}</p>
		<p>{{ Form::label('password_confirmation', 'Password confirm') }}
		{{ Form::password('password_confirmation',array('class' => 'form-control', 'required' => true)) }}</p>
		{{ Form::hidden('token', $token) }}
		<p>{{ Form::submit('Submit',array('class' => 'btn btn-primary')) }}</p>
	{{ Form::close() }}
@stop
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Google
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file at
# https://www.github.com/GoogleCloudPlatform/magic-modules
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
__metaclass__ = type
################################################################################
# Documentation
################################################################################
ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ["preview"], 'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gcp_redis_instance_info
description:
- Gather info for GCP Instance
short_description: Gather info for GCP Instance
version_added: '2.8'
author: Google Inc. (@googlecloudplatform)
requirements:
- python >= 2.6
- requests >= 2.18.4
- google-auth >= 1.3.0
options:
region:
description:
- The name of the Redis region of the instance.
required: true
type: str
project:
description:
- The Google Cloud Platform project to use.
type: str
auth_kind:
description:
- The type of credential used.
type: str
required: true
choices:
- application
- machineaccount
- serviceaccount
service_account_contents:
description:
- The contents of a Service Account JSON file, either in a dictionary or as a
JSON string that represents it.
type: jsonarg
service_account_file:
description:
- The path of a Service Account JSON file if serviceaccount is selected as type.
type: path
service_account_email:
description:
- An optional service account email address if machineaccount is selected and
the user does not wish to use the default email.
type: str
scopes:
description:
- Array of scopes to be used
type: list
env_type:
description:
- Specifies which Ansible environment you're running this module within.
- This should not be set unless you know what you're doing.
- This only alters the User Agent string for any API requests.
type: str
notes:
- for authentication, you can set service_account_file using the C(gcp_service_account_file)
env variable.
- for authentication, you can set service_account_contents using the C(GCP_SERVICE_ACCOUNT_CONTENTS)
env variable.
- For authentication, you can set service_account_email using the C(GCP_SERVICE_ACCOUNT_EMAIL)
env variable.
- For authentication, you can set auth_kind using the C(GCP_AUTH_KIND) env variable.
- For authentication, you can set scopes using the C(GCP_SCOPES) env variable.
- Environment variables values will only be used if the playbook values are not set.
- The I(service_account_email) and I(service_account_file) options are mutually exclusive.
'''
EXAMPLES = '''
- name: get info on an instance
gcp_redis_instance_info:
region: us-central1
project: test_project
auth_kind: serviceaccount
service_account_file: "/tmp/auth.pem"
'''
RETURN = '''
resources:
description: List of resources
returned: always
type: complex
contains:
alternativeLocationId:
description:
- Only applicable to STANDARD_HA tier which protects the instance against zonal
failures by provisioning it across two zones.
- If provided, it must be a different zone from the one provided in [locationId].
returned: success
type: str
authorizedNetwork:
description:
- The full name of the Google Compute Engine network to which the instance is
connected. If left unspecified, the default network will be used.
returned: success
type: str
createTime:
description:
- The time the instance was created in RFC3339 UTC "Zulu" format, accurate to
nanoseconds.
returned: success
type: str
currentLocationId:
description:
- The current zone where the Redis endpoint is placed.
- For Basic Tier instances, this will always be the same as the [locationId]
provided by the user at creation time. For Standard Tier instances, this can
be either [locationId] or [alternativeLocationId] and can change after a failover
event.
returned: success
type: str
displayName:
description:
- An arbitrary and optional user-provided name for the instance.
returned: success
type: str
host:
description:
- Hostname or IP address of the exposed Redis endpoint used by clients to connect
to the service.
returned: success
type: str
labels:
description:
- Resource labels to represent user provided metadata.
returned: success
type: dict
redisConfigs:
description:
- Redis configuration parameters, according to U(http://redis.io/topics/config).
- 'Please check Memorystore documentation for the list of supported parameters:
U(https://cloud.google.com/memorystore/docs/redis/reference/rest/v1/projects.locations.instances#Instance.FIELDS.redis_configs)
.'
returned: success
type: dict
locationId:
description:
- The zone where the instance will be provisioned. If not provided, the service
will choose a zone for the instance. For STANDARD_HA tier, instances will
be created across two zones for protection against zonal failures. If [alternativeLocationId]
is also provided, it must be different from [locationId].
returned: success
type: str
name:
description:
- The ID of the instance or a fully qualified identifier for the instance.
returned: success
type: str
memorySizeGb:
description:
- Redis memory size in GiB.
returned: success
type: int
port:
description:
- The port number of the exposed Redis endpoint.
returned: success
type: int
redisVersion:
description:
- 'The version of Redis software. If not provided, latest supported version
will be used. Currently, the supported values are: - REDIS_4_0 for Redis 4.0
compatibility - REDIS_3_2 for Redis 3.2 compatibility .'
returned: success
type: str
reservedIpRange:
description:
- The CIDR range of internal addresses that are reserved for this instance.
If not provided, the service will choose an unused /29 block, for example,
10.0.0.0/29 or 192.168.0.0/29. Ranges must be unique and non-overlapping with
existing subnets in an authorized network.
returned: success
type: str
tier:
description:
- 'The service tier of the instance. Must be one of these values: - BASIC: standalone
instance - STANDARD_HA: highly available primary/replica instances .'
returned: success
type: str
region:
description:
- The name of the Redis region of the instance.
returned: success
type: str
'''
################################################################################
# Imports
################################################################################
from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest
import json
################################################################################
# Main
################################################################################
def main():
    """Module entry point: list Redis instances for one region and exit."""
    module = GcpModule(argument_spec=dict(region=dict(required=True, type='str')))

    # Default to the broad cloud-platform scope when none were supplied.
    if not module.params['scopes']:
        module.params['scopes'] = ['https://www.googleapis.com/auth/cloud-platform']

    module.exit_json(resources=fetch_list(module, collection(module)))
def collection(module):
    """Build the Memorystore list URL for the module's project and region."""
    url_template = "https://redis.googleapis.com/v1/projects/{project}/locations/{region}/instances"
    return url_template.format(**module.params)
def fetch_list(module, link):
    """Return every Redis instance at `link`, paging handled by GcpSession.list."""
    session = GcpSession(module, 'redis')
    return session.list(link, return_if_object, array_name='instances')
def return_if_object(module, response):
    """Decode `response` as JSON, treating 404/204 as "no resource" (None).

    Fails the Ansible module on HTTP errors, invalid JSON, or an API-level
    error payload; otherwise returns the decoded dict.
    """
    # Missing resource or empty body: nothing to return.
    if response.status_code in (404, 204):
        return None

    try:
        module.raise_for_status(response)
        result = response.json()
    except getattr(json.decoder, 'JSONDecodeError', ValueError) as inst:
        module.fail_json(msg="Invalid JSON response with error: %s" % inst)

    if navigate_hash(result, ['error', 'errors']):
        module.fail_json(msg=navigate_hash(result, ['error', 'errors']))

    return result
# Standard Ansible module entry point.
if __name__ == "__main__":
    main()
| anryko/ansible | lib/ansible/modules/cloud/google/gcp_redis_instance_info.py | Python | gpl-3.0 | 9,179 |
<?php
/**
* Implements safety checks for safe iframes.
*
* @warning This filter is *critical* for ensuring that %HTML.SafeIframe
* works safely.
*/
class HTMLPurifier_URIFilter_SafeIframe extends HTMLPurifier_URIFilter
{
    /**
     * @type string
     */
    public $name = 'SafeIframe';

    /**
     * Always run this filter, even for URIs that would otherwise be skipped.
     * @type bool
     */
    public $always_load = true;

    /**
     * Whitelist regexp from %URI.SafeIframeRegexp; null until prepare() runs.
     * @type string
     */
    protected $regexp = null;

    // XXX: The not so good bit about how this is all set up now is we
    // can't check HTML.SafeIframe in the 'prepare' step: we have to
    // defer till the actual filtering.

    /**
     * Caches the iframe source whitelist regexp from configuration.
     *
     * @param HTMLPurifier_Config $config
     * @return bool
     */
    public function prepare($config)
    {
        $this->regexp = $config->get('URI.SafeIframeRegexp');
        return true;
    }

    /**
     * Rejects iframe URIs that do not match the whitelist regexp.
     * Non-iframe URIs pass through untouched.
     *
     * @param HTMLPurifier_URI $uri
     * @param HTMLPurifier_Config $config
     * @param HTMLPurifier_Context $context
     * @return bool
     */
    public function filter(&$uri, $config, $context)
    {
        // check if filter not applicable
        if (!$config->get('HTML.SafeIframe')) {
            return true;
        }
        // check if the filter should actually trigger
        if (!$context->get('EmbeddedURI', true)) {
            return true;
        }
        $token = $context->get('CurrentToken', true);
        if (!($token && $token->name == 'iframe')) {
            return true;
        }
        // check if we actually have some whitelists enabled
        if ($this->regexp === null) {
            return false;
        }
        // actually check the whitelists
        if (!preg_match($this->regexp, $uri->toString())) {
            return false;
        }

        // Make sure that if we're an HTTPS site, the iframe is also HTTPS
        // NOTE(review): is_https() appears to be provided by the host
        // application rather than HTMLPurifier core — confirm availability.
        if (is_https() && $uri->scheme == 'http') {
            // Convert it to a protocol-relative URL
            $uri->scheme = null;
        }
        return $uri;
    }
}
// vim: et sw=4 sts=4
| TheCrowsJoker/mahara | htdocs/lib/htmlpurifier/HTMLPurifier/URIFilter/SafeIframe.php | PHP | gpl-3.0 | 1,996 |
#region References
using System;
using System.Collections.Generic;
using Server.Engines.ConPVP;
using Server.Items;
using Server.Misc;
using Server.Mobiles;
using Server.Network;
using Server.Spells.Bushido;
using Server.Spells.Necromancy;
using Server.Spells.Ninjitsu;
using Server.Spells.Second;
using Server.Spells.Spellweaving;
using Server.Targeting;
#endregion
namespace Server.Spells
{
public abstract class Spell : ISpell
{
		// Immutable per-cast state captured at construction.
		private readonly Mobile m_Caster;
		private readonly Item m_Scroll;
		private readonly SpellInfo m_Info;

		// Mutable cast-progress state.
		private SpellState m_State;
		private long m_StartCastTime;

		public SpellState State { get { return m_State; } set { m_State = value; } }
		public Mobile Caster { get { return m_Caster; } }
		public SpellInfo Info { get { return m_Info; } }
		public string Name { get { return m_Info.Name; } }
		public string Mantra { get { return m_Info.Mantra; } }
		public Type[] Reagents { get { return m_Info.Reagents; } }
		public Item Scroll { get { return m_Scroll; } }
		public long StartCastTime { get { return m_StartCastTime; } }

		// Minimum delay before another spell may be started after this one.
		private static readonly TimeSpan NextSpellDelay = TimeSpan.FromSeconds(0.75);
		// Interval between repeats of the casting animation (see AnimTimer).
		private static TimeSpan AnimateDelay = TimeSpan.FromSeconds(1.5);

		// Skill rolled to cast the spell, and skill used to scale its damage.
		public virtual SkillName CastSkill { get { return SkillName.Magery; } }
		public virtual SkillName DamageSkill { get { return SkillName.EvalInt; } }

		public virtual bool RevealOnCast { get { return true; } }
		public virtual bool ClearHandsOnCast { get { return true; } }
		public virtual bool ShowHandMovement { get { return true; } }

		public virtual bool DelayedDamage { get { return false; } }

		public virtual bool DelayedDamageStacking { get { return true; } }
		//In reality, it's ANY delayed Damage spell Post-AoS that can't stack, but, only
		//Expo & Magic Arrow have enough delay and a short enough cast time to bring up
		//the possibility of stacking 'em.  Note that a MA & an Explosion will stack, but
		//of course, two MA's won't.

		// Per-spell-type tracking of pending delayed-damage timers, keyed by target.
		private static readonly Dictionary<Type, DelayedDamageContextWrapper> m_ContextTable =
			new Dictionary<Type, DelayedDamageContextWrapper>();
private class DelayedDamageContextWrapper
{
private readonly Dictionary<Mobile, Timer> m_Contexts = new Dictionary<Mobile, Timer>();
public void Add(Mobile m, Timer t)
{
Timer oldTimer;
if (m_Contexts.TryGetValue(m, out oldTimer))
{
oldTimer.Stop();
m_Contexts.Remove(m);
}
m_Contexts.Add(m, t);
}
public void Remove(Mobile m)
{
m_Contexts.Remove(m);
}
}
public void StartDelayedDamageContext(Mobile m, Timer t)
{
if (DelayedDamageStacking)
{
return; //Sanity
}
DelayedDamageContextWrapper contexts;
if (!m_ContextTable.TryGetValue(GetType(), out contexts))
{
contexts = new DelayedDamageContextWrapper();
m_ContextTable.Add(GetType(), contexts);
}
contexts.Add(m, t);
}
public void RemoveDelayedDamageContext(Mobile m)
{
DelayedDamageContextWrapper contexts;
if (!m_ContextTable.TryGetValue(GetType(), out contexts))
{
return;
}
contexts.Remove(m);
}
public void HarmfulSpell(Mobile m)
{
if (m is BaseCreature)
{
((BaseCreature)m).OnHarmfulSpell(m_Caster);
}
}
		/// <summary>
		/// Creates a spell instance for <paramref name="caster"/>. The
		/// <paramref name="scroll"/> is the scroll or wand item used to cast,
		/// or null when the spell is cast from memory.
		/// </summary>
		public Spell(Mobile caster, Item scroll, SpellInfo info)
		{
			m_Caster = caster;
			m_Scroll = scroll;
			m_Info = info;
		}
public virtual int GetNewAosDamage(int bonus, int dice, int sides, Mobile singleTarget)
{
if (singleTarget != null)
{
return GetNewAosDamage(bonus, dice, sides, (Caster.Player && singleTarget.Player), GetDamageScalar(singleTarget));
}
else
{
return GetNewAosDamage(bonus, dice, sides, false);
}
}
public virtual int GetNewAosDamage(int bonus, int dice, int sides, bool playerVsPlayer)
{
return GetNewAosDamage(bonus, dice, sides, playerVsPlayer, 1.0);
}
		// Core AOS damage formula. Rolls dice, then applies (in order):
		// Inscription bonus, caster Int bonus, Spell Damage Increase (capped in
		// PvP), Reaper Form bonus, EvalInt scaling, and the external scalar.
		// Damage is carried at x100 fixed-point precision until the final divide.
		public virtual int GetNewAosDamage(int bonus, int dice, int sides, bool playerVsPlayer, double scalar)
		{
			int damage = Utility.Dice(dice, sides, bonus) * 100;
			int damageBonus = 0;

			// Inscription: +5% at GM (1000 fixed), +10% at 1000+ via the step term.
			int inscribeSkill = GetInscribeFixed(m_Caster);
			int inscribeBonus = (inscribeSkill + (1000 * (inscribeSkill / 1000))) / 200;

			damageBonus += inscribeBonus;

			// +1% per 10 intelligence.
			int intBonus = Caster.Int / 10;

			damageBonus += intBonus;

			int sdiBonus = AosAttributes.GetValue(m_Caster, AosAttribute.SpellDamage);

			#region Mondain's Legacy
			sdiBonus += ArcaneEmpowermentSpell.GetSpellBonus(m_Caster, playerVsPlayer);
			#endregion

			// PvP spell damage increase cap of 15% from an items magic property, 30% if spell school focused.
			if (playerVsPlayer)
			{
				if (SpellHelper.HasSpellMastery(m_Caster) && sdiBonus > 30)
				{
					sdiBonus = 30;
				}

				if (!SpellHelper.HasSpellMastery(m_Caster) && sdiBonus > 15)
				{
					sdiBonus = 15;
				}
			}

			damageBonus += sdiBonus;

			// Necromancer Reaper Form grants additional spell damage.
			TransformContext context = TransformationSpellHelper.GetContext(Caster);

			if (context != null && context.Spell is ReaperFormSpell)
			{
				damageBonus += ((ReaperFormSpell)context.Spell).SpellDamageBonus;
			}

			damage = AOS.Scale(damage, 100 + damageBonus);

			// EvalInt scaling: 30% at 0 skill up to 138% at 120 (1200 fixed).
			int evalSkill = GetDamageFixed(m_Caster);
			int evalScale = 30 + ((9 * evalSkill) / 100);

			damage = AOS.Scale(damage, evalScale);

			damage = AOS.Scale(damage, (int)(scalar * 100));

			return damage / 100;
		}
		public virtual bool IsCasting { get { return m_State == SpellState.Casting; } }

		// Called when the caster takes damage; may interrupt an in-progress cast.
		public virtual void OnCasterHurt()
		{
			//Confirm: Monsters and pets cannot be disturbed.
			if (!Caster.Player)
			{
				return;
			}

			if (IsCasting)
			{
				// Protection spell: stored percentage chance to ignore the interrupt.
				object o = ProtectionSpell.Registry[m_Caster];
				bool disturb = true;

				if (o != null && o is double)
				{
					if (((double)o) > Utility.RandomDouble() * 100.0)
					{
						disturb = false;
					}
				}

				#region Stygian Abyss
				// Casting Focus item property: up to a 30% chance to keep casting.
				int focus = SAAbsorptionAttributes.GetValue(Caster, SAAbsorptionAttribute.CastingFocus);

				if (focus > 0)
				{
					if (focus > 30)
					{
						focus = 30;
					}

					if (focus > Utility.Random(100))
					{
						disturb = false;
						Caster.SendLocalizedMessage(1113690); // You regain your focus and continue casting the spell.
					}
				}
				#endregion

				if (disturb)
				{
					Disturb(DisturbType.Hurt, false, true);
				}
			}
		}

		public virtual void OnCasterKilled()
		{
			Disturb(DisturbType.Kill);
		}

		public virtual void OnConnectionChanged()
		{
			FinishSequence();
		}

		// Returns false (blocking the move) while a movement-blocking spell is cast.
		public virtual bool OnCasterMoving(Direction d)
		{
			if (IsCasting && BlocksMovement)
			{
				m_Caster.SendLocalizedMessage(500111); // You are frozen and can not move.
				return false;
			}

			return true;
		}

		// Equipping an item disturbs an in-progress cast but never blocks the equip.
		public virtual bool OnCasterEquiping(Item item)
		{
			if (IsCasting)
			{
				Disturb(DisturbType.EquipRequest);
			}

			return true;
		}

		// Using an object cancels the targeting phase but never blocks the use.
		public virtual bool OnCasterUsingObject(object o)
		{
			if (m_State == SpellState.Sequencing)
			{
				Disturb(DisturbType.UseRequest);
			}

			return true;
		}

		public virtual bool OnCastInTown(Region r)
		{
			return m_Info.AllowTown;
		}
		// Consumes the spell's reagents from the caster's backpack. Returns
		// true when no consumption is required (staff access, scroll/wand cast,
		// NPC caster, Lower Reagent Cost proc, or free-consume duel) or when
		// the backpack held the full reagent set; false aborts the cast.
		public virtual bool ConsumeReagents()
		{
			if (m_Caster.AccessLevel >= AccessLevel.Counselor)
				return true;

			if (m_Scroll != null || !m_Caster.Player)
			{
				return true;
			}

			// Lower Reagent Cost: percentage chance to skip consumption entirely.
			if (AosAttributes.GetValue(m_Caster, AosAttribute.LowerRegCost) > Utility.Random(100))
			{
				return true;
			}

			if (DuelContext.IsFreeConsume(m_Caster))
			{
				return true;
			}

			Container pack = m_Caster.Backpack;

			if (pack == null)
			{
				return false;
			}

			// ConsumeTotal returns -1 when every required amount was consumed.
			if (pack.ConsumeTotal(m_Info.Reagents, m_Info.Amounts) == -1)
			{
				return true;
			}

			return false;
		}
		// Skill accessors. Gains are deliberately not awarded here (note the
		// commented-out CheckSkill calls); these only read current values.
		public virtual double GetInscribeSkill(Mobile m)
		{
			// There is no chance to gain
			// m.CheckSkill( SkillName.Inscribe, 0.0, 120.0 );

			return m.Skills[SkillName.Inscribe].Value;
		}

		// Fixed-point (x10) variant of the Inscription skill value.
		public virtual int GetInscribeFixed(Mobile m)
		{
			// There is no chance to gain
			// m.CheckSkill( SkillName.Inscribe, 0.0, 120.0 );

			return m.Skills[SkillName.Inscribe].Fixed;
		}

		// Fixed-point (x10) value of the damage skill (EvalInt by default).
		public virtual int GetDamageFixed(Mobile m)
		{
			//m.CheckSkill( DamageSkill, 0.0, m.Skills[DamageSkill].Cap );

			return m.Skills[DamageSkill].Fixed;
		}

		public virtual double GetDamageSkill(Mobile m)
		{
			//m.CheckSkill( DamageSkill, 0.0, m.Skills[DamageSkill].Cap );

			return m.Skills[DamageSkill].Value;
		}

		// Skill the target rolls to resist this spell.
		public virtual double GetResistSkill(Mobile m)
		{
			return m.Skills[SkillName.MagicResist].Value;
		}
		// Computes the damage multiplier against `target`: pre-AOS EvalInt vs.
		// MagicResist scaling, creature damage-scalar hooks on both sides,
		// slayer bonuses (SE+), region hooks, and Bushido Evasion (zeroes it).
		public virtual double GetDamageScalar(Mobile target)
		{
			double scalar = 1.0;

			if (!Core.AOS) //EvalInt stuff for AoS is handled elsewhere
			{
				double casterEI = m_Caster.Skills[DamageSkill].Value;
				double targetRS = target.Skills[SkillName.MagicResist].Value;

				/*
				if( Core.AOS )
				targetRS = 0;
				*/

				//m_Caster.CheckSkill( DamageSkill, 0.0, 120.0 );

				// Asymmetric scaling: excess EvalInt helps less than excess resist hurts.
				if (casterEI > targetRS)
				{
					scalar = (1.0 + ((casterEI - targetRS) / 500.0));
				}
				else
				{
					scalar = (1.0 + ((casterEI - targetRS) / 200.0));
				}

				// magery damage bonus, -25% at 0 skill, +0% at 100 skill, +5% at 120 skill
				scalar += (m_Caster.Skills[CastSkill].Value - 100.0) / 400.0;

				if (!target.Player && !target.Body.IsHuman /*&& !Core.AOS*/)
				{
					scalar *= 2.0; // Double magery damage to monsters/animals if not AOS
				}
			}

			// Let creatures on either side adjust the scalar (pets, bosses, ...).
			if (target is BaseCreature)
			{
				((BaseCreature)target).AlterDamageScalarFrom(m_Caster, ref scalar);
			}

			if (m_Caster is BaseCreature)
			{
				((BaseCreature)m_Caster).AlterDamageScalarTo(target, ref scalar);
			}

			if (Core.SE)
			{
				scalar *= GetSlayerDamageScalar(target);
			}

			target.Region.SpellDamageScalar(m_Caster, target, ref scalar);

			if (Evasion.CheckSpellEvasion(target)) //Only single target spells an be evaded
			{
				scalar = 0;
			}

			return scalar;
		}
		// Slayer interaction for spell damage: x2 when the caster's equipped
		// spellbook slays the defender (+25% extra vs. necro transformations
		// for Silver), and x2 against the caster when the defender holds an
		// opposing super-slayer item.
		public virtual double GetSlayerDamageScalar(Mobile defender)
		{
			Spellbook atkBook = Spellbook.FindEquippedSpellbook(m_Caster);

			double scalar = 1.0;

			if (atkBook != null)
			{
				SlayerEntry atkSlayer = SlayerGroup.GetEntryByName(atkBook.Slayer);
				SlayerEntry atkSlayer2 = SlayerGroup.GetEntryByName(atkBook.Slayer2);

				if (atkSlayer != null && atkSlayer.Slays(defender) || atkSlayer2 != null && atkSlayer2.Slays(defender))
				{
					defender.FixedEffect(0x37B9, 10, 5); //TODO: Confirm this displays on OSIs
					scalar = 2.0;
				}

				TransformContext context = TransformationSpellHelper.GetContext(defender);

				if ((atkBook.Slayer == SlayerName.Silver || atkBook.Slayer2 == SlayerName.Silver) && context != null &&
					context.Type != typeof(HorrificBeastSpell))
				{
					scalar += .25; // Every necromancer transformation other than horrific beast take an additional 25% damage
				}

				if (scalar != 1.0)
				{
					return scalar;
				}
			}

			// Defender side: an opposing super-slayer (book or weapon) doubles
			// the damage the defender takes from this caster's group.
			ISlayer defISlayer = Spellbook.FindEquippedSpellbook(defender);

			if (defISlayer == null)
			{
				defISlayer = defender.Weapon as ISlayer;
			}

			if (defISlayer != null)
			{
				SlayerEntry defSlayer = SlayerGroup.GetEntryByName(defISlayer.Slayer);
				SlayerEntry defSlayer2 = SlayerGroup.GetEntryByName(defISlayer.Slayer2);

				if (defSlayer != null && defSlayer.Group.OppositionSuperSlays(m_Caster) ||
					defSlayer2 != null && defSlayer2.Group.OppositionSuperSlays(m_Caster))
				{
					scalar = 2.0;
				}
			}

			return scalar;
		}
public virtual void DoFizzle()
{
m_Caster.LocalOverheadMessage(MessageType.Regular, 0x3B2, 502632); // The spell fizzles.
if (m_Caster.Player)
{
if (Core.AOS)
{
m_Caster.FixedParticles(0x3735, 1, 30, 9503, EffectLayer.Waist);
}
else
{
m_Caster.FixedEffect(0x3735, 6, 30);
}
m_Caster.PlaySound(0x5C);
}
}
		// Timers driving the current cast: m_CastTimer fires the spell effect
		// once the cast delay elapses; m_AnimTimer repeats the casting
		// animation in the meantime.
		private CastTimer m_CastTimer;
		private AnimTimer m_AnimTimer;

		public void Disturb(DisturbType type)
		{
			Disturb(type, true, false);
		}

		// Hook allowing spells to veto an interruption; wand casts resist
		// resistable disturbs.
		public virtual bool CheckDisturb(DisturbType type, bool firstCircle, bool resistable)
		{
			if (resistable && m_Scroll is BaseWand)
			{
				return false;
			}

			return true;
		}
		// Interrupts the spell (if CheckDisturb allows) in either the casting
		// or targeting (sequencing) phase: clears state and timers, fires
		// OnDisturb, and — post-AOS, for players hurt mid-cast — plays the
		// hurt-fizzle effect; casting-phase disturbs also apply disturb recovery.
		public void Disturb(DisturbType type, bool firstCircle, bool resistable)
		{
			if (!CheckDisturb(type, firstCircle, resistable))
			{
				return;
			}

			if (m_State == SpellState.Casting)
			{
				// Pre-AOS, first-circle magery spells resist disturbs unless
				// firstCircle is explicitly set.
				if (!firstCircle && !Core.AOS && this is MagerySpell && ((MagerySpell)this).Circle == SpellCircle.First)
				{
					return;
				}

				m_State = SpellState.None;
				m_Caster.Spell = null;

				OnDisturb(type, true);

				if (m_CastTimer != null)
				{
					m_CastTimer.Stop();
				}

				if (m_AnimTimer != null)
				{
					m_AnimTimer.Stop();
				}

				if (Core.AOS && m_Caster.Player && type == DisturbType.Hurt)
				{
					DoHurtFizzle();
				}

				m_Caster.NextSpellTime = Core.TickCount + (int)GetDisturbRecovery().TotalMilliseconds;
			}
			else if (m_State == SpellState.Sequencing)
			{
				if (!firstCircle && !Core.AOS && this is MagerySpell && ((MagerySpell)this).Circle == SpellCircle.First)
				{
					return;
				}

				m_State = SpellState.None;
				m_Caster.Spell = null;

				OnDisturb(type, false);

				// Cancel the pending target cursor for this cast.
				Target.Cancel(m_Caster);

				if (Core.AOS && m_Caster.Player && type == DisturbType.Hurt)
				{
					DoHurtFizzle();
				}
			}
		}
		// Visual/audible feedback when a cast is broken by damage (post-AOS).
		public virtual void DoHurtFizzle()
		{
			m_Caster.FixedEffect(0x3735, 6, 30);
			m_Caster.PlaySound(0x5C);
		}

		// Notifies the caster of the interruption; `message` is true when
		// disturbed mid-cast (as opposed to mid-targeting).
		public virtual void OnDisturb(DisturbType type, bool message)
		{
			if (message)
			{
				m_Caster.SendLocalizedMessage(500641); // Your concentration is disturbed, thus ruining thy spell.
			}
		}

		// Per-spell pre-cast requirement hook; default allows the cast.
		public virtual bool CheckCast()
		{
			return true;
		}
public virtual void SayMantra()
{
if (m_Scroll is BaseWand)
{
return;
}
if (m_Info.Mantra != null && m_Info.Mantra.Length > 0 && m_Caster.Player)
{
m_Caster.PublicOverheadMessage(MessageType.Spell, m_Caster.SpeechHue, true, m_Info.Mantra, false);
}
}
		// Form restrictions: most spells cannot be cast in Horrific Beast or
		// Animal Form; individual spells may override.
		public virtual bool BlockedByHorrificBeast { get { return true; } }
		public virtual bool BlockedByAnimalForm { get { return true; } }
		public virtual bool BlocksMovement { get { return true; } }

		// Wand casts bypass the spell-recovery timer check.
		public virtual bool CheckNextSpellTime { get { return !(m_Scroll is BaseWand); } }

		// Attempts to begin casting. Runs the pre-cast gauntlet (alive, not
		// calmed/frozen, recovery elapsed, form allowed, duel rules, mana,
		// flying terrain, region/spell-specific checks) and, on success,
		// starts the cast and animation timers and returns true. On failure a
		// localized message is usually sent and false is returned.
		public bool Cast()
		{
			m_StartCastTime = Core.TickCount;

			// Post-AOS, starting a new cast interrupts a spell awaiting its target.
			if (Core.AOS && m_Caster.Spell is Spell && ((Spell)m_Caster.Spell).State == SpellState.Sequencing)
			{
				((Spell)m_Caster.Spell).Disturb(DisturbType.NewCast);
			}

			if (!m_Caster.CheckAlive())
			{
				return false;
			}
			else if (m_Caster is PlayerMobile && ((PlayerMobile)m_Caster).Peaced)
			{
				m_Caster.SendLocalizedMessage(1072060); // You cannot cast a spell while calmed.
			}
			else if (m_Scroll is BaseWand && m_Caster.Spell != null && m_Caster.Spell.IsCasting)
			{
				m_Caster.SendLocalizedMessage(502643); // You can not cast a spell while frozen.
			}
			else if (m_Caster.Spell != null && m_Caster.Spell.IsCasting)
			{
				m_Caster.SendLocalizedMessage(502642); // You are already casting a spell.
			}
			else if (BlockedByHorrificBeast && TransformationSpellHelper.UnderTransformation(m_Caster, typeof(HorrificBeastSpell)) ||
					 (BlockedByAnimalForm && AnimalForm.UnderTransformation(m_Caster)))
			{
				m_Caster.SendLocalizedMessage(1061091); // You cannot cast that spell in this form.
			}
			else if (!(m_Scroll is BaseWand) && (m_Caster.Paralyzed || m_Caster.Frozen))
			{
				m_Caster.SendLocalizedMessage(502643); // You can not cast a spell while frozen.
			}
			else if (CheckNextSpellTime && Core.TickCount - m_Caster.NextSpellTime < 0)
			{
				m_Caster.SendLocalizedMessage(502644); // You have not yet recovered from casting a spell.
			}
			else if (m_Caster is PlayerMobile && ((PlayerMobile)m_Caster).PeacedUntil > DateTime.UtcNow)
			{
				m_Caster.SendLocalizedMessage(1072060); // You cannot cast a spell while calmed.
			}
			#region Dueling
			else if (m_Caster is PlayerMobile && ((PlayerMobile)m_Caster).DuelContext != null &&
					 !((PlayerMobile)m_Caster).DuelContext.AllowSpellCast(m_Caster, this))
			{ }
			#endregion
			else if (m_Caster.Mana >= ScaleMana(GetMana()))
			{
				#region Stygian Abyss
				// Flying gargoyles may not cast while hovering over "hover over"
				// tiles (precarious terrain); staff members are exempt.
				if (m_Caster.Race == Race.Gargoyle && m_Caster.Flying)
				{
					var tiles = Caster.Map.Tiles.GetStaticTiles(Caster.X, Caster.Y, true);
					ItemData itemData;
					bool cancast = true;

					for (int i = 0; i < tiles.Length && cancast; ++i)
					{
						itemData = TileData.ItemTable[tiles[i].ID & TileData.MaxItemValue];
						cancast = !(itemData.Name == "hover over");
					}

					if (!cancast)
					{
						if (m_Caster.IsPlayer())
						{
							m_Caster.SendLocalizedMessage(1113750); // You may not cast spells while flying over such precarious terrain.
							return false;
						}
						else
						{
							m_Caster.SendMessage("Your staff level allows you to cast while flying over precarious terrain.");
						}
					}
				}
				#endregion

				if (m_Caster.Spell == null && m_Caster.CheckSpellCast(this) && CheckCast() &&
					m_Caster.Region.OnBeginSpellCast(m_Caster, this))
				{
					m_State = SpellState.Casting;
					m_Caster.Spell = this;

					if (!(m_Scroll is BaseWand) && RevealOnCast)
					{
						m_Caster.RevealingAction();
					}

					SayMantra();

					TimeSpan castDelay = GetCastDelay();

					// Repeat the casting animation for the duration of the cast.
					if (ShowHandMovement && (m_Caster.Body.IsHuman || (m_Caster.Player && m_Caster.Body.IsMonster)))
					{
						int count = (int)Math.Ceiling(castDelay.TotalSeconds / AnimateDelay.TotalSeconds);

						if (count != 0)
						{
							m_AnimTimer = new AnimTimer(this, count);
							m_AnimTimer.Start();
						}

						if (m_Info.LeftHandEffect > 0)
						{
							Caster.FixedParticles(0, 10, 5, m_Info.LeftHandEffect, EffectLayer.LeftHand);
						}

						if (m_Info.RightHandEffect > 0)
						{
							Caster.FixedParticles(0, 10, 5, m_Info.RightHandEffect, EffectLayer.RightHand);
						}
					}

					if (ClearHandsOnCast)
					{
						m_Caster.ClearHands();
					}

					if (Core.ML)
					{
						WeaponAbility.ClearCurrentAbility(m_Caster);
					}

					m_CastTimer = new CastTimer(this, castDelay);
					//m_CastTimer.Start();

					OnBeginCast();

					// Zero-delay casts fire immediately instead of via the timer.
					if (castDelay > TimeSpan.Zero)
					{
						m_CastTimer.Start();
					}
					else
					{
						m_CastTimer.Tick();
					}

					return true;
				}
				else
				{
					return false;
				}
			}
			else
			{
				m_Caster.LocalOverheadMessage(MessageType.Regular, 0x22, 502625); // Insufficient mana
			}

			return false;
		}
// Invoked when the cast delay elapses; concrete spells target/apply effects here.
public abstract void OnCast();

// Hook invoked right after the cast timer is created, before it starts.
public virtual void OnBeginCast()
{ }
// Returns the [min, max] skill range used by CheckFizzle for the cast check.
// Base implementation yields 0/0 (always succeeds); spells override as needed.
public virtual void GetCastSkills(out double min, out double max)
{
    min = max = 0; //Intended but not required for overriding.
}
// Rolls the cast-skill check to decide whether the spell fizzles.
// Wands never fizzle; the damage skill gets a gain-only check first.
public virtual bool CheckFizzle()
{
    if (m_Scroll is BaseWand)
    {
        return true; // Wand charges never fizzle.
    }

    double minSkill;
    double maxSkill;
    GetCastSkills(out minSkill, out maxSkill);

    bool gainsDamageSkill = (DamageSkill != CastSkill);

    if (gainsDamageSkill)
    {
        // Skill-gain roll only; its outcome does not affect fizzling.
        Caster.CheckSkill(DamageSkill, 0.0, Caster.Skills[DamageSkill].Cap);
    }

    bool success = Caster.CheckSkill(CastSkill, minSkill, maxSkill);
    return success;
}
public abstract int GetMana();
// Applies Mind Rot (raises cost) and Lower Mana Cost (reduces cost, capped
// at 40%) to the base mana cost, truncating the result to an int.
public virtual int ScaleMana(int mana)
{
    double scalar = 1.0;

    // Mind Rot writes its multiplier through the ref; otherwise keep 1.0.
    if (!MindRotSpell.GetMindRotScalar(Caster, ref scalar))
    {
        scalar = 1.0;
    }

    // Lower Mana Cost caps at 40%.
    int lmc = Math.Min(AosAttributes.GetValue(m_Caster, AosAttribute.LowerManaCost), 40);

    scalar -= lmc / 100.0;

    return (int)(mana * scalar);
}
// Pre-AOS recovery delay applied when a cast is disturbed: shrinks with the
// square root of the fraction of the cast already completed, floored at 0.2s.
// AOS handles disturb recovery elsewhere, so it returns zero there.
public virtual TimeSpan GetDisturbRecovery()
{
    if (Core.AOS)
    {
        return TimeSpan.Zero;
    }

    double elapsedFraction = (Core.TickCount - m_StartCastTime) / 1000.0 / GetCastDelay().TotalSeconds;
    double delay = 1.0 - Math.Sqrt(elapsedFraction);

    // Never recover in less than 0.2 seconds.
    return TimeSpan.FromSeconds(Math.Max(delay, 0.2));
}
// Cast-recovery tuning knobs, in quarter-second ticks (see GetCastRecovery):
// recovery = max(Base - FastScalar * FCR, Minimum) / PerSecond seconds.
public virtual int CastRecoveryBase { get { return 6; } }
public virtual int CastRecoveryFastScalar { get { return 1; } }
public virtual int CastRecoveryPerSecond { get { return 4; } }
public virtual int CastRecoveryMinimum { get { return 0; } }
// Delay before the caster may begin another spell. Pre-AOS uses the flat
// NextSpellDelay; AOS scales with Faster Cast Recovery (minus any
// Thunderstorm malus), clamped to CastRecoveryMinimum.
public virtual TimeSpan GetCastRecovery()
{
    if (!Core.AOS)
    {
        return NextSpellDelay;
    }

    int fcr = AosAttributes.GetValue(m_Caster, AosAttribute.CastRecovery)
              - ThunderstormSpell.GetCastRecoveryMalus(m_Caster);

    int delay = CastRecoveryBase - (CastRecoveryFastScalar * fcr);

    if (delay < CastRecoveryMinimum)
    {
        delay = CastRecoveryMinimum;
    }

    return TimeSpan.FromSeconds(delay / (double)CastRecoveryPerSecond);
}
// Cast-delay tuning knobs (see GetCastDelay):
// delay = max(CastDelayBase - FastScalar * FC * SecondsPerTick, Minimum).
public abstract TimeSpan CastDelayBase { get; }
public virtual double CastDelayFastScalar { get { return 1; } }
public virtual double CastDelaySecondsPerTick { get { return 0.25; } }
public virtual TimeSpan CastDelayMinimum { get { return TimeSpan.FromSeconds(0.25); } }
// Legacy integer-tick implementation, kept for reference:
//public virtual int CastDelayBase{ get{ return 3; } }
//public virtual int CastDelayFastScalar{ get{ return 1; } }
//public virtual int CastDelayPerSecond{ get{ return 4; } }
//public virtual int CastDelayMinimum{ get{ return 1; } }
/// <summary>
/// Computes the total cast time: the spell's base delay reduced by the
/// caster's Faster Casting (capped per skill), with Protection and Essence
/// of Wind maluses applied, clamped to CastDelayMinimum, and doubled while
/// under Dread Horn's influence.
/// </summary>
public virtual TimeSpan GetCastDelay()
{
    // Wands: ML gives them the base delay; earlier eras are instant.
    if (m_Scroll is BaseWand)
    {
        return Core.ML ? CastDelayBase : TimeSpan.Zero; // TODO: Should FC apply to wands?
    }
    // Faster casting cap of 2 (if not using the protection spell)
    // Faster casting cap of 0 (if using the protection spell)
    // Paladin spells are subject to a faster casting cap of 4
    // Paladins with magery of 70.0 or above are subject to a faster casting cap of 2
    int fcMax = 4;
    if (CastSkill == SkillName.Magery || CastSkill == SkillName.Necromancy ||
        (CastSkill == SkillName.Chivalry && m_Caster.Skills[SkillName.Magery].Value >= 70.0))
    {
        fcMax = 2;
    }
    int fc = AosAttributes.GetValue(m_Caster, AosAttribute.CastSpeed);
    if (fc > fcMax)
    {
        fc = fcMax;
    }
    if (ProtectionSpell.Registry.Contains(m_Caster))
    {
        fc -= 2;
    }
    if (EssenceOfWindSpell.IsDebuffed(m_Caster))
    {
        fc -= EssenceOfWindSpell.GetFCMalus(m_Caster);
    }
    TimeSpan baseDelay = CastDelayBase;
    // Negative offset: each point of (possibly negative) FC shifts the delay
    // by SecondsPerTick.
    TimeSpan fcDelay = TimeSpan.FromSeconds(-(CastDelayFastScalar * fc * CastDelaySecondsPerTick));
    TimeSpan delay = baseDelay + fcDelay;
    if (delay < CastDelayMinimum)
    {
        delay = CastDelayMinimum;
    }
    #region Mondain's Legacy
    if (DreadHorn.IsUnderInfluence(m_Caster))
    {
        // BUG FIX: TimeSpan is immutable and TimeSpan.Add returns a new value;
        // the original `delay.Add(delay);` discarded its result, so Dread
        // Horn's cast-time penalty never took effect. Double the delay as the
        // surrounding code intends.
        delay += delay;
    }
    #endregion
    return delay;
}
// Ends the cast sequence: clears the state machine and releases the
// caster's spell slot if this spell still owns it.
public virtual void FinishSequence()
{
    m_State = SpellState.None;

    if (ReferenceEquals(m_Caster.Spell, this))
    {
        m_Caster.Spell = null;
    }
}
// Karma granted on a successful cast; 0 means no award. Overridden by
// spells that grant (or cost) karma.
public virtual int ComputeKarmaAward()
{
    return 0;
}
/// <summary>
/// Validates and commits the cast after targeting: re-checks the caster and
/// scroll/wand, consumes reagents and mana, rolls the fizzle check, consumes
/// the scroll/charge, awards karma, and applies the vampiric-garlic penalty.
/// Returns true only when the spell may proceed to its effect.
/// </summary>
public virtual bool CheckSequence()
{
    int mana = ScaleMana(GetMana());
    // Caster died, was deleted, or the spell lost its slot/state: fizzle.
    if (m_Caster.Deleted || !m_Caster.Alive || m_Caster.Spell != this || m_State != SpellState.Sequencing)
    {
        DoFizzle();
    }
    // Scroll/wand must still exist, be held by the caster, and have charges.
    else if (m_Scroll != null && !(m_Scroll is Runebook) &&
             (m_Scroll.Amount <= 0 || m_Scroll.Deleted || m_Scroll.RootParent != m_Caster ||
              (m_Scroll is BaseWand && (((BaseWand)m_Scroll).Charges <= 0 || m_Scroll.Parent != m_Caster))))
    {
        DoFizzle();
    }
    else if (!ConsumeReagents())
    {
        m_Caster.LocalOverheadMessage(MessageType.Regular, 0x22, 502630); // More reagents are needed for this spell.
    }
    else if (m_Caster.Mana < mana)
    {
        m_Caster.LocalOverheadMessage(MessageType.Regular, 0x22, 502625); // Insufficient mana for this spell.
    }
    else if (Core.AOS && (m_Caster.Frozen || m_Caster.Paralyzed))
    {
        m_Caster.SendLocalizedMessage(502646); // You cannot cast a spell while frozen.
        DoFizzle();
    }
    else if (m_Caster is PlayerMobile && ((PlayerMobile)m_Caster).PeacedUntil > DateTime.UtcNow)
    {
        m_Caster.SendLocalizedMessage(1072060); // You cannot cast a spell while calmed.
        DoFizzle();
    }
    else if (CheckFizzle())
    {
        // Success path: pay mana, then consume the casting medium.
        m_Caster.Mana -= mana;
        if (m_Scroll is SpellScroll)
        {
            m_Scroll.Consume();
        }
        #region SA
        else if (m_Scroll is SpellStone)
        {
            // The SpellScroll check above isn't removing the SpellStones for some reason.
            m_Scroll.Delete();
        }
        #endregion
        else if (m_Scroll is BaseWand)
        {
            ((BaseWand)m_Scroll).ConsumeCharge(m_Caster);
            m_Caster.RevealingAction();
        }
        if (m_Scroll is BaseWand)
        {
            // Temporarily pin the wand so ClearHands cannot drop/disarm it.
            bool m = m_Scroll.Movable;
            m_Scroll.Movable = false;
            if (ClearHandsOnCast)
            {
                m_Caster.ClearHands();
            }
            m_Scroll.Movable = m;
        }
        else
        {
            if (ClearHandsOnCast)
            {
                m_Caster.ClearHands();
            }
        }
        int karma = ComputeKarmaAward();
        if (karma != 0)
        {
            Titles.AwardKarma(Caster, karma, true);
        }
        // Vampiric Embrace casters take damage from garlic-based spells.
        if (TransformationSpellHelper.UnderTransformation(m_Caster, typeof(VampiricEmbraceSpell)))
        {
            bool garlic = false;
            for (int i = 0; !garlic && i < m_Info.Reagents.Length; ++i)
            {
                garlic = (m_Info.Reagents[i] == Reagent.Garlic);
            }
            if (garlic)
            {
                m_Caster.SendLocalizedMessage(1061651); // The garlic burns you!
                AOS.Damage(m_Caster, Utility.RandomMinMax(17, 23), 100, 0, 0, 0, 0);
            }
        }
        return true;
    }
    else
    {
        DoFizzle();
    }
    // All failure branches converge here.
    return false;
}
// Beneficial-spell sequence check against a living target (dead not allowed).
public bool CheckBSequence(Mobile target)
{
    return CheckBSequence(target, false);
}
// Beneficial-spell sequence check: validates the target's life state, the
// beneficial-action rules, and the cast sequence; registers the beneficial
// act on success.
public bool CheckBSequence(Mobile target, bool allowDead)
{
    // Dead targets are only permitted when the spell explicitly allows them.
    if (!target.Alive && !allowDead)
    {
        m_Caster.SendLocalizedMessage(501857); // This spell won't work on that!
        return false;
    }

    if (!Caster.CanBeBeneficial(target, true, allowDead) || !CheckSequence())
    {
        return false;
    }

    Caster.DoBeneficial(target);
    return true;
}
// Harmful-spell sequence check: target must be alive and attackable, and the
// cast sequence must succeed; registers the harmful act on success.
public bool CheckHSequence(Mobile target)
{
    if (!target.Alive)
    {
        m_Caster.SendLocalizedMessage(501857); // This spell won't work on that!
        return false;
    }

    if (!Caster.CanBeHarmful(target) || !CheckSequence())
    {
        return false;
    }

    Caster.DoHarmful(target);
    return true;
}
// Repeats the casting animation while the spell is being cast; stops itself
// if the cast is interrupted, and clears the spell's timer reference on the
// final tick.
private class AnimTimer : Timer
{
    private readonly Spell m_Spell;

    // count: number of animation repetitions, one per AnimateDelay interval.
    public AnimTimer(Spell spell, int count)
        : base(TimeSpan.Zero, AnimateDelay, count)
    {
        m_Spell = spell;
        Priority = TimerPriority.FiftyMS;
    }

    protected override void OnTick()
    {
        // Cast was disturbed or replaced: stop animating.
        if (m_Spell.State != SpellState.Casting || m_Spell.m_Caster.Spell != m_Spell)
        {
            Stop();
            return;
        }
        // No animation while mounted or when the spell defines no action.
        if (!m_Spell.Caster.Mounted && m_Spell.m_Info.Action >= 0)
        {
            if (m_Spell.Caster.Body.IsHuman)
            {
                m_Spell.Caster.Animate(m_Spell.m_Info.Action, 7, 1, true, false, 0);
            }
            else if (m_Spell.Caster.Player && m_Spell.Caster.Body.IsMonster)
            {
                // Polymorphed players use a generic monster cast animation.
                m_Spell.Caster.Animate(12, 7, 1, true, false, 0);
            }
        }
        // Last repetition finished: release the spell's reference to us.
        if (!Running)
        {
            m_Spell.m_AnimTimer = null;
        }
    }
}
// Fires once when the cast delay elapses: moves the spell into the
// Sequencing state, notifies the caster/region, sets the recovery time,
// and invokes OnCast(). Tick() allows zero-delay casts to run synchronously.
private class CastTimer : Timer
{
    private readonly Spell m_Spell;

    public CastTimer(Spell spell, TimeSpan castDelay)
        : base(castDelay)
    {
        m_Spell = spell;
        Priority = TimerPriority.TwentyFiveMS;
    }

    protected override void OnTick()
    {
        if (m_Spell == null || m_Spell.m_Caster == null)
        {
            return;
        }
        // Only proceed if the cast is still live and owns the caster's slot.
        else if (m_Spell.m_State == SpellState.Casting && m_Spell.m_Caster.Spell == m_Spell)
        {
            m_Spell.m_State = SpellState.Sequencing;
            m_Spell.m_CastTimer = null;
            m_Spell.m_Caster.OnSpellCast(m_Spell);
            if (m_Spell.m_Caster.Region != null)
            {
                m_Spell.m_Caster.Region.OnSpellCast(m_Spell.m_Caster, m_Spell);
            }
            m_Spell.m_Caster.NextSpellTime = Core.TickCount + (int)m_Spell.GetCastRecovery().TotalMilliseconds;
            // Spell.NextSpellDelay;
            Target originalTarget = m_Spell.m_Caster.Target;
            m_Spell.OnCast();
            // If OnCast opened a new targeting cursor for a player, give it a
            // 30-second timeout.
            if (m_Spell.m_Caster.Player && m_Spell.m_Caster.Target != originalTarget && m_Spell.Caster.Target != null)
            {
                m_Spell.m_Caster.Target.BeginTimeout(m_Spell.m_Caster, TimeSpan.FromSeconds(30.0));
            }
            m_Spell.m_CastTimer = null;
        }
    }

    // Synchronous entry point used for zero-delay casts.
    public void Tick()
    {
        OnTick();
    }
}
}
} | zerodowned/JustUO | Scripts/Spells/Base/Spell.cs | C# | gpl-3.0 | 28,192 |
# coding=utf-8
"""Test for GIS utilities functions."""
import unittest
# noinspection PyUnresolvedReferences
import qgis # pylint: disable=unused-import
from qgis.core import QgsRectangle
from safe.definitions.constants import INASAFE_TEST
from safe.utilities.gis import (
is_polygon_layer,
is_raster_y_inverted,
wkt_to_rectangle,
validate_geo_array)
from safe.test.utilities import (
clone_raster_layer,
load_test_vector_layer,
load_test_raster_layer,
standard_data_path,
get_qgis_app)
QGIS_APP, CANVAS, IFACE, PARENT = get_qgis_app(qsetting=INASAFE_TEST)
class TestQGIS(unittest.TestCase):

    """Tests for GIS utility helpers; requires a QGIS application instance."""

    def test_is_polygonal_layer(self):
        """Test polygon-layer detection on polygon, point and raster layers."""
        # Polygon layer
        layer = load_test_vector_layer(
            'aggregation',
            'district_osm_jakarta.geojson',
            clone=True
        )
        message = 'isPolygonLayer, %s layer should be polygonal' % layer
        self.assertTrue(is_polygon_layer(layer), message)

        # Point layer
        layer = load_test_vector_layer('hazard', 'volcano_point.geojson')
        message = '%s layer should be polygonal' % layer
        self.assertFalse(is_polygon_layer(layer), message)

        # Raster layer
        layer = clone_raster_layer(
            name='earthquake',
            extension='.tif',
            include_keywords=True,
            source_directory=standard_data_path('hazard')
        )
        message = ('%s raster layer should not be polygonal' % layer)
        self.assertFalse(is_polygon_layer(layer), message)

    def test_raster_y_inverted(self):
        """Test if we can detect an upside down raster."""
        # We should have one test with an inverted raster but as it's not
        # usual, I'm not going to spend time.
        layer = load_test_raster_layer('gisv4', 'hazard', 'earthquake.asc')
        self.assertFalse(is_raster_y_inverted(layer))

    def test_rectangle_from_wkt(self):
        """Test we can create a rectangle from a WKT."""
        # A valid closed ring converts to a QgsRectangle.
        rectangle = wkt_to_rectangle('POLYGON ((0 0, 0 1, 1 1, 1 0, 0 0))')
        self.assertTrue(isinstance(rectangle, QgsRectangle))
        # An unclosed ring is invalid WKT and must yield None.
        rectangle = wkt_to_rectangle('POLYGON ((0 1, 1 1, 1 0, 0 0))')
        self.assertIsNone(rectangle)

    def test_validate_geo_array(self):
        """Test validate geographic extent method.

        Extents are [min_longitude, min_latitude, max_longitude,
        max_latitude] in degrees.

        .. versionadded:: 3.2
        """
        # Normal case
        min_longitude = 20.389938354492188
        min_latitude = -34.10782492987083
        max_longitude = 20.712661743164062
        max_latitude = -34.008273470938335
        extent = [min_longitude, min_latitude, max_longitude, max_latitude]
        self.assertTrue(validate_geo_array(extent))

        # min_latitude >= max_latitude
        min_latitude = 34.10782492987083
        max_latitude = -34.008273470938335
        min_longitude = 20.389938354492188
        max_longitude = 20.712661743164062
        extent = [min_longitude, min_latitude, max_longitude, max_latitude]
        self.assertFalse(validate_geo_array(extent))

        # min_longitude >= max_longitude
        min_latitude = -34.10782492987083
        max_latitude = -34.008273470938335
        min_longitude = 34.10782492987083
        max_longitude = -34.008273470938335
        extent = [min_longitude, min_latitude, max_longitude, max_latitude]
        self.assertFalse(validate_geo_array(extent))

        # min_latitude < -90 or > 90
        min_latitude = -134.10782492987083
        max_latitude = -34.008273470938335
        min_longitude = 20.389938354492188
        max_longitude = 20.712661743164062
        extent = [min_longitude, min_latitude, max_longitude, max_latitude]
        self.assertFalse(validate_geo_array(extent))

        # max_latitude < -90 or > 90
        min_latitude = -9.10782492987083
        max_latitude = 91.10782492987083
        min_longitude = 20.389938354492188
        max_longitude = 20.712661743164062
        extent = [min_longitude, min_latitude, max_longitude, max_latitude]
        self.assertFalse(validate_geo_array(extent))

        # min_longitude < -180 or > 180
        min_latitude = -34.10782492987083
        max_latitude = -34.008273470938335
        min_longitude = -184.10782492987083
        max_longitude = 20.712661743164062
        extent = [min_longitude, min_latitude, max_longitude, max_latitude]
        self.assertFalse(validate_geo_array(extent))

        # max_longitude < -180 or > 180
        min_latitude = -34.10782492987083
        max_latitude = -34.008273470938335
        min_longitude = 20.389938354492188
        max_longitude = 180.712661743164062
        extent = [min_longitude, min_latitude, max_longitude, max_latitude]
        self.assertFalse(validate_geo_array(extent))
# Allow running this test module directly, outside the test runner.
if __name__ == '__main__':
    unittest.main()
| AIFDR/inasafe | safe/utilities/test/test_gis.py | Python | gpl-3.0 | 4,837 |
<?php
namespace App\Console\Commands;
use App\Console\LnmsCommand;
use App\Models\Device;
use Illuminate\Database\Eloquent\Builder;
use LibreNMS\Config;
use LibreNMS\Polling\ConnectivityHelper;
use Symfony\Component\Console\Input\InputArgument;
/**
 * Artisan command `device:ping`: pings one device (by id or hostname),
 * all devices ("all"), or an ad-hoc hostname that is not in the database,
 * and prints the result per device.
 */
class DevicePing extends LnmsCommand
{
    protected $name = 'device:ping';

    /**
     * Create a new command instance.
     *
     * @return void
     */
    public function __construct()
    {
        parent::__construct();

        $this->addArgument('device spec', InputArgument::REQUIRED);
    }

    /**
     * Execute the console command.
     *
     * @return int
     */
    public function handle(): int
    {
        $spec = $this->argument('device spec');
        // "all" selects every device; otherwise match a single device by
        // id or hostname.
        $devices = Device::query()->when($spec !== 'all', function (Builder $query) use ($spec) {
            /** @phpstan-var Builder<Device> $query */
            return $query->where('device_id', $spec)
                ->orWhere('hostname', $spec)
                ->limit(1);
        })->get();

        // No match in the database: treat the spec as an ad-hoc hostname.
        if ($devices->isEmpty()) {
            $devices = [new Device(['hostname' => $spec])];
        }

        Config::set('icmp_check', true); // ignore icmp disabled, this is an explicit user action

        /** @var Device $device */
        foreach ($devices as $device) {
            $helper = new ConnectivityHelper($device);
            $response = $helper->isPingable();
            $this->line($device->displayName() . ' : ' . ($response->wasSkipped() ? 'skipped' : $response));
        }

        return 0;
    }
}
| arrmo/librenms | app/Console/Commands/DevicePing.php | PHP | gpl-3.0 | 1,546 |
package net.minecraft.server;
import org.bukkit.craftbukkit.event.CraftEventFactory; // CraftBukkit
// Vanilla pig entity with CraftBukkit hooks (PigZapEvent on lightning,
// spawn reason on pig-zombie conversion). Many short method names are
// MCP-obfuscated NMS overrides; comments below describe observed behavior
// and hedge where the contract is not visible here.
public class EntityPig extends EntityAnimal {

    // Carrot-on-a-stick steering goal, exposed via ca().
    private final PathfinderGoalPassengerCarrotStick bp;

    public EntityPig(World world) {
        super(world);
        // Hitbox 0.9 x 0.9.
        this.a(0.9F, 0.9F);
        this.getNavigation().a(true);
        // AI goals, lowest number = highest priority.
        this.goalSelector.a(0, new PathfinderGoalFloat(this));
        this.goalSelector.a(1, new PathfinderGoalPanic(this, 1.25D));
        this.goalSelector.a(2, this.bp = new PathfinderGoalPassengerCarrotStick(this, 0.3F));
        this.goalSelector.a(3, new PathfinderGoalBreed(this, 1.0D));
        this.goalSelector.a(4, new PathfinderGoalTempt(this, 1.2D, Items.CARROT_STICK, false));
        this.goalSelector.a(4, new PathfinderGoalTempt(this, 1.2D, Items.CARROT, false));
        this.goalSelector.a(5, new PathfinderGoalFollowParent(this, 1.1D));
        this.goalSelector.a(6, new PathfinderGoalRandomStroll(this, 1.0D));
        this.goalSelector.a(7, new PathfinderGoalLookAtPlayer(this, EntityHuman.class, 6.0F));
        this.goalSelector.a(8, new PathfinderGoalRandomLookaround(this));
    }

    // Obfuscated override; presumably "uses AI navigation" — confirm against MCP mappings.
    public boolean bk() {
        return true;
    }

    // Register base attributes: 10 max health, 0.25 movement speed.
    protected void aD() {
        super.aD();
        this.getAttributeInstance(GenericAttributes.maxHealth).setValue(10.0D);
        this.getAttributeInstance(GenericAttributes.d).setValue(0.25D);
    }

    protected void bn() {
        super.bn();
    }

    // Whether the pig can currently be steered: rider must hold a carrot on a stick.
    public boolean bE() {
        ItemStack itemstack = ((EntityHuman) this.passenger).be();

        return itemstack != null && itemstack.getItem() == Items.CARROT_STICK;
    }

    // Data-watcher init; index 16 bit 0 stores the saddle flag.
    protected void c() {
        super.c();
        this.datawatcher.a(16, Byte.valueOf((byte) 0));
    }

    // Save entity NBT.
    public void b(NBTTagCompound nbttagcompound) {
        super.b(nbttagcompound);
        nbttagcompound.setBoolean("Saddle", this.hasSaddle());
    }

    // Load entity NBT.
    public void a(NBTTagCompound nbttagcompound) {
        super.a(nbttagcompound);
        this.setSaddle(nbttagcompound.getBoolean("Saddle"));
    }

    // Ambient sound.
    protected String t() {
        return "mob.pig.say";
    }

    // Hurt sound (same asset as ambient for pigs).
    protected String aT() {
        return "mob.pig.say";
    }

    // Death sound.
    protected String aU() {
        return "mob.pig.death";
    }

    // Step sound.
    protected void a(int i, int j, int k, Block block) {
        this.makeSound("mob.pig.step", 0.15F, 1.0F);
    }

    // Player interaction: mount a saddled pig (server side only).
    public boolean a(EntityHuman entityhuman) {
        if (super.a(entityhuman)) {
            return true;
        } else if (this.hasSaddle() && !this.world.isStatic && (this.passenger == null || this.passenger == entityhuman)) {
            entityhuman.mount(this);
            return true;
        } else {
            return false;
        }
    }

    // Base drop item: cooked porkchop while on fire, raw otherwise.
    protected Item getLoot() {
        return this.isBurning() ? Items.GRILLED_PORK : Items.PORK;
    }

    // Drop 1-3 porkchops (+looting bonus i) and the saddle if present.
    protected void dropDeathLoot(boolean flag, int i) {
        int j = this.random.nextInt(3) + 1 + this.random.nextInt(1 + i);

        for (int k = 0; k < j; ++k) {
            if (this.isBurning()) {
                this.a(Items.GRILLED_PORK, 1);
            } else {
                this.a(Items.PORK, 1);
            }
        }

        if (this.hasSaddle()) {
            this.a(Items.SADDLE, 1);
        }
    }

    public boolean hasSaddle() {
        return (this.datawatcher.getByte(16) & 1) != 0;
    }

    public void setSaddle(boolean flag) {
        if (flag) {
            this.datawatcher.watch(16, Byte.valueOf((byte) 1));
        } else {
            this.datawatcher.watch(16, Byte.valueOf((byte) 0));
        }
    }

    // Lightning strike: convert into a sword-wielding pig zombie, unless a
    // plugin cancels the CraftBukkit PigZapEvent.
    public void a(EntityLightning entitylightning) {
        if (!this.world.isStatic) {
            EntityPigZombie entitypigzombie = new EntityPigZombie(this.world);

            // CraftBukkit start
            if (CraftEventFactory.callPigZapEvent(this, entitylightning, entitypigzombie).isCancelled()) {
                return;
            }
            // CraftBukkit end

            entitypigzombie.setEquipment(0, new ItemStack(Items.GOLD_SWORD));
            entitypigzombie.setPositionRotation(this.locX, this.locY, this.locZ, this.yaw, this.pitch);
            // CraftBukkit - added a reason for spawning this creature
            this.world.addEntity(entitypigzombie, org.bukkit.event.entity.CreatureSpawnEvent.SpawnReason.LIGHTNING);
            this.die();
        }
    }

    // Fall damage: riders falling more than 5 blocks earn the
    // "When Pigs Fly" achievement (AchievementList.u).
    protected void b(float f) {
        super.b(f);
        if (f > 5.0F && this.passenger instanceof EntityHuman) {
            ((EntityHuman) this.passenger).a((Statistic) AchievementList.u);
        }
    }

    // Breeding: offspring is a new pig.
    public EntityPig b(EntityAgeable entityageable) {
        return new EntityPig(this.world);
    }

    // Breeding item check: pigs breed with carrots.
    public boolean c(ItemStack itemstack) {
        return itemstack != null && itemstack.getItem() == Items.CARROT;
    }

    // Accessor for the carrot-stick steering goal.
    public PathfinderGoalPassengerCarrotStick ca() {
        return this.bp;
    }

    public EntityAgeable createChild(EntityAgeable entityageable) {
        return this.b(entityageable);
    }
}
| starlis/EMC-CraftBukkit | src/main/java/net/minecraft/server/EntityPig.java | Java | gpl-3.0 | 4,987 |
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package protoreflect provides interfaces to dynamically manipulate messages.
//
// This package includes type descriptors which describe the structure of types
// defined in proto source files and value interfaces which provide the
// ability to examine and manipulate the contents of messages.
//
//
// Protocol Buffer Descriptors
//
// Protobuf descriptors (e.g., EnumDescriptor or MessageDescriptor)
// are immutable objects that represent protobuf type information.
// They are wrappers around the messages declared in descriptor.proto.
// Protobuf descriptors alone lack any information regarding Go types.
//
// Enums and messages generated by this module implement Enum and ProtoMessage,
// where the Descriptor and ProtoReflect.Descriptor accessors respectively
// return the protobuf descriptor for the values.
//
// The protobuf descriptor interfaces are not meant to be implemented by
// user code since they might need to be extended in the future to support
// additions to the protobuf language. Protobuf descriptors can be constructed
// using the "google.golang.org/protobuf/reflect/protodesc" package.
//
//
// Go Type Descriptors
//
// A type descriptor (e.g., EnumType or MessageType) is a constructor for
// a concrete Go type that represents the associated protobuf descriptor.
// There is commonly a one-to-one relationship between protobuf descriptors and
// Go type descriptors, but it can potentially be a one-to-many relationship.
//
// Enums and messages generated by this module implement Enum and ProtoMessage,
// where the Type and ProtoReflect.Type accessors respectively
// return the protobuf descriptor for the values.
//
// The "google.golang.org/protobuf/types/dynamicpb" package can be used to
// create Go type descriptors from protobuf descriptors.
//
//
// Value Interfaces
//
// The Enum and Message interfaces provide a reflective view over an
// enum or message instance. For enums, it provides the ability to retrieve
// the enum value number for any concrete enum type. For messages, it provides
// the ability to access or manipulate fields of the message.
//
// To convert a proto.Message to a protoreflect.Message, use the
// former's ProtoReflect method. Since the ProtoReflect method is new to the
// v2 message interface, it may not be present on older message implementations.
// The "github.com/golang/protobuf/proto".MessageReflect function can be used
// to obtain a reflective view on older messages.
//
//
// Relationships
//
// The following diagrams demonstrate the relationships between
// various types declared in this package.
//
//
// ┌───────────────────────────────────┐
// V │
// ┌────────────── New(n) ─────────────┐ │
// │ │ │
// │ ┌──── Descriptor() ──┐ │ ┌── Number() ──┐ │
// │ │ V V │ V │
// ╔════════════╗ ╔════════════════╗ ╔════════╗ ╔════════════╗
// ║ EnumType ║ ║ EnumDescriptor ║ ║ Enum ║ ║ EnumNumber ║
// ╚════════════╝ ╚════════════════╝ ╚════════╝ ╚════════════╝
// Λ Λ │ │
// │ └─── Descriptor() ──┘ │
// │ │
// └────────────────── Type() ───────┘
//
// • An EnumType describes a concrete Go enum type.
// It has an EnumDescriptor and can construct an Enum instance.
//
// • An EnumDescriptor describes an abstract protobuf enum type.
//
// • An Enum is a concrete enum instance. Generated enums implement Enum.
//
//
// ┌──────────────── New() ─────────────────┐
// │ │
// │ ┌─── Descriptor() ─────┐ │ ┌── Interface() ───┐
// │ │ V V │ V
// ╔═════════════╗ ╔═══════════════════╗ ╔═════════╗ ╔══════════════╗
// ║ MessageType ║ ║ MessageDescriptor ║ ║ Message ║ ║ ProtoMessage ║
// ╚═════════════╝ ╚═══════════════════╝ ╚═════════╝ ╚══════════════╝
// Λ Λ │ │ Λ │
// │ └──── Descriptor() ────┘ │ └─ ProtoReflect() ─┘
// │ │
// └─────────────────── Type() ─────────┘
//
// • A MessageType describes a concrete Go message type.
// It has a MessageDescriptor and can construct a Message instance.
//
// • A MessageDescriptor describes an abstract protobuf message type.
//
// • A Message is a concrete message instance. Generated messages implement
// ProtoMessage, which can convert to/from a Message.
//
//
// ┌── TypeDescriptor() ──┐ ┌───── Descriptor() ─────┐
// │ V │ V
// ╔═══════════════╗ ╔═════════════════════════╗ ╔═════════════════════╗
// ║ ExtensionType ║ ║ ExtensionTypeDescriptor ║ ║ ExtensionDescriptor ║
// ╚═══════════════╝ ╚═════════════════════════╝ ╚═════════════════════╝
// Λ │ │ Λ │ Λ
// └─────── Type() ───────┘ │ └─── may implement ────┘ │
// │ │
// └────── implements ────────┘
//
// • An ExtensionType describes a concrete Go implementation of an extension.
// It has an ExtensionTypeDescriptor and can convert to/from
// abstract Values and Go values.
//
// • An ExtensionTypeDescriptor is an ExtensionDescriptor
// which also has an ExtensionType.
//
// • An ExtensionDescriptor describes an abstract protobuf extension field and
// may not always be an ExtensionTypeDescriptor.
package protoreflect
import (
"fmt"
"regexp"
"strings"
"google.golang.org/protobuf/encoding/protowire"
"google.golang.org/protobuf/internal/pragma"
)
// doNotImplement embeds pragma.DoNotImplement so that interfaces in this
// package cannot be satisfied by types outside the module.
type doNotImplement pragma.DoNotImplement

// ProtoMessage is the top-level interface that all proto messages implement.
// This is declared in the protoreflect package to avoid a cyclic dependency;
// use the proto.Message type instead, which aliases this type.
type ProtoMessage interface{ ProtoReflect() Message }

// Syntax is the language version of the proto file.
type Syntax syntax

type syntax int8 // keep exact type opaque as the int type may change

// The numeric values mirror the proto language versions they name.
const (
	Proto2 Syntax = 2
	Proto3 Syntax = 3
)
// IsValid reports whether the syntax is valid.
func (s Syntax) IsValid() bool {
	return s == Proto2 || s == Proto3
}
// String returns s as a proto source identifier (e.g., "proto2").
func (s Syntax) String() string {
	if s == Proto2 {
		return "proto2"
	}
	if s == Proto3 {
		return "proto3"
	}
	return fmt.Sprintf("<unknown:%d>", s)
}
// GoString returns s as a Go source identifier (e.g., "Proto2").
func (s Syntax) GoString() string {
	if s == Proto2 {
		return "Proto2"
	}
	if s == Proto3 {
		return "Proto3"
	}
	return fmt.Sprintf("Syntax(%d)", s)
}
// Cardinality determines whether a field is optional, required, or repeated.
type Cardinality cardinality

type cardinality int8 // keep exact type opaque as the int type may change

// Constants as defined by the google.protobuf.Cardinality enumeration.
const (
	Optional Cardinality = 1 // appears zero or one times
	Required Cardinality = 2 // appears exactly one time; invalid with Proto3
	Repeated Cardinality = 3 // appears zero or more times
)
// IsValid reports whether the cardinality is valid.
func (c Cardinality) IsValid() bool {
	return c == Optional || c == Required || c == Repeated
}
// String returns c as a proto source identifier (e.g., "optional").
func (c Cardinality) String() string {
	// Cardinalities are contiguous (1..3), so a table lookup suffices.
	names := [...]string{"optional", "required", "repeated"}
	if c >= Optional && c <= Repeated {
		return names[c-Optional]
	}
	return fmt.Sprintf("<unknown:%d>", c)
}
// GoString returns c as a Go source identifier (e.g., "Optional").
func (c Cardinality) GoString() string {
	// Cardinalities are contiguous (1..3), so a table lookup suffices.
	names := [...]string{"Optional", "Required", "Repeated"}
	if c >= Optional && c <= Repeated {
		return names[c-Optional]
	}
	return fmt.Sprintf("Cardinality(%d)", c)
}
// Kind indicates the basic proto kind of a field.
type Kind kind

type kind int8 // keep exact type opaque as the int type may change

// Constants as defined by the google.protobuf.Field.Kind enumeration.
// All values 1 through 18 are assigned.
const (
	BoolKind     Kind = 8
	EnumKind     Kind = 14
	Int32Kind    Kind = 5
	Sint32Kind   Kind = 17
	Uint32Kind   Kind = 13
	Int64Kind    Kind = 3
	Sint64Kind   Kind = 18
	Uint64Kind   Kind = 4
	Sfixed32Kind Kind = 15
	Fixed32Kind  Kind = 7
	FloatKind    Kind = 2
	Sfixed64Kind Kind = 16
	Fixed64Kind  Kind = 6
	DoubleKind   Kind = 1
	StringKind   Kind = 9
	BytesKind    Kind = 12
	MessageKind  Kind = 11
	GroupKind    Kind = 10
)
// IsValid reports whether the kind is valid.
func (k Kind) IsValid() bool {
	// Every kind value from 1 (DoubleKind) through 18 (Sint64Kind) is
	// defined, so a contiguous range check is equivalent to enumerating
	// all eighteen constants.
	return k >= DoubleKind && k <= Sint64Kind
}
// String returns k as a proto source identifier (e.g., "bool").
func (k Kind) String() string {
	names := map[Kind]string{
		BoolKind:     "bool",
		EnumKind:     "enum",
		Int32Kind:    "int32",
		Sint32Kind:   "sint32",
		Uint32Kind:   "uint32",
		Int64Kind:    "int64",
		Sint64Kind:   "sint64",
		Uint64Kind:   "uint64",
		Sfixed32Kind: "sfixed32",
		Fixed32Kind:  "fixed32",
		FloatKind:    "float",
		Sfixed64Kind: "sfixed64",
		Fixed64Kind:  "fixed64",
		DoubleKind:   "double",
		StringKind:   "string",
		BytesKind:    "bytes",
		MessageKind:  "message",
		GroupKind:    "group",
	}
	if name, ok := names[k]; ok {
		return name
	}
	return fmt.Sprintf("<unknown:%d>", k)
}
// GoString returns k as a Go source identifier (e.g., "BoolKind").
func (k Kind) GoString() string {
	names := map[Kind]string{
		BoolKind:     "BoolKind",
		EnumKind:     "EnumKind",
		Int32Kind:    "Int32Kind",
		Sint32Kind:   "Sint32Kind",
		Uint32Kind:   "Uint32Kind",
		Int64Kind:    "Int64Kind",
		Sint64Kind:   "Sint64Kind",
		Uint64Kind:   "Uint64Kind",
		Sfixed32Kind: "Sfixed32Kind",
		Fixed32Kind:  "Fixed32Kind",
		FloatKind:    "FloatKind",
		Sfixed64Kind: "Sfixed64Kind",
		Fixed64Kind:  "Fixed64Kind",
		DoubleKind:   "DoubleKind",
		StringKind:   "StringKind",
		BytesKind:    "BytesKind",
		MessageKind:  "MessageKind",
		GroupKind:    "GroupKind",
	}
	if name, ok := names[k]; ok {
		return name
	}
	return fmt.Sprintf("Kind(%d)", k)
}
// FieldNumber is the field number in a message.
type FieldNumber = protowire.Number

// FieldNumbers represent a list of field numbers.
type FieldNumbers interface {
	// Len reports the number of fields in the list.
	Len() int
	// Get returns the ith field number. It panics if out of bounds.
	Get(i int) FieldNumber
	// Has reports whether n is within the list of fields.
	Has(n FieldNumber) bool

	doNotImplement
}

// FieldRanges represent a list of field number ranges.
type FieldRanges interface {
	// Len reports the number of ranges in the list.
	Len() int
	// Get returns the ith range. It panics if out of bounds.
	Get(i int) [2]FieldNumber // start inclusive; end exclusive
	// Has reports whether n is within any of the ranges.
	Has(n FieldNumber) bool

	doNotImplement
}

// EnumNumber is the numeric value for an enum.
type EnumNumber int32

// EnumRanges represent a list of enum number ranges.
type EnumRanges interface {
	// Len reports the number of ranges in the list.
	Len() int
	// Get returns the ith range. It panics if out of bounds.
	Get(i int) [2]EnumNumber // start inclusive; end inclusive
	// Has reports whether n is within any of the ranges.
	Has(n EnumNumber) bool

	doNotImplement
}

// Compiled once at package init; used by Name.IsValid and FullName.IsValid.
var (
	regexName     = regexp.MustCompile(`^[_a-zA-Z][_a-zA-Z0-9]*$`)
	regexFullName = regexp.MustCompile(`^[_a-zA-Z][_a-zA-Z0-9]*(\.[_a-zA-Z][_a-zA-Z0-9]*)*$`)
)
// Name is the short name for a proto declaration. This is not the name
// as used in Go source code, which might not be identical to the proto name.
type Name string // e.g., "Kind"

// IsValid reports whether n is a syntactically valid name.
// An empty name is invalid.
func (n Name) IsValid() bool {
	return regexName.MatchString(string(n))
}

// Names represent a list of names.
type Names interface {
	// Len reports the number of names in the list.
	Len() int
	// Get returns the ith name. It panics if out of bounds.
	Get(i int) Name
	// Has reports whether s matches any names in the list.
	Has(s Name) bool

	doNotImplement
}
// FullName is a qualified name that uniquely identifies a proto declaration.
// A qualified name is the concatenation of the proto package along with the
// fully-declared name (i.e., name of parent preceding the name of the child),
// with a '.' delimiter placed between each Name.
//
// This should not have any leading or trailing dots.
type FullName string // e.g., "google.protobuf.Field.Kind"

// IsValid reports whether n is a syntactically valid full name.
// An empty full name is invalid.
func (n FullName) IsValid() bool {
	return regexFullName.MatchString(string(n))
}
// Name returns the short name, which is the last identifier segment.
// A single segment FullName is the Name itself.
func (n FullName) Name() Name {
	s := string(n)
	// LastIndexByte yields -1 when there is no dot, so s[i+1:] is then
	// s[0:] — the whole (single-segment) name.
	i := strings.LastIndexByte(s, '.')
	return Name(s[i+1:])
}
// Parent returns the full name with the trailing identifier removed.
// A single segment FullName has no parent.
func (n FullName) Parent() FullName {
	i := strings.LastIndexByte(string(n), '.')
	if i < 0 {
		// Single segment: nothing precedes the name.
		return ""
	}
	return n[:i]
}
// Append returns the qualified name appended with the provided short name.
//
// Invariant: n == n.Parent().Append(n.Name()) // assuming n is valid
func (n FullName) Append(s Name) FullName {
	if n != "" {
		return n + "." + FullName(s)
	}
	// Appending to the empty name yields the short name itself.
	return FullName(s)
}
| pberndro/smartpi_exporter | vendor/google.golang.org/protobuf/reflect/protoreflect/proto.go | GO | gpl-3.0 | 15,836 |
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package org.mozilla.mozstumbler.service.stumblerthread.datahandling;
import org.json.JSONObject;
import org.mozilla.mozstumbler.service.stumblerthread.datahandling.base.SerializedJSONRows;
import org.mozilla.mozstumbler.service.stumblerthread.datahandling.base.JSONRowsObjectBuilder;
import org.mozilla.mozstumbler.service.utils.Zipper;
/*
ReportBatchBuilder accepts MLS GeoSubmit JSON blobs and serializes them to
string form.
*/
public class ReportBatchBuilder extends JSONRowsObjectBuilder {

    /**
     * Sums the cell-record counts reported by every queued JSON entry.
     *
     * @return total cell count across all queued MLS reports
     */
    public int getCellCount() {
        int result = 0;
        for (JSONObject obj: mJSONEntries) {
            // Entries are expected to be MLSJSONObject instances. The assert
            // only fires with -ea enabled; otherwise a foreign entry would
            // surface as a ClassCastException on the next line.
            assert(obj instanceof MLSJSONObject);
            result += ((MLSJSONObject) obj).getCellCount();
        }
        return result;
    }

    /**
     * Sums the wifi-record counts reported by every queued JSON entry.
     *
     * @return total wifi count across all queued MLS reports
     */
    public int getWifiCount() {
        int result = 0;
        for (JSONObject obj : mJSONEntries) {
            assert(obj instanceof MLSJSONObject);
            result += ((MLSJSONObject) obj).getWifiCount();
        }
        return result;
    }

    /**
     * Serializes the queued reports to JSON, zips the bytes, and wraps them
     * in a {@link ReportBatch} held in memory.
     *
     * The observation/wifi/cell counts are captured before generateJSON()
     * runs because it is invoked with preserveDataAfterGenerateJSON == false,
     * which presumably discards the queued entries — TODO confirm against
     * JSONRowsObjectBuilder.
     */
    @Override
    public SerializedJSONRows finalizeToJSONRowsObject() {
        int obs = entriesCount();
        int wifis = getWifiCount();
        int cells = getCellCount();
        boolean preserveDataAfterGenerateJSON = false;
        byte[] zippedbytes = Zipper.zipData(generateJSON(preserveDataAfterGenerateJSON).getBytes());
        return new ReportBatch(zippedbytes,
                SerializedJSONRows.StorageState.IN_MEMORY,
                obs, wifis, cells);
    }
}
| petercpg/MozStumbler | libraries/stumbler/src/main/java/org/mozilla/mozstumbler/service/stumblerthread/datahandling/ReportBatchBuilder.java | Java | mpl-2.0 | 1,691 |
/*
* ################################################################
*
* ProActive Parallel Suite(TM): The Java(TM) library for
* Parallel, Distributed, Multi-Core Computing for
* Enterprise Grids & Clouds
*
* Copyright (C) 1997-2012 INRIA/University of
* Nice-Sophia Antipolis/ActiveEon
* Contact: proactive@ow2.org or contact@activeeon.com
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Affero General Public License
* as published by the Free Software Foundation; version 3 of
* the License.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
* USA
*
* If needed, contact us to obtain a release under GPL Version 2 or 3
* or a different license than the AGPL.
*
* Initial developer(s): The ProActive Team
* http://proactive.inria.fr/team_members.htm
* Contributor(s):
*
* ################################################################
* $$PROACTIVE_INITIAL_DEV$$
*/
package org.objectweb.proactive.core.util;
import org.apache.log4j.Logger;
import org.objectweb.proactive.core.util.log.Loggers;
import org.objectweb.proactive.core.util.log.ProActiveLogger;
/**
* Provide an incremental per VM unique ID
*
* unique id starts from zero and is incremented each time getUniqID is called.
* If Long.MAX_VALUE is reached then then IllegalStateArgument exception is thrown
*
* @See {@link ProActiveRandom}
*
*/
public class ProActiveCounter {
    static Logger logger = ProActiveLogger.getLogger(Loggers.CORE);
    static long counter = 0;

    /**
     * Returns the next unique value for this VM, starting from zero.
     *
     * @return the next unique ID
     * @throws IllegalStateException once the counter has reached Long.MAX_VALUE
     */
    public static synchronized long getUniqID() {
        if (counter == Long.MAX_VALUE) {
            // Refuse to wrap around: IDs must stay unique for the VM lifetime.
            throw new IllegalStateException(ProActiveCounter.class.getSimpleName() +
                " counter reached max value");
        }
        return counter++;
    }
}
| moliva/proactive | src/Core/org/objectweb/proactive/core/util/ProActiveCounter.java | Java | agpl-3.0 | 2,303 |
/*******************************************************************************
* This file is part of OpenNMS(R).
*
* Copyright (C) 2013-2014 The OpenNMS Group, Inc.
* OpenNMS(R) is Copyright (C) 1999-2014 The OpenNMS Group, Inc.
*
* OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
*
* OpenNMS(R) is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License,
* or (at your option) any later version.
*
* OpenNMS(R) is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with OpenNMS(R). If not, see:
* http://www.gnu.org/licenses/
*
* For more information contact:
* OpenNMS(R) Licensing <license@opennms.org>
* http://www.opennms.org/
* http://www.opennms.com/
*******************************************************************************/
package org.opennms.features.vaadin.nodemaps.internal.gwt.client;
import org.discotools.gwt.leaflet.client.jsobject.JSObject;
import org.discotools.gwt.leaflet.client.types.LatLng;
/**
 * JavaScript overlay object describing a single map-search hit:
 * a display title plus the coordinate ({@link LatLng}) it refers to.
 */
public class SearchResult extends JSObject {
    // Overlay types are never constructed from Java directly; use create().
    protected SearchResult() {}

    /**
     * Builds a new search result with the given title and coordinate.
     *
     * @param title  human-readable label for the hit
     * @param latLng coordinate associated with the hit
     * @return the freshly created {@code SearchResult}
     */
    public static final SearchResult create(final String title, final LatLng latLng) {
        // createJSObject() returns a plain JSObject; cast() re-types it
        // as a SearchResult overlay without copying.
        final SearchResult result = JSObject.createJSObject().cast();
        result.setTitle(title);
        result.setLatLng(latLng);
        return result;
    }

    /** @return the display title stored under the "title" property. */
    public final String getTitle() {
        return getPropertyAsString("title");
    }

    /** Sets the display title; returns {@code this} for chaining. */
    public final SearchResult setTitle(final String title) {
        setProperty("title", title);
        return this;
    }

    /** @return the coordinate stored under the "latLng" property. */
    public final LatLng getLatLng() {
        return new LatLng(getProperty("latLng"));
    }

    /** Sets the coordinate; returns {@code this} for chaining. */
    public final SearchResult setLatLng(final LatLng latLng) {
        setProperty("latLng", latLng.getJSObject());
        return this;
    }
}
| rdkgit/opennms | features/vaadin-node-maps/src/main/java/org/opennms/features/vaadin/nodemaps/internal/gwt/client/SearchResult.java | Java | agpl-3.0 | 2,174 |
/*
* ################################################################
*
* ProActive Parallel Suite(TM): The Java(TM) library for
* Parallel, Distributed, Multi-Core Computing for
* Enterprise Grids & Clouds
*
* Copyright (C) 1997-2012 INRIA/University of
* Nice-Sophia Antipolis/ActiveEon
* Contact: proactive@ow2.org or contact@activeeon.com
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Affero General Public License
* as published by the Free Software Foundation; version 3 of
* the License.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
* USA
*
* If needed, contact us to obtain a release under GPL Version 2 or 3
* or a different license than the AGPL.
*
* Initial developer(s): The ProActive Team
* http://proactive.inria.fr/team_members.htm
* Contributor(s):
*
* ################################################################
* $$PROACTIVE_INITIAL_DEV$$
*/
package org.objectweb.proactive.extensions.annotation.common;
import com.sun.mirror.apt.AnnotationProcessor;
/** This annotation processor processes the annotations provided by default
* whith JDK 1.5. This is needed in order to suppress the unnecessary warnings that
* apt generates for these default annotations.
* See also http://forums.sun.com/thread.jspa?threadID=5345947
* @author fabratu
* @version %G%, %I%
* @since ProActive 4.10
*/
public class BogusAnnotationProcessor implements AnnotationProcessor {

    /** No state to initialize. */
    public BogusAnnotationProcessor() {
    }

    /**
     * Intentionally does nothing: this processor exists only to claim the
     * JDK's built-in annotations so apt stops emitting warnings for them.
     */
    public void process() {
        // nothing!
    }
}
| moliva/proactive | src/Extensions/org/objectweb/proactive/extensions/annotation/common/BogusAnnotationProcessor.java | Java | agpl-3.0 | 2,065 |
<?php
/**
* @package api
* @subpackage filters.enum
*/
class KalturaFileAssetOrderBy extends KalturaStringEnum
{
	// Kaltura sort-order convention: a '+' prefix sorts ascending,
	// a '-' prefix sorts descending, on the named field.
	const CREATED_AT_ASC = "+createdAt";   // oldest first
	const CREATED_AT_DESC = "-createdAt";  // newest first
	const UPDATED_AT_ASC = "+updatedAt";   // least recently updated first
	const UPDATED_AT_DESC = "-updatedAt";  // most recently updated first
}
| ratliff/server | api_v3/lib/types/fileAsset/filters/orderEnums/KalturaFileAssetOrderBy.php | PHP | agpl-3.0 | 272 |
/*******************************************************************************
* This file is part of OpenNMS(R).
*
* Copyright (C) 2007-2014 The OpenNMS Group, Inc.
* OpenNMS(R) is Copyright (C) 1999-2014 The OpenNMS Group, Inc.
*
* OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
*
* OpenNMS(R) is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License,
* or (at your option) any later version.
*
* OpenNMS(R) is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with OpenNMS(R). If not, see:
* http://www.gnu.org/licenses/
*
* For more information contact:
* OpenNMS(R) Licensing <license@opennms.org>
* http://www.opennms.org/
* http://www.opennms.com/
*******************************************************************************/
package org.opennms.netmgt.vacuumd;
/**
* <p>AutomationException class.</p>
*
* @author <a href="mailto:brozow@opennms.org">Mathew Brozowski</a>
* @version $Id: $
*/
public class AutomationException extends RuntimeException {

    /** Serialization version; keep stable across releases. */
    private static final long serialVersionUID = -8873671974245928627L;

    /**
     * Creates an exception carrying only a detail message.
     *
     * @param arg0 a {@link java.lang.String} object: the detail message
     *             describing why the automation failed.
     */
    public AutomationException(String arg0) {
        super(arg0);
    }

    /**
     * Creates an exception wrapping an underlying cause.
     *
     * @param arg0 a {@link java.lang.Throwable} object: the root cause,
     *             preserved for stack traces.
     */
    public AutomationException(Throwable arg0) {
        super(arg0);
    }

    /**
     * Creates an exception with both a detail message and a cause.
     *
     * @param arg0 a {@link java.lang.String} object: the detail message.
     * @param arg1 a {@link java.lang.Throwable} object: the root cause.
     */
    public AutomationException(String arg0, Throwable arg1) {
        super(arg0, arg1);
    }
}
| tdefilip/opennms | opennms-services/src/main/java/org/opennms/netmgt/vacuumd/AutomationException.java | Java | agpl-3.0 | 2,125 |
#
# Copyright (C) 2011 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
require File.expand_path(File.dirname(__FILE__) + '/../../spec_helper')
require File.expand_path(File.dirname(__FILE__) + '/../views_helper')
describe "/quizzes/submission_versions" do
  it "should render" do
    # Set up an active course with a teacher and a quiz for the view.
    course_with_teacher(:active_all => true)
    course_quiz
    view_context
    # Clear cached contexts so the view picks up the records created above.
    ActiveRecord::Base.clear_cached_contexts
    assigns[:quiz] = @quiz
    # Empty version list: the smoke test only checks the view renders.
    assigns[:versions] = []

    render "quizzes/submission_versions"
    response.should_not be_nil
  end
end
/*
* Copyright (C) 2011 JTalks.org Team
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
var Utils = {};
/**
 * Quotes (part of) a post into the answer editor.
 *
 * Sends the user's current selection — if it belongs to the post being
 * quoted — to the server, which returns the text prepared for quoting;
 * the result is appended to the answer textarea. On request failure an
 * empty string is appended instead (best effort).
 *
 * @param postId id of the post being quoted, used in the request URL
 * @param postNumber index of the post on the current page, used to check
 *        the selection actually belongs to that post
 */
function quote(postId, postNumber) {
    var callback = function (text) {
        $('#post').focus();
        var answer = $('#postBody');
        answer.focus();
        // $() always returns a (possibly empty) jQuery object, so the old
        // `if (answer)` test was always true; check length instead.
        if (answer.length > 0) {
            answer.val(answer.val() + text);
        }
    };

    $.ajax({
        url: baseUrl + '/posts/' + postId + '/quote',
        type: 'POST',
        data: {
            selection: getSelectedPostText(postNumber)
        },
        success: function (data) {
            callback(data.result);
        },
        error: function () {
            callback('');
        }
    });
}
/**
 * Returns the text currently selected by the user, but only when the whole
 * selection lies inside the content body of the post being quoted;
 * otherwise returns an empty string.
 *
 * Uses window.getSelection where available and falls back to the legacy
 * IE document.selection API.
 *
 * @param {Number} postNumber index of the post whose "quote" button was pressed
 * @return {String} the selected text, or '' when no valid selection exists
 */
function getSelectedPostText(postNumber) {
    var txt = '';
    if (window.getSelection) {
        if (window.getSelection().toString().length > 0 && isRangeInPost(window.getSelection().getRangeAt(0))
            && isSelectedPostQuoted(postNumber)) {
            txt = window.getSelection().toString();
        }
    }
    else if (document.selection) {
        // Legacy IE path. NOTE(review): isSelectedPostQuoted() internally
        // calls window.getSelection(), which may be unavailable on this
        // branch — verify on the browsers this path targets.
        if (isRangeInPost(document.selection.createRange()) && isSelectedPostQuoted(postNumber)) {
            txt = document.selection.createRange().text;
        }
    }
    return txt;
}
/**
 * Checks whether the selected document fragment is part of a post's content.
 *
 * @param {Range} range Range object representing the current selection.
 * @return {boolean} <b>true</b> if the selection starts inside a post body,
 *         <b>false</b> otherwise.
 */
function isRangeInPost(range) {
    var startNode = $(range.startContainer);
    var enclosingBodies = startNode.closest(".post-content-body");
    return enclosingBodies.length !== 0;
}
/**
 * Checks if the "quote" button was pressed on the post which was selected.
 *
 * The post containing the selection start is located via closest('.post');
 * prevAll().length counts its preceding sibling posts, i.e. its zero-based
 * position on the page, which is compared against postNumber.
 *
 * NOTE(review): this always reads window.getSelection(), even when the
 * caller is on the legacy document.selection path — confirm on old IE.
 *
 * @param {Number} postNumber number of the post on the page which "quote" button was pressed.
 * @return {boolean} <b>true</b> if selected text is a part of the post which will be quoted,
 *         <b>false</b> otherwise.
 */
function isSelectedPostQuoted(postNumber) {
    return $(window.getSelection().getRangeAt(0).startContainer).closest('.post').prevAll().length == postNumber;
}
/**
 * Encodes a string by escaping special HTML characters.
 *
 * Implementation: the string is assigned as the text content of a detached
 * <div> and the resulting markup is read back, letting the browser perform
 * the escaping.
 *
 * @param s string to be encoded
 * @return {String} the HTML-escaped string
 */
Utils.htmlEncode = function (s) {
    var helper = $('<div/>');
    helper.text(s);
    return helper.html();
};
/**
 * Moves keyboard focus to the element matched by the given selector.
 *
 * @param target selector of the element to focus
 */
Utils.focusFirstEl = function (target) {
    var element = $(target);
    element.focus();
};
/**
 * Replaces every \n character with a <br> tag. Used for review comments.
 *
 * @param s string in which to perform the replacement
 * @return {String} the string with line feeds converted to <br> tags
 */
Utils.lf2br = function (s) {
    // split/join replaces every occurrence, like replace(/\n/g, "<br>").
    return s.split("\n").join("<br>");
};
/**
 * Replaces every <br> tag (any letter case) with a \n character.
 * Used for review comments.
 *
 * @param s string in which to perform the replacement
 * @return {String} the string with <br> tags converted to line feeds
 */
Utils.br2lf = function (s) {
    // Case-insensitive so <BR>/<Br>/<bR> are handled too.
    var brPattern = /<br>/gi;
    return s.replace(brPattern, "\n");
};
/**
 * Create form field with given label(placeholder), id, type, class and style.
 *
 * The id is used for both the "id" and "name" attributes; the label becomes
 * the input's placeholder text. Returns a bootstrap-style control-group
 * HTML snippet as a string.
 *
 * NOTE(review): arguments are interpolated into HTML without escaping, so
 * callers must pass only trusted values — verify all call sites.
 */
Utils.createFormElement = function (label, id, type, cls, style) {
    var elementHtml = ' \
        <div class="control-group"> \
            <div class="controls"> \
                <input type="' + type + '" id="' + id + '" name="' + id + '" placeholder="' + label + '" class="input-xlarge ' + cls + '" style="'+ style +'" /> \
            </div> \
        </div> \
    ';

    return elementHtml;
}
/**
 * Handles the image "onerror" event when an image cannot be loaded.
 * Invoked via the kefirbb.xml config for the [img] bbtag.
 *
 * Swaps in the bundled "no image" placeholder, applies the default
 * thumbnail style, points the wrapping link at the placeholder too, and
 * clears the handler so a failing placeholder cannot retrigger the event.
 *
 * @param image the <img> DOM element whose load failed
 */
function imgError(image) {
    var imageDefault = baseUrl + "/resources/images/noimage.jpg";
    image.src = imageDefault;
    image.className = "thumbnail-default";
    image.parentNode.href = imageDefault;
    image.onerror = "";
}
<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.0" language="hr" sourcelanguage="en">
<context>
<name>CmdWebBrowserBack</name>
<message>
<location filename="../../Command.cpp" line="75"/>
<source>Web</source>
<translation type="unfinished">Web</translation>
</message>
<message>
<location filename="../../Command.cpp" line="76"/>
<source>Previous page</source>
<translation type="unfinished">Previous page</translation>
</message>
<message>
<location filename="../../Command.cpp" line="77"/>
<source>Go back to the previous page</source>
<translation type="unfinished">Go back to the previous page</translation>
</message>
</context>
<context>
<name>CmdWebBrowserNext</name>
<message>
<location filename="../../Command.cpp" line="103"/>
<source>Web</source>
<translation type="unfinished">Web</translation>
</message>
<message>
<location filename="../../Command.cpp" line="104"/>
<source>Next page</source>
<translation type="unfinished">Next page</translation>
</message>
<message>
<location filename="../../Command.cpp" line="105"/>
<source>Go to the next page</source>
<translation type="unfinished">Go to the next page</translation>
</message>
</context>
<context>
<name>CmdWebBrowserRefresh</name>
<message>
<location filename="../../Command.cpp" line="131"/>
<source>Web</source>
<translation type="unfinished">Web</translation>
</message>
<message>
<location filename="../../Command.cpp" line="132"/>
<location filename="../../Command.cpp" line="133"/>
<source>Refresh web page</source>
<translation type="unfinished">Refresh web page</translation>
</message>
</context>
<context>
<name>CmdWebBrowserStop</name>
<message>
<location filename="../../Command.cpp" line="158"/>
<source>Web</source>
<translation type="unfinished">Web</translation>
</message>
<message>
<location filename="../../Command.cpp" line="159"/>
<source>Stop loading</source>
<translation type="unfinished">Stop loading</translation>
</message>
<message>
<location filename="../../Command.cpp" line="160"/>
<source>Stop the current loading</source>
<translation type="unfinished">Stop the current loading</translation>
</message>
</context>
<context>
<name>CmdWebBrowserZoomIn</name>
<message>
<location filename="../../Command.cpp" line="187"/>
<source>Web</source>
<translation type="unfinished">Web</translation>
</message>
<message>
<location filename="../../Command.cpp" line="188"/>
<source>Zoom in</source>
<translation type="unfinished">Zoom in</translation>
</message>
<message>
<location filename="../../Command.cpp" line="189"/>
<source>Zoom into the page</source>
<translation type="unfinished">Zoom into the page</translation>
</message>
</context>
<context>
<name>CmdWebBrowserZoomOut</name>
<message>
<location filename="../../Command.cpp" line="215"/>
<source>Web</source>
<translation type="unfinished">Web</translation>
</message>
<message>
<location filename="../../Command.cpp" line="216"/>
<source>Zoom out</source>
<translation type="unfinished">Zoom out</translation>
</message>
<message>
<location filename="../../Command.cpp" line="217"/>
<source>Zoom out of the page</source>
<translation type="unfinished">Zoom out of the page</translation>
</message>
</context>
<context>
<name>CmdWebOpenWebsite</name>
<message>
<location filename="../../Command.cpp" line="50"/>
<source>Web</source>
<translation type="unfinished">Web</translation>
</message>
<message>
<location filename="../../Command.cpp" line="51"/>
<source>Open website...</source>
<translation type="unfinished">Open website...</translation>
</message>
<message>
<location filename="../../Command.cpp" line="52"/>
<source>Opens a website in FreeCAD</source>
<translation type="unfinished">Opens a website in FreeCAD</translation>
</message>
</context>
<context>
<name>QObject</name>
<message>
<location filename="../../AppWebGui.cpp" line="78"/>
<location filename="../../BrowserView.cpp" line="348"/>
<source>Browser</source>
<translation type="unfinished">Browser</translation>
</message>
<message>
<location filename="../../BrowserView.cpp" line="244"/>
<source>File does not exist!</source>
<translation type="unfinished">File does not exist!</translation>
</message>
</context>
<context>
<name>WebGui::BrowserView</name>
<message>
<location filename="../../BrowserView.cpp" line="239"/>
<source>Error</source>
<translation>Pogreška</translation>
</message>
<message>
<location filename="../../BrowserView.cpp" line="319"/>
<source>Loading %1...</source>
<translation type="unfinished">Loading %1...</translation>
</message>
</context>
<context>
<name>WebGui::WebView</name>
<message>
<location filename="../../BrowserView.cpp" line="121"/>
<source>Open in External Browser</source>
<translation type="unfinished">Open in External Browser</translation>
</message>
<message>
<location filename="../../BrowserView.cpp" line="125"/>
<source>Open in new window</source>
<translation type="unfinished">Open in new window</translation>
</message>
</context>
<context>
<name>Workbench</name>
<message>
<location filename="../../Workbench.cpp" line="46"/>
<source>Navigation</source>
<translation type="unfinished">Navigation</translation>
</message>
</context>
</TS>
| wood-galaxy/FreeCAD | src/Mod/Web/Gui/Resources/translations/Web_hr.ts | TypeScript | lgpl-2.1 | 6,073 |
'use strict';
/**
* @module br/util/Number
*/
var StringUtility = require('br/util/StringUtility');
/**
* @class
* @alias module:br/util/Number
*
* @classdesc
* Utility methods for numbers
*/
function NumberUtil() {
}

/**
 * Returns a numeric representation of the sign on the number.
 *
 * @param {Number} n The number (or a number as a string)
 * @return {Number} 1 for positive values, -1 for negative values, or the original value for zero and non-numeric values.
 */
NumberUtil.sgn = function(n) {
	return n > 0 ? 1 : n < 0 ? -1 : n;
};

/**
 * @param {Object} n
 * @return {boolean} true for numbers and their string representations and false for other values including non-numeric
 *  strings, null, Infinity, NaN.
 *
 * Strings are trimmed before testing; the `n - n === 0` check rejects both
 * NaN (NaN - NaN is NaN) and Infinity (Infinity - Infinity is NaN).
 */
NumberUtil.isNumber = function(n) {
	if (typeof n === 'string') {
		n = n.trim();
	}
	return n != null && n !== '' && n - n === 0;
};

/**
 * Formats the number to the specified number of decimal places.
 *
 * @param {Number} n The number (or a number as a string).
 * @param {Number} dp The number of decimal places.
 * @return {String} The formatted number; the input is returned unchanged
 *  when it is not numeric or dp is null.
 */
NumberUtil.toFixed = function(n, dp) {
	//return this.isNumber(n) && dp != null ? Number(n).toFixed(dp) : n;
	//Workaround for IE8/7/6 where toFixed returns 0 for (0.5).toFixed(0) and 0.0 for (0.05).toFixed(1)
	// The value is made positive, scaled/rounded manually, then the sign is
	// restored, so the browser's buggy rounding in toFixed is never relied on.
	if (this.isNumber(n) && dp != null) {
		var sgn = NumberUtil.sgn(n);
		n = sgn * n;
		var nFixed = (Math.round(Math.pow(10, dp)*n)/Math.pow(10, dp)).toFixed(dp);
		return (sgn * nFixed).toFixed(dp);
	}
	return n;
};

/**
 * Formats the number to the specified number of significant figures. This fixes the bugs in the native Number function
 * of the same name that are prevalent in various browsers. If the number of significant figures is less than one,
 * then the function has no effect (the input is returned unchanged).
 *
 * @param {Number} n The number (or a number as a string).
 * @param {Number} sf The number of significant figures.
 * @return {String} The formatted number.
 */
NumberUtil.toPrecision = function(n, sf) {
	return this.isNumber(n) && sf > 0 ? Number(n).toPrecision(sf) : n;
};

/**
 * Formats the number to the specified number of decimal places, omitting any trailing zeros.
 *
 * @param {Number} n The number (or a number as a string).
 * @param {Number} rounding The number of decimal places to round.
 * @return {Number} The rounded number. (Unlike toFixed, this returns a
 *  Number — the result of `sgn * nRounded` — not a String.)
 */
NumberUtil.toRounded = function(n, rounding) {
	//return this.isNumber(n) && rounding != null ? String(Number(Number(n).toFixed(rounding))) : n;
	//Workaround for IE8/7/6 where toFixed returns 0 for (0.5).toFixed(0) and 0.0 for (0.05).toFixed(1)
	if (this.isNumber(n) && rounding != null) {
		var sgn = NumberUtil.sgn(n);
		n = sgn * n;
		var nRounded = (Math.round(Math.pow(10, rounding)*n)/Math.pow(10, rounding)).toFixed(rounding);
		return sgn * nRounded;
	}
	return n;
};

/**
 * Logarithm to base 10.
 *
 * @param {Number} n The number (or a number as a string).
 * @return {Number} The logarithm to base 10.
 */
NumberUtil.log10 = function(n) {
	return Math.log(n) / Math.LN10;
};

/**
 * Rounds a floating point number.
 *
 * The number of decimal places is chosen so roughly 13 significant digits
 * are kept (13 minus the number of integer digits), trimming floating-point
 * noise from the tail of the value.
 *
 * @param {Number} n The number (or a number as a string).
 * @return {Number} The rounded number; non-numeric input is returned unchanged.
 */
NumberUtil.round = function(n) {
	var dp = 13 - (n ? Math.ceil(this.log10(Math.abs(n))) : 0);
	return this.isNumber(n) ? Number(Number(n).toFixed(dp)) : n;
};

/**
 * Pads the integer part of a number with zeros to reach the specified length.
 * For negative values the minus sign is emitted before the zero padding.
 *
 * @param {Number} value The number (or a number as a string).
 * @param {Number} numLength The required length of the number.
 * @return {String} The formatted number; non-numeric input is returned unchanged.
 */
NumberUtil.pad = function(value, numLength) {
	if (this.isNumber(value)) {
		var nAbsolute = Math.abs(value);
		var sInteger = new String(parseInt(nAbsolute));
		var nSize = numLength || 0;
		var sSgn = value < 0 ? "-" : "";
		value = sSgn + StringUtility.repeat("0", nSize - sInteger.length) + nAbsolute;
	}
	return value;
};

/**
 * Counts the amount of decimal places within a number.
 * Also supports scientific notations: the exponent (e.g. "1e-3") is
 * subtracted from the count of digits after the decimal point.
 *
 * @param {Number} n The number (or a number as a string).
 * @return {Number} The number of decimal places (never negative).
 */
NumberUtil.decimalPlaces = function(n) {
	var match = (''+n).match(/(?:\.(\d+))?(?:[eE]([+-]?\d+))?$/);
	if (!match) {
		return 0;
	}
	return Math.max(
		0,
		// Number of digits right of decimal point.
		(match[1] ? match[1].length : 0)
		// Adjust for scientific notation.
		- (match[2] ? +match[2] : 0));
}

module.exports = NumberUtil;
| andreoid/testing | brjs-sdk/workspace/sdk/libs/javascript/br-util/src/br/util/Number.js | JavaScript | lgpl-3.0 | 4,499 |
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.hraven.etl;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicBoolean;

import org.apache.hadoop.mapreduce.Job;
/**
* Can be used to run a single Hadoop job. The {@link #call()} method will block
* until the job is complete and will return a non-null return value indicating
* the success of the Hadoop job.
*/
public class JobRunner implements Callable<Boolean> {

  /**
   * Guard that flips to true on the first invocation of {@link #call()}.
   * An AtomicBoolean compare-and-set (rather than the previous volatile
   * check-then-set, which is not atomic) keeps the "run only once"
   * guarantee even when call() is invoked concurrently from several
   * threads.
   */
  private final AtomicBoolean isCalled = new AtomicBoolean(false);

  /** Hadoop job to run; may be null, in which case call() returns false. */
  private final Job job;

  /**
   * Post processing step that gets called upon successful completion of the
   * Hadoop job.
   */
  private final Callable<Boolean> postProcessor;

  /**
   * Constructor
   *
   * @param job
   *          to job to run in the call method.
   * @param postProcessor
   *          Post processing step that gets called upon successful completion
   *          of the Hadoop job. Can be null, in which case it will be skipped.
   *          Final results will be the return value of this final processing
   *          step.
   */
  public JobRunner(Job job, Callable<Boolean> postProcessor) {
    this.job = job;
    this.postProcessor = postProcessor;
  }

  /*
   * (non-Javadoc)
   *
   * @see java.util.concurrent.Callable#call()
   */
  @Override
  public Boolean call() throws Exception {
    // Guard to make sure we get called only once: only the first caller
    // wins the compare-and-set; every later (or concurrent) caller bails.
    if (!isCalled.compareAndSet(false, true)) {
      return false;
    }

    if (job == null) {
      return false;
    }

    boolean success = false;
    // Schedule the job on the JobTracker and wait for it to complete.
    try {
      success = job.waitForCompletion(true);
    } catch (InterruptedException interuptus) {
      // We're told to stop, so honor that.
      // And restore interrupt status.
      Thread.currentThread().interrupt();
      // Indicate that we should NOT run the postProcessor.
      success = false;
    }

    if (success && (postProcessor != null)) {
      success = postProcessor.call();
    }
    return success;
  }
}
| ogre0403/hraven | hraven-etl/src/main/java/com/twitter/hraven/etl/JobRunner.java | Java | apache-2.0 | 2,525 |
//// [tests/cases/compiler/moduleAugmentationsImports4.ts] ////
//// [a.ts]
export class A {}
//// [b.ts]
export class B {x: number;}
//// [c.d.ts]
declare module "C" {
class Cls {y: string; }
}
//// [d.d.ts]
declare module "D" {
import {A} from "a";
import {B} from "b";
module "a" {
interface A {
getB(): B;
}
}
}
//// [e.d.ts]
/// <reference path="c.d.ts"/>
declare module "E" {
import {A} from "a";
import {Cls} from "C";
module "a" {
interface A {
getCls(): Cls;
}
}
}
//// [main.ts]
/// <reference path="d.d.ts"/>
/// <reference path="e.d.ts"/>
import {A} from "./a";
import "D";
import "E";
let a: A;
let b = a.getB().x.toFixed();
let c = a.getCls().y.toLowerCase();
//// [f.js]
define("a", ["require", "exports"], function (require, exports) {
"use strict";
exports.__esModule = true;
var A = /** @class */ (function () {
function A() {
}
return A;
}());
exports.A = A;
});
define("b", ["require", "exports"], function (require, exports) {
"use strict";
exports.__esModule = true;
var B = /** @class */ (function () {
function B() {
}
return B;
}());
exports.B = B;
});
define("main", ["require", "exports", "D", "E"], function (require, exports) {
"use strict";
exports.__esModule = true;
var a;
var b = a.getB().x.toFixed();
var c = a.getCls().y.toLowerCase();
});
//// [f.d.ts]
/// <reference path="tests/cases/compiler/d.d.ts" />
/// <reference path="tests/cases/compiler/e.d.ts" />
declare module "a" {
export class A {
}
}
declare module "b" {
export class B {
x: number;
}
}
declare module "main" {
import "D";
import "E";
}
| weswigham/TypeScript | tests/baselines/reference/moduleAugmentationsImports4.js | JavaScript | apache-2.0 | 1,844 |
"""
Component-level Specification
This module is called component to mirror organization of storm package.
"""
from ..storm.component import Component
class Specification(object):
    """Describes how a single Storm component is wired into a topology.

    Holds the ``Component`` subclass together with the name and the
    parallelism hint it should be given.
    """

    def __init__(self, component_cls, name=None, parallelism=1):
        """Validate and store the component settings.

        :param component_cls: subclass of ``Component`` to run.
        :param name: optional name for the component within the topology.
        :param parallelism: number of parallel executors; must be an
                            integer greater than 0.
        :raises TypeError: if ``component_cls`` is not a ``Component``
                           subclass.
        :raises ValueError: if ``parallelism`` is not a positive integer.
        """
        if not issubclass(component_cls, Component):
            raise TypeError("Invalid component: {}".format(component_cls))
        if not isinstance(parallelism, int) or parallelism < 1:
            raise ValueError("Parallelism must be a integer greater than 0")

        self.component_cls = component_cls
        self.name = name
        self.parallelism = parallelism

    def resolve_dependencies(self, specifications):
        """Allows specification subclasses to resolve any dependencies
        that they may have on other specifications.

        The base implementation does nothing.

        :param specifications: all of the specification objects for this
                               topology.
        :type specifications: dict
        """
        pass
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.fuseki.servlets;
import org.apache.jena.query.Dataset ;
import org.apache.jena.query.DatasetFactory ;
import org.apache.jena.query.Query ;
import org.apache.jena.sparql.core.DatasetDescription ;
import org.apache.jena.sparql.core.DatasetGraph ;
import org.apache.jena.sparql.core.DynamicDatasets ;
/** SPARQL query servlet that executes queries over the service's active dataset. */
public class SPARQL_QueryDataset extends SPARQL_Query
{
    public SPARQL_QueryDataset(boolean verbose) { super(); }

    public SPARQL_QueryDataset() { this(false); }

    /** No request-level validation beyond what the base class performs. */
    @Override
    protected void validateRequest(HttpAction action) { }

    /** Any well-formed query is acceptable for this service. */
    @Override
    protected void validateQuery(HttpAction action, Query query) { }

    /**
     * Chooses the dataset for query execution: the action's active dataset,
     * narrowed to a dynamic dataset view when the protocol request carried a
     * dataset description (default-graph-uri / named-graph-uri parameters).
     */
    @Override
    protected Dataset decideDataset(HttpAction action, Query query, String queryStringLog)
    {
        DatasetGraph base = action.getActiveDSG();
        DatasetDescription description = getDatasetDescription(action);
        if ( description == null )
            return DatasetFactory.create(base);
        return DatasetFactory.create(DynamicDatasets.dynamicDataset(description, base, false));
    }
}
| adrapereira/jena | jena-fuseki2/jena-fuseki-core/src/main/java/org/apache/jena/fuseki/servlets/SPARQL_QueryDataset.java | Java | apache-2.0 | 2,072 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.basic;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.search.MultiSearchResponse;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.MatchQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.functionscore.ScriptScoreFunctionBuilder;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.filter.Filter;
import org.elasticsearch.search.aggregations.bucket.global.Global;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import java.io.IOException;
import java.util.Collections;
import java.util.Set;
import java.util.TreeSet;
import static org.elasticsearch.action.search.SearchType.DFS_QUERY_THEN_FETCH;
import static org.elasticsearch.action.search.SearchType.QUERY_THEN_FETCH;
import static org.elasticsearch.client.Requests.createIndexRequest;
import static org.elasticsearch.client.Requests.searchRequest;
import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.startsWith;
/**
 * Multi-node integration tests for the basic search flows: scrolling with
 * QUERY_THEN_FETCH and DFS_QUERY_THEN_FETCH, sorting, paging via from/size,
 * simple aggregations, and failure handling for malformed queries in both
 * single and multi-search requests.
 */
public class TransportTwoNodesSearchIT extends ESIntegTestCase {
    // No replicas: each document lives on exactly one primary shard.
    @Override
    protected int numberOfReplicas() {
        return 0;
    }
    // Indexes 100 docs into "test" using the default shard count; returns their ids.
    private Set<String> prepareData() throws Exception {
        return prepareData(-1);
    }
    // Creates the "test" index (with an explicit shard count when numShards > 0),
    // indexes docs with ids "0".."99", and returns the full id set for later
    // comparison against collected search hits.
    private Set<String> prepareData(int numShards) throws Exception {
        Set<String> fullExpectedIds = new TreeSet<>();
        Settings.Builder settingsBuilder = Settings.builder()
            .put(indexSettings());
        if (numShards > 0) {
            settingsBuilder.put(SETTING_NUMBER_OF_SHARDS, numShards);
        }
        client().admin().indices().create(createIndexRequest("test")
            .settings(settingsBuilder)
            .simpleMapping("foo", "type=geo_point"))
            .actionGet();
        ensureGreen();
        for (int i = 0; i < 100; i++) {
            index(Integer.toString(i), "test", i);
            fullExpectedIds.add(Integer.toString(i));
        }
        refresh();
        return fullExpectedIds;
    }
    // Indexes a single document whose fields are built by source(id, nameValue, age).
    private void index(String id, String nameValue, int age) throws IOException {
        client().index(Requests.indexRequest("test").id(id).source(source(id, nameValue, age))).actionGet();
    }
    // Builds the doc body; "multi" holds nameValue repeated (age + 1) times, so the
    // term frequency of nameValue in "multi" grows with age.
    private XContentBuilder source(String id, String nameValue, int age) throws IOException {
        StringBuilder multi = new StringBuilder().append(nameValue);
        for (int i = 0; i < age; i++) {
            multi.append(" ").append(nameValue);
        }
        return jsonBuilder().startObject()
            .field("id", id)
            .field("nid", Integer.parseInt(id))
            .field("name", nameValue + id)
            .field("age", age)
            .field("multi", multi.toString())
            .endObject();
    }
    // With DFS term statistics and uniform norms, tf dominates the score, so the
    // scrolled hits must come back in strictly descending id order; also checks
    // the distributed n/N term statistics reported in the explanation.
    public void testDfsQueryThenFetch() throws Exception {
        Settings.Builder settingsBuilder = Settings.builder()
            .put(indexSettings());
        client().admin().indices().create(createIndexRequest("test")
            .settings(settingsBuilder))
            .actionGet();
        ensureGreen();
        // we need to have age (ie number of repeats of "test" term) high enough
        // to produce the same 8-bit norm for all docs here, so that
        // the tf is basically the entire score (assuming idf is fixed, which
        // it should be if dfs is working correctly)
        // With the current way of encoding norms, every length between 1048 and 1176
        // are encoded into the same byte
        for (int i = 1048; i < 1148; i++) {
            index(Integer.toString(i - 1048), "test", i);
        }
        refresh();
        int total = 0;
        SearchResponse searchResponse = client().prepareSearch("test").setSearchType(DFS_QUERY_THEN_FETCH)
            .setQuery(termQuery("multi", "test")).setSize(60).setExplain(true).setScroll(TimeValue.timeValueSeconds(30)).get();
        while (true) {
            assertNoFailures(searchResponse);
            assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L));
            SearchHit[] hits = searchResponse.getHits().getHits();
            if (hits.length == 0) {
                break; // finished
            }
            for (int i = 0; i < hits.length; ++i) {
                SearchHit hit = hits[i];
                assertThat(hit.getExplanation(), notNullValue());
                assertThat(hit.getExplanation().getDetails().length, equalTo(1));
                assertThat(hit.getExplanation().getDetails()[0].getDetails().length, equalTo(3));
                assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails().length, equalTo(2));
                assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails()[0].getDescription(),
                    startsWith("n,"));
                assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails()[0].getValue(),
                    equalTo(100L));
                assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails()[1].getDescription(),
                    startsWith("N,"));
                assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails()[1].getValue(),
                    equalTo(100L));
                assertThat("id[" + hit.getId() + "] -> " + hit.getExplanation().toString(), hit.getId(),
                    equalTo(Integer.toString(100 - total - i - 1)));
            }
            total += hits.length;
            searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueSeconds(30))
                .get();
        }
        clearScroll(searchResponse.getScrollId());
        assertEquals(100, total);
    }
    // Same DFS scroll, but sorted ascending by "age": ids must come back 0..99.
    public void testDfsQueryThenFetchWithSort() throws Exception {
        prepareData();
        int total = 0;
        SearchResponse searchResponse = client().prepareSearch("test").setSearchType(DFS_QUERY_THEN_FETCH)
            .setQuery(termQuery("multi", "test")).setSize(60).setExplain(true).addSort("age", SortOrder.ASC)
            .setScroll(TimeValue.timeValueSeconds(30)).get();
        while (true) {
            assertNoFailures(searchResponse);
            assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L));
            SearchHit[] hits = searchResponse.getHits().getHits();
            if (hits.length == 0) {
                break; // finished
            }
            for (int i = 0; i < hits.length; ++i) {
                SearchHit hit = hits[i];
                assertThat(hit.getExplanation(), notNullValue());
                assertThat(hit.getExplanation().getDetails().length, equalTo(1));
                assertThat(hit.getExplanation().getDetails()[0].getDetails().length, equalTo(3));
                assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails().length, equalTo(2));
                assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails()[0].getDescription(),
                    startsWith("n,"));
                assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails()[0].getValue(),
                    equalTo(100L));
                assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails()[1].getDescription(),
                    startsWith("N,"));
                assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails()[1].getValue(),
                    equalTo(100L));
                assertThat("id[" + hit.getId() + "]", hit.getId(), equalTo(Integer.toString(total + i)));
            }
            total += hits.length;
            searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueSeconds(30)).get();
        }
        clearScroll(searchResponse.getScrollId());
        assertEquals(100, total);
    }
    // QUERY_THEN_FETCH scroll sorted descending by "nid": ids come back 99..0.
    public void testQueryThenFetch() throws Exception {
        prepareData();
        int total = 0;
        SearchResponse searchResponse = client().prepareSearch("test").setSearchType(QUERY_THEN_FETCH).setQuery(termQuery("multi", "test"))
            .setSize(60).setExplain(true).addSort("nid", SortOrder.DESC).setScroll(TimeValue.timeValueSeconds(30)).get();
        while (true) {
            assertNoFailures(searchResponse);
            assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L));
            SearchHit[] hits = searchResponse.getHits().getHits();
            if (hits.length == 0) {
                break; // finished
            }
            for (int i = 0; i < hits.length; ++i) {
                SearchHit hit = hits[i];
                assertThat(hit.getExplanation(), notNullValue());
                assertThat("id[" + hit.getId() + "]", hit.getId(), equalTo(Integer.toString(100 - total - i - 1)));
            }
            total += hits.length;
            searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueSeconds(30)).get();
        }
        clearScroll(searchResponse.getScrollId());
        assertEquals(100, total);
    }
    // Two from/size pages (0-59, 60-99) must together cover exactly the 100 indexed ids.
    public void testQueryThenFetchWithFrom() throws Exception {
        Set<String> fullExpectedIds = prepareData();
        SearchSourceBuilder source = searchSource()
            .query(matchAllQuery())
            .explain(true);
        Set<String> collectedIds = new TreeSet<>();
        SearchResponse searchResponse = client().search(searchRequest("test").source(source.from(0).size(60)).searchType(QUERY_THEN_FETCH))
            .actionGet();
        assertNoFailures(searchResponse);
        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L));
        assertThat(searchResponse.getHits().getHits().length, equalTo(60));
        for (int i = 0; i < 60; i++) {
            SearchHit hit = searchResponse.getHits().getHits()[i];
            collectedIds.add(hit.getId());
        }
        searchResponse = client().search(searchRequest("test").source(source.from(60).size(60)).searchType(QUERY_THEN_FETCH)).actionGet();
        assertNoFailures(searchResponse);
        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L));
        assertThat(searchResponse.getHits().getHits().length, equalTo(40));
        for (int i = 0; i < 40; i++) {
            SearchHit hit = searchResponse.getHits().getHits()[i];
            collectedIds.add(hit.getId());
        }
        assertThat(collectedIds, equalTo(fullExpectedIds));
    }
    // Default search type with ascending "age" sort: scrolled ids come back 0..99.
    public void testQueryThenFetchWithSort() throws Exception {
        prepareData();
        int total = 0;
        SearchResponse searchResponse = client().prepareSearch("test").setQuery(termQuery("multi", "test")).setSize(60).setExplain(true)
            .addSort("age", SortOrder.ASC).setScroll(TimeValue.timeValueSeconds(30)).get();
        while (true) {
            assertNoFailures(searchResponse);
            assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L));
            SearchHit[] hits = searchResponse.getHits().getHits();
            if (hits.length == 0) {
                break; // finished
            }
            for (int i = 0; i < hits.length; ++i) {
                SearchHit hit = hits[i];
                assertThat(hit.getExplanation(), notNullValue());
                assertThat("id[" + hit.getId() + "]", hit.getId(), equalTo(Integer.toString(total + i)));
            }
            total += hits.length;
            searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueSeconds(30)).get();
        }
        clearScroll(searchResponse.getScrollId());
        assertEquals(100, total);
    }
    // A global aggregation with a nested filter counts all 100 docs, while the
    // query-scoped "test1" filter (name == "test1") matches exactly one.
    public void testSimpleFacets() throws Exception {
        prepareData();
        SearchSourceBuilder sourceBuilder = searchSource()
            .query(termQuery("multi", "test"))
            .from(0).size(20).explain(true)
            .aggregation(AggregationBuilders.global("global").subAggregation(
                AggregationBuilders.filter("all", termQuery("multi", "test"))))
            .aggregation(AggregationBuilders.filter("test1", termQuery("name", "test1")));
        SearchResponse searchResponse = client().search(searchRequest("test").source(sourceBuilder)).actionGet();
        assertNoFailures(searchResponse);
        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L));
        Global global = searchResponse.getAggregations().get("global");
        Filter all = global.getAggregations().get("all");
        Filter test1 = searchResponse.getAggregations().get("test1");
        assertThat(test1.getDocCount(), equalTo(1L));
        assertThat(all.getDocCount(), equalTo(100L));
    }
    // A match query against the geo_point field "foo" must fail on every shard and
    // surface as a SearchPhaseExecutionException.
    public void testFailedSearchWithWrongQuery() throws Exception {
        prepareData();
        NumShards test = getNumShards("test");
        logger.info("Start Testing failed search with wrong query");
        try {
            SearchResponse searchResponse = client().search(
                searchRequest("test").source(new SearchSourceBuilder().query(new MatchQueryBuilder("foo", "biz")))).actionGet();
            assertThat(searchResponse.getTotalShards(), equalTo(test.numPrimaries));
            assertThat(searchResponse.getSuccessfulShards(), equalTo(0));
            assertThat(searchResponse.getFailedShards(), equalTo(test.numPrimaries));
            fail("search should fail");
        } catch (ElasticsearchException e) {
            assertThat(e.unwrapCause(), instanceOf(SearchPhaseExecutionException.class));
            // all is well
        }
        logger.info("Done Testing failed search");
    }
    // A "from" beyond the result set is not an error: all shards succeed and the
    // response simply contains no hits, for both search types.
    public void testFailedSearchWithWrongFrom() throws Exception {
        prepareData();
        NumShards test = getNumShards("test");
        logger.info("Start Testing failed search with wrong from");
        SearchSourceBuilder source = searchSource()
            .query(termQuery("multi", "test"))
            .from(1000).size(20).explain(true);
        SearchResponse response = client().search(searchRequest("test").searchType(DFS_QUERY_THEN_FETCH).source(source)).actionGet();
        assertThat(response.getHits().getHits().length, equalTo(0));
        assertThat(response.getTotalShards(), equalTo(test.numPrimaries));
        assertThat(response.getSuccessfulShards(), equalTo(test.numPrimaries));
        assertThat(response.getFailedShards(), equalTo(0));
        response = client().search(searchRequest("test").searchType(QUERY_THEN_FETCH).source(source)).actionGet();
        assertNoFailures(response);
        assertThat(response.getHits().getHits().length, equalTo(0));
        response = client().search(searchRequest("test").searchType(DFS_QUERY_THEN_FETCH).source(source)).actionGet();
        assertNoFailures(response);
        assertThat(response.getHits().getHits().length, equalTo(0));
        response = client().search(searchRequest("test").searchType(DFS_QUERY_THEN_FETCH).source(source)).actionGet();
        assertNoFailures(response);
        assertThat(response.getHits().getHits().length, equalTo(0));
        logger.info("Done Testing failed search");
    }
    // One bad item in a multi-search must not poison the others: item 0 fails,
    // items 1 and 2 return their hits normally.
    public void testFailedMultiSearchWithWrongQuery() throws Exception {
        prepareData();
        logger.info("Start Testing failed multi search with a wrong query");
        MultiSearchResponse response = client().prepareMultiSearch()
            .add(client().prepareSearch("test").setQuery(new MatchQueryBuilder("foo", "biz")))
            .add(client().prepareSearch("test").setQuery(QueryBuilders.termQuery("nid", 2)))
            .add(client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()))
            .get();
        assertThat(response.getResponses().length, equalTo(3));
        assertThat(response.getResponses()[0].getFailureMessage(), notNullValue());
        assertThat(response.getResponses()[1].getFailureMessage(), nullValue());
        assertThat(response.getResponses()[1].getResponse().getHits().getHits().length, equalTo(1));
        assertThat(response.getResponses()[2].getFailureMessage(), nullValue());
        assertThat(response.getResponses()[2].getResponse().getHits().getHits().length, equalTo(10));
        logger.info("Done Testing failed search");
    }
    // Same isolation guarantee when the failure comes from a bogus script inside a
    // function_score query rather than from query parsing.
    public void testFailedMultiSearchWithWrongQueryWithFunctionScore() throws Exception {
        prepareData();
        logger.info("Start Testing failed multi search with a wrong query");
        MultiSearchResponse response = client().prepareMultiSearch()
            // Add custom score query with bogus script
            .add(client().prepareSearch("test").setQuery(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("nid", 1),
                new ScriptScoreFunctionBuilder(new Script(ScriptType.INLINE, "bar", "foo", Collections.emptyMap())))))
            .add(client().prepareSearch("test").setQuery(QueryBuilders.termQuery("nid", 2)))
            .add(client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()))
            .get();
        assertThat(response.getResponses().length, equalTo(3));
        assertThat(response.getResponses()[0].getFailureMessage(), notNullValue());
        assertThat(response.getResponses()[1].getFailureMessage(), nullValue());
        assertThat(response.getResponses()[1].getResponse().getHits().getHits().length, equalTo(1));
        assertThat(response.getResponses()[2].getFailureMessage(), nullValue());
        assertThat(response.getResponses()[2].getResponse().getHits().getHits().length, equalTo(10));
        logger.info("Done Testing failed search");
    }
}
| HonzaKral/elasticsearch | server/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java | Java | apache-2.0 | 19,747 |
/*
* Copyright 2014 Click Travel Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.clicktravel.cheddar.rest.exception.mapper.cdm1;
import static com.clicktravel.cheddar.rest.exception.mapper.cdm1.JsonProcessingExceptionMapperUtils.buildErrorResponse;
import javax.annotation.Priority;
import javax.ws.rs.core.Response;
import javax.ws.rs.ext.ExceptionMapper;
import javax.ws.rs.ext.Provider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonParseException;
/**
 * Maps Jackson {@link JsonParseException}s (malformed JSON in a request body)
 * to an HTTP 400 Bad Request carrying a structured error entity.
 */
@Provider
@Priority(Integer.MAX_VALUE)
public class JsonParseExceptionMapper implements ExceptionMapper<JsonParseException> {

    private final Logger logger = LoggerFactory.getLogger(getClass());

    @Override
    public Response toResponse(final JsonParseException exception) {
        // Malformed client input is expected noise, so only log at debug level.
        if (logger.isDebugEnabled()) {
            logger.debug(exception.getMessage(), exception);
        }
        return Response.status(Response.Status.BAD_REQUEST)
                .entity(buildErrorResponse(exception))
                .build();
    }
}
| clicktravel-james/Cheddar | cheddar/cheddar-rest/src/main/java/com/clicktravel/cheddar/rest/exception/mapper/cdm1/JsonParseExceptionMapper.java | Java | apache-2.0 | 1,551 |
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.psi.impl;
import com.intellij.lang.ASTFactory;
import com.intellij.lang.Commenter;
import com.intellij.lang.Language;
import com.intellij.lang.LanguageCommenters;
import com.intellij.openapi.fileTypes.LanguageFileType;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.impl.source.DummyHolderFactory;
import com.intellij.psi.impl.source.SourceTreeToPsiMap;
import com.intellij.psi.impl.source.tree.FileElement;
import com.intellij.psi.impl.source.tree.LeafElement;
import com.intellij.psi.impl.source.tree.TreeElement;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
/**
 * Default {@link PsiParserFacade} implementation: fabricates standalone whitespace
 * and comment PSI elements by parsing small dummy files/holders for a project.
 */
public final class PsiParserFacadeImpl implements PsiParserFacade {
  private final PsiManagerEx myManager;
  public PsiParserFacadeImpl(@NotNull Project project) {
    myManager = PsiManagerEx.getInstanceEx(project);
  }
  // Creates a WHITE_SPACE leaf inside a fresh DummyHolder (the holder supplies the
  // CharTable used to intern the text) and marks the result as generated so later
  // formatting passes may reformat it.
  @Override
  @NotNull
  public PsiElement createWhiteSpaceFromText(@NotNull @NonNls String text) throws IncorrectOperationException {
    final FileElement holderElement = DummyHolderFactory.createHolder(myManager, null).getTreeElement();
    final LeafElement newElement = ASTFactory.leaf(TokenType.WHITE_SPACE, holderElement.getCharTable().intern(text));
    holderElement.rawAddChildren(newElement);
    GeneratedMarkerVisitor.markGenerated(newElement.getPsi());
    return newElement.getPsi();
  }
  // Convenience overload: resolves the file type to its language and delegates.
  @Override
  @NotNull
  public PsiComment createLineCommentFromText(@NotNull LanguageFileType fileType,
                                              @NotNull String text) throws IncorrectOperationException {
    return createLineCommentFromText(fileType.getLanguage(), text);
  }
  // Builds "<line-comment-prefix><text>", parses it as a dummy file of the given
  // language, and extracts the resulting PsiComment. Fails for languages whose
  // Commenter defines no line-comment prefix.
  @Override
  @NotNull
  public PsiComment createLineCommentFromText(@NotNull final Language language,
                                              @NotNull final String text) throws IncorrectOperationException {
    Commenter commenter = LanguageCommenters.INSTANCE.forLanguage(language);
    assert commenter != null;
    String prefix = commenter.getLineCommentPrefix();
    if (prefix == null) {
      throw new IncorrectOperationException("No line comment prefix defined for language " + language.getID());
    }
    PsiFile aFile = createDummyFile(language, prefix + text);
    return findPsiCommentChild(aFile);
  }
  // Same idea for block comments: wraps text in the language's block comment
  // prefix/suffix and parses it. Asserts the language defines both delimiters.
  @NotNull
  @Override
  public PsiComment createBlockCommentFromText(@NotNull Language language,
                                               @NotNull String text) throws IncorrectOperationException {
    Commenter commenter = LanguageCommenters.INSTANCE.forLanguage(language);
    assert commenter != null : language;
    final String blockCommentPrefix = commenter.getBlockCommentPrefix();
    final String blockCommentSuffix = commenter.getBlockCommentSuffix();
    assert blockCommentPrefix != null && blockCommentSuffix != null;
    PsiFile aFile = createDummyFile(language, blockCommentPrefix + text + blockCommentSuffix);
    return findPsiCommentChild(aFile);
  }
  // Prefers a line comment when the language supports one, otherwise falls back to
  // a block comment; at least one of the two forms must be available.
  @Override
  @NotNull
  public PsiComment createLineOrBlockCommentFromText(@NotNull Language language,
                                                     @NotNull String text) throws IncorrectOperationException {
    Commenter commenter = LanguageCommenters.INSTANCE.forLanguage(language);
    assert commenter != null : language;
    String prefix = commenter.getLineCommentPrefix();
    final String blockCommentPrefix = commenter.getBlockCommentPrefix();
    final String blockCommentSuffix = commenter.getBlockCommentSuffix();
    assert prefix != null || (blockCommentPrefix != null && blockCommentSuffix != null);
    PsiFile aFile = createDummyFile(language, prefix != null ? (prefix + text) : (blockCommentPrefix + text + blockCommentSuffix));
    return findPsiCommentChild(aFile);
  }
  // Extracts the first PsiComment parsed from the dummy file and re-homes its tree
  // into a new DummyHolder so the comment can be inserted into another PSI tree.
  private PsiComment findPsiCommentChild(PsiFile aFile) {
    PsiComment comment = PsiTreeUtil.findChildOfType(aFile, PsiComment.class);
    if (comment == null) {
      throw new IncorrectOperationException("Incorrect comment \"" + aFile.getText() + "\".");
    }
    DummyHolderFactory.createHolder(myManager, (TreeElement)SourceTreeToPsiMap.psiElementToTree(comment), null);
    return comment;
  }
  // Parses the given text as a throwaway file of the given language.
  private PsiFile createDummyFile(final Language language, String text) {
    return PsiFileFactory.getInstance(myManager.getProject()).createFileFromText("_Dummy_", language, text);
  }
}
| siosio/intellij-community | platform/core-impl/src/com/intellij/psi/impl/PsiParserFacadeImpl.java | Java | apache-2.0 | 4,617 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.values;
import static org.apache.beam.sdk.values.TypeDescriptors.integers;
import static org.apache.beam.sdk.values.TypeDescriptors.iterables;
import static org.apache.beam.sdk.values.TypeDescriptors.kvs;
import static org.apache.beam.sdk.values.TypeDescriptors.lists;
import static org.apache.beam.sdk.values.TypeDescriptors.sets;
import static org.apache.beam.sdk.values.TypeDescriptors.strings;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
import java.util.List;
import java.util.Set;
import org.hamcrest.CoreMatchers;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Tests for {@link TypeDescriptors}.
*/
@RunWith(JUnit4.class)
public class TypeDescriptorsTest {
  // Factory descriptors must equal the anonymous-subclass capture of the same type.
  @Test
  public void testTypeDescriptorsIterables() throws Exception {
    TypeDescriptor<Iterable<String>> descriptor = iterables(strings());
    assertEquals(descriptor, new TypeDescriptor<Iterable<String>>() {});
  }
  @Test
  public void testTypeDescriptorsSets() throws Exception {
    TypeDescriptor<Set<String>> descriptor = sets(strings());
    assertEquals(descriptor, new TypeDescriptor<Set<String>>() {});
  }
  @Test
  public void testTypeDescriptorsKV() throws Exception {
    TypeDescriptor<KV<String, Integer>> descriptor =
        kvs(strings(), integers());
    assertEquals(descriptor, new TypeDescriptor<KV<String, Integer>>() {});
  }
  // Descriptors with different element types must not compare equal.
  @Test
  public void testTypeDescriptorsLists() throws Exception {
    TypeDescriptor<List<String>> descriptor = lists(strings());
    assertEquals(descriptor, new TypeDescriptor<List<String>>() {});
    assertNotEquals(descriptor, new TypeDescriptor<List<Boolean>>() {});
  }
  // Nesting must be preserved: List<List<String>> differs from List<String>.
  @Test
  public void testTypeDescriptorsListsOfLists() throws Exception {
    TypeDescriptor<List<List<String>>> descriptor = lists(lists(strings()));
    assertEquals(descriptor, new TypeDescriptor<List<List<String>>>() {});
    assertNotEquals(descriptor, new TypeDescriptor<List<String>>() {});
    assertNotEquals(descriptor, new TypeDescriptor<List<Boolean>>() {});
  }
  // Two-parameter generic used to exercise extractFromTypeParameters below.
  private interface Generic<FooT, BarT> {}
  // Returns an instance whose FooT parameter is erased at the creation site;
  // only BarT=String is statically known.
  private static <ActualFooT> Generic<ActualFooT, String> typeErasedGeneric() {
    return new Generic<ActualFooT, String>() {};
  }
  // Resolves the first type parameter (FooT) of a Generic instance.
  private static <ActualFooT, ActualBarT> TypeDescriptor<ActualFooT> extractFooT(
      Generic<ActualFooT, ActualBarT> instance) {
    return TypeDescriptors.extractFromTypeParameters(
        instance,
        Generic.class,
        new TypeDescriptors.TypeVariableExtractor<
            Generic<ActualFooT, ActualBarT>, ActualFooT>() {});
  }
  // Resolves the second type parameter (BarT) of a Generic instance.
  private static <ActualFooT, ActualBarT> TypeDescriptor<ActualBarT> extractBarT(
      Generic<ActualFooT, ActualBarT> instance) {
    return TypeDescriptors.extractFromTypeParameters(
        instance,
        Generic.class,
        new TypeDescriptors.TypeVariableExtractor<
            Generic<ActualFooT, ActualBarT>, ActualBarT>() {});
  }
  // Resolves a composite type (KV<FooT, BarT>) built from both parameters.
  private static <ActualFooT, ActualBarT> TypeDescriptor<KV<ActualFooT, ActualBarT>> extractKV(
      Generic<ActualFooT, ActualBarT> instance) {
    return TypeDescriptors.extractFromTypeParameters(
        instance,
        Generic.class,
        new TypeDescriptors.TypeVariableExtractor<
            Generic<ActualFooT, ActualBarT>, KV<ActualFooT, ActualBarT>>() {});
  }
  // With fully reified type arguments, extraction recovers the concrete types.
  @Test
  public void testTypeDescriptorsTypeParameterOf() throws Exception {
    assertEquals(strings(), extractFooT(new Generic<String, Integer>() {}));
    assertEquals(integers(), extractBarT(new Generic<String, Integer>() {}));
    assertEquals(kvs(strings(), integers()), extractKV(new Generic<String, Integer>() {}));
  }
  // With an erased FooT, extraction yields the unresolved type variable while the
  // statically-known BarT still resolves to String.
  @Test
  public void testTypeDescriptorsTypeParameterOfErased() throws Exception {
    Generic<Integer, String> instance = TypeDescriptorsTest.typeErasedGeneric();
    TypeDescriptor<Integer> fooT = extractFooT(instance);
    assertNotNull(fooT);
    // Using toString() assertions because verifying the contents of a Type is very cumbersome,
    // and the expected types can not be easily constructed directly.
    assertEquals("ActualFooT", fooT.toString());
    assertEquals(strings(), extractBarT(instance));
    TypeDescriptor<KV<Integer, String>> kvT = extractKV(instance);
    assertNotNull(kvT);
    assertThat(kvT.toString(), CoreMatchers.containsString("KV<ActualFooT, java.lang.String>"));
  }
}
| jbonofre/beam | sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypeDescriptorsTest.java | Java | apache-2.0 | 5,295 |
/*
* Copyright 2015 Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.net.intent;
import java.util.List;
import java.util.Optional;
import com.google.common.annotations.Beta;
import org.onlab.packet.MplsLabel;
import org.onosproject.core.ApplicationId;
import org.onosproject.net.Path;
import org.onosproject.net.flow.TrafficSelector;
import org.onosproject.net.flow.TrafficTreatment;
import static com.google.common.base.Preconditions.checkNotNull;
/**
* Abstraction of explicit MPLS label-switched path.
*/
@Beta
public final class MplsPathIntent extends PathIntent {
    private final Optional<MplsLabel> ingressLabel;
    private final Optional<MplsLabel> egressLabel;
    /**
     * Creates a new MPLS path intent with the supplied ingress/egress
     * labels and using the specified explicit path.
     *
     * @param appId application identifier
     * @param selector traffic selector
     * @param treatment treatment
     * @param path traversed links
     * @param ingressLabel MPLS ingress label
     * @param egressLabel MPLS egress label
     * @param constraints optional list of constraints
     * @param priority priority to use for flows generated by this intent
     * @throws NullPointerException {@code path} is null
     */
    private MplsPathIntent(ApplicationId appId, TrafficSelector selector,
                           TrafficTreatment treatment, Path path, Optional<MplsLabel> ingressLabel,
                           Optional<MplsLabel> egressLabel, List<Constraint> constraints,
                           int priority) {
        super(appId, selector, treatment, path, constraints,
                priority);
        this.ingressLabel = checkNotNull(ingressLabel);
        this.egressLabel = checkNotNull(egressLabel);
    }
    /**
     * Returns a new MPLS path intent builder.
     *
     * @return MPLS path intent builder
     */
    public static Builder builder() {
        return new Builder();
    }
    /**
     * Builder of an MPLS path intent.
     */
    public static final class Builder extends PathIntent.Builder {
        private Optional<MplsLabel> ingressLabel = Optional.empty();
        private Optional<MplsLabel> egressLabel = Optional.empty();
        private Builder() {
            // Hide constructor
        }
        @Override
        public Builder appId(ApplicationId appId) {
            return (Builder) super.appId(appId);
        }
        @Override
        public Builder key(Key key) {
            return (Builder) super.key(key);
        }
        @Override
        public Builder selector(TrafficSelector selector) {
            return (Builder) super.selector(selector);
        }
        @Override
        public Builder treatment(TrafficTreatment treatment) {
            return (Builder) super.treatment(treatment);
        }
        @Override
        public Builder constraints(List<Constraint> constraints) {
            return (Builder) super.constraints(constraints);
        }
        @Override
        public Builder priority(int priority) {
            return (Builder) super.priority(priority);
        }
        @Override
        public Builder path(Path path) {
            return (Builder) super.path(path);
        }
        /**
         * Sets the ingress label of the intent that will be built.
         *
         * @param ingressLabel ingress label
         * @return this builder
         */
        public Builder ingressLabel(Optional<MplsLabel> ingressLabel) {
            this.ingressLabel = ingressLabel;
            return this;
        }
        /**
         * Sets the egress label of the intent that will be built.
         *
         * @param egressLabel egress label
         * @return this builder
         */
        public Builder egressLabel(Optional<MplsLabel> egressLabel) {
            this.egressLabel = egressLabel;
            return this;
        }
        /**
         * Builds an MPLS path intent from the accumulated parameters.
         *
         * @return MPLS path intent
         */
        public MplsPathIntent build() {
            return new MplsPathIntent(
                    appId,
                    selector,
                    treatment,
                    path,
                    ingressLabel,
                    egressLabel,
                    constraints,
                    priority
            );
        }
    }
    /**
     * Returns the MPLS label with which the ingress traffic should be tagged.
     *
     * @return ingress MPLS label
     */
    public Optional<MplsLabel> ingressLabel() {
        return ingressLabel;
    }
    /**
     * Returns the MPLS label with which the egress traffic should be tagged.
     *
     * @return egress MPLS label
     */
    public Optional<MplsLabel> egressLabel() {
        return egressLabel;
    }
}
| Zhengzl15/onos-securearp | core/api/src/main/java/org/onosproject/net/intent/MplsPathIntent.java | Java | apache-2.0 | 5,350 |
// NOTE(review): this appears to be an IDE dataflow/slice test fixture —
// <caret> presumably marks the caret position and <flown1> an expected element
// of the resulting slice (TODO confirm against the test that loads this file).
// The marker tokens must stay byte-identical; this file is not compilable Java.
interface JavaInterface {
  void foo(Object p);
}
class JavaClass1 implements JavaInterface {
  @Override
  public void foo(Object <caret>p) {
    System.out.println(<flown1>p);
  }
}
class JavaClass2 implements JavaInterface {
  @Override
  public void foo(Object p) {
    System.err.println(p);
  }
}
/*
* Copyright 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.template.soy.msgs.restricted;
import com.google.template.soy.msgs.SoyMsgException;
import java.util.EnumMap;
import java.util.EnumSet;
import java.util.Locale;
import java.util.Objects;
/**
* Represents a plural case value.
*
* A plural case value can be either a number, or one of {@code ZERO}, {@code ONE}, {@code TWO},
* {@code FEW}, {@code MANY} or {@code OTHER}. Here, a number is represented by the number
* {@code explicitValue} with status set to EXPLICIT and the remaining by an enum value.
*
*/
public class SoyMsgPluralCaseSpec {
/** The type. EXPLICIT indicating numeric, or one of the others indicating non-numeric. */
public enum Type { EXPLICIT, ZERO, ONE, TWO, FEW, MANY, OTHER }
/** Internal mapping of Type to String, reduces memory usage */
private static final EnumMap<Type, String> TYPE_TO_STRING = new EnumMap<>(Type.class);
static {
for (Type t : EnumSet.allOf(Type.class)) {
TYPE_TO_STRING.put(t, t.name().toLowerCase(Locale.ENGLISH));
}
}
/** ZERO, ONE, TWO, FEW, MANY or OTHER if the type is non-numeric, or EXPLICIT if numeric. */
private final Type type;
/** The numeric value if the type is numeric, -1 otherwise. */
private final int explicitValue;
/**
* Constructs an object from a non-numeric value.
* The field type is set to an enum value corresponding to the string given, and explicitValue
* is set to -1.
* @param typeStr String representation of the non-numeric value.
* @throws IllegalArgumentException if typeStr (after converting to upper
* case) does not match with any of the enum types.
*/
public SoyMsgPluralCaseSpec(String typeStr) {
type = Type.valueOf(typeStr.toUpperCase(Locale.ENGLISH));
explicitValue = -1;
}
/**
* Constructs an object from a numeric value.
* The field type is set to EXPLICIT, and explicitValue is set to the numeric value given.
* @param explicitValue The numeric value.
* @throws SoyMsgException if invalid numeric value.
*/
public SoyMsgPluralCaseSpec(int explicitValue) {
if (explicitValue >= 0) {
type = Type.EXPLICIT;
this.explicitValue = explicitValue;
} else {
throw new SoyMsgException("Negative plural case value.");
}
}
/**
* Get the type.
* @return The type. EXPLICIT if numeric.
*/
public Type getType() {
return type;
}
/**
* Get the numeric value.
* @return if numeric, return the numeric value, else -1.
*/
public int getExplicitValue() {
return explicitValue;
}
@Override
public String toString() {
return (type == Type.EXPLICIT) ? "=" + explicitValue : TYPE_TO_STRING.get(type);
}
@Override public boolean equals(Object other) {
if (!(other instanceof SoyMsgPluralCaseSpec)) {
return false;
}
SoyMsgPluralCaseSpec otherSpec = (SoyMsgPluralCaseSpec) other;
return type == otherSpec.type
&& explicitValue == otherSpec.explicitValue;
}
@Override public int hashCode() {
return Objects.hash(SoyMsgPluralCaseSpec.class, type, explicitValue);
}
}
| oujesky/closure-templates | java/src/com/google/template/soy/msgs/restricted/SoyMsgPluralCaseSpec.java | Java | apache-2.0 | 3,688 |
/*
* Copyright 2019 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.dmn.client.editors.included;
import java.util.List;
import com.google.gwtmockito.GwtMockitoTestRunner;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.common.dmn.client.editors.included.common.IncludedModelsPageStateProvider;
import org.mockito.Mock;
import static java.util.Arrays.asList;
import static java.util.Collections.emptyList;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@RunWith(GwtMockitoTestRunner.class)
public class IncludedModelsPageStateTest {

    @Mock
    private IncludedModelsPageStateProvider pageProvider;

    /** Object under test; re-created fresh before each test. */
    private IncludedModelsPageState state;

    @Before
    public void setup() {
        state = new IncludedModelsPageState();
    }

    @Test
    public void testGetCurrentDiagramNamespaceWhenPageProviderIsPresent() {

        final String expectedNamespace = "://namespace";

        when(pageProvider.getCurrentDiagramNamespace()).thenReturn(expectedNamespace);
        state.init(pageProvider);

        final String actualNamespace = state.getCurrentDiagramNamespace();

        assertEquals(expectedNamespace, actualNamespace);
    }

    @Test
    public void testGetCurrentDiagramNamespaceWhenPageProviderIsNotPresent() {

        // With no provider the state falls back to an empty namespace.
        final String expectedNamespace = "";

        state.init(null);

        final String actualNamespace = state.getCurrentDiagramNamespace();

        assertEquals(expectedNamespace, actualNamespace);
    }

    @Test
    public void testGenerateIncludedModelsWhenPageProviderIsNotPresent() {

        state.init(null);

        final List<BaseIncludedModelActiveRecord> actualIncludedModels = state.generateIncludedModels();
        final List<BaseIncludedModelActiveRecord> expectedIncludedModels = emptyList();

        assertEquals(expectedIncludedModels, actualIncludedModels);
    }

    @Test
    public void testGenerateIncludedModelsWhenPageProviderIsPresent() {

        final List<BaseIncludedModelActiveRecord> expectedIncludedModels = asList(mock(BaseIncludedModelActiveRecord.class), mock(BaseIncludedModelActiveRecord.class));

        when(pageProvider.generateIncludedModels()).thenReturn(expectedIncludedModels);
        state.init(pageProvider);

        final List<BaseIncludedModelActiveRecord> actualIncludedModels = state.generateIncludedModels();

        // Fix: JUnit's assertEquals takes (expected, actual) — the original had the
        // arguments swapped, which produces a misleading message on failure.
        assertEquals(expectedIncludedModels, actualIncludedModels);
    }
}
| jomarko/kie-wb-common | kie-wb-common-dmn/kie-wb-common-dmn-client/src/test/java/org/kie/workbench/common/dmn/client/editors/included/IncludedModelsPageStateTest.java | Java | apache-2.0 | 3,116 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.mahout.math.jet.random;
import org.apache.mahout.common.RandomUtils;
import org.apache.mahout.math.MahoutTestCase;
import org.junit.Test;
import java.util.Arrays;
import java.util.Locale;
import java.util.Random;
/**
 * Tests for the {@code Gamma} distribution: sampling consistency, CDF scaling
 * identities, agreement with reference values computed in R, and the closed-form
 * PDF.
 */
public final class GammaTest extends MahoutTestCase {
  /**
   * Draws 100k samples for several shape values (alpha) and checks that the
   * empirical CDF agrees with the distribution's own cdf() to within 0.01.
   */
  @Test
  public void testNextDouble() {
    double[] z = new double[100000];
    Random gen = RandomUtils.getRandom();
    for (double alpha : new double[]{1, 2, 10, 0.1, 0.01, 100}) {
      Gamma g = new Gamma(alpha, 1, gen);
      for (int i = 0; i < z.length; i++) {
        z[i] = g.nextDouble();
      }
      Arrays.sort(z);
      // verify that empirical CDF matches theoretical one pretty closely
      for (double q : seq(0.01, 1, 0.01)) {
        double p = z[(int) (q * z.length)];
        assertEquals(q, g.cdf(p), 0.01);
      }
    }
  }
  /**
   * Checks two scaling identities of the gamma CDF: for alpha=1 it equals the
   * exponential CDF 1-exp(-beta*x), and in general F(x; alpha, beta) =
   * F(beta*x; alpha, 1) exactly (delta 0). Then compares against reference
   * values from R's pgamma (transcripts kept in the comments below).
   */
  @Test
  public void testCdf() {
    Random gen = RandomUtils.getRandom();
    // verify scaling for special case of alpha = 1
    for (double beta : new double[]{1, 0.1, 2, 100}) {
      Gamma g1 = new Gamma(1, beta, gen);
      Gamma g2 = new Gamma(1, 1, gen);
      for (double x : seq(0, 0.99, 0.1)) {
        assertEquals(String.format(Locale.ENGLISH, "Rate invariance: x = %.4f, alpha = 1, beta = %.1f", x, beta),
            1 - Math.exp(-x * beta), g1.cdf(x), 1.0e-9);
        assertEquals(String.format(Locale.ENGLISH, "Rate invariance: x = %.4f, alpha = 1, beta = %.1f", x, beta),
            g2.cdf(beta * x), g1.cdf(x), 1.0e-9);
      }
    }
    // now test scaling for a selection of values of alpha
    for (double alpha : new double[]{0.01, 0.1, 1, 2, 10, 100, 1000}) {
      Gamma g = new Gamma(alpha, 1, gen);
      for (double beta : new double[]{0.1, 1, 2, 100}) {
        Gamma g1 = new Gamma(alpha, beta, gen);
        for (double x : seq(0, 0.9999, 0.001)) {
          assertEquals(
              String.format(Locale.ENGLISH, "Rate invariance: x = %.4f, alpha = %.2f, beta = %.1f", x, alpha, beta),
              g.cdf(x * beta), g1.cdf(x), 0);
        }
      }
    }
    // now check against known values computed using R for various values of alpha
    checkGammaCdf(0.01, 1, 0.0000000, 0.9450896, 0.9516444, 0.9554919, 0.9582258, 0.9603474, 0.9620810, 0.9635462, 0.9648148, 0.9659329, 0.9669321);
    checkGammaCdf(0.1, 1, 0.0000000, 0.7095387, 0.7591012, 0.7891072, 0.8107067, 0.8275518, 0.8413180, 0.8529198, 0.8629131, 0.8716623, 0.8794196);
    checkGammaCdf(1, 1, 0.0000000, 0.1812692, 0.3296800, 0.4511884, 0.5506710, 0.6321206, 0.6988058, 0.7534030, 0.7981035, 0.8347011, 0.8646647);
    checkGammaCdf(10, 1, 0.000000e+00, 4.649808e-05, 8.132243e-03, 8.392402e-02, 2.833757e-01, 5.420703e-01, 7.576078e-01, 8.906006e-01, 9.567017e-01, 9.846189e-01, 9.950046e-01);
    checkGammaCdf(100, 1, 0.000000e+00, 3.488879e-37, 1.206254e-15, 1.481528e-06, 1.710831e-02, 5.132988e-01, 9.721363e-01, 9.998389e-01, 9.999999e-01, 1.000000e+00, 1.000000e+00);

    // > pgamma(seq(0,0.02,by=0.002),0.01,1)
    // [1] 0.0000000 0.9450896 0.9516444 0.9554919 0.9582258 0.9603474 0.9620810 0.9635462 0.9648148 0.9659329 0.9669321
    // > pgamma(seq(0,0.2,by=0.02),0.1,1)
    // [1] 0.0000000 0.7095387 0.7591012 0.7891072 0.8107067 0.8275518 0.8413180 0.8529198 0.8629131 0.8716623 0.8794196
    // > pgamma(seq(0,2,by=0.2),1,1)
    // [1] 0.0000000 0.1812692 0.3296800 0.4511884 0.5506710 0.6321206 0.6988058 0.7534030 0.7981035 0.8347011 0.8646647
    // > pgamma(seq(0,20,by=2),10,1)
    // [1] 0.000000e+00 4.649808e-05 8.132243e-03 8.392402e-02 2.833757e-01 5.420703e-01 7.576078e-01 8.906006e-01 9.567017e-01 9.846189e-01 9.950046e-01
    // > pgamma(seq(0,200,by=20),100,1)
    // [1] 0.000000e+00 3.488879e-37 1.206254e-15 1.481528e-06 1.710831e-02 5.132988e-01 9.721363e-01 9.998389e-01 9.999999e-01 1.000000e+00 1.000000e+00
  }
  /**
   * Compares g.cdf at 11 evenly spaced points in [0, 2*alpha] against the given
   * reference values (computed with R's pgamma, see transcripts above).
   */
  private static void checkGammaCdf(double alpha, double beta, double... values) {
    Gamma g = new Gamma(alpha, beta, RandomUtils.getRandom());
    int i = 0;
    for (double x : seq(0, 2 * alpha, 2 * alpha / 10)) {
      assertEquals(String.format(Locale.ENGLISH, "alpha=%.2f, i=%d, x=%.2f", alpha, i, x),
          values[i], g.cdf(x), 1.0e-7);
      i++;
    }
  }
  /**
   * Returns {from, from+by, ...} strictly below {@code to}; the 0.999999 /
   * 1.0e-6 fuzz factors guard against floating-point rounding at the endpoint.
   */
  private static double[] seq(double from, double to, double by) {
    double[] r = new double[(int) Math.ceil(0.999999 * (to - from) / by)];
    int i = 0;
    for (double x = from; x < to - (to - from) * 1.0e-6; x += by) {
      r[i++] = x;
    }
    return r;
  }
  /**
   * Verifies pdf() against the closed-form gamma density
   * beta^alpha * x^(alpha-1) * exp(-beta*x) / Gamma(alpha).
   */
  @Test
  public void testPdf() {
    Random gen = RandomUtils.getRandom();
    for (double alpha : new double[]{0.01, 0.1, 1, 2, 10, 100}) {
      for (double beta : new double[]{0.1, 1, 2, 100}) {
        Gamma g1 = new Gamma(alpha, beta, gen);
        for (double x : seq(0, 0.99, 0.1)) {
          double p = Math.pow(beta, alpha) * Math.pow(x, alpha - 1) *
              Math.exp(-beta * x - org.apache.mahout.math.jet.stat.Gamma.logGamma(alpha));
          assertEquals(String.format(Locale.ENGLISH, "alpha=%.2f, beta=%.2f, x=%.2f\n", alpha, beta, x),
              p, g1.pdf(x), 1.0e-9);
        }
      }
    }
  }
}
| BigData-Lab-Frankfurt/HiBench-DSE | common/mahout-distribution-0.7-hadoop1/math/src/test/java/org/apache/mahout/math/jet/random/GammaTest.java | Java | apache-2.0 | 5,887 |
/*
* Copyright 2017 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.web.dao.stat;
import com.navercorp.pinpoint.common.server.bo.stat.DeadlockThreadCountBo;
/**
 * DAO for {@link DeadlockThreadCountBo} agent statistics (deadlocked thread
 * counts reported by agents); the query contract comes from {@link AgentStatDao}.
 *
 * @author Taejin Koo
 */
public interface DeadlockDao extends AgentStatDao<DeadlockThreadCountBo> {
}
| barneykim/pinpoint | web/src/main/java/com/navercorp/pinpoint/web/dao/stat/DeadlockDao.java | Java | apache-2.0 | 825 |
// +build linux
package main
import (
"bytes"
"encoding/json"
"fmt"
"io/ioutil"
"math/rand"
"net"
"net/http"
"os"
"strings"
"time"
"github.com/openshift/origin/pkg/network/node/cniserver"
"github.com/containernetworking/cni/pkg/skel"
"github.com/containernetworking/cni/pkg/types"
"github.com/containernetworking/cni/pkg/types/020"
"github.com/containernetworking/cni/pkg/types/current"
"github.com/containernetworking/cni/pkg/version"
"github.com/containernetworking/plugins/pkg/ip"
"github.com/containernetworking/plugins/pkg/ipam"
"github.com/containernetworking/plugins/pkg/ns"
"github.com/vishvananda/netlink"
)
// cniPlugin is the CNI-executable side of openshift-sdn: it forwards CNI
// ADD/DEL commands to the local CNI server over a unix socket.
type cniPlugin struct {
	socketPath string // path to the CNI server's unix socket
	hostNS     ns.NetNS // host network namespace, entered when dialing the socket
}
// NewCNIPlugin returns a cniPlugin that talks to the CNI server at socketPath,
// issuing its requests from the given host network namespace.
func NewCNIPlugin(socketPath string, hostNS ns.NetNS) *cniPlugin {
	return &cniPlugin{socketPath: socketPath, hostNS: hostNS}
}
// newCNIRequest builds the CNIRequest for this plugin invocation: the CNI
// variables from the process environment plus the raw stdin configuration.
func newCNIRequest(args *skel.CmdArgs) *cniserver.CNIRequest {
	env := make(map[string]string)
	for _, kv := range os.Environ() {
		// Keep only entries with a non-empty key; values may contain '='.
		parts := strings.SplitN(kv, "=", 2)
		if len(parts) == 2 && parts[0] != "" {
			env[strings.TrimSpace(parts[0])] = parts[1]
		}
	}
	return &cniserver.CNIRequest{
		Env:    env,
		Config: args.StdinData,
	}
}
// Send a CNI request to the CNI server via JSON + HTTP over a root-owned unix socket,
// and return the result
func (p *cniPlugin) doCNI(url string, req *cniserver.CNIRequest) ([]byte, error) {
	data, err := json.Marshal(req)
	if err != nil {
		return nil, fmt.Errorf("failed to marshal CNI request %v: %v", req, err)
	}
	// The dialer ignores the requested proto/addr and always connects to the
	// server's unix socket; the URL is only used for HTTP routing.
	client := &http.Client{
		Transport: &http.Transport{
			Dial: func(proto, addr string) (net.Conn, error) {
				return net.Dial("unix", p.socketPath)
			},
		},
	}
	var resp *http.Response
	// Issue the POST from the host network namespace (p.hostNS) — presumably the
	// socket is only reachable from there; the plugin process itself may be in a
	// different netns when invoked.
	err = p.hostNS.Do(func(ns.NetNS) error {
		resp, err = client.Post(url, "application/json", bytes.NewReader(data))
		return err
	})
	if err != nil {
		return nil, fmt.Errorf("failed to send CNI request: %v", err)
	}
	defer resp.Body.Close()
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return nil, fmt.Errorf("failed to read CNI result: %v", err)
	}
	// Any non-200 status is an error; the body carries the server's message.
	if resp.StatusCode != 200 {
		return nil, fmt.Errorf("CNI request failed with status %v: '%s'", resp.StatusCode, string(body))
	}
	return body, nil
}
// Send the ADD command environment and config to the CNI server, returning
// the IPAM result to the caller
func (p *cniPlugin) doCNIServerAdd(req *cniserver.CNIRequest, hostVeth string) (types.Result, error) {
	req.HostVeth = hostVeth
	// The URL host is irrelevant — doCNI always dials the unix socket.
	body, err := p.doCNI("http://dummy/", req)
	if err != nil {
		return nil, err
	}
	// We currently expect CNI version 0.2.0 results, because that's the
	// CNIVersion we pass in our config JSON
	result, err := types020.NewResult(body)
	if err != nil {
		return nil, fmt.Errorf("failed to unmarshal response '%s': %v", string(body), err)
	}
	return result, nil
}
// testCmdAdd performs the server-side ADD exchange with a placeholder host
// veth name ("dummy0") and no local network setup; used to exercise
// doCNIServerAdd without creating interfaces.
func (p *cniPlugin) testCmdAdd(args *skel.CmdArgs) (types.Result, error) {
	return p.doCNIServerAdd(newCNIRequest(args), "dummy0")
}
// CmdAdd handles the CNI ADD command: create the container-side veth pair,
// obtain the IPAM result from the CNI server, configure eth0, lo (and
// macvlan0 if present) inside the container netns, then print the CNI
// result to stdout.
func (p *cniPlugin) CmdAdd(args *skel.CmdArgs) error {
	req := newCNIRequest(args)
	config, err := cniserver.ReadConfig(cniserver.CNIServerConfigFilePath)
	if err != nil {
		return err
	}
	var hostVeth, contVeth net.Interface
	// Create the veth pair from inside the container netns; SetupVeth moves
	// the host end into hostNS (the netns we entered from).
	err = ns.WithNetNSPath(args.Netns, func(hostNS ns.NetNS) error {
		hostVeth, contVeth, err = ip.SetupVeth(args.IfName, int(config.MTU), hostNS)
		if err != nil {
			return fmt.Errorf("failed to create container veth: %v", err)
		}
		return nil
	})
	if err != nil {
		return err
	}
	result, err := p.doCNIServerAdd(req, hostVeth.Name)
	if err != nil {
		return err
	}
	// current.NewResultFromResult and ipam.ConfigureIface both think that
	// a route with no gateway specified means to pass the default gateway
	// as the next hop to ip.AddRoute, but that's not what we want; we want
	// to pass nil as the next hop. So we need to clear the default gateway.
	result020, err := types020.GetResult(result)
	if err != nil {
		return fmt.Errorf("failed to convert IPAM result: %v", err)
	}
	result020.IP4.Gateway = nil
	// Only a single IPv4 address is supported.
	result030, err := current.NewResultFromResult(result020)
	if err != nil || len(result030.IPs) != 1 || result030.IPs[0].Version != "4" {
		return fmt.Errorf("failed to convert IPAM result: %v", err)
	}
	// Add a sandbox interface record which ConfigureInterface expects.
	// The only interface we report is the pod interface.
	result030.Interfaces = []*current.Interface{
		{
			Name:    args.IfName,
			Mac:     contVeth.HardwareAddr.String(),
			Sandbox: args.Netns,
		},
	}
	result030.IPs[0].Interface = current.Int(0)
	err = ns.WithNetNSPath(args.Netns, func(ns.NetNS) error {
		// Set up eth0
		if err := ip.SetHWAddrByIP(args.IfName, result030.IPs[0].Address.IP, nil); err != nil {
			return fmt.Errorf("failed to set pod interface MAC address: %v", err)
		}
		if err := ipam.ConfigureIface(args.IfName, result030); err != nil {
			return fmt.Errorf("failed to configure container IPAM: %v", err)
		}
		// Set up lo
		link, err := netlink.LinkByName("lo")
		if err == nil {
			err = netlink.LinkSetUp(link)
		}
		if err != nil {
			return fmt.Errorf("failed to configure container loopback: %v", err)
		}
		// Set up macvlan0 (if it exists)
		link, err = netlink.LinkByName("macvlan0")
		if err == nil {
			err = netlink.LinkSetUp(link)
			if err != nil {
				return fmt.Errorf("failed to configure macvlan device: %v", err)
			}
		}
		return nil
	})
	if err != nil {
		return err
	}
	// Print the original (0.2.0) result for the container runtime.
	return result.Print()
}
// CmdDel forwards the CNI DEL request to the CNI server; the response body is
// ignored, only the error status matters.
func (p *cniPlugin) CmdDel(args *skel.CmdArgs) error {
	_, err := p.doCNI("http://dummy/", newCNIRequest(args))
	return err
}
// main wires the plugin into the CNI skeleton: ADD/DEL are handled by
// cniPlugin against the well-known CNI server socket. version.Legacy declares
// support for legacy CNI spec versions.
func main() {
	rand.Seed(time.Now().UTC().UnixNano())
	// Capture the invoking (host) netns before any namespace switching.
	hostNS, err := ns.GetCurrentNS()
	if err != nil {
		panic(fmt.Sprintf("could not get current kernel netns: %v", err))
	}
	defer hostNS.Close()
	p := NewCNIPlugin(cniserver.CNIServerSocketPath, hostNS)
	skel.PluginMain(p.CmdAdd, p.CmdDel, version.Legacy)
}
| maxamillion/origin | pkg/network/sdn-cni-plugin/openshift-sdn.go | GO | apache-2.0 | 5,969 |
/// <reference path='fourslash.ts' />
// Fourslash fixture: the `////` lines below are the virtual contents of
// file.tsx; the [| ... |] ranges mark the spans expected from a
// find-all-references query on `div`. Keep the marker syntax byte-identical.
//@Filename: file.tsx
//// declare module JSX {
////     interface Element { }
////     interface IntrinsicElements {
////         [|[|{| "isWriteAccess": true, "isDefinition": true, "contextRangeIndex": 0 |}div|]: {
////             name?: string;
////             isOpen?: boolean;
////         };|]
////         span: { n: string; };
////     }
//// }
//// var x = [|<[|{| "contextRangeIndex": 2 |}div|] />|];
// Expect one reference group whose quick-info text is the type of the
// IntrinsicElements `div` property.
verify.singleReferenceGroup(
`(property) JSX.IntrinsicElements.div: {
    name?: string;
    isOpen?: boolean;
}`, "div");
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.coprocessor;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.IsolationLevel;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.coprocessor.RowProcessorClient;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest;
import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse;
import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorService;
import org.apache.hadoop.hbase.regionserver.BaseRowProcessor;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import com.google.protobuf.Message;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* Verifies ProcessEndpoint works.
* The tested RowProcessor performs two scans and a read-modify-write.
*/
@Category({CoprocessorTests.class, MediumTests.class})
public class TestRowProcessorEndpoint {
static final Log LOG = LogFactory.getLog(TestRowProcessorEndpoint.class);
private static final TableName TABLE = TableName.valueOf("testtable");
private final static byte[] ROW = Bytes.toBytes("testrow");
private final static byte[] ROW2 = Bytes.toBytes("testrow2");
private final static byte[] FAM = Bytes.toBytes("friendlist");
// Column names
private final static byte[] A = Bytes.toBytes("a");
private final static byte[] B = Bytes.toBytes("b");
private final static byte[] C = Bytes.toBytes("c");
private final static byte[] D = Bytes.toBytes("d");
private final static byte[] E = Bytes.toBytes("e");
private final static byte[] F = Bytes.toBytes("f");
private final static byte[] G = Bytes.toBytes("g");
private final static byte[] COUNTER = Bytes.toBytes("counter");
private final static AtomicLong myTimer = new AtomicLong(0);
private final AtomicInteger failures = new AtomicInteger(0);
private static HBaseTestingUtility util = new HBaseTestingUtility();
private static volatile int expectedCounter = 0;
private static int rowSize, row2Size;
private volatile static Table table = null;
private volatile static boolean swapped = false;
private volatile CountDownLatch startSignal;
private volatile CountDownLatch doneSignal;
  /**
   * Starts a mini cluster with the RowProcessorEndpoint loaded on every region,
   * few client retries, and a short (1s) row-processor timeout so that
   * {@link #testTimeout()} completes quickly.
   */
  @BeforeClass
  public static void setupBeforeClass() throws Exception {
    Configuration conf = util.getConfiguration();
    conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
        RowProcessorEndpoint.class.getName());
    conf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 2);
    conf.setLong("hbase.hregion.row.processor.timeout", 1000L);
    util.startMiniCluster();
  }
  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    util.shutdownMiniCluster();
  }
  /**
   * Drops and recreates the test table, then loads the fixture data: ROW holds
   * the friend-list graph (A -> {B,C}, B -> {D,E,F}, C -> {G}) and ROW2 holds
   * two unrelated columns. Records each row's cell count in rowSize/row2Size
   * for later assertions.
   */
  public void prepareTestData() throws Exception {
    try {
      util.getHBaseAdmin().disableTable(TABLE);
      util.getHBaseAdmin().deleteTable(TABLE);
    } catch (Exception e) {
      // ignore table not found
    }
    table = util.createTable(TABLE, FAM);
    {
      Put put = new Put(ROW);
      put.add(FAM, A, Bytes.add(B, C)); // B, C are friends of A
      put.add(FAM, B, Bytes.add(D, E, F)); // D, E, F are friends of B
      put.add(FAM, C, G); // G is a friend of C
      table.put(put);
      rowSize = put.size();
    }
    Put put = new Put(ROW2);
    put.add(FAM, D, E);
    put.add(FAM, F, G);
    table.put(put);
    row2Size = put.size();
  }
  /**
   * Exercises the FriendsOfFriendsProcessor: a single endpoint invocation scans
   * ROW and returns the friends-of-friends of column A — the union of the
   * friend lists of A's friends B and C, i.e. {d, e, f, g}.
   */
  @Test
  public void testDoubleScan() throws Throwable {
    prepareTestData();
    CoprocessorRpcChannel channel = table.coprocessorService(ROW);
    RowProcessorEndpoint.FriendsOfFriendsProcessor processor =
        new RowProcessorEndpoint.FriendsOfFriendsProcessor(ROW, A);
    RowProcessorService.BlockingInterface service =
        RowProcessorService.newBlockingStub(channel);
    ProcessRequest request = RowProcessorClient.getRowProcessorPB(processor);
    ProcessResponse protoResult = service.process(null, request);
    FriendsOfFriendsProcessorResponse response =
        FriendsOfFriendsProcessorResponse.parseFrom(protoResult.getRowProcessorResult());
    Set<String> result = new HashSet<String>();
    result.addAll(response.getResultList());
    Set<String> expected =
        new HashSet<String>(Arrays.asList(new String[]{"d", "e", "f", "g"}));
    Get get = new Get(ROW);
    LOG.debug("row keyvalues:" + stringifyKvs(table.get(get).listCells()));
    assertEquals(expected, result);
  }
  /**
   * Runs 100 concurrent increment invocations through the endpoint and then
   * one more from this thread; if each read-modify-write was atomic, the final
   * value is numThreads + 1 and no task recorded a failure.
   */
  @Test
  public void testReadModifyWrite() throws Throwable {
    prepareTestData();
    failures.set(0);
    int numThreads = 100;
    concurrentExec(new IncrementRunner(), numThreads);
    Get get = new Get(ROW);
    LOG.debug("row keyvalues:" + stringifyKvs(table.get(get).listCells()));
    int finalCounter = incrementCounter(table);
    assertEquals(numThreads + 1, finalCounter);
    assertEquals(0, failures.get());
  }
  /** Runnable wrapper around {@link #incrementCounter(Table)} for the concurrency test. */
  class IncrementRunner implements Runnable {
    @Override
    public void run() {
      try {
        incrementCounter(table);
      } catch (Throwable e) {
        // NOTE(review): errors swallowed here bypass the failures counter in
        // concurrentExec; they are only surfaced by the final counter assertion.
        e.printStackTrace();
      }
    }
  }
  /**
   * Invokes the IncrementCounterProcessor endpoint on the region holding ROW
   * and returns the post-increment counter value.
   */
  private int incrementCounter(Table table) throws Throwable {
    CoprocessorRpcChannel channel = table.coprocessorService(ROW);
    RowProcessorEndpoint.IncrementCounterProcessor processor =
        new RowProcessorEndpoint.IncrementCounterProcessor(ROW);
    RowProcessorService.BlockingInterface service =
        RowProcessorService.newBlockingStub(channel);
    ProcessRequest request = RowProcessorClient.getRowProcessorPB(processor);
    ProcessResponse protoResult = service.process(null, request);
    IncCounterProcessorResponse response = IncCounterProcessorResponse
        .parseFrom(protoResult.getRowProcessorResult());
    Integer result = response.getResponse();
    return result;
  }
  /**
   * Runs {@code task} on {@code numThreads} new threads, releasing them all at
   * once through a shared start latch to maximize contention, and blocks until
   * every thread finishes. A throwable escaping a task increments failures.
   *
   * @param task work to execute concurrently
   * @param numThreads number of threads to start
   */
  private void concurrentExec(
      final Runnable task, final int numThreads) throws Throwable {
    startSignal = new CountDownLatch(numThreads);
    doneSignal = new CountDownLatch(numThreads);
    for (int i = 0; i < numThreads; ++i) {
      new Thread(new Runnable() {
        @Override
        public void run() {
          try {
            // Wait until every thread has started so the tasks run together.
            startSignal.countDown();
            startSignal.await();
            task.run();
          } catch (Throwable e) {
            failures.incrementAndGet();
            e.printStackTrace();
          }
          doneSignal.countDown();
        }
      }).start();
    }
    doneSignal.await();
  }
  /**
   * Runs 100 concurrent row-swap invocations. After an even number (100) of
   * complete swaps each row must be back to its original cell count, and no
   * task may have failed.
   */
  @Test
  public void testMultipleRows() throws Throwable {
    prepareTestData();
    failures.set(0);
    int numThreads = 100;
    concurrentExec(new SwapRowsRunner(), numThreads);
    LOG.debug("row keyvalues:" +
        stringifyKvs(table.get(new Get(ROW)).listCells()));
    LOG.debug("row2 keyvalues:" +
        stringifyKvs(table.get(new Get(ROW2)).listCells()));
    assertEquals(rowSize, table.get(new Get(ROW)).listCells().size());
    assertEquals(row2Size, table.get(new Get(ROW2)).listCells().size());
    assertEquals(0, failures.get());
  }
  /** Runnable wrapper around {@link #swapRows(Table)} for the concurrency test. */
  class SwapRowsRunner implements Runnable {
    @Override
    public void run() {
      try {
        swapRows(table);
      } catch (Throwable e) {
        // NOTE(review): like IncrementRunner, errors here bypass the failures
        // counter and are only caught by testMultipleRows' size assertions.
        e.printStackTrace();
      }
    }
  }
  /**
   * Invokes the RowSwapProcessor endpoint to exchange the contents of ROW and
   * ROW2 in a single row-processor invocation.
   */
  private void swapRows(Table table) throws Throwable {
    CoprocessorRpcChannel channel = table.coprocessorService(ROW);
    RowProcessorEndpoint.RowSwapProcessor processor =
        new RowProcessorEndpoint.RowSwapProcessor(ROW, ROW2);
    RowProcessorService.BlockingInterface service =
        RowProcessorService.newBlockingStub(channel);
    ProcessRequest request = RowProcessorClient.getRowProcessorPB(processor);
    service.process(null, request);
  }
  /**
   * Invokes the TimeoutProcessor, which is expected to exceed the 1000ms
   * "hbase.hregion.row.processor.timeout" configured in setupBeforeClass, so
   * the client call must throw.
   */
  @Test
  public void testTimeout() throws Throwable {
    prepareTestData();
    CoprocessorRpcChannel channel = table.coprocessorService(ROW);
    RowProcessorEndpoint.TimeoutProcessor processor =
        new RowProcessorEndpoint.TimeoutProcessor(ROW);
    RowProcessorService.BlockingInterface service =
        RowProcessorService.newBlockingStub(channel);
    ProcessRequest request = RowProcessorClient.getRowProcessorPB(processor);
    boolean exceptionCaught = false;
    try {
      service.process(null, request);
    } catch (Exception e) {
      exceptionCaught = true;
    }
    assertTrue(exceptionCaught);
  }
  /**
   * This class defines the RowProcessors used by the tests above:
   * IncrementCounterProcessor, FriendsOfFriendsProcessor, RowSwapProcessor
   * and TimeoutProcessor.
   *
   * We define the RowProcessors as the inner class of the endpoint.
   * So they can be loaded with the endpoint on the coprocessor.
   */
  public static class RowProcessorEndpoint<S extends Message,T extends Message>
  extends BaseRowProcessorEndpoint<S,T> implements CoprocessorService {

    /**
     * Read-modify-write processor: reads the COUNTER cell of {@code row},
     * asserts it equals the shared expected value, then writes the
     * incremented counter to both the memstore and the WAL edit.
     */
    public static class IncrementCounterProcessor extends
        BaseRowProcessor<IncrementCounterProcessorTestProtos.IncCounterProcessorRequest,
        IncrementCounterProcessorTestProtos.IncCounterProcessorResponse> {
      int counter = 0;
      byte[] row = new byte[0];

      /**
       * Empty constructor for Writable
       */
      IncrementCounterProcessor() {
      }

      IncrementCounterProcessor(byte[] row) {
        this.row = row;
      }

      @Override
      public Collection<byte[]> getRowsToLock() {
        // Lock only the single row being incremented.
        return Collections.singleton(row);
      }

      @Override
      public IncCounterProcessorResponse getResult() {
        IncCounterProcessorResponse.Builder i = IncCounterProcessorResponse.newBuilder();
        i.setResponse(counter);
        return i.build();
      }

      @Override
      public boolean readOnly() {
        return false;
      }

      @Override
      public void process(long now, HRegion region,
          List<Mutation> mutations, WALEdit walEdit) throws IOException {
        // Scan current counter; an absent cell means the counter starts at 0.
        List<Cell> kvs = new ArrayList<Cell>();
        Scan scan = new Scan(row, row);
        scan.addColumn(FAM, COUNTER);
        doScan(region, scan, kvs);
        counter = kvs.size() == 0 ? 0 :
          Bytes.toInt(CellUtil.cloneValue(kvs.iterator().next()));

        // Assert counter value
        assertEquals(expectedCounter, counter);

        // Increment counter and send it to both memstore and wal edit
        counter += 1;
        expectedCounter += 1;
        Put p = new Put(row);
        KeyValue kv =
            new KeyValue(row, FAM, COUNTER, now, Bytes.toBytes(counter));
        p.add(kv);
        mutations.add(p);
        walEdit.add(kv);

        // We can also inject some meta data to the walEdit
        KeyValue metaKv = new KeyValue(
            row, WALEdit.METAFAMILY,
            Bytes.toBytes("I just increment counter"),
            Bytes.toBytes(counter));
        walEdit.add(metaKv);
      }

      @Override
      public IncCounterProcessorRequest getRequestData() throws IOException {
        IncCounterProcessorRequest.Builder builder = IncCounterProcessorRequest.newBuilder();
        builder.setCounter(counter);
        builder.setRow(ByteStringer.wrap(row));
        return builder.build();
      }

      @Override
      public void initialize(IncCounterProcessorRequest msg) {
        this.row = msg.getRow().toByteArray();
        this.counter = msg.getCounter();
      }
    }

    /**
     * Read-only processor: computes "friends of friends" of {@code person}
     * within {@code row} using two scans (friends first, then their friends).
     */
    public static class FriendsOfFriendsProcessor extends
        BaseRowProcessor<FriendsOfFriendsProcessorRequest, FriendsOfFriendsProcessorResponse> {
      byte[] row = null;
      byte[] person = null;
      final Set<String> result = new HashSet<String>();

      /**
       * Empty constructor for Writable
       */
      FriendsOfFriendsProcessor() {
      }

      FriendsOfFriendsProcessor(byte[] row, byte[] person) {
        this.row = row;
        this.person = person;
      }

      @Override
      public Collection<byte[]> getRowsToLock() {
        return Collections.singleton(row);
      }

      @Override
      public FriendsOfFriendsProcessorResponse getResult() {
        FriendsOfFriendsProcessorResponse.Builder builder =
            FriendsOfFriendsProcessorResponse.newBuilder();
        builder.addAllResult(result);
        return builder.build();
      }

      @Override
      public boolean readOnly() {
        return true;
      }

      @Override
      public void process(long now, HRegion region,
          List<Mutation> mutations, WALEdit walEdit) throws IOException {
        List<Cell> kvs = new ArrayList<Cell>();
        { // First scan to get friends of the person
          Scan scan = new Scan(row, row);
          scan.addColumn(FAM, person);
          doScan(region, scan, kvs);
        }

        // Second scan to get friends of friends
        // (each friend is a single byte of the first scan's values).
        Scan scan = new Scan(row, row);
        for (Cell kv : kvs) {
          byte[] friends = CellUtil.cloneValue(kv);
          for (byte f : friends) {
            scan.addColumn(FAM, new byte[]{f});
          }
        }
        doScan(region, scan, kvs);

        // Collect result: one single-character string per value byte.
        result.clear();
        for (Cell kv : kvs) {
          for (byte b : CellUtil.cloneValue(kv)) {
            result.add((char)b + "");
          }
        }
      }

      @Override
      public FriendsOfFriendsProcessorRequest getRequestData() throws IOException {
        FriendsOfFriendsProcessorRequest.Builder builder =
            FriendsOfFriendsProcessorRequest.newBuilder();
        builder.setPerson(ByteStringer.wrap(person));
        builder.setRow(ByteStringer.wrap(row));
        builder.addAllResult(result);
        FriendsOfFriendsProcessorRequest f = builder.build();
        return f;
      }

      @Override
      public void initialize(FriendsOfFriendsProcessorRequest request)
          throws IOException {
        this.person = request.getPerson().toByteArray();
        this.row = request.getRow().toByteArray();
        result.clear();
        result.addAll(request.getResultList());
      }
    }

    /**
     * Multi-row processor: moves every cell of row1 to row2 and vice versa.
     * Both rows are locked (see getRowsToLock), so concurrent swaps always
     * observe a consistent state, which process() asserts via the shared
     * {@code swapped} flag.
     */
    public static class RowSwapProcessor extends
        BaseRowProcessor<RowSwapProcessorRequest, RowSwapProcessorResponse> {
      byte[] row1 = new byte[0];
      byte[] row2 = new byte[0];

      /**
       * Empty constructor for Writable
       */
      RowSwapProcessor() {
      }

      RowSwapProcessor(byte[] row1, byte[] row2) {
        this.row1 = row1;
        this.row2 = row2;
      }

      @Override
      public Collection<byte[]> getRowsToLock() {
        // Lock both rows so the swap is atomic with respect to other swaps.
        List<byte[]> rows = new ArrayList<byte[]>();
        rows.add(row1);
        rows.add(row2);
        return rows;
      }

      @Override
      public boolean readOnly() {
        return false;
      }

      @Override
      public RowSwapProcessorResponse getResult() {
        return RowSwapProcessorResponse.getDefaultInstance();
      }

      @Override
      public void process(long now, HRegion region,
          List<Mutation> mutations, WALEdit walEdit) throws IOException {

        // Override the time to avoid race-condition in the unit test caused by
        // inacurate timer on some machines
        now = myTimer.getAndIncrement();

        // Scan both rows
        List<Cell> kvs1 = new ArrayList<Cell>();
        List<Cell> kvs2 = new ArrayList<Cell>();
        doScan(region, new Scan(row1, row1), kvs1);
        doScan(region, new Scan(row2, row2), kvs2);

        // Assert swapped
        if (swapped) {
          assertEquals(rowSize, kvs2.size());
          assertEquals(row2Size, kvs1.size());
        } else {
          assertEquals(rowSize, kvs1.size());
          assertEquals(row2Size, kvs2.size());
        }
        swapped = !swapped;

        // Add and delete keyvalues
        List<List<Cell>> kvs = new ArrayList<List<Cell>>();
        kvs.add(kvs1);
        kvs.add(kvs2);
        byte[][] rows = new byte[][]{row1, row2};
        for (int i = 0; i < kvs.size(); ++i) {
          for (Cell kv : kvs.get(i)) {
            // Delete from the current row and add to the other row
            Delete d = new Delete(rows[i]);
            KeyValue kvDelete =
                new KeyValue(rows[i], CellUtil.cloneFamily(kv), CellUtil.cloneQualifier(kv),
                    kv.getTimestamp(), KeyValue.Type.Delete);
            d.addDeleteMarker(kvDelete);
            Put p = new Put(rows[1 - i]);
            KeyValue kvAdd =
                new KeyValue(rows[1 - i], CellUtil.cloneFamily(kv), CellUtil.cloneQualifier(kv),
                    now, CellUtil.cloneValue(kv));
            p.add(kvAdd);
            mutations.add(d);
            walEdit.add(kvDelete);
            mutations.add(p);
            walEdit.add(kvAdd);
          }
        }
      }

      @Override
      public String getName() {
        return "swap";
      }

      @Override
      public RowSwapProcessorRequest getRequestData() throws IOException {
        RowSwapProcessorRequest.Builder builder = RowSwapProcessorRequest.newBuilder();
        builder.setRow1(ByteStringer.wrap(row1));
        builder.setRow2(ByteStringer.wrap(row2));
        return builder.build();
      }

      @Override
      public void initialize(RowSwapProcessorRequest msg) {
        this.row1 = msg.getRow1().toByteArray();
        this.row2 = msg.getRow2().toByteArray();
      }
    }

    /**
     * Processor that sleeps far longer than the RPC timeout; used by
     * testTimeout() to verify the client sees a failure.
     */
    public static class TimeoutProcessor extends
        BaseRowProcessor<TimeoutProcessorRequest, TimeoutProcessorResponse> {
      byte[] row = new byte[0];

      /**
       * Empty constructor for Writable
       */
      public TimeoutProcessor() {
      }

      public TimeoutProcessor(byte[] row) {
        this.row = row;
      }

      public Collection<byte[]> getRowsToLock() {
        return Collections.singleton(row);
      }

      @Override
      public TimeoutProcessorResponse getResult() {
        return TimeoutProcessorResponse.getDefaultInstance();
      }

      @Override
      public void process(long now, HRegion region,
          List<Mutation> mutations, WALEdit walEdit) throws IOException {
        try {
          // Sleep for a long time so it timeout
          Thread.sleep(100 * 1000L);
        } catch (Exception e) {
          throw new IOException(e);
        }
      }

      @Override
      public boolean readOnly() {
        return true;
      }

      @Override
      public String getName() {
        return "timeout";
      }

      @Override
      public TimeoutProcessorRequest getRequestData() throws IOException {
        TimeoutProcessorRequest.Builder builder = TimeoutProcessorRequest.newBuilder();
        builder.setRow(ByteStringer.wrap(row));
        return builder.build();
      }

      @Override
      public void initialize(TimeoutProcessorRequest msg) throws IOException {
        this.row = msg.getRow().toByteArray();
      }
    }

    /**
     * Runs {@code scan} against {@code region} at READ_UNCOMMITTED isolation
     * and stores the first batch of cells into {@code result} (cleared first).
     */
    public static void doScan(
        HRegion region, Scan scan, List<Cell> result) throws IOException {
      InternalScanner scanner = null;
      try {
        scan.setIsolationLevel(IsolationLevel.READ_UNCOMMITTED);
        scanner = region.getScanner(scan);
        result.clear();
        scanner.next(result);
      } finally {
        if (scanner != null) scanner.close();
      }
    }
  }
static String stringifyKvs(Collection<Cell> kvs) {
StringBuilder out = new StringBuilder();
out.append("[");
if (kvs != null) {
for (Cell kv : kvs) {
byte[] col = CellUtil.cloneQualifier(kv);
byte[] val = CellUtil.cloneValue(kv);
if (Bytes.equals(col, COUNTER)) {
out.append(Bytes.toStringBinary(col) + ":" +
Bytes.toInt(val) + " ");
} else {
out.append(Bytes.toStringBinary(col) + ":" +
Bytes.toStringBinary(val) + " ");
}
}
}
out.append("]");
return out.toString();
}
}
| amyvmiwei/hbase | hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java | Java | apache-2.0 | 22,973 |
/*
* Copyright (c) 2014 Juniper Networks, Inc. All rights reserved.
*/
#include <stdio.h>
#include <string.h>
#include <fcntl.h>
#include <assert.h>
#include <sys/ioctl.h>
#include <sys/socket.h>
#include <net/if.h>
#include <linux/if_ether.h>
#include <linux/if_tun.h>
#include <linux/if_packet.h>
#include "base/logging.h"
#include "cmn/agent_cmn.h"
#include "sandesh/sandesh_types.h"
#include "sandesh/sandesh.h"
#include "sandesh/sandesh_trace.h"
#include "pkt/pkt_types.h"
#include "pkt/pkt_init.h"
#include "../pkt0_interface.h"
#define TUN_INTF_CLONE_DEV "/dev/net/tun"
#define TAP_TRACE(obj, ...) \
do { \
Tap##obj::TraceMsg(PacketTraceBuf, __FILE__, __LINE__, __VA_ARGS__); \
} while (false) \
///////////////////////////////////////////////////////////////////////////////
// Creates and configures the pkt0 tap device that carries exception packets
// between vrouter and the agent, then starts asynchronous reads on it.
// Every failure in the setup sequence is fatal (assert(0)).
void Pkt0Interface::InitControlInterface() {
    pkt_handler()->agent()->set_pkt_interface_name(name_);

    // Open the tun/tap clone device to create a new tap interface.
    if ((tap_fd_ = open(TUN_INTF_CLONE_DEV, O_RDWR)) < 0) {
        LOG(ERROR, "Packet Tap Error <" << errno << ": " <<
            strerror(errno) << "> opening tap-device");
        assert(0);
    }

    // IFF_TAP: operate at ethernet level; IFF_NO_PI: no extra packet-info
    // header prepended to frames.
    struct ifreq ifr;
    memset(&ifr, 0, sizeof(ifr));
    ifr.ifr_flags = IFF_TAP | IFF_NO_PI;
    strncpy(ifr.ifr_name, name_.c_str(), IF_NAMESIZE);
    if (ioctl(tap_fd_, TUNSETIFF, (void *)&ifr) < 0) {
        // NOTE(review): message renders as "<name>tap-device" — a space
        // appears to be missing after name_.
        LOG(ERROR, "Packet Tap Error <" << errno << ": " <<
            strerror(errno) << "> creating " << name_ << "tap-device");
        assert(0);
    }

    // We dont want the fd to be inherited by child process such as
    // virsh etc... So, close tap fd on fork.
    if (fcntl(tap_fd_, F_SETFD, FD_CLOEXEC) < 0) {
        LOG(ERROR, "Packet Tap Error <" << errno << ": " <<
            strerror(errno) << "> setting fcntl on " << name_ );
        assert(0);
    }

    // Non-persistent: the tap interface goes away when the fd is closed.
    if (ioctl(tap_fd_, TUNSETPERSIST, 0) < 0) {
        LOG(ERROR, "Packet Tap Error <" << errno << ": " <<
            strerror(errno) << "> making tap interface non-persistent");
        assert(0);
    }

    // Cache the MAC address that was assigned to the tap interface.
    memset(&ifr, 0, sizeof(ifr));
    strncpy(ifr.ifr_name, name_.c_str(), IF_NAMESIZE);
    if (ioctl(tap_fd_, SIOCGIFHWADDR, (void *)&ifr) < 0) {
        LOG(ERROR, "Packet Tap Error <" << errno << ": " << strerror(errno) <<
            "> retrieving MAC address of the tap interface");
        assert(0);
    }
    memcpy(mac_address_, ifr.ifr_hwaddr.sa_data, ETHER_ADDR_LEN);

    // A short-lived raw socket is used only to look up the ifindex and
    // bring the interface administratively UP; packet I/O goes via tap_fd_.
    int raw;
    if ((raw = socket(AF_PACKET, SOCK_RAW, htons(ETH_P_ALL))) == -1) {
        LOG(ERROR, "Packet Tap Error <" << errno << ": " <<
            strerror(errno) << "> creating socket");
        assert(0);
    }

    memset(&ifr, 0, sizeof(ifr));
    strncpy(ifr.ifr_name, name_.data(), IF_NAMESIZE);
    if (ioctl(raw, SIOCGIFINDEX, (void *)&ifr) < 0) {
        LOG(ERROR, "Packet Tap Error <" << errno << ": " <<
            strerror(errno) << "> getting ifindex of the tap interface");
        assert(0);
    }

    // Bind the raw socket to the tap interface.
    struct sockaddr_ll sll;
    memset(&sll, 0, sizeof(struct sockaddr_ll));
    sll.sll_family = AF_PACKET;
    sll.sll_ifindex = ifr.ifr_ifindex;
    sll.sll_protocol = htons(ETH_P_ALL);
    if (bind(raw, (struct sockaddr *)&sll,
             sizeof(struct sockaddr_ll)) < 0) {
        LOG(ERROR, "Packet Tap Error <" << errno << ": " <<
            strerror(errno) << "> binding the socket to the tap interface");
        assert(0);
    }

    // Bring the tap interface UP.
    memset(&ifr, 0, sizeof(ifr));
    strncpy(ifr.ifr_name, name_.data(), IF_NAMESIZE);
    if (ioctl(raw, SIOCGIFFLAGS, (void *)&ifr) < 0) {
        LOG(ERROR, "Packet Tap Error <" << errno << ": " <<
            strerror(errno) << "> getting socket flags");
        assert(0);
    }
    ifr.ifr_flags |= IFF_UP;
    if (ioctl(raw, SIOCSIFFLAGS, (void *)&ifr) < 0) {
        LOG(ERROR, "Packet Tap Error <" << errno << ": " <<
            strerror(errno) << "> setting socket flags");
        assert(0);
    }
    close(raw);

    // Hand the tap fd to the asio descriptor and start reading packets.
    boost::system::error_code ec;
    input_.assign(tap_fd_, ec);
    assert(ec == 0);

    VrouterControlInterface::InitControlInterface();
    AsyncRead();
}
// Binds a raw AF_PACKET socket to an existing host interface (the
// exception-packet interface) and starts asynchronous reads on it.
// Unlike Pkt0Interface, no tap device is created: the bound raw socket
// itself becomes the packet fd. Every setup failure is fatal (assert(0)).
//
// Fixes: "expception" typo in the error message, a redundant duplicate
// memset of ifr, and a local variable (raw_) named like a class member.
void Pkt0RawInterface::InitControlInterface() {
    pkt_handler()->agent()->set_pkt_interface_name(name_);

    // Open a raw packet socket that sees all ethertypes.
    int raw;
    if ((raw = socket(AF_PACKET, SOCK_RAW, htons(ETH_P_ALL))) == -1) {
        LOG(ERROR, "Packet Tap Error <" << errno << ": " <<
            strerror(errno) << "> creating socket");
        assert(0);
    }

    // Look up the ifindex of the exception-packet interface.
    struct ifreq ifr;
    memset(&ifr, 0, sizeof(ifr));
    strncpy(ifr.ifr_name,
            pkt_handler()->agent()->pkt_interface_name().c_str(), IF_NAMESIZE);
    if (ioctl(raw, SIOCGIFINDEX, (void *)&ifr) < 0) {
        LOG(ERROR, "Packet Tap Error <" << errno << ": " <<
            strerror(errno) << "> getting ifindex of the " <<
            "exception packet interface");
        assert(0);
    }

    // Bind the socket to that interface so only its traffic is received.
    struct sockaddr_ll sll;
    memset(&sll, 0, sizeof(struct sockaddr_ll));
    sll.sll_family = AF_PACKET;
    sll.sll_ifindex = ifr.ifr_ifindex;
    sll.sll_protocol = htons(ETH_P_ALL);
    if (bind(raw, (struct sockaddr *)&sll,
             sizeof(struct sockaddr_ll)) < 0) {
        LOG(ERROR, "Packet Tap Error <" << errno << ": " <<
            strerror(errno) << "> binding the socket to the tap interface");
        assert(0);
    }

    // Bring the interface administratively UP.
    memset(&ifr, 0, sizeof(ifr));
    strncpy(ifr.ifr_name, name_.data(), IF_NAMESIZE);
    if (ioctl(raw, SIOCGIFFLAGS, (void *)&ifr) < 0) {
        LOG(ERROR, "Packet Tap Error <" << errno << ": " <<
            strerror(errno) << "> getting socket flags");
        assert(0);
    }
    ifr.ifr_flags |= IFF_UP;
    if (ioctl(raw, SIOCSIFFLAGS, (void *)&ifr) < 0) {
        LOG(ERROR, "Packet Tap Error <" << errno << ": " <<
            strerror(errno) << "> setting socket flags");
        assert(0);
    }

    // The bound raw socket is the control/packet fd for this interface.
    tap_fd_ = raw;
    boost::system::error_code ec;
    input_.assign(tap_fd_, ec);
    assert(ec == 0);

    VrouterControlInterface::InitControlInterface();
    AsyncRead();
}
| facetothefate/contrail-controller | src/vnsw/agent/contrail/linux/pkt0_interface.cc | C++ | apache-2.0 | 6,144 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.shiro.authz;
import org.apache.shiro.authz.permission.PermissionResolver;
import org.apache.shiro.authz.permission.PermissionResolverAware;
import org.apache.shiro.authz.permission.RolePermissionResolver;
import org.apache.shiro.authz.permission.RolePermissionResolverAware;
import org.apache.shiro.realm.Realm;
import org.apache.shiro.subject.PrincipalCollection;
import java.util.Collection;
import java.util.List;
/**
* A <tt>ModularRealmAuthorizer</tt> is an <tt>Authorizer</tt> implementation that consults one or more configured
* {@link Realm Realm}s during an authorization operation.
*
* @since 0.2
*/
public class ModularRealmAuthorizer implements Authorizer, PermissionResolverAware, RolePermissionResolverAware {

    /**
     * The realms to consult during any authorization check.
     */
    protected Collection<Realm> realms;

    /**
     * A PermissionResolver to be used by <em>all</em> configured realms.  Leave <code>null</code> if you wish
     * to configure different resolvers for different realms.
     */
    protected PermissionResolver permissionResolver;

    /**
     * A RolePermissionResolver to be used by <em>all</em> configured realms.  Leave <code>null</code> if you wish
     * to configure different resolvers for different realms.
     */
    protected RolePermissionResolver rolePermissionResolver;

    /**
     * Default no-argument constructor, does nothing.
     */
    public ModularRealmAuthorizer() {
    }

    /**
     * Constructor that accepts the <code>Realm</code>s to consult during an authorization check.  Immediately calls
     * {@link #setRealms setRealms(realms)}.
     *
     * @param realms the realms to consult during an authorization check.
     */
    public ModularRealmAuthorizer(Collection<Realm> realms) {
        setRealms(realms);
    }

    /**
     * Returns the realms wrapped by this <code>Authorizer</code> which are consulted during an authorization check.
     *
     * @return the realms wrapped by this <code>Authorizer</code> which are consulted during an authorization check.
     */
    public Collection<Realm> getRealms() {
        return this.realms;
    }

    /**
     * Sets the realms wrapped by this <code>Authorizer</code> which are consulted during an authorization check.
     * Also pushes any configured permission/role-permission resolver down to the newly set realms.
     *
     * @param realms the realms wrapped by this <code>Authorizer</code> which are consulted during an authorization check.
     */
    public void setRealms(Collection<Realm> realms) {
        this.realms = realms;
        applyPermissionResolverToRealms();
        applyRolePermissionResolverToRealms();
    }

    /**
     * Returns the PermissionResolver to be used on <em>all</em> configured realms, or <code>null</code> (the default)
     * if all realm instances will each configure their own permission resolver.
     *
     * @return the PermissionResolver to be used on <em>all</em> configured realms, or <code>null</code> (the default)
     *         if realm instances will each configure their own permission resolver.
     * @since 1.0
     */
    public PermissionResolver getPermissionResolver() {
        return this.permissionResolver;
    }

    /**
     * Sets the specified {@link PermissionResolver PermissionResolver} on <em>all</em> of the wrapped realms that
     * implement the {@link org.apache.shiro.authz.permission.PermissionResolverAware PermissionResolverAware} interface.
     * <p/>
     * Only call this method if you want the permission resolver to be passed to all realms that implement the
     * <code>PermissionResolver</code> interface.  If you do not want this to occur, the realms must
     * configure themselves individually (or be configured individually).
     *
     * @param permissionResolver the permissionResolver to set on all of the wrapped realms that implement the
     *                           {@link org.apache.shiro.authz.permission.PermissionResolverAware PermissionResolverAware} interface.
     */
    public void setPermissionResolver(PermissionResolver permissionResolver) {
        this.permissionResolver = permissionResolver;
        applyPermissionResolverToRealms();
    }

    /**
     * Sets the internal {@link #getPermissionResolver} on any internal configured
     * {@link #getRealms Realms} that implement the {@link org.apache.shiro.authz.permission.PermissionResolverAware PermissionResolverAware} interface.
     * <p/>
     * This method is called after setting a permissionResolver on this ModularRealmAuthorizer via the
     * {@link #setPermissionResolver(org.apache.shiro.authz.permission.PermissionResolver) setPermissionResolver} method.
     * <p/>
     * It is also called after setting one or more realms via the {@link #setRealms setRealms} method to allow these
     * newly available realms to be given the <code>PermissionResolver</code> already in use.
     *
     * @since 1.0
     */
    protected void applyPermissionResolverToRealms() {
        PermissionResolver resolver = getPermissionResolver();
        Collection<Realm> realms = getRealms();
        // Only propagate when there is something to propagate to.
        if (resolver != null && realms != null && !realms.isEmpty()) {
            for (Realm realm : realms) {
                if (realm instanceof PermissionResolverAware) {
                    ((PermissionResolverAware) realm).setPermissionResolver(resolver);
                }
            }
        }
    }

    /**
     * Returns the RolePermissionResolver to be used on <em>all</em> configured realms, or <code>null</code> (the default)
     * if all realm instances will each configure their own permission resolver.
     *
     * @return the RolePermissionResolver to be used on <em>all</em> configured realms, or <code>null</code> (the default)
     *         if realm instances will each configure their own role permission resolver.
     * @since 1.0
     */
    public RolePermissionResolver getRolePermissionResolver() {
        return this.rolePermissionResolver;
    }

    /**
     * Sets the specified {@link RolePermissionResolver RolePermissionResolver} on <em>all</em> of the wrapped realms that
     * implement the {@link org.apache.shiro.authz.permission.RolePermissionResolverAware RolePermissionResolverAware} interface.
     * <p/>
     * Only call this method if you want the permission resolver to be passed to all realms that implement the
     * <code>RolePermissionResolver</code> interface.  If you do not want this to occur, the realms must
     * configure themselves individually (or be configured individually).
     *
     * @param rolePermissionResolver the rolePermissionResolver to set on all of the wrapped realms that implement the
     *                               {@link org.apache.shiro.authz.permission.RolePermissionResolverAware RolePermissionResolverAware} interface.
     */
    public void setRolePermissionResolver(RolePermissionResolver rolePermissionResolver) {
        this.rolePermissionResolver = rolePermissionResolver;
        applyRolePermissionResolverToRealms();
    }

    /**
     * Sets the internal {@link #getRolePermissionResolver} on any internal configured
     * {@link #getRealms Realms} that implement the {@link org.apache.shiro.authz.permission.RolePermissionResolverAware RolePermissionResolverAware} interface.
     * <p/>
     * This method is called after setting a rolePermissionResolver on this ModularRealmAuthorizer via the
     * {@link #setRolePermissionResolver(org.apache.shiro.authz.permission.RolePermissionResolver) setRolePermissionResolver} method.
     * <p/>
     * It is also called after setting one or more realms via the {@link #setRealms setRealms} method to allow these
     * newly available realms to be given the <code>RolePermissionResolver</code> already in use.
     *
     * @since 1.0
     */
    protected void applyRolePermissionResolverToRealms() {
        RolePermissionResolver resolver = getRolePermissionResolver();
        Collection<Realm> realms = getRealms();
        if (resolver != null && realms != null && !realms.isEmpty()) {
            for (Realm realm : realms) {
                if (realm instanceof RolePermissionResolverAware) {
                    ((RolePermissionResolverAware) realm).setRolePermissionResolver(resolver);
                }
            }
        }
    }

    /**
     * Used by the {@link Authorizer Authorizer} implementation methods to ensure that the {@link #setRealms realms}
     * has been set.  The default implementation ensures the property is not null and not empty.
     *
     * @throws IllegalStateException if the <tt>realms</tt> property is configured incorrectly.
     */
    protected void assertRealmsConfigured() throws IllegalStateException {
        Collection<Realm> realms = getRealms();
        if (realms == null || realms.isEmpty()) {
            String msg = "Configuration error:  No realms have been configured!  One or more realms must be " +
                    "present to execute an authorization operation.";
            throw new IllegalStateException(msg);
        }
    }

    /**
     * Returns <code>true</code> if any of the configured realms'
     * {@link #isPermitted(org.apache.shiro.subject.PrincipalCollection, String)} returns <code>true</code>,
     * <code>false</code> otherwise.
     */
    public boolean isPermitted(PrincipalCollection principals, String permission) {
        assertRealmsConfigured();
        for (Realm realm : getRealms()) {
            // Realms that are not Authorizers cannot contribute to the decision.
            if (!(realm instanceof Authorizer)) continue;
            if (((Authorizer) realm).isPermitted(principals, permission)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Returns <code>true</code> if any of the configured realms'
     * {@link #isPermitted(org.apache.shiro.subject.PrincipalCollection, Permission)} call returns <code>true</code>,
     * <code>false</code> otherwise.
     */
    public boolean isPermitted(PrincipalCollection principals, Permission permission) {
        assertRealmsConfigured();
        for (Realm realm : getRealms()) {
            if (!(realm instanceof Authorizer)) continue;
            if (((Authorizer) realm).isPermitted(principals, permission)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Returns an array whose i-th element is <code>true</code> if any of the configured realms permits the
     * i-th of the specified string permissions (via
     * {@link #isPermitted(org.apache.shiro.subject.PrincipalCollection, String)}), <code>false</code> otherwise.
     * A null or empty argument yields an empty array.
     */
    public boolean[] isPermitted(PrincipalCollection principals, String... permissions) {
        assertRealmsConfigured();
        if (permissions != null && permissions.length > 0) {
            boolean[] isPermitted = new boolean[permissions.length];
            for (int i = 0; i < permissions.length; i++) {
                isPermitted[i] = isPermitted(principals, permissions[i]);
            }
            return isPermitted;
        }
        return new boolean[0];
    }

    /**
     * Returns an array whose i-th element is <code>true</code> if any of the configured realms permits the
     * i-th of the specified {@link Permission}s (via
     * {@link #isPermitted(org.apache.shiro.subject.PrincipalCollection, Permission)}), <code>false</code> otherwise.
     * A null or empty argument yields an empty array.
     */
    public boolean[] isPermitted(PrincipalCollection principals, List<Permission> permissions) {
        assertRealmsConfigured();
        if (permissions != null && !permissions.isEmpty()) {
            boolean[] isPermitted = new boolean[permissions.size()];
            int i = 0;
            for (Permission p : permissions) {
                isPermitted[i++] = isPermitted(principals, p);
            }
            return isPermitted;
        }
        return new boolean[0];
    }

    /**
     * Returns <code>true</code> if any of the configured realms'
     * {@link #isPermitted(org.apache.shiro.subject.PrincipalCollection, String)} call returns <code>true</code>
     * for <em>all</em> of the specified string permissions, <code>false</code> otherwise.
     */
    public boolean isPermittedAll(PrincipalCollection principals, String... permissions) {
        assertRealmsConfigured();
        if (permissions != null && permissions.length > 0) {
            for (String perm : permissions) {
                if (!isPermitted(principals, perm)) {
                    return false;
                }
            }
        }
        return true;
    }

    /**
     * Returns <code>true</code> if any of the configured realms'
     * {@link #isPermitted(org.apache.shiro.subject.PrincipalCollection, Permission)} call returns <code>true</code>
     * for <em>all</em> of the specified Permissions, <code>false</code> otherwise.
     */
    public boolean isPermittedAll(PrincipalCollection principals, Collection<Permission> permissions) {
        assertRealmsConfigured();
        if (permissions != null && !permissions.isEmpty()) {
            for (Permission permission : permissions) {
                if (!isPermitted(principals, permission)) {
                    return false;
                }
            }
        }
        return true;
    }

    /**
     * If !{@link #isPermitted(org.apache.shiro.subject.PrincipalCollection, String) isPermitted(permission)}, throws
     * an <code>UnauthorizedException</code> otherwise returns quietly.
     */
    public void checkPermission(PrincipalCollection principals, String permission) throws AuthorizationException {
        assertRealmsConfigured();
        if (!isPermitted(principals, permission)) {
            throw new UnauthorizedException("Subject does not have permission [" + permission + "]");
        }
    }

    /**
     * If !{@link #isPermitted(org.apache.shiro.subject.PrincipalCollection, Permission) isPermitted(permission)}, throws
     * an <code>UnauthorizedException</code> otherwise returns quietly.
     */
    public void checkPermission(PrincipalCollection principals, Permission permission) throws AuthorizationException {
        assertRealmsConfigured();
        if (!isPermitted(principals, permission)) {
            throw new UnauthorizedException("Subject does not have permission [" + permission + "]");
        }
    }

    /**
     * If !{@link #isPermitted(org.apache.shiro.subject.PrincipalCollection, String...) isPermitted(permission)},
     * throws an <code>UnauthorizedException</code> otherwise returns quietly.
     */
    public void checkPermissions(PrincipalCollection principals, String... permissions) throws AuthorizationException {
        assertRealmsConfigured();
        if (permissions != null && permissions.length > 0) {
            for (String perm : permissions) {
                checkPermission(principals, perm);
            }
        }
    }

    /**
     * If !{@link #isPermitted(org.apache.shiro.subject.PrincipalCollection, Permission) isPermitted(permission)} for
     * <em>all</em> the given Permissions, throws
     * an <code>UnauthorizedException</code> otherwise returns quietly.
     */
    public void checkPermissions(PrincipalCollection principals, Collection<Permission> permissions) throws AuthorizationException {
        assertRealmsConfigured();
        if (permissions != null) {
            for (Permission permission : permissions) {
                checkPermission(principals, permission);
            }
        }
    }

    /**
     * Returns <code>true</code> if any of the configured realms'
     * {@link #hasRole(org.apache.shiro.subject.PrincipalCollection, String)} call returns <code>true</code>,
     * <code>false</code> otherwise.
     */
    public boolean hasRole(PrincipalCollection principals, String roleIdentifier) {
        assertRealmsConfigured();
        for (Realm realm : getRealms()) {
            if (!(realm instanceof Authorizer)) continue;
            if (((Authorizer) realm).hasRole(principals, roleIdentifier)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Calls {@link #hasRole(org.apache.shiro.subject.PrincipalCollection, String)} for each role name in the specified
     * collection and places the return value from each call at the respective location in the returned array.
     * A null or empty argument yields an empty array.
     */
    public boolean[] hasRoles(PrincipalCollection principals, List<String> roleIdentifiers) {
        assertRealmsConfigured();
        if (roleIdentifiers != null && !roleIdentifiers.isEmpty()) {
            boolean[] hasRoles = new boolean[roleIdentifiers.size()];
            int i = 0;
            for (String roleId : roleIdentifiers) {
                hasRoles[i++] = hasRole(principals, roleId);
            }
            return hasRoles;
        }
        return new boolean[0];
    }

    /**
     * Returns <code>true</code> iff any of the configured realms'
     * {@link #hasRole(org.apache.shiro.subject.PrincipalCollection, String)} call returns <code>true</code> for
     * <em>all</em> roles specified, <code>false</code> otherwise.
     */
    public boolean hasAllRoles(PrincipalCollection principals, Collection<String> roleIdentifiers) {
        assertRealmsConfigured();
        for (String roleIdentifier : roleIdentifiers) {
            if (!hasRole(principals, roleIdentifier)) {
                return false;
            }
        }
        return true;
    }

    /**
     * If !{@link #hasRole(org.apache.shiro.subject.PrincipalCollection, String) hasRole(role)}, throws
     * an <code>UnauthorizedException</code> otherwise returns quietly.
     */
    public void checkRole(PrincipalCollection principals, String role) throws AuthorizationException {
        assertRealmsConfigured();
        if (!hasRole(principals, role)) {
            throw new UnauthorizedException("Subject does not have role [" + role + "]");
        }
    }

    /**
     * Calls {@link #checkRoles(PrincipalCollection principals, String... roles) checkRoles(PrincipalCollection principals, String... roles) }.
     */
    public void checkRoles(PrincipalCollection principals, Collection<String> roles) throws AuthorizationException {
        //SHIRO-234 - roles.toArray() -> roles.toArray(new String[roles.size()])
        if (roles != null && !roles.isEmpty()) checkRoles(principals, roles.toArray(new String[roles.size()]));
    }

    /**
     * Calls {@link #checkRole(org.apache.shiro.subject.PrincipalCollection, String) checkRole} for each role specified.
     */
    public void checkRoles(PrincipalCollection principals, String... roles) throws AuthorizationException {
        assertRealmsConfigured();
        if (roles != null) {
            for (String role : roles) {
                checkRole(principals, role);
            }
        }
    }

}
| xuegongzi/rabbitframework | rabbitframework-security-pom/rabbitframework-security/src/main/java/org/apache/shiro/authz/ModularRealmAuthorizer.java | Java | apache-2.0 | 19,390 |
# Copyright 2013 Josh Durgin
# Copyright 2013 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from lxml import etree
from oslo.config import cfg
import webob
from webob import exc
from nova.api.openstack.compute.contrib import assisted_volume_snapshots as \
assisted_snaps
from nova.api.openstack.compute.contrib import volumes
from nova.api.openstack import extensions
from nova.compute import api as compute_api
from nova.compute import flavors
from nova import context
from nova import exception
from nova.openstack.common import jsonutils
from nova.openstack.common import timeutils
from nova import test
from nova.tests.api.openstack import fakes
from nova.volume import cinder
CONF = cfg.CONF
CONF.import_opt('password_length', 'nova.utils')

# Canned identifiers shared by the fakes and test cases below.
FAKE_UUID = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
FAKE_UUID_A = '00000000-aaaa-aaaa-aaaa-000000000000'
FAKE_UUID_B = 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb'
FAKE_UUID_C = 'cccccccc-cccc-cccc-cccc-cccccccccccc'
FAKE_UUID_D = 'dddddddd-dddd-dddd-dddd-dddddddddddd'
IMAGE_UUID = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
def fake_get_instance(self, context, instance_id, want_objects=False):
    """Stub for compute_api.API.get: wrap the id in a minimal instance dict."""
    return dict(uuid=instance_id)


def fake_get_volume(self, context, id):
    """Stub for cinder.API.get: always return the same canned volume."""
    return dict(id='woot')
def fake_attach_volume(self, context, instance, volume_id, device):
    """No-op stub for compute_api.API.attach_volume."""


def fake_detach_volume(self, context, instance, volume):
    """No-op stub for compute_api.API.detach_volume."""


def fake_swap_volume(self, context, instance,
                     old_volume_id, new_volume_id):
    """No-op stub for compute_api.API.swap_volume."""
def fake_create_snapshot(self, context, volume, name, description):
    """Stub for cinder snapshot creation: return a fixed snapshot dict."""
    snapshot = {
        'id': 123,
        'volume_id': 'fakeVolId',
        'status': 'available',
        'volume_size': 123,
        'created_at': '2013-01-01 00:00:01',
        'display_name': 'myVolumeName',
        'display_description': 'myVolumeDescription',
    }
    return snapshot
def fake_delete_snapshot(self, context, snapshot_id):
    """No-op stub for cinder.API.delete_snapshot."""


def fake_compute_volume_snapshot_delete(self, context, volume_id, snapshot_id,
                                        delete_info):
    """No-op stub for compute_api.API.volume_snapshot_delete."""


def fake_compute_volume_snapshot_create(self, context, volume_id,
                                        create_info):
    """No-op stub for compute_api.API.volume_snapshot_create."""
def fake_get_instance_bdms(self, context, instance):
    """Stub for compute_api.API.get_instance_bdms: two fixed mappings."""
    shared = {'instance_uuid': instance['uuid'],
              'delete_on_termination': 'False',
              'virtual_name': 'MyNamesVirtual',
              'snapshot_id': None,
              'volume_size': 1}
    first = dict(shared, id=1, device_name='/dev/fake0',
                 volume_id=FAKE_UUID_A)
    second = dict(shared, id=2, device_name='/dev/fake1',
                  volume_id=FAKE_UUID_B)
    return [first, second]
class BootFromVolumeTest(test.TestCase):
    """Tests booting servers whose root disk comes from a volume.

    compute_api.API.create is replaced with a recorder so the tests can
    inspect the block device mapping kwargs the API layer passed down.
    """

    def setUp(self):
        super(BootFromVolumeTest, self).setUp()
        self.stubs.Set(compute_api.API, 'create',
                       self._get_fake_compute_api_create())
        fakes.stub_out_nw_api(self.stubs)
        # Filled in by the fake create() below.
        self._block_device_mapping_seen = None
        self._legacy_bdm_seen = True
        self.flags(
            osapi_compute_extension=[
                'nova.api.openstack.compute.contrib.select_extensions'],
            osapi_compute_ext_list=['Volumes', 'Block_device_mapping_v2_boot'])

    def _get_fake_compute_api_create(self):
        # Build a stand-in for compute_api.API.create that records the
        # block-device-mapping kwargs and returns one canned instance.
        def _fake_compute_api_create(cls, context, instance_type,
                                     image_href, **kwargs):
            self._block_device_mapping_seen = kwargs.get(
                'block_device_mapping')
            self._legacy_bdm_seen = kwargs.get('legacy_bdm')

            inst_type = flavors.get_flavor_by_flavor_id(2)
            resv_id = None
            return ([{'id': 1,
                      'display_name': 'test_server',
                      'uuid': FAKE_UUID,
                      'instance_type': dict(inst_type),
                      'access_ip_v4': '1.2.3.4',
                      'access_ip_v6': 'fead::1234',
                      'image_ref': IMAGE_UUID,
                      'user_id': 'fake',
                      'project_id': 'fake',
                      'created_at': datetime.datetime(2010, 10, 10, 12, 0, 0),
                      'updated_at': datetime.datetime(2010, 11, 11, 11, 0, 0),
                      'progress': 0,
                      'fixed_ips': []
                      }], resv_id)
        return _fake_compute_api_create

    def test_create_root_volume(self):
        # Legacy (v1) block_device_mapping format.
        body = dict(server=dict(
            name='test_server', imageRef=IMAGE_UUID,
            flavorRef=2, min_count=1, max_count=1,
            block_device_mapping=[dict(
                volume_id=1,
                device_name='/dev/vda',
                virtual='root',
                delete_on_termination=False,
            )]
        ))
        req = webob.Request.blank('/v2/fake/os-volumes_boot')
        req.method = 'POST'
        req.body = jsonutils.dumps(body)
        req.headers['content-type'] = 'application/json'
        res = req.get_response(fakes.wsgi_app(
            init_only=('os-volumes_boot', 'servers')))
        self.assertEqual(res.status_int, 202)
        server = jsonutils.loads(res.body)['server']
        self.assertEqual(FAKE_UUID, server['id'])
        self.assertEqual(CONF.password_length, len(server['adminPass']))
        self.assertEqual(len(self._block_device_mapping_seen), 1)
        self.assertTrue(self._legacy_bdm_seen)
        self.assertEqual(self._block_device_mapping_seen[0]['volume_id'], 1)
        self.assertEqual(self._block_device_mapping_seen[0]['device_name'],
                         '/dev/vda')

    def test_create_root_volume_bdm_v2(self):
        # New-style (v2) block_device_mapping_v2 format.
        body = dict(server=dict(
            name='test_server', imageRef=IMAGE_UUID,
            flavorRef=2, min_count=1, max_count=1,
            block_device_mapping_v2=[dict(
                source_type='volume',
                uuid=1,
                device_name='/dev/vda',
                boot_index=0,
                delete_on_termination=False,
            )]
        ))
        req = webob.Request.blank('/v2/fake/os-volumes_boot')
        req.method = 'POST'
        req.body = jsonutils.dumps(body)
        req.headers['content-type'] = 'application/json'
        res = req.get_response(fakes.wsgi_app(
            init_only=('os-volumes_boot', 'servers')))
        self.assertEqual(res.status_int, 202)
        server = jsonutils.loads(res.body)['server']
        self.assertEqual(FAKE_UUID, server['id'])
        self.assertEqual(CONF.password_length, len(server['adminPass']))
        self.assertEqual(len(self._block_device_mapping_seen), 1)
        self.assertFalse(self._legacy_bdm_seen)
        self.assertEqual(self._block_device_mapping_seen[0]['volume_id'], 1)
        self.assertEqual(self._block_device_mapping_seen[0]['boot_index'],
                         0)
        self.assertEqual(self._block_device_mapping_seen[0]['device_name'],
                         '/dev/vda')
class VolumeApiTest(test.TestCase):
    """Tests the /os-volumes REST resource with cinder stubbed out."""

    def setUp(self):
        super(VolumeApiTest, self).setUp()
        fakes.stub_out_networking(self.stubs)
        fakes.stub_out_rate_limiting(self.stubs)

        self.stubs.Set(cinder.API, "delete", fakes.stub_volume_delete)
        self.stubs.Set(cinder.API, "get", fakes.stub_volume_get)
        self.stubs.Set(cinder.API, "get_all", fakes.stub_volume_get_all)
        self.flags(
            osapi_compute_extension=[
                'nova.api.openstack.compute.contrib.select_extensions'],
            osapi_compute_ext_list=['Volumes'])

        self.context = context.get_admin_context()
        self.app = fakes.wsgi_app(init_only=('os-volumes',))

    def test_volume_create(self):
        self.stubs.Set(cinder.API, "create", fakes.stub_volume_create)

        vol = {"size": 100,
               "display_name": "Volume Test Name",
               "display_description": "Volume Test Desc",
               "availability_zone": "zone1:host1"}
        body = {"volume": vol}
        req = webob.Request.blank('/v2/fake/os-volumes')
        req.method = 'POST'
        req.body = jsonutils.dumps(body)
        req.headers['content-type'] = 'application/json'
        resp = req.get_response(self.app)

        self.assertEqual(resp.status_int, 200)

        # The response echoes the request attributes in camelCase form.
        resp_dict = jsonutils.loads(resp.body)
        self.assertTrue('volume' in resp_dict)
        self.assertEqual(resp_dict['volume']['size'],
                         vol['size'])
        self.assertEqual(resp_dict['volume']['displayName'],
                         vol['display_name'])
        self.assertEqual(resp_dict['volume']['displayDescription'],
                         vol['display_description'])
        self.assertEqual(resp_dict['volume']['availabilityZone'],
                         vol['availability_zone'])

    def test_volume_create_bad(self):
        # cinder rejecting the request must surface as HTTP 400.
        def fake_volume_create(self, context, size, name, description,
                               snapshot, **param):
            raise exception.InvalidInput(reason="bad request data")

        self.stubs.Set(cinder.API, "create", fake_volume_create)

        vol = {"size": '#$?',
               "display_name": "Volume Test Name",
               "display_description": "Volume Test Desc",
               "availability_zone": "zone1:host1"}
        body = {"volume": vol}
        req = fakes.HTTPRequest.blank('/v2/fake/os-volumes')
        self.assertRaises(webob.exc.HTTPBadRequest,
                          volumes.VolumeController().create, req, body)

    def test_volume_index(self):
        req = webob.Request.blank('/v2/fake/os-volumes')
        resp = req.get_response(self.app)
        self.assertEqual(resp.status_int, 200)

    def test_volume_detail(self):
        req = webob.Request.blank('/v2/fake/os-volumes/detail')
        resp = req.get_response(self.app)
        self.assertEqual(resp.status_int, 200)

    def test_volume_show(self):
        req = webob.Request.blank('/v2/fake/os-volumes/123')
        resp = req.get_response(self.app)
        self.assertEqual(resp.status_int, 200)

    def test_volume_show_no_volume(self):
        self.stubs.Set(cinder.API, "get", fakes.stub_volume_notfound)

        req = webob.Request.blank('/v2/fake/os-volumes/456')
        resp = req.get_response(self.app)
        self.assertEqual(resp.status_int, 404)

    def test_volume_delete(self):
        req = webob.Request.blank('/v2/fake/os-volumes/123')
        req.method = 'DELETE'
        resp = req.get_response(self.app)
        self.assertEqual(resp.status_int, 202)

    def test_volume_delete_no_volume(self):
        self.stubs.Set(cinder.API, "delete", fakes.stub_volume_notfound)

        req = webob.Request.blank('/v2/fake/os-volumes/456')
        req.method = 'DELETE'
        resp = req.get_response(self.app)
        self.assertEqual(resp.status_int, 404)
class VolumeAttachTests(test.TestCase):
    """Tests VolumeAttachmentController show/create/delete/update."""

    def setUp(self):
        super(VolumeAttachTests, self).setUp()
        self.stubs.Set(compute_api.API,
                       'get_instance_bdms',
                       fake_get_instance_bdms)
        self.stubs.Set(compute_api.API, 'get', fake_get_instance)
        self.stubs.Set(cinder.API, 'get', fake_get_volume)
        self.context = context.get_admin_context()
        # Matches the first mapping returned by fake_get_instance_bdms.
        self.expected_show = {'volumeAttachment':
            {'device': '/dev/fake0',
             'serverId': FAKE_UUID,
             'id': FAKE_UUID_A,
             'volumeId': FAKE_UUID_A
            }}
        self.ext_mgr = extensions.ExtensionManager()
        self.ext_mgr.extensions = {}
        self.attachments = volumes.VolumeAttachmentController(self.ext_mgr)

    def test_show(self):
        req = webob.Request.blank('/v2/servers/id/os-volume_attachments/uuid')
        req.method = 'POST'
        req.body = jsonutils.dumps({})
        req.headers['content-type'] = 'application/json'
        req.environ['nova.context'] = self.context
        result = self.attachments.show(req, FAKE_UUID, FAKE_UUID_A)
        self.assertEqual(self.expected_show, result)

    def test_detach(self):
        self.stubs.Set(compute_api.API,
                       'detach_volume',
                       fake_detach_volume)
        req = webob.Request.blank('/v2/servers/id/os-volume_attachments/uuid')
        req.method = 'DELETE'
        req.headers['content-type'] = 'application/json'
        req.environ['nova.context'] = self.context
        result = self.attachments.delete(req, FAKE_UUID, FAKE_UUID_A)
        self.assertEqual('202 Accepted', result.status)

    def test_detach_vol_not_found(self):
        # FAKE_UUID_C is not among the instance's mappings.
        self.stubs.Set(compute_api.API,
                       'detach_volume',
                       fake_detach_volume)
        req = webob.Request.blank('/v2/servers/id/os-volume_attachments/uuid')
        req.method = 'DELETE'
        req.headers['content-type'] = 'application/json'
        req.environ['nova.context'] = self.context
        self.assertRaises(exc.HTTPNotFound,
                          self.attachments.delete,
                          req,
                          FAKE_UUID,
                          FAKE_UUID_C)

    def test_attach_volume(self):
        self.stubs.Set(compute_api.API,
                       'attach_volume',
                       fake_attach_volume)
        # The controller reads the attachment from the `body` argument,
        # not from the (empty) request payload.
        body = {'volumeAttachment': {'volumeId': FAKE_UUID_A,
                                     'device': '/dev/fake'}}
        req = webob.Request.blank('/v2/servers/id/os-volume_attachments')
        req.method = 'POST'
        req.body = jsonutils.dumps({})
        req.headers['content-type'] = 'application/json'
        req.environ['nova.context'] = self.context
        result = self.attachments.create(req, FAKE_UUID, body)
        self.assertEqual(result['volumeAttachment']['id'],
                         '00000000-aaaa-aaaa-aaaa-000000000000')

    def test_attach_volume_bad_id(self):
        self.stubs.Set(compute_api.API,
                       'attach_volume',
                       fake_attach_volume)

        body = {
            'volumeAttachment': {
                'device': None,
                'volumeId': 'TESTVOLUME',
            }
        }

        req = webob.Request.blank('/v2/servers/id/os-volume_attachments')
        req.method = 'POST'
        req.body = jsonutils.dumps({})
        req.headers['content-type'] = 'application/json'
        req.environ['nova.context'] = self.context

        self.assertRaises(webob.exc.HTTPBadRequest, self.attachments.create,
                          req, FAKE_UUID, body)

    def _test_swap(self, uuid=FAKE_UUID_A):
        self.stubs.Set(compute_api.API,
                       'swap_volume',
                       fake_swap_volume)
        body = {'volumeAttachment': {'volumeId': FAKE_UUID_B,
                                     'device': '/dev/fake'}}
        req = webob.Request.blank('/v2/servers/id/os-volume_attachments/uuid')
        req.method = 'PUT'
        req.body = jsonutils.dumps({})
        req.headers['content-type'] = 'application/json'
        req.environ['nova.context'] = self.context
        return self.attachments.update(req, FAKE_UUID, uuid, body)

    def test_swap_volume_no_extension(self):
        # PUT requires the os-volume-attachment-update extension.
        self.assertRaises(webob.exc.HTTPBadRequest, self._test_swap)

    def test_swap_volume(self):
        self.ext_mgr.extensions['os-volume-attachment-update'] = True
        result = self._test_swap()
        self.assertEqual('202 Accepted', result.status)

    def test_swap_volume_no_attachment(self):
        self.ext_mgr.extensions['os-volume-attachment-update'] = True
        self.assertRaises(exc.HTTPNotFound, self._test_swap, FAKE_UUID_C)
class VolumeSerializerTest(test.TestCase):
    """Checks the XML templates that serialize volumes and attachments."""

    def _verify_volume_attachment(self, attach, tree):
        # Each attachment field must be mirrored as an XML attribute.
        for attr in ('id', 'volumeId', 'serverId', 'device'):
            self.assertEqual(str(attach[attr]), tree.get(attr))

    def _verify_volume(self, vol, tree):
        self.assertEqual(tree.tag, 'volume')
        for attr in ('id', 'status', 'size', 'availabilityZone', 'createdAt',
                     'displayName', 'displayDescription', 'volumeType',
                     'snapshotId'):
            self.assertEqual(str(vol[attr]), tree.get(attr))
        # Only the attachments and metadata containers may appear as
        # child elements; verify both exhaustively.
        for child in tree:
            self.assertTrue(child.tag in ('attachments', 'metadata'))
            if child.tag == 'attachments':
                self.assertEqual(1, len(child))
                self.assertEqual('attachment', child[0].tag)
                self._verify_volume_attachment(vol['attachments'][0], child[0])
            elif child.tag == 'metadata':
                not_seen = set(vol['metadata'].keys())
                for gr_child in child:
                    self.assertTrue(gr_child.get("key") in not_seen)
                    self.assertEqual(str(vol['metadata'][gr_child.get("key")]),
                                     gr_child.text)
                    not_seen.remove(gr_child.get("key"))
                self.assertEqual(0, len(not_seen))

    def test_attach_show_create_serializer(self):
        serializer = volumes.VolumeAttachmentTemplate()
        raw_attach = dict(
            id='vol_id',
            volumeId='vol_id',
            serverId='instance_uuid',
            device='/foo')
        text = serializer.serialize(dict(volumeAttachment=raw_attach))

        tree = etree.fromstring(text)

        self.assertEqual('volumeAttachment', tree.tag)
        self._verify_volume_attachment(raw_attach, tree)

    def test_attach_index_serializer(self):
        serializer = volumes.VolumeAttachmentsTemplate()
        raw_attaches = [dict(
                id='vol_id1',
                volumeId='vol_id1',
                serverId='instance1_uuid',
                device='/foo1'),
            dict(
                id='vol_id2',
                volumeId='vol_id2',
                serverId='instance2_uuid',
                device='/foo2')]
        text = serializer.serialize(dict(volumeAttachments=raw_attaches))

        tree = etree.fromstring(text)

        self.assertEqual('volumeAttachments', tree.tag)
        self.assertEqual(len(raw_attaches), len(tree))
        for idx, child in enumerate(tree):
            self.assertEqual('volumeAttachment', child.tag)
            self._verify_volume_attachment(raw_attaches[idx], child)

    def test_volume_show_create_serializer(self):
        serializer = volumes.VolumeTemplate()
        raw_volume = dict(
            id='vol_id',
            status='vol_status',
            size=1024,
            availabilityZone='vol_availability',
            createdAt=timeutils.utcnow(),
            attachments=[dict(
                    id='vol_id',
                    volumeId='vol_id',
                    serverId='instance_uuid',
                    device='/foo')],
            displayName='vol_name',
            displayDescription='vol_desc',
            volumeType='vol_type',
            snapshotId='snap_id',
            metadata=dict(
                foo='bar',
                baz='quux',
                ),
            )
        text = serializer.serialize(dict(volume=raw_volume))

        tree = etree.fromstring(text)

        self._verify_volume(raw_volume, tree)

    def test_volume_index_detail_serializer(self):
        serializer = volumes.VolumesTemplate()
        raw_volumes = [dict(
                id='vol1_id',
                status='vol1_status',
                size=1024,
                availabilityZone='vol1_availability',
                createdAt=timeutils.utcnow(),
                attachments=[dict(
                        id='vol1_id',
                        volumeId='vol1_id',
                        serverId='instance_uuid',
                        device='/foo1')],
                displayName='vol1_name',
                displayDescription='vol1_desc',
                volumeType='vol1_type',
                snapshotId='snap1_id',
                metadata=dict(
                    foo='vol1_foo',
                    bar='vol1_bar',
                    ),
                ),
            dict(
                id='vol2_id',
                status='vol2_status',
                size=1024,
                availabilityZone='vol2_availability',
                createdAt=timeutils.utcnow(),
                attachments=[dict(
                        id='vol2_id',
                        volumeId='vol2_id',
                        serverId='instance_uuid',
                        device='/foo2')],
                displayName='vol2_name',
                displayDescription='vol2_desc',
                volumeType='vol2_type',
                snapshotId='snap2_id',
                metadata=dict(
                    foo='vol2_foo',
                    bar='vol2_bar',
                    ),
                )]
        text = serializer.serialize(dict(volumes=raw_volumes))

        tree = etree.fromstring(text)

        self.assertEqual('volumes', tree.tag)
        self.assertEqual(len(raw_volumes), len(tree))
        for idx, child in enumerate(tree):
            self._verify_volume(raw_volumes[idx], child)
class TestVolumeCreateRequestXMLDeserializer(test.TestCase):
    """Tests volumes.CreateDeserializer.

    Each test feeds an XML volume document to the deserializer and
    compares the resulting request body against an expected dict.
    """

    def setUp(self):
        super(TestVolumeCreateRequestXMLDeserializer, self).setUp()
        self.deserializer = volumes.CreateDeserializer()

    def test_minimal_volume(self):
        self_request = """
<volume xmlns="http://docs.openstack.org/compute/api/v1.1"
        size="1"></volume>"""
        request = self.deserializer.deserialize(self_request)
        expected = {
            "volume": {
                "size": "1",
            },
        }
        self.assertEquals(request['body'], expected)

    def test_display_name(self):
        self_request = """
<volume xmlns="http://docs.openstack.org/compute/api/v1.1"
        size="1"
        display_name="Volume-xml"></volume>"""
        request = self.deserializer.deserialize(self_request)
        expected = {
            "volume": {
                "size": "1",
                "display_name": "Volume-xml",
            },
        }
        self.assertEquals(request['body'], expected)

    def test_display_description(self):
        self_request = """
<volume xmlns="http://docs.openstack.org/compute/api/v1.1"
        size="1"
        display_name="Volume-xml"
        display_description="description"></volume>"""
        request = self.deserializer.deserialize(self_request)
        expected = {
            "volume": {
                "size": "1",
                "display_name": "Volume-xml",
                "display_description": "description",
            },
        }
        self.assertEquals(request['body'], expected)

    def test_volume_type(self):
        self_request = """
<volume xmlns="http://docs.openstack.org/compute/api/v1.1"
        size="1"
        display_name="Volume-xml"
        display_description="description"
        volume_type="289da7f8-6440-407c-9fb4-7db01ec49164"></volume>"""
        request = self.deserializer.deserialize(self_request)
        # NOTE: the expected dict previously listed "display_name" twice;
        # the first occurrence was dead (the literal's later entry wins).
        expected = {
            "volume": {
                "size": "1",
                "display_name": "Volume-xml",
                "display_description": "description",
                "volume_type": "289da7f8-6440-407c-9fb4-7db01ec49164",
            },
        }
        self.assertEquals(request['body'], expected)

    def test_availability_zone(self):
        self_request = """
<volume xmlns="http://docs.openstack.org/compute/api/v1.1"
        size="1"
        display_name="Volume-xml"
        display_description="description"
        volume_type="289da7f8-6440-407c-9fb4-7db01ec49164"
        availability_zone="us-east1"></volume>"""
        request = self.deserializer.deserialize(self_request)
        expected = {
            "volume": {
                "size": "1",
                "display_name": "Volume-xml",
                "display_description": "description",
                "volume_type": "289da7f8-6440-407c-9fb4-7db01ec49164",
                "availability_zone": "us-east1",
            },
        }
        self.assertEquals(request['body'], expected)

    def test_metadata(self):
        self_request = """
<volume xmlns="http://docs.openstack.org/compute/api/v1.1"
        display_name="Volume-xml"
        size="1">
        <metadata><meta key="Type">work</meta></metadata></volume>"""
        request = self.deserializer.deserialize(self_request)
        expected = {
            "volume": {
                "display_name": "Volume-xml",
                "size": "1",
                "metadata": {
                    "Type": "work",
                },
            },
        }
        self.assertEquals(request['body'], expected)

    def test_full_volume(self):
        self_request = """
<volume xmlns="http://docs.openstack.org/compute/api/v1.1"
        size="1"
        display_name="Volume-xml"
        display_description="description"
        volume_type="289da7f8-6440-407c-9fb4-7db01ec49164"
        availability_zone="us-east1">
        <metadata><meta key="Type">work</meta></metadata></volume>"""
        request = self.deserializer.deserialize(self_request)
        expected = {
            "volume": {
                "size": "1",
                "display_name": "Volume-xml",
                "display_description": "description",
                "volume_type": "289da7f8-6440-407c-9fb4-7db01ec49164",
                "availability_zone": "us-east1",
                "metadata": {
                    "Type": "work",
                },
            },
        }
        self.maxDiff = None
        self.assertEquals(request['body'], expected)
class CommonUnprocessableEntityTestCase(object):
    """Tests of places we throw 422 Unprocessable Entity from.

    Mixin; subclasses provide the class attributes below and also
    inherit from test.TestCase.
    """
    # NOTE: this docstring previously sat *after* the attribute
    # assignments, where it was a discarded no-op string rather than
    # the class docstring; it has been moved to the top of the body.

    # Filled in by subclasses for the resource under test.
    resource = None
    entity_name = None
    controller_cls = None
    kwargs = {}

    def setUp(self):
        super(CommonUnprocessableEntityTestCase, self).setUp()
        self.controller = self.controller_cls()

    def _unprocessable_create(self, body):
        # POSTing `body` to the resource must raise 422.
        req = fakes.HTTPRequest.blank('/v2/fake/' + self.resource)
        req.method = 'POST'

        kwargs = self.kwargs.copy()
        kwargs['body'] = body
        self.assertRaises(webob.exc.HTTPUnprocessableEntity,
                          self.controller.create, req, **kwargs)

    def test_create_no_body(self):
        self._unprocessable_create(body=None)

    def test_create_missing_volume(self):
        body = {'foo': {'a': 'b'}}
        self._unprocessable_create(body=body)

    def test_create_malformed_entity(self):
        body = {self.entity_name: 'string'}
        self._unprocessable_create(body=body)
class UnprocessableVolumeTestCase(CommonUnprocessableEntityTestCase,
                                  test.TestCase):
    # 422 cases for the /os-volumes resource.
    resource = 'os-volumes'
    entity_name = 'volume'
    controller_cls = volumes.VolumeController


class UnprocessableAttachmentTestCase(CommonUnprocessableEntityTestCase,
                                      test.TestCase):
    # 422 cases for a server's volume attachments; create() also needs
    # the server_id keyword.
    resource = 'servers/' + FAKE_UUID + '/os-volume_attachments'
    entity_name = 'volumeAttachment'
    controller_cls = volumes.VolumeAttachmentController
    kwargs = {'server_id': FAKE_UUID}


class UnprocessableSnapshotTestCase(CommonUnprocessableEntityTestCase,
                                    test.TestCase):
    # 422 cases for the /os-snapshots resource.
    resource = 'os-snapshots'
    entity_name = 'snapshot'
    controller_cls = volumes.SnapshotController
class CreateSnapshotTestCase(test.TestCase):
    """Tests `force` flag handling in SnapshotController.create."""

    def setUp(self):
        super(CreateSnapshotTestCase, self).setUp()
        self.controller = volumes.SnapshotController()
        self.stubs.Set(cinder.API, 'get', fake_get_volume)
        self.stubs.Set(cinder.API, 'create_snapshot_force',
                       fake_create_snapshot)
        self.stubs.Set(cinder.API, 'create_snapshot', fake_create_snapshot)
        self.req = fakes.HTTPRequest.blank('/v2/fake/os-snapshots')
        self.req.method = 'POST'
        self.body = {'snapshot': {'volume_id': 1}}

    def test_force_true(self):
        self.body['snapshot']['force'] = 'True'
        self.controller.create(self.req, body=self.body)

    def test_force_false(self):
        self.body['snapshot']['force'] = 'f'
        self.controller.create(self.req, body=self.body)

    def test_force_invalid(self):
        # Any string that is not a recognized boolean must be rejected.
        self.body['snapshot']['force'] = 'foo'
        self.assertRaises(exception.InvalidParameterValue,
                          self.controller.create, self.req, body=self.body)
class DeleteSnapshotTestCase(test.TestCase):
    """Tests SnapshotController.delete against stubbed cinder calls."""

    def setUp(self):
        super(DeleteSnapshotTestCase, self).setUp()
        self.controller = volumes.SnapshotController()
        self.stubs.Set(cinder.API, 'get', fake_get_volume)
        self.stubs.Set(cinder.API, 'create_snapshot_force',
                       fake_create_snapshot)
        self.stubs.Set(cinder.API, 'create_snapshot', fake_create_snapshot)
        self.stubs.Set(cinder.API, 'delete_snapshot', fake_delete_snapshot)
        self.req = fakes.HTTPRequest.blank('/v2/fake/os-snapshots')

    def test_normal_delete(self):
        # Create a snapshot first, then delete it by the returned id.
        self.req.method = 'POST'
        self.body = {'snapshot': {'volume_id': 1}}
        result = self.controller.create(self.req, body=self.body)

        self.req.method = 'DELETE'
        result = self.controller.delete(self.req, result['snapshot']['id'])
        self.assertEqual(result.status_int, 202)
class AssistedSnapshotCreateTestCase(test.TestCase):
    """Tests the os-assisted-volume-snapshots create API."""

    def setUp(self):
        super(AssistedSnapshotCreateTestCase, self).setUp()

        self.controller = assisted_snaps.AssistedVolumeSnapshotsController()
        self.stubs.Set(compute_api.API, 'volume_snapshot_create',
                       fake_compute_volume_snapshot_create)

    def test_assisted_create(self):
        req = fakes.HTTPRequest.blank('/v2/fake/os-assisted-volume-snapshots')
        body = {'snapshot': {'volume_id': 1, 'create_info': {}}}
        req.method = 'POST'
        self.controller.create(req, body=body)

    def test_assisted_create_missing_create_info(self):
        # create_info is mandatory; omitting it must yield HTTP 400.
        req = fakes.HTTPRequest.blank('/v2/fake/os-assisted-volume-snapshots')
        body = {'snapshot': {'volume_id': 1}}
        req.method = 'POST'
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          req, body=body)
class AssistedSnapshotDeleteTestCase(test.TestCase):
    """Tests the os-assisted-volume-snapshots delete API."""

    def setUp(self):
        super(AssistedSnapshotDeleteTestCase, self).setUp()

        self.controller = assisted_snaps.AssistedVolumeSnapshotsController()
        self.stubs.Set(compute_api.API, 'volume_snapshot_delete',
                       fake_compute_volume_snapshot_delete)

    def test_assisted_delete(self):
        # delete_info is passed as a JSON-encoded query parameter.
        params = {
            'delete_info': jsonutils.dumps({'volume_id': 1}),
        }
        req = fakes.HTTPRequest.blank(
                '/v2/fake/os-assisted-volume-snapshots?%s' %
                '&'.join(['%s=%s' % (k, v) for k, v in params.iteritems()]))
        req.method = 'DELETE'
        result = self.controller.delete(req, '5')
        self.assertEqual(result.status_int, 204)

    def test_assisted_delete_missing_delete_info(self):
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.delete,
                          req, '5')
| ntt-sic/nova | nova/tests/api/openstack/compute/contrib/test_volumes.py | Python | apache-2.0 | 32,169 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.syncope.core.persistence.api.entity.anyobject;
import org.apache.syncope.core.persistence.api.entity.Relationship;
/**
 * Relationship between two any objects: both ends of the generic
 * {@link Relationship} contract are {@link AnyObject} instances.
 */
public interface ARelationship extends Relationship<AnyObject, AnyObject> {
}
| tmess567/syncope | core/persistence-api/src/main/java/org/apache/syncope/core/persistence/api/entity/anyobject/ARelationship.java | Java | apache-2.0 | 1,023 |
package org.apache.maven.plugins.install.stubs;
import java.io.File;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
 * Artifact stub whose file points at the attached-artifacts test fixture
 * under the build directory.
 */
public class AttachedArtifactStub0
    extends InstallArtifactStub
{
    /** Fixture jar location, relative to ${basedir}. */
    private static final String RELATIVE_PATH =
        "target/test-classes/unit/basic-install-test-with-attached-artifacts/"
            + "target/maven-install-test-1.0-SNAPSHOT.jar";

    public String getArtifactId()
    {
        return "attached-artifact-test-0";
    }

    public File getFile()
    {
        return new File( System.getProperty( "basedir" ), RELATIVE_PATH );
    }
}
| apache/maven-plugins | maven-install-plugin/src/test/java/org/apache/maven/plugins/install/stubs/AttachedArtifactStub0.java | Java | apache-2.0 | 1,311 |
/*
* Copyright 2018 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.common.server.cluster.zookeeper.exception;
/**
* @author koo.taejin
*/
public class NoNodeException extends PinpointZookeeperException {

    /** Creates an exception without a detail message. */
    public NoNodeException() {
    }

    /** @param message detail message describing the missing node */
    public NoNodeException(String message) {
        super(message);
    }

    /**
     * @param message detail message describing the missing node
     * @param cause underlying error
     */
    public NoNodeException(String message, Throwable cause) {
        super(message, cause);
    }

    /** @param cause underlying error */
    public NoNodeException(Throwable cause) {
        super(cause);
    }
}
| barneykim/pinpoint | commons-server/src/main/java/com/navercorp/pinpoint/common/server/cluster/zookeeper/exception/NoNodeException.java | Java | apache-2.0 | 1,056 |
//// [augmentedTypesExternalModule1.ts]
export var a = 1;
class c5 { public foo() { } }
module c5 { } // should be ok everywhere

//// [augmentedTypesExternalModule1.js]
// Compiler-emitted AMD baseline for the TypeScript source above; the class
// is lowered to an IIFE-wrapped constructor function.
define(["require", "exports"], function (require, exports) {
    "use strict";
    exports.__esModule = true;
    exports.a = 1;
    var c5 = /** @class */ (function () {
        function c5() {
        }
        c5.prototype.foo = function () { };
        return c5;
    }());
});
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.resource;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.util.Locale;
import org.apache.wicket.WicketRuntimeException;
import org.apache.wicket.request.Url;
import org.apache.wicket.request.resource.ResourceReference;
import org.apache.wicket.util.io.IOUtils;
import org.apache.wicket.util.lang.Args;
import org.apache.wicket.util.resource.IResourceStream;
import org.apache.wicket.util.resource.ResourceStreamNotFoundException;
import org.apache.wicket.util.string.Strings;
/**
* Utilities for resources.
*
* @author Jeremy Thomerson
*/
public class ResourceUtil
{
/**
* Reads resource reference attributes (style, locale, variation) encoded in the given string.
*
* @param encodedAttributes
* the string containing the resource attributes
* @return the encoded attributes
*
* @see ResourceReference.UrlAttributes
*/
public static ResourceReference.UrlAttributes decodeResourceReferenceAttributes(String encodedAttributes)
{
    Locale locale = null;
    String style = null;
    String variation = null;

    if (Strings.isEmpty(encodedAttributes) == false)
    {
        // Format: "<locale>[-<style>[-<variation>]]"; style and variation
        // were escaped by escapeAttributesSeparator() when encoded.
        // (Fixed: Java-style array declaration instead of C-style "String split[]".)
        String[] split = Strings.split(encodedAttributes, '-');
        locale = parseLocale(split[0]);
        if (split.length == 2)
        {
            style = Strings.defaultIfEmpty(unescapeAttributesSeparator(split[1]), null);
        }
        else if (split.length == 3)
        {
            style = Strings.defaultIfEmpty(unescapeAttributesSeparator(split[1]), null);
            variation = Strings.defaultIfEmpty(unescapeAttributesSeparator(split[2]), null);
        }
    }
    return new ResourceReference.UrlAttributes(locale, style, variation);
}
/**
* Reads resource reference attributes (style, locale, variation) encoded in the given URL.
*
* @param url
* the url containing the resource attributes
* @return the encoded attributes
*
* @see ResourceReference.UrlAttributes
*/
public static ResourceReference.UrlAttributes decodeResourceReferenceAttributes(Url url)
{
    Args.notNull(url, "url");

    // The attributes, if any, are carried by the first value-less query
    // parameter of the url.
    if (url.getQueryParameters().isEmpty() == false)
    {
        Url.QueryParameter firstParameter = url.getQueryParameters().get(0);
        if (Strings.isEmpty(firstParameter.getValue()))
        {
            return decodeResourceReferenceAttributes(firstParameter.getName());
        }
    }
    return new ResourceReference.UrlAttributes(null, null, null);
}
/**
* Encodes the given resource reference attributes returning the corresponding textual representation.
*
* @param attributes
* the resource reference attributes to encode
* @return the textual representation for the given attributes
*
* @see ResourceReference.UrlAttributes
*/
public static String encodeResourceReferenceAttributes(ResourceReference.UrlAttributes attributes)
{
    if (attributes == null)
    {
        return null;
    }

    Locale locale = attributes.getLocale();
    String style = attributes.getStyle();
    String variation = attributes.getVariation();
    if (locale == null && style == null && variation == null)
    {
        return null;
    }

    StringBuilder buffer = new StringBuilder(32);
    if (locale != null)
    {
        buffer.append(locale);
    }
    boolean hasStyle = !Strings.isEmpty(style);
    if (hasStyle)
    {
        buffer.append('-').append(escapeAttributesSeparator(style));
    }
    if (!Strings.isEmpty(variation))
    {
        // Without a style the variation keeps its third slot via an
        // empty style segment ("--").
        buffer.append(hasStyle ? "-" : "--").append(escapeAttributesSeparator(variation));
    }
    return buffer.toString();
}
/**
 * Encodes the attributes of the given resource reference into the specified url, as a
 * query parameter with the encoded attributes as name and an empty value.
 *
 * @param url
 *            the url receiving the encoded attributes
 * @param reference
 *            the resource reference whose attributes are encoded
 *
 * @see ResourceReference.UrlAttributes
 * @see Url
 */
public static void encodeResourceReferenceAttributes(Url url, ResourceReference reference)
{
    Args.notNull(url, "url");
    Args.notNull(reference, "reference");

    String encodedAttributes = encodeResourceReferenceAttributes(reference.getUrlAttributes());
    if (Strings.isEmpty(encodedAttributes) == false)
    {
        url.getQueryParameters().add(new Url.QueryParameter(encodedAttributes, ""));
    }
}
/**
 * Escapes any occurrence of the <em>-</em> separator character in the style and variation
 * attributes with <em>~</em>. To keep the mapping reversible, every pre-existing
 * <em>~</em> is first doubled to <em>~~</em>.
 *
 * @param attribute
 *            the attribute to escape
 * @return the attribute with the separator character escaped
 */
public static CharSequence escapeAttributesSeparator(String attribute)
{
    // double the tildes first, then turn the dashes into single tildes
    return Strings.replaceAll(Strings.replaceAll(attribute, "~", "~~"), "-", "~");
}
/**
 * Parses the string representation of a {@link java.util.Locale} (for example 'en_GB' or
 * 'de_DE_bavarian'). The whole input is lower-cased before splitting; the
 * {@link Locale} constructor normalizes the country code back to upper case.
 *
 * @param locale
 *            the string representation of a {@link java.util.Locale}
 * @return the corresponding {@link java.util.Locale} instance, or {@code null} for an
 *         empty input
 */
public static Locale parseLocale(String locale)
{
    if (Strings.isEmpty(locale))
    {
        return null;
    }

    // Locale.ROOT avoids locale-sensitive case mapping: with a Turkish default
    // locale, "IT".toLowerCase() would yield a dotless-i and break the language code.
    String[] parts = locale.toLowerCase(Locale.ROOT).split("_", 3);
    switch (parts.length)
    {
        case 1 :
            return new Locale(parts[0]);
        case 2 :
            return new Locale(parts[0], parts[1]);
        case 3 :
            return new Locale(parts[0], parts[1], parts[2]);
        default :
            // unreachable: split with limit 3 yields at most three parts
            return null;
    }
}
/**
 * Reads a string with the platform default encoding from the given resource stream.
 *
 * @param resourceStream
 *            the source of the string; it is closed by this call
 * @return string read from resource stream
 *
 * @see #readString(org.apache.wicket.util.resource.IResourceStream, java.nio.charset.Charset)
 */
public static String readString(IResourceStream resourceStream)
{
    // a null charset makes the overload fall back to Charset.defaultCharset()
    return readString(resourceStream, null);
}
/**
 * Reads a string with the specified encoding from the given resource stream.
 *
 * @param resourceStream
 *            string source; closed by this method on every code path
 * @param charset
 *            charset for the string decoding (use <code>null</code> for platform default)
 * @return string read from resource stream
 * @throws WicketRuntimeException
 *             if the stream cannot be located or read
 */
public static String readString(IResourceStream resourceStream, Charset charset)
{
    try
    {
        InputStream stream = resourceStream.getInputStream();
        try
        {
            byte[] bytes = IOUtils.toByteArray(stream);

            if (charset == null)
            {
                charset = Charset.defaultCharset();
            }
            // Decode via the Charset overload: unlike new String(bytes, charset.name()),
            // it cannot throw the checked UnsupportedEncodingException, which would have
            // been mis-reported as "failed to read" by the IOException handler below.
            return new String(bytes, charset);
        }
        finally
        {
            // NOTE(review): closing the resource stream is assumed to release the
            // InputStream it handed out — confirm against IResourceStream implementations.
            resourceStream.close();
        }
    }
    catch (IOException e)
    {
        throw new WicketRuntimeException("failed to read string from " + resourceStream, e);
    }
    catch (ResourceStreamNotFoundException e)
    {
        throw new WicketRuntimeException("failed to locate stream from " + resourceStream, e);
    }
}
/**
 * Reverts the escaping applied by {@linkplain #escapeAttributesSeparator(String)}: every
 * <em>~~</em> pair becomes a literal <em>~</em> and every remaining single <em>~</em>
 * becomes <em>-</em>.
 * <p>
 * Implemented as a single left-to-right scan. The previous regex-based version
 * ({@code (\w)~(\w)}) failed to unescape tildes at the start/end of the string, next to
 * non-word characters, and in alternating sequences such as {@code a~b~c}.
 *
 * @param attribute
 *            the attribute to unescape
 * @return the attribute with the separator character restored
 */
public static String unescapeAttributesSeparator(String attribute)
{
    StringBuilder unescaped = new StringBuilder(attribute.length());
    for (int i = 0; i < attribute.length(); i++)
    {
        char c = attribute.charAt(i);
        if (c == '~')
        {
            if (i + 1 < attribute.length() && attribute.charAt(i + 1) == '~')
            {
                // "~~" is an escaped literal tilde; consume both characters
                unescaped.append('~');
                i++;
            }
            else
            {
                // a lone tilde stands for the '-' separator
                unescaped.append('-');
            }
        }
        else
        {
            unescaped.append(c);
        }
    }
    return unescaped.toString();
}
/**
 * Private constructor: this is a static utility class and must never be instantiated.
 */
private ResourceUtil()
{
    // no-op
}
}
| AlienQueen/wicket | wicket-core/src/main/java/org/apache/wicket/resource/ResourceUtil.java | Java | apache-2.0 | 8,028 |
/* Copyright 2015 Samsung Electronics Co., LTD
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "vulkanCore.h"

#include <assert.h>
#include <cstring>
#include <vector>

#include "util/gvr_log.h"
VulkanCore* VulkanCore::theInstance = NULL;
bool VulkanCore::CreateInstance(){
VkResult ret = VK_SUCCESS;
// Discover the number of extensions listed in the instance properties in order to allocate
// a buffer large enough to hold them.
uint32_t instanceExtensionCount = 0;
ret = vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount, nullptr);
GVR_VK_CHECK(!ret);
VkBool32 surfaceExtFound = 0;
VkBool32 platformSurfaceExtFound = 0;
VkExtensionProperties* instanceExtensions = nullptr;
instanceExtensions = new VkExtensionProperties[instanceExtensionCount];
// Now request instanceExtensionCount VkExtensionProperties elements be read into out buffer
ret = vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount, instanceExtensions);
GVR_VK_CHECK(!ret);
// We require two extensions, VK_KHR_surface and VK_KHR_android_surface. If they are found,
// add them to the extensionNames list that we'll use to initialize our instance with later.
uint32_t enabledExtensionCount = 0;
const char* extensionNames[16];
for (uint32_t i = 0; i < instanceExtensionCount; i++) {
if (!strcmp(VK_KHR_SURFACE_EXTENSION_NAME, instanceExtensions[i].extensionName)) {
surfaceExtFound = 1;
extensionNames[enabledExtensionCount++] = VK_KHR_SURFACE_EXTENSION_NAME;
}
if (!strcmp(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME, instanceExtensions[i].extensionName)) {
platformSurfaceExtFound = 1;
extensionNames[enabledExtensionCount++] = VK_KHR_ANDROID_SURFACE_EXTENSION_NAME;
}
GVR_VK_CHECK(enabledExtensionCount < 16);
}
if (!surfaceExtFound) {
LOGE("vkEnumerateInstanceExtensionProperties failed to find the " VK_KHR_SURFACE_EXTENSION_NAME" extension.");
return false;
}
if (!platformSurfaceExtFound) {
LOGE("vkEnumerateInstanceExtensionProperties failed to find the " VK_KHR_ANDROID_SURFACE_EXTENSION_NAME" extension.");
return false;
}
// We specify the Vulkan version our application was built with,
// as well as names and versions for our application and engine,
// if applicable. This allows the driver to gain insight to what
// is utilizing the vulkan driver, and serve appropriate versions.
VkApplicationInfo applicationInfo = {};
applicationInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
applicationInfo.pNext = nullptr;
applicationInfo.pApplicationName = GVR_VK_SAMPLE_NAME;
applicationInfo.applicationVersion = 0;
applicationInfo.pEngineName = "VkSample";
applicationInfo.engineVersion = 1;
applicationInfo.apiVersion = VK_API_VERSION_1_0;
// Creation information for the instance points to details about
// the application, and also the list of extensions to enable.
VkInstanceCreateInfo instanceCreateInfo = {};
instanceCreateInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
instanceCreateInfo.pNext = nullptr;
instanceCreateInfo.pApplicationInfo = &applicationInfo;
instanceCreateInfo.enabledLayerCount = 0;
instanceCreateInfo.ppEnabledLayerNames = nullptr;
instanceCreateInfo.enabledExtensionCount = enabledExtensionCount;
instanceCreateInfo.ppEnabledExtensionNames = extensionNames;
// The main Vulkan instance is created with the creation infos above.
// We do not specify a custom memory allocator for instance creation.
ret = vkCreateInstance(&instanceCreateInfo, nullptr, &(m_instance));
// we can delete the list of extensions after calling vkCreateInstance
delete[] instanceExtensions;
// Vulkan API return values can expose further information on a failure.
// For instance, INCOMPATIBLE_DRIVER may be returned if the API level
// an application is built with, exposed through VkApplicationInfo, is
// newer than the driver present on a device.
if (ret == VK_ERROR_INCOMPATIBLE_DRIVER) {
LOGE("Cannot find a compatible Vulkan installable client driver: vkCreateInstance Failure");
return false;
} else if (ret == VK_ERROR_EXTENSION_NOT_PRESENT) {
LOGE("Cannot find a specified extension library: vkCreateInstance Failure");
return false;
} else {
GVR_VK_CHECK(!ret);
}
return true;
}
// Enumerates the physical devices exposed by the instance, selects the first one and
// caches its properties and memory properties (needed later for buffer allocations).
// Returns false when no physical device is present.
bool VulkanCore::GetPhysicalDevices(){
    VkResult ret = VK_SUCCESS;

    // Query number of physical devices available.
    ret = vkEnumeratePhysicalDevices(m_instance, &(m_physicalDeviceCount), nullptr);
    GVR_VK_CHECK(!ret);

    if (m_physicalDeviceCount == 0)
    {
        LOGE("No physical devices detected.");
        return false;
    }

    // Allocate space for the correct number of devices, before requesting their data.
    // NOTE(review): the array is stored in a member and not freed here — confirm the
    // destructor releases it and that this method is not invoked more than once.
    m_pPhysicalDevices = new VkPhysicalDevice[m_physicalDeviceCount];
    ret = vkEnumeratePhysicalDevices(m_instance, &(m_physicalDeviceCount), m_pPhysicalDevices);
    GVR_VK_CHECK(!ret);

    // For purposes of this sample, we simply use the first device.
    m_physicalDevice = m_pPhysicalDevices[0];

    // By querying the device properties, we learn the device name, amongst other details.
    vkGetPhysicalDeviceProperties(m_physicalDevice, &(m_physicalDeviceProperties));
    LOGI("Vulkan Device: %s", m_physicalDeviceProperties.deviceName);

    // Get memory information and properties - this is required later, when we begin
    // allocating buffers to store data.
    vkGetPhysicalDeviceMemoryProperties(m_physicalDevice, &(m_physicalDeviceMemoryProperties));

    return true;
}
void VulkanCore::InitDevice() {
VkResult ret = VK_SUCCESS;
// Akin to when creating the instance, we can query extensions supported by the physical device
// that we have selected to use.
uint32_t deviceExtensionCount = 0;
VkExtensionProperties *device_extensions = nullptr;
ret = vkEnumerateDeviceExtensionProperties(m_physicalDevice, nullptr, &deviceExtensionCount, nullptr);
GVR_VK_CHECK(!ret);
VkBool32 swapchainExtFound = 0;
VkExtensionProperties* deviceExtensions = new VkExtensionProperties[deviceExtensionCount];
ret = vkEnumerateDeviceExtensionProperties(m_physicalDevice, nullptr, &deviceExtensionCount, deviceExtensions);
GVR_VK_CHECK(!ret);
// For our example, we require the swapchain extension, which is used to present backbuffers efficiently
// to the users screen.
uint32_t enabledExtensionCount = 0;
const char* extensionNames[16] = {0};
for (uint32_t i = 0; i < deviceExtensionCount; i++) {
if (!strcmp(VK_KHR_SWAPCHAIN_EXTENSION_NAME, deviceExtensions[i].extensionName)) {
swapchainExtFound = 1;
extensionNames[enabledExtensionCount++] = VK_KHR_SWAPCHAIN_EXTENSION_NAME;
}
GVR_VK_CHECK(enabledExtensionCount < 16);
}
if (!swapchainExtFound) {
LOGE("vkEnumerateDeviceExtensionProperties failed to find the " VK_KHR_SWAPCHAIN_EXTENSION_NAME " extension: vkCreateInstance Failure");
// Always attempt to enable the swapchain
extensionNames[enabledExtensionCount++] = VK_KHR_SWAPCHAIN_EXTENSION_NAME;
}
//InitSurface();
// Before we create our main Vulkan device, we must ensure our physical device
// has queue families which can perform the actions we require. For this, we request
// the number of queue families, and their properties.
uint32_t queueFamilyCount = 0;
vkGetPhysicalDeviceQueueFamilyProperties(m_physicalDevice, &queueFamilyCount, nullptr);
VkQueueFamilyProperties* queueProperties = new VkQueueFamilyProperties[queueFamilyCount];
vkGetPhysicalDeviceQueueFamilyProperties(m_physicalDevice, &queueFamilyCount, queueProperties);
GVR_VK_CHECK(queueFamilyCount >= 1);
// We query each queue family in turn for the ability to support the android surface
// that was created earlier. We need the device to be able to present its images to
// this surface, so it is important to test for this.
VkBool32* supportsPresent = new VkBool32[queueFamilyCount];
for (uint32_t i = 0; i < queueFamilyCount; i++) {
vkGetPhysicalDeviceSurfaceSupportKHR(m_physicalDevice, i, m_surface, &supportsPresent[i]);
}
// Search for a graphics queue, and ensure it also supports our surface. We want a
// queue which can be used for both, as to simplify operations.
uint32_t queueIndex = queueFamilyCount + 1;
for (uint32_t i = 0; i < queueFamilyCount; i++) {
if ((queueProperties[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0) {
if (supportsPresent[i] == VK_TRUE) {
queueIndex = i;
break;
}
}
}
delete [] supportsPresent;
delete [] queueProperties;
if (queueIndex == (queueFamilyCount + 1)) {
GVR_VK_CHECK("Could not obtain a queue family for both graphics and presentation." && 0);
}
// We have identified a queue family which both supports our android surface,
// and can be used for graphics operations.
m_queueFamilyIndex = queueIndex;
// As we create the device, we state we will be creating a queue of the
// family type required. 1.0 is the highest priority and we use that.
float queuePriorities[1] = { 1.0 };
VkDeviceQueueCreateInfo deviceQueueCreateInfo = {};
deviceQueueCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
deviceQueueCreateInfo.pNext = nullptr;
deviceQueueCreateInfo.queueFamilyIndex = m_queueFamilyIndex;
deviceQueueCreateInfo.queueCount = 1;
deviceQueueCreateInfo.pQueuePriorities = queuePriorities;
// Now we pass the queue create info, as well as our requested extensions,
// into our DeviceCreateInfo structure.
VkDeviceCreateInfo deviceCreateInfo = {};
deviceCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
deviceCreateInfo.pNext = nullptr;
deviceCreateInfo.queueCreateInfoCount = 1;
deviceCreateInfo.pQueueCreateInfos = &deviceQueueCreateInfo;
deviceCreateInfo.enabledLayerCount = 0;
deviceCreateInfo.ppEnabledLayerNames = nullptr;
deviceCreateInfo.enabledExtensionCount = enabledExtensionCount;
deviceCreateInfo.ppEnabledExtensionNames = extensionNames;
// Create the device.
ret = vkCreateDevice(m_physicalDevice, &deviceCreateInfo, nullptr, &m_device);
GVR_VK_CHECK(!ret);
// Obtain the device queue that we requested.
vkGetDeviceQueue(m_device, m_queueFamilyIndex, 0, &m_queue);
}
// Creates the offscreen "swapchain" substitute: m_swapchainImageCount colour images
// (RGBA8, linearly tiled, host-visible so the CPU can read them back) plus matching
// D16 depth buffers, all sized width x height, each with bound memory and an image view.
// NOTE(review): these are plain VkImages, not a VkSwapchainKHR — presumably the rendered
// pixels are copied/read back elsewhere; confirm against the render loop.
void VulkanCore::InitSwapchain(uint32_t width, uint32_t height){
    VkResult ret = VK_SUCCESS;

    m_width = width;
    m_height = height;

    // Colour target description: 2D image, single mip/layer, linear tiling so host
    // mapping works, usable as colour attachment and transfer source.
    VkImageCreateInfo imageCreateInfo = {};
    imageCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
    imageCreateInfo.pNext = nullptr;
    imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
    // NOTE(review): UINT (integer) components, not UNORM — confirm this matches the
    // render pass format (m_surfaceFormat.format) used in InitRenderPass().
    imageCreateInfo.format = VK_FORMAT_R8G8B8A8_UINT;
    imageCreateInfo.extent = {m_width, m_height, 1};
    imageCreateInfo.mipLevels = 1;
    imageCreateInfo.arrayLayers = 1;
    imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
    imageCreateInfo.tiling = VK_IMAGE_TILING_LINEAR;
    imageCreateInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT ;
    imageCreateInfo.flags = 0;
    imageCreateInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;

    // Double-buffered set of colour images.
    m_swapchainImageCount = 2;
    m_swapchainBuffers = new GVR_VK_SwapchainBuffer[m_swapchainImageCount];
    GVR_VK_CHECK(m_swapchainBuffers);

    for(int i = 0; i < m_swapchainImageCount; i++) {
        VkMemoryRequirements mem_reqs;
        VkResult err;
        bool pass;

        // Create the image with details as imageCreateInfo.
        ret = vkCreateImage(m_device, &imageCreateInfo, nullptr, &m_swapchainBuffers[i].image);
        GVR_VK_CHECK(!ret);

        // Discover what memory requirements are for this image.
        vkGetImageMemoryRequirements(m_device, m_swapchainBuffers[i].image, &mem_reqs);
        m_swapchainBuffers[i].size = mem_reqs.size;

        // Allocate host-visible memory according to those requirements.
        VkMemoryAllocateInfo memoryAllocateInfo = {};
        memoryAllocateInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
        memoryAllocateInfo.pNext = nullptr;
        memoryAllocateInfo.allocationSize = 0;
        memoryAllocateInfo.memoryTypeIndex = 0;
        memoryAllocateInfo.allocationSize = mem_reqs.size;
        pass = GetMemoryTypeFromProperties(mem_reqs.memoryTypeBits, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, &memoryAllocateInfo.memoryTypeIndex);
        GVR_VK_CHECK(pass);

        err = vkAllocateMemory(m_device, &memoryAllocateInfo, nullptr, &m_swapchainBuffers[i].mem);
        GVR_VK_CHECK(!err);

        // Bind memory to the image.
        err = vkBindImageMemory(m_device, m_swapchainBuffers[i].image, m_swapchainBuffers[i].mem, 0);
        GVR_VK_CHECK(!err);

        // Create a view covering the single mip level / array layer of this image.
        VkImageViewCreateInfo imageViewCreateInfo = {};
        imageViewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
        imageViewCreateInfo.pNext = nullptr;
        imageViewCreateInfo.format = VK_FORMAT_R8G8B8A8_UINT;  // must match the image format above
        imageViewCreateInfo.components.r = VK_COMPONENT_SWIZZLE_R;
        imageViewCreateInfo.components.g = VK_COMPONENT_SWIZZLE_G;
        imageViewCreateInfo.components.b = VK_COMPONENT_SWIZZLE_B;
        imageViewCreateInfo.components.a = VK_COMPONENT_SWIZZLE_A;
        imageViewCreateInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
        imageViewCreateInfo.subresourceRange.baseMipLevel = 0;
        imageViewCreateInfo.subresourceRange.levelCount = 1;
        imageViewCreateInfo.subresourceRange.baseArrayLayer = 0;
        imageViewCreateInfo.subresourceRange.layerCount = 1;
        imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
        imageViewCreateInfo.flags = 0;
        imageViewCreateInfo.image = m_swapchainBuffers[i].image;

        err = vkCreateImageView(m_device, &imageViewCreateInfo, nullptr, &m_swapchainBuffers[i].view);
        GVR_VK_CHECK(!err);
    }

    // One optimally-tiled D16 depth buffer per colour image.
    m_depthBuffers = new GVR_VK_DepthBuffer[m_swapchainImageCount];
    for (int i = 0; i < m_swapchainImageCount; i++) {
        const VkFormat depthFormat = VK_FORMAT_D16_UNORM;

        // Depth image description (shadows the colour imageCreateInfo above on purpose).
        VkImageCreateInfo imageCreateInfo = {};
        imageCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
        imageCreateInfo.pNext = nullptr;
        imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
        imageCreateInfo.format = depthFormat;
        imageCreateInfo.extent = {m_width, m_height, 1};
        imageCreateInfo.mipLevels = 1;
        imageCreateInfo.arrayLayers = 1;
        imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
        imageCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
        imageCreateInfo.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
        imageCreateInfo.flags = 0;

        // View template; the image handle is filled in after creation below.
        VkImageViewCreateInfo imageViewCreateInfo = {};
        imageViewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
        imageViewCreateInfo.pNext = nullptr;
        imageViewCreateInfo.image = VK_NULL_HANDLE;
        imageViewCreateInfo.format = depthFormat;
        imageViewCreateInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
        imageViewCreateInfo.subresourceRange.baseMipLevel = 0;
        imageViewCreateInfo.subresourceRange.levelCount = 1;
        imageViewCreateInfo.subresourceRange.baseArrayLayer = 0;
        imageViewCreateInfo.subresourceRange.layerCount = 1;
        imageViewCreateInfo.flags = 0;
        imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;

        VkMemoryRequirements mem_reqs;
        VkResult err;
        bool pass;

        m_depthBuffers[i].format = depthFormat;

        // Create the image with details as imageCreateInfo.
        err = vkCreateImage(m_device, &imageCreateInfo, nullptr, &m_depthBuffers[i].image);
        GVR_VK_CHECK(!err);

        // Discover what memory requirements are for this image.
        vkGetImageMemoryRequirements(m_device, m_depthBuffers[i].image, &mem_reqs);

        // Allocate memory according to requirements; a zero property mask accepts any
        // memory type (device-local is fine, the depth buffer is never read by the CPU).
        VkMemoryAllocateInfo memoryAllocateInfo = {};
        memoryAllocateInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
        memoryAllocateInfo.pNext = nullptr;
        memoryAllocateInfo.allocationSize = 0;
        memoryAllocateInfo.memoryTypeIndex = 0;
        memoryAllocateInfo.allocationSize = mem_reqs.size;
        pass = GetMemoryTypeFromProperties(mem_reqs.memoryTypeBits, 0, &memoryAllocateInfo.memoryTypeIndex);
        GVR_VK_CHECK(pass);

        err = vkAllocateMemory(m_device, &memoryAllocateInfo, nullptr, &m_depthBuffers[i].mem);
        GVR_VK_CHECK(!err);

        // Bind memory to the image.
        err = vkBindImageMemory(m_device, m_depthBuffers[i].image, m_depthBuffers[i].mem, 0);
        GVR_VK_CHECK(!err);

        // Create the view for this image.
        imageViewCreateInfo.image = m_depthBuffers[i].image;
        err = vkCreateImageView(m_device, &imageViewCreateInfo, nullptr, &m_depthBuffers[i].view);
        GVR_VK_CHECK(!err);
    }
}
// Finds the first memory type that is allowed by typeBits (one bit per memory type, as
// reported in VkMemoryRequirements::memoryTypeBits) and also carries every property in
// requirements_mask. Writes the index into *typeIndex and returns true on success.
bool VulkanCore::GetMemoryTypeFromProperties( uint32_t typeBits, VkFlags requirements_mask, uint32_t* typeIndex)
{
    GVR_VK_CHECK(typeIndex != nullptr);
    // Only the first memoryTypeCount entries of memoryTypes[] are valid; the previous
    // version scanned a hard-coded 32 slots, reading entries past the reported count.
    for (uint32_t i = 0; i < m_physicalDeviceMemoryProperties.memoryTypeCount; i++) {
        if ((typeBits & (1u << i)) != 0) {
            // Type is available; does it match the user's requested properties?
            if ((m_physicalDeviceMemoryProperties.memoryTypes[i].propertyFlags &
                 requirements_mask) == requirements_mask) {
                *typeIndex = i;
                return true;
            }
        }
    }
    // No memory type matched, return failure.
    return false;
}
void VulkanCore::InitCommandbuffers(){
VkResult ret = VK_SUCCESS;
// Command buffers are allocated from a pool; we define that pool here and create it.
VkCommandPoolCreateInfo commandPoolCreateInfo = {};
commandPoolCreateInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
commandPoolCreateInfo.pNext = nullptr;
commandPoolCreateInfo.queueFamilyIndex = m_queueFamilyIndex;
commandPoolCreateInfo.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
ret = vkCreateCommandPool(m_device, &commandPoolCreateInfo, nullptr, &m_commandPool);
GVR_VK_CHECK(!ret);
VkCommandBufferAllocateInfo commandBufferAllocateInfo = {};
commandBufferAllocateInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
commandBufferAllocateInfo.pNext = nullptr;
commandBufferAllocateInfo.commandPool = m_commandPool;
commandBufferAllocateInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
commandBufferAllocateInfo.commandBufferCount = 1;
// Create render command buffers, one per swapchain image
for (int i=0; i < m_swapchainImageCount; i++)
{
ret = vkAllocateCommandBuffers(m_device, &commandBufferAllocateInfo, &m_swapchainBuffers[i].cmdBuffer);
GVR_VK_CHECK(!ret);
}
}
// Builds a host-visible vertex buffer containing a single triangle (interleaved
// float3 position + float4 RGBA colour per vertex), uploads the data through a CPU
// mapping, and fills in the vertex-input state consumed later by the pipeline.
void VulkanCore::InitVertexBuffers(){
    // Our vertex buffer data is a simple triangle, with associated vertex colors.
    const float vb[3][7] = {
        //      position              color
        { -0.9f, -0.9f,  0.9f,     1.0f, 0.0f, 0.0f, 1.0f },
        {  0.9f, -0.9f,  0.9f,     1.0f, 0.0f, 0.0f, 1.0f },
        {  0.0f,  0.9f,  0.9f,     1.0f, 0.0f, 0.0f, 1.0f },
    };

    VkResult err;
    bool pass;

    // Our m_vertices member contains the types required for storing
    // and defining our vertex buffer within the graphics pipeline.
    memset(&m_vertices, 0, sizeof(m_vertices));

    // Create our buffer object, sized for the triangle data.
    VkBufferCreateInfo bufferCreateInfo = {};
    bufferCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    bufferCreateInfo.pNext = nullptr;
    bufferCreateInfo.size = sizeof(vb);
    bufferCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
    bufferCreateInfo.flags = 0;
    err = vkCreateBuffer(m_device, &bufferCreateInfo, nullptr, &m_vertices.buf);
    GVR_VK_CHECK(!err);

    // Obtain the memory requirements for this buffer.
    VkMemoryRequirements mem_reqs;
    vkGetBufferMemoryRequirements(m_device, m_vertices.buf, &mem_reqs);
    // NOTE(review): this re-checks the err value from vkCreateBuffer above;
    // vkGetBufferMemoryRequirements returns void and cannot change err.
    GVR_VK_CHECK(!err);

    // And allocate host-visible memory according to those requirements, so the CPU
    // can write the vertex data directly.
    VkMemoryAllocateInfo memoryAllocateInfo = {};
    memoryAllocateInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    memoryAllocateInfo.pNext = nullptr;
    memoryAllocateInfo.allocationSize = 0;
    memoryAllocateInfo.memoryTypeIndex = 0;
    memoryAllocateInfo.allocationSize = mem_reqs.size;
    pass = GetMemoryTypeFromProperties(mem_reqs.memoryTypeBits, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, &memoryAllocateInfo.memoryTypeIndex);
    GVR_VK_CHECK(pass);
    err = vkAllocateMemory(m_device, &memoryAllocateInfo, nullptr, &m_vertices.mem);
    GVR_VK_CHECK(!err);

    // Now we need to map the memory of this new allocation so the CPU can edit it.
    void *data;
    err = vkMapMemory(m_device, m_vertices.mem, 0, memoryAllocateInfo.allocationSize, 0, &data);
    GVR_VK_CHECK(!err);

    // Copy our triangle verticies and colors into the mapped memory area.
    memcpy(data, vb, sizeof(vb));

    // Unmap the memory back from the CPU.
    vkUnmapMemory(m_device, m_vertices.mem);

    // Bind our buffer to the memory.
    err = vkBindBufferMemory(m_device, m_vertices.buf, m_vertices.mem, 0);
    GVR_VK_CHECK(!err);

    // The vertices need to be defined so that the pipeline understands how the
    // data is laid out. This is done by providing a VkPipelineVertexInputStateCreateInfo
    // structure with the correct information.
    m_vertices.vi.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
    m_vertices.vi.pNext = nullptr;
    m_vertices.vi.vertexBindingDescriptionCount = 1;
    m_vertices.vi.pVertexBindingDescriptions = m_vertices.vi_bindings;
    m_vertices.vi.vertexAttributeDescriptionCount = 2;
    m_vertices.vi.pVertexAttributeDescriptions = m_vertices.vi_attrs;

    // We bind the buffer as a whole, using the correct buffer ID.
    // This defines the stride for each element of the vertex array.
    m_vertices.vi_bindings[0].binding = GVR_VK_VERTEX_BUFFER_BIND_ID;
    m_vertices.vi_bindings[0].stride = sizeof(vb[0]);
    m_vertices.vi_bindings[0].inputRate = VK_VERTEX_INPUT_RATE_VERTEX;

    // Within each element, we define the attributes. At location 0,
    // the vertex positions, in float3 format, with offset 0 as they are
    // first in the array structure.
    m_vertices.vi_attrs[0].binding = GVR_VK_VERTEX_BUFFER_BIND_ID;
    m_vertices.vi_attrs[0].location = 0;
    m_vertices.vi_attrs[0].format = VK_FORMAT_R32G32B32_SFLOAT; //float3
    m_vertices.vi_attrs[0].offset = 0;

    // The second location is the vertex colors, in RGBA float4 format.
    // These appear in each element in memory after the float3 vertex
    // positions, so the offset is set accordingly.
    m_vertices.vi_attrs[1].binding = GVR_VK_VERTEX_BUFFER_BIND_ID;
    m_vertices.vi_attrs[1].location = 1;
    m_vertices.vi_attrs[1].format = VK_FORMAT_R32G32B32A32_SFLOAT; //float4
    m_vertices.vi_attrs[1].offset = sizeof(float) * 3;
}
void VulkanCore::InitLayouts(){
VkResult ret = VK_SUCCESS;
// This sample has no bindings, so the layout is empty.
VkDescriptorSetLayoutCreateInfo descriptorSetLayoutCreateInfo = {};
descriptorSetLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
descriptorSetLayoutCreateInfo.pNext = nullptr;
descriptorSetLayoutCreateInfo.bindingCount = 0;
descriptorSetLayoutCreateInfo.pBindings = nullptr;
ret = vkCreateDescriptorSetLayout(m_device, &descriptorSetLayoutCreateInfo, nullptr, &m_descriptorLayout);
GVR_VK_CHECK(!ret);
// Our pipeline layout simply points to the empty descriptor layout.
VkPipelineLayoutCreateInfo pipelineLayoutCreateInfo = {};
pipelineLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
pipelineLayoutCreateInfo.pNext = nullptr;
pipelineLayoutCreateInfo.setLayoutCount = 1;
pipelineLayoutCreateInfo.pSetLayouts = &m_descriptorLayout;
ret = vkCreatePipelineLayout(m_device, &pipelineLayoutCreateInfo, nullptr, &m_pipelineLayout);
GVR_VK_CHECK(!ret);
}
// Creates the render pass: a colour attachment (cleared on load, stored, ending in
// TRANSFER_SRC layout so its contents can be copied out) plus a depth attachment,
// with a single graphics subpass.
// NOTE(review): although the depth attachment is declared in the render pass, the
// subpass sets pDepthStencilAttachment to nullptr (the &depthReference is commented
// out), so depth testing is effectively disabled here — confirm this is intentional.
void VulkanCore::InitRenderPass(){
    // The renderpass defines the attachments to the framebuffer object that gets
    // used in the pipeline. We have two attachments, the colour buffer, and the
    // depth buffer. The operations and layouts are set to defaults for this type
    // of attachment.
    VkAttachmentDescription attachmentDescriptions[2] = {};
    attachmentDescriptions[0].flags = 0;
    attachmentDescriptions[0].format = m_surfaceFormat.format;
    attachmentDescriptions[0].samples = VK_SAMPLE_COUNT_1_BIT;
    attachmentDescriptions[0].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
    attachmentDescriptions[0].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
    attachmentDescriptions[0].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
    attachmentDescriptions[0].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
    attachmentDescriptions[0].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    // Finishes in TRANSFER_SRC so the rendered image can be copied out afterwards.
    attachmentDescriptions[0].finalLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;

    attachmentDescriptions[1].flags = 0;
    attachmentDescriptions[1].format = m_depthBuffers[0].format;
    attachmentDescriptions[1].samples = VK_SAMPLE_COUNT_1_BIT;
    attachmentDescriptions[1].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
    attachmentDescriptions[1].storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
    attachmentDescriptions[1].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
    attachmentDescriptions[1].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
    attachmentDescriptions[1].initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
    attachmentDescriptions[1].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

    // We have references to the attachment offsets, stating the layout type.
    VkAttachmentReference colorReference = {};
    colorReference.attachment = 0;
    colorReference.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

    VkAttachmentReference depthReference = {};
    depthReference.attachment = 1;
    depthReference.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

    // There can be multiple subpasses in a renderpass, but this example has only one.
    // We set the color and depth references at the grahics bind point in the pipeline.
    VkSubpassDescription subpassDescription = {};
    subpassDescription.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
    subpassDescription.flags = 0;
    subpassDescription.inputAttachmentCount = 0;
    subpassDescription.pInputAttachments = nullptr;
    subpassDescription.colorAttachmentCount = 1;
    subpassDescription.pColorAttachments = &colorReference;
    subpassDescription.pResolveAttachments = nullptr;
    // Depth deliberately left unused; see the NOTE in the function header.
    subpassDescription.pDepthStencilAttachment = nullptr;//&depthReference;
    subpassDescription.preserveAttachmentCount = 0;
    subpassDescription.pPreserveAttachments = nullptr;

    // The renderpass itself is created with the number of subpasses, and the
    // list of attachments which those subpasses can reference.
    VkRenderPassCreateInfo renderPassCreateInfo = {};
    renderPassCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
    renderPassCreateInfo.pNext = nullptr;
    renderPassCreateInfo.attachmentCount = 2;
    renderPassCreateInfo.pAttachments = attachmentDescriptions;
    renderPassCreateInfo.subpassCount = 1;
    renderPassCreateInfo.pSubpasses = &subpassDescription;
    renderPassCreateInfo.dependencyCount = 0;
    renderPassCreateInfo.pDependencies = nullptr;

    VkResult ret;
    ret = vkCreateRenderPass(m_device, &renderPassCreateInfo, nullptr, &m_renderPass);
    GVR_VK_CHECK(!ret);
}
void VulkanCore::InitPipeline(){
#if 0
VkResult err;
// The pipeline contains all major state for rendering.
// Our vertex input is a single vertex buffer, and its layout is defined
// in our m_vertices object already. Use this when creating the pipeline.
VkPipelineVertexInputStateCreateInfo vi = {};
vi = m_vertices.vi;
// Our vertex buffer describes a triangle list.
VkPipelineInputAssemblyStateCreateInfo ia = {};
ia.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
ia.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
// State for rasterization, such as polygon fill mode is defined.
VkPipelineRasterizationStateCreateInfo rs = {};
rs.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
rs.polygonMode = VK_POLYGON_MODE_FILL;
rs.cullMode = VK_CULL_MODE_BACK_BIT;
rs.frontFace = VK_FRONT_FACE_CLOCKWISE;
rs.depthClampEnable = VK_FALSE;
rs.rasterizerDiscardEnable = VK_FALSE;
rs.depthBiasEnable = VK_FALSE;
// For this example we do not do blending, so it is disabled.
VkPipelineColorBlendAttachmentState att_state[1] = {};
att_state[0].colorWriteMask = 0xf;
att_state[0].blendEnable = VK_FALSE;
VkPipelineColorBlendStateCreateInfo cb = {};
cb.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
cb.attachmentCount = 1;
cb.pAttachments = &att_state[0];
// We define a simple viewport and scissor. It does not change during rendering
// in this sample.
VkPipelineViewportStateCreateInfo vp = {};
vp.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
vp.viewportCount = 1;
vp.scissorCount = 1;
VkViewport viewport = {};
viewport.height = (float) m_height;
viewport.width = (float) m_width;
viewport.minDepth = (float) 0.0f;
viewport.maxDepth = (float) 1.0f;
vp.pViewports = &viewport;
VkRect2D scissor = {};
scissor.extent.width = m_width;
scissor.extent.height = m_height;
scissor.offset.x = 0;
scissor.offset.y = 0;
vp.pScissors = &scissor;
// Standard depth and stencil state is defined
VkPipelineDepthStencilStateCreateInfo ds = {};
ds.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
ds.depthTestEnable = VK_TRUE;
ds.depthWriteEnable = VK_TRUE;
ds.depthCompareOp = VK_COMPARE_OP_LESS_OR_EQUAL;
ds.depthBoundsTestEnable = VK_FALSE;
ds.back.failOp = VK_STENCIL_OP_KEEP;
ds.back.passOp = VK_STENCIL_OP_KEEP;
ds.back.compareOp = VK_COMPARE_OP_ALWAYS;
ds.stencilTestEnable = VK_FALSE;
ds.front = ds.back;
// We do not use multisample
VkPipelineMultisampleStateCreateInfo ms = {};
ms.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
ms.pSampleMask = nullptr;
ms.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
// We define two shader stages: our vertex and fragment shader.
// they are embedded as SPIR-V into a header file for ease of deployment.
VkPipelineShaderStageCreateInfo shaderStages[2] = {};
shaderStages[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
shaderStages[0].stage = VK_SHADER_STAGE_VERTEX_BIT;
shaderStages[0].module = CreateShaderModule( (const uint32_t*)&shader_tri_vert[0], shader_tri_vert_size);
shaderStages[0].pName = "main";
shaderStages[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
shaderStages[1].stage = VK_SHADER_STAGE_FRAGMENT_BIT;
shaderStages[1].module = CreateShaderModule( (const uint32_t*)&shader_tri_frag[0], shader_tri_frag_size);
shaderStages[1].pName = "main";
// Pipelines are allocated from pipeline caches.
VkPipelineCacheCreateInfo pipelineCache = {};
pipelineCache.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
pipelineCache.pNext = nullptr;
pipelineCache.flags = 0;
VkPipelineCache piplineCache;
err = vkCreatePipelineCache(m_device, &pipelineCache, nullptr, &piplineCache);
GVR_VK_CHECK(!err);
// Out graphics pipeline records all state information, including our renderpass
// and pipeline layout. We do not have any dynamic state in this example.
VkGraphicsPipelineCreateInfo pipelineCreateInfo = {};
pipelineCreateInfo.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
pipelineCreateInfo.layout = m_pipelineLayout;
pipelineCreateInfo.pVertexInputState = &vi;
pipelineCreateInfo.pInputAssemblyState = &ia;
pipelineCreateInfo.pRasterizationState = &rs;
pipelineCreateInfo.pColorBlendState = &cb;
pipelineCreateInfo.pMultisampleState = &ms;
pipelineCreateInfo.pViewportState = &vp;
pipelineCreateInfo.pDepthStencilState = nullptr;//&ds;
pipelineCreateInfo.pStages = &shaderStages[0];
pipelineCreateInfo.renderPass = m_renderPass;
pipelineCreateInfo.pDynamicState = nullptr;
pipelineCreateInfo.stageCount = 2; //vertex and fragment
err = vkCreateGraphicsPipelines(m_device, piplineCache, 1, &pipelineCreateInfo, nullptr, &m_pipeline);
GVR_VK_CHECK(!err);
// We can destroy the cache now as we do not need it. The shader modules also
// can be destroyed after the pipeline is created.
vkDestroyPipelineCache(m_device, piplineCache, nullptr);
vkDestroyShaderModule(m_device, shaderStages[0].module, nullptr);
vkDestroyShaderModule(m_device, shaderStages[1].module, nullptr);
#endif
}
void VulkanCore::InitFrameBuffers(){
//The framebuffer objects reference the renderpass, and allow
// the references defined in that renderpass to now attach to views.
// The views in this example are the colour view, which is our swapchain image,
// and the depth buffer created manually earlier.
VkImageView attachments [2] = {};
VkFramebufferCreateInfo framebufferCreateInfo = {};
framebufferCreateInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
framebufferCreateInfo.pNext = nullptr;
framebufferCreateInfo.renderPass = m_renderPass;
framebufferCreateInfo.attachmentCount = 2;
framebufferCreateInfo.pAttachments = attachments;
framebufferCreateInfo.width = m_width;
framebufferCreateInfo.height = m_height;
framebufferCreateInfo.layers = 1;
VkResult ret;
m_frameBuffers = new VkFramebuffer[m_swapchainImageCount];
// Reusing the framebufferCreateInfo to create m_swapchainImageCount framebuffers,
// only the attachments to the relevent image views change each time.
for (uint32_t i = 0; i < m_swapchainImageCount; i++) {
attachments[0] = m_swapchainBuffers[i].view;
//framebufferCreateInfo.pAttachments = &m_swapchainBuffers[i].view;
attachments[1] = m_depthBuffers[i].view;
LOGI("Vulkan view %d created", i);
if((m_swapchainBuffers[i].view == VK_NULL_HANDLE) || (m_renderPass == VK_NULL_HANDLE)){
LOGI("Vulkan image view null");
}
else
LOGI("Vulkan image view not null");
ret = vkCreateFramebuffer(m_device, &framebufferCreateInfo, nullptr, &m_frameBuffers[i]);
GVR_VK_CHECK(!ret);
}
}
void VulkanCore::InitSync(){
VkResult ret = VK_SUCCESS;
// For synchronization, we have semaphores for rendering and backbuffer signalling.
VkSemaphoreCreateInfo semaphoreCreateInfo = {};
semaphoreCreateInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
semaphoreCreateInfo.pNext = nullptr;
semaphoreCreateInfo.flags = 0;
ret = vkCreateSemaphore(m_device, &semaphoreCreateInfo, nullptr, &m_backBufferSemaphore);
GVR_VK_CHECK(!ret);
ret = vkCreateSemaphore(m_device, &semaphoreCreateInfo, nullptr, &m_renderCompleteSemaphore);
GVR_VK_CHECK(!ret);
}
void VulkanCore::BuildCmdBuffer()
{
    // Pre-records one command buffer per swapchain image (the scene is
    // static), then submits the current image's buffer, waits for the queue
    // to drain, and maps the backing memory once to dump the rendered pixels
    // for debugging.
    for (uint32_t i = 0; i < m_swapchainImageCount; i++) {
        VkCommandBuffer &cmdBuffer = m_swapchainBuffers[i].cmdBuffer;

        // vkBeginCommandBuffer implicitly resets the command buffer, but an
        // explicit reset makes the intent clear.
        VkResult err;
        err = vkResetCommandBuffer(cmdBuffer, 0);
        GVR_VK_CHECK(!err);

        VkCommandBufferInheritanceInfo cmd_buf_hinfo = {};
        cmd_buf_hinfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
        cmd_buf_hinfo.pNext = nullptr;
        cmd_buf_hinfo.renderPass = VK_NULL_HANDLE;
        cmd_buf_hinfo.subpass = 0;
        cmd_buf_hinfo.framebuffer = VK_NULL_HANDLE;
        cmd_buf_hinfo.occlusionQueryEnable = VK_FALSE;
        cmd_buf_hinfo.queryFlags = 0;
        cmd_buf_hinfo.pipelineStatistics = 0;

        VkCommandBufferBeginInfo cmd_buf_info = {};
        cmd_buf_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
        cmd_buf_info.pNext = nullptr;
        cmd_buf_info.flags = 0;
        cmd_buf_info.pInheritanceInfo = &cmd_buf_hinfo;

        // Put cmdBuffer into the recording state.
        err = vkBeginCommandBuffer(cmdBuffer, &cmd_buf_info);
        GVR_VK_CHECK(!err);

        // Transition the backbuffer image out of PRESENT mode before we
        // render to it; it is transitioned back after rendering.
        // NOTE(review): newLayout is TRANSFER_DST_OPTIMAL even though the
        // render pass uses the image as a colour attachment — confirm this
        // matches the render pass's initialLayout.
        VkImageMemoryBarrier preRenderBarrier = {};
        preRenderBarrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
        preRenderBarrier.pNext = nullptr;
        preRenderBarrier.srcAccessMask = VK_ACCESS_MEMORY_READ_BIT;
        preRenderBarrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
        preRenderBarrier.oldLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
        preRenderBarrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
        preRenderBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
        preRenderBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
        preRenderBarrier.image = m_swapchainBuffers[i].image;
        preRenderBarrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
        // Fixed: the range must cover mip 0 / layer 0 of the swapchain image.
        // Previously baseMipLevel was 1 (a mip that does not exist on a
        // single-mip swapchain image) and layerCount was 0, which the spec
        // forbids (must be >= 1 or VK_REMAINING_ARRAY_LAYERS).
        preRenderBarrier.subresourceRange.baseMipLevel = 0;
        preRenderBarrier.subresourceRange.levelCount = 1;
        preRenderBarrier.subresourceRange.baseArrayLayer = 0;
        preRenderBarrier.subresourceRange.layerCount = 1;

        // vkCmdPipelineBarrier can take memory, buffer-memory and
        // image-memory barriers; we only provide a single image barrier.
        vkCmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                             VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                             0, 0, nullptr, 0, nullptr, 1, &preRenderBarrier);

        // Clear values for the colour and depth/stencil attachments.
        VkClearValue clear_values[2] = {};
        clear_values[0].color.float32[0] = 0.3f;
        clear_values[0].color.float32[1] = 0.3f;
        clear_values[0].color.float32[2] = 0.3f;
        clear_values[0].color.float32[3] = 1.0f;
        clear_values[1].depthStencil.depth = 1.0f;
        clear_values[1].depthStencil.stencil = 0;

        VkRenderPassBeginInfo rp_begin = {};
        rp_begin.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
        rp_begin.pNext = nullptr;
        rp_begin.renderPass = m_renderPass;
        rp_begin.framebuffer = m_frameBuffers[i];
        rp_begin.renderArea.offset.x = 0;
        rp_begin.renderArea.offset.y = 0;
        rp_begin.renderArea.extent.width = m_width;
        rp_begin.renderArea.extent.height = m_height;
        rp_begin.clearValueCount = 2;
        rp_begin.pClearValues = clear_values;

        vkCmdBeginRenderPass(cmdBuffer, &rp_begin, VK_SUBPASS_CONTENTS_INLINE);

        // The pipeline carries all static state (e.g. triangle-list topology).
        vkCmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, m_pipeline);

        // Bind the vertex buffer at offset 0 and draw our 3 vertices.
        VkDeviceSize offsets[1] = {0};
        vkCmdBindVertexBuffers(cmdBuffer, GVR_VK_VERTEX_BUFFER_BIND_ID, 1,
                               &m_vertices.buf, offsets);
        vkCmdDraw(cmdBuffer, 3, 1, 0, 0);

        // Copy region kept for the (currently disabled) image-to-buffer
        // readback below. An unused duplicate copy struct was removed.
        VkBufferImageCopy region = { 0 };
        region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
        region.imageSubresource.layerCount = 1;
        region.imageExtent.width = m_width;
        region.imageExtent.height = m_height;
        region.imageExtent.depth = 1;

        // The render pass has ended.
        vkCmdEndRenderPass(cmdBuffer);
        //vkCmdCopyImageToBuffer(cmdBuffer, m_swapchainBuffers[i].image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, m_outputBuffers[i].imageOutputBuffer, 1, &region);

        // Transition the swapchain image back for presentation.
        // NOTE(review): newLayout here is TRANSFER_DST_OPTIMAL while the
        // comment/intent is PRESENT_SRC_KHR, and oldLayout repeats
        // PRESENT_SRC_KHR — confirm against the render pass finalLayout.
        VkImageMemoryBarrier prePresentBarrier = {};
        prePresentBarrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
        prePresentBarrier.pNext = nullptr;
        prePresentBarrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
        prePresentBarrier.dstAccessMask = VK_ACCESS_MEMORY_READ_BIT;
        prePresentBarrier.oldLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
        prePresentBarrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
        prePresentBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
        prePresentBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
        prePresentBarrier.image = m_swapchainBuffers[i].image;
        prePresentBarrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
        // Fixed: same subresource-range corrections as the pre-render
        // barrier; levelCount was previously left 0 by the zero-initializer.
        prePresentBarrier.subresourceRange.baseMipLevel = 0;
        prePresentBarrier.subresourceRange.levelCount = 1;
        prePresentBarrier.subresourceRange.baseArrayLayer = 0;
        prePresentBarrier.subresourceRange.layerCount = 1;

        vkCmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                             VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
                             0, 0, nullptr, 0, nullptr, 1, &prePresentBarrier);

        // Ending the command buffer takes it out of the recording state.
        err = vkEndCommandBuffer(cmdBuffer);
        GVR_VK_CHECK(!err);
    }

    // Submit the pre-recorded buffer for the current swapchain image.
    VkSubmitInfo submitInfo = {};
    submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submitInfo.pNext = nullptr;
    submitInfo.waitSemaphoreCount = 1;
    submitInfo.pWaitSemaphores = &m_backBufferSemaphore;
    submitInfo.pWaitDstStageMask = nullptr;
    submitInfo.commandBufferCount = 1;
    submitInfo.pCommandBuffers = &m_swapchainBuffers[m_swapchainCurrentIdx].cmdBuffer;
    submitInfo.signalSemaphoreCount = 1;
    submitInfo.pSignalSemaphores = &m_renderCompleteSemaphore;

    VkResult err;
    err = vkQueueSubmit(m_queue, 1, &submitInfo, VK_NULL_HANDLE);
    GVR_VK_CHECK(!err);
    err = vkQueueWaitIdle(m_queue);
    if (err != VK_SUCCESS)
        LOGI("Vulkan vkQueueWaitIdle submit failed");
    LOGI("Vulkan vkQueueWaitIdle submitted");

    // One-shot debug dump of the rendered image (guarded by a static flag).
    static bool printflag = true;
    if (printflag) {
        uint8_t *data;
        err = vkMapMemory(m_device, m_swapchainBuffers[m_swapchainCurrentIdx].mem, 0,
                          m_swapchainBuffers[m_swapchainCurrentIdx].size, 0, (void **)&data);
        GVR_VK_CHECK(!err);

        const size_t outputBytes = m_width * m_height * 4 * sizeof(uint8_t);
        uint8_t *finaloutput = (uint8_t *)malloc(outputBytes);
        for (int i = 0; i < (320); i++)
            finaloutput[i] = 0;
        // Fixed: previously logged sizeof(finaloutput), i.e. the size of the
        // pointer, not the size of the allocation.
        LOGI("Vulkan size of %zu", outputBytes);
        memcpy(finaloutput, data, outputBytes);
        LOGI("Vulkan memcpy map done");
        // Dump raw bytes four at a time (presumably RGBA — TODO confirm the
        // swapchain format; the loop bound walks bytes, not pixels).
        for (int i = 0; i < (m_width * m_height) - 4; i++) {
            LOGI("Vulkan Data %u, %u %u %u", data[i], data[i + 1], data[i + 2], data[i + 3]);
            i += 3;
        }
        // NOTE(review): texDataVulkan keeps pointing at the mapped memory,
        // which is unmapped just below — any later read through it is
        // use-after-unmap. Confirm the intended consumer.
        texDataVulkan = data;//finaloutput;
        LOGI("Vulkan data reading done");
        // Fixed: finaloutput was previously leaked.
        free(finaloutput);
        vkUnmapMemory(m_device, m_swapchainBuffers[m_swapchainCurrentIdx].mem);
        printflag = false;
    }
}
void VulkanCore::initVulkanCore()
{
// NOTE: the entire initialization sequence below is compiled out (#if 0),
// so this function is currently a no-op. The call order is preserved for
// reference: loader/instance/device setup, swapchain, command buffers,
// vertex buffers, descriptor/pipeline layouts, render pass, pipeline,
// framebuffers, sync objects, then pre-recorded command buffers.
#if 0
    InitVulkan();
    CreateInstance();
    GetPhysicalDevices();
    InitDevice();
    InitSwapchain(1024 , 1024);
    LOGI("Vulkan after swap chain");
    InitCommandbuffers();
    LOGI("Vulkan after cmd buffers");
    InitVertexBuffers();
    LOGI("Vulkan after vert buf");
    InitLayouts();
    LOGI("Vulkan after layout");
    InitRenderPass();
    LOGI("Vulkan after render pass");
    InitPipeline();
    LOGI("Vulkan after piplen");
    InitFrameBuffers();
    LOGI("Vulkan after FBO");
    InitSync();
    LOGI("Vulkan after synch");
    // Initialize our command buffers
    BuildCmdBuffer();
#endif
}
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import datetime
import logging
import os
import django
from django.core.urlresolvers import reverse
from django import http
from django.test.utils import override_settings
from django.utils import timezone
from django.utils import unittest
from mox import IgnoreArg # noqa
from mox import IsA # noqa
from horizon import exceptions
from horizon.workflows import views
from openstack_dashboard import api
from openstack_dashboard.dashboards.identity.projects import workflows
from openstack_dashboard import policy_backend
from openstack_dashboard.test import helpers as test
from openstack_dashboard import usage
from openstack_dashboard.usage import quotas
with_sel = os.environ.get('WITH_SELENIUM', False)
if with_sel:
from selenium.webdriver import ActionChains # noqa
from selenium.webdriver.common import keys
from socket import timeout as socket_timeout # noqa
INDEX_URL = reverse('horizon:identity:projects:index')
USER_ROLE_PREFIX = workflows.PROJECT_USER_MEMBER_SLUG + "_role_"
GROUP_ROLE_PREFIX = workflows.PROJECT_GROUP_MEMBER_SLUG + "_role_"
PROJECT_DETAIL_URL = reverse('horizon:identity:projects:detail', args=[1])
class TenantsViewTests(test.BaseAdminViewTests):
    """Admin-view tests for the project (tenant) index page.

    Uses mox record/replay: every expected API call is recorded before
    ``ReplayAll()``; the recording order must not be changed.
    """

    @test.create_stubs({api.keystone: ('tenant_list',)})
    def test_index(self):
        """Index lists every tenant when no domain context is set."""
        # tenant_list returns a (projects, has_more_data) pair.
        api.keystone.tenant_list(IsA(http.HttpRequest),
                                 domain=None,
                                 paginate=True,
                                 marker=None) \
            .AndReturn([self.tenants.list(), False])
        self.mox.ReplayAll()
        res = self.client.get(INDEX_URL)
        self.assertTemplateUsed(res, 'identity/projects/index.html')
        self.assertItemsEqual(res.context['table'].data, self.tenants.list())

    @test.create_stubs({api.keystone: ('tenant_list', )})
    def test_index_with_domain_context(self):
        """With a domain context set, only that domain's tenants appear."""
        domain = self.domains.get(id="1")
        self.setSessionValues(domain_context=domain.id,
                              domain_context_name=domain.name)
        # Expected result set: fixture tenants belonging to the domain.
        domain_tenants = [tenant for tenant in self.tenants.list()
                          if tenant.domain_id == domain.id]
        api.keystone.tenant_list(IsA(http.HttpRequest),
                                 domain=domain.id,
                                 paginate=True,
                                 marker=None) \
            .AndReturn([domain_tenants, False])
        self.mox.ReplayAll()
        res = self.client.get(INDEX_URL)
        self.assertTemplateUsed(res, 'identity/projects/index.html')
        self.assertItemsEqual(res.context['table'].data, domain_tenants)
        # The page should advertise the active domain context.
        self.assertContains(res, "<em>test_domain:</em>")
class ProjectsViewNonAdminTests(test.TestCase):
    """Tests for the project index as seen by a non-admin user."""

    @override_settings(POLICY_CHECK_FUNCTION=policy_backend.check)
    @test.create_stubs({api.keystone: ('tenant_list',)})
    def test_index(self):
        """Non-admin listing is scoped to the logged-in user (admin=False)."""
        api.keystone.tenant_list(IsA(http.HttpRequest),
                                 user=self.user.id,
                                 paginate=True,
                                 marker=None,
                                 admin=False) \
            .AndReturn([self.tenants.list(), False])
        self.mox.ReplayAll()
        res = self.client.get(INDEX_URL)
        self.assertTemplateUsed(res, 'identity/projects/index.html')
        self.assertItemsEqual(res.context['table'].data, self.tenants.list())
class CreateProjectWorkflowTests(test.BaseAdminViewTests):
def _get_project_info(self, project):
domain = self._get_default_domain()
project_info = {"name": project.name,
"description": project.description,
"enabled": project.enabled,
"domain": domain.id}
return project_info
def _get_workflow_fields(self, project):
domain = self._get_default_domain()
project_info = {"domain_id": domain.id,
"domain_name": domain.name,
"name": project.name,
"description": project.description,
"enabled": project.enabled}
return project_info
def _get_quota_info(self, quota):
cinder_quota = self.cinder_quotas.first()
neutron_quota = self.neutron_quotas.first()
quota_data = {}
for field in quotas.NOVA_QUOTA_FIELDS:
quota_data[field] = int(quota.get(field).limit)
for field in quotas.CINDER_QUOTA_FIELDS:
quota_data[field] = int(cinder_quota.get(field).limit)
for field in quotas.NEUTRON_QUOTA_FIELDS:
quota_data[field] = int(neutron_quota.get(field).limit)
return quota_data
def _get_workflow_data(self, project, quota):
project_info = self._get_workflow_fields(project)
quota_data = self._get_quota_info(quota)
project_info.update(quota_data)
return project_info
def _get_default_domain(self):
default_domain = self.domain
domain = {"id": self.request.session.get('domain_context',
default_domain.id),
"name": self.request.session.get('domain_context_name',
default_domain.name)}
return api.base.APIDictWrapper(domain)
def _get_all_users(self, domain_id):
if not domain_id:
users = self.users.list()
else:
users = [user for user in self.users.list()
if user.domain_id == domain_id]
return users
def _get_all_groups(self, domain_id):
if not domain_id:
groups = self.groups.list()
else:
groups = [group for group in self.groups.list()
if group.domain_id == domain_id]
return groups
    @test.create_stubs({api.keystone: ('get_default_domain',
                                       'get_default_role',
                                       'user_list',
                                       'group_list',
                                       'role_list'),
                        api.base: ('is_service_enabled',),
                        api.neutron: ('is_extension_supported',),
                        quotas: ('get_default_quota_data',)})
    def test_add_project_get(self):
        """GET of the create-project workflow renders all four steps with
        quota defaults pre-filled in the form."""
        quota = self.quotas.first()
        default_role = self.roles.first()
        default_domain = self._get_default_domain()
        domain_id = default_domain.id
        users = self._get_all_users(domain_id)
        groups = self._get_all_groups(domain_id)
        roles = self.roles.list()
        # init -- record the calls made while building the workflow form.
        api.base.is_service_enabled(IsA(http.HttpRequest), 'network') \
            .MultipleTimes().AndReturn(True)
        api.base.is_service_enabled(IsA(http.HttpRequest), 'volume') \
            .MultipleTimes().AndReturn(True)
        api.keystone.get_default_domain(IsA(http.HttpRequest)) \
            .AndReturn(default_domain)
        api.neutron.is_extension_supported(
            IsA(http.HttpRequest), 'security-group').AndReturn(True)
        quotas.get_default_quota_data(IsA(http.HttpRequest)).AndReturn(quota)
        api.keystone.get_default_role(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(default_role)
        api.keystone.user_list(IsA(http.HttpRequest), domain=domain_id) \
            .AndReturn(users)
        # role_list is recorded twice: once for the members step, once for
        # the groups step.
        api.keystone.role_list(IsA(http.HttpRequest)).AndReturn(roles)
        api.keystone.group_list(IsA(http.HttpRequest), domain=domain_id) \
            .AndReturn(groups)
        api.keystone.role_list(IsA(http.HttpRequest)).AndReturn(roles)
        self.mox.ReplayAll()
        url = reverse('horizon:identity:projects:create')
        res = self.client.get(url)
        self.assertTemplateUsed(res, views.WorkflowView.template_name)
        # Neutron quotas are not enabled here, so 'subnet' renders hidden.
        self.assertContains(res, '<input type="hidden" name="subnet" '
                            'id="id_subnet" />', html=True)
        workflow = res.context['workflow']
        self.assertEqual(res.context['workflow'].name,
                         workflows.CreateProject.name)
        step = workflow.get_step("createprojectinfoaction")
        # Quota defaults must seed the initial form data.
        self.assertEqual(step.action.initial['ram'], quota.get('ram').limit)
        self.assertEqual(step.action.initial['injected_files'],
                         quota.get('injected_files').limit)
        self.assertQuerysetEqual(
            workflow.steps,
            ['<CreateProjectInfo: createprojectinfoaction>',
             '<UpdateProjectMembers: update_members>',
             '<UpdateProjectGroups: update_group_members>',
             '<CreateProjectQuota: create_quotas>'])
def test_add_project_get_domain(self):
domain = self.domains.get(id="1")
self.setSessionValues(domain_context=domain.id,
domain_context_name=domain.name)
self.test_add_project_get()
    @test.create_stubs({api.keystone: ('get_default_role',
                                       'user_list',
                                       'group_list',
                                       'role_list',
                                       'domain_get'),
                        api.neutron: ('is_extension_supported',
                                      'tenant_quota_get'),
                        quotas: ('get_default_quota_data',)})
    @test.update_settings(OPENSTACK_NEUTRON_NETWORK={'enable_quotas': True})
    def test_add_project_get_with_neutron(self):
        """With neutron quotas enabled, the 'subnet' field renders editable
        and is seeded from the neutron quota fixture."""
        quota = self.quotas.first()
        neutron_quotas = self.neutron_quotas.first()
        quotas.get_default_quota_data(IsA(http.HttpRequest)) \
            .AndReturn(quota)
        api.neutron.is_extension_supported(IsA(http.HttpRequest), 'quotas') \
            .MultipleTimes().AndReturn(True)
        api.neutron.is_extension_supported(
            IsA(http.HttpRequest), 'security-group').AndReturn(True)
        api.neutron.tenant_quota_get(IsA(http.HttpRequest),
                                     tenant_id=self.tenant.id) \
            .AndReturn(neutron_quotas)
        api.keystone.get_default_role(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(self.roles.first())
        api.keystone.user_list(IsA(http.HttpRequest), domain=None) \
            .AndReturn(self.users.list())
        api.keystone.role_list(IsA(http.HttpRequest)) \
            .AndReturn(self.roles.list())
        api.keystone.group_list(IsA(http.HttpRequest), domain=None) \
            .AndReturn(self.groups.list())
        api.keystone.role_list(IsA(http.HttpRequest)) \
            .AndReturn(self.roles.list())
        self.mox.ReplayAll()
        res = self.client.get(reverse('horizon:identity:projects:create'))
        self.assertTemplateUsed(res, views.WorkflowView.template_name)
        # Django 1.6+ renders IntegerField as type="number"; older versions
        # render type="text", hence the version fork below.
        if django.VERSION >= (1, 6):
            self.assertContains(res, '''
                <input class="form-control"
                id="id_subnet" min="-1"
                name="subnet" type="number" value="10" />
                ''', html=True)
        else:
            self.assertContains(res, '''
                <input class="form-control"
                name="subnet" id="id_subnet"
                value="10" type="text" />
                ''', html=True)
        workflow = res.context['workflow']
        self.assertEqual(res.context['workflow'].name,
                         workflows.CreateProject.name)
        step = workflow.get_step("createprojectinfoaction")
        self.assertEqual(step.action.initial['ram'], quota.get('ram').limit)
        self.assertEqual(step.action.initial['subnet'],
                         neutron_quotas.get('subnet').limit)
    @test.create_stubs({api.keystone: ('get_default_role',
                                       'add_tenant_user_role',
                                       'tenant_create',
                                       'user_list',
                                       'group_list',
                                       'role_list',
                                       'domain_get'),
                        quotas: ('get_default_quota_data',
                                 'get_disabled_quotas',
                                 'tenant_quota_usages',),
                        api.cinder: ('tenant_quota_update',),
                        api.nova: ('tenant_quota_update',)})
    def test_add_project_post(self, neutron=False):
        """POSTing the workflow creates the tenant and pushes nova/cinder
        quota updates. ``neutron=True`` (used by the neutron variant) records
        one extra get_disabled_quotas call."""
        project = self.tenants.first()
        quota = self.quotas.first()
        default_role = self.roles.first()
        default_domain = self._get_default_domain()
        domain_id = default_domain.id
        users = self._get_all_users(domain_id)
        groups = self._get_all_groups(domain_id)
        roles = self.roles.list()
        # init -- expectations for rendering/validating the workflow.
        quotas.get_disabled_quotas(IsA(http.HttpRequest)) \
            .AndReturn(self.disabled_quotas.first())
        if neutron:
            quotas.get_disabled_quotas(IsA(http.HttpRequest)) \
                .AndReturn(self.disabled_quotas.first())
        quotas.get_default_quota_data(IsA(http.HttpRequest)).AndReturn(quota)
        api.keystone.get_default_role(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(default_role)
        api.keystone.user_list(IsA(http.HttpRequest), domain=domain_id) \
            .AndReturn(users)
        api.keystone.role_list(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(roles)
        api.keystone.group_list(IsA(http.HttpRequest), domain=domain_id) \
            .AndReturn(groups)
        # handle -- expectations for the workflow's handle() phase.
        project_details = self._get_project_info(project)
        quota_data = self._get_quota_info(quota)
        api.keystone.tenant_create(IsA(http.HttpRequest), **project_details) \
            .AndReturn(project)
        workflow_data = {}
        # NOTE(review): workflow_data is empty at this point, so neither loop
        # below records any role-grant expectations — consistent with a POST
        # that selects no members/groups. Confirm this is intentional.
        for role in roles:
            if USER_ROLE_PREFIX + role.id in workflow_data:
                ulist = workflow_data[USER_ROLE_PREFIX + role.id]
                for user_id in ulist:
                    api.keystone.add_tenant_user_role(IsA(http.HttpRequest),
                                                      project=self.tenant.id,
                                                      user=user_id,
                                                      role=role.id)
        for role in roles:
            if GROUP_ROLE_PREFIX + role.id in workflow_data:
                ulist = workflow_data[GROUP_ROLE_PREFIX + role.id]
                for group_id in ulist:
                    api.keystone.add_group_role(IsA(http.HttpRequest),
                                                role=role.id,
                                                group=group_id,
                                                project=self.tenant.id)
        nova_updated_quota = dict([(key, quota_data[key]) for key in
                                   quotas.NOVA_QUOTA_FIELDS])
        api.nova.tenant_quota_update(IsA(http.HttpRequest),
                                     project.id,
                                     **nova_updated_quota)
        cinder_updated_quota = dict([(key, quota_data[key]) for key in
                                     quotas.CINDER_QUOTA_FIELDS])
        api.cinder.tenant_quota_update(IsA(http.HttpRequest),
                                       project.id,
                                       **cinder_updated_quota)
        self.mox.ReplayAll()
        workflow_data.update(self._get_workflow_data(project, quota))
        url = reverse('horizon:identity:projects:create')
        res = self.client.post(url, workflow_data)
        self.assertNoFormErrors(res)
        self.assertRedirectsNoFollow(res, INDEX_URL)
def test_add_project_post_domain(self):
domain = self.domains.get(id="1")
self.setSessionValues(domain_context=domain.id,
domain_context_name=domain.name)
self.test_add_project_post()
    @test.create_stubs({api.neutron: ('is_extension_supported',
                                      'tenant_quota_update')})
    @test.update_settings(OPENSTACK_NEUTRON_NETWORK={'enable_quotas': True})
    def test_add_project_post_with_neutron(self):
        """POST variant with neutron quotas enabled: additionally expects a
        neutron tenant_quota_update, then delegates to the base POST test."""
        quota_data = self.neutron_quotas.first()
        neutron_updated_quota = dict([(key, quota_data.get(key).limit)
                                      for key in quotas.NEUTRON_QUOTA_FIELDS])
        api.neutron.is_extension_supported(
            IsA(http.HttpRequest), 'security-group').AndReturn(True)
        api.neutron.is_extension_supported(IsA(http.HttpRequest), 'quotas') \
            .MultipleTimes().AndReturn(True)
        api.neutron.tenant_quota_update(IsA(http.HttpRequest),
                                        self.tenant.id,
                                        **neutron_updated_quota)
        # neutron=True makes the base test record the extra
        # get_disabled_quotas call and run ReplayAll with these expectations.
        self.test_add_project_post(neutron=True)
    @test.create_stubs({api.keystone: ('user_list',
                                       'role_list',
                                       'group_list',
                                       'get_default_domain',
                                       'get_default_role'),
                        quotas: ('get_default_quota_data',
                                 'get_disabled_quotas')})
    def test_add_project_quota_defaults_error(self):
        """A failure fetching quota defaults shows an error message but still
        renders the workflow page."""
        default_role = self.roles.first()
        default_domain = self._get_default_domain()
        domain_id = default_domain.id
        users = self._get_all_users(domain_id)
        groups = self._get_all_groups(domain_id)
        roles = self.roles.list()
        # init
        api.keystone.get_default_domain(IsA(http.HttpRequest)) \
            .AndReturn(default_domain)
        quotas.get_disabled_quotas(IsA(http.HttpRequest)) \
            .AndReturn(self.disabled_quotas.first())
        # Simulate nova failing while loading the quota defaults.
        quotas.get_default_quota_data(IsA(http.HttpRequest)) \
            .AndRaise(self.exceptions.nova)
        api.keystone.get_default_role(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(default_role)
        api.keystone.user_list(IsA(http.HttpRequest), domain=domain_id) \
            .AndReturn(users)
        api.keystone.role_list(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(roles)
        api.keystone.group_list(IsA(http.HttpRequest), domain=domain_id) \
            .AndReturn(groups)
        self.mox.ReplayAll()
        url = reverse('horizon:identity:projects:create')
        res = self.client.get(url)
        self.assertTemplateUsed(res, views.WorkflowView.template_name)
        self.assertContains(res, "Unable to retrieve default quota values")
def test_add_project_quota_defaults_error_domain(self):
domain = self.domains.get(id="1")
self.setSessionValues(domain_context=domain.id,
domain_context_name=domain.name)
self.test_add_project_quota_defaults_error()
    @test.create_stubs({api.keystone: ('tenant_create',
                                       'user_list',
                                       'role_list',
                                       'group_list',
                                       'get_default_domain',
                                       'get_default_role'),
                        quotas: ('get_default_quota_data',
                                 'get_disabled_quotas',
                                 'tenant_quota_usages')})
    def test_add_project_tenant_create_error(self):
        """If keystone tenant_create fails, the POST still redirects to the
        index without form errors (the error is surfaced as a message)."""
        project = self.tenants.first()
        quota = self.quotas.first()
        default_role = self.roles.first()
        default_domain = self._get_default_domain()
        domain_id = default_domain.id
        users = self._get_all_users(domain_id)
        groups = self._get_all_groups(domain_id)
        roles = self.roles.list()
        # init
        api.keystone.get_default_domain(IsA(http.HttpRequest)) \
            .AndReturn(default_domain)
        quotas.get_disabled_quotas(IsA(http.HttpRequest)) \
            .AndReturn(self.disabled_quotas.first())
        quotas.get_default_quota_data(IsA(http.HttpRequest)).AndReturn(quota)
        api.keystone.get_default_role(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(default_role)
        api.keystone.user_list(IsA(http.HttpRequest), domain=domain_id) \
            .AndReturn(users)
        api.keystone.role_list(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(roles)
        api.keystone.group_list(IsA(http.HttpRequest), domain=domain_id) \
            .AndReturn(groups)
        # handle -- tenant_create is the first handle() call and it raises.
        project_details = self._get_project_info(project)
        api.keystone.tenant_create(IsA(http.HttpRequest), **project_details) \
            .AndRaise(self.exceptions.keystone)
        self.mox.ReplayAll()
        workflow_data = self._get_workflow_data(project, quota)
        url = reverse('horizon:identity:projects:create')
        res = self.client.post(url, workflow_data)
        self.assertNoFormErrors(res)
        self.assertRedirectsNoFollow(res, INDEX_URL)
def test_add_project_tenant_create_error_domain(self):
domain = self.domains.get(id="1")
self.setSessionValues(domain_context=domain.id,
domain_context_name=domain.name)
self.test_add_project_tenant_create_error()
    @test.create_stubs({api.keystone: ('tenant_create',
                                       'user_list',
                                       'role_list',
                                       'group_list',
                                       'get_default_domain',
                                       'get_default_role',
                                       'add_tenant_user_role'),
                        quotas: ('get_default_quota_data',
                                 'get_disabled_quotas',
                                 'tenant_quota_usages'),
                        api.nova: ('tenant_quota_update',)})
    def test_add_project_quota_update_error(self):
        """If the nova quota update fails after the tenant is created, the
        POST still completes and redirects to the index."""
        project = self.tenants.first()
        quota = self.quotas.first()
        default_role = self.roles.first()
        default_domain = self._get_default_domain()
        domain_id = default_domain.id
        users = self._get_all_users(domain_id)
        groups = self._get_all_groups(domain_id)
        roles = self.roles.list()
        # init
        api.keystone.get_default_domain(IsA(http.HttpRequest)) \
            .AndReturn(default_domain)
        quotas.get_disabled_quotas(IsA(http.HttpRequest)) \
            .AndReturn(self.disabled_quotas.first())
        quotas.get_default_quota_data(IsA(http.HttpRequest)).AndReturn(quota)
        api.keystone.get_default_role(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(default_role)
        api.keystone.user_list(IsA(http.HttpRequest), domain=domain_id) \
            .AndReturn(users)
        api.keystone.role_list(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(roles)
        api.keystone.group_list(IsA(http.HttpRequest), domain=domain_id) \
            .AndReturn(groups)
        # handle
        project_details = self._get_project_info(project)
        quota_data = self._get_quota_info(quota)
        api.keystone.tenant_create(IsA(http.HttpRequest), **project_details) \
            .AndReturn(project)
        workflow_data = {}
        # NOTE(review): workflow_data is empty here, so the role loops below
        # record no expectations — consistent with a POST that selects no
        # members/groups.
        for role in roles:
            if USER_ROLE_PREFIX + role.id in workflow_data:
                ulist = workflow_data[USER_ROLE_PREFIX + role.id]
                for user_id in ulist:
                    api.keystone.add_tenant_user_role(IsA(http.HttpRequest),
                                                      project=self.tenant.id,
                                                      user=user_id,
                                                      role=role.id)
        for role in roles:
            if GROUP_ROLE_PREFIX + role.id in workflow_data:
                ulist = workflow_data[GROUP_ROLE_PREFIX + role.id]
                for group_id in ulist:
                    api.keystone.add_group_role(IsA(http.HttpRequest),
                                                role=role.id,
                                                group=group_id,
                                                project=self.tenant.id)
        nova_updated_quota = dict([(key, quota_data[key]) for key in
                                   quotas.NOVA_QUOTA_FIELDS])
        # The nova quota update is the failing call under test.
        api.nova.tenant_quota_update(IsA(http.HttpRequest),
                                     project.id,
                                     **nova_updated_quota) \
            .AndRaise(self.exceptions.nova)
        self.mox.ReplayAll()
        workflow_data.update(self._get_workflow_data(project, quota))
        url = reverse('horizon:identity:projects:create')
        res = self.client.post(url, workflow_data)
        self.assertNoFormErrors(res)
        self.assertRedirectsNoFollow(res, INDEX_URL)
def test_add_project_quota_update_error_domain(self):
domain = self.domains.get(id="1")
self.setSessionValues(domain_context=domain.id,
domain_context_name=domain.name)
self.test_add_project_quota_update_error()
    @test.create_stubs({api.keystone: ('tenant_create',
                                       'user_list',
                                       'role_list',
                                       'group_list',
                                       'get_default_domain',
                                       'get_default_role',
                                       'add_tenant_user_role'),
                        quotas: ('get_default_quota_data',
                                 'get_disabled_quotas',
                                 'tenant_quota_usages'),
                        api.cinder: ('tenant_quota_update',),
                        api.nova: ('tenant_quota_update',)})
    def test_add_project_user_update_error(self):
        """Project creation still succeeds when assigning a user role fails.

        Records the full create-project mox script, arranges for
        ``add_tenant_user_role`` to raise a keystone exception, and checks
        that the POST completes without form errors and redirects to the
        index (i.e. the failure is handled, not propagated).
        """
        project = self.tenants.first()
        quota = self.quotas.first()
        default_role = self.roles.first()
        default_domain = self._get_default_domain()
        domain_id = default_domain.id
        users = self._get_all_users(domain_id)
        groups = self._get_all_groups(domain_id)
        roles = self.roles.list()
        # init: expectations for rendering the create-project workflow form.
        api.keystone.get_default_domain(IsA(http.HttpRequest)) \
            .AndReturn(default_domain)
        quotas.get_disabled_quotas(IsA(http.HttpRequest)) \
            .AndReturn(self.disabled_quotas.first())
        quotas.get_default_quota_data(IsA(http.HttpRequest)).AndReturn(quota)
        api.keystone.get_default_role(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(default_role)
        api.keystone.user_list(IsA(http.HttpRequest), domain=domain_id) \
            .AndReturn(users)
        api.keystone.role_list(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(roles)
        api.keystone.group_list(IsA(http.HttpRequest), domain=domain_id) \
            .AndReturn(groups)
        # handle: expectations for processing the submitted workflow.
        project_details = self._get_project_info(project)
        quota_data = self._get_quota_info(quota)
        api.keystone.tenant_create(IsA(http.HttpRequest), **project_details) \
            .AndReturn(project)
        workflow_data = {}
        # NOTE(review): workflow_data is empty here, so the membership test
        # below is never true and the failing add_tenant_user_role
        # expectation is never actually recorded — the loop appears to be
        # dead code; confirm the test still exercises the intended error
        # path.
        for role in roles:
            if USER_ROLE_PREFIX + role.id in workflow_data:
                ulist = workflow_data[USER_ROLE_PREFIX + role.id]
                for user_id in ulist:
                    api.keystone.add_tenant_user_role(IsA(http.HttpRequest),
                                                      project=self.tenant.id,
                                                      user=user_id,
                                                      role=role.id) \
                        .AndRaise(self.exceptions.keystone)
                    break
            break
        nova_updated_quota = dict([(key, quota_data[key]) for key in
                                   quotas.NOVA_QUOTA_FIELDS])
        api.nova.tenant_quota_update(IsA(http.HttpRequest),
                                     project.id,
                                     **nova_updated_quota)
        cinder_updated_quota = dict([(key, quota_data[key]) for key in
                                     quotas.CINDER_QUOTA_FIELDS])
        api.cinder.tenant_quota_update(IsA(http.HttpRequest),
                                       project.id,
                                       **cinder_updated_quota)
        self.mox.ReplayAll()
        workflow_data.update(self._get_workflow_data(project, quota))
        url = reverse('horizon:identity:projects:create')
        res = self.client.post(url, workflow_data)
        self.assertNoFormErrors(res)
        self.assertRedirectsNoFollow(res, INDEX_URL)
def test_add_project_user_update_error_domain(self):
domain = self.domains.get(id="1")
self.setSessionValues(domain_context=domain.id,
domain_context_name=domain.name)
self.test_add_project_user_update_error()
    @test.create_stubs({api.keystone: ('user_list',
                                       'role_list',
                                       'group_list',
                                       'get_default_domain',
                                       'get_default_role'),
                        quotas: ('get_default_quota_data',
                                 'get_disabled_quotas',
                                 'tenant_quota_usages')})
    def test_add_project_missing_field_error(self):
        """Submitting the create form with an empty name shows a field error.

        Only the form-rendering expectations are recorded: validation fails
        before any keystone create call is made, and the response must
        contain the standard "field is required" message.
        """
        project = self.tenants.first()
        quota = self.quotas.first()
        default_role = self.roles.first()
        default_domain = self._get_default_domain()
        domain_id = default_domain.id
        users = self._get_all_users(domain_id)
        groups = self._get_all_groups(domain_id)
        roles = self.roles.list()
        # init: expectations for rendering the create-project workflow form.
        api.keystone.get_default_domain(IsA(http.HttpRequest)) \
            .AndReturn(default_domain)
        quotas.get_disabled_quotas(IsA(http.HttpRequest)) \
            .AndReturn(self.disabled_quotas.first())
        quotas.get_default_quota_data(IsA(http.HttpRequest)).AndReturn(quota)
        api.keystone.get_default_role(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(default_role)
        api.keystone.user_list(IsA(http.HttpRequest), domain=domain_id) \
            .AndReturn(users)
        api.keystone.role_list(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(roles)
        api.keystone.group_list(IsA(http.HttpRequest), domain=domain_id) \
            .AndReturn(groups)
        self.mox.ReplayAll()
        # Blank out the required "name" field to trigger the validation error.
        workflow_data = self._get_workflow_data(project, quota)
        workflow_data["name"] = ""
        url = reverse('horizon:identity:projects:create')
        res = self.client.post(url, workflow_data)
        self.assertContains(res, "field is required")
def test_add_project_missing_field_error_domain(self):
domain = self.domains.get(id="1")
self.setSessionValues(domain_context=domain.id,
domain_context_name=domain.name)
self.test_add_project_missing_field_error()
    @test.create_stubs({api.keystone: ('user_list',
                                       'role_list',
                                       'group_list',
                                       'get_default_domain',
                                       'get_default_role',
                                       'tenant_list'),
                        quotas: ('get_default_quota_data',
                                 'get_disabled_quotas',
                                 'tenant_quota_usages')})
    def test_add_project_name_already_in_use_error(self):
        """Creating a project with a duplicate name reports 'already in use'.

        The duplicate check (tenant_list filtered by name) only exists with
        keystone v3, so the test is a no-op on older API versions.
        """
        keystone_api_version = api.keystone.VERSIONS.active
        if keystone_api_version < 3:
            return
        project = self.tenants.first()
        quota = self.quotas.first()
        default_role = self.roles.first()
        default_domain = self._get_default_domain()
        domain_id = default_domain.id
        users = self._get_all_users(domain_id)
        groups = self._get_all_groups(domain_id)
        roles = self.roles.list()
        # init: a non-empty tenant_list result marks the name as taken.
        api.keystone.tenant_list(IgnoreArg(),
                                 domain=domain_id,
                                 filters={"name": project.name})\
            .AndReturn(project)
        api.keystone.get_default_domain(IsA(http.HttpRequest)) \
            .AndReturn(default_domain)
        quotas.get_disabled_quotas(IsA(http.HttpRequest)) \
            .AndReturn(self.disabled_quotas.first())
        quotas.get_default_quota_data(IsA(http.HttpRequest)).AndReturn(quota)
        api.keystone.get_default_role(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(default_role)
        api.keystone.user_list(IsA(http.HttpRequest), domain=domain_id) \
            .AndReturn(users)
        api.keystone.role_list(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(roles)
        api.keystone.group_list(IsA(http.HttpRequest), domain=domain_id) \
            .AndReturn(groups)
        self.mox.ReplayAll()
        workflow_data = self._get_workflow_data(project, quota)
        url = reverse('horizon:identity:projects:create')
        res = self.client.post(url, workflow_data)
        self.assertContains(res, 'already in use')
class UpdateProjectWorkflowTests(test.BaseAdminViewTests):
    """Tests for the 'update project' workflow (info, members, groups, quota).

    All tests record an ordered mox script against api.keystone, api.nova,
    api.cinder and the quotas helpers, then drive the workflow view through
    the Django test client.
    """
    def _get_quota_info(self, quota):
        """Build the expected quota form data (nova + cinder + neutron
        fields) as plain ints from the fixture quota sets."""
        cinder_quota = self.cinder_quotas.first()
        neutron_quota = self.neutron_quotas.first()
        quota_data = {}
        for field in quotas.NOVA_QUOTA_FIELDS:
            quota_data[field] = int(quota.get(field).limit)
        for field in quotas.CINDER_QUOTA_FIELDS:
            quota_data[field] = int(cinder_quota.get(field).limit)
        for field in quotas.NEUTRON_QUOTA_FIELDS:
            quota_data[field] = int(neutron_quota.get(field).limit)
        return quota_data
    def _get_all_users(self, domain_id):
        """Return user fixtures, filtered by domain when one is given."""
        if not domain_id:
            users = self.users.list()
        else:
            users = [user for user in self.users.list()
                     if user.domain_id == domain_id]
        return users
    def _get_all_groups(self, domain_id):
        """Return group fixtures, filtered by domain when one is given."""
        if not domain_id:
            groups = self.groups.list()
        else:
            groups = [group for group in self.groups.list()
                      if group.domain_id == domain_id]
        return groups
    def _get_proj_users(self, project_id):
        """Return the user fixtures that belong to the given project."""
        return [user for user in self.users.list()
                if user.project_id == project_id]
    def _get_proj_groups(self, project_id):
        """Return the group fixtures that belong to the given project."""
        return [group for group in self.groups.list()
                if group.project_id == project_id]
    def _get_proj_role_assignment(self, project_id):
        """Return role-assignment fixtures scoped to the given project."""
        project_scope = {'project': {'id': project_id}}
        return self.role_assignments.filter(scope=project_scope)
    def _check_role_list(self, keystone_api_version, role_assignments, groups,
                         proj_users, roles, workflow_data):
        """Record the mox expectations for user/group role reassignment.

        Mutates workflow_data with the role selections being submitted and
        records the matching add/remove role calls. The recorded script
        differs by keystone API version: v3 uses role_assignments_list and
        group role calls, while v2 enumerates per-user roles (and records a
        failing add_tenant_user_role for user '3').
        """
        if keystone_api_version >= 3:
            # admin role with attempt to remove current admin, results in
            # warning message
            workflow_data[USER_ROLE_PREFIX + "1"] = ['3']
            # member role
            workflow_data[USER_ROLE_PREFIX + "2"] = ['1', '3']
            # admin role
            workflow_data[GROUP_ROLE_PREFIX + "1"] = ['2', '3']
            # member role
            workflow_data[GROUP_ROLE_PREFIX + "2"] = ['1', '2', '3']
            api.keystone.role_assignments_list(IsA(http.HttpRequest),
                                               project=self.tenant.id) \
                .AndReturn(role_assignments)
            # Give user 1 role 2
            api.keystone.add_tenant_user_role(IsA(http.HttpRequest),
                                              project=self.tenant.id,
                                              user='1',
                                              role='2',)
            # remove role 2 from user 2
            api.keystone.remove_tenant_user_role(IsA(http.HttpRequest),
                                                 project=self.tenant.id,
                                                 user='2',
                                                 role='2')
            # Give user 3 role 1
            api.keystone.add_tenant_user_role(IsA(http.HttpRequest),
                                              project=self.tenant.id,
                                              user='3',
                                              role='1',)
            api.keystone.group_list(IsA(http.HttpRequest),
                                    domain=self.domain.id,
                                    project=self.tenant.id) \
                .AndReturn(groups)
            api.keystone.roles_for_group(IsA(http.HttpRequest),
                                         group='1',
                                         project=self.tenant.id) \
                .AndReturn(roles)
            api.keystone.remove_group_role(IsA(http.HttpRequest),
                                           project=self.tenant.id,
                                           group='1',
                                           role='1')
            api.keystone.roles_for_group(IsA(http.HttpRequest),
                                         group='2',
                                         project=self.tenant.id) \
                .AndReturn(roles)
            api.keystone.roles_for_group(IsA(http.HttpRequest),
                                         group='3',
                                         project=self.tenant.id) \
                .AndReturn(roles)
        else:
            api.keystone.user_list(IsA(http.HttpRequest),
                                   project=self.tenant.id) \
                .AndReturn(proj_users)
            # admin user - try to remove all roles on current project, warning
            api.keystone.roles_for_user(IsA(http.HttpRequest), '1',
                                        self.tenant.id).AndReturn(roles)
            # member user 1 - has role 1, will remove it
            api.keystone.roles_for_user(IsA(http.HttpRequest), '2',
                                        self.tenant.id).AndReturn((roles[1],))
            # member user 3 - has role 2
            api.keystone.roles_for_user(IsA(http.HttpRequest), '3',
                                        self.tenant.id).AndReturn((roles[0],))
            # add role 2
            api.keystone.add_tenant_user_role(IsA(http.HttpRequest),
                                              project=self.tenant.id,
                                              user='3',
                                              role='2')\
                .AndRaise(self.exceptions.keystone)
    @test.create_stubs({api.keystone: ('get_default_role',
                                       'roles_for_user',
                                       'tenant_get',
                                       'domain_get',
                                       'user_list',
                                       'roles_for_group',
                                       'group_list',
                                       'role_list',
                                       'role_assignments_list'),
                        quotas: ('get_tenant_quota_data',
                                 'get_disabled_quotas')})
    def test_update_project_get(self):
        """GET renders the update workflow with initial values and steps
        (info, members, group members, quotas) populated from fixtures."""
        keystone_api_version = api.keystone.VERSIONS.active
        project = self.tenants.first()
        quota = self.quotas.first()
        default_role = self.roles.first()
        domain_id = project.domain_id
        users = self._get_all_users(domain_id)
        groups = self._get_all_groups(domain_id)
        roles = self.roles.list()
        proj_users = self._get_proj_users(project.id)
        role_assignments = self._get_proj_role_assignment(project.id)
        api.keystone.tenant_get(IsA(http.HttpRequest),
                                self.tenant.id, admin=True) \
            .AndReturn(project)
        api.keystone.domain_get(IsA(http.HttpRequest), domain_id) \
            .AndReturn(self.domain)
        quotas.get_disabled_quotas(IsA(http.HttpRequest)) \
            .AndReturn(self.disabled_quotas.first())
        quotas.get_tenant_quota_data(IsA(http.HttpRequest),
                                     tenant_id=self.tenant.id) \
            .AndReturn(quota)
        api.keystone.get_default_role(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(default_role)
        api.keystone.user_list(IsA(http.HttpRequest), domain=domain_id) \
            .AndReturn(users)
        api.keystone.role_list(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(roles)
        api.keystone.group_list(IsA(http.HttpRequest), domain=domain_id) \
            .AndReturn(groups)
        # Role lookups differ between keystone v3 and v2.
        if keystone_api_version >= 3:
            api.keystone.role_assignments_list(IsA(http.HttpRequest),
                                               project=self.tenant.id) \
                .AndReturn(role_assignments)
        else:
            api.keystone.user_list(IsA(http.HttpRequest),
                                   project=self.tenant.id) \
                .AndReturn(proj_users)
            for user in proj_users:
                api.keystone.roles_for_user(IsA(http.HttpRequest),
                                            user.id,
                                            self.tenant.id).AndReturn(roles)
            api.keystone.role_assignments_list(IsA(http.HttpRequest),
                                               project=self.tenant.id) \
                .AndReturn(role_assignments)
        self.mox.ReplayAll()
        url = reverse('horizon:identity:projects:update',
                      args=[self.tenant.id])
        res = self.client.get(url)
        self.assertTemplateUsed(res, views.WorkflowView.template_name)
        workflow = res.context['workflow']
        self.assertEqual(res.context['workflow'].name,
                         workflows.UpdateProject.name)
        step = workflow.get_step("update_info")
        self.assertEqual(step.action.initial['ram'], quota.get('ram').limit)
        self.assertEqual(step.action.initial['injected_files'],
                         quota.get('injected_files').limit)
        self.assertEqual(step.action.initial['name'], project.name)
        self.assertEqual(step.action.initial['description'],
                         project.description)
        self.assertQuerysetEqual(
            workflow.steps,
            ['<UpdateProjectInfo: update_info>',
             '<UpdateProjectMembers: update_members>',
             '<UpdateProjectGroups: update_group_members>',
             '<UpdateProjectQuota: update_quotas>'])
    @test.create_stubs({api.keystone: ('tenant_get',
                                       'domain_get',
                                       'tenant_update',
                                       'get_default_role',
                                       'roles_for_user',
                                       'remove_tenant_user_role',
                                       'add_tenant_user_role',
                                       'user_list',
                                       'roles_for_group',
                                       'remove_group_role',
                                       'add_group_role',
                                       'group_list',
                                       'role_list',
                                       'role_assignments_list'),
                        api.nova: ('tenant_quota_update',),
                        api.cinder: ('tenant_quota_update',),
                        quotas: ('get_tenant_quota_data',
                                 'get_disabled_quotas',
                                 'tenant_quota_usages')})
    def test_update_project_save(self, neutron=False):
        """Happy-path save: project info, role assignments and quotas are
        all updated; expects exactly one warning (current-admin removal).

        When neutron=True (called from the neutron variant below) an extra
        get_disabled_quotas expectation is recorded.
        """
        keystone_api_version = api.keystone.VERSIONS.active
        project = self.tenants.first()
        quota = self.quotas.first()
        default_role = self.roles.first()
        domain_id = project.domain_id
        users = self._get_all_users(domain_id)
        proj_users = self._get_proj_users(project.id)
        groups = self._get_all_groups(domain_id)
        roles = self.roles.list()
        role_assignments = self._get_proj_role_assignment(project.id)
        quota_usages = self.quota_usages.first()
        # get/init
        api.keystone.tenant_get(IsA(http.HttpRequest),
                                self.tenant.id, admin=True) \
            .AndReturn(project)
        api.keystone.domain_get(IsA(http.HttpRequest), domain_id) \
            .AndReturn(self.domain)
        quotas.get_disabled_quotas(IsA(http.HttpRequest)) \
            .AndReturn(self.disabled_quotas.first())
        if neutron:
            quotas.get_disabled_quotas(IsA(http.HttpRequest)) \
                .AndReturn(self.disabled_quotas.first())
        quotas.get_tenant_quota_data(IsA(http.HttpRequest),
                                     tenant_id=self.tenant.id) \
            .AndReturn(quota)
        api.keystone.get_default_role(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(default_role)
        api.keystone.user_list(IsA(http.HttpRequest), domain=domain_id) \
            .AndReturn(users)
        api.keystone.role_list(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(roles)
        api.keystone.group_list(IsA(http.HttpRequest), domain=domain_id) \
            .AndReturn(groups)
        workflow_data = {}
        if keystone_api_version >= 3:
            api.keystone.role_assignments_list(IsA(http.HttpRequest),
                                               project=self.tenant.id) \
                .AndReturn(role_assignments)
        else:
            api.keystone.user_list(IsA(http.HttpRequest),
                                   project=self.tenant.id) \
                .AndReturn(proj_users)
            for user in proj_users:
                api.keystone.roles_for_user(IsA(http.HttpRequest),
                                            user.id,
                                            self.tenant.id).AndReturn(roles)
            api.keystone.role_assignments_list(IsA(http.HttpRequest),
                                               project=self.tenant.id) \
                .AndReturn(role_assignments)
        workflow_data[USER_ROLE_PREFIX + "1"] = ['3']  # admin role
        workflow_data[USER_ROLE_PREFIX + "2"] = ['2']  # member role
        # Group assignment form data
        workflow_data[GROUP_ROLE_PREFIX + "1"] = ['3']  # admin role
        workflow_data[GROUP_ROLE_PREFIX + "2"] = ['2']  # member role
        # update some fields
        project._info["domain_id"] = domain_id
        project._info["name"] = "updated name"
        project._info["description"] = "updated description"
        quota.metadata_items = 444
        quota.volumes = 444
        updated_project = {"name": project._info["name"],
                           "description": project._info["description"],
                           "enabled": project.enabled}
        updated_quota = self._get_quota_info(quota)
        # handle
        api.keystone.tenant_update(IsA(http.HttpRequest),
                                   project.id,
                                   **updated_project) \
            .AndReturn(project)
        self._check_role_list(keystone_api_version, role_assignments, groups,
                              proj_users, roles, workflow_data)
        quotas.tenant_quota_usages(IsA(http.HttpRequest), tenant_id=project.id) \
            .AndReturn(quota_usages)
        nova_updated_quota = dict([(key, updated_quota[key]) for key in
                                   quotas.NOVA_QUOTA_FIELDS])
        api.nova.tenant_quota_update(IsA(http.HttpRequest),
                                     project.id,
                                     **nova_updated_quota)
        cinder_updated_quota = dict([(key, updated_quota[key]) for key in
                                     quotas.CINDER_QUOTA_FIELDS])
        api.cinder.tenant_quota_update(IsA(http.HttpRequest),
                                       project.id,
                                       **cinder_updated_quota)
        self.mox.ReplayAll()
        # submit form data
        project_data = {"domain_id": project._info["domain_id"],
                        "name": project._info["name"],
                        "id": project.id,
                        "description": project._info["description"],
                        "enabled": project.enabled}
        workflow_data.update(project_data)
        workflow_data.update(updated_quota)
        url = reverse('horizon:identity:projects:update',
                      args=[self.tenant.id])
        res = self.client.post(url, workflow_data)
        self.assertNoFormErrors(res)
        self.assertMessageCount(error=0, warning=1)
        self.assertRedirectsNoFollow(res, INDEX_URL)
    @test.create_stubs({api.neutron: ('is_extension_supported',
                                      'tenant_quota_get',
                                      'tenant_quota_update')})
    @test.update_settings(OPENSTACK_NEUTRON_NETWORK={'enable_quotas': True})
    def test_update_project_save_with_neutron(self):
        """Same as test_update_project_save, plus neutron quota update
        expectations with neutron quotas enabled in settings."""
        quota_data = self.neutron_quotas.first()
        neutron_updated_quota = dict([(key, quota_data.get(key).limit)
                                      for key in quotas.NEUTRON_QUOTA_FIELDS])
        api.neutron.is_extension_supported(IsA(http.HttpRequest), 'quotas') \
            .MultipleTimes().AndReturn(True)
        api.neutron.tenant_quota_get(IsA(http.HttpRequest),
                                     tenant_id=self.tenant.id) \
            .AndReturn(quota_data)
        api.neutron.tenant_quota_update(IsA(http.HttpRequest),
                                        self.tenant.id,
                                        **neutron_updated_quota)
        self.test_update_project_save(neutron=True)
    @test.create_stubs({api.keystone: ('tenant_get',)})
    def test_update_project_get_error(self):
        """A failing tenant_get on GET redirects back to the index page."""
        api.keystone.tenant_get(IsA(http.HttpRequest), self.tenant.id,
                                admin=True) \
            .AndRaise(self.exceptions.nova)
        self.mox.ReplayAll()
        url = reverse('horizon:identity:projects:update',
                      args=[self.tenant.id])
        res = self.client.get(url)
        self.assertRedirectsNoFollow(res, INDEX_URL)
    @test.create_stubs({api.keystone: ('tenant_get',
                                       'domain_get',
                                       'tenant_update',
                                       'get_default_role',
                                       'roles_for_user',
                                       'remove_tenant_user',
                                       'add_tenant_user_role',
                                       'user_list',
                                       'roles_for_group',
                                       'remove_group_role',
                                       'add_group_role',
                                       'group_list',
                                       'role_list',
                                       'role_assignments_list'),
                        quotas: ('get_tenant_quota_data',
                                 'get_disabled_quotas',
                                 'tenant_quota_usages',),
                        api.nova: ('tenant_quota_update',)})
    def test_update_project_tenant_update_error(self):
        """When tenant_update raises, the workflow still completes and
        redirects to the index without form errors."""
        keystone_api_version = api.keystone.VERSIONS.active
        project = self.tenants.first()
        quota = self.quotas.first()
        default_role = self.roles.first()
        domain_id = project.domain_id
        users = self._get_all_users(domain_id)
        groups = self._get_all_groups(domain_id)
        roles = self.roles.list()
        proj_users = self._get_proj_users(project.id)
        role_assignments = self.role_assignments.list()
        quota_usages = self.quota_usages.first()
        # get/init
        api.keystone.tenant_get(IsA(http.HttpRequest), self.tenant.id,
                                admin=True) \
            .AndReturn(project)
        api.keystone.domain_get(IsA(http.HttpRequest), domain_id) \
            .AndReturn(self.domain)
        quotas.get_disabled_quotas(IsA(http.HttpRequest)) \
            .AndReturn(self.disabled_quotas.first())
        quotas.get_tenant_quota_data(IsA(http.HttpRequest),
                                     tenant_id=self.tenant.id) \
            .AndReturn(quota)
        api.keystone.get_default_role(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(default_role)
        api.keystone.user_list(IsA(http.HttpRequest), domain=domain_id) \
            .AndReturn(users)
        api.keystone.role_list(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(roles)
        api.keystone.group_list(IsA(http.HttpRequest), domain=domain_id) \
            .AndReturn(groups)
        workflow_data = {}
        if keystone_api_version >= 3:
            api.keystone.role_assignments_list(IsA(http.HttpRequest),
                                               project=self.tenant.id) \
                .AndReturn(role_assignments)
        else:
            api.keystone.user_list(IsA(http.HttpRequest),
                                   project=self.tenant.id) \
                .AndReturn(proj_users)
            for user in proj_users:
                api.keystone.roles_for_user(IsA(http.HttpRequest),
                                            user.id,
                                            self.tenant.id).AndReturn(roles)
            role_ids = [role.id for role in roles]
            for user in proj_users:
                if role_ids:
                    workflow_data.setdefault(USER_ROLE_PREFIX + role_ids[0], []) \
                        .append(user.id)
            api.keystone.role_assignments_list(IsA(http.HttpRequest),
                                               project=self.tenant.id) \
                .AndReturn(role_assignments)
        # Assign every group the first role in the submitted form data.
        role_ids = [role.id for role in roles]
        for group in groups:
            if role_ids:
                workflow_data.setdefault(GROUP_ROLE_PREFIX + role_ids[0], []) \
                    .append(group.id)
        # update some fields
        project._info["domain_id"] = domain_id
        project._info["name"] = "updated name"
        project._info["description"] = "updated description"
        quota.metadata_items = 444
        quota.volumes = 444
        updated_project = {"name": project._info["name"],
                           "description": project._info["description"],
                           "enabled": project.enabled}
        updated_quota = self._get_quota_info(quota)
        # handle
        quotas.tenant_quota_usages(IsA(http.HttpRequest), tenant_id=project.id) \
            .AndReturn(quota_usages)
        api.keystone.tenant_update(IsA(http.HttpRequest),
                                   project.id,
                                   **updated_project) \
            .AndRaise(self.exceptions.keystone)
        self.mox.ReplayAll()
        # submit form data
        project_data = {"domain_id": project._info["domain_id"],
                        "name": project._info["name"],
                        "id": project.id,
                        "description": project._info["description"],
                        "enabled": project.enabled}
        workflow_data.update(project_data)
        workflow_data.update(updated_quota)
        url = reverse('horizon:identity:projects:update',
                      args=[self.tenant.id])
        res = self.client.post(url, workflow_data)
        self.assertNoFormErrors(res)
        self.assertRedirectsNoFollow(res, INDEX_URL)
    @test.create_stubs({api.keystone: ('tenant_get',
                                       'domain_get',
                                       'tenant_update',
                                       'get_default_role',
                                       'roles_for_user',
                                       'remove_tenant_user_role',
                                       'add_tenant_user_role',
                                       'user_list',
                                       'roles_for_group',
                                       'remove_group_role',
                                       'add_group_role',
                                       'group_list',
                                       'role_list',
                                       'role_assignments_list'),
                        quotas: ('get_tenant_quota_data',
                                 'get_disabled_quotas',
                                 'tenant_quota_usages',),
                        api.nova: ('tenant_quota_update',)})
    def test_update_project_quota_update_error(self):
        """When the nova quota update raises, expect two error messages plus
        the standard current-admin-removal warning, then redirect."""
        keystone_api_version = api.keystone.VERSIONS.active
        project = self.tenants.first()
        quota = self.quotas.first()
        default_role = self.roles.first()
        domain_id = project.domain_id
        users = self._get_all_users(domain_id)
        proj_users = self._get_proj_users(project.id)
        groups = self._get_all_groups(domain_id)
        roles = self.roles.list()
        role_assignments = self._get_proj_role_assignment(project.id)
        quota_usages = self.quota_usages.first()
        # get/init
        api.keystone.tenant_get(IsA(http.HttpRequest), self.tenant.id,
                                admin=True) \
            .AndReturn(project)
        api.keystone.domain_get(IsA(http.HttpRequest), domain_id) \
            .AndReturn(self.domain)
        quotas.get_disabled_quotas(IsA(http.HttpRequest)) \
            .AndReturn(self.disabled_quotas.first())
        quotas.get_tenant_quota_data(IsA(http.HttpRequest),
                                     tenant_id=self.tenant.id) \
            .AndReturn(quota)
        api.keystone.get_default_role(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(default_role)
        api.keystone.user_list(IsA(http.HttpRequest), domain=domain_id) \
            .AndReturn(users)
        api.keystone.role_list(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(roles)
        api.keystone.group_list(IsA(http.HttpRequest), domain=domain_id) \
            .AndReturn(groups)
        workflow_data = {}
        if keystone_api_version >= 3:
            api.keystone.role_assignments_list(IsA(http.HttpRequest),
                                               project=self.tenant.id) \
                .AndReturn(role_assignments)
        else:
            api.keystone.user_list(IsA(http.HttpRequest),
                                   project=self.tenant.id) \
                .AndReturn(proj_users)
            for user in proj_users:
                api.keystone.roles_for_user(IsA(http.HttpRequest),
                                            user.id,
                                            self.tenant.id).AndReturn(roles)
            api.keystone.role_assignments_list(IsA(http.HttpRequest),
                                               project=self.tenant.id) \
                .AndReturn(role_assignments)
        workflow_data[USER_ROLE_PREFIX + "1"] = ['1', '3']  # admin role
        workflow_data[USER_ROLE_PREFIX + "2"] = ['1', '2', '3']  # member role
        # Group role assignment data
        workflow_data[GROUP_ROLE_PREFIX + "1"] = ['1', '3']  # admin role
        workflow_data[GROUP_ROLE_PREFIX + "2"] = ['1', '2', '3']  # member role
        # update some fields
        project._info["domain_id"] = domain_id
        project._info["name"] = "updated name"
        project._info["description"] = "updated description"
        quota[0].limit = 444
        quota[1].limit = -1
        updated_project = {"name": project._info["name"],
                           "description": project._info["description"],
                           "enabled": project.enabled}
        updated_quota = self._get_quota_info(quota)
        # handle
        api.keystone.tenant_update(IsA(http.HttpRequest),
                                   project.id,
                                   **updated_project) \
            .AndReturn(project)
        self._check_role_list(keystone_api_version, role_assignments, groups,
                              proj_users, roles, workflow_data)
        quotas.tenant_quota_usages(IsA(http.HttpRequest), tenant_id=project.id) \
            .AndReturn(quota_usages)
        nova_updated_quota = dict([(key, updated_quota[key]) for key in
                                   quotas.NOVA_QUOTA_FIELDS])
        api.nova.tenant_quota_update(IsA(http.HttpRequest),
                                     project.id,
                                     **nova_updated_quota) \
            .AndRaise(self.exceptions.nova)
        self.mox.ReplayAll()
        # submit form data
        project_data = {"domain_id": project._info["domain_id"],
                        "name": project._info["name"],
                        "id": project.id,
                        "description": project._info["description"],
                        "enabled": project.enabled}
        workflow_data.update(project_data)
        workflow_data.update(updated_quota)
        url = reverse('horizon:identity:projects:update',
                      args=[self.tenant.id])
        res = self.client.post(url, workflow_data)
        self.assertNoFormErrors(res)
        self.assertMessageCount(error=2, warning=1)
        self.assertRedirectsNoFollow(res, INDEX_URL)
    @test.create_stubs({api.keystone: ('tenant_get',
                                       'domain_get',
                                       'tenant_update',
                                       'get_default_role',
                                       'roles_for_user',
                                       'remove_tenant_user_role',
                                       'add_tenant_user_role',
                                       'user_list',
                                       'roles_for_group',
                                       'remove_group_role',
                                       'add_group_role',
                                       'group_list',
                                       'role_list',
                                       'role_assignments_list'),
                        quotas: ('get_tenant_quota_data',
                                 'get_disabled_quotas',
                                 'tenant_quota_usages')})
    def test_update_project_member_update_error(self):
        """When a role-assignment call raises (via _check_role_list's v2
        path), expect two errors plus one warning, then redirect."""
        keystone_api_version = api.keystone.VERSIONS.active
        project = self.tenants.first()
        quota = self.quotas.first()
        default_role = self.roles.first()
        domain_id = project.domain_id
        users = self._get_all_users(domain_id)
        proj_users = self._get_proj_users(project.id)
        groups = self._get_all_groups(domain_id)
        roles = self.roles.list()
        role_assignments = self._get_proj_role_assignment(project.id)
        quota_usages = self.quota_usages.first()
        # get/init
        api.keystone.tenant_get(IsA(http.HttpRequest), self.tenant.id,
                                admin=True) \
            .AndReturn(project)
        api.keystone.domain_get(IsA(http.HttpRequest), domain_id) \
            .AndReturn(self.domain)
        quotas.get_disabled_quotas(IsA(http.HttpRequest)) \
            .AndReturn(self.disabled_quotas.first())
        quotas.get_tenant_quota_data(IsA(http.HttpRequest),
                                     tenant_id=self.tenant.id) \
            .AndReturn(quota)
        api.keystone.get_default_role(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(default_role)
        api.keystone.user_list(IsA(http.HttpRequest), domain=domain_id) \
            .AndReturn(users)
        api.keystone.role_list(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(roles)
        api.keystone.group_list(IsA(http.HttpRequest), domain=domain_id) \
            .AndReturn(groups)
        workflow_data = {}
        if keystone_api_version >= 3:
            api.keystone.role_assignments_list(IsA(http.HttpRequest),
                                               project=self.tenant.id) \
                .AndReturn(role_assignments)
        else:
            api.keystone.user_list(IsA(http.HttpRequest),
                                   project=self.tenant.id) \
                .AndReturn(proj_users)
            for user in proj_users:
                api.keystone.roles_for_user(IsA(http.HttpRequest),
                                            user.id,
                                            self.tenant.id).AndReturn(roles)
            api.keystone.role_assignments_list(IsA(http.HttpRequest),
                                               project=self.tenant.id) \
                .AndReturn(role_assignments)
        workflow_data[USER_ROLE_PREFIX + "1"] = ['1', '3']  # admin role
        workflow_data[USER_ROLE_PREFIX + "2"] = ['1', '2', '3']  # member role
        workflow_data[GROUP_ROLE_PREFIX + "1"] = ['1', '3']  # admin role
        workflow_data[GROUP_ROLE_PREFIX + "2"] = ['1', '2', '3']  # member role
        # update some fields
        project._info["domain_id"] = domain_id
        project._info["name"] = "updated name"
        project._info["description"] = "updated description"
        quota.metadata_items = 444
        quota.volumes = 444
        updated_project = {"name": project._info["name"],
                           "description": project._info["description"],
                           "enabled": project.enabled}
        updated_quota = self._get_quota_info(quota)
        # handle
        quotas.tenant_quota_usages(IsA(http.HttpRequest), tenant_id=project.id) \
            .AndReturn(quota_usages)
        api.keystone.tenant_update(IsA(http.HttpRequest),
                                   project.id,
                                   **updated_project) \
            .AndReturn(project)
        self._check_role_list(keystone_api_version, role_assignments, groups,
                              proj_users, roles, workflow_data)
        self.mox.ReplayAll()
        # submit form data
        project_data = {"domain_id": project._info["domain_id"],
                        "name": project._info["name"],
                        "id": project.id,
                        "description": project._info["description"],
                        "enabled": project.enabled}
        workflow_data.update(project_data)
        workflow_data.update(updated_quota)
        url = reverse('horizon:identity:projects:update',
                      args=[self.tenant.id])
        res = self.client.post(url, workflow_data)
        self.assertNoFormErrors(res)
        self.assertMessageCount(error=2, warning=1)
        self.assertRedirectsNoFollow(res, INDEX_URL)
    # django 1.7 and later does not handle the thrown keystoneclient
    # exception well enough.
    # TODO(mrunge): re-check when django-1.8 is stable
    @unittest.skipIf(django.VERSION >= (1, 7, 0),
                     'Currently skipped with Django >= 1.7')
    @test.create_stubs({api.keystone: ('get_default_role',
                                       'tenant_get',
                                       'domain_get'),
                        quotas: ('get_tenant_quota_data',
                                 'get_disabled_quotas')})
    def test_update_project_when_default_role_does_not_exist(self):
        """A missing default role makes the workflow GET raise NotFound."""
        project = self.tenants.first()
        domain_id = project.domain_id
        quota = self.quotas.first()
        api.keystone.get_default_role(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(None)  # Default role doesn't exist
        api.keystone.tenant_get(IsA(http.HttpRequest), self.tenant.id,
                                admin=True) \
            .AndReturn(project)
        api.keystone.domain_get(IsA(http.HttpRequest), domain_id) \
            .AndReturn(self.domain)
        quotas.get_disabled_quotas(IsA(http.HttpRequest)) \
            .AndReturn(self.disabled_quotas.first())
        quotas.get_tenant_quota_data(IsA(http.HttpRequest),
                                     tenant_id=self.tenant.id) \
            .AndReturn(quota)
        self.mox.ReplayAll()
        url = reverse('horizon:identity:projects:update',
                      args=[self.tenant.id])
        try:
            # Avoid the log message in the test output when the workflow's
            # step action cannot be instantiated
            logging.disable(logging.ERROR)
            with self.assertRaises(exceptions.NotFound):
                self.client.get(url)
        finally:
            logging.disable(logging.NOTSET)
class UsageViewTests(test.BaseAdminViewTests):
    """Tests for the per-project usage view's CSV export."""
    def _stub_nova_api_calls(self, nova_stu_enabled=True):
        """Stub the nova/cinder usage APIs and record the first
        SimpleTenantUsage extension check."""
        self.mox.StubOutWithMock(api.nova, 'usage_get')
        self.mox.StubOutWithMock(api.nova, 'tenant_absolute_limits')
        self.mox.StubOutWithMock(api.nova, 'extension_supported')
        self.mox.StubOutWithMock(api.cinder, 'tenant_absolute_limits')
        api.nova.extension_supported(
            'SimpleTenantUsage', IsA(http.HttpRequest)) \
            .AndReturn(nova_stu_enabled)
    def _stub_neutron_api_calls(self, neutron_sg_enabled=True):
        """Stub the neutron/network API calls the usage view makes
        (security-group support, floating IPs, optionally security groups).
        """
        self.mox.StubOutWithMock(api.neutron, 'is_extension_supported')
        self.mox.StubOutWithMock(api.network, 'floating_ip_supported')
        self.mox.StubOutWithMock(api.network, 'tenant_floating_ip_list')
        if neutron_sg_enabled:
            self.mox.StubOutWithMock(api.network, 'security_group_list')
        api.neutron.is_extension_supported(
            IsA(http.HttpRequest),
            'security-group').AndReturn(neutron_sg_enabled)
        api.network.floating_ip_supported(IsA(http.HttpRequest)) \
            .AndReturn(True)
        api.network.tenant_floating_ip_list(IsA(http.HttpRequest)) \
            .AndReturn(self.floating_ips.list())
        if neutron_sg_enabled:
            api.network.security_group_list(IsA(http.HttpRequest)) \
                .AndReturn(self.q_secgroups.list())
    def test_usage_csv(self):
        self._test_usage_csv(nova_stu_enabled=True)
    def test_usage_csv_disabled(self):
        self._test_usage_csv(nova_stu_enabled=False)
    def _test_usage_csv(self, nova_stu_enabled=True):
        """Fetch ?format=csv for a project and verify template, context
        type and the CSV header row (CRLF-terminated)."""
        now = timezone.now()
        usage_obj = api.nova.NovaUsage(self.usages.first())
        self._stub_nova_api_calls(nova_stu_enabled)
        # The extension is checked a second time during the request.
        api.nova.extension_supported(
            'SimpleTenantUsage', IsA(http.HttpRequest)) \
            .AndReturn(nova_stu_enabled)
        # Usage period: start of the current month through end of today.
        start = datetime.datetime(now.year, now.month, 1, 0, 0, 0, 0)
        end = datetime.datetime(now.year, now.month, now.day, 23, 59, 59, 0)
        if nova_stu_enabled:
            api.nova.usage_get(IsA(http.HttpRequest),
                               self.tenant.id,
                               start, end).AndReturn(usage_obj)
        api.nova.tenant_absolute_limits(IsA(http.HttpRequest))\
            .AndReturn(self.limits['absolute'])
        api.cinder.tenant_absolute_limits(IsA(http.HttpRequest)) \
            .AndReturn(self.cinder_limits['absolute'])
        self._stub_neutron_api_calls()
        self.mox.ReplayAll()
        project_id = self.tenants.first().id
        csv_url = reverse('horizon:identity:projects:usage',
                          args=[project_id]) + "?format=csv"
        res = self.client.get(csv_url)
        self.assertTemplateUsed(res, 'project/overview/usage.csv')
        self.assertTrue(isinstance(res.context['usage'], usage.ProjectUsage))
        hdr = ('Instance Name,VCPUs,RAM (MB),Disk (GB),Usage (Hours),'
               'Time since created (Seconds),State')
        self.assertContains(res, '%s\r\n' % hdr)
class DetailProjectViewTests(test.BaseAdminViewTests):
    """Tests for the project detail page."""
    @test.create_stubs({api.keystone: ('tenant_get',)})
    def test_detail_view(self):
        """Detail page renders the project's name/id and title exactly once."""
        project = self.tenants.first()
        api.keystone.tenant_get(IsA(http.HttpRequest), self.tenant.id) \
            .AndReturn(project)
        self.mox.ReplayAll()
        # NOTE(review): `args` is not a meaningful parameter of the Django
        # test client's get(); presumably PROJECT_DETAIL_URL is already
        # reversed with the project id — confirm and drop the kwarg.
        res = self.client.get(PROJECT_DETAIL_URL, args=[project.id])
        self.assertTemplateUsed(res, 'identity/projects/detail.html')
        self.assertEqual(res.context['project'].name, project.name)
        self.assertEqual(res.context['project'].id, project.id)
        self.assertContains(res, "Project Details: %s" % project.name,
                            1, 200)
    @test.create_stubs({api.keystone: ('tenant_get',)})
    def test_detail_view_with_exception(self):
        """A failing tenant_get on the detail page redirects to the index."""
        project = self.tenants.first()
        api.keystone.tenant_get(IsA(http.HttpRequest), self.tenant.id) \
            .AndRaise(self.exceptions.keystone)
        self.mox.ReplayAll()
        # NOTE(review): same questionable `args` kwarg as in test_detail_view.
        res = self.client.get(PROJECT_DETAIL_URL, args=[project.id])
        self.assertRedirectsNoFollow(res, INDEX_URL)
@unittest.skipUnless(os.environ.get('WITH_SELENIUM', False),
                     "The WITH_SELENIUM env variable is not set.")
class SeleniumTests(test.SeleniumAdminTestCase):
    # Browser-driven tests: inline editing of a tenant name in the projects
    # table, and loading of the membership step of the create-project form.
    # NOTE: the mox expectations below are order-sensitive; do not reorder.

    @test.create_stubs(
        {api.keystone: ('tenant_list', 'tenant_get', 'tenant_update')})
    def test_inline_editing_update(self):
        # Tenant List
        api.keystone.tenant_list(IgnoreArg(),
                                 domain=None,
                                 marker=None,
                                 paginate=True) \
            .AndReturn([self.tenants.list(), False])
        # Edit mod
        api.keystone.tenant_get(IgnoreArg(),
                                u'1',
                                admin=True) \
            .AndReturn(self.tenants.list()[0])
        # Update - requires get and update
        api.keystone.tenant_get(IgnoreArg(),
                                u'1',
                                admin=True) \
            .AndReturn(self.tenants.list()[0])
        api.keystone.tenant_update(
            IgnoreArg(),
            u'1',
            description='a test tenant.',
            enabled=True,
            name=u'Changed test_tenant')
        # Refreshing cell with changed name
        changed_tenant = copy.copy(self.tenants.list()[0])
        changed_tenant.name = u'Changed test_tenant'
        api.keystone.tenant_get(IgnoreArg(),
                                u'1',
                                admin=True) \
            .AndReturn(changed_tenant)
        self.mox.ReplayAll()

        self.selenium.get("%s%s" % (self.live_server_url, INDEX_URL))
        # Check the presence of the important elements
        td_element = self.selenium.find_element_by_xpath(
            "//td[@data-update-url='/identity/?action=cell_update"
            "&table=tenants&cell_name=name&obj_id=1']")
        cell_wrapper = td_element.find_element_by_class_name(
            'table_cell_wrapper')
        edit_button_wrapper = td_element.find_element_by_class_name(
            'table_cell_action')
        edit_button = edit_button_wrapper.find_element_by_tag_name('button')
        # Hovering over td and clicking on edit button
        action_chains = ActionChains(self.selenium)
        action_chains.move_to_element(cell_wrapper).click(edit_button)
        action_chains.perform()
        # Waiting for the AJAX response for switching to editing mod
        wait = self.ui.WebDriverWait(self.selenium, 10,
                                     ignored_exceptions=[socket_timeout])
        wait.until(lambda x: self.selenium.find_element_by_name("name__1"))
        # Changing project name in cell form; the td must be re-located
        # because the AJAX swap replaced the original DOM node.
        td_element = self.selenium.find_element_by_xpath(
            "//td[@data-update-url='/identity/?action=cell_update"
            "&table=tenants&cell_name=name&obj_id=1']")
        name_input = td_element.find_element_by_tag_name('input')
        name_input.send_keys(keys.Keys.HOME)
        name_input.send_keys("Changed ")
        # Saving new project name by AJAX
        td_element.find_element_by_class_name('inline-edit-submit').click()
        # Waiting for the AJAX response of cell refresh
        wait = self.ui.WebDriverWait(self.selenium, 10,
                                     ignored_exceptions=[socket_timeout])
        wait.until(lambda x: self.selenium.find_element_by_xpath(
            "//td[@data-update-url='/identity/?action=cell_update"
            "&table=tenants&cell_name=name&obj_id=1']"
            "/div[@class='table_cell_wrapper']"
            "/div[@class='table_cell_data_wrapper']"))
        # Checking new project name after cell refresh
        data_wrapper = self.selenium.find_element_by_xpath(
            "//td[@data-update-url='/identity/?action=cell_update"
            "&table=tenants&cell_name=name&obj_id=1']"
            "/div[@class='table_cell_wrapper']"
            "/div[@class='table_cell_data_wrapper']")
        self.assertTrue(data_wrapper.text == u'Changed test_tenant',
                        "Error: saved tenant name is expected to be "
                        "'Changed test_tenant'")

    @test.create_stubs(
        {api.keystone: ('tenant_list', 'tenant_get')})
    def test_inline_editing_cancel(self):
        # Tenant List
        api.keystone.tenant_list(IgnoreArg(),
                                 domain=None,
                                 marker=None,
                                 paginate=True) \
            .AndReturn([self.tenants.list(), False])
        # Edit mod
        api.keystone.tenant_get(IgnoreArg(),
                                u'1',
                                admin=True) \
            .AndReturn(self.tenants.list()[0])
        # Cancel edit mod is without the request
        self.mox.ReplayAll()

        self.selenium.get("%s%s" % (self.live_server_url, INDEX_URL))
        # Check the presence of the important elements
        td_element = self.selenium.find_element_by_xpath(
            "//td[@data-update-url='/identity/?action=cell_update"
            "&table=tenants&cell_name=name&obj_id=1']")
        cell_wrapper = td_element.find_element_by_class_name(
            'table_cell_wrapper')
        edit_button_wrapper = td_element.find_element_by_class_name(
            'table_cell_action')
        edit_button = edit_button_wrapper.find_element_by_tag_name('button')
        # Hovering over td and clicking on edit
        action_chains = ActionChains(self.selenium)
        action_chains.move_to_element(cell_wrapper).click(edit_button)
        action_chains.perform()
        # Waiting for the AJAX response for switching to editing mod
        wait = self.ui.WebDriverWait(self.selenium, 10,
                                     ignored_exceptions=[socket_timeout])
        wait.until(lambda x: self.selenium.find_element_by_name("name__1"))
        # Click on cancel button
        td_element = self.selenium.find_element_by_xpath(
            "//td[@data-update-url='/identity/?action=cell_update"
            "&table=tenants&cell_name=name&obj_id=1']")
        td_element.find_element_by_class_name('inline-edit-cancel').click()
        # Cancel is via javascript, so it should be immediate
        # Checking that tenant name is not changed
        data_wrapper = self.selenium.find_element_by_xpath(
            "//td[@data-update-url='/identity/?action=cell_update"
            "&table=tenants&cell_name=name&obj_id=1']"
            "/div[@class='table_cell_wrapper']"
            "/div[@class='table_cell_data_wrapper']")
        self.assertTrue(data_wrapper.text == u'test_tenant',
                        "Error: saved tenant name is expected to be "
                        "'test_tenant'")

    @test.create_stubs({api.keystone: ('get_default_domain',
                                       'get_default_role',
                                       'user_list',
                                       'group_list',
                                       'role_list'),
                        api.base: ('is_service_enabled',),
                        quotas: ('get_default_quota_data',)})
    def test_membership_list_loads_correctly(self):
        # Verifies that every user appears in the "available members"
        # widget of the create-project membership step.
        member_css_class = ".available_members"
        users = self.users.list()

        api.base.is_service_enabled(IsA(http.HttpRequest), 'network') \
            .MultipleTimes().AndReturn(False)
        api.base.is_service_enabled(IsA(http.HttpRequest), 'volume') \
            .MultipleTimes().AndReturn(False)
        api.keystone.get_default_domain(IsA(http.HttpRequest)) \
            .AndReturn(self.domain)
        quotas.get_default_quota_data(IsA(http.HttpRequest)) \
            .AndReturn(self.quotas.first())
        api.keystone.get_default_role(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(self.roles.first())
        api.keystone.user_list(IsA(http.HttpRequest), domain=self.domain.id) \
            .AndReturn(users)
        api.keystone.role_list(IsA(http.HttpRequest)) \
            .AndReturn(self.roles.list())
        api.keystone.group_list(IsA(http.HttpRequest), domain=self.domain.id) \
            .AndReturn(self.groups.list())
        api.keystone.role_list(IsA(http.HttpRequest)) \
            .AndReturn(self.roles.list())
        self.mox.ReplayAll()

        self.selenium.get("%s%s" %
                          (self.live_server_url,
                           reverse('horizon:identity:projects:create')))
        members = self.selenium.find_element_by_css_selector(member_css_class)
        for user in users:
            self.assertIn(user.name, members.text)
| newrocknj/horizon | openstack_dashboard/dashboards/identity/projects/tests.py | Python | apache-2.0 | 82,571 |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.vcs.configurable;
import com.intellij.ide.actions.ShowFilePathAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.*;
import com.intellij.openapi.vcs.ex.ProjectLevelVcsManagerEx;
import com.intellij.openapi.vcs.readOnlyHandler.ReadonlyStatusHandlerImpl;
import com.intellij.openapi.vfs.ReadonlyStatusHandler;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.util.*;
import java.util.List;
/**
 * Settings panel that edits the VCS confirmation options of a project:
 * per-action "show options dialog" flags, add/remove file confirmations,
 * empty-changelist removal, read-only status dialog and the
 * "show patch after creation" behavior. The Swing components are bound
 * from a GUI designer form (no explicit component creation here).
 */
public class VcsGeneralConfigurationPanel {

  private JCheckBox myShowReadOnlyStatusDialog;

  // Radio groups: index 0 = ask, 1 = do silently, 2 = do nothing.
  private JRadioButton myShowDialogOnAddingFile;
  private JRadioButton myPerformActionOnAddingFile;
  private JRadioButton myDoNothingOnAddingFile;

  private JRadioButton myShowDialogOnRemovingFile;
  private JRadioButton myPerformActionOnRemovingFile;
  private JRadioButton myDoNothingOnRemovingFile;

  private JPanel myPanel;

  private final JRadioButton[] myOnFileAddingGroup;
  private final JRadioButton[] myOnFileRemovingGroup;
  private final Project myProject;
  private JPanel myPromptsPanel;

  // One checkbox per applicable "show options" setting, in insertion order.
  Map<VcsShowOptionsSettingImpl, JCheckBox> myPromptOptions = new LinkedHashMap<>();
  private JPanel myRemoveConfirmationPanel;
  private JPanel myAddConfirmationPanel;
  // Index 0 = "ask", 1 = "show patch in file manager", 2 = "don't show".
  private JComboBox myOnPatchCreation;
  private JCheckBox myReloadContext;
  private ButtonGroup myEmptyChangelistRemovingGroup;

  public VcsGeneralConfigurationPanel(final Project project) {

    myProject = project;

    myOnFileAddingGroup = new JRadioButton[]{
      myShowDialogOnAddingFile,
      myPerformActionOnAddingFile,
      myDoNothingOnAddingFile
    };

    myOnFileRemovingGroup = new JRadioButton[]{
      myShowDialogOnRemovingFile,
      myPerformActionOnRemovingFile,
      myDoNothingOnRemovingFile
    };

    myPromptsPanel.setLayout(new GridLayout(3, 0));
    List<VcsShowOptionsSettingImpl> options = ProjectLevelVcsManagerEx.getInstanceEx(project).getAllOptions();

    // Only settings that apply to some active VCS are shown; the default
    // (template) project shows all of them.
    for (VcsShowOptionsSettingImpl setting : options) {
      if (!setting.getApplicableVcses().isEmpty() || project.isDefault()) {
        final JCheckBox checkBox = new JCheckBox(setting.getDisplayName());
        myPromptsPanel.add(checkBox);
        myPromptOptions.put(setting, checkBox);
      }
    }

    myPromptsPanel.setSize(myPromptsPanel.getPreferredSize()); // todo check text!

    myOnPatchCreation.setName((SystemInfo.isMac ? "Reveal patch in" : "Show patch in ") +
                              ShowFilePathAction.getFileManagerName() + " after creation:");
  }

  /** Pushes the current UI state into the project's VCS configuration. */
  public void apply() {

    VcsConfiguration settings = VcsConfiguration.getInstance(myProject);

    settings.REMOVE_EMPTY_INACTIVE_CHANGELISTS = getSelected(myEmptyChangelistRemovingGroup);
    settings.RELOAD_CONTEXT = myReloadContext.isSelected();

    for (VcsShowOptionsSettingImpl setting : myPromptOptions.keySet()) {
      setting.setValue(myPromptOptions.get(setting).isSelected());
    }

    getAddConfirmation().setValue(getSelected(myOnFileAddingGroup));
    getRemoveConfirmation().setValue(getSelected(myOnFileRemovingGroup));
    applyPatchOption(settings);

    getReadOnlyStatusHandler().getState().SHOW_DIALOG = myShowReadOnlyStatusDialog.isSelected();
  }

  private void applyPatchOption(VcsConfiguration settings) {
    settings.SHOW_PATCH_IN_EXPLORER = getShowPatchValue();
  }

  /**
   * Maps the combo selection to the tri-state setting:
   * index 0 -> null (ask), 1 -> true (show), otherwise -> false (don't show).
   */
  @Nullable
  private Boolean getShowPatchValue() {
    final int index = myOnPatchCreation.getSelectedIndex();
    if (index == 0) {
      return null;
    } else if (index == 1) {
      return true;
    } else {
      return false;
    }
  }

  private VcsShowConfirmationOption getAddConfirmation() {
    return ProjectLevelVcsManagerEx.getInstanceEx(myProject)
      .getConfirmation(VcsConfiguration.StandardConfirmation.ADD);
  }

  private VcsShowConfirmationOption getRemoveConfirmation() {
    return ProjectLevelVcsManagerEx.getInstanceEx(myProject)
      .getConfirmation(VcsConfiguration.StandardConfirmation.REMOVE);
  }

  // Radio-group index -> confirmation value (0 = ask, 1 = silently, else nothing).
  private static VcsShowConfirmationOption.Value getSelected(JRadioButton[] group) {
    if (group[0].isSelected()) return VcsShowConfirmationOption.Value.SHOW_CONFIRMATION;
    if (group[1].isSelected()) return VcsShowConfirmationOption.Value.DO_ACTION_SILENTLY;
    return VcsShowConfirmationOption.Value.DO_NOTHING_SILENTLY;
  }

  // Same mapping for a ButtonGroup-based set of radio buttons.
  private static VcsShowConfirmationOption.Value getSelected(ButtonGroup group) {
    switch (UIUtil.getSelectedButton(group)) {
      case 0:
        return VcsShowConfirmationOption.Value.SHOW_CONFIRMATION;
      case 1:
        return VcsShowConfirmationOption.Value.DO_ACTION_SILENTLY;
    }
    return VcsShowConfirmationOption.Value.DO_NOTHING_SILENTLY;
  }

  private ReadonlyStatusHandlerImpl getReadOnlyStatusHandler() {
    return ((ReadonlyStatusHandlerImpl)ReadonlyStatusHandler.getInstance(myProject));
  }

  /** @return true when any UI control differs from the stored configuration. */
  public boolean isModified() {

    VcsConfiguration settings = VcsConfiguration.getInstance(myProject);
    if (settings.REMOVE_EMPTY_INACTIVE_CHANGELISTS != getSelected(myEmptyChangelistRemovingGroup)){
      return true;
    }
    if (settings.RELOAD_CONTEXT != myReloadContext.isSelected()) return true;
    if (getReadOnlyStatusHandler().getState().SHOW_DIALOG != myShowReadOnlyStatusDialog.isSelected()) {
      return true;
    }

    for (VcsShowOptionsSettingImpl setting : myPromptOptions.keySet()) {
      if (setting.getValue() != myPromptOptions.get(setting).isSelected()) return true;
    }

    if (getSelected(myOnFileAddingGroup) != getAddConfirmation().getValue()) return true;
    if (getSelected(myOnFileRemovingGroup) != getRemoveConfirmation().getValue()) return true;
    if (! Comparing.equal(settings.SHOW_PATCH_IN_EXPLORER, getShowPatchValue())) return true;
    return false;
  }

  /** Loads the stored configuration into the UI controls. */
  public void reset() {
    VcsConfiguration settings = VcsConfiguration.getInstance(myProject);
    myReloadContext.setSelected(settings.RELOAD_CONTEXT);
    VcsShowConfirmationOption.Value value = settings.REMOVE_EMPTY_INACTIVE_CHANGELISTS;
    // Button order is: 0 = ask, 1 = do silently, 2 = do nothing.
    UIUtil.setSelectedButton(myEmptyChangelistRemovingGroup, value == VcsShowConfirmationOption.Value.SHOW_CONFIRMATION
                             ? 0
                             : value == VcsShowConfirmationOption.Value.DO_NOTHING_SILENTLY ? 2 : 1);
    myShowReadOnlyStatusDialog.setSelected(getReadOnlyStatusHandler().getState().SHOW_DIALOG);
    for (VcsShowOptionsSettingImpl setting : myPromptOptions.keySet()) {
      myPromptOptions.get(setting).setSelected(setting.getValue());
    }
    selectInGroup(myOnFileAddingGroup, getAddConfirmation());
    selectInGroup(myOnFileRemovingGroup, getRemoveConfirmation());
    if (settings.SHOW_PATCH_IN_EXPLORER == null) {
      myOnPatchCreation.setSelectedIndex(0);
    } else if (Boolean.TRUE.equals(settings.SHOW_PATCH_IN_EXPLORER)) {
      myOnPatchCreation.setSelectedIndex(1);
    } else {
      myOnPatchCreation.setSelectedIndex(2);
    }
  }

  private static void selectInGroup(final JRadioButton[] group, final VcsShowConfirmationOption confirmation) {
    final VcsShowConfirmationOption.Value value = confirmation.getValue();
    final int index;
    //noinspection EnumSwitchStatementWhichMissesCases
    switch(value) {
      case SHOW_CONFIRMATION: index = 0; break;
      case DO_ACTION_SILENTLY: index = 1; break;
      default: index = 2;
    }
    group[index].setSelected(true);
  }


  public JComponent getPanel() {
    return myPanel;
  }

  /**
   * Enables/disables the option checkboxes and confirmation panels based on
   * which VCSes are currently active, and updates their tooltips with the
   * list of VCSes each option applies to.
   */
  public void updateAvailableOptions(final Collection<AbstractVcs> activeVcses) {
    for (VcsShowOptionsSettingImpl setting : myPromptOptions.keySet()) {
      final JCheckBox checkBox = myPromptOptions.get(setting);
      checkBox.setEnabled(setting.isApplicableTo(activeVcses) || myProject.isDefault());
      if (!myProject.isDefault()) {
        checkBox.setToolTipText(VcsBundle.message("tooltip.text.action.applicable.to.vcses", composeText(setting.getApplicableVcses())));
      }
    }

    if (!myProject.isDefault()) {
      final ProjectLevelVcsManagerEx vcsManager = ProjectLevelVcsManagerEx.getInstanceEx(myProject);
      final VcsShowConfirmationOptionImpl addConfirmation = vcsManager.getConfirmation(VcsConfiguration.StandardConfirmation.ADD);
      UIUtil.setEnabled(myAddConfirmationPanel, addConfirmation.isApplicableTo(activeVcses), true);
      myAddConfirmationPanel.setToolTipText(
        VcsBundle.message("tooltip.text.action.applicable.to.vcses", composeText(addConfirmation.getApplicableVcses())));

      final VcsShowConfirmationOptionImpl removeConfirmation = vcsManager.getConfirmation(VcsConfiguration.StandardConfirmation.REMOVE);
      UIUtil.setEnabled(myRemoveConfirmationPanel, removeConfirmation.isApplicableTo(activeVcses), true);
      myRemoveConfirmationPanel.setToolTipText(
        VcsBundle.message("tooltip.text.action.applicable.to.vcses", composeText(removeConfirmation.getApplicableVcses())));
    }
  }

  // Sorted, comma-separated display names of the given VCSes.
  private static String composeText(final List<AbstractVcs> applicableVcses) {
    final TreeSet<String> result = new TreeSet<>();
    for (AbstractVcs abstractVcs : applicableVcses) {
      result.add(abstractVcs.getDisplayName());
    }
    return StringUtil.join(result, ", ");
  }
}
| jk1/intellij-community | platform/vcs-impl/src/com/intellij/openapi/vcs/configurable/VcsGeneralConfigurationPanel.java | Java | apache-2.0 | 10,031 |
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using Xunit;
namespace Apache.Arrow.Tests
{
/// <summary>
/// Unit tests for <c>BitUtility</c>; each nested class groups the theories
/// for one static method.
/// </summary>
public class BitUtilityTests
{
    // ByteCount(n): number of bytes needed to hold n bits (rounded up).
    public class ByteCount
    {
        [Theory]
        [InlineData(0, 0)]
        [InlineData(1, 1)]
        [InlineData(8, 1)]
        [InlineData(9, 2)]
        [InlineData(32, 4)]
        public void HasExpectedResult(int n, int expected)
        {
            var count = BitUtility.ByteCount(n);
            Assert.Equal(expected, count);
        }
    }

    // CountBits: population count over a byte span, with optional
    // bit offset and bit length.
    public class CountBits
    {
        [Theory]
        [InlineData(new byte[] { 0b00000000 }, 0)]
        [InlineData(new byte[] { 0b00000001 }, 1)]
        [InlineData(new byte[] { 0b11111111 }, 8)]
        [InlineData(new byte[] { 0b01001001, 0b01010010 }, 6)]
        public void CountsAllOneBits(byte[] data, int expectedCount)
        {
            Assert.Equal(expectedCount,
                BitUtility.CountBits(data));
        }

        [Theory]
        [InlineData(new byte[] { 0b11111111 }, 0, 8)]
        [InlineData(new byte[] { 0b11111111 }, 3, 5)]
        [InlineData(new byte[] { 0b11111111, 0b11111111 }, 9, 7)]
        [InlineData(new byte[] { 0b11111111 }, -1, 0)] // negative offset counts nothing
        public void CountsAllOneBitsFromAnOffset(byte[] data, int offset, int expectedCount)
        {
            Assert.Equal(expectedCount,
                BitUtility.CountBits(data, offset));
        }

        [Theory]
        [InlineData(new byte[] { 0b11111111 }, 0, 8, 8)]
        [InlineData(new byte[] { 0b11111111 }, 0, 4, 4)]
        [InlineData(new byte[] { 0b11111111 }, 3, 2, 2)]
        [InlineData(new byte[] { 0b11111111 }, 3, 5, 5)]
        [InlineData(new byte[] { 0b11111111, 0b11111111 }, 9, 7, 7)]
        [InlineData(new byte[] { 0b11111111, 0b11111111 }, 7, 2, 2)]
        [InlineData(new byte[] { 0b11111111, 0b11111111, 0b11111111 }, 0, 24, 24)]
        [InlineData(new byte[] { 0b11111111, 0b11111111, 0b11111111 }, 8, 16, 16)]
        [InlineData(new byte[] { 0b11111111, 0b11111111, 0b11111111 }, 0, 16, 16)]
        [InlineData(new byte[] { 0b11111111, 0b11111111, 0b11111111 }, 3, 18, 18)]
        [InlineData(new byte[] { 0b11111111 }, -1, 0, 0)]
        public void CountsAllOneBitsFromOffsetWithinLength(byte[] data, int offset, int length, int expectedCount)
        {
            var actualCount = BitUtility.CountBits(data, offset, length);
            Assert.Equal(expectedCount, actualCount);
        }

        [Fact]
        public void CountsZeroBitsWhenDataIsEmpty()
        {
            // A null span is treated as empty rather than throwing.
            Assert.Equal(0,
                BitUtility.CountBits(null));
        }
    }

    // GetBit: read a single bit (LSB-first within each byte).
    public class GetBit
    {
        [Theory]
        [InlineData(new byte[] { 0b01001001 }, 0, true)]
        [InlineData(new byte[] { 0b01001001 }, 1, false)]
        [InlineData(new byte[] { 0b01001001 }, 2, false)]
        [InlineData(new byte[] { 0b01001001 }, 3, true)]
        [InlineData(new byte[] { 0b01001001 }, 4, false)]
        [InlineData(new byte[] { 0b01001001 }, 5, false)]
        [InlineData(new byte[] { 0b01001001 }, 6, true)]
        [InlineData(new byte[] { 0b01001001 }, 7, false)]
        [InlineData(new byte[] { 0b01001001, 0b01010010 }, 8, false)]
        [InlineData(new byte[] { 0b01001001, 0b01010010 }, 14, true)]
        public void GetsCorrectBitForIndex(byte[] data, int index, bool expectedValue)
        {
            Assert.Equal(expectedValue,
                BitUtility.GetBit(data, index));
        }

        [Theory]
        [InlineData(null, 0)]
        [InlineData(new byte[] { 0b00000000 }, -1)]
        public void ThrowsWhenBitIndexOutOfRange(byte[] data, int index)
        {
            Assert.Throws<IndexOutOfRangeException>(() =>
                BitUtility.GetBit(data, index));
        }
    }

    // SetBit: set a single bit to 1 in place.
    public class SetBit
    {
        [Theory]
        [InlineData(new byte[] { 0b00000000 }, 0, new byte[] { 0b00000001 })]
        [InlineData(new byte[] { 0b00000000 }, 2, new byte[] { 0b00000100 })]
        [InlineData(new byte[] { 0b00000000 }, 7, new byte[] { 0b10000000 })]
        [InlineData(new byte[] { 0b00000000, 0b00000000 }, 8, new byte[] { 0b00000000, 0b00000001 })]
        [InlineData(new byte[] { 0b00000000, 0b00000000 }, 15, new byte[] { 0b00000000, 0b10000000 })]
        public void SetsBitAtIndex(byte[] data, int index, byte[] expectedValue)
        {
            BitUtility.SetBit(data, index);
            Assert.Equal(expectedValue, data);
        }
    }

    // ClearBit: clear a single bit to 0 in place.
    public class ClearBit
    {
        [Theory]
        [InlineData(new byte[] { 0b00000001 }, 0, new byte[] { 0b00000000 })]
        [InlineData(new byte[] { 0b00000010 }, 1, new byte[] { 0b00000000 })]
        [InlineData(new byte[] { 0b10000001 }, 7, new byte[] { 0b00000001 })]
        [InlineData(new byte[] { 0b11111111, 0b11111111 }, 15, new byte[] { 0b11111111, 0b01111111 })]
        public void ClearsBitAtIndex(byte[] data, int index, byte[] expectedValue)
        {
            BitUtility.ClearBit(data, index);
            Assert.Equal(expectedValue, data);
        }
    }

    // RoundUpToMultipleOf64: next multiple of 64 >= size; non-positive -> 0.
    public class RoundUpToMultipleOf64
    {
        [Theory]
        [InlineData(0, 0)]
        [InlineData(1, 64)]
        [InlineData(63, 64)]
        [InlineData(64, 64)]
        [InlineData(65, 128)]
        [InlineData(129, 192)]
        public void ReturnsNextMultiple(int size, int expectedSize)
        {
            Assert.Equal(expectedSize,
                BitUtility.RoundUpToMultipleOf64(size));
        }

        [Theory]
        [InlineData(0)]
        [InlineData(-1)]
        public void ReturnsZeroWhenSizeIsLessThanOrEqualToZero(int size)
        {
            Assert.Equal(0,
                BitUtility.RoundUpToMultipleOf64(size));
        }
    }
}
}
| cpcloud/arrow | csharp/test/Apache.Arrow.Tests/BitUtilityTests.cs | C# | apache-2.0 | 7,010 |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#include <aws/ecs/model/RegisterContainerInstanceRequest.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <utility>
using namespace Aws::ECS::Model;
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
// Default-construct with all optional members marked as unset so that
// SerializePayload() omits them from the request JSON.
RegisterContainerInstanceRequest::RegisterContainerInstanceRequest() :
    m_clusterHasBeenSet(false),
    m_instanceIdentityDocumentHasBeenSet(false),
    m_instanceIdentityDocumentSignatureHasBeenSet(false),
    m_totalResourcesHasBeenSet(false),
    m_versionInfoHasBeenSet(false),
    m_containerInstanceArnHasBeenSet(false)
{
}
// Builds the JSON request body. Only members whose "HasBeenSet" flag is true
// are emitted, so unset optional fields are absent from the payload.
Aws::String RegisterContainerInstanceRequest::SerializePayload() const
{
  JsonValue payload;

  if(m_clusterHasBeenSet)
  {
    payload.WithString("cluster", m_cluster);
  }

  if(m_instanceIdentityDocumentHasBeenSet)
  {
    payload.WithString("instanceIdentityDocument", m_instanceIdentityDocument);
  }

  if(m_instanceIdentityDocumentSignatureHasBeenSet)
  {
    payload.WithString("instanceIdentityDocumentSignature", m_instanceIdentityDocumentSignature);
  }

  if(m_totalResourcesHasBeenSet)
  {
    // Serialize each resource object into a JSON array element.
    Array<JsonValue> totalResourcesJsonList(m_totalResources.size());
    for(unsigned totalResourcesIndex = 0; totalResourcesIndex < totalResourcesJsonList.GetLength(); ++totalResourcesIndex)
    {
      totalResourcesJsonList[totalResourcesIndex].AsObject(m_totalResources[totalResourcesIndex].Jsonize());
    }
    payload.WithArray("totalResources", std::move(totalResourcesJsonList));
  }

  if(m_versionInfoHasBeenSet)
  {
    payload.WithObject("versionInfo", m_versionInfo.Jsonize());
  }

  if(m_containerInstanceArnHasBeenSet)
  {
    payload.WithString("containerInstanceArn", m_containerInstanceArn);
  }

  return payload.WriteReadable();
}
// Adds the X-Amz-Target header that selects the RegisterContainerInstance
// operation on the ECS JSON-RPC endpoint.
Aws::Http::HeaderValueCollection RegisterContainerInstanceRequest::GetRequestSpecificHeaders() const
{
  Aws::Http::HeaderValueCollection headers;
  headers.insert(Aws::Http::HeaderValuePair("X-Amz-Target", "AmazonEC2ContainerServiceV20141113.RegisterContainerInstance"));
  // Return the local directly: `return std::move(headers)` is a pessimizing
  // move that inhibits named return value optimization (NRVO).
  return headers;
}
| bmildner/aws-sdk-cpp | aws-cpp-sdk-ecs/source/model/RegisterContainerInstanceRequest.cpp | C++ | apache-2.0 | 2,594 |
/*
* Copyright 2019 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.util.internal.svm;
import com.oracle.svm.core.annotate.Alias;
import com.oracle.svm.core.annotate.RecomputeFieldValue;
import com.oracle.svm.core.annotate.TargetClass;
/**
 * GraalVM native-image substitution for the shaded JCTools
 * {@code UnsafeRefArrayAccess} class: at image build time the
 * {@code REF_ELEMENT_SHIFT} constant is recomputed as the array index shift
 * of {@code Object[]} instead of being captured from the host JVM.
 */
@TargetClass(className = "io.netty.util.internal.shaded.org.jctools.util.UnsafeRefArrayAccess")
final class UnsafeRefArrayAccessSubstitution {

    // Not instantiable; the class only carries the aliased field below.
    private UnsafeRefArrayAccessSubstitution() {
    }

    @Alias
    @RecomputeFieldValue(
            kind = RecomputeFieldValue.Kind.ArrayIndexShift,
            declClass = Object[].class)
    public static int REF_ELEMENT_SHIFT;
}
| bryce-anderson/netty | common/src/main/java/io/netty/util/internal/svm/UnsafeRefArrayAccessSubstitution.java | Java | apache-2.0 | 1,194 |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import java.io.IOException;
import junit.framework.TestCase;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueTestUtil;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.experimental.categories.Category;
@Category({RegionServerTests.class, SmallTests.class})
public class TestKeyValueScanFixture extends TestCase {

  /**
   * Verifies that {@link KeyValueScanFixture} honors the scanner contract:
   * seek() positions on the first KeyValue at/after the seek key, peek()
   * does not advance, and next() returns then advances until exhausted
   * (peek()/next() return null).
   */
  public void testKeyValueScanFixture() throws IOException {
    // Three Puts across two rows: RowA has two qualifiers, RowB has one.
    KeyValue kvs[] = new KeyValue[]{
        KeyValueTestUtil.create("RowA", "family", "qf1",
            1, KeyValue.Type.Put, "value-1"),
        KeyValueTestUtil.create("RowA", "family", "qf2",
            1, KeyValue.Type.Put, "value-2"),
        KeyValueTestUtil.create("RowB", "family", "qf1",
            10, KeyValue.Type.Put, "value-10")
    };
    KeyValueScanner scan = new KeyValueScanFixture(
        KeyValue.COMPARATOR, kvs);

    KeyValue kv = KeyValueUtil.createFirstOnRow(Bytes.toBytes("RowA"));
    // should seek to this:
    assertTrue(scan.seek(kv));
    Cell res = scan.peek();
    assertEquals(kvs[0], res);

    kv = KeyValueUtil.createFirstOnRow(Bytes.toBytes("RowB"));
    assertTrue(scan.seek(kv));
    res = scan.peek();
    assertEquals(kvs[2], res);

    // ensure we pull things out properly:
    kv = KeyValueUtil.createFirstOnRow(Bytes.toBytes("RowA"));
    assertTrue(scan.seek(kv));
    assertEquals(kvs[0], scan.peek());
    assertEquals(kvs[0], scan.next());
    assertEquals(kvs[1], scan.peek());
    assertEquals(kvs[1], scan.next());
    assertEquals(kvs[2], scan.peek());
    assertEquals(kvs[2], scan.next());
    assertEquals(null, scan.peek());
    assertEquals(null, scan.next());
  }
}
| justintung/hbase | hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueScanFixture.java | Java | apache-2.0 | 2,727 |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.xdebugger.impl.breakpoints;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
/**
* @author nik
*/
public class CustomizedBreakpointPresentation {
  // Icon to render for the breakpoint; null means "use the default icon".
  private Icon myIcon;
  // Error text to display for the breakpoint; null when there is no error.
  private String myErrorMessage;

  /** @return the custom icon, or {@code null} if none was set */
  @Nullable
  public Icon getIcon() {
    return myIcon;
  }

  public void setIcon(final Icon icon) {
    myIcon = icon;
  }

  /** @return the error message, or {@code null} if none was set */
  public String getErrorMessage() {
    return myErrorMessage;
  }

  public void setErrorMessage(final String errorMessage) {
    myErrorMessage = errorMessage;
  }
}
| android-ia/platform_tools_idea | platform/xdebugger-impl/src/com/intellij/xdebugger/impl/breakpoints/CustomizedBreakpointPresentation.java | Java | apache-2.0 | 1,141 |
//---------------------------------------------------------------------------
// Greenplum Database
// Copyright (C) 2011 EMC Corp.
//
// @filename:
// CParseHandlerQueryOutput.cpp
//
// @doc:
// Implementation of the SAX parse handler class parsing the list of
// output column references in a DXL query.
//---------------------------------------------------------------------------
#include "naucrates/dxl/parser/CParseHandlerQueryOutput.h"
#include "naucrates/dxl/operators/CDXLOperatorFactory.h"
#include "naucrates/dxl/parser/CParseHandlerFactory.h"
#include "naucrates/dxl/parser/CParseHandlerScalarIdent.h"
using namespace gpdxl;
XERCES_CPP_NAMESPACE_USE
//---------------------------------------------------------------------------
// @function:
// CParseHandlerQueryOutput::CParseHandlerQueryOutput
//
// @doc:
// Constructor
//
//---------------------------------------------------------------------------
CParseHandlerQueryOutput::CParseHandlerQueryOutput(
    CMemoryPool *mp, CParseHandlerManager *parse_handler_mgr,
    CParseHandlerBase *parse_handler_root)
    // m_dxl_array stays null until a QueryOutput element is encountered
    // in StartElement().
    : CParseHandlerBase(mp, parse_handler_mgr, parse_handler_root),
      m_dxl_array(nullptr)
{
}
//---------------------------------------------------------------------------
// @function:
// CParseHandlerQueryOutput::~CParseHandlerQueryOutput
//
// @doc:
// Destructor
//
//---------------------------------------------------------------------------
CParseHandlerQueryOutput::~CParseHandlerQueryOutput()
{
	// m_dxl_array is only allocated once StartElement() has seen a
	// <dxl:QueryOutput> element; guard against a parse that failed (or was
	// abandoned) before that point, where the member is still nullptr.
	if (nullptr != m_dxl_array)
	{
		m_dxl_array->Release();
	}
}
//---------------------------------------------------------------------------
// @function:
// CParseHandlerQueryOutput::GetOutputColumnsDXLArray
//
// @doc:
// Return the list of query output columns
//
//---------------------------------------------------------------------------
// Returns the accumulated query output column references. Must only be
// called after a successful parse: the array is asserted to be non-null,
// i.e. a QueryOutput element was seen and EndElement() populated it.
CDXLNodeArray *
CParseHandlerQueryOutput::GetOutputColumnsDXLArray()
{
	GPOS_ASSERT(nullptr != m_dxl_array);
	return m_dxl_array;
}
//---------------------------------------------------------------------------
// @function:
// CParseHandlerQueryOutput::StartElement
//
// @doc:
// Invoked by Xerces to process an opening tag
//
//---------------------------------------------------------------------------
// Dispatches on the opening tag: a QueryOutput element allocates the result
// array; each nested ScalarIdent element gets a dedicated child parse
// handler; any other tag raises an unexpected-tag exception.
void
CParseHandlerQueryOutput::StartElement(const XMLCh *const element_uri,
									   const XMLCh *const element_local_name,
									   const XMLCh *const element_qname,
									   const Attributes &attrs)
{
	if (0 ==
		XMLString::compareString(CDXLTokens::XmlstrToken(EdxltokenQueryOutput),
								 element_local_name))
	{
		// start the query output section in the DXL document
		GPOS_ASSERT(nullptr == m_dxl_array);
		m_dxl_array = GPOS_NEW(m_mp) CDXLNodeArray(m_mp);
	}
	else if (0 == XMLString::compareString(
					  CDXLTokens::XmlstrToken(EdxltokenScalarIdent),
					  element_local_name))
	{
		// we must have seen a proj list already and initialized the proj list node
		GPOS_ASSERT(nullptr != m_dxl_array);

		// start new scalar ident element; the child handler takes over
		// parsing until the ident element is closed
		CParseHandlerBase *child_parse_handler =
			CParseHandlerFactory::GetParseHandler(
				m_mp, CDXLTokens::XmlstrToken(EdxltokenScalarIdent),
				m_parse_handler_mgr, this);
		m_parse_handler_mgr->ActivateParseHandler(child_parse_handler);

		// store parse handler so EndElement() can collect its result
		this->Append(child_parse_handler);

		child_parse_handler->startElement(element_uri, element_local_name,
										  element_qname, attrs);
	}
	else
	{
		CWStringDynamic *str = CDXLUtils::CreateDynamicStringFromXMLChArray(
			m_parse_handler_mgr->GetDXLMemoryManager(), element_local_name);
		GPOS_RAISE(gpdxl::ExmaDXL, gpdxl::ExmiDXLUnexpectedTag,
				   str->GetBuffer());
	}
}
//---------------------------------------------------------------------------
// @function:
// CParseHandlerQueryOutput::EndElement
//
// @doc:
// Invoked by Xerces to process a closing tag
//
//---------------------------------------------------------------------------
void
CParseHandlerQueryOutput::EndElement(const XMLCh *const, // element_uri,
const XMLCh *const element_local_name,
const XMLCh *const // element_qname
)
{
if (0 !=
XMLString::compareString(CDXLTokens::XmlstrToken(EdxltokenQueryOutput),
element_local_name))
{
CWStringDynamic *str = CDXLUtils::CreateDynamicStringFromXMLChArray(
m_parse_handler_mgr->GetDXLMemoryManager(), element_local_name);
GPOS_RAISE(gpdxl::ExmaDXL, gpdxl::ExmiDXLUnexpectedTag,
str->GetBuffer());
}
const ULONG size = this->Length();
for (ULONG ul = 0; ul < size; ul++)
{
CParseHandlerScalarIdent *child_parse_handler =
dynamic_cast<CParseHandlerScalarIdent *>((*this)[ul]);
GPOS_ASSERT(nullptr != child_parse_handler);
CDXLNode *pdxlnIdent = child_parse_handler->CreateDXLNode();
pdxlnIdent->AddRef();
m_dxl_array->Append(pdxlnIdent);
}
// deactivate handler
m_parse_handler_mgr->DeactivateHandler();
}
// EOF
| 50wu/gpdb | src/backend/gporca/libnaucrates/src/parser/CParseHandlerQueryOutput.cpp | C++ | apache-2.0 | 4,849 |
class PerlBuild < Formula
desc "Perl builder"
homepage "https://github.com/tokuhirom/Perl-Build"
url "https://github.com/tokuhirom/Perl-Build/archive/1.32.tar.gz"
sha256 "ba86d74ff9718977637806ef650c85615534f0b17023a72f447587676d7f66fd"
license any_of: ["Artistic-1.0", "GPL-1.0-or-later"]
head "https://github.com/tokuhirom/perl-build.git", branch: "master"
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "a9d4cdf8f97ae6c7aaafc8cb6e6d5099ec97f6ec0632a33af90e70766c9e497e"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "b662afe3c5e833e08c5e0a425f5597ab159b808e6285e90f96ee48e1f8d8d9a8"
sha256 cellar: :any_skip_relocation, monterey: "e05da78d5eab2ca95b3bdc567a1d8ef81d60c932af55420958f2e6538b18c89e"
sha256 cellar: :any_skip_relocation, big_sur: "a24fadf986032226343c74378f0344b15729687d9b0679f64e859e41a4f165db"
sha256 cellar: :any_skip_relocation, catalina: "e2b99b05c34a89e8706810730e8ac6da7d98c76025b72d86eb2a6003a47a4b85"
sha256 cellar: :any_skip_relocation, mojave: "5ae631c827ab5b58f0e2bafa3b5470f3b2f2236802942c3d4454ab96fd212aa8"
sha256 cellar: :any_skip_relocation, x86_64_linux: "7e55952e9cc4849a4a6da657c0b9e52f93da495518b9c0db1da64efab51ced28"
end
uses_from_macos "perl"
resource "Module::Build" do
url "https://cpan.metacpan.org/authors/id/L/LE/LEONT/Module-Build-0.4231.tar.gz"
sha256 "7e0f4c692c1740c1ac84ea14d7ea3d8bc798b2fb26c09877229e04f430b2b717"
end
resource "Module::Build::Tiny" do
url "https://cpan.metacpan.org/authors/id/L/LE/LEONT/Module-Build-Tiny-0.039.tar.gz"
sha256 "7d580ff6ace0cbe555bf36b86dc8ea232581530cbeaaea09bccb57b55797f11c"
end
resource "ExtUtils::Config" do
url "https://cpan.metacpan.org/authors/id/L/LE/LEONT/ExtUtils-Config-0.008.tar.gz"
sha256 "ae5104f634650dce8a79b7ed13fb59d67a39c213a6776cfdaa3ee749e62f1a8c"
end
resource "ExtUtils::Helpers" do
url "https://cpan.metacpan.org/authors/id/L/LE/LEONT/ExtUtils-Helpers-0.026.tar.gz"
sha256 "de901b6790a4557cf4ec908149e035783b125bf115eb9640feb1bc1c24c33416"
end
resource "ExtUtils::InstallPaths" do
url "https://cpan.metacpan.org/authors/id/L/LE/LEONT/ExtUtils-InstallPaths-0.012.tar.gz"
sha256 "84735e3037bab1fdffa3c2508567ad412a785c91599db3c12593a50a1dd434ed"
end
resource "HTTP::Tinyish" do
url "https://cpan.metacpan.org/authors/id/M/MI/MIYAGAWA/HTTP-Tinyish-0.17.tar.gz"
sha256 "47bd111e474566d733c41870e2374c81689db5e0b5a43adc48adb665d89fb067"
end
resource "CPAN::Perl::Releases" do
url "https://cpan.metacpan.org/authors/id/B/BI/BINGOS/CPAN-Perl-Releases-5.20210220.tar.gz"
sha256 "c88ba6bba670bfc36bcb10adcceab83428ab3b3363ac9bb11f374a88f52466be"
end
resource "CPAN::Perl::Releases::MetaCPAN" do
url "https://cpan.metacpan.org/authors/id/S/SK/SKAJI/CPAN-Perl-Releases-MetaCPAN-0.006.tar.gz"
sha256 "d78ef4ee4f0bc6d95c38bbcb0d2af81cf59a31bde979431c1b54ec50d71d0e1b"
end
resource "File::pushd" do
url "https://cpan.metacpan.org/authors/id/D/DA/DAGOLDEN/File-pushd-1.016.tar.gz"
sha256 "d73a7f09442983b098260df3df7a832a5f660773a313ca273fa8b56665f97cdc"
end
resource "HTTP::Tiny" do
url "https://cpan.metacpan.org/authors/id/D/DA/DAGOLDEN/HTTP-Tiny-0.076.tar.gz"
sha256 "ddbdaa2fb511339fa621a80021bf1b9733fddafc4fe0245f26c8b92171ef9387"
end
# Devel::PatchPerl dependency
resource "Module::Pluggable" do
url "https://cpan.metacpan.org/authors/id/S/SI/SIMONW/Module-Pluggable-5.2.tar.gz"
sha256 "b3f2ad45e4fd10b3fb90d912d78d8b795ab295480db56dc64e86b9fa75c5a6df"
end
resource "Devel::PatchPerl" do
url "https://cpan.metacpan.org/authors/id/B/BI/BINGOS/Devel-PatchPerl-2.08.tar.gz"
sha256 "69c6e97016260f408e9d7e448f942b36a6d49df5af07340f1d65d7e230167419"
end
# Pod::Usage dependency
resource "Pod::Text" do
url "https://cpan.metacpan.org/authors/id/R/RR/RRA/podlators-4.12.tar.gz"
sha256 "948717da19630a5f003da4406da90fe1cbdec9ae493671c90dfb6d8b3d63b7eb"
end
resource "Pod::Usage" do
url "https://cpan.metacpan.org/authors/id/M/MA/MAREKR/Pod-Usage-1.69.tar.gz"
sha256 "1a920c067b3c905b72291a76efcdf1935ba5423ab0187b9a5a63cfc930965132"
end
def install
ENV.prepend_create_path "PERL5LIB", libexec/"lib/perl5"
# Ensure we don't install the pre-packed script
(buildpath/"perl-build").unlink
# Remove this apparently dead symlink.
(buildpath/"bin/perl-build").unlink
build_pl = ["Module::Build::Tiny", "CPAN::Perl::Releases::MetaCPAN"]
resources.each do |r|
r.stage do
next if build_pl.include? r.name
system "perl", "Makefile.PL", "INSTALL_BASE=#{libexec}"
system "make"
system "make", "install"
end
end
build_pl.each do |name|
resource(name).stage do
system "perl", "Build.PL", "--install_base", libexec
system "./Build"
system "./Build", "install"
end
end
ENV.prepend_path "PATH", libexec/"bin"
system "perl", "Build.PL", "--install_base", libexec
# Replace the dead symlink we removed earlier.
(buildpath/"bin").install_symlink buildpath/"script/perl-build"
system "./Build"
system "./Build", "install"
%w[perl-build plenv-install plenv-uninstall].each do |cmd|
(bin/cmd).write_env_script(libexec/"bin/#{cmd}", PERL5LIB: ENV["PERL5LIB"])
end
end
test do
assert_match version.to_s, shell_output("#{bin}/perl-build --version")
end
end
| sjackman/homebrew-core | Formula/perl-build.rb | Ruby | bsd-2-clause | 5,489 |
class GitAbsorb < Formula
desc "Automatic git commit --fixup"
homepage "https://github.com/tummychow/git-absorb"
url "https://github.com/tummychow/git-absorb/archive/0.6.6.tar.gz"
sha256 "955069cc70a34816e6f4b6a6bd1892cfc0ae3d83d053232293366eb65599af2f"
license "BSD-3-Clause"
bottle do
rebuild 1
sha256 cellar: :any_skip_relocation, arm64_monterey: "a01139732b157c708bf13151074669105cca050159412fd781ed9be5b9afdb93"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "50ec784cd0089d5840025d2b108ac75b9b87b4ec786e9e4766304fc012cb3507"
sha256 cellar: :any_skip_relocation, monterey: "73201ddb25921212ac430c95be693d7b65ab5c4221a5a18958be63af69eef95b"
sha256 cellar: :any_skip_relocation, big_sur: "5c90abd3d3058854758851749660bab97f06a9b60b01e6eb75da29c3c6fa3941"
sha256 cellar: :any_skip_relocation, catalina: "0d9b836c7c18d1284e31fe6d354cbfae95c513fae6855d7d8897dbaab3eacf0e"
sha256 cellar: :any_skip_relocation, mojave: "d5f13b0f733d6c2d1cd8c98008fcf51faccd3bd4312dd7742dc6a2cc695d0a34"
sha256 cellar: :any_skip_relocation, x86_64_linux: "96f90dd36ce015d992314e9e6b325f4b2549fd2ef6871356f96d8ade728980c0"
end
depends_on "rust" => :build
uses_from_macos "zlib"
def install
system "cargo", "install", *std_cargo_args
man1.install "Documentation/git-absorb.1"
(zsh_completion/"_git-absorb").write Utils.safe_popen_read("#{bin}/git-absorb", "--gen-completions", "zsh")
(bash_completion/"git-absorb").write Utils.safe_popen_read("#{bin}/git-absorb", "--gen-completions", "bash")
(fish_completion/"git-absorb.fish").write Utils.safe_popen_read("#{bin}/git-absorb", "--gen-completions", "fish")
end
test do
(testpath/".gitconfig").write <<~EOS
[user]
name = Real Person
email = notacat@hotmail.cat
EOS
system "git", "init"
(testpath/"test").write "foo"
system "git", "add", "test"
system "git", "commit", "--message", "Initial commit"
(testpath/"test").delete
(testpath/"test").write "bar"
system "git", "add", "test"
system "git", "absorb"
end
end
| sjackman/homebrew-core | Formula/git-absorb.rb | Ruby | bsd-2-clause | 2,121 |
cask 'astropad' do
version '3.1'
sha256 '4082c09dd4aa440a2b8bd25104d98d3f431fbca2fc4f139d3e390632f4903f22'
url "https://astropad.com/downloads/Astropad-#{version}.zip"
appcast 'https://astropad.com/downloads/sparkle.xml'
name 'Astropad'
homepage 'https://astropad.com/'
app 'Astropad.app'
uninstall quit: 'com.astro-hq.AstropadMac'
zap trash: [
'~/Library/Caches/Astropad',
'~/Library/Caches/com.astro-hq.AstropadMac',
'~/Library/Preferences/com.astro-hq.AstropadMac.plist',
'~/Library/Saved Application State/com.astro-hq.AstropadMac.savedState',
]
end
| josa42/homebrew-cask | Casks/astropad.rb | Ruby | bsd-2-clause | 649 |
goog.provide('ol.test.extent');
goog.require('ol.extent');
goog.require('ol.proj');
describe('ol.extent', function() {
describe('buffer', function() {
it('buffers an extent by some value', function() {
var extent = [-10, -20, 10, 20];
expect(ol.extent.buffer(extent, 15)).to.eql([-25, -35, 25, 35]);
});
});
describe('clone', function() {
it('creates a copy of an extent', function() {
var extent = ol.extent.createOrUpdate(1, 2, 3, 4);
var clone = ol.extent.clone(extent);
expect(ol.extent.equals(extent, clone)).to.be(true);
ol.extent.extendCoordinate(extent, [10, 20]);
expect(ol.extent.equals(extent, clone)).to.be(false);
});
});
describe('closestSquaredDistanceXY', function() {
it('returns correct result when x left of extent', function() {
var extent = ol.extent.createOrUpdate(0, 0, 1, 1);
var x = -2;
var y = 0;
expect(ol.extent.closestSquaredDistanceXY(extent, x, y)).to.be(4);
});
it('returns correct result when x right of extent', function() {
var extent = ol.extent.createOrUpdate(0, 0, 1, 1);
var x = 3;
var y = 0;
expect(ol.extent.closestSquaredDistanceXY(extent, x, y)).to.be(4);
});
it('returns correct result for other x values', function() {
var extent = ol.extent.createOrUpdate(0, 0, 1, 1);
var x = 0.5;
var y = 3;
expect(ol.extent.closestSquaredDistanceXY(extent, x, y)).to.be(4);
});
it('returns correct result when y below extent', function() {
var extent = ol.extent.createOrUpdate(0, 0, 1, 1);
var x = 0;
var y = -2;
expect(ol.extent.closestSquaredDistanceXY(extent, x, y)).to.be(4);
});
it('returns correct result when y above extent', function() {
var extent = ol.extent.createOrUpdate(0, 0, 1, 1);
var x = 0;
var y = 3;
expect(ol.extent.closestSquaredDistanceXY(extent, x, y)).to.be(4);
});
it('returns correct result for other y values', function() {
var extent = ol.extent.createOrUpdate(0, 0, 1, 1);
var x = 3;
var y = 0.5;
expect(ol.extent.closestSquaredDistanceXY(extent, x, y)).to.be(4);
});
});
describe('createOrUpdateFromCoordinate', function() {
it('works when no extent passed', function() {
var coords = [0, 1];
var expected = [0, 1, 0, 1];
var got = ol.extent.createOrUpdateFromCoordinate(coords);
expect(got).to.eql(expected);
});
it('updates a passed extent', function() {
var extent = ol.extent.createOrUpdate(-4, -7, -3, -6);
var coords = [0, 1];
var expected = [0, 1, 0, 1];
ol.extent.createOrUpdateFromCoordinate(coords, extent);
expect(extent).to.eql(expected);
});
});
describe('createOrUpdateFromCoordinates', function() {
it('works when single coordinate and no extent passed', function() {
var coords = [[0, 1]];
var expected = [0, 1, 0, 1];
var got = ol.extent.createOrUpdateFromCoordinates(coords);
expect(got).to.eql(expected);
});
it('changes the passed extent when single coordinate', function() {
var extent = ol.extent.createOrUpdate(-4, -7, -3, -6);
var coords = [[0, 1]];
var expected = [0, 1, 0, 1];
ol.extent.createOrUpdateFromCoordinates(coords, extent);
expect(extent).to.eql(expected);
});
it('works when multiple coordinates and no extent passed', function() {
var coords = [[0, 1], [2, 3]];
var expected = [0, 1, 2, 3];
var got = ol.extent.createOrUpdateFromCoordinates(coords);
expect(got).to.eql(expected);
});
it('changes the passed extent when multiple coordinates given', function() {
var extent = ol.extent.createOrUpdate(-4, -7, -3, -6);
var coords = [[0, 1], [-2, -1]];
var expected = [-2, -1, 0, 1];
ol.extent.createOrUpdateFromCoordinates(coords, extent);
expect(extent).to.eql(expected);
});
});
describe('createOrUpdateFromRings', function() {
it('works when single ring and no extent passed', function() {
var ring = [[0, 0], [0, 2], [2, 2], [2, 0], [0, 0]];
var rings = [ring];
var expected = [0, 0, 2, 2];
var got = ol.extent.createOrUpdateFromRings(rings);
expect(got).to.eql(expected);
});
it('changes the passed extent when single ring given', function() {
var ring = [[0, 0], [0, 2], [2, 2], [2, 0], [0, 0]];
var rings = [ring];
var extent = [1, 1, 4, 7];
var expected = [0, 0, 2, 2];
ol.extent.createOrUpdateFromRings(rings, extent);
expect(extent).to.eql(expected);
});
it('works when multiple rings and no extent passed', function() {
var ring1 = [[0, 0], [0, 2], [2, 2], [2, 0], [0, 0]];
var ring2 = [[1, 1], [1, 3], [3, 3], [3, 1], [1, 1]];
var rings = [ring1, ring2];
var expected = [0, 0, 3, 3];
var got = ol.extent.createOrUpdateFromRings(rings);
expect(got).to.eql(expected);
});
it('changes the passed extent when multiple rings given', function() {
var ring1 = [[0, 0], [0, 2], [2, 2], [2, 0], [0, 0]];
var ring2 = [[1, 1], [1, 3], [3, 3], [3, 1], [1, 1]];
var rings = [ring1, ring2];
var extent = [1, 1, 4, 7];
var expected = [0, 0, 3, 3];
ol.extent.createOrUpdateFromRings(rings, extent);
expect(extent).to.eql(expected);
});
});
describe('forEachCorner', function() {
var callbackFalse;
var callbackTrue;
beforeEach(function() {
callbackFalse = sinon.spy(function() {
return false;
});
callbackTrue = sinon.spy(function() {
return true;
});
});
it('calls the passed callback for each corner', function() {
var extent = [1, 2, 3, 4];
ol.extent.forEachCorner(extent, callbackFalse);
expect(callbackFalse.callCount).to.be(4);
});
it('calls the passed callback with each corner', function() {
var extent = [1, 2, 3, 4];
ol.extent.forEachCorner(extent, callbackFalse);
var firstCallFirstArg = callbackFalse.args[0][0];
var secondCallFirstArg = callbackFalse.args[1][0];
var thirdCallFirstArg = callbackFalse.args[2][0];
var fourthCallFirstArg = callbackFalse.args[3][0];
expect(firstCallFirstArg).to.eql([1, 2]); // bl
expect(secondCallFirstArg).to.eql([3, 2]); // br
expect(thirdCallFirstArg).to.eql([3, 4]); // tr
expect(fourthCallFirstArg).to.eql([1, 4]); // tl
});
it('calls a truthy callback only once', function() {
var extent = [1, 2, 3, 4];
ol.extent.forEachCorner(extent, callbackTrue);
expect(callbackTrue.callCount).to.be(1);
});
it('ensures that any corner can cancel the callback execution', function() {
var extent = [1, 2, 3, 4];
var bottomLeftSpy = sinon.spy(function(corner) {
return (corner[0] === 1 && corner[1] === 2) ? true : false;
});
var bottomRightSpy = sinon.spy(function(corner) {
return (corner[0] === 3 && corner[1] === 2) ? true : false;
});
var topRightSpy = sinon.spy(function(corner) {
return (corner[0] === 3 && corner[1] === 4) ? true : false;
});
var topLeftSpy = sinon.spy(function(corner) {
return (corner[0] === 1 && corner[1] === 4) ? true : false;
});
ol.extent.forEachCorner(extent, bottomLeftSpy);
ol.extent.forEachCorner(extent, bottomRightSpy);
ol.extent.forEachCorner(extent, topRightSpy);
ol.extent.forEachCorner(extent, topLeftSpy);
expect(bottomLeftSpy.callCount).to.be(1);
expect(bottomRightSpy.callCount).to.be(2);
expect(topRightSpy.callCount).to.be(3);
expect(topLeftSpy.callCount).to.be(4);
});
it('returns false eventually, if no invocation returned a truthy value',
function() {
var extent = [1, 2, 3, 4];
var spy = sinon.spy(); // will return undefined for each corner
var got = ol.extent.forEachCorner(extent, spy);
expect(spy.callCount).to.be(4);
expect(got).to.be(false);
}
);
it('calls the callback with given scope', function() {
var extent = [1, 2, 3, 4];
var scope = {humpty: 'dumpty'};
ol.extent.forEachCorner(extent, callbackTrue, scope);
expect(callbackTrue.calledOn(scope)).to.be(true);
});
});
describe('getArea', function() {
it('returns zero for empty extents', function() {
var emptyExtent = ol.extent.createEmpty();
var areaEmpty = ol.extent.getArea(emptyExtent);
expect(areaEmpty).to.be(0);
var extentDeltaXZero = [45, 67, 45, 78];
var areaDeltaXZero = ol.extent.getArea(extentDeltaXZero);
expect(areaDeltaXZero).to.be(0);
var extentDeltaYZero = [11, 67, 45, 67];
var areaDeltaYZero = ol.extent.getArea(extentDeltaYZero);
expect(areaDeltaYZero).to.be(0);
});
it('calculates correct area for other extents', function() {
var extent = [0, 0, 10, 10];
var area = ol.extent.getArea(extent);
expect(area).to.be(100);
});
});
describe('getIntersection()', function() {
it('returns the intersection of two extents', function() {
var world = [-180, -90, 180, 90];
var north = [-180, 0, 180, 90];
var farNorth = [-180, 45, 180, 90];
var east = [0, -90, 180, 90];
var farEast = [90, -90, 180, 90];
var south = [-180, -90, 180, 0];
var farSouth = [-180, -90, 180, -45];
var west = [-180, -90, 0, 90];
var farWest = [-180, -90, -90, 90];
var none = ol.extent.createEmpty();
expect(ol.extent.getIntersection(world, none)).to.eql(none);
expect(ol.extent.getIntersection(world, north)).to.eql(north);
expect(ol.extent.getIntersection(world, east)).to.eql(east);
expect(ol.extent.getIntersection(world, south)).to.eql(south);
expect(ol.extent.getIntersection(world, west)).to.eql(west);
expect(ol.extent.getIntersection(farEast, farWest)).to.eql(none);
expect(ol.extent.getIntersection(farNorth, farSouth)).to.eql(none);
expect(ol.extent.getIntersection(north, west)).to.eql([-180, 0, 0, 90]);
expect(ol.extent.getIntersection(east, south)).to.eql([0, -90, 180, 0]);
});
});
describe('containsCoordinate', function() {
describe('positive', function() {
it('returns true', function() {
var extent = [1, 2, 3, 4];
expect(ol.extent.containsCoordinate(extent, [1, 2])).to.be.ok();
expect(ol.extent.containsCoordinate(extent, [1, 3])).to.be.ok();
expect(ol.extent.containsCoordinate(extent, [1, 4])).to.be.ok();
expect(ol.extent.containsCoordinate(extent, [2, 2])).to.be.ok();
expect(ol.extent.containsCoordinate(extent, [2, 3])).to.be.ok();
expect(ol.extent.containsCoordinate(extent, [2, 4])).to.be.ok();
expect(ol.extent.containsCoordinate(extent, [3, 2])).to.be.ok();
expect(ol.extent.containsCoordinate(extent, [3, 3])).to.be.ok();
expect(ol.extent.containsCoordinate(extent, [3, 4])).to.be.ok();
});
});
describe('negative', function() {
it('returns false', function() {
var extent = [1, 2, 3, 4];
expect(ol.extent.containsCoordinate(extent, [0, 1])).to.not.be();
expect(ol.extent.containsCoordinate(extent, [0, 2])).to.not.be();
expect(ol.extent.containsCoordinate(extent, [0, 3])).to.not.be();
expect(ol.extent.containsCoordinate(extent, [0, 4])).to.not.be();
expect(ol.extent.containsCoordinate(extent, [0, 5])).to.not.be();
expect(ol.extent.containsCoordinate(extent, [1, 1])).to.not.be();
expect(ol.extent.containsCoordinate(extent, [1, 5])).to.not.be();
expect(ol.extent.containsCoordinate(extent, [2, 1])).to.not.be();
expect(ol.extent.containsCoordinate(extent, [2, 5])).to.not.be();
expect(ol.extent.containsCoordinate(extent, [3, 1])).to.not.be();
expect(ol.extent.containsCoordinate(extent, [3, 5])).to.not.be();
expect(ol.extent.containsCoordinate(extent, [4, 1])).to.not.be();
expect(ol.extent.containsCoordinate(extent, [4, 2])).to.not.be();
expect(ol.extent.containsCoordinate(extent, [4, 3])).to.not.be();
expect(ol.extent.containsCoordinate(extent, [4, 4])).to.not.be();
expect(ol.extent.containsCoordinate(extent, [4, 5])).to.not.be();
});
});
});
describe('coordinateRelationship()', function() {
var extent = [-180, -90, 180, 90];
var INTERSECTING = 1;
var ABOVE = 2;
var RIGHT = 4;
var BELOW = 8;
var LEFT = 16;
it('returns intersecting for within', function() {
var rel = ol.extent.coordinateRelationship(extent, [0, 0]);
expect(rel).to.be(INTERSECTING);
});
it('returns intersecting for touching top', function() {
var rel = ol.extent.coordinateRelationship(extent, [0, 90]);
expect(rel).to.be(INTERSECTING);
});
it('returns intersecting for touching right', function() {
var rel = ol.extent.coordinateRelationship(extent, [180, 0]);
expect(rel).to.be(INTERSECTING);
});
it('returns intersecting for touching bottom', function() {
var rel = ol.extent.coordinateRelationship(extent, [0, -90]);
expect(rel).to.be(INTERSECTING);
});
it('returns intersecting for touching left', function() {
var rel = ol.extent.coordinateRelationship(extent, [-180, 0]);
expect(rel).to.be(INTERSECTING);
});
it('above for north', function() {
var rel = ol.extent.coordinateRelationship(extent, [0, 100]);
expect(rel).to.be(ABOVE);
});
it('above and right for northeast', function() {
var rel = ol.extent.coordinateRelationship(extent, [190, 100]);
expect(rel & ABOVE).to.be(ABOVE);
expect(rel & RIGHT).to.be(RIGHT);
});
it('right for east', function() {
var rel = ol.extent.coordinateRelationship(extent, [190, 0]);
expect(rel).to.be(RIGHT);
});
it('below and right for southeast', function() {
var rel = ol.extent.coordinateRelationship(extent, [190, -100]);
expect(rel & BELOW).to.be(BELOW);
expect(rel & RIGHT).to.be(RIGHT);
});
it('below for south', function() {
var rel = ol.extent.coordinateRelationship(extent, [0, -100]);
expect(rel).to.be(BELOW);
});
it('below and left for southwest', function() {
var rel = ol.extent.coordinateRelationship(extent, [-190, -100]);
expect(rel & BELOW).to.be(BELOW);
expect(rel & LEFT).to.be(LEFT);
});
it('left for west', function() {
var rel = ol.extent.coordinateRelationship(extent, [-190, 0]);
expect(rel).to.be(LEFT);
});
it('above and left for northwest', function() {
var rel = ol.extent.coordinateRelationship(extent, [-190, 100]);
expect(rel & ABOVE).to.be(ABOVE);
expect(rel & LEFT).to.be(LEFT);
});
});
describe('getCenter', function() {
it('returns the expected center', function() {
var extent = [1, 2, 3, 4];
var center = ol.extent.getCenter(extent);
expect(center[0]).to.eql(2);
expect(center[1]).to.eql(3);
});
it('returns [NaN, NaN] for empty extents', function() {
var extent = ol.extent.createEmpty();
var center = ol.extent.getCenter(extent);
expect('' + center[0]).to.be('NaN');
expect('' + center[1]).to.be('NaN');
});
});
describe('getCorner', function() {
var extent = [1, 2, 3, 4];
it('gets the bottom left', function() {
var corner = 'bottom-left';
expect(ol.extent.getCorner(extent, corner)).to.eql([1, 2]);
});
it('gets the bottom right', function() {
var corner = 'bottom-right';
expect(ol.extent.getCorner(extent, corner)).to.eql([3, 2]);
});
it('gets the top left', function() {
var corner = 'top-left';
expect(ol.extent.getCorner(extent, corner)).to.eql([1, 4]);
});
it('gets the top right', function() {
var corner = 'top-right';
expect(ol.extent.getCorner(extent, corner)).to.eql([3, 4]);
});
it('throws exception for unexpected corner', function() {
expect(function() {
ol.extent.getCorner(extent, 'foobar');
}).to.throwException();
});
});
describe('getEnlargedArea', function() {
it('returns enlarged area of two extents', function() {
var extent1 = [-1, -1, 0, 0];
var extent2 = [0, 0, 1, 1];
var enlargedArea = ol.extent.getEnlargedArea(extent1, extent2);
expect(enlargedArea).to.be(4);
});
});
describe('getForViewAndSize', function() {
it('works for a unit square', function() {
var extent = ol.extent.getForViewAndSize(
[0, 0], 1, 0, [1, 1]);
expect(extent[0]).to.be(-0.5);
expect(extent[2]).to.be(0.5);
expect(extent[1]).to.be(-0.5);
expect(extent[3]).to.be(0.5);
});
it('works for center', function() {
var extent = ol.extent.getForViewAndSize(
[5, 10], 1, 0, [1, 1]);
expect(extent[0]).to.be(4.5);
expect(extent[2]).to.be(5.5);
expect(extent[1]).to.be(9.5);
expect(extent[3]).to.be(10.5);
});
it('works for rotation', function() {
var extent = ol.extent.getForViewAndSize(
[0, 0], 1, Math.PI / 4, [1, 1]);
expect(extent[0]).to.roughlyEqual(-Math.sqrt(0.5), 1e-9);
expect(extent[2]).to.roughlyEqual(Math.sqrt(0.5), 1e-9);
expect(extent[1]).to.roughlyEqual(-Math.sqrt(0.5), 1e-9);
expect(extent[3]).to.roughlyEqual(Math.sqrt(0.5), 1e-9);
});
it('works for resolution', function() {
var extent = ol.extent.getForViewAndSize(
[0, 0], 2, 0, [1, 1]);
expect(extent[0]).to.be(-1);
expect(extent[2]).to.be(1);
expect(extent[1]).to.be(-1);
expect(extent[3]).to.be(1);
});
it('works for size', function() {
var extent = ol.extent.getForViewAndSize(
[0, 0], 1, 0, [10, 5]);
expect(extent[0]).to.be(-5);
expect(extent[2]).to.be(5);
expect(extent[1]).to.be(-2.5);
expect(extent[3]).to.be(2.5);
});
});
describe('getSize', function() {
it('returns the expected size', function() {
var extent = [0, 1, 2, 4];
var size = ol.extent.getSize(extent);
expect(size).to.eql([2, 3]);
});
});
describe('getIntersectionArea', function() {
it('returns correct area when extents intersect', function() {
var extent1 = [0, 0, 2, 2];
var extent2 = [1, 1, 3, 3];
var intersectionArea = ol.extent.getIntersectionArea(extent1, extent2);
expect(intersectionArea).to.be(1);
});
it('returns 0 when extents do not intersect', function() {
var extent1 = [0, 0, 1, 1];
var extent2 = [2, 2, 3, 3];
var intersectionArea = ol.extent.getIntersectionArea(extent1, extent2);
expect(intersectionArea).to.be(0);
});
});
describe('getMargin', function() {
it('returns the correct margin (sum of width and height)', function() {
var extent = [1, 2, 3, 4];
expect(ol.extent.getMargin(extent)).to.be(4);
});
});
describe('intersects', function() {
it('returns the expected value', function() {
var intersects = ol.extent.intersects;
var extent = [50, 50, 100, 100];
expect(intersects(extent, extent)).to.be(true);
expect(intersects(extent, [20, 20, 80, 80])).to.be(true);
expect(intersects(extent, [20, 50, 80, 100])).to.be(true);
expect(intersects(extent, [20, 80, 80, 120])).to.be(true);
expect(intersects(extent, [50, 20, 100, 80])).to.be(true);
expect(intersects(extent, [50, 80, 100, 120])).to.be(true);
expect(intersects(extent, [80, 20, 120, 80])).to.be(true);
expect(intersects(extent, [80, 50, 120, 100])).to.be(true);
expect(intersects(extent, [80, 80, 120, 120])).to.be(true);
expect(intersects(extent, [20, 20, 120, 120])).to.be(true);
expect(intersects(extent, [70, 70, 80, 80])).to.be(true);
expect(intersects(extent, [10, 10, 30, 30])).to.be(false);
expect(intersects(extent, [30, 10, 70, 30])).to.be(false);
expect(intersects(extent, [50, 10, 100, 30])).to.be(false);
expect(intersects(extent, [80, 10, 120, 30])).to.be(false);
expect(intersects(extent, [120, 10, 140, 30])).to.be(false);
expect(intersects(extent, [10, 30, 30, 70])).to.be(false);
expect(intersects(extent, [120, 30, 140, 70])).to.be(false);
expect(intersects(extent, [10, 50, 30, 100])).to.be(false);
expect(intersects(extent, [120, 50, 140, 100])).to.be(false);
expect(intersects(extent, [10, 80, 30, 120])).to.be(false);
expect(intersects(extent, [120, 80, 140, 120])).to.be(false);
expect(intersects(extent, [10, 120, 30, 140])).to.be(false);
expect(intersects(extent, [30, 120, 70, 140])).to.be(false);
expect(intersects(extent, [50, 120, 100, 140])).to.be(false);
expect(intersects(extent, [80, 120, 120, 140])).to.be(false);
expect(intersects(extent, [120, 120, 140, 140])).to.be(false);
});
});
describe('scaleFromCenter', function() {
it('scales the extent from its center', function() {
var extent = [1, 1, 3, 3];
ol.extent.scaleFromCenter(extent, 2);
expect(extent[0]).to.eql(0);
expect(extent[2]).to.eql(4);
expect(extent[1]).to.eql(0);
expect(extent[3]).to.eql(4);
});
});
describe('intersectsSegment()', function() {
var extent = [-180, -90, 180, 90];
var north = [0, 100];
var northeast = [190, 100];
var east = [190, 0];
var southeast = [190, -100];
var south = [0, -100];
var southwest = [-190, -100];
var west = [-190, 0];
var northwest = [-190, 100];
var center = [0, 0];
var top = [0, 90];
var right = [180, 0];
var bottom = [-90, 0];
var left = [-180, 0];
var inside = [10, 10];
it('returns true if contained', function() {
var intersects = ol.extent.intersectsSegment(extent, center, inside);
expect(intersects).to.be(true);
});
it('returns true if crosses top', function() {
var intersects = ol.extent.intersectsSegment(extent, center, north);
expect(intersects).to.be(true);
});
it('returns true if crosses right', function() {
var intersects = ol.extent.intersectsSegment(extent, center, east);
expect(intersects).to.be(true);
});
it('returns true if crosses bottom', function() {
var intersects = ol.extent.intersectsSegment(extent, center, south);
expect(intersects).to.be(true);
});
it('returns true if crosses left', function() {
var intersects = ol.extent.intersectsSegment(extent, center, west);
expect(intersects).to.be(true);
});
it('returns false if above', function() {
var intersects = ol.extent.intersectsSegment(extent, northwest, north);
expect(intersects).to.be(false);
});
it('returns false if right', function() {
var intersects = ol.extent.intersectsSegment(extent, northeast, east);
expect(intersects).to.be(false);
});
it('returns false if below', function() {
var intersects = ol.extent.intersectsSegment(extent, south, southwest);
expect(intersects).to.be(false);
});
it('returns false if left', function() {
var intersects = ol.extent.intersectsSegment(extent, west, southwest);
expect(intersects).to.be(false);
});
it('returns true if crosses top to bottom', function() {
var intersects = ol.extent.intersectsSegment(extent, north, south);
expect(intersects).to.be(true);
});
it('returns true if crosses bottom to top', function() {
var intersects = ol.extent.intersectsSegment(extent, south, north);
expect(intersects).to.be(true);
});
it('returns true if crosses left to right', function() {
var intersects = ol.extent.intersectsSegment(extent, west, east);
expect(intersects).to.be(true);
});
it('returns true if crosses right to left', function() {
var intersects = ol.extent.intersectsSegment(extent, east, west);
expect(intersects).to.be(true);
});
it('returns true if crosses northwest to east', function() {
var intersects = ol.extent.intersectsSegment(extent, northwest, east);
expect(intersects).to.be(true);
});
it('returns true if crosses south to west', function() {
var intersects = ol.extent.intersectsSegment(extent, south, west);
expect(intersects).to.be(true);
});
it('returns true if touches top', function() {
var intersects = ol.extent.intersectsSegment(extent, northwest, top);
expect(intersects).to.be(true);
});
it('returns true if touches right', function() {
var intersects = ol.extent.intersectsSegment(extent, southeast, right);
expect(intersects).to.be(true);
});
it('returns true if touches bottom', function() {
var intersects = ol.extent.intersectsSegment(extent, bottom, south);
expect(intersects).to.be(true);
});
it('returns true if touches left', function() {
var intersects = ol.extent.intersectsSegment(extent, left, west);
expect(intersects).to.be(true);
});
it('works for zero length inside', function() {
var intersects = ol.extent.intersectsSegment(extent, center, center);
expect(intersects).to.be(true);
});
it('works for zero length outside', function() {
var intersects = ol.extent.intersectsSegment(extent, north, north);
expect(intersects).to.be(false);
});
it('works for left/right intersection spanning top to bottom', function() {
var extent = [2, 1, 3, 4];
var start = [0, 0];
var end = [5, 5];
expect(ol.extent.intersectsSegment(extent, start, end)).to.be(true);
expect(ol.extent.intersectsSegment(extent, end, start)).to.be(true);
});
it('works for top/bottom intersection spanning left to right', function() {
var extent = [1, 2, 4, 3];
var start = [0, 0];
var end = [5, 5];
expect(ol.extent.intersectsSegment(extent, start, end)).to.be(true);
expect(ol.extent.intersectsSegment(extent, end, start)).to.be(true);
});
});
describe('#applyTransform()', function() {
it('does transform', function() {
var transformFn = ol.proj.getTransform('EPSG:4326', 'EPSG:3857');
var sourceExtent = [-15, -30, 45, 60];
var destinationExtent = ol.extent.applyTransform(
sourceExtent, transformFn);
expect(destinationExtent).not.to.be(undefined);
expect(destinationExtent).not.to.be(null);
// FIXME check values with third-party tool
expect(destinationExtent[0])
.to.roughlyEqual(-1669792.3618991037, 1e-9);
expect(destinationExtent[2]).to.roughlyEqual(5009377.085697311, 1e-9);
expect(destinationExtent[1]).to.roughlyEqual(-3503549.843504376, 1e-8);
expect(destinationExtent[3]).to.roughlyEqual(8399737.889818361, 1e-8);
});
it('takes arbitrary function', function() {
var transformFn = function(input, output, opt_dimension) {
var dimension = opt_dimension !== undefined ? opt_dimension : 2;
if (output === undefined) {
output = new Array(input.length);
}
var n = input.length;
var i;
for (i = 0; i < n; i += dimension) {
output[i] = -input[i];
output[i + 1] = -input[i + 1];
}
return output;
};
var sourceExtent = [-15, -30, 45, 60];
var destinationExtent = ol.extent.applyTransform(
sourceExtent, transformFn);
expect(destinationExtent).not.to.be(undefined);
expect(destinationExtent).not.to.be(null);
expect(destinationExtent[0]).to.be(-45);
expect(destinationExtent[2]).to.be(15);
expect(destinationExtent[1]).to.be(-60);
expect(destinationExtent[3]).to.be(30);
});
});
});
| wet-boew/openlayers-dist | test/spec/ol/extent.test.js | JavaScript | bsd-2-clause | 27,991 |
# -*- coding: utf-8 -*-
#
# scikit-learn documentation build configuration file, created by
# sphinx-quickstart on Fri Jan 8 09:13:42 2010.
#
# This file is execfile()d with the current directory set to its containing
# dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
from __future__ import print_function
import sys
import os
from sklearn.externals.six import u
# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory
# is relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
sys.path.insert(0, os.path.abspath('sphinxext'))
# github_link presumably lives in ./sphinxext (made importable by the
# path insert above) -- it provides the linkcode resolver used below.
from github_link import make_linkcode_resolve
import sphinx_gallery
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
    'sphinx.ext.autodoc', 'sphinx.ext.autosummary',
    'numpydoc',
    'sphinx.ext.linkcode', 'sphinx.ext.doctest',
    'sphinx_gallery.gen_gallery',
    'sphinx_issues',
]
# this is needed for some reason...
# see https://github.com/numpy/numpydoc/issues/69
numpydoc_class_members_toctree = False
# pngmath / imgmath compatibility layer for different sphinx versions:
# Sphinx < 1.4 only ships sphinx.ext.pngmath, later versions use imgmath.
import sphinx
from distutils.version import LooseVersion
if LooseVersion(sphinx.__version__) < LooseVersion('1.4'):
    extensions.append('sphinx.ext.pngmath')
else:
    extensions.append('sphinx.ext.imgmath')
autodoc_default_flags = ['members', 'inherited-members']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['templates']
# generate autosummary even if no references
autosummary_generate = True
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# Generate the plots for the gallery
plot_gallery = True
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u('scikit-learn')
copyright = u('2007 - 2017, scikit-learn developers (BSD License)')
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
import sklearn
version = sklearn.__version__
# The full version, including alpha/beta/rc tags.
release = sklearn.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be
# searched for source files.
exclude_trees = ['_build', 'templates', 'includes']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = False
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# 'scikit-learn' is a custom theme shipped in the local 'themes' directory
# (registered via html_theme_path below).
html_theme = 'scikit-learn'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {'oldversion': False, 'collapsiblesidebar': True,
                      'google_analytics': True, 'surveybanner': False,
                      'sprintbanner': True}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['themes']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
html_short_title = 'scikit-learn'
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = 'logos/scikit-learn-logo-small.png'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = 'logos/favicon.ico'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# Note: this project uses 'images' rather than the conventional '_static'.
html_static_path = ['images']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
html_domain_indices = False
# If false, no index is generated.
html_use_index = False
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'scikit-learndoc'
# -- Options for LaTeX output ------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [('index', 'user_guide.tex', u('scikit-learn user guide'),
                    u('scikit-learn developers'), 'manual'), ]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
latex_logo = "logos/scikit-learn-logo.png"
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
latex_preamble = r"""
\usepackage{amsmath}\usepackage{amsfonts}\usepackage{bm}\usepackage{morefloats}
\usepackage{enumitem} \setlistdepth{10}
"""
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
latex_domain_indices = False
trim_doctests_flags = True
# sphinx-gallery settings: which module to cross-reference, where generated
# backreference files go, and intersphinx-style URLs for linked projects.
sphinx_gallery_conf = {
    'doc_module': 'sklearn',
    'backreferences_dir': os.path.join('modules', 'generated'),
    'reference_url': {
        'sklearn': None,
        'matplotlib': 'http://matplotlib.org',
        'numpy': 'http://docs.scipy.org/doc/numpy-1.8.1',
        'scipy': 'http://docs.scipy.org/doc/scipy-0.13.3/reference'}
}
# The following dictionary contains the information used to create the
# thumbnails for the front page of the scikit-learn home page.
# key: first image in set
# values: (number of plot in set, height of thumbnail)
# NOTE(review): the values are actually consumed as per-image max widths by
# make_carousel_thumbs below, not (count, height) pairs -- confirm intent.
carousel_thumbs = {'sphx_glr_plot_classifier_comparison_001.png': 600,
                   'sphx_glr_plot_outlier_detection_003.png': 372,
                   'sphx_glr_plot_gpr_co2_001.png': 350,
                   'sphx_glr_plot_adaboost_twoclass_001.png': 372,
                   'sphx_glr_plot_compare_methods_001.png': 349}
def make_carousel_thumbs(app, exception):
    """Produce the final resized carousel images.

    Connected to the Sphinx 'build-finished' event; does nothing when the
    build ended with an exception. For each known carousel image found in
    the build output, writes a '<name>_carousel.png' resized copy.
    """
    if exception is not None:
        return
    print('Preparing carousel images')
    out_dir = os.path.join(app.builder.outdir, '_images')
    for plot_name, width in carousel_thumbs.items():
        source_path = os.path.join(out_dir, plot_name)
        if not os.path.exists(source_path):
            continue
        thumb_path = os.path.join(out_dir, plot_name[:-4] + '_carousel.png')
        sphinx_gallery.gen_rst.scale_image(source_path, thumb_path, width, 190)
# Config for sphinx_issues
# URL templates used by the 'sphinx_issues' extension listed in `extensions`.
issues_uri = 'https://github.com/scikit-learn/scikit-learn/issues/{issue}'
issues_github_path = 'scikit-learn/scikit-learn'
issues_user_uri = 'https://github.com/{user}'
def setup(app):
    """Sphinx extension entry point: register static assets and event hooks."""
    # to hide/show the prompt in code examples:
    app.add_javascript('js/copybutton.js')
    # resize the front-page carousel images once the build has finished
    app.connect('build-finished', make_carousel_thumbs)
# The following is used by sphinx.ext.linkcode to provide links to github
# (make_linkcode_resolve is imported from github_link, see sys.path setup
# near the top of this file).
linkcode_resolve = make_linkcode_resolve('sklearn',
                                         u'https://github.com/scikit-learn/'
                                         'scikit-learn/blob/{revision}/'
                                         '{package}/{path}#L{lineno}')
| nhejazi/scikit-learn | doc/conf.py | Python | bsd-3-clause | 9,924 |
/**
* Copyright (c) 2005-2007, Paul Tuckey
* All rights reserved.
* ====================================================================
* Licensed under the BSD License. Text as follows.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* - Neither the name tuckey.org nor the names of its contributors
* may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* ====================================================================
*/
package org.tuckey.web.filters.urlrewrite;
import org.tuckey.web.filters.urlrewrite.gzip.GzipFilter;
import org.tuckey.web.filters.urlrewrite.utils.Log;
import org.tuckey.web.filters.urlrewrite.utils.ModRewriteConfLoader;
import org.tuckey.web.filters.urlrewrite.utils.StringUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.Text;
import org.xml.sax.SAXParseException;
import javax.servlet.ServletContext;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
/**
* Configuration object for urlrewrite filter.
*
* @author Paul Tuckey
* @version $Revision: 43 $ $Date: 2006-10-31 17:29:59 +1300 (Tue, 31 Oct 2006) $
*/
public class Conf {
    private static Log log = Log.getLog(Conf.class);
    // Human-readable messages for anything that went wrong during load/parse.
    private final List errors = new ArrayList();
    // Inbound rewrite rules (NormalRule / ClassRule), in conf-file order.
    private final List rules = new ArrayList(50);
    // <catch> elements (exception handlers), in conf-file order.
    private final List catchElems = new ArrayList(10);
    // Outbound (link rewriting) rules, in conf-file order.
    private List outboundRules = new ArrayList(50);
    // true once the conf parsed AND every rule/catch initialised cleanly.
    private boolean ok = false;
    private Date loadedDate = null;
    // Counters used to assign sequential ids to rules as they are added.
    private int ruleIdCounter = 0;
    private int outboundRuleIdCounter = 0;
    // Conf file name, for display on the status screen only.
    private String fileName;
    // System id used when parsing/resolving the conf XML.
    private String confSystemId;
    protected boolean useQueryString;
    protected boolean useContext;
    // Special values recognised in the "decode-using" conf attribute.
    private static final String NONE_DECODE_USING = "null";
    private static final String HEADER_DECODE_USING = "header";
    private static final String DEFAULT_DECODE_USING = "header,utf-8";
    protected String decodeUsing = DEFAULT_DECODE_USING;
    private boolean decodeUsingEncodingHeader;
    protected String defaultMatchType = null;
    private ServletContext context;
    // true once loadDom/loadModRewriteStyle parsed the input successfully;
    // gates the call to initialise() in the constructors.
    private boolean docProcessed = false;
    private boolean engineEnabled = true;
    /**
     * Empty constructor for testing etc.
     */
    public Conf() {
        loadedDate = new Date();
    }
    /**
     * Constructor for use only when loading XML style configuration.
     *
     * @param fileName to display on status screen
     */
    public Conf(ServletContext context, final InputStream inputStream, String fileName, String systemId) {
        this(context, inputStream, fileName, systemId, false);
    }
    /**
     * Normal constructor.
     *
     * @param fileName to display on status screen
     * @param modRewriteStyleConf true if loading mod_rewrite style conf
     */
    public Conf(ServletContext context, final InputStream inputStream, String fileName, String systemId,
                boolean modRewriteStyleConf) {
        // make sure context is setup before calling initialise()
        this.context = context;
        this.fileName = fileName;
        this.confSystemId = systemId;
        if (modRewriteStyleConf) {
            loadModRewriteStyle(inputStream);
        } else {
            loadDom(inputStream);
        }
        if (docProcessed) initialise();
        loadedDate = new Date();
    }
    /**
     * Load configuration written in Apache mod_rewrite syntax instead of the
     * XML format. Parse errors are recorded via addError.
     */
    protected void loadModRewriteStyle(InputStream inputStream) {
        ModRewriteConfLoader loader = new ModRewriteConfLoader();
        try {
            loader.process(inputStream, this);
            // mark the parse as successful so the constructor runs initialise()
            docProcessed = true;
        } catch (IOException e) {
            addError("Exception loading conf " + " " + e.getMessage(), e);
        }
    }
    /**
     * Constructor when run elements don't need to be initialised correctly, for documentation etc.
     */
    public Conf(URL confUrl) {
        // make sure context is setup before calling initialise()
        this.context = null;
        this.fileName = confUrl.getFile();
        this.confSystemId = confUrl.toString();
        try {
            loadDom(confUrl.openStream());
        } catch (IOException e) {
            addError("Exception loading conf " + " " + e.getMessage(), e);
        }
        if (docProcessed) initialise();
        loadedDate = new Date();
    }
    /**
     * Constructor when run elements don't need to be initialised correctly, for documentation etc.
     */
    public Conf(InputStream inputStream, String conffile) {
        this(null, inputStream, conffile, conffile);
    }
    /**
     * Load the dom document from the inputstream
     * <p/>
     * Note, protected so that is can be extended.
     *
     * @param inputStream stream of the conf file to load
     */
    protected synchronized void loadDom(final InputStream inputStream) {
        if (inputStream == null) {
            log.error("inputstream is null");
            return;
        }
        DocumentBuilder parser;
        /**
         * the thing that resolves dtd's and other xml entities.
         */
        ConfHandler handler = new ConfHandler(confSystemId);
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        log.debug("XML builder factory is: " + factory.getClass().getName());
        factory.setValidating(true);
        factory.setNamespaceAware(true);
        factory.setIgnoringComments(true);
        factory.setIgnoringElementContentWhitespace(true);
        try {
            parser = factory.newDocumentBuilder();
        } catch (ParserConfigurationException e) {
            log.error("Unable to setup XML parser for reading conf", e);
            return;
        }
        log.debug("XML Parser: " + parser.getClass().getName());
        parser.setErrorHandler(handler);
        parser.setEntityResolver(handler);
        try {
            log.debug("about to parse conf");
            Document doc = parser.parse(inputStream, confSystemId);
            processConfDoc(doc);
        } catch (SAXParseException e) {
            addError("Parse error on line " + e.getLineNumber() + " " + e.getMessage(), e);
        } catch (Exception e) {
            addError("Exception loading conf " + " " + e.getMessage(), e);
        }
    }
    /**
     * Process dom document and populate Conf object.
     * <p/>
     * Note, protected so that is can be extended.
     */
    protected void processConfDoc(Document doc) {
        Element rootElement = doc.getDocumentElement();
        // root element attributes: use-query-string, use-context,
        // decode-using and default-match-type
        if ("true".equalsIgnoreCase(getAttrValue(rootElement, "use-query-string"))) setUseQueryString(true);
        if ("true".equalsIgnoreCase(getAttrValue(rootElement, "use-context"))) {
            log.debug("use-context set to true");
            setUseContext(true);
        }
        setDecodeUsing(getAttrValue(rootElement, "decode-using"));
        setDefaultMatchType(getAttrValue(rootElement, "default-match-type"));
        NodeList rootElementList = rootElement.getChildNodes();
        for (int i = 0; i < rootElementList.getLength(); i++) {
            Node node = rootElementList.item(i);
            if (node.getNodeType() == Node.ELEMENT_NODE &&
                    ((Element) node).getTagName().equals("rule")) {
                Element ruleElement = (Element) node;
                // we have a rule node
                NormalRule rule = new NormalRule();
                processRuleBasics(ruleElement, rule);
                procesConditions(ruleElement, rule);
                processRuns(ruleElement, rule);
                Node toNode = ruleElement.getElementsByTagName("to").item(0);
                rule.setTo(getNodeValue(toNode));
                rule.setToType(getAttrValue(toNode, "type"));
                rule.setToContextStr(getAttrValue(toNode, "context"));
                rule.setToLast(getAttrValue(toNode, "last"));
                rule.setQueryStringAppend(getAttrValue(toNode, "qsappend"));
                if ("true".equalsIgnoreCase(getAttrValue(toNode, "encode"))) rule.setEncodeToUrl(true);
                processSetAttributes(ruleElement, rule);
                addRule(rule);
            } else if (node.getNodeType() == Node.ELEMENT_NODE &&
                    ((Element) node).getTagName().equals("class-rule")) {
                Element ruleElement = (Element) node;
                ClassRule classRule = new ClassRule();
                if ("false".equalsIgnoreCase(getAttrValue(ruleElement, "enabled"))) classRule.setEnabled(false);
                if ("false".equalsIgnoreCase(getAttrValue(ruleElement, "last"))) classRule.setLast(false);
                classRule.setClassStr(getAttrValue(ruleElement, "class"));
                classRule.setMethodStr(getAttrValue(ruleElement, "method"));
                addRule(classRule);
            } else if (node.getNodeType() == Node.ELEMENT_NODE &&
                    ((Element) node).getTagName().equals("outbound-rule")) {
                Element ruleElement = (Element) node;
                // we have a rule node
                OutboundRule rule = new OutboundRule();
                processRuleBasics(ruleElement, rule);
                if ("true".equalsIgnoreCase(getAttrValue(ruleElement, "encodefirst"))) rule.setEncodeFirst(true);
                procesConditions(ruleElement, rule);
                processRuns(ruleElement, rule);
                Node toNode = ruleElement.getElementsByTagName("to").item(0);
                rule.setTo(getNodeValue(toNode));
                rule.setToLast(getAttrValue(toNode, "last"));
                // note: for outbound rules encoding defaults ON, opt-out only
                if ("false".equalsIgnoreCase(getAttrValue(toNode, "encode"))) rule.setEncodeToUrl(false);
                processSetAttributes(ruleElement, rule);
                addOutboundRule(rule);
            } else if (node.getNodeType() == Node.ELEMENT_NODE &&
                    ((Element) node).getTagName().equals("catch")) {
                Element catchXMLElement = (Element) node;
                // we have a rule node
                CatchElem catchElem = new CatchElem();
                catchElem.setClassStr(getAttrValue(catchXMLElement, "class"));
                processRuns(catchXMLElement, catchElem);
                catchElems.add(catchElem);
            }
        }
        docProcessed = true;
    }
    /**
     * Populate the attributes shared by inbound and outbound rules:
     * enabled, match-type, name, note, from (and its case sensitivity).
     */
    private void processRuleBasics(Element ruleElement, RuleBase rule) {
        if ("false".equalsIgnoreCase(getAttrValue(ruleElement, "enabled"))) rule.setEnabled(false);
        String ruleMatchType = getAttrValue(ruleElement, "match-type");
        if (StringUtils.isBlank(ruleMatchType)) ruleMatchType = defaultMatchType;
        rule.setMatchType(ruleMatchType);
        Node nameNode = ruleElement.getElementsByTagName("name").item(0);
        rule.setName(getNodeValue(nameNode));
        Node noteNode = ruleElement.getElementsByTagName("note").item(0);
        rule.setNote(getNodeValue(noteNode));
        Node fromNode = ruleElement.getElementsByTagName("from").item(0);
        rule.setFrom(getNodeValue(fromNode));
        if ("true".equalsIgnoreCase(getAttrValue(fromNode, "casesensitive"))) rule.setFromCaseSensitive(true);
    }
    /**
     * Copy every <set> child of the rule element onto the rule as a
     * SetAttribute (value, type, name).
     */
    private static void processSetAttributes(Element ruleElement, RuleBase rule) {
        NodeList setNodes = ruleElement.getElementsByTagName("set");
        for (int j = 0; j < setNodes.getLength(); j++) {
            Node setNode = setNodes.item(j);
            if (setNode == null) continue;
            SetAttribute setAttribute = new SetAttribute();
            setAttribute.setValue(getNodeValue(setNode));
            setAttribute.setType(getAttrValue(setNode, "type"));
            setAttribute.setName(getAttrValue(setNode, "name"));
            rule.addSetAttribute(setAttribute);
        }
    }
    /**
     * Copy every <run> (and shortcut <gzip>) child of the element onto the
     * target as Run objects.
     * Note: Runnable here is the package-local type (it has addRun), not
     * java.lang.Runnable.
     */
    private static void processRuns(Element ruleElement, Runnable runnable) {
        NodeList runNodes = ruleElement.getElementsByTagName("run");
        for (int j = 0; j < runNodes.getLength(); j++) {
            Node runNode = runNodes.item(j);
            if (runNode == null) continue;
            Run run = new Run();
            processInitParams(runNode, run);
            run.setClassStr(getAttrValue(runNode, "class"));
            run.setMethodStr(getAttrValue(runNode, "method"));
            run.setJsonHandler("true".equalsIgnoreCase(getAttrValue(runNode, "jsonhandler")));
            run.setNewEachTime("true".equalsIgnoreCase(getAttrValue(runNode, "neweachtime")));
            runnable.addRun(run);
        }
        // gzip element is just a shortcut to run: org.tuckey.web.filters.urlrewrite.gzip.GzipFilter
        NodeList gzipNodes = ruleElement.getElementsByTagName("gzip");
        for (int j = 0; j < gzipNodes.getLength(); j++) {
            Node runNode = gzipNodes.item(j);
            if (runNode == null) continue;
            Run run = new Run();
            run.setClassStr(GzipFilter.class.getName());
            run.setMethodStr("doFilter(ServletRequest, ServletResponse, FilterChain)");
            processInitParams(runNode, run);
            runnable.addRun(run);
        }
    }
    /**
     * Read all <init-param> children (param-name / param-value pairs) of a
     * run node and add them to the Run.
     */
    private static void processInitParams(Node runNode, Run run) {
        if (runNode.getNodeType() == Node.ELEMENT_NODE) {
            Element runElement = (Element) runNode;
            NodeList initParamsNodeList = runElement.getElementsByTagName("init-param");
            for (int k = 0; k < initParamsNodeList.getLength(); k++) {
                Node initParamNode = initParamsNodeList.item(k);
                if (initParamNode == null) continue;
                if (initParamNode.getNodeType() != Node.ELEMENT_NODE) continue;
                Element initParamElement = (Element) initParamNode;
                Node paramNameNode = initParamElement.getElementsByTagName("param-name").item(0);
                Node paramValueNode = initParamElement.getElementsByTagName("param-value").item(0);
                run.addInitParam(getNodeValue(paramNameNode), getNodeValue(paramValueNode));
            }
        }
    }
    /**
     * Copy every <condition> child of the rule element onto the rule
     * (value, type, name, next, casesensitive, operator).
     */
    private static void procesConditions(Element ruleElement, RuleBase rule) {
        NodeList conditionNodes = ruleElement.getElementsByTagName("condition");
        for (int j = 0; j < conditionNodes.getLength(); j++) {
            Node conditionNode = conditionNodes.item(j);
            if (conditionNode == null) continue;
            Condition condition = new Condition();
            condition.setValue(getNodeValue(conditionNode));
            condition.setType(getAttrValue(conditionNode, "type"));
            condition.setName(getAttrValue(conditionNode, "name"));
            condition.setNext(getAttrValue(conditionNode, "next"));
            condition.setCaseSensitive("true".equalsIgnoreCase(getAttrValue(conditionNode, "casesensitive")));
            condition.setOperator(getAttrValue(conditionNode, "operator"));
            rule.addCondition(condition);
        }
    }
    /**
     * Return the trimmed text of the node's first child if it is a text
     * node, otherwise null (also null for any missing link in the chain).
     */
    private static String getNodeValue(Node node) {
        if (node == null) return null;
        NodeList nodeList = node.getChildNodes();
        if (nodeList == null) return null;
        Node child = nodeList.item(0);
        if (child == null) return null;
        if ((child.getNodeType() == Node.TEXT_NODE)) {
            String value = ((Text) child).getData();
            return value.trim();
        }
        return null;
    }
    /**
     * Return the trimmed value of the named attribute on the node, or null
     * if the node/attribute/value is missing.
     */
    private static String getAttrValue(Node n, String attrName) {
        if (n == null) return null;
        NamedNodeMap attrs = n.getAttributes();
        if (attrs == null) return null;
        Node attr = attrs.getNamedItem(attrName);
        if (attr == null) return null;
        String val = attr.getNodeValue();
        if (val == null) return null;
        return val.trim();
    }
    /**
     * Initialise the conf file. This will run initialise on each rule and condition in the conf file.
     */
    public void initialise() {
        if (log.isDebugEnabled()) {
            log.debug("now initialising conf");
        }
        initDecodeUsing(decodeUsing);
        boolean rulesOk = true;
        for (int i = 0; i < rules.size(); i++) {
            final Rule rule = (Rule) rules.get(i);
            if (!rule.initialise(context)) {
                // if we failed to initialise anything set the status to bad
                rulesOk = false;
            }
        }
        for (int i = 0; i < outboundRules.size(); i++) {
            final OutboundRule outboundRule = (OutboundRule) outboundRules.get(i);
            if (!outboundRule.initialise(context)) {
                // if we failed to initialise anything set the status to bad
                rulesOk = false;
            }
        }
        for (int i = 0; i < catchElems.size(); i++) {
            final CatchElem catchElem = (CatchElem) catchElems.get(i);
            if (!catchElem.initialise(context)) {
                // if we failed to initialise anything set the status to bad
                rulesOk = false;
            }
        }
        if (rulesOk) {
            ok = true;
        }
        if (log.isDebugEnabled()) {
            log.debug("conf status " + ok);
        }
    }
    /**
     * Parse the "decode-using" setting. Recognised forms: "header",
     * "header,&lt;charset&gt;", "null" (no decoding) or a bare charset name.
     * The charset is validated with a test URLDecoder.decode call; an
     * unsupported name is recorded via addError.
     */
    private void initDecodeUsing(String decodeUsingSetting) {
        decodeUsingSetting = StringUtils.trimToNull(decodeUsingSetting);
        if (decodeUsingSetting == null) decodeUsingSetting = DEFAULT_DECODE_USING;
        if ( decodeUsingSetting.equalsIgnoreCase(HEADER_DECODE_USING)) { // is 'header'
            decodeUsingEncodingHeader = true;
            decodeUsingSetting = null;
        } else if ( decodeUsingSetting.startsWith(HEADER_DECODE_USING + ",")) { // is 'header,xxx'
            decodeUsingEncodingHeader = true;
            decodeUsingSetting = decodeUsingSetting.substring((HEADER_DECODE_USING + ",").length());
        }
        if (NONE_DECODE_USING.equalsIgnoreCase(decodeUsingSetting)) {
            decodeUsingSetting = null;
        }
        if ( decodeUsingSetting != null ) {
            try {
                URLDecoder.decode("testUrl", decodeUsingSetting);
                this.decodeUsing = decodeUsingSetting;
            } catch (UnsupportedEncodingException e) {
                addError("unsupported 'decodeusing' " + decodeUsingSetting + " see Java SDK docs for supported encodings");
            }
        } else {
            this.decodeUsing = null;
        }
    }
    /**
     * Destroy the conf gracefully.
     */
    public void destroy() {
        for (int i = 0; i < rules.size(); i++) {
            final Rule rule = (Rule) rules.get(i);
            rule.destroy();
        }
    }
    /**
     * Will add the rule to the rules list.
     *
     * @param rule The Rule to add
     */
    public void addRule(final Rule rule) {
        rule.setId(ruleIdCounter++);
        rules.add(rule);
    }
    /**
     * Will add the rule to the rules list.
     *
     * @param outboundRule The outbound rule to add
     */
    public void addOutboundRule(final OutboundRule outboundRule) {
        outboundRule.setId(outboundRuleIdCounter++);
        outboundRules.add(outboundRule);
    }
    /**
     * Will get the List of errors.
     *
     * @return the List of errors
     */
    public List getErrors() {
        return errors;
    }
    /**
     * Will get the List of rules.
     *
     * @return the List of rules
     */
    public List getRules() {
        return rules;
    }
    /**
     * Will get the List of outbound rules.
     *
     * @return the List of outbound rules
     */
    public List getOutboundRules() {
        return outboundRules;
    }
    /**
     * true if the conf has been loaded ok.
     *
     * @return boolean
     */
    public boolean isOk() {
        return ok;
    }
    // Record a load/parse problem (with cause) and log it.
    private void addError(final String errorMsg, final Exception e) {
        errors.add(errorMsg);
        log.error(errorMsg, e);
    }
    // Record a load/parse problem without logging a stack trace.
    private void addError(final String errorMsg) {
        errors.add(errorMsg);
    }
    /** @return a defensive copy of the time this conf was loaded */
    public Date getLoadedDate() {
        return (Date) loadedDate.clone();
    }
    /** @return conf file name used for display purposes (may be null) */
    public String getFileName() {
        return fileName;
    }
    public boolean isUseQueryString() {
        return useQueryString;
    }
    public void setUseQueryString(boolean useQueryString) {
        this.useQueryString = useQueryString;
    }
    public boolean isUseContext() {
        return useContext;
    }
    public void setUseContext(boolean useContext) {
        this.useContext = useContext;
    }
    public String getDecodeUsing() {
        return decodeUsing;
    }
    public void setDecodeUsing(String decodeUsing) {
        this.decodeUsing = decodeUsing;
    }
    /**
     * Set the default match type; anything other than "wildcard"
     * (case-insensitive) falls back to RuleBase.DEFAULT_MATCH_TYPE.
     */
    public void setDefaultMatchType(String defaultMatchType) {
        if (RuleBase.MATCH_TYPE_WILDCARD.equalsIgnoreCase(defaultMatchType)) {
            this.defaultMatchType = RuleBase.MATCH_TYPE_WILDCARD;
        } else {
            this.defaultMatchType = RuleBase.DEFAULT_MATCH_TYPE;
        }
    }
    public String getDefaultMatchType() {
        return defaultMatchType;
    }
    public List getCatchElems() {
        return catchElems;
    }
    /** @return true when an explicit decode charset (not header-driven) is configured */
    public boolean isDecodeUsingCustomCharsetRequired() {
        return decodeUsing != null;
    }
    public boolean isEngineEnabled() {
        return engineEnabled;
    }
    public void setEngineEnabled(boolean engineEnabled) {
        this.engineEnabled = engineEnabled;
    }
    /** @return true when this conf was loaded from a named file (vs a bare stream) */
    public boolean isLoadedFromFile() {
        return fileName != null;
    }
    public boolean isDecodeUsingEncodingHeader() {
        return decodeUsingEncodingHeader;
    }
}
| safarijv/urlrewritefilter | src/main/java/org/tuckey/web/filters/urlrewrite/Conf.java | Java | bsd-3-clause | 23,779 |
//===----------------------------------------------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is dual licensed under the MIT and the University of Illinois Open
// Source Licenses. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
// <string>
// template<> struct char_traits<char32_t>
// static constexpr int_type eof();
#include <string>
#include <cassert>
int main()
{
#ifndef _LIBCPP_HAS_NO_UNICODE_CHARS
    // Smoke test: char_traits<char32_t>::eof() must exist and be callable,
    // yielding a value of the traits' int_type.
    typedef std::char_traits<char32_t> Traits;
    Traits::int_type eof_value = Traits::eof();
    static_cast<void>(eof_value); // suppress unused-variable warnings
#endif
}
| youtube/cobalt | third_party/llvm-project/libcxx/test/std/strings/char.traits/char.traits.specializations/char.traits.specializations.char32_t/eof.pass.cpp | C++ | bsd-3-clause | 669 |
<?php
/**
* Manage views in a database
*
* $Id: views.php,v 1.75 2007/12/15 22:57:43 ioguix Exp $
*/
// Include application functions
include_once('./libraries/lib.inc.php');
include_once('./classes/Gui.php');
// Action requested via GET/POST; empty string when none was supplied.
$action = (isset($_REQUEST['action'])) ? $_REQUEST['action'] : '';
// $msg may already be set by an including script; default to no message.
if (!isset($msg)) $msg = '';
/**
 * Ask for select parameters and perform select
 *
 * With $confirm true, renders a form listing the view's columns so the user
 * can pick which columns to show and enter per-column conditions. With
 * $confirm false, validates the posted form and hands the generated SELECT
 * off to display.php.
 *
 * @param $confirm true to render the form, false to execute the select
 * @param $msg     optional message shown above the form
 */
function doSelectRows($confirm, $msg = '') {
	global $data, $misc, $_no_output;
	global $lang;
	if ($confirm) {
		$misc->printTrail('view');
		$misc->printTitle($lang['strselect'], 'pg.sql.select');
		$misc->printMsg($msg);
		$attrs = $data->getTableAttributes($_REQUEST['view']);
		echo "<form action=\"views.php\" method=\"post\" id=\"selectform\">\n";
		if ($attrs->recordCount() > 0) {
			// JavaScript for select all feature
			echo "<script type=\"text/javascript\">\n";
			echo "//<![CDATA[\n";
			echo "	function selectAll() {\n";
			echo "		for (var i=0; i<document.getElementById('selectform').elements.length; i++) {\n";
			echo "			var e = document.getElementById('selectform').elements[i];\n";
			echo "			if (e.name.indexOf('show') == 0) e.checked = document.getElementById('selectform').selectall.checked;\n";
			echo "		}\n";
			echo "	}\n";
			echo "//]]>\n";
			echo "</script>\n";
			echo "<table>\n";
			// Output table header
			echo "<tr><th class=\"data\">{$lang['strshow']}</th><th class=\"data\">{$lang['strcolumn']}</th>";
			echo "<th class=\"data\">{$lang['strtype']}</th><th class=\"data\">{$lang['stroperator']}</th>";
			echo "<th class=\"data\">{$lang['strvalue']}</th></tr>";
			$i = 0;
			// One row per column: show-checkbox, name, type, operator combo, value field.
			while (!$attrs->EOF) {
				$attrs->fields['attnotnull'] = $data->phpBool($attrs->fields['attnotnull']);
				// Set up default value if there isn't one already
				if (!isset($_REQUEST['values'][$attrs->fields['attname']]))
					$_REQUEST['values'][$attrs->fields['attname']] = null;
				if (!isset($_REQUEST['ops'][$attrs->fields['attname']]))
					$_REQUEST['ops'][$attrs->fields['attname']] = null;
				// Continue drawing row
				$id = (($i % 2) == 0 ? '1' : '2');
				echo "<tr class=\"data{$id}\">\n";
				echo "<td style=\"white-space:nowrap;\">";
				echo "<input type=\"checkbox\" name=\"show[", htmlspecialchars($attrs->fields['attname']), "]\"",
					isset($_REQUEST['show'][$attrs->fields['attname']]) ? ' checked="checked"' : '', " /></td>";
				echo "<td style=\"white-space:nowrap;\">", $misc->printVal($attrs->fields['attname']), "</td>";
				echo "<td style=\"white-space:nowrap;\">", $misc->printVal($data->formatType($attrs->fields['type'], $attrs->fields['atttypmod'])), "</td>";
				echo "<td style=\"white-space:nowrap;\">";
				echo "<select name=\"ops[{$attrs->fields['attname']}]\">\n";
				foreach (array_keys($data->selectOps) as $v) {
					echo "<option value=\"", htmlspecialchars($v), "\"", ($v == $_REQUEST['ops'][$attrs->fields['attname']]) ? ' selected="selected"' : '',
						">", htmlspecialchars($v), "</option>\n";
				}
				echo "</select></td>\n";
				echo "<td style=\"white-space:nowrap;\">", $data->printField("values[{$attrs->fields['attname']}]",
					$_REQUEST['values'][$attrs->fields['attname']], $attrs->fields['type']), "</td>";
				echo "</tr>\n";
				$i++;
				$attrs->moveNext();
			}
			// Select all checkbox
			echo "<tr><td colspan=\"5\"><input type=\"checkbox\" id=\"selectall\" name=\"selectall\" onclick=\"javascript:selectAll()\" /><label for=\"selectall\">{$lang['strselectallfields']}</label></td></tr>";
			echo "</table>\n";
		}
		else echo "<p>{$lang['strinvalidparam']}</p>\n";
		echo "<p><input type=\"hidden\" name=\"action\" value=\"selectrows\" />\n";
		echo "<input type=\"hidden\" name=\"view\" value=\"", htmlspecialchars($_REQUEST['view']), "\" />\n";
		echo "<input type=\"hidden\" name=\"subject\" value=\"view\" />\n";
		echo $misc->form;
		echo "<input type=\"submit\" name=\"select\" value=\"{$lang['strselect']}\" />\n";
		echo "<input type=\"submit\" name=\"cancel\" value=\"{$lang['strcancel']}\" /></p>\n";
		echo "</form>\n";
	}
	else {
		// Execution path: normalize missing arrays so the loops below are safe.
		if (!isset($_POST['show'])) $_POST['show'] = array();
		if (!isset($_POST['values'])) $_POST['values'] = array();
		if (!isset($_POST['nulls'])) $_POST['nulls'] = array();
		// Verify that they haven't supplied a value for unary operators
		foreach ($_POST['ops'] as $k => $v) {
			if ($data->selectOps[$v] == 'p' && $_POST['values'][$k] != '') {
				doSelectRows(true, $lang['strselectunary']);
				return;
			}
		}
		if (sizeof($_POST['show']) == 0)
			doSelectRows(true, $lang['strselectneedscol']);
		else {
			// Generate query SQL
			$query = $data->getSelectSQL($_REQUEST['view'], array_keys($_POST['show']),
				$_POST['values'], $_POST['ops']);
			$_REQUEST['query'] = $query;
			$_REQUEST['return'] = "schema";
			// display.php takes over rendering; suppress our own output first.
			$_no_output = true;
			include('./display.php');
			exit;
		}
	}
}
/**
 * Show confirmation of drop and perform actual drop
 *
 * Handles both a single view ($_REQUEST['view']) and a multi-selection
 * ($_REQUEST['ma'], an array of serialized selections). Multi-drops run in
 * a transaction so a single failure rolls the whole batch back.
 *
 * @param $confirm true to render the confirmation form, false to drop
 */
function doDrop($confirm) {
	global $data, $misc;
	global $lang, $_reload_browser;
	// Nothing selected at all: bail out to the list with an error.
	if (empty($_REQUEST['view']) && empty($_REQUEST['ma'])) {
		doDefault($lang['strspecifyviewtodrop']);
		exit();
	}
	if ($confirm) {
		$misc->printTrail('view');
		$misc->printTitle($lang['strdrop'],'pg.view.drop');
		echo "<form action=\"views.php\" method=\"post\">\n";
		//If multi drop
		if (isset($_REQUEST['ma'])) {
			foreach($_REQUEST['ma'] as $v) {
				$a = unserialize(htmlspecialchars_decode($v, ENT_QUOTES));
				echo "<p>", sprintf($lang['strconfdropview'], $misc->printVal($a['view'])), "</p>\n";
				echo '<input type="hidden" name="view[]" value="', htmlspecialchars($a['view']), "\" />\n";
			}
		}
		else {
			echo "<p>", sprintf($lang['strconfdropview'], $misc->printVal($_REQUEST['view'])), "</p>\n";
			echo "<input type=\"hidden\" name=\"view\" value=\"", htmlspecialchars($_REQUEST['view']), "\" />\n";
		}
		echo "<input type=\"hidden\" name=\"action\" value=\"drop\" />\n";
		echo $misc->form;
		echo "<p><input type=\"checkbox\" id=\"cascade\" name=\"cascade\" /> <label for=\"cascade\">{$lang['strcascade']}</label></p>\n";
		echo "<input type=\"submit\" name=\"drop\" value=\"{$lang['strdrop']}\" />\n";
		echo "<input type=\"submit\" name=\"cancel\" value=\"{$lang['strcancel']}\" />\n";
		echo "</form>\n";
	}
	else {
		// Multiple views posted back: drop them all inside one transaction.
		if (is_array($_POST['view'])) {
			$msg='';
			$status = $data->beginTransaction();
			if ($status == 0) {
				foreach($_POST['view'] as $s) {
					$status = $data->dropView($s, isset($_POST['cascade']));
					if ($status == 0)
						$msg.= sprintf('%s: %s<br />', htmlentities($s, ENT_QUOTES, 'UTF-8'), $lang['strviewdropped']);
					else {
						// First failure aborts; endTransaction() rolls back the batch.
						$data->endTransaction();
						doDefault(sprintf('%s%s: %s<br />', $msg, htmlentities($s, ENT_QUOTES, 'UTF-8'), $lang['strviewdroppedbad']));
						return;
					}
				}
			}
			if($data->endTransaction() == 0) {
				// Everything went fine, back to the Default page....
				$_reload_browser = true;
				doDefault($msg);
			}
			else doDefault($lang['strviewdroppedbad']);
		}
		else{
			// Single view drop, no transaction needed.
			$status = $data->dropView($_POST['view'], isset($_POST['cascade']));
			if ($status == 0) {
				$_reload_browser = true;
				doDefault($lang['strviewdropped']);
			}
			else
				doDefault($lang['strviewdroppedbad']);
		}
	}
}
/**
 * Sets up choices for table linkage, and which fields to select for the view we're creating
 *
 * Second step of the view-creation wizard: given the tables chosen in
 * doWizardCreate (posted as serialized schema/table pairs), collects every
 * column of every chosen table and renders the form for picking output
 * columns, join conditions and extra WHERE conditions.
 *
 * @param $msg optional message shown above the form
 */
function doSetParamsCreate($msg = '') {
	global $data, $misc;
	global $lang;
	// Check that they've chosen tables for the view definition
	if (!isset($_POST['formTables']) ) doWizardCreate($lang['strviewneedsdef']);
	else {
		// Initialise variables
		if (!isset($_REQUEST['formView'])) $_REQUEST['formView'] = '';
		if (!isset($_REQUEST['formComment'])) $_REQUEST['formComment'] = '';
		$misc->printTrail('schema');
		$misc->printTitle($lang['strcreateviewwiz'], 'pg.view.create');
		$misc->printMsg($msg);
		$tblCount = sizeof($_POST['formTables']);
		//unserialize our schema/table information and store in arrSelTables
		for ($i = 0; $i < $tblCount; $i++) {
			$arrSelTables[] = unserialize($_POST['formTables'][$i]);
		}
		$linkCount = $tblCount;
		//get linking keys
		$rsLinkKeys = $data->getLinkingKeys($arrSelTables);
		// Render at least one link row per table, more if there are more FK pairs.
		$linkCount = $rsLinkKeys->recordCount() > $tblCount ? $rsLinkKeys->recordCount() : $tblCount;
		$arrFields = array(); //array that will hold all our table/field names
		//if we have schemas we need to specify the correct schema for each table we're retrieiving
		//with getTableAttributes
		$curSchema = $data->_schema;
		for ($i = 0; $i < $tblCount; $i++) {
			if ($data->_schema != $arrSelTables[$i]['schemaname']) {
				$data->setSchema($arrSelTables[$i]['schemaname']);
			}
			$attrs = $data->getTableAttributes($arrSelTables[$i]['tablename']);
			while (!$attrs->EOF) {
				// Key is the display label "schema.table.field"; value is the
				// serialized triple that later steps unserialize back.
				$arrFields["{$arrSelTables[$i]['schemaname']}.{$arrSelTables[$i]['tablename']}.{$attrs->fields['attname']}"] = serialize(array(
					'schemaname' => $arrSelTables[$i]['schemaname'],
					'tablename' => $arrSelTables[$i]['tablename'],
					'fieldname' => $attrs->fields['attname'])
				);
				$attrs->moveNext();
			}
			// Restore the schema we entered with before the next iteration.
			$data->setSchema($curSchema);
		}
		asort($arrFields);
		echo "<form action=\"views.php\" method=\"post\">\n";
		echo "<table>\n";
		echo "<tr><th class=\"data\">{$lang['strviewname']}</th></tr>";
		echo "<tr>\n<td class=\"data1\">\n";
		// View name
		echo "<input name=\"formView\" value=\"", htmlspecialchars($_REQUEST['formView']), "\" size=\"32\" maxlength=\"{$data->_maxNameLen}\" />\n";
		echo "</td>\n</tr>\n";
		echo "<tr><th class=\"data\">{$lang['strcomment']}</th></tr>";
		echo "<tr>\n<td class=\"data1\">\n";
		// View comments
		echo "<textarea name=\"formComment\" rows=\"3\" cols=\"32\">",
			htmlspecialchars($_REQUEST['formComment']), "</textarea>\n";
		echo "</td>\n</tr>\n";
		echo "</table>\n";
		// Output selector for fields to be retrieved from view
		echo "<table>\n";
		echo "<tr><th class=\"data\">{$lang['strcolumns']}</th></tr>";
		echo "<tr>\n<td class=\"data1\">\n";
		echo GUI::printCombo($arrFields, 'formFields[]', false, '', true);
		echo "</td>\n</tr>";
		// Strategy for duplicate column names: rename, drop, or raise an error.
		echo "<tr><td><input type=\"radio\" name=\"dblFldMeth\" id=\"dblFldMeth1\" value=\"rename\" /><label for=\"dblFldMeth1\">{$lang['strrenamedupfields']}</label>";
		echo "<br /><input type=\"radio\" name=\"dblFldMeth\" id=\"dblFldMeth2\" value=\"drop\" /><label for=\"dblFldMeth2\">{$lang['strdropdupfields']}</label>";
		echo "<br /><input type=\"radio\" name=\"dblFldMeth\" id=\"dblFldMeth3\" value=\"\" checked=\"checked\" /><label for=\"dblFldMeth3\">{$lang['strerrordupfields']}</label></td></tr></table><br />";
		// Output the Linking keys combo boxes
		echo "<table>\n";
		echo "<tr><th class=\"data\">{$lang['strviewlink']}</th></tr>";
		$rowClass = 'data1';
		for ($i = 0; $i < $linkCount; $i++) {
			// Initialise variables
			if (!isset($formLink[$i]['operator'])) $formLink[$i]['operator'] = 'INNER JOIN';
			echo "<tr>\n<td class=\"$rowClass\">\n";
			// Pre-select detected foreign-key pairs while any remain.
			if (!$rsLinkKeys->EOF) {
				$curLeftLink = htmlspecialchars(serialize(array('schemaname' => $rsLinkKeys->fields['p_schema'], 'tablename' => $rsLinkKeys->fields['p_table'], 'fieldname' => $rsLinkKeys->fields['p_field']) ) );
				$curRightLink = htmlspecialchars(serialize(array('schemaname' => $rsLinkKeys->fields['f_schema'], 'tablename' => $rsLinkKeys->fields['f_table'], 'fieldname' => $rsLinkKeys->fields['f_field']) ) );
				$rsLinkKeys->moveNext();
			}
			else {
				$curLeftLink = '';
				$curRightLink = '';
			}
			echo GUI::printCombo($arrFields, "formLink[$i][leftlink]", true, $curLeftLink, false );
			echo GUI::printCombo($data->joinOps, "formLink[$i][operator]", true, $formLink[$i]['operator']);
			echo GUI::printCombo($arrFields, "formLink[$i][rightlink]", true, $curRightLink, false );
			echo "</td>\n</tr>\n";
			$rowClass = $rowClass == 'data1' ? 'data2' : 'data1';
		}
		echo "</table>\n<br />\n";
		// Build list of available operators (infix only)
		$arrOperators = array();
		foreach ($data->selectOps as $k => $v) {
			if ($v == 'i') $arrOperators[$k] = $k;
		}
		// Output additional conditions, note that this portion of the wizard treats the right hand side as literal values
		//(not as database objects) so field names will be treated as strings, use the above linking keys section to perform joins
		echo "<table>\n";
		echo "<tr><th class=\"data\">{$lang['strviewconditions']}</th></tr>";
		$rowClass = 'data1';
		for ($i = 0; $i < $linkCount; $i++) {
			echo "<tr>\n<td class=\"$rowClass\">\n";
			echo GUI::printCombo($arrFields, "formCondition[$i][field]");
			echo GUI::printCombo($arrOperators, "formCondition[$i][operator]", false, false);
			echo "<input type=\"text\" name=\"formCondition[$i][txt]\" />\n";
			echo "</td>\n</tr>\n";
			$rowClass = $rowClass == 'data1' ? 'data2' : 'data1';
		}
		echo "</table>\n";
		echo "<p><input type=\"hidden\" name=\"action\" value=\"save_create_wiz\" />\n";
		// Carry the serialized table selections through to the save step.
		foreach ($arrSelTables AS $curTable) {
			echo "<input type=\"hidden\" name=\"formTables[]\" value=\"" . htmlspecialchars(serialize($curTable) ) . "\" />\n";
		}
		echo $misc->form;
		echo "<input type=\"submit\" value=\"{$lang['strcreate']}\" />\n";
		echo "<input type=\"submit\" name=\"cancel\" value=\"{$lang['strcancel']}\" /></p>\n";
		echo "</form>\n";
	}
}
/**
 * Display a wizard where they can enter a new view
 *
 * First step of the view-creation wizard: lists every table (across all
 * schemas) in a multi-select; the chosen entries are posted as serialized
 * schema/table pairs to the set_params_create step.
 *
 * @param $msg optional message shown above the form
 */
function doWizardCreate($msg = '') {
	global $data, $misc;
	global $lang;
	$tables = $data->getTables(true);
	$misc->printTrail('schema');
	$misc->printTitle($lang['strcreateviewwiz'], 'pg.view.create');
	$misc->printMsg($msg);
	echo "<form action=\"views.php\" method=\"post\">\n";
	echo "<table>\n";
	echo "<tr><th class=\"data\">{$lang['strtables']}</th></tr>";
	echo "<tr>\n<td class=\"data1\">\n";
	$arrTables = array();
	// Key is the display label "schema.table"; value is the serialized pair.
	while (!$tables->EOF) {
		$arrTmp = array();
		$arrTmp['schemaname'] = $tables->fields['nspname'];
		$arrTmp['tablename'] = $tables->fields['relname'];
		$arrTables[$tables->fields['nspname'] . '.' . $tables->fields['relname']] = serialize($arrTmp);
		$tables->moveNext();
	}
	echo GUI::printCombo($arrTables, 'formTables[]', false, '', true);
	echo "</td>\n</tr>\n";
	echo "</table>\n";
	echo "<p><input type=\"hidden\" name=\"action\" value=\"set_params_create\" />\n";
	echo $misc->form;
	echo "<input type=\"submit\" value=\"{$lang['strnext']}\" />\n";
	echo "<input type=\"submit\" name=\"cancel\" value=\"{$lang['strcancel']}\" /></p>\n";
	echo "</form>\n";
}
/**
 * Displays a screen where they can enter a new view
 *
 * Plain (non-wizard) create form: view name, free-text SQL definition and
 * an optional comment. Submits to the save_create action.
 *
 * @param $msg optional message shown above the form
 */
function doCreate($msg = '') {
	global $data, $misc, $conf;
	global $lang;
	// Seed the form fields so redisplays after an error keep the user's input.
	if (!isset($_REQUEST['formView'])) $_REQUEST['formView'] = '';
	if (!isset($_REQUEST['formDefinition'])) $_REQUEST['formDefinition'] = 'SELECT ';
	if (!isset($_REQUEST['formComment'])) $_REQUEST['formComment'] = '';
	$misc->printTrail('schema');
	$misc->printTitle($lang['strcreateview'], 'pg.view.create');
	$misc->printMsg($msg);
	echo "<form action=\"views.php\" method=\"post\">\n";
	echo "<table style=\"width: 100%\">\n";
	echo "\t<tr>\n\t\t<th class=\"data left required\">{$lang['strname']}</th>\n";
	echo "\t<td class=\"data1\"><input name=\"formView\" size=\"32\" maxlength=\"{$data->_maxNameLen}\" value=\"",
		htmlspecialchars($_REQUEST['formView']), "\" /></td>\n\t</tr>\n";
	echo "\t<tr>\n\t\t<th class=\"data left required\">{$lang['strdefinition']}</th>\n";
	echo "\t<td class=\"data1\"><textarea style=\"width:100%;\" rows=\"10\" cols=\"50\" name=\"formDefinition\">",
		htmlspecialchars($_REQUEST['formDefinition']), "</textarea></td>\n\t</tr>\n";
	echo "\t<tr>\n\t\t<th class=\"data left\">{$lang['strcomment']}</th>\n";
	echo "\t\t<td class=\"data1\"><textarea name=\"formComment\" rows=\"3\" cols=\"32\">",
		htmlspecialchars($_REQUEST['formComment']), "</textarea></td>\n\t</tr>\n";
	echo "</table>\n";
	echo "<p><input type=\"hidden\" name=\"action\" value=\"save_create\" />\n";
	echo $misc->form;
	echo "<input type=\"submit\" value=\"{$lang['strcreate']}\" />\n";
	echo "<input type=\"submit\" name=\"cancel\" value=\"{$lang['strcancel']}\" /></p>\n";
	echo "</form>\n";
}
/**
 * Persists a plain (non-wizard) view entered on the create form.
 *
 * Redisplays the create form with an error message when the name or the
 * definition is missing, or when the CREATE VIEW statement fails; on
 * success triggers a browser-tree reload and returns to the view list.
 */
function doSaveCreate() {
	global $data, $lang, $_reload_browser;
	// A view cannot be created without both a name and a body.
	if ($_POST['formView'] == '') {
		doCreate($lang['strviewneedsname']);
		return;
	}
	if ($_POST['formDefinition'] == '') {
		doCreate($lang['strviewneedsdef']);
		return;
	}
	$status = $data->createView($_POST['formView'], $_POST['formDefinition'], false, $_POST['formComment']);
	if ($status != 0) {
		doCreate($lang['strviewcreatedbad']);
		return;
	}
	$_reload_browser = true;
	doDefault($lang['strviewcreated']);
}
/**
 * Actually creates the new wizard view in the database
 *
 * Builds a SELECT from the wizard's posted column picks, join links and
 * extra conditions, then issues CREATE VIEW. On validation failure the
 * wizard form is redisplayed with a message.
 */
function doSaveCreateWiz() {
	global $data, $lang, $_reload_browser;
	// Check that they've given a name and fields they want to select
	if (!strlen($_POST['formView']) ) doSetParamsCreate($lang['strviewneedsname']);
	else if (!isset($_POST['formFields']) || !count($_POST['formFields']) ) doSetParamsCreate($lang['strviewneedsfields']);
	else {
		$selFields = '';
		// $tmpHsh tracks column names already emitted; only needed when a
		// duplicate-handling method was chosen (note: no braces — only this
		// assignment is conditional, the foreach below always runs).
		if (! empty($_POST['dblFldMeth']) )
			$tmpHsh = array();
		foreach ($_POST['formFields'] AS $curField) {
			$arrTmp = unserialize($curField);
			$data->fieldArrayClean($arrTmp);
			if (! empty($_POST['dblFldMeth']) ) { // doublon control
				if (empty($tmpHsh[$arrTmp['fieldname']])) { // field does not exist
					$selFields .= "\"{$arrTmp['schemaname']}\".\"{$arrTmp['tablename']}\".\"{$arrTmp['fieldname']}\", ";
					$tmpHsh[$arrTmp['fieldname']] = 1;
				} else if ($_POST['dblFldMeth'] == 'rename') { // field exist and must be renamed
					$tmpHsh[$arrTmp['fieldname']]++;
					$selFields .= "\"{$arrTmp['schemaname']}\".\"{$arrTmp['tablename']}\".\"{$arrTmp['fieldname']}\" AS \"{$arrTmp['schemaname']}_{$arrTmp['tablename']}_{$arrTmp['fieldname']}{$tmpHsh[$arrTmp['fieldname']]}\", ";
				}
				/* field already exist, just ignore this one */
			} else { // no doublon control
				$selFields .= "\"{$arrTmp['schemaname']}\".\"{$arrTmp['tablename']}\".\"{$arrTmp['fieldname']}\", ";
			}
		}
		// Strip the trailing ", " left by the loop above.
		$selFields = substr($selFields, 0, -2);
		unset($arrTmp, $tmpHsh);
		$linkFields = '';
		// If we have links, out put the JOIN ... ON statements
		if (is_array($_POST['formLink']) ) {
			// Filter out invalid/blank entries for our links
			$arrLinks = array();
			foreach ($_POST['formLink'] AS $curLink) {
				if (strlen($curLink['leftlink']) && strlen($curLink['rightlink']) && strlen($curLink['operator'])) {
					$arrLinks[] = $curLink;
				}
			}
			// We must perform some magic to make sure that we have a valid join order
			$count = sizeof($arrLinks);
			$arrJoined = array();
			$arrUsedTbls = array();
			// If we have at least one join condition, output it
			if ($count > 0) {
				// Up to $count passes: each pass appends any link whose left
				// table is already part of the join chain (or seeds the chain).
				$j = 0;
				while ($j < $count) {
					foreach ($arrLinks AS $curLink) {
						$arrLeftLink = unserialize($curLink['leftlink']);
						$arrRightLink = unserialize($curLink['rightlink']);
						$data->fieldArrayClean($arrLeftLink);
						$data->fieldArrayClean($arrRightLink);
						$tbl1 = "\"{$arrLeftLink['schemaname']}\".\"{$arrLeftLink['tablename']}\"";
						$tbl2 = "\"{$arrRightLink['schemaname']}\".\"{$arrRightLink['tablename']}\"";
						if ( (!in_array($curLink, $arrJoined) && in_array($tbl1, $arrUsedTbls)) || !count($arrJoined) ) {
							// Make sure for multi-column foreign keys that we use a table alias tables joined to more than once
							// This can (and should be) more optimized for multi-column foreign keys
							$adj_tbl2 = in_array($tbl2, $arrUsedTbls) ? "$tbl2 AS alias_ppa_" . mktime() : $tbl2;
							$linkFields .= strlen($linkFields) ? "{$curLink['operator']} $adj_tbl2 ON (\"{$arrLeftLink['schemaname']}\".\"{$arrLeftLink['tablename']}\".\"{$arrLeftLink['fieldname']}\" = \"{$arrRightLink['schemaname']}\".\"{$arrRightLink['tablename']}\".\"{$arrRightLink['fieldname']}\") "
								: "$tbl1 {$curLink['operator']} $adj_tbl2 ON (\"{$arrLeftLink['schemaname']}\".\"{$arrLeftLink['tablename']}\".\"{$arrLeftLink['fieldname']}\" = \"{$arrRightLink['schemaname']}\".\"{$arrRightLink['tablename']}\".\"{$arrRightLink['fieldname']}\") ";
							$arrJoined[] = $curLink;
							if (!in_array($tbl1, $arrUsedTbls) ) $arrUsedTbls[] = $tbl1;
							if (!in_array($tbl2, $arrUsedTbls) ) $arrUsedTbls[] = $tbl2;
						}
					}
					$j++;
				}
			}
		}
		//if linkfields has no length then either _POST['formLink'] was not set, or there were no join conditions
		//just select from all seleted tables - a cartesian join do a
		if (!strlen($linkFields) ) {
			foreach ($_POST['formTables'] AS $curTable) {
				$arrTmp = unserialize($curTable);
				$data->fieldArrayClean($arrTmp);
				$linkFields .= strlen($linkFields) ? ", \"{$arrTmp['schemaname']}\".\"{$arrTmp['tablename']}\"" : "\"{$arrTmp['schemaname']}\".\"{$arrTmp['tablename']}\"";
			}
		}
		$addConditions = '';
		if (is_array($_POST['formCondition']) ) {
			foreach ($_POST['formCondition'] AS $curCondition) {
				if (strlen($curCondition['field']) && strlen($curCondition['txt']) ) {
					$arrTmp = unserialize($curCondition['field']);
					$data->fieldArrayClean($arrTmp);
					// NOTE(review): only the identifiers go through fieldArrayClean();
					// the right-hand literal {$curCondition['txt']} is interpolated
					// into the SQL unescaped — verify this is acceptable here.
					$addConditions .= strlen($addConditions) ? " AND \"{$arrTmp['schemaname']}\".\"{$arrTmp['tablename']}\".\"{$arrTmp['fieldname']}\" {$curCondition['operator']} '{$curCondition['txt']}' "
						: " \"{$arrTmp['schemaname']}\".\"{$arrTmp['tablename']}\".\"{$arrTmp['fieldname']}\" {$curCondition['operator']} '{$curCondition['txt']}' ";
				}
			}
		}
		$viewQuery = "SELECT $selFields FROM $linkFields ";
		//add where from additional conditions
		if (strlen($addConditions) ) $viewQuery .= ' WHERE ' . $addConditions;
		$status = $data->createView($_POST['formView'], $viewQuery, false, $_POST['formComment']);
		if ($status == 0) {
			$_reload_browser = true;
			doDefault($lang['strviewcreated']);
		}
		else
			doSetParamsCreate($lang['strviewcreatedbad']);
	}
}
/**
 * Show default list of views in the database
 *
 * Renders the schema's views as a table with per-row browse/select/alter/drop
 * actions plus navigation links for both create flows.
 *
 * @param $msg optional message shown above the list
 */
function doDefault($msg = '') {
	global $data, $misc, $conf;
	global $lang;
	$misc->printTrail('schema');
	$misc->printTabs('schema','views');
	$misc->printMsg($msg);
	$views = $data->getViews();
	// Column definitions for $misc->printTable(); field() maps recordset columns.
	$columns = array(
		'view' => array(
			'title' => $lang['strview'],
			'field' => field('relname'),
			'url' => "redirect.php?subject=view&{$misc->href}&",
			'vars' => array('view' => 'relname'),
		),
		'owner' => array(
			'title' => $lang['strowner'],
			'field' => field('relowner'),
		),
		'actions' => array(
			'title' => $lang['stractions'],
		),
		'comment' => array(
			'title' => $lang['strcomment'],
			'field' => field('relcomment'),
		),
	);
	// Per-row actions; 'multiactions' enables the checkbox multi-drop.
	$actions = array(
		'multiactions' => array(
			'keycols' => array('view' => 'relname'),
			'url' => 'views.php',
		),
		'browse' => array(
			'content' => $lang['strbrowse'],
			'attr'=> array (
				'href' => array (
					'url' => 'display.php',
					'urlvars' => array (
						'action' => 'confselectrows',
						'subject' => 'view',
						'return' => 'schema',
						'view' => field('relname')
					)
				)
			)
		),
		'select' => array(
			'content' => $lang['strselect'],
			'attr'=> array (
				'href' => array (
					'url' => 'views.php',
					'urlvars' => array (
						'action' => 'confselectrows',
						'view' => field('relname')
					)
				)
			)
		),
		// Insert is possible if the relevant rule for the view has been created.
		//			'insert' => array(
		//				'title' => $lang['strinsert'],
		//				'url'   => "views.php?action=confinsertrow&{$misc->href}&",
		//				'vars'  => array('view' => 'relname'),
		//			),
		'alter' => array(
			'content' => $lang['stralter'],
			'attr'=> array (
				'href' => array (
					'url' => 'viewproperties.php',
					'urlvars' => array (
						'action' => 'confirm_alter',
						'view' => field('relname')
					)
				)
			)
		),
		'drop' => array(
			'multiaction' => 'confirm_drop',
			'content' => $lang['strdrop'],
			'attr'=> array (
				'href' => array (
					'url' => 'views.php',
					'urlvars' => array (
						'action' => 'confirm_drop',
						'view' => field('relname')
					)
				)
			)
		),
	);
	$misc->printTable($views, $columns, $actions, 'views-views', $lang['strnoviews']);
	$navlinks = array (
		'create' => array (
			'attr'=> array (
				'href' => array (
					'url' => 'views.php',
					'urlvars' => array (
						'action' => 'create',
						'server' => $_REQUEST['server'],
						'database' => $_REQUEST['database'],
						'schema' => $_REQUEST['schema']
					)
				)
			),
			'content' => $lang['strcreateview']
		),
		'createwiz' => array (
			'attr'=> array (
				'href' => array (
					'url' => 'views.php',
					'urlvars' => array (
						'action' => 'wiz_create',
						'server' => $_REQUEST['server'],
						'database' => $_REQUEST['database'],
						'schema' => $_REQUEST['schema']
					)
				)
			),
			'content' => $lang['strcreateviewwiz']
		)
	);
	$misc->printNavLinks($navlinks, 'views-views', get_defined_vars());
}
/**
 * Generate XML for the browser tree.
 *
 * Emits one tree node per view in the current schema and exits without
 * rendering any HTML chrome (this action is called before printHeader).
 */
function doTree() {
	global $misc, $data;
	$views = $data->getViews();
	$reqvars = $misc->getRequestVars('view');
	// Node attribute map; field() pulls values from each recordset row.
	$attrs = array(
		'text' => field('relname'),
		'icon' => 'View',
		'iconAction' => url('display.php', $reqvars, array('view' => field('relname'))),
		'toolTip'=> field('relcomment'),
		'action' => url('redirect.php', $reqvars, array('view' => field('relname'))),
		'branch' => url('views.php', $reqvars,
			array (
				'action' => 'subtree',
				'view' => field('relname')
			)
		)
	);
	$misc->printTree($views, $attrs, 'views');
	exit;
}
/**
 * Generate XML for one view's subtree in the browser tree: the navigation
 * tabs (columns, definition, etc.) for the selected view. Exits without
 * rendering any HTML chrome.
 */
function doSubTree() {
	global $misc, $data;
	$tabs = $misc->getNavTabs('view');
	$items = $misc->adjustTabsForTree($tabs);
	$reqvars = $misc->getRequestVars('view');
	$attrs = array(
		'text' => field('title'),
		'icon' => field('icon'),
		'action' => url(field('url'), $reqvars, field('urlvars'), array('view' => $_REQUEST['view'])),
		// Tabs without a 'branch' entry become leaf nodes.
		'branch' => ifempty(
			field('branch'), '', url(field('url'), field('urlvars'), $reqvars,
				array(
					'action' => 'tree',
					'view' => $_REQUEST['view']
				)
			)
		),
	);
	$misc->printTree($items, $attrs, 'view');
	exit;
}
// Tree requests render XML for the browser frame and exit before any HTML
// header is emitted, so they must be handled ahead of printHeader().
if ($action == 'tree') doTree();
// Fix: call matches the declared doSubTree() casing (the old dosubTree()
// spelling only worked because PHP function names are case-insensitive).
if ($action == 'subtree') doSubTree();
$misc->printHeader($lang['strviews']);
$misc->printBody();
// Dispatch the requested action; "cancel" submits fall back to the list.
switch ($action) {
	case 'selectrows':
		if (!isset($_REQUEST['cancel'])) doSelectRows(false);
		else doDefault();
		break;
	case 'confselectrows':
		doSelectRows(true);
		break;
	case 'save_create_wiz':
		if (isset($_REQUEST['cancel'])) doDefault();
		else doSaveCreateWiz();
		break;
	case 'wiz_create':
		doWizardCreate();
		break;
	case 'set_params_create':
		if (isset($_POST['cancel'])) doDefault();
		else doSetParamsCreate();
		break;
	case 'save_create':
		if (isset($_REQUEST['cancel'])) doDefault();
		else doSaveCreate();
		break;
	case 'create':
		doCreate();
		break;
	case 'drop':
		if (isset($_POST['drop'])) doDrop(false);
		else doDefault();
		break;
	case 'confirm_drop':
		doDrop(true);
		break;
	default:
		doDefault();
		break;
}
$misc->printFooter();
?>
| juanchi008/playa_auto | web/phppgadmin/views.php | PHP | bsd-3-clause | 27,911 |
// Copyright 2019 Google LLC.
// Use of this source code is governed by a BSD-style license that can be found in the LICENSE file.
#include "tools/fiddle/examples.h"
// HASH=eb905faa1084ccab3ad0605df4c27ea4
REG_FIDDLE(IRect_isEmpty64, 256, 256, true, 0) {
void draw(SkCanvas* canvas) {
    // First rect is unsorted (left 20 > right 10); second has zero width
    // (left == right == 20). Both are printed before and after sort().
    SkIRect tests[] = {{20, 40, 10, 50}, {20, 40, 20, 50}};
    for (auto rect : tests) {
        SkDebugf("rect: {%d, %d, %d, %d} is" "%s empty\n", rect.left(), rect.top(), rect.right(),
                 rect.bottom(), rect.isEmpty64() ? "" : " not");
        // sort() swaps edges so left <= right and top <= bottom; the
        // zero-width rect stays empty afterwards.
        rect.sort();
        SkDebugf("sorted: {%d, %d, %d, %d} is" "%s empty\n", rect.left(), rect.top(), rect.right(),
                 rect.bottom(), rect.isEmpty64() ? "" : " not");
    }
}
}  // END FIDDLE
| youtube/cobalt | third_party/skia_next/third_party/skia/docs/examples/IRect_isEmpty64.cpp | C++ | bsd-3-clause | 748 |
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utils functions used in Task Python API."""
from tensorflow_lite_support.cc.task.core.proto import base_options_pb2
from tensorflow_lite_support.python.task.core import task_options
from tensorflow_lite_support.python.task.core.proto import configuration_pb2
# Shorthand for the proto-generated BaseOptions message class.
_ProtoBaseOptions = base_options_pb2.BaseOptions
def ConvertToProtoBaseOptions(
    options: task_options.BaseOptions) -> _ProtoBaseOptions:
  """Convert the Python BaseOptions to Proto BaseOptions.

  Python BaseOptions is a subset of the Proto BaseOptions that strips off
  configurations that are useless in Python development.

  Args:
    options: the Python BaseOptions object.

  Returns:
    The Proto BaseOptions object.
  """
  proto_options = _ProtoBaseOptions()

  # In-memory model content takes precedence over a file path.
  model_file = options.model_file
  if model_file.file_content:
    proto_options.model_file.file_content = model_file.file_content
  elif model_file.file_name:
    proto_options.model_file.file_name = model_file.file_name

  cpu_settings = proto_options.compute_settings.tflite_settings.cpu_settings
  cpu_settings.num_threads = options.num_threads

  # Route inference through the Coral Edge TPU delegate when requested.
  if options.use_coral:
    proto_options.compute_settings.tflite_settings.delegate = (
        configuration_pb2.Delegate.EDGETPU_CORAL)
  return proto_options
| chromium/chromium | third_party/tflite_support/src/tensorflow_lite_support/python/task/core/task_utils.py | Python | bsd-3-clause | 1,839 |
# This migration comes from spree (originally 20130417120035)
#
# Backfills the then-new `state` column on spree_adjustments:
# adjustments on completed orders are closed, adjustments on shipped
# shipments are finalized, and anything still unset becomes open.
# Intentionally irreversible (empty #down): the pre-migration states
# are unknown.
class UpdateAdjustmentStates < ActiveRecord::Migration[4.2]
  def up
    # Close every adjustment belonging to a completed order.
    Spree::Order.complete.find_each do |order|
      order.adjustments.update_all(state: 'closed')
    end
    # Finalize shipping adjustments for shipments that have already shipped.
    Spree::Shipment.shipped.includes(:adjustment).find_each do |shipment|
      shipment.adjustment.update_column(:state, 'finalized') if shipment.adjustment
    end
    # Any remaining NULL states default to open.
    Spree::Adjustment.where(state: nil).update_all(state: 'open')
  end
  def down
  end
end
| andreinabgomezg29/spree_out_stock | spec/dummy/db/migrate/20170904174291_update_adjustment_states.spree.rb | Ruby | bsd-3-clause | 500 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/**
 * This view displays options for exporting the captured data.
 */
var ExportView = (function() {
  'use strict';
  // We inherit from DivView.
  var superClass = DivView;
  /**
   * @constructor
   */
  function ExportView() {
    assertFirstConstructorCall(ExportView);
    // Call superclass's constructor.
    superClass.call(this, ExportView.MAIN_BOX_ID);
    var privacyStrippingCheckbox = $(ExportView.PRIVACY_STRIPPING_CHECKBOX_ID);
    privacyStrippingCheckbox.onclick =
        this.onSetPrivacyStripping_.bind(this, privacyStrippingCheckbox);
    this.saveFileButton_ = $(ExportView.SAVE_FILE_BUTTON_ID);
    this.saveFileButton_.onclick = this.onSaveFile_.bind(this);
    this.saveStatusText_ = $(ExportView.SAVE_STATUS_TEXT_ID);
    this.userCommentsTextArea_ = $(ExportView.USER_COMMENTS_TEXT_AREA_ID);
    // Track blob for previous log dump so it can be revoked when a new dump is
    // saved.
    this.lastBlobURL_ = null;
    // Cached copy of the last loaded log dump, for use when exporting.
    this.loadedLogDump_ = null;
  }
  // ID for special HTML element in category_tabs.html
  ExportView.TAB_HANDLE_ID = 'tab-handle-export';
  // IDs for special HTML elements in export_view.html
  ExportView.MAIN_BOX_ID = 'export-view-tab-content';
  ExportView.DOWNLOAD_ANCHOR_ID = 'export-view-download-anchor';
  ExportView.SAVE_FILE_BUTTON_ID = 'export-view-save-log-file';
  ExportView.SAVE_STATUS_TEXT_ID = 'export-view-save-status-text';
  ExportView.PRIVACY_STRIPPING_CHECKBOX_ID =
      'export-view-privacy-stripping-checkbox';
  ExportView.USER_COMMENTS_TEXT_AREA_ID = 'export-view-user-comments';
  ExportView.PRIVACY_WARNING_ID = 'export-view-privacy-warning';
  cr.addSingletonGetter(ExportView);
  ExportView.prototype = {
    // Inherit the superclass's methods.
    __proto__: superClass.prototype,
    /**
     * Depending on the value of the checkbox, enables or disables stripping
     * cookies and passwords from log dumps and displayed events.
     */
    onSetPrivacyStripping_: function(privacyStrippingCheckbox) {
      SourceTracker.getInstance().setPrivacyStripping(
          privacyStrippingCheckbox.checked);
    },
    /**
     * When loading a log dump, cache it for future export and continue showing
     * the ExportView.
     */
    onLoadLogFinish: function(polledData, tabData, logDump) {
      this.loadedLogDump_ = logDump;
      this.setUserComments_(logDump.userComments);
      return true;
    },
    /**
     * Sets the save to file status text, displayed below the save to file
     * button, to |text|. Also enables or disables the save button based on the
     * value of |isSaving|, which must be true if the save process is still
     * ongoing, and false when the operation has stopped, regardless of success
     * of failure.
     */
    setSaveFileStatus: function(text, isSaving) {
      this.enableSaveFileButton_(!isSaving);
      this.saveStatusText_.textContent = text;
    },
    enableSaveFileButton_: function(enabled) {
      this.saveFileButton_.disabled = !enabled;
    },
    // Shows the privacy warning banner and force-disables the privacy
    // stripping checkbox so it cannot be re-enabled by the user.
    showPrivacyWarning: function() {
      setNodeDisplay($(ExportView.PRIVACY_WARNING_ID), true);
      $(ExportView.PRIVACY_STRIPPING_CHECKBOX_ID).checked = false;
      $(ExportView.PRIVACY_STRIPPING_CHECKBOX_ID).disabled = true;
      // Updating the checkbox doesn't actually disable privacy stripping, since
      // the onclick function will not be called.
      this.onSetPrivacyStripping_($(ExportView.PRIVACY_STRIPPING_CHECKBOX_ID));
    },
    /**
     * If not already busy saving a log dump, triggers asynchronous
     * generation of log dump and starts waiting for it to complete.
     */
    onSaveFile_: function() {
      if (this.saveFileButton_.disabled)
        return;
      // Clean up previous blob, if any, to reduce resource usage.
      if (this.lastBlobURL_) {
        window.webkitURL.revokeObjectURL(this.lastBlobURL_);
        this.lastBlobURL_ = null;
      }
      this.createLogDump_(this.onLogDumpCreated_.bind(this));
    },
    /**
     * Creates a log dump, and either synchronously or asynchronously calls
     * |callback| if it succeeds.  Separate from onSaveFile_ for unit tests.
     */
    createLogDump_: function(callback) {
      // Get an explanation for the dump file (this is mandatory!)
      var userComments = this.getNonEmptyUserComments_();
      if (userComments == undefined) {
        return;
      }
      this.setSaveFileStatus('Preparing data...', true);
      var privacyStripping = SourceTracker.getInstance().getPrivacyStripping();
      // If we have a cached log dump, update it synchronously.
      if (this.loadedLogDump_) {
        var dumpText = log_util.createUpdatedLogDump(userComments,
                                                     this.loadedLogDump_,
                                                     privacyStripping);
        callback(dumpText);
        return;
      }
      // Otherwise, poll information from the browser before creating one.
      log_util.createLogDumpAsync(userComments,
                                  callback,
                                  privacyStripping);
    },
    /**
     * Sets the user comments.
     */
    setUserComments_: function(userComments) {
      this.userCommentsTextArea_.value = userComments;
    },
    /**
     * Fetches the user comments for this dump.  If none were entered, warns the
     * user and returns undefined.  Otherwise returns the comments text.
     */
    getNonEmptyUserComments_: function() {
      var value = this.userCommentsTextArea_.value;
      // Reset the class name in case we had hilighted it earlier.
      this.userCommentsTextArea_.className = '';
      // We don't accept empty explanations. We don't care what is entered, as
      // long as there is something (a single whitespace would work).
      if (value == '') {
        // Put a big obnoxious red border around the text area.
        this.userCommentsTextArea_.className =
            'export-view-explanation-warning';
        alert('Please fill in the text field!');
        return undefined;
      }
      return value;
    },
    /**
     * Creates a blob url and starts downloading it.
     */
    onLogDumpCreated_: function(dumpText) {
      var textBlob = new Blob([dumpText], {type: 'octet/stream'});
      this.lastBlobURL_ = window.webkitURL.createObjectURL(textBlob);
      // Update the anchor tag and simulate a click on it to start the
      // download.
      var downloadAnchor = $(ExportView.DOWNLOAD_ANCHOR_ID);
      downloadAnchor.href = this.lastBlobURL_;
      downloadAnchor.click();
      this.setSaveFileStatus('Dump successful', false);
    }
  };
  return ExportView;
})();
| zcbenz/cefode-chromium | chrome/browser/resources/net_internals/export_view.js | JavaScript | bsd-3-clause | 6,896 |
# frozen_string_literal: true
module Spree
  module Admin
    class PropertiesController < ResourceController
      # Renders the (searched, paginated) list of properties.
      def index
        respond_with(@collection)
      end

      private

      # Memoized collection: applies the Ransack search from params[:q] and
      # paginates using Spree::Config[:properties_per_page].
      def collection
        return @collection if @collection
        # params[:q] can be blank upon pagination
        params[:q] = {} if params[:q].blank?
        @search = super.ransack(params[:q])
        @collection = @search.result
                             .page(params[:page])
                             .per(Spree::Config[:properties_per_page])
      end
    end
  end
end
/*
* Copyright (c) 2015-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
package com.facebook.drawee.drawable;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.ColorFilter;
import android.graphics.Paint;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.util.DisplayMetrics;
import org.robolectric.RobolectricTestRunner;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
/**
 * Unit tests for {@link RoundedBitmapDrawable}: verifies that the rounding
 * setters (circle, radii, border, padding) invalidate the drawable and are
 * reflected by the corresponding getters, and that {@code shouldRound()}
 * reports true exactly when some rounding/border property is set.
 */
@RunWith(RobolectricTestRunner.class)
public class RoundedBitmapDrawableTest {
  private Resources mResources;
  private Bitmap mBitmap;
  private DisplayMetrics mDisplayMetrics;
  RoundedBitmapDrawable mRoundedBitmapDrawable;
  private final Drawable.Callback mCallback = mock(Drawable.Callback.class);
  @Before
  public void setUp() {
    mResources = mock(Resources.class);
    mBitmap = mock(Bitmap.class);
    mDisplayMetrics = mock(DisplayMetrics.class);
    when(mResources.getDisplayMetrics()).thenReturn(mDisplayMetrics);
    mRoundedBitmapDrawable = new RoundedBitmapDrawable(mResources, mBitmap);
    mRoundedBitmapDrawable.setCallback(mCallback);
  }
  @Test
  public void testSetCircle() {
    mRoundedBitmapDrawable.setCircle(true);
    verify(mCallback).invalidateDrawable(mRoundedBitmapDrawable);
    assertTrue(mRoundedBitmapDrawable.isCircle());
  }
  @Test
  public void testSetRadii() {
    mRoundedBitmapDrawable.setRadii(new float[]{1, 2, 3, 4, 5, 6, 7, 8});
    verify(mCallback).invalidateDrawable(mRoundedBitmapDrawable);
    assertArrayEquals(new float[]{1, 2, 3, 4, 5, 6, 7, 8}, mRoundedBitmapDrawable.getRadii(), 0);
  }
  @Test
  public void testSetRadius() {
    // A single radius is expanded to all eight corner radii.
    mRoundedBitmapDrawable.setRadius(9);
    verify(mCallback).invalidateDrawable(mRoundedBitmapDrawable);
    assertArrayEquals(new float[]{9, 9, 9, 9, 9, 9, 9, 9}, mRoundedBitmapDrawable.getRadii(), 0);
  }
  @Test
  public void testSetBorder() {
    int color = 0x12345678;
    float width = 5;
    mRoundedBitmapDrawable.setBorder(color, width);
    verify(mCallback).invalidateDrawable(mRoundedBitmapDrawable);
    assertEquals(color, mRoundedBitmapDrawable.getBorderColor());
    assertEquals(width, mRoundedBitmapDrawable.getBorderWidth(), 0);
  }
  @Test
  public void testSetPadding() {
    float padding = 10;
    mRoundedBitmapDrawable.setPadding(padding);
    verify(mCallback).invalidateDrawable(mRoundedBitmapDrawable);
    assertEquals(padding, mRoundedBitmapDrawable.getPadding(), 0);
  }
  @Test
  public void testShouldRoundDefault() {
    // With no rounding properties set, no rounding should be applied.
    assertFalse(mRoundedBitmapDrawable.shouldRound());
  }
  @Test
  public void testShouldRoundRadius() {
    mRoundedBitmapDrawable.setRadius(5);
    assertTrue(mRoundedBitmapDrawable.shouldRound());
    mRoundedBitmapDrawable.setRadius(0);
    assertFalse(mRoundedBitmapDrawable.shouldRound());
  }
  @Test
  public void testShouldRoundRadii() {
    // A single non-zero corner radius is enough to require rounding.
    mRoundedBitmapDrawable.setRadii(new float[]{0, 0, 0, 0, 0, 0, 0, 1});
    assertTrue(mRoundedBitmapDrawable.shouldRound());
    mRoundedBitmapDrawable.setRadii(new float[]{0, 0, 0, 0, 0, 0, 0, 0});
    assertFalse(mRoundedBitmapDrawable.shouldRound());
  }
  @Test
  public void testShouldRoundCircle() {
    mRoundedBitmapDrawable.setCircle(true);
    assertTrue(mRoundedBitmapDrawable.shouldRound());
    mRoundedBitmapDrawable.setCircle(false);
    assertFalse(mRoundedBitmapDrawable.shouldRound());
  }
  @Test
  public void testShouldRoundBorder() {
    mRoundedBitmapDrawable.setBorder(0xFFFFFFFF, 1);
    assertTrue(mRoundedBitmapDrawable.shouldRound());
    mRoundedBitmapDrawable.setBorder(0x00000000, 0);
    assertFalse(mRoundedBitmapDrawable.shouldRound());
  }
  @Test
  public void testPreservePaintOnDrawableCopy() {
    // NOTE(review): originalPaint is a Mockito mock, so setColorFilter() is a
    // no-op and getColorFilter() returns null on both sides of the assertion.
    // Consider stubbing originalPaint.getColorFilter() to make this test
    // meaningful — TODO confirm intent.
    ColorFilter colorFilter = mock(ColorFilter.class);
    Paint originalPaint = mock(Paint.class);
    BitmapDrawable originalVersion = mock(BitmapDrawable.class);
    originalPaint.setColorFilter(colorFilter);
    when(originalVersion.getPaint()).thenReturn(originalPaint);
    RoundedBitmapDrawable roundedVersion = RoundedBitmapDrawable.fromBitmapDrawable(
        mResources,
        originalVersion);
    assertEquals(
        originalVersion.getPaint().getColorFilter(),
        roundedVersion.getPaint().getColorFilter());
  }
}
| 0mok/fresco | drawee/src/test/java/com/facebook/drawee/drawable/RoundedBitmapDrawableTest.java | Java | bsd-3-clause | 4,630 |
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.Variable = void 0;
const variable_1 = __importDefault(require("eslint-scope/lib/variable"));
const Variable = variable_1.default;
exports.Variable = Variable;
//# sourceMappingURL=Variable.js.map | ChromeDevTools/devtools-frontend | node_modules/@typescript-eslint/experimental-utils/dist/ts-eslint-scope/Variable.js | JavaScript | bsd-3-clause | 419 |
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "services/device/hid/hid_preparsed_data.h"
#include <cstddef>
#include <cstdint>
#include "base/debug/dump_without_crashing.h"
#include "base/memory/ptr_util.h"
#include "base/notreached.h"
#include "components/device_event_log/device_event_log.h"
namespace device {
namespace {
// Windows parses HID report descriptors into opaque _HIDP_PREPARSED_DATA
// objects. The internal structure of _HIDP_PREPARSED_DATA is reserved for
// internal system use. The structs below are inferred and may be wrong or
// incomplete.
// https://docs.microsoft.com/en-us/windows-hardware/drivers/hid/preparsed-data
//
// _HIDP_PREPARSED_DATA begins with a fixed-sized header containing information
// about a single top-level HID collection. The header is followed by a
// variable-sized array describing the fields that make up each report.
//
// Input report items appear first in the array, followed by output report items
// and feature report items. The number of items of each type is given by
// |input_item_count|, |output_item_count| and |feature_item_count|. The sum of
// these counts should equal |item_count|. The total size in bytes of all report
// items is |size_bytes|.
// NOTE(review): this layout is reverse-engineered (see comment block above);
// field meanings are best-effort and may not hold on future Windows versions.
#pragma pack(push, 1)
struct PreparsedDataHeader {
  // Unknown constant value. _HIDP_PREPARSED_DATA identifier?
  uint64_t magic;
  // Top-level collection usage information.
  uint16_t usage;
  uint16_t usage_page;
  uint16_t unknown[3];
  // Number of report items for input reports. Includes unused items.
  uint16_t input_item_count;
  uint16_t unknown2;
  // Maximum input report size, in bytes. Includes the report ID byte. Zero if
  // there are no input reports.
  uint16_t input_report_byte_length;
  uint16_t unknown3;
  // Number of report items for output reports. Includes unused items.
  uint16_t output_item_count;
  uint16_t unknown4;
  // Maximum output report size, in bytes. Includes the report ID byte. Zero if
  // there are no output reports.
  uint16_t output_report_byte_length;
  uint16_t unknown5;
  // Number of report items for feature reports. Includes unused items.
  uint16_t feature_item_count;
  // Total number of report items (input, output, and feature). Unused items are
  // excluded.
  uint16_t item_count;
  // Maximum feature report size, in bytes. Includes the report ID byte. Zero if
  // there are no feature reports.
  uint16_t feature_report_byte_length;
  // Total size of all report items, in bytes.
  uint16_t size_bytes;
  uint16_t unknown6;
};
#pragma pack(pop)
static_assert(sizeof(PreparsedDataHeader) == 44,
              "PreparsedDataHeader has incorrect size");
// NOTE(review): like PreparsedDataHeader, this layout is inferred from
// undocumented _HIDP_PREPARSED_DATA internals; verify against new SDKs.
#pragma pack(push, 1)
struct PreparsedDataItem {
  // Usage page for |usage_minimum| and |usage_maximum|.
  uint16_t usage_page;
  // Report ID for the report containing this item.
  uint8_t report_id;
  // Bit offset from |byte_index|.
  uint8_t bit_index;
  // Bit width of a single field defined by this item.
  uint16_t bit_size;
  // The number of fields defined by this item.
  uint16_t report_count;
  // Byte offset from the start of the report containing this item, including
  // the report ID byte.
  uint16_t byte_index;
  // The total number of bits for all fields defined by this item.
  uint16_t bit_count;
  // The bit field for the corresponding main item in the HID report. This bit
  // field is defined in the Device Class Definition for HID v1.11 section
  // 6.2.2.5.
  // https://www.usb.org/document-library/device-class-definition-hid-111
  uint32_t bit_field;
  uint32_t unknown;
  // Usage information for the collection containing this item.
  uint16_t link_usage_page;
  uint16_t link_usage;
  uint32_t unknown2[9];
  // The usage range for this item.
  uint16_t usage_minimum;
  uint16_t usage_maximum;
  // The string descriptor index range associated with this item. If the item
  // has no string descriptors, |string_minimum| and |string_maximum| are set to
  // zero.
  uint16_t string_minimum;
  uint16_t string_maximum;
  // The designator index range associated with this item. If the item has no
  // designators, |designator_minimum| and |designator_maximum| are set to zero.
  uint16_t designator_minimum;
  uint16_t designator_maximum;
  // The data index range associated with this item.
  uint16_t data_index_minimum;
  uint16_t data_index_maximum;
  uint32_t unknown3;
  // The range of fields defined by this item in logical units.
  int32_t logical_minimum;
  int32_t logical_maximum;
  // The range of fields defined by this item in units defined by |unit| and
  // |unit_exponent|. If this item does not use physical units,
  // |physical_minimum| and |physical_maximum| are set to zero.
  int32_t physical_minimum;
  int32_t physical_maximum;
  // The unit definition for this item. The format for this definition is
  // described in the Device Class Definition for HID v1.11 section 6.2.2.7.
  // https://www.usb.org/document-library/device-class-definition-hid-111
  uint32_t unit;
  uint32_t unit_exponent;
};
#pragma pack(pop)
static_assert(sizeof(PreparsedDataItem) == 104,
              "PreparsedDataItem has incorrect size");
// Sanity-checks the inferred _HIDP_PREPARSED_DATA header: magic constant,
// per-report-type length/count consistency, and total item size. Returns
// false if any check fails.
bool ValidatePreparsedDataHeader(const PreparsedDataHeader& header) {
  // Ensures the diagnostic dump below is only taken once per process.
  static bool has_dumped_without_crashing = false;
  // _HIDP_PREPARSED_DATA objects are expected to start with a known constant
  // value.
  constexpr uint64_t kHidPreparsedDataMagic = 0x52444B2050646948;
  // Require a matching magic value. The details of _HIDP_PREPARSED_DATA are
  // proprietary and the magic constant may change. If DCHECKS are on, trigger
  // a CHECK failure and crash. Otherwise, generate a non-crash dump.
  DCHECK_EQ(header.magic, kHidPreparsedDataMagic);
  if (header.magic != kHidPreparsedDataMagic) {
    HID_LOG(ERROR) << "Unexpected magic value.";
    // Bug fix: the flag test was inverted (|if (has_dumped_without_crashing)|),
    // so DumpWithoutCrashing() could never run — the flag starts false and was
    // only set inside the branch. Dump on the first failure only.
    if (!has_dumped_without_crashing) {
      base::debug::DumpWithoutCrashing();
      has_dumped_without_crashing = true;
    }
    return false;
  }
  // A report type that declares items must also declare a non-zero maximum
  // report length.
  if (header.input_report_byte_length == 0 && header.input_item_count > 0)
    return false;
  if (header.output_report_byte_length == 0 && header.output_item_count > 0)
    return false;
  if (header.feature_report_byte_length == 0 && header.feature_item_count > 0)
    return false;
  // Calculate the expected total size of report items in the
  // _HIDP_PREPARSED_DATA object. Use the individual item counts for each report
  // type instead of the total |item_count|. In some cases additional items are
  // allocated but are not used for any reports. Unused items are excluded from
  // |item_count| but are included in the item counts for each report type and
  // contribute to the total size of the object. See crbug.com/1199890 for more
  // information.
  uint16_t total_item_size =
      (header.input_item_count + header.output_item_count +
       header.feature_item_count) *
      sizeof(PreparsedDataItem);
  if (total_item_size != header.size_bytes)
    return false;
  return true;
}
// Returns true if |item| describes a usable report item: it must not overlap
// the report ID byte, its bit offset must fall within a single byte, and it
// must occupy at least one bit of report payload.
bool ValidatePreparsedDataItem(const PreparsedDataItem& item) {
  const bool overlaps_report_id = item.byte_index == 0;
  const bool invalid_bit_offset = item.bit_index >= CHAR_BIT;
  const bool occupies_no_bits =
      item.report_count == 0 || item.bit_size == 0 || item.bit_count == 0;
  return !overlaps_report_id && !invalid_bit_offset && !occupies_no_bits;
}
// Converts a raw PreparsedDataItem into the cross-platform ReportItem
// structure. The brace-initializer order must match the ReportItem field
// declaration order.
HidServiceWin::PreparsedData::ReportItem MakeReportItemFromPreparsedData(
    const PreparsedDataItem& item) {
  // |byte_index| includes the report ID byte, so subtract one byte before
  // converting to a bit offset and adding the sub-byte |bit_index|.
  size_t bit_index = (item.byte_index - 1) * CHAR_BIT + item.bit_index;
  return {item.report_id,         item.bit_field,
          item.bit_size,          item.report_count,
          item.usage_page,        item.usage_minimum,
          item.usage_maximum,     item.designator_minimum,
          item.designator_maximum, item.string_minimum,
          item.string_maximum,    item.logical_minimum,
          item.logical_maximum,   item.physical_minimum,
          item.physical_maximum,  item.unit,
          item.unit_exponent,     bit_index};
}
} // namespace
// static
// Fetches the preparsed report descriptor data and top-level capabilities for
// |device_handle| via the Windows HID API. Returns nullptr (and logs) on
// failure; on success the returned object owns the preparsed data.
std::unique_ptr<HidPreparsedData> HidPreparsedData::Create(
    HANDLE device_handle) {
  PHIDP_PREPARSED_DATA preparsed_data;
  if (!HidD_GetPreparsedData(device_handle, &preparsed_data) ||
      !preparsed_data) {
    HID_PLOG(EVENT) << "Failed to get device data";
    return nullptr;
  }
  HIDP_CAPS capabilities;
  if (HidP_GetCaps(preparsed_data, &capabilities) != HIDP_STATUS_SUCCESS) {
    HID_PLOG(EVENT) << "Failed to get device capabilities";
    // Must free the buffer allocated by HidD_GetPreparsedData on this path.
    HidD_FreePreparsedData(preparsed_data);
    return nullptr;
  }
  return base::WrapUnique(new HidPreparsedData(preparsed_data, capabilities));
}
// Takes ownership of |preparsed_data|; it is released in the destructor.
HidPreparsedData::HidPreparsedData(PHIDP_PREPARSED_DATA preparsed_data,
                                   HIDP_CAPS capabilities)
    : preparsed_data_(preparsed_data), capabilities_(capabilities) {
  DCHECK(preparsed_data_);
}
HidPreparsedData::~HidPreparsedData() {
  // Frees the buffer allocated by HidD_GetPreparsedData in Create().
  HidD_FreePreparsedData(preparsed_data_);
}
// Returns the capabilities captured at creation time.
const HIDP_CAPS& HidPreparsedData::GetCaps() const {
  return capabilities_;
}
// Extracts the report items of |report_type| from the opaque preparsed data
// by walking the inferred item array that follows the header. Returns an
// empty vector if the header fails validation or the type has no items.
std::vector<HidServiceWin::PreparsedData::ReportItem>
HidPreparsedData::GetReportItems(HIDP_REPORT_TYPE report_type) const {
  const auto& header =
      *reinterpret_cast<const PreparsedDataHeader*>(preparsed_data_);
  if (!ValidatePreparsedDataHeader(header))
    return {};
  // Items are laid out input-first, then output, then feature; compute the
  // start index and count for the requested type.
  size_t min_index;
  size_t item_count;
  switch (report_type) {
    case HidP_Input:
      min_index = 0;
      item_count = header.input_item_count;
      break;
    case HidP_Output:
      min_index = header.input_item_count;
      item_count = header.output_item_count;
      break;
    case HidP_Feature:
      min_index = header.input_item_count + header.output_item_count;
      item_count = header.feature_item_count;
      break;
    default:
      return {};
  }
  if (item_count == 0)
    return {};
  // The item array immediately follows the fixed-size header.
  const auto* data = reinterpret_cast<const uint8_t*>(preparsed_data_);
  const auto* items = reinterpret_cast<const PreparsedDataItem*>(
      data + sizeof(PreparsedDataHeader));
  std::vector<ReportItem> report_items;
  for (size_t i = min_index; i < min_index + item_count; ++i) {
    // Skip unused/invalid items; see ValidatePreparsedDataItem.
    if (ValidatePreparsedDataItem(items[i]))
      report_items.push_back(MakeReportItemFromPreparsedData(items[i]));
  }
  return report_items;
}
} // namespace device
| chromium/chromium | services/device/hid/hid_preparsed_data.cc | C++ | bsd-3-clause | 10,536 |
using ServiceStack.DataAnnotations;
namespace ServiceStack.Common.Tests.Models
{
    /// <summary>
    /// Test model exercising the [Index] data annotation: one non-unique
    /// indexed column and one unique indexed column.
    /// </summary>
    public class ModelWithIndexFields
    {
        public string Id { get; set; }
        // Non-unique index.
        [Index]
        public string Name { get; set; }
        public string AlbumId { get; set; }
        // Unique index ([Index(true)]).
        [Index(true)]
        public string UniqueName { get; set; }
    }
}
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <memory>
#include "ash/shell.h"
#include "ash/test/ash_test_base.h"
#include "base/timer/lap_timer.h"
#include "testing/perf/perf_test.h"
#include "ui/aura/window.h"
#include "ui/compositor/layer.h"
#include "ui/compositor/test/draw_waiter_for_test.h"
namespace ash {
namespace {
// TODO(wutao): On chromeos_linux builds, the tests only run with
// use_ozone = false.
// Measures commit throughput (runs/s) while animating bounds or opacity of a
// solid-color layer, with and without a background blur filter applied.
class AshBackgroundFilterBlurPerfTest : public AshTestBase {
 public:
  AshBackgroundFilterBlurPerfTest() : timer_(0, base::TimeDelta(), 1) {}
  AshBackgroundFilterBlurPerfTest(const AshBackgroundFilterBlurPerfTest&) =
      delete;
  AshBackgroundFilterBlurPerfTest& operator=(
      const AshBackgroundFilterBlurPerfTest&) = delete;
  ~AshBackgroundFilterBlurPerfTest() override = default;
  // AshTestBase:
  void SetUp() override;
 protected:
  // Creates a solid-color layer covering the root layer and stacks it on top.
  std::unique_ptr<ui::Layer> CreateSolidColorLayer(SkColor color);
  // Repeatedly resizes |layer|, waiting for a commit per step, and reports
  // laps/second under |test_name|.
  void WithBoundsChange(ui::Layer* layer,
                        int num_iteration,
                        const std::string& test_name);
  // Same as WithBoundsChange, but animates |layer|'s opacity instead.
  void WithOpacityChange(ui::Layer* layer,
                         int num_iteration,
                         const std::string& test_name);
  // Background (green) layer and the black layer that may carry the blur.
  std::unique_ptr<ui::Layer> background_layer_;
  std::unique_ptr<ui::Layer> blur_layer_;
 private:
  ui::Layer* root_layer_ = nullptr;
  ui::Compositor* compositor_ = nullptr;
  base::LapTimer timer_;
};
// Fixes the display size for reproducible numbers, then creates the two
// stacked solid-color layers used by all test cases.
void AshBackgroundFilterBlurPerfTest::SetUp() {
  AshTestBase::SetUp();
  // This is for consistency even if the default display size changed.
  UpdateDisplay("800x600");
  root_layer_ = Shell::GetAllRootWindows()[0]->layer();
  compositor_ = root_layer_->GetCompositor();
  background_layer_ = CreateSolidColorLayer(SK_ColorGREEN);
  blur_layer_ = CreateSolidColorLayer(SK_ColorBLACK);
}
// Builds a solid-color layer matching the root layer's bounds, adds it to the
// root layer, and stacks it above all siblings.
std::unique_ptr<ui::Layer>
AshBackgroundFilterBlurPerfTest::CreateSolidColorLayer(SkColor color) {
  auto layer = std::make_unique<ui::Layer>(ui::LAYER_SOLID_COLOR);
  layer->SetColor(color);
  layer->SetBounds(root_layer_->bounds());
  ui::Layer* raw_layer = layer.get();
  root_layer_->Add(raw_layer);
  root_layer_->StackAtTop(raw_layer);
  return layer;
}
// Grows |layer| from near-zero to its initial bounds over |num_iteration|
// steps, waiting for a compositor commit after each, and reports the measured
// laps/second as |test_name|.
void AshBackgroundFilterBlurPerfTest::WithBoundsChange(
    ui::Layer* layer,
    int num_iteration,
    const std::string& test_name) {
  const gfx::Rect init_bounds = layer->GetTargetBounds();
  // Wait for a DidCommit before starts the loop, and do not measure the last
  // iteration of the loop.
  ui::DrawWaiterForTest::WaitForCommit(compositor_);
  timer_.Reset();
  for (int i = 1; i <= num_iteration + 1; ++i) {
    float fraction = (static_cast<float>(i) / num_iteration);
    const gfx::Rect bounds =
        gfx::Rect(0, 0, static_cast<int>(init_bounds.width() * fraction),
                  static_cast<int>(init_bounds.height() * fraction));
    layer->SetBounds(bounds);
    ui::DrawWaiterForTest::WaitForCommit(compositor_);
    // The extra (num_iteration + 1)-th pass is excluded from the timing.
    if (i <= num_iteration)
      timer_.NextLap();
  }
  perf_test::PrintResult("AshBackgroundFilterBlurPerfTest", std::string(),
                         test_name, timer_.LapsPerSecond(), "runs/s", true);
}
// Ramps |layer|'s opacity toward (and clamped at) 1.0 over |num_iteration|
// steps, waiting for a compositor commit after each, and reports laps/second
// as |test_name|.
void AshBackgroundFilterBlurPerfTest::WithOpacityChange(
    ui::Layer* layer,
    int num_iteration,
    const std::string& test_name) {
  float init_opacity = layer->GetTargetOpacity();
  // Wait for a DidCommit before starts the loop, and do not measure the last
  // iteration of the loop.
  ui::DrawWaiterForTest::WaitForCommit(compositor_);
  timer_.Reset();
  for (int i = 1; i <= num_iteration + 1; ++i) {
    float fraction = (static_cast<float>(i) / num_iteration);
    float opacity = std::min(1.0f, init_opacity * fraction);
    layer->SetOpacity(opacity);
    ui::DrawWaiterForTest::WaitForCommit(compositor_);
    // The extra (num_iteration + 1)-th pass is excluded from the timing.
    if (i <= num_iteration)
      timer_.NextLap();
  }
  perf_test::PrintResult("AshBackgroundFilterBlurPerfTest", std::string(),
                         test_name, timer_.LapsPerSecond(), "runs/s", true);
}
// Baseline cases: no blur filter applied to either layer.
TEST_F(AshBackgroundFilterBlurPerfTest, NoBlurBackgroundLayerBoundsChange) {
  WithBoundsChange(background_layer_.get(), 100,
                   "no_blur_background_layer_bounds_change");
}
TEST_F(AshBackgroundFilterBlurPerfTest, NoBlurBlurLayerBoundsChange) {
  WithBoundsChange(blur_layer_.get(), 100, "no_blur_blur_layer_bounds_change");
}
// Blur cases: a background blur of 10 is set on |blur_layer_| first.
TEST_F(AshBackgroundFilterBlurPerfTest, BackgroundLayerBoundsChange) {
  blur_layer_->SetBackgroundBlur(10.f);
  WithBoundsChange(background_layer_.get(), 100,
                   "background_layer_bounds_change");
}
TEST_F(AshBackgroundFilterBlurPerfTest, BlurLayerBoundsChange) {
  blur_layer_->SetBackgroundBlur(10.f);
  WithBoundsChange(blur_layer_.get(), 100, "blur_layer_bounds_change");
}
TEST_F(AshBackgroundFilterBlurPerfTest, NoBlurBackgroundLayerOpacityChange) {
  WithOpacityChange(background_layer_.get(), 100,
                    "no_blur_background_layer_opacity_change");
}
TEST_F(AshBackgroundFilterBlurPerfTest, NoBlurBlurLayerOpacityChange) {
  WithOpacityChange(blur_layer_.get(), 100,
                    "no_blur_blur_layer_opacity_change");
}
TEST_F(AshBackgroundFilterBlurPerfTest, BackgroundLayerOpacityChange) {
  blur_layer_->SetBackgroundBlur(10.f);
  WithOpacityChange(background_layer_.get(), 100,
                    "background_layer_opacity_change");
}
TEST_F(AshBackgroundFilterBlurPerfTest, BlurLayerOpacityChange) {
  blur_layer_->SetBackgroundBlur(10.f);
  WithOpacityChange(blur_layer_.get(), 100, "blur_layer_opacity_change");
}
} // namespace
} // namespace ash
| ric2b/Vivaldi-browser | chromium/ash/perftests/ash_background_filter_blur_perftest.cc | C++ | bsd-3-clause | 5,648 |
// Copyright 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Mock ServerConnectionManager class for use in client regression tests.
#include "sync/test/engine/mock_connection_manager.h"
#include <map>
#include "base/location.h"
#include "base/strings/stringprintf.h"
#include "sync/engine/syncer_proto_util.h"
#include "sync/protocol/bookmark_specifics.pb.h"
#include "sync/syncable/directory.h"
#include "sync/syncable/syncable_write_transaction.h"
#include "sync/test/engine/test_id_factory.h"
#include "testing/gtest/include/gtest/gtest.h"
using std::find;
using std::map;
using std::string;
using sync_pb::ClientToServerMessage;
using sync_pb::CommitMessage;
using sync_pb::CommitResponse;
using sync_pb::GetUpdatesMessage;
using sync_pb::SyncEnums;
namespace syncer {
using syncable::WriteTransaction;
static char kValidAuthToken[] = "AuthToken";
static char kCacheGuid[] = "kqyg7097kro6GSUod+GSg==";
// Constructs the mock server front-end. Starts reachable, with timestamp 0
// and a valid auth token, so tests fail only where they explicitly inject
// errors.
MockConnectionManager::MockConnectionManager(syncable::Directory* directory,
                                             CancelationSignal* signal)
    : ServerConnectionManager("unused", 0, false, signal),
      server_reachable_(true),
      conflict_all_commits_(false),
      conflict_n_commits_(0),
      next_new_id_(10000),
      store_birthday_("Store BDay!"),
      store_birthday_sent_(false),
      client_stuck_(false),
      countdown_to_postbuffer_fail_(0),
      directory_(directory),
      mid_commit_observer_(NULL),
      throttling_(false),
      partialThrottling_(false),
      fail_with_auth_invalid_(false),
      fail_non_periodic_get_updates_(false),
      next_position_in_parent_(2),
      use_legacy_bookmarks_protocol_(false),
      num_get_updates_requests_(0) {
  SetNewTimestamp(0);
  SetAuthToken(kValidAuthToken);
}
// Tests must drain all queued updates before teardown.
MockConnectionManager::~MockConnectionManager() {
  EXPECT_TRUE(update_queue_.empty()) << "Unfetched updates.";
}
// Sets a string the mock prepends to names during commit-time rename.
void MockConnectionManager::SetCommitTimeRename(string prepend) {
  commit_time_rename_prepended_string_ = prepend;
}
// Registers a one-shot closure run after the next commit is processed.
void MockConnectionManager::SetMidCommitCallback(
    const base::Closure& callback) {
  mid_commit_callback_ = callback;
}
// Registers an observer notified after every commit is processed.
void MockConnectionManager::SetMidCommitObserver(
    MockConnectionManager::MidCommitObserver* observer) {
  mid_commit_observer_ = observer;
}
// Simulates the server side of a sync request/response round trip: parses the
// serialized ClientToServerMessage from |params->buffer_in|, applies the
// configured fault injections (auth failure, countdown failure, network
// unreachable, throttling, birthday mismatch), dispatches COMMIT/GET_UPDATES
// handling, and serializes the response into |params->buffer_out|. Returns
// false when the simulated transport fails before producing a response.
bool MockConnectionManager::PostBufferToPath(PostBufferParams* params,
                                             const string& path,
                                             const string& auth_token,
                                             ScopedServerStatusWatcher* watcher) {
  ClientToServerMessage post;
  CHECK(post.ParseFromString(params->buffer_in));
  CHECK(post.has_protocol_version());
  CHECK(post.has_api_key());
  CHECK(post.has_bag_of_chips());
  requests_.push_back(post);
  client_stuck_ = post.sync_problem_detected();
  sync_pb::ClientToServerResponse response;
  response.Clear();
  if (directory_) {
    // If the Directory's locked when we do this, it's a problem as in normal
    // use this function could take a while to return because it accesses the
    // network. As we can't test this we do the next best thing and hang here
    // when there's an issue.
    CHECK(directory_->good());
    WriteTransaction wt(FROM_HERE, syncable::UNITTEST, directory_);
  }
  if (auth_token.empty()) {
    params->response.server_status = HttpResponse::SYNC_AUTH_ERROR;
    return false;
  }
  if (auth_token != kValidAuthToken) {
    // Simulate server-side auth failure.
    params->response.server_status = HttpResponse::SYNC_AUTH_ERROR;
    InvalidateAndClearAuthToken();
  }
  if (--countdown_to_postbuffer_fail_ == 0) {
    // Fail as countdown hits zero.
    params->response.server_status = HttpResponse::SYNC_SERVER_ERROR;
    return false;
  }
  if (!server_reachable_) {
    params->response.server_status = HttpResponse::CONNECTION_UNAVAILABLE;
    return false;
  }
  // Default to an ok connection.
  params->response.server_status = HttpResponse::SERVER_CONNECTION_OK;
  response.set_error_code(SyncEnums::SUCCESS);
  const string current_store_birthday = store_birthday();
  response.set_store_birthday(current_store_birthday);
  if (post.has_store_birthday() && post.store_birthday() !=
      current_store_birthday) {
    // Mismatched store birthday: reply NOT_MY_BIRTHDAY without dispatching.
    response.set_error_code(SyncEnums::NOT_MY_BIRTHDAY);
    response.set_error_message("Merry Unbirthday!");
    response.SerializeToString(&params->buffer_out);
    store_birthday_sent_ = true;
    return true;
  }
  bool result = true;
  EXPECT_TRUE(!store_birthday_sent_ || post.has_store_birthday() ||
              post.message_contents() == ClientToServerMessage::AUTHENTICATE);
  store_birthday_sent_ = true;
  if (post.message_contents() == ClientToServerMessage::COMMIT) {
    ProcessCommit(&post, &response);
  } else if (post.message_contents() == ClientToServerMessage::GET_UPDATES) {
    ProcessGetUpdates(&post, &response);
  } else {
    EXPECT_TRUE(false) << "Unknown/unsupported ClientToServerMessage";
    return false;
  }
  {
    // Throttling flags are set from other threads; apply them one-shot.
    base::AutoLock lock(response_code_override_lock_);
    if (throttling_) {
      response.set_error_code(SyncEnums::THROTTLED);
      throttling_ = false;
    }
    if (partialThrottling_) {
      sync_pb::ClientToServerResponse_Error* response_error =
          response.mutable_error();
      response_error->set_error_type(SyncEnums::PARTIAL_FAILURE);
      for (ModelTypeSet::Iterator it = throttled_type_.First(); it.Good();
           it.Inc()) {
        response_error->add_error_data_type_ids(
            GetSpecificsFieldNumberFromModelType(it.Get()));
      }
      partialThrottling_ = false;
    }
    if (fail_with_auth_invalid_)
      response.set_error_code(SyncEnums::AUTH_INVALID);
  }
  response.SerializeToString(&params->buffer_out);
  // Run the one-shot mid-commit callback and the persistent observer, if set.
  if (post.message_contents() == ClientToServerMessage::COMMIT &&
      !mid_commit_callback_.is_null()) {
    mid_commit_callback_.Run();
    mid_commit_callback_.Reset();
  }
  if (mid_commit_observer_) {
    mid_commit_observer_->Observe();
  }
  return result;
}
// Returns the current (last) queued update batch, creating one if the queue
// is empty.
sync_pb::GetUpdatesResponse* MockConnectionManager::GetUpdateResponse() {
  if (update_queue_.empty()) {
    NextUpdateBatch();
  }
  return &update_queue_.back();
}
// Populates |entity| with bookmark data, using either the legacy
// BookmarkData message or the modern specifics-based protocol depending on
// |use_legacy_bookmarks_protocol_|. Non-folders get a fixed URL.
void MockConnectionManager::AddDefaultBookmarkData(sync_pb::SyncEntity* entity,
                                                   bool is_folder) {
  if (use_legacy_bookmarks_protocol_) {
    sync_pb::SyncEntity_BookmarkData* data = entity->mutable_bookmarkdata();
    data->set_bookmark_folder(is_folder);
    if (!is_folder) {
      data->set_bookmark_url("http://google.com");
    }
  } else {
    entity->set_folder(is_folder);
    entity->mutable_specifics()->mutable_bookmark();
    if (!is_folder) {
      entity->mutable_specifics()->mutable_bookmark()->
          set_url("http://google.com");
    }
  }
}
// Convenience overload: queues a directory update, converting the numeric ids
// to server ids via TestIdFactory.
sync_pb::SyncEntity* MockConnectionManager::AddUpdateDirectory(
    int id,
    int parent_id,
    string name,
    int64 version,
    int64 sync_ts,
    std::string originator_cache_guid,
    std::string originator_client_item_id) {
  return AddUpdateDirectory(TestIdFactory::FromNumber(id),
                            TestIdFactory::FromNumber(parent_id),
                            name,
                            version,
                            sync_ts,
                            originator_cache_guid,
                            originator_client_item_id);
}
// Takes ownership of |command|; attached to future GetUpdates responses.
void MockConnectionManager::SetGUClientCommand(
    sync_pb::ClientCommand* command) {
  gu_client_command_.reset(command);
}
// Takes ownership of |command|; attached to future Commit responses.
void MockConnectionManager::SetCommitClientCommand(
    sync_pb::ClientCommand* command) {
  commit_client_command_.reset(command);
}
// Marks |id| so it receives a transient error from the mock server.
void MockConnectionManager::SetTransientErrorId(syncable::Id id) {
  transient_error_ids_.push_back(id);
}
// Convenience overload: queues a bookmark update, converting the numeric ids
// to server ids via TestIdFactory.
sync_pb::SyncEntity* MockConnectionManager::AddUpdateBookmark(
    int id, int parent_id,
    string name, int64 version,
    int64 sync_ts,
    string originator_client_item_id,
    string originator_cache_guid) {
  return AddUpdateBookmark(TestIdFactory::FromNumber(id),
                           TestIdFactory::FromNumber(parent_id),
                           name,
                           version,
                           sync_ts,
                           originator_client_item_id,
                           originator_cache_guid);
}
// Queues an update carrying arbitrary |specifics| at an explicit sibling
// |position|.
sync_pb::SyncEntity* MockConnectionManager::AddUpdateSpecifics(
    int id,
    int parent_id,
    string name,
    int64 version,
    int64 sync_ts,
    bool is_dir,
    int64 position,
    const sync_pb::EntitySpecifics& specifics) {
  sync_pb::SyncEntity* ent = AddUpdateMeta(
      TestIdFactory::FromNumber(id).GetServerId(),
      TestIdFactory::FromNumber(parent_id).GetServerId(),
      name, version, sync_ts);
  ent->set_position_in_parent(position);
  ent->mutable_specifics()->CopyFrom(specifics);
  ent->set_folder(is_dir);
  return ent;
}
// As above, but also stamps originator information on the entity.
sync_pb::SyncEntity* MockConnectionManager::AddUpdateSpecifics(
    int id,
    int parent_id,
    string name,
    int64 version,
    int64 sync_ts,
    bool is_dir,
    int64 position,
    const sync_pb::EntitySpecifics& specifics,
    string originator_cache_guid,
    string originator_client_item_id) {
  sync_pb::SyncEntity* ent = AddUpdateSpecifics(
      id, parent_id, name, version, sync_ts, is_dir, position, specifics);
  ent->set_originator_cache_guid(originator_cache_guid);
  ent->set_originator_client_item_id(originator_client_item_id);
  return ent;
}
// Appends the singleton Nigori (encryption metadata) node to the canned
// GetUpdates response. The node is parented to the root (id 0), tagged with
// the NIGORI root tag, and filled with the provided |specifics|.
sync_pb::SyncEntity* MockConnectionManager::SetNigori(
    int id,
    int64 version,
    int64 sync_ts,
    const sync_pb::EntitySpecifics& specifics) {
  sync_pb::SyncEntity* ent = GetUpdateResponse()->add_entries();
  ent->set_id_string(TestIdFactory::FromNumber(id).GetServerId());
  ent->set_parent_id_string(TestIdFactory::FromNumber(0).GetServerId());
  ent->set_server_defined_unique_tag(ModelTypeToRootTag(NIGORI));
  ent->set_name("Nigori");
  ent->set_non_unique_name("Nigori");
  ent->set_version(version);
  ent->set_sync_timestamp(sync_ts);
  ent->set_mtime(sync_ts);
  ent->set_ctime(1);
  ent->set_position_in_parent(0);
  ent->set_folder(false);
  ent->mutable_specifics()->CopyFrom(specifics);
  return ent;
}
// Appends a preference update identified by |client_tag|. The name is a
// single-space placeholder; the specifics get the default PREFERENCES
// field so the entity's model type is recognizable.
sync_pb::SyncEntity* MockConnectionManager::AddUpdatePref(string id,
                                                          string parent_id,
                                                          string client_tag,
                                                          int64 version,
                                                          int64 sync_ts) {
  sync_pb::SyncEntity* ent =
      AddUpdateMeta(id, parent_id, " ", version, sync_ts);
  ent->set_client_defined_unique_tag(client_tag);
  sync_pb::EntitySpecifics specifics;
  AddDefaultFieldValue(PREFERENCES, &specifics);
  ent->mutable_specifics()->CopyFrom(specifics);
  return ent;
}
// Appends an update with common metadata plus default bookmark data.
// |is_dir| selects folder vs. plain bookmark payload.
sync_pb::SyncEntity* MockConnectionManager::AddUpdateFull(
    string id, string parent_id,
    string name, int64 version,
    int64 sync_ts, bool is_dir) {
  sync_pb::SyncEntity* ent =
      AddUpdateMeta(id, parent_id, name, version, sync_ts);
  AddDefaultBookmarkData(ent, is_dir);
  return ent;
}
// Appends a bare update entity with the common metadata fields populated
// (ids, names, version, timestamps, position) and default originator info.
// Returns the entity, owned by the canned GetUpdates response.
sync_pb::SyncEntity* MockConnectionManager::AddUpdateMeta(
    string id, string parent_id,
    string name, int64 version,
    int64 sync_ts) {
  sync_pb::SyncEntity* ent = GetUpdateResponse()->add_entries();
  ent->set_id_string(id);
  ent->set_parent_id_string(parent_id);
  ent->set_non_unique_name(name);
  ent->set_name(name);
  ent->set_version(version);
  ent->set_sync_timestamp(sync_ts);
  ent->set_mtime(sync_ts);
  ent->set_ctime(1);
  ent->set_position_in_parent(GeneratePositionInParent());
  // This isn't perfect, but it works well enough.  This is an update, which
  // means the ID is a server ID, which means it never changes.  By making
  // kCacheGuid also never change, we guarantee that the same item always has
  // the same originator_cache_guid and originator_client_item_id.
  //
  // Unfortunately, neither this class nor the tests that use it explicitly
  // track sync entitites, so supporting proper cache guids and client item IDs
  // would require major refactoring.  The ID used here ought to be the "c-"
  // style ID that was sent up on the commit.
  ent->set_originator_cache_guid(kCacheGuid);
  ent->set_originator_client_item_id(id);
  return ent;
}
// Appends a folder update and stamps it with explicit originator fields.
sync_pb::SyncEntity* MockConnectionManager::AddUpdateDirectory(
    string id,
    string parent_id,
    string name,
    int64 version,
    int64 sync_ts,
    std::string originator_cache_guid,
    std::string originator_client_item_id) {
  sync_pb::SyncEntity* ret =
      AddUpdateFull(id, parent_id, name, version, sync_ts, true);
  ret->set_originator_cache_guid(originator_cache_guid);
  ret->set_originator_client_item_id(originator_client_item_id);
  return ret;
}
// Appends a (non-folder) bookmark update and stamps it with explicit
// originator fields.
sync_pb::SyncEntity* MockConnectionManager::AddUpdateBookmark(
    string id,
    string parent_id,
    string name, int64 version,
    int64 sync_ts,
    string originator_cache_guid,
    string originator_client_item_id) {
  sync_pb::SyncEntity* ret =
      AddUpdateFull(id, parent_id, name, version, sync_ts, false);
  ret->set_originator_cache_guid(originator_cache_guid);
  ret->set_originator_client_item_id(originator_client_item_id);
  return ret;
}
// Reflects the single entry of the last successful commit back to the
// client as an update, the way a real server would. Deleted entries become
// tombstones; otherwise the committed entity is copied and rewritten with
// the server-assigned id/version/position from the commit response.
// Expects exactly one entry in the last commit and a SUCCESS response.
sync_pb::SyncEntity* MockConnectionManager::AddUpdateFromLastCommit() {
  EXPECT_EQ(1, last_sent_commit().entries_size());
  EXPECT_EQ(1, last_commit_response().entryresponse_size());
  EXPECT_EQ(CommitResponse::SUCCESS,
            last_commit_response().entryresponse(0).response_type());
  if (last_sent_commit().entries(0).deleted()) {
    ModelType type = GetModelType(last_sent_commit().entries(0));
    AddUpdateTombstone(syncable::Id::CreateFromServerId(
        last_sent_commit().entries(0).id_string()), type);
  } else {
    sync_pb::SyncEntity* ent = GetUpdateResponse()->add_entries();
    ent->CopyFrom(last_sent_commit().entries(0));
    ent->clear_insert_after_item_id();
    ent->clear_old_parent_id();
    ent->set_position_in_parent(
        last_commit_response().entryresponse(0).position_in_parent());
    ent->set_version(
        last_commit_response().entryresponse(0).version());
    ent->set_id_string(
        last_commit_response().entryresponse(0).id_string());
    // This is the same hack as in AddUpdateMeta.  See the comment in that
    // function for more information.
    ent->set_originator_cache_guid(kCacheGuid);
    ent->set_originator_client_item_id(
        last_commit_response().entryresponse(0).id_string());
    if (last_sent_commit().entries(0).has_unique_position()) {
      ent->mutable_unique_position()->CopyFrom(
          last_sent_commit().entries(0).unique_position());
    }
    // Tests don't currently care about the following:
    // parent_id_string, name, non_unique_name.
  }
  return GetMutableLastUpdate();
}
// Appends a deletion ("tombstone") update for |id| of the given |type|.
void MockConnectionManager::AddUpdateTombstone(
    const syncable::Id& id,
    ModelType type) {
  // Tombstones have only the ID set and dummy values for the required fields.
  sync_pb::SyncEntity* ent = GetUpdateResponse()->add_entries();
  ent->set_id_string(id.GetServerId());
  ent->set_version(0);
  ent->set_name("");
  ent->set_deleted(true);
  // Make sure we can still extract the ModelType from this tombstone.
  AddDefaultFieldValue(type, ent->mutable_specifics());
}
// Replaces the most recently added update with a tombstone carrying the
// same id and model type.
void MockConnectionManager::SetLastUpdateDeleted() {
  // Tombstones have only the ID set.  Wipe anything else.
  string id_string = GetMutableLastUpdate()->id_string();
  ModelType type = GetModelType(*GetMutableLastUpdate());
  GetUpdateResponse()->mutable_entries()->RemoveLast();
  AddUpdateTombstone(syncable::Id::CreateFromServerId(id_string), type);
}
// Overrides the originator fields on the most recently added update.
void MockConnectionManager::SetLastUpdateOriginatorFields(
    const string& client_id,
    const string& entry_id) {
  GetMutableLastUpdate()->set_originator_cache_guid(client_id);
  GetMutableLastUpdate()->set_originator_client_item_id(entry_id);
}
// Sets the server-defined unique tag on the most recently added update.
void MockConnectionManager::SetLastUpdateServerTag(const string& tag) {
  GetMutableLastUpdate()->set_server_defined_unique_tag(tag);
}
// Sets the client-defined unique tag on the most recently added update.
void MockConnectionManager::SetLastUpdateClientTag(const string& tag) {
  GetMutableLastUpdate()->set_client_defined_unique_tag(tag);
}
// Sets the server position-in-parent on the most recently added update.
void MockConnectionManager::SetLastUpdatePosition(int64 server_position) {
  GetMutableLastUpdate()->set_position_in_parent(server_position);
}
// Encodes |ts| into the progress-marker token handed to the client on the
// next GetUpdates response (see ApplyToken()).
void MockConnectionManager::SetNewTimestamp(int ts) {
  next_token_ = base::StringPrintf("mock connection ts = %d", ts);
  ApplyToken();
}
// Stores |next_token_| in the pending GetUpdates response by replacing its
// progress markers with a single placeholder marker. ProcessGetUpdates()
// later fans the token out to per-type markers. No-op if no response is
// queued.
void MockConnectionManager::ApplyToken() {
  if (!update_queue_.empty()) {
    GetUpdateResponse()->clear_new_progress_marker();
    sync_pb::DataTypeProgressMarker* new_marker =
        GetUpdateResponse()->add_new_progress_marker();
    new_marker->set_data_type_id(-1);  // Invalid -- clients shouldn't see.
    new_marker->set_token(next_token_);
  }
}
// Sets the changes_remaining field of the pending GetUpdates response.
// NOTE(review): the parameter is named |timestamp| but is used as the
// remaining-change count -- presumably a historical naming leftover.
void MockConnectionManager::SetChangesRemaining(int64 timestamp) {
  GetUpdateResponse()->set_changes_remaining(timestamp);
}
// Serves one canned GetUpdates response from the front of |update_queue_|.
// Verifies the request shape (modern progress-marker-based protocol, no
// legacy timestamp/type fields), checks that every queued entity belongs to
// a type the client asked for, rewrites progress markers per requested
// type, and optionally attaches the keystore key and a client command.
void MockConnectionManager::ProcessGetUpdates(
    sync_pb::ClientToServerMessage* csm,
    sync_pb::ClientToServerResponse* response) {
  CHECK(csm->has_get_updates());
  ASSERT_EQ(csm->message_contents(), ClientToServerMessage::GET_UPDATES);
  const GetUpdatesMessage& gu = csm->get_updates();
  num_get_updates_requests_++;
  EXPECT_FALSE(gu.has_from_timestamp());
  EXPECT_FALSE(gu.has_requested_types());
  if (fail_non_periodic_get_updates_) {
    EXPECT_EQ(sync_pb::GetUpdatesCallerInfo::PERIODIC,
              gu.caller_info().source());
  }
  // Verify that the items we're about to send back to the client are of
  // the types requested by the client.  If this fails, it probably indicates
  // a test bug.
  EXPECT_TRUE(gu.fetch_folders());
  EXPECT_FALSE(gu.has_requested_types());
  if (update_queue_.empty()) {
    GetUpdateResponse();
  }
  sync_pb::GetUpdatesResponse* updates = &update_queue_.front();
  for (int i = 0; i < updates->entries_size(); ++i) {
    if (!updates->entries(i).deleted()) {
      ModelType entry_type = GetModelType(updates->entries(i));
      EXPECT_TRUE(
          IsModelTypePresentInSpecifics(gu.from_progress_marker(), entry_type))
          << "Syncer did not request updates being provided by the test.";
    }
  }
  response->mutable_get_updates()->CopyFrom(*updates);
  // Set appropriate progress markers, overriding the value squirreled
  // away by ApplyToken().
  std::string token = response->get_updates().new_progress_marker(0).token();
  response->mutable_get_updates()->clear_new_progress_marker();
  for (int i = 0; i < gu.from_progress_marker_size(); ++i) {
    sync_pb::DataTypeProgressMarker* new_marker =
        response->mutable_get_updates()->add_new_progress_marker();
    new_marker->set_data_type_id(gu.from_progress_marker(i).data_type_id());
    new_marker->set_token(token);
  }
  // Fill the keystore key if requested.
  if (gu.need_encryption_key())
    response->mutable_get_updates()->add_encryption_keys(keystore_key_);
  update_queue_.pop_front();
  if (gu_client_command_) {
    response->mutable_client_command()->CopyFrom(*gu_client_command_.get());
  }
}
// Sets the keystore key returned when a GetUpdates request asks for the
// encryption key.
void MockConnectionManager::SetKeystoreKey(const std::string& key) {
  // Note: this is not a thread-safe set, ok for now.  NOT ok if tests
  // run the syncer on the background thread while this method is called.
  keystore_key_ = key;
}
// Decides whether the commit entry currently being processed should get a
// CONFLICT response: always when conflict_all_commits_ is set, otherwise
// while there are forced conflicts left in conflict_n_commits_ (each call
// consumes one).
bool MockConnectionManager::ShouldConflictThisCommit() {
  if (conflict_all_commits_)
    return true;
  if (conflict_n_commits_ > 0) {
    --conflict_n_commits_;
    return true;
  }
  return false;
}
// Returns true iff |id| was registered via SetTransientErrorId(), i.e. its
// commit should be answered with TRANSIENT_ERROR.
bool MockConnectionManager::ShouldTransientErrorThisId(syncable::Id id) {
  return std::find(transient_error_ids_.begin(),
                   transient_error_ids_.end(),
                   id) != transient_error_ids_.end();
}
// Simulates the server side of a commit: records the commit message,
// assigns a per-entry response (CONFLICT, TRANSIENT_ERROR, or SUCCESS with
// a bumped version), mints "mock_server:N" ids for newly created items and
// remaps parent references to those new ids, applies an optional
// commit-time rename, and finally stores the response and optional client
// command.
void MockConnectionManager::ProcessCommit(
    sync_pb::ClientToServerMessage* csm,
    sync_pb::ClientToServerResponse* response_buffer) {
  CHECK(csm->has_commit());
  ASSERT_EQ(csm->message_contents(), ClientToServerMessage::COMMIT);
  map <string, string> changed_ids;
  const CommitMessage& commit_message = csm->commit();
  CommitResponse* commit_response = response_buffer->mutable_commit();
  commit_messages_.push_back(new CommitMessage);
  commit_messages_.back()->CopyFrom(commit_message);
  map<string, sync_pb::CommitResponse_EntryResponse*> response_map;
  for (int i = 0; i < commit_message.entries_size() ; i++) {
    const sync_pb::SyncEntity& entry = commit_message.entries(i);
    CHECK(entry.has_id_string());
    string id_string = entry.id_string();
    ASSERT_LT(entry.name().length(), 256ul) << " name probably too long. True "
        "server name checking not implemented";
    syncable::Id id;
    if (entry.version() == 0) {
      // Relies on our new item string id format. (string representation of a
      // negative number).
      id = syncable::Id::CreateFromClientString(id_string);
    } else {
      id = syncable::Id::CreateFromServerId(id_string);
    }
    committed_ids_.push_back(id);
    if (response_map.end() == response_map.find(id_string))
      response_map[id_string] = commit_response->add_entryresponse();
    sync_pb::CommitResponse_EntryResponse* er = response_map[id_string];
    if (ShouldConflictThisCommit()) {
      er->set_response_type(CommitResponse::CONFLICT);
      continue;
    }
    if (ShouldTransientErrorThisId(id)) {
      er->set_response_type(CommitResponse::TRANSIENT_ERROR);
      continue;
    }
    er->set_response_type(CommitResponse::SUCCESS);
    er->set_version(entry.version() + 1);
    if (!commit_time_rename_prepended_string_.empty()) {
      // Commit time rename sent down from the server.
      er->set_name(commit_time_rename_prepended_string_ + entry.name());
    }
    string parent_id_string = entry.parent_id_string();
    // Remap id's we've already assigned.
    if (changed_ids.end() != changed_ids.find(parent_id_string)) {
      parent_id_string = changed_ids[parent_id_string];
      er->set_parent_id_string(parent_id_string);
    }
    if (entry.has_version() && 0 != entry.version()) {
      er->set_id_string(id_string);  // Allows verification.
    } else {
      string new_id = base::StringPrintf("mock_server:%d", next_new_id_++);
      changed_ids[id_string] = new_id;
      er->set_id_string(new_id);
    }
  }
  commit_responses_.push_back(new CommitResponse(*commit_response));
  if (commit_client_command_) {
    response_buffer->mutable_client_command()->CopyFrom(
        *commit_client_command_.get());
  }
}
// Overload taking syncable::Ids; forwards to the string-based version
// using the server-id representations.
sync_pb::SyncEntity* MockConnectionManager::AddUpdateDirectory(
    syncable::Id id,
    syncable::Id parent_id,
    string name,
    int64 version,
    int64 sync_ts,
    string originator_cache_guid,
    string originator_client_item_id) {
  return AddUpdateDirectory(id.GetServerId(), parent_id.GetServerId(),
                            name, version, sync_ts, originator_cache_guid,
                            originator_client_item_id);
}
// Overload taking syncable::Ids; forwards to the string-based version
// using the server-id representations. Note the originator parameter order
// here is (cache_guid, client_item_id).
sync_pb::SyncEntity* MockConnectionManager::AddUpdateBookmark(
    syncable::Id id,
    syncable::Id parent_id,
    string name,
    int64 version,
    int64 sync_ts,
    string originator_cache_guid,
    string originator_client_item_id) {
  return AddUpdateBookmark(id.GetServerId(), parent_id.GetServerId(),
                           name, version, sync_ts, originator_cache_guid,
                           originator_client_item_id);
}
// Returns a mutable pointer to the most recently added update entity.
// Expects at least one update to have been queued.
sync_pb::SyncEntity* MockConnectionManager::GetMutableLastUpdate() {
  sync_pb::GetUpdatesResponse* updates = GetUpdateResponse();
  EXPECT_GT(updates->entries_size(), 0);
  return updates->mutable_entries()->Mutable(updates->entries_size() - 1);
}
// Starts a fresh canned GetUpdates response (a new "batch"); subsequent
// AddUpdate* calls append to it. Resets changes_remaining and re-applies
// the pending progress-marker token.
void MockConnectionManager::NextUpdateBatch() {
  update_queue_.push_back(sync_pb::GetUpdatesResponse::default_instance());
  SetChangesRemaining(0);
  ApplyToken();
}
// Returns the most recent commit message received. Expects at least one.
const CommitMessage& MockConnectionManager::last_sent_commit() const {
  EXPECT_TRUE(!commit_messages_.empty());
  return *commit_messages_.back();
}
// Returns the most recent commit response produced. Expects at least one.
const CommitResponse& MockConnectionManager::last_commit_response() const {
  EXPECT_TRUE(!commit_responses_.empty());
  return *commit_responses_.back();
}
// Returns the most recent client-to-server request. Expects at least one.
const sync_pb::ClientToServerMessage&
    MockConnectionManager::last_request() const {
  EXPECT_TRUE(!requests_.empty());
  return requests_.back();
}
// Returns all recorded client-to-server requests, oldest first.
const std::vector<sync_pb::ClientToServerMessage>&
    MockConnectionManager::requests() const {
  return requests_;
}
// Returns true iff |filter| contains a progress marker whose data type id
// corresponds to the specifics field number of |value|.
bool MockConnectionManager::IsModelTypePresentInSpecifics(
    const google::protobuf::RepeatedPtrField<
        sync_pb::DataTypeProgressMarker>& filter,
    ModelType value) {
  const int wanted_field_number = GetSpecificsFieldNumberFromModelType(value);
  bool found = false;
  for (int i = 0; i < filter.size() && !found; ++i) {
    found = (filter.Get(i).data_type_id() == wanted_field_number);
  }
  return found;
}
// Returns a pointer to the progress marker in |filter| whose data type id
// matches the specifics field number of |value|, or NULL if none matches.
sync_pb::DataTypeProgressMarker const*
    MockConnectionManager::GetProgressMarkerForType(
        const google::protobuf::RepeatedPtrField<
            sync_pb::DataTypeProgressMarker>& filter,
        ModelType value) {
  int data_type_id = GetSpecificsFieldNumberFromModelType(value);
  for (int i = 0; i < filter.size(); ++i) {
    if (filter.Get(i).data_type_id() == data_type_id) {
      return &(filter.Get(i));
    }
  }
  return NULL;
}
// Marks the mock server reachable (see UpdateConnectionStatus()).
void MockConnectionManager::SetServerReachable() {
  server_reachable_ = true;
}
// Marks the mock server unreachable (see UpdateConnectionStatus()).
void MockConnectionManager::SetServerNotReachable() {
  server_reachable_ = false;
}
// Recomputes the canned connection status from the reachability flag.
void MockConnectionManager::UpdateConnectionStatus() {
  server_status_ = server_reachable_
                       ? HttpResponse::SERVER_CONNECTION_OK
                       : HttpResponse::CONNECTION_UNAVAILABLE;
}
// Forces the connection status to an explicit value, bypassing the
// reachability flag.
void MockConnectionManager::SetServerStatus(
    HttpResponse::ServerConnectionCode server_status) {
  server_status_ = server_status;
}
} // namespace syncer
| guorendong/iridium-browser-ubuntu | sync/test/engine/mock_connection_manager.cc | C++ | bsd-3-clause | 25,782 |
/*jslint sloppy: true, nomen: true */
/*global exports:true */
/*
This file is part of the PhantomJS project from Ofi Labs.
Copyright (C) 2013 Joseph Rollinson, jtrollinson@gmail.com
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the <organization> nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/* Takes in a QtCookieJar and decorates it with useful functions. */
function decorateCookieJar(jar) {
    /* Allows one to add a cookie to the cookie jar from inside JavaScript. */
    jar.addCookie = function (cookie) {
        jar.addCookieFromMap(cookie);
    };
    /* Getting and setting jar.cookies gets and sets all the cookies in the
     * cookie jar.
     *
     * Object.defineProperty replaces the non-standard, deprecated
     * __defineGetter__/__defineSetter__ pair; enumerable/configurable are
     * set to true to keep the property attributes those legacy helpers
     * produced.
     */
    Object.defineProperty(jar, 'cookies', {
        enumerable: true,
        configurable: true,
        get: function () {
            return this.cookiesToMap();
        },
        set: function (cookies) {
            this.addCookiesFromMap(cookies);
        }
    });
    return jar;
}
/* Creates and decorates a new cookie jar.
* path is the file path where Phantomjs will store the cookie jar persistently.
* path is not mandatory.
*/
exports.create = function (path) {
if (arguments.length < 1) {
path = "";
}
return decorateCookieJar(phantom.createCookieJar(path));
};
/* Exports the decorateCookieJar function so callers can decorate an
 * existing cookie jar object themselves. */
exports.decorate = decorateCookieJar;
| tianzhihen/phantomjs | src/modules/cookiejar.js | JavaScript | bsd-3-clause | 2,651 |
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/ui/passwords/bubble_controllers/move_to_account_store_bubble_controller.h"
#include "chrome/browser/favicon/favicon_service_factory.h"
#include "chrome/browser/profiles/profile.h"
#include "chrome/browser/profiles/profile_avatar_icon_util.h"
#include "chrome/browser/signin/identity_manager_factory.h"
#include "chrome/browser/sync/sync_service_factory.h"
#include "chrome/browser/ui/passwords/passwords_model_delegate.h"
#include "chrome/grit/generated_resources.h"
#include "components/favicon/core/favicon_util.h"
#include "components/password_manager/core/browser/password_feature_manager.h"
#include "components/password_manager/core/browser/password_manager_features_util.h"
#include "components/password_manager/core/common/password_manager_ui.h"
#include "components/signin/public/base/consent_level.h"
#include "components/signin/public/identity_manager/identity_manager.h"
#include "content/public/browser/browser_context.h"
#include "content/public/browser/web_contents.h"
#include "ui/base/l10n/l10n_util.h"
#include "ui/base/resource/resource_bundle.h"
namespace metrics_util = password_manager::metrics_util;
// Constructs the controller for the "move password to account store"
// bubble, recording that the bubble was shown automatically.
MoveToAccountStoreBubbleController::MoveToAccountStoreBubbleController(
    base::WeakPtr<PasswordsModelDelegate> delegate)
    : PasswordBubbleControllerBase(
          std::move(delegate),
          password_manager::metrics_util::AUTOMATIC_MOVE_TO_ACCOUNT_STORE) {}
MoveToAccountStoreBubbleController::~MoveToAccountStoreBubbleController() {
  // Make sure the interactions are reported even if Views didn't notify the
  // controller about the bubble being closed.
  if (!interaction_reported_)
    OnBubbleClosing();
}
void MoveToAccountStoreBubbleController::RequestFavicon(
base::OnceCallback<void(const gfx::Image&)> favicon_ready_callback) {
favicon::FaviconService* favicon_service =
FaviconServiceFactory::GetForProfile(GetProfile(),
ServiceAccessType::EXPLICIT_ACCESS);
favicon::GetFaviconImageForPageURL(
favicon_service, delegate_->GetPendingPassword().url,
favicon_base::IconType::kFavicon,
base::BindOnce(&MoveToAccountStoreBubbleController::OnFaviconReady,
base::Unretained(this), std::move(favicon_ready_callback)),
&favicon_tracker_);
}
// Relays the fetched favicon image (possibly empty on failure) to the
// caller-supplied callback.
void MoveToAccountStoreBubbleController::OnFaviconReady(
    base::OnceCallback<void(const gfx::Image&)> favicon_ready_callback,
    const favicon_base::FaviconImageResult& result) {
  std::move(favicon_ready_callback).Run(result.image);
}
// Returns the localized bubble title.
std::u16string MoveToAccountStoreBubbleController::GetTitle() const {
  return l10n_util::GetStringUTF16(IDS_PASSWORD_MANAGER_MOVE_TITLE);
}
// Handles the user accepting the move. Users already opted in to the
// account store get the password moved immediately; everyone else is sent
// through the opt-in reauthentication flow first, which performs the move
// on success.
void MoveToAccountStoreBubbleController::AcceptMove() {
  dismissal_reason_ = metrics_util::CLICKED_ACCEPT;
  if (!delegate_->GetPasswordFeatureManager()->IsOptedInForAccountStorage()) {
    // Not opted in yet: gate the move behind the reauth/opt-in flow.
    delegate_->AuthenticateUserForAccountStoreOptInAndMovePassword();
    return;
  }
  // Already opted in to the account store; move without reauth.
  delegate_->MovePasswordToAccountStore();
}
// Handles the user declining the move: records the choice and blocks
// future move prompts for this credential.
void MoveToAccountStoreBubbleController::RejectMove() {
  dismissal_reason_ = metrics_util::CLICKED_NEVER;
  return delegate_->BlockMovingPasswordToAccountStore();
}
// Returns the signed-in user's avatar as a circle of |size| x |size| px,
// falling back to the placeholder avatar when the account has no image.
// Returns an empty image if the profile or identity manager is gone.
gfx::Image MoveToAccountStoreBubbleController::GetProfileIcon(int size) {
  if (!GetProfile())
    return gfx::Image();
  signin::IdentityManager* identity_manager =
      IdentityManagerFactory::GetForProfile(GetProfile());
  if (!identity_manager)
    return gfx::Image();
  AccountInfo primary_account_info = identity_manager->FindExtendedAccountInfo(
      identity_manager->GetPrimaryAccountInfo(signin::ConsentLevel::kSignin));
  DCHECK(!primary_account_info.IsEmpty());
  gfx::Image account_icon = primary_account_info.account_image;
  if (account_icon.IsEmpty()) {
    account_icon = ui::ResourceBundle::GetSharedInstance().GetImageNamed(
        profiles::GetPlaceholderAvatarIconResourceID());
  }
  return profiles::GetSizedAvatarIcon(account_icon,
                                      /*is_rectangle=*/true, /*width=*/size,
                                      /*height=*/size, profiles::SHAPE_CIRCLE);
}
// Logs the bubble dismissal reason together with the user's account-storage
// state. No-op if the profile has already been destroyed.
void MoveToAccountStoreBubbleController::ReportInteractions() {
  Profile* profile = GetProfile();
  if (!profile)
    return;
  metrics_util::LogMoveUIDismissalReason(
      dismissal_reason_,
      password_manager::features_util::ComputePasswordAccountStorageUserState(
          profile->GetPrefs(), SyncServiceFactory::GetForProfile(profile)));
  // TODO(crbug.com/1063852): Consider recording UKM here, via:
  // metrics_recorder_->RecordUIDismissalReason(dismissal_reason_)
}
| ric2b/Vivaldi-browser | chromium/chrome/browser/ui/passwords/bubble_controllers/move_to_account_store_bubble_controller.cc | C++ | bsd-3-clause | 4,892 |
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import base64
import hashlib
import os
import string
import win32api
import win32file
import win32com.client
from win32com.shell import shell, shellcon
import win32security
def _GetFileVersion(file_path):
    """Returns the file version of the given file.

    Uses the Windows Scripting.FileSystemObject COM object, so this only
    works on Windows with pywin32 available.
    """
    return win32com.client.Dispatch(
        'Scripting.FileSystemObject').GetFileVersion(file_path)
def _GetFileBitness(file_path):
    """Returns the bitness of the given executable as a string.

    Returns:
        '32' for a 32-bit binary; '64' otherwise.
    """
    binary_type = win32file.GetBinaryType(file_path)
    return '32' if binary_type == win32file.SCS_32BIT_BINARY else '64'
def _GetProductName(file_path):
    """Returns the product name of the given file.

    Args:
        file_path: The absolute or relative path to the file.

    Returns:
        A string representing the product name of the file, or None if the
        product name was not found.
    """
    # The version resource stores strings per (language, codepage) pair;
    # query the available pairs first, then read ProductName from the first
    # one.
    language_and_codepage_pairs = win32api.GetFileVersionInfo(
        file_path, '\\VarFileInfo\\Translation')
    if not language_and_codepage_pairs:
        return None
    product_name_entry = ('\\StringFileInfo\\%04x%04x\\ProductName' %
                          language_and_codepage_pairs[0])
    return win32api.GetFileVersionInfo(file_path, product_name_entry)
def _GetUserSpecificRegistrySuffix():
    """Returns '.' + the unpadded Base32 encoding of the MD5 of the user's SID.

    The result must match the output from the method
    UserSpecificRegistrySuffix::GetSuffix() in
    chrome/installer/util/shell_util.cc. It will always be 27 characters long.
    """
    token_handle = win32security.OpenProcessToken(win32api.GetCurrentProcess(),
                                                  win32security.TOKEN_QUERY)
    user_sid, _ = win32security.GetTokenInformation(token_handle,
                                                    win32security.TokenUser)
    user_sid_string = win32security.ConvertSidToStringSid(user_sid)
    # hashlib and base64 operate on bytes in Python 3: encode the SID string
    # (it is plain ASCII, e.g. "S-1-5-21-..."), and decode the Base32 output
    # back to text before stripping the '=' padding. The previous bytes/str
    # mixing ("." + bytes, bytes.rstrip(str)) raised TypeError on Python 3;
    # this form behaves identically on Python 2.
    md5_digest = hashlib.md5(user_sid_string.encode('utf-8')).digest()
    return '.' + base64.b32encode(md5_digest).decode('ascii').rstrip('=')
class VariableExpander:
    """Expands variables in strings."""
    def __init__(self, mini_installer_path,
                 previous_version_mini_installer_path, chromedriver_path,
                 quiet, output_dir):
        """Constructor.

        The constructor initializes a variable dictionary that maps variables to
        their values. These are the only acceptable variables:
            * $BRAND: the browser brand (e.g., "Google Chrome" or "Chromium").
            * $CHROME_DIR: the directory of Chrome (or Chromium) from the base
                installation directory.
            * $CHROME_HTML_PROG_ID: 'ChromeHTML' (or 'ChromiumHTM').
            * $CHROME_LONG_NAME: 'Google Chrome' (or 'Chromium').
            * $CHROME_LONG_NAME_BETA: 'Google Chrome Beta' if $BRAND is 'Google
            * Chrome'.
            * $CHROME_LONG_NAME_DEV: 'Google Chrome Dev' if $BRAND is 'Google
            * Chrome'.
            * $CHROME_LONG_NAME_SXS: 'Google Chrome SxS' if $BRAND is 'Google
            * Chrome'.
            * $CHROME_SHORT_NAME: 'Chrome' (or 'Chromium').
            * $CHROME_SHORT_NAME_BETA: 'ChromeBeta' if $BRAND is 'Google Chrome'.
            * $CHROME_SHORT_NAME_DEV: 'ChromeDev' if $BRAND is 'Google Chrome'.
            * $CHROME_SHORT_NAME_SXS: 'ChromeCanary' if $BRAND is 'Google Chrome'.
            * $CHROME_UPDATE_REGISTRY_SUBKEY: the registry key, excluding the root
                key, of Chrome for Google Update.
            * $CHROME_UPDATE_REGISTRY_SUBKEY_DEV: the registry key, excluding the
                root key, of Chrome Dev for Google Update.
            * $CHROME_UPDATE_REGISTRY_SUBKEY_BETA: the registry key, excluding the
                root key, of Chrome Beta for Google Update.
            * $CHROME_UPDATE_REGISTRY_SUBKEY_SXS: the registry key, excluding the
                root key, of Chrome SxS for Google Update.
            * $CHROMEDRIVER_PATH: Path to chromedriver.
            * $QUIET: Supress output.
            * $OUTPUT_DIR: "--output-dir=DIR" or an empty string.
            * $LAUNCHER_UPDATE_REGISTRY_SUBKEY: the registry key, excluding the root
                key, of the app launcher for Google Update if $BRAND is 'Google
            * Chrome'.
            * $LOCAL_APPDATA: the unquoted path to the Local Application Data
                folder.
            * $LOG_FILE: "--log-file=FILE" or an empty string.
            * $MINI_INSTALLER: the unquoted path to the mini_installer.
            * $MINI_INSTALLER_BITNESS: the bitness of the mini_installer.
            * $MINI_INSTALLER_FILE_VERSION: the file version of $MINI_INSTALLER.
            * $PREVIOUS_VERSION_MINI_INSTALLER: the unquoted path to a
                mini_installer whose version is lower than $MINI_INSTALLER.
            * $PREVIOUS_VERSION_MINI_INSTALLER_FILE_VERSION: the file version of
                $PREVIOUS_VERSION_MINI_INSTALLER.
            * $PROGRAM_FILES: the unquoted path to the Program Files folder.
            * $USER_SPECIFIC_REGISTRY_SUFFIX: the output from the function
                _GetUserSpecificRegistrySuffix().
            * $VERSION_[XP/SERVER_2003/VISTA/WIN7/WIN8/WIN8_1/WIN10]: a 2-tuple
                representing the version of the corresponding OS.
            * $WINDOWS_VERSION: a 2-tuple representing the current Windows version.
            * $CHROME_TOAST_ACTIVATOR_CLSID: NotificationActivator's CLSID for
                Chrome.
            * $CHROME_TOAST_ACTIVATOR_CLSID_BETA: NotificationActivator's CLSID for
                Chrome Beta.
            * $CHROME_TOAST_ACTIVATOR_CLSID_DEV: NotificationActivator's CLSID for
                Chrome Dev.
            * $CHROME_TOAST_ACTIVATOR_CLSID_SXS: NotificationActivator's CLSID for
                Chrome SxS.
            * $CHROME_ELEVATOR_CLSID: Elevator Service CLSID for Chrome.
            * $CHROME_ELEVATOR_CLSID_BETA: Elevator Service CLSID for Chrome Beta.
            * $CHROME_ELEVATOR_CLSID_DEV: Elevator Service CLSID for Chrome Dev.
            * $CHROME_ELEVATOR_CLSID_SXS: Elevator Service CLSID for Chrome SxS.
            * $CHROME_ELEVATOR_IID: IElevator IID for Chrome.
            * $CHROME_ELEVATOR_IID_BETA: IElevator IID for Chrome Beta.
            * $CHROME_ELEVATOR_IID_DEV: IElevator IID for Chrome Dev.
            * $CHROME_ELEVATOR_IID_SXS: IElevator IID for Chrome SxS.
            * $CHROME_ELEVATION_SERVICE_NAME: Elevation Service Name for Chrome.
            * $CHROME_ELEVATION_SERVICE_NAME_BETA: Elevation Service Name for Chrome
                Beta.
            * $CHROME_ELEVATION_SERVICE_NAME_DEV: Elevation Service Name for Chrome
                Dev.
            * $CHROME_ELEVATION_SERVICE_NAME_SXS: Elevation Service Name for Chrome
                SxS.
            * $CHROME_ELEVATION_SERVICE_DISPLAY_NAME: Elevation Service Display Name
                for Chrome.
            * $CHROME_ELEVATION_SERVICE_DISPLAY_NAME_BETA: Elevation Service Display
                Name for Chrome Beta.
            * $CHROME_ELEVATION_SERVICE_DISPLAY_NAME_DEV: Elevation Service Display
                Name for Chrome Dev.
            * $CHROME_ELEVATION_SERVICE_DISPLAY_NAME_SXS: Elevation Service Display
                Name for Chrome SxS.
            * $LAST_INSTALLER_BREAKING_VERSION: The last installer version that had
                breaking changes.

        Args:
            mini_installer_path: The path to a mini_installer.
            previous_version_mini_installer_path: The path to a mini_installer
                whose version is lower than |mini_installer_path|.
        """
        mini_installer_abspath = os.path.abspath(mini_installer_path)
        previous_version_mini_installer_abspath = os.path.abspath(
            previous_version_mini_installer_path)
        # GetVersionEx() returns (major, minor, build, platform_id, version
        # string); only major/minor are needed for $WINDOWS_VERSION.
        windows_major_ver, windows_minor_ver, _, _, _ = win32api.GetVersionEx()
        # Variables common to both branding modes.
        self._variable_mapping = {
            'CHROMEDRIVER_PATH':
            chromedriver_path,
            'QUIET':
            '-q' if quiet else '',
            'OUTPUT_DIR':
            '"--output-dir=%s"' % output_dir if output_dir else '',
            'LAST_INSTALLER_BREAKING_VERSION':
            '85.0.4169.0',
            'LOCAL_APPDATA':
            shell.SHGetFolderPath(0, shellcon.CSIDL_LOCAL_APPDATA, None, 0),
            'LOG_FILE':
            '',
            'MINI_INSTALLER':
            mini_installer_abspath,
            'MINI_INSTALLER_FILE_VERSION':
            _GetFileVersion(mini_installer_abspath),
            'MINI_INSTALLER_BITNESS':
            _GetFileBitness(mini_installer_abspath),
            'PREVIOUS_VERSION_MINI_INSTALLER':
            previous_version_mini_installer_abspath,
            'PREVIOUS_VERSION_MINI_INSTALLER_FILE_VERSION':
            _GetFileVersion(previous_version_mini_installer_abspath),
            'PROGRAM_FILES':
            shell.SHGetFolderPath(
                0, shellcon.CSIDL_PROGRAM_FILES
                if _GetFileBitness(mini_installer_abspath) == '64' else
                shellcon.CSIDL_PROGRAM_FILESX86, None, 0),
            'USER_SPECIFIC_REGISTRY_SUFFIX':
            _GetUserSpecificRegistrySuffix(),
            'VERSION_SERVER_2003':
            '(5, 2)',
            'VERSION_VISTA':
            '(6, 0)',
            'VERSION_WIN10':
            '(10, 0)',
            'VERSION_WIN7':
            '(6, 1)',
            'VERSION_WIN8':
            '(6, 2)',
            'VERSION_WIN8_1':
            '(6, 3)',
            'VERSION_XP':
            '(5, 1)',
            'WINDOWS_VERSION':
            '(%s, %s)' % (windows_major_ver, windows_minor_ver)
        }
        # Branding-specific variables are selected by the installer's embedded
        # product name.
        mini_installer_product_name = _GetProductName(mini_installer_abspath)
        if mini_installer_product_name == 'Google Chrome Installer':
            self._variable_mapping.update({
                'BRAND':
                'Google Chrome',
                'BINARIES_UPDATE_REGISTRY_SUBKEY':
                ('Software\\Google\\Update\\Clients\\'
                 '{4DC8B4CA-1BDA-483e-B5FA-D3C12E15B62D}'),
                'CHROME_DIR':
                'Google\\Chrome',
                'CHROME_HTML_PROG_ID':
                'ChromeHTML',
                'CHROME_HTML_PROG_ID_BETA':
                'ChromeBHTML',
                'CHROME_HTML_PROG_ID_DEV':
                'ChromeDHTML',
                'CHROME_HTML_PROG_ID_SXS':
                'ChromeSSHTM',
                'CHROME_LONG_NAME':
                'Google Chrome',
                'CHROME_SHORT_NAME':
                'Chrome',
                'CHROME_UPDATE_REGISTRY_SUBKEY':
                ('Software\\Google\\Update\\Clients\\'
                 '{8A69D345-D564-463c-AFF1-A69D9E530F96}'),
                'CHROME_CLIENT_STATE_KEY_BETA':
                ('Software\\Google\\Update\\ClientState\\'
                 '{8237E44A-0054-442C-B6B6-EA0509993955}'),
                'CHROME_CLIENT_STATE_KEY_DEV':
                ('Software\\Google\\Update\\ClientState\\'
                 '{401C381F-E0DE-4B85-8BD8-3F3F14FBDA57}'),
                'CHROME_CLIENT_STATE_KEY_SXS':
                ('Software\\Google\\Update\\ClientState\\'
                 '{4ea16ac7-fd5a-47c3-875b-dbf4a2008c20}'),
                'CHROME_CLIENT_STATE_KEY':
                ('Software\\Google\\Update\\ClientState\\'
                 '{8A69D345-D564-463c-AFF1-A69D9E530F96}'),
                'CHROME_TOAST_ACTIVATOR_CLSID':
                ('{A2C6CB58-C076-425C-ACB7-6D19D64428CD}'),
                'CHROME_DIR_BETA':
                'Google\\Chrome Beta',
                'CHROME_DIR_DEV':
                'Google\\Chrome Dev',
                'CHROME_DIR_SXS':
                'Google\\Chrome SxS',
                'CHROME_LONG_NAME_BETA':
                'Google Chrome Beta',
                'CHROME_LONG_NAME_DEV':
                'Google Chrome Dev',
                'CHROME_LONG_NAME_SXS':
                'Google Chrome SxS',
                'CHROME_SHORT_NAME_BETA':
                'ChromeBeta',
                'CHROME_SHORT_NAME_DEV':
                'ChromeDev',
                'CHROME_SHORT_NAME_SXS':
                'ChromeCanary',
                'CHROME_UPDATE_REGISTRY_SUBKEY_BETA':
                ('Software\\Google\\Update\\Clients\\'
                 '{8237E44A-0054-442C-B6B6-EA0509993955}'),
                'CHROME_UPDATE_REGISTRY_SUBKEY_DEV':
                ('Software\\Google\\Update\\Clients\\'
                 '{401C381F-E0DE-4B85-8BD8-3F3F14FBDA57}'),
                'CHROME_UPDATE_REGISTRY_SUBKEY_SXS':
                ('Software\\Google\\Update\\Clients\\'
                 '{4ea16ac7-fd5a-47c3-875b-dbf4a2008c20}'),
                'LAUNCHER_UPDATE_REGISTRY_SUBKEY':
                ('Software\\Google\\Update\\Clients\\'
                 '{FDA71E6F-AC4C-4a00-8B70-9958A68906BF}'),
                'CHROME_TOAST_ACTIVATOR_CLSID_BETA':
                ('{B89B137F-96AA-4AE2-98C4-6373EAA1EA4D}'),
                'CHROME_TOAST_ACTIVATOR_CLSID_DEV':
                ('{F01C03EB-D431-4C83-8D7A-902771E732FA}'),
                'CHROME_TOAST_ACTIVATOR_CLSID_SXS':
                ('{FA372A6E-149F-4E95-832D-8F698D40AD7F}'),
                'CHROME_ELEVATOR_CLSID':
                ('{708860E0-F641-4611-8895-7D867DD3675B}'),
                'CHROME_ELEVATOR_CLSID_BETA':
                ('{DD2646BA-3707-4BF8-B9A7-038691A68FC2}'),
                'CHROME_ELEVATOR_CLSID_DEV':
                ('{DA7FDCA5-2CAA-4637-AA17-0740584DE7DA}'),
                'CHROME_ELEVATOR_CLSID_SXS':
                ('{704C2872-2049-435E-A469-0A534313C42B}'),
                'CHROME_ELEVATOR_IID':
                ('{463ABECF-410D-407F-8AF5-0DF35A005CC8}'),
                'CHROME_ELEVATOR_IID_BETA':
                ('{A2721D66-376E-4D2F-9F0F-9070E9A42B5F}'),
                'CHROME_ELEVATOR_IID_DEV':
                ('{BB2AA26B-343A-4072-8B6F-80557B8CE571}'),
                'CHROME_ELEVATOR_IID_SXS':
                ('{4F7CE041-28E9-484F-9DD0-61A8CACEFEE4}'),
                'CHROME_ELEVATION_SERVICE_NAME':
                ('GoogleChromeElevationService'),
                'CHROME_ELEVATION_SERVICE_NAME_BETA':
                ('GoogleChromeBetaElevationService'),
                'CHROME_ELEVATION_SERVICE_NAME_DEV':
                ('GoogleChromeDevElevationService'),
                'CHROME_ELEVATION_SERVICE_NAME_SXS':
                ('GoogleChromeCanaryElevationService'),
                'CHROME_ELEVATION_SERVICE_DISPLAY_NAME':
                ('Google Chrome Elevation Service ' +
                 '(GoogleChromeElevationService)'),
                'CHROME_ELEVATION_SERVICE_DISPLAY_NAME_BETA':
                ('Google Chrome Beta Elevation Service'
                 ' (GoogleChromeBetaElevationService)'),
                'CHROME_ELEVATION_SERVICE_DISPLAY_NAME_DEV':
                ('Google Chrome Dev Elevation Service'
                 ' (GoogleChromeDevElevationService)'),
                'CHROME_ELEVATION_SERVICE_DISPLAY_NAME_SXS':
                ('Google Chrome Canary Elevation Service'),
            })
        elif mini_installer_product_name == 'Chromium Installer':
            self._variable_mapping.update({
                'BRAND':
                'Chromium',
                'BINARIES_UPDATE_REGISTRY_SUBKEY':
                'Software\\Chromium Binaries',
                'CHROME_DIR':
                'Chromium',
                'CHROME_HTML_PROG_ID':
                'ChromiumHTM',
                'CHROME_LONG_NAME':
                'Chromium',
                'CHROME_SHORT_NAME':
                'Chromium',
                'CHROME_UPDATE_REGISTRY_SUBKEY':
                'Software\\Chromium',
                'CHROME_CLIENT_STATE_KEY':
                'Software\\Chromium',
                'CHROME_TOAST_ACTIVATOR_CLSID':
                ('{635EFA6F-08D6-4EC9-BD14-8A0FDE975159}'),
                'CHROME_ELEVATOR_CLSID':
                ('{D133B120-6DB4-4D6B-8BFE-83BF8CA1B1B0}'),
                'CHROME_ELEVATOR_IID':
                ('{B88C45B9-8825-4629-B83E-77CC67D9CEED}'),
                'CHROME_ELEVATION_SERVICE_NAME':
                'ChromiumElevationService',
                'CHROME_ELEVATION_SERVICE_DISPLAY_NAME':
                ('Chromium Elevation Service (ChromiumElevationService)'),
            })
        else:
            # Unknown branding: fail loudly rather than expanding wrong values.
            raise KeyError("Unknown mini_installer product name '%s'" %
                           mini_installer_product_name)
    def SetLogFile(self, log_file):
        """Updates the value for the LOG_FILE variable"""
        self._variable_mapping['LOG_FILE'] = ('"--log-file=%s"' %
                                              log_file if log_file else '')
    def Expand(self, a_string):
        """Expands variables in the given string.

        This method resolves only variables defined in the constructor. It does
        not resolve environment variables. Any dollar signs that are not part of
        variables must be escaped with $$, otherwise a KeyError or a ValueError
        will be raised.

        Args:
            a_string: A string.

        Returns:
            A new string created by replacing variables with their values.
        """
        return string.Template(a_string).substitute(self._variable_mapping)
| ric2b/Vivaldi-browser | chromium/chrome/test/mini_installer/variable_expander.py | Python | bsd-3-clause | 17,079 |
<?php
namespace Drupal\Core\Render\Element;
use Drupal\Core\Form\FormStateInterface;
use Drupal\Core\Render\Element;
/**
* Provides an action button form element.
*
* When the button is pressed, the form will be submitted to Drupal, where it is
* validated and rebuilt. The submit handler is not invoked.
*
* Properties:
* - #limit_validation_errors: An array of form element keys that will block
* form submission when validation for these elements or any child elements
* fails. Specify an empty array to suppress all form validation errors.
* - #value: The text to be shown on the button.
*
*
* Usage Example:
* @code
* $form['actions']['preview'] = array(
* '#type' => 'button',
* '#value' => $this->t('Preview'),
* );
* @endcode
*
* @see \Drupal\Core\Render\Element\Submit
*
* @FormElement("button")
*/
class Button extends FormElement {

  /**
   * {@inheritdoc}
   */
  public function getInfo() {
    $class = get_class($this);
    return [
      '#input' => TRUE,
      '#name' => 'op',
      '#is_button' => TRUE,
      // Pressing the button rebuilds the form; submit handlers do not run.
      '#executes_submit_callback' => FALSE,
      '#limit_validation_errors' => FALSE,
      '#process' => [
        [$class, 'processButton'],
        [$class, 'processAjaxForm'],
      ],
      '#pre_render' => [
        [$class, 'preRenderButton'],
      ],
      '#theme_wrappers' => ['input__submit'],
    ];
  }

  /**
   * Processes a form button element.
   *
   * @param array $element
   *   The form element to process.
   * @param \Drupal\Core\Form\FormStateInterface $form_state
   *   The current state of the form.
   * @param array $complete_form
   *   The complete form structure.
   *
   * @return array
   *   The processed element.
   */
  public static function processButton(&$element, FormStateInterface $form_state, &$complete_form) {
    // A button that intentionally permits an incomplete submission (for
    // example a "Previous" or "Add another item" button) must also bypass
    // the browser's HTML5 client-side validation.
    if (isset($element['#limit_validation_errors']) && $element['#limit_validation_errors'] !== FALSE) {
      $element['#attributes']['formnovalidate'] = 'formnovalidate';
    }
    return $element;
  }

  /**
   * Prepares a #type 'button' render element for input.html.twig.
   *
   * @param array $element
   *   An associative array containing the properties of the element.
   *   Properties used: #attributes, #button_type, #name, #value. The
   *   #button_type property accepts any value, though core themes have CSS
   *   that styles the following button_types appropriately: 'primary',
   *   'danger'.
   *
   * @return array
   *   The $element with prepared variables ready for input.html.twig.
   */
  public static function preRenderButton($element) {
    $element['#attributes']['type'] = 'submit';
    Element::setAttributes($element, ['id', 'name', 'value']);
    $element['#attributes']['class'][] = 'button';
    // Add a variant class, e.g. "button--primary", for themes to style.
    if (!empty($element['#button_type'])) {
      $element['#attributes']['class'][] = 'button--' . $element['#button_type'];
    }
    $element['#attributes']['class'][] = 'js-form-submit';
    $element['#attributes']['class'][] = 'form-submit';
    if (!empty($element['#attributes']['disabled'])) {
      $element['#attributes']['class'][] = 'is-disabled';
    }
    return $element;
  }

}
| windtrader/drupalvm-d8 | web/core/lib/Drupal/Core/Render/Element/Button.php | PHP | gpl-2.0 | 3,071 |
import { Directive, EventEmitter } from '@angular/core';
import { KmlLayerManager } from './../services/managers/kml-layer-manager';
// Module-level counter used to hand out a unique id per directive instance.
var layerId = 0;
// ES5 output compiled from the TypeScript SebmGoogleMapKmlLayer directive
// (see the sourceMappingURL at the bottom). The directive renders a KML
// layer on the host map; all Google Maps API interaction is delegated to
// KmlLayerManager. NOTE(review): generated code — prefer editing the
// TypeScript source where possible.
export var SebmGoogleMapKmlLayer = (function () {
    function SebmGoogleMapKmlLayer(_manager) {
        this._manager = _manager;
        // Guards against registering the layer with the manager twice.
        this._addedToManager = false;
        this._id = (layerId++).toString();
        // Event-listener subscriptions, torn down in ngOnDestroy.
        this._subscriptions = [];
        /**
         * If true, the layer receives mouse events. Default value is true.
         */
        this.clickable = true;
        /**
         * By default, the input map is centered and zoomed to the bounding box of the contents of the
         * layer.
         * If this option is set to true, the viewport is left unchanged, unless the map's center and zoom
         * were never set.
         */
        this.preserveViewport = false;
        /**
         * Whether to render the screen overlays. Default true.
         */
        this.screenOverlays = true;
        /**
         * Suppress the rendering of info windows when layer features are clicked.
         */
        this.suppressInfoWindows = false;
        /**
         * The URL of the KML document to display.
         */
        this.url = null;
        /**
         * The z-index of the layer.
         */
        this.zIndex = null;
        /**
         * This event is fired when a feature in the layer is clicked.
         */
        this.layerClick = new EventEmitter();
        /**
         * This event is fired when the KML layers default viewport has changed.
         */
        this.defaultViewportChange = new EventEmitter();
        /**
         * This event is fired when the KML layer has finished loading.
         * At this point it is safe to read the status property to determine if the layer loaded
         * successfully.
         */
        this.statusChange = new EventEmitter();
    }
    // Registers the layer with the manager exactly once and wires up events.
    SebmGoogleMapKmlLayer.prototype.ngOnInit = function () {
        if (this._addedToManager) {
            return;
        }
        this._manager.addKmlLayer(this);
        this._addedToManager = true;
        this._addEventListeners();
    };
    // Forwards changed inputs to the underlying layer once it exists.
    SebmGoogleMapKmlLayer.prototype.ngOnChanges = function (changes) {
        if (!this._addedToManager) {
            return;
        }
        this._updatePolygonOptions(changes);
    };
    // NOTE(review): despite the "Polygon" in the name (presumably copied from
    // the polygon directive), this filters the changed bindings down to the
    // KML layer options in _kmlLayerOptions and passes them to the manager.
    SebmGoogleMapKmlLayer.prototype._updatePolygonOptions = function (changes) {
        var options = Object.keys(changes)
            .filter(function (k) { return SebmGoogleMapKmlLayer._kmlLayerOptions.indexOf(k) !== -1; })
            .reduce(function (obj, k) {
            obj[k] = changes[k].currentValue;
            return obj;
        }, {});
        if (Object.keys(options).length > 0) {
            this._manager.setOptions(this, options);
        }
    };
    // Bridges native layer events to the directive's EventEmitter outputs;
    // subscriptions are kept so ngOnDestroy can unsubscribe them.
    SebmGoogleMapKmlLayer.prototype._addEventListeners = function () {
        var _this = this;
        var listeners = [
            { name: 'click', handler: function (ev) { return _this.layerClick.emit(ev); } },
            { name: 'defaultviewport_changed', handler: function () { return _this.defaultViewportChange.emit(); } },
            { name: 'status_changed', handler: function () { return _this.statusChange.emit(); } },
        ];
        listeners.forEach(function (obj) {
            var os = _this._manager.createEventObservable(obj.name, _this).subscribe(obj.handler);
            _this._subscriptions.push(os);
        });
    };
    /** @internal */
    SebmGoogleMapKmlLayer.prototype.id = function () { return this._id; };
    /** @internal */
    SebmGoogleMapKmlLayer.prototype.toString = function () { return "SebmGoogleMapKmlLayer-" + this._id.toString(); };
    /** @internal */
    SebmGoogleMapKmlLayer.prototype.ngOnDestroy = function () {
        this._manager.deleteKmlLayer(this);
        // unsubscribe all registered observable subscriptions
        this._subscriptions.forEach(function (s) { return s.unsubscribe(); });
    };
    // Input names that map directly onto the layer's options object.
    SebmGoogleMapKmlLayer._kmlLayerOptions = ['clickable', 'preserveViewport', 'screenOverlays', 'suppressInfoWindows', 'url', 'zIndex'];
    // Compiler-emitted Angular decorator metadata; must stay in sync with
    // the inputs/outputs declared above.
    SebmGoogleMapKmlLayer.decorators = [
        { type: Directive, args: [{
                    selector: 'sebm-google-map-kml-layer',
                    inputs: ['clickable', 'preserveViewport', 'screenOverlays', 'suppressInfoWindows', 'url', 'zIndex'],
                    outputs: ['layerClick', 'defaultViewportChange', 'statusChange']
                },] },
    ];
    /** @nocollapse */
    SebmGoogleMapKmlLayer.ctorParameters = function () { return [
        { type: KmlLayerManager, },
    ]; };
    return SebmGoogleMapKmlLayer;
}());
//# sourceMappingURL=google-map-kml-layer.js.map
# Empty helper module for cat-related views; no helper methods are defined
# yet. Presumably a Rails-style view helper mixed into templates — confirm
# against the application's helper configuration.
module CatsHelper
end
| metaminded/cruddler | test/dummy/app/helpers/cats_helper.rb | Ruby | mit | 22 |
<?php
// Maps ISO 15924 script codes (e.g. 'Armn', 'Beng') to their European
// Portuguese (pt_PT) display names.
// NOTE(review): this looks like generated locale data — confirm before
// hand-editing, as a data regeneration would overwrite changes.
return [
    'Names' => [
        'Aran' => 'nasta’liq',
        'Armn' => 'arménio',
        'Beng' => 'bengalês',
        'Egyd' => 'egípcio demótico',
        'Egyh' => 'egípcio hierático',
        'Ethi' => 'etíope',
        'Hanb' => 'han com bopomofo',
        'Inds' => 'indus',
        'Orya' => 'odia',
        'Sylo' => 'siloti nagri',
        'Tale' => 'tai le',
        'Telu' => 'telugu',
        'Zsym' => 'símbolos',
        'Zxxx' => 'não escrito',
    ],
];
| derrabus/symfony | src/Symfony/Component/Intl/Resources/data/scripts/pt_PT.php | PHP | mit | 493 |
"""
tests.components.automation.test_location
±±±~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Tests location automation.
"""
import unittest
from homeassistant.components import automation, zone
from tests.common import get_test_home_assistant
class TestAutomationZone(unittest.TestCase):
    """Tests zone-based automation triggers and conditions."""

    # Coordinates inside the test zone (within 250 m of its center).
    INSIDE_ZONE = {'latitude': 32.880586, 'longitude': -117.237564}
    # Coordinates outside the test zone's 250 m radius.
    OUTSIDE_ZONE = {'latitude': 32.881011, 'longitude': -117.234758}

    def setUp(self):  # pylint: disable=invalid-name
        """Creates a test Home Assistant instance with a 250 m test zone."""
        self.hass = get_test_home_assistant()
        zone.setup(self.hass, {
            'zone': {
                'name': 'test',
                'latitude': 32.880837,
                'longitude': -117.237561,
                'radius': 250,
            }
        })

        self.calls = []

        def record_call(service):
            """Records each service call so tests can assert on call counts."""
            self.calls.append(service)

        self.hass.services.register('test', 'automation', record_call)

    def tearDown(self):  # pylint: disable=invalid-name
        """Stops the Home Assistant instance started in setUp."""
        self.hass.stop()

    def _set_entity_location(self, coordinates):
        """Moves test.entity to the given coordinates and flushes updates.

        Args:
            coordinates: Dict with 'latitude' and 'longitude' keys.
        """
        self.hass.states.set('test.entity', 'hello', coordinates)
        self.hass.pool.block_till_done()

    def _setup_zone_automation(self, event):
        """Sets up an automation with a zone trigger for test.entity.

        Args:
            event: The zone trigger event type, 'enter' or 'leave'.

        Returns:
            True if the automation component was set up successfully.
        """
        return automation.setup(self.hass, {
            automation.DOMAIN: {
                'trigger': {
                    'platform': 'zone',
                    'entity_id': 'test.entity',
                    'zone': 'zone.test',
                    'event': event,
                },
                'action': {
                    'service': 'test.automation',
                }
            }
        })

    def test_if_fires_on_zone_enter(self):
        """The 'enter' trigger fires when the entity moves into the zone."""
        self._set_entity_location(self.OUTSIDE_ZONE)
        self.assertTrue(self._setup_zone_automation('enter'))

        self._set_entity_location(self.INSIDE_ZONE)
        self.assertEqual(1, len(self.calls))

    def test_if_not_fires_for_enter_on_zone_leave(self):
        """The 'enter' trigger does not fire when the entity leaves."""
        self._set_entity_location(self.INSIDE_ZONE)
        self.assertTrue(self._setup_zone_automation('enter'))

        self._set_entity_location(self.OUTSIDE_ZONE)
        self.assertEqual(0, len(self.calls))

    def test_if_fires_on_zone_leave(self):
        """The 'leave' trigger fires when the entity moves out of the zone."""
        self._set_entity_location(self.INSIDE_ZONE)
        self.assertTrue(self._setup_zone_automation('leave'))

        self._set_entity_location(self.OUTSIDE_ZONE)
        self.assertEqual(1, len(self.calls))

    def test_if_not_fires_for_leave_on_zone_enter(self):
        """The 'leave' trigger does not fire when the entity enters."""
        self._set_entity_location(self.OUTSIDE_ZONE)
        self.assertTrue(self._setup_zone_automation('leave'))

        self._set_entity_location(self.INSIDE_ZONE)
        self.assertEqual(0, len(self.calls))

    def test_zone_condition(self):
        """A zone condition passes while the entity is inside the zone."""
        self._set_entity_location(self.INSIDE_ZONE)

        self.assertTrue(automation.setup(self.hass, {
            automation.DOMAIN: {
                'trigger': {
                    'platform': 'event',
                    'event_type': 'test_event'
                },
                'condition': {
                    'platform': 'zone',
                    'entity_id': 'test.entity',
                    'zone': 'zone.test',
                },
                'action': {
                    'service': 'test.automation',
                }
            }
        }))

        self.hass.bus.fire('test_event')
        self.hass.pool.block_till_done()
        self.assertEqual(1, len(self.calls))
| toddeye/home-assistant | tests/components/automation/test_zone.py | Python | mit | 5,342 |