code
stringlengths 3
1.05M
| repo_name
stringlengths 4
116
| path
stringlengths 4
991
| language
stringclasses 9
values | license
stringclasses 15
values | size
int32 3
1.05M
|
---|---|---|---|---|---|
var expect = require('expect.js'),
defaultOpts = require('..').prototype.options,
_ = require('lodash'),
parse = require('../lib/parse'),
render = require('../lib/render');
// Parse `str` as HTML using the default cheerio options (merged with any
// caller-supplied overrides) and serialize the resulting DOM back to a string.
var html = function(str, options) {
  var opts = _.defaults(options || {}, defaultOpts);
  var parsed = parse(str, opts);
  // NOTE(review): options are intentionally not forwarded to render() on this
  // path (the xml helper does forward them) — confirm against cheerio's
  // render API before changing.
  return render(parsed);
};
// Like the html helper, but forces xmlMode on and passes the options through
// to the renderer so output honours XML serialization rules (e.g. CDATA).
var xml = function(str, options) {
  var opts = _.defaults(options || {}, defaultOpts);
  opts.xmlMode = true;
  return render(parse(str, opts), opts);
};
// Round-trip tests for cheerio's `render` module: each case parses an HTML
// (or XML) snippet and asserts on the re-serialized output string.
describe('render', function() {
describe('(html)', function() {
// Void elements are serialized without the XML-style self-closing slash.
it('should render <br /> tags correctly', function() {
var str = '<br />';
expect(html(str)).to.equal('<br>');
});
// NOTE(review): the expected output embeds unescaped double quotes inside a
// double-quoted attribute — this pins the serializer's historical behavior.
it('should handle double quotes within single quoted attributes properly', function() {
var str = '<hr class=\'an "edge" case\' />';
expect(html(str)).to.equal('<hr class="an "edge" case">');
});
// Attribute values must survive the parse/render round trip unchanged.
it('should retain encoded HTML content within attributes', function() {
var str = '<hr class="cheerio & node = happy parsing" />';
expect(html(str)).to.equal('<hr class="cheerio & node = happy parsing">');
});
// Boolean attributes render in their minimized form...
it('should shorten the "checked" attribute when it contains the value "checked"', function() {
var str = '<input checked/>';
expect(html(str)).to.equal('<input checked>');
});
// ...but only genuinely boolean attributes — name="name" must stay expanded.
it('should not shorten the "name" attribute when it contains the value "name"', function() {
var str = '<input name="name"/>';
expect(html(str)).to.equal('<input name="name">');
});
it('should render comments correctly', function() {
var str = '<!-- comment -->';
expect(html(str)).to.equal('<!-- comment -->');
});
// Inter-element whitespace is preserved unless normalization is requested.
it('should render whitespace by default', function() {
var str = '<a href="./haha.html">hi</a> <a href="./blah.html">blah</a>';
expect(html(str)).to.equal(str);
});
it('should normalize whitespace if specified', function() {
var str = '<a href="./haha.html">hi</a> <a href="./blah.html">blah </a>';
expect(html(str, { normalizeWhitespace: true })).to.equal('<a href="./haha.html">hi</a> <a href="./blah.html">blah </a>');
});
it('should preserve multiple hyphens in data attributes', function() {
var str = '<div data-foo-bar-baz="value"></div>';
expect(html(str)).to.equal('<div data-foo-bar-baz="value"></div>');
});
// CDATA sections are only meaningful in xmlMode, hence the xml helper here.
it('should render CDATA correctly', function() {
var str = '<a> <b> <![CDATA[ asdf&asdf ]]> <c/> <![CDATA[ asdf&asdf ]]> </b> </a>';
expect(xml(str)).to.equal(str);
});
});
});
| JHand93/WebPerformanceTestSuite | webpagetest-charts-api/node_modules/cheerio/test/render.js | JavaScript | mit | 2,628 |
"use strict";
var path = require('canonical-path');
var packagePath = __dirname;
var Package = require('dgeni').Package;
// Create and export a new Dgeni package called angularjs. This package depends upon
// the ngdoc, nunjucks, and examples packages defined in the dgeni-packages npm module.
// Dgeni package for generating the AngularJS documentation. Extends the
// ngdoc, nunjucks, examples, and git packages from dgeni-packages with
// project-specific services, processors, and configuration.
module.exports = new Package('angularjs', [
require('dgeni-packages/ngdoc'),
require('dgeni-packages/nunjucks'),
require('dgeni-packages/examples'),
require('dgeni-packages/git')
])
// Injectable services used by the processors below.
.factory(require('./services/errorNamespaceMap'))
.factory(require('./services/getMinerrInfo'))
.factory(require('./services/getVersion'))
// One deployment service per build flavour of the docs app.
.factory(require('./services/deployments/debug'))
.factory(require('./services/deployments/default'))
.factory(require('./services/deployments/jquery'))
.factory(require('./services/deployments/production'))
.factory(require('./inline-tag-defs/type'))
// Document-pipeline processors specific to the AngularJS docs.
.processor(require('./processors/error-docs'))
.processor(require('./processors/index-page'))
.processor(require('./processors/keywords'))
.processor(require('./processors/pages-data'))
.processor(require('./processors/versions-data'))
// Core setup: fail fast on any error, and define where sources are read
// from and where rendered docs are written.
.config(function(dgeni, log, readFilesProcessor, writeFilesProcessor) {
dgeni.stopOnValidationError = true;
dgeni.stopOnProcessingError = true;
log.level = 'info';
// basePath is the repository root (two levels up from docs/config).
readFilesProcessor.basePath = path.resolve(__dirname,'../..');
readFilesProcessor.sourceFiles = [
{ include: 'src/**/*.js', exclude: 'src/angular.bind.js', basePath: 'src' },
{ include: 'docs/content/**/*.ngdoc', basePath: 'docs/content' }
];
writeFilesProcessor.outputFolder = 'build/docs';
})
// Register the custom @tutorial-step and @sortOrder tags.
.config(function(parseTagsProcessor) {
parseTagsProcessor.tagDefinitions.push(require('./tag-defs/tutorial-step'));
parseTagsProcessor.tagDefinitions.push(require('./tag-defs/sortOrder'));
})
// Register the {@type ...} inline tag.
.config(function(inlineTagProcessor, typeInlineTagDef) {
inlineTagProcessor.inlineTagDefinitions.push(typeInlineTagDef);
})
// Prefer local templates and expose git metadata to every template.
.config(function(templateFinder, renderDocsProcessor, gitData) {
templateFinder.templateFolders.unshift(path.resolve(packagePath, 'templates'));
renderDocsProcessor.extraData.git = gitData;
})
// URL path and output-file templates, plus id/alias templates, per doc type.
.config(function(computePathsProcessor, computeIdsProcessor) {
computePathsProcessor.pathTemplates.push({
docTypes: ['error'],
pathTemplate: 'error/${namespace}/${name}',
outputPathTemplate: 'partials/error/${namespace}/${name}.html'
});
computePathsProcessor.pathTemplates.push({
docTypes: ['errorNamespace'],
pathTemplate: 'error/${name}',
outputPathTemplate: 'partials/error/${name}.html'
});
computePathsProcessor.pathTemplates.push({
docTypes: ['overview', 'tutorial'],
// Pages named "index" map to their directory path; others keep their name.
getPath: function(doc) {
var docPath = path.dirname(doc.fileInfo.relativePath);
if ( doc.fileInfo.baseName !== 'index' ) {
docPath = path.join(docPath, doc.fileInfo.baseName);
}
return docPath;
},
outputPathTemplate: 'partials/${path}.html'
});
computePathsProcessor.pathTemplates.push({
docTypes: ['e2e-test'],
// e2e tests have no rendered page of their own, only an output file.
getPath: function() {},
outputPathTemplate: 'ptore2e/${example.id}/${deployment.name}_test.js'
});
computePathsProcessor.pathTemplates.push({
docTypes: ['indexPage'],
pathTemplate: '.',
outputPathTemplate: '${id}.html'
});
computePathsProcessor.pathTemplates.push({
docTypes: ['module' ],
pathTemplate: '${area}/${name}',
outputPathTemplate: 'partials/${area}/${name}.html'
});
computePathsProcessor.pathTemplates.push({
docTypes: ['componentGroup' ],
pathTemplate: '${area}/${moduleName}/${groupType}',
outputPathTemplate: 'partials/${area}/${moduleName}/${groupType}.html'
});
computeIdsProcessor.idTemplates.push({
docTypes: ['overview', 'tutorial', 'e2e-test', 'indexPage'],
getId: function(doc) { return doc.fileInfo.baseName; },
getAliases: function(doc) { return [doc.id]; }
});
computeIdsProcessor.idTemplates.push({
docTypes: ['error'],
getId: function(doc) { return 'error:' + doc.namespace + ':' + doc.name; },
getAliases: function(doc) { return [doc.name, doc.namespace + ':' + doc.name, doc.id]; }
},
{
docTypes: ['errorNamespace'],
getId: function(doc) { return 'error:' + doc.name; },
getAliases: function(doc) { return [doc.id]; }
}
);
})
.config(function(checkAnchorLinksProcessor) {
checkAnchorLinksProcessor.base = '/';
// We are only interested in docs that have an area (i.e. they are pages)
checkAnchorLinksProcessor.checkDoc = function(doc) { return doc.area; };
})
// Wire the deployment services into the processors that generate index
// pages, protractor tests, and runnable examples.
.config(function(
generateIndexPagesProcessor,
generateProtractorTestsProcessor,
generateExamplesProcessor,
debugDeployment, defaultDeployment,
jqueryDeployment, productionDeployment) {
generateIndexPagesProcessor.deployments = [
debugDeployment,
defaultDeployment,
jqueryDeployment,
productionDeployment
];
generateProtractorTestsProcessor.deployments = [
defaultDeployment,
jqueryDeployment
];
generateProtractorTestsProcessor.basePath = 'build/docs/';
generateExamplesProcessor.deployments = [
debugDeployment,
defaultDeployment,
jqueryDeployment,
productionDeployment
];
})
// componentGroup docs should not pollute the search index.
.config(function(generateKeywordsProcessor) {
generateKeywordsProcessor.docTypesToIgnore = ['componentGroup'];
});
| JonFerrera/angular.js | docs/config/index.js | JavaScript | mit | 5,289 |
//---------------------------------------------------------------------
// <copyright file="CustomizeNamingTest.cs" company="Microsoft">
// Copyright (C) Microsoft Corporation. All rights reserved. See License.txt in the project root for license information.
// </copyright>
//---------------------------------------------------------------------
namespace Microsoft.Test.OData.Tests.Client.CodeGenerationTests
{
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using Microsoft.OData.Core;
using Microsoft.OData.Edm;
using Microsoft.Spatial;
using Microsoft.Test.OData.Services.TestServices;
using Microsoft.Test.OData.Services.TestServices.ODataWCFServiceReferencePlus;
using Microsoft.Test.OData.Tests.Client.Common;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using ODataClient = Microsoft.OData.Client;
/// <summary>
/// T4 code generation for operations test cases.
/// </summary>
[TestClass]
public class CustomizeNamingTest : ODataWCFServiceTestsBase<Microsoft.Test.OData.Services.TestServices.ODataWCFServiceReferencePlus.InMemoryEntitiesPlus>
{
    // Server-side model namespace; client-side names carry the "Plus" suffix.
    private const string ServerSideNameSpacePrefix = "Microsoft.Test.OData.Services.ODataWCFService.";

    public CustomizeNamingTest()
        : base(ServiceDescriptors.ODataWCFServiceDescriptor)
    {
    }

    /// <summary>
    /// Verifies basic LINQ queries (expand, select, filter, ordering, count,
    /// multi-key lookup) against the service via the customized client names.
    /// </summary>
    [TestMethod]
    public void BasicQuery()
    {
        TestClientContext.MergeOption = Microsoft.OData.Client.MergeOption.OverwriteChanges;
        // Query an entity set
        var products1 = TestClientContext.ProductsPlus.ToList();
        Assert.AreEqual(5, products1.Count);
        // Query with expand (Linq)
        var products2 = TestClientContext.ProductsPlus.Expand(p => p.DetailsPlus).ToList();
        Assert.AreEqual(5, products2.Single(p => p.ProductIDPlus == 5).DetailsPlus.Count);
        // Query with expand (PropertyName) — expand strings use server-side names.
        var products3 = TestClientContext.ProductsPlus.Expand("Details").ToList();
        Assert.AreEqual(5, products3.Single(p => p.ProductIDPlus == 5).DetailsPlus.Count);
        // Query an individual primitive property
        var product4 = TestClientContext.ProductsPlus.Where(p => p.ProductIDPlus == 5).Single();
        Assert.AreEqual("Cheetos", product4.NamePlus);
        // Query a Navigation Property
        TestClientContext.LoadProperty(product4, "Details");
        Assert.AreEqual(5, product4.DetailsPlus.Count);
        // Query a Derived entity.
        var people5 = TestClientContext.PeoplePlus.Where(p => p.PersonIDPlus == 1).Single();
        // Check the property from the derived type.
        Assert.AreEqual("Tokyo", people5.HomeAddressPlus.CityPlus);
        // Check the derived complex property.
        Assert.AreEqual("Cats", ((HomeAddressPlus)(people5.HomeAddressPlus)).FamilyNamePlus);
        // Check collection of PrimitiveTypes
        Assert.AreEqual(1, people5.EmailsPlus.Count);
        // Query with $select & $expand
        var accounts6 = TestClientContext.AccountsPlus
            .Where(a => a.AccountIDPlus == 103)
            .Select(a => new AccountPlus() { AccountIDPlus = a.AccountIDPlus, MyGiftCardPlus = a.MyGiftCardPlus, CountryRegionPlus = a.CountryRegionPlus });
        var account6 = accounts6.Single();
        Assert.IsNotNull(account6.MyGiftCardPlus);
        Assert.AreEqual(103, account6.AccountIDPlus);
        // AccountInfo was not selected, so it must not be materialized.
        Assert.IsNull(account6.AccountInfoPlus);
        // Query with $filter by non-key property.
        var accounts7 = TestClientContext.AccountsPlus.Where(a => a.CountryRegionPlus == "CN").ToList();
        Assert.AreEqual(3, accounts7.Count);
        // Query with OrderBy
        var people8 = TestClientContext.PeoplePlus.OrderBy((p) => p.LastNamePlus).First();
        Assert.AreEqual(5, people8.PersonIDPlus);
        // Query with $count (expected value comes first, per MSTest convention)
        var count = TestClientContext.AccountsPlus.Count();
        Assert.AreEqual(7, count);
        // Query with MultiKeys
        var productReview10 = TestClientContext.ProductReviewsPlus.Where(pd =>
            pd.ProductDetailIDPlus == 2
            && pd.ProductIDPlus == 5
            && pd.ReviewTitlePlus == "Special"
            && pd.RevisionIDPlus == 1).First();
        Assert.AreEqual("Andy", productReview10.AuthorPlus);
    }

    /// <summary>
    /// Verifies create/update/delete flows: AddRelatedObject, UpdateObject,
    /// UpdateRelatedObject, derived-type insertion, AddLink/SetLink/DeleteLink.
    /// </summary>
    [TestMethod]
    public void BasicModify()
    {
        TestClientContext.MergeOption = Microsoft.OData.Client.MergeOption.OverwriteChanges;
        TestClientContext.IgnoreMissingProperties = true;
        // AddRelatedObject
        AccountPlus newAccount1 = new AccountPlus()
        {
            AccountIDPlus = 110,
            CountryRegionPlus = "CN",
            AccountInfoPlus = new AccountInfoPlus()
            {
                FirstNamePlus = "New",
                LastNamePlus = "Boy"
            }
        };
        PaymentInstrumentPlus newPI = new PaymentInstrumentPlus()
        {
            PaymentInstrumentIDPlus = 110901,
            FriendlyNamePlus = "110's first PI",
            CreatedDatePlus = new DateTimeOffset(new DateTime(2012, 12, 10))
        };
        TestClientContext.AddToAccountsPlus(newAccount1);
        TestClientContext.AddRelatedObject(newAccount1, "MyPaymentInstruments", newPI);
        TestClientContext.SaveChanges();
        var r1 = TestClientContext.AccountsPlus.Where(account => account.AccountIDPlus == 110).Single();
        Assert.AreEqual("Boy", r1.AccountInfoPlus.LastNamePlus);
        var r2 = TestClientContext.CreateQuery<PaymentInstrumentPlus>("Accounts(110)/MyPaymentInstruments")
            .Where(pi => pi.PaymentInstrumentIDPlus == 110901).Single();
        Assert.AreEqual("110's first PI", r2.FriendlyNamePlus);
        //UpdateObject
        newAccount1.CountryRegionPlus = "US";
        TestClientContext.UpdateObject(newAccount1);
        TestClientContext.SaveChanges();
        r1 = TestClientContext.AccountsPlus.Where(account => account.AccountIDPlus == 110).Single();
        Assert.AreEqual("US", r1.CountryRegionPlus);
        //UpdateRelatedObject
        var myGiftCard = new GiftCardPlus()
        {
            GiftCardIDPlus = 11111,
            GiftCardNOPlus = "11111",
            AmountPlus = 20,
            ExperationDatePlus = new DateTimeOffset(2015, 12, 1, 0, 0, 0, new TimeSpan(0))
        };
        TestClientContext.UpdateRelatedObject(newAccount1, "MyGiftCard", myGiftCard);
        TestClientContext.SaveChanges();
        r1 = TestClientContext.AccountsPlus.Expand(account => account.MyGiftCardPlus).Where(account => account.AccountIDPlus == 110).Single();
        Assert.AreEqual(11111, r1.MyGiftCardPlus.GiftCardIDPlus);
        //Add Derived Object
        CustomerPlus customerPlus = new CustomerPlus()
        {
            FirstNamePlus = "Nelson",
            MiddleNamePlus = "S.",
            LastNamePlus = "Black",
            NumbersPlus = new ObservableCollection<string> { "111-111-1111" },
            EmailsPlus = new ObservableCollection<string> { "abc@abc.com" },
            PersonIDPlus = 10001,
            BirthdayPlus = new DateTimeOffset(new DateTime(1957, 4, 3)),
            CityPlus = "London",
            HomePlus = GeographyPoint.Create(32.1, 23.1),
            TimeBetweenLastTwoOrdersPlus = new TimeSpan(1),
            HomeAddressPlus = new HomeAddressPlus()
            {
                CityPlus = "London",
                PostalCodePlus = "98052",
                StreetPlus = "1 Microsoft Way",
                FamilyNamePlus = "Black's Family"
            },
        };
        var ordersPlus = new ODataClient.DataServiceCollection<OrderPlus>(TestClientContext)
        {
            new OrderPlus()
            {
                OrderIDPlus = 11111111,
                OrderDatePlus = new DateTimeOffset(new DateTime(2011, 5, 29, 14, 21, 12)),
                ShelfLifePlus = new TimeSpan(1),
                OrderShelfLifesPlus = new ObservableCollection<TimeSpan>(){new TimeSpan(1)}
            }
        };
        TestClientContext.AddToPeoplePlus(customerPlus);
        TestClientContext.SaveChanges();
        var customer1 = TestClientContext.CustomersPlus.Where(c => c.PersonIDPlus == 10001).Single();
        TestClientContext.AddLink(customer1, "Orders", ordersPlus[0]);
        TestClientContext.SaveChanges();
        TestClientContext.Detach(customerPlus);
        TestClientContext.SaveChanges();
        var customer = TestClientContext.CustomersPlus.Expand(p => (p as CustomerPlus).OrdersPlus).Where(p => p.PersonIDPlus == 10001).SingleOrDefault();
        Assert.AreEqual(((CustomerPlus)customer).CityPlus, "London");
        Assert.AreEqual(((HomeAddressPlus)(customer.HomeAddressPlus)).FamilyNamePlus, "Black's Family");
        Assert.AreEqual(((CustomerPlus)customer).OrdersPlus.Count, 1);
        var order = TestClientContext.OrdersPlus.Where(p => p.OrderIDPlus == 11111111).SingleOrDefault();
        Assert.AreEqual(order.OrderShelfLifesPlus.Count, 1);
        // DeleteObject
        TestClientContext.DeleteObject(newAccount1);
        TestClientContext.SaveChanges();
        var accounts = TestClientContext.AccountsPlus.ToList();
        Assert.IsFalse(accounts.Any(ac => ac.AccountIDPlus == 110));
        // SetLink
        var person1 = TestClientContext.PeoplePlus.Where((p) => p.PersonIDPlus == 1).Single();
        var person2 = TestClientContext.PeoplePlus.Where((p) => p.PersonIDPlus == 2).Single();
        TestClientContext.SetLink(person1, "Parent", person2);
        TestClientContext.SaveChanges();
        person1 = TestClientContext.PeoplePlus.Expand(d => d.ParentPlus).Where((p) => p.PersonIDPlus == 1).Single();
        Assert.IsNotNull(person1.ParentPlus);
        // Fixed: the original asserted IsNotNull on a boolean expression,
        // which can never fail; assert the actual parent id instead.
        Assert.AreEqual(2, person1.ParentPlus.PersonIDPlus);
        // SetLink : Bug, SetLink to Null will not update the client object.
        TestClientContext.SetLink(person1, "Parent", null);
        TestClientContext.SaveChanges();
        person1.ParentPlus = null;
        var person3 = TestClientContext.PeoplePlus.Expand(d => d.ParentPlus).Where((p) => p.PersonIDPlus == 1).Single();
        Assert.IsNull(person3.ParentPlus);
        //AddLink
        var companyPlus = TestClientContext.CompanyPlus.GetValue();
        DepartmentPlus department = new DepartmentPlus()
        {
            DepartmentIDPlus = 100001,
            NamePlus = "ID" + 100001,
        };
        TestClientContext.AddToDepartmentsPlus(department);
        TestClientContext.AddLink(companyPlus, "Departments", department);
        TestClientContext.SaveChanges();
        TestClientContext.LoadProperty(companyPlus, "Departments");
        Assert.IsTrue(companyPlus.DepartmentsPlus.Any(d => d.DepartmentIDPlus == department.DepartmentIDPlus));
        //Delete Link
        TestClientContext.DeleteLink(companyPlus, "Departments", department);
        TestClientContext.SaveChanges();
        TestClientContext.LoadProperty(companyPlus, "Departments");
        Assert.IsFalse(companyPlus.DepartmentsPlus.Any(d => d.DepartmentIDPlus == department.DepartmentIDPlus));
    }

    /// <summary>
    /// Verifies that open (dynamic) properties on a complex type round-trip:
    /// properties unknown to the generated client are tolerated on PATCH and
    /// the known properties still deserialize correctly afterwards.
    /// </summary>
    [TestMethod]
    public void OpenComplexType()
    {
        //Create entity with open complex type
        AccountPlus account = new AccountPlus()
        {
            AccountIDPlus = 1000000,
            CountryRegionPlus = "CN",
            AccountInfoPlus = new AccountInfoPlus()
            {
                FirstNamePlus = "Peter",
                MiddleNamePlus = "White",
                LastNamePlus = "Andy",
                IsActivePlus = true
            }
        };
        TestClientContext.AddToAccountsPlus(account);
        TestClientContext.SaveChanges();
        //Check account can be correctly deserialized.
        account = TestClientContext.AccountsPlus.Where(a => a.AccountIDPlus == 1000000).Single();
        Assert.IsNotNull(account);
        Assert.AreEqual(account.AccountInfoPlus.MiddleNamePlus, "White");
        Assert.IsTrue(account.AccountInfoPlus.IsActivePlus);
        //Update entity with open complex type via a hand-built PATCH payload
        //(uses server-side names, bypassing the generated client).
        var entry = new ODataEntry() { TypeName = ServerSideNameSpacePrefix + "Account" };
        entry.Properties = new[]
        {
            new ODataProperty { Name = "AccountID", Value = 1000000 },
            new ODataProperty
            {
                Name = "AccountInfo",
                Value = new ODataComplexValue
                {
                    TypeName = ServerSideNameSpacePrefix + "AccountInfo",
                    Properties = new[]
                    {
                        new ODataProperty
                        {
                            Name = "FirstName",
                            Value = "Peter"
                        },
                        new ODataProperty
                        {
                            Name = "LastName",
                            Value = "Andy"
                        },
                        //Property that exists in Customer-Defined client code.
                        new ODataProperty
                        {
                            Name = "MiddleName",
                            Value = "White2"
                        },
                        new ODataProperty
                        {
                            Name = "IsActive",
                            Value = false,
                        },
                        //Property that doesn't exist in Customer-Defined client code.
                        new ODataProperty
                        {
                            Name = "ShippingAddress",
                            Value = "#999, ZiXing Road"
                        }
                    }
                }
            }
        };
        var settings = new ODataMessageWriterSettings();
        settings.PayloadBaseUri = ServiceBaseUri;
        var accountType = Model.FindDeclaredType(ServerSideNameSpacePrefix + "Account") as IEdmEntityType;
        var accountSet = Model.EntityContainer.FindEntitySet("Accounts");
        var requestMessage = new HttpWebRequestMessage(new Uri(ServiceBaseUri + "Accounts(1000000)"));
        requestMessage.SetHeader("Content-Type", MimeTypes.ApplicationJson);
        requestMessage.SetHeader("Accept", MimeTypes.ApplicationJson);
        requestMessage.Method = "PATCH";
        using (var messageWriter = new ODataMessageWriter(requestMessage, settings))
        {
            var odataWriter = messageWriter.CreateODataEntryWriter(accountSet, accountType);
            odataWriter.WriteStart(entry);
            odataWriter.WriteEnd();
        }
        var responseMessage = requestMessage.GetResponse();
        TestClientContext.MergeOption = Microsoft.OData.Client.MergeOption.OverwriteChanges;
        //Check account can be correctly deserialized.
        account = TestClientContext.AccountsPlus.Where(a => a.AccountIDPlus == 1000000).Single();
        Assert.IsNotNull(account);
        Assert.AreEqual(account.AccountInfoPlus.MiddleNamePlus, "White2");
        Assert.IsFalse(account.AccountInfoPlus.IsActivePlus);
    }

    /// <summary>
    /// Verifies open (dynamic) properties on an entity type: PATCH via a raw
    /// OData payload, then read and update through the generated client.
    /// </summary>
    [TestMethod]
    public void OpenEntityType()
    {
        //UpdateOpenTypeSingleton
        var entry = new ODataEntry() { TypeName = ServerSideNameSpacePrefix + "PublicCompany" };
        entry.Properties = new[]
        {
            new ODataProperty
            {
                Name = "FullName",
                Value = "MS Ltd."
            },
            new ODataProperty
            {
                Name = "PhoneNumber",
                Value = "123-45678"
            },
            new ODataProperty
            {
                Name = "TotalAssets",
                Value = 500000L,
            }
        };
        var settings = new ODataMessageWriterSettings();
        settings.PayloadBaseUri = ServiceBaseUri;
        settings.AutoComputePayloadMetadataInJson = true;
        var companyType = Model.FindDeclaredType(ServerSideNameSpacePrefix + "PublicCompany") as IEdmEntityType;
        var companySingleton = Model.EntityContainer.FindSingleton("PublicCompany");
        var requestMessage = new HttpWebRequestMessage(new Uri(ServiceBaseUri + "PublicCompany"));
        requestMessage.SetHeader("Content-Type", MimeTypes.ApplicationJson);
        requestMessage.SetHeader("Accept", MimeTypes.ApplicationJson);
        requestMessage.Method = "PATCH";
        using (var messageWriter = new ODataMessageWriter(requestMessage, settings))
        {
            var odataWriter = messageWriter.CreateODataEntryWriter(companySingleton, companyType);
            odataWriter.WriteStart(entry);
            odataWriter.WriteEnd();
        }
        var responseMessage = requestMessage.GetResponse();
        Assert.AreEqual(204, responseMessage.StatusCode);
        //Check company can be correctly deserialized.
        var company = TestClientContext.PublicCompanyPlus.GetValue();
        Assert.IsNotNull(company);
        Assert.AreEqual("MS Ltd.", company.FullNamePlus);
        Assert.AreEqual(500000, company.TotalAssetsPlus);
        TestClientContext.MergeOption = Microsoft.OData.Client.MergeOption.OverwriteChanges;
        company.FullNamePlus = "MS2 Ltd.";
        company.TotalAssetsPlus = 1000000;
        TestClientContext.UpdateObject(company);
        TestClientContext.SaveChanges();
        // Clobber the local values, then re-fetch to prove the server state won.
        company.FullNamePlus = null;
        company.TotalAssetsPlus = 0;
        company = TestClientContext.PublicCompanyPlus.GetValue();
        Assert.IsNotNull(company);
        Assert.AreEqual("MS2 Ltd.", company.FullNamePlus);
        Assert.AreEqual(1000000, company.TotalAssetsPlus);
    }

    /// <summary>
    /// Verifies unbound and bound function/action invocation through the
    /// customized client (on entities, entity sets, and navigation properties).
    /// </summary>
    [TestMethod]
    public void InvokeOperations()
    {
        TestClientContext.MergeOption = Microsoft.OData.Client.MergeOption.OverwriteChanges;
        // Invoke Unbounded Action
        var color1 = TestClientContext.GetDefaultColorPlus().GetValue();
        Assert.AreEqual(color1, ColorPlus.RedPlus);
        // Invoke Bounded Function on single entity
        var account = TestClientContext.AccountsPlus.Where(a => a.AccountIDPlus == 101).Single();
        var r2 = account.GetDefaultPIPlus().GetValue();
        Assert.AreEqual(101901, r2.PaymentInstrumentIDPlus);
        // Invoke bounded Function on Navigation Property
        var account3 = TestClientContext.AccountsPlus.Expand(c => c.MyGiftCardPlus).Where(a => a.AccountIDPlus == 101).Single();
        var result3 = account3.MyGiftCardPlus.GetActualAmountPlus(1).GetValue();
        Assert.AreEqual(39.8, result3);
        // Invoke bounded Action on single entity set
        var product4 = TestClientContext.ProductsPlus.Where(p => p.ProductIDPlus == 7).Single();
        var result = product4.AddAccessRightPlus(AccessLevelPlus.WritePlus).GetValue();
        Assert.AreEqual(AccessLevelPlus.ReadWritePlus, result);
        // Invoke bounded Action on Navigation Property
        var account5 = TestClientContext.AccountsPlus.Where(ac => ac.AccountIDPlus == 101).Single();
        var result5 = account5.RefreshDefaultPIPlus(DateTimeOffset.Now).GetValue();
        Assert.AreEqual(101901, result5.PaymentInstrumentIDPlus);
    }

    /// <summary>
    /// Verifies querying containment navigation properties (addressed via
    /// CreateQuery with server-side paths) and invoking a function on one.
    /// </summary>
    [TestMethod]
    public void ContainedEntityQuery()
    {
        TestClientContext.MergeOption = Microsoft.OData.Client.MergeOption.OverwriteChanges;
        // Query a single contained entity
        var q1 = TestClientContext.CreateQuery<PaymentInstrumentPlus>("Accounts(103)/MyPaymentInstruments(103902)");
        Assert.IsTrue(q1.RequestUri.OriginalString.EndsWith("Accounts(103)/MyPaymentInstruments(103902)", StringComparison.Ordinal));
        List<PaymentInstrumentPlus> r1 = q1.ToList();
        Assert.AreEqual(1, r1.Count);
        Assert.AreEqual(103902, r1[0].PaymentInstrumentIDPlus);
        Assert.AreEqual("103 second PI", r1[0].FriendlyNamePlus);
        // Query a contained entity set with query option
        var q2 = TestClientContext.CreateQuery<PaymentInstrumentPlus>("Accounts(103)/MyPaymentInstruments").Expand(pi => pi.BillingStatementsPlus).Where(pi => pi.PaymentInstrumentIDPlus == 103901);
        PaymentInstrumentPlus r2 = q2.Single();
        Assert.IsNotNull(r2.BillingStatementsPlus);
        // Invoke a bounded Function.
        double result = TestClientContext.Execute<double>(new Uri(ServiceBaseUri.AbsoluteUri +
            "Accounts(101)/MyGiftCard/Microsoft.Test.OData.Services.ODataWCFService.GetActualAmount(bonusRate=0.2)", UriKind.Absolute), "GET", true).Single();
        Assert.AreEqual(23.88, result);
    }

    /// <summary>
    /// Verifies singleton access plus bound/unbound operations on singletons.
    /// (Method name keeps its historical "Singlton" spelling so existing test
    /// run configurations that filter by name continue to match.)
    /// </summary>
    [TestMethod]
    public void SingltonQuery()
    {
        TestClientContext.MergeOption = Microsoft.OData.Client.MergeOption.OverwriteChanges;
        // Invoke a bounded Function
        var company1 = TestClientContext.CompanyPlus.GetValue();
        var result1 = company1.GetEmployeesCountPlus().GetValue();
        Assert.AreEqual(2, result1);
        // Invoke a bounded Action
        var company2 = TestClientContext.CompanyPlus.GetValue();
        var result2 = company2.IncreaseRevenuePlus(1).GetValue();
        Assert.AreEqual(100001, result2);
        // Invoke a bounded Action on derived type
        TestClientContext.MergeOption = Microsoft.OData.Client.MergeOption.OverwriteChanges;
        var publicCompany = TestClientContext.PublicCompanyPlus.GetValue();
        var originalRevenue = publicCompany.RevenuePlus;
        var revenue = publicCompany.IncreaseRevenuePlus(10).GetValue();
        Assert.AreEqual(originalRevenue + 10, revenue);
        publicCompany = TestClientContext.PublicCompanyPlus.GetValue();
        Assert.AreEqual(revenue, publicCompany.RevenuePlus);
        // Invoke Unbound Action
        TestClientContext.ResetBossAddressPlus(
            new HomeAddressPlus()
            {
                CityPlus = "Shanghai",
                StreetPlus = "ZiXing Road",
                PostalCodePlus = "200100",
                FamilyNamePlus = "White's Family"
            }).GetValue();
        TestClientContext.SaveChanges();
        var boss = TestClientContext.BossPlus.GetValue();
        Assert.AreEqual(boss.HomeAddressPlus.PostalCodePlus, "200100");
        Assert.AreEqual(((HomeAddressPlus)boss.HomeAddressPlus).FamilyNamePlus, "White's Family");
    }
}
}
| abkmr/odata.net | test/EndToEndTests/Tests/Client/Build.Desktop/CodeGenerationTests/CustomizeNamingTest.cs | C# | mit | 24,396 |
var debug = require('debug')('keystone:core:openDatabaseConnection');
// Open the Mongoose connection for this Keystone instance and invoke
// `callback` once the connection is ready (after applying auto-updates and
// waiting on any async session store). Returns `this` for chaining.
module.exports = function openDatabaseConnection (callback) {
var keystone = this;
// Tracks whether the connection ever opened, to pick the right error path.
var mongoConnectionOpen = false;
// support replica sets for mongoose (deprecated path — see warning below)
if (keystone.get('mongo replica set')) {
if (keystone.get('logger')) {
console.log('\nWarning: using the `mongo replica set` option has been deprecated and will be removed in'
+ ' a future version.\nInstead set the `mongo` connection string with your host details, e.g.'
+ ' mongodb://username:password@host:port,host:port,host:port/database and set any replica set options'
+ ' in `mongo options`.\n\nRefer to https://mongodb.github.io/node-mongodb-native/driver-articles/mongoclient.html'
+ ' for more details on the connection settings.');
}
debug('setting up mongo replica set');
var replicaData = keystone.get('mongo replica set');
var replica = '';
var credentials = (replicaData.username && replicaData.password) ? replicaData.username + ':' + replicaData.password + '@' : '';
// Build a comma-separated connection string covering every server.
// NOTE(review): this repeats the mongodb:// scheme per host and leaves a
// trailing comma on the final string — presumably tolerated by the mongoose
// version in use; confirm before changing the format.
replicaData.db.servers.forEach(function (server) {
replica += 'mongodb://' + credentials + server.host + ':' + server.port + '/' + replicaData.db.name + ',';
});
var options = {
auth: { authSource: replicaData.authSource },
replset: {
rs_name: replicaData.db.replicaSetOptions.rs_name,
readPreference: replicaData.db.replicaSetOptions.readPreference,
},
};
debug('connecting to replicate set');
keystone.mongoose.connect(replica, options);
} else {
debug('connecting to mongo');
keystone.mongoose.connect(keystone.get('mongo'), keystone.get('mongo options'));
}
keystone.mongoose.connection.on('error', function (err) {
if (keystone.get('logger')) {
console.log('------------------------------------------------');
console.log('Mongo Error:\n');
console.log(err);
}
if (mongoConnectionOpen) {
// After a successful open, mongoose validation errors are expected to be
// handled by callers; anything else is fatal.
if (err.name === 'ValidationError') return;
throw err;
} else {
// An error before the connection ever opened almost always means mongod
// is not running or not reachable.
throw new Error('KeystoneJS (' + keystone.get('name') + ') failed to start - Check that you are running `mongod` in a separate process.');
}
}).on('open', function () {
debug('mongo connection open');
mongoConnectionOpen = true;
// Run pending application updates (if enabled) before signalling ready.
var connected = function () {
if (keystone.get('auto update')) {
debug('applying auto update');
keystone.applyUpdates(callback);
} else {
callback();
}
};
// If the session store initialises asynchronously, wait for it first.
if (keystone.sessionStorePromise) {
keystone.sessionStorePromise.then(connected);
} else {
connected();
}
});
return this;
};
| andreufirefly/keystone | lib/core/openDatabaseConnection.js | JavaScript | mit | 2,539 |
#ifndef NUMEXPR_OBJECT_HPP
#define NUMEXPR_OBJECT_HPP
/*********************************************************************
Numexpr - Fast numerical array expression evaluator for NumPy.
License: MIT
Author: See AUTHORS.txt
See LICENSE.txt for details about copyright and rights to use.
**********************************************************************/
/* Python instance layout for a compiled numexpr expression.
 * Field order defines the object's memory layout — do not reorder.
 */
struct NumExprObject
{
PyObject_HEAD
PyObject *signature; /* a python string */
PyObject *tempsig; /* signature for temporaries — presumably a string like `signature`; TODO confirm */
PyObject *constsig; /* signature for constants — presumably a string; TODO confirm */
PyObject *fullsig; /* combined signature — presumably a string; TODO confirm */
PyObject *program; /* a python string (the compiled bytecode program) */
PyObject *constants; /* a tuple of int/float/complex */
PyObject *input_names; /* tuple of strings */
char **mem; /* pointers to registers */
char *rawmem; /* a chunk of raw memory backing the registers (of size rawmemsize) */
npy_intp *memsteps; /* per-register step values — semantics defined by the VM; see interpreter source */
npy_intp *memsizes; /* per-register sizes — semantics defined by the VM; see interpreter source */
int rawmemsize; /* total bytes allocated in rawmem */
int n_inputs; /* number of input arguments */
int n_constants; /* number of entries in `constants` */
int n_temps; /* number of temporary registers */
};
extern PyTypeObject NumExprType;
#endif // NUMEXPR_OBJECT_HPP
| Alwnikrotikz/numexpr | numexpr/numexpr_object.hpp | C++ | mit | 1,069 |
// Zepto.js
// (c) 2010-2015 Thomas Fuchs
// Zepto.js may be freely distributed under the MIT license.
var Zepto = (function() {
// Module-scope state and lookup tables shared by all of Zepto's internals.
var undefined, key, $, classList, emptyArray = [], concat = emptyArray.concat, filter = emptyArray.filter, slice = emptyArray.slice,
document = window.document,
// Memoization caches: default display per tag, class-matching regexps.
elementDisplay = {}, classCache = {},
// CSS properties whose numeric values must NOT get a "px" suffix.
cssNumber = { 'column-count': 1, 'columns': 1, 'font-weight': 1, 'line-height': 1,'opacity': 1, 'z-index': 1, 'zoom': 1 },
// HTML-detection and tag-manipulation regexps.
fragmentRE = /^\s*<(\w+|!)[^>]*>/,
singleTagRE = /^<(\w+)\s*\/?>(?:<\/\1>|)$/,
// Expands self-closed non-void tags (e.g. <div/>) into open/close pairs.
tagExpanderRE = /<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/ig,
rootNodeRE = /^(?:body|html)$/i,
capitalRE = /([A-Z])/g,
// special attributes that should be get/set via method calls
methodAttributes = ['val', 'css', 'html', 'text', 'data', 'width', 'height', 'offset'],
adjacencyOperators = [ 'after', 'prepend', 'before', 'append' ],
table = document.createElement('table'),
tableRow = document.createElement('tr'),
// Required parent elements for fragments whose tags can't live in a <div>.
containers = {
'tr': document.createElement('tbody'),
'tbody': table, 'thead': table, 'tfoot': table,
'td': tableRow, 'th': tableRow,
'*': document.createElement('div')
},
readyRE = /complete|loaded|interactive/,
// Selectors that are a bare tag/class/id word (fast-path in qsa).
simpleSelectorRE = /^[\w-]*$/,
class2type = {},
toString = class2type.toString,
zepto = {},
camelize, uniq,
// Off-document parent used by zepto.matches for detached elements.
tempParent = document.createElement('div'),
// Attribute name -> DOM property name translations for prop().
propMap = {
'tabindex': 'tabIndex',
'readonly': 'readOnly',
'for': 'htmlFor',
'class': 'className',
'maxlength': 'maxLength',
'cellspacing': 'cellSpacing',
'cellpadding': 'cellPadding',
'rowspan': 'rowSpan',
'colspan': 'colSpan',
'usemap': 'useMap',
'frameborder': 'frameBorder',
'contenteditable': 'contentEditable'
},
// Fallback is not cross-frame safe, but only runs where Array.isArray is absent.
isArray = Array.isArray ||
function(object){ return object instanceof Array }
// Test whether `element` matches the CSS `selector`.
// Prefers the browser's native (possibly vendor-prefixed) matchesSelector;
// otherwise runs the selector against the element's parent — temporarily
// adopting detached elements into `tempParent` — and checks membership.
// Returns a truthy value on match (native boolean, or the ~indexOf number on
// the fallback path) and a falsy value otherwise, preserving Zepto's contract.
zepto.matches = function(element, selector) {
  if (!selector || !element || element.nodeType !== 1) return false;
  var nativeMatches = element.webkitMatchesSelector || element.mozMatchesSelector ||
                      element.oMatchesSelector || element.matchesSelector;
  if (nativeMatches) return nativeMatches.call(element, selector);
  // Fallback: query within the parent and look for the element in the result.
  var parent = element.parentNode;
  var detached = !parent;
  if (detached) (parent = tempParent).appendChild(element);
  var found = ~zepto.qsa(parent, selector).indexOf(element);
  if (detached) tempParent.removeChild(element);
  return found;
};
// Zepto's internal typeof: "null"/"undefined" for nullish values, otherwise
// the [[Class]]-derived name registered in class2type, defaulting to "object".
function type(obj) {
  if (obj == null) return String(obj);
  return class2type[toString.call(obj)] || "object";
}
// Small predicates built on `type()` and basic duck-typing.
function isFunction(value) { return type(value) == "function" }
// A window object is the only object equal to its own `window` property.
function isWindow(obj) { return obj != null && obj == obj.window }
function isDocument(obj) { return obj != null && obj.nodeType == obj.DOCUMENT_NODE }
function isObject(obj) { return type(obj) == "object" }
// "Plain" objects are literals / new Object() — excludes windows and
// instances of user-defined constructors.
function isPlainObject(obj) {
return isObject(obj) && !isWindow(obj) && Object.getPrototypeOf(obj) == Object.prototype
}
// Duck-type check for array-likes (NodeList, arguments, …); callers must
// pass a non-nullish value.
function likeArray(obj) { return typeof obj.length == 'number' }
// Drop null/undefined entries from an array(-like).
function compact(array) { return filter.call(array, function(item){ return item != null }) }
// Flatten one level of nesting via $.fn.concat; empty arrays pass through.
function flatten(array) { return array.length > 0 ? $.fn.concat.apply([], array) : array }
// "foo-bar" -> "fooBar"; runs of dashes collapse and a trailing dash is dropped.
camelize = function(str){ return str.replace(/-+(.)?/g, function(match, chr){ return chr ? chr.toUpperCase() : '' }) }
function dasherize(str) {
return str.replace(/::/g, '/')
.replace(/([A-Z]+)([A-Z][a-z])/g, '$1_$2')
.replace(/([a-z\d])([A-Z])/g, '$1_$2')
.replace(/_/g, '-')
.toLowerCase()
}
uniq = function(array){ return filter.call(array, function(item, idx){ return array.indexOf(item) == idx }) }
function classRE(name) {
return name in classCache ?
classCache[name] : (classCache[name] = new RegExp('(^|\\s)' + name + '(\\s|$)'))
}
function maybeAddPx(name, value) {
return (typeof value == "number" && !cssNumber[dasherize(name)]) ? value + "px" : value
}
function defaultDisplay(nodeName) {
var element, display
if (!elementDisplay[nodeName]) {
element = document.createElement(nodeName)
document.body.appendChild(element)
display = getComputedStyle(element, '').getPropertyValue("display")
element.parentNode.removeChild(element)
display == "none" && (display = "block")
elementDisplay[nodeName] = display
}
return elementDisplay[nodeName]
}
function children(element) {
return 'children' in element ?
slice.call(element.children) :
$.map(element.childNodes, function(node){ if (node.nodeType == 1) return node })
}
function Z(dom, selector) {
var i, len = dom ? dom.length : 0
for (i = 0; i < len; i++) this[i] = dom[i]
this.length = len
this.selector = selector || ''
}
// `$.zepto.fragment` takes a html string and an optional tag name
// to generate DOM nodes nodes from the given html string.
// The generated DOM nodes are returned as an array.
// This function can be overriden in plugins for example to make
// it compatible with browsers that don't support the DOM fully.
zepto.fragment = function(html, name, properties) {
var dom, nodes, container
// A special case optimization for a single tag
if (singleTagRE.test(html)) dom = $(document.createElement(RegExp.$1))
if (!dom) {
if (html.replace) html = html.replace(tagExpanderRE, "<$1></$2>")
if (name === undefined) name = fragmentRE.test(html) && RegExp.$1
if (!(name in containers)) name = '*'
container = containers[name]
container.innerHTML = '' + html
dom = $.each(slice.call(container.childNodes), function(){
container.removeChild(this)
})
}
if (isPlainObject(properties)) {
nodes = $(dom)
$.each(properties, function(key, value) {
if (methodAttributes.indexOf(key) > -1) nodes[key](value)
else nodes.attr(key, value)
})
}
return dom
}
// `$.zepto.Z` swaps out the prototype of the given `dom` array
// of nodes with `$.fn` and thus supplying all the Zepto functions
// to the array. This method can be overriden in plugins.
zepto.Z = function(dom, selector) {
return new Z(dom, selector)
}
// `$.zepto.isZ` should return `true` if the given object is a Zepto
// collection. This method can be overriden in plugins.
zepto.isZ = function(object) {
return object instanceof zepto.Z
}
// `$.zepto.init` is Zepto's counterpart to jQuery's `$.fn.init` and
// takes a CSS selector and an optional context (and handles various
// special cases).
// This method can be overriden in plugins.
zepto.init = function(selector, context) {
var dom
// If nothing given, return an empty Zepto collection
if (!selector) return zepto.Z()
// Optimize for string selectors
else if (typeof selector == 'string') {
selector = selector.trim()
// If it's a html fragment, create nodes from it
// Note: In both Chrome 21 and Firefox 15, DOM error 12
// is thrown if the fragment doesn't begin with <
if (selector[0] == '<' && fragmentRE.test(selector))
dom = zepto.fragment(selector, RegExp.$1, context), selector = null
// If there's a context, create a collection on that context first, and select
// nodes from there
else if (context !== undefined) return $(context).find(selector)
// If it's a CSS selector, use it to select nodes.
else dom = zepto.qsa(document, selector)
}
// If a function is given, call it when the DOM is ready
else if (isFunction(selector)) return $(document).ready(selector)
// If a Zepto collection is given, just return it
else if (zepto.isZ(selector)) return selector
else {
// normalize array if an array of nodes is given
if (isArray(selector)) dom = compact(selector)
// Wrap DOM nodes.
else if (isObject(selector))
dom = [selector], selector = null
// If it's a html fragment, create nodes from it
else if (fragmentRE.test(selector))
dom = zepto.fragment(selector.trim(), RegExp.$1, context), selector = null
// If there's a context, create a collection on that context first, and select
// nodes from there
else if (context !== undefined) return $(context).find(selector)
// And last but no least, if it's a CSS selector, use it to select nodes.
else dom = zepto.qsa(document, selector)
}
// create a new Zepto collection from the nodes found
return zepto.Z(dom, selector)
}
// `$` will be the base `Zepto` object. When calling this
// function just call `$.zepto.init, which makes the implementation
// details of selecting nodes and creating Zepto collections
// patchable in plugins.
$ = function(selector, context){
return zepto.init(selector, context)
}
function extend(target, source, deep) {
for (key in source)
if (deep && (isPlainObject(source[key]) || isArray(source[key]))) {
if (isPlainObject(source[key]) && !isPlainObject(target[key]))
target[key] = {}
if (isArray(source[key]) && !isArray(target[key]))
target[key] = []
extend(target[key], source[key], deep)
}
else if (source[key] !== undefined) target[key] = source[key]
}
// Copy all but undefined properties from one or more
// objects to the `target` object.
$.extend = function(target){
var deep, args = slice.call(arguments, 1)
if (typeof target == 'boolean') {
deep = target
target = args.shift()
}
args.forEach(function(arg){ extend(target, arg, deep) })
return target
}
// `$.zepto.qsa` is Zepto's CSS selector implementation which
// uses `document.querySelectorAll` and optimizes for some special cases, like `#id`.
// This method can be overriden in plugins.
zepto.qsa = function(element, selector){
var found,
maybeID = selector[0] == '#',
maybeClass = !maybeID && selector[0] == '.',
nameOnly = maybeID || maybeClass ? selector.slice(1) : selector, // Ensure that a 1 char tag name still gets checked
isSimple = simpleSelectorRE.test(nameOnly)
return (element.getElementById && isSimple && maybeID) ? // Safari DocumentFragment doesn't have getElementById
( (found = element.getElementById(nameOnly)) ? [found] : [] ) :
(element.nodeType !== 1 && element.nodeType !== 9 && element.nodeType !== 11) ? [] :
slice.call(
isSimple && !maybeID && element.getElementsByClassName ? // DocumentFragment doesn't have getElementsByClassName/TagName
maybeClass ? element.getElementsByClassName(nameOnly) : // If it's simple, it could be a class
element.getElementsByTagName(selector) : // Or a tag
element.querySelectorAll(selector) // Or it's not simple, and we need to query all
)
}
function filtered(nodes, selector) {
return selector == null ? $(nodes) : $(nodes).filter(selector)
}
$.contains = document.documentElement.contains ?
function(parent, node) {
return parent !== node && parent.contains(node)
} :
function(parent, node) {
while (node && (node = node.parentNode))
if (node === parent) return true
return false
}
function funcArg(context, arg, idx, payload) {
return isFunction(arg) ? arg.call(context, idx, payload) : arg
}
function setAttribute(node, name, value) {
value == null ? node.removeAttribute(name) : node.setAttribute(name, value)
}
// access className property while respecting SVGAnimatedString
function className(node, value){
var klass = node.className || '',
svg = klass && klass.baseVal !== undefined
if (value === undefined) return svg ? klass.baseVal : klass
svg ? (klass.baseVal = value) : (node.className = value)
}
// "true" => true
// "false" => false
// "null" => null
// "42" => 42
// "42.5" => 42.5
// "08" => "08"
// JSON => parse if valid
// String => self
function deserializeValue(value) {
try {
return value ?
value == "true" ||
( value == "false" ? false :
value == "null" ? null :
+value + "" == value ? +value :
/^[\[\{]/.test(value) ? $.parseJSON(value) :
value )
: value
} catch(e) {
return value
}
}
$.type = type
$.isFunction = isFunction
$.isWindow = isWindow
$.isArray = isArray
$.isPlainObject = isPlainObject
$.isEmptyObject = function(obj) {
var name
for (name in obj) return false
return true
}
$.inArray = function(elem, array, i){
return emptyArray.indexOf.call(array, elem, i)
}
$.camelCase = camelize
$.trim = function(str) {
return str == null ? "" : String.prototype.trim.call(str)
}
// plugin compatibility
$.uuid = 0
$.support = { }
$.expr = { }
$.noop = function() {}
$.map = function(elements, callback){
var value, values = [], i, key
if (likeArray(elements))
for (i = 0; i < elements.length; i++) {
value = callback(elements[i], i)
if (value != null) values.push(value)
}
else
for (key in elements) {
value = callback(elements[key], key)
if (value != null) values.push(value)
}
return flatten(values)
}
$.each = function(elements, callback){
var i, key
if (likeArray(elements)) {
for (i = 0; i < elements.length; i++)
if (callback.call(elements[i], i, elements[i]) === false) return elements
} else {
for (key in elements)
if (callback.call(elements[key], key, elements[key]) === false) return elements
}
return elements
}
$.grep = function(elements, callback){
return filter.call(elements, callback)
}
if (window.JSON) $.parseJSON = JSON.parse
// Populate the class2type map
$.each("Boolean Number String Function Array Date RegExp Object Error".split(" "), function(i, name) {
class2type[ "[object " + name + "]" ] = name.toLowerCase()
})
  // Define methods that will be available on all
  // Zepto collections
  $.fn = {
    constructor: zepto.Z,
    length: 0,
    // Because a collection acts like an array
    // copy over these useful array functions.
    forEach: emptyArray.forEach,
    reduce: emptyArray.reduce,
    push: emptyArray.push,
    sort: emptyArray.sort,
    splice: emptyArray.splice,
    indexOf: emptyArray.indexOf,
    // like Array#concat, but unwraps Zepto collections into plain arrays first
    concat: function(){
      var i, value, args = []
      for (i = 0; i < arguments.length; i++) {
        value = arguments[i]
        args[i] = zepto.isZ(value) ? value.toArray() : value
      }
      return concat.apply(zepto.isZ(this) ? this.toArray() : this, args)
    },
    // `map` and `slice` in the jQuery API work differently
    // from their array counterparts
    map: function(fn){
      return $($.map(this, function(el, i){ return fn.call(el, i, el) }))
    },
    slice: function(){
      return $(slice.apply(this, arguments))
    },
    ready: function(callback){
      // need to check if document.body exists for IE as that browser reports
      // document ready when it hasn't yet created the body element
      if (readyRE.test(document.readyState) && document.body) callback($)
      else document.addEventListener('DOMContentLoaded', function(){ callback($) }, false)
      return this
    },
    // element at `idx` (negative counts from the end); with no argument,
    // a plain array copy of all elements
    get: function(idx){
      return idx === undefined ? slice.call(this) : this[idx >= 0 ? idx : idx + this.length]
    },
    toArray: function(){ return this.get() },
    size: function(){
      return this.length
    },
    // detach each element from its parent node (when attached)
    remove: function(){
      return this.each(function(){
        if (this.parentNode != null)
          this.parentNode.removeChild(this)
      })
    },
    // iterate with `this` bound to each element; a `false` return stops early
    each: function(callback){
      emptyArray.every.call(this, function(el, idx){
        return callback.call(el, idx, el) !== false
      })
      return this
    },
    filter: function(selector){
      // a function predicate is implemented via double negation of `not`
      if (isFunction(selector)) return this.not(this.not(selector))
      return $(filter.call(this, function(element){
        return zepto.matches(element, selector)
      }))
    },
    // union of this collection and $(selector, context), de-duplicated
    add: function(selector,context){
      return $(uniq(this.concat($(selector,context))))
    },
    // does the FIRST element match the selector?
    is: function(selector){
      return this.length > 0 && zepto.matches(this[0], selector)
    },
    // elements NOT matching `selector` (string, predicate function,
    // collection, or array-like of nodes)
    not: function(selector){
      var nodes=[]
      if (isFunction(selector) && selector.call !== undefined)
        this.each(function(idx){
          if (!selector.call(this,idx)) nodes.push(this)
        })
      else {
        var excludes = typeof selector == 'string' ? this.filter(selector) :
          (likeArray(selector) && isFunction(selector.item)) ? slice.call(selector) : $(selector)
        this.forEach(function(el){
          if (excludes.indexOf(el) < 0) nodes.push(el)
        })
      }
      return $(nodes)
    },
    // elements that contain the given node, or a descendant matching `selector`
    has: function(selector){
      return this.filter(function(){
        return isObject(selector) ?
          $.contains(this, selector) :
          $(this).find(selector).size()
      })
    },
    eq: function(idx){
      return idx === -1 ? this.slice(idx) : this.slice(idx, + idx + 1)
    },
    first: function(){
      var el = this[0]
      return el && !isObject(el) ? el : $(el)
    },
    last: function(){
      var el = this[this.length - 1]
      return el && !isObject(el) ? el : $(el)
    },
    // descendants matching a selector, or members of a given collection/node
    // that are contained in this collection's elements
    find: function(selector){
      var result, $this = this
      if (!selector) result = $()
      else if (typeof selector == 'object')
        result = $(selector).filter(function(){
          var node = this
          return emptyArray.some.call($this, function(parent){
            return $.contains(parent, node)
          })
        })
      else if (this.length == 1) result = $(zepto.qsa(this[0], selector))
      else result = this.map(function(){ return zepto.qsa(this, selector) })
      return result
    },
    // nearest ancestor (including self) matching the selector, stopping at
    // `context` or the document; the `&&` chain yields `false` to end the walk
    closest: function(selector, context){
      var node = this[0], collection = false
      if (typeof selector == 'object') collection = $(selector)
      while (node && !(collection ? collection.indexOf(node) >= 0 : zepto.matches(node, selector)))
        node = node !== context && !isDocument(node) && node.parentNode
      return $(node)
    },
    // all ancestors, nearest first, de-duplicated across the collection
    parents: function(selector){
      var ancestors = [], nodes = this
      while (nodes.length > 0)
        nodes = $.map(nodes, function(node){
          if ((node = node.parentNode) && !isDocument(node) && ancestors.indexOf(node) < 0) {
            ancestors.push(node)
            return node
          }
        })
      return filtered(ancestors, selector)
    },
    parent: function(selector){
      return filtered(uniq(this.pluck('parentNode')), selector)
    },
    children: function(selector){
      return filtered(this.map(function(){ return children(this) }), selector)
    },
    // child nodes including text/comments; iframes yield their contentDocument
    contents: function() {
      return this.map(function() { return this.contentDocument || slice.call(this.childNodes) })
    },
    siblings: function(selector){
      return filtered(this.map(function(i, el){
        return filter.call(children(el.parentNode), function(child){ return child!==el })
      }), selector)
    },
    empty: function(){
      return this.each(function(){ this.innerHTML = '' })
    },
    // `pluck` is borrowed from Prototype.js
    pluck: function(property){
      return $.map(this, function(el){ return el[property] })
    },
    // undo display:none, falling back to the tag's default display value
    // when a stylesheet still hides the element
    show: function(){
      return this.each(function(){
        this.style.display == "none" && (this.style.display = '')
        if (getComputedStyle(this, '').getPropertyValue("display") == "none")
          this.style.display = defaultDisplay(this.nodeName)
      })
    },
    replaceWith: function(newContent){
      return this.before(newContent).remove()
    },
    // wrap each element in `structure`; clones the wrapper when it is
    // already attached or more than one element is being wrapped
    wrap: function(structure){
      var func = isFunction(structure)
      if (this[0] && !func)
        var dom = $(structure).get(0),
            clone = dom.parentNode || this.length > 1
      return this.each(function(index){
        $(this).wrapAll(
          func ? structure.call(this, index) :
            clone ? dom.cloneNode(true) : dom
        )
      })
    },
    // wrap the whole collection in a single structure
    wrapAll: function(structure){
      if (this[0]) {
        $(this[0]).before(structure = $(structure))
        var children
        // drill down to the inmost element
        while ((children = structure.children()).length) structure = children.first()
        $(structure).append(this)
      }
      return this
    },
    // wrap the contents of each element (or append when empty)
    wrapInner: function(structure){
      var func = isFunction(structure)
      return this.each(function(index){
        var self = $(this), contents = self.contents(),
            dom  = func ? structure.call(this, index) : structure
        contents.length ? contents.wrapAll(dom) : self.append(dom)
      })
    },
    // replace each parent with its children (removes one wrapper level)
    unwrap: function(){
      this.parent().each(function(){
        $(this).replaceWith($(this).children())
      })
      return this
    },
    clone: function(){
      return this.map(function(){ return this.cloneNode(true) })
    },
    hide: function(){
      return this.css("display", "none")
    },
    // show/hide based on `setting`, or flip current visibility when omitted
    toggle: function(setting){
      return this.each(function(){
        var el = $(this)
        ;(setting === undefined ? el.css("display") == "none" : setting) ? el.show() : el.hide()
      })
    },
    prev: function(selector){ return $(this.pluck('previousElementSibling')).filter(selector || '*') },
    next: function(selector){ return $(this.pluck('nextElementSibling')).filter(selector || '*') },
    // get innerHTML of the first element, or set it (value or function) on all
    html: function(html){
      return 0 in arguments ?
        this.each(function(idx){
          var originHtml = this.innerHTML
          $(this).empty().append( funcArg(this, html, idx, originHtml) )
        }) :
        (0 in this ? this[0].innerHTML : null)
    },
    // get/set textContent; null/undefined setters clear the text
    text: function(text){
      return 0 in arguments ?
        this.each(function(idx){
          var newText = funcArg(this, text, idx, this.textContent)
          this.textContent = newText == null ? '' : ''+newText
        }) :
        (0 in this ? this[0].textContent : null)
    },
    // get an attribute of the first element (falling back to the same-named
    // property when the attribute is absent/empty), or set attribute(s) on all
    attr: function(name, value){
      var result
      return (typeof name == 'string' && !(1 in arguments)) ?
        (!this.length || this[0].nodeType !== 1 ? undefined :
          (!(result = this[0].getAttribute(name)) && name in this[0]) ? this[0][name] : result
        ) :
        this.each(function(idx){
          if (this.nodeType !== 1) return
          if (isObject(name)) for (key in name) setAttribute(this, key, name[key])
          else setAttribute(this, name, funcArg(this, value, idx, this.getAttribute(name)))
        })
    },
    // remove one or more space-separated attributes
    removeAttr: function(name){
      return this.each(function(){ this.nodeType === 1 && name.split(' ').forEach(function(attribute){
        setAttribute(this, attribute)
      }, this)})
    },
    // get/set a DOM property; `propMap` fixes case-sensitive names
    prop: function(name, value){
      name = propMap[name] || name
      return (1 in arguments) ?
        this.each(function(idx){
          this[name] = funcArg(this, value, idx, this[name])
        }) :
        (this[0] && this[0][name])
    },
    // get/set a data-* attribute; read values go through deserializeValue
    data: function(name, value){
      var attrName = 'data-' + name.replace(capitalRE, '-$1').toLowerCase()
      var data = (1 in arguments) ?
        this.attr(attrName, value) :
        this.attr(attrName)
      return data !== null ? deserializeValue(data) : undefined
    },
    // get/set form control values; multi-selects return an array of values
    val: function(value){
      return 0 in arguments ?
        this.each(function(idx){
          this.value = funcArg(this, value, idx, this.value)
        }) :
        (this[0] && (this[0].multiple ?
           $(this[0]).find('option').filter(function(){ return this.selected }).pluck('value') :
           this[0].value)
        )
    },
    // get document-relative position of the first element, or position all
    // elements by setting top/left relative to their offset parent
    offset: function(coordinates){
      if (coordinates) return this.each(function(index){
        var $this = $(this),
            coords = funcArg(this, coordinates, index, $this.offset()),
            parentOffset = $this.offsetParent().offset(),
            props = {
              top:  coords.top  - parentOffset.top,
              left: coords.left - parentOffset.left
            }
        // statically positioned elements need positioning for top/left to apply
        if ($this.css('position') == 'static') props['position'] = 'relative'
        $this.css(props)
      })
      if (!this.length) return null
      // detached elements report a zero offset
      if (!$.contains(document.documentElement, this[0]))
        return {top: 0, left: 0}
      var obj = this[0].getBoundingClientRect()
      return {
        left: obj.left + window.pageXOffset,
        top: obj.top + window.pageYOffset,
        width: Math.round(obj.width),
        height: Math.round(obj.height)
      }
    },
    // get computed style (string property or array of properties) from the
    // first element, or set inline style(s); falsy non-zero values remove
    css: function(property, value){
      if (arguments.length < 2) {
        var computedStyle, element = this[0]
        if(!element) return
        computedStyle = getComputedStyle(element, '')
        if (typeof property == 'string')
          return element.style[camelize(property)] || computedStyle.getPropertyValue(property)
        else if (isArray(property)) {
          var props = {}
          $.each(property, function(_, prop){
            props[prop] = (element.style[camelize(prop)] || computedStyle.getPropertyValue(prop))
          })
          return props
        }
      }
      var css = ''
      if (type(property) == 'string') {
        if (!value && value !== 0)
          this.each(function(){ this.style.removeProperty(dasherize(property)) })
        else
          css = dasherize(property) + ":" + maybeAddPx(property, value)
      } else {
        for (key in property)
          if (!property[key] && property[key] !== 0)
            this.each(function(){ this.style.removeProperty(dasherize(key)) })
          else
            css += dasherize(key) + ':' + maybeAddPx(key, property[key]) + ';'
      }
      return this.each(function(){ this.style.cssText += ';' + css })
    },
    // index of `element` within this collection, or of the first element
    // among its siblings when no argument is given
    index: function(element){
      return element ? this.indexOf($(element)[0]) : this.parent().children().indexOf(this[0])
    },
    // true if ANY element carries the class; the memoized classRE regex is
    // passed as the thisArg of `some`
    hasClass: function(name){
      if (!name) return false
      return emptyArray.some.call(this, function(el){
        return this.test(className(el))
      }, classRE(name))
    },
    // add one or more space-separated classes (value or function)
    // NOTE: `classList` here is the shared closure variable declared at the
    // top of the module, not the native DOM classList
    addClass: function(name){
      if (!name) return this
      return this.each(function(idx){
        if (!('className' in this)) return
        classList = []
        var cls = className(this), newName = funcArg(this, name, idx, cls)
        newName.split(/\s+/g).forEach(function(klass){
          if (!$(this).hasClass(klass)) classList.push(klass)
        }, this)
        classList.length && className(this, cls + (cls ? " " : "") + classList.join(" "))
      })
    },
    // remove classes; with no argument, clear className entirely
    removeClass: function(name){
      return this.each(function(idx){
        if (!('className' in this)) return
        if (name === undefined) return className(this, '')
        classList = className(this)
        funcArg(this, name, idx, classList).split(/\s+/g).forEach(function(klass){
          classList = classList.replace(classRE(klass), " ")
        })
        className(this, classList.trim())
      })
    },
    // add or remove each class depending on its presence, or on `when`
    toggleClass: function(name, when){
      if (!name) return this
      return this.each(function(idx){
        var $this = $(this), names = funcArg(this, name, idx, className(this))
        names.split(/\s+/g).forEach(function(klass){
          (when === undefined ? !$this.hasClass(klass) : when) ?
            $this.addClass(klass) : $this.removeClass(klass)
        })
      })
    },
    // get/set vertical scroll; windows use pageYOffset / scrollTo
    scrollTop: function(value){
      if (!this.length) return
      var hasScrollTop = 'scrollTop' in this[0]
      if (value === undefined) return hasScrollTop ? this[0].scrollTop : this[0].pageYOffset
      return this.each(hasScrollTop ?
        function(){ this.scrollTop = value } :
        function(){ this.scrollTo(this.scrollX, value) })
    },
    // get/set horizontal scroll; windows use pageXOffset / scrollTo
    scrollLeft: function(value){
      if (!this.length) return
      var hasScrollLeft = 'scrollLeft' in this[0]
      if (value === undefined) return hasScrollLeft ? this[0].scrollLeft : this[0].pageXOffset
      return this.each(hasScrollLeft ?
        function(){ this.scrollLeft = value } :
        function(){ this.scrollTo(value, this.scrollY) })
    },
    // position of the first element relative to its offset parent,
    // excluding the element's margins and the parent's borders
    position: function() {
      if (!this.length) return
      var elem = this[0],
        // Get *real* offsetParent
        offsetParent = this.offsetParent(),
        // Get correct offsets
        offset       = this.offset(),
        parentOffset = rootNodeRE.test(offsetParent[0].nodeName) ? { top: 0, left: 0 } : offsetParent.offset()
      // Subtract element margins
      // note: when an element has margin: auto the offsetLeft and marginLeft
      // are the same in Safari causing offset.left to incorrectly be 0
      offset.top  -= parseFloat( $(elem).css('margin-top') ) || 0
      offset.left -= parseFloat( $(elem).css('margin-left') ) || 0
      // Add offsetParent borders
      parentOffset.top  += parseFloat( $(offsetParent[0]).css('border-top-width') ) || 0
      parentOffset.left += parseFloat( $(offsetParent[0]).css('border-left-width') ) || 0
      // Subtract the two offsets
      return {
        top:  offset.top  - parentOffset.top,
        left: offset.left - parentOffset.left
      }
    },
    // nearest positioned ancestor (skipping static parents), or the body
    offsetParent: function() {
      return this.map(function(){
        var parent = this.offsetParent || document.body
        while (parent && !rootNodeRE.test(parent.nodeName) && $(parent).css("position") == "static")
          parent = parent.offsetParent
        return parent
      })
    }
  }
  // for now
  $.fn.detach = $.fn.remove
// Generate the `width` and `height` functions
;['width', 'height'].forEach(function(dimension){
var dimensionProperty =
dimension.replace(/./, function(m){ return m[0].toUpperCase() })
$.fn[dimension] = function(value){
var offset, el = this[0]
if (value === undefined) return isWindow(el) ? el['inner' + dimensionProperty] :
isDocument(el) ? el.documentElement['scroll' + dimensionProperty] :
(offset = this.offset()) && offset[dimension]
else return this.each(function(idx){
el = $(this)
el.css(dimension, funcArg(this, value, idx, el[dimension]()))
})
}
})
function traverseNode(node, fun) {
fun(node)
for (var i = 0, len = node.childNodes.length; i < len; i++)
traverseNode(node.childNodes[i], fun)
}
// Generate the `after`, `prepend`, `before`, `append`,
// `insertAfter`, `insertBefore`, `appendTo`, and `prependTo` methods.
adjacencyOperators.forEach(function(operator, operatorIndex) {
var inside = operatorIndex % 2 //=> prepend, append
$.fn[operator] = function(){
// arguments can be nodes, arrays of nodes, Zepto objects and HTML strings
var argType, nodes = $.map(arguments, function(arg) {
argType = type(arg)
return argType == "object" || argType == "array" || arg == null ?
arg : zepto.fragment(arg)
}),
parent, copyByClone = this.length > 1
if (nodes.length < 1) return this
return this.each(function(_, target){
parent = inside ? target : target.parentNode
// convert all methods to a "before" operation
target = operatorIndex == 0 ? target.nextSibling :
operatorIndex == 1 ? target.firstChild :
operatorIndex == 2 ? target :
null
var parentInDocument = $.contains(document.documentElement, parent)
nodes.forEach(function(node){
if (copyByClone) node = node.cloneNode(true)
else if (!parent) return $(node).remove()
parent.insertBefore(node, target)
if (parentInDocument) traverseNode(node, function(el){
if (el.nodeName != null && el.nodeName.toUpperCase() === 'SCRIPT' &&
(!el.type || el.type === 'text/javascript') && !el.src)
window['eval'].call(window, el.innerHTML)
})
})
})
}
// after => insertAfter
// prepend => prependTo
// before => insertBefore
// append => appendTo
$.fn[inside ? operator+'To' : 'insert'+(operatorIndex ? 'Before' : 'After')] = function(html){
$(html)[operator](this)
return this
}
})
  // hook Z instances (and anything built through zepto.Z) up to $.fn
  zepto.Z.prototype = Z.prototype = $.fn
  // Export internal API functions in the `$.zepto` namespace
  zepto.uniq = uniq
  zepto.deserializeValue = deserializeValue
  $.zepto = zepto
  return $
})()
// If `$` is not yet defined, point it to `Zepto`
window.Zepto = Zepto
window.$ === undefined && (window.$ = Zepto)
// Zepto.js
// (c) 2010-2015 Thomas Fuchs
// Zepto.js may be freely distributed under the MIT license.
;(function($){
var jsonpID = 0,
document = window.document,
key,
name,
rscript = /<script\b[^<]*(?:(?!<\/script>)<[^<]*)*<\/script>/gi,
scriptTypeRE = /^(?:text|application)\/javascript/i,
xmlTypeRE = /^(?:text|application)\/xml/i,
jsonType = 'application/json',
htmlType = 'text/html',
blankRE = /^\s*$/,
originAnchor = document.createElement('a')
originAnchor.href = window.location.href
// trigger a custom event and return false if it was cancelled
function triggerAndReturn(context, eventName, data) {
var event = $.Event(eventName)
$(context).trigger(event, data)
return !event.isDefaultPrevented()
}
// trigger an Ajax "global" event
function triggerGlobal(settings, context, eventName, data) {
if (settings.global) return triggerAndReturn(context || document, eventName, data)
}
// Number of active Ajax requests
$.active = 0
function ajaxStart(settings) {
if (settings.global && $.active++ === 0) triggerGlobal(settings, null, 'ajaxStart')
}
function ajaxStop(settings) {
if (settings.global && !(--$.active)) triggerGlobal(settings, null, 'ajaxStop')
}
// triggers an extra global event "ajaxBeforeSend" that's like "ajaxSend" but cancelable
function ajaxBeforeSend(xhr, settings) {
var context = settings.context
if (settings.beforeSend.call(context, xhr, settings) === false ||
triggerGlobal(settings, context, 'ajaxBeforeSend', [xhr, settings]) === false)
return false
triggerGlobal(settings, context, 'ajaxSend', [xhr, settings])
}
function ajaxSuccess(data, xhr, settings, deferred) {
var context = settings.context, status = 'success'
settings.success.call(context, data, status, xhr)
if (deferred) deferred.resolveWith(context, [data, status, xhr])
triggerGlobal(settings, context, 'ajaxSuccess', [xhr, settings, data])
ajaxComplete(status, xhr, settings)
}
// type: "timeout", "error", "abort", "parsererror"
function ajaxError(error, type, xhr, settings, deferred) {
var context = settings.context
settings.error.call(context, xhr, type, error)
if (deferred) deferred.rejectWith(context, [xhr, type, error])
triggerGlobal(settings, context, 'ajaxError', [xhr, settings, error || type])
ajaxComplete(type, xhr, settings)
}
// status: "success", "notmodified", "error", "timeout", "abort", "parsererror"
function ajaxComplete(status, xhr, settings) {
var context = settings.context
settings.complete.call(context, xhr, status)
triggerGlobal(settings, context, 'ajaxComplete', [xhr, settings])
ajaxStop(settings)
}
// Empty function, used as default callback
function empty() {}
$.ajaxJSONP = function(options, deferred){
if (!('type' in options)) return $.ajax(options)
var _callbackName = options.jsonpCallback,
callbackName = ($.isFunction(_callbackName) ?
_callbackName() : _callbackName) || ('jsonp' + (++jsonpID)),
script = document.createElement('script'),
originalCallback = window[callbackName],
responseData,
abort = function(errorType) {
$(script).triggerHandler('error', errorType || 'abort')
},
xhr = { abort: abort }, abortTimeout
if (deferred) deferred.promise(xhr)
$(script).on('load error', function(e, errorType){
clearTimeout(abortTimeout)
$(script).off().remove()
if (e.type == 'error' || !responseData) {
ajaxError(null, errorType || 'error', xhr, options, deferred)
} else {
ajaxSuccess(responseData[0], xhr, options, deferred)
}
window[callbackName] = originalCallback
if (responseData && $.isFunction(originalCallback))
originalCallback(responseData[0])
originalCallback = responseData = undefined
})
if (ajaxBeforeSend(xhr, options) === false) {
abort('abort')
return xhr
}
window[callbackName] = function(){
responseData = arguments
}
script.src = options.url.replace(/\?(.+)=\?/, '?$1=' + callbackName)
document.head.appendChild(script)
if (options.timeout > 0) abortTimeout = setTimeout(function(){
abort('timeout')
}, options.timeout)
return xhr
}
$.ajaxSettings = {
// Default type of request
type: 'GET',
// Callback that is executed before request
beforeSend: empty,
// Callback that is executed if the request succeeds
success: empty,
// Callback that is executed the the server drops error
error: empty,
// Callback that is executed on request complete (both: error and success)
complete: empty,
// The context for the callbacks
context: null,
// Whether to trigger "global" Ajax events
global: true,
// Transport
xhr: function () {
return new window.XMLHttpRequest()
},
// MIME types mapping
// IIS returns Javascript as "application/x-javascript"
accepts: {
script: 'text/javascript, application/javascript, application/x-javascript',
json: jsonType,
xml: 'application/xml, text/xml',
html: htmlType,
text: 'text/plain'
},
// Whether the request is to another domain
crossDomain: false,
// Default timeout
timeout: 0,
// Whether data should be serialized to string
processData: true,
// Whether the browser should be allowed to cache GET responses
cache: true
}
function mimeToDataType(mime) {
if (mime) mime = mime.split(';', 2)[0]
return mime && ( mime == htmlType ? 'html' :
mime == jsonType ? 'json' :
scriptTypeRE.test(mime) ? 'script' :
xmlTypeRE.test(mime) && 'xml' ) || 'text'
}
function appendQuery(url, query) {
if (query == '') return url
return (url + '&' + query).replace(/[&?]{1,2}/, '?')
}
// serialize payload and append it to the URL for GET requests
function serializeData(options) {
if (options.processData && options.data && $.type(options.data) != "string")
options.data = $.param(options.data, options.traditional)
if (options.data && (!options.type || options.type.toUpperCase() == 'GET'))
options.url = appendQuery(options.url, options.data), options.data = undefined
}
// Perform an Ajax request. Caller options are merged over $.ajaxSettings;
// returns the XMLHttpRequest object (with a promise mixed in when
// $.Deferred is available).
$.ajax = function(options){
  // FIX: `key` and `name` were previously assigned without declaration and
  // leaked into the enclosing/global scope; declare them locally. Behavior
  // is otherwise unchanged.
  var key, name,
      settings = $.extend({}, options || {}),
      deferred = $.Deferred && $.Deferred(),
      urlAnchor, hashIndex
  // fill in any settings the caller omitted from the global defaults
  for (key in $.ajaxSettings) if (settings[key] === undefined) settings[key] = $.ajaxSettings[key]
  ajaxStart(settings)
  // Detect cross-domain requests by comparing the page origin with the
  // request URL; re-assigning href to itself resolves relative URLs.
  if (!settings.crossDomain) {
    urlAnchor = document.createElement('a')
    urlAnchor.href = settings.url
    urlAnchor.href = urlAnchor.href
    settings.crossDomain = (originAnchor.protocol + '//' + originAnchor.host) !== (urlAnchor.protocol + '//' + urlAnchor.host)
  }
  if (!settings.url) settings.url = window.location.toString()
  // strip the hash fragment; it must never be sent to the server
  if ((hashIndex = settings.url.indexOf('#')) > -1) settings.url = settings.url.slice(0, hashIndex)
  serializeData(settings)
  var dataType = settings.dataType, hasPlaceholder = /\?.+=\?/.test(settings.url)
  if (hasPlaceholder) dataType = 'jsonp'
  // bust the browser cache with a timestamp unless caching was requested
  if (settings.cache === false || (
      (!options || options.cache !== true) &&
      ('script' == dataType || 'jsonp' == dataType)
      ))
    settings.url = appendQuery(settings.url, '_=' + Date.now())
  if ('jsonp' == dataType) {
    if (!hasPlaceholder)
      settings.url = appendQuery(settings.url,
        settings.jsonp ? (settings.jsonp + '=?') : settings.jsonp === false ? '' : 'callback=?')
    return $.ajaxJSONP(settings, deferred)
  }
  var mime = settings.accepts[dataType],
      headers = { },
      setHeader = function(name, value) { headers[name.toLowerCase()] = [name, value] },
      protocol = /^([\w-]+:)\/\//.test(settings.url) ? RegExp.$1 : window.location.protocol,
      xhr = settings.xhr(),
      nativeSetHeader = xhr.setRequestHeader,
      abortTimeout
  if (deferred) deferred.promise(xhr)
  if (!settings.crossDomain) setHeader('X-Requested-With', 'XMLHttpRequest')
  setHeader('Accept', mime || '*/*')
  if (mime = settings.mimeType || mime) {
    if (mime.indexOf(',') > -1) mime = mime.split(',', 2)[0]
    xhr.overrideMimeType && xhr.overrideMimeType(mime)
  }
  if (settings.contentType || (settings.contentType !== false && settings.data && settings.type.toUpperCase() != 'GET'))
    setHeader('Content-Type', settings.contentType || 'application/x-www-form-urlencoded')
  if (settings.headers) for (name in settings.headers) setHeader(name, settings.headers[name])
  // expose setHeader so beforeSend callbacks can add headers
  xhr.setRequestHeader = setHeader
  xhr.onreadystatechange = function(){
    if (xhr.readyState == 4) {
      xhr.onreadystatechange = empty
      clearTimeout(abortTimeout)
      var result, error = false
      // status 0 is a success only for file:// URLs
      if ((xhr.status >= 200 && xhr.status < 300) || xhr.status == 304 || (xhr.status == 0 && protocol == 'file:')) {
        dataType = dataType || mimeToDataType(settings.mimeType || xhr.getResponseHeader('content-type'))
        result = xhr.responseText
        try {
          // http://perfectionkills.com/global-eval-what-are-the-options/
          if (dataType == 'script') (1,eval)(result)
          else if (dataType == 'xml') result = xhr.responseXML
          else if (dataType == 'json') result = blankRE.test(result) ? null : $.parseJSON(result)
        } catch (e) { error = e }
        if (error) ajaxError(error, 'parsererror', xhr, settings, deferred)
        else ajaxSuccess(result, xhr, settings, deferred)
      } else {
        ajaxError(xhr.statusText || null, xhr.status ? 'error' : 'abort', xhr, settings, deferred)
      }
    }
  }
  if (ajaxBeforeSend(xhr, settings) === false) {
    xhr.abort()
    ajaxError(null, 'abort', xhr, settings, deferred)
    return xhr
  }
  if (settings.xhrFields) for (name in settings.xhrFields) xhr[name] = settings.xhrFields[name]
  var async = 'async' in settings ? settings.async : true
  xhr.open(settings.type, settings.url, async, settings.username, settings.password)
  // apply collected headers with the native setter
  for (name in headers) nativeSetHeader.apply(xhr, headers[name])
  if (settings.timeout > 0) abortTimeout = setTimeout(function(){
    xhr.onreadystatechange = empty
    xhr.abort()
    ajaxError(null, 'timeout', xhr, settings, deferred)
  }, settings.timeout)
  // avoid sending empty string (#319)
  xhr.send(settings.data ? settings.data : null)
  return xhr
}
// Normalize the optional (url, data, success, dataType) argument list used
// by $.get/$.post/$.getJSON: `data` and `success` may each be omitted.
function parseArguments(url, data, success, dataType) {
  if ($.isFunction(data)) {
    // data was omitted; shift everything left
    dataType = success
    success = data
    data = undefined
  }
  if (!$.isFunction(success)) {
    // success was omitted; what remains is the dataType
    dataType = success
    success = undefined
  }
  return { url: url, data: data, success: success, dataType: dataType }
}
// $.get(url, [data], [success], [dataType]) — GET request shorthand
$.get = function(/* url, data, success, dataType */){
return $.ajax(parseArguments.apply(null, arguments))
}
// $.post(url, [data], [success], [dataType]) — POST request shorthand
$.post = function(/* url, data, success, dataType */){
var options = parseArguments.apply(null, arguments)
options.type = 'POST'
return $.ajax(options)
}
// $.getJSON(url, [data], [success]) — GET request parsed as JSON
$.getJSON = function(/* url, data, success */){
var options = parseArguments.apply(null, arguments)
options.dataType = 'json'
return $.ajax(options)
}
// Load remote HTML into every element of the collection. A selector may be
// embedded in the url after a space ("page.html #frag") so that only a
// fragment of the response is inserted.
$.fn.load = function(url, data, success){
if (!this.length) return this
var self = this, parts = url.split(/\s/), selector,
options = parseArguments(url, data, success),
callback = options.success
if (parts.length > 1) options.url = parts[0], selector = parts[1]
// wrap the user callback so the response is injected first
options.success = function(response){
self.html(selector ?
$('<div>').html(response.replace(rscript, "")).find(selector)
: response)
callback && callback.apply(self, arguments)
}
$.ajax(options)
return this
}
var escape = encodeURIComponent
// Recursively serialize obj into the `params` accumulator. `traditional`
// disables the bracketed key syntax (a[b]=c); `scope` carries the parent
// key while recursing into nested arrays/objects.
function serialize(params, obj, traditional, scope){
var type, array = $.isArray(obj), hash = $.isPlainObject(obj)
$.each(obj, function(key, value) {
type = $.type(value)
if (scope) key = traditional ? scope :
scope + '[' + (hash || type == 'object' || type == 'array' ? key : '') + ']'
// handle data in serializeArray() format
if (!scope && array) params.add(value.name, value.value)
// recurse into nested objects
else if (type == "array" || (!traditional && type == "object"))
serialize(params, value, traditional, key)
else params.add(key, value)
})
}
// Serialize an object into a URL-encoded query string. Function values are
// invoked, null/undefined become empty strings, and spaces end up as '+'.
$.param = function(obj, traditional){
  var pairs = []
  // `add` is attached to the accumulator so serialize() can call it
  pairs.add = function(key, value) {
    if ($.isFunction(value)) value = value()
    if (value == null) value = ""
    this.push(escape(key) + '=' + escape(value))
  }
  serialize(pairs, obj, traditional)
  return pairs.join('&').replace(/%20/g, '+')
}
})(Zepto)
// Zepto.js
// (c) 2010-2015 Thomas Fuchs
// Zepto.js may be freely distributed under the MIT license.
;(function($){
// Module-wide event state: `_zid` hands out unique ids for elements and
// callbacks so handlers can be found again; `handlers` maps those ids to
// handler lists. Declaring `undefined` as a var guarantees it really is
// undefined in this scope. `focus`/`hover` map emulated event names onto
// the native events used to implement them.
var _zid = 1, undefined,
slice = Array.prototype.slice,
isFunction = $.isFunction,
isString = function(obj){ return typeof obj == 'string' },
handlers = {},
specialEvents={},
focusinSupported = 'onfocusin' in window,
focus = { focus: 'focusin', blur: 'focusout' },
hover = { mouseenter: 'mouseover', mouseleave: 'mouseout' }
specialEvents.click = specialEvents.mousedown = specialEvents.mouseup = specialEvents.mousemove = 'MouseEvents'
// Return (lazily assigning) the unique id used to key handler lookups for
// an element or callback.
function zid(element) {
  if (!element._zid) element._zid = _zid++
  return element._zid
}
// Find every handler registered on `element` matching the given event
// string (type plus optional namespaces), callback and selector; any of
// the three filters may be omitted.
function findHandlers(element, event, fn, selector) {
event = parse(event)
if (event.ns) var matcher = matcherFor(event.ns)
return (handlers[zid(element)] || []).filter(function(handler) {
return handler
&& (!event.e || handler.e == event.e)
&& (!event.ns || matcher.test(handler.ns))
&& (!fn || zid(handler.fn) === zid(fn))
&& (!selector || handler.sel == selector)
})
}
// Split an event string such as "click.ns1.ns2" into its type and a
// sorted, space-separated namespace string.
function parse(event) {
  var pieces = String(event).split('.')
  var namespaces = pieces.slice(1)
  namespaces.sort()
  return { e: pieces[0], ns: namespaces.join(' ') }
}
// Build a RegExp that matches a handler's namespace list against the
// requested (sorted, space-separated) namespaces.
function matcherFor(ns) {
  var source = '(?:^| )' + ns.replace(' ', ' .* ?') + '(?: |$)'
  return new RegExp(source)
}
// Decide whether a listener must be added in the capture phase: delegated
// focus/blur handlers need capturing when focusin/focusout is unsupported;
// otherwise honor the caller's capture flag.
function eventCapture(handler, captureSetting) {
  var needsCaptureForFocus = handler.del && (!focusinSupported && (handler.e in focus))
  return needsCaptureForFocus || !!captureSetting
}
// Translate an emulated event name into the native event actually bound.
function realEvent(type) {
  if (hover[type]) return hover[type]
  if (focusinSupported && focus[type]) return focus[type]
  return type
}
// Register callbacks for one or more space-separated events on `element`,
// recording enough metadata (namespaces, selector, delegator, index) to be
// able to remove them again later.
function add(element, events, fn, data, selector, delegator, capture){
var id = zid(element), set = (handlers[id] || (handlers[id] = []))
events.split(/\s/).forEach(function(event){
// 'ready' is not a DOM event; route it to $().ready
if (event == 'ready') return $(document).ready(fn)
var handler = parse(event)
handler.fn = fn
handler.sel = selector
// emulate mouseenter, mouseleave
if (handler.e in hover) fn = function(e){
var related = e.relatedTarget
if (!related || (related !== this && !$.contains(this, related)))
return handler.fn.apply(this, arguments)
}
handler.del = delegator
var callback = delegator || fn
// proxy normalizes the event, injects `data`, and implements the
// "return false" => preventDefault + stopPropagation convention
handler.proxy = function(e){
e = compatible(e)
if (e.isImmediatePropagationStopped()) return
e.data = data
var result = callback.apply(element, e._args == undefined ? [e] : [e].concat(e._args))
if (result === false) e.preventDefault(), e.stopPropagation()
return result
}
handler.i = set.length
set.push(handler)
if ('addEventListener' in element)
element.addEventListener(realEvent(handler.e), handler.proxy, eventCapture(handler, capture))
})
}
// Unregister every handler on `element` matching the events/fn/selector
// filters (see findHandlers).
function remove(element, events, fn, selector, capture){
var id = zid(element)
;(events || '').split(/\s/).forEach(function(event){
findHandlers(element, event, fn, selector).forEach(function(handler){
// delete (not splice) so the stored handler indices stay valid
delete handlers[id][handler.i]
if ('removeEventListener' in element)
element.removeEventListener(realEvent(handler.e), handler.proxy, eventCapture(handler, capture))
})
})
}
$.event = { add: add, remove: remove }
// $.proxy(fn, context[, args...]) or $.proxy(object, 'methodName'[, args...]):
// return a function bound to the given context. The proxy shares the
// original function's _zid so it can still be unbound via .off().
$.proxy = function(fn, context) {
var args = (2 in arguments) && slice.call(arguments, 2)
if (isFunction(fn)) {
var proxyFn = function(){ return fn.apply(context, args ? args.concat(slice.call(arguments)) : arguments) }
proxyFn._zid = zid(fn)
return proxyFn
} else if (isString(context)) {
// (object, 'methodName') form; normalize and recurse
if (args) {
args.unshift(fn[context], fn)
return $.proxy.apply(null, args)
} else {
return $.proxy(fn[context], fn)
}
} else {
throw new TypeError("expected function")
}
}
// deprecated aliases for .on()/.off()
$.fn.bind = function(event, data, callback){
return this.on(event, data, callback)
}
$.fn.unbind = function(event, callback){
return this.off(event, callback)
}
// bind a handler that removes itself after its first invocation
$.fn.one = function(event, selector, data, callback){
return this.on(event, selector, data, callback, 1)
}
var returnTrue = function(){return true},
returnFalse = function(){return false},
// event properties skipped when copying an event into a proxy object
ignoreProperties = /^([A-Z]|returnValue$|layer[XY]$)/,
// native method -> name of the jQuery-style state predicate it sets
eventMethods = {
preventDefault: 'isDefaultPrevented',
stopImmediatePropagation: 'isImmediatePropagationStopped',
stopPropagation: 'isPropagationStopped'
}
// Add jQuery-style state-inspection methods (isDefaultPrevented & co.) to
// `event`, delegating to the `source` event's native methods when present.
function compatible(event, source) {
if (source || !event.isDefaultPrevented) {
source || (source = event)
$.each(eventMethods, function(name, predicate) {
var sourceMethod = source[name]
event[name] = function(){
this[predicate] = returnTrue
return sourceMethod && sourceMethod.apply(source, arguments)
}
event[predicate] = returnFalse
})
// reflect a default already prevented on the source event
if (source.defaultPrevented !== undefined ? source.defaultPrevented :
'returnValue' in source ? source.returnValue === false :
source.getPreventDefault && source.getPreventDefault())
event.isDefaultPrevented = returnTrue
}
return event
}
// Copy an event into a plain object (keeping a reference to the original)
// so its properties can be safely overwritten; for-in is used deliberately
// to pick up inherited event properties.
function createProxy(event) {
var key, proxy = { originalEvent: event }
for (key in event)
if (!ignoreProperties.test(key) && event[key] !== undefined) proxy[key] = event[key]
return compatible(proxy, event)
}
// deprecated aliases for delegated event binding
$.fn.delegate = function(selector, event, callback){
return this.on(event, selector, callback)
}
$.fn.undelegate = function(selector, event, callback){
return this.off(event, selector, callback)
}
$.fn.live = function(event, callback){
$(document.body).delegate(this.selector, event, callback)
return this
}
$.fn.die = function(event, callback){
$(document.body).undelegate(this.selector, event, callback)
return this
}
// Bind handlers. Supports an optional delegation selector, handler data and
// one-shot binding; `event` may also be a {type: handler} map.
$.fn.on = function(event, selector, data, callback, one){
var autoRemove, delegator, $this = this
if (event && !isString(event)) {
$.each(event, function(type, fn){
$this.on(type, selector, data, fn, one)
})
return $this
}
// shuffle arguments when the optional selector and/or data were omitted
if (!isString(selector) && !isFunction(callback) && callback !== false)
callback = data, data = selector, selector = undefined
if (callback === undefined || data === false)
callback = data, data = undefined
if (callback === false) callback = returnFalse
return $this.each(function(_, element){
// one-shot: unbind before invoking the real callback
if (one) autoRemove = function(e){
remove(element, e.type, callback)
return callback.apply(this, arguments)
}
// delegation: only fire when the event target matches the selector
if (selector) delegator = function(e){
var evt, match = $(e.target).closest(selector, element).get(0)
if (match && match !== element) {
evt = $.extend(createProxy(e), {currentTarget: match, liveFired: element})
return (autoRemove || callback).apply(match, [evt].concat(slice.call(arguments, 1)))
}
}
add(element, event, callback, data, selector, delegator || autoRemove)
})
}
// Unbind handlers; mirrors the argument handling of .on()
$.fn.off = function(event, selector, callback){
var $this = this
if (event && !isString(event)) {
$.each(event, function(type, fn){
$this.off(type, selector, fn)
})
return $this
}
if (!isString(selector) && !isFunction(callback) && callback !== false)
callback = selector, selector = undefined
if (callback === false) callback = returnFalse
return $this.each(function(){
remove(this, event, callback, selector)
})
}
// Dispatch an event on every element of the collection; strings and plain
// objects are first turned into synthetic events via $.Event.
$.fn.trigger = function(event, args){
event = (isString(event) || $.isPlainObject(event)) ? $.Event(event) : compatible(event)
event._args = args
return this.each(function(){
// handle focus(), blur() by calling them directly
if (event.type in focus && typeof this[event.type] == "function") this[event.type]()
// items in the collection might not be DOM elements
else if ('dispatchEvent' in this) this.dispatchEvent(event)
else $(this).triggerHandler(event, args)
})
}
// triggers event handlers on current element just as if an event occurred,
// doesn't trigger an actual event, doesn't bubble
$.fn.triggerHandler = function(event, args){
var e, result
this.each(function(i, element){
e = createProxy(isString(event) ? $.Event(event) : event)
e._args = args
e.target = element
$.each(findHandlers(element, event.type || event), function(i, handler){
result = handler.proxy(e)
if (e.isImmediatePropagationStopped()) return false
})
})
// result of the last handler invoked
return result
}
// shortcut methods for `.bind(event, fn)` for each event type;
// with no argument the shortcut triggers the event instead
;('focusin focusout focus blur load resize scroll unload click dblclick '+
'mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave '+
'change select keydown keypress keyup error').split(' ').forEach(function(event) {
$.fn[event] = function(callback) {
return (0 in arguments) ?
this.bind(event, callback) :
this.trigger(event)
}
})
// Construct a synthetic DOM event; `props` may carry extra properties and
// `bubbles: false` disables bubbling (it defaults to true).
$.Event = function(type, props) {
if (!isString(type)) props = type, type = props.type
var event = document.createEvent(specialEvents[type] || 'Events'), bubbles = true
if (props) for (var name in props) (name == 'bubbles') ? (bubbles = !!props[name]) : (event[name] = props[name])
event.initEvent(type, bubbles, true)
return compatible(event)
}
})(Zepto)
// Zepto.js
// (c) 2010-2015 Thomas Fuchs
// Zepto.js may be freely distributed under the MIT license.
;(function($){
// Collect the successful form controls of the first element in the
// collection as an array of {name, value} pairs (jQuery-compatible).
$.fn.serializeArray = function() {
var name, type, result = [],
add = function(value) {
// multi-selects yield an array of values; flatten them
if (value.forEach) return value.forEach(add)
result.push({ name: name, value: value })
}
if (this[0]) $.each(this[0].elements, function(_, field){
type = field.type, name = field.name
// skip unnamed, disabled, button-like and file controls, plus
// unchecked radio/checkbox inputs
if (name && field.nodeName.toLowerCase() != 'fieldset' &&
!field.disabled && type != 'submit' && type != 'reset' && type != 'button' && type != 'file' &&
((type != 'radio' && type != 'checkbox') || field.checked))
add($(field).val())
})
return result
}
// Serialize the form fields of the first element into a URL-encoded query
// string ("name=value&name2=value2").
$.fn.serialize = function(){
  return this.serializeArray().map(function(field){
    return encodeURIComponent(field.name) + '=' + encodeURIComponent(field.value)
  }).join('&')
}
// With a callback: bind it as a submit handler. Without arguments: fire a
// cancellable 'submit' event and, unless prevented, submit the form.
$.fn.submit = function(callback) {
if (0 in arguments) this.bind('submit', callback)
else if (this.length) {
var event = $.Event('submit')
this.eq(0).trigger(event)
if (!event.isDefaultPrevented()) this.get(0).submit()
}
return this
}
})(Zepto)
// Zepto.js
// (c) 2010-2015 Thomas Fuchs
// Zepto.js may be freely distributed under the MIT license.
;(function($, undefined){
// Vendor-prefix detection state plus the CSS property-name variables
// (assigned below) used to set and later reset transition/animation styles.
var prefix = '', eventPrefix,
vendors = { Webkit: 'webkit', Moz: '', O: 'o' },
testEl = document.createElement('div'),
supportedTransforms = /^((translate|rotate|scale)(X|Y|Z|3d)?|matrix(3d)?|perspective|skew(X|Y)?)$/i,
transform,
transitionProperty, transitionDuration, transitionTiming, transitionDelay,
animationName, animationDuration, animationTiming, animationDelay,
cssReset = {}
function dasherize(str) { return str.replace(/([a-z])([A-Z])/, '$1-$2').toLowerCase() }
function normalizeEvent(name) { return eventPrefix ? eventPrefix + name : name.toLowerCase() }
// Probe for the vendor prefix by testing for a prefixed TransitionProperty
$.each(vendors, function(vendor, event){
if (testEl.style[vendor + 'TransitionProperty'] !== undefined) {
prefix = '-' + vendor.toLowerCase() + '-'
eventPrefix = event
return false
}
})
transform = prefix + 'transform'
// cssReset maps every transition/animation property to '' so the styles
// can be cleared once an animation has completed
cssReset[transitionProperty = prefix + 'transition-property'] =
cssReset[transitionDuration = prefix + 'transition-duration'] =
cssReset[transitionDelay = prefix + 'transition-delay'] =
cssReset[transitionTiming = prefix + 'transition-timing-function'] =
cssReset[animationName = prefix + 'animation-name'] =
cssReset[animationDuration = prefix + 'animation-duration'] =
cssReset[animationDelay = prefix + 'animation-delay'] =
cssReset[animationTiming = prefix + 'animation-timing-function'] = ''
$.fx = {
// fx is "off" when neither prefixed nor unprefixed transitions exist
off: (eventPrefix === undefined && testEl.style.transitionProperty === undefined),
speeds: { _default: 400, fast: 200, slow: 600 },
cssPrefix: prefix,
transitionEnd: normalizeEvent('TransitionEnd'),
animationEnd: normalizeEvent('AnimationEnd')
}
// $.fn.animate(properties, [duration], [ease], [callback], [delay]);
// arguments may also be supplied as a single options object.
$.fn.animate = function(properties, duration, ease, callback, delay){
if ($.isFunction(duration))
callback = duration, ease = undefined, duration = undefined
if ($.isFunction(ease))
callback = ease, ease = undefined
if ($.isPlainObject(duration))
ease = duration.easing, callback = duration.complete, delay = duration.delay, duration = duration.duration
// durations arrive in ms (or as named speeds) but CSS wants seconds
if (duration) duration = (typeof duration == 'number' ? duration :
($.fx.speeds[duration] || $.fx.speeds._default)) / 1000
if (delay) delay = parseFloat(delay) / 1000
return this.anim(properties, duration, ease, callback, delay)
}
// Low-level animation: `properties` is either a keyframe animation name
// (string) or a map of CSS/transform properties to transition to.
$.fn.anim = function(properties, duration, ease, callback, delay){
var key, cssValues = {}, cssProperties, transforms = '',
that = this, wrappedCallback, endEvent = $.fx.transitionEnd,
fired = false
if (duration === undefined) duration = $.fx.speeds._default / 1000
if (delay === undefined) delay = 0
if ($.fx.off) duration = 0
if (typeof properties == 'string') {
// keyframe animation
cssValues[animationName] = properties
cssValues[animationDuration] = duration + 's'
cssValues[animationDelay] = delay + 's'
cssValues[animationTiming] = (ease || 'linear')
endEvent = $.fx.animationEnd
} else {
cssProperties = []
// CSS transitions
for (key in properties)
if (supportedTransforms.test(key)) transforms += key + '(' + properties[key] + ') '
else cssValues[key] = properties[key], cssProperties.push(dasherize(key))
if (transforms) cssValues[transform] = transforms, cssProperties.push(transform)
if (duration > 0 && typeof properties === 'object') {
cssValues[transitionProperty] = cssProperties.join(', ')
cssValues[transitionDuration] = duration + 's'
cssValues[transitionDelay] = delay + 's'
cssValues[transitionTiming] = (ease || 'linear')
}
}
// fires once per element: unbinds itself, resets transition styles and
// invokes the user callback
wrappedCallback = function(event){
if (typeof event !== 'undefined') {
if (event.target !== event.currentTarget) return // makes sure the event didn't bubble from "below"
$(event.target).unbind(endEvent, wrappedCallback)
} else
$(this).unbind(endEvent, wrappedCallback) // triggered by setTimeout
fired = true
$(this).css(cssReset)
callback && callback.call(this)
}
if (duration > 0){
this.bind(endEvent, wrappedCallback)
// transitionEnd is not always firing on older Android phones
// so make sure it gets fired
setTimeout(function(){
if (fired) return
wrappedCallback.call(that)
}, ((duration + delay) * 1000) + 25)
}
// trigger page reflow so new elements can animate
this.size() && this.get(0).clientLeft
this.css(cssValues)
// zero duration: complete asynchronously on the next tick
if (duration <= 0) setTimeout(function() {
that.each(function(){ wrappedCallback.call(this) })
}, 0)
return this
}
testEl = null
})(Zepto)
// Zepto.js
// (c) 2010-2015 Thomas Fuchs
// Zepto.js may be freely distributed under the MIT license.
;(function($, undefined){
// keep references to the non-animated implementations so the animated
// versions below can fall back to them
var document = window.document, docElem = document.documentElement,
origShow = $.fn.show, origHide = $.fn.hide, origToggle = $.fn.toggle
// Animate a collection to the given opacity (and optionally a transform
// scale); speed may be omitted, making the callback the second argument.
function anim(el, speed, opacity, scale, callback) {
if (typeof speed == 'function' && !callback) callback = speed, speed = undefined
var props = { opacity: opacity }
if (scale) {
props.scale = scale
el.css($.fx.cssPrefix + 'transform-origin', '0 0')
}
return el.animate(props, speed, null, callback)
}
// Fade/scale out, then actually hide the elements via the original hide().
function hide(el, speed, scale, callback) {
return anim(el, speed, 0, scale, function(){
origHide.call($(this))
callback && callback.call(this)
})
}
// Animated show: reveal first, then fade/scale in when a speed was given.
$.fn.show = function(speed, callback) {
origShow.call(this)
if (speed === undefined) speed = 0
else this.css('opacity', 0)
return anim(this, speed, 1, '1,1', callback)
}
$.fn.hide = function(speed, callback) {
if (speed === undefined) return origHide.call(this)
else return hide(this, speed, '0,0', callback)
}
// Animated toggle; boolean/absent speed defers to the original toggle().
$.fn.toggle = function(speed, callback) {
if (speed === undefined || typeof speed == 'boolean')
return origToggle.call(this, speed)
else return this.each(function(){
var el = $(this)
el[el.css('display') == 'none' ? 'show' : 'hide'](speed, callback)
})
}
$.fn.fadeTo = function(speed, opacity, callback) {
return anim(this, speed, opacity, null, callback)
}
// Fade in up to the element's current opacity (or fully, if it is 0).
$.fn.fadeIn = function(speed, callback) {
var target = this.css('opacity')
if (target > 0) this.css('opacity', 0)
else target = 1
return origShow.call(this).fadeTo(speed, target, callback)
}
$.fn.fadeOut = function(speed, callback) {
return hide(this, speed, null, callback)
}
// Fade each element in or out depending on its current visibility.
$.fn.fadeToggle = function(speed, callback) {
return this.each(function(){
var el = $(this)
el[
(el.css('opacity') == 0 || el.css('display') == 'none') ? 'fadeIn' : 'fadeOut'
](speed, callback)
})
}
})(Zepto)
// Zepto.js
// (c) 2010-2015 Thomas Fuchs
// Zepto.js may be freely distributed under the MIT license.
;(function(){
// getComputedStyle shouldn't freak out when called
// without a valid element as argument
try {
getComputedStyle(undefined)
} catch(e) {
// the native implementation throws; wrap it so invalid arguments
// yield null instead of an exception
var nativeGetComputedStyle = getComputedStyle;
window.getComputedStyle = function(element){
try {
return nativeGetComputedStyle(element)
} catch(e) {
return null
}
}
}
})()
// Zepto.js
// (c) 2010-2015 Thomas Fuchs
// Zepto.js may be freely distributed under the MIT license.
;(function($){
// Shared gesture state: `touch` accumulates coordinates/timestamps for the
// gesture in progress; the *Timeout vars allow pending tap/swipe events to
// be cancelled (e.g. by scrolling).
var touch = {},
touchTimeout, tapTimeout, swipeTimeout, longTapTimeout,
longTapDelay = 750,
gesture
// Classify a swipe from its start (x1, y1) and end (x2, y2) coordinates:
// the dominant axis wins (horizontal on a tie), and the sign of the
// movement picks the direction.
function swipeDirection(x1, x2, y1, y2) {
  var xDistance = Math.abs(x1 - x2)
  var yDistance = Math.abs(y1 - y2)
  if (xDistance >= yDistance) {
    return x1 - x2 > 0 ? 'Left' : 'Right'
  }
  return y1 - y2 > 0 ? 'Up' : 'Down'
}
// Fire 'longTap' if the finger is still down when the delay elapses.
function longTap() {
longTapTimeout = null
if (touch.last) {
touch.el.trigger('longTap')
touch = {}
}
}
// Cancel only the pending long-tap timer (movement resets it).
function cancelLongTap() {
if (longTapTimeout) clearTimeout(longTapTimeout)
longTapTimeout = null
}
// Cancel every pending tap/swipe timer and reset the gesture state.
function cancelAll() {
if (touchTimeout) clearTimeout(touchTimeout)
if (tapTimeout) clearTimeout(tapTimeout)
if (swipeTimeout) clearTimeout(swipeTimeout)
if (longTapTimeout) clearTimeout(longTapTimeout)
touchTimeout = tapTimeout = swipeTimeout = longTapTimeout = null
touch = {}
}
// True when a pointer event represents the primary contact of a touch
// (as opposed to mouse/pen or an additional finger).
function isPrimaryTouch(event){
  var isTouchPointer = event.pointerType == 'touch' ||
    event.pointerType == event.MSPOINTER_TYPE_TOUCH
  return isTouchPointer && event.isPrimary
}
// True when `e` is the pointer-event flavor (standard or MS-prefixed) of
// the given phase, e.g. isPointerEventType(e, 'down') for pointerdown.
function isPointerEventType(e, type){
  if (e.type == 'pointer' + type) return true
  return e.type.toLowerCase() == 'mspointer' + type
}
$(document).ready(function(){
var now, delta, deltaX = 0, deltaY = 0, firstTouch, _isPointerType
// IE gesture recognition is used for velocity-based swipe detection
if ('MSGesture' in window) {
gesture = new MSGesture()
gesture.target = document.body
}
$(document)
.bind('MSGestureEnd', function(e){
var swipeDirectionFromVelocity =
e.velocityX > 1 ? 'Right' : e.velocityX < -1 ? 'Left' : e.velocityY > 1 ? 'Down' : e.velocityY < -1 ? 'Up' : null;
if (swipeDirectionFromVelocity) {
touch.el.trigger('swipe')
touch.el.trigger('swipe'+ swipeDirectionFromVelocity)
}
})
.on('touchstart MSPointerDown pointerdown', function(e){
// ignore secondary contacts / non-touch pointers
if((_isPointerType = isPointerEventType(e, 'down')) &&
!isPrimaryTouch(e)) return
firstTouch = _isPointerType ? e : e.touches[0]
if (e.touches && e.touches.length === 1 && touch.x2) {
// Clear out touch movement data if we have it sticking around
// This can occur if touchcancel doesn't fire due to preventDefault, etc.
touch.x2 = undefined
touch.y2 = undefined
}
now = Date.now()
delta = now - (touch.last || now)
// text nodes have no tagName; fall back to their parent element
touch.el = $('tagName' in firstTouch.target ?
firstTouch.target : firstTouch.target.parentNode)
touchTimeout && clearTimeout(touchTimeout)
touch.x1 = firstTouch.pageX
touch.y1 = firstTouch.pageY
// two taps within 250ms count as a double tap
if (delta > 0 && delta <= 250) touch.isDoubleTap = true
touch.last = now
longTapTimeout = setTimeout(longTap, longTapDelay)
// adds the current touch contact for IE gesture recognition
if (gesture && _isPointerType) gesture.addPointer(e.pointerId);
})
.on('touchmove MSPointerMove pointermove', function(e){
if((_isPointerType = isPointerEventType(e, 'move')) &&
!isPrimaryTouch(e)) return
firstTouch = _isPointerType ? e : e.touches[0]
cancelLongTap()
touch.x2 = firstTouch.pageX
touch.y2 = firstTouch.pageY
// accumulate total movement to distinguish taps from drags
deltaX += Math.abs(touch.x1 - touch.x2)
deltaY += Math.abs(touch.y1 - touch.y2)
})
.on('touchend MSPointerUp pointerup', function(e){
if((_isPointerType = isPointerEventType(e, 'up')) &&
!isPrimaryTouch(e)) return
cancelLongTap()
// swipe
if ((touch.x2 && Math.abs(touch.x1 - touch.x2) > 30) ||
(touch.y2 && Math.abs(touch.y1 - touch.y2) > 30))
swipeTimeout = setTimeout(function() {
touch.el.trigger('swipe')
touch.el.trigger('swipe' + (swipeDirection(touch.x1, touch.x2, touch.y1, touch.y2)))
touch = {}
}, 0)
// normal tap
else if ('last' in touch)
// don't fire tap when delta position changed by more than 30 pixels,
// for instance when moving to a point and back to origin
if (deltaX < 30 && deltaY < 30) {
// delay by one tick so we can cancel the 'tap' event if 'scroll' fires
// ('tap' fires before 'scroll')
tapTimeout = setTimeout(function() {
// trigger universal 'tap' with the option to cancelTouch()
// (cancelTouch cancels processing of single vs double taps for faster 'tap' response)
var event = $.Event('tap')
event.cancelTouch = cancelAll
touch.el.trigger(event)
// trigger double tap immediately
if (touch.isDoubleTap) {
if (touch.el) touch.el.trigger('doubleTap')
touch = {}
}
// trigger single tap after 250ms of inactivity
else {
touchTimeout = setTimeout(function(){
touchTimeout = null
if (touch.el) touch.el.trigger('singleTap')
touch = {}
}, 250)
}
}, 0)
} else {
touch = {}
}
deltaX = deltaY = 0
})
// when the browser window loses focus,
// for example when a modal dialog is shown,
// cancel all ongoing events
.on('touchcancel MSPointerCancel pointercancel', cancelAll)
// scrolling the window indicates intention of the user
// to scroll, not tap or swipe, so cancel all ongoing events
$(window).on('scroll', cancelAll)
})
// shortcut methods, e.g. $(el).swipeLeft(fn), for every gesture event
;['swipe', 'swipeLeft', 'swipeRight', 'swipeUp', 'swipeDown',
'doubleTap', 'tap', 'singleTap', 'longTap'].forEach(function(eventName){
$.fn[eventName] = function(callback){ return this.on(eventName, callback) }
})
})(Zepto)
| nongfadai/front_demo | web/src/notuse/app/lib/zepto/zepto.js | JavaScript | mit | 69,690 |
var kunstmaanbundles = kunstmaanbundles || {};
// Wrapper around the Bootstrap datetimepicker widget: initializes every
// `.js-datepicker` element, reads per-element data-* overrides, and keeps
// "linked" pickers (date + time pair) consistent so only future datetimes
// can be chosen.
kunstmaanbundles.datepicker = (function($, window, undefined) {
var init, reInit, _setDefaultDate, _initDatepicker;
var _today = window.moment(),
_tomorrow = window.moment(_today).add(1, 'days');
// fallback settings used when an element carries no data-* override
var defaultFormat = 'DD-MM-YYYY',
defaultCollapse = true,
defaultKeepOpen = false,
defaultMinDate = false,
defaultShowDefaultDate = false,
defaultStepping = 1;
// Initialize every datepicker currently on the page.
init = function() {
$('.js-datepicker').each(function() {
_initDatepicker($(this));
});
};
// Re-initialize a specific element, or every picker not yet enabled.
reInit = function(el) {
if (el) {
_initDatepicker($(el));
} else {
$('.js-datepicker').each(function() {
if (!$(this).hasClass('datepicker--enabled')) {
_initDatepicker($(this));
}
});
}
};
// Pick the date initially shown, matching the element's min-date setting.
_setDefaultDate = function(elMinDate) {
if(elMinDate === 'tomorrow') {
return _tomorrow;
} else {
return _today;
}
};
// Configure the datetimepicker widget for a single `.js-datepicker` element.
_initDatepicker = function($el) {
// Get Settings
var elFormat = $el.data('format'),
elCollapse = $el.data('collapse'),
elKeepOpen = $el.data('keep-open'),
elMinDate = $el.data('min-date'),
elShowDefaultDate = $el.data('default-date'),
elStepping = $el.data('stepping');
// Set Settings
var format = (elFormat !== undefined) ? elFormat : defaultFormat,
collapse = (elCollapse !== undefined) ? elCollapse : defaultCollapse,
keepOpen = (elKeepOpen !== undefined) ? elKeepOpen : defaultKeepOpen,
minDate = (elMinDate === 'tomorrow') ? _tomorrow : (elMinDate === 'today') ? _today : defaultMinDate,
defaultDate = (elShowDefaultDate) ? _setDefaultDate(elMinDate) : defaultShowDefaultDate,
stepping = (elStepping !== undefined) ? elStepping : defaultStepping;
// Setup
var $input = $el.find('input'),
$addon = $el.find('.input-group-addon'),
linkedDatepickerID = $el.data('linked-datepicker') || false;
if (format.indexOf('HH:mm') === -1) {
// Drop time if not necessary
if (minDate) {
minDate = minDate.clone().startOf('day'); // clone() because otherwise .startOf() mutates the original moment object
}
if (defaultDate) {
defaultDate = defaultDate.clone().startOf('day');
}
}
$input.datetimepicker({
format: format,
collapse: collapse,
keepOpen: keepOpen,
minDate: minDate,
defaultDate: defaultDate,
widgetPositioning: {
horizontal: 'left',
vertical: 'auto'
},
widgetParent: $el,
icons: {
time: 'fa fa-clock',
date: 'fa fa-calendar',
up: 'fa fa-chevron-up',
down: 'fa fa-chevron-down',
previous: 'fa fa-arrow-left',
next: 'fa fa-arrow-right',
today: 'fa fa-crosshairs',
clear: 'fa fa-trash'
},
stepping: stepping
});
$el.addClass('datepicker--enabled');
// clicking the calendar icon focuses (and thus opens) the input
$addon.on('click', function() {
$input.focus();
});
// Linked datepickers - allow future datetime only - (un)publish modal
if (linkedDatepickerID) {
// set min time only if selected date = today
$(document).on('dp.change', linkedDatepickerID, function(e) {
if (e.target.value === _today.format('DD-MM-YYYY')) {
var selectedTime = window.moment($input.val(), 'HH:mm');
// Force user to select new time, if current time isn't valid anymore
selectedTime.isBefore(_today) && $input.data('DateTimePicker').show();
$input.data('DateTimePicker').minDate(_today);
} else {
$input.data('DateTimePicker').minDate(false);
}
});
}
};
return {
init: init,
reInit: reInit
};
})(jQuery, window);
| mwoynarski/KunstmaanBundlesCMS | src/Kunstmaan/AdminBundle/Resources/ui/js/_datepicker.js | JavaScript | mit | 4,312 |
/*
* Copyright 2012 Evernote Corporation.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.evernote.client.android;
import com.evernote.edam.error.EDAMNotFoundException;
import com.evernote.edam.error.EDAMSystemException;
import com.evernote.edam.error.EDAMUserException;
import com.evernote.edam.type.LinkedNotebook;
import com.evernote.edam.type.Note;
import com.evernote.edam.type.Notebook;
import com.evernote.edam.type.SharedNotebook;
import com.evernote.thrift.TException;
import com.evernote.thrift.protocol.TProtocol;
import com.evernote.thrift.transport.TTransportException;
import java.util.Arrays;
import java.util.List;
/**
* A wrapper/helper class that manages the connection to a linked note store. It maintains two
* {@link AsyncNoteStoreClient} objects: one points to the user's personal store and the other to
* the linked notebook's shard.
*
* The helper methods make network calls across both shards to return the appropriate data.
*
* @author @tylersmithnet
*/
public class AsyncLinkedNoteStoreClient {
/**
* References the user's main (personal) note store
*/
private AsyncNoteStoreClient mMainNoteStoreClient;
// Client bound to the linked notebook's shard
private AsyncNoteStoreClient mLinkedStoreClient;
// Authentication token used against the linked notebook's shard
private String mAuthToken;
// Factory this client was created with
private ClientFactory mClientFactory;
/**
* Creates the client pair: one client for the linked notebook's shard (built
* from the given Thrift protocols) and one for the user's personal store
* (obtained from the currently open {@link EvernoteSession}).
*
* @param iprot Thrift input protocol for the linked notebook's shard
* @param oprot Thrift output protocol for the linked notebook's shard
* @param authenticationToken token authenticating against the linked notebook's shard
* @param clientFactory the factory creating this client
* @throws TTransportException if the personal note store client cannot be created
*/
AsyncLinkedNoteStoreClient(TProtocol iprot, TProtocol oprot, String authenticationToken, ClientFactory clientFactory) throws TTransportException {
mLinkedStoreClient = new AsyncNoteStoreClient(iprot, oprot, authenticationToken);
mMainNoteStoreClient = EvernoteSession.getOpenSession().getClientFactory().createNoteStoreClient();
mAuthToken = authenticationToken;
mClientFactory = clientFactory;
}
/**
* Returns the {@link AsyncNoteStoreClient} object that has been instantiated to the appropriate shard
* @return the client pointing at the linked notebook's shard
*/
public AsyncNoteStoreClient getAsyncClient() {
return mLinkedStoreClient;
}
/** @return the client pointing at the user's personal note store */
AsyncNoteStoreClient getAsyncPersonalClient() {
return mMainNoteStoreClient;
}
/** @return the authentication token for the linked notebook's shard */
String getAuthenticationToken() {
return mAuthToken;
}
/** Replaces the authentication token used for the linked notebook's shard. */
void setAuthToken(String authenticationToken) {
mAuthToken = authenticationToken;
}
/** @return the {@link ClientFactory} this client was created with */
ClientFactory getClientFactory() {
return mClientFactory;
}
/**
* Helper method to create a note asynchronously in a linked/business notebook.
* The call is dispatched reflectively to {@link #createNote(Note, LinkedNotebook)}.
*
* @param note the note to create
* @param linkedNotebook the linked notebook to create the note in
* @param callback receives the created note, or the failure
*/
public void createNoteAsync(final Note note, final LinkedNotebook linkedNotebook, final OnClientCallback<Note> callback) {
AsyncReflector.execute(this, callback, "createNote", note, linkedNotebook);
}
/**
* Helper method to create a note synchronously in a linked notebook. The
* target notebook GUID is resolved from the shared notebook associated with
* this client's authentication token and overwrites the note's notebook GUID.
*
* NOTE(review): the {@code linkedNotebook} parameter is not referenced in
* this method body — confirm whether it is kept only for API symmetry.
*
* @param note the note to create; its notebook GUID is overwritten
* @param linkedNotebook the linked notebook to create the note in (unused here)
* @return the created note as returned by the service
* @throws com.evernote.edam.error.EDAMUserException
*
* @throws com.evernote.edam.error.EDAMSystemException
*
* @throws com.evernote.thrift.TException
* @throws com.evernote.edam.error.EDAMNotFoundException
*
*/
public Note createNote(Note note, LinkedNotebook linkedNotebook) throws EDAMUserException, EDAMSystemException, TException, EDAMNotFoundException {
SharedNotebook sharedNotebook = getAsyncClient().getClient().getSharedNotebookByAuth(getAuthenticationToken());
note.setNotebookGuid(sharedNotebook.getNotebookGuid());
return getAsyncClient().getClient().createNote(getAuthenticationToken(), note);
}
/**
* Helper method to list linked/business notebooks asynchronously.
* The call is dispatched reflectively against the personal client.
*
* @see {@link com.evernote.edam.notestore.NoteStore.Client#listLinkedNotebooks(String)}
*
* @param callback receives the list of linked notebooks, or the failure
*/
public void listNotebooksAsync(final OnClientCallback<List<LinkedNotebook>> callback) {
AsyncReflector.execute(getAsyncPersonalClient(), callback, "listNotebooks", getAuthenticationToken());
}
/**
* Helper method to list linked notebooks synchronously, using the personal
* store client and its own authentication token.
*
* @see {@link com.evernote.edam.notestore.NoteStore.Client#listLinkedNotebooks(String)}
*
* @return the linked notebooks visible to the user
*/
public List<LinkedNotebook> listNotebooks() throws EDAMUserException, EDAMSystemException, TException, EDAMNotFoundException {
return getAsyncPersonalClient().getClient().listLinkedNotebooks(getAsyncPersonalClient().getAuthenticationToken());
}
/**
* Create Linked Notebook from a Notebook
*
* Asynchronous call
*
* @param callback
*/
public void createNotebookAsync(Notebook notebook, OnClientCallback<LinkedNotebook> callback) {
AsyncReflector.execute(this, callback, "createNotebook", notebook);
}
/**
* Create Linked Notebook from a Notebook
*
* Synchronous call
*
* @return {@link LinkedNotebook} with guid from server
*/
public LinkedNotebook createNotebook(Notebook notebook) throws TException, EDAMUserException, EDAMSystemException, EDAMNotFoundException {
Notebook originalNotebook = getAsyncClient().getClient().createNotebook(getAuthenticationToken(), notebook);
SharedNotebook sharedNotebook = originalNotebook.getSharedNotebooks().get(0);
LinkedNotebook linkedNotebook = new LinkedNotebook();
linkedNotebook.setShareKey(sharedNotebook.getShareKey());
linkedNotebook.setShareName(originalNotebook.getName());
linkedNotebook.setUsername(EvernoteSession.getOpenSession().getAuthenticationResult().getBusinessUser().getUsername());
linkedNotebook.setShardId(EvernoteSession.getOpenSession().getAuthenticationResult().getBusinessUser().getShardId());
return getAsyncPersonalClient().getClient().createLinkedNotebook(getAsyncPersonalClient().getAuthenticationToken(), linkedNotebook);
}
/**
* Providing a LinkedNotebook referencing a linked/business account, perform a delete
*
* Asynchronous call
* @param callback
*/
public void deleteNotebookAsync(LinkedNotebook linkedNotebook, OnClientCallback<Integer> callback) {
AsyncReflector.execute(this, callback, "deleteNotebook", linkedNotebook);
}
/**
* Providing a LinkedNotebook referencing a linked account, perform a delete
*
* Synchronous call
*
* @return guid of notebook deleted
*/
public int deleteNotebook(LinkedNotebook linkedNotebook) throws TException, EDAMUserException, EDAMSystemException, EDAMNotFoundException {
SharedNotebook sharedNotebook = getAsyncClient().getClient().getSharedNotebookByAuth(getAuthenticationToken());
Long[] ids = {sharedNotebook.getId()};
getAsyncClient().getClient().expungeSharedNotebooks(getAuthenticationToken(), Arrays.asList(ids));
return getAsyncPersonalClient().getClient().expungeLinkedNotebook(getAsyncPersonalClient().getAuthenticationToken(), linkedNotebook.getGuid());
}
/**
* Will return the {@link Notebook} associated with the {@link LinkedNotebook} from the linked/business account
*
* Asynchronous call
*
* @param linkedNotebook
* @param callback
*/
public void getCorrespondingNotebookAsync(LinkedNotebook linkedNotebook, OnClientCallback<Notebook> callback) {
AsyncReflector.execute(this, callback, "getCorrespondingNotebook", linkedNotebook);
}
/**
* Will return the {@link com.evernote.edam.type.Notebook} associated with the {@link com.evernote.edam.type.LinkedNotebook} from the linked account
*
* Synchronous call
*
* @param linkedNotebook
*/
public Notebook getCorrespondingNotebook(LinkedNotebook linkedNotebook) throws TException, EDAMUserException, EDAMSystemException, EDAMNotFoundException {
SharedNotebook sharedNotebook = getAsyncClient().getClient().getSharedNotebookByAuth(getAuthenticationToken());
return getAsyncClient().getClient().getNotebook(getAuthenticationToken(), sharedNotebook.getNotebookGuid());
}
/**
* Checks writable permissions of {@link LinkedNotebook} on Linked/business account
*
* Asynchronous call
*
* @param linkedNotebook
* @param callback
*/
public void isNotebookWritableAsync(LinkedNotebook linkedNotebook, OnClientCallback<Boolean> callback) {
AsyncReflector.execute(this, callback, "isLinkedNotebookWritable", linkedNotebook);
}
/**
* Checks writable permissions of {@link LinkedNotebook} on Linked account
*
* Synchronous call
*
* @param linkedNotebook
*/
public boolean isNotebookWritable(LinkedNotebook linkedNotebook) throws EDAMUserException, TException, EDAMSystemException, EDAMNotFoundException {
Notebook notebook = getCorrespondingNotebook(linkedNotebook);
return !notebook.getRestrictions().isNoCreateNotes();
}
}
| daimajia/EverMemo-EverNote | src/com/evernote/client/android/AsyncLinkedNoteStoreClient.java | Java | mit | 9,741 |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using Xunit;
namespace System.Collections.Sequences.Tests
{
public class SequenceTests
{
    [Theory]
    [InlineData(new int[] { })]
    [InlineData(new int[] { 1 })]
    [InlineData(new int[] { 1, 2, 3 })]
    public void ArrayList(int[] array)
    {
        ArrayList<int> list = CreateArrayList(array);

        // First pass: manual cursor-based traversal via TryGet must yield the
        // source values in order and visit every element exactly once.
        SequencePosition cursor = default;
        int index = 0;
        while (list.TryGet(ref cursor, out int current))
        {
            Assert.Equal(array[index], current);
            index++;
        }
        Assert.Equal(array.Length, index);

        // Second pass: the enumerator must produce the same forward ordering.
        index = 0;
        foreach (int current in list)
        {
            Assert.Equal(array[index], current);
            index++;
        }
        Assert.Equal(array.Length, index);
    }

    private static ArrayList<int> CreateArrayList(int[] array)
    {
        var list = new ArrayList<int>();
        foreach (int value in array)
        {
            list.Add(value);
        }
        return list;
    }

    [Theory]
    [InlineData(new int[] { })]
    [InlineData(new int[] { 1 })]
    [InlineData(new int[] { 1, 2, 3 })]
    public void LinkedContainer(int[] array)
    {
        LinkedContainer<int> container = CreateLinkedContainer(array);

        // Items are prepended on Add, so traversal order is the reverse of the input.
        SequencePosition cursor = default;
        int index = array.Length;
        while (container.TryGet(ref cursor, out int current))
        {
            index--;
            Assert.Equal(array[index], current);
        }
    }

    private static LinkedContainer<int> CreateLinkedContainer(int[] array)
    {
        var container = new LinkedContainer<int>();
        foreach (int value in array)
        {
            container.Add(value); // this adds to front
        }
        return container;
    }

    [Theory]
    [InlineData(new int[] { })]
    [InlineData(new int[] { 1 })]
    [InlineData(new int[] { 1, 2, 3 })]
    public void Hashtable(int[] array)
    {
        Hashtable<int, string> table = CreateHashtable(array);

        // Traversal is expected to visit entries in insertion order of the keys.
        SequencePosition cursor = default;
        int index = 0;
        while (table.TryGet(ref cursor, out KeyValuePair<int, string> entry))
        {
            Assert.Equal(array[index], entry.Key);
            index++;
        }
    }

    private static Hashtable<int, string> CreateHashtable(int[] array)
    {
        var table = new Hashtable<int, string>(EqualityComparer<int>.Default);
        foreach (int value in array)
        {
            table.Add(value, value.ToString());
        }
        return table;
    }
}
}
| KrzysztofCwalina/corefxlab | tests/System.Collections.Sequences.Tests/BasicUnitTests.cs | C# | mit | 2,965 |
package de.hilling.maven.release.testprojects.versioninheritor;
import org.junit.Assert;
import org.junit.Test;
public class CalculatorTest {

    /**
     * Verifies that {@link Calculator#add(int, int)} sums two integers.
     */
    @Test
    public void testAdd() throws Exception {
        final int sum = new Calculator().add(1, 2);
        Assert.assertEquals(3, sum);
        // Printed so an outer test can assert that this test actually executed.
        System.out.println("The Calculator Test has run");
    }
}
| guhilling/smart-release-plugin | test-projects/parent-as-sibling/core-utils/src/test/java/de/hilling/maven/release/testprojects/versioninheritor/CalculatorTest.java | Java | mit | 369 |
<?php
/**
* This file is part of the Tmdb PHP API created by Michael Roterman.
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*
* @package Tmdb
* @author Michael Roterman <michael@wtfz.net>
* @copyright (c) 2013, Michael Roterman
* @version 0.0.1
*/
namespace Tmdb\Model\Collection;
use Tmdb\Model\Common\GenericCollection;
/**
 * Class QueryParametersCollection
 *
 * Marker collection for API query parameters; inherits all behavior from
 * GenericCollection and adds none of its own.
 *
 * @package Tmdb\Model\Collection
 */
class QueryParametersCollection extends GenericCollection
{
}
| yorkulibraries/vufind | web/vendor/php-tmdb/api/lib/Tmdb/Model/Collection/QueryParametersCollection.php | PHP | gpl-2.0 | 555 |
using System;
using System.Collections.Generic;
using System.Text;
namespace Rssdp
{
/// <summary>
/// Event arguments for the <see cref="SsdpDevice.DeviceAdded"/> and <see cref="SsdpDevice.DeviceRemoved"/> events.
/// </summary>
public sealed class DeviceEventArgs : EventArgs
{
    private readonly SsdpDevice _device;

    /// <summary>
    /// Constructs a new instance for the specified <see cref="SsdpDevice"/>.
    /// </summary>
    /// <param name="device">The <see cref="SsdpDevice"/> associated with the event this argument class is being used for.</param>
    /// <exception cref="System.ArgumentNullException">Thrown if the <paramref name="device"/> argument is null.</exception>
    public DeviceEventArgs(SsdpDevice device)
    {
        if (device == null)
        {
            throw new ArgumentNullException("device");
        }

        _device = device;
    }

    /// <summary>
    /// Returns the <see cref="SsdpDevice"/> instance the event being raised for.
    /// </summary>
    public SsdpDevice Device
    {
        get { return _device; }
    }
}
} | gerrit507/Emby | RSSDP/DeviceEventArgs.cs | C# | gpl-2.0 | 1,125 |
// { dg-do compile { target c++11 } }
#include "../abi/mangle55.C"
| Gurgel100/gcc | gcc/testsuite/g++.dg/analyzer/pr93899.C | C++ | gpl-2.0 | 67 |
<?php
namespace Drupal\KernelTests\Core\Datetime;
use Drupal\KernelTests\KernelTestBase;
use Drupal\language\Entity\ConfigurableLanguage;
/**
* Tests date formatting.
*
* @group Common
* @coversDefaultClass \Drupal\Core\Datetime\DateFormatter
*/
class DateFormatterTest extends KernelTestBase {

  /**
   * {@inheritdoc}
   */
  protected static $modules = ['language', 'system'];

  /**
   * Arbitrary langcode for a custom language.
   */
  const LANGCODE = 'xx';

  /**
   * {@inheritdoc}
   */
  protected function setUp(): void {
    parent::setUp();
    $this->installConfig(['system']);
    // Register custom string translations for the test language so translated
    // date parts ("domingo", "marzo") can be asserted below.
    $this->setSetting('locale_custom_strings_' . self::LANGCODE, [
      '' => ['Sunday' => 'domingo'],
      'Long month name' => ['March' => 'marzo'],
    ]);
    // Pin the long/medium/short date format patterns to known values so the
    // assertions are independent of the shipped defaults.
    $formats = $this->container->get('entity_type.manager')
      ->getStorage('date_format')
      ->loadMultiple(['long', 'medium', 'short']);
    $formats['long']->setPattern('l, j. F Y - G:i')->save();
    $formats['medium']->setPattern('j. F Y - G:i')->save();
    $formats['short']->setPattern('Y M j - g:ia')->save();
    ConfigurableLanguage::createFromLangcode(static::LANGCODE)->save();
  }

  /**
   * Tests DateFormatter::format().
   *
   * @covers ::format
   */
  public function testFormat() {
    /** @var \Drupal\Core\Datetime\DateFormatterInterface $formatter */
    $formatter = $this->container->get('date.formatter');
    /** @var \Drupal\Core\Language\LanguageManagerInterface $language_manager */
    $language_manager = $this->container->get('language_manager');
    $timestamp = strtotime('2007-03-26T00:00:00+00:00');
    $this->assertSame('Sunday, 25-Mar-07 17:00:00 PDT', $formatter->format($timestamp, 'custom', 'l, d-M-y H:i:s T', 'America/Los_Angeles', 'en'), 'Test all parameters.');
    $this->assertSame('domingo, 25-Mar-07 17:00:00 PDT', $formatter->format($timestamp, 'custom', 'l, d-M-y H:i:s T', 'America/Los_Angeles', self::LANGCODE), 'Test translated format.');
    $this->assertSame('l, 25-Mar-07 17:00:00 PDT', $formatter->format($timestamp, 'custom', '\\l, d-M-y H:i:s T', 'America/Los_Angeles', self::LANGCODE), 'Test an escaped format string.');
    $this->assertSame('\\domingo, 25-Mar-07 17:00:00 PDT', $formatter->format($timestamp, 'custom', '\\\\l, d-M-y H:i:s T', 'America/Los_Angeles', self::LANGCODE), 'Test format containing backslash character.');
    $this->assertSame('\\l, 25-Mar-07 17:00:00 PDT', $formatter->format($timestamp, 'custom', '\\\\\\l, d-M-y H:i:s T', 'America/Los_Angeles', self::LANGCODE), 'Test format containing backslash followed by escaped format string.');
    $this->assertSame('Monday, 26-Mar-07 01:00:00 BST', $formatter->format($timestamp, 'custom', 'l, d-M-y H:i:s T', 'Europe/London', 'en'), 'Test a different time zone.');
    $this->assertSame('Thu, 01/01/1970 - 00:00', $formatter->format(0, 'custom', '', 'UTC', 'en'), 'Test custom format with empty string.');
    // Make sure we didn't change the configuration override language.
    $this->assertSame('en', $language_manager->getConfigOverrideLanguage()->getId(), 'Configuration override language not disturbed,');
    // Test bad format string will use the fallback format.
    $this->assertSame($formatter->format($timestamp, 'fallback'), $formatter->format($timestamp, 'bad_format_string'), 'Test fallback format.');
    $this->assertSame('en', $language_manager->getConfigOverrideLanguage()->getId(), 'Configuration override language not disturbed,');
    // Change the default language and timezone.
    $this->config('system.site')->set('default_langcode', static::LANGCODE)->save();
    date_default_timezone_set('America/Los_Angeles');
    // Reset the language manager so new negotiations attempts will fall back on
    // on the new language.
    $language_manager->reset();
    $this->assertSame('en', $language_manager->getConfigOverrideLanguage()->getId(), 'Configuration override language not disturbed,');
    $this->assertSame('Sunday, 25-Mar-07 17:00:00 PDT', $formatter->format($timestamp, 'custom', 'l, d-M-y H:i:s T', 'America/Los_Angeles', 'en'), 'Test a different language.');
    $this->assertSame('Monday, 26-Mar-07 01:00:00 BST', $formatter->format($timestamp, 'custom', 'l, d-M-y H:i:s T', 'Europe/London'), 'Test a different time zone.');
    $this->assertSame('domingo, 25-Mar-07 17:00:00 PDT', $formatter->format($timestamp, 'custom', 'l, d-M-y H:i:s T'), 'Test custom date format.');
    $this->assertSame('domingo, 25. marzo 2007 - 17:00', $formatter->format($timestamp, 'long'), 'Test long date format.');
    $this->assertSame('25. marzo 2007 - 17:00', $formatter->format($timestamp, 'medium'), 'Test medium date format.');
    $this->assertSame('2007 Mar 25 - 5:00pm', $formatter->format($timestamp, 'short'), 'Test short date format.');
    $this->assertSame('25. marzo 2007 - 17:00', $formatter->format($timestamp), 'Test default date format.');
    // Test HTML time element formats.
    $this->assertSame('2007-03-25T17:00:00-0700', $formatter->format($timestamp, 'html_datetime'), 'Test html_datetime date format.');
    $this->assertSame('2007-03-25', $formatter->format($timestamp, 'html_date'), 'Test html_date date format.');
    $this->assertSame('17:00:00', $formatter->format($timestamp, 'html_time'), 'Test html_time date format.');
    $this->assertSame('03-25', $formatter->format($timestamp, 'html_yearless_date'), 'Test html_yearless_date date format.');
    $this->assertSame('2007-W12', $formatter->format($timestamp, 'html_week'), 'Test html_week date format.');
    $this->assertSame('2007-03', $formatter->format($timestamp, 'html_month'), 'Test html_month date format.');
    $this->assertSame('2007', $formatter->format($timestamp, 'html_year'), 'Test html_year date format.');
    // Make sure we didn't change the configuration override language.
    $this->assertSame('en', $language_manager->getConfigOverrideLanguage()->getId(), 'Configuration override language not disturbed,');
    // Test bad format string will use the fallback format.
    $this->assertSame($formatter->format($timestamp, 'fallback'), $formatter->format($timestamp, 'bad_format_string'), 'Test fallback format.');
    $this->assertSame('en', $language_manager->getConfigOverrideLanguage()->getId(), 'Configuration override language not disturbed,');
    // HTML is not escaped by the date formatter, it must be escaped later.
    $this->assertSame("<script>alert('2007');</script>", $formatter->format($timestamp, 'custom', '\<\s\c\r\i\p\t\>\a\l\e\r\t\(\'Y\'\)\;\<\/\s\c\r\i\p\t\>'), 'Script tags not removed from dates.');
    $this->assertSame('<em>2007</em>', $formatter->format($timestamp, 'custom', '\<\e\m\>Y\<\/\e\m\>'), 'Em tags are not removed from dates.');
  }

}
| tobiasbuhrer/tobiasb | web/core/tests/Drupal/KernelTests/Core/Datetime/DateFormatterTest.php | PHP | gpl-2.0 | 6,727 |
/******************************************************************************
* Product: Adempiere ERP & CRM Smart Business Solution *
* Copyright (C) 1999-2007 ComPiere, Inc. All Rights Reserved. *
* This program is free software, you can redistribute it and/or modify it *
* under the terms version 2 of the GNU General Public License as published *
* by the Free Software Foundation. This program is distributed in the hope *
* that it will be useful, but WITHOUT ANY WARRANTY, without even the implied *
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *
* See the GNU General Public License for more details. *
* You should have received a copy of the GNU General Public License along *
* with this program, if not, write to the Free Software Foundation, Inc., *
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. *
* For the text or an alternative of this public license, you may reach us *
* ComPiere, Inc., 2620 Augustine Dr. #245, Santa Clara, CA 95054, USA *
* or via info@compiere.org or http://www.compiere.org/license.html *
*****************************************************************************/
package org.compiere.model;
import java.math.BigDecimal;
import java.sql.Timestamp;
import org.compiere.util.KeyNamePair;
/** Generated Interface for AD_WF_Responsible
* @author Adempiere (generated)
* @version Release 3.8.0
*/
public interface I_AD_WF_Responsible
{
    // NOTE: This interface is generated by the Adempiere model generator (see class javadoc).
    // Do not hand-edit member signatures; regenerate from the AD_WF_Responsible table instead.

    /** TableName=AD_WF_Responsible */
    public static final String Table_Name = "AD_WF_Responsible";

    /** AD_Table_ID=646 */
    public static final int Table_ID = MTable.getTable_ID(Table_Name);

    KeyNamePair Model = new KeyNamePair(Table_ID, Table_Name);

    /** AccessLevel = 6 - System - Client
     */
    BigDecimal accessLevel = BigDecimal.valueOf(6);

    /** Load Meta Data */

    /** Column name AD_Client_ID */
    public static final String COLUMNNAME_AD_Client_ID = "AD_Client_ID";

    /** Get Client.
      * Client/Tenant for this installation.
      */
    public int getAD_Client_ID();

    /** Column name AD_Org_ID */
    public static final String COLUMNNAME_AD_Org_ID = "AD_Org_ID";

    /** Set Organization.
      * Organizational entity within client
      */
    public void setAD_Org_ID (int AD_Org_ID);

    /** Get Organization.
      * Organizational entity within client
      */
    public int getAD_Org_ID();

    /** Column name AD_Role_ID */
    public static final String COLUMNNAME_AD_Role_ID = "AD_Role_ID";

    /** Set Role.
      * Responsibility Role
      */
    public void setAD_Role_ID (int AD_Role_ID);

    /** Get Role.
      * Responsibility Role
      */
    public int getAD_Role_ID();

    public org.compiere.model.I_AD_Role getAD_Role() throws RuntimeException;

    /** Column name AD_User_ID */
    public static final String COLUMNNAME_AD_User_ID = "AD_User_ID";

    /** Set User/Contact.
      * User within the system - Internal or Business Partner Contact
      */
    public void setAD_User_ID (int AD_User_ID);

    /** Get User/Contact.
      * User within the system - Internal or Business Partner Contact
      */
    public int getAD_User_ID();

    public org.compiere.model.I_AD_User getAD_User() throws RuntimeException;

    /** Column name AD_WF_Responsible_ID */
    public static final String COLUMNNAME_AD_WF_Responsible_ID = "AD_WF_Responsible_ID";

    /** Set Workflow Responsible.
      * Responsible for Workflow Execution
      */
    public void setAD_WF_Responsible_ID (int AD_WF_Responsible_ID);

    /** Get Workflow Responsible.
      * Responsible for Workflow Execution
      */
    public int getAD_WF_Responsible_ID();

    /** Column name Created */
    public static final String COLUMNNAME_Created = "Created";

    /** Get Created.
      * Date this record was created
      */
    public Timestamp getCreated();

    /** Column name CreatedBy */
    public static final String COLUMNNAME_CreatedBy = "CreatedBy";

    /** Get Created By.
      * User who created this records
      */
    public int getCreatedBy();

    /** Column name Description */
    public static final String COLUMNNAME_Description = "Description";

    /** Set Description.
      * Optional short description of the record
      */
    public void setDescription (String Description);

    /** Get Description.
      * Optional short description of the record
      */
    public String getDescription();

    /** Column name EntityType */
    public static final String COLUMNNAME_EntityType = "EntityType";

    /** Set Entity Type.
      * Dictionary Entity Type;
      Determines ownership and synchronization
      */
    public void setEntityType (String EntityType);

    /** Get Entity Type.
      * Dictionary Entity Type;
      Determines ownership and synchronization
      */
    public String getEntityType();

    /** Column name IsActive */
    public static final String COLUMNNAME_IsActive = "IsActive";

    /** Set Active.
      * The record is active in the system
      */
    public void setIsActive (boolean IsActive);

    /** Get Active.
      * The record is active in the system
      */
    public boolean isActive();

    /** Column name Name */
    public static final String COLUMNNAME_Name = "Name";

    /** Set Name.
      * Alphanumeric identifier of the entity
      */
    public void setName (String Name);

    /** Get Name.
      * Alphanumeric identifier of the entity
      */
    public String getName();

    /** Column name ResponsibleType */
    public static final String COLUMNNAME_ResponsibleType = "ResponsibleType";

    /** Set Responsible Type.
      * Type of the Responsibility for a workflow
      */
    public void setResponsibleType (String ResponsibleType);

    /** Get Responsible Type.
      * Type of the Responsibility for a workflow
      */
    public String getResponsibleType();

    /** Column name Updated */
    public static final String COLUMNNAME_Updated = "Updated";

    /** Get Updated.
      * Date this record was updated
      */
    public Timestamp getUpdated();

    /** Column name UpdatedBy */
    public static final String COLUMNNAME_UpdatedBy = "UpdatedBy";

    /** Get Updated By.
      * User who updated this records
      */
    public int getUpdatedBy();
}
| armenrz/adempiere | base/src/org/compiere/model/I_AD_WF_Responsible.java | Java | gpl-2.0 | 6,101 |
/*
* Copyright (C) 2008-2018 TrinityCore <https://www.trinitycore.org/>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or (at your
* option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/* ScriptData
SDName: Boss_NexusPrince_Shaffar
SD%Complete: 80
SDComment: Need more tuning of spell timers, it should not be as linear fight as current. Also should possibly find a better way to deal with his three initial beacons to make sure all aggro.
SDCategory: Auchindoun, Mana Tombs
EndScriptData */
#include "ScriptMgr.h"
#include "mana_tombs.h"
#include "MotionMaster.h"
#include "ScriptedCreature.h"
// Text group ids passed to Talk(); the actual lines live in the creature_text table.
enum Yells
{
    SAY_INTRO = 0,
    SAY_AGGRO = 1,
    SAY_SLAY = 2,
    SAY_SUMMON = 3,
    SAY_DEAD = 4
};
// Spell ids used by Shaffar and his summons.
enum Spells
{
    SPELL_BLINK = 34605,
    SPELL_FROSTBOLT = 32364,
    SPELL_FIREBALL = 32363,
    SPELL_FROSTNOVA = 32365,
    SPELL_ETHEREAL_BEACON = 32371, // Summons NPC_BEACON
    SPELL_ETHEREAL_BEACON_VISUAL = 32368,
    // Ethereal Beacon
    SPELL_ARCANE_BOLT = 15254,
    SPELL_ETHEREAL_APPRENTICE = 32372 // Summon 18430
};
// Creature entry ids referenced by this script.
enum Creatures
{
    NPC_BEACON = 18431,
    NPC_SHAFFAR = 18344
};
enum Misc
{
    // Number of beacons summoned around Shaffar at reset.
    // NOTE(review): currently unreferenced — Reset() hard-codes three SummonCreature calls.
    NR_INITIAL_BEACONS = 3
};
// EventMap event ids for Shaffar's combat rotation.
enum Events
{
    EVENT_BLINK = 1,
    EVENT_BEACON,
    EVENT_FIREBALL,
    EVENT_FROSTBOLT,
    EVENT_FROST_NOVA
};
class boss_nexusprince_shaffar : public CreatureScript
{
public:
boss_nexusprince_shaffar() : CreatureScript("boss_nexusprince_shaffar") { }
struct boss_nexusprince_shaffarAI : public BossAI
{
boss_nexusprince_shaffarAI(Creature* creature) : BossAI(creature, DATA_NEXUSPRINCE_SHAFFAR)
{
_hasTaunted = false;
}
void Reset() override
{
_Reset();
float dist = 8.0f;
float posX, posY, posZ, angle;
me->GetHomePosition(posX, posY, posZ, angle);
me->SummonCreature(NPC_BEACON, posX - dist, posY - dist, posZ, angle, TEMPSUMMON_CORPSE_TIMED_DESPAWN, 7200000);
me->SummonCreature(NPC_BEACON, posX - dist, posY + dist, posZ, angle, TEMPSUMMON_CORPSE_TIMED_DESPAWN, 7200000);
me->SummonCreature(NPC_BEACON, posX + dist, posY, posZ, angle, TEMPSUMMON_CORPSE_TIMED_DESPAWN, 7200000);
}
void MoveInLineOfSight(Unit* who) override
{
if (!_hasTaunted && who->GetTypeId() == TYPEID_PLAYER && me->IsWithinDistInMap(who, 100.0f))
{
Talk(SAY_INTRO);
_hasTaunted = true;
}
}
void EnterCombat(Unit* /*who*/) override
{
Talk(SAY_AGGRO);
_EnterCombat();
events.ScheduleEvent(EVENT_BEACON, 10000);
events.ScheduleEvent(EVENT_FIREBALL, 8000);
events.ScheduleEvent(EVENT_FROSTBOLT, 4000);
events.ScheduleEvent(EVENT_FROST_NOVA, 15000);
}
void JustSummoned(Creature* summoned) override
{
if (summoned->GetEntry() == NPC_BEACON)
{
summoned->CastSpell(summoned, SPELL_ETHEREAL_BEACON_VISUAL, false);
if (Unit* target = SelectTarget(SELECT_TARGET_RANDOM, 0))
summoned->AI()->AttackStart(target);
}
summons.Summon(summoned);
}
void KilledUnit(Unit* victim) override
{
if (victim->GetTypeId() == TYPEID_PLAYER)
Talk(SAY_SLAY);
}
void JustDied(Unit* /*killer*/) override
{
Talk(SAY_DEAD);
_JustDied();
}
void ExecuteEvent(uint32 eventId) override
{
switch (eventId)
{
case EVENT_BLINK:
if (me->IsNonMeleeSpellCast(false))
me->InterruptNonMeleeSpells(true);
// expire movement, will prevent from running right back to victim after cast
// (but should MoveChase be used again at a certain time or should he not move?)
if (me->GetMotionMaster()->GetCurrentMovementGeneratorType() == CHASE_MOTION_TYPE)
me->GetMotionMaster()->MovementExpired();
DoCast(me, SPELL_BLINK);
break;
case EVENT_BEACON:
if (!urand(0, 3))
Talk(SAY_SUMMON);
DoCast(me, SPELL_ETHEREAL_BEACON, true);
events.ScheduleEvent(EVENT_BEACON, 10000);
break;
case EVENT_FIREBALL:
DoCastVictim(SPELL_FROSTBOLT);
events.ScheduleEvent(EVENT_FIREBALL, urand(4500, 6000));
break;
case EVENT_FROSTBOLT:
DoCastVictim(SPELL_FROSTBOLT);
events.ScheduleEvent(EVENT_FROSTBOLT, urand(4500, 6000));
break;
case EVENT_FROST_NOVA:
DoCast(me, SPELL_FROSTNOVA);
events.ScheduleEvent(EVENT_FROST_NOVA, urand(17500, 25000));
events.ScheduleEvent(EVENT_BLINK, 1500);
break;
default:
break;
}
}
private:
bool _hasTaunted;
};
CreatureAI* GetAI(Creature* creature) const override
{
return GetManaTombsAI<boss_nexusprince_shaffarAI>(creature);
}
};
// EventMap event ids for the Ethereal Beacon add.
enum EtherealBeacon
{
    EVENT_APPRENTICE = 1,
    EVENT_ARCANE_BOLT
};
// Beacon summoned by Shaffar: spams arcane bolts, then converts itself into an
// Ethereal Apprentice after a difficulty-dependent delay.
class npc_ethereal_beacon : public CreatureScript
{
    public:
        npc_ethereal_beacon() : CreatureScript("npc_ethereal_beacon") { }

        struct npc_ethereal_beaconAI : public ScriptedAI
        {
            npc_ethereal_beaconAI(Creature* creature) : ScriptedAI(creature) { }

            void Reset() override
            {
                _events.Reset();
            }

            void EnterCombat(Unit* who) override
            {
                // Pull Shaffar into the fight if he has not aggroed yet.
                if (Creature* shaffar = me->FindNearestCreature(NPC_SHAFFAR, 100.0f))
                    if (!shaffar->IsInCombat())
                        shaffar->AI()->AttackStart(who);
                // DUNGEON_MODE(20000, 10000): presumably normal/heroic timings — confirm arg order.
                _events.ScheduleEvent(EVENT_APPRENTICE, DUNGEON_MODE(20000, 10000));
                _events.ScheduleEvent(EVENT_ARCANE_BOLT, 1000);
            }

            void JustSummoned(Creature* summoned) override
            {
                // The spawned apprentice inherits the beacon's current target.
                summoned->AI()->AttackStart(me->GetVictim());
            }

            void UpdateAI(uint32 diff) override
            {
                if (!UpdateVictim())
                    return;
                _events.Update(diff);
                if (me->HasUnitState(UNIT_STATE_CASTING))
                    return;
                while (uint32 eventId = _events.ExecuteEvent())
                {
                    switch (eventId)
                    {
                        case EVENT_APPRENTICE:
                            // Summon the apprentice and remove the beacon itself.
                            DoCast(me, SPELL_ETHEREAL_APPRENTICE, true);
                            me->DespawnOrUnsummon();
                            break;
                        case EVENT_ARCANE_BOLT:
                            DoCastVictim(SPELL_ARCANE_BOLT);
                            _events.ScheduleEvent(EVENT_ARCANE_BOLT, urand(2000, 4500));
                            break;
                        default:
                            break;
                    }
                }
            }

        private:
            EventMap _events;
        };

        CreatureAI* GetAI(Creature* creature) const override
        {
            return GetManaTombsAI<npc_ethereal_beaconAI>(creature);
        }
};
// Spell and event ids for the Ethereal Apprentice add.
enum EtherealApprentice
{
    SPELL_ETHEREAL_APPRENTICE_FIREBOLT = 32369,
    SPELL_ETHEREAL_APPRENTICE_FROSTBOLT = 32370,
    EVENT_ETHEREAL_APPRENTICE_FIREBOLT = 1,
    EVENT_ETHEREAL_APPRENTICE_FROSTBOLT
};
// Apprentice spawned by the beacon: alternates firebolt and frostbolt on a
// fixed 3-second cadence.
class npc_ethereal_apprentice : public CreatureScript
{
    public:
        npc_ethereal_apprentice() : CreatureScript("npc_ethereal_apprentice") { }

        struct npc_ethereal_apprenticeAI : public ScriptedAI
        {
            npc_ethereal_apprenticeAI(Creature* creature) : ScriptedAI(creature) { }

            void Reset() override
            {
                _events.Reset();
            }

            void EnterCombat(Unit* /*who*/) override
            {
                _events.ScheduleEvent(EVENT_ETHEREAL_APPRENTICE_FIREBOLT, 3000);
            }

            void UpdateAI(uint32 diff) override
            {
                if (!UpdateVictim())
                    return;
                _events.Update(diff);
                if (me->HasUnitState(UNIT_STATE_CASTING))
                    return;
                while (uint32 eventId = _events.ExecuteEvent())
                {
                    switch (eventId)
                    {
                        // Each bolt schedules the other, producing a strict alternation.
                        case EVENT_ETHEREAL_APPRENTICE_FIREBOLT:
                            DoCastVictim(SPELL_ETHEREAL_APPRENTICE_FIREBOLT, true);
                            _events.ScheduleEvent(EVENT_ETHEREAL_APPRENTICE_FROSTBOLT, 3000);
                            break;
                        case EVENT_ETHEREAL_APPRENTICE_FROSTBOLT:
                            DoCastVictim(SPELL_ETHEREAL_APPRENTICE_FROSTBOLT, true);
                            _events.ScheduleEvent(EVENT_ETHEREAL_APPRENTICE_FIREBOLT, 3000);
                            break;
                        default:
                            break;
                    }
                }
            }

        private:
            EventMap _events;
        };

        CreatureAI* GetAI(Creature* creature) const override
        {
            return GetManaTombsAI<npc_ethereal_apprenticeAI>(creature);
        }
};
// Spell and event ids for Yor (bonus boss).
enum Yor
{
    SPELL_DOUBLE_BREATH = 38361,
    EVENT_DOUBLE_BREATH = 1
};
// Yor: melee boss that periodically uses Double Breath when its victim is in melee range.
class npc_yor : public CreatureScript
{
    public:
        npc_yor() : CreatureScript("npc_yor") { }

        struct npc_yorAI : public ScriptedAI
        {
            npc_yorAI(Creature* creature) : ScriptedAI(creature) { }

            void Reset() override { }

            void EnterCombat(Unit* /*who*/) override
            {
                _events.ScheduleEvent(EVENT_DOUBLE_BREATH, urand(6000,9000));
            }

            void UpdateAI(uint32 diff) override
            {
                if (!UpdateVictim())
                    return;
                _events.Update(diff);
                while (uint32 eventId = _events.ExecuteEvent())
                {
                    switch (eventId)
                    {
                        case EVENT_DOUBLE_BREATH:
                            // Only cast when the victim is within melee (breath) range;
                            // the event is rescheduled either way.
                            if (me->IsWithinDist(me->GetVictim(), ATTACK_DISTANCE))
                                DoCastVictim(SPELL_DOUBLE_BREATH);
                            _events.ScheduleEvent(EVENT_DOUBLE_BREATH, urand(6000,9000));
                            break;
                        default:
                            break;
                    }
                }
                DoMeleeAttackIfReady();
            }

        private:
            EventMap _events;
        };

        CreatureAI* GetAI(Creature* creature) const override
        {
            return GetManaTombsAI<npc_yorAI>(creature);
        }
};
// Script registration entry point invoked by the core's script loader; the
// CreatureScript constructors register each script by name.
void AddSC_boss_nexusprince_shaffar()
{
    new boss_nexusprince_shaffar();
    new npc_ethereal_beacon();
    new npc_ethereal_apprentice();
    new npc_yor();
}
| Golrag/TrinityCore | src/server/scripts/Outland/Auchindoun/ManaTombs/boss_nexusprince_shaffar.cpp | C++ | gpl-2.0 | 12,484 |
package com.codename1.ui.layouts.mig;
import com.codename1.ui.Display;
import java.util.ArrayList;
import java.util.HashMap;
/*
* License (BSD):
* ==============
*
* Copyright (c) 2004, Mikael Grev, MiG InfoCom AB. (miglayout (at) miginfocom (dot) com)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or other
* materials provided with the distribution.
* Neither the name of the MiG InfoCom AB nor the names of its contributors may be
* used to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
* OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
* OF SUCH DAMAGE.
*
* @version 1.0
* @author Mikael Grev, MiG InfoCom AB
* Date: 2006-sep-08
*/
/**
 * Static registry mapping (layout, key) pairs to bounds rectangles so that
 * component constraints can link to named IDs. Layouts are stored as
 * soft/weak references (LAYOUTS) with two parallel lists of per-layout maps:
 * VALUES (final bounds) and VALUES_TEMP (in-progress bounds for the current
 * layout pass). The three lists are kept index-aligned; dead references are
 * pruned opportunistically while iterating.
 */
public final class LinkHandler
{
    // Indices into the 6-element bounds arrays stored in VALUES/VALUES_TEMP.
    public static final int X = 0;
    public static final int Y = 1;
    public static final int WIDTH = 2;
    public static final int HEIGHT = 3;
    public static final int X2 = 4;
    public static final int Y2 = 5;

    // Parallel lists: LAYOUTS.get(i) owns VALUES.get(i) and VALUES_TEMP.get(i).
    private static final ArrayList<Object> LAYOUTS = new ArrayList<Object>(4);
    private static final ArrayList<HashMap<String, int[]>> VALUES = new ArrayList<HashMap<String, int[]>>(4);
    private static final ArrayList<HashMap<String, int[]>> VALUES_TEMP = new ArrayList<HashMap<String, int[]>>(4);

    // Static utility class; never instantiated.
    private LinkHandler()
    {
    }

    /**
     * Looks up one component (X/Y/WIDTH/...) of the bounds linked to
     * key for the given layout. Temporary (in-progress) values take
     * precedence over final ones. Returns null if not set. Also prunes
     * entries whose layout reference has been garbage collected.
     */
    public synchronized static Integer getValue(Object layout, String key, int type)
    {
        Integer ret = null;
        boolean cont = true;
        for (int i = LAYOUTS.size() - 1; i >= 0; i--) {
            Object l = Display.getInstance().extractHardRef(LAYOUTS.get(i));
            if (ret == null && l == layout) {
                // Prefer the temporary value from the current layout pass.
                int[] rect = VALUES_TEMP.get(i).get(key);
                if (cont && rect != null && rect[type] != LayoutUtil.NOT_SET) {
                    ret = new Integer(rect[type]);
                } else {
                    rect = VALUES.get(i).get(key);
                    ret = (rect != null && rect[type] != LayoutUtil.NOT_SET) ? new Integer(rect[type]) : null;
                }
                cont = false;
            }
            // Reference died: drop the layout and both of its value maps.
            if (l == null) {
                LAYOUTS.remove(i);
                VALUES.remove(i);
                VALUES_TEMP.remove(i);
            }
        }
        return ret;
    }

    /** Sets a key that can be linked to from any component.
     * @param layout The MigLayout instance
     * @param key The key to link to. This is the same as the ID in a component constraint.
     * @param x x
     * @param y y
     * @param width Width
     * @param height Height
     * @return If the value was changed
     */
    public synchronized static boolean setBounds(Object layout, String key, int x, int y, int width, int height)
    {
        return setBounds(layout, key, x, y, width, height, false, false);
    }

    /**
     * Full-control variant of setBounds.
     * @param temporary store into VALUES_TEMP (current pass) rather than VALUES
     * @param incCur if true, grow the existing rectangle to enclose the new
     *               bounds instead of replacing it
     * @return true if the stored value changed
     */
    synchronized static boolean setBounds(Object layout, String key, int x, int y, int width, int height, boolean temporary, boolean incCur)
    {
        for (int i = LAYOUTS.size() - 1; i >= 0; i--) {
            Object l = Display.getInstance().extractHardRef(LAYOUTS.get(i));
            if (l == layout) {
                HashMap<String, int[]> map = (temporary ? VALUES_TEMP : VALUES).get(i);
                int[] old = map.get(key);
                if (old == null || old[X] != x || old[Y] != y || old[WIDTH] != width || old[HEIGHT] != height) {
                    if (old == null || incCur == false) {
                        // Replace wholesale; X2/Y2 are derived and cached.
                        map.put(key, new int[] {x, y, width, height, x + width, y + height});
                        return true;
                    } else {
                        // Grow the stored rectangle to the union of old and new.
                        boolean changed = false;
                        if (x != LayoutUtil.NOT_SET) {
                            if (old[X] == LayoutUtil.NOT_SET || x < old[X]) {
                                old[X] = x;
                                old[WIDTH] = old[X2] - x;
                                changed = true;
                            }
                            if (width != LayoutUtil.NOT_SET) {
                                int x2 = x + width;
                                if (old[X2] == LayoutUtil.NOT_SET || x2 > old[X2]) {
                                    old[X2] = x2;
                                    old[WIDTH] = x2 - old[X];
                                    changed = true;
                                }
                            }
                        }
                        if (y != LayoutUtil.NOT_SET) {
                            if (old[Y] == LayoutUtil.NOT_SET || y < old[Y]) {
                                old[Y] = y;
                                old[HEIGHT] = old[Y2] - y;
                                changed = true;
                            }
                            if (height != LayoutUtil.NOT_SET) {
                                int y2 = y + height;
                                if (old[Y2] == LayoutUtil.NOT_SET || y2 > old[Y2]) {
                                    old[Y2] = y2;
                                    old[HEIGHT] = y2 - old[Y];
                                    changed = true;
                                }
                            }
                        }
                        return changed;
                    }
                }
                return false;
            }
        }
        // Unknown layout: register it, seeding one map and leaving the
        // other empty so the three lists stay index-aligned.
        LAYOUTS.add(Display.getInstance().createSoftWeakRef(layout));
        int[] bounds = new int[] {x, y, width, height, x + width, y + height};
        HashMap<String, int[]> values = new HashMap<String, int[]>(4);
        if (temporary)
            values.put(key, bounds);
        VALUES_TEMP.add(values);
        values = new HashMap<String, int[]>(4);
        if (temporary == false)
            values.put(key, bounds);
        VALUES.add(values);
        return true;
    }

    /** This method clear any weak references right away instead of waiting for the GC. This might be advantageous
     * if lots of layout are created and disposed of quickly to keep memory consumption down.
     * @since 3.7.4
     */
    public synchronized static void clearWeakReferencesNow()
    {
        // NOTE(review): only LAYOUTS is cleared here; VALUES/VALUES_TEMP keep
        // their entries until pruned by getValue — presumably intentional
        // upstream behavior, but it temporarily leaves the lists misaligned
        // with LAYOUTS.
        LAYOUTS.clear();
    }

    /**
     * Removes the final (non-temporary) bounds stored under key for the
     * given layout.
     * @return true if an entry existed and was removed
     */
    public synchronized static boolean clearBounds(Object layout, String key)
    {
        for (int i = LAYOUTS.size() - 1; i >= 0; i--) {
            Object l = Display.getInstance().extractHardRef(LAYOUTS.get(i));
            if (l == layout)
                return VALUES.get(i).remove(key) != null;
        }
        return false;
    }

    /** Drops all temporary (current-pass) bounds for the given layout. */
    synchronized static void clearTemporaryBounds(Object layout)
    {
        for (int i = LAYOUTS.size() - 1; i >= 0; i--) {
            Object l = Display.getInstance().extractHardRef(LAYOUTS.get(i));
            if (l == layout) {
                VALUES_TEMP.get(i).clear();
                return;
            }
        }
    }
}
| JrmyDev/CodenameOne | CodenameOne/src/com/codename1/ui/layouts/mig/LinkHandler.java | Java | gpl-2.0 | 6,705 |
require 'test/unit'
require 'soap/rpc/driver'
require 'soap/rpc/standaloneServer'
require 'soap/header/simplehandler'
module SOAP
  module Header
    # End-to-end test of SOAP header handlers: a standalone server
    # authenticates requests via a custom "auth" header (userid/passwd on
    # first call, then one-shot session ids), and the client handler
    # transparently carries the session id between calls. Also exercises
    # mustUnderstand handling with a dummy header.
    class TestAuthHeader < Test::Unit::TestCase
      Port = 17171
      PortName = 'http://tempuri.org/authHeaderPort'
      MyHeaderName = XSD::QName.new("http://tempuri.org/authHeader", "auth")
      DummyHeaderName = XSD::QName.new("http://tempuri.org/authHeader", "dummy")

      # Standalone SOAP server exposing deposit/withdrawal, guarded by
      # ServerAuthHeaderHandler.
      class AuthHeaderPortServer < SOAP::RPC::StandaloneServer
        class AuthHeaderService
          def self.create
            new
          end

          def deposit(amt)
            "deposit #{amt} OK"
          end

          def withdrawal(amt)
            "withdrawal #{amt} OK"
          end
        end

        def initialize(*arg)
          super
          add_rpc_servant(AuthHeaderService.new, PortName)
          ServerAuthHeaderHandler.init
          # The handler *class* is registered; the framework instantiates
          # it per request via its create method.
          add_request_headerhandler(ServerAuthHeaderHandler)
        end

        # Server-side header handler: validates userid/passwd or a
        # one-shot session id, and returns a fresh session id with every
        # successful response. Credential/session state is class-level.
        class ServerAuthHeaderHandler < SOAP::Header::SimpleHandler
          class << self
            def create
              new
            end

            def init
              @users = {
                'NaHi' => 'passwd',
                'HiNa' => 'wspass'
              }
              @sessions = {}
            end

            def login(userid, passwd)
              userid and passwd and @users[userid] == passwd
            end

            # Returns the userid stored for sessionid.
            # NOTE(review): raises NoMethodError for an unknown session id
            # (nil[0]) — tests only pass valid ids, so this path is unexercised.
            def auth(sessionid)
              @sessions[sessionid][0]
            end

            def create_session(userid)
              # Loop until an unused key is found.
              while true
                key = create_sessionkey
                break unless @sessions[key]
              end
              @sessions[key] = [userid]
              key
            end

            def destroy_session(sessionkey)
              @sessions.delete(sessionkey)
            end

            def sessions
              @sessions
            end

            private

            def create_sessionkey
              # Not cryptographically strong; adequate for this test.
              Time.now.usec.to_s
            end
          end

          def initialize
            super(MyHeaderName)
            @userid = @sessionid = nil
          end

          # Outbound: hand the (new) session id back to the client.
          def on_simple_outbound
            { "sessionid" => @sessionid }
          end

          # Inbound: accept either credentials or a valid one-shot session
          # id; on success rotate the session.
          def on_simple_inbound(my_header, mu)
            auth = false
            userid = my_header["userid"]
            passwd = my_header["passwd"]
            if self.class.login(userid, passwd)
              auth = true
            elsif sessionid = my_header["sessionid"]
              if userid = self.class.auth(sessionid)
                self.class.destroy_session(sessionid)
                auth = true
              end
            end
            raise RuntimeError.new("authentication failed") unless auth
            @userid = userid
            @sessionid = self.class.create_session(userid)
          end
        end
      end

      # Client-side handler: sends credentials first, then the session id
      # received from the previous response.
      class ClientAuthHeaderHandler < SOAP::Header::SimpleHandler
        def initialize(userid, passwd, mustunderstand)
          super(MyHeaderName)
          @sessionid = nil
          @userid = userid
          @passwd = passwd
          @mustunderstand = mustunderstand
        end

        def on_simple_outbound
          if @sessionid
            { "sessionid" => @sessionid }
          else
            { "userid" => @userid, "passwd" => @passwd }
          end
        end

        def on_simple_inbound(my_header, mustunderstand)
          @sessionid = my_header["sessionid"]
        end

        def sessionid
          @sessionid
        end
      end

      # Unrelated header used to test mustUnderstand semantics: the server
      # has no handler for it, so mustunderstand=true must raise.
      class DummyHeaderHandler < SOAP::Header::SimpleHandler
        def initialize(mustunderstand)
          super(DummyHeaderName)
          @mustunderstand = mustunderstand
        end

        def on_simple_outbound
          { XSD::QName.new("foo", "bar") => nil }
        end

        def on_simple_inbound(my_header, mustunderstand)
        end
      end

      def setup
        @endpoint = "http://localhost:#{Port}/"
        setup_server
        setup_client
      end

      def setup_server
        @server = AuthHeaderPortServer.new(self.class.name, nil, '0.0.0.0', Port)
        @server.level = Logger::Severity::ERROR
        # Run the server in a background thread for the duration of a test.
        @t = Thread.new {
          @server.start
        }
      end

      def setup_client
        @client = SOAP::RPC::Driver.new(@endpoint, PortName)
        @client.wiredump_dev = STDERR if $DEBUG
        @client.add_method('deposit', 'amt')
        @client.add_method('withdrawal', 'amt')
      end

      def teardown
        teardown_server
        teardown_client
      end

      def teardown_server
        @server.shutdown
        @t.kill
        @t.join
      end

      def teardown_client
        @client.reset_stream
      end

      def test_success_no_mu
        h = ClientAuthHeaderHandler.new('NaHi', 'passwd', false)
        @client.headerhandler << h
        do_transaction_check(h)
      end

      def test_success_mu
        h = ClientAuthHeaderHandler.new('NaHi', 'passwd', true)
        @client.headerhandler << h
        do_transaction_check(h)
      end

      def test_no_mu
        h = ClientAuthHeaderHandler.new('NaHi', 'passwd', true)
        @client.headerhandler << h
        @client.headerhandler << DummyHeaderHandler.new(false)
        do_transaction_check(h)
      end

      def test_mu
        h = ClientAuthHeaderHandler.new('NaHi', 'passwd', true)
        @client.headerhandler << h
        # Unknown header with mustUnderstand=1 must be rejected ...
        @client.headerhandler << (h2 = DummyHeaderHandler.new(true))
        assert_raise(SOAP::UnhandledMustUnderstandHeaderError) do
          assert_equal("deposit 150 OK", @client.deposit(150))
        end
        # ... and accepted once mustUnderstand is 0.
        @client.headerhandler.delete(h2)
        @client.headerhandler << (h2 = DummyHeaderHandler.new(false))
        do_transaction_check(h)
      end

      # Two calls in a row verify both credential login and session rotation.
      def do_transaction_check(h)
        assert_equal("deposit 150 OK", @client.deposit(150))
        serversess = AuthHeaderPortServer::ServerAuthHeaderHandler.sessions[h.sessionid]
        assert_equal("NaHi", serversess[0])
        assert_equal("withdrawal 120 OK", @client.withdrawal(120))
        serversess = AuthHeaderPortServer::ServerAuthHeaderHandler.sessions[h.sessionid]
        assert_equal("NaHi", serversess[0])
      end

      def test_authfailure
        h = ClientAuthHeaderHandler.new('NaHi', 'pa', false)
        @client.headerhandler << h
        assert_raises(RuntimeError) do
          @client.deposit(150)
        end
      end
    end
  end
end
| jacques/connector | vendor/gems/soap4r-1.5.5.20061022/test/soap/header/test_authheader.rb | Ruby | gpl-2.0 | 5,284 |
#######################################################################
#
# Author: Malte Helmert (helmert@informatik.uni-freiburg.de)
# (C) Copyright 2003-2004 Malte Helmert
#
# This file is part of LAMA.
#
# LAMA is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the license, or (at your option) any later version.
#
# LAMA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
#
#######################################################################
import cStringIO
import textwrap
__all__ = ["print_nested_list"]
def tokenize_list(obj):
    """Yield a flat token stream for a nested list.

    Lists become "(" tokens ")" with their items tokenized recursively;
    any non-list object is yielded unchanged as a single token.
    """
    if not isinstance(obj, list):
        yield obj
        return
    yield "("
    for element in obj:
        for token in tokenize_list(element):
            yield token
    yield ")"
def wrap_lines(lines):
    """Wrap each line to the default textwrap width (70 columns).

    Continuation lines are indented 4 columns past the line's own leading
    indentation. Long words and hyphenated words are never split, so PDDL
    identifiers such as "pick-up-block" stay intact.

    Bug fix: the old implementation worked around textwrap's hyphen
    breaking by replacing "-" with "_" before wrapping and swapping back
    afterwards, which silently corrupted any genuine "_" in the input
    into "-". textwrap.fill(..., break_on_hyphens=False) achieves the
    intended behavior directly without mangling the text.
    """
    for line in lines:
        indent = " " * (len(line) - len(line.lstrip()) + 4)
        yield textwrap.fill(line, subsequent_indent=indent,
                            break_long_words=False, break_on_hyphens=False)
def print_nested_list(nested_list):
    # Pretty-print a nested list in LISP style: each "(" opens a new line
    # at the current indent and increases it by 2; ")" closes on the same
    # line; atoms on one line are separated by single spaces. The rendered
    # text is then passed through wrap_lines for width limiting.
    # NOTE: Python 2 only (cStringIO, print statement).
    stream = cStringIO.StringIO()
    indent = 0
    startofline = True       # no text emitted yet on the current line
    pendingspace = False     # a space is owed before the next atom
    for token in tokenize_list(nested_list):
        if token == "(":
            # Open parens always start a fresh, indented line.
            if not startofline:
                stream.write("\n")
            stream.write("%s(" % (" " * indent))
            indent += 2
            startofline = False
            pendingspace = False
        elif token == ")":
            indent -= 2
            stream.write(")")
            startofline = False
            pendingspace = False
        else:
            if startofline:
                stream.write(" " * indent)
            if pendingspace:
                stream.write(" ")
            stream.write(token)
            startofline = False
            pendingspace = True
    for line in wrap_lines(stream.getvalue().splitlines()):
        print line
| PlanTool/plantool | wrappingPlanners/Deterministic/LAMA/seq-sat-lama/lama/translate/pddl/pretty_print.py | Python | gpl-2.0 | 2,178 |
<?php
namespace Drupal\KernelTests\Core\Extension;
use Drupal\KernelTests\KernelTestBase;
/**
* @coversDefaultClass \Drupal\Core\Extension\ThemeExtensionList
* @group Extension
*/
class ThemeExtensionListTest extends KernelTestBase {

  /**
   * @covers ::getList
   */
  public function testGetlist() {
    $extension_config = \Drupal::configFactory()->getEditable('core.extension');
    $extension_config
      ->set('module.testing', 1000)
      ->set('theme.test_theme', 0)
      ->save();

    // The installation profile is provided by a container parameter.
    // Saving the configuration doesn't automatically trigger invalidation
    $this->container->get('kernel')->rebuildContainer();

    /** @var \Drupal\Core\Extension\ThemeExtensionList $theme_extension_list */
    $theme_extension_list = \Drupal::service('extension.list.theme');
    $theme_list = $theme_extension_list->getList();
    $this->assertArrayHasKey('test_theme', $theme_list);
  }

  /**
   * Tests that themes have an empty default version set.
   */
  public function testThemeWithoutVersion() {
    $theme_info = \Drupal::service('extension.list.theme')
      ->get('test_theme_settings_features');
    $this->assertNull($theme_info->info['version']);
  }

}
| tobiasbuhrer/tobiasb | web/core/tests/Drupal/KernelTests/Core/Extension/ThemeExtensionListTest.php | PHP | gpl-2.0 | 1,189 |
<?php
/**
* @package com_zoo Component
* @file category.php
* @version 2.4.9 May 2011
* @author YOOtheme http://www.yootheme.com
* @copyright Copyright (C) 2007 - 2011 YOOtheme GmbH
* @license http://www.gnu.org/licenses/gpl-2.0.html GNU/GPLv2 only
*/
/*
	Class: Category
		Category related attributes and functions.
*/
class Category {

	/*
		Variable: id
			Primary key.
	*/
	public $id;

	/*
		Variable: application_id
			Related application id.
	*/
	public $application_id;

	/*
		Variable: name
			Category name.
	*/
	public $name;

	/*
		Variable: alias
			Category alias.
	*/
	public $alias;

	/*
		Variable: description
			Category description.
	*/
	public $description;

	/*
		Variable: parent
			Categories parent id.
	*/
	public $parent;

	/*
		Variable: ordering
			Categories ordering.
	*/
	public $ordering;

	/*
		Variable: published
			Category published state.
	*/
	public $published;

	/*
		Variable: params
			Category params.
	*/
	public $params;

	/*
		Variable: item_ids
			Related category item ids.
	*/
	public $item_ids;

	/*
		Variable: app
			App instance (assigned externally, e.g. by the table class).
	*/
	public $app;

	/*
		Variable: _parent
			Related category parent object.
	*/
	protected $_parent;

	/*
		Variable: _children
			Related category children objects.
	*/
	protected $_children = array();

	/*
		Variable: _items
			Related category item objects.
	*/
	protected $_items = array();

	/*
		Variable: _item_count
			Related category item count. (Public for legacy reasons.)
	*/
	public $_item_count;

	/*
		Variable: _total_item_count
			Item count including subcategories.
	*/
	protected $_total_item_count = null;

	public function __construct() {

		// init vars
		$app = App::getInstance('zoo');

		// decorate data as object
		$this->params = $app->parameter->create($this->params);

		// set related item ids (keyed by id for cheap array union via +)
		$this->item_ids = isset($this->item_ids) ? explode(',', $this->item_ids) : array();
		if (!empty($this->item_ids)) {
			$this->item_ids = array_combine($this->item_ids, $this->item_ids);
		}
	}

	/*
		Function: getApplication
			Get related application object.

		Returns:
			Application - application object
	*/
	public function getApplication() {
		return $this->app->table->application->get($this->application_id);
	}

	/*
		Function: hasChildren
			Does this category have children.

		Returns:
			Bool
	*/
	public function hasChildren() {
		return !empty($this->_children);
	}

	/*
		Function: getChildren
			Method to get category's children.

		Parameters:
			recursive - Recursivly retrieve childrens children.

		Returns:
			Array - children, keyed by category id when recursive
	*/
	public function getChildren($recursive = false) {
		if ($recursive) {
			$children = array();
			foreach ($this->_children as $child) {
				$children[$child->id] = $child;
				$children += $child->getChildren(true);
			}
			return $children;
		}
		return $this->_children;
	}

	/*
		Function: setChildren
			Set children.

		Returns:
			Category
	*/
	public function setChildren($val) {
		$this->_children = $val;
		return $this;
	}

	/*
		Function: addChild
			Add a child category.

		Returns:
			Category
	*/
	public function addChild($category) {
		$this->_children[] = $category;
		return $this;
	}

	/*
		Function: removeChild
			Remove a child.

		Returns:
			Category
	*/
	public function removeChild($child) {
		unset($this->_children[$child->id]);
		return $this;
	}

	/*
		Function: getParent
			Method to get category's parent.

		Returns:
			Category - parent
	*/
	public function getParent() {
		return $this->_parent;
	}

	/*
		Function: setParent
			Set parent.

		Returns:
			Category
	*/
	public function setParent($val) {
		$this->_parent = $val;
		return $this;
	}

	/*
		Function: getPathway
			Method to get category's pathway.

		Returns:
			Array - Array of parent categories (root first), keyed by id
	*/
	public function getPathway() {
		if ($this->_parent == null) {
			return array();
		}
		$pathway = $this->_parent->getPathway();
		$pathway[$this->id] = $this;
		return $pathway;
	}

	/*
		Function: isPublished
			Get published state.

		Returns:
			-
	*/
	public function isPublished() {
		return $this->published;
	}

	/*
		Function: setPublished
			Set category published state and fire event.

		Parameters:
			$val - State
			$save - Autosave category before fire event

		Returns:
			Category
	*/
	public function setPublished($val, $save = false) {
		if ($this->published != $val) {

			// set state
			// Bug fix: previously read the non-existent $this->state, so the
			// event always received a null old_state. The state property of
			// this class is $published.
			$old_state = $this->published;
			$this->published = $val;

			// autosave category ?
			if ($save) {
				$this->app->table->category->save($this);
			}

			// fire event
			$this->app->event->dispatcher->notify($this->app->event->create($this, 'category:stateChanged', compact('old_state')));
		}
		return $this;
	}

	/*
		Function: getPath
			Method to get the path to this category.

		Returns:
			Array - Category path (this category first, root last)
	*/
	public function getPath($path = array()) {
		$path[] = $this->id;
		if ($this->_parent != null) {
			$path = $this->_parent->getPath($path);
		}
		return $path;
	}

	/*
		Function: getItems
			Method to get category's items.
			Note: the result is cached after the first call; later calls
			ignore different arguments.

		Returns:
			Array
	*/
	public function getItems($published = false, $user = null, $orderby = '') {
		if (empty($this->_items)) {
			$this->_items = $this->app->table->item->getFromCategory($this->application_id, $this->id, $published, $user, $orderby);
		}
		return $this->_items;
	}

	/*
		Function: itemCount
			Method to count category's items.

		Returns:
			Int - Number of items
	*/
	public function itemCount() {
		if (!isset($this->_item_count)) {
			$this->_item_count = count($this->item_ids);
		}
		return $this->_item_count;
	}

	/*
		Function: totalItemCount
			Method to count category's items including all childrens items.

		Returns:
			Int - Number of items
	*/
	public function totalItemCount() {
		if (!isset($this->_total_item_count)) {
			$this->_total_item_count = count($this->getItemIds(true));
		}
		return $this->_total_item_count;
	}

	/*
		Function: getItemIds
			Method to get related item ids.

		Returns:
			Array - Related item ids
	*/
	public function getItemIds($recursive = false) {
		$item_ids = $this->item_ids;
		if ($recursive) {
			// "+" keeps existing keys, so duplicate ids are merged away.
			foreach ($this->getChildren(true) as $child) {
				$item_ids += $child->item_ids;
			}
		}
		return $item_ids;
	}

	/*
		Function: childrenHaveItems
			Method to check if children have items.

		Returns:
			Bool
	*/
	public function childrenHaveItems() {
		foreach ($this->getChildren(true) as $child) {
			if ($child->itemCount()) {
				return true;
			}
		}
		return false;
	}

	/*
		Function: getParams
			Gets category params.

		Parameters:
			$for - Get params for a specific use, including overidden values.

		Returns:
			Object - AppParameter
	*/
	public function getParams($for = null) {

		// get site params and inherit globals
		if ($for == 'site') {
			return $this->app->parameter->create()
				->set('config.', $this->getApplication()->getParams()->get('global.config.'))
				->set('template.', $this->getApplication()->getParams()->get('global.template.'))
				->loadArray($this->params->getData());
		}

		return $this->params;
	}

	/*
		Function: getImage
			Get image resource info.

		Parameters:
			$name - the param name of the image

		Returns:
			Array - Image info
	*/
	public function getImage($name) {
		$params = $this->getParams();
		if ($image = $params->get($name)) {
			return $this->app->html->_('zoo.image', $image, $params->get($name . '_width'), $params->get($name . '_height'));
		}
		return null;
	}

	/*
		Function: getText
			Executes Content Plugins on text.

		Parameters:
			$text - the text

		Returns:
			text - string
	*/
	public function getText($text) {
		return $this->app->zoo->triggerContentPlugins($text);
	}
}
/*
	Class: CategoryException
		Exception type thrown by Category operations.
*/
class CategoryException extends AppException {}
/*
* Copyright (C) 2016 Team Kodi
* http://kodi.tv
*
* This Program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2, or (at your option)
* any later version.
*
* This Program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this Program; see the file COPYING. If not, see
* <http://www.gnu.org/licenses/>.
*
*/
#include "GUIDialogNewJoystick.h"
#include "ServiceBroker.h"
#include "guilib/GUIWindowManager.h"
#include "guilib/WindowIDs.h"
#include "messaging/helpers/DialogHelper.h"
#include "settings/Settings.h"
using namespace JOYSTICK;
// Construct the worker thread used to show the dialog off the caller's thread.
CGUIDialogNewJoystick::CGUIDialogNewJoystick() :
  CThread("NewJoystickDlg")
{
}
// Show the "new controller detected" prompt on the worker thread, unless
// it is already showing, the user opted out, or the controller
// configuration window is already open.
void CGUIDialogNewJoystick::ShowAsync()
{
  // Only one prompt at a time.
  if (IsRunning())
    return;

  // The user disabled being asked about newly-detected controllers.
  if (!CServiceBroker::GetSettings().GetBool(CSettings::SETTING_INPUT_ASKNEWCONTROLLERS))
    return;

  // The controller configuration dialog is already active.
  if (g_windowManager.IsWindowActive(WINDOW_DIALOG_GAME_CONTROLLERS, false))
    return;

  Create();
}
// Worker-thread body: ask the user whether to configure the new
// controller now; on "yes" open the configuration window, on "no"
// remember the opt-out so the prompt is not shown again.
void CGUIDialogNewJoystick::Process()
{
  using namespace KODI::MESSAGING::HELPERS;

  // "New controller detected"
  // "A new controller has been detected. Configuration can be done at any time in "Settings -> System Settings -> Input". Would you like to configure it now?"
  if (ShowYesNoDialogText(CVariant{ 35011 }, CVariant{ 35012 }) == DialogResponse::YES)
  {
    g_windowManager.ActivateWindow(WINDOW_DIALOG_GAME_CONTROLLERS);
  }
  else
  {
    CServiceBroker::GetSettings().SetBool(CSettings::SETTING_INPUT_ASKNEWCONTROLLERS, false);
  }
}
| hackthis02/xbmc | xbmc/input/joysticks/dialogs/GUIDialogNewJoystick.cpp | C++ | gpl-2.0 | 1,952 |
#!/usr/bin/env python
""" turtle-example-suite:
xtx_lindenmayer_indian.py
Each morning women in Tamil Nadu, in southern
India, place designs, created by using rice
flour and known as kolam on the thresholds of
their homes.
These can be described by Lindenmayer systems,
which can easily be implemented with turtle
graphics and Python.
Two examples are shown here:
(1) the snake kolam
(2) anklets of Krishna
Taken from Marcia Ascher: Mathematics
Elsewhere, An Exploration of Ideas Across
Cultures
"""
################################
# Mini Lindenmayer tool
###############################
from turtle import *
def replace(seq, replacementRules, n):
    """Apply the L-system rules to seq for n generations.

    Each pass maps every symbol to its replacement string (symbols
    without a rule are kept unchanged) and concatenates the results.
    """
    for _ in range(n):
        expanded = []
        for symbol in seq:
            expanded.append(replacementRules.get(symbol, symbol))
        seq = "".join(expanded)
    return seq
def draw(commands, rules):
    # Interpret an L-system string: each symbol maps (via rules) either to
    # a callable turtle action or to a sub-string drawn recursively.
    for b in commands:
        try:
            rules[b]()
        except TypeError:
            # rules[b] is a string, not a callable: expand it recursively.
            try:
                draw(rules[b], rules)
            except:
                # Symbols with no rule at all are deliberately ignored
                # (best-effort demo behavior).
                pass
def main():
    # Draw the two kolam designs from Ascher's book in sequence.
    ################################
    # Example 1: Snake kolam
    ################################

    # Turtle actions for the snake kolam symbols.
    def r():
        right(45)

    def l():
        left(45)

    def f():
        forward(7.5)

    snake_rules = {"-":r, "+":l, "f":f, "b":"f+f+f--f--f+f+f"}
    snake_replacementRules = {"b": "b+f+b--f--b+f+b"}
    snake_start = "b--f--b--f"

    drawing = replace(snake_start, snake_replacementRules, 3)

    reset()
    speed(3)
    tracer(1,0)
    ht()
    up()
    backward(195)
    down()
    draw(drawing, snake_rules)

    from time import sleep
    sleep(3)

    ################################
    # Example 2: Anklets of Krishna
    ################################

    # Turtle actions: red quarter-circle arcs, black loops, green lines.
    def A():
        color("red")
        circle(10,90)

    def B():
        from math import sqrt
        color("black")
        l = 5/sqrt(2)
        forward(l)
        circle(l, 270)
        forward(l)

    def F():
        color("green")
        forward(10)

    krishna_rules = {"a":A, "b":B, "f":F}
    krishna_replacementRules = {"a" : "afbfa", "b" : "afbfbfbfa" }
    krishna_start = "fbfbfbfb"

    reset()
    speed(0)
    tracer(3,0)
    ht()
    left(45)
    drawing = replace(krishna_start, krishna_replacementRules, 3)
    draw(drawing, krishna_rules)
    tracer(1)
    return "Done!"
# Script entry point (Python 2: `print` statement, Tk mainloop keeps the
# window open after drawing completes).
if __name__=='__main__':
    msg = main()
    print msg
    mainloop()
| teeple/pns_server | work/install/Python-2.7.4/Demo/turtle/tdemo_lindenmayer_indian.py | Python | gpl-2.0 | 2,432 |
/*
* Copyright (C) 2005-2018 Team Kodi
* This file is part of Kodi - https://kodi.tv
*
* SPDX-License-Identifier: GPL-2.0-or-later
* See LICENSES/README.md for more information.
*/
#include "platform/Filesystem.h"
#include "platform/win32/CharsetConverter.h"
#include <Windows.h>
namespace win = KODI::PLATFORM::WINDOWS;
namespace KODI
{
namespace PLATFORM
{
namespace FILESYSTEM
{

// Query capacity/free/available bytes for the volume containing 'path'.
// On failure 'ec' is set from GetLastError() and all fields are
// uintmax_t(-1).
space_info space(const std::string& path, std::error_code& ec)
{
  ec.clear();
  space_info sp;

  auto pathW = win::ToW(path);
  ULARGE_INTEGER capacity;
  ULARGE_INTEGER available;
  ULARGE_INTEGER free;
  auto result = GetDiskFreeSpaceExW(pathW.c_str(), &available, &capacity, &free);
  if (result == FALSE)
  {
    ec.assign(GetLastError(), std::system_category());
    sp.available = static_cast<uintmax_t>(-1);
    sp.capacity = static_cast<uintmax_t>(-1);
    sp.free = static_cast<uintmax_t>(-1);
    return sp;
  }

  sp.available = static_cast<uintmax_t>(available.QuadPart);
  sp.capacity = static_cast<uintmax_t>(capacity.QuadPart);
  sp.free = static_cast<uintmax_t>(free.QuadPart);
  return sp;
}

// Return the system temp directory (UTF-8), or "" with 'ec' set on failure.
std::string temp_directory_path(std::error_code &ec)
{
  wchar_t lpTempPathBuffer[MAX_PATH + 1];

  if (!GetTempPathW(MAX_PATH, lpTempPathBuffer))
  {
    ec.assign(GetLastError(), std::system_category());
    return std::string();
  }

  ec.clear();
  return win::FromW(lpTempPathBuffer);
}

// Create a uniquely named directory under the system temp path and return
// it. GetTempFileNameW reserves a unique *file* name; the file is deleted
// and a directory created in its place.
std::string create_temp_directory(std::error_code &ec)
{
  wchar_t lpTempPathBuffer[MAX_PATH + 1];

  std::wstring xbmcTempPath = win::ToW(temp_directory_path(ec));
  if (ec)
    return std::string();

  if (!GetTempFileNameW(xbmcTempPath.c_str(), L"xbm", 0, lpTempPathBuffer))
  {
    ec.assign(GetLastError(), std::system_category());
    return std::string();
  }

  DeleteFileW(lpTempPathBuffer);

  if (!CreateDirectoryW(lpTempPathBuffer, nullptr))
  {
    ec.assign(GetLastError(), std::system_category());
    return std::string();
  }

  ec.clear();
  return win::FromW(lpTempPathBuffer);
}

// Reserve a unique temp file path inside a freshly created temp directory
// and return it. The file itself is deleted before returning, so only the
// name is reserved. The string parameter (a suffix hint on other
// platforms) is unused here.
std::string temp_file_path(const std::string&, std::error_code& ec)
{
  wchar_t lpTempPathBuffer[MAX_PATH + 1];

  std::wstring xbmcTempPath = win::ToW(create_temp_directory(ec));
  if (ec)
    return std::string();

  if (!GetTempFileNameW(xbmcTempPath.c_str(), L"xbm", 0, lpTempPathBuffer))
  {
    ec.assign(GetLastError(), std::system_category());
    return std::string();
  }

  DeleteFileW(lpTempPathBuffer);

  ec.clear();
  return win::FromW(lpTempPathBuffer);
}
}
}
}
| asavah/xbmc | xbmc/platform/win32/Filesystem.cpp | C++ | gpl-2.0 | 2,486 |
// Copyright 2008 Dolphin Emulator Project
// Licensed under GPLv2+
// Refer to the license.txt file included.
#include "VideoCommon/Fifo.h"
#include <atomic>
#include <cstring>
#include "Common/Assert.h"
#include "Common/Atomic.h"
#include "Common/BlockingLoop.h"
#include "Common/ChunkFile.h"
#include "Common/Event.h"
#include "Common/FPURoundMode.h"
#include "Common/MemoryUtil.h"
#include "Common/MsgHandler.h"
#include "Core/ConfigManager.h"
#include "Core/CoreTiming.h"
#include "Core/HW/Memmap.h"
#include "Core/Host.h"
#include "VideoCommon/AsyncRequests.h"
#include "VideoCommon/CPMemory.h"
#include "VideoCommon/CommandProcessor.h"
#include "VideoCommon/DataReader.h"
#include "VideoCommon/OpcodeDecoding.h"
#include "VideoCommon/VertexLoaderManager.h"
#include "VideoCommon/VertexManagerBase.h"
#include "VideoCommon/VideoBackendBase.h"
namespace Fifo
{
// Size of both the aux FIFO and the video buffer.
static constexpr u32 FIFO_SIZE = 2 * 1024 * 1024;
static constexpr int GPU_TIME_SLOT_SIZE = 1000;

// Loop driving the GPU thread in dual-core mode.
static Common::BlockingLoop s_gpu_mainloop;

// Set while emulation is running; cleared to put the GPU loop to sleep.
static Common::Flag s_emu_running_state;

// Most of this array is unlikely to be faulted in...
static u8 s_fifo_aux_data[FIFO_SIZE];
static u8* s_fifo_aux_write_ptr;
static u8* s_fifo_aux_read_ptr;

// This could be in SConfig, but it depends on multiple settings
// and can change at runtime.
static bool s_use_deterministic_gpu_thread;

static CoreTiming::EventType* s_event_sync_gpu;

// STATE_TO_SAVE
static u8* s_video_buffer;
static u8* s_video_buffer_read_ptr;
static std::atomic<u8*> s_video_buffer_write_ptr;
static std::atomic<u8*> s_video_buffer_seen_ptr;
static u8* s_video_buffer_pp_read_ptr;
// The read_ptr is always owned by the GPU thread. In normal mode, so is the
// write_ptr, despite it being atomic. In deterministic GPU thread mode,
// things get a bit more complicated:
// - The seen_ptr is written by the GPU thread, and points to what it's already
// processed as much of as possible - in the case of a partial command which
// caused it to stop, not the same as the read ptr. It's written by the GPU,
// under the lock, and updating the cond.
// - The write_ptr is written by the CPU thread after it copies data from the
// FIFO. Maybe someday it will be under the lock. For now, because RunGpuLoop
// polls, it's just atomic.
// - The pp_read_ptr is the CPU preprocessing version of the read_ptr.

// CPU<->GPU sync accounting and wakeup for SyncGPU.
static std::atomic<int> s_sync_ticks;
static bool s_syncing_suspended;
static Common::Event s_sync_wakeup_event;
// Save/restore the FIFO state: the video buffer contents and the
// read/write pointers (serialized as offsets into s_video_buffer). On
// load in deterministic-GPU-thread mode, the preprocessing and seen
// pointers are resynchronized to the read pointer.
void DoState(PointerWrap& p)
{
  p.DoArray(s_video_buffer, FIFO_SIZE);
  u8* write_ptr = s_video_buffer_write_ptr;
  p.DoPointer(write_ptr, s_video_buffer);
  s_video_buffer_write_ptr = write_ptr;
  p.DoPointer(s_video_buffer_read_ptr, s_video_buffer);
  if (p.mode == PointerWrap::MODE_READ && s_use_deterministic_gpu_thread)
  {
    // We're good and paused, right?
    s_video_buffer_seen_ptr = s_video_buffer_pp_read_ptr = s_video_buffer_read_ptr;
  }

  p.Do(s_sync_ticks);
  p.Do(s_syncing_suspended);
}
// Pause (lock) or resume (unlock) the GPU side for state changes. On
// lock: drain outstanding work, stop the GPU loop, and — in dual-core
// non-deterministic mode — wait for the loop to actually yield. On
// unlock: optionally resume.
void PauseAndLock(bool doLock, bool unpauseOnUnlock)
{
  if (doLock)
  {
    SyncGPU(SyncGPUReason::Other);
    EmulatorState(false);

    const SConfig& param = SConfig::GetInstance();

    // Single-core or deterministic mode: no separate GPU loop to wait on.
    if (!param.bCPUThread || s_use_deterministic_gpu_thread)
      return;

    s_gpu_mainloop.WaitYield(std::chrono::milliseconds(100), Host_YieldToUI);
  }
  else
  {
    if (unpauseOnUnlock)
      EmulatorState(true);
  }
}
// Allocate the video buffer and prepare the GPU loop (dual-core only).
void Init()
{
  // Padded so that SIMD overreads in the vertex loader are safe
  s_video_buffer = static_cast<u8*>(Common::AllocateMemoryPages(FIFO_SIZE + 4));
  ResetVideoBuffer();
  if (SConfig::GetInstance().bCPUThread)
    s_gpu_mainloop.Prepare();
  s_sync_ticks.store(0);
}
// Free the video buffer and null out all buffer pointers. The GPU loop
// must already be stopped (see ExitGpuLoop).
void Shutdown()
{
  if (s_gpu_mainloop.IsRunning())
    PanicAlert("Fifo shutting down while active");

  Common::FreeMemoryPages(s_video_buffer, FIFO_SIZE + 4);
  s_video_buffer = nullptr;
  s_video_buffer_write_ptr = nullptr;
  s_video_buffer_pp_read_ptr = nullptr;
  s_video_buffer_read_ptr = nullptr;
  s_video_buffer_seen_ptr = nullptr;
  s_fifo_aux_write_ptr = nullptr;
  s_fifo_aux_read_ptr = nullptr;
}
// May be executed from any thread, even the graphics thread.
// Created to allow for self shutdown.
void ExitGpuLoop()
{
  // This should break the wait loop in CPU thread
  CommandProcessor::fifo.bFF_GPReadEnable = false;
  FlushGpu();

  // Terminate GPU thread loop
  s_emu_running_state.Set();
  s_gpu_mainloop.Stop(s_gpu_mainloop.kNonBlock);
}
// Propagate the emulator run state to the GPU loop: wake it when running,
// allow it to sleep when paused.
void EmulatorState(bool running)
{
  s_emu_running_state.Set(running);
  if (running)
    s_gpu_mainloop.Wakeup();
  else
    s_gpu_mainloop.AllowSleep();
}
// In deterministic-GPU-thread mode, blocks until the GPU mainloop has drained,
// then compacts the aux buffer (and, if may_move_read_ptr, the video buffer)
// back to their bases so they do not wrap around. No-op otherwise.
void SyncGPU(SyncGPUReason reason, bool may_move_read_ptr)
{
  if (s_use_deterministic_gpu_thread)
  {
    s_gpu_mainloop.Wait();
    if (!s_gpu_mainloop.IsRunning())
      return;

    // Opportunistically reset FIFOs so we don't wrap around.
    if (may_move_read_ptr && s_fifo_aux_write_ptr != s_fifo_aux_read_ptr)
      PanicAlert("aux fifo not synced (%p, %p)", s_fifo_aux_write_ptr, s_fifo_aux_read_ptr);
    memmove(s_fifo_aux_data, s_fifo_aux_read_ptr, s_fifo_aux_write_ptr - s_fifo_aux_read_ptr);
    s_fifo_aux_write_ptr -= (s_fifo_aux_read_ptr - s_fifo_aux_data);
    s_fifo_aux_read_ptr = s_fifo_aux_data;

    if (may_move_read_ptr)
    {
      u8* write_ptr = s_video_buffer_write_ptr;

      // what's left over in the buffer
      size_t size = write_ptr - s_video_buffer_pp_read_ptr;

      memmove(s_video_buffer, s_video_buffer_pp_read_ptr, size);

      // This change always decreases the pointers. We write seen_ptr
      // after write_ptr here, and read it before in RunGpuLoop, so
      // 'write_ptr > seen_ptr' there cannot become spuriously true.
      s_video_buffer_write_ptr = write_ptr = s_video_buffer + size;
      s_video_buffer_pp_read_ptr = s_video_buffer;
      s_video_buffer_read_ptr = s_video_buffer;
      s_video_buffer_seen_ptr = write_ptr;
    }
  }
}
// Appends 'size' bytes to the aux FIFO (deterministic-GPU-thread mode).
// If there is no room at the end, first syncs with the GPU so SyncGPU can
// compact the aux buffer and reclaim space.
void PushFifoAuxBuffer(const void* ptr, size_t size)
{
  if (size > (size_t)(s_fifo_aux_data + FIFO_SIZE - s_fifo_aux_write_ptr))
  {
    SyncGPU(SyncGPUReason::AuxSpace, /* may_move_read_ptr */ false);
    if (!s_gpu_mainloop.IsRunning())
    {
      // GPU is shutting down
      return;
    }

    if (size > (size_t)(s_fifo_aux_data + FIFO_SIZE - s_fifo_aux_write_ptr))
    {
      // That will sync us up to the last 32 bytes, so this short region
      // of FIFO would have to point to a 2MB display list or something.
      PanicAlert("absurdly large aux buffer");
      return;
    }
  }
  memcpy(s_fifo_aux_write_ptr, ptr, size);
  s_fifo_aux_write_ptr += size;
}
// Pops 'size' bytes off the aux FIFO and returns a pointer to them.
// No bounds check: callers must only pop what was previously pushed.
void* PopFifoAuxBuffer(size_t size)
{
  void* ret = s_fifo_aux_read_ptr;
  s_fifo_aux_read_ptr += size;
  return ret;
}
// Description: RunGpuLoop() sends data through this function.
// Copies one 32-byte FIFO block from emulated memory at readPtr into
// s_video_buffer, compacting the buffer first if the block would not fit at
// the current write position.
static void ReadDataFromFifo(u32 readPtr)
{
  size_t len = 32;
  if (len > (size_t)(s_video_buffer + FIFO_SIZE - s_video_buffer_write_ptr))
  {
    // Not enough room at the end: slide the unconsumed bytes down to the
    // start of the buffer.
    size_t existing_len = s_video_buffer_write_ptr - s_video_buffer_read_ptr;
    if (len > (size_t)(FIFO_SIZE - existing_len))
    {
      PanicAlert("FIFO out of bounds (existing %zu + new %zu > %u)", existing_len, len, FIFO_SIZE);
      return;
    }
    memmove(s_video_buffer, s_video_buffer_read_ptr, existing_len);
    s_video_buffer_write_ptr = s_video_buffer + existing_len;
    s_video_buffer_read_ptr = s_video_buffer;
  }
  // Copy new video instructions to s_video_buffer for future use in rendering the new picture
  Memory::CopyFromEmu(s_video_buffer_write_ptr, readPtr, len);
  s_video_buffer_write_ptr += len;
}
// The deterministic_gpu_thread version.
// Runs on the CPU thread: copies one 32-byte block into s_video_buffer and
// preprocesses it with the opcode decoder before publishing the new write
// pointer for the GPU thread to consume.
static void ReadDataFromFifoOnCPU(u32 readPtr)
{
  size_t len = 32;
  u8* write_ptr = s_video_buffer_write_ptr;
  if (len > (size_t)(s_video_buffer + FIFO_SIZE - write_ptr))
  {
    // We can't wrap around while the GPU is working on the data.
    // This should be very rare due to the reset in SyncGPU.
    SyncGPU(SyncGPUReason::Wraparound);
    if (!s_gpu_mainloop.IsRunning())
    {
      // GPU is shutting down, so the next asserts may fail
      return;
    }

    if (s_video_buffer_pp_read_ptr != s_video_buffer_read_ptr)
    {
      PanicAlert("desynced read pointers");
      return;
    }

    // SyncGPU may have compacted the buffer; reload the write pointer.
    write_ptr = s_video_buffer_write_ptr;
    size_t existing_len = write_ptr - s_video_buffer_pp_read_ptr;
    if (len > (size_t)(FIFO_SIZE - existing_len))
    {
      PanicAlert("FIFO out of bounds (existing %zu + new %zu > %u)", existing_len, len, FIFO_SIZE);
      return;
    }
  }
  Memory::CopyFromEmu(s_video_buffer_write_ptr, readPtr, len);
  // Preprocess (Run<true>) up to the end of the newly copied block.
  s_video_buffer_pp_read_ptr = OpcodeDecoder::Run<true>(
      DataReader(s_video_buffer_pp_read_ptr, write_ptr + len), nullptr, false);
  // This would have to be locked if the GPU thread didn't spin.
  s_video_buffer_write_ptr = write_ptr + len;
}
// Resets every video/aux FIFO pointer back to its buffer base, discarding any
// buffered data.
void ResetVideoBuffer()
{
  s_video_buffer_read_ptr = s_video_buffer;
  s_video_buffer_write_ptr = s_video_buffer;
  s_video_buffer_seen_ptr = s_video_buffer;
  s_video_buffer_pp_read_ptr = s_video_buffer;
  s_fifo_aux_write_ptr = s_fifo_aux_data;
  s_fifo_aux_read_ptr = s_fifo_aux_data;
}
// Description: Main FIFO update loop
// Purpose: Keep the Core HW updated about the CPU-GPU distance
// Body of the GPU thread: repeatedly drains either the preprocessed video
// buffer (deterministic mode) or the emulated CP FIFO (regular dual-core),
// servicing async requests and the bSyncGPU tick budget along the way.
void RunGpuLoop()
{
  AsyncRequests::GetInstance()->SetEnable(true);
  AsyncRequests::GetInstance()->SetPassthrough(false);

  s_gpu_mainloop.Run(
      [] {
        const SConfig& param = SConfig::GetInstance();

        // Run events from the CPU thread.
        AsyncRequests::GetInstance()->PullEvents();

        // Do nothing while paused
        if (!s_emu_running_state.IsSet())
          return;

        if (s_use_deterministic_gpu_thread)
        {
          // All the fifo/CP stuff is on the CPU. We just need to run the opcode decoder.
          u8* seen_ptr = s_video_buffer_seen_ptr;
          u8* write_ptr = s_video_buffer_write_ptr;
          // See comment in SyncGPU
          if (write_ptr > seen_ptr)
          {
            s_video_buffer_read_ptr =
                OpcodeDecoder::Run(DataReader(s_video_buffer_read_ptr, write_ptr), nullptr, false);
            s_video_buffer_seen_ptr = write_ptr;
          }
        }
        else
        {
          CommandProcessor::SCPFifoStruct& fifo = CommandProcessor::fifo;
          CommandProcessor::SetCPStatusFromGPU();

          // check if we are able to run this buffer
          while (!CommandProcessor::IsInterruptWaiting() && fifo.bFF_GPReadEnable &&
                 fifo.CPReadWriteDistance && !AtBreakpoint())
          {
            // In SyncGPU mode, stop once our tick budget is spent; the CPU
            // side refills it via WaitForGpuThread().
            if (param.bSyncGPU && s_sync_ticks.load() < param.iSyncGpuMinDistance)
              break;

            u32 cyclesExecuted = 0;
            u32 readPtr = fifo.CPReadPointer;
            ReadDataFromFifo(readPtr);

            // Advance the read pointer one 32-byte block, wrapping at CPEnd.
            if (readPtr == fifo.CPEnd)
              readPtr = fifo.CPBase;
            else
              readPtr += 32;

            ASSERT_MSG(COMMANDPROCESSOR, (s32)fifo.CPReadWriteDistance - 32 >= 0,
                       "Negative fifo.CPReadWriteDistance = %i in FIFO Loop !\nThat can produce "
                       "instability in the game. Please report it.",
                       fifo.CPReadWriteDistance - 32);

            u8* write_ptr = s_video_buffer_write_ptr;

            s_video_buffer_read_ptr = OpcodeDecoder::Run(
                DataReader(s_video_buffer_read_ptr, write_ptr), &cyclesExecuted, false);

            Common::AtomicStore(fifo.CPReadPointer, readPtr);
            Common::AtomicAdd(fifo.CPReadWriteDistance, static_cast<u32>(-32));
            // SafeCPReadPointer only advances once everything read so far has
            // been fully decoded.
            if ((write_ptr - s_video_buffer_read_ptr) == 0)
              Common::AtomicStore(fifo.SafeCPReadPointer, fifo.CPReadPointer);

            CommandProcessor::SetCPStatusFromGPU();

            if (param.bSyncGPU)
            {
              cyclesExecuted = (int)(cyclesExecuted / param.fSyncGpuOverclock);
              int old = s_sync_ticks.fetch_sub(cyclesExecuted);
              // Wake the CPU only when we cross below the max-distance
              // threshold it blocked on.
              if (old >= param.iSyncGpuMaxDistance &&
                  old - (int)cyclesExecuted < param.iSyncGpuMaxDistance)
                s_sync_wakeup_event.Set();
            }

            // This call is pretty important in DualCore mode and must be called in the FIFO Loop.
            // If we don't, s_swapRequested or s_efbAccessRequested won't be set to false
            // leading the CPU thread to wait in Video_BeginField or Video_AccessEFB thus slowing
            // things down.
            AsyncRequests::GetInstance()->PullEvents();
          }

          // fast skip remaining GPU time if fifo is empty
          if (s_sync_ticks.load() > 0)
          {
            int old = s_sync_ticks.exchange(0);
            if (old >= param.iSyncGpuMaxDistance)
              s_sync_wakeup_event.Set();
          }

          // The fifo is empty and it's unlikely we will get any more work in the near future.
          // Make sure VertexManager finishes drawing any primitives it has stored in it's buffer.
          g_vertex_manager->Flush();
        }
      },
      100);

  AsyncRequests::GetInstance()->SetEnable(false);
  AsyncRequests::GetInstance()->SetPassthrough(true);
}
// Blocks the calling thread until the GPU mainloop has drained all queued
// work. No-op in single-core or deterministic-GPU-thread mode.
void FlushGpu()
{
  const SConfig& param = SConfig::GetInstance();

  if (!param.bCPUThread || s_use_deterministic_gpu_thread)
    return;

  s_gpu_mainloop.Wait();
}
// Permits the GPU mainloop to go to sleep when it runs out of work.
void GpuMaySleep()
{
  s_gpu_mainloop.AllowSleep();
}
bool AtBreakpoint()
{
CommandProcessor::SCPFifoStruct& fifo = CommandProcessor::fifo;
return fifo.bFF_BPEnable && (fifo.CPReadPointer == fifo.CPBreakpoint);
}
// Kicks the GPU: wakes the GPU mainloop (dual-core) and, when the GPU is
// driven or throttled by CoreTiming (single-core, deterministic thread, or
// bSyncGPU), resumes the suspended sync callback.
void RunGpu()
{
  const SConfig& param = SConfig::GetInstance();

  // wake up GPU thread
  if (param.bCPUThread && !s_use_deterministic_gpu_thread)
  {
    s_gpu_mainloop.Wakeup();
  }

  // if the sync GPU callback is suspended, wake it up.
  // (Use the cached 'param' reference instead of re-fetching the singleton.)
  if (!param.bCPUThread || s_use_deterministic_gpu_thread || param.bSyncGPU)
  {
    if (s_syncing_suspended)
    {
      s_syncing_suspended = false;
      CoreTiming::ScheduleEvent(GPU_TIME_SLOT_SIZE, s_event_sync_gpu, GPU_TIME_SLOT_SIZE);
    }
  }
}
// Runs the GPU on the CPU thread (single-core), or feeds blocks to the
// deterministic GPU thread. Consumes 32-byte FIFO blocks until the FIFO is
// empty, a breakpoint is hit, reads are disabled, or the tick budget runs out.
// @param ticks emulated CPU ticks elapsed since the last call.
// @return -1 when the GPU went idle (callback can be suspended), otherwise
//         the delay until the next invocation.
static int RunGpuOnCpu(int ticks)
{
  CommandProcessor::SCPFifoStruct& fifo = CommandProcessor::fifo;
  bool reset_simd_state = false;
  // Budget = new ticks (scaled by the overclock factor) plus any carry-over.
  int available_ticks = int(ticks * SConfig::GetInstance().fSyncGpuOverclock) + s_sync_ticks.load();
  while (fifo.bFF_GPReadEnable && fifo.CPReadWriteDistance && !AtBreakpoint() &&
         available_ticks >= 0)
  {
    if (s_use_deterministic_gpu_thread)
    {
      ReadDataFromFifoOnCPU(fifo.CPReadPointer);
      s_gpu_mainloop.Wakeup();
    }
    else
    {
      // The opcode decoder may execute SIMD code; save the CPU emulator's
      // SIMD state once and restore it when we are done.
      if (!reset_simd_state)
      {
        FPURoundMode::SaveSIMDState();
        FPURoundMode::LoadDefaultSIMDState();
        reset_simd_state = true;
      }
      ReadDataFromFifo(fifo.CPReadPointer);
      u32 cycles = 0;
      s_video_buffer_read_ptr = OpcodeDecoder::Run(
          DataReader(s_video_buffer_read_ptr, s_video_buffer_write_ptr), &cycles, false);
      available_ticks -= cycles;
    }

    // Advance the read pointer one block, wrapping at CPEnd.
    if (fifo.CPReadPointer == fifo.CPEnd)
      fifo.CPReadPointer = fifo.CPBase;
    else
      fifo.CPReadPointer += 32;

    fifo.CPReadWriteDistance -= 32;
  }

  CommandProcessor::SetCPStatusFromGPU();

  if (reset_simd_state)
  {
    FPURoundMode::LoadSIMDState();
  }

  // Discard all available ticks as there is nothing to do any more.
  s_sync_ticks.store(std::min(available_ticks, 0));

  // If the GPU is idle, drop the handler.
  if (available_ticks >= 0)
    return -1;

  // Always wait at least for GPU_TIME_SLOT_SIZE cycles.
  return -available_ticks + GPU_TIME_SLOT_SIZE;
}
// Recomputes whether the deterministic GPU thread should be used, based on
// the configured determinism mode and the caller's request, and reinitializes
// the preprocessing state when switching it on.
// @param want whether determinism is requested (used only in Auto mode).
void UpdateWantDeterminism(bool want)
{
  // We are paused (or not running at all yet), so
  // it should be safe to change this.
  const SConfig& param = SConfig::GetInstance();
  bool gpu_thread = false;
  switch (param.m_GPUDeterminismMode)
  {
  case GPUDeterminismMode::Auto:
    gpu_thread = want;
    break;
  case GPUDeterminismMode::Disabled:
    gpu_thread = false;
    break;
  case GPUDeterminismMode::FakeCompletion:
    gpu_thread = true;
    break;
  }

  // The deterministic thread only exists in dual-core mode.
  gpu_thread = gpu_thread && param.bCPUThread;

  if (s_use_deterministic_gpu_thread != gpu_thread)
  {
    s_use_deterministic_gpu_thread = gpu_thread;
    if (gpu_thread)
    {
      // These haven't been updated in non-deterministic mode.
      s_video_buffer_seen_ptr = s_video_buffer_pp_read_ptr = s_video_buffer_read_ptr;
      CopyPreprocessCPStateFromMain();
      VertexLoaderManager::MarkAllDirty();
    }
  }
}
// Whether the deterministic (preprocessing) GPU thread mode is active.
bool UseDeterministicGPUThread()
{
  return s_use_deterministic_gpu_thread;
}
/* This function checks the emulated CPU - GPU distance and may wake up the GPU,
 * or block the CPU if required. It should be called by the CPU thread regularly.
 * @ticks The gone emulated CPU time.
 * @return A good time to call WaitForGpuThread() next, or -1 to stop polling.
 */
static int WaitForGpuThread(int ticks)
{
  const SConfig& param = SConfig::GetInstance();

  int old = s_sync_ticks.fetch_add(ticks);
  int now = old + ticks;

  // GPU is idle, so stop polling.
  if (old >= 0 && s_gpu_mainloop.IsDone())
    return -1;

  // Wakeup GPU (only on the transition across the min-distance threshold).
  if (old < param.iSyncGpuMinDistance && now >= param.iSyncGpuMinDistance)
    RunGpu();

  // If the GPU is still sleeping, wait for a longer time
  if (now < param.iSyncGpuMinDistance)
    return GPU_TIME_SLOT_SIZE + param.iSyncGpuMinDistance - now;

  // Wait for GPU: the CPU has run too far ahead; block until the GPU thread
  // signals s_sync_wakeup_event (see RunGpuLoop).
  if (now >= param.iSyncGpuMaxDistance)
    s_sync_wakeup_event.Wait();

  return GPU_TIME_SLOT_SIZE;
}
// CoreTiming callback driving CPU<->GPU synchronization: either runs the GPU
// inline on the CPU thread, or throttles the CPU against the GPU thread, then
// reschedules itself unless the GPU went idle.
static void SyncGPUCallback(u64 ticks, s64 cyclesLate)
{
  ticks += cyclesLate;
  int next = -1;

  if (!SConfig::GetInstance().bCPUThread || s_use_deterministic_gpu_thread)
  {
    next = RunGpuOnCpu((int)ticks);
  }
  else if (SConfig::GetInstance().bSyncGPU)
  {
    next = WaitForGpuThread((int)ticks);
  }

  // A negative return value means "no more work": suspend until RunGpu()
  // reschedules the event.
  s_syncing_suspended = next < 0;
  if (!s_syncing_suspended)
    CoreTiming::ScheduleEvent(next, s_event_sync_gpu, next);
}
// Initialize GPU - CPU thread syncing, this gives us a deterministic way to start the GPU thread.
void Prepare()
{
  s_event_sync_gpu = CoreTiming::RegisterEvent("SyncGPUCallback", SyncGPUCallback);
  // Start suspended; RunGpu() schedules the first callback when work arrives.
  s_syncing_suspended = true;
}
} // namespace Fifo
| LAGonauta/dolphin | Source/Core/VideoCommon/Fifo.cpp | C++ | gpl-2.0 | 18,046 |
<?php
/**
* Copyright 2013 Go Daddy Operating Company, LLC. All Rights Reserved.
*/
// Make sure it's wordpress
if ( !defined( 'ABSPATH' ) )
die( 'Forbidden' );
/**
* Class GD_System_Plugin_Config
* Handle reading system and reseller configurations
* @version 1.0
* @author Kurt Payne <kpayne@godaddy.com>
*/
class GD_System_Plugin_Config {

	/**
	 * Cached, merged configuration items.
	 * @var array
	 */
	var $config = array();

	/**
	 * Is this account missing a gd-config.php file?
	 * @var bool
	 */
	var $missing_gd_config = false;

	// @codeCoverageIgnoreStart
	/**
	 * Constructor. Loads gd-config.php once, unless the environment already
	 * defines the constants/functions it would provide; flags the account
	 * when the file is absent or unreadable.
	 */
	public function __construct() {
		$already_configured = defined( 'GD_RESELLER' ) || defined( 'GD_VARNISH_SERVERS' ) || function_exists( 'is_mobile_user_agent' );
		if ( $already_configured ) {
			return;
		}
		$config_file = ABSPATH . 'gd-config.php';
		if ( file_exists( $config_file ) && is_readable( $config_file ) ) {
			require_once( $config_file );
		} else {
			$this->missing_gd_config = true;
		}
	}
	// @codeCoverageIgnoreEnd

	/**
	 * Get the merged system + reseller config, computing and caching it on
	 * first use. Reseller values override system defaults.
	 *
	 * @return array
	 */
	public function get_config() {
		if ( ! empty( $this->config ) ) {
			return $this->config;
		}
		$defaults  = $this->_get_config( '/web/conf/gd-wordpress.conf' );
		$resellers = $this->_get_config( '/web/conf/gd-resellers.conf' );
		$reseller  = array();
		if ( defined( 'GD_RESELLER' ) && isset( $resellers[ GD_RESELLER ] ) ) {
			$reseller = $resellers[ GD_RESELLER ];
		}
		$this->config = array_merge( (array) $defaults, (array) $reseller );
		return $this->config;
	}

	/**
	 * Read an ini-style config file (with sections).
	 *
	 * @param string $path Absolute path to the file.
	 * @return array Parsed values, or an empty array if missing/unparsable.
	 */
	protected function _get_config( $path ) {
		if ( ! file_exists( $path ) || ! is_readable( $path ) || ! is_file( $path ) ) {
			return array();
		}
		$conf = @parse_ini_file( $path, true );
		return ( false === $conf ) ? array() : $conf;
	}
}
| amyevans/davmschool | wp-content/mu-plugins/gd-system-plugin/class-gd-system-plugin-config.php | PHP | gpl-2.0 | 1,813 |
<?php
/**
* Magento
*
* NOTICE OF LICENSE
*
* This source file is subject to the Open Software License (OSL 3.0)
* that is bundled with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://opensource.org/licenses/osl-3.0.php
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@magentocommerce.com so we can send you a copy immediately.
*
* DISCLAIMER
*
* Do not edit or add to this file if you wish to upgrade Magento to newer
* versions in the future. If you wish to customize Magento for your
* needs please refer to http://www.magentocommerce.com for more information.
*
* @category Mage
* @package Mage_Core
* @copyright Copyright (c) 2012 Magento Inc. (http://www.magentocommerce.com)
* @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0)
*/
/**
* Core Website model
*
* @method Mage_Core_Model_Resource_Website _getResource()
* @method Mage_Core_Model_Resource_Website getResource()
* @method Mage_Core_Model_Website setCode(string $value)
* @method string getName()
* @method Mage_Core_Model_Website setName(string $value)
* @method int getSortOrder()
* @method Mage_Core_Model_Website setSortOrder(int $value)
* @method Mage_Core_Model_Website setDefaultGroupId(int $value)
* @method int getIsDefault()
* @method Mage_Core_Model_Website setIsDefault(int $value)
*
* @category Mage
* @package Mage_Core
* @author Magento Core Team <core@magentocommerce.com>
*/
class Mage_Core_Model_Website extends Mage_Core_Model_Abstract
{
    const ENTITY    = 'core_website';
    const CACHE_TAG = 'website';

    protected $_cacheTag = true;

    /**
     * @var string
     */
    protected $_eventPrefix = 'website';

    /**
     * @var string
     */
    protected $_eventObject = 'website';

    /**
     * Cache configuration array
     *
     * @var array
     */
    protected $_configCache = array();

    /**
     * Website Group Collection array
     *
     * @var array
     */
    protected $_groups;

    /**
     * Website group ids array
     *
     * @var array
     */
    protected $_groupIds = array();

    /**
     * The number of groups in a website
     *
     * @var int
     */
    protected $_groupsCount;

    /**
     * Website Store collection array
     *
     * @var array
     */
    protected $_stores;

    /**
     * Website store ids array
     *
     * @var array
     */
    protected $_storeIds = array();

    /**
     * Website store codes array
     *
     * @var array
     */
    protected $_storeCodes = array();

    /**
     * The number of stores in a website
     *
     * @var int
     */
    protected $_storesCount = 0;

    /**
     * Website default group
     *
     * @var Mage_Core_Model_Store_Group
     */
    protected $_defaultGroup;

    /**
     * Website default store
     *
     * @var Mage_Core_Model_Store
     */
    protected $_defaultStore;

    /**
     * is can delete website
     *
     * @var bool
     */
    protected $_isCanDelete;

    /**
     * @var bool
     */
    private $_isReadOnly = false;

    /**
     * init model
     *
     */
    protected function _construct()
    {
        $this->_init('core/website');
    }

    /**
     * Custom load: a non-numeric id with no explicit field loads by code.
     *
     * @param int|string $id
     * @param string $field
     * @return Mage_Core_Model_Website
     */
    public function load($id, $field = null)
    {
        if (!is_numeric($id) && is_null($field)) {
            $this->_getResource()->load($this, $id, 'code');
            return $this;
        }
        return parent::load($id, $field);
    }

    /**
     * Load website configuration from the XML config tree.
     *
     * @param string $code website code, or numeric website id
     * @return Mage_Core_Model_Website
     */
    public function loadConfig($code)
    {
        if (!Mage::getConfig()->getNode('websites')) {
            return $this;
        }
        if (is_numeric($code)) {
            // Resolve a numeric id back to its website code.
            foreach (Mage::getConfig()->getNode('websites')->children() as $websiteCode=>$website) {
                if ((int)$website->system->website->id==$code) {
                    $code = $websiteCode;
                    break;
                }
            }
        } else {
            $website = Mage::getConfig()->getNode('websites/'.$code);
        }
        if (!empty($website)) {
            $this->setCode($code);
            $id = (int)$website->system->website->id;
            $this->setId($id)->setStoreId($id);
        }
        return $this;
    }

    /**
     * Get website config data (cached per path).
     *
     * @param string $path
     * @return mixed config value, array of child nodes, or false when missing
     */
    public function getConfig($path)
    {
        if (!isset($this->_configCache[$path])) {
            $config = Mage::getConfig()->getNode('websites/'.$this->getCode().'/'.$path);
            if (!$config) {
                return false;
                #throw Mage::exception('Mage_Core', Mage::helper('core')->__('Invalid website\'s configuration path: %s', $path));
            }
            if ($config->hasChildren()) {
                $value = array();
                foreach ($config->children() as $k=>$v) {
                    $value[$k] = $v;
                }
            } else {
                $value = (string)$config;
            }
            $this->_configCache[$path] = $value;
        }
        return $this->_configCache[$path];
    }

    /**
     * Load group collection and set internal data
     *
     */
    protected function _loadGroups()
    {
        $this->_groups = array();
        $this->_groupsCount = 0;
        foreach ($this->getGroupCollection() as $group) {
            $this->_groups[$group->getId()] = $group;
            $this->_groupIds[$group->getId()] = $group->getId();
            if ($this->getDefaultGroupId() == $group->getId()) {
                $this->_defaultGroup = $group;
            }
            $this->_groupsCount ++;
        }
    }

    /**
     * Set website groups
     *
     * @param array $groups
     * @return Mage_Core_Model_Website
     */
    public function setGroups($groups)
    {
        $this->_groups = array();
        $this->_groupsCount = 0;
        foreach ($groups as $group) {
            $this->_groups[$group->getId()] = $group;
            $this->_groupIds[$group->getId()] = $group->getId();
            if ($this->getDefaultGroupId() == $group->getId()) {
                $this->_defaultGroup = $group;
            }
            $this->_groupsCount ++;
        }
        return $this;
    }

    /**
     * Retrieve new (not loaded) Group collection object with website filter
     *
     * @return Mage_Core_Model_Mysql4_Store_Group_Collection
     */
    public function getGroupCollection()
    {
        return Mage::getModel('core/store_group')
            ->getCollection()
            ->addWebsiteFilter($this->getId());
    }

    /**
     * Retrieve website groups
     *
     * @return array
     */
    public function getGroups()
    {
        if (is_null($this->_groups)) {
            $this->_loadGroups();
        }
        return $this->_groups;
    }

    /**
     * Retrieve website group ids
     *
     * @return array
     */
    public function getGroupIds()
    {
        if (is_null($this->_groups)) {
            $this->_loadGroups();
        }
        return $this->_groupIds;
    }

    /**
     * Retrieve number groups in a website
     *
     * @return int
     */
    public function getGroupsCount()
    {
        if (is_null($this->_groups)) {
            $this->_loadGroups();
        }
        return $this->_groupsCount;
    }

    /**
     * Retrieve default group model
     *
     * @return Mage_Core_Model_Store_Group|false false when no default group id is set
     */
    public function getDefaultGroup()
    {
        if (!$this->hasDefaultGroupId()) {
            return false;
        }
        if (is_null($this->_groups)) {
            $this->_loadGroups();
        }
        return $this->_defaultGroup;
    }

    /**
     * Load store collection and set internal data
     *
     */
    protected function _loadStores()
    {
        $this->_stores = array();
        $this->_storesCount = 0;
        foreach ($this->getStoreCollection() as $store) {
            $this->_stores[$store->getId()] = $store;
            $this->_storeIds[$store->getId()] = $store->getId();
            $this->_storeCodes[$store->getId()] = $store->getCode();
            if ($this->getDefaultGroup() && $this->getDefaultGroup()->getDefaultStoreId() == $store->getId()) {
                $this->_defaultStore = $store;
            }
            $this->_storesCount ++;
        }
    }

    /**
     * Set website stores
     *
     * @param array $stores
     * @return Mage_Core_Model_Website
     */
    public function setStores($stores)
    {
        $this->_stores = array();
        $this->_storesCount = 0;
        foreach ($stores as $store) {
            $this->_stores[$store->getId()] = $store;
            $this->_storeIds[$store->getId()] = $store->getId();
            $this->_storeCodes[$store->getId()] = $store->getCode();
            if ($this->getDefaultGroup() && $this->getDefaultGroup()->getDefaultStoreId() == $store->getId()) {
                $this->_defaultStore = $store;
            }
            $this->_storesCount ++;
        }
        // Return $this for fluent chaining, consistent with setGroups().
        return $this;
    }

    /**
     * Retrieve new (not loaded) Store collection object with website filter
     *
     * @return Mage_Core_Model_Mysql4_Store_Collection
     */
    public function getStoreCollection()
    {
        return Mage::getModel('core/store')
            ->getCollection()
            ->addWebsiteFilter($this->getId());
    }

    /**
     * Retrieve website store objects
     *
     * @return array
     */
    public function getStores()
    {
        if (is_null($this->_stores)) {
            $this->_loadStores();
        }
        return $this->_stores;
    }

    /**
     * Retrieve website store ids
     *
     * @return array
     */
    public function getStoreIds()
    {
        if (is_null($this->_stores)) {
            $this->_loadStores();
        }
        return $this->_storeIds;
    }

    /**
     * Retrieve website store codes
     *
     * @return array
     */
    public function getStoreCodes()
    {
        if (is_null($this->_stores)) {
            $this->_loadStores();
        }
        return $this->_storeCodes;
    }

    /**
     * Retrieve number stores in a website
     *
     * @return int
     */
    public function getStoresCount()
    {
        if (is_null($this->_stores)) {
            $this->_loadStores();
        }
        return $this->_storesCount;
    }

    /**
     * is can delete website
     *
     * A website can be deleted only when it is saved, not read-only, not the
     * default website, and more than two websites exist.
     *
     * @return bool
     */
    public function isCanDelete()
    {
        if ($this->_isReadOnly || !$this->getId()) {
            return false;
        }
        if (is_null($this->_isCanDelete)) {
            $this->_isCanDelete = (Mage::getModel('core/website')->getCollection()->getSize() > 2)
                && !$this->getIsDefault();
        }
        return $this->_isCanDelete;
    }

    /**
     * Retrieve unique website-group-store key for collection with groups and stores
     *
     * @return string
     */
    public function getWebsiteGroupStore()
    {
        return join('-', array($this->getWebsiteId(), $this->getGroupId(), $this->getStoreId()));
    }

    /**
     * @return int
     */
    public function getDefaultGroupId()
    {
        return $this->_getData('default_group_id');
    }

    /**
     * @return string
     */
    public function getCode()
    {
        return $this->_getData('code');
    }

    protected function _beforeDelete()
    {
        // Websites may only be deleted from the admin area.
        $this->_protectFromNonAdmin();
        return parent::_beforeDelete();
    }

    /**
     * rewrite in order to clear configuration cache
     *
     * @return Mage_Core_Model_Website
     */
    protected function _afterDelete()
    {
        Mage::app()->clearWebsiteCache($this->getId());

        parent::_afterDelete();
        Mage::getConfig()->removeCache();
        return $this;
    }

    /**
     * Retrieve website base currency code
     *
     * @return string
     */
    public function getBaseCurrencyCode()
    {
        if ($this->getConfig(Mage_Core_Model_Store::XML_PATH_PRICE_SCOPE)
            == Mage_Core_Model_Store::PRICE_SCOPE_GLOBAL
        ) {
            return Mage::app()->getBaseCurrencyCode();
        } else {
            return $this->getConfig(Mage_Directory_Model_Currency::XML_PATH_CURRENCY_BASE);
        }
    }

    /**
     * Retrieve website base currency (lazily loaded and cached in data)
     *
     * @return Mage_Directory_Model_Currency
     */
    public function getBaseCurrency()
    {
        $currency = $this->getData('base_currency');
        if (is_null($currency)) {
            $currency = Mage::getModel('directory/currency')->load($this->getBaseCurrencyCode());
            $this->setData('base_currency', $currency);
        }
        return $currency;
    }

    /**
     * Retrieve Default Website Store or null
     *
     * @return Mage_Core_Model_Store
     */
    public function getDefaultStore()
    {
        // init stores if not loaded
        $this->getStores();
        return $this->_defaultStore;
    }

    /**
     * Retrieve default stores select object
     * Select fields website_id, store_id
     *
     * @param bool $withDefault include/exclude default admin website
     * @return Varien_Db_Select
     */
    public function getDefaultStoresSelect($withDefault = false)
    {
        return $this->getResource()->getDefaultStoresSelect($withDefault);
    }

    /**
     * Get/Set isReadOnly flag
     *
     * @param bool $value new value, or null to only read the current flag
     * @return bool
     */
    public function isReadOnly($value = null)
    {
        if (null !== $value) {
            $this->_isReadOnly = (bool)$value;
        }
        return $this->_isReadOnly;
    }
}
| keegan2149/magento | sites/default/app/code/core/Mage/Core/Model/Website.php | PHP | gpl-2.0 | 13,740 |
// Test that g++ complains about referring to a builtin type in a
// mem-initializer.
// Contributed by Kevin Buhr <buhr@stat.wisc.edu>

// Note: the trailing marker comments below appear to be expected-diagnostic
// annotations consumed by the compiler test harness; do not reword them.

int r = 0;

struct foo {             // ERROR - candidate
  foo(int x) { r = 1; }  // ERROR - candidate
};

struct bar : foo {
  typedef int an_int;
  // Deliberately invalid: a typedef for a builtin type is not a base class.
  bar() : bar::an_int(3) {}	// ERROR - not a base
};

int
main() {
  bar b;
  return r;
}
| nslu2/Build-gcc-3.2.1 | gcc/testsuite/g++.old-deja/g++.robertl/eb69.C | C++ | gpl-2.0 | 373 |
<?php
/**
* Magento
*
* NOTICE OF LICENSE
*
* This source file is subject to the Open Software License (OSL 3.0)
* that is bundled with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://opensource.org/licenses/osl-3.0.php
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@magentocommerce.com so we can send you a copy immediately.
*
* DISCLAIMER
*
* Do not edit or add to this file if you wish to upgrade Magento to newer
* versions in the future. If you wish to customize Magento for your
* needs please refer to http://www.magentocommerce.com for more information.
*
* @category Mage
* @package Mage_Adminhtml
* @copyright Copyright (c) 2012 Magento Inc. (http://www.magentocommerce.com)
* @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0)
*/
/**
 * Backend model for the design package system-config field: validates the
 * submitted package name before it is persisted.
 */
class Mage_Adminhtml_Model_System_Config_Backend_Design_Package extends Mage_Core_Model_Config_Data
{
    /**
     * Ensure the package name is non-empty and refers to an existing design
     * package before saving.
     *
     * @throws Exception if the name is empty or the package does not exist
     */
    protected function _beforeSave()
    {
        $value = $this->getValue();
        if (empty($value)) {
            throw new Exception('package name is empty.');
        }
        if (!Mage::getDesign()->designPackageExists($value)) {
            throw new Exception('package with this name does not exist and cannot be set.');
        }
    }
}
| keegan2149/magento | sites/default/app/code/core/Mage/Adminhtml/Model/System/Config/Backend/Design/Package.php | PHP | gpl-2.0 | 1,419 |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
    """Data migration: create a Project row (state=1) for every application
    chain that does not yet have a chained project attached."""

    def forwards(self, orm):
        """Create one Project per chain that lacks one.

        Applications are ordered by ('chain', '-id'), so the first row
        seen for each chain is that chain's newest application; only that
        application gets a Project.
        """
        objs = orm.ProjectApplication.objects
        # Applications whose chain has no Project attached yet.
        apps = objs.filter(chain__chained_project=None).order_by(
            'chain', '-id')
        checked_chain = None
        projs = []
        for app in apps:
            chain = app.chain
            # Skip all but the first (newest, per '-id') application of
            # each chain.
            if chain.pk != checked_chain:
                checked_chain = chain.pk
                projs.append(orm.Project(id=chain, application=app, state=1))
        orm.Project.objects.bulk_create(projs)

    def backwards(self, orm):
        "No-op: reversing would require deleting the created Project rows."

    # Frozen ORM state auto-generated by South; do not edit by hand.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'im.additionalmail': {
            'Meta': {'object_name': 'AdditionalMail'},
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.AstakosUser']"})
        },
        'im.approvalterms': {
            'Meta': {'object_name': 'ApprovalTerms'},
            'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'location': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'im.astakosuser': {
            'Meta': {'object_name': 'AstakosUser', '_ormbases': ['auth.User']},
            'accepted_email': ('django.db.models.fields.EmailField', [], {'default': 'None', 'max_length': '75', 'null': 'True', 'blank': 'True'}),
            'accepted_policy': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'activation_sent': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'affiliation': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'auth_token': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
            'auth_token_created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'auth_token_expires': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'date_signed_terms': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'deactivated_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'deactivated_reason': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
            'disturbed_quota': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True', 'blank': 'True'}),
            'email_verified': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'has_credits': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'has_signed_terms': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'invitations': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'is_rejected': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'is_verified': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'level': ('django.db.models.fields.IntegerField', [], {'default': '4'}),
            'moderated': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'moderated_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'moderated_data': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'policy': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['im.Resource']", 'null': 'True', 'through': "orm['im.AstakosUserQuota']", 'symmetrical': 'False'}),
            'rejected_reason': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {}),
            'user_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'primary_key': 'True'}),
            'uuid': ('django.db.models.fields.CharField', [], {'max_length': '255', 'unique': 'True', 'null': 'True'}),
            'verification_code': ('django.db.models.fields.CharField', [], {'max_length': '255', 'unique': 'True', 'null': 'True'}),
            'verified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
        },
        'im.astakosuserauthprovider': {
            'Meta': {'unique_together': "(('identifier', 'module', 'user'),)", 'object_name': 'AstakosUserAuthProvider'},
            'active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
            'affiliation': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'auth_backend': ('django.db.models.fields.CharField', [], {'default': "'astakos'", 'max_length': '255'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'identifier': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'info_data': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}),
            'module': ('django.db.models.fields.CharField', [], {'default': "'local'", 'max_length': '255'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'auth_providers'", 'to': "orm['im.AstakosUser']"})
        },
        'im.astakosuserquota': {
            'Meta': {'unique_together': "(('resource', 'user'),)", 'object_name': 'AstakosUserQuota'},
            'capacity': ('snf_django.lib.db.fields.IntDecimalField', [], {'max_digits': '38', 'decimal_places': '0'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'resource': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.Resource']"}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.AstakosUser']"})
        },
        'im.authproviderpolicyprofile': {
            'Meta': {'object_name': 'AuthProviderPolicyProfile'},
            'active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'authpolicy_profiles'", 'symmetrical': 'False', 'to': "orm['auth.Group']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_exclusive': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'policy_add': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'policy_automoderate': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'policy_create': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'policy_limit': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
            'policy_login': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'policy_remove': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'policy_required': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'policy_switch': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'priority': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
            'provider': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'authpolicy_profiles'", 'symmetrical': 'False', 'to': "orm['im.AstakosUser']"})
        },
        'im.chain': {
            'Meta': {'object_name': 'Chain'},
            'chain': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        'im.component': {
            'Meta': {'object_name': 'Component'},
            'auth_token': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
            'auth_token_created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'auth_token_expires': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}),
            'url': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'})
        },
        'im.emailchange': {
            'Meta': {'object_name': 'EmailChange'},
            'activation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'new_email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
            'requested_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'emailchanges'", 'unique': 'True', 'to': "orm['im.AstakosUser']"})
        },
        'im.endpoint': {
            'Meta': {'object_name': 'Endpoint'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'service': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'endpoints'", 'to': "orm['im.Service']"})
        },
        'im.endpointdata': {
            'Meta': {'unique_together': "(('endpoint', 'key'),)", 'object_name': 'EndpointData'},
            'endpoint': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'data'", 'to': "orm['im.Endpoint']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'key': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'value': ('django.db.models.fields.CharField', [], {'max_length': '1024'})
        },
        'im.invitation': {
            'Meta': {'object_name': 'Invitation'},
            'code': ('django.db.models.fields.BigIntegerField', [], {'db_index': 'True'}),
            'consumed': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'inviter': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'invitations_sent'", 'null': 'True', 'to': "orm['im.AstakosUser']"}),
            'is_consumed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'realname': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
        },
        'im.pendingthirdpartyuser': {
            'Meta': {'unique_together': "(('provider', 'third_party_identifier'),)", 'object_name': 'PendingThirdPartyUser'},
            'affiliation': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'info': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
            'provider': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'third_party_identifier': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'token': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'im.project': {
            'Meta': {'object_name': 'Project'},
            'application': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'project'", 'unique': 'True', 'to': "orm['im.ProjectApplication']"}),
            'creation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'deactivation_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'deactivation_reason': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
            'id': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'chained_project'", 'unique': 'True', 'primary_key': 'True', 'db_column': "'id'", 'to': "orm['im.Chain']"}),
            'last_approval_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'members': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['im.AstakosUser']", 'through': "orm['im.ProjectMembership']", 'symmetrical': 'False'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '80', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
            'state': ('django.db.models.fields.IntegerField', [], {'default': '1', 'db_index': 'True'})
        },
        'im.projectapplication': {
            'Meta': {'unique_together': "(('chain', 'id'),)", 'object_name': 'ProjectApplication'},
            'applicant': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'projects_applied'", 'to': "orm['im.AstakosUser']"}),
            'chain': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'chained_apps'", 'db_column': "'chain'", 'to': "orm['im.Chain']"}),
            'comments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'end_date': ('django.db.models.fields.DateTimeField', [], {}),
            'homepage': ('django.db.models.fields.URLField', [], {'max_length': '255', 'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'issue_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'limit_on_members_number': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
            'member_join_policy': ('django.db.models.fields.IntegerField', [], {}),
            'member_leave_policy': ('django.db.models.fields.IntegerField', [], {}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
            'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'projects_owned'", 'to': "orm['im.AstakosUser']"}),
            'precursor_application': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.ProjectApplication']", 'null': 'True', 'blank': 'True'}),
            'resource_grants': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['im.Resource']", 'null': 'True', 'through': "orm['im.ProjectResourceGrant']", 'blank': 'True'}),
            'response': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'response_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'start_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'state': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'})
        },
        'im.projectmembership': {
            'Meta': {'unique_together': "(('person', 'project'),)", 'object_name': 'ProjectMembership'},
            'acceptance_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'leave_request_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.AstakosUser']"}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.Project']"}),
            'request_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'state': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'})
        },
        'im.projectmembershiphistory': {
            'Meta': {'object_name': 'ProjectMembershipHistory'},
            'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'person': ('django.db.models.fields.BigIntegerField', [], {}),
            'project': ('django.db.models.fields.BigIntegerField', [], {}),
            'reason': ('django.db.models.fields.IntegerField', [], {}),
            'serial': ('django.db.models.fields.BigIntegerField', [], {})
        },
        'im.projectresourcegrant': {
            'Meta': {'unique_together': "(('resource', 'project_application'),)", 'object_name': 'ProjectResourceGrant'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'member_capacity': ('snf_django.lib.db.fields.IntDecimalField', [], {'default': '0', 'max_digits': '38', 'decimal_places': '0'}),
            'project_application': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.ProjectApplication']", 'null': 'True'}),
            'project_capacity': ('snf_django.lib.db.fields.IntDecimalField', [], {'null': 'True', 'max_digits': '38', 'decimal_places': '0'}),
            'resource': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.Resource']"})
        },
        'im.resource': {
            'Meta': {'object_name': 'Resource'},
            'allow_in_projects': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
            'desc': ('django.db.models.fields.TextField', [], {'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
            'service_origin': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'service_type': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'unit': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
            'uplimit': ('snf_django.lib.db.fields.IntDecimalField', [], {'default': '0', 'max_digits': '38', 'decimal_places': '0'})
        },
        'im.serial': {
            'Meta': {'object_name': 'Serial'},
            'serial': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        'im.service': {
            'Meta': {'object_name': 'Service'},
            'component': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.Component']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
            'type': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'im.sessioncatalog': {
            'Meta': {'object_name': 'SessionCatalog'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sessions'", 'null': 'True', 'to': "orm['im.AstakosUser']"})
        },
        'im.usersetting': {
            'Meta': {'unique_together': "(('user', 'setting'),)", 'object_name': 'UserSetting'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'setting': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.AstakosUser']"}),
            'value': ('django.db.models.fields.IntegerField', [], {})
        }
    }
    complete_apps = ['im']
| grnet/synnefo | snf-astakos-app/astakos/im/migrations/old/0043_uninitialized_projects.py | Python | gpl-3.0 | 25,194 |
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: ec2
short_description: create, terminate, start or stop an instance in ec2
description:
- Creates or terminates ec2 instances.
- C(state=restarted) was added in 2.2
version_added: "0.9"
options:
key_name:
description:
- key pair to use on the instance
required: false
default: null
aliases: ['keypair']
id:
version_added: "1.1"
description:
- identifier for this instance or set of instances, so that the module will be idempotent with respect to EC2 instances. This identifier is valid for at least 24 hours after the termination of the instance, and should not be reused for another call later on. For details, see the description of client token at U(http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Run_Instance_Idempotency.html).
required: false
default: null
aliases: []
group:
description:
- security group (or list of groups) to use with the instance
required: false
default: null
aliases: [ 'groups' ]
group_id:
version_added: "1.1"
description:
- security group id (or list of ids) to use with the instance
required: false
default: null
aliases: []
region:
version_added: "1.2"
description:
- The AWS region to use. Must be specified if ec2_url is not used. If not specified then the value of the EC2_REGION environment variable, if any, is used. See U(http://docs.aws.amazon.com/general/latest/gr/rande.html#ec2_region)
required: false
default: null
aliases: [ 'aws_region', 'ec2_region' ]
zone:
version_added: "1.2"
description:
- AWS availability zone in which to launch the instance
required: false
default: null
aliases: [ 'aws_zone', 'ec2_zone' ]
instance_type:
description:
- instance type to use for the instance, see U(http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instance-types.html)
required: true
default: null
aliases: []
tenancy:
version_added: "1.9"
description:
- An instance with a tenancy of "dedicated" runs on single-tenant hardware and can only be launched into a VPC. Note that to use dedicated tenancy you MUST specify a vpc_subnet_id as well. Dedicated tenancy is not available for EC2 "micro" instances.
required: false
default: default
choices: [ "default", "dedicated" ]
aliases: []
spot_price:
version_added: "1.5"
description:
- Maximum spot price to bid, If not set a regular on-demand instance is requested. A spot request is made with this maximum bid. When it is filled, the instance is started.
required: false
default: null
aliases: []
spot_type:
version_added: "2.0"
description:
- Type of spot request; one of "one-time" or "persistent". Defaults to "one-time" if not supplied.
required: false
default: "one-time"
choices: [ "one-time", "persistent" ]
aliases: []
image:
description:
- I(ami) ID to use for the instance
required: true
default: null
aliases: []
kernel:
description:
- kernel I(eki) to use for the instance
required: false
default: null
aliases: []
ramdisk:
description:
- ramdisk I(eri) to use for the instance
required: false
default: null
aliases: []
wait:
description:
- wait for the instance to be 'running' before returning. Does not wait for SSH, see 'wait_for' example for details.
required: false
default: "no"
choices: [ "yes", "no" ]
aliases: []
wait_timeout:
description:
- how long before wait gives up, in seconds
default: 300
aliases: []
spot_wait_timeout:
version_added: "1.5"
description:
- how long to wait for the spot instance request to be fulfilled
default: 600
aliases: []
count:
description:
- number of instances to launch
required: False
default: 1
aliases: []
monitoring:
version_added: "1.1"
description:
- enable detailed monitoring (CloudWatch) for instance
required: false
default: null
choices: [ "yes", "no" ]
aliases: []
user_data:
version_added: "0.9"
description:
- opaque blob of data which is made available to the ec2 instance
required: false
default: null
aliases: []
instance_tags:
version_added: "1.0"
description:
- a hash/dictionary of tags to add to the new instance or for starting/stopping instance by tag; '{"key":"value"}' and '{"key":"value","key":"value"}'
required: false
default: null
aliases: []
placement_group:
version_added: "1.3"
description:
- placement group for the instance when using EC2 Clustered Compute
required: false
default: null
aliases: []
vpc_subnet_id:
version_added: "1.1"
description:
- the subnet ID in which to launch the instance (VPC)
required: false
default: null
aliases: []
assign_public_ip:
version_added: "1.5"
description:
- when provisioning within vpc, assign a public IP address. Boto library must be 2.13.0+
required: false
default: null
choices: [ "yes", "no" ]
aliases: []
private_ip:
version_added: "1.2"
description:
- the private ip address to assign the instance (from the vpc subnet)
required: false
default: null
aliases: []
instance_profile_name:
version_added: "1.3"
description:
- Name of the IAM instance profile to use. Boto library must be 2.5.0+
required: false
default: null
aliases: []
instance_ids:
version_added: "1.3"
description:
- "list of instance ids, currently used for states: absent, running, stopped"
required: false
default: null
aliases: ['instance_id']
source_dest_check:
version_added: "1.6"
description:
- Enable or Disable the Source/Destination checks (for NAT instances and Virtual Routers)
required: false
default: yes
choices: [ "yes", "no" ]
termination_protection:
version_added: "2.0"
description:
- Enable or Disable the Termination Protection
required: false
default: no
choices: [ "yes", "no" ]
instance_initiated_shutdown_behavior:
version_added: "2.2"
description:
- Set whether AWS will Stop or Terminate an instance on shutdown
required: false
default: 'stop'
choices: [ "stop", "terminate" ]
state:
version_added: "1.3"
description:
- create or terminate instances
required: false
default: 'present'
aliases: []
choices: ['present', 'absent', 'running', 'restarted', 'stopped']
volumes:
version_added: "1.5"
description:
- a list of hash/dictionaries of volumes to add to the new instance; '[{"key":"value", "key":"value"}]'; keys allowed are - device_name (str; required), delete_on_termination (bool; False), device_type (deprecated), ephemeral (str), encrypted (bool; False), snapshot (str), volume_type (str), iops (int) - device_type is deprecated use volume_type, iops must be set when volume_type='io1', ephemeral and snapshot are mutually exclusive.
required: false
default: null
aliases: []
ebs_optimized:
version_added: "1.6"
description:
- whether instance is using optimized EBS volumes, see U(http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSOptimized.html)
required: false
default: 'false'
exact_count:
version_added: "1.5"
description:
- An integer value which indicates how many instances that match the 'count_tag' parameter should be running. Instances are either created or terminated based on this value.
required: false
default: null
aliases: []
count_tag:
version_added: "1.5"
description:
- Used with 'exact_count' to determine how many nodes based on a specific tag criteria should be running. This can be expressed in multiple ways and is shown in the EXAMPLES section. For instance, one can request 25 servers that are tagged with "class=webserver". The specified tag must already exist or be passed in as the 'instance_tags' option.
required: false
default: null
aliases: []
network_interfaces:
version_added: "2.0"
description:
- A list of existing network interfaces to attach to the instance at launch. When specifying existing network interfaces, none of the assign_public_ip, private_ip, vpc_subnet_id, group, or group_id parameters may be used. (Those parameters are for creating a new network interface at launch.)
required: false
default: null
aliases: ['network_interface']
spot_launch_group:
version_added: "2.1"
description:
- Launch group for spot request, see U(http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/how-spot-instances-work.html#spot-launch-group)
required: false
default: null
author:
- "Tim Gerla (@tgerla)"
- "Lester Wade (@lwade)"
- "Seth Vidal"
extends_documentation_fragment: aws
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Basic provisioning example
- ec2:
key_name: mykey
instance_type: t2.micro
image: ami-123456
wait: yes
group: webserver
count: 3
vpc_subnet_id: subnet-29e63245
assign_public_ip: yes
# Advanced example with tagging and CloudWatch
- ec2:
key_name: mykey
group: databases
instance_type: t2.micro
image: ami-123456
wait: yes
wait_timeout: 500
count: 5
instance_tags:
db: postgres
monitoring: yes
vpc_subnet_id: subnet-29e63245
assign_public_ip: yes
# Single instance with additional IOPS volume from snapshot and volume delete on termination
- ec2:
key_name: mykey
group: webserver
instance_type: c3.medium
image: ami-123456
wait: yes
wait_timeout: 500
volumes:
- device_name: /dev/sdb
snapshot: snap-abcdef12
volume_type: io1
iops: 1000
volume_size: 100
delete_on_termination: true
monitoring: yes
vpc_subnet_id: subnet-29e63245
assign_public_ip: yes
# Single instance with ssd gp2 root volume
- ec2:
key_name: mykey
group: webserver
instance_type: c3.medium
image: ami-123456
wait: yes
wait_timeout: 500
volumes:
- device_name: /dev/xvda
volume_type: gp2
volume_size: 8
vpc_subnet_id: subnet-29e63245
assign_public_ip: yes
exact_count: 1
# Multiple groups example
- ec2:
key_name: mykey
group: ['databases', 'internal-services', 'sshable', 'and-so-forth']
instance_type: m1.large
image: ami-6e649707
wait: yes
wait_timeout: 500
count: 5
instance_tags:
db: postgres
monitoring: yes
vpc_subnet_id: subnet-29e63245
assign_public_ip: yes
# Multiple instances with additional volume from snapshot
- ec2:
key_name: mykey
group: webserver
instance_type: m1.large
image: ami-6e649707
wait: yes
wait_timeout: 500
count: 5
volumes:
- device_name: /dev/sdb
snapshot: snap-abcdef12
volume_size: 10
monitoring: yes
vpc_subnet_id: subnet-29e63245
assign_public_ip: yes
# Dedicated tenancy example
- local_action:
module: ec2
assign_public_ip: yes
group_id: sg-1dc53f72
key_name: mykey
image: ami-6e649707
instance_type: m1.small
tenancy: dedicated
vpc_subnet_id: subnet-29e63245
wait: yes
# Spot instance example
- ec2:
spot_price: 0.24
spot_wait_timeout: 600
keypair: mykey
group_id: sg-1dc53f72
instance_type: m1.small
image: ami-6e649707
wait: yes
vpc_subnet_id: subnet-29e63245
assign_public_ip: yes
spot_launch_group: report_generators
# Examples using pre-existing network interfaces
- ec2:
key_name: mykey
instance_type: t2.small
image: ami-f005ba11
network_interface: eni-deadbeef
- ec2:
key_name: mykey
instance_type: t2.small
image: ami-f005ba11
network_interfaces: ['eni-deadbeef', 'eni-5ca1ab1e']
# Launch instances, runs some tasks
# and then terminate them
- name: Create a sandbox instance
hosts: localhost
gather_facts: False
vars:
key_name: my_keypair
instance_type: m1.small
security_group: my_securitygroup
image: my_ami_id
region: us-east-1
tasks:
- name: Launch instance
ec2:
         key_name: "{{ key_name }}"
group: "{{ security_group }}"
instance_type: "{{ instance_type }}"
image: "{{ image }}"
wait: true
region: "{{ region }}"
vpc_subnet_id: subnet-29e63245
assign_public_ip: yes
register: ec2
- name: Add new instance to host group
add_host:
hostname: "{{ item.public_ip }}"
groupname: launched
with_items: "{{ ec2.instances }}"
- name: Wait for SSH to come up
wait_for:
host: "{{ item.public_dns_name }}"
port: 22
delay: 60
timeout: 320
state: started
with_items: "{{ ec2.instances }}"
- name: Configure instance(s)
hosts: launched
become: True
gather_facts: True
roles:
- my_awesome_role
- my_awesome_test
- name: Terminate instances
hosts: localhost
connection: local
tasks:
- name: Terminate instances that were previously launched
ec2:
state: 'absent'
instance_ids: '{{ ec2.instance_ids }}'
# Start a few existing instances, run some tasks
# and stop the instances
- name: Start sandbox instances
hosts: localhost
gather_facts: false
connection: local
vars:
instance_ids:
- 'i-xxxxxx'
- 'i-xxxxxx'
- 'i-xxxxxx'
region: us-east-1
tasks:
- name: Start the sandbox instances
ec2:
instance_ids: '{{ instance_ids }}'
region: '{{ region }}'
state: running
wait: True
vpc_subnet_id: subnet-29e63245
assign_public_ip: yes
roles:
- do_neat_stuff
- do_more_neat_stuff
- name: Stop sandbox instances
hosts: localhost
gather_facts: false
connection: local
vars:
instance_ids:
- 'i-xxxxxx'
- 'i-xxxxxx'
- 'i-xxxxxx'
region: us-east-1
tasks:
- name: Stop the sandbox instances
ec2:
instance_ids: '{{ instance_ids }}'
region: '{{ region }}'
state: stopped
wait: True
vpc_subnet_id: subnet-29e63245
assign_public_ip: yes
#
# Start stopped instances specified by tag
#
- local_action:
module: ec2
instance_tags:
Name: ExtraPower
state: running
#
# Restart instances specified by tag
#
- local_action:
module: ec2
instance_tags:
Name: ExtraPower
state: restarted
#
# Enforce that 5 instances with a tag "foo" are running
# (Highly recommended!)
#
- ec2:
key_name: mykey
instance_type: c1.medium
image: ami-40603AD1
wait: yes
group: webserver
instance_tags:
foo: bar
exact_count: 5
count_tag: foo
vpc_subnet_id: subnet-29e63245
assign_public_ip: yes
#
# Enforce that 5 running instances named "database" with a "dbtype" of "postgres" exist
#
- ec2:
key_name: mykey
instance_type: c1.medium
image: ami-40603AD1
wait: yes
group: webserver
instance_tags:
Name: database
dbtype: postgres
exact_count: 5
count_tag:
Name: database
dbtype: postgres
vpc_subnet_id: subnet-29e63245
assign_public_ip: yes
#
# count_tag complex argument examples
#
# instances with tag foo
count_tag:
foo:
# instances with tag foo=bar
count_tag:
foo: bar
# instances with tags foo=bar & baz
count_tag:
foo: bar
baz:
# instances with tags foo & bar & baz=bang
count_tag:
- foo
- bar
- baz: bang
'''
import time
import traceback

from ast import literal_eval

from ansible.module_utils.six import iteritems
from ansible.module_utils.six import get_function_code
try:
import boto.ec2
from boto.ec2.blockdevicemapping import BlockDeviceType, BlockDeviceMapping
from boto.exception import EC2ResponseError
from boto.vpc import VPCConnection
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
def find_running_instances_by_count_tag(module, ec2, count_tag, zone=None):
    """
    Return (reservations, instances) for running instances whose tags
    match count_tag, optionally restricted to an availability zone.
    """
    # get reservations for instances that match tag(s) and are running
    reservations = get_reservations(module, ec2, tags=count_tag, state="running", zone=zone)
    # Flatten the reservations into a single list of instance objects.
    instances = [inst
                 for res in reservations if hasattr(res, 'instances')
                 for inst in res.instances]
    return reservations, instances
def _set_none_to_blank(dictionary):
result = dictionary
for k in result:
if isinstance(result[k], dict):
result[k] = _set_none_to_blank(result[k])
elif not result[k]:
result[k] = ""
return result
def get_reservations(module, ec2, tags=None, state=None, zone=None):
    """
    Build an EC2 filter dict from the given tags/state/zone and return the
    matching reservations from ec2.get_all_instances().

    tags may be:
      - a string naming a tag key (only the key's existence is required),
      - a list mixing tag-key strings and {name: value} dicts, or
      - a dict of tag name -> value pairs (empty values match any value).
    """
    # TODO: filters do not work with tags that have underscores
    filters = dict()
    if tags is not None:
        # NOTE(review): the rest of this file uses basestring for Python 2
        # compat; a unicode tag argument would skip these branches — confirm.
        if isinstance(tags, str):
            # The string may be a serialized list/dict; try to decode it.
            try:
                tags = literal_eval(tags)
            except (ValueError, SyntaxError):
                # Not a Python literal — treat it as a plain tag name below.
                pass
        # if string, we only care that a tag of that name exists
        if isinstance(tags, str):
            filters.update({"tag-key": tags})
        # if list, append each item to filters
        if isinstance(tags, list):
            for x in tags:
                if isinstance(x, dict):
                    x = _set_none_to_blank(x)
                    filters.update(dict(("tag:"+tn, tv) for (tn,tv) in iteritems(x)))
                else:
                    filters.update({"tag-key": x})
        # if dict, add the key and value to the filter
        if isinstance(tags, dict):
            tags = _set_none_to_blank(tags)
            filters.update(dict(("tag:"+tn, tv) for (tn,tv) in iteritems(tags)))
    if state:
        # http://stackoverflow.com/questions/437511/what-are-the-valid-instancestates-for-the-amazon-ec2-api
        filters.update({'instance-state-name': state})
    if zone:
        filters.update({'availability-zone': zone})
    results = ec2.get_all_instances(filters=filters)
    return results
def get_instance_info(inst):
    """
    Retrieves instance information from a boto instance
    object and returns it as a dictionary
    """
    instance_info = {'id': inst.id,
                     'ami_launch_index': inst.ami_launch_index,
                     'private_ip': inst.private_ip_address,
                     'private_dns_name': inst.private_dns_name,
                     'public_ip': inst.ip_address,
                     'dns_name': inst.dns_name,
                     'public_dns_name': inst.public_dns_name,
                     'state_code': inst.state_code,
                     'architecture': inst.architecture,
                     'image_id': inst.image_id,
                     'key_name': inst.key_name,
                     'placement': inst.placement,
                     # placement is an AZ like "us-east-1a"; dropping the
                     # final letter yields the region name.
                     'region': inst.placement[:-1],
                     'kernel': inst.kernel,
                     'ramdisk': inst.ramdisk,
                     'launch_time': inst.launch_time,
                     'instance_type': inst.instance_type,
                     'root_device_type': inst.root_device_type,
                     'root_device_name': inst.root_device_name,
                     'state': inst.state,
                     'hypervisor': inst.hypervisor,
                     'tags': inst.tags,
                     'groups': dict((group.id, group.name) for group in inst.groups),
                     }
    # The attributes below may be absent on older boto versions, so fall
    # back to defaults instead of failing (equivalent to the former
    # try/except AttributeError blocks).
    instance_info['virtualization_type'] = getattr(inst, 'virtualization_type', None)
    instance_info['ebs_optimized'] = getattr(inst, 'ebs_optimized', False)
    try:
        bdm_dict = {}
        bdm = getattr(inst, 'block_device_mapping')
        for device_name in bdm.keys():
            bdm_dict[device_name] = {
                'status': bdm[device_name].status,
                'volume_id': bdm[device_name].volume_id,
                'delete_on_termination': bdm[device_name].delete_on_termination
            }
        instance_info['block_device_mapping'] = bdm_dict
    except AttributeError:
        # Either the mapping itself or one of its entries' attributes is
        # missing; report False as the original code did.
        instance_info['block_device_mapping'] = False
    instance_info['tenancy'] = getattr(inst, 'placement_tenancy', 'default')
    return instance_info
def boto_supports_associate_public_ip_address(ec2):
    """
    Check if Boto library has associate_public_ip_address in the NetworkInterfaceSpecification
    class. Added in Boto 2.13.0
    ec2: authenticated ec2 connection object
    Returns:
        True if Boto library accepts associate_public_ip_address argument, else false
    """
    spec = boto.ec2.networkinterface.NetworkInterfaceSpecification()
    return hasattr(spec, "associate_public_ip_address")
def boto_supports_profile_name_arg(ec2):
    """
    Check if Boto library has instance_profile_name argument. instance_profile_name has been added in Boto 2.5.0
    ec2: authenticated ec2 connection object
    Returns:
        True if Boto library accept instance_profile_name argument, else false
    """
    # Inspect the run_instances signature via its code object.
    code = get_function_code(ec2.run_instances)
    return 'instance_profile_name' in code.co_varnames
def create_block_device(module, ec2, volume):
    """
    Validate a single volume spec dict and translate it into a boto
    BlockDeviceType. Calls module.fail_json on any invalid combination.
    """
    # Not aware of a way to determine this programmatically
    # http://aws.amazon.com/about-aws/whats-new/2013/10/09/ebs-provisioned-iops-maximum-iops-gb-ratio-increased-to-30-1/
    MAX_IOPS_TO_SIZE_RATIO = 30
    # device_type has been used historically to represent volume_type,
    # however ec2_vol uses volume_type, as does the BlockDeviceType, so
    # we add handling for either/or but not both
    if 'device_type' in volume and 'volume_type' in volume:
        module.fail_json(msg = 'device_type is a deprecated name for volume_type. Do not use both device_type and volume_type')
    # get whichever one is set, or NoneType if neither are set
    volume_type = volume.get('device_type') or volume.get('volume_type')
    has_snapshot = 'snapshot' in volume
    has_ephemeral = 'ephemeral' in volume
    # A brand-new volume (neither snapshot- nor ephemeral-backed) needs an
    # explicit size.
    if not has_snapshot and not has_ephemeral and 'volume_size' not in volume:
        module.fail_json(msg = 'Size must be specified when creating a new volume or modifying the root volume')
    if has_snapshot:
        if volume_type == 'io1' and 'iops' not in volume:
            module.fail_json(msg = 'io1 volumes must have an iops value set')
        if 'iops' in volume:
            snapshot = ec2.get_all_snapshots(snapshot_ids=[volume['snapshot']])[0]
            size = volume.get('volume_size', snapshot.volume_size)
            if int(volume['iops']) > MAX_IOPS_TO_SIZE_RATIO * size:
                module.fail_json(msg = 'IOPS must be at most %d times greater than size' % MAX_IOPS_TO_SIZE_RATIO)
        if 'encrypted' in volume:
            module.fail_json(msg = 'You can not set encryption when creating a volume from a snapshot')
    if has_ephemeral and has_snapshot:
        module.fail_json(msg = 'Cannot set both ephemeral and snapshot')
    return BlockDeviceType(snapshot_id=volume.get('snapshot'),
                           ephemeral_name=volume.get('ephemeral'),
                           size=volume.get('volume_size'),
                           volume_type=volume_type,
                           delete_on_termination=volume.get('delete_on_termination', False),
                           iops=volume.get('iops'),
                           encrypted=volume.get('encrypted', None))
def boto_supports_param_in_spot_request(ec2, param):
    """
    Check if Boto library has a <param> in its request_spot_instances() method. For example, the placement_group parameter wasn't added until 2.3.0.
    ec2: authenticated ec2 connection object
    Returns:
        True if boto library has the named param as an argument on the request_spot_instances method, else False
    """
    spot_method = ec2.request_spot_instances
    return param in get_function_code(spot_method).co_varnames
def await_spot_requests(module, ec2, spot_requests, count):
    """
    Wait for a group of spot requests to be fulfilled, or fail.
    module: Ansible module object
    ec2: authenticated ec2 connection object
    spot_requests: boto.ec2.spotinstancerequest.SpotInstanceRequest object returned by ec2.request_spot_instances
    count: Total number of instances to be created by the spot requests
    Returns:
        list of instance ID's created by the spot request(s)
    """
    spot_wait_timeout = int(module.params.get('spot_wait_timeout'))
    # Absolute deadline for the polling loop below.
    wait_complete = time.time() + spot_wait_timeout
    # Maps fulfilled spot-request id -> instance id, filled in as we poll.
    spot_req_inst_ids = dict()
    while time.time() < wait_complete:
        # Re-fetch ALL spot requests each pass and match ours by id.
        reqs = ec2.get_all_spot_instance_requests()
        for sirb in spot_requests:
            if sirb.id in spot_req_inst_ids:
                continue  # already fulfilled; nothing more to check
            for sir in reqs:
                if sir.id != sirb.id:
                    continue # this is not our spot instance
                if sir.instance_id is not None:
                    spot_req_inst_ids[sirb.id] = sir.instance_id
                elif sir.state == 'open':
                    continue # still waiting, nothing to do here
                elif sir.state == 'active':
                    continue # Instance is created already, nothing to do here
                elif sir.state == 'failed':
                    module.fail_json(msg="Spot instance request %s failed with status %s and fault %s:%s" % (
                        sir.id, sir.status.code, sir.fault.code, sir.fault.message))
                elif sir.state == 'cancelled':
                    module.fail_json(msg="Spot instance request %s was cancelled before it could be fulfilled." % sir.id)
                elif sir.state == 'closed':
                    # instance is terminating or marked for termination
                    # this may be intentional on the part of the operator,
                    # or it may have been terminated by AWS due to capacity,
                    # price, or group constraints in this case, we'll fail
                    # the module if the reason for the state is anything
                    # other than termination by user. Codes are documented at
                    # http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/spot-bid-status.html
                    if sir.status.code == 'instance-terminated-by-user':
                        # do nothing, since the user likely did this on purpose
                        pass
                    else:
                        spot_msg = "Spot instance request %s was closed by AWS with the status %s and fault %s:%s"
                        module.fail_json(msg=spot_msg % (sir.id, sir.status.code, sir.fault.code, sir.fault.message))
        if len(spot_req_inst_ids) < count:
            # Not all requests fulfilled yet; back off before the next poll.
            time.sleep(5)
        else:
            # NOTE(review): under Python 3 dict.values() is a view, not a
            # list — confirm callers do not rely on list semantics.
            return spot_req_inst_ids.values()
    module.fail_json(msg = "wait for spot requests timeout on %s" % time.asctime())
def enforce_count(module, ec2, vpc):
    """
    Create or terminate instances so that exactly 'exact_count' instances
    matching 'count_tag' (optionally restricted to 'zone') are running.
    module: Ansible module object
    ec2: authenticated ec2 connection object
    vpc: authenticated VPCConnection object (may be None)
    Returns:
        (all_instances, instance_dict_array, changed_instance_ids, changed)
        where all_instances describes every matching instance after
        enforcement, and instance_dict_array / changed_instance_ids describe
        only the instances created or terminated by this call.
    """
    exact_count = module.params.get('exact_count')
    count_tag = module.params.get('count_tag')
    zone = module.params.get('zone')
    # fail here if the exact count was specified without filtering
    # on a tag, as this may lead to a undesired removal of instances
    if exact_count and count_tag is None:
        module.fail_json(msg="you must use the 'count_tag' option with exact_count")
    reservations, instances = find_running_instances_by_count_tag(module, ec2, count_tag, zone)
    changed = None
    # NOTE(review): checkmode is hard-coded False here; Ansible check mode
    # does not short-circuit this function — confirm whether intended.
    checkmode = False
    instance_dict_array = []
    changed_instance_ids = None
    if len(instances) == exact_count:
        changed = False
    elif len(instances) < exact_count:
        # Too few: launch the difference.
        changed = True
        to_create = exact_count - len(instances)
        if not checkmode:
            (instance_dict_array, changed_instance_ids, changed) \
                = create_instances(module, ec2, vpc, override_count=to_create)
            for inst in instance_dict_array:
                instances.append(inst)
    elif len(instances) > exact_count:
        # Too many: terminate the excess, preferring the lowest-sorted ids.
        changed = True
        to_remove = len(instances) - exact_count
        if not checkmode:
            all_instance_ids = sorted([ x.id for x in instances ])
            remove_ids = all_instance_ids[0:to_remove]
            instances = [ x for x in instances if x.id not in remove_ids]
            (changed, instance_dict_array, changed_instance_ids) \
                = terminate_instances(module, ec2, remove_ids)
            terminated_list = []
            for inst in instance_dict_array:
                inst['state'] = "terminated"
                terminated_list.append(inst)
            instance_dict_array = terminated_list
    # ensure all instances are dictionaries
    all_instances = []
    for inst in instances:
        if not isinstance(inst, dict):
            inst = get_instance_info(inst)
        all_instances.append(inst)
    return (all_instances, instance_dict_array, changed_instance_ids, changed)
def create_instances(module, ec2, vpc, override_count=None):
    """
    Creates new instances
    module : AnsibleModule object
    ec2: authenticated ec2 connection object
    vpc: authenticated VPCConnection object (may be None when no region is set)
    override_count: if set, launch this many instances instead of the
        'count' module parameter (used by enforce_count)
    Returns:
        A tuple (instance_dict_array, created_instance_ids, changed) with
        information about the instances that were launched
    """
    # Pull every relevant module parameter up front.
    key_name = module.params.get('key_name')
    id = module.params.get('id')
    group_name = module.params.get('group')
    group_id = module.params.get('group_id')
    zone = module.params.get('zone')
    instance_type = module.params.get('instance_type')
    tenancy = module.params.get('tenancy')
    spot_price = module.params.get('spot_price')
    spot_type = module.params.get('spot_type')
    image = module.params.get('image')
    if override_count:
        count = override_count
    else:
        count = module.params.get('count')
    monitoring = module.params.get('monitoring')
    kernel = module.params.get('kernel')
    ramdisk = module.params.get('ramdisk')
    wait = module.params.get('wait')
    wait_timeout = int(module.params.get('wait_timeout'))
    spot_wait_timeout = int(module.params.get('spot_wait_timeout'))
    placement_group = module.params.get('placement_group')
    user_data = module.params.get('user_data')
    instance_tags = module.params.get('instance_tags')
    vpc_subnet_id = module.params.get('vpc_subnet_id')
    assign_public_ip = module.boolean(module.params.get('assign_public_ip'))
    private_ip = module.params.get('private_ip')
    instance_profile_name = module.params.get('instance_profile_name')
    volumes = module.params.get('volumes')
    ebs_optimized = module.params.get('ebs_optimized')
    exact_count = module.params.get('exact_count')
    count_tag = module.params.get('count_tag')
    source_dest_check = module.boolean(module.params.get('source_dest_check'))
    termination_protection = module.boolean(module.params.get('termination_protection'))
    network_interfaces = module.params.get('network_interfaces')
    spot_launch_group = module.params.get('spot_launch_group')
    instance_initiated_shutdown_behavior = module.params.get('instance_initiated_shutdown_behavior')
    # group_id and group_name are exclusive of each other
    if group_id and group_name:
        module.fail_json(msg = str("Use only one type of parameter (group_name) or (group_id)"))
    # Resolve the VPC id from the subnet, when a subnet was given.
    vpc_id = None
    if vpc_subnet_id:
        if not vpc:
            module.fail_json(msg="region must be specified")
        else:
            vpc_id = vpc.get_all_subnets(subnet_ids=[vpc_subnet_id])[0].vpc_id
    else:
        vpc_id = None
    try:
        # Here we try to lookup the group id from the security group name - if group is set.
        if group_name:
            if vpc_id:
                grp_details = ec2.get_all_security_groups(filters={'vpc_id': vpc_id})
            else:
                grp_details = ec2.get_all_security_groups()
            if isinstance(group_name, basestring):
                group_name = [group_name]
            unmatched = set(group_name).difference(str(grp.name) for grp in grp_details)
            if len(unmatched) > 0:
                module.fail_json(msg="The following group names are not valid: %s" % ', '.join(unmatched))
            group_id = [ str(grp.id) for grp in grp_details if str(grp.name) in group_name ]
        # Now we try to lookup the group id testing if group exists.
        elif group_id:
            #wrap the group_id in a list if it's not one already
            if isinstance(group_id, basestring):
                group_id = [group_id]
            grp_details = ec2.get_all_security_groups(group_ids=group_id)
            group_name = [grp_item.name for grp_item in grp_details]
    except boto.exception.NoAuthHandlerFound as e:
        module.fail_json(msg = str(e))
    # Lookup any instances that match our run id (idempotent launches via
    # the 'id' / client-token parameter).
    running_instances = []
    count_remaining = int(count)
    if id != None:
        filter_dict = {'client-token':id, 'instance-state-name' : 'running'}
        previous_reservations = ec2.get_all_instances(None, filter_dict)
        for res in previous_reservations:
            for prev_instance in res.instances:
                running_instances.append(prev_instance)
        count_remaining = count_remaining - len(running_instances)
    # Both min_count and max_count equal count parameter. This means the launch request is explicit (we want count, or fail) in how many instances we want.
    if count_remaining == 0:
        changed = False
    else:
        changed = True
        try:
            # Base launch parameters common to on-demand and spot requests.
            params = {'image_id': image,
                      'key_name': key_name,
                      'monitoring_enabled': monitoring,
                      'placement': zone,
                      'instance_type': instance_type,
                      'kernel_id': kernel,
                      'ramdisk_id': ramdisk,
                      'user_data': user_data}
            if ebs_optimized:
                params['ebs_optimized'] = ebs_optimized
            # 'tenancy' always has a default value, but it is not a valid parameter for spot instance request
            if not spot_price:
                params['tenancy'] = tenancy
            if boto_supports_profile_name_arg(ec2):
                params['instance_profile_name'] = instance_profile_name
            else:
                if instance_profile_name is not None:
                    module.fail_json(
                        msg="instance_profile_name parameter requires Boto version 2.5.0 or higher")
            if assign_public_ip:
                if not boto_supports_associate_public_ip_address(ec2):
                    module.fail_json(
                        msg="assign_public_ip parameter requires Boto version 2.13.0 or higher.")
                elif not vpc_subnet_id:
                    module.fail_json(
                        msg="assign_public_ip only available with vpc_subnet_id")
                else:
                    # Public-IP assignment must ride on an explicit network
                    # interface specification.
                    if private_ip:
                        interface = boto.ec2.networkinterface.NetworkInterfaceSpecification(
                            subnet_id=vpc_subnet_id,
                            private_ip_address=private_ip,
                            groups=group_id,
                            associate_public_ip_address=assign_public_ip)
                    else:
                        interface = boto.ec2.networkinterface.NetworkInterfaceSpecification(
                            subnet_id=vpc_subnet_id,
                            groups=group_id,
                            associate_public_ip_address=assign_public_ip)
                    interfaces = boto.ec2.networkinterface.NetworkInterfaceCollection(interface)
                    params['network_interfaces'] = interfaces
            else:
                if network_interfaces:
                    # Attach the caller-supplied pre-existing ENIs in order.
                    if isinstance(network_interfaces, basestring):
                        network_interfaces = [network_interfaces]
                    interfaces = []
                    for i, network_interface_id in enumerate(network_interfaces):
                        interface = boto.ec2.networkinterface.NetworkInterfaceSpecification(
                            network_interface_id=network_interface_id,
                            device_index=i)
                        interfaces.append(interface)
                    params['network_interfaces'] = \
                        boto.ec2.networkinterface.NetworkInterfaceCollection(*interfaces)
                else:
                    params['subnet_id'] = vpc_subnet_id
                    if vpc_subnet_id:
                        params['security_group_ids'] = group_id
                    else:
                        params['security_groups'] = group_name
            if volumes:
                bdm = BlockDeviceMapping()
                for volume in volumes:
                    if 'device_name' not in volume:
                        module.fail_json(msg = 'Device name must be set for volume')
                    # Minimum volume size is 1GB. We'll use volume size explicitly set to 0
                    # to be a signal not to create this volume
                    if 'volume_size' not in volume or int(volume['volume_size']) > 0:
                        bdm[volume['device_name']] = create_block_device(module, ec2, volume)
                params['block_device_map'] = bdm
            # check to see if we're using spot pricing first before starting instances
            if not spot_price:
                if assign_public_ip and private_ip:
                    params.update(dict(
                        min_count = count_remaining,
                        max_count = count_remaining,
                        client_token = id,
                        placement_group = placement_group,
                    ))
                else:
                    params.update(dict(
                        min_count = count_remaining,
                        max_count = count_remaining,
                        client_token = id,
                        placement_group = placement_group,
                        private_ip_address = private_ip,
                    ))
                # For ordinary (not spot) instances, we can select 'stop'
                # (the default) or 'terminate' here.
                params['instance_initiated_shutdown_behavior'] = instance_initiated_shutdown_behavior or 'stop'
                res = ec2.run_instances(**params)
                instids = [ i.id for i in res.instances ]
                # NOTE(review): this retry loop has no sleep or iteration
                # cap, so it busy-polls until the ids become visible —
                # confirm acceptable.
                while True:
                    try:
                        ec2.get_all_instances(instids)
                        break
                    except boto.exception.EC2ResponseError as e:
                        if "<Code>InvalidInstanceID.NotFound</Code>" in str(e):
                            # there's a race between start and get an instance
                            continue
                        else:
                            module.fail_json(msg = str(e))
                # The instances returned through ec2.run_instances above can be in
                # terminated state due to idempotency. See commit 7f11c3d for a complete
                # explanation.
                terminated_instances = [
                    str(instance.id) for instance in res.instances if instance.state == 'terminated'
                ]
                if terminated_instances:
                    module.fail_json(msg = "Instances with id(s) %s " % terminated_instances +
                                     "were created previously but have since been terminated - " +
                                     "use a (possibly different) 'instanceid' parameter")
            else:
                # Spot-request path.
                if private_ip:
                    module.fail_json(
                        msg='private_ip only available with on-demand (non-spot) instances')
                if boto_supports_param_in_spot_request(ec2, 'placement_group'):
                    params['placement_group'] = placement_group
                elif placement_group :
                    module.fail_json(
                        msg="placement_group parameter requires Boto version 2.3.0 or higher.")
                # You can't tell spot instances to 'stop'; they will always be
                # 'terminate'd. For convenience, we'll ignore the latter value.
                if instance_initiated_shutdown_behavior and instance_initiated_shutdown_behavior != 'terminate':
                    module.fail_json(
                        msg="instance_initiated_shutdown_behavior=stop is not supported for spot instances.")
                if spot_launch_group and isinstance(spot_launch_group, basestring):
                    params['launch_group'] = spot_launch_group
                params.update(dict(
                    count = count_remaining,
                    type = spot_type,
                ))
                res = ec2.request_spot_instances(spot_price, **params)
                # Now we have to do the intermediate waiting
                if wait:
                    instids = await_spot_requests(module, ec2, res, count)
        except boto.exception.BotoServerError as e:
            module.fail_json(msg = "Instance creation failed => %s: %s" % (e.error_code, e.error_message))
        # wait here until the instances are up
        num_running = 0
        wait_timeout = time.time() + wait_timeout
        while wait_timeout > time.time() and num_running < len(instids):
            try:
                res_list = ec2.get_all_instances(instids)
            except boto.exception.BotoServerError as e:
                if e.error_code == 'InvalidInstanceID.NotFound':
                    time.sleep(1)
                    continue
                else:
                    raise
            num_running = 0
            for res in res_list:
                num_running += len([ i for i in res.instances if i.state=='running' ])
            if len(res_list) <= 0:
                # got a bad response of some sort, possibly due to
                # stale/cached data. Wait a second and then try again
                time.sleep(1)
                continue
            if wait and num_running < len(instids):
                time.sleep(5)
            else:
                break
        if wait and wait_timeout <= time.time():
            # waiting took too long
            module.fail_json(msg = "wait for instances running timeout on %s" % time.asctime())
        #We do this after the loop ends so that we end up with one list
        for res in res_list:
            running_instances.extend(res.instances)
        # Enabled by default by AWS
        if source_dest_check is False:
            for inst in res.instances:
                inst.modify_attribute('sourceDestCheck', False)
        # Disabled by default by AWS
        if termination_protection is True:
            for inst in res.instances:
                inst.modify_attribute('disableApiTermination', True)
        # Leave this as late as possible to try and avoid InvalidInstanceID.NotFound
        if instance_tags:
            try:
                ec2.create_tags(instids, instance_tags)
            except boto.exception.EC2ResponseError as e:
                module.fail_json(msg = "Instance tagging failed => %s: %s" % (e.error_code, e.error_message))
    instance_dict_array = []
    created_instance_ids = []
    for inst in running_instances:
        # Refresh each instance so the reported state is current.
        inst.update()
        d = get_instance_info(inst)
        created_instance_ids.append(inst.id)
        instance_dict_array.append(d)
    return (instance_dict_array, created_instance_ids, changed)
def terminate_instances(module, ec2, instance_ids):
    """
    Terminates a list of instances
    module: Ansible module object
    ec2: authenticated ec2 connection object
    instance_ids: a list of instance ids to terminate, in the form of
      [ <inst-id>, ..]
    Returns a tuple (changed, instance_dict_array, terminated_instance_ids)
    describing the instances terminated.
    If no instance was in a terminable state ('running' or 'stopped'),
    "changed" will be False.
    """
    # Whether to wait for termination to complete before returning
    wait = module.params.get('wait')
    wait_timeout = int(module.params.get('wait_timeout'))
    changed = False
    instance_dict_array = []
    if not isinstance(instance_ids, list) or len(instance_ids) < 1:
        module.fail_json(msg='instance_ids should be a list of instances, aborting')
    terminated_instance_ids = []
    for res in ec2.get_all_instances(instance_ids):
        for inst in res.instances:
            # Only 'running' or 'stopped' instances can be terminated; others
            # are silently skipped.
            if inst.state == 'running' or inst.state == 'stopped':
                terminated_instance_ids.append(inst.id)
                instance_dict_array.append(get_instance_info(inst))
                try:
                    ec2.terminate_instances([inst.id])
                except EC2ResponseError as e:
                    module.fail_json(msg='Unable to terminate instance {0}, error: {1}'.format(inst.id, e))
                changed = True
    # wait here until the instances are 'terminated'
    if wait:
        num_terminated = 0
        wait_timeout = time.time() + wait_timeout
        while wait_timeout > time.time() and num_terminated < len(terminated_instance_ids):
            response = ec2.get_all_instances( \
                instance_ids=terminated_instance_ids, \
                filters={'instance-state-name':'terminated'})
            try:
                num_terminated = sum([len(res.instances) for res in response])
            except Exception as e:
                # got a bad response of some sort, possibly due to
                # stale/cached data. Wait a second and then try again
                time.sleep(1)
                continue
            if num_terminated < len(terminated_instance_ids):
                time.sleep(5)
        # waiting took too long
        if wait_timeout < time.time() and num_terminated < len(terminated_instance_ids):
            module.fail_json(msg = "wait for instance termination timeout on %s" % time.asctime())
        #Lets get the current state of the instances after terminating - issue600
        instance_dict_array = []
        for res in ec2.get_all_instances(instance_ids=terminated_instance_ids,\
                                         filters={'instance-state-name':'terminated'}):
            for inst in res.instances:
                instance_dict_array.append(get_instance_info(inst))
    return (changed, instance_dict_array, terminated_instance_ids)
def startstop_instances(module, ec2, instance_ids, state, instance_tags):
    """
    Starts or stops a list of existing instances
    module: Ansible module object
    ec2: authenticated ec2 connection object
    instance_ids: The list of instances to start in the form of
      [ {id: <inst-id>}, ..]
    instance_tags: A dict of tag keys and values in the form of
      {key: value, ... }
    state: Intended state ("running" or "stopped")
    Returns a tuple (changed, instance_dict_array, instance_ids)
    about the instances started/stopped.
    If the instance was not able to change state,
    "changed" will be set to False.
    Note that if instance_ids and instance_tags are both non-empty,
    this method will process the intersection of the two
    """
    wait = module.params.get('wait')
    wait_timeout = int(module.params.get('wait_timeout'))
    source_dest_check = module.params.get('source_dest_check')
    termination_protection = module.params.get('termination_protection')
    changed = False
    instance_dict_array = []
    if not isinstance(instance_ids, list) or len(instance_ids) < 1:
        # Fail unless the user defined instance tags
        if not instance_tags:
            module.fail_json(msg='instance_ids should be a list of instances, aborting')
    # To make an EC2 tag filter, we need to prepend 'tag:' to each key.
    # An empty filter does no filtering, so it's safe to pass it to the
    # get_all_instances method even if the user did not specify instance_tags
    filters = {}
    if instance_tags:
        for key, value in instance_tags.items():
            filters["tag:" + key] = value
    # Check that our instances are not in the state we want to take
    # Check (and eventually change) instances attributes and instances state
    existing_instances_array = []
    for res in ec2.get_all_instances(instance_ids, filters=filters):
        for inst in res.instances:
            # Check "source_dest_check" attribute
            try:
                if inst.vpc_id is not None and inst.get_attribute('sourceDestCheck')['sourceDestCheck'] != source_dest_check:
                    inst.modify_attribute('sourceDestCheck', source_dest_check)
                    changed = True
            except boto.exception.EC2ResponseError as exc:
                # instances with more than one Elastic Network Interface will
                # fail, because they have the sourceDestCheck attribute defined
                # per-interface
                if exc.code == 'InvalidInstanceID':
                    for interface in inst.interfaces:
                        if interface.source_dest_check != source_dest_check:
                            ec2.modify_network_interface_attribute(interface.id, "sourceDestCheck", source_dest_check)
                            changed = True
                else:
                    # traceback.format_exc() takes no exception argument; the
                    # active exception is taken from the current context.
                    module.fail_json(msg='Failed to handle source_dest_check state for instance {0}, error: {1}'.format(inst.id, exc),
                                     exception=traceback.format_exc())
            # Check "termination_protection" attribute
            if (inst.get_attribute('disableApiTermination')['disableApiTermination'] != termination_protection
                    and termination_protection is not None):
                inst.modify_attribute('disableApiTermination', termination_protection)
                changed = True
            # Check instance state
            if inst.state != state:
                instance_dict_array.append(get_instance_info(inst))
                try:
                    if state == 'running':
                        inst.start()
                    else:
                        inst.stop()
                except EC2ResponseError as e:
                    module.fail_json(msg='Unable to change state for instance {0}, error: {1}'.format(inst.id, e))
                changed = True
            existing_instances_array.append(inst.id)
    # Process the union of the ids we found and the ids we were given.
    instance_ids = list(set(existing_instances_array + (instance_ids or [])))
    ## Wait for all the instances to finish starting or stopping
    wait_timeout = time.time() + wait_timeout
    while wait and wait_timeout > time.time():
        instance_dict_array = []
        matched_instances = []
        for res in ec2.get_all_instances(instance_ids):
            for i in res.instances:
                if i.state == state:
                    instance_dict_array.append(get_instance_info(i))
                    matched_instances.append(i)
        if len(matched_instances) < len(instance_ids):
            time.sleep(5)
        else:
            break
    if wait and wait_timeout <= time.time():
        # waiting took too long
        module.fail_json(msg = "wait for instances running timeout on %s" % time.asctime())
    return (changed, instance_dict_array, instance_ids)
def restart_instances(module, ec2, instance_ids, state, instance_tags):
    """
    Reboots a list of existing instances.

    module: Ansible module object
    ec2: authenticated ec2 connection object
    instance_ids: The list of instances to restart in the form of
      [ {id: <inst-id>}, ..]
    instance_tags: A dict of tag keys and values in the form of
      {key: value, ... }
    state: Intended state ("restarted")

    Returns a tuple (changed, instance_dict_array, instance_ids);
    instance_dict_array holds info for every instance that was rebooted.
    If no instance was able to change state, "changed" will be False.

    Wait will not apply here as this is a OS level operation.

    Note that if instance_ids and instance_tags are both non-empty,
    this method will process the intersection of the two.

    Before rebooting, each instance's sourceDestCheck and
    disableApiTermination attributes are reconciled with the module
    parameters (same pattern as startstop_instances).
    """
    source_dest_check = module.params.get('source_dest_check')
    termination_protection = module.params.get('termination_protection')
    changed = False
    instance_dict_array = []

    if not isinstance(instance_ids, list) or len(instance_ids) < 1:
        # Fail unless the user defined instance tags
        if not instance_tags:
            module.fail_json(msg='instance_ids should be a list of instances, aborting')

    # To make an EC2 tag filter, we need to prepend 'tag:' to each key.
    # An empty filter does no filtering, so it's safe to pass it to the
    # get_all_instances method even if the user did not specify instance_tags
    filters = {}
    if instance_tags:
        for key, value in instance_tags.items():
            filters["tag:" + key] = value

    # Check that our instances are not in the state we want to take
    # Check (and eventually change) instances attributes and instances state
    for res in ec2.get_all_instances(instance_ids, filters=filters):
        for inst in res.instances:
            # Check "source_dest_check" attribute
            try:
                if inst.vpc_id is not None and inst.get_attribute('sourceDestCheck')['sourceDestCheck'] != source_dest_check:
                    inst.modify_attribute('sourceDestCheck', source_dest_check)
                    changed = True
            except boto.exception.EC2ResponseError as exc:
                # instances with more than one Elastic Network Interface will
                # fail, because they have the sourceDestCheck attribute defined
                # per-interface
                if exc.code == 'InvalidInstanceID':
                    for interface in inst.interfaces:
                        if interface.source_dest_check != source_dest_check:
                            ec2.modify_network_interface_attribute(interface.id, "sourceDestCheck", source_dest_check)
                            changed = True
                else:
                    # NOTE(review): traceback.format_exc() takes a depth limit,
                    # not an exception object - passing exc here looks wrong;
                    # confirm against the rest of the module.
                    module.fail_json(msg='Failed to handle source_dest_check state for instance {0}, error: {1}'.format(inst.id, exc),
                                     exception=traceback.format_exc(exc))

            # Check "termination_protection" attribute
            if (inst.get_attribute('disableApiTermination')['disableApiTermination'] != termination_protection
                    and termination_protection is not None):
                inst.modify_attribute('disableApiTermination', termination_protection)
                changed = True

            # Check instance state
            # NOTE(review): EC2 never reports a state named "restarted", so
            # for state == 'restarted' this condition is effectively always
            # true and every matched instance is rebooted - confirm intended.
            if inst.state != state:
                instance_dict_array.append(get_instance_info(inst))
                try:
                    inst.reboot()
                # NOTE(review): assumes EC2ResponseError is imported at module
                # top (elsewhere the qualified boto.exception.EC2ResponseError
                # is used) - verify the import exists.
                except EC2ResponseError as e:
                    module.fail_json(msg='Unable to change state for instance {0}, error: {1}'.format(inst.id, e))
                changed = True

    return (changed, instance_dict_array, instance_ids)
def main():
    """Entry point for the ec2 module.

    Builds the argument spec, creates the AnsibleModule, connects to EC2
    (and to the VPC endpoint when a region is configured), then dispatches
    on the requested ``state``:

      * ``absent``                -> terminate_instances()
      * ``running`` / ``stopped`` -> startstop_instances()
      * ``restarted``             -> restart_instances()
      * ``present``               -> create_instances() or enforce_count()

    Always exits via module.exit_json() / module.fail_json().
    """
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
        key_name = dict(aliases = ['keypair']),
        id = dict(),
        group = dict(type='list', aliases=['groups']),
        group_id = dict(type='list'),
        zone = dict(aliases=['aws_zone', 'ec2_zone']),
        instance_type = dict(aliases=['type']),
        spot_price = dict(),
        spot_type = dict(default='one-time', choices=["one-time", "persistent"]),
        spot_launch_group = dict(),
        image = dict(),
        kernel = dict(),
        count = dict(type='int', default='1'),
        monitoring = dict(type='bool', default=False),
        ramdisk = dict(),
        wait = dict(type='bool', default=False),
        wait_timeout = dict(default=300),
        spot_wait_timeout = dict(default=600),
        placement_group = dict(),
        user_data = dict(),
        instance_tags = dict(type='dict'),
        vpc_subnet_id = dict(),
        assign_public_ip = dict(type='bool', default=False),
        private_ip = dict(),
        instance_profile_name = dict(),
        instance_ids = dict(type='list', aliases=['instance_id']),
        source_dest_check = dict(type='bool', default=True),
        termination_protection = dict(type='bool', default=None),
        state = dict(default='present', choices=['present', 'absent', 'running', 'restarted', 'stopped']),
        instance_initiated_shutdown_behavior=dict(default=None, choices=['stop', 'terminate']),
        exact_count = dict(type='int', default=None),
        count_tag = dict(),
        volumes = dict(type='list'),
        ebs_optimized = dict(type='bool', default=False),
        tenancy = dict(default='default'),
        network_interfaces = dict(type='list', aliases=['network_interface'])
    )
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        mutually_exclusive = [
            ['exact_count', 'count'],
            ['exact_count', 'state'],
            ['exact_count', 'instance_ids'],
            ['network_interfaces', 'assign_public_ip'],
            ['network_interfaces', 'group'],
            ['network_interfaces', 'group_id'],
            ['network_interfaces', 'private_ip'],
            ['network_interfaces', 'vpc_subnet_id'],
        ],
    )

    if not HAS_BOTO:
        module.fail_json(msg='boto required for this module')

    ec2 = ec2_connect(module)

    region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module)
    if region:
        try:
            vpc = connect_to_aws(boto.vpc, region, **aws_connect_kwargs)
        except boto.exception.NoAuthHandlerFound as e:
            module.fail_json(msg = str(e))
    else:
        vpc = None

    tagged_instances = []
    state = module.params['state']

    if state == 'absent':
        instance_ids = module.params['instance_ids']
        if not instance_ids:
            module.fail_json(msg='instance_ids list is required for absent state')
        (changed, instance_dict_array, new_instance_ids) = terminate_instances(module, ec2, instance_ids)
    elif state in ('running', 'stopped'):
        instance_ids = module.params.get('instance_ids')
        instance_tags = module.params.get('instance_tags')
        if not (isinstance(instance_ids, list) or isinstance(instance_tags, dict)):
            module.fail_json(msg='running list needs to be a list of instances or set of tags to run: %s' % instance_ids)
        (changed, instance_dict_array, new_instance_ids) = startstop_instances(module, ec2, instance_ids, state, instance_tags)
    # BUG FIX: this used to read ``state in ('restarted')``.  Parentheses
    # without a comma do not create a tuple, so that expression performed a
    # *substring* test against the string 'restarted' (e.g. 'rest' would
    # match).  A plain equality test expresses the intent correctly.
    elif state == 'restarted':
        instance_ids = module.params.get('instance_ids')
        instance_tags = module.params.get('instance_tags')
        if not (isinstance(instance_ids, list) or isinstance(instance_tags, dict)):
            module.fail_json(msg='running list needs to be a list of instances or set of tags to run: %s' % instance_ids)
        (changed, instance_dict_array, new_instance_ids) = restart_instances(module, ec2, instance_ids, state, instance_tags)
    elif state == 'present':
        # Changed is always set to true when provisioning new instances
        if not module.params.get('image'):
            module.fail_json(msg='image parameter is required for new instance')
        if module.params.get('exact_count') is None:
            (instance_dict_array, new_instance_ids, changed) = create_instances(module, ec2, vpc)
        else:
            (tagged_instances, instance_dict_array, new_instance_ids, changed) = enforce_count(module, ec2, vpc)

    module.exit_json(changed=changed, instance_ids=new_instance_ids, instances=instance_dict_array, tagged_instances=tagged_instances)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
main()
| wimnat/ansible-modules-core | cloud/amazon/ec2.py | Python | gpl-3.0 | 61,472 |
// ReSharper disable All
using System;
using System.Diagnostics;
using System.Linq;
using MixERP.Net.Api.Transactions.Fakes;
using MixERP.Net.ApplicationState.Cache;
using Xunit;
namespace MixERP.Net.Api.Transactions.Tests
{
/// <summary>
/// Smoke test for <see cref="GetPurchaseController"/> backed by the fake
/// repository.  The test only runs in Debug builds (Conditional attribute).
/// </summary>
public class GetPurchaseTests
{
    /// <summary>
    /// Builds a controller wired to the fake repository, an empty catalog
    /// name and a fresh login view.
    /// </summary>
    public static GetPurchaseController Fixture()
    {
        return new GetPurchaseController(new GetPurchaseRepository(), "", new LoginView());
    }

    [Fact]
    [Conditional("Debug")]
    public void Execute()
    {
        GetPurchaseController controller = Fixture();
        var annotation = new GetPurchaseController.Annotation();

        var actual = controller.Execute(annotation);

        Assert.Equal(1, actual);
    }
}
} | gguruss/mixerp | src/Libraries/Web API/Transactions/Tests/GetPurchaseTests.cs | C# | gpl-3.0 | 720 |
using System;
using System.Collections.ObjectModel;
using System.Globalization;
using System.Linq;
using MixERP.Net.Common;
using MixERP.Net.Core.Modules.Sales.Data.Data;
using MixERP.Net.DbFactory;
using MixERP.Net.Entities.Core;
using MixERP.Net.Entities.Transactions.Models;
using Npgsql;
namespace MixERP.Net.Core.Modules.Sales.Data.Transactions
{
/// <summary>
/// Posts a sales transaction to the general ledger by invoking the
/// <c>transactions.post_sales</c> database function.
/// </summary>
internal static class GlTransaction
{
    /// <summary>
    /// Posts the supplied sales entry and returns the new transaction id
    /// (as produced by <c>transactions.post_sales</c>), or 0 when
    /// <paramref name="stockMaster"/> is null or <paramref name="details"/>
    /// is null/empty.
    /// </summary>
    public static long Add(string catalog, string bookName, DateTime valueDate, int officeId, int userId,
        long loginId, int costCenterId, string referenceNumber, string statementReference, StockMaster stockMaster,
        Collection<StockDetail> details, Collection<Attachment> attachments, bool nonTaxable,
        Collection<long> tranIds)
    {
        // Nothing to post without a master record and at least one detail row.
        if (stockMaster == null)
        {
            return 0;
        }

        if (details == null)
        {
            return 0;
        }

        if (details.Count.Equals(0))
        {
            return 0;
        }

        // NOTE(review): the SQL text is assembled with string.Format.
        // Presumably the helper methods emit parameter *placeholders* (which
        // are then bound via AddRange below) and `ids` only ever contains
        // longs - verify, since anything else would be an injection risk.
        string detail = StockMasterDetailHelper.CreateStockMasterDetailParameter(details);
        string attachment = AttachmentHelper.CreateAttachmentModelParameter(attachments);

        // Empty tranIds collapses to a single NULL bigint array element.
        string ids = "NULL::bigint";

        if (tranIds != null && tranIds.Count > 0)
        {
            ids = string.Join(",", tranIds);
        }

        string sql = string.Format(CultureInfo.InvariantCulture,
            "SELECT * FROM transactions.post_sales(@BookName::national character varying(48), @OfficeId::integer, @UserId::integer, @LoginId::bigint, @ValueDate::date, @CostCenterId::integer, @ReferenceNumber::national character varying(24), @StatementReference::text, @IsCredit::boolean, @PaymentTermId::integer, @PartyCode::national character varying(12), @PriceTypeId::integer, @SalespersonId::integer, @ShipperId::integer, @ShippingAddressCode::national character varying(12), @StoreId::integer, @NonTaxable::boolean, ARRAY[{0}], ARRAY[{1}], ARRAY[{2}])",
            detail, attachment, ids);

        using (NpgsqlCommand command = new NpgsqlCommand(sql))
        {
            command.Parameters.AddWithValue("@BookName", bookName);
            command.Parameters.AddWithValue("@OfficeId", officeId);
            command.Parameters.AddWithValue("@UserId", userId);
            command.Parameters.AddWithValue("@LoginId", loginId);
            command.Parameters.AddWithValue("@ValueDate", valueDate);
            command.Parameters.AddWithValue("@CostCenterId", costCenterId);
            command.Parameters.AddWithValue("@ReferenceNumber", referenceNumber);
            command.Parameters.AddWithValue("@StatementReference", statementReference);
            command.Parameters.AddWithValue("@IsCredit", stockMaster.IsCredit);

            // A PaymentTermId of 0 means "not specified" and is stored as NULL.
            if (stockMaster.PaymentTermId.Equals(0))
            {
                command.Parameters.AddWithValue("@PaymentTermId", DBNull.Value);
            }
            else
            {
                command.Parameters.AddWithValue("@PaymentTermId", stockMaster.PaymentTermId);
            }

            command.Parameters.AddWithValue("@PartyCode", stockMaster.PartyCode);
            command.Parameters.AddWithValue("@PriceTypeId", stockMaster.PriceTypeId);
            command.Parameters.AddWithValue("@SalespersonId", stockMaster.SalespersonId);
            command.Parameters.AddWithValue("@ShipperId", stockMaster.ShipperId);
            command.Parameters.AddWithValue("@ShippingAddressCode", stockMaster.ShippingAddressCode);
            command.Parameters.AddWithValue("@StoreId", stockMaster.StoreId);
            command.Parameters.AddWithValue("@NonTaxable", nonTaxable);

            // Bind the parameters referenced by the detail/attachment
            // fragments interpolated into the SQL above.
            command.Parameters.AddRange(StockMasterDetailHelper.AddStockMasterDetailParameter(details).ToArray());
            command.Parameters.AddRange(AttachmentHelper.AddAttachmentParameter(attachments).ToArray());

            long tranId = Conversion.TryCastLong(DbOperation.GetScalarValue(catalog, command));
            return tranId;
        }
    }
}
} | mixerp/mixerp | src/FrontEnd/Modules/Sales.Data/Transactions/GlTransaction.cs | C# | gpl-3.0 | 4,220 |
/**
* Copyright 2015 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
var when = require("when");
var clone = require("clone");
var typeRegistry = require("../registry");
var Log = require("../../log");
var redUtil = require("../../util");
var flowUtil = require("./util");
/**
 * Runtime representation of a single flow (or of the global configuration:
 * when called with one argument, the global config itself is treated as the
 * flow).  Tracks the live node instances belonging to the flow and routes
 * status/error events to any catch/status nodes it contains.
 *
 * @param {Object} global - the global flow configuration
 * @param {Object} [flow] - this flow's configuration
 */
function Flow(global,flow) {
    if (typeof flow === 'undefined') {
        flow = global;
    }
    var activeNodes = {};          // node id -> live node instance
    var subflowInstanceNodes = {}; // subflow instance id -> ids of its internal nodes
    var catchNodeMap = {};         // flow/subflow id (z) -> catch nodes in that scope
    var statusNodeMap = {};        // flow/subflow id (z) -> status nodes in that scope

    /**
     * Instantiate this flow's nodes: config nodes first, then regular
     * nodes, expanding subflow instances as needed.  Finally (re)build the
     * catch/status routing maps.
     *
     * @param {Object} [diff] - partial-deploy diff; node ids listed in
     *        diff.rewired get their wires refreshed rather than recreated.
     */
    this.start = function(diff) {
        var node;
        var newNode;
        var id;
        catchNodeMap = {};
        statusNodeMap = {};
        // Config nodes - created only if not already active.
        for (id in flow.configs) {
            if (flow.configs.hasOwnProperty(id)) {
                node = flow.configs[id];
                if (!activeNodes[id]) {
                    newNode = createNode(node.type,node);
                    if (newNode) {
                        activeNodes[id] = newNode;
                    }
                }
            }
        }
        if (diff && diff.rewired) {
            for (var j=0;j<diff.rewired.length;j++) {
                var rewireNode = activeNodes[diff.rewired[j]];
                if (rewireNode) {
                    rewireNode.updateWires(flow.nodes[rewireNode.id].wires);
                }
            }
        }
        // Regular nodes and subflow instances.
        for (id in flow.nodes) {
            if (flow.nodes.hasOwnProperty(id)) {
                node = flow.nodes[id];
                if (!node.subflow) {
                    if (!activeNodes[id]) {
                        newNode = createNode(node.type,node);
                        if (newNode) {
                            activeNodes[id] = newNode;
                        }
                    }
                } else {
                    // Subflow instance: expand its definition (local flow
                    // definitions take precedence over global ones) into a
                    // set of freshly-cloned nodes.
                    if (!subflowInstanceNodes[id]) {
                        try {
                            var nodes = createSubflow(flow.subflows[node.subflow]||global.subflows[node.subflow],node,flow.subflows,global.subflows,activeNodes);
                            subflowInstanceNodes[id] = nodes.map(function(n) { return n.id});
                            for (var i=0;i<nodes.length;i++) {
                                if (nodes[i]) {
                                    activeNodes[nodes[i].id] = nodes[i];
                                }
                            }
                        } catch(err) {
                            console.log(err.stack)
                        }
                    }
                }
            }
        }
        // Index catch/status nodes by the flow/subflow (z) they live in so
        // handleError/handleStatus can route events to them.
        for (id in activeNodes) {
            if (activeNodes.hasOwnProperty(id)) {
                node = activeNodes[id];
                if (node.type === "catch") {
                    catchNodeMap[node.z] = catchNodeMap[node.z] || [];
                    catchNodeMap[node.z].push(node);
                } else if (node.type === "status") {
                    statusNodeMap[node.z] = statusNodeMap[node.z] || [];
                    statusNodeMap[node.z].push(node);
                }
            }
        }
    }

    /**
     * Close nodes and wait for their close() promises to settle.
     *
     * @param {Array} [stopList] - ids of nodes to stop; subflow instance
     *        ids are expanded to include their internal nodes.  When
     *        omitted, every active node is stopped.
     * @return {Promise} resolved once all close() calls have settled
     */
    this.stop = function(stopList) {
        return when.promise(function(resolve) {
            var i;
            if (stopList) {
                for (i=0;i<stopList.length;i++) {
                    if (subflowInstanceNodes[stopList[i]]) {
                        // The first in the list is the instance node we already
                        // know about
                        stopList = stopList.concat(subflowInstanceNodes[stopList[i]].slice(1))
                    }
                }
            } else {
                stopList = Object.keys(activeNodes);
            }
            var promises = [];
            for (i=0;i<stopList.length;i++) {
                var node = activeNodes[stopList[i]];
                if (node) {
                    delete activeNodes[stopList[i]];
                    if (subflowInstanceNodes[stopList[i]]) {
                        delete subflowInstanceNodes[stopList[i]];
                    }
                    try {
                        var p = node.close();
                        if (p) {
                            promises.push(p);
                        }
                    } catch(err) {
                        node.error(err);
                    }
                }
            }
            when.settle(promises).then(function() {
                resolve();
            });
        });
    }

    // Swap in updated configuration; takes effect on the next start().
    this.update = function(_global,_flow) {
        global = _global;
        flow = _flow;
    }

    this.getNode = function(id) {
        return activeNodes[id];
    }

    this.getActiveNodes = function() {
        return activeNodes;
    }

    /**
     * Deliver a node's status event to status nodes.  Walks outwards from
     * the reporting node's container (z) through enclosing subflows until
     * some status node handles it; a status node with a scope only accepts
     * events from nodes listed in that scope.
     */
    this.handleStatus = function(node,statusMessage) {
        var targetStatusNodes = null;
        var reportingNode = node;
        var handled = false;
        while(reportingNode && !handled) {
            targetStatusNodes = statusNodeMap[reportingNode.z];
            if (targetStatusNodes) {
                targetStatusNodes.forEach(function(targetStatusNode) {
                    if (targetStatusNode.scope && targetStatusNode.scope.indexOf(node.id) === -1) {
                        return;
                    }
                    var message = {
                        status: {
                            text: "",
                            source: {
                                id: node.id,
                                type: node.type,
                                name: node.name
                            }
                        }
                    };
                    if (statusMessage.text) {
                        message.status.text = statusMessage.text;
                    }
                    targetStatusNode.receive(message);
                    handled = true;
                });
            }
            if (!handled) {
                // Escalate to the enclosing subflow instance, if any.
                reportingNode = activeNodes[reportingNode.z];
            }
        }
    }

    /**
     * Deliver an error to catch nodes, mirroring handleStatus' outward
     * walk.  msg.error.source.count tracks how often the same node has
     * re-raised through its own catch path; after 10 iterations the loop
     * is broken with a "nodes.flow.error-loop" warning.
     */
    this.handleError = function(node,logMessage,msg) {
        var count = 1;
        if (msg && msg.hasOwnProperty("error")) {
            if (msg.error.hasOwnProperty("source")) {
                if (msg.error.source.id === node.id) {
                    count = msg.error.source.count+1;
                    if (count === 10) {
                        node.warn(Log._("nodes.flow.error-loop"));
                        return;
                    }
                }
            }
        }
        var targetCatchNodes = null;
        var throwingNode = node;
        var handled = false;
        while (throwingNode && !handled) {
            targetCatchNodes = catchNodeMap[throwingNode.z];
            if (targetCatchNodes) {
                targetCatchNodes.forEach(function(targetCatchNode) {
                    if (targetCatchNode.scope && targetCatchNode.scope.indexOf(throwingNode.id) === -1) {
                        return;
                    }
                    var errorMessage;
                    if (msg) {
                        errorMessage = redUtil.cloneMessage(msg);
                    } else {
                        errorMessage = {};
                    }
                    // Preserve any pre-existing error under _error before
                    // overwriting it with this one.
                    if (errorMessage.hasOwnProperty("error")) {
                        errorMessage._error = errorMessage.error;
                    }
                    errorMessage.error = {
                        message: logMessage.toString(),
                        source: {
                            id: node.id,
                            type: node.type,
                            name: node.name,
                            count: count
                        }
                    };
                    targetCatchNode.receive(errorMessage);
                    handled = true;
                });
            }
            if (!handled) {
                throwingNode = activeNodes[throwingNode.z];
            }
        }
    }
}
// Matches a property value consisting solely of an environment-variable
// reference of the form "$(NAME)".
var EnvVarPropertyRE = /^\$\((\S+)\)$/;

/**
 * Recursively substitute environment-variable references in a node's
 * configuration, in place.
 *
 * A string value of the exact form "$(NAME)" is replaced with
 * process.env.NAME when that variable is set; otherwise it is left
 * untouched.  Arrays and plain objects are traversed recursively;
 * Buffers are never modified.
 *
 * @param {Object|Array} obj - container holding the property
 * @param {string|number} prop - property name (or array index) to process
 */
function mapEnvVarProperties(obj,prop) {
    if (Buffer.isBuffer(obj[prop])) {
        return;
    } else if (Array.isArray(obj[prop])) {
        for (var i=0;i<obj[prop].length;i++) {
            mapEnvVarProperties(obj[prop],i);
        }
    } else if (typeof obj[prop] === 'string') {
        var m;
        if ( (m = EnvVarPropertyRE.exec(obj[prop])) !== null) {
            if (process.env.hasOwnProperty(m[1])) {
                obj[prop] = process.env[m[1]];
            }
        }
    } else {
        for (var p in obj[prop]) {
            // BUG FIX: the original tested the *function reference*
            // `obj[prop].hasOwnProperty` (always truthy) instead of calling
            // it, so inherited enumerable properties were traversed too.
            // Use a guarded call so only own properties are mapped.
            if (Object.prototype.hasOwnProperty.call(obj[prop], p)) {
                mapEnvVarProperties(obj[prop],p);
            }
        }
    }
}
/**
 * Instantiate a single node of the given registered type.
 *
 * The configuration is cloned (the caller's copy is never mutated),
 * credentials are stripped, and "$(ENV_VAR)" references are resolved
 * before the node constructor runs.
 *
 * @param {string} type - registered node type
 * @param {Object} config - the node's configuration
 * @return {Object|null} the node instance, or null if the type is unknown
 *         or its constructor threw (both cases are logged)
 */
function createNode(type,config) {
    var constructor = typeRegistry.get(type);
    if (!constructor) {
        Log.error(Log._("nodes.flow.unknown-type", {type:type}));
        return null;
    }

    var nodeConfig = clone(config);
    delete nodeConfig.credentials;
    Object.keys(nodeConfig).forEach(function(prop) {
        mapEnvVarProperties(nodeConfig,prop);
    });

    var newNode = null;
    try {
        newNode = new constructor(nodeConfig);
    } catch (err) {
        Log.log({
            level: Log.ERROR,
            id: nodeConfig.id,
            type: type,
            msg: err
        });
    }
    return newNode;
}
/**
 * Instantiate a subflow: clone the subflow definition's nodes under fresh
 * ids, rewire them, create the container node representing the subflow
 * instance, and recursively expand nested subflows.
 *
 * @param {Object} sf - the subflow definition (local or global)
 * @param {Object} sfn - the subflow *instance* node in the parent flow
 * @param {Object} subflows - this flow's subflow definitions, by id
 * @param {Object} globalSubflows - global subflow definitions, by id
 * @param {Object} activeNodes - map of live nodes, updated in place
 * @return {Array} all node instances created for this subflow
 */
function createSubflow(sf,sfn,subflows,globalSubflows,activeNodes) {
    //console.log("CREATE SUBFLOW",sf.id,sfn.id);
    var nodes = [];
    var node_map = {};    // original definition id -> cloned definition
    var newNodes = [];
    var node;
    var wires;
    var i,j,k;

    // Clone a node definition, give it a fresh id, and parent it (z) to
    // the subflow instance.  The original id is preserved in _alias.
    var createNodeInSubflow = function(def) {
        node = clone(def);
        var nid = redUtil.generateId();
        node_map[node.id] = node;
        node._alias = node.id;
        node.id = nid;
        node.z = sfn.id;
        newNodes.push(node);
    }

    // Clone all of the subflow config node definitions and give them new IDs
    for (i in sf.configs) {
        if (sf.configs.hasOwnProperty(i)) {
            createNodeInSubflow(sf.configs[i]);
        }
    }
    // Clone all of the subflow node definitions and give them new IDs
    for (i in sf.nodes) {
        if (sf.nodes.hasOwnProperty(i)) {
            createNodeInSubflow(sf.nodes[i]);
        }
    }

    // Look for any catch/status nodes and update their scope ids
    // Update all subflow interior wiring to reflect new node IDs
    for (i=0;i<newNodes.length;i++) {
        node = newNodes[i];
        if (node.wires) {
            var outputs = node.wires;
            for (j=0;j<outputs.length;j++) {
                wires = outputs[j];
                for (k=0;k<wires.length;k++) {
                    outputs[j][k] = node_map[outputs[j][k]].id
                }
            }
            if ((node.type === 'catch' || node.type === 'status') && node.scope) {
                node.scope = node.scope.map(function(id) {
                    return node_map[id]?node_map[id].id:""
                })
            } else {
                // Remap any other property value that happens to reference
                // a cloned node id (e.g. config node references).
                for (var prop in node) {
                    if (node.hasOwnProperty(prop) && prop !== '_alias') {
                        if (node_map[node[prop]]) {
                            //console.log("Mapped",node.type,node.id,prop,node_map[node[prop]].id);
                            node[prop] = node_map[node[prop]].id;
                        }
                    }
                }
            }
        }
    }

    // Create a subflow node to accept inbound messages and route appropriately
    var Node = require("../Node");
    var subflowInstance = {
        id: sfn.id,
        type: sfn.type,
        z: sfn.z,
        name: sfn.name,
        wires: []
    }
    if (sf.in) {
        // The instance's input forwards straight to the nodes wired to the
        // subflow definition's input port.
        subflowInstance.wires = sf.in.map(function(n) { return n.wires.map(function(w) { return node_map[w.id].id;})})
        subflowInstance._originalWires = clone(subflowInstance.wires);
    }
    var subflowNode = new Node(subflowInstance);
    subflowNode.on("input", function(msg) { this.send(msg);});

    subflowNode._updateWires = subflowNode.updateWires;

    // Rewiring the instance means rewiring the internal nodes connected to
    // the subflow's output ports, so override updateWires.
    subflowNode.updateWires = function(newWires) {
        // Wire the subflow outputs
        if (sf.out) {
            var node,wires,i,j;
            // Restore the original wiring to the internal nodes
            subflowInstance.wires = clone(subflowInstance._originalWires);
            for (i=0;i<sf.out.length;i++) {
                wires = sf.out[i].wires;
                for (j=0;j<wires.length;j++) {
                    if (wires[j].id != sf.id) {
                        node = node_map[wires[j].id];
                        if (node._originalWires) {
                            node.wires = clone(node._originalWires);
                        }
                    }
                }
            }
            var modifiedNodes = {};
            var subflowInstanceModified = false;

            for (i=0;i<sf.out.length;i++) {
                wires = sf.out[i].wires;
                for (j=0;j<wires.length;j++) {
                    if (wires[j].id === sf.id) {
                        // Output port fed directly from the subflow's input.
                        subflowInstance.wires[wires[j].port] = subflowInstance.wires[wires[j].port].concat(newWires[i]);
                        subflowInstanceModified = true;
                    } else {
                        node = node_map[wires[j].id];
                        node.wires[wires[j].port] = node.wires[wires[j].port].concat(newWires[i]);
                        modifiedNodes[node.id] = node;
                    }
                }
            }
            Object.keys(modifiedNodes).forEach(function(id) {
                var node = modifiedNodes[id];
                subflowNode.instanceNodes[id].updateWires(node.wires);
            });
            if (subflowInstanceModified) {
                subflowNode._updateWires(subflowInstance.wires);
            }
        }
    }
    nodes.push(subflowNode);

    // Wire the subflow outputs
    if (sf.out) {
        var modifiedNodes = {};  // NOTE(review): collected but never read here
        for (i=0;i<sf.out.length;i++) {
            wires = sf.out[i].wires;
            for (j=0;j<wires.length;j++) {
                if (wires[j].id === sf.id) {
                    // A subflow input wired straight to a subflow output
                    subflowInstance.wires[wires[j].port] = subflowInstance.wires[wires[j].port].concat(sfn.wires[i])
                    subflowNode._updateWires(subflowInstance.wires);
                } else {
                    node = node_map[wires[j].id];
                    modifiedNodes[node.id] = node;
                    if (!node._originalWires) {
                        node._originalWires = clone(node.wires);
                    }
                    node.wires[wires[j].port] = (node.wires[wires[j].port]||[]).concat(sfn.wires[i]);
                }
            }
        }
    }

    // Instantiate the nodes
    for (i=0;i<newNodes.length;i++) {
        node = newNodes[i];
        var type = node.type;

        var m = /^subflow:(.+)$/.exec(type);
        if (!m) {
            var newNode = createNode(type,node);
            if (newNode) {
                activeNodes[node.id] = newNode;
                nodes.push(newNode);
            }
        } else {
            // Nested subflow instance - expand it recursively.
            var subflowId = m[1];
            nodes = nodes.concat(createSubflow(subflows[subflowId]||globalSubflows[subflowId],node,subflows,globalSubflows,activeNodes));
        }
    }

    subflowNode.instanceNodes = {};

    nodes.forEach(function(node) {
        subflowNode.instanceNodes[node.id] = node;
    });
    return nodes;
}
// Public factory for flow instances; see the Flow constructor above.
module.exports = {
    create: function(global,conf) {
        return new Flow(global,conf);
    }
}
| lemio/w-esp | w-esp-node-red/red/runtime/nodes/flows/Flow.js | JavaScript | gpl-3.0 | 16,004 |
/*
Copyright (c) 2003-2014, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
// British-English ('en-gb') localisation strings for the CKEditor
// "removeformat" plugin.
CKEDITOR.plugins.setLang( 'removeformat', 'en-gb', {
	toolbar: 'Remove Format'
} );
| gmuro/dolibarr | htdocs/includes/ckeditor/ckeditor/_source/plugins/removeformat/lang/en-gb.js | JavaScript | gpl-3.0 | 227 |
/*
Copyright (c) 2003-2014, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
// Kurdish ('ku') localisation strings for the CKEditor "flash" plugin
// dialog.  The values are runtime UI strings and must stay in Kurdish.
CKEDITOR.plugins.setLang( 'flash', 'ku', {
	access: 'دەستپێگەیشتنی نووسراو',
	accessAlways: 'هەمیشه',
	accessNever: 'هەرگیز',
	accessSameDomain: 'هەمان دۆمەین',
	alignAbsBottom: 'له ژێرەوه',
	alignAbsMiddle: 'لەناوەند',
	alignBaseline: 'هێڵەبنەڕەت',
	alignTextTop: 'دەق لەسەرەوه',
	bgcolor: 'ڕەنگی پاشبنەما',
	chkFull: 'ڕێپێدان بە پڕی شاشه',
	chkLoop: 'گرێ',
	chkMenu: 'چالاککردنی لیستەی فلاش',
	chkPlay: 'پێکردنی یان لێدانی خۆکار',
	flashvars: 'گۆڕاوەکان بۆ فلاش',
	hSpace: 'بۆشایی ئاسۆیی',
	properties: 'خاسیەتی فلاش',
	propertiesTab: 'خاسیەت',
	quality: 'جۆرایەتی',
	qualityAutoHigh: 'بەرزی خۆکار',
	qualityAutoLow: 'نزمی خۆکار',
	qualityBest: 'باشترین',
	qualityHigh: 'بەرزی',
	qualityLow: 'نزم',
	qualityMedium: 'مامناوەند',
	scale: 'پێوانه',
	scaleAll: 'نیشاندانی هەموو',
	scaleFit: 'بەوردی بگونجێت',
	scaleNoBorder: 'بێ پەراوێز',
	title: 'خاسیەتی فلاش',
	vSpace: 'بۆشایی ئەستونی',
	validateHSpace: 'بۆشایی ئاسۆیی دەبێت ژمارە بێت.',
	validateSrc: 'ناونیشانی بەستەر نابێت خاڵی بێت',
	validateVSpace: 'بۆشایی ئەستونی دەبێت ژماره بێت.',
	windowMode: 'شێوازی پەنجەره',
	windowModeOpaque: 'ناڕوون',
	windowModeTransparent: 'ڕۆشن',
	windowModeWindow: 'پەنجەره'
} );
| gmuro/dolibarr | htdocs/includes/ckeditor/ckeditor/_source/plugins/flash/lang/ku.js | JavaScript | gpl-3.0 | 1,731 |
// Copyright 2010 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifdef ENABLE_GDB_JIT_INTERFACE
#include "src/v8.h"
#include "src/base/platform/platform.h"
#include "src/bootstrapper.h"
#include "src/compiler.h"
#include "src/frames-inl.h"
#include "src/frames.h"
#include "src/gdb-jit.h"
#include "src/global-handles.h"
#include "src/messages.h"
#include "src/natives.h"
#include "src/ostreams.h"
#include "src/scopes.h"
namespace v8 {
namespace internal {
#ifdef __APPLE__
#define __MACH_O
class MachO;
class MachOSection;
typedef MachO DebugObject;
typedef MachOSection DebugSection;
#else
#define __ELF
class ELF;
class ELFSection;
typedef ELF DebugObject;
typedef ELFSection DebugSection;
#endif
// Growable in-memory byte buffer into which the debug object (ELF or
// Mach-O, depending on platform) is serialized.  Supports reserving
// typed "slots" that can be back-patched after later data has been
// written - this is how section headers get their final offsets/sizes.
class Writer BASE_EMBEDDED {
 public:
  explicit Writer(DebugObject* debug_object)
      : debug_object_(debug_object),
        position_(0),
        capacity_(1024),
        buffer_(reinterpret_cast<byte*>(malloc(capacity_))) {
  }

  ~Writer() {
    free(buffer_);
  }

  // Current write cursor, in bytes from the start of the buffer.
  uintptr_t position() const {
    return position_;
  }

  // Typed handle to a fixed byte offset in the writer's buffer.  The raw
  // pointer is re-derived on every access, so a Slot stays valid across
  // buffer reallocations (unlike a raw T*).
  template<typename T>
  class Slot {
   public:
    Slot(Writer* w, uintptr_t offset) : w_(w), offset_(offset) { }

    T* operator-> () {
      return w_->RawSlotAt<T>(offset_);
    }

    void set(const T& value) {
      *w_->RawSlotAt<T>(offset_) = value;
    }

    // Slot for element i of an array of T starting at this offset.
    Slot<T> at(int i) {
      return Slot<T>(w_, offset_ + sizeof(T) * i);
    }

   private:
    Writer* w_;
    uintptr_t offset_;
  };

  // Append a raw value at the current position, growing the buffer if
  // necessary.
  template<typename T>
  void Write(const T& val) {
    Ensure(position_ + sizeof(T));
    *RawSlotAt<T>(position_) = val;
    position_ += sizeof(T);
  }

  // Typed slot over an arbitrary offset (grows the buffer to cover it).
  template<typename T>
  Slot<T> SlotAt(uintptr_t offset) {
    Ensure(offset + sizeof(T));
    return Slot<T>(this, offset);
  }

  // Reserve space for one T at the current position, returning its slot
  // for later back-patching.
  template<typename T>
  Slot<T> CreateSlotHere() {
    return CreateSlotsHere<T>(1);
  }

  // Reserve space for `count` consecutive Ts; returns a slot to the first.
  template<typename T>
  Slot<T> CreateSlotsHere(uint32_t count) {
    uintptr_t slot_position = position_;
    position_ += sizeof(T) * count;
    Ensure(position_);
    return SlotAt<T>(slot_position);
  }

  // Grow (doubling) the backing store until it can hold `pos` bytes.
  void Ensure(uintptr_t pos) {
    if (capacity_ < pos) {
      while (capacity_ < pos) capacity_ *= 2;
      buffer_ = reinterpret_cast<byte*>(realloc(buffer_, capacity_));
    }
  }

  DebugObject* debug_object() { return debug_object_; }

  byte* buffer() { return buffer_; }

  // Advance the write position to the next multiple of `align`.
  // Note: the `+=` inside the Ensure() call performs the advance.
  void Align(uintptr_t align) {
    uintptr_t delta = position_ % align;
    if (delta == 0) return;
    uintptr_t padding = align - delta;
    Ensure(position_ += padding);
    DCHECK((position_ % align) == 0);
  }

  // Unsigned LEB128 variable-length encoding (as used by DWARF).
  void WriteULEB128(uintptr_t value) {
    do {
      uint8_t byte = value & 0x7F;
      value >>= 7;
      if (value != 0) byte |= 0x80;
      Write<uint8_t>(byte);
    } while (value != 0);
  }

  // Signed LEB128 variable-length encoding (as used by DWARF).
  void WriteSLEB128(intptr_t value) {
    bool more = true;
    while (more) {
      int8_t byte = value & 0x7F;
      bool byte_sign = byte & 0x40;
      value >>= 7;
      if ((value == 0 && !byte_sign) || (value == -1 && byte_sign)) {
        more = false;
      } else {
        byte |= 0x80;
      }
      Write<int8_t>(byte);
    }
  }

  // Writes the string including its NUL terminator.
  void WriteString(const char* str) {
    do {
      Write<char>(*str);
    } while (*str++);
  }

 private:
  template<typename T> friend class Slot;

  template<typename T>
  T* RawSlotAt(uintptr_t offset) {
    DCHECK(offset < capacity_ && offset + sizeof(T) <= capacity_);
    return reinterpret_cast<T*>(&buffer_[offset]);
  }

  DebugObject* debug_object_;
  uintptr_t position_;
  uintptr_t capacity_;
  byte* buffer_;
};
class ELFStringTable;
// Common base for debug sections of either object format.  WriteBody
// records the section's extent (offset/size) into its header; subclasses
// override WriteBodyInternal to emit the payload and return true when the
// header should be populated.
template<typename THeader>
class DebugSectionBase : public ZoneObject {
 public:
  virtual ~DebugSectionBase() { }

  virtual void WriteBody(Writer::Slot<THeader> header, Writer* writer) {
    uintptr_t start = writer->position();
    if (WriteBodyInternal(writer)) {
      uintptr_t end = writer->position();
      header->offset = start;
#if defined(__MACH_O)
      header->addr = 0;
#endif
      header->size = end - start;
    }
  }

  // Default: section has no body.
  virtual bool WriteBodyInternal(Writer* writer) {
    return false;
  }

  typedef THeader Header;
};
// On-disk layout of a Mach-O section header; addr/size are 32-bit on
// ia32/x87 targets and 64-bit otherwise.
struct MachOSectionHeader {
  char sectname[16];
  char segname[16];
#if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87
  uint32_t addr;
  uint32_t size;
#else
  uint64_t addr;
  uint64_t size;
#endif
  uint32_t offset;
  uint32_t align;  // log2 of the alignment (see MachOSection constructor)
  uint32_t reloff;
  uint32_t nreloc;
  uint32_t flags;
  uint32_t reserved1;
  uint32_t reserved2;
};
// A generic Mach-O section.  The constructor converts the byte alignment
// into the log2 form the header stores (see PopulateHeader).
class MachOSection : public DebugSectionBase<MachOSectionHeader> {
 public:
  // Section type / attribute flag bits (mirroring the Mach-O S_* values).
  enum Type {
    S_REGULAR = 0x0u,
    S_ATTR_COALESCED = 0xbu,
    S_ATTR_SOME_INSTRUCTIONS = 0x400u,
    S_ATTR_DEBUG = 0x02000000u,
    S_ATTR_PURE_INSTRUCTIONS = 0x80000000u
  };

  MachOSection(const char* name,
               const char* segment,
               uintptr_t align,
               uint32_t flags)
      : name_(name),
        segment_(segment),
        align_(align),
        flags_(flags) {
    if (align_ != 0) {
      DCHECK(IsPowerOf2(align));
      align_ = WhichPowerOf2(align_);  // header stores log2(alignment)
    }
  }

  virtual ~MachOSection() { }

  // Fill in the default header; strncpy is safe here because the DCHECKs
  // guarantee both names fit (including the NUL).
  virtual void PopulateHeader(Writer::Slot<Header> header) {
    header->addr = 0;
    header->size = 0;
    header->offset = 0;
    header->align = align_;
    header->reloff = 0;
    header->nreloc = 0;
    header->flags = flags_;
    header->reserved1 = 0;
    header->reserved2 = 0;
    memset(header->sectname, 0, sizeof(header->sectname));
    memset(header->segname, 0, sizeof(header->segname));
    DCHECK(strlen(name_) < sizeof(header->sectname));
    DCHECK(strlen(segment_) < sizeof(header->segname));
    strncpy(header->sectname, name_, sizeof(header->sectname));
    strncpy(header->segname, segment_, sizeof(header->segname));
  }

 private:
  const char* name_;
  const char* segment_;
  uintptr_t align_;
  uint32_t flags_;
};
// On-disk layout of an ELF section header (ElfNN_Shdr); pointer-sized
// fields use uintptr_t so the same struct serves 32- and 64-bit targets.
struct ELFSectionHeader {
  uint32_t name;  // offset of the section name in the string table
  uint32_t type;
  uintptr_t flags;
  uintptr_t address;
  uintptr_t offset;
  uintptr_t size;
  uint32_t link;
  uint32_t info;
  uintptr_t alignment;
  uintptr_t entry_size;
};
#if defined(__ELF)
// Base class for ELF sections.  The public PopulateHeader overload interns
// the section name into the given string table and fills the common header
// fields; subclasses override the protected overload for the rest.
class ELFSection : public DebugSectionBase<ELFSectionHeader> {
 public:
  // Values of the header's type field (sh_type).
  enum Type {
    TYPE_NULL = 0,
    TYPE_PROGBITS = 1,
    TYPE_SYMTAB = 2,
    TYPE_STRTAB = 3,
    TYPE_RELA = 4,
    TYPE_HASH = 5,
    TYPE_DYNAMIC = 6,
    TYPE_NOTE = 7,
    TYPE_NOBITS = 8,
    TYPE_REL = 9,
    TYPE_SHLIB = 10,
    TYPE_DYNSYM = 11,
    TYPE_LOPROC = 0x70000000,
    TYPE_X86_64_UNWIND = 0x70000001,
    TYPE_HIPROC = 0x7fffffff,
    TYPE_LOUSER = 0x80000000,
    TYPE_HIUSER = 0xffffffff
  };

  // Bits of the header's flags field (sh_flags).
  enum Flags {
    FLAG_WRITE = 1,
    FLAG_ALLOC = 2,
    FLAG_EXEC = 4
  };

  enum SpecialIndexes {
    INDEX_ABSOLUTE = 0xfff1
  };

  ELFSection(const char* name, Type type, uintptr_t align)
      : name_(name), type_(type), align_(align) { }

  virtual ~ELFSection() { }

  void PopulateHeader(Writer::Slot<Header> header, ELFStringTable* strtab);

  virtual void WriteBody(Writer::Slot<Header> header, Writer* w) {
    uintptr_t start = w->position();
    if (WriteBodyInternal(w)) {
      uintptr_t end = w->position();
      header->offset = start;
      header->size = end - start;
    }
  }

  virtual bool WriteBodyInternal(Writer* w) {
    return false;
  }

  uint16_t index() const { return index_; }
  void set_index(uint16_t index) { index_ = index; }

 protected:
  virtual void PopulateHeader(Writer::Slot<Header> header) {
    header->flags = 0;
    header->address = 0;
    header->offset = 0;
    header->size = 0;
    header->link = 0;
    header->info = 0;
    header->entry_size = 0;
  }

 private:
  const char* name_;
  Type type_;
  uintptr_t align_;
  uint16_t index_;  // assigned externally via set_index()
};
#endif // defined(__ELF)
#if defined(__MACH_O)
// The __TEXT,__text section of the Mach-O image.  Only the header's
// addr/size are filled in (they record the generated code's location
// and extent); no body is written.
class MachOTextSection : public MachOSection {
 public:
  MachOTextSection(uintptr_t align,
                   uintptr_t addr,
                   uintptr_t size)
      : MachOSection("__text",
                     "__TEXT",
                     align,
                     MachOSection::S_REGULAR |
                         MachOSection::S_ATTR_SOME_INSTRUCTIONS |
                         MachOSection::S_ATTR_PURE_INSTRUCTIONS),
        addr_(addr),
        size_(size) { }

 protected:
  virtual void PopulateHeader(Writer::Slot<Header> header) {
    MachOSection::PopulateHeader(header);
    header->addr = addr_;
    header->size = size_;
  }

 private:
  uintptr_t addr_;
  uintptr_t size_;
};
#endif // defined(__MACH_O)
#if defined(__ELF)
// ELF section whose header fields (address, offset, size, flags) are all
// supplied up front by the caller instead of being derived from a written
// body.
class FullHeaderELFSection : public ELFSection {
 public:
  FullHeaderELFSection(const char* name,
                       Type type,
                       uintptr_t align,
                       uintptr_t addr,
                       uintptr_t offset,
                       uintptr_t size,
                       uintptr_t flags)
      : ELFSection(name, type, align),
        addr_(addr),
        offset_(offset),
        size_(size),
        flags_(flags) { }

 protected:
  virtual void PopulateHeader(Writer::Slot<Header> header) {
    ELFSection::PopulateHeader(header);
    header->address = addr_;
    header->offset = offset_;
    header->size = size_;
    header->flags = flags_;
  }

 private:
  uintptr_t addr_;
  uintptr_t offset_;
  uintptr_t size_;
  uintptr_t flags_;
};
// Writable ELF string table (TYPE_STRTAB).  Strings are streamed directly
// through an attached Writer; Add() returns the string's offset within
// the table.  Offset 0 is reserved for the empty string.
class ELFStringTable : public ELFSection {
 public:
  explicit ELFStringTable(const char* name)
      : ELFSection(name, TYPE_STRTAB, 1), writer_(NULL), offset_(0), size_(0) {
  }

  // Append `str` and return its offset relative to the table start.
  // Only valid while a writer is attached.
  uintptr_t Add(const char* str) {
    if (*str == '\0') return 0;

    uintptr_t offset = size_;
    WriteString(str);
    return offset;
  }

  void AttachWriter(Writer* w) {
    writer_ = w;
    offset_ = writer_->position();

    // First entry in the string table should be an empty string.
    WriteString("");
  }

  void DetachWriter() {
    writer_ = NULL;
  }

  // The body was already emitted through the attached writer; just record
  // its extent.  Requires the writer to have been detached first.
  virtual void WriteBody(Writer::Slot<Header> header, Writer* w) {
    DCHECK(writer_ == NULL);
    header->offset = offset_;
    header->size = size_;
  }

 private:
  // Writes the string (including the NUL terminator) and grows size_.
  void WriteString(const char* str) {
    uintptr_t written = 0;
    do {
      writer_->Write(*str);
      written++;
    } while (*str++);
    size_ += written;
  }

  Writer* writer_;

  uintptr_t offset_;  // table's start position within the writer
  uintptr_t size_;    // total bytes written so far
};
// Fills in the name/type/alignment fields common to all ELF sections,
// interning the section name into |strtab| (the header stores an offset,
// not a pointer), then lets the subclass populate the remaining fields.
void ELFSection::PopulateHeader(Writer::Slot<ELFSection::Header> header,
                                ELFStringTable* strtab) {
  header->name = strtab->Add(name_);
  header->type = type_;
  header->alignment = align_;
  PopulateHeader(header);
}
#endif // defined(__ELF)
#if defined(__MACH_O)
// Minimal Mach-O object-file writer: emits the file header, a single
// segment load command and then the header and body of every registered
// section.  Sizes and offsets that are only known after the sections
// have been written are backpatched through Writer slots.
class MachO BASE_EMBEDDED {
 public:
  explicit MachO(Zone* zone) : zone_(zone), sections_(6, zone) { }

  // Registers |section| and returns its index in the section list.
  uint32_t AddSection(MachOSection* section) {
    sections_.Add(section, zone_);
    return sections_.length() - 1;
  }

  void Write(Writer* w, uintptr_t code_start, uintptr_t code_size) {
    Writer::Slot<MachOHeader> header = WriteHeader(w);
    uintptr_t load_command_start = w->position();
    Writer::Slot<MachOSegmentCommand> cmd = WriteSegmentCommand(w,
                                                               code_start,
                                                               code_size);
    WriteSections(w, cmd, header, load_command_start);
  }

 private:
  // On-disk layout of the Mach-O file header.
  struct MachOHeader {
    uint32_t magic;
    uint32_t cputype;
    uint32_t cpusubtype;
    uint32_t filetype;
    uint32_t ncmds;
    uint32_t sizeofcmds;
    uint32_t flags;
#if V8_TARGET_ARCH_X64
    uint32_t reserved;  // The 64-bit header carries an extra reserved word.
#endif
  };

  // On-disk layout of the segment load command (32- or 64-bit fields
  // depending on the target architecture).
  struct MachOSegmentCommand {
    uint32_t cmd;
    uint32_t cmdsize;
    char segname[16];
#if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87
    uint32_t vmaddr;
    uint32_t vmsize;
    uint32_t fileoff;
    uint32_t filesize;
#else
    uint64_t vmaddr;
    uint64_t vmsize;
    uint64_t fileoff;
    uint64_t filesize;
#endif
    uint32_t maxprot;
    uint32_t initprot;
    uint32_t nsects;
    uint32_t flags;
  };

  enum MachOLoadCommandCmd {
    LC_SEGMENT_32 = 0x00000001u,
    LC_SEGMENT_64 = 0x00000019u
  };

  Writer::Slot<MachOHeader> WriteHeader(Writer* w) {
    DCHECK(w->position() == 0);
    Writer::Slot<MachOHeader> header = w->CreateSlotHere<MachOHeader>();
#if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87
    header->magic = 0xFEEDFACEu;
    header->cputype = 7;  // i386
    header->cpusubtype = 3;  // CPU_SUBTYPE_I386_ALL
#elif V8_TARGET_ARCH_X64
    header->magic = 0xFEEDFACFu;
    header->cputype = 7 | 0x01000000;  // i386 | 64-bit ABI
    header->cpusubtype = 3;  // CPU_SUBTYPE_I386_ALL
    header->reserved = 0;
#else
#error Unsupported target architecture.
#endif
    header->filetype = 0x1;  // MH_OBJECT
    header->ncmds = 1;  // A single segment load command.
    header->sizeofcmds = 0;  // Backpatched in WriteSections().
    header->flags = 0;
    return header;
  }

  Writer::Slot<MachOSegmentCommand> WriteSegmentCommand(Writer* w,
                                                        uintptr_t code_start,
                                                        uintptr_t code_size) {
    Writer::Slot<MachOSegmentCommand> cmd =
        w->CreateSlotHere<MachOSegmentCommand>();
#if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87
    cmd->cmd = LC_SEGMENT_32;
#else
    cmd->cmd = LC_SEGMENT_64;
#endif
    cmd->vmaddr = code_start;
    cmd->vmsize = code_size;
    cmd->fileoff = 0;   // Backpatched in WriteSections().
    cmd->filesize = 0;  // Backpatched in WriteSections().
    cmd->maxprot = 7;   // Read, write and execute.
    cmd->initprot = 7;  // Read, write and execute.
    cmd->flags = 0;
    cmd->nsects = sections_.length();
    memset(cmd->segname, 0, 16);
    cmd->cmdsize = sizeof(MachOSegmentCommand) + sizeof(MachOSection::Header) *
        cmd->nsects;
    return cmd;
  }

  // Emits every section header and body, then backpatches the file
  // header and segment command with the now-known sizes and offsets.
  void WriteSections(Writer* w,
                     Writer::Slot<MachOSegmentCommand> cmd,
                     Writer::Slot<MachOHeader> header,
                     uintptr_t load_command_start) {
    Writer::Slot<MachOSection::Header> headers =
        w->CreateSlotsHere<MachOSection::Header>(sections_.length());
    cmd->fileoff = w->position();
    header->sizeofcmds = w->position() - load_command_start;
    for (int section = 0; section < sections_.length(); ++section) {
      sections_[section]->PopulateHeader(headers.at(section));
      sections_[section]->WriteBody(headers.at(section), w);
    }
    cmd->filesize = w->position() - (uintptr_t)cmd->fileoff;
  }

  Zone* zone_;
  ZoneList<MachOSection*> sections_;
};
#endif // defined(__MACH_O)
#if defined(__ELF)
// Minimal ELF object-file writer.  Section 0 is the mandatory null
// section and section 1 is the section-name string table (.shstrtab);
// both are added by the constructor, so user sections start at index 2.
class ELF BASE_EMBEDDED {
 public:
  explicit ELF(Zone* zone) : zone_(zone), sections_(6, zone) {
    sections_.Add(new(zone) ELFSection("", ELFSection::TYPE_NULL, 0), zone);
    sections_.Add(new(zone) ELFStringTable(".shstrtab"), zone);
  }

  void Write(Writer* w) {
    WriteHeader(w);
    WriteSectionTable(w);
    WriteSections(w);
  }

  ELFSection* SectionAt(uint32_t index) {
    return sections_[index];
  }

  // Registers |section|, records its index on the section itself (used
  // by sections that need to reference their neighbours) and returns it.
  uint32_t AddSection(ELFSection* section) {
    sections_.Add(section, zone_);
    section->set_index(sections_.length() - 1);
    return sections_.length() - 1;
  }

 private:
  // ELF file header; uintptr_t fields make this match the 32- or 64-bit
  // layout depending on the target.
  struct ELFHeader {
    uint8_t ident[16];
    uint16_t type;
    uint16_t machine;
    uint32_t version;
    uintptr_t entry;
    uintptr_t pht_offset;
    uintptr_t sht_offset;
    uint32_t flags;
    uint16_t header_size;
    uint16_t pht_entry_size;
    uint16_t pht_entry_num;
    uint16_t sht_entry_size;
    uint16_t sht_entry_num;
    uint16_t sht_strtab_index;
  };

  void WriteHeader(Writer* w) {
    DCHECK(w->position() == 0);
    Writer::Slot<ELFHeader> header = w->CreateSlotHere<ELFHeader>();
#if (V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_ARM || V8_TARGET_ARCH_X87 || \
     (V8_TARGET_ARCH_X64 && V8_TARGET_ARCH_32_BIT))
    // 32-bit class, little-endian data, ELF version 1.
    const uint8_t ident[16] =
        { 0x7f, 'E', 'L', 'F', 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0};
#elif V8_TARGET_ARCH_X64 && V8_TARGET_ARCH_64_BIT
    // 64-bit class, little-endian data, ELF version 1.
    const uint8_t ident[16] =
        { 0x7f, 'E', 'L', 'F', 2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0};
#else
#error Unsupported target architecture.
#endif
    memcpy(header->ident, ident, 16);
    header->type = 1;  // ET_REL: relocatable object file.
#if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87
    header->machine = 3;  // EM_386.
#elif V8_TARGET_ARCH_X64
    // Processor identification value for x64 is 62 as defined in
    // System V ABI, AMD64 Supplement
    // http://www.x86-64.org/documentation/abi.pdf
    header->machine = 62;
#elif V8_TARGET_ARCH_ARM
    // Set to EM_ARM, defined as 40, in "ARM ELF File Format" at
    // infocenter.arm.com/help/topic/com.arm.doc.dui0101a/DUI0101A_Elf.pdf
    header->machine = 40;
#else
#error Unsupported target architecture.
#endif
    header->version = 1;
    header->entry = 0;
    header->pht_offset = 0;  // No program header table is emitted.
    header->sht_offset = sizeof(ELFHeader);  // Section table follows header.
    header->flags = 0;
    header->header_size = sizeof(ELFHeader);
    header->pht_entry_size = 0;
    header->pht_entry_num = 0;
    header->sht_entry_size = sizeof(ELFSection::Header);
    header->sht_entry_num = sections_.length();
    header->sht_strtab_index = 1;  // .shstrtab is always section 1.
  }

  void WriteSectionTable(Writer* w) {
    // Section headers table immediately follows file header.
    DCHECK(w->position() == sizeof(ELFHeader));

    Writer::Slot<ELFSection::Header> headers =
        w->CreateSlotsHere<ELFSection::Header>(sections_.length());

    // String table for section table is the first section.
    ELFStringTable* strtab = static_cast<ELFStringTable*>(SectionAt(1));
    strtab->AttachWriter(w);
    for (int i = 0, length = sections_.length();
         i < length;
         i++) {
      sections_[i]->PopulateHeader(headers.at(i), strtab);
    }
    strtab->DetachWriter();
  }

  int SectionHeaderPosition(uint32_t section_index) {
    return sizeof(ELFHeader) + sizeof(ELFSection::Header) * section_index;
  }

  void WriteSections(Writer* w) {
    Writer::Slot<ELFSection::Header> headers =
        w->SlotAt<ELFSection::Header>(sizeof(ELFHeader));

    for (int i = 0, length = sections_.length();
         i < length;
         i++) {
      sections_[i]->WriteBody(headers.at(i), w);
    }
  }

  Zone* zone_;
  ZoneList<ELFSection*> sections_;
};
// In-memory representation of a single ELF symbol, together with the
// on-disk SerializedLayout for the target: the 32-bit and 64-bit symbol
// layouts contain the same fields but in a different order.
class ELFSymbol BASE_EMBEDDED {
 public:
  enum Type {
    TYPE_NOTYPE = 0,
    TYPE_OBJECT = 1,
    TYPE_FUNC = 2,
    TYPE_SECTION = 3,
    TYPE_FILE = 4,
    TYPE_LOPROC = 13,
    TYPE_HIPROC = 15
  };

  enum Binding {
    BIND_LOCAL = 0,
    BIND_GLOBAL = 1,
    BIND_WEAK = 2,
    BIND_LOPROC = 13,
    BIND_HIPROC = 15
  };

  ELFSymbol(const char* name,
            uintptr_t value,
            uintptr_t size,
            Binding binding,
            Type type,
            uint16_t section)
      : name(name),
        value(value),
        size(size),
        info((binding << 4) | type),  // st_info packs binding and type.
        other(0),
        section(section) {
  }

  // Recovers the binding from the packed info byte.
  Binding binding() const {
    return static_cast<Binding>(info >> 4);
  }
#if (V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_ARM || V8_TARGET_ARCH_X87 || \
     (V8_TARGET_ARCH_X64 && V8_TARGET_ARCH_32_BIT))
  // 32-bit on-disk symbol layout.
  struct SerializedLayout {
    SerializedLayout(uint32_t name,
                     uintptr_t value,
                     uintptr_t size,
                     Binding binding,
                     Type type,
                     uint16_t section)
        : name(name),
          value(value),
          size(size),
          info((binding << 4) | type),
          other(0),
          section(section) {
    }

    uint32_t name;
    uintptr_t value;
    uintptr_t size;
    uint8_t info;
    uint8_t other;
    uint16_t section;
  };
#elif V8_TARGET_ARCH_X64 && V8_TARGET_ARCH_64_BIT
  // 64-bit on-disk symbol layout; note the different field order.
  struct SerializedLayout {
    SerializedLayout(uint32_t name,
                     uintptr_t value,
                     uintptr_t size,
                     Binding binding,
                     Type type,
                     uint16_t section)
        : name(name),
          info((binding << 4) | type),
          other(0),
          section(section),
          value(value),
          size(size) {
    }

    uint32_t name;
    uint8_t info;
    uint8_t other;
    uint16_t section;
    uintptr_t value;
    uintptr_t size;
  };
#endif

  // Serializes this symbol into |s|, interning its name into |t|.
  void Write(Writer::Slot<SerializedLayout> s, ELFStringTable* t) {
    // Convert symbol names from strings to indexes in the string table.
    s->name = t->Add(name);
    s->value = value;
    s->size = size;
    s->info = info;
    s->other = other;
    s->section = section;
  }

 private:
  const char* name;
  uintptr_t value;
  uintptr_t size;
  uint8_t info;
  uint8_t other;
  uint16_t section;
};
// ELF symbol table section (SHT_SYMTAB).  Local symbols are emitted
// before global ones, and entry 0 is the reserved undefined symbol.
// The linked string table is assumed to sit at the next section index.
class ELFSymbolTable : public ELFSection {
 public:
  ELFSymbolTable(const char* name, Zone* zone)
      : ELFSection(name, TYPE_SYMTAB, sizeof(uintptr_t)),
        locals_(1, zone),
        globals_(1, zone) {
  }

  virtual void WriteBody(Writer::Slot<Header> header, Writer* w) {
    w->Align(header->alignment);
    // +1 for the reserved undefined symbol at index 0.
    int total_symbols = locals_.length() + globals_.length() + 1;
    header->offset = w->position();

    Writer::Slot<ELFSymbol::SerializedLayout> symbols =
        w->CreateSlotsHere<ELFSymbol::SerializedLayout>(total_symbols);

    header->size = w->position() - header->offset;

    // String table for this symbol table should follow it in the section
    // table.
    ELFStringTable* strtab =
        static_cast<ELFStringTable*>(w->debug_object()->SectionAt(index() + 1));
    strtab->AttachWriter(w);
    // Entry 0: the mandatory undefined symbol.
    symbols.at(0).set(ELFSymbol::SerializedLayout(0,
                                                  0,
                                                  0,
                                                  ELFSymbol::BIND_LOCAL,
                                                  ELFSymbol::TYPE_NOTYPE,
                                                  0));
    WriteSymbolsList(&locals_, symbols.at(1), strtab);
    WriteSymbolsList(&globals_, symbols.at(locals_.length() + 1), strtab);
    strtab->DetachWriter();
  }

  // Buckets |symbol| by binding so locals always precede globals.
  void Add(const ELFSymbol& symbol, Zone* zone) {
    if (symbol.binding() == ELFSymbol::BIND_LOCAL) {
      locals_.Add(symbol, zone);
    } else {
      globals_.Add(symbol, zone);
    }
  }

 protected:
  virtual void PopulateHeader(Writer::Slot<Header> header) {
    ELFSection::PopulateHeader(header);
    // We are assuming that string table will follow symbol table.
    header->link = index() + 1;
    // sh_info: index of the first non-local symbol (locals + null entry).
    header->info = locals_.length() + 1;
    header->entry_size = sizeof(ELFSymbol::SerializedLayout);
  }

 private:
  void WriteSymbolsList(const ZoneList<ELFSymbol>* src,
                        Writer::Slot<ELFSymbol::SerializedLayout> dst,
                        ELFStringTable* strtab) {
    for (int i = 0, len = src->length();
         i < len;
         i++) {
      src->at(i).Write(dst.at(i), strtab);
    }
  }

  ZoneList<ELFSymbol> locals_;
  ZoneList<ELFSymbol> globals_;
};
#endif // defined(__ELF)
// Accumulates pc -> source-position mappings for one generated code
// object; consumed later by DebugLineSection to build the DWARF line
// table (which treats pc_ as an offset from the code start).
class LineInfo : public Malloced {
 public:
  LineInfo() : pc_info_(10) {}

  void SetPosition(intptr_t pc, int pos, bool is_statement) {
    AddPCInfo(PCInfo(pc, pos, is_statement));
  }

  struct PCInfo {
    PCInfo(intptr_t pc, int pos, bool is_statement)
        : pc_(pc), pos_(pos), is_statement_(is_statement) {}

    intptr_t pc_;        // Code offset.
    int pos_;            // Source position.
    bool is_statement_;  // True if this position starts a statement.
  };

  List<PCInfo>* pc_info() { return &pc_info_; }

 private:
  void AddPCInfo(const PCInfo& pc_info) { pc_info_.Add(pc_info); }

  List<PCInfo> pc_info_;
};
// Bundles everything needed to describe one generated code object to
// the debugger: name, code bounds, originating script, line info and
// compilation info.  On x64 it additionally records the code addresses
// at which the frame-pointer prologue/epilogue phases begin, for use by
// the unwind-info writer.
class CodeDescription BASE_EMBEDDED {
 public:
#if V8_TARGET_ARCH_X64
  // Prologue/epilogue phases used to emit precise x64 unwind info.
  enum StackState {
    POST_RBP_PUSH,
    POST_RBP_SET,
    POST_RBP_POP,
    STACK_STATE_MAX
  };
#endif

  CodeDescription(const char* name, Code* code, Handle<Script> script,
                  LineInfo* lineinfo, GDBJITInterface::CodeTag tag,
                  CompilationInfo* info)
      : name_(name),
        code_(code),
        script_(script),
        lineinfo_(lineinfo),
        tag_(tag),
        info_(info) {}

  const char* name() const {
    return name_;
  }

  LineInfo* lineinfo() const { return lineinfo_; }

  GDBJITInterface::CodeTag tag() const {
    return tag_;
  }

  CompilationInfo* info() const {
    return info_;
  }

  bool IsInfoAvailable() const {
    return info_ != NULL;
  }

  uintptr_t CodeStart() const {
    return reinterpret_cast<uintptr_t>(code_->instruction_start());
  }

  uintptr_t CodeEnd() const {
    return reinterpret_cast<uintptr_t>(code_->instruction_end());
  }

  uintptr_t CodeSize() const {
    return CodeEnd() - CodeStart();
  }

  // Line info can only be emitted when the script, its source and its
  // name are all available.
  bool IsLineInfoAvailable() {
    return !script_.is_null() &&
        script_->source()->IsString() &&
        script_->HasValidSource() &&
        script_->name()->IsString() &&
        lineinfo_ != NULL;
  }

#if V8_TARGET_ARCH_X64
  uintptr_t GetStackStateStartAddress(StackState state) const {
    DCHECK(state < STACK_STATE_MAX);
    return stack_state_start_addresses_[state];
  }

  void SetStackStateStartAddress(StackState state, uintptr_t addr) {
    DCHECK(state < STACK_STATE_MAX);
    stack_state_start_addresses_[state] = addr;
  }
#endif

  SmartArrayPointer<char> GetFilename() {
    return String::cast(script_->name())->ToCString();
  }

  // +1 converts the script's line number to the 1-based numbering used
  // in the emitted debug line table.
  int GetScriptLineNumber(int pos) {
    return script_->GetLineNumber(pos) + 1;
  }

 private:
  const char* name_;
  Code* code_;
  Handle<Script> script_;
  LineInfo* lineinfo_;
  GDBJITInterface::CodeTag tag_;
  CompilationInfo* info_;
#if V8_TARGET_ARCH_X64
  uintptr_t stack_state_start_addresses_[STACK_STATE_MAX];
#endif
};
#if defined(__ELF)
// Adds a .symtab/.strtab pair to |elf| describing the generated code:
// a local FILE-type marker symbol ("V8 Code") plus one global FUNC
// symbol for the code itself, placed at offset 0 of the text section.
static void CreateSymbolsTable(CodeDescription* desc,
                               Zone* zone,
                               ELF* elf,
                               int text_section_index) {
  ELFSymbolTable* symtab = new(zone) ELFSymbolTable(".symtab", zone);
  ELFStringTable* strtab = new(zone) ELFStringTable(".strtab");

  // Symbol table should be followed by the linked string table.
  elf->AddSection(symtab);
  elf->AddSection(strtab);

  symtab->Add(ELFSymbol("V8 Code",
                        0,
                        0,
                        ELFSymbol::BIND_LOCAL,
                        ELFSymbol::TYPE_FILE,
                        ELFSection::INDEX_ABSOLUTE),
              zone);

  symtab->Add(ELFSymbol(desc->name(),
                        0,
                        desc->CodeSize(),
                        ELFSymbol::BIND_GLOBAL,
                        ELFSymbol::TYPE_FUNC,
                        text_section_index),
              zone);
}
#endif // defined(__ELF)
// Emits the DWARF .debug_info compilation unit: one compile-unit DIE, a
// synthetic "v8value" type that all variables reference and, when
// compilation info is available, a subprogram DIE with child DIEs for
// parameters, stack slots, context slots and locals.  The abbreviation
// codes used here must match those emitted by DebugAbbrevSection, in
// the same order.
class DebugInfoSection : public DebugSection {
 public:
  explicit DebugInfoSection(CodeDescription* desc)
#if defined(__ELF)
      : ELFSection(".debug_info", TYPE_PROGBITS, 1),
#else
      : MachOSection("__debug_info",
                     "__DWARF",
                     1,
                     MachOSection::S_REGULAR | MachOSection::S_ATTR_DEBUG),
#endif
        desc_(desc) { }

  // DWARF2 standard
  enum DWARF2LocationOp {
    DW_OP_reg0 = 0x50,
    DW_OP_reg1 = 0x51,
    DW_OP_reg2 = 0x52,
    DW_OP_reg3 = 0x53,
    DW_OP_reg4 = 0x54,
    DW_OP_reg5 = 0x55,
    DW_OP_reg6 = 0x56,
    DW_OP_reg7 = 0x57,
    DW_OP_fbreg = 0x91  // 1 param: SLEB128 offset
  };

  enum DWARF2Encoding {
    DW_ATE_ADDRESS = 0x1,
    DW_ATE_SIGNED = 0x5
  };

  bool WriteBodyInternal(Writer* w) {
    // Compilation unit header: length, DWARF version, abbreviation table
    // offset and address size.
    uintptr_t cu_start = w->position();
    Writer::Slot<uint32_t> size = w->CreateSlotHere<uint32_t>();
    uintptr_t start = w->position();
    w->Write<uint16_t>(2);  // DWARF version.
    w->Write<uint32_t>(0);  // Abbreviation table offset.
    w->Write<uint8_t>(sizeof(intptr_t));

    // Compile-unit DIE (abbreviation 1): name, code range, stmt list.
    w->WriteULEB128(1);  // Abbreviation code.
    w->WriteString(desc_->GetFilename().get());
    w->Write<intptr_t>(desc_->CodeStart());
    w->Write<intptr_t>(desc_->CodeStart() + desc_->CodeSize());
    w->Write<uint32_t>(0);

    // Synthetic type DIE (abbreviation 3); its CU-relative offset is
    // stored so the variable DIEs below can reference it.
    uint32_t ty_offset = static_cast<uint32_t>(w->position() - cu_start);
    w->WriteULEB128(3);
    w->Write<uint8_t>(kPointerSize);
    w->WriteString("v8value");

    if (desc_->IsInfoAvailable()) {
      Scope* scope = desc_->info()->scope();
      // Subprogram DIE (abbreviation 2) covering the whole code range.
      w->WriteULEB128(2);
      w->WriteString(desc_->name());
      w->Write<intptr_t>(desc_->CodeStart());
      w->Write<intptr_t>(desc_->CodeStart() + desc_->CodeSize());
      // DW_AT_frame_base: a location block naming the frame pointer.
      Writer::Slot<uint32_t> fb_block_size = w->CreateSlotHere<uint32_t>();
      uintptr_t fb_block_start = w->position();
#if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87
      w->Write<uint8_t>(DW_OP_reg5);  // The frame pointer's here on ia32
#elif V8_TARGET_ARCH_X64
      w->Write<uint8_t>(DW_OP_reg6);  // and here on x64.
#elif V8_TARGET_ARCH_ARM
      UNIMPLEMENTED();
#elif V8_TARGET_ARCH_MIPS
      UNIMPLEMENTED();
#elif V8_TARGET_ARCH_MIPS64
      UNIMPLEMENTED();
#else
#error Unsupported target architecture.
#endif
      fb_block_size.set(static_cast<uint32_t>(w->position() - fb_block_start));

      int params = scope->num_parameters();
      int slots = scope->num_stack_slots();
      int context_slots = scope->ContextLocalCount();
      // The real slot ID is internal_slots + context_slot_id.
      int internal_slots = Context::MIN_CONTEXT_SLOTS;
      int locals = scope->StackLocalCount();
      int current_abbreviation = 4;

      // One DIE per parameter with a frame-base-relative location;
      // parameters are laid out in reverse order from the frame base.
      for (int param = 0; param < params; ++param) {
        w->WriteULEB128(current_abbreviation++);
        w->WriteString(
            scope->parameter(param)->name()->ToCString(DISALLOW_NULLS).get());
        w->Write<uint32_t>(ty_offset);
        Writer::Slot<uint32_t> block_size = w->CreateSlotHere<uint32_t>();
        uintptr_t block_start = w->position();
        w->Write<uint8_t>(DW_OP_fbreg);
        w->WriteSLEB128(
            JavaScriptFrameConstants::kLastParameterOffset +
                kPointerSize * (params - param - 1));
        block_size.set(static_cast<uint32_t>(w->position() - block_start));
      }

      // Name-only DIEs for the anonymous stack slots.
      EmbeddedVector<char, 256> buffer;
      StringBuilder builder(buffer.start(), buffer.length());

      for (int slot = 0; slot < slots; ++slot) {
        w->WriteULEB128(current_abbreviation++);
        builder.Reset();
        builder.AddFormatted("slot%d", slot);
        w->WriteString(builder.Finalize());
      }

      // See contexts.h for more information.
      DCHECK(Context::MIN_CONTEXT_SLOTS == 4);
      DCHECK(Context::CLOSURE_INDEX == 0);
      DCHECK(Context::PREVIOUS_INDEX == 1);
      DCHECK(Context::EXTENSION_INDEX == 2);
      DCHECK(Context::GLOBAL_OBJECT_INDEX == 3);
      w->WriteULEB128(current_abbreviation++);
      w->WriteString(".closure");
      w->WriteULEB128(current_abbreviation++);
      w->WriteString(".previous");
      w->WriteULEB128(current_abbreviation++);
      w->WriteString(".extension");
      w->WriteULEB128(current_abbreviation++);
      w->WriteString(".global");

      for (int context_slot = 0;
           context_slot < context_slots;
           ++context_slot) {
        w->WriteULEB128(current_abbreviation++);
        builder.Reset();
        builder.AddFormatted("context_slot%d", context_slot + internal_slots);
        w->WriteString(builder.Finalize());
      }

      // One DIE per named stack local, with its frame location.
      ZoneList<Variable*> stack_locals(locals, scope->zone());
      ZoneList<Variable*> context_locals(context_slots, scope->zone());
      scope->CollectStackAndContextLocals(&stack_locals, &context_locals);
      for (int local = 0; local < locals; ++local) {
        w->WriteULEB128(current_abbreviation++);
        w->WriteString(
            stack_locals[local]->name()->ToCString(DISALLOW_NULLS).get());
        w->Write<uint32_t>(ty_offset);
        Writer::Slot<uint32_t> block_size = w->CreateSlotHere<uint32_t>();
        uintptr_t block_start = w->position();
        w->Write<uint8_t>(DW_OP_fbreg);
        w->WriteSLEB128(
            JavaScriptFrameConstants::kLocal0Offset -
                kPointerSize * local);
        block_size.set(static_cast<uint32_t>(w->position() - block_start));
      }

      {
        // Synthetic DIE for the function object in the frame.
        w->WriteULEB128(current_abbreviation++);
        w->WriteString("__function");
        w->Write<uint32_t>(ty_offset);
        Writer::Slot<uint32_t> block_size = w->CreateSlotHere<uint32_t>();
        uintptr_t block_start = w->position();
        w->Write<uint8_t>(DW_OP_fbreg);
        w->WriteSLEB128(JavaScriptFrameConstants::kFunctionOffset);
        block_size.set(static_cast<uint32_t>(w->position() - block_start));
      }

      {
        // Synthetic DIE for the context slot in the frame.
        w->WriteULEB128(current_abbreviation++);
        w->WriteString("__context");
        w->Write<uint32_t>(ty_offset);
        Writer::Slot<uint32_t> block_size = w->CreateSlotHere<uint32_t>();
        uintptr_t block_start = w->position();
        w->Write<uint8_t>(DW_OP_fbreg);
        w->WriteSLEB128(StandardFrameConstants::kContextOffset);
        block_size.set(static_cast<uint32_t>(w->position() - block_start));
      }

      w->WriteULEB128(0);  // Terminate the sub program.
    }

    w->WriteULEB128(0);  // Terminate the compile unit.
    size.set(static_cast<uint32_t>(w->position() - start));
    return true;
  }

 private:
  CodeDescription* desc_;
};
// Emits the DWARF .debug_abbrev table describing the shapes of the DIEs
// written by DebugInfoSection.  The abbreviation codes assigned here
// (one per DIE, in emission order) must match DebugInfoSection exactly.
class DebugAbbrevSection : public DebugSection {
 public:
  explicit DebugAbbrevSection(CodeDescription* desc)
#ifdef __ELF
      : ELFSection(".debug_abbrev", TYPE_PROGBITS, 1),
#else
      : MachOSection("__debug_abbrev",
                     "__DWARF",
                     1,
                     MachOSection::S_REGULAR | MachOSection::S_ATTR_DEBUG),
#endif
        desc_(desc) { }

  // DWARF2 standard, figure 14.
  enum DWARF2Tags {
    DW_TAG_FORMAL_PARAMETER = 0x05,
    DW_TAG_POINTER_TYPE = 0xf,
    DW_TAG_COMPILE_UNIT = 0x11,
    DW_TAG_STRUCTURE_TYPE = 0x13,
    DW_TAG_BASE_TYPE = 0x24,
    DW_TAG_SUBPROGRAM = 0x2e,
    DW_TAG_VARIABLE = 0x34
  };

  // DWARF2 standard, figure 16.
  enum DWARF2ChildrenDetermination {
    DW_CHILDREN_NO = 0,
    DW_CHILDREN_YES = 1
  };

  // DWARF standard, figure 17.
  enum DWARF2Attribute {
    DW_AT_LOCATION = 0x2,
    DW_AT_NAME = 0x3,
    DW_AT_BYTE_SIZE = 0xb,
    DW_AT_STMT_LIST = 0x10,
    DW_AT_LOW_PC = 0x11,
    DW_AT_HIGH_PC = 0x12,
    DW_AT_ENCODING = 0x3e,
    DW_AT_FRAME_BASE = 0x40,
    DW_AT_TYPE = 0x49
  };

  // DWARF2 standard, figure 19.
  enum DWARF2AttributeForm {
    DW_FORM_ADDR = 0x1,
    DW_FORM_BLOCK4 = 0x4,
    DW_FORM_STRING = 0x8,
    DW_FORM_DATA4 = 0x6,
    DW_FORM_BLOCK = 0x9,
    DW_FORM_DATA1 = 0xb,
    DW_FORM_FLAG = 0xc,
    DW_FORM_REF4 = 0x13
  };

  // Emits one variable/parameter abbreviation.  DIEs with a value carry
  // an additional type reference and location block.
  void WriteVariableAbbreviation(Writer* w,
                                 int abbreviation_code,
                                 bool has_value,
                                 bool is_parameter) {
    w->WriteULEB128(abbreviation_code);
    w->WriteULEB128(is_parameter ? DW_TAG_FORMAL_PARAMETER : DW_TAG_VARIABLE);
    w->Write<uint8_t>(DW_CHILDREN_NO);
    w->WriteULEB128(DW_AT_NAME);
    w->WriteULEB128(DW_FORM_STRING);
    if (has_value) {
      w->WriteULEB128(DW_AT_TYPE);
      w->WriteULEB128(DW_FORM_REF4);
      w->WriteULEB128(DW_AT_LOCATION);
      w->WriteULEB128(DW_FORM_BLOCK4);
    }
    w->WriteULEB128(0);  // Terminate the attribute list.
    w->WriteULEB128(0);
  }

  bool WriteBodyInternal(Writer* w) {
    int current_abbreviation = 1;
    bool extra_info = desc_->IsInfoAvailable();
    DCHECK(desc_->IsLineInfoAvailable());

    // Abbreviation 1: the compile unit.
    w->WriteULEB128(current_abbreviation++);
    w->WriteULEB128(DW_TAG_COMPILE_UNIT);
    w->Write<uint8_t>(extra_info ? DW_CHILDREN_YES : DW_CHILDREN_NO);
    w->WriteULEB128(DW_AT_NAME);
    w->WriteULEB128(DW_FORM_STRING);
    w->WriteULEB128(DW_AT_LOW_PC);
    w->WriteULEB128(DW_FORM_ADDR);
    w->WriteULEB128(DW_AT_HIGH_PC);
    w->WriteULEB128(DW_FORM_ADDR);
    w->WriteULEB128(DW_AT_STMT_LIST);
    w->WriteULEB128(DW_FORM_DATA4);
    w->WriteULEB128(0);
    w->WriteULEB128(0);

    if (extra_info) {
      Scope* scope = desc_->info()->scope();
      int params = scope->num_parameters();
      int slots = scope->num_stack_slots();
      int context_slots = scope->ContextLocalCount();
      // The real slot ID is internal_slots + context_slot_id.
      int internal_slots = Context::MIN_CONTEXT_SLOTS;
      int locals = scope->StackLocalCount();
      // Total children is params + slots + context_slots + internal_slots +
      // locals + 2 (__function and __context).

      // The extra duplication below seems to be necessary to keep
      // gdb from getting upset on OSX.

      // Abbreviation 2: the subprogram.
      w->WriteULEB128(current_abbreviation++);  // Abbreviation code.
      w->WriteULEB128(DW_TAG_SUBPROGRAM);
      w->Write<uint8_t>(DW_CHILDREN_YES);
      w->WriteULEB128(DW_AT_NAME);
      w->WriteULEB128(DW_FORM_STRING);
      w->WriteULEB128(DW_AT_LOW_PC);
      w->WriteULEB128(DW_FORM_ADDR);
      w->WriteULEB128(DW_AT_HIGH_PC);
      w->WriteULEB128(DW_FORM_ADDR);
      w->WriteULEB128(DW_AT_FRAME_BASE);
      w->WriteULEB128(DW_FORM_BLOCK4);
      w->WriteULEB128(0);
      w->WriteULEB128(0);

      // Abbreviation 3: the synthetic "v8value" type.
      w->WriteULEB128(current_abbreviation++);
      w->WriteULEB128(DW_TAG_STRUCTURE_TYPE);
      w->Write<uint8_t>(DW_CHILDREN_NO);
      w->WriteULEB128(DW_AT_BYTE_SIZE);
      w->WriteULEB128(DW_FORM_DATA1);
      w->WriteULEB128(DW_AT_NAME);
      w->WriteULEB128(DW_FORM_STRING);
      w->WriteULEB128(0);
      w->WriteULEB128(0);

      // Abbreviations 4..N: one per variable DIE, in the same order that
      // DebugInfoSection emits the DIEs.
      for (int param = 0; param < params; ++param) {
        WriteVariableAbbreviation(w, current_abbreviation++, true, true);
      }

      for (int slot = 0; slot < slots; ++slot) {
        WriteVariableAbbreviation(w, current_abbreviation++, false, false);
      }

      for (int internal_slot = 0;
           internal_slot < internal_slots;
           ++internal_slot) {
        WriteVariableAbbreviation(w, current_abbreviation++, false, false);
      }

      for (int context_slot = 0;
           context_slot < context_slots;
           ++context_slot) {
        WriteVariableAbbreviation(w, current_abbreviation++, false, false);
      }

      for (int local = 0; local < locals; ++local) {
        WriteVariableAbbreviation(w, current_abbreviation++, true, false);
      }

      // The function.
      WriteVariableAbbreviation(w, current_abbreviation++, true, false);

      // The context.
      WriteVariableAbbreviation(w, current_abbreviation++, true, false);

      w->WriteULEB128(0);  // Terminate the sibling list.
    }

    w->WriteULEB128(0);  // Terminate the table.
    return true;
  }

 private:
  CodeDescription* desc_;
};
// Emits the DWARF .debug_line program mapping code addresses to source
// lines, built from the LineInfo collected during code generation.
// Uses compact "special opcodes" where the pc/line deltas permit, and
// falls back to explicit advance-pc/advance-line sequences otherwise.
class DebugLineSection : public DebugSection {
 public:
  explicit DebugLineSection(CodeDescription* desc)
#ifdef __ELF
      : ELFSection(".debug_line", TYPE_PROGBITS, 1),
#else
      : MachOSection("__debug_line",
                     "__DWARF",
                     1,
                     MachOSection::S_REGULAR | MachOSection::S_ATTR_DEBUG),
#endif
        desc_(desc) { }

  // DWARF2 standard, figure 34.
  enum DWARF2Opcodes {
    DW_LNS_COPY = 1,
    DW_LNS_ADVANCE_PC = 2,
    DW_LNS_ADVANCE_LINE = 3,
    DW_LNS_SET_FILE = 4,
    DW_LNS_SET_COLUMN = 5,
    DW_LNS_NEGATE_STMT = 6
  };

  // DWARF2 standard, figure 35.
  enum DWARF2ExtendedOpcode {
    DW_LNE_END_SEQUENCE = 1,
    DW_LNE_SET_ADDRESS = 2,
    DW_LNE_DEFINE_FILE = 3
  };

  bool WriteBodyInternal(Writer* w) {
    // Write prologue.
    Writer::Slot<uint32_t> total_length = w->CreateSlotHere<uint32_t>();
    uintptr_t start = w->position();

    // Used for special opcodes
    const int8_t line_base = 1;
    const uint8_t line_range = 7;
    const int8_t max_line_incr = (line_base + line_range - 1);
    const uint8_t opcode_base = DW_LNS_NEGATE_STMT + 1;

    w->Write<uint16_t>(2);  // Field version.
    Writer::Slot<uint32_t> prologue_length = w->CreateSlotHere<uint32_t>();
    uintptr_t prologue_start = w->position();
    w->Write<uint8_t>(1);  // Field minimum_instruction_length.
    w->Write<uint8_t>(1);  // Field default_is_stmt.
    w->Write<int8_t>(line_base);  // Field line_base.
    w->Write<uint8_t>(line_range);  // Field line_range.
    w->Write<uint8_t>(opcode_base);  // Field opcode_base.
    w->Write<uint8_t>(0);  // DW_LNS_COPY operands count.
    w->Write<uint8_t>(1);  // DW_LNS_ADVANCE_PC operands count.
    w->Write<uint8_t>(1);  // DW_LNS_ADVANCE_LINE operands count.
    w->Write<uint8_t>(1);  // DW_LNS_SET_FILE operands count.
    w->Write<uint8_t>(1);  // DW_LNS_SET_COLUMN operands count.
    w->Write<uint8_t>(0);  // DW_LNS_NEGATE_STMT operands count.
    w->Write<uint8_t>(0);  // Empty include_directories sequence.
    w->WriteString(desc_->GetFilename().get());  // File name.
    w->WriteULEB128(0);  // Current directory.
    w->WriteULEB128(0);  // Unknown modification time.
    w->WriteULEB128(0);  // Unknown file size.
    w->Write<uint8_t>(0);  // Terminate the file_names list.
    prologue_length.set(static_cast<uint32_t>(w->position() - prologue_start));

    // Initialize the state machine at the start of the code.
    WriteExtendedOpcode(w, DW_LNE_SET_ADDRESS, sizeof(intptr_t));
    w->Write<intptr_t>(desc_->CodeStart());
    w->Write<uint8_t>(DW_LNS_COPY);

    intptr_t pc = 0;
    intptr_t line = 1;
    bool is_statement = true;

    List<LineInfo::PCInfo>* pc_info = desc_->lineinfo()->pc_info();
    pc_info->Sort(&ComparePCInfo);

    int pc_info_length = pc_info->length();
    for (int i = 0; i < pc_info_length; i++) {
      LineInfo::PCInfo* info = &pc_info->at(i);
      DCHECK(info->pc_ >= pc);

      // Reduce bloating in the debug line table by removing duplicate line
      // entries (per DWARF2 standard).
      intptr_t new_line = desc_->GetScriptLineNumber(info->pos_);
      if (new_line == line) {
        continue;
      }

      // Mark statement boundaries. For a better debugging experience, mark
      // the last pc address in the function as a statement (e.g. "}"), so that
      // a user can see the result of the last line executed in the function,
      // should control reach the end.
      if ((i+1) == pc_info_length) {
        if (!is_statement) {
          w->Write<uint8_t>(DW_LNS_NEGATE_STMT);
        }
      } else if (is_statement != info->is_statement_) {
        w->Write<uint8_t>(DW_LNS_NEGATE_STMT);
        is_statement = !is_statement;
      }

      // Generate special opcodes, if possible. This results in more compact
      // debug line tables. See the DWARF 2.0 standard to learn more about
      // special opcodes.
      uintptr_t pc_diff = info->pc_ - pc;
      intptr_t line_diff = new_line - line;

      // Compute special opcode (see DWARF 2.0 standard)
      intptr_t special_opcode = (line_diff - line_base) +
                                (line_range * pc_diff) + opcode_base;

      // If special_opcode is less than or equal to 255, it can be used as a
      // special opcode. If line_diff is larger than the max line increment
      // allowed for a special opcode, or if line_diff is less than the minimum
      // line that can be added to the line register (i.e. line_base), then
      // special_opcode can't be used.
      if ((special_opcode >= opcode_base) && (special_opcode <= 255) &&
          (line_diff <= max_line_incr) && (line_diff >= line_base)) {
        w->Write<uint8_t>(special_opcode);
      } else {
        w->Write<uint8_t>(DW_LNS_ADVANCE_PC);
        w->WriteSLEB128(pc_diff);
        w->Write<uint8_t>(DW_LNS_ADVANCE_LINE);
        w->WriteSLEB128(line_diff);
        w->Write<uint8_t>(DW_LNS_COPY);
      }

      // Increment the pc and line operands.
      pc += pc_diff;
      line += line_diff;
    }
    // Advance the pc to the end of the routine, since the end sequence
    // opcode requires this.
    w->Write<uint8_t>(DW_LNS_ADVANCE_PC);
    w->WriteSLEB128(desc_->CodeSize() - pc);
    WriteExtendedOpcode(w, DW_LNE_END_SEQUENCE, 0);
    total_length.set(static_cast<uint32_t>(w->position() - start));
    return true;
  }

 private:
  // Extended opcodes are encoded as: 0, ULEB128 total size, opcode byte.
  void WriteExtendedOpcode(Writer* w,
                           DWARF2ExtendedOpcode op,
                           size_t operands_size) {
    w->Write<uint8_t>(0);
    w->WriteULEB128(operands_size + 1);
    w->Write<uint8_t>(op);
  }

  // Orders entries by pc; at equal pc, statement entries sort before
  // non-statement ones.
  static int ComparePCInfo(const LineInfo::PCInfo* a,
                           const LineInfo::PCInfo* b) {
    if (a->pc_ == b->pc_) {
      if (a->is_statement_ != b->is_statement_) {
        return b->is_statement_ ? +1 : -1;
      }
      return 0;
    } else if (a->pc_ > b->pc_) {
      return +1;
    } else {
      return -1;
    }
  }

  CodeDescription* desc_;
};
#if V8_TARGET_ARCH_X64
// Emits x64 call-frame unwind information (.eh_frame / __eh_frame):
// a single CIE followed by one FDE whose rules track the frame through
// each phase of the standard prologue/epilogue (before and after RBP is
// pushed, set and popped).
class UnwindInfoSection : public DebugSection {
 public:
  explicit UnwindInfoSection(CodeDescription* desc);
  virtual bool WriteBodyInternal(Writer* w);

  int WriteCIE(Writer* w);
  void WriteFDE(Writer* w, int);

  void WriteFDEStateOnEntry(Writer* w);
  void WriteFDEStateAfterRBPPush(Writer* w);
  void WriteFDEStateAfterRBPSet(Writer* w);
  void WriteFDEStateAfterRBPPop(Writer* w);

  // Pads with DW_CFA_NOPs to pointer alignment and patches |length_slot|
  // with the aligned entry length.
  void WriteLength(Writer* w,
                   Writer::Slot<uint32_t>* length_slot,
                   int initial_position);

 private:
  CodeDescription* desc_;

  // DWARF3 Specification, Table 7.23
  enum CFIInstructions {
    DW_CFA_ADVANCE_LOC = 0x40,
    DW_CFA_OFFSET = 0x80,
    DW_CFA_RESTORE = 0xC0,
    DW_CFA_NOP = 0x00,
    DW_CFA_SET_LOC = 0x01,
    DW_CFA_ADVANCE_LOC1 = 0x02,
    DW_CFA_ADVANCE_LOC2 = 0x03,
    DW_CFA_ADVANCE_LOC4 = 0x04,
    DW_CFA_OFFSET_EXTENDED = 0x05,
    DW_CFA_RESTORE_EXTENDED = 0x06,
    DW_CFA_UNDEFINED = 0x07,
    DW_CFA_SAME_VALUE = 0x08,
    DW_CFA_REGISTER = 0x09,
    DW_CFA_REMEMBER_STATE = 0x0A,
    DW_CFA_RESTORE_STATE = 0x0B,
    DW_CFA_DEF_CFA = 0x0C,
    DW_CFA_DEF_CFA_REGISTER = 0x0D,
    DW_CFA_DEF_CFA_OFFSET = 0x0E,
    DW_CFA_DEF_CFA_EXPRESSION = 0x0F,
    DW_CFA_EXPRESSION = 0x10,
    DW_CFA_OFFSET_EXTENDED_SF = 0x11,
    DW_CFA_DEF_CFA_SF = 0x12,
    DW_CFA_DEF_CFA_OFFSET_SF = 0x13,
    DW_CFA_VAL_OFFSET = 0x14,
    DW_CFA_VAL_OFFSET_SF = 0x15,
    DW_CFA_VAL_EXPRESSION = 0x16
  };

  // System V ABI, AMD64 Supplement, Version 0.99.5, Figure 3.36
  enum RegisterMapping {
    // Only the relevant ones have been added to reduce clutter.
    AMD64_RBP = 6,
    AMD64_RSP = 7,
    AMD64_RA = 16
  };

  enum CFIConstants {
    CIE_ID = 0,
    CIE_VERSION = 1,
    CODE_ALIGN_FACTOR = 1,
    DATA_ALIGN_FACTOR = 1,
    RETURN_ADDRESS_REGISTER = AMD64_RA
  };
};
// Pads the current CIE/FDE out to pointer alignment with DW_CFA_NOP
// bytes, then patches |length_slot| with the final entry length
// (measured from |initial_position|).
void UnwindInfoSection::WriteLength(Writer* w,
                                    Writer::Slot<uint32_t>* length_slot,
                                    int initial_position) {
  uint32_t misalignment = (w->position() - initial_position) % kPointerSize;
  if (misalignment != 0) {
    uint32_t padding = kPointerSize - misalignment;
    while (padding-- > 0) {
      w->Write<uint8_t>(DW_CFA_NOP);
    }
  }

  DCHECK((w->position() - initial_position) % kPointerSize == 0);
  length_slot->set(w->position() - initial_position);
}
// The unwind info lives in .eh_frame on ELF targets and in the
// __TEXT,__eh_frame section on Mach-O targets.
UnwindInfoSection::UnwindInfoSection(CodeDescription* desc)
#ifdef __ELF
    : ELFSection(".eh_frame", TYPE_X86_64_UNWIND, 1),
#else
    : MachOSection("__eh_frame", "__TEXT", sizeof(uintptr_t),
                   MachOSection::S_REGULAR),
#endif
      desc_(desc) { }
// Writes the Common Information Entry and returns its position, which
// WriteFDE() needs for the FDE's backwards CIE pointer.
int UnwindInfoSection::WriteCIE(Writer* w) {
  Writer::Slot<uint32_t> cie_length_slot = w->CreateSlotHere<uint32_t>();
  uint32_t cie_position = w->position();

  // Write out the CIE header. Currently no 'common instructions' are
  // emitted onto the CIE; every FDE has its own set of instructions.
  w->Write<uint32_t>(CIE_ID);
  w->Write<uint8_t>(CIE_VERSION);
  w->Write<uint8_t>(0);  // Null augmentation string.
  w->WriteSLEB128(CODE_ALIGN_FACTOR);
  w->WriteSLEB128(DATA_ALIGN_FACTOR);
  w->Write<uint8_t>(RETURN_ADDRESS_REGISTER);

  WriteLength(w, &cie_length_slot, cie_position);

  return cie_position;
}
void UnwindInfoSection::WriteFDE(Writer* w, int cie_position) {
  // The only FDE for this function. The CFA is the current RBP.
  Writer::Slot<uint32_t> fde_length_slot = w->CreateSlotHere<uint32_t>();
  int fde_position = w->position();
  // Backwards offset from here to the CIE.
  w->Write<int32_t>(fde_position - cie_position + 4);

  w->Write<uintptr_t>(desc_->CodeStart());
  w->Write<uintptr_t>(desc_->CodeSize());

  // Unwind rules for each prologue/epilogue phase, in code order.
  WriteFDEStateOnEntry(w);
  WriteFDEStateAfterRBPPush(w);
  WriteFDEStateAfterRBPSet(w);
  WriteFDEStateAfterRBPPop(w);

  WriteLength(w, &fde_length_slot, fde_position);
}
void UnwindInfoSection::WriteFDEStateOnEntry(Writer* w) {
  // The first state, just after the control has been transferred to the
  // function.

  // RBP for this function will be the value of RSP after pushing the RBP
  // for the previous function. The previous RBP has not been pushed yet.
  w->Write<uint8_t>(DW_CFA_DEF_CFA_SF);
  w->WriteULEB128(AMD64_RSP);
  w->WriteSLEB128(-kPointerSize);

  // The RA is stored at location CFA + kCallerPCOffset. This is an invariant,
  // and hence omitted from the next states.
  w->Write<uint8_t>(DW_CFA_OFFSET_EXTENDED);
  w->WriteULEB128(AMD64_RA);
  w->WriteSLEB128(StandardFrameConstants::kCallerPCOffset);

  // The RBP of the previous function is still in RBP.
  w->Write<uint8_t>(DW_CFA_SAME_VALUE);
  w->WriteULEB128(AMD64_RBP);

  // Last location described by this entry.
  w->Write<uint8_t>(DW_CFA_SET_LOC);
  w->Write<uint64_t>(
      desc_->GetStackStateStartAddress(CodeDescription::POST_RBP_PUSH));
}
// Emits the CFA rules that hold between the RBP push and the RBP set.
void UnwindInfoSection::WriteFDEStateAfterRBPPush(Writer* w) {
  // The second state, just after RBP has been pushed.
  // RBP / CFA for this function is now the current RSP, so just set the
  // offset from the previous rule (from -8) to 0.
  w->Write<uint8_t>(DW_CFA_DEF_CFA_OFFSET);
  w->WriteULEB128(0);

  // The previous RBP is stored at CFA + kCallerFPOffset. This is an invariant
  // in this and the next state, and hence omitted in the next state.
  w->Write<uint8_t>(DW_CFA_OFFSET_EXTENDED);
  w->WriteULEB128(AMD64_RBP);
  w->WriteSLEB128(StandardFrameConstants::kCallerFPOffset);

  // Last location described by this entry.
  w->Write<uint8_t>(DW_CFA_SET_LOC);
  w->Write<uint64_t>(
      desc_->GetStackStateStartAddress(CodeDescription::POST_RBP_SET));
}
// Emits the CFA rules for the function body proper (RBP established).
void UnwindInfoSection::WriteFDEStateAfterRBPSet(Writer* w) {
  // The third state, after the RBP has been set.
  // The CFA can now directly be set to RBP.
  w->Write<uint8_t>(DW_CFA_DEF_CFA);
  w->WriteULEB128(AMD64_RBP);
  w->WriteULEB128(0);

  // Last location described by this entry.
  w->Write<uint8_t>(DW_CFA_SET_LOC);
  w->Write<uint64_t>(
      desc_->GetStackStateStartAddress(CodeDescription::POST_RBP_POP));
}
// Emits the CFA rules for the epilogue after RBP has been restored.
void UnwindInfoSection::WriteFDEStateAfterRBPPop(Writer* w) {
  // The fourth (final) state. The RBP has been popped (just before issuing a
  // return).
  // The CFA is now calculated in the same way as in the first state.
  w->Write<uint8_t>(DW_CFA_DEF_CFA_SF);
  w->WriteULEB128(AMD64_RSP);
  w->WriteSLEB128(-kPointerSize);

  // The RBP of the previous function is back at CFA + kCallerFPOffset.
  w->Write<uint8_t>(DW_CFA_OFFSET_EXTENDED);
  w->WriteULEB128(AMD64_RBP);
  w->WriteSLEB128(StandardFrameConstants::kCallerFPOffset);

  // Last location described by this entry.
  w->Write<uint8_t>(DW_CFA_SET_LOC);
  w->Write<uint64_t>(desc_->CodeEnd());
}
// Emits the whole section body: one CIE followed by the one FDE that
// references it. Always reports success.
bool UnwindInfoSection::WriteBodyInternal(Writer* w) {
  uint32_t cie_position = WriteCIE(w);
  WriteFDE(w, cie_position);
  return true;
}
#endif // V8_TARGET_ARCH_X64
// Adds the DWARF debug sections for |desc| to |obj|. Line-number sections
// are only emitted when source position information is available; x64
// additionally always gets unwind information.
static void CreateDWARFSections(CodeDescription* desc,
                                Zone* zone,
                                DebugObject* obj) {
  if (desc->IsLineInfoAvailable()) {
    obj->AddSection(new(zone) DebugInfoSection(desc));
    obj->AddSection(new(zone) DebugAbbrevSection(desc));
    obj->AddSection(new(zone) DebugLineSection(desc));
  }
#if V8_TARGET_ARCH_X64
  obj->AddSection(new(zone) UnwindInfoSection(desc));
#endif
}
// -------------------------------------------------------------------
// Binary GDB JIT Interface as described in
// http://sourceware.org/gdb/onlinedocs/gdb/Declarations.html
extern "C" {
  // These declarations mirror the layout GDB expects; do not change field
  // order or types (see the GDB JIT interface documentation linked above).
  typedef enum {
    JIT_NOACTION = 0,
    JIT_REGISTER_FN,
    JIT_UNREGISTER_FN
  } JITAction;

  // One node of the doubly-linked list of in-memory object files.
  struct JITCodeEntry {
    JITCodeEntry* next_;
    JITCodeEntry* prev_;
    Address symfile_addr_;   // Start of the ELF/Mach-O image for this code.
    uint64_t symfile_size_;  // Size of that image in bytes.
  };

  struct JITDescriptor {
    uint32_t version_;            // Interface version; always 1.
    uint32_t action_flag_;        // JITAction describing the latest change.
    JITCodeEntry* relevant_entry_;  // Entry the latest action applies to.
    JITCodeEntry* first_entry_;     // Head of the entry list.
  };

  // GDB will place breakpoint into this function.
  // To prevent GCC from inlining or removing it we place noinline attribute
  // and inline assembler statement inside.
  void __attribute__((noinline)) __jit_debug_register_code() {
    __asm__("");
  }

  // GDB will inspect contents of this descriptor.
  // Static initialization is necessary to prevent GDB from seeing
  // uninitialized descriptor.
  JITDescriptor __jit_debug_descriptor = { 1, 0, 0, 0 };

#ifdef OBJECT_PRINT
  // Debug-console helper so a developer can print a V8 object from GDB.
  void __gdb_print_v8_object(Object* object) {
    OFStream os(stdout);
    object->Print(os);
    os << flush;
  }
#endif
}
// Allocates a JITCodeEntry with the object-file image copied inline right
// after the struct itself, so one free() releases both (see
// DestroyCodeEntry).
// NOTE(review): the malloc result is not checked for NULL -- confirm
// callers can tolerate an OOM crash here.
static JITCodeEntry* CreateCodeEntry(Address symfile_addr,
                                     uintptr_t symfile_size) {
  JITCodeEntry* entry = static_cast<JITCodeEntry*>(
      malloc(sizeof(JITCodeEntry) + symfile_size));

  entry->symfile_addr_ = reinterpret_cast<Address>(entry + 1);
  entry->symfile_size_ = symfile_size;
  MemCopy(entry->symfile_addr_, symfile_addr, symfile_size);

  entry->prev_ = entry->next_ = NULL;

  return entry;
}
// Releases an entry created by CreateCodeEntry; the symfile image lives in
// the same allocation, so a single free() suffices.
static void DestroyCodeEntry(JITCodeEntry* entry) {
  free(entry);
}
// Prepends |entry| to the GDB JIT descriptor's linked list and notifies an
// attached debugger via __jit_debug_register_code(). In debug builds the
// generated object file can additionally be dumped to /tmp for offline
// inspection when FLAG_gdbjit_dump is set.
static void RegisterCodeEntry(JITCodeEntry* entry,
                              bool dump_if_enabled,
                              const char* name_hint) {
#if defined(DEBUG) && !V8_OS_WIN
  static int file_num = 0;
  if (FLAG_gdbjit_dump && dump_if_enabled) {
    static const int kMaxFileNameSize = 64;
    static const char* kElfFilePrefix = "/tmp/elfdump";
    static const char* kObjFileExt = ".o";
    // Size the buffer with kMaxFileNameSize so the storage and the SNPrintF
    // bound cannot drift apart (previously a hard-coded 64).
    char file_name[kMaxFileNameSize];

    SNPrintF(Vector<char>(file_name, kMaxFileNameSize),
             "%s%s%d%s",
             kElfFilePrefix,
             (name_hint != NULL) ? name_hint : "",
             file_num++,
             kObjFileExt);
    WriteBytes(file_name, entry->symfile_addr_, entry->symfile_size_);
  }
#endif

  // Link at the head of the list and mark this entry as the one the latest
  // action refers to, then trip the debugger's breakpoint function.
  entry->next_ = __jit_debug_descriptor.first_entry_;
  if (entry->next_ != NULL) entry->next_->prev_ = entry;
  __jit_debug_descriptor.first_entry_ =
      __jit_debug_descriptor.relevant_entry_ = entry;

  __jit_debug_descriptor.action_flag_ = JIT_REGISTER_FN;
  __jit_debug_register_code();
}
// Unlinks |entry| from the descriptor's list and notifies the debugger.
// The caller remains responsible for freeing the entry (DestroyCodeEntry).
static void UnregisterCodeEntry(JITCodeEntry* entry) {
  if (entry->prev_ != NULL) {
    entry->prev_->next_ = entry->next_;
  } else {
    // Entry was the list head.
    __jit_debug_descriptor.first_entry_ = entry->next_;
  }

  if (entry->next_ != NULL) {
    entry->next_->prev_ = entry->prev_;
  }

  __jit_debug_descriptor.relevant_entry_ = entry;
  __jit_debug_descriptor.action_flag_ = JIT_UNREGISTER_FN;
  __jit_debug_register_code();
}
// Serializes |desc| into an in-memory object file (Mach-O or ELF depending
// on the platform) and wraps the resulting bytes in a JITCodeEntry.
static JITCodeEntry* CreateELFObject(CodeDescription* desc, Isolate* isolate) {
#ifdef __MACH_O
  Zone zone(isolate);
  MachO mach_o(&zone);
  Writer w(&mach_o);

  // Mach-O gets a text section plus the DWARF sections.
  mach_o.AddSection(new(&zone) MachOTextSection(kCodeAlignment,
                                                desc->CodeStart(),
                                                desc->CodeSize()));

  CreateDWARFSections(desc, &zone, &mach_o);

  mach_o.Write(&w, desc->CodeStart(), desc->CodeSize());
#else
  Zone zone(isolate);
  ELF elf(&zone);
  Writer w(&elf);

  // TYPE_NOBITS: the .text contents already live in the V8 heap, so the
  // section header only references them rather than embedding a copy.
  int text_section_index = elf.AddSection(
      new(&zone) FullHeaderELFSection(
          ".text",
          ELFSection::TYPE_NOBITS,
          kCodeAlignment,
          desc->CodeStart(),
          0,
          desc->CodeSize(),
          ELFSection::FLAG_ALLOC | ELFSection::FLAG_EXEC));

  CreateSymbolsTable(desc, &zone, &elf, text_section_index);

  CreateDWARFSections(desc, &zone, &elf);

  elf.Write(&w);
#endif

  return CreateCodeEntry(w.buffer(), w.position());
}
// HashMap key comparator: Code objects are identified by pointer identity.
static bool SameCodeObjects(void* key1, void* key2) {
  return key1 == key2;
}
// Lazily creates the process-wide Code* -> entry table. The map is never
// freed; it lives for the duration of the process.
static HashMap* GetEntries() {
  static HashMap* entries = NULL;
  if (entries == NULL) {
    entries = new HashMap(&SameCodeObjects);
  }
  return entries;
}
// Hashes a Code object by its address. The low kCodeAlignmentBits are
// always zero for aligned code, so they are shifted out before mixing
// with a multiplicative (Knuth) hash constant.
static uint32_t HashForCodeObject(Code* code) {
  static const uintptr_t kGoldenRatio = 2654435761u;
  uintptr_t hash = reinterpret_cast<uintptr_t>(code->address());
  return static_cast<uint32_t>((hash >> kCodeAlignmentBits) * kGoldenRatio);
}
// The entries map stores two kinds of values: JITCodeEntry* (untagged) and
// LineInfo* (tagged by setting the low pointer bit, which is always clear
// for aligned allocations).
static const intptr_t kLineInfoTag = 0x1;

// Returns true if |ptr| carries the LineInfo tag bit.
static bool IsLineInfoTagged(void* ptr) {
  return 0 != (reinterpret_cast<intptr_t>(ptr) & kLineInfoTag);
}

// Stores a LineInfo pointer with the tag bit set.
static void* TagLineInfo(LineInfo* ptr) {
  return reinterpret_cast<void*>(
      reinterpret_cast<intptr_t>(ptr) | kLineInfoTag);
}

// Strips the tag bit and recovers the original LineInfo pointer.
static LineInfo* UntagLineInfo(void* ptr) {
  return reinterpret_cast<LineInfo*>(reinterpret_cast<intptr_t>(ptr) &
                                     ~kLineInfoTag);
}
// Handle-based entry point: resolves the (possibly null) name to a C
// string and forwards to the raw-pointer AddCode overload below.
void GDBJITInterface::AddCode(Handle<Name> name,
                              Handle<Script> script,
                              Handle<Code> code,
                              CompilationInfo* info) {
  if (!FLAG_gdbjit) return;

  // Ensure line-end offsets are computed before line info is queried.
  Script::InitLineEnds(script);

  if (!name.is_null() && name->IsString()) {
    SmartArrayPointer<char> name_cstring =
        Handle<String>::cast(name)->ToCString(DISALLOW_NULLS);
    AddCode(name_cstring.get(), *code, GDBJITInterface::FUNCTION, *script,
            info);
  } else {
    // Anonymous function: register under an empty name.
    AddCode("", *code, GDBJITInterface::FUNCTION, *script, info);
  }
}
// Records the code offsets at which the x64 frame-pointer states change,
// used later by UnwindInfoSection. For regular functions fixed prologue/
// epilogue offsets are assumed; for everything else the whole code range
// is treated as a single state.
static void AddUnwindInfo(CodeDescription* desc) {
#if V8_TARGET_ARCH_X64
  if (desc->tag() == GDBJITInterface::FUNCTION) {
    // To avoid propagating unwinding information through
    // compilation pipeline we use an approximation.
    // For most use cases this should not affect usability.
    static const int kFramePointerPushOffset = 1;
    static const int kFramePointerSetOffset = 4;
    static const int kFramePointerPopOffset = -3;

    uintptr_t frame_pointer_push_address =
        desc->CodeStart() + kFramePointerPushOffset;

    uintptr_t frame_pointer_set_address =
        desc->CodeStart() + kFramePointerSetOffset;

    // Negative offset: the pop happens just before the end of the code.
    uintptr_t frame_pointer_pop_address =
        desc->CodeEnd() + kFramePointerPopOffset;

    desc->SetStackStateStartAddress(CodeDescription::POST_RBP_PUSH,
                                    frame_pointer_push_address);
    desc->SetStackStateStartAddress(CodeDescription::POST_RBP_SET,
                                    frame_pointer_set_address);
    desc->SetStackStateStartAddress(CodeDescription::POST_RBP_POP,
                                    frame_pointer_pop_address);
  } else {
    // Non-function code: collapse all states onto the code boundaries.
    desc->SetStackStateStartAddress(CodeDescription::POST_RBP_PUSH,
                                    desc->CodeStart());
    desc->SetStackStateStartAddress(CodeDescription::POST_RBP_SET,
                                    desc->CodeStart());
    desc->SetStackStateStartAddress(CodeDescription::POST_RBP_POP,
                                    desc->CodeEnd());
  }
#endif  // V8_TARGET_ARCH_X64
}
// Guards the entries map and the GDB descriptor list.
static base::LazyMutex mutex = LAZY_MUTEX_INITIALIZER;

// Builds an in-memory object file for |code| and registers it with GDB.
// If detailed line info was recorded earlier it is found (tagged) in the
// entries map and consumed here.
void GDBJITInterface::AddCode(const char* name,
                              Code* code,
                              GDBJITInterface::CodeTag tag,
                              Script* script,
                              CompilationInfo* info) {
  base::LockGuard<base::Mutex> lock_guard(mutex.Pointer());
  DisallowHeapAllocation no_gc;

  HashMap::Entry* e = GetEntries()->Lookup(code, HashForCodeObject(code), true);
  // An untagged value means this code object was already registered.
  if (e->value != NULL && !IsLineInfoTagged(e->value)) return;

  LineInfo* lineinfo = UntagLineInfo(e->value);
  CodeDescription code_desc(name,
                            code,
                            script != NULL ? Handle<Script>(script)
                                           : Handle<Script>(),
                            lineinfo,
                            tag,
                            info);

  // Without line info, only register when FLAG_gdbjit_full asks for it.
  if (!FLAG_gdbjit_full && !code_desc.IsLineInfoAvailable()) {
    delete lineinfo;
    GetEntries()->Remove(code, HashForCodeObject(code));
    return;
  }

  AddUnwindInfo(&code_desc);
  Isolate* isolate = code->GetIsolate();
  JITCodeEntry* entry = CreateELFObject(&code_desc, isolate);
  DCHECK(!IsLineInfoTagged(entry));

  delete lineinfo;
  e->value = entry;

  // Decide whether (and under which hint) to dump the object file.
  const char* name_hint = NULL;
  bool should_dump = false;
  if (FLAG_gdbjit_dump) {
    if (strlen(FLAG_gdbjit_dump_filter) == 0) {
      name_hint = name;
      should_dump = true;
    } else if (name != NULL) {
      name_hint = strstr(name, FLAG_gdbjit_dump_filter);
      should_dump = (name_hint != NULL);
    }
  }
  RegisterCodeEntry(entry, should_dump, name_hint);
}
// Removes |code| from the entries map. Depending on what is stored there,
// either discards pending (tagged) line info or unregisters and frees the
// previously registered object file.
void GDBJITInterface::RemoveCode(Code* code) {
  if (!FLAG_gdbjit) return;

  base::LockGuard<base::Mutex> lock_guard(mutex.Pointer());
  HashMap::Entry* e = GetEntries()->Lookup(code,
                                           HashForCodeObject(code),
                                           false);
  if (e == NULL) return;

  if (IsLineInfoTagged(e->value)) {
    delete UntagLineInfo(e->value);
  } else {
    JITCodeEntry* entry = static_cast<JITCodeEntry*>(e->value);
    UnregisterCodeEntry(entry);
    DestroyCodeEntry(entry);
  }
  e->value = NULL;
  GetEntries()->Remove(code, HashForCodeObject(code));
}
// Removes all registered code objects whose start address lies in
// [start, end). Candidates are collected first because RemoveCode mutates
// the map being iterated.
// NOTE(review): the map is scanned here without holding |mutex| (RemoveCode
// locks per entry) -- confirm callers serialize access to this function.
void GDBJITInterface::RemoveCodeRange(Address start, Address end) {
  HashMap* entries = GetEntries();
  Zone zone(Isolate::Current());
  ZoneList<Code*> dead_codes(1, &zone);

  for (HashMap::Entry* e = entries->Start(); e != NULL; e = entries->Next(e)) {
    Code* code = reinterpret_cast<Code*>(e->key);
    if (code->address() >= start && code->address() < end) {
      dead_codes.Add(code, &zone);
    }
  }

  for (int i = 0; i < dead_codes.length(); i++) {
    RemoveCode(dead_codes.at(i));
  }
}
// Stashes freshly recorded line info in the entries map (tagged), to be
// consumed by a later AddCode call for the same code object.
static void RegisterDetailedLineInfo(Code* code, LineInfo* line_info) {
  base::LockGuard<base::Mutex> lock_guard(mutex.Pointer());
  DCHECK(!IsLineInfoTagged(line_info));
  HashMap::Entry* e = GetEntries()->Lookup(code, HashForCodeObject(code), true);
  DCHECK(e->value == NULL);
  e->value = TagLineInfo(line_info);
}
// V8 JIT code event sink: translates engine events into the add/remove and
// line-info-recording operations above.
void GDBJITInterface::EventHandler(const v8::JitCodeEvent* event) {
  if (!FLAG_gdbjit) return;
  switch (event->type) {
    case v8::JitCodeEvent::CODE_ADDED: {
      Code* code = Code::GetCodeFromTargetAddress(
          reinterpret_cast<Address>(event->code_start));
      // JS functions are skipped here; they are registered through the
      // Handle-based AddCode path instead.
      if (code->kind() == Code::OPTIMIZED_FUNCTION ||
          code->kind() == Code::FUNCTION) {
        break;
      }
      // The event name is not NUL-terminated; copy it into a buffer first.
      EmbeddedVector<char, 256> buffer;
      StringBuilder builder(buffer.start(), buffer.length());
      builder.AddSubstring(event->name.str, static_cast<int>(event->name.len));
      AddCode(builder.Finalize(), code, NON_FUNCTION, NULL, NULL);
      break;
    }
    case v8::JitCodeEvent::CODE_MOVED:
      // Moves are ignored; stale addresses are cleaned up on removal.
      break;
    case v8::JitCodeEvent::CODE_REMOVED: {
      Code* code = Code::GetCodeFromTargetAddress(
          reinterpret_cast<Address>(event->code_start));
      RemoveCode(code);
      break;
    }
    case v8::JitCodeEvent::CODE_ADD_LINE_POS_INFO: {
      // Append one position sample to the LineInfo allocated below.
      LineInfo* line_info = reinterpret_cast<LineInfo*>(event->user_data);
      line_info->SetPosition(static_cast<intptr_t>(event->line_info.offset),
                             static_cast<int>(event->line_info.pos),
                             event->line_info.position_type ==
                                 v8::JitCodeEvent::STATEMENT_POSITION);
      break;
    }
    case v8::JitCodeEvent::CODE_START_LINE_INFO_RECORDING: {
      // Hand V8 a fresh accumulator via the event's user_data field.
      v8::JitCodeEvent* mutable_event = const_cast<v8::JitCodeEvent*>(event);
      mutable_event->user_data = new LineInfo();
      break;
    }
    case v8::JitCodeEvent::CODE_END_LINE_INFO_RECORDING: {
      LineInfo* line_info = reinterpret_cast<LineInfo*>(event->user_data);
      Code* code = Code::GetCodeFromTargetAddress(
          reinterpret_cast<Address>(event->code_start));
      RegisterDetailedLineInfo(code, line_info);
      break;
    }
  }
}
} } // namespace v8::internal
#endif
| xushiwei/fibjs | vender/src/v8/src/gdb-jit.cc | C++ | gpl-3.0 | 63,323 |
#--
# Copyright (c) 2004-2010 David Heinemeier Hansson
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#++
# Prefer sibling checkouts of Active Support / Active Model over installed
# gems when running from a full Rails source tree.
activesupport_path = File.expand_path('../../../activesupport/lib', __FILE__)
$:.unshift(activesupport_path) if File.directory?(activesupport_path) && !$:.include?(activesupport_path)

activemodel_path = File.expand_path('../../../activemodel/lib', __FILE__)
$:.unshift(activemodel_path) if File.directory?(activemodel_path) && !$:.include?(activemodel_path)

require 'active_support'
require 'active_support/i18n'
require 'active_model'
require 'arel'

require 'active_record/version'
# Top-level namespace. Everything below is registered with Active Support's
# autoload machinery; eager_autoload blocks are force-loaded in production.
module ActiveRecord
  extend ActiveSupport::Autoload

  eager_autoload do
    # Error classes live together in active_record/errors.
    autoload :ActiveRecordError, 'active_record/errors'
    autoload :ConnectionNotEstablished, 'active_record/errors'

    autoload :Aggregations
    autoload :AssociationPreload
    autoload :Associations
    autoload :AttributeMethods
    autoload :AutosaveAssociation

    autoload :Relation

    # Relation mixins resolved from the active_record/relation/ directory.
    autoload_under 'relation' do
      autoload :QueryMethods
      autoload :FinderMethods
      autoload :Calculations
      autoload :PredicateBuilder
      autoload :SpawnMethods
      autoload :Batches
    end

    autoload :Base
    autoload :Callbacks
    autoload :CounterCache
    autoload :DynamicFinderMatch
    autoload :DynamicScopeMatch
    autoload :Migration
    autoload :Migrator, 'active_record/migration'
    autoload :NamedScope
    autoload :NestedAttributes
    autoload :Observer
    autoload :Persistence
    autoload :QueryCache
    autoload :Reflection
    autoload :Schema
    autoload :SchemaDumper
    autoload :Serialization
    autoload :SessionStore
    autoload :Timestamp
    autoload :Transactions
    autoload :Validations
  end

  module AttributeMethods
    extend ActiveSupport::Autoload

    eager_autoload do
      autoload :BeforeTypeCast
      autoload :Dirty
      autoload :PrimaryKey
      autoload :Query
      autoload :Read
      autoload :TimeZoneConversion
      autoload :Write
    end
  end

  module Locking
    extend ActiveSupport::Autoload

    eager_autoload do
      autoload :Optimistic
      autoload :Pessimistic
    end
  end

  module ConnectionAdapters
    extend ActiveSupport::Autoload

    eager_autoload do
      autoload :AbstractAdapter
      autoload :ConnectionManagement, "active_record/connection_adapters/abstract/connection_pool"
    end
  end

  autoload :TestCase
  autoload :TestFixtures, 'active_record/fixtures'
end
# Point Arel at Active Record's connection handling once AR itself loads.
ActiveSupport.on_load(:active_record) do
  Arel::Table.engine = self
end

# Register the bundled English translations with I18n.
I18n.load_path << File.dirname(__FILE__) + '/active_record/locale/en.yml'
| mzemel/kpsu.org | vendor/gems/ruby/1.8/gems/activerecord-3.0.3/lib/active_record.rb | Ruby | gpl-3.0 | 3,626 |
package org.thoughtcrime.securesms.push;
import android.content.Context;
import org.thoughtcrime.securesms.BuildConfig;
import org.thoughtcrime.securesms.util.TextSecurePreferences;
import org.whispersystems.signalservice.api.SignalServiceAccountManager;
/**
 * Factory for {@link SignalServiceAccountManager} instances pointed at this
 * build's push server ({@code BuildConfig.TEXTSECURE_URL}).
 */
public class TextSecureCommunicationFactory {

  /**
   * Creates a manager using the credentials of the locally registered
   * account, read from {@link TextSecurePreferences}.
   */
  public static SignalServiceAccountManager createManager(Context context) {
    return new SignalServiceAccountManager(BuildConfig.TEXTSECURE_URL,
                                           new TextSecurePushTrustStore(context),
                                           TextSecurePreferences.getLocalNumber(context),
                                           TextSecurePreferences.getPushServerPassword(context),
                                           BuildConfig.USER_AGENT);
  }

  /**
   * Creates a manager for explicitly supplied credentials instead of the
   * values stored in preferences.
   */
  public static SignalServiceAccountManager createManager(Context context, String number, String password) {
    return new SignalServiceAccountManager(BuildConfig.TEXTSECURE_URL, new TextSecurePushTrustStore(context),
                                           number, password, BuildConfig.USER_AGENT);
  }

}
| IBobko/signal | src/org/thoughtcrime/securesms/push/TextSecureCommunicationFactory.java | Java | gpl-3.0 | 1,107 |
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at <http://mozilla.org/MPL/2.0/>. */
const path = require("path");
const util = require("util");
const _ = require("lodash");
const webpack = require("webpack");
const TARGET_NAME = "webpack4-babel7";
module.exports = exports = async function(tests, dirname) {
const fixtures = [];
for (const [name, input] of tests) {
if (/typescript-/.test(name)) {
continue;
}
const testFnName = _.camelCase(`${TARGET_NAME}-${name}`);
const evalMaps = name.match(/-eval/);
const babelEnv = !name.match(/-es6/);
const babelModules = name.match(/-cjs/);
console.log(`Building ${TARGET_NAME} test ${name}`);
const scriptPath = path.join(dirname, "output", TARGET_NAME, `${name}.js`);
const result = await util.promisify(webpack)({
mode: "development",
context: path.dirname(input),
entry: `./${path.basename(input)}`,
output: {
path: path.dirname(scriptPath),
filename: path.basename(scriptPath),
devtoolModuleFilenameTemplate: `${TARGET_NAME}://./${name}/[resource-path]`,
libraryTarget: "var",
library: testFnName,
libraryExport: "default"
},
devtool: evalMaps ? "eval-source-map" : "source-map",
module: {
rules: [
{
test: /\.js$/,
exclude: /node_modules/,
loader: require.resolve("babel-loader"),
options: {
babelrc: false,
plugins: [
require.resolve("@babel/plugin-proposal-class-properties")
],
presets: [
require.resolve("@babel/preset-flow"),
babelEnv
? [
require.resolve("@babel/preset-env"),
{ modules: babelModules ? "commonjs" : false }
]
: null
].filter(Boolean)
}
}
].filter(Boolean)
}
});
fixtures.push({
name,
testFnName: testFnName,
scriptPath,
assets: [scriptPath, evalMaps ? null : `${scriptPath}.map`].filter(
Boolean
)
});
}
return {
target: TARGET_NAME,
fixtures
};
};
| darkwing/debugger.html | test/mochitest/examples/sourcemapped/builds/webpack4-babel7/index.js | JavaScript | mpl-2.0 | 2,374 |
<?php

// Dutch (nl) strings for the asset-depreciation admin screens.
return array(
    'about_asset_depreciations' => 'Over afschrijving van materiaal',
    'about_depreciations'       => 'U kan de materiaalafschrijving instellen om materiaal af te schrijven op basis van lineaire afschrijving.',
    'asset_depreciations'       => 'Materiaalafschrijvingen',
    'create'                    => 'Afschrijving aanmaken',
    'depreciation_name'         => 'Afschrijvingsnaam',
    'number_of_months'          => 'Aantal maanden',
    'update'                    => 'Afschrijving bijwerken',
);
| madd15/snipe-it | resources/lang/nl/admin/depreciations/general.php | PHP | agpl-3.0 | 528 |
# Copyright (C) 2006-2013 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
#! /usr/bin/env python
import sys, os
import essentia, essentia.standard, essentia.streaming
from essentia.streaming import *
from numpy import argmax, log10, mean, tanh
# Analysis parameters: 2-second frames with a 1-second hop at 44.1 kHz
# (88200 / 44100 samples respectively).
dynamicFrameSize = 88200
dynamicHopSize = 44100
analysisSampleRate = 44100.0
# expects the audio source to already be equal-loudness filtered
class LevelExtractor(essentia.streaming.CompositeBase):
#"""describes the dynamics of an audio signal"""
def __init__(self, frameSize=dynamicFrameSize, hopSize=dynamicHopSize):
super(LevelExtractor, self).__init__()
fc = FrameCutter(frameSize=frameSize,
hopSize=hopSize,
startFromZero=True,
silentFrames='noise')
dy = Loudness()
fc.frame >> dy.signal
# define inputs:
self.inputs['signal'] = fc.signal
# define outputs:
self.outputs['loudness'] = dy.loudness
def squeezeRange(x, x1, x2):
    """Squash x into (0, 1) with a tanh sigmoid whose transition spans [x1, x2]."""
    scaled = 2.0 * (x - x1) / (x2 - x1) - 1.0
    return 0.5 * (1.0 + tanh(scaled))
def levelAverage(pool, namespace=''):
    """Compute 'average_loudness' in [0, 1] from the pool's frame loudness.

    Reads '<ns>.lowlevel.loudness' from `pool`, removes it, and stores the
    squeezed dynamic-average descriptor as '<ns>.lowlevel.average_loudness'.
    """
    epsilon = 1e-4
    # Floor applied after normalization. NOTE(review): 10*log10(1e-4) is
    # -40 dB in power terms; the original comment claimed -80dB -- confirm
    # which convention was intended.
    threshold = 1e-4
    if namespace: namespace += '.lowlevel.'
    else: namespace = 'lowlevel.'
    loudness = pool[namespace + 'loudness']
    pool.remove(namespace + 'loudness')
    maxValue = loudness[argmax(loudness)]
    if maxValue <= epsilon: maxValue = epsilon

    # Normalize by the maximum and clamp from below. A list comprehension is
    # used instead of map() so the result is a real list on both Python 2
    # and Python 3 (map() returns a one-shot iterator on Python 3, which
    # would break the mean() call below).
    loudness = [max(x / float(maxValue), threshold) for x in loudness]

    # average level:
    levelAverage = 10.0 * log10(mean(loudness))

    # Re-scaling and range-control
    # This yields in numbers between
    # 0 for signals with large dynamic variance and thus low dynamic average
    # 1 for signals with little dynamic range and thus
    # a dynamic average close to the maximum
    x1 = -5.0
    x2 = -2.0
    levelAverageSqueezed = squeezeRange(levelAverage, x1, x2)
    pool.set(namespace + 'average_loudness', levelAverageSqueezed)
usage = 'level.py [options] <inputfilename> <outputfilename>'


def parse_args():
    """Parse command-line flags; --version reports essentia/python/numpy."""
    import numpy
    essentia_version = '%s\n'\
    'python version: %s\n'\
    'numpy version: %s' % (essentia.__version__, # full version
                           sys.version.split()[0], # python major version
                           numpy.__version__) # numpy version

    from optparse import OptionParser
    parser = OptionParser(usage=usage, version=essentia_version)

    # Code-generation helpers for the composite extractor.
    parser.add_option("-c","--cpp", action="store_true", dest="generate_cpp",
                      help="generate cpp code from CompositeBase algorithm")
    parser.add_option("-d", "--dot", action="store_true", dest="generate_dot",
                      help="generate dot and cpp code from CompositeBase algorithm")

    (options, args) = parser.parse_args()
    return options, args
if __name__ == '__main__':
    opts, args = parse_args()

    # Exactly an input and an output file are required; otherwise show help.
    if len(args) != 2:
        cmd = './'+os.path.basename(sys.argv[0])+ ' -h'
        os.system(cmd)
        sys.exit(1)

    # Optional code generation from the composite extractor definition.
    if opts.generate_dot:
        essentia.translate(LevelExtractor, 'streaming_extractorlevel', dot_graph=True)
    elif opts.generate_cpp:
        essentia.translate(LevelExtractor, 'streaming_extractorlevel', dot_graph=False)

    # find out replay gain:
    loader = EqloudLoader(filename=args[0],
                          sampleRate=analysisSampleRate,
                          downmix='mix')
    rgain = ReplayGain(applyEqloud=False)

    pool = essentia.Pool()

    loader.audio >> rgain.signal
    rgain.replayGain >> (pool, 'replay_gain')
    essentia.run(loader)

    # get average level (second pass, now with replay gain applied):
    loader = EqloudLoader(filename=args[0],
                          replayGain=pool['replay_gain'],
                          sampleRate=analysisSampleRate,
                          downmix='mix')
    levelExtractor = LevelExtractor()
    loader.audio >> levelExtractor.signal
    levelExtractor.loudness >> (pool, 'lowlevel.loudness')
    essentia.run(loader)

    levelAverage(pool)

    # Write the pool (replay gain + average loudness) to the output YAML.
    essentia.standard.YamlOutput(filename=args[1])(pool)
| GiantSteps/essentia | src/examples/python/streaming_extractor/level.py | Python | agpl-3.0 | 4,852 |
require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
require_relative '../app/lib/exceptions'
require_relative '../lib/paperclip/lazy_thumbnail'
require_relative '../lib/paperclip/gif_transcoder'
require_relative '../lib/paperclip/video_transcoder'
require_relative '../lib/mastodon/snowflake'
require_relative '../lib/mastodon/version'
require_relative '../lib/devise/ldap_authenticatable'
Dotenv::Railtie.load
Bundler.require(:pam_authentication) if ENV['PAM_ENABLED'] == 'true'
require_relative '../lib/mastodon/redis_config'
module Mastodon
  class Application < Rails::Application
    # Initialize configuration defaults for originally generated Rails version.
    config.load_defaults 5.2

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # All translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    config.i18n.available_locales = [
      :en,
      :ar,
      :ast,
      :bg,
      :ca,
      :co,
      :cs,
      :cy,
      :da,
      :de,
      :el,
      :eo,
      :es,
      :eu,
      :fa,
      :fi,
      :fr,
      :gl,
      :he,
      :hr,
      :hu,
      :hy,
      :id,
      :io,
      :it,
      :ja,
      :ka,
      :ko,
      :nl,
      :no,
      :oc,
      :pl,
      :pt,
      :'pt-BR',
      :ro,
      :ru,
      :sk,
      :sl,
      :sr,
      :'sr-Latn',
      :sv,
      :ta,
      :te,
      :th,
      :tr,
      :uk,
      :'zh-CN',
      :'zh-HK',
      :'zh-TW',
    ]

    # Honor DEFAULT_LOCALE from the environment, falling back to English
    # when it is unset or not in the supported list above.
    config.i18n.default_locale = ENV['DEFAULT_LOCALE']&.to_sym

    unless config.i18n.available_locales.include?(config.i18n.default_locale)
      config.i18n.default_locale = :en
    end

    # config.paths.add File.join('app', 'api'), glob: File.join('**', '*.rb')
    # config.autoload_paths += Dir[Rails.root.join('app', 'api', '*')]

    config.active_job.queue_adapter = :sidekiq

    config.middleware.use Rack::Attack
    config.middleware.use Rack::Deflater

    # Doorkeeper (OAuth) customizations must be re-applied on each reload.
    config.to_prepare do
      Doorkeeper::AuthorizationsController.layout 'modal'
      Doorkeeper::AuthorizedApplicationsController.layout 'admin'
      Doorkeeper::Application.send :include, ApplicationExtension
    end
  end
end
| SerCom-KC/mastodon | config/application.rb | Ruby | agpl-3.0 | 2,782 |
/*
* BioJava development code
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. If you do not have a copy,
* see:
*
* http://www.gnu.org/copyleft/lesser.html
*
* Copyright for this code is held jointly by the individual
* authors. These should be listed in @author doc comments.
*
* For more information on the BioJava project and its aims,
* or to join the biojava-l mailing list, visit the home page
* at:
*
* http://www.biojava.org/
*
*/
package org.biojava.nbio.structure.io.mmcif.model;
/** A bean for the PDBX_NONPOLY_SCHEME category, which provides residue level nomenclature
* mapping for non-polymer entities.
* @author Andreas Prlic
* @since 1.7
*/
public class PdbxNonPolyScheme {
	String asym_id;        // asym (chain) identifier
	String entity_id;      // entity this residue belongs to
	String seq_id;         // sequence identifier
	String mon_id;         // monomer (component) identifier
	String ndb_seq_num;    // NDB sequence number
	String pdb_seq_num ;   // PDB sequence number
	String auth_seq_num ;  // author-assigned sequence number
	String pdb_mon_id;     // PDB monomer identifier
	String auth_mon_id;    // author-assigned monomer identifier
	String pdb_strand_id;  // PDB strand (chain) identifier
	String pdb_ins_code;   // PDB insertion code

	public String getAsym_id() {
		return asym_id;
	}
	public void setAsym_id(String asym_id) {
		this.asym_id = asym_id;
	}
	public String getEntity_id() {
		return entity_id;
	}
	public void setEntity_id(String entity_id) {
		this.entity_id = entity_id;
	}
	public String getSeq_id() {
		return seq_id;
	}
	public void setSeq_id(String seq_id) {
		this.seq_id = seq_id;
	}
	public String getMon_id() {
		return mon_id;
	}
	public void setMon_id(String mon_id) {
		this.mon_id = mon_id;
	}
	public String getNdb_seq_num() {
		return ndb_seq_num;
	}
	public void setNdb_seq_num(String ndb_seq_num) {
		this.ndb_seq_num = ndb_seq_num;
	}
	public String getPdb_seq_num() {
		return pdb_seq_num;
	}
	public void setPdb_seq_num(String pdb_seq_num) {
		this.pdb_seq_num = pdb_seq_num;
	}
	public String getAuth_seq_num() {
		return auth_seq_num;
	}
	public void setAuth_seq_num(String auth_seq_num) {
		this.auth_seq_num = auth_seq_num;
	}
	public String getPdb_mon_id() {
		return pdb_mon_id;
	}
	public void setPdb_mon_id(String pdb_mon_id) {
		this.pdb_mon_id = pdb_mon_id;
	}
	public String getAuth_mon_id() {
		return auth_mon_id;
	}
	public void setAuth_mon_id(String auth_mon_id) {
		this.auth_mon_id = auth_mon_id;
	}
	public String getPdb_strand_id() {
		return pdb_strand_id;
	}
	public void setPdb_strand_id(String pdb_strand_id) {
		this.pdb_strand_id = pdb_strand_id;
	}
	public String getPdb_ins_code() {
		return pdb_ins_code;
	}
	public void setPdb_ins_code(String pdb_ins_code) {
		this.pdb_ins_code = pdb_ins_code;
	}
}
| fionakim/biojava | biojava-structure/src/main/java/org/biojava/nbio/structure/io/mmcif/model/PdbxNonPolyScheme.java | Java | lgpl-2.1 | 2,607 |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2008, Red Hat Middleware LLC, and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.wildfly.iiop.openjdk.rmi.ir;
import org.omg.CORBA.IDLTypeOperations;
/**
* Interface of local IDL types.
*
* @author <a href="mailto:osh@sparre.dk">Ole Husgaard</a>
* @version $Revision: 81018 $
*/
// Marker interface only: combines the CORBA IDLType operations with the
// local IR object contract and declares no members of its own.
interface LocalIDLType extends IDLTypeOperations, LocalIRObject {
}
| xasx/wildfly | iiop-openjdk/src/main/java/org/wildfly/iiop/openjdk/rmi/ir/LocalIDLType.java | Java | lgpl-2.1 | 1,319 |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.formatting;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
* Allows a custom language plugin to build a formatting model for a file in the language, or
* for a portion of a file.
* A formatting model defines how a file is broken into non-whitespace blocks and different
* types of whitespace (alignment, indents and wraps) between them.
* <p>For certain aspects of the custom formatting to work properly, it is recommended to use TokenType.WHITE_SPACE
* as the language's whitespace tokens. See {@link com.intellij.lang.ParserDefinition}
*
* @apiNote in case you getting a {@link StackOverflowError}, with your builder, most likely you haven't implemented any model building
* methods. Please, implement {@link #createModel(FormattingContext)}
* @see com.intellij.lang.LanguageFormatting
* @see FormattingModelProvider#createFormattingModelForPsiFile(PsiFile, Block, CodeStyleSettings)
*/
public interface FormattingModelBuilder {
  /**
   * Requests building the formatting model for a section of the file containing
   * the specified PSI element and its children.
   *
   * @return the formatting model for the file.
   * @see FormattingContext
   */
  default @NotNull FormattingModel createModel(@NotNull FormattingContext formattingContext) {
    // Compatibility bridge: delegates to the deprecated 4-argument overload so old
    // implementations keep working. Implementors MUST override at least one
    // createModel() variant, otherwise this delegation chain loops back here and
    // ends in a StackOverflowError (see the interface-level apiNote).
    return createModel(formattingContext.getPsiElement(),
                       formattingContext.getFormattingRange(),
                       formattingContext.getCodeStyleSettings(),
                       formattingContext.getFormattingMode());
  }
  /**
   * Returns the TextRange which should be processed by the formatter in order to detect proper indent options.
   *
   * @param file the file in which the line break is inserted.
   * @param offset the line break offset.
   * @param elementAtOffset the parameter at {@code offset}
   * @return the range to reformat, or null if the default range should be used
   */
  default @Nullable TextRange getRangeAffectingIndent(PsiFile file, int offset, ASTNode elementAtOffset) {
    // Default: no special range; the formatter falls back to its own heuristics.
    return null;
  }
  /**
   * @deprecated use {@link #createModel(FormattingContext)}
   */
  @Deprecated
  @ApiStatus.ScheduledForRemoval(inVersion = "2021.1")
  default @NotNull FormattingModel createModel(final @NotNull PsiElement element,
                                               final @NotNull TextRange range,
                                               final @NotNull CodeStyleSettings settings,
                                               final @NotNull FormattingMode mode) {
    return createModel(element, settings, mode); // just for compatibility with old implementations
  }
  /**
   * @deprecated use {@link #createModel(FormattingContext)}
   */
  @Deprecated
  @ApiStatus.ScheduledForRemoval(inVersion = "2021.1")
  default @NotNull FormattingModel createModel(final @NotNull PsiElement element,
                                               final @NotNull CodeStyleSettings settings,
                                               @NotNull FormattingMode mode) {
    // Drops the mode parameter; legacy implementations never used it.
    return createModel(element, settings);
  }
  /**
   * @deprecated use {@link #createModel(FormattingContext)}
   */
  @Deprecated
  @ApiStatus.ScheduledForRemoval(inVersion = "2021.1")
  default @NotNull FormattingModel createModel(final PsiElement element, final CodeStyleSettings settings) {
    // Closes the circle back to the modern entry point via a freshly built context.
    return createModel(FormattingContext.create(element, settings));
  }
}
| siosio/intellij-community | platform/code-style-api/src/com/intellij/formatting/FormattingModelBuilder.java | Java | apache-2.0 | 3,839 |
/*
* Copyright 2014 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.apiman.manager.api.beans.clients;
/**
* The various client statuses.
*
* @author eric.wittmann@redhat.com
*/
public enum ClientStatus {
    // Declaration order is significant: it fixes ordinal() and values() order,
    // so new states must be appended rather than inserted.
    Created,
    Ready,
    Registered,
    Retired
}
| jasonchaffee/apiman | manager/api/beans/src/main/java/io/apiman/manager/api/beans/clients/ClientStatus.java | Java | apache-2.0 | 790 |
/*
* Copyright 2015-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.ui.table;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.onosproject.ui.JsonUtils;
import org.onosproject.ui.RequestHandler;
import static org.onosproject.ui.table.TableModel.sortDir;
/**
* Message handler specifically for table views.
*/
public abstract class TableRequestHandler extends RequestHandler {

    // Sort-parameter keys that may arrive in the request payload.
    private static final String FIRST_COL = "firstCol";
    private static final String FIRST_DIR = "firstDir";
    private static final String SECOND_COL = "secondCol";
    private static final String SECOND_DIR = "secondDir";
    private static final String ASC = "asc";

    // Keys written into the response payload.
    private static final String ANNOTS = "annots";
    private static final String NO_ROWS_MSG_KEY = "no_rows_msg";

    private final String respType;
    private final String nodeName;

    /**
     * Constructs a table request handler for a specific table view. When a
     * table request arrives, this handler generates the table rows, sorts
     * them according to the sort parameters in the request, and sends the
     * result back to the client.
     *
     * @param reqType  type of the request event
     * @param respType type of the response event
     * @param nodeName name of the JSON node that carries the row data
     */
    public TableRequestHandler(String reqType, String respType, String nodeName) {
        super(reqType);
        this.respType = respType;
        this.nodeName = nodeName;
    }

    @Override
    public void process(long sid, ObjectNode payload) {
        // Build and fill the model, then sort it as the client requested.
        TableModel model = createTableModel();
        populateTable(model, payload);

        String primaryCol = JsonUtils.string(payload, FIRST_COL, defaultColumnId());
        String primaryDir = JsonUtils.string(payload, FIRST_DIR, ASC);
        String secondaryCol = JsonUtils.string(payload, SECOND_COL, null);
        String secondaryDir = JsonUtils.string(payload, SECOND_DIR, null);
        model.sort(primaryCol, sortDir(primaryDir), secondaryCol, sortDir(secondaryDir));

        addTableConfigAnnotations(model, payload);

        // Assemble the response: row data under nodeName, annotations under ANNOTS.
        ObjectNode root = MAPPER.createObjectNode();
        root.set(nodeName, TableUtils.generateRowArrayNode(model));
        root.set(ANNOTS, TableUtils.generateAnnotObjectNode(model));
        sendMessage(respType, 0, root);
    }

    /**
     * Creates an empty table model, initialized with the column identifiers
     * supplied by {@link #getColumnIds()} and ready to be populated.
     * <p>
     * This default implementation uses default formatters and comparators
     * for every column.
     *
     * @return an empty table model
     */
    protected TableModel createTableModel() {
        return new TableModel(getColumnIds());
    }

    /**
     * Adds table-configuration-specific annotations to the table model.
     *
     * @param tm      a table model
     * @param payload the event payload from the client
     */
    protected void addTableConfigAnnotations(TableModel tm, ObjectNode payload) {
        tm.addAnnotation(NO_ROWS_MSG_KEY, noRowsMessage(payload));
    }

    /**
     * Returns the column identifier to sort on when the payload does not
     * specify one.
     * <p>
     * This default implementation returns "id".
     *
     * @return default sort column identifier
     */
    protected String defaultColumnId() {
        return "id";
    }

    /**
     * Subclasses should return the array of column IDs with which
     * to initialize their table model.
     *
     * @return the column IDs
     */
    protected abstract String[] getColumnIds();

    /**
     * Subclasses should return the message to display in the table when there
     * are no rows to display. For example, a host table might return
     * "No hosts found".
     *
     * @param payload request payload
     * @return the message
     */
    protected abstract String noRowsMessage(ObjectNode payload);

    /**
     * Subclasses should populate the table model by adding
     * {@link TableModel.Row rows}.
     * <pre>
     * tm.addRow()
     *     .cell(COL_ONE, ...)
     *     .cell(COL_TWO, ...)
     *     ... ;
     * </pre>
     * The request payload is provided in case request filtering parameters
     * (other than sort column and direction) are needed to generate the data.
     *
     * @param tm      the table model
     * @param payload request payload
     */
    protected abstract void populateTable(TableModel tm, ObjectNode payload);
} | VinodKumarS-Huawei/ietf96yang | core/api/src/main/java/org/onosproject/ui/table/TableRequestHandler.java | Java | apache-2.0 | 5,111 |
/*
* Copyright (c) WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.entitlement.ui;
/**
* Policy editor related constants
*/
public class PolicyEditorConstants {
    // ---- XACML element / attribute names used when building policies ----
    public static final String ATTRIBUTE_SEPARATOR = ",";
    public static final String TARGET_ELEMENT = "Target";
    public static final String ANY_OF_ELEMENT = "AnyOf";
    public static final String ALL_OF_ELEMENT = "AllOf";
    public static final String COMBINE_FUNCTION_AND = "AND";
    public static final String COMBINE_FUNCTION_OR = "OR";
    public static final String COMBINE_FUNCTION_END = "END";
    public static final String MATCH_ELEMENT = "Match";
    public static final String MATCH_ID = "MatchId";
    public static final String ATTRIBUTE_ID = "AttributeId";
    public static final String CATEGORY = "Category";
    public static final String DATA_TYPE = "DataType";
    public static final String ISSUER = "Issuer";
    // ---- Display names of the SOA policy editor categories ----
    // NOTE(review): USER and SUBJECT intentionally share the value "Subject" —
    // confirm both constants are still needed before consolidating.
    public static final String SOA_CATEGORY_USER = "Subject";
    public static final String SOA_CATEGORY_SUBJECT = "Subject";
    public static final String SOA_CATEGORY_RESOURCE = "Resource";
    public static final String SOA_CATEGORY_ACTION = "Action";
    public static final String SOA_CATEGORY_ENVIRONMENT = "Environment";
    public static final String MUST_BE_PRESENT = "MustBePresent";
    public static final String ATTRIBUTE_DESIGNATOR = "AttributeDesignator";
    // Human-readable pre-function labels shown in the editor UI.
    public static final class PreFunctions {
        public static final String PRE_FUNCTION_IS = "is";
        public static final String PRE_FUNCTION_IS_NOT = "is-not";
        public static final String PRE_FUNCTION_ARE = "are";
        public static final String PRE_FUNCTION_ARE_NOT = "are-not";
        public static final String CAN_DO = "can";
        public static final String CAN_NOT_DO = "can not";
    }
    public static final class TargetPreFunctions {
        public static final String PRE_FUNCTION_IS = "is";
    }
    public static final class TargetFunctions {
        public static final String FUNCTION_EQUAL = "equal";
    }
    public static final String RULE_EFFECT_PERMIT = "Permit";
    public static final String RULE_EFFECT_DENY = "Deny";
    // XACML standard data-type URIs.
    public static final class DataType {
        public static final String DAY_TIME_DURATION = "http://www.w3.org/2001/XMLSchema#dayTimeDuration";
        public static final String YEAR_MONTH_DURATION = "http://www.w3.org/2001/XMLSchema#yearMonthDuration";
        public static final String STRING = "http://www.w3.org/2001/XMLSchema#string";
        public static final String TIME = "http://www.w3.org/2001/XMLSchema#time";
        public static final String IP_ADDRESS = "urn:oasis:names:tc:xacml:2.0:data-type:ipAddress";
        public static final String DATE_TIME = "http://www.w3.org/2001/XMLSchema#dateTime";
        public static final String DATE = "http://www.w3.org/2001/XMLSchema#date";
        public static final String DOUBLE = "http://www.w3.org/2001/XMLSchema#double";
        public static final String INT = "http://www.w3.org/2001/XMLSchema#integer";
    }
    // Short identifiers of the XACML combining algorithms; combined with the
    // RULE_/POLICY_ALGORITHM_IDENTIFIER_* prefixes below to form full URNs.
    public static final class CombiningAlog {
        public static final String DENY_OVERRIDE_ID = "deny-overrides";
        public static final String PERMIT_OVERRIDE_ID = "permit-overrides";
        public static final String FIRST_APPLICABLE_ID = "first-applicable";
        public static final String ORDER_PERMIT_OVERRIDE_ID = "ordered-permit-overrides";
        public static final String ORDER_DENY_OVERRIDE_ID = "ordered-deny-overrides";
        public static final String DENY_UNLESS_PERMIT_ID = "deny-unless-permit";
        public static final String PERMIT_UNLESS_DENY_ID = "permit-unless-deny";
        public static final String ONLY_ONE_APPLICABLE_ID = "only-one-applicable";
    }
    public static final String RULE_ALGORITHM_IDENTIFIER_1 = "urn:oasis:names:tc:xacml:1.0:" +
            "rule-combining-algorithm:";
    public static final String RULE_ALGORITHM_IDENTIFIER_3 = "urn:oasis:names:tc:xacml:3.0:" +
            "rule-combining-algorithm:";
    public static final String POLICY_ALGORITHM_IDENTIFIER_1 = "urn:oasis:names:tc:xacml:1.0:" +
            "policy-combining-algorithm:";
    public static final String POLICY_ALGORITHM_IDENTIFIER_3 = "urn:oasis:names:tc:xacml:3.0:" +
            "policy-combining-algorithm:";
    // Serialization format of editor row data: fields joined by this separator.
    public static final String POLICY_EDITOR_SEPARATOR = "|";
    public static final int POLICY_EDITOR_ROW_DATA = 7;
    public static final String DYNAMIC_SELECTOR_CATEGORY = "Category";
    public static final String DYNAMIC_SELECTOR_FUNCTION = "Function";
    public static final String SUBJECT_ID_DEFAULT= "urn:oasis:names:tc:xacml:1.0:subject:subject-id";
    public static final String SUBJECT_ID_ROLE= "http://wso2.org/claims/role";
    public static final String RESOURCE_ID_DEFAULT = "urn:oasis:names:tc:xacml:1.0:resource:resource-id";
    public static final String ACTION_ID_DEFAULT = "urn:oasis:names:tc:xacml:1.0:action:action-id";
    public static final String ENVIRONMENT_ID_DEFAULT = "urn:oasis:names:tc:xacml:1.0:environment:environment-id";
    // XACML 3.0 attribute-category URIs.
    public static final String RESOURCE_CATEGORY_URI = "urn:oasis:names:tc:xacml:3.0:" +
            "attribute-category:resource";
    public static final String SUBJECT_CATEGORY_URI = "urn:oasis:names:tc:xacml:1.0:" +
            "subject-category:access-subject";
    public static final String ACTION_CATEGORY_URI = "urn:oasis:names:tc:xacml:3.0:" +
            "attribute-category:action";
    public static final String ENVIRONMENT_CATEGORY_URI = "urn:oasis:names:tc:xacml:3.0:" +
            "attribute-category:environment";
    public static final String ENVIRONMENT_CURRENT_DATE = "urn:oasis:names:tc:xacml:1.0:environment:current-date";
    public static final String ENVIRONMENT_CURRENT_TIME = "urn:oasis:names:tc:xacml:1.0:environment:current-time";
    public static final String ENVIRONMENT_CURRENT_DATETIME = "urn:oasis:names:tc:xacml:1.0:environment:current-dateTime";
    public static final String SOA_POLICY_EDITOR = "SOA";
    // Tokens recognized by the editor's function-expression mini-language.
    public static class FunctionIdentifier {
        public static final String ANY = "*";
        public static final String EQUAL_RANGE = "[";
        public static final String EQUAL_RANGE_CLOSE = "]";
        public static final String RANGE = "(";
        public static final String RANGE_CLOSE = ")";
        public static final String GREATER = ">";
        public static final String GREATER_EQUAL = ">=";
        public static final String LESS = "<";
        public static final String LESS_EQUAL = "<=";
        public static final String REGEX = "{";
        public static final String AND = "&";
        public static final String OR = "|";
    }
    // Display names of well-known attribute ids offered by the editor.
    public static final class AttributeId {
        public static final String ENV_DOMAIN = "Domain";
        public static final String ENV_DATE = "Date";
        public static final String ENV_DATE_TIME = "DateTime";
        public static final String ENV_IP = "IP";
        public static final String ENV_TIME = "Time";
        public static final String USER_AGE = "Age";
    }
}
| SupunS/carbon-identity | components/identity/org.wso2.carbon.identity.entitlement.ui/src/main/java/org/wso2/carbon/identity/entitlement/ui/PolicyEditorConstants.java | Java | apache-2.0 | 8,025 |
package health;
import com.comcast.cdn.traffic_control.traffic_monitor.config.Cache;
import com.comcast.cdn.traffic_control.traffic_monitor.health.CacheStateUpdater;
import com.comcast.cdn.traffic_control.traffic_monitor.health.CacheStatisticsClient;
import com.ning.http.client.AsyncHttpClient;
import com.ning.http.client.ListenableFuture;
import com.ning.http.client.ProxyServer;
import com.ning.http.client.Request;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.powermock.api.mockito.PowerMockito.whenNew;
@PrepareForTest({CacheStatisticsClient.class, AsyncHttpClient.class, ProxyServer.class})
@RunWith(PowerMockRunner.class)
public class CacheStatisticsClientTest {
    @Test
    public void itExecutesAsynchronousRequest() throws Exception {
        // Spy on a real AsyncHttpClient so executeRequest() returns a canned
        // future instead of doing network I/O; whenNew() makes the class under
        // test receive this spy when it constructs its own AsyncHttpClient.
        ListenableFuture listenableFuture = mock(ListenableFuture.class);
        AsyncHttpClient asyncHttpClient = spy(new AsyncHttpClient());
        doReturn(listenableFuture).when(asyncHttpClient).executeRequest(any(Request.class), any(CacheStateUpdater.class));
        whenNew(AsyncHttpClient.class).withNoArguments().thenReturn(asyncHttpClient);
        // Minimal cache fixture with just the fields the client reads.
        Cache cache = mock(Cache.class);
        when(cache.getQueryIp()).thenReturn("192.168.99.100");
        when(cache.getQueryPort()).thenReturn(0);
        when(cache.getStatisticsUrl()).thenReturn("http://cache1.example.com/astats");
        CacheStateUpdater cacheStateUpdater = mock(CacheStateUpdater.class);
        CacheStatisticsClient cacheStatisticsClient = new CacheStatisticsClient();
        cacheStatisticsClient.fetchCacheStatistics(cache, cacheStateUpdater);
        // The updater must be handed the future so it can track completion.
        verify(cacheStateUpdater).setFuture(listenableFuture);
    }
}
| dneuman64/traffic_control | traffic_monitor/src/test/java/health/CacheStatisticsClientTest.java | Java | apache-2.0 | 1,971 |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.psi.impl;
import com.intellij.lang.ASTNode;
import com.intellij.navigation.ItemPresentation;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiReference;
import com.intellij.psi.StubBasedPsiElement;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.psi.search.SearchScope;
import com.intellij.psi.stubs.IStubElementType;
import com.intellij.psi.stubs.StubElement;
import com.intellij.psi.util.CachedValueProvider;
import com.intellij.psi.util.CachedValuesManager;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.QualifiedName;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.PlatformIcons;
import com.jetbrains.python.PyElementTypes;
import com.jetbrains.python.PyNames;
import com.jetbrains.python.PyTokenTypes;
import com.jetbrains.python.codeInsight.controlflow.ControlFlowCache;
import com.jetbrains.python.codeInsight.controlflow.ScopeOwner;
import com.jetbrains.python.codeInsight.dataflow.scope.ScopeUtil;
import com.jetbrains.python.documentation.docstrings.DocStringUtil;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.psi.resolve.PyResolveContext;
import com.jetbrains.python.psi.resolve.QualifiedNameFinder;
import com.jetbrains.python.psi.stubs.PyClassStub;
import com.jetbrains.python.psi.stubs.PyFunctionStub;
import com.jetbrains.python.psi.stubs.PyTargetExpressionStub;
import com.jetbrains.python.psi.types.*;
import com.jetbrains.python.sdk.PythonSdkType;
import icons.PythonIcons;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.*;
import static com.intellij.openapi.util.text.StringUtil.notNullize;
import static com.jetbrains.python.psi.PyFunction.Modifier.CLASSMETHOD;
import static com.jetbrains.python.psi.PyFunction.Modifier.STATICMETHOD;
import static com.jetbrains.python.psi.impl.PyCallExpressionHelper.interpretAsModifierWrappingCall;
/**
* Implements PyFunction.
*/
public class PyFunctionImpl extends PyBaseElementImpl<PyFunctionStub> implements PyFunction {
  // AST-based constructor, used when the element is built from parsed source.
  public PyFunctionImpl(ASTNode astNode) {
    super(astNode);
  }
  // Stub-based constructor using the default function-declaration element type.
  public PyFunctionImpl(final PyFunctionStub stub) {
    this(stub, PyElementTypes.FUNCTION_DECLARATION);
  }
  // Stub-based constructor that lets the caller supply the element type.
  public PyFunctionImpl(PyFunctionStub stub, IStubElementType nodeType) {
    super(stub, nodeType);
  }
  // Provider for the cached parsed docstring; the cached value is dropped when
  // this function changes (the function itself is the dependency passed to
  // Result.create()).
  private class CachedStructuredDocStringProvider implements CachedValueProvider<StructuredDocString> {
    @Nullable
    @Override
    public Result<StructuredDocString> compute() {
      final PyFunctionImpl f = PyFunctionImpl.this;
      return Result.create(DocStringUtil.getStructuredDocString(f), f);
    }
  }
private CachedStructuredDocStringProvider myCachedStructuredDocStringProvider = new CachedStructuredDocStringProvider();
  @Nullable
  @Override
  public String getName() {
    // Prefer the stub to avoid forcing the AST to load.
    final PyFunctionStub stub = getStub();
    if (stub != null) {
      return stub.getName();
    }
    ASTNode node = getNameNode();
    return node != null ? node.getText() : null;
  }
public PsiElement getNameIdentifier() {
final ASTNode nameNode = getNameNode();
return nameNode != null ? nameNode.getPsi() : null;
}
  public PsiElement setName(@NotNull String name) throws IncorrectOperationException {
    // Build a fresh identifier node and splice it in place of the old one.
    final ASTNode nameElement = PyUtil.createNewName(this, name);
    final ASTNode nameNode = getNameNode();
    if (nameNode != null) {
      getNode().replaceChild(nameNode, nameElement);
    }
    return this;
  }
  @Override
  public Icon getIcon(int flags) {
    if (isValid()) {
      // Property accessors get specialized icons; plain methods get the method icon.
      final Property property = getProperty();
      if (property != null) {
        if (property.getGetter().valueOrNull() == this) {
          return PythonIcons.Python.PropertyGetter;
        }
        if (property.getSetter().valueOrNull() == this) {
          return PythonIcons.Python.PropertySetter;
        }
        if (property.getDeleter().valueOrNull() == this) {
          return PythonIcons.Python.PropertyDeleter;
        }
        // Part of a property but not one of its recognized accessors.
        return PlatformIcons.PROPERTY_ICON;
      }
      if (getContainingClass() != null) {
        return PlatformIcons.METHOD_ICON;
      }
    }
    // Fallback for invalid elements and top-level functions.
    return PythonIcons.Python.Function;
  }
  @Nullable
  public ASTNode getNameNode() {
    // The identifier token holding the function name; absent in incomplete code.
    return getNode().findChildByType(PyTokenTypes.IDENTIFIER);
  }
  @NotNull
  public PyParameterList getParameterList() {
    // Stub-aware access; the parameter list child is required to exist.
    return getRequiredStubOrPsiChild(PyElementTypes.PARAMETER_LIST);
  }
  @Override
  @NotNull
  public PyStatementList getStatementList() {
    final PyStatementList statementList = childToPsi(PyElementTypes.STATEMENT_LIST);
    // The parser is expected to always attach a statement list to a function.
    assert statementList != null : "Statement list missing for function " + getText();
    return statementList;
  }
public PyClass getContainingClass() {
final PyFunctionStub stub = getStub();
if (stub != null) {
final StubElement parentStub = stub.getParentStub();
if (parentStub instanceof PyClassStub) {
return ((PyClassStub)parentStub).getPsi();
}
return null;
}
final PsiElement parent = PsiTreeUtil.getParentOfType(this, StubBasedPsiElement.class);
if (parent instanceof PyClass) {
return (PyClass)parent;
}
return null;
}
  @Nullable
  public PyDecoratorList getDecoratorList() {
    // Stub-aware lookup of the decorator list preceding the definition.
    return getStubOrPsiChild(PyElementTypes.DECORATOR_LIST); // PsiTreeUtil.getChildOfType(this, PyDecoratorList.class);
  }
  @Nullable
  @Override
  public PyType getReturnType(@NotNull TypeEvalContext context, @NotNull TypeEvalContext.Key key) {
    final PyType type = getReturnType(context);
    // An "async def" function's declared type is wrapped into a coroutine type.
    return isAsync() ? createCoroutineType(type) : type;
  }
  // Infers the return type with the following precedence:
  // 1) registered PyTypeProvider extensions, 2) the docstring's declared type,
  // 3) yield statements (generator type), 4) return statements.
  @Nullable
  private PyType getReturnType(@NotNull TypeEvalContext context) {
    for (PyTypeProvider typeProvider : Extensions.getExtensions(PyTypeProvider.EP_NAME)) {
      final Ref<PyType> returnTypeRef = typeProvider.getReturnType(this, context);
      if (returnTypeRef != null) {
        final PyType returnType = returnTypeRef.get();
        if (returnType != null) {
          returnType.assertValid(typeProvider.toString());
        }
        return returnType;
      }
    }
    final PyType docStringType = getReturnTypeFromDocString();
    if (docStringType != null) {
      docStringType.assertValid("from docstring");
      return docStringType;
    }
    // Analyzing the body is only allowed when the context permits it
    // (it may be too expensive during some highlighting passes).
    if (context.allowReturnTypes(this)) {
      final Ref<? extends PyType> yieldTypeRef = getYieldStatementType(context);
      if (yieldTypeRef != null) {
        return yieldTypeRef.get();
      }
      return getReturnStatementType(context);
    }
    return null;
  }
  @Nullable
  @Override
  public PyType getCallType(@NotNull TypeEvalContext context, @NotNull PyCallSiteExpression callSite) {
    // Type providers may short-circuit the whole analysis for known functions.
    for (PyTypeProvider typeProvider : Extensions.getExtensions(PyTypeProvider.EP_NAME)) {
      final PyType type = typeProvider.getCallType(this, callSite, context);
      if (type != null) {
        type.assertValid(typeProvider.toString());
        return type;
      }
    }
    // Map call-site arguments onto this function's parameters, then analyze.
    final PyExpression receiver = PyTypeChecker.getReceiver(callSite, this);
    final List<PyExpression> arguments = PyTypeChecker.getArguments(callSite, this);
    final List<PyParameter> parameters = PyUtil.getParameters(this, context);
    final PyResolveContext resolveContext = PyResolveContext.noImplicits().withTypeEvalContext(context);
    final List<PyParameter> explicitParameters = PyTypeChecker.filterExplicitParameters(parameters, this, callSite, resolveContext);
    final Map<PyExpression, PyNamedParameter> mapping = PyCallExpressionHelper.mapArguments(arguments, explicitParameters);
    return getCallType(receiver, mapping, context);
  }
  @Nullable
  @Override
  public PyType getCallType(@Nullable PyExpression receiver,
                            @NotNull Map<PyExpression, PyNamedParameter> parameters,
                            @NotNull TypeEvalContext context) {
    // Start from the declared/inferred return type and specialize it per call site.
    return analyzeCallType(context.getReturnType(this), receiver, parameters, context);
  }
  // Specializes the raw return type for a concrete call: substitutes generic
  // type parameters, replaces "self"-typed results with the receiver's type,
  // and weakens the result when any argument type is dynamically evaluated.
  @Nullable
  private PyType analyzeCallType(@Nullable PyType type,
                                 @Nullable PyExpression receiver,
                                 @NotNull Map<PyExpression, PyNamedParameter> parameters,
                                 @NotNull TypeEvalContext context) {
    if (PyTypeChecker.hasGenerics(type, context)) {
      final Map<PyGenericType, PyType> substitutions = PyTypeChecker.unifyGenericCall(receiver, parameters, context);
      if (substitutions != null) {
        type = PyTypeChecker.substitute(type, substitutions, context);
      }
      else {
        // Unification failed: the generics cannot be resolved for this call.
        type = null;
      }
    }
    if (receiver != null) {
      type = replaceSelf(type, receiver, context);
    }
    if (type != null && isDynamicallyEvaluated(parameters.values(), context)) {
      type = PyUnionType.createWeakType(type);
    }
    return type;
  }
  @Override
  public ItemPresentation getPresentation() {
    return new PyElementPresentation(this) {
      @Nullable
      @Override
      public String getPresentableText() {
        // e.g. "foo(a, b)"; falls back to a placeholder for unnamed functions.
        return notNullize(getName(), PyNames.UNNAMED_ELEMENT) + getParameterList().getPresentableText(true);
      }
      @Nullable
      @Override
      public String getLocationString() {
        // Methods show their class and package; top-level functions use the default.
        final PyClass containingClass = getContainingClass();
        if (containingClass != null) {
          return "(" + containingClass.getName() + " in " + getPackageForFile(getContainingFile()) + ")";
        }
        return super.getLocationString();
      }
    };
  }
  // If the declared return type is this function's own class, substitute the
  // actual receiver's (sub)class type so that e.g. fluent APIs keep subclass types.
  @Nullable
  private PyType replaceSelf(@Nullable PyType returnType, @Nullable PyExpression receiver, @NotNull TypeEvalContext context) {
    if (receiver != null) {
      // TODO: Currently we substitute only simple subclass types, but we could handle union and collection types as well
      if (returnType instanceof PyClassType) {
        final PyClassType returnClassType = (PyClassType)returnType;
        if (returnClassType.getPyClass() == getContainingClass()) {
          final PyType receiverType = context.getType(receiver);
          if (receiverType instanceof PyClassType && PyTypeChecker.match(returnType, receiverType, context)) {
            // Preserve the definition-vs-instance distinction of the declared type.
            return returnClassType.isDefinition() ? receiverType : ((PyClassType)receiverType).toInstance();
          }
        }
      }
    }
    return returnType;
  }
private static boolean isDynamicallyEvaluated(@NotNull Collection<PyNamedParameter> parameters, @NotNull TypeEvalContext context) {
for (PyNamedParameter parameter : parameters) {
final PyType type = context.getType(parameter);
if (type instanceof PyDynamicallyEvaluatedType) {
return true;
}
}
return false;
}
  // Collects the types of all yield expressions directly inside this function
  // (nested functions are skipped) and, when any are found, builds the fake
  // generator type Generator[element, ?, return-type]. Returns null when the
  // function has no yields at all; returns Ref(null) when yields exist but no
  // generator class is available.
  @Nullable
  private Ref<? extends PyType> getYieldStatementType(@NotNull final TypeEvalContext context) {
    Ref<PyType> elementType = null;
    final PyBuiltinCache cache = PyBuiltinCache.getInstance(this);
    final PyStatementList statements = getStatementList();
    final Set<PyType> types = new LinkedHashSet<PyType>();
    statements.accept(new PyRecursiveElementVisitor() {
      @Override
      public void visitPyYieldExpression(PyYieldExpression node) {
        final PyType type = context.getType(node);
        if (node.isDelegating() && type instanceof PyCollectionType) {
          // "yield from iterable": the yielded element type is the iterable's element type.
          final PyCollectionType collectionType = (PyCollectionType)type;
          // TODO: Select the parameter types that matches T in Iterable[T]
          final List<PyType> elementTypes = collectionType.getElementTypes(context);
          types.add(elementTypes.isEmpty() ? null : elementTypes.get(0));
        }
        else {
          types.add(type);
        }
      }
      @Override
      public void visitPyFunction(PyFunction node) {
        // Ignore nested functions
      }
    });
    final int n = types.size();
    if (n == 1) {
      elementType = Ref.create(types.iterator().next());
    }
    else if (n > 0) {
      elementType = Ref.create(PyUnionType.union(types));
    }
    if (elementType != null) {
      final PyClass generator = cache.getClass(PyNames.FAKE_GENERATOR);
      if (generator != null) {
        final List<PyType> parameters = Arrays.asList(elementType.get(), null, getReturnStatementType(context));
        return Ref.create(new PyCollectionTypeImpl(generator, false, parameters));
      }
    }
    if (!types.isEmpty()) {
      // Yields were found but the generator class could not be built.
      return Ref.create(null);
    }
    return null;
  }
  @Nullable
  public PyType getReturnStatementType(TypeEvalContext typeEvalContext) {
    final ReturnVisitor visitor = new ReturnVisitor(this, typeEvalContext);
    final PyStatementList statements = getStatementList();
    statements.accept(visitor);
    // In generated skeleton stubs an absent return statement carries no signal,
    // except that __init__ is known to return None.
    if (isGeneratedStub() && !visitor.myHasReturns) {
      if (PyNames.INIT.equals(getName())) {
        return PyNoneType.INSTANCE;
      }
      return null;
    }
    return visitor.result();
  }
  // Wraps a return type into the fake Coroutine collection type for "async def"
  // functions; a type that is already a coroutine is returned unchanged.
  @Nullable
  private PyType createCoroutineType(@Nullable PyType returnType) {
    final PyBuiltinCache cache = PyBuiltinCache.getInstance(this);
    if (returnType instanceof PyClassLikeType && PyNames.FAKE_COROUTINE.equals(((PyClassLikeType)returnType).getClassQName())) {
      return returnType;
    }
    final PyClass generator = cache.getClass(PyNames.FAKE_COROUTINE);
    return generator != null ? new PyCollectionTypeImpl(generator, false, Collections.singletonList(returnType)) : null;
  }
public PyFunction asMethod() {
if (getContainingClass() != null) {
return this;
}
else {
return null;
}
}
@Nullable
@Override
public PyType getReturnTypeFromDocString() {
final String typeName = extractReturnType();
return typeName != null ? PyTypeParser.getTypeByName(this, typeName) : null;
}
@Nullable
@Override
public String getDeprecationMessage() {
PyFunctionStub stub = getStub();
if (stub != null) {
return stub.getDeprecationMessage();
}
return extractDeprecationMessage();
}
@Nullable
public String extractDeprecationMessage() {
PyStatementList statementList = getStatementList();
return extractDeprecationMessage(Arrays.asList(statementList.getStatements()));
}
  @Override
  public PyType getType(@NotNull TypeEvalContext context, @NotNull TypeEvalContext.Key key) {
    // Registered providers get the first chance to supply a callable type.
    for (PyTypeProvider provider : Extensions.getExtensions(PyTypeProvider.EP_NAME)) {
      final PyType type = provider.getCallableType(this, context);
      if (type != null) {
        return type;
      }
    }
    final PyFunctionTypeImpl type = new PyFunctionTypeImpl(this);
    // An unrecognized decorator may transform the callable arbitrarily, so
    // weaken the type — unless this function is a property accessor.
    if (PyKnownDecoratorUtil.hasUnknownDecorator(this, context) && getProperty() == null) {
      return PyUnionType.createWeakType(type);
    }
    return type;
  }
  /**
   * Looks through the given top-level statements for a call of the form
   * {@code warn(..., DeprecationWarning)} or {@code warn(..., PendingDeprecationWarning)}
   * and returns the string value of its first argument, or null if none is found.
   */
  @Nullable
  public static String extractDeprecationMessage(List<PyStatement> statements) {
    for (PyStatement statement : statements) {
      if (statement instanceof PyExpressionStatement) {
        PyExpressionStatement expressionStatement = (PyExpressionStatement)statement;
        if (expressionStatement.getExpression() instanceof PyCallExpression) {
          PyCallExpression callExpression = (PyCallExpression)expressionStatement.getExpression();
          if (callExpression.isCalleeText(PyNames.WARN)) {
            // The warning class is the second argument of warn().
            PyReferenceExpression warningClass = callExpression.getArgument(1, PyReferenceExpression.class);
            if (warningClass != null && (PyNames.DEPRECATION_WARNING.equals(warningClass.getReferencedName()) ||
                                         PyNames.PENDING_DEPRECATION_WARNING.equals(warningClass.getReferencedName()))) {
              return PyPsiUtils.strValue(callExpression.getArguments()[0]);
            }
          }
        }
      }
    }
    return null;
  }
@Override
public String getDocStringValue() {
final PyFunctionStub stub = getStub();
if (stub != null) {
return stub.getDocString();
}
return DocStringUtil.getDocStringValue(this);
}
  @Nullable
  @Override
  public StructuredDocString getStructuredDocString() {
    // Parsing the docstring is cached and invalidated when this function changes.
    return CachedValuesManager.getCachedValue(this, myCachedStructuredDocStringProvider);
  }
private boolean isGeneratedStub() {
VirtualFile vFile = getContainingFile().getVirtualFile();
if (vFile != null) {
vFile = vFile.getParent();
if (vFile != null) {
vFile = vFile.getParent();
if (vFile != null && vFile.getName().equals(PythonSdkType.SKELETON_DIR_NAME)) {
return true;
}
}
}
return false;
}
@Nullable
private String extractReturnType() {
final String ARROW = "->";
final StructuredDocString structuredDocString = getStructuredDocString();
if (structuredDocString != null) {
return structuredDocString.getReturnType();
}
final String docString = getDocStringValue();
if (docString != null && docString.contains(ARROW)) {
final List<String> lines = StringUtil.split(docString, "\n");
while (lines.size() > 0 && lines.get(0).trim().length() == 0) {
lines.remove(0);
}
if (lines.size() > 1 && lines.get(1).trim().length() == 0) {
String firstLine = lines.get(0);
int pos = firstLine.lastIndexOf(ARROW);
if (pos >= 0) {
return firstLine.substring(pos + 2).trim();
}
}
}
return null;
}
  /**
   * Collects the union of the types of all {@code return} statements that belong
   * directly to the given function; nested functions/lambdas are excluded because
   * they are separate scope owners.
   */
  private static class ReturnVisitor extends PyRecursiveElementVisitor {
    private final PyFunction myFunction;
    private final TypeEvalContext myContext;
    private PyType myResult = null;         // union of the observed return types
    private boolean myHasReturns = false;   // saw at least one return statement
    private boolean myHasRaises = false;    // saw at least one raise statement

    public ReturnVisitor(PyFunction function, final TypeEvalContext context) {
      myFunction = function;
      myContext = context;
    }

    @Override
    public void visitPyReturnStatement(PyReturnStatement node) {
      // Only consider returns whose closest enclosing scope owner is the function itself.
      if (PsiTreeUtil.getParentOfType(node, ScopeOwner.class, true) == myFunction) {
        final PyExpression expr = node.getExpression();
        PyType returnType;
        // A bare 'return' yields None.
        returnType = expr == null ? PyNoneType.INSTANCE : myContext.getType(expr);
        if (!myHasReturns) {
          myResult = returnType;
          myHasReturns = true;
        }
        else {
          myResult = PyUnionType.union(myResult, returnType);
        }
      }
    }

    @Override
    public void visitPyRaiseStatement(PyRaiseStatement node) {
      myHasRaises = true;
    }

    // The accumulated union; None if the function neither returns nor raises.
    // Note: a raise-only function yields null (unknown), not None.
    @Nullable
    PyType result() {
      return myHasReturns || myHasRaises ? myResult : PyNoneType.INSTANCE;
    }
  }
  // Visitor dispatch: route to the PyFunction-specific visit method.
  @Override
  protected void acceptPyVisitor(PyElementVisitor pyVisitor) {
    pyVisitor.visitPyFunction(this);
  }
  // Navigation should land on the function name rather than the 'def' keyword.
  public int getTextOffset() {
    final ASTNode name = getNameNode();
    return name != null ? name.getStartOffset() : super.getTextOffset();
  }
public PyStringLiteralExpression getDocStringExpression() {
final PyStatementList stmtList = getStatementList();
return DocStringUtil.findDocStringExpression(stmtList);
}
  // A function declares exactly one name: itself.
  @NotNull
  public Iterable<PyElement> iterateNames() {
    return Collections.<PyElement>singleton(this);
  }
public PyElement getElementNamed(final String the_name) {
return the_name.equals(getName()) ? this : null;
}
  // The function name itself is resolvable within its own scope.
  public boolean mustResolveOutside() {
    return false;
  }
  @Override
  public String toString() {
    // Append the function name to the default PSI presentation for easier debugging.
    return super.toString() + "('" + getName() + "')";
  }
  public void subtreeChanged() {
    super.subtreeChanged();
    // The control flow graph is derived from the PSI tree; drop it on any change.
    ControlFlowCache.clear(this);
  }
public Property getProperty() {
final PyClass containingClass = getContainingClass();
if (containingClass != null) {
return containingClass.findPropertyByCallable(this);
}
return null;
}
  // Return type annotation ('-> type'), fetched from the stub or PSI as available.
  @Override
  public PyAnnotation getAnnotation() {
    return getStubOrPsiChild(PyElementTypes.ANNOTATION);
  }
  @NotNull
  @Override
  public SearchScope getUseScope() {
    final ScopeOwner scopeOwner = ScopeUtil.getScopeOwner(this);
    // A function nested inside another function cannot be referenced from outside it,
    // so restrict search to the enclosing function.
    if (scopeOwner instanceof PyFunction) {
      return new LocalSearchScope(scopeOwner);
    }
    return super.getUseScope();
  }
  /**
   * Looks for two standard decorators to a function, or a wrapping assignment that closely follows it.
   *
   * @return a flag describing what was detected.
   */
  @Nullable
  public Modifier getModifier() {
    // Explicit @classmethod / @staticmethod decorators win.
    String deconame = getClassOrStaticMethodDecorator();
    if (PyNames.CLASSMETHOD.equals(deconame)) {
      return CLASSMETHOD;
    }
    else if (PyNames.STATICMETHOD.equals(deconame)) {
      return STATICMETHOD;
    }
    // implicit staticmethod __new__
    PyClass cls = getContainingClass();
    if (cls != null && PyNames.NEW.equals(getName()) && cls.isNewStyleClass(null)) {
      return STATICMETHOD;
    }
    //
    if (getStub() != null) {
      return getWrappersFromStub();
    }
    String func_name = getName();
    if (func_name != null) {
      // Look for a wrapping assignment such as 'foo = classmethod(foo)'
      // immediately following the function definition.
      PyAssignmentStatement assignment = PsiTreeUtil.getNextSiblingOfType(this, PyAssignmentStatement.class);
      if (assignment != null) {
        for (Pair<PyExpression, PyExpression> pair : assignment.getTargetsToValuesMapping()) {
          PyExpression value = pair.getSecond();
          if (value instanceof PyCallExpression) {
            PyExpression target = pair.getFirst();
            // The assignment must rebind the function's own name.
            if (target instanceof PyTargetExpression && func_name.equals(target.getName())) {
              Pair<String, PyFunction> interpreted = interpretAsModifierWrappingCall((PyCallExpression)value, this);
              if (interpreted != null) {
                PyFunction original = interpreted.getSecond();
                if (original == this) {
                  String wrapper_name = interpreted.getFirst();
                  if (PyNames.CLASSMETHOD.equals(wrapper_name)) {
                    return CLASSMETHOD;
                  }
                  else if (PyNames.STATICMETHOD.equals(wrapper_name)) {
                    return STATICMETHOD;
                  }
                }
              }
            }
          }
        }
      }
    }
    return null;
  }
@Override
public boolean isAsync() {
final PyFunctionStub stub = getStub();
if (stub != null) {
return stub.isAsync();
}
return getNode().findChildByType(PyTokenTypes.ASYNC_KEYWORD) != null;
}
  /**
   * Stub-based counterpart of the wrapping-assignment detection in
   * {@code getModifier()}: inspects the stub that immediately follows this
   * function for an assignment like {@code foo = classmethod(foo)}.
   */
  @Nullable
  private Modifier getWrappersFromStub() {
    final StubElement parentStub = getStub().getParentStub();
    final List childrenStubs = parentStub.getChildrenStubs();
    int index = childrenStubs.indexOf(getStub());
    // Only the sibling stub directly after this function is considered.
    if (index >= 0 && index < childrenStubs.size() - 1) {
      StubElement nextStub = (StubElement)childrenStubs.get(index + 1);
      if (nextStub instanceof PyTargetExpressionStub) {
        final PyTargetExpressionStub targetExpressionStub = (PyTargetExpressionStub)nextStub;
        if (targetExpressionStub.getInitializerType() == PyTargetExpressionStub.InitializerType.CallExpression) {
          final QualifiedName qualifiedName = targetExpressionStub.getInitializer();
          if (QualifiedName.fromComponents(PyNames.CLASSMETHOD).equals(qualifiedName)) {
            return CLASSMETHOD;
          }
          if (QualifiedName.fromComponents(PyNames.STATICMETHOD).equals(qualifiedName)) {
            return STATICMETHOD;
          }
        }
      }
    }
    return null;
  }
  /**
   * When a function is decorated many decorators, finds the deepest builtin decorator:
   * <pre>
   * @foo
   * @classmethod <b># <-- that's it</b>
   * @bar
   * def moo(cls):
   *   pass
   * </pre>
   *
   * @return name of the built-in decorator, or null (even if there are non-built-in decorators).
   */
  @Nullable
  private String getClassOrStaticMethodDecorator() {
    PyDecoratorList decolist = getDecoratorList();
    if (decolist != null) {
      PyDecorator[] decos = decolist.getDecorators();
      if (decos.length > 0) {
        // Iterate from the innermost (closest to the function) decorator outwards.
        for (int i = decos.length - 1; i >= 0; i -= 1) {
          PyDecorator deco = decos[i];
          String deconame = deco.getName();
          if (PyNames.CLASSMETHOD.equals(deconame) || PyNames.STATICMETHOD.equals(deconame)) {
            return deconame;
          }
          // Plugins may map custom decorator names onto the known built-ins.
          for (PyKnownDecoratorProvider provider : PyUtil.KnownDecoratorProviderHolder.KNOWN_DECORATOR_PROVIDERS) {
            String name = provider.toKnownDecorator(deconame);
            if (name != null) {
              return name;
            }
          }
        }
      }
    }
    return null;
  }
  // Fully qualified name: module path plus containing scopes plus function name.
  @Nullable
  @Override
  public String getQualifiedName() {
    return QualifiedNameFinder.getQualifiedName(this);
  }
  /**
   * Finds sibling assignments of the form {@code func.attr = value}, i.e.
   * attributes set on this function object after its definition.
   */
  @NotNull
  @Override
  public List<PyAssignmentStatement> findAttributes() {
    final List<PyAssignmentStatement> result = new ArrayList<PyAssignmentStatement>();
    for (final PyAssignmentStatement statement : new PsiQuery(this).siblings(PyAssignmentStatement.class).getElements()) {
      for (final PyQualifiedExpression targetExpression : new PsiQuery(statement.getTargets()).filter(PyQualifiedExpression.class)
        .getElements()) {
        final PyExpression qualifier = targetExpression.getQualifier();
        if (qualifier == null) {
          // A plain name target cannot be an attribute of this function.
          continue;
        }
        final PsiReference qualifierReference = qualifier.getReference();
        if (qualifierReference == null) {
          continue;
        }
        // Keep only assignments whose qualifier resolves back to this function.
        if (qualifierReference.isReferenceTo(this)) {
          result.add(statement);
        }
      }
    }
    return result;
  }
  /**
   * Maps the number of leading underscores in the name to a protection level;
   * any underscore count without a dedicated level is treated as PRIVATE.
   */
  @NotNull
  @Override
  public ProtectionLevel getProtectionLevel() {
    final int underscoreLevels = PyUtil.getInitialUnderscores(getName());
    for (final ProtectionLevel level : ProtectionLevel.values()) {
      if (level.getUnderscoreLevel() == underscoreLevels) {
        return level;
      }
    }
    return ProtectionLevel.PRIVATE;
  }
}
| ivan-fedorov/intellij-community | python/src/com/jetbrains/python/psi/impl/PyFunctionImpl.java | Java | apache-2.0 | 26,495 |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.common.settings;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateUpdateTask;
import org.elasticsearch.cluster.LocalNodeMasterListener;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.hash.MessageDigests;
import java.nio.charset.StandardCharsets;
import java.security.NoSuchAlgorithmException;
import java.security.spec.InvalidKeySpecException;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Consumer;
import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.PBEKeySpec;
/**
 * Used to publish secure setting hashes in the cluster state and to validate those hashes against the local values of those same settings.
 * This is colloquially referred to as the secure setting consistency check. It will publish and verify hashes only for the collection
 * of settings passed in the constructor. The settings have to have the {@link Setting.Property#Consistent} property.
 */
public final class ConsistentSettingsService {
    private static final Logger logger = LogManager.getLogger(ConsistentSettingsService.class);

    // Local node settings; the secure values to be hashed/verified are read from here.
    private final Settings settings;
    private final ClusterService clusterService;
    // Settings to publish/verify; each must carry the Setting.Property.Consistent property.
    private final Collection<Setting<?>> secureSettingsCollection;
    private final SecretKeyFactory pbkdf2KeyFactory;

    public ConsistentSettingsService(Settings settings, ClusterService clusterService, Collection<Setting<?>> secureSettingsCollection) {
        this.settings = settings;
        this.clusterService = clusterService;
        this.secureSettingsCollection = secureSettingsCollection;
        // this is used to compute the PBKDF2 hash (the published one)
        try {
            this.pbkdf2KeyFactory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA512");
        } catch (NoSuchAlgorithmException e) {
            throw new RuntimeException("The \"PBKDF2WithHmacSHA512\" algorithm is required for consistent secure settings' hashes", e);
        }
    }

    /**
     * Returns a {@link LocalNodeMasterListener} that will publish hashes of all the settings passed in the constructor. These hashes are
     * published by the master node only. Note that this is not designed for {@link SecureSettings} implementations that are mutable.
     */
    public LocalNodeMasterListener newHashPublisher() {
        // eagerly compute hashes to be published
        final Map<String, String> computedHashesOfConsistentSettings = computeHashesOfConsistentSecureSettings();
        return new HashesPublisher(computedHashesOfConsistentSettings, clusterService);
    }

    /**
     * Verifies that the hashes of consistent secure settings in the latest {@code ClusterState} verify for the values of those same
     * settings on the local node. The settings to be checked are passed in the constructor. Also, validates that a missing local
     * value is also missing in the published set, and vice-versa.
     */
    public boolean areAllConsistent() {
        final ClusterState state = clusterService.state();
        final Map<String, String> publishedHashesOfConsistentSettings = state.metadata().hashesOfConsistentSettings();
        // Keys still to be accounted for after the local settings have been walked.
        final Set<String> publishedSettingKeysToVerify = new HashSet<>();
        publishedSettingKeysToVerify.addAll(publishedHashesOfConsistentSettings.keySet());
        final AtomicBoolean allConsistent = new AtomicBoolean(true);
        forEachConcreteSecureSettingDo(concreteSecureSetting -> {
            final String publishedSaltAndHash = publishedHashesOfConsistentSettings.get(concreteSecureSetting.getKey());
            final byte[] localHash = concreteSecureSetting.getSecretDigest(settings);
            if (publishedSaltAndHash == null && localHash == null) {
                // consistency of missing
                logger.debug(
                    "no published hash for the consistent secure setting [{}] but it also does NOT exist on the local node",
                    concreteSecureSetting.getKey()
                );
            } else if (publishedSaltAndHash == null && localHash != null) {
                // setting missing on master but present locally
                logger.warn(
                    "no published hash for the consistent secure setting [{}] but it exists on the local node",
                    concreteSecureSetting.getKey()
                );
                if (state.nodes().isLocalNodeElectedMaster()) {
                    throw new IllegalStateException(
                        "Master node cannot validate consistent setting. No published hash for ["
                            + concreteSecureSetting.getKey()
                            + "] but setting exists."
                    );
                }
                allConsistent.set(false);
            } else if (publishedSaltAndHash != null && localHash == null) {
                // setting missing locally but present on master
                logger.warn(
                    "the consistent secure setting [{}] does not exist on the local node but there is a published hash for it",
                    concreteSecureSetting.getKey()
                );
                allConsistent.set(false);
            } else {
                assert publishedSaltAndHash != null;
                assert localHash != null;
                // The published value has the form "<salt>:<base64(PBKDF2(localDigest, salt))>".
                final String[] parts = publishedSaltAndHash.split(":");
                if (parts == null || parts.length != 2) {
                    throw new IllegalArgumentException(
                        "published hash ["
                            + publishedSaltAndHash
                            + " ] for secure setting ["
                            + concreteSecureSetting.getKey()
                            + "] is invalid"
                    );
                }
                final String publishedSalt = parts[0];
                final String publishedHash = parts[1];
                // Recompute the salted hash from the local digest and compare with the published one.
                final byte[] computedSaltedHashBytes = computeSaltedPBKDF2Hash(localHash, publishedSalt.getBytes(StandardCharsets.UTF_8));
                final String computedSaltedHash = new String(Base64.getEncoder().encode(computedSaltedHashBytes), StandardCharsets.UTF_8);
                if (false == publishedHash.equals(computedSaltedHash)) {
                    logger.warn(
                        "the published hash [{}] of the consistent secure setting [{}] differs from the locally computed one [{}]",
                        publishedHash,
                        concreteSecureSetting.getKey(),
                        computedSaltedHash
                    );
                    if (state.nodes().isLocalNodeElectedMaster()) {
                        throw new IllegalStateException(
                            "Master node cannot validate consistent setting. The published hash ["
                                + publishedHash
                                + "] of the consistent secure setting ["
                                + concreteSecureSetting.getKey()
                                + "] differs from the locally computed one ["
                                + computedSaltedHash
                                + "]."
                        );
                    }
                    allConsistent.set(false);
                }
            }
            publishedSettingKeysToVerify.remove(concreteSecureSetting.getKey());
        });
        // another case of settings missing locally, when group settings have not expanded to all the keys published
        for (String publishedSettingKey : publishedSettingKeysToVerify) {
            for (Setting<?> setting : secureSettingsCollection) {
                if (setting.match(publishedSettingKey)) {
                    // setting missing locally but present on master
                    logger.warn(
                        "the consistent secure setting [{}] does not exist on the local node but there is a published hash for it",
                        publishedSettingKey
                    );
                    allConsistent.set(false);
                }
            }
        }
        return allConsistent.get();
    }

    /**
     * Iterate over the passed in secure settings, expanding {@link Setting.AffixSetting} to concrete settings, in the scope of the local
     * settings.
     */
    private void forEachConcreteSecureSettingDo(Consumer<SecureSetting<?>> secureSettingConsumer) {
        for (Setting<?> setting : secureSettingsCollection) {
            assert setting.isConsistent() : "[" + setting.getKey() + "] is not a consistent setting";
            if (setting instanceof Setting.AffixSetting<?>) {
                ((Setting.AffixSetting<?>) setting).getAllConcreteSettings(settings).forEach(concreteSetting -> {
                    assert concreteSetting instanceof SecureSetting<?> : "[" + concreteSetting.getKey() + "] is not a secure setting";
                    secureSettingConsumer.accept((SecureSetting<?>) concreteSetting);
                });
            } else if (setting instanceof SecureSetting<?>) {
                secureSettingConsumer.accept((SecureSetting<?>) setting);
            } else {
                assert false : "Unrecognized consistent secure setting [" + setting.getKey() + "]";
            }
        }
    }

    // Builds the map of setting key -> "<salt>:<base64 hash>" to be published;
    // settings without a local value are skipped.
    private Map<String, String> computeHashesOfConsistentSecureSettings() {
        final Map<String, String> hashesBySettingKey = new HashMap<>();
        forEachConcreteSecureSettingDo(concreteSecureSetting -> {
            final byte[] localHash = concreteSecureSetting.getSecretDigest(settings);
            if (localHash != null) {
                final String salt = UUIDs.randomBase64UUID();
                final byte[] publicHash = computeSaltedPBKDF2Hash(localHash, salt.getBytes(StandardCharsets.UTF_8));
                final String encodedPublicHash = new String(Base64.getEncoder().encode(publicHash), StandardCharsets.UTF_8);
                hashesBySettingKey.put(concreteSecureSetting.getKey(), salt + ":" + encodedPublicHash);
            }
        });
        return hashesBySettingKey;
    }

    // PBKDF2 over the hex form of the given digest; the char[] holding the hex
    // representation is overwritten afterwards so the secret does not linger.
    private byte[] computeSaltedPBKDF2Hash(byte[] bytes, byte[] salt) {
        final int iterations = 5000;
        final int keyLength = 512;
        char[] value = null;
        try {
            value = MessageDigests.toHexCharArray(bytes);
            final PBEKeySpec spec = new PBEKeySpec(value, salt, iterations, keyLength);
            final SecretKey key = pbkdf2KeyFactory.generateSecret(spec);
            return key.getEncoded();
        } catch (InvalidKeySpecException e) {
            throw new RuntimeException("Unexpected exception when computing PBKDF2 hash", e);
        } finally {
            if (value != null) {
                Arrays.fill(value, '0');
            }
        }
    }

    // Publishes the precomputed hashes into the cluster state whenever this node
    // becomes master, skipping the update if nothing changed.
    static final class HashesPublisher implements LocalNodeMasterListener {

        // eagerly compute hashes to be published
        final Map<String, String> computedHashesOfConsistentSettings;
        final ClusterService clusterService;

        HashesPublisher(Map<String, String> computedHashesOfConsistentSettings, ClusterService clusterService) {
            this.computedHashesOfConsistentSettings = Map.copyOf(computedHashesOfConsistentSettings);
            this.clusterService = clusterService;
        }

        @Override
        public void onMaster() {
            clusterService.submitStateUpdateTask("publish-secure-settings-hashes", new ClusterStateUpdateTask(Priority.URGENT) {
                @Override
                public ClusterState execute(ClusterState currentState) {
                    final Map<String, String> publishedHashesOfConsistentSettings = currentState.metadata().hashesOfConsistentSettings();
                    if (computedHashesOfConsistentSettings.equals(publishedHashesOfConsistentSettings)) {
                        logger.debug("Nothing to publish. What is already published matches this node's view.");
                        return currentState;
                    } else {
                        return ClusterState.builder(currentState)
                            .metadata(
                                Metadata.builder(currentState.metadata()).hashesOfConsistentSettings(computedHashesOfConsistentSettings)
                            )
                            .build();
                    }
                }

                @Override
                public void onFailure(String source, Exception e) {
                    logger.error("unable to publish secure settings hashes", e);
                }
            });
        }

        @Override
        public void offMaster() {
            logger.trace("I am no longer master, nothing to do");
        }
    }
}
| GlenRSmith/elasticsearch | server/src/main/java/org/elasticsearch/common/settings/ConsistentSettingsService.java | Java | apache-2.0 | 13,526 |
// Copyright (c) Microsoft Open Technologies, Inc. All rights reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using DocumentFormat.OpenXml.Validation;
using System.Diagnostics;
using System.Xml;
namespace DocumentFormat.OpenXml.Internal.SemanticValidation
{
    /// <summary>
    /// Semantic constraint verifying that the relationship id stored in an
    /// attribute refers to a relationship that actually exists on the
    /// containing package part.
    /// </summary>
    internal class RelationshipExistConstraint : SemanticConstraint
    {
        // Index of the attribute that holds the relationship id (r:id).
        private byte _rIdAttribute;

        public RelationshipExistConstraint(byte rIdAttribute)
            : base(SemanticValidationLevel.Part)
        {
            _rIdAttribute = rIdAttribute;
        }

        /// <summary>
        /// Returns null when the attribute is absent/empty or the relationship
        /// exists; otherwise a semantic validation error.
        /// </summary>
        public override ValidationErrorInfo Validate(ValidationContext context)
        {
            OpenXmlSimpleType attributeValue = context.Element.Attributes[_rIdAttribute];

            //if the attribute is omitted, semantic validation will do nothing
            if (attributeValue == null || string.IsNullOrEmpty(attributeValue.InnerText))
            {
                return null;
            }

            if (context.Part.PackagePart.RelationshipExists(attributeValue.InnerText))
            {
                return null;
            }
            else
            {
                string errorDescription = string.Format(System.Globalization.CultureInfo.CurrentUICulture, ValidationResources.Sem_InvalidRelationshipId,
                                                        attributeValue, GetAttributeQualifiedName(context.Element, _rIdAttribute));

                return new ValidationErrorInfo()
                {
                    Id = "Sem_InvalidRelationshipId",
                    ErrorType = ValidationErrorType.Semantic,
                    Node = context.Element,
                    Description = errorDescription
                };
            }
        }
    }
} | JesseQin/Open-XML-SDK | src/ofapi/Validation/SemanticValidation/SemanticConstraint/RelationshipExistConstraint.cs | C# | apache-2.0 | 1,905 |
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_serialization import jsonutils
import six
from nova import block_device
from nova import context
from nova import exception
from nova import objects
from nova.objects import fields
from nova import test
from nova.tests.unit import fake_block_device
from nova.tests.unit import fake_instance
from nova.tests.unit import matchers
from nova.virt import block_device as driver_block_device
from nova.virt import driver
from nova.volume import cinder
from nova.volume import encryptors
class TestDriverBlockDevice(test.NoDBTestCase):
    """Tests conversion of BlockDeviceMapping objects between the database
    representation and the "driver"/"legacy" dict formats consumed by virt
    drivers.

    Each BDM source type below comes in up to three flavours:
      *_bdm_dict           - the raw database/object representation
      *_driver_bdm         - the expected modern driver-format dict
      *_legacy_driver_bdm  - the expected dict produced by .legacy()
    """

    # Maps each BDM source type to the DriverBlockDevice subclass under test.
    driver_classes = {
        'swap': driver_block_device.DriverSwapBlockDevice,
        'ephemeral': driver_block_device.DriverEphemeralBlockDevice,
        'volume': driver_block_device.DriverVolumeBlockDevice,
        'snapshot': driver_block_device.DriverSnapshotBlockDevice,
        'image': driver_block_device.DriverImageBlockDevice,
        'blank': driver_block_device.DriverBlankBlockDevice
    }

    swap_bdm_dict = block_device.BlockDeviceDict(
        {'id': 1, 'instance_uuid': 'fake-instance',
         'device_name': '/dev/sdb1',
         'source_type': 'blank',
         'destination_type': 'local',
         'delete_on_termination': True,
         'guest_format': 'swap',
         'disk_bus': 'scsi',
         'volume_size': 2,
         'boot_index': -1})

    swap_driver_bdm = {
        'device_name': '/dev/sdb1',
        'swap_size': 2,
        'disk_bus': 'scsi'}

    swap_legacy_driver_bdm = {
        'device_name': '/dev/sdb1',
        'swap_size': 2}

    ephemeral_bdm_dict = block_device.BlockDeviceDict(
        {'id': 2, 'instance_uuid': 'fake-instance',
         'device_name': '/dev/sdc1',
         'source_type': 'blank',
         'destination_type': 'local',
         'disk_bus': 'scsi',
         'device_type': 'disk',
         'volume_size': 4,
         'guest_format': 'ext4',
         'delete_on_termination': True,
         'boot_index': -1})

    ephemeral_driver_bdm = {
        'device_name': '/dev/sdc1',
        'size': 4,
        'device_type': 'disk',
        'guest_format': 'ext4',
        'disk_bus': 'scsi'}

    ephemeral_legacy_driver_bdm = {
        'device_name': '/dev/sdc1',
        'size': 4,
        'virtual_name': 'ephemeral0',
        'num': 0}

    volume_bdm_dict = block_device.BlockDeviceDict(
        {'id': 3, 'instance_uuid': 'fake-instance',
         'device_name': '/dev/sda1',
         'source_type': 'volume',
         'disk_bus': 'scsi',
         'device_type': 'disk',
         'volume_size': 8,
         'destination_type': 'volume',
         'volume_id': 'fake-volume-id-1',
         'guest_format': 'ext4',
         'connection_info': '{"fake": "connection_info"}',
         'delete_on_termination': False,
         'boot_index': 0})

    volume_driver_bdm = {
        'mount_device': '/dev/sda1',
        'connection_info': {"fake": "connection_info"},
        'delete_on_termination': False,
        'disk_bus': 'scsi',
        'device_type': 'disk',
        'guest_format': 'ext4',
        'boot_index': 0}

    volume_legacy_driver_bdm = {
        'mount_device': '/dev/sda1',
        'connection_info': {"fake": "connection_info"},
        'delete_on_termination': False}

    snapshot_bdm_dict = block_device.BlockDeviceDict(
        {'id': 4, 'instance_uuid': 'fake-instance',
         'device_name': '/dev/sda2',
         'delete_on_termination': True,
         'volume_size': 3,
         'disk_bus': 'scsi',
         'device_type': 'disk',
         'source_type': 'snapshot',
         'destination_type': 'volume',
         'connection_info': '{"fake": "connection_info"}',
         'snapshot_id': 'fake-snapshot-id-1',
         'volume_id': 'fake-volume-id-2',
         'boot_index': -1})

    snapshot_driver_bdm = {
        'mount_device': '/dev/sda2',
        'connection_info': {"fake": "connection_info"},
        'delete_on_termination': True,
        'disk_bus': 'scsi',
        'device_type': 'disk',
        'guest_format': None,
        'boot_index': -1}

    snapshot_legacy_driver_bdm = {
        'mount_device': '/dev/sda2',
        'connection_info': {"fake": "connection_info"},
        'delete_on_termination': True}

    image_bdm_dict = block_device.BlockDeviceDict(
        {'id': 5, 'instance_uuid': 'fake-instance',
         'device_name': '/dev/sda2',
         'delete_on_termination': True,
         'volume_size': 1,
         'disk_bus': 'scsi',
         'device_type': 'disk',
         'source_type': 'image',
         'destination_type': 'volume',
         'connection_info': '{"fake": "connection_info"}',
         'image_id': 'fake-image-id-1',
         'volume_id': 'fake-volume-id-2',
         'boot_index': -1})

    image_driver_bdm = {
        'mount_device': '/dev/sda2',
        'connection_info': {"fake": "connection_info"},
        'delete_on_termination': True,
        'disk_bus': 'scsi',
        'device_type': 'disk',
        'guest_format': None,
        'boot_index': -1}

    image_legacy_driver_bdm = {
        'mount_device': '/dev/sda2',
        'connection_info': {"fake": "connection_info"},
        'delete_on_termination': True}

    blank_bdm_dict = block_device.BlockDeviceDict(
        {'id': 6, 'instance_uuid': 'fake-instance',
         'device_name': '/dev/sda2',
         'delete_on_termination': True,
         'volume_size': 3,
         'disk_bus': 'scsi',
         'device_type': 'disk',
         'source_type': 'blank',
         'destination_type': 'volume',
         'connection_info': '{"fake": "connection_info"}',
         'snapshot_id': 'fake-snapshot-id-1',
         'volume_id': 'fake-volume-id-2',
         'boot_index': -1})

    blank_driver_bdm = {
        'mount_device': '/dev/sda2',
        'connection_info': {"fake": "connection_info"},
        'delete_on_termination': True,
        'disk_bus': 'scsi',
        'device_type': 'disk',
        'guest_format': None,
        'boot_index': -1}

    blank_legacy_driver_bdm = {
        'mount_device': '/dev/sda2',
        'connection_info': {"fake": "connection_info"},
        'delete_on_termination': True}
    def setUp(self):
        """Create mocked collaborators and one BDM object per source type."""
        super(TestDriverBlockDevice, self).setUp()
        # Mocked-out collaborators: the cinder volume API and the compute driver.
        self.volume_api = self.mox.CreateMock(cinder.API)
        self.virt_driver = self.mox.CreateMock(driver.ComputeDriver)
        self.context = context.RequestContext('fake_user',
                                              'fake_project')

        # create bdm objects for testing
        self.swap_bdm = fake_block_device.fake_bdm_object(
            self.context, self.swap_bdm_dict)
        self.ephemeral_bdm = fake_block_device.fake_bdm_object(
            self.context, self.ephemeral_bdm_dict)
        self.volume_bdm = fake_block_device.fake_bdm_object(
            self.context, self.volume_bdm_dict)
        self.snapshot_bdm = fake_block_device.fake_bdm_object(
            self.context, self.snapshot_bdm_dict)
        self.image_bdm = fake_block_device.fake_bdm_object(
            self.context, self.image_bdm_dict)
        self.blank_bdm = fake_block_device.fake_bdm_object(
            self.context, self.blank_bdm_dict)
def test_no_device_raises(self):
for name, cls in self.driver_classes.items():
bdm = fake_block_device.fake_bdm_object(
self.context, {'no_device': True})
self.assertRaises(driver_block_device._NotTransformable,
cls, bdm)
    def _test_driver_device(self, name):
        """Full round-trip check for one BDM source type.

        Verifies the driver-format dict, field passthrough, legacy conversion,
        rejection by all the other driver classes, and save() behaviour.
        """
        db_bdm = getattr(self, "%s_bdm" % name)
        test_bdm = self.driver_classes[name](db_bdm)
        # The transformed dict must match the expected driver-format fixture.
        self.assertThat(test_bdm, matchers.DictMatches(
            getattr(self, "%s_driver_bdm" % name)))

        # Every DB field must be reachable through the wrapped object.
        for k, v in six.iteritems(db_bdm):
            field_val = getattr(test_bdm._bdm_obj, k)
            if isinstance(field_val, bool):
                v = bool(v)
            self.assertEqual(field_val, v)

        self.assertThat(test_bdm.legacy(),
                        matchers.DictMatches(
                            getattr(self, "%s_legacy_driver_bdm" % name)))

        # Test passthru attributes
        for passthru in test_bdm._proxy_as_attr:
            self.assertEqual(getattr(test_bdm, passthru),
                             getattr(test_bdm._bdm_obj, passthru))

        # Make sure that all others raise _invalidType
        for other_name, cls in six.iteritems(self.driver_classes):
            if other_name == name:
                continue
            self.assertRaises(driver_block_device._InvalidType,
                              cls,
                              getattr(self, '%s_bdm' % name))

        # Test the save method
        with mock.patch.object(test_bdm._bdm_obj, 'save') as save_mock:
            for fld, alias in six.iteritems(test_bdm._update_on_save):
                # We can't set fake values on enums, like device_type,
                # so skip those.
                if not isinstance(test_bdm._bdm_obj.fields[fld],
                                  fields.BaseEnumField):
                    test_bdm[alias or fld] = 'fake_changed_value'
            test_bdm.save()
            for fld, alias in six.iteritems(test_bdm._update_on_save):
                self.assertEqual(test_bdm[alias or fld],
                                 getattr(test_bdm._bdm_obj, fld))

            save_mock.assert_called_once_with()

        def check_save():
            self.assertEqual(set([]), test_bdm._bdm_obj.obj_what_changed())

        # Test that nothing is set on the object if there are no actual changes
        test_bdm._bdm_obj.obj_reset_changes()
        with mock.patch.object(test_bdm._bdm_obj, 'save') as save_mock:
            save_mock.side_effect = check_save
            test_bdm.save()
    def _test_driver_default_size(self, name):
        """A BDM with a None or absent volume_size yields a size of 0."""
        # Swap devices expose their size under a different key.
        size = 'swap_size' if name == 'swap' else 'size'
        no_size_bdm = getattr(self, "%s_bdm_dict" % name).copy()
        no_size_bdm['volume_size'] = None

        driver_bdm = self.driver_classes[name](
            fake_block_device.fake_bdm_object(self.context, no_size_bdm))
        self.assertEqual(driver_bdm[size], 0)

        del no_size_bdm['volume_size']

        driver_bdm = self.driver_classes[name](
            fake_block_device.fake_bdm_object(self.context, no_size_bdm))
        self.assertEqual(driver_bdm[size], 0)
    def test_driver_swap_block_device(self):
        """Swap BDMs round-trip through driver and legacy formats."""
        self._test_driver_device("swap")
    def test_driver_swap_default_size(self):
        """Swap BDMs default to size 0 when volume_size is missing."""
        self._test_driver_default_size('swap')
    def test_driver_ephemeral_block_device(self):
        """Ephemeral BDMs round-trip through driver and legacy formats."""
        self._test_driver_device("ephemeral")
    def test_driver_ephemeral_default_size(self):
        """Ephemeral BDMs default to size 0 when volume_size is missing."""
        self._test_driver_default_size('ephemeral')
    def test_driver_volume_block_device(self):
        """Volume BDMs round-trip and expose their volume attributes."""
        self._test_driver_device("volume")

        test_bdm = self.driver_classes['volume'](
            self.volume_bdm)
        # connection_info is stored serialized but exposed deserialized.
        self.assertEqual(test_bdm['connection_info'],
                         jsonutils.loads(test_bdm._bdm_obj.connection_info))
        self.assertEqual(test_bdm._bdm_obj.id, 3)
        self.assertEqual(test_bdm.volume_id, 'fake-volume-id-1')
        self.assertEqual(test_bdm.volume_size, 8)
    def test_driver_snapshot_block_device(self):
        """Snapshot BDMs round-trip and expose snapshot/volume attributes."""
        self._test_driver_device("snapshot")

        test_bdm = self.driver_classes['snapshot'](
            self.snapshot_bdm)
        self.assertEqual(test_bdm._bdm_obj.id, 4)
        self.assertEqual(test_bdm.snapshot_id, 'fake-snapshot-id-1')
        self.assertEqual(test_bdm.volume_id, 'fake-volume-id-2')
        self.assertEqual(test_bdm.volume_size, 3)
    def test_driver_image_block_device(self):
        """Image BDMs round-trip and expose image attributes."""
        self._test_driver_device('image')

        test_bdm = self.driver_classes['image'](
            self.image_bdm)
        self.assertEqual(test_bdm._bdm_obj.id, 5)
        self.assertEqual(test_bdm.image_id, 'fake-image-id-1')
        self.assertEqual(test_bdm.volume_size, 1)
    def test_driver_image_block_device_destination_local(self):
        """An image BDM with a 'local' destination is not a valid image BDM."""
        self._test_driver_device('image')
        bdm = self.image_bdm_dict.copy()
        bdm['destination_type'] = 'local'
        self.assertRaises(driver_block_device._InvalidType,
                          self.driver_classes['image'],
                          fake_block_device.fake_bdm_object(self.context, bdm))
    def test_driver_blank_block_device(self):
        """Blank BDMs round-trip and expose volume attributes."""
        self._test_driver_device('blank')

        test_bdm = self.driver_classes['blank'](
            self.blank_bdm)
        self.assertEqual(6, test_bdm._bdm_obj.id)
        self.assertEqual('fake-volume-id-2', test_bdm.volume_id)
        self.assertEqual(3, test_bdm.volume_size)
    def _test_call_wait_func(self, delete_on_termination, delete_fail=False):
        """Check _call_wait_func error handling.

        When the wait function raises VolumeNotCreated, the volume must be
        deleted iff delete_on_termination is set, and the original exception
        must propagate even when that delete itself fails.
        """
        test_bdm = self.driver_classes['volume'](self.volume_bdm)
        test_bdm['delete_on_termination'] = delete_on_termination
        with mock.patch.object(self.volume_api, 'delete') as vol_delete:
            wait_func = mock.MagicMock()
            mock_exception = exception.VolumeNotCreated(volume_id='fake-id',
                                                        seconds=1,
                                                        attempts=1,
                                                        volume_status='error')
            wait_func.side_effect = mock_exception

            if delete_on_termination and delete_fail:
                # Delete failure must not mask the VolumeNotCreated error.
                vol_delete.side_effect = Exception()

            self.assertRaises(exception.VolumeNotCreated,
                              test_bdm._call_wait_func,
                              context=self.context,
                              wait_func=wait_func,
                              volume_api=self.volume_api,
                              volume_id='fake-id')
            self.assertEqual(delete_on_termination, vol_delete.called)
    def test_call_wait_delete_volume(self):
        """delete_on_termination=True deletes the volume on wait failure."""
        self._test_call_wait_func(True)
    def test_call_wait_delete_volume_fail(self):
        """A failing volume delete still propagates VolumeNotCreated."""
        self._test_call_wait_func(True, True)
    def test_call_wait_no_delete_volume(self):
        """delete_on_termination=False leaves the volume alone on failure."""
        self._test_call_wait_func(False)
    def _test_volume_attach(self, driver_bdm, bdm_dict,
                            fake_volume, check_attach=True,
                            fail_check_attach=False, driver_attach=False,
                            fail_driver_attach=False, volume_attach=True,
                            fail_volume_attach=False, access_mode='rw',
                            availability_zone=None):
        # Record (mox) the expected call sequence for a volume attach and
        # return the fake instance plus the connection_info the caller is
        # expected to end up with.  When one of the fail_* flags is set,
        # recording stops (early return) at the point where the attach flow
        # aborts, so the caller can assert the resulting exception.
        # NOTE: mox verifies call order, so the statement order below is
        # significant and mirrors DriverVolumeBlockDevice.attach().
        elevated_context = self.context.elevated()
        self.stubs.Set(self.context, 'elevated',
                       lambda: elevated_context)
        self.mox.StubOutWithMock(driver_bdm._bdm_obj, 'save')
        self.mox.StubOutWithMock(encryptors, 'get_encryption_metadata')
        instance_detail = {'id': '123', 'uuid': 'fake_uuid',
                           'availability_zone': availability_zone}
        instance = fake_instance.fake_instance_obj(self.context,
                                                   **instance_detail)
        connector = {'ip': 'fake_ip', 'host': 'fake_host'}
        connection_info = {'data': {'access_mode': access_mode}}
        expected_conn_info = {'data': {'access_mode': access_mode},
                              'serial': fake_volume['id']}
        enc_data = {'fake': 'enc_data'}
        self.volume_api.get(self.context,
                            fake_volume['id']).AndReturn(fake_volume)
        # Optional check_attach step; on failure the BDM is saved and the
        # attach flow stops here.
        if check_attach:
            if not fail_check_attach:
                self.volume_api.check_attach(self.context, fake_volume,
                                             instance=instance).AndReturn(None)
            else:
                self.volume_api.check_attach(self.context, fake_volume,
                                             instance=instance).AndRaise(
                                                 test.TestingException)
                driver_bdm._bdm_obj.save().AndReturn(None)
                return instance, expected_conn_info
        self.virt_driver.get_volume_connector(instance).AndReturn(connector)
        self.volume_api.initialize_connection(
            elevated_context, fake_volume['id'],
            connector).AndReturn(connection_info)
        # Optional hypervisor-side attach; on failure the connection is
        # torn down, the BDM saved, and the flow stops.
        if driver_attach:
            encryptors.get_encryption_metadata(
                elevated_context, self.volume_api, fake_volume['id'],
                connection_info).AndReturn(enc_data)
            if not fail_driver_attach:
                self.virt_driver.attach_volume(
                    elevated_context, expected_conn_info, instance,
                    bdm_dict['device_name'],
                    disk_bus=bdm_dict['disk_bus'],
                    device_type=bdm_dict['device_type'],
                    encryption=enc_data).AndReturn(None)
            else:
                self.virt_driver.attach_volume(
                    elevated_context, expected_conn_info, instance,
                    bdm_dict['device_name'],
                    disk_bus=bdm_dict['disk_bus'],
                    device_type=bdm_dict['device_type'],
                    encryption=enc_data).AndRaise(test.TestingException)
                self.volume_api.terminate_connection(
                    elevated_context, fake_volume['id'],
                    connector).AndReturn(None)
                driver_bdm._bdm_obj.save().AndReturn(None)
                return instance, expected_conn_info
        # Cinder-side attach; on failure the driver attach (if any) is
        # rolled back and the connection terminated before detaching.
        if volume_attach:
            driver_bdm._bdm_obj.save().AndReturn(None)
            if not fail_volume_attach:
                self.volume_api.attach(elevated_context, fake_volume['id'],
                                       'fake_uuid', bdm_dict['device_name'],
                                        mode=access_mode).AndReturn(None)
            else:
                self.volume_api.attach(elevated_context, fake_volume['id'],
                                       'fake_uuid', bdm_dict['device_name'],
                                        mode=access_mode).AndRaise(
                                            test.TestingException)
                if driver_attach:
                    self.virt_driver.detach_volume(
                        expected_conn_info, instance,
                        bdm_dict['device_name'],
                        encryption=enc_data).AndReturn(None)
                self.volume_api.terminate_connection(
                    elevated_context, fake_volume['id'],
                    connector).AndReturn(None)
                self.volume_api.detach(elevated_context,
                                       fake_volume['id']).AndReturn(None)
        driver_bdm._bdm_obj.save().AndReturn(None)
        return instance, expected_conn_info
    def test_volume_attach(self):
        # Happy path: attach stores the expected connection_info on the BDM.
        test_bdm = self.driver_classes['volume'](
            self.volume_bdm)
        volume = {'id': 'fake-volume-id-1',
                  'attach_status': 'detached'}
        instance, expected_conn_info = self._test_volume_attach(
            test_bdm, self.volume_bdm, volume)
        self.mox.ReplayAll()
        test_bdm.attach(self.context, instance,
                        self.volume_api, self.virt_driver)
        self.assertThat(test_bdm['connection_info'],
                        matchers.DictMatches(expected_conn_info))
    def test_volume_attach_ro(self):
        # Read-only access mode must be passed through to the attach flow.
        test_bdm = self.driver_classes['volume'](self.volume_bdm)
        volume = {'id': 'fake-volume-id-1',
                  'attach_status': 'detached'}
        instance, expected_conn_info = self._test_volume_attach(
            test_bdm, self.volume_bdm, volume, access_mode='ro')
        self.mox.ReplayAll()
        test_bdm.attach(self.context, instance,
                        self.volume_api, self.virt_driver)
        self.assertThat(test_bdm['connection_info'],
                        matchers.DictMatches(expected_conn_info))
    def test_volume_attach_update_size(self):
        # A BDM with no volume_size picks up the size of the Cinder volume.
        test_bdm = self.driver_classes['volume'](self.volume_bdm)
        test_bdm.volume_size = None
        volume = {'id': 'fake-volume-id-1',
                  'attach_status': 'detached',
                  'size': 42}
        instance, expected_conn_info = self._test_volume_attach(
            test_bdm, self.volume_bdm, volume)
        self.mox.ReplayAll()
        test_bdm.attach(self.context, instance,
                        self.volume_api, self.virt_driver)
        self.assertEqual(expected_conn_info, test_bdm['connection_info'])
        self.assertEqual(42, test_bdm.volume_size)
    def test_volume_attach_check_attach_fails(self):
        # A check_attach failure must propagate out of attach().
        test_bdm = self.driver_classes['volume'](
            self.volume_bdm)
        volume = {'id': 'fake-volume-id-1'}
        instance, _ = self._test_volume_attach(
            test_bdm, self.volume_bdm, volume, fail_check_attach=True)
        self.mox.ReplayAll()
        self.assertRaises(test.TestingException, test_bdm.attach, self.context,
                          instance, self.volume_api, self.virt_driver)
    def test_volume_no_volume_attach(self):
        # Skipping both check_attach and the driver attach still records
        # the connection_info on the BDM.
        test_bdm = self.driver_classes['volume'](
            self.volume_bdm)
        volume = {'id': 'fake-volume-id-1',
                  'attach_status': 'detached'}
        instance, expected_conn_info = self._test_volume_attach(
            test_bdm, self.volume_bdm, volume, check_attach=False,
            driver_attach=False)
        self.mox.ReplayAll()
        test_bdm.attach(self.context, instance,
                        self.volume_api, self.virt_driver,
                        do_check_attach=False, do_driver_attach=False)
        self.assertThat(test_bdm['connection_info'],
                        matchers.DictMatches(expected_conn_info))
    def test_volume_attach_no_check_driver_attach(self):
        # do_driver_attach=True without check_attach: hypervisor attach is
        # performed and connection_info recorded.
        test_bdm = self.driver_classes['volume'](
            self.volume_bdm)
        volume = {'id': 'fake-volume-id-1',
                  'attach_status': 'detached'}
        instance, expected_conn_info = self._test_volume_attach(
            test_bdm, self.volume_bdm, volume, check_attach=False,
            driver_attach=True)
        self.mox.ReplayAll()
        test_bdm.attach(self.context, instance,
                        self.volume_api, self.virt_driver,
                        do_check_attach=False, do_driver_attach=True)
        self.assertThat(test_bdm['connection_info'],
                        matchers.DictMatches(expected_conn_info))
    def test_volume_attach_driver_attach_fails(self):
        # A hypervisor attach failure must propagate out of attach().
        test_bdm = self.driver_classes['volume'](
            self.volume_bdm)
        volume = {'id': 'fake-volume-id-1'}
        instance, _ = self._test_volume_attach(
            test_bdm, self.volume_bdm, volume, driver_attach=True,
            fail_driver_attach=True)
        self.mox.ReplayAll()
        self.assertRaises(test.TestingException, test_bdm.attach, self.context,
                          instance, self.volume_api, self.virt_driver,
                          do_driver_attach=True)
    def test_volume_attach_volume_attach_fails(self):
        # A Cinder attach failure after a successful driver attach must
        # roll back the driver attach and propagate the exception.
        test_bdm = self.driver_classes['volume'](
            self.volume_bdm)
        volume = {'id': 'fake-volume-id-1',
                  'attach_status': 'detached'}
        instance, _ = self._test_volume_attach(
            test_bdm, self.volume_bdm, volume, driver_attach=True,
            fail_volume_attach=True)
        self.mox.ReplayAll()
        self.assertRaises(test.TestingException, test_bdm.attach, self.context,
                          instance, self.volume_api, self.virt_driver,
                          do_driver_attach=True)
    def test_volume_attach_no_driver_attach_volume_attach_fails(self):
        # A Cinder attach failure with no driver attach still propagates.
        test_bdm = self.driver_classes['volume'](
            self.volume_bdm)
        volume = {'id': 'fake-volume-id-1',
                  'attach_status': 'detached'}
        instance, _ = self._test_volume_attach(
            test_bdm, self.volume_bdm, volume, fail_volume_attach=True)
        self.mox.ReplayAll()
        self.assertRaises(test.TestingException, test_bdm.attach, self.context,
                          instance, self.volume_api, self.virt_driver,
                          do_driver_attach=False)
    def test_refresh_connection(self):
        # refresh_connection_info() re-initializes the connection, stores
        # the refreshed connection_info (plus serial) and saves the BDM.
        test_bdm = self.driver_classes['snapshot'](
            self.snapshot_bdm)
        instance = {'id': 'fake_id', 'uuid': 'fake_uuid'}
        connector = {'ip': 'fake_ip', 'host': 'fake_host'}
        connection_info = {'data': {'multipath_id': 'fake_multipath_id'}}
        expected_conn_info = {'data': {'multipath_id': 'fake_multipath_id'},
                              'serial': 'fake-volume-id-2'}
        self.mox.StubOutWithMock(test_bdm._bdm_obj, 'save')
        self.virt_driver.get_volume_connector(instance).AndReturn(connector)
        self.volume_api.initialize_connection(
            self.context, test_bdm.volume_id,
            connector).AndReturn(connection_info)
        test_bdm._bdm_obj.save().AndReturn(None)
        self.mox.ReplayAll()
        test_bdm.refresh_connection_info(self.context, instance,
                                         self.volume_api, self.virt_driver)
        self.assertThat(test_bdm['connection_info'],
                        matchers.DictMatches(expected_conn_info))
    def test_snapshot_attach_no_volume(self):
        # A snapshot BDM with no volume yet: a volume is created from the
        # snapshot, waited on, attached, and its id saved on the BDM.
        no_volume_snapshot = self.snapshot_bdm_dict.copy()
        no_volume_snapshot['volume_id'] = None
        test_bdm = self.driver_classes['snapshot'](
            fake_block_device.fake_bdm_object(
                self.context, no_volume_snapshot))
        snapshot = {'id': 'fake-volume-id-1',
                    'attach_status': 'detached'}
        volume = {'id': 'fake-volume-id-2',
                  'attach_status': 'detached'}
        wait_func = self.mox.CreateMockAnything()
        self.volume_api.get_snapshot(self.context,
                                     'fake-snapshot-id-1').AndReturn(snapshot)
        self.volume_api.create(self.context, 3, '', '', snapshot,
                               availability_zone=None).AndReturn(volume)
        wait_func(self.context, 'fake-volume-id-2').AndReturn(None)
        instance, expected_conn_info = self._test_volume_attach(
                test_bdm, no_volume_snapshot, volume)
        self.mox.ReplayAll()
        test_bdm.attach(self.context, instance, self.volume_api,
                        self.virt_driver, wait_func)
        self.assertEqual(test_bdm.volume_id, 'fake-volume-id-2')
    def test_snapshot_attach_no_volume_cinder_cross_az_attach_false(self):
        # Tests that the volume created from the snapshot has the same AZ as
        # the instance.
        self.flags(cross_az_attach=False, group='cinder')
        no_volume_snapshot = self.snapshot_bdm_dict.copy()
        no_volume_snapshot['volume_id'] = None
        test_bdm = self.driver_classes['snapshot'](
            fake_block_device.fake_bdm_object(
                self.context, no_volume_snapshot))
        snapshot = {'id': 'fake-volume-id-1',
                    'attach_status': 'detached'}
        volume = {'id': 'fake-volume-id-2',
                  'attach_status': 'detached'}
        wait_func = self.mox.CreateMockAnything()
        self.volume_api.get_snapshot(self.context,
                                     'fake-snapshot-id-1').AndReturn(snapshot)
        # With cross_az_attach=False the create call must carry the
        # instance's availability zone.
        self.volume_api.create(self.context, 3, '', '', snapshot,
                               availability_zone='test-az').AndReturn(volume)
        wait_func(self.context, 'fake-volume-id-2').AndReturn(None)
        instance, expected_conn_info = self._test_volume_attach(
                test_bdm, no_volume_snapshot, volume,
                availability_zone='test-az')
        self.mox.ReplayAll()
        test_bdm.attach(self.context, instance, self.volume_api,
                        self.virt_driver, wait_func)
        self.assertEqual('fake-volume-id-2', test_bdm.volume_id)
    def test_snapshot_attach_fail_volume(self):
        # If the volume created from the snapshot never becomes available,
        # the volume is deleted and VolumeNotCreated propagates.
        fail_volume_snapshot = self.snapshot_bdm_dict.copy()
        fail_volume_snapshot['volume_id'] = None
        test_bdm = self.driver_classes['snapshot'](
            fake_block_device.fake_bdm_object(
                self.context, fail_volume_snapshot))
        snapshot = {'id': 'fake-volume-id-1',
                    'attach_status': 'detached'}
        volume = {'id': 'fake-volume-id-2',
                  'attach_status': 'detached'}
        instance = fake_instance.fake_instance_obj(mock.sentinel.ctx,
                                                   **{'uuid': 'fake-uuid'})
        with test.nested(
            mock.patch.object(self.volume_api, 'get_snapshot',
                              return_value=snapshot),
            mock.patch.object(self.volume_api, 'create', return_value=volume),
            mock.patch.object(self.volume_api, 'delete'),
        ) as (vol_get_snap, vol_create, vol_delete):
            wait_func = mock.MagicMock()
            mock_exception = exception.VolumeNotCreated(volume_id=volume['id'],
                                                        seconds=1,
                                                        attempts=1,
                                                        volume_status='error')
            wait_func.side_effect = mock_exception
            self.assertRaises(exception.VolumeNotCreated,
                              test_bdm.attach, context=self.context,
                              instance=instance,
                              volume_api=self.volume_api,
                              virt_driver=self.virt_driver,
                              wait_func=wait_func)
            vol_get_snap.assert_called_once_with(
                self.context, 'fake-snapshot-id-1')
            vol_create.assert_called_once_with(
                self.context, 3, '', '', snapshot, availability_zone=None)
            vol_delete.assert_called_once_with(self.context, volume['id'])
    def test_snapshot_attach_volume(self):
        # A snapshot BDM that already has a volume delegates straight to the
        # volume driver's attach().
        test_bdm = self.driver_classes['snapshot'](
            self.snapshot_bdm)
        instance = {'id': 'fake_id', 'uuid': 'fake_uuid'}
        volume_class = self.driver_classes['volume']
        self.mox.StubOutWithMock(volume_class, 'attach')
        # Make sure these are not called
        self.mox.StubOutWithMock(self.volume_api, 'get_snapshot')
        self.mox.StubOutWithMock(self.volume_api, 'create')
        volume_class.attach(self.context, instance, self.volume_api,
                            self.virt_driver, do_check_attach=True
                            ).AndReturn(None)
        self.mox.ReplayAll()
        test_bdm.attach(self.context, instance, self.volume_api,
                        self.virt_driver)
        self.assertEqual(test_bdm.volume_id, 'fake-volume-id-2')
    def test_image_attach_no_volume(self):
        # An image BDM with no volume yet: a volume is created from the
        # image, waited on, attached, and its id saved on the BDM.
        no_volume_image = self.image_bdm_dict.copy()
        no_volume_image['volume_id'] = None
        test_bdm = self.driver_classes['image'](
            fake_block_device.fake_bdm_object(
                self.context, no_volume_image))
        image = {'id': 'fake-image-id-1'}
        volume = {'id': 'fake-volume-id-2',
                  'attach_status': 'detached'}
        wait_func = self.mox.CreateMockAnything()
        self.volume_api.create(self.context, 1, '', '', image_id=image['id'],
                               availability_zone=None).AndReturn(volume)
        wait_func(self.context, 'fake-volume-id-2').AndReturn(None)
        instance, expected_conn_info = self._test_volume_attach(
                test_bdm, no_volume_image, volume)
        self.mox.ReplayAll()
        test_bdm.attach(self.context, instance, self.volume_api,
                        self.virt_driver, wait_func)
        self.assertEqual(test_bdm.volume_id, 'fake-volume-id-2')
    def test_image_attach_no_volume_cinder_cross_az_attach_false(self):
        # Tests that the volume created from the image has the same AZ as the
        # instance.
        self.flags(cross_az_attach=False, group='cinder')
        no_volume_image = self.image_bdm_dict.copy()
        no_volume_image['volume_id'] = None
        test_bdm = self.driver_classes['image'](
            fake_block_device.fake_bdm_object(
                self.context, no_volume_image))
        image = {'id': 'fake-image-id-1'}
        volume = {'id': 'fake-volume-id-2',
                  'attach_status': 'detached'}
        wait_func = self.mox.CreateMockAnything()
        self.volume_api.create(self.context, 1, '', '', image_id=image['id'],
                               availability_zone='test-az').AndReturn(volume)
        wait_func(self.context, 'fake-volume-id-2').AndReturn(None)
        instance, expected_conn_info = self._test_volume_attach(
                test_bdm, no_volume_image, volume,
                availability_zone='test-az')
        self.mox.ReplayAll()
        test_bdm.attach(self.context, instance, self.volume_api,
                        self.virt_driver, wait_func)
        self.assertEqual('fake-volume-id-2', test_bdm.volume_id)
    def test_image_attach_fail_volume(self):
        # If the volume created from the image never becomes available,
        # the volume is deleted and VolumeNotCreated propagates.
        fail_volume_image = self.image_bdm_dict.copy()
        fail_volume_image['volume_id'] = None
        test_bdm = self.driver_classes['image'](
            fake_block_device.fake_bdm_object(
                self.context, fail_volume_image))
        image = {'id': 'fake-image-id-1'}
        volume = {'id': 'fake-volume-id-2',
                  'attach_status': 'detached'}
        instance = fake_instance.fake_instance_obj(mock.sentinel.ctx,
                                                   **{'uuid': 'fake-uuid'})
        with test.nested(
            mock.patch.object(self.volume_api, 'create', return_value=volume),
            mock.patch.object(self.volume_api, 'delete'),
        ) as (vol_create, vol_delete):
            wait_func = mock.MagicMock()
            mock_exception = exception.VolumeNotCreated(volume_id=volume['id'],
                                                        seconds=1,
                                                        attempts=1,
                                                        volume_status='error')
            wait_func.side_effect = mock_exception
            self.assertRaises(exception.VolumeNotCreated,
                              test_bdm.attach, context=self.context,
                              instance=instance,
                              volume_api=self.volume_api,
                              virt_driver=self.virt_driver,
                              wait_func=wait_func)
            vol_create.assert_called_once_with(
                self.context, 1, '', '', image_id=image['id'],
                availability_zone=None)
            vol_delete.assert_called_once_with(self.context, volume['id'])
    def test_image_attach_volume(self):
        # An image BDM that already has a volume delegates straight to the
        # volume driver's attach().
        test_bdm = self.driver_classes['image'](
            self.image_bdm)
        instance = {'id': 'fake_id', 'uuid': 'fake_uuid'}
        volume_class = self.driver_classes['volume']
        self.mox.StubOutWithMock(volume_class, 'attach')
        # Make sure these are not called
        self.mox.StubOutWithMock(self.volume_api, 'get_snapshot')
        self.mox.StubOutWithMock(self.volume_api, 'create')
        volume_class.attach(self.context, instance, self.volume_api,
                            self.virt_driver, do_check_attach=True
                            ).AndReturn(None)
        self.mox.ReplayAll()
        test_bdm.attach(self.context, instance, self.volume_api,
                        self.virt_driver)
        self.assertEqual(test_bdm.volume_id, 'fake-volume-id-2')
    def test_blank_attach_fail_volume(self):
        # If the blank volume never becomes available, it is deleted and
        # VolumeNotCreated propagates.
        no_blank_volume = self.blank_bdm_dict.copy()
        no_blank_volume['volume_id'] = None
        test_bdm = self.driver_classes['blank'](
            fake_block_device.fake_bdm_object(
                self.context, no_blank_volume))
        instance = fake_instance.fake_instance_obj(mock.sentinel.ctx,
                                                   **{'uuid': 'fake-uuid'})
        volume = {'id': 'fake-volume-id-2',
                  'display_name': 'fake-uuid-blank-vol'}
        with test.nested(
            mock.patch.object(self.volume_api, 'create', return_value=volume),
            mock.patch.object(self.volume_api, 'delete'),
        ) as (vol_create, vol_delete):
            wait_func = mock.MagicMock()
            mock_exception = exception.VolumeNotCreated(volume_id=volume['id'],
                                                        seconds=1,
                                                        attempts=1,
                                                        volume_status='error')
            wait_func.side_effect = mock_exception
            self.assertRaises(exception.VolumeNotCreated,
                              test_bdm.attach, context=self.context,
                              instance=instance,
                              volume_api=self.volume_api,
                              virt_driver=self.virt_driver,
                              wait_func=wait_func)
            vol_create.assert_called_once_with(
                self.context, test_bdm.volume_size, 'fake-uuid-blank-vol',
                '', availability_zone=None)
            vol_delete.assert_called_once_with(
                self.context, volume['id'])
    def test_blank_attach_volume(self):
        # A blank BDM with no volume: a new named volume is created and the
        # attach is delegated to the volume driver.
        no_blank_volume = self.blank_bdm_dict.copy()
        no_blank_volume['volume_id'] = None
        test_bdm = self.driver_classes['blank'](
            fake_block_device.fake_bdm_object(
                self.context, no_blank_volume))
        instance = fake_instance.fake_instance_obj(mock.sentinel.ctx,
                                                   **{'uuid': 'fake-uuid'})
        volume_class = self.driver_classes['volume']
        volume = {'id': 'fake-volume-id-2',
                  'display_name': 'fake-uuid-blank-vol'}
        with test.nested(
            mock.patch.object(self.volume_api, 'create', return_value=volume),
            mock.patch.object(volume_class, 'attach')
        ) as (vol_create, vol_attach):
            test_bdm.attach(self.context, instance, self.volume_api,
                            self.virt_driver)
            vol_create.assert_called_once_with(
                self.context, test_bdm.volume_size, 'fake-uuid-blank-vol',
                '', availability_zone=None)
            vol_attach.assert_called_once_with(self.context, instance,
                                               self.volume_api,
                                               self.virt_driver,
                                               do_check_attach=True)
            self.assertEqual('fake-volume-id-2', test_bdm.volume_id)
    def test_blank_attach_volume_cinder_cross_az_attach_false(self):
        # Tests that the blank volume created is in the same availability zone
        # as the instance.
        self.flags(cross_az_attach=False, group='cinder')
        no_blank_volume = self.blank_bdm_dict.copy()
        no_blank_volume['volume_id'] = None
        test_bdm = self.driver_classes['blank'](
            fake_block_device.fake_bdm_object(
                self.context, no_blank_volume))
        updates = {'uuid': 'fake-uuid', 'availability_zone': 'test-az'}
        instance = fake_instance.fake_instance_obj(mock.sentinel.ctx,
                                                   **updates)
        volume_class = self.driver_classes['volume']
        volume = {'id': 'fake-volume-id-2',
                  'display_name': 'fake-uuid-blank-vol'}
        with mock.patch.object(self.volume_api, 'create',
                               return_value=volume) as vol_create:
            with mock.patch.object(volume_class, 'attach') as vol_attach:
                test_bdm.attach(self.context, instance, self.volume_api,
                                self.virt_driver)
                # The create call must carry the instance's AZ since
                # cross_az_attach is disabled.
                vol_create.assert_called_once_with(
                    self.context, test_bdm.volume_size, 'fake-uuid-blank-vol',
                    '', availability_zone='test-az')
                vol_attach.assert_called_once_with(self.context, instance,
                                                   self.volume_api,
                                                   self.virt_driver,
                                                   do_check_attach=True)
                self.assertEqual('fake-volume-id-2', test_bdm.volume_id)
    def test_convert_block_devices(self):
        # Only BDMs matching the driver class are converted; others are
        # silently dropped (the ephemeral BDM here).
        bdms = objects.BlockDeviceMappingList(
                objects=[self.volume_bdm, self.ephemeral_bdm])
        converted = driver_block_device._convert_block_devices(
            self.driver_classes['volume'], bdms)
        self.assertEqual(converted, [self.volume_driver_bdm])
    def test_convert_all_volumes(self):
        # convert_all_volumes keeps every volume-backed BDM type and drops
        # non-volume types (swap/ephemeral).
        converted = driver_block_device.convert_all_volumes()
        self.assertEqual([], converted)
        converted = driver_block_device.convert_all_volumes(
            self.volume_bdm, self.ephemeral_bdm, self.image_bdm,
            self.blank_bdm, self.snapshot_bdm)
        self.assertEqual(converted, [self.volume_driver_bdm,
                                     self.image_driver_bdm,
                                     self.blank_driver_bdm,
                                     self.snapshot_driver_bdm])
    def test_convert_volume(self):
        # convert_volume returns None for non-volume BDMs and the matching
        # driver BDM otherwise.
        self.assertIsNone(driver_block_device.convert_volume(self.swap_bdm))
        self.assertEqual(self.volume_driver_bdm,
                         driver_block_device.convert_volume(self.volume_bdm))
        self.assertEqual(self.snapshot_driver_bdm,
                         driver_block_device.convert_volume(self.snapshot_bdm))
    def test_legacy_block_devices(self):
        # Driver BDMs convert to their legacy dict form; ephemerals get
        # sequential virtual_name/num fields assigned during conversion.
        test_snapshot = self.driver_classes['snapshot'](
            self.snapshot_bdm)
        block_device_mapping = [test_snapshot, test_snapshot]
        legacy_bdm = driver_block_device.legacy_block_devices(
            block_device_mapping)
        self.assertEqual(legacy_bdm, [self.snapshot_legacy_driver_bdm,
                                       self.snapshot_legacy_driver_bdm])
        # Test that the ephemerals work as expected
        test_ephemerals = [self.driver_classes['ephemeral'](
            self.ephemeral_bdm) for _ in range(2)]
        expected = [self.ephemeral_legacy_driver_bdm.copy()
                             for _ in range(2)]
        expected[0]['virtual_name'] = 'ephemeral0'
        expected[0]['num'] = 0
        expected[1]['virtual_name'] = 'ephemeral1'
        expected[1]['num'] = 1
        legacy_ephemerals = driver_block_device.legacy_block_devices(
            test_ephemerals)
        self.assertEqual(expected, legacy_ephemerals)
    def test_get_swap(self):
        # get_swap returns the single swap device (driver or legacy form)
        # or None when the list has no swap / is empty.
        swap = [self.swap_driver_bdm]
        legacy_swap = [self.swap_legacy_driver_bdm]
        no_swap = [self.volume_driver_bdm]
        self.assertEqual(swap[0], driver_block_device.get_swap(swap))
        self.assertEqual(legacy_swap[0],
                         driver_block_device.get_swap(legacy_swap))
        self.assertIsNone(driver_block_device.get_swap(no_swap))
        self.assertIsNone(driver_block_device.get_swap([]))
    def test_is_implemented(self):
        # All standard BDM types are implemented; an image BDM targeting
        # local storage is not.
        for bdm in (self.image_bdm, self.volume_bdm, self.swap_bdm,
                    self.ephemeral_bdm, self.snapshot_bdm):
            self.assertTrue(driver_block_device.is_implemented(bdm))
        local_image = self.image_bdm_dict.copy()
        local_image['destination_type'] = 'local'
        self.assertFalse(driver_block_device.is_implemented(
            fake_block_device.fake_bdm_object(self.context, local_image)))
    def test_is_block_device_mapping(self):
        # Volume-backed types count as block device mappings; swap and
        # ephemeral do not.
        test_swap = self.driver_classes['swap'](self.swap_bdm)
        test_ephemeral = self.driver_classes['ephemeral'](self.ephemeral_bdm)
        test_image = self.driver_classes['image'](self.image_bdm)
        test_snapshot = self.driver_classes['snapshot'](self.snapshot_bdm)
        test_volume = self.driver_classes['volume'](self.volume_bdm)
        test_blank = self.driver_classes['blank'](self.blank_bdm)
        for bdm in (test_image, test_snapshot, test_volume, test_blank):
            self.assertTrue(driver_block_device.is_block_device_mapping(
                bdm._bdm_obj))
        for bdm in (test_swap, test_ephemeral):
            self.assertFalse(driver_block_device.is_block_device_mapping(
                bdm._bdm_obj))
    def test_get_volume_create_az_cinder_cross_az_attach_true(self):
        # Tests that we get None back if cinder.cross_az_attach=True even if
        # the instance has an AZ assigned. Note that since cross_az_attach
        # defaults to True we don't need to set a flag explicitly for the test.
        updates = {'availability_zone': 'test-az'}
        instance = fake_instance.fake_instance_obj(self.context, **updates)
        self.assertIsNone(
            driver_block_device._get_volume_create_az_value(instance))
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.examples;
//import org.apache.ignite.examples.misc.client.memcache.*;
/**
* MemcacheRestExample multi-node self test.
*/
public class MemcacheRestExamplesMultiNodeSelfTest extends MemcacheRestExamplesSelfTest {
// TODO: IGNITE-711 next example(s) should be implemented for java 8
// or testing method(s) should be removed if example(s) does not applicable for java 8.
/** {@inheritDoc} */
// @Override protected void beforeTest() throws Exception {
// for (int i = 0; i < RMT_NODES_CNT; i++)
// startGrid("memcache-rest-examples-" + i, MemcacheRestExampleNodeStartup.configuration());
// }
} | irudyak/ignite | examples/src/test/java/org/apache/ignite/examples/MemcacheRestExamplesMultiNodeSelfTest.java | Java | apache-2.0 | 1,462 |
// RUN: %clang_cc1 -fsyntax-only -verify %s
// NOTE: This is a clang '-verify' parser test: most declarations below are
// intentionally malformed, and the trailing comments are machine-checked
// diagnostic expectations. Do not "fix" the code or edit those comments.
// Errors
export class foo { };   // expected-error {{expected template}}
template  x;            // expected-error {{C++ requires a type specifier for all declarations}} \
                        // expected-error {{does not refer}}
export template x;      // expected-error {{expected '<' after 'template'}}
export template<class T> class x0; // expected-warning {{exported templates are unsupported}}
template < ;            // expected-error {{expected template parameter}} \
                        // expected-error{{expected ',' or '>' in template-parameter-list}} \
                        // expected-warning {{declaration does not declare anything}}
template <int +> struct x1; // expected-error {{expected ',' or '>' in template-parameter-list}}
// verifies that we only walk to the ',' & still produce errors on the rest of the template parameters
template <int +, T> struct x2; // expected-error {{expected ',' or '>' in template-parameter-list}} \
                                  expected-error {{expected unqualified-id}}
template<template<int+>> struct x3; // expected-error {{expected ',' or '>' in template-parameter-list}} \
                                       expected-error {{template template parameter requires 'class' after the parameter list}}
template <template X> struct Err1; // expected-error {{expected '<' after 'template'}} \
                                   // expected-error{{extraneous}}
template <template <typename> > struct Err2;       // expected-error {{template template parameter requires 'class' after the parameter list}}
template <template <typename> Foo> struct Err3;    // expected-error {{template template parameter requires 'class' after the parameter list}}
// Template function declarations
template <typename T> void foo();
template <typename T, typename U> void foo();
// Template function definitions.
template <typename T> void foo() { }
// Template class (forward) declarations
template <typename T> struct A;
template <typename T, typename U> struct b;
template <typename> struct C;
template <typename, typename> struct D;
// Forward declarations with default parameters?
template <typename T = int> class X1;
template <typename = int> class X2;
// Forward declarations w/template template parameters
template <template <typename> class T> class TTP1;
template <template <typename> class> class TTP2;
template <template <typename> class T = foo> class TTP3; // expected-error{{must be a class template}}
template <template <typename> class = foo> class TTP3; // expected-error{{must be a class template}}
template <template <typename X, typename Y> class T> class TTP5;
// Forward declarations with non-type params
template <int> class NTP0;
template <int N> class NTP1;
template <int N = 5> class NTP2;
template <int = 10> class NTP3;
template <unsigned int N = 12u> class NTP4;
template <unsigned int = 12u> class NTP5;
template <unsigned = 15u> class NTP6;
template <typename T, T Obj> class NTP7;
// Template class declarations
template <typename T> struct A { };
template <typename T, typename U> struct B { };
// Template parameter shadowing
template<typename T, // expected-note{{template parameter is declared here}}
         typename T> // expected-error{{declaration of 'T' shadows template parameter}}
void shadow1();
template<typename T> // expected-note{{template parameter is declared here}}
void shadow2(int T); // expected-error{{declaration of 'T' shadows template parameter}}
template<typename T> // expected-note{{template parameter is declared here}}
class T { // expected-error{{declaration of 'T' shadows template parameter}}
};
template<int Size> // expected-note{{template parameter is declared here}}
void shadow3(int Size); // expected-error{{declaration of 'Size' shadows template parameter}}
// <rdar://problem/6952203>
template<typename T> // expected-note{{here}}
struct shadow4 {
  int T; // expected-error{{shadows}}
};
template<typename T> // expected-note{{here}}
struct shadow5 {
  int T(int, float); // expected-error{{shadows}}
};
// Non-type template parameters in scope
template<int Size>
void f(int& i) {
  i = Size;
  Size = i; // expected-error{{expression is not assignable}}
}
template<typename T>
const T& min(const T&, const T&);
void f2() {
  int x;
  A< typeof(x>1) > a;
}
// PR3844
template <> struct S<int> { }; // expected-error{{explicit specialization of non-template struct 'S'}}
namespace PR6184 {
  namespace N {
    template <typename T>
    void bar(typename T::x);
  }
  template <typename T>
  void N::bar(typename T::x) { }
}
/*
* Licensed to GraphHopper GmbH under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* GraphHopper GmbH licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.graphhopper.jsprit.io.algorithm;
import org.apache.commons.configuration.XMLConfiguration;
/**
 * Thin wrapper around an Apache Commons {@link XMLConfiguration} that holds
 * the algorithm settings.
 */
public class AlgorithmConfig {

    /** Backing XML configuration; created empty and exposed via the getter. */
    private final XMLConfiguration xmlConfig = new XMLConfiguration();

    /** Creates a config backed by a fresh, empty XML configuration. */
    public AlgorithmConfig() {
    }

    /**
     * @return the underlying XML configuration, never {@code null}
     */
    public XMLConfiguration getXMLConfiguration() {
        return xmlConfig;
    }

}
# Copyright (c) 2014 VMware, Inc.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import mock
from oslo_vmware import vim_util
from nova import exception
from nova import test
from nova.tests.unit.virt.vmwareapi import fake
from nova.tests.unit.virt.vmwareapi import stubs
from nova.virt.vmwareapi import driver
from nova.virt.vmwareapi import network_util
from nova.virt.vmwareapi import vm_util
# Lightweight stand-ins for the result objects returned by the vim_util
# property-collector calls that network_util issues.
ResultSet = collections.namedtuple('ResultSet', ['objects'])
ObjectContent = collections.namedtuple('ObjectContent', ['obj', 'propSet'])
DynamicProperty = collections.namedtuple('DynamicProperty', ['name', 'val'])
class GetNetworkWithTheNameTestCase(test.NoDBTestCase):
    def setUp(self):
        # Reset the fake vCenter state and stub out the session/vim plumbing
        # so that a VMwareAPISession can be built without real connectivity.
        super(GetNetworkWithTheNameTestCase, self).setUp()
        fake.reset()
        self.stub_out('nova.virt.vmwareapi.driver.VMwareAPISession.vim',
                      stubs.fake_vim_prop)
        self.stub_out('nova.virt.vmwareapi.driver.'
                      'VMwareAPISession.is_vim_object',
                      stubs.fake_is_vim_object)
        self._session = driver.VMwareAPISession()
    def _build_cluster_networks(self, networks):
        """Returns a set of results for a cluster network lookup.

        This is an example:
        (ObjectContent){
           obj =
              (obj){
                 value = "domain-c7"
                 _type = "ClusterComputeResource"
              }
           propSet[] =
              (DynamicProperty){
                 name = "network"
                 val =
                    (ArrayOfManagedObjectReference){
                       ManagedObjectReference[] =
                          (ManagedObjectReference){
                             value = "network-54"
                             _type = "Network"
                          },
                          (ManagedObjectReference){
                             value = "dvportgroup-14"
                             _type = "DistributedVirtualPortgroup"
                          },
                    }
              },
        }]
        """
        objects = []
        # Single cluster object whose 'network' property lists the given
        # network morefs.
        obj = ObjectContent(obj=vim_util.get_moref("domain-c7",
                                                   "ClusterComputeResource"),
                            propSet=[])
        value = fake.DataObject()
        value.ManagedObjectReference = []
        for network in networks:
            value.ManagedObjectReference.append(network)
        obj.propSet.append(
            DynamicProperty(name='network',
                            val=value))
        objects.append(obj)
        return ResultSet(objects=objects)
def test_get_network_no_match(self):
net_morefs = [vim_util.get_moref("dvportgroup-135",
"DistributedVirtualPortgroup"),
vim_util.get_moref("dvportgroup-136",
"DistributedVirtualPortgroup")]
networks = self._build_cluster_networks(net_morefs)
self._continue_retrieval_called = False
def mock_call_method(module, method, *args, **kwargs):
if method == 'get_object_properties':
return networks
if method == 'get_object_property':
result = fake.DataObject()
result.name = 'no-match'
return result
if method == 'continue_retrieval':
self._continue_retrieval_called = True
with mock.patch.object(self._session, '_call_method',
mock_call_method):
res = network_util.get_network_with_the_name(self._session,
'fake_net',
'fake_cluster')
self.assertTrue(self._continue_retrieval_called)
self.assertIsNone(res)
def _get_network_dvs_match(self, name, token=False):
net_morefs = [vim_util.get_moref("dvportgroup-135",
"DistributedVirtualPortgroup")]
networks = self._build_cluster_networks(net_morefs)
def mock_call_method(module, method, *args, **kwargs):
if method == 'get_object_properties':
return networks
if method == 'get_object_property':
result = fake.DataObject()
if not token or self._continue_retrieval_called:
result.name = name
else:
result.name = 'fake_name'
result.key = 'fake_key'
result.distributedVirtualSwitch = 'fake_dvs'
return result
if method == 'continue_retrieval':
if token:
self._continue_retrieval_called = True
return networks
if method == 'cancel_retrieval':
self._cancel_retrieval_called = True
with mock.patch.object(self._session, '_call_method',
mock_call_method):
res = network_util.get_network_with_the_name(self._session,
'fake_net',
'fake_cluster')
self.assertIsNotNone(res)
def test_get_network_dvs_exact_match(self):
self._cancel_retrieval_called = False
self._get_network_dvs_match('fake_net')
self.assertTrue(self._cancel_retrieval_called)
def test_get_network_dvs_match(self):
self._cancel_retrieval_called = False
self._get_network_dvs_match('dvs_7-virtualwire-7-fake_net')
self.assertTrue(self._cancel_retrieval_called)
def test_get_network_dvs_match_with_token(self):
self._continue_retrieval_called = False
self._cancel_retrieval_called = False
self._get_network_dvs_match('dvs_7-virtualwire-7-fake_net',
token=True)
self.assertTrue(self._continue_retrieval_called)
self.assertTrue(self._cancel_retrieval_called)
def test_get_network_network_match(self):
net_morefs = [vim_util.get_moref("network-54", "Network")]
networks = self._build_cluster_networks(net_morefs)
def mock_call_method(module, method, *args, **kwargs):
if method == 'get_object_properties':
return networks
if method == 'get_object_property':
return 'fake_net'
with mock.patch.object(self._session, '_call_method',
mock_call_method):
res = network_util.get_network_with_the_name(self._session,
'fake_net',
'fake_cluster')
self.assertIsNotNone(res)
class GetVlanIdAndVswitchForPortgroupTestCase(test.NoDBTestCase):
    """Tests for network_util.get_vlanid_and_vswitch_for_portgroup."""

    def _fake_port_groups(self):
        # Fake host response holding exactly one port group, named
        # 'port_group_name', on vlan 100 and vswitch 'vswitch_name'.
        spec = fake.DataObject()
        spec.name = 'port_group_name'
        spec.vlanId = 100
        group = fake.DataObject()
        group.vswitch = 'vswitch_name'
        group.spec = spec
        response = fake.DataObject()
        response.HostPortGroup = [group]
        return response

    @mock.patch.object(vm_util, 'get_host_ref')
    def test_no_port_groups(self, mock_get_host_ref):
        # No port groups at all: the helper must raise.
        session = mock.Mock()
        session._call_method.return_value = None
        self.assertRaises(
            exception.NovaException,
            network_util.get_vlanid_and_vswitch_for_portgroup,
            session,
            'port_group_name',
            'fake_cluster'
        )

    @mock.patch.object(vm_util, 'get_host_ref')
    def test_valid_port_group(self, mock_get_host_ref):
        # A matching name yields its vlan id and vswitch.
        session = mock.Mock()
        session._call_method.return_value = self._fake_port_groups()
        vlanid, vswitch = network_util.get_vlanid_and_vswitch_for_portgroup(
            session,
            'port_group_name',
            'fake_cluster'
        )
        self.assertEqual(vlanid, 100)
        self.assertEqual(vswitch, 'vswitch_name')

    @mock.patch.object(vm_util, 'get_host_ref')
    def test_unknown_port_group(self, mock_get_host_ref):
        # A non-matching name yields (None, None) rather than raising.
        session = mock.Mock()
        session._call_method.return_value = self._fake_port_groups()
        vlanid, vswitch = network_util.get_vlanid_and_vswitch_for_portgroup(
            session,
            'unknown_port_group',
            'fake_cluster'
        )
        self.assertIsNone(vlanid)
        self.assertIsNone(vswitch)
| zhimin711/nova | nova/tests/unit/virt/vmwareapi/test_network_util.py | Python | apache-2.0 | 9,191 |
require 'fog/libvirt'
require 'fog/compute'
require 'fog/libvirt/models/compute/util/util'
require 'fog/libvirt/models/compute/util/uri'
module Fog
  module Compute
    # Fog service definition for libvirt, backed by the ruby-libvirt gem.
    class Libvirt < Fog::Service
      requires :libvirt_uri
      recognizes :libvirt_username, :libvirt_password
      recognizes :libvirt_ip_command

      # Models and their collections exposed by this compute service.
      model_path 'fog/libvirt/models/compute'
      model :server
      collection :servers
      model :network
      collection :networks
      model :interface
      collection :interfaces
      model :volume
      collection :volumes
      model :pool
      collection :pools
      model :node
      collection :nodes
      model :nic
      collection :nics

      # Low-level requests implemented under the request path.
      request_path 'fog/libvirt/requests/compute'
      request :list_domains
      request :create_domain
      request :define_domain
      request :vm_action
      request :list_pools
      request :list_pool_volumes
      request :define_pool
      request :pool_action
      request :list_volumes
      request :volume_action
      request :create_volume
      request :list_networks
      request :destroy_network
      request :list_interfaces
      request :destroy_interface
      request :get_node_info
      request :update_display

      module Shared
        include Fog::Compute::LibvirtUtil
      end

      # In-memory mock used by the test suite; serves canned XML fixtures.
      class Mock
        include Shared

        def initialize(options={})
          # libvirt is part of the gem => ruby-libvirt
          require 'libvirt'
        end

        private
        def client
          return @client if defined?(@client)
        end

        #read mocks xml
        def read_xml(file_name)
          file_path = File.join(File.dirname(__FILE__),"requests","compute","mock_files",file_name)
          File.read(file_path)
        end
      end

      # Real connection to a libvirt daemon via the ruby-libvirt bindings.
      class Real
        include Shared
        attr_reader :client
        attr_reader :uri
        attr_reader :ip_command

        def initialize(options={})
          @uri = ::Fog::Compute::LibvirtUtil::URI.new(enhance_uri(options[:libvirt_uri]))
          @ip_command = options[:libvirt_ip_command]

          # libvirt is part of the gem => ruby-libvirt
          begin
            require 'libvirt'
          rescue LoadError => e
            retry if require('rubygems')
            raise e.message
          end

          begin
            # Use open_auth when credentials are supplied, otherwise a
            # plain (possibly anonymous) connection.
            if options[:libvirt_username] and options[:libvirt_password]
              @client = ::Libvirt::open_auth(uri.uri, [::Libvirt::CRED_AUTHNAME, ::Libvirt::CRED_PASSPHRASE]) do |cred|
                case cred['type']
                when ::Libvirt::CRED_AUTHNAME
                  options[:libvirt_username]
                when ::Libvirt::CRED_PASSPHRASE
                  options[:libvirt_password]
                end
              end
            else
              @client = ::Libvirt::open(uri.uri)
            end
          rescue ::Libvirt::ConnectionError
            raise Fog::Errors::Error.new("Error making a connection to libvirt URI #{uri.uri}:\n#{$!}")
          end
        end

        # Closes the underlying libvirt connection if it is still open.
        def terminate
          @client.close if @client and !@client.closed?
        end

        def enhance_uri(uri)
          require 'cgi'
          append=""
          # on macosx, chances are we are using libvirt through homebrew
          # the client will default to a socket location based on it's own location (/opt)
          # we conveniently point it to /var/run/libvirt/libvirt-sock
          # if no socket option has been specified explicitly
          if RUBY_PLATFORM =~ /darwin/
            querystring=::URI.parse(uri).query
            if querystring.nil?
              append="?socket=/var/run/libvirt/libvirt-sock"
            else
              if !::CGI.parse(querystring).has_key?("socket")
                append="&socket=/var/run/libvirt/libvirt-sock"
              end
            end
          end
          uri+append
        end
      end
    end
  end
end
| luna1x/chef-server | vendor/ruby/1.9.1/gems/fog-1.15.0/lib/fog/libvirt/compute.rb | Ruby | apache-2.0 | 3,975 |
#
# Author:: AJ Christensen (<aj@opscode.com>)
# Cookbook Name:: database
# Recipe:: snapshot
#
# Copyright 2009-2010, Opscode, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include_recipe "aws"
include_recipe "xfs"

# Abort the run early when any required db_snapshot attribute is missing.
# (`each` instead of `collect`: the returned array was never used.)
%w{ebs_vol_dev db_role app_environment username password aws_access_key_id aws_secret_access_key snapshots_to_keep volume_id}.each do |key|
  Chef::Application.fatal!("Required db_snapshot configuration #{key} not found.", -47) unless node.db_snapshot.has_key? key
end

# Bug fix: `localhost` was a bare identifier, which raises NameError at
# converge time -- the host must be the string "localhost".
connection_info = {:host => "localhost", :username => node.db_snapshot.username, :password => node.db_snapshot.password}

# Quiesce MySQL so the on-disk state is consistent for the snapshot.
mysql_database "locking tables for #{node.db_snapshot.app_environment}" do
  connection connection_info
  sql "flush tables with read lock"
  action :query
end

# Freeze the XFS filesystem backing the EBS volume before snapshotting.
execute "xfs freeze" do
  command "xfs_freeze -f #{node.db_snapshot.ebs_vol_dev}"
end

aws_ebs_volume "#{node.db_snapshot.db_role.first}_#{node.db_snapshot.app_environment}" do
  aws_access_key node.db_snapshot.aws_access_key_id
  aws_secret_access_key node.db_snapshot.aws_secret_access_key
  size 50
  device node.db_snapshot.ebs_vol_dev
  snapshots_to_keep node.db_snapshot.snapshots_to_keep
  action :snapshot
  volume_id node.db_snapshot.volume_id
  ignore_failure true # if this fails, continue to unfreeze and unlock
end

# Thaw the filesystem and release the MySQL read lock again.
execute "xfs unfreeze" do
  command "xfs_freeze -u #{node.db_snapshot.ebs_vol_dev}"
end

mysql_database "unflushing tables for #{node.db_snapshot.app_environment}" do
  connection connection_info
  sql "unlock tables"
  action :query
end

# Drop snapshots beyond the configured snapshots_to_keep.
aws_ebs_volume "#{node.db_snapshot.db_role.first}_#{node.db_snapshot.app_environment}" do
  action :prune
end
| dagolden/opscode-cookbooks | database/recipes/snapshot.rb | Ruby | apache-2.0 | 2,156 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.flink.translation.wrappers.streaming.stableinput;
import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkNotNull;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import org.apache.flink.api.common.state.ListState;
/** A non-keyed implementation of a {@link BufferingElementsHandler}. */
/** A non-keyed implementation of a {@link BufferingElementsHandler}. */
public class NonKeyedBufferingElementsHandler<T> implements BufferingElementsHandler {

  /** Flink operator state backing the buffer; never null. */
  private final ListState<BufferedElement> state;

  static <T> NonKeyedBufferingElementsHandler<T> create(ListState<BufferedElement> elementState) {
    return new NonKeyedBufferingElementsHandler<>(elementState);
  }

  private NonKeyedBufferingElementsHandler(ListState<BufferedElement> elementState) {
    this.state = checkNotNull(elementState);
  }

  @Override
  public Stream<BufferedElement> getElements() {
    try {
      // Wrap the state backend's iterable in a sequential stream.
      Iterable<BufferedElement> buffered = state.get();
      return StreamSupport.stream(buffered.spliterator(), false);
    } catch (Exception e) {
      throw new RuntimeException("Failed to retrieve buffered element from state backend.", e);
    }
  }

  @Override
  public void buffer(BufferedElement element) {
    try {
      // Append a single element to the operator state.
      state.add(element);
    } catch (Exception e) {
      throw new RuntimeException("Failed to buffer element in state backend.", e);
    }
  }

  @Override
  public void clear() {
    state.clear();
  }
}
| lukecwik/incubator-beam | runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/stableinput/NonKeyedBufferingElementsHandler.java | Java | apache-2.0 | 2,224 |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.job.entries.simpleeval;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.regex.Pattern;
import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.row.value.ValueMetaString;
import org.pentaho.di.core.util.StringUtil;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.job.entry.JobEntryBase;
import org.pentaho.di.job.entry.JobEntryInterface;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.metastore.api.IMetaStore;
import org.w3c.dom.Node;
/**
* This defines a 'simple evaluation' job entry.
*
* @author Samatar Hassan
* @since 01-01-2009
*/
public class JobEntrySimpleEval extends JobEntryBase implements Cloneable, JobEntryInterface {
  private static Class<?> PKG = JobEntrySimpleEval.class; // for i18n purposes, needed by Translator2!!

  // --- Where the evaluated value comes from: previous-result field or variable.
  public static final String[] valueTypeDesc = new String[] {
    BaseMessages.getString( PKG, "JobSimpleEval.EvalPreviousField.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.EvalVariable.Label" ),

  };
  public static final String[] valueTypeCode = new String[] { "field", "variable" };
  public static final int VALUE_TYPE_FIELD = 0;
  public static final int VALUE_TYPE_VARIABLE = 1;
  public int valuetype;

  // --- String comparison conditions (desc[i] corresponds to code[i]).
  public static final String[] successConditionDesc = new String[] {
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenEqual.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenDifferent.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenContains.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenNotContains.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenStartWith.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenNotStartWith.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenEndWith.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenNotEndWith.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenRegExp.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenInList.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenNotInList.Label" ) };

  public static final String[] successConditionCode = new String[] {
    "equal", "different", "contains", "notcontains", "startswith", "notstatwith", "endswith", "notendwith",
    "regexp", "inlist", "notinlist" };

  public static final int SUCCESS_CONDITION_EQUAL = 0;
  public static final int SUCCESS_CONDITION_DIFFERENT = 1;
  public static final int SUCCESS_CONDITION_CONTAINS = 2;
  public static final int SUCCESS_CONDITION_NOT_CONTAINS = 3;
  public static final int SUCCESS_CONDITION_START_WITH = 4;
  public static final int SUCCESS_CONDITION_NOT_START_WITH = 5;
  public static final int SUCCESS_CONDITION_END_WITH = 6;
  public static final int SUCCESS_CONDITION_NOT_END_WITH = 7;
  public static final int SUCCESS_CONDITION_REGEX = 8;
  public static final int SUCCESS_CONDITION_IN_LIST = 9;
  public static final int SUCCESS_CONDITION_NOT_IN_LIST = 10;

  public int successcondition;

  // --- Data type used when interpreting the value.
  public static final String[] fieldTypeDesc = new String[] {
    BaseMessages.getString( PKG, "JobSimpleEval.FieldTypeString.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.FieldTypeNumber.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.FieldTypeDateTime.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.FieldTypeBoolean.Label" ),

  };
  public static final String[] fieldTypeCode = new String[] { "string", "number", "datetime", "boolean" };
  public static final int FIELD_TYPE_STRING = 0;
  public static final int FIELD_TYPE_NUMBER = 1;
  public static final int FIELD_TYPE_DATE_TIME = 2;
  public static final int FIELD_TYPE_BOOLEAN = 3;

  public int fieldtype;

  // --- Numeric comparison conditions.
  public static final String[] successNumberConditionDesc = new String[] {
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenEqual.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenDifferent.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenSmallThan.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenSmallOrEqualThan.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenGreaterThan.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenGreaterOrEqualThan.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessBetween.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenInList.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenNotInList.Label" ), };

  public static final String[] successNumberConditionCode = new String[] {
    "equal", "different", "smaller", "smallequal", "greater", "greaterequal", "between", "inlist", "notinlist" };

  public static final int SUCCESS_NUMBER_CONDITION_EQUAL = 0;
  public static final int SUCCESS_NUMBER_CONDITION_DIFFERENT = 1;
  public static final int SUCCESS_NUMBER_CONDITION_SMALLER = 2;
  public static final int SUCCESS_NUMBER_CONDITION_SMALLER_EQUAL = 3;
  public static final int SUCCESS_NUMBER_CONDITION_GREATER = 4;
  public static final int SUCCESS_NUMBER_CONDITION_GREATER_EQUAL = 5;
  public static final int SUCCESS_NUMBER_CONDITION_BETWEEN = 6;
  public static final int SUCCESS_NUMBER_CONDITION_IN_LIST = 7;
  public static final int SUCCESS_NUMBER_CONDITION_NOT_IN_LIST = 8;

  public int successnumbercondition;

  // --- Boolean evaluation conditions.
  public static final String[] successBooleanConditionDesc = new String[] {
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenTrue.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenFalse.Label" )

  };
  public static final String[] successBooleanConditionCode = new String[] { "true", "false" };
  public static final int SUCCESS_BOOLEAN_CONDITION_TRUE = 0;
  public static final int SUCCESS_BOOLEAN_CONDITION_FALSE = 1;

  public int successbooleancondition;

  // --- Per-entry settings serialized to XML / the repository.
  private String fieldname;
  private String variablename;
  private String mask;
  private String comparevalue;
  private String minvalue;
  private String maxvalue;

  private boolean successwhenvarset;
public JobEntrySimpleEval( String n ) {
super( n, "" );
valuetype = VALUE_TYPE_FIELD;
successcondition = SUCCESS_CONDITION_EQUAL;
successnumbercondition = SUCCESS_NUMBER_CONDITION_EQUAL;
successbooleancondition = SUCCESS_BOOLEAN_CONDITION_FALSE;
minvalue = null;
maxvalue = null;
comparevalue = null;
fieldname = null;
variablename = null;
fieldtype = FIELD_TYPE_STRING;
mask = null;
successwhenvarset = false;
}
  /** Creates a simple-evaluation job entry with an empty name and defaults. */
  public JobEntrySimpleEval() {
    this( "" );
  }
@Override
public Object clone() {
JobEntrySimpleEval je = (JobEntrySimpleEval) super.clone();
return je;
}
private static String getValueTypeCode( int i ) {
if ( i < 0 || i >= valueTypeCode.length ) {
return valueTypeCode[0];
}
return valueTypeCode[i];
}
private static String getFieldTypeCode( int i ) {
if ( i < 0 || i >= fieldTypeCode.length ) {
return fieldTypeCode[0];
}
return fieldTypeCode[i];
}
private static String getSuccessConditionCode( int i ) {
if ( i < 0 || i >= successConditionCode.length ) {
return successConditionCode[0];
}
return successConditionCode[i];
}
public static String getSuccessNumberConditionCode( int i ) {
if ( i < 0 || i >= successNumberConditionCode.length ) {
return successNumberConditionCode[0];
}
return successNumberConditionCode[i];
}
private static String getSuccessBooleanConditionCode( int i ) {
if ( i < 0 || i >= successBooleanConditionCode.length ) {
return successBooleanConditionCode[0];
}
return successBooleanConditionCode[i];
}
  /**
   * Serializes this entry's settings as an XML fragment for the .kjb file.
   * Tag names must match those read back in {@code loadXML}.
   *
   * @return the XML representation of this job entry
   */
  @Override
  public String getXML() {
    StringBuilder retval = new StringBuilder( 300 );
    retval.append( super.getXML() );
    retval.append( "      " ).append( XMLHandler.addTagValue( "valuetype", getValueTypeCode( valuetype ) ) );
    retval.append( "      " ).append( XMLHandler.addTagValue( "fieldname", fieldname ) );
    retval.append( "      " ).append( XMLHandler.addTagValue( "variablename", variablename ) );
    retval.append( "      " ).append( XMLHandler.addTagValue( "fieldtype", getFieldTypeCode( fieldtype ) ) );
    retval.append( "      " ).append( XMLHandler.addTagValue( "mask", mask ) );
    retval.append( "      " ).append( XMLHandler.addTagValue( "comparevalue", comparevalue ) );
    retval.append( "      " ).append( XMLHandler.addTagValue( "minvalue", minvalue ) );
    retval.append( "      " ).append( XMLHandler.addTagValue( "maxvalue", maxvalue ) );
    retval.append( "      " ).append(
      XMLHandler.addTagValue( "successcondition", getSuccessConditionCode( successcondition ) ) );
    retval
      .append( "      " ).append(
        XMLHandler.addTagValue(
          "successnumbercondition", getSuccessNumberConditionCode( successnumbercondition ) ) );
    retval.append( "      " ).append(
      XMLHandler.addTagValue(
        "successbooleancondition", getSuccessBooleanConditionCode( successbooleancondition ) ) );
    retval.append( "      " ).append( XMLHandler.addTagValue( "successwhenvarset", successwhenvarset ) );
    return retval.toString();
  }
private static int getValueTypeByCode( String tt ) {
if ( tt == null ) {
return 0;
}
for ( int i = 0; i < valueTypeCode.length; i++ ) {
if ( valueTypeCode[i].equalsIgnoreCase( tt ) ) {
return i;
}
}
return 0;
}
private static int getSuccessNumberByCode( String tt ) {
if ( tt == null ) {
return 0;
}
for ( int i = 0; i < successNumberConditionCode.length; i++ ) {
if ( successNumberConditionCode[i].equalsIgnoreCase( tt ) ) {
return i;
}
}
return 0;
}
private static int getSuccessBooleanByCode( String tt ) {
if ( tt == null ) {
return 0;
}
for ( int i = 0; i < successBooleanConditionCode.length; i++ ) {
if ( successBooleanConditionCode[i].equalsIgnoreCase( tt ) ) {
return i;
}
}
return 0;
}
private static int getFieldTypeByCode( String tt ) {
if ( tt == null ) {
return 0;
}
for ( int i = 0; i < fieldTypeCode.length; i++ ) {
if ( fieldTypeCode[i].equalsIgnoreCase( tt ) ) {
return i;
}
}
return 0;
}
private static int getSuccessConditionByCode( String tt ) {
if ( tt == null ) {
return 0;
}
for ( int i = 0; i < successConditionCode.length; i++ ) {
if ( successConditionCode[i].equalsIgnoreCase( tt ) ) {
return i;
}
}
return 0;
}
  /**
   * Sets whether this entry succeeds merely because the variable is defined.
   *
   * @param successwhenvarset true to succeed whenever the variable is set
   */
  public void setSuccessWhenVarSet( boolean successwhenvarset ) {
    this.successwhenvarset = successwhenvarset;
  }
  /**
   * @return true when this entry succeeds merely because the variable is set
   */
  public boolean isSuccessWhenVarSet() {
    return this.successwhenvarset;
  }
public static int getSuccessNumberConditionByCode( String tt ) {
if ( tt == null ) {
return 0;
}
for ( int i = 0; i < successNumberConditionCode.length; i++ ) {
if ( successNumberConditionCode[i].equalsIgnoreCase( tt ) ) {
return i;
}
}
return 0;
}
private static int getSuccessBooleanConditionByCode( String tt ) {
if ( tt == null ) {
return 0;
}
for ( int i = 0; i < successBooleanConditionCode.length; i++ ) {
if ( successBooleanConditionCode[i].equalsIgnoreCase( tt ) ) {
return i;
}
}
return 0;
}
  /**
   * Restores this entry's settings from the XML produced by {@code getXML}.
   * Unknown/missing codes fall back to index 0 via the *ByCode helpers.
   *
   * @throws KettleXMLException when the node cannot be parsed
   */
  @Override
  public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
    Repository rep, IMetaStore metaStore ) throws KettleXMLException {
    try {
      super.loadXML( entrynode, databases, slaveServers );
      valuetype = getValueTypeByCode( Const.NVL( XMLHandler.getTagValue( entrynode, "valuetype" ), "" ) );
      fieldname = XMLHandler.getTagValue( entrynode, "fieldname" );
      fieldtype = getFieldTypeByCode( Const.NVL( XMLHandler.getTagValue( entrynode, "fieldtype" ), "" ) );
      variablename = XMLHandler.getTagValue( entrynode, "variablename" );
      mask = XMLHandler.getTagValue( entrynode, "mask" );
      comparevalue = XMLHandler.getTagValue( entrynode, "comparevalue" );
      minvalue = XMLHandler.getTagValue( entrynode, "minvalue" );
      maxvalue = XMLHandler.getTagValue( entrynode, "maxvalue" );

      successcondition =
        getSuccessConditionByCode( Const.NVL( XMLHandler.getTagValue( entrynode, "successcondition" ), "" ) );
      successnumbercondition =
        getSuccessNumberConditionByCode( Const.NVL(
          XMLHandler.getTagValue( entrynode, "successnumbercondition" ), "" ) );
      successbooleancondition =
        getSuccessBooleanConditionByCode( Const.NVL( XMLHandler.getTagValue(
          entrynode, "successbooleancondition" ), "" ) );
      // Boolean flag is stored as "Y"/"N" in the XML.
      successwhenvarset = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "successwhenvarset" ) );
    } catch ( KettleXMLException xe ) {
      throw new KettleXMLException(
        BaseMessages.getString( PKG, "JobEntrySimple.Error.Exception.UnableLoadXML" ), xe );
    }
  }
  /**
   * Restores this entry's settings from the Kettle repository.
   * Attribute names mirror the XML tag names used by {@code getXML}.
   *
   * @throws KettleException when the repository attributes cannot be read
   */
  @Override
  public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers ) throws KettleException {
    try {
      valuetype = getValueTypeByCode( Const.NVL( rep.getJobEntryAttributeString( id_jobentry, "valuetype" ), "" ) );
      fieldname = rep.getJobEntryAttributeString( id_jobentry, "fieldname" );
      variablename = rep.getJobEntryAttributeString( id_jobentry, "variablename" );
      fieldtype = getFieldTypeByCode( Const.NVL( rep.getJobEntryAttributeString( id_jobentry, "fieldtype" ), "" ) );
      mask = rep.getJobEntryAttributeString( id_jobentry, "mask" );
      comparevalue = rep.getJobEntryAttributeString( id_jobentry, "comparevalue" );
      minvalue = rep.getJobEntryAttributeString( id_jobentry, "minvalue" );
      maxvalue = rep.getJobEntryAttributeString( id_jobentry, "maxvalue" );

      successcondition =
        getSuccessConditionByCode( Const.NVL(
          rep.getJobEntryAttributeString( id_jobentry, "successcondition" ), "" ) );
      successnumbercondition =
        getSuccessNumberConditionByCode( Const.NVL( rep.getJobEntryAttributeString(
          id_jobentry, "successnumbercondition" ), "" ) );
      successbooleancondition =
        getSuccessBooleanConditionByCode( Const.NVL( rep.getJobEntryAttributeString(
          id_jobentry, "successbooleancondition" ), "" ) );
      successwhenvarset = rep.getJobEntryAttributeBoolean( id_jobentry, "successwhenvarset" );
    } catch ( KettleException dbe ) {
      throw new KettleException( BaseMessages.getString( PKG, "JobEntrySimple.Error.Exception.UnableLoadRep" )
        + id_jobentry, dbe );
    }
  }
  /**
   * Persists this entry's settings to the Kettle repository, using the same
   * attribute names that {@code loadRep} reads back.
   *
   * @throws KettleException when the attributes cannot be written
   */
  @Override
  public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
    try {
      rep.saveJobEntryAttribute( id_job, getObjectId(), "valuetype", getValueTypeCode( valuetype ) );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "fieldname", fieldname );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "variablename", variablename );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "fieldtype", getFieldTypeCode( fieldtype ) );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "mask", mask );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "comparevalue", comparevalue );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "minvalue", minvalue );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "maxvalue", maxvalue );

      rep.saveJobEntryAttribute(
        id_job, getObjectId(), "successcondition", getSuccessConditionCode( successcondition ) );
      rep
        .saveJobEntryAttribute(
          id_job, getObjectId(), "successnumbercondition",
          getSuccessNumberConditionCode( successnumbercondition ) );
      rep.saveJobEntryAttribute(
        id_job, getObjectId(), "successbooleancondition",
        getSuccessBooleanConditionCode( successbooleancondition ) );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "successwhenvarset", successwhenvarset );
    } catch ( KettleDatabaseException dbe ) {
      throw new KettleException( BaseMessages.getString( PKG, "JobEntrySimple.Error.Exception.UnableSaveRep" )
        + id_job, dbe );
    }
  }
@Override
public Result execute( Result previousResult, int nr ) throws KettleException {
Result result = previousResult;
result.setNrErrors( 1 );
result.setResult( false );
String sourcevalue = null;
switch ( valuetype ) {
case VALUE_TYPE_FIELD:
List<RowMetaAndData> rows = result.getRows();
RowMetaAndData resultRow = null;
if ( isDetailed() ) {
logDetailed( BaseMessages.getString( PKG, "JobEntrySimpleEval.Log.ArgFromPrevious.Found", ( rows != null
? rows.size() : 0 )
+ "" ) );
}
if ( rows.size() == 0 ) {
rows = null;
logError( BaseMessages.getString( PKG, "JobEntrySimpleEval.Error.NoRows" ) );
return result;
}
// get first row
resultRow = rows.get( 0 );
String realfieldname = environmentSubstitute( fieldname );
int indexOfField = -1;
indexOfField = resultRow.getRowMeta().indexOfValue( realfieldname );
if ( indexOfField == -1 ) {
logError( BaseMessages.getString( PKG, "JobEntrySimpleEval.Error.FieldNotExist", realfieldname ) );
resultRow = null;
rows = null;
return result;
}
sourcevalue = resultRow.getString( indexOfField, null );
if ( sourcevalue == null ) {
sourcevalue = "";
}
resultRow = null;
rows = null;
break;
case VALUE_TYPE_VARIABLE:
if ( Utils.isEmpty( variablename ) ) {
logError( BaseMessages.getString( PKG, "JobEntrySimpleEval.Error.VariableMissing" ) );
return result;
}
if ( isSuccessWhenVarSet() ) {
// return variable name
// remove specifications if needed
String variableName = StringUtil.getVariableName( Const.NVL( getVariableName(), "" ) );
// Get value, if the variable is not set, Null will be returned
String value = getVariable( variableName );
if ( value != null ) {
if ( isDetailed() ) {
logDetailed( BaseMessages.getString( PKG, "JobEntrySimpleEval.VariableSet", variableName ) );
}
result.setResult( true );
result.setNrErrors( 0 );
return result;
} else {
if ( isDetailed() ) {
logDetailed( BaseMessages.getString( PKG, "JobEntrySimpleEval.VariableNotSet", variableName ) );
}
// PDI-6943: this job entry does not set errors upon evaluation, independently of the outcome of the check
result.setNrErrors( 0 );
return result;
}
}
sourcevalue = environmentSubstitute( getVariableWithSpec() );
break;
default:
break;
}
if ( isDetailed() ) {
logDetailed( BaseMessages.getString( PKG, "JobSimpleEval.Log.ValueToevaluate", sourcevalue ) );
}
boolean success = false;
String realCompareValue = environmentSubstitute( comparevalue );
if ( realCompareValue == null ) {
realCompareValue = "";
}
String realMinValue = environmentSubstitute( minvalue );
String realMaxValue = environmentSubstitute( maxvalue );
switch ( fieldtype ) {
case FIELD_TYPE_STRING:
switch ( successcondition ) {
case SUCCESS_CONDITION_EQUAL: // equal
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
success = ( sourcevalue.equals( realCompareValue ) );
if ( valuetype == VALUE_TYPE_VARIABLE && !success ) {
// make the empty value evaluate to true when compared to a not set variable
if ( Utils.isEmpty( realCompareValue ) ) {
String variableName = StringUtil.getVariableName( variablename );
if ( System.getProperty( variableName ) == null ) {
success = true;
}
}
}
break;
case SUCCESS_CONDITION_DIFFERENT: // different
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
success = ( !sourcevalue.equals( realCompareValue ) );
break;
case SUCCESS_CONDITION_CONTAINS: // contains
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
success = ( sourcevalue.contains( realCompareValue ) );
break;
case SUCCESS_CONDITION_NOT_CONTAINS: // not contains
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
success = ( !sourcevalue.contains( realCompareValue ) );
break;
case SUCCESS_CONDITION_START_WITH: // starts with
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
success = ( sourcevalue.startsWith( realCompareValue ) );
break;
case SUCCESS_CONDITION_NOT_START_WITH: // not start with
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
success = ( !sourcevalue.startsWith( realCompareValue ) );
break;
case SUCCESS_CONDITION_END_WITH: // ends with
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
success = ( sourcevalue.endsWith( realCompareValue ) );
break;
case SUCCESS_CONDITION_NOT_END_WITH: // not ends with
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
success = ( !sourcevalue.endsWith( realCompareValue ) );
break;
case SUCCESS_CONDITION_REGEX: // regexp
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
success = ( Pattern.compile( realCompareValue ).matcher( sourcevalue ).matches() );
break;
case SUCCESS_CONDITION_IN_LIST: // in list
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
realCompareValue = Const.NVL( realCompareValue, "" );
String[] parts = realCompareValue.split( "," );
for ( int i = 0; i < parts.length && !success; i++ ) {
success = ( sourcevalue.equals( parts[i].trim() ) );
}
break;
case SUCCESS_CONDITION_NOT_IN_LIST: // not in list
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
realCompareValue = Const.NVL( realCompareValue, "" );
parts = realCompareValue.split( "," );
success = true;
for ( int i = 0; i < parts.length && success; i++ ) {
success = !( sourcevalue.equals( parts[i].trim() ) );
}
break;
default:
break;
}
break;
case FIELD_TYPE_NUMBER:
double valuenumber;
try {
valuenumber = Double.parseDouble( sourcevalue );
} catch ( Exception e ) {
logError( BaseMessages.getString( PKG, "JobEntrySimpleEval.Error.UnparsableNumber", sourcevalue, e
.getMessage() ) );
return result;
}
double valuecompare;
switch ( successnumbercondition ) {
case SUCCESS_NUMBER_CONDITION_EQUAL: // equal
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
try {
valuecompare = Double.parseDouble( realCompareValue );
} catch ( Exception e ) {
logError( BaseMessages.getString(
PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realCompareValue, e.getMessage() ) );
return result;
}
success = ( valuenumber == valuecompare );
break;
case SUCCESS_NUMBER_CONDITION_DIFFERENT: // different
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
try {
valuecompare = Double.parseDouble( realCompareValue );
} catch ( Exception e ) {
logError( BaseMessages.getString(
PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realCompareValue, e.getMessage() ) );
return result;
}
success = ( valuenumber != valuecompare );
break;
case SUCCESS_NUMBER_CONDITION_SMALLER: // smaller
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
try {
valuecompare = Double.parseDouble( realCompareValue );
} catch ( Exception e ) {
logError( BaseMessages.getString(
PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realCompareValue, e.getMessage() ) );
return result;
}
success = ( valuenumber < valuecompare );
break;
case SUCCESS_NUMBER_CONDITION_SMALLER_EQUAL: // smaller or equal
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
try {
valuecompare = Double.parseDouble( realCompareValue );
} catch ( Exception e ) {
logError( BaseMessages.getString(
PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realCompareValue, e.getMessage() ) );
return result;
}
success = ( valuenumber <= valuecompare );
break;
case SUCCESS_NUMBER_CONDITION_GREATER: // greater
try {
valuecompare = Double.parseDouble( realCompareValue );
} catch ( Exception e ) {
logError( BaseMessages.getString(
PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realCompareValue, e.getMessage() ) );
return result;
}
success = ( valuenumber > valuecompare );
break;
case SUCCESS_NUMBER_CONDITION_GREATER_EQUAL: // greater or equal
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
try {
valuecompare = Double.parseDouble( realCompareValue );
} catch ( Exception e ) {
logError( BaseMessages.getString(
PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realCompareValue, e.getMessage() ) );
return result;
}
success = ( valuenumber >= valuecompare );
break;
case SUCCESS_NUMBER_CONDITION_BETWEEN: // between min and max
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValues", realMinValue, realMaxValue ) );
}
double valuemin;
try {
valuemin = Double.parseDouble( realMinValue );
} catch ( Exception e ) {
logError( BaseMessages.getString( PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realMinValue, e
.getMessage() ) );
return result;
}
double valuemax;
try {
valuemax = Double.parseDouble( realMaxValue );
} catch ( Exception e ) {
logError( BaseMessages.getString( PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realMaxValue, e
.getMessage() ) );
return result;
}
if ( valuemin >= valuemax ) {
logError( BaseMessages.getString(
PKG, "JobEntrySimpleEval.Error.IncorrectNumbers", realMinValue, realMaxValue ) );
return result;
}
success = ( valuenumber >= valuemin && valuenumber <= valuemax );
break;
case SUCCESS_NUMBER_CONDITION_IN_LIST: // in list
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
String[] parts = realCompareValue.split( "," );
for ( int i = 0; i < parts.length && !success; i++ ) {
try {
valuecompare = Double.parseDouble( parts[i] );
} catch ( Exception e ) {
logError( toString(), BaseMessages.getString(
PKG, "JobEntrySimpleEval.Error.UnparsableNumber", parts[i], e.getMessage() ) );
return result;
}
success = ( valuenumber == valuecompare );
}
break;
case SUCCESS_NUMBER_CONDITION_NOT_IN_LIST: // not in list
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
realCompareValue = Const.NVL( realCompareValue, "" );
parts = realCompareValue.split( "," );
success = true;
for ( int i = 0; i < parts.length && success; i++ ) {
try {
valuecompare = Double.parseDouble( parts[i] );
} catch ( Exception e ) {
logError( toString(), BaseMessages.getString(
PKG, "JobEntrySimpleEval.Error.UnparsableNumber", parts[i], e.getMessage() ) );
return result;
}
success = ( valuenumber != valuecompare );
}
break;
default:
break;
}
break;
case FIELD_TYPE_DATE_TIME:
String realMask = environmentSubstitute( mask );
SimpleDateFormat df = new SimpleDateFormat();
if ( !Utils.isEmpty( realMask ) ) {
df.applyPattern( realMask );
}
Date datevalue = null;
try {
datevalue = convertToDate( sourcevalue, realMask, df );
} catch ( Exception e ) {
logError( e.getMessage() );
return result;
}
Date datecompare;
switch ( successnumbercondition ) {
case SUCCESS_NUMBER_CONDITION_EQUAL: // equal
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
try {
datecompare = convertToDate( realCompareValue, realMask, df );
} catch ( Exception e ) {
logError( e.getMessage() );
return result;
}
success = ( datevalue.equals( datecompare ) );
break;
case SUCCESS_NUMBER_CONDITION_DIFFERENT: // different
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
try {
datecompare = convertToDate( realCompareValue, realMask, df );
} catch ( Exception e ) {
logError( e.getMessage() );
return result;
}
success = ( !datevalue.equals( datecompare ) );
break;
case SUCCESS_NUMBER_CONDITION_SMALLER: // smaller
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
try {
datecompare = convertToDate( realCompareValue, realMask, df );
} catch ( Exception e ) {
logError( e.getMessage() );
return result;
}
success = ( datevalue.before( datecompare ) );
break;
case SUCCESS_NUMBER_CONDITION_SMALLER_EQUAL: // smaller or equal
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
try {
datecompare = convertToDate( realCompareValue, realMask, df );
} catch ( Exception e ) {
logError( e.getMessage() );
return result;
}
success = ( datevalue.before( datecompare ) || datevalue.equals( datecompare ) );
break;
case SUCCESS_NUMBER_CONDITION_GREATER: // greater
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
try {
datecompare = convertToDate( realCompareValue, realMask, df );
} catch ( Exception e ) {
logError( e.getMessage() );
return result;
}
success = ( datevalue.after( datecompare ) );
break;
case SUCCESS_NUMBER_CONDITION_GREATER_EQUAL: // greater or equal
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
try {
datecompare = convertToDate( realCompareValue, realMask, df );
} catch ( Exception e ) {
logError( e.getMessage() );
return result;
}
success = ( datevalue.after( datecompare ) || datevalue.equals( datecompare ) );
break;
case SUCCESS_NUMBER_CONDITION_BETWEEN: // between min and max
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValues", realMinValue, realMaxValue ) );
}
Date datemin;
try {
datemin = convertToDate( realMinValue, realMask, df );
} catch ( Exception e ) {
logError( e.getMessage() );
return result;
}
Date datemax;
try {
datemax = convertToDate( realMaxValue, realMask, df );
} catch ( Exception e ) {
logError( e.getMessage() );
return result;
}
if ( datemin.after( datemax ) || datemin.equals( datemax ) ) {
logError( BaseMessages.getString(
PKG, "JobEntrySimpleEval.Error.IncorrectDates", realMinValue, realMaxValue ) );
return result;
}
success =
( ( datevalue.after( datemin )
|| datevalue.equals( datemin ) ) && ( datevalue.before( datemax )
|| datevalue.equals( datemax ) ) );
break;
case SUCCESS_NUMBER_CONDITION_IN_LIST: // in list
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
String[] parts = realCompareValue.split( "," );
for ( int i = 0; i < parts.length && !success; i++ ) {
try {
datecompare = convertToDate( realCompareValue, realMask, df );
} catch ( Exception e ) {
logError( toString(), e.getMessage() );
return result;
}
success = ( datevalue.equals( datecompare ) );
}
break;
case SUCCESS_NUMBER_CONDITION_NOT_IN_LIST: // not in list
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
realCompareValue = Const.NVL( realCompareValue, "" );
parts = realCompareValue.split( "," );
success = true;
for ( int i = 0; i < parts.length && success; i++ ) {
try {
datecompare = convertToDate( realCompareValue, realMask, df );
} catch ( Exception e ) {
logError( toString(), e.getMessage() );
return result;
}
success = ( !datevalue.equals( datecompare ) );
}
break;
default:
break;
}
df = null;
break;
case FIELD_TYPE_BOOLEAN:
boolean valuebool;
try {
valuebool = ValueMetaString.convertStringToBoolean( sourcevalue );
} catch ( Exception e ) {
logError( BaseMessages.getString( PKG, "JobEntrySimpleEval.Error.UnparsableBoolean", sourcevalue, e
.getMessage() ) );
return result;
}
switch ( successbooleancondition ) {
case SUCCESS_BOOLEAN_CONDITION_FALSE: // false
success = ( !valuebool );
break;
case SUCCESS_BOOLEAN_CONDITION_TRUE: // true
success = ( valuebool );
break;
default:
break;
}
break;
default:
break;
}
result.setResult( success );
// PDI-6943: this job entry does not set errors upon evaluation, independently of the outcome of the check
result.setNrErrors( 0 );
return result;
}
/**
 * Returns the configured variable name wrapped in variable-specification
 * syntax (e.g. ${name}) when the raw name carries no open/close markers of
 * its own; otherwise returns the name unchanged.
 */
private String getVariableWithSpec() {
  String variable = getVariableName();
  boolean hasOpenMarker =
    variable.contains( StringUtil.UNIX_OPEN )
      || variable.contains( StringUtil.WINDOWS_OPEN )
      || variable.contains( StringUtil.HEX_OPEN );
  boolean hasCloseMarker =
    variable.contains( StringUtil.UNIX_CLOSE )
      || variable.contains( StringUtil.WINDOWS_CLOSE )
      || variable.contains( StringUtil.HEX_CLOSE );
  if ( !hasOpenMarker && !hasCloseMarker ) {
    // Bare name: wrap it in the UNIX-style specification, ${...}
    variable = StringUtil.UNIX_OPEN + variable + StringUtil.UNIX_CLOSE;
    if ( isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "JobEntrySimpleEval.CheckingVariable", variable ) );
    }
  }
  return variable;
}
/**
 * Parses {@code valueString} into a {@link Date} using the supplied formatter.
 *
 * @param valueString the text to parse
 * @param mask        the date mask; the pattern is expected to have been
 *                    applied to {@code df} by the caller already (this
 *                    parameter is not used for parsing here)
 * @param df          the formatter to parse with
 * @return the parsed date
 * @throws KettleException if the text cannot be parsed with the formatter
 */
private Date convertToDate( String valueString, String mask, SimpleDateFormat df ) throws KettleException {
  try {
    return df.parse( valueString );
  } catch ( Exception e ) {
    // Preserve the original parse failure as the cause so it is not lost
    // from the logs (the previous code discarded it).
    throw new KettleException( BaseMessages.getString(
      PKG, "JobEntrySimpleEval.Error.UnparsableDate", valueString ), e );
  }
}
/**
 * Returns the localized description for the given value type index.
 * Out-of-range indexes fall back to the first (default) entry.
 */
public static String getValueTypeDesc( int i ) {
  if ( i < 0 || i >= valueTypeDesc.length ) {
    return valueTypeDesc[0];
  }
  return valueTypeDesc[i];
}

/**
 * Returns the localized description for the given field type index.
 * Out-of-range indexes fall back to the first (default) entry.
 */
public static String getFieldTypeDesc( int i ) {
  if ( i < 0 || i >= fieldTypeDesc.length ) {
    return fieldTypeDesc[0];
  }
  return fieldTypeDesc[i];
}

/**
 * Returns the localized description for the given string success-condition
 * index. Out-of-range indexes fall back to the first (default) entry.
 */
public static String getSuccessConditionDesc( int i ) {
  if ( i < 0 || i >= successConditionDesc.length ) {
    return successConditionDesc[0];
  }
  return successConditionDesc[i];
}

/**
 * Returns the localized description for the given number/date
 * success-condition index. Out-of-range indexes fall back to the first entry.
 */
public static String getSuccessNumberConditionDesc( int i ) {
  if ( i < 0 || i >= successNumberConditionDesc.length ) {
    return successNumberConditionDesc[0];
  }
  return successNumberConditionDesc[i];
}

/**
 * Returns the localized description for the given boolean success-condition
 * index. Out-of-range indexes fall back to the first (default) entry.
 */
public static String getSuccessBooleanConditionDesc( int i ) {
  if ( i < 0 || i >= successBooleanConditionDesc.length ) {
    return successBooleanConditionDesc[0];
  }
  return successBooleanConditionDesc[i];
}
/**
 * Resolves a value type index from its localized description
 * (case-insensitive); falls back to matching by internal code when no
 * description matches. Returns 0 (the default) for null input.
 */
public static int getValueTypeByDesc( String tt ) {
  if ( tt == null ) {
    return 0;
  }

  for ( int i = 0; i < valueTypeDesc.length; i++ ) {
    if ( valueTypeDesc[i].equalsIgnoreCase( tt ) ) {
      return i;
    }
  }

  // If this fails, try to match using the code.
  return getValueTypeByCode( tt );
}

/**
 * Resolves a field type index from its localized description
 * (case-insensitive); falls back to matching by internal code.
 * Returns 0 (the default) for null input.
 */
public static int getFieldTypeByDesc( String tt ) {
  if ( tt == null ) {
    return 0;
  }

  for ( int i = 0; i < fieldTypeDesc.length; i++ ) {
    if ( fieldTypeDesc[i].equalsIgnoreCase( tt ) ) {
      return i;
    }
  }

  // If this fails, try to match using the code.
  return getFieldTypeByCode( tt );
}

/**
 * Resolves a string success-condition index from its localized description
 * (case-insensitive); falls back to matching by internal code.
 * Returns 0 (the default) for null input.
 */
public static int getSuccessConditionByDesc( String tt ) {
  if ( tt == null ) {
    return 0;
  }

  for ( int i = 0; i < successConditionDesc.length; i++ ) {
    if ( successConditionDesc[i].equalsIgnoreCase( tt ) ) {
      return i;
    }
  }

  // If this fails, try to match using the code.
  return getSuccessConditionByCode( tt );
}

/**
 * Resolves a number/date success-condition index from its localized
 * description (case-insensitive); falls back to matching by internal code.
 * Returns 0 (the default) for null input.
 */
public static int getSuccessNumberConditionByDesc( String tt ) {
  if ( tt == null ) {
    return 0;
  }

  for ( int i = 0; i < successNumberConditionDesc.length; i++ ) {
    if ( successNumberConditionDesc[i].equalsIgnoreCase( tt ) ) {
      return i;
    }
  }

  // If this fails, try to match using the code.
  return getSuccessNumberByCode( tt );
}

/**
 * Resolves a boolean success-condition index from its localized description
 * (case-insensitive); falls back to matching by internal code.
 * Returns 0 (the default) for null input.
 */
public static int getSuccessBooleanConditionByDesc( String tt ) {
  if ( tt == null ) {
    return 0;
  }

  for ( int i = 0; i < successBooleanConditionDesc.length; i++ ) {
    if ( successBooleanConditionDesc[i].equalsIgnoreCase( tt ) ) {
      return i;
    }
  }

  // If this fails, try to match using the code.
  return getSuccessBooleanByCode( tt );
}
/** Sets the minimum bound (inclusive) used by the "between" conditions. */
public void setMinValue( String minvalue ) {
  this.minvalue = minvalue;
}

/** @return the minimum bound used by the "between" conditions */
public String getMinValue() {
  return minvalue;
}

/** Sets the value this entry compares the evaluated source value against. */
public void setCompareValue( String comparevalue ) {
  this.comparevalue = comparevalue;
}

/** @return the date mask used when evaluating date/time fields */
public String getMask() {
  return mask;
}

/** Sets the date mask used when evaluating date/time fields. */
public void setMask( String mask ) {
  this.mask = mask;
}

/** @return the name of the previous-result field to evaluate */
public String getFieldName() {
  return fieldname;
}

/** Sets the name of the previous-result field to evaluate. */
public void setFieldName( String fieldname ) {
  this.fieldname = fieldname;
}

/** @return the name of the variable to evaluate */
public String getVariableName() {
  return variablename;
}

/** Sets the name of the variable to evaluate. */
public void setVariableName( String variablename ) {
  this.variablename = variablename;
}

/** @return the value this entry compares the evaluated source value against */
public String getCompareValue() {
  return comparevalue;
}

/** Sets the maximum bound (inclusive) used by the "between" conditions. */
public void setMaxValue( String maxvalue ) {
  this.maxvalue = maxvalue;
}

/** @return the maximum bound used by the "between" conditions */
public String getMaxValue() {
  return maxvalue;
}
/**
 * @return true: this job entry evaluates a condition, so it supports
 *         success/failure outgoing hops.
 */
@Override
public boolean evaluates() {
  return true;
}
}
| nicoben/pentaho-kettle | engine/src/org/pentaho/di/job/entries/simpleeval/JobEntrySimpleEval.java | Java | apache-2.0 | 46,419 |
#!/usr/bin/env python
"""
@package ion.agents.platform.rsn.simulator.oms_values
@file ion/agents/platform/rsn/simulator/oms_values.py
@author Carlos Rueda
@brief Platform attribute value generators for the RSN OMS simulator.
"""
__author__ = 'Carlos Rueda'
__license__ = 'Apache 2.0'
import time
import ntplib
import math
# time begins a few secs ago from now for purposes of reporting
# (generators never report values older than this NTP timestamp)
_START_TIME = ntplib.system_to_ntp_time(time.time() - 30)

# maximum value array size for a single generation call
_MAX_RESULT_SIZE = 1000

# next value for generators created by _create_simple_generator
# (shared, monotonically increasing counter)
_next_value = 990000
def _create_simple_generator(gen_period):
    """
    Returns a simple generator that reports incremental values every given
    time period.

    @param gen_period discretize the time axis by this period in secs

    @retval A function to be called with parameters (from_time, to_time) where
            from_time and to_time are the lower and upper limits (both
            inclusive) of desired time window (NTP).
    """
    def _gen(from_time, to_time):
        global _next_value

        if from_time < _START_TIME:
            from_time = _START_TIME

        # t: initial abscissa coordinate within the time window, aligned to
        # the gen_period grid. Floor division (//) keeps that alignment for
        # fractional periods too, and is Python-2/3 compatible: the original
        # long()-based true division broke on Python 3 and lost the
        # alignment whenever gen_period was a float.
        l_from_time = int(from_time - 2 * gen_period)
        t = float((l_from_time // gen_period) * gen_period)
        while t < from_time:
            t += gen_period

        values = []
        while t <= to_time:
            val = _next_value
            _next_value += 1

            timestamp = t
            values.append((val, timestamp))
            t += gen_period
            # Cap the result size of a single call.
            if len(values) == _MAX_RESULT_SIZE:
                break

        return values

    return _gen
def _create_sine_generator(sine_period, gen_period, min_val, max_val):
    """
    Returns a sine stream fluctuating between min_val and max_val.

    @param sine_period   Sine period in secs
    @param gen_period    discretize the time axis by this period in secs
    @param min_val       min value
    @param max_val       max value

    @retval A function to be called with parameters (from_time, to_time) where
            from_time and to_time are the lower and upper limits (both
            inclusive) of desired time window (NTP).
    """
    twopi = 2 * math.pi

    def _gen(from_time, to_time):
        if from_time < _START_TIME:
            from_time = _START_TIME

        # t: initial abscissa coordinate within the time window, aligned to
        # the gen_period grid. Floor division (//) keeps that alignment for
        # fractional periods too, and is Python-2/3 compatible: the original
        # long()-based true division broke on Python 3 and lost the
        # alignment whenever gen_period was a float.
        l_from_time = int(from_time - 2 * gen_period)
        t = float((l_from_time // gen_period) * gen_period)
        while t < from_time:
            t += gen_period

        range2 = (max_val - min_val) / 2
        values = []
        while t <= to_time:
            # Sample the sine wave at t and rescale into [min_val, max_val].
            s = math.sin(t / sine_period * twopi)
            val = s * range2 + (max_val + min_val) / 2
            timestamp = t
            values.append((val, timestamp))
            t += gen_period
            # Cap the result size of a single call.
            if len(values) == _MAX_RESULT_SIZE:
                break

        return values

    return _gen
# generators per platform-ID/attribute-name (most specific lookup):
_plat_attr_generators = {
    # we used to have a couple here, but now none for the moment.
    # An example would be:
    # ('LJ01D', 'input_voltage'): _create_sine_generator(sine_period=30,
    #                                                    gen_period=2.5,
    #                                                    min_val=-500,
    #                                                    max_val=+500),
}

# generators per attribute name (used when no platform-specific entry
# matches):
_attribute_generators = {
    'input_voltage':
        _create_sine_generator(sine_period=30,
                               gen_period=2.5,
                               min_val=-500,
                               max_val=+500),

    'input_bus_current':
        _create_sine_generator(sine_period=50,
                               gen_period=5,
                               min_val=-300,
                               max_val=+300),

    'MVPC_temperature':
        _create_sine_generator(sine_period=20,
                               gen_period=4,
                               min_val=-200,
                               max_val=+200),

    'MVPC_pressure_1':
        _create_sine_generator(sine_period=20,
                               gen_period=4,
                               min_val=-100,
                               max_val=+100),
}

# final fallback: simple incremental values every 5 seconds.
_default_generator = _create_simple_generator(gen_period=5)
def generate_values(platform_id, attr_id, from_time, to_time):
    """
    Generates synthetic values within a given time window (both ends are
    inclusive). Times are NTP.

    @param platform_id Platform ID
    @param attr_id     Attribute ID. Only the name part is considered. See OOIION-1551.
    @param from_time   lower limit of desired time window
    @param to_time     upper limit of desired time window
    """
    # Strip any "|..." suffix from the attribute ID to get the bare name:
    separator = attr_id.rfind('|')
    attr_name = attr_id if separator < 0 else attr_id[:separator]

    # Resolution order: platform-specific generator, then attribute-wide
    # generator, then the default incremental generator.
    gen = _plat_attr_generators.get((platform_id, attr_name))
    if gen is None:
        gen = _attribute_generators.get(attr_name, _default_generator)

    return gen(from_time, to_time)
if __name__ == "__main__":  # pragma: no cover
    # Demo driver: prints generated values for a window relative to now.
    # do not restrict the absolute from_time for this demo program:
    _START_TIME = 0

    import sys

    if len(sys.argv) < 5:
        print("""
USAGE:
oms_values.py platform_id attr_id delta_from delta_to
Generates values in window [curr_time + delta_from, curr_time + delta_to]
Example:
oms_values.py Node1A input_voltage -35 0
""")
        exit()

    cur_time = ntplib.system_to_ntp_time(time.time())

    # Positional args: platform, attribute, and window deltas in seconds
    # relative to the current time.
    platform_id = sys.argv[1]
    attr_id = sys.argv[2]
    delta_from = float(sys.argv[3])
    delta_to = float(sys.argv[4])

    from_time = cur_time + delta_from
    to_time = cur_time + delta_to
    values = generate_values(platform_id, attr_id, from_time, to_time)
    print("Generated %d values in time window [%s, %s]:" % (
        len(values), from_time, to_time))
    for n, (val, t) in enumerate(values):
        print("\t%2d: %5.2f -> %+4.3f" % (n, t, val))
"""
$ bin/python ion/agents/platform/rsn/simulator/oms_values.py Node1A other_attr -35 0
Generated 7 values in time window [3561992754.4, 3561992789.4]:
0: 3561992755.00 -> +990000.000
1: 3561992760.00 -> +990001.000
2: 3561992765.00 -> +990002.000
3: 3561992770.00 -> +990003.000
4: 3561992775.00 -> +990004.000
5: 3561992780.00 -> +990005.000
6: 3561992785.00 -> +990006.000
$ bin/python ion/agents/platform/rsn/simulator/oms_values.py Node1A input_voltage -35 0
Generated 7 values in time window [3561992757.86, 3561992792.86]:
0: 3561992760.00 -> -0.000
1: 3561992765.00 -> +433.013
2: 3561992770.00 -> +433.013
3: 3561992775.00 -> +0.000
4: 3561992780.00 -> -433.013
5: 3561992785.00 -> -433.013
6: 3561992790.00 -> -0.000
"""
| janeen666/mi-instrument | mi/platform/rsn/simulator/oms_values.py | Python | bsd-2-clause | 7,128 |
# Homebrew cask for OperaChromiumDriver, the WebDriver binary for
# Chromium-based Opera releases.
cask "operadriver" do
  version "96.0.4664.45"
  sha256 "fe712310d8577056442bf7146cde2b1db69181873ff3cb2311335b784829cac6"

  url "https://github.com/operasoftware/operachromiumdriver/releases/download/v.#{version}/operadriver_mac64.zip"
  name "OperaChromiumDriver"
  desc "Driver for Chromium-based Opera releases"
  homepage "https://github.com/operasoftware/operachromiumdriver"

  # Track upstream GitHub release tags of the form "v.<version>".
  livecheck do
    url :url
    regex(/^v?\.?(\d+(?:\.\d+)+)$/i)
  end

  # Expose the driver executable on the user's PATH.
  binary "operadriver_mac64/operadriver"
end
| malob/homebrew-cask | Casks/operadriver.rb | Ruby | bsd-2-clause | 501 |
# Homebrew cask for WiFiSpoof, a macOS network MAC address utility.
cask 'wifispoof' do
  version '3.0.2'
  sha256 'ee0b4e0941f20f4cd71b7f6fa4f56da695cd1d6e1c4e49daec3a460463bd9946'

  # sweetpproductions.com/products was verified as official when first introduced to the cask
  url "https://sweetpproductions.com/products/wifispoof#{version.major}/WiFiSpoof#{version.major}.dmg"
  appcast 'https://sweetpproductions.com/products/wifispoof3/appcast.xml',
          checkpoint: 'e4a7cf391172f201bbd706624b22df970cb05e7b095b05a45713744c66e3b58a'
  name 'WiFiSpoof'
  homepage 'https://wifispoof.com/'

  # The app keeps itself up to date, so brew does not need to re-download.
  auto_updates true

  app 'WiFiSpoof.app'
end
| jiashuw/homebrew-cask | Casks/wifispoof.rb | Ruby | bsd-2-clause | 579 |
# Homebrew cask for the Arduino IDE.
cask 'arduino' do
  version '1.8.7'
  sha256 'bc5fae3e0b54f000d335d93f2e6da66fc8549def015e3b136d34a10e171c1501'

  url "https://downloads.arduino.cc/arduino-#{version}-macosx.zip"
  appcast 'https://www.arduino.cc/en/Main/ReleaseNotes'
  name 'Arduino'
  homepage 'https://www.arduino.cc/'

  app 'Arduino.app'
  # Also expose the command-line build tool bundled inside the app.
  binary "#{appdir}/Arduino.app/Contents/Java/arduino-builder"

  caveats do
    depends_on_java
  end
end
| bosr/homebrew-cask | Casks/arduino.rb | Ruby | bsd-2-clause | 418 |
// RUN: %clang_cc1 -fsyntax-only -verify %s
// expected-no-diagnostics
namespace DeduceVsMember {
  // Overload resolution between a member operator== template and a
  // non-member operator== template: binding the result to float&
  // checks that the non-member overload is selected for (xi == xf).
  template<typename T>
  struct X {
    template<typename U>
    int &operator==(const U& other) const;
  };

  template<typename T, typename U>
  float &operator==(const T&, const X<U>&);

  void test(X<int> xi, X<float> xf) {
    float& ir = (xi == xf);
  }
}
namespace OrderWithStaticMember {
  // Partial ordering involving a static and a non-static member function
  // template: the call a.g(p) with int** must resolve without ambiguity
  // diagnostics (this file asserts no diagnostics at all).
  struct A {
    template<class T> int g(T**, int=0) { return 0; }
    template<class T> static int g(T*) { return 1; }
  };

  void f() {
    A a;
    int **p;
    a.g(p);
  }
}
| santoshn/softboundcets-34 | softboundcets-llvm-clang34/tools/clang/test/CXX/temp/temp.decls/temp.fct/temp.func.order/p3.cpp | C++ | bsd-3-clause | 574 |
# Copyright 2019 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
import unittest
from json5.host import Host
class HostTest(unittest.TestCase):
    # Show full diffs on assertion failures.
    maxDiff = None

    def test_directory_and_file_operations(self):
        """Round-trips a small file through mkdtemp/chdir/write/read/rmtree."""
        h = Host()
        orig_cwd = h.getcwd()
        try:
            d = h.mkdtemp()
            h.chdir(d)
            h.write_text_file('foo', 'bar')
            contents = h.read_text_file('foo')
            self.assertEqual(contents, 'bar')
            h.chdir('..')
            h.rmtree(d)
        finally:
            # Always restore the working directory so later tests are not
            # affected by a failure above.
            h.chdir(orig_cwd)

    def test_print(self):
        """Host.print_ writes to the given stream with a trailing newline."""
        s = io.StringIO()
        h = Host()
        h.print_('hello, world', stream=s)
        self.assertEqual('hello, world\n', s.getvalue())


if __name__ == '__main__':  # pragma: no cover
    unittest.main()
| scheib/chromium | third_party/pyjson5/src/tests/host_test.py | Python | bsd-3-clause | 1,346 |
// DO NOT EDIT! This test has been generated by /html/canvas/tools/gentest.py.
// OffscreenCanvas test in a worker:2d.gradient.interpolate.solid
// Description:
// Note:
// NOTE: generated file (see header) — only comments added here.
// Load the testharness and canvas helper functions into the worker scope.
importScripts("/resources/testharness.js");
importScripts("/html/canvas/resources/canvas-tests.js");

var t = async_test("");
var t_pass = t.done.bind(t);
var t_fail = t.step_func(function(reason) {
    throw reason;
});

t.step(function() {
    var offscreenCanvas = new OffscreenCanvas(100, 50);
    var ctx = offscreenCanvas.getContext('2d');

    // A linear gradient whose two stops share the same color must render
    // as a solid fill of that color.
    var g = ctx.createLinearGradient(0, 0, 100, 0);
    g.addColorStop(0, '#0f0');
    g.addColorStop(1, '#0f0');
    ctx.fillStyle = g;
    ctx.fillRect(0, 0, 100, 50);
    _assertPixel(offscreenCanvas, 50,25, 0,255,0,255, "50,25", "0,255,0,255");
    t.done();
});
done();
| nwjs/chromium.src | third_party/blink/web_tests/external/wpt/html/canvas/offscreen/fill-and-stroke-styles/2d.gradient.interpolate.solid.worker.js | JavaScript | bsd-3-clause | 758 |
//--------------------------------------------------------------------------
//
// Environment:
// This software is part of the EvtGen package developed jointly
// for the BaBar and CLEO collaborations. If you use all or part
// of it, please give an appropriate acknowledgement.
//
// Copyright Information: See EvtGen/COPYRIGHT
// Copyright (C) 1998 Caltech, UCSB
//
// Module: EvtGen/EvtVector3R.hh
//
// Description: Class to describe real 3 vectors
//
// Modification history:
//
// RYD Sept. 5, 1997 Module created
//
//------------------------------------------------------------------------
#ifndef EVTVECTOR3R_HH
#define EVTVECTOR3R_HH
#include <iosfwd>
// A real-valued 3-vector. Provides componentwise arithmetic, scalar (dot)
// product, cross product, Euler rotation and stream output.
class EvtVector3R {

  // Returns v rotated by the Euler angles (phi, theta, ksi).
  friend EvtVector3R rotateEuler(const EvtVector3R& v,
                                 double phi,double theta,double ksi);

  // Scalar multiplication/division (both operand orders for *).
  inline friend EvtVector3R operator*(double c,const EvtVector3R& v2);
  // Scalar (dot) product.
  inline friend double operator*(const EvtVector3R& v1,const EvtVector3R& v2);
  inline friend EvtVector3R operator+(const EvtVector3R& v1,const EvtVector3R& v2);
  inline friend EvtVector3R operator-(const EvtVector3R& v1,const EvtVector3R& v2);
  inline friend EvtVector3R operator*(const EvtVector3R& v1,double c);
  inline friend EvtVector3R operator/(const EvtVector3R& v1,double c);
  friend EvtVector3R cross(const EvtVector3R& v1,const EvtVector3R& v2);

public:
  EvtVector3R();
  EvtVector3R(double x,double y ,double z);
  virtual ~EvtVector3R();
  // In-place compound arithmetic.
  inline EvtVector3R& operator*=(const double c);
  inline EvtVector3R& operator/=(const double c);
  inline EvtVector3R& operator+=(const EvtVector3R& v2);
  inline EvtVector3R& operator-=(const EvtVector3R& v2);
  // Set a single component (0=x, 1=y, 2=z) or all three at once.
  inline void set(int i,double d);
  inline void set(double x,double y ,double z);
  void applyRotateEuler(double phi,double theta,double ksi);
  // Read component i (0=x, 1=y, 2=z); no bounds checking.
  inline double get(int i) const;
  friend std::ostream& operator<<(std::ostream& s,const EvtVector3R& v);
  double dot(const EvtVector3R& v2);
  // Euclidean magnitude of the vector.
  double d3mag() const;

private:

  double v[3];

};
// Scale this vector in place by c.
inline EvtVector3R& EvtVector3R::operator*=(const double c){
  for (int i = 0; i < 3; i++) {
    v[i] *= c;
  }
  return *this;
}

// Divide each component in place by c (no zero check, as in plain doubles).
inline EvtVector3R& EvtVector3R::operator/=(const double c){
  for (int i = 0; i < 3; i++) {
    v[i] /= c;
  }
  return *this;
}

// Componentwise in-place addition.
inline EvtVector3R& EvtVector3R::operator+=(const EvtVector3R& v2){
  for (int i = 0; i < 3; i++) {
    v[i] += v2.v[i];
  }
  return *this;
}

// Componentwise in-place subtraction.
inline EvtVector3R& EvtVector3R::operator-=(const EvtVector3R& v2){
  for (int i = 0; i < 3; i++) {
    v[i] -= v2.v[i];
  }
  return *this;
}
// Scalar * vector.
inline EvtVector3R operator*(double c,const EvtVector3R& v2){
  EvtVector3R scaled(v2);
  scaled *= c;
  return scaled;
}

// Vector * scalar.
inline EvtVector3R operator*(const EvtVector3R& v1,double c){
  EvtVector3R scaled(v1);
  scaled *= c;
  return scaled;
}

// Vector / scalar.
inline EvtVector3R operator/(const EvtVector3R& v1,double c){
  EvtVector3R scaled(v1);
  scaled /= c;
  return scaled;
}

// Scalar (dot) product. Accumulation order matches the original
// left-to-right sum.
inline double operator*(const EvtVector3R& v1,const EvtVector3R& v2){
  double sum = v1.v[0] * v2.v[0];
  sum += v1.v[1] * v2.v[1];
  sum += v1.v[2] * v2.v[2];
  return sum;
}

// Componentwise sum.
inline EvtVector3R operator+(const EvtVector3R& v1,const EvtVector3R& v2) {
  EvtVector3R result(v1);
  result += v2;
  return result;
}

// Componentwise difference.
inline EvtVector3R operator-(const EvtVector3R& v1,const EvtVector3R& v2) {
  EvtVector3R result(v1);
  result -= v2;
  return result;
}
// Returns component i (0=x, 1=y, 2=z); no bounds checking.
inline double EvtVector3R::get(int i) const {
  return v[i];
}

// Sets component i (0=x, 1=y, 2=z); no bounds checking.
inline void EvtVector3R::set(int i,double d){
  v[i]=d;
}

// Sets all three components at once.
inline void EvtVector3R::set(double x,double y, double z){
  v[0]=x;
  v[1]=y;
  v[2]=z;
}
| miranov25/AliRoot | TEvtGen/EvtGen/EvtGenBase/EvtVector3R.hh | C++ | bsd-3-clause | 3,395 |
from django.db.backends.ddl_references import Statement, Table
from django.db.models import F, Q
from django.db.models.constraints import BaseConstraint
from django.db.models.sql import Query
__all__ = ['ExclusionConstraint']
class ExclusionConstraint(BaseConstraint):
    """
    A PostgreSQL EXCLUDE constraint.

    Built from ``expressions``: a list of 2-tuples ``(expression, operator)``,
    where ``expression`` is a string column name or a resolvable expression
    and ``operator`` is the PostgreSQL operator (e.g. ``'&&'``, ``'='``).
    ``index_type`` selects the backing index access method (GiST by default);
    ``condition`` (a ``Q``) makes the constraint partial via a WHERE clause.
    """
    # SQL skeleton filled in by constraint_sql().
    template = 'CONSTRAINT %(name)s EXCLUDE USING %(index_type)s (%(expressions)s)%(where)s'
    def __init__(self, *, name, expressions, index_type=None, condition=None):
        # Only GiST and SP-GiST access methods support exclusion constraints.
        if index_type and index_type.lower() not in {'gist', 'spgist'}:
            raise ValueError(
                'Exclusion constraints only support GiST or SP-GiST indexes.'
            )
        if not expressions:
            raise ValueError(
                'At least one expression is required to define an exclusion '
                'constraint.'
            )
        if not all(
            isinstance(expr, (list, tuple)) and len(expr) == 2
            for expr in expressions
        ):
            raise ValueError('The expressions must be a list of 2-tuples.')
        if not isinstance(condition, (type(None), Q)):
            raise ValueError(
                'ExclusionConstraint.condition must be a Q instance.'
            )
        self.expressions = expressions
        self.index_type = index_type or 'GIST'
        self.condition = condition
        super().__init__(name=name)
    def _get_expression_sql(self, compiler, connection, query):
        """Return a list of '<expr> WITH <operator>' SQL fragments."""
        expressions = []
        for expression, operator in self.expressions:
            if isinstance(expression, str):
                # Bare column names are wrapped in F() so they resolve to columns.
                expression = F(expression)
            expression = expression.resolve_expression(query=query)
            sql, params = expression.as_sql(compiler, connection)
            # Params are interpolated here because the fragment is embedded in DDL.
            expressions.append('%s WITH %s' % (sql % params, operator))
        return expressions
    def _get_condition_sql(self, compiler, schema_editor, query):
        """Return the WHERE-clause SQL for ``condition``, or None if absent."""
        if self.condition is None:
            return None
        where = query.build_where(self.condition)
        sql, params = where.as_sql(compiler, schema_editor.connection)
        # Quote parameter values inline; DDL statements cannot be parameterized.
        return sql % tuple(schema_editor.quote_value(p) for p in params)
    def constraint_sql(self, model, schema_editor):
        """Render the full CONSTRAINT ... EXCLUDE clause for ``model``."""
        query = Query(model, alias_cols=False)
        compiler = query.get_compiler(connection=schema_editor.connection)
        expressions = self._get_expression_sql(compiler, schema_editor.connection, query)
        condition = self._get_condition_sql(compiler, schema_editor, query)
        return self.template % {
            'name': schema_editor.quote_name(self.name),
            'index_type': self.index_type,
            'expressions': ', '.join(expressions),
            'where': ' WHERE (%s)' % condition if condition else '',
        }
    def create_sql(self, model, schema_editor):
        """Return the ALTER TABLE statement that adds this constraint."""
        return Statement(
            'ALTER TABLE %(table)s ADD %(constraint)s',
            table=Table(model._meta.db_table, schema_editor.quote_name),
            constraint=self.constraint_sql(model, schema_editor),
        )
    def remove_sql(self, model, schema_editor):
        """Return the statement that drops this constraint by name."""
        return schema_editor._delete_constraint_sql(
            schema_editor.sql_delete_check,
            model,
            schema_editor.quote_name(self.name),
        )
    def deconstruct(self):
        """Serialize for migrations; omit defaults (GIST, no condition)."""
        path, args, kwargs = super().deconstruct()
        kwargs['expressions'] = self.expressions
        if self.condition is not None:
            kwargs['condition'] = self.condition
        if self.index_type.lower() != 'gist':
            kwargs['index_type'] = self.index_type
        return path, args, kwargs
    def __eq__(self, other):
        # NOTE(review): __eq__ is defined without __hash__, which makes
        # instances unhashable unless BaseConstraint supplies __hash__ — confirm.
        if isinstance(other, self.__class__):
            return (
                self.name == other.name and
                self.index_type == other.index_type and
                self.expressions == other.expressions and
                self.condition == other.condition
            )
        return super().__eq__(other)
    def __repr__(self):
        return '<%s: index_type=%s, expressions=%s%s>' % (
            self.__class__.__qualname__,
            self.index_type,
            self.expressions,
            '' if self.condition is None else ', condition=%s' % self.condition,
        )
| kaedroho/django | django/contrib/postgres/constraints.py | Python | bsd-3-clause | 4,221 |
<?php
// Copyright 2004-present Facebook. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* An abstraction allowing the driver to manipulate the browser's window
*/
class WebDriverWindow {
  // Command executor used to issue window-related WebDriver protocol commands.
  protected $executor;
  public function __construct($executor) {
    $this->executor = $executor;
  }
  /**
   * Get the position of the current window, relative to the upper left corner
   * of the screen.
   *
   * @return WebDriverPoint The current window position.
   */
  public function getPosition() {
    $position = $this->executor->execute(
      DriverCommand::GET_WINDOW_POSITION,
      array(':windowHandle' => 'current')
    );
    return new WebDriverPoint(
      $position['x'],
      $position['y']
    );
  }
  /**
   * Get the size of the current window. This will return the outer window
   * dimension, not just the view port.
   *
   * @return WebDriverDimension The current window size.
   */
  public function getSize() {
    $size = $this->executor->execute(
      DriverCommand::GET_WINDOW_SIZE,
      array(':windowHandle' => 'current')
    );
    return new WebDriverDimension(
      $size['width'],
      $size['height']
    );
  }
  /**
   * Maximizes the current window if it is not already maximized
   *
   * @return WebDriverWindow The instance.
   */
  public function maximize() {
    $this->executor->execute(
      DriverCommand::MAXIMIZE_WINDOW,
      array(':windowHandle' => 'current')
    );
    return $this;
  }
  /**
   * Set the size of the current window. This will change the outer window
   * dimension, not just the view port.
   *
   * @param WebDriverDimension $size
   * @return WebDriverWindow The instance.
   */
  public function setSize(WebDriverDimension $size) {
    $params = array(
      'width'  => $size->getWidth(),
      'height' => $size->getHeight(),
      ':windowHandle' => 'current',
    );
    $this->executor->execute(DriverCommand::SET_WINDOW_SIZE, $params);
    return $this;
  }
  /**
   * Set the position of the current window. This is relative to the upper left
   * corner of the screen.
   *
   * @param WebDriverPoint $position
   * @return WebDriverWindow The instance.
   */
  public function setPosition(WebDriverPoint $position) {
    $params = array(
      'x' => $position->getX(),
      'y' => $position->getY(),
      ':windowHandle' => 'current',
    );
    $this->executor->execute(DriverCommand::SET_WINDOW_POSITION, $params);
    return $this;
  }
  /**
   * Get the current browser orientation.
   *
   * @return string Either LANDSCAPE|PORTRAIT
   */
  public function getScreenOrientation() {
    return $this->executor->execute(DriverCommand::GET_SCREEN_ORIENTATION);
  }
  /**
   * Set the browser orientation. The orientation should either
   * LANDSCAPE|PORTRAIT
   *
   * @param string $orientation
   * @return WebDriverWindow The instance.
   * @throws IndexOutOfBoundsException If $orientation is not PORTRAIT or LANDSCAPE.
   */
  public function setScreenOrientation($orientation) {
    // Normalize case so callers may pass e.g. 'portrait'.
    $orientation = strtoupper($orientation);
    if (!in_array($orientation, array('PORTRAIT', 'LANDSCAPE'))) {
      throw new IndexOutOfBoundsException(
        "Orientation must be either PORTRAIT, or LANDSCAPE"
      );
    }
    $this->executor->execute(
      DriverCommand::SET_SCREEN_ORIENTATION,
      array('orientation' => $orientation)
    );
    return $this;
  }
}
| hamidgoharjoo/test | vendor/facebook/webdriver/lib/WebDriverWindow.php | PHP | bsd-3-clause | 3,841 |
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/web_modal/modal_dialog_host.h"
namespace web_modal {
// Out-of-line destructor anchors the observer interface's vtable in this TU.
ModalDialogHostObserver::~ModalDialogHostObserver() {
}
ModalDialogHost::~ModalDialogHost() {
}
// Default policy: dialogs are activated when shown. Presumably virtual so
// subclasses can suppress activation — confirm against the header.
bool ModalDialogHost::ShouldActivateDialog() const {
  return true;
}
}  // namespace web_modal
| scheib/chromium | components/web_modal/modal_dialog_host.cc | C++ | bsd-3-clause | 437 |
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Copyright 2007 Google Inc. All Rights Reserved.
/**
* @fileoverview A color palette with a button for adding additional colors
* manually.
*
*/
goog.provide('goog.ui.CustomColorPalette');
goog.require('goog.color');
goog.require('goog.dom');
goog.require('goog.ui.ColorPalette');
/**
* A custom color palette is a grid of color swatches and a button that allows
* the user to add additional colors to the palette
*
* @param {Array.<string>} initColors Array of initial colors to populate the
* palette with.
* @param {goog.ui.PaletteRenderer} opt_renderer Renderer used to render or
* decorate the palette; defaults to {@link goog.ui.PaletteRenderer}.
* @param {goog.dom.DomHelper} opt_domHelper Optional DOM helper, used for
* document interaction.
* @constructor
* @extends {goog.ui.ColorPalette}
*/
goog.ui.CustomColorPalette = function(initColors, opt_renderer, opt_domHelper) {
  goog.ui.ColorPalette.call(this, initColors, opt_renderer, opt_domHelper);
  // The OPENED state is used to track whether the custom-color prompt is
  // currently showing, so clients can react before a selection is replaced.
  this.setSupportedState(goog.ui.Component.State.OPENED, true);
};
goog.inherits(goog.ui.CustomColorPalette, goog.ui.ColorPalette);
/**
 * Returns an array of DOM nodes for each color, and an additional cell with a
 * '+' that triggers the custom-color prompt when activated.
 * @return {Array.<Node>} Array of div elements.
 * @private
 */
goog.ui.CustomColorPalette.prototype.createColorNodes_ = function() {
  /** @desc Hover caption for the button that allows the user to add a color. */
  var MSG_CLOSURE_CUSTOM_COLOR_BUTTON = goog.getMsg('Add a color');
  // Append the "add custom color" swatch after the regular color swatches.
  var nl = goog.ui.CustomColorPalette.superClass_.createColorNodes_.call(this);
  nl.push(goog.dom.createDom('div', {
    'class': goog.getCssName('goog-palette-customcolor'),
    'title': MSG_CLOSURE_CUSTOM_COLOR_BUTTON
  }, '+'));
  return nl;
};
/**
 * Handles activation of a palette cell: either opens the custom-color prompt
 * (for the special '+' swatch) or selects the activated color.
 * @inheritDoc
 * @param {goog.events.Event} e Mouse or key event that triggered the action.
 * @return {boolean} True if the action was allowed to proceed, false otherwise.
 */
goog.ui.CustomColorPalette.prototype.performActionInternal = function(e) {
  var item = /** @type {Element} */ (this.getHighlightedItem());
  if (item) {
    if (goog.dom.classes.has(
        item, goog.getCssName('goog-palette-customcolor'))) {
      // User activated the special "add custom color" swatch.
      this.promptForCustomColor();
    } else {
      // User activated a normal color swatch.
      this.setSelectedItem(item);
      return this.dispatchEvent(goog.ui.Component.EventType.ACTION);
    }
  }
  // No highlighted item, or the prompt path was taken: no ACTION dispatched.
  return false;
};
/**
 * Prompts the user to enter a custom color, validates it, appends it to the
 * palette, selects it, and dispatches an ACTION event. Currently uses a
 * window.prompt but could be updated to use a dialog box with a
 * WheelColorPalette.
 */
goog.ui.CustomColorPalette.prototype.promptForCustomColor = function() {
  /** @desc Default custom color dialog. */
  var MSG_CLOSURE_CUSTOM_COLOR_PROMPT = goog.getMsg(
      'Input custom color, i.e. pink, #F00, #D015FF or rgb(100, 50, 25)');
  // A CustomColorPalette is considered "open" while the color selection prompt
  // is open. Enabling state transition events for the OPENED state and
  // listening for OPEN events allows clients to save the selection before
  // it is destroyed (see e.g. bug 1064701).
  var response = null;
  this.setOpen(true);
  if (this.isOpen()) {
    // The OPEN event wasn't canceled; prompt for custom color.
    response = window.prompt(MSG_CLOSURE_CUSTOM_COLOR_PROMPT, '#FFFFFF');
    this.setOpen(false);
  }
  if (!response) {
    // The user hit cancel
    return;
  }
  var color;
  /** @preserveTry */
  try {
    // Normalize any supported CSS color form to its hex representation.
    color = goog.color.parse(response).hex;
  } catch (er) {
    /** @desc Alert message sent when the input string is not a valid color. */
    var MSG_CLOSURE_CUSTOM_COLOR_INVALID_INPUT = goog.getMsg(
        'ERROR: "{$color}" is not a valid color.', {'color': response});
    alert(MSG_CLOSURE_CUSTOM_COLOR_INVALID_INPUT);
    return;
  }
  // TODO: This is relatively inefficient. Consider adding
  // functionality to palette to add individual items after render time.
  var colors = this.getColors();
  colors.push(color);  // Fixed: statement previously relied on ASI (no ';').
  this.setColors(colors);
  // Set the selected color to the new color and notify listeners of the action.
  this.setSelectedColor(color);
  this.dispatchEvent(goog.ui.Component.EventType.ACTION);
};
| yesudeep/puppy | tools/google-closure-library/closure/goog/ui/customcolorpalette.js | JavaScript | mit | 4,792 |
/**
* Utility to register editors and common namespace for keeping reference to all editor classes
*/
import Handsontable from './browser';
import {toUpperCaseFirst} from './helpers/string';
export {registerEditor, getEditor, hasEditor, getEditorConstructor};
var
registeredEditorNames = {},
registeredEditorClasses = new WeakMap();
// support for older versions of Handsontable
Handsontable.editors = Handsontable.editors || {};
Handsontable.editors.registerEditor = registerEditor;
Handsontable.editors.getEditor = getEditor;
/**
 * Wraps an editor class and caches one editor instance per Handsontable
 * instance, keyed by the Handsontable instance's `guid`.
 *
 * @param {Function} editorClass Editor constructor to wrap.
 * @constructor
 */
function RegisteredEditor(editorClass) {
  var instancePool = {};

  // Expose the wrapped constructor unchanged.
  this.getConstructor = function() {
    return editorClass;
  };

  // Lazily create (then reuse) the singleton editor for a given hot instance.
  this.getInstance = function(hotInstance) {
    if (!(hotInstance.guid in instancePool)) {
      instancePool[hotInstance.guid] = new editorClass(hotInstance);
    }
    return instancePool[hotInstance.guid];
  };
}
/**
 * Registers editor under given name.
 *
 * The editor is stored both by name (when a string name is supplied) and by
 * class (in a WeakMap), and is also exposed on the legacy
 * `Handsontable.editors.<Name>Editor` namespace.
 *
 * @param {String} editorName Name to register under; may be null/non-string
 *     when registering an anonymous editor class only by reference.
 * @param {Function} editorClass
 */
function registerEditor(editorName, editorClass) {
  var editor = new RegisteredEditor(editorClass);
  if (typeof editorName === 'string') {
    registeredEditorNames[editorName] = editor;
    Handsontable.editors[toUpperCaseFirst(editorName) + 'Editor'] = editorClass;
  }
  registeredEditorClasses.set(editorClass, editor);
}
/**
 * Returns instance (singleton per Handsontable instance) of editor class.
 *
 * Accepts either a registered name or an editor class; an unregistered class
 * is registered on the fly (by reference only, without a name).
 *
 * @param {String|Function} editorName Registered name or editor class.
 * @param {Object} hotInstance Handsontable instance the editor belongs to.
 * @returns {Object} The cached editor instance.
 * @throws {Error} When `editorName` is neither a string nor a function, or no
 *     editor is registered under the given name.
 */
function getEditor(editorName, hotInstance) {
  var editor;
  if (typeof editorName == 'function') {
    if (!(registeredEditorClasses.get(editorName))) {
      // Unknown class: register it anonymously so it gets an instance pool.
      registerEditor(null, editorName);
    }
    editor = registeredEditorClasses.get(editorName);
  } else if (typeof editorName == 'string') {
    editor = registeredEditorNames[editorName];
  } else {
    throw Error('Only strings and functions can be passed as "editor" parameter ');
  }
  if (!editor) {
    throw Error('No editor registered under name "' + editorName + '"');
  }
  return editor.getInstance(hotInstance);
}
/**
 * Get editor constructor class registered under the given name.
 *
 * Unlike {@link getEditor}, only string names are accepted here.
 *
 * @param {String} editorName
 * @returns {Function} The registered editor constructor.
 * @throws {Error} When `editorName` is not a string or nothing is registered
 *     under that name.
 */
function getEditorConstructor(editorName) {
  var editor;
  if (typeof editorName == 'string') {
    editor = registeredEditorNames[editorName];
  } else {
    throw Error('Only strings and functions can be passed as "editor" parameter ');
  }
  if (!editor) {
    throw Error('No editor registered under name "' + editorName + '"');
  }
  return editor.getConstructor();
}
/**
 * Checks whether an editor has been registered under the given name.
 *
 * @param {String} editorName
 * @returns {Boolean} `true` when an editor is registered under `editorName`.
 */
function hasEditor(editorName) {
  // Explicit coercion replaces the redundant `cond ? true : false` form.
  return Boolean(registeredEditorNames[editorName]);
}
| Growmies/handsontable | src/editors.js | JavaScript | mit | 2,723 |
"use strict";
var index_1 = require("../../models/types/index");
// Builds a ReferenceType for the given TypeScript symbol. When includeParent
// is set and the symbol has a parent, the name is qualified as "Parent.name".
// NOTE(review): getSymbolID is called before symbolToString — presumably it
// registers the symbol for later resolution; preserve this order.
function createReferenceType(context, symbol, includeParent) {
    var checker = context.checker;
    var id = context.getSymbolID(symbol);
    var name = checker.symbolToString(symbol);
    if (includeParent && symbol.parent) {
        name = checker.symbolToString(symbol.parent) + '.' + name;
    }
    return new index_1.ReferenceType(name, id);
}
exports.createReferenceType = createReferenceType;
//# sourceMappingURL=reference.js.map | glamb/TCMS-Frontend | node_modules/typedoc/lib/converter/factories/reference.js | JavaScript | mit | 505 |
<?php
/*
* This file is part of the Sylius package.
*
* (c) Paweł Jędrzejewski
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
declare(strict_types=1);
namespace Sylius\Behat\Service;
use Sylius\Behat\Service\Setter\CookieSetterInterface;
use Sylius\Component\User\Model\UserInterface;
use Symfony\Component\HttpFoundation\Session\SessionInterface;
use Symfony\Component\Security\Core\Authentication\Token\TokenInterface;
use Symfony\Component\Security\Core\Authentication\Token\UsernamePasswordToken;
use Symfony\Component\Security\Core\Exception\TokenNotFoundException;
/**
* @author Arkadiusz Krakowiak <arkadiusz.krakowiak@lakion.com>
* @author Kamil Kokot <kamil@kokot.me>
*/
final class SecurityService implements SecurityServiceInterface
{
    /**
     * @var SessionInterface Session used to persist the serialized security token.
     */
    private $session;
    /**
     * @var CookieSetterInterface Pushes the session cookie into the browser under test.
     */
    private $cookieSetter;
    /**
     * @var string Session key of the form "_security_<firewall>" holding the token.
     */
    private $sessionTokenVariable;
    /**
     * @param SessionInterface $session
     * @param CookieSetterInterface $cookieSetter
     * @param string $firewallContextName
     */
    public function __construct(SessionInterface $session, CookieSetterInterface $cookieSetter, $firewallContextName)
    {
        $this->session = $session;
        $this->cookieSetter = $cookieSetter;
        $this->sessionTokenVariable = sprintf('_security_%s', $firewallContextName);
    }
    /**
     * {@inheritdoc}
     */
    public function logIn(UserInterface $user)
    {
        // The provider key only needs to be a non-empty string for Symfony to
        // treat the token as authenticated.
        $token = new UsernamePasswordToken($user, $user->getPassword(), 'randomstringbutnotnull', $user->getRoles());
        $this->setToken($token);
    }
    public function logOut()
    {
        // Clearing the stored token logs the user out; the cookie is refreshed
        // so the browser keeps using the same (now unauthenticated) session.
        $this->session->set($this->sessionTokenVariable, null);
        $this->session->save();
        $this->cookieSetter->setCookie($this->session->getName(), $this->session->getId());
    }
    /**
     * {@inheritdoc}
     */
    public function getCurrentToken()
    {
        $serializedToken = $this->session->get($this->sessionTokenVariable);
        if (null === $serializedToken) {
            throw new TokenNotFoundException();
        }
        // Tokens are stored serialized (see setToken()); trusted test-only data.
        return unserialize($serializedToken);
    }
    /**
     * {@inheritdoc}
     */
    public function restoreToken(TokenInterface $token)
    {
        $this->setToken($token);
    }
    /**
     * Persist the token in the session and sync the session cookie.
     *
     * @param TokenInterface $token
     */
    private function setToken(TokenInterface $token)
    {
        $serializedToken = serialize($token);
        $this->session->set($this->sessionTokenVariable, $serializedToken);
        $this->session->save();
        $this->cookieSetter->setCookie($this->session->getName(), $this->session->getId());
    }
}
| rainlike/justshop | vendor/sylius/sylius/src/Sylius/Behat/Service/SecurityService.php | PHP | mit | 2,802 |
package md5530bd51e982e6e7b340b73e88efe666e;
// Auto-generated Android Callable Wrapper (ACW) for the managed type
// Xamarin.Forms.Platform.Android.FormsApplicationActivity. Each Android
// lifecycle callback forwards to a registered n_* native method that invokes
// the corresponding managed override. Do not edit by hand.
public class FormsApplicationActivity
	extends android.app.Activity
	implements
		mono.android.IGCUserPeer
{
	// Newline-separated registration table mapping Java methods to managed handlers.
	static final String __md_methods;
	static {
		__md_methods = 
			"n_onCreate:(Landroid/os/Bundle;)V:GetOnCreate_Landroid_os_Bundle_Handler\n" +
			"n_onStart:()V:GetOnStartHandler\n" +
			"n_onResume:()V:GetOnResumeHandler\n" +
			"n_onPause:()V:GetOnPauseHandler\n" +
			"n_onStop:()V:GetOnStopHandler\n" +
			"n_onRestart:()V:GetOnRestartHandler\n" +
			"n_onDestroy:()V:GetOnDestroyHandler\n" +
			"n_onBackPressed:()V:GetOnBackPressedHandler\n" +
			"n_onOptionsItemSelected:(Landroid/view/MenuItem;)Z:GetOnOptionsItemSelected_Landroid_view_MenuItem_Handler\n" +
			"n_onPrepareOptionsMenu:(Landroid/view/Menu;)Z:GetOnPrepareOptionsMenu_Landroid_view_Menu_Handler\n" +
			"n_onConfigurationChanged:(Landroid/content/res/Configuration;)V:GetOnConfigurationChanged_Landroid_content_res_Configuration_Handler\n" +
			"";
		mono.android.Runtime.register ("Xamarin.Forms.Platform.Android.FormsApplicationActivity, Xamarin.Forms.Platform.Android, Version=1.4.0.0, Culture=neutral, PublicKeyToken=null", FormsApplicationActivity.class, __md_methods);
	}
	public FormsApplicationActivity () throws java.lang.Throwable
	{
		super ();
		// Only activate the managed peer for the exact wrapper class, not subclasses.
		if (getClass () == FormsApplicationActivity.class)
			mono.android.TypeManager.Activate ("Xamarin.Forms.Platform.Android.FormsApplicationActivity, Xamarin.Forms.Platform.Android, Version=1.4.0.0, Culture=neutral, PublicKeyToken=null", "", this, new java.lang.Object[] {  });
	}
	public void onCreate (android.os.Bundle p0)
	{
		n_onCreate (p0);
	}
	private native void n_onCreate (android.os.Bundle p0);
	public void onStart ()
	{
		n_onStart ();
	}
	private native void n_onStart ();
	public void onResume ()
	{
		n_onResume ();
	}
	private native void n_onResume ();
	public void onPause ()
	{
		n_onPause ();
	}
	private native void n_onPause ();
	public void onStop ()
	{
		n_onStop ();
	}
	private native void n_onStop ();
	public void onRestart ()
	{
		n_onRestart ();
	}
	private native void n_onRestart ();
	public void onDestroy ()
	{
		n_onDestroy ();
	}
	private native void n_onDestroy ();
	public void onBackPressed ()
	{
		n_onBackPressed ();
	}
	private native void n_onBackPressed ();
	public boolean onOptionsItemSelected (android.view.MenuItem p0)
	{
		return n_onOptionsItemSelected (p0);
	}
	private native boolean n_onOptionsItemSelected (android.view.MenuItem p0);
	public boolean onPrepareOptionsMenu (android.view.Menu p0)
	{
		return n_onPrepareOptionsMenu (p0);
	}
	private native boolean n_onPrepareOptionsMenu (android.view.Menu p0);
	public void onConfigurationChanged (android.content.res.Configuration p0)
	{
		n_onConfigurationChanged (p0);
	}
	private native void n_onConfigurationChanged (android.content.res.Configuration p0);
	// Keeps Java-side references alive so the GC bridge can coordinate with Mono.
	java.util.ArrayList refList;
	public void monodroidAddReference (java.lang.Object obj)
	{
		if (refList == null)
			refList = new java.util.ArrayList ();
		refList.add (obj);
	}
	public void monodroidClearReferences ()
	{
		if (refList != null)
			refList.clear ();
	}
}
| fabianwilliams/aalpix | XamarinPagesDemo/Droid/obj/Debug/android/src/md5530bd51e982e6e7b340b73e88efe666e/FormsApplicationActivity.java | Java | mit | 3,159 |
using Abp.Dependency;
using Abp.Configuration.Startup;
using Abp.Modules;
using Abp.Net.Mail;
using Abp.Reflection.Extensions;
namespace Abp.MailKit
{
    /// <summary>
    /// ABP module that swaps the framework's default <see cref="IEmailSender"/>
    /// for a MailKit-based implementation.
    /// </summary>
    [DependsOn(typeof(AbpKernelModule))]
    public class AbpMailKitModule : AbpModule
    {
        /// <summary>
        /// Registers the MailKit configuration and replaces the email sender
        /// service before dependency registrations are finalized.
        /// </summary>
        public override void PreInitialize()
        {
            IocManager.Register<IAbpMailKitConfiguration, AbpMailKitConfiguration>();

            Configuration.ReplaceService<IEmailSender, MailKitEmailSender>(DependencyLifeStyle.Transient);
        }

        /// <summary>
        /// Registers this assembly's types by convention.
        /// </summary>
        public override void Initialize()
        {
            IocManager.RegisterAssemblyByConvention(typeof(AbpMailKitModule).GetAssembly());
        }
    }
| carldai0106/aspnetboilerplate | src/Abp.MailKit/AbpMailKitModule.cs | C# | mit | 670 |
/**
*
*/
package org.junit.internal.runners.statements;
import java.util.ArrayList;
import java.util.List;
import org.junit.runners.model.FrameworkMethod;
import org.junit.runners.model.MultipleFailureException;
import org.junit.runners.model.Statement;
/**
 * A {@link Statement} that runs the wrapped statement, then invokes each
 * {@code @After} method, collecting every thrown {@link Throwable} (from the
 * statement and from the after-methods) into a single
 * {@link MultipleFailureException} if any occurred.
 */
public class RunAfters extends Statement {
	private final Statement fNext;

	private final Object fTarget;

	private final List<FrameworkMethod> fAfters;
	
	public RunAfters(Statement next, List<FrameworkMethod> afters, Object target) {
		fNext= next;
		fAfters= afters;
		fTarget= target;
	}

	@Override
	public void evaluate() throws Throwable {
		List<Throwable> errors = new ArrayList<Throwable>();
		try {
			fNext.evaluate();
		} catch (Throwable e) {
			errors.add(e);
		} finally {
			// After-methods always run, even when the statement failed; each
			// failure is collected rather than aborting the remaining ones.
			for (FrameworkMethod each : fAfters)
				try {
					each.invokeExplosively(fTarget);
				} catch (Throwable e) {
					errors.add(e);
				}
		}
		// No-op when empty; rethrows a single error directly, wraps several.
		MultipleFailureException.assertEmpty(errors);
	}
}
/*
===========================================================================
Copyright (C) 2000 - 2013, Raven Software, Inc.
Copyright (C) 2001 - 2013, Activision, Inc.
Copyright (C) 2013 - 2015, OpenJK contributors
This file is part of the OpenJK source code.
OpenJK is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License version 2 as
published by the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, see <http://www.gnu.org/licenses/>.
===========================================================================
*/
// tr_image.c
#include "tr_local.h"
#include <map>
bool gServerSkinHack = false;
shader_t *R_FindServerShader( const char *name, const int *lightmapIndex, const byte *styles, qboolean mipRawImage );
static char *CommaParse( char **data_p );
/*
===============
RE_SplitSkins
input = skinname, possibly being a macro for three skins
return= true if three part skins found
output= qualified names to three skins if return is true, undefined if false
===============
*/
bool RE_SplitSkins(const char *INname, char *skinhead, char *skintorso, char *skinlower)
{	//INname= "models/players/jedi_tf/|head01_skin1|torso01|lower01";
	// NOTE(review): output buffers are written with strcpy/strcat and assumed
	// to be MAX_QPATH-sized with the composed names fitting — confirm callers.
	if (strchr(INname, '|'))
	{
		char name[MAX_QPATH];
		strcpy(name, INname);
		char *p = strchr(name, '|');
		*p=0;
		p++;
		//fill in the base path
		strcpy (skinhead, name);
		strcpy (skintorso, name);
		strcpy (skinlower, name);
		//now get the the individual files
		
		//advance to second
		char *p2 = strchr(p, '|'); 
		assert(p2);
		if (!p2)
		{
			return false;
		}
		*p2=0;
		p2++;
		strcat (skinhead, p);
		strcat (skinhead, ".skin");
		
		//advance to third
		p = strchr(p2, '|');
		assert(p);
		if (!p)
		{
			return false;
		}
		*p=0;
		p++;
		strcat (skintorso,p2);
		strcat (skintorso, ".skin");
		
		strcat (skinlower,p);
		strcat (skinlower, ".skin");
		return true;
	}
	// No '|' separators: not a three-part skin macro; outputs are untouched.
	return false;
}
// given a name, go get the skin we want and return
// Loads and parses a single .skin file into the already-allocated skin slot
// hSkin (appending surfaces, so three-part skins can share one slot).
// Returns hSkin on success, 0 when the file is missing or defines no surfaces.
qhandle_t RE_RegisterIndividualSkin( const char *name , qhandle_t hSkin)
{
	skin_t			*skin;
	skinSurface_t	*surf;
	char			*text, *text_p;
	char			*token;
	char			surfName[MAX_QPATH];

	// load and parse the skin file
	ri.FS_ReadFile( name, (void **)&text );
	if ( !text ) {
#ifndef FINAL_BUILD
		Com_Printf( "WARNING: RE_RegisterSkin( '%s' ) failed to load!\n", name );
#endif
		return 0;
	}

	assert (tr.skins[hSkin]);	//should already be setup, but might be an 3part append

	skin = tr.skins[hSkin];

	text_p = text;
	while ( text_p && *text_p ) {
		// get surface name
		token = CommaParse( &text_p );
		Q_strncpyz( surfName, token, sizeof( surfName ) );

		if ( !token[0] ) {
			break;
		}
		// lowercase the surface name so skin compares are faster
		Q_strlwr( surfName );

		if ( *text_p == ',' ) {
			text_p++;
		}

		if ( !strncmp( token, "tag_", 4 ) ) {	//these aren't in there, but just in case you load an id style one...
			continue;
		}

		// parse the shader name
		token = CommaParse( &text_p );

		if ( !strcmp( &surfName[strlen(surfName)-4], "_off") )
		{
			if ( !strcmp( token ,"*off" ) )
			{
				continue;	//don't need these double offs
			}
			surfName[strlen(surfName)-4] = 0;	//remove the "_off"
		}
		if ((int)(sizeof( skin->surfaces) / sizeof( skin->surfaces[0] )) <= skin->numSurfaces)
		{
			assert( (int)(sizeof( skin->surfaces) / sizeof( skin->surfaces[0] )) > skin->numSurfaces );
			Com_Printf( "WARNING: RE_RegisterSkin( '%s' ) more than %u surfaces!\n", name, (unsigned int)ARRAY_LEN(skin->surfaces) );
			break;
		}
		surf = (skinSurface_t *) Hunk_Alloc( sizeof( *skin->surfaces[0] ), h_low );
		skin->surfaces[skin->numSurfaces] = (_skinSurface_t *)surf;

		Q_strncpyz( surf->name, surfName, sizeof( surf->name ) );

		// Server-side loads use a shader stub; the client resolves real shaders.
		if (gServerSkinHack)	surf->shader = R_FindServerShader( token, lightmapsNone, stylesDefault, qtrue );
		else					surf->shader = R_FindShader( token, lightmapsNone, stylesDefault, qtrue );
		skin->numSurfaces++;
	}

	ri.FS_FreeFile( text );

	// never let a skin have 0 shaders
	if ( skin->numSurfaces == 0 ) {
		return 0;		// use default skin
	}

	return hSkin;
}
// Registers a skin by name, reusing an existing slot when the same name was
// already loaded. Supports the "base/|head|torso|lower" three-part macro form
// (see RE_SplitSkins). Returns the skin handle, or 0 on failure/default skin.
qhandle_t RE_RegisterSkin( const char *name ) {
	qhandle_t	hSkin;
	skin_t		*skin;

	if ( !name || !name[0] ) {
		Com_Printf( "Empty name passed to RE_RegisterSkin\n" );
		return 0;
	}

	if ( strlen( name ) >= MAX_QPATH ) {
		Com_Printf( "Skin name exceeds MAX_QPATH\n" );
		return 0;
	}

	// see if the skin is already loaded
	for ( hSkin = 1; hSkin < tr.numSkins ; hSkin++ ) {
		skin = tr.skins[hSkin];
		if ( !Q_stricmp( skin->name, name ) ) {
			if( skin->numSurfaces == 0 ) {
				return 0;		// default skin
			}
			return hSkin;
		}
	}

	// allocate a new skin
	if ( tr.numSkins == MAX_SKINS ) {
		Com_Printf( "WARNING: RE_RegisterSkin( '%s' ) MAX_SKINS hit\n", name );
		return 0;
	}
	tr.numSkins++;
	skin = (struct skin_s *)Hunk_Alloc( sizeof( skin_t ), h_low );
	tr.skins[hSkin] = skin;
	Q_strncpyz( skin->name, name, sizeof( skin->name ) );
	skin->numSurfaces = 0;

	// make sure the render thread is stopped
	R_IssuePendingRenderCommands();

	// If not a .skin file, load as a single shader
	// NOTE(review): this branch is intentionally disabled (body commented out),
	// so non-.skin names fall through to the parsing paths below.
	if ( strcmp( name + strlen( name ) - 5, ".skin" ) ) {
/*		skin->numSurfaces = 1;
		skin->surfaces[0] = (skinSurface_t *)Hunk_Alloc( sizeof(skin->surfaces[0]), h_low );
		skin->surfaces[0]->shader = R_FindShader( name, lightmapsNone, stylesDefault, qtrue );
		return hSkin;
*/
	}

	char skinhead[MAX_QPATH]={0};
	char skintorso[MAX_QPATH]={0};
	char skinlower[MAX_QPATH]={0};
	if ( RE_SplitSkins(name, (char*)&skinhead, (char*)&skintorso, (char*)&skinlower ) )
	{//three part
		hSkin = RE_RegisterIndividualSkin(skinhead, hSkin);
		if (hSkin)
		{
			hSkin = RE_RegisterIndividualSkin(skintorso, hSkin);
			if (hSkin)
			{
				hSkin = RE_RegisterIndividualSkin(skinlower, hSkin);
			}
		}
	}
	else
	{//single skin
		hSkin = RE_RegisterIndividualSkin(name, hSkin);
	}
	return(hSkin);
}
/*
==================
CommaParse
This is unfortunate, but the skin files aren't
compatible with our normal parsing rules.
==================
*/
// Tokenizer for .skin files: returns the next token, treating commas as
// delimiters in addition to whitespace, and skipping // and /* */ comments.
// Advances *data_p past the token. Returns a pointer into a static buffer
// (not reentrant/thread-safe); for end-of-data inside the comment loop it
// returns the literal "" rather than the static buffer.
static char *CommaParse( char **data_p ) {
	int c = 0, len;
	char *data;
	static	char	com_token[MAX_TOKEN_CHARS];

	data = *data_p;
	len = 0;
	com_token[0] = 0;

	// make sure incoming data is valid
	if ( !data ) {
		*data_p = NULL;
		return com_token;
	}

	while ( 1 ) {
		// skip whitespace
		while( (c = *(const unsigned char* /*eurofix*/)data) <= ' ') {
			if( !c ) {
				break;
			}
			data++;
		}

		c = *data;

		// skip double slash comments
		if ( c == '/' && data[1] == '/' )
		{
			while (*data && *data != '\n')
				data++;
		}
		// skip /* */ comments
		else if ( c=='/' && data[1] == '*' ) 
		{
			while ( *data && ( *data != '*' || data[1] != '/' ) ) 
			{
				data++;
			}
			if ( *data ) 
			{
				data += 2;
			}
		}
		else
		{
			break;
		}
	}

	if ( c == 0 ) {
		return "";
	}

	// handle quoted strings
	if (c == '\"')
	{
		data++;
		while (1)
		{
			c = *data++;
			if (c=='\"' || !c)
			{
				com_token[len] = 0;
				*data_p = ( char * ) data;
				return com_token;
			}
			if (len < MAX_TOKEN_CHARS - 1)
			{
				com_token[len] = c;
				len++;
			}
		}
	}

	// parse a regular word
	do
	{
		if (len < MAX_TOKEN_CHARS - 1)
		{
			com_token[len] = c;
			len++;
		}
		data++;
		c = *data;
	} while (c>32 && c != ',' );

	com_token[len] = 0;

	*data_p = ( char * ) data;
	return com_token;
}
/*
===============
RE_RegisterServerSkin
Mangled version of the above function to load .skin files on the server.
===============
*/
qhandle_t RE_RegisterServerSkin( const char *name ) {
	qhandle_t r;

	if (ri.Cvar_VariableIntegerValue( "cl_running" ) &&
		ri.Com_TheHunkMarkHasBeenMade() &&
		ShaderHashTableExists())
	{ //If the client is running then we can go straight into the normal registerskin func
		return RE_RegisterSkin(name);
	}

	// Headless path: flip the global hack flag so surface shaders are resolved
	// via R_FindServerShader stubs instead of real client shaders.
	gServerSkinHack = true;
	r = RE_RegisterSkin(name);
	gServerSkinHack = false;

	return r;
}
/*
===============
R_InitSkins
===============
*/
// Resets the skin registry and installs slot 0 as the default skin, whose
// single surface uses the default shader.
void R_InitSkins( void ) {
	skin_t		*skin;

	tr.numSkins = 1;

	// make the default skin have all default shaders
	skin = tr.skins[0] = (struct skin_s *)ri.Hunk_Alloc( sizeof( skin_t ), h_low );
	Q_strncpyz( skin->name, "<default skin>", sizeof( skin->name )  );
	skin->numSurfaces = 1;
	skin->surfaces[0] = (_skinSurface_t *)ri.Hunk_Alloc( sizeof( skinSurface_t ), h_low );
	skin->surfaces[0]->shader = tr.defaultShader;
}
/*
===============
R_GetSkinByHandle
===============
*/
// Resolves a skin handle to its skin_t; out-of-range handles fall back to the
// default skin in slot 0.
skin_t	*R_GetSkinByHandle( qhandle_t hSkin ) {
	if ( hSkin < 1 || hSkin >= tr.numSkins ) {
		return tr.skins[0];
	}
	return tr.skins[ hSkin ];
}
| Yberion/stats_mod | codemp/rd-dedicated/tr_skin.cpp | C++ | gpl-2.0 | 8,848 |
/**
* Copyright (c) 2012--2014 Red Hat, Inc.
*
* This software is licensed to you under the GNU General Public License,
* version 2 (GPLv2). There is NO WARRANTY for this software, express or
* implied, including the implied warranties of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
* along with this software; if not, see
* http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
*
* Red Hat trademarks are not licensed under GPLv2. No permission is
* granted to use or replicate Red Hat trademarks that are incorporated
* in this software or its documentation.
*/
package com.redhat.rhn.frontend.action.systems.sdc;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import com.redhat.rhn.common.db.datasource.DataResult;
import com.redhat.rhn.domain.rhnset.RhnSet;
import com.redhat.rhn.domain.server.Server;
import com.redhat.rhn.domain.user.User;
import com.redhat.rhn.frontend.dto.SystemPendingEventDto;
import com.redhat.rhn.frontend.struts.RequestContext;
import com.redhat.rhn.frontend.struts.RhnAction;
import com.redhat.rhn.frontend.struts.RhnHelper;
import com.redhat.rhn.frontend.struts.RhnListSetHelper;
import com.redhat.rhn.frontend.struts.StrutsDelegate;
import com.redhat.rhn.frontend.taglibs.list.ListTagHelper;
import com.redhat.rhn.frontend.taglibs.list.TagHelper;
import com.redhat.rhn.manager.rhnset.RhnSetDecl;
import com.redhat.rhn.manager.rhnset.RhnSetManager;
import com.redhat.rhn.manager.system.SystemManager;
/**
 * SystemPendingEventsAction - displays the list of pending (scheduled but
 * not yet executed) events for a system and processes the user's selection
 * of events to cancel.
 * @version $Rev$
 */
public class SystemPendingEventsAction extends RhnAction {

    /**
     * {@inheritDoc}
     */
    public ActionForward execute(ActionMapping mapping, ActionForm formIn,
            HttpServletRequest request,
            HttpServletResponse response) {

        RequestContext context = new RequestContext(request);
        Long sid = context.getRequiredParam("sid");
        Server server = context.lookupAndBindServer();
        User user = context.getCurrentUser();

        Map<String, Object> params = makeParamMap(request);
        params.put("sid", server.getId());
        // The list tag rebuilds its pagination/sort links from this URL,
        // so the sid parameter must be carried along.
        request.setAttribute(ListTagHelper.PARENT_URL, request.getRequestURI() +
                "?sid=" + server.getId());
        request.setAttribute("sid", sid);

        RhnSet set = RhnSetDecl.PENDING_ACTIONS_TO_DELETE.get(user);
        RhnListSetHelper helper = new RhnListSetHelper(request);

        if (context.wasDispatched("system.event.pending.cancel")) {
            // Persist the checkbox selection before forwarding to the
            // cancellation confirmation page.
            helper.updateSet(set, RhnSetDecl.PENDING_ACTIONS_TO_DELETE.getLabel());
            if (!set.isEmpty()) {
                return getStrutsDelegate().forwardParams(
                        mapping.findForward("continue"), params);
            }
            // Nothing was selected: warn and fall through to redisplay.
            RhnHelper.handleEmptySelection(request);
        }

        // Redisplaying the page: start from an empty, persisted selection.
        set.clear();
        RhnSetManager.store(set);

        DataResult<SystemPendingEventDto> result =
                SystemManager.systemPendingEvents(sid, null);

        if (ListTagHelper.getListAction(RequestContext.PAGE_LIST, request) != null) {
            helper.execute(set, RequestContext.PAGE_LIST, result);
        }
        if (!set.isEmpty()) {
            helper.syncSelections(set, result);
            ListTagHelper.setSelectedAmount(RequestContext.PAGE_LIST,
                    set.size(), request);
        }

        ListTagHelper.bindSetDeclTo(RequestContext.PAGE_LIST,
                RhnSetDecl.PENDING_ACTIONS_TO_DELETE, request);
        TagHelper.bindElaboratorTo(RequestContext.PAGE_LIST,
                result.getElaborator(), request);

        // Simplified from "getLock() == null ? false : true".
        params.put("isLocked", server.getLock() != null);
        request.setAttribute(RequestContext.PAGE_LIST, result);

        return StrutsDelegate.getInstance().forwardParams(
                mapping.findForward("default"), params);
    }
}
| xkollar/spacewalk | java/code/src/com/redhat/rhn/frontend/action/systems/sdc/SystemPendingEventsAction.java | Java | gpl-2.0 | 4,067 |
/*
* Copyright (c) 2003, 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package nsk.jvmti.GetThreadCpuTimerInfo;
import java.io.PrintStream;
import nsk.share.*;
import nsk.share.jvmti.*;
/**
 * Debuggee class for this test.
 *
 * The debuggee creates a tested thread and synchronizes with the test
 * harness via {@code checkStatus()} twice: once before the thread is
 * started and once after it has finished (the "Sync:" log messages mark
 * these points).  The harness is expected to examine the thread at each
 * sync point and fold its verdict into the returned status.
 */
public class thrtimerinfo001 extends DebugeeClass {

    /** Load native library if required. */
    static {
        loadLibrary("thrtimerinfo001");
    }

    /** Run test from command line. */
    public static void main(String argv[]) {
        argv = nsk.share.jvmti.JVMTITest.commonInit(argv);

        // JCK-compatible exit
        System.exit(run(argv, System.out) + Consts.JCK_STATUS_BASE);
    }

    /** Run test from JCK-compatible environment. */
    public static int run(String argv[], PrintStream out) {
        return new thrtimerinfo001().runIt(argv, out);
    }

    /* =================================================================== */

    // scaffold objects
    ArgumentHandler argHandler = null;  // parsed command-line options
    Log log = null;                     // test log sink
    long timeout = 0;                   // wait time, milliseconds
    int status = Consts.TEST_PASSED;    // running test verdict

    /** Run debuggee. */
    public int runIt(String argv[], PrintStream out) {
        argHandler = new ArgumentHandler(argv);
        log = new Log(out, argHandler);
        timeout = argHandler.getWaitTime() * 60 * 1000; // milliseconds

        thrtimerinfo001Thread thread = new thrtimerinfo001Thread("TestedThread");

        // sync before thread started
        log.display("Sync: tested thread created");
        status = checkStatus(status);

        // start and finish tested thread
        try {
            thread.start();
            thread.join();
        } catch (InterruptedException e) {
            throw new Failure("Main thread interrupted while waiting for tested thread:\n\t"
                                + e);
        }

        // sync after thread finished
        log.display("Sync: tested thread started and finished");
        status = checkStatus(status);

        return status;
    }
}
/* =================================================================== */
/** Tested thread: performs a short burst of CPU work and exits. */
class thrtimerinfo001Thread extends Thread {

    /** Create the tested thread under the given name. */
    public thrtimerinfo001Thread(String name) {
        super(name);
    }

    /** Burn a little CPU so the thread has measurable activity. */
    public void run() {
        final int iterations = 1000;
        int total = 0;
        for (int index = 0; index < iterations; index++) {
            // Alternate between adding and subtracting multiples of ten;
            // the accumulated value is deliberately discarded.
            total += (index % 2 == 0) ? (index * 10) : -(index * 10);
        }
    }
}
| md-5/jdk10 | test/hotspot/jtreg/vmTestbase/nsk/jvmti/GetThreadCpuTimerInfo/thrtimerinfo001.java | Java | gpl-2.0 | 3,506 |
<?php
/**
* Magento
*
* NOTICE OF LICENSE
*
* This source file is subject to the Open Software License (OSL 3.0)
* that is bundled with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://opensource.org/licenses/osl-3.0.php
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@magento.com so we can send you a copy immediately.
*
* DISCLAIMER
*
* Do not edit or add to this file if you wish to upgrade Magento to newer
* versions in the future. If you wish to customize Magento for your
* needs please refer to http://www.magento.com for more information.
*
* @category Mage
* @package Mage_Checkout
* @copyright Copyright (c) 2006-2015 X.commerce, Inc. (http://www.magento.com)
* @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0)
*/
/**
 * Multishipping checkout controller
 *
 * Drives the multi-address checkout flow: address selection, shipping
 * methods, billing, overview and order placement.
 *
 * @author Magento Core Team <core@magentocommerce.com>
 */
class Mage_Checkout_MultishippingController extends Mage_Checkout_Controller_Action
{
    /**
     * Retrieve checkout model
     *
     * @return Mage_Checkout_Model_Type_Multishipping
     */
    protected function _getCheckout()
    {
        return Mage::getSingleton('checkout/type_multishipping');
    }

    /**
     * Retrieve checkout state model
     *
     * @return Mage_Checkout_Model_Type_Multishipping_State
     */
    protected function _getState()
    {
        return Mage::getSingleton('checkout/type_multishipping_state');
    }

    /**
     * Retrieve checkout URL helper
     *
     * @return Mage_Checkout_Helper_Url
     */
    protected function _getHelper()
    {
        return Mage::helper('checkout/url');
    }

    /**
     * Retrieve checkout session
     *
     * @return Mage_Checkout_Model_Session
     */
    protected function _getCheckoutSession()
    {
        return Mage::getSingleton('checkout/session');
    }

    /**
     * Action predispatch
     *
     * Check customer authentication for some actions
     *
     * @return Mage_Checkout_MultishippingController
     */
    public function preDispatch()
    {
        parent::preDispatch();
        if ($this->getFlag('', 'redirectLogin')) {
            return $this;
        }

        $action = strtolower($this->getRequest()->getActionName());

        $checkoutSessionQuote = $this->_getCheckoutSession()->getQuote();
        /**
         * Catch index action call to set some flags before checkout/type_multishipping model initialization
         */
        if ($action == 'index') {
            $checkoutSessionQuote->setIsMultiShipping(true);
            $this->_getCheckoutSession()->setCheckoutState(
                Mage_Checkout_Model_Session::CHECKOUT_STATE_BEGIN
            );
        } elseif (!$checkoutSessionQuote->getIsMultiShipping() &&
            !in_array($action, array('login', 'register', 'success'))
        ) {
            // Quote is not flagged for multishipping: restart the flow.
            $this->_redirect('*/*/index');
            $this->setFlag('', self::FLAG_NO_DISPATCH, true);
            return $this;
        }

        if (!in_array($action, array('login', 'register'))) {
            if (!Mage::getSingleton('customer/session')->authenticate($this, $this->_getHelper()->getMSLoginUrl())) {
                $this->setFlag('', self::FLAG_NO_DISPATCH, true);
            }

            if (!Mage::helper('checkout')->isMultishippingCheckoutAvailable()) {
                $error = $this->_getCheckout()->getMinimumAmountError();
                $this->_getCheckoutSession()->addError($error);
                $this->_redirectUrl($this->_getHelper()->getCartUrl());
                $this->setFlag('', self::FLAG_NO_DISPATCH, true);
                return $this;
            }
        }

        if (!$this->_preDispatchValidateCustomer()) {
            return $this;
        }

        if ($this->_getCheckoutSession()->getCartWasUpdated(true) &&
            !in_array($action, array('index', 'login', 'register', 'addresses', 'success'))
        ) {
            // Cart contents changed since the flow started: back to cart.
            $this->_redirectUrl($this->_getHelper()->getCartUrl());
            $this->setFlag('', self::FLAG_NO_DISPATCH, true);
        }

        if ($action == 'success' && $this->_getCheckout()->getCheckoutSession()->getDisplaySuccess(true)) {
            return $this;
        }

        $quote = $this->_getCheckout()->getQuote();
        if (!$quote->hasItems() || $quote->getHasError() || $quote->isVirtual()) {
            $this->_redirectUrl($this->_getHelper()->getCartUrl());
            $this->setFlag('', self::FLAG_NO_DISPATCH, true);
            // Was a bare "return;"; return $this to honor the documented
            // contract consistently with every other branch.
            return $this;
        }

        return $this;
    }

    /**
     * Index action of Multishipping checkout
     */
    public function indexAction()
    {
        $this->_getCheckoutSession()->setCartWasUpdated(false);
        $this->_redirect('*/*/addresses');
    }

    /**
     * Multishipping checkout login page
     */
    public function loginAction()
    {
        if (Mage::getSingleton('customer/session')->isLoggedIn()) {
            $this->_redirect('*/*/');
            return;
        }

        $this->loadLayout();
        $this->_initLayoutMessages('customer/session');

        // set account create url
        if ($loginForm = $this->getLayout()->getBlock('customer_form_login')) {
            $loginForm->setCreateAccountUrl($this->_getHelper()->getMSRegisterUrl());
        }
        $this->renderLayout();
    }

    /**
     * Multishipping checkout registration page
     */
    public function registerAction()
    {
        if (Mage::getSingleton('customer/session')->isLoggedIn()) {
            $this->_redirectUrl($this->_getHelper()->getMSCheckoutUrl());
            return;
        }

        $this->loadLayout();
        $this->_initLayoutMessages('customer/session');

        if ($registerForm = $this->getLayout()->getBlock('customer_form_register')) {
            $registerForm->setShowAddressFields(true)
                ->setBackUrl($this->_getHelper()->getMSLoginUrl())
                ->setSuccessUrl($this->_getHelper()->getMSShippingAddressSavedUrl())
                ->setErrorUrl($this->_getHelper()->getCurrentUrl());
        }

        $this->renderLayout();
    }

    /**
     * Multishipping checkout select address page
     */
    public function addressesAction()
    {
        // If customer do not have addresses
        if (!$this->_getCheckout()->getCustomerDefaultShippingAddress()) {
            $this->_redirect('*/multishipping_address/newShipping');
            return;
        }

        $this->_getState()->unsCompleteStep(
            Mage_Checkout_Model_Type_Multishipping_State::STEP_SHIPPING
        );

        $this->_getState()->setActiveStep(
            Mage_Checkout_Model_Type_Multishipping_State::STEP_SELECT_ADDRESSES
        );
        if (!$this->_getCheckout()->validateMinimumAmount()) {
            $message = $this->_getCheckout()->getMinimumAmountDescription();
            $this->_getCheckout()->getCheckoutSession()->addNotice($message);
        }
        $this->loadLayout();
        $this->_initLayoutMessages('customer/session');
        $this->_initLayoutMessages('checkout/session');
        $this->renderLayout();
    }

    /**
     * Multishipping checkout process posted addresses
     */
    public function addressesPostAction()
    {
        if (!$this->_getCheckout()->getCustomerDefaultShippingAddress()) {
            $this->_redirect('*/multishipping_address/newShipping');
            return;
        }

        try {
            if ($this->getRequest()->getParam('continue', false)) {
                $this->_getCheckout()->setCollectRatesFlag(true);
                $this->_getState()->setActiveStep(
                    Mage_Checkout_Model_Type_Multishipping_State::STEP_SHIPPING
                );
                $this->_getState()->setCompleteStep(
                    Mage_Checkout_Model_Type_Multishipping_State::STEP_SELECT_ADDRESSES
                );
                $this->_redirect('*/*/shipping');
            }
            elseif ($this->getRequest()->getParam('new_address')) {
                $this->_redirect('*/multishipping_address/newShipping');
            }
            else {
                $this->_redirect('*/*/addresses');
            }
            // Apply the posted item-to-address assignments (may throw and
            // override the redirect chosen above).
            if ($shipToInfo = $this->getRequest()->getPost('ship')) {
                $this->_getCheckout()->setShippingItemsInformation($shipToInfo);
            }
        }
        catch (Mage_Core_Exception $e) {
            $this->_getCheckoutSession()->addError($e->getMessage());
            $this->_redirect('*/*/addresses');
        }
        catch (Exception $e) {
            $this->_getCheckoutSession()->addException(
                $e,
                Mage::helper('checkout')->__('Data saving problem')
            );
            $this->_redirect('*/*/addresses');
        }
    }

    /**
     * Multishipping checkout action to go back to addresses page
     */
    public function backToAddressesAction()
    {
        $this->_getState()->setActiveStep(
            Mage_Checkout_Model_Type_Multishipping_State::STEP_SELECT_ADDRESSES
        );
        $this->_getState()->unsCompleteStep(
            Mage_Checkout_Model_Type_Multishipping_State::STEP_SHIPPING
        );
        $this->_redirect('*/*/addresses');
    }

    /**
     * Multishipping checkout remove item action
     */
    public function removeItemAction()
    {
        $itemId    = $this->getRequest()->getParam('id');
        $addressId = $this->getRequest()->getParam('address');
        if ($addressId && $itemId) {
            $this->_getCheckout()->setCollectRatesFlag(true);
            $this->_getCheckout()->removeAddressItem($addressId, $itemId);
        }
        $this->_redirect('*/*/addresses');
    }

    /**
     * Returns whether the minimum amount has been reached
     *
     * On failure an error message is queued and the request is forwarded
     * back to the addresses step.
     *
     * @return bool
     */
    protected function _validateMinimumAmount()
    {
        if (!$this->_getCheckout()->validateMinimumAmount()) {
            $error = $this->_getCheckout()->getMinimumAmountError();
            $this->_getCheckout()->getCheckoutSession()->addError($error);
            $this->_forward('backToAddresses');
            return false;
        }
        return true;
    }

    /**
     * Multishipping checkout shipping information page
     */
    public function shippingAction()
    {
        if (!$this->_validateMinimumAmount()) {
            return;
        }

        if (!$this->_getState()->getCompleteStep(Mage_Checkout_Model_Type_Multishipping_State::STEP_SELECT_ADDRESSES)) {
            $this->_redirect('*/*/addresses');
            return $this;
        }

        $this->_getState()->setActiveStep(
            Mage_Checkout_Model_Type_Multishipping_State::STEP_SHIPPING
        );
        $this->loadLayout();
        $this->_initLayoutMessages('customer/session');
        $this->_initLayoutMessages('checkout/session');
        $this->renderLayout();
    }

    /**
     * Multishipping checkout action to go back to shipping
     */
    public function backToShippingAction()
    {
        $this->_getState()->setActiveStep(
            Mage_Checkout_Model_Type_Multishipping_State::STEP_SHIPPING
        );
        $this->_getState()->unsCompleteStep(
            Mage_Checkout_Model_Type_Multishipping_State::STEP_BILLING
        );
        $this->_redirect('*/*/shipping');
    }

    /**
     * Multishipping checkout after the shipping page
     */
    public function shippingPostAction()
    {
        $shippingMethods = $this->getRequest()->getPost('shipping_method');
        try {
            Mage::dispatchEvent(
                'checkout_controller_multishipping_shipping_post',
                array('request'=>$this->getRequest(), 'quote'=>$this->_getCheckout()->getQuote())
            );
            $this->_getCheckout()->setShippingMethods($shippingMethods);
            $this->_getState()->setActiveStep(
                Mage_Checkout_Model_Type_Multishipping_State::STEP_BILLING
            );
            $this->_getState()->setCompleteStep(
                Mage_Checkout_Model_Type_Multishipping_State::STEP_SHIPPING
            );
            $this->_redirect('*/*/billing');
        }
        catch (Exception $e) {
            $this->_getCheckoutSession()->addError($e->getMessage());
            $this->_redirect('*/*/shipping');
        }
    }

    /**
     * Multishipping checkout billing information page
     */
    public function billingAction()
    {
        $collectTotals = false;
        $quote = $this->_getCheckoutSession()->getQuote();
        /**
         * Reset customer balance
         */
        if ($quote->getUseCustomerBalance()) {
            $quote->setUseCustomerBalance(false);
            $collectTotals = true;
        }
        /**
         * Reset reward points
         */
        if ($quote->getUseRewardPoints()) {
            $quote->setUseRewardPoints(false);
            $collectTotals = true;
        }
        if ($collectTotals) {
            $quote->collectTotals()->save();
        }

        if (!$this->_validateBilling()) {
            return;
        }

        if (!$this->_validateMinimumAmount()) {
            return;
        }

        if (!$this->_getState()->getCompleteStep(Mage_Checkout_Model_Type_Multishipping_State::STEP_SHIPPING)) {
            $this->_redirect('*/*/shipping');
            return $this;
        }

        $this->_getState()->setActiveStep(
            Mage_Checkout_Model_Type_Multishipping_State::STEP_BILLING
        );
        $this->loadLayout();
        $this->_initLayoutMessages('customer/session');
        $this->_initLayoutMessages('checkout/session');
        $this->renderLayout();
    }

    /**
     * Validation of selecting of billing address
     *
     * @return boolean
     */
    protected function _validateBilling()
    {
        if (!$this->_getCheckout()->getQuote()->getBillingAddress()->getFirstname()) {
            $this->_redirect('*/multishipping_address/selectBilling');
            return false;
        }
        return true;
    }

    /**
     * Multishipping checkout action to go back to billing
     */
    public function backToBillingAction()
    {
        $this->_getState()->setActiveStep(
            Mage_Checkout_Model_Type_Multishipping_State::STEP_BILLING
        );
        $this->_getState()->unsCompleteStep(
            Mage_Checkout_Model_Type_Multishipping_State::STEP_OVERVIEW
        );
        $this->_redirect('*/*/billing');
    }

    /**
     * Multishipping checkout place order page
     */
    public function overviewAction()
    {
        if (!$this->_validateMinimumAmount()) {
            return $this;
        }

        $this->_getState()->setActiveStep(Mage_Checkout_Model_Type_Multishipping_State::STEP_OVERVIEW);

        try {
            $payment = $this->getRequest()->getPost('payment', array());
            // Restrict payment methods to those valid for multishipping.
            $payment['checks'] = Mage_Payment_Model_Method_Abstract::CHECK_USE_FOR_MULTISHIPPING
                | Mage_Payment_Model_Method_Abstract::CHECK_USE_FOR_COUNTRY
                | Mage_Payment_Model_Method_Abstract::CHECK_USE_FOR_CURRENCY
                | Mage_Payment_Model_Method_Abstract::CHECK_ORDER_TOTAL_MIN_MAX
                | Mage_Payment_Model_Method_Abstract::CHECK_ZERO_TOTAL;
            $this->_getCheckout()->setPaymentMethod($payment);

            $this->_getState()->setCompleteStep(
                Mage_Checkout_Model_Type_Multishipping_State::STEP_BILLING
            );

            $this->loadLayout();
            $this->_initLayoutMessages('checkout/session');
            $this->_initLayoutMessages('customer/session');
            $this->renderLayout();
        }
        catch (Mage_Core_Exception $e) {
            $this->_getCheckoutSession()->addError($e->getMessage());
            $this->_redirect('*/*/billing');
        }
        catch (Exception $e) {
            Mage::logException($e);
            $this->_getCheckoutSession()->addException($e, $this->__('Cannot open the overview page'));
            $this->_redirect('*/*/billing');
        }
    }

    /**
     * Multishipping checkout after the overview page
     */
    public function overviewPostAction()
    {
        if (!$this->_validateFormKey()) {
            $this->_forward('backToAddresses');
            return;
        }

        if (!$this->_validateMinimumAmount()) {
            return;
        }

        try {
            if ($requiredAgreements = Mage::helper('checkout')->getRequiredAgreementIds()) {
                $postedAgreements = array_keys($this->getRequest()->getPost('agreement', array()));
                if ($diff = array_diff($requiredAgreements, $postedAgreements)) {
                    $this->_getCheckoutSession()->addError($this->__('Please agree to all Terms and Conditions before placing the order.'));
                    $this->_redirect('*/*/billing');
                    return;
                }
            }

            $payment = $this->getRequest()->getPost('payment');
            $paymentInstance = $this->_getCheckout()->getQuote()->getPayment();
            if (isset($payment['cc_number'])) {
                $paymentInstance->setCcNumber($payment['cc_number']);
            }
            if (isset($payment['cc_cid'])) {
                $paymentInstance->setCcCid($payment['cc_cid']);
            }
            $this->_getCheckout()->createOrders();
            $this->_getState()->setActiveStep(
                Mage_Checkout_Model_Type_Multishipping_State::STEP_SUCCESS
            );
            $this->_getState()->setCompleteStep(
                Mage_Checkout_Model_Type_Multishipping_State::STEP_OVERVIEW
            );
            $this->_getCheckout()->getCheckoutSession()->clear();
            $this->_getCheckout()->getCheckoutSession()->setDisplaySuccess(true);
            $this->_redirect('*/*/success');
        } catch (Mage_Payment_Model_Info_Exception $e) {
            $message = $e->getMessage();
            if (!empty($message)) {
                $this->_getCheckoutSession()->addError($message);
            }
            $this->_redirect('*/*/billing');
        } catch (Mage_Checkout_Exception $e) {
            Mage::helper('checkout')
                ->sendPaymentFailedEmail($this->_getCheckout()->getQuote(), $e->getMessage(), 'multi-shipping');
            $this->_getCheckout()->getCheckoutSession()->clear();
            $this->_getCheckoutSession()->addError($e->getMessage());
            $this->_redirect('*/cart');
        }
        catch (Mage_Core_Exception $e) {
            Mage::helper('checkout')
                ->sendPaymentFailedEmail($this->_getCheckout()->getQuote(), $e->getMessage(), 'multi-shipping');
            $this->_getCheckoutSession()->addError($e->getMessage());
            $this->_redirect('*/*/billing');
        } catch (Exception $e) {
            Mage::logException($e);
            Mage::helper('checkout')
                ->sendPaymentFailedEmail($this->_getCheckout()->getQuote(), $e->getMessage(), 'multi-shipping');
            $this->_getCheckoutSession()->addError($this->__('Order place error.'));
            $this->_redirect('*/*/billing');
        }
    }

    /**
     * Multishipping checkout success page
     */
    public function successAction()
    {
        if (!$this->_getState()->getCompleteStep(Mage_Checkout_Model_Type_Multishipping_State::STEP_OVERVIEW)) {
            $this->_redirect('*/*/addresses');
            return $this;
        }

        $this->loadLayout();
        $this->_initLayoutMessages('checkout/session');
        $ids = $this->_getCheckout()->getOrderIds();
        Mage::dispatchEvent('checkout_multishipping_controller_success_action', array('order_ids' => $ids));
        $this->renderLayout();
    }

    /**
     * Redirect to login page
     */
    public function redirectLogin()
    {
        $this->setFlag('', 'no-dispatch', true);
        Mage::getSingleton('customer/session')->setBeforeAuthUrl(Mage::getUrl('*/*', array('_secure'=>true)));
        $this->getResponse()->setRedirect(
            Mage::helper('core/url')->addRequestParam(
                $this->_getHelper()->getMSLoginUrl(),
                array('context' => 'checkout')
            )
        );
        $this->setFlag('', 'redirectLogin', true);
    }
}
| dvh11er/mage-cheatcode | magento/app/code/core/Mage/Checkout/controllers/MultishippingController.php | PHP | gpl-2.0 | 20,321 |
//===================================================================================
//
// (C) COPYRIGHT International Business Machines Corp., 2002 All Rights Reserved
// Licensed Materials - Property of IBM
// US Government Users Restricted Rights - Use, duplication or
// disclosure restricted by GSA ADP Schedule Contract with IBM Corp.
//
// IBM DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
// ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE. IN NO EVENT SHALL IBM BE LIABLE FOR ANY SPECIAL, INDIRECT OR
// CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF
// USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE
// OR PERFORMANCE OF THIS SOFTWARE.
//
// The program may be used, executed, copied, modified, and distributed
// without royalty for the purpose of developing, using, marketing, or distributing.
//
//=======================================================================================
// gSOAP v2 Interop test round 2 base
//#include "interoptA.h"
#include "soapH.h"
extern "C" void displayText(char *text);
extern "C" int interopA(const char *url);
// Namespace mapping table for interop round 2 test A.  Each row maps an XML
// prefix to its namespace URI; where present, the third member is a wildcard
// pattern accepted on input (so any XML Schema revision from the peer
// matches).  The {NULL, NULL} row terminates the table.
struct Namespace namespacesA[] =
{ {"SOAP-ENV", "http://schemas.xmlsoap.org/soap/envelope/"},
{"SOAP-ENC", "http://schemas.xmlsoap.org/soap/encoding/"},
{"xsi", "http://www.w3.org/2001/XMLSchema-instance", "http://www.w3.org/*/XMLSchema-instance"},
{"xsd", "http://www.w3.org/2001/XMLSchema", "http://www.w3.org/*/XMLSchema"},
{"ns", "http://soapinterop.org/"},
{"s", "http://soapinterop.org/xsd"},
{"a", "http://xml.apache.org/xml-soap"},
{"h", "http://soapinterop.org/echoheader/"},
{NULL, NULL}
};
/*
 * Runs SOAP interop round 2 base test A against the endpoint at `url`.
 * Each echo* RPC is invoked and its reply compared with what was sent;
 * a "pass"/"fail"/"mismatch" line is reported through displayText() for
 * every call, followed by an overall "ALL PASS"/"FAILURES" verdict.
 * Returns 0 when the test sequence completes.
 *
 * Fix: on echoDecimal RPC failure the original reported "echoDecimal pass",
 * mislabeling a failing call; it now reports "echoDecimal fail".
 */
int interopA(const char *url)
{
  struct soap *soap;
  int i, g;
  xsd__string so, si = "Hello World! <>&";
  struct ArrayOfstring Asi, Aso;
  xsd__int no, ni = 1234567890;
  xsd__int n = 2147483647;
  struct ArrayOfint Ani, Ano;
  // xsd__float f1 = 3.40282e+38;
  xsd__float f1 = 123.5678;
  xsd__float f2 = 3.14;
  xsd__float fo, fi = 123.456;
  // xsd__float fo, fi = 3e2;
  //#ifdef SYMBIAN
  // const struct soap_double_nan { unsigned int n1, n2; } soap_double_nan;
  //#endif
  xsd__float nan = FLT_NAN, inf = FLT_PINFTY, ninf = FLT_NINFTY;
  struct ArrayOffloat Afi, Afo;
  struct s__SOAPStruct sti, *p;
  struct ns__echoStructResponse sto;
  struct ArrayOfSOAPStruct Asti, Asto;
  struct ns__echoVoidResponse Rv;
  struct xsd__base64Binary b64i, b64o;
  xsd__dateTime dto, dti = "1967-12-29T01:02:03";
  struct xsd__hexBinary hbi, hbo;
  xsd__decimal Do, Di = "1234567890.123456789";
  xsd__boolean bo, bi = true;
  //struct a__Map mi, mo;
  //struct ArrayOfMap Ami, Amo;
  // char buff[100];

  displayText("running test A on");
  displayText((char*)url);

  soap = soap_new();
  soap->namespaces = (struct Namespace *)namespacesA;
  // soap.send_timeout = 30;
  // soap.recv_timeout = 30;

  // Build the input payloads for the array/struct echo calls.
  Asi.__size = 8;
  Asi.__offset = 0;
  Asi.__ptr = (xsd__string*)malloc(Asi.__size*sizeof(xsd__string));
  Asi.__ptr[0] = NULL;
  Asi.__ptr[1] = " Hello\tWorld";
  Asi.__ptr[2] = NULL;
  Asi.__ptr[3] = "! ";
  Asi.__ptr[4] = NULL;
  Asi.__ptr[5] = si;
  Asi.__ptr[6] = NULL;
  Asi.__ptr[7] = si;

  Ani.__size = 0;
  Ani.__offset = 0;
  Ani.__ptr = NULL; // (xsd__int*)malloc(Ani.__size*sizeof(xsd__int));

  Afi.__size = 5;
  Afi.__offset = 0;
  Afi.__ptr = (xsd__float**)malloc(Afi.__size*sizeof(xsd__float*));
  Afi.__ptr[0] = &f1;
  Afi.__ptr[1] = &nan; // FLT_NAN;
  Afi.__ptr[2] = &inf; // FLT_PINFTY;
  Afi.__ptr[3] = &ninf; // FLT_NINFTY;
  Afi.__ptr[4] = &f2;

  sti.varString = "Hello";
  sti.varInt = &n;
  sti.varFloat = &f1;

  // Partially transmitted (offset) struct array: 3 elements at offset 2.
  Asti.__size = 3;
  Asti.__offset = 2;
  Asti.__ptr = (struct s__SOAPStruct**)malloc((Asti.__size+1)*sizeof(struct s__SOAPStruct*));
  p = (struct s__SOAPStruct*)malloc(Asti.__size*sizeof(struct s__SOAPStruct));
  Asti.__ptr[0] = p;
  Asti.__ptr[1] = p+1;
  Asti.__ptr[2] = p+2;
  Asti.__ptr[3] = p;
  Asti.__ptr[0]->varString = "Hello";
  Asti.__ptr[0]->varInt = &n;
  Asti.__ptr[0]->varFloat = &f1;
  Asti.__ptr[1]->varString = "World";
  Asti.__ptr[1]->varInt = &n;
  Asti.__ptr[1]->varFloat = &f2;
  Asti.__ptr[2]->varString = "!";
  Asti.__ptr[2]->varInt = &n;
  Asti.__ptr[2]->varFloat = &f2;

  // b64i.__ptr = (unsigned char*)"This is an example Base64 encoded string";
  // b64i.__size = strlen((char*)b64i.__ptr)+1;
  unsigned char b64data[4]={0x80, 0x81, 0x82, 0x83};
  b64i.__ptr = b64data;
  b64i.__size = 4;

  hbi.__ptr = (unsigned char*)"This is an example HexBinary encoded string";
  hbi.__size = strlen((char*)hbi.__ptr)+1;

  /*
  mi.__size = 2;
  mi.__ptr = (struct _item*)malloc(mi.__size*sizeof(struct _item));
  mi.__ptr[0].key = new xsd__string_("hello");
  mi.__ptr[0].value = new xsd__string_("world");
  mi.__ptr[1].key = new xsd__int_(2);
  mi.__ptr[1].value = new xsd__boolean_(true);
  Ami.__size = 2;
  Ami.__ptr = (struct a__Map**)malloc(Ami.__size*sizeof(struct a__Map*));
  Ami.__ptr[0] = &mi;
  Ami.__ptr[1] = &mi; */

  char *site=(char*)url;
  // char* site ="http://websrv.cs.fsu.edu/~engelen/interop2.cgi";
  // char* site = "http://nagoya.apache.org:5049/axis/services/echo ";
  char* action = "http://soapinterop.org/";
  bool ok=true;

  if (soap_call_ns__echoString(soap, site, action, si, so))
  {
    displayText("echoString fail");
    ok=false;
  }
  else if (!so || strcmp(si, so))
  {
    ok=false;
    displayText("echoString mismatched");
  }
  else
    displayText("echoString pass");

  if (soap_call_ns__echoInteger(soap, site, "http://soapinterop.org/", ni, no))
  {
    ok=false;
    displayText("echoInteger fail");
  }
  else if (ni != no)
  {
    ok=false;
    displayText("echoInteger mismatched");
  }
  else
    displayText("echoInteger pass");

  if (soap_call_ns__echoFloat(soap, site, "http://soapinterop.org/", fi, fo))
  {
    ok=false;
    displayText("echoFloat fail");
  }
  else if (fi != fo)
  {
    ok=false;
    displayText("echoFloat mismatched");
  }
  else
    displayText("echoFloat pass");

  if (soap_call_ns__echoStruct(soap, site, "http://soapinterop.org/", sti, sto))
  {
    ok=false;
    displayText("echoStruct fail");
  }
  else if (!sto._return.varString || strcmp(sti.varString, sto._return.varString)
        || !sto._return.varInt || *sti.varInt != *sto._return.varInt
        || !sto._return.varFloat || *sti.varFloat != *sto._return.varFloat)
  {
    ok=false;
    displayText("echoStruct mismatch");
  }
  else
    displayText("echoStruct pass");

  if (soap_call_ns__echoStringArray(soap, site, "http://soapinterop.org/", Asi, Aso))
  {
    soap_set_fault(soap);
    soap_faultdetail(soap);
    ok=false;
    displayText("echoStringArray fail");
  }
  else
  { g = 0;
    if (Asi.__size != Aso.__size)
      g = 1;
    else
      for (i = 0; i < Asi.__size; i++)
        if (Asi.__ptr[i] && Aso.__ptr[i] && strcmp(Asi.__ptr[i], Aso.__ptr[i]))
          g = 1;
        else if (!Asi.__ptr[i])
          ;
        else if (Asi.__ptr[i] && !Aso.__ptr[i])
          g = 1;
    if (g)
    {
      ok=false;
      displayText("echoStringArray mismatch");
    }
    else
      displayText("echoStringArray pass");
  }

  if (soap_call_ns__echoIntegerArray(soap, site, "http://soapinterop.org/", Ani, Ano))
  { displayText("echoIntegerArray fail");
    ok=false;
  }
  else
  { g = 0;
    if (Ani.__size != Ano.__size)
      g = 1;
    else
      for (i = 0; i < Ani.__size; i++)
        if (Ani.__ptr[i] && (!Ano.__ptr[i] || *Ani.__ptr[i] != *Ano.__ptr[i]))
          g = 1;
    if (g)
    { displayText("echoIntegerArray mismatch");
      ok=false;
    }
    else
      displayText("echoIntegerArray pass");
  }

  if (soap_call_ns__echoFloatArray(soap, site, "http://soapinterop.org/", Afi, Afo))
  { displayText("echoFloatArray fail");
    ok=false;
  }
  else
  { g = 0;
    if (Afi.__size != Afo.__size)
      g = 1;
    else
      for (i = 0; i < Afi.__size; i++)
        // NaN never compares equal to itself, so treat NaN==NaN as a match.
        if (Afi.__ptr[i] && Afo.__ptr[i] && soap_isnan(*Afi.__ptr[i]) && soap_isnan(*Afo.__ptr[i]))
          ;
        else if (Afi.__ptr[i] && (!Afo.__ptr[i] || *Afi.__ptr[i] != *Afo.__ptr[i]))
          g = 1;
    if (g)
    { displayText("echoFloatArray mismatch");
      ok=false;
    }
    else
      displayText("echoFloatArray pass");
  }

  if (soap_call_ns__echoStructArray(soap, site, "http://soapinterop.org/", Asti, Asto))
  { displayText("echoStructArray fail");
    ok=false;
  }
  else
  { g = 0;
    if (Asti.__size+Asti.__offset != Asto.__size+Asto.__offset)
      g = 1;
    else
      for (i = Asti.__offset; i < Asti.__size+Asti.__offset; i++)
        if (!Asto.__ptr[i-Asto.__offset] ||
            !Asto.__ptr[i-Asto.__offset]->varString ||
            strcmp(Asti.__ptr[i-Asti.__offset]->varString, Asto.__ptr[i-Asto.__offset]->varString) ||
            !Asto.__ptr[i-Asto.__offset]->varInt ||
            *Asti.__ptr[i-Asti.__offset]->varInt != *Asto.__ptr[i-Asto.__offset]->varInt ||
            !Asto.__ptr[i-Asto.__offset]->varFloat ||
            *Asti.__ptr[i-Asti.__offset]->varFloat != *Asto.__ptr[i-Asto.__offset]->varFloat)
          g = 1;
    if (g)
    { displayText("echoStructArray mismatch");
      ok=false;
    }
    else
      displayText("echoStructArray pass");
  }

  if (soap_call_ns__echoVoid(soap, site, "http://soapinterop.org/", Rv))
  { displayText("echoVoid fail");
    ok=false;
  }
  else
    displayText("echoVoid pass");

  if (soap_call_ns__echoBase64(soap, site, "http://soapinterop.org/", b64i, b64o))
  { displayText("echoBase64 fail");
    ok=false;
  }
  else if ((b64i.__size+2)/3 != (b64o.__size+2)/3 || strncmp((char*)b64i.__ptr, (char*)b64o.__ptr,b64i.__size))
  {
    displayText("echoBase64 mismatch");
    ok=false;
  }
  else
    displayText("echoBase64 pass");

  if (soap_call_ns__echoDate(soap, site, "http://soapinterop.org/", dti, dto))
  {
    displayText("echoDate fail");
    ok=false;
  }
  else if (!dto || strncmp(dti, dto, 19))
  {
    displayText("echoDate mismatch");
    ok=false;
  }
  else
    displayText("echoDate pass");

  if (soap_call_ns__echoHexBinary(soap, site, "http://soapinterop.org/", hbi, hbo))
  {
    ok=false;
    displayText("echoHexBinary fail");
  }
  else if (hbi.__size != hbo.__size || strcmp((char*)hbi.__ptr, (char*)hbo.__ptr))
  {
    ok=false;
    displayText("echoHexBinary mismatch");
  }
  else
    displayText("echoHexBinary pass");

  if (soap_call_ns__echoDecimal(soap, site, "http://soapinterop.org/", Di, Do))
  {
    ok=false;
    displayText("echoDecimal fail");   // was incorrectly reported as "pass"
  }
  else if (strcmp(Di, Do))
  {
    ok=false;
    displayText("echoDecimal mismatch");
  }
  else
    displayText("echoDecimal pass");

  if (soap_call_ns__echoBoolean(soap, site, "http://soapinterop.org/", bi, bo))
  {
    ok=false;
    displayText("echoBoolean fail");
  }
  else if (bi != bo)
  {
    ok=false;
    displayText("echoBoolean mismatch");
  }
  else
    displayText("echoBoolean pass");

  if (ok)
    displayText("ALL PASS");
  else
    displayText("FAILURES");
  return 0;
end:
  return 1;
}
| cory-ko/KBWS | gsoap/Symbian/interop2test.cpp | C++ | gpl-2.0 | 11,395 |
/*
*
* Copyright 2003, 2004 Blur Studio Inc.
*
* This file is part of the Resin software package.
*
* Resin is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* Resin is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Resin; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
*/
#ifndef COMMIT_CODE
#include <qdir.h>
#include <stdlib.h>
#include "shotgroup.h"
int ShotGroup::frameStart() const
{
return shots()[shots().count()-1].frameStart();
}
int ShotGroup::frameEnd() const
{
return shots()[0].frameEnd();
}
int ShotGroup::frameStartEDL() const
{
return shots()[shots().count()-1].frameStartEDL();
}
int ShotGroup::frameEndEDL() const
{
return shots()[0].frameEndEDL();
}
// All Shot children of this group (recursive lookup).
ShotList ShotGroup::shots() const
{
	return children( Shot::type(), true );
}
#endif
| perryjrandall/arsenalsuite | cpp/lib/classes/base/shotgroupbase.cpp | C++ | gpl-2.0 | 1,325 |
/*
* Copyright (c) 2002, 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package nsk.jdi.VirtualMachine.redefineClasses;
import nsk.share.*;
import nsk.share.jpda.*;
import nsk.share.jdi.*;
/**
 * <code>redefineclasses012b</code> is the debuggee's part of the
 * redefineclasses012 test. This is the "new" version of the interface:
 * it adds a field (<code>newField001</code>) so the test can observe how
 * <code>VirtualMachine.redefineClasses()</code> handles a schema change.
 *
 * NOTE: in an interface every field is implicitly public static final,
 * so the differing modifier spellings on the preexisting fields below
 * are all equivalent; the variation is a deliberate part of the fixture
 * and must not be "normalized".
 */
public interface redefineclasses012b {
    //new fields
    static public final Object newField001 = null;
    //preexisting fields
    static public final Object field001 = null;
    public final Object field002 = null;
    final Object field003 = null;
    Object field004 = null;
}
| md-5/jdk10 | test/hotspot/jtreg/vmTestbase/nsk/jdi/VirtualMachine/redefineClasses/redefineclasses012/newclass01/redefineclasses012b.java | Java | gpl-2.0 | 1,616 |
<?php
/**
* @file
* Contains \Drupal\Console\Command\Generate\PluginFieldWidgetCommand.
*/
namespace Drupal\Console\Command\Generate;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Input\InputOption;
use Symfony\Component\Console\Output\OutputInterface;
use Drupal\Console\Generator\PluginFieldWidgetGenerator;
use Drupal\Console\Command\ModuleTrait;
use Drupal\Console\Command\ConfirmationTrait;
use Drupal\Console\Command\GeneratorCommand;
use Drupal\Console\Style\DrupalStyle;
/**
 * Console command that generates a Drupal FieldWidget plugin skeleton
 * (drupal generate:plugin:fieldwidget).
 */
class PluginFieldWidgetCommand extends GeneratorCommand
{
    use ModuleTrait;
    use ConfirmationTrait;

    /**
     * Declares the command name, description/help texts and the options
     * consumed by the generator: module, class, label, plugin-id and
     * field-type.
     */
    protected function configure()
    {
        $this
            ->setName('generate:plugin:fieldwidget')
            ->setDescription($this->trans('commands.generate.plugin.fieldwidget.description'))
            ->setHelp($this->trans('commands.generate.plugin.fieldwidget.help'))
            ->addOption('module', '', InputOption::VALUE_REQUIRED, $this->trans('commands.common.options.module'))
            ->addOption(
                'class',
                '',
                InputOption::VALUE_REQUIRED,
                $this->trans('commands.generate.plugin.fieldwidget.options.class')
            )
            ->addOption(
                'label',
                '',
                InputOption::VALUE_OPTIONAL,
                $this->trans('commands.generate.plugin.fieldwidget.options.label')
            )
            ->addOption(
                'plugin-id',
                '',
                InputOption::VALUE_OPTIONAL,
                $this->trans('commands.generate.plugin.fieldwidget.options.plugin-id')
            )
            ->addOption(
                'field-type',
                '',
                InputOption::VALUE_OPTIONAL,
                $this->trans('commands.generate.plugin.fieldwidget.options.field-type')
            );
    }

    /**
     * {@inheritdoc}
     *
     * Reads the options (already collected by interact() when missing),
     * runs the generator, then queues a cache rebuild so the new plugin
     * is discovered.
     */
    protected function execute(InputInterface $input, OutputInterface $output)
    {
        $io = new DrupalStyle($input, $output);
        // @see use Drupal\Console\Command\ConfirmationTrait::confirmGeneration
        if (!$this->confirmGeneration($io)) {
            return;
        }
        $module = $input->getOption('module');
        $class_name = $input->getOption('class');
        $label = $input->getOption('label');
        $plugin_id = $input->getOption('plugin-id');
        $field_type = $input->getOption('field-type');
        $this
            ->getGenerator()
            ->generate($module, $class_name, $label, $plugin_id, $field_type);
        // Plugin discovery is cached; rebuild it so the new widget shows up.
        $this->getChain()->addCommand('cache:rebuild', ['cache' => 'discovery']);
    }

    /**
     * Interactively prompts for every option that was not supplied on the
     * command line, writing each answer back into $input.
     */
    protected function interact(InputInterface $input, OutputInterface $output)
    {
        $io = new DrupalStyle($input, $output);
        $fieldTypePluginManager = $this->getService('plugin.manager.field.field_type');
        // --module option
        $module = $input->getOption('module');
        if (!$module) {
            // @see Drupal\Console\Command\ModuleTrait::moduleQuestion
            // NOTE(review): the other prompts use $io, but moduleQuestion()
            // is handed $output here - confirm which type the trait expects.
            $module = $this->moduleQuestion($output);
            $input->setOption('module', $module);
        }
        // --class option
        $class_name = $input->getOption('class');
        if (!$class_name) {
            $class_name = $io->ask(
                $this->trans('commands.generate.plugin.fieldwidget.questions.class'),
                'ExampleFieldWidget'
            );
            $input->setOption('class', $class_name);
        }
        // --plugin label option (defaults to a humanized form of the class)
        $label = $input->getOption('label');
        if (!$label) {
            $label = $io->ask(
                $this->trans('commands.generate.plugin.fieldwidget.questions.label'),
                $this->getStringHelper()->camelCaseToHuman($class_name)
            );
            $input->setOption('label', $label);
        }
        // --plugin-id option (defaults to an underscored form of the class)
        $plugin_id = $input->getOption('plugin-id');
        if (!$plugin_id) {
            $plugin_id = $io->ask(
                $this->trans('commands.generate.plugin.fieldwidget.questions.plugin-id'),
                $this->getStringHelper()->camelCaseToUnderscore($class_name)
            );
            $input->setOption('plugin-id', $plugin_id);
        }
        // --field-type option
        $field_type = $input->getOption('field-type');
        if (!$field_type) {
            // Gather valid field types.
            // ($field_type is reused as the foreach value variable below;
            // harmless, since this branch only runs when the option was
            // empty and the choice below overwrites it.)
            $field_type_options = array();
            foreach ($fieldTypePluginManager->getGroupedDefinitions($fieldTypePluginManager->getUiDefinitions()) as $category => $field_types) {
                foreach ($field_types as $name => $field_type) {
                    $field_type_options[] = $name;
                }
            }
            $field_type = $io->choice(
                $this->trans('commands.generate.plugin.fieldwidget.questions.field-type'),
                $field_type_options
            );
            $input->setOption('field-type', $field_type);
        }
    }

    /**
     * @return PluginFieldWidgetGenerator the generator used by execute().
     */
    protected function createGenerator()
    {
        return new PluginFieldWidgetGenerator();
    }
}
| sgrichards/BrightonDrupal | vendor/drupal/console/src/Command/Generate/PluginFieldWidgetCommand.php | PHP | gpl-2.0 | 5,154 |
/*
* Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package java.net;
/**
* Choose a network inteface to be the default for
* outgoing IPv6 traffic that does not specify a scope_id (and which needs one).
* We choose the first interface that is up and is (in order of preference):
* 1. neither loopback nor point to point
* 2. point to point
* 3. loopback
* 4. none.
* Platforms that do not require a default interface implement a dummy
* that returns null.
*/
import java.util.Enumeration;
import java.io.IOException;
class DefaultInterface {

    // Chosen once at class-initialization time; may be null when no
    // suitable interface exists on this host.
    private final static NetworkInterface defaultInterface =
        chooseDefaultInterface();

    /** Returns the interface chosen at class-load time (may be null). */
    static NetworkInterface getDefault() {
        return defaultInterface;
    }

    /**
     * Picks a default interface among those that are "up" and support
     * multicast, preferring (1) an interface that is neither loopback nor
     * point-to-point, then (2) point-to-point, then (3) loopback.
     *
     * @return the chosen interface, or {@code null} if none qualifies
     */
    private static NetworkInterface chooseDefaultInterface() {
        Enumeration<NetworkInterface> candidates;
        try {
            candidates = NetworkInterface.getNetworkInterfaces();
        } catch (IOException ignore) {
            // Unable to enumerate the network interfaces at all.
            return null;
        }

        NetworkInterface pointToPoint = null;
        NetworkInterface loopback = null;
        while (candidates.hasMoreElements()) {
            NetworkInterface nif = candidates.nextElement();
            try {
                if (!nif.isUp() || !nif.supportsMulticast())
                    continue;

                boolean isLoopback = nif.isLoopback();
                boolean isPPP = nif.isPointToPoint();
                if (!isLoopback && !isPPP) {
                    // Best case: an ordinary interface - take it immediately.
                    return nif;
                }
                if (isPPP && pointToPoint == null)
                    pointToPoint = nif;
                if (isLoopback && loopback == null)
                    loopback = nif;
            } catch (IOException skip) {
                // Skip interfaces whose state cannot be queried.
            }
        }
        return (pointToPoint != null) ? pointToPoint : loopback;
    }
}
| greghaskins/openjdk-jdk7u-jdk | src/macosx/classes/java/net/DefaultInterface.java | Java | gpl-2.0 | 3,499 |
/*
Copyright (C) 2011-2012 de4dot@gmail.com
This file is part of de4dot.
de4dot is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
de4dot is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with de4dot. If not, see <http://www.gnu.org/licenses/>.
*/
using de4dot.PE;
namespace de4dot.code.deobfuscators.MaxtoCode {
class DecrypterInfo {
public readonly MainType mainType;
public readonly PeImage peImage;
public readonly PeHeader peHeader;
public readonly McKey mcKey;
public readonly byte[] fileData;
public DecrypterInfo(MainType mainType, byte[] fileData) {
this.mainType = mainType;
this.peImage = new PeImage(fileData);
this.peHeader = new PeHeader(mainType, peImage);
this.mcKey = new McKey(peImage, peHeader);
this.fileData = fileData;
}
}
}
| hjlfmy/de4dot | de4dot.code/deobfuscators/MaxtoCode/DecrypterInfo.cs | C# | gpl-3.0 | 1,256 |
from __future__ import absolute_import
import time
class KafkaMetric(object):
    """A named metric backed by a measurable and a metric config.

    NOTE: the Java constructor additionally takes a lock instance.
    """

    def __init__(self, metric_name, measurable, config):
        """
        Arguments:
            metric_name: identifying name for the metric (must be truthy)
            measurable: object exposing measure(config, time_ms)
            config: configuration passed to the measurable on each read
        """
        for arg_label, arg_value in (('metric_name', metric_name),
                                     ('measurable', measurable)):
            if not arg_value:
                raise ValueError('%s must be non-empty' % arg_label)
        self._metric_name = metric_name
        self._measurable = measurable
        self._config = config

    @property
    def metric_name(self):
        return self._metric_name

    @property
    def measurable(self):
        return self._measurable

    @property
    def config(self):
        return self._config

    @config.setter
    def config(self, config):
        self._config = config

    def value(self, time_ms=None):
        """Measure the metric at ``time_ms`` (defaults to now, in ms)."""
        at_ms = (time.time() * 1000) if time_ms is None else time_ms
        return self.measurable.measure(self.config, at_ms)
| OpenBankProject/OBP-Kafka-Python | lib/kafka/metrics/kafka_metric.py | Python | agpl-3.0 | 933 |
"""
Help has buttons and menu items to open help, blog and forum pages in your primary browser.
"""
from __future__ import absolute_import
#Init has to be imported first because it has code to workaround the python bug where relative imports don't work if the module is imported as a main module.
import __init__
from fabmetheus_utilities import archive
from fabmetheus_utilities import settings
from skeinforge_application.skeinforge_utilities import skeinforge_profile
__author__ = 'Enrique Perez (perez_enrique@yahoo.com)'
__date__ = '$Date: 2008/21/04 $'
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
def getNewRepository():
	'Get a new help settings repository.'
	repository = HelpRepository()
	return repository
class HelpRepository:
	"A class to handle the help settings: a column of labels and clickable links shown in the Help tab."
	def __init__(self):
		"Set the default settings, execute title & settings fileName."
		# Register this repository under the craft-type lists so the GUI can find it.
		skeinforge_profile.addListsToCraftTypeRepository('skeinforge_application.skeinforge_utilities.skeinforge_help.html', self)
		# Announcements section: blog link for news and questions.
		announcementsText = '- Announcements - '
		announcementsLabel = settings.LabelDisplay().getFromName(announcementsText, self )
		announcementsLabel.columnspan = 6
		settings.LabelDisplay().getFromName('Fabmetheus Blog, Announcements & Questions:', self )
		settings.HelpPage().getFromNameAfterHTTP('fabmetheus.blogspot.com/', 'Fabmetheus Blog', self )
		settings.LabelSeparator().getFromRepository(self)
		# Documentation section: local table of contents, wiki manual and overview.
		settings.LabelDisplay().getFromName('- Documentation -', self )
		settings.LabelDisplay().getFromName('Local Documentation Table of Contents: ', self )
		settings.HelpPage().getFromNameSubName('Contents', self, 'contents.html')
		settings.LabelDisplay().getFromName('Wiki Manual with Pictures & Charts: ', self )
		settings.HelpPage().getFromNameAfterHTTP('fabmetheus.crsndoo.com/wiki/index.php/Skeinforge', 'Wiki Manual', self )
		settings.LabelDisplay().getFromName('Skeinforge Overview: ', self )
		settings.HelpPage().getFromNameSubName('Skeinforge Overview', self, 'skeinforge_application.skeinforge.html')
		settings.LabelSeparator().getFromRepository(self)
		# Search section: canned search pages for reprap, skeinforge and the web.
		settings.LabelDisplay().getFromName('- Search -', self )
		settings.LabelDisplay().getFromName('Reprap Search:', self )
		settings.HelpPage().getFromNameAfterHTTP('members.axion.net/~enrique/search_reprap.html', 'Reprap Search', self )
		settings.LabelDisplay().getFromName('Skeinforge Search:', self )
		settings.HelpPage().getFromNameAfterHTTP('members.axion.net/~enrique/search_skeinforge.html', 'Skeinforge Search', self )
		settings.LabelDisplay().getFromName('Web Search:', self )
		settings.HelpPage().getFromNameAfterHTTP('members.axion.net/~enrique/search_web.html', 'Web Search', self )
		settings.LabelSeparator().getFromRepository(self)
		# Troubleshooting section: forum link.
		settings.LabelDisplay().getFromName('- Troubleshooting -', self )
		settings.LabelDisplay().getFromName('Skeinforge Forum:', self)
		settings.HelpPage().getFromNameAfterHTTP('forums.reprap.org/list.php?154', ' Skeinforge Forum ', self )
		settings.LabelSeparator().getFromRepository(self)
		# Version line read from the version file, plus the only persisted setting.
		self.version = settings.LabelDisplay().getFromName('Version: ' + archive.getFileText(archive.getVersionFileName()), self)
		self.wikiManualPrimary = settings.BooleanSetting().getFromValue('Wiki Manual Primary', self, True )
		self.wikiManualPrimary.setUpdateFunction( self.save )
	def save(self):
		"Write the entities."
		settings.writeSettingsPrintMessage(self)
| nophead/Skeinforge50plus | skeinforge_application/skeinforge_utilities/skeinforge_help.py | Python | agpl-3.0 | 3,508 |
#!/usr/bin/env python
# Copyright (c) 2006-2007 XenSource, Inc.
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# Simple example using the asynchronous version of the VM start method
# Assumes the presence of a VM called 'new'
import pprint, time, sys
import XenAPI
def main(session):
    # Demonstrates the synchronous and asynchronous flavours of VM.start
    # against a logged-in XenAPI session (Python 2 script).
    print "Listing all VM references:"
    vms = session.xenapi.VM.get_all()
    pprint.pprint(vms)
    print "Dumping all VM records:"
    for vm in vms:
        pprint.pprint(session.xenapi.VM.get_record(vm))
    print "Attempting to start a VM called 'new' (if it doesn't exist this will throw an exception)"
    vm = session.xenapi.VM.get_by_name_label('new')[0]
    session.xenapi.VM.start(vm, False, True)
    print "Attempting to start the VM asynchronously"
    # Async.VM.start returns a task reference immediately instead of blocking.
    task = session.xenapi.Async.VM.start(vm, False, True)
    task_record = session.xenapi.task.get_record(task)
    print "The initial contents of the task record:"
    pprint.pprint(task_record)
    print "Waiting for the task to complete"
    # Poll once per second until the task leaves the "pending" state.
    while session.xenapi.task.get_status(task) == "pending": time.sleep(1)
    task_record = session.xenapi.task.get_record(task)
    print "The final contents of the task record:"
    pprint.pprint(task_record)
if __name__ == "__main__":
if len(sys.argv) <> 4:
print "Usage:"
print sys.argv[0], " <url> <username> <password>"
sys.exit(1)
url = sys.argv[1]
username = sys.argv[2]
password = sys.argv[3]
# First acquire a valid session by logging in:
session = XenAPI.Session(url)
session.xenapi.login_with_password(username, password, "1.0", "xen-api-scripts-vm-start-async.py")
main(session)
| anoobs/xen-api | scripts/examples/python/vm_start_async.py | Python | lgpl-2.1 | 2,335 |
//Draw a set of example ellipses on a canvas.
//Author: Rene Brun
TCanvas *ellipse(){
   // Canvas with a unit coordinate system for the shapes below.
   TCanvas *canvas = new TCanvas("c1");
   canvas->Range(0,0,1,1);

   // Title label across the top.
   TPaveLabel *title = new TPaveLabel(0.1,0.8,0.9,0.95,"Examples of Ellipses");
   title->SetFillColor(42);
   title->Draw();

   // Plain ellipse with default attributes.
   TEllipse *plain = new TEllipse(0.25,0.25,.1,.2);
   plain->Draw();

   // Wide ellipse with a hatched fill pattern.
   TEllipse *hatched = new TEllipse(0.25,0.6,.2,.1);
   hatched->SetFillColor(6);
   hatched->SetFillStyle(3008);
   hatched->Draw();

   // Partial ellipse (45 to 315 degrees), solid fill, colored outline.
   TEllipse *arc = new TEllipse(0.75,0.6,.2,.1,45,315);
   arc->SetFillColor(2);
   arc->SetFillStyle(1001);
   arc->SetLineColor(4);
   arc->Draw();

   // Rotated (62 degrees) partial ellipse with a thick outline.
   TEllipse *tilted = new TEllipse(0.75,0.25,.2,.15,45,315,62);
   tilted->SetFillColor(5);
   tilted->SetFillStyle(1001);
   tilted->SetLineColor(4);
   tilted->SetLineWidth(6);
   tilted->Draw();

   return canvas;
}
| perovic/root | tutorials/graphics/ellipse.C | C++ | lgpl-2.1 | 765 |
<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1" language="es_VE">
<context>
<name>DesktopSwitch</name>
<message>
<location filename="../desktopswitch.cpp" line="83"/>
<source>Switch to desktop %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../desktopswitch.cpp" line="123"/>
<location filename="../desktopswitch.cpp" line="133"/>
<source>Desktop %1</source>
<translation>Escritorio %1</translation>
</message>
</context>
<context>
<name>DesktopSwitchConfiguration</name>
<message>
<location filename="../desktopswitchconfiguration.ui" line="14"/>
<source>DesktopSwitch settings</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../desktopswitchconfiguration.ui" line="20"/>
<source>Number of rows:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../desktopswitchconfiguration.ui" line="40"/>
<source>Desktop labels:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../desktopswitchconfiguration.ui" line="58"/>
<source>Numbers</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../desktopswitchconfiguration.ui" line="63"/>
<source>Names</source>
<translation type="unfinished"></translation>
</message>
</context>
</TS>
| rbazaud/lxqt-panel | plugin-desktopswitch/translations/desktopswitch_es_VE.ts | TypeScript | lgpl-2.1 | 1,622 |
/*
Copyright 2012 - 2015 pac4j organization
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.pac4j.oauth.profile.facebook;
import junit.framework.TestCase;
import org.pac4j.oauth.profile.facebook.FacebookRelationshipStatus;
import org.pac4j.oauth.profile.facebook.converter.FacebookRelationshipStatusConverter;
/**
 * This class tests the {@link org.pac4j.oauth.profile.facebook.converter.FacebookRelationshipStatusConverter} class.
 *
 * @author Jerome Leleu
 * @since 1.0.0
 */
public final class TestFacebookRelationshipStatusConverter extends TestCase {

    private final FacebookRelationshipStatusConverter converter = new FacebookRelationshipStatusConverter();

    /**
     * Asserts that the converter maps {@code input} to {@code expected}.
     * Shared helper that removes the duplication between the per-status
     * test methods below.
     */
    private void assertConverted(final FacebookRelationshipStatus expected, final Object input) {
        assertEquals(expected, this.converter.convert(input));
    }

    public void testNull() {
        assertNull(this.converter.convert(null));
    }

    public void testNotAString() {
        // Non-String input must not be converted.
        assertNull(this.converter.convert(Boolean.TRUE));
    }

    public void testSingle() {
        assertConverted(FacebookRelationshipStatus.SINGLE, "Single");
    }

    public void testInARelationship() {
        assertConverted(FacebookRelationshipStatus.IN_A_RELATIONSHIP, "In a relationship");
    }

    public void testEngaged() {
        assertConverted(FacebookRelationshipStatus.ENGAGED, "Engaged");
    }

    public void testMarried() {
        assertConverted(FacebookRelationshipStatus.MARRIED, "Married");
    }

    public void testItsComplicated() {
        assertConverted(FacebookRelationshipStatus.ITS_COMPLICATED, "It's complicated");
    }

    public void testInAnOpenRelationship() {
        assertConverted(FacebookRelationshipStatus.IN_AN_OPEN_RELATIONSHIP, "In an open relationship");
    }

    public void testWidowed() {
        assertConverted(FacebookRelationshipStatus.WIDOWED, "Widowed");
    }

    public void testSeparated() {
        assertConverted(FacebookRelationshipStatus.SEPARATED, "Separated");
    }

    public void testDivorced() {
        assertConverted(FacebookRelationshipStatus.DIVORCED, "Divorced");
    }

    public void testInACivilUnion() {
        assertConverted(FacebookRelationshipStatus.IN_A_CIVIL_UNION, "In a civil union");
    }

    public void testInADomesticPartnership() {
        assertConverted(FacebookRelationshipStatus.IN_A_DOMESTIC_PARTNERSHIP, "In a domestic partnership");
    }

    // The converter must also accept the enum's own toString() form.

    public void testSingleEnum() {
        assertConverted(FacebookRelationshipStatus.SINGLE,
                        FacebookRelationshipStatus.SINGLE.toString());
    }

    public void testInARelationshipEnum() {
        assertConverted(FacebookRelationshipStatus.IN_A_RELATIONSHIP,
                        FacebookRelationshipStatus.IN_A_RELATIONSHIP.toString());
    }

    public void testEngagedEnum() {
        assertConverted(FacebookRelationshipStatus.ENGAGED,
                        FacebookRelationshipStatus.ENGAGED.toString());
    }

    public void testMarriedEnum() {
        assertConverted(FacebookRelationshipStatus.MARRIED,
                        FacebookRelationshipStatus.MARRIED.toString());
    }

    public void testItsComplicatedEnum() {
        assertConverted(FacebookRelationshipStatus.ITS_COMPLICATED,
                        FacebookRelationshipStatus.ITS_COMPLICATED.toString());
    }

    public void testInAnOpenRelationshipEnum() {
        assertConverted(FacebookRelationshipStatus.IN_AN_OPEN_RELATIONSHIP,
                        FacebookRelationshipStatus.IN_AN_OPEN_RELATIONSHIP.toString());
    }

    public void testWidowedEnum() {
        assertConverted(FacebookRelationshipStatus.WIDOWED,
                        FacebookRelationshipStatus.WIDOWED.toString());
    }

    public void testSeparatedEnum() {
        assertConverted(FacebookRelationshipStatus.SEPARATED,
                        FacebookRelationshipStatus.SEPARATED.toString());
    }

    public void testDivorcedEnum() {
        assertConverted(FacebookRelationshipStatus.DIVORCED,
                        FacebookRelationshipStatus.DIVORCED.toString());
    }

    public void testInACivilUnionEnum() {
        assertConverted(FacebookRelationshipStatus.IN_A_CIVIL_UNION,
                        FacebookRelationshipStatus.IN_A_CIVIL_UNION.toString());
    }

    public void testInADomesticPartnershipEnum() {
        assertConverted(FacebookRelationshipStatus.IN_A_DOMESTIC_PARTNERSHIP,
                        FacebookRelationshipStatus.IN_A_DOMESTIC_PARTNERSHIP.toString());
    }
}
| ganquan0910/pac4j | pac4j-oauth/src/test/java/org/pac4j/oauth/profile/facebook/TestFacebookRelationshipStatusConverter.java | Java | apache-2.0 | 5,452 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.crypto.key.kms;
import java.io.IOException;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import com.google.common.base.Preconditions;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.hadoop.classification.InterfaceAudience;
/**
* A Utility class that maintains a Queue of entries for a given key. It tries
* to ensure that there is are always at-least <code>numValues</code> entries
* available for the client to consume for a particular key.
* It also uses an underlying Cache to evict queues for keys that have not been
* accessed for a configurable period of time.
* Implementing classes are required to implement the
* <code>QueueRefiller</code> interface that exposes a method to refill the
* queue, when empty
*/
@InterfaceAudience.Private
public class ValueQueue <E> {
  /**
   * QueueRefiller interface a client must implement to use this class.
   * The implementation generates the actual values that are cached per key.
   */
  public interface QueueRefiller <E> {
    /**
     * Fills the given queue with newly generated values for the key.
     * Called both synchronously (when a queue runs dry) and from the
     * background refill threads.
     * @param keyName Key name
     * @param keyQueue Queue that needs to be filled
     * @param numValues number of Values to be added to the queue.
     * @throws IOException
     */
    public void fillQueueForKey(String keyName,
        Queue<E> keyQueue, int numValues) throws IOException;
  }
  // Thread-name format for the asynchronous refill pool.
  private static final String REFILL_THREAD =
      ValueQueue.class.getName() + "_thread";

  // Per-key value queues; entries expire after a period of no access.
  private final LoadingCache<String, LinkedBlockingQueue<E>> keyQueues;
  // Pool that runs the asynchronous refill tasks.
  private final ThreadPoolExecutor executor;
  // Work queue shared with the executor; deduplicates refill tasks per key.
  private final UniqueKeyBlockingQueue queue = new UniqueKeyBlockingQueue();
  // Client-supplied callback that actually generates values.
  private final QueueRefiller<E> refiller;
  // How many values to generate synchronously when a queue runs dry.
  private final SyncGenerationPolicy policy;

  // Target number of cached values per key.
  private final int numValues;
  // Fraction of numValues below which an async refill is scheduled.
  private final float lowWatermark;

  private volatile boolean executorThreadsStarted = false;
  /**
   * A <code>Runnable</code> which takes a string name. The name is the key
   * whose queue the task refills; {@link UniqueKeyBlockingQueue} uses it to
   * avoid queueing two refill tasks for the same key.
   */
  private abstract static class NamedRunnable implements Runnable {
    final String name;
    private NamedRunnable(String keyName) {
      this.name = keyName;
    }
  }
  /**
   * This backing blocking queue used in conjunction with the
   * <code>ThreadPoolExecutor</code> used by the <code>ValueQueue</code>. This
   * Queue accepts a task only if the task is not currently in the process
   * of being run by a thread which is implied by the presence of the key
   * in the <code>keysInProgress</code> set.
   *
   * NOTE: Only methods that are explicitly called by the
   * <code>ThreadPoolExecutor</code> need to be over-ridden.
   */
  private static class UniqueKeyBlockingQueue extends
      LinkedBlockingQueue<Runnable> {

    private static final long serialVersionUID = -2152747693695890371L;
    // Keys that already have a queued (or about-to-run) refill task.
    private HashSet<String> keysInProgress = new HashSet<String>();

    @Override
    public synchronized void put(Runnable e) throws InterruptedException {
      // Silently drop the task if a refill for this key is already pending.
      if (keysInProgress.add(((NamedRunnable)e).name)) {
        super.put(e);
      }
    }

    @Override
    public Runnable take() throws InterruptedException {
      Runnable k = super.take();
      if (k != null) {
        keysInProgress.remove(((NamedRunnable)k).name);
      }
      return k;
    }

    @Override
    public Runnable poll(long timeout, TimeUnit unit)
        throws InterruptedException {
      Runnable k = super.poll(timeout, unit);
      if (k != null) {
        keysInProgress.remove(((NamedRunnable)k).name);
      }
      return k;
    }

  }
  /**
   * Policy to decide how many values to generate synchronously when the
   * client asks for "n" values and the Queue is empty.
   * This decides how many values to return when client calls "getAtMost".
   */
  public static enum SyncGenerationPolicy {
    ATLEAST_ONE, // Return atleast 1 value
    LOW_WATERMARK, // Return min(n, lowWatermark * numValues) values
    ALL // Return n values
  }
  /**
   * Constructor takes the following tunable configuration parameters
   * @param numValues The number of values cached in the Queue for a
   *    particular key.
   * @param lowWatermark The ratio of (number of current entries/numValues)
   *    below which the <code>fillQueueForKey()</code> function will be
   *    invoked to fill the Queue.
   * @param expiry Expiry time after which the Key and associated Queue are
   *    evicted from the cache.
   * @param numFillerThreads Number of threads to use for the filler thread
   * @param policy The SyncGenerationPolicy to use when client
   *    calls "getAtMost"
   * @param refiller implementation of the QueueRefiller
   */
  public ValueQueue(final int numValues, final float lowWatermark,
      long expiry, int numFillerThreads, SyncGenerationPolicy policy,
      final QueueRefiller<E> refiller) {
    Preconditions.checkArgument(numValues > 0, "\"numValues\" must be > 0");
    Preconditions.checkArgument(((lowWatermark > 0)&&(lowWatermark <= 1)),
        "\"lowWatermark\" must be > 0 and <= 1");
    Preconditions.checkArgument(expiry > 0, "\"expiry\" must be > 0");
    Preconditions.checkArgument(numFillerThreads > 0,
        "\"numFillerThreads\" must be > 0");
    Preconditions.checkNotNull(policy, "\"policy\" must not be null");
    this.refiller = refiller;
    this.policy = policy;
    this.numValues = numValues;
    this.lowWatermark = lowWatermark;
    // Cache loader pre-fills a brand-new queue up to the low watermark
    // the first time a key is requested.
    keyQueues = CacheBuilder.newBuilder()
            .expireAfterAccess(expiry, TimeUnit.MILLISECONDS)
            .build(new CacheLoader<String, LinkedBlockingQueue<E>>() {
                  @Override
                  public LinkedBlockingQueue<E> load(String keyName)
                      throws Exception {
                    LinkedBlockingQueue<E> keyQueue =
                        new LinkedBlockingQueue<E>();
                    refiller.fillQueueForKey(keyName, keyQueue,
                        (int)(lowWatermark * numValues));
                    return keyQueue;
                  }
                });
    // Daemon threads so the refill pool never prevents JVM shutdown.
    executor =
        new ThreadPoolExecutor(numFillerThreads, numFillerThreads, 0L,
            TimeUnit.MILLISECONDS, queue, new ThreadFactoryBuilder()
                .setDaemon(true)
                .setNameFormat(REFILL_THREAD).build());
  }
  /**
   * Convenience constructor: uses {@code SyncGenerationPolicy.ALL} (generate
   * all requested values synchronously when the queue is empty).
   */
  public ValueQueue(final int numValues, final float lowWaterMark, long expiry,
      int numFillerThreads, QueueRefiller<E> fetcher) {
    this(numValues, lowWaterMark, expiry, numFillerThreads,
        SyncGenerationPolicy.ALL, fetcher);
  }
  /**
   * Initializes the Value Queues for the provided keys: looking a key up in
   * the cache triggers the loader, which pre-fills its queue.
   * @param keyNames Array of key Names
   * @throws ExecutionException
   */
  public void initializeQueuesForKeys(String... keyNames)
      throws ExecutionException {
    for (String keyName : keyNames) {
      keyQueues.get(keyName);
    }
  }
  /**
   * This removes the value currently at the head of the Queue for the
   * provided key. Will immediately fire the Queue filler function if key
   * does not exist.
   * If Queue exists but all values are drained, It will ask the generator
   * function to add 1 value to Queue and then drain it.
   * @param keyName String key name
   * @return E the next value in the Queue
   * @throws IOException
   * @throws ExecutionException
   */
  public E getNext(String keyName)
      throws IOException, ExecutionException {
    // Delegates to getAtMost: every policy returns at least one value.
    return getAtMost(keyName, 1).get(0);
  }
  /**
   * Drains the Queue for the provided key.
   *
   * @param keyName the key to drain the Queue for
   */
  public void drain(String keyName ) {
    try {
      keyQueues.get(keyName).clear();
    } catch (ExecutionException ex) {
      // Best effort: loading the queue may fail (e.g. refiller error),
      // in which case there is nothing to drain, so the error is ignored.
      //NOP
    }
  }
/**
 * This removes the "num" values currently at the head of the Queue for the
 * provided key. Will immediately fire the Queue filler function if key
 * does not exist.
 * How many values are actually returned is governed by the
 * <code>SyncGenerationPolicy</code> specified by the user.
 * @param keyName String key name
 * @param num Minimum number of values to return.
 * @return List<E> values returned
 * @throws IOException if the value generator fails
 * @throws ExecutionException if the queue for the key cannot be loaded
 */
public List<E> getAtMost(String keyName, int num) throws IOException,
    ExecutionException {
  LinkedBlockingQueue<E> keyQueue = keyQueues.get(keyName);
  // Using poll to avoid race condition..
  LinkedList<E> ekvs = new LinkedList<E>();
  try {
    for (int i = 0; i < num; i++) {
      E val = keyQueue.poll();
      // If queue is empty now, Based on the provided SyncGenerationPolicy,
      // figure out how many new values need to be generated synchronously
      if (val == null) {
        // Synchronous call to get remaining values
        int numToFill = 0;
        switch (policy) {
        case ATLEAST_ONE:
          // Only generate a value if nothing was drained so far.
          numToFill = (ekvs.size() < 1) ? 1 : 0;
          break;
        case LOW_WATERMARK:
          // Top up to the low watermark (capped by the request size).
          numToFill =
              Math.min(num, (int) (lowWatermark * numValues)) - ekvs.size();
          break;
        case ALL:
          // Generate everything the caller asked for.
          numToFill = num - ekvs.size();
          break;
        }
        // Synchronous fill if not enough values found
        if (numToFill > 0) {
          refiller.fillQueueForKey(keyName, ekvs, numToFill);
        }
        // Asynch task to fill > lowWatermark
        if (i <= (int) (lowWatermark * numValues)) {
          submitRefillTask(keyName, keyQueue);
        }
        return ekvs;
      }
      ekvs.add(val);
    }
  } catch (Exception e) {
    // Fixed typo in the wrapped error message ("Exeption" -> "Exception").
    throw new IOException("Exception while contacting value generator ", e);
  }
  return ekvs;
}
/**
 * Schedules an asynchronous refill of the given key's queue by enqueueing
 * a NamedRunnable directly on the executor's backing queue.
 *
 * @param keyName key whose queue should be topped up
 * @param keyQueue the queue to refill
 * @throws InterruptedException if interrupted while enqueueing the task
 */
private void submitRefillTask(final String keyName,
    final Queue<E> keyQueue) throws InterruptedException {
  if (!executorThreadsStarted) {
    synchronized (this) {
      // To ensure all requests are first queued, make coreThreads =
      // maxThreads
      // and pre-start all the Core Threads.
      // NOTE(review): executorThreadsStarted is read outside the lock; if
      // it is not declared volatile the worst case appears to be a
      // redundant prestartAllCoreThreads() call — confirm against the
      // field declaration.
      executor.prestartAllCoreThreads();
      executorThreadsStarted = true;
    }
  }
  // The submit/execute method of the ThreadPoolExecutor is bypassed and
  // the Runnable is directly put in the backing BlockingQueue so that we
  // can control exactly how the runnable is inserted into the queue.
  queue.put(
      new NamedRunnable(keyName) {
        @Override
        public void run() {
          int cacheSize = numValues;
          int threshold = (int) (lowWatermark * (float) cacheSize);
          // Need to ensure that only one refill task per key is executed
          try {
            // Only refill if the queue has actually fallen below the
            // watermark by the time this task runs.
            if (keyQueue.size() < threshold) {
              refiller.fillQueueForKey(name, keyQueue,
                  cacheSize - keyQueue.size());
            }
          } catch (final Exception e) {
            throw new RuntimeException(e);
          }
        }
      }
  );
}
/**
 * Cleanly shutdown. Interrupts the filler threads and discards any queued
 * refill tasks via shutdownNow(); does not wait for termination.
 */
public void shutdown() {
  executor.shutdownNow();
}
}
| bruthe/hadoop-2.6.0r | src/common/org/apache/hadoop/crypto/key/kms/ValueQueue.java | Java | apache-2.0 | 12,143 |
package version
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
// Number contains the semantic version of this SDK.
// NOTE: generated code (see file header) — manual edits will be lost on
// regeneration.
const Number = "v40.2.0"
| sethpollack/kubernetes | vendor/github.com/Azure/azure-sdk-for-go/version/version.go | GO | apache-2.0 | 865 |
/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package fake
import (
api "k8s.io/kubernetes/pkg/api"
unversioned "k8s.io/kubernetes/pkg/api/unversioned"
v1 "k8s.io/kubernetes/pkg/api/v1"
v1alpha1 "k8s.io/kubernetes/pkg/apis/rbac/v1alpha1"
core "k8s.io/kubernetes/pkg/client/testing/core"
labels "k8s.io/kubernetes/pkg/labels"
watch "k8s.io/kubernetes/pkg/watch"
)
// FakeClusterRoleBindings implements ClusterRoleBindingInterface
type FakeClusterRoleBindings struct {
	Fake *FakeRbacV1alpha1 // parent fake that records and reacts to invoked actions
}

// clusterrolebindingsResource identifies the cluster-scoped resource that
// every fake action in this file is registered against.
var clusterrolebindingsResource = unversioned.GroupVersionResource{Group: "rbac.authorization.k8s.io", Version: "v1alpha1", Resource: "clusterrolebindings"}
// Create records a root-scoped create action for the given ClusterRoleBinding
// and returns whatever object the fake reaction chain produced.
func (c *FakeClusterRoleBindings) Create(clusterRoleBinding *v1alpha1.ClusterRoleBinding) (result *v1alpha1.ClusterRoleBinding, err error) {
	action := core.NewRootCreateAction(clusterrolebindingsResource, clusterRoleBinding)
	obj, err := c.Fake.Invokes(action, &v1alpha1.ClusterRoleBinding{})
	if obj == nil {
		return nil, err
	}
	return obj.(*v1alpha1.ClusterRoleBinding), err
}
// Update records a root-scoped update action for the given ClusterRoleBinding
// and returns whatever object the fake reaction chain produced.
func (c *FakeClusterRoleBindings) Update(clusterRoleBinding *v1alpha1.ClusterRoleBinding) (result *v1alpha1.ClusterRoleBinding, err error) {
	action := core.NewRootUpdateAction(clusterrolebindingsResource, clusterRoleBinding)
	obj, err := c.Fake.Invokes(action, &v1alpha1.ClusterRoleBinding{})
	if obj == nil {
		return nil, err
	}
	return obj.(*v1alpha1.ClusterRoleBinding), err
}
// Delete records a root-scoped delete action for the named ClusterRoleBinding.
// The options argument is accepted for interface compatibility but not used.
func (c *FakeClusterRoleBindings) Delete(name string, options *v1.DeleteOptions) error {
	action := core.NewRootDeleteAction(clusterrolebindingsResource, name)
	_, err := c.Fake.Invokes(action, &v1alpha1.ClusterRoleBinding{})
	return err
}
// DeleteCollection records a root-scoped delete-collection action using the
// supplied list options.
func (c *FakeClusterRoleBindings) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {
	_, err := c.Fake.Invokes(core.NewRootDeleteCollectionAction(clusterrolebindingsResource, listOptions), &v1alpha1.ClusterRoleBindingList{})
	return err
}
// Get records a root-scoped get action for the named ClusterRoleBinding and
// returns whatever object the fake reaction chain produced.
func (c *FakeClusterRoleBindings) Get(name string) (result *v1alpha1.ClusterRoleBinding, err error) {
	action := core.NewRootGetAction(clusterrolebindingsResource, name)
	obj, err := c.Fake.Invokes(action, &v1alpha1.ClusterRoleBinding{})
	if obj == nil {
		return nil, err
	}
	return obj.(*v1alpha1.ClusterRoleBinding), err
}
// List records a root-scoped list action and then filters the returned items
// by the label selector carried in opts (everything matches when no selector
// is given).
func (c *FakeClusterRoleBindings) List(opts v1.ListOptions) (result *v1alpha1.ClusterRoleBindingList, err error) {
	action := core.NewRootListAction(clusterrolebindingsResource, opts)
	obj, err := c.Fake.Invokes(action, &v1alpha1.ClusterRoleBindingList{})
	if obj == nil {
		return nil, err
	}
	label, _, _ := core.ExtractFromListOptions(opts)
	if label == nil {
		label = labels.Everything()
	}
	filtered := &v1alpha1.ClusterRoleBindingList{}
	for _, item := range obj.(*v1alpha1.ClusterRoleBindingList).Items {
		if label.Matches(labels.Set(item.Labels)) {
			filtered.Items = append(filtered.Items, item)
		}
	}
	return filtered, err
}
// Watch returns a watch.Interface that watches the requested clusterRoleBindings.
func (c *FakeClusterRoleBindings) Watch(opts v1.ListOptions) (watch.Interface, error) {
	action := core.NewRootWatchAction(clusterrolebindingsResource, opts)
	return c.Fake.InvokesWatch(action)
}
// Patch applies the patch and returns the patched clusterRoleBinding.
func (c *FakeClusterRoleBindings) Patch(name string, pt api.PatchType, data []byte, subresources ...string) (result *v1alpha1.ClusterRoleBinding, err error) {
	action := core.NewRootPatchSubresourceAction(clusterrolebindingsResource, name, data, subresources...)
	obj, err := c.Fake.Invokes(action, &v1alpha1.ClusterRoleBinding{})
	if obj == nil {
		return nil, err
	}
	return obj.(*v1alpha1.ClusterRoleBinding), err
}
| github-co/kubernetes | pkg/client/clientset_generated/release_1_5/typed/rbac/v1alpha1/fake/fake_clusterrolebinding.go | GO | apache-2.0 | 4,098 |
package datastore
import (
"fmt"
"log"
"reflect"
"strings"
"sync"
"time"
"github.com/docker/docker/libnetwork/discoverapi"
"github.com/docker/docker/libnetwork/types"
"github.com/docker/libkv"
"github.com/docker/libkv/store"
)
// DataStore abstracts a scoped key/value store (optionally cached) used to
// persist and watch libnetwork objects.
type DataStore interface {
	// GetObject gets data from datastore and unmarshals to the specified object
	GetObject(key string, o KVObject) error
	// PutObject adds a new Record based on an object into the datastore
	PutObject(kvObject KVObject) error
	// PutObjectAtomic provides an atomic add and update operation for a Record
	PutObjectAtomic(kvObject KVObject) error
	// DeleteObject deletes a record
	DeleteObject(kvObject KVObject) error
	// DeleteObjectAtomic performs an atomic delete operation
	DeleteObjectAtomic(kvObject KVObject) error
	// DeleteTree deletes a record
	DeleteTree(kvObject KVObject) error
	// Watchable returns whether the store is watchable or not
	Watchable() bool
	// Watch for changes on a KVObject
	Watch(kvObject KVObject, stopCh <-chan struct{}) (<-chan KVObject, error)
	// RestartWatch retriggers stopped Watches
	RestartWatch()
	// Active returns if the store is active
	Active() bool
	// List returns of a list of KVObjects belonging to the parent
	// key. The caller must pass a KVObject of the same type as
	// the objects that need to be listed
	List(string, KVObject) ([]KVObject, error)
	// Map returns a Map of KVObjects
	Map(key string, kvObject KVObject) (map[string]KVObject, error)
	// Scope returns the scope of the store
	Scope() string
	// KVStore returns access to the KV Store
	KVStore() store.Store
	// Close closes the data store
	Close()
}

// ErrKeyModified is raised for an atomic update when the update is working on a stale state
var (
	ErrKeyModified = store.ErrKeyModified
	ErrKeyNotFound = store.ErrKeyNotFound
)

// datastore is the concrete DataStore implementation backed by a libkv store.
type datastore struct {
	scope      string        // LocalScope or GlobalScope
	store      store.Store   // underlying libkv backend
	cache      *cache        // optional read cache (local scope only)
	watchCh    chan struct{} // closed by RestartWatch to re-trigger watch loops
	active     bool          // cleared by the watch loop when the backend resets
	sequential bool          // serialize operations under the mutex (local scope)
	sync.Mutex
}
// KVObject is Key/Value interface used by objects to be part of the DataStore
type KVObject interface {
	// Key method lets an object provide the Key to be used in KV Store
	Key() []string
	// KeyPrefix method lets an object return immediate parent key that can be used for tree walk
	KeyPrefix() []string
	// Value method lets an object marshal its content to be stored in the KV store
	Value() []byte
	// SetValue is used by the datastore to set the object's value when loaded from the data store.
	SetValue([]byte) error
	// Index method returns the latest DB Index as seen by the object
	Index() uint64
	// SetIndex method allows the datastore to store the latest DB Index into the object
	SetIndex(uint64)
	// True if the object exists in the datastore, false if it hasn't been stored yet.
	// When SetIndex() is called, the object has been stored.
	Exists() bool
	// DataScope indicates the storage scope of the KV object
	DataScope() string
	// Skip provides a way for a KV Object to avoid persisting it in the KV Store
	Skip() bool
}

// KVConstructor interface defines methods which can construct a KVObject from another.
type KVConstructor interface {
	// New returns a new object which is created based on the
	// source object
	New() KVObject
	// CopyTo deep copies the contents of the implementing object
	// to the passed destination object
	CopyTo(KVObject) error
}

// ScopeCfg represents Datastore configuration.
type ScopeCfg struct {
	Client ScopeClientCfg
}

// ScopeClientCfg represents Datastore Client-only mode configuration
type ScopeClientCfg struct {
	Provider string        // backend name understood by libkv (e.g. "boltdb")
	Address  string        // backend address (file path for boltdb)
	Config   *store.Config // optional backend-specific settings
}

const (
	// LocalScope indicates to store the KV object in local datastore such as boltdb
	LocalScope = "local"
	// GlobalScope indicates to store the KV object in global datastore such as consul/etcd/zookeeper
	GlobalScope = "global"
	// SwarmScope is not indicating a datastore location. It is defined here
	// along with the other two scopes just for consistency.
	SwarmScope = "swarm"

	// defaultPrefix is the on-disk location for the default local boltdb file.
	defaultPrefix = "/var/lib/docker/network/files"
)

const (
	// NetworkKeyPrefix is the prefix for network key in the kv store
	NetworkKeyPrefix = "network"
	// EndpointKeyPrefix is the prefix for endpoint key in the kv store
	EndpointKeyPrefix = "endpoint"
)

// defaultScopes holds the process-wide scope table; see DefaultScopes.
var (
	defaultScopes = makeDefaultScopes()
)
// makeDefaultScopes builds the built-in scope table: a single LocalScope
// entry backed by a BoltDB file under the default prefix.
func makeDefaultScopes() map[string]*ScopeCfg {
	localCfg := &ScopeCfg{
		Client: ScopeClientCfg{
			Provider: string(store.BOLTDB),
			Address:  defaultPrefix + "/local-kv.db",
			Config: &store.Config{
				Bucket:            "libnetwork",
				ConnectionTimeout: time.Minute,
			},
		},
	}
	return map[string]*ScopeCfg{LocalScope: localCfg}
}
// defaultRootChain is the default key prefix under which every object is stored.
var defaultRootChain = []string{"docker", "network", "v1.0"}

// rootChain is the active prefix used by Key/ParseKey; newClient may replace
// it with a custom-prefixed copy of defaultRootChain.
var rootChain = defaultRootChain
// DefaultScopes returns a map of default scopes and its config for clients to use.
// When dataDir is non-empty the local boltdb file is placed under it instead of
// the compiled-in default prefix.
func DefaultScopes(dataDir string) map[string]*ScopeCfg {
	addr := defaultPrefix + "/local-kv.db"
	if dataDir != "" {
		addr = dataDir + "/network/files/local-kv.db"
	}
	defaultScopes[LocalScope].Client.Address = addr
	return defaultScopes
}
// IsValid checks if the scope config has valid configuration: a non-nil
// config with non-blank provider and address.
func (cfg *ScopeCfg) IsValid() bool {
	return cfg != nil &&
		strings.TrimSpace(cfg.Client.Provider) != "" &&
		strings.TrimSpace(cfg.Client.Address) != ""
}
// Key builds a slash-separated datastore key from the root chain plus the
// given components; the result always carries a trailing "/".
func Key(key ...string) string {
	parts := make([]string, 0, len(rootChain)+len(key))
	parts = append(parts, rootChain...)
	parts = append(parts, key...)
	return strings.Join(parts, "/") + "/"
}
// ParseKey unpacks a key produced by Key, returning the components that
// follow the root chain.
func ParseKey(key string) ([]string, error) {
	chain := strings.Split(strings.Trim(key, "/"), "/")
	prefixLen := len(rootChain)
	// A valid key must consist of the root chain plus at least one component.
	if len(chain) > prefixLen && reflect.DeepEqual(chain[:prefixLen], rootChain) {
		return chain[prefixLen:], nil
	}
	return nil, types.BadRequestErrorf("invalid Key : %s", key)
}
// newClient connects to the KV store backend identified by kv at addr and
// wraps it in a datastore for the given scope. When cached is true (only
// allowed for LocalScope) reads are served from an in-memory cache.
func newClient(scope string, kv string, addr string, config *store.Config, cached bool) (DataStore, error) {

	if cached && scope != LocalScope {
		return nil, fmt.Errorf("caching supported only for scope %s", LocalScope)
	}
	// Local stores serialize every operation under the datastore mutex.
	sequential := false
	if scope == LocalScope {
		sequential = true
	}

	if config == nil {
		config = &store.Config{}
	}

	var addrs []string

	if kv == string(store.BOLTDB) {
		// Parse file path
		addrs = strings.Split(addr, ",")
	} else {
		// Parse URI
		parts := strings.SplitN(addr, "/", 2)
		addrs = strings.Split(parts[0], ",")

		// Add the custom prefix to the root chain
		// NOTE(review): this mutates the package-level rootChain, which
		// changes the keys produced by every subsequent Key()/ParseKey()
		// call process-wide.
		if len(parts) == 2 {
			rootChain = append([]string{parts[1]}, defaultRootChain...)
		}
	}

	// Renamed from "store" so the local variable no longer shadows the
	// imported libkv "store" package.
	kvStore, err := libkv.NewStore(store.Backend(kv), addrs, config)
	if err != nil {
		return nil, err
	}

	ds := &datastore{scope: scope, store: kvStore, active: true, watchCh: make(chan struct{}), sequential: sequential}
	if cached {
		ds.cache = newCache(ds)
	}

	return ds, nil
}
// NewDataStore creates a new instance of LibKV data store. If cfg is missing
// or incomplete, the built-in default configuration for the scope is used;
// an error is returned when no usable configuration exists.
func NewDataStore(scope string, cfg *ScopeCfg) (DataStore, error) {
	if cfg == nil || cfg.Client.Provider == "" || cfg.Client.Address == "" {
		c, ok := defaultScopes[scope]
		if !ok || c.Client.Provider == "" || c.Client.Address == "" {
			return nil, fmt.Errorf("unexpected scope %s without configuration passed", scope)
		}
		cfg = c
	}

	// Only the local scope is served through the in-memory cache.
	cached := scope == LocalScope

	return newClient(scope, cfg.Client.Provider, cfg.Client.Address, cfg.Client.Config, cached)
}
// NewDataStoreFromConfig creates a new instance of LibKV data store starting from the datastore config data
func NewDataStoreFromConfig(dsc discoverapi.DatastoreConfigData) (DataStore, error) {
	sCfgP, ok := dsc.Config.(*store.Config)
	if !ok && dsc.Config != nil {
		return nil, fmt.Errorf("cannot parse store configuration: %v", dsc.Config)
	}

	scopeCfg := &ScopeCfg{
		Client: ScopeClientCfg{
			Address:  dsc.Address,
			Provider: dsc.Provider,
			Config:   sCfgP,
		},
	}

	ds, err := NewDataStore(dsc.Scope, scopeCfg)
	if err != nil {
		return nil, fmt.Errorf("failed to construct datastore client from datastore configuration %v: %v", dsc, err)
	}

	return ds, err
}
// Close releases the underlying KV store connection.
func (ds *datastore) Close() {
	ds.store.Close()
}

// Scope returns the scope this datastore was created with.
func (ds *datastore) Scope() string {
	return ds.scope
}

// Active reports whether the store is currently considered live; it is
// cleared by the Watch loop when the backend watch returns a nil pair.
func (ds *datastore) Active() bool {
	return ds.active
}

// Watchable reports whether this store supports watches; only non-local
// scopes are watchable.
func (ds *datastore) Watchable() bool {
	return ds.scope != LocalScope
}
// Watch streams updates to kvObject from the backend store until stopCh is
// closed. kvObject must implement KVConstructor so a fresh instance can be
// built for every update. If the backend watch dies, the loop parks until
// RestartWatch() closes the current watchCh, then re-subscribes.
func (ds *datastore) Watch(kvObject KVObject, stopCh <-chan struct{}) (<-chan KVObject, error) {
	sCh := make(chan struct{})

	ctor, ok := kvObject.(KVConstructor)
	if !ok {
		return nil, fmt.Errorf("error watching object type %T, object does not implement KVConstructor interface", kvObject)
	}

	kvpCh, err := ds.store.Watch(Key(kvObject.Key()...), sCh)
	if err != nil {
		return nil, err
	}

	kvoCh := make(chan KVObject)

	go func() {
	retry_watch:
		var err error

		// Make sure to get a new instance of watch channel
		ds.Lock()
		watchCh := ds.watchCh
		ds.Unlock()

	loop:
		for {
			select {
			case <-stopCh:
				close(sCh)
				return
			case kvPair := <-kvpCh:
				// If the backend KV store gets reset libkv's go routine
				// for the watch can exit resulting in a nil value in
				// channel.
				if kvPair == nil {
					ds.Lock()
					ds.active = false
					ds.Unlock()
					break loop
				}

				dstO := ctor.New()

				// A value that fails to unmarshal is logged and skipped;
				// the watch keeps running.
				if err = dstO.SetValue(kvPair.Value); err != nil {
					log.Printf("Could not unmarshal kvpair value = %s", string(kvPair.Value))
					break
				}

				dstO.SetIndex(kvPair.LastIndex)
				kvoCh <- dstO
			}
		}

		// Wait on watch channel for a re-trigger when datastore becomes active
		<-watchCh

		kvpCh, err = ds.store.Watch(Key(kvObject.Key()...), sCh)
		if err != nil {
			log.Printf("Could not watch the key %s in store: %v", Key(kvObject.Key()...), err)
		}

		goto retry_watch
	}()

	return kvoCh, nil
}
// RestartWatch marks the store active again and wakes every watch loop
// parked in Watch(): the old watchCh is closed (releasing the waiters) and
// replaced with a fresh channel for the next restart cycle.
func (ds *datastore) RestartWatch() {
	ds.Lock()
	defer ds.Unlock()

	ds.active = true
	watchCh := ds.watchCh
	ds.watchCh = make(chan struct{})
	close(watchCh)
}
// KVStore exposes the raw libkv store backing this datastore.
func (ds *datastore) KVStore() store.Store {
	return ds.store
}
// PutObjectAtomic adds a new Record based on an object into the datastore
// with compare-and-swap semantics: the write succeeds only when the stored
// index still matches the object's index (ErrKeyModified otherwise). On
// success the object's index is refreshed from the store. Objects with
// Skip() set bypass the backend and are only sequenced through the cache.
func (ds *datastore) PutObjectAtomic(kvObject KVObject) error {
	var (
		previous *store.KVPair
		pair     *store.KVPair
		err      error
	)
	if ds.sequential {
		ds.Lock()
		defer ds.Unlock()
	}

	if kvObject == nil {
		return types.BadRequestErrorf("invalid KV Object : nil")
	}

	kvObjValue := kvObject.Value()

	if kvObjValue == nil {
		return types.BadRequestErrorf("invalid KV Object with a nil Value for key %s", Key(kvObject.Key()...))
	}

	if kvObject.Skip() {
		goto add_cache
	}

	if kvObject.Exists() {
		// Update path: CAS against the index we last observed.
		previous = &store.KVPair{Key: Key(kvObject.Key()...), LastIndex: kvObject.Index()}
	} else {
		// Create path: nil previous means the key must not exist yet.
		previous = nil
	}

	_, pair, err = ds.store.AtomicPut(Key(kvObject.Key()...), kvObjValue, previous, nil)
	if err != nil {
		// Translate the backend's conflict error into the datastore's own.
		if err == store.ErrKeyExists {
			return ErrKeyModified
		}
		return err
	}

	kvObject.SetIndex(pair.LastIndex)

add_cache:
	if ds.cache != nil {
		// If persistent store is skipped, sequencing needs to
		// happen in cache.
		return ds.cache.add(kvObject, kvObject.Skip())
	}

	return nil
}
// PutObject adds a new Record based on an object into the datastore,
// unconditionally (no compare-and-swap). Objects with Skip() set are only
// written to the cache, never to the backend store.
func (ds *datastore) PutObject(kvObject KVObject) error {
	if ds.sequential {
		ds.Lock()
		defer ds.Unlock()
	}

	if kvObject == nil {
		return types.BadRequestErrorf("invalid KV Object : nil")
	}

	if kvObject.Skip() {
		goto add_cache
	}

	if err := ds.putObjectWithKey(kvObject, kvObject.Key()...); err != nil {
		return err
	}

add_cache:
	if ds.cache != nil {
		// If persistent store is skipped, sequencing needs to
		// happen in cache.
		return ds.cache.add(kvObject, kvObject.Skip())
	}

	return nil
}
// putObjectWithKey marshals kvObject and stores it under the key built from
// the given components.
func (ds *datastore) putObjectWithKey(kvObject KVObject, key ...string) error {
	v := kvObject.Value()
	if v == nil {
		return types.BadRequestErrorf("invalid KV Object with a nil Value for key %s", Key(kvObject.Key()...))
	}
	return ds.store.Put(Key(key...), v, nil)
}
// GetObject returns a record matching the key, unmarshalled into o. When a
// cache is configured the lookup is served entirely from the cache.
func (ds *datastore) GetObject(key string, o KVObject) error {
	if ds.sequential {
		ds.Lock()
		defer ds.Unlock()
	}

	if ds.cache != nil {
		return ds.cache.get(key, o)
	}

	kvPair, err := ds.store.Get(key)
	if err != nil {
		return err
	}

	if err := o.SetValue(kvPair.Value); err != nil {
		return err
	}

	// Make sure the object has a correct view of the DB index in
	// case we need to modify it and update the DB.
	o.SetIndex(kvPair.LastIndex)
	return nil
}
// ensureParent creates the parent directory key (an empty IsDir entry) if it
// does not exist yet.
func (ds *datastore) ensureParent(parent string) error {
	exists, err := ds.store.Exists(parent)
	switch {
	case err != nil:
		return err
	case exists:
		return nil
	}
	return ds.store.Put(parent, []byte{}, &store.WriteOptions{IsDir: true})
}
// List returns all KVObjects stored under key; kvObject supplies the concrete
// type to deserialize into. Served from the cache when one is configured.
func (ds *datastore) List(key string, kvObject KVObject) ([]KVObject, error) {
	if ds.sequential {
		ds.Lock()
		defer ds.Unlock()
	}

	if ds.cache != nil {
		return ds.cache.list(kvObject)
	}

	var objects []KVObject
	appendObj := func(_ string, obj KVObject) {
		objects = append(objects, obj)
	}
	if err := ds.iterateKVPairsFromStore(key, kvObject, appendObj); err != nil {
		return nil, err
	}
	return objects, nil
}
// iterateKVPairsFromStore lists every KV pair under key, deserializes each
// non-empty value into a fresh instance built by kvObject's KVConstructor,
// and passes (storeKey, object) to callback.
func (ds *datastore) iterateKVPairsFromStore(key string, kvObject KVObject, callback func(string, KVObject)) error {
	// Bail out right away if the kvObject does not implement KVConstructor
	ctor, ok := kvObject.(KVConstructor)
	if !ok {
		return fmt.Errorf("error listing objects, object does not implement KVConstructor interface")
	}

	// Make sure the parent key exists
	if err := ds.ensureParent(key); err != nil {
		return err
	}

	kvList, err := ds.store.List(key)
	if err != nil {
		return err
	}

	for _, kvPair := range kvList {
		// Skip empty values (e.g. the directory placeholder written by
		// ensureParent).
		if len(kvPair.Value) == 0 {
			continue
		}

		dstO := ctor.New()
		if err := dstO.SetValue(kvPair.Value); err != nil {
			return err
		}

		// Make sure the object has a correct view of the DB index in
		// case we need to modify it and update the DB.
		dstO.SetIndex(kvPair.LastIndex)

		callback(kvPair.Key, dstO)
	}

	return nil
}
// Map returns the KVObjects stored under key, indexed by their store key with
// leading/trailing slashes trimmed.
func (ds *datastore) Map(key string, kvObject KVObject) (map[string]KVObject, error) {
	if ds.sequential {
		ds.Lock()
		defer ds.Unlock()
	}

	result := make(map[string]KVObject)
	collect := func(k string, v KVObject) {
		// Trim the leading & trailing "/" to make it consistent across all stores
		result[strings.Trim(k, "/")] = v
	}
	if err := ds.iterateKVPairsFromStore(key, kvObject, collect); err != nil {
		return nil, err
	}
	return result, nil
}
// DeleteObject unconditionally deletes a record from the store. The cache
// entry is removed first; objects with Skip() set never touch the backend.
func (ds *datastore) DeleteObject(kvObject KVObject) error {
	if ds.sequential {
		ds.Lock()
		defer ds.Unlock()
	}

	// cleanup the cache first
	if ds.cache != nil {
		// If persistent store is skipped, sequencing needs to
		// happen in cache.
		ds.cache.del(kvObject, kvObject.Skip())
	}

	if kvObject.Skip() {
		return nil
	}

	return ds.store.Delete(Key(kvObject.Key()...))
}
// DeleteObjectAtomic performs atomic delete on a record: the delete succeeds
// only when the stored index still matches kvObject.Index() (ErrKeyModified
// otherwise). Objects with Skip() set bypass the backend and are only
// removed from the cache.
func (ds *datastore) DeleteObjectAtomic(kvObject KVObject) error {
	if ds.sequential {
		ds.Lock()
		defer ds.Unlock()
	}

	if kvObject == nil {
		return types.BadRequestErrorf("invalid KV Object : nil")
	}

	previous := &store.KVPair{Key: Key(kvObject.Key()...), LastIndex: kvObject.Index()}

	if kvObject.Skip() {
		goto del_cache
	}

	if _, err := ds.store.AtomicDelete(Key(kvObject.Key()...), previous); err != nil {
		// Translate the backend's conflict error into the datastore's own.
		if err == store.ErrKeyExists {
			return ErrKeyModified
		}
		return err
	}

del_cache:
	// cleanup the cache only if AtomicDelete went through successfully
	if ds.cache != nil {
		// If persistent store is skipped, sequencing needs to
		// happen in cache.
		return ds.cache.del(kvObject, kvObject.Skip())
	}

	return nil
}
// DeleteTree unconditionally deletes a record and everything beneath its
// key prefix from the store. The cache entry is removed first; objects with
// Skip() set never touch the backend.
func (ds *datastore) DeleteTree(kvObject KVObject) error {
	if ds.sequential {
		ds.Lock()
		defer ds.Unlock()
	}

	// cleanup the cache first
	if ds.cache != nil {
		// If persistent store is skipped, sequencing needs to
		// happen in cache.
		ds.cache.del(kvObject, kvObject.Skip())
	}

	if kvObject.Skip() {
		return nil
	}

	return ds.store.DeleteTree(Key(kvObject.KeyPrefix()...))
}
| jthelin/docker | libnetwork/datastore/datastore.go | GO | apache-2.0 | 16,439 |