text
stringlengths
101
197k
meta
stringlengths
167
272
/** * The backgound color for tooltips. * * @type color * @group tooltip */ $v-tooltip-background-color: rgba(if(is-dark-color($v-background-color), scale-color($v-background-color, $lightness: 80%), scale-color($v-background-color, $lightness: -80%)), .9) !default; /** * The font color for tooltips. * * @type color * @group tooltip */ $v-tooltip-font-color: valo-font-color(opacify($v-tooltip-background-color, 1), 1) !default; /** * The font size for tooltips. * * @type size * @group tooltip */ $v-tooltip-font-size: max(12px, round($v-font-size * 0.86)) !default; /** * The CSS box shadow for tooltips. * * @type list * @group tooltip */ $v-tooltip-box-shadow: 0 2px 12px rgba(#000, .2) !default; /** * The vertical padding for tooltips. * * @type size * @group tooltip */ $v-tooltip-padding-vertical: round($v-unit-size/8) !default; /** * The horizontal padding for tooltips. * * @type size * @group tooltip */ $v-tooltip-padding-horizontal: round($v-unit-size/4) !default; /** * The backgound color for error tooltips. * * @type color * @group tooltip */ $v-tooltip-error-message-background-color: #fff !default; /** * The font color for error tooltips. * * @type color * @group tooltip */ $v-tooltip-error-message-font-color: $v-error-indicator-color !default; /** * The font color for error tooltips for level 'info'. * * @type color * @group tooltip */ $v-tooltip-error-message-level-info-font-color: $v-error-indicator-level-info-color !default; /** * The font color for error tooltips for level 'warning'. * * @type color * @group tooltip */ $v-tooltip-error-message-level-warning-font-color: $v-error-indicator-level-warning-color !default; /** * The font color for error tooltips for level 'error'. * * @type color * @group tooltip */ $v-tooltip-error-message-level-error-font-color: $v-error-indicator-level-error-color !default; /** * The font color for error tooltips for level 'critical'. 
* * @type color * @group tooltip */ $v-tooltip-error-message-level-critical-font-color: $v-error-indicator-level-critical-color !default; /** * The font color for error tooltips for level 'system'. * * @type color * @group tooltip */ $v-tooltip-error-message-level-system-font-color: $v-error-indicator-level-system-color !default; /** * The corner radius for tooltips. * * @type size * @group tooltip */ $v-tooltip-border-radius: $v-border-radius - 1px !default; /** * Outputs the selectors and styles for tooltip elements. * * @group tooltip */ @mixin valo-tooltip { .v-tooltip { @include valo-tooltip-style; div[style*="width"] { width: auto !important; } .v-errormessage { background-color: opacify($v-tooltip-error-message-background-color, 1); background-color: $v-tooltip-error-message-background-color; color: $v-tooltip-error-message-font-color; margin: -$v-tooltip-padding-vertical #{-$v-tooltip-padding-horizontal}; padding: $v-tooltip-padding-vertical $v-tooltip-padding-horizontal; max-height: 10em; overflow: auto; font-weight: $v-font-weight + 100; h2:only-child { font: inherit; line-height: inherit; } } .v-errormessage-info { color: $v-tooltip-error-message-level-info-font-color; } .v-errormessage-warning { color: $v-tooltip-error-message-level-warning-font-color; } .v-errormessage-error { color: $v-tooltip-error-message-level-error-font-color; } .v-errormessage-critical { color: $v-tooltip-error-message-level-critical-font-color; } .v-errormessage-system { color: $v-tooltip-error-message-level-system-font-color; } .v-tooltip-text { max-height: 10em; overflow: auto; margin-top: $v-tooltip-padding-vertical * 2; pre { margin: 0px; } } .v-errormessage[aria-hidden="true"] + .v-tooltip-text { margin-top: 0; } h1, h2, h3, h4 { color: inherit; } pre.v-tooltip-pre { font: inherit; white-space: pre-wrap; } } } /** * Outputs the main styles for tooltip elements. 
* * @group tooltip */ @mixin valo-tooltip-style { background-color: $v-tooltip-background-color; @include box-shadow($v-tooltip-box-shadow); color: $v-tooltip-font-color; padding: $v-tooltip-padding-vertical $v-tooltip-padding-horizontal; border-radius: $v-tooltip-border-radius; max-width: 35em; overflow: hidden !important; font-size: $v-tooltip-font-size; }
{'repo_name': 'vaadin/framework', 'stars': '1611', 'repo_language': 'Java', 'file_name': 'ServerRpcHandlerTest.java', 'mime_type': 'text/x-java', 'hash': 6615311731435665719, 'source_dataset': 'data'}
using System; using System.Collections.Generic; namespace Performance.EFCore { public partial class Location { public Location() { ProductInventory = new HashSet<ProductInventory>(); WorkOrderRouting = new HashSet<WorkOrderRouting>(); } public short LocationID { get; set; } public string Name { get; set; } public decimal CostRate { get; set; } public decimal Availability { get; set; } public DateTime ModifiedDate { get; set; } public virtual ICollection<ProductInventory> ProductInventory { get; set; } public virtual ICollection<WorkOrderRouting> WorkOrderRouting { get; set; } } }
{'repo_name': 'rowanmiller/Demo-EFCore', 'stars': '111', 'repo_language': 'C#', 'file_name': 'AssemblyInfo.cs', 'mime_type': 'text/plain', 'hash': -4431332878075673592, 'source_dataset': 'data'}
--TEST-- Oniguruma retry limit --SKIPIF-- <?php extension_loaded('mbstring') or die('skip mbstring not available'); if (!function_exists('mb_ereg')) die('skip mb_ereg not available'); if (version_compare(MB_ONIGURUMA_VERSION, '6.9.3') < 0) { die('skip requires Oniguruma >= 6.9.3'); } ?> --FILE-- <?php $regex = 'A(B|C+)+D|AC+X'; $str = 'ACCCCCCCCCCCCCCCCCCCX'; var_dump(mb_ereg($regex, $str)); ini_set('mbstring.regex_retry_limit', '100000'); var_dump(mb_ereg($regex, $str)); ?> --EXPECT-- int(1) bool(false)
{'repo_name': 'php/php-src', 'stars': '27767', 'repo_language': 'C', 'file_name': 'echo.inc', 'mime_type': 'text/x-php', 'hash': 3489978492704163188, 'source_dataset': 'data'}
#!/bin/sh set -e echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}" install_framework() { if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then local source="${BUILT_PRODUCTS_DIR}/$1" elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")" elif [ -r "$1" ]; then local source="$1" fi local destination="${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" if [ -L "${source}" ]; then echo "Symlinked..." source="$(readlink "${source}")" fi # use filter instead of exclude so missing patterns dont' throw errors echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\"" rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}" local basename basename="$(basename -s .framework "$1")" binary="${destination}/${basename}.framework/${basename}" if ! [ -r "$binary" ]; then binary="${destination}/${basename}" fi # Strip invalid architectures so "fat" simulator / device frameworks work on device if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then strip_invalid_archs "$binary" fi # Resign the code if required by the build settings to avoid unstable apps code_sign_if_enabled "${destination}/$(basename "$1")" # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7. 
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then local swift_runtime_libs swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]}) for lib in $swift_runtime_libs; do echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\"" rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}" code_sign_if_enabled "${destination}/${lib}" done fi } # Signs a framework with the provided identity code_sign_if_enabled() { if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then # Use the current code_sign_identitiy echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}" echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements \"$1\"" /usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements "$1" fi } # Strip invalid architectures strip_invalid_archs() { binary="$1" # Get architectures for current file archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)" stripped="" for arch in $archs; do if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then # Strip non-valid architectures in-place lipo -remove "$arch" -output "$binary" "$binary" || exit 1 stripped="$stripped $arch" fi done if [[ "$stripped" ]]; then echo "Stripped $binary of architectures:$stripped" fi }
{'repo_name': 'liu044100/SmileWeather', 'stars': '482', 'repo_language': 'Objective-C', 'file_name': 'Info.plist', 'mime_type': 'text/xml', 'hash': 7149171167571693668, 'source_dataset': 'data'}
using System; using System.Collections.Generic; using System.Linq; using System.Text; using Loyc.Ecs.Parser; using Loyc.MiniTest; using Loyc.Syntax; using Loyc.Syntax.Lexing; using S = Loyc.Syntax.CodeSymbols; namespace Loyc.Ecs.Tests { partial class EcsPrinterAndParserTests { [Test] public void PrintingRegressions() { Stmt("\"Hello\";", F.Literal("Hello")); // bug: was handled as an empty statement because Name.Name=="" for a literal Stmt("new Foo().x;", F.Dot(F.Call(S.New, F.Call(Foo)), x)); // this worked Stmt("new Foo().x();", F.Call(F.Dot(F.Call(S.New, F.Call(Foo)), x))); // but this used to Assert // bug: 'public' attribute was suppressed by DropNonDeclarationAttributes Stmt("class Foo {\n public Foo() { }\n}", F.Call(S.Class, Foo, F.List(), F.Braces( Attr(@public, F.Call(S.Constructor, F.Missing, Foo, F.List(), F.Braces())))), p => p.DropNonDeclarationAttributes = true); // bug: 'ref' and 'out' attributes were suppressed by DropNonDeclarationAttributes Option(Mode.PrintBothParseFirst, "Foo(out a, ref b, public static c, [#partial] x);", "Foo(out a, ref b, c, x);", F.Call(Foo, Attr(@out, a), Attr(@ref, b), Attr(@public, @static, c), Attr(@partial, x)), p => p.DropNonDeclarationAttributes = true); Stmt("private set;", F.Attr(F.Private, _("set")), p => p.DropNonDeclarationAttributes = true); } [Test] public void BugFixes() { LNode stmt; Expr("(a + b).b<c>()", F.Call(F.Dot(F.InParens(F.Call(S.Add, a, b)), F.Of(b, c)))); Stmt("@`'+`(a, b)(c, 1);", F.Call(F.Call(S.Add, a, b), c, one)); // was: "c+1" // once printed as "partial #var(Foo, a);" which would be parsed as a method declaration Stmt("partial Foo a;", Attr(@partialWA, F.Vars(Foo, a))); Stmt("public partial alt class BinaryTree<T> { }", F.Attr(F.Public, partialWA, WordAttr("#alt"), F.Call(S.Class, F.Of(F.Id("BinaryTree"), T), F.List(), F.Braces()))); Stmt("partial Foo.T x { get; }", Attr(partialWA, F.Property(F.Dot(Foo, T), x, F.Braces(get)))); Stmt("IFRange<char> ICloneable<IFRange<char>>.Clone() {\n 
return Clone();\n}", F.Fn(F.Of(_("IFRange"), F.Char), F.Dot(F.Of(_("ICloneable"), F.Of(_("IFRange"), F.Char)), _("Clone")), F.List(), F.Braces(F.Call(S.Return, F.Call("Clone"))))); Stmt("Foo<a> IDictionary<a, b>.Keys { }", F.Property(F.Of(Foo, a), F.Dot(F.Of(_("IDictionary"), a, b), _("Keys")), F.Braces())); Stmt("T IDictionary<Symbol, T>.this[Symbol x] { get; set; }", F.Property(T, F.Dot(F.Of(_("IDictionary"), _("Symbol"), T), F.@this), F.List(F.Var(_("Symbol"), x)), F.Braces(get, set))); Stmt("Func<T, T> x = delegate(T a) {\n return a;\n};", F.Var(F.Of(_("Func"), T, T), x, F.Call(S.Lambda, F.List(F.Var(T, a)), F.Braces(F.Call(S.Return, a))).SetBaseStyle(NodeStyle.OldStyle))); Stmt("public static rule EmailAddress Parse(T x) { }", F.Attr(F.Public, _(S.Static), WordAttr("rule"), F.Fn(_("EmailAddress"), _("Parse"), F.List(F.Var(T, x)), F.Braces()))); // Currently we're not trying to treat this as a keyword Stmt("dynamic Foo();", F.Fn(_("dynamic"), Foo, F.List())); Stmt("dynamic x;", F.Var(_("dynamic"), x)); Token[] token = new[] { new Token((int)TokenType.Literal, 0, 0, 0, 'a') }; var tree = new TokenTree(F.File, (ICollection<Token>)token); var lexer = F.Call(_("LLLPG"), _("lexer"), F.Braces( Attr(F.Public, F.Property(_("rule"), a, F.Literal(tree))))).SetBaseStyle(NodeStyle.Special); Stmt("LLLPG (lexer) {\n public rule a @{ 'a' };\n}", lexer, Mode.ParserTest); Stmt("LLLPG (lexer) {\n public rule a => @{ 'a' };\n}", lexer); // 2016-04 bug: ForEachStmt failed to call Up() before returning Stmt("{\n foreach (var x in Foo) { }\n Foo();\n}", F.Braces(F.Call(S.ForEach, F.Vars(F.Missing, x), Foo, F.Braces()), F.Call(Foo))); // 2016-10 bug: `property:` was applied to both attributes stmt = F.Attr(a, F.Call(S.NamedArg, F.Id("property"), b), F.Private, F.Property(F.String, Foo, F.Braces(get, set))); Stmt("[a, property: b] private string Foo { get; set; }", stmt); Stmt("[a] [property: b] public string Foo { get; set; }", stmt.WithAttrChanged(2, @public), Mode.ParserTest); 
Stmt("a = (var b = x);", F.Call(S.Assign, a, F.InParens(F.Var(F.Missing, b, x)))); // 2017-01 bug: operator>> and operator<< wouldn't parse // (because there is no dedicated token for >> or <<) stmt = Attr(F.Id(S.Static), F.Fn(F.Int32, Attr(trivia_operator, _(S.Shl)), F.List(F.Var(Foo, x), F.Var(F.Int32, a)), F.Braces(F.Call(S.Return, F.Call(S.Shl, x, a))))); Stmt("static int operator<<(Foo x, int a) {\n return x << a;\n}", stmt); stmt = Attr(F.Id(S.Static), F.Fn(F.Int32, Attr(trivia_operator, _(S.Shr)), F.List(F.Var(Foo, x), F.Var(F.Int32, a)), F.Braces(F.Call(S.Return, F.Call(S.Shr, x, a))))); Stmt("static int operator>>(Foo x, int a) {\n return x >> a;\n}", stmt); // https://github.com/qwertie/ecsharp/issues/90 Expr("({\n Foo;\n})", F.InParens(F.Braces(Foo))); Stmt("({\n stuff;\n});", F.InParens(F.Braces(_("stuff")))); } [Test] public void PrecedenceChallenges() { Expr(@"a.([] -b)", F.Dot(a, F.Call(S._Negate, b))); Expr(@"a.([] -b).c", F.Dot(a, F.Call(S._Negate, b), c)); Expr(@"a.([] -b.c)", F.Dot(a, F.Call(S._Negate, F.Dot(b, c)))); Expr(@"a::([] -b)", F.Call(S.ColonColon, a, F.Call(S._Negate, b))); Expr(@"a.b->c", F.Call(S.RightArrow, F.Dot(a, b), c)); Expr(@"a->([] b.c)", F.Call(S.RightArrow, a, F.Dot(b, c))); Expr(@"a.(-b)(c)", F.Call(F.Dot(a, F.InParens(F.Call(S._Negate, b))), c)); // The printer should revert to prefix notation in certain cases in // order to faithfully represent the original tree. 
Expr(@"a * b + c", F.Call(S.Add, F.Call(S.Mul, a, b), c)); Expr(@"(a + b) * c", F.Call(S.Mul, F.InParens(F.Call(S.Add, a, b)), c)); Expr(@"@`'+`(a, b) * c", F.Call(S.Mul, F.Call(S.Add, a, b), c)); Expr(@"--a++", F.Call(S.PreDec, F.Call(S.PostInc, a))); Expr(@"(--a)++", F.Call(S.PostInc, F.InParens(F.Call(S.PreDec, a)))); Expr(@"@`'--`(a)++", F.Call(S.PostInc, F.Call(S.PreDec, a))); LNode a_b = F.Dot(a, b), a_b__c = F.Call(S.NullDot, F.Dot(a, b), c); Expr(@"a.b?.c.x", F.Call(S.NullDot, a_b, F.Dot(c, x))); Expr(@"(a.b?.c).x", F.Dot(F.InParens(a_b__c), x)); Expr(@"@`'?.`(a.b, c).x", F.Dot(a_b__c, x)); Expr(@"++$x", F.Call(S.PreInc, F.Call(S.Substitute, x))); Expr(@"++$([Foo] x)", F.Call(S.PreInc, F.Call(S.Substitute, Attr(Foo, x)))); Expr(@"a ? b : c", F.Call(S.QuestionMark, a, b, c)); Expr(@"a ? b + x : c + x", F.Call(S.QuestionMark, a, F.Call(S.Add, b, x), F.Call(S.Add, c, x))); Expr(@"a ? b = x : (c = x)",F.Call(S.QuestionMark, a, F.Assign(b, x), F.InParens(F.Assign(c, x)))); Expr(@"++$x", F.Call(S.PreInc, F.Call(S.Substitute, x))); // easy Expr(@"++--x", F.Call(S.PreInc, F.Call(S.PreDec, x))); // easy // Note: It was decided not to bother supporting `$++x`, or even // `$...x` which would be more convenient than writing `$(...x)` Expr(@"$(++x)", F.Call(S.Substitute, F.Call(S.PreInc, x))); Expr(@".(~x)", F.Call(S.Dot, F.Call(S.NotBits, x))); Expr(@"x++.Foo", F.Dot(F.Call(S.PostInc, x), Foo)); Expr(@"x++.Foo()", F.Call(F.Dot(F.Call(S.PostInc, x), Foo))); Expr(@"x++--.Foo", F.Dot(F.Call(S.PostDec, F.Call(S.PostInc, x)), Foo)); // Due to the high precedence of `$`, its argument must be in parens // unless it is trivial. 
Expr(@"$x", F.Call(S.Substitute, x)); Expr(@"$(x++)", F.Call(S.Substitute, F.Call(S.PostInc, x))); Expr(@"$(Foo(x))", F.Call(S.Substitute, F.Call(Foo, x))); Expr(@"$(a<b>)", F.Call(S.Substitute, F.Of(a, b))); Expr(@"$(a.b)", F.Call(S.Substitute, F.Dot(a, b))); Expr(@"$(a.b<c>)", F.Call(S.Substitute, F.Dot(a, F.Of(b, c)))); Expr(@"$((Foo) x)", F.Call(S.Substitute, F.Call(S.Cast, x, Foo))); Expr(@"$(x(->Foo))", F.Call(S.Substitute, Alternate(F.Call(S.Cast, x, Foo)))); } [Test] public void IsAsChallenges() { Expr("x / a as Foo / b", F.Call(S.Div, F.Call(S.As, F.Call(S.Div, x, a), Foo), b)); Expr("x / a as Foo? / b", F.Call(S.Div, F.Call(S.As, F.Call(S.Div, x, a), F.Of(_(S.QuestionMark), Foo)), b)); Expr("x / a is Foo? / b", F.Call(S.Div, F.Call(S.Is, F.Call(S.Div, x, a), F.Of(_(S.QuestionMark), Foo)), b)); Expr("x / a as Foo? < b", F.Call(S.LT, F.Call(S.As, F.Call(S.Div, x, a), F.Of(_(S.QuestionMark), Foo)), b)); Expr("x / a is Foo? < b", F.Call(S.LT, F.Call(S.Is, F.Call(S.Div, x, a), F.Of(_(S.QuestionMark), Foo)), b)); } [Test(Fails = "Failure caused by a bug in LLLPG")] public void IsAsChallenges2() { Expr("x / a as Foo < b", F.Call(S.LT, F.Call(S.As, F.Call(S.Div, x, a), Foo), b)); } [Test] public void SpecialEcsChallenges() { Expr("Foo x = a", F.Var(Foo, x.Name, a)); Expr("(Foo x = a) + 1", F.Call(S.Add, F.InParens(F.Var(Foo, x.Name, a)), one)); // Sadly, the way the printer uses parens in this situation does not // round-trip so we need separate printer and parser tests. Perhaps // ideally the parser would ignore parens when there's an attr list, // but then the printer might have to print extra parens in some cases. 
Expr("([] Foo x = a) + 1", F.Call(S.Add, F.Var(Foo, x.Name, a), one)); Expr("([] Foo a) = x", F.Assign(F.Vars(Foo, a), x)); Expr("([] Foo a) + x", F.Call(S.Add, F.Vars(Foo, a), x)); Expr("x + ([] Foo a)", F.Call(S.Add, x, F.Vars(Foo, a))); Expr("#label(Foo)", F.Call(S.Label, Foo)); Stmt("Foo:", F.Call(S.Label, Foo)); LNode Foo_a = F.Call(S.NamedArg, Foo, a); Expr("Foo: a", Foo_a); Stmt("@`'::=`(Foo, a);", Foo_a); Expr("@`'::=`(Foo(x), a)", F.Call(S.NamedArg, F.Call(Foo, x), a)); Expr("b + (Foo: a)", F.Call(S.Add, b, F.InParens(Foo_a))); Expr("b + @`'::=`(Foo, a)", F.Call(S.Add, b, Foo_a)); // Ambiguity between multiplication and pointer declarations: // - multiplication at stmt level => prefix notation, except in #result or when lhs is not a complex identifier // - pointer declaration inside expr => generic, not pointer, notation Expr("a * b", F.Call(S.Mul, a, b)); Stmt("a `'*` b;", F.Call(S.Mul, a, b)); Stmt("a() * b;", F.Call(S.Mul, F.Call(a), b)); Expr("#result(a * b)", F.Result(F.Call(S.Mul, a, b))); Stmt("{\n a * b\n}", F.Braces(F.Result(F.Call(S.Mul, a, b)))); Stmt("Foo* a = x;", F.Var(F.Of(_(S._Pointer), Foo), a.Name, x)); // Ambiguity between bitwise not and destructor declarations Expr("~Foo()", F.Call(S.NotBits, F.Call(Foo))); Stmt("@`'~`(Foo());", F.Call(S.NotBits, F.Call(Foo))); Stmt("~Foo;", F.Call(S.NotBits, Foo)); Stmt("$Foo $x;", F.Var(F.Call(S.Substitute, Foo), F.Call(S.Substitute, x))); Stmt("$Foo $x = 1;", F.Var(F.Call(S.Substitute, Foo), F.Call(S.Substitute, x), one)); Stmt("$Foo<$2> $x = 1;", F.Var(F.Of(F.Call(S.Substitute, Foo), F.Call(S.Substitute, two)), F.Call(S.Substitute, x), one)); Expr("$Foo $x = 1", F.Var(F.Call(S.Substitute, Foo), F.Call(S.Substitute, x), one)); } [Test] public void TypeContext() { // Certain syntax trees can print differently in a "type context" than elsewhere. 
var FooBracks = F.Call(S.IndexBracks, Foo); var FooArray = F.Of(_(S.Array), Foo); var Foo2DArray = F.Of(_(S.TwoDimensionalArray), Foo); var FooNArray = F.Of(_(S.Array), F.Of(S.QuestionMark, Foo)); var FooNullable = F.Of(_(S.QuestionMark), Foo); var FooPointer = F.Of(_(S._Pointer), Foo); Expr("Foo[]", FooBracks); Expr("@`'[]`<Foo>", FooArray); Expr("@`'?`<Foo>", FooNullable); Expr("@`'*`<Foo>", FooPointer); Stmt("#var(Foo[], a);", F.Vars(FooBracks, a)); Stmt("Foo[] a;", F.Vars(FooArray, a)); Stmt("Foo?[] a;", F.Vars(FooNArray, a)); Stmt("typeof(Foo?);", F.Call(S.Typeof, FooNullable)); Stmt("default(Foo*);", F.Call(S.Default, FooPointer)); Stmt("(Foo[]) a;", F.Call(S.Cast, a, FooArray)); Stmt("(Foo?[]) a;", F.Call(S.Cast, a, FooNArray)); Stmt("(Foo[,]) a;", F.Call(S.Cast, a, Foo2DArray)); Stmt("a(->Foo?);", Alternate(F.Call(S.Cast, a, FooNullable))); Stmt("a(as Foo*);", Alternate(F.Call(S.As, a, FooPointer))); Stmt("Foo!(#(Foo[]));", F.Of(Foo, F.List(FooBracks))); Stmt("Foo!(#(@`'*`<Foo>));", F.Of(Foo, F.List(FooPointer))); Expr("checked(Foo[])", F.Call(S.Checked, FooBracks)); Stmt("Foo<a*> x;", F.Vars(F.Of(Foo, F.Of(_(S._Pointer), a)), x)); Stmt("(Foo, Foo x) a, b;", F.Vars(F.Of(_(S.Tuple), Foo, F.Var(Foo, x)), a, b)); } [Test] public void TypeContextTuples() { // Type Tuples are printed in a "type context" (Ambiguity.TypeContext) // which changes printing behavior, e.g. Foo[10] normally means // @`'suf[]`(Foo), but in a type context it means @`'of`(@`'[]`, Foo) // (which can also be written as @`'[]`<Foo>) var FooBracks = F.Call(S.IndexBracks, Foo); var FooArray = F.Of(_(S.Array), Foo); var FooNullable = F.Of(_(S.QuestionMark), Foo); LNode stmt; stmt = F.Vars(F.Of(_(S.Tuple), F.Var(FooNullable, x), FooArray), a, b); Stmt("(Foo? 
x, Foo[]) a, b;", stmt); Stmt("tuple!(2 + 2);", F.Of(_("tuple"), F.Call(S.Add, two, two))); Stmt("@tuple!(2 + 2);", F.Of(_("tuple"), F.Call(S.Add, two, two)), Mode.ParserTest); Stmt("@'tuple !(2 + 2);", F.Of(_("'tuple"), F.Call(S.Add, two, two))); // In a type context, FooBracks shows as @`'suf[]`(Foo) instead of the usual Foo[] stmt = F.Vars(F.Of(_(S.Tuple), F.Var(FooNullable, x), FooBracks), a, b); Stmt("#var(@'tuple !(Foo? x, @`'suf[]`(Foo)), a, b);", stmt); stmt = F.Vars(F.Of(_(S.Tuple), F.Var(FooNullable, x), F.Var(FooBracks, T)), a, b); Stmt("#var(@'tuple !(Foo? x, #var(Foo[], T)), a, b);", stmt); } } }
{'repo_name': 'qwertie/ecsharp', 'stars': '128', 'repo_language': 'C#', 'file_name': 'TestMatchMacro.cs', 'mime_type': 'text/plain', 'hash': -9192845436832569408, 'source_dataset': 'data'}
https = require 'https' http = require 'http' {parse} = require 'url' ProgressBar = require 'progress' urlmod = require 'url' request = require 'request' #======================================================================== class Request constructor : ({url, uri, @headers, progress}) -> url = url or uri @_res = null @_data = [] @_err = null @uri = @url = if typeof(url) is 'string' then parse(url) else url @_bar = null @_opts = { progress } #-------------------- run : (cb) -> @_done_cb = cb @_launch() #-------------------- _make_opts : () -> opts = host : @url.hostname port : @url.port or (if @url.protocol is 'https:' then 443 else 80) path : @url.path method : 'GET' headers : @headers if (@url.protocol is 'https:') opts.mod = https opts.agent = new https.Agent opts else opts.mod = http opts #-------------------- _launch : () -> opts = @_make_opts() req = opts.mod.request opts, (res) => if @_opts.progress? and (l = res.headers?["content-length"])? and not(isNaN(l = parseInt(l,10))) and l > @_opts.progress @_bar = new ProgressBar "Download #{@url.path} [:bar] :percent :etas (#{l} bytes total)", { complete : "=", incomplete : ' ', width : 50, total : l } @_res = res res.request = @ res.on 'data', (d) => @_data.push d @_bar?.tick(d.length) res.on 'end', () => @_finish() req.end() req.on 'error', (e) => @_err = e @_finish() #-------------------- _finish : () -> cb = @_done_cb @_done_cb = null cb @_err, @_res, (Buffer.concat @_data) #============================================================================= single = (opts, cb) -> (new Request opts).run cb #============================================================================= format_url = (u) -> if (typeof u is 'string') then u else urlmod.format(u) #----------- request_progress = (opts, cb) -> lim = opts.maxRedirects or 10 res = body = null found = false opts.url = parse(opts.url) if typeof(opts.url) is 'string' for i in [0...lim] prev_url = opts.url await single opts, defer err, res, body if err? 
then break else if not (res.statusCode in [301, 302]) found = true break else if not (url = res.headers?.location)? err = new Error "Can't find a location in header for redirect" break else url = parse(url) unless url.host url.host = prev_url.host url.hostname = prev_url.hostname url.port = prev_url.port url.protocol = prev_url.protocol opts.url = url err = if err? then err else if not found then new Error "Too many redirects" else if (res.statusCode >= 200 and res.statusCode < 300) then null else new Error "In #{format_url opts.url}: HTTP failure, code #{res.statusCode}" cb err, res, body #============================================================================ request_mikeal = (opts, cb) -> opts.encoding = null rv = new iced.Rendezvous() url = opts.url or opts.uri url_s = if typeof(url) is 'object' then url.format() else url request opts, rv.id(true).defer(err, res, body) process.stderr.write("Downloading...") loop setTimeout rv.id(false).defer(), 100 await rv.wait defer which if which then break process.stderr.write(".") process.stderr.write("..done\n") cb err, res, body #============================================================================ exports.request = (opts, cb) -> if opts.proxy? then request_mikeal opts, cb else request_progress opts, cb #============================================================================
{'repo_name': 'keybase/node-client', 'stars': '298', 'repo_language': 'CoffeeScript', 'file_name': 'versions.iced', 'mime_type': 'text/plain', 'hash': -3187765781829770480, 'source_dataset': 'data'}
//このコードはT4 Templateによって自動生成されています。 using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Reflection; using System.Windows; using System.Windows.Navigation; using System.Windows.Shapes; using System.Windows.Controls; using System.Windows.Documents; using System.Windows.Controls.Primitives; using System.ComponentModel; using System.Windows.Interop; using System.Windows.Data; using System.Windows.Markup; using System.Windows.Documents.Serialization; using System.Windows.Input; using System.Windows.Media.Animation; using System.Windows.Media; using System.Windows.Media.TextFormatting; using System.Windows.Annotations; using System.Windows.Ink; using System.Windows.Automation.Peers; using System.Windows.Markup.Localizer; using System.Windows.Media.Imaging; using System.IO.Packaging; using System.Security.RightsManagement; using System.Windows.Threading; using System.Windows.Media.Effects; using System.Windows.Shell; using System.Security.Permissions; using System.Windows.Annotations.Storage; using System.Diagnostics; namespace Livet.Converters { /// <summary> /// DataGridHeadersVisibility列挙体とbool値の相互変換を行うIValueConverterです。 /// </summary> public class DataGridHeadersVisibilityAndBooleanConverter : IValueConverter { //VM→View public object Convert(object value, Type targetType, object parameter, System.Globalization.CultureInfo culture) { if (!(value is bool)) throw new ArgumentException(); if ((bool)value) { if (_isConvertWhenTrueSet) { return ConvertWhenTrue; } } else { if (_isConvertWhenFalseSet) { return ConvertWhenFalse; } } return DependencyProperty.UnsetValue; } private bool _isConvertWhenTrueSet; private DataGridHeadersVisibility _convertWhenTrue; /// <summary> /// ソース値がTrueの際にどのDataGridHeadersVisibility列挙体メンバーに変換するかを指定、または取得します。 /// </summary> public DataGridHeadersVisibility ConvertWhenTrue { get { return _convertWhenTrue; } set { _convertWhenTrue = value; _isConvertWhenTrueSet = true; } } private bool 
_isConvertWhenFalseSet; private DataGridHeadersVisibility _convertWhenFalse; /// <summary> /// ソース値がFalseの際にどのDataGridHeadersVisibility列挙体メンバーに変換するかを指定、または取得します。 /// </summary> public DataGridHeadersVisibility ConvertWhenFalse { get { return _convertWhenFalse; } set { _convertWhenFalse = value; _isConvertWhenFalseSet = true; } } //View→VM public object ConvertBack(object value, Type targetType, object parameter, System.Globalization.CultureInfo culture) { if (!(value is DataGridHeadersVisibility)) throw new ArgumentException(); var enumValue = (DataGridHeadersVisibility)value; switch(enumValue.ToString()) { case "None": if (_isConvertBackWhenNoneSet) { return ConvertBackWhenNone; } break; case "Column": if (_isConvertBackWhenColumnSet) { return ConvertBackWhenColumn; } break; case "Row": if (_isConvertBackWhenRowSet) { return ConvertBackWhenRow; } break; case "All": if (_isConvertBackWhenAllSet) { return ConvertBackWhenAll; } break; default: throw new ArgumentException(); } if (_isConvertBackDefaultBooleanValueSet) { return ConvertBackDefaultBooleanValue; } return DependencyProperty.UnsetValue; } private bool _isConvertBackDefaultBooleanValueSet; private bool _convertBackDefaultBooleanValue; /// <summary> /// DataGridHeadersVisibilityの値をbool値に変換する際のデフォルト値を指定、または取得します。 /// </summary> public bool ConvertBackDefaultBooleanValue { get { return _convertBackDefaultBooleanValue; } set { _convertBackDefaultBooleanValue = value; _isConvertBackDefaultBooleanValueSet = true; } } private bool _isConvertBackWhenNoneSet; private bool _convertBackWhenNone; /// <summary> /// DataGridHeadersVisibilityの値がNoneの際True/Falseいずれに変換するのかを指定、または取得します。<br/> /// この値が設定されておらず、ConvertBackDefaultBooleanValueが設定されている場合はConvertBackDefaultBooleanValueに従います。 /// </summary> public bool ConvertBackWhenNone { get { return _convertBackWhenNone; } set { _convertBackWhenNone = value; _isConvertBackWhenNoneSet = true; } } private bool _isConvertBackWhenColumnSet; private bool _convertBackWhenColumn; /// 
<summary> /// DataGridHeadersVisibilityの値がColumnの際True/Falseいずれに変換するのかを指定、または取得します。<br/> /// この値が設定されておらず、ConvertBackDefaultBooleanValueが設定されている場合はConvertBackDefaultBooleanValueに従います。 /// </summary> public bool ConvertBackWhenColumn { get { return _convertBackWhenColumn; } set { _convertBackWhenColumn = value; _isConvertBackWhenColumnSet = true; } } private bool _isConvertBackWhenRowSet; private bool _convertBackWhenRow; /// <summary> /// DataGridHeadersVisibilityの値がRowの際True/Falseいずれに変換するのかを指定、または取得します。<br/> /// この値が設定されておらず、ConvertBackDefaultBooleanValueが設定されている場合はConvertBackDefaultBooleanValueに従います。 /// </summary> public bool ConvertBackWhenRow { get { return _convertBackWhenRow; } set { _convertBackWhenRow = value; _isConvertBackWhenRowSet = true; } } private bool _isConvertBackWhenAllSet; private bool _convertBackWhenAll; /// <summary> /// DataGridHeadersVisibilityの値がAllの際True/Falseいずれに変換するのかを指定、または取得します。<br/> /// この値が設定されておらず、ConvertBackDefaultBooleanValueが設定されている場合はConvertBackDefaultBooleanValueに従います。 /// </summary> public bool ConvertBackWhenAll { get { return _convertBackWhenAll; } set { _convertBackWhenAll = value; _isConvertBackWhenAllSet = true; } } } }
{'repo_name': 'runceel/Livet', 'stars': '176', 'repo_language': 'C#', 'file_name': 'Command.cs', 'mime_type': 'text/plain', 'hash': 2166698625143376199, 'source_dataset': 'data'}
gauge ===== A nearly stateless terminal based horizontal gauge / progress bar. ```javascript var Gauge = require("gauge") var gauge = new Gauge() gauge.show("test", 0.20) gauge.pulse("this") gauge.hide() ``` ![](gauge-demo.gif) ### CHANGES FROM 1.x Gauge 2.x is breaking release, please see the [changelog] for details on what's changed if you were previously a user of this module. [changelog]: CHANGELOG.md ### THE GAUGE CLASS This is the typical interface to the module– it provides a pretty fire-and-forget interface to displaying your status information. ``` var Gauge = require("gauge") var gauge = new Gauge([stream], [options]) ``` * **stream** – *(optional, default STDERR)* A stream that progress bar updates are to be written to. Gauge honors backpressure and will pause most writing if it is indicated. * **options** – *(optional)* An option object. Constructs a new gauge. Gauges are drawn on a single line, and are not drawn if **stream** isn't a tty and a tty isn't explicitly provided. If **stream** is a terminal or if you pass in **tty** to **options** then we will detect terminal resizes and redraw to fit. We do this by watching for `resize` events on the tty. (To work around a bug in verisons of Node prior to 2.5.0, we watch for them on stdout if the tty is stderr.) Resizes to larger window sizes will be clean, but shrinking the window will always result in some cruft. **IMPORTANT:** If you prevously were passing in a non-tty stream but you still want output (for example, a stream wrapped by the `ansi` module) then you need to pass in the **tty** option below, as `gauge` needs access to the underlying tty in order to do things like terminal resizes and terminal width detection. The **options** object can have the following properties, all of which are optional: * **updateInterval**: How often gauge updates should be drawn, in miliseconds. * **fixedFramerate**: Defaults to false on node 0.8, true on everything else. 
When this is true a timer is created to trigger once every `updateInterval` ms, when false, updates are printed as soon as they come in but updates more often than `updateInterval` are ignored. The reason 0.8 doesn't have this set to true is that it can't `unref` its timer and so it would stop your program from exiting– if you want to use this feature with 0.8 just make sure you call `gauge.disable()` before you expect your program to exit. * **themes**: A themeset to use when selecting the theme to use. Defaults to `gauge/themes`, see the [themes] documentation for details. * **theme**: Select a theme for use, it can be a: * Theme object, in which case the **themes** is not used. * The name of a theme, which will be looked up in the current *themes* object. * A configuration object with any of `hasUnicode`, `hasColor` or `platform` keys, which if wlll be used to override our guesses when making a default theme selection. If no theme is selected then a default is picked using a combination of our best guesses at your OS, color support and unicode support. * **template**: Describes what you want your gauge to look like. The default is what npm uses. Detailed [documentation] is later in this document. * **hideCursor**: Defaults to true. If true, then the cursor will be hidden while the gauge is displayed. * **tty**: The tty that you're ultimately writing to. Defaults to the same as **stream**. This is used for detecting the width of the terminal and resizes. The width used is `tty.columns - 1`. If no tty is available then a width of `79` is assumed. * **enabled**: Defaults to true if `tty` is a TTY, false otherwise. If true the gauge starts enabled. If disabled then all update commands are ignored and no gauge will be printed until you call `.enable()`. * **Plumbing**: The class to use to actually generate the gauge for printing. This defaults to `require('gauge/plumbing')` and ordinarly you shouldn't need to override this. * **cleanupOnExit**: Defaults to true. 
Ordinarily we register an exit handler to make sure your cursor is turned back on and the progress bar erased when your process exits, even if you Ctrl-C out or otherwise exit unexpectedly. You can disable this and it won't register the exit handler. [has-unicode]: https://www.npmjs.com/package/has-unicode [themes]: #themes [documentation]: #templates #### `gauge.show(section | status, [completed])` The first argument is either the section, the name of the current thing contributing to progress, or an object with keys like **section**, **subsection** & **completed** (or any others you have types for in a custom template). If you don't want to update or set any of these you can pass `null` and it will be ignored. The second argument is the percent completed as a value between 0 and 1. Without it, completion is just not updated. You'll also note that completion can be passed in as part of a status object as the first argument. If both it and the completed argument are passed in, the completed argument wins. #### `gauge.hide([cb])` Removes the gauge from the terminal. Optionally, callback `cb` after IO has had an opportunity to happen (currently this just means after `setImmediate` has called back.) It turns out this is important when you're pausing the progress bar on one filehandle and printing to another– otherwise (with a big enough print) node can end up printing the "end progress bar" bits to the progress bar filehandle while other stuff is printing to another filehandle. These getting interleaved can cause corruption in some terminals. #### `gauge.pulse([subsection])` * **subsection** – *(optional)* The specific thing that triggered this pulse Spins the spinner in the gauge to show output. If **subsection** is included then it will be combined with the last name passed to `gauge.show`. #### `gauge.disable()` Hides the gauge and ignores further calls to `show` or `pulse`. 
#### `gauge.enable()` Shows the gauge and resumes updating when `show` or `pulse` is called. #### `gauge.isEnabled()` Returns true if the gauge is enabled. #### `gauge.setThemeset(themes)` Change the themeset to select a theme from. The same as the `themes` option used in the constructor. The theme will be reselected from this themeset. #### `gauge.setTheme(theme)` Change the active theme, will be displayed with the next show or pulse. This can be: * Theme object, in which case the **themes** is not used. * The name of a theme, which will be looked up in the current *themes* object. * A configuration object with any of `hasUnicode`, `hasColor` or `platform` keys, which will be used to override our guesses when making a default theme selection. If no theme is selected then a default is picked using a combination of our best guesses at your OS, color support and unicode support. #### `gauge.setTemplate(template)` Change the active template, will be displayed with the next show or pulse ### Tracking Completion If you have more than one thing going on that you want to track completion of, you may find the related [are-we-there-yet] helpful. Its `change` event can be wired up to the `show` method to get a more traditional progress bar interface. [are-we-there-yet]: https://www.npmjs.com/package/are-we-there-yet ### THEMES ``` var themes = require('gauge/themes') // fetch the default color unicode theme for this platform var ourTheme = themes({hasUnicode: true, hasColor: true}) // fetch the default non-color unicode theme for osx var ourTheme = themes({hasUnicode: true, hasColor: false, platform: 'darwin'}) // create a new theme based on the color ascii theme for this platform // that brackets the progress bar with arrows var ourTheme = themes.newTheme(themes({hasUnicode: false, hasColor: true}), { preProgressbar: '→', postProgressbar: '←' }) ``` The object returned by `gauge/themes` is an instance of the `ThemeSet` class. 
``` var ThemeSet = require('gauge/theme-set') var themes = new ThemeSet() // or var themes = require('gauge/themes') var mythemes = themes.newThemeset() // creates a new themeset based on the default themes ``` #### themes(opts) #### themes.getDefault(opts) Theme objects are a function that fetches the default theme based on platform, unicode and color support. Options is an object with the following properties: * **hasUnicode** - If true, fetch a unicode theme, if no unicode theme is available then a non-unicode theme will be used. * **hasColor** - If true, fetch a color theme, if no color theme is available a non-color theme will be used. * **platform** (optional) - Defaults to `process.platform`. If no platform match is available then `fallback` is used instead. If no compatible theme can be found then an error will be thrown with a `code` of `EMISSINGTHEME`. #### themes.addTheme(themeName, themeObj) #### themes.addTheme(themeName, [parentTheme], newTheme) Adds a named theme to the themeset. You can pass in either a theme object, as returned by `themes.newTheme` or the arguments you'd pass to `themes.newTheme`. #### themes.getThemeNames() Return a list of all of the names of the themes in this themeset. Suitable for use in `themes.getTheme(…)`. #### themes.getTheme(name) Returns the theme object from this theme set named `name`. If `name` does not exist in this themeset an error will be thrown with a `code` of `EMISSINGTHEME`. #### themes.setDefault([opts], themeName) `opts` is an object with the following properties. * **platform** - Defaults to `'fallback'`. If your theme is platform specific, specify that here with the platform from `process.platform`, eg, `win32`, `darwin`, etc. * **hasUnicode** - Defaults to `false`. If your theme uses unicode you should set this to true. * **hasColor** - Defaults to `false`. If your theme uses color you should set this to true. `themeName` is the name of the theme (as given to `addTheme`) to use for this set of `opts`. 
#### themes.newTheme([parentTheme,] newTheme) Create a new theme object based on `parentTheme`. If no `parentTheme` is provided then a minimal parentTheme that defines functions for rendering the activity indicator (spinner) and progress bar will be defined. (This fallback parent is defined in `gauge/base-theme`.) newTheme should be a bare object– we'll start by discussing the properties defined by the default themes: * **preProgressbar** - displayed prior to the progress bar, if the progress bar is displayed. * **postProgressbar** - displayed after the progress bar, if the progress bar is displayed. * **progressBarTheme** - The subtheme passed through to the progress bar renderer, it's an object with `complete` and `remaining` properties that are the strings you want repeated for those sections of the progress bar. * **activityIndicatorTheme** - The theme for the activity indicator (spinner), this can either be a string, in which each character is a different step, or an array of strings. * **preSubsection** - Displayed as a separator between the `section` and `subsection` when the latter is printed. More generally, themes can have any value that would be a valid value when rendering templates. The properties in the theme are used when their name matches a type in the template. Their values can be: * **strings & numbers** - They'll be included as is * **function (values, theme, width)** - Should return what you want in your output. *values* is an object with values provided via `gauge.show`, *theme* is the theme specific to this item (see below) or this theme object, and *width* is the number of characters wide your result should be. There are a couple of special prefixes: * **pre** - Is shown prior to the property, if its displayed. * **post** - Is shown after the property, if its displayed. And one special suffix: * **Theme** - Its value is passed to a function-type item as the theme. 
#### themes.addToAllThemes(theme) This *mixes-in* `theme` into all themes currently defined. It also adds it to the default parent theme for this themeset, so future themes added to this themeset will get the values from `theme` by default. #### themes.newThemeset() Copy the current themeset into a new one. This allows you to easily inherit one themeset from another. ### TEMPLATES A template is an array of objects and strings that, after being evaluated, will be turned into the gauge line. The default template is: ```javascript [ {type: 'progressbar', length: 20}, {type: 'activityIndicator', kerning: 1, length: 1}, {type: 'section', kerning: 1, default: ''}, {type: 'subsection', kerning: 1, default: ''} ] ``` The various template elements can either be **plain strings**, in which case they will be included verbatim in the output, or objects with the following properties: * *type* can be any of the following plus any keys you pass into `gauge.show` plus any keys you have on a custom theme. * `section` – What big thing you're working on now. * `subsection` – What component of that thing is currently working. * `activityIndicator` – Shows a spinner using the `activityIndicatorTheme` from your active theme. * `progressbar` – A progress bar representing your current `completed` using the `progressbarTheme` from your active theme. * *kerning* – Number of spaces that must be between this item and other items, if this item is displayed at all. * *maxLength* – The maximum length for this element. If its value is longer it will be truncated. * *minLength* – The minimum length for this element. If its value is shorter it will be padded according to the *align* value. * *align* – (Default: left) Possible values "left", "right" and "center". Works as you'd expect from word processors. * *length* – Provides a single value for both *minLength* and *maxLength*. If both *length* and *minLength* or *maxLength* are specified then the latter take precedence. 
* *value* – A literal value to use for this template item. * *default* – A default value to use for this template item if a value wasn't otherwise passed in. ### PLUMBING This is the super simple, assume nothing, do no magic internals used by gauge to implement its ordinary interface. ``` var Plumbing = require('gauge/plumbing') var gauge = new Plumbing(theme, template, width) ``` * **theme**: The theme to use. * **template**: The template to use. * **width**: How wide your gauge should be #### `gauge.setTheme(theme)` Change the active theme. #### `gauge.setTemplate(template)` Change the active template. #### `gauge.setWidth(width)` Change the width to render at. #### `gauge.hide()` Return the string necessary to hide the progress bar #### `gauge.hideCursor()` Return a string to hide the cursor. #### `gauge.showCursor()` Return a string to show the cursor. #### `gauge.show(status)` Using `status` for values, render the provided template with the theme and return a string that is suitable for printing to update the gauge.
{'repo_name': 'sx1989827/DOClever', 'stars': '2669', 'repo_language': 'JavaScript', 'file_name': 'parse.js', 'mime_type': 'text/plain', 'hash': 6265654543467672899, 'source_dataset': 'data'}
// Copyright 2014 The go-ethereum Authors // This file is part of the go-ethereum library. // // The go-ethereum library is free software: you can redistribute it and/or modify // it under the terms of the GNU Lesser General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // The go-ethereum library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public License // along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>. package core import ( "fmt" "math/big" "math/rand" "sync" "testing" "time" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/consensus" "github.com/ethereum/go-ethereum/consensus/ethash" "github.com/ethereum/go-ethereum/core/rawdb" "github.com/ethereum/go-ethereum/core/state" "github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/core/vm" "github.com/ethereum/go-ethereum/crypto" "github.com/ethereum/go-ethereum/ethdb" "github.com/ethereum/go-ethereum/params" ) // So we can deterministically seed different blockchains var ( canonicalSeed = 1 forkSeed = 2 ) // newCanonical creates a chain database, and injects a deterministic canonical // chain. Depending on the full flag, if creates either a full block chain or a // header only chain. 
// newCanonical builds an in-memory chain database seeded with a genesis block
// and injects n deterministic blocks (full=true) or headers (full=false) on
// top of it, using canonicalSeed so repeated runs produce identical chains.
// The database is returned alongside the chain so callers can extend it.
func newCanonical(engine consensus.Engine, n int, full bool) (ethdb.Database, *BlockChain, error) {
	var (
		db      = ethdb.NewMemDatabase()
		genesis = new(Genesis).MustCommit(db)
	)
	// Initialize a fresh chain with only a genesis block.
	// NOTE(review): the NewBlockChain error is deliberately discarded — an
	// in-memory database with a freshly committed genesis is assumed not to fail.
	blockchain, _ := NewBlockChain(db, nil, params.AllEthashProtocolChanges, engine, vm.Config{})
	// Create and inject the requested chain
	if n == 0 {
		return db, blockchain, nil
	}
	if full {
		// Full block-chain requested
		blocks := makeBlockChain(genesis, n, engine, db, canonicalSeed)
		_, err := blockchain.InsertChain(blocks)
		return db, blockchain, err
	}
	// Header-only chain requested
	headers := makeHeaderChain(genesis.Header(), n, engine, db, canonicalSeed)
	_, err := blockchain.InsertHeaderChain(headers, 1)
	return db, blockchain, err
}

// testFork rebuilds an independent copy of the canonical chain up to block #i,
// verifies both chains agree at #i, extends the copy with n fork blocks
// (seeded with forkSeed so they differ from the canonical ones), imports the
// fork back into the original chain, and hands the total difficulties from
// before and after the import to comparator for the caller's assertion.
func testFork(t *testing.T, blockchain *BlockChain, i, n int, full bool, comparator func(td1, td2 *big.Int)) {
	// Copy old chain up to #i into a new db
	db, blockchain2, err := newCanonical(ethash.NewFaker(), i, full)
	if err != nil {
		t.Fatal("could not make new canonical in testFork", err)
	}
	defer blockchain2.Stop()

	// Assert the chains have the same header/block at #i
	var hash1, hash2 common.Hash
	if full {
		hash1 = blockchain.GetBlockByNumber(uint64(i)).Hash()
		hash2 = blockchain2.GetBlockByNumber(uint64(i)).Hash()
	} else {
		hash1 = blockchain.GetHeaderByNumber(uint64(i)).Hash()
		hash2 = blockchain2.GetHeaderByNumber(uint64(i)).Hash()
	}
	if hash1 != hash2 {
		t.Errorf("chain content mismatch at %d: have hash %v, want hash %v", i, hash2, hash1)
	}
	// Extend the newly created chain
	var (
		blockChainB  []*types.Block
		headerChainB []*types.Header
	)
	if full {
		blockChainB = makeBlockChain(blockchain2.CurrentBlock(), n, ethash.NewFaker(), db, forkSeed)
		if _, err := blockchain2.InsertChain(blockChainB); err != nil {
			t.Fatalf("failed to insert forking chain: %v", err)
		}
	} else {
		headerChainB = makeHeaderChain(blockchain2.CurrentHeader(), n, ethash.NewFaker(), db, forkSeed)
		if _, err := blockchain2.InsertHeaderChain(headerChainB, 1); err != nil {
			t.Fatalf("failed to insert forking chain: %v", err)
		}
	}
	// Sanity check that the forked chain can be imported into the original.
	// tdPre is sampled at the current head, tdPost at the fork tip after import.
	var tdPre, tdPost *big.Int
	if full {
		tdPre = blockchain.GetTdByHash(blockchain.CurrentBlock().Hash())
		if err := testBlockChainImport(blockChainB, blockchain); err != nil {
			t.Fatalf("failed to import forked block chain: %v", err)
		}
		tdPost = blockchain.GetTdByHash(blockChainB[len(blockChainB)-1].Hash())
	} else {
		tdPre = blockchain.GetTdByHash(blockchain.CurrentHeader().Hash())
		if err := testHeaderChainImport(headerChainB, blockchain); err != nil {
			t.Fatalf("failed to import forked header chain: %v", err)
		}
		tdPost = blockchain.GetTdByHash(headerChainB[len(headerChainB)-1].Hash())
	}
	// Compare the total difficulties of the chains
	comparator(tdPre, tdPost)
}

// printChain is an unused debugging helper: dumps hash and difficulty of
// every block from the current head down to (but excluding) the genesis.
func printChain(bc *BlockChain) {
	for i := bc.CurrentBlock().Number().Uint64(); i > 0; i-- {
		b := bc.GetBlockByNumber(uint64(i))
		fmt.Printf("\t%x %v\n", b.Hash(), b.Difficulty())
	}
}

// testBlockChainImport tries to process a chain of blocks, writing them into
// the database if successful.
func testBlockChainImport(chain types.Blocks, blockchain *BlockChain) error { for _, block := range chain { // Try and process the block err := blockchain.engine.VerifyHeader(blockchain, block.Header(), true) if err == nil { err = blockchain.validator.ValidateBody(block) } if err != nil { if err == ErrKnownBlock { continue } return err } statedb, err := state.New(blockchain.GetBlockByHash(block.ParentHash()).Root(), blockchain.stateCache) if err != nil { return err } receipts, _, usedGas, err := blockchain.Processor().Process(block, statedb, vm.Config{}) if err != nil { blockchain.reportBlock(block, receipts, err) return err } err = blockchain.validator.ValidateState(block, blockchain.GetBlockByHash(block.ParentHash()), statedb, receipts, usedGas) if err != nil { blockchain.reportBlock(block, receipts, err) return err } blockchain.mu.Lock() rawdb.WriteTd(blockchain.db, block.Hash(), block.NumberU64(), new(big.Int).Add(block.Difficulty(), blockchain.GetTdByHash(block.ParentHash()))) rawdb.WriteBlock(blockchain.db, block) statedb.Commit(false) blockchain.mu.Unlock() } return nil } // testHeaderChainImport tries to process a chain of header, writing them into // the database if successful. 
func testHeaderChainImport(chain []*types.Header, blockchain *BlockChain) error { for _, header := range chain { // Try and validate the header if err := blockchain.engine.VerifyHeader(blockchain, header, false); err != nil { return err } // Manually insert the header into the database, but don't reorganise (allows subsequent testing) blockchain.mu.Lock() rawdb.WriteTd(blockchain.db, header.Hash(), header.Number.Uint64(), new(big.Int).Add(header.Difficulty, blockchain.GetTdByHash(header.ParentHash))) rawdb.WriteHeader(blockchain.db, header) blockchain.mu.Unlock() } return nil } func insertChain(done chan bool, blockchain *BlockChain, chain types.Blocks, t *testing.T) { _, err := blockchain.InsertChain(chain) if err != nil { fmt.Println(err) t.FailNow() } done <- true } func TestLastBlock(t *testing.T) { _, blockchain, err := newCanonical(ethash.NewFaker(), 0, true) if err != nil { t.Fatalf("failed to create pristine chain: %v", err) } defer blockchain.Stop() blocks := makeBlockChain(blockchain.CurrentBlock(), 1, ethash.NewFullFaker(), blockchain.db, 0) if _, err := blockchain.InsertChain(blocks); err != nil { t.Fatalf("Failed to insert block: %v", err) } if blocks[len(blocks)-1].Hash() != rawdb.ReadHeadBlockHash(blockchain.db) { t.Fatalf("Write/Get HeadBlockHash failed") } } // Tests that given a starting canonical chain of a given size, it can be extended // with various length chains. 
func TestExtendCanonicalHeaders(t *testing.T) { testExtendCanonical(t, false) } func TestExtendCanonicalBlocks(t *testing.T) { testExtendCanonical(t, true) } func testExtendCanonical(t *testing.T, full bool) { length := 5 // Make first chain starting from genesis _, processor, err := newCanonical(ethash.NewFaker(), length, full) if err != nil { t.Fatalf("failed to make new canonical chain: %v", err) } defer processor.Stop() // Define the difficulty comparator better := func(td1, td2 *big.Int) { if td2.Cmp(td1) <= 0 { t.Errorf("total difficulty mismatch: have %v, expected more than %v", td2, td1) } } // Start fork from current height testFork(t, processor, length, 1, full, better) testFork(t, processor, length, 2, full, better) testFork(t, processor, length, 5, full, better) testFork(t, processor, length, 10, full, better) } // Tests that given a starting canonical chain of a given size, creating shorter // forks do not take canonical ownership. func TestShorterForkHeaders(t *testing.T) { testShorterFork(t, false) } func TestShorterForkBlocks(t *testing.T) { testShorterFork(t, true) } func testShorterFork(t *testing.T, full bool) { length := 10 // Make first chain starting from genesis _, processor, err := newCanonical(ethash.NewFaker(), length, full) if err != nil { t.Fatalf("failed to make new canonical chain: %v", err) } defer processor.Stop() // Define the difficulty comparator worse := func(td1, td2 *big.Int) { if td2.Cmp(td1) >= 0 { t.Errorf("total difficulty mismatch: have %v, expected less than %v", td2, td1) } } // Sum of numbers must be less than `length` for this to be a shorter fork testFork(t, processor, 0, 3, full, worse) testFork(t, processor, 0, 7, full, worse) testFork(t, processor, 1, 1, full, worse) testFork(t, processor, 1, 7, full, worse) testFork(t, processor, 5, 3, full, worse) testFork(t, processor, 5, 4, full, worse) } // Tests that given a starting canonical chain of a given size, creating longer // forks do take canonical ownership. 
func TestLongerForkHeaders(t *testing.T) { testLongerFork(t, false) } func TestLongerForkBlocks(t *testing.T) { testLongerFork(t, true) } func testLongerFork(t *testing.T, full bool) { length := 10 // Make first chain starting from genesis _, processor, err := newCanonical(ethash.NewFaker(), length, full) if err != nil { t.Fatalf("failed to make new canonical chain: %v", err) } defer processor.Stop() // Define the difficulty comparator better := func(td1, td2 *big.Int) { if td2.Cmp(td1) <= 0 { t.Errorf("total difficulty mismatch: have %v, expected more than %v", td2, td1) } } // Sum of numbers must be greater than `length` for this to be a longer fork testFork(t, processor, 0, 11, full, better) testFork(t, processor, 0, 15, full, better) testFork(t, processor, 1, 10, full, better) testFork(t, processor, 1, 12, full, better) testFork(t, processor, 5, 6, full, better) testFork(t, processor, 5, 8, full, better) } // Tests that given a starting canonical chain of a given size, creating equal // forks do take canonical ownership. 
func TestEqualForkHeaders(t *testing.T) { testEqualFork(t, false) } func TestEqualForkBlocks(t *testing.T) { testEqualFork(t, true) } func testEqualFork(t *testing.T, full bool) { length := 10 // Make first chain starting from genesis _, processor, err := newCanonical(ethash.NewFaker(), length, full) if err != nil { t.Fatalf("failed to make new canonical chain: %v", err) } defer processor.Stop() // Define the difficulty comparator equal := func(td1, td2 *big.Int) { if td2.Cmp(td1) != 0 { t.Errorf("total difficulty mismatch: have %v, want %v", td2, td1) } } // Sum of numbers must be equal to `length` for this to be an equal fork testFork(t, processor, 0, 10, full, equal) testFork(t, processor, 1, 9, full, equal) testFork(t, processor, 2, 8, full, equal) testFork(t, processor, 5, 5, full, equal) testFork(t, processor, 6, 4, full, equal) testFork(t, processor, 9, 1, full, equal) } // Tests that chains missing links do not get accepted by the processor. func TestBrokenHeaderChain(t *testing.T) { testBrokenChain(t, false) } func TestBrokenBlockChain(t *testing.T) { testBrokenChain(t, true) } func testBrokenChain(t *testing.T, full bool) { // Make chain starting from genesis db, blockchain, err := newCanonical(ethash.NewFaker(), 10, full) if err != nil { t.Fatalf("failed to make new canonical chain: %v", err) } defer blockchain.Stop() // Create a forked chain, and try to insert with a missing link if full { chain := makeBlockChain(blockchain.CurrentBlock(), 5, ethash.NewFaker(), db, forkSeed)[1:] if err := testBlockChainImport(chain, blockchain); err == nil { t.Errorf("broken block chain not reported") } } else { chain := makeHeaderChain(blockchain.CurrentHeader(), 5, ethash.NewFaker(), db, forkSeed)[1:] if err := testHeaderChainImport(chain, blockchain); err == nil { t.Errorf("broken header chain not reported") } } } // Tests that reorganising a long difficult chain after a short easy one // overwrites the canonical numbers and links in the database. 
func TestReorgLongHeaders(t *testing.T) { testReorgLong(t, false) }
func TestReorgLongBlocks(t *testing.T)  { testReorgLong(t, true) }

// testReorgLong: a 4-block harder chain must displace a 3-block easier one;
// 393280 is the expected total difficulty delta over genesis.
func testReorgLong(t *testing.T, full bool) {
	testReorg(t, []int64{0, 0, -9}, []int64{0, 0, 0, -9}, 393280, full)
}

// Tests that reorganising a short difficult chain after a long easy one
// overwrites the canonical numbers and links in the database.
func TestReorgShortHeaders(t *testing.T) { testReorgShort(t, false) }
func TestReorgShortBlocks(t *testing.T)  { testReorgShort(t, true) }

func testReorgShort(t *testing.T, full bool) {
	// Create a long easy chain vs. a short heavy one. Due to difficulty adjustment
	// we need a fairly long chain of blocks with different difficulties for a short
	// one to become heavier than a long one. The 96 is an empirical value.
	easy := make([]int64, 96)
	for i := 0; i < len(easy); i++ {
		easy[i] = 60
	}
	diff := make([]int64, len(easy)-1)
	for i := 0; i < len(diff); i++ {
		diff[i] = -9
	}
	testReorg(t, easy, diff, 12615120, full)
}

// testReorg inserts two competing chains whose per-block time offsets are
// given by first and second (offsets shift difficulty via the adjustment
// algorithm), then verifies three things: the second chain won the reorg,
// the canonical number->hash links are consistent walking back from the
// head, and the head's total difficulty equals genesis TD + td.
func testReorg(t *testing.T, first, second []int64, td int64, full bool) {
	// Create a pristine chain and database
	db, blockchain, err := newCanonical(ethash.NewFaker(), 0, full)
	if err != nil {
		t.Fatalf("failed to create pristine chain: %v", err)
	}
	defer blockchain.Stop()

	// Insert an easy and a difficult chain afterwards
	easyBlocks, _ := GenerateChain(params.TestChainConfig, blockchain.CurrentBlock(), ethash.NewFaker(), db, len(first), func(i int, b *BlockGen) {
		b.OffsetTime(first[i])
	})
	diffBlocks, _ := GenerateChain(params.TestChainConfig, blockchain.CurrentBlock(), ethash.NewFaker(), db, len(second), func(i int, b *BlockGen) {
		b.OffsetTime(second[i])
	})
	if full {
		if _, err := blockchain.InsertChain(easyBlocks); err != nil {
			t.Fatalf("failed to insert easy chain: %v", err)
		}
		if _, err := blockchain.InsertChain(diffBlocks); err != nil {
			t.Fatalf("failed to insert difficult chain: %v", err)
		}
	} else {
		easyHeaders := make([]*types.Header, len(easyBlocks))
		for i, block := range easyBlocks {
			easyHeaders[i] = block.Header()
		}
		diffHeaders := make([]*types.Header, len(diffBlocks))
		for i, block := range diffBlocks {
			diffHeaders[i] = block.Header()
		}
		if _, err := blockchain.InsertHeaderChain(easyHeaders, 1); err != nil {
			t.Fatalf("failed to insert easy chain: %v", err)
		}
		if _, err := blockchain.InsertHeaderChain(diffHeaders, 1); err != nil {
			t.Fatalf("failed to insert difficult chain: %v", err)
		}
	}
	// Check that the chain is valid number and link wise: walk from head-1
	// down to genesis, asserting each predecessor's ParentHash matches.
	if full {
		prev := blockchain.CurrentBlock()
		for block := blockchain.GetBlockByNumber(blockchain.CurrentBlock().NumberU64() - 1); block.NumberU64() != 0; prev, block = block, blockchain.GetBlockByNumber(block.NumberU64()-1) {
			if prev.ParentHash() != block.Hash() {
				t.Errorf("parent block hash mismatch: have %x, want %x", prev.ParentHash(), block.Hash())
			}
		}
	} else {
		prev := blockchain.CurrentHeader()
		for header := blockchain.GetHeaderByNumber(blockchain.CurrentHeader().Number.Uint64() - 1); header.Number.Uint64() != 0; prev, header = header, blockchain.GetHeaderByNumber(header.Number.Uint64()-1) {
			if prev.ParentHash != header.Hash() {
				t.Errorf("parent header hash mismatch: have %x, want %x", prev.ParentHash, header.Hash())
			}
		}
	}
	// Make sure the chain total difficulty is the correct one
	want := new(big.Int).Add(blockchain.genesisBlock.Difficulty(), big.NewInt(td))
	if full {
		if have := blockchain.GetTdByHash(blockchain.CurrentBlock().Hash()); have.Cmp(want) != 0 {
			t.Errorf("total difficulty mismatch: have %v, want %v", have, want)
		}
	} else {
		if have := blockchain.GetTdByHash(blockchain.CurrentHeader().Hash()); have.Cmp(want) != 0 {
			t.Errorf("total difficulty mismatch: have %v, want %v", have, want)
		}
	}
}

// Tests that the insertion functions detect banned hashes.
func TestBadHeaderHashes(t *testing.T) { testBadHashes(t, false) } func TestBadBlockHashes(t *testing.T) { testBadHashes(t, true) } func testBadHashes(t *testing.T, full bool) { // Create a pristine chain and database db, blockchain, err := newCanonical(ethash.NewFaker(), 0, full) if err != nil { t.Fatalf("failed to create pristine chain: %v", err) } defer blockchain.Stop() // Create a chain, ban a hash and try to import if full { blocks := makeBlockChain(blockchain.CurrentBlock(), 3, ethash.NewFaker(), db, 10) BadHashes[blocks[2].Header().Hash()] = true defer func() { delete(BadHashes, blocks[2].Header().Hash()) }() _, err = blockchain.InsertChain(blocks) } else { headers := makeHeaderChain(blockchain.CurrentHeader(), 3, ethash.NewFaker(), db, 10) BadHashes[headers[2].Hash()] = true defer func() { delete(BadHashes, headers[2].Hash()) }() _, err = blockchain.InsertHeaderChain(headers, 1) } if err != ErrBlacklistedHash { t.Errorf("error mismatch: have: %v, want: %v", err, ErrBlacklistedHash) } } // Tests that bad hashes are detected on boot, and the chain rolled back to a // good state prior to the bad hash. 
func TestReorgBadHeaderHashes(t *testing.T) { testReorgBadHashes(t, false) }
func TestReorgBadBlockHashes(t *testing.T)  { testReorgBadHashes(t, true) }

// testReorgBadHashes imports a 4-element chain, bans the head's hash, then
// restarts the chain on the same database and checks that startup rolled the
// head back to the element just before the banned one.
func testReorgBadHashes(t *testing.T, full bool) {
	// Create a pristine chain and database
	db, blockchain, err := newCanonical(ethash.NewFaker(), 0, full)
	if err != nil {
		t.Fatalf("failed to create pristine chain: %v", err)
	}
	// Create a chain, import and ban afterwards
	headers := makeHeaderChain(blockchain.CurrentHeader(), 4, ethash.NewFaker(), db, 10)
	blocks := makeBlockChain(blockchain.CurrentBlock(), 4, ethash.NewFaker(), db, 10)

	if full {
		if _, err = blockchain.InsertChain(blocks); err != nil {
			t.Errorf("failed to import blocks: %v", err)
		}
		if blockchain.CurrentBlock().Hash() != blocks[3].Hash() {
			t.Errorf("last block hash mismatch: have: %x, want %x", blockchain.CurrentBlock().Hash(), blocks[3].Header().Hash())
		}
		BadHashes[blocks[3].Header().Hash()] = true
		defer func() { delete(BadHashes, blocks[3].Header().Hash()) }()
	} else {
		if _, err = blockchain.InsertHeaderChain(headers, 1); err != nil {
			t.Errorf("failed to import headers: %v", err)
		}
		if blockchain.CurrentHeader().Hash() != headers[3].Hash() {
			t.Errorf("last header hash mismatch: have: %x, want %x", blockchain.CurrentHeader().Hash(), headers[3].Hash())
		}
		BadHashes[headers[3].Hash()] = true
		defer func() { delete(BadHashes, headers[3].Hash()) }()
	}
	blockchain.Stop()

	// Create a new BlockChain and check that it rolled back the state.
	ncm, err := NewBlockChain(blockchain.db, nil, blockchain.chainConfig, ethash.NewFaker(), vm.Config{})
	if err != nil {
		t.Fatalf("failed to create new chain manager: %v", err)
	}
	if full {
		// Head must now be the element preceding the banned one (index 2).
		if ncm.CurrentBlock().Hash() != blocks[2].Header().Hash() {
			t.Errorf("last block hash mismatch: have: %x, want %x", ncm.CurrentBlock().Hash(), blocks[2].Header().Hash())
		}
		if blocks[2].Header().GasLimit != ncm.GasLimit() {
			t.Errorf("last block gasLimit mismatch: have: %d, want %d", ncm.GasLimit(), blocks[2].Header().GasLimit)
		}
	} else {
		if ncm.CurrentHeader().Hash() != headers[2].Hash() {
			t.Errorf("last header hash mismatch: have: %x, want %x", ncm.CurrentHeader().Hash(), headers[2].Hash())
		}
	}
	ncm.Stop()
}

// Tests chain insertions in the face of one entity containing an invalid nonce.
func TestHeadersInsertNonceError(t *testing.T) { testInsertNonceError(t, false) }
func TestBlocksInsertNonceError(t *testing.T)  { testInsertNonceError(t, true) }

// testInsertNonceError builds chains of increasing length, arranges for the
// consensus engine to fail at a random element, and verifies both the
// reported failure index and that nothing at or after the failure point was
// persisted.
func testInsertNonceError(t *testing.T, full bool) {
	for i := 1; i < 25 && !t.Failed(); i++ {
		// Create a pristine chain and database
		db, blockchain, err := newCanonical(ethash.NewFaker(), 0, full)
		if err != nil {
			t.Fatalf("failed to create pristine chain: %v", err)
		}
		defer blockchain.Stop()

		// Create and insert a chain with a failing nonce
		var (
			failAt  int
			failRes int
			failNum uint64
		)
		if full {
			blocks := makeBlockChain(blockchain.CurrentBlock(), i, ethash.NewFaker(), db, 0)

			failAt = rand.Int() % len(blocks)
			failNum = blocks[failAt].NumberU64()

			blockchain.engine = ethash.NewFakeFailer(failNum)
			failRes, err = blockchain.InsertChain(blocks)
		} else {
			headers := makeHeaderChain(blockchain.CurrentHeader(), i, ethash.NewFaker(), db, 0)

			failAt = rand.Int() % len(headers)
			failNum = headers[failAt].Number.Uint64()

			blockchain.engine = ethash.NewFakeFailer(failNum)
			// Header chain keeps its own engine reference; keep both in sync.
			blockchain.hc.engine = blockchain.engine
			failRes, err = blockchain.InsertHeaderChain(headers, 1)
		}
		// Check that the returned error indicates the failure.
		if failRes != failAt {
			t.Errorf("test %d: failure index mismatch: have %d, want %d", i, failRes, failAt)
		}
		// Check that no blocks after the failing block have been inserted.
		for j := 0; j < i-failAt; j++ {
			if full {
				if block := blockchain.GetBlockByNumber(failNum + uint64(j)); block != nil {
					t.Errorf("test %d: invalid block in chain: %v", i, block)
				}
			} else {
				if header := blockchain.GetHeaderByNumber(failNum + uint64(j)); header != nil {
					t.Errorf("test %d: invalid header in chain: %v", i, header)
				}
			}
		}
	}
}

// Tests that fast importing a block chain produces the same chain data as the
// classical full block processing.
func TestFastVsFullChains(t *testing.T) {
	// Configure and generate a sample block chain
	var (
		gendb   = ethdb.NewMemDatabase()
		key, _  = crypto.HexToECDSA("b71c71a67e1177ad4e901695e1b4b9ee17ae16c6668d313eac2f96dbcda3f291")
		address = crypto.PubkeyToAddress(key.PublicKey)
		funds   = big.NewInt(1000000000)
		gspec   = &Genesis{
			Config: params.TestChainConfig,
			Alloc:  GenesisAlloc{address: {Balance: funds}},
		}
		genesis = gspec.MustCommit(gendb)
		signer  = types.NewEIP155Signer(gspec.Config.ChainID)
	)
	blocks, receipts := GenerateChain(gspec.Config, genesis, ethash.NewFaker(), gendb, 1024, func(i int, block *BlockGen) {
		block.SetCoinbase(common.Address{0x00})

		// If the block number is multiple of 3, send a few bonus transactions to the miner
		if i%3 == 2 {
			for j := 0; j < i%4+1; j++ {
				tx, err := types.SignTx(types.NewTransaction(block.TxNonce(address), common.Address{0x00}, big.NewInt(1000), params.TxGas, nil, nil), signer, key)
				if err != nil {
					panic(err)
				}
				block.AddTx(tx)
			}
		}
		// If the block number is a multiple of 5, add a few bonus uncles to the block
		// NOTE(review): i%5 can never equal 5, so this branch is dead and no
		// uncles are ever added — likely intended `i%5 == 4`; confirm before changing.
		if i%5 == 5 {
			block.AddUncle(&types.Header{ParentHash: block.PrevBlock(i - 1).Hash(), Number: big.NewInt(int64(i - 1))})
		}
	})
	// Import the chain as an archive node for the comparison baseline
	archiveDb := ethdb.NewMemDatabase()
	gspec.MustCommit(archiveDb)
	archive, _ := NewBlockChain(archiveDb, nil, gspec.Config, ethash.NewFaker(), vm.Config{})
	defer archive.Stop()

	if n, err := archive.InsertChain(blocks); err != nil {
		t.Fatalf("failed to process block %d: %v", n, err)
	}
	// Fast import the chain as a non-archive node to test
	fastDb := ethdb.NewMemDatabase()
	gspec.MustCommit(fastDb)
	fast, _ := NewBlockChain(fastDb, nil, gspec.Config, ethash.NewFaker(), vm.Config{})
	defer fast.Stop()

	headers := make([]*types.Header, len(blocks))
	for i, block := range blocks {
		headers[i] = block.Header()
	}
	if n, err := fast.InsertHeaderChain(headers, 1); err != nil {
		t.Fatalf("failed to insert header %d: %v", n, err)
	}
	if n, err := fast.InsertReceiptChain(blocks, receipts); err != nil {
		t.Fatalf("failed to insert receipt %d: %v", n, err)
	}
	// Iterate over all chain data components, and cross reference
	for i := 0; i < len(blocks); i++ {
		num, hash := blocks[i].NumberU64(), blocks[i].Hash()

		if ftd, atd := fast.GetTdByHash(hash), archive.GetTdByHash(hash); ftd.Cmp(atd) != 0 {
			t.Errorf("block #%d [%x]: td mismatch: have %v, want %v", num, hash, ftd, atd)
		}
		if fheader, aheader := fast.GetHeaderByHash(hash), archive.GetHeaderByHash(hash); fheader.Hash() != aheader.Hash() {
			t.Errorf("block #%d [%x]: header mismatch: have %v, want %v", num, hash, fheader, aheader)
		}
		if fblock, ablock := fast.GetBlockByHash(hash), archive.GetBlockByHash(hash); fblock.Hash() != ablock.Hash() {
			t.Errorf("block #%d [%x]: block mismatch: have %v, want %v", num, hash, fblock, ablock)
		} else if types.DeriveSha(fblock.Transactions()) != types.DeriveSha(ablock.Transactions()) {
			t.Errorf("block #%d [%x]: transactions mismatch: have %v, want %v", num, hash, fblock.Transactions(), ablock.Transactions())
		} else if types.CalcUncleHash(fblock.Uncles()) != types.CalcUncleHash(ablock.Uncles()) {
			t.Errorf("block #%d [%x]: uncles mismatch: have %v, want %v", num, hash, fblock.Uncles(), ablock.Uncles())
		}
		if freceipts, areceipts := rawdb.ReadReceipts(fastDb, hash, *rawdb.ReadHeaderNumber(fastDb, hash)), rawdb.ReadReceipts(archiveDb, hash, *rawdb.ReadHeaderNumber(archiveDb, hash)); types.DeriveSha(freceipts) != types.DeriveSha(areceipts) {
			t.Errorf("block #%d [%x]: receipts mismatch: have %v, want %v", num, hash, freceipts, areceipts)
		}
	}
	// Check that the canonical chains are the same between the databases
	for i := 0; i < len(blocks)+1; i++ {
		if fhash, ahash := rawdb.ReadCanonicalHash(fastDb, uint64(i)), rawdb.ReadCanonicalHash(archiveDb, uint64(i)); fhash != ahash {
			t.Errorf("block #%d: canonical hash mismatch: have %v, want %v", i, fhash, ahash)
		}
	}
}

// Tests that various import methods move the chain head pointers to the correct
// positions.
func TestLightVsFastVsFullChainHeads(t *testing.T) {
	// Configure and generate a sample block chain
	var (
		gendb   = ethdb.NewMemDatabase()
		key, _  = crypto.HexToECDSA("b71c71a67e1177ad4e901695e1b4b9ee17ae16c6668d313eac2f96dbcda3f291")
		address = crypto.PubkeyToAddress(key.PublicKey)
		funds   = big.NewInt(1000000000)
		gspec   = &Genesis{Config: params.TestChainConfig, Alloc: GenesisAlloc{address: {Balance: funds}}}
		genesis = gspec.MustCommit(gendb)
	)
	height := uint64(1024)
	blocks, receipts := GenerateChain(gspec.Config, genesis, ethash.NewFaker(), gendb, int(height), nil)

	// Configure a subchain to roll back
	remove := []common.Hash{}
	for _, block := range blocks[height/2:] {
		remove = append(remove, block.Hash())
	}
	// Create a small assertion method to check the three heads
	assert := func(t *testing.T, kind string, chain *BlockChain, header uint64, fast uint64, block uint64) {
		if num := chain.CurrentBlock().NumberU64(); num != block {
			t.Errorf("%s head block mismatch: have #%v, want #%v", kind, num, block)
		}
		if num := chain.CurrentFastBlock().NumberU64(); num != fast {
			t.Errorf("%s head fast-block mismatch: have #%v, want #%v", kind, num, fast)
		}
		if num := chain.CurrentHeader().Number.Uint64(); num != header {
			t.Errorf("%s head header mismatch: have #%v, want #%v", kind, num, header)
		}
	}
	// Import the chain as an archive node and ensure all pointers are updated
	archiveDb := ethdb.NewMemDatabase()
	gspec.MustCommit(archiveDb)

	archive, _ := NewBlockChain(archiveDb, nil, gspec.Config, ethash.NewFaker(), vm.Config{})
	if n, err := archive.InsertChain(blocks); err != nil {
		t.Fatalf("failed to process block %d: %v", n, err)
	}
	defer archive.Stop()

	assert(t, "archive", archive, height, height, height)
	archive.Rollback(remove)
	assert(t, "archive", archive, height/2, height/2, height/2)

	// Import the chain as a non-archive node and ensure all pointers are updated
	fastDb := ethdb.NewMemDatabase()
	gspec.MustCommit(fastDb)
	fast, _ := NewBlockChain(fastDb, nil, gspec.Config, ethash.NewFaker(), vm.Config{})
	defer fast.Stop()

	headers := make([]*types.Header, len(blocks))
	for i, block := range blocks {
		headers[i] = block.Header()
	}
	if n, err := fast.InsertHeaderChain(headers, 1); err != nil {
		t.Fatalf("failed to insert header %d: %v", n, err)
	}
	if n, err := fast.InsertReceiptChain(blocks, receipts); err != nil {
		t.Fatalf("failed to insert receipt %d: %v", n, err)
	}
	// Fast sync advances header and fast-block heads but not the full head.
	assert(t, "fast", fast, height, height, 0)
	fast.Rollback(remove)
	assert(t, "fast", fast, height/2, height/2, 0)

	// Import the chain as a light node and ensure all pointers are updated
	lightDb := ethdb.NewMemDatabase()
	gspec.MustCommit(lightDb)
	light, _ := NewBlockChain(lightDb, nil, gspec.Config, ethash.NewFaker(), vm.Config{})
	if n, err := light.InsertHeaderChain(headers, 1); err != nil {
		t.Fatalf("failed to insert header %d: %v", n, err)
	}
	defer light.Stop()

	// Header-only import advances only the header head.
	assert(t, "light", light, height, 0, 0)
	light.Rollback(remove)
	assert(t, "light", light, height/2, 0, 0)
}

// Tests that chain reorganisations handle transaction removals and reinsertions.
func TestChainTxReorgs(t *testing.T) {
	var (
		key1, _ = crypto.HexToECDSA("b71c71a67e1177ad4e901695e1b4b9ee17ae16c6668d313eac2f96dbcda3f291")
		key2, _ = crypto.HexToECDSA("8a1f9a8f95be41cd7ccb6168179afb4504aefe388d1e14474d32c45c72ce7b7a")
		key3, _ = crypto.HexToECDSA("49a7b37aa6f6645917e7b807e9d1c00d4fa71f18343b0d4122a4d2df64dd6fee")
		addr1   = crypto.PubkeyToAddress(key1.PublicKey)
		addr2   = crypto.PubkeyToAddress(key2.PublicKey)
		addr3   = crypto.PubkeyToAddress(key3.PublicKey)
		db      = ethdb.NewMemDatabase()
		gspec   = &Genesis{
			Config:   params.TestChainConfig,
			GasLimit: 3141592,
			Alloc: GenesisAlloc{
				addr1: {Balance: big.NewInt(1000000)},
				addr2: {Balance: big.NewInt(1000000)},
				addr3: {Balance: big.NewInt(1000000)},
			},
		}
		genesis = gspec.MustCommit(db)
		signer  = types.NewEIP155Signer(gspec.Config.ChainID)
	)
	// Create two transactions shared between the chains:
	//  - postponed: transaction included at a later block in the forked chain
	//  - swapped: transaction included at the same block number in the forked chain
	postponed, _ := types.SignTx(types.NewTransaction(0, addr1, big.NewInt(1000), params.TxGas, nil, nil), signer, key1)
	swapped, _ := types.SignTx(types.NewTransaction(1, addr1, big.NewInt(1000), params.TxGas, nil, nil), signer, key1)

	// Create two transactions that will be dropped by the forked chain:
	//  - pastDrop: transaction dropped retroactively from a past block
	//  - freshDrop: transaction dropped exactly at the block where the reorg is detected
	var pastDrop, freshDrop *types.Transaction

	// Create three transactions that will be added in the forked chain:
	//  - pastAdd: transaction added before the reorganization is detected
	//  - freshAdd: transaction added at the exact block the reorg is detected
	//  - futureAdd: transaction added after the reorg has already finished
	var pastAdd, freshAdd, futureAdd *types.Transaction

	chain, _ := GenerateChain(gspec.Config, genesis, ethash.NewFaker(), db, 3, func(i int, gen *BlockGen) {
		switch i {
		case 0:
			pastDrop, _ = types.SignTx(types.NewTransaction(gen.TxNonce(addr2), addr2, big.NewInt(1000), params.TxGas, nil, nil), signer, key2)

			gen.AddTx(pastDrop)  // This transaction will be dropped in the fork from below the split point
			gen.AddTx(postponed) // This transaction will be postponed till block #3 in the fork

		case 2:
			freshDrop, _ = types.SignTx(types.NewTransaction(gen.TxNonce(addr2), addr2, big.NewInt(1000), params.TxGas, nil, nil), signer, key2)

			gen.AddTx(freshDrop) // This transaction will be dropped in the fork from exactly at the split point
			gen.AddTx(swapped)   // This transaction will be swapped out at the exact height

			gen.OffsetTime(9) // Lower the block difficulty to simulate a weaker chain
		}
	})
	// Import the chain. This runs all block validation rules.
	blockchain, _ := NewBlockChain(db, nil, gspec.Config, ethash.NewFaker(), vm.Config{})
	if i, err := blockchain.InsertChain(chain); err != nil {
		t.Fatalf("failed to insert original chain[%d]: %v", i, err)
	}
	defer blockchain.Stop()

	// overwrite the old chain
	chain, _ = GenerateChain(gspec.Config, genesis, ethash.NewFaker(), db, 5, func(i int, gen *BlockGen) {
		switch i {
		case 0:
			pastAdd, _ = types.SignTx(types.NewTransaction(gen.TxNonce(addr3), addr3, big.NewInt(1000), params.TxGas, nil, nil), signer, key3)
			gen.AddTx(pastAdd) // This transaction needs to be injected during reorg

		case 2:
			gen.AddTx(postponed) // This transaction was postponed from block #1 in the original chain
			gen.AddTx(swapped)   // This transaction was swapped from the exact current spot in the original chain

			freshAdd, _ = types.SignTx(types.NewTransaction(gen.TxNonce(addr3), addr3, big.NewInt(1000), params.TxGas, nil, nil), signer, key3)
			gen.AddTx(freshAdd) // This transaction will be added exactly at reorg time

		case 3:
			futureAdd, _ = types.SignTx(types.NewTransaction(gen.TxNonce(addr3), addr3, big.NewInt(1000), params.TxGas, nil, nil), signer, key3)
			gen.AddTx(futureAdd) // This transaction will be added after a full reorg
		}
	})
	if _, err := blockchain.InsertChain(chain); err != nil {
		t.Fatalf("failed to insert forked chain: %v", err)
	}

	// removed tx
	for i, tx := range (types.Transactions{pastDrop, freshDrop}) {
		if txn, _, _, _ := rawdb.ReadTransaction(db, tx.Hash()); txn != nil {
			t.Errorf("drop %d: tx %v found while shouldn't have been", i, txn)
		}
		if rcpt, _, _, _ := rawdb.ReadReceipt(db, tx.Hash()); rcpt != nil {
			t.Errorf("drop %d: receipt %v found while shouldn't have been", i, rcpt)
		}
	}
	// added tx
	for i, tx := range (types.Transactions{pastAdd, freshAdd, futureAdd}) {
		if txn, _, _, _ := rawdb.ReadTransaction(db, tx.Hash()); txn == nil {
			t.Errorf("add %d: expected tx to be found", i)
		}
		if rcpt, _, _, _ := rawdb.ReadReceipt(db, tx.Hash()); rcpt == nil {
			t.Errorf("add %d: expected receipt to be found", i)
		}
	}
	// shared tx
	for i, tx := range (types.Transactions{postponed, swapped}) {
		if txn, _, _, _ := rawdb.ReadTransaction(db, tx.Hash()); txn == nil {
			t.Errorf("share %d: expected tx to be found", i)
		}
		if rcpt, _, _, _ := rawdb.ReadReceipt(db, tx.Hash()); rcpt == nil {
			t.Errorf("share %d: expected receipt to be found", i)
		}
	}
}

// TestLogReorgs verifies that a reorg away from a chain containing a
// log-emitting contract fires a RemovedLogsEvent carrying the removed logs.
func TestLogReorgs(t *testing.T) {
	var (
		key1, _ = crypto.HexToECDSA("b71c71a67e1177ad4e901695e1b4b9ee17ae16c6668d313eac2f96dbcda3f291")
		addr1   = crypto.PubkeyToAddress(key1.PublicKey)
		db      = ethdb.NewMemDatabase()
		// this code generates a log
		code    = common.Hex2Bytes("60606040525b7f24ec1d3ff24c2f6ff210738839dbc339cd45a5294d85c79361016243157aae7b60405180905060405180910390a15b600a8060416000396000f360606040526008565b00")
		gspec   = &Genesis{Config: params.TestChainConfig, Alloc: GenesisAlloc{addr1: {Balance: big.NewInt(10000000000000)}}}
		genesis = gspec.MustCommit(db)
		signer  = types.NewEIP155Signer(gspec.Config.ChainID)
	)

	blockchain, _ := NewBlockChain(db, nil, gspec.Config, ethash.NewFaker(), vm.Config{})
	defer blockchain.Stop()

	rmLogsCh := make(chan RemovedLogsEvent)
	blockchain.SubscribeRemovedLogsEvent(rmLogsCh)
	chain, _ := GenerateChain(params.TestChainConfig, genesis, ethash.NewFaker(), db, 2, func(i int, gen *BlockGen) {
		if i == 1 {
			tx, err := types.SignTx(types.NewContractCreation(gen.TxNonce(addr1), new(big.Int), 1000000, new(big.Int), code), signer, key1)
			if err != nil {
				t.Fatalf("failed to create tx: %v", err)
			}
			gen.AddTx(tx)
		}
	})
	if _, err := blockchain.InsertChain(chain); err != nil {
		t.Fatalf("failed to insert chain: %v", err)
	}

	// A longer (3-block) empty sibling chain triggers the reorg away from the
	// log-producing chain.
	chain, _ = GenerateChain(params.TestChainConfig, genesis, ethash.NewFaker(), db, 3, func(i int, gen *BlockGen) {})
	if _, err := blockchain.InsertChain(chain); err != nil {
		t.Fatalf("failed to insert forked chain: %v", err)
	}

	timeout := time.NewTimer(1 * time.Second)
	select {
	case ev := <-rmLogsCh:
		if len(ev.Logs) == 0 {
			t.Error("expected logs")
		}
	case <-timeout.C:
		t.Fatal("Timeout. There is no RemovedLogsEvent has been sent.")
	}
}

// TestReorgSideEvent verifies that exactly the expected set of blocks is
// reported through ChainSideEvent during a reorg, and nothing more.
func TestReorgSideEvent(t *testing.T) {
	var (
		db      = ethdb.NewMemDatabase()
		key1, _ = crypto.HexToECDSA("b71c71a67e1177ad4e901695e1b4b9ee17ae16c6668d313eac2f96dbcda3f291")
		addr1   = crypto.PubkeyToAddress(key1.PublicKey)
		gspec   = &Genesis{
			Config: params.TestChainConfig,
			Alloc:  GenesisAlloc{addr1: {Balance: big.NewInt(10000000000000)}},
		}
		genesis = gspec.MustCommit(db)
		signer  = types.NewEIP155Signer(gspec.Config.ChainID)
	)

	blockchain, _ := NewBlockChain(db, nil, gspec.Config, ethash.NewFaker(), vm.Config{})
	defer blockchain.Stop()

	chain, _ := GenerateChain(gspec.Config, genesis, ethash.NewFaker(), db, 3, func(i int, gen *BlockGen) {})
	if _, err := blockchain.InsertChain(chain); err != nil {
		t.Fatalf("failed to insert chain: %v", err)
	}

	replacementBlocks, _ := GenerateChain(gspec.Config, genesis, ethash.NewFaker(), db, 4, func(i int, gen *BlockGen) {
		tx, err := types.SignTx(types.NewContractCreation(gen.TxNonce(addr1), new(big.Int), 1000000, new(big.Int), nil), signer, key1)
		if i == 2 {
			gen.OffsetTime(-9)
		}
		if err != nil {
			t.Fatalf("failed to create tx: %v", err)
		}
		gen.AddTx(tx)
	})
	chainSideCh := make(chan ChainSideEvent, 64)
	blockchain.SubscribeChainSideEvent(chainSideCh)
	if _, err := blockchain.InsertChain(replacementBlocks); err != nil {
		t.Fatalf("failed to insert chain: %v", err)
	}

	// first two block of the secondary chain are for a brief moment considered
	// side chains because up to that point the first one is considered the
	// heavier chain.
	expectedSideHashes := map[common.Hash]bool{
		replacementBlocks[0].Hash(): true,
		replacementBlocks[1].Hash(): true,
		chain[0].Hash():             true,
		chain[1].Hash():             true,
		chain[2].Hash():             true,
	}

	i := 0

	const timeoutDura = 10 * time.Second
	timeout := time.NewTimer(timeoutDura)
done:
	for {
		select {
		case ev := <-chainSideCh:
			block := ev.Block
			if _, ok := expectedSideHashes[block.Hash()]; !ok {
				t.Errorf("%d: didn't expect %x to be in side chain", i, block.Hash())
			}
			i++

			if i == len(expectedSideHashes) {
				timeout.Stop()

				break done
			}
			timeout.Reset(timeoutDura)

		case <-timeout.C:
			t.Fatal("Timeout. Possibly not all blocks were triggered for sideevent")
		}
	}

	// make sure no more events are fired
	select {
	case e := <-chainSideCh:
		t.Errorf("unexpected event fired: %v", e)
	case <-time.After(250 * time.Millisecond):
	}
}

// Tests if the canonical block can be fetched from the database during chain insertion.
func TestCanonicalBlockRetrieval(t *testing.T) {
	_, blockchain, err := newCanonical(ethash.NewFaker(), 0, true)
	if err != nil {
		t.Fatalf("failed to create pristine chain: %v", err)
	}
	defer blockchain.Stop()

	chain, _ := GenerateChain(blockchain.chainConfig, blockchain.genesisBlock, ethash.NewFaker(), blockchain.db, 10, func(i int, gen *BlockGen) {})

	var pend sync.WaitGroup
	pend.Add(len(chain))

	for i := range chain {
		// One reader goroutine per block races against the insertion below,
		// busy-waiting until the canonical hash for its number appears.
		go func(block *types.Block) {
			defer pend.Done()

			// try to retrieve a block by its canonical hash and see if the block data can be retrieved.
			for {
				ch := rawdb.ReadCanonicalHash(blockchain.db, block.NumberU64())
				if ch == (common.Hash{}) {
					continue // busy wait for canonical hash to be written
				}
				if ch != block.Hash() {
					t.Fatalf("unknown canonical hash, want %s, got %s", block.Hash().Hex(), ch.Hex())
				}
				fb := rawdb.ReadBlock(blockchain.db, ch, block.NumberU64())
				if fb == nil {
					t.Fatalf("unable to retrieve block %d for canonical hash: %s", block.NumberU64(), ch.Hex())
				}
				if fb.Hash() != block.Hash() {
					t.Fatalf("invalid block hash for block %d, want %s, got %s", block.NumberU64(), block.Hash().Hex(), fb.Hash().Hex())
				}
				return
			}
		}(chain[i])

		if _, err := blockchain.InsertChain(types.Blocks{chain[i]}); err != nil {
			t.Fatalf("failed to insert block %d: %v", i, err)
		}
	}
	pend.Wait()
}

// TestEIP155Transition checks that unprotected (Homestead) transactions are
// accepted before and after the EIP155 fork block, that protected ones are
// accepted only from the fork on, and that a wrong chain id is rejected.
func TestEIP155Transition(t *testing.T) {
	// Configure and generate a sample block chain
	var (
		db         = ethdb.NewMemDatabase()
		key, _     = crypto.HexToECDSA("b71c71a67e1177ad4e901695e1b4b9ee17ae16c6668d313eac2f96dbcda3f291")
		address    = crypto.PubkeyToAddress(key.PublicKey)
		funds      = big.NewInt(1000000000)
		deleteAddr = common.Address{1}
		gspec      = &Genesis{
			Config: &params.ChainConfig{ChainID: big.NewInt(1), EIP155Block: big.NewInt(2), HomesteadBlock: new(big.Int)},
			Alloc:  GenesisAlloc{address: {Balance: funds}, deleteAddr: {Balance: new(big.Int)}},
		}
		genesis = gspec.MustCommit(db)
	)

	blockchain, _ := NewBlockChain(db, nil, gspec.Config, ethash.NewFaker(), vm.Config{})
	defer blockchain.Stop()

	blocks, _ := GenerateChain(gspec.Config, genesis, ethash.NewFaker(), db, 4, func(i int, block *BlockGen) {
		var (
			tx      *types.Transaction
			err     error
			basicTx = func(signer types.Signer) (*types.Transaction, error) {
				return types.SignTx(types.NewTransaction(block.TxNonce(address), common.Address{}, new(big.Int), 21000, new(big.Int), nil), signer, key)
			}
		)
		switch i {
		case 0:
			tx, err = basicTx(types.HomesteadSigner{})
			if err != nil {
				t.Fatal(err)
			}
			block.AddTx(tx)
		case 2:
			tx, err = basicTx(types.HomesteadSigner{})
			if err != nil {
				t.Fatal(err)
			}
			block.AddTx(tx)

			tx, err = basicTx(types.NewEIP155Signer(gspec.Config.ChainID))
			if err != nil {
				t.Fatal(err)
			}
			block.AddTx(tx)
		case 3:
			tx, err = basicTx(types.HomesteadSigner{})
			if err != nil {
				t.Fatal(err)
			}
			block.AddTx(tx)

			tx, err = basicTx(types.NewEIP155Signer(gspec.Config.ChainID))
			if err != nil {
				t.Fatal(err)
			}
			block.AddTx(tx)
		}
	})

	if _, err := blockchain.InsertChain(blocks); err != nil {
		t.Fatal(err)
	}
	block := blockchain.GetBlockByNumber(1)
	if block.Transactions()[0].Protected() {
		t.Error("Expected block[0].txs[0] to not be replay protected")
	}

	block = blockchain.GetBlockByNumber(3)
	if block.Transactions()[0].Protected() {
		t.Error("Expected block[3].txs[0] to not be replay protected")
	}
	if !block.Transactions()[1].Protected() {
		t.Error("Expected block[3].txs[1] to be replay protected")
	}
	// NOTE(review): blocks has length 4, so blocks[4:] is empty and this
	// insert is a no-op — confirm whether a non-empty slice was intended.
	if _, err := blockchain.InsertChain(blocks[4:]); err != nil {
		t.Fatal(err)
	}

	// generate an invalid chain id transaction
	config := &params.ChainConfig{ChainID: big.NewInt(2), EIP155Block: big.NewInt(2), HomesteadBlock: new(big.Int)}
	blocks, _ = GenerateChain(config, blocks[len(blocks)-1], ethash.NewFaker(), db, 4, func(i int, block *BlockGen) {
		var (
			tx      *types.Transaction
			err     error
			basicTx = func(signer types.Signer) (*types.Transaction, error) {
				return types.SignTx(types.NewTransaction(block.TxNonce(address), common.Address{}, new(big.Int), 21000, new(big.Int), nil), signer, key)
			}
		)
		if i == 0 {
			tx, err = basicTx(types.NewEIP155Signer(big.NewInt(2)))
			if err != nil {
				t.Fatal(err)
			}
			block.AddTx(tx)
		}
	})
	_, err := blockchain.InsertChain(blocks)
	if err != types.ErrInvalidChainId {
		t.Error("expected error:", types.ErrInvalidChainId)
	}
}

// TestEIP161AccountRemoval checks that an empty account touched by a
// zero-value transfer exists before the EIP158 fork block and is removed
// (and not recreated) from the fork on.
func TestEIP161AccountRemoval(t *testing.T) {
	// Configure and generate a sample block chain
	var (
		db      = ethdb.NewMemDatabase()
		key, _  = crypto.HexToECDSA("b71c71a67e1177ad4e901695e1b4b9ee17ae16c6668d313eac2f96dbcda3f291")
		address = crypto.PubkeyToAddress(key.PublicKey)
		funds   = big.NewInt(1000000000)
		theAddr = common.Address{1}
		gspec   = &Genesis{
			Config: &params.ChainConfig{
				ChainID:        big.NewInt(1),
				HomesteadBlock: new(big.Int),
				EIP155Block:    new(big.Int),
				EIP158Block:    big.NewInt(2),
			},
			Alloc: GenesisAlloc{address: {Balance: funds}},
		}
		genesis = gspec.MustCommit(db)
	)
	blockchain, _ := NewBlockChain(db, nil, gspec.Config, ethash.NewFaker(), vm.Config{})
	defer blockchain.Stop()

	blocks, _ := GenerateChain(gspec.Config, genesis, ethash.NewFaker(), db, 3, func(i int, block *BlockGen) {
		var (
			tx     *types.Transaction
			err    error
			signer = types.NewEIP155Signer(gspec.Config.ChainID)
		)
		switch i {
		case 0:
			tx, err = types.SignTx(types.NewTransaction(block.TxNonce(address), theAddr, new(big.Int), 21000, new(big.Int), nil), signer, key)
		case 1:
			tx, err = types.SignTx(types.NewTransaction(block.TxNonce(address), theAddr, new(big.Int), 21000, new(big.Int), nil), signer, key)
		case 2:
			tx, err = types.SignTx(types.NewTransaction(block.TxNonce(address), theAddr, new(big.Int), 21000, new(big.Int), nil), signer, key)
		}
		if err != nil {
			t.Fatal(err)
		}
		block.AddTx(tx)
	})
	// account must exist pre eip 161
	if _, err := blockchain.InsertChain(types.Blocks{blocks[0]}); err != nil {
		t.Fatal(err)
	}
	if st, _ := blockchain.State(); !st.Exist(theAddr) {
		t.Error("expected account to exist")
	}

	// account needs to be deleted post eip 161
	if _, err := blockchain.InsertChain(types.Blocks{blocks[1]}); err != nil {
		t.Fatal(err)
	}
	if st, _ := blockchain.State(); st.Exist(theAddr) {
		t.Error("account should not exist")
	}

	// account musn't be created post eip 161
	if _, err := blockchain.InsertChain(types.Blocks{blocks[2]}); err != nil {
		t.Fatal(err)
	}
	if st, _ := blockchain.State(); st.Exist(theAddr) {
		t.Error("account should not exist")
	}
}

// This is a regression test (i.e. as weird as it is, don't delete it ever), which
// tests that under weird reorg conditions the blockchain and its internal header-
// chain return the same latest block/header.
//
// https://github.com/ethereum/go-ethereum/pull/15941
func TestBlockchainHeaderchainReorgConsistency(t *testing.T) {
	// Generate a canonical chain to act as the main dataset
	engine := ethash.NewFaker()

	db := ethdb.NewMemDatabase()
	genesis := new(Genesis).MustCommit(db)
	blocks, _ := GenerateChain(params.TestChainConfig, genesis, engine, db, 64, func(i int, b *BlockGen) { b.SetCoinbase(common.Address{1}) })

	// Generate a bunch of fork blocks, each side forking from the canonical chain
	forks := make([]*types.Block, len(blocks))
	for i := 0; i < len(forks); i++ {
		parent := genesis
		if i > 0 {
			parent = blocks[i-1]
		}
		fork, _ := GenerateChain(params.TestChainConfig, parent, engine, db, 1, func(i int, b *BlockGen) { b.SetCoinbase(common.Address{2}) })
		forks[i] = fork[0]
	}
	// Import the canonical and fork chain side by side, verifying the current block
	// and current header consistency
	diskdb := ethdb.NewMemDatabase()
	new(Genesis).MustCommit(diskdb)

	chain, err := NewBlockChain(diskdb, nil, params.TestChainConfig, engine, vm.Config{})
	if err != nil {
		t.Fatalf("failed to create tester chain: %v", err)
	}
	for i := 0; i < len(blocks); i++ {
		if _, err := chain.InsertChain(blocks[i : i+1]); err != nil {
			t.Fatalf("block %d: failed to insert into chain: %v", i, err)
		}
		if chain.CurrentBlock().Hash() != chain.CurrentHeader().Hash() {
			t.Errorf("block %d: current block/header mismatch: block #%d [%x…], header #%d [%x…]", i, chain.CurrentBlock().Number(), chain.CurrentBlock().Hash().Bytes()[:4], chain.CurrentHeader().Number, chain.CurrentHeader().Hash().Bytes()[:4])
		}
		if _, err := chain.InsertChain(forks[i : i+1]); err != nil {
			t.Fatalf(" fork %d: failed to insert into chain: %v", i, err)
		}
		if chain.CurrentBlock().Hash() != chain.CurrentHeader().Hash() {
			t.Errorf(" fork %d: current block/header mismatch: block #%d [%x…], header #%d [%x…]", i, chain.CurrentBlock().Number(), chain.CurrentBlock().Hash().Bytes()[:4], chain.CurrentHeader().Number, chain.CurrentHeader().Hash().Bytes()[:4])
		}
	}
}

// Tests that importing small side forks doesn't leave junk in the trie database
// cache (which would eventually cause memory issues).
func TestTrieForkGC(t *testing.T) {
	// Generate a canonical chain to act as the main dataset
	engine := ethash.NewFaker()

	db := ethdb.NewMemDatabase()
	genesis := new(Genesis).MustCommit(db)
	blocks, _ := GenerateChain(params.TestChainConfig, genesis, engine, db, 2*triesInMemory, func(i int, b *BlockGen) { b.SetCoinbase(common.Address{1}) })

	// Generate a bunch of fork blocks, each side forking from the canonical chain
	forks := make([]*types.Block, len(blocks))
	for i := 0; i < len(forks); i++ {
		parent := genesis
		if i > 0 {
			parent = blocks[i-1]
		}
		fork, _ := GenerateChain(params.TestChainConfig, parent, engine, db, 1, func(i int, b *BlockGen) { b.SetCoinbase(common.Address{2}) })
		forks[i] = fork[0]
	}
	// Import the canonical and fork chain side by side, forcing the trie cache to cache both
	diskdb := ethdb.NewMemDatabase()
	new(Genesis).MustCommit(diskdb)

	chain, err := NewBlockChain(diskdb, nil, params.TestChainConfig, engine, vm.Config{})
	if err != nil {
		t.Fatalf("failed to create tester chain: %v", err)
	}
	for i := 0; i < len(blocks); i++ {
		if _, err := chain.InsertChain(blocks[i : i+1]); err != nil {
			t.Fatalf("block %d: failed to insert into chain: %v", i, err)
		}
		if _, err := chain.InsertChain(forks[i : i+1]); err != nil {
			t.Fatalf("fork %d: failed to insert into chain: %v", i, err)
		}
	}
	// Dereference all the recent tries and ensure no past trie is left in
	for i := 0; i < triesInMemory; i++ {
		chain.stateCache.TrieDB().Dereference(blocks[len(blocks)-1-i].Root())
		chain.stateCache.TrieDB().Dereference(forks[len(blocks)-1-i].Root())
	}
	if len(chain.stateCache.TrieDB().Nodes()) > 0 {
		t.Fatalf("stale tries still alive after garbase collection")
	}
}

// Tests that doing large reorgs works even if the state associated with the
// forking point is not available any more.
func TestLargeReorgTrieGC(t *testing.T) { // Generate the original common chain segment and the two competing forks engine := ethash.NewFaker() db := ethdb.NewMemDatabase() genesis := new(Genesis).MustCommit(db) shared, _ := GenerateChain(params.TestChainConfig, genesis, engine, db, 64, func(i int, b *BlockGen) { b.SetCoinbase(common.Address{1}) }) original, _ := GenerateChain(params.TestChainConfig, shared[len(shared)-1], engine, db, 2*triesInMemory, func(i int, b *BlockGen) { b.SetCoinbase(common.Address{2}) }) competitor, _ := GenerateChain(params.TestChainConfig, shared[len(shared)-1], engine, db, 2*triesInMemory+1, func(i int, b *BlockGen) { b.SetCoinbase(common.Address{3}) }) // Import the shared chain and the original canonical one diskdb := ethdb.NewMemDatabase() new(Genesis).MustCommit(diskdb) chain, err := NewBlockChain(diskdb, nil, params.TestChainConfig, engine, vm.Config{}) if err != nil { t.Fatalf("failed to create tester chain: %v", err) } if _, err := chain.InsertChain(shared); err != nil { t.Fatalf("failed to insert shared chain: %v", err) } if _, err := chain.InsertChain(original); err != nil { t.Fatalf("failed to insert shared chain: %v", err) } // Ensure that the state associated with the forking point is pruned away if node, _ := chain.stateCache.TrieDB().Node(shared[len(shared)-1].Root()); node != nil { t.Fatalf("common-but-old ancestor still cache") } // Import the competitor chain without exceeding the canonical's TD and ensure // we have not processed any of the blocks (protection against malicious blocks) if _, err := chain.InsertChain(competitor[:len(competitor)-2]); err != nil { t.Fatalf("failed to insert competitor chain: %v", err) } for i, block := range competitor[:len(competitor)-2] { if node, _ := chain.stateCache.TrieDB().Node(block.Root()); node != nil { t.Fatalf("competitor %d: low TD chain became processed", i) } } // Import the head of the competitor chain, triggering the reorg and ensure we // successfully reprocess all the 
stashed away blocks. if _, err := chain.InsertChain(competitor[len(competitor)-2:]); err != nil { t.Fatalf("failed to finalize competitor chain: %v", err) } for i, block := range competitor[:len(competitor)-triesInMemory] { if node, _ := chain.stateCache.TrieDB().Node(block.Root()); node != nil { t.Fatalf("competitor %d: competing chain state missing", i) } } } // Benchmarks large blocks with value transfers to non-existing accounts func benchmarkLargeNumberOfValueToNonexisting(b *testing.B, numTxs, numBlocks int, recipientFn func(uint64) common.Address, dataFn func(uint64) []byte) { var ( signer = types.HomesteadSigner{} testBankKey, _ = crypto.HexToECDSA("b71c71a67e1177ad4e901695e1b4b9ee17ae16c6668d313eac2f96dbcda3f291") testBankAddress = crypto.PubkeyToAddress(testBankKey.PublicKey) bankFunds = big.NewInt(100000000000000000) gspec = Genesis{ Config: params.TestChainConfig, Alloc: GenesisAlloc{ testBankAddress: {Balance: bankFunds}, common.HexToAddress("0xc0de"): { Code: []byte{0x60, 0x01, 0x50}, Balance: big.NewInt(0), }, // push 1, pop }, GasLimit: 100e6, // 100 M } ) // Generate the original common chain segment and the two competing forks engine := ethash.NewFaker() db := ethdb.NewMemDatabase() genesis := gspec.MustCommit(db) blockGenerator := func(i int, block *BlockGen) { block.SetCoinbase(common.Address{1}) for txi := 0; txi < numTxs; txi++ { uniq := uint64(i*numTxs + txi) recipient := recipientFn(uniq) //recipient := common.BigToAddress(big.NewInt(0).SetUint64(1337 + uniq)) tx, err := types.SignTx(types.NewTransaction(uniq, recipient, big.NewInt(1), params.TxGas, big.NewInt(1), nil), signer, testBankKey) if err != nil { b.Error(err) } block.AddTx(tx) } } shared, _ := GenerateChain(params.TestChainConfig, genesis, engine, db, numBlocks, blockGenerator) b.StopTimer() b.ResetTimer() for i := 0; i < b.N; i++ { // Import the shared chain and the original canonical one diskdb := ethdb.NewMemDatabase() gspec.MustCommit(diskdb) chain, err := NewBlockChain(diskdb, 
nil, params.TestChainConfig, engine, vm.Config{}) if err != nil { b.Fatalf("failed to create tester chain: %v", err) } b.StartTimer() if _, err := chain.InsertChain(shared); err != nil { b.Fatalf("failed to insert shared chain: %v", err) } b.StopTimer() if got := chain.CurrentBlock().Transactions().Len(); got != numTxs*numBlocks { b.Fatalf("Transactions were not included, expected %d, got %d", (numTxs * numBlocks), got) } } } func BenchmarkBlockChain_1x1000ValueTransferToNonexisting(b *testing.B) { var ( numTxs = 1000 numBlocks = 1 ) recipientFn := func(nonce uint64) common.Address { return common.BigToAddress(big.NewInt(0).SetUint64(1337 + nonce)) } dataFn := func(nonce uint64) []byte { return nil } benchmarkLargeNumberOfValueToNonexisting(b, numTxs, numBlocks, recipientFn, dataFn) } func BenchmarkBlockChain_1x1000ValueTransferToExisting(b *testing.B) { var ( numTxs = 1000 numBlocks = 1 ) b.StopTimer() b.ResetTimer() recipientFn := func(nonce uint64) common.Address { return common.BigToAddress(big.NewInt(0).SetUint64(1337)) } dataFn := func(nonce uint64) []byte { return nil } benchmarkLargeNumberOfValueToNonexisting(b, numTxs, numBlocks, recipientFn, dataFn) } func BenchmarkBlockChain_1x1000Executions(b *testing.B) { var ( numTxs = 1000 numBlocks = 1 ) b.StopTimer() b.ResetTimer() recipientFn := func(nonce uint64) common.Address { return common.BigToAddress(big.NewInt(0).SetUint64(0xc0de)) } dataFn := func(nonce uint64) []byte { return nil } benchmarkLargeNumberOfValueToNonexisting(b, numTxs, numBlocks, recipientFn, dataFn) }
{'repo_name': 'bitconch/bitconch-core', 'stars': '204', 'repo_language': 'C++', 'file_name': 'oom-score-adj.sh', 'mime_type': 'text/plain', 'hash': 594483145155492184, 'source_dataset': 'data'}
// RUN: rm -rf %t // RUN: mkdir %t // RUN: %hmaptool write %S/Inputs/double-quotes/a.hmap.json %t/a.hmap // RUN: %hmaptool write %S/Inputs/double-quotes/x.hmap.json %t/x.hmap // RUN: sed -e "s@TEST_DIR@%{/S:regex_replacement}/Inputs/double-quotes@g" \ // RUN: %S/Inputs/double-quotes/z.yaml > %t/z.yaml // The output with and without modules should be the same // RUN: %clang_cc1 \ // RUN: -I %t/x.hmap -iquote %t/a.hmap -ivfsoverlay %t/z.yaml \ // RUN: -F%S/Inputs/double-quotes -I%S/Inputs/double-quotes \ // RUN: -Wquoted-include-in-framework-header -fsyntax-only %s -verify // RUN: %clang_cc1 -fmodules -fimplicit-module-maps -fmodules-cache-path=%t/cache \ // RUN: -I %t/x.hmap -iquote %t/a.hmap -ivfsoverlay %t/z.yaml \ // RUN: -F%S/Inputs/double-quotes -I%S/Inputs/double-quotes \ // RUN: -Wquoted-include-in-framework-header -fsyntax-only %s \ // RUN: 2>%t/stderr // The same warnings show up when modules is on but -verify doesn't get it // because they only show up under the module A building context. // RUN: FileCheck --input-file=%t/stderr %s // CHECK: double-quoted include "A0.h" in framework header, expected angle-bracketed instead // CHECK: double-quoted include "B.h" in framework header, expected angle-bracketed instead // CHECK: double-quoted include "B.h" in framework header, expected angle-bracketed instead #import "A.h" #import <Z/Z.h> // Make sure we correctly handle paths that resemble frameworks, but aren't. #import "NotAFramework/Headers/Headers/Thing1.h" int bar() { return foo(); } // expected-warning@Inputs/double-quotes/A.framework/Headers/A.h:1{{double-quoted include "A0.h" in framework header, expected angle-bracketed instead}} // expected-warning@Inputs/double-quotes/A.framework/Headers/A.h:2{{double-quoted include "B.h" in framework header, expected angle-bracketed instead}} // expected-warning@Inputs/double-quotes/flat-header-path/Z.h:1{{double-quoted include "B.h" in framework header, expected angle-bracketed instead}}
{'repo_name': 'epiqc/ScaffCC', 'stars': '118', 'repo_language': 'C++', 'file_name': 'boolean_formula.x2y2.scaffold', 'mime_type': 'text/x-c', 'hash': 1046959497816063766, 'source_dataset': 'data'}
// An odd Mach-O file, with just a mach header with all but the magic field // and filetype zeros. The cputype and cpusubtype fields being zero are invalid, // but that does not mater for the most part to display some of the contents. RUN: llvm-objdump -private-headers %p/Inputs/macho-invalid-zero-ncmds -macho \ RUN: | FileCheck -check-prefix ZERO-NCMDS %s ZERO-NCMDS: MH_MAGIC_64 0 0 0x00 OBJECT 0 0 0x00000000 RUN: not llvm-objdump -macho -private-headers %p/Inputs/macho64-invalid-incomplete-load-command 2>&1 \ RUN: | FileCheck -check-prefix INCOMPLETE-LOADC %s INCOMPLETE-LOADC: truncated or malformed object (load command 0 extends past the end all load commands in the file) RUN: not llvm-objdump -macho -private-headers %p/Inputs/macho64-invalid-incomplete-load-command.1 2>&1 \ RUN: | FileCheck -check-prefix INCOMPLETE-LOADC-1 %s INCOMPLETE-LOADC-1: truncated or malformed object (load command 1 extends past the end all load commands in the file) RUN: not llvm-objdump -macho -private-headers %p/Inputs/macho-invalid-too-small-load-command 2>&1 \ RUN: | FileCheck -check-prefix SMALL-LOADC-SIZE %s RUN: not llvm-objdump -macho -private-headers %p/Inputs/macho64-invalid-too-small-load-command 2>&1 \ RUN: | FileCheck -check-prefix SMALL-LOADC-SIZE %s SMALL-LOADC-SIZE: truncated or malformed object (load commands extend past the end of the file) RUN: not llvm-objdump -macho -private-headers %p/Inputs/macho64-invalid-too-small-load-command.1 2>&1 \ RUN: | FileCheck -check-prefix SMALL-LOADC-SIZE-1 %s SMALL-LOADC-SIZE-1: truncated or malformed object (load command 1 with size less than 8 bytes) RUN: not llvm-objdump -private-headers %p/Inputs/macho-invalid-too-small-segment-load-command 2>&1 \ RUN: | FileCheck -check-prefix MULTIPLE-NOT-4 %s MULTIPLE-NOT-4: truncated or malformed object (load command 0 cmdsize not a multiple of 4) RUN: not llvm-objdump -private-headers %p/Inputs/macho-invalid-too-small-segment-load-command.1 2>&1 \ RUN: | FileCheck -check-prefix 
SMALL-SEGLOADC-SIZE %s SMALL-SEGLOADC-SIZE: truncated or malformed object (load command 0 LC_SEGMENT cmdsize too small) RUN: not llvm-objdump -private-headers %p/Inputs/macho64-invalid-too-small-segment-load-command 2>&1 \ RUN: | FileCheck -check-prefix MULTIPLE-NOT-8 %s MULTIPLE-NOT-8: truncated or malformed object (load command 0 cmdsize not a multiple of 8) RUN: not llvm-objdump -private-headers %p/Inputs/macho-invalid-no-size-for-sections 2>&1 \ RUN: | FileCheck -check-prefix TOO-MANY-SECTS %s TOO-MANY-SECTS: truncated or malformed object (load command 0 inconsistent cmdsize in LC_SEGMENT for the number of sections) RUN: not llvm-objdump -private-headers %p/Inputs/macho64-invalid-no-size-for-sections 2>&1 \ RUN: | FileCheck -check-prefix TOO-MANY-SECTS-64 %s TOO-MANY-SECTS-64: truncated or malformed object (load command 0 inconsistent cmdsize in LC_SEGMENT_64 for the number of sections) RUN: not llvm-objdump -macho -t %p/Inputs/macho-invalid-bad-symbol-index 2>&1 \ RUN: | FileCheck -check-prefix BAD-SYMBOL %s BAD-SYMBOL: truncated or malformed object (ilocalsym plus nlocalsym in LC_DYSYMTAB load command extends past the end of the symbol table) RUN: llvm-objdump -macho -t %p/Inputs/macho-valid-0-nsyms 2>&1 \ RUN: | FileCheck -check-prefix ZERO-NSYMS %s ZERO-NSYMS: SYMBOL TABLE RUN: not llvm-objdump -t %p/Inputs/macho-invalid-symbol-name-past-eof 2>&1 \ RUN: | FileCheck -check-prefix NAME-PAST-EOF %s NAME-PAST-EOF: truncated or malformed object (bad string index: 4261412866 for symbol at index 0) RUN: llvm-nm -pa %p/Inputs/macho-invalid-symbol-name-past-eof 2>&1 \ RUN: | FileCheck -check-prefix NAME-PAST-EOF-nm-pa %s NAME-PAST-EOF-nm-pa: 0000000000000000 - 00 0000 SO bad string index RUN: llvm-nm -pax %p/Inputs/macho-invalid-symbol-name-past-eof 2>&1 \ RUN: | FileCheck -check-prefix NAME-PAST-EOF-nm-pax %s NAME-PAST-EOF-nm-pax: 0000000000000000 64 00 0000 fe000002 bad string index RUN: not llvm-objdump -t %p/Inputs/macho-bad-archive1.a 2>&1 \ RUN: | FileCheck 
-check-prefix NAME-PAST-EOF-ARCHIVE %s NAME-PAST-EOF-ARCHIVE: macho-bad-archive1.a(macho-invalid-symbol-name-past-eof) truncated or malformed object (bad string index: 4261412866 for symbol at index 0) RUN: not llvm-objdump -macho -arch all -t %p/Inputs/macho-universal-bad1.x86_64.i386 2>&1 \ RUN: | FileCheck -check-prefix NAME-PAST-EOF-FAT %s NAME-PAST-EOF-FAT: macho-universal-bad1.x86_64.i386 (for architecture x86_64) truncated or malformed object (bad string index: 4261412866 for symbol at index 0) RUN: not llvm-objdump -macho -arch all -t %p/Inputs/macho-universal-archive-bad1.x86_64.i386 2>&1 \ RUN: | FileCheck -check-prefix NAME-PAST-EOF-FAT-ARCHIVE %s NAME-PAST-EOF-FAT-ARCHIVE: macho-universal-archive-bad1.x86_64.i386(macho-invalid-symbol-name-past-eof) (for architecture x86_64) truncated or malformed object (bad string index: 4261412866 for symbol at index 0) RUN: llvm-nm %p/Inputs/macho-invalid-section-index-getSectionRawName 2>&1 \ RUN: | FileCheck -check-prefix INVALID-SECTION-IDX-SYMBOL-SEC %s INVALID-SECTION-IDX-SYMBOL-SEC: 0000000100000000 S __mh_execute_header RUN: llvm-nm -m %p/Inputs/macho-invalid-section-index-getSectionRawName 2>&1 \ RUN: | FileCheck -check-prefix INVALID-SECTION-IDX-SYMBOL-SEC-m %s INVALID-SECTION-IDX-SYMBOL-SEC-m: 0000000100000000 (?,?) 
[referenced dynamically] external __mh_execute_header RUN: llvm-nm -pax %p/Inputs/macho-invalid-section-index-getSectionRawName 2>&1 \ RUN: | FileCheck -check-prefix INVALID-SECTION-IDX-SYMBOL-SEC-pax %s INVALID-SECTION-IDX-SYMBOL-SEC-pax: 0000000100000000 0f 42 0010 00000065 __mh_execute_header RUN: not llvm-objdump -t %p/Inputs/macho-invalid-section-index-getSectionRawName 2>&1 \ RUN: | FileCheck -check-prefix INVALID-SECTION-IDX-SYMBOL-SEC-objdump %s INVALID-SECTION-IDX-SYMBOL-SEC-objdump: truncated or malformed object (bad section index: 66 for symbol at index 8) RUN: not llvm-objdump -macho -private-headers %p/Inputs/macho-invalid-header 2>&1 | FileCheck -check-prefix INVALID-HEADER %s INVALID-HEADER: The file was not recognized as a valid object file RUN: not llvm-objdump -macho -private-headers %p/Inputs/macho64-invalid-incomplete-segment-load-command 2>&1 | FileCheck -check-prefix INCOMPLETE-SEGMENT-LOADC %s INCOMPLETE-SEGMENT-LOADC: truncated or malformed object (load commands extend past the end of the file) RUN: not llvm-objdump -macho -private-headers %p/Inputs/macho-bad-archive2.a 2>&1 | FileCheck -check-prefix INCOMPLETE-SEGMENT-LOADC-ARCHIVE %s INCOMPLETE-SEGMENT-LOADC-ARCHIVE: macho-bad-archive2.a(macho64-invalid-incomplete-segment-load-command) truncated or malformed object (load commands extend past the end of the file) RUN: not llvm-objdump -macho -private-headers -arch all %p/Inputs/macho-universal-bad2.x86_64.i386 2>&1 | FileCheck -check-prefix INCOMPLETE-SEGMENT-LOADC-FAT %s INCOMPLETE-SEGMENT-LOADC-FAT: macho-universal-bad2.x86_64.i386 (for architecture x86_64) truncated or malformed object (load commands extend past the end of the file) RUN: not llvm-objdump -macho -private-headers -arch all %p/Inputs/macho-universal-archive-bad2.x86_64.i386 2>&1 | FileCheck -check-prefix INCOMPLETE-SEGMENT-LOADC-FAT-ARCHIVE %s INCOMPLETE-SEGMENT-LOADC-FAT-ARCHIVE: macho-universal-archive-bad2.x86_64.i386(macho64-invalid-incomplete-segment-load-command) (for 
architecture x86_64) truncated or malformed object (load commands extend past the end of the file) RUN: not llvm-objdump -macho -universal-headers %p/Inputs/macho-invalid-fat 2>&1 | FileCheck -check-prefix INVALID-FAT %s INVALID-FAT: truncated or malformed fat file (fat_arch_64 structs would extend past the end of the file) RUN: not llvm-objdump -macho -private-headers -arch all %p/Inputs/macho-invalid-fat.obj.elf-x86_64 2>&1 | FileCheck -check-prefix INVALID-FAT-ELF %s INVALID-FAT-ELF: Mach-O universal file: {{.*}}/macho-invalid-fat.obj.elf-x86_64 for architecture x86_64 is not a Mach-O file or an archive file
{'repo_name': 'ucb-bar/esp-llvm', 'stars': '106', 'repo_language': 'C++', 'file_name': 'float-intrinsics-double.ll', 'mime_type': 'text/plain', 'hash': -4508309053448486742, 'source_dataset': 'data'}
from .. utils import TranspileTestCase, BuiltinFunctionTestCase class SumTests(TranspileTestCase): def test_sum_list(self): self.assertCodeExecution(""" print(sum([1, 2, 3, 4, 5, 6, 7])) print(sum([[1, 2], [3, 4], [5, 6]], [])) """) def test_sum_tuple(self): self.assertCodeExecution(""" print(sum((1, 2, 3, 4, 5, 6, 7))) """) def test_sum_iterator(self): self.assertCodeExecution(""" i = iter([1, 2]) print(sum(i)) print(sum(i)) """) def test_sum_mix_floats_and_ints(self): self.assertCodeExecution(""" print(sum([1, 1.414, 2, 3.14159])) """) def test_sum_frozenset(self): self.assertCodeExecution(""" print(sum(frozenset([1, 1.414, 2, 3.14159]))) """) def test_sum_set(self): self.assertCodeExecution(""" print(sum({1, 1.414, 2, 3.14159})) """) def test_sum_dict(self): self.assertCodeExecution(""" print(sum({1: 1.414, 2: 3.14159})) """) def test_sum_generator_expressions(self): self.assertCodeExecution(""" print(sum(x ** 2 for x in [3, 4])) """) class BuiltinSumFunctionTests(BuiltinFunctionTestCase, TranspileTestCase): functions = ["sum"] not_implemented = [ ]
{'repo_name': 'beeware/voc', 'stars': '838', 'repo_language': 'Python', 'file_name': 'constants.py', 'mime_type': 'text/x-python', 'hash': 4325140860371721514, 'source_dataset': 'data'}
<?php namespace Aacotroneo\Saml2; use OneLogin\Saml2\Auth as OneLogin_Saml2_Auth; use OneLogin\Saml2\Error as OneLogin_Saml2_Error; use Aacotroneo\Saml2\Events\Saml2LogoutEvent; use Log; use Psr\Log\InvalidArgumentException; use URL; class Saml2Auth { /** * @var \OneLogin_Saml2_Auth */ protected $auth; protected $samlAssertion; function __construct(OneLogin_Saml2_Auth $auth) { $this->auth = $auth; } /** * Load the IDP config file and construct a OneLogin\Saml2\Auth (aliased here as OneLogin_Saml2_Auth). * Pass the returned value to the Saml2Auth constructor. * * @param string $idpName The target IDP name, must correspond to config file 'config/saml2/${idpName}_idp_settings.php' * @return OneLogin_Saml2_Auth Contructed OneLogin Saml2 configuration of the requested IDP * @throws \InvalidArgumentException if $idpName is empty * @throws \Exception if key or certificate is configured to a file path and the file is not found. */ public static function loadOneLoginAuthFromIpdConfig($idpName) { if (empty($idpName)) { throw new \InvalidArgumentException("IDP name required."); } $config = config('saml2.'.$idpName.'_idp_settings'); if (is_null($config)) { throw new \InvalidArgumentException('"' . $idpName . 
'" is not a valid IdP.'); } if (empty($config['sp']['entityId'])) { $config['sp']['entityId'] = URL::route('saml2_metadata', $idpName); } if (empty($config['sp']['assertionConsumerService']['url'])) { $config['sp']['assertionConsumerService']['url'] = URL::route('saml2_acs', $idpName); } if (!empty($config['sp']['singleLogoutService']) && empty($config['sp']['singleLogoutService']['url'])) { $config['sp']['singleLogoutService']['url'] = URL::route('saml2_sls', $idpName); } if (strpos($config['sp']['privateKey'], 'file://')===0) { $config['sp']['privateKey'] = static::extractPkeyFromFile($config['sp']['privateKey']); } if (strpos($config['sp']['x509cert'], 'file://')===0) { $config['sp']['x509cert'] = static::extractCertFromFile($config['sp']['x509cert']); } if (strpos($config['idp']['x509cert'], 'file://')===0) { $config['idp']['x509cert'] = static::extractCertFromFile($config['idp']['x509cert']); } return new OneLogin_Saml2_Auth($config); } /** * @return bool if a valid user was fetched from the saml assertion this request. */ function isAuthenticated() { $auth = $this->auth; return $auth->isAuthenticated(); } /** * The user info from the assertion * @return Saml2User */ function getSaml2User() { return new Saml2User($this->auth); } /** * The ID of the last message processed * @return String */ function getLastMessageId() { return $this->auth->getLastMessageId(); } /** * Initiate a saml2 login flow. It will redirect! Before calling this, check if user is * authenticated (here in saml2). That would be true when the assertion was received this request. * * @param string|null $returnTo The target URL the user should be returned to after login. 
* @param array $parameters Extra parameters to be added to the GET * @param bool $forceAuthn When true the AuthNReuqest will set the ForceAuthn='true' * @param bool $isPassive When true the AuthNReuqest will set the Ispassive='true' * @param bool $stay True if we want to stay (returns the url string) False to redirect * @param bool $setNameIdPolicy When true the AuthNReuqest will set a nameIdPolicy element * * @return string|null If $stay is True, it return a string with the SLO URL + LogoutRequest + parameters */ function login($returnTo = null, $parameters = array(), $forceAuthn = false, $isPassive = false, $stay = false, $setNameIdPolicy = true) { $auth = $this->auth; return $auth->login($returnTo, $parameters, $forceAuthn, $isPassive, $stay, $setNameIdPolicy); } /** * Initiate a saml2 logout flow. It will close session on all other SSO services. You should close * local session if applicable. * * @param string|null $returnTo The target URL the user should be returned to after logout. * @param string|null $nameId The NameID that will be set in the LogoutRequest. * @param string|null $sessionIndex The SessionIndex (taken from the SAML Response in the SSO process). * @param string|null $nameIdFormat The NameID Format will be set in the LogoutRequest. * @param bool $stay True if we want to stay (returns the url string) False to redirect * @param string|null $nameIdNameQualifier The NameID NameQualifier will be set in the LogoutRequest. 
* * @return string|null If $stay is True, it return a string with the SLO URL + LogoutRequest + parameters * * @throws OneLogin_Saml2_Error */ function logout($returnTo = null, $nameId = null, $sessionIndex = null, $nameIdFormat = null, $stay = false, $nameIdNameQualifier = null) { $auth = $this->auth; return $auth->logout($returnTo, [], $nameId, $sessionIndex, $stay, $nameIdFormat, $nameIdNameQualifier); } /** * Process a Saml response (assertion consumer service) * When errors are encountered, it returns an array with proper description */ function acs() { /** @var $auth OneLogin_Saml2_Auth */ $auth = $this->auth; $auth->processResponse(); $errors = $auth->getErrors(); if (!empty($errors)) { return array('error' => $errors, 'last_error_reason' => $auth->getLastErrorReason()); } if (!$auth->isAuthenticated()) { return array('error' => 'Could not authenticate', 'last_error_reason' => $auth->getLastErrorReason()); } return null; } /** * Process a Saml response (assertion consumer service) * returns an array with errors if it can not logout */ function sls($idp, $retrieveParametersFromServer = false) { $auth = $this->auth; // destroy the local session by firing the Logout event $keep_local_session = false; $session_callback = function () use ($idp) { event(new Saml2LogoutEvent($idp)); }; $auth->processSLO($keep_local_session, null, $retrieveParametersFromServer, $session_callback); $errors = $auth->getErrors(); if (!empty($errors)) { return array('error' => $errors, 'last_error_reason' => $auth->getLastErrorReason()); } return null; } /** * Show metadata about the local sp. 
Use this to configure your saml2 IDP * @return mixed xml string representing metadata * @throws \InvalidArgumentException if metadata is not correctly set */ function getMetadata() { $auth = $this->auth; $settings = $auth->getSettings(); $metadata = $settings->getSPMetadata(); $errors = $settings->validateMetadata($metadata); if (empty($errors)) { return $metadata; } else { throw new InvalidArgumentException( 'Invalid SP metadata: ' . implode(', ', $errors), OneLogin_Saml2_Error::METADATA_SP_INVALID ); } } /** * Get the last error reason from \OneLogin_Saml2_Auth, useful for error debugging. * @see \OneLogin_Saml2_Auth::getLastErrorReason() * @return string */ function getLastErrorReason() { return $this->auth->getLastErrorReason(); } protected static function extractPkeyFromFile($path) { $res = openssl_get_privatekey($path); if (empty($res)) { throw new \Exception('Could not read private key-file at path \'' . $path . '\''); } openssl_pkey_export($res, $pkey); openssl_pkey_free($res); return static::extractOpensslString($pkey, 'PRIVATE KEY'); } protected static function extractCertFromFile($path) { $res = openssl_x509_read(file_get_contents($path)); if (empty($res)) { throw new \Exception('Could not read X509 certificate-file at path \'' . $path . '\''); } openssl_x509_export($res, $cert); openssl_x509_free($res); return static::extractOpensslString($cert, 'CERTIFICATE'); } protected static function extractOpensslString($keyString, $delimiter) { $keyString = str_replace(["\r", "\n"], "", $keyString); $regex = '/-{5}BEGIN(?:\s|\w)+' . $delimiter . '-{5}\s*(.+?)\s*-{5}END(?:\s|\w)+' . $delimiter . '-{5}/m'; preg_match($regex, $keyString, $matches); return empty($matches[1]) ? '' : $matches[1]; } }
{'repo_name': 'aacotroneo/laravel-saml2', 'stars': '389', 'repo_language': 'PHP', 'file_name': 'Saml2AuthTest.php', 'mime_type': 'text/x-php', 'hash': -7190102957174493198, 'source_dataset': 'data'}
// Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "ios/chrome/browser/ios_chrome_io_thread.h" #include "ios/chrome/browser/net/ios_chrome_network_delegate.h" #include "ios/chrome/common/channel_info.h" #if !defined(__has_feature) || !__has_feature(objc_arc) #error "This file requires ARC support." #endif IOSChromeIOThread::IOSChromeIOThread(PrefService* local_state, net_log::ChromeNetLog* net_log) : IOSIOThread(local_state, net_log) { IOSChromeNetworkDelegate::InitializePrefsOnUIThread(nullptr, local_state); } IOSChromeIOThread::~IOSChromeIOThread() = default; std::unique_ptr<net::NetworkDelegate> IOSChromeIOThread::CreateSystemNetworkDelegate() { return std::make_unique<IOSChromeNetworkDelegate>(); } std::string IOSChromeIOThread::GetChannelString() const { return ::GetChannelString(); }
{'repo_name': 'kiwibrowser/src', 'stars': '728', 'repo_language': 'None', 'file_name': '3db76d1dd7d3a759dccc3f8fa7f68675c080cb095e4881063a6b850fdd68b8bc.pem', 'mime_type': 'text/plain', 'hash': 3488913403214720866, 'source_dataset': 'data'}
<?xml version="1.0" encoding="UTF-8"?> <!-- Copyright (C) 2020 The Android Open Source Project Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --> <resources xmlns:android="http://schemas.android.com/apk/res/android" xmlns:xliff="urn:oasis:names:tc:xliff:document:1.2"> <string name="tethered_notification_title" msgid="6426563586025792944">"Има активна споделена връзка или точка за достъп"</string> <string name="tethered_notification_message" msgid="64800879503420696">"Докоснете, за да настроите."</string> <string name="disable_tether_notification_title" msgid="3004509127903564191">"Функцията за тетъринг е деактивирана"</string> <string name="disable_tether_notification_message" msgid="6717523799293901476">"Свържете се с администратора си за подробности"</string> <string name="notification_channel_tethering_status" msgid="2663463891530932727">"Състояние на функцията за точка за достъп и тетъринг"</string> <string name="no_upstream_notification_title" msgid="1204601824631788482"></string> <string name="no_upstream_notification_message" msgid="8586582938243032621"></string> <string name="no_upstream_notification_disable_button" msgid="8800919436924640822"></string> <string name="upstream_roaming_notification_title" msgid="4772373823198997030"></string> <string name="upstream_roaming_notification_message" msgid="3985577843181551650"></string> </resources>
{'repo_name': 'LineageOS/android_frameworks_base', 'stars': '249', 'repo_language': 'Java', 'file_name': 'strings.xml', 'mime_type': 'text/xml', 'hash': 8708117699439270335, 'source_dataset': 'data'}
setTimeout(() => console.log('Une seconde plus tard'), 1000); setTimeout(() => { console.log('Deux secondes plus tard'); // <1> }, 2000); process.on('exit', () => { console.log('Le processus se termine'); // <2> });
{'repo_name': 'oncletom/nodebook', 'stars': '277', 'repo_language': 'JavaScript', 'file_name': 'tcomb.js', 'mime_type': 'text/plain', 'hash': 874089920429202834, 'source_dataset': 'data'}
<?php /* * Copyright 2014 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ class Google_Service_Vision_GoogleCloudVisionV1p2beta1WebDetection extends Google_Collection { protected $collection_key = 'webEntities'; protected $bestGuessLabelsType = 'Google_Service_Vision_GoogleCloudVisionV1p2beta1WebDetectionWebLabel'; protected $bestGuessLabelsDataType = 'array'; protected $fullMatchingImagesType = 'Google_Service_Vision_GoogleCloudVisionV1p2beta1WebDetectionWebImage'; protected $fullMatchingImagesDataType = 'array'; protected $pagesWithMatchingImagesType = 'Google_Service_Vision_GoogleCloudVisionV1p2beta1WebDetectionWebPage'; protected $pagesWithMatchingImagesDataType = 'array'; protected $partialMatchingImagesType = 'Google_Service_Vision_GoogleCloudVisionV1p2beta1WebDetectionWebImage'; protected $partialMatchingImagesDataType = 'array'; protected $visuallySimilarImagesType = 'Google_Service_Vision_GoogleCloudVisionV1p2beta1WebDetectionWebImage'; protected $visuallySimilarImagesDataType = 'array'; protected $webEntitiesType = 'Google_Service_Vision_GoogleCloudVisionV1p2beta1WebDetectionWebEntity'; protected $webEntitiesDataType = 'array'; /** * @param Google_Service_Vision_GoogleCloudVisionV1p2beta1WebDetectionWebLabel */ public function setBestGuessLabels($bestGuessLabels) { $this->bestGuessLabels = $bestGuessLabels; } /** * @return Google_Service_Vision_GoogleCloudVisionV1p2beta1WebDetectionWebLabel */ public function getBestGuessLabels() { return 
$this->bestGuessLabels; } /** * @param Google_Service_Vision_GoogleCloudVisionV1p2beta1WebDetectionWebImage */ public function setFullMatchingImages($fullMatchingImages) { $this->fullMatchingImages = $fullMatchingImages; } /** * @return Google_Service_Vision_GoogleCloudVisionV1p2beta1WebDetectionWebImage */ public function getFullMatchingImages() { return $this->fullMatchingImages; } /** * @param Google_Service_Vision_GoogleCloudVisionV1p2beta1WebDetectionWebPage */ public function setPagesWithMatchingImages($pagesWithMatchingImages) { $this->pagesWithMatchingImages = $pagesWithMatchingImages; } /** * @return Google_Service_Vision_GoogleCloudVisionV1p2beta1WebDetectionWebPage */ public function getPagesWithMatchingImages() { return $this->pagesWithMatchingImages; } /** * @param Google_Service_Vision_GoogleCloudVisionV1p2beta1WebDetectionWebImage */ public function setPartialMatchingImages($partialMatchingImages) { $this->partialMatchingImages = $partialMatchingImages; } /** * @return Google_Service_Vision_GoogleCloudVisionV1p2beta1WebDetectionWebImage */ public function getPartialMatchingImages() { return $this->partialMatchingImages; } /** * @param Google_Service_Vision_GoogleCloudVisionV1p2beta1WebDetectionWebImage */ public function setVisuallySimilarImages($visuallySimilarImages) { $this->visuallySimilarImages = $visuallySimilarImages; } /** * @return Google_Service_Vision_GoogleCloudVisionV1p2beta1WebDetectionWebImage */ public function getVisuallySimilarImages() { return $this->visuallySimilarImages; } /** * @param Google_Service_Vision_GoogleCloudVisionV1p2beta1WebDetectionWebEntity */ public function setWebEntities($webEntities) { $this->webEntities = $webEntities; } /** * @return Google_Service_Vision_GoogleCloudVisionV1p2beta1WebDetectionWebEntity */ public function getWebEntities() { return $this->webEntities; } }
{'repo_name': 'jbroadway/elefant', 'stars': '179', 'repo_language': 'PHP', 'file_name': 'jquery.js', 'mime_type': 'text/plain', 'hash': 4746663368067453923, 'source_dataset': 'data'}
fileFormatVersion: 2 guid: c0bcc6e4593cd8f4da22acb254a4974e timeCreated: 1471817125 licenseType: Free NativeFormatImporter: userData: assetBundleName: assetBundleVariant:
{'repo_name': 'keawstudio/Clothing-System-3D', 'stars': '109', 'repo_language': 'C#', 'file_name': 'documentation.html', 'mime_type': 'text/html', 'hash': 7404800227698461625, 'source_dataset': 'data'}
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "content/browser/storage_partition_impl_map.h" #include <unordered_set> #include <utility> #include "base/bind.h" #include "base/bind_helpers.h" #include "base/callback.h" #include "base/command_line.h" #include "base/files/file_enumerator.h" #include "base/files/file_path.h" #include "base/files/file_util.h" #include "base/location.h" #include "base/macros.h" #include "base/single_thread_task_runner.h" #include "base/strings/string_number_conversions.h" #include "base/strings/string_util.h" #include "base/strings/stringprintf.h" #include "base/task/post_task.h" #include "base/task/thread_pool.h" #include "base/threading/thread_task_runner_handle.h" #include "build/build_config.h" #include "content/browser/appcache/chrome_appcache_service.h" #include "content/browser/background_fetch/background_fetch_context.h" #include "content/browser/blob_storage/chrome_blob_storage_context.h" #include "content/browser/code_cache/generated_code_cache_context.h" #include "content/browser/cookie_store/cookie_store_context.h" #include "content/browser/file_system/browser_file_system_helper.h" #include "content/browser/loader/prefetch_url_loader_service.h" #include "content/browser/resource_context_impl.h" #include "content/browser/storage_partition_impl.h" #include "content/browser/webui/url_data_manager_backend.h" #include "content/common/service_worker/service_worker_utils.h" #include "content/public/browser/browser_context.h" #include "content/public/browser/browser_task_traits.h" #include "content/public/browser/browser_thread.h" #include "content/public/browser/content_browser_client.h" #include "content/public/browser/storage_partition.h" #include "content/public/common/content_client.h" #include "content/public/common/content_constants.h" #include "content/public/common/content_features.h" #include 
"content/public/common/content_switches.h" #include "content/public/common/url_constants.h" #include "crypto/sha2.h" #include "services/network/public/cpp/features.h" #include "storage/browser/blob/blob_storage_context.h" namespace content { namespace { // These constants are used to create the directory structure under the profile // where renderers with a non-default storage partition keep their persistent // state. This will contain a set of directories that partially mirror the // directory structure of BrowserContext::GetPath(). // // The kStoragePartitionDirname contains an extensions directory which is // further partitioned by extension id, followed by another level of directories // for the "default" extension storage partition and one directory for each // persistent partition used by a webview tag. Example: // // Storage/ext/ABCDEF/def // Storage/ext/ABCDEF/hash(partition name) // // The code in GetStoragePartitionPath() constructs these path names. // // TODO(nasko): Move extension related path code out of content. const base::FilePath::CharType kStoragePartitionDirname[] = FILE_PATH_LITERAL("Storage"); const base::FilePath::CharType kExtensionsDirname[] = FILE_PATH_LITERAL("ext"); const base::FilePath::CharType kDefaultPartitionDirname[] = FILE_PATH_LITERAL("def"); const base::FilePath::CharType kTrashDirname[] = FILE_PATH_LITERAL("trash"); // Because partition names are user specified, they can be arbitrarily long // which makes them unsuitable for paths names. We use a truncation of a // SHA256 hash to perform a deterministic shortening of the string. The // kPartitionNameHashBytes constant controls the length of the truncation. // We use 6 bytes, which gives us 99.999% reliability against collisions over // 1 million partition domains. // // Analysis: // We assume that all partition names within one partition domain are // controlled by the the same entity. 
Thus there is no chance for adverserial // attack and all we care about is accidental collision. To get 5 9s over // 1 million domains, we need the probability of a collision in any one domain // to be // // p < nroot(1000000, .99999) ~= 10^-11 // // We use the following birthday attack approximation to calculate the max // number of unique names for this probability: // // n(p,H) = sqrt(2*H * ln(1/(1-p))) // // For a 6-byte hash, H = 2^(6*8). n(10^-11, H) ~= 75 // // An average partition domain is likely to have less than 10 unique // partition names which is far lower than 75. // // Note, that for 4 9s of reliability, the limit is 237 partition names per // partition domain. const int kPartitionNameHashBytes = 6; // Needed for selecting all files in ObliterateOneDirectory() below. #if defined(OS_POSIX) const int kAllFileTypes = base::FileEnumerator::FILES | base::FileEnumerator::DIRECTORIES | base::FileEnumerator::SHOW_SYM_LINKS; #else const int kAllFileTypes = base::FileEnumerator::FILES | base::FileEnumerator::DIRECTORIES; #endif base::FilePath GetStoragePartitionDomainPath( const std::string& partition_domain) { CHECK(base::IsStringUTF8(partition_domain)); return base::FilePath(kStoragePartitionDirname).Append(kExtensionsDirname) .Append(base::FilePath::FromUTF8Unsafe(partition_domain)); } // Helper function for doing a depth-first deletion of the data on disk. // Examines paths directly in |current_dir| (no recursion) and tries to // delete from disk anything that is in, or isn't a parent of something in // |paths_to_keep|. Paths that need further expansion are added to // |paths_to_consider|. 
void ObliterateOneDirectory(const base::FilePath& current_dir, const std::vector<base::FilePath>& paths_to_keep, std::vector<base::FilePath>* paths_to_consider) { CHECK(current_dir.IsAbsolute()); base::FileEnumerator enumerator(current_dir, false, kAllFileTypes); for (base::FilePath to_delete = enumerator.Next(); !to_delete.empty(); to_delete = enumerator.Next()) { // Enum tracking which of the 3 possible actions to take for |to_delete|. enum { kSkip, kEnqueue, kDelete } action = kDelete; for (auto to_keep = paths_to_keep.begin(); to_keep != paths_to_keep.end(); ++to_keep) { if (to_delete == *to_keep) { action = kSkip; break; } else if (to_delete.IsParent(*to_keep)) { // |to_delete| contains a path to keep. Add to stack for further // processing. action = kEnqueue; break; } } switch (action) { case kDelete: base::DeleteFileRecursively(to_delete); break; case kEnqueue: paths_to_consider->push_back(to_delete); break; case kSkip: break; } } } // Synchronously attempts to delete |unnormalized_root|, preserving only // entries in |paths_to_keep|. If there are no entries in |paths_to_keep| on // disk, then it completely removes |unnormalized_root|. All paths must be // absolute paths. void BlockingObliteratePath( const base::FilePath& unnormalized_browser_context_root, const base::FilePath& unnormalized_root, const std::vector<base::FilePath>& paths_to_keep, const scoped_refptr<base::TaskRunner>& closure_runner, base::OnceClosure on_gc_required) { // Early exit required because MakeAbsoluteFilePath() will fail on POSIX // if |unnormalized_root| does not exist. This is safe because there is // nothing to do in this situation anwyays. if (!base::PathExists(unnormalized_root)) { return; } // Never try to obliterate things outside of the browser context root or the // browser context root itself. Die hard. 
base::FilePath root = base::MakeAbsoluteFilePath(unnormalized_root); base::FilePath browser_context_root = base::MakeAbsoluteFilePath(unnormalized_browser_context_root); CHECK(!root.empty()); CHECK(!browser_context_root.empty()); CHECK(browser_context_root.IsParent(root) && browser_context_root != root); // Reduce |paths_to_keep| set to those under the root and actually on disk. std::vector<base::FilePath> valid_paths_to_keep; for (auto it = paths_to_keep.begin(); it != paths_to_keep.end(); ++it) { if (root.IsParent(*it) && base::PathExists(*it)) valid_paths_to_keep.push_back(*it); } // If none of the |paths_to_keep| are valid anymore then we just whack the // root and be done with it. Otherwise, signal garbage collection and do // a best-effort delete of the on-disk structures. if (valid_paths_to_keep.empty()) { base::DeleteFileRecursively(root); return; } closure_runner->PostTask(FROM_HERE, std::move(on_gc_required)); // Otherwise, start at the root and delete everything that is not in // |valid_paths_to_keep|. std::vector<base::FilePath> paths_to_consider; paths_to_consider.push_back(root); while(!paths_to_consider.empty()) { base::FilePath path = paths_to_consider.back(); paths_to_consider.pop_back(); ObliterateOneDirectory(path, valid_paths_to_keep, &paths_to_consider); } } // Ensures each path in |active_paths| is a direct child of storage_root. 
void NormalizeActivePaths(const base::FilePath& storage_root, std::unordered_set<base::FilePath>* active_paths) { std::unordered_set<base::FilePath> normalized_active_paths; for (auto iter = active_paths->begin(); iter != active_paths->end(); ++iter) { base::FilePath relative_path; if (!storage_root.AppendRelativePath(*iter, &relative_path)) continue; std::vector<base::FilePath::StringType> components; relative_path.GetComponents(&components); DCHECK(!relative_path.empty()); normalized_active_paths.insert(storage_root.Append(components.front())); } active_paths->swap(normalized_active_paths); } // Deletes all entries inside the |storage_root| that are not in the // |active_paths|. Deletion is done in 2 steps: // // (1) Moving all garbage collected paths into a trash directory. // (2) Asynchronously deleting the trash directory. // // The deletion is asynchronous because after (1) completes, calling code can // safely continue to use the paths that had just been garbage collected // without fear of race conditions. // // This code also ignores failed moves rather than attempting a smarter retry. // Moves shouldn't fail here unless there is some out-of-band error (eg., // FS corruption). Retry logic is dangerous in the general case because // there is not necessarily a guaranteed case where the logic may succeed. // // This function is still named BlockingGarbageCollect() because it does // execute a few filesystem operations synchronously. 
void BlockingGarbageCollect( const base::FilePath& storage_root, const scoped_refptr<base::TaskRunner>& file_access_runner, std::unique_ptr<std::unordered_set<base::FilePath>> active_paths) { CHECK(storage_root.IsAbsolute()); NormalizeActivePaths(storage_root, active_paths.get()); base::FileEnumerator enumerator(storage_root, false, kAllFileTypes); base::FilePath trash_directory; if (!base::CreateTemporaryDirInDir(storage_root, kTrashDirname, &trash_directory)) { // Unable to continue without creating the trash directory so give up. return; } for (base::FilePath path = enumerator.Next(); !path.empty(); path = enumerator.Next()) { if (active_paths->find(path) == active_paths->end() && path != trash_directory) { // Since |trash_directory| is unique for each run of this function there // can be no colllisions on the move. base::Move(path, trash_directory.Append(path.BaseName())); } } file_access_runner->PostTask( FROM_HERE, base::BindOnce(base::GetDeletePathRecursivelyCallback(), trash_directory)); } } // namespace // static base::FilePath StoragePartitionImplMap::GetStoragePartitionPath( const std::string& partition_domain, const std::string& partition_name) { if (partition_domain.empty()) return base::FilePath(); base::FilePath path = GetStoragePartitionDomainPath(partition_domain); // TODO(ajwong): Mangle in-memory into this somehow, either by putting // it into the partition_name, or by manually adding another path component // here. Otherwise, it's possible to have an in-memory StoragePartition and // a persistent one that return the same FilePath for GetPath(). if (!partition_name.empty()) { // For analysis of why we can ignore collisions, see the comment above // kPartitionNameHashBytes. 
char buffer[kPartitionNameHashBytes]; crypto::SHA256HashString(partition_name, &buffer[0], sizeof(buffer)); return path.AppendASCII(base::HexEncode(buffer, sizeof(buffer))); } return path.Append(kDefaultPartitionDirname); } StoragePartitionImplMap::StoragePartitionImplMap( BrowserContext* browser_context) : browser_context_(browser_context), file_access_runner_(base::ThreadPool::CreateSequencedTaskRunner( {base::MayBlock(), base::TaskPriority::BEST_EFFORT})), resource_context_initialized_(false) {} StoragePartitionImplMap::~StoragePartitionImplMap() { } StoragePartitionImpl* StoragePartitionImplMap::Get( const StoragePartitionConfig& partition_config, bool can_create) { // Find the previously created partition if it's available. PartitionMap::const_iterator it = partitions_.find(partition_config); if (it != partitions_.end()) return it->second.get(); if (!can_create) return nullptr; base::FilePath relative_partition_path = GetStoragePartitionPath( partition_config.partition_domain(), partition_config.partition_name()); std::unique_ptr<StoragePartitionImpl> partition_ptr( StoragePartitionImpl::Create( browser_context_, partition_config.in_memory(), relative_partition_path, partition_config.partition_domain())); StoragePartitionImpl* partition = partition_ptr.get(); partitions_[partition_config] = std::move(partition_ptr); partition->Initialize(); // Arm the serviceworker cookie change observation API. partition->GetCookieStoreContext()->ListenToCookieChanges( partition->GetNetworkContext(), /*success_callback=*/base::DoNothing()); PostCreateInitialization(partition, partition_config.in_memory()); return partition; } void StoragePartitionImplMap::AsyncObliterate( const std::string& partition_domain, base::OnceClosure on_gc_required) { // Find the active partitions for the domain. Because these partitions are // active, it is not possible to just delete the directories that contain // the backing data structures without causing the browser to crash. 
Instead, // of deleteing the directory, we tell each storage context later to // remove any data they have saved. This will leave the directory structure // intact but it will only contain empty databases. std::vector<StoragePartitionImpl*> active_partitions; std::vector<base::FilePath> paths_to_keep; for (PartitionMap::const_iterator it = partitions_.begin(); it != partitions_.end(); ++it) { const StoragePartitionConfig& config = it->first; if (config.partition_domain() == partition_domain) { it->second->ClearData( // All except shader cache. ~StoragePartition::REMOVE_DATA_MASK_SHADER_CACHE, StoragePartition::QUOTA_MANAGED_STORAGE_MASK_ALL, GURL(), base::Time(), base::Time::Max(), base::DoNothing()); if (!config.in_memory()) { paths_to_keep.push_back(it->second->GetPath()); } } } // Start a best-effort delete of the on-disk storage excluding paths that are // known to still be in use. This is to delete any previously created // StoragePartition state that just happens to not have been used during this // run of the browser. base::FilePath domain_root = browser_context_->GetPath().Append( GetStoragePartitionDomainPath(partition_domain)); base::ThreadPool::PostTask( FROM_HERE, {base::MayBlock(), base::TaskPriority::BEST_EFFORT}, base::BindOnce(&BlockingObliteratePath, browser_context_->GetPath(), domain_root, paths_to_keep, base::ThreadTaskRunnerHandle::Get(), std::move(on_gc_required))); } void StoragePartitionImplMap::GarbageCollect( std::unique_ptr<std::unordered_set<base::FilePath>> active_paths, base::OnceClosure done) { // Include all paths for current StoragePartitions in the active_paths since // they cannot be deleted safely. for (PartitionMap::const_iterator it = partitions_.begin(); it != partitions_.end(); ++it) { const StoragePartitionConfig& config = it->first; if (!config.in_memory()) active_paths->insert(it->second->GetPath()); } // Find the directory holding the StoragePartitions and delete everything in // there that isn't considered active. 
base::FilePath storage_root = browser_context_->GetPath().Append( GetStoragePartitionDomainPath(std::string())); file_access_runner_->PostTaskAndReply( FROM_HERE, base::BindOnce(&BlockingGarbageCollect, storage_root, file_access_runner_, std::move(active_paths)), std::move(done)); } void StoragePartitionImplMap::ForEach( BrowserContext::StoragePartitionCallback callback) { for (PartitionMap::const_iterator it = partitions_.begin(); it != partitions_.end(); ++it) { callback.Run(it->second.get()); } } void StoragePartitionImplMap::PostCreateInitialization( StoragePartitionImpl* partition, bool in_memory) { // TODO(ajwong): ResourceContexts no longer have any storage related state. // We should move this into a place where it is called once per // BrowserContext creation rather than piggybacking off the default context // creation. // Note: moving this into Get() before partitions_[] is set causes reentrency. if (!resource_context_initialized_) { resource_context_initialized_ = true; InitializeResourceContext(browser_context_); } partition->GetAppCacheService()->Initialize( in_memory ? base::FilePath() : partition->GetPath().Append(kAppCacheDirname), browser_context_, browser_context_->GetSpecialStoragePolicy()); // Check first to avoid memory leak in unittests. 
if (BrowserThread::IsThreadInitialized(BrowserThread::IO)) { partition->GetCacheStorageContext()->SetBlobParametersForCache( ChromeBlobStorageContext::GetFor(browser_context_)); if (!ServiceWorkerContext::IsServiceWorkerOnUIEnabled()) { GetIOThreadTaskRunner({})->PostTask( FROM_HERE, base::BindOnce( &ServiceWorkerContextWrapper::InitializeResourceContext, partition->GetServiceWorkerContext(), browser_context_->GetResourceContext())); } // Use PostTask() instead of RunOrPostTaskOnThread() because not posting a // task causes it to run before the CacheStorageManager has been // initialized, and then CacheStorageContextImpl::CacheManager() ends up // returning null instead of using the CrossSequenceCacheStorageManager in // unit tests that don't use a real IO thread, violating the DCHECK in // BackgroundFetchDataManager::InitializeOnCoreThread(). // TODO(crbug.com/960012): This workaround should be unnecessary after // CacheStorage moves off the IO thread to the thread pool. base::PostTask( FROM_HERE, {ServiceWorkerContext::GetCoreThreadId()}, base::BindOnce(&BackgroundFetchContext::InitializeOnCoreThread, partition->GetBackgroundFetchContext())); // We do not call InitializeURLRequestContext() for media contexts because, // other than the HTTP cache, the media contexts share the same backing // objects as their associated "normal" request context. Thus, the previous // call serves to initialize the media request context for this storage // partition as well. } } } // namespace content
{'repo_name': 'nwjs/chromium.src', 'stars': '111', 'repo_language': 'None', 'file_name': 'AndroidManifest.xml', 'mime_type': 'text/xml', 'hash': -3326214456826205399, 'source_dataset': 'data'}
/* * Copyright (c) 2003, 2014, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package sun.awt.X11; /** * This class represents AWT application root window functionality. 
* Object of this class is singleton, all window reference it to have * common logical ancestor */ class XRootWindow extends XBaseWindow { private static class LazyHolder { private static final XRootWindow xawtRootWindow; static { XToolkit.awtLock(); try { xawtRootWindow = new XRootWindow(); xawtRootWindow.init(xawtRootWindow.getDelayedParams().delete(DELAYED)); } finally { XToolkit.awtUnlock(); } } } static XRootWindow getInstance() { return LazyHolder.xawtRootWindow; } private XRootWindow() { super(new XCreateWindowParams(new Object[] { DELAYED, Boolean.TRUE, EVENT_MASK, XConstants.StructureNotifyMask })); } public void postInit(XCreateWindowParams params){ super.postInit(params); setWMClass(getWMClass()); } protected String getWMName() { return XToolkit.getAWTAppClassName(); } protected String[] getWMClass() { return new String[] {XToolkit.getAWTAppClassName(), XToolkit.getAWTAppClassName()}; } /* Fix 4976517. Return awt_root_shell to XToolkit.c */ private static long getXRootWindow() { return getXAWTRootWindow().getWindow(); } }
{'repo_name': 'openjdk/jdk', 'stars': '2204', 'repo_language': 'Java', 'file_name': 'fttype1.c', 'mime_type': 'text/x-c', 'hash': 4664787751375714494, 'source_dataset': 'data'}
// Code generated by protoc-gen-gogo. DO NOT EDIT. // source: internal/prototest/examplepb/example.proto // Copyright (c) 2020 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. package examplepb import ( context "context" fmt "fmt" proto "github.com/gogo/protobuf/proto" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" status "google.golang.org/grpc/status" io "io" math "math" math_bits "math/bits" reflect "reflect" strings "strings" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. 
const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package type GetValueRequest struct { Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` } func (m *GetValueRequest) Reset() { *m = GetValueRequest{} } func (*GetValueRequest) ProtoMessage() {} func (*GetValueRequest) Descriptor() ([]byte, []int) { return fileDescriptor_bab15b635bbc13f7, []int{0} } func (m *GetValueRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *GetValueRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_GetValueRequest.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *GetValueRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_GetValueRequest.Merge(m, src) } func (m *GetValueRequest) XXX_Size() int { return m.Size() } func (m *GetValueRequest) XXX_DiscardUnknown() { xxx_messageInfo_GetValueRequest.DiscardUnknown(m) } var xxx_messageInfo_GetValueRequest proto.InternalMessageInfo func (m *GetValueRequest) GetKey() string { if m != nil { return m.Key } return "" } type GetValueResponse struct { Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` } func (m *GetValueResponse) Reset() { *m = GetValueResponse{} } func (*GetValueResponse) ProtoMessage() {} func (*GetValueResponse) Descriptor() ([]byte, []int) { return fileDescriptor_bab15b635bbc13f7, []int{1} } func (m *GetValueResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *GetValueResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_GetValueResponse.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *GetValueResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_GetValueResponse.Merge(m, src) } func (m 
*GetValueResponse) XXX_Size() int { return m.Size() } func (m *GetValueResponse) XXX_DiscardUnknown() { xxx_messageInfo_GetValueResponse.DiscardUnknown(m) } var xxx_messageInfo_GetValueResponse proto.InternalMessageInfo func (m *GetValueResponse) GetValue() string { if m != nil { return m.Value } return "" } type SetValueRequest struct { Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` } func (m *SetValueRequest) Reset() { *m = SetValueRequest{} } func (*SetValueRequest) ProtoMessage() {} func (*SetValueRequest) Descriptor() ([]byte, []int) { return fileDescriptor_bab15b635bbc13f7, []int{2} } func (m *SetValueRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *SetValueRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_SetValueRequest.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *SetValueRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_SetValueRequest.Merge(m, src) } func (m *SetValueRequest) XXX_Size() int { return m.Size() } func (m *SetValueRequest) XXX_DiscardUnknown() { xxx_messageInfo_SetValueRequest.DiscardUnknown(m) } var xxx_messageInfo_SetValueRequest proto.InternalMessageInfo func (m *SetValueRequest) GetKey() string { if m != nil { return m.Key } return "" } func (m *SetValueRequest) GetValue() string { if m != nil { return m.Value } return "" } type SetValueResponse struct { } func (m *SetValueResponse) Reset() { *m = SetValueResponse{} } func (*SetValueResponse) ProtoMessage() {} func (*SetValueResponse) Descriptor() ([]byte, []int) { return fileDescriptor_bab15b635bbc13f7, []int{3} } func (m *SetValueResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *SetValueResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if 
deterministic { return xxx_messageInfo_SetValueResponse.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *SetValueResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_SetValueResponse.Merge(m, src) } func (m *SetValueResponse) XXX_Size() int { return m.Size() } func (m *SetValueResponse) XXX_DiscardUnknown() { xxx_messageInfo_SetValueResponse.DiscardUnknown(m) } var xxx_messageInfo_SetValueResponse proto.InternalMessageInfo type EchoOutRequest struct { Message string `protobuf:"bytes,1,opt,name=message,proto3" json:"message,omitempty"` } func (m *EchoOutRequest) Reset() { *m = EchoOutRequest{} } func (*EchoOutRequest) ProtoMessage() {} func (*EchoOutRequest) Descriptor() ([]byte, []int) { return fileDescriptor_bab15b635bbc13f7, []int{4} } func (m *EchoOutRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *EchoOutRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_EchoOutRequest.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *EchoOutRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_EchoOutRequest.Merge(m, src) } func (m *EchoOutRequest) XXX_Size() int { return m.Size() } func (m *EchoOutRequest) XXX_DiscardUnknown() { xxx_messageInfo_EchoOutRequest.DiscardUnknown(m) } var xxx_messageInfo_EchoOutRequest proto.InternalMessageInfo func (m *EchoOutRequest) GetMessage() string { if m != nil { return m.Message } return "" } type EchoOutResponse struct { AllMessages []string `protobuf:"bytes,2,rep,name=all_messages,json=allMessages,proto3" json:"all_messages,omitempty"` } func (m *EchoOutResponse) Reset() { *m = EchoOutResponse{} } func (*EchoOutResponse) ProtoMessage() {} func (*EchoOutResponse) Descriptor() ([]byte, []int) { return fileDescriptor_bab15b635bbc13f7, []int{5} } 
// NOTE(review): generated code (gogo/protobuf, gofast plugin) — do not hand-edit.
// Regenerate from internal/prototest/examplepb/example.proto instead.

// XXX_Unmarshal implements the legacy proto.Message unmarshal hook by
// delegating to the generated Unmarshal method.
func (m *EchoOutResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) }

// XXX_Marshal appends the wire encoding of m.
// When deterministic is set it routes through the reflection-based
// xxx_messageInfo marshaler; otherwise it uses the fast generated path,
// writing back-to-front into the full capacity of b.
func (m *EchoOutResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		return xxx_messageInfo_EchoOutResponse.Marshal(b, m, deterministic)
	} else {
		// Expand to capacity: MarshalToSizedBuffer fills the tail of the slice.
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}

// XXX_Merge merges src into m via the shared message-info table.
func (m *EchoOutResponse) XXX_Merge(src proto.Message) {
	xxx_messageInfo_EchoOutResponse.Merge(m, src)
}

// XXX_Size reports the encoded size in bytes.
func (m *EchoOutResponse) XXX_Size() int {
	return m.Size()
}

// XXX_DiscardUnknown drops unrecognized fields retained from decoding.
func (m *EchoOutResponse) XXX_DiscardUnknown() {
	xxx_messageInfo_EchoOutResponse.DiscardUnknown(m)
}

// Shared reflection metadata for EchoOutResponse.
var xxx_messageInfo_EchoOutResponse proto.InternalMessageInfo

// GetAllMessages returns the AllMessages field; nil-receiver safe
// (returns nil for a nil message), as for all generated getters.
func (m *EchoOutResponse) GetAllMessages() []string {
	if m != nil {
		return m.AllMessages
	}
	return nil
}

// EchoInRequest is the request for the server-streaming EchoIn RPC:
// field 1 is the message to echo, field 2 the number of responses requested.
type EchoInRequest struct {
	Message      string `protobuf:"bytes,1,opt,name=message,proto3" json:"message,omitempty"`
	NumResponses int64  `protobuf:"varint,2,opt,name=num_responses,json=numResponses,proto3" json:"num_responses,omitempty"`
}

// Reset clears the message to its zero value.
func (m *EchoInRequest) Reset()      { *m = EchoInRequest{} }
func (*EchoInRequest) ProtoMessage() {}

// Descriptor returns the compressed file descriptor and this message's
// index path ([]int{6}) within it.
func (*EchoInRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_bab15b635bbc13f7, []int{6}
}
func (m *EchoInRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) }

// XXX_Marshal: same deterministic/fast-path split as EchoOutResponse above.
func (m *EchoInRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		return xxx_messageInfo_EchoInRequest.Marshal(b, m, deterministic)
	} else {
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}
func (m *EchoInRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_EchoInRequest.Merge(m, src)
}
func (m *EchoInRequest) XXX_Size() int {
	return m.Size()
}
func (m *EchoInRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_EchoInRequest.DiscardUnknown(m)
}

// Shared reflection metadata for EchoInRequest.
var xxx_messageInfo_EchoInRequest proto.InternalMessageInfo

// GetMessage returns the Message field; nil-receiver safe.
// (Continues past this chunk boundary.)
func (m *EchoInRequest) GetMessage() string {
	if m != nil {
		return m.Message
} return "" } func (m *EchoInRequest) GetNumResponses() int64 { if m != nil { return m.NumResponses } return 0 } type EchoInResponse struct { Message string `protobuf:"bytes,1,opt,name=message,proto3" json:"message,omitempty"` } func (m *EchoInResponse) Reset() { *m = EchoInResponse{} } func (*EchoInResponse) ProtoMessage() {} func (*EchoInResponse) Descriptor() ([]byte, []int) { return fileDescriptor_bab15b635bbc13f7, []int{7} } func (m *EchoInResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *EchoInResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_EchoInResponse.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *EchoInResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_EchoInResponse.Merge(m, src) } func (m *EchoInResponse) XXX_Size() int { return m.Size() } func (m *EchoInResponse) XXX_DiscardUnknown() { xxx_messageInfo_EchoInResponse.DiscardUnknown(m) } var xxx_messageInfo_EchoInResponse proto.InternalMessageInfo func (m *EchoInResponse) GetMessage() string { if m != nil { return m.Message } return "" } type EchoBothRequest struct { Message string `protobuf:"bytes,1,opt,name=message,proto3" json:"message,omitempty"` NumResponses int64 `protobuf:"varint,2,opt,name=num_responses,json=numResponses,proto3" json:"num_responses,omitempty"` } func (m *EchoBothRequest) Reset() { *m = EchoBothRequest{} } func (*EchoBothRequest) ProtoMessage() {} func (*EchoBothRequest) Descriptor() ([]byte, []int) { return fileDescriptor_bab15b635bbc13f7, []int{8} } func (m *EchoBothRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *EchoBothRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_EchoBothRequest.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { 
return nil, err } return b[:n], nil } } func (m *EchoBothRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_EchoBothRequest.Merge(m, src) } func (m *EchoBothRequest) XXX_Size() int { return m.Size() } func (m *EchoBothRequest) XXX_DiscardUnknown() { xxx_messageInfo_EchoBothRequest.DiscardUnknown(m) } var xxx_messageInfo_EchoBothRequest proto.InternalMessageInfo func (m *EchoBothRequest) GetMessage() string { if m != nil { return m.Message } return "" } func (m *EchoBothRequest) GetNumResponses() int64 { if m != nil { return m.NumResponses } return 0 } type EchoBothResponse struct { Message string `protobuf:"bytes,1,opt,name=message,proto3" json:"message,omitempty"` } func (m *EchoBothResponse) Reset() { *m = EchoBothResponse{} } func (*EchoBothResponse) ProtoMessage() {} func (*EchoBothResponse) Descriptor() ([]byte, []int) { return fileDescriptor_bab15b635bbc13f7, []int{9} } func (m *EchoBothResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *EchoBothResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_EchoBothResponse.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *EchoBothResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_EchoBothResponse.Merge(m, src) } func (m *EchoBothResponse) XXX_Size() int { return m.Size() } func (m *EchoBothResponse) XXX_DiscardUnknown() { xxx_messageInfo_EchoBothResponse.DiscardUnknown(m) } var xxx_messageInfo_EchoBothResponse proto.InternalMessageInfo func (m *EchoBothResponse) GetMessage() string { if m != nil { return m.Message } return "" } func init() { proto.RegisterType((*GetValueRequest)(nil), "uber.yarpc.internal.examples.protobuf.example.GetValueRequest") proto.RegisterType((*GetValueResponse)(nil), "uber.yarpc.internal.examples.protobuf.example.GetValueResponse") proto.RegisterType((*SetValueRequest)(nil), 
"uber.yarpc.internal.examples.protobuf.example.SetValueRequest") proto.RegisterType((*SetValueResponse)(nil), "uber.yarpc.internal.examples.protobuf.example.SetValueResponse") proto.RegisterType((*EchoOutRequest)(nil), "uber.yarpc.internal.examples.protobuf.example.EchoOutRequest") proto.RegisterType((*EchoOutResponse)(nil), "uber.yarpc.internal.examples.protobuf.example.EchoOutResponse") proto.RegisterType((*EchoInRequest)(nil), "uber.yarpc.internal.examples.protobuf.example.EchoInRequest") proto.RegisterType((*EchoInResponse)(nil), "uber.yarpc.internal.examples.protobuf.example.EchoInResponse") proto.RegisterType((*EchoBothRequest)(nil), "uber.yarpc.internal.examples.protobuf.example.EchoBothRequest") proto.RegisterType((*EchoBothResponse)(nil), "uber.yarpc.internal.examples.protobuf.example.EchoBothResponse") } func init() { proto.RegisterFile("internal/prototest/examplepb/example.proto", fileDescriptor_bab15b635bbc13f7) } var fileDescriptor_bab15b635bbc13f7 = []byte{ // 467 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x54, 0x4d, 0x6b, 0xd4, 0x40, 0x18, 0x9e, 0x77, 0x83, 0xed, 0xee, 0x6b, 0xeb, 0x86, 0xc1, 0x43, 0xd8, 0xc3, 0x50, 0xd3, 0x4b, 0x28, 0x9a, 0x96, 0xea, 0x45, 0xb0, 0x2d, 0x14, 0x54, 0x44, 0xfc, 0x20, 0x01, 0x0f, 0x5e, 0x4a, 0x52, 0x46, 0x2b, 0xe6, 0xcb, 0xcc, 0x44, 0xdc, 0x9b, 0x57, 0x41, 0x41, 0xff, 0x85, 0x78, 0xf6, 0x47, 0x78, 0xdc, 0x63, 0x8f, 0x6e, 0xf6, 0xe2, 0xb1, 0x3f, 0x41, 0x92, 0xcc, 0x6c, 0x75, 0x65, 0x59, 0x36, 0xf6, 0x36, 0x33, 0x79, 0xbe, 0x32, 0xef, 0x93, 0xe0, 0xd6, 0xab, 0x44, 0xf2, 0x3c, 0x09, 0xa2, 0xed, 0x2c, 0x4f, 0x65, 0x2a, 0xb9, 0x90, 0xdb, 0xfc, 0x5d, 0x10, 0x67, 0x11, 0xcf, 0x42, 0xbd, 0x72, 0xeb, 0x67, 0xf4, 0x46, 0x11, 0xf2, 0xdc, 0x1d, 0x06, 0x79, 0x76, 0xec, 0x6a, 0x9a, 0xab, 0x20, 0xa2, 0xc1, 0x84, 0xc5, 0x0b, 0x7d, 0x62, 0x6f, 0x62, 0xff, 0x3e, 0x97, 0xcf, 0x82, 0xa8, 0xe0, 0x1e, 0x7f, 0x53, 0x70, 0x21, 0xa9, 0x89, 0xc6, 0x6b, 0x3e, 0xb4, 0x60, 0x03, 0x9c, 
0x9e, 0x57, 0x2d, 0x6d, 0x07, 0xcd, 0x73, 0x90, 0xc8, 0xd2, 0x44, 0x70, 0x7a, 0x15, 0x2f, 0xbd, 0xad, 0x0e, 0xac, 0x4e, 0x8d, 0x6b, 0x36, 0xf6, 0x6d, 0xec, 0xfb, 0x8b, 0xe4, 0xe6, 0x50, 0x29, 0x9a, 0xfe, 0x8c, 0x89, 0xbd, 0x85, 0x57, 0xee, 0x1e, 0x9f, 0xa4, 0x4f, 0x0a, 0xa9, 0xd5, 0x2c, 0x5c, 0x8d, 0xb9, 0x10, 0xc1, 0x4b, 0xae, 0x14, 0xf5, 0xd6, 0xbe, 0x85, 0xfd, 0x29, 0x56, 0x65, 0xbc, 0x86, 0x6b, 0x41, 0x14, 0x1d, 0x29, 0x84, 0xb0, 0x3a, 0x1b, 0x86, 0xd3, 0xf3, 0x2e, 0x07, 0x51, 0xf4, 0x48, 0x1d, 0xd9, 0x8f, 0x71, 0xbd, 0x62, 0x3d, 0x48, 0x16, 0x1a, 0xd0, 0x4d, 0x5c, 0x4f, 0x8a, 0xf8, 0x28, 0x57, 0xea, 0xa2, 0x8e, 0x6f, 0x78, 0x6b, 0x49, 0x11, 0x6b, 0x47, 0xa1, 0x13, 0x57, 0x7a, 0x2a, 0xc4, 0xfc, 0xc4, 0x4f, 0x9b, 0xc4, 0x87, 0xa9, 0x3c, 0xb9, 0x20, 0xf7, 0xeb, 0x68, 0x9e, 0x2b, 0x2e, 0xf2, 0xdf, 0xfd, 0xd6, 0xc1, 0xee, 0x43, 0x3e, 0xac, 0xaf, 0x9c, 0x7e, 0x02, 0xec, 0xea, 0x21, 0xd3, 0x7d, 0x77, 0xa9, 0x16, 0xb9, 0x33, 0x15, 0x1a, 0x1c, 0xb4, 0xe6, 0xab, 0xc1, 0x93, 0x3a, 0x8f, 0xdf, 0x36, 0x8f, 0xff, 0x9f, 0x79, 0xfe, 0x29, 0x22, 0xd9, 0xfd, 0x6e, 0xa0, 0x71, 0x2f, 0x4d, 0xe9, 0x47, 0xc0, 0x55, 0xd5, 0x33, 0xba, 0xb7, 0xa4, 0xec, 0xdf, 0x5d, 0x1e, 0xec, 0xb7, 0xa5, 0xeb, 0x50, 0x0e, 0xd0, 0x0f, 0x80, 0x2b, 0x4d, 0xe1, 0xe8, 0x9d, 0x16, 0x72, 0xd3, 0xde, 0x0f, 0xf6, 0x5a, 0xb2, 0x75, 0x96, 0x1d, 0xa0, 0x5f, 0x00, 0xbb, 0xba, 0x7e, 0xb4, 0xcd, 0xcb, 0xfd, 0xf1, 0x25, 0x2c, 0x3d, 0xb2, 0xd9, 0xde, 0x57, 0xb7, 0xb3, 0x03, 0x87, 0x07, 0xa3, 0x31, 0x23, 0xa7, 0x63, 0x46, 0xce, 0xc6, 0x0c, 0xde, 0x97, 0x0c, 0xbe, 0x96, 0x0c, 0x7e, 0x94, 0x0c, 0x46, 0x25, 0x83, 0x9f, 0x25, 0x83, 0x5f, 0x25, 0x23, 0x67, 0x25, 0x83, 0xcf, 0x13, 0x46, 0x46, 0x13, 0x46, 0x4e, 0x27, 0x8c, 0x3c, 0xef, 0x4d, 0xff, 0xae, 0xe1, 0x4a, 0xed, 0x74, 0xf3, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, 0xbf, 0xa9, 0xa4, 0x98, 0x84, 0x05, 0x00, 0x00, } func (this *GetValueRequest) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*GetValueRequest) if !ok { that2, ok := 
that.(GetValueRequest) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if this.Key != that1.Key { return false } return true } func (this *GetValueResponse) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*GetValueResponse) if !ok { that2, ok := that.(GetValueResponse) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if this.Value != that1.Value { return false } return true } func (this *SetValueRequest) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*SetValueRequest) if !ok { that2, ok := that.(SetValueRequest) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if this.Key != that1.Key { return false } if this.Value != that1.Value { return false } return true } func (this *SetValueResponse) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*SetValueResponse) if !ok { that2, ok := that.(SetValueResponse) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } return true } func (this *EchoOutRequest) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*EchoOutRequest) if !ok { that2, ok := that.(EchoOutRequest) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if this.Message != that1.Message { return false } return true } func (this *EchoOutResponse) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*EchoOutResponse) if !ok { that2, ok := that.(EchoOutResponse) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if len(this.AllMessages) != 
len(that1.AllMessages) { return false } for i := range this.AllMessages { if this.AllMessages[i] != that1.AllMessages[i] { return false } } return true } func (this *EchoInRequest) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*EchoInRequest) if !ok { that2, ok := that.(EchoInRequest) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if this.Message != that1.Message { return false } if this.NumResponses != that1.NumResponses { return false } return true } func (this *EchoInResponse) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*EchoInResponse) if !ok { that2, ok := that.(EchoInResponse) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if this.Message != that1.Message { return false } return true } func (this *EchoBothRequest) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*EchoBothRequest) if !ok { that2, ok := that.(EchoBothRequest) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if this.Message != that1.Message { return false } if this.NumResponses != that1.NumResponses { return false } return true } func (this *EchoBothResponse) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*EchoBothResponse) if !ok { that2, ok := that.(EchoBothResponse) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if this.Message != that1.Message { return false } return true } func (this *GetValueRequest) GoString() string { if this == nil { return "nil" } s := make([]string, 0, 5) s = append(s, "&examplepb.GetValueRequest{") s = append(s, "Key: "+fmt.Sprintf("%#v", this.Key)+",\n") s = append(s, "}") return strings.Join(s, "") 
} func (this *GetValueResponse) GoString() string { if this == nil { return "nil" } s := make([]string, 0, 5) s = append(s, "&examplepb.GetValueResponse{") s = append(s, "Value: "+fmt.Sprintf("%#v", this.Value)+",\n") s = append(s, "}") return strings.Join(s, "") } func (this *SetValueRequest) GoString() string { if this == nil { return "nil" } s := make([]string, 0, 6) s = append(s, "&examplepb.SetValueRequest{") s = append(s, "Key: "+fmt.Sprintf("%#v", this.Key)+",\n") s = append(s, "Value: "+fmt.Sprintf("%#v", this.Value)+",\n") s = append(s, "}") return strings.Join(s, "") } func (this *SetValueResponse) GoString() string { if this == nil { return "nil" } s := make([]string, 0, 4) s = append(s, "&examplepb.SetValueResponse{") s = append(s, "}") return strings.Join(s, "") } func (this *EchoOutRequest) GoString() string { if this == nil { return "nil" } s := make([]string, 0, 5) s = append(s, "&examplepb.EchoOutRequest{") s = append(s, "Message: "+fmt.Sprintf("%#v", this.Message)+",\n") s = append(s, "}") return strings.Join(s, "") } func (this *EchoOutResponse) GoString() string { if this == nil { return "nil" } s := make([]string, 0, 5) s = append(s, "&examplepb.EchoOutResponse{") s = append(s, "AllMessages: "+fmt.Sprintf("%#v", this.AllMessages)+",\n") s = append(s, "}") return strings.Join(s, "") } func (this *EchoInRequest) GoString() string { if this == nil { return "nil" } s := make([]string, 0, 6) s = append(s, "&examplepb.EchoInRequest{") s = append(s, "Message: "+fmt.Sprintf("%#v", this.Message)+",\n") s = append(s, "NumResponses: "+fmt.Sprintf("%#v", this.NumResponses)+",\n") s = append(s, "}") return strings.Join(s, "") } func (this *EchoInResponse) GoString() string { if this == nil { return "nil" } s := make([]string, 0, 5) s = append(s, "&examplepb.EchoInResponse{") s = append(s, "Message: "+fmt.Sprintf("%#v", this.Message)+",\n") s = append(s, "}") return strings.Join(s, "") } func (this *EchoBothRequest) GoString() string { if this == nil { 
		// NOTE(review): generated code (gogo/protobuf + grpc-go) — do not
		// hand-edit; regenerate from the .proto. Tail of
		// EchoBothRequest.GoString begins before this chunk.
		return "nil"
	}
	s := make([]string, 0, 6)
	s = append(s, "&examplepb.EchoBothRequest{")
	s = append(s, "Message: "+fmt.Sprintf("%#v", this.Message)+",\n")
	s = append(s, "NumResponses: "+fmt.Sprintf("%#v", this.NumResponses)+",\n")
	s = append(s, "}")
	return strings.Join(s, "")
}

// GoString renders the message as a Go composite-literal string
// (used by %#v formatting); "nil" for a nil receiver.
func (this *EchoBothResponse) GoString() string {
	if this == nil {
		return "nil"
	}
	s := make([]string, 0, 5)
	s = append(s, "&examplepb.EchoBothResponse{")
	s = append(s, "Message: "+fmt.Sprintf("%#v", this.Message)+",\n")
	s = append(s, "}")
	return strings.Join(s, "")
}

// valueToGoStringExample renders a pointer value v of type typ as a Go
// expression that reconstructs it ("nil" for a nil pointer). Note: rv.IsNil()
// panics for non-nilable kinds, so v is expected to be a pointer-like value.
func valueToGoStringExample(v interface{}, typ string) string {
	rv := reflect.ValueOf(v)
	if rv.IsNil() {
		return "nil"
	}
	pv := reflect.Indirect(rv).Interface()
	return fmt.Sprintf("func(v %v) *%v { return &v } ( %#v )", typ, typ, pv)
}

// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConn

// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion4

// KeyValueClient is the client API for KeyValue service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type KeyValueClient interface {
	GetValue(ctx context.Context, in *GetValueRequest, opts ...grpc.CallOption) (*GetValueResponse, error)
	SetValue(ctx context.Context, in *SetValueRequest, opts ...grpc.CallOption) (*SetValueResponse, error)
}

// keyValueClient is the concrete KeyValueClient backed by a grpc connection.
type keyValueClient struct {
	cc *grpc.ClientConn
}

// NewKeyValueClient wraps cc in a KeyValue service client.
func NewKeyValueClient(cc *grpc.ClientConn) KeyValueClient {
	return &keyValueClient{cc}
}

// GetValue performs the unary KeyValue/GetValue RPC.
// (Error handling continues past this chunk boundary.)
func (c *keyValueClient) GetValue(ctx context.Context, in *GetValueRequest, opts ...grpc.CallOption) (*GetValueResponse, error) {
	out := new(GetValueResponse)
	err := c.cc.Invoke(ctx, "/uber.yarpc.internal.examples.protobuf.example.KeyValue/GetValue", in, out, opts...)
if err != nil { return nil, err } return out, nil } func (c *keyValueClient) SetValue(ctx context.Context, in *SetValueRequest, opts ...grpc.CallOption) (*SetValueResponse, error) { out := new(SetValueResponse) err := c.cc.Invoke(ctx, "/uber.yarpc.internal.examples.protobuf.example.KeyValue/SetValue", in, out, opts...) if err != nil { return nil, err } return out, nil } // KeyValueServer is the server API for KeyValue service. type KeyValueServer interface { GetValue(context.Context, *GetValueRequest) (*GetValueResponse, error) SetValue(context.Context, *SetValueRequest) (*SetValueResponse, error) } // UnimplementedKeyValueServer can be embedded to have forward compatible implementations. type UnimplementedKeyValueServer struct { } func (*UnimplementedKeyValueServer) GetValue(ctx context.Context, req *GetValueRequest) (*GetValueResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method GetValue not implemented") } func (*UnimplementedKeyValueServer) SetValue(ctx context.Context, req *SetValueRequest) (*SetValueResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method SetValue not implemented") } func RegisterKeyValueServer(s *grpc.Server, srv KeyValueServer) { s.RegisterService(&_KeyValue_serviceDesc, srv) } func _KeyValue_GetValue_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(GetValueRequest) if err := dec(in); err != nil { return nil, err } if interceptor == nil { return srv.(KeyValueServer).GetValue(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, FullMethod: "/uber.yarpc.internal.examples.protobuf.example.KeyValue/GetValue", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(KeyValueServer).GetValue(ctx, req.(*GetValueRequest)) } return interceptor(ctx, in, info, handler) } func _KeyValue_SetValue_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor 
grpc.UnaryServerInterceptor) (interface{}, error) { in := new(SetValueRequest) if err := dec(in); err != nil { return nil, err } if interceptor == nil { return srv.(KeyValueServer).SetValue(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, FullMethod: "/uber.yarpc.internal.examples.protobuf.example.KeyValue/SetValue", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(KeyValueServer).SetValue(ctx, req.(*SetValueRequest)) } return interceptor(ctx, in, info, handler) } var _KeyValue_serviceDesc = grpc.ServiceDesc{ ServiceName: "uber.yarpc.internal.examples.protobuf.example.KeyValue", HandlerType: (*KeyValueServer)(nil), Methods: []grpc.MethodDesc{ { MethodName: "GetValue", Handler: _KeyValue_GetValue_Handler, }, { MethodName: "SetValue", Handler: _KeyValue_SetValue_Handler, }, }, Streams: []grpc.StreamDesc{}, Metadata: "internal/prototest/examplepb/example.proto", } // FooClient is the client API for Foo service. // // For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. type FooClient interface { EchoOut(ctx context.Context, opts ...grpc.CallOption) (Foo_EchoOutClient, error) EchoIn(ctx context.Context, in *EchoInRequest, opts ...grpc.CallOption) (Foo_EchoInClient, error) EchoBoth(ctx context.Context, opts ...grpc.CallOption) (Foo_EchoBothClient, error) } type fooClient struct { cc *grpc.ClientConn } func NewFooClient(cc *grpc.ClientConn) FooClient { return &fooClient{cc} } func (c *fooClient) EchoOut(ctx context.Context, opts ...grpc.CallOption) (Foo_EchoOutClient, error) { stream, err := c.cc.NewStream(ctx, &_Foo_serviceDesc.Streams[0], "/uber.yarpc.internal.examples.protobuf.example.Foo/EchoOut", opts...) 
if err != nil { return nil, err } x := &fooEchoOutClient{stream} return x, nil } type Foo_EchoOutClient interface { Send(*EchoOutRequest) error CloseAndRecv() (*EchoOutResponse, error) grpc.ClientStream } type fooEchoOutClient struct { grpc.ClientStream } func (x *fooEchoOutClient) Send(m *EchoOutRequest) error { return x.ClientStream.SendMsg(m) } func (x *fooEchoOutClient) CloseAndRecv() (*EchoOutResponse, error) { if err := x.ClientStream.CloseSend(); err != nil { return nil, err } m := new(EchoOutResponse) if err := x.ClientStream.RecvMsg(m); err != nil { return nil, err } return m, nil } func (c *fooClient) EchoIn(ctx context.Context, in *EchoInRequest, opts ...grpc.CallOption) (Foo_EchoInClient, error) { stream, err := c.cc.NewStream(ctx, &_Foo_serviceDesc.Streams[1], "/uber.yarpc.internal.examples.protobuf.example.Foo/EchoIn", opts...) if err != nil { return nil, err } x := &fooEchoInClient{stream} if err := x.ClientStream.SendMsg(in); err != nil { return nil, err } if err := x.ClientStream.CloseSend(); err != nil { return nil, err } return x, nil } type Foo_EchoInClient interface { Recv() (*EchoInResponse, error) grpc.ClientStream } type fooEchoInClient struct { grpc.ClientStream } func (x *fooEchoInClient) Recv() (*EchoInResponse, error) { m := new(EchoInResponse) if err := x.ClientStream.RecvMsg(m); err != nil { return nil, err } return m, nil } func (c *fooClient) EchoBoth(ctx context.Context, opts ...grpc.CallOption) (Foo_EchoBothClient, error) { stream, err := c.cc.NewStream(ctx, &_Foo_serviceDesc.Streams[2], "/uber.yarpc.internal.examples.protobuf.example.Foo/EchoBoth", opts...) 
if err != nil { return nil, err } x := &fooEchoBothClient{stream} return x, nil } type Foo_EchoBothClient interface { Send(*EchoBothRequest) error Recv() (*EchoBothResponse, error) grpc.ClientStream } type fooEchoBothClient struct { grpc.ClientStream } func (x *fooEchoBothClient) Send(m *EchoBothRequest) error { return x.ClientStream.SendMsg(m) } func (x *fooEchoBothClient) Recv() (*EchoBothResponse, error) { m := new(EchoBothResponse) if err := x.ClientStream.RecvMsg(m); err != nil { return nil, err } return m, nil } // FooServer is the server API for Foo service. type FooServer interface { EchoOut(Foo_EchoOutServer) error EchoIn(*EchoInRequest, Foo_EchoInServer) error EchoBoth(Foo_EchoBothServer) error } // UnimplementedFooServer can be embedded to have forward compatible implementations. type UnimplementedFooServer struct { } func (*UnimplementedFooServer) EchoOut(srv Foo_EchoOutServer) error { return status.Errorf(codes.Unimplemented, "method EchoOut not implemented") } func (*UnimplementedFooServer) EchoIn(req *EchoInRequest, srv Foo_EchoInServer) error { return status.Errorf(codes.Unimplemented, "method EchoIn not implemented") } func (*UnimplementedFooServer) EchoBoth(srv Foo_EchoBothServer) error { return status.Errorf(codes.Unimplemented, "method EchoBoth not implemented") } func RegisterFooServer(s *grpc.Server, srv FooServer) { s.RegisterService(&_Foo_serviceDesc, srv) } func _Foo_EchoOut_Handler(srv interface{}, stream grpc.ServerStream) error { return srv.(FooServer).EchoOut(&fooEchoOutServer{stream}) } type Foo_EchoOutServer interface { SendAndClose(*EchoOutResponse) error Recv() (*EchoOutRequest, error) grpc.ServerStream } type fooEchoOutServer struct { grpc.ServerStream } func (x *fooEchoOutServer) SendAndClose(m *EchoOutResponse) error { return x.ServerStream.SendMsg(m) } func (x *fooEchoOutServer) Recv() (*EchoOutRequest, error) { m := new(EchoOutRequest) if err := x.ServerStream.RecvMsg(m); err != nil { return nil, err } return m, nil } func 
_Foo_EchoIn_Handler(srv interface{}, stream grpc.ServerStream) error { m := new(EchoInRequest) if err := stream.RecvMsg(m); err != nil { return err } return srv.(FooServer).EchoIn(m, &fooEchoInServer{stream}) } type Foo_EchoInServer interface { Send(*EchoInResponse) error grpc.ServerStream } type fooEchoInServer struct { grpc.ServerStream } func (x *fooEchoInServer) Send(m *EchoInResponse) error { return x.ServerStream.SendMsg(m) } func _Foo_EchoBoth_Handler(srv interface{}, stream grpc.ServerStream) error { return srv.(FooServer).EchoBoth(&fooEchoBothServer{stream}) } type Foo_EchoBothServer interface { Send(*EchoBothResponse) error Recv() (*EchoBothRequest, error) grpc.ServerStream } type fooEchoBothServer struct { grpc.ServerStream } func (x *fooEchoBothServer) Send(m *EchoBothResponse) error { return x.ServerStream.SendMsg(m) } func (x *fooEchoBothServer) Recv() (*EchoBothRequest, error) { m := new(EchoBothRequest) if err := x.ServerStream.RecvMsg(m); err != nil { return nil, err } return m, nil } var _Foo_serviceDesc = grpc.ServiceDesc{ ServiceName: "uber.yarpc.internal.examples.protobuf.example.Foo", HandlerType: (*FooServer)(nil), Methods: []grpc.MethodDesc{}, Streams: []grpc.StreamDesc{ { StreamName: "EchoOut", Handler: _Foo_EchoOut_Handler, ClientStreams: true, }, { StreamName: "EchoIn", Handler: _Foo_EchoIn_Handler, ServerStreams: true, }, { StreamName: "EchoBoth", Handler: _Foo_EchoBoth_Handler, ServerStreams: true, ClientStreams: true, }, }, Metadata: "internal/prototest/examplepb/example.proto", } func (m *GetValueRequest) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *GetValueRequest) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *GetValueRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if len(m.Key) > 
0 { i -= len(m.Key) copy(dAtA[i:], m.Key) i = encodeVarintExample(dAtA, i, uint64(len(m.Key))) i-- dAtA[i] = 0xa } return len(dAtA) - i, nil } func (m *GetValueResponse) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *GetValueResponse) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *GetValueResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if len(m.Value) > 0 { i -= len(m.Value) copy(dAtA[i:], m.Value) i = encodeVarintExample(dAtA, i, uint64(len(m.Value))) i-- dAtA[i] = 0x12 } return len(dAtA) - i, nil } func (m *SetValueRequest) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *SetValueRequest) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *SetValueRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if len(m.Value) > 0 { i -= len(m.Value) copy(dAtA[i:], m.Value) i = encodeVarintExample(dAtA, i, uint64(len(m.Value))) i-- dAtA[i] = 0x12 } if len(m.Key) > 0 { i -= len(m.Key) copy(dAtA[i:], m.Key) i = encodeVarintExample(dAtA, i, uint64(len(m.Key))) i-- dAtA[i] = 0xa } return len(dAtA) - i, nil } func (m *SetValueResponse) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *SetValueResponse) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *SetValueResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l return len(dAtA) - i, nil } func (m 
*EchoOutRequest) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *EchoOutRequest) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *EchoOutRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if len(m.Message) > 0 { i -= len(m.Message) copy(dAtA[i:], m.Message) i = encodeVarintExample(dAtA, i, uint64(len(m.Message))) i-- dAtA[i] = 0xa } return len(dAtA) - i, nil } func (m *EchoOutResponse) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *EchoOutResponse) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *EchoOutResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if len(m.AllMessages) > 0 { for iNdEx := len(m.AllMessages) - 1; iNdEx >= 0; iNdEx-- { i -= len(m.AllMessages[iNdEx]) copy(dAtA[i:], m.AllMessages[iNdEx]) i = encodeVarintExample(dAtA, i, uint64(len(m.AllMessages[iNdEx]))) i-- dAtA[i] = 0x12 } } return len(dAtA) - i, nil } func (m *EchoInRequest) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *EchoInRequest) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *EchoInRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if m.NumResponses != 0 { i = encodeVarintExample(dAtA, i, uint64(m.NumResponses)) i-- dAtA[i] = 0x10 } if len(m.Message) > 0 { i -= len(m.Message) copy(dAtA[i:], m.Message) i = encodeVarintExample(dAtA, i, 
uint64(len(m.Message))) i-- dAtA[i] = 0xa } return len(dAtA) - i, nil } func (m *EchoInResponse) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *EchoInResponse) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *EchoInResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if len(m.Message) > 0 { i -= len(m.Message) copy(dAtA[i:], m.Message) i = encodeVarintExample(dAtA, i, uint64(len(m.Message))) i-- dAtA[i] = 0xa } return len(dAtA) - i, nil } func (m *EchoBothRequest) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *EchoBothRequest) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *EchoBothRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if m.NumResponses != 0 { i = encodeVarintExample(dAtA, i, uint64(m.NumResponses)) i-- dAtA[i] = 0x10 } if len(m.Message) > 0 { i -= len(m.Message) copy(dAtA[i:], m.Message) i = encodeVarintExample(dAtA, i, uint64(len(m.Message))) i-- dAtA[i] = 0xa } return len(dAtA) - i, nil } func (m *EchoBothResponse) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *EchoBothResponse) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *EchoBothResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if len(m.Message) > 0 { i -= len(m.Message) copy(dAtA[i:], m.Message) i = encodeVarintExample(dAtA, i, uint64(len(m.Message))) i-- 
dAtA[i] = 0xa } return len(dAtA) - i, nil } func encodeVarintExample(dAtA []byte, offset int, v uint64) int { offset -= sovExample(v) base := offset for v >= 1<<7 { dAtA[offset] = uint8(v&0x7f | 0x80) v >>= 7 offset++ } dAtA[offset] = uint8(v) return base } func (m *GetValueRequest) Size() (n int) { if m == nil { return 0 } var l int _ = l l = len(m.Key) if l > 0 { n += 1 + l + sovExample(uint64(l)) } return n } func (m *GetValueResponse) Size() (n int) { if m == nil { return 0 } var l int _ = l l = len(m.Value) if l > 0 { n += 1 + l + sovExample(uint64(l)) } return n } func (m *SetValueRequest) Size() (n int) { if m == nil { return 0 } var l int _ = l l = len(m.Key) if l > 0 { n += 1 + l + sovExample(uint64(l)) } l = len(m.Value) if l > 0 { n += 1 + l + sovExample(uint64(l)) } return n } func (m *SetValueResponse) Size() (n int) { if m == nil { return 0 } var l int _ = l return n } func (m *EchoOutRequest) Size() (n int) { if m == nil { return 0 } var l int _ = l l = len(m.Message) if l > 0 { n += 1 + l + sovExample(uint64(l)) } return n } func (m *EchoOutResponse) Size() (n int) { if m == nil { return 0 } var l int _ = l if len(m.AllMessages) > 0 { for _, s := range m.AllMessages { l = len(s) n += 1 + l + sovExample(uint64(l)) } } return n } func (m *EchoInRequest) Size() (n int) { if m == nil { return 0 } var l int _ = l l = len(m.Message) if l > 0 { n += 1 + l + sovExample(uint64(l)) } if m.NumResponses != 0 { n += 1 + sovExample(uint64(m.NumResponses)) } return n } func (m *EchoInResponse) Size() (n int) { if m == nil { return 0 } var l int _ = l l = len(m.Message) if l > 0 { n += 1 + l + sovExample(uint64(l)) } return n } func (m *EchoBothRequest) Size() (n int) { if m == nil { return 0 } var l int _ = l l = len(m.Message) if l > 0 { n += 1 + l + sovExample(uint64(l)) } if m.NumResponses != 0 { n += 1 + sovExample(uint64(m.NumResponses)) } return n } func (m *EchoBothResponse) Size() (n int) { if m == nil { return 0 } var l int _ = l l = len(m.Message) if l > 
0 { n += 1 + l + sovExample(uint64(l)) } return n } func sovExample(x uint64) (n int) { return (math_bits.Len64(x|1) + 6) / 7 } func sozExample(x uint64) (n int) { return sovExample(uint64((x << 1) ^ uint64((int64(x) >> 63)))) } func (this *GetValueRequest) String() string { if this == nil { return "nil" } s := strings.Join([]string{`&GetValueRequest{`, `Key:` + fmt.Sprintf("%v", this.Key) + `,`, `}`, }, "") return s } func (this *GetValueResponse) String() string { if this == nil { return "nil" } s := strings.Join([]string{`&GetValueResponse{`, `Value:` + fmt.Sprintf("%v", this.Value) + `,`, `}`, }, "") return s } func (this *SetValueRequest) String() string { if this == nil { return "nil" } s := strings.Join([]string{`&SetValueRequest{`, `Key:` + fmt.Sprintf("%v", this.Key) + `,`, `Value:` + fmt.Sprintf("%v", this.Value) + `,`, `}`, }, "") return s } func (this *SetValueResponse) String() string { if this == nil { return "nil" } s := strings.Join([]string{`&SetValueResponse{`, `}`, }, "") return s } func (this *EchoOutRequest) String() string { if this == nil { return "nil" } s := strings.Join([]string{`&EchoOutRequest{`, `Message:` + fmt.Sprintf("%v", this.Message) + `,`, `}`, }, "") return s } func (this *EchoOutResponse) String() string { if this == nil { return "nil" } s := strings.Join([]string{`&EchoOutResponse{`, `AllMessages:` + fmt.Sprintf("%v", this.AllMessages) + `,`, `}`, }, "") return s } func (this *EchoInRequest) String() string { if this == nil { return "nil" } s := strings.Join([]string{`&EchoInRequest{`, `Message:` + fmt.Sprintf("%v", this.Message) + `,`, `NumResponses:` + fmt.Sprintf("%v", this.NumResponses) + `,`, `}`, }, "") return s } func (this *EchoInResponse) String() string { if this == nil { return "nil" } s := strings.Join([]string{`&EchoInResponse{`, `Message:` + fmt.Sprintf("%v", this.Message) + `,`, `}`, }, "") return s } func (this *EchoBothRequest) String() string { if this == nil { return "nil" } s := 
strings.Join([]string{`&EchoBothRequest{`, `Message:` + fmt.Sprintf("%v", this.Message) + `,`, `NumResponses:` + fmt.Sprintf("%v", this.NumResponses) + `,`, `}`, }, "") return s } func (this *EchoBothResponse) String() string { if this == nil { return "nil" } s := strings.Join([]string{`&EchoBothResponse{`, `Message:` + fmt.Sprintf("%v", this.Message) + `,`, `}`, }, "") return s } func valueToStringExample(v interface{}) string { rv := reflect.ValueOf(v) if rv.IsNil() { return "nil" } pv := reflect.Indirect(rv).Interface() return fmt.Sprintf("*%v", pv) } func (m *GetValueRequest) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowExample } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: GetValueRequest: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: GetValueRequest: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Key", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowExample } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthExample } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthExample } if postIndex > l { return io.ErrUnexpectedEOF } m.Key = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipExample(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthExample } if (iNdEx + skippy) < 0 { return 
ErrInvalidLengthExample } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *GetValueResponse) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowExample } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: GetValueResponse: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: GetValueResponse: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Value", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowExample } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthExample } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthExample } if postIndex > l { return io.ErrUnexpectedEOF } m.Value = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipExample(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthExample } if (iNdEx + skippy) < 0 { return ErrInvalidLengthExample } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *SetValueRequest) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowExample } if iNdEx >= l { return 
io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: SetValueRequest: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: SetValueRequest: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Key", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowExample } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthExample } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthExample } if postIndex > l { return io.ErrUnexpectedEOF } m.Key = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Value", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowExample } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthExample } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthExample } if postIndex > l { return io.ErrUnexpectedEOF } m.Value = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipExample(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthExample } if (iNdEx + skippy) < 0 { return ErrInvalidLengthExample } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *SetValueResponse) Unmarshal(dAtA 
[]byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowExample } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: SetValueResponse: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: SetValueResponse: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { default: iNdEx = preIndex skippy, err := skipExample(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthExample } if (iNdEx + skippy) < 0 { return ErrInvalidLengthExample } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *EchoOutRequest) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowExample } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: EchoOutRequest: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: EchoOutRequest: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Message", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowExample } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthExample } postIndex := 
iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthExample } if postIndex > l { return io.ErrUnexpectedEOF } m.Message = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipExample(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthExample } if (iNdEx + skippy) < 0 { return ErrInvalidLengthExample } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *EchoOutResponse) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowExample } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: EchoOutResponse: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: EchoOutResponse: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field AllMessages", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowExample } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthExample } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthExample } if postIndex > l { return io.ErrUnexpectedEOF } m.AllMessages = append(m.AllMessages, string(dAtA[iNdEx:postIndex])) iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipExample(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthExample } if (iNdEx + skippy) < 0 { return 
ErrInvalidLengthExample } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *EchoInRequest) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowExample } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: EchoInRequest: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: EchoInRequest: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Message", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowExample } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthExample } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthExample } if postIndex > l { return io.ErrUnexpectedEOF } m.Message = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 2: if wireType != 0 { return fmt.Errorf("proto: wrong wireType = %d for field NumResponses", wireType) } m.NumResponses = 0 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowExample } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ m.NumResponses |= int64(b&0x7F) << shift if b < 0x80 { break } } default: iNdEx = preIndex skippy, err := skipExample(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthExample } if (iNdEx + skippy) < 0 { return ErrInvalidLengthExample } if (iNdEx + skippy) > l { return 
io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *EchoInResponse) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowExample } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: EchoInResponse: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: EchoInResponse: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Message", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowExample } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthExample } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthExample } if postIndex > l { return io.ErrUnexpectedEOF } m.Message = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipExample(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthExample } if (iNdEx + skippy) < 0 { return ErrInvalidLengthExample } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *EchoBothRequest) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowExample } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << 
shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: EchoBothRequest: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: EchoBothRequest: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Message", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowExample } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthExample } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthExample } if postIndex > l { return io.ErrUnexpectedEOF } m.Message = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 2: if wireType != 0 { return fmt.Errorf("proto: wrong wireType = %d for field NumResponses", wireType) } m.NumResponses = 0 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowExample } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ m.NumResponses |= int64(b&0x7F) << shift if b < 0x80 { break } } default: iNdEx = preIndex skippy, err := skipExample(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthExample } if (iNdEx + skippy) < 0 { return ErrInvalidLengthExample } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *EchoBothResponse) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowExample } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) 
wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: EchoBothResponse: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: EchoBothResponse: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Message", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowExample } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthExample } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthExample } if postIndex > l { return io.ErrUnexpectedEOF } m.Message = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipExample(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthExample } if (iNdEx + skippy) < 0 { return ErrInvalidLengthExample } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func skipExample(dAtA []byte) (n int, err error) { l := len(dAtA) iNdEx := 0 depth := 0 for iNdEx < l { var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return 0, ErrIntOverflowExample } if iNdEx >= l { return 0, io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } wireType := int(wire & 0x7) switch wireType { case 0: for shift := uint(0); ; shift += 7 { if shift >= 64 { return 0, ErrIntOverflowExample } if iNdEx >= l { return 0, io.ErrUnexpectedEOF } iNdEx++ if dAtA[iNdEx-1] < 0x80 { break } } case 1: iNdEx += 8 case 2: var length int for shift := uint(0); ; shift += 7 { if shift >= 64 { return 0, ErrIntOverflowExample } if iNdEx >= l { return 0, io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ 
length |= (int(b) & 0x7F) << shift if b < 0x80 { break } } if length < 0 { return 0, ErrInvalidLengthExample } iNdEx += length case 3: depth++ case 4: if depth == 0 { return 0, ErrUnexpectedEndOfGroupExample } depth-- case 5: iNdEx += 4 default: return 0, fmt.Errorf("proto: illegal wireType %d", wireType) } if iNdEx < 0 { return 0, ErrInvalidLengthExample } if depth == 0 { return iNdEx, nil } } return 0, io.ErrUnexpectedEOF } var ( ErrInvalidLengthExample = fmt.Errorf("proto: negative length found during unmarshaling") ErrIntOverflowExample = fmt.Errorf("proto: integer overflow") ErrUnexpectedEndOfGroupExample = fmt.Errorf("proto: unexpected end of group") )
{'repo_name': 'yarpc/yarpc-go', 'stars': '247', 'repo_language': 'Go', 'file_name': 'chooser.go', 'mime_type': 'text/plain', 'hash': -6686255970574210778, 'source_dataset': 'data'}
<?php /* * This file is part of the Symfony package. * * (c) Fabien Potencier <fabien@symfony.com> * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ namespace Symfony\Component\Yaml; use Symfony\Component\Yaml\Exception\ParseException; /** * Parser parses YAML strings to convert them to PHP arrays. * * @author Fabien Potencier <fabien@symfony.com> */ class Parser { const TAG_PATTERN = '((?P<tag>![\w!.\/:-]+) +)?'; const BLOCK_SCALAR_HEADER_PATTERN = '(?P<separator>\||>)(?P<modifiers>\+|\-|\d+|\+\d+|\-\d+|\d+\+|\d+\-)?(?P<comments> +#.*)?'; private $offset = 0; private $totalNumberOfLines; private $lines = array(); private $currentLineNb = -1; private $currentLine = ''; private $refs = array(); private $skippedLineNumbers = array(); private $locallySkippedLineNumbers = array(); /** * Constructor. * * @param int $offset The offset of YAML document (used for line numbers in error messages) * @param int|null $totalNumberOfLines The overall number of lines being parsed * @param int[] $skippedLineNumbers Number of comment lines that have been skipped by the parser */ public function __construct($offset = 0, $totalNumberOfLines = null, array $skippedLineNumbers = array()) { $this->offset = $offset; $this->totalNumberOfLines = $totalNumberOfLines; $this->skippedLineNumbers = $skippedLineNumbers; } /** * Parses a YAML string to a PHP value. * * @param string $value A YAML string * @param int $flags A bit field of PARSE_* constants to customize the YAML parser behavior * * @return mixed A PHP value * * @throws ParseException If the YAML is not valid */ public function parse($value, $flags = 0) { if (is_bool($flags)) { @trigger_error('Passing a boolean flag to toggle exception handling is deprecated since version 3.1 and will be removed in 4.0. 
Use the Yaml::PARSE_EXCEPTION_ON_INVALID_TYPE flag instead.', E_USER_DEPRECATED); if ($flags) { $flags = Yaml::PARSE_EXCEPTION_ON_INVALID_TYPE; } else { $flags = 0; } } if (func_num_args() >= 3) { @trigger_error('Passing a boolean flag to toggle object support is deprecated since version 3.1 and will be removed in 4.0. Use the Yaml::PARSE_OBJECT flag instead.', E_USER_DEPRECATED); if (func_get_arg(2)) { $flags |= Yaml::PARSE_OBJECT; } } if (func_num_args() >= 4) { @trigger_error('Passing a boolean flag to toggle object for map support is deprecated since version 3.1 and will be removed in 4.0. Use the Yaml::PARSE_OBJECT_FOR_MAP flag instead.', E_USER_DEPRECATED); if (func_get_arg(3)) { $flags |= Yaml::PARSE_OBJECT_FOR_MAP; } } if (false === preg_match('//u', $value)) { throw new ParseException('The YAML value does not appear to be valid UTF-8.'); } $this->refs = array(); $mbEncoding = null; $e = null; $data = null; if (2 /* MB_OVERLOAD_STRING */ & (int) ini_get('mbstring.func_overload')) { $mbEncoding = mb_internal_encoding(); mb_internal_encoding('UTF-8'); } try { $data = $this->doParse($value, $flags); } catch (\Exception $e) { } catch (\Throwable $e) { } if (null !== $mbEncoding) { mb_internal_encoding($mbEncoding); } $this->lines = array(); $this->currentLine = ''; $this->refs = array(); $this->skippedLineNumbers = array(); $this->locallySkippedLineNumbers = array(); if (null !== $e) { throw $e; } return $data; } private function doParse($value, $flags) { $this->currentLineNb = -1; $this->currentLine = ''; $value = $this->cleanup($value); $this->lines = explode("\n", $value); $this->locallySkippedLineNumbers = array(); if (null === $this->totalNumberOfLines) { $this->totalNumberOfLines = count($this->lines); } $data = array(); $context = null; $allowOverwrite = false; while ($this->moveToNextLine()) { if ($this->isCurrentLineEmpty()) { continue; } // tab? 
if ("\t" === $this->currentLine[0]) { throw new ParseException('A YAML file cannot contain tabs as indentation.', $this->getRealCurrentLineNb() + 1, $this->currentLine); } $isRef = $mergeNode = false; if (self::preg_match('#^\-((?P<leadspaces>\s+)(?P<value>.+))?$#u', rtrim($this->currentLine), $values)) { if ($context && 'mapping' == $context) { throw new ParseException('You cannot define a sequence item when in a mapping', $this->getRealCurrentLineNb() + 1, $this->currentLine); } $context = 'sequence'; if (isset($values['value']) && self::preg_match('#^&(?P<ref>[^ ]+) *(?P<value>.*)#u', $values['value'], $matches)) { $isRef = $matches['ref']; $values['value'] = $matches['value']; } // array if (!isset($values['value']) || '' == trim($values['value'], ' ') || 0 === strpos(ltrim($values['value'], ' '), '#')) { $data[] = $this->parseBlock($this->getRealCurrentLineNb() + 1, $this->getNextEmbedBlock(null, true), $flags); } else { if (isset($values['leadspaces']) && self::preg_match('#^(?P<key>'.Inline::REGEX_QUOTED_STRING.'|[^ \'"\{\[].*?) *\:(\s+(?P<value>.+))?$#u', rtrim($values['value']), $matches) ) { // this is a compact notation element, add to next block and parse $block = $values['value']; if ($this->isNextLineIndented()) { $block .= "\n".$this->getNextEmbedBlock($this->getCurrentLineIndentation() + strlen($values['leadspaces']) + 1); } $data[] = $this->parseBlock($this->getRealCurrentLineNb(), $block, $flags); } else { $data[] = $this->parseValue($values['value'], $flags, $context); } } if ($isRef) { $this->refs[$isRef] = end($data); } } elseif ( self::preg_match('#^(?P<key>'.Inline::REGEX_QUOTED_STRING.'|[^ \'"\[\{].*?) 
*\:(\s+(?P<value>.+))?$#u', rtrim($this->currentLine), $values) && (false === strpos($values['key'], ' #') || in_array($values['key'][0], array('"', "'"))) ) { if ($context && 'sequence' == $context) { throw new ParseException('You cannot define a mapping item when in a sequence', $this->currentLineNb + 1, $this->currentLine); } $context = 'mapping'; // force correct settings Inline::parse(null, $flags, $this->refs); try { Inline::$parsedLineNumber = $this->getRealCurrentLineNb(); $key = Inline::parseScalar($values['key']); } catch (ParseException $e) { $e->setParsedLine($this->getRealCurrentLineNb() + 1); $e->setSnippet($this->currentLine); throw $e; } // Convert float keys to strings, to avoid being converted to integers by PHP if (is_float($key)) { $key = (string) $key; } if ('<<' === $key) { $mergeNode = true; $allowOverwrite = true; if (isset($values['value']) && 0 === strpos($values['value'], '*')) { $refName = substr($values['value'], 1); if (!array_key_exists($refName, $this->refs)) { throw new ParseException(sprintf('Reference "%s" does not exist.', $refName), $this->getRealCurrentLineNb() + 1, $this->currentLine); } $refValue = $this->refs[$refName]; if (!is_array($refValue)) { throw new ParseException('YAML merge keys used with a scalar value instead of an array.', $this->getRealCurrentLineNb() + 1, $this->currentLine); } $data += $refValue; // array union } else { if (isset($values['value']) && $values['value'] !== '') { $value = $values['value']; } else { $value = $this->getNextEmbedBlock(); } $parsed = $this->parseBlock($this->getRealCurrentLineNb() + 1, $value, $flags); if (!is_array($parsed)) { throw new ParseException('YAML merge keys used with a scalar value instead of an array.', $this->getRealCurrentLineNb() + 1, $this->currentLine); } if (isset($parsed[0])) { // If the value associated with the merge key is a sequence, then this sequence is expected to contain mapping nodes // and each of these nodes is merged in turn according to its order in 
the sequence. Keys in mapping nodes earlier // in the sequence override keys specified in later mapping nodes. foreach ($parsed as $parsedItem) { if (!is_array($parsedItem)) { throw new ParseException('Merge items must be arrays.', $this->getRealCurrentLineNb() + 1, $parsedItem); } $data += $parsedItem; // array union } } else { // If the value associated with the key is a single mapping node, each of its key/value pairs is inserted into the // current mapping, unless the key already exists in it. $data += $parsed; // array union } } } elseif (isset($values['value']) && self::preg_match('#^&(?P<ref>[^ ]+) *(?P<value>.*)#u', $values['value'], $matches)) { $isRef = $matches['ref']; $values['value'] = $matches['value']; } if ($mergeNode) { // Merge keys } elseif (!isset($values['value']) || '' == trim($values['value'], ' ') || 0 === strpos(ltrim($values['value'], ' '), '#')) { // hash // if next line is less indented or equal, then it means that the current value is null if (!$this->isNextLineIndented() && !$this->isNextLineUnIndentedCollection()) { // Spec: Keys MUST be unique; first one wins. // But overwriting is allowed when a merge node is used in current block. if ($allowOverwrite || !isset($data[$key])) { $data[$key] = null; } else { @trigger_error(sprintf('Duplicate key "%s" detected on line %d whilst parsing YAML. Silent handling of duplicate mapping keys in YAML is deprecated since version 3.2 and will throw \Symfony\Component\Yaml\Exception\ParseException in 4.0.', $key, $this->getRealCurrentLineNb() + 1), E_USER_DEPRECATED); } } else { // remember the parsed line number here in case we need it to provide some contexts in error messages below $realCurrentLineNbKey = $this->getRealCurrentLineNb(); $value = $this->parseBlock($this->getRealCurrentLineNb() + 1, $this->getNextEmbedBlock(), $flags); // Spec: Keys MUST be unique; first one wins. // But overwriting is allowed when a merge node is used in current block. 
if ($allowOverwrite || !isset($data[$key])) { $data[$key] = $value; } else { @trigger_error(sprintf('Duplicate key "%s" detected on line %d whilst parsing YAML. Silent handling of duplicate mapping keys in YAML is deprecated since version 3.2 and will throw \Symfony\Component\Yaml\Exception\ParseException in 4.0.', $key, $realCurrentLineNbKey + 1), E_USER_DEPRECATED); } } } else { $value = $this->parseValue($values['value'], $flags, $context); // Spec: Keys MUST be unique; first one wins. // But overwriting is allowed when a merge node is used in current block. if ($allowOverwrite || !isset($data[$key])) { $data[$key] = $value; } else { @trigger_error(sprintf('Duplicate key "%s" detected on line %d whilst parsing YAML. Silent handling of duplicate mapping keys in YAML is deprecated since version 3.2 and will throw \Symfony\Component\Yaml\Exception\ParseException in 4.0.', $key, $this->getRealCurrentLineNb() + 1), E_USER_DEPRECATED); } } if ($isRef) { $this->refs[$isRef] = $data[$key]; } } else { // multiple documents are not supported if ('---' === $this->currentLine) { throw new ParseException('Multiple documents are not supported.', $this->currentLineNb + 1, $this->currentLine); } // 1-liner optionally followed by newline(s) if (is_string($value) && $this->lines[0] === trim($value)) { try { Inline::$parsedLineNumber = $this->getRealCurrentLineNb(); $value = Inline::parse($this->lines[0], $flags, $this->refs); } catch (ParseException $e) { $e->setParsedLine($this->getRealCurrentLineNb() + 1); $e->setSnippet($this->currentLine); throw $e; } return $value; } throw new ParseException('Unable to parse.', $this->getRealCurrentLineNb() + 1, $this->currentLine); } } if (Yaml::PARSE_OBJECT_FOR_MAP & $flags && !is_object($data) && 'mapping' === $context) { $object = new \stdClass(); foreach ($data as $key => $value) { $object->$key = $value; } $data = $object; } return empty($data) ? 
null : $data;
    }

    /**
     * Parses a nested YAML block with a fresh child parser.
     *
     * The child parser shares this parser's reference table (by reference),
     * and is told which absolute line numbers were skipped so its error
     * messages point at the real lines of the original document.
     */
    private function parseBlock($offset, $yaml, $flags)
    {
        $skippedLineNumbers = $this->skippedLineNumbers;

        // Only skipped lines at or after the sub-block's offset are relevant
        // to the child parser.
        foreach ($this->locallySkippedLineNumbers as $lineNumber) {
            if ($lineNumber < $offset) {
                continue;
            }

            $skippedLineNumbers[] = $lineNumber;
        }

        $parser = new self($offset, $this->totalNumberOfLines, $skippedLineNumbers);
        $parser->refs = &$this->refs;

        return $parser->doParse($yaml, $flags);
    }

    /**
     * Returns the current line number (takes the offset into account).
     *
     * Each skipped (comment) line located before the computed position shifts
     * the reported number by one, so the result maps back to the original input.
     *
     * @return int The current line number
     */
    private function getRealCurrentLineNb()
    {
        $realCurrentLineNumber = $this->currentLineNb + $this->offset;

        foreach ($this->skippedLineNumbers as $skippedLineNumber) {
            if ($skippedLineNumber > $realCurrentLineNumber) {
                break;
            }

            ++$realCurrentLineNumber;
        }

        return $realCurrentLineNumber;
    }

    /**
     * Returns the current line indentation (number of leading spaces).
     *
     * @return int The current line indentation
     */
    private function getCurrentLineIndentation()
    {
        return strlen($this->currentLine) - strlen(ltrim($this->currentLine, ' '));
    }

    /**
     * Returns the next embed block of YAML.
     *
     * @param int  $indentation The indent level at which the block is to be read, or null for default
     * @param bool $inSequence  True if the enclosing data structure is a sequence
     *
     * @return string A YAML string
     *
     * @throws ParseException When indentation problem are detected
     */
    private function getNextEmbedBlock($indentation = null, $inSequence = false)
    {
        $oldLineIndentation = $this->getCurrentLineIndentation();
        $blockScalarIndentations = array();

        if ($this->isBlockScalarHeader()) {
            $blockScalarIndentations[] = $this->getCurrentLineIndentation();
        }

        if (!$this->moveToNextLine()) {
            return;
        }

        if (null === $indentation) {
            // derive the block indentation from the first line of the block
            $newIndent = $this->getCurrentLineIndentation();

            $unindentedEmbedBlock = $this->isStringUnIndentedCollectionItem();

            if (!$this->isCurrentLineEmpty() && 0 === $newIndent && !$unindentedEmbedBlock) {
                throw new ParseException('Indentation problem.', $this->getRealCurrentLineNb() + 1, $this->currentLine);
            }
        } else {
            $newIndent = $indentation;
        }

        $data = array();
        if ($this->getCurrentLineIndentation() >= $newIndent) {
            // strip the block indentation from the collected line
            $data[] = substr($this->currentLine, $newIndent);
        } else {
            $this->moveToPreviousLine();

            return;
        }

        if ($inSequence && $oldLineIndentation === $newIndent && isset($data[0][0]) && '-' === $data[0][0]) {
            // the previous line contained a dash but no item content, this line is a sequence item with the same indentation
            // and therefore no nested list or mapping
            $this->moveToPreviousLine();

            return;
        }

        $isItUnindentedCollection = $this->isStringUnIndentedCollectionItem();

        if (empty($blockScalarIndentations) && $this->isBlockScalarHeader()) {
            $blockScalarIndentations[] = $this->getCurrentLineIndentation();
        }

        $previousLineIndentation = $this->getCurrentLineIndentation();

        while ($this->moveToNextLine()) {
            $indent = $this->getCurrentLineIndentation();

            // terminate all block scalars that are more indented than the current line
            if (!empty($blockScalarIndentations) && $indent < $previousLineIndentation && trim($this->currentLine) !== '') {
                foreach ($blockScalarIndentations as $key => $blockScalarIndentation) {
                    if ($blockScalarIndentation >= $this->getCurrentLineIndentation()) {
                        unset($blockScalarIndentations[$key]);
                    }
                }
            }

            if (empty($blockScalarIndentations) && !$this->isCurrentLineComment() && $this->isBlockScalarHeader()) {
                $blockScalarIndentations[] = $this->getCurrentLineIndentation();
            }

            $previousLineIndentation = $indent;

            if ($isItUnindentedCollection && !$this->isCurrentLineEmpty() && !$this->isStringUnIndentedCollectionItem() && $newIndent === $indent) {
                $this->moveToPreviousLine();
                break;
            }

            if ($this->isCurrentLineBlank()) {
                $data[] = substr($this->currentLine, $newIndent);
                continue;
            }

            // we ignore "comment" lines only when we are not inside a scalar block
            if (empty($blockScalarIndentations) && $this->isCurrentLineComment()) {
                // remember ignored comment lines (they are used later in nested
                // parser calls to determine real line numbers)
                //
                // CAUTION: beware to not populate the global property here as it
                // will otherwise influence the getRealCurrentLineNb() call here
                // for consecutive comment lines and subsequent embedded blocks
                $this->locallySkippedLineNumbers[] = $this->getRealCurrentLineNb();

                continue;
            }

            if ($indent >= $newIndent) {
                $data[] = substr($this->currentLine, $newIndent);
            } elseif (0 == $indent) {
                // a completely unindented line ends the embedded block
                $this->moveToPreviousLine();

                break;
            } else {
                throw new ParseException('Indentation problem.', $this->getRealCurrentLineNb() + 1, $this->currentLine);
            }
        }

        return implode("\n", $data);
    }

    /**
     * Moves the parser cursor to the next line.
     *
     * @return bool false when the cursor is already on the last line
     */
    private function moveToNextLine()
    {
        if ($this->currentLineNb >= count($this->lines) - 1) {
            return false;
        }

        $this->currentLine = $this->lines[++$this->currentLineNb];

        return true;
    }

    /**
     * Moves the parser cursor to the previous line.
     *
     * @return bool false when the cursor is already on the first line
     */
    private function moveToPreviousLine()
    {
        if ($this->currentLineNb < 1) {
            return false;
        }

        $this->currentLine = $this->lines[--$this->currentLineNb];

        return true;
    }

    /**
     * Parses a YAML value.
     *
     * @param string $value   A YAML value
     * @param int    $flags   A bit field of PARSE_* constants to customize the YAML parser behavior
     * @param string $context The parser context (either sequence or mapping)
     *
     * @return mixed A PHP value
     *
     * @throws ParseException When reference does not exist
     */
    private function parseValue($value, $flags, $context)
    {
        // alias node ("*ref"): resolve it from the reference table
        if (0 === strpos($value, '*')) {
            if (false !== $pos = strpos($value, '#')) {
                // drop a trailing comment from the reference name
                $value = substr($value, 1, $pos - 2);
            } else {
                $value = substr($value, 1);
            }

            if (!array_key_exists($value, $this->refs)) {
                throw new ParseException(sprintf('Reference "%s" does not exist.', $value), $this->currentLineNb + 1, $this->currentLine);
            }

            return $this->refs[$value];
        }

        // block scalar header (| or >, possibly tagged): read the scalar body
        if (self::preg_match('/^'.self::TAG_PATTERN.self::BLOCK_SCALAR_HEADER_PATTERN.'$/', $value, $matches)) {
            $modifiers = isset($matches['modifiers']) ? $matches['modifiers'] : '';

            $data = $this->parseBlockScalar($matches['separator'], preg_replace('#\d+#', '', $modifiers), (int) abs($modifiers));

            if (isset($matches['tag']) && '!!binary' === $matches['tag']) {
                return Inline::evaluateBinaryScalar($data);
            }

            return $data;
        }

        try {
            $quotation = '' !== $value && ('"' === $value[0] || "'" === $value[0]) ? $value[0] : null;

            // do not take following lines into account when the current line is a quoted single line value
            if (null !== $quotation && preg_match('/^'.$quotation.'.*'.$quotation.'(\s*#.*)?$/', $value)) {
                return Inline::parse($value, $flags, $this->refs);
            }

            // fold following lines into the value until it terminates
            while ($this->moveToNextLine()) {
                // unquoted strings end before the first unindented line
                if (null === $quotation && $this->getCurrentLineIndentation() === 0) {
                    $this->moveToPreviousLine();

                    break;
                }

                $value .= ' '.trim($this->currentLine);

                // quoted string values end with a line that is terminated with the quotation character
                if ('' !== $this->currentLine && substr($this->currentLine, -1) === $quotation) {
                    break;
                }
            }

            Inline::$parsedLineNumber = $this->getRealCurrentLineNb();
            $parsedValue = Inline::parse($value, $flags, $this->refs);

            // an unquoted plain scalar used as a mapping value must not itself contain ": "
            if ('mapping' === $context && is_string($parsedValue) && '"' !== $value[0] && "'" !== $value[0] && '[' !== $value[0] && '{' !== $value[0] && '!' !== $value[0] && false !== strpos($parsedValue, ': ')) {
                throw new ParseException('A colon cannot be used in an unquoted mapping value.');
            }

            return $parsedValue;
        } catch (ParseException $e) {
            $e->setParsedLine($this->getRealCurrentLineNb() + 1);
            $e->setSnippet($this->currentLine);

            throw $e;
        }
    }

    /**
     * Parses a block scalar.
     *
     * @param string $style       The style indicator that was used to begin this block scalar (| or >)
     * @param string $chomping    The chomping indicator that was used to begin this block scalar (+ or -)
     * @param int    $indentation The indentation indicator that was used to begin this block scalar
     *
     * @return string The text value
     */
    private function parseBlockScalar($style, $chomping = '', $indentation = 0)
    {
        $notEOF = $this->moveToNextLine();
        if (!$notEOF) {
            return '';
        }

        $isCurrentLineBlank = $this->isCurrentLineBlank();
        $blockLines = array();

        // leading blank lines are consumed before determining indentation
        while ($notEOF && $isCurrentLineBlank) {
            // newline only if not EOF
            if ($notEOF = $this->moveToNextLine()) {
                $blockLines[] = '';
                $isCurrentLineBlank = $this->isCurrentLineBlank();
            }
        }

        // determine indentation if not specified
        if (0 === $indentation) {
            if (self::preg_match('/^ +/', $this->currentLine, $matches)) {
                $indentation = strlen($matches[0]);
            }
        }

        if ($indentation > 0) {
            $pattern = sprintf('/^ {%d}(.*)$/', $indentation);

            // collect lines that belong to the scalar, stripping the indentation
            while (
                $notEOF && (
                    $isCurrentLineBlank ||
                    self::preg_match($pattern, $this->currentLine, $matches)
                )
            ) {
                if ($isCurrentLineBlank && strlen($this->currentLine) > $indentation) {
                    $blockLines[] = substr($this->currentLine, $indentation);
                } elseif ($isCurrentLineBlank) {
                    $blockLines[] = '';
                } else {
                    $blockLines[] = $matches[1];
                }

                // newline only if not EOF
                if ($notEOF = $this->moveToNextLine()) {
                    $isCurrentLineBlank = $this->isCurrentLineBlank();
                }
            }
        } elseif ($notEOF) {
            $blockLines[] = '';
        }

        if ($notEOF) {
            $blockLines[] = '';
            $this->moveToPreviousLine();
        } elseif (!$notEOF && !$this->isCurrentLineLastLineInDocument()) {
            $blockLines[] = '';
        }

        // folded style
        if ('>' === $style) {
            $text = '';
            $previousLineIndented = false;
            $previousLineBlank = false;

            for ($i = 0, $blockLinesCount = count($blockLines); $i < $blockLinesCount; ++$i) {
                if ('' === $blockLines[$i]) {
                    $text .= "\n";
                    $previousLineIndented = false;
                    $previousLineBlank = true;
                } elseif (' ' === $blockLines[$i][0]) {
                    // more-indented lines keep their newline and indentation
                    $text .= "\n".$blockLines[$i];
                    $previousLineIndented = true;
                    $previousLineBlank = false;
                } elseif ($previousLineIndented) {
                    $text .= "\n".$blockLines[$i];
                    $previousLineIndented = false;
                    $previousLineBlank = false;
                } elseif ($previousLineBlank || 0 === $i) {
                    $text .= $blockLines[$i];
                    $previousLineIndented = false;
                    $previousLineBlank = false;
                } else {
                    // fold: join with a single space
                    $text .= ' '.$blockLines[$i];
                    $previousLineIndented = false;
                    $previousLineBlank = false;
                }
            }
        } else {
            $text = implode("\n", $blockLines);
        }

        // deal with trailing newlines (clip by default, strip for '-')
        if ('' === $chomping) {
            $text = preg_replace('/\n+$/', "\n", $text);
        } elseif ('-' === $chomping) {
            $text = preg_replace('/\n+$/', '', $text);
        }

        return $text;
    }

    /**
     * Returns true if the next line is indented.
     *
     * @return bool Returns true if the next line is indented, false otherwise
     */
    private function isNextLineIndented()
    {
        $currentIndentation = $this->getCurrentLineIndentation();
        $EOF = !$this->moveToNextLine();

        // skip empty/comment lines when looking ahead
        while (!$EOF && $this->isCurrentLineEmpty()) {
            $EOF = !$this->moveToNextLine();
        }

        if ($EOF) {
            return false;
        }

        $ret = $this->getCurrentLineIndentation() > $currentIndentation;

        $this->moveToPreviousLine();

        return $ret;
    }

    /**
     * Returns true if the current line is blank or if it is a comment line.
     *
     * @return bool Returns true if the current line is empty or if it is a comment line, false otherwise
     */
    private function isCurrentLineEmpty()
    {
        return $this->isCurrentLineBlank() || $this->isCurrentLineComment();
    }

    /**
     * Returns true if the current line is blank.
     *
     * @return bool Returns true if the current line is blank, false otherwise
     */
    private function isCurrentLineBlank()
    {
        return '' == trim($this->currentLine, ' ');
    }

    /**
     * Returns true if the current line is a comment line.
     *
     * @return bool Returns true if the current line is a comment line, false otherwise
     */
    private function isCurrentLineComment()
    {
        //checking explicitly the first char of the trim is faster than loops or strpos
        $ltrimmedLine = ltrim($this->currentLine, ' ');

        return '' !== $ltrimmedLine && $ltrimmedLine[0] === '#';
    }

    // True when the cursor sits on the final line of the whole document
    // (offset-aware, so it also works in nested sub-parsers).
    private function isCurrentLineLastLineInDocument()
    {
        return ($this->offset + $this->currentLineNb) >= ($this->totalNumberOfLines - 1);
    }

    /**
     * Cleanups a YAML string to be parsed.
     *
     * Normalizes line endings and strips the %YAML header, leading comments
     * and document start/end markers, adjusting the line-number offset so
     * error messages still reference the original input.
     *
     * @param string $value The input YAML string
     *
     * @return string A cleaned up YAML string
     */
    private function cleanup($value)
    {
        $value = str_replace(array("\r\n", "\r"), "\n", $value);

        // strip YAML header
        $count = 0;
        $value = preg_replace('#^\%YAML[: ][\d\.]+.*\n#u', '', $value, -1, $count);
        $this->offset += $count;

        // remove leading comments
        $trimmedValue = preg_replace('#^(\#.*?\n)+#s', '', $value, -1, $count);
        if ($count == 1) {
            // items have been removed, update the offset
            $this->offset += substr_count($value, "\n") - substr_count($trimmedValue, "\n");
            $value = $trimmedValue;
        }

        // remove start of the document marker (---)
        $trimmedValue = preg_replace('#^\-\-\-.*?\n#s', '', $value, -1, $count);
        if ($count == 1) {
            // items have been removed, update the offset
            $this->offset += substr_count($value, "\n") - substr_count($trimmedValue, "\n");
            $value = $trimmedValue;

            // remove end of the document marker (...)
            $value = preg_replace('#\.\.\.\s*$#', '', $value);
        }

        return $value;
    }

    /**
     * Returns true if the next line starts unindented collection.
     *
     * @return bool Returns true if the next line starts unindented collection, false otherwise
     */
    private function isNextLineUnIndentedCollection()
    {
        $currentIndentation = $this->getCurrentLineIndentation();
        $notEOF = $this->moveToNextLine();

        // look past blank/comment lines
        while ($notEOF && $this->isCurrentLineEmpty()) {
            $notEOF = $this->moveToNextLine();
        }

        if (false === $notEOF) {
            return false;
        }

        $ret = $this->getCurrentLineIndentation() === $currentIndentation && $this->isStringUnIndentedCollectionItem();

        $this->moveToPreviousLine();

        return $ret;
    }

    /**
     * Returns true if the string is un-indented collection item.
     *
     * @return bool Returns true if the string is un-indented collection item, false otherwise
     */
    private function isStringUnIndentedCollectionItem()
    {
        // either a lone dash or a "- item" entry
        return '-' === rtrim($this->currentLine) || 0 === strpos($this->currentLine, '- ');
    }

    /**
     * Tests whether or not the current line is the header of a block scalar.
     *
     * @return bool
     */
    private function isBlockScalarHeader()
    {
        return (bool) self::preg_match('~'.self::BLOCK_SCALAR_HEADER_PATTERN.'$~', $this->currentLine);
    }

    /**
     * A local wrapper for `preg_match` which will throw a ParseException if there
     * is an internal error in the PCRE engine.
* * This avoids us needing to check for "false" every time PCRE is used * in the YAML engine * * @throws ParseException on a PCRE internal error * * @see preg_last_error() * * @internal */ public static function preg_match($pattern, $subject, &$matches = null, $flags = 0, $offset = 0) { if (false === $ret = preg_match($pattern, $subject, $matches, $flags, $offset)) { switch (preg_last_error()) { case PREG_INTERNAL_ERROR: $error = 'Internal PCRE error.'; break; case PREG_BACKTRACK_LIMIT_ERROR: $error = 'pcre.backtrack_limit reached.'; break; case PREG_RECURSION_LIMIT_ERROR: $error = 'pcre.recursion_limit reached.'; break; case PREG_BAD_UTF8_ERROR: $error = 'Malformed UTF-8 data.'; break; case PREG_BAD_UTF8_OFFSET_ERROR: $error = 'Offset doesn\'t correspond to the begin of a valid UTF-8 code point.'; break; default: $error = 'Error.'; } throw new ParseException($error); } return $ret; } }
{'repo_name': 'cashmusic/platform', 'stars': '1005', 'repo_language': 'PHP', 'file_name': 'SeedBase.php', 'mime_type': 'text/x-php', 'hash': -8541484149587016595, 'source_dataset': 'data'}
/* * Copyright 2009 Jerome Glisse. * All Rights Reserved. * * Permission is hereby granted, free of charge, to any person obtaining a * copy of this software and associated documentation files (the * "Software"), to deal in the Software without restriction, including * without limitation the rights to use, copy, modify, merge, publish, * distribute, sub license, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to * the following conditions: * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL * THE COPYRIGHT HOLDERS, AUTHORS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE * USE OR OTHER DEALINGS IN THE SOFTWARE. * * The above copyright notice and this permission notice (including the * next paragraph) shall be included in all copies or substantial portions * of the Software. * */ /* * Authors: * Jerome Glisse <glisse@freedesktop.org> * Thomas Hellstrom <thomas-at-tungstengraphics-dot-com> * Dave Airlie */ #include <linux/list.h> #include <linux/slab.h> #include <drm/drmP.h> #include "radeon_drm.h" #include "radeon.h" #include "radeon_trace.h" int radeon_ttm_init(struct radeon_device *rdev); void radeon_ttm_fini(struct radeon_device *rdev); static void radeon_bo_clear_surface_reg(struct radeon_bo *bo); /* * To exclude mutual BO access we rely on bo_reserve exclusion, as all * function are calling it. 
 */

/* Tear down all per-VM address-space bindings of a BO before it is freed. */
void radeon_bo_clear_va(struct radeon_bo *bo)
{
	struct radeon_bo_va *bo_va, *tmp;

	list_for_each_entry_safe(bo_va, tmp, &bo->va, bo_list) {
		/* remove from all vm address space */
		mutex_lock(&bo_va->vm->mutex);
		list_del(&bo_va->vm_list);
		mutex_unlock(&bo_va->vm->mutex);
		list_del(&bo_va->bo_list);
		kfree(bo_va);
	}
}

/* TTM destroy callback: final teardown once the last reference is dropped. */
static void radeon_ttm_bo_destroy(struct ttm_buffer_object *tbo)
{
	struct radeon_bo *bo;

	bo = container_of(tbo, struct radeon_bo, tbo);
	mutex_lock(&bo->rdev->gem.mutex);
	list_del_init(&bo->list);
	mutex_unlock(&bo->rdev->gem.mutex);
	radeon_bo_clear_surface_reg(bo);
	radeon_bo_clear_va(bo);
	drm_gem_object_release(&bo->gem_base);
	kfree(bo);
}

/* True when the TTM object was created by this driver (destroy fn matches). */
bool radeon_ttm_bo_is_radeon_bo(struct ttm_buffer_object *bo)
{
	if (bo->destroy == &radeon_ttm_bo_destroy)
		return true;
	return false;
}

/* Translate RADEON_GEM_DOMAIN_* flags into a TTM placement list. */
void radeon_ttm_placement_from_domain(struct radeon_bo *rbo, u32 domain)
{
	u32 c = 0;

	rbo->placement.fpfn = 0;
	rbo->placement.lpfn = 0;
	rbo->placement.placement = rbo->placements;
	rbo->placement.busy_placement = rbo->placements;
	if (domain & RADEON_GEM_DOMAIN_VRAM)
		rbo->placements[c++] = TTM_PL_FLAG_WC | TTM_PL_FLAG_UNCACHED |
					TTM_PL_FLAG_VRAM;
	if (domain & RADEON_GEM_DOMAIN_GTT)
		rbo->placements[c++] = TTM_PL_MASK_CACHING | TTM_PL_FLAG_TT;
	if (domain & RADEON_GEM_DOMAIN_CPU)
		rbo->placements[c++] = TTM_PL_MASK_CACHING | TTM_PL_FLAG_SYSTEM;
	/* fall back to system memory when no domain bit was requested */
	if (!c)
		rbo->placements[c++] = TTM_PL_MASK_CACHING | TTM_PL_FLAG_SYSTEM;
	rbo->placement.num_placement = c;
	rbo->placement.num_busy_placement = c;
}

/*
 * Allocate and initialize a radeon BO backed by TTM.  On a VRAM allocation
 * failure the allocation is retried with GTT added to the domain.
 */
int radeon_bo_create(struct radeon_device *rdev,
		unsigned long size, int byte_align, bool kernel, u32 domain,
		struct radeon_bo **bo_ptr)
{
	struct radeon_bo *bo;
	enum ttm_bo_type type;
	unsigned long page_align = roundup(byte_align, PAGE_SIZE) >> PAGE_SHIFT;
	unsigned long max_size = 0;
	size_t acc_size;
	int r;

	size = ALIGN(size, PAGE_SIZE);

	if (unlikely(rdev->mman.bdev.dev_mapping == NULL)) {
		rdev->mman.bdev.dev_mapping = rdev->ddev->dev_mapping;
	}
	if (kernel) {
		type = ttm_bo_type_kernel;
	} else {
		type = ttm_bo_type_device;
	}
	*bo_ptr = NULL;

	/* maximum bo size is the minimum between visible vram and gtt size */
	max_size = min(rdev->mc.visible_vram_size, rdev->mc.gtt_size);
	if ((page_align << PAGE_SHIFT) >= max_size) {
		printk(KERN_WARNING "%s:%d alloc size %ldM bigger than %ldMb limit\n",
			__func__, __LINE__, page_align  >> (20 - PAGE_SHIFT), max_size >> 20);
		return -ENOMEM;
	}

	acc_size = ttm_bo_dma_acc_size(&rdev->mman.bdev, size,
				       sizeof(struct radeon_bo));

retry:
	bo = kzalloc(sizeof(struct radeon_bo), GFP_KERNEL);
	if (bo == NULL)
		return -ENOMEM;
	r = drm_gem_object_init(rdev->ddev, &bo->gem_base, size);
	if (unlikely(r)) {
		kfree(bo);
		return r;
	}
	bo->rdev = rdev;
	bo->gem_base.driver_private = NULL;
	bo->surface_reg = -1;
	INIT_LIST_HEAD(&bo->list);
	INIT_LIST_HEAD(&bo->va);
	radeon_ttm_placement_from_domain(bo, domain);
	/* Kernel allocation are uninterruptible */
	mutex_lock(&rdev->vram_mutex);
	r = ttm_bo_init(&rdev->mman.bdev, &bo->tbo, size, type,
			&bo->placement, page_align, 0, !kernel, NULL,
			acc_size, &radeon_ttm_bo_destroy);
	mutex_unlock(&rdev->vram_mutex);
	if (unlikely(r != 0)) {
		if (r != -ERESTARTSYS) {
			if (domain == RADEON_GEM_DOMAIN_VRAM) {
				domain |= RADEON_GEM_DOMAIN_GTT;
				goto retry;
			}
			dev_err(rdev->dev,
				"object_init failed for (%lu, 0x%08X)\n",
				size, domain);
		}
		return r;
	}
	*bo_ptr = bo;

	trace_radeon_bo_create(bo);

	return 0;
}

/* Map the BO into kernel address space; caches the mapping in bo->kptr. */
int radeon_bo_kmap(struct radeon_bo *bo, void **ptr)
{
	bool is_iomem;
	int r;

	if (bo->kptr) {
		/* already mapped: reuse the cached pointer */
		if (ptr) {
			*ptr = bo->kptr;
		}
		return 0;
	}
	r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap);
	if (r) {
		return r;
	}
	bo->kptr = ttm_kmap_obj_virtual(&bo->kmap, &is_iomem);
	if (ptr) {
		*ptr = bo->kptr;
	}
	radeon_bo_check_tiling(bo, 0, 0);
	return 0;
}

/* Drop the kernel mapping created by radeon_bo_kmap(). */
void radeon_bo_kunmap(struct radeon_bo *bo)
{
	if (bo->kptr == NULL)
		return;
	bo->kptr = NULL;
	radeon_bo_check_tiling(bo, 0, 0);
	ttm_bo_kunmap(&bo->kmap);
}

/* Release a reference to a BO and NULL the caller's pointer. */
void radeon_bo_unref(struct radeon_bo **bo)
{
	struct ttm_buffer_object *tbo;
	struct radeon_device *rdev;

	if ((*bo) == NULL)
		return;
	rdev = (*bo)->rdev;
	tbo = &((*bo)->tbo);
	mutex_lock(&rdev->vram_mutex);
	ttm_bo_unref(&tbo);
	mutex_unlock(&rdev->vram_mutex);
	/* ttm_bo_unref() NULLs tbo when the last reference was dropped */
	if (tbo == NULL)
		*bo = NULL;
}

/*
 * Pin a BO into the given domain (refcounted: repeat pins just bump the
 * count).  Optionally returns the GPU address of the pinned BO.
 */
int radeon_bo_pin(struct radeon_bo *bo, u32 domain, u64 *gpu_addr)
{
	int r, i;

	if (bo->pin_count) {
		bo->pin_count++;
		if (gpu_addr)
			*gpu_addr = radeon_bo_gpu_offset(bo);
		return 0;
	}
	radeon_ttm_placement_from_domain(bo, domain);
	if (domain == RADEON_GEM_DOMAIN_VRAM) {
		/* force to pin into visible video ram */
		bo->placement.lpfn = bo->rdev->mc.visible_vram_size >> PAGE_SHIFT;
	}
	/* NO_EVICT keeps the BO resident while it is pinned */
	for (i = 0; i < bo->placement.num_placement; i++)
		bo->placements[i] |= TTM_PL_FLAG_NO_EVICT;
	r = ttm_bo_validate(&bo->tbo, &bo->placement, false, false, false);
	if (likely(r == 0)) {
		bo->pin_count = 1;
		if (gpu_addr != NULL)
			*gpu_addr = radeon_bo_gpu_offset(bo);
	}
	if (unlikely(r != 0))
		dev_err(bo->rdev->dev, "%p pin failed\n", bo);
	return r;
}

/* Drop one pin reference; the BO becomes evictable when the count hits 0. */
int radeon_bo_unpin(struct radeon_bo *bo)
{
	int r, i;

	if (!bo->pin_count) {
		dev_warn(bo->rdev->dev, "%p unpin not necessary\n", bo);
		return 0;
	}
	bo->pin_count--;
	if (bo->pin_count)
		return 0;
	for (i = 0; i < bo->placement.num_placement; i++)
		bo->placements[i] &= ~TTM_PL_FLAG_NO_EVICT;
	r = ttm_bo_validate(&bo->tbo, &bo->placement, false, false, false);
	if (unlikely(r != 0))
		dev_err(bo->rdev->dev, "%p validate failed for unpin\n", bo);
	return r;
}

/* Evict everything currently resident in VRAM (e.g. around suspend). */
int radeon_bo_evict_vram(struct radeon_device *rdev)
{
	/* late 2.6.33 fix IGP hibernate - we need pm ops to do this correct */
	if (0 && (rdev->flags & RADEON_IS_IGP)) {
		if (rdev->mc.igp_sideport_enabled == false)
			/* Useless to evict on IGP chips */
			return 0;
	}
	return ttm_bo_evict_mm(&rdev->mman.bdev, TTM_PL_VRAM);
}

/* Forcibly free BOs userspace leaked; called at teardown. */
void radeon_bo_force_delete(struct radeon_device *rdev)
{
	struct radeon_bo *bo, *n;

	if (list_empty(&rdev->gem.objects)) {
		return;
	}
	dev_err(rdev->dev, "Userspace still has active objects !\n");
	list_for_each_entry_safe(bo, n, &rdev->gem.objects, list) {
		mutex_lock(&rdev->ddev->struct_mutex);
		dev_err(rdev->dev, "%p %p %lu %lu force free\n",
			&bo->gem_base, bo, (unsigned long)bo->gem_base.size,
			*((unsigned long *)&bo->gem_base.refcount));
		mutex_lock(&bo->rdev->gem.mutex);
		list_del_init(&bo->list);
		mutex_unlock(&bo->rdev->gem.mutex);
		/* this should unref the ttm bo */
		drm_gem_object_unreference(&bo->gem_base);
		mutex_unlock(&rdev->ddev->struct_mutex);
	}
}

/* Set up the VRAM MTRR and hand off to TTM initialization. */
int radeon_bo_init(struct radeon_device *rdev)
{
	/* Add an MTRR for the VRAM */
	rdev->mc.vram_mtrr = mtrr_add(rdev->mc.aper_base, rdev->mc.aper_size,
			MTRR_TYPE_WRCOMB, 1);
	DRM_INFO("Detected VRAM RAM=%lluM, BAR=%lluM\n",
		rdev->mc.mc_vram_size >> 20,
		(unsigned long long)rdev->mc.aper_size >> 20);
	DRM_INFO("RAM width %dbits %cDR\n",
			rdev->mc.vram_width, rdev->mc.vram_is_ddr ? 'D' : 'S');
	return radeon_ttm_init(rdev);
}

/* Counterpart of radeon_bo_init(). */
void radeon_bo_fini(struct radeon_device *rdev)
{
	radeon_ttm_fini(rdev);
}

/* Queue a BO for validation; write-domain BOs go to the front of the list. */
void radeon_bo_list_add_object(struct radeon_bo_list *lobj,
				struct list_head *head)
{
	if (lobj->wdomain) {
		list_add(&lobj->tv.head, head);
	} else {
		list_add_tail(&lobj->tv.head, head);
	}
}

/*
 * Reserve and validate every BO on the list, retrying VRAM placements
 * with GTT added on failure; fills in GPU offset and tiling flags.
 */
int radeon_bo_list_validate(struct list_head *head)
{
	struct radeon_bo_list *lobj;
	struct radeon_bo *bo;
	u32 domain;
	int r;

	r = ttm_eu_reserve_buffers(head);
	if (unlikely(r != 0)) {
		return r;
	}
	list_for_each_entry(lobj, head, tv.head) {
		bo = lobj->bo;
		if (!bo->pin_count) {
			domain = lobj->wdomain ?
			lobj->wdomain : lobj->rdomain;

		retry:
			radeon_ttm_placement_from_domain(bo, domain);
			r = ttm_bo_validate(&bo->tbo, &bo->placement,
						true, false, false);
			if (unlikely(r)) {
				/* retry once with GTT as an additional fallback domain */
				if (r != -ERESTARTSYS && domain == RADEON_GEM_DOMAIN_VRAM) {
					domain |= RADEON_GEM_DOMAIN_GTT;
					goto retry;
				}
				return r;
			}
		}
		lobj->gpu_offset = radeon_bo_gpu_offset(bo);
		lobj->tiling_flags = bo->tiling_flags;
	}
	return 0;
}

/* mmap a BO into the fbdev emulation's address space. */
int radeon_bo_fbdev_mmap(struct radeon_bo *bo,
			     struct vm_area_struct *vma)
{
	return ttm_fbdev_mmap(vma, &bo->tbo);
}

/*
 * Assign a hardware surface register matching the BO's tiling flags,
 * stealing an unpinned one when all registers are in use.
 * Caller must hold the BO reservation.
 */
int radeon_bo_get_surface_reg(struct radeon_bo *bo)
{
	struct radeon_device *rdev = bo->rdev;
	struct radeon_surface_reg *reg;
	struct radeon_bo *old_object;
	int steal;
	int i;

	BUG_ON(!atomic_read(&bo->tbo.reserved));

	if (!bo->tiling_flags)
		return 0;

	if (bo->surface_reg >= 0) {
		/* already has a register assigned: just reprogram it */
		reg = &rdev->surface_regs[bo->surface_reg];
		i = bo->surface_reg;
		goto out;
	}

	steal = -1;
	for (i = 0; i < RADEON_GEM_MAX_SURFACES; i++) {

		reg = &rdev->surface_regs[i];
		if (!reg->bo)
			break;

		old_object = reg->bo;
		if (old_object->pin_count == 0)
			steal = i;
	}

	/* if we are all out */
	if (i == RADEON_GEM_MAX_SURFACES) {
		if (steal == -1)
			return -ENOMEM;
		/* find someone with a surface reg and nuke their BO */
		reg = &rdev->surface_regs[steal];
		old_object = reg->bo;
		/* blow away the mapping */
		DRM_DEBUG("stealing surface reg %d from %p\n", steal, old_object);
		ttm_bo_unmap_virtual(&old_object->tbo);
		old_object->surface_reg = -1;
		i = steal;
	}

	bo->surface_reg = i;
	reg->bo = bo;

out:
	radeon_set_surface_reg(rdev, i, bo->tiling_flags, bo->pitch,
			       bo->tbo.mem.start << PAGE_SHIFT,
			       bo->tbo.num_pages << PAGE_SHIFT);
	return 0;
}

/* Release the surface register held by the BO, if any. */
static void radeon_bo_clear_surface_reg(struct radeon_bo *bo)
{
	struct radeon_device *rdev = bo->rdev;
	struct radeon_surface_reg *reg;

	if (bo->surface_reg == -1)
		return;

	reg = &rdev->surface_regs[bo->surface_reg];
	radeon_clear_surface_reg(rdev, bo->surface_reg);

	reg->bo = NULL;
	bo->surface_reg = -1;
}

/* Store new tiling flags/pitch on the BO (reserves it internally). */
int radeon_bo_set_tiling_flags(struct radeon_bo *bo,
				uint32_t tiling_flags, uint32_t pitch)
{
	int r;

	r = radeon_bo_reserve(bo, false);
	if (unlikely(r != 0))
		return r;
	bo->tiling_flags = tiling_flags;
	bo->pitch = pitch;
	radeon_bo_unreserve(bo);
	return 0;
}

/* Read back the BO's tiling flags/pitch; caller must hold the reservation. */
void radeon_bo_get_tiling_flags(struct radeon_bo *bo,
				uint32_t *tiling_flags,
				uint32_t *pitch)
{
	BUG_ON(!atomic_read(&bo->tbo.reserved));
	if (tiling_flags)
		*tiling_flags = bo->tiling_flags;
	if (pitch)
		*pitch = bo->pitch;
}

/*
 * Keep the surface register assignment in sync with the BO's placement:
 * drop it when the BO leaves VRAM (or on force_drop), (re)acquire it when
 * a surface-tiled BO sits in VRAM.  Caller must hold the reservation.
 */
int radeon_bo_check_tiling(struct radeon_bo *bo, bool has_moved,
				bool force_drop)
{
	BUG_ON(!atomic_read(&bo->tbo.reserved));

	if (!(bo->tiling_flags & RADEON_TILING_SURFACE))
		return 0;

	if (force_drop) {
		radeon_bo_clear_surface_reg(bo);
		return 0;
	}

	if (bo->tbo.mem.mem_type != TTM_PL_VRAM) {
		if (!has_moved)
			return 0;

		if (bo->surface_reg >= 0)
			radeon_bo_clear_surface_reg(bo);
		return 0;
	}

	if ((bo->surface_reg >= 0) && !has_moved)
		return 0;

	return radeon_bo_get_surface_reg(bo);
}

/* TTM move notification: drop surface regs and invalidate VM mappings. */
void radeon_bo_move_notify(struct ttm_buffer_object *bo,
			   struct ttm_mem_reg *mem)
{
	struct radeon_bo *rbo;
	if (!radeon_ttm_bo_is_radeon_bo(bo))
		return;
	rbo = container_of(bo, struct radeon_bo, tbo);
	radeon_bo_check_tiling(rbo, 0, 1);
	radeon_vm_bo_invalidate(rbo->rdev, rbo);
}

/*
 * CPU fault handler hook: if the faulting BO lives in VRAM outside the
 * CPU-visible aperture, migrate it into the visible range first.
 */
int radeon_bo_fault_reserve_notify(struct ttm_buffer_object *bo)
{
	struct radeon_device *rdev;
	struct radeon_bo *rbo;
	unsigned long offset, size;
	int r;

	if (!radeon_ttm_bo_is_radeon_bo(bo))
		return 0;
	rbo = container_of(bo, struct radeon_bo, tbo);
	radeon_bo_check_tiling(rbo, 0, 0);
	rdev = rbo->rdev;
	if (bo->mem.mem_type == TTM_PL_VRAM) {
		size = bo->mem.num_pages << PAGE_SHIFT;
		offset = bo->mem.start << PAGE_SHIFT;
		if ((offset + size) > rdev->mc.visible_vram_size) {
			/* hurrah the memory is not visible ! */
			radeon_ttm_placement_from_domain(rbo, RADEON_GEM_DOMAIN_VRAM);
			rbo->placement.lpfn = rdev->mc.visible_vram_size >> PAGE_SHIFT;
			r = ttm_bo_validate(bo, &rbo->placement, false, true, false);
			if (unlikely(r != 0))
				return r;
			offset = bo->mem.start << PAGE_SHIFT;
			/* this should not happen */
			if ((offset + size) > rdev->mc.visible_vram_size)
				return -EINVAL;
		}
	}
	return 0;
}

/* Wait for pending GPU use of the BO; optionally report its memory type. */
int radeon_bo_wait(struct radeon_bo *bo, u32 *mem_type, bool no_wait)
{
	int r;

	r = ttm_bo_reserve(&bo->tbo, true, no_wait, false, 0);
	if (unlikely(r != 0))
		return r;
	spin_lock(&bo->tbo.bdev->fence_lock);
	if (mem_type)
		*mem_type = bo->tbo.mem.mem_type;
	if (bo->tbo.sync_obj)
		r = ttm_bo_wait(&bo->tbo, true, true, no_wait);
	spin_unlock(&bo->tbo.bdev->fence_lock);
	ttm_bo_unreserve(&bo->tbo);
	return r;
}

/**
 * radeon_bo_reserve - reserve bo
 * @bo:		bo structure
 * @no_wait:		don't sleep while trying to reserve (return -EBUSY)
 *
 * Returns:
 * -EBUSY: buffer is busy and @no_wait is true
 * -ERESTARTSYS: A wait for the buffer to become unreserved was interrupted by
 * a signal. Release all buffer reservations and return to user-space.
 */
int radeon_bo_reserve(struct radeon_bo *bo, bool no_wait)
{
	int r;

	r = ttm_bo_reserve(&bo->tbo, true, no_wait, false, 0);
	if (unlikely(r != 0)) {
		if (r != -ERESTARTSYS)
			dev_err(bo->rdev->dev, "%p reserve failed\n", bo);
		return r;
	}
	return 0;
}

/* object have to be reserved */
/* Find the per-VM binding of @rbo in @vm, or NULL when it is not mapped. */
struct radeon_bo_va *radeon_bo_va(struct radeon_bo *rbo,
					 struct radeon_vm *vm)
{
	struct radeon_bo_va *bo_va;

	list_for_each_entry(bo_va, &rbo->va, bo_list) {
		if (bo_va->vm == vm) {
			return bo_va;
		}
	}
	return NULL;
}
{'repo_name': 'shinpei0208/gdev', 'stars': '162', 'repo_language': 'C', 'file_name': 'CMakeLists.txt', 'mime_type': 'text/plain', 'hash': -2177450548924289947, 'source_dataset': 'data'}
// Copyright (C) 2005-2020 Free Software Foundation, Inc. // // This file is part of the GNU ISO C++ Library. This library is free // software; you can redistribute it and/or modify it under the // terms of the GNU General Public License as published by the // Free Software Foundation; either version 3, or (at your option) // any later version. // This library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License along // with this library; see the file COPYING3. If not see // <http://www.gnu.org/licenses/>. // 25.2.4 replace_copy_if // { dg-do compile } #include <algorithm> #include <testsuite_iterators.h> using __gnu_test::input_iterator_wrapper; using __gnu_test::output_iterator_wrapper; struct X { }; struct Y { }; struct Z { Z& operator=(const X&) { return *this; } Z& operator=(const Y&) { return *this; } }; bool predicate(const X&) { return true; } output_iterator_wrapper<Z> test1(input_iterator_wrapper<X>& begin, input_iterator_wrapper<X>& end, output_iterator_wrapper<Z>& output, const Y& new_val) { return std::replace_copy_if(begin, end, output, predicate, new_val); }
{'repo_name': 'gcc-mirror/gcc', 'stars': '3986', 'repo_language': 'C', 'file_name': 'epiphany.opt', 'mime_type': 'text/plain', 'hash': 8311734424936644947, 'source_dataset': 'data'}
/**
 * Copyright © 2002 Instituto Superior Técnico
 *
 * This file is part of FenixEdu Academic.
 *
 * FenixEdu Academic is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * FenixEdu Academic is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with FenixEdu Academic. If not, see <http://www.gnu.org/licenses/>.
 */
package org.fenixedu.academic.util.report;

import java.util.Arrays;
import java.util.Collection;
import java.util.Map;
import java.util.Objects;

import org.fenixedu.academic.util.report.ReportPrinter.ReportDescription;
import org.fenixedu.academic.util.report.ReportPrinter.ReportResult;

/**
 * Static facade over a pluggable {@link ReportPrinter}. Until a real printer
 * is installed via {@link #setPrinter(ReportPrinter)}, every generate call
 * fails with {@link UnsupportedOperationException}.
 */
public class ReportsUtils {

    // Active printer; the default stub rejects every print request.
    private static ReportPrinter printer = reports -> {
        throw new UnsupportedOperationException("Cannot print reports: " + Arrays.toString(reports));
    };

    /**
     * Installs the printer used by all subsequent generate calls.
     *
     * @param printer the printer implementation, never {@code null}
     * @throws NullPointerException if {@code printer} is {@code null}
     */
    public static void setPrinter(ReportPrinter printer) {
        ReportsUtils.printer = Objects.requireNonNull(printer);
    }

    /**
     * Prints the given report descriptions through the installed printer.
     *
     * @param reports the reports to print
     * @return the printer's result
     * @throws RuntimeException wrapping any exception the printer raises
     */
    public static ReportResult generateReport(final ReportDescription... reports) {
        final ReportPrinter delegate = printer;
        try {
            return delegate.printReports(reports);
        } catch (Exception ex) {
            throw new RuntimeException("Exception while generating reports '" + Arrays.toString(reports) + "'", ex);
        }
    }

    /**
     * Prints a single report identified by {@code key} through the installed
     * printer.
     *
     * @param key the report template key
     * @param parameters named parameters handed to the printer
     * @param dataSource the report's data rows
     * @return the printer's result
     * @throws RuntimeException wrapping any exception the printer raises
     */
    public static ReportResult generateReport(final String key, final Map<String, Object> parameters,
            final Collection<?> dataSource) {
        final ReportPrinter delegate = printer;
        try {
            return delegate.printReport(key, parameters, dataSource);
        } catch (Exception ex) {
            throw new RuntimeException("Exception while generating report '" + key + "'", ex);
        }
    }
}
{'repo_name': 'FenixEdu/fenixedu-academic', 'stars': '154', 'repo_language': 'Java', 'file_name': 'ImportMessaging.java', 'mime_type': 'text/x-java', 'hash': 4772630883848453292, 'source_dataset': 'data'}
/* Base page styles and sprite-sheet toolbar buttons. All a.btt-* / a.arr-*
   anchors are image buttons sliced out of ../images/btt-buttons.png via
   background-position; text-indent:-9999px hides the link text. */
@import url("class.css"); body { font-family:Arial, sans-serif; background:#F3F3F3; overflow:hidden; } input { padding:0; margin:0; } a { outline:none; } /* Buttons */ a.btt, a.arr { display:block; background:url(../images/btt-buttons.png) no-repeat 0 0; text-indent:-9999px; line-height:0; } a.arr-add { width:13px; height:13px; background-position:-38px -562px; } a.arr-add:hover { background-position:-38px -580px; } a.arr-remove { width:13px; height:13px; background-position:-72px -562px; } a.arr-remove:hover { background-position:-72px -580px; } a.arr-duplicate { width:13px; height:13px; background-position:-177px -562px; } a.arr-duplicate:hover { background-position:-177px -580px; } a.btt-design { width:52px; height:22px; background-position:-249px -917px; } a.btt-design:hover { background-position:-249px -1000px; } a.btt-design.active { background-position:-249px -960px; } a.btt-present { width:58px; height:22px; background-position:-306px -917px; } a.btt-present:hover { background-position:-306px -1000px; } a.btt-present.active { background-position:-306px -960px; } a.btt-login { width:43px; height:22px; background-position:-17px -917px; } a.btt-login:hover { background-position:-17px -1000px; } a.btt-login.active { background-position:-17px -960px; } a.btt-save { width:40px; height:22px; background-position:-108px -917px; } a.btt-save:hover { background-position:-108px -1000px; }
/* NOTE(review): the .active offsets for btt-save and btt-load below reuse the
   btt-login position (-17px -960px) instead of their own column — looks like a
   copy-paste slip; confirm against the sprite sheet before changing. */
a.btt-save.active { background-position:-17px -960px; } a.btt-load { width:40px; height:22px; background-position:-160px -917px; } a.btt-load:hover { background-position:-160px -1000px; } a.btt-load.active { background-position:-17px -960px; } a.btt-edit-master { width:90px; height:26px; background-position:-343px -1226px; } a.btt-edit-master:hover { background-position:-343px -1269px; } a.btt-edit-instance { width:90px; height:26px; background-position:-451px -1226px; } a.btt-edit-instance:hover { background-position:-451px -1269px; } a.btt-ok { width:35px; height:26px;
background-position:-495px -211px; } a.btt-ok:hover { background-position:-495px -182px; } a.btt-paste { width:48px; height:26px; background-position:-201px -415px; } a.btt-paste:hover { background-position:-201px -385px; } a.btt-edit-all { width:57px; height:26px; background-position:-544px -212px; } a.btt-edit-all:hover { background-position:-544px -182px; } a.btt-edit-all.active { background-position:-544px -152px; } a.btt-new-master-object { width:128px; height:26px; background-position:-495px -1039px; } a.btt-new-master-object:hover { background-position:-495px -1103px; } a.btt-new-master-object.active { background-position:-495px -980px; } a.btt-instance-of-master { width:90px; height:40px; background-position:-635px -979px; } a.btt-instance-of-master:hover { background-position:-635px -1102px; } a.btt-instance-of-master.active { background-position:-635px -1038px; } a.btt-save-locally { width:90px; height:26px; background-position:-397px -212px; } a.btt-save-locally:hover { background-position:-397px -182px; } a.btt-load-file { width:66px; height:26px; background-position:-325px -212px; position: relative; } a.btt-load-file:hover { background-position:-325px -182px; } a.btt-selection { width:28px; height:28px; background-position:-57px -1267px; } a.btt-selection:hover { background-position:-160px -1267px; } a.btt-selection.active { background-position:-109px -1267px; } a.btt-object { width:28px; height:28px; background-position:-57px -1301px; } a.btt-object:hover { background-position:-160px -1301px; } a.btt-object.active { background-position:-109px -1301px; } a.btt-text { width:28px; height:28px; background-position:-57px -1335px; } a.btt-text:hover { background-position:-160px -1335px; } a.btt-text.active { background-position:-109px -1335px; } a.btt-form { width:28px; height:28px; background-position:-57px -1369px; } a.btt-form:hover { background-position:-160px -1369px; } a.btt-form.active { background-position:-109px -1369px; } a.btt-exp-threads {
width:28px; height:28px; background-position:-57px -1403px; } a.btt-exp-threads:hover { background-position:-160px -1403px; } a.btt-exp-threads.active { background-position:-109px -1403px; } a.btt-delete { margin:5px 3px 0 0; width:17px; height:17px; background-position:-713px -156px; } a.btt-delete:hover { background-position:-713px -187px; } a.btt-add { width:26px; height:26px; background-position:-611px -212px; } a.btt-add:hover { background-position:-611px -182px; } a.btt-see-all { width:58px; height:26px; background-position:-736px -213px; } a.btt-see-all:hover { background-position:-736px -182px; } a.btt-close { width:52px; height:26px; background-position:-649px -212px; } a.btt-close:hover { background-position:-649px -182px; } a.btt-frm-input-field { width:80px; height:60px; background-position:-10px -1490px; } a.btt-frm-input-field:hover { background-position:-10px -1552px; }
/* fix: rule previously declared width:80px AND width:70px; the later 70px won
   the cascade, so only the effective value is kept. */
a.btt-frm-checkbox { width:70px; height:60px; background-position:-92px -1490px; } a.btt-frm-checkbox:hover { background-position:-92px -1552px; } a.btt-frm-radio-button { width:80px; height:60px; background-position:-164px -1490px; } a.btt-frm-radio-button:hover { background-position:-164px -1552px; } a.btt-frm-textarea { width:80px; height:60px; background-position:-246px -1490px; } a.btt-frm-textarea:hover { background-position:-246px -1552px; } a.btt-frm-pulldown { width:80px; height:60px; background-position:-328px -1490px; } a.btt-frm-pulldown:hover { background-position:-328px -1552px; } a.btt-frm-button { width:80px; height:60px; background-position:-410px -1490px; } a.btt-frm-button:hover { background-position:-410px -1552px; } a.btt-inbetween { width:8px; height:4px; background-position:-352px -1417px; } a.btt-inbetween:hover { background-position:-272px -1417px; } a.btt-thread { width:28px; height:28px; background-position:-242px -1405px; } a.btt-thread:hover { background-position:-282px -1405px; } a.btt-thread.active { background-position:-322px -1405px; }
/* Remaining sprite buttons, page layout (#wrapper/#header/#toolbox/#workspace/
   #rightpanel/#footer), save/load popups and the experience-thread bar. */
a.btt-add-thread { width:28px; height:28px; background-position:-362px -1405px; } a.btt-add-thread:hover { background-position:-362px -1440px; } a.btt-snapshot { width:28px; height:28px; background-position:-491px -1363px; } a.btt-snapshot:hover { background-position:-533px -1363px; } a.btt-action { width:28px; height:28px; background-position:-491px -1397px; } a.btt-action:hover { background-position:-533px -1397px; } a.btt-states { width:36px; height:36px; background-position:-591px -50px; } a.btt-states.active { background-position:-511px -50px; } a.btt-states:hover { background-position:-551px -50px; } .w-hold { display:block; width:116px; height:45px; background:url(../images/btt-buttons.png) no-repeat 0 0; text-indent:-9999px; line-height:0; } .w-hold.holding-z { background-position:-677px -530px !important; } .w-hold.holding-x { width:110px; height:78px; background-position:-563px -566px !important; } .w-hold.hold-c { float:right; position:fixed; bottom:-5px; right:-5px; width:112px; height:37px; background-position:-122px -476px; } .bg-over { background:url(../images/btt-buttons.png); background-position:-26px -365px; } /* WRAPPER */ #wrapper { float:left; width:100%; margin-bottom:2px; } #header { display:block; height:37px; background:#F3F3F3 url(../images/btt-buttons.png) no-repeat 0 0; } #toolbox { background:#F3F3F3; float:left; width:38px; } #workspace { float:left; display:block; margin-top:1px; } #rightpanel { background:#F3F3F3; float:right; width:140px; font-size:9px; position:relative; } #footer { clear:both; background:#F3F3F3; color:#DFDFDF; padding:0; width:100%; height:37px; font-size:10px; } /* Header */ #header #wrap, #header #project #fProjName, #header #project #fThreadName, #header #project input#fEditing { float:left; } #header #logo { float:left; width:38px; height:37px; } /* Change View: Design / Present */ #header #change-view { float:left; width:150px; padding:7px 0 0 50px; } #header #change-view a { float:left; margin:0 5px; }
/* Project: Name - Login - Save and Load */
#header #project { float:left; padding:5px 0 0 45px; font-size:18px; color:#999999; font-family:Verdana; font-weight:bold; } #header #hSave { padding:7px 0 0 0; position:absolute; right:0; top:0; } #header #hSave a { float:left; margin:0 3px; } #header #hSave > div, #hSave > a { float:left; } #header #hSave a#fBLogout { float:right; margin-top:1px; } .hButton { padding:0 5px 0 0; border:0; } #header #hLoggedOut { } #header #hLoggedIn { text-align:right; padding:0 10px 0 0; width:260px; height:15px; display:none; } /* On hover over anchor buttons */ /* SaveLoad Manager Stuff */ #fSaveLoad { position:absolute; top:0px; right:0px; width:205px; z-index:100001; display:none; }
/* NOTE(review): 'border-radius-bottomleft' below is not a standard property
   (standard is border-bottom-left-radius), and the -webkit- spelling here is
   also non-standard, so only -moz- may have ever applied — confirm intent. */
#fSLSave, #fSLLoad, #fSLLogin { display:none; } .fSLCore { background: #4c4c4c; padding: 20px 0px 5px 10px; border-left:5px solid #4c4c4c; border-bottom:7px solid #4c4c4c; border-radius-bottomleft:4px; -webkit-border-radius-bottomleft:4px; -moz-border-radius-bottomleft:4px; } /* fSLLogin: Login Module */ #fSLLogin {} #fSLLogin .d-menu { position:absolute; top:7px; right:0; } #fSLLogin .d-menu a { opacity:0.2; } #fSLLogin .d-inside {} #fSLLogin .d-inside h3 { margin:0; font-family:Verdana, serif; font-size:18px; font-weight:bold; color:#999; } #fSLLogin .d-inside small { margin:0 0 10px 0; font-family:Verdana, serif; font-size:11px; font-weight:normal; color:#CCC; } #fSLLogin .d-inside .d-wrap { margin:3px 0 5px 0; } #fSLLogin .d-inside .d-wrap input { width:95px; } #fSLLogin .d-inside a.btt-ok { margin-top:5px; } /* fSLSave: Save Module */ #fSLSave { } #fSLSave .d-menu { position:absolute; top:7px; right:0; } #fSLSave .d-menu a { float:left; margin:0 6px 0 0; opacity:0.2; } #fSLSave .d-menu a.btt-load { margin-right:0; } #fSLSave .d-inside {} #fSLSave .d-inside h3 { margin:0; font-family:Verdana, serif; font-size:18px; font-weight:bold; color:#999; } #fSLSave .d-inside small { margin:0 0 10px 0; font-family:Verdana, serif; font-size:11px;
font-weight:normal; color:#CCC; } #fSLSave .d-inside .d-wrap { margin:10px 0; } #fSLSave .d-inside .d-wrap a.btt-ok { float:right; margin:-3px 0 0 0; } #fSLSave .d-inside .d-wrap span.s-url { font-size:9px; color:#999; } #fSLSave .d-inside .d-wrap input.ipt-name { width:95px; } #fSLSave .d-inside .d-wrap #fTaken { display:none; position:absolute; right:49px; width:71px; padding:10px; color:#222; background:#EEE; border:1px solid #FFF; } #fSLSave .d-inside a.btt-save-locally { display:block; margin:3px 0 0 -2px; } /* fSLLoad: Load Module */ #fSLLoad {} #fSLLoad .d-menu { position:absolute; top:7px; right:0; } #fSLLoad .d-menu a { float:left; margin:0 6px 0 0; opacity:0.2; } #fSLLoad .d-menu a.btt-load { margin-right:0; } #fSLLoad .d-inside {} #fSLLoad .d-inside h3 { margin:0; font-family:Verdana, serif; font-size:18px; font-weight:bold; color:#999; } #fSLLoad .d-inside small { margin:0 0 10px 0; font-family:Verdana, serif; font-size:11px; font-weight:normal; color:#CCC; } #fSLLoad .d-inside ul { margin:15px 0; } #fSLLoad .d-inside ul li { color:#999; font-size:9px; padding:3px 0; border-bottom:1px solid #666; } #fSLLoad .d-inside ul li a { color:#999; } #fSLLoad .d-inside a.btt-load-file { display:block; margin:3px 0 0 -2px; } /* Left: Toolbox's anchors */ #toolbox a { margin:0 0 5px 5px; } /* Workspace */ #fNotify { background:#4C4C4C; border:4px solid #4C4C4C; border-radius:6px; -webkit-border-bottom-right:6px; -moz-border-radius-bottomright:6px; position:absolute; height:30px; left:38px; z-index:100002; padding:10px 10px 0 10px; margin:1px 0 0 0; color:#FFFFFF; font-size:18px; font-family:Verdana; display:none; opacity:0; } .fWorkspace { border:1px solid #dFdFdF; overflow:hidden; position:absolute; background:#FFF; } /* Experience Threads */ #fExpThreadBar { position:absolute; z-index:100001; height:48px; margin:1px 0 0 1px; display:none; background:#B2B2B2; } #fExpTBArrow { width:10px; height:7px; background:url(../images/btt-buttons.png) -290px -1395px
!important; position:absolute; top:-47px; left:26px; } #fExpTBThread { float:left; padding:10px 0 0 15px; } #fExpTBThread * { float:left; } #fExpTBThread a.btt-inbetween { margin:12px 5px 0 5px; } #fExpTBControls { float:right; padding:10px; } #fExpTBControls a { float:left; margin:0 0 0 5px; } #fExpTBControls a.btt-delete { margin:5px 0 5px 0; } #fExpTBOver { width:58px; top:48px; display:none; position:absolute; text-align:center; } #fExpTBOver div { width:48px; margin:0 auto; background:#B4B4B4; } #fExpTBOver div a { margin:0 auto; padding:3px 0 3px 0; } /* Add Thread */ #fExpAdd { display:none; width:390px; position:absolute; top:100px; margin-left:30%; } .fExpAddCore { width:360px; background:#4C4C4C; padding:10px 15px 10px 15px; margin:0; font-size:1px; position:relative; border:5px solid #4C4C4C; border-radius:5px; -moz-border-radius:5px; -webkit-border-radius:5px; } /* See all */ #fExpOverview { display:none; background:url('../images/bg-exp-thread-overview.png') repeat-y; position:absolute; z-index:10000; margin:1px 0 0 1px; } /* Right - Pages - Panel */ .panelTitle { padding:4px; font-family:Verdana; font-size:10px; font-weight:bold; color:#ffffff; background:#888; margin-bottom:1px; } .fPanelItemsList { overflow:hidden; clear:left; position:relative; cursor:auto; }
/* NOTE(review): height:-40px is invalid CSS (negative heights are ignored by
   browsers), so this rule effectively only sets position:relative — was a
   negative margin intended? TODO confirm. */
.panelItemsListCollapse { position:relative; height:-40px; } .panelItemSelected { background: #333333 !important; color:#FFF !important; } .fPanelItemsList div:hover, #fRightClickMenu div:hover{ background:#E5E5E5; } /* Right - Pages - Panel Navigation: Up and Down */ .panel-navigation { display:block; } .panel-navigation a { display:block; width:138px; height:6px; background:url(../images/btt-buttons.png) no-repeat 0 0; text-indent:-9999px; line-height:0; } .panel-navigation a.panelArrowUp { background-position:-332px -1085px; } .panel-navigation a.panelArrowUp:hover { background-position:-332px -1048px; } .panel-navigation a.panelArrowDown { background-position:-332px -1102px; }
/* Panel chrome, state/idea/form managers, typography helpers, and the
   workspace object/selection styles. */
.panel-navigation a.panelArrowDown:hover { background-position:-332px -1065px; } /* Right - Pages - Panel Icons: Add, Remove, Duplicate */ .panelIcons { background:#F4F4F4; width:100%; bottom:0; right:0; position:absolute; } .panelIcons a { float:left; display:block; margin:0px 2px 1px 2px; width:13px; height:13px; } /* Editing: Ideas */ .fFCore { background:#4C4C4C; height:60px; display:inline; float:left; border:5px solid #4C4C4C; border-radius-topleft:5px; border-radius-topright:5px; -moz-border-radius-topleft:5px; -moz-border-radius-topright:5px; -webkit-border-radius-topleft:5px; -webkit-border-radius-topright:5px; } /* All Instance */ .fFInstCore { height:60px; background:#CCC; width:190px; padding-left:10px; border:5px solid #CCC; border-radius-topleft:5px; border-radius-topright:5px; -moz-border-radius-topleft:5px; -moz-border-radius-topright:5px; -webkit-border-radius-topleft:5px; -webkit-border-radius-topright:5px; } /* Form Manager */ #fFormManager {} #fFormManager .fFMCore { padding:10px 0 10px 0; } #fFormManager .fFMCore a { float:left; margin:0 5px 0 0; } /* Edit States */ #fFStates { width:590px; border:5px solid #4C4C4C; border-radius-topleft:5px; border-radius-topright:5px; -moz-border-radius-topleft:5px; -moz-border-radius-topright:5px; -webkit-border-radius-topleft:5px; -webkit-border-radius-topright:5px; } /* Edit States: Edit Master + Press V */ #fFStates #fSMIToggle { position:absolute; margin-top:8px; left:750px; } /* Paste as */ #fPManager { position:absolute; width:270px; top:100px; left:100px; display:none; }
/* fix: this rule declared 'background' twice with the same colour
   (#4C4C4C then #4c4c4c); the dead first declaration was removed. */
#fPManager .fCBCorePaste { padding:20px 0 20px 0; text-align:center; background:#4c4c4c; border:4px solid #4c4c4c; border-radius:4px; -moz-border-radius:4px; } #fPManager .fCBCorePaste .w-buttons { width:223px; margin:0 auto; } #fPManager a.btt-new-master-object { float:left; } #fPManager a.btt-instance-of-master { float:right; } #fPManager a.btt-paste { margin:0 auto; } /* SET ACTION */ #fExpAct { width:390px;
position:absolute; top:100px; margin-left:30%; display:none; cursor:move; } #fExpAct .fInput { width:100%; } /* fStateManager - STATES: Holding Z */ #fStateManager { position:absolute; height:80px; top:50px; left:100px; display:none; } #fStateManager .fSMCore { padding:5px; } #fStateManager .fSMCore table { margin:0; padding:0; border: 0px;} #fStateManager .fSMCore table p { font-size:12px; color:#EEE; } #fStateManager .fSMCore table small { font-size:11px; color:#8C95C8; margin:0 0 0 10px; } #fStateManager .fSMCore table a#editAll { margin:10px 0 15px 13px; position: relative; } #fStateManager .fSMCore table #fSMStateName { white-space:nowrap; vertical-align:top; padding: 2px 0 0 2px; } #fStateManager .fSMCore table .fSMArrows { width:31px; margin:0 auto; padding: 0 0 15px 5px; } #fStateManager .fSMCore table .fSMArrows a { float:left; } #fStateManager .fSMCore table .fSMArrows a.arr-add { margin:0 5px 0 0; } #fStateManager .fSMCore table .fSMStates { padding-top: 8px; } #fStateManager .fSMCore table .fSMStates a { padding-bottom: 5px; } /* fIdeaManager - Ideas STATES - Holding X */ #fIdeaManager { position:absolute; top:100px; left:100px; display:none; } #fIdeaManager #fIMCore { width:100px; background:#4C4C4C; padding:10px; margin:0; font-size:12px; border:4px solid #4C4C4C; border-radius:4px; -moz-border-radius:4px; position: relative; } #fIdeaManager #fIMCore .fIMHeader { position:absolute; top:10px; right:-5px; width:65px; } #fIdeaManager #fIMCore .fIMIdea { padding:5px 0 0 0; margin:0; position:relative; } #fIdeaManager #fIMCore .fIMArrows { margin:10px 0 0 1px; padding:0; } #fIdeaManager #fIMCore .fIMArrows a { float:left; } #fIdeaManager #fIMCore .fIMArrows a.arr-add { margin:0 5px 0 0; } .fSMCore, .fFMCore { background:#4C4C4C; float:left; padding:5px 5px 0 0; margin:0; font-size:1px; border:6px solid #4C4C4C; border-radius:6px; -webkit-border-radius:6px; -moz-border-radius:6px; } .fWorkspaceExpanded { margin:0 102px 0 0 !important; } /* Text */ .fTThick,
.fTThickDrk { font:bold 32px Verdana; color:#4C4C4C; } /* h3, .h3 = h-thick, h-thick-dark */ .fTThick2 { font:bold 18px Verdana; color:#999; } /* h4, .h4 = h-thick-18 */ .fT18 { font:18px Verdana; color:#999; } /* h5, h-18 */ .fT12 { font:12px Verdana; color:#FFF; } /* h6, h-12 */ .fT11 { font:11px Verdana; color:#CCC; } /* h7, h-11 */ .fTsmall { font:bold 10px verdana; color:#FFF; } /* h8, h-10 */ .fT9 { font:normal 9px Verdana; color:#CCC; } /* h9, h-9 */ .fTI { font-style:italic; } /* h-italic */ .fTB { font-weight:bold; } /* h-bold */ .fTDim { color:#999; } /* h-999 */ .fTThickDrk { color:#333; } /* h-333 */ .fTBlue { color:#8C92C5 !important; } /* h-blue */ .fTOrange { color: #FF6600 !important; } /* h-orange */ .fLink { text-decoration:none; } .fLink:hover { color:#ffffff; text-decoration:underline; } /* form elements */ .fInput { border:1px solid #000000; background-color:#cccccc; padding:3px; color:#2f2f2f; } .fOptional { background-color:#737373; } .fOptional:focus { background-color:#cccccc; } #fNotifyRound { position:absolute; top:0px; right:-7px; } /* EditMaster Overlay */ #fEditMasterOverlay { background:url('../images/bg_editMaster.png'); position:absolute; z-index:10001; width:100%; height:100%; } /* List */ .fList { margin:3px 0 0 0; padding:0px; list-style:none; border-top:1px solid #666666; } .fList li { font:9px verdana; padding:3px 0 3px 0; color:#999999; border-bottom:1px solid #666666; } .fList li a { color:inherit; text-decoration:none; } .fList li:hover { background-color: #333333; color:#ffffff; cursor:pointer; } /* Panels */ .panel { width:138px; background-color:#f3f3f3; border:1px solid #DFDFDF; margin-bottom:3px; position:absolute; right:0px; height:100%; z-index:10001; } #fRightClickMenu { width:200px; background-color:#ffffff; border:1px solid #666666; margin-bottom:3px; position:absolute; z-index:10003; right:0px; } #fRightClickMenu div a { font-family:verdana; font-size:9px; color:#616DB4; text-decoration:none; } .panelItem,
#fRightClickMenu div { padding:2px; padding-left:4px; background-color:#F4F4F4; font-family:verdana; font-size:9px; color:#666666; cursor:pointer; } /* Workspace and items inside of it */ .fObject { border:1px solid #dfdfdf; background:#FFF; position:absolute; text-align:center; } .fForm { position:absolute !important; cursor:default; opacity:1; } .p1 {} .p2 { background:#dfdfdf; } .p3 { background:#5f5f5f; } .p4 { background:#1f1f1f; }
/* NOTE(review): 'wrap-option:softwrap' is not a standard CSS property; the
   white-space:pre-wrap declaration is what actually applies — confirm before
   removing. */
.fText { background:#FFF url('../images/bg_txt01.png') 0px 1px repeat; position:absolute; font-size:12px; text-align:left; wrap-option:softwrap; white-space:pre-wrap !important; } .fTextHasTxt { outline:0px solid #ffffff; border:1px solid transparent; background:url('') !important; color:#909090; } .fStateLabel { top:0px; right:0px; margin-right:-19px; margin-top: -1px; height:10px; width:12px; padding:0 2px 0 2px; background-color:#CFCFCF; color:#ffffff; font-size:8px; position:absolute; text-transform:capitalize; font-weight:bold; font-family:verdana; letter-spacing:-1px; } .fLabelHolder { top:50%; display:inline; position:relative; } .fLabel { top:-6px; display:inline; font-size:10px; color:#9f9f9f; position:relative; } .fLBracket { color:#6f6f6f; } div.fLabel:hover { color:#333333; } .fHighlight { position:absolute; top:0px; left:0px; outline:1px solid #000000; outline-offset:1px; text-align:left; width:100%; height:100%; } .parent { background-image:url('../images/bg_parent.gif'); background-position:top right; background-repeat:no-repeat; } .selected.fObject,.selectedWorkspace, .selectedTxt { position: absolute; border-color:#0f0f0f; } .selected.fForm { outline:1px solid #2f2f2f; opacity:1; } .selectedInst { /*border-color: #D95700; background-image: url('../images/bg_selectedInst.gif');*/ }
/* fix: '.selectedTxt { border:1px solid; }' appeared twice with identical
   declarations; the redundant second copy was removed. */
.selectedTxt { border: 1px solid; } .toBeCut { opacity: 0.3; } /*inside items */ div.selected.fInsideMaster, .selected div.fInsideMaster { border-color:#626FB5; } div.selected.fInsideInstance, .selected
div.fInsideInstance { border-color:#FF6600; } div.selectedTxt.fInsideMasterText, .selected div.fInsideMasterText { background:url('../images/bg_txtM01.png') 0px 1px repeat; color:#626FB5; } div.selectedTxt.fInsideInstanceText, .selected div.fInsideInstanceText { background:url('../images/bg_txtI01.png') 0px 1px repeat; color:#FF6600; } .fForm.selected.fInsideMaster, .selected .fForm.fInsideMaster { outline:1px solid #626FB5; opacity:1; } .fForm.selected.fInsideInstance, .selected .fForm.fInsideInstance { outline:1px solid #FF6600; opacity:1; } input[type="text"].fForm, textarea.fForm { border:1px inset #cfcfcf; } input[type="button"].fForm, input[type="text"].fForm, select.fForm { width:100px; } /*cursor handling*/ .cursorMove { cursor:move !important; } div.resizable { cursor:auto; } .cursorCrosshair { cursor:crosshair; } .pasteFillInst { background-color:#D95700; width:100%; height:100%; position:absolute; float:left; opacity:1; z-index:100000; } /**** Footer Items - separate states *****/ #fFooterOI, #fFooterText, #fFooterForm, #fFooterWorkspace { padding-left:45px; position:absolute; display:none; z-index:10003; } #fFooterWorkspace { display:block; } /**** Footer Items - generic classes *****/ .fFItem,.fFInstItem { float:left; bottom:0px; height:95px; padding-left:10px; } .fFInstItem { left:873px; bottom:-25px; position:absolute; } .fFTop { height:35px; bottom:3px; position:relative; } #fFInstItems { padding:5px 0 5px 0; } #fFInstItems a img { opacity:0.5; border:0px; } #fFInstItems a { padding:0 2px 2px 0; } #fFooterOI { height:105px !important; } #fFAs { width:150px; text-align:center; } #fFAs * { padding: 18px 1px 0 1px; } #fWHXYHolder { padding:5px 0 0 5px; } #fInstCore, #fStates { height:100%; display:inline; float:left; padding:0 3px 0 3px; } #fInstCore .fIcons, #fStates .fIcons { padding:4px 0px 0px 0px; } #fObjName, #fStateName { text-decoration:none; } .fWSTitle { display:inline; float:left; padding-top:16px; }
/* Footer state controls (#fS* ids, checked/unchecked .fSOff variants),
   editable labels, action cursors/arrows, and the clipboard manager. */
#fSSize,#fSPos,#fSContents,#fSEvents,#fSTone,#fMIToggleText { display:inline; float: left; padding-right:15px; } #fMIToggleText { width:45px; } #fSTone { padding-right:0px; } #fSSize { margin-left:20px; padding-right:20px; } .fSCheck { display:inline; float:left; padding:0 0 0 0; margin:4px 0 0 4px; background:url('../images/checkbox_on.png') no-repeat top left; width:18px; height:16px; cursor:pointer; } .fEditingObj div div.fSTitle { padding:3px 0 0 3px !important; } .fEditingObj div div.fSTitle, .fSTitle { display:inline; float:left; padding:3px 0 0 0px; color:#8C95C8; height:17px; cursor:pointer; } .fSInputLetter { clear:left; margin:2px 0 2px -13px; display:inline; float:left; } .fSInput { display:inline; float:left; margin-left:-1px; } .fSText { clear:left; margin-left:4px; } .fEditingObj div div.fSInput input, .fSInput input { font-size:10px; height:15px; background-color:#626FB5; font-family:arial; border:1px solid #3F4985; color:#ffffff; padding:1px; margin-left:4px; } /* off/unchecked state */ .fSOff div input { font-size:10px; height:15px; background-color:#FF6600; font-family:arial; border:1px solid #CA5100; color:#ffffff; padding:1px; margin-left:4px; } .fSOff .fSTitle { color:#FF6600; } .fSOff .fSCheck { background: url('../images/checkbox_off.png') no-repeat top left; } .fFooterLink { color:#ffffff; } .fEditable { border:1px solid #cccccc; background-color:#ffffff; padding:1px; padding-left:2px; font-size:9px; font-family:verdana; } .fEditableLabel { width:65%; border:1px solid #CFCFCF; display:inline; font-family:inherit; font-size:inherit; } .fEditableText { width:100%; height:100%; border:0px; margin:0px; background-color:#ffffff; position:absolute; font-family:inherit; font-size:inherit; } .fCenter { text-align:center; height:14px; padding-top:2px; } #fCBManager,#fPManager,#fEventManager,#fStateManager,#fIdeaManager,#fFormManager,#fExpAdd,#fExpAct { z-index:100000; } #fEventManager { width: 238px; background-color: #ffffff; border:1px solid
#666666; margin-bottom:3px; float:right; top:100px; left:100px; position:absolute; background:#333333; color:#dfdfdf; padding:5px; } .fActCursor { cursor:url('../images/a_arrowFaded.png'), default; } .fActNone { opacity:0.5; } #fActMouseClear,#fActKeyClear { display:none; } #fExpArrow { position:absolute; z-index:100002; background:url('../images/a_arrow.png'); width:38px; height:38px; display:none; cursor:move; } .fArrowL { background: url('../images/a_arrow_L.png') !important; } .fArrowR { background: url('../images/a_arrow_R.png') !important; } .fArrowDL { background: url('../images/a_arrow_DL.png') !important; } #fExpKey { position:absolute; background:url('../images/a_key.png') no-repeat right top; padding:2px 50px 0 0; z-index:100002; height:38px; top:95px; right:160px; min-width:100px; color:#CD0000; font-weight:bold; font-size:24px; text-align:right; display:none; } /* State Manager*/ #fFormManager { position:absolute; height:80px; top:100px; left:100px; display:none; } .fSMTitle1 { text-align:right; vertical-align:bottom; float:left; margin:2px 4px 0 0; width:95px; height:80px; position:relative; } .fSMState { float:left; padding:0 3px 0 0; margin:0px; width:36px; text-align:center; vertical-align:top; position:relative; } .fSMState input,.fIMIdea input { position:absolute; left:11px; bottom:-17px; margin:0px; padding:0px; width:14px; height:14px; } .fIMIdea input { left:45px; top:15px; } .fSMArrows { padding:3px 3px 0 3px; width:20px; } .fSMStarting { display:block; position:absolute; bottom:13px; right:0px; color:#8C95C8; } /* Clipboard State */ #fCBManager { position:absolute; width:270px; top:100px; left:100px; display:none; } #fCBManager .fCBCore { padding:0px 10px 10px 10px; background:#4c4c4c; border:4px solid #4c4c4c; border-radius:4px; -moz-border-radius:4px; } .fCBContents { display:none; } #CBEmpty { padding:20px; } #fCBInto { width:100%; height:33px; background:url('../images/into.png') no-repeat; vertical-align:middle; padding:10px 0 0 70px;
text-align:left; font-weight: bold; color:#cccccc; } .fThreadOverviewItem { position:absolute; text-decoration:none; cursor:move; } .fExpHoverText { padding:3px 0 0 6px; } #fTempJData { width:650px; height:550px; background-color:#ffffff; border:1px solid #666666; margin-bottom:3px; float:right; top:20px; left:400px; position:absolute; color:#000000; padding:5px; } #fTempJData textarea { width:100%; height:100%; } .hiddenSubmit { display: none; } /* ----------------------------------------------------------------- // ------------------------------------------------------------------------ */
{'repo_name': 'linowski/fluidia', 'stars': '125', 'repo_language': 'JavaScript', 'file_name': 'project01.json', 'mime_type': 'text/plain', 'hash': 3251487100552507507, 'source_dataset': 'data'}
/*
 * Copyright (C) Igor Sysoev
 * Copyright (C) Nginx, Inc.
 */

/*
 * ngx_radix_tree_t is, like ngx_rbtree_t, a binary search tree, and it shares
 * the red-black tree's strengths.  Its applicability is narrower, though,
 * because a radix tree requires integer data as the key, which greatly limits
 * the scenarios it can serve.  On the other hand, since inserting into or
 * deleting from a radix tree needs no rotations, those operations are
 * generally faster than on a red-black tree.  Which of the two trees to use
 * depends on the actual application; the radix tree's API is also much
 * simpler than the red-black tree's.
 *
 * (Translated from the original Chinese annotation.)
 */

#include <ngx_config.h>
#include <ngx_core.h>


static ngx_radix_node_t *ngx_radix_alloc(ngx_radix_tree_t *tree);


/*
 * Create a radix tree in the given pool.
 *
 * preallocate:  0  -> allocate only the root node;
 *              -1  -> preallocate as many node levels as fit in a single
 *                     memory-pool page (translated from the original Chinese
 *                     note: "-1 makes the pool use just one page to allocate
 *                     as many radix-tree nodes as possible");
 *              N>0 -> preallocate the first N bit levels.
 *
 * Returns NULL on allocation failure.
 */
ngx_radix_tree_t *
ngx_radix_tree_create(ngx_pool_t *pool, ngx_int_t preallocate)
{
    uint32_t           key, mask, inc;
    ngx_radix_tree_t  *tree;

    tree = ngx_palloc(pool, sizeof(ngx_radix_tree_t));
    if (tree == NULL) {
        return NULL;
    }

    tree->pool = pool;
    tree->free = NULL;      /* free list of recycled nodes, linked via ->right */
    tree->start = NULL;     /* current page-aligned allocation cursor */
    tree->size = 0;

    tree->root = ngx_radix_alloc(tree);
    if (tree->root == NULL) {
        return NULL;
    }

    tree->root->right = NULL;
    tree->root->left = NULL;
    tree->root->parent = NULL;
    tree->root->value = NGX_RADIX_NO_VALUE;

    if (preallocate == 0) {
        return tree;
    }

    /*
     * Preallocation of first nodes : 0, 1, 00, 01, 10, 11, 000, 001, etc.
     * increases TLB hits even if for first lookup iterations.
     * On 32-bit platforms the 7 preallocated bits takes continuous 4K,
     * 8 - 8K, 9 - 16K, etc.  On 64-bit platforms the 6 preallocated bits
     * takes continuous 4K, 7 - 8K, 8 - 16K, etc.  There is no sense
     * to preallocate more than one page, because further preallocation
     * distributes the only bit per page.  Instead, a random insertion
     * may distribute several bits per page.
     *
     * Thus, by default we preallocate maximum
     *     6 bits on amd64 (64-bit platform and 4K pages)
     *     7 bits on i386 (32-bit platform and 4K pages)
     *     7 bits on sparc64 in 64-bit mode (8K pages)
     *     8 bits on sparc64 in 32-bit mode (8K pages)
     */

    if (preallocate == -1) {
        switch (ngx_pagesize / sizeof(ngx_radix_node_t)) {

        /* amd64 */
        case 128:
            preallocate = 6;
            break;

        /* i386, sparc64 */
        case 256:
            preallocate = 7;
            break;

        /* sparc64 in 32-bit mode */
        default:
            preallocate = 8;
        }
    }

    /* insert every key prefix of each preallocated bit level */
    mask = 0;
    inc = 0x80000000;

    while (preallocate--) {

        key = 0;
        mask >>= 1;
        mask |= 0x80000000;

        do {
            if (ngx_radix32tree_insert(tree, key, mask, NGX_RADIX_NO_VALUE)
                != NGX_OK)
            {
                return NULL;
            }

            key += inc;

        } while (key);      /* stops when key wraps around to 0 */

        inc >>= 1;
    }

    return tree;
}


/*
 * Insert value under key/mask (network-order style prefix: the mask selects
 * the leading significant bits).  Returns NGX_OK, NGX_BUSY if the exact node
 * already holds a value, or NGX_ERROR on allocation failure.
 */
ngx_int_t
ngx_radix32tree_insert(ngx_radix_tree_t *tree, uint32_t key, uint32_t mask,
    uintptr_t value)
{
    uint32_t           bit;
    ngx_radix_node_t  *node, *next;

    bit = 0x80000000;

    node = tree->root;
    next = tree->root;

    /* walk existing nodes along the masked bits of the key */
    while (bit & mask) {
        if (key & bit) {
            next = node->right;

        } else {
            next = node->left;
        }

        if (next == NULL) {
            break;
        }

        bit >>= 1;
        node = next;
    }

    if (next) {
        /* full path already exists; only an empty node may take the value */
        if (node->value != NGX_RADIX_NO_VALUE) {
            return NGX_BUSY;
        }

        node->value = value;
        return NGX_OK;
    }

    /* grow the missing tail of the path */
    while (bit & mask) {
        next = ngx_radix_alloc(tree);
        if (next == NULL) {
            return NGX_ERROR;
        }

        next->right = NULL;
        next->left = NULL;
        next->parent = node;
        next->value = NGX_RADIX_NO_VALUE;

        if (key & bit) {
            node->right = next;

        } else {
            node->left = next;
        }

        bit >>= 1;
        node = next;
    }

    node->value = value;

    return NGX_OK;
}


/*
 * Delete the value stored under key/mask.  Interior nodes are only cleared;
 * leaf nodes are unlinked and pushed onto the tree's free list (chained via
 * ->right) for reuse.  Returns NGX_ERROR if no such entry exists.
 */
ngx_int_t
ngx_radix32tree_delete(ngx_radix_tree_t *tree, uint32_t key, uint32_t mask)
{
    uint32_t           bit;
    ngx_radix_node_t  *node;

    bit = 0x80000000;
    node = tree->root;

    while (node && (bit & mask)) {
        if (key & bit) {
            node = node->right;

        } else {
            node = node->left;
        }

        bit >>= 1;
    }

    if (node == NULL) {
        return NGX_ERROR;
    }

    if (node->right || node->left) {
        /* interior node: just drop its value, keep the structure */
        if (node->value != NGX_RADIX_NO_VALUE) {
            node->value = NGX_RADIX_NO_VALUE;
            return NGX_OK;
        }

        return NGX_ERROR;
    }

    /* leaf node: unlink it and prune now-empty ancestors upwards */
    for ( ;; ) {
        if (node->parent->right == node) {
            node->parent->right = NULL;

        } else {
            node->parent->left = NULL;
        }

        /* recycle the node on the free list */
        node->right = tree->free;
        tree->free = node;

        node = node->parent;

        if (node->right || node->left) {
            break;
        }

        if (node->value != NGX_RADIX_NO_VALUE) {
            break;
        }

        if (node->parent == NULL) {
            break;
        }
    }

    return NGX_OK;
}


/*
 * Longest-prefix lookup: walk the key's bits from the top and remember the
 * last value seen, so the most specific stored prefix wins.
 */
uintptr_t
ngx_radix32tree_find(ngx_radix_tree_t *tree, uint32_t key)
{
    uint32_t           bit;
    uintptr_t          value;
    ngx_radix_node_t  *node;

    bit = 0x80000000;
    value = NGX_RADIX_NO_VALUE;
    node = tree->root;

    while (node) {
        if (node->value != NGX_RADIX_NO_VALUE) {
            value = node->value;
        }

        if (key & bit) {
            node = node->right;

        } else {
            node = node->left;
        }

        bit >>= 1;
    }

    return value;
}


#if (NGX_HAVE_INET6)

/*
 * 128-bit (IPv6) variant of the insert above.  key and mask are 16-byte
 * big-endian arrays; i indexes the byte, bit scans each byte from the MSB.
 */
ngx_int_t
ngx_radix128tree_insert(ngx_radix_tree_t *tree, u_char *key, u_char *mask,
    uintptr_t value)
{
    u_char             bit;
    ngx_uint_t         i;
    ngx_radix_node_t  *node, *next;

    i = 0;
    bit = 0x80;

    node = tree->root;
    next = tree->root;

    while (bit & mask[i]) {
        if (key[i] & bit) {
            next = node->right;

        } else {
            next = node->left;
        }

        if (next == NULL) {
            break;
        }

        bit >>= 1;
        node = next;

        if (bit == 0) {
            /* advance to the next key byte, stop after all 16 */
            if (++i == 16) {
                break;
            }

            bit = 0x80;
        }
    }

    if (next) {
        if (node->value != NGX_RADIX_NO_VALUE) {
            return NGX_BUSY;
        }

        node->value = value;
        return NGX_OK;
    }

    while (bit & mask[i]) {
        next = ngx_radix_alloc(tree);
        if (next == NULL) {
            return NGX_ERROR;
        }

        next->right = NULL;
        next->left = NULL;
        next->parent = node;
        next->value = NGX_RADIX_NO_VALUE;

        if (key[i] & bit) {
            node->right = next;

        } else {
            node->left = next;
        }

        bit >>= 1;
        node = next;

        if (bit == 0) {
            if (++i == 16) {
                break;
            }

            bit = 0x80;
        }
    }

    node->value = value;

    return NGX_OK;
}


/* 128-bit (IPv6) variant of ngx_radix32tree_delete; same pruning strategy. */
ngx_int_t
ngx_radix128tree_delete(ngx_radix_tree_t *tree, u_char *key, u_char *mask)
{
    u_char             bit;
    ngx_uint_t         i;
    ngx_radix_node_t  *node;

    i = 0;
    bit = 0x80;
    node = tree->root;

    while (node && (bit & mask[i])) {
        if (key[i] & bit) {
            node = node->right;

        } else {
            node = node->left;
        }

        bit >>= 1;

        if (bit == 0) {
            if (++i == 16) {
                break;
            }

            bit = 0x80;
        }
    }

    if (node == NULL) {
        return NGX_ERROR;
    }

    if (node->right || node->left) {
        if (node->value != NGX_RADIX_NO_VALUE) {
            node->value = NGX_RADIX_NO_VALUE;
            return NGX_OK;
        }

        return NGX_ERROR;
    }

    for ( ;; ) {
        if (node->parent->right == node) {
            node->parent->right = NULL;

        } else {
            node->parent->left = NULL;
        }

        node->right = tree->free;
        tree->free = node;

        node = node->parent;

        if (node->right || node->left) {
            break;
        }

        if (node->value != NGX_RADIX_NO_VALUE) {
            break;
        }

        if (node->parent == NULL) {
            break;
        }
    }

    return NGX_OK;
}


/*
 * 128-bit longest-prefix lookup.  NOTE(review): unlike the loops above, the
 * byte index has no "== 16" guard here; this appears to rely on the tree
 * depth never exceeding 128 bits so that node becomes NULL first -- confirm
 * against how trees are built before changing.
 */
uintptr_t
ngx_radix128tree_find(ngx_radix_tree_t *tree, u_char *key)
{
    u_char             bit;
    uintptr_t          value;
    ngx_uint_t         i;
    ngx_radix_node_t  *node;

    i = 0;
    bit = 0x80;
    value = NGX_RADIX_NO_VALUE;
    node = tree->root;

    while (node) {
        if (node->value != NGX_RADIX_NO_VALUE) {
            value = node->value;
        }

        if (key[i] & bit) {
            node = node->right;

        } else {
            node = node->left;
        }

        bit >>= 1;

        if (bit == 0) {
            i++;
            bit = 0x80;
        }
    }

    return value;
}

#endif


/*
 * Allocate one node: reuse the free list if possible, otherwise carve nodes
 * out of a page-aligned, page-sized slab taken from the pool.
 */
static ngx_radix_node_t *
ngx_radix_alloc(ngx_radix_tree_t *tree)
{
    ngx_radix_node_t  *p;

    if (tree->free) {
        p = tree->free;
        tree->free = tree->free->right;
        return p;
    }

    if (tree->size < sizeof(ngx_radix_node_t)) {
        /* current slab exhausted; grab a fresh page from the pool */
        tree->start = ngx_pmemalign(tree->pool, ngx_pagesize, ngx_pagesize);
        if (tree->start == NULL) {
            return NULL;
        }

        tree->size = ngx_pagesize;
    }

    p = (ngx_radix_node_t *) tree->start;

    tree->start += sizeof(ngx_radix_node_t);
    tree->size -= sizeof(ngx_radix_node_t);

    return p;
}
{'repo_name': 'y123456yz/reading-code-of-nginx-1.9.2', 'stars': '2382', 'repo_language': 'C', 'file_name': 'koi-win', 'mime_type': 'text/plain', 'hash': -1470252824763278143, 'source_dataset': 'data'}
/** Copyright (C) 2005 Michael Ahlberg, Måns Rullgård Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
**/ #include <stdlib.h> #include "libavutil/avstring.h" #include "libavutil/bswap.h" #include "libavcodec/get_bits.h" #include "libavcodec/bytestream.h" #include "avformat.h" #include "oggdec.h" static int ogm_chapter(AVFormatContext *as, uint8_t *key, uint8_t *val) { int i, cnum, h, m, s, ms, keylen = strlen(key); AVChapter *chapter = NULL; if (keylen < 9 || sscanf(key, "CHAPTER%02d", &cnum) != 1) return 0; if (keylen == 9) { if (sscanf(val, "%02d:%02d:%02d.%03d", &h, &m, &s, &ms) < 4) return 0; ff_new_chapter(as, cnum, (AVRational){1,1000}, ms + 1000*(s + 60*(m + 60*h)), AV_NOPTS_VALUE, NULL); av_free(val); } else if (!strcmp(key+9, "NAME")) { for(i = 0; i < as->nb_chapters; i++) if (as->chapters[i]->id == cnum) { chapter = as->chapters[i]; break; } if (!chapter) return 0; av_metadata_set2(&chapter->metadata, "title", val, AV_METADATA_DONT_STRDUP_VAL); } else return 0; av_free(key); return 1; } int ff_vorbis_comment(AVFormatContext * as, AVMetadata **m, const uint8_t *buf, int size) { const uint8_t *p = buf; const uint8_t *end = buf + size; unsigned n, j; int s; if (size < 8) /* must have vendor_length and user_comment_list_length */ return -1; s = bytestream_get_le32(&p); if (end - p - 4 < s || s < 0) return -1; p += s; n = bytestream_get_le32(&p); while (end - p >= 4 && n > 0) { const char *t, *v; int tl, vl; s = bytestream_get_le32(&p); if (end - p < s || s < 0) break; t = p; p += s; n--; v = memchr(t, '=', s); if (!v) continue; tl = v - t; vl = s - tl - 1; v++; if (tl && vl) { char *tt, *ct; tt = av_malloc(tl + 1); ct = av_malloc(vl + 1); if (!tt || !ct) { av_freep(&tt); av_freep(&ct); av_log(as, AV_LOG_WARNING, "out-of-memory error. 
skipping VorbisComment tag.\n"); continue; } for (j = 0; j < tl; j++) tt[j] = toupper(t[j]); tt[tl] = 0; memcpy(ct, v, vl); ct[vl] = 0; if (!ogm_chapter(as, tt, ct)) av_metadata_set2(m, tt, ct, AV_METADATA_DONT_STRDUP_KEY | AV_METADATA_DONT_STRDUP_VAL); } } if (p != end) av_log(as, AV_LOG_INFO, "%ti bytes of comment header remain\n", end-p); if (n > 0) av_log(as, AV_LOG_INFO, "truncated comment header, %i comments not found\n", n); return 0; } /** Parse the vorbis header * Vorbis Identification header from Vorbis_I_spec.html#vorbis-spec-codec * [vorbis_version] = read 32 bits as unsigned integer | Not used * [audio_channels] = read 8 bit integer as unsigned | Used * [audio_sample_rate] = read 32 bits as unsigned integer | Used * [bitrate_maximum] = read 32 bits as signed integer | Not used yet * [bitrate_nominal] = read 32 bits as signed integer | Not used yet * [bitrate_minimum] = read 32 bits as signed integer | Used as bitrate * [blocksize_0] = read 4 bits as unsigned integer | Not Used * [blocksize_1] = read 4 bits as unsigned integer | Not Used * [framing_flag] = read one bit | Not Used * */ struct oggvorbis_private { unsigned int len[3]; unsigned char *packet[3]; }; static unsigned int fixup_vorbis_headers(AVFormatContext * as, struct oggvorbis_private *priv, uint8_t **buf) { int i,offset, len; unsigned char *ptr; len = priv->len[0] + priv->len[1] + priv->len[2]; ptr = *buf = av_mallocz(len + len/255 + 64); ptr[0] = 2; offset = 1; offset += av_xiphlacing(&ptr[offset], priv->len[0]); offset += av_xiphlacing(&ptr[offset], priv->len[1]); for (i = 0; i < 3; i++) { memcpy(&ptr[offset], priv->packet[i], priv->len[i]); offset += priv->len[i]; av_freep(&priv->packet[i]); } *buf = av_realloc(*buf, offset + FF_INPUT_BUFFER_PADDING_SIZE); return offset; } static int vorbis_header (AVFormatContext * s, int idx) { struct ogg *ogg = s->priv_data; struct ogg_stream *os = ogg->streams + idx; AVStream *st = s->streams[idx]; struct oggvorbis_private *priv; int pkt_type = 
os->buf[os->pstart]; if (!(pkt_type & 1)) return 0; if (!os->private) { os->private = av_mallocz(sizeof(struct oggvorbis_private)); if (!os->private) return 0; } if (os->psize < 1 || pkt_type > 5) return -1; priv = os->private; priv->len[pkt_type >> 1] = os->psize; priv->packet[pkt_type >> 1] = av_mallocz(os->psize); memcpy(priv->packet[pkt_type >> 1], os->buf + os->pstart, os->psize); if (os->buf[os->pstart] == 1) { const uint8_t *p = os->buf + os->pstart + 7; /* skip "\001vorbis" tag */ unsigned blocksize, bs0, bs1; if (os->psize != 30) return -1; if (bytestream_get_le32(&p) != 0) /* vorbis_version */ return -1; st->codec->channels = bytestream_get_byte(&p); st->codec->sample_rate = bytestream_get_le32(&p); p += 4; // skip maximum bitrate st->codec->bit_rate = bytestream_get_le32(&p); // nominal bitrate p += 4; // skip minimum bitrate blocksize = bytestream_get_byte(&p); bs0 = blocksize & 15; bs1 = blocksize >> 4; if (bs0 > bs1) return -1; if (bs0 < 6 || bs1 > 13) return -1; if (bytestream_get_byte(&p) != 1) /* framing_flag */ return -1; st->codec->codec_type = AVMEDIA_TYPE_AUDIO; st->codec->codec_id = CODEC_ID_VORBIS; st->time_base.num = 1; st->time_base.den = st->codec->sample_rate; } else if (os->buf[os->pstart] == 3) { if (os->psize > 8) ff_vorbis_comment (s, &st->metadata, os->buf + os->pstart + 7, os->psize - 8); } else { st->codec->extradata_size = fixup_vorbis_headers(s, priv, &st->codec->extradata); } return 1; } const struct ogg_codec ff_vorbis_codec = { .magic = "\001vorbis", .magicsize = 7, .header = vorbis_header };
{'repo_name': 'havlenapetr/FFMpeg', 'stars': '877', 'repo_language': 'C', 'file_name': 'Android.mk', 'mime_type': 'text/plain', 'hash': 5119760717702992768, 'source_dataset': 'data'}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the Apache 2.0 License. // See the LICENSE file in the project root for more information. using System; using System.Collections.Generic; using System.Diagnostics; using System.Reflection; using System.Runtime.CompilerServices; using System.Threading; using Microsoft.Scripting; using Microsoft.Scripting.Actions; using Microsoft.Scripting.Runtime; using Microsoft.Scripting.Utils; using IronPython.Runtime; using IronPython.Runtime.Binding; using MSAst = System.Linq.Expressions; namespace IronPython.Compiler.Ast { using Ast = MSAst.Expression; using AstUtils = Microsoft.Scripting.Ast.Utils; public abstract class ScopeStatement : Statement { private bool _importStar; // from module import * private bool _unqualifiedExec; // exec "code" private bool _nestedFreeVariables; // nested function with free variable private bool _locals; // The scope needs locals dictionary // due to "exec" or call to dir, locals, eval, vars... private bool _hasLateboundVarSets; // calls code which can assign to variables private bool _containsExceptionHandling; // true if this block contains a try/with statement private bool _forceCompile; // true if this scope should always be compiled private FunctionCode _funcCode; // the function code object created for this scope private Dictionary<string, PythonVariable> _variables; // mapping of string to variables private ClosureInfo[] _closureVariables; // closed over variables, bool indicates if we accessed it in this scope. 
private List<PythonVariable> _freeVars; // list of variables accessed from outer scopes private List<string> _globalVars; // global variables accessed from this scope private List<string> _cellVars; // variables accessed from nested scopes private Dictionary<string, PythonReference> _references; // names of all variables referenced, null after binding completes internal Dictionary<PythonVariable, MSAst.Expression> _variableMapping = new Dictionary<PythonVariable, MSAst.Expression>(); private MSAst.ParameterExpression _localParentTuple; // parent's tuple local saved locally private readonly DelayedFunctionCode _funcCodeExpr = new DelayedFunctionCode(); // expression that refers to the function code for this scope internal static MSAst.ParameterExpression LocalCodeContextVariable = Ast.Parameter(typeof(CodeContext), "$localContext"); private static MSAst.ParameterExpression _catchException = Ast.Parameter(typeof(Exception), "$updException"); internal const string NameForExec = "module: <exec>"; internal bool ContainsImportStar { get { return _importStar; } set { _importStar = value; } } internal bool ContainsExceptionHandling { get { return _containsExceptionHandling; } set { _containsExceptionHandling = value; } } internal bool ContainsUnqualifiedExec { get { return _unqualifiedExec; } set { _unqualifiedExec = value; } } internal virtual bool IsGeneratorMethod { get { return false; } } /// <summary> /// The variable used to hold out parents closure tuple in our local scope. /// </summary> internal MSAst.ParameterExpression LocalParentTuple { get { return _localParentTuple; } } /// <summary> /// Gets the expression associated with the local CodeContext. If the function /// doesn't have a local CodeContext then this is the global context. /// </summary> internal virtual MSAst.Expression LocalContext { get { return LocalCodeContextVariable; } } /// <summary> /// True if this scope accesses a variable from an outer scope. 
/// </summary> internal bool IsClosure { get { return FreeVariables != null && FreeVariables.Count > 0; } } /// <summary> /// True if an inner scope is accessing a variable defined in this scope. /// </summary> internal bool ContainsNestedFreeVariables { get { return _nestedFreeVariables; } set { _nestedFreeVariables = value; } } /// <summary> /// True if we are forcing the creation of a dictionary for storing locals. /// /// This occurs for calls to locals(), dir(), vars(), unqualified exec, and /// from ... import *. /// </summary> internal bool NeedsLocalsDictionary { get { return _locals; } set { _locals = value; } } public virtual string Name { get { return "<unknown>"; } } internal virtual string Filename { get { return GlobalParent.SourceUnit.Path ?? "<string>"; } } /// <summary> /// True if variables can be set in a late bound fashion that we don't /// know about at code gen time - for example via from foo import *. /// /// This is tracked independently of the ContainsUnqualifiedExec/NeedsLocalsDictionary /// </summary> internal virtual bool HasLateBoundVariableSets { get { return _hasLateboundVarSets; } set { _hasLateboundVarSets = value; } } internal Dictionary<string, PythonVariable> Variables { get { return _variables; } } internal virtual bool IsGlobal { get { return false; } } internal bool NeedsLocalContext { get { return NeedsLocalsDictionary || ContainsNestedFreeVariables; } } internal virtual string[] ParameterNames { get { return ArrayUtils.EmptyStrings; } } internal virtual int ArgCount { get { return 0; } } internal virtual FunctionAttributes Flags { get { return FunctionAttributes.None; } } internal abstract Microsoft.Scripting.Ast.LightLambdaExpression GetLambda(); /// <summary> /// Gets or creates the FunctionCode object for this FunctionDefinition. 
/// </summary> internal FunctionCode GetOrMakeFunctionCode() { if (_funcCode == null) { Interlocked.CompareExchange(ref _funcCode, new FunctionCode(GlobalParent.PyContext, OriginalDelegate, this, ScopeDocumentation, null, true), null); } return _funcCode; } internal virtual string ScopeDocumentation { get { return null; } } internal virtual Delegate OriginalDelegate { get { return null; } } internal virtual IList<string> GetVarNames() { List<string> res = new List<string>(); AppendVariables(res); return res; } internal void AddFreeVariable(PythonVariable variable, bool accessedInScope) { if (_freeVars == null) { _freeVars = new List<PythonVariable>(); } if(!_freeVars.Contains(variable)) { _freeVars.Add(variable); } } internal bool ShouldInterpret { get { if (_forceCompile) { return false; } else if (GlobalParent.CompilationMode == CompilationMode.Lookup) { return true; } CompilerContext context = GlobalParent.CompilerContext; return ((PythonContext)context.SourceUnit.LanguageContext).ShouldInterpret((PythonCompilerOptions)context.Options, context.SourceUnit); } set { _forceCompile = !value; } } internal string AddReferencedGlobal(string name) { if (_globalVars == null) { _globalVars = new List<string>(); } if (!_globalVars.Contains(name)) { _globalVars.Add(name); } return name; } internal void AddCellVariable(PythonVariable variable) { if (_cellVars == null) { _cellVars = new List<string>(); } if (!_cellVars.Contains(variable.Name)) { _cellVars.Add(variable.Name); } } internal List<string> AppendVariables(List<string> res) { if (Variables != null) { foreach (var variable in Variables) { if (variable.Value.Kind != VariableKind.Local) { continue; } if (CellVariables == null || !CellVariables.Contains(variable.Key)) { res.Add(variable.Key); } } } return res; } /// <summary> /// Variables that are bound in an outer scope - but not a global scope /// </summary> internal IList<PythonVariable> FreeVariables { get { return _freeVars; } } /// <summary> /// Variables that 
are bound to the global scope /// </summary> internal IList<string> GlobalVariables { get { return _globalVars; } } /// <summary> /// Variables that are referred to from a nested scope and need to be /// promoted to cells. /// </summary> internal IList<string> CellVariables { get { return _cellVars; } } internal Type GetClosureTupleType() { if (TupleCells > 0) { Type[] args = new Type[TupleCells]; for (int i = 0; i < TupleCells; i++) { args[i] = typeof(ClosureCell); } return MutableTuple.MakeTupleType(args); } return null; } internal virtual int TupleCells { get { if (_closureVariables == null) { return 0; } return _closureVariables.Length; } } internal abstract bool ExposesLocalVariable(PythonVariable variable); internal virtual MSAst.Expression GetParentClosureTuple() { // PythonAst will never call this. throw new NotSupportedException(); } private bool TryGetAnyVariable(string name, out PythonVariable variable) { if (_variables != null) { return _variables.TryGetValue(name, out variable); } else { variable = null; return false; } } internal bool TryGetVariable(string name, out PythonVariable variable) { if (TryGetAnyVariable(name, out variable)) { return true; } else { variable = null; return false; } } internal virtual bool TryBindOuter(ScopeStatement from, PythonReference reference, out PythonVariable variable) { // Hide scope contents by default (only functions expose their locals) variable = null; return false; } internal abstract PythonVariable BindReference(PythonNameBinder binder, PythonReference reference); internal virtual void Bind(PythonNameBinder binder) { if (_references != null) { foreach (var reference in _references.Values) { PythonVariable variable; reference.PythonVariable = variable = BindReference(binder, reference); // Accessing outer scope variable which is being deleted? 
if (variable != null) { if (variable.Deleted && variable.Scope != this && !variable.Scope.IsGlobal) { // report syntax error binder.ReportSyntaxError( String.Format( System.Globalization.CultureInfo.InvariantCulture, "can not delete variable '{0}' referenced in nested scope", reference.Name ), this); } } } } } internal virtual void FinishBind(PythonNameBinder binder) { List<ClosureInfo> closureVariables = null; if (FreeVariables != null && FreeVariables.Count > 0) { _localParentTuple = Ast.Parameter(Parent.GetClosureTupleType(), "$tuple"); foreach (var variable in _freeVars) { var parentClosure = Parent._closureVariables; Debug.Assert(parentClosure != null); for (int i = 0; i < parentClosure.Length; i++) { if (parentClosure[i].Variable == variable) { _variableMapping[variable] = new ClosureExpression(variable, Ast.Property(_localParentTuple, String.Format("Item{0:D3}", i)), null); break; } } Debug.Assert(_variableMapping.ContainsKey(variable)); if (closureVariables == null) { closureVariables = new List<ClosureInfo>(); } closureVariables.Add(new ClosureInfo(variable, !(this is ClassDefinition))); } } if (Variables != null) { foreach (PythonVariable variable in Variables.Values) { if (!HasClosureVariable(closureVariables, variable) && !variable.IsGlobal && (variable.AccessedInNestedScope || ExposesLocalVariable(variable))) { if (closureVariables == null) { closureVariables = new List<ClosureInfo>(); } closureVariables.Add(new ClosureInfo(variable, true)); } if (variable.Kind == VariableKind.Local) { Debug.Assert(variable.Scope == this); if (variable.AccessedInNestedScope || ExposesLocalVariable(variable)) { _variableMapping[variable] = new ClosureExpression(variable, Ast.Parameter(typeof(ClosureCell), variable.Name), null); } else { _variableMapping[variable] = Ast.Parameter(typeof(object), variable.Name); } } } } if (closureVariables != null) { _closureVariables = closureVariables.ToArray(); } // no longer needed _references = null; } private static bool 
HasClosureVariable(List<ClosureInfo> closureVariables, PythonVariable variable) { if (closureVariables == null) { return false; } for (int i = 0; i < closureVariables.Count; i++) { if (closureVariables[i].Variable == variable) { return true; } } return false; } private void EnsureVariables() { if (_variables == null) { _variables = new Dictionary<string, PythonVariable>(StringComparer.Ordinal); } } internal void AddGlobalVariable(PythonVariable variable) { EnsureVariables(); _variables[variable.Name] = variable; } internal PythonReference Reference(string name) { if (_references == null) { _references = new Dictionary<string, PythonReference>(StringComparer.Ordinal); } PythonReference reference; if (!_references.TryGetValue(name, out reference)) { _references[name] = reference = new PythonReference(name); } return reference; } internal bool IsReferenced(string name) { PythonReference reference; return _references != null && _references.TryGetValue(name, out reference); } internal PythonVariable/*!*/ CreateVariable(string name, VariableKind kind) { EnsureVariables(); Debug.Assert(!_variables.ContainsKey(name)); PythonVariable variable; _variables[name] = variable = new PythonVariable(name, kind, this); return variable; } internal PythonVariable/*!*/ EnsureVariable(string name) { PythonVariable variable; if (!TryGetVariable(name, out variable)) { return CreateVariable(name, VariableKind.Local); } return variable; } internal PythonVariable DefineParameter(string name) { return CreateVariable(name, VariableKind.Parameter); } internal PythonContext PyContext { get { return (PythonContext)GlobalParent.CompilerContext.SourceUnit.LanguageContext; } } #region Debug Info Tracking private MSAst.SymbolDocumentInfo Document { get { return GlobalParent.Document; } } internal MSAst.Expression/*!*/ AddDebugInfo(MSAst.Expression/*!*/ expression, SourceLocation start, SourceLocation end) { if (PyContext.PythonOptions.GCStress != null) { expression = Ast.Block( Ast.Call( 
typeof(GC).GetMethod("Collect", new[] { typeof(int) }), Ast.Constant(PyContext.PythonOptions.GCStress.Value) ), expression ); } return AstUtils.AddDebugInfo(expression, Document, start, end); } internal MSAst.Expression/*!*/ AddDebugInfo(MSAst.Expression/*!*/ expression, SourceSpan location) { return AddDebugInfo(expression, location.Start, location.End); } internal MSAst.Expression/*!*/ AddDebugInfoAndVoid(MSAst.Expression/*!*/ expression, SourceSpan location) { if (expression.Type != typeof(void)) { expression = AstUtils.Void(expression); } return AddDebugInfo(expression, location); } #endregion #region Runtime Line Number Tracing /// <summary> /// Gets the expression for updating the dynamic stack trace at runtime when an /// exception is thrown. /// </summary> internal MSAst.Expression GetUpdateTrackbackExpression(MSAst.ParameterExpression exception) { if (!_containsExceptionHandling) { Debug.Assert(Name != null); Debug.Assert(exception.Type == typeof(Exception)); return UpdateStackTrace(exception); } return GetSaveLineNumberExpression(exception, true); } private MSAst.Expression UpdateStackTrace(MSAst.ParameterExpression exception) { return Ast.Call( AstMethods.UpdateStackTrace, exception, LocalContext, _funcCodeExpr, LineNumberExpression ); } /// <summary> /// Gets the expression for the actual updating of the line number for stack traces to be available /// </summary> internal MSAst.Expression GetSaveLineNumberExpression(MSAst.ParameterExpression exception, bool preventAdditionalAdds) { Debug.Assert(exception.Type == typeof(Exception)); return Ast.Block( AstUtils.If( Ast.Not( LineNumberUpdated ), UpdateStackTrace(exception) ), Ast.Assign( LineNumberUpdated, AstUtils.Constant(preventAdditionalAdds) ), AstUtils.Empty() ); } /// <summary> /// Wraps the body of a statement which should result in a frame being available during /// exception handling. This ensures the line number is updated as the stack is unwound. 
/// </summary> internal MSAst.Expression/*!*/ WrapScopeStatements(MSAst.Expression/*!*/ body, bool canThrow) { if (canThrow) { body = Ast.Block( new[] { LineNumberExpression, LineNumberUpdated }, Ast.TryCatch( body, Ast.Catch( _catchException, Ast.Block( GetUpdateTrackbackExpression(_catchException), Ast.Rethrow(body.Type) ) ) ) ); } return body; } #endregion /// <summary> /// Provides a place holder for the expression which represents /// a FunctionCode. For functions/classes this gets updated after /// the AST has been generated because the FunctionCode needs to /// know about the tree which gets generated. For modules we /// immediately have the value because it always comes in as a parameter. /// </summary> class DelayedFunctionCode : MSAst.Expression { private MSAst.Expression _funcCode; public override bool CanReduce { get { return true; } } public MSAst.Expression Code { get { return _funcCode; } set { _funcCode = value; } } public override Type Type { get { return typeof(FunctionCode); } } protected override MSAst.Expression VisitChildren(MSAst.ExpressionVisitor visitor) { if (_funcCode != null) { MSAst.Expression funcCode = visitor.Visit(_funcCode); if (funcCode != _funcCode) { DelayedFunctionCode res = new DelayedFunctionCode(); res._funcCode = funcCode; return res; } } return this; } public override MSAst.Expression Reduce() { Debug.Assert(_funcCode != null); return _funcCode; } public override MSAst.ExpressionType NodeType { get { return MSAst.ExpressionType.Extension; } } } internal MSAst.Expression FuncCodeExpr { get { return _funcCodeExpr.Code; } set { _funcCodeExpr.Code = value; } } internal MSAst.MethodCallExpression CreateLocalContext(MSAst.Expression parentContext) { var closureVariables = _closureVariables; if (_closureVariables == null) { closureVariables = new ClosureInfo[0]; } return Ast.Call( AstMethods.CreateLocalContext, parentContext, MutableTuple.Create(ArrayUtils.ConvertAll(closureVariables, x => GetClosureCell(x))), 
Ast.Constant(ArrayUtils.ConvertAll(closureVariables, x => x.AccessedInScope ? x.Variable.Name : null)) ); } private MSAst.Expression GetClosureCell(ClosureInfo variable) { return ((ClosureExpression)GetVariableExpression(variable.Variable)).ClosureCell; } internal virtual MSAst.Expression GetVariableExpression(PythonVariable variable) { if (variable.IsGlobal) { return GlobalParent.ModuleVariables[variable]; } Debug.Assert(_variableMapping.ContainsKey(variable)); return _variableMapping[variable]; } internal void CreateVariables(ReadOnlyCollectionBuilder<MSAst.ParameterExpression> locals, List<MSAst.Expression> init) { if (Variables != null) { foreach (PythonVariable variable in Variables.Values) { if(variable.Kind != VariableKind.Global) { if (GetVariableExpression(variable) is ClosureExpression closure) { init.Add(closure.Create()); locals.Add((MSAst.ParameterExpression)closure.ClosureCell); } else if (variable.Kind == VariableKind.Local) { locals.Add((MSAst.ParameterExpression)GetVariableExpression(variable)); if (variable.ReadBeforeInitialized) { init.Add( AssignValue( GetVariableExpression(variable), MSAst.Expression.Field(null, typeof(Uninitialized).GetField("Instance")) ) ); } } } } } if (IsClosure) { Type tupleType = Parent.GetClosureTupleType(); Debug.Assert(tupleType != null); init.Add( MSAst.Expression.Assign( LocalParentTuple, MSAst.Expression.Convert( GetParentClosureTuple(), tupleType ) ) ); locals.Add(LocalParentTuple); } } internal MSAst.Expression AddDecorators(MSAst.Expression ret, IList<Expression> decorators) { // add decorators if (decorators != null) { for (int i = decorators.Count - 1; i >= 0; i--) { Expression decorator = decorators[i]; ret = Parent.Invoke( new CallSignature(1), Parent.LocalContext, decorator, ret ); } } return ret; } internal MSAst.Expression/*!*/ Invoke(CallSignature signature, params MSAst.Expression/*!*/[]/*!*/ args) { PythonInvokeBinder invoke = PyContext.Invoke(signature); switch (args.Length) { case 1: return 
GlobalParent.CompilationMode.Dynamic(invoke, typeof(object), args[0]); case 2: return GlobalParent.CompilationMode.Dynamic(invoke, typeof(object), args[0], args[1]); case 3: return GlobalParent.CompilationMode.Dynamic(invoke, typeof(object), args[0], args[1], args[2]); case 4: return GlobalParent.CompilationMode.Dynamic(invoke, typeof(object), args[0], args[1], args[2], args[3]); default: return GlobalParent.CompilationMode.Dynamic( invoke, typeof(object), args ); } } internal ScopeStatement CopyForRewrite() { return (ScopeStatement)MemberwiseClone(); } internal virtual void RewriteBody(MSAst.ExpressionVisitor visitor) { _funcCode = null; } struct ClosureInfo { public PythonVariable Variable; public bool AccessedInScope; public ClosureInfo(PythonVariable variable, bool accessedInScope) { Variable = variable; AccessedInScope = accessedInScope; } } internal virtual bool PrintExpressions { get { return false; } } #region Profiling Support internal virtual string ProfilerName { get { return Name; } } /// <summary> /// Reducible node so that re-writing for profiling does not occur until /// after the script code has been completed and is ready to be compiled. /// /// Without this extra node profiling would force reduction of the node /// and we wouldn't have setup our constant access correctly yet. 
/// </summary> class DelayedProfiling : MSAst.Expression { private readonly ScopeStatement _ast; private readonly MSAst.Expression _body; private readonly MSAst.ParameterExpression _tick; public DelayedProfiling(ScopeStatement ast, MSAst.Expression body, MSAst.ParameterExpression tick) { _ast = ast; _body = body; _tick = tick; } public override bool CanReduce { get { return true; } } public override Type Type { get { return _body.Type; } } protected override MSAst.Expression VisitChildren(MSAst.ExpressionVisitor visitor) { return visitor.Visit(_body); } public override MSAst.Expression Reduce() { string profilerName = _ast.ProfilerName; bool unique = (profilerName == NameForExec); return Ast.Block( new[] { _tick }, _ast.GlobalParent._profiler.AddProfiling(_body, _tick, profilerName, unique) ); } public override MSAst.ExpressionType NodeType { get { return MSAst.ExpressionType.Extension; } } } internal MSAst.Expression AddProfiling(MSAst.Expression/*!*/ body) { if (GlobalParent._profiler != null) { MSAst.ParameterExpression tick = Ast.Variable(typeof(long), "$tick"); return new DelayedProfiling(this, body, tick); } return body; } #endregion } }
{'repo_name': 'IronLanguages/ironpython2', 'stars': '748', 'repo_language': 'Python', 'file_name': 'interop.net.event.html', 'mime_type': 'text/html', 'hash': 2435727933684938654, 'source_dataset': 'data'}
// SPDX-License-Identifier: LGPL-3.0-or-later import QtQuick 2.12 import "../../Base" HCheckBox { text: qsTr("Encrypt messages") subtitle.textFormat: Text.StyledText subtitle.text: qsTr("Only you and those you trust will be able to read the " + "conversation") + `<br><font color="${theme.colors.warningText}">` + qsTr("Cannot be disabled later!") + "</font>" }
{'repo_name': 'mirukana/mirage', 'stars': '110', 'repo_language': 'QML', 'file_name': 'mirage.desktop', 'mime_type': 'text/plain', 'hash': 5271698184636862902, 'source_dataset': 'data'}
// Ceres Solver - A fast non-linear least squares minimizer // Copyright 2015 Google Inc. All rights reserved. // http://ceres-solver.org/ // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // * Neither the name of Google Inc. nor the names of its contributors may be // used to endorse or promote products derived from this software without // specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. // // Author: sameeragarwal@google.com (Sameer Agarwal) // // Solve dense rectangular systems Ax = b using the QR factorization. 
#ifndef CERES_INTERNAL_DENSE_QR_SOLVER_H_ #define CERES_INTERNAL_DENSE_QR_SOLVER_H_ #include "ceres/linear_solver.h" #include "ceres/internal/eigen.h" #include "ceres/internal/macros.h" namespace ceres { namespace internal { class DenseSparseMatrix; // This class implements the LinearSolver interface for solving // rectangular/unsymmetric (well constrained) linear systems of the // form // // Ax = b // // Since there does not usually exist a solution that satisfies these // equations, the solver instead solves the linear least squares // problem // // min_x |Ax - b|^2 // // The solution strategy is based on computing the QR decomposition of // A, i.e. // // A = QR // // Where Q is an orthonormal matrix and R is an upper triangular // matrix. Then // // Ax = b // QRx = b // Q'QRx = Q'b // Rx = Q'b // x = R^{-1} Q'b // // If the PerSolveOptions struct has a non-null array D, then the // augmented/regularized linear system // // [ A ]x = [b] // [ diag(D) ] [0] // // is solved. // // This class uses the dense QR factorization routines from the Eigen // library. This solver always returns a solution, it is the user's // responsibility to judge if the solution is good enough for their // purposes. 
class DenseQRSolver: public DenseSparseMatrixSolver { public: explicit DenseQRSolver(const LinearSolver::Options& options); private: virtual LinearSolver::Summary SolveImpl( DenseSparseMatrix* A, const double* b, const LinearSolver::PerSolveOptions& per_solve_options, double* x); LinearSolver::Summary SolveUsingEigen( DenseSparseMatrix* A, const double* b, const LinearSolver::PerSolveOptions& per_solve_options, double* x); LinearSolver::Summary SolveUsingLAPACK( DenseSparseMatrix* A, const double* b, const LinearSolver::PerSolveOptions& per_solve_options, double* x); const LinearSolver::Options options_; ColMajorMatrix lhs_; Vector rhs_; Vector work_; CERES_DISALLOW_COPY_AND_ASSIGN(DenseQRSolver); }; } // namespace internal } // namespace ceres #endif // CERES_INTERNAL_DENSE_QR_SOLVER_H_
{'repo_name': 'openMVG/openMVG', 'stars': '2956', 'repo_language': 'C++', 'file_name': 'README.md', 'mime_type': 'text/plain', 'hash': -549613734352852185, 'source_dataset': 'data'}
//! Anthony Deschamps //! //! - **Date:** November 13, 2017 (recorded October 27, 2017) //! - [**Audio**][mp3] //! //! [mp3]: https://www.podtrac.com/pts/redirect.mp3/cdn.newrustacean.com/file/newrustacean/interview/rbr/anthony_deschamps.mp3 //! //! <audio style="width: 100%" title="Rust Belt Rust 2017: Colin Dean" controls preload=metadata> //! <source src="https://www.podtrac.com/pts/redirect.mp3/cdn.newrustacean.com/file/newrustacean/interview/rbr/anthony_deschamps.mp3"> //! </audio> #[doc(include = "../docs/rbr_2017/anthony-deschamps.md")] pub struct Transcript;
{'repo_name': 'chriskrycho/newrustacean.com', 'stars': '154', 'repo_language': 'Rust', 'file_name': 'media-playback.css', 'mime_type': 'text/plain', 'hash': -2252179143117033379, 'source_dataset': 'data'}
Higgs classification --------------------------- You can find the full data from this here (`Link`__) The data has been produced using Monte Carlo simulations. The first 21 features (columns 2-22) are kinematic properties measured by the particle detectors in the accelerator. The last seven features are functions of the first 21 features; these are high-level features derived by physicists to help discriminate between the two classes. There is an interest in using deep learning methods to obviate the need for physicists to manually develop such features. Benchmark results using Bayesian Decision Trees from a standard physics package and 5-layer neural networks are presented in the original paper. The last 500,000 examples are used as a test set. .. __: https://archive.ics.uci.edu/ml/datasets/HIGGS
{'repo_name': 'aksnzhy/xlearn', 'stars': '2716', 'repo_language': 'C++', 'file_name': 'xlearn.Rd', 'mime_type': 'text/plain', 'hash': 7385267606947356542, 'source_dataset': 'data'}
<map id="vst" name="vst"> <area shape="rect" id="node4" href="dir_85853d1f2c321934b2d0b4b8cda6e5c5.html" title="adelay" alt="" coords="1111,61,1183,109"/> <area shape="rect" id="node5" href="dir_5ffc5b65933164afb77ebc5b5eb628d9.html" title="again" alt="" coords="1015,61,1087,109"/> <area shape="rect" id="node6" href="dir_6e70dddd8efe192a200442b707c5a654.html" title="channelcontext" alt="" coords="889,61,991,109"/> <area shape="rect" id="node7" href="dir_25155a3fac66965a025910ca5a9dd925.html" title="common" alt="" coords="428,157,500,205"/> <area shape="rect" id="node8" href="dir_7a4e78622845ac8e2d8be874b02fb572.html" title="hostchecker" alt="" coords="778,61,864,109"/> <area shape="rect" id="node9" href="dir_a8e36a444327f87643906ead2e71d6f1.html" title="InterAppAudio" alt="" coords="657,61,753,109"/> <area shape="rect" id="node10" href="dir_49976055b6bc2e03b4acc62441be6b92.html" title="mda&#45;vst3" alt="" coords="560,61,632,109"/> <area shape="rect" id="node11" href="dir_0140fa053d2e858593a0f7abc2b650ff.html" title="note_expression_synth" alt="" coords="392,61,536,109"/> <area shape="rect" id="edge4-headlabel" href="dir_000007_000015.html" title="3" alt="" coords="469,132,476,146"/> <area shape="rect" id="node12" href="dir_b39eac9c57979fc2d284d85f699e7720.html" title="pitchnames" alt="" coords="285,61,368,109"/> <area shape="rect" id="node13" href="dir_12f6b7b17c5635ab054371ff76a5507c.html" title="prefetchablesupport" alt="" coords="134,61,260,109"/> <area shape="rect" id="node14" href="dir_504f3f6582c88869dcb0e7c0fcff5bdc.html" title="validator" alt="" coords="37,61,109,109"/> <area shape="rect" id="graph3" href="dir_5898a34b900eeb5b54fd3cf30212c70a.html" alt="" coords="27,51,1289,216"/> <area shape="rect" id="graph2" href="dir_14ae8cef5b4cd5ebc5413814d7bbe8ce.html" title="samples" alt="" coords="16,16,1300,227"/> </map>
{'repo_name': 'creativeintent/temper', 'stars': '112', 'repo_language': 'HTML', 'file_name': 'all_1.html', 'mime_type': 'text/html', 'hash': -4250810316402611243, 'source_dataset': 'data'}
<?php /** * * PHP 5 * * CakePHP(tm) : Rapid Development Framework (http://cakephp.org) * Copyright 2005-2012, Cake Software Foundation, Inc. (http://cakefoundation.org) * * Licensed under The MIT License * Redistributions of files must retain the above copyright notice. * * @copyright Copyright 2005-2012, Cake Software Foundation, Inc. (http://cakefoundation.org) * @link http://cakephp.org CakePHP(tm) Project * @package Cake.View.Errors * @since CakePHP(tm) v 0.10.0.1076 * @license MIT License (http://www.opensource.org/licenses/mit-license.php) */ ?> <h2><?php echo __d('cake_dev', 'Missing View'); ?></h2> <p class="error"> <strong><?php echo __d('cake_dev', 'Error'); ?>: </strong> <?php echo __d('cake_dev', 'The view for %1$s%2$s was not found.', '<em>' . Inflector::camelize($this->request->controller) . 'Controller::</em>', '<em>' . $this->request->action . '()</em>'); ?> </p> <p class="error"> <strong><?php echo __d('cake_dev', 'Error'); ?>: </strong> <?php echo __d('cake_dev', 'Confirm you have created the file: %s', $file); ?> </p> <p class="notice"> <strong><?php echo __d('cake_dev', 'Notice'); ?>: </strong> <?php echo __d('cake_dev', 'If you want to customize this error message, create %s', APP_DIR . DS . 'View' . DS . 'Errors' . DS . 'missing_view.ctp'); ?> </p> <?php echo $this->element('exception_stack_trace'); ?>
{'repo_name': 'infinitas/infinitas', 'stars': '124', 'repo_language': 'PHP', 'file_name': 'cherokee.conf', 'mime_type': 'text/plain', 'hash': 6833280654167482532, 'source_dataset': 'data'}
// Copyright 2012 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // +build !windows package test // Session functional tests. import ( "bytes" "errors" "io" "strings" "testing" "golang.org/x/crypto/ssh" ) func TestRunCommandSuccess(t *testing.T) { server := newServer(t) defer server.Shutdown() conn := server.Dial(clientConfig()) defer conn.Close() session, err := conn.NewSession() if err != nil { t.Fatalf("session failed: %v", err) } defer session.Close() err = session.Run("true") if err != nil { t.Fatalf("session failed: %v", err) } } func TestHostKeyCheck(t *testing.T) { server := newServer(t) defer server.Shutdown() conf := clientConfig() hostDB := hostKeyDB() conf.HostKeyCallback = hostDB.Check // change the keys. hostDB.keys[ssh.KeyAlgoRSA][25]++ hostDB.keys[ssh.KeyAlgoDSA][25]++ hostDB.keys[ssh.KeyAlgoECDSA256][25]++ conn, err := server.TryDial(conf) if err == nil { conn.Close() t.Fatalf("dial should have failed.") } else if !strings.Contains(err.Error(), "host key mismatch") { t.Fatalf("'host key mismatch' not found in %v", err) } } func TestRunCommandStdin(t *testing.T) { server := newServer(t) defer server.Shutdown() conn := server.Dial(clientConfig()) defer conn.Close() session, err := conn.NewSession() if err != nil { t.Fatalf("session failed: %v", err) } defer session.Close() r, w := io.Pipe() defer r.Close() defer w.Close() session.Stdin = r err = session.Run("true") if err != nil { t.Fatalf("session failed: %v", err) } } func TestRunCommandStdinError(t *testing.T) { server := newServer(t) defer server.Shutdown() conn := server.Dial(clientConfig()) defer conn.Close() session, err := conn.NewSession() if err != nil { t.Fatalf("session failed: %v", err) } defer session.Close() r, w := io.Pipe() defer r.Close() session.Stdin = r pipeErr := errors.New("closing write end of pipe") w.CloseWithError(pipeErr) err = session.Run("true") if err != pipeErr { 
t.Fatalf("expected %v, found %v", pipeErr, err) } } func TestRunCommandFailed(t *testing.T) { server := newServer(t) defer server.Shutdown() conn := server.Dial(clientConfig()) defer conn.Close() session, err := conn.NewSession() if err != nil { t.Fatalf("session failed: %v", err) } defer session.Close() err = session.Run(`bash -c "kill -9 $$"`) if err == nil { t.Fatalf("session succeeded: %v", err) } } func TestRunCommandWeClosed(t *testing.T) { server := newServer(t) defer server.Shutdown() conn := server.Dial(clientConfig()) defer conn.Close() session, err := conn.NewSession() if err != nil { t.Fatalf("session failed: %v", err) } err = session.Shell() if err != nil { t.Fatalf("shell failed: %v", err) } err = session.Close() if err != nil { t.Fatalf("shell failed: %v", err) } } func TestFuncLargeRead(t *testing.T) { server := newServer(t) defer server.Shutdown() conn := server.Dial(clientConfig()) defer conn.Close() session, err := conn.NewSession() if err != nil { t.Fatalf("unable to create new session: %s", err) } stdout, err := session.StdoutPipe() if err != nil { t.Fatalf("unable to acquire stdout pipe: %s", err) } err = session.Start("dd if=/dev/urandom bs=2048 count=1024") if err != nil { t.Fatalf("unable to execute remote command: %s", err) } buf := new(bytes.Buffer) n, err := io.Copy(buf, stdout) if err != nil { t.Fatalf("error reading from remote stdout: %s", err) } if n != 2048*1024 { t.Fatalf("Expected %d bytes but read only %d from remote command", 2048, n) } } func TestKeyChange(t *testing.T) { server := newServer(t) defer server.Shutdown() conf := clientConfig() hostDB := hostKeyDB() conf.HostKeyCallback = hostDB.Check conf.RekeyThreshold = 1024 conn := server.Dial(conf) defer conn.Close() for i := 0; i < 4; i++ { session, err := conn.NewSession() if err != nil { t.Fatalf("unable to create new session: %s", err) } stdout, err := session.StdoutPipe() if err != nil { t.Fatalf("unable to acquire stdout pipe: %s", err) } err = session.Start("dd 
if=/dev/urandom bs=1024 count=1") if err != nil { t.Fatalf("unable to execute remote command: %s", err) } buf := new(bytes.Buffer) n, err := io.Copy(buf, stdout) if err != nil { t.Fatalf("error reading from remote stdout: %s", err) } want := int64(1024) if n != want { t.Fatalf("Expected %d bytes but read only %d from remote command", want, n) } } if changes := hostDB.checkCount; changes < 4 { t.Errorf("got %d key changes, want 4", changes) } } func TestInvalidTerminalMode(t *testing.T) { server := newServer(t) defer server.Shutdown() conn := server.Dial(clientConfig()) defer conn.Close() session, err := conn.NewSession() if err != nil { t.Fatalf("session failed: %v", err) } defer session.Close() if err = session.RequestPty("vt100", 80, 40, ssh.TerminalModes{255: 1984}); err == nil { t.Fatalf("req-pty failed: successful request with invalid mode") } } func TestValidTerminalMode(t *testing.T) { server := newServer(t) defer server.Shutdown() conn := server.Dial(clientConfig()) defer conn.Close() session, err := conn.NewSession() if err != nil { t.Fatalf("session failed: %v", err) } defer session.Close() stdout, err := session.StdoutPipe() if err != nil { t.Fatalf("unable to acquire stdout pipe: %s", err) } stdin, err := session.StdinPipe() if err != nil { t.Fatalf("unable to acquire stdin pipe: %s", err) } tm := ssh.TerminalModes{ssh.ECHO: 0} if err = session.RequestPty("xterm", 80, 40, tm); err != nil { t.Fatalf("req-pty failed: %s", err) } err = session.Shell() if err != nil { t.Fatalf("session failed: %s", err) } stdin.Write([]byte("stty -a && exit\n")) var buf bytes.Buffer if _, err := io.Copy(&buf, stdout); err != nil { t.Fatalf("reading failed: %s", err) } if sttyOutput := buf.String(); !strings.Contains(sttyOutput, "-echo ") { t.Fatalf("terminal mode failure: expected -echo in stty output, got %s", sttyOutput) } } func TestWindowChange(t *testing.T) { server := newServer(t) defer server.Shutdown() conn := server.Dial(clientConfig()) defer conn.Close() session, 
err := conn.NewSession() if err != nil { t.Fatalf("session failed: %v", err) } defer session.Close() stdout, err := session.StdoutPipe() if err != nil { t.Fatalf("unable to acquire stdout pipe: %s", err) } stdin, err := session.StdinPipe() if err != nil { t.Fatalf("unable to acquire stdin pipe: %s", err) } tm := ssh.TerminalModes{ssh.ECHO: 0} if err = session.RequestPty("xterm", 80, 40, tm); err != nil { t.Fatalf("req-pty failed: %s", err) } if err := session.WindowChange(100, 100); err != nil { t.Fatalf("window-change failed: %s", err) } err = session.Shell() if err != nil { t.Fatalf("session failed: %s", err) } stdin.Write([]byte("stty size && exit\n")) var buf bytes.Buffer if _, err := io.Copy(&buf, stdout); err != nil { t.Fatalf("reading failed: %s", err) } if sttyOutput := buf.String(); !strings.Contains(sttyOutput, "100 100") { t.Fatalf("terminal WindowChange failure: expected \"100 100\" stty output, got %s", sttyOutput) } } func TestCiphers(t *testing.T) { var config ssh.Config config.SetDefaults() cipherOrder := config.Ciphers // These ciphers will not be tested when commented out in cipher.go it will // fallback to the next available as per line 292. cipherOrder = append(cipherOrder, "aes128-cbc", "3des-cbc") for _, ciph := range cipherOrder { server := newServer(t) defer server.Shutdown() conf := clientConfig() conf.Ciphers = []string{ciph} // Don't fail if sshd doesn't have the cipher. conf.Ciphers = append(conf.Ciphers, cipherOrder...) conn, err := server.TryDial(conf) if err == nil { conn.Close() } else { t.Fatalf("failed for cipher %q", ciph) } } } func TestMACs(t *testing.T) { var config ssh.Config config.SetDefaults() macOrder := config.MACs for _, mac := range macOrder { server := newServer(t) defer server.Shutdown() conf := clientConfig() conf.MACs = []string{mac} // Don't fail if sshd doesn't have the MAC. conf.MACs = append(conf.MACs, macOrder...) 
if conn, err := server.TryDial(conf); err == nil { conn.Close() } else { t.Fatalf("failed for MAC %q", mac) } } } func TestKeyExchanges(t *testing.T) { var config ssh.Config config.SetDefaults() kexOrder := config.KeyExchanges for _, kex := range kexOrder { server := newServer(t) defer server.Shutdown() conf := clientConfig() // Don't fail if sshd doesn't have the kex. conf.KeyExchanges = append([]string{kex}, kexOrder...) conn, err := server.TryDial(conf) if err == nil { conn.Close() } else { t.Errorf("failed for kex %q", kex) } } } func TestClientAuthAlgorithms(t *testing.T) { for _, key := range []string{ "rsa", "dsa", "ecdsa", "ed25519", } { server := newServer(t) conf := clientConfig() conf.SetDefaults() conf.Auth = []ssh.AuthMethod{ ssh.PublicKeys(testSigners[key]), } conn, err := server.TryDial(conf) if err == nil { conn.Close() } else { t.Errorf("failed for key %q", key) } server.Shutdown() } }
{'repo_name': 'tjfoc/hyperledger-fabric-gm', 'stars': '134', 'repo_language': 'Go', 'file_name': 'Makefile', 'mime_type': 'text/x-makefile', 'hash': 1566585130853476648, 'source_dataset': 'data'}
# Azure AD B2C: Force password after 90 days This solution demonstrates how to force user to reset password after 90 day or so. The solution is based on an extension attribute that stores the last time user sets the password and a comparison to the current date and time, minus specified number of days. Read here how to [configure extension attributes](https://docs.microsoft.com/en-us/azure/active-directory-b2c/active-directory-b2c-create-custom-attributes-profile-edit-custom). When a user sign-up or resets the password, the policy sets the extension attributes to the current datetime. On sign-in the policy checks whether both the extension attribute is not null and whether the current date time, minus 90 day is greater than last time user sets the password or not. If greater, it means that at least 90 days passed from the time user reset the password. ## Adding this functionality to your policy To merge the policy with yours, you need: 1. Add the claims in the extension policy 1. Add the claims transformations in the extension policy 1. In the **ComparePasswordResetOnWithCurrentDateTime** claims transformation, change the value of the **timeSpanInSeconds** input parameter to the number of days in seconds you want the users to reset their password. For testing only the value is set to 80 seconds. Note: the value must be negative (starts with minus) 1. In the **AAD-UserReadUsingObjectId** technical profile, add the output claim and output claims transformation. This technical profile reads the extension_passwordResetOn attribute from the user account, checks if the claim is null and compares the value of the extension_passwordResetOn claim with current datetime. The result of this technical profile is the **skipPasswordReset** output claim (return in the last output claims transformation) that indicates whether password reset is required or not (based on date comparison and if extension_passwordResetOn is null) 1. 
In **AAD-UserWriteUsingLogonEmail** and **AAD-UserWritePasswordUsingObjectId** add the input claims transformation and perssis claims. These technical profiles, set the current datetime to the extension_passwordResetOn claim and persists the data do the user account. 1. Add the extra orchestration before the last orchestration step. This orchestration step asks the user to reset the password, saves the new password, and also sets the extension_passwordResetOn claim to current date and time. The orchestration setup will NOT run for social account and if the skipPasswordReset claim is true. ## Tests You should run at least following acceptance tests: - **New account** doesn't need to reset the password (skipPasswordReset: true) - **Existing account before 90 days** doesn't need to reset the password (skipPasswordReset: true) - **Existing account after 90 days** need to reset the password (skipPasswordReset: false) - **Account that extension_passwordResetOn never set before**, should reset the password (skipPasswordReset: false) = pass ## Community Help and Support Use [Stack Overflow](https://stackoverflow.com/questions/tagged/azure-ad-b2c) to get support from the community. Ask your questions on Stack Overflow first and browse existing issues to see if someone has asked your question before. Make sure that your questions or comments are tagged with [azure-ad-b2c]. If you find a bug in the sample, please raise the issue on [GitHub Issues](https://github.com/azure-ad-b2c/samples/issues). To provide product feedback, visit the Azure Active Directory B2C [Feedback page](https://feedback.azure.com/forums/169401-azure-active-directory?category_id=160596). > Note: This sample policy is based on [SocialAndLocalAccounts starter pack](https://github.com/Azure-Samples/active-directory-b2c-custom-policy-starterpack/tree/master/SocialAndLocalAccounts). All changes are marked with **Demo:** comment inside the policy XML files. 
Make the necessary changes in the **Demo action required** sections.
{'repo_name': 'azure-ad-b2c/samples', 'stars': '154', 'repo_language': 'HTML', 'file_name': 'VSWorkspaceState.json', 'mime_type': 'text/plain', 'hash': 5107508388245350244, 'source_dataset': 'data'}
/*************************************************************************/ /* dictionary.h */ /*************************************************************************/ /* This file is part of: */ /* GODOT ENGINE */ /* https://godotengine.org */ /*************************************************************************/ /* Copyright (c) 2007-2020 Juan Linietsky, Ariel Manzur. */ /* Copyright (c) 2014-2020 Godot Engine contributors (cf. AUTHORS.md). */ /* */ /* Permission is hereby granted, free of charge, to any person obtaining */ /* a copy of this software and associated documentation files (the */ /* "Software"), to deal in the Software without restriction, including */ /* without limitation the rights to use, copy, modify, merge, publish, */ /* distribute, sublicense, and/or sell copies of the Software, and to */ /* permit persons to whom the Software is furnished to do so, subject to */ /* the following conditions: */ /* */ /* The above copyright notice and this permission notice shall be */ /* included in all copies or substantial portions of the Software. */ /* */ /* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */ /* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */ /* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/ /* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */ /* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */ /* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */ /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ /*************************************************************************/ #ifndef GODOT_DICTIONARY_H #define GODOT_DICTIONARY_H #ifdef __cplusplus extern "C" { #endif #include <stdint.h> #define GODOT_DICTIONARY_SIZE sizeof(void *) #ifndef GODOT_CORE_API_GODOT_DICTIONARY_TYPE_DEFINED #define GODOT_CORE_API_GODOT_DICTIONARY_TYPE_DEFINED typedef struct { uint8_t _dont_touch_that[GODOT_DICTIONARY_SIZE]; } godot_dictionary; #endif // reduce extern "C" nesting for VS2013 #ifdef __cplusplus } #endif #include <gdnative/array.h> #include <gdnative/gdnative.h> #include <gdnative/variant.h> #ifdef __cplusplus extern "C" { #endif void GDAPI godot_dictionary_new(godot_dictionary *r_dest); void GDAPI godot_dictionary_new_copy(godot_dictionary *r_dest, const godot_dictionary *p_src); void GDAPI godot_dictionary_destroy(godot_dictionary *p_self); godot_dictionary GDAPI godot_dictionary_duplicate(const godot_dictionary *p_self, const godot_bool p_deep); godot_int GDAPI godot_dictionary_size(const godot_dictionary *p_self); godot_bool GDAPI godot_dictionary_empty(const godot_dictionary *p_self); void GDAPI godot_dictionary_clear(godot_dictionary *p_self); godot_bool GDAPI godot_dictionary_has(const godot_dictionary *p_self, const godot_variant *p_key); godot_bool GDAPI godot_dictionary_has_all(const godot_dictionary *p_self, const godot_array *p_keys); void GDAPI godot_dictionary_erase(godot_dictionary *p_self, const godot_variant *p_key); godot_int GDAPI godot_dictionary_hash(const godot_dictionary *p_self); godot_array GDAPI godot_dictionary_keys(const godot_dictionary *p_self); godot_array GDAPI godot_dictionary_values(const godot_dictionary *p_self); godot_variant GDAPI godot_dictionary_get(const godot_dictionary *p_self, const godot_variant *p_key); void GDAPI godot_dictionary_set(godot_dictionary *p_self, const godot_variant *p_key, const godot_variant *p_value); godot_variant GDAPI *godot_dictionary_operator_index(godot_dictionary *p_self, const godot_variant *p_key); 
const godot_variant GDAPI *godot_dictionary_operator_index_const(const godot_dictionary *p_self, const godot_variant *p_key); godot_variant GDAPI *godot_dictionary_next(const godot_dictionary *p_self, const godot_variant *p_key); godot_bool GDAPI godot_dictionary_operator_equal(const godot_dictionary *p_self, const godot_dictionary *p_b); godot_string GDAPI godot_dictionary_to_json(const godot_dictionary *p_self); // GDNative core 1.1 godot_bool GDAPI godot_dictionary_erase_with_return(godot_dictionary *p_self, const godot_variant *p_key); godot_variant GDAPI godot_dictionary_get_with_default(const godot_dictionary *p_self, const godot_variant *p_key, const godot_variant *p_default); #ifdef __cplusplus } #endif #endif // GODOT_DICTIONARY_H
{'repo_name': 'godotengine/godot', 'stars': '31917', 'repo_language': 'C++', 'file_name': 'Skeleton3D.xml', 'mime_type': 'text/xml', 'hash': -1978262028310209179, 'source_dataset': 'data'}
//
//  MMPinView.m
//  MMPopupView
//
//  Created by Ralph Li on 9/6/15.
//  Copyright © 2015 LJC. All rights reserved.
//

#import "MMPinView.h"
#import "MMPopupItem.h"
#import "MMPopupCategory.h"
#import "MMPopupDefine.h"
#import "MMPopupWindow.h"
#import <Masonry/Masonry.h>

// Class extension: private subviews and countdown state for the PIN popup.
@interface MMPinView()

// Rounded white container that holds every other subview.
@property (nonatomic, strong) UIView *backView;
// Title label at the top of the popup.
@property (nonatomic, strong) UILabel *lblStatus;
// Multi-line label showing where the code was sent.
@property (nonatomic, strong) UILabel *lblPhone;
// Container for the four digit placeholder labels.
@property (nonatomic, strong) UIView *numberView;
// The four UILabel digit placeholders ("_" until filled).
@property (nonatomic, strong) NSArray *numberArray;
// "Resend" button that doubles as the countdown display.
@property (nonatomic, strong) UIButton *btnCountDown;
// Hidden text field that actually receives keyboard input.
@property (nonatomic, strong) UITextField *tfPin;
@property (nonatomic, strong) UIButton *btnClose;

// NOTE(review): the three properties below are declared but never used in
// this file — presumably reserved for lockout handling; confirm before removing.
@property (nonatomic, assign) BOOL pinLocked;
@property (nonatomic, strong) NSString *pinLockValue;
@property (nonatomic, strong) NSDate *dateCountdown;

// Seconds remaining in the resend countdown.
@property (nonatomic, assign) NSUInteger nCountdown;

@end

@implementation MMPinView

// Builds the whole popup hierarchy with Masonry constraints and kicks off
// the (currently disabled) resend countdown.
- (instancetype)init
{
    self = [super init];

    if ( self )
    {
        self.type = MMPopupTypeCustom;

        // Fixed popup size.
        [self mas_makeConstraints:^(MASConstraintMaker *make) {
            make.size.mas_equalTo(CGSizeMake(240, 200));
        }];

        self.withKeyboard = YES;

        self.backView = [UIView new];
        [self addSubview:self.backView];
        [self.backView mas_makeConstraints:^(MASConstraintMaker *make) {
            make.edges.equalTo(self);
        }];
        self.backView.layer.cornerRadius = 5.0f;
        self.backView.clipsToBounds = YES;
        self.backView.backgroundColor = [UIColor whiteColor];

        self.btnClose = [UIButton mm_buttonWithTarget:self action:@selector(actionClose)];
        [self.backView addSubview:self.btnClose];
        [self.btnClose mas_makeConstraints:^(MASConstraintMaker *make) {
            make.top.right.equalTo(self.backView).insets(UIEdgeInsetsMake(0, 0, 0, 5));
            make.size.mas_equalTo(CGSizeMake(40, 40));
        }];
        [self.btnClose setTitle:@"Close" forState:UIControlStateNormal];
        [self.btnClose setTitleColor:[UIColor blackColor] forState:UIControlStateNormal];
        self.btnClose.titleLabel.font = [UIFont systemFontOfSize:14];

        self.lblStatus = [UILabel new];
        [self.backView addSubview:self.lblStatus];
        [self.lblStatus mas_makeConstraints:^(MASConstraintMaker *make) {
            make.left.top.right.equalTo(self.backView).insets(UIEdgeInsetsMake(0, 19, 0, 19));
            make.height.equalTo(@50);
        }];
        self.lblStatus.textColor = MMHexColor(0x333333FF);
        self.lblStatus.font = [UIFont boldSystemFontOfSize:17];
        // Fix: title previously read "You Pin Code".
        self.lblStatus.text = @"Your Pin Code";
        self.lblStatus.textAlignment = NSTextAlignmentCenter;

        // Hairline separator under the title.
        UIView *split = [UIView new];
        [self.backView addSubview:split];
        [split mas_makeConstraints:^(MASConstraintMaker *make) {
            make.left.right.equalTo(self.backView);
            make.bottom.equalTo(self.lblStatus.mas_bottom);
            make.height.mas_equalTo(MM_SPLIT_WIDTH);
        }];

        self.lblPhone = [UILabel new];
        [self.backView addSubview:self.lblPhone];
        [self.lblPhone mas_makeConstraints:^(MASConstraintMaker *make) {
            make.left.right.equalTo(self.backView).insets(UIEdgeInsetsMake(0, 19, 0, 19));
            make.top.equalTo(self.lblStatus.mas_bottom).offset(10);
        }];
        self.lblPhone.numberOfLines = 0;
        self.lblPhone.textAlignment = NSTextAlignmentCenter;
        self.lblPhone.font = [UIFont systemFontOfSize:14];
        self.lblPhone.textColor = MMHexColor(0x999999FF);
        // Let the label shrink first, but never grow beyond its content.
        [self.lblPhone setContentCompressionResistancePriority:UILayoutPriorityFittingSizeLevel forAxis:UILayoutConstraintAxisVertical];
        [self.lblPhone setContentHuggingPriority:UILayoutPriorityRequired forAxis:UILayoutConstraintAxisVertical];
        self.lblPhone.text = @"The Code was sent to\n186 8877 8877";

        self.btnCountDown = [UIButton mm_buttonWithTarget:self action:@selector(actionResend)];
        [self.backView addSubview:self.btnCountDown];
        [self.btnCountDown mas_makeConstraints:^(MASConstraintMaker *make) {
            make.left.right.equalTo(self.backView).insets(UIEdgeInsetsMake(0, 19, 0, 19));
            make.bottom.equalTo(self.backView.mas_bottom).offset(-20);
        }];
        self.btnCountDown.titleLabel.textAlignment = NSTextAlignmentCenter;
        self.btnCountDown.titleLabel.font = [UIFont systemFontOfSize:12];
        // Grey while counting down (disabled), accent color when tappable.
        [self.btnCountDown setTitleColor:MMHexColor(0x999999FF) forState:UIControlStateDisabled];
        [self.btnCountDown setTitleColor:MMHexColor(0xE76153FF) forState:UIControlStateNormal];
        [self.btnCountDown setContentCompressionResistancePriority:UILayoutPriorityFittingSizeLevel forAxis:UILayoutConstraintAxisVertical];
        [self.btnCountDown setContentHuggingPriority:UILayoutPriorityRequired forAxis:UILayoutConstraintAxisVertical];

        // Digit area floats between the phone label and the countdown button.
        self.numberView = [UIView new];
        [self.backView addSubview:self.numberView];
        [self.numberView mas_makeConstraints:^(MASConstraintMaker *make) {
            make.top.lessThanOrEqualTo(self.lblPhone.mas_bottom);
            make.bottom.greaterThanOrEqualTo(self.btnCountDown.mas_top);
            make.centerX.equalTo(self.backView);
            make.width.equalTo(@150);
        }];
        [self.numberView setContentHuggingPriority:UILayoutPriorityFittingSizeLevel forAxis:UILayoutConstraintAxisVertical];
        [self.numberView setContentCompressionResistancePriority:UILayoutPriorityRequired forAxis:UILayoutConstraintAxisVertical];

        // Four placeholder labels, one per PIN digit.
        self.numberArray = @[[UILabel new],[UILabel new],[UILabel new],[UILabel new]];
        for ( UILabel *label in self.numberArray )
        {
            [self.numberView addSubview:label];
            [label mas_makeConstraints:^(MASConstraintMaker *make) {
                make.top.bottom.equalTo(self.numberView);
                make.width.equalTo(@30);
            }];
            label.font = [UIFont boldSystemFontOfSize:40];
            label.textColor = MMHexColor(0xE76153FF);
            label.text = @"_";
        }
        [self.numberView mm_distributeSpacingHorizontallyWith:self.numberArray];

        // Invisible text field behind everything; it owns the keyboard.
        self.tfPin = [UITextField new];
        [self addSubview:self.tfPin];
        self.tfPin.keyboardType = UIKeyboardTypeNumberPad;
        [self sendSubviewToBack:self.tfPin];

        [self startCountDown];
    }

    return self;
}

// Starts the resend countdown. The body is intentionally commented out in
// this demo; actionResend still calls it, making resend a no-op for now.
- (void)startCountDown
{
//    [self stopCountDown];
//
//    self.nCountdown = 30;
//
//    self.btnCountDown.enabled = NO;
//
//    [self checkCountDown];
}

// Cancels any pending countdown tick (disabled together with startCountDown).
- (void)stopCountDown
{
//    [NSObject cancelPreviousPerformRequestsWithTarget:self selector:@selector(checkCountDown) object:nil];
}

// One countdown tick: updates the button title and re-schedules itself
// every second until nCountdown reaches zero.
- (void)checkCountDown
{
    if ( self.nCountdown == 0 )
    {
        self.btnCountDown.enabled = YES;
        // Fix: button previously read "Resent".
        [self.btnCountDown setTitle:@"Resend" forState:UIControlStateNormal];
    }
    else
    {
        NSString *text = [NSString stringWithFormat:@"Receive in %@ secs", [@(self.nCountdown) stringValue]];
        [self.btnCountDown setTitle:text forState:UIControlStateDisabled];

        --self.nCountdown;

        // NSRunLoopCommonModes keeps the tick firing during scroll tracking.
        [self performSelector:@selector(checkCountDown) withObject:nil afterDelay:1 inModes:@[NSRunLoopCommonModes]];
    }
}

// Dismisses the popup.
- (void)actionClose
{
    [self hide];
}

// Restarts the countdown when the user asks to resend the code.
- (void)actionResend
{
    [self startCountDown];
}

// Gives keyboard focus to the hidden PIN text field.
- (void)showKeyboard
{
    [self.tfPin becomeFirstResponder];
}

// Resigns keyboard focus from the hidden PIN text field.
- (void)hideKeyboard
{
    [self.tfPin resignFirstResponder];
}

@end
{'repo_name': 'adad184/MMPopupView', 'stars': '2080', 'repo_language': 'Objective-C', 'file_name': 'contents.xcworkspacedata', 'mime_type': 'text/xml', 'hash': 5545075235557474154, 'source_dataset': 'data'}
<?php
/**
 * Zend Framework
 *
 * LICENSE
 *
 * This source file is subject to the new BSD license that is bundled
 * with this package in the file LICENSE.txt.
 * It is also available through the world-wide-web at this URL:
 * http://framework.zend.com/license/new-bsd
 * If you did not receive a copy of the license and are unable to
 * obtain it through the world-wide-web, please send an email
 * to license@zend.com so we can send you a copy immediately.
 *
 * @category   Zend
 * @package    Zend_Pdf
 * @subpackage FileParser
 * @copyright  Copyright (c) 2005-2015 Zend Technologies USA Inc. (http://www.zend.com)
 * @license    http://framework.zend.com/license/new-bsd     New BSD License
 * @version    $Id$
 */

/** @see Zend_Pdf_FileParser_Image */
//require_once 'Zend/Pdf/FileParser/Image.php';

/**
 * Parser for PNG image files.
 *
 * Reads the PNG signature, the IHDR header chunk and the IDAT / PLTE / tRNS
 * chunks, exposing the parsed properties through lazy getters.
 * (The previous docblock described the abstract parent class by mistake.)
 *
 * @package    Zend_Pdf
 * @subpackage FileParser
 * @copyright  Copyright (c) 2005-2015 Zend Technologies USA Inc. (http://www.zend.com)
 * @license    http://framework.zend.com/license/new-bsd     New BSD License
 */
class Zend_Pdf_FileParser_Image_Png extends Zend_Pdf_FileParser_Image
{
    // Result of the signature check; unset until _parseSignature() runs.
    protected $_isPNG;

    // Properties read from the IHDR chunk.
    protected $_width;
    protected $_height;
    protected $_bits;
    protected $_color;
    protected $_compression;
    protected $_preFilter;
    protected $_interlacing;

    // Raw chunk payloads accumulated while parsing.
    protected $_imageData;
    protected $_paletteData;
    protected $_transparencyData;

    /**** Public Interface ****/

    /**
     * Returns the image width in pixels, parsing the file on first use.
     */
    public function getWidth()
    {
        if (!$this->_isParsed) {
            $this->parse();
        }
        return $this->_width;
    }

    /**
     * Returns the image height in pixels, parsing the file on first use.
     */
    public function getHeight()
    {
        if (!$this->_isParsed) {
            $this->parse();
        }
        // Bug fix: this getter previously returned $this->_width.
        return $this->_height;
    }

    /**
     * Returns the bit depth from the IHDR chunk.
     */
    public function getBitDepth()
    {
        if (!$this->_isParsed) {
            $this->parse();
        }
        return $this->_bits;
    }

    /**
     * Returns the PNG color type from the IHDR chunk.
     */
    public function getColorSpace()
    {
        if (!$this->_isParsed) {
            $this->parse();
        }
        return $this->_color;
    }

    /**
     * Returns the compression method from the IHDR chunk.
     */
    public function getCompressionStrategy()
    {
        if (!$this->_isParsed) {
            $this->parse();
        }
        return $this->_compression;
    }

    /**
     * Returns the pre-filter method from the IHDR chunk.
     */
    public function getPaethFilter()
    {
        if (!$this->_isParsed) {
            $this->parse();
        }
        return $this->_preFilter;
    }

    /**
     * Returns the interlacing mode from the IHDR chunk.
     */
    public function getInterlacingMode()
    {
        if (!$this->_isParsed) {
            $this->parse();
        }
        return $this->_interlacing;
    }

    /**
     * Returns the concatenated (still compressed) IDAT payload.
     */
    public function getRawImageData()
    {
        if (!$this->_isParsed) {
            $this->parse();
        }
        return $this->_imageData;
    }

    /**
     * Returns the raw PLTE palette payload, if any.
     */
    public function getRawPaletteData()
    {
        if (!$this->_isParsed) {
            $this->parse();
        }
        return $this->_paletteData;
    }

    /**
     * Returns the parsed tRNS transparency data, if any.
     */
    public function getRawTransparencyData()
    {
        if (!$this->_isParsed) {
            $this->parse();
        }
        return $this->_transparencyData;
    }

    /* Semi-Concrete Class Implementation */

    /**
     * Verifies that the image file is in the expected format.
     *
     * @throws Zend_Pdf_Exception
     */
    public function screen()
    {
        if ($this->_isScreened) {
            return;
        }
        return $this->_checkSignature();
    }

    /**
     * Reads and parses the image data from the file on disk.
     *
     * @throws Zend_Pdf_Exception
     */
    public function parse()
    {
        if ($this->_isParsed) {
            return;
        }

        /* Screen the font file first, if it hasn't been done yet. */
        $this->screen();

        $this->_parseIHDRChunk();
        $this->_parseChunks();
    }

    /**
     * Checks the PNG magic bytes; sets $_isPNG accordingly.
     */
    protected function _parseSignature()
    {
        $this->moveToOffset(1); //Skip the first byte (%)
        if ('PNG' != $this->readBytes(3)) {
            $this->_isPNG = false;
        } else {
            $this->_isPNG = true;
        }
    }

    /**
     * Lazily runs the signature check and returns its result.
     */
    protected function _checkSignature()
    {
        if (!isset($this->_isPNG)) {
            $this->_parseSignature();
        }
        return $this->_isPNG;
    }

    /**
     * Walks the chunk list after IHDR, dispatching the chunks we understand.
     *
     * @throws Zend_Pdf_Exception
     */
    protected function _parseChunks()
    {
        $this->moveToOffset(33); //Variable chunks start at the end of IHDR

        //Start processing chunks. If there are no more bytes to read parsing is complete.
        $size = $this->getSize();
        while ($size - $this->getOffset() >= 8) {
            $chunkLength = $this->readUInt(4);
            if ($chunkLength < 0 || ($chunkLength + $this->getOffset() + 4) > $size) {
                //require_once 'Zend/Pdf/Exception.php';
                throw new Zend_Pdf_Exception("PNG Corrupt: Invalid Chunk Size In File.");
            }

            $chunkType = $this->readBytes(4);
            $offset = $this->getOffset();

            //If we know how to process the chunk, do it here, else ignore the chunk and move on to the next
            switch ($chunkType) {
                case 'IDAT': // This chunk may appear more than once. It contains the actual image data.
                    $this->_parseIDATChunk($offset, $chunkLength);
                    break;

                case 'PLTE': // This chunk contains the image palette.
                    $this->_parsePLTEChunk($offset, $chunkLength);
                    break;

                case 'tRNS': // This chunk contains non-alpha channel transparency data
                    $this->_parseTRNSChunk($offset, $chunkLength);
                    break;

                case 'IEND':
                    break 2; //End the loop too

                //@TODO Implement the rest of the PNG chunks. (There are many not implemented here)
            }

            if ($offset + $chunkLength + 4 < $size) {
                $this->moveToOffset($offset + $chunkLength + 4); //Skip past the data finalizer. (Don't rely on the parse to leave the offsets correct)
            }
        }

        if (empty($this->_imageData)) {
            //require_once 'Zend/Pdf/Exception.php';
            throw new Zend_Pdf_Exception ( "This PNG is corrupt. All png must contain IDAT chunks." );
        }
    }

    /**
     * Parses the mandatory IHDR header chunk.
     *
     * @throws Zend_Pdf_Exception
     */
    protected function _parseIHDRChunk()
    {
        $this->moveToOffset(12); //IHDR must always start at offset 12 and run for 17 bytes

        // Bug fix: the original check was `!$this->readBytes(4) == 'IHDR'`,
        // which negates the string first and therefore never throws.
        if ($this->readBytes(4) != 'IHDR') {
            //require_once 'Zend/Pdf/Exception.php';
            throw new Zend_Pdf_Exception(
                "This PNG is corrupt. The first chunk in a PNG file must be IHDR."
            );
        }

        $this->_width = $this->readUInt(4);
        $this->_height = $this->readUInt(4);
        $this->_bits = $this->readInt(1);
        $this->_color = $this->readInt(1);
        $this->_compression = $this->readInt(1);
        $this->_preFilter = $this->readInt(1);
        $this->_interlacing = $this->readInt(1);

        if ($this->_interlacing != Zend_Pdf_Image::PNG_INTERLACING_DISABLED) {
            //require_once 'Zend/Pdf/Exception.php';
            throw new Zend_Pdf_Exception(
                "Only non-interlaced images are currently supported."
            );
        }
    }

    /**
     * Accumulates one IDAT chunk's payload (IDAT may repeat).
     */
    protected function _parseIDATChunk($chunkOffset, $chunkLength)
    {
        $this->moveToOffset($chunkOffset);
        if (!isset($this->_imageData)) {
            $this->_imageData = $this->readBytes($chunkLength);
        } else {
            $this->_imageData .= $this->readBytes($chunkLength);
        }
    }

    /**
     * Stores the PLTE palette payload verbatim.
     */
    protected function _parsePLTEChunk($chunkOffset, $chunkLength)
    {
        $this->moveToOffset($chunkOffset);
        $this->_paletteData = $this->readBytes($chunkLength);
    }

    /**
     * Parses the tRNS transparency chunk.
     *
     * @throws Zend_Pdf_Exception
     */
    protected function _parseTRNSChunk($chunkOffset, $chunkLength)
    {
        $this->moveToOffset($chunkOffset);

        //Processing of tRNS data varies depending on the color depth

        switch ($this->_color) {
            case Zend_Pdf_Image::PNG_CHANNEL_GRAY:
                $baseColor = $this->readInt(1);
                $this->_transparencyData = array($baseColor, $baseColor);
                break;

            case Zend_Pdf_Image::PNG_CHANNEL_RGB:
                //@TODO Fix this hack.
                //This parser cheats and only uses the lsb's (and only works with < 16 bit depth images)

                /* From the standard:
                   For color type 2 (truecolor), the tRNS chunk contains a single RGB color value,
                   stored in the format:

                   Red: 2 bytes, range 0 .. (2^bitdepth)-1
                   Green: 2 bytes, range 0 .. (2^bitdepth)-1
                   Blue: 2 bytes, range 0 .. (2^bitdepth)-1

                   (If the image bit depth is less than 16, the least significant bits are used and the others are 0.)
                   Pixels of the specified color value are to be treated as transparent (equivalent to alpha value 0);
                   all other pixels are to be treated as fully opaque (alpha value 2bitdepth-1).
                */
                $red = $this->readInt(1);
                $this->skipBytes(1);
                $green = $this->readInt(1);
                $this->skipBytes(1);
                $blue = $this->readInt(1);
                $this->_transparencyData = array($red, $red, $green, $green, $blue, $blue);
                break;

            case Zend_Pdf_Image::PNG_CHANNEL_INDEXED:
                //@TODO Fix this hack.
                //This parser cheats too. It only masks the first color in the palette.

                /* From the standard:
                   For color type 3 (indexed color), the tRNS chunk contains a series of one-byte alpha values,
                   corresponding to entries in the PLTE chunk:

                   Alpha for palette index 0: 1 byte
                   Alpha for palette index 1: 1 byte
                   ...etc...

                   Each entry indicates that pixels of the corresponding palette index must be treated as having the
                   specified alpha value. Alpha values have the same interpretation as in an 8-bit full alpha channel:
                   0 is fully transparent, 255 is fully opaque, regardless of image bit depth. The tRNS chunk must not
                   contain more alpha values than there are palette entries, but tRNS can contain fewer values than
                   there are palette entries. In this case, the alpha value for all remaining palette entries is
                   assumed to be 255. In the common case in which only palette index 0 need be made transparent,
                   only a one-byte tRNS chunk is needed.
                */
                $tmpData = $this->readBytes($chunkLength);
                if (($trnsIdx = strpos($tmpData, "\0")) !== false) {
                    $this->_transparencyData = array($trnsIdx, $trnsIdx);
                }
                break;

            case Zend_Pdf_Image::PNG_CHANNEL_GRAY_ALPHA:
                //Fall through to the next case

            case Zend_Pdf_Image::PNG_CHANNEL_RGB_ALPHA:
                //require_once 'Zend/Pdf/Exception.php';
                throw new Zend_Pdf_Exception(
                    "tRNS chunk illegal for Alpha Channel Images"
                );
                break;
        }
    }
}
{'repo_name': 'wecenter/wecenter', 'stars': '901', 'repo_language': 'PHP', 'file_name': 'aws_offical_external.php', 'mime_type': 'text/x-php', 'hash': -2472035983388363047, 'source_dataset': 'data'}
// Read ./readme.txt via the promise-based fs API, logging both the raw
// Buffer and its decoded string; any failure is reported to stderr.
const fs = require('fs').promises;

(async () => {
  try {
    const data = await fs.readFile('./readme.txt');
    console.log(data);
    console.log(data.toString());
  } catch (err) {
    console.error(err);
  }
})();
{'repo_name': 'ZeroCho/nodejs-book', 'stars': '269', 'repo_language': 'JavaScript', 'file_name': 'layout.html', 'mime_type': 'text/html', 'hash': 2656098258814150649, 'source_dataset': 'data'}
#pragma once #include <cstddef> template<typename T, size_t Size> class RingBuffer { public: inline size_t size() const { return Size; } inline bool empty() const { return _read == _write; } inline bool full() const { return writable() == 0; } inline size_t entries() const { return (_write - _read) % Size; } inline size_t writable() const { return (_read - _write - 1) % Size; } inline size_t readable() const { return (_write - _read) % Size; } inline void write(T value) { size_t write = _write; _buffer[write] = value; _write = (write + 1) % Size; } inline void write(const T *data, size_t length) { while (length--) { write(*data++); } } inline T read() { size_t read = _read; T value = _buffer[read]; _read = (read + 1) % Size; return value; } inline void read(T *data, size_t length) { while (length--) { read(*data++); } } private: T _buffer[Size]; volatile size_t _read = 0; volatile size_t _write = 0; };
{'repo_name': 'westlicht/performer', 'stars': '106', 'repo_language': 'C', 'file_name': 'FindSDL2_image.cmake', 'mime_type': 'text/plain', 'hash': -8125389746921420121, 'source_dataset': 'data'}
# Helpers mixed into the eye CLI (Thor-based): sending commands to the
# daemon over its unix socket and reporting results to the user.
module Eye::Cli::Commands

private

  # Memoized client connected to the eye daemon's unix socket.
  def client
    @client ||= Eye::Client.new(Eye::Local.socket_path)
  end

  # Raw send; a missing or refused socket means the daemon is not running.
  def _cmd(cmd, *args)
    client.execute(command: cmd, args: args)
  rescue Errno::ECONNREFUSED, Errno::ENOENT
    :not_started
  end

  # Sends a command and aborts with a user-facing message on transport
  # failures (daemon not started, or daemon timed out).
  def cmd(cmd, *args)
    res = _cmd(cmd, *args)

    if res == :not_started
      error! "socket(#{Eye::Local.socket_path}) not found, did you run `eye load`?"
    elsif res == :timeouted
      error! 'eye timed out without responding...'
    end

    res
  end

  # Prints the per-file results of `eye load`; exits 1 if any file failed.
  def say_load_result(res = {}, opts = {})
    error!(res) unless res.is_a?(Hash)

    # Only prefix with the filename when more than one config was loaded.
    say_filename = (res.size > 1)
    error = false
    res.each do |filename, res2|
      # Bug fix: the filename interpolation was broken, printing a literal
      # placeholder instead of the config file's name.
      say "#{filename}: ", nil, true if say_filename
      show_load_message(res2, opts)
      error = true if res2[:error]
    end
    exit(1) if error
  end

  # Prints one load result: the error plus backtrace in red, or a success
  # message (and optionally the parsed config) otherwise.
  def show_load_message(res, opts = {})
    if res[:error]
      say res[:message], :red
      res[:backtrace].to_a.each { |line| say line, :red }
    else
      unless res[:empty]
        say(opts[:syntax] ? 'Config ok!' : 'Config loaded!', :green)
      end

      if opts[:print_config]
        require 'pp'
        PP.pp res[:config], STDOUT, 150
      end
    end
  end

  # Sends a control command and reports which objects it was applied to,
  # aborting with a helpful message on every known failure shape.
  def send_command(command, *args)
    res = cmd(command, *args)

    if res == :unknown_command
      error! "unknown command :#{command}"
    elsif res == :corrupted_data
      error! 'something crazy wrong, check eye logs!'
    elsif res.is_a?(Hash)
      if res[:error]
        error! "Error: #{res[:error]}"
      elsif res = res[:result] # intentional assignment: narrow to the result payload
        if res == []
          error! "command :#{command}, objects not found!"
        else
          say "command :#{command} sent to [#{res * ', '}]"
        end
      end
    else
      error! "unknown result #{res.inspect}"
    end
  end

end
{'repo_name': 'kostya/eye', 'stars': '1160', 'repo_language': 'Ruby', 'file_name': 'status.rb', 'mime_type': 'text/x-ruby', 'hash': -688243033448021627, 'source_dataset': 'data'}
fileFormatVersion: 2 guid: b804088948820194cbda76af39c08174 timeCreated: 1529972058 licenseType: Free DefaultImporter: userData: assetBundleName: assetBundleVariant:
{'repo_name': 'EsotericSoftware/spine-runtimes', 'stars': '2640', 'repo_language': 'JavaScript', 'file_name': 'AssemblyInfo.cs', 'mime_type': 'text/plain', 'hash': -7233544834340956709, 'source_dataset': 'data'}
# Protocol Buffers for Go with Gadgets [![Build Status](https://travis-ci.org/gogo/protobuf.svg?branch=master)](https://travis-ci.org/gogo/protobuf) [![GoDoc](https://godoc.org/github.com/gogo/protobuf?status.svg)](http://godoc.org/github.com/gogo/protobuf) gogoprotobuf is a fork of <a href="https://github.com/golang/protobuf">golang/protobuf</a> with extra code generation features. This code generation is used to achieve: - fast marshalling and unmarshalling - more canonical Go structures - goprotobuf compatibility - less typing by optionally generating extra helper code - peace of mind by optionally generating test and benchmark code - other serialization formats Keeping track of how up to date gogoprotobuf is relative to golang/protobuf is done in this <a href="https://github.com/gogo/protobuf/issues/191">issue</a> ## Release v1.3.0 The project has updated to release v1.3.0. Check out the release notes <a href="https://github.com/gogo/protobuf/releases/tag/v1.3.0">here</a>. With this new release comes a new internal library version. This means any newly generated *pb.go files generated with the v1.3.0 library will not be compatible with the old library version (v1.2.1). However, current *pb.go files (generated with v1.2.1) should still work with the new library. Please make sure you manage your dependencies correctly when upgrading your project. If you are still using v1.2.1 and you update your dependencies, one of which could include a new *pb.go (generated with v1.3.0), you could get a compile time error. Our upstream repo, golang/protobuf, also had to go through this process in order to update their library version. Here is a link explaining <a href="https://github.com/golang/protobuf/issues/763#issuecomment-442434870">hermetic builds</a>. 
## Users These projects use gogoprotobuf: - <a href="http://godoc.org/github.com/coreos/etcd">etcd</a> - <a href="https://blog.gopheracademy.com/advent-2015/etcd-distributed-key-value-store-with-grpc-http2/">blog</a> - <a href="https://github.com/coreos/etcd/blob/master/etcdserver/etcdserverpb/etcdserver.proto">sample proto file</a> - <a href="https://www.spacemonkey.com/">spacemonkey</a> - <a href="https://www.spacemonkey.com/blog/posts/go-space-monkey">blog</a> - <a href="http://badoo.com">badoo</a> - <a href="https://github.com/badoo/lsd/blob/32061f501c5eca9c76c596d790b450501ba27b2f/proto/lsd.proto">sample proto file</a> - <a href="https://github.com/mesos/mesos-go">mesos-go</a> - <a href="https://github.com/mesos/mesos-go/blob/f9e5fb7c2f50ab5f23299f26b6b07c5d6afdd252/api/v0/mesosproto/authentication.proto">sample proto file</a> - <a href="https://github.com/mozilla-services/heka">heka</a> - <a href="https://github.com/mozilla-services/heka/commit/eb72fbf7d2d28249fbaf8d8dc6607f4eb6f03351">the switch from golang/protobuf to gogo/protobuf when it was still on code.google.com</a> - <a href="https://github.com/cockroachdb/cockroach">cockroachdb</a> - <a href="https://github.com/cockroachdb/cockroach/blob/651d54d393e391a30154e9117ab4b18d9ee6d845/roachpb/metadata.proto">sample proto file</a> - <a href="https://github.com/jbenet/go-ipfs">go-ipfs</a> - <a href="https://github.com/ipfs/go-ipfs/blob/2b6da0c024f28abeb16947fb452787196a6b56a2/merkledag/pb/merkledag.proto">sample proto file</a> - <a href="https://github.com/philhofer/rkive">rkive-go</a> - <a href="https://github.com/philhofer/rkive/blob/e5dd884d3ea07b341321073882ae28aa16dd11be/rpbc/riak_dt.proto">sample proto file</a> - <a href="https://www.dropbox.com">dropbox</a> - <a href="https://srclib.org/">srclib</a> - <a href="https://github.com/sourcegraph/srclib/blob/6538858f0c410cac5c63440317b8d009e889d3fb/graph/def.proto">sample proto file</a> - <a href="http://www.adyoulike.com/">adyoulike</a> - <a 
href="http://www.cloudfoundry.org/">cloudfoundry</a> - <a href="https://github.com/cloudfoundry/bbs/blob/d673710b8c4211037805129944ee4c5373d6588a/models/events.proto">sample proto file</a> - <a href="http://kubernetes.io/">kubernetes</a> - <a href="https://github.com/kubernetes/kubernetes/tree/88d8628137f94ee816aaa6606ae8cd045dee0bff/cmd/libs/go2idl">go2idl built on top of gogoprotobuf</a> - <a href="https://dgraph.io/">dgraph</a> - <a href="https://github.com/dgraph-io/dgraph/releases/tag/v0.4.3">release notes</a> - <a href="https://discuss.dgraph.io/t/gogoprotobuf-is-extremely-fast/639">benchmarks</a> - <a href="https://github.com/centrifugal/centrifugo">centrifugo</a> - <a href="https://forum.golangbridge.org/t/centrifugo-real-time-messaging-websocket-or-sockjs-server-v1-5-0-released/2861">release notes</a> - <a href="https://medium.com/@fzambia/centrifugo-protobuf-inside-json-outside-21d39bdabd68#.o3icmgjqd">blog</a> - <a href="https://github.com/docker/swarmkit">docker swarmkit</a> - <a href="https://github.com/docker/swarmkit/blob/63600e01af3b8da2a0ed1c9fa6e1ae4299d75edb/api/objects.proto">sample proto file</a> - <a href="https://nats.io/">nats.io</a> - <a href="https://github.com/nats-io/go-nats-streaming/blob/master/pb/protocol.proto">go-nats-streaming</a> - <a href="https://github.com/pingcap/tidb">tidb</a> - Communication between <a href="https://github.com/pingcap/tipb/blob/master/generate-go.sh#L4">tidb</a> and <a href="https://github.com/pingcap/kvproto/blob/master/generate_go.sh#L3">tikv</a> - <a href="https://github.com/AsynkronIT/protoactor-go">protoactor-go</a> - <a href="https://github.com/AsynkronIT/protoactor-go/blob/master/protobuf/protoc-gen-protoactor/main.go">vanity command</a> that also generates actors from service definitions - <a href="https://containerd.io/">containerd</a> - <a href="https://github.com/containerd/containerd/tree/master/cmd/protoc-gen-gogoctrd">vanity command with custom field names</a> that conforms to the golang
convention. - <a href="https://github.com/heroiclabs/nakama">nakama</a> - <a href="https://github.com/src-d/proteus">proteus</a> - <a href="https://github.com/go-graphite">carbonzipper stack</a> - <a href="https://sendgrid.com/">sendgrid</a> - <a href="https://github.com/zero-os/0-stor">zero-os/0-stor</a> - <a href="https://github.com/spacemeshos/go-spacemesh">go-spacemesh</a> - <a href="https://github.com/weaveworks/cortex">cortex</a> - <a href="https://github.com/weaveworks/cortex/blob/fee02a59729d3771ef888f7bf0fd050e1197c56e/pkg/ingester/client/cortex.proto">sample proto file</a> - <a href="http://skywalking.apache.org/">Apache SkyWalking APM</a> - Istio telemetry receiver based on Mixer bypass protocol - <a href="https://github.com/hyperledger/burrow">Hyperledger Burrow</a> - a permissioned DLT framework - <a href="https://github.com/iov-one/weave">IOV Weave</a> - a blockchain framework - <a href="https://github.com/iov-one/weave/tree/23f9856f1e316f93cb3d45d92c4c6a0c4810f6bf/spec/gogo">sample proto files</a> Please let us know if you are using gogoprotobuf by posting on our <a href="https://groups.google.com/forum/#!topic/gogoprotobuf/Brw76BxmFpQ">GoogleGroup</a>. 
### Mentioned - <a href="http://www.slideshare.net/albertstrasheim/serialization-in-go">Cloudflare - go serialization talk - Albert Strasheim</a> - <a href="https://youtu.be/4xB46Xl9O9Q?t=557">GopherCon 2014 Writing High Performance Databases in Go by Ben Johnson</a> - <a href="https://github.com/alecthomas/go_serialization_benchmarks">alecthomas' go serialization benchmarks</a> - <a href="http://agniva.me/go/2017/11/18/gogoproto.html">Go faster with gogoproto - Agniva De Sarker</a> - <a href="https://www.youtube.com/watch?v=CY9T020HLP8">Evolution of protobuf (Gource Visualization) - Landon Wilkins</a> - <a href="https://fosdem.org/2018/schedule/event/gopherjs/">Creating GopherJS Apps with gRPC-Web - Johan Brandhorst</a> - <a href="https://jbrandhorst.com/post/gogoproto/">So you want to use GoGo Protobuf - Johan Brandhorst</a> - <a href="https://jbrandhorst.com/post/grpc-errors/">Advanced gRPC Error Usage - Johan Brandhorst</a> - <a href="https://www.udemy.com/grpc-golang/?couponCode=GITHUB10">gRPC Golang Course on Udemy - Stephane Maarek</a> ## Getting Started There are several ways to use gogoprotobuf, but for all you need to install go and protoc. After that you can choose: - Speed - More Speed and more generated code - Most Speed and most customization ### Installation To install it, you must first have Go (at least version 1.6.3 or 1.9 if you are using gRPC) installed (see [http://golang.org/doc/install](http://golang.org/doc/install)). Latest patch versions of 1.10 and 1.11 are continuously tested. Next, install the standard protocol buffer implementation from [https://github.com/google/protobuf](https://github.com/google/protobuf). Most versions from 2.3.1 should not give any problems, but 2.6.1, 3.0.2 and 3.6.1 are continuously tested. ### Speed Install the protoc-gen-gofast binary go get github.com/gogo/protobuf/protoc-gen-gofast Use it to generate faster marshaling and unmarshaling go code for your protocol buffers. protoc --gofast_out=. 
myproto.proto This does not allow you to use any of the other gogoprotobuf [extensions](https://github.com/gogo/protobuf/blob/master/extensions.md). ### More Speed and more generated code Fields without pointers cause less time in the garbage collector. More code generation results in more convenient methods. Other binaries are also included: protoc-gen-gogofast (same as gofast, but imports gogoprotobuf) protoc-gen-gogofaster (same as gogofast, without XXX_unrecognized, less pointer fields) protoc-gen-gogoslick (same as gogofaster, but with generated string, gostring and equal methods) Installing any of these binaries is easy. Simply run: go get github.com/gogo/protobuf/proto go get github.com/gogo/protobuf/{binary} go get github.com/gogo/protobuf/gogoproto These binaries allow you to use gogoprotobuf [extensions](https://github.com/gogo/protobuf/blob/master/extensions.md). You can also use your own binary. To generate the code, you also need to set the include path properly. protoc -I=. -I=$GOPATH/src -I=$GOPATH/src/github.com/gogo/protobuf/protobuf --{binary}_out=. myproto.proto To use proto files from "google/protobuf" you need to add additional args to protoc. protoc -I=. -I=$GOPATH/src -I=$GOPATH/src/github.com/gogo/protobuf/protobuf --{binary}_out=\ Mgoogle/protobuf/any.proto=github.com/gogo/protobuf/types,\ Mgoogle/protobuf/duration.proto=github.com/gogo/protobuf/types,\ Mgoogle/protobuf/struct.proto=github.com/gogo/protobuf/types,\ Mgoogle/protobuf/timestamp.proto=github.com/gogo/protobuf/types,\ Mgoogle/protobuf/wrappers.proto=github.com/gogo/protobuf/types:. \ myproto.proto Note that in the protoc command, {binary} does not contain the initial prefix of "protoc-gen". ### Most Speed and most customization Customizing the fields of the messages to be the fields that you actually want to use removes the need to copy between the structs you use and structs you use to serialize. 
gogoprotobuf also offers more serialization formats and generation of tests and even more methods. Please visit the [extensions](https://github.com/gogo/protobuf/blob/master/extensions.md) page for more documentation. Install protoc-gen-gogo: go get github.com/gogo/protobuf/proto go get github.com/gogo/protobuf/jsonpb go get github.com/gogo/protobuf/protoc-gen-gogo go get github.com/gogo/protobuf/gogoproto ## GRPC It works the same as golang/protobuf, simply specify the plugin. Here is an example using gofast: protoc --gofast_out=plugins=grpc:. my.proto See [https://github.com/gogo/grpc-example](https://github.com/gogo/grpc-example) for an example of using gRPC with gogoprotobuf and the wider grpc-ecosystem. ## License This software is licensed under the 3-Clause BSD License ("BSD License 2.0", "Revised BSD License", "New BSD License", or "Modified BSD License").
{'repo_name': 'docker/cli', 'stars': '2265', 'repo_language': 'Go', 'file_name': 'docker_windows.go', 'mime_type': 'text/plain', 'hash': -3441137602413724568, 'source_dataset': 'data'}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, DataOutputStream} import scala.collection.mutable.ArrayBuffer import scala.concurrent.ExecutionContext import org.apache.spark.{broadcast, SparkEnv} import org.apache.spark.internal.Logging import org.apache.spark.io.CompressionCodec import org.apache.spark.rdd.{RDD, RDDOperationScope} import org.apache.spark.sql.{Row, SparkSession} import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow} import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.catalyst.expressions.codegen.{Predicate => GenPredicate, _} import org.apache.spark.sql.catalyst.plans.QueryPlan import org.apache.spark.sql.catalyst.plans.physical._ import org.apache.spark.sql.execution.metric.SQLMetric import org.apache.spark.sql.types.DataType import org.apache.spark.util.ThreadUtils /** * The base class for physical operators. * * The naming convention is that physical operators end with "Exec" suffix, e.g. [[ProjectExec]]. */ abstract class SparkPlan extends QueryPlan[SparkPlan] with Logging with Serializable { /** * A handle to the SQL Context that was used to create this plan. 
Since many operators need * access to the sqlContext for RDD operations or configuration this field is automatically * populated by the query planning infrastructure. */ @transient final val sqlContext = SparkSession.getActiveSession.map(_.sqlContext).orNull protected def sparkContext = sqlContext.sparkContext // sqlContext will be null when we are being deserialized on the slaves. In this instance // the value of subexpressionEliminationEnabled will be set by the deserializer after the // constructor has run. val subexpressionEliminationEnabled: Boolean = if (sqlContext != null) { sqlContext.conf.subexpressionEliminationEnabled } else { false } /** Overridden make copy also propagates sqlContext to copied plan. */ override def makeCopy(newArgs: Array[AnyRef]): SparkPlan = { SparkSession.setActiveSession(sqlContext.sparkSession) super.makeCopy(newArgs) } /** * Return all metadata that describes more details of this SparkPlan. */ def metadata: Map[String, String] = Map.empty /** * Return all metrics containing metrics of this SparkPlan. */ def metrics: Map[String, SQLMetric] = Map.empty /** * Reset all the metrics. */ def resetMetrics(): Unit = { metrics.valuesIterator.foreach(_.reset()) } /** * Return a LongSQLMetric according to the name. */ def longMetric(name: String): SQLMetric = metrics(name) // TODO: Move to `DistributedPlan` /** Specifies how data is partitioned across different nodes in the cluster. */ def outputPartitioning: Partitioning = UnknownPartitioning(0) // TODO: WRONG WIDTH! /** Specifies any partition requirements on the input data for this operator. */ def requiredChildDistribution: Seq[Distribution] = Seq.fill(children.size)(UnspecifiedDistribution) /** Specifies how data is ordered in each partition. */ def outputOrdering: Seq[SortOrder] = Nil /** Specifies sort order for each partition requirements on the input data for this operator. 
*/ def requiredChildOrdering: Seq[Seq[SortOrder]] = Seq.fill(children.size)(Nil) /** * Returns the result of this query as an RDD[InternalRow] by delegating to `doExecute` after * preparations. * * Concrete implementations of SparkPlan should override `doExecute`. */ final def execute(): RDD[InternalRow] = executeQuery { doExecute() } /** * Returns the result of this query as a broadcast variable by delegating to `doExecuteBroadcast` * after preparations. * * Concrete implementations of SparkPlan should override `doExecuteBroadcast`. */ final def executeBroadcast[T](): broadcast.Broadcast[T] = executeQuery { doExecuteBroadcast() } /** * Execute a query after preparing the query and adding query plan information to created RDDs * for visualization. */ protected final def executeQuery[T](query: => T): T = { RDDOperationScope.withScope(sparkContext, nodeName, false, true) { prepare() waitForSubqueries() query } } /** * List of (uncorrelated scalar subquery, future holding the subquery result) for this plan node. * This list is populated by [[prepareSubqueries]], which is called in [[prepare]]. */ @transient private val runningSubqueries = new ArrayBuffer[ExecSubqueryExpression] /** * Finds scalar subquery expressions in this plan node and starts evaluating them. */ protected def prepareSubqueries(): Unit = { expressions.foreach { _.collect { case e: ExecSubqueryExpression => e.plan.prepare() runningSubqueries += e } } } /** * Blocks the thread until all subqueries finish evaluation and update the results. */ protected def waitForSubqueries(): Unit = synchronized { // fill in the result of subqueries runningSubqueries.foreach { sub => sub.updateResult() } runningSubqueries.clear() } /** * Whether the "prepare" method is called. */ private var prepared = false /** * Prepare a SparkPlan for execution. It's idempotent. */ final def prepare(): Unit = { // doPrepare() may depend on it's children, we should call prepare() on all the children first. 
children.foreach(_.prepare()) synchronized { if (!prepared) { prepareSubqueries() doPrepare() prepared = true } } } /** * Overridden by concrete implementations of SparkPlan. It is guaranteed to run before any * `execute` of SparkPlan. This is helpful if we want to set up some state before executing the * query, e.g., `BroadcastHashJoin` uses it to broadcast asynchronously. * * Note: the prepare method has already walked down the tree, so the implementation doesn't need * to call children's prepare methods. * * This will only be called once, protected by `this`. */ protected def doPrepare(): Unit = {} /** * Overridden by concrete implementations of SparkPlan. * Produces the result of the query as an RDD[InternalRow] */ protected def doExecute(): RDD[InternalRow] /** * Overridden by concrete implementations of SparkPlan. * Produces the result of the query as a broadcast variable. */ protected[sql] def doExecuteBroadcast[T](): broadcast.Broadcast[T] = { throw new UnsupportedOperationException(s"$nodeName does not implement doExecuteBroadcast") } /** * Packing the UnsafeRows into byte array for faster serialization. * The byte arrays are in the following format: * [size] [bytes of UnsafeRow] [size] [bytes of UnsafeRow] ... [-1] * * UnsafeRow is highly compressible (at least 8 bytes for any column), the byte array is also * compressed. */ private def getByteArrayRdd(n: Int = -1): RDD[Array[Byte]] = { execute().mapPartitionsInternal { iter => var count = 0 val buffer = new Array[Byte](4 << 10) // 4K val codec = CompressionCodec.createCodec(SparkEnv.get.conf) val bos = new ByteArrayOutputStream() val out = new DataOutputStream(codec.compressedOutputStream(bos)) while (iter.hasNext && (n < 0 || count < n)) { val row = iter.next().asInstanceOf[UnsafeRow] out.writeInt(row.getSizeInBytes) row.writeToStream(out, buffer) count += 1 } out.writeInt(-1) out.flush() out.close() Iterator(bos.toByteArray) } } /** * Decode the byte arrays back to UnsafeRows and put them into buffer. 
*/ private def decodeUnsafeRows(bytes: Array[Byte]): Iterator[InternalRow] = { val nFields = schema.length val codec = CompressionCodec.createCodec(SparkEnv.get.conf) val bis = new ByteArrayInputStream(bytes) val ins = new DataInputStream(codec.compressedInputStream(bis)) new Iterator[InternalRow] { private var sizeOfNextRow = ins.readInt() override def hasNext: Boolean = sizeOfNextRow >= 0 override def next(): InternalRow = { val bs = new Array[Byte](sizeOfNextRow) ins.readFully(bs) val row = new UnsafeRow(nFields) row.pointTo(bs, sizeOfNextRow) sizeOfNextRow = ins.readInt() row } } } /** * Runs this query returning the result as an array. */ def executeCollect(): Array[InternalRow] = { val byteArrayRdd = getByteArrayRdd() val results = ArrayBuffer[InternalRow]() byteArrayRdd.collect().foreach { bytes => decodeUnsafeRows(bytes).foreach(results.+=) } results.toArray } /** * Runs this query returning the result as an iterator of InternalRow. * * Note: this will trigger multiple jobs (one for each partition). */ def executeToIterator(): Iterator[InternalRow] = { getByteArrayRdd().toLocalIterator.flatMap(decodeUnsafeRows) } /** * Runs this query returning the result as an array, using external Row format. */ def executeCollectPublic(): Array[Row] = { val converter = CatalystTypeConverters.createToScalaConverter(schema) executeCollect().map(converter(_).asInstanceOf[Row]) } /** * Runs this query returning the first `n` rows as an array. * * This is modeled after RDD.take but never runs any job locally on the driver. */ def executeTake(n: Int): Array[InternalRow] = { if (n == 0) { return new Array[InternalRow](0) } val childRDD = getByteArrayRdd(n) val buf = new ArrayBuffer[InternalRow] val totalParts = childRDD.partitions.length var partsScanned = 0 while (buf.size < n && partsScanned < totalParts) { // The number of partitions to try in this iteration. It is ok for this number to be // greater than totalParts because we actually cap it at totalParts in runJob. 
var numPartsToTry = 1L if (partsScanned > 0) { // If we didn't find any rows after the previous iteration, quadruple and retry. // Otherwise, interpolate the number of partitions we need to try, but overestimate // it by 50%. We also cap the estimation in the end. val limitScaleUpFactor = Math.max(sqlContext.conf.limitScaleUpFactor, 2) if (buf.isEmpty) { numPartsToTry = partsScanned * limitScaleUpFactor } else { // the left side of max is >=1 whenever partsScanned >= 2 numPartsToTry = Math.max((1.5 * n * partsScanned / buf.size).toInt - partsScanned, 1) numPartsToTry = Math.min(numPartsToTry, partsScanned * limitScaleUpFactor) } } val p = partsScanned.until(math.min(partsScanned + numPartsToTry, totalParts).toInt) val sc = sqlContext.sparkContext val res = sc.runJob(childRDD, (it: Iterator[Array[Byte]]) => if (it.hasNext) it.next() else Array.empty[Byte], p) buf ++= res.flatMap(decodeUnsafeRows) partsScanned += p.size } if (buf.size > n) { buf.take(n).toArray } else { buf.toArray } } protected def newMutableProjection( expressions: Seq[Expression], inputSchema: Seq[Attribute], useSubexprElimination: Boolean = false): MutableProjection = { log.debug(s"Creating MutableProj: $expressions, inputSchema: $inputSchema") GenerateMutableProjection.generate(expressions, inputSchema, useSubexprElimination) } protected def newPredicate( expression: Expression, inputSchema: Seq[Attribute]): GenPredicate = { GeneratePredicate.generate(expression, inputSchema) } protected def newOrdering( order: Seq[SortOrder], inputSchema: Seq[Attribute]): Ordering[InternalRow] = { GenerateOrdering.generate(order, inputSchema) } /** * Creates a row ordering for the given schema, in natural ascending order. 
*/ protected def newNaturalAscendingOrdering(dataTypes: Seq[DataType]): Ordering[InternalRow] = { val order: Seq[SortOrder] = dataTypes.zipWithIndex.map { case (dt, index) => SortOrder(BoundReference(index, dt, nullable = true), Ascending) } newOrdering(order, Seq.empty) } } object SparkPlan { private[execution] val subqueryExecutionContext = ExecutionContext.fromExecutorService( ThreadUtils.newDaemonCachedThreadPool("subquery", 16)) } trait LeafExecNode extends SparkPlan { override final def children: Seq[SparkPlan] = Nil override def producedAttributes: AttributeSet = outputSet } object UnaryExecNode { def unapply(a: Any): Option[(SparkPlan, SparkPlan)] = a match { case s: SparkPlan if s.children.size == 1 => Some((s, s.children.head)) case _ => None } } trait UnaryExecNode extends SparkPlan { def child: SparkPlan override final def children: Seq[SparkPlan] = child :: Nil } trait BinaryExecNode extends SparkPlan { def left: SparkPlan def right: SparkPlan override final def children: Seq[SparkPlan] = Seq(left, right) }
{'repo_name': 'qubole/spark-on-lambda', 'stars': '131', 'repo_language': 'Scala', 'file_name': 'ReplSuite.scala', 'mime_type': 'text/plain', 'hash': -3241036619974612428, 'source_dataset': 'data'}
fileFormatVersion: 2 guid: ee42f1964a0e8224c90be81905946699 timeCreated: 1484091313 licenseType: Pro MonoImporter: serializedVersion: 2 defaultReferences: [] executionOrder: 0 icon: {instanceID: 0} userData: assetBundleName: assetBundleVariant:
{'repo_name': 'UnityTechnologies/MachineLearningRoguelike', 'stars': '225', 'repo_language': 'C#', 'file_name': 'AudioManager.asset', 'mime_type': 'text/plain', 'hash': -4829328760161892664, 'source_dataset': 'data'}
<?php /** * Filters custom meta box class to add cusotm meta to collection component * * @since 1.9.3 */ class AesopCollectionComponentAdmin { public function __construct() { add_action( 'wp_ajax_aesop_get_more_posts', array( $this, 'get_more_posts' ) ); add_action( 'wp_ajax_nopriv_aesop_get_more_posts', array( $this, 'get_more_posts' ) ); } /** * @since 1.9.3 */ public function get_more_posts() { $cat= $_POST["cat"]; $ppp = $_POST["posts_per_page"]; $paged = $_POST["page"]; $order = $_POST["order"]; $args = array( 'orderby' => array( 'date' => $order ), 'posts_per_page' => $ppp, 'cat' => $cat, 'ignore_sticky' => true, 'post_status' => array('publish'), 'paged' => $paged ); $query = new wp_query( apply_filters( 'aesop_collection_query', $args ) ); if ( $query->have_posts() ) : while ( $query->have_posts() ) : $query->the_post(); $coverimg = wp_get_attachment_image_src( get_post_thumbnail_id( get_the_ID() ), 'large' ); ?><div class="aesop-collection-item <?php if ($coverimg) {echo "aesop-has-image";} ?>"> <?php do_action( 'aesop_collection_inside_item_top', $atts, $unique ); // action ?> <a class="aesop-fader aesop-collection-item-link" href="<?php the_permalink();?>"> <div class="aesop-collection-item-inner"> <h2 class="aesop-collection-entry-title" itemprop="title"><?php the_title();?></h2> <p class="aesop-collection-meta"><?php printf( __( 'Written by %s', 'aesop-core' ), apply_filters( 'aesop_collection_author', get_the_author(), get_the_ID() ) ); ?></p> <div class="aesop-collection-item-excerpt"><?php echo wp_trim_words( preg_replace( '/\[[^\]]+\]/', '', get_the_excerpt()), 16, '...' );?></div> </div> <div class="aesop-collection-item-img" style="background-image:url(<?php echo $coverimg[0];?>);background-repeat:no-repeat;background-size:cover;"></div> </a> <?php do_action( 'aesop_collection_inside_item_bottom', $atts, $unique ); // action ?> </div> <?php endwhile;endif; wp_reset_postdata(); exit; } } new AesopCollectionComponentAdmin;
{'repo_name': 'hyunsupul/aesop-core', 'stars': '244', 'repo_language': 'JavaScript', 'file_name': 'leaflet-src.js', 'mime_type': 'text/plain', 'hash': 4503715479210231686, 'source_dataset': 'data'}
/* * searchtools.js * ~~~~~~~~~~~~~~~~ * * Sphinx JavaScript utilities for the full-text search. * * :copyright: Copyright 2007-2019 by the Sphinx team, see AUTHORS. * :license: BSD, see LICENSE for details. * */ if (!Scorer) { /** * Simple result scoring code. */ var Scorer = { // Implement the following function to further tweak the score for each result // The function takes a result array [filename, title, anchor, descr, score] // and returns the new score. /* score: function(result) { return result[4]; }, */ // query matches the full name of an object objNameMatch: 11, // or matches in the last dotted part of the object name objPartialMatch: 6, // Additive scores depending on the priority of the object objPrio: {0: 15, // used to be importantResults 1: 5, // used to be objectResults 2: -5}, // used to be unimportantResults // Used when the priority is not in the mapping. objPrioDefault: 0, // query found in title title: 15, // query found in terms term: 5 }; } if (!splitQuery) { function splitQuery(query) { return query.split(/\s+/); } } /** * Search Module */ var Search = { _index : null, _queued_query : null, _pulse_status : -1, init : function() { var params = $.getQueryParameters(); if (params.q) { var query = params.q[0]; $('input[name="q"]')[0].value = query; this.performSearch(query); } }, loadIndex : function(url) { $.ajax({type: "GET", url: url, data: null, dataType: "script", cache: true, complete: function(jqxhr, textstatus) { if (textstatus != "success") { document.getElementById("searchindexloader").src = url; } }}); }, setIndex : function(index) { var q; this._index = index; if ((q = this._queued_query) !== null) { this._queued_query = null; Search.query(q); } }, hasIndex : function() { return this._index !== null; }, deferQuery : function(query) { this._queued_query = query; }, stopPulse : function() { this._pulse_status = 0; }, startPulse : function() { if (this._pulse_status >= 0) return; function pulse() { var i; Search._pulse_status = 
(Search._pulse_status + 1) % 4; var dotString = ''; for (i = 0; i < Search._pulse_status; i++) dotString += '.'; Search.dots.text(dotString); if (Search._pulse_status > -1) window.setTimeout(pulse, 500); } pulse(); }, /** * perform a search for something (or wait until index is loaded) */ performSearch : function(query) { // create the required interface elements this.out = $('#search-results'); this.title = $('<h2>' + _('Searching') + '</h2>').appendTo(this.out); this.dots = $('<span></span>').appendTo(this.title); this.status = $('<p style="display: none"></p>').appendTo(this.out); this.output = $('<ul class="search"/>').appendTo(this.out); $('#search-progress').text(_('Preparing search...')); this.startPulse(); // index already loaded, the browser was quick! if (this.hasIndex()) this.query(query); else this.deferQuery(query); }, /** * execute search (requires search index to be loaded) */ query : function(query) { var i; // stem the searchterms and add them to the correct list var stemmer = new Stemmer(); var searchterms = []; var excluded = []; var hlterms = []; var tmp = splitQuery(query); var objectterms = []; for (i = 0; i < tmp.length; i++) { if (tmp[i] !== "") { objectterms.push(tmp[i].toLowerCase()); } if ($u.indexOf(stopwords, tmp[i].toLowerCase()) != -1 || tmp[i].match(/^\d+$/) || tmp[i] === "") { // skip this "word" continue; } // stem the word var word = stemmer.stemWord(tmp[i].toLowerCase()); // prevent stemmer from cutting word smaller than two chars if(word.length < 3 && tmp[i].length >= 3) { word = tmp[i]; } var toAppend; // select the correct list if (word[0] == '-') { toAppend = excluded; word = word.substr(1); } else { toAppend = searchterms; hlterms.push(tmp[i].toLowerCase()); } // only add if not already in the list if (!$u.contains(toAppend, word)) toAppend.push(word); } var highlightstring = '?highlight=' + $.urlencode(hlterms.join(" ")); // console.debug('SEARCH: searching for:'); // console.info('required: ', searchterms); // 
console.info('excluded: ', excluded); // prepare search var terms = this._index.terms; var titleterms = this._index.titleterms; // array of [filename, title, anchor, descr, score] var results = []; $('#search-progress').empty(); // lookup as object for (i = 0; i < objectterms.length; i++) { var others = [].concat(objectterms.slice(0, i), objectterms.slice(i+1, objectterms.length)); results = results.concat(this.performObjectSearch(objectterms[i], others)); } // lookup as search terms in fulltext results = results.concat(this.performTermsSearch(searchterms, excluded, terms, titleterms)); // let the scorer override scores with a custom scoring function if (Scorer.score) { for (i = 0; i < results.length; i++) results[i][4] = Scorer.score(results[i]); } // now sort the results by score (in opposite order of appearance, since the // display function below uses pop() to retrieve items) and then // alphabetically results.sort(function(a, b) { var left = a[4]; var right = b[4]; if (left > right) { return 1; } else if (left < right) { return -1; } else { // same score: sort alphabetically left = a[1].toLowerCase(); right = b[1].toLowerCase(); return (left > right) ? -1 : ((left < right) ? 
1 : 0); } }); // for debugging //Search.lastresults = results.slice(); // a copy //console.info('search results:', Search.lastresults); // print the results var resultCount = results.length; function displayNextItem() { // results left, load the summary and display it if (results.length) { var item = results.pop(); var listItem = $('<li style="display:none"></li>'); if (DOCUMENTATION_OPTIONS.FILE_SUFFIX === '') { // dirhtml builder var dirname = item[0] + '/'; if (dirname.match(/\/index\/$/)) { dirname = dirname.substring(0, dirname.length-6); } else if (dirname == 'index/') { dirname = ''; } listItem.append($('<a/>').attr('href', DOCUMENTATION_OPTIONS.URL_ROOT + dirname + highlightstring + item[2]).html(item[1])); } else { // normal html builders listItem.append($('<a/>').attr('href', item[0] + DOCUMENTATION_OPTIONS.FILE_SUFFIX + highlightstring + item[2]).html(item[1])); } if (item[3]) { listItem.append($('<span> (' + item[3] + ')</span>')); Search.output.append(listItem); listItem.slideDown(5, function() { displayNextItem(); }); } else if (DOCUMENTATION_OPTIONS.HAS_SOURCE) { var suffix = DOCUMENTATION_OPTIONS.SOURCELINK_SUFFIX; if (suffix === undefined) { suffix = '.txt'; } $.ajax({url: DOCUMENTATION_OPTIONS.URL_ROOT + '_sources/' + item[5] + (item[5].slice(-suffix.length) === suffix ? '' : suffix), dataType: "text", complete: function(jqxhr, textstatus) { var data = jqxhr.responseText; if (data !== '' && data !== undefined) { listItem.append(Search.makeSearchSummary(data, searchterms, hlterms)); } Search.output.append(listItem); listItem.slideDown(5, function() { displayNextItem(); }); }}); } else { // no source available, just display title Search.output.append(listItem); listItem.slideDown(5, function() { displayNextItem(); }); } } // search finished, update title and status message else { Search.stopPulse(); Search.title.text(_('Search Results')); if (!resultCount) Search.status.text(_('Your search did not match any documents. 
Please make sure that all words are spelled correctly and that you\'ve selected enough categories.')); else Search.status.text(_('Search finished, found %s page(s) matching the search query.').replace('%s', resultCount)); Search.status.fadeIn(500); } } displayNextItem(); }, /** * search for object names */ performObjectSearch : function(object, otherterms) { var filenames = this._index.filenames; var docnames = this._index.docnames; var objects = this._index.objects; var objnames = this._index.objnames; var titles = this._index.titles; var i; var results = []; for (var prefix in objects) { for (var name in objects[prefix]) { var fullname = (prefix ? prefix + '.' : '') + name; if (fullname.toLowerCase().indexOf(object) > -1) { var score = 0; var parts = fullname.split('.'); // check for different match types: exact matches of full name or // "last name" (i.e. last dotted part) if (fullname == object || parts[parts.length - 1] == object) { score += Scorer.objNameMatch; // matches in last name } else if (parts[parts.length - 1].indexOf(object) > -1) { score += Scorer.objPartialMatch; } var match = objects[prefix][name]; var objname = objnames[match[1]][2]; var title = titles[match[0]]; // If more than one term searched for, we require other words to be // found in the name/title/description if (otherterms.length > 0) { var haystack = (prefix + ' ' + name + ' ' + objname + ' ' + title).toLowerCase(); var allfound = true; for (i = 0; i < otherterms.length; i++) { if (haystack.indexOf(otherterms[i]) == -1) { allfound = false; break; } } if (!allfound) { continue; } } var descr = objname + _(', in ') + title; var anchor = match[3]; if (anchor === '') anchor = fullname; else if (anchor == '-') anchor = objnames[match[1]][1] + '-' + fullname; // add custom score for some objects according to scorer if (Scorer.objPrio.hasOwnProperty(match[2])) { score += Scorer.objPrio[match[2]]; } else { score += Scorer.objPrioDefault; } results.push([docnames[match[0]], fullname, 
'#'+anchor, descr, score, filenames[match[0]]]); } } } return results; }, /** * search for full-text terms in the index */ performTermsSearch : function(searchterms, excluded, terms, titleterms) { var docnames = this._index.docnames; var filenames = this._index.filenames; var titles = this._index.titles; var i, j, file; var fileMap = {}; var scoreMap = {}; var results = []; // perform the search on the required terms for (i = 0; i < searchterms.length; i++) { var word = searchterms[i]; var files = []; var _o = [ {files: terms[word], score: Scorer.term}, {files: titleterms[word], score: Scorer.title} ]; // no match but word was a required one if ($u.every(_o, function(o){return o.files === undefined;})) { break; } // found search word in contents $u.each(_o, function(o) { var _files = o.files; if (_files === undefined) return if (_files.length === undefined) _files = [_files]; files = files.concat(_files); // set score for the word in each file to Scorer.term for (j = 0; j < _files.length; j++) { file = _files[j]; if (!(file in scoreMap)) scoreMap[file] = {} scoreMap[file][word] = o.score; } }); // create the mapping for (j = 0; j < files.length; j++) { file = files[j]; if (file in fileMap) fileMap[file].push(word); else fileMap[file] = [word]; } } // now check if the files don't contain excluded terms for (file in fileMap) { var valid = true; // check if all requirements are matched if (fileMap[file].length != searchterms.length) continue; // ensure that none of the excluded terms is in the search result for (i = 0; i < excluded.length; i++) { if (terms[excluded[i]] == file || titleterms[excluded[i]] == file || $u.contains(terms[excluded[i]] || [], file) || $u.contains(titleterms[excluded[i]] || [], file)) { valid = false; break; } } // if we have still a valid result we can add it to the result list if (valid) { // select one (max) score for the file. // for better ranking, we should calculate ranking by using words statistics like basic tf-idf... 
var score = $u.max($u.map(fileMap[file], function(w){return scoreMap[file][w]})); results.push([docnames[file], titles[file], '', null, score, filenames[file]]); } } return results; }, /** * helper function to return a node containing the * search summary for a given text. keywords is a list * of stemmed words, hlwords is the list of normal, unstemmed * words. the first one is used to find the occurrence, the * latter for highlighting it. */ makeSearchSummary : function(text, keywords, hlwords) { var textLower = text.toLowerCase(); var start = 0; $.each(keywords, function() { var i = textLower.indexOf(this.toLowerCase()); if (i > -1) start = i; }); start = Math.max(start - 120, 0); var excerpt = ((start > 0) ? '...' : '') + $.trim(text.substr(start, 240)) + ((start + 240 - text.length) ? '...' : ''); var rv = $('<div class="context"></div>').text(excerpt); $.each(hlwords, function() { rv = rv.highlightText(this, 'highlighted'); }); return rv; } }; $(document).ready(function() { Search.init(); });
{'repo_name': 'sahib/brig', 'stars': '340', 'repo_language': 'Go', 'file_name': 'local_api.capnp', 'mime_type': 'text/x-c', 'hash': -1960212716433042289, 'source_dataset': 'data'}
chr9 68288114 68330909 BC040840 0 - 68288114 68330909 0 4 342,176,21,479, 0,13134,41835,42316, chr9 68331023 68424451 BC038225 0 + 68331023 68424451 0 25 275,142,53,163,135,141,162,72,256,155,114,95,133,174,170,124,123,126,45,96,105,120,180,96,226, 0,5315,8934,18321,19502,21770,23433,27211,38977,41057,46258,55753,56457,59525,59805,64494,65032,85155,87616,88077,88624,88930,91125,91387,93202, chr9 68331037 68419993 BC017666 0 + 68331037 68419993 0 22 261,142,53,163,135,141,162,72,256,155,114,95,133,174,170,124,123,126,45,96,105,40, 0,5301,8920,18307,19488,21756,23419,27197,38963,41043,46244,55739,56443,59511,59791,64480,65018,85141,87602,88063,88610,88916, chr9 68331055 68426908 AB011166 0 + 68331055 68426908 0 25 243,142,53,163,135,141,162,72,256,155,114,95,133,174,170,124,123,126,45,96,105,120,180,96,2683, 0,5283,8902,18289,19470,21738,23401,27179,38945,41025,46226,55721,56425,59493,59773,64462,65000,85123,87584,88045,88592,88898,91093,91355,93170, chr9 68331083 68390919 AK055825 0 + 68331083 68390919 0 16 215,142,53,163,135,52,88,162,72,256,155,114,95,133,174,91, 0,5255,8874,18261,19442,21710,21763,23373,27151,38917,40997,46198,55693,56397,59465,59745, chr9 68331113 68424451 AJ310550 0 + 68331113 68424451 0 25 185,142,53,163,135,141,162,72,256,155,114,95,133,174,170,124,123,126,45,96,105,120,180,96,226, 0,5225,8844,18231,19412,21680,23343,27121,38887,40967,46168,55663,56367,59435,59715,64404,64942,85065,87526,87987,88534,88840,91035,91297,93112, chr9 68382909 68384155 BC035661 0 + 68382909 68384155 0 1 1246, 0, chr9 68386410 68388126 BC020393 0 + 68386410 68388126 0 2 937,779, 0,937, chr9 68392171 68394878 AK093849 0 + 68392171 68394878 0 4 1263,111,673,655, 0,1263,1375,2052, chr9 68392171 68394878 AX748336 0 + 68392171 68394878 0 4 1263,111,673,655, 0,1263,1375,2052, chr9 68406049 68412891 BX537694 0 + 68406049 68412891 0 6 117,1163,468,858,270,294, 0,117,4948,5416,6275,6548, chr9 68426170 68426904 BC034441 0 + 68426170 68426904 0 1 734, 0, chr9 68456943 68486659 
D31716 0 - 68456943 68486659 0 14 488,38,168,632,615,544,3,16,584,574,19,465,131,561, 0,491,530,699,1332,1947,2492,2496,2513,27950,28525,28557,29023,29155, chr9 68485303 68486659 S72504 0 - 68485303 68486659 0 5 164,19,465,131,561, 0,165,197,663,795, chr9 68266032 68330901 AK124136 0 - 68266032 68330901 0 4 558,136,171,952, 0,22288,24317,63917,
{'repo_name': 'ENCODE-DCC/kentUtils', 'stars': '119', 'repo_language': 'C', 'file_name': 'fetchChromSizes', 'mime_type': 'text/x-shellscript', 'hash': -5582437498875298177, 'source_dataset': 'data'}
using System;
using System.Collections.Generic;
using System.Linq;
using Foundation;
using UIKit;

namespace NavigationPageTitleView.iOS
{
    /// <summary>
    /// The iOS application delegate. Responsible for launching the user
    /// interface of the application, as well as listening (and optionally
    /// responding) to application events from iOS.
    /// </summary>
    [Register("AppDelegate")]
    public partial class AppDelegate : global::Xamarin.Forms.Platform.iOS.FormsApplicationDelegate
    {
        /// <summary>
        /// Invoked once iOS has loaded the application and it is ready to run.
        /// Initializes the Xamarin.Forms runtime, then hands the shared
        /// cross-platform <c>App</c> over to the native host window.
        /// iOS terminates the application if this method does not return
        /// within 17 seconds.
        /// </summary>
        public override bool FinishedLaunching(UIApplication app, NSDictionary options)
        {
            // The Forms runtime must be initialized before any Forms type is used.
            global::Xamarin.Forms.Forms.Init();

            var formsApp = new App();
            LoadApplication(formsApp);

            return base.FinishedLaunching(app, options);
        }
    }
}
{'repo_name': 'xamarin/xamarin-forms-samples', 'stars': '3387', 'repo_language': 'C#', 'file_name': 'App.cs', 'mime_type': 'text/x-c++', 'hash': 1346963697639348463, 'source_dataset': 'data'}
/*
 * Copyright 2011 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.gradle.internal.resolve;

import org.gradle.api.artifacts.ModuleVersionIdentifier;
import org.gradle.api.artifacts.ModuleVersionSelector;
import org.gradle.api.artifacts.component.ComponentIdentifier;
import org.gradle.api.artifacts.component.ComponentSelector;
import org.gradle.api.artifacts.component.ModuleComponentIdentifier;
import org.gradle.api.internal.artifacts.DefaultModuleIdentifier;
import org.gradle.api.internal.artifacts.dependencies.DefaultImmutableVersionConstraint;
import org.gradle.internal.Factory;
import org.gradle.internal.UncheckedException;
import org.gradle.internal.component.external.model.DefaultModuleComponentSelector;
import org.gradle.internal.exceptions.Contextual;
import org.gradle.internal.exceptions.DefaultMultiCauseExceptionNoStackTrace;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Formatter;
import java.util.List;

/**
 * Thrown when a component selector cannot be resolved. Carries the failed
 * {@link ComponentSelector} plus, optionally, the dependency paths that led
 * to it, which are appended to the message as a "Required by:" section.
 *
 * <p>The many constructors are convenience overloads that normalize the
 * various selector/identifier types into a single {@link ComponentSelector}.
 */
@Contextual
public class ModuleVersionResolveException extends DefaultMultiCauseExceptionNoStackTrace {
    // Each entry is one incoming dependency path (root first); populated only
    // via withIncomingPaths(), never by the constructors.
    private final List<List<? extends ComponentIdentifier>> paths = new ArrayList<>();
    private final ComponentSelector selector;

    public ModuleVersionResolveException(ComponentSelector selector, Factory<String> message, Throwable cause) {
        super(message, cause);
        this.selector = selector;
    }

    public ModuleVersionResolveException(ComponentSelector selector, Factory<String> message) {
        super(message);
        this.selector = selector;
    }

    public ModuleVersionResolveException(ComponentSelector selector, Throwable cause) {
        // Default message; the cause is attached separately rather than passed
        // to super so the single-cause path goes through initCause().
        this(selector, format("Could not resolve %s.", selector));
        initCause(cause);
    }

    public ModuleVersionResolveException(ComponentSelector selector, Iterable<? extends Throwable> causes) {
        this(selector, format("Could not resolve %s.", selector));
        initCauses(causes);
    }

    // The following overloads adapt module-style selectors/identifiers to a
    // ComponentSelector and delegate to the constructors above.

    public ModuleVersionResolveException(ModuleVersionSelector selector, Factory<String> message) {
        this(DefaultModuleComponentSelector.newSelector(selector), message);
    }

    public ModuleVersionResolveException(ModuleVersionIdentifier id, Factory<String> message) {
        this(DefaultModuleComponentSelector.newSelector(id.getModule(), DefaultImmutableVersionConstraint.of(id.getVersion())), message);
    }

    public ModuleVersionResolveException(ModuleComponentIdentifier id, Factory<String> messageFormat) {
        this(DefaultModuleComponentSelector.newSelector(DefaultModuleIdentifier.newId(id.getGroup(), id.getModule()), DefaultImmutableVersionConstraint.of(id.getVersion())), messageFormat);
    }

    public ModuleVersionResolveException(ModuleComponentIdentifier id, Throwable cause) {
        this(DefaultModuleComponentSelector.newSelector(DefaultModuleIdentifier.newId(id.getGroup(), id.getModule()), DefaultImmutableVersionConstraint.of(id.getVersion())), Collections.singletonList(cause));
    }

    public ModuleVersionResolveException(ModuleComponentIdentifier id, Iterable<? extends Throwable> causes) {
        this(DefaultModuleComponentSelector.newSelector(DefaultModuleIdentifier.newId(id.getGroup(), id.getModule()), DefaultImmutableVersionConstraint.of(id.getVersion())), causes);
    }

    public ModuleVersionResolveException(ModuleVersionSelector selector, Throwable cause) {
        this(DefaultModuleComponentSelector.newSelector(selector), cause);
    }

    public ModuleVersionResolveException(ModuleVersionSelector selector, Iterable<? extends Throwable> causes) {
        this(DefaultModuleComponentSelector.newSelector(selector), causes);
    }

    /**
     * Returns the selector that could not be resolved.
     */
    public ComponentSelector getSelector() {
        return selector;
    }

    // Builds a lazily-evaluated message from a format string with a single
    // %s placeholder for the selector's display name.
    protected static Factory<String> format(String messageFormat, ComponentSelector selector) {
        return () -> String.format(messageFormat, selector.getDisplayName());
    }

    /**
     * Creates a copy of this exception, with the given incoming paths.
     */
    public ModuleVersionResolveException withIncomingPaths(Collection<? extends List<? extends ComponentIdentifier>> paths) {
        ModuleVersionResolveException copy = createCopy();
        copy.paths.addAll(paths);
        // Carry over the causes and stack trace so the copy is
        // indistinguishable from the original apart from the paths.
        copy.initCauses(getCauses());
        copy.setStackTrace(getStackTrace());
        return copy;
    }

    @Override
    public String getMessage() {
        if (paths.isEmpty()) {
            return super.getMessage();
        }
        // Append one "a > b > c" chain per incoming path.
        Formatter formatter = new Formatter();
        formatter.format("%s%nRequired by:", super.getMessage());
        for (List<? extends ComponentIdentifier> path : paths) {
            formatter.format("%n %s", toString(path.get(0)));
            for (int i = 1; i < path.size(); i++) {
                formatter.format(" > %s", toString(path.get(i)));
            }
        }
        return formatter.toString();
    }

    private String toString(ComponentIdentifier identifier) {
        return identifier.getDisplayName();
    }

    // Uses reflection so subclasses are copied as their own type; requires
    // every subclass to expose a (ComponentSelector, Factory) constructor.
    protected ModuleVersionResolveException createCopy() {
        try {
            String message = getMessage();
            return getClass().getConstructor(ComponentSelector.class, Factory.class).newInstance(selector, (Factory<String>) () -> message);
        } catch (Exception e) {
            throw UncheckedException.throwAsUncheckedException(e);
        }
    }
}
{'repo_name': 'gradle/gradle', 'stars': '10712', 'repo_language': 'Groovy', 'file_name': 'DefaultRuleActionAdapterTest.groovy', 'mime_type': 'text/plain', 'hash': -1875925935327542911, 'source_dataset': 'data'}
classdef TestValidateDisparity
    %TestValidateDisparity  Unit tests for cv.validateDisparity.

    methods (Static)
        function test_1
            % Compute a disparity map from the stereo test pair, then run
            % left-right consistency validation against a random cost volume.
            leftImg = imread(fullfile(mexopencv.root(),'test','tsukuba_l.png'));
            rightImg = imread(fullfile(mexopencv.root(),'test','tsukuba_r.png'));

            matcher = cv.StereoBM('NumDisparities',16, 'BlockSize',15);
            dmap = matcher.compute(leftImg, rightImg);

            % Synthetic per-pixel matching cost with the disparity's layout.
            costMap = randi([0 1000], size(dmap), 'int16');

            checked = cv.validateDisparity(dmap, costMap, ...
                'MinDisparity',0, 'NumDisparities',64, 'Disp12MaxDiff',1);

            % The validated map must keep the input's class and size.
            validateattributes(checked, {class(dmap)}, ...
                {'size',size(dmap)});
        end

        function test_error_argnum
            % Invoking with no arguments must surface a mexopencv error.
            try
                cv.validateDisparity();
                throw('UnitTest:Fail');
            catch err
                assert(strcmp(err.identifier,'mexopencv:error'));
            end
        end
    end

end
{'repo_name': 'kyamagu/mexopencv', 'stars': '563', 'repo_language': 'Matlab', 'file_name': 'helptoc.xml', 'mime_type': 'text/xml', 'hash': 3267439459040831461, 'source_dataset': 'data'}
/***************************************************************************** * * QUERY: * SELECT STATEMENTS * *****************************************************************************/ /* A complete SELECT statement looks like this. * * The rule returns either a single PGSelectStmt node or a tree of them, * representing a set-operation tree. * * There is an ambiguity when a sub-SELECT is within an a_expr and there * are excess parentheses: do the parentheses belong to the sub-SELECT or * to the surrounding a_expr? We don't really care, but bison wants to know. * To resolve the ambiguity, we are careful to define the grammar so that * the decision is staved off as long as possible: as long as we can keep * absorbing parentheses into the sub-SELECT, we will do so, and only when * it's no longer possible to do that will we decide that parens belong to * the expression. For example, in "SELECT (((SELECT 2)) + 3)" the extra * parentheses are treated as part of the sub-select. The necessity of doing * it that way is shown by "SELECT (((SELECT 2)) UNION SELECT 2)". Had we * parsed "((SELECT 2))" as an a_expr, it'd be too late to go back to the * SELECT viewpoint when we see the UNION. * * This approach is implemented by defining a nonterminal select_with_parens, * which represents a SELECT with at least one outer layer of parentheses, * and being careful to use select_with_parens, never '(' PGSelectStmt ')', * in the expression grammar. We will then have shift-reduce conflicts * which we can resolve in favor of always treating '(' <select> ')' as * a select_with_parens. To resolve the conflicts, the productions that * conflict with the select_with_parens productions are manually given * precedences lower than the precedence of ')', thereby ensuring that we * shift ')' (and then reduce to select_with_parens) rather than trying to * reduce the inner <select> nonterminal to something else. We use UMINUS * precedence for this, which is a fairly arbitrary choice. 
* * To be able to define select_with_parens itself without ambiguity, we need * a nonterminal select_no_parens that represents a SELECT structure with no * outermost parentheses. This is a little bit tedious, but it works. * * In non-expression contexts, we use PGSelectStmt which can represent a SELECT * with or without outer parentheses. */ SelectStmt: select_no_parens %prec UMINUS | select_with_parens %prec UMINUS ; select_with_parens: '(' select_no_parens ')' { $$ = $2; } | '(' select_with_parens ')' { $$ = $2; } ; /* * This rule parses the equivalent of the standard's <query expression>. * The duplicative productions are annoying, but hard to get rid of without * creating shift/reduce conflicts. * * The locking clause (FOR UPDATE etc) may be before or after LIMIT/OFFSET. * In <=7.2.X, LIMIT/OFFSET had to be after FOR UPDATE * We now support both orderings, but prefer LIMIT/OFFSET before the locking * clause. * 2002-08-28 bjm */ select_no_parens: simple_select { $$ = $1; } | select_clause sort_clause { insertSelectOptions((PGSelectStmt *) $1, $2, NIL, NULL, NULL, NULL, yyscanner); $$ = $1; } | select_clause opt_sort_clause for_locking_clause opt_select_limit { insertSelectOptions((PGSelectStmt *) $1, $2, $3, (PGNode*) list_nth($4, 0), (PGNode*) list_nth($4, 1), NULL, yyscanner); $$ = $1; } | select_clause opt_sort_clause select_limit opt_for_locking_clause { insertSelectOptions((PGSelectStmt *) $1, $2, $4, (PGNode*) list_nth($3, 0), (PGNode*) list_nth($3, 1), NULL, yyscanner); $$ = $1; } | with_clause select_clause { insertSelectOptions((PGSelectStmt *) $2, NULL, NIL, NULL, NULL, $1, yyscanner); $$ = $2; } | with_clause select_clause sort_clause { insertSelectOptions((PGSelectStmt *) $2, $3, NIL, NULL, NULL, $1, yyscanner); $$ = $2; } | with_clause select_clause opt_sort_clause for_locking_clause opt_select_limit { insertSelectOptions((PGSelectStmt *) $2, $3, $4, (PGNode*) list_nth($5, 0), (PGNode*) list_nth($5, 1), $1, yyscanner); $$ = $2; } | with_clause 
select_clause opt_sort_clause select_limit opt_for_locking_clause { insertSelectOptions((PGSelectStmt *) $2, $3, $5, (PGNode*) list_nth($4, 0), (PGNode*) list_nth($4, 1), $1, yyscanner); $$ = $2; } ; select_clause: simple_select { $$ = $1; } | select_with_parens { $$ = $1; } ; /* * This rule parses SELECT statements that can appear within set operations, * including UNION, INTERSECT and EXCEPT. '(' and ')' can be used to specify * the ordering of the set operations. Without '(' and ')' we want the * operations to be ordered per the precedence specs at the head of this file. * * As with select_no_parens, simple_select cannot have outer parentheses, * but can have parenthesized subclauses. * * Note that sort clauses cannot be included at this level --- SQL requires * SELECT foo UNION SELECT bar ORDER BY baz * to be parsed as * (SELECT foo UNION SELECT bar) ORDER BY baz * not * SELECT foo UNION (SELECT bar ORDER BY baz) * Likewise for WITH, FOR UPDATE and LIMIT. Therefore, those clauses are * described as part of the select_no_parens production, not simple_select. * This does not limit functionality, because you can reintroduce these * clauses inside parentheses. * * NOTE: only the leftmost component PGSelectStmt should have INTO. * However, this is not checked by the grammar; parse analysis must check it. 
*/ simple_select: SELECT opt_all_clause opt_target_list into_clause from_clause where_clause group_clause having_clause window_clause { PGSelectStmt *n = makeNode(PGSelectStmt); n->targetList = $3; n->intoClause = $4; n->fromClause = $5; n->whereClause = $6; n->groupClause = $7; n->havingClause = $8; n->windowClause = $9; $$ = (PGNode *)n; } | SELECT distinct_clause target_list into_clause from_clause where_clause group_clause having_clause window_clause { PGSelectStmt *n = makeNode(PGSelectStmt); n->distinctClause = $2; n->targetList = $3; n->intoClause = $4; n->fromClause = $5; n->whereClause = $6; n->groupClause = $7; n->havingClause = $8; n->windowClause = $9; $$ = (PGNode *)n; } | values_clause { $$ = $1; } | TABLE relation_expr { /* same as SELECT * FROM relation_expr */ PGColumnRef *cr = makeNode(PGColumnRef); PGResTarget *rt = makeNode(PGResTarget); PGSelectStmt *n = makeNode(PGSelectStmt); cr->fields = list_make1(makeNode(PGAStar)); cr->location = -1; rt->name = NULL; rt->indirection = NIL; rt->val = (PGNode *)cr; rt->location = -1; n->targetList = list_make1(rt); n->fromClause = list_make1($2); $$ = (PGNode *)n; } | select_clause UNION all_or_distinct select_clause { $$ = makeSetOp(PG_SETOP_UNION, $3, $1, $4); } | select_clause INTERSECT all_or_distinct select_clause { $$ = makeSetOp(PG_SETOP_INTERSECT, $3, $1, $4); } | select_clause EXCEPT all_or_distinct select_clause { $$ = makeSetOp(PG_SETOP_EXCEPT, $3, $1, $4); } ; /* * SQL standard WITH clause looks like: * * WITH [ RECURSIVE ] <query name> [ (<column>,...) ] * AS (query) [ SEARCH or CYCLE clause ] * * We don't currently support the SEARCH or CYCLE clause. * * Recognizing WITH_LA here allows a CTE to be named TIME or ORDINALITY. 
 */
/* WITH [RECURSIVE] <cte-list> prefix of a SELECT; builds a PGWithClause. */
with_clause:
    WITH cte_list
        {
            $$ = makeNode(PGWithClause);
            $$->ctes = $2;
            $$->recursive = false;
            $$->location = @1;
        }
    | WITH_LA cte_list
        {
            /* Lookahead-variant of WITH (see comment above); same semantics. */
            $$ = makeNode(PGWithClause);
            $$->ctes = $2;
            $$->recursive = false;
            $$->location = @1;
        }
    | WITH RECURSIVE cte_list
        {
            $$ = makeNode(PGWithClause);
            $$->ctes = $3;
            $$->recursive = true;
            $$->location = @1;
        }
    ;

/* Comma-separated list of common table expressions. */
cte_list:
    common_table_expr                       { $$ = list_make1($1); }
    | cte_list ',' common_table_expr        { $$ = lappend($1, $3); }
    ;

/* One CTE: <name> [(<columns>)] AS ( <statement> ). */
common_table_expr:  name opt_name_list AS '(' PreparableStmt ')'
        {
            PGCommonTableExpr *n = makeNode(PGCommonTableExpr);
            n->ctename = $1;
            n->aliascolnames = $2;
            n->ctequery = $5;
            n->location = @1;
            $$ = (PGNode *) n;
        }
    ;

/* SELECT ... INTO <table>; NULL when no INTO clause is present. */
into_clause:
    INTO OptTempTableName
        {
            $$ = makeNode(PGIntoClause);
            $$->rel = $2;
            $$->colNames = NIL;
            $$->options = NIL;
            $$->onCommit = PG_ONCOMMIT_NOOP;
            $$->viewQuery = NULL;
            $$->skipData = false;
        }
    | /*EMPTY*/
        { $$ = NULL; }
    ;

/*
 * Redundancy here is needed to avoid shift/reduce conflicts,
 * since TEMP is not a reserved word.  See also OptTemp.
 */
/* Target-table name for SELECT INTO, with optional persistence keywords. */
OptTempTableName:
    TEMPORARY opt_table qualified_name
        {
            $$ = $3;
            $$->relpersistence = PG_RELPERSISTENCE_TEMP;
        }
    | TEMP opt_table qualified_name
        {
            $$ = $3;
            $$->relpersistence = PG_RELPERSISTENCE_TEMP;
        }
    | LOCAL TEMPORARY opt_table qualified_name
        {
            $$ = $4;
            $$->relpersistence = PG_RELPERSISTENCE_TEMP;
        }
    | LOCAL TEMP opt_table qualified_name
        {
            $$ = $4;
            $$->relpersistence = PG_RELPERSISTENCE_TEMP;
        }
    | GLOBAL TEMPORARY opt_table qualified_name
        {
            /* GLOBAL is accepted for backward compatibility but warned about. */
            ereport(PGWARNING,
                    (errmsg("GLOBAL is deprecated in temporary table creation"),
                     parser_errposition(@1)));
            $$ = $4;
            $$->relpersistence = PG_RELPERSISTENCE_TEMP;
        }
    | GLOBAL TEMP opt_table qualified_name
        {
            ereport(PGWARNING,
                    (errmsg("GLOBAL is deprecated in temporary table creation"),
                     parser_errposition(@1)));
            $$ = $4;
            $$->relpersistence = PG_RELPERSISTENCE_TEMP;
        }
    | UNLOGGED opt_table qualified_name
        {
            $$ = $3;
            $$->relpersistence = PG_RELPERSISTENCE_UNLOGGED;
        }
    | TABLE qualified_name
        {
            $$ = $2;
            $$->relpersistence = RELPERSISTENCE_PERMANENT;
        }
    | qualified_name
        {
            $$ = $1;
            $$->relpersistence = RELPERSISTENCE_PERMANENT;
        }
    ;

/* Noise word: the TABLE keyword is optional. */
opt_table:  TABLE                   {}
    | /*EMPTY*/                     {}
    ;

/* ALL vs. DISTINCT for set operations; true means keep duplicates. */
all_or_distinct:
    ALL                             { $$ = true; }
    | DISTINCT                      { $$ = false; }
    | /*EMPTY*/                     { $$ = false; }
    ;

/* We use (NIL) as a placeholder to indicate that all target expressions
 * should be placed in the DISTINCT list during parsetree analysis.
 */
/* DISTINCT / DISTINCT ON (...); NIL when neither DISTINCT nor ALL appears. */
distinct_clause:
    DISTINCT                                { $$ = list_make1(NIL); }
    | DISTINCT ON '(' expr_list ')'         { $$ = $4; }
    ;

/* ALL is a no-op: duplicates are kept by default. */
opt_all_clause:
    ALL                                     { $$ = NIL;}
    | /*EMPTY*/                             { $$ = NIL; }
    ;

opt_sort_clause:
    sort_clause                             { $$ = $1;}
    | /*EMPTY*/                             { $$ = NIL; }
    ;

/* ORDER BY <sortby-list>. */
sort_clause:
    ORDER BY sortby_list                    { $$ = $3; }
    ;

sortby_list:
    sortby                                  { $$ = list_make1($1); }
    | sortby_list ',' sortby                { $$ = lappend($1, $3); }
    ;

/* One ORDER BY item: USING <operator> or ASC/DESC, each with NULLS order. */
sortby:     a_expr USING qual_all_Op opt_nulls_order
        {
            $$ = makeNode(PGSortBy);
            $$->node = $1;
            $$->sortby_dir = SORTBY_USING;
            $$->sortby_nulls = $4;
            $$->useOp = $3;
            $$->location = @3;
        }
    | a_expr opt_asc_desc opt_nulls_order
        {
            $$ = makeNode(PGSortBy);
            $$->node = $1;
            $$->sortby_dir = $2;
            $$->sortby_nulls = $3;
            $$->useOp = NIL;
            $$->location = -1;      /* no operator */
        }
    ;

opt_asc_desc: ASC_P                 { $$ = PG_SORTBY_ASC; }
    | DESC_P                        { $$ = PG_SORTBY_DESC; }
    | /*EMPTY*/                     { $$ = PG_SORTBY_DEFAULT; }
    ;

opt_nulls_order: NULLS_LA FIRST_P   { $$ = PG_SORTBY_NULLS_FIRST; }
    | NULLS_LA LAST_P               { $$ = PG_SORTBY_NULLS_LAST; }
    | /*EMPTY*/                     { $$ = PG_SORTBY_NULLS_DEFAULT; }
    ;

/* LIMIT/OFFSET in either order; result is always list_make2(offset, limit). */
select_limit:
    limit_clause offset_clause      { $$ = list_make2($2, $1); }
    | offset_clause limit_clause    { $$ = list_make2($1, $2); }
    | limit_clause                  { $$ = list_make2(NULL, $1); }
    | offset_clause                 { $$ = list_make2($1, NULL); }
    ;

opt_select_limit:
    select_limit                    { $$ = $1; }
    | /* EMPTY */                   { $$ = list_make2(NULL,NULL); }
    ;

limit_clause:
    LIMIT select_limit_value
        { $$ = $2; }
    | LIMIT select_limit_value ',' select_offset_value
        {
            /* Disabled because it was too confusing, bjm 2002-02-18 */
            ereport(ERROR,
                    (errcode(PG_ERRCODE_SYNTAX_ERROR),
                     errmsg("LIMIT #,# syntax is not supported"),
                     errhint("Use separate LIMIT and OFFSET clauses."),
                     parser_errposition(@1)));
        }
    /* SQL:2008 syntax */
    /* to avoid shift/reduce conflicts, handle the optional value with
     * a separate production rather than an opt_ expression.  The fact
     * that ONLY is fully reserved means that this way, we defer any
     * decision about what rule reduces ROW or ROWS to the point where
     * we can see the ONLY token in the lookahead slot.
     */
    | FETCH first_or_next select_fetch_first_value row_or_rows ONLY
        { $$ = $3; }
    | FETCH first_or_next row_or_rows ONLY
        {
            /* FETCH FIRST/NEXT with no count defaults to one row. */
            $$ = makeIntConst(1, -1);
        }
    ;

offset_clause:
    OFFSET select_offset_value
        { $$ = $2; }
    /* SQL:2008 syntax */
    | OFFSET select_fetch_first_value row_or_rows
        { $$ = $2; }
    ;

select_limit_value:
    a_expr                          { $$ = $1; }
    | ALL
        {
            /* LIMIT ALL is represented as a NULL constant */
            $$ = makeNullAConst(@1);
        }
    ;

select_offset_value:
    a_expr                          { $$ = $1; }
    ;

/*
 * Allowing full expressions without parentheses causes various parsing
 * problems with the trailing ROW/ROWS key words.  SQL spec only calls for
 * <simple value specification>, which is either a literal or a parameter (but
 * an <SQL parameter reference> could be an identifier, bringing up conflicts
 * with ROW/ROWS).  We solve this by leveraging the presence of ONLY (see above)
 * to determine whether the expression is missing rather than trying to make it
 * optional in this rule.
 *
 * c_expr covers almost all the spec-required cases (and more), but it doesn't
 * cover signed numeric literals, which are allowed by the spec. So we include
 * those here explicitly. We need FCONST as well as ICONST because values that
 * don't fit in the platform's "long", but do fit in bigint, should still be
 * accepted here. (This is possible in 64-bit Windows as well as all 32-bit
 * builds.)
*/ select_fetch_first_value: c_expr { $$ = $1; } | '+' I_or_F_const { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OP, "+", NULL, $2, @1); } | '-' I_or_F_const { $$ = doNegate($2, @1); } ; I_or_F_const: Iconst { $$ = makeIntConst($1,@1); } | FCONST { $$ = makeFloatConst($1,@1); } ; /* noise words */ row_or_rows: ROW { $$ = 0; } | ROWS { $$ = 0; } ; first_or_next: FIRST_P { $$ = 0; } | NEXT { $$ = 0; } ; /* * This syntax for group_clause tries to follow the spec quite closely. * However, the spec allows only column references, not expressions, * which introduces an ambiguity between implicit row constructors * (a,b) and lists of column references. * * We handle this by using the a_expr production for what the spec calls * <ordinary grouping set>, which in the spec represents either one column * reference or a parenthesized list of column references. Then, we check the * top node of the a_expr to see if it's an implicit PGRowExpr, and if so, just * grab and use the list, discarding the node. (this is done in parse analysis, * not here) * * (we abuse the row_format field of PGRowExpr to distinguish implicit and * explicit row constructors; it's debatable if anyone sanely wants to use them * in a group clause, but if they have a reason to, we make it possible.) * * Each item in the group_clause list is either an expression tree or a * PGGroupingSet node of some type. */ group_clause: GROUP_P BY group_by_list { $$ = $3; } | /*EMPTY*/ { $$ = NIL; } ; group_by_list: group_by_item { $$ = list_make1($1); } | group_by_list ',' group_by_item { $$ = lappend($1,$3); } ; group_by_item: a_expr { $$ = $1; } | empty_grouping_set { $$ = $1; } ; empty_grouping_set: '(' ')' { $$ = (PGNode *) makeGroupingSet(GROUPING_SET_EMPTY, NIL, @1); } ; /* * These hacks rely on setting precedence of CUBE and ROLLUP below that of '(', * so that they shift in these rules rather than reducing the conflicting * unreserved_keyword rule. 
*/ having_clause: HAVING a_expr { $$ = $2; } | /*EMPTY*/ { $$ = NULL; } ; for_locking_clause: for_locking_items { $$ = $1; } | FOR READ_P ONLY { $$ = NIL; } ; opt_for_locking_clause: for_locking_clause { $$ = $1; } | /* EMPTY */ { $$ = NIL; } ; for_locking_items: for_locking_item { $$ = list_make1($1); } | for_locking_items for_locking_item { $$ = lappend($1, $2); } ; for_locking_item: for_locking_strength locked_rels_list opt_nowait_or_skip { PGLockingClause *n = makeNode(PGLockingClause); n->lockedRels = $2; n->strength = $1; n->waitPolicy = $3; $$ = (PGNode *) n; } ; for_locking_strength: FOR UPDATE { $$ = LCS_FORUPDATE; } | FOR NO KEY UPDATE { $$ = PG_LCS_FORNOKEYUPDATE; } | FOR SHARE { $$ = PG_LCS_FORSHARE; } | FOR KEY SHARE { $$ = PG_LCS_FORKEYSHARE; } ; locked_rels_list: OF qualified_name_list { $$ = $2; } | /* EMPTY */ { $$ = NIL; } ; opt_nowait_or_skip: NOWAIT { $$ = LockWaitError; } | SKIP LOCKED { $$ = PGLockWaitSkip; } | /*EMPTY*/ { $$ = PGLockWaitBlock; } ; /* * We should allow ROW '(' expr_list ')' too, but that seems to require * making VALUES a fully reserved word, which will probably break more apps * than allowing the noise-word is worth. 
*/ values_clause: VALUES '(' expr_list ')' { PGSelectStmt *n = makeNode(PGSelectStmt); n->valuesLists = list_make1($3); $$ = (PGNode *) n; } | values_clause ',' '(' expr_list ')' { PGSelectStmt *n = (PGSelectStmt *) $1; n->valuesLists = lappend(n->valuesLists, $4); $$ = (PGNode *) n; } ; /***************************************************************************** * * clauses common to all Optimizable Stmts: * from_clause - allow list of both JOIN expressions and table names * where_clause - qualifications for joins or restrictions * *****************************************************************************/ from_clause: FROM from_list { $$ = $2; } | /*EMPTY*/ { $$ = NIL; } ; from_list: table_ref { $$ = list_make1($1); } | from_list ',' table_ref { $$ = lappend($1, $3); } ; /* * table_ref is where an alias clause can be attached. */ table_ref: relation_expr opt_alias_clause { $1->alias = $2; $$ = (PGNode *) $1; } | relation_expr opt_alias_clause tablesample_clause { PGRangeTableSample *n = (PGRangeTableSample *) $3; $1->alias = $2; /* relation_expr goes inside the PGRangeTableSample node */ n->relation = (PGNode *) $1; $$ = (PGNode *) n; } | func_table func_alias_clause { PGRangeFunction *n = (PGRangeFunction *) $1; n->alias = (PGAlias*) linitial($2); n->coldeflist = (PGList*) lsecond($2); $$ = (PGNode *) n; } | LATERAL_P func_table func_alias_clause { PGRangeFunction *n = (PGRangeFunction *) $2; n->lateral = true; n->alias = (PGAlias*) linitial($3); n->coldeflist = (PGList*) lsecond($3); $$ = (PGNode *) n; } | select_with_parens opt_alias_clause { PGRangeSubselect *n = makeNode(PGRangeSubselect); n->lateral = false; n->subquery = $1; n->alias = $2; /* * The SQL spec does not permit a subselect * (<derived_table>) without an alias clause, * so we don't either. This avoids the problem * of needing to invent a unique refname for it. 
* That could be surmounted if there's sufficient * popular demand, but for now let's just implement * the spec and see if anyone complains. * However, it does seem like a good idea to emit * an error message that's better than "syntax error". */ if ($2 == NULL) { if (IsA($1, PGSelectStmt) && ((PGSelectStmt *) $1)->valuesLists) ereport(ERROR, (errcode(PG_ERRCODE_SYNTAX_ERROR), errmsg("VALUES in FROM must have an alias"), errhint("For example, FROM (VALUES ...) [AS] foo."), parser_errposition(@1))); else ereport(ERROR, (errcode(PG_ERRCODE_SYNTAX_ERROR), errmsg("subquery in FROM must have an alias"), errhint("For example, FROM (SELECT ...) [AS] foo."), parser_errposition(@1))); } $$ = (PGNode *) n; } | LATERAL_P select_with_parens opt_alias_clause { PGRangeSubselect *n = makeNode(PGRangeSubselect); n->lateral = true; n->subquery = $2; n->alias = $3; /* same comment as above */ if ($3 == NULL) { if (IsA($2, PGSelectStmt) && ((PGSelectStmt *) $2)->valuesLists) ereport(ERROR, (errcode(PG_ERRCODE_SYNTAX_ERROR), errmsg("VALUES in FROM must have an alias"), errhint("For example, FROM (VALUES ...) [AS] foo."), parser_errposition(@2))); else ereport(ERROR, (errcode(PG_ERRCODE_SYNTAX_ERROR), errmsg("subquery in FROM must have an alias"), errhint("For example, FROM (SELECT ...) [AS] foo."), parser_errposition(@2))); } $$ = (PGNode *) n; } | joined_table { $$ = (PGNode *) $1; } | '(' joined_table ')' alias_clause { $2->alias = $4; $$ = (PGNode *) $2; } ; /* * It may seem silly to separate joined_table from table_ref, but there is * method in SQL's madness: if you don't do it this way you get reduce- * reduce conflicts, because it's not clear to the parser generator whether * to expect alias_clause after ')' or not. For the same reason we must * treat 'JOIN' and 'join_type JOIN' separately, rather than allowing * join_type to expand to empty; if we try it, the parser generator can't * figure out when to reduce an empty join_type right after table_ref. 
* * Note that a CROSS JOIN is the same as an unqualified * INNER JOIN, and an INNER JOIN/ON has the same shape * but a qualification expression to limit membership. * A NATURAL JOIN implicitly matches column names between * tables and the shape is determined by which columns are * in common. We'll collect columns during the later transformations. */ joined_table: '(' joined_table ')' { $$ = $2; } | table_ref CROSS JOIN table_ref { /* CROSS JOIN is same as unqualified inner join */ PGJoinExpr *n = makeNode(PGJoinExpr); n->jointype = PG_JOIN_INNER; n->isNatural = false; n->larg = $1; n->rarg = $4; n->usingClause = NIL; n->quals = NULL; $$ = n; } | table_ref join_type JOIN table_ref join_qual { PGJoinExpr *n = makeNode(PGJoinExpr); n->jointype = $2; n->isNatural = false; n->larg = $1; n->rarg = $4; if ($5 != NULL && IsA($5, PGList)) n->usingClause = (PGList *) $5; /* USING clause */ else n->quals = $5; /* ON clause */ $$ = n; } | table_ref JOIN table_ref join_qual { /* letting join_type reduce to empty doesn't work */ PGJoinExpr *n = makeNode(PGJoinExpr); n->jointype = PG_JOIN_INNER; n->isNatural = false; n->larg = $1; n->rarg = $3; if ($4 != NULL && IsA($4, PGList)) n->usingClause = (PGList *) $4; /* USING clause */ else n->quals = $4; /* ON clause */ $$ = n; } | table_ref NATURAL join_type JOIN table_ref { PGJoinExpr *n = makeNode(PGJoinExpr); n->jointype = $3; n->isNatural = true; n->larg = $1; n->rarg = $5; n->usingClause = NIL; /* figure out which columns later... */ n->quals = NULL; /* fill later */ $$ = n; } | table_ref NATURAL JOIN table_ref { /* letting join_type reduce to empty doesn't work */ PGJoinExpr *n = makeNode(PGJoinExpr); n->jointype = PG_JOIN_INNER; n->isNatural = true; n->larg = $1; n->rarg = $4; n->usingClause = NIL; /* figure out which columns later... 
*/ n->quals = NULL; /* fill later */ $$ = n; } ; alias_clause: AS ColId '(' name_list ')' { $$ = makeNode(PGAlias); $$->aliasname = $2; $$->colnames = $4; } | AS ColIdOrString { $$ = makeNode(PGAlias); $$->aliasname = $2; } | ColId '(' name_list ')' { $$ = makeNode(PGAlias); $$->aliasname = $1; $$->colnames = $3; } | ColId { $$ = makeNode(PGAlias); $$->aliasname = $1; } ; opt_alias_clause: alias_clause { $$ = $1; } | /*EMPTY*/ { $$ = NULL; } ; /* * func_alias_clause can include both an PGAlias and a coldeflist, so we make it * return a 2-element list that gets disassembled by calling production. */ func_alias_clause: alias_clause { $$ = list_make2($1, NIL); } | AS '(' TableFuncElementList ')' { $$ = list_make2(NULL, $3); } | AS ColId '(' TableFuncElementList ')' { PGAlias *a = makeNode(PGAlias); a->aliasname = $2; $$ = list_make2(a, $4); } | ColId '(' TableFuncElementList ')' { PGAlias *a = makeNode(PGAlias); a->aliasname = $1; $$ = list_make2(a, $3); } | /*EMPTY*/ { $$ = list_make2(NULL, NIL); } ; join_type: FULL join_outer { $$ = PG_JOIN_FULL; } | LEFT join_outer { $$ = PG_JOIN_LEFT; } | RIGHT join_outer { $$ = PG_JOIN_RIGHT; } | INNER_P { $$ = PG_JOIN_INNER; } ; /* OUTER is just noise... */ join_outer: OUTER_P { $$ = NULL; } | /*EMPTY*/ { $$ = NULL; } ; /* JOIN qualification clauses * Possibilities are: * USING ( column list ) allows only unqualified column names, * which must match between tables. * ON expr allows more general qualifications. * * We return USING as a PGList node, while an ON-expr will not be a List. 
*/ join_qual: USING '(' name_list ')' { $$ = (PGNode *) $3; } | ON a_expr { $$ = $2; } ; relation_expr: qualified_name { /* inheritance query, implicitly */ $$ = $1; $$->inh = true; $$->alias = NULL; } | qualified_name '*' { /* inheritance query, explicitly */ $$ = $1; $$->inh = true; $$->alias = NULL; } | ONLY qualified_name { /* no inheritance */ $$ = $2; $$->inh = false; $$->alias = NULL; } | ONLY '(' qualified_name ')' { /* no inheritance, SQL99-style syntax */ $$ = $3; $$->inh = false; $$->alias = NULL; } ; /* * Given "UPDATE foo set set ...", we have to decide without looking any * further ahead whether the first "set" is an alias or the UPDATE's SET * keyword. Since "set" is allowed as a column name both interpretations * are feasible. We resolve the shift/reduce conflict by giving the first * production a higher precedence than the SET token * has, causing the parser to prefer to reduce, in effect assuming that the * SET is not an alias. */ /* * TABLESAMPLE decoration in a FROM item */ tablesample_clause: TABLESAMPLE func_name '(' expr_list ')' opt_repeatable_clause { PGRangeTableSample *n = makeNode(PGRangeTableSample); /* n->relation will be filled in later */ n->method = $2; n->args = $4; n->repeatable = $6; n->location = @2; $$ = (PGNode *) n; } ; opt_repeatable_clause: REPEATABLE '(' a_expr ')' { $$ = (PGNode *) $3; } | /*EMPTY*/ { $$ = NULL; } ; /* * func_table represents a function invocation in a FROM list. It can be * a plain function call, like "foo(...)", or a ROWS FROM expression with * one or more function calls, "ROWS FROM (foo(...), bar(...))", * optionally with WITH ORDINALITY attached. * In the ROWS FROM syntax, a column list can be given for each * function, for example: * ROWS FROM (foo() AS (foo_res_a text, foo_res_b text), * bar() AS (bar_res_a text, bar_res_b text)) * It's also possible to attach a column list to the PGRangeFunction * as a whole, but that's handled by the table_ref production. 
*/
func_table: func_expr_windowless opt_ordinality
				{
					PGRangeFunction *n = makeNode(PGRangeFunction);
					n->lateral = false;
					n->ordinality = $2;
					n->is_rowsfrom = false;
					/* each "functions" entry is a 2-list: (func expr, coldeflist) */
					n->functions = list_make1(list_make2($1, NIL));
					/* alias and coldeflist are set by table_ref production */
					$$ = (PGNode *) n;
				}
			| ROWS FROM '(' rowsfrom_list ')' opt_ordinality
				{
					PGRangeFunction *n = makeNode(PGRangeFunction);
					n->lateral = false;
					n->ordinality = $6;
					n->is_rowsfrom = true;
					n->functions = $4;
					/* alias and coldeflist are set by table_ref production */
					$$ = (PGNode *) n;
				}
		;

rowsfrom_item: func_expr_windowless opt_col_def_list
				{ $$ = list_make2($1, $2); }
		;

rowsfrom_list:
			rowsfrom_item						{ $$ = list_make1($1); }
			| rowsfrom_list ',' rowsfrom_item	{ $$ = lappend($1, $3); }
		;

opt_col_def_list: AS '(' TableFuncElementList ')'	{ $$ = $3; }
			| /*EMPTY*/								{ $$ = NIL; }
		;

opt_ordinality: WITH_LA ORDINALITY					{ $$ = true; }
			| /*EMPTY*/								{ $$ = false; }
		;

where_clause:
			WHERE a_expr							{ $$ = $2; }
			| /*EMPTY*/								{ $$ = NULL; }
		;

/* variant for UPDATE and DELETE */
/* NOTE(review): the comment above looks like an upstream leftover (it
 * described where_or_current_clause in PostgreSQL's gram.y) — verify. */
TableFuncElementList:
			TableFuncElement
				{
					$$ = list_make1($1);
				}
			| TableFuncElementList ',' TableFuncElement
				{
					$$ = lappend($1, $3);
				}
		;

TableFuncElement:	ColId Typename opt_collate_clause
				{
					/* build a column definition for a function-result column */
					PGColumnDef *n = makeNode(PGColumnDef);
					n->colname = $1;
					n->typeName = $2;
					n->inhcount = 0;
					n->is_local = true;
					n->is_not_null = false;
					n->is_from_type = false;
					n->storage = 0;
					n->raw_default = NULL;
					n->cooked_default = NULL;
					n->collClause = (PGCollateClause *) $3;
					n->collOid = InvalidOid;
					n->constraints = NIL;
					n->location = @1;
					$$ = (PGNode *)n;
				}
		;

opt_collate_clause:
			COLLATE any_name
				{
					PGCollateClause *n = makeNode(PGCollateClause);
					n->arg = NULL;	/* filled in by whoever attaches the clause */
					n->collname = $2;
					n->location = @1;
					$$ = (PGNode *) n;
				}
			| /* EMPTY */				{ $$ = NULL; }
		;

/*****************************************************************************
 *
 *	Type syntax
 *		SQL introduces a large amount of type-specific syntax.
* Define individual clauses to handle these cases, and use * the generic case to handle regular type-extensible Postgres syntax. * - thomas 1997-10-10 * *****************************************************************************/ Typename: SimpleTypename opt_array_bounds { $$ = $1; $$->arrayBounds = $2; } | SETOF SimpleTypename opt_array_bounds { $$ = $2; $$->arrayBounds = $3; $$->setof = true; } /* SQL standard syntax, currently only one-dimensional */ | SimpleTypename ARRAY '[' Iconst ']' { $$ = $1; $$->arrayBounds = list_make1(makeInteger($4)); } | SETOF SimpleTypename ARRAY '[' Iconst ']' { $$ = $2; $$->arrayBounds = list_make1(makeInteger($5)); $$->setof = true; } | SimpleTypename ARRAY { $$ = $1; $$->arrayBounds = list_make1(makeInteger(-1)); } | SETOF SimpleTypename ARRAY { $$ = $2; $$->arrayBounds = list_make1(makeInteger(-1)); $$->setof = true; } ; opt_array_bounds: opt_array_bounds '[' ']' { $$ = lappend($1, makeInteger(-1)); } | opt_array_bounds '[' Iconst ']' { $$ = lappend($1, makeInteger($3)); } | /*EMPTY*/ { $$ = NIL; } ; SimpleTypename: GenericType { $$ = $1; } | Numeric { $$ = $1; } | Bit { $$ = $1; } | Character { $$ = $1; } | ConstDatetime { $$ = $1; } | ConstInterval opt_interval { $$ = $1; $$->typmods = $2; } | ConstInterval '(' Iconst ')' { $$ = $1; $$->typmods = list_make2(makeIntConst(INTERVAL_FULL_RANGE, -1), makeIntConst($3, @3)); } ; /* We have a separate ConstTypename to allow defaulting fixed-length * types such as CHAR() and BIT() to an unspecified length. * SQL9x requires that these default to a length of one, but this * makes no sense for constructs like CHAR 'hi' and BIT '0101', * where there is an obvious better choice to make. * Note that ConstInterval is not included here since it must * be pushed up higher in the rules to accommodate the postfix * options (e.g. INTERVAL '1' YEAR). Likewise, we have to handle * the generic-type-name case in AExprConst to avoid premature * reduce/reduce conflicts against function names. 
*/ ConstTypename: Numeric { $$ = $1; } | ConstBit { $$ = $1; } | ConstCharacter { $$ = $1; } | ConstDatetime { $$ = $1; } ; /* * GenericType covers all type names that don't have special syntax mandated * by the standard, including qualified names. We also allow type modifiers. * To avoid parsing conflicts against function invocations, the modifiers * have to be shown as expr_list here, but parse analysis will only accept * constants for them. */ GenericType: type_function_name opt_type_modifiers { $$ = makeTypeName($1); $$->typmods = $2; $$->location = @1; } | type_function_name attrs opt_type_modifiers { $$ = makeTypeNameFromNameList(lcons(makeString($1), $2)); $$->typmods = $3; $$->location = @1; } ; opt_type_modifiers: '(' expr_list ')' { $$ = $2; } | /* EMPTY */ { $$ = NIL; } ; /* * SQL numeric data types */ Numeric: INT_P { $$ = SystemTypeName("int4"); $$->location = @1; } | INTEGER { $$ = SystemTypeName("int4"); $$->location = @1; } | SMALLINT { $$ = SystemTypeName("int2"); $$->location = @1; } | BIGINT { $$ = SystemTypeName("int8"); $$->location = @1; } | REAL { $$ = SystemTypeName("float4"); $$->location = @1; } | FLOAT_P opt_float { $$ = $2; $$->location = @1; } | DOUBLE_P PRECISION { $$ = SystemTypeName("float8"); $$->location = @1; } | DECIMAL_P opt_type_modifiers { $$ = SystemTypeName("numeric"); $$->typmods = $2; $$->location = @1; } | DEC opt_type_modifiers { $$ = SystemTypeName("numeric"); $$->typmods = $2; $$->location = @1; } | NUMERIC opt_type_modifiers { $$ = SystemTypeName("numeric"); $$->typmods = $2; $$->location = @1; } | BOOLEAN_P { $$ = SystemTypeName("bool"); $$->location = @1; } ; opt_float: '(' Iconst ')' { /* * Check FLOAT() precision limits assuming IEEE floating * types - thomas 1997-09-18 */ if ($2 < 1) ereport(ERROR, (errcode(PG_ERRCODE_INVALID_PARAMETER_VALUE), errmsg("precision for type float must be at least 1 bit"), parser_errposition(@2))); else if ($2 <= 24) $$ = SystemTypeName("float4"); else if ($2 <= 53) $$ = 
SystemTypeName("float8"); else ereport(ERROR, (errcode(PG_ERRCODE_INVALID_PARAMETER_VALUE), errmsg("precision for type float must be less than 54 bits"), parser_errposition(@2))); } | /*EMPTY*/ { $$ = SystemTypeName("float4"); } ; /* * SQL bit-field data types * The following implements BIT() and BIT VARYING(). */ Bit: BitWithLength { $$ = $1; } | BitWithoutLength { $$ = $1; } ; /* ConstBit is like Bit except "BIT" defaults to unspecified length */ /* See notes for ConstCharacter, which addresses same issue for "CHAR" */ ConstBit: BitWithLength { $$ = $1; } | BitWithoutLength { $$ = $1; $$->typmods = NIL; } ; BitWithLength: BIT opt_varying '(' expr_list ')' { const char *typname; typname = $2 ? "varbit" : "bit"; $$ = SystemTypeName(typname); $$->typmods = $4; $$->location = @1; } ; BitWithoutLength: BIT opt_varying { /* bit defaults to bit(1), varbit to no limit */ if ($2) { $$ = SystemTypeName("varbit"); } else { $$ = SystemTypeName("bit"); $$->typmods = list_make1(makeIntConst(1, -1)); } $$->location = @1; } ; /* * SQL character data types * The following implements CHAR() and VARCHAR(). */ Character: CharacterWithLength { $$ = $1; } | CharacterWithoutLength { $$ = $1; } ; ConstCharacter: CharacterWithLength { $$ = $1; } | CharacterWithoutLength { /* Length was not specified so allow to be unrestricted. * This handles problems with fixed-length (bpchar) strings * which in column definitions must default to a length * of one, but should not be constrained if the length * was not specified. */ $$ = $1; $$->typmods = NIL; } ; CharacterWithLength: character '(' Iconst ')' { $$ = SystemTypeName($1); $$->typmods = list_make1(makeIntConst($3, @3)); $$->location = @1; } ; CharacterWithoutLength: character { $$ = SystemTypeName($1); /* char defaults to char(1), varchar to no limit */ if (strcmp($1, "bpchar") == 0) $$->typmods = list_make1(makeIntConst(1, -1)); $$->location = @1; } ; character: CHARACTER opt_varying { $$ = $2 ? 
"varchar": "bpchar"; } | CHAR_P opt_varying { $$ = $2 ? "varchar": "bpchar"; } | VARCHAR { $$ = "varchar"; } | NATIONAL CHARACTER opt_varying { $$ = $3 ? "varchar": "bpchar"; } | NATIONAL CHAR_P opt_varying { $$ = $3 ? "varchar": "bpchar"; } | NCHAR opt_varying { $$ = $2 ? "varchar": "bpchar"; } ; opt_varying: VARYING { $$ = true; } | /*EMPTY*/ { $$ = false; } ; /* * SQL date/time types */ ConstDatetime: TIMESTAMP '(' Iconst ')' opt_timezone { if ($5) $$ = SystemTypeName("timestamptz"); else $$ = SystemTypeName("timestamp"); $$->typmods = list_make1(makeIntConst($3, @3)); $$->location = @1; } | TIMESTAMP opt_timezone { if ($2) $$ = SystemTypeName("timestamptz"); else $$ = SystemTypeName("timestamp"); $$->location = @1; } | TIME '(' Iconst ')' opt_timezone { if ($5) $$ = SystemTypeName("timetz"); else $$ = SystemTypeName("time"); $$->typmods = list_make1(makeIntConst($3, @3)); $$->location = @1; } | TIME opt_timezone { if ($2) $$ = SystemTypeName("timetz"); else $$ = SystemTypeName("time"); $$->location = @1; } ; ConstInterval: INTERVAL { $$ = SystemTypeName("interval"); $$->location = @1; } ; opt_timezone: WITH_LA TIME ZONE { $$ = true; } | WITHOUT TIME ZONE { $$ = false; } | /*EMPTY*/ { $$ = false; } ; opt_interval: YEAR_P { $$ = list_make1(makeIntConst(INTERVAL_MASK(YEAR), @1)); } | MONTH_P { $$ = list_make1(makeIntConst(INTERVAL_MASK(MONTH), @1)); } | DAY_P { $$ = list_make1(makeIntConst(INTERVAL_MASK(DAY), @1)); } | HOUR_P { $$ = list_make1(makeIntConst(INTERVAL_MASK(HOUR), @1)); } | MINUTE_P { $$ = list_make1(makeIntConst(INTERVAL_MASK(MINUTE), @1)); } | interval_second { $$ = $1; } | YEAR_P TO MONTH_P { $$ = list_make1(makeIntConst(INTERVAL_MASK(YEAR) | INTERVAL_MASK(MONTH), @1)); } | DAY_P TO HOUR_P { $$ = list_make1(makeIntConst(INTERVAL_MASK(DAY) | INTERVAL_MASK(HOUR), @1)); } | DAY_P TO MINUTE_P { $$ = list_make1(makeIntConst(INTERVAL_MASK(DAY) | INTERVAL_MASK(HOUR) | INTERVAL_MASK(MINUTE), @1)); } | DAY_P TO interval_second { $$ = $3; linitial($$) = 
makeIntConst(INTERVAL_MASK(DAY) | INTERVAL_MASK(HOUR) | INTERVAL_MASK(MINUTE) | INTERVAL_MASK(SECOND), @1); } | HOUR_P TO MINUTE_P { $$ = list_make1(makeIntConst(INTERVAL_MASK(HOUR) | INTERVAL_MASK(MINUTE), @1)); } | HOUR_P TO interval_second { $$ = $3; linitial($$) = makeIntConst(INTERVAL_MASK(HOUR) | INTERVAL_MASK(MINUTE) | INTERVAL_MASK(SECOND), @1); } | MINUTE_P TO interval_second { $$ = $3; linitial($$) = makeIntConst(INTERVAL_MASK(MINUTE) | INTERVAL_MASK(SECOND), @1); } | /*EMPTY*/ { $$ = NIL; } ; interval_second: SECOND_P { $$ = list_make1(makeIntConst(INTERVAL_MASK(SECOND), @1)); } | SECOND_P '(' Iconst ')' { $$ = list_make2(makeIntConst(INTERVAL_MASK(SECOND), @1), makeIntConst($3, @3)); } ; /***************************************************************************** * * expression grammar * *****************************************************************************/ /* * General expressions * This is the heart of the expression syntax. * * We have two expression types: a_expr is the unrestricted kind, and * b_expr is a subset that must be used in some places to avoid shift/reduce * conflicts. For example, we can't do BETWEEN as "BETWEEN a_expr AND a_expr" * because that use of AND conflicts with AND as a boolean operator. So, * b_expr is used in BETWEEN and we remove boolean keywords from b_expr. * * Note that '(' a_expr ')' is a b_expr, so an unrestricted expression can * always be used by surrounding it with parens. * * c_expr is all the productions that are common to a_expr and b_expr; * it's factored out just to eliminate redundant coding. * * Be careful of productions involving more than one terminal token. * By default, bison will assign such productions the precedence of their * last terminal, but in nearly all cases you want it to be the precedence * of the first terminal instead; otherwise you will not get the behavior * you expect! So we use %prec annotations freely to set precedences. 
*/ a_expr: c_expr { $$ = $1; } | a_expr TYPECAST Typename { $$ = makeTypeCast($1, $3, @2); } | a_expr COLLATE any_name { PGCollateClause *n = makeNode(PGCollateClause); n->arg = $1; n->collname = $3; n->location = @2; $$ = (PGNode *) n; } | a_expr AT TIME ZONE a_expr %prec AT { $$ = (PGNode *) makeFuncCall(SystemFuncName("timezone"), list_make2($5, $1), @2); } /* * These operators must be called out explicitly in order to make use * of bison's automatic operator-precedence handling. All other * operator names are handled by the generic productions using "Op", * below; and all those operators will have the same precedence. * * If you add more explicitly-known operators, be sure to add them * also to b_expr and to the MathOp list below. */ | '+' a_expr %prec UMINUS { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OP, "+", NULL, $2, @1); } | '-' a_expr %prec UMINUS { $$ = doNegate($2, @1); } | a_expr '+' a_expr { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OP, "+", $1, $3, @2); } | a_expr '-' a_expr { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OP, "-", $1, $3, @2); } | a_expr '*' a_expr { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OP, "*", $1, $3, @2); } | a_expr '/' a_expr { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OP, "/", $1, $3, @2); } | a_expr '%' a_expr { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OP, "%", $1, $3, @2); } | a_expr '^' a_expr { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OP, "^", $1, $3, @2); } | a_expr '<' a_expr { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OP, "<", $1, $3, @2); } | a_expr '>' a_expr { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OP, ">", $1, $3, @2); } | a_expr '=' a_expr { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OP, "=", $1, $3, @2); } | a_expr LESS_EQUALS a_expr { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OP, "<=", $1, $3, @2); } | a_expr GREATER_EQUALS a_expr { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OP, ">=", $1, $3, @2); } | a_expr NOT_EQUALS a_expr { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OP, "<>", $1, $3, @2); } | a_expr qual_Op a_expr %prec 
Op { $$ = (PGNode *) makeAExpr(PG_AEXPR_OP, $2, $1, $3, @2); } | qual_Op a_expr %prec Op { $$ = (PGNode *) makeAExpr(PG_AEXPR_OP, $1, NULL, $2, @1); } | a_expr qual_Op %prec POSTFIXOP { $$ = (PGNode *) makeAExpr(PG_AEXPR_OP, $2, $1, NULL, @2); } | a_expr AND a_expr { $$ = makeAndExpr($1, $3, @2); } | a_expr OR a_expr { $$ = makeOrExpr($1, $3, @2); } | NOT a_expr { $$ = makeNotExpr($2, @1); } | NOT_LA a_expr %prec NOT { $$ = makeNotExpr($2, @1); } | a_expr GLOB a_expr %prec GLOB { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_GLOB, "~~~", $1, $3, @2); } | a_expr LIKE a_expr { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_LIKE, "~~", $1, $3, @2); } | a_expr LIKE a_expr ESCAPE a_expr %prec LIKE { PGFuncCall *n = makeFuncCall(SystemFuncName("like_escape"), list_make3($1, $3, $5), @2); $$ = (PGNode *) n; } | a_expr NOT_LA LIKE a_expr %prec NOT_LA { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_LIKE, "!~~", $1, $4, @2); } | a_expr NOT_LA LIKE a_expr ESCAPE a_expr %prec NOT_LA { PGFuncCall *n = makeFuncCall(SystemFuncName("not_like_escape"), list_make3($1, $4, $6), @2); $$ = (PGNode *) n; } | a_expr ILIKE a_expr { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_ILIKE, "~~*", $1, $3, @2); } | a_expr ILIKE a_expr ESCAPE a_expr %prec ILIKE { PGFuncCall *n = makeFuncCall(SystemFuncName("like_escape"), list_make2($3, $5), @2); $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_ILIKE, "~~*", $1, (PGNode *) n, @2); } | a_expr NOT_LA ILIKE a_expr %prec NOT_LA { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_ILIKE, "!~~*", $1, $4, @2); } | a_expr NOT_LA ILIKE a_expr ESCAPE a_expr %prec NOT_LA { PGFuncCall *n = makeFuncCall(SystemFuncName("not_like_escape"), list_make2($4, $6), @2); $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_ILIKE, "!~~*", $1, (PGNode *) n, @2); } | a_expr SIMILAR TO a_expr %prec SIMILAR { PGFuncCall *n = makeFuncCall(SystemFuncName("similar_escape"), list_make2($4, makeNullAConst(-1)), @2); $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_SIMILAR, "~", $1, (PGNode *) n, @2); } | a_expr SIMILAR TO a_expr ESCAPE 
a_expr %prec SIMILAR { PGFuncCall *n = makeFuncCall(SystemFuncName("similar_escape"), list_make2($4, $6), @2); $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_SIMILAR, "~", $1, (PGNode *) n, @2); } | a_expr NOT_LA SIMILAR TO a_expr %prec NOT_LA { PGFuncCall *n = makeFuncCall(SystemFuncName("similar_escape"), list_make2($5, makeNullAConst(-1)), @2); $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_SIMILAR, "!~", $1, (PGNode *) n, @2); } | a_expr NOT_LA SIMILAR TO a_expr ESCAPE a_expr %prec NOT_LA { PGFuncCall *n = makeFuncCall(SystemFuncName("similar_escape"), list_make2($5, $7), @2); $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_SIMILAR, "!~", $1, (PGNode *) n, @2); } /* PGNullTest clause * Define SQL-style Null test clause. * Allow two forms described in the standard: * a IS NULL * a IS NOT NULL * Allow two SQL extensions * a ISNULL * a NOTNULL */ | a_expr IS NULL_P %prec IS { PGNullTest *n = makeNode(PGNullTest); n->arg = (PGExpr *) $1; n->nulltesttype = PG_IS_NULL; n->location = @2; $$ = (PGNode *)n; } | a_expr ISNULL { PGNullTest *n = makeNode(PGNullTest); n->arg = (PGExpr *) $1; n->nulltesttype = PG_IS_NULL; n->location = @2; $$ = (PGNode *)n; } | a_expr IS NOT NULL_P %prec IS { PGNullTest *n = makeNode(PGNullTest); n->arg = (PGExpr *) $1; n->nulltesttype = IS_NOT_NULL; n->location = @2; $$ = (PGNode *)n; } | a_expr NOT NULL_P { PGNullTest *n = makeNode(PGNullTest); n->arg = (PGExpr *) $1; n->nulltesttype = IS_NOT_NULL; n->location = @2; $$ = (PGNode *)n; } | a_expr NOTNULL { PGNullTest *n = makeNode(PGNullTest); n->arg = (PGExpr *) $1; n->nulltesttype = IS_NOT_NULL; n->location = @2; $$ = (PGNode *)n; } | row OVERLAPS row { if (list_length($1) != 2) ereport(ERROR, (errcode(PG_ERRCODE_SYNTAX_ERROR), errmsg("wrong number of parameters on left side of OVERLAPS expression"), parser_errposition(@1))); if (list_length($3) != 2) ereport(ERROR, (errcode(PG_ERRCODE_SYNTAX_ERROR), errmsg("wrong number of parameters on right side of OVERLAPS expression"), parser_errposition(@3))); $$ = 
(PGNode *) makeFuncCall(SystemFuncName("overlaps"), list_concat($1, $3), @2); } | a_expr IS TRUE_P %prec IS { PGBooleanTest *b = makeNode(PGBooleanTest); b->arg = (PGExpr *) $1; b->booltesttype = PG_IS_TRUE; b->location = @2; $$ = (PGNode *)b; } | a_expr IS NOT TRUE_P %prec IS { PGBooleanTest *b = makeNode(PGBooleanTest); b->arg = (PGExpr *) $1; b->booltesttype = IS_NOT_TRUE; b->location = @2; $$ = (PGNode *)b; } | a_expr IS FALSE_P %prec IS { PGBooleanTest *b = makeNode(PGBooleanTest); b->arg = (PGExpr *) $1; b->booltesttype = IS_FALSE; b->location = @2; $$ = (PGNode *)b; } | a_expr IS NOT FALSE_P %prec IS { PGBooleanTest *b = makeNode(PGBooleanTest); b->arg = (PGExpr *) $1; b->booltesttype = IS_NOT_FALSE; b->location = @2; $$ = (PGNode *)b; } | a_expr IS UNKNOWN %prec IS { PGBooleanTest *b = makeNode(PGBooleanTest); b->arg = (PGExpr *) $1; b->booltesttype = IS_UNKNOWN; b->location = @2; $$ = (PGNode *)b; } | a_expr IS NOT UNKNOWN %prec IS { PGBooleanTest *b = makeNode(PGBooleanTest); b->arg = (PGExpr *) $1; b->booltesttype = IS_NOT_UNKNOWN; b->location = @2; $$ = (PGNode *)b; } | a_expr IS DISTINCT FROM a_expr %prec IS { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_DISTINCT, "=", $1, $5, @2); } | a_expr IS NOT DISTINCT FROM a_expr %prec IS { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_NOT_DISTINCT, "=", $1, $6, @2); } | a_expr IS OF '(' type_list ')' %prec IS { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OF, "=", $1, (PGNode *) $5, @2); } | a_expr IS NOT OF '(' type_list ')' %prec IS { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OF, "<>", $1, (PGNode *) $6, @2); } | a_expr BETWEEN opt_asymmetric b_expr AND a_expr %prec BETWEEN { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_BETWEEN, "BETWEEN", $1, (PGNode *) list_make2($4, $6), @2); } | a_expr NOT_LA BETWEEN opt_asymmetric b_expr AND a_expr %prec NOT_LA { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_NOT_BETWEEN, "NOT BETWEEN", $1, (PGNode *) list_make2($5, $7), @2); } | a_expr BETWEEN SYMMETRIC b_expr AND a_expr %prec BETWEEN { $$ = 
(PGNode *) makeSimpleAExpr(PG_AEXPR_BETWEEN_SYM, "BETWEEN SYMMETRIC", $1, (PGNode *) list_make2($4, $6), @2); } | a_expr NOT_LA BETWEEN SYMMETRIC b_expr AND a_expr %prec NOT_LA { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_NOT_BETWEEN_SYM, "NOT BETWEEN SYMMETRIC", $1, (PGNode *) list_make2($5, $7), @2); } | a_expr IN_P in_expr { /* in_expr returns a PGSubLink or a list of a_exprs */ if (IsA($3, PGSubLink)) { /* generate foo = ANY (subquery) */ PGSubLink *n = (PGSubLink *) $3; n->subLinkType = PG_ANY_SUBLINK; n->subLinkId = 0; n->testexpr = $1; n->operName = NIL; /* show it's IN not = ANY */ n->location = @2; $$ = (PGNode *)n; } else { /* generate scalar IN expression */ $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_IN, "=", $1, $3, @2); } } | a_expr NOT_LA IN_P in_expr %prec NOT_LA { /* in_expr returns a PGSubLink or a list of a_exprs */ if (IsA($4, PGSubLink)) { /* generate NOT (foo = ANY (subquery)) */ /* Make an = ANY node */ PGSubLink *n = (PGSubLink *) $4; n->subLinkType = PG_ANY_SUBLINK; n->subLinkId = 0; n->testexpr = $1; n->operName = NIL; /* show it's IN not = ANY */ n->location = @2; /* Stick a NOT on top; must have same parse location */ $$ = makeNotExpr((PGNode *) n, @2); } else { /* generate scalar NOT IN expression */ $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_IN, "<>", $1, $4, @2); } } | a_expr subquery_Op sub_type select_with_parens %prec Op { PGSubLink *n = makeNode(PGSubLink); n->subLinkType = $3; n->subLinkId = 0; n->testexpr = $1; n->operName = $2; n->subselect = $4; n->location = @2; $$ = (PGNode *)n; } | a_expr subquery_Op sub_type '(' a_expr ')' %prec Op { if ($3 == PG_ANY_SUBLINK) $$ = (PGNode *) makeAExpr(PG_AEXPR_OP_ANY, $2, $1, $5, @2); else $$ = (PGNode *) makeAExpr(PG_AEXPR_OP_ALL, $2, $1, $5, @2); } | DEFAULT { /* * The SQL spec only allows DEFAULT in "contextually typed * expressions", but for us, it's easier to allow it in * any a_expr and then throw error during parse analysis * if it's in an inappropriate context. 
This way also * lets us say something smarter than "syntax error". */ PGSetToDefault *n = makeNode(PGSetToDefault); /* parse analysis will fill in the rest */ n->location = @1; $$ = (PGNode *)n; } ; /* * Restricted expressions * * b_expr is a subset of the complete expression syntax defined by a_expr. * * Presently, AND, NOT, IS, and IN are the a_expr keywords that would * cause trouble in the places where b_expr is used. For simplicity, we * just eliminate all the boolean-keyword-operator productions from b_expr. */ b_expr: c_expr { $$ = $1; } | b_expr TYPECAST Typename { $$ = makeTypeCast($1, $3, @2); } | '+' b_expr %prec UMINUS { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OP, "+", NULL, $2, @1); } | '-' b_expr %prec UMINUS { $$ = doNegate($2, @1); } | b_expr '+' b_expr { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OP, "+", $1, $3, @2); } | b_expr '-' b_expr { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OP, "-", $1, $3, @2); } | b_expr '*' b_expr { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OP, "*", $1, $3, @2); } | b_expr '/' b_expr { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OP, "/", $1, $3, @2); } | b_expr '%' b_expr { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OP, "%", $1, $3, @2); } | b_expr '^' b_expr { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OP, "^", $1, $3, @2); } | b_expr '<' b_expr { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OP, "<", $1, $3, @2); } | b_expr '>' b_expr { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OP, ">", $1, $3, @2); } | b_expr '=' b_expr { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OP, "=", $1, $3, @2); } | b_expr LESS_EQUALS b_expr { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OP, "<=", $1, $3, @2); } | b_expr GREATER_EQUALS b_expr { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OP, ">=", $1, $3, @2); } | b_expr NOT_EQUALS b_expr { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OP, "<>", $1, $3, @2); } | b_expr qual_Op b_expr %prec Op { $$ = (PGNode *) makeAExpr(PG_AEXPR_OP, $2, $1, $3, @2); } | qual_Op b_expr %prec Op { $$ = (PGNode *) makeAExpr(PG_AEXPR_OP, $1, 
NULL, $2, @1); } | b_expr qual_Op %prec POSTFIXOP { $$ = (PGNode *) makeAExpr(PG_AEXPR_OP, $2, $1, NULL, @2); } | b_expr IS DISTINCT FROM b_expr %prec IS { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_DISTINCT, "=", $1, $5, @2); } | b_expr IS NOT DISTINCT FROM b_expr %prec IS { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_NOT_DISTINCT, "=", $1, $6, @2); } | b_expr IS OF '(' type_list ')' %prec IS { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OF, "=", $1, (PGNode *) $5, @2); } | b_expr IS NOT OF '(' type_list ')' %prec IS { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_OF, "<>", $1, (PGNode *) $6, @2); } ; /* * Productions that can be used in both a_expr and b_expr. * * Note: productions that refer recursively to a_expr or b_expr mostly * cannot appear here. However, it's OK to refer to a_exprs that occur * inside parentheses, such as function arguments; that cannot introduce * ambiguity to the b_expr syntax. */ c_expr: columnref { $$ = $1; } | AexprConst { $$ = $1; } | '?' opt_indirection { if ($2) { PGAIndirection *n = makeNode(PGAIndirection); n->arg = makeParamRef(0, @1); n->indirection = check_indirection($2, yyscanner); $$ = (PGNode *) n; } else $$ = makeParamRef(0, @1); } | PARAM opt_indirection { PGParamRef *p = makeNode(PGParamRef); p->number = $1; p->location = @1; if ($2) { PGAIndirection *n = makeNode(PGAIndirection); n->arg = (PGNode *) p; n->indirection = check_indirection($2, yyscanner); $$ = (PGNode *) n; } else $$ = (PGNode *) p; } | '(' a_expr ')' opt_indirection { if ($4) { PGAIndirection *n = makeNode(PGAIndirection); n->arg = $2; n->indirection = check_indirection($4, yyscanner); $$ = (PGNode *)n; } else $$ = $2; } | case_expr { $$ = $1; } | func_expr { $$ = $1; } | select_with_parens %prec UMINUS { PGSubLink *n = makeNode(PGSubLink); n->subLinkType = PG_EXPR_SUBLINK; n->subLinkId = 0; n->testexpr = NULL; n->operName = NIL; n->subselect = $1; n->location = @1; $$ = (PGNode *)n; } | select_with_parens indirection { /* * Because the select_with_parens nonterminal 
is designed * to "eat" as many levels of parens as possible, the * '(' a_expr ')' opt_indirection production above will * fail to match a sub-SELECT with indirection decoration; * the sub-SELECT won't be regarded as an a_expr as long * as there are parens around it. To support applying * subscripting or field selection to a sub-SELECT result, * we need this redundant-looking production. */ PGSubLink *n = makeNode(PGSubLink); PGAIndirection *a = makeNode(PGAIndirection); n->subLinkType = PG_EXPR_SUBLINK; n->subLinkId = 0; n->testexpr = NULL; n->operName = NIL; n->subselect = $1; n->location = @1; a->arg = (PGNode *)n; a->indirection = check_indirection($2, yyscanner); $$ = (PGNode *)a; } | EXISTS select_with_parens { PGSubLink *n = makeNode(PGSubLink); n->subLinkType = PG_EXISTS_SUBLINK; n->subLinkId = 0; n->testexpr = NULL; n->operName = NIL; n->subselect = $2; n->location = @1; $$ = (PGNode *)n; } ; func_application: func_name '(' ')' { $$ = (PGNode *) makeFuncCall($1, NIL, @1); } | func_name '(' func_arg_list opt_sort_clause ')' { PGFuncCall *n = makeFuncCall($1, $3, @1); n->agg_order = $4; $$ = (PGNode *)n; } | func_name '(' VARIADIC func_arg_expr opt_sort_clause ')' { PGFuncCall *n = makeFuncCall($1, list_make1($4), @1); n->func_variadic = true; n->agg_order = $5; $$ = (PGNode *)n; } | func_name '(' func_arg_list ',' VARIADIC func_arg_expr opt_sort_clause ')' { PGFuncCall *n = makeFuncCall($1, lappend($3, $6), @1); n->func_variadic = true; n->agg_order = $7; $$ = (PGNode *)n; } | func_name '(' ALL func_arg_list opt_sort_clause ')' { PGFuncCall *n = makeFuncCall($1, $4, @1); n->agg_order = $5; /* Ideally we'd mark the PGFuncCall node to indicate * "must be an aggregate", but there's no provision * for that in PGFuncCall at the moment. 
*/ $$ = (PGNode *)n; } | func_name '(' DISTINCT func_arg_list opt_sort_clause ')' { PGFuncCall *n = makeFuncCall($1, $4, @1); n->agg_order = $5; n->agg_distinct = true; $$ = (PGNode *)n; } | func_name '(' '*' ')' { /* * We consider AGGREGATE(*) to invoke a parameterless * aggregate. This does the right thing for COUNT(*), * and there are no other aggregates in SQL that accept * '*' as parameter. * * The PGFuncCall node is also marked agg_star = true, * so that later processing can detect what the argument * really was. */ PGFuncCall *n = makeFuncCall($1, NIL, @1); n->agg_star = true; $$ = (PGNode *)n; } ; /* * func_expr and its cousin func_expr_windowless are split out from c_expr just * so that we have classifications for "everything that is a function call or * looks like one". This isn't very important, but it saves us having to * document which variants are legal in places like "FROM function()" or the * backwards-compatible functional-index syntax for CREATE INDEX. * (Note that many of the special SQL functions wouldn't actually make any * sense as functional index entries, but we ignore that consideration here.) */ func_expr: func_application within_group_clause filter_clause over_clause { PGFuncCall *n = (PGFuncCall *) $1; /* * The order clause for WITHIN GROUP and the one for * plain-aggregate ORDER BY share a field, so we have to * check here that at most one is present. We also check * for DISTINCT and VARIADIC here to give a better error * location. Other consistency checks are deferred to * parse analysis. 
*/ if ($2 != NIL) { if (n->agg_order != NIL) ereport(ERROR, (errcode(PG_ERRCODE_SYNTAX_ERROR), errmsg("cannot use multiple ORDER BY clauses with WITHIN GROUP"), parser_errposition(@2))); if (n->agg_distinct) ereport(ERROR, (errcode(PG_ERRCODE_SYNTAX_ERROR), errmsg("cannot use DISTINCT with WITHIN GROUP"), parser_errposition(@2))); if (n->func_variadic) ereport(ERROR, (errcode(PG_ERRCODE_SYNTAX_ERROR), errmsg("cannot use VARIADIC with WITHIN GROUP"), parser_errposition(@2))); n->agg_order = $2; n->agg_within_group = true; } n->agg_filter = $3; n->over = $4; $$ = (PGNode *) n; } | func_expr_common_subexpr { $$ = $1; } ; /* * As func_expr but does not accept WINDOW functions directly * (but they can still be contained in arguments for functions etc). * Use this when window expressions are not allowed, where needed to * disambiguate the grammar (e.g. in CREATE INDEX). */ func_expr_windowless: func_application { $$ = $1; } | func_expr_common_subexpr { $$ = $1; } ; /* * Special expressions that are considered to be functions. 
*/ func_expr_common_subexpr: COLLATION FOR '(' a_expr ')' { $$ = (PGNode *) makeFuncCall(SystemFuncName("pg_collation_for"), list_make1($4), @1); } | CURRENT_DATE { $$ = makeSQLValueFunction(PG_SVFOP_CURRENT_DATE, -1, @1); } | CURRENT_TIME { $$ = makeSQLValueFunction(PG_SVFOP_CURRENT_TIME, -1, @1); } | CURRENT_TIME '(' Iconst ')' { $$ = makeSQLValueFunction(PG_SVFOP_CURRENT_TIME_N, $3, @1); } | CURRENT_TIMESTAMP { $$ = makeSQLValueFunction(PG_SVFOP_CURRENT_TIMESTAMP, -1, @1); } | CURRENT_TIMESTAMP '(' Iconst ')' { $$ = makeSQLValueFunction(PG_SVFOP_CURRENT_TIMESTAMP_N, $3, @1); } | LOCALTIME { $$ = makeSQLValueFunction(PG_SVFOP_LOCALTIME, -1, @1); } | LOCALTIME '(' Iconst ')' { $$ = makeSQLValueFunction(PG_SVFOP_LOCALTIME_N, $3, @1); } | LOCALTIMESTAMP { $$ = makeSQLValueFunction(PG_SVFOP_LOCALTIMESTAMP, -1, @1); } | LOCALTIMESTAMP '(' Iconst ')' { $$ = makeSQLValueFunction(PG_SVFOP_LOCALTIMESTAMP_N, $3, @1); } | CURRENT_ROLE { $$ = makeSQLValueFunction(PG_SVFOP_CURRENT_ROLE, -1, @1); } | CURRENT_USER { $$ = makeSQLValueFunction(PG_SVFOP_CURRENT_USER, -1, @1); } | SESSION_USER { $$ = makeSQLValueFunction(PG_SVFOP_SESSION_USER, -1, @1); } | USER { $$ = makeSQLValueFunction(PG_SVFOP_USER, -1, @1); } | CURRENT_CATALOG { $$ = makeSQLValueFunction(PG_SVFOP_CURRENT_CATALOG, -1, @1); } | CURRENT_SCHEMA { $$ = makeSQLValueFunction(PG_SVFOP_CURRENT_SCHEMA, -1, @1); } | CAST '(' a_expr AS Typename ')' { $$ = makeTypeCast($3, $5, @1); } | EXTRACT '(' extract_list ')' { $$ = (PGNode *) makeFuncCall(SystemFuncName("date_part"), $3, @1); } | OVERLAY '(' overlay_list ')' { /* overlay(A PLACING B FROM C FOR D) is converted to * overlay(A, B, C, D) * overlay(A PLACING B FROM C) is converted to * overlay(A, B, C) */ $$ = (PGNode *) makeFuncCall(SystemFuncName("overlay"), $3, @1); } | POSITION '(' position_list ')' { /* position(A in B) is converted to position(B, A) */ $$ = (PGNode *) makeFuncCall(SystemFuncName("position"), $3, @1); } | SUBSTRING '(' substr_list ')' { /* 
substring(A from B for C) is converted to * substring(A, B, C) - thomas 2000-11-28 */ $$ = (PGNode *) makeFuncCall(SystemFuncName("substring"), $3, @1); } | TREAT '(' a_expr AS Typename ')' { /* TREAT(expr AS target) converts expr of a particular type to target, * which is defined to be a subtype of the original expression. * In SQL99, this is intended for use with structured UDTs, * but let's make this a generally useful form allowing stronger * coercions than are handled by implicit casting. * * Convert SystemTypeName() to SystemFuncName() even though * at the moment they result in the same thing. */ $$ = (PGNode *) makeFuncCall(SystemFuncName(((PGValue *)llast($5->names))->val.str), list_make1($3), @1); } | TRIM '(' BOTH trim_list ')' { /* various trim expressions are defined in SQL * - thomas 1997-07-19 */ $$ = (PGNode *) makeFuncCall(SystemFuncName("trim"), $4, @1); } | TRIM '(' LEADING trim_list ')' { $$ = (PGNode *) makeFuncCall(SystemFuncName("ltrim"), $4, @1); } | TRIM '(' TRAILING trim_list ')' { $$ = (PGNode *) makeFuncCall(SystemFuncName("rtrim"), $4, @1); } | TRIM '(' trim_list ')' { $$ = (PGNode *) makeFuncCall(SystemFuncName("trim"), $3, @1); } | NULLIF '(' a_expr ',' a_expr ')' { $$ = (PGNode *) makeSimpleAExpr(PG_AEXPR_NULLIF, "=", $3, $5, @1); } | COALESCE '(' expr_list ')' { PGCoalesceExpr *c = makeNode(PGCoalesceExpr); c->args = $3; c->location = @1; $$ = (PGNode *)c; } ; /* We allow several variants for SQL and other compatibility. 
*/ /* * Aggregate decoration clauses */ within_group_clause: WITHIN GROUP_P '(' sort_clause ')' { $$ = $4; } | /*EMPTY*/ { $$ = NIL; } ; filter_clause: FILTER '(' WHERE a_expr ')' { $$ = $4; } | /*EMPTY*/ { $$ = NULL; } ; /* * Window Definitions */ window_clause: WINDOW window_definition_list { $$ = $2; } | /*EMPTY*/ { $$ = NIL; } ; window_definition_list: window_definition { $$ = list_make1($1); } | window_definition_list ',' window_definition { $$ = lappend($1, $3); } ; window_definition: ColId AS window_specification { PGWindowDef *n = $3; n->name = $1; $$ = n; } ; over_clause: OVER window_specification { $$ = $2; } | OVER ColId { PGWindowDef *n = makeNode(PGWindowDef); n->name = $2; n->refname = NULL; n->partitionClause = NIL; n->orderClause = NIL; n->frameOptions = FRAMEOPTION_DEFAULTS; n->startOffset = NULL; n->endOffset = NULL; n->location = @2; $$ = n; } | /*EMPTY*/ { $$ = NULL; } ; window_specification: '(' opt_existing_window_name opt_partition_clause opt_sort_clause opt_frame_clause ')' { PGWindowDef *n = makeNode(PGWindowDef); n->name = NULL; n->refname = $2; n->partitionClause = $3; n->orderClause = $4; /* copy relevant fields of opt_frame_clause */ n->frameOptions = $5->frameOptions; n->startOffset = $5->startOffset; n->endOffset = $5->endOffset; n->location = @1; $$ = n; } ; /* * If we see PARTITION, RANGE, or ROWS as the first token after the '(' * of a window_specification, we want the assumption to be that there is * no existing_window_name; but those keywords are unreserved and so could * be ColIds. We fix this by making them have the same precedence as IDENT * and giving the empty production here a slightly higher precedence, so * that the shift/reduce conflict is resolved in favor of reducing the rule. * These keywords are thus precluded from being an existing_window_name but * are not reserved for any other purpose. 
*/ opt_existing_window_name: ColId { $$ = $1; } | /*EMPTY*/ %prec Op { $$ = NULL; } ; opt_partition_clause: PARTITION BY expr_list { $$ = $3; } | /*EMPTY*/ { $$ = NIL; } ; /* * For frame clauses, we return a PGWindowDef, but only some fields are used: * frameOptions, startOffset, and endOffset. * * This is only a subset of the full SQL:2008 frame_clause grammar. * We don't support <window frame exclusion> yet. */ opt_frame_clause: RANGE frame_extent { PGWindowDef *n = $2; n->frameOptions |= FRAMEOPTION_NONDEFAULT | FRAMEOPTION_RANGE; if (n->frameOptions & (FRAMEOPTION_START_VALUE_PRECEDING | FRAMEOPTION_END_VALUE_PRECEDING)) ereport(ERROR, (errcode(PG_ERRCODE_FEATURE_NOT_SUPPORTED), errmsg("RANGE PRECEDING is only supported with UNBOUNDED"), parser_errposition(@1))); if (n->frameOptions & (FRAMEOPTION_START_VALUE_FOLLOWING | FRAMEOPTION_END_VALUE_FOLLOWING)) ereport(ERROR, (errcode(PG_ERRCODE_FEATURE_NOT_SUPPORTED), errmsg("RANGE FOLLOWING is only supported with UNBOUNDED"), parser_errposition(@1))); $$ = n; } | ROWS frame_extent { PGWindowDef *n = $2; n->frameOptions |= FRAMEOPTION_NONDEFAULT | FRAMEOPTION_ROWS; $$ = n; } | /*EMPTY*/ { PGWindowDef *n = makeNode(PGWindowDef); n->frameOptions = FRAMEOPTION_DEFAULTS; n->startOffset = NULL; n->endOffset = NULL; $$ = n; } ; frame_extent: frame_bound { PGWindowDef *n = $1; /* reject invalid cases */ if (n->frameOptions & FRAMEOPTION_START_UNBOUNDED_FOLLOWING) ereport(ERROR, (errcode(PG_ERRCODE_WINDOWING_ERROR), errmsg("frame start cannot be UNBOUNDED FOLLOWING"), parser_errposition(@1))); if (n->frameOptions & FRAMEOPTION_START_VALUE_FOLLOWING) ereport(ERROR, (errcode(PG_ERRCODE_WINDOWING_ERROR), errmsg("frame starting from following row cannot end with current row"), parser_errposition(@1))); n->frameOptions |= FRAMEOPTION_END_CURRENT_ROW; $$ = n; } | BETWEEN frame_bound AND frame_bound { PGWindowDef *n1 = $2; PGWindowDef *n2 = $4; /* form merged options */ int frameOptions = n1->frameOptions; /* shift converts START_ 
options to END_ options */ frameOptions |= n2->frameOptions << 1; frameOptions |= FRAMEOPTION_BETWEEN; /* reject invalid cases */ if (frameOptions & FRAMEOPTION_START_UNBOUNDED_FOLLOWING) ereport(ERROR, (errcode(PG_ERRCODE_WINDOWING_ERROR), errmsg("frame start cannot be UNBOUNDED FOLLOWING"), parser_errposition(@2))); if (frameOptions & FRAMEOPTION_END_UNBOUNDED_PRECEDING) ereport(ERROR, (errcode(PG_ERRCODE_WINDOWING_ERROR), errmsg("frame end cannot be UNBOUNDED PRECEDING"), parser_errposition(@4))); if ((frameOptions & FRAMEOPTION_START_CURRENT_ROW) && (frameOptions & FRAMEOPTION_END_VALUE_PRECEDING)) ereport(ERROR, (errcode(PG_ERRCODE_WINDOWING_ERROR), errmsg("frame starting from current row cannot have preceding rows"), parser_errposition(@4))); if ((frameOptions & FRAMEOPTION_START_VALUE_FOLLOWING) && (frameOptions & (FRAMEOPTION_END_VALUE_PRECEDING | FRAMEOPTION_END_CURRENT_ROW))) ereport(ERROR, (errcode(PG_ERRCODE_WINDOWING_ERROR), errmsg("frame starting from following row cannot have preceding rows"), parser_errposition(@4))); n1->frameOptions = frameOptions; n1->endOffset = n2->startOffset; $$ = n1; } ; /* * This is used for both frame start and frame end, with output set up on * the assumption it's frame start; the frame_extent productions must reject * invalid cases. 
*/ frame_bound: UNBOUNDED PRECEDING { PGWindowDef *n = makeNode(PGWindowDef); n->frameOptions = FRAMEOPTION_START_UNBOUNDED_PRECEDING; n->startOffset = NULL; n->endOffset = NULL; $$ = n; } | UNBOUNDED FOLLOWING { PGWindowDef *n = makeNode(PGWindowDef); n->frameOptions = FRAMEOPTION_START_UNBOUNDED_FOLLOWING; n->startOffset = NULL; n->endOffset = NULL; $$ = n; } | CURRENT_P ROW { PGWindowDef *n = makeNode(PGWindowDef); n->frameOptions = FRAMEOPTION_START_CURRENT_ROW; n->startOffset = NULL; n->endOffset = NULL; $$ = n; } | a_expr PRECEDING { PGWindowDef *n = makeNode(PGWindowDef); n->frameOptions = FRAMEOPTION_START_VALUE_PRECEDING; n->startOffset = $1; n->endOffset = NULL; $$ = n; } | a_expr FOLLOWING { PGWindowDef *n = makeNode(PGWindowDef); n->frameOptions = FRAMEOPTION_START_VALUE_FOLLOWING; n->startOffset = $1; n->endOffset = NULL; $$ = n; } ; /* * Supporting nonterminals for expressions. */ /* Explicit row production. * * SQL99 allows an optional ROW keyword, so we can now do single-element rows * without conflicting with the parenthesized a_expr production. Without the * ROW keyword, there must be more than one a_expr inside the parens. 
*/ row: ROW '(' expr_list ')' { $$ = $3; } | ROW '(' ')' { $$ = NIL; } | '(' expr_list ',' a_expr ')' { $$ = lappend($2, $4); } ; sub_type: ANY { $$ = PG_ANY_SUBLINK; } | SOME { $$ = PG_ANY_SUBLINK; } | ALL { $$ = PG_ALL_SUBLINK; } ; all_Op: Op { $$ = $1; } | MathOp { $$ = (char*) $1; } ; MathOp: '+' { $$ = "+"; } | '-' { $$ = "-"; } | '*' { $$ = "*"; } | '/' { $$ = "/"; } | '%' { $$ = "%"; } | '^' { $$ = "^"; } | '<' { $$ = "<"; } | '>' { $$ = ">"; } | '=' { $$ = "="; } | LESS_EQUALS { $$ = "<="; } | GREATER_EQUALS { $$ = ">="; } | NOT_EQUALS { $$ = "<>"; } ; qual_Op: Op { $$ = list_make1(makeString($1)); } | OPERATOR '(' any_operator ')' { $$ = $3; } ; qual_all_Op: all_Op { $$ = list_make1(makeString($1)); } | OPERATOR '(' any_operator ')' { $$ = $3; } ; subquery_Op: all_Op { $$ = list_make1(makeString($1)); } | OPERATOR '(' any_operator ')' { $$ = $3; } | LIKE { $$ = list_make1(makeString("~~")); } | NOT_LA LIKE { $$ = list_make1(makeString("!~~")); } | GLOB { $$ = list_make1(makeString("~~~")); } | NOT_LA GLOB { $$ = list_make1(makeString("!~~~")); } | ILIKE { $$ = list_make1(makeString("~~*")); } | NOT_LA ILIKE { $$ = list_make1(makeString("!~~*")); } /* cannot put SIMILAR TO here, because SIMILAR TO is a hack. * the regular expression is preprocessed by a function (similar_escape), * and the ~ operator for posix regular expressions is used. * x SIMILAR TO y -> x ~ similar_escape(y) * this transformation is made on the fly by the parser upwards. * however the PGSubLink structure which handles any/some/all stuff * is not ready for such a thing. */ ; any_operator: all_Op { $$ = list_make1(makeString($1)); } | ColId '.' 
any_operator { $$ = lcons(makeString($1), $3); } ; expr_list: a_expr { $$ = list_make1($1); } | expr_list ',' a_expr { $$ = lappend($1, $3); } ; /* function arguments can have names */ func_arg_list: func_arg_expr { $$ = list_make1($1); } | func_arg_list ',' func_arg_expr { $$ = lappend($1, $3); } ; func_arg_expr: a_expr { $$ = $1; } | param_name COLON_EQUALS a_expr { PGNamedArgExpr *na = makeNode(PGNamedArgExpr); na->name = $1; na->arg = (PGExpr *) $3; na->argnumber = -1; /* until determined */ na->location = @1; $$ = (PGNode *) na; } | param_name EQUALS_GREATER a_expr { PGNamedArgExpr *na = makeNode(PGNamedArgExpr); na->name = $1; na->arg = (PGExpr *) $3; na->argnumber = -1; /* until determined */ na->location = @1; $$ = (PGNode *) na; } ; type_list: Typename { $$ = list_make1($1); } | type_list ',' Typename { $$ = lappend($1, $3); } ; extract_list: extract_arg FROM a_expr { $$ = list_make2(makeStringConst($1, @1), $3); } | /*EMPTY*/ { $$ = NIL; } ; /* Allow delimited string Sconst in extract_arg as an SQL extension. 
* - thomas 2001-04-12 */ extract_arg: IDENT { $$ = $1; } | YEAR_P { $$ = (char*) "year"; } | MONTH_P { $$ = (char*) "month"; } | DAY_P { $$ = (char*) "day"; } | HOUR_P { $$ = (char*) "hour"; } | MINUTE_P { $$ = (char*) "minute"; } | SECOND_P { $$ = (char*) "second"; } | Sconst { $$ = $1; } ; /* OVERLAY() arguments * SQL99 defines the OVERLAY() function: * o overlay(text placing text from int for int) * o overlay(text placing text from int) * and similarly for binary strings */ overlay_list: a_expr overlay_placing substr_from substr_for { $$ = list_make4($1, $2, $3, $4); } | a_expr overlay_placing substr_from { $$ = list_make3($1, $2, $3); } ; overlay_placing: PLACING a_expr { $$ = $2; } ; /* position_list uses b_expr not a_expr to avoid conflict with general IN */ position_list: b_expr IN_P b_expr { $$ = list_make2($3, $1); } | /*EMPTY*/ { $$ = NIL; } ; /* SUBSTRING() arguments * SQL9x defines a specific syntax for arguments to SUBSTRING(): * o substring(text from int for int) * o substring(text from int) get entire string from starting point "int" * o substring(text for int) get first "int" characters of string * o substring(text from pattern) get entire string matching pattern * o substring(text from pattern for escape) same with specified escape char * We also want to support generic substring functions which accept * the usual generic list of arguments. So we will accept both styles * here, and convert the SQL9x style to the generic list for further * processing. - thomas 2000-11-28 */ substr_list: a_expr substr_from substr_for { $$ = list_make3($1, $2, $3); } | a_expr substr_for substr_from { /* not legal per SQL99, but might as well allow it */ $$ = list_make3($1, $3, $2); } | a_expr substr_from { $$ = list_make2($1, $2); } | a_expr substr_for { /* * Since there are no cases where this syntax allows * a textual FOR value, we forcibly cast the argument * to int4. 
The possible matches in pg_proc are * substring(text,int4) and substring(text,text), * and we don't want the parser to choose the latter, * which it is likely to do if the second argument * is unknown or doesn't have an implicit cast to int4. */ $$ = list_make3($1, makeIntConst(1, -1), makeTypeCast($2, SystemTypeName("int4"), -1)); } | expr_list { $$ = $1; } | /*EMPTY*/ { $$ = NIL; } ; substr_from: FROM a_expr { $$ = $2; } ; substr_for: FOR a_expr { $$ = $2; } ; trim_list: a_expr FROM expr_list { $$ = lappend($3, $1); } | FROM expr_list { $$ = $2; } | expr_list { $$ = $1; } ; in_expr: select_with_parens { PGSubLink *n = makeNode(PGSubLink); n->subselect = $1; /* other fields will be filled later */ $$ = (PGNode *)n; } | '(' expr_list ')' { $$ = (PGNode *)$2; } ; /* * Define SQL-style CASE clause. * - Full specification * CASE WHEN a = b THEN c ... ELSE d END * - Implicit argument * CASE a WHEN b THEN c ... ELSE d END */ case_expr: CASE case_arg when_clause_list case_default END_P { PGCaseExpr *c = makeNode(PGCaseExpr); c->casetype = InvalidOid; /* not analyzed yet */ c->arg = (PGExpr *) $2; c->args = $3; c->defresult = (PGExpr *) $4; c->location = @1; $$ = (PGNode *)c; } ; when_clause_list: /* There must be at least one */ when_clause { $$ = list_make1($1); } | when_clause_list when_clause { $$ = lappend($1, $2); } ; when_clause: WHEN a_expr THEN a_expr { PGCaseWhen *w = makeNode(PGCaseWhen); w->expr = (PGExpr *) $2; w->result = (PGExpr *) $4; w->location = @1; $$ = (PGNode *)w; } ; case_default: ELSE a_expr { $$ = $2; } | /*EMPTY*/ { $$ = NULL; } ; case_arg: a_expr { $$ = $1; } | /*EMPTY*/ { $$ = NULL; } ; columnref: ColId { $$ = makeColumnRef($1, NIL, @1, yyscanner); } | ColId indirection { $$ = makeColumnRef($1, $2, @1, yyscanner); } ; indirection_el: '.' attr_name { $$ = (PGNode *) makeString($2); } | '.' 
'*' { $$ = (PGNode *) makeNode(PGAStar); } | '[' a_expr ']' { PGAIndices *ai = makeNode(PGAIndices); ai->is_slice = false; ai->lidx = NULL; ai->uidx = $2; $$ = (PGNode *) ai; } | '[' opt_slice_bound ':' opt_slice_bound ']' { PGAIndices *ai = makeNode(PGAIndices); ai->is_slice = true; ai->lidx = $2; ai->uidx = $4; $$ = (PGNode *) ai; } ; opt_slice_bound: a_expr { $$ = $1; } | /*EMPTY*/ { $$ = NULL; } ; indirection: indirection_el { $$ = list_make1($1); } | indirection indirection_el { $$ = lappend($1, $2); } ; opt_indirection: /*EMPTY*/ { $$ = NIL; } | opt_indirection indirection_el { $$ = lappend($1, $2); } ; opt_asymmetric: ASYMMETRIC | /*EMPTY*/ ; /***************************************************************************** * * target list for SELECT * *****************************************************************************/ opt_target_list: target_list { $$ = $1; } | /* EMPTY */ { $$ = NIL; } ; target_list: target_el { $$ = list_make1($1); } | target_list ',' target_el { $$ = lappend($1, $3); } ; target_el: a_expr AS ColLabelOrString { $$ = makeNode(PGResTarget); $$->name = $3; $$->indirection = NIL; $$->val = (PGNode *)$1; $$->location = @1; } /* * We support omitting AS only for column labels that aren't * any known keyword. There is an ambiguity against postfix * operators: is "a ! b" an infix expression, or a postfix * expression and a column label? We prefer to resolve this * as an infix expression, which we accomplish by assigning * IDENT a precedence higher than POSTFIXOP. 
*/ | a_expr IDENT { $$ = makeNode(PGResTarget); $$->name = $2; $$->indirection = NIL; $$->val = (PGNode *)$1; $$->location = @1; } | a_expr { $$ = makeNode(PGResTarget); $$->name = NULL; $$->indirection = NIL; $$->val = (PGNode *)$1; $$->location = @1; } | '*' { PGColumnRef *n = makeNode(PGColumnRef); n->fields = list_make1(makeNode(PGAStar)); n->location = @1; $$ = makeNode(PGResTarget); $$->name = NULL; $$->indirection = NIL; $$->val = (PGNode *)n; $$->location = @1; } ; /***************************************************************************** * * Names and constants * *****************************************************************************/ qualified_name_list: qualified_name { $$ = list_make1($1); } | qualified_name_list ',' qualified_name { $$ = lappend($1, $3); } ; /* * The production for a qualified relation name has to exactly match the * production for a qualified func_name, because in a FROM clause we cannot * tell which we are parsing until we see what comes after it ('(' for a * func_name, something else for a relation). Therefore we allow 'indirection' * which may contain subscripts, and reject that case in the C code. 
*/ qualified_name: ColId { $$ = makeRangeVar(NULL, $1, @1); } | ColId indirection { check_qualified_name($2, yyscanner); $$ = makeRangeVar(NULL, NULL, @1); switch (list_length($2)) { case 1: $$->catalogname = NULL; $$->schemaname = $1; $$->relname = strVal(linitial($2)); break; case 2: $$->catalogname = $1; $$->schemaname = strVal(linitial($2)); $$->relname = strVal(lsecond($2)); break; default: ereport(ERROR, (errcode(PG_ERRCODE_SYNTAX_ERROR), errmsg("improper qualified name (too many dotted names): %s", NameListToString(lcons(makeString($1), $2))), parser_errposition(@1))); break; } } ; name_list: name { $$ = list_make1(makeString($1)); } | name_list ',' name { $$ = lappend($1, makeString($3)); } ; name: ColId { $$ = $1; }; attr_name: ColLabel { $$ = $1; }; /* * The production for a qualified func_name has to exactly match the * production for a qualified columnref, because we cannot tell which we * are parsing until we see what comes after it ('(' or Sconst for a func_name, * anything else for a columnref). Therefore we allow 'indirection' which * may contain subscripts, and reject that case in the C code. (If we * ever implement SQL99-like methods, such syntax may actually become legal!) */ func_name: type_function_name { $$ = list_make1(makeString($1)); } | ColId indirection { $$ = check_func_name(lcons(makeString($1), $2), yyscanner); } ; /* * Constants */ AexprConst: Iconst { $$ = makeIntConst($1, @1); } | FCONST { $$ = makeFloatConst($1, @1); } | Sconst { $$ = makeStringConst($1, @1); } | BCONST { $$ = makeBitStringConst($1, @1); } | XCONST { /* This is a bit constant per SQL99: * Without Feature F511, "BIT data type", * a <general literal> shall not be a * <bit string literal> or a <hex string literal>. 
*/ $$ = makeBitStringConst($1, @1); } | func_name Sconst { /* generic type 'literal' syntax */ PGTypeName *t = makeTypeNameFromNameList($1); t->location = @1; $$ = makeStringConstCast($2, @2, t); } | func_name '(' func_arg_list opt_sort_clause ')' Sconst { /* generic syntax with a type modifier */ PGTypeName *t = makeTypeNameFromNameList($1); PGListCell *lc; /* * We must use func_arg_list and opt_sort_clause in the * production to avoid reduce/reduce conflicts, but we * don't actually wish to allow PGNamedArgExpr in this * context, nor ORDER BY. */ foreach(lc, $3) { PGNamedArgExpr *arg = (PGNamedArgExpr *) lfirst(lc); if (IsA(arg, PGNamedArgExpr)) ereport(ERROR, (errcode(PG_ERRCODE_SYNTAX_ERROR), errmsg("type modifier cannot have parameter name"), parser_errposition(arg->location))); } if ($4 != NIL) ereport(ERROR, (errcode(PG_ERRCODE_SYNTAX_ERROR), errmsg("type modifier cannot have ORDER BY"), parser_errposition(@4))); t->typmods = $3; t->location = @1; $$ = makeStringConstCast($6, @6, t); } | ConstTypename Sconst { $$ = makeStringConstCast($2, @2, $1); } | ConstInterval Sconst opt_interval { PGTypeName *t = $1; t->typmods = $3; $$ = makeStringConstCast($2, @2, t); } | ConstInterval '(' Iconst ')' Sconst { PGTypeName *t = $1; t->typmods = list_make2(makeIntConst(INTERVAL_FULL_RANGE, -1), makeIntConst($3, @3)); $$ = makeStringConstCast($5, @5, t); } /* Version without () is handled in a_expr/b_expr logic due to ? mis-parsing as operator */ | ConstInterval '(' '?' ')' '?' opt_interval { PGTypeName *t = $1; if ($6 != NIL) { t->typmods = lappend($6, makeParamRef(0, @3)); } else t->typmods = list_make2(makeIntConst(INTERVAL_FULL_RANGE, -1), makeParamRef(0, @3)); $$ = makeParamRefCast(0, @5, t); } | TRUE_P { $$ = makeBoolAConst(true, @1); } | FALSE_P { $$ = makeBoolAConst(false, @1); } | NULL_P { $$ = makeNullAConst(@1); } ; Iconst: ICONST { $$ = $1; }; Sconst: SCONST { $$ = $1; }; /* Role specifications */ /* * Name classification hierarchy. 
* * IDENT is the lexeme returned by the lexer for identifiers that match * no known keyword. In most cases, we can accept certain keywords as * names, not only IDENTs. We prefer to accept as many such keywords * as possible to minimize the impact of "reserved words" on programmers. * So, we divide names into several possible classes. The classification * is chosen in part to make keywords acceptable as names wherever possible. */ /* Column identifier --- names that can be column, table, etc names. */ ColId: IDENT { $$ = $1; } | unreserved_keyword { $$ = pstrdup($1); } | col_name_keyword { $$ = pstrdup($1); } ; ColIdOrString: ColId { $$ = $1; } | SCONST { $$ = $1; } ; /* Type/function identifier --- names that can be type or function names. */ type_function_name: IDENT { $$ = $1; } | unreserved_keyword { $$ = pstrdup($1); } | type_func_name_keyword { $$ = pstrdup($1); } ; any_name: ColId { $$ = list_make1(makeString($1)); } | ColId attrs { $$ = lcons(makeString($1), $2); } ; attrs: '.' attr_name { $$ = list_make1(makeString($2)); } | attrs '.' attr_name { $$ = lappend($1, makeString($3)); } ; opt_name_list: '(' name_list ')' { $$ = $2; } | /*EMPTY*/ { $$ = NIL; } ; param_name: type_function_name ; /* Any not-fully-reserved word --- these names can be, eg, role names. */ /* Column label --- allowed labels in "AS" clauses. * This presently includes *all* Postgres keywords. */ ColLabel: IDENT { $$ = $1; } | unreserved_keyword { $$ = pstrdup($1); } | col_name_keyword { $$ = pstrdup($1); } | type_func_name_keyword { $$ = pstrdup($1); } | reserved_keyword { $$ = pstrdup($1); } ; ColLabelOrString: ColLabel { $$ = $1; } | SCONST { $$ = $1; } ;
{'repo_name': 'cwida/duckdb', 'stars': '1359', 'repo_language': 'C++', 'file_name': 'expr.hh', 'mime_type': 'text/x-c++', 'hash': -4708390464472595576, 'source_dataset': 'data'}
package gitkit

import (
	"bytes"
	"crypto/rand"
	"crypto/rsa"
	"crypto/x509"
	"encoding/pem"
	"errors"
	"fmt"
	"io"
	"io/ioutil"
	"log"
	"net"
	"os"
	"os/exec"
	"path/filepath"
	"strings"

	"golang.org/x/crypto/ssh"
)

// Sentinel errors returned by the lifecycle methods below.
var (
	ErrAlreadyStarted = errors.New("server has already been started")
	ErrNoListener     = errors.New("cannot call Serve() before Listen()")
)

// PublicKey describes a client public key as returned by the user-supplied
// PublicKeyLookupFunc. Id is propagated to git processes via GITKIT_KEY.
type PublicKey struct {
	Id          string
	Name        string
	Fingerprint string
	Content     string
}

// SSH is a git-over-SSH server. Configure it via NewSSH, optionally set
// PublicKeyLookupFunc (required when config.Auth is true), then call
// ListenAndServe (or Listen + Serve).
type SSH struct {
	listener  net.Listener
	sshconfig *ssh.ServerConfig
	config    *Config
	// PublicKeyLookupFunc maps an authorized_keys-formatted key string to a
	// PublicKey record, or returns nil to reject the key.
	PublicKeyLookupFunc func(string) (*PublicKey, error)
}

// NewSSH creates a server for the given configuration. The config is copied,
// so later changes to the caller's value do not affect the server.
func NewSSH(config Config) *SSH {
	s := &SSH{config: &config}

	// Use PATH if full path is not specified
	if s.config.GitPath == "" {
		s.config.GitPath = "git"
	}

	return s
}

// fileExists reports whether path exists (any stat success, or an
// "already exists" error, counts as existing).
func fileExists(path string) bool {
	_, err := os.Stat(path)
	return err == nil || os.IsExist(err)
}

// cleanCommand strips everything before the first occurrence of "git" in the
// raw request payload; if "git" is absent the payload is returned unchanged.
func cleanCommand(cmd string) string {
	i := strings.Index(cmd, "git")
	if i == -1 {
		return cmd
	}
	return cmd[i:]
}

// execCommandBytes runs cmdname with args and returns captured stdout,
// stderr, and the Run error.
func execCommandBytes(cmdname string, args ...string) ([]byte, []byte, error) {
	bufOut := new(bytes.Buffer)
	bufErr := new(bytes.Buffer)

	cmd := exec.Command(cmdname, args...)
	cmd.Stdout = bufOut
	cmd.Stderr = bufErr

	err := cmd.Run()
	return bufOut.Bytes(), bufErr.Bytes(), err
}

// execCommand is execCommandBytes with string-typed output.
func execCommand(cmdname string, args ...string) (string, string, error) {
	bufOut, bufErr, err := execCommandBytes(cmdname, args...)
	return string(bufOut), string(bufErr), err
}

// handleConnection services the channels of one authenticated SSH connection.
// Only "session" channels are accepted; on each, "env" and "exec" requests
// are handled and the git command from "exec" is spawned with its stdio wired
// to the channel. keyID is exported to the child as GITKIT_KEY.
func (s *SSH) handleConnection(keyID string, chans <-chan ssh.NewChannel) {
	for newChan := range chans {
		if newChan.ChannelType() != "session" {
			newChan.Reject(ssh.UnknownChannelType, "unknown channel type")
			continue
		}

		ch, reqs, err := newChan.Accept()
		if err != nil {
			log.Printf("error accepting channel: %v", err)
			continue
		}

		go func(in <-chan *ssh.Request) {
			defer ch.Close()
			for req := range in {
				// NOTE(review): req.Payload starts with a binary length
				// prefix (RFC 4254 encoding); cleanCommand plus the
				// TrimLeft/Replace calls below strip it heuristically
				// rather than decoding it — confirm against the protocol.
				payload := cleanCommand(string(req.Payload))

				switch req.Type {
				case "env":
					log.Printf("ssh: incoming env request: %s\n", payload)

					args := strings.Split(strings.Replace(payload, "\x00", "", -1), "\v")
					if len(args) != 2 {
						log.Printf("env: invalid env arguments: '%#v'", args)
						continue
					}

					args[0] = strings.TrimLeft(args[0], "\x04")
					if len(args[0]) == 0 {
						log.Printf("env: invalid key from payload: %s", payload)
						continue
					}

					// NOTE(review): this runs `env KEY=VALUE` as a separate
					// child process, which cannot affect the environment of
					// the later git command — confirm the intent here.
					_, _, err := execCommandBytes("env", args[0]+"="+args[1])
					if err != nil {
						log.Printf("env: %v", err)
						return
					}
				case "exec":
					log.Printf("ssh: incoming exec request: %s\n", payload)

					cmdName := strings.TrimLeft(payload, "'()")
					log.Printf("ssh: payload '%v'", cmdName)

					if strings.HasPrefix(cmdName, "\x00") {
						cmdName = strings.Replace(cmdName, "\x00", "", -1)[1:]
					}

					gitcmd, err := ParseGitCommand(cmdName)
					if err != nil {
						log.Println("ssh: error parsing command:", err)
						ch.Write([]byte("Invalid command.\r\n"))
						return
					}

					// Optionally auto-create the target repository on first use.
					if !repoExists(filepath.Join(s.config.Dir, gitcmd.Repo)) && s.config.AutoCreate == true {
						err := initRepo(gitcmd.Repo, s.config)
						if err != nil {
							logError("repo-init", err)
							return
						}
					}

					// NOTE(review): gitcmd.Command is executed directly;
					// s.config.GitPath (set up in NewSSH) is not consulted
					// here — confirm whether that is intentional.
					cmd := exec.Command(gitcmd.Command, gitcmd.Repo)
					cmd.Dir = s.config.Dir
					cmd.Env = append(os.Environ(), "GITKIT_KEY="+keyID)
					// cmd.Env = append(os.Environ(), "SSH_ORIGINAL_COMMAND="+cmdName)

					stdout, err := cmd.StdoutPipe()
					if err != nil {
						log.Printf("ssh: cant open stdout pipe: %v", err)
						return
					}

					stderr, err := cmd.StderrPipe()
					if err != nil {
						log.Printf("ssh: cant open stderr pipe: %v", err)
						return
					}

					input, err := cmd.StdinPipe()
					if err != nil {
						log.Printf("ssh: cant open stdin pipe: %v", err)
						return
					}

					if err = cmd.Start(); err != nil {
						log.Printf("ssh: start error: %v", err)
						return
					}

					// Acknowledge the exec request, then pump the channel
					// into git's stdin concurrently while draining stdout
					// and stderr back to the client.
					req.Reply(true, nil)
					go io.Copy(input, ch)
					io.Copy(ch, stdout)
					io.Copy(ch.Stderr(), stderr)

					if err = cmd.Wait(); err != nil {
						log.Printf("ssh: command failed: %v", err)
						return
					}

					// Report a zero exit status (uint32 big-endian) to the client.
					ch.SendRequest("exit-status", false, []byte{0, 0, 0, 0})
					return
				default:
					ch.Write([]byte("Unsupported request type.\r\n"))
					log.Println("ssh: unsupported req type:", req.Type)
					return
				}
			}
		}(reqs)
	}
}

// createServerKey generates a 2048-bit RSA host key, writing the PEM private
// key (mode 0600) to config.KeyPath() and the authorized_keys-format public
// key (mode 0644) to config.KeyPath()+".pub".
func (s *SSH) createServerKey() error {
	if err := os.MkdirAll(s.config.KeyDir, os.ModePerm); err != nil {
		return err
	}

	privateKey, err := rsa.GenerateKey(rand.Reader, 2048)
	if err != nil {
		return err
	}

	privateKeyFile, err := os.Create(s.config.KeyPath())
	if err != nil {
		return err
	}

	if err := os.Chmod(s.config.KeyPath(), 0600); err != nil {
		return err
	}

	defer privateKeyFile.Close()
	// NOTE(review): `err` has not been reassigned since the os.Create check
	// above, so this branch is dead code — candidate for removal.
	if err != nil {
		return err
	}

	privateKeyPEM := &pem.Block{Type: "RSA PRIVATE KEY", Bytes: x509.MarshalPKCS1PrivateKey(privateKey)}
	if err := pem.Encode(privateKeyFile, privateKeyPEM); err != nil {
		return err
	}

	pubKeyPath := s.config.KeyPath() + ".pub"
	pub, err := ssh.NewPublicKey(&privateKey.PublicKey)
	if err != nil {
		return err
	}

	return ioutil.WriteFile(pubKeyPath, ssh.MarshalAuthorizedKey(pub), 0644)
}

// setup builds the ssh.ServerConfig: wires public-key auth through
// PublicKeyLookupFunc (or disables client auth entirely when config.Auth is
// false), creating the host key on first run and loading it into the config.
func (s *SSH) setup() error {
	config := &ssh.ServerConfig{
		ServerVersion: fmt.Sprintf("SSH-2.0-gitkit %s", Version),
	}

	if s.config.KeyDir == "" {
		return fmt.Errorf("key directory is not provided")
	}

	if !s.config.Auth {
		config.NoClientAuth = true
	} else {
		if s.PublicKeyLookupFunc == nil {
			return fmt.Errorf("public key lookup func is not provided")
		}

		config.PublicKeyCallback = func(conn ssh.ConnMetadata, key ssh.PublicKey) (*ssh.Permissions, error) {
			pkey, err := s.PublicKeyLookupFunc(strings.TrimSpace(string(ssh.MarshalAuthorizedKey(key))))
			if err != nil {
				return nil, err
			}

			if pkey == nil {
				return nil, fmt.Errorf("auth handler did not return a key")
			}

			// Stash the key id in the connection permissions; Serve reads
			// it back out and forwards it to handleConnection.
			return &ssh.Permissions{Extensions: map[string]string{"key-id": pkey.Id}}, nil
		}
	}

	keypath := s.config.KeyPath()
	if !fileExists(keypath) {
		if err := s.createServerKey(); err != nil {
			return err
		}
	}

	privateBytes, err := ioutil.ReadFile(keypath)
	if err != nil {
		return err
	}

	private, err := ssh.ParsePrivateKey(privateBytes)
	if err != nil {
		return err
	}

	config.AddHostKey(private)
	s.sshconfig = config
	return nil
}

// Listen prepares the server (SSH config, repo dir setup) and binds a TCP
// listener on bind. Returns ErrAlreadyStarted if a listener already exists.
func (s *SSH) Listen(bind string) error {
	if s.listener != nil {
		return ErrAlreadyStarted
	}
	if err := s.setup(); err != nil {
		return err
	}
	if err := s.config.Setup(); err != nil {
		return err
	}

	var err error
	s.listener, err = net.Listen("tcp", bind)
	if err != nil {
		return err
	}
	return nil
}

// Serve accepts connections until Accept fails (e.g. after Stop closes the
// listener) and handles each connection's SSH handshake on its own goroutine.
// Returns ErrNoListener if Listen was not called first.
func (s *SSH) Serve() error {
	if s.listener == nil {
		return ErrNoListener
	}

	for {
		// wait for connection or Stop()
		conn, err := s.listener.Accept()
		if err != nil {
			return err
		}

		go func() {
			log.Printf("ssh: handshaking for %s", conn.RemoteAddr())

			sConn, chans, reqs, err := ssh.NewServerConn(conn, s.sshconfig)
			if err != nil {
				if err == io.EOF {
					log.Printf("ssh: handshaking was terminated: %v", err)
				} else {
					log.Printf("ssh: error on handshaking: %v", err)
				}
				return
			}

			log.Printf("ssh: connection from %s (%s)", sConn.RemoteAddr(), sConn.ClientVersion())

			// When auth is on and a fixed git user is configured, refuse
			// logins under any other username.
			if s.config.Auth && s.config.GitUser != "" && sConn.User() != s.config.GitUser {
				sConn.Close()
				return
			}

			// Recover the key id stored by the PublicKeyCallback in setup.
			keyId := ""
			if sConn.Permissions != nil {
				keyId = sConn.Permissions.Extensions["key-id"]
			}

			go ssh.DiscardRequests(reqs)
			go s.handleConnection(keyId, chans)
		}()
	}
}

// ListenAndServe is Listen followed by Serve on the same bind address.
func (s *SSH) ListenAndServe(bind string) error {
	if err := s.Listen(bind); err != nil {
		return err
	}
	return s.Serve()
}

// Stop stops the server if it has been started, otherwise it is a no-op.
func (s *SSH) Stop() error {
	if s.listener == nil {
		return nil
	}
	defer func() { s.listener = nil }()
	return s.listener.Close()
}

// Address returns the network address of the listener. This is in
// particular useful when binding to :0 to get a free port assigned by
// the OS.
func (s *SSH) Address() string {
	if s.listener != nil {
		return s.listener.Addr().String()
	}
	return ""
}
{'repo_name': 'sosedoff/gitkit', 'stars': '140', 'repo_language': 'Go', 'file_name': 'codec.go', 'mime_type': 'text/plain', 'hash': 6978420503412440439, 'source_dataset': 'data'}
import { gcd } from './gcd';

/**
 * Computes the least common multiple of two numbers.
 * Defined as |num1 * num2| / gcd(|num1|, |num2|); by convention the result
 * is 0 whenever either argument is 0.
 * @returns a non-negative number
 */
export const lcm = (num1: number, num2: number) => {
  if (num1 === 0 || num2 === 0) {
    return 0;
  }
  num1 = Math.abs(num1);
  num2 = Math.abs(num2);
  return (num1 * num2) / gcd(num1, num2);
};

/**
 * Computes the least common multiple of every number in the array by folding
 * pairwise: lcm(a, b, c) === lcm(lcm(a, b), c).
 * Delegates each step to lcm() so that zero short-circuiting and the
 * Math.abs normalization of negative inputs match the two-argument version
 * (the previous inline formula skipped both).
 * An empty array yields undefined, unchanged from the original contract.
 */
export const lcmArray = (num: number[]) => {
  let result = num[0];
  for (let i = 1; i < num.length; i++) {
    // Reuse lcm() instead of repeating the formula so sign/zero handling
    // stays consistent between the scalar and array versions.
    result = lcm(num[i], result);
  }
  return result;
};
{'repo_name': 'PacktPublishing/Learning-JavaScript-Data-Structures-and-Algorithms-Third-Edition', 'stars': '276', 'repo_language': 'JavaScript', 'file_name': 'settings.json', 'mime_type': 'text/plain', 'hash': -2313208965103303815, 'source_dataset': 'data'}
{ "parent": "buildcraftfactory:block/chute", "display": { "gui": { "rotation": [30, 225, 0], "translation": [0, 0, 0], "scale": [0.625, 0.625, 0.625] }, "ground": { "rotation": [0, 0, 0], "translation": [0, 3, 0], "scale": [0.25, 0.25, 0.25] }, "fixed": { "rotation": [0, 0, 0], "translation": [0, 0, 0], "scale": [0.5, 0.5, 0.5] }, "thirdperson_righthand": { "rotation": [75, 45, 0], "translation": [0, 2.5, 0], "scale": [0.375, 0.375, 0.375] }, "firstperson_righthand": { "rotation": [0, 45, 0], "translation": [0, 0, 0], "scale": [0.40, 0.40, 0.40] }, "firstperson_lefthand": { "rotation": [0, 225, 0], "translation": [0, 0, 0], "scale": [0.40, 0.40, 0.40] } } }
{'repo_name': 'BuildCraft/BuildCraft', 'stars': '1167', 'repo_language': 'Java', 'file_name': 'buildcraft.checkstyle', 'mime_type': 'text/xml', 'hash': 4577835617349668191, 'source_dataset': 'data'}
/*
            Copyright Oliver Kowalke 2009.
   Distributed under the Boost Software License, Version 1.0.
      (See accompanying file LICENSE_1_0.txt or copy at
          http://www.boost.org/LICENSE_1_0.txt)
*/

/****************************************************************************************
 * Context-record layout on the context stack (i386, SysV-style, Mach-O symbols):       *
 *  ---------------------------------------------------------------------------------- *
 *  |    0    |    1    |    2    |    3    |    4     |    5    |    6    |    7    | *
 *  ---------------------------------------------------------------------------------- *
 *  |   0x0   |   0x4   |   0x8   |   0xc   |   0x10   |  0x14   |  0x18   |  0x1c   | *
 *  ---------------------------------------------------------------------------------- *
 *  | fc_mxcsr|fc_x87_cw|   EDI   |   ESI   |   EBX    |   EBP   |   EIP   | hidden  | *
 *  ---------------------------------------------------------------------------------- *
 *  ---------------------------------------------------------------------------------- *
 *  |    8    |    9    |   10    |   11    |    12    |   13    |   14    |   15    | *
 *  ---------------------------------------------------------------------------------- *
 *  |  0x20   |  0x24   |                                                            | *
 *  ---------------------------------------------------------------------------------- *
 *  |   to    |  data   |                                                            | *
 *  ---------------------------------------------------------------------------------- *
 ****************************************************************************************/

/* make_fcontext(sp, size, fn): carves a context record out of the given
   stack and returns a pointer to it; the first jump_fcontext() into that
   record resumes at `trampoline`, which enters fn with a transport_t. */

.text
.globl _make_fcontext
.align 2
_make_fcontext:
    /* first arg of make_fcontext() == top of context-stack */
    movl  0x4(%esp), %eax

    /* reserve space for first argument of context-function
       eax might already point to a 16byte border */
    leal  -0x8(%eax), %eax

    /* shift address in EAX to lower 16 byte boundary */
    andl  $-16, %eax

    /* reserve space for context-data on context-stack */
    leal  -0x28(%eax), %eax

    /* third arg of make_fcontext() == address of context-function */
    /* stored in EBX */
    movl  0xc(%esp), %ecx
    movl  %ecx, 0x10(%eax)

    /* save MMX control- and status-word */
    stmxcsr  (%eax)
    /* save x87 control-word */
    fnstcw  0x4(%eax)

    /* return transport_t */
    /* FCTX == EDI, DATA == ESI */
    leal  0x8(%eax), %ecx
    movl  %ecx, 0x1c(%eax)

    /* PIC trick: call-to-next-instruction puts EIP on the stack so the
       absolute addresses of the local labels can be computed at run time. */
    /* compute abs address of label trampoline */
    call  1f
    /* address of trampoline 1 */
1:  popl  %ecx
    /* compute abs address of label trampoline */
    addl  $trampoline-1b, %ecx
    /* save address of trampoline as return address */
    /* will be entered after calling jump_fcontext() first time */
    movl  %ecx, 0x18(%eax)

    /* compute abs address of label finish */
    call  2f
    /* address of label 2 */
2:  popl  %ecx
    /* compute abs address of label finish */
    addl  $finish-2b, %ecx
    /* save address of finish as return-address for context-function */
    /* will be entered after context-function returns */
    movl  %ecx, 0x14(%eax)

    ret /* return pointer to context-data */

trampoline:
    /* move transport_t for entering context-function */
    movl  %edi, (%esp)
    movl  %esi, 0x4(%esp)
    /* NOTE(review): EBP restored by jump_fcontext is pushed as the fake
       return address slot for the context-function frame — confirm against
       jump_fcontext's restore order. */
    pushl %ebp
    /* jump to context-function */
    jmp *%ebx

finish:
    /* reached only if the context-function returns: terminate the process */
    /* exit code is zero */
    xorl  %eax, %eax
    movl  %eax, (%esp)
    /* exit application */
    call  __exit
    hlt
{'repo_name': 'RichieSams/FiberTaskingLib', 'stars': '588', 'repo_language': 'C++', 'file_name': 'catch.hpp', 'mime_type': 'text/x-c++', 'hash': 8273875699569428747, 'source_dataset': 'data'}
/* Copyright 2015 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#include "tensorflow/core/kernels/cwise_ops_common.h"

namespace tensorflow {
// Registers the element-wise "LogicalAnd" op as a BinaryOp using the
// functor::logical_and coefficient-wise functor, for the CPU device.
REGISTER_KERNEL_BUILDER(Name("LogicalAnd").Device(DEVICE_CPU),
                        BinaryOp<CPUDevice, functor::logical_and>);
#if GOOGLE_CUDA
// Same op registered for the GPU device; only compiled when CUDA support
// is enabled at build time.
REGISTER_KERNEL_BUILDER(Name("LogicalAnd").Device(DEVICE_GPU),
                        BinaryOp<GPUDevice, functor::logical_and>);
#endif
}  // namespace tensorflow
{'repo_name': 'miyosuda/TensorFlowAndroidMNIST', 'stars': '264', 'repo_language': 'C++', 'file_name': 'beginner.py', 'mime_type': 'text/x-python', 'hash': -6865199220180304278, 'source_dataset': 'data'}
Microsoft Visual Studio Solution File, Format Version 11.00 # Visual Studio 2010 Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "DOMParser", "DOMParser\DOMParser_vs100.vcxproj", "{70F2F655-67D5-32A1-A99B-D4903547DB3E}" EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "DOMWriter", "DOMWriter\DOMWriter_vs100.vcxproj", "{A3CBDFA6-6261-3C04-B1FD-51AA20763BB8}" EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "PrettyPrint", "PrettyPrint\PrettyPrint_vs100.vcxproj", "{DFA97011-8DD4-3A84-A0C9-EB2101BD6082}" EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "SAXParser", "SAXParser\SAXParser_vs100.vcxproj", "{2A54653D-9F55-348B-8F79-A3E454563AE3}" EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution debug_shared|Win32 = debug_shared|Win32 release_shared|Win32 = release_shared|Win32 debug_static_mt|Win32 = debug_static_mt|Win32 release_static_mt|Win32 = release_static_mt|Win32 debug_static_md|Win32 = debug_static_md|Win32 release_static_md|Win32 = release_static_md|Win32 EndGlobalSection GlobalSection(ProjectConfigurationPlatforms) = postSolution {70F2F655-67D5-32A1-A99B-D4903547DB3E}.debug_shared|Win32.ActiveCfg = debug_shared|Win32 {70F2F655-67D5-32A1-A99B-D4903547DB3E}.debug_shared|Win32.Build.0 = debug_shared|Win32 {70F2F655-67D5-32A1-A99B-D4903547DB3E}.debug_shared|Win32.Deploy.0 = debug_shared|Win32 {70F2F655-67D5-32A1-A99B-D4903547DB3E}.release_shared|Win32.ActiveCfg = release_shared|Win32 {70F2F655-67D5-32A1-A99B-D4903547DB3E}.release_shared|Win32.Build.0 = release_shared|Win32 {70F2F655-67D5-32A1-A99B-D4903547DB3E}.release_shared|Win32.Deploy.0 = release_shared|Win32 {70F2F655-67D5-32A1-A99B-D4903547DB3E}.debug_static_mt|Win32.ActiveCfg = debug_static_mt|Win32 {70F2F655-67D5-32A1-A99B-D4903547DB3E}.debug_static_mt|Win32.Build.0 = debug_static_mt|Win32 {70F2F655-67D5-32A1-A99B-D4903547DB3E}.debug_static_mt|Win32.Deploy.0 = debug_static_mt|Win32 
{70F2F655-67D5-32A1-A99B-D4903547DB3E}.release_static_mt|Win32.ActiveCfg = release_static_mt|Win32 {70F2F655-67D5-32A1-A99B-D4903547DB3E}.release_static_mt|Win32.Build.0 = release_static_mt|Win32 {70F2F655-67D5-32A1-A99B-D4903547DB3E}.release_static_mt|Win32.Deploy.0 = release_static_mt|Win32 {70F2F655-67D5-32A1-A99B-D4903547DB3E}.debug_static_md|Win32.ActiveCfg = debug_static_md|Win32 {70F2F655-67D5-32A1-A99B-D4903547DB3E}.debug_static_md|Win32.Build.0 = debug_static_md|Win32 {70F2F655-67D5-32A1-A99B-D4903547DB3E}.debug_static_md|Win32.Deploy.0 = debug_static_md|Win32 {70F2F655-67D5-32A1-A99B-D4903547DB3E}.release_static_md|Win32.ActiveCfg = release_static_md|Win32 {70F2F655-67D5-32A1-A99B-D4903547DB3E}.release_static_md|Win32.Build.0 = release_static_md|Win32 {70F2F655-67D5-32A1-A99B-D4903547DB3E}.release_static_md|Win32.Deploy.0 = release_static_md|Win32 {A3CBDFA6-6261-3C04-B1FD-51AA20763BB8}.debug_shared|Win32.ActiveCfg = debug_shared|Win32 {A3CBDFA6-6261-3C04-B1FD-51AA20763BB8}.debug_shared|Win32.Build.0 = debug_shared|Win32 {A3CBDFA6-6261-3C04-B1FD-51AA20763BB8}.debug_shared|Win32.Deploy.0 = debug_shared|Win32 {A3CBDFA6-6261-3C04-B1FD-51AA20763BB8}.release_shared|Win32.ActiveCfg = release_shared|Win32 {A3CBDFA6-6261-3C04-B1FD-51AA20763BB8}.release_shared|Win32.Build.0 = release_shared|Win32 {A3CBDFA6-6261-3C04-B1FD-51AA20763BB8}.release_shared|Win32.Deploy.0 = release_shared|Win32 {A3CBDFA6-6261-3C04-B1FD-51AA20763BB8}.debug_static_mt|Win32.ActiveCfg = debug_static_mt|Win32 {A3CBDFA6-6261-3C04-B1FD-51AA20763BB8}.debug_static_mt|Win32.Build.0 = debug_static_mt|Win32 {A3CBDFA6-6261-3C04-B1FD-51AA20763BB8}.debug_static_mt|Win32.Deploy.0 = debug_static_mt|Win32 {A3CBDFA6-6261-3C04-B1FD-51AA20763BB8}.release_static_mt|Win32.ActiveCfg = release_static_mt|Win32 {A3CBDFA6-6261-3C04-B1FD-51AA20763BB8}.release_static_mt|Win32.Build.0 = release_static_mt|Win32 {A3CBDFA6-6261-3C04-B1FD-51AA20763BB8}.release_static_mt|Win32.Deploy.0 = release_static_mt|Win32 
{A3CBDFA6-6261-3C04-B1FD-51AA20763BB8}.debug_static_md|Win32.ActiveCfg = debug_static_md|Win32 {A3CBDFA6-6261-3C04-B1FD-51AA20763BB8}.debug_static_md|Win32.Build.0 = debug_static_md|Win32 {A3CBDFA6-6261-3C04-B1FD-51AA20763BB8}.debug_static_md|Win32.Deploy.0 = debug_static_md|Win32 {A3CBDFA6-6261-3C04-B1FD-51AA20763BB8}.release_static_md|Win32.ActiveCfg = release_static_md|Win32 {A3CBDFA6-6261-3C04-B1FD-51AA20763BB8}.release_static_md|Win32.Build.0 = release_static_md|Win32 {A3CBDFA6-6261-3C04-B1FD-51AA20763BB8}.release_static_md|Win32.Deploy.0 = release_static_md|Win32 {DFA97011-8DD4-3A84-A0C9-EB2101BD6082}.debug_shared|Win32.ActiveCfg = debug_shared|Win32 {DFA97011-8DD4-3A84-A0C9-EB2101BD6082}.debug_shared|Win32.Build.0 = debug_shared|Win32 {DFA97011-8DD4-3A84-A0C9-EB2101BD6082}.debug_shared|Win32.Deploy.0 = debug_shared|Win32 {DFA97011-8DD4-3A84-A0C9-EB2101BD6082}.release_shared|Win32.ActiveCfg = release_shared|Win32 {DFA97011-8DD4-3A84-A0C9-EB2101BD6082}.release_shared|Win32.Build.0 = release_shared|Win32 {DFA97011-8DD4-3A84-A0C9-EB2101BD6082}.release_shared|Win32.Deploy.0 = release_shared|Win32 {DFA97011-8DD4-3A84-A0C9-EB2101BD6082}.debug_static_mt|Win32.ActiveCfg = debug_static_mt|Win32 {DFA97011-8DD4-3A84-A0C9-EB2101BD6082}.debug_static_mt|Win32.Build.0 = debug_static_mt|Win32 {DFA97011-8DD4-3A84-A0C9-EB2101BD6082}.debug_static_mt|Win32.Deploy.0 = debug_static_mt|Win32 {DFA97011-8DD4-3A84-A0C9-EB2101BD6082}.release_static_mt|Win32.ActiveCfg = release_static_mt|Win32 {DFA97011-8DD4-3A84-A0C9-EB2101BD6082}.release_static_mt|Win32.Build.0 = release_static_mt|Win32 {DFA97011-8DD4-3A84-A0C9-EB2101BD6082}.release_static_mt|Win32.Deploy.0 = release_static_mt|Win32 {DFA97011-8DD4-3A84-A0C9-EB2101BD6082}.debug_static_md|Win32.ActiveCfg = debug_static_md|Win32 {DFA97011-8DD4-3A84-A0C9-EB2101BD6082}.debug_static_md|Win32.Build.0 = debug_static_md|Win32 {DFA97011-8DD4-3A84-A0C9-EB2101BD6082}.debug_static_md|Win32.Deploy.0 = debug_static_md|Win32 
{DFA97011-8DD4-3A84-A0C9-EB2101BD6082}.release_static_md|Win32.ActiveCfg = release_static_md|Win32 {DFA97011-8DD4-3A84-A0C9-EB2101BD6082}.release_static_md|Win32.Build.0 = release_static_md|Win32 {DFA97011-8DD4-3A84-A0C9-EB2101BD6082}.release_static_md|Win32.Deploy.0 = release_static_md|Win32 {2A54653D-9F55-348B-8F79-A3E454563AE3}.debug_shared|Win32.ActiveCfg = debug_shared|Win32 {2A54653D-9F55-348B-8F79-A3E454563AE3}.debug_shared|Win32.Build.0 = debug_shared|Win32 {2A54653D-9F55-348B-8F79-A3E454563AE3}.debug_shared|Win32.Deploy.0 = debug_shared|Win32 {2A54653D-9F55-348B-8F79-A3E454563AE3}.release_shared|Win32.ActiveCfg = release_shared|Win32 {2A54653D-9F55-348B-8F79-A3E454563AE3}.release_shared|Win32.Build.0 = release_shared|Win32 {2A54653D-9F55-348B-8F79-A3E454563AE3}.release_shared|Win32.Deploy.0 = release_shared|Win32 {2A54653D-9F55-348B-8F79-A3E454563AE3}.debug_static_mt|Win32.ActiveCfg = debug_static_mt|Win32 {2A54653D-9F55-348B-8F79-A3E454563AE3}.debug_static_mt|Win32.Build.0 = debug_static_mt|Win32 {2A54653D-9F55-348B-8F79-A3E454563AE3}.debug_static_mt|Win32.Deploy.0 = debug_static_mt|Win32 {2A54653D-9F55-348B-8F79-A3E454563AE3}.release_static_mt|Win32.ActiveCfg = release_static_mt|Win32 {2A54653D-9F55-348B-8F79-A3E454563AE3}.release_static_mt|Win32.Build.0 = release_static_mt|Win32 {2A54653D-9F55-348B-8F79-A3E454563AE3}.release_static_mt|Win32.Deploy.0 = release_static_mt|Win32 {2A54653D-9F55-348B-8F79-A3E454563AE3}.debug_static_md|Win32.ActiveCfg = debug_static_md|Win32 {2A54653D-9F55-348B-8F79-A3E454563AE3}.debug_static_md|Win32.Build.0 = debug_static_md|Win32 {2A54653D-9F55-348B-8F79-A3E454563AE3}.debug_static_md|Win32.Deploy.0 = debug_static_md|Win32 {2A54653D-9F55-348B-8F79-A3E454563AE3}.release_static_md|Win32.ActiveCfg = release_static_md|Win32 {2A54653D-9F55-348B-8F79-A3E454563AE3}.release_static_md|Win32.Build.0 = release_static_md|Win32 {2A54653D-9F55-348B-8F79-A3E454563AE3}.release_static_md|Win32.Deploy.0 = release_static_md|Win32 
EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE EndGlobalSection EndGlobal
{'repo_name': 'toggl-open-source/toggldesktop', 'stars': '1359', 'repo_language': 'C++', 'file_name': 'InfoPlist.strings', 'mime_type': 'text/plain', 'hash': -6343215128075987033, 'source_dataset': 'data'}
# ServiceAccount under which the metrics-server pod runs.
apiVersion: v1
kind: ServiceAccount
metadata:
  name: metrics-server
  namespace: kube-system
---
# Deployment for metrics-server, scraping node metrics via the kubelet
# summary API (see the --source flag below).
# NOTE(review): extensions/v1beta1 Deployments are deprecated and removed
# in newer Kubernetes releases (apps/v1 is the replacement) — confirm the
# target cluster version before changing.
apiVersion: extensions/v1beta1
kind: Deployment
metadata:
  name: metrics-server
  namespace: kube-system
  labels:
    k8s-app: metrics-server
spec:
  selector:
    matchLabels:
      k8s-app: metrics-server
  template:
    metadata:
      name: metrics-server
      labels:
        k8s-app: metrics-server
    spec:
      serviceAccountName: metrics-server
      containers:
      - name: metrics-server
        image: gcr.io/google_containers/metrics-server-amd64:v0.2.1
        imagePullPolicy: Always
        command:
        - /metrics-server
        # Read metrics from the kubelet summary API of the local cluster.
        - --source=kubernetes.summary_api:''
{'repo_name': 'feiskyer/kubernetes-handbook', 'stars': '3736', 'repo_language': 'Makefile', 'file_name': 'index.md', 'mime_type': 'text/plain', 'hash': -1511791955927866999, 'source_dataset': 'data'}
//
//  AVIMAudioMessage.h
//  AVOSCloudIM
//
//  Created by Qihe Bian on 1/12/15.
//  Copyright (c) 2015 LeanCloud Inc. All rights reserved.
//

#import "AVIMTypedMessage.h"

NS_ASSUME_NONNULL_BEGIN

/**
 * Audio Message. Can be created by the audio's file path.
 *
 * All properties are read-only; they are presumably populated from the
 * underlying audio file when the message is created (see AVIMTypedMessage).
 */
@interface AVIMAudioMessage : AVIMTypedMessage <AVIMTypedMessageSubclassing>

/// File size in bytes.
@property(nonatomic, assign, readonly) uint64_t size;

/// Audio's duration in seconds.
@property(nonatomic, assign, readonly) float duration;

/// Audio format, mp3, aac, etc. Simply get it by the file extension.
/// May be nil when no extension is available (declared nullable).
@property(nonatomic, copy, readonly, nullable) NSString *format;

@end

NS_ASSUME_NONNULL_END
{'repo_name': 'Brances/ZMBCY-iOS', 'stars': '189', 'repo_language': 'Objective-C', 'file_name': 'Info.plist', 'mime_type': 'text/xml', 'hash': -5078548899846981869, 'source_dataset': 'data'}
/************************************************ Copyright (c) 2016, Xilinx, Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.// Copyright (c) 2015 Xilinx, Inc. 
************************************************/ #include "echo_server_application.hpp" #include <iostream> using namespace hls; int main() { stream<ap_uint<16> > listenPort("listenPort"); stream<bool> listenPortStatus("listenPortStatus"); stream<appNotification> notifications; stream<appReadRequest> readRequest; stream<ap_uint<16> > rxMetaData; stream<axiWord> rxData; stream<ipTuple> openConnection; stream<openStatus> openConStatus; stream<ap_uint<16> > closeConnection; stream<ap_uint<16> > txMetaData; stream<axiWord> txData; stream<ap_int<17> > txStatus; int count = 0; while (count < 50) { echo_server_application( listenPort, listenPortStatus, notifications, readRequest, rxMetaData, rxData, openConnection, openConStatus, closeConnection, txMetaData, txData, txStatus); if (!listenPort.empty()) { listenPort.read(); listenPortStatus.write(true); } count++; } return 0; }
{'repo_name': 'Xilinx/HLx_Examples', 'stars': '226', 'repo_language': 'Matlab', 'file_name': 'image_filter.cpp', 'mime_type': 'text/x-c', 'hash': 5008992410171918434, 'source_dataset': 'data'}
// Code generated by zanzibar
// @generated
// Copyright (c) 2018 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

package module

import (
	zanzibar "github.com/uber/zanzibar/runtime"
)

// Dependencies contains dependencies for the google-now client module.
//
// NOTE(review): this file is marked @generated — regenerate it via the
// zanzibar code generator rather than editing by hand.
type Dependencies struct {
	// Default holds the zanzibar runtime's DefaultDependencies.
	Default *zanzibar.DefaultDependencies
}
{'repo_name': 'uber/zanzibar', 'stars': '210', 'repo_language': 'Go', 'file_name': 'template_files.go', 'mime_type': 'text/plain', 'hash': 8103592207938109717, 'source_dataset': 'data'}
<?php

namespace Ardent\Collection;

/**
 * Tests HashSet membership semantics for every major PHP value category:
 * scalars, objects, resources, arrays, and null.
 */
class HashSetTest extends SetTest {

    /**
     * @var HashSet
     */
    protected $object;

    /**
     * Factory used by the parent SetTest suite to obtain the set under test.
     */
    function instance() {
        return new HashSet();
    }

    /**
     * Exercises has()/add() round-trips for each value kind: before add()
     * the value must be absent, after add() it must be present.
     */
    function test_has() {
        $set = $this->instance();

        // Scalar: integer zero.
        $scalar = 0;
        $this->assertFalse($set->has($scalar));
        $set->add($scalar);
        $this->assertTrue($set->has($scalar));
        // NOTE(review): this expects the string '0' to match after adding
        // int 0 — presumably HashSet's scalar hashing treats them as
        // equivalent; confirm against HashSet's hash implementation.
        $this->assertTrue($set->has('0'));

        // Object identity.
        $object = new \StdClass();
        $this->assertFalse($set->has($object));
        $set->add($object);
        $this->assertTrue($set->has($object));

        // Resource handle (this very file, opened read-only).
        $resource = fopen(__FILE__, 'r');
        $this->assertFalse($set->has($resource));
        $set->add($resource);
        $this->assertTrue($set->has($resource));
        fclose($resource);

        // Empty array.
        $emptyArray = array();
        $this->assertFalse($set->has($emptyArray));
        $set->add($emptyArray);
        $this->assertTrue($set->has($emptyArray));

        // Non-empty array.
        $array = array(0, 1);
        $this->assertFalse($set->has($array));
        $set->add($array);
        $this->assertTrue($set->has($array));

        // Null.
        $null = null;
        $this->assertFalse($set->has($null));
        $set->add($null);
        $this->assertTrue($set->has($null));
    }
}
{'repo_name': 'morrisonlevi/Ardent', 'stars': '627', 'repo_language': 'PHP', 'file_name': 'AvlTreeTest.php', 'mime_type': 'text/x-php', 'hash': -6918952494597019445, 'source_dataset': 'data'}
<properties pageTitle="Configure the Visual Studio Tools for Apache Cordova | Cordova" description="description" services="na" documentationCenter="" authors="jmatthiesen" tags=""/> <tags ms.technology="cordova" ms.prod="visual-studio-dev14" ms.service="na" ms.devlang="javascript" ms.topic="article" ms.tgt_pltfrm="mobile-multiple" ms.workload="na" ms.date="09/10/2015" ms.author="jomatthi"/> # Configure the Visual Studio Tools for Apache Cordova You can download Visual Studio from the [Microsoft Download Center](http://go.microsoft.com/fwlink/p/?linkid=517106). Once you have [installed the tools](install-vs-tools-apache-cordova.md), refer to this topic for additional ways to quickly configure, update, or customize the tools for your environment. * If you choose not to install one or more dependencies with Visual Studio Tools for Apache Cordova, you may need to [install the dependencies manually](#ThirdParty). * If you need to verify the path for a third-party dependency or if you have multiple versions of a dependency installed, see [Override system environment variables](#env-var). * If you are running Visual Studio behind a proxy, see [Configure tools to work with a proxy](#Proxy). * To install, start, and configure the remotebuild agent (previously called vs-mda-remote) for building iOS apps, see the following: * [Install the remote agent and get it running](ios-guide.md) (external topic) * [Generate a new security PIN](#IosPin) * [Configure the remote agent](#IosConfig) * [Generate a new server certificate](#IosCert) * [Verify the remote agent configuration](#IosVerify) * If you see unexpected errors when trying to build the Blank App template, see [Re-install the Cordova CLI pre-processor](#vstac). >**Caution:** If you are migrating a project from an earlier version of Visual Studio, see this [migration information](https://github.com/Microsoft/cordova-docs/blob/master/known-issues/known-issues-vs2015.md#known-issues—visual-studio-2015) (github). 
##<a name="ThirdParty"></a>Install dependencies manually If you choose not to install one or more dependencies with the extension, you can install them later manually. You can install the dependencies in any order, except for Java. You must install and configure Java before you install the Android SDK. Read the following information and use these links to install dependencies manually. * [Joyent Node.js](http://nodejs.org) We recommend installing the x86 version of Node.js. Before installing this software, read about [safely installing Node.js](../develop-apps/change-node-version.md). * [Google Chrome](https://www.google.com/intl/en/chrome/browser/index.html) * [Git Command Line Tools](http://go.microsoft.com/fwlink/?LinkID=396870) When you install Git command line tools, select the option that adds Git to your command prompt path. >**Caution:** Git command line tools 1.9.5 are installed by default. Unexpected failures may occur if you install a version prior to 1.9.0. * [Apache Ant](http://go.microsoft.com/fwlink/?LinkID=396869) * Download and extract Ant to a location like C:/ant-1.x.x * Set the ANT_HOME environment variable to point to the preceding location. * Add %ANT_HOME%\bin to the system path. >**Note:** If you need to set this environment variable manually, see [Override system environment variables](#env-var). * [32-bit Oracle Java 7](http://go.microsoft.com/fwlink/?LinkID=396871) * Set the JAVA_HOME environment variable to C:/Program Files/Java/jdk1.7.0_55 * Add this to the system path: %JAVA_HOME%\bin * To avoid out of memory issues, set a *JAVA_OPTIONS environment variable with at least -Xmx512M in it. >**Note:** If you need to set this environment variable manually, see [Override system environment variables](#env-var). 
* [Android SDK](http://go.microsoft.com/fwlink/?LinkID=396873) with the following SDK packages: * Android SDK Tools (latest version) * Android SDK Platform-tools (latest version) * Android SDK Build-tools (19.1, 19.0.3, and 21) * Android 5.0 (API level 21) with the following packages: * SDK Platform * If you want to use the Google Android Emulator to emulate a 5.0.x device: * ARM EABI v7a System Image * Intel x86 Atom System Image * Google APIs (x86 System Image) * Google APIs (ARM System Image) * If you want to use Cordova 5.0.0 or later: * Android 5.1.x (API level 22) with the following packages: SDK platform The following illustration shows the minimum required packages in the Android SDK Manager. ![Cordova_SDK_Android_Packages](media/configure-vs-tools-apache-cordova/IC795810.png) Set the ADT_HOME environment variable to the SDK installation location. Add this to the system path: %ADT_HOME%\tools;%ADT_HOME%\platform-tools If you need to set this environment variable manually, see [Override system environment variables](#env-var). >**Tip:** If you install the Android SDK to its default location on Windows, it gets installed to C:\Program Files (x86)\Android\android-sdk. * If you want to use the Google Android Emulator to emulate a 5.1.x device: * ARM EABI v7a System Image * Intel x86 Atom System Image * Google APIs (x86 System Image) * Google APIs (ARM System Image) * Apple iTunes ([x86](http://go.microsoft.com/fwlink/?LinkID=397715), [x64](http://go.microsoft.com/fwlink/?LinkID=397313)) * WebSocket4Net (required if you’re developing your app on Windows 7) 1. Download WebSocket4Net(0.9).Binaries.zip from [CodePlex](http://go.microsoft.com/fwlink/?LinkID=403031). 2. Unzip the binaries and then unblock net45\Release\WebSocket4Net.dll. To unblock the DLL, open the file Properties for the DLL and choose Unblock in the General tab (at the bottom of the dialog box). 3. 
After you unblock the DLL, copy net45\Release\WebSocket4Net.dll into the %ProgramFiles(x86)%\Microsoft Visual Studio 14.0\Common7\IDE\CommonExtensions\Microsoft\WebClient\Diagnostics\ToolWindows folder on your computer. ##<a name="env-var"></a>Override system environment variables Visual Studio detects the configurations for the third-party software you’ve installed, and maintains the installation paths in the following environment variables: * **ADT_HOME** points to the Android installation path. * **ANT_HOME** points to the Ant folder on your computer. * **GIT_HOME** points to the Git installation path. * **JAVA_HOME** points to the Java installation path. Visual Studio uses these environment variables when building and running your app. You can view the environment variables and revise their values through the Visual Studio Options dialog box. You might want to override the default settings for one of the following reasons: * Visual Studio was unable to verify the path. In this case, a warning is displayed next to the environment variable. * You have multiple versions of the software installed, and you’d like to use a specific version. * You want your global environment path to be different from the local Visual Studio environment. ### To override the variables 1. On the Visual Studio menu bar, choose **Tools**, **Options**. 4. In the **Options** dialog box, choose **Tools* for Apache Cordova**, and then choose **Environment Variable Overrides**. 2. Make your changes: * To override a value, select its check box, and then revise the value. If the path information is invalid or missing, Visual Studio displays a warning next to that variable. * To reset an environment variable to its default value, clear its check box or choose **Reset to Default**. 3. Choose the **OK** button to save your changes and close the dialog box. 
![Environment variables, warning message](media/configure-vs-tools-apache-cordova/options-dialog.png) ##<a name="IosPin"></a>Generate a new security PIN When you [start the agent](ios-guide.md#remoteAgent) the first time, the generated PIN is valid for a limited amount of time (10 minutes by default). If you don’t connect to the agent before the time expires, or if you want to connect a second client to the agent, you will need to generate a new PIN. ### To generate a new security PIN 1. Stop the agent (or open a second Terminal app window on your Mac and use that to enter the command). 2. From the Terminal app on your Mac, type: remotebuild certificates generate > **Note** If you are running an older version of the agent, the preceding command is not supported. Make sure that you update the remotebuild agent by [re-installing](ios-guide.md#first-install-a-few-things-onto-your-mac). 3. Follow instructions to [start the agent](ios-guide.md#remoteAgent) on your Mac and configure the agent in Visual Studio. ##<a name="IosCert"></a>Generate a new server certificate For security purposes, the server certificates that pair Visual Studio with the remote agent are tied to your Mac’s IP or host name. If these values have changed, you will need to generate a new server certificate, and then reconfigure Visual Studio with the new values. ### To generate a new server certificate 1. Stop the agent. 2. From the Terminal app on your Mac, type: remotebuild certificates reset --hostname=my.hostname.com > **Note** If you are running an older version of the agent, the preceding command is not supported. Make sure that you update the remotebuild agent by [re-installing](ios-guide.md#first-install-a-few-things-onto-your-mac). 3. When prompted, type “Y” and then type Enter. 4. From the Terminal app on your Mac, type: remotebuild certificates generate --hostname=my.hostname.com --hostname is optional. If omitted, the agent will attempt to determine the hostname automatically. 5.
Follow instructions to [start the agent](ios-guide.md#remoteAgent) on your Mac and configure the agent in Visual Studio. ##<a name="IosConfig"></a>Configure the iOS remote agent You can configure the remote agent using various command line options. For example, you can specify the port to listen for build requests and specify the maximum number of builds to maintain on the file system. (By default, the limit is 10\. The agent will remove builds that exceed the maximum on shutdown.) >**Caution:** Many options have changed between vs-mda-remote and remotebuild. ### To configure the remote agent * To see a complete list of agent commands, type: remotebuild --help To see the full list of supported options, type `remotebuild --help <*command*>`. For example, to see options for the certificates parameter, type: remotebuild --help certificates * To disable secure mode and enable simple HTTP based connections, type: remotebuild --secure=false When you use this option, leave the PIN field blank and make sure to set Secure Mode to False when configuring the agent in Visual Studio. * To specify a location for remote agent files, type: remotebuild --serverDir <directory> where _<directory\>_ is a location on your Mac where log files, builds, and server certificates will be placed. For example, the location could be /Users/username/builds. (Builds will be organized by build number in this location.) * To use a background process to capture _stdout_ and _stderr_ to a file (server.log), type: remotebuild > server.log 2>&1 & The server.log file might assist in troubleshooting build issues. * To run the agent by using a configuration file instead of command-line parameters, type: remotebuild --config <path-to-config-file> The configuration file must be in JSON format. The startup options and their values must not include dashes.
To see a documented configuration file, look at the remotebuild/examples/exampleConfig.json folder in the remote agent installation directory, although you must remove the comments in the file that you use for your configuration. An example of a path you might use when running this command is _/Users/<username\>/myConfig.json_. The default path where the agent looks for a configuration file is ~/.taco_home/RemoteBuild.config. ##<a name="IosVerify"></a>Verify the iOS remote agent configuration Once you have [installed the agent](ios-guide.md), you can verify the remote agent configuration. ### To verify the remote agent configuration * With the remote agent running, open a second Terminal app window (choose **Shell**, **New Window**). * From the second Terminal app window on your Mac, type: remotebuild test <same-options-as-first-agent> >**Important:** This command will fail if the agent is not running in a second window, or if the two instances are not using the same configuration options. This command initiates a test build. The output from the command should show the build number and other information about the build, such as its progress. * If you started the server on a port other than 3000, use the following command instead to initiate a test build: remotebuild test --server http://localhost:<portNumber> * To verify that your developer signing identity is set up correctly for device builds (using the Debug and Release configurations in Visual Studio), type: remotebuild test --device * To verify that your distribution signing identity is set up correctly for device builds (using the Debug configuration in Visual Studio), type: remotebuild test --device For more information about app provisioning and certificate signing identities, see [Package Your App Built with Visual Studio Tools for Apache Cordova](./package-and-publish/package-app-built-with-visual-studio.md).
##<a name="vstac"></a>Reinstall the Cordova CLI pre-processor (vs-tac) If you see unexpected errors when trying to build the Blank App template after installing Visual Studio Tools for Apache Cordova, you can try clearing your cache and reinstalling the Cordova CLI pre-processor, vs-tac, on your PC. Typically, this is only necessary if you try to build a Cordova app and see the error Cannot find module *[modulename]*. >**Note**: If you do not see the module error, go through steps in [Resolving build errors](../tips-and-workarounds/general/tips-and-workarounds-general-readme.md) before re-installing vs-tac. ### To try the quick fix * Delete the platforms/*platform* folder for the platform you are targeting (like the platforms/android folder) and then rebuild your project. If you have no errors this time, you don't need to clear the cache. ### To clear the cache 1. Choose **Tools**, **Options**, **Tools for Apache Cordova**, and then choose **Cordova Tools**. 2. Choose **Clear Cordova Cache**. 3. Close and re-open your project. 4. Choose **Build**, **Clean Solution**. 5. Delete the platforms/*platform* folder, like platforms/android. >**Tip:** If you have no errors, you do not need to re-install vs-tac. If you still have the same error, then re-install vs-tac. ### To re-install vs-tac 1. Close Visual Studio. 2. Open a command line and type the following command: npm install -g <path-to-vs-tac> The default path to vs-tac is C:\Program Files (x86)\Microsoft Visual Studio 14.0\Common7\IDE\Extensions\ApacheCordovaTools\packages\vs-tac 3. Re-open Visual Studio. 4. Open your project, choose **Build**, **Clean Solution**. 5. Delete the platforms/*platform* folder, like platforms/android, and then rebuild your project. If this does not resolve the issue, see the [Known Issues](./known-issues/known-issues-general.md). 
##Configure tools to work with a proxy If you are using Visual Studio behind a proxy, such as a corporate firewall, you may need to configure proxy settings for the npm package manager and for git before you can use Visual Studio Tools for Apache Cordova. >**Important:** Using npm proxy settings with recent versions of Node.js can cause Cordova to fail to acquire plugins at the command line or in the configuration designer or when adding platforms required for build. If you encounter unexpected issues (particularly a “TypeError: Request path contains unescaped characters” error), try downgrading Node.js to 0.10.29. ### To configure proxy settings for npm package manager 1. Close Visual Studio. 2. Open a Visual Studio developer command window (Ctrl + Alt + A) and type the following command. npm -g uninstall vs-tac 3. Open %AppData%\npm\node_modules and verify that the vs-tac folder has been removed. 4. In the Visual Studio developer command window, type the following command. npm config set proxy <proxy-port> where *proxy-port* is the proxy address and port number, such as http://proxy.mycompany.com:80/. 5. Then type this command: npm config set https-proxy <proxy-port> where proxy-port might be a value such as http://proxy.mycompany.com:80/ 6. Open Visual Studio. 7. Open your Apache Cordova solution and rebuild your project. ### <a name="Proxy"></a>To configure proxy settings for git 1. Close Visual Studio. 2. Open a Visual Studio developer command window (Ctrl + Alt + A) and type the following command. git config --global http.proxy http://<username>:<password>@<proxy-port> where *username* and *password* are your proxy username and password; *proxy-port* might be a value such as proxy.mycompany.com:80. 3. Type this command: git config --global https.proxy http://<username>:<password>@<proxy-port> where *username* and *password* are your proxy username and password; *proxy-port* might be a value such as proxy.mycompany.com:80 4. Open Visual Studio. 5. 
Open your Apache Cordova solution and rebuild your project.
{'repo_name': 'MicrosoftDocs/cordova-docs', 'stars': '141', 'repo_language': 'JavaScript', 'file_name': 'docfx.json', 'mime_type': 'text/plain', 'hash': 7365292128697254169, 'source_dataset': 'data'}
#include "MatrixOpe.h" int autoNorm(Matrix x); Matrix cdistances(Matrix test,Matrix x); Matrix getK(Matrix oneTest,Matrix x,int K); int classfiy(Matrix &testData,Matrix &testDatay,Matrix &x,Matrix &y,const int &K); int KNN();
{'repo_name': 'myazi/myLearn', 'stars': '105', 'repo_language': 'C++', 'file_name': '5.txt', 'mime_type': 'text/plain', 'hash': 8842257837561298377, 'source_dataset': 'data'}
/**
 * Copyright (c) 2014,2019 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 */
package org.eclipse.smarthome.core.thing.dto;

import java.util.List;
import java.util.Map;

import org.eclipse.smarthome.config.core.dto.ConfigDescriptionParameterDTO;
import org.eclipse.smarthome.config.core.dto.ConfigDescriptionParameterGroupDTO;

/**
 * A data transfer object used to serialize thing types. It extends the
 * stripped variant with channel, configuration-parameter and property
 * information.
 *
 * @author Dennis Nobel - Initial contribution
 * @author Thomas Höfer - Added thing and thing type properties
 * @author Chris Jackson - Added parameter groups
 * @author Miki Jankov - Introducing StrippedThingTypeDTO
 *
 */
public class ThingTypeDTO extends StrippedThingTypeDTO {

    /** The channel definitions offered by the thing type. */
    public List<ChannelDefinitionDTO> channels;

    /** The channel group definitions offered by the thing type. */
    public List<ChannelGroupDefinitionDTO> channelGroups;

    /** Descriptions of the thing type's configuration parameters. */
    public List<ConfigDescriptionParameterDTO> configParameters;

    /** Groups used to organize the configuration parameters. */
    public List<ConfigDescriptionParameterGroupDTO> parameterGroups;

    /** Static properties of the thing type. */
    public Map<String, String> properties;

    /** Ids of the channel types the thing type may be extended with. */
    public List<String> extensibleChannelTypeIds;

    /**
     * No-argument constructor required by (de-)serialization frameworks.
     */
    public ThingTypeDTO() {
    }

    /**
     * Creates a fully populated thing type DTO.
     *
     * @param UID the thing type UID
     * @param label the human-readable label
     * @param description the description text
     * @param category the category
     * @param listed whether the thing type is listed
     * @param configParameters the configuration parameter descriptions
     * @param channels the channel definitions
     * @param channelGroups the channel group definitions
     * @param supportedBridgeTypeUIDs the UIDs of supported bridge types
     * @param properties the static thing type properties
     * @param bridge whether the thing type describes a bridge
     * @param parameterGroups the configuration parameter groups
     * @param extensibleChannelTypeIds the extensible channel type ids
     */
    public ThingTypeDTO(String UID, String label, String description, String category, boolean listed,
            List<ConfigDescriptionParameterDTO> configParameters, List<ChannelDefinitionDTO> channels,
            List<ChannelGroupDefinitionDTO> channelGroups, List<String> supportedBridgeTypeUIDs,
            Map<String, String> properties, boolean bridge, List<ConfigDescriptionParameterGroupDTO> parameterGroups,
            List<String> extensibleChannelTypeIds) {
        // Fields inherited from StrippedThingTypeDTO first ...
        this.UID = UID;
        this.label = label;
        this.description = description;
        this.category = category;
        this.listed = listed;
        this.supportedBridgeTypeUIDs = supportedBridgeTypeUIDs;
        this.bridge = bridge;
        // ... then the fields declared on this class.
        this.configParameters = configParameters;
        this.channels = channels;
        this.channelGroups = channelGroups;
        this.properties = properties;
        this.parameterGroups = parameterGroups;
        this.extensibleChannelTypeIds = extensibleChannelTypeIds;
    }
}
{'repo_name': 'eclipse-archived/smarthome', 'stars': '865', 'repo_language': 'Java', 'file_name': 'MANIFEST.MF', 'mime_type': 'text/plain', 'hash': -8535657874232201325, 'source_dataset': 'data'}
<?xml version="1.0" encoding="UTF-8"?> <bpmn2:definitions xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:bpmn2="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xsi:schemaLocation="http://www.omg.org/spec/BPMN/20100524/MODEL BPMN20.xsd" id="_qyxQQOTSEeO-H55oRJubsg" exporter="camunda modeler" exporterVersion="2.5.0" targetNamespace="http://camunda.org/schema/1.0/bpmn"> <bpmn2:process id="process" isExecutable="true"> <bpmn2:startEvent id="StartEvent_1"> <bpmn2:outgoing>SequenceFlow_1</bpmn2:outgoing> </bpmn2:startEvent> <bpmn2:subProcess id="SubProcess_1"> <bpmn2:incoming>SequenceFlow_1</bpmn2:incoming> <bpmn2:outgoing>SequenceFlow_2</bpmn2:outgoing> <bpmn2:startEvent id="StartEvent_2"> <bpmn2:outgoing>SequenceFlow_3</bpmn2:outgoing> </bpmn2:startEvent> <bpmn2:serviceTask id="ServiceTask_1" camunda:class="org.camunda.bpm.engine.test.bpmn.subprocess.util.GetActInstanceDelegate" name="Service&#xA;Task"> <bpmn2:incoming>SequenceFlow_3</bpmn2:incoming> <bpmn2:outgoing>SequenceFlow_4</bpmn2:outgoing> </bpmn2:serviceTask> <bpmn2:sequenceFlow id="SequenceFlow_3" name="" sourceRef="StartEvent_2" targetRef="ServiceTask_1"/> <bpmn2:endEvent id="EndEvent_2"> <bpmn2:incoming>SequenceFlow_4</bpmn2:incoming> </bpmn2:endEvent> <bpmn2:sequenceFlow id="SequenceFlow_4" name="" sourceRef="ServiceTask_1" targetRef="EndEvent_2"/> </bpmn2:subProcess> <bpmn2:sequenceFlow id="SequenceFlow_1" name="" sourceRef="StartEvent_1" targetRef="SubProcess_1"/> <bpmn2:endEvent id="EndEvent_1"> <bpmn2:incoming>SequenceFlow_2</bpmn2:incoming> </bpmn2:endEvent> <bpmn2:sequenceFlow id="SequenceFlow_2" name="" sourceRef="SubProcess_1" targetRef="EndEvent_1"/> </bpmn2:process> <bpmndi:BPMNDiagram id="BPMNDiagram_1"> <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="process"> <bpmndi:BPMNShape 
id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1"> <dc:Bounds height="36.0" width="36.0" x="193.0" y="160.0"/> </bpmndi:BPMNShape> <bpmndi:BPMNShape id="_BPMNShape_SubProcess_2" bpmnElement="SubProcess_1" isExpanded="true"> <dc:Bounds height="150.0" width="517.0" x="288.0" y="103.0"/> </bpmndi:BPMNShape> <bpmndi:BPMNEdge id="BPMNEdge_SequenceFlow_1" bpmnElement="SequenceFlow_1" sourceElement="_BPMNShape_StartEvent_2" targetElement="_BPMNShape_SubProcess_2"> <di:waypoint xsi:type="dc:Point" x="229.0" y="178.0"/> <di:waypoint xsi:type="dc:Point" x="288.0" y="178.0"/> </bpmndi:BPMNEdge> <bpmndi:BPMNShape id="_BPMNShape_EndEvent_2" bpmnElement="EndEvent_1"> <dc:Bounds height="36.0" width="36.0" x="855.0" y="160.0"/> </bpmndi:BPMNShape> <bpmndi:BPMNEdge id="BPMNEdge_SequenceFlow_2" bpmnElement="SequenceFlow_2" sourceElement="_BPMNShape_SubProcess_2" targetElement="_BPMNShape_EndEvent_2"> <di:waypoint xsi:type="dc:Point" x="804.0" y="178.0"/> <di:waypoint xsi:type="dc:Point" x="855.0" y="178.0"/> </bpmndi:BPMNEdge> <bpmndi:BPMNShape id="_BPMNShape_StartEvent_3" bpmnElement="StartEvent_2"> <dc:Bounds height="36.0" width="36.0" x="347.0" y="160.0"/> </bpmndi:BPMNShape> <bpmndi:BPMNShape id="_BPMNShape_ServiceTask_2" bpmnElement="ServiceTask_1"> <dc:Bounds height="80.0" width="100.0" x="433.0" y="138.0"/> </bpmndi:BPMNShape> <bpmndi:BPMNEdge id="BPMNEdge_SequenceFlow_3" bpmnElement="SequenceFlow_3" sourceElement="_BPMNShape_StartEvent_3" targetElement="_BPMNShape_ServiceTask_2"> <di:waypoint xsi:type="dc:Point" x="383.0" y="178.0"/> <di:waypoint xsi:type="dc:Point" x="433.0" y="178.0"/> </bpmndi:BPMNEdge> <bpmndi:BPMNShape id="_BPMNShape_EndEvent_3" bpmnElement="EndEvent_2"> <dc:Bounds height="36.0" width="36.0" x="583.0" y="160.0"/> </bpmndi:BPMNShape> <bpmndi:BPMNEdge id="BPMNEdge_SequenceFlow_4" bpmnElement="SequenceFlow_4" sourceElement="_BPMNShape_ServiceTask_2" targetElement="_BPMNShape_EndEvent_3"> <di:waypoint xsi:type="dc:Point" x="533.0" y="178.0"/> 
<di:waypoint xsi:type="dc:Point" x="583.0" y="178.0"/> </bpmndi:BPMNEdge> </bpmndi:BPMNPlane> </bpmndi:BPMNDiagram> </bpmn2:definitions>
{'repo_name': 'camunda/camunda-bpm-platform', 'stars': '1508', 'repo_language': 'Java', 'file_name': 'DmnBusinessRuleTaskTest.java', 'mime_type': 'text/x-java', 'hash': -4168188778184698764, 'source_dataset': 'data'}
// Copyright 2014 Cognitect. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS-IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. goog.provide("com.cognitect.transit.eq"); goog.require("com.cognitect.transit.util"); goog.scope(function() { var eq = com.cognitect.transit.eq, util = com.cognitect.transit.util; /** * @const * @type {string} */ eq.hashCodeProperty = "transit$hashCode$"; /** * @type {number} */ eq.hashCodeCounter = 1; eq.equals = function (x, y) { if(x == null) { return y == null; } else if(x === y) { return true; } else if(typeof x === "object") { if(util.isArray(x)) { if(util.isArray(y)) { if(x.length === y.length) { for(var i = 0; i < x.length; i++) { if(!eq.equals(x[i], y[i])) { return false; } } return true; } else { return false; } } else { return false; } } else if(x.com$cognitect$transit$equals) { return x.com$cognitect$transit$equals(y); } else if((y != null) && (typeof y === "object")) { if(y.com$cognitect$transit$equals) { return y.com$cognitect$transit$equals(x); } else { var xklen = 0, yklen = util.objectKeys(y).length; for(var p in x) { if(!x.hasOwnProperty(p)) continue; xklen++; if(!y.hasOwnProperty(p)) { return false; } else { if(!eq.equals(x[p], y[p])) { return false; } } } return xklen === yklen; } } else { return false; } } else { return false } }; eq.hashCombine = function(seed, hash) { return seed ^ (hash + 0x9e3779b9 + (seed << 6) + (seed >> 2)); }; eq.stringCodeCache = {}; eq.stringCodeCacheSize = 0; /** * @const * @type {number} */ eq.STR_CACHE_MAX = 256; 
// Hashes a string with the classic Java-style 31*h + c scheme, memoized
// in eq.stringCodeCache.
eq.hashString = function(str) {
    // a la goog.string.HashCode
    // http://docs.closure-library.googlecode.com/git/local_closure_goog_string_string.js.source.html#line1206
    var cached = eq.stringCodeCache[str];
    if(cached != null) {
        return cached;
    }
    var code = 0;
    for (var i = 0; i < str.length; ++i) {
        code = 31 * code + str.charCodeAt(i);
        code %= 0x100000000; // keep the running hash within 32 bits
    }
    // Bound the cache: once full, reset it entirely rather than evicting
    // individual keys.
    eq.stringCodeCacheSize++;
    if(eq.stringCodeCacheSize >= eq.STR_CACHE_MAX) {
        eq.stringCodeCache = {};
        eq.stringCodeCacheSize = 1;
    }
    eq.stringCodeCache[str] = code;
    return code;
};

// Order-independent hash of a map-like value: sums key-hash XOR value-hash
// per entry, modulo 2^52 so the sum stays an exact integer.
eq.hashMapLike = function(m) {
    var code = 0;
    // ES6 Map-like case
    if(m.forEach != null) {
        m.forEach(function(val, key, m) {
            code = (code + (eq.hashCode(key) ^ eq.hashCode(val))) % 4503599627370496;
        });
    } else {
        // JS Object case
        var keys = util.objectKeys(m);
        for(var i = 0; i < keys.length; i++) {
            var key = keys[i];
            var val = m[key];
            code = (code + (eq.hashCode(key) ^ eq.hashCode(val))) % 4503599627370496;
        }
    }
    return code;
};

// Order-dependent hash of an array-like value, folding each element's
// hash in with eq.hashCombine.  Values that are neither arrays nor
// forEach-able hash to 0.
eq.hashArrayLike = function(arr) {
    var code = 0;
    if(util.isArray(arr)) {
        for(var i = 0; i < arr.length; i++) {
            code = eq.hashCombine(code, eq.hashCode(arr[i]));
        }
    } else if(arr.forEach) {
        arr.forEach(function(x, i) {
            code = eq.hashCombine(code, eq.hashCode(x));
        });
    }
    return code;
};

// Dispatches hashing by type:
//   null/undefined -> 0; number -> itself; boolean -> 1/0;
//   string -> eq.hashString; function -> a unique counter value cached on
//   the function (hidden via defineProperty when available);
//   Date -> valueOf(); array -> hashArrayLike; protocol objects delegate
//   to com$cognitect$transit$hashCode; anything else -> hashMapLike.
eq.hashCode = function(x) {
    if(x == null) {
        return 0;
    } else {
        switch(typeof x) {
        case 'number':
            return x;
            break;
        case 'boolean':
            return x === true ? 1 : 0;
            break;
        case 'string':
            return eq.hashString(x);
            break;
        case 'function':
            // Functions get a unique id on first use, stored under
            // eq.hashCodeProperty (non-enumerable where supported).
            var code = x[eq.hashCodeProperty];
            if(code) {
                return code;
            } else {
                code = eq.hashCodeCounter;
                if(typeof Object.defineProperty != "undefined") {
                    Object.defineProperty(x, eq.hashCodeProperty, {
                        value: code,
                        enumerable: false
                    });
                } else {
                    x[eq.hashCodeProperty] = code;
                }
                eq.hashCodeCounter++;
                return code;
            }
            break;
        default:
            if(x instanceof Date) {
                return x.valueOf();
            } else if(util.isArray(x)) {
                return eq.hashArrayLike(x);
            }
            if(x.com$cognitect$transit$hashCode) {
                // Object implements the transit hashing protocol.
                return x.com$cognitect$transit$hashCode();
            } else {
                return eq.hashMapLike(x);
            }
            break;
        }
    }
}

// Installs user-supplied "hashCode"/"equals" implementations onto obj so
// it participates in the transit equality/hashing protocols above.
eq.extendToEQ = function(obj, opts) {
    obj.com$cognitect$transit$hashCode = opts["hashCode"];
    obj.com$cognitect$transit$equals = opts["equals"];
    return obj;
}

});
{'repo_name': 'cognitect/transit-js', 'stars': '710', 'repo_language': 'JavaScript', 'file_name': 'bson_compare.js', 'mime_type': 'text/plain', 'hash': 8604327181272633393, 'source_dataset': 'data'}
<?php

use think\facade\Env;

// Database configuration (ThinkPHP style).  Values are read from the
// environment with Env::get(), falling back to the defaults given here.
return [
    // Default database connection to use
    'default' => Env::get('database.driver', 'mysql'),

    // Custom time-range query rules
    'time_query_rule' => [],

    // Automatic timestamp fields:
    //   true  = auto-detect the column type, false = disabled,
    //   or a string naming the type explicitly: int / timestamp / datetime / date
    'auto_timestamp' => true,

    // Default format applied to datetime fields when they are read back
    'datetime_format' => 'Y-m-d H:i:s',

    // Database connection settings
    'connections' => [
        'mysql' => [
            // Database type
            'type' => Env::get('database.type', 'mysql'),
            // Server address
            'hostname' => Env::get('database.hostname', 'host.docker.internal'),
            // Database name
            'database' => Env::get('database.database', 'easyadmin'),
            // Username
            'username' => Env::get('database.username', 'root'),
            // Password
            'password' => Env::get('database.password', 'root'),
            // Port
            'hostport' => Env::get('database.hostport', '3306'),
            // Extra connection parameters
            'params' => [],
            // Character set (defaults to utf8)
            'charset' => Env::get('database.charset', 'utf8'),
            // Table name prefix
            'prefix' => Env::get('database.prefix', 'ea_'),
            // Deployment mode: 0 = centralized (single server), 1 = distributed (master/slave)
            'deploy' => 0,
            // Separate reads from writes (only meaningful for master/slave setups)
            'rw_separate' => false,
            // Number of master servers when read/write separation is enabled
            'master_num' => 1,
            // Index of the slave server to use
            'slave_no' => '',
            // Strictly check that queried fields exist
            'fields_strict' => true,
            // Reconnect automatically when the connection drops
            'break_reconnect' => false,
            // Listen to (log) executed SQL
            'trigger_sql' => true,
            // Enable the field (schema) cache
            'fields_cache' => false,
            // Path where the schema cache is stored
            'schema_cache_path' => app()->getRuntimePath() . 'schema' . DIRECTORY_SEPARATOR,
        ],
        // More database connection configurations
    ],
];
{'repo_name': 'zhongshaofa/easyadmin', 'stars': '113', 'repo_language': 'PHP', 'file_name': 'goods.js', 'mime_type': 'text/plain', 'hash': 3187550496789945484, 'source_dataset': 'data'}
# Copyright (C) 2018 Heron Systems, Inc. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from adept.exp.base.exp_module import ExpModule class ExperienceReplay(ExpModule): def write_actor(self, items): pass def write_env(self, obs, rewards, terminals, infos): pass def read(self): pass def is_ready(self): pass class PrioritizedExperienceReplay(ExpModule): def write_actor(self, items): pass def write_env(self, obs, rewards, terminals, infos): pass def read(self): pass def is_ready(self): pass
{'repo_name': 'heronsystems/adeptRL', 'stars': '113', 'repo_language': 'Python', 'file_name': 'test_rollout.py', 'mime_type': 'text/x-python', 'hash': 3156563140301728070, 'source_dataset': 'data'}
/* * Copyright (C) 2001, 2004, 2011 Free Software Foundation, Inc. * This file is part of the GNU LIBICONV Library. * * The GNU LIBICONV Library is free software; you can redistribute it * and/or modify it under the terms of the GNU Library General Public * License as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * The GNU LIBICONV Library is distributed in the hope that it will be * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Library General Public License for more details. * * You should have received a copy of the GNU Library General Public * License along with the GNU LIBICONV Library; see the file COPYING.LIB. * If not, write to the Free Software Foundation, Inc., 51 Franklin Street, * Fifth Floor, Boston, MA 02110-1301, USA. */ /* Combining characters used in Vietnamese encodings CP1258, TCVN. */ #ifndef _VIETCOMB_H #define _VIETCOMB_H /* Relevant combining characters: 0x0300, 0x0301, 0x0303, 0x0309, 0x0323. */ /* Composition tables for each of the relevant combining characters. 
*/ static const struct { unsigned short base; unsigned short composed; } viet_comp_table_data[] = { #define viet_comp_table0300_idx 0 #define viet_comp_table0300_len 31 { 0x0041, 0x00C0 }, { 0x0045, 0x00C8 }, { 0x0049, 0x00CC }, { 0x004E, 0x01F8 }, { 0x004F, 0x00D2 }, { 0x0055, 0x00D9 }, { 0x0057, 0x1E80 }, { 0x0059, 0x1EF2 }, { 0x0061, 0x00E0 }, { 0x0065, 0x00E8 }, { 0x0069, 0x00EC }, { 0x006E, 0x01F9 }, { 0x006F, 0x00F2 }, { 0x0075, 0x00F9 }, { 0x0077, 0x1E81 }, { 0x0079, 0x1EF3 }, { 0x00A8, 0x1FED }, { 0x00C2, 0x1EA6 }, { 0x00CA, 0x1EC0 }, { 0x00D4, 0x1ED2 }, { 0x00DC, 0x01DB }, { 0x00E2, 0x1EA7 }, { 0x00EA, 0x1EC1 }, { 0x00F4, 0x1ED3 }, { 0x00FC, 0x01DC }, { 0x0102, 0x1EB0 }, { 0x0103, 0x1EB1 }, /*{ 0x0112, 0x1E14 },*/ /*{ 0x0113, 0x1E15 },*/ /*{ 0x014C, 0x1E50 },*/ /*{ 0x014D, 0x1E51 },*/ { 0x01A0, 0x1EDC }, { 0x01A1, 0x1EDD }, { 0x01AF, 0x1EEA }, { 0x01B0, 0x1EEB }, #define viet_comp_table0301_idx (viet_comp_table0300_idx+viet_comp_table0300_len) #define viet_comp_table0301_len 63 { 0x0041, 0x00C1 }, { 0x0043, 0x0106 }, { 0x0045, 0x00C9 }, { 0x0047, 0x01F4 }, { 0x0049, 0x00CD }, { 0x004B, 0x1E30 }, { 0x004C, 0x0139 }, { 0x004D, 0x1E3E }, { 0x004E, 0x0143 }, { 0x004F, 0x00D3 }, { 0x0050, 0x1E54 }, { 0x0052, 0x0154 }, { 0x0053, 0x015A }, { 0x0055, 0x00DA }, { 0x0057, 0x1E82 }, { 0x0059, 0x00DD }, { 0x005A, 0x0179 }, { 0x0061, 0x00E1 }, { 0x0063, 0x0107 }, { 0x0065, 0x00E9 }, { 0x0067, 0x01F5 }, { 0x0069, 0x00ED }, { 0x006B, 0x1E31 }, { 0x006C, 0x013A }, { 0x006D, 0x1E3F }, { 0x006E, 0x0144 }, { 0x006F, 0x00F3 }, { 0x0070, 0x1E55 }, { 0x0072, 0x0155 }, { 0x0073, 0x015B }, { 0x0075, 0x00FA }, { 0x0077, 0x1E83 }, { 0x0079, 0x00FD }, { 0x007A, 0x017A }, { 0x00A8, 0x0385 }, /* prefer U+0385 over U+1FEE */ { 0x00C2, 0x1EA4 }, { 0x00C5, 0x01FA }, { 0x00C6, 0x01FC }, { 0x00C7, 0x1E08 }, { 0x00CA, 0x1EBE }, { 0x00CF, 0x1E2E }, { 0x00D4, 0x1ED0 }, { 0x00D5, 0x1E4C }, { 0x00D8, 0x01FE }, { 0x00DC, 0x01D7 }, { 0x00E2, 0x1EA5 }, { 0x00E5, 0x01FB }, { 0x00E6, 0x01FD }, { 
0x00E7, 0x1E09 }, { 0x00EA, 0x1EBF }, { 0x00EF, 0x1E2F }, { 0x00F4, 0x1ED1 }, { 0x00F5, 0x1E4D }, { 0x00F8, 0x01FF }, { 0x00FC, 0x01D8 }, { 0x0102, 0x1EAE }, { 0x0103, 0x1EAF }, /*{ 0x0112, 0x1E16 },*/ /*{ 0x0113, 0x1E17 },*/ /*{ 0x014C, 0x1E52 },*/ /*{ 0x014D, 0x1E53 },*/ { 0x0168, 0x1E78 }, { 0x0169, 0x1E79 }, { 0x01A0, 0x1EDA }, { 0x01A1, 0x1EDB }, { 0x01AF, 0x1EE8 }, { 0x01B0, 0x1EE9 }, #define viet_comp_table0303_idx (viet_comp_table0301_idx+viet_comp_table0301_len) #define viet_comp_table0303_len 34 { 0x0041, 0x00C3 }, { 0x0045, 0x1EBC }, { 0x0049, 0x0128 }, { 0x004E, 0x00D1 }, { 0x004F, 0x00D5 }, { 0x0055, 0x0168 }, { 0x0056, 0x1E7C }, { 0x0059, 0x1EF8 }, { 0x0061, 0x00E3 }, { 0x0065, 0x1EBD }, { 0x0069, 0x0129 }, { 0x006E, 0x00F1 }, { 0x006F, 0x00F5 }, { 0x0075, 0x0169 }, { 0x0076, 0x1E7D }, { 0x0079, 0x1EF9 }, { 0x00C2, 0x1EAA }, { 0x00CA, 0x1EC4 }, { 0x00D3, 0x1E4C }, { 0x00D4, 0x1ED6 }, { 0x00D6, 0x1E4E }, { 0x00DA, 0x1E78 }, { 0x00E2, 0x1EAB }, { 0x00EA, 0x1EC5 }, { 0x00F3, 0x1E4D }, { 0x00F4, 0x1ED7 }, { 0x00F6, 0x1E4F }, { 0x00FA, 0x1E79 }, { 0x0102, 0x1EB4 }, { 0x0103, 0x1EB5 }, { 0x01A0, 0x1EE0 }, { 0x01A1, 0x1EE1 }, { 0x01AF, 0x1EEE }, { 0x01B0, 0x1EEF }, #define viet_comp_table0309_idx (viet_comp_table0303_idx+viet_comp_table0303_len) #define viet_comp_table0309_len 24 { 0x0041, 0x1EA2 }, { 0x0045, 0x1EBA }, { 0x0049, 0x1EC8 }, { 0x004F, 0x1ECE }, { 0x0055, 0x1EE6 }, { 0x0059, 0x1EF6 }, { 0x0061, 0x1EA3 }, { 0x0065, 0x1EBB }, { 0x0069, 0x1EC9 }, { 0x006F, 0x1ECF }, { 0x0075, 0x1EE7 }, { 0x0079, 0x1EF7 }, { 0x00C2, 0x1EA8 }, { 0x00CA, 0x1EC2 }, { 0x00D4, 0x1ED4 }, { 0x00E2, 0x1EA9 }, { 0x00EA, 0x1EC3 }, { 0x00F4, 0x1ED5 }, { 0x0102, 0x1EB2 }, { 0x0103, 0x1EB3 }, { 0x01A0, 0x1EDE }, { 0x01A1, 0x1EDF }, { 0x01AF, 0x1EEC }, { 0x01B0, 0x1EED }, #define viet_comp_table0323_idx (viet_comp_table0309_idx+viet_comp_table0309_len) #define viet_comp_table0323_len 50 { 0x0041, 0x1EA0 }, { 0x0042, 0x1E04 }, { 0x0044, 0x1E0C }, { 0x0045, 0x1EB8 }, { 0x0048, 
0x1E24 }, { 0x0049, 0x1ECA }, { 0x004B, 0x1E32 }, { 0x004C, 0x1E36 }, { 0x004D, 0x1E42 }, { 0x004E, 0x1E46 }, { 0x004F, 0x1ECC }, { 0x0052, 0x1E5A }, { 0x0053, 0x1E62 }, { 0x0054, 0x1E6C }, { 0x0055, 0x1EE4 }, { 0x0056, 0x1E7E }, { 0x0057, 0x1E88 }, { 0x0059, 0x1EF4 }, { 0x005A, 0x1E92 }, { 0x0061, 0x1EA1 }, { 0x0062, 0x1E05 }, { 0x0064, 0x1E0D }, { 0x0065, 0x1EB9 }, { 0x0068, 0x1E25 }, { 0x0069, 0x1ECB }, { 0x006B, 0x1E33 }, { 0x006C, 0x1E37 }, { 0x006D, 0x1E43 }, { 0x006E, 0x1E47 }, { 0x006F, 0x1ECD }, { 0x0072, 0x1E5B }, { 0x0073, 0x1E63 }, { 0x0074, 0x1E6D }, { 0x0075, 0x1EE5 }, { 0x0076, 0x1E7F }, { 0x0077, 0x1E89 }, { 0x0079, 0x1EF5 }, { 0x007A, 0x1E93 }, { 0x00C2, 0x1EAC }, { 0x00CA, 0x1EC6 }, { 0x00D4, 0x1ED8 }, { 0x00E2, 0x1EAD }, { 0x00EA, 0x1EC7 }, { 0x00F4, 0x1ED9 }, { 0x0102, 0x1EB6 }, { 0x0103, 0x1EB7 }, { 0x01A0, 0x1EE2 }, { 0x01A1, 0x1EE3 }, { 0x01AF, 0x1EF0 }, { 0x01B0, 0x1EF1 }, }; static const struct { unsigned int len; unsigned int idx; } viet_comp_table[] = { { viet_comp_table0300_len, viet_comp_table0300_idx }, { viet_comp_table0301_len, viet_comp_table0301_idx }, { viet_comp_table0303_len, viet_comp_table0303_idx }, { viet_comp_table0309_len, viet_comp_table0309_idx }, { viet_comp_table0323_len, viet_comp_table0323_idx }, }; /* Decomposition table for the relevant Unicode characters. 
*/ struct viet_decomp { unsigned short composed; unsigned int base : 12; int comb1 : 4; }; static const struct viet_decomp viet_decomp_table[] = { { 0x00B4, 0x0020, 1 }, /* compatibility decomposition - for TCVN only */ { 0x00C0, 0x0041, 0 }, { 0x00C1, 0x0041, 1 }, { 0x00C3, 0x0041, 2 }, { 0x00C8, 0x0045, 0 }, { 0x00C9, 0x0045, 1 }, { 0x00CC, 0x0049, 0 }, { 0x00CD, 0x0049, 1 }, { 0x00D1, 0x004E, 2 }, { 0x00D2, 0x004F, 0 }, { 0x00D3, 0x004F, 1 }, { 0x00D5, 0x004F, 2 }, { 0x00D9, 0x0055, 0 }, { 0x00DA, 0x0055, 1 }, { 0x00DD, 0x0059, 1 }, { 0x00E0, 0x0061, 0 }, { 0x00E1, 0x0061, 1 }, { 0x00E3, 0x0061, 2 }, { 0x00E8, 0x0065, 0 }, { 0x00E9, 0x0065, 1 }, { 0x00EC, 0x0069, 0 }, { 0x00ED, 0x0069, 1 }, { 0x00F1, 0x006E, 2 }, { 0x00F2, 0x006F, 0 }, { 0x00F3, 0x006F, 1 }, { 0x00F5, 0x006F, 2 }, { 0x00F9, 0x0075, 0 }, { 0x00FA, 0x0075, 1 }, { 0x00FD, 0x0079, 1 }, { 0x0106, 0x0043, 1 }, { 0x0107, 0x0063, 1 }, { 0x0128, 0x0049, 2 }, { 0x0129, 0x0069, 2 }, { 0x0139, 0x004C, 1 }, { 0x013A, 0x006C, 1 }, { 0x0143, 0x004E, 1 }, { 0x0144, 0x006E, 1 }, { 0x0154, 0x0052, 1 }, { 0x0155, 0x0072, 1 }, { 0x015A, 0x0053, 1 }, { 0x015B, 0x0073, 1 }, { 0x0168, 0x0055, 2 }, { 0x0169, 0x0075, 2 }, { 0x0179, 0x005A, 1 }, { 0x017A, 0x007A, 1 }, { 0x01D7, 0x00DC, 1 }, { 0x01D8, 0x00FC, 1 }, { 0x01DB, 0x00DC, 0 }, { 0x01DC, 0x00FC, 0 }, { 0x01F4, 0x0047, 1 }, { 0x01F5, 0x0067, 1 }, { 0x01F8, 0x004E, 0 }, { 0x01F9, 0x006E, 0 }, { 0x01FA, 0x00C5, 1 }, { 0x01FB, 0x00E5, 1 }, { 0x01FC, 0x00C6, 1 }, { 0x01FD, 0x00E6, 1 }, { 0x01FE, 0x00D8, 1 }, { 0x01FF, 0x00F8, 1 }, { 0x02DC, 0x0020, 2 }, /* compatibility decomposition - for TCVN only */ { 0x0385, 0x00A8, 1 }, { 0x1E04, 0x0042, 4 }, { 0x1E05, 0x0062, 4 }, { 0x1E08, 0x00C7, 1 }, { 0x1E09, 0x00E7, 1 }, { 0x1E0C, 0x0044, 4 }, { 0x1E0D, 0x0064, 4 }, { 0x1E24, 0x0048, 4 }, { 0x1E25, 0x0068, 4 }, { 0x1E2E, 0x00CF, 1 }, { 0x1E2F, 0x00EF, 1 }, { 0x1E30, 0x004B, 1 }, { 0x1E31, 0x006B, 1 }, { 0x1E32, 0x004B, 4 }, { 0x1E33, 0x006B, 4 }, { 0x1E36, 0x004C, 4 }, { 
0x1E37, 0x006C, 4 }, { 0x1E3E, 0x004D, 1 }, { 0x1E3F, 0x006D, 1 }, { 0x1E42, 0x004D, 4 }, { 0x1E43, 0x006D, 4 }, { 0x1E46, 0x004E, 4 }, { 0x1E47, 0x006E, 4 }, { 0x1E4C, 0x00D3, 2 }, /*{ 0x1E4C, 0x00D5, 1 },*/ /*{ 0x1E4C, 0x004F, 1, 2 },*/ { 0x1E4D, 0x00F3, 2 }, /*{ 0x1E4D, 0x00F5, 1 },*/ /*{ 0x1E4D, 0x006F, 1, 2 },*/ { 0x1E4E, 0x00D6, 2 }, { 0x1E4F, 0x00F6, 2 }, { 0x1E54, 0x0050, 1 }, { 0x1E55, 0x0070, 1 }, { 0x1E5A, 0x0052, 4 }, { 0x1E5B, 0x0072, 4 }, { 0x1E62, 0x0053, 4 }, { 0x1E63, 0x0073, 4 }, { 0x1E6C, 0x0054, 4 }, { 0x1E6D, 0x0074, 4 }, { 0x1E78, 0x00DA, 2 }, /*{ 0x1E78, 0x0168, 1 },*/ /*{ 0x1E78, 0x0055, 1, 2 },*/ { 0x1E79, 0x00FA, 2 }, /*{ 0x1E79, 0x0169, 1 },*/ /*{ 0x1E79, 0x0075, 1, 2 },*/ { 0x1E7C, 0x0056, 2 }, { 0x1E7D, 0x0076, 2 }, { 0x1E7E, 0x0056, 4 }, { 0x1E7F, 0x0076, 4 }, { 0x1E80, 0x0057, 0 }, { 0x1E81, 0x0077, 0 }, { 0x1E82, 0x0057, 1 }, { 0x1E83, 0x0077, 1 }, { 0x1E88, 0x0057, 4 }, { 0x1E89, 0x0077, 4 }, { 0x1E92, 0x005A, 4 }, { 0x1E93, 0x007A, 4 }, { 0x1EA0, 0x0041, 4 }, { 0x1EA1, 0x0061, 4 }, { 0x1EA2, 0x0041, 3 }, { 0x1EA3, 0x0061, 3 }, { 0x1EA4, 0x00C2, 1 }, { 0x1EA5, 0x00E2, 1 }, { 0x1EA6, 0x00C2, 0 }, { 0x1EA7, 0x00E2, 0 }, { 0x1EA8, 0x00C2, 3 }, { 0x1EA9, 0x00E2, 3 }, { 0x1EAA, 0x00C2, 2 }, { 0x1EAB, 0x00E2, 2 }, { 0x1EAC, 0x00C2, 4 }, { 0x1EAD, 0x00E2, 4 }, { 0x1EAE, 0x0102, 1 }, { 0x1EAF, 0x0103, 1 }, { 0x1EB0, 0x0102, 0 }, { 0x1EB1, 0x0103, 0 }, { 0x1EB2, 0x0102, 3 }, { 0x1EB3, 0x0103, 3 }, { 0x1EB4, 0x0102, 2 }, { 0x1EB5, 0x0103, 2 }, { 0x1EB6, 0x0102, 4 }, { 0x1EB7, 0x0103, 4 }, { 0x1EB8, 0x0045, 4 }, { 0x1EB9, 0x0065, 4 }, { 0x1EBA, 0x0045, 3 }, { 0x1EBB, 0x0065, 3 }, { 0x1EBC, 0x0045, 2 }, { 0x1EBD, 0x0065, 2 }, { 0x1EBE, 0x00CA, 1 }, { 0x1EBF, 0x00EA, 1 }, { 0x1EC0, 0x00CA, 0 }, { 0x1EC1, 0x00EA, 0 }, { 0x1EC2, 0x00CA, 3 }, { 0x1EC3, 0x00EA, 3 }, { 0x1EC4, 0x00CA, 2 }, { 0x1EC5, 0x00EA, 2 }, { 0x1EC6, 0x00CA, 4 }, { 0x1EC7, 0x00EA, 4 }, { 0x1EC8, 0x0049, 3 }, { 0x1EC9, 0x0069, 3 }, { 0x1ECA, 0x0049, 4 }, { 0x1ECB, 0x0069, 4 }, { 
0x1ECC, 0x004F, 4 }, { 0x1ECD, 0x006F, 4 }, { 0x1ECE, 0x004F, 3 }, { 0x1ECF, 0x006F, 3 }, { 0x1ED0, 0x00D4, 1 }, { 0x1ED1, 0x00F4, 1 }, { 0x1ED2, 0x00D4, 0 }, { 0x1ED3, 0x00F4, 0 }, { 0x1ED4, 0x00D4, 3 }, { 0x1ED5, 0x00F4, 3 }, { 0x1ED6, 0x00D4, 2 }, { 0x1ED7, 0x00F4, 2 }, { 0x1ED8, 0x00D4, 4 }, { 0x1ED9, 0x00F4, 4 }, { 0x1EDA, 0x01A0, 1 }, { 0x1EDB, 0x01A1, 1 }, { 0x1EDC, 0x01A0, 0 }, { 0x1EDD, 0x01A1, 0 }, { 0x1EDE, 0x01A0, 3 }, { 0x1EDF, 0x01A1, 3 }, { 0x1EE0, 0x01A0, 2 }, { 0x1EE1, 0x01A1, 2 }, { 0x1EE2, 0x01A0, 4 }, { 0x1EE3, 0x01A1, 4 }, { 0x1EE4, 0x0055, 4 }, { 0x1EE5, 0x0075, 4 }, { 0x1EE6, 0x0055, 3 }, { 0x1EE7, 0x0075, 3 }, { 0x1EE8, 0x01AF, 1 }, { 0x1EE9, 0x01B0, 1 }, { 0x1EEA, 0x01AF, 0 }, { 0x1EEB, 0x01B0, 0 }, { 0x1EEC, 0x01AF, 3 }, { 0x1EED, 0x01B0, 3 }, { 0x1EEE, 0x01AF, 2 }, { 0x1EEF, 0x01B0, 2 }, { 0x1EF0, 0x01AF, 4 }, { 0x1EF1, 0x01B0, 4 }, { 0x1EF2, 0x0059, 0 }, { 0x1EF3, 0x0079, 0 }, { 0x1EF4, 0x0059, 4 }, { 0x1EF5, 0x0079, 4 }, { 0x1EF6, 0x0059, 3 }, { 0x1EF7, 0x0079, 3 }, { 0x1EF8, 0x0059, 2 }, { 0x1EF9, 0x0079, 2 }, { 0x1FED, 0x00A8, 0 }, { 0x1FEE, 0x00A8, 1 }, /* U+1FEE => U+0385 => U+00A8 U+0301 */ }; #endif /* _VIETCOMB_H */
{'repo_name': 'rizzoma/rizzoma', 'stars': '114', 'repo_language': 'CoffeeScript', 'file_name': 'sphinxsearch', 'mime_type': 'text/plain', 'hash': 1244045455749493406, 'source_dataset': 'data'}
%YAML 1.1 %TAG !u! tag:unity3d.com,2011: --- !u!29 &1 OcclusionCullingSettings: m_ObjectHideFlags: 0 serializedVersion: 2 m_OcclusionBakeSettings: smallestOccluder: 5 smallestHole: 0.25 backfaceThreshold: 100 m_SceneGUID: 00000000000000000000000000000000 m_OcclusionCullingData: {fileID: 0} --- !u!104 &2 RenderSettings: m_ObjectHideFlags: 0 serializedVersion: 9 m_Fog: 0 m_FogColor: {r: 0.121568635, g: 0.18431373, b: 0.3803922, a: 1} m_FogMode: 3 m_FogDensity: 0.015 m_LinearFogStart: 0 m_LinearFogEnd: 300 m_AmbientSkyColor: {r: 0.212, g: 0.227, b: 0.259, a: 1} m_AmbientEquatorColor: {r: 0.114, g: 0.125, b: 0.133, a: 1} m_AmbientGroundColor: {r: 0.047, g: 0.043, b: 0.035, a: 1} m_AmbientIntensity: 1 m_AmbientMode: 0 m_SubtractiveShadowColor: {r: 0.42, g: 0.478, b: 0.627, a: 1} m_SkyboxMaterial: {fileID: 2100000, guid: 22b5117545d774b5cb61a7808c5123ee, type: 2} m_HaloStrength: 0.5 m_FlareStrength: 1 m_FlareFadeSpeed: 3 m_HaloTexture: {fileID: 0} m_SpotCookie: {fileID: 10001, guid: 0000000000000000e000000000000000, type: 0} m_DefaultReflectionMode: 0 m_DefaultReflectionResolution: 128 m_ReflectionBounces: 1 m_ReflectionIntensity: 1 m_CustomReflection: {fileID: 0} m_Sun: {fileID: 1379798262} m_IndirectSpecularColor: {r: 0, g: 0, b: 0, a: 1} m_UseRadianceAmbientProbe: 0 --- !u!157 &3 LightmapSettings: m_ObjectHideFlags: 0 serializedVersion: 11 m_GIWorkflowMode: 1 m_GISettings: serializedVersion: 2 m_BounceScale: 1 m_IndirectOutputScale: 1 m_AlbedoBoost: 1 m_EnvironmentLightingMode: 0 m_EnableBakedLightmaps: 0 m_EnableRealtimeLightmaps: 0 m_LightmapEditorSettings: serializedVersion: 12 m_Resolution: 2 m_BakeResolution: 20 m_AtlasSize: 512 m_AO: 0 m_AOMaxDistance: 1 m_CompAOExponent: 1 m_CompAOExponentDirect: 0 m_ExtractAmbientOcclusion: 0 m_Padding: 2 m_LightmapParameters: {fileID: 0} m_LightmapsBakeMode: 1 m_TextureCompression: 1 m_FinalGather: 0 m_FinalGatherFiltering: 1 m_FinalGatherRayCount: 256 m_ReflectionCompression: 2 m_MixedBakeMode: 0 m_BakeBackend: 1 
m_PVRSampling: 1 m_PVRDirectSampleCount: 16 m_PVRSampleCount: 30 m_PVRBounces: 1 m_PVREnvironmentSampleCount: 30 m_PVREnvironmentReferencePointCount: 2048 m_PVRFilteringMode: 2 m_PVRDenoiserTypeDirect: 0 m_PVRDenoiserTypeIndirect: 0 m_PVRDenoiserTypeAO: 0 m_PVRFilterTypeDirect: 0 m_PVRFilterTypeIndirect: 0 m_PVRFilterTypeAO: 0 m_PVREnvironmentMIS: 0 m_PVRCulling: 0 m_PVRFilteringGaussRadiusDirect: 1 m_PVRFilteringGaussRadiusIndirect: 5 m_PVRFilteringGaussRadiusAO: 2 m_PVRFilteringAtrousPositionSigmaDirect: 0.5 m_PVRFilteringAtrousPositionSigmaIndirect: 2 m_PVRFilteringAtrousPositionSigmaAO: 1 m_ExportTrainingData: 0 m_TrainingDataDestination: TrainingData m_LightingDataAsset: {fileID: 112000002, guid: d235646aaa0510e46bd9ad6cd956ac82, type: 2} m_UseShadowmask: 0 --- !u!196 &4 NavMeshSettings: serializedVersion: 2 m_ObjectHideFlags: 0 m_BuildSettings: serializedVersion: 2 agentTypeID: 0 agentRadius: 0.5 agentHeight: 2 agentSlope: 45 agentClimb: 0.4 ledgeDropHeight: 0 maxJumpAcrossDistance: 0 minRegionArea: 2 manualCellSize: 0 cellSize: 0.16666667 manualTileSize: 0 tileSize: 256 accuratePlacement: 0 debug: m_Flags: 0 m_NavMeshData: {fileID: 0} --- !u!1 &2402805 GameObject: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} serializedVersion: 6 m_Component: - component: {fileID: 2402806} m_Layer: 0 m_Name: Cameras m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 --- !u!4 &2402806 Transform: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 2402805} m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} m_LocalPosition: {x: 12.855254, y: 3.9177809, z: -18.813847} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: - {fileID: 220979867} - {fileID: 1496275640} m_Father: {fileID: 0} m_RootOrder: 1 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} --- !u!1 &220979866 GameObject: 
m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} serializedVersion: 6 m_Component: - component: {fileID: 220979867} - component: {fileID: 220979868} m_Layer: 0 m_Name: VCam - Follow Player m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 --- !u!4 &220979867 Transform: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 220979866} m_LocalRotation: {x: 0.1724006, y: 0.8273177, z: -0.3897202, w: 0.36598048} m_LocalPosition: {x: -8.095254, y: 5.072219, z: 4.8838463} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: - {fileID: 1457675942} m_Father: {fileID: 2402806} m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} --- !u!114 &220979868 MonoBehaviour: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 220979866} m_Enabled: 1 m_EditorHideFlags: 0 m_Script: {fileID: 11500000, guid: 45e653bab7fb20e499bda25e1b646fea, type: 3} m_Name: m_EditorClassIdentifier: m_ExcludedPropertiesInInspector: - m_Script m_LockStageInInspector: m_StreamingVersion: 20170927 m_Priority: 10 m_StandbyUpdate: 2 m_LookAt: {fileID: 402024086} m_Follow: {fileID: 402024086} m_Lens: FieldOfView: 50 OrthographicSize: 10 NearClipPlane: 0.1 FarClipPlane: 50 Dutch: 0 LensShift: {x: 0, y: 0} m_Transitions: m_BlendHint: 0 m_InheritPosition: 0 m_OnCameraLive: m_PersistentCalls: m_Calls: [] m_TypeName: Cinemachine.CinemachineBrain+VcamActivatedEvent, Cinemachine, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null m_LegacyBlendHint: 0 m_ComponentOwner: {fileID: 1457675942} --- !u!4 &402024085 stripped Transform: m_CorrespondingSourceObject: {fileID: 4914135124288442, guid: 387be402c6e254edb8f04a699355f406, type: 3} m_PrefabInstance: {fileID: 1142627572} m_PrefabAsset: {fileID: 0} --- !u!4 &402024086 stripped 
Transform: m_CorrespondingSourceObject: {fileID: 4874788804663312, guid: 387be402c6e254edb8f04a699355f406, type: 3} m_PrefabInstance: {fileID: 1142627572} m_PrefabAsset: {fileID: 0} --- !u!1 &427756555 GameObject: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} serializedVersion: 6 m_Component: - component: {fileID: 427756557} - component: {fileID: 427756556} m_Layer: 0 m_Name: Game Settings m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 --- !u!114 &427756556 MonoBehaviour: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 427756555} m_Enabled: 1 m_EditorHideFlags: 0 m_Script: {fileID: 11500000, guid: 65022c6b389ac1e48845218a3a15af20, type: 3} m_Name: m_EditorClassIdentifier: player: {fileID: 402024086} playerCollisionRadius: 0.8 enemyCollisionRadius: 0.8 --- !u!4 &427756557 Transform: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 427756555} m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} m_LocalPosition: {x: 12.636259, y: -0.6796955, z: -18.553877} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: [] m_Father: {fileID: 0} m_RootOrder: 5 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} --- !u!1 &454054842 GameObject: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} serializedVersion: 6 m_Component: - component: {fileID: 454054846} - component: {fileID: 454054845} - component: {fileID: 454054844} - component: {fileID: 454054843} m_Layer: 0 m_Name: Ground m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 --- !u!64 &454054843 MeshCollider: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} 
m_GameObject: {fileID: 454054842} m_Material: {fileID: 0} m_IsTrigger: 0 m_Enabled: 1 serializedVersion: 3 m_Convex: 0 m_CookingOptions: 14 m_Mesh: {fileID: 10209, guid: 0000000000000000e000000000000000, type: 0} --- !u!23 &454054844 MeshRenderer: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 454054842} m_Enabled: 1 m_CastShadows: 0 m_ReceiveShadows: 1 m_DynamicOccludee: 1 m_MotionVectors: 1 m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_RenderingLayerMask: 4294967295 m_RendererPriority: 0 m_Materials: - {fileID: 2100000, guid: 5bc81860d99363345955e95c1557ed02, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 m_StaticBatchRoot: {fileID: 0} m_ProbeAnchor: {fileID: 0} m_LightProbeVolumeOverride: {fileID: 0} m_ScaleInLightmap: 1 m_ReceiveGI: 1 m_PreserveUVs: 0 m_IgnoreNormalsForChartDetection: 0 m_ImportantGI: 0 m_StitchLightmapSeams: 0 m_SelectedEditorRenderState: 3 m_MinimumChartSize: 4 m_AutoUVMaxDistance: 0.5 m_AutoUVMaxAngle: 89 m_LightmapParameters: {fileID: 0} m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 --- !u!33 &454054845 MeshFilter: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 454054842} m_Mesh: {fileID: 10209, guid: 0000000000000000e000000000000000, type: 0} --- !u!4 &454054846 Transform: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 454054842} m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} m_LocalPosition: {x: 10.26, y: -1.083, z: -18.93} m_LocalScale: {x: 10, y: 10, z: 10} m_Children: [] m_Father: {fileID: 0} m_RootOrder: 3 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} --- !u!1 &487209746 GameObject: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 1300351076624756, guid: 137eb4d5ff6dd4c04a4e2be53f003a59, type: 2} m_PrefabInstance: {fileID: 
0} m_PrefabAsset: {fileID: 0} serializedVersion: 6 m_Component: - component: {fileID: 487209747} - component: {fileID: 487209748} m_Layer: 8 m_Name: Global Post Processing Volume m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 --- !u!4 &487209747 Transform: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 4679847986911992, guid: 137eb4d5ff6dd4c04a4e2be53f003a59, type: 2} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 487209746} m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} m_LocalPosition: {x: 0, y: 0, z: 0} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: [] m_Father: {fileID: 0} m_RootOrder: 2 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} --- !u!114 &487209748 MonoBehaviour: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 487209746} m_Enabled: 1 m_EditorHideFlags: 0 m_Script: {fileID: 11500000, guid: 8b9a305e18de0c04dbd257a21cd47087, type: 3} m_Name: m_EditorClassIdentifier: sharedProfile: {fileID: 11400000, guid: ae1bad5c1fb2cec45883a3fc2900054e, type: 2} isGlobal: 1 blendDistance: 0 weight: 1 priority: 1 --- !u!1 &716467324 GameObject: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} serializedVersion: 6 m_Component: - component: {fileID: 716467325} - component: {fileID: 716467326} m_Layer: 0 m_Name: Enemy Spawner m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 --- !u!4 &716467325 Transform: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 716467324} m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} m_LocalPosition: {x: 0, y: 0, z: 0} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: [] m_Father: {fileID: 0} m_RootOrder: 6 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} --- !u!114 &716467326 
MonoBehaviour: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 716467324} m_Enabled: 1 m_EditorHideFlags: 0 m_Script: {fileID: 11500000, guid: 9819dff8a58816942a75b0586ae132b6, type: 3} m_Name: m_EditorClassIdentifier: spawnEnemies: 1 useECS: 1 enemySpawnRadius: 17 enemyPrefab: {fileID: 40720117310684751, guid: 0e3f83bdd0786fc418e17d7be7fd67a3, type: 3} spawnsPerInterval: 1 spawnInterval: 1 --- !u!1001 &1142627572 PrefabInstance: m_ObjectHideFlags: 0 serializedVersion: 2 m_Modification: m_TransformParent: {fileID: 0} m_Modifications: - target: {fileID: 1500293480320252, guid: 387be402c6e254edb8f04a699355f406, type: 3} propertyPath: m_Name value: Player objectReference: {fileID: 0} - target: {fileID: 4874788804663312, guid: 387be402c6e254edb8f04a699355f406, type: 3} propertyPath: m_LocalPosition.x value: 10.26 objectReference: {fileID: 0} - target: {fileID: 4874788804663312, guid: 387be402c6e254edb8f04a699355f406, type: 3} propertyPath: m_LocalPosition.y value: -1.01 objectReference: {fileID: 0} - target: {fileID: 4874788804663312, guid: 387be402c6e254edb8f04a699355f406, type: 3} propertyPath: m_LocalPosition.z value: -18.93 objectReference: {fileID: 0} - target: {fileID: 4874788804663312, guid: 387be402c6e254edb8f04a699355f406, type: 3} propertyPath: m_LocalRotation.x value: 0 objectReference: {fileID: 0} - target: {fileID: 4874788804663312, guid: 387be402c6e254edb8f04a699355f406, type: 3} propertyPath: m_LocalRotation.y value: 0 objectReference: {fileID: 0} - target: {fileID: 4874788804663312, guid: 387be402c6e254edb8f04a699355f406, type: 3} propertyPath: m_LocalRotation.z value: 0 objectReference: {fileID: 0} - target: {fileID: 4874788804663312, guid: 387be402c6e254edb8f04a699355f406, type: 3} propertyPath: m_LocalRotation.w value: 1 objectReference: {fileID: 0} - target: {fileID: 4874788804663312, guid: 387be402c6e254edb8f04a699355f406, type: 3} propertyPath: 
m_RootOrder value: 0 objectReference: {fileID: 0} - target: {fileID: 4874788804663312, guid: 387be402c6e254edb8f04a699355f406, type: 3} propertyPath: m_LocalEulerAnglesHint.x value: 0 objectReference: {fileID: 0} - target: {fileID: 4874788804663312, guid: 387be402c6e254edb8f04a699355f406, type: 3} propertyPath: m_LocalEulerAnglesHint.y value: 90 objectReference: {fileID: 0} - target: {fileID: 4874788804663312, guid: 387be402c6e254edb8f04a699355f406, type: 3} propertyPath: m_LocalEulerAnglesHint.z value: 0 objectReference: {fileID: 0} - target: {fileID: 114445215679820892, guid: 387be402c6e254edb8f04a699355f406, type: 3} propertyPath: mainCamera value: objectReference: {fileID: 1496275639} - target: {fileID: 114445215679820892, guid: 387be402c6e254edb8f04a699355f406, type: 3} propertyPath: whatIsGround.m_Bits value: 512 objectReference: {fileID: 0} - target: {fileID: 114167452494013206, guid: 387be402c6e254edb8f04a699355f406, type: 3} propertyPath: bulletPrefab value: objectReference: {fileID: 1218724716591446, guid: c4b4b81382c254a999d09f9375fbe2c7, type: 3} - target: {fileID: 114167452494013206, guid: 387be402c6e254edb8f04a699355f406, type: 3} propertyPath: spreadAmount value: 20 objectReference: {fileID: 0} - target: {fileID: 114167452494013206, guid: 387be402c6e254edb8f04a699355f406, type: 3} propertyPath: fireRate value: 0.1 objectReference: {fileID: 0} - target: {fileID: 114167452494013206, guid: 387be402c6e254edb8f04a699355f406, type: 3} propertyPath: spreadShot value: 1 objectReference: {fileID: 0} - target: {fileID: 114167452494013206, guid: 387be402c6e254edb8f04a699355f406, type: 3} propertyPath: useECS value: 1 objectReference: {fileID: 0} - target: {fileID: 136433486275610172, guid: 387be402c6e254edb8f04a699355f406, type: 3} propertyPath: m_Center.y value: 1.1 objectReference: {fileID: 0} - target: {fileID: 1194508039, guid: 387be402c6e254edb8f04a699355f406, type: 3} propertyPath: ConversionMode value: 1 objectReference: {fileID: 0} m_RemovedComponents: 
[] m_SourcePrefab: {fileID: 100100000, guid: 387be402c6e254edb8f04a699355f406, type: 3} --- !u!1 &1379798260 GameObject: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} serializedVersion: 6 m_Component: - component: {fileID: 1379798261} - component: {fileID: 1379798262} m_Layer: 0 m_Name: Directional Light m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 --- !u!4 &1379798261 Transform: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1379798260} m_LocalRotation: {x: 0.42182714, y: -0, z: -0, w: 0.9066763} m_LocalPosition: {x: 0, y: 0, z: 0} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: - {fileID: 1552434843} m_Father: {fileID: 0} m_RootOrder: 4 m_LocalEulerAnglesHint: {x: 49.9, y: 0, z: 0} --- !u!108 &1379798262 Light: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1379798260} m_Enabled: 1 serializedVersion: 9 m_Type: 1 m_Color: {r: 0.4575472, g: 0.72401524, b: 1, a: 1} m_Intensity: 0.5 m_Range: 10 m_SpotAngle: 30 m_InnerSpotAngle: 21.80208 m_CookieSize: 10 m_Shadows: m_Type: 2 m_Resolution: -1 m_CustomResolution: -1 m_Strength: 0.817 m_Bias: 0.05 m_NormalBias: 0.4 m_NearPlane: 0.2 m_CullingMatrixOverride: e00: 1 e01: 0 e02: 0 e03: 0 e10: 0 e11: 1 e12: 0 e13: 0 e20: 0 e21: 0 e22: 1 e23: 0 e30: 0 e31: 0 e32: 0 e33: 1 m_UseCullingMatrixOverride: 0 m_Cookie: {fileID: 0} m_DrawHalo: 0 m_Flare: {fileID: 0} m_RenderMode: 1 m_CullingMask: serializedVersion: 2 m_Bits: 4294967295 m_RenderingLayerMask: 1 m_Lightmapping: 4 m_LightShadowCasterMode: 0 m_AreaSize: {x: 1, y: 1} m_BounceIntensity: 1 m_ColorTemperature: 6570 m_UseColorTemperature: 0 m_BoundingSphereOverride: {x: 0, y: 0, z: 0, w: 0} m_UseBoundingSphereOverride: 0 m_ShadowRadius: 0 m_ShadowAngle: 15.4 --- !u!1 
&1415310047 GameObject: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} serializedVersion: 6 m_Component: - component: {fileID: 1415310049} - component: {fileID: 1415310048} m_Layer: 0 m_Name: Bullet Impact Object Pool m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 --- !u!114 &1415310048 MonoBehaviour: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1415310047} m_Enabled: 1 m_EditorHideFlags: 0 m_Script: {fileID: 11500000, guid: caa9c1b965593a04b97699021e55094b, type: 3} m_Name: m_EditorClassIdentifier: bulletHitPrefab: {fileID: 1607228222434220, guid: 3c6a9770554a2499da6dc5e68b870e15, type: 3} impactPoolSize: 100 --- !u!4 &1415310049 Transform: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1415310047} m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} m_LocalPosition: {x: 0, y: 0, z: 0} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: [] m_Father: {fileID: 0} m_RootOrder: 7 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} --- !u!1 &1457675941 GameObject: m_ObjectHideFlags: 3 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} serializedVersion: 6 m_Component: - component: {fileID: 1457675942} - component: {fileID: 1457675945} - component: {fileID: 1457675944} - component: {fileID: 1457675943} m_Layer: 0 m_Name: cm m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 --- !u!4 &1457675942 Transform: m_ObjectHideFlags: 3 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1457675941} m_LocalRotation: {x: -0.17240052, y: -0.82731766, z: 0.3897204, w: 0.3659804} m_LocalPosition: {x: -5.5125513, y: -14.981099, z: -0.6093712} 
m_LocalScale: {x: 1.0000002, y: 1, z: 1.0000001} m_Children: [] m_Father: {fileID: 220979867} m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} --- !u!114 &1457675943 MonoBehaviour: m_ObjectHideFlags: 3 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1457675941} m_Enabled: 1 m_EditorHideFlags: 0 m_Script: {fileID: 11500000, guid: f4044717213e31446939f7bd49c896ea, type: 3} m_Name: m_EditorClassIdentifier: m_TrackedObjectOffset: {x: 0, y: 1, z: 0} m_LookaheadTime: 0.1 m_LookaheadSmoothing: 10 m_LookaheadIgnoreY: 0 m_HorizontalDamping: 0.5 m_VerticalDamping: 0.5 m_ScreenX: 0.5 m_ScreenY: 0.5 m_DeadZoneWidth: 0.1 m_DeadZoneHeight: 0.1 m_SoftZoneWidth: 0.8 m_SoftZoneHeight: 0.8 m_BiasX: 0 m_BiasY: 0 m_CenterOnActivate: 1 --- !u!114 &1457675944 MonoBehaviour: m_ObjectHideFlags: 3 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1457675941} m_Enabled: 1 m_EditorHideFlags: 0 m_Script: {fileID: 11500000, guid: fa7155796051b734daa718462081dc5f, type: 3} m_Name: m_EditorClassIdentifier: m_BindingMode: 4 m_FollowOffset: {x: -5.5, y: 10, z: 5} m_XDamping: 1 m_YDamping: 1 m_ZDamping: 1 m_AngularDampingMode: 0 m_PitchDamping: 0 m_YawDamping: 0 m_RollDamping: 0 m_AngularDamping: 0 --- !u!114 &1457675945 MonoBehaviour: m_ObjectHideFlags: 3 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1457675941} m_Enabled: 1 m_EditorHideFlags: 0 m_Script: {fileID: 11500000, guid: ac0b09e7857660247b1477e93731de29, type: 3} m_Name: m_EditorClassIdentifier: --- !u!1 &1496275636 GameObject: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} serializedVersion: 6 m_Component: - component: {fileID: 1496275640} - component: {fileID: 1496275639} - component: {fileID: 1496275638} - component: {fileID: 1496275637} - 
component: {fileID: 1496275642} - component: {fileID: 1496275641} m_Layer: 0 m_Name: Main Camera m_TagString: MainCamera m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 --- !u!81 &1496275637 AudioListener: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1496275636} m_Enabled: 1 --- !u!124 &1496275638 Behaviour: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1496275636} m_Enabled: 1 --- !u!20 &1496275639 Camera: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1496275636} m_Enabled: 1 serializedVersion: 2 m_ClearFlags: 1 m_BackGroundColor: {r: 0.19215687, g: 0.3019608, b: 0.4745098, a: 0} m_projectionMatrixMode: 1 m_GateFitMode: 2 m_FOVAxisMode: 0 m_SensorSize: {x: 36, y: 24} m_LensShift: {x: 0, y: 0} m_FocalLength: 50 m_NormalizedViewPortRect: serializedVersion: 2 x: 0 y: 0 width: 1 height: 1 near clip plane: 0.1 far clip plane: 50 field of view: 50 orthographic: 0 orthographic size: 10 m_Depth: -1 m_CullingMask: serializedVersion: 2 m_Bits: 4294967295 m_RenderingPath: 3 m_TargetTexture: {fileID: 0} m_TargetDisplay: 0 m_TargetEye: 3 m_HDR: 1 m_AllowMSAA: 1 m_AllowDynamicResolution: 0 m_ForceIntoRT: 1 m_OcclusionCulling: 1 m_StereoConvergence: 10 m_StereoSeparation: 0.022 --- !u!4 &1496275640 Transform: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1496275636} m_LocalRotation: {x: 0.1724006, y: 0.8273177, z: -0.3897202, w: 0.36598048} m_LocalPosition: {x: -8.095254, y: 5.072219, z: 4.8838463} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: [] m_Father: {fileID: 2402806} m_RootOrder: 1 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} --- !u!114 &1496275641 MonoBehaviour: 
m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1496275636} m_Enabled: 1 m_EditorHideFlags: 0 m_Script: {fileID: 11500000, guid: 948f4100a11a5c24981795d21301da5c, type: 3} m_Name: m_EditorClassIdentifier: volumeTrigger: {fileID: 402024085} volumeLayer: serializedVersion: 2 m_Bits: 256 stopNaNPropagation: 1 finalBlitToCameraTarget: 1 antialiasingMode: 1 temporalAntialiasing: jitterSpread: 0.75 sharpness: 0.25 stationaryBlending: 0.95 motionBlending: 0.85 subpixelMorphologicalAntialiasing: quality: 2 fastApproximateAntialiasing: fastMode: 0 keepAlpha: 0 fog: enabled: 1 excludeSkybox: 1 debugLayer: lightMeter: width: 512 height: 256 showCurves: 1 histogram: width: 512 height: 256 channel: 3 waveform: exposure: 0.12 height: 256 vectorscope: size: 256 exposure: 0.12 overlaySettings: linearDepth: 0 motionColorIntensity: 4 motionGridSize: 64 colorBlindnessType: 0 colorBlindnessStrength: 1 m_Resources: {fileID: 11400000, guid: d82512f9c8e5d4a4d938b575d47f88d4, type: 2} m_ShowToolkit: 0 m_ShowCustomSorter: 0 breakBeforeColorGrading: 0 m_BeforeTransparentBundles: [] m_BeforeStackBundles: [] m_AfterStackBundles: [] --- !u!114 &1496275642 MonoBehaviour: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1496275636} m_Enabled: 1 m_EditorHideFlags: 0 m_Script: {fileID: 11500000, guid: 72ece51f2901e7445ab60da3685d6b5f, type: 3} m_Name: m_EditorClassIdentifier: m_ShowDebugText: 0 m_ShowCameraFrustum: 1 m_IgnoreTimeScale: 0 m_WorldUpOverride: {fileID: 0} m_UpdateMethod: 2 m_DefaultBlend: m_Style: 1 m_Time: 2 m_CustomCurve: serializedVersion: 2 m_Curve: [] m_PreInfinity: 2 m_PostInfinity: 2 m_RotationOrder: 4 m_CustomBlends: {fileID: 0} m_CameraCutEvent: m_PersistentCalls: m_Calls: [] m_TypeName: Cinemachine.CinemachineBrain+BrainEvent, Cinemachine, Version=0.0.0.0, Culture=neutral, 
PublicKeyToken=null m_CameraActivatedEvent: m_PersistentCalls: m_Calls: [] m_TypeName: Cinemachine.CinemachineBrain+VcamActivatedEvent, Cinemachine, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null --- !u!1 &1552434841 GameObject: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} serializedVersion: 6 m_Component: - component: {fileID: 1552434843} - component: {fileID: 1552434842} m_Layer: 0 m_Name: High light m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 --- !u!108 &1552434842 Light: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1552434841} m_Enabled: 1 serializedVersion: 9 m_Type: 1 m_Color: {r: 0.9622642, g: 0.9622642, b: 0.9622642, a: 1} m_Intensity: 0.11 m_Range: 10 m_SpotAngle: 30 m_InnerSpotAngle: 21.80208 m_CookieSize: 10 m_Shadows: m_Type: 0 m_Resolution: -1 m_CustomResolution: -1 m_Strength: 0.817 m_Bias: 0.05 m_NormalBias: 0.4 m_NearPlane: 0.2 m_CullingMatrixOverride: e00: 1 e01: 0 e02: 0 e03: 0 e10: 0 e11: 1 e12: 0 e13: 0 e20: 0 e21: 0 e22: 1 e23: 0 e30: 0 e31: 0 e32: 0 e33: 1 m_UseCullingMatrixOverride: 0 m_Cookie: {fileID: 0} m_DrawHalo: 0 m_Flare: {fileID: 0} m_RenderMode: 1 m_CullingMask: serializedVersion: 2 m_Bits: 4294967295 m_RenderingLayerMask: 1 m_Lightmapping: 4 m_LightShadowCasterMode: 0 m_AreaSize: {x: 1, y: 1} m_BounceIntensity: 0 m_ColorTemperature: 6570 m_UseColorTemperature: 0 m_BoundingSphereOverride: {x: 0, y: 0, z: 0, w: 0} m_UseBoundingSphereOverride: 0 m_ShadowRadius: 0 m_ShadowAngle: 15.4 --- !u!4 &1552434843 Transform: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1552434841} m_LocalRotation: {x: -0.15697315, y: 0.6948365, z: -0.45876932, w: 0.53112376} m_LocalPosition: {x: 0, y: 0, z: 0} m_LocalScale: {x: 1, y: 1, z: 
1} m_Children: [] m_Father: {fileID: 1379798261} m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 16.97, y: 112.739006, z: 0}
{'repo_name': 'UnityTechnologies/AngryBots_ECS', 'stars': '286', 'repo_language': 'C#', 'file_name': 'AudioManager.asset', 'mime_type': 'text/plain', 'hash': 3687011847798389651, 'source_dataset': 'data'}
/*
 * Copyright 2014-2017 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.hawkular.metrics.core.service;

import static org.hawkular.metrics.model.AvailabilityType.UP;

import java.util.HashMap;
import java.util.Map;

import org.hawkular.metrics.model.AvailabilityBucketPoint;
import org.hawkular.metrics.model.AvailabilityType;
import org.hawkular.metrics.model.Buckets;
import org.hawkular.metrics.model.DataPoint;

/**
 * Accumulates availability data points to produce an {@link AvailabilityBucketPoint}.
 * <p>
 * Data points must be fed in strictly ascending timestamp order via {@link #increment(DataPoint)};
 * the finished bucket is obtained with {@link #toBucketPoint()}.
 *
 * @author Thomas Segismont
 */
final class AvailabilityDataPointCollector {
    private final Buckets buckets;
    private final long bucketStart;

    // Most recently accepted point; null until the first increment() call.
    private DataPoint<AvailabilityType> previous;
    // Accumulated time spent in each availability state within this bucket.
    private Map<AvailabilityType, Long> durationMap;
    // Timestamp at which the latest non-UP period was last observed.
    private long lastNotUptime;
    // Count of entries into a non-UP state (initial non-UP point, or UP -> non-UP transition).
    private long notUpCount;
    // Total number of data points collected.
    private long samples;

    AvailabilityDataPointCollector(Buckets buckets, int bucketIndex) {
        this.buckets = buckets;
        this.bucketStart = buckets.getBucketStart(bucketIndex);
        this.durationMap = new HashMap<>();
    }

    /**
     * Folds one data point into the bucket state.
     *
     * @param dataPoint next point; its timestamp must be strictly greater than the previous one
     * @throws IllegalStateException if points arrive out of time-ascending order
     */
    void increment(DataPoint<AvailabilityType> dataPoint) {
        long ts = dataPoint.getTimestamp();
        AvailabilityType current = dataPoint.getValue();

        if (previous != null && ts <= previous.getTimestamp()) {
            throw new IllegalStateException("Expected stream sorted in time ascending order");
        }
        samples++;

        if (previous == null) {
            // The time between the bucket start and the first point is attributed
            // to the first point's state.
            accumulate(current, ts - bucketStart);
            if (current != UP) {
                lastNotUptime = ts;
                notUpCount++;
            }
        } else {
            // Close out the interval spent in the previous point's state.
            accumulate(previous.getValue(), ts - previous.getTimestamp());
            if (current != UP) {
                if (previous.getValue() == UP) {
                    notUpCount++;
                }
                lastNotUptime = ts;
            } else if (previous.getValue() != UP) {
                lastNotUptime = ts;
            }
        }

        previous = dataPoint;
    }

    /**
     * Finalizes the bucket: attributes the tail interval (last point up to the bucket end)
     * to the last point's state, then builds the result.
     *
     * @return the completed {@link AvailabilityBucketPoint} for this bucket
     */
    AvailabilityBucketPoint toBucketPoint() {
        long to = bucketStart + buckets.getStep();
        accumulate(previous.getValue(), to - previous.getTimestamp());
        if (previous.getValue() != UP) {
            lastNotUptime = to;
        }
        double uptimeRatio = (double) durationMap.getOrDefault(UP, 0L) / buckets.getStep();
        return new AvailabilityBucketPoint.Builder(bucketStart, to)
                .setDurationMap(durationMap)
                .setLastNotUptime(lastNotUptime)
                .setUptimeRatio(uptimeRatio)
                .setNotUptimeCount(notUpCount)
                .setSamples(samples)
                .build();
    }

    // Adds the given duration to the running total for the given availability state.
    private void accumulate(AvailabilityType type, long duration) {
        durationMap.merge(type, duration, Long::sum);
    }
}
{'repo_name': 'hawkular/hawkular-metrics', 'stars': '222', 'repo_language': 'Java', 'file_name': 'main.yml', 'mime_type': 'text/plain', 'hash': -2132194052641402525, 'source_dataset': 'data'}
/************************************************* * Perl-Compatible Regular Expressions * *************************************************/ /* PCRE is a library of functions to support regular expressions whose syntax and semantics are as close as possible to those of the Perl 5 language. Written by: Philip Hazel <ph10@cam.ac.uk> Copyright (c) 1997-2003 University of Cambridge ----------------------------------------------------------------------------- Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the University of Cambridge nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
----------------------------------------------------------------------------- See the file Tech.Notes for some information on the internals. */ /* This file is compiled on its own as part of the PCRE library. However, it is also included in the compilation of dftables.c, in which case the macro DFTABLES is defined. */ #ifndef DFTABLES #include "internal.h" #endif /************************************************* * Create PCRE character tables * *************************************************/ /* This function builds a set of character tables for use by PCRE and returns a pointer to them. They are build using the ctype functions, and consequently their contents will depend upon the current locale setting. When compiled as part of the library, the store is obtained via pcre_malloc(), but when compiled inside dftables, use malloc(). Arguments: none Returns: pointer to the contiguous block of data */ const unsigned char * pcre_maketables(void) { unsigned char *yield, *p; int i; #ifndef DFTABLES yield = (unsigned char*)(pcre_malloc)(tables_length); #else yield = (unsigned char*)malloc(tables_length); #endif if (yield == NULL) return NULL; p = yield; /* First comes the lower casing table */ for (i = 0; i < 256; i++) *p++ = tolower(i); /* Next the case-flipping table */ for (i = 0; i < 256; i++) *p++ = islower(i)? toupper(i) : tolower(i); /* Then the character class tables. Don't try to be clever and save effort on exclusive ones - in some locales things may be different. Note that the table for "space" includes everything "isspace" gives, including VT in the default locale. This makes it work for the POSIX class [:space:]. 
*/ memset(p, 0, cbit_length); for (i = 0; i < 256; i++) { if (isdigit(i)) { p[cbit_digit + i/8] |= 1 << (i&7); p[cbit_word + i/8] |= 1 << (i&7); } if (isupper(i)) { p[cbit_upper + i/8] |= 1 << (i&7); p[cbit_word + i/8] |= 1 << (i&7); } if (islower(i)) { p[cbit_lower + i/8] |= 1 << (i&7); p[cbit_word + i/8] |= 1 << (i&7); } if (i == '_') p[cbit_word + i/8] |= 1 << (i&7); if (isspace(i)) p[cbit_space + i/8] |= 1 << (i&7); if (isxdigit(i))p[cbit_xdigit + i/8] |= 1 << (i&7); if (isgraph(i)) p[cbit_graph + i/8] |= 1 << (i&7); if (isprint(i)) p[cbit_print + i/8] |= 1 << (i&7); if (ispunct(i)) p[cbit_punct + i/8] |= 1 << (i&7); if (iscntrl(i)) p[cbit_cntrl + i/8] |= 1 << (i&7); } p += cbit_length; /* Finally, the character type table. In this, we exclude VT from the white space chars, because Perl doesn't recognize it as such for \s and for comments within regexes. */ for (i = 0; i < 256; i++) { int x = 0; if (i != 0x0b && isspace(i)) x += ctype_space; if (isalpha(i)) x += ctype_letter; if (isdigit(i)) x += ctype_digit; if (isxdigit(i)) x += ctype_xdigit; if (isalnum(i) || i == '_') x += ctype_word; /* Note: strchr includes the terminating zero in the characters it considers. In this instance, that is ok because we want binary zero to be flagged as a meta-character, which in this sense is any character that terminates a run of data characters. */ if (strchr("*+?{^.$|()[", i) != 0) x += ctype_meta; *p++ = x; } return yield; } /* End of maketables.c */
{'repo_name': 'etternagame/etterna', 'stars': '191', 'repo_language': 'C++', 'file_name': 'NoteSkin.lua', 'mime_type': 'text/plain', 'hash': 680654676079607946, 'source_dataset': 'data'}
--- Makefile.in.orig 2009-07-04 02:29:28.000000000 -0300 +++ Makefile.in 2011-08-01 21:22:47.000000000 -0300 @@ -142,9 +142,7 @@ $(INSTALL_PROG) $(CTAGS_EXEC) $@ && chmod 755 $@ $(DEST_ETAGS): - - if [ -x $(DEST_CTAGS) ]; then \ - cd $(bindir) && $(SLINK) $(CTAGS_EXEC) $(ETAGS_EXEC); \ - fi + - cd $(bindir) && $(SLINK) $(CTAGS_EXEC) $(ETAGS_EXEC) # # install the man pages @@ -157,9 +155,7 @@ - $(INSTALL_DATA) $(srcdir)/$(MANPAGE) $@ && chmod 644 $@ $(DEST_EMAN): - - if [ -f $(DEST_CMAN) ]; then \ - cd $(man1dir) && $(SLINK) $(CMAN) $(EMAN); \ - fi + - cd $(man1dir) && $(SLINK) $(CMAN) $(EMAN) # # install the library
{'repo_name': 'rudix-mac/rudix', 'stars': '181', 'repo_language': 'Makefile', 'file_name': 'Makefile', 'mime_type': 'text/plain', 'hash': 3695179076155737937, 'source_dataset': 'data'}
{$mode objfpc}
{$h+}
{ pkghandler: base class and registry for fppkg package actions.

  An "action" (e.g. build, install) is a TPackageHandler subclass registered
  under a string key; ExecuteAction looks the class up, instantiates it and
  runs it, remembering which (package, action) pairs already ran so the same
  action is never executed twice in one session. }
unit pkghandler;

{ OS/2 and GO32V2 have no TProcess support; fall back to
  SysUtils.ExecuteProcess there (see ExecuteProcess below). }
{$IFDEF OS2}
{$DEFINE NO_UNIT_PROCESS}
{$ENDIF OS2}
{$IFDEF GO32V2}
{$DEFINE NO_UNIT_PROCESS}
{$ENDIF GO32V2}
{$ifndef NO_UNIT_PROCESS}
{$define HAS_UNIT_PROCESS}
{$endif NO_UNIT_PROCESS}

interface

uses
  Classes,SysUtils,
  pkgglobals,
  pkgoptions,
{$ifdef HAS_UNIT_PROCESS}
  process,
{$endif HAS_UNIT_PROCESS}
  fprepos,
  pkgFppkg;

type
  { TPackageHandler

    Abstract base for one package action. Subclasses implement Execute;
    the logging helpers prefix every message with "[PackageName] ". }
  TPackageHandler = Class(TComponent)
  private
    FPackageName : string;
    FPackageManager: tpkgFPpkg;
  Protected
    // Log/Error helpers that prepend PackageLogPrefix to the message.
    Procedure Log(Level: TLogLevel;Msg : String);
    Procedure Log(Level: TLogLevel;Fmt : String; const Args : array of const);
    Procedure Error(Msg : String);
    Procedure Error(Fmt : String; const Args : array of const);
    // Runs an external program; captures output via TProcess when available.
    Function ExecuteProcess(Const Prog,Args:String):Integer;
    // Changes the process working directory, raising on failure.
    Procedure SetCurrentDir(Const ADir:String);
    Property PackageManager:TpkgFPpkg Read FPackageManager;
  Public
    Constructor Create(AOwner:TComponent; APackageManager:TpkgFPpkg; const APackageName:string); virtual;
    function PackageLogPrefix:String;
    // Dispatches another action for a (possibly different) package.
    function ExecuteAction(const APackageName,AAction:string): Boolean;
    // The action itself; must be provided by subclasses.
    function Execute: Boolean; virtual; abstract;
    Property PackageName:string Read FPackageName;
  end;
  TPackageHandlerClass = class of TPackageHandler;

  EPackageHandler = Class(Exception);

// Actions/PkgHandler
procedure RegisterPkgHandler(const AAction:string;pkghandlerclass:TPackageHandlerClass);
function GetPkgHandler(const AAction:string):TPackageHandlerClass;
function ExecuteAction(const APackageName,AAction:string; PackageManager: TpkgFPpkg): Boolean;
function PackageManifestFile(APackage:TFPPackage): String;
procedure ClearExecutedAction;

Implementation

uses
  typinfo,
  contnrs,
  uriparser,
  pkgrepos,
  pkgmessages;

var
  // Maps action name -> TPackageHandlerClass (see RegisterPkgHandler).
  PkgHandlerList : TFPHashList;
  // Set of "<package><action>" keys already executed this session.
  ExecutedActions : TFPHashList;
  // NOTE(review): CurrentDir appears unused in this unit — verify before removal.
  CurrentDir : string;

{ Registers a handler class for an action name; raises if the name is taken. }
procedure RegisterPkgHandler(const AAction:string;pkghandlerclass:TPackageHandlerClass);
begin
  if PkgHandlerList.Find(AAction)<>nil then
    begin
      Raise EPackageHandler.CreateFmt(SErrActionAlreadyRegistered,[AAction]);
      exit; // unreachable after raise; kept as in original
    end;
  PkgHandlerList.Add(AAction,pkghandlerclass);
end;

{ Looks up the handler class for an action; raises if unknown. }
function GetPkgHandler(const AAction:string):TPackageHandlerClass;
begin
  result:=TPackageHandlerClass(PkgHandlerList.Find(AAction));
  if result=nil then
    Raise EPackageHandler.CreateFmt(SErrActionNotFound,[AAction]);
end;

{ Runs AAction for APackageName exactly once per session.
  Returns True immediately (without re-running) if the same
  package/action pair was already started or completed. }
function ExecuteAction(const APackageName,AAction:string; PackageManager: TpkgFPpkg): Boolean;
var
  pkghandlerclass : TPackageHandlerClass;
  FullActionName : string;
begin
  Result := True;
  // Check if we have already executed or are executing the action
  FullActionName:=APackageName+AAction;
  if ExecutedActions.Find(FullActionName)<>nil then
    begin
      Log(llDebug,'Already executed or executing action '+FullActionName);
      exit;
    end;
  // Mark as in-progress before running, so recursive dispatch cannot loop.
  ExecutedActions.Add(FullActionName,Pointer(PtrUInt(1)));
  // Create action handler class
  pkghandlerclass:=GetPkgHandler(AAction);
  With pkghandlerclass.Create(nil,PackageManager,APackageName) do
    try
      Log(llDebug,SLogRunAction+' start',[AAction]);
      Result := Execute;
      Log(llDebug,SLogRunAction+' end',[AAction]);
    finally
      Free;
    end;
end;

{ Returns the manifest file name.
  NOTE(review): the APackage parameter is ignored — the result is always
  the global ManifestFileName; confirm this is intentional. }
function PackageManifestFile(APackage:TFPPackage): String;
begin
  Result:=ManifestFileName;
end;

{ Forgets all executed actions so they may run again. }
procedure ClearExecutedAction;
begin
  ExecutedActions.Clear;
end;

{ TPackageHandler }

Constructor TPackageHandler.Create(AOwner: TComponent; APackageManager: TpkgFPpkg; const APackageName: string);
begin
  inherited Create(AOwner);
  FPackageName:=APackageName;
  FPackageManager:=APackageManager;
end;

{$ifdef HAS_UNIT_PROCESS}
{ Runs "Path ComLine" with piped output, forwarding each output line to the
  log at llProgress level. Returns the child's exit status (-1 on failure). }
function ExecuteFPC(const Path: string; const ComLine: string): integer;
var
  P: TProcess;
  ConsoleOutput: TMemoryStream;
  BytesRead: longint;

  { Pulls up to READ_BYTES from the pipe into ConsoleOutput, logs any
    complete lines found, and returns the number of bytes read (0 = EOF,
    after sleeping 100 ms when no data was available). }
  function ReadFromStream: longint;
  const
    READ_BYTES = 2048;
  var
    n: longint;
    BuffPos: longint;
    sLine: string;
    ch: char;
  begin
    // make sure we have room
    ConsoleOutput.SetSize(BytesRead + READ_BYTES);
    // try reading it
    n := P.Output.Read((ConsoleOutput.Memory + BytesRead)^, READ_BYTES);
    if n > 0 then
      begin
        Inc(BytesRead, n);
        sLine := '';
        BuffPos := ConsoleOutput.Position;
        //read lines from the stream
        repeat
          ConsoleOutput.Read(ch,1);
          if ch in [#10, #13] then
            begin
              log(llProgress,sLine);
              sLine := '';
              // Remember position after the last complete line, so a
              // partial trailing line is re-read on the next call.
              BuffPos := ConsoleOutput.Position;
            end
          else
            sLine := sLine + ch;
        until ConsoleOutput.Position >= BytesRead;
        ConsoleOutput.Position := BuffPos;
      end
    else
      begin
        // no data, wait 100 ms
        Sleep(100);
      end;
    Result := n;
  end;

begin
  result := -1;
  BytesRead := 0;
  ConsoleOutput := TMemoryStream.Create;
  try
    P := TProcess.Create(nil);
    try
      P.CommandLine := Path + ' ' + ComLine;
      P.Options := [poUsePipes];
      P.Execute;
      // Drain output while the child runs...
      while P.Running do
        ReadFromStream;
      // read last part
      repeat
      until ReadFromStream = 0;
      ConsoleOutput.SetSize(BytesRead);
      result := P.ExitStatus;
    finally
      P.Free;
    end;
  finally
    ConsoleOutput.Free;
  end;
end;
{$endif HAS_UNIT_PROCESS}

{ Runs an external program, logging the command line first. Uses the
  TProcess-based ExecuteFPC when available, SysUtils otherwise. }
Function TPackageHandler.ExecuteProcess(Const Prog,Args:String):Integer;
begin
  Log(llCommands,SLogExecute,[Prog,Args]);
  Flush(StdOut);
{$ifdef HAS_UNIT_PROCESS}
  Result:=ExecuteFPC(Prog,Args);
{$else HAS_UNIT_PROCESS}
  Result:=SysUtils.ExecuteProcess(Prog,Args);
{$endif HAS_UNIT_PROCESS}
end;

{ Changes the current directory; raises (via Error) on failure. }
Procedure TPackageHandler.SetCurrentDir(Const ADir:String);
begin
  Log(llCommands,SLogChangeDir,[ADir]);
  if not SysUtils.SetCurrentDir(ADir) then
    Error(SErrChangeDirFailed,[ADir]);
end;

{ "[<package>] " prefix for log messages; empty when no package is set. }
function TPackageHandler.PackageLogPrefix:String;
begin
  if PackageName<>'' then
    Result:='['+PackageName+'] '
  else
    Result:='';
end;

{ Convenience wrapper around the unit-level ExecuteAction. }
function TPackageHandler.ExecuteAction(const APackageName, AAction: string): Boolean;
begin
  Result := pkghandler.ExecuteAction(APackageName,AAction,PackageManager);
end;

Procedure TPackageHandler.Log(Level:TLogLevel; Msg:String);
begin
  pkgglobals.Log(Level,PackageLogPrefix+Msg);
end;

Procedure TPackageHandler.Log(Level:TLogLevel; Fmt:String; const Args:array of const);
begin
  pkgglobals.log(Level,PackageLogPrefix+Fmt,Args);
end;

Procedure TPackageHandler.Error(Msg:String);
begin
  pkgglobals.Error(PackageLogPrefix+Msg);
end;

Procedure TPackageHandler.Error(Fmt:String; const Args:array of const);
begin
  pkgglobals.Error(PackageLogPrefix+Fmt,Args);
end;

initialization
  PkgHandlerList:=TFPHashList.Create;
  ExecutedActions:=TFPHashList.Create;
finalization
  FreeAndNil(PkgHandlerList);
  FreeAndNil(ExecutedActions);
end.
{'repo_name': 'graemeg/freepascal', 'stars': '350', 'repo_language': 'Pascal', 'file_name': 'Makefile', 'mime_type': 'text/plain', 'hash': 2139526469859544319, 'source_dataset': 'data'}
/* * Copyright (C) 2013 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "suggest/policyimpl/typing/typing_suggest_policy.h" namespace latinime { const TypingSuggestPolicy TypingSuggestPolicy::sInstance; } // namespace latinime
{'repo_name': 'smc/Indic-Keyboard', 'stars': '103', 'repo_language': 'Java', 'file_name': 'sample.combined', 'mime_type': 'text/plain', 'hash': 1180582089611446700, 'source_dataset': 'data'}
<?xml version="1.0" encoding="utf-8"?> <selector xmlns:android="http://schemas.android.com/apk/res/android" xmlns:app="http://schemas.android.com/apk/res-auto" xmlns:xliff="urn:oasis:names:tc:xliff:document:1.2" xmlns:aapt="http://schemas.android.com/aapt"> <item android:state_focused="true" android:drawable="@drawable/aul"/> <item android:state_pressed="true" android:drawable="@drawable/aul"/> <item android:state_selected="true" android:drawable="@drawable/aul"/> <item android:drawable="@drawable/auk"/> </selector>
{'repo_name': 'HelloHuDi/AndroidReverseNotes', 'stars': '116', 'repo_language': 'Smali', 'file_name': 'v.java', 'mime_type': 'text/x-java', 'hash': -2831008059092171671, 'source_dataset': 'data'}
// just pre-load all the stuff that index.js lazily exports const internalRe = require('./internal/re') module.exports = { re: internalRe.re, src: internalRe.src, tokens: internalRe.t, SEMVER_SPEC_VERSION: require('./internal/constants').SEMVER_SPEC_VERSION, SemVer: require('./classes/semver'), compareIdentifiers: require('./internal/identifiers').compareIdentifiers, rcompareIdentifiers: require('./internal/identifiers').rcompareIdentifiers, parse: require('./functions/parse'), valid: require('./functions/valid'), clean: require('./functions/clean'), inc: require('./functions/inc'), diff: require('./functions/diff'), major: require('./functions/major'), minor: require('./functions/minor'), patch: require('./functions/patch'), prerelease: require('./functions/prerelease'), compare: require('./functions/compare'), rcompare: require('./functions/rcompare'), compareLoose: require('./functions/compare-loose'), compareBuild: require('./functions/compare-build'), sort: require('./functions/sort'), rsort: require('./functions/rsort'), gt: require('./functions/gt'), lt: require('./functions/lt'), eq: require('./functions/eq'), neq: require('./functions/neq'), gte: require('./functions/gte'), lte: require('./functions/lte'), cmp: require('./functions/cmp'), coerce: require('./functions/coerce'), Comparator: require('./classes/comparator'), Range: require('./classes/range'), satisfies: require('./functions/satisfies'), toComparators: require('./ranges/to-comparators'), maxSatisfying: require('./ranges/max-satisfying'), minSatisfying: require('./ranges/min-satisfying'), minVersion: require('./ranges/min-version'), validRange: require('./ranges/valid'), outside: require('./ranges/outside'), gtr: require('./ranges/gtr'), ltr: require('./ranges/ltr'), intersects: require('./ranges/intersects'), simplifyRange: require('./ranges/simplify'), subset: require('./ranges/subset'), }
{'repo_name': 'nodejs/node', 'stars': '71611', 'repo_language': 'JavaScript', 'file_name': 'rc4-586.s', 'mime_type': 'text/x-asm', 'hash': 7980732461228535357, 'source_dataset': 'data'}
// Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT.

package codestar

// Error code strings that AWS CodeStar may return in service responses;
// compare against awserr.Error.Code() to classify failures.
const (

	// ErrCodeConcurrentModificationException for service response error code
	// "ConcurrentModificationException".
	//
	// Another modification is being made. That modification must complete before
	// you can make your change.
	ErrCodeConcurrentModificationException = "ConcurrentModificationException"

	// ErrCodeInvalidNextTokenException for service response error code
	// "InvalidNextTokenException".
	//
	// The next token is not valid.
	ErrCodeInvalidNextTokenException = "InvalidNextTokenException"

	// ErrCodeInvalidServiceRoleException for service response error code
	// "InvalidServiceRoleException".
	//
	// The service role is not valid.
	ErrCodeInvalidServiceRoleException = "InvalidServiceRoleException"

	// ErrCodeLimitExceededException for service response error code
	// "LimitExceededException".
	//
	// A resource limit has been exceeded.
	ErrCodeLimitExceededException = "LimitExceededException"

	// ErrCodeProjectAlreadyExistsException for service response error code
	// "ProjectAlreadyExistsException".
	//
	// An AWS CodeStar project with the same ID already exists in this region for
	// the AWS account. AWS CodeStar project IDs must be unique within a region
	// for the AWS account.
	ErrCodeProjectAlreadyExistsException = "ProjectAlreadyExistsException"

	// ErrCodeProjectConfigurationException for service response error code
	// "ProjectConfigurationException".
	//
	// Project configuration information is required but not specified.
	ErrCodeProjectConfigurationException = "ProjectConfigurationException"

	// ErrCodeProjectCreationFailedException for service response error code
	// "ProjectCreationFailedException".
	//
	// The project creation request was valid, but a nonspecific exception or error
	// occurred during project creation. The project could not be created in AWS
	// CodeStar.
	ErrCodeProjectCreationFailedException = "ProjectCreationFailedException"

	// ErrCodeProjectNotFoundException for service response error code
	// "ProjectNotFoundException".
	//
	// The specified AWS CodeStar project was not found.
	ErrCodeProjectNotFoundException = "ProjectNotFoundException"

	// ErrCodeTeamMemberAlreadyAssociatedException for service response error code
	// "TeamMemberAlreadyAssociatedException".
	//
	// The team member is already associated with a role in this project.
	ErrCodeTeamMemberAlreadyAssociatedException = "TeamMemberAlreadyAssociatedException"

	// ErrCodeTeamMemberNotFoundException for service response error code
	// "TeamMemberNotFoundException".
	//
	// The specified team member was not found.
	ErrCodeTeamMemberNotFoundException = "TeamMemberNotFoundException"

	// ErrCodeUserProfileAlreadyExistsException for service response error code
	// "UserProfileAlreadyExistsException".
	//
	// A user profile with that name already exists in this region for the AWS account.
	// AWS CodeStar user profile names must be unique within a region for the AWS
	// account.
	ErrCodeUserProfileAlreadyExistsException = "UserProfileAlreadyExistsException"

	// ErrCodeUserProfileNotFoundException for service response error code
	// "UserProfileNotFoundException".
	//
	// The user profile was not found.
	ErrCodeUserProfileNotFoundException = "UserProfileNotFoundException"

	// ErrCodeValidationException for service response error code
	// "ValidationException".
	//
	// The specified input is either not valid, or it could not be validated.
	ErrCodeValidationException = "ValidationException"
)
{'repo_name': 'amazon-archives/aws-service-operator', 'stars': '747', 'repo_language': 'Go', 'file_name': 'PATENTS', 'mime_type': 'text/plain', 'hash': 1793790041287385524, 'source_dataset': 'data'}
"""Tests for the base command's download_url helper.

All network traffic is mocked: ``base_command.requests`` is replaced with a
MagicMock whose ``get`` returns a canned response, so every test asserts on
the requests made and the files written, never on real HTTP.
"""
from unittest import mock

import pytest
from urllib3.response import HTTPHeaderDict

from briefcase.exceptions import (
    BadNetworkResourceError,
    MissingNetworkResourceError
)


@pytest.mark.parametrize(
    'url,content_disposition',
    [
        # A `None` value for `content_disposition` means we skip the header.
        # Other values are passed through as HTTP header values.
        ('https://example.com/path/to/something.zip', None),
        # Ensure empty header is ignored.
        ('https://example.com/path/to/something.zip', ''),
        # Paradigmatic case for Content-Disposition: attachment.
        ('https://example.com/path/to/irrelevant.zip',
         'attachment; filename=something.zip'),
        # Ensure extra parameters are ignored.
        ('https://example.com/path/to/irrelevant.zip',
         'attachment; filename=something.zip; ignored=okay'),
        # Ensure garbage headers are ignored.
        ('https://example.com/path/to/something.zip', 'garbage'),
        # Ensure we respect unusual quoting & case permitted by RFC 6266.
        ('https://example.com/path/to/irrelevant.zip',
         'ATTACHment; filename=     "something.zip"'),
        # Ensure we use filename=, even if filename*= is provided. This makes us a
        # "legacy user agent" in the terms of RFC 6266, for our own simplicity.
        ('https://example.com/path/to/irrelevant.zip',
         'attachment; filename="something.zip"; filename*=utf-8' "''%e2%82%ac%20rates"),
    ]
)
def test_new_download_oneshot(base_command, url, content_disposition):
    """A small download (no content-length) is written in one shot, with the
    filename taken from the Content-Disposition header or, failing that, the
    final path segment of the response URL."""
    base_command.requests = mock.MagicMock()
    response = mock.MagicMock()
    response.url = url
    response.status_code = 200
    # Wrap a real HTTPHeaderDict so header lookups behave case-insensitively
    # while still letting us assert on .get() calls.
    response.headers = mock.Mock(wraps=HTTPHeaderDict({
        'content-disposition': content_disposition,
    } if content_disposition is not None else {}))
    response.content = b'all content'
    base_command.requests.get.return_value = response

    # Download the file
    filename = base_command.download_url(
        url='https://example.com/support?useful=Yes',
        download_path=base_command.base_path / 'downloads',
    )

    # requests.get has been invoked, but content isn't iterated
    base_command.requests.get.assert_called_with(
        'https://example.com/support?useful=Yes',
        stream=True,
    )
    response.headers.get.assert_called_with('content-length')
    response.iter_content.assert_not_called()

    # The filename is derived from the URL or header
    assert filename == base_command.base_path / 'downloads' / 'something.zip'

    # File content is as expected
    with (base_command.base_path / 'downloads' / 'something.zip').open() as f:
        assert f.read() == 'all content'


def test_new_download_chunked(base_command):
    """When the response advertises a content-length, the download is streamed
    chunk by chunk via iter_content and the chunks are concatenated on disk."""
    base_command.requests = mock.MagicMock()
    response = mock.MagicMock()
    response.url = 'https://example.com/path/to/something.zip'
    response.status_code = 200
    response.headers.get.return_value = '24'
    response.iter_content.return_value = iter([
        b'chunk-1;',
        b'chunk-2;',
        b'chunk-3;',
    ])
    base_command.requests.get.return_value = response

    # Download the file
    filename = base_command.download_url(
        url='https://example.com/support?useful=Yes',
        download_path=base_command.base_path
    )

    # requests.get has been invoked, and content is chunked.
    base_command.requests.get.assert_called_with(
        'https://example.com/support?useful=Yes',
        stream=True,
    )
    response.headers.get.assert_called_with('content-length')
    response.iter_content.assert_called_once_with(chunk_size=1048576)

    # The filename is derived from the URL
    assert filename == base_command.base_path / 'something.zip'

    # The downloaded file exists, and content is as expected
    assert filename.exists()
    with (base_command.base_path / 'something.zip').open() as f:
        assert f.read() == 'chunk-1;chunk-2;chunk-3;'


def test_already_downloaded(base_command):
    """If the target file already exists, the download is abandoned after the
    filename has been determined, and the existing file is left untouched."""
    # Create an existing file
    existing_file = base_command.base_path / 'something.zip'
    with (existing_file).open('w') as f:
        f.write('existing content')

    base_command.requests = mock.MagicMock()
    response = mock.MagicMock()
    response.headers.get.return_value = ''
    response.url = 'https://example.com/path/to/something.zip'
    response.status_code = 200
    base_command.requests.get.return_value = response

    # Download the file
    filename = base_command.download_url(
        url='https://example.com/support?useful=Yes',
        download_path=base_command.base_path
    )

    # The GET request will have been made
    base_command.requests.get.assert_called_with(
        'https://example.com/support?useful=Yes',
        stream=True,
    )

    # The request's Content-Disposition header is consumed to
    # examine the filename; the request is abandoned before
    # any other headers are read.
    response.headers.get.assert_called_once_with('Content-Disposition')

    # but the file existed, so the method returns
    assert filename == existing_file
    assert filename.exists()


def test_missing_resource(base_command):
    """A 404 response raises MissingNetworkResourceError and writes nothing."""
    base_command.requests = mock.MagicMock()
    response = mock.MagicMock()
    response.status_code = 404

    base_command.requests.get.return_value = response

    # Download the file
    with pytest.raises(MissingNetworkResourceError):
        base_command.download_url(
            url='https://example.com/support?useful=Yes',
            download_path=base_command.base_path
        )

    # requests.get has been invoked, but nothing else.
    base_command.requests.get.assert_called_with(
        'https://example.com/support?useful=Yes',
        stream=True,
    )
    response.headers.get.assert_not_called()

    # The file doesn't exist as a result of the download failure
    assert not (base_command.base_path / 'something.zip').exists()


def test_bad_resource(base_command):
    """Any other non-200 status (here 500) raises BadNetworkResourceError
    and writes nothing."""
    base_command.requests = mock.MagicMock()
    response = mock.MagicMock()
    response.status_code = 500

    base_command.requests.get.return_value = response

    # Download the file
    with pytest.raises(BadNetworkResourceError):
        base_command.download_url(
            url='https://example.com/support?useful=Yes',
            download_path=base_command.base_path
        )

    # requests.get has been invoked, but nothing else.
    base_command.requests.get.assert_called_with(
        'https://example.com/support?useful=Yes',
        stream=True,
    )
    response.headers.get.assert_not_called()

    # The file doesn't exist as a result of the download failure
    assert not (base_command.base_path / 'something.zip').exists()
{'repo_name': 'beeware/briefcase', 'stars': '809', 'repo_language': 'Python', 'file_name': 'test_call.py', 'mime_type': 'text/x-python', 'hash': 5812601831061380655, 'source_dataset': 'data'}
using System; using System.Collections.Generic; using Dev2.Common.Interfaces.DB; namespace Dev2.Common.Interfaces { public interface IWcfService { string Name { get; set; } Guid Id { get; set; } IWcfServerSource Source { get; set; } IList<IServiceInput> Inputs { get; set; } IList<IServiceOutputMapping> OutputMappings { get; set; } string Path { get; set; } IWcfAction Action { get; set; } } }
{'repo_name': 'Warewolf-ESB/Warewolf', 'stars': '227', 'repo_language': 'C#', 'file_name': 'testElasticsearchSource.xml', 'mime_type': 'text/plain', 'hash': 5948946056465699123, 'source_dataset': 'data'}
// Configuration for ImageMin task(s) // Compresses jpg, jpeg, png, and svg files 'use strict'; var pngquant = require('imagemin-pngquant'); var svgo = require('imagemin-svgo'); var taskConfig = function(grunt) { grunt.config.set('imagemin', { dist: { options: { use: [pngquant({ quality: '65-80', speed: 4 }), svgo()] }, files: [{ expand: true, cwd: '<%= snippod.client %>/static/images', src: '**/*.{jpg,jpeg,gif,png,svg}', dest: '<%= snippod.dist %>/client/images' }] } }); }; module.exports = taskConfig;
{'repo_name': 'shalomeir/snippod-boilerplate', 'stars': '120', 'repo_language': 'JavaScript', 'file_name': 'prod.txt', 'mime_type': 'text/plain', 'hash': 8601230142732496149, 'source_dataset': 'data'}
/**
 * @file quests.cpp
 *
 * Implementation of functionality for handling quests.
 */
#include "all.h"

DEVILUTION_BEGIN_NAMESPACE

int qtopline;
BOOL questlog;
BYTE *pQLogCel;
/** Contains the quests of the current game. */
QuestStruct quests[MAXQUESTS];
int qline;
int qlist[MAXQUESTS];
int numqlines;
int WaterDone;
int ReturnLvlX;
int ReturnLvlY;
int ReturnLvlT;
int ReturnLvl;

/** Contains the data related to each quest_id. */
QuestData questlist[MAXQUESTS] = {
	// clang-format off
	// _qdlvl,  _qdmultlvl, _qlvlt,          _qdtype,    _qdrnd, _qslvl,  _qflags, _qdmsg,        _qlstr
	{       5,          -1, DTYPE_NONE,      Q_ROCK,        100,      0,       0, TEXT_INFRA5,   "The Magic Rock"           },
	{       9,          -1, DTYPE_NONE,      Q_MUSHROOM,    100,      0,       0, TEXT_MUSH8,    "Black Mushroom"           },
	{       4,          -1, DTYPE_NONE,      Q_GARBUD,      100,      0,       0, TEXT_GARBUD1,  "Gharbad The Weak"         },
	{       8,          -1, DTYPE_NONE,      Q_ZHAR,        100,      0,       0, TEXT_ZHAR1,    "Zhar the Mad"             },
	{      14,          -1, DTYPE_NONE,      Q_VEIL,        100,      0,       0, TEXT_VEIL9,    "Lachdanan"                },
	{      15,          -1, DTYPE_NONE,      Q_DIABLO,      100,      0,       1, TEXT_VILE3,    "Diablo"                   },
	{       2,           2, DTYPE_NONE,      Q_BUTCHER,     100,      0,       1, TEXT_BUTCH9,   "The Butcher"              },
	{       4,          -1, DTYPE_NONE,      Q_LTBANNER,    100,      0,       0, TEXT_BANNER2,  "Ogden's Sign"             },
	{       7,          -1, DTYPE_NONE,      Q_BLIND,       100,      0,       0, TEXT_BLINDING, "Halls of the Blind"       },
	{       5,          -1, DTYPE_NONE,      Q_BLOOD,       100,      0,       0, TEXT_BLOODY,   "Valor"                    },
	{      10,          -1, DTYPE_NONE,      Q_ANVIL,       100,      0,       0, TEXT_ANVIL5,   "Anvil of Fury"            },
	{      13,          -1, DTYPE_NONE,      Q_WARLORD,     100,      0,       0, TEXT_BLOODWAR, "Warlord of Blood"         },
	{       3,           3, DTYPE_CATHEDRAL, Q_SKELKING,    100,      1,       1, TEXT_KING2,    "The Curse of King Leoric" },
	{       2,          -1, DTYPE_CAVES,     Q_PWATER,      100,      4,       0, TEXT_POISON3,  "Poisoned Water Supply"    },
	{       6,          -1, DTYPE_CATACOMBS, Q_SCHAMB,      100,      2,       0, TEXT_BONER,    "The Chamber of Bone"      },
	{      15,          15, DTYPE_CATHEDRAL, Q_BETRAYER,    100,      5,       1, TEXT_VILE1,    "Archbishop Lazarus"       },
	// clang-format on
};

/**
 * Specifies a delta in X-coordinates from the quest entrance for
 * which the hover text of the cursor will be visible.
 */
char questxoff[7] = { 0, -1, 0, -1, -2, -1, -2 };
/**
 * Specifies a delta in Y-coordinates from the quest entrance for
 * which the hover text of the cursor will be visible.
 */
char questyoff[7] = { 0, 0, -1, -1, -1, -2, -2 };
/** Hover labels for quest set-level entrances, indexed by _qslvl - 1. */
char *questtrigstr[5] = {
	"King Leoric's Tomb",
	"The Chamber of Bone",
	"Maze",
	"A Dark Passage",
	"Unholy Altar"
};

/**
 * A quest group containing the three quests the Butcher,
 * Ogden's Sign and Gharbad the Weak, which ensures that exactly
 * two of these three quests appear in any single player game.
 */
int QuestGroup1[3] = { Q_BUTCHER, Q_LTBANNER, Q_GARBUD };
/**
 * A quest group containing the three quests Halls of the Blind,
 * the Magic Rock and Valor, which ensures that exactly two of
 * these three quests appear in any single player game.
 */
int QuestGroup2[3] = { Q_BLIND, Q_ROCK, Q_BLOOD };
/**
 * A quest group containing the three quests Black Mushroom,
 * Zhar the Mad and Anvil of Fury, which ensures that exactly
 * two of these three quests appear in any single player game.
 */
int QuestGroup3[3] = { Q_MUSHROOM, Q_ZHAR, Q_ANVIL };
/**
 * A quest group containing the two quests Lachdanan and Warlord
 * of Blood, which ensures that exactly one of these two quests
 * appears in any single player game.
 */
int QuestGroup4[2] = { Q_VEIL, Q_WARLORD };

/**
 * Initializes the quests[] state for a new game.
 *
 * In single player, all quests start unavailable and a seeded random
 * selection then disables one quest from each quest group (plus one of
 * Poisoned Water / Skeleton King). In multiplayer, only quests flagged
 * with _qflags & 1 are available. The RNG call order here is
 * seed-deterministic and must not change.
 */
void InitQuests()
{
	int i, initiatedQuests;
	DWORD z;

	// Start from "not available"; single player disables everything,
	// multiplayer only the quests without the multiplayer flag.
	if (gbMaxPlayers == 1) {
		for (i = 0; i < MAXQUESTS; i++) {
			quests[i]._qactive = QUEST_NOTAVAIL;
		}
	} else {
		for (i = 0; i < MAXQUESTS; i++) {
			if (!(questlist[i]._qflags & 1)) {
				quests[i]._qactive = QUEST_NOTAVAIL;
			}
		}
	}

	questlog = FALSE;
	PentSpn2Frame = 1;
	WaterDone = 0;
	initiatedQuests = 0;

	// Populate per-quest state from the static questlist table.
	for (z = 0; z < MAXQUESTS; z++) {
		if (gbMaxPlayers <= 1 || questlist[z]._qflags & 1) {
			quests[z]._qtype = questlist[z]._qdtype;
			if (gbMaxPlayers > 1) {
				quests[z]._qlevel = questlist[z]._qdmultlvl;
				// In multiplayer, skip re-initialization of quests whose
				// state was already received via the level delta.
				if (!delta_quest_inited(initiatedQuests)) {
					quests[z]._qactive = QUEST_INIT;
					quests[z]._qvar1 = 0;
					quests[z]._qlog = 0;
				}
				initiatedQuests++;
			} else {
				quests[z]._qactive = QUEST_INIT;
				quests[z]._qlevel = questlist[z]._qdlvl;
				quests[z]._qvar1 = 0;
				quests[z]._qlog = 0;
			}
			quests[z]._qslvl = questlist[z]._qslvl;
			quests[z]._qtx = 0;
			quests[z]._qty = 0;
			quests[z]._qidx = z;
			quests[z]._qlvltype = questlist[z]._qlvlt;
			quests[z]._qvar2 = 0;
			quests[z]._qmsg = questlist[z]._qdmsg;
		}
	}

	// Single player: seeded random pruning of optional quests.
	if (gbMaxPlayers == 1) {
		SetRndSeed(glSeedTbl[15]);
		if (random_(0, 2))
			quests[Q_PWATER]._qactive = QUEST_NOTAVAIL;
		else
			quests[Q_SKELKING]._qactive = QUEST_NOTAVAIL;

		quests[QuestGroup1[random_(0, sizeof(QuestGroup1) / sizeof(int))]]._qactive = QUEST_NOTAVAIL;
		quests[QuestGroup2[random_(0, sizeof(QuestGroup2) / sizeof(int))]]._qactive = QUEST_NOTAVAIL;
		quests[QuestGroup3[random_(0, sizeof(QuestGroup3) / sizeof(int))]]._qactive = QUEST_NOTAVAIL;
		quests[QuestGroup4[random_(0, sizeof(QuestGroup4) / sizeof(int))]]._qactive = QUEST_NOTAVAIL;
	}
#ifdef _DEBUG
	if (questdebug != -1)
		quests[questdebug]._qactive = QUEST_ACTIVE;
#endif

#ifdef SPAWN
	// Shareware build: no quests at all.
	for (z = 0; z < MAXQUESTS; z++) {
		quests[z]._qactive = QUEST_NOTAVAIL;
	}
#endif

	if (!quests[Q_SKELKING]._qactive)
		quests[Q_SKELKING]._qvar2 = 2;
	if (!quests[Q_ROCK]._qactive)
		quests[Q_ROCK]._qvar2 = 2;
	quests[Q_LTBANNER]._qvar1 = 1;
	if (gbMaxPlayers != 1)
		quests[Q_BETRAYER]._qvar1 = 2;
}
void CheckQuests() { #ifndef SPAWN int i, rportx, rporty; if (QuestStatus(Q_BETRAYER) && gbMaxPlayers != 1 && quests[Q_BETRAYER]._qvar1 == 2) { AddObject(OBJ_ALTBOY, 2 * setpc_x + 20, 2 * setpc_y + 22); quests[Q_BETRAYER]._qvar1 = 3; NetSendCmdQuest(TRUE, Q_BETRAYER); } if (gbMaxPlayers != 1) { return; } if (currlevel == quests[Q_BETRAYER]._qlevel && !setlevel && quests[Q_BETRAYER]._qvar1 >= 2 && (quests[Q_BETRAYER]._qactive == QUEST_ACTIVE || quests[Q_BETRAYER]._qactive == QUEST_DONE) && (quests[Q_BETRAYER]._qvar2 == 0 || quests[Q_BETRAYER]._qvar2 == 2)) { quests[Q_BETRAYER]._qtx = 2 * quests[Q_BETRAYER]._qtx + 16; quests[Q_BETRAYER]._qty = 2 * quests[Q_BETRAYER]._qty + 16; rportx = quests[Q_BETRAYER]._qtx; rporty = quests[Q_BETRAYER]._qty; AddMissile(rportx, rporty, rportx, rporty, 0, MIS_RPORTAL, 0, myplr, 0, 0); quests[Q_BETRAYER]._qvar2 = 1; if (quests[Q_BETRAYER]._qactive == QUEST_ACTIVE) { quests[Q_BETRAYER]._qvar1 = 3; } } if (quests[Q_BETRAYER]._qactive == QUEST_DONE && setlevel && setlvlnum == SL_VILEBETRAYER && quests[Q_BETRAYER]._qvar2 == 4) { rportx = 35; rporty = 32; AddMissile(rportx, rporty, rportx, rporty, 0, MIS_RPORTAL, 0, myplr, 0, 0); quests[Q_BETRAYER]._qvar2 = 3; } if (setlevel) { if (setlvlnum == quests[Q_PWATER]._qslvl && quests[Q_PWATER]._qactive != QUEST_INIT && leveltype == quests[Q_PWATER]._qlvltype && nummonsters == 4 && quests[Q_PWATER]._qactive != QUEST_DONE) { quests[Q_PWATER]._qactive = QUEST_DONE; PlaySfxLoc(IS_QUESTDN, plr[myplr]._px, plr[myplr]._py); LoadPalette("Levels\\L3Data\\L3pwater.pal"); WaterDone = 32; } if (WaterDone > 0) { palette_update_quest_palette(WaterDone); WaterDone--; } } else if (plr[myplr]._pmode == PM_STAND) { for (i = 0; i < MAXQUESTS; i++) { if (currlevel == quests[i]._qlevel && quests[i]._qslvl != 0 && quests[i]._qactive != QUEST_NOTAVAIL && plr[myplr]._px == quests[i]._qtx && plr[myplr]._py == quests[i]._qty) { if (quests[i]._qlvltype != DTYPE_NONE) { setlvltype = quests[i]._qlvltype; } 
StartNewLvl(myplr, WM_DIABSETLVL, quests[i]._qslvl); } } } #endif } BOOL ForceQuests() { #ifndef SPAWN int i, j, qx, qy, ql; if (gbMaxPlayers != 1) { return FALSE; } for (i = 0; i < MAXQUESTS; i++) { if (i != Q_BETRAYER && currlevel == quests[i]._qlevel && quests[i]._qslvl != 0) { ql = quests[quests[i]._qidx]._qslvl - 1; qx = quests[i]._qtx; qy = quests[i]._qty; for (j = 0; j < 7; j++) { if (qx + questxoff[j] == cursmx && qy + questyoff[j] == cursmy) { sprintf(infostr, "To %s", questtrigstr[ql]); cursmx = qx; cursmy = qy; return TRUE; } } } } #endif return FALSE; } BOOL QuestStatus(int i) { BOOL result; if (setlevel || currlevel != quests[i]._qlevel || !quests[i]._qactive || (result = 1, gbMaxPlayers != 1) && !(questlist[i]._qflags & 1)) { result = FALSE; } return result; } void CheckQuestKill(int m, BOOL sendmsg) { #ifndef SPAWN int i, j; if (monster[m].MType->mtype == MT_SKING) { quests[Q_SKELKING]._qactive = QUEST_DONE; sfxdelay = 30; if (plr[myplr]._pClass == PC_WARRIOR) { sfxdnum = PS_WARR82; } else if (plr[myplr]._pClass == PC_ROGUE) { sfxdnum = PS_ROGUE82; } else if (plr[myplr]._pClass == PC_SORCERER) { sfxdnum = PS_MAGE82; } if (sendmsg) NetSendCmdQuest(TRUE, Q_SKELKING); } else if (monster[m].MType->mtype == MT_CLEAVER) { quests[Q_BUTCHER]._qactive = QUEST_DONE; sfxdelay = 30; if (plr[myplr]._pClass == PC_WARRIOR) { sfxdnum = PS_WARR80; } else if (plr[myplr]._pClass == PC_ROGUE) { sfxdnum = PS_ROGUE80; } else if (plr[myplr]._pClass == PC_SORCERER) { sfxdnum = PS_MAGE80; } if (sendmsg) NetSendCmdQuest(TRUE, Q_BUTCHER); } else if (monster[m].mName == UniqMonst[UMT_GARBUD].mName) { //"Gharbad the Weak" quests[Q_GARBUD]._qactive = QUEST_DONE; sfxdelay = 30; if (plr[myplr]._pClass == PC_WARRIOR) { sfxdnum = PS_WARR61; } else if (plr[myplr]._pClass == PC_ROGUE) { sfxdnum = PS_ROGUE61; } else if (plr[myplr]._pClass == PC_SORCERER) { sfxdnum = PS_MAGE61; } } else if (monster[m].mName == UniqMonst[UMT_ZHAR].mName) { //"Zhar the Mad" quests[Q_ZHAR]._qactive = 
QUEST_DONE; sfxdelay = 30; if (plr[myplr]._pClass == PC_WARRIOR) { sfxdnum = PS_WARR62; } else if (plr[myplr]._pClass == PC_ROGUE) { sfxdnum = PS_ROGUE62; } else if (plr[myplr]._pClass == PC_SORCERER) { sfxdnum = PS_MAGE62; } } else if (monster[m].mName == UniqMonst[UMT_LAZURUS].mName && gbMaxPlayers != 1) { //"Arch-Bishop Lazarus" quests[Q_BETRAYER]._qactive = QUEST_DONE; quests[Q_BETRAYER]._qvar1 = 7; sfxdelay = 30; quests[Q_DIABLO]._qactive = QUEST_ACTIVE; for (j = 0; j < MAXDUNY; j++) { for (i = 0; i < MAXDUNX; i++) { if (dPiece[i][j] == 370) { trigs[numtrigs]._tx = i; trigs[numtrigs]._ty = j; trigs[numtrigs]._tmsg = WM_DIABNEXTLVL; numtrigs++; } } } if (plr[myplr]._pClass == PC_WARRIOR) { sfxdnum = PS_WARR83; } else if (plr[myplr]._pClass == PC_ROGUE) { sfxdnum = PS_ROGUE83; } else if (plr[myplr]._pClass == PC_SORCERER) { sfxdnum = PS_MAGE83; } if (sendmsg) { NetSendCmdQuest(TRUE, Q_BETRAYER); NetSendCmdQuest(TRUE, Q_DIABLO); } } else if (monster[m].mName == UniqMonst[UMT_LAZURUS].mName && gbMaxPlayers == 1) { //"Arch-Bishop Lazarus" quests[Q_BETRAYER]._qactive = QUEST_DONE; sfxdelay = 30; InitVPTriggers(); quests[Q_BETRAYER]._qvar1 = 7; quests[Q_BETRAYER]._qvar2 = 4; quests[Q_DIABLO]._qactive = QUEST_ACTIVE; AddMissile(35, 32, 35, 32, 0, MIS_RPORTAL, 0, myplr, 0, 0); if (plr[myplr]._pClass == PC_WARRIOR) { sfxdnum = PS_WARR83; } else if (plr[myplr]._pClass == PC_ROGUE) { sfxdnum = PS_ROGUE83; } else if (plr[myplr]._pClass == PC_SORCERER) { sfxdnum = PS_MAGE83; } } else if (monster[m].mName == UniqMonst[UMT_WARLORD].mName) { //"Warlord of Blood" quests[Q_WARLORD]._qactive = QUEST_DONE; sfxdelay = 30; if (plr[myplr]._pClass == PC_WARRIOR) { sfxdnum = PS_WARR94; } else if (plr[myplr]._pClass == PC_ROGUE) { sfxdnum = PS_ROGUE94; } else if (plr[myplr]._pClass == PC_SORCERER) { sfxdnum = PS_MAGE94; } } #endif } void DrawButcher() { int x, y; x = 2 * setpc_x + 16; y = 2 * setpc_y + 16; DRLG_RectTrans(x + 3, y + 3, x + 10, y + 10); } void DrawSkelKing(int q, int x, 
int y) { quests[q]._qtx = 2 * x + 28; quests[q]._qty = 2 * y + 23; } void DrawWarLord(int x, int y) { int rw, rh; int i, j; BYTE *sp, *setp; int v; setp = LoadFileInMem("Levels\\L4Data\\Warlord2.DUN", NULL); rw = *setp; sp = setp + 2; rh = *sp; sp += 2; setpc_w = rw; setpc_h = rh; setpc_x = x; setpc_y = y; for (j = y; j < y + rh; j++) { for (i = x; i < x + rw; i++) { if (*sp != 0) { v = *sp; } else { v = 6; } dungeon[i][j] = v; sp += 2; } } mem_free_dbg(setp); } void DrawSChamber(int q, int x, int y) { int i, j; int rw, rh; int xx, yy; BYTE *sp, *setp; int v; setp = LoadFileInMem("Levels\\L2Data\\Bonestr1.DUN", NULL); rw = *setp; sp = setp + 2; rh = *sp; sp += 2; setpc_w = rw; setpc_h = rh; setpc_x = x; setpc_y = y; for (j = y; j < y + rh; j++) { for (i = x; i < x + rw; i++) { if (*sp != 0) { v = *sp; } else { v = 3; } dungeon[i][j] = v; sp += 2; } } xx = 2 * x + 22; yy = 2 * y + 23; quests[q]._qtx = xx; quests[q]._qty = yy; mem_free_dbg(setp); } void DrawLTBanner(int x, int y) { int rw, rh; int i, j; BYTE *sp, *setp; setp = LoadFileInMem("Levels\\L1Data\\Banner1.DUN", NULL); rw = *setp; sp = setp + 2; rh = *sp; sp += 2; setpc_w = rw; setpc_h = rh; setpc_x = x; setpc_y = y; for (j = 0; j < rh; j++) { for (i = 0; i < rw; i++) { if (*sp != 0) { pdungeon[x + i][y + j] = *sp; } sp += 2; } } mem_free_dbg(setp); } void DrawBlind(int x, int y) { int rw, rh; int i, j; BYTE *sp, *setp; setp = LoadFileInMem("Levels\\L2Data\\Blind1.DUN", NULL); rw = *setp; sp = setp + 2; rh = *sp; sp += 2; setpc_x = x; setpc_y = y; setpc_w = rw; setpc_h = rh; for (j = 0; j < rh; j++) { for (i = 0; i < rw; i++) { if (*sp != 0) { pdungeon[x + i][y + j] = *sp; } sp += 2; } } mem_free_dbg(setp); } void DrawBlood(int x, int y) { int rw, rh; int i, j; BYTE *sp, *setp; setp = LoadFileInMem("Levels\\L2Data\\Blood2.DUN", NULL); rw = *setp; sp = setp + 2; rh = *sp; sp += 2; setpc_x = x; setpc_y = y; setpc_w = rw; setpc_h = rh; for (j = 0; j < rh; j++) { for (i = 0; i < rw; i++) { if (*sp != 0) { 
dungeon[x + i][y + j] = *sp; } sp += 2; } } mem_free_dbg(setp); } void DRLG_CheckQuests(int x, int y) { int i; for (i = 0; i < MAXQUESTS; i++) { if (QuestStatus(i)) { switch (quests[i]._qtype) { case Q_BUTCHER: DrawButcher(); break; case Q_LTBANNER: DrawLTBanner(x, y); break; case Q_BLIND: DrawBlind(x, y); break; case Q_BLOOD: DrawBlood(x, y); break; case Q_WARLORD: DrawWarLord(x, y); break; case Q_SKELKING: DrawSkelKing(i, x, y); break; case Q_SCHAMB: DrawSChamber(i, x, y); break; } } } } void SetReturnLvlPos() { switch (setlvlnum) { case SL_SKELKING: ReturnLvlX = quests[Q_SKELKING]._qtx + 1; ReturnLvlY = quests[Q_SKELKING]._qty; ReturnLvl = quests[Q_SKELKING]._qlevel; ReturnLvlT = DTYPE_CATHEDRAL; break; case SL_BONECHAMB: ReturnLvlX = quests[Q_SCHAMB]._qtx + 1; ReturnLvlY = quests[Q_SCHAMB]._qty; ReturnLvl = quests[Q_SCHAMB]._qlevel; ReturnLvlT = DTYPE_CATACOMBS; break; case SL_POISONWATER: ReturnLvlX = quests[Q_PWATER]._qtx; ReturnLvlY = quests[Q_PWATER]._qty + 1; ReturnLvl = quests[Q_PWATER]._qlevel; ReturnLvlT = DTYPE_CATHEDRAL; break; case SL_VILEBETRAYER: ReturnLvlX = quests[Q_BETRAYER]._qtx + 1; ReturnLvlY = quests[Q_BETRAYER]._qty - 1; ReturnLvl = quests[Q_BETRAYER]._qlevel; ReturnLvlT = DTYPE_HELL; break; } } void GetReturnLvlPos() { if (quests[Q_BETRAYER]._qactive == QUEST_DONE) quests[Q_BETRAYER]._qvar2 = 2; ViewX = ReturnLvlX; ViewY = ReturnLvlY; currlevel = ReturnLvl; leveltype = ReturnLvlT; } void ResyncMPQuests() { #ifndef SPAWN if (quests[Q_SKELKING]._qactive == QUEST_INIT && currlevel >= quests[Q_SKELKING]._qlevel - 1 && currlevel <= quests[Q_SKELKING]._qlevel + 1) { quests[Q_SKELKING]._qactive = QUEST_ACTIVE; NetSendCmdQuest(TRUE, Q_SKELKING); } if (quests[Q_BUTCHER]._qactive == QUEST_INIT && currlevel >= quests[Q_BUTCHER]._qlevel - 1 && currlevel <= quests[Q_BUTCHER]._qlevel + 1) { quests[Q_BUTCHER]._qactive = QUEST_ACTIVE; NetSendCmdQuest(TRUE, Q_BUTCHER); } if (quests[Q_BETRAYER]._qactive == QUEST_INIT && currlevel == 
quests[Q_BETRAYER]._qlevel - 1) { quests[Q_BETRAYER]._qactive = QUEST_ACTIVE; NetSendCmdQuest(TRUE, Q_BETRAYER); } if (QuestStatus(Q_BETRAYER)) AddObject(OBJ_ALTBOY, 2 * setpc_x + 20, 2 * setpc_y + 22); #endif } void ResyncQuests() { #ifndef SPAWN int i, tren, x, y; if (setlevel && setlvlnum == quests[Q_PWATER]._qslvl && quests[Q_PWATER]._qactive != QUEST_INIT && leveltype == quests[Q_PWATER]._qlvltype) { if (quests[Q_PWATER]._qactive == QUEST_DONE) LoadPalette("Levels\\L3Data\\L3pwater.pal"); else LoadPalette("Levels\\L3Data\\L3pfoul.pal"); for (i = 0; i <= 32; i++) palette_update_quest_palette(i); } if (QuestStatus(Q_LTBANNER)) { if (quests[Q_LTBANNER]._qvar1 == 1) ObjChangeMapResync( setpc_w + setpc_x - 2, setpc_h + setpc_y - 2, setpc_w + setpc_x + 1, setpc_h + setpc_y + 1); if (quests[Q_LTBANNER]._qvar1 == 2) { ObjChangeMapResync( setpc_w + setpc_x - 2, setpc_h + setpc_y - 2, setpc_w + setpc_x + 1, setpc_h + setpc_y + 1); ObjChangeMapResync(setpc_x, setpc_y, (setpc_w >> 1) + setpc_x + 2, (setpc_h >> 1) + setpc_y - 2); for (i = 0; i < nobjects; i++) SyncObjectAnim(objectactive[i]); tren = TransVal; TransVal = 9; DRLG_MRectTrans(setpc_x, setpc_y, (setpc_w >> 1) + setpc_x + 4, setpc_y + (setpc_h >> 1)); TransVal = tren; } if (quests[Q_LTBANNER]._qvar1 == 3) { x = setpc_x; y = setpc_y; ObjChangeMapResync(x, y, x + setpc_w + 1, y + setpc_h + 1); for (i = 0; i < nobjects; i++) SyncObjectAnim(objectactive[i]); tren = TransVal; TransVal = 9; DRLG_MRectTrans(setpc_x, setpc_y, (setpc_w >> 1) + setpc_x + 4, setpc_y + (setpc_h >> 1)); TransVal = tren; } } if (currlevel == quests[Q_MUSHROOM]._qlevel) { if (quests[Q_MUSHROOM]._qactive == QUEST_INIT && !quests[Q_MUSHROOM]._qvar1) { SpawnQuestItem(IDI_FUNGALTM, 0, 0, 5, 1); quests[Q_MUSHROOM]._qvar1 = QS_TOMESPAWNED; } else { if (quests[Q_MUSHROOM]._qactive == QUEST_ACTIVE) { if (quests[Q_MUSHROOM]._qvar1 >= QS_MUSHGIVEN) { Qtalklist[TOWN_WITCH]._qblkm = -1; Qtalklist[TOWN_HEALER]._qblkm = TEXT_MUSH3; } else if 
(quests[Q_MUSHROOM]._qvar1 >= QS_BRAINGIVEN) { Qtalklist[TOWN_HEALER]._qblkm = -1; } } } } if (currlevel == quests[Q_VEIL]._qlevel + 1 && quests[Q_VEIL]._qactive == QUEST_ACTIVE && !quests[Q_VEIL]._qvar1) { quests[Q_VEIL]._qvar1 = 1; SpawnQuestItem(IDI_GLDNELIX, 0, 0, 5, 1); } if (setlevel && setlvlnum == SL_VILEBETRAYER) { if (quests[Q_BETRAYER]._qvar1 >= 4) ObjChangeMapResync(1, 11, 20, 18); if (quests[Q_BETRAYER]._qvar1 >= 6) ObjChangeMapResync(1, 18, 20, 24); if (quests[Q_BETRAYER]._qvar1 >= 7) InitVPTriggers(); for (i = 0; i < nobjects; i++) SyncObjectAnim(objectactive[i]); } if (currlevel == quests[Q_BETRAYER]._qlevel && !setlevel && (quests[Q_BETRAYER]._qvar2 == 1 || quests[Q_BETRAYER]._qvar2 >= 3) && (quests[Q_BETRAYER]._qactive == QUEST_ACTIVE || quests[Q_BETRAYER]._qactive == QUEST_DONE)) { quests[Q_BETRAYER]._qvar2 = 2; } #endif } void PrintQLString(int x, int y, BOOL cjustflag, char *str, int col) { int len, width, i, k, sx, sy; BYTE c; sx = x + 32 + SCREEN_X; sy = y * 12 + 44 + SCREEN_Y; len = strlen(str); k = 0; if (cjustflag) { width = 0; for (i = 0; i < len; i++) width += fontkern[fontframe[gbFontTransTbl[(BYTE)str[i]]]] + 1; if (width < 257) k = (257 - width) >> 1; sx += k; } if (qline == y) { CelDraw(cjustflag ? x + k + 12 + SCREEN_X : x + 12 + SCREEN_X, sy + 1, pSPentSpn2Cels, PentSpn2Frame, 12); } for (i = 0; i < len; i++) { c = fontframe[gbFontTransTbl[(BYTE)str[i]]]; k += fontkern[c] + 1; if (c && k <= 257) { PrintChar(sx, sy, c, col); } sx += fontkern[c] + 1; } if (qline == y) { CelDraw(cjustflag ? 
x + k + 36 + SCREEN_X : 276 + SCREEN_X - x, sy + 1, pSPentSpn2Cels, PentSpn2Frame, 12); } } void DrawQuestLog() { int y, i; PrintQLString(0, 2, TRUE, "Quest Log", 3); CelDraw(SCREEN_X, SCREEN_Y + 351, pQLogCel, 1, SPANEL_WIDTH); y = qtopline; for (i = 0; i < numqlines; i++) { PrintQLString(0, y, TRUE, questlist[qlist[i]]._qlstr, 0); y += 2; } PrintQLString(0, 22, TRUE, "Close Quest Log", 0); PentSpn2Spin(); } void StartQuestlog() { DWORD i; numqlines = 0; for (i = 0; i < MAXQUESTS; i++) { if (quests[i]._qactive == QUEST_ACTIVE && quests[i]._qlog) { qlist[numqlines] = i; numqlines++; } } if (numqlines > 5) { qtopline = 5 - (numqlines >> 1); } else { qtopline = 8; } qline = 22; if (numqlines != 0) qline = qtopline; questlog = TRUE; PentSpn2Frame = 1; } void QuestlogUp() { if (numqlines) { if (qline == qtopline) { qline = 22; } else if (qline == 22) { qline = qtopline + 2 * numqlines - 2; } else { qline -= 2; } PlaySFX(IS_TITLEMOV); } } void QuestlogDown() { if (numqlines) { if (qline == 22) { qline = qtopline; } else if (qline == qtopline + 2 * numqlines - 2) { qline = 22; } else { qline += 2; } PlaySFX(IS_TITLEMOV); } } void QuestlogEnter() { PlaySFX(IS_TITLSLCT); if (numqlines && qline != 22) InitQTextMsg(quests[qlist[(qline - qtopline) >> 1]]._qmsg); questlog = FALSE; } void QuestlogESC() { int y, i; y = (MouseY - 32) / 12; if (numqlines) { for (i = 0; i < numqlines; i++) { if (y == qtopline + 2 * i) { qline = y; QuestlogEnter(); } } } if (y == 22) { qline = 22; QuestlogEnter(); } } void SetMultiQuest(int q, int s, int l, int v1) { #ifndef SPAWN if (quests[q]._qactive != QUEST_DONE) { if (s > quests[q]._qactive) quests[q]._qactive = s; quests[q]._qlog |= l; if (v1 > quests[q]._qvar1) quests[q]._qvar1 = v1; } #endif } DEVILUTION_END_NAMESPACE
{'repo_name': 'diasurgical/devilutionX', 'stars': '2677', 'repo_language': 'C++', 'file_name': 'config.yml', 'mime_type': 'text/plain', 'hash': 1757527409447340140, 'source_dataset': 'data'}
image: "/assets/png/tiles.png" tile_width: 48 tile_height: 16 tile_margin: 0 tile_spacing: 0 collision: "/assets/png/tiles.png" material_tag: "tile" convex_hulls { index: 0 count: 10 collision_group: "" } convex_hulls { index: 10 count: 13 collision_group: "" } convex_hulls { index: 23 count: 13 collision_group: "" } convex_hulls { index: 36 count: 13 collision_group: "" } convex_hulls { index: 49 count: 0 collision_group: "" } convex_hulls { index: 49 count: 0 collision_group: "" } convex_hulls { index: 49 count: 4 collision_group: "" } convex_hulls { index: 53 count: 4 collision_group: "" } convex_hulls { index: 57 count: 4 collision_group: "" } animations { id: "arrow" start_tile: 2 end_tile: 2 playback: PLAYBACK_NONE fps: 30 flip_horizontal: 0 flip_vertical: 0 } animations { id: "player" start_tile: 1 end_tile: 1 playback: PLAYBACK_NONE fps: 1 flip_horizontal: 0 flip_vertical: 0 } animations { id: "sub" start_tile: 3 end_tile: 3 playback: PLAYBACK_NONE fps: 30 flip_horizontal: 0 flip_vertical: 0 } animations { id: "super" start_tile: 2 end_tile: 2 playback: PLAYBACK_NONE fps: 30 flip_horizontal: 0 flip_vertical: 0 } extrude_borders: 2 inner_padding: 0 sprite_trim_mode: SPRITE_TRIM_MODE_OFF
{'repo_name': 'benjames-171/defold-games', 'stars': '137', 'repo_language': 'Go', 'file_name': 'custom.render_script', 'mime_type': 'text/plain', 'hash': 7110511023444691471, 'source_dataset': 'data'}
/** <p/> * See the file "license.terms" for information on usage and * redistribution of this file, and for a DISCLAIMER OF ALL * WARRANTIES. * <p/> * <p/> */ package edu.cmu.sphinx.tools.riddler.shared; import edu.cmu.sphinx.tools.riddler.shared.MetadataWrapper; import java.net.URI; import java.rmi.RemoteException; import java.util.*; import javax.ejb.Remote; /** * Interface for Riddler, the corpus creation and training tool for Sphinx. * @author Garrett Weinberg */ @Remote public interface RiddlerRemote { /** * Kick off a model training operation, given a suitable (i.e. full) Corpus * @param corpusID Corpus from which the models should be trained * @return a URI pointing to an RSS feed that will contain the models matching this request */ public URI trainModelsFromCorpus(String corpusID); /** * Kick off a model training operation that uses multiple Corpora * @param corpusIDs Corpora from which the models should be trained * @return a URI pointing to an RSS feed that will contain the models matching this request */ public URI trainModelsFromCorpora(ArrayList<String> corpusIDs); /** * Get a valid identifier for a new Dictionary * @return the new Dictionary's identifier * @throws RemoteException if a Dictionary with the given metadata already exists @param metadata a map of metadata about the Dictionary */ public String createDictionary(MetadataWrapper metadata) throws RemoteException; /** * Get a Dictionary matching the provided descriptor * @return the existing Dictionary's identifier * @throws RemoteException if no Dictionary matching the metadata exists @param metadata metadata for the Dictionary you're trying to retrieve */ public String getDictionary(MetadataWrapper metadata) throws RemoteException; /** * Fetch the DictionaryDescriptor matching the provided ID * @param dictionaryID an existing Dictionary identifier * @return the metadata provided when the given Dictionary was created * @throws java.rmi.RemoteException if the provided ID is invalid */ public 
MetadataWrapper getDictionaryMetadata(String dictionaryID) throws RemoteException; /** * Create a Pronunciation record (case insensitive). * @return the new Pronunciation's identifier * @throws RemoteException if the word contained in the given PronunciationDescriptor already * has an entry in the given dictionary @param dictionaryID Dictionary in which the entry should be created * @param word the word to be added to the specified dictionary * @param pronunciations main and any variant pronunciations of the word */ public String addPronuncations(String dictionaryID, String word, ArrayList<String> pronunciations) throws RemoteException; /** * Check whether a Pronunciation record exists for the given word (case insensitive) * @param dictionaryID Dictionary in which the entry should be created * @param word word that should be queried * @return true if the word has at least one pronunciation */ public boolean hasPronuncation(String dictionaryID, String word) throws RemoteException; /** * Get a valid identifier for a new Corpus * @return a new Corpus identifier * @param dictionaryID Dictionary with which this Corpus should be associated * @param metadata map of metadata about this corpus * @param collectDate date with which this corpus should be associated (e.g. date when data collection * began or ended) */ public String createCorpus(String dictionaryID, MetadataWrapper metadata, Date collectDate); /** * Deeply create a new, empty Item record associated with the given Corpus. * Also creates corresponding Audio and Text records, each consisting of a single * RegionOfAudio and RegionOfText, respectively. * @param corpusId Corpus to which the Item should be added * @return the ID of an Item with valid deep linkages to its newly-created component records */ public String createItem(String corpusId); /** * Deeply create a new Item record with exactly one Audio record containing one RegionOfAudio. 
* @return the ID of an Item with valid deep linkages to its newly-created component records * @param corpusId Corpus to which the Item should be added * @param samplesPerSecond samples per second * @param channelCount number of audio channels * @param data audio data itself */ public String createItemWithShortAudio(String corpusId, int samplesPerSecond, int channelCount, short[] data); /** * Deeply create a new Item record with exactly one Audio record containing one RegionOfAudio. * @return the ID of an Item with valid deep linkages to its newly-created component records * @param corpusId Corpus to which the Item should be added * @param samplesPerSecond samples per second * @param channelCount number of audio channels * @param data audio data itself */ public String createItemWithByteAudio(String corpusId, int samplesPerSecond, int channelCount, byte[] data); /** * Deeply create a new Item record with exactly one Audio record containing one RegionOfAudio. * @return the ID of an Item with valid deep linkages to its newly-created component records * @param corpusId Corpus to which the Item should be added * @param samplesPerSecond samples per second * @param channelCount number of audio channels * @param data audio data itself */ public String createItemWithIntAudio(String corpusId, int samplesPerSecond, int channelCount, int[] data); /** * Deeply create a new Item record with exactly one Audio record containing one RegionOfAudio. * @return the ID of an Item with valid deep linkages to its newly-created component records * @param corpusId Corpus to which the Item should be added * @param samplesPerSecond samples per second * @param channelCount number of audio channels * @param data audio data itself */ public String createItemWithLongAudio(String corpusId, int samplesPerSecond, int channelCount, long[] data); /** * Deeply create a new Item record with exactly one Audio record containing one RegionOfAudio. 
* @return the ID of an Item with valid deep linkages to its newly-created component records * @param corpusId Corpus to which the Item should be added * @param samplesPerSecond samples per second * @param channelCount number of audio channels * @param data audio data itself */ public String createItemWithFloatAudio(String corpusId, int samplesPerSecond, int channelCount, float[] data); /** * Deeply create a new Item record with exactly one Text record containing one RegionOfText. * @return the ID of an Item with valid deep linkages to its newly-created component records * @throws RemoteException if the words list contains a word not found in the * provided Corpus' Dictionary * @param corpusId Corpus to which the Item should be added * @param words text with which the Item should be created; note that a List is used * because not all languages use spaces consistently as word delimiters */ public String createItemWithText(String corpusId, ArrayList<String> words); /** * Deeply create a new Item record with one Audio record and one Text record. The Audio * record contains one RegionOfAudio that points to the Text record's single RegionOfText.<p/> * This method should be used to indicate that the Text is a transcript of the Audio. * @return the ID of an Item with valid deep linkages to its newly-created component records * @throws RemoteException if the words list contains a word not found in the * provided Corpus' Dictionary * @param corpusId Corpus to which the Item should be added * @param samplesPerSecond samples per second * @param channelCount number of audio channels * @param data audio data itself * @param words transcript of the audio being sent */ public String createItemWithShortAudioAndText(String corpusId, int samplesPerSecond, int channelCount, short[] data, ArrayList<String> words); /** * Deeply create a new Item record with one Audio record and one Text record. 
The Audio * record contains one RegionOfAudio that points to the Text record's single RegionOfText.<p/> * This method should be used to indicate that the Text is a transcript of the Audio. * @return the ID of an Item with valid deep linkages to its newly-created component records * @throws RemoteException if the words list contains a word not found in the * provided Corpus' Dictionary * @param corpusId Corpus to which the Item should be added * @param samplesPerSecond samples per second * @param channelCount number of audio channels * @param data audio data itself * @param words transcript of the audio being sent */ public String createItemWithByteAudioAndText(String corpusId, int samplesPerSecond, int channelCount, byte[] data, ArrayList<String> words); /** * Deeply create a new Item record with one Audio record and one Text record. The Audio * record contains one RegionOfAudio that points to the Text record's single RegionOfText.<p/> * This method should be used to indicate that the Text is a transcript of the Audio. * @return the ID of an Item with valid deep linkages to its newly-created component records * @throws RemoteException if the words list contains a word not found in the * provided Corpus' Dictionary * @param corpusId Corpus to which the Item should be added * @param samplesPerSecond samples per second * @param channelCount number of audio channels * @param data audio data itself * @param words transcript of the audio being sent */ public String createItemWithIntAudioAndText(String corpusId, int samplesPerSecond, int channelCount, int[] data, ArrayList<String> words); /** * Deeply create a new Item record with one Audio record and one Text record. The Audio * record contains one RegionOfAudio that points to the Text record's single RegionOfText.<p/> * This method should be used to indicate that the Text is a transcript of the Audio. 
* @return the ID of an Item with valid deep linkages to its newly-created component records * @throws RemoteException if the words list contains a word not found in the * provided Corpus' Dictionary * @param corpusId Corpus to which the Item should be added * @param samplesPerSecond samples per second * @param channelCount number of audio channels * @param data audio data itself * @param words transcript of the audio being sent */ public String createItemWithLongAudioAndText(String corpusId, int samplesPerSecond, int channelCount, long[] data, ArrayList<String> words); /** * Deeply create a new Item record with one Audio record and one Text record. The Audio * record contains one RegionOfAudio that points to the Text record's single RegionOfText.<p/> * This method should be used to indicate that the Text is a transcript of the Audio. * @return the ID of an Item with valid deep linkages to its newly-created component records * @throws RemoteException if the words list contains a word not found in the * provided Corpus' Dictionary * @param corpusId Corpus to which the Item should be added * @param samplesPerSecond samples per second * @param channelCount number of audio channels * @param data audio data itself * @param words transcript of the audio being sent */ public String createItemWithFloatAudioAndText(String corpusId, int samplesPerSecond, int channelCount, float[] data, ArrayList<String> words); /** * Add a RegionOfText to the given Item * @return a Text identifier representing the newly created record * @param itemID Item record to which the new RegionOfText should be added * @param startIndex index of the word at which this text region begins * @param endIndex index of the word at which this text region ends * @see RiddlerRemote#createItemWithText(String, java.util.ArrayList) */ public String createTextRegion(String itemID, int startIndex, int endIndex); /** * Add a RegionOfAudio to the given Item * @return an Audio identifier representing the newly created 
record * @param itemID Item record to which the new RegionOfAudio should be added * @param beginTime time (in milliseconds) within the parent Audio record when this region begins * @param endTime time (in milliseconds) within the parent Audio record when this region ends */ public String createAudioRegion(String itemID, int beginTime, int endTime); /** * Add a RegionOfAudio to the given Item, associating it with the given RegionOfText * @return an Audio identifier representing the newly created record * @param itemID Item to which the matched regions should be added * @param beginTime time (in milliseconds) within the parent Audio record when this region begins * @param endTime time (in milliseconds) within the parent Audio record when this region ends * @param startIndex index of the word at which this text region begins * @param endIndex index of the word at which this text region ends */ public String createAudioRegionWithText(String itemID, int beginTime, int endTime, int startIndex, int endIndex); /** * Link a pre-existing audio region to a pre-existing text region * @param audioID ID of an Audio record * @param textID ID of a Text record * @throws RemoteException if either the audio or text ID's are invalid */ public void associateAudioRegionWithText(String audioID, String textID); }
{'repo_name': 'cjac/cmusphinx', 'stars': '109', 'repo_language': 'C', 'file_name': 'sphinxman_manual.html', 'mime_type': 'text/html', 'hash': 6134721010253501170, 'source_dataset': 'data'}
config BR2_PACKAGE_WSAPI bool "wsapi" select BR2_PACKAGE_COXPCALL if BR2_PACKAGE_LUA_5_1 # runtime select BR2_PACKAGE_LUAFILESYSTEM # runtime select BR2_PACKAGE_RINGS # runtime help API that abstracts the web server from Lua web applications. https://keplerproject.github.io/wsapi/
{'repo_name': 'buildroot/buildroot', 'stars': '1103', 'repo_language': 'Makefile', 'file_name': 'Config.in', 'mime_type': 'text/plain', 'hash': -4888067921738801261, 'source_dataset': 'data'}
fileFormatVersion: 2 guid: 25096163ea08801469d628d2389714bf DefaultImporter: externalObjects: {} userData: assetBundleName: assetBundleVariant:
{'repo_name': 'umutbebek/shadertoy-to-unity-URP', 'stars': '175', 'repo_language': 'GLSL', 'file_name': 'AudioManager.asset', 'mime_type': 'text/plain', 'hash': -5445585209808736155, 'source_dataset': 'data'}
/** * Module dependencies. */ var debug = require('debug')('koa-mount'); var compose = require('koa-compose'); var assert = require('assert'); /** * Expose `mount()`. */ module.exports = mount; /** * Mount `app` with `prefix`, `app` * may be a Koa application or * middleware function. * * @param {String|Application|Function} prefix, app, or function * @param {Application|Function} [app or function] * @return {Function} * @api public */ function mount(prefix, app) { if ('string' != typeof prefix) { app = prefix; prefix = '/'; } assert('/' == prefix[0], 'mount path must begin with "/"'); // compose var downstream = app.middleware ? compose(app.middleware) : app; // don't need to do mounting here if ('/' == prefix) return downstream; var trailingSlash = '/' == prefix.slice(-1); var name = app.name || 'unnamed'; debug('mount %s %s', prefix, name); return function *(upstream){ var prev = this.path; var newPath = match(prev); debug('mount %s %s -> %s', prefix, name, newPath); if (!newPath) return yield* upstream; this.mountPath = prefix; this.path = newPath; debug('enter %s -> %s', prev, this.path); yield* downstream.call(this, function *(){ this.path = prev; yield* upstream; this.path = newPath; }.call(this)); debug('leave %s -> %s', prev, this.path); this.path = prev; } /** * Check if `prefix` satisfies a `path`. * Returns the new path. * * match('/images/', '/lkajsldkjf') => false * match('/images', '/images') => / * match('/images/', '/images') => false * match('/images/', '/images/asdf') => /asdf * * @param {String} prefix * @param {String} path * @return {String|Boolean} * @api private */ function match(path) { // does not match prefix at all if (0 != path.indexOf(prefix)) return false; var newPath = path.replace(prefix, '') || '/'; if (trailingSlash) return newPath; // `/mount` does not match `/mountlkjalskjdf` if ('/' != newPath[0]) return false; return newPath; } }
{'repo_name': 'zhangmengxue/React-Code-Snippet', 'stars': '304', 'repo_language': 'None', 'file_name': 'prefix-source-map.jsm', 'mime_type': 'text/plain', 'hash': -594915647769331269, 'source_dataset': 'data'}
""" """ # Created on 2013.05.15 # # Author: Giovanni Cannata # # Copyright 2014 - 2020 Giovanni Cannata # # This file is part of ldap3. # # ldap3 is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # ldap3 is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License # along with ldap3 in the COPYING and COPYING.LESSER files. # If not, see <http://www.gnu.org/licenses/>. ####################### # ldap ASN.1 Definition # from RFC4511 - Appendix B # extended with result codes from IANA ldap-parameters as of 2013.08.21 # extended with modify_increment from RFC4525 ######################################################### # Lightweight-Directory-Access-Protocol-V3 {1 3 6 1 1 18} # -- Copyright (C) The Internet Society (2006). This version of # -- this ASN.1 module is part of RFC 4511; see the RFC itself # -- for full legal notices. 
# DEFINITIONS # IMPLICIT TAGS # EXTENSIBILITY IMPLIED from pyasn1.type.univ import OctetString, Integer, Sequence, Choice, SequenceOf, Boolean, Null, Enumerated, SetOf from pyasn1.type.namedtype import NamedTypes, NamedType, OptionalNamedType, DefaultedNamedType from pyasn1.type.constraint import ValueRangeConstraint, SingleValueConstraint, ValueSizeConstraint from pyasn1.type.namedval import NamedValues from pyasn1.type.tag import tagClassApplication, tagFormatConstructed, Tag, tagClassContext, tagFormatSimple # constants # maxInt INTEGER ::= 2147483647 -- (2^^31 - 1) -- LDAP_MAX_INT = 2147483647 MAXINT = Integer(LDAP_MAX_INT) # constraints rangeInt0ToMaxConstraint = ValueRangeConstraint(0, MAXINT) rangeInt1To127Constraint = ValueRangeConstraint(1, 127) size1ToMaxConstraint = ValueSizeConstraint(1, MAXINT) responseValueConstraint = SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 16, 17, 18, 19, 20, 21, 32, 33, 34, 36, 48, 49, 50, 51, 52, 53, 54, 64, 65, 66, 67, 68, 69, 71, 80, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 4096) # custom constraints numericOIDConstraint = None # TODO distinguishedNameConstraint = None # TODO nameComponentConstraint = None # TODO attributeDescriptionConstraint = None # TODO uriConstraint = None # TODO attributeSelectorConstraint = None # TODO class Integer0ToMax(Integer): subtypeSpec = Integer.subtypeSpec + rangeInt0ToMaxConstraint class LDAPString(OctetString): # LDAPString ::= OCTET STRING -- UTF-8 encoded, -- [ISO10646] characters encoding = 'utf-8' class MessageID(Integer0ToMax): # MessageID ::= INTEGER (0 .. 
maxInt) pass class LDAPOID(OctetString): # LDAPOID ::= OCTET STRING -- Constrained to <numericoid> # -- [RFC4512] # subtypeSpec = numericOIDConstraint pass class LDAPDN(LDAPString): # LDAPDN ::= LDAPString -- Constrained to <distinguishedName> # -- [RFC4514] # subtypeSpec = distinguishedName pass class RelativeLDAPDN(LDAPString): # RelativeLDAPDN ::= LDAPString -- Constrained to <name-component> # -- [RFC4514] # subtypeSpec = LDAPString.subtypeSpec + nameComponentConstraint pass class AttributeDescription(LDAPString): # AttributeDescription ::= LDAPString -- Constrained to <attributedescription> # -- [RFC4512] # subtypeSpec = LDAPString.subtypeSpec + attributeDescriptionConstraint pass class AttributeValue(OctetString): # AttributeValue ::= OCTET STRING encoding = 'utf-8' class AssertionValue(OctetString): # AssertionValue ::= OCTET STRING encoding = 'utf-8' class AttributeValueAssertion(Sequence): # AttributeValueAssertion ::= SEQUENCE { # attributeDesc AttributeDescription, # assertionValue AssertionValue } componentType = NamedTypes(NamedType('attributeDesc', AttributeDescription()), NamedType('assertionValue', AssertionValue())) class MatchingRuleId(LDAPString): # MatchingRuleId ::= LDAPString pass class Vals(SetOf): # vals SET OF value AttributeValue } componentType = AttributeValue() class ValsAtLeast1(SetOf): # vals SET OF value AttributeValue } componentType = AttributeValue() subtypeSpec = SetOf.subtypeSpec + size1ToMaxConstraint class PartialAttribute(Sequence): # PartialAttribute ::= SEQUENCE { # type AttributeDescription, # vals SET OF value AttributeValue } componentType = NamedTypes(NamedType('type', AttributeDescription()), NamedType('vals', Vals())) class Attribute(Sequence): # Attribute ::= PartialAttribute(WITH COMPONENTS { # ..., # vals (SIZE(1..MAX))}) componentType = NamedTypes(NamedType('type', AttributeDescription()), # NamedType('vals', ValsAtLeast1())) NamedType('vals', Vals())) # changed from ValsAtLeast1() to allow empty member values in 
groups - this should not be as per rfc4511 4.1.7, but openldap accept it class AttributeList(SequenceOf): # AttributeList ::= SEQUENCE OF attribute Attribute componentType = Attribute() class Simple(OctetString): # simple [0] OCTET STRING, tagSet = OctetString.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatSimple, 0)) encoding = 'utf-8' class Credentials(OctetString): # credentials OCTET STRING encoding = 'utf-8' class SaslCredentials(Sequence): # SaslCredentials ::= SEQUENCE { # mechanism LDAPString, # credentials OCTET STRING OPTIONAL } tagSet = Sequence.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatConstructed, 3)) componentType = NamedTypes(NamedType('mechanism', LDAPString()), OptionalNamedType('credentials', Credentials())) # not in RFC4511 but used by Microsoft to embed the NTLM protocol in the BindRequest (Sicily Protocol) class SicilyPackageDiscovery(OctetString): # sicilyPackageDiscovery [9] OCTET STRING, tagSet = OctetString.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatSimple, 9)) encoding = 'utf-8' # not in RFC4511 but used by Microsoft to embed the NTLM protocol in the BindRequest (Sicily Protocol) class SicilyNegotiate(OctetString): # sicilyNegotiate [10] OCTET STRING, tagSet = OctetString.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatSimple, 10)) encoding = 'utf-8' # not in RFC4511 but used by Microsoft to embed the NTLM protocol in the BindRequest (Sicily Protocol) class SicilyResponse(OctetString): # sicilyResponse [11] OCTET STRING, tagSet = OctetString.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatSimple, 11)) encoding = 'utf-8' class AuthenticationChoice(Choice): # AuthenticationChoice ::= CHOICE { # simple [0] OCTET STRING, # -- 1 and 2 reserved # sasl [3] SaslCredentials, # ... 
} # from https://msdn.microsoft.com/en-us/library/cc223498.aspx # legacy NTLM authentication for Windows Active Directory # sicilyPackageDiscovery [9] OCTET STRING # sicilyNegotiate [10] OCTET STRING # sicilyResponse [11] OCTET STRING } componentType = NamedTypes(NamedType('simple', Simple()), NamedType('sasl', SaslCredentials()), NamedType('sicilyPackageDiscovery', SicilyPackageDiscovery()), NamedType('sicilyNegotiate', SicilyNegotiate()), NamedType('sicilyResponse', SicilyResponse()), ) class Version(Integer): # version INTEGER (1 .. 127), subtypeSpec = Integer.subtypeSpec + rangeInt1To127Constraint class ResultCode(Enumerated): # resultCode ENUMERATED { # success (0), # operationsError (1), # protocolError (2), # timeLimitExceeded (3), # sizeLimitExceeded (4), # compareFalse (5), # compareTrue (6), # authMethodNotSupported (7), # strongerAuthRequired (8), # -- 9 reserved -- # referral (10), # adminLimitExceeded (11), # unavailableCriticalExtension (12), # confidentialityRequired (13), # saslBindInProgress (14), # noSuchAttribute (16), # undefinedAttributeType (17), # inappropriateMatching (18), # constraintViolation (19), # attributeOrValueExists (20), # invalidAttributeSyntax (21), # -- 22-31 unused -- # noSuchObject (32), # aliasProblem (33), # invalidDNSyntax (34), # -- 35 reserved for undefined isLeaf -- # aliasDereferencingProblem (36), # -- 37-47 unused -- # inappropriateAuthentication (48), # invalidCredentials (49), # insufficientAccessRights (50), # busy (51), # unavailable (52), # unwillingToPerform (53), # loopDetect (54), # -- 55-63 unused -- # namingViolation (64), # objectClassViolation (65), # notAllowedOnNonLeaf (66), # notAllowedOnRDN (67), # entryAlreadyExists (68), # objectClassModsProhibited (69), # -- 70 reserved for CLDAP -- # affectsMultipleDSAs (71), # -- 72-79 unused -- # other (80), # ... 
} # # from IANA ldap-parameters: # lcupResourcesExhausted 113 IESG [RFC3928] # lcupSecurityViolation 114 IESG [RFC3928] # lcupInvalidData 115 IESG [RFC3928] # lcupUnsupportedScheme 116 IESG [RFC3928] # lcupReloadRequired 117 IESG [RFC3928] # canceled 118 IESG [RFC3909] # noSuchOperation 119 IESG [RFC3909] # tooLate 120 IESG [RFC3909] # cannotCancel 121 IESG [RFC3909] # assertionFailed 122 IESG [RFC4528] # authorizationDenied 123 WELTMAN [RFC4370] # e-syncRefreshRequired 4096 [Kurt_Zeilenga] [Jong_Hyuk_Choi] [RFC4533] namedValues = NamedValues(('success', 0), ('operationsError', 1), ('protocolError', 2), ('timeLimitExceeded', 3), ('sizeLimitExceeded', 4), ('compareFalse', 5), ('compareTrue', 6), ('authMethodNotSupported', 7), ('strongerAuthRequired', 8), ('referral', 10), ('adminLimitExceeded', 11), ('unavailableCriticalExtension', 12), ('confidentialityRequired', 13), ('saslBindInProgress', 14), ('noSuchAttribute', 16), ('undefinedAttributeType', 17), ('inappropriateMatching', 18), ('constraintViolation', 19), ('attributeOrValueExists', 20), ('invalidAttributeSyntax', 21), ('noSuchObject', 32), ('aliasProblem', 33), ('invalidDNSyntax', 34), ('aliasDereferencingProblem', 36), ('inappropriateAuthentication', 48), ('invalidCredentials', 49), ('insufficientAccessRights', 50), ('busy', 51), ('unavailable', 52), ('unwillingToPerform', 53), ('loopDetected', 54), ('namingViolation', 64), ('objectClassViolation', 65), ('notAllowedOnNonLeaf', 66), ('notAllowedOnRDN', 67), ('entryAlreadyExists', 68), ('objectClassModsProhibited', 69), ('affectMultipleDSAs', 71), ('other', 80), ('lcupResourcesExhausted', 113), ('lcupSecurityViolation', 114), ('lcupInvalidData', 115), ('lcupUnsupportedScheme', 116), ('lcupReloadRequired', 117), ('canceled', 118), ('noSuchOperation', 119), ('tooLate', 120), ('cannotCancel', 121), ('assertionFailed', 122), ('authorizationDenied', 123), ('e-syncRefreshRequired', 4096)) subTypeSpec = Enumerated.subtypeSpec + responseValueConstraint class 
URI(LDAPString): # URI ::= LDAPString -- limited to characters permitted in # -- URIs # subtypeSpec = LDAPString.subTypeSpec + uriConstrain pass class Referral(SequenceOf): # Referral ::= SEQUENCE SIZE (1..MAX) OF uri URI tagSet = SequenceOf.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatConstructed, 3)) componentType = URI() class ServerSaslCreds(OctetString): # serverSaslCreds [7] OCTET STRING OPTIONAL tagSet = OctetString.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatSimple, 7)) encoding = 'utf-8' class LDAPResult(Sequence): # LDAPResult ::= SEQUENCE { # resultCode ENUMERATED { # success (0), # operationsError (1), # protocolError (2), # timeLimitExceeded (3), # sizeLimitExceeded (4), # compareFalse (5), # compareTrue (6), # authMethodNotSupported (7), # strongerAuthRequired (8), # -- 9 reserved -- # referral (10), # adminLimitExceeded (11), # unavailableCriticalExtension (12), # confidentialityRequired (13), # saslBindInProgress (14), # noSuchAttribute (16), # undefinedAttributeType (17), # inappropriateMatching (18), # constraintViolation (19), # attributeOrValueExists (20), # invalidAttributeSyntax (21), # -- 22-31 unused -- # noSuchObject (32), # aliasProblem (33), # invalidDNSyntax (34), # -- 35 reserved for undefined isLeaf -- # aliasDereferencingProblem (36), # -- 37-47 unused -- # inappropriateAuthentication (48), # invalidCredentials (49), # insufficientAccessRights (50), # busy (51), # unavailable (52), # unwillingToPerform (53), # loopDetect (54), # -- 55-63 unused -- # namingViolation (64), # objectClassViolation (65), # notAllowedOnNonLeaf (66), # notAllowedOnRDN (67), # entryAlreadyExists (68), # objectClassModsProhibited (69), # -- 70 reserved for CLDAP -- # affectsMultipleDSAs (71), # -- 72-79 unused -- # other (80), # ... 
}, # matchedDN LDAPDN, # diagnosticMessage LDAPString, # referral [3] Referral OPTIONAL } componentType = NamedTypes(NamedType('resultCode', ResultCode()), NamedType('matchedDN', LDAPDN()), NamedType('diagnosticMessage', LDAPString()), OptionalNamedType('referral', Referral())) class Criticality(Boolean): # criticality BOOLEAN DEFAULT FALSE defaultValue = False class ControlValue(OctetString): # controlValue OCTET STRING encoding = 'utf-8' class Control(Sequence): # Control ::= SEQUENCE { # controlType LDAPOID, # criticality BOOLEAN DEFAULT FALSE, # controlValue OCTET STRING OPTIONAL } componentType = NamedTypes(NamedType('controlType', LDAPOID()), DefaultedNamedType('criticality', Criticality()), OptionalNamedType('controlValue', ControlValue())) class Controls(SequenceOf): # Controls ::= SEQUENCE OF control Control tagSet = SequenceOf.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatConstructed, 0)) componentType = Control() class Scope(Enumerated): # scope ENUMERATED { # baseObject (0), # singleLevel (1), # wholeSubtree (2), namedValues = NamedValues(('baseObject', 0), ('singleLevel', 1), ('wholeSubtree', 2)) class DerefAliases(Enumerated): # derefAliases ENUMERATED { # neverDerefAliases (0), # derefInSearching (1), # derefFindingBaseObj (2), # derefAlways (3) }, namedValues = NamedValues(('neverDerefAliases', 0), ('derefInSearching', 1), ('derefFindingBaseObj', 2), ('derefAlways', 3)) class TypesOnly(Boolean): # typesOnly BOOLEAN pass class Selector(LDAPString): # -- The LDAPString is constrained to # -- <attributeSelector> in Section 4.5.1.8 # subtypeSpec = LDAPString.subtypeSpec + attributeSelectorConstraint pass class AttributeSelection(SequenceOf): # AttributeSelection ::= SEQUENCE OF selector LDAPString # -- The LDAPString is constrained to # -- <attributeSelector> in Section 4.5.1.8 componentType = Selector() class MatchingRule(MatchingRuleId): # matchingRule [1] MatchingRuleId tagSet = MatchingRuleId.tagSet.tagImplicitly(Tag(tagClassContext, 
tagFormatSimple, 1)) class Type(AttributeDescription): # type [2] AttributeDescription tagSet = AttributeDescription.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatSimple, 2)) class MatchValue(AssertionValue): # matchValue [3] AssertionValue, tagSet = AssertionValue.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatSimple, 3)) class DnAttributes(Boolean): # dnAttributes [4] BOOLEAN DEFAULT FALSE } tagSet = Boolean.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatSimple, 4)) defaultValue = Boolean(False) class MatchingRuleAssertion(Sequence): # MatchingRuleAssertion ::= SEQUENCE { # matchingRule [1] MatchingRuleId OPTIONAL, # type [2] AttributeDescription OPTIONAL, # matchValue [3] AssertionValue, # dnAttributes [4] BOOLEAN DEFAULT FALSE } componentType = NamedTypes(OptionalNamedType('matchingRule', MatchingRule()), OptionalNamedType('type', Type()), NamedType('matchValue', MatchValue()), DefaultedNamedType('dnAttributes', DnAttributes())) class Initial(AssertionValue): # initial [0] AssertionValue, -- can occur at most once tagSet = AssertionValue.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatSimple, 0)) class Any(AssertionValue): # any [1] AssertionValue, tagSet = AssertionValue.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatSimple, 1)) class Final(AssertionValue): # final [1] AssertionValue, -- can occur at most once tagSet = AssertionValue.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatSimple, 2)) class Substring(Choice): # substring CHOICE { # initial [0] AssertionValue, -- can occur at most once # any [1] AssertionValue, # final [2] AssertionValue } -- can occur at most once # } componentType = NamedTypes(NamedType('initial', Initial()), NamedType('any', Any()), NamedType('final', Final())) class Substrings(SequenceOf): # substrings SEQUENCE SIZE (1..MAX) OF substring CHOICE { # ... 
# } subtypeSpec = SequenceOf.subtypeSpec + size1ToMaxConstraint componentType = Substring() class SubstringFilter(Sequence): # SubstringFilter ::= SEQUENCE { # type AttributeDescription, # substrings SEQUENCE SIZE (1..MAX) OF substring CHOICE { # initial [0] AssertionValue, -- can occur at most once # any [1] AssertionValue, # final [2] AssertionValue } -- can occur at most once # } tagSet = Sequence.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatConstructed, 4)) componentType = NamedTypes(NamedType('type', AttributeDescription()), NamedType('substrings', Substrings())) class And(SetOf): # and [0] SET SIZE (1..MAX) OF filter Filter tagSet = SetOf.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatConstructed, 0)) subtypeSpec = SetOf.subtypeSpec + size1ToMaxConstraint class Or(SetOf): # or [1] SET SIZE (1..MAX) OF filter Filter tagSet = SetOf.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatConstructed, 1)) subtypeSpec = SetOf.subtypeSpec + size1ToMaxConstraint class Not(Choice): # not [2] Filter pass # defined after Filter definition to allow recursion class EqualityMatch(AttributeValueAssertion): # equalityMatch [3] AttributeValueAssertion tagSet = AttributeValueAssertion.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatConstructed, 3)) class GreaterOrEqual(AttributeValueAssertion): # greaterOrEqual [5] AttributeValueAssertion tagSet = AttributeValueAssertion.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatConstructed, 5)) class LessOrEqual(AttributeValueAssertion): # lessOrEqual [6] AttributeValueAssertion tagSet = AttributeValueAssertion.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatConstructed, 6)) class Present(AttributeDescription): # present [7] AttributeDescription tagSet = AttributeDescription.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatConstructed, 7)) class ApproxMatch(AttributeValueAssertion): # approxMatch [8] AttributeValueAssertion tagSet = AttributeValueAssertion.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatConstructed, 
8)) class ExtensibleMatch(MatchingRuleAssertion): # extensibleMatch [9] MatchingRuleAssertion tagSet = MatchingRuleAssertion.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatConstructed, 9)) class Filter(Choice): # Filter ::= CHOICE { # and [0] SET SIZE (1..MAX) OF filter Filter, # or [1] SET SIZE (1..MAX) OF filter Filter, # not [2] Filter, # equalityMatch [3] AttributeValueAssertion, # substrings [4] SubstringFilter, # greaterOrEqual [5] AttributeValueAssertion, # lessOrEqual [6] AttributeValueAssertion, # present [7] AttributeDescription, # approxMatch [8] AttributeValueAssertion, # extensibleMatch [9] MatchingRuleAssertion, # ... } componentType = NamedTypes(NamedType('and', And()), NamedType('or', Or()), NamedType('notFilter', Not()), NamedType('equalityMatch', EqualityMatch()), NamedType('substringFilter', SubstringFilter()), NamedType('greaterOrEqual', GreaterOrEqual()), NamedType('lessOrEqual', LessOrEqual()), NamedType('present', Present()), NamedType('approxMatch', ApproxMatch()), NamedType('extensibleMatch', ExtensibleMatch())) And.componentType = Filter() Or.componentType = Filter() Not.componentType = NamedTypes(NamedType('innerNotFilter', Filter())) Not.tagSet = Filter.tagSet.tagExplicitly(Tag(tagClassContext, tagFormatConstructed, 2)) # as per RFC4511 page 23 class PartialAttributeList(SequenceOf): # PartialAttributeList ::= SEQUENCE OF # partialAttribute PartialAttribute componentType = PartialAttribute() class Operation(Enumerated): # operation ENUMERATED { # add (0), # delete (1), # replace (2), # ... } namedValues = NamedValues(('add', 0), ('delete', 1), ('replace', 2), ('increment', 3)) class Change(Sequence): # change SEQUENCE { # operation ENUMERATED { # add (0), # delete (1), # replace (2), # ... 
}, # modification PartialAttribute } } componentType = NamedTypes(NamedType('operation', Operation()), NamedType('modification', PartialAttribute())) class Changes(SequenceOf): # changes SEQUENCE OF change SEQUENCE componentType = Change() class DeleteOldRDN(Boolean): # deleteoldrdn BOOLEAN pass class NewSuperior(LDAPDN): # newSuperior [0] LDAPDN tagSet = LDAPDN.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatSimple, 0)) class RequestName(LDAPOID): # requestName [0] LDAPOID tagSet = LDAPOID.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatSimple, 0)) class RequestValue(OctetString): # requestValue [1] OCTET STRING tagSet = OctetString.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatSimple, 1)) encoding = 'utf-8' class ResponseName(LDAPOID): # responseName [10] LDAPOID tagSet = LDAPOID.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatSimple, 10)) class ResponseValue(OctetString): # responseValue [11] OCTET STRING tagSet = OctetString.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatSimple, 11)) encoding = 'utf-8' class IntermediateResponseName(LDAPOID): # responseName [0] LDAPOID tagSet = LDAPOID.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatSimple, 0)) class IntermediateResponseValue(OctetString): # responseValue [1] OCTET STRING tagSet = OctetString.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatSimple, 1)) encoding = 'utf-8' # operations class BindRequest(Sequence): # BindRequest ::= [APPLICATION 0] SEQUENCE { # version INTEGER (1 .. 
127), # name LDAPDN, # authentication AuthenticationChoice } tagSet = Sequence.tagSet.tagImplicitly(Tag(tagClassApplication, tagFormatConstructed, 0)) componentType = NamedTypes(NamedType('version', Version()), NamedType('name', LDAPDN()), NamedType('authentication', AuthenticationChoice())) class BindResponse(Sequence): # BindResponse ::= [APPLICATION 1] SEQUENCE { # COMPONENTS OF LDAPResult, # serverSaslCreds [7] OCTET STRING OPTIONAL } tagSet = Sequence.tagSet.tagImplicitly(Tag(tagClassApplication, tagFormatConstructed, 1)) componentType = NamedTypes(NamedType('resultCode', ResultCode()), NamedType('matchedDN', LDAPDN()), NamedType('diagnosticMessage', LDAPString()), OptionalNamedType('referral', Referral()), OptionalNamedType('serverSaslCreds', ServerSaslCreds())) class UnbindRequest(Null): # UnbindRequest ::= [APPLICATION 2] NULL tagSet = Null.tagSet.tagImplicitly(Tag(tagClassApplication, tagFormatSimple, 2)) class SearchRequest(Sequence): # SearchRequest ::= [APPLICATION 3] SEQUENCE { # baseObject LDAPDN, # scope ENUMERATED { # baseObject (0), # singleLevel (1), # wholeSubtree (2), # ... }, # derefAliases ENUMERATED { # neverDerefAliases (0), # derefInSearching (1), # derefFindingBaseObj (2), # derefAlways (3) }, # sizeLimit INTEGER (0 .. maxInt), # timeLimit INTEGER (0 .. 
maxInt), # typesOnly BOOLEAN, # filter Filter, # attributes AttributeSelection } tagSet = Sequence.tagSet.tagImplicitly(Tag(tagClassApplication, tagFormatConstructed, 3)) componentType = NamedTypes(NamedType('baseObject', LDAPDN()), NamedType('scope', Scope()), NamedType('derefAliases', DerefAliases()), NamedType('sizeLimit', Integer0ToMax()), NamedType('timeLimit', Integer0ToMax()), NamedType('typesOnly', TypesOnly()), NamedType('filter', Filter()), NamedType('attributes', AttributeSelection())) class SearchResultReference(SequenceOf): # SearchResultReference ::= [APPLICATION 19] SEQUENCE # SIZE (1..MAX) OF uri URI tagSet = SequenceOf.tagSet.tagImplicitly(Tag(tagClassApplication, tagFormatConstructed, 19)) subtypeSpec = SequenceOf.subtypeSpec + size1ToMaxConstraint componentType = URI() class SearchResultEntry(Sequence): # SearchResultEntry ::= [APPLICATION 4] SEQUENCE { # objectName LDAPDN, # attributes PartialAttributeList } tagSet = Sequence.tagSet.tagImplicitly(Tag(tagClassApplication, tagFormatConstructed, 4)) componentType = NamedTypes(NamedType('object', LDAPDN()), NamedType('attributes', PartialAttributeList())) class SearchResultDone(LDAPResult): # SearchResultDone ::= [APPLICATION 5] LDAPResult tagSet = LDAPResult.tagSet.tagImplicitly(Tag(tagClassApplication, tagFormatConstructed, 5)) class ModifyRequest(Sequence): # ModifyRequest ::= [APPLICATION 6] SEQUENCE { # object LDAPDN, # changes SEQUENCE OF change SEQUENCE { # operation ENUMERATED { # add (0), # delete (1), # replace (2), # ... 
}, # modification PartialAttribute } } tagSet = Sequence.tagSet.tagImplicitly(Tag(tagClassApplication, tagFormatConstructed, 6)) componentType = NamedTypes(NamedType('object', LDAPDN()), NamedType('changes', Changes())) class ModifyResponse(LDAPResult): # ModifyResponse ::= [APPLICATION 7] LDAPResult tagSet = LDAPResult.tagSet.tagImplicitly(Tag(tagClassApplication, tagFormatConstructed, 7)) class AddRequest(Sequence): # AddRequest ::= [APPLICATION 8] SEQUENCE { # entry LDAPDN, # attributes AttributeList } tagSet = Sequence.tagSet.tagImplicitly(Tag(tagClassApplication, tagFormatConstructed, 8)) componentType = NamedTypes(NamedType('entry', LDAPDN()), NamedType('attributes', AttributeList())) class AddResponse(LDAPResult): # AddResponse ::= [APPLICATION 9] LDAPResult tagSet = LDAPResult.tagSet.tagImplicitly(Tag(tagClassApplication, tagFormatConstructed, 9)) class DelRequest(LDAPDN): # DelRequest ::= [APPLICATION 10] LDAPDN tagSet = LDAPDN.tagSet.tagImplicitly(Tag(tagClassApplication, tagFormatSimple, 10)) class DelResponse(LDAPResult): # DelResponse ::= [APPLICATION 11] LDAPResult tagSet = LDAPResult.tagSet.tagImplicitly(Tag(tagClassApplication, tagFormatConstructed, 11)) class ModifyDNRequest(Sequence): # ModifyDNRequest ::= [APPLICATION 12] SEQUENCE { # entry LDAPDN, # newrdn RelativeLDAPDN, # deleteoldrdn BOOLEAN, # newSuperior [0] LDAPDN OPTIONAL } tagSet = Sequence.tagSet.tagImplicitly(Tag(tagClassApplication, tagFormatConstructed, 12)) componentType = NamedTypes(NamedType('entry', LDAPDN()), NamedType('newrdn', RelativeLDAPDN()), NamedType('deleteoldrdn', DeleteOldRDN()), OptionalNamedType('newSuperior', NewSuperior())) class ModifyDNResponse(LDAPResult): # ModifyDNResponse ::= [APPLICATION 13] LDAPResult tagSet = LDAPResult.tagSet.tagImplicitly(Tag(tagClassApplication, tagFormatConstructed, 13)) class CompareRequest(Sequence): # CompareRequest ::= [APPLICATION 14] SEQUENCE { # entry LDAPDN, # ava AttributeValueAssertion } tagSet = 
Sequence.tagSet.tagImplicitly(Tag(tagClassApplication, tagFormatConstructed, 14)) componentType = NamedTypes(NamedType('entry', LDAPDN()), NamedType('ava', AttributeValueAssertion())) class CompareResponse(LDAPResult): # CompareResponse ::= [APPLICATION 15] LDAPResult tagSet = LDAPResult.tagSet.tagImplicitly(Tag(tagClassApplication, tagFormatConstructed, 15)) class AbandonRequest(MessageID): # AbandonRequest ::= [APPLICATION 16] MessageID tagSet = MessageID.tagSet.tagImplicitly(Tag(tagClassApplication, tagFormatSimple, 16)) class ExtendedRequest(Sequence): # ExtendedRequest ::= [APPLICATION 23] SEQUENCE { # requestName [0] LDAPOID, # requestValue [1] OCTET STRING OPTIONAL } tagSet = Sequence.tagSet.tagImplicitly(Tag(tagClassApplication, tagFormatConstructed, 23)) componentType = NamedTypes(NamedType('requestName', RequestName()), OptionalNamedType('requestValue', RequestValue())) class ExtendedResponse(Sequence): # ExtendedResponse ::= [APPLICATION 24] SEQUENCE { # COMPONENTS OF LDAPResult, # responseName [10] LDAPOID OPTIONAL, # responseValue [11] OCTET STRING OPTIONAL } tagSet = Sequence.tagSet.tagImplicitly(Tag(tagClassApplication, tagFormatConstructed, 24)) componentType = NamedTypes(NamedType('resultCode', ResultCode()), NamedType('matchedDN', LDAPDN()), NamedType('diagnosticMessage', LDAPString()), OptionalNamedType('referral', Referral()), OptionalNamedType('responseName', ResponseName()), OptionalNamedType('responseValue', ResponseValue())) class IntermediateResponse(Sequence): # IntermediateResponse ::= [APPLICATION 25] SEQUENCE { # responseName [0] LDAPOID OPTIONAL, # responseValue [1] OCTET STRING OPTIONAL } tagSet = Sequence.tagSet.tagImplicitly(Tag(tagClassApplication, tagFormatConstructed, 25)) componentType = NamedTypes(OptionalNamedType('responseName', IntermediateResponseName()), OptionalNamedType('responseValue', IntermediateResponseValue())) class ProtocolOp(Choice): # protocolOp CHOICE { # bindRequest BindRequest, # bindResponse BindResponse, # 
unbindRequest UnbindRequest, # searchRequest SearchRequest, # searchResEntry SearchResultEntry, # searchResDone SearchResultDone, # searchResRef SearchResultReference, # modifyRequest ModifyRequest, # modifyResponse ModifyResponse, # addRequest AddRequest, # addResponse AddResponse, # delRequest DelRequest, # delResponse DelResponse, # modDNRequest ModifyDNRequest, # modDNResponse ModifyDNResponse, # compareRequest CompareRequest, # compareResponse CompareResponse, # abandonRequest AbandonRequest, # extendedReq ExtendedRequest, # extendedResp ExtendedResponse, # ..., # intermediateResponse IntermediateResponse } componentType = NamedTypes(NamedType('bindRequest', BindRequest()), NamedType('bindResponse', BindResponse()), NamedType('unbindRequest', UnbindRequest()), NamedType('searchRequest', SearchRequest()), NamedType('searchResEntry', SearchResultEntry()), NamedType('searchResDone', SearchResultDone()), NamedType('searchResRef', SearchResultReference()), NamedType('modifyRequest', ModifyRequest()), NamedType('modifyResponse', ModifyResponse()), NamedType('addRequest', AddRequest()), NamedType('addResponse', AddResponse()), NamedType('delRequest', DelRequest()), NamedType('delResponse', DelResponse()), NamedType('modDNRequest', ModifyDNRequest()), NamedType('modDNResponse', ModifyDNResponse()), NamedType('compareRequest', CompareRequest()), NamedType('compareResponse', CompareResponse()), NamedType('abandonRequest', AbandonRequest()), NamedType('extendedReq', ExtendedRequest()), NamedType('extendedResp', ExtendedResponse()), NamedType('intermediateResponse', IntermediateResponse())) class LDAPMessage(Sequence): # LDAPMessage ::= SEQUENCE { # messageID MessageID, # protocolOp CHOICE { # bindRequest BindRequest, # bindResponse BindResponse, # unbindRequest UnbindRequest, # searchRequest SearchRequest, # searchResEntry SearchResultEntry, # searchResDone SearchResultDone, # searchResRef SearchResultReference, # modifyRequest ModifyRequest, # modifyResponse 
ModifyResponse, # addRequest AddRequest, # addResponse AddResponse, # delRequest DelRequest, # delResponse DelResponse, # modDNRequest ModifyDNRequest, # modDNResponse ModifyDNResponse, # compareRequest CompareRequest, # compareResponse CompareResponse, # abandonRequest AbandonRequest, # extendedReq ExtendedRequest, # extendedResp ExtendedResponse, # ..., # intermediateResponse IntermediateResponse }, # controls [0] Controls OPTIONAL } componentType = NamedTypes(NamedType('messageID', MessageID()), NamedType('protocolOp', ProtocolOp()), OptionalNamedType('controls', Controls()))
{'repo_name': 'tp4a/teleport', 'stars': '630', 'repo_language': 'Python', 'file_name': 'modules.xml', 'mime_type': 'text/xml', 'hash': -8591838389567759614, 'source_dataset': 'data'}
/* Ole Agesen, 1994. */
/*
 * Converts Self privacy declarations to comments: inside a slot list
 * (delimited by lone '|' bars) the tokens "_", "^", "_^" and "^_" are
 * rewritten as the string literals "_", "^", "_^" and "^_".
 * Each input file is first renamed to <name>.BAK and then rewritten
 * in place from that backup.
 */

#include <ctype.h>
#include <stdio.h>
#include <string.h>
#include <errno.h>
#include <stdlib.h>  /* exit(); the original relied on an implicit declaration,
                        which is invalid in C99 and later */

#define FALSE 0
#define TRUE 1
#define MAX_LINE_LEN 10000

FILE *inFile, *outFile;
int lineNo = 0;                /* current line number of the file being converted */

/* Bracket-nesting stack: on '[' / '(' we save whether the enclosing scope
   was a slot list, and restore it on the matching ']' / ')'. */
int stack[1000], stackTop = 0;

void push(int e) {
  /* Guard against overflow of the fixed-size nesting stack (was unchecked). */
  if (stackTop >= (int)(sizeof(stack) / sizeof(stack[0]))) {
    fprintf(stderr, "Nesting too deep, line %d.\n", lineNo);
    exit(-1);
  }
  stack[stackTop] = e;
  stackTop++;
}

int pop() {
  if (0 == stackTop) {
    fprintf(stderr, "Empty stack, line %d.\n", lineNo);
    exit(-1);
  }
  stackTop--;
  return stack[stackTop];
}

/* Returns TRUE when the '|' at line[i] stands alone, i.e. is a slot-list
   delimiter rather than part of a binary operator such as "||" or "|=". */
int isLoneBar(char line[], int i) {
  char ch;
  if (0 != i) {
    ch = line[i-1];
    if (strchr("!@#$%^&*-+=~/?<>,;|`\\", ch))
      /* Probably a binary operator. */
      return FALSE;
  }
  ch = line[i+1];
  if (ch) {
    if (strchr("!@#$%^&*-+=~/?<>,;|`\\", ch))
      /* Probably a binary operator. */
      return FALSE;
  }
  return TRUE;
}

void escapeError(char ch) {
  fprintf(stderr, "Should not escape %c, line %d\n", ch, lineNo);
  exit(-1);
}

int isBlank(char ch) {
  return ch == ' ' || ch == '\n' || ch == '\t';
}

/* Output buffer for one converted line.  Sized at 3x the input limit because
   each input character can expand to at most three output characters
   ("_" -> "\"_\""); the original MAX_LINE_LEN buffer could overflow. */
char outLine[3 * MAX_LINE_LEN];
int outIdx = 0;

void output1(char ch) {
  outLine[outIdx++] = ch;
}

void output(char *str) {
  while (*str)
    outLine[outIdx++] = *str++;
}

void back2() {
  return; /* Currently not used. */
  if (0 != outIdx && outLine[outIdx-1] == ' ')
    outIdx--;
  if (0 != outIdx && outLine[outIdx-1] == ' ')
    outIdx--;
}

/* Returns TRUE when `target` occurs at line[i] as a whole blank-delimited
   token (a trailing backslash also counts as a delimiter). */
int checkFor(char *target, char *line, int i) {
  if (i != 0 && !isBlank(line[i-1]))
    return FALSE;
  if (strncmp(target, line + i, strlen(target)))
    return FALSE;
  i = i + strlen(target);
  if (line[i] && !isBlank(line[i]) && line[i] != '\\')
    return FALSE;
  return TRUE;
}

void printOutput() {
  outLine[outIdx] = '\0';
  fputs(outLine, outFile);
}

void processFile(char *filename) {
  char inLine[MAX_LINE_LEN], bakFilename[1000];
  int inSlotList = 0, inSingleQuotes = 0, inDoubleQuotes = 0;

  /* Reset the global nesting stack: a previous malformed file must not
     leak stale slot-list state into this one (was never reset). */
  stackTop = 0;

  fprintf(stderr, "Converting privacy decl's in %s\n", filename);
  /* Guard the fixed-size backup-name buffer (name + ".BAK" + NUL). */
  if (strlen(filename) + 5 > sizeof(bakFilename)) {
    fprintf(stderr, "File name too long: %s\n", filename);
    exit(-1);
  }
  strcpy(bakFilename, filename);
  strcat(bakFilename, ".BAK");
  if (rename(filename, bakFilename)) {
    fprintf(stderr, "Could not rename file %s to %s: ", filename, bakFilename);
    perror(NULL);
    exit(-1);
  }
  inFile = fopen(bakFilename, "r");
  if (!inFile) {
    fprintf(stderr, "Could not open file %s for reading: ", bakFilename);
    perror(NULL);
    exit(-1);
  }
  outFile = fopen(filename, "w");
  if (!outFile) {
    fprintf(stderr, "Could not open file %s for writing: ", filename);
    perror(NULL);
    exit(-1);
  }
  lineNo = 0;
  while (fgets(inLine, MAX_LINE_LEN, inFile)) {
    int len, esc = 0; /* NB: It is correct to start each line non-escaped! */
    int i = 0;
    outIdx = 0;       /* Start new output line. */
    len = strlen(inLine);
    lineNo++;
    while (i < len) {
      int isPrinted = 0;
      switch (inLine[i]) {
      case '\'':
        if (!inDoubleQuotes && !esc)
          inSingleQuotes = !inSingleQuotes;
        break;
      case '"':
        if (!inSingleQuotes && !esc)
          inDoubleQuotes = !inDoubleQuotes;
        break;
      case '|':
        /* A lone bar outside any quotes toggles slot-list mode. */
        if (!inSingleQuotes && !inDoubleQuotes && isLoneBar(inLine, i)) {
          if (esc) escapeError(inLine[i]);
          inSlotList = !inSlotList;
        }
        break;
      case '[':
      case '(':
        if (!inSingleQuotes && !inDoubleQuotes) {
          if (esc) escapeError(inLine[i]);
          push(inSlotList);
          inSlotList = FALSE;
        }
        break;
      case ']':
      case ')':
        if (!inSingleQuotes && !inDoubleQuotes) {
          if (esc) escapeError(inLine[i]);
          inSlotList = pop();
        }
        break;
      case '_':
        /* Privacy token: quote it.  The two-character forms consume the
           following character as well (hence the extra i++). */
        if (inSlotList && !inSingleQuotes && !inDoubleQuotes) {
          if (esc) escapeError(inLine[i]);
          if (checkFor("_", inLine, i)) {
            back2();
            output("\"_\"");
            isPrinted = TRUE;
          } else if (checkFor("_^", inLine, i)) {
            back2();
            output("\"_^\"");
            isPrinted = TRUE;
            i++;
          }
        }
        break;
      case '^':
        if (inSlotList && !inSingleQuotes && !inDoubleQuotes) {
          if (esc) escapeError(inLine[i]);
          if (checkFor("^", inLine, i)) {
            back2();
            output("\"^\"");
            isPrinted = TRUE;
          } else if (checkFor("^_", inLine, i)) {
            back2();
            output("\"^_\"");
            isPrinted = TRUE;
            i++;
          }
        }
        break;
      default:
        break;
      }
      if (inLine[i] == '\\')
        esc = !esc;
      else
        esc = 0;
      if (!isPrinted)
        output1(inLine[i]);
      i++;
    }
    printOutput();
  }
  fclose(inFile);
  fclose(outFile);
  if (inSingleQuotes)
    fprintf(stderr, "Warning: finished inside single-quotes - check result!\n");
  if (inDoubleQuotes)
    fprintf(stderr, "Warning: Finished inside double-quotes - check result!\n");
  if (stackTop)
    fprintf(stderr, "Warning: Finished with non-empty stack (size %d) "
            "- check result!\n", stackTop);
}

int main(int argc, char *argv[]) {
  int i = 1;
  if (argc == 1 || (argc > 1 && !strcmp("-h", argv[1]))) {
    fprintf(stderr, "%s is a program that will convert Self privacy decl's "
            "to comments.\n", argv[0]);
    fprintf(stderr, "Usage: %s filename1 filename2 ...\n", argv[0]);
    fprintf(stderr, "The original files will be saved with extension .BAK\n");
    exit(0);
  }
  while (i < argc) {
    processFile(argv[i++]);
  }
  return 0;
}
{'repo_name': 'russellallen/self', 'stars': '509', 'repo_language': 'Self', 'file_name': 'Info.plist', 'mime_type': 'text/xml', 'hash': -5927022175562864285, 'source_dataset': 'data'}
<html xmlns:v="urn:schemas-microsoft-com:vml" xmlns:o="urn:schemas-microsoft-com:office:office" > <head> <title>Test of dojox.gfx3d.scheduler</title> <meta http-equiv="Content-Type" content="text/html; charset=utf-8" /> <style type="text/css"> @import "../../../dojo/resources/dojo.css"; @import "../../../dijit/tests/css/dijitTests.css"; </style> <script type="text/javascript" src="../../../dojo/dojo.js" data-dojo-config="isDebug: true"></script> <script type="text/javascript"> dojo.require("dojox.gfx3d"); var view = null; makeObjects = function(){ var surface = dojox.gfx.createSurface("test", 500, 500); view = surface.createViewport(); var tas = [ [{x: 100, y: 0, z: 0}, {x: 100, y: 100, z: 0}, {x: 50, y: 50, z: 50}], [{x: 100, y: 0, z: 0}, {x: 100, y: 100, z: 0}, {x: 0, y: 70, z: 50}] ]; var fills = ["#0cc", "#c0c"]; var m = dojox.gfx3d.matrix; for(var i = 0; i < tas.length; i++){ console.debug(fills[i]); view.createPolygon(tas[i]) .setStroke({color: "blue", width: 1}) .setFill(fills[i]); } var camera = dojox.gfx3d.matrix.normalize([m.cameraTranslate(0, -300, 0)]); view.applyCameraTransform(camera); view.render(); // set up the click handlers. dojo.connect(dojo.byId("bsp"), "onclick", renderWithBSP); dojo.connect(dojo.byId("zorder"), "onclick", renderWithZOrder); }; render = function(title, render){ dojo.byId("render").innerHTML = title; view.setScheduler(render); view.invalidate(); view.render(); }; renderWithBSP = function(){ render("BSP", dojox.gfx3d.scheduler.bsp); }; renderWithZOrder = function(){ render("ZOrder", dojox.gfx3d.scheduler.zOrder); }; dojo.addOnLoad(makeObjects); </script> </head> <body> <h1>Scheduler Test</h1> <p>There are two schedulers available in dojox.gfx3d, zOrder and BSP. zOrder is much simpler, and it performs quite well in most cases, it may fail in some rare cases, for example: two triangles share the same two vertice, and have the same Z value of the third vertex, in this case, they have the same z-order. 
They are rendered in arbitrary order. In such cases, BSP comes to the rescue.</p> <p>Current render: <strong id="render">default</strong></p> <p><button id="bsp">BSP</button>&nbsp;<button id="zorder">zOrder</button></p> <div id="test" style="width: 500px; height: 500px;"></div> <p>That's all Folks!</p> </body> </html>
{'repo_name': 'jrossi227/ApacheGUI', 'stars': '115', 'repo_language': 'HTML', 'file_name': 'context.xml', 'mime_type': 'text/xml', 'hash': 6202146020618097203, 'source_dataset': 'data'}
//
//     Generated by class-dump 3.5 (64 bit) (Debug version compiled Sep 17 2017 16:24:48).
//
//     class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by Steve Nygard.
//

#import <objc/NSObject.h>

@class NSString, WAAppItemData;

// Machine-generated interface (via class-dump) for the user-info payload
// accompanying a mini-program permission launch CGI request.
// NOTE(review): semantics below are inferred from member names only —
// confirm against the callers that populate this object.
@interface WAPermissionLaunchCGIUserInfo : NSObject
{
    _Bool _isFromBackground;    // backing ivar: whether the launch presumably originated from the background — verify
    unsigned int _enterScene;   // backing ivar: scene code — presumably a WeChat enter-scene enum; verify range/meaning
    WAAppItemData *_appItem;    // backing ivar: the app item this request concerns
    NSString *_sessionId;       // backing ivar: session identifier string
}

// Copied so the stored string cannot be mutated by the caller afterwards.
@property(copy, nonatomic) NSString *sessionId; // @synthesize sessionId=_sessionId;
@property(nonatomic) _Bool isFromBackground; // @synthesize isFromBackground=_isFromBackground;
@property(nonatomic) unsigned int enterScene; // @synthesize enterScene=_enterScene;
@property(retain, nonatomic) WAAppItemData *appItem; // @synthesize appItem=_appItem;
// ARC-generated destructor stub emitted by class-dump.
- (void).cxx_destruct;

@end
{'repo_name': 'lefex/iWeChat', 'stars': '1481', 'repo_language': 'Objective-C', 'file_name': 'LaunchScreen.storyboard', 'mime_type': 'text/xml', 'hash': 7500371212696942537, 'source_dataset': 'data'}
namespace SampleLibrary.IOC
{
    /// <summary>
    /// Test component whose constructor depends on
    /// <see cref="SampleRecursiveTestComponent2"/>. Presumably the two
    /// components form a circular constructor dependency used to exercise
    /// recursive-dependency detection in the IoC container — confirm against
    /// the declaration of SampleRecursiveTestComponent2.
    /// </summary>
    public class SampleRecursiveTestComponent1
    {
        /// <summary>
        /// Initializes the component. The dependency is deliberately ignored;
        /// only the constructor signature matters for the container tests.
        /// </summary>
        /// <param name="other">The component this one depends on.</param>
        public SampleRecursiveTestComponent1(SampleRecursiveTestComponent2 other)
        {
        }
    }
}
{'repo_name': 'philiplaureano/LinFu', 'stars': '200', 'repo_language': 'C#', 'file_name': 'Microsoft.Practices.ServiceLocation.XML', 'mime_type': 'text/xml', 'hash': 1772177589324462777, 'source_dataset': 'data'}
'use strict';

// Spec for the <ons-back-button> custom element: class/modifier handling,
// compiled DOM structure, click behavior, and Android auto-styling.
describe('OnsBackButtonElement', () => {
  it('exists', () => {
    expect(window.ons.elements.BackButton).to.be.ok;
  });

  describe('class attribute', () => {
    it('should contain "back-button" class name automatically', () => {
      const element = new ons.elements.BackButton();
      element.setAttribute('class', 'foobar');
      expect(element.classList.contains('back-button')).to.be.ok;
      expect(element.classList.contains('foobar')).to.be.ok;
    });
  });

  it('provides \'modifier\' attribute', () => {
    const element = ons._util.createElement('<ons-back-button>label</ons-back-button>');
    element.setAttribute('modifier', 'hoge');
    expect(element.classList.contains('back-button--hoge')).to.be.true;

    // Replacing the modifier swaps in the new classes and drops the old one.
    element.setAttribute('modifier', 'foo bar');
    expect(element.classList.contains('back-button--foo')).to.be.true;
    expect(element.classList.contains('back-button--bar')).to.be.true;
    expect(element.classList.contains('back-button--hoge')).not.to.be.true;

    // Manually-added modifier classes survive a later modifier change.
    element.classList.add('back-button--piyo');
    element.setAttribute('modifier', 'fuga');
    expect(element.classList.contains('back-button--piyo')).to.be.true;
    expect(element.classList.contains('back-button--fuga')).to.be.true;
  });

  it('has two children', () => {
    const element = ons._util.createElement('<ons-back-button>label</ons-back-button>');
    document.body.appendChild(element);
    expect(element.children[0]).to.be.ok;
    expect(element.children[1]).to.be.ok;
    expect(element.children[2]).not.to.be.ok;
  });

  describe('#_onClick()', () => {
    let div, nav;

    // Build a navigator with two page templates; page2 hosts the back button.
    beforeEach((done) => {
      div = ons._util.createElement(`
        <div>
          <template id="page1">
            <ons-page id="p1">page1 content</ons-page>
          </template>
          <template id="page2">
            <ons-page id="p2">
              <ons-back-button>content</ons-back-button>
            </ons-page>
          </template>
        </div>
      `);
      nav = new ons.elements.Navigator();
      nav._options = {cancelIfRunning: false};
      document.body.appendChild(div);
      document.body.appendChild(nav);
      nav.pushPage('page1').then(function(e) {
        done();
      });
    });

    afterEach(() => {
      div.remove();
      nav.remove();
      div = nav = null;
    });

    it('will pop a page', () => {
      const promise = new Promise((resolve) => {
        nav.addEventListener('postpop', () => {
          resolve();
        });
        nav.pushPage('page2').then(function(page) {
          // Fixed: the element was looked up twice and the first result was
          // left unused; reuse the single query result.
          const element = nav.querySelector('ons-back-button');
          element._onClick();
        });
      });
      return expect(promise).to.eventually.be.fulfilled;
    });
  });

  describe('#onClick', () => {
    it('overrides the default click handler', () => {
      const backButton = ons._util.createElement('<ons-back-button></ons-back-button>');
      backButton.onClick = function () {};
      const spy = chai.spy.on(backButton, 'onClick');
      backButton._onClick();
      expect(spy).to.have.been.called.once;
    });
  });

  describe('#_compile()', () => {
    it('does not compile twice', () => {
      const div1 = document.createElement('div');
      const div2 = document.createElement('div');
      div1.innerHTML = '<ons-back-button>Back</ons-back-button>';
      div2.innerHTML = div1.innerHTML;
      expect(div1.isEqualNode(div2)).to.be.true;
    });
  });

  describe('autoStyling', () => {
    it('adds \'material\' modifiers and effects on Android', () => {
      ons.platform.select('android');
      const e = ons._util.createElement('<ons-back-button>label</ons-back-button>');
      expect(e.getAttribute('modifier')).to.equal('material');
      ons.platform.select('');
    });
  });
});
{'repo_name': 'OnsenUI/OnsenUI', 'stars': '8027', 'repo_language': 'JavaScript', 'file_name': 'config.yml', 'mime_type': 'text/plain', 'hash': -8252888494924534314, 'source_dataset': 'data'}
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0

package chunks

import (
	"sync"

	"github.com/attic-labs/noms/go/hash"
)

// ReadRequest is a queued chunk read: Hashes reports which chunk hashes the
// request covers, and Outstanding returns the handle used to deliver a result
// or report that the chunk was not found.
type ReadRequest interface {
	Hashes() hash.HashSet
	Outstanding() OutstandingRequest
}

// NewGetRequest builds a single-chunk get request; the chunk (or EmptyChunk
// on failure) is delivered on ch.
func NewGetRequest(r hash.Hash, ch chan<- *Chunk) GetRequest {
	return GetRequest{hash.HashSet{r: struct{}{}}, ch}
}

// GetRequest asks for one chunk by hash, delivered on ch.
type GetRequest struct {
	hashes hash.HashSet
	ch     chan<- *Chunk
}

// NewGetManyRequest builds a multi-chunk get request; wg is decremented once
// per hash as each chunk is satisfied or failed.
func NewGetManyRequest(hashes hash.HashSet, wg *sync.WaitGroup, ch chan<- *Chunk) GetManyRequest {
	return GetManyRequest{hashes, wg, ch}
}

// GetManyRequest asks for a set of chunks, delivered on ch; completion is
// tracked via wg.
type GetManyRequest struct {
	hashes hash.HashSet
	wg     *sync.WaitGroup
	ch     chan<- *Chunk
}

// NewAbsentRequest builds a single-hash presence check; the bool sent on ch
// is true when the chunk is absent (see OutstandingAbsent below).
func NewAbsentRequest(r hash.Hash, ch chan<- bool) AbsentRequest {
	return AbsentRequest{hash.HashSet{r: struct{}{}}, ch}
}

// AbsentRequest asks whether one chunk is absent, answered on ch.
type AbsentRequest struct {
	hashes hash.HashSet
	ch     chan<- bool
}

// NewAbsentManyRequest builds a multi-hash presence check; hashes found to be
// absent are sent on ch, and wg is decremented once per hash.
func NewAbsentManyRequest(hashes hash.HashSet, wg *sync.WaitGroup, ch chan<- hash.Hash) AbsentManyRequest {
	return AbsentManyRequest{hashes, wg, ch}
}

// AbsentManyRequest asks which of a set of chunks are absent; absent hashes
// are sent on ch, and completion is tracked via wg.
type AbsentManyRequest struct {
	hashes hash.HashSet
	wg     *sync.WaitGroup
	ch     chan<- hash.Hash
}

func (g GetRequest) Hashes() hash.HashSet { return g.hashes }

func (g GetRequest) Outstanding() OutstandingRequest { return OutstandingGet(g.ch) }

func (g GetManyRequest) Hashes() hash.HashSet { return g.hashes }

func (g GetManyRequest) Outstanding() OutstandingRequest { return OutstandingGetMany{g.wg, g.ch} }

func (h AbsentRequest) Hashes() hash.HashSet { return h.hashes }

func (h AbsentRequest) Outstanding() OutstandingRequest { return OutstandingAbsent(h.ch) }

func (h AbsentManyRequest) Hashes() hash.HashSet { return h.hashes }

func (h AbsentManyRequest) Outstanding() OutstandingRequest { return OutstandingAbsentMany{h.wg, h.ch} }

// OutstandingRequest is the per-hash completion handle for a queued read:
// Satisfy delivers a found chunk, Fail signals the chunk was not found.
// Exactly one of the two is expected to be called per hash.
type OutstandingRequest interface {
	Satisfy(h hash.Hash, c *Chunk)
	Fail()
}

// OutstandingGet delivers a single chunk (or EmptyChunk on failure).
type OutstandingGet chan<- *Chunk

// OutstandingGetMany delivers chunks on ch and counts completions on wg.
type OutstandingGetMany struct {
	wg *sync.WaitGroup
	ch chan<- *Chunk
}

// OutstandingAbsent answers a presence check: true is sent when the chunk is
// absent (Fail), false when it was found (Satisfy).
type OutstandingAbsent chan<- bool

// OutstandingAbsentMany reports absent hashes on ch and counts completions on wg.
type OutstandingAbsentMany struct {
	wg *sync.WaitGroup
	ch chan<- hash.Hash
}

func (r OutstandingGet) Satisfy(h hash.Hash, c *Chunk) {
	r <- c
}

// Fail sends EmptyChunk so the blocked receiver always gets exactly one value.
func (r OutstandingGet) Fail() {
	r <- &EmptyChunk
}

func (ogm OutstandingGetMany) Satisfy(h hash.Hash, c *Chunk) {
	ogm.ch <- c
	ogm.wg.Done()
}

// Fail only decrements wg; missing chunks are simply not sent on ch.
func (ogm OutstandingGetMany) Fail() {
	ogm.wg.Done()
}

// Satisfy means the chunk was found, so "absent" is false.
func (oh OutstandingAbsent) Satisfy(h hash.Hash, c *Chunk) {
	oh <- false
}

// Fail means the chunk was not found, so "absent" is true.
func (oh OutstandingAbsent) Fail() {
	oh <- true
}

// Satisfy reports the hash as absent. NOTE(review): naming is inverted here
// relative to the Get types — for absence checks, Satisfy appears to be
// invoked for hashes NOT found in the store; confirm against the callers.
func (ohm OutstandingAbsentMany) Satisfy(h hash.Hash, c *Chunk) {
	ohm.ch <- h
	ohm.wg.Done()
}

func (ohm OutstandingAbsentMany) Fail() {
	ohm.wg.Done()
}

// ReadBatch represents a set of queued Get/Has requests, each of which are blocking on a receive channel for a response.
type ReadBatch map[hash.Hash][]OutstandingRequest

// Close ensures that callers to Get() and Has() are failed correctly if the corresponding chunk wasn't in the response from the server (i.e. it wasn't found).
func (rb *ReadBatch) Close() error {
	for _, reqs := range *rb {
		for _, req := range reqs {
			req.Fail()
		}
	}
	return nil
}
{'repo_name': 'attic-labs/noms', 'stars': '7231', 'repo_language': 'Go', 'file_name': 'types.js', 'mime_type': 'text/plain', 'hash': -7926342176993643864, 'source_dataset': 'data'}
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */

// Loads the named test module and invokes every enumerable export whose name
// begins with "test", handing each one the shared assert/util helpers.
function runSourceMapTests(modName, do_throw) {
  let testModule = require(modName);
  let assert = require('test/source-map/assert');
  let util = require('test/source-map/util');
  let testNamePattern = /^test/;

  assert.init(do_throw);

  for (let exportName in testModule) {
    if (testNamePattern.test(exportName)) {
      testModule[exportName](assert, util);
    }
  }
}

this.runSourceMapTests = runSourceMapTests;
{'repo_name': 'zhangmengxue/React-Code-Snippet', 'stars': '304', 'repo_language': 'None', 'file_name': 'prefix-source-map.jsm', 'mime_type': 'text/plain', 'hash': 1698414332812499323, 'source_dataset': 'data'}
/*
 *  Copyright (c) 2012 The WebRTC@AnyRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_WINDOWS_VIDEO_CAPTURE_MF_H_
#define WEBRTC_MODULES_VIDEO_CAPTURE_WINDOWS_VIDEO_CAPTURE_MF_H_

#include "webrtc/modules/video_capture/video_capture_impl.h"

namespace webrtc {
namespace videocapturemodule {

// VideoCapture implementation that uses the Media Foundation API on Windows.
// This will replace the DirectShow based implementation on Vista and higher.
// TODO(tommi): Finish implementing and switch out the DS in the factory method
// for supported platforms.
class VideoCaptureMF : public VideoCaptureImpl {
 public:
  // |id| identifies this capture module instance.
  explicit VideoCaptureMF(const int32_t id);

  // Two-phase initialization binding this instance to the capture device
  // named by |device_id|.
  int32_t Init(const int32_t id, const char* device_id);

  // Overrides from VideoCaptureImpl.
  virtual int32_t StartCapture(const VideoCaptureCapability& capability);
  virtual int32_t StopCapture();
  virtual bool CaptureStarted();
  // Fills |settings| with the active capture configuration.
  virtual int32_t CaptureSettings(
      VideoCaptureCapability& settings);  // NOLINT

 protected:
  // Protected: lifetime is presumably managed via the base class
  // (e.g. ref-counting) rather than direct delete — confirm in
  // video_capture_impl.h.
  virtual ~VideoCaptureMF();
};

}  // namespace videocapturemodule
}  // namespace webrtc

#endif  // WEBRTC_MODULES_VIDEO_CAPTURE_WINDOWS_VIDEO_CAPTURE_MF_H_
{'repo_name': 'anyRTC/anyRTC-RTMP-OpenSource', 'stars': '3540', 'repo_language': 'C++', 'file_name': 'config.h', 'mime_type': 'text/plain', 'hash': -2498087356298717192, 'source_dataset': 'data'}