{"size":267,"ext":"java","lang":"Java","max_stars_count":null,"content":"package de.polocloud.api.event.service;\n\nimport de.polocloud.api.event.CloudEvent;\nimport lombok.AllArgsConstructor;\nimport lombok.Getter;\n\n@Getter\n@AllArgsConstructor\npublic final class CloudServiceRemoveEvent implements CloudEvent {\n\n private String service;\n\n}\n","avg_line_length":19.0714285714,"max_line_length":66,"alphanum_fraction":0.8202247191} {"size":34346,"ext":"java","lang":"Java","max_stars_count":null,"content":"\/*\n * Licensed to Elasticsearch under one or more contributor\n * license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright\n * ownership. Elasticsearch licenses this file to you under\n * the Apache License, Version 2.0 (the \"License\"); you may\n * not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing,\n * software distributed under the License is distributed on an\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n * KIND, either express or implied. See the License for the\n * specific language governing permissions and limitations\n * under the License.\n *\/\n\npackage org.elasticsearch.index.search;\n\nimport org.apache.lucene.analysis.Analyzer;\nimport org.apache.lucene.analysis.TokenStream;\nimport org.apache.lucene.analysis.tokenattributes.CharTermAttribute;\nimport org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;\nimport org.apache.lucene.index.Term;\nimport org.apache.lucene.queryparser.classic.ParseException;\nimport org.apache.lucene.queryparser.classic.Token;\nimport org.apache.lucene.queryparser.classic.XQueryParser;\nimport org.apache.lucene.search.BooleanClause;\nimport org.apache.lucene.search.BoostAttribute;\nimport org.apache.lucene.search.BoostQuery;\nimport org.apache.lucene.search.DisjunctionMaxQuery;\nimport org.apache.lucene.search.FuzzyQuery;\nimport org.apache.lucene.search.MatchNoDocsQuery;\nimport org.apache.lucene.search.MultiPhraseQuery;\nimport org.apache.lucene.search.MultiTermQuery;\nimport org.apache.lucene.search.PhraseQuery;\nimport org.apache.lucene.search.Query;\nimport org.apache.lucene.search.SynonymQuery;\nimport org.apache.lucene.search.WildcardQuery;\nimport org.apache.lucene.search.spans.SpanNearQuery;\nimport org.apache.lucene.search.spans.SpanOrQuery;\nimport org.apache.lucene.search.spans.SpanQuery;\nimport org.apache.lucene.util.BytesRef;\nimport org.apache.lucene.util.automaton.RegExp;\nimport org.elasticsearch.common.lucene.search.Queries;\nimport org.elasticsearch.common.regex.Regex;\nimport org.elasticsearch.common.unit.Fuzziness;\nimport org.elasticsearch.core.internal.io.IOUtils;\nimport org.elasticsearch.index.IndexSettings;\nimport org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType;\nimport org.elasticsearch.index.mapper.FieldNamesFieldMapper;\nimport org.elasticsearch.index.mapper.MappedFieldType;\nimport org.elasticsearch.index.mapper.MapperService;\nimport org.elasticsearch.index.mapper.TextSearchInfo;\nimport org.elasticsearch.index.query.ExistsQueryBuilder;\nimport org.elasticsearch.index.query.MultiMatchQueryBuilder;\nimport org.elasticsearch.index.query.QueryShardContext;\nimport org.elasticsearch.index.query.support.QueryParsers;\n\nimport java.io.IOException;\nimport java.time.ZoneId;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport 
java.util.List;\nimport java.util.Map;\n\nimport static org.elasticsearch.common.lucene.search.Queries.fixNegativeQueryIfNeeded;\nimport static org.elasticsearch.common.lucene.search.Queries.newLenientFieldQuery;\nimport static org.elasticsearch.common.lucene.search.Queries.newUnmappedFieldQuery;\nimport static org.elasticsearch.index.search.QueryParserHelper.checkForTooManyFields;\nimport static org.elasticsearch.index.search.QueryParserHelper.resolveMappingField;\nimport static org.elasticsearch.index.search.QueryParserHelper.resolveMappingFields;\n\n\/**\n * A {@link XQueryParser} that uses the {@link MapperService} in order to build smarter\n * queries based on the mapping information.\n * This class uses {@link MultiMatchQuery} to build the text query around operators and {@link XQueryParser}\n * to assemble the result logically.\n *\/\npublic class QueryStringQueryParser extends XQueryParser {\n private static final String EXISTS_FIELD = \"_exists_\";\n\n private final QueryShardContext context;\n private final Map fieldsAndWeights;\n private final boolean lenient;\n\n private final MultiMatchQuery queryBuilder;\n private MultiMatchQueryBuilder.Type type = MultiMatchQueryBuilder.Type.BEST_FIELDS;\n private Float groupTieBreaker;\n\n private Analyzer forceAnalyzer;\n private Analyzer forceQuoteAnalyzer;\n private String quoteFieldSuffix;\n private boolean analyzeWildcard;\n private ZoneId timeZone;\n private Fuzziness fuzziness = Fuzziness.AUTO;\n private int fuzzyMaxExpansions = FuzzyQuery.defaultMaxExpansions;\n private MultiTermQuery.RewriteMethod fuzzyRewriteMethod;\n private boolean fuzzyTranspositions = FuzzyQuery.defaultTranspositions;\n\n \/**\n * @param context The query shard context.\n * @param defaultField The default field for query terms.\n *\/\n public QueryStringQueryParser(QueryShardContext context, String defaultField) {\n this(context, defaultField, Collections.emptyMap(), false, context.getMapperService().searchAnalyzer());\n }\n\n \/**\n * @param context The query shard context.\n * @param defaultField The default field for query terms.\n * @param lenient If set to `true` will cause format based failures (like providing text to a numeric field) to be ignored.\n *\/\n public QueryStringQueryParser(QueryShardContext context, String defaultField, boolean lenient) {\n this(context, defaultField, Collections.emptyMap(), lenient, context.getMapperService().searchAnalyzer());\n }\n\n \/**\n * @param context The query shard context\n * @param fieldsAndWeights The default fields and weights expansion for query terms\n *\/\n public QueryStringQueryParser(QueryShardContext context, Map fieldsAndWeights) {\n this(context, null, fieldsAndWeights, false, context.getMapperService().searchAnalyzer());\n }\n\n \/**\n * @param context The query shard context.\n * @param fieldsAndWeights The default fields and weights expansion for query terms.\n * @param lenient If set to `true` will cause format based failures (like providing text to a numeric field) to be ignored.\n *\/\n public QueryStringQueryParser(QueryShardContext context, Map fieldsAndWeights, boolean lenient) {\n this(context, null, fieldsAndWeights, lenient, context.getMapperService().searchAnalyzer());\n }\n\n \/**\n * Defaults to all queryable fields extracted from the mapping for query terms\n * @param context The query shard context\n * @param lenient If set to `true` will cause format based failures (like providing text to a numeric field) to be ignored.\n *\/\n public QueryStringQueryParser(QueryShardContext 
context, boolean lenient) {\n this(context, \"*\",\n resolveMappingField(context, \"*\", 1.0f, false, false, null),\n lenient, context.getMapperService().searchAnalyzer());\n }\n\n private QueryStringQueryParser(QueryShardContext context, String defaultField,\n Map fieldsAndWeights,\n boolean lenient, Analyzer analyzer) {\n super(defaultField, analyzer);\n this.context = context;\n this.fieldsAndWeights = Collections.unmodifiableMap(fieldsAndWeights);\n this.queryBuilder = new MultiMatchQuery(context);\n queryBuilder.setZeroTermsQuery(MatchQuery.ZeroTermsQuery.NULL);\n queryBuilder.setLenient(lenient);\n this.lenient = lenient;\n }\n\n @Override\n public void setEnablePositionIncrements(boolean enable) {\n super.setEnablePositionIncrements(enable);\n queryBuilder.setEnablePositionIncrements(enable);\n }\n\n @Override\n public void setDefaultOperator(Operator op) {\n super.setDefaultOperator(op);\n queryBuilder.setOccur(op == Operator.AND ? BooleanClause.Occur.MUST : BooleanClause.Occur.SHOULD);\n }\n\n @Override\n public void setPhraseSlop(int phraseSlop) {\n super.setPhraseSlop(phraseSlop);\n queryBuilder.setPhraseSlop(phraseSlop);\n }\n\n \/**\n * @param type Sets how multiple fields should be combined to build textual part queries.\n *\/\n public void setType(MultiMatchQueryBuilder.Type type) {\n this.type = type;\n }\n\n \/**\n * @param fuzziness Sets the default {@link Fuzziness} for fuzzy query.\n * Defaults to {@link Fuzziness#AUTO}.\n *\/\n public void setFuzziness(Fuzziness fuzziness) {\n this.fuzziness = fuzziness;\n }\n\n \/**\n * @param fuzzyRewriteMethod Sets the default rewrite method for fuzzy query.\n *\/\n public void setFuzzyRewriteMethod(MultiTermQuery.RewriteMethod fuzzyRewriteMethod) {\n this.fuzzyRewriteMethod = fuzzyRewriteMethod;\n }\n\n \/**\n * @param fuzzyMaxExpansions Sets the maximum number of expansions allowed in a fuzzy query.\n * Defaults to {@link FuzzyQuery#defaultMaxExpansions}.\n *\/\n public void setFuzzyMaxExpansions(int fuzzyMaxExpansions) {\n this.fuzzyMaxExpansions = fuzzyMaxExpansions;\n }\n\n \/**\n * @param analyzer Force the provided analyzer to be used for all query analysis regardless of the field.\n *\/\n public void setForceAnalyzer(Analyzer analyzer) {\n this.forceAnalyzer = analyzer;\n }\n\n \/**\n * @param analyzer Force the provided analyzer to be used for all phrase query analysis regardless of the field.\n *\/\n public void setForceQuoteAnalyzer(Analyzer analyzer) {\n this.forceQuoteAnalyzer = analyzer;\n }\n\n \/**\n * @param quoteFieldSuffix The suffix to append to fields for quoted parts of the query string.\n *\/\n public void setQuoteFieldSuffix(String quoteFieldSuffix) {\n this.quoteFieldSuffix = quoteFieldSuffix;\n }\n\n \/**\n * @param analyzeWildcard If true, the wildcard operator analyzes the term to build a wildcard query.\n * Otherwise the query terms are only normalized.\n *\/\n public void setAnalyzeWildcard(boolean analyzeWildcard) {\n this.analyzeWildcard = analyzeWildcard;\n }\n\n \/**\n * @param timeZone Time Zone to be applied to any range query related to dates.\n *\/\n public void setTimeZone(ZoneId timeZone) {\n this.timeZone = timeZone;\n }\n\n \/**\n * @param groupTieBreaker The tie breaker to apply when multiple fields are used.\n *\/\n public void setGroupTieBreaker(float groupTieBreaker) {\n \/\/ Force the tie breaker in the query builder too\n queryBuilder.setTieBreaker(groupTieBreaker);\n this.groupTieBreaker = groupTieBreaker;\n }\n\n @Override\n public void 
setAutoGenerateMultiTermSynonymsPhraseQuery(boolean enable) {\n queryBuilder.setAutoGenerateSynonymsPhraseQuery(enable);\n }\n\n \/**\n * @param fuzzyTranspositions Sets whether transpositions are supported in fuzzy queries.\n * Defaults to {@link FuzzyQuery#defaultTranspositions}.\n *\/\n public void setFuzzyTranspositions(boolean fuzzyTranspositions) {\n this.fuzzyTranspositions = fuzzyTranspositions;\n }\n\n private Query applyBoost(Query q, Float boost) {\n if (boost != null && boost != 1f) {\n return new BoostQuery(q, boost);\n }\n return q;\n }\n\n private Map extractMultiFields(String field, boolean quoted) {\n Map extractedFields;\n if (field != null) {\n boolean allFields = Regex.isMatchAllPattern(field);\n if (allFields && this.field != null && this.field.equals(field)) {\n \/\/ \"*\" is the default field\n extractedFields = fieldsAndWeights;\n }\n boolean multiFields = Regex.isSimpleMatchPattern(field);\n \/\/ Filters unsupported fields if a pattern is requested\n \/\/ Filters metadata fields if all fields are requested\n extractedFields = resolveMappingField(context, field, 1.0f, !allFields, !multiFields, quoted ? quoteFieldSuffix : null);\n } else if (quoted && quoteFieldSuffix != null) {\n extractedFields = resolveMappingFields(context, fieldsAndWeights, quoteFieldSuffix);\n } else {\n extractedFields = fieldsAndWeights;\n }\n checkForTooManyFields(extractedFields.size(), this.context, field);\n return extractedFields;\n }\n\n @Override\n protected Query newMatchAllDocsQuery() {\n return Queries.newMatchAllQuery();\n }\n\n @Override\n public Query getFieldQuery(String field, String queryText, boolean quoted) throws ParseException {\n if (field != null && EXISTS_FIELD.equals(field)) {\n return existsQuery(queryText);\n }\n\n if (quoted) {\n return getFieldQuery(field, queryText, getPhraseSlop());\n }\n\n \/\/ Detects additional operators '<', '<=', '>', '>=' to handle range query with one side unbounded.\n \/\/ It is required to use a prefix field operator to enable the detection since they are not treated\n \/\/ as logical operator by the query parser (e.g. 
age:>=10).\n if (field != null) {\n if (queryText.length() > 1) {\n if (queryText.charAt(0) == '>') {\n if (queryText.length() > 2) {\n if (queryText.charAt(1) == '=') {\n return getRangeQuery(field, queryText.substring(2), null, true, true);\n }\n }\n return getRangeQuery(field, queryText.substring(1), null, false, true);\n } else if (queryText.charAt(0) == '<') {\n if (queryText.length() > 2) {\n if (queryText.charAt(1) == '=') {\n return getRangeQuery(field, null, queryText.substring(2), true, true);\n }\n }\n return getRangeQuery(field, null, queryText.substring(1), true, false);\n }\n \/\/ if we are querying a single date field, we also create a range query that leverages the time zone setting\n if (context.fieldMapper(field) instanceof DateFieldType && this.timeZone != null) {\n return getRangeQuery(field, queryText, queryText, true, true);\n }\n }\n }\n\n Map fields = extractMultiFields(field, quoted);\n if (fields.isEmpty()) {\n \/\/ the requested fields do not match any field in the mapping\n \/\/ happens for wildcard fields only since we cannot expand to a valid field name\n \/\/ if there is no match in the mappings.\n return newUnmappedFieldQuery(field);\n }\n Analyzer oldAnalyzer = queryBuilder.analyzer;\n try {\n if (forceAnalyzer != null) {\n queryBuilder.setAnalyzer(forceAnalyzer);\n }\n return queryBuilder.parse(type, fields, queryText, null);\n } catch (IOException e) {\n throw new ParseException(e.getMessage());\n } finally {\n queryBuilder.setAnalyzer(oldAnalyzer);\n }\n }\n\n @Override\n protected Query getFieldQuery(String field, String queryText, int slop) throws ParseException {\n if (field != null && EXISTS_FIELD.equals(field)) {\n return existsQuery(queryText);\n }\n\n Map fields = extractMultiFields(field, true);\n if (fields.isEmpty()) {\n return newUnmappedFieldQuery(field);\n }\n Analyzer oldAnalyzer = queryBuilder.analyzer;\n int oldSlop = queryBuilder.phraseSlop;\n try {\n if (forceQuoteAnalyzer != null) {\n queryBuilder.setAnalyzer(forceQuoteAnalyzer);\n } else if (forceAnalyzer != null) {\n queryBuilder.setAnalyzer(forceAnalyzer);\n }\n queryBuilder.setPhraseSlop(slop);\n Query query = queryBuilder.parse(MultiMatchQueryBuilder.Type.PHRASE, fields, queryText, null);\n if (query == null) {\n return null;\n }\n return applySlop(query, slop);\n } catch (IOException e) {\n throw new ParseException(e.getMessage());\n } finally {\n queryBuilder.setAnalyzer(oldAnalyzer);\n queryBuilder.setPhraseSlop(oldSlop);\n }\n }\n\n @Override\n protected Query getRangeQuery(String field, String part1, String part2,\n boolean startInclusive, boolean endInclusive) throws ParseException {\n if (\"*\".equals(part1)) {\n part1 = null;\n }\n if (\"*\".equals(part2)) {\n part2 = null;\n }\n\n Map fields = extractMultiFields(field, false);\n if (fields.isEmpty()) {\n return newUnmappedFieldQuery(field);\n }\n\n List queries = new ArrayList<>();\n for (Map.Entry entry : fields.entrySet()) {\n Query q = getRangeQuerySingle(entry.getKey(), part1, part2, startInclusive, endInclusive, context);\n assert q != null;\n queries.add(applyBoost(q, entry.getValue()));\n }\n\n if (queries.size() == 1) {\n return queries.get(0);\n }\n float tiebreaker = groupTieBreaker == null ? 
type.tieBreaker() : groupTieBreaker;\n return new DisjunctionMaxQuery(queries, tiebreaker);\n }\n\n private Query getRangeQuerySingle(String field, String part1, String part2,\n boolean startInclusive, boolean endInclusive, QueryShardContext context) {\n MappedFieldType currentFieldType = context.fieldMapper(field);\n if (currentFieldType == null) {\n return newUnmappedFieldQuery(field);\n }\n try {\n Analyzer normalizer = forceAnalyzer == null ? queryBuilder.context.getSearchAnalyzer(currentFieldType) : forceAnalyzer;\n BytesRef part1Binary = part1 == null ? null : normalizer.normalize(field, part1);\n BytesRef part2Binary = part2 == null ? null : normalizer.normalize(field, part2);\n Query rangeQuery = currentFieldType.rangeQuery(part1Binary, part2Binary,\n startInclusive, endInclusive, null, timeZone, null, context);\n return rangeQuery;\n } catch (RuntimeException e) {\n if (lenient) {\n return newLenientFieldQuery(field, e);\n }\n throw e;\n }\n }\n\n @Override\n protected Query handleBareFuzzy(String field, Token fuzzySlop, String termImage) throws ParseException {\n if (fuzzySlop.image.length() == 1) {\n return getFuzzyQuery(field, termImage, fuzziness.asDistance(termImage));\n }\n float distance = Fuzziness.build(fuzzySlop.image.substring(1)).asDistance(termImage);\n return getFuzzyQuery(field, termImage, distance);\n }\n\n @Override\n protected Query getFuzzyQuery(String field, String termStr, float minSimilarity) throws ParseException {\n Map fields = extractMultiFields(field, false);\n if (fields.isEmpty()) {\n return newUnmappedFieldQuery(field);\n }\n List queries = new ArrayList<>();\n for (Map.Entry entry : fields.entrySet()) {\n Query q = getFuzzyQuerySingle(entry.getKey(), termStr, minSimilarity);\n assert q != null;\n queries.add(applyBoost(q, entry.getValue()));\n }\n\n if (queries.size() == 1) {\n return queries.get(0);\n } else {\n float tiebreaker = groupTieBreaker == null ? type.tieBreaker() : groupTieBreaker;\n return new DisjunctionMaxQuery(queries, tiebreaker);\n }\n }\n\n private Query getFuzzyQuerySingle(String field, String termStr, float minSimilarity) throws ParseException {\n MappedFieldType currentFieldType = context.fieldMapper(field);\n if (currentFieldType == null) {\n return newUnmappedFieldQuery(field);\n }\n try {\n Analyzer normalizer = forceAnalyzer == null ? queryBuilder.context.getSearchAnalyzer(currentFieldType) : forceAnalyzer;\n BytesRef term = termStr == null ? 
null : normalizer.normalize(field, termStr);\n return currentFieldType.fuzzyQuery(term, Fuzziness.fromEdits((int) minSimilarity),\n getFuzzyPrefixLength(), fuzzyMaxExpansions, fuzzyTranspositions, context);\n } catch (RuntimeException e) {\n if (lenient) {\n return newLenientFieldQuery(field, e);\n }\n throw e;\n }\n }\n\n @Override\n protected Query newFuzzyQuery(Term term, float minimumSimilarity, int prefixLength) {\n int numEdits = Fuzziness.build(minimumSimilarity).asDistance(term.text());\n FuzzyQuery query = new FuzzyQuery(term, numEdits, prefixLength,\n fuzzyMaxExpansions, fuzzyTranspositions);\n QueryParsers.setRewriteMethod(query, fuzzyRewriteMethod);\n return query;\n }\n\n @Override\n protected Query getPrefixQuery(String field, String termStr) throws ParseException {\n Map fields = extractMultiFields(field, false);\n if (fields.isEmpty()) {\n return newUnmappedFieldQuery(termStr);\n }\n List queries = new ArrayList<>();\n for (Map.Entry entry : fields.entrySet()) {\n Query q = getPrefixQuerySingle(entry.getKey(), termStr);\n if (q != null) {\n queries.add(applyBoost(q, entry.getValue()));\n }\n }\n if (queries.isEmpty()) {\n return null;\n } else if (queries.size() == 1) {\n return queries.get(0);\n } else {\n float tiebreaker = groupTieBreaker == null ? type.tieBreaker() : groupTieBreaker;\n return new DisjunctionMaxQuery(queries, tiebreaker);\n }\n }\n\n private Query getPrefixQuerySingle(String field, String termStr) throws ParseException {\n Analyzer oldAnalyzer = getAnalyzer();\n try {\n MappedFieldType currentFieldType = context.fieldMapper(field);\n if (currentFieldType == null || currentFieldType.getTextSearchInfo() == TextSearchInfo.NONE) {\n return newUnmappedFieldQuery(field);\n }\n setAnalyzer(forceAnalyzer == null ? queryBuilder.context.getSearchAnalyzer(currentFieldType) : forceAnalyzer);\n Query query = null;\n if (currentFieldType.getTextSearchInfo().isTokenized() == false) {\n query = currentFieldType.prefixQuery(termStr, getMultiTermRewriteMethod(), context);\n } else {\n query = getPossiblyAnalyzedPrefixQuery(currentFieldType.name(), termStr, currentFieldType);\n }\n return query;\n } catch (RuntimeException e) {\n if (lenient) {\n return newLenientFieldQuery(field, e);\n }\n throw e;\n } finally {\n setAnalyzer(oldAnalyzer);\n }\n }\n\n private Query getPossiblyAnalyzedPrefixQuery(String field, String termStr, MappedFieldType currentFieldType) throws ParseException {\n if (analyzeWildcard == false) {\n return currentFieldType.prefixQuery(getAnalyzer().normalize(field, termStr).utf8ToString(),\n getMultiTermRewriteMethod(), context);\n }\n List > tlist;\n \/\/ get Analyzer from superclass and tokenize the term\n TokenStream source = null;\n try {\n try {\n source = getAnalyzer().tokenStream(field, termStr);\n source.reset();\n } catch (IOException e) {\n return super.getPrefixQuery(field, termStr);\n }\n tlist = new ArrayList<>();\n List currentPos = new ArrayList<>();\n CharTermAttribute termAtt = source.addAttribute(CharTermAttribute.class);\n PositionIncrementAttribute posAtt = source.addAttribute(PositionIncrementAttribute.class);\n\n while (true) {\n try {\n if (!source.incrementToken()) break;\n } catch (IOException e) {\n break;\n }\n if (currentPos.isEmpty() == false && posAtt.getPositionIncrement() > 0) {\n tlist.add(currentPos);\n currentPos = new ArrayList<>();\n }\n currentPos.add(termAtt.toString());\n }\n if (currentPos.isEmpty() == false) {\n tlist.add(currentPos);\n }\n } finally {\n if (source != null) {\n 
IOUtils.closeWhileHandlingException(source);\n }\n }\n\n if (tlist.size() == 0) {\n return null;\n }\n\n if (tlist.size() == 1 && tlist.get(0).size() == 1) {\n return currentFieldType.prefixQuery(tlist.get(0).get(0), getMultiTermRewriteMethod(), context);\n }\n\n \/\/ build a boolean query with prefix on the last position only.\n List clauses = new ArrayList<>();\n for (int pos = 0; pos < tlist.size(); pos++) {\n List plist = tlist.get(pos);\n boolean isLastPos = (pos == tlist.size() - 1);\n Query posQuery;\n if (plist.size() == 1) {\n if (isLastPos) {\n posQuery = currentFieldType.prefixQuery(plist.get(0), getMultiTermRewriteMethod(), context);\n } else {\n posQuery = newTermQuery(new Term(field, plist.get(0)), BoostAttribute.DEFAULT_BOOST);\n }\n } else if (isLastPos == false) {\n \/\/ build a synonym query for terms in the same position.\n Term[] terms = new Term[plist.size()];\n for (int i = 0; i < plist.size(); i++) {\n terms[i] = new Term(field, plist.get(i));\n }\n posQuery = new SynonymQuery(terms);\n } else {\n List innerClauses = new ArrayList<>();\n for (String token : plist) {\n innerClauses.add(new BooleanClause(super.getPrefixQuery(field, token),\n BooleanClause.Occur.SHOULD));\n }\n posQuery = getBooleanQuery(innerClauses);\n }\n clauses.add(new BooleanClause(posQuery,\n getDefaultOperator() == Operator.AND ? BooleanClause.Occur.MUST : BooleanClause.Occur.SHOULD));\n }\n return getBooleanQuery(clauses);\n }\n\n private Query existsQuery(String fieldName) {\n final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType =\n (FieldNamesFieldMapper.FieldNamesFieldType) context.getMapperService().fieldType(FieldNamesFieldMapper.NAME);\n if (fieldNamesFieldType == null) {\n return new MatchNoDocsQuery(\"No mappings yet\");\n }\n if (fieldNamesFieldType.isEnabled() == false) {\n \/\/ The field_names_field is disabled so we switch to a wildcard query that matches all terms\n return new WildcardQuery(new Term(fieldName, \"*\"));\n }\n\n return ExistsQueryBuilder.newFilter(context, fieldName, false);\n }\n\n @Override\n protected Query getWildcardQuery(String field, String termStr) throws ParseException {\n String actualField = field != null ? field : this.field;\n if (termStr.equals(\"*\") && actualField != null) {\n if (Regex.isMatchAllPattern(actualField)) {\n return newMatchAllDocsQuery();\n }\n \/\/ effectively, we check if a field exists or not\n return existsQuery(actualField);\n }\n\n Map fields = extractMultiFields(field, false);\n if (fields.isEmpty()) {\n return newUnmappedFieldQuery(termStr);\n }\n List queries = new ArrayList<>();\n for (Map.Entry entry : fields.entrySet()) {\n Query q = getWildcardQuerySingle(entry.getKey(), termStr);\n assert q != null;\n queries.add(applyBoost(q, entry.getValue()));\n }\n if (queries.size() == 1) {\n return queries.get(0);\n } else {\n float tiebreaker = groupTieBreaker == null ? 
type.tieBreaker() : groupTieBreaker;\n return new DisjunctionMaxQuery(queries, tiebreaker);\n }\n }\n\n private Query getWildcardQuerySingle(String field, String termStr) throws ParseException {\n if (\"*\".equals(termStr)) {\n \/\/ effectively, we check if a field exists or not\n return existsQuery(field);\n }\n Analyzer oldAnalyzer = getAnalyzer();\n try {\n MappedFieldType currentFieldType = queryBuilder.context.fieldMapper(field);\n if (currentFieldType == null) {\n return newUnmappedFieldQuery(field);\n } \n if (forceAnalyzer != null && \n (analyzeWildcard || currentFieldType.getTextSearchInfo().isTokenized())) {\n setAnalyzer(forceAnalyzer);\n return super.getWildcardQuery(currentFieldType.name(), termStr);\n }\n if (getAllowLeadingWildcard() == false && (termStr.startsWith(\"*\") || termStr.startsWith(\"?\"))) {\n throw new ParseException(\"'*' or '?' not allowed as first character in WildcardQuery\");\n }\n return currentFieldType.wildcardQuery(termStr, getMultiTermRewriteMethod(), context);\n } catch (RuntimeException e) {\n if (lenient) {\n return newLenientFieldQuery(field, e);\n }\n throw e;\n } finally {\n setAnalyzer(oldAnalyzer);\n }\n }\n\n @Override\n protected Query getRegexpQuery(String field, String termStr) throws ParseException {\n final int maxAllowedRegexLength = context.getIndexSettings().getMaxRegexLength();\n if (termStr.length() > maxAllowedRegexLength) {\n throw new IllegalArgumentException(\n \"The length of regex [\" + termStr.length() + \"] used in the [query_string] has exceeded \" +\n \"the allowed maximum of [\" + maxAllowedRegexLength + \"]. This maximum can be set by changing the [\" +\n IndexSettings.MAX_REGEX_LENGTH_SETTING.getKey() + \"] index level setting.\");\n }\n Map fields = extractMultiFields(field, false);\n if (fields.isEmpty()) {\n return newUnmappedFieldQuery(termStr);\n }\n List queries = new ArrayList<>();\n for (Map.Entry entry : fields.entrySet()) {\n Query q = getRegexpQuerySingle(entry.getKey(), termStr);\n assert q != null;\n queries.add(applyBoost(q, entry.getValue()));\n }\n if (queries.size() == 1) {\n return queries.get(0);\n } else {\n float tiebreaker = groupTieBreaker == null ? 
type.tieBreaker() : groupTieBreaker;\n return new DisjunctionMaxQuery(queries, tiebreaker);\n }\n }\n\n private Query getRegexpQuerySingle(String field, String termStr) throws ParseException {\n Analyzer oldAnalyzer = getAnalyzer();\n try {\n MappedFieldType currentFieldType = queryBuilder.context.fieldMapper(field);\n if (currentFieldType == null) {\n return newUnmappedFieldQuery(field);\n }\n if (forceAnalyzer != null) {\n setAnalyzer(forceAnalyzer);\n return super.getRegexpQuery(field, termStr);\n } \n return currentFieldType.regexpQuery(termStr, RegExp.ALL, getMaxDeterminizedStates(), \n getMultiTermRewriteMethod(), context);\n } catch (RuntimeException e) {\n if (lenient) {\n return newLenientFieldQuery(field, e);\n }\n throw e;\n } finally {\n setAnalyzer(oldAnalyzer);\n }\n }\n\n @Override\n protected Query getBooleanQuery(List clauses) throws ParseException {\n Query q = super.getBooleanQuery(clauses);\n if (q == null) {\n return null;\n }\n return fixNegativeQueryIfNeeded(q);\n }\n\n private Query applySlop(Query q, int slop) {\n if (q instanceof PhraseQuery) {\n \/\/make sure that the boost hasn't been set beforehand, otherwise we'd lose it\n assert q instanceof BoostQuery == false;\n return addSlopToPhrase((PhraseQuery) q, slop);\n } else if (q instanceof MultiPhraseQuery) {\n MultiPhraseQuery.Builder builder = new MultiPhraseQuery.Builder((MultiPhraseQuery) q);\n builder.setSlop(slop);\n return builder.build();\n } else if (q instanceof SpanQuery) {\n return addSlopToSpan((SpanQuery) q, slop);\n } else {\n return q;\n }\n }\n\n private Query addSlopToSpan(SpanQuery query, int slop) {\n if (query instanceof SpanNearQuery) {\n return new SpanNearQuery(((SpanNearQuery) query).getClauses(), slop,\n ((SpanNearQuery) query).isInOrder());\n } else if (query instanceof SpanOrQuery) {\n SpanQuery[] clauses = new SpanQuery[((SpanOrQuery) query).getClauses().length];\n int pos = 0;\n for (SpanQuery clause : ((SpanOrQuery) query).getClauses()) {\n clauses[pos++] = (SpanQuery) addSlopToSpan(clause, slop);\n }\n return new SpanOrQuery(clauses);\n } else {\n return query;\n }\n }\n\n \/**\n * Rebuild a phrase query with a slop value\n *\/\n private PhraseQuery addSlopToPhrase(PhraseQuery query, int slop) {\n PhraseQuery.Builder builder = new PhraseQuery.Builder();\n builder.setSlop(slop);\n final Term[] terms = query.getTerms();\n final int[] positions = query.getPositions();\n for (int i = 0; i < terms.length; ++i) {\n builder.add(terms[i], positions[i]);\n }\n\n return builder.build();\n }\n\n @Override\n public Query parse(String query) throws ParseException {\n if (query.trim().isEmpty()) {\n return Queries.newMatchNoDocsQuery(\"Matching no documents because no terms present\");\n }\n return super.parse(query);\n }\n}\n","avg_line_length":42.2980295567,"max_line_length":136,"alphanum_fraction":0.6342514412} {"size":1064,"ext":"java","lang":"Java","max_stars_count":1.0,"content":"\/*\nCopyright 2017 yangchong211\uff08github.com\/yangchong211\uff09\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\nhttp:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the 
License.\n*\/\npackage com.ycbjie.webviewlib.base;\n\nimport java.util.Map;\n\n\/**\n * <pre>\n     *     @author yangchong\n     *     blog  : https:\/\/github.com\/yangchong211\n     *     time  : 2019\/9\/10\n     *     desc  : Custom RequestInfo entity class\n     *     revise:\n     * <\/pre>\n     *\/\npublic class RequestInfo {\n\n    public String url;\n\n    public Map<String, String> headers;\n\n    public RequestInfo(String url, Map<String, String> additionalHttpHeaders) {\n        this.url = url;\n        this.headers = additionalHttpHeaders;\n    }\n\n}\n","avg_line_length":25.9512195122,"max_line_length":79,"alphanum_fraction":0.7208646617}
{"size":1717,"ext":"java","lang":"Java","max_stars_count":98.0,"content":"\/**\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements.  See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership.  The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License.  You may obtain a copy of the License at\n *\n *   http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing,\n * software distributed under the License is distributed on an\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n * KIND, either express or implied.  See the License for the\n * specific language governing permissions and limitations\n * under the License.\n *\/\npackage org.apache.apex.malhar.lib.appdata;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class ThreadUtils\n{\n  private ThreadUtils()\n  {\n  }\n\n  \/**\n   * Exception handler used for testing threads.\n   *\/\n  public static class ExceptionSaverExceptionHandler implements Thread.UncaughtExceptionHandler\n  {\n    private volatile Throwable caughtThrowable;\n\n    public ExceptionSaverExceptionHandler()\n    {\n    }\n\n    @Override\n    public void uncaughtException(Thread t, Throwable e)\n    {\n      e.printStackTrace();\n      caughtThrowable = e;\n    }\n\n    \/**\n     * Gets the {@link Throwable} caught by this exception handler;\n     * @return The {@link Throwable} caught by this exception handler.\n     *\/\n    public Throwable getCaughtThrowable()\n    {\n      return caughtThrowable;\n    }\n  }\n\n  private static final Logger LOG = LoggerFactory.getLogger(ThreadUtils.class);\n}\n","avg_line_length":28.6166666667,"max_line_length":95,"alphanum_fraction":0.7245195108}
{"size":6529,"ext":"java","lang":"Java","max_stars_count":2.0,"content":"\/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements.  See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License.  You may obtain a copy of the License at\n *\n *     http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\/\n\npackage org.apache.solr.client.solrj.routing;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\nimport org.apache.solr.SolrTestCaseJ4;\nimport org.apache.solr.common.cloud.Replica;\nimport org.apache.solr.common.cloud.ZkStateReader;\nimport org.apache.solr.common.params.ModifiableSolrParams;\nimport org.apache.solr.common.params.ShardParams;\nimport org.junit.Test;\n\npublic class RequestReplicaListTransformerGeneratorTest extends SolrTestCaseJ4 {\n\n  @Test\n  public void testNodePreferenceRulesBase() {\n    RequestReplicaListTransformerGenerator generator = new RequestReplicaListTransformerGenerator();\n    ModifiableSolrParams params = new ModifiableSolrParams();\n    List replicas = getBasicReplicaList();\n\n    String rulesParam = ShardParams.SHARDS_PREFERENCE_REPLICA_BASE + \":stable:dividend:routingPreference\";\n\n    params.add(\"routingPreference\", \"0\");\n    params.add(ShardParams.SHARDS_PREFERENCE, rulesParam);\n\n    ReplicaListTransformer rlt = generator.getReplicaListTransformer(params);\n    rlt.transform(replicas);\n    assertEquals(\"node1\", replicas.get(0).getNodeName());\n    assertEquals(\"node2\", replicas.get(1).getNodeName());\n    assertEquals(\"node3\", replicas.get(2).getNodeName());\n\n    params.set(\"routingPreference\", \"1\");\n    rlt = generator.getReplicaListTransformer(params);\n    rlt.transform(replicas);\n    assertEquals(\"node2\", replicas.get(0).getNodeName());\n    assertEquals(\"node3\", replicas.get(1).getNodeName());\n    assertEquals(\"node1\", replicas.get(2).getNodeName());\n\n    params.set(\"routingPreference\", \"2\");\n    rlt = generator.getReplicaListTransformer(params);\n    rlt.transform(replicas);\n    assertEquals(\"node3\", replicas.get(0).getNodeName());\n    assertEquals(\"node1\", replicas.get(1).getNodeName());\n    assertEquals(\"node2\", replicas.get(2).getNodeName());\n\n    params.set(\"routingPreference\", \"3\");\n    rlt = generator.getReplicaListTransformer(params);\n    rlt.transform(replicas);\n    assertEquals(\"node1\", replicas.get(0).getNodeName());\n    assertEquals(\"node2\", replicas.get(1).getNodeName());\n    assertEquals(\"node3\", replicas.get(2).getNodeName());\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  @Test\n  public void replicaTypeAndReplicaBase() {\n    RequestReplicaListTransformerGenerator generator = new RequestReplicaListTransformerGenerator();\n    ModifiableSolrParams params = new ModifiableSolrParams();\n    List replicas = getBasicReplicaList();\n\n    \/\/ Add a replica so that sorting by replicaType:TLOG can cause a tie\n    replicas.add(\n        new Replica(\n            \"node4\",\n      
      map(\n                ZkStateReader.BASE_URL_PROP, \"http:\/\/host2_2:8983\/solr\",\n                ZkStateReader.NODE_NAME_PROP, \"node4\",\n                ZkStateReader.CORE_NAME_PROP, \"collection1\",\n                ZkStateReader.REPLICA_TYPE, \"TLOG\"\n            ), \"c1\",\"s1\"\n        )\n    );\n\n    \/\/ Add a PULL replica so that there's a tie for \"last place\"\n    replicas.add(\n        new Replica(\n            \"node5\",\n            map(\n                ZkStateReader.BASE_URL_PROP, \"http:\/\/host2_2:8983\/solr\",\n                ZkStateReader.NODE_NAME_PROP, \"node5\",\n                ZkStateReader.CORE_NAME_PROP, \"collection1\",\n                ZkStateReader.REPLICA_TYPE, \"PULL\"\n            ), \"c1\",\"s1\"\n        )\n    );\n\n    \/\/ replicaType and replicaBase combined rule param\n    String rulesParam = ShardParams.SHARDS_PREFERENCE_REPLICA_TYPE + \":NRT,\" +\n        ShardParams.SHARDS_PREFERENCE_REPLICA_TYPE + \":TLOG,\" +\n        ShardParams.SHARDS_PREFERENCE_REPLICA_BASE + \":stable:dividend:routingPreference\";\n\n    params.add(\"routingPreference\", \"0\");\n    params.add(ShardParams.SHARDS_PREFERENCE, rulesParam);\n    ReplicaListTransformer rlt = generator.getReplicaListTransformer(params);\n    rlt.transform(replicas);\n    assertEquals(\"node1\", replicas.get(0).getNodeName());\n    assertEquals(\"node2\", replicas.get(1).getNodeName());\n    assertEquals(\"node4\", replicas.get(2).getNodeName());\n    assertEquals(\"node3\", replicas.get(3).getNodeName());\n    assertEquals(\"node5\", replicas.get(4).getNodeName());\n\n    params.set(\"routingPreference\", \"1\");\n    rlt = generator.getReplicaListTransformer(params);\n    rlt.transform(replicas);\n    assertEquals(\"node1\", replicas.get(0).getNodeName());\n    assertEquals(\"node4\", replicas.get(1).getNodeName());\n    assertEquals(\"node2\", replicas.get(2).getNodeName());\n    assertEquals(\"node5\", replicas.get(3).getNodeName());\n    assertEquals(\"node3\", replicas.get(4).getNodeName());\n  }\n\n  @SuppressWarnings(\"unchecked\")\n  private static List getBasicReplicaList() {\n    List replicas = new ArrayList();\n    replicas.add(\n        new Replica(\n            \"node1\",\n            map(\n                ZkStateReader.BASE_URL_PROP, \"http:\/\/host1:8983\/solr\",\n                ZkStateReader.NODE_NAME_PROP, \"node1\",\n                ZkStateReader.CORE_NAME_PROP, \"collection1\",\n                ZkStateReader.REPLICA_TYPE, \"NRT\"\n            ),\"c1\",\"s1\"\n        )\n    );\n    replicas.add(\n        new Replica(\n            \"node2\",\n            map(\n                ZkStateReader.BASE_URL_PROP, \"http:\/\/host2:8983\/solr\",\n                ZkStateReader.NODE_NAME_PROP, \"node2\",\n                ZkStateReader.CORE_NAME_PROP, \"collection1\",\n                ZkStateReader.REPLICA_TYPE, \"TLOG\"\n            ),\"c1\",\"s1\"\n        )\n    );\n    replicas.add(\n        new Replica(\n            \"node3\",\n            map(\n                ZkStateReader.BASE_URL_PROP, \"http:\/\/host2_2:8983\/solr\",\n                ZkStateReader.NODE_NAME_PROP, \"node3\",\n                ZkStateReader.CORE_NAME_PROP, \"collection1\",\n                ZkStateReader.REPLICA_TYPE, \"PULL\"\n            ),\"c1\",\"s1\"\n        )\n    );\n    return replicas;\n  }\n}\n","avg_line_length":38.8630952381,"max_line_length":106,"alphanum_fraction":0.6739163731}
{"size":1632,"ext":"java","lang":"Java","max_stars_count":null,"content":"\/**\n * \n * Support classes useful for encoding and processing X.509 certificates.\n *\/\npackage org.bouncycastle.asn1.x509;\n\n\npublic class AttributeCertificateInfo extends org.bouncycastle.asn1.ASN1Object {\n\n\tpublic static AttributeCertificateInfo getInstance(org.bouncycastle.asn1.ASN1TaggedObject obj, boolean explicit) {\n\t}\n\n\tpublic static AttributeCertificateInfo getInstance(Object obj) {\n\t}\n\n\tpublic org.bouncycastle.asn1.ASN1Integer getVersion() {\n\t}\n\n\tpublic Holder getHolder() {\n\t}\n\n\tpublic AttCertIssuer getIssuer() {\n\t}\n\n\tpublic AlgorithmIdentifier getSignature() {\n\t}\n\n\tpublic org.bouncycastle.asn1.ASN1Integer getSerialNumber() {\n\t}\n\n\tpublic AttCertValidityPeriod getAttrCertValidityPeriod() {\n\t}\n\n\tpublic org.bouncycastle.asn1.ASN1Sequence getAttributes() {\n\t}\n\n\tpublic org.bouncycastle.asn1.DERBitString getIssuerUniqueID() {\n\t}\n\n\tpublic Extensions getExtensions() {\n\t}\n\n\t\/**\n\t *  Produce an object suitable for an ASN1OutputStream.\n\t *  
\n\t *   AttributeCertificateInfo ::= SEQUENCE {\n\t *        version              AttCertVersion -- version is v2,\n\t *        holder               Holder,\n\t *        issuer               AttCertIssuer,\n\t *        signature            AlgorithmIdentifier,\n\t *        serialNumber         CertificateSerialNumber,\n\t *        attrCertValidityPeriod   AttCertValidityPeriod,\n\t *        attributes           SEQUENCE OF Attribute,\n\t *        issuerUniqueID       UniqueIdentifier OPTIONAL,\n\t *        extensions           Extensions OPTIONAL\n\t *   }\n\t * \n\t *   AttCertVersion ::= INTEGER { v2(1) }\n\t *  <\/pre>\n\t *\/\n\tpublic org.bouncycastle.asn1.ASN1Primitive toASN1Primitive() {\n\t}\n}\n","avg_line_length":25.5,"max_line_length":115,"alphanum_fraction":0.6789215686}
{"size":345,"ext":"java","lang":"Java","max_stars_count":null,"content":"import java.util.*;\r\npublic class Main {\r\n\r\n\tpublic static void main(String[] args) {\r\n\t\t\/\/ TODO Auto-generated method stub\r\nScanner s=new Scanner(System.in);\r\nString num1=s.nextLine();\r\nString num2=s.nextLine();\r\nint A=Integer.parseInt(num1);\r\nint B=Integer.parseInt(num2);\r\nint result=A+B;\r\nSystem.out.println(\"SOMA = \"+result);\r\n\r\n\r\n\t}\r\n\r\n}\r\n","avg_line_length":19.1666666667,"max_line_length":42,"alphanum_fraction":0.6753623188}
{"size":6134,"ext":"java","lang":"Java","max_stars_count":35.0,"content":"\/*\n *  MIT License\n *\n *  Copyright (c) 2021 MASES s.r.l.\n *\n *  Permission is hereby granted, free of charge, to any person obtaining a copy\n *  of this software and associated documentation files (the \"Software\"), to deal\n *  in the Software without restriction, including without limitation the rights\n *  to use, copy, modify, merge, publish, distribute, sublicense, and\/or sell\n *  copies of the Software, and to permit persons to whom the Software is\n *  furnished to do so, subject to the following conditions:\n *\n *  The above copyright notice and this permission notice shall be included in all\n *  copies or substantial portions of the Software.\n *\n *  THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n *  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n *  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n *  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n *  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n *  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n *  SOFTWARE.\n *\/\n\n\/**************************************************************************************\n * \n *      This code was generated from a template using JCOReflector\n * \n *      Manual changes to this file may cause unexpected behavior in your application.\n *      Manual changes to this file will be overwritten if the code is regenerated.\n * <\/auto-generated>\n *************************************************************************************\/\n\npackage system.componentmodel;\n\nimport org.mases.jcobridge.*;\nimport org.mases.jcobridge.netreflection.*;\n\n\/\/ Import section\nimport system.collections.IList;\nimport system.collections.IListImplementation;\nimport system.collections.ICollection;\nimport system.collections.ICollectionImplementation;\nimport system.componentmodel.PropertyDescriptor;\nimport system.componentmodel.ListSortDirection;\nimport system.Array;\nimport system.componentmodel.ListChangedEventHandler;\n\n\n\/**\n * The base .NET class managing System.ComponentModel.IBindingList, System, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089.\n * 

\n * \n * See: https:\/\/docs.microsoft.com\/en-us\/dotnet\/api\/System.ComponentModel.IBindingList<\/a>\n *\/\npublic interface IBindingList extends IJCOBridgeReflected, IList, ICollection, IEnumerable {\n \/**\n * Fully assembly qualified name: System, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089\n *\/\n public static final String assemblyFullName = \"System, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089\";\n \/**\n * Assembly name: System\n *\/\n public static final String assemblyShortName = \"System\";\n \/**\n * Qualified class name: System.ComponentModel.IBindingList\n *\/\n public static final String className = \"System.ComponentModel.IBindingList\";\n \/**\n * Try to cast the {@link IJCOBridgeReflected} instance into {@link IBindingList}, a cast assert is made to check if types are compatible.\n * @param from {@link IJCOBridgeReflected} instance to be casted\n * @return {@link IBindingList} instance\n * @throws java.lang.Throwable in case of error during cast operation\n *\/\n public static IBindingList ToIBindingList(IJCOBridgeReflected from) throws Throwable {\n JCOBridge bridge = JCOBridgeInstance.getInstance(\"System, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089\");\n JCType classType = bridge.GetType(className + \", \" + (JCOReflector.getUseFullAssemblyName() ? assemblyFullName : assemblyShortName));\n NetType.AssertCast(classType, from);\n return new IBindingListImplementation(from.getJCOInstance());\n }\n\n \/**\n * Returns the reflected Assembly name\n * \n * @return A {@link String} representing the Fullname of reflected Assembly\n *\/\n public String getJCOAssemblyName();\n\n \/**\n * Returns the reflected Class name\n * \n * @return A {@link String} representing the Fullname of reflected Class\n *\/\n public String getJCOClassName();\n\n \/**\n * Returns the reflected Class name used to build the object\n * \n * @return A {@link String} representing the name used to allocated the object\n * in CLR context\n *\/\n public String getJCOObjectName();\n\n \/**\n * Returns the instantiated class\n * \n * @return An {@link java.lang.Object} representing the instance of the instantiated Class\n *\/\n public java.lang.Object getJCOInstance();\n\n \/**\n * Returns the instantiated class Type\n * \n * @return A {@link JCType} representing the Type of the instantiated Class\n *\/\n public JCType getJCOType();\n\n \/\/ Methods section\n \n\n\n public int Find(PropertyDescriptor property, NetObject key) throws Throwable;\n\n\n public NetObject AddNew() throws Throwable;\n\n public void AddIndex(PropertyDescriptor property) throws Throwable;\n\n public void ApplySort(PropertyDescriptor property, ListSortDirection direction) throws Throwable;\n\n\n\n\n\n\n public void RemoveIndex(PropertyDescriptor property) throws Throwable;\n\n public void RemoveSort() throws Throwable;\n\n\n \n \/\/ Properties section\n \n public boolean getAllowEdit() throws Throwable;\n\n public boolean getAllowNew() throws Throwable;\n\n public boolean getAllowRemove() throws Throwable;\n\n public boolean getIsSorted() throws Throwable;\n\n public boolean getSupportsChangeNotification() throws Throwable;\n\n public boolean getSupportsSearching() throws Throwable;\n\n public boolean getSupportsSorting() throws Throwable;\n\n public ListSortDirection getSortDirection() throws Throwable;\n\n public PropertyDescriptor getSortProperty() throws Throwable;\n\n\n\n \/\/ Instance Events section\n \n public void addListChanged(ListChangedEventHandler handler) 
throws Throwable;\n\n public void removeListChanged(ListChangedEventHandler handler) throws Throwable;\n\n\n}","avg_line_length":35.8713450292,"max_line_length":193,"alphanum_fraction":0.7117704597} {"size":329,"ext":"java","lang":"Java","max_stars_count":null,"content":"package br.recomende.infra.exception.http;\n\nimport org.springframework.http.HttpStatus;\nimport org.springframework.web.bind.annotation.ResponseStatus;\n\n@ResponseStatus(HttpStatus.FORBIDDEN)\npublic class ForbiddenResourceException extends RuntimeException {\n\n\tprivate static final long serialVersionUID = 9171085188206763368L;\n\n}\n","avg_line_length":27.4166666667,"max_line_length":67,"alphanum_fraction":0.8510638298} {"size":751,"ext":"java","lang":"Java","max_stars_count":null,"content":"package com.dennismoviedb.moviedb;\n\nimport android.content.Context;\nimport android.support.test.InstrumentationRegistry;\nimport android.support.test.runner.AndroidJUnit4;\n\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\n\nimport static org.junit.Assert.*;\n\n\/**\n * Instrumented test, which will execute on an Android device.\n *\n * @see Testing documentation<\/a>\n *\/\n@RunWith(AndroidJUnit4.class)\npublic class ExampleInstrumentedTest {\n @Test\n public void useAppContext() throws Exception {\n \/\/ Context of the app under test.\n Context appContext = InstrumentationRegistry.getTargetContext();\n\n assertEquals(\"com.dennismoviedb.moviedb\", appContext.getPackageName());\n }\n}\n","avg_line_length":27.8148148148,"max_line_length":79,"alphanum_fraction":0.7496671105} {"size":14336,"ext":"java","lang":"Java","max_stars_count":2.0,"content":"package jp.co.cybird.app.android.lib.commons.file.json.util;\n\nimport java.lang.reflect.Constructor;\nimport java.lang.reflect.Field;\nimport java.lang.reflect.Method;\nimport java.lang.reflect.Modifier;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.LinkedHashMap;\nimport java.util.Map;\nimport java.util.WeakHashMap;\n\npublic final class BeanInfo {\n private static final Map, BeanInfo>> cache = new WeakHashMap();\n private ConstructorInfo ci;\n private Map methods;\n private Map props;\n private Map smethods;\n private Map sprops;\n private Class type;\n\n \/* JADX WARNING: Code restructure failed: missing block: B:14:0x0031, code lost:\n return r1;\n *\/\n public static BeanInfo get(Class cls) {\n BeanInfo info;\n BeanInfo info2 = null;\n synchronized (cache) {\n try {\n Map, BeanInfo> map = cache.get(cls.getClassLoader());\n if (map == null) {\n map = new LinkedHashMap, BeanInfo>(16, 0.75f, true) {\n protected boolean removeEldestEntry(Map.Entry, BeanInfo> entry) {\n return size() > 1024;\n }\n };\n cache.put(cls.getClassLoader(), map);\n info = null;\n } else {\n info = map.get(cls);\n }\n if (info == null) {\n try {\n info2 = new BeanInfo(cls);\n map.put(cls, info2);\n } catch (Throwable th) {\n th = th;\n BeanInfo beanInfo = info;\n throw th;\n }\n } else {\n info2 = info;\n }\n } catch (Throwable th2) {\n \/\/throw th2;\n }\n }\n return info2;\n }\n\n public static void clear() {\n synchronized (cache) {\n cache.clear();\n }\n }\n\n \/* JADX WARNING: Removed duplicated region for block: B:105:0x028c *\/\n \/* JADX WARNING: Removed duplicated region for block: B:111:0x02c2 *\/\n \/* JADX WARNING: Removed duplicated region for block: B:68:0x01b3 *\/\n \/* JADX WARNING: Removed duplicated region for block: B:76:0x01ec *\/\n private BeanInfo(Class cls) {\n MethodInfo mi;\n int type2;\n String name;\n PropertyInfo prop;\n this.type = 
cls;\n for (Constructor con : cls.getConstructors()) {\n if (!con.isSynthetic()) {\n if (this.ci == null) {\n this.ci = new ConstructorInfo(cls, (Collection>) null);\n }\n con.setAccessible(true);\n this.ci.constructors.add(con);\n }\n }\n for (Field f : cls.getFields()) {\n if (!f.isSynthetic()) {\n boolean isStatic = Modifier.isStatic(f.getModifiers());\n String name2 = f.getName();\n f.setAccessible(true);\n if (isStatic) {\n if (this.sprops == null) {\n this.sprops = Collections.synchronizedMap(new LinkedHashMap());\n }\n this.sprops.put(name2, new PropertyInfo(cls, name2, f, (Method) null, (Method) null, isStatic, -1));\n } else {\n if (this.props == null) {\n this.props = Collections.synchronizedMap(new LinkedHashMap());\n }\n this.props.put(name2, new PropertyInfo(cls, name2, f, (Method) null, (Method) null, isStatic, -1));\n }\n }\n }\n for (Method m : cls.getMethods()) {\n if (!m.isSynthetic() && !m.isBridge()) {\n String name3 = m.getName();\n Class[] paramTypes = m.getParameterTypes();\n Class returnType = m.getReturnType();\n boolean isStatic2 = Modifier.isStatic(m.getModifiers());\n if (isStatic2) {\n if (this.smethods == null) {\n this.smethods = Collections.synchronizedMap(new LinkedHashMap());\n }\n mi = this.smethods.get(name3);\n if (mi == null) {\n mi = new MethodInfo(cls, name3, (Collection) null, isStatic2);\n this.smethods.put(name3, mi);\n }\n } else {\n if (this.methods == null) {\n this.methods = Collections.synchronizedMap(new LinkedHashMap());\n }\n mi = this.methods.get(name3);\n if (mi == null) {\n mi = new MethodInfo(cls, name3, (Collection) null, isStatic2);\n this.methods.put(name3, mi);\n }\n }\n m.setAccessible(true);\n mi.methods.add(m);\n if (name3.startsWith(\"get\") && name3.length() > 3 && !Character.isLowerCase(name3.charAt(3)) && paramTypes.length == 0) {\n if (!returnType.equals(Void.TYPE)) {\n type2 = 1;\n name = name3.substring(3);\n if (name.length() < 2 || !Character.isUpperCase(name.charAt(1))) {\n char[] chars = name.toCharArray();\n chars[0] = Character.toLowerCase(chars[0]);\n name = String.valueOf(chars);\n }\n if (isStatic2) {\n if (this.sprops == null) {\n this.sprops = Collections.synchronizedMap(new LinkedHashMap());\n }\n prop = this.sprops.get(name);\n if (prop == null) {\n prop = new PropertyInfo(cls, name, (Field) null, (Method) null, (Method) null, isStatic2, -1);\n this.sprops.put(name, prop);\n }\n } else {\n if (this.props == null) {\n this.props = Collections.synchronizedMap(new LinkedHashMap());\n }\n prop = this.props.get(name);\n if (prop == null) {\n prop = new PropertyInfo(cls, name, (Field) null, (Method) null, (Method) null, isStatic2, -1);\n this.props.put(name, prop);\n }\n }\n if (type2 == 1) {\n prop.readMethod = m;\n } else {\n prop.writeMethod = m;\n }\n }\n }\n if (name3.startsWith(\"is\") && name3.length() > 2 && !Character.isLowerCase(name3.charAt(2)) && paramTypes.length == 0) {\n if (!returnType.equals(Void.TYPE)) {\n type2 = 1;\n name = name3.substring(2);\n char[] chars2 = name.toCharArray();\n chars2[0] = Character.toLowerCase(chars2[0]);\n name = String.valueOf(chars2);\n if (isStatic2) {\n }\n if (type2 == 1) {\n }\n }\n }\n if (name3.startsWith(\"set\") && name3.length() > 3 && !Character.isLowerCase(name3.charAt(3)) && paramTypes.length == 1 && !paramTypes[0].equals(Void.TYPE)) {\n type2 = 2;\n name = name3.substring(3);\n char[] chars22 = name.toCharArray();\n chars22[0] = Character.toLowerCase(chars22[0]);\n name = String.valueOf(chars22);\n if (isStatic2) {\n }\n if (type2 == 1) {\n }\n }\n }\n }\n 
if (this.sprops == null) {\n this.sprops = Collections.emptyMap();\n }\n if (this.smethods == null) {\n this.smethods = Collections.emptyMap();\n }\n if (this.props == null) {\n this.props = Collections.emptyMap();\n }\n if (this.methods == null) {\n this.methods = Collections.emptyMap();\n }\n }\n\n public Object newInstance() {\n try {\n Constructor target = this.type.getConstructor(new Class[0]);\n target.setAccessible(true);\n return target.newInstance(new Object[0]);\n } catch (Exception e) {\n throw new IllegalStateException(e);\n }\n }\n\n public Class getType() {\n return this.type;\n }\n\n public ConstructorInfo getConstructor() {\n return this.ci;\n }\n\n public PropertyInfo getStaticProperty(String name) {\n return this.sprops.get(name);\n }\n\n public MethodInfo getStaticMethod(String name) {\n return this.smethods.get(name);\n }\n\n public Collection getStaticProperties() {\n return this.sprops.values();\n }\n\n public Collection getStaticMethods() {\n return this.smethods.values();\n }\n\n public PropertyInfo getProperty(String name) {\n return this.props.get(name);\n }\n\n public MethodInfo getMethod(String name) {\n return this.methods.get(name);\n }\n\n public Collection getProperties() {\n return this.props.values();\n }\n\n public Collection getMethods() {\n return this.methods.values();\n }\n\n public int hashCode() {\n if (this.type == null) {\n return 0;\n }\n return this.type.hashCode();\n }\n\n public boolean equals(Object obj) {\n if (this == obj) {\n return true;\n }\n if (obj == null) {\n return false;\n }\n if (getClass() != obj.getClass()) {\n return false;\n }\n BeanInfo other = (BeanInfo) obj;\n if (this.type == null) {\n if (other.type != null) {\n return false;\n }\n return true;\n } else if (!this.type.equals(other.type)) {\n return false;\n } else {\n return true;\n }\n }\n\n public String toString() {\n return \"BeanInfo [static properties = \" + this.sprops + \", static methods = \" + this.smethods + \", properties = \" + this.props + \", methods = \" + this.methods + \"]\";\n }\n\n static int calcurateDistance(Class[] params, Object[] args) {\n int point = 0;\n for (int i = 0; i < args.length; i++) {\n if (args[i] == null) {\n if (!params[i].isPrimitive()) {\n point += 5;\n }\n } else if (params[i].equals(args[i].getClass())) {\n point += 10;\n } else if (params[i].isAssignableFrom(args[i].getClass())) {\n point += 8;\n } else if (Boolean.TYPE.equals(args[i].getClass()) || Boolean.class.equals(args[i].getClass())) {\n if (Boolean.TYPE.equals(params[i]) || Boolean.class.equals(params[i].getClass())) {\n point += 10;\n }\n } else if (Byte.TYPE.equals(args[i].getClass()) || Byte.class.equals(args[i].getClass())) {\n if (Byte.TYPE.equals(params[i]) || Short.TYPE.equals(params[i]) || Character.TYPE.equals(params[i]) || Integer.TYPE.equals(params[i]) || Long.TYPE.equals(params[i]) || Float.TYPE.equals(params[i]) || Double.TYPE.equals(params[i]) || Byte.class.equals(params[i]) || Short.class.equals(params[i]) || Character.class.equals(params[i]) || Integer.class.equals(params[i]) || Long.class.equals(params[i]) || Float.class.equals(params[i]) || Double.class.equals(params[i])) {\n point += 10;\n }\n } else if (Short.TYPE.equals(args[i].getClass()) || Short.class.equals(args[i].getClass()) || Character.TYPE.equals(args[i].getClass()) || Character.class.equals(args[i].getClass())) {\n if (Short.TYPE.equals(params[i]) || Character.TYPE.equals(params[i]) || Integer.TYPE.equals(params[i]) || Long.TYPE.equals(params[i]) || Float.TYPE.equals(params[i]) || 
Double.TYPE.equals(params[i]) || Short.class.equals(params[i]) || Character.class.equals(params[i]) || Integer.class.equals(params[i]) || Long.class.equals(params[i]) || Float.class.equals(params[i]) || Double.class.equals(params[i])) {\n point += 10;\n }\n } else if (Integer.TYPE.equals(args[i].getClass()) || Integer.class.equals(args[i].getClass())) {\n if (Integer.TYPE.equals(params[i]) || Long.TYPE.equals(params[i]) || Float.TYPE.equals(params[i]) || Double.TYPE.equals(params[i]) || Integer.class.equals(params[i]) || Long.class.equals(params[i]) || Float.class.equals(params[i]) || Double.class.equals(params[i])) {\n point += 10;\n }\n } else if (Long.TYPE.equals(args[i].getClass()) || Long.class.equals(args[i].getClass())) {\n if (Long.TYPE.equals(params[i]) || Float.TYPE.equals(params[i]) || Double.TYPE.equals(params[i]) || Long.class.equals(params[i]) || Float.class.equals(params[i]) || Double.class.equals(params[i])) {\n point += 10;\n }\n } else if (Float.TYPE.equals(args[i].getClass()) || Float.class.equals(args[i].getClass())) {\n if (Float.TYPE.equals(params[i]) || Double.TYPE.equals(params[i]) || Float.class.equals(params[i]) || Double.class.equals(params[i])) {\n point += 10;\n }\n } else if ((Double.TYPE.equals(args[i].getClass()) || Double.class.equals(args[i].getClass())) && (Double.TYPE.equals(params[i]) || Double.class.equals(params[i]))) {\n point += 10;\n }\n }\n return point;\n }\n}\n","avg_line_length":43.1807228916,"max_line_length":484,"alphanum_fraction":0.483468192} {"size":3553,"ext":"java","lang":"Java","max_stars_count":null,"content":"package tk.jingzing.web.rest;\n\nimport tk.jingzing.service.AuditEventService;\n\nimport java.time.LocalDate;\nimport tk.jingzing.web.rest.util.PaginationUtil;\nimport org.springframework.boot.actuate.audit.AuditEvent;\nimport org.springframework.data.domain.Page;\nimport org.springframework.data.domain.Pageable;\nimport org.springframework.format.annotation.DateTimeFormat;\nimport org.springframework.http.MediaType;\nimport org.springframework.web.bind.annotation.*;\nimport org.springframework.http.HttpStatus;\nimport org.springframework.http.HttpHeaders;\nimport org.springframework.http.ResponseEntity;\n\nimport java.net.URISyntaxException;\nimport javax.inject.Inject;\nimport java.util.List;\n\n\/**\n * REST controller for getting the audit events.\n *\/\n@RestController\n@RequestMapping(value = \"\/management\/jhipster\/audits\", produces = MediaType.APPLICATION_JSON_VALUE)\npublic class AuditResource {\n\n private AuditEventService auditEventService;\n\n @Inject\n public AuditResource(AuditEventService auditEventService) {\n this.auditEventService = auditEventService;\n }\n\n \/**\n * GET \/audits : get a page of AuditEvents.\n *\n * @param pageable the pagination information\n * @return the ResponseEntity with status 200 (OK) and the list of AuditEvents in body\n * @throws URISyntaxException if there is an error to generate the pagination HTTP headers\n *\/\n @RequestMapping(method = RequestMethod.GET)\n public ResponseEntity> getAll(Pageable pageable) throws URISyntaxException {\n Page page = auditEventService.findAll(pageable);\n HttpHeaders headers = PaginationUtil.generatePaginationHttpHeaders(page, \"\/api\/audits\");\n return new ResponseEntity<>(page.getContent(), headers, HttpStatus.OK);\n }\n\n \/**\n * GET \/audits : get a page of AuditEvents between the fromDate and toDate.\n *\n * @param fromDate the start of the time period of AuditEvents to get\n * @param toDate the end of the time period of AuditEvents to 
get\n * @param pageable the pagination information\n * @return the ResponseEntity with status 200 (OK) and the list of AuditEvents in body\n * @throws URISyntaxException if there is an error to generate the pagination HTTP headers\n *\/\n\n @RequestMapping(method = RequestMethod.GET,\n params = {\"fromDate\", \"toDate\"})\n public ResponseEntity> getByDates(\n @RequestParam(value = \"fromDate\") @DateTimeFormat(iso = DateTimeFormat.ISO.DATE) LocalDate fromDate,\n @RequestParam(value = \"toDate\") @DateTimeFormat(iso = DateTimeFormat.ISO.DATE) LocalDate toDate,\n Pageable pageable) throws URISyntaxException {\n\n Page page = auditEventService.findByDates(fromDate.atTime(0, 0), toDate.atTime(23, 59), pageable);\n HttpHeaders headers = PaginationUtil.generatePaginationHttpHeaders(page, \"\/api\/audits\");\n return new ResponseEntity<>(page.getContent(), headers, HttpStatus.OK);\n }\n\n \/**\n * GET \/audits\/:id : get an AuditEvent by id.\n *\n * @param id the id of the entity to get\n * @return the ResponseEntity with status 200 (OK) and the AuditEvent in body, or status 404 (Not Found)\n *\/\n @RequestMapping(value = \"\/{id:.+}\",\n method = RequestMethod.GET)\n public ResponseEntity get(@PathVariable Long id) {\n return auditEventService.find(id)\n .map((entity) -> new ResponseEntity<>(entity, HttpStatus.OK))\n .orElse(new ResponseEntity<>(HttpStatus.NOT_FOUND));\n }\n}\n","avg_line_length":41.8,"max_line_length":118,"alphanum_fraction":0.7255840135} {"size":47949,"ext":"java","lang":"Java","max_stars_count":1.0,"content":"\/\/ Generated by the protocol buffer compiler. DO NOT EDIT!\n\/\/ source: google\/spanner\/admin\/instance\/v1\/spanner_instance_admin.proto\n\npackage com.google.spanner.admin.instance.v1;\n\n\/**\n *\n *\n *

\n * Metadata type for the operation returned by\n * [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance].\n * <\/pre>\n *\n * Protobuf type {@code google.spanner.admin.instance.v1.CreateInstanceMetadata}\n *\/\npublic final class CreateInstanceMetadata extends com.google.protobuf.GeneratedMessageV3\n    implements\n    \/\/ @@protoc_insertion_point(message_implements:google.spanner.admin.instance.v1.CreateInstanceMetadata)\n    CreateInstanceMetadataOrBuilder {\n  private static final long serialVersionUID = 0L;\n  \/\/ Use CreateInstanceMetadata.newBuilder() to construct.\n  private CreateInstanceMetadata(com.google.protobuf.GeneratedMessageV3.Builder builder) {\n    super(builder);\n  }\n\n  private CreateInstanceMetadata() {}\n\n  @java.lang.Override\n  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {\n    return this.unknownFields;\n  }\n\n  private CreateInstanceMetadata(\n      com.google.protobuf.CodedInputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    this();\n    if (extensionRegistry == null) {\n      throw new java.lang.NullPointerException();\n    }\n    int mutable_bitField0_ = 0;\n    com.google.protobuf.UnknownFieldSet.Builder unknownFields =\n        com.google.protobuf.UnknownFieldSet.newBuilder();\n    try {\n      boolean done = false;\n      while (!done) {\n        int tag = input.readTag();\n        switch (tag) {\n          case 0:\n            done = true;\n            break;\n          case 10:\n            {\n              com.google.spanner.admin.instance.v1.Instance.Builder subBuilder = null;\n              if (instance_ != null) {\n                subBuilder = instance_.toBuilder();\n              }\n              instance_ =\n                  input.readMessage(\n                      com.google.spanner.admin.instance.v1.Instance.parser(), extensionRegistry);\n              if (subBuilder != null) {\n                subBuilder.mergeFrom(instance_);\n                instance_ = subBuilder.buildPartial();\n              }\n\n              break;\n            }\n          case 18:\n            {\n              com.google.protobuf.Timestamp.Builder subBuilder = null;\n              if (startTime_ != null) {\n                subBuilder = startTime_.toBuilder();\n              }\n              startTime_ =\n                  input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry);\n              if (subBuilder != null) {\n                subBuilder.mergeFrom(startTime_);\n                startTime_ = subBuilder.buildPartial();\n              }\n\n              break;\n            }\n          case 26:\n            {\n              com.google.protobuf.Timestamp.Builder subBuilder = null;\n              if (cancelTime_ != null) {\n                subBuilder = cancelTime_.toBuilder();\n              }\n              cancelTime_ =\n                  input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry);\n              if (subBuilder != null) {\n                subBuilder.mergeFrom(cancelTime_);\n                cancelTime_ = subBuilder.buildPartial();\n              }\n\n              break;\n            }\n          case 34:\n            {\n              com.google.protobuf.Timestamp.Builder subBuilder = null;\n              if (endTime_ != null) {\n                subBuilder = endTime_.toBuilder();\n              }\n              endTime_ =\n                  
input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry);\n              if (subBuilder != null) {\n                subBuilder.mergeFrom(endTime_);\n                endTime_ = subBuilder.buildPartial();\n              }\n\n              break;\n            }\n          default:\n            {\n              if (!parseUnknownFieldProto3(input, unknownFields, extensionRegistry, tag)) {\n                done = true;\n              }\n              break;\n            }\n        }\n      }\n    } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n      throw e.setUnfinishedMessage(this);\n    } catch (java.io.IOException e) {\n      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);\n    } finally {\n      this.unknownFields = unknownFields.build();\n      makeExtensionsImmutable();\n    }\n  }\n\n  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {\n    return com.google.spanner.admin.instance.v1.SpannerInstanceAdminProto\n        .internal_static_google_spanner_admin_instance_v1_CreateInstanceMetadata_descriptor;\n  }\n\n  @java.lang.Override\n  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internalGetFieldAccessorTable() {\n    return com.google.spanner.admin.instance.v1.SpannerInstanceAdminProto\n        .internal_static_google_spanner_admin_instance_v1_CreateInstanceMetadata_fieldAccessorTable\n        .ensureFieldAccessorsInitialized(\n            com.google.spanner.admin.instance.v1.CreateInstanceMetadata.class,\n            com.google.spanner.admin.instance.v1.CreateInstanceMetadata.Builder.class);\n  }\n\n  public static final int INSTANCE_FIELD_NUMBER = 1;\n  private com.google.spanner.admin.instance.v1.Instance instance_;\n  \/**\n   *\n   *\n   * 
\n   * The instance being created.\n   * <\/pre>\n   *\n   * .google.spanner.admin.instance.v1.Instance instance = 1;<\/code>\n   *\/\n  public boolean hasInstance() {\n    return instance_ != null;\n  }\n  \/**\n   *\n   *\n   * 
\n   * The instance being created.\n   * <\/pre>\n   *\n   * .google.spanner.admin.instance.v1.Instance instance = 1;<\/code>\n   *\/\n  public com.google.spanner.admin.instance.v1.Instance getInstance() {\n    return instance_ == null\n        ? com.google.spanner.admin.instance.v1.Instance.getDefaultInstance()\n        : instance_;\n  }\n  \/**\n   *\n   *\n   * 
\n   * The instance being created.\n   * <\/pre>\n   *\n   * .google.spanner.admin.instance.v1.Instance instance = 1;<\/code>\n   *\/\n  public com.google.spanner.admin.instance.v1.InstanceOrBuilder getInstanceOrBuilder() {\n    return getInstance();\n  }\n\n  public static final int START_TIME_FIELD_NUMBER = 2;\n  private com.google.protobuf.Timestamp startTime_;\n  \/**\n   *\n   *\n   * 
\n   * The time at which the\n   * [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance] request was\n   * received.\n   * <\/pre>\n   *\n   * .google.protobuf.Timestamp start_time = 2;<\/code>\n   *\/\n  public boolean hasStartTime() {\n    return startTime_ != null;\n  }\n  \/**\n   *\n   *\n   * 
\n   * The time at which the\n   * [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance] request was\n   * received.\n   * <\/pre>\n   *\n   * .google.protobuf.Timestamp start_time = 2;<\/code>\n   *\/\n  public com.google.protobuf.Timestamp getStartTime() {\n    return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_;\n  }\n  \/**\n   *\n   *\n   * 
\n   * The time at which the\n   * [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance] request was\n   * received.\n   * <\/pre>\n   *\n   * .google.protobuf.Timestamp start_time = 2;<\/code>\n   *\/\n  public com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder() {\n    return getStartTime();\n  }\n\n  public static final int CANCEL_TIME_FIELD_NUMBER = 3;\n  private com.google.protobuf.Timestamp cancelTime_;\n  \/**\n   *\n   *\n   * 
\n   * The time at which this operation was cancelled. If set, this operation is\n   * in the process of undoing itself (which is guaranteed to succeed) and\n   * cannot be cancelled again.\n   * <\/pre>\n   *\n   * .google.protobuf.Timestamp cancel_time = 3;<\/code>\n   *\/\n  public boolean hasCancelTime() {\n    return cancelTime_ != null;\n  }\n  \/**\n   *\n   *\n   * 
\n   * The time at which this operation was cancelled. If set, this operation is\n   * in the process of undoing itself (which is guaranteed to succeed) and\n   * cannot be cancelled again.\n   * <\/pre>\n   *\n   * .google.protobuf.Timestamp cancel_time = 3;<\/code>\n   *\/\n  public com.google.protobuf.Timestamp getCancelTime() {\n    return cancelTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : cancelTime_;\n  }\n  \/**\n   *\n   *\n   * 
\n   * The time at which this operation was cancelled. If set, this operation is\n   * in the process of undoing itself (which is guaranteed to succeed) and\n   * cannot be cancelled again.\n   * <\/pre>\n   *\n   * .google.protobuf.Timestamp cancel_time = 3;<\/code>\n   *\/\n  public com.google.protobuf.TimestampOrBuilder getCancelTimeOrBuilder() {\n    return getCancelTime();\n  }\n\n  public static final int END_TIME_FIELD_NUMBER = 4;\n  private com.google.protobuf.Timestamp endTime_;\n  \/**\n   *\n   *\n   * 
\n   * The time at which this operation failed or was completed successfully.\n   * <\/pre>\n   *\n   * .google.protobuf.Timestamp end_time = 4;<\/code>\n   *\/\n  public boolean hasEndTime() {\n    return endTime_ != null;\n  }\n  \/**\n   *\n   *\n   * 
\n   * The time at which this operation failed or was completed successfully.\n   * <\/pre>\n   *\n   * .google.protobuf.Timestamp end_time = 4;<\/code>\n   *\/\n  public com.google.protobuf.Timestamp getEndTime() {\n    return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_;\n  }\n  \/**\n   *\n   *\n   * 
\n   * The time at which this operation failed or was completed successfully.\n   * <\/pre>\n   *\n   * .google.protobuf.Timestamp end_time = 4;<\/code>\n   *\/\n  public com.google.protobuf.TimestampOrBuilder getEndTimeOrBuilder() {\n    return getEndTime();\n  }\n\n  private byte memoizedIsInitialized = -1;\n\n  @java.lang.Override\n  public final boolean isInitialized() {\n    byte isInitialized = memoizedIsInitialized;\n    if (isInitialized == 1) return true;\n    if (isInitialized == 0) return false;\n\n    memoizedIsInitialized = 1;\n    return true;\n  }\n\n  @java.lang.Override\n  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {\n    if (instance_ != null) {\n      output.writeMessage(1, getInstance());\n    }\n    if (startTime_ != null) {\n      output.writeMessage(2, getStartTime());\n    }\n    if (cancelTime_ != null) {\n      output.writeMessage(3, getCancelTime());\n    }\n    if (endTime_ != null) {\n      output.writeMessage(4, getEndTime());\n    }\n    unknownFields.writeTo(output);\n  }\n\n  @java.lang.Override\n  public int getSerializedSize() {\n    int size = memoizedSize;\n    if (size != -1) return size;\n\n    size = 0;\n    if (instance_ != null) {\n      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getInstance());\n    }\n    if (startTime_ != null) {\n      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getStartTime());\n    }\n    if (cancelTime_ != null) {\n      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getCancelTime());\n    }\n    if (endTime_ != null) {\n      size += com.google.protobuf.CodedOutputStream.computeMessageSize(4, getEndTime());\n    }\n    size += unknownFields.getSerializedSize();\n    memoizedSize = size;\n    return size;\n  }\n\n  @java.lang.Override\n  public boolean equals(final java.lang.Object obj) {\n    if (obj == this) {\n      return true;\n    }\n    if (!(obj instanceof com.google.spanner.admin.instance.v1.CreateInstanceMetadata)) {\n      return super.equals(obj);\n    }\n    com.google.spanner.admin.instance.v1.CreateInstanceMetadata other =\n        (com.google.spanner.admin.instance.v1.CreateInstanceMetadata) obj;\n\n    boolean result = true;\n    result = result && (hasInstance() == other.hasInstance());\n    if (hasInstance()) {\n      result = result && getInstance().equals(other.getInstance());\n    }\n    result = result && (hasStartTime() == other.hasStartTime());\n    if (hasStartTime()) {\n      result = result && getStartTime().equals(other.getStartTime());\n    }\n    result = result && (hasCancelTime() == other.hasCancelTime());\n    if (hasCancelTime()) {\n      result = result && getCancelTime().equals(other.getCancelTime());\n    }\n    result = result && (hasEndTime() == other.hasEndTime());\n    if (hasEndTime()) {\n      result = result && getEndTime().equals(other.getEndTime());\n    }\n    result = result && unknownFields.equals(other.unknownFields);\n    return result;\n  }\n\n  @java.lang.Override\n  public int hashCode() {\n    if (memoizedHashCode != 0) {\n      return memoizedHashCode;\n    }\n    int hash = 41;\n    hash = (19 * hash) + getDescriptor().hashCode();\n    if (hasInstance()) {\n      hash = (37 * hash) + INSTANCE_FIELD_NUMBER;\n      hash = (53 * hash) + getInstance().hashCode();\n    }\n    if (hasStartTime()) {\n      hash = (37 * hash) + START_TIME_FIELD_NUMBER;\n      hash = (53 * hash) + getStartTime().hashCode();\n    }\n    if (hasCancelTime()) {\n      hash = (37 
* hash) + CANCEL_TIME_FIELD_NUMBER;\n      hash = (53 * hash) + getCancelTime().hashCode();\n    }\n    if (hasEndTime()) {\n      hash = (37 * hash) + END_TIME_FIELD_NUMBER;\n      hash = (53 * hash) + getEndTime().hashCode();\n    }\n    hash = (29 * hash) + unknownFields.hashCode();\n    memoizedHashCode = hash;\n    return hash;\n  }\n\n  public static com.google.spanner.admin.instance.v1.CreateInstanceMetadata parseFrom(\n      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n\n  public static com.google.spanner.admin.instance.v1.CreateInstanceMetadata parseFrom(\n      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n\n  public static com.google.spanner.admin.instance.v1.CreateInstanceMetadata parseFrom(\n      com.google.protobuf.ByteString data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n\n  public static com.google.spanner.admin.instance.v1.CreateInstanceMetadata parseFrom(\n      com.google.protobuf.ByteString data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n\n  public static com.google.spanner.admin.instance.v1.CreateInstanceMetadata parseFrom(byte[] data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n\n  public static com.google.spanner.admin.instance.v1.CreateInstanceMetadata parseFrom(\n      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n\n  public static com.google.spanner.admin.instance.v1.CreateInstanceMetadata parseFrom(\n      java.io.InputStream input) throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);\n  }\n\n  public static com.google.spanner.admin.instance.v1.CreateInstanceMetadata parseFrom(\n      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(\n        PARSER, input, extensionRegistry);\n  }\n\n  public static com.google.spanner.admin.instance.v1.CreateInstanceMetadata parseDelimitedFrom(\n      java.io.InputStream input) throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);\n  }\n\n  public static com.google.spanner.admin.instance.v1.CreateInstanceMetadata parseDelimitedFrom(\n      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(\n        PARSER, input, extensionRegistry);\n  }\n\n  public static com.google.spanner.admin.instance.v1.CreateInstanceMetadata parseFrom(\n      com.google.protobuf.CodedInputStream input) throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);\n  }\n\n  public static com.google.spanner.admin.instance.v1.CreateInstanceMetadata parseFrom(\n      
com.google.protobuf.CodedInputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(\n        PARSER, input, extensionRegistry);\n  }\n\n  @java.lang.Override\n  public Builder newBuilderForType() {\n    return newBuilder();\n  }\n\n  public static Builder newBuilder() {\n    return DEFAULT_INSTANCE.toBuilder();\n  }\n\n  public static Builder newBuilder(\n      com.google.spanner.admin.instance.v1.CreateInstanceMetadata prototype) {\n    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n  }\n\n  @java.lang.Override\n  public Builder toBuilder() {\n    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);\n  }\n\n  @java.lang.Override\n  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n    Builder builder = new Builder(parent);\n    return builder;\n  }\n  \/**\n   *\n   *\n   * 
\n   * Metadata type for the operation returned by\n   * [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance].\n   * <\/pre>\n   *\n   * Protobuf type {@code google.spanner.admin.instance.v1.CreateInstanceMetadata}\n   *\/\n  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder\n      implements\n      \/\/ @@protoc_insertion_point(builder_implements:google.spanner.admin.instance.v1.CreateInstanceMetadata)\n      com.google.spanner.admin.instance.v1.CreateInstanceMetadataOrBuilder {\n    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {\n      return com.google.spanner.admin.instance.v1.SpannerInstanceAdminProto\n          .internal_static_google_spanner_admin_instance_v1_CreateInstanceMetadata_descriptor;\n    }\n\n    @java.lang.Override\n    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n        internalGetFieldAccessorTable() {\n      return com.google.spanner.admin.instance.v1.SpannerInstanceAdminProto\n          .internal_static_google_spanner_admin_instance_v1_CreateInstanceMetadata_fieldAccessorTable\n          .ensureFieldAccessorsInitialized(\n              com.google.spanner.admin.instance.v1.CreateInstanceMetadata.class,\n              com.google.spanner.admin.instance.v1.CreateInstanceMetadata.Builder.class);\n    }\n\n    \/\/ Construct using com.google.spanner.admin.instance.v1.CreateInstanceMetadata.newBuilder()\n    private Builder() {\n      maybeForceBuilderInitialization();\n    }\n\n    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n      super(parent);\n      maybeForceBuilderInitialization();\n    }\n\n    private void maybeForceBuilderInitialization() {\n      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}\n    }\n\n    @java.lang.Override\n    public Builder clear() {\n      super.clear();\n      if (instanceBuilder_ == null) {\n        instance_ = null;\n      } else {\n        instance_ = null;\n        instanceBuilder_ = null;\n      }\n      if (startTimeBuilder_ == null) {\n        startTime_ = null;\n      } else {\n        startTime_ = null;\n        startTimeBuilder_ = null;\n      }\n      if (cancelTimeBuilder_ == null) {\n        cancelTime_ = null;\n      } else {\n        cancelTime_ = null;\n        cancelTimeBuilder_ = null;\n      }\n      if (endTimeBuilder_ == null) {\n        endTime_ = null;\n      } else {\n        endTime_ = null;\n        endTimeBuilder_ = null;\n      }\n      return this;\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {\n      return com.google.spanner.admin.instance.v1.SpannerInstanceAdminProto\n          .internal_static_google_spanner_admin_instance_v1_CreateInstanceMetadata_descriptor;\n    }\n\n    @java.lang.Override\n    public com.google.spanner.admin.instance.v1.CreateInstanceMetadata getDefaultInstanceForType() {\n      return com.google.spanner.admin.instance.v1.CreateInstanceMetadata.getDefaultInstance();\n    }\n\n    @java.lang.Override\n    public com.google.spanner.admin.instance.v1.CreateInstanceMetadata build() {\n      com.google.spanner.admin.instance.v1.CreateInstanceMetadata result = buildPartial();\n      if (!result.isInitialized()) {\n        throw newUninitializedMessageException(result);\n      }\n      return result;\n    }\n\n    @java.lang.Override\n    public com.google.spanner.admin.instance.v1.CreateInstanceMetadata buildPartial() {\n      
com.google.spanner.admin.instance.v1.CreateInstanceMetadata result =\n          new com.google.spanner.admin.instance.v1.CreateInstanceMetadata(this);\n      if (instanceBuilder_ == null) {\n        result.instance_ = instance_;\n      } else {\n        result.instance_ = instanceBuilder_.build();\n      }\n      if (startTimeBuilder_ == null) {\n        result.startTime_ = startTime_;\n      } else {\n        result.startTime_ = startTimeBuilder_.build();\n      }\n      if (cancelTimeBuilder_ == null) {\n        result.cancelTime_ = cancelTime_;\n      } else {\n        result.cancelTime_ = cancelTimeBuilder_.build();\n      }\n      if (endTimeBuilder_ == null) {\n        result.endTime_ = endTime_;\n      } else {\n        result.endTime_ = endTimeBuilder_.build();\n      }\n      onBuilt();\n      return result;\n    }\n\n    @java.lang.Override\n    public Builder clone() {\n      return (Builder) super.clone();\n    }\n\n    @java.lang.Override\n    public Builder setField(\n        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {\n      return (Builder) super.setField(field, value);\n    }\n\n    @java.lang.Override\n    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {\n      return (Builder) super.clearField(field);\n    }\n\n    @java.lang.Override\n    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {\n      return (Builder) super.clearOneof(oneof);\n    }\n\n    @java.lang.Override\n    public Builder setRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {\n      return (Builder) super.setRepeatedField(field, index, value);\n    }\n\n    @java.lang.Override\n    public Builder addRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {\n      return (Builder) super.addRepeatedField(field, value);\n    }\n\n    @java.lang.Override\n    public Builder mergeFrom(com.google.protobuf.Message other) {\n      if (other instanceof com.google.spanner.admin.instance.v1.CreateInstanceMetadata) {\n        return mergeFrom((com.google.spanner.admin.instance.v1.CreateInstanceMetadata) other);\n      } else {\n        super.mergeFrom(other);\n        return this;\n      }\n    }\n\n    public Builder mergeFrom(com.google.spanner.admin.instance.v1.CreateInstanceMetadata other) {\n      if (other == com.google.spanner.admin.instance.v1.CreateInstanceMetadata.getDefaultInstance())\n        return this;\n      if (other.hasInstance()) {\n        mergeInstance(other.getInstance());\n      }\n      if (other.hasStartTime()) {\n        mergeStartTime(other.getStartTime());\n      }\n      if (other.hasCancelTime()) {\n        mergeCancelTime(other.getCancelTime());\n      }\n      if (other.hasEndTime()) {\n        mergeEndTime(other.getEndTime());\n      }\n      this.mergeUnknownFields(other.unknownFields);\n      onChanged();\n      return this;\n    }\n\n    @java.lang.Override\n    public final boolean isInitialized() {\n      return true;\n    }\n\n    @java.lang.Override\n    public Builder mergeFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws java.io.IOException {\n      com.google.spanner.admin.instance.v1.CreateInstanceMetadata parsedMessage = null;\n      try {\n        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);\n      } catch 
(com.google.protobuf.InvalidProtocolBufferException e) {\n        parsedMessage =\n            (com.google.spanner.admin.instance.v1.CreateInstanceMetadata) e.getUnfinishedMessage();\n        throw e.unwrapIOException();\n      } finally {\n        if (parsedMessage != null) {\n          mergeFrom(parsedMessage);\n        }\n      }\n      return this;\n    }\n\n    private com.google.spanner.admin.instance.v1.Instance instance_ = null;\n    private com.google.protobuf.SingleFieldBuilderV3<\n            com.google.spanner.admin.instance.v1.Instance,\n            com.google.spanner.admin.instance.v1.Instance.Builder,\n            com.google.spanner.admin.instance.v1.InstanceOrBuilder>\n        instanceBuilder_;\n    \/**\n     *\n     *\n     * 
\n     * The instance being created.\n     * <\/pre>\n     *\n     * .google.spanner.admin.instance.v1.Instance instance = 1;<\/code>\n     *\/\n    public boolean hasInstance() {\n      return instanceBuilder_ != null || instance_ != null;\n    }\n    \/**\n     *\n     *\n     * 
\n     * The instance being created.\n     * <\/pre>\n     *\n     * .google.spanner.admin.instance.v1.Instance instance = 1;<\/code>\n     *\/\n    public com.google.spanner.admin.instance.v1.Instance getInstance() {\n      if (instanceBuilder_ == null) {\n        return instance_ == null\n            ? com.google.spanner.admin.instance.v1.Instance.getDefaultInstance()\n            : instance_;\n      } else {\n        return instanceBuilder_.getMessage();\n      }\n    }\n    \/**\n     *\n     *\n     * 
\n     * The instance being created.\n     * <\/pre>\n     *\n     * .google.spanner.admin.instance.v1.Instance instance = 1;<\/code>\n     *\/\n    public Builder setInstance(com.google.spanner.admin.instance.v1.Instance value) {\n      if (instanceBuilder_ == null) {\n        if (value == null) {\n          throw new NullPointerException();\n        }\n        instance_ = value;\n        onChanged();\n      } else {\n        instanceBuilder_.setMessage(value);\n      }\n\n      return this;\n    }\n    \/**\n     *\n     *\n     * 
\n     * The instance being created.\n     * <\/pre>\n     *\n     * .google.spanner.admin.instance.v1.Instance instance = 1;<\/code>\n     *\/\n    public Builder setInstance(\n        com.google.spanner.admin.instance.v1.Instance.Builder builderForValue) {\n      if (instanceBuilder_ == null) {\n        instance_ = builderForValue.build();\n        onChanged();\n      } else {\n        instanceBuilder_.setMessage(builderForValue.build());\n      }\n\n      return this;\n    }\n    \/**\n     *\n     *\n     * 
\n     * The instance being created.\n     * <\/pre>\n     *\n     * .google.spanner.admin.instance.v1.Instance instance = 1;<\/code>\n     *\/\n    public Builder mergeInstance(com.google.spanner.admin.instance.v1.Instance value) {\n      if (instanceBuilder_ == null) {\n        if (instance_ != null) {\n          instance_ =\n              com.google.spanner.admin.instance.v1.Instance.newBuilder(instance_)\n                  .mergeFrom(value)\n                  .buildPartial();\n        } else {\n          instance_ = value;\n        }\n        onChanged();\n      } else {\n        instanceBuilder_.mergeFrom(value);\n      }\n\n      return this;\n    }\n    \/**\n     *\n     *\n     * 
\n     * The instance being created.\n     * <\/pre>\n     *\n     * .google.spanner.admin.instance.v1.Instance instance = 1;<\/code>\n     *\/\n    public Builder clearInstance() {\n      if (instanceBuilder_ == null) {\n        instance_ = null;\n        onChanged();\n      } else {\n        instance_ = null;\n        instanceBuilder_ = null;\n      }\n\n      return this;\n    }\n    \/**\n     *\n     *\n     * 
\n     * The instance being created.\n     * <\/pre>\n     *\n     * .google.spanner.admin.instance.v1.Instance instance = 1;<\/code>\n     *\/\n    public com.google.spanner.admin.instance.v1.Instance.Builder getInstanceBuilder() {\n\n      onChanged();\n      return getInstanceFieldBuilder().getBuilder();\n    }\n    \/**\n     *\n     *\n     * 
\n     * The instance being created.\n     * <\/pre>\n     *\n     * .google.spanner.admin.instance.v1.Instance instance = 1;<\/code>\n     *\/\n    public com.google.spanner.admin.instance.v1.InstanceOrBuilder getInstanceOrBuilder() {\n      if (instanceBuilder_ != null) {\n        return instanceBuilder_.getMessageOrBuilder();\n      } else {\n        return instance_ == null\n            ? com.google.spanner.admin.instance.v1.Instance.getDefaultInstance()\n            : instance_;\n      }\n    }\n    \/**\n     *\n     *\n     * 
\n     * The instance being created.\n     * <\/pre>\n     *\n     * .google.spanner.admin.instance.v1.Instance instance = 1;<\/code>\n     *\/\n    private com.google.protobuf.SingleFieldBuilderV3<\n            com.google.spanner.admin.instance.v1.Instance,\n            com.google.spanner.admin.instance.v1.Instance.Builder,\n            com.google.spanner.admin.instance.v1.InstanceOrBuilder>\n        getInstanceFieldBuilder() {\n      if (instanceBuilder_ == null) {\n        instanceBuilder_ =\n            new com.google.protobuf.SingleFieldBuilderV3<\n                com.google.spanner.admin.instance.v1.Instance,\n                com.google.spanner.admin.instance.v1.Instance.Builder,\n                com.google.spanner.admin.instance.v1.InstanceOrBuilder>(\n                getInstance(), getParentForChildren(), isClean());\n        instance_ = null;\n      }\n      return instanceBuilder_;\n    }\n\n    private com.google.protobuf.Timestamp startTime_ = null;\n    private com.google.protobuf.SingleFieldBuilderV3<\n            com.google.protobuf.Timestamp,\n            com.google.protobuf.Timestamp.Builder,\n            com.google.protobuf.TimestampOrBuilder>\n        startTimeBuilder_;\n    \/**\n     *\n     *\n     * 
\n     * The time at which the\n     * [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance] request was\n     * received.\n     * <\/pre>\n     *\n     * .google.protobuf.Timestamp start_time = 2;<\/code>\n     *\/\n    public boolean hasStartTime() {\n      return startTimeBuilder_ != null || startTime_ != null;\n    }\n    \/**\n     *\n     *\n     * 
\n     * The time at which the\n     * [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance] request was\n     * received.\n     * <\/pre>\n     *\n     * .google.protobuf.Timestamp start_time = 2;<\/code>\n     *\/\n    public com.google.protobuf.Timestamp getStartTime() {\n      if (startTimeBuilder_ == null) {\n        return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_;\n      } else {\n        return startTimeBuilder_.getMessage();\n      }\n    }\n    \/**\n     *\n     *\n     * 
\n     * The time at which the\n     * [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance] request was\n     * received.\n     * <\/pre>\n     *\n     * .google.protobuf.Timestamp start_time = 2;<\/code>\n     *\/\n    public Builder setStartTime(com.google.protobuf.Timestamp value) {\n      if (startTimeBuilder_ == null) {\n        if (value == null) {\n          throw new NullPointerException();\n        }\n        startTime_ = value;\n        onChanged();\n      } else {\n        startTimeBuilder_.setMessage(value);\n      }\n\n      return this;\n    }\n    \/**\n     *\n     *\n     * 
\n     * The time at which the\n     * [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance] request was\n     * received.\n     * <\/pre>\n     *\n     * .google.protobuf.Timestamp start_time = 2;<\/code>\n     *\/\n    public Builder setStartTime(com.google.protobuf.Timestamp.Builder builderForValue) {\n      if (startTimeBuilder_ == null) {\n        startTime_ = builderForValue.build();\n        onChanged();\n      } else {\n        startTimeBuilder_.setMessage(builderForValue.build());\n      }\n\n      return this;\n    }\n    \/**\n     *\n     *\n     * 
\n     * The time at which the\n     * [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance] request was\n     * received.\n     * <\/pre>\n     *\n     * .google.protobuf.Timestamp start_time = 2;<\/code>\n     *\/\n    public Builder mergeStartTime(com.google.protobuf.Timestamp value) {\n      if (startTimeBuilder_ == null) {\n        if (startTime_ != null) {\n          startTime_ =\n              com.google.protobuf.Timestamp.newBuilder(startTime_).mergeFrom(value).buildPartial();\n        } else {\n          startTime_ = value;\n        }\n        onChanged();\n      } else {\n        startTimeBuilder_.mergeFrom(value);\n      }\n\n      return this;\n    }\n    \/**\n     *\n     *\n     * 
\n     * The time at which the\n     * [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance] request was\n     * received.\n     * <\/pre>\n     *\n     * .google.protobuf.Timestamp start_time = 2;<\/code>\n     *\/\n    public Builder clearStartTime() {\n      if (startTimeBuilder_ == null) {\n        startTime_ = null;\n        onChanged();\n      } else {\n        startTime_ = null;\n        startTimeBuilder_ = null;\n      }\n\n      return this;\n    }\n    \/**\n     *\n     *\n     * 
\n     * The time at which the\n     * [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance] request was\n     * received.\n     * <\/pre>\n     *\n     * .google.protobuf.Timestamp start_time = 2;<\/code>\n     *\/\n    public com.google.protobuf.Timestamp.Builder getStartTimeBuilder() {\n\n      onChanged();\n      return getStartTimeFieldBuilder().getBuilder();\n    }\n    \/**\n     *\n     *\n     * 
\n     * The time at which the\n     * [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance] request was\n     * received.\n     * <\/pre>\n     *\n     * .google.protobuf.Timestamp start_time = 2;<\/code>\n     *\/\n    public com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder() {\n      if (startTimeBuilder_ != null) {\n        return startTimeBuilder_.getMessageOrBuilder();\n      } else {\n        return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_;\n      }\n    }\n    \/**\n     *\n     *\n     * 
\n     * The time at which the\n     * [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance] request was\n     * received.\n     * <\/pre>\n     *\n     * .google.protobuf.Timestamp start_time = 2;<\/code>\n     *\/\n    private com.google.protobuf.SingleFieldBuilderV3<\n            com.google.protobuf.Timestamp,\n            com.google.protobuf.Timestamp.Builder,\n            com.google.protobuf.TimestampOrBuilder>\n        getStartTimeFieldBuilder() {\n      if (startTimeBuilder_ == null) {\n        startTimeBuilder_ =\n            new com.google.protobuf.SingleFieldBuilderV3<\n                com.google.protobuf.Timestamp,\n                com.google.protobuf.Timestamp.Builder,\n                com.google.protobuf.TimestampOrBuilder>(\n                getStartTime(), getParentForChildren(), isClean());\n        startTime_ = null;\n      }\n      return startTimeBuilder_;\n    }\n\n    private com.google.protobuf.Timestamp cancelTime_ = null;\n    private com.google.protobuf.SingleFieldBuilderV3<\n            com.google.protobuf.Timestamp,\n            com.google.protobuf.Timestamp.Builder,\n            com.google.protobuf.TimestampOrBuilder>\n        cancelTimeBuilder_;\n    \/**\n     *\n     *\n     * 
\n     * The time at which this operation was cancelled. If set, this operation is\n     * in the process of undoing itself (which is guaranteed to succeed) and\n     * cannot be cancelled again.\n     * <\/pre>\n     *\n     * .google.protobuf.Timestamp cancel_time = 3;<\/code>\n     *\/\n    public boolean hasCancelTime() {\n      return cancelTimeBuilder_ != null || cancelTime_ != null;\n    }\n    \/**\n     *\n     *\n     * 
\n     * The time at which this operation was cancelled. If set, this operation is\n     * in the process of undoing itself (which is guaranteed to succeed) and\n     * cannot be cancelled again.\n     * <\/pre>\n     *\n     * .google.protobuf.Timestamp cancel_time = 3;<\/code>\n     *\/\n    public com.google.protobuf.Timestamp getCancelTime() {\n      if (cancelTimeBuilder_ == null) {\n        return cancelTime_ == null\n            ? com.google.protobuf.Timestamp.getDefaultInstance()\n            : cancelTime_;\n      } else {\n        return cancelTimeBuilder_.getMessage();\n      }\n    }\n    \/**\n     *\n     *\n     * 
\n     * The time at which this operation was cancelled. If set, this operation is\n     * in the process of undoing itself (which is guaranteed to succeed) and\n     * cannot be cancelled again.\n     * <\/pre>\n     *\n     * .google.protobuf.Timestamp cancel_time = 3;<\/code>\n     *\/\n    public Builder setCancelTime(com.google.protobuf.Timestamp value) {\n      if (cancelTimeBuilder_ == null) {\n        if (value == null) {\n          throw new NullPointerException();\n        }\n        cancelTime_ = value;\n        onChanged();\n      } else {\n        cancelTimeBuilder_.setMessage(value);\n      }\n\n      return this;\n    }\n    \/**\n     *\n     *\n     * 
\n     * The time at which this operation was cancelled. If set, this operation is\n     * in the process of undoing itself (which is guaranteed to succeed) and\n     * cannot be cancelled again.\n     * <\/pre>\n     *\n     * .google.protobuf.Timestamp cancel_time = 3;<\/code>\n     *\/\n    public Builder setCancelTime(com.google.protobuf.Timestamp.Builder builderForValue) {\n      if (cancelTimeBuilder_ == null) {\n        cancelTime_ = builderForValue.build();\n        onChanged();\n      } else {\n        cancelTimeBuilder_.setMessage(builderForValue.build());\n      }\n\n      return this;\n    }\n    \/**\n     *\n     *\n     * 
\n     * The time at which this operation was cancelled. If set, this operation is\n     * in the process of undoing itself (which is guaranteed to succeed) and\n     * cannot be cancelled again.\n     * <\/pre>\n     *\n     * .google.protobuf.Timestamp cancel_time = 3;<\/code>\n     *\/\n    public Builder mergeCancelTime(com.google.protobuf.Timestamp value) {\n      if (cancelTimeBuilder_ == null) {\n        if (cancelTime_ != null) {\n          cancelTime_ =\n              com.google.protobuf.Timestamp.newBuilder(cancelTime_).mergeFrom(value).buildPartial();\n        } else {\n          cancelTime_ = value;\n        }\n        onChanged();\n      } else {\n        cancelTimeBuilder_.mergeFrom(value);\n      }\n\n      return this;\n    }\n    \/**\n     *\n     *\n     * 
\n     * The time at which this operation was cancelled. If set, this operation is\n     * in the process of undoing itself (which is guaranteed to succeed) and\n     * cannot be cancelled again.\n     * <\/pre>\n     *\n     * .google.protobuf.Timestamp cancel_time = 3;<\/code>\n     *\/\n    public Builder clearCancelTime() {\n      if (cancelTimeBuilder_ == null) {\n        cancelTime_ = null;\n        onChanged();\n      } else {\n        cancelTime_ = null;\n        cancelTimeBuilder_ = null;\n      }\n\n      return this;\n    }\n    \/**\n     *\n     *\n     * 
\n     * The time at which this operation was cancelled. If set, this operation is\n     * in the process of undoing itself (which is guaranteed to succeed) and\n     * cannot be cancelled again.\n     * <\/pre>\n     *\n     * .google.protobuf.Timestamp cancel_time = 3;<\/code>\n     *\/\n    public com.google.protobuf.Timestamp.Builder getCancelTimeBuilder() {\n\n      onChanged();\n      return getCancelTimeFieldBuilder().getBuilder();\n    }\n    \/**\n     *\n     *\n     * 
\n     * The time at which this operation was cancelled. If set, this operation is\n     * in the process of undoing itself (which is guaranteed to succeed) and\n     * cannot be cancelled again.\n     * <\/pre>\n     *\n     * .google.protobuf.Timestamp cancel_time = 3;<\/code>\n     *\/\n    public com.google.protobuf.TimestampOrBuilder getCancelTimeOrBuilder() {\n      if (cancelTimeBuilder_ != null) {\n        return cancelTimeBuilder_.getMessageOrBuilder();\n      } else {\n        return cancelTime_ == null\n            ? com.google.protobuf.Timestamp.getDefaultInstance()\n            : cancelTime_;\n      }\n    }\n    \/**\n     *\n     *\n     * 
\n     * The time at which this operation was cancelled. If set, this operation is\n     * in the process of undoing itself (which is guaranteed to succeed) and\n     * cannot be cancelled again.\n     * <\/pre>\n     *\n     * .google.protobuf.Timestamp cancel_time = 3;<\/code>\n     *\/\n    private com.google.protobuf.SingleFieldBuilderV3<\n            com.google.protobuf.Timestamp,\n            com.google.protobuf.Timestamp.Builder,\n            com.google.protobuf.TimestampOrBuilder>\n        getCancelTimeFieldBuilder() {\n      if (cancelTimeBuilder_ == null) {\n        cancelTimeBuilder_ =\n            new com.google.protobuf.SingleFieldBuilderV3<\n                com.google.protobuf.Timestamp,\n                com.google.protobuf.Timestamp.Builder,\n                com.google.protobuf.TimestampOrBuilder>(\n                getCancelTime(), getParentForChildren(), isClean());\n        cancelTime_ = null;\n      }\n      return cancelTimeBuilder_;\n    }\n\n    private com.google.protobuf.Timestamp endTime_ = null;\n    private com.google.protobuf.SingleFieldBuilderV3<\n            com.google.protobuf.Timestamp,\n            com.google.protobuf.Timestamp.Builder,\n            com.google.protobuf.TimestampOrBuilder>\n        endTimeBuilder_;\n    \/**\n     *\n     *\n     * 
\n     * The time at which this operation failed or was completed successfully.\n     * <\/pre>\n     *\n     * .google.protobuf.Timestamp end_time = 4;<\/code>\n     *\/\n    public boolean hasEndTime() {\n      return endTimeBuilder_ != null || endTime_ != null;\n    }\n    \/**\n     *\n     *\n     * 
\n     * The time at which this operation failed or was completed successfully.\n     * <\/pre>\n     *\n     * .google.protobuf.Timestamp end_time = 4;<\/code>\n     *\/\n    public com.google.protobuf.Timestamp getEndTime() {\n      if (endTimeBuilder_ == null) {\n        return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_;\n      } else {\n        return endTimeBuilder_.getMessage();\n      }\n    }\n    \/**\n     *\n     *\n     * 
\n     * The time at which this operation failed or was completed successfully.\n     * <\/pre>\n     *\n     * .google.protobuf.Timestamp end_time = 4;<\/code>\n     *\/\n    public Builder setEndTime(com.google.protobuf.Timestamp value) {\n      if (endTimeBuilder_ == null) {\n        if (value == null) {\n          throw new NullPointerException();\n        }\n        endTime_ = value;\n        onChanged();\n      } else {\n        endTimeBuilder_.setMessage(value);\n      }\n\n      return this;\n    }\n    \/**\n     *\n     *\n     * 
\n     * The time at which this operation failed or was completed successfully.\n     * <\/pre>\n     *\n     * .google.protobuf.Timestamp end_time = 4;<\/code>\n     *\/\n    public Builder setEndTime(com.google.protobuf.Timestamp.Builder builderForValue) {\n      if (endTimeBuilder_ == null) {\n        endTime_ = builderForValue.build();\n        onChanged();\n      } else {\n        endTimeBuilder_.setMessage(builderForValue.build());\n      }\n\n      return this;\n    }\n    \/**\n     *\n     *\n     * 
\n     * The time at which this operation failed or was completed successfully.\n     * <\/pre>\n     *\n     * .google.protobuf.Timestamp end_time = 4;<\/code>\n     *\/\n    public Builder mergeEndTime(com.google.protobuf.Timestamp value) {\n      if (endTimeBuilder_ == null) {\n        if (endTime_ != null) {\n          endTime_ =\n              com.google.protobuf.Timestamp.newBuilder(endTime_).mergeFrom(value).buildPartial();\n        } else {\n          endTime_ = value;\n        }\n        onChanged();\n      } else {\n        endTimeBuilder_.mergeFrom(value);\n      }\n\n      return this;\n    }\n    \/**\n     *\n     *\n     * 
\n     * The time at which this operation failed or was completed successfully.\n     * <\/pre>\n     *\n     * .google.protobuf.Timestamp end_time = 4;<\/code>\n     *\/\n    public Builder clearEndTime() {\n      if (endTimeBuilder_ == null) {\n        endTime_ = null;\n        onChanged();\n      } else {\n        endTime_ = null;\n        endTimeBuilder_ = null;\n      }\n\n      return this;\n    }\n    \/**\n     *\n     *\n     * 
\n     * The time at which this operation failed or was completed successfully.\n     * <\/pre>\n     *\n     * .google.protobuf.Timestamp end_time = 4;<\/code>\n     *\/\n    public com.google.protobuf.Timestamp.Builder getEndTimeBuilder() {\n\n      onChanged();\n      return getEndTimeFieldBuilder().getBuilder();\n    }\n    \/**\n     *\n     *\n     * 
\n     * The time at which this operation failed or was completed successfully.\n     * <\/pre>\n     *\n     * .google.protobuf.Timestamp end_time = 4;<\/code>\n     *\/\n    public com.google.protobuf.TimestampOrBuilder getEndTimeOrBuilder() {\n      if (endTimeBuilder_ != null) {\n        return endTimeBuilder_.getMessageOrBuilder();\n      } else {\n        return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_;\n      }\n    }\n    \/**\n     *\n     *\n     * 
\n     * The time at which this operation failed or was completed successfully.\n     * <\/pre>\n     *\n     * .google.protobuf.Timestamp end_time = 4;<\/code>\n     *\/\n    private com.google.protobuf.SingleFieldBuilderV3<\n            com.google.protobuf.Timestamp,\n            com.google.protobuf.Timestamp.Builder,\n            com.google.protobuf.TimestampOrBuilder>\n        getEndTimeFieldBuilder() {\n      if (endTimeBuilder_ == null) {\n        endTimeBuilder_ =\n            new com.google.protobuf.SingleFieldBuilderV3<\n                com.google.protobuf.Timestamp,\n                com.google.protobuf.Timestamp.Builder,\n                com.google.protobuf.TimestampOrBuilder>(\n                getEndTime(), getParentForChildren(), isClean());\n        endTime_ = null;\n      }\n      return endTimeBuilder_;\n    }\n\n    @java.lang.Override\n    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.setUnknownFieldsProto3(unknownFields);\n    }\n\n    @java.lang.Override\n    public final Builder mergeUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.mergeUnknownFields(unknownFields);\n    }\n\n    \/\/ @@protoc_insertion_point(builder_scope:google.spanner.admin.instance.v1.CreateInstanceMetadata)\n  }\n\n  \/\/ @@protoc_insertion_point(class_scope:google.spanner.admin.instance.v1.CreateInstanceMetadata)\n  private static final com.google.spanner.admin.instance.v1.CreateInstanceMetadata DEFAULT_INSTANCE;\n\n  static {\n    DEFAULT_INSTANCE = new com.google.spanner.admin.instance.v1.CreateInstanceMetadata();\n  }\n\n  public static com.google.spanner.admin.instance.v1.CreateInstanceMetadata getDefaultInstance() {\n    return DEFAULT_INSTANCE;\n  }\n\n  private static final com.google.protobuf.Parser PARSER =\n      new com.google.protobuf.AbstractParser() {\n        @java.lang.Override\n        public CreateInstanceMetadata parsePartialFrom(\n            com.google.protobuf.CodedInputStream input,\n            com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n            throws com.google.protobuf.InvalidProtocolBufferException {\n          return new CreateInstanceMetadata(input, extensionRegistry);\n        }\n      };\n\n  public static com.google.protobuf.Parser parser() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.google.protobuf.Parser getParserForType() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.google.spanner.admin.instance.v1.CreateInstanceMetadata getDefaultInstanceForType() {\n    return DEFAULT_INSTANCE;\n  }\n}\n","avg_line_length":31.3187459177,"max_line_length":109,"alphanum_fraction":0.6364470583}
{"size":2871,"ext":"java","lang":"Java","max_stars_count":240.0,"content":"\/*\n * Copyright 2014 the original author or authors.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *      http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\/\n\npackage de.codecentric.batch.filetodb;\n\nimport static org.junit.Assert.assertEquals;\n\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.springframework.batch.core.ExitStatus;\nimport org.springframework.boot.test.context.SpringBootTest;\nimport org.springframework.boot.test.context.SpringBootTest.WebEnvironment;\nimport org.springframework.boot.web.client.RestTemplateBuilder;\nimport org.springframework.test.context.junit4.SpringJUnit4ClassRunner;\nimport org.springframework.util.LinkedMultiValueMap;\nimport org.springframework.util.MultiValueMap;\nimport org.springframework.web.client.RestTemplate;\n\n@RunWith(SpringJUnit4ClassRunner.class)\n@SpringBootTest(webEnvironment = WebEnvironment.DEFINED_PORT, properties = {\n    \"server.port=8090\",\n    \"spring.main.allow-bean-definition-overriding=true\"\n})\npublic class FlatFileJobIntegrationTest {\n\n\tprivate RestTemplate restTemplate = new RestTemplateBuilder().build();\n\n\t@Test\n\tpublic void testLaunchJob() throws Exception {\n\t\t\/\/ Given\n\t\tString jobParameters = \"pathToFile=classpath:partner-import.csv\";\n\t\t\/\/ When\n\t\tExitStatus exitStatus = runJobAndWaitForCompletion(\"localhost\", \"8090\", \"flatfileJob\", jobParameters);\n\t\t\/\/ Then\n\t\tassertEquals(ExitStatus.COMPLETED.getExitCode(), exitStatus.getExitCode());\n\t}\n\n\tprotected ExitStatus runJobAndWaitForCompletion(String hostname, String port, String jobName, String jobParameters)\n\t\t\tthrows InterruptedException {\n\t\tMultiValueMap parameters = new LinkedMultiValueMap();\n\t\tparameters.add(\"jobParameters\", jobParameters);\n\t\tString jobExecutionId = restTemplate.postForObject(\n\t\t\t\t\"http:\/\/\" + hostname + \":\" + port + \"\/batch\/operations\/jobs\/\" + jobName + \"\", parameters, String.class);\n\t\tExitStatus exitStatus = getStatus(hostname, port, jobExecutionId);\n\t\t\/\/ Wait for end of job\n\t\twhile (exitStatus.isRunning()) {\n\t\t\tThread.sleep(100);\n\t\t\texitStatus = getStatus(hostname, port, jobExecutionId);\n\t\t}\n\t\treturn exitStatus;\n\t}\n\n\tprivate ExitStatus getStatus(String hostname, String port, String jobExecutionId) {\n\t\tString jobstatus = restTemplate.getForObject(\n\t\t\t\t\"http:\/\/\" + hostname + \":\" + port + \"\/batch\/operations\/jobs\/executions\/\" + jobExecutionId,\n\t\t\t\tString.class);\n\t\treturn new ExitStatus(jobstatus);\n\t}\n\n}\n","avg_line_length":38.7972972973,"max_line_length":116,"alphanum_fraction":0.7704632532}
{"size":11331,"ext":"java","lang":"Java","max_stars_count":null,"content":"\/**\n * Copyright (c) 2002-2013 \"Neo Technology,\"\n * Network Engine for Objects in Lund AB [http:\/\/neotechnology.com]\n *\n * This file is part of Neo4j.\n *\n * Neo4j is free software: you can redistribute it and\/or modify\n * it under the terms of the GNU Affero General Public License as\n * published by the Free Software Foundation, either version 3 of the\n * License, or (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. If not, see .\n *\/\npackage org.neo4j.backup;\n\nimport static org.neo4j.helpers.ProgressIndicator.SimpleProgress.textual;\nimport static org.neo4j.helpers.collection.MapUtil.stringMap;\nimport static org.neo4j.kernel.impl.nioneo.xa.NeoStoreXaDataSource.LOGICAL_LOG_DEFAULT_NAME;\nimport static org.neo4j.kernel.impl.transaction.xaframework.XaLogicalLog.getHighestHistoryLogVersion;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.RandomAccessFile;\nimport java.nio.ByteBuffer;\nimport java.nio.channels.FileChannel;\nimport java.nio.channels.ReadableByteChannel;\nimport java.util.Map;\n\nimport javax.transaction.xa.Xid;\n\nimport org.neo4j.consistency.ConsistencyCheckSettings;\nimport org.neo4j.consistency.checking.full.ConsistencyCheckIncompleteException;\nimport org.neo4j.consistency.checking.full.FullCheck;\nimport org.neo4j.graphdb.factory.GraphDatabaseSettings;\nimport org.neo4j.helpers.Args;\nimport org.neo4j.helpers.ProgressIndicator;\nimport org.neo4j.helpers.progress.ProgressMonitorFactory;\nimport org.neo4j.kernel.DefaultFileSystemAbstraction;\nimport org.neo4j.kernel.GraphDatabaseAPI;\nimport org.neo4j.kernel.configuration.Config;\nimport org.neo4j.kernel.configuration.ConfigParam;\nimport org.neo4j.kernel.impl.nioneo.store.FileSystemAbstraction;\nimport org.neo4j.kernel.impl.nioneo.store.StoreAccess;\nimport org.neo4j.kernel.impl.nioneo.xa.Command;\nimport org.neo4j.kernel.impl.transaction.xaframework.InMemoryLogBuffer;\nimport org.neo4j.kernel.impl.transaction.xaframework.LogEntry;\nimport org.neo4j.kernel.impl.transaction.xaframework.LogExtractor;\nimport org.neo4j.kernel.impl.transaction.xaframework.LogIoUtils;\nimport org.neo4j.kernel.impl.transaction.xaframework.XaCommand;\nimport org.neo4j.kernel.impl.transaction.xaframework.XaCommandFactory;\nimport org.neo4j.kernel.impl.transaction.xaframework.XaDataSource;\nimport org.neo4j.kernel.impl.util.StringLogger;\n\nclass RebuildFromLogs\n{\n    private static final FileSystemAbstraction FS = new DefaultFileSystemAbstraction();\n    \n    \/*\n     * TODO: This process can be sped up if the target db doesn't have to write tx logs.\n     *\/\n    private final XaDataSource nioneo;\n    private final StoreAccess stores;\n\n    RebuildFromLogs( GraphDatabaseAPI graphdb )\n    {\n        this.nioneo = getDataSource( graphdb, Config.DEFAULT_DATA_SOURCE_NAME );\n        this.stores = new StoreAccess( graphdb );\n    }\n\n    \/\/ TODO: rewrite to use the new progress indication API\n    RebuildFromLogs applyTransactionsFrom( ProgressIndicator progress, File sourceDir ) throws IOException\n    {\n        LogExtractor extractor = null;\n        try\n        {\n         
   extractor = LogExtractor.from( FS, sourceDir );\n            for ( InMemoryLogBuffer buffer = new InMemoryLogBuffer(); ; buffer.reset() )\n            {\n                long txId = extractor.extractNext( buffer );\n                if ( txId == -1 )\n                {\n                    break;\n                }\n                applyTransaction( txId, buffer );\n                if ( progress != null )\n                {\n                    progress.update( false, txId );\n                }\n            }\n        }\n        finally\n        {\n            if ( extractor != null )\n            {\n                extractor.close();\n            }\n        }\n        return this;\n    }\n\n    public void applyTransaction( long txId, ReadableByteChannel txData ) throws IOException\n    {\n        nioneo.applyCommittedTransaction( txId, txData );\n    }\n\n    private static XaDataSource getDataSource( GraphDatabaseAPI graphdb, String name )\n    {\n        XaDataSource datasource = graphdb.getXaDataSourceManager().getXaDataSource( name );\n        if ( datasource == null )\n        {\n            throw new NullPointerException( \"Could not access \" + name );\n        }\n        return datasource;\n    }\n\n    public static void main( String[] args )\n    {\n        if ( args == null )\n        {\n            printUsage();\n            return;\n        }\n        Args params = new Args( args );\n        @SuppressWarnings(\"boxing\")\n        boolean full = params.getBoolean( \"full\", false, true );\n        args = params.orphans().toArray( new String[0] );\n        if ( args.length != 2 )\n        {\n            printUsage( \"Exactly two positional arguments expected: \"\n                    + \" , got \" + args.length );\n            System.exit( -1 );\n            return;\n        }\n        File source = new File( args[0] ), target = new File( args[1] );\n        if ( !source.isDirectory() )\n        {\n            printUsage( source + \" is not a directory\" );\n            System.exit( -1 );\n            return;\n        }\n        if ( target.exists() )\n        {\n            if ( target.isDirectory() )\n            {\n                if ( new BackupService().directoryContainsDb( target.getAbsolutePath() ) )\n                {\n                    printUsage( \"target graph database already exists\" );\n                    System.exit( -1 );\n                    return;\n                }\n                else\n                {\n                    System.err.println( \"WARNING: the directory \" + target + \" already exists\" );\n                }\n            }\n            else\n            {\n                printUsage( target + \" is a file\" );\n                System.exit( -1 );\n                return;\n            }\n        }\n        long maxFileId = findMaxLogFileId( source );\n        if ( maxFileId < 0 )\n        {\n            printUsage( \"Inconsistent number of log files found in \" + source );\n            System.exit( -1 );\n            return;\n        }\n        long txCount = findLastTransactionId( source, LOGICAL_LOG_DEFAULT_NAME + \".v\" + maxFileId );\n        String txdifflog = params.get( \"txdifflog\", null, new File( target, \"txdiff.log\" ).getAbsolutePath() );\n        GraphDatabaseAPI graphdb = BackupService.startTemporaryDb( target.getAbsolutePath(),\n                new TxDiffLogConfig( full\n                        ? 
VerificationLevel.FULL_WITH_LOGGING\n                        : VerificationLevel.LOGGING, txdifflog ) );\n\n        ProgressIndicator progress;\n        if ( txCount < 0 )\n        {\n            progress = null;\n            System.err.println( \"Unable to report progress, cannot find highest txId, attempting rebuild anyhow.\" );\n        }\n        else\n        {\n            progress = textual( System.err, txCount );\n            System.err.printf( \"Rebuilding store from %s transactions %n\", txCount );\n        }\n        try\n        {\n            try\n            {\n                RebuildFromLogs rebuilder = new RebuildFromLogs( graphdb ).applyTransactionsFrom( progress, source );\n                if ( progress != null )\n                {\n                    progress.done( txCount );\n                }\n                \/\/ if we didn't run the full checker for each transaction, run it afterwards\n                if ( !full )\n                {\n                    rebuilder.checkConsistency();\n                }\n            }\n            finally\n            {\n                graphdb.shutdown();\n            }\n        }\n        catch ( Exception e )\n        {\n            System.err.println();\n            e.printStackTrace( System.err );\n            System.exit( -1 );\n            return;\n        }\n    }\n\n    private static long findLastTransactionId( File storeDir, String logFileName )\n    {\n        long txId;\n        try\n        {\n            FileChannel channel = new RandomAccessFile( new File( storeDir, logFileName ), \"r\" ).getChannel();\n            try\n            {\n                ByteBuffer buffer = ByteBuffer.allocateDirect( 9 + Xid.MAXGTRIDSIZE + Xid.MAXBQUALSIZE * 10 );\n                txId = LogIoUtils.readLogHeader( buffer, channel, true )[1];\n                XaCommandFactory cf = new CommandFactory();\n                for ( LogEntry entry; (entry = LogIoUtils.readEntry( buffer, channel, cf )) != null; )\n                {\n                    if ( entry instanceof LogEntry.Commit )\n                    {\n                        txId = ((LogEntry.Commit) entry).getTxId();\n                    }\n                }\n            }\n            finally\n            {\n                if ( channel != null )\n                {\n                    channel.close();\n                }\n            }\n        }\n        catch ( IOException e )\n        {\n            return -1;\n        }\n        return txId;\n    }\n\n    private void checkConsistency() throws ConsistencyCheckIncompleteException\n    {\n        Config tuningConfiguration = new Config( stringMap(),\n                GraphDatabaseSettings.class, ConsistencyCheckSettings.class );\n        new FullCheck( tuningConfiguration, ProgressMonitorFactory.textual( System.err ) )\n                .execute( stores, StringLogger.SYSTEM );\n    }\n\n    private static void printUsage( String... 
msgLines )\n    {\n        for ( String line : msgLines )\n        {\n            System.err.println( line );\n        }\n        System.err.println( Args.jarUsage( RebuildFromLogs.class, \"[-full]  \" ) );\n        System.err.println( \"WHERE:     is the path for where transactions to rebuild from are stored\" );\n        System.err.println( \"           is the path for where to create the new graph database\" );\n        System.err.println( \"         -full     --  to run a full check over the entire store for each transaction\" );\n    }\n\n    private static long findMaxLogFileId( File source )\n    {\n        return getHighestHistoryLogVersion( FS, source, LOGICAL_LOG_DEFAULT_NAME );\n    }\n\n    private static class TxDiffLogConfig implements ConfigParam\n    {\n        private final String targetFile;\n        private final VerificationLevel level;\n\n        TxDiffLogConfig( VerificationLevel level, String targetFile )\n        {\n            this.level = level;\n            this.targetFile = targetFile;\n        }\n\n        @Override\n        public void configure( Map config )\n        {\n            if ( targetFile != null )\n            {\n                level.configureWithDiffLog( config, targetFile );\n            }\n            else\n            {\n                level.configure( config );\n            }\n        }\n    }\n\n    private static class CommandFactory extends XaCommandFactory\n    {\n        @Override\n        public XaCommand readCommand( ReadableByteChannel byteChannel, ByteBuffer buffer ) throws IOException\n        {\n            return Command.readCommand( null, byteChannel, buffer );\n        }\n    }\n}\n","avg_line_length":35.7444794953,"max_line_length":118,"alphanum_fraction":0.5905039273}
{"size":745,"ext":"java","lang":"Java","max_stars_count":1.0,"content":"\/**\n * Copyright (2018, ) Institute of Software, Chinese Academy of Sciences\n *\/\npackage com.github.isdream.openstack;\n\nimport org.openstack4j.api.OSClient.OSClientV3;\nimport org.openstack4j.api.compute.ComputeService;\nimport org.openstack4j.openstack.internal.OSClientSession.OSClientSessionV3;\n\n\/**\n * @author wuheng{@otcaix.iscas.ac.cn}\n * 2018\u5e743\u670830\u65e5\n *\n *\/\npublic class OpenStackAPIExample {\n\n\tfinal static String REGINE_ID = \"cn-qingdao\";\n\n\tfinal static String ACCESS_KEY_ID = \"\";\n\n\tfinal static String ACCESS_KEY_SECRET = \"\";\n\n\t\/**\n\t * @param args\n\t * @throws Exception\n\t *\/\n\tpublic static void main(String[] args) throws Exception {\n\t\tComputeService os = OSClientSessionV3.getCurrent().compute();\n\t\tSystem.out.println(os.images());\n\t}\n\n}\n","avg_line_length":22.5757575758,"max_line_length":76,"alphanum_fraction":0.7395973154}
{"size":6107,"ext":"java","lang":"Java","max_stars_count":1.0,"content":"\/\/ Copyright (c) YugaByte, Inc.\n\npackage com.yugabyte.yw.commissioner.tasks;\n\nimport static com.yugabyte.yw.common.ModelFactory.createUniverse;\nimport static org.junit.Assert.*;\nimport static org.mockito.ArgumentMatchers.any;\nimport static org.mockito.Mockito.times;\nimport static org.mockito.Mockito.verify;\nimport static org.mockito.Mockito.when;\n\nimport java.util.List;\nimport java.util.UUID;\n\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.mockito.InjectMocks;\nimport org.mockito.junit.MockitoJUnitRunner;\n\nimport com.google.common.collect.ImmutableList;\nimport com.yugabyte.yw.commissioner.Commissioner;\nimport com.yugabyte.yw.common.ApiUtils;\nimport com.yugabyte.yw.common.ModelFactory;\nimport com.yugabyte.yw.common.ShellResponse;\nimport com.yugabyte.yw.forms.UniverseDefinitionTaskParams;\nimport com.yugabyte.yw.models.Alert;\nimport com.yugabyte.yw.models.Backup;\nimport com.yugabyte.yw.models.CustomerConfig;\nimport com.yugabyte.yw.models.AvailabilityZone;\nimport com.yugabyte.yw.models.Region;\nimport com.yugabyte.yw.models.TaskInfo;\nimport com.yugabyte.yw.models.Universe;\nimport com.yugabyte.yw.models.helpers.TaskType;\n\n@RunWith(MockitoJUnitRunner.class)\npublic class DestroyUniverseTest extends CommissionerBaseTest {\n\n  private static final String ALERT_TEST_MESSAGE = \"Test message\";\n  private CustomerConfig s3StorageConfig;\n\n  @InjectMocks private Commissioner commissioner;\n\n  private Universe defaultUniverse;\n  private ShellResponse dummyShellResponse;\n\n  @Before\n  public void setUp() {\n    super.setUp();\n    Region region = Region.create(defaultProvider, \"region-1\", \"Region 1\", \"yb-image-1\");\n    AvailabilityZone.createOrThrow(region, \"az-1\", \"AZ 1\", \"subnet-1\");\n    UniverseDefinitionTaskParams.UserIntent userIntent;\n    \/\/ create default universe\n    userIntent = new UniverseDefinitionTaskParams.UserIntent();\n    userIntent.numNodes = 3;\n    userIntent.ybSoftwareVersion = \"yb-version\";\n    userIntent.accessKeyCode = \"demo-access\";\n    userIntent.replicationFactor = 3;\n    userIntent.regionList = ImmutableList.of(region.uuid);\n    defaultUniverse = createUniverse(defaultCustomer.getCustomerId());\n    Universe.saveDetails(\n        defaultUniverse.universeUUID,\n        ApiUtils.mockUniverseUpdater(userIntent, false \/* setMasters *\/));\n\n    dummyShellResponse = new ShellResponse();\n    dummyShellResponse.message = \"true\";\n    when(mockNodeManager.nodeCommand(any(), any())).thenReturn(dummyShellResponse);\n  }\n\n  @Test\n  public void testReleaseUniverseAndResolveAlerts() {\n    DestroyUniverse.Params taskParams = new DestroyUniverse.Params();\n    taskParams.universeUUID = defaultUniverse.universeUUID;\n    taskParams.customerUUID = defaultCustomer.uuid;\n    taskParams.isForceDelete = Boolean.FALSE;\n    taskParams.isDeleteBackups = Boolean.FALSE;\n\n    Alert.create(\n        defaultCustomer.uuid,\n        defaultUniverse.universeUUID,\n        Alert.TargetType.UniverseType,\n        \"errorCode\",\n        \"Warning\",\n        ALERT_TEST_MESSAGE);\n    Alert.create(\n        defaultCustomer.uuid,\n        defaultUniverse.universeUUID,\n        Alert.TargetType.UniverseType,\n        \"errorCode2\",\n        \"Warning\",\n        ALERT_TEST_MESSAGE);\n\n    submitTask(taskParams, 4);\n    assertFalse(Universe.checkIfUniverseExists(defaultUniverse.name));\n\n    List alerts = 
Alert.list(defaultCustomer.uuid);\n    assertEquals(2, alerts.size());\n    assertEquals(Alert.State.RESOLVED, alerts.get(0).getState());\n    assertEquals(Alert.State.RESOLVED, alerts.get(1).getState());\n  }\n\n  @Test\n  public void testDestroyUniverseAndDeleteBackups() {\n    s3StorageConfig = ModelFactory.createS3StorageConfig(defaultCustomer);\n    Backup b =\n        ModelFactory.createBackup(\n            defaultCustomer.uuid, defaultUniverse.universeUUID, s3StorageConfig.configUUID);\n    b.transitionState(Backup.BackupState.Completed);\n    ShellResponse shellResponse = new ShellResponse();\n    shellResponse.message = \"{\\\"success\\\": true}\";\n    shellResponse.code = 0;\n    when(mockTableManager.deleteBackup(any())).thenReturn(shellResponse);\n    DestroyUniverse.Params taskParams = new DestroyUniverse.Params();\n    taskParams.universeUUID = defaultUniverse.universeUUID;\n    taskParams.customerUUID = defaultCustomer.uuid;\n    taskParams.isForceDelete = Boolean.FALSE;\n    taskParams.isDeleteBackups = Boolean.TRUE;\n    TaskInfo taskInfo = submitTask(taskParams, 4);\n\n    Backup backup = Backup.get(defaultCustomer.uuid, b.backupUUID);\n    verify(mockTableManager, times(1)).deleteBackup(any());\n    \/\/ Backup state should be DELETED.\n    assertEquals(Backup.BackupState.Deleted, backup.state);\n    assertFalse(Universe.checkIfUniverseExists(defaultUniverse.name));\n  }\n\n  @Test\n  public void testDestroyUniverseAndDeleteBackupsFalse() {\n    s3StorageConfig = ModelFactory.createS3StorageConfig(defaultCustomer);\n    Backup b =\n        ModelFactory.createBackup(\n            defaultCustomer.uuid, defaultUniverse.universeUUID, s3StorageConfig.configUUID);\n    b.transitionState(Backup.BackupState.Completed);\n    DestroyUniverse.Params taskParams = new DestroyUniverse.Params();\n    taskParams.universeUUID = defaultUniverse.universeUUID;\n    taskParams.customerUUID = defaultCustomer.uuid;\n    taskParams.isForceDelete = Boolean.FALSE;\n    taskParams.isDeleteBackups = Boolean.FALSE;\n    TaskInfo taskInfo = submitTask(taskParams, 4);\n    b.setTaskUUID(taskInfo.getTaskUUID());\n\n    Backup backup = Backup.get(defaultCustomer.uuid, b.backupUUID);\n    verify(mockTableManager, times(0)).deleteBackup(any());\n    \/\/ Backup should be in COMPLETED state.\n    assertEquals(Backup.BackupState.Completed, backup.state);\n    assertFalse(Universe.checkIfUniverseExists(defaultUniverse.name));\n  }\n\n  private TaskInfo submitTask(DestroyUniverse.Params taskParams, int version) {\n    taskParams.expectedUniverseVersion = version;\n    try {\n      UUID taskUUID = commissioner.submit(TaskType.DestroyUniverse, taskParams);\n      return waitForTask(taskUUID);\n    } catch (InterruptedException e) {\n      assertNull(e.getMessage());\n    }\n    return null;\n  }\n}\n","avg_line_length":38.16875,"max_line_length":92,"alphanum_fraction":0.7570001637}
{"size":9376,"ext":"java","lang":"Java","max_stars_count":15.0,"content":"\/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements.  See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership.  The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License.  You may obtain a copy of the License at\n *\n *   http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing,\n * software distributed under the License is distributed on an\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n * KIND, either express or implied.  See the License for the\n * specific language governing permissions and limitations\n * under the License.\n *\/\n\npackage org.apache.sysml.test.integration.functions.aggregate;\n\nimport org.apache.sysml.api.DMLScript;\nimport org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM;\nimport org.apache.sysml.lops.LopProperties.ExecType;\nimport org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex;\nimport org.apache.sysml.test.integration.AutomatedTestBase;\nimport org.apache.sysml.test.integration.TestConfiguration;\nimport org.apache.sysml.test.utils.TestUtils;\nimport org.junit.Test;\n\nimport java.util.HashMap;\n\n\/**\n * Test the standard deviation function, \"sd(X)\".\n *\/\npublic class StdDevTest extends AutomatedTestBase {\n\n    private static final String TEST_NAME = \"StdDev\";\n    private static final String TEST_DIR = \"functions\/aggregate\/\";\n    private static final String TEST_CLASS_DIR = TEST_DIR + StdDevTest.class.getSimpleName() + \"\/\";\n    private static final String INPUT_NAME = \"X\";\n    private static final String OUTPUT_NAME = \"stdDev\";\n\n    private static final int rows = 1234;\n    private static final int cols = 1432;\n    private static final double sparsitySparse = 0.2;\n    private static final double sparsityDense = 0.7;\n    private static final double eps = Math.pow(10, -10);\n\n    private enum Sparsity {EMPTY, SPARSE, DENSE}\n    private enum DataType {MATRIX, ROWVECTOR, COLUMNVECTOR}\n\n    @Override\n    public void setUp() {\n        TestUtils.clearAssertionInformation();\n        TestConfiguration config = new TestConfiguration(TEST_CLASS_DIR, TEST_NAME);\n        addTestConfiguration(TEST_NAME, config);\n    }\n\n    \/\/ Dense matrix\n    @Test\n    public void testStdDevDenseMatrixCP() {\n        testStdDev(TEST_NAME, Sparsity.DENSE,  DataType.MATRIX, ExecType.CP);\n    }\n\n    @Test\n    public void testStdDevDenseMatrixSpark() {\n        testStdDev(TEST_NAME, Sparsity.DENSE,  DataType.MATRIX, ExecType.SPARK);\n    }\n\n    @Test\n    public void testStdDevDenseMatrixMR() {\n        testStdDev(TEST_NAME, Sparsity.DENSE,  DataType.MATRIX, ExecType.MR);\n    }\n\n    \/\/ Dense row vector\n    @Test\n    public void testStdDevDenseRowVectorCP() {\n        testStdDev(TEST_NAME, Sparsity.DENSE,  DataType.ROWVECTOR, ExecType.CP);\n    }\n\n    @Test\n    public void testStdDevDenseRowVectorSpark() {\n        testStdDev(TEST_NAME, Sparsity.DENSE,  DataType.ROWVECTOR, ExecType.SPARK);\n    }\n\n    @Test\n    public void testStdDevDenseRowVectorMR() {\n        testStdDev(TEST_NAME, Sparsity.DENSE,  DataType.ROWVECTOR, ExecType.MR);\n    }\n\n    \/\/ Dense column vector\n    @Test\n    public void testStdDevDenseColVectorCP() {\n        testStdDev(TEST_NAME, 
Sparsity.DENSE,  DataType.COLUMNVECTOR, ExecType.CP);\n    }\n\n    @Test\n    public void testStdDevDenseColVectorSpark() {\n        testStdDev(TEST_NAME, Sparsity.DENSE,  DataType.COLUMNVECTOR, ExecType.SPARK);\n    }\n\n    @Test\n    public void testStdDevDenseColVectorMR() {\n        testStdDev(TEST_NAME, Sparsity.DENSE,  DataType.COLUMNVECTOR, ExecType.MR);\n    }\n\n    \/\/ Sparse matrix\n    @Test\n    public void testStdDevSparseMatrixCP() {\n        testStdDev(TEST_NAME, Sparsity.SPARSE,  DataType.MATRIX, ExecType.CP);\n    }\n\n    @Test\n    public void testStdDevSparseMatrixSpark() {\n        testStdDev(TEST_NAME, Sparsity.SPARSE,  DataType.MATRIX, ExecType.SPARK);\n    }\n\n    @Test\n    public void testStdDevSparseMatrixMR() {\n        testStdDev(TEST_NAME, Sparsity.SPARSE,  DataType.MATRIX, ExecType.MR);\n    }\n\n    \/\/ Sparse row vector\n    @Test\n    public void testStdDevSparseRowVectorCP() {\n        testStdDev(TEST_NAME, Sparsity.SPARSE,  DataType.ROWVECTOR, ExecType.CP);\n    }\n\n    @Test\n    public void testStdDevSparseRowVectorSpark() {\n        testStdDev(TEST_NAME, Sparsity.SPARSE,  DataType.ROWVECTOR, ExecType.SPARK);\n    }\n\n    @Test\n    public void testStdDevSparseRowVectorMR() {\n        testStdDev(TEST_NAME, Sparsity.SPARSE,  DataType.ROWVECTOR, ExecType.MR);\n    }\n\n    \/\/ Sparse column vector\n    @Test\n    public void testStdDevSparseColVectorCP() {\n        testStdDev(TEST_NAME, Sparsity.SPARSE,  DataType.COLUMNVECTOR, ExecType.CP);\n    }\n\n    @Test\n    public void testStdDevSparseColVectorSpark() {\n        testStdDev(TEST_NAME, Sparsity.SPARSE,  DataType.COLUMNVECTOR, ExecType.SPARK);\n    }\n\n    @Test\n    public void testStdDevSparseColVectorMR() {\n        testStdDev(TEST_NAME, Sparsity.SPARSE,  DataType.COLUMNVECTOR, ExecType.MR);\n    }\n\n    \/\/ Empty matrix\n    @Test\n    public void testStdDevEmptyMatrixCP() {\n        testStdDev(TEST_NAME, Sparsity.EMPTY,  DataType.MATRIX, ExecType.CP);\n    }\n\n    @Test\n    public void testStdDevEmptyMatrixSpark() {\n        testStdDev(TEST_NAME, Sparsity.EMPTY,  DataType.MATRIX, ExecType.SPARK);\n    }\n\n    @Test\n    public void testStdDevEmptyMatrixMR() {\n        testStdDev(TEST_NAME, Sparsity.EMPTY,  DataType.MATRIX, ExecType.MR);\n    }\n\n    \/\/ Empty row vector\n    @Test\n    public void testStdDevEmptyRowVectorCP() {\n        testStdDev(TEST_NAME, Sparsity.EMPTY,  DataType.ROWVECTOR, ExecType.CP);\n    }\n\n    @Test\n    public void testStdDevEmptyRowVectorSpark() {\n        testStdDev(TEST_NAME, Sparsity.EMPTY,  DataType.ROWVECTOR, ExecType.SPARK);\n    }\n\n    @Test\n    public void testStdDevEmptyRowVectorMR() {\n        testStdDev(TEST_NAME, Sparsity.EMPTY,  DataType.ROWVECTOR, ExecType.MR);\n    }\n\n    \/\/ Empty column vector\n    @Test\n    public void testStdDevEmptyColVectorCP() {\n        testStdDev(TEST_NAME, Sparsity.EMPTY,  DataType.COLUMNVECTOR, ExecType.CP);\n    }\n\n    @Test\n    public void testStdDevEmptyColVectorSpark() {\n        testStdDev(TEST_NAME, Sparsity.EMPTY,  DataType.COLUMNVECTOR, ExecType.SPARK);\n    }\n\n    @Test\n    public void testStdDevEmptyColVectorMR() {\n        testStdDev(TEST_NAME, Sparsity.EMPTY,  DataType.COLUMNVECTOR, ExecType.MR);\n    }\n\n    \/**\n     * Test the standard deviation function, \"sd(X)\", on\n     * dense\/sparse matrices\/vectors on the CP\/Spark\/MR platforms.\n     *\n     * @param testName The name of this test case.\n     * @param sparsity Selection between empty, sparse, and dense data.\n     
* @param dataType Selection between a matrix, a row vector, and a\n     *                 column vector.\n     * @param platform Selection between CP\/Spark\/MR platforms.\n     *\/\n    private void testStdDev(String testName, Sparsity sparsity, DataType dataType,\n                            ExecType platform) {\n    \tboolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;\n    \tRUNTIME_PLATFORM platformOld = setRuntimePlatform(platform);\n    \tif(shouldSkipTest())\n\t\t\treturn;\n\n        try {\n            \/\/ Create and load test configuration\n            getAndLoadTestConfiguration(testName);\n            String HOME = SCRIPT_DIR + TEST_DIR;\n            fullDMLScriptName = HOME + testName + \".dml\";\n            programArgs = new String[]{\"-explain\", \"-stats\", \"-args\",\n                    input(INPUT_NAME), output(OUTPUT_NAME)};\n            fullRScriptName = HOME + testName + \".R\";\n            rCmd = \"Rscript\" + \" \" + fullRScriptName + \" \" + inputDir() + \" \" + expectedDir();\n\n            \/\/ Generate data\n            \/\/ - sparsity\n            double sparsityVal;\n            switch (sparsity) {\n                case EMPTY:\n                    sparsityVal = 0;\n                    break;\n                case SPARSE:\n                    sparsityVal = sparsitySparse;\n                    break;\n                case DENSE:\n                default:\n                    sparsityVal = sparsityDense;\n            }\n            \/\/ - size\n            int r;\n            int c;\n            switch (dataType) {\n                case ROWVECTOR:\n                    r = 1;\n                    c = cols;\n                    break;\n                case COLUMNVECTOR:\n                    r = rows;\n                    c = 1;\n                    break;\n                case MATRIX:\n                default:\n                    r = rows;\n                    c = cols;\n            }\n            \/\/ - generation\n            double[][] X = getRandomMatrix(r, c, -1, 1, sparsityVal, 7);\n            writeInputMatrixWithMTD(INPUT_NAME, X, true);\n\n            \/\/ Run DML and R scripts\n            runTest(true, false, null, -1);\n            runRScript(true);\n\n            \/\/ Compare output matrices\n            HashMap dmlfile = readDMLMatrixFromHDFS(OUTPUT_NAME);\n            HashMap rfile  = readRMatrixFromFS(OUTPUT_NAME);\n            TestUtils.compareMatrices(dmlfile, rfile, eps, \"Stat-DML\", \"Stat-R\");\n        }\n        finally {\n            \/\/ Reset settings\n            rtplatform = platformOld;\n            DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;\n        }\n    }\n}\n","avg_line_length":33.2482269504,"max_line_length":99,"alphanum_fraction":0.6440912969}
{"size":2739,"ext":"java","lang":"Java","max_stars_count":null,"content":"\/*\n * Copyright (c) 2018 Hemendra Sharma\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\/\n\npackage com.hemendra.comicreader.model.source.comics.remote;\n\nimport com.hemendra.comicreader.model.data.Chapter;\nimport com.hemendra.comicreader.model.http.ConnectionCallback;\nimport com.hemendra.comicreader.model.http.ContentDownloader;\nimport com.hemendra.comicreader.model.source.FailureReason;\nimport com.hemendra.comicreader.model.source.RemoteConfig;\nimport com.hemendra.comicreader.model.source.comics.OnComicsLoadedListener;\nimport com.hemendra.comicreader.model.utils.CustomAsyncTask;\n\nimport java.net.HttpURLConnection;\n\n\/**\n * A background worker thread to download the list of pages for any given chapter.\n * @author Hemendra Sharma\n * @see CustomAsyncTask\n *\/\npublic class RemoteChapterPagesLoader extends CustomAsyncTask {\n\n    private OnComicsLoadedListener listener;\n    private FailureReason reason = FailureReason.UNKNOWN_REMOTE_ERROR;\n\n    RemoteChapterPagesLoader(OnComicsLoadedListener listener) {\n        this.listener = listener;\n    }\n\n    @Override\n    protected Chapter doInBackground(Chapter... params) {\n        String json = ContentDownloader.downloadAsString(\n                RemoteConfig.buildChapterUrl(params[0].id),\n                new ConnectionCallback() {\n                    @Override\n                    public void onResponseCode(int code) {\n                        switch (code) {\n                            case HttpURLConnection.HTTP_NOT_FOUND:\n                                reason = FailureReason.API_MISSING;\n                                break;\n                            default:\n                                reason = FailureReason.INVALID_RESPONSE_FROM_SERVER;\n                        }\n                    }\n                });\n        if(json != null && json.length() > 0) {\n            return ComicsParser.parseChapterPagesFromJSON(params[0], json);\n        }\n        return null;\n    }\n\n    @Override\n    protected void onPostExecute(Chapter chapter) {\n        if(listener != null) {\n            if(chapter != null)\n                listener.onPagesLoaded(chapter);\n            else\n                listener.onFailedToLoadPages(reason);\n        }\n    }\n\n}\n","avg_line_length":36.0394736842,"max_line_length":85,"alphanum_fraction":0.6568090544}
{"size":788,"ext":"java","lang":"Java","max_stars_count":5.0,"content":"package com.backdoored.hacks.chatbot.ChatBotScriptHandler;\n\nimport java.util.*;\nimport org.apache.logging.log4j.*;\nimport javax.script.*;\n\npublic class ScriptHandler\n{\n    private final ScriptEngine in;\n    \n    public ScriptHandler() {\n        super();\n        Objects.requireNonNull(this.in = new ScriptEngineManager(null).getEngineByName(\"nashorn\"));\n    }\n    \n    public ScriptHandler a(final String s) throws ScriptException {\n        this.in.eval(s);\n        return this;\n    }\n    \n    public ScriptHandler a(final Logger logger) {\n        this.in.put(\"logger\", logger);\n        return this;\n    }\n    \n    public Object a(final String s, final Object... array) throws ScriptException, NoSuchMethodException {\n        return ((Invocable)this.in).invokeFunction(s, array);\n    }\n}\n","avg_line_length":26.2666666667,"max_line_length":106,"alphanum_fraction":0.6662436548}
{"size":8741,"ext":"java","lang":"Java","max_stars_count":null,"content":"package goodtrailer.adventtic.smeltery;\n\nimport java.util.function.Consumer;\n\nimport goodtrailer.adventtic.AdventTiC;\nimport goodtrailer.adventtic.fluids.AdventTiCFluids;\nimport net.minecraft.block.Block;\nimport net.minecraft.data.DataGenerator;\nimport net.minecraft.data.IFinishedRecipe;\nimport net.minecraft.entity.Entity;\nimport net.minecraft.entity.EntityType;\nimport net.minecraft.item.Item;\nimport net.minecraft.item.crafting.Ingredient;\nimport net.minecraft.util.IItemProvider;\nimport net.minecraftforge.fluids.FluidStack;\nimport net.minecraftforge.fluids.ForgeFlowingFluid;\nimport net.minecraftforge.fml.RegistryObject;\nimport net.minecraftforge.registries.IForgeRegistryEntry;\nimport net.tslat.aoa3.common.registration.AoABlocks;\nimport net.tslat.aoa3.common.registration.AoAEntities;\nimport net.tslat.aoa3.common.registration.AoAItems;\nimport net.tslat.aoa3.common.registration.AoATools;\nimport net.tslat.aoa3.common.registration.AoAWeapons;\nimport slimeknights.mantle.recipe.EntityIngredient;\nimport slimeknights.mantle.registration.object.FluidObject;\nimport slimeknights.tconstruct.common.data.BaseRecipeProvider;\nimport slimeknights.tconstruct.library.data.recipe.ISmelteryRecipeHelper;\nimport slimeknights.tconstruct.library.recipe.FluidValues;\nimport slimeknights.tconstruct.library.recipe.entitymelting.EntityMeltingRecipeBuilder;\nimport slimeknights.tconstruct.library.recipe.melting.MeltingRecipeBuilder;\n\npublic class AdventTiCMeltingRecipeProvider extends BaseRecipeProvider\n        implements ISmelteryRecipeHelper\n{\n    public static final String NAME = \"AdventTiC Melting Recipes\";\n    public static final String FOLDER = \"smeltery\/melting\/\";\n\n    public AdventTiCMeltingRecipeProvider(DataGenerator gen)\n    {\n        super(gen);\n    }\n\n    @Override\n    public String getName()\n    {\n        return NAME;\n    }\n\n    @Override\n    public String getModId()\n    {\n        return AdventTiC.MOD_ID;\n    }\n\n    @Override\n    protected void buildShapelessRecipes(Consumer con)\n    {\n        metal(con, AdventTiCFluids.MOLTEN_BARONYTE, AdventTiCByproducts.VARSIUM);\n        metal(con, AdventTiCFluids.MOLTEN_BLAZIUM, AdventTiCByproducts.BARONYTE);\n        metal(con, AdventTiCFluids.MOLTEN_ELECANIUM);\n        metal(con, AdventTiCFluids.MOLTEN_EMBERSTONE);\n        metal(con, AdventTiCFluids.MOLTEN_GHASTLY, AdventTiCByproducts.GHOULISH);\n        metal(con, AdventTiCFluids.MOLTEN_GHOULISH, AdventTiCByproducts.GHASTLY);\n        metal(con, AdventTiCFluids.MOLTEN_LIMONITE, AdventTiCByproducts.ROSITE);\n        metal(con, AdventTiCFluids.MOLTEN_LUNAR);\n        metal(con, AdventTiCFluids.MOLTEN_LYON);\n        metal(con, AdventTiCFluids.MOLTEN_MYSTITE);\n        metal(con, AdventTiCFluids.MOLTEN_ROSITE, AdventTiCByproducts.LIMONITE);\n        metal(con, AdventTiCFluids.MOLTEN_SHYRESTONE);\n        metal(con, AdventTiCFluids.MOLTEN_SKELETAL);\n        metal(con, AdventTiCFluids.MOLTEN_VARSIUM, AdventTiCByproducts.BARONYTE);\n\n        tools(con, AdventTiCFluids.MOLTEN_EMBERSTONE, AoATools.EMBERSTONE_SHOVEL,\n                AoAWeapons.EMBERSTONE_SWORD, AoATools.EMBERSTONE_AXE, AoATools.EMBERSTONE_PICKAXE);\n        tools(con, AdventTiCFluids.MOLTEN_LIMONITE, AoATools.LIMONITE_SHOVEL,\n                AoAWeapons.LIMONITE_SWORD, AoATools.LIMONITE_AXE, AoATools.LIMONITE_PICKAXE);\n        tools(con, AdventTiCFluids.MOLTEN_ROSITE, AoATools.ROSITE_SHOVEL,\n                
AoAWeapons.ROSITE_SWORD, AoATools.ROSITE_AXE, AoATools.ROSITE_PICKAXE);\n\n        lamp(con, AdventTiCFluids.MOLTEN_BARONYTE, AoABlocks.BARONYTE_LAMP);\n        lamp(con, AdventTiCFluids.MOLTEN_BLAZIUM, AoABlocks.BLAZIUM_LAMP);\n        lamp(con, AdventTiCFluids.MOLTEN_ELECANIUM, AoABlocks.ELECANIUM_LAMP);\n        lamp(con, AdventTiCFluids.MOLTEN_EMBERSTONE, AoABlocks.EMBERSTONE_LAMP);\n        lamp(con, AdventTiCFluids.MOLTEN_GHASTLY, AoABlocks.GHASTLY_LAMP);\n        lamp(con, AdventTiCFluids.MOLTEN_GHOULISH, AoABlocks.GHOULISH_LAMP);\n        lamp(con, AdventTiCFluids.MOLTEN_LIMONITE, AoABlocks.LIMONITE_LAMP);\n        lamp(con, AdventTiCFluids.MOLTEN_LYON, AoABlocks.LYON_LAMP);\n        lamp(con, AdventTiCFluids.MOLTEN_MYSTITE, AoABlocks.MYSTIC_LAMP);\n        lamp(con, AdventTiCFluids.MOLTEN_ROSITE, AoABlocks.ROSITE_LAMP);\n\n        items(con, AdventTiCFluids.MOLTEN_BARONYTE, \"sword\", 2 * FluidValues.INGOT,\n                AoAWeapons.BARON_SWORD);\n        items(con, AdventTiCFluids.MOLTEN_LIMONITE, \"rod\", 1 * FluidValues.INGOT,\n                AoAItems.LIMONITE_ROD);\n        items(con, AdventTiCFluids.MOLTEN_LIMONITE, \"cannonball\", 3 * FluidValues.NUGGET,\n                AoAItems.CANNONBALL);\n        items(con, AdventTiCFluids.MOLTEN_SKELETAL, \"bow\", 3 * FluidValues.INGOT,\n                AoAWeapons.SKELETAL_BOW);\n        items(con, AdventTiCFluids.MOLTEN_CHARGER, \"raw_shank\", 17,\n                AdventTiCByproducts.SHYRESTONE_LESS, AoAItems.RAW_CHARGER_SHANK);\n        items(con, AdventTiCFluids.MOLTEN_CHARGER, \"cooked_shank\", 20,\n                AdventTiCByproducts.SHYRESTONE, AoAItems.COOKED_CHARGER_SHANK);\n\n        entities(con, AdventTiCFluids.MOLTEN_CHARGER, \"charger\", 6, 2, AoAEntities.Mobs.CHARGER,\n                AoAEntities.Mobs.DESERT_CHARGER, AoAEntities.Mobs.HILL_CHARGER,\n                AoAEntities.Mobs.SEA_CHARGER, AoAEntities.Mobs.SNOW_CHARGER,\n                AoAEntities.Mobs.SWAMP_CHARGER, AoAEntities.Mobs.VOID_CHARGER);\n        entities(con, AdventTiCFluids.MOLTEN_CHARGER, \"king_charger\", 16, 2,\n                AoAEntities.Mobs.KING_CHARGER);\n    }\n\n    private void metal(Consumer con, FluidObject molten,\n            AdventTiCByproducts.Byproduct... byproduct)\n    {\n        String mat = molten.getId().getPath().substring(AdventTiCFluids.MOLTEN_PREFIX.length());\n        metalMelting(con, molten.get(), mat, true, FOLDER, true, byproduct);\n    }\n\n    private void tools(Consumer con, FluidObject molten,\n            RegistryObject shovel, RegistryObject sword, RegistryObject axe,\n            RegistryObject pickaxe)\n    {\n        items(con, molten, \"shovel\", 1 * FluidValues.INGOT, shovel);\n        items(con, molten, \"sword\", 2 * FluidValues.INGOT, sword);\n        items(con, molten, \"axes\", 3 * FluidValues.INGOT, axe, pickaxe);\n    }\n\n    private void lamp(Consumer con, FluidObject molten,\n            RegistryObject lamp)\n    {\n        items(con, molten, \"lamp\", 1 * FluidValues.INGOT, lamp);\n    }\n\n    @SafeVarargs\n    private final  & IItemProvider> void items(\n            Consumer con, FluidObject molten, String name,\n            int amount, RegistryObject item, RegistryObject... 
items)\n    {\n        items(con, molten, name, amount, null, item, items);\n    }\n\n    @SafeVarargs\n    private final  & IItemProvider> void items(\n            Consumer con, FluidObject molten, String name,\n            int amount, AdventTiCByproducts.Byproduct byproduct,\n            RegistryObject item, RegistryObject... items)\n    {\n        String mat = molten.getId().getPath().substring(AdventTiCFluids.MOLTEN_PREFIX.length());\n        FluidStack output = new FluidStack(molten.get(), amount);\n\n        IItemProvider[] itemProviders = new IItemProvider[items.length + 1];\n        itemProviders[0] = item.get();\n        for (int i = 0; i < items.length; i++)\n            itemProviders[i + 1] = items[i].get();\n\n        MeltingRecipeBuilder builder = MeltingRecipeBuilder.melting(Ingredient.of(itemProviders),\n                output, amount);\n\n        if (byproduct != null)\n            builder.addByproduct(new FluidStack(byproduct.getFluid(), byproduct.getNuggets()));\n\n        builder.build(con, modResource(FOLDER + mat + \"\/\" + name));\n    }\n\n    @SafeVarargs\n    private final  void entities(Consumer con,\n            FluidObject molten, String name, int amount, int damage,\n            RegistryObject> entity, RegistryObject>... entities)\n    {\n        String mat = molten.getId().getPath().substring(AdventTiCFluids.MOLTEN_PREFIX.length());\n\n        EntityType[] entityTypes = new EntityType[entities.length + 1];\n        entityTypes[0] = entity.get();\n        for (int i = 0; i < entities.length; i++)\n            entityTypes[i + 1] = entities[i].get();\n\n        FluidStack output = new FluidStack(molten.get(), amount);\n        EntityMeltingRecipeBuilder.melting(EntityIngredient.of(entityTypes), output, damage)\n                .build(con, modResource(FOLDER + mat + \"\/\" + name));\n    }\n}\n","avg_line_length":48.0274725275,"max_line_length":99,"alphanum_fraction":0.7195973001}
{"size":3283,"ext":"java","lang":"Java","max_stars_count":1350.0,"content":"\/**\n * Copyright Microsoft Corporation\n * \n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\/\npackage com.microsoft.windowsazure.services.blob.models;\n\nimport java.util.HashMap;\n\n\/**\n * A wrapper class for the response returned from a Blob Service REST API Get\n * Blob Properties operation. This is returned by calls to implementations of\n * {@link com.microsoft.windowsazure.services.blob.BlobContract#getBlobProperties(String, String)} and\n * {@link com.microsoft.windowsazure.services.blob.BlobContract#getBlobProperties(String, String, GetBlobPropertiesOptions)}\n * .\n * 
\n * See the Get\n * Blob Properties<\/a> documentation on MSDN for details of the underlying Blob\n * Service REST API operation.\n *\/\npublic class GetBlobPropertiesResult {\n private BlobProperties properties;\n private HashMap metadata = new HashMap();\n\n \/**\n * Gets the standard HTTP properties and system properties of the blob.\n * \n * @return A {@link BlobProperties} instance containing the properties of\n * the blob.\n *\/\n public BlobProperties getProperties() {\n return properties;\n }\n\n \/**\n * Reserved for internal use. Sets the blob properties from the headers\n * returned in the response.\n *
\n * This method is invoked by the API to set the value from the Blob Service\n * REST API operation response returned by the server.\n * \n * @param properties\n * A {@link BlobProperties} instance containing the properties of\n * the blob.\n *\/\n public void setProperties(BlobProperties properties) {\n this.properties = properties;\n }\n\n \/**\n * Gets the user-defined blob metadata as a map of name and value pairs. The\n * metadata is for client use and is opaque to the server.\n * \n * @return A {@link java.util.HashMap} of key-value pairs of {@link String}\n * containing the names and values of the blob metadata.\n *\/\n public HashMap getMetadata() {\n return metadata;\n }\n\n \/**\n * Reserved for internal use. Sets the blob metadata from the\n * x-ms-meta-name:value<\/em><\/code> headers returned in the\n * response.\n *
\n * This method is invoked by the API to set the value from the Blob Service\n * REST API operation response returned by the server.\n * \n * @param metadata\n * A {@link java.util.HashMap} of key-value pairs of\n * {@link String} containing the names and values of the blob\n * metadata.\n *\/\n public void setMetadata(HashMap metadata) {\n this.metadata = metadata;\n }\n}\n","avg_line_length":37.3068181818,"max_line_length":124,"alphanum_fraction":0.6786475784} {"size":872,"ext":"java","lang":"Java","max_stars_count":1.0,"content":"package ru.job4j.addresses;\r\n\r\nimport java.util.ArrayList;\r\nimport org.junit.Test;\r\nimport static org.junit.Assert.assertEquals;\r\n\/**\r\n * \u041a\u043b\u0430\u0441\u0441 ProfilesTest \u0442\u0435\u0441\u0442\u0438\u0440\u0443\u0435\u0442 \u043a\u043b\u0430\u0441\u0441 Profiles.\r\n *\r\n * @author Gureyev Ilya (mailto:ill-jah@yandex.ru)\r\n * @version 2019-03-11\r\n * @since 2019-03-11\r\n *\/\r\npublic class ProfilesTest {\r\n \/**\r\n * \u0422\u0435\u0441\u0442\u0438\u0440\u0443\u0435\u0442 public List Address collect(List Profile profiles).\r\n *\/\r\n @Test\r\n public void testCollect() {\r\n ArrayList
expected = new ArrayList<>();\r\n expected.add(new Address(0, 1, \"\u041e\u0445\u043e\u0442\u043d\u044b\u0439 \u0440\u044f\u0434\", \"\u041c\u043e\u0441\u043a\u0432\u0430\"));\r\n expected.add(new Address(0, 7, \"\u041c\u043e\u0445\u043e\u0432\u0430\u044f\", \"\u041c\u043e\u0441\u043a\u0432\u0430\"));\r\n ArrayList profiles = new ArrayList<>();\r\n expected.forEach(address -> profiles.add(new Profile(address)));\r\n Profiles p = new Profiles();\r\n assertEquals(expected, p.collect(profiles));\r\n }\r\n}","avg_line_length":32.2962962963,"max_line_length":73,"alphanum_fraction":0.6410550459} {"size":1202,"ext":"java","lang":"Java","max_stars_count":null,"content":"package hu.szigyi.nested.hierarchical.graph.util;\n\nimport hu.szigyi.nested.hierarchical.graph.domain.generated.org.graphdrawing.graphml.xmlns.GraphmlType;\nimport hu.szigyi.nested.hierarchical.graph.domain.generated.org.graphdrawing.graphml.xmlns.ObjectFactory;\n\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.net.URL;\n\nimport javax.xml.bind.JAXBContext;\nimport javax.xml.bind.JAXBElement;\nimport javax.xml.bind.JAXBException;\nimport javax.xml.bind.Unmarshaller;\n\npublic class GraphMLUtil {\n\n\tpublic GraphmlType readGraphML(final String path) throws JAXBException {\n\t\tfinal URL resource = this.getClass().getResource(path);\n\n\t\ttry (InputStream stream = resource.openStream()) {\n\t\t\tfinal JAXBContext jaxbContext = JAXBContext.newInstance(ObjectFactory.class);\n\n\t\t\tfinal Unmarshaller jaxbUnmarshaller = jaxbContext.createUnmarshaller();\n\t\t\tfinal JAXBElement jaxbElement = (JAXBElement) jaxbUnmarshaller.unmarshal(stream);\n\t\t\tfinal GraphmlType graphml = jaxbElement.getValue();\n\t\t\tSystem.out.println(\"Graph: \" + graphml.getGraphOrData());\n\t\t\treturn graphml;\n\t\t} catch (final IOException e) {\n\t\t\te.printStackTrace();\n\t\t\tthrow new RuntimeException(e);\n\t\t}\n\t}\n}\n","avg_line_length":35.3529411765,"max_line_length":110,"alphanum_fraction":0.7903494176} {"size":987,"ext":"java","lang":"Java","max_stars_count":3.0,"content":"\/*\n * Copyright 2014 University of Southern California\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\/\n\n\/** This package is responsible for launching and monitoring external processes\n * . E.g. the Flakes.\n * Local processes are monitored using a file based heartbeat system.\n * NOTE: We initially used an implicit monitoring using the Process class,\n * but that is not easy to be made fault tolerant.\n * @author kumbhare\n *\/\npackage edu.usc.pgroup.floe.processes.monitor;\n","avg_line_length":39.48,"max_line_length":79,"alphanum_fraction":0.7507598784} {"size":25877,"ext":"java","lang":"Java","max_stars_count":3.0,"content":"\/**\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. 
The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\/\n\/**\n * Autogenerated by Thrift Compiler (0.9.2)\n *\n * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING\n * @generated\n *\/\npackage org.apache.zeppelin.interpreter.thrift;\n\nimport org.apache.thrift.scheme.IScheme;\nimport org.apache.thrift.scheme.SchemeFactory;\nimport org.apache.thrift.scheme.StandardScheme;\n\nimport org.apache.thrift.scheme.TupleScheme;\nimport org.apache.thrift.protocol.TTupleProtocol;\nimport org.apache.thrift.protocol.TProtocolException;\nimport org.apache.thrift.EncodingUtils;\nimport org.apache.thrift.TException;\nimport org.apache.thrift.async.AsyncMethodCallback;\nimport org.apache.thrift.server.AbstractNonblockingServer.*;\nimport java.util.List;\nimport java.util.ArrayList;\nimport java.util.Map;\nimport java.util.HashMap;\nimport java.util.EnumMap;\nimport java.util.Set;\nimport java.util.HashSet;\nimport java.util.EnumSet;\nimport java.util.Collections;\nimport java.util.BitSet;\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport javax.annotation.Generated;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@SuppressWarnings({\"cast\", \"rawtypes\", \"serial\", \"unchecked\"})\n@Generated(value = \"Autogenerated by Thrift Compiler (0.9.2)\", date = \"2018-8-9\")\npublic class AppOutputAppendEvent implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable {\n private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct(\"AppOutputAppendEvent\");\n\n private static final org.apache.thrift.protocol.TField NOTE_ID_FIELD_DESC = new org.apache.thrift.protocol.TField(\"noteId\", org.apache.thrift.protocol.TType.STRING, (short)1);\n private static final org.apache.thrift.protocol.TField PARAGRAPH_ID_FIELD_DESC = new org.apache.thrift.protocol.TField(\"paragraphId\", org.apache.thrift.protocol.TType.STRING, (short)2);\n private static final org.apache.thrift.protocol.TField APP_ID_FIELD_DESC = new org.apache.thrift.protocol.TField(\"appId\", org.apache.thrift.protocol.TType.STRING, (short)3);\n private static final org.apache.thrift.protocol.TField INDEX_FIELD_DESC = new org.apache.thrift.protocol.TField(\"index\", org.apache.thrift.protocol.TType.I32, (short)4);\n private static final org.apache.thrift.protocol.TField DATA_FIELD_DESC = new org.apache.thrift.protocol.TField(\"data\", org.apache.thrift.protocol.TType.STRING, (short)5);\n\n private static final Map, SchemeFactory> schemes = new HashMap, SchemeFactory>();\n static {\n schemes.put(StandardScheme.class, new AppOutputAppendEventStandardSchemeFactory());\n schemes.put(TupleScheme.class, new AppOutputAppendEventTupleSchemeFactory());\n }\n\n public String noteId; \/\/ required\n public String paragraphId; \/\/ required\n public String appId; \/\/ required\n public int index; \/\/ required\n public String data; \/\/ required\n\n \/** The set of fields this struct contains, along with convenience methods for 
finding and manipulating them. *\/\n public enum _Fields implements org.apache.thrift.TFieldIdEnum {\n NOTE_ID((short)1, \"noteId\"),\n PARAGRAPH_ID((short)2, \"paragraphId\"),\n APP_ID((short)3, \"appId\"),\n INDEX((short)4, \"index\"),\n DATA((short)5, \"data\");\n\n private static final Map byName = new HashMap();\n\n static {\n for (_Fields field : EnumSet.allOf(_Fields.class)) {\n byName.put(field.getFieldName(), field);\n }\n }\n\n \/**\n * Find the _Fields constant that matches fieldId, or null if its not found.\n *\/\n public static _Fields findByThriftId(int fieldId) {\n switch(fieldId) {\n case 1: \/\/ NOTE_ID\n return NOTE_ID;\n case 2: \/\/ PARAGRAPH_ID\n return PARAGRAPH_ID;\n case 3: \/\/ APP_ID\n return APP_ID;\n case 4: \/\/ INDEX\n return INDEX;\n case 5: \/\/ DATA\n return DATA;\n default:\n return null;\n }\n }\n\n \/**\n * Find the _Fields constant that matches fieldId, throwing an exception\n * if it is not found.\n *\/\n public static _Fields findByThriftIdOrThrow(int fieldId) {\n _Fields fields = findByThriftId(fieldId);\n if (fields == null) throw new IllegalArgumentException(\"Field \" + fieldId + \" doesn't exist!\");\n return fields;\n }\n\n \/**\n * Find the _Fields constant that matches name, or null if its not found.\n *\/\n public static _Fields findByName(String name) {\n return byName.get(name);\n }\n\n private final short _thriftId;\n private final String _fieldName;\n\n _Fields(short thriftId, String fieldName) {\n _thriftId = thriftId;\n _fieldName = fieldName;\n }\n\n public short getThriftFieldId() {\n return _thriftId;\n }\n\n public String getFieldName() {\n return _fieldName;\n }\n }\n\n \/\/ isset id assignments\n private static final int __INDEX_ISSET_ID = 0;\n private byte __isset_bitfield = 0;\n public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;\n static {\n Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);\n tmpMap.put(_Fields.NOTE_ID, new org.apache.thrift.meta_data.FieldMetaData(\"noteId\", org.apache.thrift.TFieldRequirementType.DEFAULT, \n new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));\n tmpMap.put(_Fields.PARAGRAPH_ID, new org.apache.thrift.meta_data.FieldMetaData(\"paragraphId\", org.apache.thrift.TFieldRequirementType.DEFAULT, \n new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));\n tmpMap.put(_Fields.APP_ID, new org.apache.thrift.meta_data.FieldMetaData(\"appId\", org.apache.thrift.TFieldRequirementType.DEFAULT, \n new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));\n tmpMap.put(_Fields.INDEX, new org.apache.thrift.meta_data.FieldMetaData(\"index\", org.apache.thrift.TFieldRequirementType.DEFAULT, \n new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));\n tmpMap.put(_Fields.DATA, new org.apache.thrift.meta_data.FieldMetaData(\"data\", org.apache.thrift.TFieldRequirementType.DEFAULT, \n new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));\n metaDataMap = Collections.unmodifiableMap(tmpMap);\n org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(AppOutputAppendEvent.class, metaDataMap);\n }\n\n public AppOutputAppendEvent() {\n }\n\n public AppOutputAppendEvent(\n String noteId,\n String paragraphId,\n String appId,\n int index,\n String data)\n {\n this();\n this.noteId = noteId;\n 
this.paragraphId = paragraphId;\n this.appId = appId;\n this.index = index;\n setIndexIsSet(true);\n this.data = data;\n }\n\n \/**\n * Performs a deep copy on other<\/i>.\n *\/\n public AppOutputAppendEvent(AppOutputAppendEvent other) {\n __isset_bitfield = other.__isset_bitfield;\n if (other.isSetNoteId()) {\n this.noteId = other.noteId;\n }\n if (other.isSetParagraphId()) {\n this.paragraphId = other.paragraphId;\n }\n if (other.isSetAppId()) {\n this.appId = other.appId;\n }\n this.index = other.index;\n if (other.isSetData()) {\n this.data = other.data;\n }\n }\n\n public AppOutputAppendEvent deepCopy() {\n return new AppOutputAppendEvent(this);\n }\n\n @Override\n public void clear() {\n this.noteId = null;\n this.paragraphId = null;\n this.appId = null;\n setIndexIsSet(false);\n this.index = 0;\n this.data = null;\n }\n\n public String getNoteId() {\n return this.noteId;\n }\n\n public AppOutputAppendEvent setNoteId(String noteId) {\n this.noteId = noteId;\n return this;\n }\n\n public void unsetNoteId() {\n this.noteId = null;\n }\n\n \/** Returns true if field noteId is set (has been assigned a value) and false otherwise *\/\n public boolean isSetNoteId() {\n return this.noteId != null;\n }\n\n public void setNoteIdIsSet(boolean value) {\n if (!value) {\n this.noteId = null;\n }\n }\n\n public String getParagraphId() {\n return this.paragraphId;\n }\n\n public AppOutputAppendEvent setParagraphId(String paragraphId) {\n this.paragraphId = paragraphId;\n return this;\n }\n\n public void unsetParagraphId() {\n this.paragraphId = null;\n }\n\n \/** Returns true if field paragraphId is set (has been assigned a value) and false otherwise *\/\n public boolean isSetParagraphId() {\n return this.paragraphId != null;\n }\n\n public void setParagraphIdIsSet(boolean value) {\n if (!value) {\n this.paragraphId = null;\n }\n }\n\n public String getAppId() {\n return this.appId;\n }\n\n public AppOutputAppendEvent setAppId(String appId) {\n this.appId = appId;\n return this;\n }\n\n public void unsetAppId() {\n this.appId = null;\n }\n\n \/** Returns true if field appId is set (has been assigned a value) and false otherwise *\/\n public boolean isSetAppId() {\n return this.appId != null;\n }\n\n public void setAppIdIsSet(boolean value) {\n if (!value) {\n this.appId = null;\n }\n }\n\n public int getIndex() {\n return this.index;\n }\n\n public AppOutputAppendEvent setIndex(int index) {\n this.index = index;\n setIndexIsSet(true);\n return this;\n }\n\n public void unsetIndex() {\n __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __INDEX_ISSET_ID);\n }\n\n \/** Returns true if field index is set (has been assigned a value) and false otherwise *\/\n public boolean isSetIndex() {\n return EncodingUtils.testBit(__isset_bitfield, __INDEX_ISSET_ID);\n }\n\n public void setIndexIsSet(boolean value) {\n __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __INDEX_ISSET_ID, value);\n }\n\n public String getData() {\n return this.data;\n }\n\n public AppOutputAppendEvent setData(String data) {\n this.data = data;\n return this;\n }\n\n public void unsetData() {\n this.data = null;\n }\n\n \/** Returns true if field data is set (has been assigned a value) and false otherwise *\/\n public boolean isSetData() {\n return this.data != null;\n }\n\n public void setDataIsSet(boolean value) {\n if (!value) {\n this.data = null;\n }\n }\n\n public void setFieldValue(_Fields field, Object value) {\n switch (field) {\n case NOTE_ID:\n if (value == null) {\n unsetNoteId();\n } else {\n 
setNoteId((String)value);\n }\n break;\n\n case PARAGRAPH_ID:\n if (value == null) {\n unsetParagraphId();\n } else {\n setParagraphId((String)value);\n }\n break;\n\n case APP_ID:\n if (value == null) {\n unsetAppId();\n } else {\n setAppId((String)value);\n }\n break;\n\n case INDEX:\n if (value == null) {\n unsetIndex();\n } else {\n setIndex((Integer)value);\n }\n break;\n\n case DATA:\n if (value == null) {\n unsetData();\n } else {\n setData((String)value);\n }\n break;\n\n }\n }\n\n public Object getFieldValue(_Fields field) {\n switch (field) {\n case NOTE_ID:\n return getNoteId();\n\n case PARAGRAPH_ID:\n return getParagraphId();\n\n case APP_ID:\n return getAppId();\n\n case INDEX:\n return Integer.valueOf(getIndex());\n\n case DATA:\n return getData();\n\n }\n throw new IllegalStateException();\n }\n\n \/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise *\/\n public boolean isSet(_Fields field) {\n if (field == null) {\n throw new IllegalArgumentException();\n }\n\n switch (field) {\n case NOTE_ID:\n return isSetNoteId();\n case PARAGRAPH_ID:\n return isSetParagraphId();\n case APP_ID:\n return isSetAppId();\n case INDEX:\n return isSetIndex();\n case DATA:\n return isSetData();\n }\n throw new IllegalStateException();\n }\n\n @Override\n public boolean equals(Object that) {\n if (that == null)\n return false;\n if (that instanceof AppOutputAppendEvent)\n return this.equals((AppOutputAppendEvent)that);\n return false;\n }\n\n public boolean equals(AppOutputAppendEvent that) {\n if (that == null)\n return false;\n\n boolean this_present_noteId = true && this.isSetNoteId();\n boolean that_present_noteId = true && that.isSetNoteId();\n if (this_present_noteId || that_present_noteId) {\n if (!(this_present_noteId && that_present_noteId))\n return false;\n if (!this.noteId.equals(that.noteId))\n return false;\n }\n\n boolean this_present_paragraphId = true && this.isSetParagraphId();\n boolean that_present_paragraphId = true && that.isSetParagraphId();\n if (this_present_paragraphId || that_present_paragraphId) {\n if (!(this_present_paragraphId && that_present_paragraphId))\n return false;\n if (!this.paragraphId.equals(that.paragraphId))\n return false;\n }\n\n boolean this_present_appId = true && this.isSetAppId();\n boolean that_present_appId = true && that.isSetAppId();\n if (this_present_appId || that_present_appId) {\n if (!(this_present_appId && that_present_appId))\n return false;\n if (!this.appId.equals(that.appId))\n return false;\n }\n\n boolean this_present_index = true;\n boolean that_present_index = true;\n if (this_present_index || that_present_index) {\n if (!(this_present_index && that_present_index))\n return false;\n if (this.index != that.index)\n return false;\n }\n\n boolean this_present_data = true && this.isSetData();\n boolean that_present_data = true && that.isSetData();\n if (this_present_data || that_present_data) {\n if (!(this_present_data && that_present_data))\n return false;\n if (!this.data.equals(that.data))\n return false;\n }\n\n return true;\n }\n\n @Override\n public int hashCode() {\n List list = new ArrayList();\n\n boolean present_noteId = true && (isSetNoteId());\n list.add(present_noteId);\n if (present_noteId)\n list.add(noteId);\n\n boolean present_paragraphId = true && (isSetParagraphId());\n list.add(present_paragraphId);\n if (present_paragraphId)\n list.add(paragraphId);\n\n boolean present_appId = true && (isSetAppId());\n list.add(present_appId);\n if (present_appId)\n 
list.add(appId);\n\n boolean present_index = true;\n list.add(present_index);\n if (present_index)\n list.add(index);\n\n boolean present_data = true && (isSetData());\n list.add(present_data);\n if (present_data)\n list.add(data);\n\n return list.hashCode();\n }\n\n @Override\n public int compareTo(AppOutputAppendEvent other) {\n if (!getClass().equals(other.getClass())) {\n return getClass().getName().compareTo(other.getClass().getName());\n }\n\n int lastComparison = 0;\n\n lastComparison = Boolean.valueOf(isSetNoteId()).compareTo(other.isSetNoteId());\n if (lastComparison != 0) {\n return lastComparison;\n }\n if (isSetNoteId()) {\n lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.noteId, other.noteId);\n if (lastComparison != 0) {\n return lastComparison;\n }\n }\n lastComparison = Boolean.valueOf(isSetParagraphId()).compareTo(other.isSetParagraphId());\n if (lastComparison != 0) {\n return lastComparison;\n }\n if (isSetParagraphId()) {\n lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.paragraphId, other.paragraphId);\n if (lastComparison != 0) {\n return lastComparison;\n }\n }\n lastComparison = Boolean.valueOf(isSetAppId()).compareTo(other.isSetAppId());\n if (lastComparison != 0) {\n return lastComparison;\n }\n if (isSetAppId()) {\n lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.appId, other.appId);\n if (lastComparison != 0) {\n return lastComparison;\n }\n }\n lastComparison = Boolean.valueOf(isSetIndex()).compareTo(other.isSetIndex());\n if (lastComparison != 0) {\n return lastComparison;\n }\n if (isSetIndex()) {\n lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.index, other.index);\n if (lastComparison != 0) {\n return lastComparison;\n }\n }\n lastComparison = Boolean.valueOf(isSetData()).compareTo(other.isSetData());\n if (lastComparison != 0) {\n return lastComparison;\n }\n if (isSetData()) {\n lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.data, other.data);\n if (lastComparison != 0) {\n return lastComparison;\n }\n }\n return 0;\n }\n\n public _Fields fieldForId(int fieldId) {\n return _Fields.findByThriftId(fieldId);\n }\n\n public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {\n schemes.get(iprot.getScheme()).getScheme().read(iprot, this);\n }\n\n public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {\n schemes.get(oprot.getScheme()).getScheme().write(oprot, this);\n }\n\n @Override\n public String toString() {\n StringBuilder sb = new StringBuilder(\"AppOutputAppendEvent(\");\n boolean first = true;\n\n sb.append(\"noteId:\");\n if (this.noteId == null) {\n sb.append(\"null\");\n } else {\n sb.append(this.noteId);\n }\n first = false;\n if (!first) sb.append(\", \");\n sb.append(\"paragraphId:\");\n if (this.paragraphId == null) {\n sb.append(\"null\");\n } else {\n sb.append(this.paragraphId);\n }\n first = false;\n if (!first) sb.append(\", \");\n sb.append(\"appId:\");\n if (this.appId == null) {\n sb.append(\"null\");\n } else {\n sb.append(this.appId);\n }\n first = false;\n if (!first) sb.append(\", \");\n sb.append(\"index:\");\n sb.append(this.index);\n first = false;\n if (!first) sb.append(\", \");\n sb.append(\"data:\");\n if (this.data == null) {\n sb.append(\"null\");\n } else {\n sb.append(this.data);\n }\n first = false;\n sb.append(\")\");\n return sb.toString();\n }\n\n public void validate() throws org.apache.thrift.TException {\n \/\/ check for required fields\n \/\/ check for sub-struct 
validity\n }\n\n private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {\n try {\n write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));\n } catch (org.apache.thrift.TException te) {\n throw new java.io.IOException(te);\n }\n }\n\n private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {\n try {\n \/\/ it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.\n __isset_bitfield = 0;\n read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));\n } catch (org.apache.thrift.TException te) {\n throw new java.io.IOException(te);\n }\n }\n\n private static class AppOutputAppendEventStandardSchemeFactory implements SchemeFactory {\n public AppOutputAppendEventStandardScheme getScheme() {\n return new AppOutputAppendEventStandardScheme();\n }\n }\n\n private static class AppOutputAppendEventStandardScheme extends StandardScheme {\n\n public void read(org.apache.thrift.protocol.TProtocol iprot, AppOutputAppendEvent struct) throws org.apache.thrift.TException {\n org.apache.thrift.protocol.TField schemeField;\n iprot.readStructBegin();\n while (true)\n {\n schemeField = iprot.readFieldBegin();\n if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { \n break;\n }\n switch (schemeField.id) {\n case 1: \/\/ NOTE_ID\n if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {\n struct.noteId = iprot.readString();\n struct.setNoteIdIsSet(true);\n } else { \n org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);\n }\n break;\n case 2: \/\/ PARAGRAPH_ID\n if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {\n struct.paragraphId = iprot.readString();\n struct.setParagraphIdIsSet(true);\n } else { \n org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);\n }\n break;\n case 3: \/\/ APP_ID\n if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {\n struct.appId = iprot.readString();\n struct.setAppIdIsSet(true);\n } else { \n org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);\n }\n break;\n case 4: \/\/ INDEX\n if (schemeField.type == org.apache.thrift.protocol.TType.I32) {\n struct.index = iprot.readI32();\n struct.setIndexIsSet(true);\n } else { \n org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);\n }\n break;\n case 5: \/\/ DATA\n if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {\n struct.data = iprot.readString();\n struct.setDataIsSet(true);\n } else { \n org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);\n }\n break;\n default:\n org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);\n }\n iprot.readFieldEnd();\n }\n iprot.readStructEnd();\n\n \/\/ check for required fields of primitive type, which can't be checked in the validate method\n struct.validate();\n }\n\n public void write(org.apache.thrift.protocol.TProtocol oprot, AppOutputAppendEvent struct) throws org.apache.thrift.TException {\n struct.validate();\n\n oprot.writeStructBegin(STRUCT_DESC);\n if (struct.noteId != null) {\n oprot.writeFieldBegin(NOTE_ID_FIELD_DESC);\n oprot.writeString(struct.noteId);\n oprot.writeFieldEnd();\n }\n if (struct.paragraphId != null) {\n oprot.writeFieldBegin(PARAGRAPH_ID_FIELD_DESC);\n oprot.writeString(struct.paragraphId);\n oprot.writeFieldEnd();\n }\n if (struct.appId != null) {\n 
oprot.writeFieldBegin(APP_ID_FIELD_DESC);\n oprot.writeString(struct.appId);\n oprot.writeFieldEnd();\n }\n oprot.writeFieldBegin(INDEX_FIELD_DESC);\n oprot.writeI32(struct.index);\n oprot.writeFieldEnd();\n if (struct.data != null) {\n oprot.writeFieldBegin(DATA_FIELD_DESC);\n oprot.writeString(struct.data);\n oprot.writeFieldEnd();\n }\n oprot.writeFieldStop();\n oprot.writeStructEnd();\n }\n\n }\n\n private static class AppOutputAppendEventTupleSchemeFactory implements SchemeFactory {\n public AppOutputAppendEventTupleScheme getScheme() {\n return new AppOutputAppendEventTupleScheme();\n }\n }\n\n private static class AppOutputAppendEventTupleScheme extends TupleScheme {\n\n @Override\n public void write(org.apache.thrift.protocol.TProtocol prot, AppOutputAppendEvent struct) throws org.apache.thrift.TException {\n TTupleProtocol oprot = (TTupleProtocol) prot;\n BitSet optionals = new BitSet();\n if (struct.isSetNoteId()) {\n optionals.set(0);\n }\n if (struct.isSetParagraphId()) {\n optionals.set(1);\n }\n if (struct.isSetAppId()) {\n optionals.set(2);\n }\n if (struct.isSetIndex()) {\n optionals.set(3);\n }\n if (struct.isSetData()) {\n optionals.set(4);\n }\n oprot.writeBitSet(optionals, 5);\n if (struct.isSetNoteId()) {\n oprot.writeString(struct.noteId);\n }\n if (struct.isSetParagraphId()) {\n oprot.writeString(struct.paragraphId);\n }\n if (struct.isSetAppId()) {\n oprot.writeString(struct.appId);\n }\n if (struct.isSetIndex()) {\n oprot.writeI32(struct.index);\n }\n if (struct.isSetData()) {\n oprot.writeString(struct.data);\n }\n }\n\n @Override\n public void read(org.apache.thrift.protocol.TProtocol prot, AppOutputAppendEvent struct) throws org.apache.thrift.TException {\n TTupleProtocol iprot = (TTupleProtocol) prot;\n BitSet incoming = iprot.readBitSet(5);\n if (incoming.get(0)) {\n struct.noteId = iprot.readString();\n struct.setNoteIdIsSet(true);\n }\n if (incoming.get(1)) {\n struct.paragraphId = iprot.readString();\n struct.setParagraphIdIsSet(true);\n }\n if (incoming.get(2)) {\n struct.appId = iprot.readString();\n struct.setAppIdIsSet(true);\n }\n if (incoming.get(3)) {\n struct.index = iprot.readI32();\n struct.setIndexIsSet(true);\n }\n if (incoming.get(4)) {\n struct.data = iprot.readString();\n struct.setDataIsSet(true);\n }\n }\n }\n\n}\n\n","avg_line_length":31.0275779376,"max_line_length":189,"alphanum_fraction":0.6615140859} {"size":1442,"ext":"java","lang":"Java","max_stars_count":null,"content":"package com.oms.mail.model;\r\n\r\npublic class OMSMailRequest {\r\n\r\n private String productId;\r\n private String to;\r\n private String cc;\r\n private String bcc;\r\n private String from;\r\n private String subject;\r\n private String message;\r\n private String replyTo;\r\n\r\n public String getProductId() {\r\n return productId;\r\n }\r\n\r\n public void setProductId(String productId) {\r\n this.productId = productId;\r\n }\r\n\r\n public String getTo() {\r\n return to;\r\n }\r\n\r\n public void setTo(String to) {\r\n this.to = to;\r\n }\r\n\r\n public String getCc() {\r\n return cc;\r\n }\r\n\r\n public void setCc(String cc) {\r\n this.cc = cc;\r\n }\r\n\r\n public String getBcc() {\r\n return bcc;\r\n }\r\n\r\n public void setBcc(String bcc) {\r\n this.bcc = bcc;\r\n }\r\n\r\n public String getFrom() {\r\n return from;\r\n }\r\n\r\n public void setFrom(String from) {\r\n this.from = from;\r\n }\r\n\r\n public String getSubject() {\r\n return subject;\r\n }\r\n\r\n public void setSubject(String subject) {\r\n this.subject = subject;\r\n }\r\n\r\n public 
String getMessage() {\r\n return message;\r\n }\r\n\r\n public void setMessage(String message) {\r\n this.message = message;\r\n }\r\n\r\n public String getReplyTo() {\r\n return replyTo;\r\n }\r\n\r\n public void setReplyTo(String replyTo) {\r\n this.replyTo = replyTo;\r\n }\r\n}\r\n","avg_line_length":18.4871794872,"max_line_length":49,"alphanum_fraction":0.547850208} {"size":976,"ext":"java","lang":"Java","max_stars_count":15.0,"content":"\/*\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\/\n\/\/ Generated by the protocol buffer compiler. DO NOT EDIT!\n\/\/ source: google\/cloud\/automl\/v1beta1\/video.proto\n\npackage com.google.cloud.automl.v1beta1;\n\npublic interface VideoObjectTrackingModelMetadataOrBuilder\n extends\n \/\/ @@protoc_insertion_point(interface_extends:google.cloud.automl.v1beta1.VideoObjectTrackingModelMetadata)\n com.google.protobuf.MessageOrBuilder {}\n","avg_line_length":39.04,"max_line_length":111,"alphanum_fraction":0.7674180328} {"size":35597,"ext":"java","lang":"Java","max_stars_count":1.0,"content":"\/*\n * Copyright (C) 2009 University of Washington\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file except\n * in compliance with the License. You may obtain a copy of the License at\n *\n * http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software distributed under the License\n * is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express\n * or implied. 
See the License for the specific language governing permissions and limitations under\n * the License.\n *\/\n\npackage org.odk.collect.android.activities;\n\nimport android.app.AlertDialog;\nimport android.app.ProgressDialog;\nimport android.content.Context;\nimport android.content.DialogInterface;\nimport android.content.Intent;\nimport android.database.Cursor;\nimport android.net.ConnectivityManager;\nimport android.net.NetworkInfo;\nimport android.net.Uri;\nimport android.os.AsyncTask;\nimport android.os.Bundle;\nimport android.util.SparseBooleanArray;\nimport android.view.View;\nimport android.view.View.OnClickListener;\nimport android.widget.AdapterView;\nimport android.widget.Button;\nimport android.widget.ListView;\n\nimport androidx.annotation.NonNull;\nimport androidx.annotation.Nullable;\nimport androidx.lifecycle.ViewModelProviders;\n\nimport org.odk.collect.android.R;\nimport org.odk.collect.android.activities.viewmodels.FormDownloadListViewModel;\nimport org.odk.collect.android.adapters.FormDownloadListAdapter;\nimport org.odk.collect.android.application.Collect;\nimport org.odk.collect.android.dao.FormsDao;\nimport org.odk.collect.android.http.openrosa.HttpCredentialsInterface;\nimport org.odk.collect.android.injection.DaggerUtils;\nimport org.odk.collect.android.listeners.DownloadFormsTaskListener;\nimport org.odk.collect.android.listeners.FormListDownloaderListener;\nimport org.odk.collect.android.listeners.PermissionListener;\nimport org.odk.collect.android.logic.FormDetails;\nimport org.odk.collect.android.tasks.DownloadFormListTask;\nimport org.odk.collect.android.tasks.DownloadFormsTask;\nimport org.odk.collect.android.utilities.ApplicationConstants;\nimport org.odk.collect.android.utilities.AuthDialogUtility;\nimport org.odk.collect.android.utilities.DialogUtils;\nimport org.odk.collect.android.utilities.DownloadFormListUtils;\nimport org.odk.collect.android.utilities.PermissionUtils;\nimport org.odk.collect.android.utilities.ToastUtils;\nimport org.odk.collect.android.utilities.WebCredentialsUtils;\n\nimport java.net.URI;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.Comparator;\nimport java.util.HashMap;\nimport java.util.Locale;\nimport java.util.Set;\n\nimport javax.inject.Inject;\n\nimport timber.log.Timber;\n\nimport static org.odk.collect.android.utilities.DownloadFormListUtils.DL_AUTH_REQUIRED;\nimport static org.odk.collect.android.utilities.DownloadFormListUtils.DL_ERROR_MSG;\n\n\/**\n * Responsible for displaying, adding and deleting all the valid forms in the forms directory. One\n * caveat. If the server requires authentication, a dialog will pop up asking when you request the\n * form list. If somehow you manage to wait long enough and then try to download selected forms and\n * your authorization has timed out, it won't again ask for authentication, it will just throw a\n * 401\n * and you'll have to hit 'refresh' where it will ask for credentials again. Technically a server\n * could point at other servers requiring authentication to download the forms, but the current\n * implementation in Collect doesn't allow for that. Mostly this is just because it's a pain in the\n * butt to keep track of which forms we've downloaded and where we're needing to authenticate. 
I\n * think we do something similar in the instanceuploader task\/activity, so should change the\n * implementation eventually.\n *\n * @author Carl Hartung (carlhartung@gmail.com)\n *\/\npublic class FormDownloadList extends FormListActivity implements FormListDownloaderListener,\n DownloadFormsTaskListener, AuthDialogUtility.AuthDialogUtilityResultListener, AdapterView.OnItemClickListener {\n private static final String FORM_DOWNLOAD_LIST_SORTING_ORDER = \"formDownloadListSortingOrder\";\n\n public static final String DISPLAY_ONLY_UPDATED_FORMS = \"displayOnlyUpdatedForms\";\n private static final String BUNDLE_SELECTED_COUNT = \"selectedcount\";\n\n public static final String FORMNAME = \"formname\";\n private static final String FORMDETAIL_KEY = \"formdetailkey\";\n public static final String FORMID_DISPLAY = \"formiddisplay\";\n\n public static final String FORM_ID_KEY = \"formid\";\n private static final String FORM_VERSION_KEY = \"formversion\";\n\n private AlertDialog alertDialog;\n private ProgressDialog progressDialog;\n private ProgressDialog cancelDialog;\n private Button downloadButton;\n\n private DownloadFormListTask downloadFormListTask;\n private DownloadFormsTask downloadFormsTask;\n private Button toggleButton;\n\n private final ArrayList> filteredFormList = new ArrayList<>();\n\n private static final boolean EXIT = true;\n private static final boolean DO_NOT_EXIT = false;\n\n private boolean displayOnlyUpdatedForms;\n\n private FormDownloadListViewModel viewModel;\n\n @Inject\n WebCredentialsUtils webCredentialsUtils;\n\n @Inject\n DownloadFormListUtils downloadFormListUtils;\n\n @SuppressWarnings(\"unchecked\")\n @Override\n public void onCreate(Bundle savedInstanceState) {\n super.onCreate(savedInstanceState);\n DaggerUtils.getComponent(this).inject(this);\n\n setContentView(R.layout.form_download_list);\n setTitle(getString(R.string.get_forms));\n\n viewModel = ViewModelProviders.of(this).get(FormDownloadListViewModel.class);\n\n \/\/ This activity is accessed directly externally\n new PermissionUtils().requestStoragePermissions(this, new PermissionListener() {\n @Override\n public void granted() {\n \/\/ must be at the beginning of any activity that can be called from an external intent\n try {\n Collect.createODKDirs();\n } catch (RuntimeException e) {\n DialogUtils.showDialog(DialogUtils.createErrorDialog(FormDownloadList.this, e.getMessage(), EXIT), FormDownloadList.this);\n return;\n }\n\n init(savedInstanceState);\n }\n\n @Override\n public void denied() {\n \/\/ The activity has to finish because ODK Collect cannot function without these permissions.\n finish();\n }\n });\n }\n\n private void init(Bundle savedInstanceState) {\n Bundle bundle = getIntent().getExtras();\n if (bundle != null) {\n if (bundle.containsKey(DISPLAY_ONLY_UPDATED_FORMS)) {\n displayOnlyUpdatedForms = (boolean) bundle.get(DISPLAY_ONLY_UPDATED_FORMS);\n }\n\n if (bundle.containsKey(ApplicationConstants.BundleKeys.FORM_IDS)) {\n viewModel.setDownloadOnlyMode(true);\n viewModel.setFormIdsToDownload(bundle.getStringArray(ApplicationConstants.BundleKeys.FORM_IDS));\n\n if (viewModel.getFormIdsToDownload() == null) {\n setReturnResult(false, \"Form Ids is null\", null);\n finish();\n }\n\n if (bundle.containsKey(ApplicationConstants.BundleKeys.URL)) {\n viewModel.setUrl(bundle.getString(ApplicationConstants.BundleKeys.URL));\n\n if (bundle.containsKey(ApplicationConstants.BundleKeys.USERNAME)\n && bundle.containsKey(ApplicationConstants.BundleKeys.PASSWORD)) {\n 
viewModel.setUsername(bundle.getString(ApplicationConstants.BundleKeys.USERNAME));\n viewModel.setPassword(bundle.getString(ApplicationConstants.BundleKeys.PASSWORD));\n }\n }\n }\n }\n\n downloadButton = findViewById(R.id.add_button);\n downloadButton.setEnabled(listView.getCheckedItemCount() > 0);\n downloadButton.setOnClickListener(new OnClickListener() {\n @Override\n public void onClick(View v) {\n downloadSelectedFiles();\n }\n });\n\n toggleButton = findViewById(R.id.toggle_button);\n toggleButton.setEnabled(false);\n toggleButton.setOnClickListener(new OnClickListener() {\n @Override\n public void onClick(View v) {\n downloadButton.setEnabled(toggleChecked(listView));\n toggleButtonLabel(toggleButton, listView);\n viewModel.clearSelectedForms();\n if (listView.getCheckedItemCount() == listView.getCount()) {\n for (HashMap map : viewModel.getFormList()) {\n viewModel.addSelectedForm(map.get(FORMDETAIL_KEY));\n }\n }\n }\n });\n\n Button refreshButton = findViewById(R.id.refresh_button);\n refreshButton.setOnClickListener(new OnClickListener() {\n @Override\n public void onClick(View v) {\n viewModel.setLoadingCanceled(false);\n viewModel.clearFormList();\n updateAdapter();\n clearChoices();\n downloadFormList();\n }\n });\n\n if (savedInstanceState != null) {\n \/\/ how many items we've selected\n \/\/ Android should keep track of this, but broken on rotate...\n if (savedInstanceState.containsKey(BUNDLE_SELECTED_COUNT)) {\n downloadButton.setEnabled(savedInstanceState.getInt(BUNDLE_SELECTED_COUNT) > 0);\n }\n }\n\n filteredFormList.addAll(viewModel.getFormList());\n\n if (getLastCustomNonConfigurationInstance() instanceof DownloadFormListTask) {\n downloadFormListTask = (DownloadFormListTask) getLastCustomNonConfigurationInstance();\n if (downloadFormListTask.getStatus() == AsyncTask.Status.FINISHED) {\n try {\n if (progressDialog != null && progressDialog.isShowing()) {\n progressDialog.dismiss();\n }\n viewModel.setProgressDialogShowing(false);\n } catch (IllegalArgumentException e) {\n Timber.i(\"Attempting to close a dialog that was not previously opened\");\n }\n downloadFormsTask = null;\n }\n } else if (getLastCustomNonConfigurationInstance() instanceof DownloadFormsTask) {\n downloadFormsTask = (DownloadFormsTask) getLastCustomNonConfigurationInstance();\n if (downloadFormsTask.getStatus() == AsyncTask.Status.FINISHED) {\n try {\n if (progressDialog != null && progressDialog.isShowing()) {\n progressDialog.dismiss();\n }\n viewModel.setProgressDialogShowing(false);\n } catch (IllegalArgumentException e) {\n Timber.i(\"Attempting to close a dialog that was not previously opened\");\n }\n downloadFormsTask = null;\n }\n } else if (viewModel.getFormNamesAndURLs().isEmpty()\n && getLastCustomNonConfigurationInstance() == null\n && !viewModel.wasLoadingCanceled()) {\n \/\/ first time, so get the formlist\n downloadFormList();\n }\n\n listView.setChoiceMode(ListView.CHOICE_MODE_MULTIPLE);\n listView.setItemsCanFocus(false);\n\n sortingOptions = new int[] {\n R.string.sort_by_name_asc, R.string.sort_by_name_desc\n };\n }\n\n private void clearChoices() {\n listView.clearChoices();\n downloadButton.setEnabled(false);\n }\n\n @Override\n public void onItemClick(AdapterView parent, View view, int position, long id) {\n toggleButtonLabel(toggleButton, listView);\n downloadButton.setEnabled(listView.getCheckedItemCount() > 0);\n\n if (listView.isItemChecked(position)) {\n viewModel.addSelectedForm(((HashMap) listView.getAdapter().getItem(position)).get(FORMDETAIL_KEY));\n } else {\n 
viewModel.removeSelectedForm(((HashMap) listView.getAdapter().getItem(position)).get(FORMDETAIL_KEY));\n }\n }\n\n \/**\n * Starts the download task and shows the progress dialog.\n *\/\n private void downloadFormList() {\n ConnectivityManager connectivityManager =\n (ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE);\n NetworkInfo ni = connectivityManager.getActiveNetworkInfo();\n\n if (ni == null || !ni.isConnected()) {\n ToastUtils.showShortToast(R.string.no_connection);\n\n if (viewModel.isDownloadOnlyMode()) {\n setReturnResult(false, getString(R.string.no_connection), viewModel.getFormResults());\n finish();\n }\n } else {\n viewModel.clearFormNamesAndURLs();\n if (progressDialog != null) {\n \/\/ This is needed because onPrepareDialog() is broken in 1.6.\n progressDialog.setMessage(viewModel.getProgressDialogMsg());\n }\n createProgressDialog();\n\n if (downloadFormListTask != null\n && downloadFormListTask.getStatus() != AsyncTask.Status.FINISHED) {\n return; \/\/ we are already doing the download!!!\n } else if (downloadFormListTask != null) {\n downloadFormListTask.setDownloaderListener(null);\n downloadFormListTask.cancel(true);\n downloadFormListTask = null;\n }\n\n downloadFormListTask = new DownloadFormListTask(downloadFormListUtils);\n downloadFormListTask.setDownloaderListener(this);\n\n if (viewModel.isDownloadOnlyMode()) {\n \/\/ Pass over the nulls -> They have no effect if even one of them is a null\n downloadFormListTask.setAlternateCredentials(viewModel.getUrl(), viewModel.getUsername(), viewModel.getPassword());\n }\n\n downloadFormListTask.execute();\n }\n }\n\n @Override\n protected void onRestoreInstanceState(Bundle state) {\n super.onRestoreInstanceState(state);\n if (PermissionUtils.areStoragePermissionsGranted(this)) {\n updateAdapter();\n }\n }\n\n @Override\n protected void onSaveInstanceState(Bundle outState) {\n super.onSaveInstanceState(outState);\n outState.putInt(BUNDLE_SELECTED_COUNT, listView.getCheckedItemCount());\n }\n\n @Override\n protected String getSortingOrderKey() {\n return FORM_DOWNLOAD_LIST_SORTING_ORDER;\n }\n\n @Override\n protected void updateAdapter() {\n CharSequence charSequence = getFilterText();\n filteredFormList.clear();\n if (charSequence.length() > 0) {\n for (HashMap form : viewModel.getFormList()) {\n if (form.get(FORMNAME).toLowerCase(Locale.US).contains(charSequence.toString().toLowerCase(Locale.US))) {\n filteredFormList.add(form);\n }\n }\n } else {\n filteredFormList.addAll(viewModel.getFormList());\n }\n sortList();\n if (listView.getAdapter() == null) {\n listView.setAdapter(new FormDownloadListAdapter(this, filteredFormList, viewModel.getFormNamesAndURLs()));\n } else {\n FormDownloadListAdapter formDownloadListAdapter = (FormDownloadListAdapter) listView.getAdapter();\n formDownloadListAdapter.setFromIdsToDetails(viewModel.getFormNamesAndURLs());\n formDownloadListAdapter.notifyDataSetChanged();\n }\n toggleButton.setEnabled(!filteredFormList.isEmpty());\n checkPreviouslyCheckedItems();\n toggleButtonLabel(toggleButton, listView);\n }\n\n @Override\n protected void checkPreviouslyCheckedItems() {\n listView.clearChoices();\n for (int i = 0; i < listView.getCount(); i++) {\n HashMap item =\n (HashMap) listView.getAdapter().getItem(i);\n if (viewModel.getSelectedForms().contains(item.get(FORMDETAIL_KEY))) {\n listView.setItemChecked(i, true);\n }\n }\n }\n\n private void sortList() {\n Collections.sort(filteredFormList, new Comparator>() {\n @Override\n public int compare(HashMap lhs, HashMap rhs) {\n 
if (getSortingOrder().equals(SORT_BY_NAME_ASC)) {\n return lhs.get(FORMNAME).compareToIgnoreCase(rhs.get(FORMNAME));\n } else {\n return rhs.get(FORMNAME).compareToIgnoreCase(lhs.get(FORMNAME));\n }\n }\n });\n }\n\n \/**\n * starts the task to download the selected forms, also shows progress dialog\n *\/\n private void downloadSelectedFiles() {\n ArrayList filesToDownload = new ArrayList<>();\n\n SparseBooleanArray sba = listView.getCheckedItemPositions();\n for (int i = 0; i < listView.getCount(); i++) {\n if (sba.get(i, false)) {\n HashMap item =\n (HashMap) listView.getAdapter().getItem(i);\n filesToDownload.add(viewModel.getFormNamesAndURLs().get(item.get(FORMDETAIL_KEY)));\n }\n }\n\n startFormsDownload(filesToDownload);\n }\n\n @SuppressWarnings(\"unchecked\")\n private void startFormsDownload(@NonNull ArrayList filesToDownload) {\n int totalCount = filesToDownload.size();\n if (totalCount > 0) {\n \/\/ show dialog box\n createProgressDialog();\n\n downloadFormsTask = new DownloadFormsTask();\n downloadFormsTask.setDownloaderListener(this);\n\n if (viewModel.getUrl() != null) {\n if (viewModel.getUsername() != null && viewModel.getPassword() != null) {\n webCredentialsUtils.saveCredentials(viewModel.getUrl(), viewModel.getUsername(), viewModel.getPassword());\n } else {\n webCredentialsUtils.clearCredentials(viewModel.getUrl());\n }\n }\n\n downloadFormsTask.execute(filesToDownload);\n } else {\n ToastUtils.showShortToast(R.string.noselect_error);\n }\n }\n\n @Override\n public Object onRetainCustomNonConfigurationInstance() {\n if (downloadFormsTask != null) {\n return downloadFormsTask;\n } else {\n return downloadFormListTask;\n }\n }\n\n @Override\n protected void onDestroy() {\n if (downloadFormListTask != null) {\n downloadFormListTask.setDownloaderListener(null);\n }\n if (downloadFormsTask != null) {\n downloadFormsTask.setDownloaderListener(null);\n }\n super.onDestroy();\n }\n\n @Override\n protected void onResume() {\n if (downloadFormListTask != null) {\n downloadFormListTask.setDownloaderListener(this);\n }\n if (downloadFormsTask != null) {\n downloadFormsTask.setDownloaderListener(this);\n }\n if (viewModel.isAlertShowing()) {\n createAlertDialog(viewModel.getAlertTitle(), viewModel.getAlertDialogMsg(), viewModel.shouldExit());\n }\n if (viewModel.isProgressDialogShowing() && (progressDialog == null || !progressDialog.isShowing())) {\n createProgressDialog();\n }\n if (viewModel.isCancelDialogShowing()) {\n createCancelDialog();\n }\n super.onResume();\n }\n\n @Override\n protected void onPause() {\n if (alertDialog != null && alertDialog.isShowing()) {\n alertDialog.dismiss();\n }\n super.onPause();\n }\n\n public boolean isLocalFormSuperseded(String formId) {\n if (formId == null) {\n Timber.e(\"isLocalFormSuperseded: server is not OpenRosa-compliant. 
is null!\");\n return true;\n }\n\n try (Cursor formCursor = new FormsDao().getFormsCursorForFormId(formId)) {\n return formCursor != null && formCursor.getCount() == 0 \/\/ form does not already exist locally\n || viewModel.getFormNamesAndURLs().get(formId).isNewerFormVersionAvailable() \/\/ or a newer version of this form is available\n || viewModel.getFormNamesAndURLs().get(formId).areNewerMediaFilesAvailable(); \/\/ or newer versions of media files are available\n }\n }\n\n \/**\n * Causes any local forms that have been updated on the server to become checked in the list.\n * This is a prompt and a\n * convenience to users to download the latest version of those forms from the server.\n *\/\n private void selectSupersededForms() {\n\n ListView ls = listView;\n for (int idx = 0; idx < filteredFormList.size(); idx++) {\n HashMap item = filteredFormList.get(idx);\n if (isLocalFormSuperseded(item.get(FORM_ID_KEY))) {\n ls.setItemChecked(idx, true);\n viewModel.addSelectedForm(item.get(FORMDETAIL_KEY));\n }\n }\n }\n\n \/*\n * Called when the form list has finished downloading. results will either contain a set of\n * tuples, or one tuple of DL.ERROR.MSG and the associated message.\n *\/\n public void formListDownloadingComplete(HashMap result) {\n progressDialog.dismiss();\n viewModel.setProgressDialogShowing(false);\n downloadFormListTask.setDownloaderListener(null);\n downloadFormListTask = null;\n\n if (result == null) {\n Timber.e(\"Formlist Downloading returned null. That shouldn't happen\");\n \/\/ Just displayes \"error occured\" to the user, but this should never happen.\n if (viewModel.isDownloadOnlyMode()) {\n setReturnResult(false, \"Formlist Downloading returned null. That shouldn't happen\", null);\n }\n\n createAlertDialog(getString(R.string.load_remote_form_error),\n getString(R.string.error_occured), EXIT);\n return;\n }\n\n if (result.containsKey(DL_AUTH_REQUIRED)) {\n \/\/ need authorization\n createAuthDialog();\n } else if (result.containsKey(DL_ERROR_MSG)) {\n \/\/ Download failed\n String dialogMessage =\n getString(R.string.list_failed_with_error,\n result.get(DL_ERROR_MSG).getErrorStr());\n String dialogTitle = getString(R.string.load_remote_form_error);\n\n if (viewModel.isDownloadOnlyMode()) {\n setReturnResult(false, getString(R.string.load_remote_form_error), viewModel.getFormResults());\n }\n\n createAlertDialog(dialogTitle, dialogMessage, DO_NOT_EXIT);\n } else {\n \/\/ Everything worked. Clear the list and add the results.\n viewModel.setFormNamesAndURLs(result);\n\n viewModel.clearFormList();\n\n ArrayList ids = new ArrayList<>(viewModel.getFormNamesAndURLs().keySet());\n for (int i = 0; i < result.size(); i++) {\n String formDetailsKey = ids.get(i);\n FormDetails details = viewModel.getFormNamesAndURLs().get(formDetailsKey);\n\n if (!displayOnlyUpdatedForms || (details.isNewerFormVersionAvailable() || details.areNewerMediaFilesAvailable())) {\n HashMap item = new HashMap<>();\n item.put(FORMNAME, details.getFormName());\n item.put(FORMID_DISPLAY,\n ((details.getFormVersion() == null) ? 
\"\" : (getString(R.string.version) + \" \"\n + details.getFormVersion() + \" \")) + \"ID: \" + details.getFormId());\n item.put(FORMDETAIL_KEY, formDetailsKey);\n item.put(FORM_ID_KEY, details.getFormId());\n item.put(FORM_VERSION_KEY, details.getFormVersion());\n\n \/\/ Insert the new form in alphabetical order.\n if (viewModel.getFormList().isEmpty()) {\n viewModel.addForm(item);\n } else {\n int j;\n for (j = 0; j < viewModel.getFormList().size(); j++) {\n HashMap compareMe = viewModel.getFormList().get(j);\n String name = compareMe.get(FORMNAME);\n if (name.compareTo(viewModel.getFormNamesAndURLs().get(ids.get(i)).getFormName()) > 0) {\n break;\n }\n }\n viewModel.addForm(j, item);\n }\n }\n }\n filteredFormList.addAll(viewModel.getFormList());\n updateAdapter();\n selectSupersededForms();\n downloadButton.setEnabled(listView.getCheckedItemCount() > 0);\n toggleButton.setEnabled(listView.getCount() > 0);\n toggleButtonLabel(toggleButton, listView);\n\n if (viewModel.isDownloadOnlyMode()) {\n \/\/1. First check if all form IDS could be found on the server - Register forms that could not be found\n\n for (String formId: viewModel.getFormIdsToDownload()) {\n viewModel.putFormResult(formId, false);\n }\n\n ArrayList filesToDownload = new ArrayList<>();\n\n for (FormDetails formDetails: viewModel.getFormNamesAndURLs().values()) {\n String formId = formDetails.getFormId();\n\n if (viewModel.getFormResults().containsKey(formId)) {\n filesToDownload.add(formDetails);\n }\n }\n\n \/\/2. Select forms and start downloading\n if (!filesToDownload.isEmpty()) {\n startFormsDownload(filesToDownload);\n } else {\n \/\/ None of the forms was found\n setReturnResult(false, \"Forms not found on server\", viewModel.getFormResults());\n finish();\n }\n\n }\n }\n }\n\n \/**\n * Creates an alert dialog with the given tite and message. 
If shouldExit is set to true, the\n * activity will exit when the user clicks \"ok\".\n *\/\n private void createAlertDialog(String title, String message, final boolean shouldExit) {\n alertDialog = new AlertDialog.Builder(this).create();\n alertDialog.setTitle(title);\n alertDialog.setMessage(message);\n DialogInterface.OnClickListener quitListener = new DialogInterface.OnClickListener() {\n @Override\n public void onClick(DialogInterface dialog, int i) {\n switch (i) {\n case DialogInterface.BUTTON_POSITIVE: \/\/ ok\n \/\/ just close the dialog\n viewModel.setAlertShowing(false);\n \/\/ successful download, so quit\n \/\/ Also quit if in download_mode only(called by another app\/activity just to download)\n if (shouldExit || viewModel.isDownloadOnlyMode()) {\n finish();\n }\n break;\n }\n }\n };\n alertDialog.setCancelable(false);\n alertDialog.setButton(getString(R.string.ok), quitListener);\n alertDialog.setIcon(android.R.drawable.ic_dialog_info);\n viewModel.setAlertDialogMsg(message);\n viewModel.setAlertTitle(title);\n viewModel.setAlertShowing(true);\n viewModel.setShouldExit(shouldExit);\n DialogUtils.showDialog(alertDialog, this);\n }\n\n private void createProgressDialog() {\n progressDialog = new ProgressDialog(this);\n DialogInterface.OnClickListener loadingButtonListener =\n new DialogInterface.OnClickListener() {\n @Override\n public void onClick(DialogInterface dialog, int which) {\n \/\/ we use the same progress dialog for both\n \/\/ so whatever isn't null is running\n dialog.dismiss();\n if (downloadFormListTask != null) {\n downloadFormListTask.setDownloaderListener(null);\n downloadFormListTask.cancel(true);\n downloadFormListTask = null;\n\n \/\/ Only explicitly exit if DownloadFormListTask is running since\n \/\/ DownloadFormTask has a callback when cancelled and has code to handle\n \/\/ cancellation when in download mode only\n if (viewModel.isDownloadOnlyMode()) {\n setReturnResult(false, \"User cancelled the operation\", viewModel.getFormResults());\n finish();\n }\n }\n\n if (downloadFormsTask != null) {\n createCancelDialog();\n downloadFormsTask.cancel(true);\n }\n viewModel.setLoadingCanceled(true);\n viewModel.setProgressDialogShowing(false);\n }\n };\n progressDialog.setTitle(getString(R.string.downloading_data));\n progressDialog.setMessage(viewModel.getProgressDialogMsg());\n progressDialog.setIcon(android.R.drawable.ic_dialog_info);\n progressDialog.setIndeterminate(true);\n progressDialog.setCancelable(false);\n progressDialog.setButton(getString(R.string.cancel), loadingButtonListener);\n viewModel.setProgressDialogShowing(true);\n DialogUtils.showDialog(progressDialog, this);\n }\n\n private void createAuthDialog() {\n viewModel.setAlertShowing(false);\n\n AuthDialogUtility authDialogUtility = new AuthDialogUtility();\n if (viewModel.getUrl() != null && viewModel.getUsername() != null && viewModel.getPassword() != null) {\n authDialogUtility.setCustomUsername(viewModel.getUsername());\n authDialogUtility.setCustomPassword(viewModel.getPassword());\n }\n DialogUtils.showDialog(authDialogUtility.createDialog(this, this, viewModel.getUrl()), this);\n }\n\n private void createCancelDialog() {\n cancelDialog = new ProgressDialog(this);\n cancelDialog.setTitle(getString(R.string.canceling));\n cancelDialog.setMessage(getString(R.string.please_wait));\n cancelDialog.setIcon(android.R.drawable.ic_dialog_info);\n cancelDialog.setIndeterminate(true);\n cancelDialog.setCancelable(false);\n viewModel.setCancelDialogShowing(true);\n 
DialogUtils.showDialog(cancelDialog, this);\n }\n\n @Override\n public void progressUpdate(String currentFile, int progress, int total) {\n viewModel.setProgressDialogMsg(getString(R.string.fetching_file, currentFile, String.valueOf(progress), String.valueOf(total)));\n progressDialog.setMessage(viewModel.getProgressDialogMsg());\n }\n\n @Override\n public void formsDownloadingComplete(HashMap result) {\n if (downloadFormsTask != null) {\n downloadFormsTask.setDownloaderListener(null);\n }\n\n cleanUpWebCredentials();\n\n if (progressDialog.isShowing()) {\n \/\/ should always be true here\n progressDialog.dismiss();\n viewModel.setProgressDialogShowing(false);\n }\n\n createAlertDialog(getString(R.string.download_forms_result), getDownloadResultMessage(result), EXIT);\n\n \/\/ Set result to true for forms which were downloaded\n if (viewModel.isDownloadOnlyMode()) {\n for (FormDetails formDetails: result.keySet()) {\n String successKey = result.get(formDetails);\n if (Collect.getInstance().getString(R.string.success).equals(successKey)) {\n if (viewModel.getFormResults().containsKey(formDetails.getFormId())) {\n viewModel.putFormResult(formDetails.getFormId(), true);\n }\n }\n }\n\n setReturnResult(true, null, viewModel.getFormResults());\n }\n }\n\n public static String getDownloadResultMessage(HashMap result) {\n Set keys = result.keySet();\n StringBuilder b = new StringBuilder();\n for (FormDetails k : keys) {\n b.append(k.getFormName() + \" (\"\n + ((k.getFormVersion() != null)\n ? (Collect.getInstance().getString(R.string.version) + \": \" + k.getFormVersion() + \" \")\n : \"\") + \"ID: \" + k.getFormId() + \") - \" + result.get(k));\n b.append(\"\\n\\n\");\n }\n\n return b.toString().trim();\n }\n\n @Override\n public void formsDownloadingCancelled() {\n if (downloadFormsTask != null) {\n downloadFormsTask.setDownloaderListener(null);\n downloadFormsTask = null;\n }\n\n cleanUpWebCredentials();\n\n if (cancelDialog != null && cancelDialog.isShowing()) {\n cancelDialog.dismiss();\n viewModel.setCancelDialogShowing(false);\n }\n\n if (viewModel.isDownloadOnlyMode()) {\n setReturnResult(false, \"Download cancelled\", null);\n finish();\n }\n }\n\n @Override\n public void updatedCredentials() {\n \/\/ If the user updated the custom credentials using the dialog, let us update our\n \/\/ variables holding the custom credentials\n if (viewModel.getUrl() != null) {\n HttpCredentialsInterface httpCredentials = webCredentialsUtils.getCredentials(URI.create(viewModel.getUrl()));\n\n if (httpCredentials != null) {\n viewModel.setUsername(httpCredentials.getUsername());\n viewModel.setPassword(httpCredentials.getPassword());\n }\n }\n\n downloadFormList();\n }\n\n @Override\n public void cancelledUpdatingCredentials() {\n finish();\n }\n\n private void setReturnResult(boolean successful, @Nullable String message, @Nullable HashMap resultFormIds) {\n Intent intent = new Intent();\n intent.putExtra(ApplicationConstants.BundleKeys.SUCCESS_KEY, successful);\n if (message != null) {\n intent.putExtra(ApplicationConstants.BundleKeys.MESSAGE, message);\n }\n if (resultFormIds != null) {\n intent.putExtra(ApplicationConstants.BundleKeys.FORM_IDS, resultFormIds);\n }\n\n setResult(RESULT_OK, intent);\n }\n\n private void cleanUpWebCredentials() {\n if (viewModel.getUrl() != null) {\n String host = Uri.parse(viewModel.getUrl())\n .getHost();\n\n if (host != null) {\n webCredentialsUtils.clearCredentials(viewModel.getUrl());\n }\n }\n 
}\n}\n","avg_line_length":41.7315357562,"max_line_length":147,"alphanum_fraction":0.6216254179} {"size":3178,"ext":"java","lang":"Java","max_stars_count":null,"content":"\/\/ Copyright 2019 Google LLC\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n\npackage com.google.api.ads.admanager.jaxws.v201908;\n\nimport javax.xml.bind.annotation.XmlEnum;\nimport javax.xml.bind.annotation.XmlType;\n\n\n\/**\n *
<p>Java class for ProposalLineItemActionError.Reason.\n * \n * <p>The following schema fragment specifies the expected content contained within this class.\n * <p>\n * <pre>
\n * <simpleType name=\"ProposalLineItemActionError.Reason\">\n *   <restriction base=\"{http:\/\/www.w3.org\/2001\/XMLSchema}string\">\n *     <enumeration value=\"NOT_APPLICABLE\"\/>\n *     <enumeration value=\"PROPOSAL_NOT_EDITABLE\"\/>\n *     <enumeration value=\"CANNOT_SELECTIVELY_ARCHIVE_PROPOSAL_LINE_ITEMS_FROM_MANDATORY_PRODUCTS\"\/>\n *     <enumeration value=\"CANNOT_SELECTIVELY_UNARCHIVE_PROPOSAL_LINE_ITEMS_FROM_MANDATORY_PRODUCTS\"\/>\n *     <enumeration value=\"CANNOT_UNARCHIVE_SOLD_PROGRAMMATIC_PROPOSAL_LINE_ITEM\"\/>\n *     <enumeration value=\"UNKNOWN\"\/>\n *   <\/restriction>\n * <\/simpleType>\n * <\/pre>\n * \n *\/\n@XmlType(name = \"ProposalLineItemActionError.Reason\")\n@XmlEnum\npublic enum ProposalLineItemActionErrorReason {\n\n\n    \/**\n     * \n     *                 The operation is not applicable to the current state.\n     *               \n     * \n     *\/\n    NOT_APPLICABLE,\n\n    \/**\n     * \n     *                 The operation is not applicable because the containing proposal is not editable.\n     *               \n     * \n     *\/\n    PROPOSAL_NOT_EDITABLE,\n\n    \/**\n     * \n     *                 The archive operation is not applicable because it would cause some mandatory products\n     *                 to have no unarchived proposal line items in the package.\n     *               \n     * \n     *\/\n    CANNOT_SELECTIVELY_ARCHIVE_PROPOSAL_LINE_ITEMS_FROM_MANDATORY_PRODUCTS,\n\n    \/**\n     * \n     *                 The unarchive operation is not applicable because it would cause some mandatory products\n     *                 to have no unarchived proposal line items in the package.\n     *               \n     * \n     *\/\n    CANNOT_SELECTIVELY_UNARCHIVE_PROPOSAL_LINE_ITEMS_FROM_MANDATORY_PRODUCTS,\n\n    \/**\n     * \n     *                 Sold programmatic {@link ProposalLineItem} cannot be unarchived.\n     *               \n     * \n     *\/\n    CANNOT_UNARCHIVE_SOLD_PROGRAMMATIC_PROPOSAL_LINE_ITEM,\n\n    \/**\n     * \n     *                 The value returned if the actual value is not exposed by the requested API version.\n     *               \n     * \n     *\/\n    UNKNOWN;\n\n    public String value() {\n        return name();\n    }\n\n    public static ProposalLineItemActionErrorReason fromValue(String v) {\n        return valueOf(v);\n    }\n\n}\n","avg_line_length":30.2666666667,"max_line_length":111,"alphanum_fraction":0.6482064191}
{"size":187,"ext":"java","lang":"Java","max_stars_count":null,"content":"package org.xzhi.af.ingredient.impl;\n\nimport org.xzhi.af.ingredient.Cheese;\n\npublic class ReggianoCheese implements Cheese {\n\n\tpublic String toString() {\n\t\treturn \"Reggiano Cheese\";\n\t}\n}\n","avg_line_length":17.0,"max_line_length":47,"alphanum_fraction":0.7593582888}
{"size":2771,"ext":"java","lang":"Java","max_stars_count":null,"content":"\/**\n * Copyright \u00a9 2020 ForgeRock AS (obst@forgerock.com)\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\/\npackage com.forgerock.securebanking.openbanking.uk.rcs.testsupport.idm.dto.consent;\n\nimport com.forgerock.securebanking.openbanking.uk.rcs.client.idm.dto.consent.FRInternationalScheduledPaymentConsentData;\nimport org.joda.time.DateTime;\nimport uk.org.openbanking.datamodel.payment.OBExternalPermissions2Code;\n\nimport static com.forgerock.securebanking.openbanking.uk.rcs.client.idm.dto.consent.FRConsentStatusCode.AWAITINGAUTHORISATION;\nimport static java.util.UUID.randomUUID;\nimport static uk.org.openbanking.testsupport.payment.OBExchangeRateTestDataFactory.aValidOBWriteInternationalConsentResponse6DataExchangeRateInformation;\nimport static uk.org.openbanking.testsupport.payment.OBWriteInternationalScheduledConsentTestDataFactory.aValidOBWriteInternationalScheduled3DataInitiation;\n\npublic class FRInternationalScheduledPaymentConsentDataTestDataFactory {\n\n    public static FRInternationalScheduledPaymentConsentData aValidInternationalScheduledPaymentConsentData() {\n        return aValidInternationalScheduledPaymentConsentDataBuilder(randomUUID().toString()).build();\n    }\n\n    public static FRInternationalScheduledPaymentConsentData aValidInternationalScheduledPaymentConsentData(String consentId) {\n        return aValidInternationalScheduledPaymentConsentDataBuilder(consentId).build();\n    }\n\n    public static FRInternationalScheduledPaymentConsentData.FRInternationalScheduledPaymentConsentDataBuilder aValidInternationalScheduledPaymentConsentDataBuilder(String consentId) {\n        return FRInternationalScheduledPaymentConsentData.builder()\n                .consentId(consentId)\n                .creationDateTime(DateTime.now())\n                .status(AWAITINGAUTHORISATION)\n                .permission(OBExternalPermissions2Code.CREATE)\n                .statusUpdateDateTime(DateTime.now())\n                .expectedExecutionDateTime(DateTime.now())\n                .expectedSettlementDateTime(DateTime.now().plusDays(1))\n                .exchangeRateInformation(aValidOBWriteInternationalConsentResponse6DataExchangeRateInformation())\n                .initiation(aValidOBWriteInternationalScheduled3DataInitiation());\n    }\n}\n","avg_line_length":55.42,"max_line_length":184,"alphanum_fraction":0.7957416095}
{"size":2689,"ext":"java","lang":"Java","max_stars_count":null,"content":"package com.android.favour.NetworkIO;\n\nimport android.content.Context;\nimport android.view.View;\nimport android.widget.ImageView;\nimport android.widget.ProgressBar;\n\nimport com.android.favour.R;\nimport com.bumptech.glide.Glide;\nimport com.bumptech.glide.Priority;\nimport com.bumptech.glide.load.resource.drawable.GlideDrawable;\nimport com.bumptech.glide.request.RequestListener;\nimport com.bumptech.glide.request.target.Target;\n\npublic class ImageServiceClient {\n\n    private Context context;\n\n    public void getImage(String uri, ImageView imgView){\n        context = imgView.getContext();\n        Glide.with(context)\n                .load(uri)\n                .dontAnimate()\n                .priority(Priority.NORMAL)\n                .error(R.drawable.flag)\n                .into(imgView);\n    }\n\n    public void getImage(final NetworkImageRequest imgRequest){\n        context = imgRequest.getImgView().getContext();\n        final ImageView imgView = imgRequest.getImgView();\n        final ProgressBar progressBar;\n        if((progressBar = imgRequest.getLoadingSpinner()) != null) {\n            progressBar.setVisibility(View.VISIBLE);\n        }\n        else if(imgRequest.getLoadingImage() > 0) {\n            imgView.setImageResource(imgRequest.getLoadingImage());\n        }\n        Priority glidePriority = (Priority) imgRequest.getImgPriority();\n        Glide.with(context)\n            .load(imgRequest.getUri())\n            .dontAnimate()\n            .priority(Priority.NORMAL)\n            .override(Target.SIZE_ORIGINAL, Target.SIZE_ORIGINAL)\n            .listener(new RequestListener() {\n\n                @Override\n                public boolean onException(Exception e, String model, Target target, boolean isFirstResource) {\n                    if (progressBar != null) {\n                        progressBar.setVisibility(View.GONE);\n                    }\n                    if (imgRequest.getErrorImage() > 0) {\n                        imgView.setImageResource(imgRequest.getErrorImage());\n                    }\n                    return false;\n                }\n\n                @Override\n                public boolean onResourceReady(GlideDrawable resource, String\n                        model, Target target, boolean isFromMemoryCache,\n                                               boolean isFirstResource) {\n                    if (progressBar != null) {\n                        progressBar.setVisibility(View.GONE);\n                    }\n                    imgRequest.getImgView().setVisibility(View.VISIBLE);\n                    return false;\n                }\n            })\n            .into(imgView);\n    }\n}\n","avg_line_length":37.3472222222,"max_line_length":126,"alphanum_fraction":0.5857195984}
{"size":7594,"ext":"java","lang":"Java","max_stars_count":null,"content":"\/*\n * Copyright (c) 2002-2018 Gargoyle Software Inc.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\/\npackage com.gargoylesoftware.htmlunit.html;\n\nimport java.util.HashMap;\nimport java.util.Locale;\nimport java.util.Map;\n\nimport org.apache.commons.logging.Log;\nimport org.apache.commons.logging.LogFactory;\nimport org.xml.sax.Attributes;\n\nimport com.gargoylesoftware.htmlunit.SgmlPage;\n\n\/**\n * A specialized creator that knows how to create input objects.\n *\n * @author Christian Sell<\/a>\n * @author Marc Guillemot\n * @author Ahmed Ashour\n * @author David K. Taylor\n * @author Dmitri Zoubkov\n * @author Frank Danek\n *\/\npublic final class InputElementFactory implements ElementFactory {\n\n    \/** Logging support. *\/\n    private static final Log LOG = LogFactory.getLog(InputElementFactory.class);\n\n    \/** The singleton instance. *\/\n    public static final InputElementFactory instance = new InputElementFactory();\n\n    \/** Private singleton constructor. *\/\n    private InputElementFactory() {\n        \/\/ Empty.\n    }\n\n    \/**\n     * Creates an HtmlElement for the specified xmlElement, contained in the specified page.\n     *\n     * @param page the page that this element will belong to\n     * @param tagName the HTML tag name\n     * @param attributes the SAX attributes\n     *\n     * @return a new HtmlInput element\n     *\/\n    @Override\n    public HtmlElement createElement(\n            final SgmlPage page, final String tagName,\n            final Attributes attributes) {\n        return createElementNS(page, null, tagName, attributes);\n    }\n\n    \/**\n     * {@inheritDoc}\n     *\/\n    @Override\n    public HtmlElement createElementNS(final SgmlPage page, final String namespaceURI,\n            final String qualifiedName, final Attributes attributes) {\n        return createElementNS(page, namespaceURI, qualifiedName, attributes, false);\n    }\n\n    \/**\n     * {@inheritDoc}\n     *\/\n    @Override\n    public HtmlElement createElementNS(final SgmlPage page, final String namespaceURI,\n            final String qualifiedName, final Attributes attributes, final boolean asdf) {\n\n        Map attributeMap = DefaultElementFactory.toMap(page, attributes);\n        if (attributeMap == null) {\n            attributeMap = new HashMap<>();\n        }\n\n        String type = null;\n        for (final Map.Entry entry : attributeMap.entrySet()) {\n            if (\"type\".equalsIgnoreCase(entry.getKey())) {\n                type = entry.getValue().getValue();\n            }\n        }\n        if (type == null) {\n            type = \"\";\n        }\n\n        final HtmlInput result;\n        switch (type.toLowerCase(Locale.ROOT)) {\n            case \"\":\n                \/\/ This not an illegal value, as it defaults to \"text\"\n                \/\/ cf http:\/\/www.w3.org\/TR\/REC-html40\/interact\/forms.html#adef-type-INPUT\n                \/\/ and the 
common browsers seem to treat it as a \"text\" input so we will as well.\n            case \"text\":\n                result = new HtmlTextInput(qualifiedName, page, attributeMap);\n                break;\n\n            case \"submit\":\n                result = new HtmlSubmitInput(qualifiedName, page, attributeMap);\n                break;\n\n            case \"checkbox\":\n                result = new HtmlCheckBoxInput(qualifiedName, page, attributeMap);\n                break;\n\n            case \"radio\":\n                result = new HtmlRadioButtonInput(qualifiedName, page, attributeMap);\n                break;\n\n            case \"hidden\":\n                result = new HtmlHiddenInput(qualifiedName, page, attributeMap);\n                break;\n\n            case \"password\":\n                result = new HtmlPasswordInput(qualifiedName, page, attributeMap);\n                break;\n\n            case \"image\":\n                result = new HtmlImageInput(qualifiedName, page, attributeMap);\n                break;\n\n            case \"reset\":\n                result = new HtmlResetInput(qualifiedName, page, attributeMap);\n                break;\n\n            case \"button\":\n                result = new HtmlButtonInput(qualifiedName, page, attributeMap);\n                break;\n\n            case \"file\":\n                result = new HtmlFileInput(qualifiedName, page, attributeMap);\n                break;\n\n            case \"color\":\n                result = new HtmlColorInput(qualifiedName, page, attributeMap);\n                break;\n\n            case \"date\":\n                result = new HtmlDateInput(qualifiedName, page, attributeMap);\n                break;\n\n            case \"datetime-local\":\n                result = new HtmlDateTimeLocalInput(qualifiedName, page, attributeMap);\n                break;\n\n            case \"email\":\n                result = new HtmlEmailInput(qualifiedName, page, attributeMap);\n                break;\n\n            case \"month\":\n                result = new HtmlMonthInput(qualifiedName, page, attributeMap);\n                break;\n\n            case \"number\":\n                result = new HtmlNumberInput(qualifiedName, page, attributeMap);\n                break;\n\n            case \"range\":\n                result = new HtmlRangeInput(qualifiedName, page, attributeMap);\n                break;\n\n            case \"search\":\n                result = new HtmlSearchInput(qualifiedName, page, attributeMap);\n                break;\n\n            case \"tel\":\n                result = new HtmlTelInput(qualifiedName, page, attributeMap);\n                break;\n\n            case \"time\":\n                result = new HtmlTimeInput(qualifiedName, page, attributeMap);\n                break;\n\n            case \"url\":\n                result = new HtmlUrlInput(qualifiedName, page, attributeMap);\n                break;\n\n            case \"week\":\n                result = new HtmlWeekInput(qualifiedName, page, attributeMap);\n                break;\n\n            default:\n                LOG.info(\"Bad input type: \\\"\" + type + \"\\\", creating a text input\");\n                result = new HtmlTextInput(qualifiedName, page, attributeMap);\n                break;\n        }\n        return result;\n    }\n\n    \/**\n     * Returns whether the specified type is supported or not.\n     * @param type the type\n     * @return whether the specified type is supported or not\n     *\/\n    public static 
boolean isSupported(final String type) {\n        boolean supported = false;\n        switch (type) {\n            case \"text\":\n            case \"submit\":\n            case \"checkbox\":\n            case \"radio\":\n            case \"hidden\":\n            case \"password\":\n            case \"image\":\n            case \"reset\":\n            case \"button\":\n            case \"file\":\n            case \"color\":\n            case \"date\":\n            case \"datetime-local\":\n            case \"email\":\n            case \"month\":\n            case \"number\":\n            case \"range\":\n            case \"search\":\n            case \"tel\":\n            case \"time\":\n            case \"url\":\n            case \"week\":\n                supported = true;\n                break;\n\n            default:\n        }\n        return supported;\n    }\n}\n","avg_line_length":32.0421940928,"max_line_length":97,"alphanum_fraction":0.5705820385}
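A small sketch of driving the factory directly, assuming the HtmlUnit jar is on the classpath; the URL is a placeholder, since only the owning page matters here.

import org.xml.sax.helpers.AttributesImpl;

import com.gargoylesoftware.htmlunit.WebClient;
import com.gargoylesoftware.htmlunit.html.HtmlElement;
import com.gargoylesoftware.htmlunit.html.HtmlPage;
import com.gargoylesoftware.htmlunit.html.InputElementFactory;

public class InputFactoryDemo {

    public static void main(String[] args) throws Exception {
        try (WebClient webClient = new WebClient()) {
            HtmlPage page = webClient.getPage("http://example.com");

            // Build SAX attributes for an input element with type="checkbox".
            AttributesImpl attributes = new AttributesImpl();
            attributes.addAttribute("", "type", "type", "CDATA", "checkbox");

            HtmlElement element = InputElementFactory.instance.createElement(page, "input", attributes);
            System.out.println(element.getClass().getSimpleName());            // HtmlCheckBoxInput
            System.out.println(InputElementFactory.isSupported("datetime-local")); // true
        }
    }
}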
{"size":16719,"ext":"java","lang":"Java","max_stars_count":1.0,"content":"package de.ixeption.smilefx.training;\n\nimport de.ixeption.smilefx.features.FeatureExtractor;\nimport org.apache.commons.lang3.time.StopWatch;\nimport org.apache.commons.lang3.tuple.Pair;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport smile.classification.AdaBoost;\nimport smile.classification.LogisticRegression;\nimport smile.classification.RandomForest;\nimport smile.classification.*;\nimport smile.feature.Scaler;\nimport smile.feature.SignalNoiseRatio;\nimport smile.feature.SumSquaresRatio;\nimport smile.math.Math;\nimport smile.math.SparseArray;\nimport smile.math.kernel.*;\nimport smile.validation.ClassificationMeasure;\n\nimport javax.annotation.Nullable;\nimport java.util.*;\nimport java.util.function.Consumer;\nimport java.util.stream.IntStream;\n\nimport static de.ixeption.smilefx.training.GridSearch.MLModelType.*;\nimport static java.util.stream.Collectors.toMap;\n\n\npublic class GridSearch {\n\n    public static final int MAX_ITERATION = 100000;\n    private static final Logger _log = LoggerFactory.getLogger(GridSearch.class);\n    private final EnumSet _models;\n    private final int _foldk;\n    private final Class _type;\n    private List _gridSearchResults;\n\n    \/**\n     * @param mLModelTypeToSearches Set of Models to be used in grid search    *\n     * @param foldk                 number of folds for cross-validation, 10 is a good default\n     *\/\n    public GridSearch(EnumSet mLModelTypeToSearches, int foldk, Class type) {\n        _models = mLModelTypeToSearches;\n        _foldk = foldk;\n        _type = type;\n        _gridSearchResults = new ArrayList<>();\n    }\n\n    \/**\n     * Uses grid search and cross validation to find the best model\n     * uses multi-threading\n     *\n     * @param dataSet              the data set\n     * @param measures             the measures to execute\n     * @param measureForComparsion the measure to use for comparsion (simple class name)\n     * @param featureExtractor     the feature extractor\n     * @return the best model or null\n     *\/\n    public @Nullable\n    TrainedBinarySmileModel findBestModel(TrainingDataSet dataSet, ClassificationMeasure[] measures, String measureForComparsion,\n                                          FeatureExtractor featureExtractor) {\n        T[] x;\n        Scaler scaler = null;\n        if (dataSet.getFeatures() instanceof double[][]) {\n            scaler = new Scaler();\n            scaler.learn(dataSet.getFeatures());\n            x = (T[]) scaler.transform(dataSet.getFeatures());\n            printFeatureImportance(dataSet.getLabels(), (double[][]) x, 20, featureExtractor);\n        } else {\n            x = dataSet.getFeatures();\n        }\n\n        _log.info(\"Starting grid search for {}\" + _models);\n        StopWatch stopWatch = new StopWatch();\n        List results = gridSearch(_models, dataSet, featureExtractor.getNumberOfFeatures(), measures,\n                Runtime.getRuntime().availableProcessors() > 2 ? Runtime.getRuntime().availableProcessors() - 1 : 1, null);\n        _log.info(\"Finished grid search in {}\", stopWatch.toString());\n\n        final Optional best = results.stream().max(Comparator.comparingDouble(gs -> {\n            final double measure = gs.getcVresult().getMeasure(measureForComparsion);\n            return Double.isNaN(measure) ? 
0.0 : measure;\n        }));\n        if (best.isPresent()) {\n            _log.info(\"-------------------------------------------\");\n            GridSearch.GridSearchResult result = best.get();\n\n            _log.info(\"Found best model {}: {} @ {}\", best.get().getMLModelType(), result.getcVresult().getMeasure(measureForComparsion),\n                    Arrays.toString(result.getParams().entrySet().toArray()));\n            SmileModelTrainer smileModelTrainer = new SmileModelTrainer<>(result.getClassifierTrainer());\n            final TrainedBinarySmileModel smileModel = new TrainedBinarySmileModel(smileModelTrainer.trainModel(x, dataSet.getLabels()), scaler, null, 0.5);\n            if (smileModel.getImportancesIfAvailable().isPresent()) {\n                double[] importances = (double[]) smileModel.getImportancesIfAvailable().get();\n                String[] labelNames = featureExtractor.getFeatureNames();\n                _log.info(\"Importances (>0): \");\n                for (int i = 0; i < importances.length; i++) {\n                    if (importances[i] > 0) {\n                        _log.info(labelNames[i] + \": \" + importances[i]);\n                    }\n                }\n            }\n\n            return smileModel;\n        }\n        return null;\n\n    }\n\n    @SuppressWarnings(\"unchecked\")\n    public List gridSearch(EnumSet models, TrainingDataSet dataSet, int numFeatures, ClassificationMeasure[] measures,\n                                             int parallelism, Consumer progressCallback) {\n\n        List gridSearchResults = getGridSearchResults();\n        Map, CVResult> map;\n        if (dataSet instanceof TrainTestSplitDataSet) {\n            _log.info(\"Dataset train size: {} number of train features: {}\", ((TrainTestSplitDataSet) dataSet).getLabelsTrain().length, numFeatures);\n            models.forEach(\n                    m -> gridSearchModel(m, Arrays.stream(((TrainTestSplitDataSet) dataSet).getLabelsTrain()).average().getAsDouble(), numFeatures, _type));\n            map = BalancedTrainTestSplitValidation.bttsv(((TrainTestSplitDataSet) dataSet).getFeaturesTrain(),\n                    ((TrainTestSplitDataSet) dataSet).getLabelsTrain(), ((TrainTestSplitDataSet) dataSet).getFeaturesTest(),\n                    ((TrainTestSplitDataSet) dataSet).getLabelsTest(), measures, parallelism, progressCallback, false,\n                    gridSearchResults.stream().map(gridSearchResult -> gridSearchResult._classifierTrainer)\/\/\n                            .toArray(ClassifierTrainer[]::new));\/\/\n        } else {\n            final double mean = Arrays.stream(dataSet.getLabels()).average().orElseThrow(IllegalArgumentException::new);\n            _log.info(\"Dataset size: {} number of features: {} mean label: {}\", dataSet.getLabels().length, numFeatures, mean);\n            models.forEach(m -> gridSearchModel(m, mean, numFeatures, _type));\n            map = BalancedCrossValidation.bcv(_foldk, dataSet.getFeatures(), dataSet.getLabels(), measures, parallelism, progressCallback, false,\n                    gridSearchResults.stream().map(gridSearchResult -> gridSearchResult._classifierTrainer)\/\/\n                            .toArray(ClassifierTrainer[]::new));\/\/\n        }\n        for (GridSearchResult res : gridSearchResults) {\n            res.setcVresult(map.get(res.getClassifierTrainer()));\n        }\n        return gridSearchResults;\n\n    }\n\n    public void printFeatureImportance(int[] labels, double[][] x, int topn, FeatureExtractor 
featureExtractor) {\n        final int classes = IntStream.of(labels).distinct().toArray().length;\n        final double[] rank;\n        if (classes == 2) {\n            _log.info(\"Binary classification: feature importance (SNR)\");\n            SignalNoiseRatio signalNoiseRatio = new SignalNoiseRatio();\n            rank = signalNoiseRatio.rank(x, labels);\n\n        } else {\n            _log.info(\"Multi-class classification: eature importance (SNR)\");\n            SumSquaresRatio sumSquaresRatio = new SumSquaresRatio();\n            rank = sumSquaresRatio.rank(x, labels);\n\n        }\n        IntStream.range(0, rank.length)\n                .boxed()\/\/\n                .collect(toMap(featureExtractor::getFeatureNameForIndex, i -> rank[i]))\/\/\n                .entrySet()\n                .stream()\/\/\n                .filter(e -> !e.getValue().isNaN())\n                .sorted((o1, o2) -> Double.compare(Math.abs(o2.getValue()), Math.abs(o1.getValue())))\n                .limit(topn)\n                .forEach(e -> _log.info(\"{}\\t\\t{}\", e.getKey(), e.getValue()));\n    }\n\n    protected void addToCrossValidation(MLModelType mlModelType, ClassifierTrainer trainer, HashMap params) {\n        getGridSearchResults().add(new GridSearchResult(params, trainer, mlModelType));\n    }\n\n    protected List getGridSearchResults() {\n        return _gridSearchResults;\n    }\n\n    protected void gridSearchAdaBoost(int[] treeSizes, int[] nodeSizes) {\n        for (int treeSize : treeSizes) {\n            for (int nodeSize : nodeSizes) {\n                AdaBoost.Trainer trainer = new AdaBoost.Trainer(treeSize);\n                trainer.setMaxNodes(nodeSize);\n                HashMap params = new HashMap<>();\n                params.put(\"nodeSize\", String.valueOf(nodeSize));\n                params.put(\"treeSize\", String.valueOf(treeSize));\n                addToCrossValidation(MLModelType.AdaBoost, (ClassifierTrainer) trainer, params);\n            }\n        }\n    }\n\n    protected void gridSearchGradientBoostedTree(int[] treeSizes, int[] nodeSizes, double[] shrinkages) {\n        for (int treeSize : treeSizes) {\n            for (int nodeSize : nodeSizes) {\n                for (double shrinkage : shrinkages) {\n                    GradientTreeBoost.Trainer trainer = new GradientTreeBoost.Trainer(treeSize);\n                    trainer.setMaxNodes(nodeSize);\n                    trainer.setShrinkage(shrinkage);\n                    HashMap params = new HashMap<>();\n                    params.put(\"treeSize\", String.valueOf(treeSize));\n                    params.put(\"nodeSize\", String.valueOf(nodeSize));\n                    params.put(\"shrinkage\", String.valueOf(shrinkage));\n                    addToCrossValidation(MLModelType.GradientBoostedTree, (ClassifierTrainer) trainer, params);\n                }\n            }\n        }\n    }\n\n    protected void gridSearchLogisticRegression(int[] lambdas) {\n        for (int lamda : lambdas) {\n            LogisticRegression.Trainer trainer = new LogisticRegression.Trainer();\n            trainer.setRegularizationFactor(lamda);\n            trainer.setMaxNumIteration(MAX_ITERATION);\n            HashMap params = new HashMap<>();\n            params.put(\"lamda\", String.valueOf(lamda));\n            addToCrossValidation(MLModelType.LogisticRegression, (ClassifierTrainer) trainer, params);\n\n        }\n    }\n\n    protected void gridSearchModel(MLModelType model, double mean, int numberOfFeatures, Class type) {\n        final 
int[] treeSizes = {10, 100, 200};\n        final int[] nodeSizes = {5, 10, 20};\n\n        List> cs = new ArrayList<>();\n        cs.add(Pair.of(0.4, 0.4));\n        cs.add(Pair.of(1.0, 1.0));\n        cs.add(Pair.of(0.4, 0.4 * mean));\n        cs.add(Pair.of(1.0, 1.0 * mean \/ 2));\n        cs.add(Pair.of(5.0, 5.0 * mean \/ 5));\n\n        if (type.equals(SparseArray.class)) {\n            switch (model) {\n                case SVM_Linear:\n                    gridSearchSparseSVM(SVM_Linear, cs, new SparseLinearKernel());\n                    break;\n                case SVM_Gaussian:\n                    gridSearchSparseSVM(SVM_Gaussian, cs, \/\/\n                            new SparseGaussianKernel(0.5), \/\/\n                            new SparseGaussianKernel(1), \/\/\n                            new SparseGaussianKernel(5));\n                    break;\n                case SVM_Laplacian:\n                    gridSearchSparseSVM(SVM_Laplacian, cs, \/\/\n                            new SparseLaplacianKernel(0.5), \/\/\n                            new SparseLaplacianKernel(1), \/\/\n                            new SparseLaplacianKernel(5));\n                    break;\n            }\n        } else {\n            switch (model) {\n                case SVM_Linear:\n                    gridSearchSVM(SVM_Linear, cs, new LinearKernel());\n                    break;\n                case SVM_Gaussian:\n                    gridSearchSVM(SVM_Gaussian, cs, \/\/\n                            new GaussianKernel(0.5), \/\/\n                            new GaussianKernel(1), \/\/\n                            new GaussianKernel(5));\n                    break;\n                case SVM_Laplacian:\n                    gridSearchSVM(SVM_Laplacian, cs, \/\/\n                            new LaplacianKernel(0.5), \/\/\n                            new LaplacianKernel(1), \/\/\n                            new LaplacianKernel(5));\n                    break;\n                case AdaBoost:\n                    gridSearchAdaBoost(treeSizes, nodeSizes);\n                    break;\n                case RandomForest:\n                    gridSearchRandomForest(treeSizes, nodeSizes, numberOfFeatures);\n                    break;\n                case GradientBoostedTree:\n                    gridSearchGradientBoostedTree(treeSizes, nodeSizes, new double[]{0.001, 0.01, 0.05});\n                    break;\n                case LogisticRegression:\n                    gridSearchLogisticRegression(new int[]{1, 2, 10});\n                    break;\n            }\n        }\n\n    }\n\n    protected void gridSearchRandomForest(int[] treeSizes, int[] nodeSizes, int numberOfFeatures) {\n        for (int treeSize : treeSizes) {\n            for (int nodeSize : nodeSizes) {\n                RandomForest.Trainer trainer = new RandomForest.Trainer((int) Math.floor(Math.sqrt(numberOfFeatures)), treeSize);\n                trainer.setNodeSize(nodeSize);\n                trainer.setSplitRule(DecisionTree.SplitRule.GINI);\n                trainer.setNumRandomFeatures((int) Math.sqrt(numberOfFeatures));\n                HashMap params = new HashMap<>();\n                params.put(\"treeSize\", String.valueOf(treeSize));\n                params.put(\"nodeSize\", String.valueOf(nodeSize));\n                addToCrossValidation(MLModelType.RandomForest, (ClassifierTrainer) trainer, params);\n\n            }\n        }\n    }\n\n    @SafeVarargs\n    protected final void gridSearchSVM(MLModelType mlModelType, List> cs, 
MercerKernel... kernels) {\n        for (MercerKernel mercerKernel : kernels) {\n            for (Pair c : cs) {\n                SVM.Trainer trainer = new SVM.Trainer<>(mercerKernel, c.getLeft(), c.getRight());\n\/\/                trainer.setMaxIter(MAX_ITERATION);\n                HashMap params = new HashMap<>();\n                params.put(\"CP\", String.valueOf(c.getLeft()));\n                params.put(\"CN\", String.valueOf(c.getRight()));\n                addToCrossValidation(mlModelType, (ClassifierTrainer) trainer, params);\n\n            }\n        }\n\n    }\n\n    protected void gridSearchSparseSVM(MLModelType mlModelType, List> cs, MercerKernel... kernels) {\n        for (MercerKernel mercerKernel : kernels) {\n            for (Pair c : cs) {\n                SVM.Trainer trainer = new SVM.Trainer<>(mercerKernel, c.getLeft(), c.getRight());\n\/\/                trainer.setMaxIter(MAX_ITERATION);\n                HashMap params = new HashMap<>();\n                params.put(\"CP\", String.valueOf(c.getLeft()));\n                params.put(\"CN\", String.valueOf(c.getRight()));\n                addToCrossValidation(mlModelType, (ClassifierTrainer) trainer, params);\n\n            }\n        }\n    }\n\n    \/\/ @formatter:off\n    public enum MLModelType {\n        SVM_Linear, SVM_Gaussian, SVM_Laplacian,\n        RandomForest, AdaBoost, GradientBoostedTree,\n        NaiveBayes, LogisticRegression\n\n    }\n    \/\/ @formatter:on\n\n\n    public static class GridSearchResult {\n\n        private final Map params;\n        private final ClassifierTrainer _classifierTrainer;\n        private CVResult _cVresult;\n        private MLModelType _mlModelType;\n\n        public GridSearchResult(Map params, ClassifierTrainer classifierTrainer, MLModelType mlModelType) {\n            this.params = params;\n            _classifierTrainer = classifierTrainer;\n            _mlModelType = mlModelType;\n        }\n\n        public ClassifierTrainer getClassifierTrainer() {\n            return _classifierTrainer;\n        }\n\n        public double getF1() {\n            return _cVresult._map.get(\"FMeasure\");\n        }\n\n        public MLModelType getMLModelType() {\n            return _mlModelType;\n        }\n\n        public double getMcc() {\n            return _cVresult._map.get(\"MCCMeasure\");\n        }\n\n        public Map getParams() {\n            return params;\n        }\n\n        public CVResult getcVresult() {\n            return _cVresult;\n        }\n\n        public void setcVresult(CVResult cVresult) {\n            _cVresult = cVresult;\n        }\n    }\n\n}\n","avg_line_length":44.584,"max_line_length":159,"alphanum_fraction":0.5974639632}
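The nested parameter loops above (gridSearchAdaBoost, gridSearchRandomForest, gridSearchGradientBoostedTree) all follow the same shape. Here is a stripped-down, dependency-free sketch of that shape, with a made-up scoring function standing in for the cross-validated measure; it is not the project's API.

import java.util.Collections;
import java.util.Map;
import java.util.function.BiFunction;

public final class TinyGridSearch {

    public static void main(String[] args) {
        int[] treeSizes = {10, 100, 200};
        int[] nodeSizes = {5, 10, 20};

        // Stand-in for a cross-validated measure such as F1 or MCC.
        BiFunction<Integer, Integer, Double> score =
                (treeSize, nodeSize) -> 1.0 - Math.abs(treeSize - 100) / 200.0 - Math.abs(nodeSize - 10) / 40.0;

        double best = Double.NEGATIVE_INFINITY;
        Map<String, String> bestParams = Collections.emptyMap();

        // Try every parameter combination and remember the best-scoring one.
        for (int treeSize : treeSizes) {
            for (int nodeSize : nodeSizes) {
                double s = score.apply(treeSize, nodeSize);
                if (s > best) {
                    best = s;
                    bestParams = Map.of("treeSize", String.valueOf(treeSize),
                                        "nodeSize", String.valueOf(nodeSize));
                }
            }
        }
        System.out.println("best score " + best + " with " + bestParams);
    }
}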
{"size":351,"ext":"java","lang":"Java","max_stars_count":null,"content":"package com.zandero.ffpojo.metadata.positional.annotation;\n\n\/**\n * Define the annotation place, if is on field or property (getter method).\n * @author William Miranda\n *\/\npublic enum AccessorType {\n\n\tPROPERTY, FIELD;\n\n\tpublic boolean isByField(){\n\t\treturn FIELD.equals(this);\n\t}\n\n\tpublic boolean isByProperty(){\n\t\treturn PROPERTY.equals(this);\n\t}\n\n\n}\n","avg_line_length":16.7142857143,"max_line_length":75,"alphanum_fraction":0.7264957265}
{"size":1006,"ext":"java","lang":"Java","max_stars_count":1.0,"content":"package com.dolphpire.api.models;\n\nimport android.util.ArrayMap;\n\nimport java.io.Serializable;\nimport java.util.ArrayList;\nimport java.util.Map;\n\npublic class SyncIGPost implements Serializable\n{\n\n    private IGPostModel mIGPostModel;\n    private Map INSTANCES = new ArrayMap<>();\n\n    public IGPostModel getPost()\n    {\n        return this.mIGPostModel;\n    }\n\n    public void setNewPost(IGPostModel mIGPostModel)\n    {\n        ArrayList data = new ArrayList<>(INSTANCES.values());\n        for (int i = 0; i < data.size(); i++)\n        {\n            data.get(i).onChange(mIGPostModel);\n        }\n        this.mIGPostModel = mIGPostModel;\n    }\n\n    public void removeInstance(String TAG)\n    {\n        this.INSTANCES.remove(TAG);\n    }\n\n    public void setListener(SyncListener listener, String TAG)\n    {\n        this.INSTANCES.put(String.valueOf(TAG), listener);\n    }\n\n    public interface SyncListener\n    {\n        void onChange(IGPostModel mIGPostModel);\n    }\n\n}","avg_line_length":22.3555555556,"max_line_length":75,"alphanum_fraction":0.6560636183}
{"size":822,"ext":"java","lang":"Java","max_stars_count":null,"content":"package com.coolweather.android.db;\n\nimport org.litepal.crud.LitePalSupport;\n\npublic class City extends LitePalSupport {\n    private int id;\n    private String cityName;\/\/\u5e02\u7684\u540d\u5b57\n    private int cityCode;\/\/\u5e02\u7684\u4ee3\u53f7\n    private int provinceId;\/\/\u5e02\u6240\u5c5e\u7701\u7684id\u503c\n\n    public int getId() {\n        return id;\n    }\n\n    public void setId(int id) {\n        this.id = id;\n    }\n\n    public String getCityName() {\n        return cityName;\n    }\n\n    public void setCityName(String cityName) {\n        this.cityName = cityName;\n    }\n\n    public int getCityCode() {\n        return cityCode;\n    }\n\n    public void setCityCode(int cityCode) {\n        this.cityCode = cityCode;\n    }\n\n    public int getProvinceId() {\n        return provinceId;\n    }\n\n    public void setProvinceId(int provinceId) {\n        this.provinceId = provinceId;\n    }\n}\n","avg_line_length":19.1162790698,"max_line_length":47,"alphanum_fraction":0.6240875912}
{"size":6019,"ext":"java","lang":"Java","max_stars_count":2.0,"content":"package com.mockservice.service.route;\n\nimport com.mockservice.domain.Route;\nimport com.mockservice.repository.ConfigObserver;\nimport com.mockservice.repository.ConfigRepository;\nimport com.mockservice.repository.RouteObserver;\nimport com.mockservice.template.TemplateEngine;\nimport com.mockservice.template.TokenParser;\nimport com.mockservice.util.Cache;\nimport com.mockservice.util.HashMapCache;\nimport com.mockservice.util.RandomUtils;\nimport org.springframework.stereotype.Service;\nimport org.springframework.web.bind.annotation.RequestMethod;\n\nimport java.io.IOException;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Optional;\nimport java.util.concurrent.ConcurrentHashMap;\nimport java.util.function.BiConsumer;\nimport java.util.stream.Collectors;\n\n@Service\npublic class RouteServiceImpl implements RouteService, ConfigObserver, RouteObserver {\n\n    private final ConfigRepository configRepository;\n    private final RouteMapper routeMapper;\n    private final RandomUtils randomUtils;\n\n    private final Cache> routeVariablesCache;\n    private final Map> routesVariablesValues = new ConcurrentHashMap<>();\n\n    public RouteServiceImpl(ConfigRepository configRepository,\n                            RouteMapper routeMapper,\n                            RandomUtils randomUtils,\n                            TemplateEngine templateEngine\n    ) {\n        this.configRepository = configRepository;\n        this.routeMapper = routeMapper;\n        this.randomUtils = randomUtils;\n\n        routeVariablesCache = new HashMapCache<>(r ->\n            TokenParser.tokenize(r.getResponse()).stream()\n                .filter(TokenParser::isToken)\n                .map(TokenParser::parseToken)\n                .filter(args -> !templateEngine.isFunction(args[0]))\n                .map(args -> {\n                    RouteVariable variable = new RouteVariable().setName(args[0]);\n                    if (args.length > 1) {\n                        variable.setDefaultValue(args[1]);\n                    }\n                    return variable;\n                })\n                .distinct()\n                .collect(Collectors.toList())\n        );\n    }\n\n    @Override\n    public Optional getEnabledRoute(Route route) {\n        return configRepository\n            .findRoute(route)\n            .filter(r -> !r.getDisabled());\n    }\n\n    @Override\n    public Optional getRandomAltFor(RequestMethod method, String path) {\n        List alts = configRepository.findAllRoutes().stream()\n            .filter(r -> method.equals(r.getMethod())\n                && path.equals(r.getPath())\n                && !r.getDisabled())\n            .map(Route::getAlt)\n            .collect(Collectors.toList());\n        if (alts.isEmpty()) {\n            return Optional.empty();\n        }\n        if (alts.size() == 1) {\n            return Optional.of(alts.get(0));\n        }\n        return Optional.of(alts.get(randomUtils.rnd(alts.size())));\n    }\n\n    \/\/----------------------------------------------------------------------------------\n\n    @Override\n    public List getRoutes() {\n        BiConsumer postProcess = (route, dto) -> dto.setVariables(variablesFromRoute(route));\n        return routeMapper.toDto(configRepository.findAllRoutes(), postProcess);\n    }\n\n    private List variablesFromRoute(Route route) {\n        List routeVariables = routeVariablesCache.get(route);\n        
routeVariables.forEach(v -> {\n            Map routeVariablesValues = routesVariablesValues.get(route);\n            if (routeVariablesValues != null) {\n                v.setValue(routeVariablesValues.get(v.getName()));\n            }\n        });\n        return routeVariables;\n    }\n\n    @Override\n    public synchronized void putRoute(RouteDto reference, RouteDto route) throws IOException {\n        Route referenceRoute = routeMapper.fromDto(reference);\n        Route newRoute = routeMapper.fromDto(route);\n        configRepository.putRoute(referenceRoute, newRoute);\n    }\n\n    @Override\n    public synchronized void putRoutes(List dtos, boolean overwrite) throws IOException {\n        List routes = routeMapper.fromDto(dtos);\n        configRepository.putRoutes(routes, overwrite);\n    }\n\n    @Override\n    public synchronized void deleteRoutes(List dtos) throws IOException {\n        List routes = routeMapper.fromDto(dtos);\n        configRepository.deleteRoutes(routes);\n    }\n\n    @Override\n    public RouteVariableDto setRouteVariable(RouteVariableDto variable) {\n        Route route = new Route(variable.getMethod(), variable.getPath(), variable.getAlt());\n        Map values = routesVariablesValues.computeIfAbsent(route, r -> new ConcurrentHashMap<>());\n        values.put(variable.getName(), variable.getValue());\n        return variable;\n    }\n\n    @Override\n    public RouteVariableDto clearRouteVariable(RouteVariableDto variable) {\n        Route route = new Route(variable.getMethod(), variable.getPath(), variable.getAlt());\n        Map values = routesVariablesValues.get(route);\n        if (values != null) {\n            values.remove(variable.getName());\n            if (values.isEmpty()) {\n                routesVariablesValues.remove(route);\n            }\n        }\n        return variable.setValue(null);\n    }\n\n    @Override\n    public Map getRouteVariables(Route route) {\n        return routesVariablesValues.get(route);\n    }\n\n    \/\/----------------------------------------------------------------------------------\n\n    @Override\n    public void onBeforeConfigChanged() {\n        routeVariablesCache.invalidate();\n    }\n\n    @Override\n    public void onAfterConfigChanged() {\n        \/\/ ignore\n    }\n\n    @Override\n    public void onRouteCreated(Route route) {\n        \/\/ ignore\n    }\n\n    @Override\n    public void onRouteDeleted(Route route) {\n        routeVariablesCache.evict(route);\n    }\n}\n","avg_line_length":35.6153846154,"max_line_length":114,"alphanum_fraction":0.6329955142}
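setRouteVariable and clearRouteVariable above rely on computeIfAbsent over a ConcurrentHashMap keyed by route. The following dependency-free sketch isolates just that pattern, with String keys standing in for Route objects.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public final class PerRouteVariables {

    private final Map<String, Map<String, String>> values = new ConcurrentHashMap<>();

    public void set(String route, String name, String value) {
        // Lazily create the per-route map, then store the variable value.
        values.computeIfAbsent(route, r -> new ConcurrentHashMap<>()).put(name, value);
    }

    public void clear(String route, String name) {
        Map<String, String> perRoute = values.get(route);
        if (perRoute != null) {
            perRoute.remove(name);
            if (perRoute.isEmpty()) {
                values.remove(route);   // drop empty inner maps, as clearRouteVariable does
            }
        }
    }

    public Map<String, String> get(String route) {
        return values.get(route);
    }
}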
{"size":7559,"ext":"java","lang":"Java","max_stars_count":12.0,"content":"\/*\n * Licensed to Elasticsearch under one or more contributor\n * license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright\n * ownership. Elasticsearch licenses this file to you under\n * the Apache License, Version 2.0 (the \"License\"); you may\n * not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *    http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing,\n * software distributed under the License is distributed on an\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n * KIND, either express or implied.  See the License for the\n * specific language governing permissions and limitations\n * under the License.\n *\/\n\npackage org.elasticsearch.rest.action.cat;\n\nimport org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;\nimport org.elasticsearch.action.admin.cluster.node.tasks.list.TaskGroup;\nimport org.elasticsearch.client.node.NodeClient;\nimport org.elasticsearch.cluster.node.DiscoveryNode;\nimport org.elasticsearch.cluster.node.DiscoveryNodes;\nimport org.elasticsearch.common.Strings;\nimport org.elasticsearch.common.Table;\nimport org.elasticsearch.common.settings.Settings;\nimport org.elasticsearch.common.unit.TimeValue;\nimport org.elasticsearch.rest.RestController;\nimport org.elasticsearch.rest.RestRequest;\nimport org.elasticsearch.rest.RestResponse;\nimport org.elasticsearch.rest.action.RestResponseListener;\nimport org.elasticsearch.tasks.TaskInfo;\nimport org.joda.time.format.DateTimeFormat;\nimport org.joda.time.format.DateTimeFormatter;\n\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Set;\nimport java.util.function.Supplier;\n\nimport static org.elasticsearch.rest.RestRequest.Method.GET;\nimport static org.elasticsearch.rest.action.admin.cluster.RestListTasksAction.generateListTasksRequest;\n\npublic class RestTasksAction extends AbstractCatAction {\n    private final Supplier nodesInCluster;\n\n    public RestTasksAction(Settings settings, RestController controller, Supplier nodesInCluster) {\n        super(settings);\n        controller.registerHandler(GET, \"\/_cat\/tasks\", this);\n        this.nodesInCluster = nodesInCluster;\n    }\n\n    @Override\n    public String getName() {\n        return \"cat_tasks_action\";\n    }\n\n    @Override\n    protected void documentation(StringBuilder sb) {\n        sb.append(\"\/_cat\/tasks\\n\");\n    }\n\n    @Override\n    public RestChannelConsumer doCatRequest(final RestRequest request, final NodeClient client) {\n        return channel ->\n                client.admin().cluster().listTasks(generateListTasksRequest(request), new RestResponseListener(channel) {\n            @Override\n            public RestResponse buildResponse(ListTasksResponse listTasksResponse) throws Exception {\n                return RestTable.buildResponse(buildTable(request, listTasksResponse), channel);\n            }\n        });\n    }\n\n    private static final Set RESPONSE_PARAMS;\n\n    static {\n        final Set responseParams = new HashSet<>();\n        responseParams.add(\"detailed\");\n        responseParams.addAll(AbstractCatAction.RESPONSE_PARAMS);\n        RESPONSE_PARAMS = Collections.unmodifiableSet(responseParams);\n    }\n\n    @Override\n    protected Set 
responseParams() {\n        return RESPONSE_PARAMS;\n    }\n\n    @Override\n    protected Table getTableWithHeader(final RestRequest request) {\n        boolean detailed = request.paramAsBoolean(\"detailed\", false);\n        Table table = new Table();\n        table.startHeaders();\n\n        \/\/ Task main info\n        table.addCell(\"id\", \"default:false;desc:id of the task with the node\");\n        table.addCell(\"action\", \"alias:ac;desc:task action\");\n        table.addCell(\"task_id\", \"alias:ti;desc:unique task id\");\n        table.addCell(\"parent_task_id\", \"alias:pti;desc:parent task id\");\n        table.addCell(\"type\", \"alias:ty;desc:task type\");\n        table.addCell(\"start_time\", \"alias:start;desc:start time in ms\");\n        table.addCell(\"timestamp\", \"alias:ts,hms,hhmmss;desc:start time in HH:MM:SS\");\n        table.addCell(\"running_time_ns\", \"default:false;alias:time;desc:running time ns\");\n        table.addCell(\"running_time\", \"default:true;alias:time;desc:running time\");\n\n        \/\/ Node info\n        table.addCell(\"node_id\", \"default:false;alias:ni;desc:unique node id\");\n        table.addCell(\"ip\", \"default:true;alias:i;desc:ip address\");\n        table.addCell(\"port\", \"default:false;alias:po;desc:bound transport port\");\n        table.addCell(\"node\", \"default:true;alias:n;desc:node name\");\n        table.addCell(\"version\", \"default:false;alias:v;desc:es version\");\n\n        \/\/ Task detailed info\n        if (detailed) {\n            table.addCell(\"description\", \"default:true;alias:desc;desc:task action\");\n        }\n        table.endHeaders();\n        return table;\n    }\n\n    private DateTimeFormatter dateFormat = DateTimeFormat.forPattern(\"HH:mm:ss\");\n\n    private void buildRow(Table table, boolean fullId, boolean detailed, DiscoveryNodes discoveryNodes, TaskInfo taskInfo) {\n        table.startRow();\n        String nodeId = taskInfo.getTaskId().getNodeId();\n        DiscoveryNode node = discoveryNodes.get(nodeId);\n\n        table.addCell(taskInfo.getId());\n        table.addCell(taskInfo.getAction());\n        table.addCell(taskInfo.getTaskId().toString());\n        if (taskInfo.getParentTaskId().isSet()) {\n            table.addCell(taskInfo.getParentTaskId().toString());\n        } else {\n            table.addCell(\"-\");\n        }\n        table.addCell(taskInfo.getType());\n        table.addCell(taskInfo.getStartTime());\n        table.addCell(dateFormat.print(taskInfo.getStartTime()));\n        table.addCell(taskInfo.getRunningTimeNanos());\n        table.addCell(TimeValue.timeValueNanos(taskInfo.getRunningTimeNanos()).toString());\n\n        \/\/ Node information. Note that the node may be null because it has left the cluster between when we got this response and now.\n        table.addCell(fullId ? nodeId : Strings.substring(nodeId, 0, 4));\n        table.addCell(node == null ? \"-\" : node.getHostAddress());\n        table.addCell(node.getAddress().address().getPort());\n        table.addCell(node == null ? \"-\" : node.getName());\n        table.addCell(node == null ? 
\"-\" : node.getVersion().toString());\n\n        if (detailed) {\n            table.addCell(taskInfo.getDescription());\n        }\n        table.endRow();\n    }\n\n    private void buildGroups(Table table, boolean fullId, boolean detailed, List taskGroups) {\n        DiscoveryNodes discoveryNodes = nodesInCluster.get();\n        List sortedGroups = new ArrayList<>(taskGroups);\n        sortedGroups.sort((o1, o2) -> Long.compare(o1.getTaskInfo().getStartTime(), o2.getTaskInfo().getStartTime()));\n        for (TaskGroup taskGroup : sortedGroups) {\n            buildRow(table, fullId, detailed, discoveryNodes, taskGroup.getTaskInfo());\n            buildGroups(table, fullId, detailed, taskGroup.getChildTasks());\n        }\n    }\n\n    private Table buildTable(RestRequest request, ListTasksResponse listTasksResponse) {\n        boolean fullId = request.paramAsBoolean(\"full_id\", false);\n        boolean detailed = request.paramAsBoolean(\"detailed\", false);\n        Table table = getTableWithHeader(request);\n        buildGroups(table, fullId, detailed, listTasksResponse.getTaskGroups());\n        return table;\n    }\n}\n","avg_line_length":42.7062146893,"max_line_length":140,"alphanum_fraction":0.7020769943}
{"size":4522,"ext":"java","lang":"Java","max_stars_count":null,"content":"\/**\n * Copyright 2014 Cloudera Inc.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\/\npackage org.kitesdk.data.crunch;\n\nimport com.google.common.base.Preconditions;\nimport org.apache.crunch.SourceTarget;\nimport org.apache.crunch.Target;\nimport org.apache.crunch.io.CrunchOutputs;\nimport org.apache.crunch.io.FormatBundle;\nimport org.apache.crunch.io.MapReduceTarget;\nimport org.apache.crunch.io.OutputHandler;\nimport org.apache.crunch.types.Converter;\nimport org.apache.crunch.types.PType;\nimport org.apache.crunch.types.avro.AvroType;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.fs.Path;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.kitesdk.data.Dataset;\nimport org.kitesdk.data.DatasetRepositories;\nimport org.kitesdk.data.DatasetRepository;\nimport org.kitesdk.data.View;\nimport org.kitesdk.data.mapreduce.DatasetKeyInputFormat;\nimport org.kitesdk.data.mapreduce.DatasetKeyOutputFormat;\nimport org.kitesdk.data.spi.AbstractDatasetRepository;\n\nclass DatasetTarget implements MapReduceTarget {\n\n  FormatBundle formatBundle;\n\n  public DatasetTarget(Dataset dataset) {\n    this.formatBundle = FormatBundle.forOutput(DatasetKeyOutputFormat.class);\n    formatBundle.set(DatasetKeyOutputFormat.KITE_REPOSITORY_URI, getRepositoryUri(dataset));\n    formatBundle.set(DatasetKeyOutputFormat.KITE_DATASET_NAME, dataset.getName());\n\n    \/\/ TODO: replace with View#getDataset to get the top-level dataset\n    DatasetRepository repo = DatasetRepositories.open(getRepositoryUri(dataset));\n    \/\/ only set the partition dir for subpartitions\n    Dataset topLevelDataset = repo.load(dataset.getName());\n    if (topLevelDataset.getDescriptor().isPartitioned() &&\n        topLevelDataset.getDescriptor().getLocation() != null &&\n        !topLevelDataset.getDescriptor().getLocation().equals(dataset.getDescriptor().getLocation())) {\n      formatBundle.set(DatasetKeyOutputFormat.KITE_PARTITION_DIR, dataset.getDescriptor().getLocation().toString());\n    }\n  }\n\n  public DatasetTarget(View view) {\n    this.formatBundle = FormatBundle.forOutput(DatasetKeyOutputFormat.class);\n    formatBundle.set(DatasetKeyOutputFormat.KITE_REPOSITORY_URI, getRepositoryUri(view.getDataset()));\n    formatBundle.set(DatasetKeyOutputFormat.KITE_DATASET_NAME, view.getDataset().getName());\n\n    Configuration conf = new Configuration();\n    DatasetKeyOutputFormat.setView(conf, view);\n    formatBundle.set(DatasetKeyOutputFormat.KITE_CONSTRAINTS,\n        conf.get(DatasetKeyOutputFormat.KITE_CONSTRAINTS));\n  }\n\n  private String getRepositoryUri(Dataset dataset) {\n    return dataset.getDescriptor().getProperty(\n        AbstractDatasetRepository.REPOSITORY_URI_PROPERTY_NAME);\n  }\n\n  @Override\n  public Target outputConf(String key, String value) {\n    formatBundle.set(key, value);\n    return this;\n  }\n\n  @Override\n  public boolean handleExisting(WriteMode writeMode, long l, 
Configuration entries) {\n    \/\/ currently don't check for existing outputs\n    return false;\n  }\n\n  @Override\n  public boolean accept(OutputHandler handler, PType ptype) {\n    if (!(ptype instanceof AvroType)) {\n      return false;\n    }\n    handler.configure(this, ptype);\n    return true;\n  }\n\n  @Override\n  @SuppressWarnings(\"unchecked\")\n  public Converter getConverter(PType ptype) {\n    return new KeyConverter((AvroType) ptype);\n  }\n\n  @Override\n  public  SourceTarget asSourceTarget(PType tpType) {\n    return null;\n  }\n\n  @Override\n  @SuppressWarnings(\"unchecked\")\n  public void configureForMapReduce(Job job, PType ptype, Path outputPath, String name) {\n\n    Preconditions.checkNotNull(name, \"Output name should not be null\"); \/\/ see CRUNCH-82\n\n    Converter converter = getConverter(ptype);\n    Class keyClass = converter.getKeyClass();\n    Class valueClass = Void.class;\n\n    CrunchOutputs.addNamedOutput(job, name, formatBundle, keyClass, valueClass);\n    job.setOutputFormatClass(formatBundle.getFormatClass());\n    formatBundle.configure(job.getConfiguration());\n  }\n}\n","avg_line_length":37.0655737705,"max_line_length":116,"alphanum_fraction":0.7593984962}
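configureForMapReduce above funnels everything through a Crunch FormatBundle: the output format class and its extra settings travel together and are copied into the job at configure time. Here is a stripped-down sketch of that pattern with a stock Hadoop output format; the extra configuration key is a placeholder.

import org.apache.crunch.io.FormatBundle;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

public class FormatBundleDemo {

    public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration(), "format-bundle-demo");

        // Bundle the output format together with the settings it needs.
        FormatBundle bundle = FormatBundle.forOutput(TextOutputFormat.class);
        bundle.set("example.setting", "value");

        // Wire the bundle into the job, as DatasetTarget does in configureForMapReduce.
        job.setOutputFormatClass(bundle.getFormatClass());
        bundle.configure(job.getConfiguration());

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);
        System.out.println(job.getConfiguration().get("example.setting")); // value
    }
}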
{"size":347,"ext":"java","lang":"Java","max_stars_count":null,"content":"package fr.sii.ogham.assertion.hamcrest;\n\n\/**\n * Interface for matchers that are able to provide a message to provide a\n * detailed comparison message.\n * \n * @author Aur\u00e9lien Baudet\n *\n *\/\npublic interface ComparisonAwareMatcher {\n\t\/**\n\t * Generate the comparison message\n\t * \n\t * @return the detailed message\n\t *\/\n\tString comparisonMessage();\n}\n","avg_line_length":19.2777777778,"max_line_length":73,"alphanum_fraction":0.7175792507}
{"size":4846,"ext":"java","lang":"Java","max_stars_count":null,"content":"package org.opensrp.register.service.handler;\n\nimport static org.mockito.Mockito.inOrder;\nimport static org.mockito.MockitoAnnotations.initMocks;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.List;\n\nimport org.apache.commons.io.FileUtils;\nimport org.codehaus.jackson.map.ObjectMapper;\nimport org.joda.time.DateTime;\nimport org.joda.time.LocalDate;\nimport org.json.JSONArray;\nimport org.json.JSONException;\nimport org.json.JSONObject;\nimport org.junit.Before;\nimport org.junit.Ignore;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.mockito.InOrder;\nimport org.mockito.Mock;\nimport org.motechproject.scheduletracking.api.domain.json.ScheduleRecord;\nimport org.motechproject.scheduletracking.api.repository.AllSchedules;\nimport org.opensrp.domain.Client;\nimport org.opensrp.domain.Event;\nimport org.opensrp.domain.Obs;\nimport org.opensrp.register.service.handler.BaseScheduleHandler.ActionType;\nimport org.opensrp.register.service.scheduling.AnteNatalCareSchedulesService;\nimport org.opensrp.repository.AllClients;\nimport org.opensrp.scheduler.HealthSchedulerService;\nimport org.powermock.core.classloader.annotations.PowerMockIgnore;\nimport org.powermock.core.classloader.annotations.PrepareForTest;\nimport org.powermock.modules.junit4.PowerMockRunner;\nimport org.springframework.beans.factory.annotation.Autowired;\nimport org.springframework.core.io.DefaultResourceLoader;\nimport org.springframework.core.io.ResourceLoader;\nimport org.springframework.test.context.ContextConfiguration;\nimport org.springframework.test.context.junit4.SpringJUnit4ClassRunner;\n\n@RunWith(PowerMockRunner.class)\n@PowerMockIgnore({ \"org.apache.log4j.*\", \"org.apache.commons.logging.*\" })\npublic class ANCScheduleHandlerTest extends TestResourceLoader {\t\n    @Mock\n    private AnteNatalCareSchedulesService anteNatalCareSchedulesService;    \n    private ANCScheduleHandler aNCScheduleHandler;\n    @Mock\n    private HealthSchedulerService scheduler;\n    private static final String JSON_KEY_HANDLER = \"handler\";\t\n    private static final String JSON_KEY_TYPES = \"types\";\t\n    private static final String JSON_KEY_SCHEDULE_NAME = \"name\";\t\n    private static final String JSON_KEY_EVENTS = \"events\";\t\n\t\n    @Before\n    public void setUp() throws Exception {\n        initMocks(this);\n        aNCScheduleHandler = new ANCScheduleHandler(anteNatalCareSchedulesService);\n    }   \n    \n    @Test\n    public void shouldTestANCScheduleHandler() throws Exception {\n        Event event = geteventOfVaccination();\n        JSONArray schedulesJsonObject = new JSONArray(\"[\" + getFile() + \"]\");\n        String scheduleName = null;\n        for (int i = 0; i < schedulesJsonObject.length(); i++) {\n            JSONObject scheduleJsonObject = schedulesJsonObject.getJSONObject(i);            \n            JSONArray eventsJsonArray = scheduleJsonObject.getJSONArray(JSON_KEY_EVENTS);                      \n            for (int j = 0; j < eventsJsonArray.length(); j++) {\n                JSONObject scheduleConfigEvent = eventsJsonArray.getJSONObject(j);\n                JSONArray eventTypesJsonArray = scheduleConfigEvent.getJSONArray(JSON_KEY_TYPES);\n                List eventsList = jsonArrayToList(eventTypesJsonArray);                \n                if (eventsList.contains(event.getEventType())) {  \n                \tString action = 
aNCScheduleHandler.getAction(scheduleConfigEvent);                \t\n                \tString milestone = aNCScheduleHandler.getMilestone(scheduleConfigEvent);\n                    LocalDate  date = LocalDate.parse(\"2016-07-10\");\n                \tif (milestone.equalsIgnoreCase(\"opv2\") && action.equalsIgnoreCase(ActionType.enroll.toString())) {\n                \t\taNCScheduleHandler.handle(event,scheduleConfigEvent, scheduleName);\n                        InOrder inOrder = inOrder(anteNatalCareSchedulesService);                        \n                        inOrder.verify(anteNatalCareSchedulesService).enrollMother(event.getBaseEntityId(),\"Ante Natal Care Reminder Visit\", LocalDate.parse(\"2016-07-10\"),\n                            event.getId());                       \n                    }\n                    else if (milestone.equalsIgnoreCase(\"opv2\") && action.equalsIgnoreCase(ActionType.fulfill.toString())) {\n                    \taNCScheduleHandler.handle(event,scheduleConfigEvent, scheduleName);\n                        InOrder inOrder = inOrder(anteNatalCareSchedulesService);                                                \n                        inOrder.verify(anteNatalCareSchedulesService).fullfillMilestone(event.getBaseEntityId(), event.getProviderId(), \"Ante Natal Care Reminder Visit\", date, event.getId()); \n                    } else {\n                    \t\n                    }\n                }\t\t\t\t\n            }\t\t\t\n        }\t\t\n    }    \n   \n}\n","avg_line_length":50.4791666667,"max_line_length":192,"alphanum_fraction":0.6939744119}
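The assertions above hinge on Mockito's InOrder verification. Here is the same pattern in isolation on a plain List mock, so the enroll/fulfill ordering check is easier to follow.

import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;

import java.util.List;

import org.mockito.InOrder;

public class InOrderDemo {

    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        List<String> schedule = mock(List.class);

        schedule.add("enroll");
        schedule.add("fulfill");

        // Verification fails if the two calls happened in the opposite order.
        InOrder inOrder = inOrder(schedule);
        inOrder.verify(schedule).add("enroll");
        inOrder.verify(schedule).add("fulfill");
        System.out.println("order verified");
    }
}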
{"size":2287,"ext":"java","lang":"Java","max_stars_count":1.0,"content":"package seedu.address.model;\n\nimport static java.util.Objects.requireNonNull;\n\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.Objects;\n\nimport seedu.address.commons.core.GuiSettings;\n\n\/**\n * Represents User's preferences.\n *\/\npublic class UserPrefs implements ReadOnlyUserPrefs {\n\n    private GuiSettings guiSettings = new GuiSettings();\n    private Path bookShelfFilePath = Paths.get(\"data\", \"bookshelf.json\");\n\n    \/**\n     * Creates a {@code UserPrefs} with default values.\n     *\/\n    public UserPrefs() {}\n\n    \/**\n     * Creates a {@code UserPrefs} with the prefs in {@code userPrefs}.\n     *\/\n    public UserPrefs(ReadOnlyUserPrefs userPrefs) {\n        this();\n        resetData(userPrefs);\n    }\n\n    \/**\n     * Resets the existing data of this {@code UserPrefs} with {@code newUserPrefs}.\n     *\/\n    public void resetData(ReadOnlyUserPrefs newUserPrefs) {\n        requireNonNull(newUserPrefs);\n        setGuiSettings(newUserPrefs.getGuiSettings());\n        setBookShelfFilePath(newUserPrefs.getBookShelfFilePath());\n    }\n\n    public GuiSettings getGuiSettings() {\n        return guiSettings;\n    }\n\n    public void setGuiSettings(GuiSettings guiSettings) {\n        requireNonNull(guiSettings);\n        this.guiSettings = guiSettings;\n    }\n\n    public Path getBookShelfFilePath() {\n        return bookShelfFilePath;\n    }\n\n    public void setBookShelfFilePath(Path bookShelfFilePath) {\n        requireNonNull(bookShelfFilePath);\n        this.bookShelfFilePath = bookShelfFilePath;\n    }\n\n    @Override\n    public boolean equals(Object other) {\n        if (other == this) {\n            return true;\n        }\n        if (!(other instanceof UserPrefs)) { \/\/this handles null as well.\n            return false;\n        }\n\n        UserPrefs o = (UserPrefs) other;\n\n        return guiSettings.equals(o.guiSettings)\n                && bookShelfFilePath.equals((o.bookShelfFilePath));\n    }\n\n    @Override\n    public int hashCode() {\n        return Objects.hash(guiSettings, bookShelfFilePath);\n    }\n\n    @Override\n    public String toString() {\n        StringBuilder sb = new StringBuilder();\n        sb.append(\"Gui Settings : \" + guiSettings);\n        sb.append(\"\\nLocal data file location : \" + bookShelfFilePath);\n        return sb.toString();\n    }\n\n}\n","avg_line_length":25.9886363636,"max_line_length":84,"alphanum_fraction":0.6493222562}
{"size":16713,"ext":"java","lang":"Java","max_stars_count":null,"content":"\/* Copyright 2002-2021 CS GROUP\n * Licensed to CS GROUP (CS) under one or more\n * contributor license agreements.  See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * CS licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License.  You may obtain a copy of the License at\n *\n *   http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\/\npackage org.orekit.attitudes;\n\n\nimport org.hipparchus.Field;\nimport org.hipparchus.RealFieldElement;\nimport org.hipparchus.geometry.euclidean.threed.FieldRotation;\nimport org.hipparchus.geometry.euclidean.threed.FieldVector3D;\nimport org.hipparchus.geometry.euclidean.threed.Rotation;\nimport org.hipparchus.geometry.euclidean.threed.Vector3D;\nimport org.hipparchus.util.Decimal64Field;\nimport org.hipparchus.util.FastMath;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.orekit.Utils;\nimport org.orekit.frames.Frame;\nimport org.orekit.frames.FramesFactory;\nimport org.orekit.orbits.FieldKeplerianOrbit;\nimport org.orekit.orbits.FieldOrbit;\nimport org.orekit.orbits.KeplerianOrbit;\nimport org.orekit.orbits.Orbit;\nimport org.orekit.orbits.PositionAngle;\nimport org.orekit.propagation.FieldPropagator;\nimport org.orekit.propagation.FieldSpacecraftState;\nimport org.orekit.propagation.Propagator;\nimport org.orekit.propagation.SpacecraftState;\nimport org.orekit.propagation.analytical.FieldKeplerianPropagator;\nimport org.orekit.propagation.analytical.KeplerianPropagator;\nimport org.orekit.time.AbsoluteDate;\nimport org.orekit.time.DateComponents;\nimport org.orekit.time.FieldAbsoluteDate;\nimport org.orekit.time.TimeComponents;\nimport org.orekit.time.TimeScalesFactory;\nimport org.orekit.utils.AngularCoordinates;\nimport org.orekit.utils.FieldAngularCoordinates;\nimport org.orekit.utils.FieldPVCoordinates;\nimport org.orekit.utils.PVCoordinates;\n\npublic class FixedRateTest {\n\n    @Test\n    public void testZeroRate() {\n        AbsoluteDate date = new AbsoluteDate(new DateComponents(2004, 3, 2),\n                                             new TimeComponents(13, 17, 7.865),\n                                             TimeScalesFactory.getUTC());\n        final Frame frame = FramesFactory.getEME2000();\n        FixedRate law = new FixedRate(new Attitude(date, frame,\n                                                   new Rotation(0.48, 0.64, 0.36, 0.48, false),\n                                                   Vector3D.ZERO, Vector3D.ZERO));\n        PVCoordinates pv =\n            new PVCoordinates(new Vector3D(28812595.32012577, 5948437.4640250085, 0),\n                              new Vector3D(0, 0, 3680.853673522056));\n        Orbit orbit = new KeplerianOrbit(pv, frame, date, 3.986004415e14);\n        Rotation attitude0 = law.getAttitude(orbit, date, frame).getRotation();\n        Assert.assertEquals(0, Rotation.distance(attitude0, law.getReferenceAttitude().getRotation()), 1.0e-10);\n        Rotation attitude1 = 
law.getAttitude(orbit.shiftedBy(10.0), date.shiftedBy(10.0), frame).getRotation();\n        Assert.assertEquals(0, Rotation.distance(attitude1, law.getReferenceAttitude().getRotation()), 1.0e-10);\n        Rotation attitude2 = law.getAttitude(orbit.shiftedBy(20.0), date.shiftedBy(20.0), frame).getRotation();\n        Assert.assertEquals(0, Rotation.distance(attitude2, law.getReferenceAttitude().getRotation()), 1.0e-10);\n\n    }\n\n    @Test\n    public void testNonZeroRate() {\n        final AbsoluteDate date = new AbsoluteDate(new DateComponents(2004, 3, 2),\n                                                   new TimeComponents(13, 17, 7.865),\n                                                   TimeScalesFactory.getUTC());\n        final double rate = 2 * FastMath.PI \/ (12 * 60);\n        final Frame frame = FramesFactory.getEME2000();\n        final Frame gcrf  = FramesFactory.getGCRF();\n        FixedRate law = new FixedRate(new Attitude(date, frame,\n                                                   new Rotation(0.48, 0.64, 0.36, 0.48, false),\n                                                   new Vector3D(rate, Vector3D.PLUS_K), Vector3D.ZERO));\n        final Rotation ref = law.getReferenceAttitude().getRotation().applyTo(gcrf.getTransformTo(frame, date).getRotation());\n        PVCoordinates pv =\n            new PVCoordinates(new Vector3D(28812595.32012577, 5948437.4640250085, 0),\n                              new Vector3D(0, 0, 3680.853673522056));\n        Orbit orbit = new KeplerianOrbit(pv, FramesFactory.getEME2000(), date, 3.986004415e14);\n        Rotation attitude0 = law.getAttitude(orbit, date, gcrf).getRotation();\n        Assert.assertEquals(0, Rotation.distance(attitude0, ref), 1.0e-10);\n        Rotation attitude1 = law.getAttitude(orbit.shiftedBy(10.0), date.shiftedBy(10.0), gcrf).getRotation();\n        Assert.assertEquals(10 * rate, Rotation.distance(attitude1, ref), 1.0e-10);\n        Rotation attitude2 = law.getAttitude(orbit.shiftedBy(-20.0), date.shiftedBy(-20.0), gcrf).getRotation();\n        Assert.assertEquals(20 * rate, Rotation.distance(attitude2, ref), 1.0e-10);\n        Assert.assertEquals(30 * rate, Rotation.distance(attitude2, attitude1), 1.0e-10);\n        Rotation attitude3 = law.getAttitude(orbit.shiftedBy(0.0), date, frame).getRotation();\n        Assert.assertEquals(0, Rotation.distance(attitude3, law.getReferenceAttitude().getRotation()), 1.0e-10);\n\n    }\n\n    @Test\n    public void testSpin() {\n\n        AbsoluteDate date = new AbsoluteDate(new DateComponents(1970, 01, 01),\n                                             new TimeComponents(3, 25, 45.6789),\n                                             TimeScalesFactory.getUTC());\n\n        final double rate = 2 * FastMath.PI \/ (12 * 60);\n        AttitudeProvider law =\n            new FixedRate(new Attitude(date, FramesFactory.getEME2000(),\n                                       new Rotation(0.48, 0.64, 0.36, 0.48, false),\n                                       new Vector3D(rate, Vector3D.PLUS_K),\n                                       Vector3D.ZERO));\n\n        KeplerianOrbit orbit =\n            new KeplerianOrbit(7178000.0, 1.e-4, FastMath.toRadians(50.),\n                              FastMath.toRadians(10.), FastMath.toRadians(20.),\n                              FastMath.toRadians(30.), PositionAngle.MEAN,\n                              FramesFactory.getEME2000(), date, 3.986004415e14);\n\n        Propagator propagator = new KeplerianPropagator(orbit, law);\n\n        double h 
= 0.01;\n        SpacecraftState sMinus = propagator.propagate(date.shiftedBy(-h));\n        SpacecraftState s0     = propagator.propagate(date);\n        SpacecraftState sPlus  = propagator.propagate(date.shiftedBy(h));\n\n        \/\/ check spin is consistent with attitude evolution\n        double errorAngleMinus     = Rotation.distance(sMinus.shiftedBy(h).getAttitude().getRotation(),\n                                                       s0.getAttitude().getRotation());\n        double evolutionAngleMinus = Rotation.distance(sMinus.getAttitude().getRotation(),\n                                                       s0.getAttitude().getRotation());\n        Assert.assertEquals(0.0, errorAngleMinus, 1.0e-6 * evolutionAngleMinus);\n        double errorAnglePlus      = Rotation.distance(s0.getAttitude().getRotation(),\n                                                       sPlus.shiftedBy(-h).getAttitude().getRotation());\n        double evolutionAnglePlus  = Rotation.distance(s0.getAttitude().getRotation(),\n                                                       sPlus.getAttitude().getRotation());\n        Assert.assertEquals(0.0, errorAnglePlus, 1.0e-6 * evolutionAnglePlus);\n\n        Vector3D spin0 = s0.getAttitude().getSpin();\n        Vector3D reference = AngularCoordinates.estimateRate(sMinus.getAttitude().getRotation(),\n                                                             sPlus.getAttitude().getRotation(),\n                                                             2 * h);\n        Assert.assertEquals(0.0, spin0.subtract(reference).getNorm(), 1.0e-14);\n\n    }\n\n    @Test\n    public void testZeroRateField() {\n        doTestZeroRate(Decimal64Field.getInstance());\n    }\n\n    private <T extends RealFieldElement<T>> void doTestZeroRate(final Field<T> field)\n        {\n        final T zero = field.getZero();\n        FieldAbsoluteDate<T> date = new FieldAbsoluteDate<>(field,\n                                                            new DateComponents(2004, 3, 2),\n                                                            new TimeComponents(13, 17, 7.865),\n                                                            TimeScalesFactory.getUTC());\n        final Frame frame = FramesFactory.getEME2000();\n        final Frame gcrf  = FramesFactory.getGCRF();\n        FixedRate law = new FixedRate(new Attitude(date.toAbsoluteDate(), frame,\n                                                   new Rotation(0.48, 0.64, 0.36, 0.48, false),\n                                                   Vector3D.ZERO, Vector3D.ZERO));\n        final Rotation ref = law.getReferenceAttitude().getRotation().applyTo(gcrf.getTransformTo(frame, date.toAbsoluteDate()).getRotation());\n        FieldPVCoordinates<T> pv =\n            new FieldPVCoordinates<>(field.getOne(),\n                                     new PVCoordinates(new Vector3D(28812595.32012577, 5948437.4640250085, 0),\n                                                       new Vector3D(0, 0, 3680.853673522056)));\n        FieldOrbit<T> orbit = new FieldKeplerianOrbit<>(pv, frame, date, zero.add(3.986004415e14));\n        FieldRotation<T> attitude0 = law.getAttitude(orbit, date, gcrf).getRotation();\n        Assert.assertEquals(0, Rotation.distance(attitude0.toRotation(), ref), 1.0e-10);\n        FieldRotation<T> attitude1 = law.getAttitude(orbit.shiftedBy(zero.add(10.0)), date.shiftedBy(10.0), gcrf).getRotation();\n        Assert.assertEquals(0, Rotation.distance(attitude1.toRotation(), ref), 1.0e-10);\n        FieldRotation<T> attitude2 = law.getAttitude(orbit.shiftedBy(zero.add(20.0)), date.shiftedBy(20.0), gcrf).getRotation();\n        Assert.assertEquals(0, Rotation.distance(attitude2.toRotation(), ref), 1.0e-10);\n\n    }\n\n    @Test\n    public void testNonZeroRateField() {\n        doTestNonZeroRate(Decimal64Field.getInstance());\n    }\n\n    private <T extends RealFieldElement<T>> void doTestNonZeroRate(final Field<T> field) {\n        final T zero = field.getZero();\n        FieldAbsoluteDate<T> date = new FieldAbsoluteDate<>(field,\n                                                            new DateComponents(2004, 3, 2),\n                                                            new TimeComponents(13, 17, 7.865),\n                                                            TimeScalesFactory.getUTC());\n        final T rate = zero.add(2 * FastMath.PI \/ (12 * 60));\n        final Frame frame = FramesFactory.getEME2000();\n        FixedRate law = new FixedRate(new Attitude(date.toAbsoluteDate(), frame,\n                                                   new Rotation(0.48, 0.64, 0.36, 0.48, false),\n                                                   new Vector3D(rate.getReal(), Vector3D.PLUS_K), Vector3D.ZERO));\n        FieldPVCoordinates<T> pv =\n                        new FieldPVCoordinates<>(field.getOne(),\n                                                 new PVCoordinates(new Vector3D(28812595.32012577, 5948437.4640250085, 0),\n                                                                   new Vector3D(0, 0, 3680.853673522056)));\n        FieldOrbit<T> orbit = new FieldKeplerianOrbit<>(pv, FramesFactory.getEME2000(), date, zero.add(3.986004415e14));\n        FieldRotation<T> attitude0 = law.getAttitude(orbit, date, frame).getRotation();\n        Assert.assertEquals(0, Rotation.distance(attitude0.toRotation(), law.getReferenceAttitude().getRotation()), 1.0e-10);\n        FieldRotation<T> attitude1 = law.getAttitude(orbit.shiftedBy(zero.add(10.0)), date.shiftedBy(10.0), frame).getRotation();\n        Assert.assertEquals(10 * rate.getReal(), Rotation.distance(attitude1.toRotation(), law.getReferenceAttitude().getRotation()), 1.0e-10);\n        FieldRotation<T> attitude2 = law.getAttitude(orbit.shiftedBy(zero.add(-20.0)), date.shiftedBy(-20.0), frame).getRotation();\n        Assert.assertEquals(20 * rate.getReal(), Rotation.distance(attitude2.toRotation(), law.getReferenceAttitude().getRotation()), 1.0e-10);\n        Assert.assertEquals(30 * rate.getReal(), Rotation.distance(attitude2.toRotation(), attitude1.toRotation()), 1.0e-10);\n        FieldRotation<T> attitude3 = law.getAttitude(orbit.shiftedBy(zero.add(0.0)), date, frame).getRotation();\n        Assert.assertEquals(0, Rotation.distance(attitude3.toRotation(), law.getReferenceAttitude().getRotation()), 1.0e-10);\n\n    }\n\n    @Test\n    public void testSpinField() {\n        doTestSpin(Decimal64Field.getInstance());\n    }\n\n    private <T extends RealFieldElement<T>> void doTestSpin(final Field<T> field) {\n\n        final T zero = field.getZero();\n        FieldAbsoluteDate<T> date = new FieldAbsoluteDate<>(field,\n                                                            new DateComponents(1970, 01, 01),\n                                                            new TimeComponents(3, 25, 45.6789),\n                                                            TimeScalesFactory.getUTC());\n\n        final T rate = zero.add(2 * FastMath.PI \/ (12 * 60));\n        AttitudeProvider law =\n                        new FixedRate(new Attitude(date.toAbsoluteDate(), FramesFactory.getEME2000(),\n                                                   new Rotation(0.48, 0.64, 0.36, 0.48, false),\n                                                   new Vector3D(rate.getReal(), Vector3D.PLUS_K),\n                                                   Vector3D.ZERO));\n\n        FieldKeplerianOrbit<T> orbit =\n            new FieldKeplerianOrbit<>(zero.add(7178000.0),\n                                      zero.add(1.e-4),\n                                      zero.add(FastMath.toRadians(50.)),\n                                      zero.add(FastMath.toRadians(10.)),\n                                      zero.add(FastMath.toRadians(20.)),\n                                      zero.add(FastMath.toRadians(30.)), PositionAngle.MEAN,\n                                      FramesFactory.getEME2000(), date, zero.add(3.986004415e14));\n\n        FieldPropagator<T> propagator = new FieldKeplerianPropagator<>(orbit, law);\n\n        T h = zero.add(0.01);\n        FieldSpacecraftState<T> sMinus = propagator.propagate(date.shiftedBy(h.negate()));\n        FieldSpacecraftState<T> s0     = propagator.propagate(date);\n        FieldSpacecraftState<T> sPlus  = propagator.propagate(date.shiftedBy(h));\n\n        \/\/ check spin is consistent with attitude evolution\n        double errorAngleMinus     = FieldRotation.distance(sMinus.shiftedBy(h).getAttitude().getRotation(),\n                                                            s0.getAttitude().getRotation()).getReal();\n        double evolutionAngleMinus = FieldRotation.distance(sMinus.getAttitude().getRotation(),\n                                                            s0.getAttitude().getRotation()).getReal();\n        Assert.assertEquals(0.0, errorAngleMinus, 1.0e-6 * evolutionAngleMinus);\n        double errorAnglePlus      = FieldRotation.distance(s0.getAttitude().getRotation(),\n                                                            sPlus.shiftedBy(h.negate()).getAttitude().getRotation()).getReal();\n        double evolutionAnglePlus  = FieldRotation.distance(s0.getAttitude().getRotation(),\n                                                            sPlus.getAttitude().getRotation()).getReal();\n        Assert.assertEquals(0.0, errorAnglePlus, 1.0e-6 * evolutionAnglePlus);\n\n        FieldVector3D<T> spin0 = s0.getAttitude().getSpin();\n        FieldVector3D<T> reference = FieldAngularCoordinates.estimateRate(sMinus.getAttitude().getRotation(),\n                                                                          sPlus.getAttitude().getRotation(),\n                                                                          h.multiply(2));\n        Assert.assertEquals(0.0, spin0.subtract(reference).getNorm().getReal(), 1.0e-14);\n\n    }\n\n    @Before\n    public void setUp() {\n        Utils.setDataRoot(\"regular-data\");\n    }\n\n}\n\n","avg_line_length":59.2659574468,"max_line_length":143,"alphanum_fraction":0.5951056064}
{"size":6890,"ext":"java","lang":"Java","max_stars_count":null,"content":"\/*\n * Copyright 2013-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n * \n * Licensed under the Apache License, Version 2.0 (the \"License\"). You may not use this file except in compliance with\n * the License. A copy of the License is located at\n * \n * http:\/\/aws.amazon.com\/apache2.0\n * \n * or in the \"license\" file accompanying this file. This file is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR\n * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions\n * and limitations under the License.\n *\/\npackage com.amazonaws.services.greengrass.model;\n\nimport java.io.Serializable;\nimport javax.annotation.Generated;\n\nimport com.amazonaws.AmazonWebServiceRequest;\n\n\/**\n * \n * @see AWS API Documentation<\/a>\n *\/\n@Generated(\"com.amazonaws:aws-java-sdk-code-generator\")\npublic class ListBulkDeploymentDetailedReportsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {\n\n    \/** The ID of the bulk deployment. *\/\n    private String bulkDeploymentId;\n    \/** The maximum number of results to be returned per request. *\/\n    private String maxResults;\n    \/** The token for the next set of results, or ''null'' if there are no additional results. *\/\n    private String nextToken;\n\n    \/**\n     * The ID of the bulk deployment.\n     * \n     * @param bulkDeploymentId\n     *        The ID of the bulk deployment.\n     *\/\n\n    public void setBulkDeploymentId(String bulkDeploymentId) {\n        this.bulkDeploymentId = bulkDeploymentId;\n    }\n\n    \/**\n     * The ID of the bulk deployment.\n     * \n     * @return The ID of the bulk deployment.\n     *\/\n\n    public String getBulkDeploymentId() {\n        return this.bulkDeploymentId;\n    }\n\n    \/**\n     * The ID of the bulk deployment.\n     * \n     * @param bulkDeploymentId\n     *        The ID of the bulk deployment.\n     * @return Returns a reference to this object so that method calls can be chained together.\n     *\/\n\n    public ListBulkDeploymentDetailedReportsRequest withBulkDeploymentId(String bulkDeploymentId) {\n        setBulkDeploymentId(bulkDeploymentId);\n        return this;\n    }\n\n    \/**\n     * The maximum number of results to be returned per request.\n     * \n     * @param maxResults\n     *        The maximum number of results to be returned per request.\n     *\/\n\n    public void setMaxResults(String maxResults) {\n        this.maxResults = maxResults;\n    }\n\n    \/**\n     * The maximum number of results to be returned per request.\n     * \n     * @return The maximum number of results to be returned per request.\n     *\/\n\n    public String getMaxResults() {\n        return this.maxResults;\n    }\n\n    \/**\n     * The maximum number of results to be returned per request.\n     * \n     * @param maxResults\n     *        The maximum number of results to be returned per request.\n     * @return Returns a reference to this object so that method calls can be chained together.\n     *\/\n\n    public ListBulkDeploymentDetailedReportsRequest withMaxResults(String maxResults) {\n        setMaxResults(maxResults);\n        return this;\n    }\n\n    \/**\n     * The token for the next set of results, or ''null'' if there are no additional results.\n     * \n     * @param nextToken\n     *        The token for the next set of results, or ''null'' if there are no additional 
results.\n     *\/\n\n    public void setNextToken(String nextToken) {\n        this.nextToken = nextToken;\n    }\n\n    \/**\n     * The token for the next set of results, or ''null'' if there are no additional results.\n     * \n     * @return The token for the next set of results, or ''null'' if there are no additional results.\n     *\/\n\n    public String getNextToken() {\n        return this.nextToken;\n    }\n\n    \/**\n     * The token for the next set of results, or ''null'' if there are no additional results.\n     * \n     * @param nextToken\n     *        The token for the next set of results, or ''null'' if there are no additional results.\n     * @return Returns a reference to this object so that method calls can be chained together.\n     *\/\n\n    public ListBulkDeploymentDetailedReportsRequest withNextToken(String nextToken) {\n        setNextToken(nextToken);\n        return this;\n    }\n\n    \/**\n     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be\n     * redacted from this string using a placeholder value.\n     *\n     * @return A string representation of this object.\n     *\n     * @see java.lang.Object#toString()\n     *\/\n    @Override\n    public String toString() {\n        StringBuilder sb = new StringBuilder();\n        sb.append(\"{\");\n        if (getBulkDeploymentId() != null)\n            sb.append(\"BulkDeploymentId: \").append(getBulkDeploymentId()).append(\",\");\n        if (getMaxResults() != null)\n            sb.append(\"MaxResults: \").append(getMaxResults()).append(\",\");\n        if (getNextToken() != null)\n            sb.append(\"NextToken: \").append(getNextToken());\n        sb.append(\"}\");\n        return sb.toString();\n    }\n\n    @Override\n    public boolean equals(Object obj) {\n        if (this == obj)\n            return true;\n        if (obj == null)\n            return false;\n\n        if (obj instanceof ListBulkDeploymentDetailedReportsRequest == false)\n            return false;\n        ListBulkDeploymentDetailedReportsRequest other = (ListBulkDeploymentDetailedReportsRequest) obj;\n        if (other.getBulkDeploymentId() == null ^ this.getBulkDeploymentId() == null)\n            return false;\n        if (other.getBulkDeploymentId() != null && other.getBulkDeploymentId().equals(this.getBulkDeploymentId()) == false)\n            return false;\n        if (other.getMaxResults() == null ^ this.getMaxResults() == null)\n            return false;\n        if (other.getMaxResults() != null && other.getMaxResults().equals(this.getMaxResults()) == false)\n            return false;\n        if (other.getNextToken() == null ^ this.getNextToken() == null)\n            return false;\n        if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false)\n            return false;\n        return true;\n    }\n\n    @Override\n    public int hashCode() {\n        final int prime = 31;\n        int hashCode = 1;\n\n        hashCode = prime * hashCode + ((getBulkDeploymentId() == null) ? 0 : getBulkDeploymentId().hashCode());\n        hashCode = prime * hashCode + ((getMaxResults() == null) ? 0 : getMaxResults().hashCode());\n        hashCode = prime * hashCode + ((getNextToken() == null) ? 
0 : getNextToken().hashCode());\n        return hashCode;\n    }\n\n    @Override\n    public ListBulkDeploymentDetailedReportsRequest clone() {\n        return (ListBulkDeploymentDetailedReportsRequest) super.clone();\n    }\n\n}\n","avg_line_length":34.2786069652,"max_line_length":136,"alphanum_fraction":0.6461538462}
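The maxResults/nextToken pair on the request above follows the standard AWS pagination contract: each response carries a token that is fed back into the next request until it comes back null. Below is a minimal, hypothetical pagination loop; the AWSGreengrass client, AWSGreengrassClientBuilder, and the ListBulkDeploymentDetailedReportsResult type with its getNextToken()/getDeployments() accessors are assumptions based on the usual AWS SDK for Java 1.x code-generation naming and do not appear in the file above.

import com.amazonaws.services.greengrass.AWSGreengrass;
import com.amazonaws.services.greengrass.AWSGreengrassClientBuilder;
import com.amazonaws.services.greengrass.model.ListBulkDeploymentDetailedReportsRequest;
import com.amazonaws.services.greengrass.model.ListBulkDeploymentDetailedReportsResult;

public class ListBulkDeploymentDetailedReportsSketch {
    public static void main(String[] args) {
        // Assumed SDK v1 client; region and credentials come from the default provider chain.
        AWSGreengrass client = AWSGreengrassClientBuilder.defaultClient();
        String token = null;
        do {
            // Only the request class and its with* setters are taken from the file above.
            ListBulkDeploymentDetailedReportsRequest request = new ListBulkDeploymentDetailedReportsRequest()
                    .withBulkDeploymentId("example-bulk-deployment-id") // hypothetical ID
                    .withMaxResults("50")
                    .withNextToken(token);
            ListBulkDeploymentDetailedReportsResult result = client.listBulkDeploymentDetailedReports(request);
            // Consume result.getDeployments() here (accessor name assumed from the API model).
            token = result.getNextToken();
        } while (token != null);
    }
}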
{"size":2537,"ext":"java","lang":"Java","max_stars_count":4.0,"content":"package org.opengis.cite.indoorgml10;\n\nimport static org.junit.Assert.*;\n\nimport java.io.ByteArrayInputStream;\nimport java.io.ByteArrayOutputStream;\nimport java.io.IOException;\nimport java.net.URL;\nimport java.util.InvalidPropertiesFormatException;\nimport java.util.Properties;\n\nimport javax.xml.parsers.DocumentBuilder;\nimport javax.xml.parsers.DocumentBuilderFactory;\nimport javax.xml.parsers.ParserConfigurationException;\nimport javax.xml.transform.Source;\n\nimport net.sf.saxon.s9api.XdmValue;\n\nimport org.junit.Before;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.opengis.cite.indoorgml10.util.XMLUtils;\nimport org.w3c.dom.Document;\n\n\/**\n * Verifies the results of executing a test run using the main controller\n * (TestNGController).\n * \n *\/\npublic class VerifyTestNGController {\n\n    private static DocumentBuilder docBuilder;\n    private Properties testRunProps;\n\n    @BeforeClass\n    public static void initParser() throws ParserConfigurationException {\n        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();\n        dbf.setNamespaceAware(true);\n        dbf.setValidating(false);\n        dbf.setFeature(\n                \"http:\/\/apache.org\/xml\/features\/nonvalidating\/load-external-dtd\",\n                false);\n        docBuilder = dbf.newDocumentBuilder();\n    }\n\n    @Before\n    public void loadDefaultTestRunProperties()\n            throws InvalidPropertiesFormatException, IOException {\n        this.testRunProps = new Properties();\n        this.testRunProps.loadFromXML(getClass().getResourceAsStream(\n                \"\/test-run-props.xml\"));\n    }\n\n    @Test\n    public void doTestRun() throws Exception {\n        URL testSubject = getClass().getResource(\"\/atom-feed-2.xml\");\n        this.testRunProps.setProperty(TestRunArg.IUT.toString(), testSubject\n                .toURI().toString());\n        ByteArrayOutputStream outStream = new ByteArrayOutputStream(1024);\n        this.testRunProps.storeToXML(outStream, \"Integration test\");\n        Document testRunArgs = docBuilder.parse(new ByteArrayInputStream(\n                outStream.toByteArray()));\n        TestNGController controller = new TestNGController();\n        Source results = controller.doTestRun(testRunArgs);\n        String xpath = \"\/testng-results\/@failed\";\n        XdmValue failed = XMLUtils.evaluateXPath2(results, xpath, null);\n        int numFailed = Integer.parseInt(failed.getUnderlyingValue()\n                .getStringValue());\n        assertEquals(\"Unexpected number of fail verdicts.\", 3, numFailed);\n    }\n}\n","avg_line_length":35.2361111111,"max_line_length":81,"alphanum_fraction":0.7154119038}
{"size":532,"ext":"java","lang":"Java","max_stars_count":3.0,"content":"package mindgo.items;\n\nimport arc.func.Cons;\nimport mindgo.logic.PlayerData;\nimport mindustry.Vars;\nimport mindustry.gen.Iconc;\n\npublic class Item {\n    public static final float MAX_DIST = Vars.tilesize * 20 * Vars.tilesize * 20;\n    public String name;\n    public char icon;\n    public Cons use;\n    public int maxStuck;\n\n    public Item(String name, char icon, int maxStuck, Cons use) {\n        this.name = name;\n        this.icon = icon;\n        this.use = use;\n        this.maxStuck = maxStuck;\n    }\n}\n","avg_line_length":24.1818181818,"max_line_length":81,"alphanum_fraction":0.6710526316}
{"size":1118,"ext":"java","lang":"Java","max_stars_count":1.0,"content":"package tasks;\n\nimport java.sql.Connection;\nimport java.sql.PreparedStatement;\nimport java.sql.ResultSet;\nimport java.sql.SQLException;\n\npublic class Pr02GetVillainsNames implements Executable {\n    Connection connection;\n\n    public Pr02GetVillainsNames(Connection connection) {\n        this.connection = connection;\n    }\n\n    private void getVillainsNames() throws SQLException {\n        String query =\n                \"SELECT v.name, COUNT(mv.minion_id) as cm FROM villains as v JOIN minions_villains as mv ON v.id = mv.villain_id\" +\n                        \" GROUP BY mv.villain_id HAVING cm > ? ORDER BY cm DESC;\";\n\n        PreparedStatement preparedStatement =\n                this.connection.prepareStatement(query);\n\n        preparedStatement.setInt(1, 15);\n\n        ResultSet resultSet = preparedStatement.executeQuery();\n\n        while (resultSet.next()) {\n            System.out.println(\n                    String.format(\"%s %d\", resultSet.getString(\"name\"), resultSet.getInt(\"cm\")));\n        }\n    }\n\n    @Override\n    public void execute() throws SQLException {\n        this.getVillainsNames();\n    }\n}\n","avg_line_length":29.4210526316,"max_line_length":131,"alphanum_fraction":0.6511627907}
{"size":230,"ext":"java","lang":"Java","max_stars_count":null,"content":"package com.github.magento.models;\n\n\nimport lombok.Getter;\nimport lombok.Setter;\n\n\n\/**\n * Created by xschen on 12\/6\/2017.\n *\/\n@Getter\n@Setter\npublic class MagentoAttributeType {\n   private String value;\n   private String label;\n}\n","avg_line_length":13.5294117647,"max_line_length":35,"alphanum_fraction":0.7304347826}
{"size":16337,"ext":"java","lang":"Java","max_stars_count":null,"content":"\/**\n * Copyright 2012 Google Inc.\n * Copyright 2014 Andreas Schildbach\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *    http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\/\n\npackage com.google.bitcoin.store;\n\n\nimport com.google.bitcoin.core.*;\nimport com.google.bitcoin.core.TransactionConfidence.ConfidenceType;\nimport com.google.bitcoin.params.MainNetParams;\nimport com.google.bitcoin.params.UnitTestParams;\nimport com.google.bitcoin.script.ScriptBuilder;\nimport com.google.bitcoin.testing.FakeTxBuilder;\nimport com.google.bitcoin.utils.BriefLogFormatter;\nimport com.google.bitcoin.utils.Threading;\nimport com.google.protobuf.ByteString;\nimport org.bitcoinj.wallet.Protos;\nimport org.junit.Before;\nimport org.junit.Test;\n\nimport java.io.ByteArrayInputStream;\nimport java.io.ByteArrayOutputStream;\nimport java.math.BigInteger;\nimport java.net.InetAddress;\nimport java.util.ArrayList;\nimport java.util.Date;\nimport java.util.Iterator;\nimport java.util.Set;\n\nimport static com.google.bitcoin.core.Coin.*;\nimport static com.google.bitcoin.testing.FakeTxBuilder.createFakeTx;\nimport static org.junit.Assert.*;\n\npublic class WalletProtobufSerializerTest {\n    static final NetworkParameters params = UnitTestParams.get();\n    private ECKey myKey;\n    private ECKey myWatchedKey;\n    private Address myAddress;\n    private Wallet myWallet;\n\n    public static String WALLET_DESCRIPTION  = \"The quick brown fox lives in \\u4f26\\u6566\"; \/\/ Beijing in Chinese\n    private long mScriptCreationTime;\n\n    @Before\n    public void setUp() throws Exception {\n        BriefLogFormatter.initVerbose();\n        myWatchedKey = new ECKey();\n        myWallet = new Wallet(params);\n        myKey = new ECKey();\n        myKey.setCreationTimeSeconds(123456789L);\n        myWallet.importKey(myKey);\n        myAddress = myKey.toAddress(params);\n        myWallet = new Wallet(params);\n        myWallet.importKey(myKey);\n        mScriptCreationTime = new Date().getTime() \/ 1000 - 1234;\n        myWallet.addWatchedAddress(myWatchedKey.toAddress(params), mScriptCreationTime);\n        myWallet.setDescription(WALLET_DESCRIPTION);\n    }\n\n    @Test\n    public void empty() throws Exception {\n        \/\/ Check the base case of a wallet with one key and no transactions.\n        Wallet wallet1 = roundTrip(myWallet);\n        assertEquals(0, wallet1.getTransactions(true).size());\n        assertEquals(Coin.ZERO, wallet1.getBalance());\n        assertArrayEquals(myKey.getPubKey(),\n                wallet1.findKeyFromPubHash(myKey.getPubKeyHash()).getPubKey());\n        assertArrayEquals(myKey.getPrivKeyBytes(),\n                wallet1.findKeyFromPubHash(myKey.getPubKeyHash()).getPrivKeyBytes());\n        assertEquals(myKey.getCreationTimeSeconds(),\n                wallet1.findKeyFromPubHash(myKey.getPubKeyHash()).getCreationTimeSeconds());\n        assertEquals(mScriptCreationTime,\n                
wallet1.getWatchedScripts().get(0).getCreationTimeSeconds());\n        assertEquals(1, wallet1.getWatchedScripts().size());\n        assertEquals(ScriptBuilder.createOutputScript(myWatchedKey.toAddress(params)),\n                wallet1.getWatchedScripts().get(0));\n        assertEquals(WALLET_DESCRIPTION, wallet1.getDescription());\n    }\n\n    @Test\n    public void oneTx() throws Exception {\n        \/\/ Check basic tx serialization.\n        Coin v1 = COIN;\n        Transaction t1 = createFakeTx(params, v1, myAddress);\n        t1.getConfidence().markBroadcastBy(new PeerAddress(InetAddress.getByName(\"1.2.3.4\")));\n        t1.getConfidence().markBroadcastBy(new PeerAddress(InetAddress.getByName(\"5.6.7.8\")));\n        t1.getConfidence().setSource(TransactionConfidence.Source.NETWORK);\n        myWallet.receivePending(t1, null);\n        Wallet wallet1 = roundTrip(myWallet);\n        assertEquals(1, wallet1.getTransactions(true).size());\n        assertEquals(v1, wallet1.getBalance(Wallet.BalanceType.ESTIMATED));\n        Transaction t1copy = wallet1.getTransaction(t1.getHash());\n        assertArrayEquals(t1.bitcoinSerialize(), t1copy.bitcoinSerialize());\n        assertEquals(2, t1copy.getConfidence().numBroadcastPeers());\n        assertEquals(TransactionConfidence.Source.NETWORK, t1copy.getConfidence().getSource());\n        \n        Protos.Wallet walletProto = new WalletProtobufSerializer().walletToProto(myWallet);\n        assertEquals(Protos.Key.Type.ORIGINAL, walletProto.getKey(0).getType());\n        assertEquals(0, walletProto.getExtensionCount());\n        assertEquals(1, walletProto.getTransactionCount());\n        assertEquals(6, walletProto.getKeyCount());\n        \n        Protos.Transaction t1p = walletProto.getTransaction(0);\n        assertEquals(0, t1p.getBlockHashCount());\n        assertArrayEquals(t1.getHash().getBytes(), t1p.getHash().toByteArray());\n        assertEquals(Protos.Transaction.Pool.PENDING, t1p.getPool());\n        assertFalse(t1p.hasLockTime());\n        assertFalse(t1p.getTransactionInput(0).hasSequence());\n        assertArrayEquals(t1.getInputs().get(0).getOutpoint().getHash().getBytes(),\n                t1p.getTransactionInput(0).getTransactionOutPointHash().toByteArray());\n        assertEquals(0, t1p.getTransactionInput(0).getTransactionOutPointIndex());\n        assertEquals(t1p.getTransactionOutput(0).getValue(), v1.value);\n    }\n\n    @Test\n    public void doubleSpend() throws Exception {\n        \/\/ Check that we can serialize double spends correctly, as this is a slightly tricky case.\n        FakeTxBuilder.DoubleSpends doubleSpends = FakeTxBuilder.createFakeDoubleSpendTxns(params, myAddress);\n        \/\/ t1 spends to our wallet.\n        myWallet.receivePending(doubleSpends.t1, null);\n        \/\/ t2 rolls back t1 and spends somewhere else.\n        myWallet.receiveFromBlock(doubleSpends.t2, null, BlockChain.NewBlockType.BEST_CHAIN, 0);\n        Wallet wallet1 = roundTrip(myWallet);\n        assertEquals(1, wallet1.getTransactions(true).size());\n        Transaction t1 = wallet1.getTransaction(doubleSpends.t1.getHash());\n        assertEquals(ConfidenceType.DEAD, t1.getConfidence().getConfidenceType());\n        assertEquals(Coin.ZERO, wallet1.getBalance());\n\n        \/\/ TODO: Wallet should store overriding transactions even if they are not wallet-relevant.\n        \/\/ assertEquals(doubleSpends.t2, t1.getConfidence().getOverridingTransaction());\n    }\n    \n    @Test\n    public void testKeys() throws Exception {\n  
      for (int i = 0 ; i < 20 ; i++) {\n            myKey = new ECKey();\n            myAddress = myKey.toAddress(params);\n            myWallet = new Wallet(params);\n            myWallet.importKey(myKey);\n            Wallet wallet1 = roundTrip(myWallet);\n            assertArrayEquals(myKey.getPubKey(), wallet1.findKeyFromPubHash(myKey.getPubKeyHash()).getPubKey());\n            assertArrayEquals(myKey.getPrivKeyBytes(), wallet1.findKeyFromPubHash(myKey.getPubKeyHash()).getPrivKeyBytes());\n        }\n    }\n\n    @Test\n    public void testLastBlockSeenHash() throws Exception {\n        \/\/ Test the lastBlockSeenHash field works.\n\n        \/\/ LastBlockSeenHash should be empty if never set.\n        Wallet wallet = new Wallet(params);\n        Protos.Wallet walletProto = new WalletProtobufSerializer().walletToProto(wallet);\n        ByteString lastSeenBlockHash = walletProto.getLastSeenBlockHash();\n        assertTrue(lastSeenBlockHash.isEmpty());\n\n        \/\/ Create a block.\n        Block block = new Block(params, BlockTest.blockBytes);\n        Sha256Hash blockHash = block.getHash();\n        wallet.setLastBlockSeenHash(blockHash);\n        wallet.setLastBlockSeenHeight(1);\n\n        \/\/ Roundtrip the wallet and check it has stored the blockHash.\n        Wallet wallet1 = roundTrip(wallet);\n        assertEquals(blockHash, wallet1.getLastBlockSeenHash());\n        assertEquals(1, wallet1.getLastBlockSeenHeight());\n\n        \/\/ Test the Satoshi genesis block (hash of all zeroes) is roundtripped ok.\n        Block genesisBlock = MainNetParams.get().getGenesisBlock();\n        wallet.setLastBlockSeenHash(genesisBlock.getHash());\n        Wallet wallet2 = roundTrip(wallet);\n        assertEquals(genesisBlock.getHash(), wallet2.getLastBlockSeenHash());\n    }\n\n    @Test\n    public void testAppearedAtChainHeightDepthAndWorkDone() throws Exception {\n        \/\/ Test the TransactionConfidence appearedAtChainHeight, depth and workDone field are stored.\n\n        BlockChain chain = new BlockChain(params, myWallet, new MemoryBlockStore(params));\n\n        final ArrayList<Transaction> txns = new ArrayList<Transaction>(2);\n        myWallet.addEventListener(new AbstractWalletEventListener() {\n            @Override\n            public void onCoinsReceived(Wallet wallet, Transaction tx, Coin prevBalance, Coin newBalance) {\n                txns.add(tx);\n            }\n        });\n\n        \/\/ Start by building two blocks on top of the genesis block.\n        Block b1 = params.getGenesisBlock().createNextBlock(myAddress);\n        BigInteger work1 = b1.getWork();\n        assertTrue(work1.signum() > 0);\n\n        Block b2 = b1.createNextBlock(myAddress);\n        BigInteger work2 = b2.getWork();\n        assertTrue(work2.signum() > 0);\n\n        assertTrue(chain.add(b1));\n        assertTrue(chain.add(b2));\n\n        \/\/ We now have the following chain:\n        \/\/     genesis -> b1 -> b2\n\n        \/\/ Check the transaction confidence levels are correct before wallet roundtrip.\n        Threading.waitForUserCode();\n        assertEquals(2, txns.size());\n\n        TransactionConfidence confidence0 = txns.get(0).getConfidence();\n        TransactionConfidence confidence1 = txns.get(1).getConfidence();\n\n        assertEquals(1, confidence0.getAppearedAtChainHeight());\n        assertEquals(2, confidence1.getAppearedAtChainHeight());\n\n        assertEquals(2, confidence0.getDepthInBlocks());\n        assertEquals(1, confidence1.getDepthInBlocks());\n\n        \/\/ Roundtrip the wallet and check it has stored the depth and workDone.\n        Wallet rebornWallet = roundTrip(myWallet);\n\n        Set<Transaction> rebornTxns = rebornWallet.getTransactions(false);\n        assertEquals(2, rebornTxns.size());\n\n        \/\/ The transactions are not guaranteed to be in the same order so sort them to be in chain height order if required.\n        Iterator<Transaction> it = rebornTxns.iterator();\n        Transaction txA = it.next();\n        Transaction txB = it.next();\n\n        Transaction rebornTx0, rebornTx1;\n        if (txA.getConfidence().getAppearedAtChainHeight() == 1) {\n            rebornTx0 = txA;\n            rebornTx1 = txB;\n        } else {\n            rebornTx0 = txB;\n            rebornTx1 = txA;\n        }\n\n        TransactionConfidence rebornConfidence0 = rebornTx0.getConfidence();\n        TransactionConfidence rebornConfidence1 = rebornTx1.getConfidence();\n\n        assertEquals(1, rebornConfidence0.getAppearedAtChainHeight());\n        assertEquals(2, rebornConfidence1.getAppearedAtChainHeight());\n\n        assertEquals(2, rebornConfidence0.getDepthInBlocks());\n        assertEquals(1, rebornConfidence1.getDepthInBlocks());\n    }\n\n    private static Wallet roundTrip(Wallet wallet) throws Exception {\n        ByteArrayOutputStream output = new ByteArrayOutputStream();\n        \/\/System.out.println(WalletProtobufSerializer.walletToText(wallet));\n        new WalletProtobufSerializer().writeWallet(wallet, output);\n        ByteArrayInputStream test = new ByteArrayInputStream(output.toByteArray());\n        assertTrue(WalletProtobufSerializer.isWallet(test));\n        ByteArrayInputStream input = new ByteArrayInputStream(output.toByteArray());\n        return new WalletProtobufSerializer().readWallet(input);\n    }\n\n    @Test\n    public void testRoundTripNormalWallet() throws Exception {\n        Wallet wallet1 = roundTrip(myWallet);     \n        assertEquals(0, wallet1.getTransactions(true).size());\n        assertEquals(Coin.ZERO, wallet1.getBalance());\n        assertArrayEquals(myKey.getPubKey(),\n                wallet1.findKeyFromPubHash(myKey.getPubKeyHash()).getPubKey());\n        assertArrayEquals(myKey.getPrivKeyBytes(),\n                wallet1.findKeyFromPubHash(myKey.getPubKeyHash()).getPrivKeyBytes());\n        assertEquals(myKey.getCreationTimeSeconds(),\n                wallet1.findKeyFromPubHash(myKey.getPubKeyHash()).getCreationTimeSeconds());\n    }\n\n    @Test\n    public void coinbaseTxns() throws Exception {\n        \/\/ Covers issue 420 where the outpoint index of a coinbase tx input was being mis-serialized.\n        Block b = params.getGenesisBlock().createNextBlockWithCoinbase(myKey.getPubKey(), FIFTY_COINS);\n        Transaction coinbase = b.getTransactions().get(0);\n        assertTrue(coinbase.isCoinBase());\n        BlockChain chain = new BlockChain(params, myWallet, new MemoryBlockStore(params));\n        assertTrue(chain.add(b));\n        \/\/ Wallet now has a coinbase tx in it.\n        assertEquals(1, myWallet.getTransactions(true).size());\n        assertTrue(myWallet.getTransaction(coinbase.getHash()).isCoinBase());\n        Wallet wallet2 = roundTrip(myWallet);\n        assertEquals(1, wallet2.getTransactions(true).size());\n        assertTrue(wallet2.getTransaction(coinbase.getHash()).isCoinBase());\n    }\n\n    @Test\n    public void tags() throws Exception {\n        myWallet.setTag(\"foo\", ByteString.copyFromUtf8(\"bar\"));\n        assertEquals(\"bar\", myWallet.getTag(\"foo\").toStringUtf8());\n        myWallet = 
roundTrip(myWallet);\n        assertEquals(\"bar\", myWallet.getTag(\"foo\").toStringUtf8());\n    }\n\n    @Test\n    public void testExtensions() throws Exception {\n        myWallet.addExtension(new SomeFooExtension(\"com.whatever.required\", true));\n        Protos.Wallet proto = new WalletProtobufSerializer().walletToProto(myWallet);\n        \/\/ Initial extension is mandatory: try to read it back into a wallet that doesn't know about it.\n        try {\n            new WalletProtobufSerializer().readWallet(params, null, proto);\n            fail();\n        } catch (UnreadableWalletException e) {\n            assertTrue(e.getMessage().contains(\"mandatory\"));\n        }\n        Wallet wallet = new WalletProtobufSerializer().readWallet(params,\n                new WalletExtension[]{ new SomeFooExtension(\"com.whatever.required\", true) },\n                proto);\n        assertTrue(wallet.getExtensions().containsKey(\"com.whatever.required\"));\n\n        \/\/ Non-mandatory extensions are ignored if the wallet doesn't know how to read them.\n        Wallet wallet2 = new Wallet(params);\n        wallet2.addExtension(new SomeFooExtension(\"com.whatever.optional\", false));\n        Protos.Wallet proto2 = new WalletProtobufSerializer().walletToProto(wallet2);\n        Wallet wallet5 = new WalletProtobufSerializer().readWallet(params, null, proto2);\n        assertEquals(0, wallet5.getExtensions().size());\n    }\n\n    @Test(expected = UnreadableWalletException.FutureVersion.class)\n    public void versions() throws Exception {\n        Protos.Wallet.Builder proto = Protos.Wallet.newBuilder(new WalletProtobufSerializer().walletToProto(myWallet));\n        proto.setVersion(2);\n        new WalletProtobufSerializer().readWallet(params, null, proto.build());\n    }\n\n    private static class SomeFooExtension implements WalletExtension {\n        private final byte[] data = new byte[]{1, 2, 3};\n\n        private final boolean isMandatory;\n        private final String id;\n\n        public SomeFooExtension(String id, boolean isMandatory) {\n            this.isMandatory = isMandatory;\n            this.id = id;\n        }\n\n        @Override\n        public String getWalletExtensionID() {\n            return id;\n        }\n\n        @Override\n        public boolean isWalletExtensionMandatory() {\n            return isMandatory;\n        }\n\n        @Override\n        public byte[] serializeWalletExtension() {\n            return data;\n        }\n\n        @Override\n        public void deserializeWalletExtension(Wallet wallet, byte[] data) {\n            assertArrayEquals(this.data, data);\n        }\n    }\n}\n","avg_line_length":44.2737127371,"max_line_length":124,"alphanum_fraction":0.6879476036}
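The roundTrip helper in the test above exercises the same writeWallet(OutputStream)/readWallet(InputStream) pair an application would use for persistence. A minimal sketch of saving and reloading a wallet through a file follows, assuming only the stream overloads already called in the test; the file name and the choice of UnitTestParams are illustrative, not taken from the test itself.

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;

import com.google.bitcoin.core.Wallet;
import com.google.bitcoin.params.UnitTestParams;
import com.google.bitcoin.store.WalletProtobufSerializer;

public class WalletPersistenceSketch {
    public static void main(String[] args) throws Exception {
        Wallet wallet = new Wallet(UnitTestParams.get());
        File file = new File("test.wallet"); // illustrative location

        // Serialize to protobuf, the same call roundTrip() makes against a byte-array stream.
        try (FileOutputStream out = new FileOutputStream(file)) {
            new WalletProtobufSerializer().writeWallet(wallet, out);
        }

        // Deserialize with the InputStream overload also used in roundTrip().
        try (FileInputStream in = new FileInputStream(file)) {
            Wallet restored = new WalletProtobufSerializer().readWallet(in);
            System.out.println("Balance after reload: " + restored.getBalance());
        }
    }
}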
{"size":463,"ext":"java","lang":"Java","max_stars_count":3579.0,"content":"package com.querydsl.apt.domain;\n\nimport static org.junit.Assert.assertEquals;\n\nimport javax.persistence.MappedSuperclass;\n\nimport org.junit.Test;\n\npublic class Properties4Test extends AbstractTest {\n\n    @MappedSuperclass\n    public abstract static class Naming {\n\n        public abstract boolean is8FRecord();\n\n    }\n\n    @Test\n    public void test() {\n        assertEquals(\"8FRecord\", QProperties4Test_Naming.naming._8FRecord.getMetadata().getName());\n    }\n}\n","avg_line_length":20.1304347826,"max_line_length":99,"alphanum_fraction":0.7300215983}
{"size":139181,"ext":"java","lang":"Java","max_stars_count":null,"content":"package cz.cuni.mff.perestroika.perestroika_problem_20_3_0_4_0_5_7;\n\nimport cz.cuni.mff.jpddl.PDDLDeadEnd;\nimport cz.cuni.mff.perestroika.domain.Domain;\nimport cz.cuni.mff.perestroika.domain.State;\nimport cz.cuni.mff.perestroika.problem.PerestroikaProblem;\nimport cz.cuni.mff.perestroika.problem1.DeadEnd;\n\npublic final class Problem extends PerestroikaProblem {\n\t\n\tstatic {\n\t\t\/\/ ENSURE STATIC INITIALIZATION OF THE CLASSES\n\t\tnew E_Locations();\n\t\tnew E_Resources();\n\t}\n\t\n\tpublic Domain domain;\n\t\n\tpublic State state;\n\t\n\tpublic Goal goal;\n\t\n\tpublic DeadEnd deadEnd;\n\t\n\tpublic Problem() {\n\t\tdomain = new Domain();\n\t\tstate = new State();\n\t\tgoal = new Goal();\n\t\tdeadEnd = new DeadEnd();\n\t\t\n\t\tstate.p_ActRound.set();\n\t\tstate.p_Alive.set();\n\n\t\t\n\t\tstate.p_AtAgent.set(E_Locations.l_1_1);\n\n\t\tstate.p_AtRes.set(E_Resources.r14, E_Locations.l_4_17);\n\t\tstate.p_AtRes.set(E_Resources.r22, E_Locations.l_7_10);\n\t\tstate.p_AtRes.set(E_Resources.r38, E_Locations.l_11_11);\n\t\tstate.p_AtRes.set(E_Resources.r41, E_Locations.l_12_4);\n\t\tstate.p_AtRes.set(E_Resources.r66, E_Locations.l_18_12);\n\t\tstate.p_AtRes.set(E_Resources.r1, E_Locations.l_1_6);\n\t\tstate.p_AtRes.set(E_Resources.r47, E_Locations.l_13_18);\n\t\tstate.p_AtRes.set(E_Resources.r29, E_Locations.l_9_1);\n\t\tstate.p_AtRes.set(E_Resources.r37, E_Locations.l_11_9);\n\t\tstate.p_AtRes.set(E_Resources.r16, E_Locations.l_5_10);\n\t\tstate.p_AtRes.set(E_Resources.r51, E_Locations.l_14_6);\n\t\tstate.p_AtRes.set(E_Resources.r15, E_Locations.l_5_7);\n\t\tstate.p_AtRes.set(E_Resources.r65, E_Locations.l_18_6);\n\t\tstate.p_AtRes.set(E_Resources.r2, E_Locations.l_1_9);\n\t\tstate.p_AtRes.set(E_Resources.r68, E_Locations.l_19_14);\n\t\tstate.p_AtRes.set(E_Resources.r39, E_Locations.l_11_14);\n\t\tstate.p_AtRes.set(E_Resources.r44, E_Locations.l_12_17);\n\t\tstate.p_AtRes.set(E_Resources.r70, E_Locations.l_20_9);\n\t\tstate.p_AtRes.set(E_Resources.r46, E_Locations.l_13_13);\n\t\tstate.p_AtRes.set(E_Resources.r26, E_Locations.l_8_8);\n\t\tstate.p_AtRes.set(E_Resources.r49, E_Locations.l_14_1);\n\t\tstate.p_AtRes.set(E_Resources.r18, E_Locations.l_6_12);\n\t\tstate.p_AtRes.set(E_Resources.r4, E_Locations.l_1_20);\n\t\tstate.p_AtRes.set(E_Resources.r33, E_Locations.l_10_13);\n\t\tstate.p_AtRes.set(E_Resources.r8, E_Locations.l_2_16);\n\t\tstate.p_AtRes.set(E_Resources.r25, E_Locations.l_8_3);\n\t\tstate.p_AtRes.set(E_Resources.r43, E_Locations.l_12_14);\n\t\tstate.p_AtRes.set(E_Resources.r64, E_Locations.l_18_3);\n\t\tstate.p_AtRes.set(E_Resources.r52, E_Locations.l_14_7);\n\t\tstate.p_AtRes.set(E_Resources.r3, E_Locations.l_1_16);\n\t\tstate.p_AtRes.set(E_Resources.r28, E_Locations.l_8_19);\n\t\tstate.p_AtRes.set(E_Resources.r11, E_Locations.l_3_4);\n\t\tstate.p_AtRes.set(E_Resources.r60, E_Locations.l_17_3);\n\t\tstate.p_AtRes.set(E_Resources.r24, E_Locations.l_7_19);\n\t\tstate.p_AtRes.set(E_Resources.r23, E_Locations.l_7_12);\n\t\tstate.p_AtRes.set(E_Resources.r63, E_Locations.l_17_20);\n\t\tstate.p_AtRes.set(E_Resources.r34, E_Locations.l_10_16);\n\t\tstate.p_AtRes.set(E_Resources.r56, E_Locations.l_15_1);\n\t\tstate.p_AtRes.set(E_Resources.r12, E_Locations.l_3_19);\n\t\tstate.p_AtRes.set(E_Resources.r19, E_Locations.l_6_15);\n\t\tstate.p_AtRes.set(E_Resources.r17, E_Locations.l_6_5);\n\t\tstate.p_AtRes.set(E_Resources.r69, E_Locations.l_19_19);\n\t\tstate.p_AtRes.set(E_Resources.r45, 
E_Locations.l_13_8);\n\t\tstate.p_AtRes.set(E_Resources.r5, E_Locations.l_2_2);\n\t\tstate.p_AtRes.set(E_Resources.r59, E_Locations.l_16_8);\n\t\tstate.p_AtRes.set(E_Resources.r9, E_Locations.l_2_20);\n\t\tstate.p_AtRes.set(E_Resources.r27, E_Locations.l_8_10);\n\t\tstate.p_AtRes.set(E_Resources.r48, E_Locations.l_13_19);\n\t\tstate.p_AtRes.set(E_Resources.r6, E_Locations.l_2_3);\n\t\tstate.p_AtRes.set(E_Resources.r10, E_Locations.l_3_1);\n\t\tstate.p_AtRes.set(E_Resources.r57, E_Locations.l_15_13);\n\t\tstate.p_AtRes.set(E_Resources.r31, E_Locations.l_9_10);\n\t\tstate.p_AtRes.set(E_Resources.r20, E_Locations.l_6_17);\n\t\tstate.p_AtRes.set(E_Resources.r40, E_Locations.l_12_2);\n\t\tstate.p_AtRes.set(E_Resources.r42, E_Locations.l_12_10);\n\t\tstate.p_AtRes.set(E_Resources.r7, E_Locations.l_2_8);\n\t\tstate.p_AtRes.set(E_Resources.r21, E_Locations.l_7_2);\n\t\tstate.p_AtRes.set(E_Resources.r54, E_Locations.l_14_19);\n\t\tstate.p_AtRes.set(E_Resources.r30, E_Locations.l_9_7);\n\t\tstate.p_AtRes.set(E_Resources.r61, E_Locations.l_17_9);\n\t\tstate.p_AtRes.set(E_Resources.r13, E_Locations.l_4_12);\n\t\tstate.p_AtRes.set(E_Resources.r53, E_Locations.l_14_10);\n\t\tstate.p_AtRes.set(E_Resources.r50, E_Locations.l_14_3);\n\t\tstate.p_AtRes.set(E_Resources.r35, E_Locations.l_11_1);\n\t\tstate.p_AtRes.set(E_Resources.r71, E_Locations.l_20_13);\n\t\tstate.p_AtRes.set(E_Resources.r58, E_Locations.l_16_6);\n\t\tstate.p_AtRes.set(E_Resources.r32, E_Locations.l_10_3);\n\t\tstate.p_AtRes.set(E_Resources.r62, E_Locations.l_17_10);\n\t\tstate.p_AtRes.set(E_Resources.r67, E_Locations.l_18_16);\n\t\tstate.p_AtRes.set(E_Resources.r36, E_Locations.l_11_7);\n\t\tstate.p_AtRes.set(E_Resources.r55, E_Locations.l_14_20);\n\t\t\n\n\t\tstate.p_Connected.set(E_Locations.l_14_15, E_Locations.l_14_16);\n\t\tstate.p_Connected.set(E_Locations.l_6_19, E_Locations.l_6_18);\n\t\tstate.p_Connected.set(E_Locations.l_12_13, E_Locations.l_11_13);\n\t\tstate.p_Connected.set(E_Locations.l_19_14, E_Locations.l_18_14);\n\t\tstate.p_Connected.set(E_Locations.l_19_5, E_Locations.l_19_4);\n\t\tstate.p_Connected.set(E_Locations.l_13_13, E_Locations.l_13_14);\n\t\tstate.p_Connected.set(E_Locations.l_2_11, E_Locations.l_2_12);\n\t\tstate.p_Connected.set(E_Locations.l_8_18, E_Locations.l_7_18);\n\t\tstate.p_Connected.set(E_Locations.l_20_18, E_Locations.l_20_19);\n\t\tstate.p_Connected.set(E_Locations.l_9_12, E_Locations.l_8_12);\n\t\tstate.p_Connected.set(E_Locations.l_11_5, E_Locations.l_11_4);\n\t\tstate.p_Connected.set(E_Locations.l_4_14, E_Locations.l_4_15);\n\t\tstate.p_Connected.set(E_Locations.l_8_3, E_Locations.l_9_3);\n\t\tstate.p_Connected.set(E_Locations.l_2_16, E_Locations.l_3_16);\n\t\tstate.p_Connected.set(E_Locations.l_5_20, E_Locations.l_4_20);\n\t\tstate.p_Connected.set(E_Locations.l_9_19, E_Locations.l_9_18);\n\t\tstate.p_Connected.set(E_Locations.l_10_17, E_Locations.l_9_17);\n\t\tstate.p_Connected.set(E_Locations.l_12_3, E_Locations.l_12_2);\n\t\tstate.p_Connected.set(E_Locations.l_20_13, E_Locations.l_20_14);\n\t\tstate.p_Connected.set(E_Locations.l_9_5, E_Locations.l_9_6);\n\t\tstate.p_Connected.set(E_Locations.l_14_5, E_Locations.l_13_5);\n\t\tstate.p_Connected.set(E_Locations.l_5_19, E_Locations.l_4_19);\n\t\tstate.p_Connected.set(E_Locations.l_4_19, E_Locations.l_3_19);\n\t\tstate.p_Connected.set(E_Locations.l_5_7, E_Locations.l_6_7);\n\t\tstate.p_Connected.set(E_Locations.l_12_3, E_Locations.l_12_4);\n\t\tstate.p_Connected.set(E_Locations.l_13_10, 
E_Locations.l_13_11);\n\t\tstate.p_Connected.set(E_Locations.l_19_18, E_Locations.l_20_18);\n\t\tstate.p_Connected.set(E_Locations.l_20_4, E_Locations.l_19_4);\n\t\tstate.p_Connected.set(E_Locations.l_10_14, E_Locations.l_11_14);\n\t\tstate.p_Connected.set(E_Locations.l_13_16, E_Locations.l_13_15);\n\t\tstate.p_Connected.set(E_Locations.l_6_12, E_Locations.l_6_13);\n\t\tstate.p_Connected.set(E_Locations.l_1_18, E_Locations.l_1_17);\n\t\tstate.p_Connected.set(E_Locations.l_7_16, E_Locations.l_8_16);\n\t\tstate.p_Connected.set(E_Locations.l_6_6, E_Locations.l_5_6);\n\t\tstate.p_Connected.set(E_Locations.l_18_5, E_Locations.l_18_4);\n\t\tstate.p_Connected.set(E_Locations.l_9_4, E_Locations.l_10_4);\n\t\tstate.p_Connected.set(E_Locations.l_4_8, E_Locations.l_5_8);\n\t\tstate.p_Connected.set(E_Locations.l_12_4, E_Locations.l_12_3);\n\t\tstate.p_Connected.set(E_Locations.l_9_12, E_Locations.l_9_11);\n\t\tstate.p_Connected.set(E_Locations.l_2_5, E_Locations.l_2_6);\n\t\tstate.p_Connected.set(E_Locations.l_20_16, E_Locations.l_20_15);\n\t\tstate.p_Connected.set(E_Locations.l_13_10, E_Locations.l_14_10);\n\t\tstate.p_Connected.set(E_Locations.l_4_9, E_Locations.l_5_9);\n\t\tstate.p_Connected.set(E_Locations.l_8_14, E_Locations.l_9_14);\n\t\tstate.p_Connected.set(E_Locations.l_4_3, E_Locations.l_4_4);\n\t\tstate.p_Connected.set(E_Locations.l_12_18, E_Locations.l_13_18);\n\t\tstate.p_Connected.set(E_Locations.l_2_20, E_Locations.l_3_20);\n\t\tstate.p_Connected.set(E_Locations.l_9_15, E_Locations.l_8_15);\n\t\tstate.p_Connected.set(E_Locations.l_19_3, E_Locations.l_19_4);\n\t\tstate.p_Connected.set(E_Locations.l_19_6, E_Locations.l_19_5);\n\t\tstate.p_Connected.set(E_Locations.l_16_13, E_Locations.l_15_13);\n\t\tstate.p_Connected.set(E_Locations.l_14_13, E_Locations.l_15_13);\n\t\tstate.p_Connected.set(E_Locations.l_3_6, E_Locations.l_2_6);\n\t\tstate.p_Connected.set(E_Locations.l_2_13, E_Locations.l_2_12);\n\t\tstate.p_Connected.set(E_Locations.l_15_12, E_Locations.l_15_13);\n\t\tstate.p_Connected.set(E_Locations.l_18_4, E_Locations.l_18_3);\n\t\tstate.p_Connected.set(E_Locations.l_19_12, E_Locations.l_18_12);\n\t\tstate.p_Connected.set(E_Locations.l_2_20, E_Locations.l_2_19);\n\t\tstate.p_Connected.set(E_Locations.l_14_7, E_Locations.l_14_6);\n\t\tstate.p_Connected.set(E_Locations.l_3_15, E_Locations.l_4_15);\n\t\tstate.p_Connected.set(E_Locations.l_13_11, E_Locations.l_13_10);\n\t\tstate.p_Connected.set(E_Locations.l_6_10, E_Locations.l_6_11);\n\t\tstate.p_Connected.set(E_Locations.l_10_3, E_Locations.l_9_3);\n\t\tstate.p_Connected.set(E_Locations.l_16_1, E_Locations.l_16_2);\n\t\tstate.p_Connected.set(E_Locations.l_4_11, E_Locations.l_3_11);\n\t\tstate.p_Connected.set(E_Locations.l_11_19, E_Locations.l_11_18);\n\t\tstate.p_Connected.set(E_Locations.l_8_4, E_Locations.l_7_4);\n\t\tstate.p_Connected.set(E_Locations.l_17_5, E_Locations.l_17_4);\n\t\tstate.p_Connected.set(E_Locations.l_11_13, E_Locations.l_11_12);\n\t\tstate.p_Connected.set(E_Locations.l_4_11, E_Locations.l_5_11);\n\t\tstate.p_Connected.set(E_Locations.l_4_10, E_Locations.l_4_9);\n\t\tstate.p_Connected.set(E_Locations.l_5_14, E_Locations.l_5_13);\n\t\tstate.p_Connected.set(E_Locations.l_10_8, E_Locations.l_10_9);\n\t\tstate.p_Connected.set(E_Locations.l_13_8, E_Locations.l_14_8);\n\t\tstate.p_Connected.set(E_Locations.l_3_2, E_Locations.l_2_2);\n\t\tstate.p_Connected.set(E_Locations.l_1_15, E_Locations.l_1_16);\n\t\tstate.p_Connected.set(E_Locations.l_6_3, E_Locations.l_6_2);\n\t\tstate.p_Connected.set(E_Locations.l_10_3, 
E_Locations.l_11_3);\n\t\tstate.p_Connected.set(E_Locations.l_1_2, E_Locations.l_1_3);\n\t\tstate.p_Connected.set(E_Locations.l_19_10, E_Locations.l_20_10);\n\t\tstate.p_Connected.set(E_Locations.l_7_19, E_Locations.l_6_19);\n\t\tstate.p_Connected.set(E_Locations.l_5_15, E_Locations.l_6_15);\n\t\tstate.p_Connected.set(E_Locations.l_19_18, E_Locations.l_19_19);\n\t\tstate.p_Connected.set(E_Locations.l_1_1, E_Locations.l_1_2);\n\t\tstate.p_Connected.set(E_Locations.l_10_4, E_Locations.l_10_3);\n\t\tstate.p_Connected.set(E_Locations.l_6_14, E_Locations.l_6_15);\n\t\tstate.p_Connected.set(E_Locations.l_14_9, E_Locations.l_14_8);\n\t\tstate.p_Connected.set(E_Locations.l_7_5, E_Locations.l_7_4);\n\t\tstate.p_Connected.set(E_Locations.l_15_17, E_Locations.l_14_17);\n\t\tstate.p_Connected.set(E_Locations.l_12_4, E_Locations.l_12_5);\n\t\tstate.p_Connected.set(E_Locations.l_5_7, E_Locations.l_5_6);\n\t\tstate.p_Connected.set(E_Locations.l_20_1, E_Locations.l_19_1);\n\t\tstate.p_Connected.set(E_Locations.l_10_12, E_Locations.l_11_12);\n\t\tstate.p_Connected.set(E_Locations.l_15_9, E_Locations.l_15_10);\n\t\tstate.p_Connected.set(E_Locations.l_15_15, E_Locations.l_15_14);\n\t\tstate.p_Connected.set(E_Locations.l_9_14, E_Locations.l_9_13);\n\t\tstate.p_Connected.set(E_Locations.l_8_8, E_Locations.l_8_9);\n\t\tstate.p_Connected.set(E_Locations.l_10_7, E_Locations.l_10_8);\n\t\tstate.p_Connected.set(E_Locations.l_17_20, E_Locations.l_18_20);\n\t\tstate.p_Connected.set(E_Locations.l_8_18, E_Locations.l_8_19);\n\t\tstate.p_Connected.set(E_Locations.l_14_14, E_Locations.l_14_13);\n\t\tstate.p_Connected.set(E_Locations.l_15_14, E_Locations.l_16_14);\n\t\tstate.p_Connected.set(E_Locations.l_15_13, E_Locations.l_16_13);\n\t\tstate.p_Connected.set(E_Locations.l_6_12, E_Locations.l_7_12);\n\t\tstate.p_Connected.set(E_Locations.l_17_4, E_Locations.l_17_5);\n\t\tstate.p_Connected.set(E_Locations.l_9_6, E_Locations.l_10_6);\n\t\tstate.p_Connected.set(E_Locations.l_3_10, E_Locations.l_3_9);\n\t\tstate.p_Connected.set(E_Locations.l_16_8, E_Locations.l_15_8);\n\t\tstate.p_Connected.set(E_Locations.l_1_6, E_Locations.l_1_5);\n\t\tstate.p_Connected.set(E_Locations.l_19_13, E_Locations.l_20_13);\n\t\tstate.p_Connected.set(E_Locations.l_10_8, E_Locations.l_9_8);\n\t\tstate.p_Connected.set(E_Locations.l_15_13, E_Locations.l_14_13);\n\t\tstate.p_Connected.set(E_Locations.l_16_2, E_Locations.l_17_2);\n\t\tstate.p_Connected.set(E_Locations.l_11_16, E_Locations.l_11_17);\n\t\tstate.p_Connected.set(E_Locations.l_16_14, E_Locations.l_16_15);\n\t\tstate.p_Connected.set(E_Locations.l_16_16, E_Locations.l_16_15);\n\t\tstate.p_Connected.set(E_Locations.l_7_12, E_Locations.l_7_11);\n\t\tstate.p_Connected.set(E_Locations.l_1_9, E_Locations.l_1_10);\n\t\tstate.p_Connected.set(E_Locations.l_6_10, E_Locations.l_5_10);\n\t\tstate.p_Connected.set(E_Locations.l_3_15, E_Locations.l_2_15);\n\t\tstate.p_Connected.set(E_Locations.l_3_20, E_Locations.l_2_20);\n\t\tstate.p_Connected.set(E_Locations.l_2_5, E_Locations.l_1_5);\n\t\tstate.p_Connected.set(E_Locations.l_4_20, E_Locations.l_3_20);\n\t\tstate.p_Connected.set(E_Locations.l_10_16, E_Locations.l_10_15);\n\t\tstate.p_Connected.set(E_Locations.l_19_5, E_Locations.l_20_5);\n\t\tstate.p_Connected.set(E_Locations.l_12_8, E_Locations.l_12_9);\n\t\tstate.p_Connected.set(E_Locations.l_1_17, E_Locations.l_1_16);\n\t\tstate.p_Connected.set(E_Locations.l_8_14, E_Locations.l_8_13);\n\t\tstate.p_Connected.set(E_Locations.l_16_18, E_Locations.l_17_18);\n\t\tstate.p_Connected.set(E_Locations.l_2_4, 
E_Locations.l_2_5);\n\t\tstate.p_Connected.set(E_Locations.l_7_16, E_Locations.l_6_16);\n\t\tstate.p_Connected.set(E_Locations.l_7_18, E_Locations.l_7_19);\n\t\tstate.p_Connected.set(E_Locations.l_14_8, E_Locations.l_14_7);\n\t\tstate.p_Connected.set(E_Locations.l_8_13, E_Locations.l_8_12);\n\t\tstate.p_Connected.set(E_Locations.l_1_15, E_Locations.l_2_15);\n\t\tstate.p_Connected.set(E_Locations.l_17_20, E_Locations.l_17_19);\n\t\tstate.p_Connected.set(E_Locations.l_12_4, E_Locations.l_11_4);\n\t\tstate.p_Connected.set(E_Locations.l_6_5, E_Locations.l_6_6);\n\t\tstate.p_Connected.set(E_Locations.l_17_6, E_Locations.l_16_6);\n\t\tstate.p_Connected.set(E_Locations.l_2_7, E_Locations.l_2_6);\n\t\tstate.p_Connected.set(E_Locations.l_6_3, E_Locations.l_5_3);\n\t\tstate.p_Connected.set(E_Locations.l_12_2, E_Locations.l_12_1);\n\t\tstate.p_Connected.set(E_Locations.l_10_12, E_Locations.l_10_11);\n\t\tstate.p_Connected.set(E_Locations.l_5_9, E_Locations.l_5_8);\n\t\tstate.p_Connected.set(E_Locations.l_2_3, E_Locations.l_3_3);\n\t\tstate.p_Connected.set(E_Locations.l_8_13, E_Locations.l_7_13);\n\t\tstate.p_Connected.set(E_Locations.l_12_1, E_Locations.l_13_1);\n\t\tstate.p_Connected.set(E_Locations.l_14_18, E_Locations.l_15_18);\n\t\tstate.p_Connected.set(E_Locations.l_4_17, E_Locations.l_3_17);\n\t\tstate.p_Connected.set(E_Locations.l_7_7, E_Locations.l_7_6);\n\t\tstate.p_Connected.set(E_Locations.l_17_8, E_Locations.l_16_8);\n\t\tstate.p_Connected.set(E_Locations.l_10_19, E_Locations.l_10_18);\n\t\tstate.p_Connected.set(E_Locations.l_3_9, E_Locations.l_3_10);\n\t\tstate.p_Connected.set(E_Locations.l_4_7, E_Locations.l_4_8);\n\t\tstate.p_Connected.set(E_Locations.l_16_15, E_Locations.l_16_14);\n\t\tstate.p_Connected.set(E_Locations.l_12_14, E_Locations.l_12_13);\n\t\tstate.p_Connected.set(E_Locations.l_20_14, E_Locations.l_19_14);\n\t\tstate.p_Connected.set(E_Locations.l_5_17, E_Locations.l_5_18);\n\t\tstate.p_Connected.set(E_Locations.l_2_19, E_Locations.l_3_19);\n\t\tstate.p_Connected.set(E_Locations.l_3_16, E_Locations.l_3_15);\n\t\tstate.p_Connected.set(E_Locations.l_5_9, E_Locations.l_6_9);\n\t\tstate.p_Connected.set(E_Locations.l_15_2, E_Locations.l_16_2);\n\t\tstate.p_Connected.set(E_Locations.l_18_6, E_Locations.l_17_6);\n\t\tstate.p_Connected.set(E_Locations.l_12_13, E_Locations.l_12_14);\n\t\tstate.p_Connected.set(E_Locations.l_13_3, E_Locations.l_13_2);\n\t\tstate.p_Connected.set(E_Locations.l_14_1, E_Locations.l_15_1);\n\t\tstate.p_Connected.set(E_Locations.l_12_17, E_Locations.l_12_18);\n\t\tstate.p_Connected.set(E_Locations.l_14_17, E_Locations.l_14_16);\n\t\tstate.p_Connected.set(E_Locations.l_6_2, E_Locations.l_6_3);\n\t\tstate.p_Connected.set(E_Locations.l_17_9, E_Locations.l_17_10);\n\t\tstate.p_Connected.set(E_Locations.l_13_8, E_Locations.l_13_9);\n\t\tstate.p_Connected.set(E_Locations.l_7_12, E_Locations.l_6_12);\n\t\tstate.p_Connected.set(E_Locations.l_10_7, E_Locations.l_9_7);\n\t\tstate.p_Connected.set(E_Locations.l_12_7, E_Locations.l_12_6);\n\t\tstate.p_Connected.set(E_Locations.l_4_2, E_Locations.l_5_2);\n\t\tstate.p_Connected.set(E_Locations.l_20_18, E_Locations.l_19_18);\n\t\tstate.p_Connected.set(E_Locations.l_4_7, E_Locations.l_5_7);\n\t\tstate.p_Connected.set(E_Locations.l_7_4, E_Locations.l_6_4);\n\t\tstate.p_Connected.set(E_Locations.l_18_8, E_Locations.l_18_7);\n\t\tstate.p_Connected.set(E_Locations.l_4_4, E_Locations.l_4_5);\n\t\tstate.p_Connected.set(E_Locations.l_9_11, E_Locations.l_9_12);\n\t\tstate.p_Connected.set(E_Locations.l_14_9, 
E_Locations.l_14_10);\n\t\tstate.p_Connected.set(E_Locations.l_19_4, E_Locations.l_19_3);\n\t\tstate.p_Connected.set(E_Locations.l_16_11, E_Locations.l_16_10);\n\t\tstate.p_Connected.set(E_Locations.l_13_1, E_Locations.l_12_1);\n\t\tstate.p_Connected.set(E_Locations.l_5_17, E_Locations.l_4_17);\n\t\tstate.p_Connected.set(E_Locations.l_18_1, E_Locations.l_17_1);\n\t\tstate.p_Connected.set(E_Locations.l_10_13, E_Locations.l_10_12);\n\t\tstate.p_Connected.set(E_Locations.l_20_8, E_Locations.l_19_8);\n\t\tstate.p_Connected.set(E_Locations.l_16_12, E_Locations.l_16_11);\n\t\tstate.p_Connected.set(E_Locations.l_12_20, E_Locations.l_11_20);\n\t\tstate.p_Connected.set(E_Locations.l_15_5, E_Locations.l_15_6);\n\t\tstate.p_Connected.set(E_Locations.l_7_3, E_Locations.l_7_2);\n\t\tstate.p_Connected.set(E_Locations.l_18_8, E_Locations.l_17_8);\n\t\tstate.p_Connected.set(E_Locations.l_8_20, E_Locations.l_9_20);\n\t\tstate.p_Connected.set(E_Locations.l_14_20, E_Locations.l_14_19);\n\t\tstate.p_Connected.set(E_Locations.l_15_10, E_Locations.l_15_9);\n\t\tstate.p_Connected.set(E_Locations.l_14_3, E_Locations.l_13_3);\n\t\tstate.p_Connected.set(E_Locations.l_14_8, E_Locations.l_15_8);\n\t\tstate.p_Connected.set(E_Locations.l_6_18, E_Locations.l_7_18);\n\t\tstate.p_Connected.set(E_Locations.l_11_14, E_Locations.l_12_14);\n\t\tstate.p_Connected.set(E_Locations.l_6_13, E_Locations.l_7_13);\n\t\tstate.p_Connected.set(E_Locations.l_12_1, E_Locations.l_11_1);\n\t\tstate.p_Connected.set(E_Locations.l_19_11, E_Locations.l_19_10);\n\t\tstate.p_Connected.set(E_Locations.l_9_5, E_Locations.l_9_4);\n\t\tstate.p_Connected.set(E_Locations.l_19_9, E_Locations.l_20_9);\n\t\tstate.p_Connected.set(E_Locations.l_20_17, E_Locations.l_20_16);\n\t\tstate.p_Connected.set(E_Locations.l_18_7, E_Locations.l_17_7);\n\t\tstate.p_Connected.set(E_Locations.l_11_18, E_Locations.l_10_18);\n\t\tstate.p_Connected.set(E_Locations.l_14_20, E_Locations.l_13_20);\n\t\tstate.p_Connected.set(E_Locations.l_7_1, E_Locations.l_7_2);\n\t\tstate.p_Connected.set(E_Locations.l_18_13, E_Locations.l_19_13);\n\t\tstate.p_Connected.set(E_Locations.l_3_19, E_Locations.l_4_19);\n\t\tstate.p_Connected.set(E_Locations.l_6_2, E_Locations.l_5_2);\n\t\tstate.p_Connected.set(E_Locations.l_19_7, E_Locations.l_19_6);\n\t\tstate.p_Connected.set(E_Locations.l_16_9, E_Locations.l_15_9);\n\t\tstate.p_Connected.set(E_Locations.l_19_8, E_Locations.l_19_9);\n\t\tstate.p_Connected.set(E_Locations.l_17_7, E_Locations.l_16_7);\n\t\tstate.p_Connected.set(E_Locations.l_20_10, E_Locations.l_20_11);\n\t\tstate.p_Connected.set(E_Locations.l_9_1, E_Locations.l_9_2);\n\t\tstate.p_Connected.set(E_Locations.l_15_3, E_Locations.l_16_3);\n\t\tstate.p_Connected.set(E_Locations.l_2_15, E_Locations.l_2_14);\n\t\tstate.p_Connected.set(E_Locations.l_10_10, E_Locations.l_11_10);\n\t\tstate.p_Connected.set(E_Locations.l_17_8, E_Locations.l_17_7);\n\t\tstate.p_Connected.set(E_Locations.l_18_14, E_Locations.l_18_13);\n\t\tstate.p_Connected.set(E_Locations.l_8_5, E_Locations.l_7_5);\n\t\tstate.p_Connected.set(E_Locations.l_8_10, E_Locations.l_8_9);\n\t\tstate.p_Connected.set(E_Locations.l_14_14, E_Locations.l_13_14);\n\t\tstate.p_Connected.set(E_Locations.l_2_2, E_Locations.l_1_2);\n\t\tstate.p_Connected.set(E_Locations.l_10_15, E_Locations.l_9_15);\n\t\tstate.p_Connected.set(E_Locations.l_7_2, E_Locations.l_7_3);\n\t\tstate.p_Connected.set(E_Locations.l_1_6, E_Locations.l_1_7);\n\t\tstate.p_Connected.set(E_Locations.l_4_16, E_Locations.l_4_17);\n\t\tstate.p_Connected.set(E_Locations.l_15_3, 
E_Locations.l_14_3);\n\t\tstate.p_Connected.set(E_Locations.l_9_10, E_Locations.l_9_9);\n\t\tstate.p_Connected.set(E_Locations.l_5_7, E_Locations.l_5_8);\n\t\tstate.p_Connected.set(E_Locations.l_6_7, E_Locations.l_5_7);\n\t\tstate.p_Connected.set(E_Locations.l_19_12, E_Locations.l_20_12);\n\t\tstate.p_Connected.set(E_Locations.l_10_2, E_Locations.l_10_3);\n\t\tstate.p_Connected.set(E_Locations.l_7_13, E_Locations.l_6_13);\n\t\tstate.p_Connected.set(E_Locations.l_3_4, E_Locations.l_3_3);\n\t\tstate.p_Connected.set(E_Locations.l_17_3, E_Locations.l_16_3);\n\t\tstate.p_Connected.set(E_Locations.l_8_19, E_Locations.l_8_20);\n\t\tstate.p_Connected.set(E_Locations.l_10_9, E_Locations.l_9_9);\n\t\tstate.p_Connected.set(E_Locations.l_5_3, E_Locations.l_5_2);\n\t\tstate.p_Connected.set(E_Locations.l_10_18, E_Locations.l_10_17);\n\t\tstate.p_Connected.set(E_Locations.l_16_9, E_Locations.l_16_10);\n\t\tstate.p_Connected.set(E_Locations.l_7_11, E_Locations.l_7_10);\n\t\tstate.p_Connected.set(E_Locations.l_2_16, E_Locations.l_2_15);\n\t\tstate.p_Connected.set(E_Locations.l_16_2, E_Locations.l_15_2);\n\t\tstate.p_Connected.set(E_Locations.l_8_4, E_Locations.l_8_5);\n\t\tstate.p_Connected.set(E_Locations.l_16_5, E_Locations.l_15_5);\n\t\tstate.p_Connected.set(E_Locations.l_9_17, E_Locations.l_8_17);\n\t\tstate.p_Connected.set(E_Locations.l_9_13, E_Locations.l_9_12);\n\t\tstate.p_Connected.set(E_Locations.l_9_3, E_Locations.l_10_3);\n\t\tstate.p_Connected.set(E_Locations.l_15_7, E_Locations.l_15_8);\n\t\tstate.p_Connected.set(E_Locations.l_17_10, E_Locations.l_18_10);\n\t\tstate.p_Connected.set(E_Locations.l_5_2, E_Locations.l_5_3);\n\t\tstate.p_Connected.set(E_Locations.l_15_16, E_Locations.l_15_17);\n\t\tstate.p_Connected.set(E_Locations.l_13_6, E_Locations.l_14_6);\n\t\tstate.p_Connected.set(E_Locations.l_12_12, E_Locations.l_12_11);\n\t\tstate.p_Connected.set(E_Locations.l_6_14, E_Locations.l_7_14);\n\t\tstate.p_Connected.set(E_Locations.l_10_14, E_Locations.l_9_14);\n\t\tstate.p_Connected.set(E_Locations.l_17_14, E_Locations.l_17_13);\n\t\tstate.p_Connected.set(E_Locations.l_7_10, E_Locations.l_8_10);\n\t\tstate.p_Connected.set(E_Locations.l_1_4, E_Locations.l_2_4);\n\t\tstate.p_Connected.set(E_Locations.l_5_8, E_Locations.l_4_8);\n\t\tstate.p_Connected.set(E_Locations.l_18_1, E_Locations.l_19_1);\n\t\tstate.p_Connected.set(E_Locations.l_11_14, E_Locations.l_11_13);\n\t\tstate.p_Connected.set(E_Locations.l_17_6, E_Locations.l_17_7);\n\t\tstate.p_Connected.set(E_Locations.l_2_12, E_Locations.l_1_12);\n\t\tstate.p_Connected.set(E_Locations.l_14_8, E_Locations.l_13_8);\n\t\tstate.p_Connected.set(E_Locations.l_10_7, E_Locations.l_11_7);\n\t\tstate.p_Connected.set(E_Locations.l_5_16, E_Locations.l_5_15);\n\t\tstate.p_Connected.set(E_Locations.l_18_4, E_Locations.l_19_4);\n\t\tstate.p_Connected.set(E_Locations.l_17_16, E_Locations.l_17_17);\n\t\tstate.p_Connected.set(E_Locations.l_14_6, E_Locations.l_14_5);\n\t\tstate.p_Connected.set(E_Locations.l_20_2, E_Locations.l_19_2);\n\t\tstate.p_Connected.set(E_Locations.l_11_9, E_Locations.l_11_10);\n\t\tstate.p_Connected.set(E_Locations.l_4_20, E_Locations.l_5_20);\n\t\tstate.p_Connected.set(E_Locations.l_6_18, E_Locations.l_6_19);\n\t\tstate.p_Connected.set(E_Locations.l_7_16, E_Locations.l_7_15);\n\t\tstate.p_Connected.set(E_Locations.l_14_8, E_Locations.l_14_9);\n\t\tstate.p_Connected.set(E_Locations.l_16_17, E_Locations.l_17_17);\n\t\tstate.p_Connected.set(E_Locations.l_13_14, E_Locations.l_13_15);\n\t\tstate.p_Connected.set(E_Locations.l_10_5, 
E_Locations.l_10_6);\n\t\tstate.p_Connected.set(E_Locations.l_13_6, E_Locations.l_13_7);\n\t\tstate.p_Connected.set(E_Locations.l_17_4, E_Locations.l_16_4);\n\t\tstate.p_Connected.set(E_Locations.l_7_19, E_Locations.l_8_19);\n\t\tstate.p_Connected.set(E_Locations.l_2_14, E_Locations.l_2_13);\n\t\tstate.p_Connected.set(E_Locations.l_13_13, E_Locations.l_12_13);\n\t\tstate.p_Connected.set(E_Locations.l_15_6, E_Locations.l_16_6);\n\t\tstate.p_Connected.set(E_Locations.l_14_12, E_Locations.l_13_12);\n\t\tstate.p_Connected.set(E_Locations.l_18_14, E_Locations.l_17_14);\n\t\tstate.p_Connected.set(E_Locations.l_17_9, E_Locations.l_16_9);\n\t\tstate.p_Connected.set(E_Locations.l_10_19, E_Locations.l_10_20);\n\t\tstate.p_Connected.set(E_Locations.l_4_9, E_Locations.l_3_9);\n\t\tstate.p_Connected.set(E_Locations.l_5_12, E_Locations.l_5_13);\n\t\tstate.p_Connected.set(E_Locations.l_3_4, E_Locations.l_2_4);\n\t\tstate.p_Connected.set(E_Locations.l_2_19, E_Locations.l_2_20);\n\t\tstate.p_Connected.set(E_Locations.l_7_19, E_Locations.l_7_20);\n\t\tstate.p_Connected.set(E_Locations.l_12_15, E_Locations.l_12_14);\n\t\tstate.p_Connected.set(E_Locations.l_16_17, E_Locations.l_16_16);\n\t\tstate.p_Connected.set(E_Locations.l_5_2, E_Locations.l_5_1);\n\t\tstate.p_Connected.set(E_Locations.l_5_5, E_Locations.l_5_6);\n\t\tstate.p_Connected.set(E_Locations.l_4_10, E_Locations.l_5_10);\n\t\tstate.p_Connected.set(E_Locations.l_13_12, E_Locations.l_12_12);\n\t\tstate.p_Connected.set(E_Locations.l_4_16, E_Locations.l_5_16);\n\t\tstate.p_Connected.set(E_Locations.l_13_16, E_Locations.l_12_16);\n\t\tstate.p_Connected.set(E_Locations.l_19_20, E_Locations.l_19_19);\n\t\tstate.p_Connected.set(E_Locations.l_8_8, E_Locations.l_7_8);\n\t\tstate.p_Connected.set(E_Locations.l_15_10, E_Locations.l_16_10);\n\t\tstate.p_Connected.set(E_Locations.l_12_16, E_Locations.l_11_16);\n\t\tstate.p_Connected.set(E_Locations.l_4_13, E_Locations.l_4_14);\n\t\tstate.p_Connected.set(E_Locations.l_12_2, E_Locations.l_12_3);\n\t\tstate.p_Connected.set(E_Locations.l_18_20, E_Locations.l_18_19);\n\t\tstate.p_Connected.set(E_Locations.l_4_11, E_Locations.l_4_12);\n\t\tstate.p_Connected.set(E_Locations.l_5_2, E_Locations.l_6_2);\n\t\tstate.p_Connected.set(E_Locations.l_6_4, E_Locations.l_6_5);\n\t\tstate.p_Connected.set(E_Locations.l_10_6, E_Locations.l_9_6);\n\t\tstate.p_Connected.set(E_Locations.l_7_11, E_Locations.l_7_12);\n\t\tstate.p_Connected.set(E_Locations.l_8_7, E_Locations.l_7_7);\n\t\tstate.p_Connected.set(E_Locations.l_11_12, E_Locations.l_12_12);\n\t\tstate.p_Connected.set(E_Locations.l_13_18, E_Locations.l_13_19);\n\t\tstate.p_Connected.set(E_Locations.l_14_19, E_Locations.l_15_19);\n\t\tstate.p_Connected.set(E_Locations.l_2_6, E_Locations.l_1_6);\n\t\tstate.p_Connected.set(E_Locations.l_10_20, E_Locations.l_9_20);\n\t\tstate.p_Connected.set(E_Locations.l_11_1, E_Locations.l_11_2);\n\t\tstate.p_Connected.set(E_Locations.l_6_4, E_Locations.l_7_4);\n\t\tstate.p_Connected.set(E_Locations.l_13_20, E_Locations.l_14_20);\n\t\tstate.p_Connected.set(E_Locations.l_14_19, E_Locations.l_13_19);\n\t\tstate.p_Connected.set(E_Locations.l_16_12, E_Locations.l_15_12);\n\t\tstate.p_Connected.set(E_Locations.l_2_5, E_Locations.l_2_4);\n\t\tstate.p_Connected.set(E_Locations.l_2_7, E_Locations.l_1_7);\n\t\tstate.p_Connected.set(E_Locations.l_16_1, E_Locations.l_17_1);\n\t\tstate.p_Connected.set(E_Locations.l_1_5, E_Locations.l_1_4);\n\t\tstate.p_Connected.set(E_Locations.l_8_3, E_Locations.l_7_3);\n\t\tstate.p_Connected.set(E_Locations.l_11_7, 
E_Locations.l_11_8);\n\t\tstate.p_Connected.set(E_Locations.l_9_10, E_Locations.l_10_10);\n\t\tstate.p_Connected.set(E_Locations.l_8_15, E_Locations.l_9_15);\n\t\tstate.p_Connected.set(E_Locations.l_7_9, E_Locations.l_7_10);\n\t\tstate.p_Connected.set(E_Locations.l_18_19, E_Locations.l_19_19);\n\t\tstate.p_Connected.set(E_Locations.l_15_20, E_Locations.l_15_19);\n\t\tstate.p_Connected.set(E_Locations.l_11_17, E_Locations.l_11_16);\n\t\tstate.p_Connected.set(E_Locations.l_12_10, E_Locations.l_11_10);\n\t\tstate.p_Connected.set(E_Locations.l_13_17, E_Locations.l_13_18);\n\t\tstate.p_Connected.set(E_Locations.l_11_3, E_Locations.l_12_3);\n\t\tstate.p_Connected.set(E_Locations.l_15_2, E_Locations.l_15_1);\n\t\tstate.p_Connected.set(E_Locations.l_3_9, E_Locations.l_4_9);\n\t\tstate.p_Connected.set(E_Locations.l_5_11, E_Locations.l_5_12);\n\t\tstate.p_Connected.set(E_Locations.l_10_14, E_Locations.l_10_15);\n\t\tstate.p_Connected.set(E_Locations.l_16_13, E_Locations.l_16_12);\n\t\tstate.p_Connected.set(E_Locations.l_13_3, E_Locations.l_12_3);\n\t\tstate.p_Connected.set(E_Locations.l_10_10, E_Locations.l_9_10);\n\t\tstate.p_Connected.set(E_Locations.l_19_17, E_Locations.l_19_16);\n\t\tstate.p_Connected.set(E_Locations.l_14_16, E_Locations.l_13_16);\n\t\tstate.p_Connected.set(E_Locations.l_20_8, E_Locations.l_20_7);\n\t\tstate.p_Connected.set(E_Locations.l_19_19, E_Locations.l_18_19);\n\t\tstate.p_Connected.set(E_Locations.l_1_20, E_Locations.l_2_20);\n\t\tstate.p_Connected.set(E_Locations.l_3_2, E_Locations.l_4_2);\n\t\tstate.p_Connected.set(E_Locations.l_12_5, E_Locations.l_12_4);\n\t\tstate.p_Connected.set(E_Locations.l_19_7, E_Locations.l_20_7);\n\t\tstate.p_Connected.set(E_Locations.l_13_15, E_Locations.l_14_15);\n\t\tstate.p_Connected.set(E_Locations.l_11_19, E_Locations.l_10_19);\n\t\tstate.p_Connected.set(E_Locations.l_17_8, E_Locations.l_18_8);\n\t\tstate.p_Connected.set(E_Locations.l_14_17, E_Locations.l_13_17);\n\t\tstate.p_Connected.set(E_Locations.l_7_6, E_Locations.l_7_5);\n\t\tstate.p_Connected.set(E_Locations.l_9_8, E_Locations.l_8_8);\n\t\tstate.p_Connected.set(E_Locations.l_9_10, E_Locations.l_9_11);\n\t\tstate.p_Connected.set(E_Locations.l_17_18, E_Locations.l_17_17);\n\t\tstate.p_Connected.set(E_Locations.l_10_16, E_Locations.l_10_17);\n\t\tstate.p_Connected.set(E_Locations.l_16_4, E_Locations.l_16_3);\n\t\tstate.p_Connected.set(E_Locations.l_12_15, E_Locations.l_12_16);\n\t\tstate.p_Connected.set(E_Locations.l_11_15, E_Locations.l_10_15);\n\t\tstate.p_Connected.set(E_Locations.l_12_15, E_Locations.l_13_15);\n\t\tstate.p_Connected.set(E_Locations.l_2_14, E_Locations.l_2_15);\n\t\tstate.p_Connected.set(E_Locations.l_10_18, E_Locations.l_9_18);\n\t\tstate.p_Connected.set(E_Locations.l_17_15, E_Locations.l_17_16);\n\t\tstate.p_Connected.set(E_Locations.l_8_3, E_Locations.l_8_4);\n\t\tstate.p_Connected.set(E_Locations.l_3_4, E_Locations.l_4_4);\n\t\tstate.p_Connected.set(E_Locations.l_10_18, E_Locations.l_11_18);\n\t\tstate.p_Connected.set(E_Locations.l_10_11, E_Locations.l_9_11);\n\t\tstate.p_Connected.set(E_Locations.l_1_8, E_Locations.l_1_7);\n\t\tstate.p_Connected.set(E_Locations.l_18_7, E_Locations.l_18_8);\n\t\tstate.p_Connected.set(E_Locations.l_4_4, E_Locations.l_3_4);\n\t\tstate.p_Connected.set(E_Locations.l_11_13, E_Locations.l_11_14);\n\t\tstate.p_Connected.set(E_Locations.l_6_9, E_Locations.l_6_8);\n\t\tstate.p_Connected.set(E_Locations.l_12_11, E_Locations.l_12_10);\n\t\tstate.p_Connected.set(E_Locations.l_11_13, 
E_Locations.l_12_13);\n\t\tstate.p_Connected.set(E_Locations.l_16_4, E_Locations.l_15_4);\n\t\tstate.p_Connected.set(E_Locations.l_14_5, E_Locations.l_14_4);\n\t\tstate.p_Connected.set(E_Locations.l_7_8, E_Locations.l_7_9);\n\t\tstate.p_Connected.set(E_Locations.l_9_10, E_Locations.l_8_10);\n\t\tstate.p_Connected.set(E_Locations.l_14_2, E_Locations.l_14_3);\n\t\tstate.p_Connected.set(E_Locations.l_10_14, E_Locations.l_10_13);\n\t\tstate.p_Connected.set(E_Locations.l_1_17, E_Locations.l_2_17);\n\t\tstate.p_Connected.set(E_Locations.l_3_12, E_Locations.l_3_13);\n\t\tstate.p_Connected.set(E_Locations.l_18_5, E_Locations.l_18_6);\n\t\tstate.p_Connected.set(E_Locations.l_18_20, E_Locations.l_19_20);\n\t\tstate.p_Connected.set(E_Locations.l_20_11, E_Locations.l_20_12);\n\t\tstate.p_Connected.set(E_Locations.l_10_19, E_Locations.l_11_19);\n\t\tstate.p_Connected.set(E_Locations.l_13_7, E_Locations.l_12_7);\n\t\tstate.p_Connected.set(E_Locations.l_20_13, E_Locations.l_20_12);\n\t\tstate.p_Connected.set(E_Locations.l_18_15, E_Locations.l_17_15);\n\t\tstate.p_Connected.set(E_Locations.l_18_15, E_Locations.l_18_14);\n\t\tstate.p_Connected.set(E_Locations.l_13_11, E_Locations.l_13_12);\n\t\tstate.p_Connected.set(E_Locations.l_17_19, E_Locations.l_16_19);\n\t\tstate.p_Connected.set(E_Locations.l_9_1, E_Locations.l_10_1);\n\t\tstate.p_Connected.set(E_Locations.l_15_4, E_Locations.l_15_3);\n\t\tstate.p_Connected.set(E_Locations.l_19_19, E_Locations.l_19_20);\n\t\tstate.p_Connected.set(E_Locations.l_15_15, E_Locations.l_16_15);\n\t\tstate.p_Connected.set(E_Locations.l_5_1, E_Locations.l_6_1);\n\t\tstate.p_Connected.set(E_Locations.l_11_12, E_Locations.l_11_13);\n\t\tstate.p_Connected.set(E_Locations.l_2_2, E_Locations.l_2_3);\n\t\tstate.p_Connected.set(E_Locations.l_6_13, E_Locations.l_6_12);\n\t\tstate.p_Connected.set(E_Locations.l_12_6, E_Locations.l_12_7);\n\t\tstate.p_Connected.set(E_Locations.l_11_15, E_Locations.l_11_16);\n\t\tstate.p_Connected.set(E_Locations.l_5_18, E_Locations.l_5_19);\n\t\tstate.p_Connected.set(E_Locations.l_9_2, E_Locations.l_10_2);\n\t\tstate.p_Connected.set(E_Locations.l_11_10, E_Locations.l_10_10);\n\t\tstate.p_Connected.set(E_Locations.l_11_14, E_Locations.l_11_15);\n\t\tstate.p_Connected.set(E_Locations.l_17_20, E_Locations.l_16_20);\n\t\tstate.p_Connected.set(E_Locations.l_18_3, E_Locations.l_18_4);\n\t\tstate.p_Connected.set(E_Locations.l_15_18, E_Locations.l_15_17);\n\t\tstate.p_Connected.set(E_Locations.l_13_14, E_Locations.l_12_14);\n\t\tstate.p_Connected.set(E_Locations.l_11_18, E_Locations.l_11_17);\n\t\tstate.p_Connected.set(E_Locations.l_1_3, E_Locations.l_1_4);\n\t\tstate.p_Connected.set(E_Locations.l_12_16, E_Locations.l_12_15);\n\t\tstate.p_Connected.set(E_Locations.l_10_4, E_Locations.l_9_4);\n\t\tstate.p_Connected.set(E_Locations.l_13_12, E_Locations.l_14_12);\n\t\tstate.p_Connected.set(E_Locations.l_10_6, E_Locations.l_10_7);\n\t\tstate.p_Connected.set(E_Locations.l_16_4, E_Locations.l_16_5);\n\t\tstate.p_Connected.set(E_Locations.l_1_7, E_Locations.l_2_7);\n\t\tstate.p_Connected.set(E_Locations.l_11_17, E_Locations.l_11_18);\n\t\tstate.p_Connected.set(E_Locations.l_1_15, E_Locations.l_1_14);\n\t\tstate.p_Connected.set(E_Locations.l_7_18, E_Locations.l_6_18);\n\t\tstate.p_Connected.set(E_Locations.l_13_4, E_Locations.l_14_4);\n\t\tstate.p_Connected.set(E_Locations.l_7_3, E_Locations.l_8_3);\n\t\tstate.p_Connected.set(E_Locations.l_6_9, E_Locations.l_5_9);\n\t\tstate.p_Connected.set(E_Locations.l_6_1, 
E_Locations.l_5_1);\n\t\tstate.p_Connected.set(E_Locations.l_5_18, E_Locations.l_5_17);\n\t\tstate.p_Connected.set(E_Locations.l_13_9, E_Locations.l_12_9);\n\t\tstate.p_Connected.set(E_Locations.l_16_1, E_Locations.l_15_1);\n\t\tstate.p_Connected.set(E_Locations.l_9_18, E_Locations.l_9_19);\n\t\tstate.p_Connected.set(E_Locations.l_9_11, E_Locations.l_9_10);\n\t\tstate.p_Connected.set(E_Locations.l_4_8, E_Locations.l_4_7);\n\t\tstate.p_Connected.set(E_Locations.l_6_10, E_Locations.l_7_10);\n\t\tstate.p_Connected.set(E_Locations.l_7_17, E_Locations.l_6_17);\n\t\tstate.p_Connected.set(E_Locations.l_18_7, E_Locations.l_18_6);\n\t\tstate.p_Connected.set(E_Locations.l_12_14, E_Locations.l_12_15);\n\t\tstate.p_Connected.set(E_Locations.l_15_19, E_Locations.l_14_19);\n\t\tstate.p_Connected.set(E_Locations.l_20_9, E_Locations.l_20_10);\n\t\tstate.p_Connected.set(E_Locations.l_8_7, E_Locations.l_8_6);\n\t\tstate.p_Connected.set(E_Locations.l_6_13, E_Locations.l_6_14);\n\t\tstate.p_Connected.set(E_Locations.l_13_16, E_Locations.l_14_16);\n\t\tstate.p_Connected.set(E_Locations.l_1_4, E_Locations.l_1_3);\n\t\tstate.p_Connected.set(E_Locations.l_4_1, E_Locations.l_3_1);\n\t\tstate.p_Connected.set(E_Locations.l_17_13, E_Locations.l_17_14);\n\t\tstate.p_Connected.set(E_Locations.l_20_6, E_Locations.l_20_7);\n\t\tstate.p_Connected.set(E_Locations.l_9_17, E_Locations.l_9_18);\n\t\tstate.p_Connected.set(E_Locations.l_20_16, E_Locations.l_20_17);\n\t\tstate.p_Connected.set(E_Locations.l_18_10, E_Locations.l_18_11);\n\t\tstate.p_Connected.set(E_Locations.l_16_19, E_Locations.l_15_19);\n\t\tstate.p_Connected.set(E_Locations.l_12_7, E_Locations.l_13_7);\n\t\tstate.p_Connected.set(E_Locations.l_14_17, E_Locations.l_14_18);\n\t\tstate.p_Connected.set(E_Locations.l_6_9, E_Locations.l_6_10);\n\t\tstate.p_Connected.set(E_Locations.l_14_7, E_Locations.l_13_7);\n\t\tstate.p_Connected.set(E_Locations.l_3_12, E_Locations.l_3_11);\n\t\tstate.p_Connected.set(E_Locations.l_2_6, E_Locations.l_2_5);\n\t\tstate.p_Connected.set(E_Locations.l_16_16, E_Locations.l_16_17);\n\t\tstate.p_Connected.set(E_Locations.l_9_15, E_Locations.l_9_14);\n\t\tstate.p_Connected.set(E_Locations.l_12_3, E_Locations.l_13_3);\n\t\tstate.p_Connected.set(E_Locations.l_17_13, E_Locations.l_17_12);\n\t\tstate.p_Connected.set(E_Locations.l_10_11, E_Locations.l_10_10);\n\t\tstate.p_Connected.set(E_Locations.l_18_10, E_Locations.l_18_9);\n\t\tstate.p_Connected.set(E_Locations.l_1_5, E_Locations.l_1_6);\n\t\tstate.p_Connected.set(E_Locations.l_8_11, E_Locations.l_9_11);\n\t\tstate.p_Connected.set(E_Locations.l_11_20, E_Locations.l_12_20);\n\t\tstate.p_Connected.set(E_Locations.l_9_7, E_Locations.l_9_6);\n\t\tstate.p_Connected.set(E_Locations.l_3_17, E_Locations.l_3_18);\n\t\tstate.p_Connected.set(E_Locations.l_4_1, E_Locations.l_4_2);\n\t\tstate.p_Connected.set(E_Locations.l_3_11, E_Locations.l_3_12);\n\t\tstate.p_Connected.set(E_Locations.l_9_9, E_Locations.l_9_10);\n\t\tstate.p_Connected.set(E_Locations.l_16_8, E_Locations.l_16_7);\n\t\tstate.p_Connected.set(E_Locations.l_8_6, E_Locations.l_8_5);\n\t\tstate.p_Connected.set(E_Locations.l_7_5, E_Locations.l_6_5);\n\t\tstate.p_Connected.set(E_Locations.l_17_19, E_Locations.l_17_18);\n\t\tstate.p_Connected.set(E_Locations.l_20_19, E_Locations.l_20_18);\n\t\tstate.p_Connected.set(E_Locations.l_13_13, E_Locations.l_14_13);\n\t\tstate.p_Connected.set(E_Locations.l_4_19, E_Locations.l_5_19);\n\t\tstate.p_Connected.set(E_Locations.l_6_16, E_Locations.l_6_17);\n\t\tstate.p_Connected.set(E_Locations.l_19_5, 
E_Locations.l_18_5);\n\t\tstate.p_Connected.set(E_Locations.l_2_13, E_Locations.l_2_14);\n\t\tstate.p_Connected.set(E_Locations.l_16_19, E_Locations.l_17_19);\n\t\tstate.p_Connected.set(E_Locations.l_11_6, E_Locations.l_11_5);\n\t\tstate.p_Connected.set(E_Locations.l_1_7, E_Locations.l_1_6);\n\t\tstate.p_Connected.set(E_Locations.l_19_3, E_Locations.l_18_3);\n\t\tstate.p_Connected.set(E_Locations.l_11_15, E_Locations.l_12_15);\n\t\tstate.p_Connected.set(E_Locations.l_9_3, E_Locations.l_8_3);\n\t\tstate.p_Connected.set(E_Locations.l_9_7, E_Locations.l_9_8);\n\t\tstate.p_Connected.set(E_Locations.l_10_13, E_Locations.l_9_13);\n\t\tstate.p_Connected.set(E_Locations.l_16_13, E_Locations.l_16_14);\n\t\tstate.p_Connected.set(E_Locations.l_19_12, E_Locations.l_19_11);\n\t\tstate.p_Connected.set(E_Locations.l_15_5, E_Locations.l_14_5);\n\t\tstate.p_Connected.set(E_Locations.l_2_8, E_Locations.l_2_9);\n\t\tstate.p_Connected.set(E_Locations.l_4_5, E_Locations.l_4_4);\n\t\tstate.p_Connected.set(E_Locations.l_2_10, E_Locations.l_1_10);\n\t\tstate.p_Connected.set(E_Locations.l_16_3, E_Locations.l_16_4);\n\t\tstate.p_Connected.set(E_Locations.l_16_17, E_Locations.l_15_17);\n\t\tstate.p_Connected.set(E_Locations.l_7_15, E_Locations.l_6_15);\n\t\tstate.p_Connected.set(E_Locations.l_8_5, E_Locations.l_9_5);\n\t\tstate.p_Connected.set(E_Locations.l_5_18, E_Locations.l_4_18);\n\t\tstate.p_Connected.set(E_Locations.l_20_7, E_Locations.l_19_7);\n\t\tstate.p_Connected.set(E_Locations.l_8_7, E_Locations.l_8_8);\n\t\tstate.p_Connected.set(E_Locations.l_17_3, E_Locations.l_17_2);\n\t\tstate.p_Connected.set(E_Locations.l_14_9, E_Locations.l_15_9);\n\t\tstate.p_Connected.set(E_Locations.l_11_20, E_Locations.l_11_19);\n\t\tstate.p_Connected.set(E_Locations.l_11_16, E_Locations.l_11_15);\n\t\tstate.p_Connected.set(E_Locations.l_10_5, E_Locations.l_10_4);\n\t\tstate.p_Connected.set(E_Locations.l_14_13, E_Locations.l_14_12);\n\t\tstate.p_Connected.set(E_Locations.l_17_14, E_Locations.l_16_14);\n\t\tstate.p_Connected.set(E_Locations.l_14_2, E_Locations.l_14_1);\n\t\tstate.p_Connected.set(E_Locations.l_5_10, E_Locations.l_4_10);\n\t\tstate.p_Connected.set(E_Locations.l_2_7, E_Locations.l_2_8);\n\t\tstate.p_Connected.set(E_Locations.l_3_1, E_Locations.l_3_2);\n\t\tstate.p_Connected.set(E_Locations.l_7_9, E_Locations.l_6_9);\n\t\tstate.p_Connected.set(E_Locations.l_8_2, E_Locations.l_9_2);\n\t\tstate.p_Connected.set(E_Locations.l_6_3, E_Locations.l_7_3);\n\t\tstate.p_Connected.set(E_Locations.l_14_12, E_Locations.l_14_13);\n\t\tstate.p_Connected.set(E_Locations.l_2_19, E_Locations.l_2_18);\n\t\tstate.p_Connected.set(E_Locations.l_19_11, E_Locations.l_18_11);\n\t\tstate.p_Connected.set(E_Locations.l_19_7, E_Locations.l_18_7);\n\t\tstate.p_Connected.set(E_Locations.l_14_1, E_Locations.l_13_1);\n\t\tstate.p_Connected.set(E_Locations.l_11_3, E_Locations.l_11_2);\n\t\tstate.p_Connected.set(E_Locations.l_6_6, E_Locations.l_6_5);\n\t\tstate.p_Connected.set(E_Locations.l_13_18, E_Locations.l_13_17);\n\t\tstate.p_Connected.set(E_Locations.l_14_18, E_Locations.l_14_17);\n\t\tstate.p_Connected.set(E_Locations.l_15_11, E_Locations.l_15_12);\n\t\tstate.p_Connected.set(E_Locations.l_10_4, E_Locations.l_10_5);\n\t\tstate.p_Connected.set(E_Locations.l_6_20, E_Locations.l_6_19);\n\t\tstate.p_Connected.set(E_Locations.l_7_20, E_Locations.l_7_19);\n\t\tstate.p_Connected.set(E_Locations.l_17_17, E_Locations.l_17_18);\n\t\tstate.p_Connected.set(E_Locations.l_18_14, E_Locations.l_18_15);\n\t\tstate.p_Connected.set(E_Locations.l_16_2, 
E_Locations.l_16_1);\n\t\tstate.p_Connected.set(E_Locations.l_1_19, E_Locations.l_1_20);\n\t\tstate.p_Connected.set(E_Locations.l_16_18, E_Locations.l_16_19);\n\t\tstate.p_Connected.set(E_Locations.l_14_19, E_Locations.l_14_18);\n\t\tstate.p_Connected.set(E_Locations.l_1_12, E_Locations.l_1_13);\n\t\tstate.p_Connected.set(E_Locations.l_1_10, E_Locations.l_1_11);\n\t\tstate.p_Connected.set(E_Locations.l_9_19, E_Locations.l_10_19);\n\t\tstate.p_Connected.set(E_Locations.l_4_9, E_Locations.l_4_10);\n\t\tstate.p_Connected.set(E_Locations.l_10_6, E_Locations.l_10_5);\n\t\tstate.p_Connected.set(E_Locations.l_5_12, E_Locations.l_4_12);\n\t\tstate.p_Connected.set(E_Locations.l_9_13, E_Locations.l_9_14);\n\t\tstate.p_Connected.set(E_Locations.l_12_5, E_Locations.l_12_6);\n\t\tstate.p_Connected.set(E_Locations.l_3_5, E_Locations.l_3_4);\n\t\tstate.p_Connected.set(E_Locations.l_11_1, E_Locations.l_12_1);\n\t\tstate.p_Connected.set(E_Locations.l_13_4, E_Locations.l_12_4);\n\t\tstate.p_Connected.set(E_Locations.l_12_16, E_Locations.l_13_16);\n\t\tstate.p_Connected.set(E_Locations.l_18_17, E_Locations.l_19_17);\n\t\tstate.p_Connected.set(E_Locations.l_5_17, E_Locations.l_6_17);\n\t\tstate.p_Connected.set(E_Locations.l_3_9, E_Locations.l_2_9);\n\t\tstate.p_Connected.set(E_Locations.l_2_18, E_Locations.l_2_19);\n\t\tstate.p_Connected.set(E_Locations.l_7_2, E_Locations.l_7_1);\n\t\tstate.p_Connected.set(E_Locations.l_2_18, E_Locations.l_2_17);\n\t\tstate.p_Connected.set(E_Locations.l_20_2, E_Locations.l_20_1);\n\t\tstate.p_Connected.set(E_Locations.l_8_3, E_Locations.l_8_2);\n\t\tstate.p_Connected.set(E_Locations.l_17_15, E_Locations.l_18_15);\n\t\tstate.p_Connected.set(E_Locations.l_1_14, E_Locations.l_2_14);\n\t\tstate.p_Connected.set(E_Locations.l_3_3, E_Locations.l_3_4);\n\t\tstate.p_Connected.set(E_Locations.l_5_19, E_Locations.l_5_20);\n\t\tstate.p_Connected.set(E_Locations.l_9_12, E_Locations.l_9_13);\n\t\tstate.p_Connected.set(E_Locations.l_18_15, E_Locations.l_19_15);\n\t\tstate.p_Connected.set(E_Locations.l_2_9, E_Locations.l_2_10);\n\t\tstate.p_Connected.set(E_Locations.l_18_15, E_Locations.l_18_16);\n\t\tstate.p_Connected.set(E_Locations.l_3_17, E_Locations.l_4_17);\n\t\tstate.p_Connected.set(E_Locations.l_19_17, E_Locations.l_18_17);\n\t\tstate.p_Connected.set(E_Locations.l_17_9, E_Locations.l_18_9);\n\t\tstate.p_Connected.set(E_Locations.l_15_5, E_Locations.l_16_5);\n\t\tstate.p_Connected.set(E_Locations.l_4_3, E_Locations.l_3_3);\n\t\tstate.p_Connected.set(E_Locations.l_6_19, E_Locations.l_7_19);\n\t\tstate.p_Connected.set(E_Locations.l_3_13, E_Locations.l_3_14);\n\t\tstate.p_Connected.set(E_Locations.l_3_14, E_Locations.l_3_15);\n\t\tstate.p_Connected.set(E_Locations.l_3_19, E_Locations.l_2_19);\n\t\tstate.p_Connected.set(E_Locations.l_17_10, E_Locations.l_17_11);\n\t\tstate.p_Connected.set(E_Locations.l_19_8, E_Locations.l_20_8);\n\t\tstate.p_Connected.set(E_Locations.l_10_1, E_Locations.l_10_2);\n\t\tstate.p_Connected.set(E_Locations.l_5_4, E_Locations.l_5_3);\n\t\tstate.p_Connected.set(E_Locations.l_11_9, E_Locations.l_10_9);\n\t\tstate.p_Connected.set(E_Locations.l_11_11, E_Locations.l_10_11);\n\t\tstate.p_Connected.set(E_Locations.l_10_17, E_Locations.l_10_18);\n\t\tstate.p_Connected.set(E_Locations.l_8_18, E_Locations.l_9_18);\n\t\tstate.p_Connected.set(E_Locations.l_20_18, E_Locations.l_20_17);\n\t\tstate.p_Connected.set(E_Locations.l_15_16, E_Locations.l_15_15);\n\t\tstate.p_Connected.set(E_Locations.l_7_11, E_Locations.l_8_11);\n\t\tstate.p_Connected.set(E_Locations.l_10_19, 
E_Locations.l_9_19);\n\t\tstate.p_Connected.set(E_Locations.l_15_18, E_Locations.l_14_18);\n\t\tstate.p_Connected.set(E_Locations.l_15_11, E_Locations.l_15_10);\n\t\tstate.p_Connected.set(E_Locations.l_9_5, E_Locations.l_10_5);\n\t\tstate.p_Connected.set(E_Locations.l_1_19, E_Locations.l_1_18);\n\t\tstate.p_Connected.set(E_Locations.l_17_3, E_Locations.l_17_4);\n\t\tstate.p_Connected.set(E_Locations.l_12_18, E_Locations.l_12_17);\n\t\tstate.p_Connected.set(E_Locations.l_15_7, E_Locations.l_14_7);\n\t\tstate.p_Connected.set(E_Locations.l_8_19, E_Locations.l_8_18);\n\t\tstate.p_Connected.set(E_Locations.l_20_12, E_Locations.l_20_11);\n\t\tstate.p_Connected.set(E_Locations.l_8_12, E_Locations.l_8_13);\n\t\tstate.p_Connected.set(E_Locations.l_20_20, E_Locations.l_20_19);\n\t\tstate.p_Connected.set(E_Locations.l_8_7, E_Locations.l_9_7);\n\t\tstate.p_Connected.set(E_Locations.l_8_11, E_Locations.l_7_11);\n\t\tstate.p_Connected.set(E_Locations.l_8_16, E_Locations.l_8_15);\n\t\tstate.p_Connected.set(E_Locations.l_20_16, E_Locations.l_19_16);\n\t\tstate.p_Connected.set(E_Locations.l_19_1, E_Locations.l_18_1);\n\t\tstate.p_Connected.set(E_Locations.l_19_13, E_Locations.l_18_13);\n\t\tstate.p_Connected.set(E_Locations.l_3_18, E_Locations.l_3_17);\n\t\tstate.p_Connected.set(E_Locations.l_2_16, E_Locations.l_2_17);\n\t\tstate.p_Connected.set(E_Locations.l_14_6, E_Locations.l_13_6);\n\t\tstate.p_Connected.set(E_Locations.l_18_12, E_Locations.l_18_13);\n\t\tstate.p_Connected.set(E_Locations.l_6_8, E_Locations.l_6_7);\n\t\tstate.p_Connected.set(E_Locations.l_11_5, E_Locations.l_12_5);\n\t\tstate.p_Connected.set(E_Locations.l_15_13, E_Locations.l_15_14);\n\t\tstate.p_Connected.set(E_Locations.l_12_6, E_Locations.l_13_6);\n\t\tstate.p_Connected.set(E_Locations.l_20_19, E_Locations.l_20_20);\n\t\tstate.p_Connected.set(E_Locations.l_20_7, E_Locations.l_20_6);\n\t\tstate.p_Connected.set(E_Locations.l_6_17, E_Locations.l_5_17);\n\t\tstate.p_Connected.set(E_Locations.l_9_18, E_Locations.l_8_18);\n\t\tstate.p_Connected.set(E_Locations.l_7_11, E_Locations.l_6_11);\n\t\tstate.p_Connected.set(E_Locations.l_20_3, E_Locations.l_20_2);\n\t\tstate.p_Connected.set(E_Locations.l_5_8, E_Locations.l_5_9);\n\t\tstate.p_Connected.set(E_Locations.l_12_7, E_Locations.l_11_7);\n\t\tstate.p_Connected.set(E_Locations.l_16_8, E_Locations.l_17_8);\n\t\tstate.p_Connected.set(E_Locations.l_14_1, E_Locations.l_14_2);\n\t\tstate.p_Connected.set(E_Locations.l_13_7, E_Locations.l_13_8);\n\t\tstate.p_Connected.set(E_Locations.l_15_8, E_Locations.l_15_7);\n\t\tstate.p_Connected.set(E_Locations.l_5_6, E_Locations.l_6_6);\n\t\tstate.p_Connected.set(E_Locations.l_11_12, E_Locations.l_11_11);\n\t\tstate.p_Connected.set(E_Locations.l_5_8, E_Locations.l_5_7);\n\t\tstate.p_Connected.set(E_Locations.l_3_1, E_Locations.l_2_1);\n\t\tstate.p_Connected.set(E_Locations.l_4_7, E_Locations.l_3_7);\n\t\tstate.p_Connected.set(E_Locations.l_7_2, E_Locations.l_8_2);\n\t\tstate.p_Connected.set(E_Locations.l_16_6, E_Locations.l_16_5);\n\t\tstate.p_Connected.set(E_Locations.l_9_17, E_Locations.l_10_17);\n\t\tstate.p_Connected.set(E_Locations.l_11_3, E_Locations.l_11_4);\n\t\tstate.p_Connected.set(E_Locations.l_14_6, E_Locations.l_15_6);\n\t\tstate.p_Connected.set(E_Locations.l_14_3, E_Locations.l_14_4);\n\t\tstate.p_Connected.set(E_Locations.l_2_12, E_Locations.l_2_13);\n\t\tstate.p_Connected.set(E_Locations.l_12_17, E_Locations.l_11_17);\n\t\tstate.p_Connected.set(E_Locations.l_13_3, E_Locations.l_14_3);\n\t\tstate.p_Connected.set(E_Locations.l_13_5, 
E_Locations.l_12_5);\n\t\tstate.p_Connected.set(E_Locations.l_14_4, E_Locations.l_14_3);\n\t\tstate.p_Connected.set(E_Locations.l_3_3, E_Locations.l_2_3);\n\t\tstate.p_Connected.set(E_Locations.l_3_19, E_Locations.l_3_18);\n\t\tstate.p_Connected.set(E_Locations.l_13_18, E_Locations.l_12_18);\n\t\tstate.p_Connected.set(E_Locations.l_12_9, E_Locations.l_12_8);\n\t\tstate.p_Connected.set(E_Locations.l_14_18, E_Locations.l_14_19);\n\t\tstate.p_Connected.set(E_Locations.l_19_16, E_Locations.l_19_17);\n\t\tstate.p_Connected.set(E_Locations.l_5_20, E_Locations.l_6_20);\n\t\tstate.p_Connected.set(E_Locations.l_3_13, E_Locations.l_4_13);\n\t\tstate.p_Connected.set(E_Locations.l_4_18, E_Locations.l_3_18);\n\t\tstate.p_Connected.set(E_Locations.l_4_11, E_Locations.l_4_10);\n\t\tstate.p_Connected.set(E_Locations.l_7_4, E_Locations.l_8_4);\n\t\tstate.p_Connected.set(E_Locations.l_3_8, E_Locations.l_3_9);\n\t\tstate.p_Connected.set(E_Locations.l_15_20, E_Locations.l_14_20);\n\t\tstate.p_Connected.set(E_Locations.l_18_13, E_Locations.l_17_13);\n\t\tstate.p_Connected.set(E_Locations.l_8_6, E_Locations.l_8_7);\n\t\tstate.p_Connected.set(E_Locations.l_8_15, E_Locations.l_7_15);\n\t\tstate.p_Connected.set(E_Locations.l_10_10, E_Locations.l_10_11);\n\t\tstate.p_Connected.set(E_Locations.l_6_14, E_Locations.l_5_14);\n\t\tstate.p_Connected.set(E_Locations.l_6_17, E_Locations.l_6_16);\n\t\tstate.p_Connected.set(E_Locations.l_4_2, E_Locations.l_4_3);\n\t\tstate.p_Connected.set(E_Locations.l_10_9, E_Locations.l_11_9);\n\t\tstate.p_Connected.set(E_Locations.l_5_5, E_Locations.l_5_4);\n\t\tstate.p_Connected.set(E_Locations.l_4_17, E_Locations.l_4_16);\n\t\tstate.p_Connected.set(E_Locations.l_6_3, E_Locations.l_6_4);\n\t\tstate.p_Connected.set(E_Locations.l_10_2, E_Locations.l_10_1);\n\t\tstate.p_Connected.set(E_Locations.l_6_20, E_Locations.l_7_20);\n\t\tstate.p_Connected.set(E_Locations.l_10_5, E_Locations.l_9_5);\n\t\tstate.p_Connected.set(E_Locations.l_16_20, E_Locations.l_15_20);\n\t\tstate.p_Connected.set(E_Locations.l_2_1, E_Locations.l_3_1);\n\t\tstate.p_Connected.set(E_Locations.l_14_7, E_Locations.l_15_7);\n\t\tstate.p_Connected.set(E_Locations.l_18_18, E_Locations.l_18_19);\n\t\tstate.p_Connected.set(E_Locations.l_6_14, E_Locations.l_6_13);\n\t\tstate.p_Connected.set(E_Locations.l_4_12, E_Locations.l_4_11);\n\t\tstate.p_Connected.set(E_Locations.l_13_15, E_Locations.l_13_14);\n\t\tstate.p_Connected.set(E_Locations.l_10_15, E_Locations.l_10_14);\n\t\tstate.p_Connected.set(E_Locations.l_14_16, E_Locations.l_14_15);\n\t\tstate.p_Connected.set(E_Locations.l_17_17, E_Locations.l_17_16);\n\t\tstate.p_Connected.set(E_Locations.l_17_11, E_Locations.l_17_12);\n\t\tstate.p_Connected.set(E_Locations.l_20_10, E_Locations.l_19_10);\n\t\tstate.p_Connected.set(E_Locations.l_11_17, E_Locations.l_12_17);\n\t\tstate.p_Connected.set(E_Locations.l_3_20, E_Locations.l_4_20);\n\t\tstate.p_Connected.set(E_Locations.l_15_4, E_Locations.l_16_4);\n\t\tstate.p_Connected.set(E_Locations.l_15_17, E_Locations.l_15_16);\n\t\tstate.p_Connected.set(E_Locations.l_19_1, E_Locations.l_19_2);\n\t\tstate.p_Connected.set(E_Locations.l_10_2, E_Locations.l_11_2);\n\t\tstate.p_Connected.set(E_Locations.l_3_5, E_Locations.l_4_5);\n\t\tstate.p_Connected.set(E_Locations.l_17_4, E_Locations.l_18_4);\n\t\tstate.p_Connected.set(E_Locations.l_16_17, E_Locations.l_16_18);\n\t\tstate.p_Connected.set(E_Locations.l_18_1, E_Locations.l_18_2);\n\t\tstate.p_Connected.set(E_Locations.l_15_6, E_Locations.l_14_6);\n\t\tstate.p_Connected.set(E_Locations.l_10_9, 
E_Locations.l_10_8);\n\t\tstate.p_Connected.set(E_Locations.l_18_14, E_Locations.l_19_14);\n\t\tstate.p_Connected.set(E_Locations.l_8_4, E_Locations.l_9_4);\n\t\tstate.p_Connected.set(E_Locations.l_15_18, E_Locations.l_15_19);\n\t\tstate.p_Connected.set(E_Locations.l_6_2, E_Locations.l_7_2);\n\t\tstate.p_Connected.set(E_Locations.l_20_5, E_Locations.l_20_6);\n\t\tstate.p_Connected.set(E_Locations.l_7_8, E_Locations.l_7_7);\n\t\tstate.p_Connected.set(E_Locations.l_10_10, E_Locations.l_10_9);\n\t\tstate.p_Connected.set(E_Locations.l_5_4, E_Locations.l_6_4);\n\t\tstate.p_Connected.set(E_Locations.l_14_7, E_Locations.l_14_8);\n\t\tstate.p_Connected.set(E_Locations.l_8_2, E_Locations.l_7_2);\n\t\tstate.p_Connected.set(E_Locations.l_12_17, E_Locations.l_13_17);\n\t\tstate.p_Connected.set(E_Locations.l_7_16, E_Locations.l_7_17);\n\t\tstate.p_Connected.set(E_Locations.l_9_3, E_Locations.l_9_4);\n\t\tstate.p_Connected.set(E_Locations.l_15_6, E_Locations.l_15_5);\n\t\tstate.p_Connected.set(E_Locations.l_6_7, E_Locations.l_6_6);\n\t\tstate.p_Connected.set(E_Locations.l_12_3, E_Locations.l_11_3);\n\t\tstate.p_Connected.set(E_Locations.l_3_7, E_Locations.l_3_8);\n\t\tstate.p_Connected.set(E_Locations.l_16_9, E_Locations.l_16_8);\n\t\tstate.p_Connected.set(E_Locations.l_3_15, E_Locations.l_3_14);\n\t\tstate.p_Connected.set(E_Locations.l_13_7, E_Locations.l_13_6);\n\t\tstate.p_Connected.set(E_Locations.l_17_1, E_Locations.l_18_1);\n\t\tstate.p_Connected.set(E_Locations.l_15_12, E_Locations.l_14_12);\n\t\tstate.p_Connected.set(E_Locations.l_17_18, E_Locations.l_16_18);\n\t\tstate.p_Connected.set(E_Locations.l_9_9, E_Locations.l_10_9);\n\t\tstate.p_Connected.set(E_Locations.l_19_14, E_Locations.l_19_15);\n\t\tstate.p_Connected.set(E_Locations.l_20_5, E_Locations.l_19_5);\n\t\tstate.p_Connected.set(E_Locations.l_19_4, E_Locations.l_20_4);\n\t\tstate.p_Connected.set(E_Locations.l_18_9, E_Locations.l_18_10);\n\t\tstate.p_Connected.set(E_Locations.l_19_17, E_Locations.l_20_17);\n\t\tstate.p_Connected.set(E_Locations.l_12_19, E_Locations.l_12_20);\n\t\tstate.p_Connected.set(E_Locations.l_13_1, E_Locations.l_14_1);\n\t\tstate.p_Connected.set(E_Locations.l_15_4, E_Locations.l_14_4);\n\t\tstate.p_Connected.set(E_Locations.l_1_1, E_Locations.l_2_1);\n\t\tstate.p_Connected.set(E_Locations.l_5_10, E_Locations.l_6_10);\n\t\tstate.p_Connected.set(E_Locations.l_16_8, E_Locations.l_16_9);\n\t\tstate.p_Connected.set(E_Locations.l_3_2, E_Locations.l_3_3);\n\t\tstate.p_Connected.set(E_Locations.l_3_7, E_Locations.l_4_7);\n\t\tstate.p_Connected.set(E_Locations.l_2_17, E_Locations.l_2_18);\n\t\tstate.p_Connected.set(E_Locations.l_6_15, E_Locations.l_7_15);\n\t\tstate.p_Connected.set(E_Locations.l_11_8, E_Locations.l_10_8);\n\t\tstate.p_Connected.set(E_Locations.l_19_16, E_Locations.l_20_16);\n\t\tstate.p_Connected.set(E_Locations.l_8_6, E_Locations.l_9_6);\n\t\tstate.p_Connected.set(E_Locations.l_11_5, E_Locations.l_11_6);\n\t\tstate.p_Connected.set(E_Locations.l_3_2, E_Locations.l_3_1);\n\t\tstate.p_Connected.set(E_Locations.l_3_15, E_Locations.l_3_16);\n\t\tstate.p_Connected.set(E_Locations.l_5_10, E_Locations.l_5_9);\n\t\tstate.p_Connected.set(E_Locations.l_14_13, E_Locations.l_14_14);\n\t\tstate.p_Connected.set(E_Locations.l_5_15, E_Locations.l_5_16);\n\t\tstate.p_Connected.set(E_Locations.l_15_17, E_Locations.l_15_18);\n\t\tstate.p_Connected.set(E_Locations.l_14_10, E_Locations.l_14_11);\n\t\tstate.p_Connected.set(E_Locations.l_4_7, E_Locations.l_4_6);\n\t\tstate.p_Connected.set(E_Locations.l_18_2, 
E_Locations.l_18_1);\n\t\tstate.p_Connected.set(E_Locations.l_8_13, E_Locations.l_9_13);\n\t\tstate.p_Connected.set(E_Locations.l_12_12, E_Locations.l_12_13);\n\t\tstate.p_Connected.set(E_Locations.l_5_5, E_Locations.l_6_5);\n\t\tstate.p_Connected.set(E_Locations.l_8_1, E_Locations.l_7_1);\n\t\tstate.p_Connected.set(E_Locations.l_17_12, E_Locations.l_17_11);\n\t\tstate.p_Connected.set(E_Locations.l_18_11, E_Locations.l_18_10);\n\t\tstate.p_Connected.set(E_Locations.l_4_19, E_Locations.l_4_18);\n\t\tstate.p_Connected.set(E_Locations.l_13_2, E_Locations.l_12_2);\n\t\tstate.p_Connected.set(E_Locations.l_16_10, E_Locations.l_17_10);\n\t\tstate.p_Connected.set(E_Locations.l_12_19, E_Locations.l_13_19);\n\t\tstate.p_Connected.set(E_Locations.l_18_13, E_Locations.l_18_12);\n\t\tstate.p_Connected.set(E_Locations.l_10_13, E_Locations.l_10_14);\n\t\tstate.p_Connected.set(E_Locations.l_12_2, E_Locations.l_11_2);\n\t\tstate.p_Connected.set(E_Locations.l_12_13, E_Locations.l_13_13);\n\t\tstate.p_Connected.set(E_Locations.l_20_12, E_Locations.l_20_13);\n\t\tstate.p_Connected.set(E_Locations.l_4_1, E_Locations.l_5_1);\n\t\tstate.p_Connected.set(E_Locations.l_18_4, E_Locations.l_18_5);\n\t\tstate.p_Connected.set(E_Locations.l_13_9, E_Locations.l_13_8);\n\t\tstate.p_Connected.set(E_Locations.l_19_20, E_Locations.l_20_20);\n\t\tstate.p_Connected.set(E_Locations.l_20_15, E_Locations.l_20_16);\n\t\tstate.p_Connected.set(E_Locations.l_11_8, E_Locations.l_12_8);\n\t\tstate.p_Connected.set(E_Locations.l_17_14, E_Locations.l_17_15);\n\t\tstate.p_Connected.set(E_Locations.l_8_18, E_Locations.l_8_17);\n\t\tstate.p_Connected.set(E_Locations.l_3_7, E_Locations.l_3_6);\n\t\tstate.p_Connected.set(E_Locations.l_4_14, E_Locations.l_3_14);\n\t\tstate.p_Connected.set(E_Locations.l_9_14, E_Locations.l_9_15);\n\t\tstate.p_Connected.set(E_Locations.l_6_19, E_Locations.l_5_19);\n\t\tstate.p_Connected.set(E_Locations.l_16_7, E_Locations.l_16_8);\n\t\tstate.p_Connected.set(E_Locations.l_10_15, E_Locations.l_10_16);\n\t\tstate.p_Connected.set(E_Locations.l_18_12, E_Locations.l_18_11);\n\t\tstate.p_Connected.set(E_Locations.l_12_10, E_Locations.l_12_9);\n\t\tstate.p_Connected.set(E_Locations.l_3_11, E_Locations.l_3_10);\n\t\tstate.p_Connected.set(E_Locations.l_18_18, E_Locations.l_17_18);\n\t\tstate.p_Connected.set(E_Locations.l_6_8, E_Locations.l_6_9);\n\t\tstate.p_Connected.set(E_Locations.l_1_16, E_Locations.l_1_15);\n\t\tstate.p_Connected.set(E_Locations.l_13_5, E_Locations.l_13_4);\n\t\tstate.p_Connected.set(E_Locations.l_18_5, E_Locations.l_19_5);\n\t\tstate.p_Connected.set(E_Locations.l_2_12, E_Locations.l_2_11);\n\t\tstate.p_Connected.set(E_Locations.l_17_19, E_Locations.l_17_20);\n\t\tstate.p_Connected.set(E_Locations.l_18_6, E_Locations.l_19_6);\n\t\tstate.p_Connected.set(E_Locations.l_7_5, E_Locations.l_8_5);\n\t\tstate.p_Connected.set(E_Locations.l_9_19, E_Locations.l_9_20);\n\t\tstate.p_Connected.set(E_Locations.l_3_4, E_Locations.l_3_5);\n\t\tstate.p_Connected.set(E_Locations.l_1_3, E_Locations.l_2_3);\n\t\tstate.p_Connected.set(E_Locations.l_4_10, E_Locations.l_4_11);\n\t\tstate.p_Connected.set(E_Locations.l_5_9, E_Locations.l_5_10);\n\t\tstate.p_Connected.set(E_Locations.l_12_17, E_Locations.l_12_16);\n\t\tstate.p_Connected.set(E_Locations.l_5_7, E_Locations.l_4_7);\n\t\tstate.p_Connected.set(E_Locations.l_4_6, E_Locations.l_4_5);\n\t\tstate.p_Connected.set(E_Locations.l_1_16, E_Locations.l_1_17);\n\t\tstate.p_Connected.set(E_Locations.l_5_13, E_Locations.l_5_14);\n\t\tstate.p_Connected.set(E_Locations.l_7_4, 
E_Locations.l_7_3);\n\t\tstate.p_Connected.set(E_Locations.l_10_12, E_Locations.l_9_12);\n\t\tstate.p_Connected.set(E_Locations.l_11_19, E_Locations.l_12_19);\n\t\tstate.p_Connected.set(E_Locations.l_16_19, E_Locations.l_16_20);\n\t\tstate.p_Connected.set(E_Locations.l_19_2, E_Locations.l_20_2);\n\t\tstate.p_Connected.set(E_Locations.l_1_16, E_Locations.l_2_16);\n\t\tstate.p_Connected.set(E_Locations.l_13_14, E_Locations.l_13_13);\n\t\tstate.p_Connected.set(E_Locations.l_10_17, E_Locations.l_10_16);\n\t\tstate.p_Connected.set(E_Locations.l_6_6, E_Locations.l_6_7);\n\t\tstate.p_Connected.set(E_Locations.l_4_9, E_Locations.l_4_8);\n\t\tstate.p_Connected.set(E_Locations.l_17_17, E_Locations.l_16_17);\n\t\tstate.p_Connected.set(E_Locations.l_11_17, E_Locations.l_10_17);\n\t\tstate.p_Connected.set(E_Locations.l_2_4, E_Locations.l_2_3);\n\t\tstate.p_Connected.set(E_Locations.l_13_2, E_Locations.l_14_2);\n\t\tstate.p_Connected.set(E_Locations.l_17_18, E_Locations.l_18_18);\n\t\tstate.p_Connected.set(E_Locations.l_14_10, E_Locations.l_13_10);\n\t\tstate.p_Connected.set(E_Locations.l_15_13, E_Locations.l_15_12);\n\t\tstate.p_Connected.set(E_Locations.l_16_5, E_Locations.l_16_6);\n\t\tstate.p_Connected.set(E_Locations.l_1_11, E_Locations.l_1_12);\n\t\tstate.p_Connected.set(E_Locations.l_3_10, E_Locations.l_3_11);\n\t\tstate.p_Connected.set(E_Locations.l_13_18, E_Locations.l_14_18);\n\t\tstate.p_Connected.set(E_Locations.l_3_13, E_Locations.l_2_13);\n\t\tstate.p_Connected.set(E_Locations.l_9_7, E_Locations.l_8_7);\n\t\tstate.p_Connected.set(E_Locations.l_13_15, E_Locations.l_12_15);\n\t\tstate.p_Connected.set(E_Locations.l_14_4, E_Locations.l_13_4);\n\t\tstate.p_Connected.set(E_Locations.l_1_3, E_Locations.l_1_2);\n\t\tstate.p_Connected.set(E_Locations.l_14_3, E_Locations.l_14_2);\n\t\tstate.p_Connected.set(E_Locations.l_9_6, E_Locations.l_8_6);\n\t\tstate.p_Connected.set(E_Locations.l_3_6, E_Locations.l_3_7);\n\t\tstate.p_Connected.set(E_Locations.l_9_11, E_Locations.l_8_11);\n\t\tstate.p_Connected.set(E_Locations.l_4_14, E_Locations.l_5_14);\n\t\tstate.p_Connected.set(E_Locations.l_20_15, E_Locations.l_20_14);\n\t\tstate.p_Connected.set(E_Locations.l_9_19, E_Locations.l_8_19);\n\t\tstate.p_Connected.set(E_Locations.l_4_2, E_Locations.l_3_2);\n\t\tstate.p_Connected.set(E_Locations.l_1_11, E_Locations.l_2_11);\n\t\tstate.p_Connected.set(E_Locations.l_3_12, E_Locations.l_4_12);\n\t\tstate.p_Connected.set(E_Locations.l_17_7, E_Locations.l_17_8);\n\t\tstate.p_Connected.set(E_Locations.l_12_8, E_Locations.l_11_8);\n\t\tstate.p_Connected.set(E_Locations.l_14_6, E_Locations.l_14_7);\n\t\tstate.p_Connected.set(E_Locations.l_9_13, E_Locations.l_10_13);\n\t\tstate.p_Connected.set(E_Locations.l_13_19, E_Locations.l_14_19);\n\t\tstate.p_Connected.set(E_Locations.l_7_20, E_Locations.l_8_20);\n\t\tstate.p_Connected.set(E_Locations.l_6_6, E_Locations.l_7_6);\n\t\tstate.p_Connected.set(E_Locations.l_15_3, E_Locations.l_15_2);\n\t\tstate.p_Connected.set(E_Locations.l_6_5, E_Locations.l_7_5);\n\t\tstate.p_Connected.set(E_Locations.l_13_6, E_Locations.l_12_6);\n\t\tstate.p_Connected.set(E_Locations.l_19_9, E_Locations.l_19_10);\n\t\tstate.p_Connected.set(E_Locations.l_13_4, E_Locations.l_13_3);\n\t\tstate.p_Connected.set(E_Locations.l_2_18, E_Locations.l_1_18);\n\t\tstate.p_Connected.set(E_Locations.l_13_19, E_Locations.l_12_19);\n\t\tstate.p_Connected.set(E_Locations.l_15_5, E_Locations.l_15_4);\n\t\tstate.p_Connected.set(E_Locations.l_12_20, E_Locations.l_13_20);\n\t\tstate.p_Connected.set(E_Locations.l_16_7, 
E_Locations.l_17_7);\n\t\tstate.p_Connected.set(E_Locations.l_6_16, E_Locations.l_6_15);\n\t\tstate.p_Connected.set(E_Locations.l_1_13, E_Locations.l_1_14);\n\t\tstate.p_Connected.set(E_Locations.l_8_8, E_Locations.l_8_7);\n\t\tstate.p_Connected.set(E_Locations.l_2_17, E_Locations.l_2_16);\n\t\tstate.p_Connected.set(E_Locations.l_7_15, E_Locations.l_7_14);\n\t\tstate.p_Connected.set(E_Locations.l_15_14, E_Locations.l_15_15);\n\t\tstate.p_Connected.set(E_Locations.l_7_14, E_Locations.l_7_15);\n\t\tstate.p_Connected.set(E_Locations.l_5_11, E_Locations.l_6_11);\n\t\tstate.p_Connected.set(E_Locations.l_1_9, E_Locations.l_2_9);\n\t\tstate.p_Connected.set(E_Locations.l_15_12, E_Locations.l_15_11);\n\t\tstate.p_Connected.set(E_Locations.l_9_6, E_Locations.l_9_7);\n\t\tstate.p_Connected.set(E_Locations.l_3_11, E_Locations.l_4_11);\n\t\tstate.p_Connected.set(E_Locations.l_2_10, E_Locations.l_3_10);\n\t\tstate.p_Connected.set(E_Locations.l_18_3, E_Locations.l_17_3);\n\t\tstate.p_Connected.set(E_Locations.l_9_6, E_Locations.l_9_5);\n\t\tstate.p_Connected.set(E_Locations.l_10_2, E_Locations.l_9_2);\n\t\tstate.p_Connected.set(E_Locations.l_8_15, E_Locations.l_8_14);\n\t\tstate.p_Connected.set(E_Locations.l_16_5, E_Locations.l_16_4);\n\t\tstate.p_Connected.set(E_Locations.l_18_3, E_Locations.l_19_3);\n\t\tstate.p_Connected.set(E_Locations.l_8_8, E_Locations.l_9_8);\n\t\tstate.p_Connected.set(E_Locations.l_13_8, E_Locations.l_12_8);\n\t\tstate.p_Connected.set(E_Locations.l_13_4, E_Locations.l_13_5);\n\t\tstate.p_Connected.set(E_Locations.l_15_11, E_Locations.l_14_11);\n\t\tstate.p_Connected.set(E_Locations.l_19_9, E_Locations.l_18_9);\n\t\tstate.p_Connected.set(E_Locations.l_18_12, E_Locations.l_19_12);\n\t\tstate.p_Connected.set(E_Locations.l_3_18, E_Locations.l_4_18);\n\t\tstate.p_Connected.set(E_Locations.l_6_8, E_Locations.l_7_8);\n\t\tstate.p_Connected.set(E_Locations.l_13_7, E_Locations.l_14_7);\n\t\tstate.p_Connected.set(E_Locations.l_2_18, E_Locations.l_3_18);\n\t\tstate.p_Connected.set(E_Locations.l_3_5, E_Locations.l_2_5);\n\t\tstate.p_Connected.set(E_Locations.l_17_1, E_Locations.l_17_2);\n\t\tstate.p_Connected.set(E_Locations.l_14_11, E_Locations.l_14_12);\n\t\tstate.p_Connected.set(E_Locations.l_5_14, E_Locations.l_5_15);\n\t\tstate.p_Connected.set(E_Locations.l_18_16, E_Locations.l_18_17);\n\t\tstate.p_Connected.set(E_Locations.l_2_3, E_Locations.l_1_3);\n\t\tstate.p_Connected.set(E_Locations.l_10_15, E_Locations.l_11_15);\n\t\tstate.p_Connected.set(E_Locations.l_19_6, E_Locations.l_18_6);\n\t\tstate.p_Connected.set(E_Locations.l_14_3, E_Locations.l_15_3);\n\t\tstate.p_Connected.set(E_Locations.l_17_12, E_Locations.l_17_13);\n\t\tstate.p_Connected.set(E_Locations.l_5_20, E_Locations.l_5_19);\n\t\tstate.p_Connected.set(E_Locations.l_7_18, E_Locations.l_7_17);\n\t\tstate.p_Connected.set(E_Locations.l_15_3, E_Locations.l_15_4);\n\t\tstate.p_Connected.set(E_Locations.l_19_19, E_Locations.l_19_18);\n\t\tstate.p_Connected.set(E_Locations.l_16_15, E_Locations.l_17_15);\n\t\tstate.p_Connected.set(E_Locations.l_15_1, E_Locations.l_15_2);\n\t\tstate.p_Connected.set(E_Locations.l_11_3, E_Locations.l_10_3);\n\t\tstate.p_Connected.set(E_Locations.l_16_18, E_Locations.l_16_17);\n\t\tstate.p_Connected.set(E_Locations.l_5_16, E_Locations.l_4_16);\n\t\tstate.p_Connected.set(E_Locations.l_8_10, E_Locations.l_9_10);\n\t\tstate.p_Connected.set(E_Locations.l_18_13, E_Locations.l_18_14);\n\t\tstate.p_Connected.set(E_Locations.l_13_17, E_Locations.l_13_16);\n\t\tstate.p_Connected.set(E_Locations.l_11_4, 
E_Locations.l_11_3);\n\t\tstate.p_Connected.set(E_Locations.l_17_15, E_Locations.l_16_15);\n\t\tstate.p_Connected.set(E_Locations.l_8_19, E_Locations.l_7_19);\n\t\tstate.p_Connected.set(E_Locations.l_19_13, E_Locations.l_19_12);\n\t\tstate.p_Connected.set(E_Locations.l_1_20, E_Locations.l_1_19);\n\t\tstate.p_Connected.set(E_Locations.l_5_9, E_Locations.l_4_9);\n\t\tstate.p_Connected.set(E_Locations.l_10_3, E_Locations.l_10_4);\n\t\tstate.p_Connected.set(E_Locations.l_13_6, E_Locations.l_13_5);\n\t\tstate.p_Connected.set(E_Locations.l_15_19, E_Locations.l_15_20);\n\t\tstate.p_Connected.set(E_Locations.l_19_12, E_Locations.l_19_13);\n\t\tstate.p_Connected.set(E_Locations.l_6_11, E_Locations.l_5_11);\n\t\tstate.p_Connected.set(E_Locations.l_19_10, E_Locations.l_18_10);\n\t\tstate.p_Connected.set(E_Locations.l_19_17, E_Locations.l_19_18);\n\t\tstate.p_Connected.set(E_Locations.l_1_8, E_Locations.l_1_9);\n\t\tstate.p_Connected.set(E_Locations.l_7_14, E_Locations.l_6_14);\n\t\tstate.p_Connected.set(E_Locations.l_16_16, E_Locations.l_17_16);\n\t\tstate.p_Connected.set(E_Locations.l_18_19, E_Locations.l_18_18);\n\t\tstate.p_Connected.set(E_Locations.l_2_11, E_Locations.l_2_10);\n\t\tstate.p_Connected.set(E_Locations.l_17_11, E_Locations.l_18_11);\n\t\tstate.p_Connected.set(E_Locations.l_16_10, E_Locations.l_15_10);\n\t\tstate.p_Connected.set(E_Locations.l_17_13, E_Locations.l_16_13);\n\t\tstate.p_Connected.set(E_Locations.l_4_10, E_Locations.l_3_10);\n\t\tstate.p_Connected.set(E_Locations.l_11_18, E_Locations.l_11_19);\n\t\tstate.p_Connected.set(E_Locations.l_1_7, E_Locations.l_1_8);\n\t\tstate.p_Connected.set(E_Locations.l_4_16, E_Locations.l_3_16);\n\t\tstate.p_Connected.set(E_Locations.l_13_16, E_Locations.l_13_17);\n\t\tstate.p_Connected.set(E_Locations.l_19_3, E_Locations.l_20_3);\n\t\tstate.p_Connected.set(E_Locations.l_6_4, E_Locations.l_5_4);\n\t\tstate.p_Connected.set(E_Locations.l_4_12, E_Locations.l_5_12);\n\t\tstate.p_Connected.set(E_Locations.l_18_11, E_Locations.l_17_11);\n\t\tstate.p_Connected.set(E_Locations.l_5_6, E_Locations.l_5_5);\n\t\tstate.p_Connected.set(E_Locations.l_6_2, E_Locations.l_6_1);\n\t\tstate.p_Connected.set(E_Locations.l_7_15, E_Locations.l_8_15);\n\t\tstate.p_Connected.set(E_Locations.l_5_3, E_Locations.l_4_3);\n\t\tstate.p_Connected.set(E_Locations.l_3_6, E_Locations.l_4_6);\n\t\tstate.p_Connected.set(E_Locations.l_7_9, E_Locations.l_8_9);\n\t\tstate.p_Connected.set(E_Locations.l_9_15, E_Locations.l_9_16);\n\t\tstate.p_Connected.set(E_Locations.l_17_16, E_Locations.l_18_16);\n\t\tstate.p_Connected.set(E_Locations.l_5_15, E_Locations.l_4_15);\n\t\tstate.p_Connected.set(E_Locations.l_12_5, E_Locations.l_13_5);\n\t\tstate.p_Connected.set(E_Locations.l_13_19, E_Locations.l_13_18);\n\t\tstate.p_Connected.set(E_Locations.l_11_12, E_Locations.l_10_12);\n\t\tstate.p_Connected.set(E_Locations.l_9_2, E_Locations.l_9_3);\n\t\tstate.p_Connected.set(E_Locations.l_7_14, E_Locations.l_7_13);\n\t\tstate.p_Connected.set(E_Locations.l_17_14, E_Locations.l_18_14);\n\t\tstate.p_Connected.set(E_Locations.l_19_15, E_Locations.l_19_14);\n\t\tstate.p_Connected.set(E_Locations.l_1_18, E_Locations.l_2_18);\n\t\tstate.p_Connected.set(E_Locations.l_11_2, E_Locations.l_11_1);\n\t\tstate.p_Connected.set(E_Locations.l_1_19, E_Locations.l_2_19);\n\t\tstate.p_Connected.set(E_Locations.l_12_7, E_Locations.l_12_8);\n\t\tstate.p_Connected.set(E_Locations.l_19_4, E_Locations.l_18_4);\n\t\tstate.p_Connected.set(E_Locations.l_10_8, E_Locations.l_11_8);\n\t\tstate.p_Connected.set(E_Locations.l_9_18, 
E_Locations.l_10_18);\n\t\tstate.p_Connected.set(E_Locations.l_5_13, E_Locations.l_4_13);\n\t\tstate.p_Connected.set(E_Locations.l_18_17, E_Locations.l_18_16);\n\t\tstate.p_Connected.set(E_Locations.l_11_10, E_Locations.l_11_11);\n\t\tstate.p_Connected.set(E_Locations.l_16_2, E_Locations.l_16_3);\n\t\tstate.p_Connected.set(E_Locations.l_10_5, E_Locations.l_11_5);\n\t\tstate.p_Connected.set(E_Locations.l_7_17, E_Locations.l_8_17);\n\t\tstate.p_Connected.set(E_Locations.l_2_13, E_Locations.l_1_13);\n\t\tstate.p_Connected.set(E_Locations.l_16_20, E_Locations.l_16_19);\n\t\tstate.p_Connected.set(E_Locations.l_12_16, E_Locations.l_12_17);\n\t\tstate.p_Connected.set(E_Locations.l_19_16, E_Locations.l_18_16);\n\t\tstate.p_Connected.set(E_Locations.l_15_15, E_Locations.l_15_16);\n\t\tstate.p_Connected.set(E_Locations.l_5_4, E_Locations.l_4_4);\n\t\tstate.p_Connected.set(E_Locations.l_3_14, E_Locations.l_2_14);\n\t\tstate.p_Connected.set(E_Locations.l_10_17, E_Locations.l_11_17);\n\t\tstate.p_Connected.set(E_Locations.l_3_19, E_Locations.l_3_20);\n\t\tstate.p_Connected.set(E_Locations.l_17_8, E_Locations.l_17_9);\n\t\tstate.p_Connected.set(E_Locations.l_5_8, E_Locations.l_6_8);\n\t\tstate.p_Connected.set(E_Locations.l_9_4, E_Locations.l_9_5);\n\t\tstate.p_Connected.set(E_Locations.l_8_11, E_Locations.l_8_10);\n\t\tstate.p_Connected.set(E_Locations.l_2_14, E_Locations.l_1_14);\n\t\tstate.p_Connected.set(E_Locations.l_18_6, E_Locations.l_18_5);\n\t\tstate.p_Connected.set(E_Locations.l_18_17, E_Locations.l_17_17);\n\t\tstate.p_Connected.set(E_Locations.l_15_8, E_Locations.l_16_8);\n\t\tstate.p_Connected.set(E_Locations.l_11_16, E_Locations.l_12_16);\n\t\tstate.p_Connected.set(E_Locations.l_14_16, E_Locations.l_15_16);\n\t\tstate.p_Connected.set(E_Locations.l_18_11, E_Locations.l_19_11);\n\t\tstate.p_Connected.set(E_Locations.l_11_11, E_Locations.l_11_10);\n\t\tstate.p_Connected.set(E_Locations.l_16_14, E_Locations.l_16_13);\n\t\tstate.p_Connected.set(E_Locations.l_8_9, E_Locations.l_8_10);\n\t\tstate.p_Connected.set(E_Locations.l_17_10, E_Locations.l_16_10);\n\t\tstate.p_Connected.set(E_Locations.l_17_2, E_Locations.l_18_2);\n\t\tstate.p_Connected.set(E_Locations.l_4_14, E_Locations.l_4_13);\n\t\tstate.p_Connected.set(E_Locations.l_8_12, E_Locations.l_7_12);\n\t\tstate.p_Connected.set(E_Locations.l_5_6, E_Locations.l_5_7);\n\t\tstate.p_Connected.set(E_Locations.l_19_18, E_Locations.l_18_18);\n\t\tstate.p_Connected.set(E_Locations.l_11_19, E_Locations.l_11_20);\n\t\tstate.p_Connected.set(E_Locations.l_3_16, E_Locations.l_2_16);\n\t\tstate.p_Connected.set(E_Locations.l_13_5, E_Locations.l_14_5);\n\t\tstate.p_Connected.set(E_Locations.l_14_20, E_Locations.l_15_20);\n\t\tstate.p_Connected.set(E_Locations.l_11_1, E_Locations.l_10_1);\n\t\tstate.p_Connected.set(E_Locations.l_5_16, E_Locations.l_6_16);\n\t\tstate.p_Connected.set(E_Locations.l_20_10, E_Locations.l_20_9);\n\t\tstate.p_Connected.set(E_Locations.l_19_8, E_Locations.l_18_8);\n\t\tstate.p_Connected.set(E_Locations.l_1_18, E_Locations.l_1_19);\n\t\tstate.p_Connected.set(E_Locations.l_11_4, E_Locations.l_10_4);\n\t\tstate.p_Connected.set(E_Locations.l_2_3, E_Locations.l_2_4);\n\t\tstate.p_Connected.set(E_Locations.l_3_18, E_Locations.l_2_18);\n\t\tstate.p_Connected.set(E_Locations.l_4_5, E_Locations.l_4_6);\n\t\tstate.p_Connected.set(E_Locations.l_9_14, E_Locations.l_8_14);\n\t\tstate.p_Connected.set(E_Locations.l_4_13, E_Locations.l_5_13);\n\t\tstate.p_Connected.set(E_Locations.l_18_2, 
E_Locations.l_18_3);\n\t\tstate.p_Connected.set(E_Locations.l_9_12, E_Locations.l_10_12);\n\t\tstate.p_Connected.set(E_Locations.l_20_9, E_Locations.l_19_9);\n\t\tstate.p_Connected.set(E_Locations.l_16_11, E_Locations.l_15_11);\n\t\tstate.p_Connected.set(E_Locations.l_5_12, E_Locations.l_6_12);\n\t\tstate.p_Connected.set(E_Locations.l_3_7, E_Locations.l_2_7);\n\t\tstate.p_Connected.set(E_Locations.l_6_13, E_Locations.l_5_13);\n\t\tstate.p_Connected.set(E_Locations.l_15_11, E_Locations.l_16_11);\n\t\tstate.p_Connected.set(E_Locations.l_2_7, E_Locations.l_3_7);\n\t\tstate.p_Connected.set(E_Locations.l_4_17, E_Locations.l_4_18);\n\t\tstate.p_Connected.set(E_Locations.l_5_11, E_Locations.l_4_11);\n\t\tstate.p_Connected.set(E_Locations.l_12_15, E_Locations.l_11_15);\n\t\tstate.p_Connected.set(E_Locations.l_7_1, E_Locations.l_8_1);\n\t\tstate.p_Connected.set(E_Locations.l_16_15, E_Locations.l_15_15);\n\t\tstate.p_Connected.set(E_Locations.l_3_16, E_Locations.l_4_16);\n\t\tstate.p_Connected.set(E_Locations.l_1_14, E_Locations.l_1_15);\n\t\tstate.p_Connected.set(E_Locations.l_2_2, E_Locations.l_2_1);\n\t\tstate.p_Connected.set(E_Locations.l_14_10, E_Locations.l_15_10);\n\t\tstate.p_Connected.set(E_Locations.l_8_4, E_Locations.l_8_3);\n\t\tstate.p_Connected.set(E_Locations.l_16_10, E_Locations.l_16_11);\n\t\tstate.p_Connected.set(E_Locations.l_11_16, E_Locations.l_10_16);\n\t\tstate.p_Connected.set(E_Locations.l_11_15, E_Locations.l_11_14);\n\t\tstate.p_Connected.set(E_Locations.l_17_2, E_Locations.l_16_2);\n\t\tstate.p_Connected.set(E_Locations.l_17_6, E_Locations.l_17_5);\n\t\tstate.p_Connected.set(E_Locations.l_18_19, E_Locations.l_17_19);\n\t\tstate.p_Connected.set(E_Locations.l_6_17, E_Locations.l_6_18);\n\t\tstate.p_Connected.set(E_Locations.l_7_15, E_Locations.l_7_16);\n\t\tstate.p_Connected.set(E_Locations.l_1_12, E_Locations.l_2_12);\n\t\tstate.p_Connected.set(E_Locations.l_11_7, E_Locations.l_11_6);\n\t\tstate.p_Connected.set(E_Locations.l_16_7, E_Locations.l_16_6);\n\t\tstate.p_Connected.set(E_Locations.l_13_13, E_Locations.l_13_12);\n\t\tstate.p_Connected.set(E_Locations.l_11_10, E_Locations.l_12_10);\n\t\tstate.p_Connected.set(E_Locations.l_1_10, E_Locations.l_2_10);\n\t\tstate.p_Connected.set(E_Locations.l_10_18, E_Locations.l_10_19);\n\t\tstate.p_Connected.set(E_Locations.l_1_5, E_Locations.l_2_5);\n\t\tstate.p_Connected.set(E_Locations.l_4_15, E_Locations.l_3_15);\n\t\tstate.p_Connected.set(E_Locations.l_15_6, E_Locations.l_15_7);\n\t\tstate.p_Connected.set(E_Locations.l_15_4, E_Locations.l_15_5);\n\t\tstate.p_Connected.set(E_Locations.l_9_2, E_Locations.l_9_1);\n\t\tstate.p_Connected.set(E_Locations.l_18_2, E_Locations.l_19_2);\n\t\tstate.p_Connected.set(E_Locations.l_9_9, E_Locations.l_8_9);\n\t\tstate.p_Connected.set(E_Locations.l_17_5, E_Locations.l_17_6);\n\t\tstate.p_Connected.set(E_Locations.l_19_15, E_Locations.l_19_16);\n\t\tstate.p_Connected.set(E_Locations.l_14_2, E_Locations.l_15_2);\n\t\tstate.p_Connected.set(E_Locations.l_4_12, E_Locations.l_4_13);\n\t\tstate.p_Connected.set(E_Locations.l_11_18, E_Locations.l_12_18);\n\t\tstate.p_Connected.set(E_Locations.l_4_16, E_Locations.l_4_15);\n\t\tstate.p_Connected.set(E_Locations.l_9_1, E_Locations.l_8_1);\n\t\tstate.p_Connected.set(E_Locations.l_18_5, E_Locations.l_17_5);\n\t\tstate.p_Connected.set(E_Locations.l_14_10, E_Locations.l_14_9);\n\t\tstate.p_Connected.set(E_Locations.l_7_10, E_Locations.l_7_11);\n\t\tstate.p_Connected.set(E_Locations.l_10_6, E_Locations.l_11_6);\n\t\tstate.p_Connected.set(E_Locations.l_19_14, 
E_Locations.l_20_14);\n\t\tstate.p_Connected.set(E_Locations.l_5_2, E_Locations.l_4_2);\n\t\tstate.p_Connected.set(E_Locations.l_18_18, E_Locations.l_19_18);\n\t\tstate.p_Connected.set(E_Locations.l_8_20, E_Locations.l_7_20);\n\t\tstate.p_Connected.set(E_Locations.l_14_4, E_Locations.l_15_4);\n\t\tstate.p_Connected.set(E_Locations.l_13_1, E_Locations.l_13_2);\n\t\tstate.p_Connected.set(E_Locations.l_3_8, E_Locations.l_3_7);\n\t\tstate.p_Connected.set(E_Locations.l_11_5, E_Locations.l_10_5);\n\t\tstate.p_Connected.set(E_Locations.l_8_14, E_Locations.l_8_15);\n\t\tstate.p_Connected.set(E_Locations.l_4_18, E_Locations.l_5_18);\n\t\tstate.p_Connected.set(E_Locations.l_13_9, E_Locations.l_14_9);\n\t\tstate.p_Connected.set(E_Locations.l_20_3, E_Locations.l_20_4);\n\t\tstate.p_Connected.set(E_Locations.l_8_20, E_Locations.l_8_19);\n\t\tstate.p_Connected.set(E_Locations.l_10_13, E_Locations.l_11_13);\n\t\tstate.p_Connected.set(E_Locations.l_6_9, E_Locations.l_7_9);\n\t\tstate.p_Connected.set(E_Locations.l_19_11, E_Locations.l_20_11);\n\t\tstate.p_Connected.set(E_Locations.l_20_8, E_Locations.l_20_9);\n\t\tstate.p_Connected.set(E_Locations.l_3_12, E_Locations.l_2_12);\n\t\tstate.p_Connected.set(E_Locations.l_12_14, E_Locations.l_13_14);\n\t\tstate.p_Connected.set(E_Locations.l_2_8, E_Locations.l_3_8);\n\t\tstate.p_Connected.set(E_Locations.l_18_16, E_Locations.l_17_16);\n\t\tstate.p_Connected.set(E_Locations.l_7_6, E_Locations.l_7_7);\n\t\tstate.p_Connected.set(E_Locations.l_5_3, E_Locations.l_5_4);\n\t\tstate.p_Connected.set(E_Locations.l_2_9, E_Locations.l_3_9);\n\t\tstate.p_Connected.set(E_Locations.l_11_6, E_Locations.l_12_6);\n\t\tstate.p_Connected.set(E_Locations.l_19_5, E_Locations.l_19_6);\n\t\tstate.p_Connected.set(E_Locations.l_15_19, E_Locations.l_16_19);\n\t\tstate.p_Connected.set(E_Locations.l_20_6, E_Locations.l_20_5);\n\t\tstate.p_Connected.set(E_Locations.l_8_2, E_Locations.l_8_1);\n\t\tstate.p_Connected.set(E_Locations.l_17_17, E_Locations.l_18_17);\n\t\tstate.p_Connected.set(E_Locations.l_10_16, E_Locations.l_11_16);\n\t\tstate.p_Connected.set(E_Locations.l_18_7, E_Locations.l_19_7);\n\t\tstate.p_Connected.set(E_Locations.l_2_15, E_Locations.l_2_16);\n\t\tstate.p_Connected.set(E_Locations.l_3_8, E_Locations.l_2_8);\n\t\tstate.p_Connected.set(E_Locations.l_14_13, E_Locations.l_13_13);\n\t\tstate.p_Connected.set(E_Locations.l_13_5, E_Locations.l_13_6);\n\t\tstate.p_Connected.set(E_Locations.l_12_5, E_Locations.l_11_5);\n\t\tstate.p_Connected.set(E_Locations.l_5_13, E_Locations.l_6_13);\n\t\tstate.p_Connected.set(E_Locations.l_14_19, E_Locations.l_14_20);\n\t\tstate.p_Connected.set(E_Locations.l_16_3, E_Locations.l_16_2);\n\t\tstate.p_Connected.set(E_Locations.l_17_18, E_Locations.l_17_19);\n\t\tstate.p_Connected.set(E_Locations.l_16_16, E_Locations.l_15_16);\n\t\tstate.p_Connected.set(E_Locations.l_11_7, E_Locations.l_10_7);\n\t\tstate.p_Connected.set(E_Locations.l_7_17, E_Locations.l_7_18);\n\t\tstate.p_Connected.set(E_Locations.l_8_10, E_Locations.l_8_11);\n\t\tstate.p_Connected.set(E_Locations.l_18_16, E_Locations.l_19_16);\n\t\tstate.p_Connected.set(E_Locations.l_17_5, E_Locations.l_18_5);\n\t\tstate.p_Connected.set(E_Locations.l_3_10, E_Locations.l_4_10);\n\t\tstate.p_Connected.set(E_Locations.l_18_11, E_Locations.l_18_12);\n\t\tstate.p_Connected.set(E_Locations.l_7_6, E_Locations.l_8_6);\n\t\tstate.p_Connected.set(E_Locations.l_11_6, E_Locations.l_11_7);\n\t\tstate.p_Connected.set(E_Locations.l_11_8, E_Locations.l_11_9);\n\t\tstate.p_Connected.set(E_Locations.l_7_4, 
E_Locations.l_7_5);\n\t\tstate.p_Connected.set(E_Locations.l_20_4, E_Locations.l_20_5);\n\t\tstate.p_Connected.set(E_Locations.l_2_14, E_Locations.l_3_14);\n\t\tstate.p_Connected.set(E_Locations.l_15_19, E_Locations.l_15_18);\n\t\tstate.p_Connected.set(E_Locations.l_3_18, E_Locations.l_3_19);\n\t\tstate.p_Connected.set(E_Locations.l_19_10, E_Locations.l_19_9);\n\t\tstate.p_Connected.set(E_Locations.l_15_16, E_Locations.l_14_16);\n\t\tstate.p_Connected.set(E_Locations.l_20_5, E_Locations.l_20_4);\n\t\tstate.p_Connected.set(E_Locations.l_6_4, E_Locations.l_6_3);\n\t\tstate.p_Connected.set(E_Locations.l_2_1, E_Locations.l_1_1);\n\t\tstate.p_Connected.set(E_Locations.l_19_1, E_Locations.l_20_1);\n\t\tstate.p_Connected.set(E_Locations.l_1_11, E_Locations.l_1_10);\n\t\tstate.p_Connected.set(E_Locations.l_3_3, E_Locations.l_3_2);\n\t\tstate.p_Connected.set(E_Locations.l_7_8, E_Locations.l_6_8);\n\t\tstate.p_Connected.set(E_Locations.l_14_11, E_Locations.l_15_11);\n\t\tstate.p_Connected.set(E_Locations.l_13_10, E_Locations.l_13_9);\n\t\tstate.p_Connected.set(E_Locations.l_6_11, E_Locations.l_6_10);\n\t\tstate.p_Connected.set(E_Locations.l_12_13, E_Locations.l_12_12);\n\t\tstate.p_Connected.set(E_Locations.l_16_7, E_Locations.l_15_7);\n\t\tstate.p_Connected.set(E_Locations.l_15_8, E_Locations.l_14_8);\n\t\tstate.p_Connected.set(E_Locations.l_5_4, E_Locations.l_5_5);\n\t\tstate.p_Connected.set(E_Locations.l_19_15, E_Locations.l_20_15);\n\t\tstate.p_Connected.set(E_Locations.l_16_6, E_Locations.l_15_6);\n\t\tstate.p_Connected.set(E_Locations.l_17_2, E_Locations.l_17_3);\n\t\tstate.p_Connected.set(E_Locations.l_16_15, E_Locations.l_16_16);\n\t\tstate.p_Connected.set(E_Locations.l_2_6, E_Locations.l_2_7);\n\t\tstate.p_Connected.set(E_Locations.l_8_5, E_Locations.l_8_4);\n\t\tstate.p_Connected.set(E_Locations.l_17_5, E_Locations.l_16_5);\n\t\tstate.p_Connected.set(E_Locations.l_9_5, E_Locations.l_8_5);\n\t\tstate.p_Connected.set(E_Locations.l_14_17, E_Locations.l_15_17);\n\t\tstate.p_Connected.set(E_Locations.l_10_1, E_Locations.l_9_1);\n\t\tstate.p_Connected.set(E_Locations.l_4_6, E_Locations.l_5_6);\n\t\tstate.p_Connected.set(E_Locations.l_19_16, E_Locations.l_19_15);\n\t\tstate.p_Connected.set(E_Locations.l_8_16, E_Locations.l_8_17);\n\t\tstate.p_Connected.set(E_Locations.l_19_15, E_Locations.l_18_15);\n\t\tstate.p_Connected.set(E_Locations.l_8_13, E_Locations.l_8_14);\n\t\tstate.p_Connected.set(E_Locations.l_12_12, E_Locations.l_11_12);\n\t\tstate.p_Connected.set(E_Locations.l_17_7, E_Locations.l_17_6);\n\t\tstate.p_Connected.set(E_Locations.l_1_13, E_Locations.l_1_12);\n\t\tstate.p_Connected.set(E_Locations.l_9_20, E_Locations.l_8_20);\n\t\tstate.p_Connected.set(E_Locations.l_14_4, E_Locations.l_14_5);\n\t\tstate.p_Connected.set(E_Locations.l_7_5, E_Locations.l_7_6);\n\t\tstate.p_Connected.set(E_Locations.l_15_17, E_Locations.l_16_17);\n\t\tstate.p_Connected.set(E_Locations.l_9_16, E_Locations.l_10_16);\n\t\tstate.p_Connected.set(E_Locations.l_11_4, E_Locations.l_11_5);\n\t\tstate.p_Connected.set(E_Locations.l_2_1, E_Locations.l_2_2);\n\t\tstate.p_Connected.set(E_Locations.l_11_10, E_Locations.l_11_9);\n\t\tstate.p_Connected.set(E_Locations.l_9_16, E_Locations.l_8_16);\n\t\tstate.p_Connected.set(E_Locations.l_17_16, E_Locations.l_17_15);\n\t\tstate.p_Connected.set(E_Locations.l_1_8, E_Locations.l_2_8);\n\t\tstate.p_Connected.set(E_Locations.l_5_14, E_Locations.l_4_14);\n\t\tstate.p_Connected.set(E_Locations.l_2_17, E_Locations.l_1_17);\n\t\tstate.p_Connected.set(E_Locations.l_3_17, 
E_Locations.l_2_17);\n\t\tstate.p_Connected.set(E_Locations.l_18_8, E_Locations.l_19_8);\n\t\tstate.p_Connected.set(E_Locations.l_3_20, E_Locations.l_3_19);\n\t\tstate.p_Connected.set(E_Locations.l_5_10, E_Locations.l_5_11);\n\t\tstate.p_Connected.set(E_Locations.l_7_12, E_Locations.l_8_12);\n\t\tstate.p_Connected.set(E_Locations.l_14_9, E_Locations.l_13_9);\n\t\tstate.p_Connected.set(E_Locations.l_6_15, E_Locations.l_6_16);\n\t\tstate.p_Connected.set(E_Locations.l_11_20, E_Locations.l_10_20);\n\t\tstate.p_Connected.set(E_Locations.l_11_11, E_Locations.l_12_11);\n\t\tstate.p_Connected.set(E_Locations.l_13_9, E_Locations.l_13_10);\n\t\tstate.p_Connected.set(E_Locations.l_13_20, E_Locations.l_12_20);\n\t\tstate.p_Connected.set(E_Locations.l_4_4, E_Locations.l_5_4);\n\t\tstate.p_Connected.set(E_Locations.l_9_9, E_Locations.l_9_8);\n\t\tstate.p_Connected.set(E_Locations.l_8_17, E_Locations.l_8_18);\n\t\tstate.p_Connected.set(E_Locations.l_7_8, E_Locations.l_8_8);\n\t\tstate.p_Connected.set(E_Locations.l_15_1, E_Locations.l_16_1);\n\t\tstate.p_Connected.set(E_Locations.l_15_16, E_Locations.l_16_16);\n\t\tstate.p_Connected.set(E_Locations.l_2_9, E_Locations.l_2_8);\n\t\tstate.p_Connected.set(E_Locations.l_17_7, E_Locations.l_18_7);\n\t\tstate.p_Connected.set(E_Locations.l_6_11, E_Locations.l_7_11);\n\t\tstate.p_Connected.set(E_Locations.l_11_2, E_Locations.l_10_2);\n\t\tstate.p_Connected.set(E_Locations.l_18_2, E_Locations.l_17_2);\n\t\tstate.p_Connected.set(E_Locations.l_20_14, E_Locations.l_20_15);\n\t\tstate.p_Connected.set(E_Locations.l_20_17, E_Locations.l_20_18);\n\t\tstate.p_Connected.set(E_Locations.l_10_11, E_Locations.l_11_11);\n\t\tstate.p_Connected.set(E_Locations.l_7_10, E_Locations.l_6_10);\n\t\tstate.p_Connected.set(E_Locations.l_18_16, E_Locations.l_18_15);\n\t\tstate.p_Connected.set(E_Locations.l_10_3, E_Locations.l_10_2);\n\t\tstate.p_Connected.set(E_Locations.l_17_10, E_Locations.l_17_9);\n\t\tstate.p_Connected.set(E_Locations.l_17_4, E_Locations.l_17_3);\n\t\tstate.p_Connected.set(E_Locations.l_3_9, E_Locations.l_3_8);\n\t\tstate.p_Connected.set(E_Locations.l_8_17, E_Locations.l_9_17);\n\t\tstate.p_Connected.set(E_Locations.l_16_12, E_Locations.l_16_13);\n\t\tstate.p_Connected.set(E_Locations.l_6_1, E_Locations.l_7_1);\n\t\tstate.p_Connected.set(E_Locations.l_2_12, E_Locations.l_3_12);\n\t\tstate.p_Connected.set(E_Locations.l_8_16, E_Locations.l_7_16);\n\t\tstate.p_Connected.set(E_Locations.l_4_8, E_Locations.l_4_9);\n\t\tstate.p_Connected.set(E_Locations.l_15_9, E_Locations.l_15_8);\n\t\tstate.p_Connected.set(E_Locations.l_9_15, E_Locations.l_10_15);\n\t\tstate.p_Connected.set(E_Locations.l_18_3, E_Locations.l_18_2);\n\t\tstate.p_Connected.set(E_Locations.l_4_19, E_Locations.l_4_20);\n\t\tstate.p_Connected.set(E_Locations.l_2_11, E_Locations.l_1_11);\n\t\tstate.p_Connected.set(E_Locations.l_2_10, E_Locations.l_2_9);\n\t\tstate.p_Connected.set(E_Locations.l_10_4, E_Locations.l_11_4);\n\t\tstate.p_Connected.set(E_Locations.l_14_15, E_Locations.l_14_14);\n\t\tstate.p_Connected.set(E_Locations.l_19_4, E_Locations.l_19_5);\n\t\tstate.p_Connected.set(E_Locations.l_12_12, E_Locations.l_13_12);\n\t\tstate.p_Connected.set(E_Locations.l_2_9, E_Locations.l_1_9);\n\t\tstate.p_Connected.set(E_Locations.l_5_5, E_Locations.l_4_5);\n\t\tstate.p_Connected.set(E_Locations.l_20_19, E_Locations.l_19_19);\n\t\tstate.p_Connected.set(E_Locations.l_3_5, E_Locations.l_3_6);\n\t\tstate.p_Connected.set(E_Locations.l_3_6, E_Locations.l_3_5);\n\t\tstate.p_Connected.set(E_Locations.l_6_15, 
E_Locations.l_5_15);\n\t\tstate.p_Connected.set(E_Locations.l_12_6, E_Locations.l_12_5);\n\t\tstate.p_Connected.set(E_Locations.l_5_1, E_Locations.l_5_2);\n\t\tstate.p_Connected.set(E_Locations.l_6_5, E_Locations.l_6_4);\n\t\tstate.p_Connected.set(E_Locations.l_7_13, E_Locations.l_8_13);\n\t\tstate.p_Connected.set(E_Locations.l_11_8, E_Locations.l_11_7);\n\t\tstate.p_Connected.set(E_Locations.l_10_12, E_Locations.l_10_13);\n\t\tstate.p_Connected.set(E_Locations.l_20_9, E_Locations.l_20_8);\n\t\tstate.p_Connected.set(E_Locations.l_2_20, E_Locations.l_1_20);\n\t\tstate.p_Connected.set(E_Locations.l_2_19, E_Locations.l_1_19);\n\t\tstate.p_Connected.set(E_Locations.l_13_19, E_Locations.l_13_20);\n\t\tstate.p_Connected.set(E_Locations.l_11_11, E_Locations.l_11_12);\n\t\tstate.p_Connected.set(E_Locations.l_10_11, E_Locations.l_10_12);\n\t\tstate.p_Connected.set(E_Locations.l_3_14, E_Locations.l_4_14);\n\t\tstate.p_Connected.set(E_Locations.l_19_2, E_Locations.l_18_2);\n\t\tstate.p_Connected.set(E_Locations.l_4_6, E_Locations.l_4_7);\n\t\tstate.p_Connected.set(E_Locations.l_4_15, E_Locations.l_5_15);\n\t\tstate.p_Connected.set(E_Locations.l_8_9, E_Locations.l_9_9);\n\t\tstate.p_Connected.set(E_Locations.l_1_2, E_Locations.l_2_2);\n\t\tstate.p_Connected.set(E_Locations.l_12_6, E_Locations.l_11_6);\n\t\tstate.p_Connected.set(E_Locations.l_8_12, E_Locations.l_9_12);\n\t\tstate.p_Connected.set(E_Locations.l_17_11, E_Locations.l_16_11);\n\t\tstate.p_Connected.set(E_Locations.l_2_16, E_Locations.l_1_16);\n\t\tstate.p_Connected.set(E_Locations.l_9_14, E_Locations.l_10_14);\n\t\tstate.p_Connected.set(E_Locations.l_6_10, E_Locations.l_6_9);\n\t\tstate.p_Connected.set(E_Locations.l_12_19, E_Locations.l_12_18);\n\t\tstate.p_Connected.set(E_Locations.l_4_6, E_Locations.l_3_6);\n\t\tstate.p_Connected.set(E_Locations.l_16_19, E_Locations.l_16_18);\n\t\tstate.p_Connected.set(E_Locations.l_13_3, E_Locations.l_13_4);\n\t\tstate.p_Connected.set(E_Locations.l_5_12, E_Locations.l_5_11);\n\t\tstate.p_Connected.set(E_Locations.l_7_7, E_Locations.l_6_7);\n\t\tstate.p_Connected.set(E_Locations.l_6_5, E_Locations.l_5_5);\n\t\tstate.p_Connected.set(E_Locations.l_20_6, E_Locations.l_19_6);\n\t\tstate.p_Connected.set(E_Locations.l_18_9, E_Locations.l_19_9);\n\t\tstate.p_Connected.set(E_Locations.l_1_10, E_Locations.l_1_9);\n\t\tstate.p_Connected.set(E_Locations.l_14_5, E_Locations.l_14_6);\n\t\tstate.p_Connected.set(E_Locations.l_8_15, E_Locations.l_8_16);\n\t\tstate.p_Connected.set(E_Locations.l_13_17, E_Locations.l_14_17);\n\t\tstate.p_Connected.set(E_Locations.l_18_10, E_Locations.l_17_10);\n\t\tstate.p_Connected.set(E_Locations.l_7_7, E_Locations.l_8_7);\n\t\tstate.p_Connected.set(E_Locations.l_19_6, E_Locations.l_19_7);\n\t\tstate.p_Connected.set(E_Locations.l_3_3, E_Locations.l_4_3);\n\t\tstate.p_Connected.set(E_Locations.l_8_6, E_Locations.l_7_6);\n\t\tstate.p_Connected.set(E_Locations.l_2_8, E_Locations.l_1_8);\n\t\tstate.p_Connected.set(E_Locations.l_10_20, E_Locations.l_10_19);\n\t\tstate.p_Connected.set(E_Locations.l_19_8, E_Locations.l_19_7);\n\t\tstate.p_Connected.set(E_Locations.l_14_15, E_Locations.l_15_15);\n\t\tstate.p_Connected.set(E_Locations.l_11_7, E_Locations.l_12_7);\n\t\tstate.p_Connected.set(E_Locations.l_11_13, E_Locations.l_10_13);\n\t\tstate.p_Connected.set(E_Locations.l_7_20, E_Locations.l_6_20);\n\t\tstate.p_Connected.set(E_Locations.l_7_9, E_Locations.l_7_8);\n\t\tstate.p_Connected.set(E_Locations.l_7_19, E_Locations.l_7_18);\n\t\tstate.p_Connected.set(E_Locations.l_7_17, 
E_Locations.l_7_16);\n\t\tstate.p_Connected.set(E_Locations.l_7_3, E_Locations.l_7_4);\n\t\tstate.p_Connected.set(E_Locations.l_14_12, E_Locations.l_14_11);\n\t\tstate.p_Connected.set(E_Locations.l_15_7, E_Locations.l_16_7);\n\t\tstate.p_Connected.set(E_Locations.l_12_9, E_Locations.l_12_10);\n\t\tstate.p_Connected.set(E_Locations.l_8_5, E_Locations.l_8_6);\n\t\tstate.p_Connected.set(E_Locations.l_5_19, E_Locations.l_6_19);\n\t\tstate.p_Connected.set(E_Locations.l_15_2, E_Locations.l_14_2);\n\t\tstate.p_Connected.set(E_Locations.l_15_2, E_Locations.l_15_3);\n\t\tstate.p_Connected.set(E_Locations.l_2_11, E_Locations.l_3_11);\n\t\tstate.p_Connected.set(E_Locations.l_1_6, E_Locations.l_2_6);\n\t\tstate.p_Connected.set(E_Locations.l_15_15, E_Locations.l_14_15);\n\t\tstate.p_Connected.set(E_Locations.l_9_11, E_Locations.l_10_11);\n\t\tstate.p_Connected.set(E_Locations.l_2_13, E_Locations.l_3_13);\n\t\tstate.p_Connected.set(E_Locations.l_12_10, E_Locations.l_12_11);\n\t\tstate.p_Connected.set(E_Locations.l_12_19, E_Locations.l_11_19);\n\t\tstate.p_Connected.set(E_Locations.l_4_17, E_Locations.l_5_17);\n\t\tstate.p_Connected.set(E_Locations.l_3_16, E_Locations.l_3_17);\n\t\tstate.p_Connected.set(E_Locations.l_10_9, E_Locations.l_10_10);\n\t\tstate.p_Connected.set(E_Locations.l_7_12, E_Locations.l_7_13);\n\t\tstate.p_Connected.set(E_Locations.l_20_13, E_Locations.l_19_13);\n\t\tstate.p_Connected.set(E_Locations.l_17_6, E_Locations.l_18_6);\n\t\tstate.p_Connected.set(E_Locations.l_18_20, E_Locations.l_17_20);\n\t\tstate.p_Connected.set(E_Locations.l_15_12, E_Locations.l_16_12);\n\t\tstate.p_Connected.set(E_Locations.l_18_4, E_Locations.l_17_4);\n\t\tstate.p_Connected.set(E_Locations.l_5_1, E_Locations.l_4_1);\n\t\tstate.p_Connected.set(E_Locations.l_8_17, E_Locations.l_7_17);\n\t\tstate.p_Connected.set(E_Locations.l_12_1, E_Locations.l_12_2);\n\t\tstate.p_Connected.set(E_Locations.l_16_4, E_Locations.l_17_4);\n\t\tstate.p_Connected.set(E_Locations.l_5_18, E_Locations.l_6_18);\n\t\tstate.p_Connected.set(E_Locations.l_20_11, E_Locations.l_20_10);\n\t\tstate.p_Connected.set(E_Locations.l_4_12, E_Locations.l_3_12);\n\t\tstate.p_Connected.set(E_Locations.l_9_16, E_Locations.l_9_17);\n\t\tstate.p_Connected.set(E_Locations.l_6_8, E_Locations.l_5_8);\n\t\tstate.p_Connected.set(E_Locations.l_6_20, E_Locations.l_5_20);\n\t\tstate.p_Connected.set(E_Locations.l_17_3, E_Locations.l_18_3);\n\t\tstate.p_Connected.set(E_Locations.l_6_17, E_Locations.l_7_17);\n\t\tstate.p_Connected.set(E_Locations.l_8_2, E_Locations.l_8_3);\n\t\tstate.p_Connected.set(E_Locations.l_9_16, E_Locations.l_9_15);\n\t\tstate.p_Connected.set(E_Locations.l_4_13, E_Locations.l_3_13);\n\t\tstate.p_Connected.set(E_Locations.l_6_16, E_Locations.l_7_16);\n\t\tstate.p_Connected.set(E_Locations.l_9_18, E_Locations.l_9_17);\n\t\tstate.p_Connected.set(E_Locations.l_20_4, E_Locations.l_20_3);\n\t\tstate.p_Connected.set(E_Locations.l_11_2, E_Locations.l_12_2);\n\t\tstate.p_Connected.set(E_Locations.l_17_12, E_Locations.l_16_12);\n\t\tstate.p_Connected.set(E_Locations.l_16_5, E_Locations.l_17_5);\n\t\tstate.p_Connected.set(E_Locations.l_4_13, E_Locations.l_4_12);\n\t\tstate.p_Connected.set(E_Locations.l_8_11, E_Locations.l_8_12);\n\t\tstate.p_Connected.set(E_Locations.l_12_8, E_Locations.l_12_7);\n\t\tstate.p_Connected.set(E_Locations.l_6_7, E_Locations.l_7_7);\n\t\tstate.p_Connected.set(E_Locations.l_7_3, E_Locations.l_6_3);\n\t\tstate.p_Connected.set(E_Locations.l_6_18, E_Locations.l_5_18);\n\t\tstate.p_Connected.set(E_Locations.l_19_3, 
E_Locations.l_19_2);\n\t\tstate.p_Connected.set(E_Locations.l_20_20, E_Locations.l_19_20);\n\t\tstate.p_Connected.set(E_Locations.l_6_19, E_Locations.l_6_20);\n\t\tstate.p_Connected.set(E_Locations.l_4_4, E_Locations.l_4_3);\n\t\tstate.p_Connected.set(E_Locations.l_4_3, E_Locations.l_5_3);\n\t\tstate.p_Connected.set(E_Locations.l_18_8, E_Locations.l_18_9);\n\t\tstate.p_Connected.set(E_Locations.l_8_9, E_Locations.l_7_9);\n\t\tstate.p_Connected.set(E_Locations.l_10_1, E_Locations.l_11_1);\n\t\tstate.p_Connected.set(E_Locations.l_1_12, E_Locations.l_1_11);\n\t\tstate.p_Connected.set(E_Locations.l_1_2, E_Locations.l_1_1);\n\t\tstate.p_Connected.set(E_Locations.l_13_14, E_Locations.l_14_14);\n\t\tstate.p_Connected.set(E_Locations.l_7_18, E_Locations.l_8_18);\n\t\tstate.p_Connected.set(E_Locations.l_13_2, E_Locations.l_13_1);\n\t\tstate.p_Connected.set(E_Locations.l_13_17, E_Locations.l_12_17);\n\t\tstate.p_Connected.set(E_Locations.l_18_19, E_Locations.l_18_20);\n\t\tstate.p_Connected.set(E_Locations.l_6_16, E_Locations.l_5_16);\n\t\tstate.p_Connected.set(E_Locations.l_11_9, E_Locations.l_12_9);\n\t\tstate.p_Connected.set(E_Locations.l_2_4, E_Locations.l_1_4);\n\t\tstate.p_Connected.set(E_Locations.l_5_6, E_Locations.l_4_6);\n\t\tstate.p_Connected.set(E_Locations.l_8_9, E_Locations.l_8_8);\n\t\tstate.p_Connected.set(E_Locations.l_16_11, E_Locations.l_17_11);\n\t\tstate.p_Connected.set(E_Locations.l_4_5, E_Locations.l_3_5);\n\t\tstate.p_Connected.set(E_Locations.l_2_17, E_Locations.l_3_17);\n\t\tstate.p_Connected.set(E_Locations.l_12_9, E_Locations.l_11_9);\n\t\tstate.p_Connected.set(E_Locations.l_16_6, E_Locations.l_17_6);\n\t\tstate.p_Connected.set(E_Locations.l_9_7, E_Locations.l_10_7);\n\t\tstate.p_Connected.set(E_Locations.l_9_2, E_Locations.l_8_2);\n\t\tstate.p_Connected.set(E_Locations.l_5_14, E_Locations.l_6_14);\n\t\tstate.p_Connected.set(E_Locations.l_7_14, E_Locations.l_8_14);\n\t\tstate.p_Connected.set(E_Locations.l_1_14, E_Locations.l_1_13);\n\t\tstate.p_Connected.set(E_Locations.l_13_10, E_Locations.l_12_10);\n\t\tstate.p_Connected.set(E_Locations.l_7_6, E_Locations.l_6_6);\n\t\tstate.p_Connected.set(E_Locations.l_11_14, E_Locations.l_10_14);\n\t\tstate.p_Connected.set(E_Locations.l_18_10, E_Locations.l_19_10);\n\t\tstate.p_Connected.set(E_Locations.l_3_8, E_Locations.l_4_8);\n\t\tstate.p_Connected.set(E_Locations.l_18_18, E_Locations.l_18_17);\n\t\tstate.p_Connected.set(E_Locations.l_3_17, E_Locations.l_3_16);\n\t\tstate.p_Connected.set(E_Locations.l_19_18, E_Locations.l_19_17);\n\t\tstate.p_Connected.set(E_Locations.l_12_11, E_Locations.l_12_12);\n\t\tstate.p_Connected.set(E_Locations.l_12_2, E_Locations.l_13_2);\n\t\tstate.p_Connected.set(E_Locations.l_2_6, E_Locations.l_3_6);\n\t\tstate.p_Connected.set(E_Locations.l_15_18, E_Locations.l_16_18);\n\t\tstate.p_Connected.set(E_Locations.l_5_15, E_Locations.l_5_14);\n\t\tstate.p_Connected.set(E_Locations.l_16_10, E_Locations.l_16_9);\n\t\tstate.p_Connected.set(E_Locations.l_18_17, E_Locations.l_18_18);\n\t\tstate.p_Connected.set(E_Locations.l_19_20, E_Locations.l_18_20);\n\t\tstate.p_Connected.set(E_Locations.l_6_15, E_Locations.l_6_14);\n\t\tstate.p_Connected.set(E_Locations.l_8_14, E_Locations.l_7_14);\n\t\tstate.p_Connected.set(E_Locations.l_12_4, E_Locations.l_13_4);\n\t\tstate.p_Connected.set(E_Locations.l_16_11, E_Locations.l_16_12);\n\t\tstate.p_Connected.set(E_Locations.l_18_9, E_Locations.l_18_8);\n\t\tstate.p_Connected.set(E_Locations.l_4_3, E_Locations.l_4_2);\n\t\tstate.p_Connected.set(E_Locations.l_12_8, 
E_Locations.l_13_8);\n\t\tstate.p_Connected.set(E_Locations.l_9_4, E_Locations.l_8_4);\n\t\tstate.p_Connected.set(E_Locations.l_20_12, E_Locations.l_19_12);\n\t\tstate.p_Connected.set(E_Locations.l_16_14, E_Locations.l_15_14);\n\t\tstate.p_Connected.set(E_Locations.l_13_15, E_Locations.l_13_16);\n\t\tstate.p_Connected.set(E_Locations.l_5_13, E_Locations.l_5_12);\n\t\tstate.p_Connected.set(E_Locations.l_19_7, E_Locations.l_19_8);\n\t\tstate.p_Connected.set(E_Locations.l_16_14, E_Locations.l_17_14);\n\t\tstate.p_Connected.set(E_Locations.l_17_1, E_Locations.l_16_1);\n\t\tstate.p_Connected.set(E_Locations.l_12_18, E_Locations.l_11_18);\n\t\tstate.p_Connected.set(E_Locations.l_16_3, E_Locations.l_17_3);\n\t\tstate.p_Connected.set(E_Locations.l_16_6, E_Locations.l_16_7);\n\t\tstate.p_Connected.set(E_Locations.l_16_9, E_Locations.l_17_9);\n\t\tstate.p_Connected.set(E_Locations.l_4_15, E_Locations.l_4_16);\n\t\tstate.p_Connected.set(E_Locations.l_15_14, E_Locations.l_14_14);\n\t\tstate.p_Connected.set(E_Locations.l_16_18, E_Locations.l_15_18);\n\t\tstate.p_Connected.set(E_Locations.l_12_18, E_Locations.l_12_19);\n\t\tstate.p_Connected.set(E_Locations.l_19_11, E_Locations.l_19_12);\n\t\tstate.p_Connected.set(E_Locations.l_19_2, E_Locations.l_19_3);\n\t\tstate.p_Connected.set(E_Locations.l_9_20, E_Locations.l_10_20);\n\t\tstate.p_Connected.set(E_Locations.l_20_15, E_Locations.l_19_15);\n\t\tstate.p_Connected.set(E_Locations.l_3_1, E_Locations.l_4_1);\n\t\tstate.p_Connected.set(E_Locations.l_12_10, E_Locations.l_13_10);\n\t\tstate.p_Connected.set(E_Locations.l_2_8, E_Locations.l_2_7);\n\t\tstate.p_Connected.set(E_Locations.l_5_3, E_Locations.l_6_3);\n\t\tstate.p_Connected.set(E_Locations.l_8_12, E_Locations.l_8_11);\n\t\tstate.p_Connected.set(E_Locations.l_5_19, E_Locations.l_5_18);\n\t\tstate.p_Connected.set(E_Locations.l_17_13, E_Locations.l_18_13);\n\t\tstate.p_Connected.set(E_Locations.l_12_20, E_Locations.l_12_19);\n\t\tstate.p_Connected.set(E_Locations.l_12_11, E_Locations.l_11_11);\n\t\tstate.p_Connected.set(E_Locations.l_4_18, E_Locations.l_4_17);\n\t\tstate.p_Connected.set(E_Locations.l_14_18, E_Locations.l_13_18);\n\t\tstate.p_Connected.set(E_Locations.l_8_19, E_Locations.l_9_19);\n\t\tstate.p_Connected.set(E_Locations.l_7_10, E_Locations.l_7_9);\n\t\tstate.p_Connected.set(E_Locations.l_11_4, E_Locations.l_12_4);\n\t\tstate.p_Connected.set(E_Locations.l_20_2, E_Locations.l_20_3);\n\t\tstate.p_Connected.set(E_Locations.l_2_4, E_Locations.l_3_4);\n\t\tstate.p_Connected.set(E_Locations.l_19_2, E_Locations.l_19_1);\n\t\tstate.p_Connected.set(E_Locations.l_16_13, E_Locations.l_17_13);\n\t\tstate.p_Connected.set(E_Locations.l_2_15, E_Locations.l_1_15);\n\t\tstate.p_Connected.set(E_Locations.l_13_20, E_Locations.l_13_19);\n\t\tstate.p_Connected.set(E_Locations.l_15_20, E_Locations.l_16_20);\n\t\tstate.p_Connected.set(E_Locations.l_1_17, E_Locations.l_1_18);\n\t\tstate.p_Connected.set(E_Locations.l_7_1, E_Locations.l_6_1);\n\t\tstate.p_Connected.set(E_Locations.l_6_11, E_Locations.l_6_12);\n\t\tstate.p_Connected.set(E_Locations.l_4_20, E_Locations.l_4_19);\n\t\tstate.p_Connected.set(E_Locations.l_9_13, E_Locations.l_8_13);\n\t\tstate.p_Connected.set(E_Locations.l_13_12, E_Locations.l_13_11);\n\t\tstate.p_Connected.set(E_Locations.l_15_7, E_Locations.l_15_6);\n\t\tstate.p_Connected.set(E_Locations.l_2_3, E_Locations.l_2_2);\n\t\tstate.p_Connected.set(E_Locations.l_13_11, E_Locations.l_12_11);\n\t\tstate.p_Connected.set(E_Locations.l_5_16, 
E_Locations.l_5_17);\n\t\tstate.p_Connected.set(E_Locations.l_14_16, E_Locations.l_14_17);\n\t\tstate.p_Connected.set(E_Locations.l_17_15, E_Locations.l_17_14);\n\t\tstate.p_Connected.set(E_Locations.l_8_17, E_Locations.l_8_16);\n\t\tstate.p_Connected.set(E_Locations.l_3_10, E_Locations.l_2_10);\n\t\tstate.p_Connected.set(E_Locations.l_18_9, E_Locations.l_17_9);\n\t\tstate.p_Connected.set(E_Locations.l_1_9, E_Locations.l_1_8);\n\t\tstate.p_Connected.set(E_Locations.l_9_20, E_Locations.l_9_19);\n\t\tstate.p_Connected.set(E_Locations.l_18_6, E_Locations.l_18_7);\n\t\tstate.p_Connected.set(E_Locations.l_2_15, E_Locations.l_3_15);\n\t\tstate.p_Connected.set(E_Locations.l_8_10, E_Locations.l_7_10);\n\t\tstate.p_Connected.set(E_Locations.l_20_17, E_Locations.l_19_17);\n\t\tstate.p_Connected.set(E_Locations.l_6_18, E_Locations.l_6_17);\n\t\tstate.p_Connected.set(E_Locations.l_4_18, E_Locations.l_4_19);\n\t\tstate.p_Connected.set(E_Locations.l_8_1, E_Locations.l_8_2);\n\t\tstate.p_Connected.set(E_Locations.l_13_12, E_Locations.l_13_13);\n\t\tstate.p_Connected.set(E_Locations.l_7_2, E_Locations.l_6_2);\n\t\tstate.p_Connected.set(E_Locations.l_15_10, E_Locations.l_14_10);\n\t\tstate.p_Connected.set(E_Locations.l_15_14, E_Locations.l_15_13);\n\t\tstate.p_Connected.set(E_Locations.l_7_13, E_Locations.l_7_14);\n\t\tstate.p_Connected.set(E_Locations.l_8_16, E_Locations.l_9_16);\n\t\tstate.p_Connected.set(E_Locations.l_15_9, E_Locations.l_14_9);\n\t\tstate.p_Connected.set(E_Locations.l_15_1, E_Locations.l_14_1);\n\t\tstate.p_Connected.set(E_Locations.l_1_4, E_Locations.l_1_5);\n\t\tstate.p_Connected.set(E_Locations.l_14_14, E_Locations.l_14_15);\n\t\tstate.p_Connected.set(E_Locations.l_12_11, E_Locations.l_13_11);\n\t\tstate.p_Connected.set(E_Locations.l_15_9, E_Locations.l_16_9);\n\t\tstate.p_Connected.set(E_Locations.l_19_10, E_Locations.l_19_11);\n\t\tstate.p_Connected.set(E_Locations.l_15_8, E_Locations.l_15_9);\n\t\tstate.p_Connected.set(E_Locations.l_9_8, E_Locations.l_10_8);\n\t\tstate.p_Connected.set(E_Locations.l_2_5, E_Locations.l_3_5);\n\t\tstate.p_Connected.set(E_Locations.l_19_9, E_Locations.l_19_8);\n\t\tstate.p_Connected.set(E_Locations.l_4_15, E_Locations.l_4_14);\n\t\tstate.p_Connected.set(E_Locations.l_19_6, E_Locations.l_20_6);\n\t\tstate.p_Connected.set(E_Locations.l_16_20, E_Locations.l_17_20);\n\t\tstate.p_Connected.set(E_Locations.l_12_9, E_Locations.l_13_9);\n\t\tstate.p_Connected.set(E_Locations.l_17_12, E_Locations.l_18_12);\n\t\tstate.p_Connected.set(E_Locations.l_18_12, E_Locations.l_17_12);\n\t\tstate.p_Connected.set(E_Locations.l_9_17, E_Locations.l_9_16);\n\t\tstate.p_Connected.set(E_Locations.l_20_14, E_Locations.l_20_13);\n\t\tstate.p_Connected.set(E_Locations.l_4_8, E_Locations.l_3_8);\n\t\tstate.p_Connected.set(E_Locations.l_10_16, E_Locations.l_9_16);\n\t\tstate.p_Connected.set(E_Locations.l_19_14, E_Locations.l_19_13);\n\t\tstate.p_Connected.set(E_Locations.l_17_19, E_Locations.l_18_19);\n\t\tstate.p_Connected.set(E_Locations.l_2_2, E_Locations.l_3_2);\n\t\tstate.p_Connected.set(E_Locations.l_3_14, E_Locations.l_3_13);\n\t\tstate.p_Connected.set(E_Locations.l_4_5, E_Locations.l_5_5);\n\t\tstate.p_Connected.set(E_Locations.l_17_16, E_Locations.l_16_16);\n\t\tstate.p_Connected.set(E_Locations.l_8_1, E_Locations.l_9_1);\n\t\tstate.p_Connected.set(E_Locations.l_9_8, E_Locations.l_9_7);\n\t\tstate.p_Connected.set(E_Locations.l_13_11, E_Locations.l_14_11);\n\t\tstate.p_Connected.set(E_Locations.l_5_11, E_Locations.l_5_10);\n\t\tstate.p_Connected.set(E_Locations.l_9_8, 
E_Locations.l_9_9);\n\t\tstate.p_Connected.set(E_Locations.l_14_11, E_Locations.l_14_10);\n\t\tstate.p_Connected.set(E_Locations.l_20_11, E_Locations.l_19_11);\n\t\tstate.p_Connected.set(E_Locations.l_1_13, E_Locations.l_2_13);\n\t\tstate.p_Connected.set(E_Locations.l_4_2, E_Locations.l_4_1);\n\t\tstate.p_Connected.set(E_Locations.l_6_1, E_Locations.l_6_2);\n\t\tstate.p_Connected.set(E_Locations.l_14_15, E_Locations.l_13_15);\n\t\tstate.p_Connected.set(E_Locations.l_16_12, E_Locations.l_17_12);\n\t\tstate.p_Connected.set(E_Locations.l_20_3, E_Locations.l_19_3);\n\t\tstate.p_Connected.set(E_Locations.l_7_7, E_Locations.l_7_8);\n\t\tstate.p_Connected.set(E_Locations.l_10_8, E_Locations.l_10_7);\n\t\tstate.p_Connected.set(E_Locations.l_20_7, E_Locations.l_20_8);\n\t\tstate.p_Connected.set(E_Locations.l_5_17, E_Locations.l_5_16);\n\t\tstate.p_Connected.set(E_Locations.l_16_3, E_Locations.l_15_3);\n\t\tstate.p_Connected.set(E_Locations.l_19_13, E_Locations.l_19_14);\n\t\tstate.p_Connected.set(E_Locations.l_17_9, E_Locations.l_17_8);\n\t\tstate.p_Connected.set(E_Locations.l_13_8, E_Locations.l_13_7);\n\t\tstate.p_Connected.set(E_Locations.l_10_20, E_Locations.l_11_20);\n\t\tstate.p_Connected.set(E_Locations.l_6_7, E_Locations.l_6_8);\n\t\tstate.p_Connected.set(E_Locations.l_17_11, E_Locations.l_17_10);\n\t\tstate.p_Connected.set(E_Locations.l_3_11, E_Locations.l_2_11);\n\t\tstate.p_Connected.set(E_Locations.l_13_2, E_Locations.l_13_3);\n\t\tstate.p_Connected.set(E_Locations.l_6_12, E_Locations.l_6_11);\n\t\tstate.p_Connected.set(E_Locations.l_11_9, E_Locations.l_11_8);\n\t\tstate.p_Connected.set(E_Locations.l_20_1, E_Locations.l_20_2);\n\t\tstate.p_Connected.set(E_Locations.l_14_5, E_Locations.l_15_5);\n\t\tstate.p_Connected.set(E_Locations.l_9_3, E_Locations.l_9_2);\n\t\tstate.p_Connected.set(E_Locations.l_17_2, E_Locations.l_17_1);\n\t\tstate.p_Connected.set(E_Locations.l_2_10, E_Locations.l_2_11);\n\t\tstate.p_Connected.set(E_Locations.l_7_13, E_Locations.l_7_12);\n\t\tstate.p_Connected.set(E_Locations.l_11_2, E_Locations.l_11_3);\n\t\tstate.p_Connected.set(E_Locations.l_10_7, E_Locations.l_10_6);\n\t\tstate.p_Connected.set(E_Locations.l_15_10, E_Locations.l_15_11);\n\t\tstate.p_Connected.set(E_Locations.l_14_11, E_Locations.l_13_11);\n\t\tstate.p_Connected.set(E_Locations.l_3_13, E_Locations.l_3_12);\n\t\tstate.p_Connected.set(E_Locations.l_11_6, E_Locations.l_10_6);\n\t\tstate.p_Connected.set(E_Locations.l_19_19, E_Locations.l_20_19);\n\t\tstate.p_Connected.set(E_Locations.l_6_12, E_Locations.l_5_12);\n\t\tstate.p_Connected.set(E_Locations.l_9_4, E_Locations.l_9_3);\n\t\tstate.p_Connected.set(E_Locations.l_14_12, E_Locations.l_15_12);\n\t\tstate.p_Connected.set(E_Locations.l_14_2, E_Locations.l_13_2);\n\t\tstate.p_Connected.set(E_Locations.l_14_14, E_Locations.l_15_14);\n\t\tstate.p_Connected.set(E_Locations.l_12_14, 
E_Locations.l_11_14);\n\t\t\n\n\t\tstate.p_Accessible.set(E_Locations.l_7_19);\n\t\tstate.p_Accessible.set(E_Locations.l_14_1);\n\t\tstate.p_Accessible.set(E_Locations.l_3_20);\n\t\tstate.p_Accessible.set(E_Locations.l_3_13);\n\t\tstate.p_Accessible.set(E_Locations.l_16_19);\n\t\tstate.p_Accessible.set(E_Locations.l_9_19);\n\t\tstate.p_Accessible.set(E_Locations.l_1_16);\n\t\tstate.p_Accessible.set(E_Locations.l_10_16);\n\t\tstate.p_Accessible.set(E_Locations.l_6_1);\n\t\tstate.p_Accessible.set(E_Locations.l_9_8);\n\t\tstate.p_Accessible.set(E_Locations.l_3_6);\n\t\tstate.p_Accessible.set(E_Locations.l_14_9);\n\t\tstate.p_Accessible.set(E_Locations.l_20_15);\n\t\tstate.p_Accessible.set(E_Locations.l_14_11);\n\t\tstate.p_Accessible.set(E_Locations.l_7_15);\n\t\tstate.p_Accessible.set(E_Locations.l_18_8);\n\t\tstate.p_Accessible.set(E_Locations.l_9_14);\n\t\tstate.p_Accessible.set(E_Locations.l_15_14);\n\t\tstate.p_Accessible.set(E_Locations.l_18_10);\n\t\tstate.p_Accessible.set(E_Locations.l_8_13);\n\t\tstate.p_Accessible.set(E_Locations.l_11_6);\n\t\tstate.p_Accessible.set(E_Locations.l_18_5);\n\t\tstate.p_Accessible.set(E_Locations.l_12_7);\n\t\tstate.p_Accessible.set(E_Locations.l_19_9);\n\t\tstate.p_Accessible.set(E_Locations.l_8_20);\n\t\tstate.p_Accessible.set(E_Locations.l_16_7);\n\t\tstate.p_Accessible.set(E_Locations.l_20_7);\n\t\tstate.p_Accessible.set(E_Locations.l_1_13);\n\t\tstate.p_Accessible.set(E_Locations.l_16_6);\n\t\tstate.p_Accessible.set(E_Locations.l_17_20);\n\t\tstate.p_Accessible.set(E_Locations.l_18_4);\n\t\tstate.p_Accessible.set(E_Locations.l_5_18);\n\t\tstate.p_Accessible.set(E_Locations.l_11_1);\n\t\tstate.p_Accessible.set(E_Locations.l_8_9);\n\t\tstate.p_Accessible.set(E_Locations.l_20_5);\n\t\tstate.p_Accessible.set(E_Locations.l_15_20);\n\t\tstate.p_Accessible.set(E_Locations.l_1_1);\n\t\tstate.p_Accessible.set(E_Locations.l_2_7);\n\t\tstate.p_Accessible.set(E_Locations.l_15_7);\n\t\tstate.p_Accessible.set(E_Locations.l_10_11);\n\t\tstate.p_Accessible.set(E_Locations.l_15_11);\n\t\tstate.p_Accessible.set(E_Locations.l_7_12);\n\t\tstate.p_Accessible.set(E_Locations.l_14_12);\n\t\tstate.p_Accessible.set(E_Locations.l_10_8);\n\t\tstate.p_Accessible.set(E_Locations.l_6_9);\n\t\tstate.p_Accessible.set(E_Locations.l_8_1);\n\t\tstate.p_Accessible.set(E_Locations.l_20_20);\n\t\tstate.p_Accessible.set(E_Locations.l_15_13);\n\t\tstate.p_Accessible.set(E_Locations.l_15_1);\n\t\tstate.p_Accessible.set(E_Locations.l_7_9);\n\t\tstate.p_Accessible.set(E_Locations.l_20_6);\n\t\tstate.p_Accessible.set(E_Locations.l_18_15);\n\t\tstate.p_Accessible.set(E_Locations.l_13_4);\n\t\tstate.p_Accessible.set(E_Locations.l_14_6);\n\t\tstate.p_Accessible.set(E_Locations.l_5_7);\n\t\tstate.p_Accessible.set(E_Locations.l_12_17);\n\t\tstate.p_Accessible.set(E_Locations.l_7_18);\n\t\tstate.p_Accessible.set(E_Locations.l_7_4);\n\t\tstate.p_Accessible.set(E_Locations.l_11_7);\n\t\tstate.p_Accessible.set(E_Locations.l_6_15);\n\t\tstate.p_Accessible.set(E_Locations.l_3_2);\n\t\tstate.p_Accessible.set(E_Locations.l_12_6);\n\t\tstate.p_Accessible.set(E_Locations.l_4_8);\n\t\tstate.p_Accessible.set(E_Locations.l_3_5);\n\t\tstate.p_Accessible.set(E_Locations.l_16_3);\n\t\tstate.p_Accessible.set(E_Locations.l_14_20);\n\t\tstate.p_Accessible.set(E_Locations.l_12_1);\n\t\tstate.p_Accessible.set(E_Locations.l_8_14);\n\t\tstate.p_Accessible.set(E_Locations.l_13_9);\n\t\tstate.p_Accessible.set(E_Locations.l_9_13);\n\t\tstate.p_Accessible.set(E_Locations.l_9_18);\n\t\tstate.p_Accessible.set(E_Locations.l_10
_4);\n\t\tstate.p_Accessible.set(E_Locations.l_6_20);\n\t\tstate.p_Accessible.set(E_Locations.l_7_13);\n\t\tstate.p_Accessible.set(E_Locations.l_12_9);\n\t\tstate.p_Accessible.set(E_Locations.l_19_18);\n\t\tstate.p_Accessible.set(E_Locations.l_4_6);\n\t\tstate.p_Accessible.set(E_Locations.l_1_17);\n\t\tstate.p_Accessible.set(E_Locations.l_1_5);\n\t\tstate.p_Accessible.set(E_Locations.l_5_19);\n\t\tstate.p_Accessible.set(E_Locations.l_20_1);\n\t\tstate.p_Accessible.set(E_Locations.l_13_13);\n\t\tstate.p_Accessible.set(E_Locations.l_15_15);\n\t\tstate.p_Accessible.set(E_Locations.l_17_9);\n\t\tstate.p_Accessible.set(E_Locations.l_19_17);\n\t\tstate.p_Accessible.set(E_Locations.l_4_17);\n\t\tstate.p_Accessible.set(E_Locations.l_18_12);\n\t\tstate.p_Accessible.set(E_Locations.l_5_8);\n\t\tstate.p_Accessible.set(E_Locations.l_4_10);\n\t\tstate.p_Accessible.set(E_Locations.l_16_20);\n\t\tstate.p_Accessible.set(E_Locations.l_9_11);\n\t\tstate.p_Accessible.set(E_Locations.l_4_2);\n\t\tstate.p_Accessible.set(E_Locations.l_1_6);\n\t\tstate.p_Accessible.set(E_Locations.l_3_18);\n\t\tstate.p_Accessible.set(E_Locations.l_13_20);\n\t\tstate.p_Accessible.set(E_Locations.l_4_14);\n\t\tstate.p_Accessible.set(E_Locations.l_17_8);\n\t\tstate.p_Accessible.set(E_Locations.l_2_17);\n\t\tstate.p_Accessible.set(E_Locations.l_12_2);\n\t\tstate.p_Accessible.set(E_Locations.l_7_5);\n\t\tstate.p_Accessible.set(E_Locations.l_7_8);\n\t\tstate.p_Accessible.set(E_Locations.l_15_17);\n\t\tstate.p_Accessible.set(E_Locations.l_16_17);\n\t\tstate.p_Accessible.set(E_Locations.l_17_19);\n\t\tstate.p_Accessible.set(E_Locations.l_19_8);\n\t\tstate.p_Accessible.set(E_Locations.l_14_10);\n\t\tstate.p_Accessible.set(E_Locations.l_7_2);\n\t\tstate.p_Accessible.set(E_Locations.l_19_10);\n\t\tstate.p_Accessible.set(E_Locations.l_10_3);\n\t\tstate.p_Accessible.set(E_Locations.l_2_10);\n\t\tstate.p_Accessible.set(E_Locations.l_4_13);\n\t\tstate.p_Accessible.set(E_Locations.l_2_1);\n\t\tstate.p_Accessible.set(E_Locations.l_20_11);\n\t\tstate.p_Accessible.set(E_Locations.l_1_7);\n\t\tstate.p_Accessible.set(E_Locations.l_13_3);\n\t\tstate.p_Accessible.set(E_Locations.l_4_4);\n\t\tstate.p_Accessible.set(E_Locations.l_8_4);\n\t\tstate.p_Accessible.set(E_Locations.l_5_10);\n\t\tstate.p_Accessible.set(E_Locations.l_17_6);\n\t\tstate.p_Accessible.set(E_Locations.l_17_5);\n\t\tstate.p_Accessible.set(E_Locations.l_17_1);\n\t\tstate.p_Accessible.set(E_Locations.l_6_10);\n\t\tstate.p_Accessible.set(E_Locations.l_17_11);\n\t\tstate.p_Accessible.set(E_Locations.l_15_18);\n\t\tstate.p_Accessible.set(E_Locations.l_12_11);\n\t\tstate.p_Accessible.set(E_Locations.l_7_10);\n\t\tstate.p_Accessible.set(E_Locations.l_10_7);\n\t\tstate.p_Accessible.set(E_Locations.l_15_12);\n\t\tstate.p_Accessible.set(E_Locations.l_3_12);\n\t\tstate.p_Accessible.set(E_Locations.l_8_16);\n\t\tstate.p_Accessible.set(E_Locations.l_12_19);\n\t\tstate.p_Accessible.set(E_Locations.l_2_20);\n\t\tstate.p_Accessible.set(E_Locations.l_5_1);\n\t\tstate.p_Accessible.set(E_Locations.l_13_17);\n\t\tstate.p_Accessible.set(E_Locations.l_17_7);\n\t\tstate.p_Accessible.set(E_Locations.l_11_14);\n\t\tstate.p_Accessible.set(E_Locations.l_3_10);\n\t\tstate.p_Accessible.set(E_Locations.l_15_16);\n\t\tstate.p_Accessible.set(E_Locations.l_19_3);\n\t\tstate.p_Accessible.set(E_Locations.l_18_17);\n\t\tstate.p_Accessible.set(E_Locations.l_8_6);\n\t\tstate.p_Accessible.set(E_Locations.l_2_3);\n\t\tstate.p_Accessible.set(E_Locations.l_11_15);\n\t\tstate.p_Accessible.set(E_Locations.l_15_5);\n\t\tstate.p_A
ccessible.set(E_Locations.l_20_13);\n\t\tstate.p_Accessible.set(E_Locations.l_5_14);\n\t\tstate.p_Accessible.set(E_Locations.l_17_14);\n\t\tstate.p_Accessible.set(E_Locations.l_11_2);\n\t\tstate.p_Accessible.set(E_Locations.l_20_2);\n\t\tstate.p_Accessible.set(E_Locations.l_15_8);\n\t\tstate.p_Accessible.set(E_Locations.l_18_16);\n\t\tstate.p_Accessible.set(E_Locations.l_10_5);\n\t\tstate.p_Accessible.set(E_Locations.l_10_18);\n\t\tstate.p_Accessible.set(E_Locations.l_16_14);\n\t\tstate.p_Accessible.set(E_Locations.l_17_4);\n\t\tstate.p_Accessible.set(E_Locations.l_11_12);\n\t\tstate.p_Accessible.set(E_Locations.l_11_17);\n\t\tstate.p_Accessible.set(E_Locations.l_16_1);\n\t\tstate.p_Accessible.set(E_Locations.l_4_15);\n\t\tstate.p_Accessible.set(E_Locations.l_4_19);\n\t\tstate.p_Accessible.set(E_Locations.l_11_8);\n\t\tstate.p_Accessible.set(E_Locations.l_18_2);\n\t\tstate.p_Accessible.set(E_Locations.l_9_16);\n\t\tstate.p_Accessible.set(E_Locations.l_2_13);\n\t\tstate.p_Accessible.set(E_Locations.l_2_18);\n\t\tstate.p_Accessible.set(E_Locations.l_5_17);\n\t\tstate.p_Accessible.set(E_Locations.l_7_20);\n\t\tstate.p_Accessible.set(E_Locations.l_16_18);\n\t\tstate.p_Accessible.set(E_Locations.l_10_19);\n\t\tstate.p_Accessible.set(E_Locations.l_7_11);\n\t\tstate.p_Accessible.set(E_Locations.l_18_14);\n\t\tstate.p_Accessible.set(E_Locations.l_12_12);\n\t\tstate.p_Accessible.set(E_Locations.l_6_6);\n\t\tstate.p_Accessible.set(E_Locations.l_16_16);\n\t\tstate.p_Accessible.set(E_Locations.l_18_13);\n\t\tstate.p_Accessible.set(E_Locations.l_12_8);\n\t\tstate.p_Accessible.set(E_Locations.l_6_19);\n\t\tstate.p_Accessible.set(E_Locations.l_9_20);\n\t\tstate.p_Accessible.set(E_Locations.l_15_19);\n\t\tstate.p_Accessible.set(E_Locations.l_2_9);\n\t\tstate.p_Accessible.set(E_Locations.l_3_16);\n\t\tstate.p_Accessible.set(E_Locations.l_20_3);\n\t\tstate.p_Accessible.set(E_Locations.l_1_12);\n\t\tstate.p_Accessible.set(E_Locations.l_13_12);\n\t\tstate.p_Accessible.set(E_Locations.l_10_6);\n\t\tstate.p_Accessible.set(E_Locations.l_14_3);\n\t\tstate.p_Accessible.set(E_Locations.l_6_2);\n\t\tstate.p_Accessible.set(E_Locations.l_5_4);\n\t\tstate.p_Accessible.set(E_Locations.l_14_16);\n\t\tstate.p_Accessible.set(E_Locations.l_4_12);\n\t\tstate.p_Accessible.set(E_Locations.l_8_11);\n\t\tstate.p_Accessible.set(E_Locations.l_9_12);\n\t\tstate.p_Accessible.set(E_Locations.l_17_3);\n\t\tstate.p_Accessible.set(E_Locations.l_18_6);\n\t\tstate.p_Accessible.set(E_Locations.l_19_1);\n\t\tstate.p_Accessible.set(E_Locations.l_11_10);\n\t\tstate.p_Accessible.set(E_Locations.l_4_16);\n\t\tstate.p_Accessible.set(E_Locations.l_11_19);\n\t\tstate.p_Accessible.set(E_Locations.l_18_7);\n\t\tstate.p_Accessible.set(E_Locations.l_7_3);\n\t\tstate.p_Accessible.set(E_Locations.l_1_8);\n\t\tstate.p_Accessible.set(E_Locations.l_4_7);\n\t\tstate.p_Accessible.set(E_Locations.l_7_1);\n\t\tstate.p_Accessible.set(E_Locations.l_17_17);\n\t\tstate.p_Accessible.set(E_Locations.l_16_9);\n\t\tstate.p_Accessible.set(E_Locations.l_18_9);\n\t\tstate.p_Accessible.set(E_Locations.l_16_4);\n\t\tstate.p_Accessible.set(E_Locations.l_20_19);\n\t\tstate.p_Accessible.set(E_Locations.l_3_4);\n\t\tstate.p_Accessible.set(E_Locations.l_6_17);\n\t\tstate.p_Accessible.set(E_Locations.l_17_16);\n\t\tstate.p_Accessible.set(E_Locations.l_14_7);\n\t\tstate.p_Accessible.set(E_Locations.l_13_19);\n\t\tstate.p_Accessible.set(E_Locations.l_1_2);\n\t\tstate.p_Accessible.set(E_Locations.l_1_18);\n\t\tstate.p_Accessible.set(E_Locations.l_14_18);\n\t\tstate.p_Accessible.set(
E_Locations.l_7_17);\n\t\tstate.p_Accessible.set(E_Locations.l_2_8);\n\t\tstate.p_Accessible.set(E_Locations.l_13_18);\n\t\tstate.p_Accessible.set(E_Locations.l_13_14);\n\t\tstate.p_Accessible.set(E_Locations.l_19_20);\n\t\tstate.p_Accessible.set(E_Locations.l_3_1);\n\t\tstate.p_Accessible.set(E_Locations.l_2_14);\n\t\tstate.p_Accessible.set(E_Locations.l_20_17);\n\t\tstate.p_Accessible.set(E_Locations.l_19_19);\n\t\tstate.p_Accessible.set(E_Locations.l_9_2);\n\t\tstate.p_Accessible.set(E_Locations.l_3_3);\n\t\tstate.p_Accessible.set(E_Locations.l_1_3);\n\t\tstate.p_Accessible.set(E_Locations.l_14_19);\n\t\tstate.p_Accessible.set(E_Locations.l_10_15);\n\t\tstate.p_Accessible.set(E_Locations.l_1_20);\n\t\tstate.p_Accessible.set(E_Locations.l_16_12);\n\t\tstate.p_Accessible.set(E_Locations.l_13_2);\n\t\tstate.p_Accessible.set(E_Locations.l_14_2);\n\t\tstate.p_Accessible.set(E_Locations.l_7_6);\n\t\tstate.p_Accessible.set(E_Locations.l_12_16);\n\t\tstate.p_Accessible.set(E_Locations.l_18_20);\n\t\tstate.p_Accessible.set(E_Locations.l_8_8);\n\t\tstate.p_Accessible.set(E_Locations.l_5_15);\n\t\tstate.p_Accessible.set(E_Locations.l_12_18);\n\t\tstate.p_Accessible.set(E_Locations.l_5_16);\n\t\tstate.p_Accessible.set(E_Locations.l_1_9);\n\t\tstate.p_Accessible.set(E_Locations.l_6_3);\n\t\tstate.p_Accessible.set(E_Locations.l_2_15);\n\t\tstate.p_Accessible.set(E_Locations.l_19_4);\n\t\tstate.p_Accessible.set(E_Locations.l_14_8);\n\t\tstate.p_Accessible.set(E_Locations.l_17_18);\n\t\tstate.p_Accessible.set(E_Locations.l_18_3);\n\t\tstate.p_Accessible.set(E_Locations.l_10_10);\n\t\tstate.p_Accessible.set(E_Locations.l_16_8);\n\t\tstate.p_Accessible.set(E_Locations.l_5_9);\n\t\tstate.p_Accessible.set(E_Locations.l_6_18);\n\t\tstate.p_Accessible.set(E_Locations.l_19_5);\n\t\tstate.p_Accessible.set(E_Locations.l_11_11);\n\t\tstate.p_Accessible.set(E_Locations.l_20_14);\n\t\tstate.p_Accessible.set(E_Locations.l_10_12);\n\t\tstate.p_Accessible.set(E_Locations.l_12_3);\n\t\tstate.p_Accessible.set(E_Locations.l_10_2);\n\t\tstate.p_Accessible.set(E_Locations.l_4_1);\n\t\tstate.p_Accessible.set(E_Locations.l_8_15);\n\t\tstate.p_Accessible.set(E_Locations.l_17_2);\n\t\tstate.p_Accessible.set(E_Locations.l_15_2);\n\t\tstate.p_Accessible.set(E_Locations.l_5_12);\n\t\tstate.p_Accessible.set(E_Locations.l_6_14);\n\t\tstate.p_Accessible.set(E_Locations.l_15_4);\n\t\tstate.p_Accessible.set(E_Locations.l_1_4);\n\t\tstate.p_Accessible.set(E_Locations.l_15_3);\n\t\tstate.p_Accessible.set(E_Locations.l_3_14);\n\t\tstate.p_Accessible.set(E_Locations.l_3_8);\n\t\tstate.p_Accessible.set(E_Locations.l_9_10);\n\t\tstate.p_Accessible.set(E_Locations.l_7_14);\n\t\tstate.p_Accessible.set(E_Locations.l_11_13);\n\t\tstate.p_Accessible.set(E_Locations.l_13_16);\n\t\tstate.p_Accessible.set(E_Locations.l_10_17);\n\t\tstate.p_Accessible.set(E_Locations.l_3_9);\n\t\tstate.p_Accessible.set(E_Locations.l_6_5);\n\t\tstate.p_Accessible.set(E_Locations.l_5_2);\n\t\tstate.p_Accessible.set(E_Locations.l_19_12);\n\t\tstate.p_Accessible.set(E_Locations.l_8_19);\n\t\tstate.p_Accessible.set(E_Locations.l_4_11);\n\t\tstate.p_Accessible.set(E_Locations.l_9_15);\n\t\tstate.p_Accessible.set(E_Locations.l_9_5);\n\t\tstate.p_Accessible.set(E_Locations.l_15_9);\n\t\tstate.p_Accessible.set(E_Locations.l_16_11);\n\t\tstate.p_Accessible.set(E_Locations.l_13_8);\n\t\tstate.p_Accessible.set(E_Locations.l_11_16);\n\t\tstate.p_Accessible.set(E_Locations.l_7_16);\n\t\tstate.p_Accessible.set(E_Locations.l_13_6);\n\t\tstate.p_Accessible.set(E_Locations.l_11_20);\
n\t\tstate.p_Accessible.set(E_Locations.l_9_9);\n\t\tstate.p_Accessible.set(E_Locations.l_19_11);\n\t\tstate.p_Accessible.set(E_Locations.l_17_13);\n\t\tstate.p_Accessible.set(E_Locations.l_5_13);\n\t\tstate.p_Accessible.set(E_Locations.l_20_9);\n\t\tstate.p_Accessible.set(E_Locations.l_10_14);\n\t\tstate.p_Accessible.set(E_Locations.l_18_11);\n\t\tstate.p_Accessible.set(E_Locations.l_8_17);\n\t\tstate.p_Accessible.set(E_Locations.l_12_14);\n\t\tstate.p_Accessible.set(E_Locations.l_5_20);\n\t\tstate.p_Accessible.set(E_Locations.l_18_19);\n\t\tstate.p_Accessible.set(E_Locations.l_8_10);\n\t\tstate.p_Accessible.set(E_Locations.l_9_3);\n\t\tstate.p_Accessible.set(E_Locations.l_14_13);\n\t\tstate.p_Accessible.set(E_Locations.l_5_3);\n\t\tstate.p_Accessible.set(E_Locations.l_2_6);\n\t\tstate.p_Accessible.set(E_Locations.l_10_13);\n\t\tstate.p_Accessible.set(E_Locations.l_8_5);\n\t\tstate.p_Accessible.set(E_Locations.l_20_4);\n\t\tstate.p_Accessible.set(E_Locations.l_16_13);\n\t\tstate.p_Accessible.set(E_Locations.l_19_7);\n\t\tstate.p_Accessible.set(E_Locations.l_20_12);\n\t\tstate.p_Accessible.set(E_Locations.l_14_17);\n\t\tstate.p_Accessible.set(E_Locations.l_19_16);\n\t\tstate.p_Accessible.set(E_Locations.l_9_4);\n\t\tstate.p_Accessible.set(E_Locations.l_4_5);\n\t\tstate.p_Accessible.set(E_Locations.l_19_6);\n\t\tstate.p_Accessible.set(E_Locations.l_13_10);\n\t\tstate.p_Accessible.set(E_Locations.l_1_19);\n\t\tstate.p_Accessible.set(E_Locations.l_2_16);\n\t\tstate.p_Accessible.set(E_Locations.l_3_19);\n\t\tstate.p_Accessible.set(E_Locations.l_19_14);\n\t\tstate.p_Accessible.set(E_Locations.l_3_17);\n\t\tstate.p_Accessible.set(E_Locations.l_4_18);\n\t\tstate.p_Accessible.set(E_Locations.l_12_4);\n\t\tstate.p_Accessible.set(E_Locations.l_5_11);\n\t\tstate.p_Accessible.set(E_Locations.l_6_16);\n\t\tstate.p_Accessible.set(E_Locations.l_12_10);\n\t\tstate.p_Accessible.set(E_Locations.l_16_10);\n\t\tstate.p_Accessible.set(E_Locations.l_8_12);\n\t\tstate.p_Accessible.set(E_Locations.l_19_15);\n\t\tstate.p_Accessible.set(E_Locations.l_14_14);\n\t\tstate.p_Accessible.set(E_Locations.l_14_15);\n\t\tstate.p_Accessible.set(E_Locations.l_12_20);\n\t\tstate.p_Accessible.set(E_Locations.l_6_11);\n\t\tstate.p_Accessible.set(E_Locations.l_15_10);\n\t\tstate.p_Accessible.set(E_Locations.l_6_8);\n\t\tstate.p_Accessible.set(E_Locations.l_9_7);\n\t\tstate.p_Accessible.set(E_Locations.l_4_3);\n\t\tstate.p_Accessible.set(E_Locations.l_4_20);\n\t\tstate.p_Accessible.set(E_Locations.l_18_18);\n\t\tstate.p_Accessible.set(E_Locations.l_2_12);\n\t\tstate.p_Accessible.set(E_Locations.l_17_15);\n\t\tstate.p_Accessible.set(E_Locations.l_15_6);\n\t\tstate.p_Accessible.set(E_Locations.l_3_7);\n\t\tstate.p_Accessible.set(E_Locations.l_11_4);\n\t\tstate.p_Accessible.set(E_Locations.l_20_8);\n\t\tstate.p_Accessible.set(E_Locations.l_20_18);\n\t\tstate.p_Accessible.set(E_Locations.l_13_15);\n\t\tstate.p_Accessible.set(E_Locations.l_2_19);\n\t\tstate.p_Accessible.set(E_Locations.l_9_6);\n\t\tstate.p_Accessible.set(E_Locations.l_19_13);\n\t\tstate.p_Accessible.set(E_Locations.l_8_18);\n\t\tstate.p_Accessible.set(E_Locations.l_6_4);\n\t\tstate.p_Accessible.set(E_Locations.l_10_1);\n\t\tstate.p_Accessible.set(E_Locations.l_17_12);\n\t\tstate.p_Accessible.set(E_Locations.l_20_16);\n\t\tstate.p_Accessible.set(E_Locations.l_19_2);\n\t\tstate.p_Accessible.set(E_Locations.l_8_7);\n\t\tstate.p_Accessible.set(E_Locations.l_3_15);\n\t\tstate.p_Accessible.set(E_Locations.l_13_7);\n\t\tstate.p_Accessible.set(E_Locations.l_9_1);\n\t\tstate.p_A
ccessible.set(E_Locations.l_14_5);\n\t\tstate.p_Accessible.set(E_Locations.l_7_7);\n\t\tstate.p_Accessible.set(E_Locations.l_6_12);\n\t\tstate.p_Accessible.set(E_Locations.l_1_11);\n\t\tstate.p_Accessible.set(E_Locations.l_10_20);\n\t\tstate.p_Accessible.set(E_Locations.l_10_9);\n\t\tstate.p_Accessible.set(E_Locations.l_12_5);\n\t\tstate.p_Accessible.set(E_Locations.l_1_14);\n\t\tstate.p_Accessible.set(E_Locations.l_12_13);\n\t\tstate.p_Accessible.set(E_Locations.l_18_1);\n\t\tstate.p_Accessible.set(E_Locations.l_16_15);\n\t\tstate.p_Accessible.set(E_Locations.l_2_5);\n\t\tstate.p_Accessible.set(E_Locations.l_1_15);\n\t\tstate.p_Accessible.set(E_Locations.l_4_9);\n\t\tstate.p_Accessible.set(E_Locations.l_13_5);\n\t\tstate.p_Accessible.set(E_Locations.l_8_2);\n\t\tstate.p_Accessible.set(E_Locations.l_11_5);\n\t\tstate.p_Accessible.set(E_Locations.l_2_4);\n\t\tstate.p_Accessible.set(E_Locations.l_16_2);\n\t\tstate.p_Accessible.set(E_Locations.l_9_17);\n\t\tstate.p_Accessible.set(E_Locations.l_6_13);\n\t\tstate.p_Accessible.set(E_Locations.l_11_3);\n\t\tstate.p_Accessible.set(E_Locations.l_2_2);\n\t\tstate.p_Accessible.set(E_Locations.l_17_10);\n\t\tstate.p_Accessible.set(E_Locations.l_8_3);\n\t\tstate.p_Accessible.set(E_Locations.l_20_10);\n\t\tstate.p_Accessible.set(E_Locations.l_1_10);\n\t\tstate.p_Accessible.set(E_Locations.l_2_11);\n\t\tstate.p_Accessible.set(E_Locations.l_11_18);\n\t\tstate.p_Accessible.set(E_Locations.l_12_15);\n\t\tstate.p_Accessible.set(E_Locations.l_13_1);\n\t\tstate.p_Accessible.set(E_Locations.l_6_7);\n\t\tstate.p_Accessible.set(E_Locations.l_5_5);\n\t\tstate.p_Accessible.set(E_Locations.l_5_6);\n\t\tstate.p_Accessible.set(E_Locations.l_11_9);\n\t\tstate.p_Accessible.set(E_Locations.l_3_11);\n\t\tstate.p_Accessible.set(E_Locations.l_13_11);\n\t\tstate.p_Accessible.set(E_Locations.l_14_4);\n\t\tstate.p_Accessible.set(E_Locations.l_16_5);\n\t\t\n\n\t\tstate.p_Solid.set(E_Locations.l_16_8);\n\t\tstate.p_Solid.set(E_Locations.l_17_9);\n\t\tstate.p_Solid.set(E_Locations.l_8_4);\n\t\tstate.p_Solid.set(E_Locations.l_15_17);\n\t\tstate.p_Solid.set(E_Locations.l_2_3);\n\t\tstate.p_Solid.set(E_Locations.l_10_7);\n\t\tstate.p_Solid.set(E_Locations.l_3_19);\n\t\tstate.p_Solid.set(E_Locations.l_8_19);\n\t\tstate.p_Solid.set(E_Locations.l_18_13);\n\t\tstate.p_Solid.set(E_Locations.l_11_14);\n\t\tstate.p_Solid.set(E_Locations.l_16_18);\n\t\tstate.p_Solid.set(E_Locations.l_15_12);\n\t\tstate.p_Solid.set(E_Locations.l_8_16);\n\t\tstate.p_Solid.set(E_Locations.l_5_4);\n\t\tstate.p_Solid.set(E_Locations.l_17_14);\n\t\tstate.p_Solid.set(E_Locations.l_8_2);\n\t\tstate.p_Solid.set(E_Locations.l_5_20);\n\t\tstate.p_Solid.set(E_Locations.l_8_3);\n\t\tstate.p_Solid.set(E_Locations.l_16_14);\n\t\tstate.p_Solid.set(E_Locations.l_6_16);\n\t\tstate.p_Solid.set(E_Locations.l_16_10);\n\t\tstate.p_Solid.set(E_Locations.l_6_4);\n\t\tstate.p_Solid.set(E_Locations.l_10_13);\n\t\tstate.p_Solid.set(E_Locations.l_14_3);\n\t\tstate.p_Solid.set(E_Locations.l_7_19);\n\t\tstate.p_Solid.set(E_Locations.l_16_15);\n\t\tstate.p_Solid.set(E_Locations.l_3_1);\n\t\tstate.p_Solid.set(E_Locations.l_17_10);\n\t\tstate.p_Solid.set(E_Locations.l_1_8);\n\t\tstate.p_Solid.set(E_Locations.l_10_15);\n\t\tstate.p_Solid.set(E_Locations.l_16_4);\n\t\tstate.p_Solid.set(E_Locations.l_18_8);\n\t\tstate.p_Solid.set(E_Locations.l_10_20);\n\t\tstate.p_Solid.set(E_Locations.l_1_14);\n\t\tstate.p_Solid.set(E_Locations.l_13_19);\n\t\tstate.p_Solid.set(E_Locations.l_12_12);\n\t\tstate.p_Solid.set(E_Locations.l_20_7);\n\t\tstate.p_Soli
d.set(E_Locations.l_20_17);\n\t\tstate.p_Solid.set(E_Locations.l_6_3);\n\t\tstate.p_Solid.set(E_Locations.l_1_16);\n\t\tstate.p_Solid.set(E_Locations.l_18_3);\n\t\tstate.p_Solid.set(E_Locations.l_10_10);\n\t\tstate.p_Solid.set(E_Locations.l_1_1);\n\t\tstate.p_Solid.set(E_Locations.l_19_13);\n\t\tstate.p_Solid.set(E_Locations.l_19_5);\n\t\tstate.p_Solid.set(E_Locations.l_16_12);\n\t\tstate.p_Solid.set(E_Locations.l_14_12);\n\t\tstate.p_Solid.set(E_Locations.l_9_14);\n\t\tstate.p_Solid.set(E_Locations.l_14_4);\n\t\tstate.p_Solid.set(E_Locations.l_12_13);\n\t\tstate.p_Solid.set(E_Locations.l_18_1);\n\t\tstate.p_Solid.set(E_Locations.l_13_18);\n\t\tstate.p_Solid.set(E_Locations.l_9_17);\n\t\tstate.p_Solid.set(E_Locations.l_9_2);\n\t\tstate.p_Solid.set(E_Locations.l_9_8);\n\t\tstate.p_Solid.set(E_Locations.l_7_1);\n\t\tstate.p_Solid.set(E_Locations.l_11_11);\n\t\tstate.p_Solid.set(E_Locations.l_7_12);\n\t\tstate.p_Solid.set(E_Locations.l_3_4);\n\t\tstate.p_Solid.set(E_Locations.l_10_8);\n\t\tstate.p_Solid.set(E_Locations.l_11_9);\n\t\tstate.p_Solid.set(E_Locations.l_15_14);\n\t\tstate.p_Solid.set(E_Locations.l_15_13);\n\t\tstate.p_Solid.set(E_Locations.l_14_7);\n\t\tstate.p_Solid.set(E_Locations.l_12_16);\n\t\tstate.p_Solid.set(E_Locations.l_18_15);\n\t\tstate.p_Solid.set(E_Locations.l_10_16);\n\t\tstate.p_Solid.set(E_Locations.l_8_9);\n\t\tstate.p_Solid.set(E_Locations.l_19_19);\n\t\tstate.p_Solid.set(E_Locations.l_12_17);\n\t\tstate.p_Solid.set(E_Locations.l_4_6);\n\t\tstate.p_Solid.set(E_Locations.l_10_12);\n\t\tstate.p_Solid.set(E_Locations.l_14_19);\n\t\tstate.p_Solid.set(E_Locations.l_11_7);\n\t\tstate.p_Solid.set(E_Locations.l_14_20);\n\t\tstate.p_Solid.set(E_Locations.l_5_2);\n\t\tstate.p_Solid.set(E_Locations.l_20_9);\n\t\tstate.p_Solid.set(E_Locations.l_15_1);\n\t\tstate.p_Solid.set(E_Locations.l_16_6);\n\t\tstate.p_Solid.set(E_Locations.l_18_4);\n\t\tstate.p_Solid.set(E_Locations.l_14_6);\n\t\tstate.p_Solid.set(E_Locations.l_8_10);\n\t\tstate.p_Solid.set(E_Locations.l_9_3);\n\t\tstate.p_Solid.set(E_Locations.l_19_4);\n\t\tstate.p_Solid.set(E_Locations.l_11_16);\n\t\tstate.p_Solid.set(E_Locations.l_4_14);\n\t\tstate.p_Solid.set(E_Locations.l_13_6);\n\t\tstate.p_Solid.set(E_Locations.l_13_13);\n\t\tstate.p_Solid.set(E_Locations.l_7_14);\n\t\tstate.p_Solid.set(E_Locations.l_19_17);\n\t\tstate.p_Solid.set(E_Locations.l_2_16);\n\t\tstate.p_Solid.set(E_Locations.l_19_14);\n\t\tstate.p_Solid.set(E_Locations.l_12_4);\n\t\tstate.p_Solid.set(E_Locations.l_20_6);\n\t\tstate.p_Solid.set(E_Locations.l_5_11);\n\t\tstate.p_Solid.set(E_Locations.l_1_17);\n\t\tstate.p_Solid.set(E_Locations.l_12_2);\n\t\tstate.p_Solid.set(E_Locations.l_4_3);\n\t\tstate.p_Solid.set(E_Locations.l_5_10);\n\t\tstate.p_Solid.set(E_Locations.l_6_5);\n\t\tstate.p_Solid.set(E_Locations.l_14_10);\n\t\tstate.p_Solid.set(E_Locations.l_8_17);\n\t\tstate.p_Solid.set(E_Locations.l_12_14);\n\t\tstate.p_Solid.set(E_Locations.l_18_12);\n\t\tstate.p_Solid.set(E_Locations.l_9_18);\n\t\tstate.p_Solid.set(E_Locations.l_6_20);\n\t\tstate.p_Solid.set(E_Locations.l_9_11);\n\t\tstate.p_Solid.set(E_Locations.l_12_10);\n\t\tstate.p_Solid.set(E_Locations.l_13_8);\n\t\tstate.p_Solid.set(E_Locations.l_2_19);\n\t\tstate.p_Solid.set(E_Locations.l_19_18);\n\t\tstate.p_Solid.set(E_Locations.l_1_5);\n\t\tstate.p_Solid.set(E_Locations.l_9_7);\n\t\tstate.p_Solid.set(E_Locations.l_14_5);\n\t\tstate.p_Solid.set(E_Locations.l_6_12);\n\t\tstate.p_Solid.set(E_Locations.l_15_6);\n\t\tstate.p_Solid.set(E_Locations.l_20_8);\n\t\tstate.p_Solid.set(E_Locations.l_1_
10);\n\t\tstate.p_Solid.set(E_Locations.l_11_2);\n\t\tstate.p_Solid.set(E_Locations.l_14_13);\n\t\tstate.p_Solid.set(E_Locations.l_14_14);\n\t\tstate.p_Solid.set(E_Locations.l_18_16);\n\t\tstate.p_Solid.set(E_Locations.l_17_12);\n\t\tstate.p_Solid.set(E_Locations.l_9_1);\n\t\tstate.p_Solid.set(E_Locations.l_7_2);\n\t\tstate.p_Solid.set(E_Locations.l_10_9);\n\t\tstate.p_Solid.set(E_Locations.l_2_5);\n\t\tstate.p_Solid.set(E_Locations.l_3_7);\n\t\tstate.p_Solid.set(E_Locations.l_18_14);\n\t\tstate.p_Solid.set(E_Locations.l_6_1);\n\t\tstate.p_Solid.set(E_Locations.l_20_2);\n\t\tstate.p_Solid.set(E_Locations.l_3_6);\n\t\tstate.p_Solid.set(E_Locations.l_17_4);\n\t\tstate.p_Solid.set(E_Locations.l_3_16);\n\t\tstate.p_Solid.set(E_Locations.l_20_3);\n\t\tstate.p_Solid.set(E_Locations.l_2_18);\n\t\tstate.p_Solid.set(E_Locations.l_1_13);\n\t\tstate.p_Solid.set(E_Locations.l_7_10);\n\t\tstate.p_Solid.set(E_Locations.l_17_20);\n\t\tstate.p_Solid.set(E_Locations.l_20_13);\n\t\tstate.p_Solid.set(E_Locations.l_11_1);\n\t\tstate.p_Solid.set(E_Locations.l_5_1);\n\t\tstate.p_Solid.set(E_Locations.l_18_6);\n\t\tstate.p_Solid.set(E_Locations.l_20_20);\n\t\tstate.p_Solid.set(E_Locations.l_18_5);\n\t\tstate.p_Solid.set(E_Locations.l_7_20);\n\t\tstate.p_Solid.set(E_Locations.l_14_1);\n\t\tstate.p_Solid.set(E_Locations.l_16_2);\n\t\tstate.p_Solid.set(E_Locations.l_11_3);\n\t\tstate.p_Solid.set(E_Locations.l_2_2);\n\t\tstate.p_Solid.set(E_Locations.l_10_5);\n\t\tstate.p_Solid.set(E_Locations.l_9_20);\n\t\tstate.p_Solid.set(E_Locations.l_12_6);\n\t\tstate.p_Solid.set(E_Locations.l_5_7);\n\t\tstate.p_Solid.set(E_Locations.l_2_8);\n\t\tstate.p_Solid.set(E_Locations.l_4_12);\n\t\tstate.p_Solid.set(E_Locations.l_2_15);\n\t\tstate.p_Solid.set(E_Locations.l_14_8);\n\t\tstate.p_Solid.set(E_Locations.l_12_7);\n\t\tstate.p_Solid.set(E_Locations.l_10_4);\n\t\tstate.p_Solid.set(E_Locations.l_13_4);\n\t\tstate.p_Solid.set(E_Locations.l_6_2);\n\t\tstate.p_Solid.set(E_Locations.l_8_8);\n\t\tstate.p_Solid.set(E_Locations.l_9_12);\n\t\tstate.p_Solid.set(E_Locations.l_17_3);\n\t\tstate.p_Solid.set(E_Locations.l_2_14);\n\t\tstate.p_Solid.set(E_Locations.l_6_15);\n\t\tstate.p_Solid.set(E_Locations.l_1_20);\n\t\tstate.p_Solid.set(E_Locations.l_4_17);\n\t\tstate.p_Solid.set(E_Locations.l_6_17);\n\t\tstate.p_Solid.set(E_Locations.l_9_10);\n\t\tstate.p_Solid.set(E_Locations.l_9_15);\n\t\tstate.p_Solid.set(E_Locations.l_4_13);\n\t\tstate.p_Solid.set(E_Locations.l_5_9);\n\t\tstate.p_Solid.set(E_Locations.l_7_8);\n\t\tstate.p_Solid.set(E_Locations.l_12_1);\n\t\tstate.p_Solid.set(E_Locations.l_19_8);\n\t\tstate.p_Solid.set(E_Locations.l_10_3);\n\t\tstate.p_Solid.set(E_Locations.l_2_20);\n\t\tstate.p_Solid.set(E_Locations.l_1_6);\n\t\tstate.p_Solid.set(E_Locations.l_1_9);\n\t\t\n\n\t\tstate.p_Medium.set(E_Locations.l_2_10);\n\t\tstate.p_Medium.set(E_Locations.l_13_12);\n\t\tstate.p_Medium.set(E_Locations.l_20_19);\n\t\tstate.p_Medium.set(E_Locations.l_4_15);\n\t\tstate.p_Medium.set(E_Locations.l_18_17);\n\t\tstate.p_Medium.set(E_Locations.l_18_18);\n\t\tstate.p_Medium.set(E_Locations.l_4_19);\n\t\tstate.p_Medium.set(E_Locations.l_9_5);\n\t\tstate.p_Medium.set(E_Locations.l_16_11);\n\t\tstate.p_Medium.set(E_Locations.l_2_4);\n\t\tstate.p_Medium.set(E_Locations.l_4_18);\n\t\tstate.p_Medium.set(E_Locations.l_5_8);\n\t\tstate.p_Medium.set(E_Locations.l_6_7);\n\t\tstate.p_Medium.set(E_Locations.l_18_2);\n\t\tstate.p_Medium.set(E_Locations.l_8_1);\n\t\tstate.p_Medium.set(E_Locations.l_4_2);\n\t\tstate.p_Medium.set(E_Locations.l_10_1);\n\t\tstate.p_M
edium.set(E_Locations.l_20_16);\n\t\tstate.p_Medium.set(E_Locations.l_7_13);\n\t\tstate.p_Medium.set(E_Locations.l_17_17);\n\t\tstate.p_Medium.set(E_Locations.l_15_11);\n\t\tstate.p_Medium.set(E_Locations.l_10_14);\n\t\tstate.p_Medium.set(E_Locations.l_19_15);\n\t\tstate.p_Medium.set(E_Locations.l_20_11);\n\t\tstate.p_Medium.set(E_Locations.l_11_6);\n\t\tstate.p_Medium.set(E_Locations.l_1_4);\n\t\tstate.p_Medium.set(E_Locations.l_8_5);\n\t\tstate.p_Medium.set(E_Locations.l_11_20);\n\t\tstate.p_Medium.set(E_Locations.l_4_16);\n\t\tstate.p_Medium.set(E_Locations.l_13_3);\n\t\tstate.p_Medium.set(E_Locations.l_9_4);\n\t\tstate.p_Medium.set(E_Locations.l_1_19);\n\t\tstate.p_Medium.set(E_Locations.l_16_17);\n\t\tstate.p_Medium.set(E_Locations.l_6_13);\n\t\tstate.p_Medium.set(E_Locations.l_3_2);\n\t\tstate.p_Medium.set(E_Locations.l_13_16);\n\t\tstate.p_Medium.set(E_Locations.l_10_18);\n\t\tstate.p_Medium.set(E_Locations.l_17_16);\n\t\tstate.p_Medium.set(E_Locations.l_12_19);\n\t\tstate.p_Medium.set(E_Locations.l_19_2);\n\t\tstate.p_Medium.set(E_Locations.l_7_7);\n\t\tstate.p_Medium.set(E_Locations.l_5_15);\n\t\tstate.p_Medium.set(E_Locations.l_5_19);\n\t\tstate.p_Medium.set(E_Locations.l_15_15);\n\t\tstate.p_Medium.set(E_Locations.l_17_2);\n\t\tstate.p_Medium.set(E_Locations.l_17_8);\n\t\tstate.p_Medium.set(E_Locations.l_13_14);\n\t\tstate.p_Medium.set(E_Locations.l_16_9);\n\t\tstate.p_Medium.set(E_Locations.l_10_19);\n\t\tstate.p_Medium.set(E_Locations.l_5_18);\n\t\tstate.p_Medium.set(E_Locations.l_19_3);\n\t\tstate.p_Medium.set(E_Locations.l_15_7);\n\t\tstate.p_Medium.set(E_Locations.l_10_11);\n\t\tstate.p_Medium.set(E_Locations.l_2_11);\n\t\tstate.p_Medium.set(E_Locations.l_13_1);\n\t\tstate.p_Medium.set(E_Locations.l_18_11);\n\t\tstate.p_Medium.set(E_Locations.l_3_10);\n\t\tstate.p_Medium.set(E_Locations.l_13_20);\n\t\tstate.p_Medium.set(E_Locations.l_9_16);\n\t\tstate.p_Medium.set(E_Locations.l_1_15);\n\t\tstate.p_Medium.set(E_Locations.l_19_12);\n\t\tstate.p_Medium.set(E_Locations.l_13_2);\n\t\tstate.p_Medium.set(E_Locations.l_8_18);\n\t\tstate.p_Medium.set(E_Locations.l_5_3);\n\t\tstate.p_Medium.set(E_Locations.l_18_9);\n\t\tstate.p_Medium.set(E_Locations.l_5_17);\n\t\tstate.p_Medium.set(E_Locations.l_17_5);\n\t\tstate.p_Medium.set(E_Locations.l_6_10);\n\t\tstate.p_Medium.set(E_Locations.l_2_1);\n\t\tstate.p_Medium.set(E_Locations.l_20_4);\n\t\tstate.p_Medium.set(E_Locations.l_5_13);\n\t\tstate.p_Medium.set(E_Locations.l_6_19);\n\t\tstate.p_Medium.set(E_Locations.l_4_8);\n\t\tstate.p_Medium.set(E_Locations.l_16_3);\n\t\tstate.p_Medium.set(E_Locations.l_4_10);\n\t\tstate.p_Medium.set(E_Locations.l_8_7);\n\t\tstate.p_Medium.set(E_Locations.l_13_7);\n\t\tstate.p_Medium.set(E_Locations.l_12_18);\n\t\tstate.p_Medium.set(E_Locations.l_1_11);\n\t\tstate.p_Medium.set(E_Locations.l_19_16);\n\t\tstate.p_Medium.set(E_Locations.l_1_18);\n\t\tstate.p_Medium.set(E_Locations.l_16_16);\n\t\tstate.p_Medium.set(E_Locations.l_10_2);\n\t\tstate.p_Medium.set(E_Locations.l_2_9);\n\t\tstate.p_Medium.set(E_Locations.l_14_17);\n\t\tstate.p_Medium.set(E_Locations.l_14_16);\n\t\tstate.p_Medium.set(E_Locations.l_17_13);\n\t\t\n\n\t\tstate.p_Small.set(E_Locations.l_17_11);\n\t\tstate.p_Small.set(E_Locations.l_14_18);\n\t\tstate.p_Small.set(E_Locations.l_3_12);\n\t\tstate.p_Small.set(E_Locations.l_14_11);\n\t\tstate.p_Small.set(E_Locations.l_7_15);\n\t\tstate.p_Small.set(E_Locations.l_4_4);\n\t\tstate.p_Small.set(E_Locations.l_13_11);\n\t\tstate.p_Small.set(E_Locations.l_11_17);\n\t\tstate.p_Small.set(E_Locations.l_16_
1);\n\t\tstate.p_Small.set(E_Locations.l_12_3);\n\t\tstate.p_Small.set(E_Locations.l_5_14);\n\t\tstate.p_Small.set(E_Locations.l_11_4);\n\t\tstate.p_Small.set(E_Locations.l_8_14);\n\t\tstate.p_Small.set(E_Locations.l_6_6);\n\t\tstate.p_Small.set(E_Locations.l_17_15);\n\t\tstate.p_Small.set(E_Locations.l_20_18);\n\t\tstate.p_Small.set(E_Locations.l_17_19);\n\t\tstate.p_Small.set(E_Locations.l_3_17);\n\t\tstate.p_Small.set(E_Locations.l_19_10);\n\t\tstate.p_Small.set(E_Locations.l_7_3);\n\t\tstate.p_Small.set(E_Locations.l_3_11);\n\t\tstate.p_Small.set(E_Locations.l_16_13);\n\t\tstate.p_Small.set(E_Locations.l_11_13);\n\t\tstate.p_Small.set(E_Locations.l_4_11);\n\t\tstate.p_Small.set(E_Locations.l_11_10);\n\t\tstate.p_Small.set(E_Locations.l_4_1);\n\t\tstate.p_Small.set(E_Locations.l_3_14);\n\t\tstate.p_Small.set(E_Locations.l_20_10);\n\t\tstate.p_Small.set(E_Locations.l_3_8);\n\t\tstate.p_Small.set(E_Locations.l_8_6);\n\t\tstate.p_Small.set(E_Locations.l_7_18);\n\t\tstate.p_Small.set(E_Locations.l_3_5);\n\t\tstate.p_Small.set(E_Locations.l_12_20);\n\t\tstate.p_Small.set(E_Locations.l_20_5);\n\t\tstate.p_Small.set(E_Locations.l_16_19);\n\t\tstate.p_Small.set(E_Locations.l_13_17);\n\t\tstate.p_Small.set(E_Locations.l_3_3);\n\t\tstate.p_Small.set(E_Locations.l_12_8);\n\t\tstate.p_Small.set(E_Locations.l_2_13);\n\t\tstate.p_Small.set(E_Locations.l_2_17);\n\t\tstate.p_Small.set(E_Locations.l_6_8);\n\t\tstate.p_Small.set(E_Locations.l_19_6);\n\t\tstate.p_Small.set(E_Locations.l_20_12);\n\t\tstate.p_Small.set(E_Locations.l_1_3);\n\t\tstate.p_Small.set(E_Locations.l_6_9);\n\t\tstate.p_Small.set(E_Locations.l_6_11);\n\t\tstate.p_Small.set(E_Locations.l_17_1);\n\t\tstate.p_Small.set(E_Locations.l_8_13);\n\t\tstate.p_Small.set(E_Locations.l_10_6);\n\t\tstate.p_Small.set(E_Locations.l_11_12);\n\t\tstate.p_Small.set(E_Locations.l_14_15);\n\t\tstate.p_Small.set(E_Locations.l_11_8);\n\t\tstate.p_Small.set(E_Locations.l_5_12);\n\t\tstate.p_Small.set(E_Locations.l_15_2);\n\t\tstate.p_Small.set(E_Locations.l_13_9);\n\t\tstate.p_Small.set(E_Locations.l_17_7);\n\t\tstate.p_Small.set(E_Locations.l_19_9);\n\t\t\n\n\t\tstate.p_Big.set(E_Locations.l_20_15);\n\t\tstate.p_Big.set(E_Locations.l_12_11);\n\t\tstate.p_Big.set(E_Locations.l_14_9);\n\t\tstate.p_Big.set(E_Locations.l_11_18);\n\t\tstate.p_Big.set(E_Locations.l_7_9);\n\t\tstate.p_Big.set(E_Locations.l_1_7);\n\t\tstate.p_Big.set(E_Locations.l_15_9);\n\t\tstate.p_Big.set(E_Locations.l_12_9);\n\t\tstate.p_Big.set(E_Locations.l_7_11);\n\t\tstate.p_Big.set(E_Locations.l_15_8);\n\t\tstate.p_Big.set(E_Locations.l_18_19);\n\t\tstate.p_Big.set(E_Locations.l_15_16);\n\t\tstate.p_Big.set(E_Locations.l_4_5);\n\t\tstate.p_Big.set(E_Locations.l_13_5);\n\t\tstate.p_Big.set(E_Locations.l_15_4);\n\t\tstate.p_Big.set(E_Locations.l_15_3);\n\t\tstate.p_Big.set(E_Locations.l_3_15);\n\t\tstate.p_Big.set(E_Locations.l_7_16);\n\t\tstate.p_Big.set(E_Locations.l_11_19);\n\t\tstate.p_Big.set(E_Locations.l_18_7);\n\t\tstate.p_Big.set(E_Locations.l_4_9);\n\t\tstate.p_Big.set(E_Locations.l_8_11);\n\t\tstate.p_Big.set(E_Locations.l_19_20);\n\t\tstate.p_Big.set(E_Locations.l_3_13);\n\t\tstate.p_Big.set(E_Locations.l_16_20);\n\t\tstate.p_Big.set(E_Locations.l_15_19);\n\t\tstate.p_Big.set(E_Locations.l_7_6);\n\t\tstate.p_Big.set(E_Locations.l_4_20);\n\t\tstate.p_Big.set(E_Locations.l_5_16);\n\t\tstate.p_Big.set(E_Locations.l_9_9);\n\t\tstate.p_Big.set(E_Locations.l_17_6);\n\t\tstate.p_Big.set(E_Locations.l_11_15);\n\t\tstate.p_Big.set(E_Locations.l_19_1);\n\t\tstate.p_Big.set(E_Locations.l_9_
6);\n\t\tstate.p_Big.set(E_Locations.l_16_5);\n\t\tstate.p_Big.set(E_Locations.l_7_17);\n\t\tstate.p_Big.set(E_Locations.l_11_5);\n\t\tstate.p_Big.set(E_Locations.l_15_18);\n\t\tstate.p_Big.set(E_Locations.l_3_9);\n\t\tstate.p_Big.set(E_Locations.l_14_2);\n\t\tstate.p_Big.set(E_Locations.l_9_13);\n\t\tstate.p_Big.set(E_Locations.l_18_10);\n\t\tstate.p_Big.set(E_Locations.l_1_12);\n\t\tstate.p_Big.set(E_Locations.l_1_2);\n\t\tstate.p_Big.set(E_Locations.l_17_18);\n\t\tstate.p_Big.set(E_Locations.l_20_14);\n\t\tstate.p_Big.set(E_Locations.l_8_15);\n\t\tstate.p_Big.set(E_Locations.l_7_5);\n\t\tstate.p_Big.set(E_Locations.l_15_5);\n\t\tstate.p_Big.set(E_Locations.l_5_6);\n\t\tstate.p_Big.set(E_Locations.l_18_20);\n\t\tstate.p_Big.set(E_Locations.l_7_4);\n\t\tstate.p_Big.set(E_Locations.l_2_12);\n\t\tstate.p_Big.set(E_Locations.l_19_11);\n\t\tstate.p_Big.set(E_Locations.l_13_15);\n\t\tstate.p_Big.set(E_Locations.l_12_15);\n\t\tstate.p_Big.set(E_Locations.l_20_1);\n\t\tstate.p_Big.set(E_Locations.l_15_10);\n\t\tstate.p_Big.set(E_Locations.l_19_7);\n\t\tstate.p_Big.set(E_Locations.l_2_7);\n\t\tstate.p_Big.set(E_Locations.l_3_20);\n\t\tstate.p_Big.set(E_Locations.l_9_19);\n\t\tstate.p_Big.set(E_Locations.l_10_17);\n\t\tstate.p_Big.set(E_Locations.l_8_12);\n\t\tstate.p_Big.set(E_Locations.l_8_20);\n\t\tstate.p_Big.set(E_Locations.l_16_7);\n\t\tstate.p_Big.set(E_Locations.l_13_10);\n\t\tstate.p_Big.set(E_Locations.l_6_18);\n\t\tstate.p_Big.set(E_Locations.l_4_7);\n\t\tstate.p_Big.set(E_Locations.l_3_18);\n\t\tstate.p_Big.set(E_Locations.l_12_5);\n\t\tstate.p_Big.set(E_Locations.l_15_20);\n\t\tstate.p_Big.set(E_Locations.l_6_14);\n\t\tstate.p_Big.set(E_Locations.l_5_5);\n\t\tstate.p_Big.set(E_Locations.l_2_6);\n\t\t\n\n\t}\n\t\n\t@Override\n\tpublic String getName() {\n\t\treturn \"Perestroika-Problem\";\n\t}\n\t\n\t@Override\n\tpublic Domain getDomain() {\n\t\treturn domain;\n\t}\n\n\t@Override\n\tpublic State getState() {\n\t\treturn state;\n\t}\n\n\t@Override\n\tpublic Goal getGoal() {\n\t\treturn goal;\n\t}\t\n\t\n\t@Override\n\tpublic PDDLDeadEnd getDeadEnd() {\n\t\treturn deadEnd;\n\t}\n\t\n}","avg_line_length":56.3714054273,"max_line_length":67,"alphanum_fraction":0.8086448581}
{"size":10220,"ext":"java","lang":"Java","max_stars_count":1.0,"content":"\/\/ Copyright (c) FIRST and other WPILib contributors.\n\/\/ Open Source Software; you can modify and\/or share it under the terms of\n\/\/ the WPILib BSD license file in the root directory of this project.\n\npackage edu.wpi.first.wpilibj.simulation;\n\nimport edu.wpi.first.wpilibj.system.LinearSystem;\nimport edu.wpi.first.wpilibj.system.NumericalIntegration;\nimport edu.wpi.first.wpilibj.system.plant.DCMotor;\nimport edu.wpi.first.wpilibj.system.plant.LinearSystemId;\nimport edu.wpi.first.wpiutil.math.Matrix;\nimport edu.wpi.first.wpiutil.math.VecBuilder;\nimport edu.wpi.first.wpiutil.math.numbers.N1;\nimport edu.wpi.first.wpiutil.math.numbers.N2;\n\n\/** Represents a simulated single jointed arm mechanism. *\/\npublic class SingleJointedArmSim extends LinearSystemSim {\n  \/\/ The gearbox for the arm.\n  private final DCMotor m_gearbox;\n\n  \/\/ The gearing between the motors and the output.\n  private final double m_gearing;\n\n  \/\/ The length of the arm.\n  @SuppressWarnings(\"MemberName\")\n  private final double m_r;\n\n  \/\/ The minimum angle that the arm is capable of.\n  private final double m_minAngle;\n\n  \/\/ The maximum angle that the arm is capable of.\n  private final double m_maxAngle;\n\n  \/\/ The mass of the arm.\n  private final double m_armMass;\n\n  \/\/ Whether the simulator should simulate gravity.\n  private final boolean m_simulateGravity;\n\n  \/**\n   * Creates a simulated arm mechanism.\n   *\n   * @param plant The linear system that represents the arm.\n   * @param gearbox The type of and number of motors in the arm gearbox.\n   * @param gearing The gearing of the arm (numbers greater than 1 represent reductions).\n   * @param armLengthMeters The length of the arm.\n   * @param minAngleRads The minimum angle that the arm is capable of.\n   * @param maxAngleRads The maximum angle that the arm is capable of.\n   * @param armMassKg The mass of the arm.\n   * @param simulateGravity Whether gravity should be simulated or not.\n   *\/\n  public SingleJointedArmSim(\n      LinearSystem plant,\n      DCMotor gearbox,\n      double gearing,\n      double armLengthMeters,\n      double minAngleRads,\n      double maxAngleRads,\n      double armMassKg,\n      boolean simulateGravity) {\n    this(\n        plant,\n        gearbox,\n        gearing,\n        armLengthMeters,\n        minAngleRads,\n        maxAngleRads,\n        armMassKg,\n        simulateGravity,\n        null);\n  }\n\n  \/**\n   * Creates a simulated arm mechanism.\n   *\n   * @param plant The linear system that represents the arm.\n   * @param gearbox The type of and number of motors in the arm gearbox.\n   * @param gearing The gearing of the arm (numbers greater than 1 represent reductions).\n   * @param armLengthMeters The length of the arm.\n   * @param minAngleRads The minimum angle that the arm is capable of.\n   * @param maxAngleRads The maximum angle that the arm is capable of.\n   * @param armMassKg The mass of the arm.\n   * @param simulateGravity Whether gravity should be simulated or not.\n   * @param measurementStdDevs The standard deviations of the measurements.\n   *\/\n  public SingleJointedArmSim(\n      LinearSystem plant,\n      DCMotor gearbox,\n      double gearing,\n      double armLengthMeters,\n      double minAngleRads,\n      double maxAngleRads,\n      double armMassKg,\n      boolean simulateGravity,\n      Matrix measurementStdDevs) {\n    super(plant, measurementStdDevs);\n    m_gearbox = 
gearbox;\n    m_gearing = gearing;\n    m_r = armLengthMeters;\n    m_minAngle = minAngleRads;\n    m_maxAngle = maxAngleRads;\n    m_armMass = armMassKg;\n    m_simulateGravity = simulateGravity;\n  }\n\n  \/**\n   * Creates a simulated arm mechanism.\n   *\n   * @param gearbox The type of and number of motors in the arm gearbox.\n   * @param gearing The gearing of the arm (numbers greater than 1 represent reductions).\n   * @param jKgMetersSquared The moment of inertia of the arm, can be calculated from CAD software.\n   * @param armLengthMeters The length of the arm.\n   * @param minAngleRads The minimum angle that the arm is capable of.\n   * @param maxAngleRads The maximum angle that the arm is capable of.\n   * @param armMassKg The mass of the arm.\n   * @param simulateGravity Whether gravity should be simulated or not.\n   *\/\n  @SuppressWarnings(\"ParameterName\")\n  public SingleJointedArmSim(\n      DCMotor gearbox,\n      double gearing,\n      double jKgMetersSquared,\n      double armLengthMeters,\n      double minAngleRads,\n      double maxAngleRads,\n      double armMassKg,\n      boolean simulateGravity) {\n    this(\n        gearbox,\n        gearing,\n        jKgMetersSquared,\n        armLengthMeters,\n        minAngleRads,\n        maxAngleRads,\n        armMassKg,\n        simulateGravity,\n        null);\n  }\n\n  \/**\n   * Creates a simulated arm mechanism.\n   *\n   * @param gearbox The type of and number of motors in the arm gearbox.\n   * @param gearing The gearing of the arm (numbers greater than 1 represent reductions).\n   * @param jKgMetersSquared The moment of inertia of the arm; can be calculated from CAD software.\n   * @param armLengthMeters The length of the arm.\n   * @param minAngleRads The minimum angle that the arm is capable of.\n   * @param maxAngleRads The maximum angle that the arm is capable of.\n   * @param armMassKg The mass of the arm.\n   * @param simulateGravity Whether gravity should be simulated or not.\n   * @param measurementStdDevs The standard deviations of the measurements.\n   *\/\n  @SuppressWarnings(\"ParameterName\")\n  public SingleJointedArmSim(\n      DCMotor gearbox,\n      double gearing,\n      double jKgMetersSquared,\n      double armLengthMeters,\n      double minAngleRads,\n      double maxAngleRads,\n      double armMassKg,\n      boolean simulateGravity,\n      Matrix measurementStdDevs) {\n    super(\n        LinearSystemId.createSingleJointedArmSystem(gearbox, jKgMetersSquared, gearing),\n        measurementStdDevs);\n    m_gearbox = gearbox;\n    m_gearing = gearing;\n    m_r = armLengthMeters;\n    m_minAngle = minAngleRads;\n    m_maxAngle = maxAngleRads;\n    m_armMass = armMassKg;\n    m_simulateGravity = simulateGravity;\n  }\n\n  \/**\n   * Returns whether the arm would hit the lower limit.\n   *\n   * @param currentAngleRads The current arm height.\n   * @return Whether the arm would hit the lower limit.\n   *\/\n  public boolean wouldHitLowerLimit(double currentAngleRads) {\n    return currentAngleRads < this.m_minAngle;\n  }\n\n  \/**\n   * Returns whether the arm would hit the upper limit.\n   *\n   * @param currentAngleRads The current arm height.\n   * @return Whether the arm would hit the upper limit.\n   *\/\n  public boolean wouldHitUpperLimit(double currentAngleRads) {\n    return currentAngleRads > this.m_maxAngle;\n  }\n\n  \/**\n   * Returns whether the arm has hit the lower limit.\n   *\n   * @return Whether the arm has hit the lower limit.\n   *\/\n  public boolean hasHitLowerLimit() {\n    
return wouldHitLowerLimit(getAngleRads());\n  }\n\n  \/**\n   * Returns whether the arm has hit the upper limit.\n   *\n   * @return Whether the arm has hit the upper limit.\n   *\/\n  public boolean hasHitUpperLimit() {\n    return wouldHitUpperLimit(getAngleRads());\n  }\n\n  \/**\n   * Returns the current arm angle.\n   *\n   * @return The current arm angle.\n   *\/\n  public double getAngleRads() {\n    return m_y.get(0, 0);\n  }\n\n  \/**\n   * Returns the current arm velocity.\n   *\n   * @return The current arm velocity.\n   *\/\n  public double getVelocityRadPerSec() {\n    return m_x.get(1, 0);\n  }\n\n  \/**\n   * Returns the arm current draw.\n   *\n   * @return The aram current draw.\n   *\/\n  @Override\n  public double getCurrentDrawAmps() {\n    \/\/ Reductions are greater than 1, so a reduction of 10:1 would mean the motor is\n    \/\/ spinning 10x faster than the output\n    var motorVelocity = m_x.get(1, 0) * m_gearing;\n    return m_gearbox.getCurrent(motorVelocity, m_u.get(0, 0)) * Math.signum(m_u.get(0, 0));\n  }\n\n  \/**\n   * Sets the input voltage for the arm.\n   *\n   * @param volts The input voltage.\n   *\/\n  public void setInputVoltage(double volts) {\n    setInput(volts);\n  }\n\n  \/**\n   * Calculates a rough estimate of the moment of inertia of an arm given its length and mass.\n   *\n   * @param lengthMeters The length of the arm.\n   * @param massKg The mass of the arm.\n   * @return The calculated moment of inertia.\n   *\/\n  public static double estimateMOI(double lengthMeters, double massKg) {\n    return 1.0 \/ 3.0 * massKg * lengthMeters * lengthMeters;\n  }\n\n  \/**\n   * Updates the state of the arm.\n   *\n   * @param currentXhat The current state estimate.\n   * @param u The system inputs (voltage).\n   * @param dtSeconds The time difference between controller updates.\n   *\/\n  @Override\n  @SuppressWarnings({\"ParameterName\", \"LambdaParameterName\"})\n  protected Matrix updateX(Matrix currentXhat, Matrix u, double dtSeconds) {\n    \/\/ Horizontal case:\n    \/\/ Torque = F * r = I * alpha\n    \/\/ alpha = F * r \/ I\n    \/\/ Since F = mg,\n    \/\/ alpha = m * g * r \/ I\n    \/\/ Finally, multiply RHS by cos(theta) to account for the arm angle\n    \/\/ This acceleration is added to the linear system dynamics x-dot = Ax + Bu\n    \/\/ We therefore find that f(x, u) = Ax + Bu + [[0] [m * g * r \/ I *\n    \/\/ cos(theta)]]\n    Matrix updatedXhat =\n        NumericalIntegration.rkf45(\n            (Matrix x, Matrix u_) -> {\n              Matrix xdot = m_plant.getA().times(x).plus(m_plant.getB().times(u_));\n              if (m_simulateGravity) {\n                xdot =\n                    xdot.plus(\n                        VecBuilder.fill(\n                            0,\n                            m_armMass\n                                * m_r\n                                * -9.8\n                                * 3.0\n                                \/ (m_armMass * m_r * m_r)\n                                * Math.cos(x.get(0, 0))));\n              }\n              return xdot;\n            },\n            currentXhat,\n            u,\n            dtSeconds);\n\n    \/\/ We check for collision after updating xhat\n    if (wouldHitLowerLimit(updatedXhat.get(0, 0))) {\n      return VecBuilder.fill(m_minAngle, 0);\n    }\n    if (wouldHitUpperLimit(updatedXhat.get(0, 0))) {\n      return VecBuilder.fill(m_maxAngle, 0);\n    }\n    return updatedXhat;\n  
}\n}\n","avg_line_length":32.3417721519,"max_line_length":100,"alphanum_fraction":0.6599804305}
{"size":1306,"ext":"java","lang":"Java","max_stars_count":112.0,"content":"\/**\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements.  See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership.  The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License.  You may obtain a copy of the License at\n *\n *     http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\/\n\npackage org.apache.tajo.exception;\n\nimport org.apache.tajo.error.Errors.ResultCode;\nimport org.apache.tajo.rpc.protocolrecords.PrimitiveProtos.ReturnState;\n\npublic class UndefinedPartitionException extends TajoException {\n\n  private static final long serialVersionUID = 277182608283894938L;\n\n  public UndefinedPartitionException(ReturnState state) {\n    super(state);\n  }\n\n  public UndefinedPartitionException(String partitionName) {\n    super(ResultCode.UNDEFINED_PARTITION, partitionName);\n  }\n}\n","avg_line_length":36.2777777778,"max_line_length":75,"alphanum_fraction":0.7748851455}
{"size":5987,"ext":"java","lang":"Java","max_stars_count":null,"content":"\/**\n * Copyright 2008 Sakaiproject Licensed under the\n * Educational Community License, Version 2.0 (the \"License\"); you may\n * not use this file except in compliance with the License. You may\n * obtain a copy of the License at\n *\n * http:\/\/www.osedu.org\/licenses\/ECL-2.0\n *\n * Unless required by applicable law or agreed to in writing,\n * software distributed under the License is distributed on an \"AS IS\"\n * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express\n * or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n *\/\npackage org.sakaiproject.oaai.service;\n\nimport java.io.BufferedWriter;\nimport java.io.File;\nimport java.io.FileInputStream;\nimport java.io.FileWriter;\nimport java.io.InputStream;\nimport java.io.StringWriter;\nimport java.text.SimpleDateFormat;\nimport java.util.ArrayList;\nimport java.util.Date;\nimport java.util.List;\nimport java.util.Locale;\n\nimport org.apache.commons.io.IOUtils;\nimport org.apache.commons.lang.NullArgumentException;\nimport org.apache.commons.lang.StringUtils;\nimport org.apache.commons.logging.Log;\nimport org.apache.commons.logging.LogFactory;\nimport org.sakaiproject.component.cover.ServerConfigurationService;\nimport org.sakaiproject.oaai.Constants;\n\npublic class FileService {\n\n    private final Log log = LogFactory.getLog(FileService.class);\n\n    private static String DEFAULT_CSV_STORAGE_DIRECTORY = \"oaai\/\";\n    private String storagePath = \"\";\n\n    public void init() {\n        storagePath = createStoragePath();\n        \/\/ create the root directory\n        createNewDirectory(\"\");\n    }\n\n    \/**\n     * Creates a string representing the path to the storage directory\n     * @return the path string\n     *\/\n    private String createStoragePath() {\n        String storagePath = ServerConfigurationService.getString(\"oaai.storage.path\", \"\");\n        if (StringUtils.isBlank(storagePath)) {\n            String rootDirectory = ServerConfigurationService.getString(\"bodyPath@org.sakaiproject.content.api.ContentHostingService\", \"\");\n            rootDirectory = addTrailingSlash(rootDirectory);\n\n            storagePath = addTrailingSlash(rootDirectory + DEFAULT_CSV_STORAGE_DIRECTORY);\n        }\n\n        return storagePath;\n    }\n\n    private String createNewDirectory(String directoryName) {\n        File newDirectory = new File(storagePath + directoryName);\n\n        \/\/ if the directory does not exist, create it\n        if (!newDirectory.exists()) {\n            try{\n                newDirectory.mkdir();\n            } catch(Exception e){\n                log.error(\"Cannot create new directory: \" + e, e);\n            }\n        }\n\n        String path = newDirectory.getPath();\n\n        return path;\n    }\n\n    public String createDatedDirectoryName() {\n        Date date = new Date();\n        SimpleDateFormat sdf = new SimpleDateFormat(Constants.DATE_FORMAT_FILE_NAME, Locale.ENGLISH);\n        String directoryName = sdf.format(date);\n\n        return directoryName;\n    }\n\n    public File createNewFile(String datedDirectory, String fileName) {\n        if (StringUtils.isBlank(datedDirectory)) {\n            throw new NullArgumentException(\"File directory cannot be null or blank\");\n        }\n        if (StringUtils.isBlank(fileName)) {\n            throw new NullArgumentException(\"File name cannot be null or 
blank\");\n        }\n\n        File newFile = null;\n\n        String directory = createNewDirectory(datedDirectory);\n        directory = addTrailingSlash(directory);\n\n        try {\n            newFile = new File(directory + fileName);\n            newFile.createNewFile();\n        } catch (Exception e) {\n            log.error(\"Error creating new file: \" + e, e);\n        }\n\n        return newFile;\n    }\n\n    public boolean writeStringToFile(File file, String dataString) {\n        try {\n            BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(file.getAbsoluteFile()));\n            bufferedWriter.write(dataString);\n            bufferedWriter.close();\n\n            return true;\n        } catch (Exception e) {\n            log.error(\"Error writing string to file: \" + e, e);\n\n            return false;\n        }\n    }\n\n    public List parseDirectory() {\n        List folders = new ArrayList();\n        File directory = new File(storagePath);\n\n        for (File subDirectory : directory.listFiles()) {\n            if (subDirectory.isDirectory()) {\n                folders.add(subDirectory.getName());\n            }\n        }\n\n        return folders;\n    }\n\n    public File getFile(String datedDirectory, String fileName) {\n        if (StringUtils.isBlank(datedDirectory)) {\n            throw new NullArgumentException(\"File directory cannot be null or blank\");\n        }\n        if (StringUtils.isBlank(fileName)) {\n            throw new NullArgumentException(\"File name cannot be null or blank\");\n        }\n\n        datedDirectory = addTrailingSlash(datedDirectory);\n\n        File file = new File(storagePath + datedDirectory + fileName);\n\n        return file;\n    }\n\n    public String readFileIntoString(String datedDirectory, String fileName) {\n        String fileString = \"\";\n\n        try {\n            File file = getFile(datedDirectory, fileName);\n\n            InputStream inputStream = new FileInputStream(file);\n            StringWriter writer = new StringWriter();\n            IOUtils.copy(inputStream, writer);\n            fileString = writer.toString();\n        } catch (Exception e) {\n            log.error(\"Error reading file into string: \" + e, e);\n        }\n\n        return fileString;\n    }\n\n    public boolean saveStringToFile(String dataString, String directory, String name) {\n        File file = createNewFile(directory, name);\n        boolean success = writeStringToFile(file, dataString);\n\n        return success;\n    }\n\n    private String addTrailingSlash(String path) {\n        if (!StringUtils.endsWith(path, \"\/\")) {\n            path += \"\/\";\n        }\n\n        return path;\n    }\n\n}\n","avg_line_length":31.6772486772,"max_line_length":139,"alphanum_fraction":0.6520795056}
{"size":2356,"ext":"java","lang":"Java","max_stars_count":79.0,"content":"\/*\n * MIT License\n *\n * Copyright (c) 2020-present Cloudogu GmbH and Contributors\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and\/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n *\/\n    \npackage sonia.scm.legacy;\n\nimport com.google.inject.Inject;\nimport sonia.scm.api.v2.resources.Enrich;\nimport sonia.scm.api.v2.resources.HalAppender;\nimport sonia.scm.api.v2.resources.HalEnricher;\nimport sonia.scm.api.v2.resources.HalEnricherContext;\nimport sonia.scm.api.v2.resources.Index;\nimport sonia.scm.api.v2.resources.LinkBuilder;\nimport sonia.scm.api.v2.resources.ScmPathInfoStore;\nimport sonia.scm.plugin.Extension;\n\nimport javax.inject.Provider;\n\n@Extension\n@Enrich(Index.class)\npublic class LegacyIndexHalEnricher implements HalEnricher {\n\n  private Provider scmPathInfoStoreProvider;\n\n  @Inject\n  public LegacyIndexHalEnricher(Provider scmPathInfoStoreProvider) {\n    this.scmPathInfoStoreProvider = scmPathInfoStoreProvider;\n  }\n\n  private String createLink() {\n    return new LinkBuilder(scmPathInfoStoreProvider.get().get(), LegacyRepositoryService.class)\n      .method(\"getNameAndNamespaceForRepositoryId\")\n      .parameters(\"REPOID\")\n      .href()\n      .replace(\"REPOID\", \"{id}\");\n  }\n\n  @Override\n  public void enrich(HalEnricherContext context, HalAppender appender) {\n    appender.appendLink(\"nameAndNamespace\", createLink());\n  }\n}\n","avg_line_length":37.3968253968,"max_line_length":95,"alphanum_fraction":0.7707979626}
{"size":1102,"ext":"java","lang":"Java","max_stars_count":1.0,"content":"package com.dmytrobilokha.disturber.network.dto;\n\nimport com.fasterxml.jackson.annotation.JsonIgnoreProperties;\nimport com.fasterxml.jackson.annotation.JsonProperty;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\n\/**\n * The matrix Timeline DTO\n *\/\n@JsonIgnoreProperties(ignoreUnknown = true)\npublic class TimelineDto {\n\n    @JsonProperty(value = \"limited\", required = true)\n    private Boolean limited;\n    @JsonProperty(value = \"prev_batch\", required = true)\n    private String previousBatch;\n    @JsonProperty(value = \"events\", required = true)\n    private List events = new ArrayList<>();\n\n    public Boolean getLimited() {\n        return limited;\n    }\n\n    public void setLimited(Boolean limited) {\n        this.limited = limited;\n    }\n\n    public String getPreviousBatch() {\n        return previousBatch;\n    }\n\n    public void setPreviousBatch(String previousBatch) {\n        this.previousBatch = previousBatch;\n    }\n\n    public List getEvents() {\n        return events;\n    }\n\n    public void setEvents(List events) {\n        this.events = events;\n    }\n}\n","avg_line_length":23.9565217391,"max_line_length":61,"alphanum_fraction":0.6896551724}
{"size":1193,"ext":"java","lang":"Java","max_stars_count":null,"content":"\/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\/\n\npackage com.noctarius.castmapr.core;\n\nimport java.util.HashMap;\nimport java.util.LinkedList;\nimport java.util.List;\nimport java.util.Map;\n\nimport com.noctarius.castmapr.spi.Collector;\n\npublic class CollectorImpl\n    implements Collector\n{\n\n    public final Map> emitted = new HashMap>();\n\n    @Override\n    public void emit( Key key, Value value )\n    {\n        List values = emitted.get( key );\n        if ( values == null )\n        {\n            values = new LinkedList();\n            emitted.put( key, values );\n        }\n        values.add( value );\n    }\n}\n","avg_line_length":28.4047619048,"max_line_length":81,"alphanum_fraction":0.6823134954}
{"size":816,"ext":"java","lang":"Java","max_stars_count":null,"content":"\/*\n * Copyright 2018-2019 the original author or authors.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *    http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\/\n\npackage su.ch;\n\nimport org.junit.jupiter.api.Test;\n\nimport static su.ch.IsComparable.isComparable;\n\nclass PairTest {\n    @Test void equalsAndHashcode() {\n        isComparable(Pair.class);\n    }\n}\n","avg_line_length":29.1428571429,"max_line_length":75,"alphanum_fraction":0.7316176471}
{"size":6003,"ext":"java","lang":"Java","max_stars_count":111.0,"content":"package org.mapfish.print.http;\n\nimport com.sun.net.httpserver.HttpExchange;\nimport com.sun.net.httpserver.HttpHandler;\nimport com.sun.net.httpserver.HttpsServer;\nimport org.apache.http.auth.AuthScope;\nimport org.apache.http.auth.Credentials;\nimport org.junit.AfterClass;\nimport org.junit.BeforeClass;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.mapfish.print.AbstractMapfishSpringTest;\nimport org.mapfish.print.config.Configuration;\nimport org.mapfish.print.config.ConfigurationFactory;\nimport org.mapfish.print.processor.http.matcher.DnsHostMatcher;\nimport org.springframework.beans.factory.annotation.Autowired;\nimport org.springframework.test.context.ContextConfiguration;\nimport org.springframework.test.context.junit4.SpringJUnit4ClassRunner;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.List;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertNull;\n\n@RunWith(SpringJUnit4ClassRunner.class)\n@ContextConfiguration(locations = {\n        AbstractMapfishSpringTest.DEFAULT_SPRING_XML,\n        \"classpath:\/org\/mapfish\/print\/http\/proxy\/application-context-proxy-test.xml\"\n})\npublic class HttpCredentialTest {\n    private static final String USERNAME = \"username\";\n    private static final String PASSWORD = \"password\";\n    private static final int HTTPS_PROXY_PORT = 21433;\n    private static HttpsServer httpsServer;\n\n    @Autowired\n    ConfigurationFactory configurationFactory;\n    @Autowired\n    private MfClientHttpRequestFactoryImpl requestFactory;\n\n    @BeforeClass\n    public static void setUp() throws Exception {\n        httpsServer = HttpProxyTest.createHttpsServer(HTTPS_PROXY_PORT);\n    }\n\n    @AfterClass\n    public static void tearDown() {\n        httpsServer.stop(0);\n    }\n\n    @Test\n    public void testValidate() {\n        final HttpCredential credential = new HttpCredential();\n        Configuration configuration = new Configuration();\n\n        List errors = new ArrayList<>();\n        credential.validate(errors, configuration);\n        assertEquals(1, errors.size());\n\n        errors.clear();\n        credential.validate(errors, configuration);\n        assertEquals(1, errors.size());\n\n        credential.setUsername(\"username\");\n\n        errors.clear();\n        credential.validate(errors, configuration);\n        assertEquals(0, errors.size());\n    }\n\n    @Test\n    public void testToCredentials() throws Exception {\n        final HttpCredential credential = new HttpCredential();\n        credential.setUsername(USERNAME);\n        credential.setPassword(PASSWORD);\n\n        final DnsHostMatcher matcher = new DnsHostMatcher();\n        matcher.setHost(HttpProxyTest.LOCALHOST);\n        credential.setMatchers(Collections.singletonList(matcher));\n\n        AuthScope authscope = AuthScope.ANY;\n        final Credentials object = credential.toCredentials(authscope);\n        assertNotNull(object);\n        assertEquals(USERNAME, object.getUserPrincipal().getName());\n        assertEquals(PASSWORD, object.getPassword());\n\n        authscope = new AuthScope(AuthScope.ANY_HOST, AuthScope.ANY_PORT, AuthScope.ANY_REALM,\n                                  AuthScope.ANY_SCHEME);\n        assertNotNull(credential.toCredentials(authscope));\n\n        authscope = new AuthScope(AuthScope.ANY_HOST, 
HttpProxyTest.HTTPS_PROXY_PORT, AuthScope.ANY_REALM,\n                                  AuthScope.ANY_SCHEME);\n        assertNotNull(credential.toCredentials(authscope));\n\n        authscope = new AuthScope(AuthScope.ANY_HOST, 80, AuthScope.ANY_REALM, AuthScope.ANY_SCHEME);\n        assertNotNull(credential.toCredentials(authscope));\n\n        authscope =\n                new AuthScope(\"google.com\", AuthScope.ANY_PORT, AuthScope.ANY_REALM, AuthScope.ANY_SCHEME);\n        assertNull(credential.toCredentials(authscope));\n\n        authscope = new AuthScope(AuthScope.ANY_HOST, AuthScope.ANY_PORT, AuthScope.ANY_REALM, \"http\");\n        assertNotNull(credential.toCredentials(authscope));\n    }\n\n    @Test\n    public void testToHttpsBehaviour() throws Exception {\n        final String message = \"Message from server\";\n\n        final String path = \"\/username\";\n        httpsServer.createContext(path, new HttpHandler() {\n            @Override\n            public void handle(HttpExchange httpExchange) throws IOException {\n                final String authorization = httpExchange.getRequestHeaders().getFirst(\"Authorization\");\n                if (authorization == null) {\n                    httpExchange.getResponseHeaders().add(\"WWW-Authenticate\", \"Basic realm=\\\"Test Site\\\"\");\n                    httpExchange.sendResponseHeaders(401, 0);\n                    httpExchange.close();\n                } else {\n                    final String expectedAuth = \"Basic dXNlcm5hbWU6cGFzc3dvcmQ=\";\n                    if (authorization.equals(expectedAuth)) {\n                        HttpProxyTest.respond(httpExchange, message, 200);\n                    } else {\n                        final String errorMessage =\n                                \"Expected authorization:\\n'\" + expectedAuth + \"' but got:\\n'\" +\n                                        authorization + \"'\";\n                        HttpProxyTest.respond(httpExchange, errorMessage, 500);\n                    }\n                }\n            }\n        });\n\n        final HttpCredential credential = new HttpCredential();\n        credential.setUsername(USERNAME);\n        credential.setPassword(PASSWORD);\n\n        final DnsHostMatcher matcher = new DnsHostMatcher();\n        matcher.setHost(HttpProxyTest.LOCALHOST);\n        credential.setMatchers(Collections.singletonList(matcher));\n\n        final String target = \"https:\/\/\" + HttpProxyTest.LOCALHOST + \":\" + HTTPS_PROXY_PORT;\n        HttpProxyTest\n                .assertCorrectResponse(this.configurationFactory, this.requestFactory, credential, message,\n                                       target, path);\n    }\n}\n","avg_line_length":39.7549668874,"max_line_length":107,"alphanum_fraction":0.6764950858}
{"size":845,"ext":"java","lang":"Java","max_stars_count":null,"content":"\/*\n * Copyright 2019 dc-square GmbH\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *       http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\/\n\npackage com.hivemq.extensions.executor.task;\n\nimport com.hivemq.annotations.NotNull;\n\n\/**\n * A marker interface for extension tasks.\n *\n * @author Georg Held\n *\/\npublic interface PluginTask {\n\n    @NotNull ClassLoader getPluginClassLoader();\n}\n","avg_line_length":28.1666666667,"max_line_length":75,"alphanum_fraction":0.7372781065}
{"size":1391,"ext":"java","lang":"Java","max_stars_count":1.0,"content":"package tech.teslex.telegroo.api.methods.defaults;\n\nimport com.fasterxml.jackson.databind.JavaType;\nimport com.fasterxml.jackson.databind.ObjectMapper;\nimport tech.teslex.telegroo.api.context.Context;\nimport tech.teslex.telegroo.telegram.api.TelegramResult;\nimport tech.teslex.telegroo.telegram.api.methods.interfaces.SetStickerPositionInSetMethod;\nimport tech.teslex.telegroo.telegram.api.methods.objects.SetStickerPositionInSet;\n\nimport java.util.Map;\nimport java.util.function.Consumer;\n\npublic interface DefaultSetStickerPositionInSetMethod extends SetStickerPositionInSetMethod> {\n\n\t\/**\n\t * @return update context\n\t *\/\n\tContext getContext();\n\n\t\/**\n\t * @return object mapper\n\t *\/\n\tObjectMapper getObjectMapper();\n\n\t@Override\n\tdefault TelegramResult setStickerPositionInSet(Map data) {\n\t\tthrow new AssertionError();\n\t}\n\n\t@Override\n\tdefault TelegramResult setStickerPositionInSet(Consumer data) {\n\t\tSetStickerPositionInSet method = SetStickerPositionInSet.create();\n\t\tdata.accept(method);\n\n\t\treturn setStickerPositionInSet(method);\n\t}\n\n\t@Override\n\tdefault TelegramResult setStickerPositionInSet(SetStickerPositionInSet data) {\n\t\tJavaType type = getObjectMapper()\n\t\t\t\t.getTypeFactory()\n\t\t\t\t.constructType(Object.class);\n\n\t\treturn getContext()\n\t\t\t\t.getTelegramClient()\n\t\t\t\t.call(data)\n\t\t\t\t.asTelegramResult(type);\n\t}\n}\n","avg_line_length":27.82,"max_line_length":117,"alphanum_fraction":0.7958303379}
{"size":3379,"ext":"java","lang":"Java","max_stars_count":1.0,"content":"package frc.robot.subsystems.base;\n\nimport edu.wpi.first.wpilibj.DoubleSolenoid;\nimport frc.robot.subsystems.base.BaseLimitSensor;\n\n\/**\n *  Base class that deploys the hatch and extends the hatch mechanism\n *\/\npublic class BaseHatchMechanism\n{\n    \/\/ Solenoids\n    private final DoubleSolenoid hatchDeploySolenoid;\n    private final DoubleSolenoid hatchLatchSolenoid;\n\n    \/\/ Limit Sensors\n    private final BaseLimitSensor hatchDetector;\n\n    \/**\n     *  Intializes the required solenoids and sensors\n     *  @param hatchDeploySolenoid\n     *     - Type: Solenoid\n     *     - Controls the slider mechanism that moves the hatch to the rocket when retracted.\n     *       This object must be created such that energizing the forward channel pushes the\n     *       hatch off the gripper and energizing the reverse channel retracts the piston.\n     *  @param hatchLatchSolenoid\n     *     - Type: Solenoid\n     *     - Controls the hatch mechanism to either open or close the mechanism. The forward channel\n     *       opens the gripper and the reverse channel closes it.\n     *  @param hatchDetector\n     *     - Type: BaseLimitSensor\n     *     - Detects whether the hatch is on the mechanism or not\n     *\/\n    public BaseHatchMechanism(DoubleSolenoid hatchDeploySolenoid, DoubleSolenoid hatchLatchSolenoid, BaseLimitSensor hatchDetector)\n    {\n        this.hatchDeploySolenoid = hatchDeploySolenoid;\n        this.hatchLatchSolenoid  = hatchLatchSolenoid;\n        this.hatchDetector       = hatchDetector;\n\n        hatchDisable();\n    }\n\n    \/**\n     *  Push the hatch off the mechanism using the piston.\n     *\/\n    public void extendHatchPiston()\n    {\n        hatchDeploySolenoid.set(DoubleSolenoid.Value.kForward);\n    }\n\n    \/**\n     *  Retracts piston pushing the hatch to the rocket\n     *\/\n    public void retractHatchPiston()\n    {\n        hatchDeploySolenoid.set(DoubleSolenoid.Value.kReverse);\n    }\n\n    \/**\n     * Query the current state of the hatch deploy piston. Returns True if extended, false otherwise.\n     *\/\n    public boolean isHatchPistonExtended()\n    {\n        return ((hatchDeploySolenoid.get() == DoubleSolenoid.Value.kForward) ? true : false);\n    }\n\n    \/**\n     *  Opens the latch gripper to lock the hatch onto the mechanism.\n     *\/\n    public void latchHatch()\n    {\n        hatchLatchSolenoid.set(DoubleSolenoid.Value.kForward);\n    }\n\n    \/**\n     *  Close the hatch gripper to allow the hatch to be expelled.\n     *\/\n    public void unlatchHatch()\n    {\n        hatchLatchSolenoid.set(DoubleSolenoid.Value.kReverse);\n    }\n\n    \/**\n     * Query the current state of the hatch latch. Returns True if locked, false otherwise.\n     *\/\n    public boolean isHatchLatched()\n    {\n        return ((hatchLatchSolenoid.get() == DoubleSolenoid.Value.kForward) ? 
true : false);\n    }\n\n    \/**\n     *  Detects whether there is a hatch on the mechanism\n     *  @return\n     *      - Type: boolean\n     *      - Will be true if hatch is on sensor, and false if no hatch is on the sensor\n     *\/\n    public boolean hatchIsPresent()\n    {\n        return hatchDetector.getIsTriggered();\n    }\n\n    \/**\n     * Disables (turns off) both channels of both solenoids used in the mechanism.\n     *\/\n    public void hatchDisable()\n    {\n        hatchLatchSolenoid.set(DoubleSolenoid.Value.kOff);\n        hatchDeploySolenoid.set(DoubleSolenoid.Value.kOff);\n    }\n}","avg_line_length":31.0,"max_line_length":131,"alphanum_fraction":0.6629180231}
{"size":70654,"ext":"java","lang":"Java","max_stars_count":1.0,"content":"package com.denizenscript.denizen.objects;\n\nimport com.denizenscript.denizen.Denizen;\nimport com.denizenscript.denizen.events.BukkitScriptEvent;\nimport com.denizenscript.denizen.npc.traits.*;\nimport com.denizenscript.denizen.scripts.commands.npc.EngageCommand;\nimport com.denizenscript.denizen.scripts.containers.core.AssignmentScriptContainer;\nimport com.denizenscript.denizen.scripts.containers.core.InteractScriptContainer;\nimport com.denizenscript.denizen.scripts.containers.core.InteractScriptHelper;\nimport com.denizenscript.denizen.scripts.triggers.AbstractTrigger;\nimport com.denizenscript.denizencore.flags.AbstractFlagTracker;\nimport com.denizenscript.denizencore.flags.FlaggableObject;\nimport com.denizenscript.denizencore.tags.ObjectTagProcessor;\nimport com.denizenscript.denizencore.utilities.Deprecations;\nimport com.denizenscript.denizencore.utilities.debugging.Debug;\nimport com.denizenscript.denizencore.objects.*;\nimport com.denizenscript.denizen.npc.DenizenNPCHelper;\nimport com.denizenscript.denizen.tags.core.NPCTagBase;\nimport com.denizenscript.denizencore.objects.core.ElementTag;\nimport com.denizenscript.denizencore.objects.core.ListTag;\nimport com.denizenscript.denizencore.objects.core.ScriptTag;\nimport com.denizenscript.denizencore.tags.Attribute;\nimport com.denizenscript.denizencore.tags.TagContext;\nimport com.denizenscript.denizencore.utilities.CoreUtilities;\nimport net.citizensnpcs.api.CitizensAPI;\nimport net.citizensnpcs.api.ai.Navigator;\nimport net.citizensnpcs.api.ai.TeleportStuckAction;\nimport net.citizensnpcs.api.event.DespawnReason;\nimport net.citizensnpcs.api.npc.NPC;\nimport net.citizensnpcs.api.npc.NPCRegistry;\nimport net.citizensnpcs.api.trait.Trait;\nimport net.citizensnpcs.api.trait.trait.Equipment;\nimport net.citizensnpcs.api.trait.trait.Owner;\nimport net.citizensnpcs.api.util.DataKey;\nimport net.citizensnpcs.api.util.MemoryDataKey;\nimport net.citizensnpcs.npc.ai.NPCHolder;\nimport net.citizensnpcs.npc.skin.SkinnableEntity;\nimport net.citizensnpcs.trait.*;\nimport net.citizensnpcs.trait.waypoint.*;\nimport net.citizensnpcs.util.Anchor;\nimport net.citizensnpcs.util.Pose;\nimport org.bukkit.*;\nimport org.bukkit.entity.*;\nimport org.bukkit.inventory.Inventory;\nimport org.bukkit.inventory.InventoryHolder;\n\nimport java.util.*;\n\npublic class NPCTag implements ObjectTag, Adjustable, InventoryHolder, EntityFormObject, FlaggableObject {\n\n    \/\/ <--[ObjectType]\n    \/\/ @name NPCTag\n    \/\/ @prefix n\n    \/\/ @base EntityTag\n    \/\/ @implements FlaggableObject\n    \/\/ @format\n    \/\/ The identity format for NPCs is the NPC's id number.\n    \/\/ For example, 'n@5'.\n    \/\/ Or, an NPC's id number, followed by a comma, followed by a custom registry name.\n    \/\/ For example 'n@12,specialnpcs'\n    \/\/\n    \/\/ @description\n    \/\/ An NPCTag represents an NPC configured through Citizens.\n    \/\/\n    \/\/ This object type is flaggable.\n    \/\/ Flags on this object type will be stored in the Citizens saves.yml file, under the 'denizen_flags' trait.\n    \/\/\n    \/\/ -->\n\n    public static NPCRegistry getRegistryByName(String name) {\n        NPCRegistry registry = CitizensAPI.getNamedNPCRegistry(name);\n        if (registry != null) {\n            return registry;\n        }\n        for (NPCRegistry possible : CitizensAPI.getNPCRegistries()) {\n            if (possible.getName().equals(name)) {\n                return possible;\n      
      }\n        }\n        return null;\n    }\n\n    public static NPCTag fromEntity(Entity entity) {\n        return new NPCTag(((NPCHolder) entity).getNPC());\n    }\n\n    @Deprecated\n    public static NPCTag valueOf(String string) {\n        return valueOf(string, null);\n    }\n\n    @Fetchable(\"n\")\n    public static NPCTag valueOf(String string, TagContext context) {\n        if (string == null) {\n            return null;\n        }\n        if (string.startsWith(\"n@\")) {\n            string = string.substring(\"n@\".length());\n        }\n        NPCRegistry registry;\n        int commaIndex = string.indexOf(',');\n        String idText = string;\n        if (commaIndex == -1) {\n            registry = CitizensAPI.getNPCRegistry();\n        }\n        else {\n            registry = getRegistryByName(string.substring(commaIndex + 1));\n            if (registry == null) {\n                if (context == null || context.showErrors()) {\n                    Debug.echoError(\"Unknown NPC registry for '\" + string + \"'.\");\n                }\n                return null;\n            }\n            idText = string.substring(0, commaIndex);\n        }\n        if (ArgumentHelper.matchesInteger(idText)) {\n            int id = Integer.parseInt(idText);\n            NPC npc = registry.getById(id);\n            if (npc != null) {\n                return new NPCTag(npc);\n            }\n            else if (context == null || context.showErrors()) {\n                Debug.echoError(\"NPC '\" + id + \"' does not exist in \" + registry.getName() + \".\");\n            }\n        }\n        return null;\n    }\n\n    public static boolean matches(String string) {\n        if (CoreUtilities.toLowerCase(string).startsWith(\"n@\")) {\n            return true;\n        }\n        if (valueOf(string, CoreUtilities.noDebugContext) != null) {\n            return true;\n        }\n        return false;\n    }\n\n    public boolean isValid() {\n        return npc != null && npc.getOwningRegistry().getById(npc.getId()) != null;\n    }\n\n    @Override\n    public AbstractFlagTracker getFlagTracker() {\n        return npc.getOrAddTrait(DenizenFlagsTrait.class).fullFlagData;\n    }\n\n    public boolean hasFlag(String flag) {\n        DenizenFlagsTrait flagTrait = npc.getTraitNullable(DenizenFlagsTrait.class);\n        if (flagTrait == null) {\n            return false;\n        }\n        return flagTrait.fullFlagData.hasFlag(flag);\n    }\n\n    @Override\n    public void reapplyTracker(AbstractFlagTracker tracker) {\n        \/\/ Nothing to do.\n    }\n\n    public NPC npc;\n\n    public NPCTag(NPC citizensNPC) {\n        this.npc = citizensNPC;\n    }\n\n    public NPC getCitizen() {\n        return npc;\n    }\n\n    public Entity getEntity() {\n        try {\n            return getCitizen().getEntity();\n        }\n        catch (NullPointerException ex) {\n            Debug.echoError(\"Uh oh! Denizen has encountered a NPE while trying to fetch an NPC entity. \" +\n                    \"Has this NPC been removed?\");\n            if (Debug.verbose) {\n                Debug.echoError(ex);\n            }\n            return null;\n        }\n    }\n\n    public LivingEntity getLivingEntity() {\n        try {\n            if (getCitizen().getEntity() instanceof LivingEntity) {\n                return (LivingEntity) getCitizen().getEntity();\n            }\n            else {\n                Debug.log(\"Uh oh! 
Tried to get the living entity of a non-living NPC!\");\n                return null;\n            }\n        }\n        catch (NullPointerException ex) {\n            Debug.echoError(\"Uh oh! Denizen has encountered a NPE while trying to fetch an NPC livingEntity. \" +\n                    \"Has this NPC been removed?\");\n            if (Debug.verbose) {\n                Debug.echoError(ex);\n            }\n            return null;\n        }\n    }\n\n    @Override\n    public EntityTag getDenizenEntity() {\n        try {\n            return new EntityTag(getCitizen().getEntity());\n        }\n        catch (NullPointerException ex) {\n            Debug.echoError(\"Uh oh! Denizen has encountered a NPE while trying to fetch an NPC EntityTag. \" +\n                    \"Has this NPC been removed?\");\n            if (Debug.verbose) {\n                Debug.echoError(ex);\n            }\n            return null;\n        }\n    }\n\n    @Override\n    public Inventory getInventory() {\n        return DenizenNPCHelper.getInventory(getCitizen());\n    }\n\n    public InventoryTag getDenizenInventory() {\n        return new InventoryTag(getInventory(), this);\n    }\n\n    public EntityType getEntityType() {\n        return getCitizen().getEntity().getType();\n    }\n\n    public Navigator getNavigator() {\n        return getCitizen().getNavigator();\n    }\n\n    public int getId() {\n        return npc.getId();\n    }\n\n    public String getName() {\n        return getCitizen().getName();\n    }\n\n    public List getInteractScripts() {\n        return InteractScriptHelper.getInteractScripts(this);\n    }\n\n    public List getInteractScripts(PlayerTag player, Class triggerType) {\n        return InteractScriptHelper.getInteractScripts(this, player, true, triggerType);\n    }\n\n    public List getInteractScriptsQuietly(PlayerTag player, Class triggerType) {\n        return InteractScriptHelper.getInteractScripts(this, player, false, triggerType);\n    }\n\n    public void destroy() {\n        getCitizen().destroy();\n    }\n\n    @Override\n    public LocationTag getLocation() {\n        if (isSpawned()) {\n            return new LocationTag(getEntity().getLocation());\n        }\n        else {\n            return new LocationTag(getCitizen().getStoredLocation());\n        }\n    }\n\n    public LocationTag getEyeLocation() {\n        if (isSpawned() && getCitizen().getEntity() instanceof LivingEntity) {\n            return new LocationTag(((LivingEntity) getCitizen().getEntity()).getEyeLocation());\n        }\n        else if (isSpawned()) {\n            return new LocationTag(getEntity().getLocation());\n        }\n        else {\n            return new LocationTag(getCitizen().getStoredLocation());\n        }\n    }\n\n    public World getWorld() {\n        if (isSpawned()) {\n            return getEntity().getWorld();\n        }\n        else {\n            return null;\n        }\n    }\n\n    @Override\n    public String toString() {\n        return identify();\n    }\n\n    public boolean isEngaged() {\n        return EngageCommand.getEngaged(getCitizen());\n    }\n\n    public boolean isSpawned() {\n        return npc.isSpawned();\n    }\n\n    public UUID getOwner() {\n        return getCitizen().getOrAddTrait(Owner.class).getOwnerId();\n    }\n\n    public Equipment getEquipmentTrait() {\n        return getCitizen().getOrAddTrait(Equipment.class);\n    }\n\n    public NicknameTrait getNicknameTrait() {\n        return getCitizen().getOrAddTrait(NicknameTrait.class);\n    }\n\n  
  public FishingTrait getFishingTrait() {\n        return getCitizen().getOrAddTrait(FishingTrait.class);\n    }\n\n    public net.citizensnpcs.api.trait.trait.Inventory getInventoryTrait() {\n        return getCitizen().getOrAddTrait(net.citizensnpcs.api.trait.trait.Inventory.class);\n    }\n\n    public PushableTrait getPushableTrait() {\n        return getCitizen().getOrAddTrait(PushableTrait.class);\n    }\n\n    public LookClose getLookCloseTrait() {\n        return getCitizen().getOrAddTrait(LookClose.class);\n    }\n\n    public TriggerTrait getTriggerTrait() {\n        return getCitizen().getOrAddTrait(TriggerTrait.class);\n    }\n\n    public ListTag action(String actionName, PlayerTag player, Map context) {\n        ListTag result = new ListTag();\n        if (getCitizen() != null) {\n            if (getCitizen().hasTrait(AssignmentTrait.class)) {\n                for (AssignmentScriptContainer container : getCitizen().getOrAddTrait(AssignmentTrait.class).containerCache) {\n                    if (container != null) {\n                        ListTag singleResult = Denizen.getInstance().npcHelper.getActionHandler().doAction(actionName, this, player, container, context);\n                        if (singleResult != null) {\n                            result.addAll(singleResult);\n                        }\n                    }\n                }\n            }\n        }\n        return result;\n    }\n\n    public ListTag action(String actionName, PlayerTag player) {\n        return action(actionName, player, null);\n    }\n\n    private String prefix = \"npc\";\n\n    @Override\n    public String getPrefix() {\n        return prefix;\n    }\n\n    @Override\n    public String debuggable() {\n        if (npc.getOwningRegistry() == CitizensAPI.getNPCRegistry()) {\n            return \"n@\" + npc.getId() + \" (\" + getName() + \")\";\n        }\n        else {\n            return \"n@\" + npc.getId() + \",\" + npc.getOwningRegistry().getName() + \" (\" + getName() + \")\";\n        }\n    }\n\n    @Override\n    public boolean isUnique() {\n        return true;\n    }\n\n    @Override\n    public String getObjectType() {\n        return \"NPC\";\n    }\n\n    @Override\n    public String identify() {\n        if (npc.getOwningRegistry() == CitizensAPI.getNPCRegistry()) {\n            return \"n@\" + npc.getId();\n        }\n        else {\n            return \"n@\" + npc.getId() + \",\" + npc.getOwningRegistry().getName();\n        }\n    }\n\n    @Override\n    public String identifySimple() {\n        return identify();\n    }\n\n    @Override\n    public NPCTag setPrefix(String prefix) {\n        this.prefix = prefix;\n        return this;\n    }\n\n    @Override\n    public boolean equals(Object o) {\n        if (o == null) {\n            return false;\n        }\n        if (!(o instanceof NPCTag)) {\n            return false;\n        }\n        return getId() == ((NPCTag) o).getId();\n    }\n\n    @Override\n    public int hashCode() {\n        return getId();\n    }\n\n    public static void registerTags() {\n\n        AbstractFlagTracker.registerFlagHandlers(tagProcessor);\n\n        \/\/ Defined in EntityTag\n        tagProcessor.registerTag(ElementTag.class, \"is_npc\", (attribute, object) -> {\n            return new ElementTag(true);\n        });\n\n        \/\/ Defined in EntityTag\n        tagProcessor.registerTag(ObjectTag.class, \"location\", (attribute, object) -> {\n            if (attribute.startsWith(\"previous_location\", 2)) {\n                
attribute.fulfill(1);\n                Deprecations.npcPreviousLocationTag.warn(attribute.context);\n                return NPCTagBase.previousLocations.get(object.getId());\n            }\n            if (object.isSpawned()) {\n                return new EntityTag(object).doLocationTag(attribute);\n            }\n            return object.getLocation();\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns LocationTag\n        \/\/ @description\n        \/\/ Returns the NPC's previous navigated location.\n        \/\/ -->\n        tagProcessor.registerTag(LocationTag.class, \"previous_location\", (attribute, object) -> {\n            return NPCTagBase.previousLocations.get(object.getId());\n        });\n\n        \/\/ Defined in EntityTag\n        tagProcessor.registerTag(LocationTag.class, \"eye_location\", (attribute, object) -> {\n            return object.getEyeLocation();\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag(Boolean)\n        \/\/ @description\n        \/\/ Returns true if the NPC has a nickname.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"has_nickname\", (attribute, object) -> {\n            NPC citizen = object.getCitizen();\n            return new ElementTag(citizen.hasTrait(NicknameTrait.class) && citizen.getOrAddTrait(NicknameTrait.class).hasNickname());\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag(Boolean)\n        \/\/ @description\n        \/\/ Returns true if the NPC is sitting. Relates to <@link command sit>.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"is_sitting\", (attribute, object) -> {\n            NPC citizen = object.getCitizen();\n            return new ElementTag(citizen.hasTrait(SittingTrait.class) && citizen.getOrAddTrait(SittingTrait.class).isSitting());\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag(Boolean)\n        \/\/ @description\n        \/\/ Returns true if the NPC is sleeping. Relates to <@link command sleep>.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"is_sleeping\", (attribute, object) -> {\n            NPC citizen = object.getCitizen();\n            return new ElementTag(citizen.hasTrait(SleepingTrait.class) && citizen.getOrAddTrait(SleepingTrait.class).isSleeping());\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag\n        \/\/ @description\n        \/\/ Returns the NPC's display name, as set by the Nickname trait (or the default NPC name).\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"nickname\", (attribute, object) -> {\n            return new ElementTag(object.getCitizen().hasTrait(NicknameTrait.class) ? object.getCitizen().getOrAddTrait(NicknameTrait.class)\n                    .getNickname() : object.getName());\n        });\n\n        \/\/ Documented in EntityTag\n        tagProcessor.registerTag(ElementTag.class, \"name\", (attribute, object) -> {\n            if (attribute.startsWith(\"nickname\", 2)) {\n                Deprecations.npcNicknameTag.warn(attribute.context);\n                attribute.fulfill(1);\n                return new ElementTag(object.getCitizen().hasTrait(NicknameTrait.class) ? 
object.getCitizen().getOrAddTrait(NicknameTrait.class)\n                        .getNickname() : object.getName());\n            }\n            return new ElementTag(object.getName());\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ListTag\n        \/\/ @description\n        \/\/ Returns a list of all of the NPC's traits.\n        \/\/ -->\n        tagProcessor.registerTag(ListTag.class, \"traits\", (attribute, object) -> {\n            List list = new ArrayList<>();\n            for (Trait trait : object.getCitizen().getTraits()) {\n                list.add(trait.getName());\n            }\n            return new ListTag(list);\n        }, \"list_traits\");\n\n        \/\/ <--[tag]\n        \/\/ @attribute ]>\n        \/\/ @returns ElementTag(Boolean)\n        \/\/ @description\n        \/\/ Returns whether the NPC has a specified trait.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"has_trait\", (attribute, object) -> {\n            if (attribute.hasParam()) {\n                Class trait = CitizensAPI.getTraitFactory().getTraitClass(attribute.getParam());\n                if (trait != null) {\n                    return new ElementTag(object.getCitizen().hasTrait(trait));\n                }\n            }\n            return null;\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag(Boolean)\n        \/\/ @description\n        \/\/ Returns whether the NPC is pushable.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"pushable\", (attribute, object) -> {\n            return new ElementTag(object.getPushableTrait().isPushable());\n        }, \"is_pushable\");\n\n        \/\/ <--[tag]\n        \/\/ @attribute ]>\n        \/\/ @returns ElementTag(Boolean)\n        \/\/ @description\n        \/\/ Returns whether the NPC has a specified trigger.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"has_trigger\", (attribute, object) -> {\n            if (!attribute.hasParam()) {\n                return null;\n            }\n            if (!object.getCitizen().hasTrait(TriggerTrait.class)) {\n                return new ElementTag(false);\n            }\n            TriggerTrait trait = object.getCitizen().getOrAddTrait(TriggerTrait.class);\n            return new ElementTag(trait.hasTrigger(attribute.getParam()));\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag(Boolean)\n        \/\/ @description\n        \/\/ Returns whether the NPC has anchors assigned.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"has_anchors\", (attribute, object) -> {\n            return (new ElementTag(object.getCitizen().getOrAddTrait(Anchors.class).getAnchors().size() > 0));\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ListTag\n        \/\/ @description\n        \/\/ Returns a list of anchor names currently assigned to the NPC.\n        \/\/ -->\n        tagProcessor.registerTag(ListTag.class, \"list_anchors\", (attribute, object) -> {\n            ListTag list = new ListTag();\n            for (Anchor anchor : object.getCitizen().getOrAddTrait(Anchors.class).getAnchors()) {\n                list.add(anchor.getName());\n            }\n            return list;\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute ]>\n        \/\/ @returns LocationTag\n        \/\/ @description\n        \/\/ Returns the location associated with the specified 
anchor, or null if it doesn't exist.\n        \/\/ -->\n        tagProcessor.registerTag(ObjectTag.class, \"anchor\", (attribute, object) -> {\n            Anchors trait = object.getCitizen().getOrAddTrait(Anchors.class);\n            if (attribute.hasParam()) {\n                Anchor anchor = trait.getAnchor(attribute.getParam());\n                    if (anchor != null) {\n                        return new LocationTag(anchor.getLocation());\n                    }\n                    else {\n                        attribute.echoError(\"NPC Anchor '\" + attribute.getParam() + \"' is not defined.\");\n                        return null;\n                    }\n            }\n            else if (attribute.startsWith(\"list\", 2)) {\n                attribute.fulfill(1);\n                Deprecations.npcAnchorListTag.warn(attribute.context);\n                ListTag list = new ListTag();\n                for (Anchor anchor : trait.getAnchors()) {\n                    list.add(anchor.getName());\n                }\n                return list;\n            }\n            else {\n                attribute.echoError(\"npc.anchor[...] tag must have an input.\");\n            }\n            return null;\n        }, \"anchors\");\n\n        \/\/ <--[tag]\n        \/\/ @attribute ]>\n        \/\/ @returns ElementTag\n        \/\/ @description\n        \/\/ Returns the specified constant from the NPC.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"constant\", (attribute, object) -> {\n            if (attribute.hasParam()) {\n                if (object.getCitizen().hasTrait(ConstantsTrait.class)\n                        && object.getCitizen().getOrAddTrait(ConstantsTrait.class).getConstant(attribute.getParam()) != null) {\n                    return new ElementTag(object.getCitizen().getOrAddTrait(ConstantsTrait.class)\n                            .getConstant(attribute.getParam()));\n                }\n                else {\n                    return null;\n                }\n            }\n            return null;\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute ]>\n        \/\/ @returns ElementTag(Boolean)\n        \/\/ @description\n        \/\/ Returns true if the NPC has the specified pose, otherwise returns false.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"has_pose\", (attribute, object) -> {\n            if (attribute.hasParam()) {\n                return new ElementTag(object.getCitizen().getOrAddTrait(Poses.class).hasPose(attribute.getParam()));\n            }\n            else {\n                return null;\n            }\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute ]>\n        \/\/ @returns LocationTag\n        \/\/ @description\n        \/\/ Returns the pose as a LocationTag with x, y, and z set to 0, and the world set to the first\n        \/\/ possible available world Bukkit knows about.\n        \/\/ -->\n        tagProcessor.registerTag(LocationTag.class, \"pose\", (attribute, object) -> {\n            if (attribute.hasParam()) {\n                Pose pose = object.getCitizen().getOrAddTrait(Poses.class).getPose(attribute.getParam());\n                return new LocationTag(org.bukkit.Bukkit.getWorlds().get(0), 0, 0, 0, pose.getYaw(), pose.getPitch());\n            }\n            else {\n                return null;\n            }\n        }, \"get_pose\");\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns NPCTag\n        \/\/ @description\n        \/\/ Returns the 
NPCTag of a hologram attached to this NPC as its nameplate (if any).\n        \/\/ Note that this can regenerate at any time.\n        \/\/ -->\n        tagProcessor.registerTag(ObjectTag.class, \"name_hologram_npc\", (attribute, object) -> {\n            if (!object.getCitizen().hasTrait(HologramTrait.class)) {\n                return null;\n            }\n            HologramTrait hologram = object.getCitizen().getTraitNullable(HologramTrait.class);\n            Entity entity = hologram.getNameEntity();\n            if (entity == null) {\n                return null;\n            }\n            return new EntityTag(entity).getDenizenObject();\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ListTag(NPCTag)\n        \/\/ @description\n        \/\/ Returns the list of hologram NPCs attached to an NPC (if any).\n        \/\/ Note that these can regenerate at any time.\n        \/\/ -->\n        tagProcessor.registerTag(ListTag.class, \"hologram_npcs\", (attribute, object) -> {\n            if (!object.getCitizen().hasTrait(HologramTrait.class)) {\n                return null;\n            }\n            HologramTrait hologram = object.getCitizen().getTraitNullable(HologramTrait.class);\n            Collection stands = hologram.getHologramEntities();\n            if (stands == null || stands.isEmpty()) {\n                return null;\n            }\n            ListTag output = new ListTag();\n            for (ArmorStand stand : stands) {\n                output.addObject(new EntityTag(stand).getDenizenObject());\n            }\n            return output;\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ListTag\n        \/\/ @mechanism NPCTag.hologram_lines\n        \/\/ @description\n        \/\/ Returns the list of hologram lines attached to an NPC.\n        \/\/ -->\n        tagProcessor.registerTag(ListTag.class, \"hologram_lines\", (attribute, object) -> {\n            if (!object.getCitizen().hasTrait(HologramTrait.class)) {\n                return null;\n            }\n            HologramTrait hologram = object.getCitizen().getTraitNullable(HologramTrait.class);\n            return new ListTag(hologram.getLines());\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag\n        \/\/ @mechanism NPCTag.hologram_direction\n        \/\/ @description\n        \/\/ Returns the direction of an NPC's hologram as \"BOTTOM_UP\" or \"TOP_DOWN\".\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"hologram_direction\", (attribute, object) -> {\n            if (!object.getCitizen().hasTrait(HologramTrait.class)) {\n                return null;\n            }\n            HologramTrait hologram = object.getCitizen().getTraitNullable(HologramTrait.class);\n            return new ElementTag(hologram.getDirection().name());\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag(Decimal)\n        \/\/ @mechanism NPCTag.hologram_line_height\n        \/\/ @description\n        \/\/ Returns the line height for an NPC's hologram. 
Can be -1, indicating a default value should be used.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"hologram_line_height\", (attribute, object) -> {\n            if (!object.getCitizen().hasTrait(HologramTrait.class)) {\n                return null;\n            }\n            HologramTrait hologram = object.getCitizen().getTraitNullable(HologramTrait.class);\n            return new ElementTag(hologram.getLineHeight());\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag(Boolean)\n        \/\/ @description\n        \/\/ Returns whether the NPC is currently sneaking. Only works for player-type NPCs.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"is_sneaking\", (attribute, object) -> {\n            \/\/ Only spawned, player-type NPCs can report a sneaking state.\n            if (!object.isSpawned() || !(object.getEntity() instanceof Player)) {\n                return null;\n            }\n            return new ElementTag(((Player) object.getEntity()).isSneaking());\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag(Boolean)\n        \/\/ @description\n        \/\/ Returns whether the NPC is currently engaged.\n        \/\/ See <@link command engage>\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"engaged\", (attribute, object) -> {\n            return new ElementTag(object.isEngaged());\n        }, \"is_engaged\");\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag(Boolean)\n        \/\/ @description\n        \/\/ Returns whether the NPC is currently invulnerable.\n        \/\/ See <@link command vulnerable>\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"invulnerable\", (attribute, object) -> {\n            return new ElementTag(object.getCitizen().data().get(NPC.DEFAULT_PROTECTED_METADATA, true));\n        }, \"vulnerable\");\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag(Number)\n        \/\/ @description\n        \/\/ Returns the NPC's ID number.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"id\", (attribute, object) -> {\n            return new ElementTag(object.getId());\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns PlayerTag\n        \/\/ @mechanism NPCTag.owner\n        \/\/ @description\n        \/\/ Returns the owner of the NPC as a PlayerTag, if any.\n        \/\/ -->\n        tagProcessor.registerTag(ObjectTag.class, \"owner\", (attribute, object) -> {\n            UUID owner = object.getOwner();\n            if (owner == null) {\n                return null;\n            }\n            OfflinePlayer player = Bukkit.getOfflinePlayer(owner);\n            if (player.isOnline() || player.hasPlayedBefore()) {\n                return new PlayerTag(player);\n            }\n            return null;\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag(Boolean)\n        \/\/ @mechanism NPCTag.skin\n        \/\/ @description\n        \/\/ Returns whether the NPC has a custom skin.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"has_skin\", (attribute, object) -> {\n            return new ElementTag(object.getCitizen().hasTrait(SkinTrait.class) && object.getCitizen().getOrAddTrait(SkinTrait.class).getSkinName() != null);\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag\n        \/\/ @mechanism NPCTag.skin_blob\n        \/\/ 
@description\n        \/\/ Returns the NPC's custom skin blob, if any.\n        \/\/ In the format: \"texture;signature\" (two values separated by a semicolon).\n        \/\/ See also <@link language Player Entity Skins (Skin Blobs)>.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"skin_blob\", (attribute, object) -> {\n            if (object.getCitizen().hasTrait(SkinTrait.class)) {\n                SkinTrait skin = object.getCitizen().getOrAddTrait(SkinTrait.class);\n                String tex = skin.getTexture();\n                String sign = \"\";\n                if (skin.getSignature() != null) {\n                    sign = \";\" + skin.getSignature();\n                }\n                return new ElementTag(tex + sign);\n            }\n            return null;\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag\n        \/\/ @description\n        \/\/ Returns the NPC's current skin blob, formatted for input to a Player Skull item.\n        \/\/ In the format: \"UUID|Texture\" (two values separated by pipes).\n        \/\/ See also <@link language Player Entity Skins (Skin Blobs)>.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"skull_skin\", (attribute, object) -> {\n            if (!object.getCitizen().hasTrait(SkinTrait.class)) {\n                return null;\n            }\n            SkinTrait skin = object.getCitizen().getOrAddTrait(SkinTrait.class);\n            return new ElementTag(skin.getSkinName() + \"|\" + skin.getTexture());\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag\n        \/\/ @mechanism NPCTag.skin\n        \/\/ @description\n        \/\/ Returns the NPC's custom skin, if any.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"skin\", (attribute, object) -> {\n            if (object.getCitizen().hasTrait(SkinTrait.class)) {\n                return new ElementTag(object.getCitizen().getOrAddTrait(SkinTrait.class).getSkinName());\n            }\n            return null;\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag(Boolean)\n        \/\/ @mechanism NPCTag.auto_update_skin\n        \/\/ @description\n        \/\/ Returns whether the NPC is set to automatically update skins from name.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"auto_update_skin\", (attribute, object) -> {\n            if (object.getCitizen().hasTrait(SkinTrait.class)) {\n                return new ElementTag(object.getCitizen().getOrAddTrait(SkinTrait.class).shouldUpdateSkins());\n            }\n            return null;\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns InventoryTag\n        \/\/ @description\n        \/\/ Returns the InventoryTag of the NPC.\n        \/\/ -->\n        tagProcessor.registerTag(InventoryTag.class, \"inventory\", (attribute, object) -> {\n            return object.getDenizenInventory();\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag(Boolean)\n        \/\/ @description\n        \/\/ Returns whether the NPC is spawned.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"is_spawned\", (attribute, object) -> {\n            return new ElementTag(object.isSpawned());\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag(Boolean)\n        \/\/ @description\n        \/\/ Returns 
whether the NPC is protected.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"is_protected\", (attribute, object) -> {\n            return new ElementTag(object.getCitizen().isProtected());\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag(Boolean)\n        \/\/ @mechanism NPCTag.lookclose\n        \/\/ @description\n        \/\/ Returns whether the NPC has lookclose enabled.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"lookclose\", (attribute, object) -> {\n            NPC citizen = object.getCitizen();\n            if (citizen.hasTrait(LookClose.class)) {\n                \/\/ There is no method to check if the NPC has LookClose enabled...\n                \/\/ LookClose.toString() returns \"LookClose{\" + enabled + \"}\"\n                String lookclose = citizen.getOrAddTrait(LookClose.class).toString();\n                lookclose = lookclose.substring(10, lookclose.length() - 1);\n                return new ElementTag(Boolean.valueOf(lookclose));\n            }\n            return new ElementTag(false);\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag(Boolean)\n        \/\/ @mechanism NPCTag.controllable\n        \/\/ @description\n        \/\/ Returns whether the NPC has controllable enabled.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"controllable\", (attribute, object) -> {\n            if (object.getCitizen().hasTrait(Controllable.class)) {\n                return new ElementTag(object.getCitizen().getOrAddTrait(Controllable.class).isEnabled());\n            }\n            return new ElementTag(false);\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag(Boolean)\n        \/\/ @mechanism NPCTag.targetable\n        \/\/ @description\n        \/\/ Returns whether the NPC is targetable.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"targetable\", (attribute, object) -> {\n            boolean targetable = object.getCitizen().data().get(NPC.TARGETABLE_METADATA, object.getCitizen().data().get(NPC.DEFAULT_PROTECTED_METADATA, true));\n            return new ElementTag(targetable);\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag(Boolean)\n        \/\/ @mechanism NPCTag.teleport_on_stuck\n        \/\/ @description\n        \/\/ Returns whether the NPC teleports when it is stuck.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"teleport_on_stuck\", (attribute, object) -> {\n            return new ElementTag(object.getNavigator().getDefaultParameters().stuckAction() == TeleportStuckAction.INSTANCE);\n        });\n\n        tagProcessor.registerTag(ElementTag.class, \"has_script\", (attribute, object) -> {\n            Deprecations.hasScriptTags.warn(attribute.context);\n            NPC citizen = object.getCitizen();\n            return new ElementTag(citizen.hasTrait(AssignmentTrait.class));\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ScriptTag\n        \/\/ @deprecated Use 'NPCTag.scripts' (plural) instead.\n        \/\/ @description\n        \/\/ Deprecated variant of <@link tag NPCTag.scripts>.\n        \/\/ -->\n        tagProcessor.registerTag(ScriptTag.class, \"script\", (attribute, object) -> {\n            Deprecations.npcScriptSingle.warn(attribute.context);\n            NPC citizen = object.getCitizen();\n            if 
(!citizen.hasTrait(AssignmentTrait.class)) {\n                return null;\n            }\n            else {\n                for (AssignmentScriptContainer container : citizen.getOrAddTrait(AssignmentTrait.class).containerCache) {\n                    if (container != null) {\n                        return new ScriptTag(container);\n                    }\n                }\n                return null;\n            }\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ListTag(ScriptTag)\n        \/\/ @description\n        \/\/ Returns a list of all assignment scripts on the NPC. Returns null if none.\n        \/\/ -->\n        tagProcessor.registerTag(ListTag.class, \"scripts\", (attribute, object) -> {\n            NPC citizen = object.getCitizen();\n            if (!citizen.hasTrait(AssignmentTrait.class)) {\n                return null;\n            }\n            else {\n                ListTag result = new ListTag();\n                for (AssignmentScriptContainer container : citizen.getOrAddTrait(AssignmentTrait.class).containerCache) {\n                    if (container != null) {\n                       result.addObject(new ScriptTag(container));\n                    }\n                }\n                return result;\n            }\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag(Decimal)\n        \/\/ @mechanism NPCTag.distance_margin\n        \/\/ @description\n        \/\/ Returns the NPC's current pathfinding distance margin. That is, how close it needs to get to its destination (in block-lengths).\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"distance_margin\", (attribute, object) -> {\n            return new ElementTag(object.getNavigator().getDefaultParameters().distanceMargin());\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag(Decimal)\n        \/\/ @mechanism NPCTag.path_distance_margin\n        \/\/ @description\n        \/\/ Returns the NPC's current pathfinding distance margin. 
That is, how close it needs to get to individual points along its path.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"path_distance_margin\", (attribute, object) -> {\n            return new ElementTag(object.getNavigator().getDefaultParameters().pathDistanceMargin());\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag(Boolean)\n        \/\/ @description\n        \/\/ Returns whether the NPC is currently navigating.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"is_navigating\", (attribute, object) -> {\n            return new ElementTag(object.getNavigator().isNavigating());\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag(Decimal)\n        \/\/ @mechanism NPCTag.speed\n        \/\/ @description\n        \/\/ Returns the current speed of the NPC.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"speed\", (attribute, object) -> {\n            return new ElementTag(object.getNavigator().getLocalParameters().speed());\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag(Decimal)\n        \/\/ @mechanism NPCTag.range\n        \/\/ @description\n        \/\/ Returns the NPC's current maximum pathfinding range.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"range\", (attribute, object) -> {\n            return new ElementTag(object.getNavigator().getLocalParameters().range());\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag(Decimal)\n        \/\/ @mechanism NPCTag.attack_range\n        \/\/ @description\n        \/\/ Returns the NPC's current navigator attack range limit.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"attack_range\", (attribute, object) -> {\n            return new ElementTag(object.getNavigator().getLocalParameters().attackRange());\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag\n        \/\/ @description\n        \/\/ Returns the NPC's current navigator attack strategy.\n        \/\/ Not related to Sentinel combat.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"attack_strategy\", (attribute, object) -> {\n            return new ElementTag(object.getNavigator().getLocalParameters().attackStrategy().toString());\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag(Decimal)\n        \/\/ @description\n        \/\/ Returns the NPC's current movement speed modifier (a multiplier applied over their base speed).\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"speed_modifier\", (attribute, object) -> {\n            return new ElementTag(object.getNavigator().getLocalParameters().speedModifier());\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag(Decimal)\n        \/\/ @description\n        \/\/ Returns the NPC's base navigation speed.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"base_speed\", (attribute, object) -> {\n            return new ElementTag(object.getNavigator().getLocalParameters().baseSpeed());\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag(Boolean)\n        \/\/ @description\n        \/\/ Returns whether the NPC will avoid water.\n        \/\/ -->\n        
tagProcessor.registerTag(ElementTag.class, \"avoid_water\", (attribute, object) -> {\n            return new ElementTag(object.getNavigator().getLocalParameters().avoidWater());\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns LocationTag\n        \/\/ @description\n        \/\/ Returns the location the NPC is currently navigating towards (if any).\n        \/\/ -->\n        tagProcessor.registerTag(LocationTag.class, \"target_location\", (attribute, object) -> {\n            if (object.getNavigator().getTargetAsLocation() == null) {\n                return null;\n            }\n            return new LocationTag(object.getNavigator().getTargetAsLocation());\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns LocationTag\n        \/\/ @mechanism NPCTag.navigator_look_at\n        \/\/ @description\n        \/\/ Returns the location the NPC will currently look at while moving, if any.\n        \/\/ -->\n        tagProcessor.registerTag(LocationTag.class, \"navigator_look_at\", (attribute, object) -> {\n            if (object.getNavigator().getLocalParameters().lookAtFunction() == null) {\n                return null;\n            }\n            Location res = object.getNavigator().getLocalParameters().lookAtFunction().apply(object.getNavigator());\n            if (res == null) {\n                return null;\n            }\n            return new LocationTag(res);\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag(Boolean)\n        \/\/ @description\n        \/\/ Returns whether the NPC is currently targeting an entity for the Citizens internal punching pathfinder.\n        \/\/ Not compatible with Sentinel.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"is_fighting\", (attribute, object) -> {\n            return new ElementTag(object.getNavigator().getEntityTarget() != null && object.getNavigator().getEntityTarget().isAggressive());\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag\n        \/\/ @description\n        \/\/ Returns the entity type of the NPC's current navigation target (if any).\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"target_type\", (attribute, object) -> {\n            if (object.getNavigator().getTargetType() == null) {\n                return null;\n            }\n            return new ElementTag(object.getNavigator().getTargetType().toString());\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns EntityTag\n        \/\/ @description\n        \/\/ Returns the entity being targeted by the NPC's current navigation (if any).\n        \/\/ -->\n        tagProcessor.registerTag(EntityTag.class, \"target_entity\", (attribute, object) -> {\n            if (object.getNavigator().getEntityTarget() == null || object.getNavigator().getEntityTarget().getTarget() == null) {\n                return null;\n            }\n            return new EntityTag(object.getNavigator().getEntityTarget().getTarget());\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ElementTag\n        \/\/ @description\n        \/\/ Returns the name of the registry this NPC came from.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"registry_name\", (attribute, object) -> {\n            return new ElementTag(object.getCitizen().getOwningRegistry().getName());\n        });\n\n        \/\/ <--[tag]\n  
      \/\/ @attribute ]>\n        \/\/ @returns ElementTag\n        \/\/ @description\n        \/\/ Returns the value of a Citizens NPC metadata key.\n        \/\/ -->\n        tagProcessor.registerTag(ElementTag.class, \"citizens_data\", (attribute, object) -> {\n            if (!attribute.hasParam()) {\n                return null;\n            }\n            Object val = object.getCitizen().data().get(attribute.getParam());\n            if (val == null) {\n                return null;\n            }\n            return new ElementTag(val.toString());\n        });\n\n        \/\/ <--[tag]\n        \/\/ @attribute \n        \/\/ @returns ListTag\n        \/\/ @description\n        \/\/ Returns a list of Citizens NPC metadata keys.\n        \/\/ -->\n        tagProcessor.registerTag(ListTag.class, \"citizens_data_keys\", (attribute, object) -> {\n            DataKey holder = new MemoryDataKey();\n            object.getCitizen().data().saveTo(holder);\n            ListTag result = new ListTag();\n            for (DataKey key : holder.getSubKeys()) {\n                result.addObject(new ElementTag(key.name(), true));\n            }\n            return result;\n        });\n\n        tagProcessor.registerTag(NPCTag.class, \"navigator\", (attribute, object) -> {\n            Deprecations.oldNPCNavigator.warn(attribute.context);\n            return object;\n        });\n    }\n\n    public static ObjectTagProcessor tagProcessor = new ObjectTagProcessor<>();\n\n    @Override\n    public ObjectTag getObjectAttribute(Attribute attribute) {\n        return tagProcessor.getObjectAttribute(this, attribute);\n    }\n\n    @Override\n    public ObjectTag getNextObjectTypeDown() {\n        if (getEntity() != null) {\n            return new EntityTag(this);\n        }\n        return new ElementTag(identify());\n    }\n\n    public void applyProperty(Mechanism mechanism) {\n        Debug.echoError(\"Cannot apply properties to an NPC!\");\n    }\n\n    @Override\n    public void adjust(Mechanism mechanism) {\n\n        \/\/ TODO: For all the mechanism tags, add the @Mechanism link!\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name set_assignment\n        \/\/ @input ScriptTag\n        \/\/ @description\n        \/\/ Sets the NPC's assignment script. 
Equivalent to 'clear_assignments' + 'add_assignment'.\n        \/\/ @tags\n        \/\/ \n        \/\/ -->\n        if (mechanism.matches(\"set_assignment\") && mechanism.requireObject(ScriptTag.class)) {\n            AssignmentTrait trait = getCitizen().getOrAddTrait(AssignmentTrait.class);\n            trait.clearAssignments(null);\n            trait.addAssignmentScript((AssignmentScriptContainer) mechanism.valueAsType(ScriptTag.class).getContainer(), null);\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name add_assignment\n        \/\/ @input ScriptTag\n        \/\/ @description\n        \/\/ Adds an assignment script to the NPC.\n        \/\/ @tags\n        \/\/ \n        \/\/ -->\n        if (mechanism.matches(\"add_assignment\") && mechanism.requireObject(ScriptTag.class)) {\n            getCitizen().getOrAddTrait(AssignmentTrait.class).addAssignmentScript((AssignmentScriptContainer) mechanism.valueAsType(ScriptTag.class).getContainer(), null);\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name remove_assignment\n        \/\/ @input ScriptTag\n        \/\/ @description\n        \/\/ Removes an assignment script from the NPC.\n        \/\/ @tags\n        \/\/ \n        \/\/ -->\n        if (mechanism.matches(\"remove_assignment\")) {\n            if (npc.hasTrait(AssignmentTrait.class)) {\n                if (mechanism.hasValue()) {\n                    AssignmentTrait trait = getCitizen().getOrAddTrait(AssignmentTrait.class);\n                    trait.removeAssignmentScript(mechanism.getValue().asString(), null);\n                    trait.checkAutoRemove();\n                }\n                else {\n                    Deprecations.assignmentRemove.warn(mechanism.context);\n                    getCitizen().getOrAddTrait(AssignmentTrait.class).clearAssignments(null);\n                    npc.removeTrait(AssignmentTrait.class);\n                }\n            }\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name clear_assignments\n        \/\/ @input None\n        \/\/ @description\n        \/\/ Removes all the NPC's assignment scripts.\n        \/\/ @tags\n        \/\/ \n        \/\/ -->\n        if (mechanism.matches(\"clear_assignments\")) {\n            if (npc.hasTrait(AssignmentTrait.class)) {\n                getCitizen().getOrAddTrait(AssignmentTrait.class).clearAssignments(null);\n                npc.removeTrait(AssignmentTrait.class);\n            }\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name hologram_lines\n        \/\/ @input ListTag\n        \/\/ @description\n        \/\/ Sets the NPC's hologram line list.\n        \/\/ @tags\n        \/\/ \n        \/\/ -->\n        if (mechanism.matches(\"hologram_lines\") && mechanism.requireObject(ListTag.class)) {\n            HologramTrait hologram = getCitizen().getOrAddTrait(HologramTrait.class);\n            hologram.clear();\n            for (String str : mechanism.valueAsType(ListTag.class)) {\n                hologram.addLine(str);\n            }\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name hologram_direction\n        \/\/ @input ElementTag\n        \/\/ @description\n        \/\/ Sets the NPC's hologram direction, as either BOTTOM_UP or TOP_DOWN.\n        \/\/ @tags\n        \/\/ \n        \/\/ -->\n        if (mechanism.matches(\"hologram_direction\") && mechanism.requireEnum(false, 
HologramTrait.HologramDirection.values())) {\n            HologramTrait hologram = getCitizen().getOrAddTrait(HologramTrait.class);\n            hologram.setDirection(HologramTrait.HologramDirection.valueOf(mechanism.getValue().asString().toUpperCase()));\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name hologram_line_height\n        \/\/ @input ElementTag(Decimal)\n        \/\/ @description\n        \/\/ Sets the NPC's hologram line height. Can be -1 to indicate a default value.\n        \/\/ @tags\n        \/\/ \n        \/\/ -->\n        if (mechanism.matches(\"hologram_line_height\") && mechanism.requireDouble()) {\n            HologramTrait hologram = getCitizen().getOrAddTrait(HologramTrait.class);\n            hologram.setLineHeight(mechanism.getValue().asDouble());\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name set_nickname\n        \/\/ @input ElementTag\n        \/\/ @description\n        \/\/ Sets the NPC's nickname.\n        \/\/ @tags\n        \/\/ \n        \/\/ -->\n        if (mechanism.matches(\"set_nickname\")) {\n            getNicknameTrait().setNickname(mechanism.getValue().asString());\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name remove_nickname\n        \/\/ @input None\n        \/\/ @description\n        \/\/ Removes the NPC's nickname.\n        \/\/ @tags\n        \/\/ \n        \/\/ -->\n        if (mechanism.matches(\"remove_nickname\")) {\n            getNicknameTrait().removeNickname();\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name set_entity_type\n        \/\/ @input EntityTag\n        \/\/ @description\n        \/\/ Sets the NPC's entity type.\n        \/\/ @tags\n        \/\/ \n        \/\/ -->\n        if (mechanism.matches(\"set_entity_type\") && mechanism.requireObject(EntityTag.class)) {\n            getCitizen().setBukkitEntityType(mechanism.valueAsType(EntityTag.class).getBukkitEntityType());\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name name\n        \/\/ @input ElementTag\n        \/\/ @description\n        \/\/ Sets the name of the NPC.\n        \/\/ @tags\n        \/\/ \n        \/\/ -->\n        if (mechanism.matches(\"name\") || mechanism.matches(\"set_name\")) {\n            getCitizen().setName(mechanism.getValue().asString().length() > 64 ? 
mechanism.getValue().asString().substring(0, 64) : mechanism.getValue().asString());\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name owner\n        \/\/ @input PlayerTag\n        \/\/ @description\n        \/\/ Sets the owner of the NPC.\n        \/\/ @tags\n        \/\/ \n        \/\/ -->\n        if (mechanism.matches(\"owner\")) {\n            if (PlayerTag.matches(mechanism.getValue().asString())) {\n                getCitizen().getOrAddTrait(Owner.class).setOwner(mechanism.valueAsType(PlayerTag.class).getPlayerEntity());\n            }\n            else {\n                getCitizen().getOrAddTrait(Owner.class).setOwner(mechanism.getValue().asString());\n            }\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name skin_blob\n        \/\/ @input ElementTag\n        \/\/ @description\n        \/\/ Sets the skin blob of an NPC, in the form of \"texture;signature;name\".\n        \/\/ Call with no value to clear the custom skin value.\n        \/\/ See also <@link language Player Entity Skins (Skin Blobs)>.\n        \/\/ @tags\n        \/\/ \n        \/\/ -->\n        if (mechanism.matches(\"skin_blob\")) {\n            if (!mechanism.hasValue()) {\n                if (getCitizen().hasTrait(SkinTrait.class)) {\n                    getCitizen().getOrAddTrait(SkinTrait.class).clearTexture();\n                    if (getCitizen().isSpawned()) {\n                        getCitizen().despawn(DespawnReason.PENDING_RESPAWN);\n                        getCitizen().spawn(getCitizen().getStoredLocation());\n                    }\n                }\n            }\n            else {\n                SkinTrait skinTrait = getCitizen().getOrAddTrait(SkinTrait.class);\n                String[] dat = mechanism.getValue().asString().split(\";\");\n                if (dat.length < 2) {\n                    Debug.echoError(\"Invalid skin_blob input. Must specify texture;signature;name in full.\");\n                    return;\n                }\n                skinTrait.setSkinPersistent(dat.length > 2 ? 
dat[2] : UUID.randomUUID().toString(), dat[1], dat[0]);\n                if (getCitizen().isSpawned() && getCitizen().getEntity() instanceof SkinnableEntity) {\n                    ((SkinnableEntity) getCitizen().getEntity()).getSkinTracker().notifySkinChange(true);\n                }\n            }\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name skin\n        \/\/ @input ElementTag\n        \/\/ @description\n        \/\/ Sets the skin of an NPC by name.\n        \/\/ Call with no value to clear the custom skin value.\n        \/\/ See also <@link language Player Entity Skins (Skin Blobs)>.\n        \/\/ @tags\n        \/\/ \n        \/\/ -->\n        if (mechanism.matches(\"skin\")) {\n            if (!mechanism.hasValue()) {\n                if (getCitizen().hasTrait(SkinTrait.class)) {\n                    getCitizen().getOrAddTrait(SkinTrait.class).clearTexture();\n                }\n            }\n            else {\n                SkinTrait skinTrait = getCitizen().getOrAddTrait(SkinTrait.class);\n                skinTrait.setSkinName(mechanism.getValue().asString());\n            }\n            if (getCitizen().isSpawned()) {\n                getCitizen().despawn(DespawnReason.PENDING_RESPAWN);\n                getCitizen().spawn(getCitizen().getStoredLocation());\n            }\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name auto_update_skin\n        \/\/ @input ElementTag(Boolean)\n        \/\/ @description\n        \/\/ Sets whether the NPC will automatically update its skin based on the skin name used.\n        \/\/ If true, the NPC's skin will change when the relevant account owner changes their skin.\n        \/\/ @tags\n        \/\/ \n        \/\/ -->\n        if (mechanism.matches(\"auto_update_skin\") && mechanism.requireBoolean()) {\n            getCitizen().getOrAddTrait(SkinTrait.class).setShouldUpdateSkins(mechanism.getValue().asBoolean());\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name item_type\n        \/\/ @input ItemTag\n        \/\/ @description\n        \/\/ Sets the item type of the item.\n        \/\/ -->\n        if (mechanism.matches(\"item_type\") && mechanism.requireObject(ItemTag.class)) {\n            ItemTag item = mechanism.valueAsType(ItemTag.class);\n            Material mat = item.getMaterial().getMaterial();\n            switch (getEntity().getType()) {\n                case DROPPED_ITEM:\n                    ((org.bukkit.entity.Item) getEntity()).getItemStack().setType(mat);\n                    break;\n                case ITEM_FRAME:\n                    ((ItemFrame) getEntity()).getItem().setType(mat);\n                    break;\n                case FALLING_BLOCK:\n                    getCitizen().data().setPersistent(NPC.ITEM_ID_METADATA, mat.name());\n                    getCitizen().data().setPersistent(NPC.ITEM_DATA_METADATA, 0);\n                    break;\n                default:\n                    Debug.echoError(\"NPC is not an item type!\");\n                    break;\n            }\n            if (getCitizen().isSpawned()) {\n                getCitizen().despawn();\n                getCitizen().spawn(getCitizen().getStoredLocation());\n            }\n        }\n\n        if (mechanism.matches(\"spawn\")) {\n            Deprecations.npcSpawnMechanism.warn(mechanism.context);\n            if (mechanism.requireObject(\"Invalid LocationTag specified. 
Assuming last known NPC location.\", LocationTag.class)) {\n                getCitizen().spawn(mechanism.valueAsType(LocationTag.class));\n            }\n            else {\n                getCitizen().spawn(getCitizen().getStoredLocation());\n            }\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name range\n        \/\/ @input ElementTag(Decimal)\n        \/\/ @description\n        \/\/ Sets the maximum movement distance of the NPC.\n        \/\/ @tags\n        \/\/ \n        \/\/ -->\n        if (mechanism.matches(\"range\") && mechanism.requireFloat()) {\n            getCitizen().getNavigator().getDefaultParameters().range(mechanism.getValue().asFloat());\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name attack_range\n        \/\/ @input ElementTag(Decimal)\n        \/\/ @description\n        \/\/ Sets the maximum attack distance of the NPC.\n        \/\/ @tags\n        \/\/ \n        \/\/ -->\n        if (mechanism.matches(\"attack_range\") && mechanism.requireFloat()) {\n            getCitizen().getNavigator().getDefaultParameters().attackRange(mechanism.getValue().asFloat());\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name speed\n        \/\/ @input ElementTag(Decimal)\n        \/\/ @description\n        \/\/ Sets the movement speed of the NPC.\n        \/\/ @tags\n        \/\/ \n        \/\/ -->\n        if (mechanism.matches(\"speed\") && mechanism.requireFloat()) {\n            getCitizen().getNavigator().getDefaultParameters().speedModifier(mechanism.getValue().asFloat());\n        }\n\n        if (mechanism.matches(\"despawn\")) {\n            Deprecations.npcDespawnMech.warn(mechanism.context);\n            getCitizen().despawn(DespawnReason.PLUGIN);\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name set_sneaking\n        \/\/ @input ElementTag(Boolean)\n        \/\/ @description\n        \/\/ Sets whether the NPC is sneaking or not. 
Only works for player-type NPCs.\n        \/\/ @tags\n        \/\/ \n        \/\/ -->\n        if (mechanism.matches(\"set_sneaking\") && mechanism.requireBoolean()) {\n            if (!getCitizen().hasTrait(SneakingTrait.class)) {\n                getCitizen().addTrait(SneakingTrait.class);\n            }\n            SneakingTrait trait = getCitizen().getOrAddTrait(SneakingTrait.class);\n            if (trait.isSneaking() && !mechanism.getValue().asBoolean()) {\n                trait.stand();\n            }\n            else if (!trait.isSneaking() && mechanism.getValue().asBoolean()) {\n                trait.sneak();\n            }\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name set_protected\n        \/\/ @input ElementTag(Boolean)\n        \/\/ @description\n        \/\/ Sets whether or not the NPC is protected.\n        \/\/ @tags\n        \/\/ \n        \/\/ -->\n        if (mechanism.matches(\"set_protected\") && mechanism.requireBoolean()) {\n            getCitizen().setProtected(mechanism.getValue().asBoolean());\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name lookclose\n        \/\/ @input ElementTag(Boolean)\n        \/\/ @description\n        \/\/ Sets the NPC's lookclose value.\n        \/\/ @tags\n        \/\/ \n        \/\/ -->\n        if (mechanism.matches(\"lookclose\") && mechanism.requireBoolean()) {\n            getLookCloseTrait().lookClose(mechanism.getValue().asBoolean());\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name controllable\n        \/\/ @input ElementTag(Boolean)\n        \/\/ @description\n        \/\/ Sets whether the NPC is controllable.\n        \/\/ @tags\n        \/\/ \n        \/\/ -->\n        if (mechanism.matches(\"controllable\") && mechanism.requireBoolean()) {\n            getCitizen().getOrAddTrait(Controllable.class).setEnabled(mechanism.getValue().asBoolean());\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name targetable\n        \/\/ @input ElementTag(Boolean)\n        \/\/ @description\n        \/\/ Sets whether the NPC is targetable.\n        \/\/ @tags\n        \/\/ \n        \/\/ -->\n        if (mechanism.matches(\"targetable\") && mechanism.requireBoolean()) {\n            getCitizen().data().setPersistent(NPC.TARGETABLE_METADATA, mechanism.getValue().asBoolean());\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name teleport_on_stuck\n        \/\/ @input ElementTag(Boolean)\n        \/\/ @description\n        \/\/ Sets whether the NPC teleports when it is stuck.\n        \/\/ @tags\n        \/\/ \n        \/\/ -->\n        if (mechanism.matches(\"teleport_on_stuck\") && mechanism.requireBoolean()) {\n            if (mechanism.getValue().asBoolean()) {\n                getNavigator().getDefaultParameters().stuckAction(TeleportStuckAction.INSTANCE);\n            }\n            else {\n                getNavigator().getDefaultParameters().stuckAction(null);\n            }\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name distance_margin\n        \/\/ @input ElementTag(Decimal)\n        \/\/ @description\n        \/\/ Sets the NPC's distance margin.\n        \/\/ @tags\n        \/\/ \n        \/\/ -->\n        if ((mechanism.matches(\"distance_margin\") || mechanism.matches(\"set_distance\")) && mechanism.requireDouble()) {\n            
getNavigator().getDefaultParameters().distanceMargin(mechanism.getValue().asDouble());\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name path_distance_margin\n        \/\/ @input ElementTag(Decimal)\n        \/\/ @description\n        \/\/ Sets the NPC's path distance margin.\n        \/\/ @tags\n        \/\/ \n        \/\/ -->\n        if (mechanism.matches(\"path_distance_margin\") && mechanism.requireDouble()) {\n            getNavigator().getDefaultParameters().pathDistanceMargin(mechanism.getValue().asDouble());\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name navigator_look_at\n        \/\/ @input LocationTag\n        \/\/ @description\n        \/\/ Sets the location the NPC will currently look at while moving.\n        \/\/ Give no value to let the NPC automatically look where it's going.\n        \/\/ Should be set after the NPC has started moving.\n        \/\/ @tags\n        \/\/ \n        \/\/ -->\n        if (mechanism.matches(\"navigator_look_at\")) {\n            if (mechanism.hasValue() && mechanism.requireObject(LocationTag.class)) {\n                final LocationTag loc = mechanism.valueAsType(LocationTag.class);\n                getNavigator().getLocalParameters().lookAtFunction((n) -> loc);\n            }\n            else {\n                getNavigator().getLocalParameters().lookAtFunction(null);\n            }\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name name_visible\n        \/\/ @input ElementTag\n        \/\/ @description\n        \/\/ Sets whether the NPC's nameplate is visible. Input is 'true' (always visible), 'false' (never visible), or 'hover' (only visible while looking at the NPC).\n        \/\/ @tags\n        \/\/ TODO\n        \/\/ -->\n        if (mechanism.matches(\"name_visible\")) {\n            getCitizen().data().setPersistent(NPC.NAMEPLATE_VISIBLE_METADATA, mechanism.getValue().asString());\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name glow_color\n        \/\/ @input ElementTag\n        \/\/ @description\n        \/\/ Sets the color the NPC will glow with, when it's glowing. 
Input must be from <@link url https:\/\/hub.spigotmc.org\/javadocs\/spigot\/org\/bukkit\/ChatColor.html>.\n        \/\/ @tags\n        \/\/ TODO\n        \/\/ -->\n        if (mechanism.matches(\"glow_color\") && mechanism.requireEnum(false, ChatColor.values())) {\n            getCitizen().getOrAddTrait(ScoreboardTrait.class).setColor(ChatColor.valueOf(mechanism.getValue().asString().toUpperCase()));\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name clear_waypoints\n        \/\/ @input None\n        \/\/ @description\n        \/\/ Clears all waypoint locations in the NPC's path.\n        \/\/ @tags\n        \/\/ TODO\n        \/\/ -->\n        if (mechanism.matches(\"clear_waypoints\")) {\n            Waypoints wp = getCitizen().getOrAddTrait(Waypoints.class);\n            if ((wp.getCurrentProvider() instanceof WaypointProvider.EnumerableWaypointProvider)) {\n                ((List) ((WaypointProvider.EnumerableWaypointProvider) wp.getCurrentProvider()).waypoints()).clear();\n            }\n            else if ((wp.getCurrentProvider() instanceof WanderWaypointProvider)) {\n                List locs = ((WanderWaypointProvider) wp.getCurrentProvider()).getRegionCentres();\n                for (Location loc : locs) {\n                    locs.remove(loc); \/\/ Manual clear to ensure recalculation for the forwarding list\n                }\n\n            }\n        }\n\n        \/\/ <--[mechanism]\n        \/\/ @object NPCTag\n        \/\/ @name add_waypoint\n        \/\/ @input LocationTag\n        \/\/ @description\n        \/\/ Add a waypoint location to the NPC's path.\n        \/\/ @tags\n        \/\/ TODO\n        \/\/ -->\n        if (mechanism.matches(\"add_waypoint\") && mechanism.requireObject(LocationTag.class)) {\n            Location target = mechanism.valueAsType(LocationTag.class).clone();\n            Waypoints wp = getCitizen().getOrAddTrait(Waypoints.class);\n            if ((wp.getCurrentProvider() instanceof LinearWaypointProvider)) {\n                ((LinearWaypointProvider) wp.getCurrentProvider()).addWaypoint(new Waypoint(target));\n            }\n            else if ((wp.getCurrentProvider() instanceof WaypointProvider.EnumerableWaypointProvider)) {\n                ((List) ((WaypointProvider.EnumerableWaypointProvider) wp.getCurrentProvider()).waypoints()).add(new Waypoint(target));\n            }\n            else if ((wp.getCurrentProvider() instanceof WanderWaypointProvider)) {\n                ((WanderWaypointProvider) wp.getCurrentProvider()).getRegionCentres().add(target);\n            }\n        }\n\n        CoreUtilities.autoPropertyMechanism(this, mechanism);\n\n        \/\/ Pass along to EntityTag mechanism handler if not already handled.\n        if (!mechanism.fulfilled()) {\n            if (isSpawned()) {\n                new EntityTag(getEntity()).adjust(mechanism);\n            }\n        }\n    }\n\n    @Override\n    public boolean advancedMatches(String matcher) {\n        return isSpawned() && BukkitScriptEvent.tryEntity(getDenizenEntity(), matcher);\n    }\n}\n","avg_line_length":37.9452201933,"max_line_length":171,"alphanum_fraction":0.5710363178}
{"size":168,"ext":"java","lang":"Java","max_stars_count":null,"content":"package Recursion.Q4;\r\n\r\npublic class TestRecursionQ4 {\r\n\tpublic static void main(String[] args) {\r\n\t\tMyFrameRecursionQ4 mf2 = new MyFrameRecursionQ4(\"First\");\r\n\t}\r\n}\r\n","avg_line_length":21.0,"max_line_length":60,"alphanum_fraction":0.7202380952}
{"size":2658,"ext":"java","lang":"Java","max_stars_count":null,"content":"\/*\n * Copyright 2014 Click Travel Ltd\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *   http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n *\/\npackage com.clicktravel.cheddar.event;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.mockito.Mockito.doReturn;\nimport static org.mockito.Mockito.mock;\nimport static org.mockito.Mockito.verify;\nimport static org.mockito.Mockito.when;\n\nimport org.junit.Test;\nimport org.mockito.ArgumentCaptor;\n\nimport com.clicktravel.cheddar.infrastructure.messaging.TypedMessage;\nimport com.clicktravel.common.random.Randoms;\nimport com.fasterxml.jackson.databind.ObjectMapper;\nimport com.fasterxml.jackson.databind.node.ObjectNode;\n\npublic class EventMessageHandlerTest {\n\n    @Test\n    public void shouldHandleDomainEvent_withMessage() throws Exception {\n        \/\/ Given\n        final String testValue = Randoms.randomString(5);\n        final ObjectMapper mapper = new ObjectMapper();\n        final ObjectNode rootNode = mapper.createObjectNode();\n        rootNode.put(\"testValue\", testValue);\n        final String serializedEvent = mapper.writeValueAsString(rootNode);\n\n        final TypedMessage message = mock(TypedMessage.class);\n        final String eventType = Randoms.randomString(5);\n        when(message.getType()).thenReturn(eventType);\n        when(message.getPayload()).thenReturn(serializedEvent);\n\n        final TestConcreteEventHandler mockDomainEventHandler = mock(TestConcreteEventHandler.class);\n\n        final TestConcreteEventHandler concreteDomainEventHandler = new TestConcreteEventHandler();\n        doReturn(concreteDomainEventHandler.getEventClass()).when(mockDomainEventHandler).getEventClass();\n\n        final EventMessageHandler eventMessageHandler = new EventMessageHandler<>();\n        eventMessageHandler.registerEventHandler(eventType, mockDomainEventHandler);\n\n        \/\/ When\n        eventMessageHandler.handle(message);\n\n        \/\/ Then\n        final ArgumentCaptor domainEventCaptor = ArgumentCaptor.forClass(TestConcreteEvent.class);\n        verify(mockDomainEventHandler).handle(domainEventCaptor.capture());\n        assertEquals(\"testType\", domainEventCaptor.getValue().type());\n    }\n\n}\n","avg_line_length":39.671641791,"max_line_length":117,"alphanum_fraction":0.7562076749}
{"size":7746,"ext":"java","lang":"Java","max_stars_count":null,"content":"\/*\n * Copyright 2013-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n * \n * Licensed under the Apache License, Version 2.0 (the \"License\"). You may not use this file except in compliance with\n * the License. A copy of the License is located at\n * \n * http:\/\/aws.amazon.com\/apache2.0\n * \n * or in the \"license\" file accompanying this file. This file is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR\n * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions\n * and limitations under the License.\n *\/\npackage com.amazonaws.services.sqs.model.transform;\n\nimport java.util.Map;\nimport javax.annotation.Generated;\n\nimport com.amazonaws.SdkClientException;\nimport com.amazonaws.Request;\nimport com.amazonaws.DefaultRequest;\nimport com.amazonaws.http.HttpMethodName;\nimport com.amazonaws.services.sqs.model.*;\nimport com.amazonaws.transform.Marshaller;\nimport com.amazonaws.util.StringUtils;\n\n\/**\n * SendMessageBatchRequest Marshaller\n *\/\n\n@Generated(\"com.amazonaws:aws-java-sdk-code-generator\")\npublic class SendMessageBatchRequestMarshaller implements Marshaller, SendMessageBatchRequest> {\n\n    public Request marshall(SendMessageBatchRequest sendMessageBatchRequest) {\n\n        if (sendMessageBatchRequest == null) {\n            throw new SdkClientException(\"Invalid argument passed to marshall(...)\");\n        }\n\n        Request request = new DefaultRequest(sendMessageBatchRequest, \"AmazonSQS\");\n        request.addParameter(\"Action\", \"SendMessageBatch\");\n        request.addParameter(\"Version\", \"2012-11-05\");\n        request.setHttpMethod(HttpMethodName.POST);\n\n        if (sendMessageBatchRequest.getQueueUrl() != null) {\n            request.addParameter(\"QueueUrl\", StringUtils.fromString(sendMessageBatchRequest.getQueueUrl()));\n        }\n\n        com.amazonaws.internal.SdkInternalList entriesList = (com.amazonaws.internal.SdkInternalList) sendMessageBatchRequest\n                .getEntries();\n        if (!entriesList.isEmpty() || !entriesList.isAutoConstruct()) {\n            int entriesListIndex = 1;\n\n            for (SendMessageBatchRequestEntry entriesListValue : entriesList) {\n\n                if (entriesListValue.getId() != null) {\n                    request.addParameter(\"SendMessageBatchRequestEntry.\" + entriesListIndex + \".Id\", StringUtils.fromString(entriesListValue.getId()));\n                }\n\n                if (entriesListValue.getMessageBody() != null) {\n                    request.addParameter(\"SendMessageBatchRequestEntry.\" + entriesListIndex + \".MessageBody\",\n                            StringUtils.fromString(entriesListValue.getMessageBody()));\n                }\n\n                if (entriesListValue.getDelaySeconds() != null) {\n                    request.addParameter(\"SendMessageBatchRequestEntry.\" + entriesListIndex + \".DelaySeconds\",\n                            StringUtils.fromInteger(entriesListValue.getDelaySeconds()));\n                }\n\n                java.util.Map messageAttributes = entriesListValue.getMessageAttributes();\n                int messageAttributesListIndex = 1;\n                for (Map.Entry entry : messageAttributes.entrySet()) {\n                    if (entry.getKey() != null) {\n                        request.addParameter(\"SendMessageBatchRequestEntry.\" + entriesListIndex + \".MessageAttribute.\" + 
messageAttributesListIndex + \".Name\",\n                                StringUtils.fromString(entry.getKey()));\n                    }\n                    if (entry.getValue() != null) {\n\n                        if (entry.getValue().getStringValue() != null) {\n                            request.addParameter(\"SendMessageBatchRequestEntry.\" + entriesListIndex + \".MessageAttribute.\" + messageAttributesListIndex\n                                    + \".Value.StringValue\", StringUtils.fromString(entry.getValue().getStringValue()));\n                        }\n\n                        if (entry.getValue().getBinaryValue() != null) {\n                            request.addParameter(\"SendMessageBatchRequestEntry.\" + entriesListIndex + \".MessageAttribute.\" + messageAttributesListIndex\n                                    + \".Value.BinaryValue\", StringUtils.fromByteBuffer(entry.getValue().getBinaryValue()));\n                        }\n\n                        com.amazonaws.internal.SdkInternalList stringListValuesList = (com.amazonaws.internal.SdkInternalList) entry.getValue()\n                                .getStringListValues();\n                        if (!stringListValuesList.isEmpty() || !stringListValuesList.isAutoConstruct()) {\n                            int stringListValuesListIndex = 1;\n\n                            for (String stringListValuesListValue : stringListValuesList) {\n                                if (stringListValuesListValue != null) {\n                                    request.addParameter(\"SendMessageBatchRequestEntry.\" + entriesListIndex + \".MessageAttribute.\" + messageAttributesListIndex\n                                            + \".Value.StringListValue.\" + stringListValuesListIndex, StringUtils.fromString(stringListValuesListValue));\n                                }\n                                stringListValuesListIndex++;\n                            }\n                        }\n\n                        com.amazonaws.internal.SdkInternalList binaryListValuesList = (com.amazonaws.internal.SdkInternalList) entry\n                                .getValue().getBinaryListValues();\n                        if (!binaryListValuesList.isEmpty() || !binaryListValuesList.isAutoConstruct()) {\n                            int binaryListValuesListIndex = 1;\n\n                            for (java.nio.ByteBuffer binaryListValuesListValue : binaryListValuesList) {\n                                if (binaryListValuesListValue != null) {\n                                    request.addParameter(\"SendMessageBatchRequestEntry.\" + entriesListIndex + \".MessageAttribute.\" + messageAttributesListIndex\n                                            + \".Value.BinaryListValue.\" + binaryListValuesListIndex, StringUtils.fromByteBuffer(binaryListValuesListValue));\n                                }\n                                binaryListValuesListIndex++;\n                            }\n                        }\n\n                        if (entry.getValue().getDataType() != null) {\n                            request.addParameter(\"SendMessageBatchRequestEntry.\" + entriesListIndex + \".MessageAttribute.\" + messageAttributesListIndex\n                                    + \".Value.DataType\", StringUtils.fromString(entry.getValue().getDataType()));\n                        }\n                    }\n                    messageAttributesListIndex++;\n                }\n\n                if (entriesListValue.getMessageDeduplicationId() != 
null) {\n                    request.addParameter(\"SendMessageBatchRequestEntry.\" + entriesListIndex + \".MessageDeduplicationId\",\n                            StringUtils.fromString(entriesListValue.getMessageDeduplicationId()));\n                }\n\n                if (entriesListValue.getMessageGroupId() != null) {\n                    request.addParameter(\"SendMessageBatchRequestEntry.\" + entriesListIndex + \".MessageGroupId\",\n                            StringUtils.fromString(entriesListValue.getMessageGroupId()));\n                }\n                entriesListIndex++;\n            }\n        }\n\n        return request;\n    }\n\n}\n","avg_line_length":54.9361702128,"max_line_length":185,"alphanum_fraction":0.6080557707}
{"size":1003,"ext":"java","lang":"Java","max_stars_count":383.0,"content":"package org.gitlab.api.models;\n\nimport com.fasterxml.jackson.annotation.JsonProperty;\n\npublic class GitlabTag {\n\n    public final static String URL = \"\/repository\/tags\";\n\n    @JsonProperty(\"commit\")\n    private GitlabCommit commit;\n\n    @JsonProperty(\"release\")\n    private GitlabRelease release;\n\n    @JsonProperty(\"name\")\n    private String name;\n\n    @JsonProperty(\"message\")\n    private String message;\n\n    public GitlabCommit getCommit() {\n        return commit;\n    }\n\n    public void setCommit(GitlabCommit commit) {\n        this.commit = commit;\n    }\n\n    public GitlabRelease getRelease() {\n        return release;\n    }\n\n    public void setRelease(GitlabRelease release) {\n        this.release = release;\n    }\n\n    public String getName() {\n        return name;\n    }\n\n    public void setName(String name) {\n        this.name = name;\n    }\n\n    public String getMessage() {\n        return message;\n    }\n\n    public void setMessage(String message) {\n        this.message = message;\n    }\n}\n","avg_line_length":18.9245283019,"max_line_length":56,"alphanum_fraction":0.6370887338}
{"size":3550,"ext":"java","lang":"Java","max_stars_count":null,"content":"\/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\/\npackage someone_else;\n\nimport data.Dataset;\nimport data.LinearScaleParam;\nimport data.SVMFileReader;\nimport data.SoftScaleParam;\nimport org.junit.Test;\n\nimport java.io.*;\n\n\/**\n * Created by edwardlol on 2017\/4\/20.\n *\/\npublic final class ScaleDemos {\n    \/\/~ Methods ----------------------------------------------------------------\n\n    \/**\n     * Test the scale boundary.\n     *\/\n    @Test\n    public void linearScaleBoundaryTest() {\n        SVMFileReader reader = SVMFileReader.getInstance();\n        Dataset data = reader.read(\".\/datasets\/train\");\n        data.linearScale();\n        data.record(\".\/results\/default_linear_scale\");\n        data.linearScale(0, 1);\n        data.record(\".\/results\/customed_linear_scale\");\n    }\n\n    \/**\n     * Test the serializability of LinearScaleParam.\n     * The two output files should have same content.\n     *\/\n    @Test\n    public void linearScaleSerializableTest() {\n        SVMFileReader reader = SVMFileReader.getInstance();\n\n        Dataset data1 = reader.read(\".\/datasets\/train\");\n        Dataset data2 = reader.read(\".\/datasets\/train\");\n\n        LinearScaleParam param1 = data1.linearScale();\n        data1.record(\".\/results\/linear_scale_out\");\n        try {\n            ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(\"results\/linear_scale_param\"));\n            oos.writeObject(param1);\n            oos.close();\n\n            ObjectInputStream ois = new ObjectInputStream(new FileInputStream(\"results\/linear_scale_param\"));\n            LinearScaleParam param2 = (LinearScaleParam) ois.readObject();\n            ois.close();\n\n            data2.linearScaleFrom(param2);\n            data2.record(\".\/results\/linear_scale_in\");\n        } catch (IOException | ClassNotFoundException e) {\n            e.printStackTrace();\n        }\n    }\n\n    @Test\n    public void softScaleTest() {\n        SVMFileReader reader = SVMFileReader.getInstance();\n        Dataset data = reader.read(\".\/datasets\/train\");\n        data.softScale();\n        data.record(\".\/results\/soft_scale\");\n    }\n\n    @Test\n    public void softScaleSerializableTest() {\n        SVMFileReader reader = SVMFileReader.getInstance();\n\n        Dataset data1 = reader.read(\".\/datasets\/train\");\n        Dataset data2 = reader.read(\".\/datasets\/train\");\n\n        SoftScaleParam param1 = data1.softScale();\n        data1.record(\".\/results\/soft_scale_out\");\n        try {\n            ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(\"results\/soft_scale_param\"));\n            oos.writeObject(param1);\n            oos.close();\n\n            ObjectInputStream ois = new ObjectInputStream(new FileInputStream(\"results\/soft_scale_param\"));\n            SoftScaleParam param2 = (SoftScaleParam) ois.readObject();\n          
  ois.close();\n\n            data2.softScaleFrom(param2);\n            data2.record(\".\/results\/soft_scale_in\");\n        } catch (IOException | ClassNotFoundException e) {\n            e.printStackTrace();\n        }\n    }\n}\n\n\/\/ End ScaleDemos.java\n","avg_line_length":33.1775700935,"max_line_length":112,"alphanum_fraction":0.6402816901}
{"size":1348,"ext":"java","lang":"Java","max_stars_count":1.0,"content":"\/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements.  See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License.  You may obtain a copy of the License at\n *\n *     http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\/\n\npackage org.apache.shardingsphere.underlying.rewrite.sql.impl;\n\nimport org.apache.shardingsphere.underlying.rewrite.context.SQLRewriteContext;\nimport org.apache.shardingsphere.underlying.rewrite.sql.token.pojo.SQLToken;\n\n\/**\n * Default SQL builder.\n *\/\npublic final class DefaultSQLBuilder extends AbstractSQLBuilder {\n    \n    public DefaultSQLBuilder(final SQLRewriteContext context) {\n        super(context);\n    }\n    \n    @Override\n    protected String getSQLTokenText(final SQLToken sqlToken) {\n        return sqlToken.toString();\n    }\n}\n","avg_line_length":36.4324324324,"max_line_length":78,"alphanum_fraction":0.7544510386}
{"size":1591,"ext":"java","lang":"Java","max_stars_count":null,"content":"package com.wanghaocun.cache.service;\n\nimport com.wanghaocun.cache.domain.Comment;\nimport com.wanghaocun.cache.repository.CommentRepository;\nimport org.jetbrains.annotations.NotNull;\nimport org.springframework.beans.factory.annotation.Autowired;\nimport org.springframework.cache.annotation.CacheEvict;\nimport org.springframework.cache.annotation.CachePut;\nimport org.springframework.cache.annotation.Cacheable;\nimport org.springframework.stereotype.Service;\n\nimport java.util.Optional;\n\n\/**\n * @author wanghc\n * @since 2020-11-30\n **\/\n@Service\npublic class CommentService {\n\n    @Autowired\n    CommentRepository commentRepository;\n\n    @Cacheable(cacheNames = \"comment\", unless = \"#result==null\")\n    public Comment findCommentById(Integer id) {\n        Optional<Comment> comment = commentRepository.findById(id);\n        return comment.orElse(null);\n    }\n\n    @CachePut(cacheNames = \"comment\", key = \"#result.id\")\n    public Comment updateComment(@NotNull Comment comment) {\n        boolean exists = commentRepository.existsById(comment.getId());\n        if (!exists) {\n            return null;\n        }\n        commentRepository.updateComment(comment.getAuthor(), comment.getId());\n        Optional<Comment> commentOptional = commentRepository.findById(comment.getId());\n\n        return commentOptional.orElse(null);\n    }\n\n    @CacheEvict(cacheNames = \"comment\")\n    public void deleteComment(int commentId) {\n        boolean exists = commentRepository.existsById(commentId);\n        if (!exists) {\n            return;\n        }\n        commentRepository.deleteById(commentId);\n    }\n}\n","avg_line_length":31.1960784314,"max_line_length":88,"alphanum_fraction":0.7159019485}
{"size":4257,"ext":"java","lang":"Java","max_stars_count":null,"content":"package dev.anyroad.threadlocal;\n\nimport com.alibaba.ttl.TransmittableThreadLocal;\nimport com.alibaba.ttl.threadpool.TtlExecutors;\nimport lombok.extern.slf4j.Slf4j;\nimport org.junit.jupiter.api.DisplayName;\nimport org.junit.jupiter.api.Test;\n\nimport java.util.concurrent.*;\n\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.fail;\n\n@Slf4j\npublic class TransmittableThreadLocalTest {\n    @Test\n    @DisplayName(\"Test basic TransmittableThreadLocal behavior - get value in the child thread\")\n    public void transmittableThreadLocalBasic() throws InterruptedException {\n        ThreadLocal threadLocal = new TransmittableThreadLocal<>();\n\n        String mainThreadData = \"main thread\";\n        threadLocal.set(mainThreadData);\n\n        ThreadLocalData dataFromChildThread = new ThreadLocalData(\"original data\");\n\n        Thread childThread = new Thread(() -> dataFromChildThread.setData(threadLocal.get()));\n\n        childThread.start();\n        childThread.join();\n\n        assertEquals(mainThreadData, dataFromChildThread.getData());\n    }\n\n    @Test\n    @DisplayName(\"Modify value in parent ThreadLocal after starting child thread\")\n    public void inheritableThreadChangeInParentThread() throws InterruptedException {\n        ThreadLocal threadLocal = new TransmittableThreadLocal<>();\n\n        String mainThreadData = \"main thread\";\n        threadLocal.set(mainThreadData);\n\n        ThreadLocalData dataFromChildThread = new ThreadLocalData(\"original data\");\n\n        CountDownLatch threadStartedLatch = new CountDownLatch(1);\n        CountDownLatch valueChangedLatch = new CountDownLatch(1);\n\n        Thread childThread = new Thread(() -> {\n            threadStartedLatch.countDown();\n            try {\n                valueChangedLatch.await();\n            } catch (InterruptedException e) {\n                fail(\"Exception during latch await: \" + e);\n            }\n            dataFromChildThread.setData(threadLocal.get());\n\n        });\n\n        childThread.start();\n\n        threadStartedLatch.await();\n\n        threadLocal.set(\"main new thread\");\n        valueChangedLatch.countDown();\n\n        childThread.join();\n\n        assertEquals(mainThreadData, dataFromChildThread.getData());\n    }\n\n    @Test\n    @DisplayName(\"Thread created in Thread Pool inherit value\")\n    public void transmittableThreadLocalWithThreadPool() throws InterruptedException {\n        ThreadLocal threadLocal = new TransmittableThreadLocal<>();\n\n        String mainThreadData = \"main thread\";\n        threadLocal.set(mainThreadData);\n\n        ThreadLocalData dataFromChildThread = new ThreadLocalData(\"original data\");\n\n        ExecutorService executorService = TtlExecutors.getTtlExecutorService(Executors.newSingleThreadExecutor());\n        executorService.submit(() -> dataFromChildThread.setData(threadLocal.get()));\n        executorService.shutdown();\n        executorService.awaitTermination(1, TimeUnit.SECONDS);\n\n        assertEquals(mainThreadData, dataFromChildThread.getData());\n    }\n\n    @Test\n    @DisplayName(\"Second runnable submitted to Thread Pool gets updated value\")\n    public void transmittableThreadLocalWithThreadPoolSecondRunnable() throws InterruptedException, ExecutionException {\n        ThreadLocal threadLocal = new TransmittableThreadLocal<>();\n\n        String mainOriginalThreadData = 
\"main thread\";\n        threadLocal.set(mainOriginalThreadData);\n\n        ThreadLocalData dataFromChildThread = new ThreadLocalData(\"original data\");\n\n        ExecutorService executorService = TtlExecutors.getTtlExecutorService(Executors.newSingleThreadExecutor());\n\n        Future future = executorService.submit(() -> {\n            dataFromChildThread.setData(threadLocal.get());\n        });\n\n        future.get();\n\n        assertEquals(mainOriginalThreadData, dataFromChildThread.getData());\n\n        String mainNewThreadData = \"main new thread\";\n        threadLocal.set(mainNewThreadData);\n\n        executorService.submit(() -> dataFromChildThread.setData(threadLocal.get()));\n\n        executorService.shutdown();\n        executorService.awaitTermination(1, TimeUnit.SECONDS);\n\n        assertEquals(mainNewThreadData, dataFromChildThread.getData());\n    }\n\n}\n","avg_line_length":35.475,"max_line_length":120,"alphanum_fraction":0.7105943152}
{"size":32354,"ext":"java","lang":"Java","max_stars_count":null,"content":"\/*******************************************************************************\n * Copyright (c) Contributors to the Eclipse Foundation\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * SPDX-License-Identifier: Apache-2.0 \n *******************************************************************************\/\n\npackage org.osgi.test.cases.remoteserviceadmin.junit;\n\nimport static org.osgi.framework.Constants.*;\nimport static org.osgi.framework.Constants.SERVICE_EXPORTED_CONFIGS;\nimport static org.osgi.framework.Constants.SERVICE_IMPORTED_CONFIGS;\nimport static org.osgi.framework.Constants.SERVICE_INTENTS;\nimport static org.osgi.service.remoteserviceadmin.RemoteConstants.*;\n\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.TreeMap;\n\nimport org.osgi.framework.Bundle;\nimport org.osgi.framework.BundleContext;\nimport org.osgi.framework.ServiceReference;\nimport org.osgi.framework.Version;\nimport org.osgi.service.remoteserviceadmin.EndpointDescription;\nimport org.osgi.test.support.MockFactory;\n\nimport junit.framework.TestCase;\n\npublic class EndpointDescriptionTests extends TestCase {\n\n\tpublic void testFromMap() {\n\t\tEndpointDescription ed;\n\t\tMap props;\n\n\t\tprops = new HashMap();\n\t\ttry {\n\t\t\ted = newEndpointDescription(props);\n\t\t\tfail(\"missing required properties\");\n\t\t}\n\t\tcatch (IllegalArgumentException e) {\n\t\t\t\/\/ expected\n\t\t}\n\n\t\tString testUUID = \"testUUID\";\n\t\tString someId = \"someId\";\n\t\tprops.put(ENDPOINT_ID, someId);\n\t\ttry {\n\t\t\ted = newEndpointDescription(props);\n\t\t\tfail(\"missing required properties\");\n\t\t}\n\t\tcatch (IllegalArgumentException e) {\n\t\t\t\/\/ expected\n\t\t}\n\n\t\tprops.put(OBJECTCLASS, new String[] {\"foo\"});\n\t\ttry {\n\t\t\ted = newEndpointDescription(props);\n\t\t\tfail(\"missing required properties\");\n\t\t}\n\t\tcatch (IllegalArgumentException e) {\n\t\t\t\/\/ expected\n\t\t}\n\n\t\tprops.put(SERVICE_IMPORTED_CONFIGS, \"config\");\n\t\ted = newEndpointDescription(props);\n\t\tassertEquals(\"wrong remote id\", someId, ed.getId());\n\n\t\tassertEquals(\"remote service id should be zero\", 0l, ed.getServiceId());\n\t\tassertNull(\"remote framework uuid should be null\", ed\n\t\t\t\t.getFrameworkUUID());\n\n\t\tprops.put(ENDPOINT_FRAMEWORK_UUID, testUUID);\n\t\ted = newEndpointDescription(props);\n\t\tassertEquals(\"wrong uuid\", testUUID, ed.getFrameworkUUID());\n\n\t\tprops.put(ENDPOINT_SERVICE_ID, \"not a valid long\");\n\t\ttry {\n\t\t\ted = newEndpointDescription(props);\n\t\t\tfail(\"invalid endpoint.id property\");\n\t\t}\n\t\tcatch (IllegalArgumentException e) {\n\t\t\t\/\/ expected\n\t\t}\n\n\t\tprops.put(ENDPOINT_SERVICE_ID, new Object());\n\t\ttry {\n\t\t\ted = newEndpointDescription(props);\n\t\t\tfail(\"invalid endpoint.id property\");\n\t\t}\n\t\tcatch (IllegalArgumentException e) 
{\n\t\t\t\/\/ expected\n\t\t}\n\n\t\tLong someServiceId = Long.valueOf(12l);\n\t\tprops.put(ENDPOINT_SERVICE_ID, someServiceId);\n\t\ted = newEndpointDescription(props);\n\t\tassertEquals(\"wrong id\", someServiceId.longValue(), ed.getServiceId());\n\n\t\tprops.put(OBJECTCLASS, \"not a String[]\");\n\t\ttry {\n\t\t\ted = newEndpointDescription(props);\n\t\t\tfail(\"invalid objectClass property\");\n\t\t}\n\t\tcatch (IllegalArgumentException e) {\n\t\t\t\/\/ expected\n\t\t}\n\n\t\tprops.put(OBJECTCLASS, new String[] {});\n\t\ttry {\n\t\t\ted = newEndpointDescription(props);\n\t\t\tfail(\"invalid objectClass property\");\n\t\t}\n\t\tcatch (IllegalArgumentException e) {\n\t\t\t\/\/ expected\n\t\t}\n\n\t\tString[] objectClass = new String[] {\"com.acme.Foo\", \"com.acme.FOO\"};\n\t\tprops.put(OBJECTCLASS, objectClass);\n\t\ted = newEndpointDescription(props);\n\t\ttestMutability(ed);\n\t\tList interfs = ed.getInterfaces();\n\t\tassertEquals(\"should have 2 interfaces\", 2, interfs.size());\n\t\tassertEquals(\"first interface wrong\", objectClass[0], interfs.get(0));\n\t\tassertEquals(\"second interface wrong\", objectClass[1], interfs.get(1));\n\t\tassertEquals(\"package version wrong\", Version.emptyVersion, ed\n\t\t\t\t.getPackageVersion(getPackageName(objectClass[0])));\n\t\tassertEquals(\"package version wrong\", Version.emptyVersion, ed\n\t\t\t\t.getPackageVersion(getPackageName(objectClass[1])));\n\t\tassertEquals(\"package version wrong\", Version.emptyVersion, ed\n\t\t\t\t.getPackageVersion(\"xxx\"));\n\n\t\tprops.put(ENDPOINT_PACKAGE_VERSION_ + getPackageName(objectClass[0]),\n\t\t\t\t\"bad version\");\n\t\ttry {\n\t\t\ted = newEndpointDescription(props);\n\t\t\tfail(\"invalid package version property\");\n\t\t}\n\t\tcatch (IllegalArgumentException e) {\n\t\t\t\/\/ expected\n\t\t}\n\n\t\tVersion someVersion = new Version(1, 2, 3, \"somequalifier\");\n\t\tprops.put(ENDPOINT_PACKAGE_VERSION_ + getPackageName(objectClass[0]),\n\t\t\t\tsomeVersion.toString());\n\t\ted = newEndpointDescription(props);\n\t\tassertEquals(\"package version wrong\", someVersion, ed\n\t\t\t\t.getPackageVersion(getPackageName(objectClass[0])));\n\n\t\tprops\n\t\t\t\t.remove(ENDPOINT_PACKAGE_VERSION_\n\t\t\t\t\t\t+ getPackageName(objectClass[0]));\n\t\tprops.put(ENDPOINT_PACKAGE_VERSION_ + getPackageName(objectClass[1]),\n\t\t\t\t\"bad version\");\n\t\ttry {\n\t\t\ted = newEndpointDescription(props);\n\t\t\tfail(\"invalid package version property\");\n\t\t}\n\t\tcatch (IllegalArgumentException e) {\n\t\t\t\/\/ expected\n\t\t}\n\n\t\tprops.put(ENDPOINT_PACKAGE_VERSION_ + getPackageName(objectClass[1]),\n\t\t\t\tsomeVersion.toString());\n\t\ted = newEndpointDescription(props);\n\t\tassertEquals(\"package version wrong\", someVersion, ed\n\t\t\t\t.getPackageVersion(getPackageName(objectClass[1])));\n\n\t}\n\n\tpublic void testBadMap() {\n\t\t@SuppressWarnings(\"unused\")\n\t\tEndpointDescription ed;\n\t\tMap props = new HashMap();\n\t\tString testUUID = \"testUUID\";\n\t\tString someId = \"someId\";\n\t\tprops.put(ENDPOINT_ID, someId);\n\t\tprops.put(OBJECTCLASS, new String[] {\"foo\"});\n\t\tprops.put(SERVICE_IMPORTED_CONFIGS, \"config\");\n\t\tBundleContext testContext = newMockBundleContext(testUUID);\n\t\tBundle testBundle = newMockBundle(1, \"testName\", \"testLocation\",\n\t\t\t\ttestContext);\n\t\tMap serviceProps = new TreeMap(\n\t\t\t\tString.CASE_INSENSITIVE_ORDER);\n\t\tServiceReference< ? 
> ref = newMockServiceReference(testBundle,\n\t\t\t\tserviceProps);\n\n\t\tprops.put(\"foo\", \"bar\");\n\t\tprops.put(\"Foo\", \"bar\");\n\n\t\ttry {\n\t\t\ted = newEndpointDescription(props);\n\t\t\tfail(\"duplicate keys\");\n\t\t}\n\t\tcatch (IllegalArgumentException e) {\n\t\t\t\/\/ expected\n\t\t}\n\n\t\ttry {\n\t\t\ted = newEndpointDescription(ref, props);\n\t\t\tfail(\"duplicate keys\");\n\t\t}\n\t\tcatch (IllegalArgumentException e) {\n\t\t\t\/\/ expected\n\t\t}\n\n\t\tprops.remove(\"Foo\");\n\t\tprops.put(\"foo\", \"bar\");\n\t\t@SuppressWarnings({\n\t\t\t\t\"rawtypes\", \"unchecked\"\n\t\t})\n\t\tMap bad = (Map) props;\n\t\tbad.put(this, \"bar\");\n\n\t\ttry {\n\t\t\ted = newEndpointDescription(props);\n\t\t\tfail(\"non string key\");\n\t\t}\n\t\tcatch (IllegalArgumentException e) {\n\t\t\t\/\/ expected\n\t\t}\n\t\ttry {\n\t\t\ted = newEndpointDescription(ref, props);\n\t\t\tfail(\"non string key\");\n\t\t}\n\t\tcatch (IllegalArgumentException e) {\n\t\t\t\/\/ expected\n\t\t}\n\t}\n\n\tpublic void testFromReference() {\n\t\tEndpointDescription ed;\n\t\tMap props = new HashMap();\n\t\tString testUUID = \"testUUID\";\n\t\tBundleContext testContext = newMockBundleContext(testUUID);\n\t\tBundle testBundle = newMockBundle(1, \"testName\", \"testLocation\",\n\t\t\t\ttestContext);\n\t\tMap serviceProps = new TreeMap(\n\t\t\t\tString.CASE_INSENSITIVE_ORDER);\n\t\tServiceReference< ? > ref = newMockServiceReference(testBundle,\n\t\t\t\tserviceProps);\n\n\t\ttry {\n\t\t\ted = newEndpointDescription(ref, null);\n\t\t\tfail(\"missing required properties\");\n\t\t}\n\t\tcatch (IllegalArgumentException e) {\n\t\t\t\/\/ expected\n\t\t}\n\n\t\ttry {\n\t\t\ted = newEndpointDescription(ref, props);\n\t\t\tfail(\"missing required properties\");\n\t\t}\n\t\tcatch (IllegalArgumentException e) {\n\t\t\t\/\/ expected\n\t\t}\n\n\t\tString someId = \"someId\";\n\t\tprops.put(ENDPOINT_ID, someId);\n\t\ttry {\n\t\t\ted = newEndpointDescription(ref, null);\n\t\t\tfail(\"missing required properties\");\n\t\t}\n\t\tcatch (IllegalArgumentException e) {\n\t\t\t\/\/ expected\n\t\t}\n\n\t\tserviceProps.put(ENDPOINT_ID, someId);\n\t\ttry {\n\t\t\ted = newEndpointDescription(ref, props);\n\t\t\tfail(\"missing required properties\");\n\t\t}\n\t\tcatch (IllegalArgumentException e) {\n\t\t\t\/\/ expected\n\t\t}\n\n\t\tprops.put(OBJECTCLASS, new String[] {\"foo\"});\n\t\ttry {\n\t\t\ted = newEndpointDescription(ref, null);\n\t\t\tfail(\"missing required properties\");\n\t\t}\n\t\tcatch (IllegalArgumentException e) {\n\t\t\t\/\/ expected\n\t\t}\n\n\t\tserviceProps.put(OBJECTCLASS, new String[] {\"foo\"});\n\t\ttry {\n\t\t\ted = newEndpointDescription(ref, props);\n\t\t\tfail(\"missing required properties\");\n\t\t}\n\t\tcatch (IllegalArgumentException e) {\n\t\t\t\/\/ expected\n\t\t}\n\n\t\tserviceProps.put(SERVICE_IMPORTED_CONFIGS, \"config\");\n\t\ted = newEndpointDescription(ref, null);\n\t\tassertEquals(\"wrong remote id\", someId, ed.getId());\n\n\t\tassertEquals(\"remote service id should be zero\", 0l, ed.getServiceId());\n\t\tassertEquals(\"wrong uuid\", testUUID, ed.getFrameworkUUID());\n\n\t\tprops.put(ENDPOINT_FRAMEWORK_UUID, \"newUUID\");\n\t\tprops.put(SERVICE_IMPORTED_CONFIGS, \"config\");\n\t\ted = newEndpointDescription(props);\n\t\tassertEquals(\"wrong uuid\", \"newUUID\", ed.getFrameworkUUID());\n\t\tprops.remove(ENDPOINT_FRAMEWORK_UUID);\n\n\t\tprops.put(ENDPOINT_SERVICE_ID, \"not a valid long\");\n\t\ttry {\n\t\t\ted = newEndpointDescription(ref, props);\n\t\t\tfail(\"invalid endpoint.id 
property\");\n\t\t}\n\t\tcatch (IllegalArgumentException e) {\n\t\t\t\/\/ expected\n\t\t}\n\n\t\tprops.put(ENDPOINT_SERVICE_ID, new Object());\n\t\ttry {\n\t\t\ted = newEndpointDescription(ref, props);\n\t\t\tfail(\"invalid endpoint.id property\");\n\t\t}\n\t\tcatch (IllegalArgumentException e) {\n\t\t\t\/\/ expected\n\t\t}\n\n\t\tLong someServiceId = Long.valueOf(12l);\n\t\tprops.remove(ENDPOINT_SERVICE_ID);\n\t\tserviceProps.put(SERVICE_ID, someServiceId);\n\t\ted = newEndpointDescription(ref, props);\n\t\tassertEquals(\"wrong id\", someServiceId.longValue(), ed.getServiceId());\n\n\t\tString[] objectClass = new String[] {\"com.acme.Foo\", \"com.acme.FOO\"};\n\t\tprops.put(OBJECTCLASS, objectClass);\n\t\tserviceProps.put(OBJECTCLASS, \"not a String[]\");\n\t\ted = newEndpointDescription(ref, props);\n\n\t\tserviceProps.put(\"OBJECTCLASS\", objectClass);\n\t\tprops.put(OBJECTCLASS, \"not a String[]\");\n\t\ttry {\n\t\t\ted = newEndpointDescription(ref, props);\n\t\t\tfail(\"invalid objectClass property\");\n\t\t}\n\t\tcatch (IllegalArgumentException e) {\n\t\t\t\/\/ expected\n\t\t}\n\n\t\tprops.remove(OBJECTCLASS);\n\t\ted = newEndpointDescription(ref, props);\n\t\ttestMutability(ed);\n\t\tList interfs = ed.getInterfaces();\n\t\tassertEquals(\"should have 2 interfaces\", 2, interfs.size());\n\t\tassertEquals(\"first interface wrong\", objectClass[0], interfs.get(0));\n\t\tassertEquals(\"second interface wrong\", objectClass[1], interfs.get(1));\n\t\tassertEquals(\"package version wrong\", Version.emptyVersion, ed\n\t\t\t\t.getPackageVersion(getPackageName(objectClass[0])));\n\t\tassertEquals(\"package version wrong\", Version.emptyVersion, ed\n\t\t\t\t.getPackageVersion(getPackageName(objectClass[1])));\n\t\tassertEquals(\"package version wrong\", Version.emptyVersion, ed\n\t\t\t\t.getPackageVersion(\"xxx\"));\n\n\t\tserviceProps.put(ENDPOINT_PACKAGE_VERSION_\n\t\t\t\t+ getPackageName(objectClass[0]), \"bad version\");\n\t\ttry {\n\t\t\ted = newEndpointDescription(ref, props);\n\t\t\tfail(\"invalid package version property\");\n\t\t}\n\t\tcatch (IllegalArgumentException e) {\n\t\t\t\/\/ expected\n\t\t}\n\n\t\tVersion someVersion = new Version(1, 2, 3, \"somequalifier\");\n\t\tserviceProps.put(ENDPOINT_PACKAGE_VERSION_\n\t\t\t\t+ getPackageName(objectClass[0]), someVersion.toString());\n\t\ted = newEndpointDescription(ref, props);\n\t\tassertEquals(\"package version wrong\", someVersion, ed\n\t\t\t\t.getPackageVersion(getPackageName(objectClass[0])));\n\n\t\tserviceProps.put(ENDPOINT_PACKAGE_VERSION_\n\t\t\t\t+ getPackageName(objectClass[1]), \"bad version\");\n\t\ttry {\n\t\t\ted = newEndpointDescription(ref, props);\n\t\t\tfail(\"invalid package version property\");\n\t\t}\n\t\tcatch (IllegalArgumentException e) {\n\t\t\t\/\/ expected\n\t\t}\n\t}\n\n\tpublic void testIntents() {\n\t\tEndpointDescription ed;\n\t\tMap props = new HashMap();\n\t\tString someId = \"someId\";\n\t\tprops.put(ENDPOINT_ID, someId);\n\t\tprops.put(OBJECTCLASS, new String[] {\"foo\"});\n\t\tprops.put(SERVICE_IMPORTED_CONFIGS, \"config\");\n\t\tString testUUID = \"testUUID\";\n\t\tBundleContext testContext = newMockBundleContext(testUUID);\n\t\tBundle testBundle = newMockBundle(1, \"testName\", \"testLocation\",\n\t\t\t\ttestContext);\n\t\tMap serviceProps = new TreeMap(\n\t\t\t\tString.CASE_INSENSITIVE_ORDER);\n\t\tserviceProps.put(ENDPOINT_ID, someId);\n\t\tserviceProps.put(OBJECTCLASS, new String[] {\"foo\"});\n\t\tserviceProps.put(SERVICE_IMPORTED_CONFIGS, \"config\");\n\t\tServiceReference< ? 
> ref = newMockServiceReference(testBundle,\n\t\t\t\tserviceProps);\n\t\tList intents;\n\n\t\ted = newEndpointDescription(props);\n\t\tintents = ed.getIntents();\n\t\tassertNotNull(\"intents null\", intents);\n\t\tassertTrue(\"intents not empty\", intents.isEmpty());\n\t\ttestListMutability(intents);\n\n\t\ted = newEndpointDescription(ref, null);\n\t\tintents = ed.getIntents();\n\t\tassertNotNull(\"intents null\", intents);\n\t\tassertTrue(\"intents not empty\", intents.isEmpty());\n\t\ttestListMutability(intents);\n\n\t\tprops.put(SERVICE_INTENTS, this);\n\t\tserviceProps.put(SERVICE_INTENTS, this);\n\n\t\ted = newEndpointDescription(props);\n\t\tintents = ed.getIntents();\n\t\tassertNotNull(\"intents null\", intents);\n\t\tassertTrue(\"intents not empty\", intents.isEmpty());\n\t\ttestListMutability(intents);\n\n\t\ted = newEndpointDescription(ref, null);\n\t\tintents = ed.getIntents();\n\t\tassertNotNull(\"intents null\", intents);\n\t\tassertTrue(\"intents not empty\", intents.isEmpty());\n\t\ttestListMutability(intents);\n\n\t\tString scalarIntent = \"some.intent\";\n\t\tprops.put(SERVICE_INTENTS, scalarIntent);\n\t\tserviceProps.put(SERVICE_INTENTS, scalarIntent);\n\n\t\ted = newEndpointDescription(props);\n\t\tintents = ed.getIntents();\n\t\tassertNotNull(\"intents null\", intents);\n\t\tassertFalse(\"intents not empty\", intents.isEmpty());\n\t\tassertEquals(\"only one element\", 1, intents.size());\n\t\tassertEquals(\"wrong intent value\", scalarIntent, intents.get(0));\n\t\ttestListMutability(intents);\n\n\t\ted = newEndpointDescription(ref, null);\n\t\tintents = ed.getIntents();\n\t\tassertNotNull(\"intents null\", intents);\n\t\tassertFalse(\"intents not empty\", intents.isEmpty());\n\t\tassertEquals(\"only one element\", 1, intents.size());\n\t\tassertEquals(\"wrong intent value\", scalarIntent, intents.get(0));\n\t\ttestListMutability(intents);\n\n\t\tString[] arrayIntents = new String[] {\"some.intent1\", \"some.intent2\"};\n\t\tprops.put(SERVICE_INTENTS, arrayIntents);\n\t\tserviceProps.put(SERVICE_INTENTS, arrayIntents);\n\n\t\ted = newEndpointDescription(props);\n\t\tintents = ed.getIntents();\n\t\tassertNotNull(\"intents null\", intents);\n\t\tassertFalse(\"intents not empty\", intents.isEmpty());\n\t\tassertEquals(\"wrong number of elements\", arrayIntents.length, intents\n\t\t\t\t.size());\n\t\tfor (int i = 0; i < arrayIntents.length; i++) {\n\t\t\tassertEquals(\"wrong intent value\", arrayIntents[i], intents.get(i));\n\t\t}\n\t\ttestListMutability(intents);\n\n\t\ted = newEndpointDescription(ref, null);\n\t\tintents = ed.getIntents();\n\t\tassertNotNull(\"intents null\", intents);\n\t\tassertFalse(\"intents not empty\", intents.isEmpty());\n\t\tassertEquals(\"wrong number of elements\", arrayIntents.length, intents\n\t\t\t\t.size());\n\t\tfor (int i = 0; i < arrayIntents.length; i++) {\n\t\t\tassertEquals(\"wrong intent value\", arrayIntents[i], intents.get(i));\n\t\t}\n\t\ttestListMutability(intents);\n\n\t\tList listIntents = Arrays.asList(\"some.intent3\",\n\t\t\t\t\"some.intent4\", \"some.intent5\");\n\t\tprops.put(SERVICE_INTENTS, listIntents);\n\t\tserviceProps.put(SERVICE_INTENTS, listIntents);\n\n\t\ted = newEndpointDescription(props);\n\t\tintents = ed.getIntents();\n\t\tassertNotNull(\"intents null\", intents);\n\t\tassertFalse(\"intents not empty\", intents.isEmpty());\n\t\tassertEquals(\"wrong number of elements\", listIntents.size(), intents\n\t\t\t\t.size());\n\t\tfor (int i = 0; i < listIntents.size(); i++) {\n\t\t\tassertEquals(\"wrong intent value\", 
listIntents.get(i), intents\n\t\t\t\t\t.get(i));\n\t\t}\n\t\ttestListMutability(intents);\n\n\t\ted = newEndpointDescription(ref, null);\n\t\tintents = ed.getIntents();\n\t\tassertNotNull(\"intents null\", intents);\n\t\tassertFalse(\"intents not empty\", intents.isEmpty());\n\t\tassertEquals(\"wrong number of elements\", listIntents.size(), intents\n\t\t\t\t.size());\n\t\tfor (int i = 0; i < listIntents.size(); i++) {\n\t\t\tassertEquals(\"wrong intent value\", listIntents.get(i), intents\n\t\t\t\t\t.get(i));\n\t\t}\n\t\ttestListMutability(intents);\n\n\t}\n\n\tpublic void testConfigurationsTypes() {\n\t\tEndpointDescription ed;\n\t\tMap props = new HashMap();\n\t\tString someId = \"someId\";\n\t\tprops.put(ENDPOINT_ID, someId);\n\t\tprops.put(OBJECTCLASS, new String[] {\"foo\"});\n\t\tprops.put(SERVICE_IMPORTED_CONFIGS, \"config\");\n\t\tString testUUID = \"testUUID\";\n\t\tBundleContext testContext = newMockBundleContext(testUUID);\n\t\tBundle testBundle = newMockBundle(1, \"testName\", \"testLocation\",\n\t\t\t\ttestContext);\n\t\tMap serviceProps = new TreeMap(\n\t\t\t\tString.CASE_INSENSITIVE_ORDER);\n\t\tserviceProps.put(ENDPOINT_ID, someId);\n\t\tserviceProps.put(OBJECTCLASS, new String[] {\"foo\"});\n\t\tserviceProps.put(SERVICE_IMPORTED_CONFIGS, \"config\");\n\t\tServiceReference< ? > ref = newMockServiceReference(testBundle,\n\t\t\t\tserviceProps);\n\t\tList configTypes;\n\n\t\tprops.put(SERVICE_IMPORTED_CONFIGS, this);\n\t\tserviceProps.put(SERVICE_IMPORTED_CONFIGS, this);\n\n\t\ttry {\n\t\t\ted = newEndpointDescription(props);\n\t\t\tfail(\"config type empty\");\n\t\t}\n\t\tcatch (IllegalArgumentException e) {\n\t\t\t\/\/ expected\n\t\t}\n\n\t\ttry {\n\t\t\ted = newEndpointDescription(ref, null);\n\t\t\tfail(\"config type empty\");\n\t\t}\n\t\tcatch (IllegalArgumentException e) {\n\t\t\t\/\/ expected\n\t\t}\n\n\t\tString scalarConfigType = \"some.configtype\";\n\t\tprops.put(SERVICE_IMPORTED_CONFIGS, scalarConfigType);\n\t\tserviceProps.put(SERVICE_IMPORTED_CONFIGS, scalarConfigType);\n\n\t\ted = newEndpointDescription(props);\n\t\tconfigTypes = ed.getConfigurationTypes();\n\t\tassertNotNull(\"configtypes null\", configTypes);\n\t\tassertFalse(\"configtypes not empty\", configTypes.isEmpty());\n\t\tassertEquals(\"only one element\", 1, configTypes.size());\n\t\tassertEquals(\"wrong configtype value\", scalarConfigType, configTypes\n\t\t\t\t.get(0));\n\t\ttestListMutability(configTypes);\n\n\t\ted = newEndpointDescription(ref, null);\n\t\tconfigTypes = ed.getConfigurationTypes();\n\t\tassertNotNull(\"configtypes null\", configTypes);\n\t\tassertFalse(\"configtypes not empty\", configTypes.isEmpty());\n\t\tassertEquals(\"only one element\", 1, configTypes.size());\n\t\tassertEquals(\"wrong configtype value\", scalarConfigType, configTypes\n\t\t\t\t.get(0));\n\t\ttestListMutability(configTypes);\n\n\t\tString[] arrayConfigTypes = new String[] {\"some.configtype1\",\n\t\t\t\t\"some.configtype2\"};\n\t\tprops.put(SERVICE_IMPORTED_CONFIGS, arrayConfigTypes);\n\t\tserviceProps.put(SERVICE_IMPORTED_CONFIGS, arrayConfigTypes);\n\n\t\ted = newEndpointDescription(props);\n\t\tconfigTypes = ed.getConfigurationTypes();\n\t\tassertNotNull(\"configtypes null\", configTypes);\n\t\tassertFalse(\"configtypes not empty\", configTypes.isEmpty());\n\t\tassertEquals(\"wrong number of elements\", arrayConfigTypes.length,\n\t\t\t\tconfigTypes.size());\n\t\tfor (int i = 0; i < arrayConfigTypes.length; i++) {\n\t\t\tassertEquals(\"wrong configtype value\", 
arrayConfigTypes[i],\n\t\t\t\t\tconfigTypes.get(i));\n\t\t}\n\t\ttestListMutability(configTypes);\n\n\t\ted = newEndpointDescription(ref, null);\n\t\tconfigTypes = ed.getConfigurationTypes();\n\t\tassertNotNull(\"configtypes null\", configTypes);\n\t\tassertFalse(\"configtypes not empty\", configTypes.isEmpty());\n\t\tassertEquals(\"wrong number of elements\", arrayConfigTypes.length,\n\t\t\t\tconfigTypes.size());\n\t\tfor (int i = 0; i < arrayConfigTypes.length; i++) {\n\t\t\tassertEquals(\"wrong configtype value\", arrayConfigTypes[i],\n\t\t\t\t\tconfigTypes.get(i));\n\t\t}\n\t\ttestListMutability(configTypes);\n\n\t\tList listConfigTypes = Arrays.asList(\"some.configtype3\",\n\t\t\t\t\"some.configtype4\", \"some.configtype5\");\n\t\tprops.put(SERVICE_IMPORTED_CONFIGS, listConfigTypes);\n\t\tserviceProps.put(SERVICE_IMPORTED_CONFIGS, listConfigTypes);\n\n\t\ted = newEndpointDescription(props);\n\t\tconfigTypes = ed.getConfigurationTypes();\n\t\tassertNotNull(\"configtypes null\", configTypes);\n\t\tassertFalse(\"configtypes not empty\", configTypes.isEmpty());\n\t\tassertEquals(\"wrong number of elements\", listConfigTypes.size(),\n\t\t\t\tconfigTypes.size());\n\t\tfor (int i = 0; i < listConfigTypes.size(); i++) {\n\t\t\tassertEquals(\"wrong configtype value\", listConfigTypes.get(i),\n\t\t\t\t\tconfigTypes.get(i));\n\t\t}\n\t\ttestListMutability(configTypes);\n\n\t\ted = newEndpointDescription(ref, null);\n\t\tconfigTypes = ed.getConfigurationTypes();\n\t\tassertNotNull(\"configtypes null\", configTypes);\n\t\tassertFalse(\"configtypes not empty\", configTypes.isEmpty());\n\t\tassertEquals(\"wrong number of elements\", listConfigTypes.size(),\n\t\t\t\tconfigTypes.size());\n\t\tfor (int i = 0; i < listConfigTypes.size(); i++) {\n\t\t\tassertEquals(\"wrong configtype value\", listConfigTypes.get(i),\n\t\t\t\t\tconfigTypes.get(i));\n\t\t}\n\t\ttestListMutability(configTypes);\n\n\t}\n\n\tpublic void testHashcode() {\n\t\tMap props = new HashMap();\n\t\tString someId = \"someId\";\n\t\tprops.put(ENDPOINT_ID, \"  \" + someId + \"\\t\");\n\t\tprops.put(OBJECTCLASS, new String[] {\"foo\"});\n\t\tprops.put(SERVICE_IMPORTED_CONFIGS, \"config\");\n\t\tString testUUID = \"testUUID\";\n\t\tBundleContext testContext = newMockBundleContext(testUUID);\n\t\tBundle testBundle = newMockBundle(1, \"testName\", \"testLocation\",\n\t\t\t\ttestContext);\n\t\tMap serviceProps = new TreeMap(\n\t\t\t\tString.CASE_INSENSITIVE_ORDER);\n\t\tserviceProps.put(ENDPOINT_ID, someId);\n\t\tserviceProps.put(OBJECTCLASS, new String[] {\"foo\"});\n\t\tserviceProps.put(SERVICE_IMPORTED_CONFIGS, \"config\");\n\t\tServiceReference< ? 
> ref = newMockServiceReference(testBundle,\n\t\t\t\tserviceProps);\n\t\tEndpointDescription ed1, ed2, ed3;\n\n\t\ted1 = newEndpointDescription(props);\n\t\ted2 = newEndpointDescription(ref, null);\n\t\tprops.put(ENDPOINT_ID, \"other.id\");\n\t\ted3 = newEndpointDescription(props);\n\n\t\tassertTrue(\"hashCode should equal\", ed1.hashCode() == ed2.hashCode());\n\t\tassertTrue(\"hashCode should not equal\", ed1.hashCode() != ed3\n\t\t\t\t.hashCode());\n\t\tassertTrue(\"hashCode should not equal\", ed2.hashCode() != ed3\n\t\t\t\t.hashCode());\n\t\tassertEquals(\"id not trimmed of whitespace\", someId, ed1.getId());\n\t}\n\n\t@SuppressWarnings(\"unlikely-arg-type\")\n\tpublic void testEquals() {\n\t\tMap props = new HashMap();\n\t\tString someId = \"someId\";\n\t\tprops.put(ENDPOINT_ID, \"  \" + someId + \"\\t\");\n\t\tprops.put(OBJECTCLASS, new String[] {\"foo\"});\n\t\tprops.put(SERVICE_IMPORTED_CONFIGS, \"config\");\n\t\tString testUUID = \"testUUID\";\n\t\tBundleContext testContext = newMockBundleContext(testUUID);\n\t\tBundle testBundle = newMockBundle(1, \"testName\", \"testLocation\",\n\t\t\t\ttestContext);\n\t\tMap serviceProps = new TreeMap(\n\t\t\t\tString.CASE_INSENSITIVE_ORDER);\n\t\tserviceProps.put(ENDPOINT_ID, someId);\n\t\tserviceProps.put(OBJECTCLASS, new String[] {\"foo\"});\n\t\tserviceProps.put(SERVICE_IMPORTED_CONFIGS, \"config\");\n\t\tServiceReference< ? > ref = newMockServiceReference(testBundle,\n\t\t\t\tserviceProps);\n\t\tEndpointDescription ed1, ed2, ed3;\n\n\t\ted1 = newEndpointDescription(props);\n\t\ted2 = newEndpointDescription(ref, null);\n\t\tprops.put(ENDPOINT_ID, \"other.id\");\n\t\ted3 = newEndpointDescription(props);\n\n\t\tassertTrue(\"should equal\", ed1.equals(ed2));\n\t\tassertTrue(\"should equal\", ed2.equals(ed1));\n\t\tassertFalse(\"should not equal\", ed1.equals(ed3));\n\t\tassertFalse(\"should not equal\", ed3.equals(ed1));\n\t\tassertFalse(\"should not equal\", ed2.equals(ed3));\n\t\tassertFalse(\"should not equal\", ed3.equals(ed1));\n\t\tassertTrue(\"should equal\", ed1.equals(ed1));\n\t\tassertTrue(\"should equal\", ed2.equals(ed2));\n\t\tassertTrue(\"should equal\", ed3.equals(ed3));\n\t\tassertFalse(\"should not equal\", ed2.equals(this));\n\t\tassertEquals(\"id not trimmed of whitespace\", someId, ed1.getId());\n\t}\n\n\tpublic void testIsSame() {\n\t\tMap props = new HashMap();\n\t\tprops.put(ENDPOINT_ID, \"id1\");\n\t\tprops.put(OBJECTCLASS, new String[] {\"foo\"});\n\t\tprops.put(SERVICE_IMPORTED_CONFIGS, \"config\");\n\t\tString testUUID = \"testUUID\";\n\t\tBundleContext testContext = newMockBundleContext(testUUID);\n\t\tBundle testBundle = newMockBundle(1, \"testName\", \"testLocation\",\n\t\t\t\ttestContext);\n\t\tMap serviceProps = new TreeMap(\n\t\t\t\tString.CASE_INSENSITIVE_ORDER);\n\t\tserviceProps.put(ENDPOINT_ID, \"id2\");\n\t\tserviceProps.put(OBJECTCLASS, new String[] {\"foo\"});\n\t\tserviceProps.put(SERVICE_IMPORTED_CONFIGS, \"config\");\n\t\tLong someID = Long.valueOf(12l);\n\t\tprops.put(ENDPOINT_SERVICE_ID, someID);\n\t\tserviceProps.put(SERVICE_ID, someID);\n\t\tServiceReference< ? 
> ref = newMockServiceReference(testBundle,\n\t\t\t\tserviceProps);\n\t\tEndpointDescription ed1, ed2, ed3, ed4;\n\n\t\ted1 = newEndpointDescription(props);\n\t\ted2 = newEndpointDescription(ref, null);\n\t\ted3 = newEndpointDescription(props);\n\t\tprops.put(ENDPOINT_ID, \"id4\");\n\t\tprops.put(ENDPOINT_FRAMEWORK_UUID, testUUID);\n\t\ted4 = newEndpointDescription(props);\n\n\t\tassertTrue(\"should be same\", ed1.isSameService(ed1));\n\t\tassertTrue(\"should be same\", ed3.isSameService(ed3));\n\t\tassertTrue(\"should be same\", ed1.isSameService(ed3));\n\t\tassertTrue(\"should be same\", ed3.isSameService(ed1));\n\t\tassertTrue(\"should be same\", ed2.isSameService(ed4));\n\t\tassertTrue(\"should be same\", ed4.isSameService(ed2));\n\t\tassertFalse(\"should not be same\", ed1.isSameService(ed2));\n\t\tassertFalse(\"should not be same\", ed2.isSameService(ed1));\n\t}\n\n\tpublic void testMatches() {\n\t\tMap props = new HashMap();\n\t\tprops.put(ENDPOINT_ID, \"id1\");\n\t\tprops.put(OBJECTCLASS, new String[] {\"foo\"});\n\t\tprops.put(SERVICE_IMPORTED_CONFIGS, \"config\");\n\t\tString testUUID = \"testUUID\";\n\t\tBundleContext testContext = newMockBundleContext(testUUID);\n\t\tBundle testBundle = newMockBundle(1, \"testName\", \"testLocation\",\n\t\t\t\ttestContext);\n\t\tMap serviceProps = new TreeMap(\n\t\t\t\tString.CASE_INSENSITIVE_ORDER);\n\t\tserviceProps.put(ENDPOINT_ID, \"id2\");\n\t\tserviceProps.put(OBJECTCLASS, new String[] {\"foo\"});\n\t\tserviceProps.put(SERVICE_IMPORTED_CONFIGS, \"config\");\n\t\tLong someID = Long.valueOf(12l);\n\t\tprops.put(ENDPOINT_SERVICE_ID, someID);\n\t\tserviceProps.put(SERVICE_ID, someID);\n\t\tServiceReference< ? > ref = newMockServiceReference(testBundle,\n\t\t\t\tserviceProps);\n\t\tEndpointDescription ed1, ed2;\n\n\t\ted1 = newEndpointDescription(props);\n\t\ted2 = newEndpointDescription(ref, null);\n\n\t\tString filter = \"(\" + ENDPOINT_ID + \"=id1)\";\n\t\tassertTrue(\"filter does not match\", ed1.matches(filter));\n\t\tassertFalse(\"filter matches\", ed2.matches(filter));\n\n\t\tfilter = \"(ENDPOINT.id=id2)\";\n\t\tassertTrue(\"filter does not match\", ed2.matches(filter));\n\t\tassertFalse(\"filter matches\", ed1.matches(filter));\n\n\t\tfilter = \"(\" + ENDPOINT_FRAMEWORK_UUID + \"=\" + testUUID + \")\";\n\t\tassertTrue(\"filter does not match\", ed2.matches(filter));\n\t\tassertFalse(\"filter matches\", ed1.matches(filter));\n\n\t\tfilter = \"(\" + ENDPOINT_FRAMEWORK_UUID + \"=*)\";\n\t\tassertTrue(\"filter does not match\", ed2.matches(filter));\n\t\tassertFalse(\"filter matches\", ed1.matches(filter));\n\n\t\ttry {\n\t\t\ted1.matches(\"(xx=foo\");\n\t\t\tfail(\"invalid filter syntax allowed\");\n\t\t}\n\t\tcatch (IllegalArgumentException e) {\n\t\t\t\/\/ expected\n\t\t}\n\t}\n\n\tpublic void testStripServiceExported() {\n\t\tMap props = new HashMap();\n\t\tString someId = \"someId\";\n\t\tprops.put(ENDPOINT_ID, \"  \" + someId + \"\\t\");\n\t\tprops.put(OBJECTCLASS, new String[] {\"foo\"});\n\t\tprops.put(SERVICE_IMPORTED_CONFIGS, \"config\");\n\t\tprops.put(\"SERVICE.EXPORTED.CONFIGS\", \"config\");\n\t\tString testUUID = \"testUUID\";\n\t\tBundleContext testContext = newMockBundleContext(testUUID);\n\t\tBundle testBundle = newMockBundle(1, \"testName\", \"testLocation\",\n\t\t\t\ttestContext);\n\t\tMap serviceProps = new TreeMap(\n\t\t\t\tString.CASE_INSENSITIVE_ORDER);\n\t\tserviceProps.put(ENDPOINT_ID, someId);\n\t\tserviceProps.put(OBJECTCLASS, new String[] {\"foo\"});\n\t\tserviceProps.put(SERVICE_IMPORTED_CONFIGS, 
\"config\");\n\t\tserviceProps.put(\"Service.Exported.Configs\", \"config\");\n\t\tServiceReference< ? > ref = newMockServiceReference(testBundle,\n\t\t\t\tserviceProps);\n\t\tEndpointDescription ed1, ed2;\n\n\t\ted1 = newEndpointDescription(props);\n\t\ted2 = newEndpointDescription(ref, null);\n\n\t\tassertFalse(\"service.exported.* property not removed\", ed1\n\t\t\t\t.getProperties().containsKey(SERVICE_EXPORTED_CONFIGS));\n\t\tassertFalse(\"service.exported.* property not removed\", ed2\n\t\t\t\t.getProperties().containsKey(SERVICE_EXPORTED_CONFIGS));\n\t}\n\n\tprivate EndpointDescription newEndpointDescription(\n\t\t\tServiceReference< ? > ref,\n\t\t\tMap props) {\n\t\tEndpointDescription ed = new EndpointDescription(ref, props);\n\t\ttestMutability(ed);\n\t\treturn ed;\n\t}\n\n\tprivate EndpointDescription newEndpointDescription(Map props) {\n\t\tEndpointDescription ed = new EndpointDescription(props);\n\t\ttestMutability(ed);\n\t\treturn ed;\n\t}\n\n\tprivate void testMutability(EndpointDescription ed) {\n\t\tMap p = ed.getProperties();\n\t\ttry {\n\t\t\tif (!p.isEmpty()) {\n\t\t\t\tp.clear();\n\t\t\t\tfail(\"properties is mutable\");\n\t\t\t}\n\t\t}\n\t\tcatch (RuntimeException e) {\n\t\t\t\/\/ expected\n\t\t}\n\t\ttry {\n\t\t\tp.put(\"foo\", \"bar\");\n\t\t\tfail(\"properties is mutable\");\n\t\t}\n\t\tcatch (RuntimeException e) {\n\t\t\t\/\/ expected\n\t\t}\n\t\tList interfs = ed.getInterfaces();\n\t\ttestListMutability(interfs);\n\t}\n\n\t@SuppressWarnings(\"unchecked\")\n\tprivate  void testListMutability(List list) {\n\t\ttry {\n\t\t\tif (!list.isEmpty()) {\n\t\t\t\tlist.clear();\n\t\t\t\tfail(\"list is mutable\");\n\t\t\t}\n\t\t}\n\t\tcatch (RuntimeException e) {\n\t\t\t\/\/ expected\n\t\t}\n\t\ttry {\n\t\t\tlist.add((T) \"foo\");\n\t\t\tfail(\"list is mutable\");\n\t\t}\n\t\tcatch (RuntimeException e) {\n\t\t\t\/\/ expected\n\t\t}\n\t}\n\n\tprivate String getPackageName(String className) {\n\t\tint index = className.lastIndexOf('.');\n\t\tif (index == -1) {\n\t\t\treturn \"\";\n\t\t}\n\t\treturn className.substring(0, index);\n\t}\n\n\tpublic static BundleContext newMockBundleContext(String uuid) {\n\t\treturn MockFactory.newMock(BundleContext.class,\n\t\t\t\tnew MockBundleContext(uuid));\n\t}\n\n\tprivate static class MockBundleContext {\n\t\tprivate final String\tuuid;\n\n\t\tMockBundleContext(String uuid) {\n\t\t\tthis.uuid = uuid;\n\t\t}\n\n\t\t@SuppressWarnings(\"unused\")\n\t\tpublic String getProperty(String key) {\n\t\t\tif (key.equals(\"org.osgi.framework.uuid\")) {\n\t\t\t\treturn uuid;\n\t\t\t}\n\t\t\treturn null;\n\t\t}\n\t}\n\n\tpublic static Bundle newMockBundle(long id, String name, String location,\n\t\t\tBundleContext context) {\n\t\treturn MockFactory.newMock(Bundle.class, new MockBundle(id,\n\t\t\t\tname, location, context));\n\t}\n\n\tprivate static class MockBundle {\n\t\tprivate final long\t\t\tid;\n\t\tprivate final String\t\tname;\n\t\tprivate final String\t\tlocation;\n\t\tprivate final BundleContext\tcontext;\n\n\t\tMockBundle(long id, String name, String location, BundleContext context) {\n\t\t\tthis.id = id;\n\t\t\tthis.name = name;\n\t\t\tthis.location = location;\n\t\t\tthis.context = context;\n\t\t}\n\n\t\t@SuppressWarnings(\"unused\")\n\t\tpublic long getBundleId() {\n\t\t\treturn id;\n\t\t}\n\n\t\t@SuppressWarnings(\"unused\")\n\t\tpublic String getLocation() {\n\t\t\treturn location;\n\t\t}\n\n\t\t@SuppressWarnings(\"unused\")\n\t\tpublic String getSymbolicName() {\n\t\t\treturn name;\n\t\t}\n\n\t\t@SuppressWarnings(\"unused\")\n\t\tpublic 
BundleContext getBundleContext() {\n\t\t\treturn context;\n\t\t}\n\t}\n\n\tpublic static ServiceReference< ? > newMockServiceReference(Bundle bundle,\n\t\t\tMap properties) {\n\t\treturn MockFactory.newMock(ServiceReference.class,\n\t\t\t\tnew MockServiceReference(bundle, properties));\n\t}\n\n\tprivate static class MockServiceReference {\n\t\tprivate final Bundle\t\t\t\tbundle;\n\t\tprivate final Map\tproperties;\n\n\t\tMockServiceReference(Bundle bundle, Map properties) {\n\t\t\tthis.bundle = bundle;\n\t\t\tthis.properties = properties;\n\t\t}\n\n\t\t@SuppressWarnings(\"unused\")\n\t\tpublic Bundle getBundle() {\n\t\t\treturn bundle;\n\t\t}\n\n\t\t@SuppressWarnings(\"unused\")\n\t\tpublic Object getProperty(String key) {\n\t\t\tObject result = properties.get(key);\n\t\t\tif (result != null) {\n\t\t\t\treturn result;\n\t\t\t}\n\t\t\tfor (Iterator iter = properties.keySet().iterator(); iter\n\t\t\t\t\t.hasNext();) {\n\t\t\t\tString k = iter.next();\n\t\t\t\tif (k.equalsIgnoreCase(key)) {\n\t\t\t\t\treturn properties.get(k);\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn null;\n\t\t}\n\n\t\t@SuppressWarnings(\"unused\")\n\t\tpublic String[] getPropertyKeys() {\n\t\t\tString[] result = new String[properties.size()];\n\t\t\tproperties.keySet().toArray(result);\n\t\t\treturn result;\n\t\t}\n\t}\n\n}\n","avg_line_length":32.1930348259,"max_line_length":81,"alphanum_fraction":0.7144711628}
{"size":7212,"ext":"java","lang":"Java","max_stars_count":null,"content":"\/\/ Generated by delombok at Mon Jul 15 18:44:01 PDT 2019\n\/\/ Generated by com.stripe.generator.entity.SdkBuilder\npackage com.stripe.model.issuing;\n\nimport com.google.gson.annotations.SerializedName;\nimport com.stripe.model.StripeObject;\n\npublic class MerchantData extends StripeObject {\n  \/**\n   * A categorization of the seller's type of business. See our [merchant categories\n   * guide](https:\/\/stripe.com\/docs\/issuing\/merchant-categories) for a list of possible values.\n   *\/\n  @SerializedName(\"category\")\n  String category;\n  \/**\n   * City where the seller is located.\n   *\/\n  @SerializedName(\"city\")\n  String city;\n  \/**\n   * Country where the seller is located.\n   *\/\n  @SerializedName(\"country\")\n  String country;\n  \/**\n   * Name of the seller.\n   *\/\n  @SerializedName(\"name\")\n  String name;\n  \/**\n   * Identifier assigned to the seller by the card brand.\n   *\/\n  @SerializedName(\"network_id\")\n  String networkId;\n  \/**\n   * Postal code where the seller is located.\n   *\/\n  @SerializedName(\"postal_code\")\n  String postalCode;\n  \/**\n   * State where the seller is located.\n   *\/\n  @SerializedName(\"state\")\n  String state;\n\n  \/**\n   * A categorization of the seller's type of business. See our [merchant categories\n   * guide](https:\/\/stripe.com\/docs\/issuing\/merchant-categories) for a list of possible values.\n   *\/\n  @java.lang.SuppressWarnings(\"all\")\n  \/\/ @lombok.Generated\n  public String getCategory() {\n    return this.category;\n  }\n\n  \/**\n   * City where the seller is located.\n   *\/\n  @java.lang.SuppressWarnings(\"all\")\n  \/\/ @lombok.Generated\n  public String getCity() {\n    return this.city;\n  }\n\n  \/**\n   * Country where the seller is located.\n   *\/\n  @java.lang.SuppressWarnings(\"all\")\n  \/\/ @lombok.Generated\n  public String getCountry() {\n    return this.country;\n  }\n\n  \/**\n   * Name of the seller.\n   *\/\n  @java.lang.SuppressWarnings(\"all\")\n  \/\/ @lombok.Generated\n  public String getName() {\n    return this.name;\n  }\n\n  \/**\n   * Identifier assigned to the seller by the card brand.\n   *\/\n  @java.lang.SuppressWarnings(\"all\")\n  \/\/ @lombok.Generated\n  public String getNetworkId() {\n    return this.networkId;\n  }\n\n  \/**\n   * Postal code where the seller is located.\n   *\/\n  @java.lang.SuppressWarnings(\"all\")\n  \/\/ @lombok.Generated\n  public String getPostalCode() {\n    return this.postalCode;\n  }\n\n  \/**\n   * State where the seller is located.\n   *\/\n  @java.lang.SuppressWarnings(\"all\")\n  \/\/ @lombok.Generated\n  public String getState() {\n    return this.state;\n  }\n\n  \/**\n   * A categorization of the seller's type of business. 
See our [merchant categories\n   * guide](https:\/\/stripe.com\/docs\/issuing\/merchant-categories) for a list of possible values.\n   *\/\n  @java.lang.SuppressWarnings(\"all\")\n  \/\/ @lombok.Generated\n  public void setCategory(final String category) {\n    this.category = category;\n  }\n\n  \/**\n   * City where the seller is located.\n   *\/\n  @java.lang.SuppressWarnings(\"all\")\n  \/\/ @lombok.Generated\n  public void setCity(final String city) {\n    this.city = city;\n  }\n\n  \/**\n   * Country where the seller is located.\n   *\/\n  @java.lang.SuppressWarnings(\"all\")\n  \/\/ @lombok.Generated\n  public void setCountry(final String country) {\n    this.country = country;\n  }\n\n  \/**\n   * Name of the seller.\n   *\/\n  @java.lang.SuppressWarnings(\"all\")\n  \/\/ @lombok.Generated\n  public void setName(final String name) {\n    this.name = name;\n  }\n\n  \/**\n   * Identifier assigned to the seller by the card brand.\n   *\/\n  @java.lang.SuppressWarnings(\"all\")\n  \/\/ @lombok.Generated\n  public void setNetworkId(final String networkId) {\n    this.networkId = networkId;\n  }\n\n  \/**\n   * Postal code where the seller is located.\n   *\/\n  @java.lang.SuppressWarnings(\"all\")\n  \/\/ @lombok.Generated\n  public void setPostalCode(final String postalCode) {\n    this.postalCode = postalCode;\n  }\n\n  \/**\n   * State where the seller is located.\n   *\/\n  @java.lang.SuppressWarnings(\"all\")\n  \/\/ @lombok.Generated\n  public void setState(final String state) {\n    this.state = state;\n  }\n\n  @java.lang.Override\n  @java.lang.SuppressWarnings(\"all\")\n  \/\/ @lombok.Generated\n  public boolean equals(final java.lang.Object o) {\n    if (o == this) return true;\n    if (!(o instanceof MerchantData)) return false;\n    final MerchantData other = (MerchantData) o;\n    if (!other.canEqual((java.lang.Object) this)) return false;\n    final java.lang.Object this$category = this.getCategory();\n    final java.lang.Object other$category = other.getCategory();\n    if (this$category == null ? other$category != null : !this$category.equals(other$category)) return false;\n    final java.lang.Object this$city = this.getCity();\n    final java.lang.Object other$city = other.getCity();\n    if (this$city == null ? other$city != null : !this$city.equals(other$city)) return false;\n    final java.lang.Object this$country = this.getCountry();\n    final java.lang.Object other$country = other.getCountry();\n    if (this$country == null ? other$country != null : !this$country.equals(other$country)) return false;\n    final java.lang.Object this$name = this.getName();\n    final java.lang.Object other$name = other.getName();\n    if (this$name == null ? other$name != null : !this$name.equals(other$name)) return false;\n    final java.lang.Object this$networkId = this.getNetworkId();\n    final java.lang.Object other$networkId = other.getNetworkId();\n    if (this$networkId == null ? other$networkId != null : !this$networkId.equals(other$networkId)) return false;\n    final java.lang.Object this$postalCode = this.getPostalCode();\n    final java.lang.Object other$postalCode = other.getPostalCode();\n    if (this$postalCode == null ? other$postalCode != null : !this$postalCode.equals(other$postalCode)) return false;\n    final java.lang.Object this$state = this.getState();\n    final java.lang.Object other$state = other.getState();\n    if (this$state == null ? 
other$state != null : !this$state.equals(other$state)) return false;\n    return true;\n  }\n\n  @java.lang.SuppressWarnings(\"all\")\n  \/\/ @lombok.Generated\n  protected boolean canEqual(final java.lang.Object other) {\n    return other instanceof MerchantData;\n  }\n\n  @java.lang.Override\n  @java.lang.SuppressWarnings(\"all\")\n  \/\/ @lombok.Generated\n  public int hashCode() {\n    final int PRIME = 59;\n    int result = 1;\n    final java.lang.Object $category = this.getCategory();\n    result = result * PRIME + ($category == null ? 43 : $category.hashCode());\n    final java.lang.Object $city = this.getCity();\n    result = result * PRIME + ($city == null ? 43 : $city.hashCode());\n    final java.lang.Object $country = this.getCountry();\n    result = result * PRIME + ($country == null ? 43 : $country.hashCode());\n    final java.lang.Object $name = this.getName();\n    result = result * PRIME + ($name == null ? 43 : $name.hashCode());\n    final java.lang.Object $networkId = this.getNetworkId();\n    result = result * PRIME + ($networkId == null ? 43 : $networkId.hashCode());\n    final java.lang.Object $postalCode = this.getPostalCode();\n    result = result * PRIME + ($postalCode == null ? 43 : $postalCode.hashCode());\n    final java.lang.Object $state = this.getState();\n    result = result * PRIME + ($state == null ? 43 : $state.hashCode());\n    return result;\n  }\n}\n","avg_line_length":30.6893617021,"max_line_length":117,"alphanum_fraction":0.6704104271}
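The MerchantData model above is a plain Gson-annotated data holder: each field carries a @SerializedName key matching Stripe's wire format, and the delombok'd accessors, equals and hashCode add no behaviour of their own. A minimal deserialization sketch follows; the JSON payload and field values are illustrative, not taken from the record:

import com.google.gson.Gson;
import com.stripe.model.issuing.MerchantData;

public class MerchantDataExample {
    public static void main(String[] args) {
        // @SerializedName drives the snake_case -> camelCase field mapping
        String json = "{\"category\":\"bakeries\",\"city\":\"Berlin\",\"country\":\"DE\",\"postal_code\":\"10115\"}";
        MerchantData data = new Gson().fromJson(json, MerchantData.class);
        System.out.println(data.getCategory() + " / " + data.getCity() + " / " + data.getPostalCode());
    }
}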
{"size":419,"ext":"java","lang":"Java","max_stars_count":null,"content":"package javaTest;\n\npublic class RunPoint {\n\n    public static void main(String[] args) {\n\n        Point p1 = new Point(1, 0);\n\n        System.out.println(\"\u041a\u043e\u043e\u0440\u0434\u0438\u043d\u0442\u044b \u0442\u043e\u0447\u043a\u0438 p1.x = \" + p1.x + \", p1.y =\" + p1.y);\n\n\n        Point p2 = new Point(3, 0);\n\n\n        System.out.println(\"\u041a\u043e\u043e\u0440\u0434\u0438\u043d\u0442\u044b \u0442\u043e\u0447\u043a\u0438 p2.x = \" + p2.x + \", p2.y = \" + p2.y);\n\n        System.out.println(\"\u0420\u0430\u0441\u0441\u0442\u043e\u044f\u043d\u0438\u0435 \u043c\u0435\u0436\u0434\u0443 p1 \u0438 p2 = \" + p1.distance(p2));\n\n    }\n}\n","avg_line_length":19.9523809524,"max_line_length":82,"alphanum_fraction":0.5489260143}
{"size":10085,"ext":"java","lang":"Java","max_stars_count":10.0,"content":"\/**\r\n * TestTwissProbe.java\r\n * \r\n * Created      : December, 2006\r\n * Author       : Christopher K. Allen\r\n *\/\r\npackage xal.model.probe;\r\n\r\n\r\nimport junit.framework.JUnit4TestAdapter;\r\n\r\nimport org.junit.Before;\r\nimport org.junit.Test;\r\n\r\nimport xal.model.alg.TwissTracker;\r\nimport xal.tools.beam.PhaseVector;\r\nimport xal.tools.beam.Twiss;\r\nimport xal.tools.beam.Twiss3D;\r\nimport xal.tools.math.r3.R3;\r\n\r\n\r\n\/**\r\n * Class for performing JUnit 4.x test on the TwissProbe<\/code> class\r\n * in the xal.model.probe<\/code> package.\r\n * \r\n * @author Christopher K. Allen\r\n *\r\n *\/\r\npublic class TestTwissProbe {\r\n\r\n    \r\n    \r\n    \r\n    \/*\r\n     * Global Constants\r\n     *\/\r\n    \r\n    \r\n    \/\/\r\n    \/\/ File Names\r\n    \/\/\r\n    \r\n    \/** archive save file *\/\r\n    private static final String STR_FILE_SAVE = \".\/build\/tests\/xal\/model\/probe\/TwissProbe_SaveTest.xml\";\r\n    \r\n    \/** archive load file *\/\r\n    private static final String STR_FILE_LOAD = \".\/common\/core\/test\/resources\/xal\/model\/simdb-LI_MEBT1-twissprobe-pmq.probe\";\r\n    \r\n    \/** archive save\/load file *\/\r\n    private static final String STR_FILE_SAVELOAD = \".\/build\/tests\/xal\/model\/probe\/TwissProbe_SaveLoadTest.xml\";\r\n\r\n    \r\n    \/\/\r\n    \/\/ Numerical - Algorithm Parameters\r\n    \/\/\r\n    \r\n\/\/    \/** solution precision *\/\r\n\/\/    private static final double     ERROR_TOLERANCE = 1.0e-5;\r\n    \r\n    \/** testing step length *\/\r\n    private static final double     ALG_STEPSIZE = 0.01;\r\n    \r\n    \/** emittance growth flag *\/\r\n    private static final boolean    ALG_EMITGROWTH = true;\r\n    \r\n    \/** debug mode flag *\/\r\n    private static final boolean    ALG_DEBUGMODE = false;\r\n    \r\n    \r\n    \r\n\/\/    \/\/\r\n\/\/    \/\/  Relativistic Parameters\r\n\/\/    \/\/\r\n\/\/    \r\n\/\/    \/** relativistic parameter for stationary beam *\/\r\n\/\/    private static final double      GAMMA_STAT = 1.0;\r\n\/\/    \r\n\/\/    \/** relativistic parameter for MEBT *\/\r\n\/\/    private static final double      GAMMA_MEBT = 1.003193863;\r\n\/\/    \r\n\/\/    \/** relativistic parameter for L3BT *\/\r\n\/\/    private static final double      GAMMA_L3BT = 1.192678;\r\n    \r\n    \r\n    \r\n    \/\/\r\n    \/\/ Beam Parameters\r\n    \/\/\r\n\r\n    \/** comment string  *\/\r\n    private static final String     PROBE_COMMENT = \"TestTwissProbe\";\r\n\r\n    \/** starting position *\/\r\n    private static final double     PROBE_S = 0.15;\r\n    \r\n    \/** beam energy *\/\r\n    private static final double     PROBE_W = 2.5e6;\r\n\r\n    \/** Bunch frequency *\/\r\n    private static final double     BUNCH_FREQ = 3.24e8;\r\n    \r\n    \/** beam current *\/\r\n    private static final double     BUNCH_CURRENT = 0.025;\r\n\r\n    \r\n    \r\n    \/** beam centroid offset *\/\r\n    private static final PhaseVector STATE_CENTROID = new PhaseVector(0.0, 0.0, 0.001, -0.010, 0.0, 0.0);\r\n    \r\n    \/** rotation angle for ellipsoid *\/\r\n    private static final double      STATE_ANGLE = 30.0*(Math.PI\/180.0);\r\n    \r\n    \r\n    \r\n    \r\n    \/** JPARC MEBT x-plane Twiss parameters *\/\r\n    private static final Twiss       TWISS_X = new Twiss(-1.2187, 0.13174, 3.1309642E-6);\r\n    \r\n    \/** JPARC MEBT y-plane Twiss parameters *\/\r\n    private static final Twiss       TWISS_Y = 
new Twiss(2.1885, 0.22344, 2.5075842000000002E-6);\r\n    \r\n    \/** JPARC MEBT z-plane Twiss parameters *\/\r\n    private static final Twiss       TWISS_Z = new Twiss(0.08, 0.7819530229746938, 3.106895634426948E-6);\r\n    \r\n    \r\n    \r\n\r\n    \r\n    \r\n    \r\n    \/*\r\n     * Local Attributes\r\n     *\/\r\n    \r\n    \r\n    \r\n    \/*\r\n     * Global Methods\r\n     *\/\r\n\r\n    \r\n    \/**\r\n     * Return a JUnit 3.x version TestBeamEllipsoid<\/code> instance that encapsulates \r\n     * this test suite.  This is a convenience method for attaching to old JUnit testing\r\n     * frameworks, for example, using Eclipse.\r\n     * \r\n     * @return  a JUnit 3.8 type test object adaptor\r\n     *\/\r\n    public static junit.framework.Test  getJUnitTest()  {\r\n        return new JUnit4TestAdapter(TestTwissProbe.class);\r\n    }\r\n    \r\n    \/**\r\n     * Create a TwissProbe<\/code> object with the above class constant \r\n     * parameters.\r\n     * \r\n     * @return  new TwissProbe<\/code> instance \r\n     *\/\r\n    public static TwissProbe    createTestProbe() {\r\n\r\n        \/\/ Create the algorithm instance and initialize it\r\n        TwissTracker    alg = new TwissTracker();\r\n        alg.setStepSize(TestTwissProbe.ALG_STEPSIZE);\r\n        alg.setEmittanceGrowth(TestTwissProbe.ALG_EMITGROWTH);\r\n        alg.setDebugMode(TestTwissProbe.ALG_DEBUGMODE);\r\n        \r\n        \/\/ Create the probe instance and initialize it\r\n        TwissProbe  probe   = new TwissProbe();\r\n\r\n        probe.setAlgorithm( alg );\r\n        \r\n        probe.setComment(TestTwissProbe.PROBE_COMMENT);\r\n        probe.setPosition(TestTwissProbe.PROBE_S);\r\n        probe.setKineticEnergy(TestTwissProbe.PROBE_W);\r\n        probe.setBunchFrequency(TestTwissProbe.BUNCH_FREQ);\r\n        probe.setBeamCurrent(TestTwissProbe.BUNCH_CURRENT);\r\n        probe.setBetatronPhase(new R3(0, 0, TestTwissProbe.STATE_ANGLE) );\r\n        probe.setCentroid( TestTwissProbe.STATE_CENTROID );\r\n        probe.setTwiss( new Twiss3D(TWISS_X, TWISS_Y, TWISS_Z) );\r\n        \r\n        return probe;\r\n    }\r\n    \r\n    \r\n    \r\n    \r\n    \/*\r\n     * Initialization\r\n     *\/\r\n    \r\n    \r\n    \/**\r\n     *  Create a new TestR3x3JacobiDecomposition<\/code> class for \r\n     *  JUnit 4.x testing of the TableSchema<\/code> class. \r\n     *\/\r\n    public TestTwissProbe() {\r\n        super();\r\n    }\r\n    \r\n    \r\n    \/**\r\n     * Setup the test fixture by creating a the test matrices.\r\n     *\/\r\n    @Before public void setup() {\r\n    }\r\n    \r\n\r\n    \r\n    \r\n    \/*\r\n     * Tests\r\n     *\/\r\n    \r\n    \/**\r\n     * Test the ability of a TwissProbe<\/code> to store itself.  
\r\n     *\/\r\n    @Test   public void testArchiveSave()  {\r\n\/\/        \r\n\/\/        TwissProbe probe = TestTwissProbe.createTestProbe();\r\n\/\/        \r\n\/\/        try {\r\n\/\/            ProbeXmlWriter.writeXml(probe, TestTwissProbe.STR_FILE_SAVE);\r\n\/\/\r\n\/\/        } catch (IOException e) {\r\n\/\/            e.printStackTrace();\r\n\/\/            Assert.fail(\"TestTwissProbe#testArchiveSave() - unable to save probe to \" + STR_FILE_SAVE);\r\n\/\/            return;\r\n\/\/            \r\n\/\/        }\r\n    }\r\n    \r\n    \/**\r\n     * Test the ability to recover a TwissProbe<\/code> object from\r\n     * a data store.\r\n     *\/\r\n    @Test   public void testArchiveLoad()   {\r\n\/\/        \r\n\/\/        try {\r\n\/\/            Probe probe = ProbeXmlParser.parse(TestTwissProbe.STR_FILE_LOAD);\r\n\/\/            Assert.assertTrue(probe instanceof TwissProbe);\r\n\/\/            \r\n\/\/        } catch (ParsingException e) {\r\n\/\/            e.printStackTrace();\r\n\/\/            Assert.fail(\"TestTwissProbe#testArchiveLoad() - unable to parse file \" + STR_FILE_LOAD);\r\n\/\/            return;\r\n\/\/            \r\n\/\/        }\r\n    }\r\n    \r\n    \/**\r\n     * Test the ability of a TwissProbe<\/code> to save itself then recover its state\r\n     * from the file, i.e., data persistence.\r\n     *\/\r\n    @Test   public void testArchiveSaveRestore()    {\r\n\/\/        \r\n\/\/        TwissProbe probeInit = TestTwissProbe.createTestProbe();\r\n\/\/        TwissProbe probeRest;\r\n\/\/\r\n\/\/        \/\/ Save probe to file then recover it\r\n\/\/        try {\r\n\/\/            ProbeXmlWriter.writeXml(probeInit, TestTwissProbe.STR_FILE_SAVELOAD);\r\n\/\/\r\n\/\/            probeRest = (TwissProbe)ProbeXmlParser.parse(TestTwissProbe.STR_FILE_SAVELOAD);\r\n\/\/            \r\n\/\/        } catch (IOException e) {\r\n\/\/            e.printStackTrace();\r\n\/\/            Assert.fail(\"TestTwissProbe#testArchiveSaveRestore() - unable to save probe to \" + STR_FILE_SAVELOAD);\r\n\/\/            return;\r\n\/\/\r\n\/\/        } catch (ParsingException e) {\r\n\/\/            e.printStackTrace();\r\n\/\/            Assert.fail(\"TestTwissProbe#testArchiveSaveRestore() - unable to parse file \" + STR_FILE_SAVELOAD);\r\n\/\/            return;\r\n\/\/            \r\n\/\/        }\r\n\/\/        \r\n\/\/        \/\/ Check values of the algorithm\r\n\/\/        IAlgorithm alg = probeRest.getAlgorithm();\r\n\/\/        if (!(alg instanceof TwissTracker) )    {\r\n\/\/            Assert.fail(\"TestTestProbe#testArchiveSaveRestore() - algorithm type failure \" + alg.getType());\r\n\/\/            return;\r\n\/\/        }\r\n\/\/        TwissTracker    algTwiss = (TwissTracker)alg;\r\n\/\/        Assert.assertEquals(TestTwissProbe.ALG_STEPSIZE, algTwiss.getStepSize(), 0.0);\r\n\/\/        Assert.assertEquals(TestTwissProbe.ALG_DEBUGMODE, algTwiss.getDebugMode());\r\n\/\/        Assert.assertEquals(TestTwissProbe.ALG_EMITGROWTH, algTwiss.getEmittanceGrowthFlag());\r\n\/\/        \r\n\/\/        \/\/ Check the values of the probe\r\n\/\/        Assert.assertEquals(TestTwissProbe.PROBE_S, probeRest.getPosition(), 0.0);\r\n\/\/        Assert.assertEquals(TestTwissProbe.PROBE_W, probeRest.getKineticEnergy(), 0.0);\r\n\/\/        Assert.assertEquals(TestTwissProbe.BUNCH_CURRENT, probeRest.getBeamCurrent(), 0.0);\r\n\/\/        Assert.assertEquals(TestTwissProbe.BUNCH_FREQ, probeRest.getBunchFrequency(), 0.0);\r\n\/\/        
Assert.assertEquals(TestTwissProbe.STATE_ANGLE, probeRest.getBetatronPhase().getz(), 0.0);\r\n\/\/        Assert.assertEquals(TestTwissProbe.TWISS_X.getAlpha(), probeRest.getTwiss(IND_3D.X).getAlpha(), 0.0);\r\n\/\/        Assert.assertEquals(TestTwissProbe.TWISS_X.getBeta(), probeRest.getTwiss(IND_3D.X).getBeta(), 0.0);\r\n\/\/        Assert.assertEquals(TestTwissProbe.TWISS_X.getEmittance(), probeRest.getTwiss(IND_3D.X).getEmittance(), 0.0);\r\n\/\/        Assert.assertEquals(TestTwissProbe.TWISS_Y.getAlpha(), probeRest.getTwiss(IND_3D.Y).getAlpha(), 0.0);\r\n\/\/        Assert.assertEquals(TestTwissProbe.TWISS_Y.getBeta(), probeRest.getTwiss(IND_3D.Y).getBeta(), 0.0);\r\n\/\/        Assert.assertEquals(TestTwissProbe.TWISS_Y.getEmittance(), probeRest.getTwiss(IND_3D.Y).getEmittance(), 0.0);\r\n\/\/        Assert.assertEquals(TestTwissProbe.TWISS_Z.getAlpha(), probeRest.getTwiss(IND_3D.Z).getAlpha(), 0.0);\r\n\/\/        Assert.assertEquals(TestTwissProbe.TWISS_Z.getBeta(), probeRest.getTwiss(IND_3D.Z).getBeta(), 0.0);\r\n\/\/        Assert.assertEquals(TestTwissProbe.TWISS_Z.getEmittance(), probeRest.getTwiss(IND_3D.Z).getEmittance(), 0.0);\r\n    }\r\n    \r\n    \r\n}\r\n    \r\n","avg_line_length":32.7435064935,"max_line_length":126,"alphanum_fraction":0.5978185424}
{"size":4067,"ext":"java","lang":"Java","max_stars_count":null,"content":"package com.anysoftkeyboard.addons;\n\nimport android.content.Context;\nimport android.content.res.Resources;\nimport android.util.SparseIntArray;\n\nimport com.anysoftkeyboard.AnySoftKeyboardTestRunner;\nimport com.menny.android.anysoftkeyboard.R;\n\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.mockito.Mockito;\nimport org.robolectric.RuntimeEnvironment;\n\n@RunWith(AnySoftKeyboardTestRunner.class)\npublic class SupportTest {\n\n    @Test\n    public void testSamePackageSameValues() {\n        SparseIntArray sparseIntArray = new SparseIntArray();\n        int[] backwardCompatibleStyleable = Support.createBackwardCompatibleStyleable(R.styleable.KeyboardLayout,\n                RuntimeEnvironment.application,\n                RuntimeEnvironment.application,\n                sparseIntArray);\n\n        Assert.assertSame(backwardCompatibleStyleable, R.styleable.KeyboardLayout);\n        Assert.assertEquals(backwardCompatibleStyleable.length, sparseIntArray.size());\n        for (int attrId : backwardCompatibleStyleable) {\n            Assert.assertEquals(attrId, sparseIntArray.get(attrId));\n        }\n    }\n\n    @Test\n    public void testDifferentPackageDifferentValues() {\n        \/\/this is a long setup\n        Context remoteContext = Mockito.mock(Context.class);\n        Mockito.doReturn(\"com.some.other.package\").when(remoteContext).getPackageName();\n        Resources remoteRes = Mockito.mock(Resources.class);\n        Mockito.doReturn(123).when(remoteRes).getIdentifier(Mockito.anyString(), Mockito.anyString(), Mockito.anyString());\n        Mockito.doReturn(remoteRes).when(remoteContext).getResources();\n\n        \/\/starting test\n        SparseIntArray sparseIntArray = new SparseIntArray();\n        int[] backwardCompatibleStyleable = Support.createBackwardCompatibleStyleable(R.styleable.KeyboardLayout,\n                RuntimeEnvironment.application,\n                remoteContext,\n                sparseIntArray);\n\n        Mockito.verify(remoteRes).getIdentifier(\"showPreview\", \"attr\", \"com.some.other.package\");\n        Mockito.verifyNoMoreInteractions(remoteRes);\n\n        Assert.assertNotSame(backwardCompatibleStyleable, R.styleable.KeyboardLayout);\n        Assert.assertEquals(backwardCompatibleStyleable.length, R.styleable.KeyboardLayout.length);\n        Assert.assertEquals(backwardCompatibleStyleable.length, sparseIntArray.size());\n        for (int attrId : backwardCompatibleStyleable) {\n            if (attrId == 123) {\n                Assert.assertEquals(R.attr.showPreview, sparseIntArray.get(123));\n            } else {\n                Assert.assertEquals(attrId, sparseIntArray.get(attrId));\n            }\n        }\n    }\n\n    @Test\n    public void testDifferentPackageNoValue() {\n        \/\/this is a long setup\n        Context remoteContext = Mockito.mock(Context.class);\n        Mockito.doReturn(\"com.some.other.package\").when(remoteContext).getPackageName();\n        Resources remoteRes = Mockito.mock(Resources.class);\n        Mockito.doReturn(0).when(remoteRes).getIdentifier(Mockito.anyString(), Mockito.anyString(), Mockito.anyString());\n        Mockito.doReturn(remoteRes).when(remoteContext).getResources();\n\n        \/\/starting test\n        SparseIntArray sparseIntArray = new SparseIntArray();\n        int[] backwardCompatibleStyleable = Support.createBackwardCompatibleStyleable(R.styleable.KeyboardLayout,\n       
         RuntimeEnvironment.application,\n                remoteContext,\n                sparseIntArray);\n\n        Mockito.verify(remoteRes).getIdentifier(\"showPreview\", \"attr\", \"com.some.other.package\");\n        Mockito.verifyNoMoreInteractions(remoteRes);\n\n        Assert.assertNotSame(backwardCompatibleStyleable, R.styleable.KeyboardLayout);\n        Assert.assertEquals(backwardCompatibleStyleable.length, R.styleable.KeyboardLayout.length - 1);\n        Assert.assertEquals(backwardCompatibleStyleable.length, sparseIntArray.size());\n        for (int attrId : backwardCompatibleStyleable) {\n            Assert.assertEquals(attrId, sparseIntArray.get(attrId));\n        }\n    }\n}","avg_line_length":44.6923076923,"max_line_length":123,"alphanum_fraction":0.710597492}
{"size":20862,"ext":"java","lang":"Java","max_stars_count":null,"content":"\/*\n * Copyright (C) 2010 Daniel Nilsson\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *      http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\/\n\npackage net.margaritov.preference.colorpicker;\n\nimport android.content.Context;\nimport android.graphics.Canvas;\nimport android.graphics.Color;\nimport android.graphics.ComposeShader;\nimport android.graphics.LinearGradient;\nimport android.graphics.Paint;\nimport android.graphics.Point;\nimport android.graphics.PorterDuff;\nimport android.graphics.RectF;\nimport android.graphics.Shader;\nimport android.graphics.Paint.Align;\nimport android.graphics.Paint.Style;\nimport android.graphics.Shader.TileMode;\nimport android.util.AttributeSet;\nimport android.view.MotionEvent;\nimport android.view.View;\n\n\/**\n * Displays a color picker to the user and allow them\n * to select a color. A slider for the alpha channel is\n * also available. Enable it by setting\n * setAlphaSliderVisible(boolean) to true.\n * @author Daniel Nilsson\n *\/\npublic class ColorPickerView extends View {\n\n\tprivate final static int\tPANEL_SAT_VAL = 0;\n\tprivate final static int\tPANEL_HUE = 1;\n\tprivate final static int\tPANEL_ALPHA = 2;\n\n\t\/**\n\t * The width in pixels of the border\n\t * surrounding all color panels.\n\t *\/\n\tprivate final static float\tBORDER_WIDTH_PX = 1;\n\n\t\/**\n\t * The width in dp of the hue panel.\n\t *\/\n\tprivate float \t\tHUE_PANEL_WIDTH = 30f;\n\t\/**\n\t * The height in dp of the alpha panel\n\t *\/\n\tprivate float\t\tALPHA_PANEL_HEIGHT = 20f;\n\t\/**\n\t * The distance in dp between the different\n\t * color panels.\n\t *\/\n\tprivate float \t\tPANEL_SPACING = 10f;\n\t\/**\n\t * The radius in dp of the color palette tracker circle.\n\t *\/\n\tprivate float \t\tPALETTE_CIRCLE_TRACKER_RADIUS = 5f;\n\t\/**\n\t * The dp which the tracker of the hue or alpha panel\n\t * will extend outside of its bounds.\n\t *\/\n\tprivate float\t\tRECTANGLE_TRACKER_OFFSET = 2f;\n\n\n\tprivate float \t\tmDensity = 1f;\n\n\tprivate OnColorChangedListener\tmListener;\n\n\tprivate Paint \t\tmSatValPaint;\n\tprivate Paint\t\tmSatValTrackerPaint;\n\n\tprivate Paint\t\tmHuePaint;\n\tprivate Paint\t\tmHueTrackerPaint;\n\n\tprivate Paint\t\tmAlphaPaint;\n\tprivate Paint\t\tmAlphaTextPaint;\n\n\tprivate Paint\t\tmBorderPaint;\n\n\tprivate Shader\t\tmValShader;\n\tprivate Shader\t\tmSatShader;\n\tprivate Shader\t\tmHueShader;\n\tprivate Shader\t\tmAlphaShader;\n\n\tprivate int\t\t\tmAlpha = 0xff;\n\tprivate float\t\tmHue = 360f;\n\tprivate float \t\tmSat = 0f;\n\tprivate float \t\tmVal = 0f;\n\n\tprivate String\t\tmAlphaSliderText = \"\";\n\tprivate int \t\tmSliderTrackerColor = 0xff1c1c1c;\n\tprivate int \t\tmBorderColor = 0xff6E6E6E;\n\tprivate boolean\t\tmShowAlphaPanel = false;\n\n\t\/*\n\t * To remember which panel that has the \"focus\" when\n\t * processing hardware button data.\n\t *\/\n\tprivate int\t\t\tmLastTouchedPanel = PANEL_SAT_VAL;\n\n\t\/**\n\t * Offset from the edge we must have or else\n\t * 
the finger tracker will get clipped when\n\t * it is drawn outside of the view.\n\t *\/\n\tprivate float \t\tmDrawingOffset;\n\n\n\t\/*\n\t * Distance form the edges of the view\n\t * of where we are allowed to draw.\n\t *\/\n\tprivate RectF\tmDrawingRect;\n\n\tprivate RectF\tmSatValRect;\n\tprivate RectF \tmHueRect;\n\tprivate RectF\tmAlphaRect;\n\n\tprivate AlphaPatternDrawable\tmAlphaPattern;\n\n\tprivate Point\tmStartTouchPoint = null;\n\n\tpublic interface OnColorChangedListener {\n\t\tpublic void onColorChanged(int color);\n\t}\n\n\tpublic ColorPickerView(Context context){\n\t\tthis(context, null);\n\t}\n\n\tpublic ColorPickerView(Context context, AttributeSet attrs) {\n\t\tthis(context, attrs, 0);\n\t}\n\n\tpublic ColorPickerView(Context context, AttributeSet attrs, int defStyle) {\n\t\tsuper(context, attrs, defStyle);\n\t\tinit();\n\t}\n\n\tprivate void init(){\n\t\tmDensity = getContext().getResources().getDisplayMetrics().density;\n\t\tPALETTE_CIRCLE_TRACKER_RADIUS *= mDensity;\n\t\tRECTANGLE_TRACKER_OFFSET *= mDensity;\n\t\tHUE_PANEL_WIDTH *= mDensity;\n\t\tALPHA_PANEL_HEIGHT *= mDensity;\n\t\tPANEL_SPACING = PANEL_SPACING * mDensity;\n\n\t\tmDrawingOffset = calculateRequiredOffset();\n\n\t\tinitPaintTools();\n\n\t\t\/\/Needed for receiving trackball motion events.\n\t\tsetFocusable(true);\n\t\tsetFocusableInTouchMode(true);\n\t}\n\n\tprivate void initPaintTools(){\n\n\t\tmSatValPaint = new Paint();\n\t\tmSatValTrackerPaint = new Paint();\n\t\tmHuePaint = new Paint();\n\t\tmHueTrackerPaint = new Paint();\n\t\tmAlphaPaint = new Paint();\n\t\tmAlphaTextPaint = new Paint();\n\t\tmBorderPaint = new Paint();\n\n\n\t\tmSatValTrackerPaint.setStyle(Style.STROKE);\n\t\tmSatValTrackerPaint.setStrokeWidth(2f * mDensity);\n\t\tmSatValTrackerPaint.setAntiAlias(true);\n\n\t\tmHueTrackerPaint.setColor(mSliderTrackerColor);\n\t\tmHueTrackerPaint.setStyle(Style.STROKE);\n\t\tmHueTrackerPaint.setStrokeWidth(2f * mDensity);\n\t\tmHueTrackerPaint.setAntiAlias(true);\n\n\t\tmAlphaTextPaint.setColor(0xff1c1c1c);\n\t\tmAlphaTextPaint.setTextSize(14f * mDensity);\n\t\tmAlphaTextPaint.setAntiAlias(true);\n\t\tmAlphaTextPaint.setTextAlign(Align.CENTER);\n\t\tmAlphaTextPaint.setFakeBoldText(true);\n\n\n\t}\n\n\tprivate float calculateRequiredOffset(){\n\t\tfloat offset = Math.max(PALETTE_CIRCLE_TRACKER_RADIUS, RECTANGLE_TRACKER_OFFSET);\n\t\toffset = Math.max(offset, BORDER_WIDTH_PX * mDensity);\n\n\t\treturn offset * 1.5f;\n\t}\n\n\tprivate int[] buildHueColorArray(){\n\n\t\tint[] hue = new int[361];\n\n\t\tint count = 0;\n\t\tfor(int i = hue.length -1; i >= 0; i--, count++){\n\t\t\thue[count] = Color.HSVToColor(new float[]{i, 1f, 1f});\n\t\t}\n\n\t\treturn hue;\n\t}\n\n\n\t@Override\n\tprotected void onDraw(Canvas canvas) {\n\n\t\tif(mDrawingRect.width() <= 0 || mDrawingRect.height() <= 0) return;\n\n\t\tdrawSatValPanel(canvas);\n\t\tdrawHuePanel(canvas);\n\t\tdrawAlphaPanel(canvas);\n\n\t}\n\n\tprivate void drawSatValPanel(Canvas canvas){\n\n\t\tfinal RectF\trect = mSatValRect;\n\n\t\tif(BORDER_WIDTH_PX > 0){\n\t\t\tmBorderPaint.setColor(mBorderColor);\n\t\t\tcanvas.drawRect(mDrawingRect.left, mDrawingRect.top, rect.right + BORDER_WIDTH_PX, rect.bottom + BORDER_WIDTH_PX, mBorderPaint);\n\t\t}\n\n\t\tif (mValShader == null) {\n\t\t\tmValShader = new LinearGradient(rect.left, rect.top, rect.left, rect.bottom,\n\t\t\t\t\t0xffffffff, 0xff000000, TileMode.CLAMP);\n\t\t}\n\n\t\tint rgb = Color.HSVToColor(new float[]{mHue,1f,1f});\n\n\t\tmSatShader = new LinearGradient(rect.left, rect.top, rect.right, 
rect.top,\n\t\t\t\t0xffffffff, rgb, TileMode.CLAMP);\n\t\tComposeShader mShader = new ComposeShader(mValShader, mSatShader, PorterDuff.Mode.MULTIPLY);\n\t\tmSatValPaint.setShader(mShader);\n\n\t\tcanvas.drawRect(rect, mSatValPaint);\n\n\t\tPoint p = satValToPoint(mSat, mVal);\n\n\t\tmSatValTrackerPaint.setColor(0xff000000);\n\t\tcanvas.drawCircle(p.x, p.y, PALETTE_CIRCLE_TRACKER_RADIUS - 1f * mDensity, mSatValTrackerPaint);\n\n\t\tmSatValTrackerPaint.setColor(0xffdddddd);\n\t\tcanvas.drawCircle(p.x, p.y, PALETTE_CIRCLE_TRACKER_RADIUS, mSatValTrackerPaint);\n\n\t}\n\n\tprivate void drawHuePanel(Canvas canvas){\n\n\t\tfinal RectF rect = mHueRect;\n\n\t\tif(BORDER_WIDTH_PX > 0){\n\t\t\tmBorderPaint.setColor(mBorderColor);\n\t\t\tcanvas.drawRect(rect.left - BORDER_WIDTH_PX,\n\t\t\t\t\trect.top - BORDER_WIDTH_PX,\n\t\t\t\t\trect.right + BORDER_WIDTH_PX,\n\t\t\t\t\trect.bottom + BORDER_WIDTH_PX,\n\t\t\t\t\tmBorderPaint);\n\t\t}\n\n\t\tif (mHueShader == null) {\n\t\t\tmHueShader = new LinearGradient(rect.left, rect.top, rect.left, rect.bottom, buildHueColorArray(), null, TileMode.CLAMP);\n\t\t\tmHuePaint.setShader(mHueShader);\n\t\t}\n\n\t\tcanvas.drawRect(rect, mHuePaint);\n\n\t\tfloat rectHeight = 4 * mDensity \/ 2;\n\n\t\tPoint p = hueToPoint(mHue);\n\n\t\tRectF r = new RectF();\n\t\tr.left = rect.left - RECTANGLE_TRACKER_OFFSET;\n\t\tr.right = rect.right + RECTANGLE_TRACKER_OFFSET;\n\t\tr.top = p.y - rectHeight;\n\t\tr.bottom = p.y + rectHeight;\n\n\n\t\tcanvas.drawRoundRect(r, 2, 2, mHueTrackerPaint);\n\n\t}\n\n\tprivate void drawAlphaPanel(Canvas canvas){\n\n\t\tif(!mShowAlphaPanel || mAlphaRect == null || mAlphaPattern == null) return;\n\n\t\tfinal RectF rect = mAlphaRect;\n\n\t\tif(BORDER_WIDTH_PX > 0){\n\t\t\tmBorderPaint.setColor(mBorderColor);\n\t\t\tcanvas.drawRect(rect.left - BORDER_WIDTH_PX,\n\t\t\t\t\trect.top - BORDER_WIDTH_PX,\n\t\t\t\t\trect.right + BORDER_WIDTH_PX,\n\t\t\t\t\trect.bottom + BORDER_WIDTH_PX,\n\t\t\t\t\tmBorderPaint);\n\t\t}\n\n\n\t\tmAlphaPattern.draw(canvas);\n\n\t\tfloat[] hsv = new float[]{mHue,mSat,mVal};\n\t\tint color = Color.HSVToColor(hsv);\n\t\tint acolor = Color.HSVToColor(0, hsv);\n\n\t\tmAlphaShader = new LinearGradient(rect.left, rect.top, rect.right, rect.top,\n\t\t\t\tcolor, acolor, TileMode.CLAMP);\n\n\n\t\tmAlphaPaint.setShader(mAlphaShader);\n\n\t\tcanvas.drawRect(rect, mAlphaPaint);\n\n\t\tif(mAlphaSliderText != null && mAlphaSliderText!= \"\"){\n\t\t\tcanvas.drawText(mAlphaSliderText, rect.centerX(), rect.centerY() + 4 * mDensity, mAlphaTextPaint);\n\t\t}\n\n\t\tfloat rectWidth = 4 * mDensity \/ 2;\n\n\t\tPoint p = alphaToPoint(mAlpha);\n\n\t\tRectF r = new RectF();\n\t\tr.left = p.x - rectWidth;\n\t\tr.right = p.x + rectWidth;\n\t\tr.top = rect.top - RECTANGLE_TRACKER_OFFSET;\n\t\tr.bottom = rect.bottom + RECTANGLE_TRACKER_OFFSET;\n\n\t\tcanvas.drawRoundRect(r, 2, 2, mHueTrackerPaint);\n\n\t}\n\n\n\tprivate Point hueToPoint(float hue){\n\n\t\tfinal RectF rect = mHueRect;\n\t\tfinal float height = rect.height();\n\n\t\tPoint p = new Point();\n\n\t\tp.y = (int) (height - (hue * height \/ 360f) + rect.top);\n\t\tp.x = (int) rect.left;\n\n\t\treturn p;\n\t}\n\n\tprivate Point satValToPoint(float sat, float val){\n\n\t\tfinal RectF rect = mSatValRect;\n\t\tfinal float height = rect.height();\n\t\tfinal float width = rect.width();\n\n\t\tPoint p = new Point();\n\n\t\tp.x = (int) (sat * width + rect.left);\n\t\tp.y = (int) ((1f - val) * height + rect.top);\n\n\t\treturn p;\n\t}\n\n\tprivate Point alphaToPoint(int alpha){\n\n\t\tfinal RectF rect = 
mAlphaRect;\n\t\tfinal float width = rect.width();\n\n\t\tPoint p = new Point();\n\n\t\tp.x = (int) (width - (alpha * width \/ 0xff) + rect.left);\n\t\tp.y = (int) rect.top;\n\n\t\treturn p;\n\n\t}\n\n\tprivate float[] pointToSatVal(float x, float y){\n\n\t\tfinal RectF rect = mSatValRect;\n\t\tfloat[] result = new float[2];\n\n\t\tfloat width = rect.width();\n\t\tfloat height = rect.height();\n\n\t\tif (x < rect.left){\n\t\t\tx = 0f;\n\t\t}\n\t\telse if(x > rect.right){\n\t\t\tx = width;\n\t\t}\n\t\telse{\n\t\t\tx = x - rect.left;\n\t\t}\n\n\t\tif (y < rect.top){\n\t\t\ty = 0f;\n\t\t}\n\t\telse if(y > rect.bottom){\n\t\t\ty = height;\n\t\t}\n\t\telse{\n\t\t\ty = y - rect.top;\n\t\t}\n\n\n\t\tresult[0] = 1.f \/ width * x;\n\t\tresult[1] = 1.f - (1.f \/ height * y);\n\n\t\treturn result;\n\t}\n\n\tprivate float pointToHue(float y){\n\n\t\tfinal RectF rect = mHueRect;\n\n\t\tfloat height = rect.height();\n\n\t\tif (y < rect.top){\n\t\t\ty = 0f;\n\t\t}\n\t\telse if(y > rect.bottom){\n\t\t\ty = height;\n\t\t}\n\t\telse{\n\t\t\ty = y - rect.top;\n\t\t}\n\n\t\treturn 360f - (y * 360f \/ height);\n\t}\n\n\tprivate int pointToAlpha(int x){\n\n\t\tfinal RectF rect = mAlphaRect;\n\t\tfinal int width = (int) rect.width();\n\n\t\tif(x < rect.left){\n\t\t\tx = 0;\n\t\t}\n\t\telse if(x > rect.right){\n\t\t\tx = width;\n\t\t}\n\t\telse{\n\t\t\tx = x - (int)rect.left;\n\t\t}\n\n\t\treturn 0xff - (x * 0xff \/ width);\n\n\t}\n\n\n\t@Override\n\tpublic boolean onTrackballEvent(MotionEvent event) {\n\n\t\tfloat x = event.getX();\n\t\tfloat y = event.getY();\n\n\t\tboolean update = false;\n\n\n\t\tif(event.getAction() == MotionEvent.ACTION_MOVE){\n\n\t\t\tswitch(mLastTouchedPanel){\n\n\t\t\tcase PANEL_SAT_VAL:\n\n\t\t\t\tfloat sat, val;\n\n\t\t\t\tsat = mSat + x\/50f;\n\t\t\t\tval = mVal - y\/50f;\n\n\t\t\t\tif(sat < 0f){\n\t\t\t\t\tsat = 0f;\n\t\t\t\t}\n\t\t\t\telse if(sat > 1f){\n\t\t\t\t\tsat = 1f;\n\t\t\t\t}\n\n\t\t\t\tif(val < 0f){\n\t\t\t\t\tval = 0f;\n\t\t\t\t}\n\t\t\t\telse if(val > 1f){\n\t\t\t\t\tval = 1f;\n\t\t\t\t}\n\n\t\t\t\tmSat = sat;\n\t\t\t\tmVal = val;\n\n\t\t\t\tupdate = true;\n\n\t\t\t\tbreak;\n\n\t\t\tcase PANEL_HUE:\n\n\t\t\t\tfloat hue = mHue - y * 10f;\n\n\t\t\t\tif(hue < 0f){\n\t\t\t\t\thue = 0f;\n\t\t\t\t}\n\t\t\t\telse if(hue > 360f){\n\t\t\t\t\thue = 360f;\n\t\t\t\t}\n\n\t\t\t\tmHue = hue;\n\n\t\t\t\tupdate = true;\n\n\t\t\t\tbreak;\n\n\t\t\tcase PANEL_ALPHA:\n\n\t\t\t\tif(!mShowAlphaPanel || mAlphaRect == null){\n\t\t\t\t\tupdate = false;\n\t\t\t\t}\n\t\t\t\telse{\n\n\t\t\t\t\tint alpha = (int) (mAlpha - x*10);\n\n\t\t\t\t\tif(alpha < 0){\n\t\t\t\t\t\talpha = 0;\n\t\t\t\t\t}\n\t\t\t\t\telse if(alpha > 0xff){\n\t\t\t\t\t\talpha = 0xff;\n\t\t\t\t\t}\n\n\t\t\t\t\tmAlpha = alpha;\n\n\n\t\t\t\t\tupdate = true;\n\t\t\t\t}\n\n\t\t\t\tbreak;\n\t\t\t}\n\n\n\t\t}\n\n\n\t\tif(update){\n\n\t\t\tif(mListener != null){\n\t\t\t\tmListener.onColorChanged(Color.HSVToColor(mAlpha, new float[]{mHue, mSat, mVal}));\n\t\t\t}\n\n\t\t\tinvalidate();\n\t\t\treturn true;\n\t\t}\n\n\n\t\treturn super.onTrackballEvent(event);\n\t}\n\n\t@Override\n\tpublic boolean onTouchEvent(MotionEvent event) {\n\n\t\tboolean update = false;\n\n\t\tswitch(event.getAction()){\n\n\t\tcase MotionEvent.ACTION_DOWN:\n\n\t\t\tmStartTouchPoint = new Point((int)event.getX(), (int)event.getY());\n\n\t\t\tupdate = moveTrackersIfNeeded(event);\n\n\t\t\tbreak;\n\n\t\tcase MotionEvent.ACTION_MOVE:\n\n\t\t\tupdate = moveTrackersIfNeeded(event);\n\n\t\t\tbreak;\n\n\t\tcase MotionEvent.ACTION_UP:\n\n\t\t\tmStartTouchPoint = 
null;\n\n\t\t\tupdate = moveTrackersIfNeeded(event);\n\n\t\t\tbreak;\n\n\t\t}\n\n\t\tif(update){\n\n\t\t\tif(mListener != null){\n\t\t\t\tmListener.onColorChanged(Color.HSVToColor(mAlpha, new float[]{mHue, mSat, mVal}));\n\t\t\t}\n\n\t\t\tinvalidate();\n\t\t\treturn true;\n\t\t}\n\n\n\t\treturn super.onTouchEvent(event);\n\t}\n\n\tprivate boolean moveTrackersIfNeeded(MotionEvent event){\n\n\t\tif(mStartTouchPoint == null) return false;\n\n\t\tboolean update = false;\n\n\t\tint startX = mStartTouchPoint.x;\n\t\tint startY = mStartTouchPoint.y;\n\n\n\t\tif(mHueRect.contains(startX, startY)){\n\t\t\tmLastTouchedPanel = PANEL_HUE;\n\n\t\t\tmHue = pointToHue(event.getY());\n\n\t\t\tupdate = true;\n\t\t}\n\t\telse if(mSatValRect.contains(startX, startY)){\n\n\t\t\tmLastTouchedPanel = PANEL_SAT_VAL;\n\n\t\t\tfloat[] result = pointToSatVal(event.getX(), event.getY());\n\n\t\t\tmSat = result[0];\n\t\t\tmVal = result[1];\n\n\t\t\tupdate = true;\n\t\t}\n\t\telse if(mAlphaRect != null && mAlphaRect.contains(startX, startY)){\n\n\t\t\tmLastTouchedPanel = PANEL_ALPHA;\n\n\t\t\tmAlpha = pointToAlpha((int)event.getX());\n\n\t\t\tupdate = true;\n\t\t}\n\n\n\t\treturn update;\n\t}\n\n\t@Override\n\tprotected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {\n\n\t\tint width = 0;\n\t\tint height = 0;\n\t\t\n\t\tint widthMode = MeasureSpec.getMode(widthMeasureSpec);\n\t\tint heightMode = MeasureSpec.getMode(heightMeasureSpec);\n\t\t\n\t\tint widthAllowed = MeasureSpec.getSize(widthMeasureSpec);\n\t\tint heightAllowed = MeasureSpec.getSize(heightMeasureSpec);\n\t\t\n\t\twidthAllowed = chooseWidth(widthMode, widthAllowed);\n\t\theightAllowed = chooseHeight(heightMode, heightAllowed);\n\t\t\n\t\tif(!mShowAlphaPanel){\n\t\t\t\n\t\t\theight = (int) (widthAllowed - PANEL_SPACING - HUE_PANEL_WIDTH);\n\n\t\t\t\/\/If calculated height (based on the width) is more than the allowed height.\n\t\t\tif(height > heightAllowed || getTag().equals(\"landscape\")) {\n\t\t\t\theight = heightAllowed;\n\t\t\t\twidth = (int) (height + PANEL_SPACING + HUE_PANEL_WIDTH);\n\t\t\t}\n\t\t\telse{\n\t\t\t\twidth = widthAllowed;\n\t\t\t}\n\t\t}\n\t\telse{\n\n\t\t\twidth = (int) (heightAllowed - ALPHA_PANEL_HEIGHT + HUE_PANEL_WIDTH);\n\n\t\t\tif(width > widthAllowed){\n\t\t\t\twidth = widthAllowed;\n\t\t\t\theight = (int) (widthAllowed - HUE_PANEL_WIDTH + ALPHA_PANEL_HEIGHT);\n\t\t\t}\n\t\t\telse{\n\t\t\t\theight = heightAllowed;\n\t\t\t}\n\n\t\t}\n\t\t\n\t\tsetMeasuredDimension(width, height);\n\t}\n\n\tprivate int chooseWidth(int mode, int size){\n\t\tif (mode == MeasureSpec.AT_MOST || mode == MeasureSpec.EXACTLY) {\n\t\t\treturn size;\n\t\t} else { \/\/ (mode == MeasureSpec.UNSPECIFIED)\n\t\t\treturn getPrefferedWidth();\n\t\t}\n\t}\n\n\tprivate int chooseHeight(int mode, int size){\n\t\tif (mode == MeasureSpec.AT_MOST || mode == MeasureSpec.EXACTLY) {\n\t\t\treturn size;\n\t\t} else { \/\/ (mode == MeasureSpec.UNSPECIFIED)\n\t\t\treturn getPrefferedHeight();\n\t\t}\n\t}\n\n\tprivate int getPrefferedWidth(){\n\n\t\tint width = getPrefferedHeight();\n\n\t\tif(mShowAlphaPanel){\n\t\t\twidth -= (PANEL_SPACING + ALPHA_PANEL_HEIGHT);\n\t\t}\n\n\n\t\treturn (int) (width + HUE_PANEL_WIDTH + PANEL_SPACING);\n\n\t}\n\n\tprivate int getPrefferedHeight(){\n\n\t\tint height = (int)(200 * mDensity);\n\n\t\tif(mShowAlphaPanel){\n\t\t\theight += PANEL_SPACING + ALPHA_PANEL_HEIGHT;\n\t\t}\n\n\t\treturn height;\n\t}\n\n\n\n\t@Override\n\tprotected void onSizeChanged(int w, int h, int oldw, int oldh) {\n\t\tsuper.onSizeChanged(w, h, oldw, 
oldh);\n\n\t\tmDrawingRect = new RectF();\n\t\tmDrawingRect.left = mDrawingOffset + getPaddingLeft();\n\t\tmDrawingRect.right  = w - mDrawingOffset - getPaddingRight();\n\t\tmDrawingRect.top = mDrawingOffset + getPaddingTop();\n\t\tmDrawingRect.bottom = h - mDrawingOffset - getPaddingBottom();\n\n\t\tsetUpSatValRect();\n\t\tsetUpHueRect();\n\t\tsetUpAlphaRect();\n\t}\n\n\tprivate void setUpSatValRect(){\n\n\t\tfinal RectF\tdRect = mDrawingRect;\n\t\tfloat panelSide = dRect.height() - BORDER_WIDTH_PX * 2;\n\n\t\tif(mShowAlphaPanel){\n\t\t\tpanelSide -= PANEL_SPACING + ALPHA_PANEL_HEIGHT;\n\t\t}\n\n\t\tfloat left = dRect.left + BORDER_WIDTH_PX;\n\t\tfloat top = dRect.top + BORDER_WIDTH_PX;\n\t\tfloat bottom = top + panelSide;\n\t\tfloat right = left + panelSide;\n\n\t\tmSatValRect = new RectF(left,top, right, bottom);\n\t}\n\n\tprivate void setUpHueRect(){\n\t\tfinal RectF\tdRect = mDrawingRect;\n\n\t\tfloat left = dRect.right - HUE_PANEL_WIDTH + BORDER_WIDTH_PX;\n\t\tfloat top = dRect.top + BORDER_WIDTH_PX;\n\t\tfloat bottom = dRect.bottom - BORDER_WIDTH_PX - (mShowAlphaPanel ? (PANEL_SPACING + ALPHA_PANEL_HEIGHT) : 0);\n\t\tfloat right = dRect.right - BORDER_WIDTH_PX;\n\n\t\tmHueRect = new RectF(left, top, right, bottom);\n\t}\n\n\tprivate void setUpAlphaRect() {\n\n\t\tif(!mShowAlphaPanel) return;\n\n\t\tfinal RectF\tdRect = mDrawingRect;\n\n\t\tfloat left = dRect.left + BORDER_WIDTH_PX;\n\t\tfloat top = dRect.bottom - ALPHA_PANEL_HEIGHT + BORDER_WIDTH_PX;\n\t\tfloat bottom = dRect.bottom - BORDER_WIDTH_PX;\n\t\tfloat right = dRect.right - BORDER_WIDTH_PX;\n\n\t\tmAlphaRect = new RectF(left, top, right, bottom);\n\n\t\tmAlphaPattern = new AlphaPatternDrawable((int) (5 * mDensity));\n\t\tmAlphaPattern.setBounds(\n\t\t\tMath.round(mAlphaRect.left), \n\t\t\tMath.round(mAlphaRect.top), \n\t\t\tMath.round(mAlphaRect.right), \n\t\t\tMath.round(mAlphaRect.bottom)\n\t\t);\n\n\t}\n\n\n\t\/**\n\t * Set a OnColorChangedListener to get notified when the color\n\t * selected by the user has changed.\n\t * @param listener\n\t *\/\n\tpublic void setOnColorChangedListener(OnColorChangedListener listener){\n\t\tmListener = listener;\n\t}\n\n\t\/**\n\t * Set the color of the border surrounding all panels.\n\t * @param color\n\t *\/\n\tpublic void setBorderColor(int color){\n\t\tmBorderColor = color;\n\t\tinvalidate();\n\t}\n\n\t\/**\n\t * Get the color of the border surrounding all panels.\n\t *\/\n\tpublic int getBorderColor(){\n\t\treturn mBorderColor;\n\t}\n\n\t\/**\n\t * Get the current color this view is showing.\n\t * @return the current color.\n\t *\/\n\tpublic int getColor(){\n\t\treturn Color.HSVToColor(mAlpha, new float[]{mHue,mSat,mVal});\n\t}\n\n\t\/**\n\t * Set the color the view should show.\n\t * @param color The color that should be selected.\n\t *\/\n\tpublic void setColor(int color){\n\t\tsetColor(color, false);\n\t}\n\n\t\/**\n\t * Set the color this view should show.\n\t * @param color The color that should be selected.\n\t * @param callback If you want to get a callback to\n\t * your OnColorChangedListener.\n\t *\/\n\tpublic void setColor(int color, boolean callback){\n\n\t\tint alpha = Color.alpha(color);\n\t\tint red = Color.red(color);\n\t\tint blue = Color.blue(color);\n\t\tint green = Color.green(color);\n\n\t\tfloat[] hsv = new float[3];\n\n\t\tColor.RGBToHSV(red, green, blue, hsv);\n\n\t\tmAlpha = alpha;\n\t\tmHue = hsv[0];\n\t\tmSat = hsv[1];\n\t\tmVal = hsv[2];\n\n\t\tif(callback && mListener != null){\n\t\t\tmListener.onColorChanged(Color.HSVToColor(mAlpha, new float[]{mHue, 
mSat, mVal}));\n\t\t}\n\n\t\tinvalidate();\n\t}\n\n\t\/**\n\t * Get the drawing offset of the color picker view.\n\t * The drawing offset is the distance from the side of\n\t * a panel to the side of the view minus the padding.\n\t * Useful if you want to have your own panel below showing\n\t * the currently selected color and want to align it perfectly.\n\t * @return The offset in pixels.\n\t *\/\n\tpublic float getDrawingOffset(){\n\t\treturn mDrawingOffset;\n\t}\n\n\t\/**\n\t * Set if the user is allowed to adjust the alpha panel. Default is false.\n\t * If it is set to false no alpha will be set.\n\t * @param visible\n\t *\/\n\tpublic void setAlphaSliderVisible(boolean visible){\n\n\t\tif(mShowAlphaPanel != visible){\n\t\t\tmShowAlphaPanel = visible;\n\n\t\t\t\/*\n\t\t\t * Reset all shader to force a recreation.\n\t\t\t * Otherwise they will not look right after\n\t\t\t * the size of the view has changed.\n\t\t\t *\/\n\t\t\tmValShader = null;\n\t\t\tmSatShader = null;\n\t\t\tmHueShader = null;\n\t\t\tmAlphaShader = null;;\n\n\t\t\trequestLayout();\n\t\t}\n\n\t}\n\n\tpublic void setSliderTrackerColor(int color){\n\t\tmSliderTrackerColor = color;\n\n\t\tmHueTrackerPaint.setColor(mSliderTrackerColor);\n\n\t\tinvalidate();\n\t}\n\n\tpublic int getSliderTrackerColor(){\n\t\treturn mSliderTrackerColor;\n\t}\n\n\t\/**\n\t * Set the text that should be shown in the\n\t * alpha slider. Set to null to disable text.\n\t * @param res string resource id.\n\t *\/\n\tpublic void setAlphaSliderText(int res){\n\t\tString text = getContext().getString(res);\n\t\tsetAlphaSliderText(text);\n\t}\n\n\t\/**\n\t * Set the text that should be shown in the\n\t * alpha slider. Set to null to disable text.\n\t * @param text Text that should be shown.\n\t *\/\n\tpublic void setAlphaSliderText(String text){\n\t\tmAlphaSliderText = text;\n\t\tinvalidate();\n\t}\n\n\t\/**\n\t * Get the current value of the text\n\t * that will be shown in the alpha\n\t * slider.\n\t * @return\n\t *\/\n\tpublic String getAlphaSliderText(){\n\t\treturn mAlphaSliderText;\n\t}\n}","avg_line_length":21.9138655462,"max_line_length":131,"alphanum_fraction":0.688332854}
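ColorPickerView's public API (setColor, setAlphaSliderVisible, setAlphaSliderText, setOnColorChangedListener, getColor) is fully defined in the source above. A short usage sketch, assuming the view has already been added to a layout; the R.id.color_picker id and the surrounding Activity are illustrative:

// Inside an Activity's onCreate(), after setContentView(...)
ColorPickerView picker = (ColorPickerView) findViewById(R.id.color_picker);
picker.setAlphaSliderVisible(true);      // show the alpha panel as well
picker.setAlphaSliderText("Alpha");
picker.setColor(0xFF3F51B5, false);      // preset a color without firing the listener
picker.setOnColorChangedListener(new ColorPickerView.OnColorChangedListener() {
    @Override
    public void onColorChanged(int color) {
        // color is an ARGB int built from the current alpha + HSV selection
    }
});

Note that onMeasure() in the source dereferences getTag(), so the layout is expected to set an android:tag attribute on the view (the source compares it against "landscape").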
{"size":2463,"ext":"java","lang":"Java","max_stars_count":1.0,"content":"\/*******************************************************************************\n * Copyright (c) 2006-2013 Bruno Ranschaert\n * Released under the MIT License: http:\/\/opensource.org\/licenses\/MIT\n * Library \"jsontools\"\n ******************************************************************************\/\npackage com.sdicons.json.mapper.helper;\n\nimport java.math.BigDecimal;\n\nimport com.sdicons.json.mapper.JSONMapper;\nimport com.sdicons.json.mapper.MapperException;\nimport com.sdicons.json.model.JSONDecimal;\nimport com.sdicons.json.model.JSONInteger;\nimport com.sdicons.json.model.JSONString;\nimport com.sdicons.json.model.JSONValue;\n\npublic class BigDecimalMapper implements ClassMapper {\n    private static final String BDM001 = \"JSONMapper\/BigDecimalMapper\/001: JSON->Java. Cannot map value '%s' to a BigDecimal.\";\n    private static final String BDM002 = \"JSONMapper\/BigDecimalMapper\/002: JSON->Java. Cannot map JSON class '%s' to Java BigDecimal.\";\n    private static final String BDM003 = \"JSONMapper\/BigDecimalMapper\/003: Java->JSON. Cannot map Java class '%s' to JSONDecimal.\";\n    private static final String BDM004 = \"JSONMapper\/BigDecimalMapper\/004: JSON->Java. Cannot convert to Java class '%s'.\";\n    \n    public Class getHelpedClass() {\n        return BigDecimal.class;\n    }\n    \n    public Object toJava(JSONMapper mapper, JSONValue aValue, Class aRequestedClass) throws MapperException {\n        if (!aRequestedClass.isAssignableFrom(BigDecimal.class)) \n            throw new MapperException(String.format(BDM004, aRequestedClass.getName()));\n        \n        if (aValue.isString()) {\n            try {\n                return new BigDecimal(((JSONString) aValue).getValue());\n            }\n            catch (NumberFormatException e) {\n                throw new MapperException(String.format(BDM001, ((JSONString) aValue).getValue()), e);\n            }\n        }\n        else if (aValue.isDecimal()) return ((JSONDecimal) aValue).getValue();\n        else if (aValue.isInteger()) return new BigDecimal(((JSONInteger) aValue).getValue());\n        else throw new MapperException(String.format(BDM002, aValue.getClass().getName()));\n    }\n    \n    public JSONValue toJSON(JSONMapper mapper, Object aPojo) throws MapperException {\n        if (!BigDecimal.class.isAssignableFrom(aPojo.getClass())) throw new MapperException(String.format(BDM003, aPojo.getClass().getName()));\n        return new JSONDecimal(new BigDecimal(aPojo.toString()));\n    }\n}\n","avg_line_length":50.2653061224,"max_line_length":143,"alphanum_fraction":0.6581404791}
{"size":105795,"ext":"java","lang":"Java","max_stars_count":null,"content":"\/*\n * Copyright 2017-2018, Strimzi authors.\n * License: Apache License 2.0 (see the file LICENSE or http:\/\/apache.org\/licenses\/LICENSE-2.0.html).\n *\/\npackage io.strimzi.operator.cluster.operator.assembly;\n\nimport io.fabric8.kubernetes.api.model.ConfigMap;\nimport io.fabric8.kubernetes.api.model.HasMetadata;\nimport io.fabric8.kubernetes.api.model.LoadBalancerIngress;\nimport io.fabric8.kubernetes.api.model.OwnerReference;\nimport io.fabric8.kubernetes.api.model.OwnerReferenceBuilder;\nimport io.fabric8.kubernetes.api.model.PersistentVolumeClaim;\nimport io.fabric8.kubernetes.api.model.Pod;\nimport io.fabric8.kubernetes.api.model.PodTemplateSpec;\nimport io.fabric8.kubernetes.api.model.Secret;\nimport io.fabric8.kubernetes.api.model.Service;\nimport io.fabric8.kubernetes.api.model.apps.Deployment;\nimport io.fabric8.kubernetes.api.model.apps.StatefulSet;\nimport io.fabric8.kubernetes.api.model.apps.StatefulSetBuilder;\nimport io.fabric8.kubernetes.client.KubernetesClient;\nimport io.fabric8.kubernetes.client.dsl.Resource;\nimport io.fabric8.openshift.api.model.Route;\nimport io.fabric8.openshift.api.model.RouteIngress;\nimport io.strimzi.api.kafka.KafkaAssemblyList;\nimport io.strimzi.api.kafka.model.CertificateAuthority;\nimport io.strimzi.api.kafka.model.DoneableKafka;\nimport io.strimzi.api.kafka.model.ExternalLogging;\nimport io.strimzi.api.kafka.model.JbodStorage;\nimport io.strimzi.api.kafka.model.Kafka;\nimport io.strimzi.api.kafka.model.PersistentClaimStorage;\nimport io.strimzi.api.kafka.model.SingleVolumeStorage;\nimport io.strimzi.api.kafka.model.Storage;\nimport io.strimzi.certs.CertManager;\nimport io.strimzi.operator.cluster.ClusterOperator;\nimport io.strimzi.operator.cluster.KafkaUpgradeException;\nimport io.strimzi.operator.cluster.model.AbstractModel;\nimport io.strimzi.operator.cluster.model.Ca;\nimport io.strimzi.operator.cluster.model.ClientsCa;\nimport io.strimzi.operator.cluster.model.ClusterCa;\nimport io.strimzi.operator.cluster.model.EntityOperator;\nimport io.strimzi.operator.cluster.model.EntityTopicOperator;\nimport io.strimzi.operator.cluster.model.EntityUserOperator;\nimport io.strimzi.operator.cluster.model.ImagePullPolicy;\nimport io.strimzi.operator.cluster.model.KafkaCluster;\nimport io.strimzi.operator.cluster.model.KafkaConfiguration;\nimport io.strimzi.operator.cluster.model.KafkaUpgrade;\nimport io.strimzi.operator.cluster.model.KafkaVersion;\nimport io.strimzi.operator.cluster.model.ModelUtils;\nimport io.strimzi.operator.cluster.model.TopicOperator;\nimport io.strimzi.operator.cluster.model.ZookeeperCluster;\nimport io.strimzi.operator.cluster.operator.resource.KafkaSetOperator;\nimport io.strimzi.operator.cluster.operator.resource.ResourceOperatorSupplier;\nimport io.strimzi.operator.cluster.operator.resource.StatefulSetOperator;\nimport io.strimzi.operator.cluster.operator.resource.ZookeeperSetOperator;\nimport io.strimzi.operator.common.Annotations;\nimport io.strimzi.operator.common.Reconciliation;\nimport io.strimzi.operator.common.model.Labels;\nimport io.strimzi.operator.common.model.ResourceType;\nimport io.strimzi.operator.common.operator.resource.ClusterRoleBindingOperator;\nimport io.strimzi.operator.common.operator.resource.ConfigMapOperator;\nimport io.strimzi.operator.common.operator.resource.DeploymentOperator;\nimport io.strimzi.operator.common.operator.resource.PvcOperator;\nimport 
io.strimzi.operator.common.operator.resource.ReconcileResult;\nimport io.strimzi.operator.common.operator.resource.RoleBindingOperator;\nimport io.strimzi.operator.common.operator.resource.RouteOperator;\nimport io.strimzi.operator.common.operator.resource.ServiceAccountOperator;\nimport io.strimzi.operator.common.operator.resource.ServiceOperator;\nimport io.vertx.core.CompositeFuture;\nimport io.vertx.core.Future;\nimport io.vertx.core.Vertx;\nimport org.apache.logging.log4j.LogManager;\nimport org.apache.logging.log4j.Logger;\nimport org.quartz.CronExpression;\n\nimport java.text.ParseException;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.Date;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.TimeZone;\nimport java.util.function.BiFunction;\nimport java.util.function.Supplier;\nimport java.util.stream.Collectors;\n\nimport static io.strimzi.operator.cluster.model.KafkaCluster.ANNO_STRIMZI_IO_FROM_VERSION;\nimport static io.strimzi.operator.cluster.model.KafkaCluster.ANNO_STRIMZI_IO_KAFKA_VERSION;\nimport static io.strimzi.operator.cluster.model.KafkaCluster.ANNO_STRIMZI_IO_TO_VERSION;\nimport static io.strimzi.operator.cluster.model.KafkaCluster.ENV_VAR_KAFKA_CONFIGURATION;\nimport static io.strimzi.operator.cluster.model.KafkaConfiguration.INTERBROKER_PROTOCOL_VERSION;\nimport static io.strimzi.operator.cluster.model.KafkaConfiguration.LOG_MESSAGE_FORMAT_VERSION;\nimport static io.strimzi.operator.cluster.model.KafkaVersion.compareDottedVersions;\nimport static io.strimzi.operator.cluster.model.TopicOperator.ANNO_STRIMZI_IO_LOGGING;\n\n\/**\n * 

<p>Assembly operator for a \"Kafka\" assembly, which manages:<\/p>\n * <ul>\n *  <li>A ZooKeeper cluster StatefulSet and related Services<\/li>\n *  <li>A Kafka cluster StatefulSet and related Services<\/li>\n *
  • Optionally, a TopicOperator Deployment<\/li>\n * <\/ul>\n *\/\n@SuppressWarnings({\"checkstyle:ClassFanOutComplexity\"})\npublic class KafkaAssemblyOperator extends AbstractAssemblyOperator> {\n private static final Logger log = LogManager.getLogger(KafkaAssemblyOperator.class.getName());\n\n public static final String ANNO_STRIMZI_IO_MANUAL_ROLLING_UPDATE = Annotations.STRIMZI_DOMAIN + \"\/manual-rolling-update\";\n @Deprecated\n public static final String ANNO_OP_STRIMZI_IO_MANUAL_ROLLING_UPDATE = \"operator.strimzi.io\/manual-rolling-update\";\n\n private final long operationTimeoutMs;\n\n private final ZookeeperSetOperator zkSetOperations;\n private final KafkaSetOperator kafkaSetOperations;\n private final ServiceOperator serviceOperations;\n private final RouteOperator routeOperations;\n private final PvcOperator pvcOperations;\n private final DeploymentOperator deploymentOperations;\n private final ConfigMapOperator configMapOperations;\n private final ServiceAccountOperator serviceAccountOperator;\n private final RoleBindingOperator roleBindingOperator;\n private final ClusterRoleBindingOperator clusterRoleBindingOperator;\n\n private final KafkaVersion.Lookup versions;\n\n \/**\n * @param vertx The Vertx instance\n * @param isOpenShift Whether we're running with OpenShift\n *\/\n public KafkaAssemblyOperator(Vertx vertx, boolean isOpenShift,\n long operationTimeoutMs,\n CertManager certManager,\n ResourceOperatorSupplier supplier,\n KafkaVersion.Lookup versions,\n ImagePullPolicy imagePullPolicy) {\n super(vertx, isOpenShift, ResourceType.KAFKA, certManager, supplier.kafkaOperator, supplier.secretOperations, supplier.networkPolicyOperator, supplier.podDisruptionBudgetOperator, imagePullPolicy);\n this.operationTimeoutMs = operationTimeoutMs;\n this.serviceOperations = supplier.serviceOperations;\n this.routeOperations = supplier.routeOperations;\n this.zkSetOperations = supplier.zkSetOperations;\n this.kafkaSetOperations = supplier.kafkaSetOperations;\n this.configMapOperations = supplier.configMapOperations;\n this.pvcOperations = supplier.pvcOperations;\n this.deploymentOperations = supplier.deploymentOperations;\n this.serviceAccountOperator = supplier.serviceAccountOperator;\n this.roleBindingOperator = supplier.roleBindingOperator;\n this.clusterRoleBindingOperator = supplier.clusterRoleBindingOperator;\n this.versions = versions;\n }\n\n @Override\n public Future createOrUpdate(Reconciliation reconciliation, Kafka kafkaAssembly) {\n Future chainFuture = Future.future();\n if (kafkaAssembly.getSpec() == null) {\n log.error(\"{} spec cannot be null\", kafkaAssembly.getMetadata().getName());\n return Future.failedFuture(\"Spec cannot be null\");\n }\n createReconciliationState(reconciliation, kafkaAssembly)\n .reconcileCas()\n .compose(state -> state.clusterOperatorSecret())\n \/\/ Roll everything if a new CA is added to the trust store.\n .compose(state -> state.rollingUpdateForNewCaKey())\n .compose(state -> state.zkManualPodCleaning())\n .compose(state -> state.zkManualRollingUpdate())\n .compose(state -> state.getZookeeperDescription())\n .compose(state -> state.zkScaleUpStep())\n .compose(state -> state.zkScaleDown())\n .compose(state -> state.zkService())\n .compose(state -> state.zkHeadlessService())\n .compose(state -> state.zkAncillaryCm())\n .compose(state -> state.zkNodesSecret())\n .compose(state -> state.zkNetPolicy())\n .compose(state -> state.zkPodDisruptionBudget())\n .compose(state -> state.zkStatefulSet())\n .compose(state -> state.zkScaleUp())\n 
.compose(state -> state.zkRollingUpdate(this::dateSupplier))\n .compose(state -> state.zkServiceEndpointReadiness())\n .compose(state -> state.zkHeadlessServiceEndpointReadiness())\n .compose(state -> state.zkPersistentClaimDeletion())\n .compose(state -> state.kafkaUpgrade())\n .compose(state -> state.kafkaManualPodCleaning())\n .compose(state -> state.kafkaManualRollingUpdate())\n .compose(state -> state.getKafkaClusterDescription())\n .compose(state -> state.kafkaInitServiceAccount())\n .compose(state -> state.kafkaInitClusterRoleBinding())\n .compose(state -> state.kafkaScaleDown())\n .compose(state -> state.kafkaService())\n .compose(state -> state.kafkaHeadlessService())\n .compose(state -> state.kafkaExternalBootstrapService())\n .compose(state -> state.kafkaReplicaServices())\n .compose(state -> state.kafkaBootstrapRoute())\n .compose(state -> state.kafkaReplicaRoutes())\n .compose(state -> state.kafkaExternalBootstrapServiceReady())\n .compose(state -> state.kafkaReplicaServicesReady())\n .compose(state -> state.kafkaBootstrapRouteReady())\n .compose(state -> state.kafkaReplicaRoutesReady())\n .compose(state -> state.kafkaGenerateCertificates())\n .compose(state -> state.kafkaAncillaryCm())\n .compose(state -> state.kafkaBrokersSecret())\n .compose(state -> state.kafkaNetPolicy())\n .compose(state -> state.kafkaPodDisruptionBudget())\n .compose(state -> state.kafkaStatefulSet())\n .compose(state -> state.kafkaRollingUpdate(this::dateSupplier))\n .compose(state -> state.kafkaScaleUp())\n .compose(state -> state.kafkaServiceEndpointReady())\n .compose(state -> state.kafkaHeadlessServiceEndpointReady())\n .compose(state -> state.kafkaPersistentClaimDeletion())\n\n .compose(state -> state.getTopicOperatorDescription())\n .compose(state -> state.topicOperatorServiceAccount())\n .compose(state -> state.topicOperatorRoleBinding())\n .compose(state -> state.topicOperatorAncillaryCm())\n .compose(state -> state.topicOperatorSecret())\n .compose(state -> state.topicOperatorDeployment(this::dateSupplier))\n\n .compose(state -> state.getEntityOperatorDescription())\n .compose(state -> state.entityOperatorServiceAccount())\n .compose(state -> state.entityOperatorTopicOpRoleBinding())\n .compose(state -> state.entityOperatorUserOpRoleBinding())\n .compose(state -> state.entityOperatorTopicOpAncillaryCm())\n .compose(state -> state.entityOperatorUserOpAncillaryCm())\n .compose(state -> state.entityOperatorSecret())\n .compose(state -> state.entityOperatorDeployment(this::dateSupplier))\n\n .compose(state -> chainFuture.complete(), chainFuture);\n\n return chainFuture;\n }\n\n ReconciliationState createReconciliationState(Reconciliation reconciliation, Kafka kafkaAssembly) {\n return new ReconciliationState(reconciliation, kafkaAssembly);\n }\n\n \/**\n * Hold the mutable state during a reconciliation\n *\/\n class ReconciliationState {\n\n private final String namespace;\n private final String name;\n private final Kafka kafkaAssembly;\n private final Reconciliation reconciliation;\n\n \/* test *\/ ClusterCa clusterCa;\n \/* test *\/ ClientsCa clientsCa;\n\n private ZookeeperCluster zkCluster;\n private Service zkService;\n private Service zkHeadlessService;\n private ConfigMap zkMetricsAndLogsConfigMap;\n \/* test *\/ ReconcileResult zkDiffs;\n private boolean zkAncillaryCmChange;\n\n private KafkaCluster kafkaCluster = null;\n private Service kafkaService;\n private Service kafkaHeadlessService;\n private ConfigMap kafkaMetricsAndLogsConfigMap;\n \/* test *\/ ReconcileResult kafkaDiffs;\n 
private Set kafkaExternalBootstrapDnsName = new HashSet<>();\n private Set kafkaExternalAddresses = new HashSet<>();\n private Map> kafkaExternalDnsNames = new HashMap<>();\n private boolean kafkaAncillaryCmChange;\n\n \/* test *\/ TopicOperator topicOperator;\n \/* test *\/ Deployment toDeployment = null;\n private ConfigMap toMetricsAndLogsConfigMap = null;\n\n \/* test *\/ EntityOperator entityOperator;\n \/* test *\/ Deployment eoDeployment = null;\n private ConfigMap topicOperatorMetricsAndLogsConfigMap = null;\n private ConfigMap userOperatorMetricsAndLogsConfigMap;\n\n ReconciliationState(Reconciliation reconciliation, Kafka kafkaAssembly) {\n this.reconciliation = reconciliation;\n this.kafkaAssembly = kafkaAssembly;\n this.namespace = kafkaAssembly.getMetadata().getNamespace();\n this.name = kafkaAssembly.getMetadata().getName();\n }\n\n \/**\n * Asynchronously reconciles the cluster and clients CA secrets.\n * The cluster CA secret has to have the name determined by {@link AbstractModel#clusterCaCertSecretName(String)}.\n * The clients CA secret has to have the name determined by {@link KafkaCluster#clientsCaCertSecretName(String)}.\n * Within both the secrets the current certificate is stored under the key {@code ca.crt}\n * and the current key is stored under the key {@code ca.key}.\n *\/\n Future reconcileCas() {\n Labels selectorLabels = Labels.EMPTY.withKind(reconciliation.type().toString()).withCluster(reconciliation.name());\n Labels caLabels = Labels.userLabels(kafkaAssembly.getMetadata().getLabels()).withKind(reconciliation.type().toString()).withCluster(reconciliation.name());\n Future result = Future.future();\n vertx.createSharedWorkerExecutor(\"kubernetes-ops-pool\").executeBlocking(\n future -> {\n try {\n String clusterCaCertName = AbstractModel.clusterCaCertSecretName(name);\n String clusterCaKeyName = AbstractModel.clusterCaKeySecretName(name);\n String clientsCaCertName = KafkaCluster.clientsCaCertSecretName(name);\n String clientsCaKeyName = KafkaCluster.clientsCaKeySecretName(name);\n Secret clusterCaCertSecret = null;\n Secret clusterCaKeySecret = null;\n Secret clientsCaCertSecret = null;\n Secret clientsCaKeySecret = null;\n List clusterSecrets = secretOperations.list(reconciliation.namespace(), selectorLabels);\n for (Secret secret : clusterSecrets) {\n String secretName = secret.getMetadata().getName();\n if (secretName.equals(clusterCaCertName)) {\n clusterCaCertSecret = secret;\n } else if (secretName.equals(clusterCaKeyName)) {\n clusterCaKeySecret = secret;\n } else if (secretName.equals(clientsCaCertName)) {\n clientsCaCertSecret = secret;\n } else if (secretName.equals(clientsCaKeyName)) {\n clientsCaKeySecret = secret;\n }\n }\n OwnerReference ownerRef = new OwnerReferenceBuilder()\n .withApiVersion(kafkaAssembly.getApiVersion())\n .withKind(kafkaAssembly.getKind())\n .withName(kafkaAssembly.getMetadata().getName())\n .withUid(kafkaAssembly.getMetadata().getUid())\n .withBlockOwnerDeletion(false)\n .withController(false)\n .build();\n\n CertificateAuthority clusterCaConfig = kafkaAssembly.getSpec().getClusterCa();\n this.clusterCa = new ClusterCa(certManager, name, clusterCaCertSecret, clusterCaKeySecret,\n ModelUtils.getCertificateValidity(clusterCaConfig),\n ModelUtils.getRenewalDays(clusterCaConfig),\n clusterCaConfig == null || clusterCaConfig.isGenerateCertificateAuthority(),\n clusterCaConfig != null ? 
clusterCaConfig.getCertificateExpirationPolicy() : null);\n clusterCa.createRenewOrReplace(\n reconciliation.namespace(), reconciliation.name(), caLabels.toMap(),\n ownerRef);\n\n this.clusterCa.initCaSecrets(clusterSecrets);\n\n CertificateAuthority clientsCaConfig = kafkaAssembly.getSpec().getClientsCa();\n this.clientsCa = new ClientsCa(certManager,\n clientsCaCertName, clientsCaCertSecret,\n clientsCaKeyName, clientsCaKeySecret,\n ModelUtils.getCertificateValidity(clientsCaConfig),\n ModelUtils.getRenewalDays(clientsCaConfig),\n clientsCaConfig == null || clientsCaConfig.isGenerateCertificateAuthority(),\n clientsCaConfig != null ? clientsCaConfig.getCertificateExpirationPolicy() : null);\n clientsCa.createRenewOrReplace(reconciliation.namespace(), reconciliation.name(),\n caLabels.toMap(), ownerRef);\n\n secretOperations.reconcile(reconciliation.namespace(), clusterCaCertName, this.clusterCa.caCertSecret())\n .compose(ignored -> secretOperations.reconcile(reconciliation.namespace(), clusterCaKeyName, this.clusterCa.caKeySecret()))\n .compose(ignored -> secretOperations.reconcile(reconciliation.namespace(), clientsCaCertName, this.clientsCa.caCertSecret()))\n .compose(ignored -> secretOperations.reconcile(reconciliation.namespace(), clientsCaKeyName, this.clientsCa.caKeySecret()))\n .compose(ignored -> {\n future.complete(this);\n }, future);\n } catch (Throwable e) {\n future.fail(e);\n }\n }, true,\n result.completer()\n );\n return result;\n }\n\n \/**\n * Perform a rolling update of the cluster so that CA certificates get added to their truststores,\n * or expired CA certificates get removed from their truststores.\n * Note this is only necessary when the CA certificate has changed due to a new CA key.\n * It is not necessary when the CA certificate is replace while retaining the existing key.\n *\/\n Future rollingUpdateForNewCaKey() {\n List reason = new ArrayList<>(4);\n if (this.clusterCa.keyReplaced()) {\n reason.add(\"trust new cluster CA certificate signed by new key\");\n }\n if (this.clientsCa.keyReplaced()) {\n reason.add(\"trust new clients CA certificate signed by new key\");\n }\n if (!reason.isEmpty()) {\n String reasons = reason.stream().collect(Collectors.joining(\", \"));\n return zkSetOperations.getAsync(namespace, ZookeeperCluster.zookeeperClusterName(name))\n .compose(ss -> {\n return zkSetOperations.maybeRollingUpdate(ss, pod -> {\n log.debug(\"{}: Rolling Pod {} to {}\", reconciliation, pod.getMetadata().getName(), reasons);\n return true;\n });\n })\n .compose(i -> kafkaSetOperations.getAsync(namespace, KafkaCluster.kafkaClusterName(name)))\n .compose(ss -> {\n return kafkaSetOperations.maybeRollingUpdate(ss, pod -> {\n log.debug(\"{}: Rolling Pod {} to {}\", reconciliation, pod.getMetadata().getName(), reasons);\n return true;\n });\n })\n .compose(i -> deploymentOperations.getAsync(namespace, TopicOperator.topicOperatorName(name)))\n .compose(dep -> {\n if (dep != null) {\n log.debug(\"{}: Rolling Deployment {} to {}\", reconciliation, TopicOperator.topicOperatorName(name), reasons);\n return deploymentOperations.rollingUpdate(namespace, TopicOperator.topicOperatorName(name), operationTimeoutMs);\n } else {\n return Future.succeededFuture();\n }\n })\n .compose(i -> deploymentOperations.getAsync(namespace, EntityOperator.entityOperatorName(name)))\n .compose(dep -> {\n if (dep != null) {\n log.debug(\"{}: Rolling Deployment {} to {}\", reconciliation, EntityOperator.entityOperatorName(name), reasons);\n return deploymentOperations.rollingUpdate(namespace, 
EntityOperator.entityOperatorName(name), operationTimeoutMs);\n } else {\n return Future.succeededFuture();\n }\n })\n .map(i -> this);\n } else {\n return Future.succeededFuture(this);\n }\n }\n\n Future kafkaManualRollingUpdate() {\n Future futss = kafkaSetOperations.getAsync(namespace, KafkaCluster.kafkaClusterName(name));\n if (futss != null) {\n return futss.compose(ss -> {\n if (ss != null) {\n if (Annotations.booleanAnnotation(ss, ANNO_STRIMZI_IO_MANUAL_ROLLING_UPDATE,\n false, ANNO_OP_STRIMZI_IO_MANUAL_ROLLING_UPDATE)) {\n return kafkaSetOperations.maybeRollingUpdate(ss, pod -> {\n\n log.debug(\"{}: Rolling Kafka pod {} due to manual rolling update\",\n reconciliation, pod.getMetadata().getName());\n return true;\n });\n }\n }\n return Future.succeededFuture();\n }).map(i -> this);\n }\n return Future.succeededFuture(this);\n }\n\n Future zkManualRollingUpdate() {\n Future futss = zkSetOperations.getAsync(namespace, ZookeeperCluster.zookeeperClusterName(name));\n if (futss != null) {\n return futss.compose(ss -> {\n if (ss != null) {\n if (Annotations.booleanAnnotation(ss, ANNO_STRIMZI_IO_MANUAL_ROLLING_UPDATE,\n false, ANNO_OP_STRIMZI_IO_MANUAL_ROLLING_UPDATE)) {\n\n return zkSetOperations.maybeRollingUpdate(ss, pod -> {\n\n log.debug(\"{}: Rolling Zookeeper pod {} to manual rolling update\",\n reconciliation, pod.getMetadata().getName());\n return true;\n });\n }\n }\n return Future.succeededFuture();\n }).map(i -> this);\n }\n return Future.succeededFuture(this);\n }\n\n \/**\n * If the SS exists, complete any pending rolls\n *\n * @return A Future which completes with the current state of the SS, or with null if the SS never existed.\n *\/\n public Future waitForQuiescence(String namespace, String statefulSetName) {\n return kafkaSetOperations.getAsync(namespace, statefulSetName).compose(ss -> {\n if (ss != null) {\n return kafkaSetOperations.maybeRollingUpdate(ss,\n pod -> {\n boolean notUpToDate = !isPodUpToDate(ss, pod);\n if (notUpToDate) {\n log.debug(\"Rolling pod {} prior to upgrade\", pod.getMetadata().getName());\n }\n return notUpToDate;\n }).map(ignored -> ss);\n } else {\n return Future.succeededFuture(ss);\n }\n });\n }\n\n Future kafkaUpgrade() {\n \/\/ Wait until the SS is not being updated (it shouldn't be, but there's no harm in checking)\n String kafkaSsName = KafkaCluster.kafkaClusterName(name);\n return waitForQuiescence(namespace, kafkaSsName).compose(\n ss -> {\n if (ss == null) {\n return Future.succeededFuture(this);\n }\n log.debug(\"Does SS {} need to be upgraded?\", ss.getMetadata().getName());\n Future result;\n \/\/ Get the current version of the cluster\n KafkaVersion currentVersion = versions.version(Annotations.annotations(ss).get(ANNO_STRIMZI_IO_KAFKA_VERSION));\n log.debug(\"SS {} has current version {}\", ss.getMetadata().getName(), currentVersion);\n String fromVersionAnno = Annotations.annotations(ss).get(ANNO_STRIMZI_IO_FROM_VERSION);\n KafkaVersion fromVersion;\n if (fromVersionAnno != null) { \/\/ We're mid-upgrade\n fromVersion = versions.version(fromVersionAnno);\n } else {\n fromVersion = currentVersion;\n }\n log.debug(\"SS {} is from version {}\", ss.getMetadata().getName(), fromVersion);\n String toVersionAnno = Annotations.annotations(ss).get(ANNO_STRIMZI_IO_TO_VERSION);\n KafkaVersion toVersion;\n if (toVersionAnno != null) { \/\/ We're mid-upgrade\n toVersion = versions.version(toVersionAnno);\n } else {\n toVersion = versions.version(kafkaAssembly.getSpec().getKafka().getVersion());\n }\n log.debug(\"SS {} is to version {}\", 
ss.getMetadata().getName(), toVersion);\n KafkaUpgrade upgrade = new KafkaUpgrade(fromVersion, toVersion);\n log.debug(\"Kafka upgrade {}\", upgrade);\n if (upgrade.isNoop()) {\n log.debug(\"Kafka.spec.kafka.version unchanged\");\n result = Future.succeededFuture();\n } else {\n String image = versions.kafkaImage(kafkaAssembly.getSpec().getKafka().getImage(), toVersion.version());\n Future f = Future.succeededFuture(ss);\n if (upgrade.isUpgrade()) {\n if (currentVersion.equals(fromVersion)) {\n f = f.compose(ignored -> kafkaUpgradePhase1(ss, upgrade, image));\n }\n result = f.compose(ss2 -> kafkaUpgradePhase2(ss2, upgrade));\n } else {\n if (currentVersion.equals(fromVersion)) {\n f = f.compose(ignored -> kafkaDowngradePhase1(ss, upgrade));\n }\n result = f.compose(ignored -> kafkaDowngradePhase2(ss, upgrade, image));\n }\n\n }\n return result.map(this);\n });\n }\n\n \/**\n *

    Initial upgrade phase.\n * If a message format change is required, check that it's set in the Kafka.spec.kafka.config\n * Set inter.broker.protocol.version if it's not set\n * Perform a rolling update.\n *\/\n private Future kafkaUpgradePhase1(StatefulSet ss, KafkaUpgrade upgrade, String upgradedImage) {\n log.info(\"{}: {}, phase 1\", reconciliation, upgrade);\n\n Map annotations = Annotations.annotations(ss);\n Map env = ModelUtils.getKafkaContainerEnv(ss);\n String string = env.getOrDefault(ENV_VAR_KAFKA_CONFIGURATION, \"\");\n log.debug(\"Current config {}\", string);\n KafkaConfiguration currentKafkaConfig = KafkaConfiguration.unvalidated(string);\n String oldMessageFormat = currentKafkaConfig.getConfigOption(LOG_MESSAGE_FORMAT_VERSION);\n if (upgrade.requiresMessageFormatChange() &&\n oldMessageFormat == null) {\n \/\/ We need to ensure both new and old versions are using the same version (so they agree during the upgrade).\n \/\/ If the msg version is given in the CR and it's the same as the current (live) msg version\n \/\/ then we're good. If the current live msg version is not given (i.e. the default) and\n \/\/ the msg version is given in the CR then we're also good.\n\n \/\/ Force the user to explicitly set the log.message.format.version\n \/\/ to match the old version\n return Future.failedFuture(new KafkaUpgradeException(upgrade + \" requires a message format change \" +\n \"from \" + upgrade.from().messageVersion() + \" to \" + upgrade.to().messageVersion() + \". \" +\n \"You must explicitly set \" +\n LOG_MESSAGE_FORMAT_VERSION + \": \\\"\" + upgrade.from().messageVersion() + \"\\\"\" +\n \" in Kafka.spec.kafka.config to perform the upgrade. \" +\n \"Then you can upgrade client applications. \" +\n \"And finally you can remove \" + LOG_MESSAGE_FORMAT_VERSION +\n \" from Kafka.spec.kafka.config\"));\n }\n \/\/ Otherwise both versions use the same message format, so we don't care.\n\n String lowerVersionProtocol = currentKafkaConfig.getConfigOption(INTERBROKER_PROTOCOL_VERSION);\n boolean twoPhase;\n if (lowerVersionProtocol == null) {\n if (!upgrade.requiresProtocolChange()) {\n \/\/ In this case we just need a single rolling update\n twoPhase = false;\n } else {\n twoPhase = true;\n \/\/ Set proto version and message version in Kafka config, if they're not already set\n lowerVersionProtocol = currentKafkaConfig.getConfigOption(INTERBROKER_PROTOCOL_VERSION, upgrade.from().protocolVersion());\n log.info(\"{}: Upgrade: Setting {} to {}\", reconciliation, INTERBROKER_PROTOCOL_VERSION, lowerVersionProtocol);\n currentKafkaConfig.setConfigOption(INTERBROKER_PROTOCOL_VERSION, lowerVersionProtocol);\n env.put(ENV_VAR_KAFKA_CONFIGURATION, currentKafkaConfig.getConfiguration());\n \/\/ Store upgrade state in annotations\n annotations.put(ANNO_STRIMZI_IO_FROM_VERSION, upgrade.from().version());\n annotations.put(ANNO_STRIMZI_IO_TO_VERSION, upgrade.to().version());\n }\n } else {\n \/\/ There's no need for the next phase of update because the user has\n \/\/ inter.broker.protocol.version set explicitly: The CO shouldn't remove it.\n \/\/ We're done, so remove the annotations.\n twoPhase = false;\n log.info(\"{}: Upgrade: Removing annotations {}, {}\",\n reconciliation, ANNO_STRIMZI_IO_FROM_VERSION, ANNO_STRIMZI_IO_TO_VERSION);\n annotations.remove(ANNO_STRIMZI_IO_FROM_VERSION);\n annotations.remove(ANNO_STRIMZI_IO_TO_VERSION);\n }\n log.info(\"{}: Upgrade: Setting annotation {}={}\",\n reconciliation, ANNO_STRIMZI_IO_KAFKA_VERSION, upgrade.to().version());\n 
annotations.put(ANNO_STRIMZI_IO_KAFKA_VERSION, upgrade.to().version());\n \/\/ update the annotations, image and environment\n StatefulSet newSs = new StatefulSetBuilder(ss)\n .editMetadata()\n .withAnnotations(annotations)\n .endMetadata()\n .editSpec()\n .editTemplate()\n .editSpec()\n .editFirstContainer()\n .withImage(upgradedImage)\n .withEnv(ModelUtils.envAsList(env))\n .endContainer()\n .endSpec()\n .endTemplate()\n .endSpec()\n .build();\n\n \/\/ patch and rolling upgrade\n String name = KafkaCluster.kafkaClusterName(this.name);\n log.info(\"{}: Upgrade: Patch + rolling update of {}\", reconciliation, name);\n return kafkaSetOperations.reconcile(namespace, name, newSs)\n .compose(result -> kafkaSetOperations.maybeRollingUpdate(ss, pod -> {\n log.info(\"{}: Upgrade: Patch + rolling update of {}: Pod {}\", reconciliation, name, pod.getMetadata().getName());\n return true;\n }).map(result.resource()))\n .compose(ss2 -> {\n log.info(\"{}: {}, phase 1 of {} completed: {}\", reconciliation, upgrade,\n twoPhase ? 2 : 1,\n twoPhase ? \"change in \" + INTERBROKER_PROTOCOL_VERSION + \" requires 2nd phase\"\n : \"no change to \" + INTERBROKER_PROTOCOL_VERSION + \" because it is explicitly configured\"\n );\n return Future.succeededFuture(twoPhase ? ss2 : null);\n });\n }\n\n \/**\n * Final upgrade phase\n * Note: The log.message.format.version is left at the old version.\n * It is a manual action to remove that once the user has updated all their clients.\n *\/\n private Future kafkaUpgradePhase2(StatefulSet ss, KafkaUpgrade upgrade) {\n if (ss == null) {\n \/\/ It was a one-phase update\n return Future.succeededFuture();\n }\n \/\/ Cluster is now using new binaries, but old proto version\n log.info(\"{}: {}, phase 2\", reconciliation, upgrade);\n \/\/ Remove the strimzi.io\/from-version and strimzi.io\/to-version since this is the last phase\n Map annotations = Annotations.annotations(ss);\n log.info(\"{}: Upgrade: Removing annotations {}, {}\",\n reconciliation, ANNO_STRIMZI_IO_FROM_VERSION, ANNO_STRIMZI_IO_TO_VERSION);\n annotations.remove(ANNO_STRIMZI_IO_FROM_VERSION);\n annotations.remove(ANNO_STRIMZI_IO_TO_VERSION);\n\n \/\/ Remove inter.broker.protocol.version (so the new version's default is used)\n Map env = ModelUtils.getKafkaContainerEnv(ss);\n KafkaConfiguration currentKafkaConfig = KafkaConfiguration.unvalidated(env.get(ENV_VAR_KAFKA_CONFIGURATION));\n\n log.info(\"{}: Upgrade: Removing Kafka config {}, will default to {}\",\n reconciliation, INTERBROKER_PROTOCOL_VERSION, upgrade.to().protocolVersion());\n currentKafkaConfig.removeConfigOption(INTERBROKER_PROTOCOL_VERSION);\n env.put(ENV_VAR_KAFKA_CONFIGURATION, currentKafkaConfig.getConfiguration());\n\n \/\/ Update to new proto version and rolling upgrade\n currentKafkaConfig.removeConfigOption(INTERBROKER_PROTOCOL_VERSION);\n\n StatefulSet newSs = new StatefulSetBuilder(ss)\n .editMetadata()\n .withAnnotations(annotations)\n .endMetadata()\n .editSpec()\n .editTemplate()\n .editSpec()\n .editFirstContainer()\n .withEnv(ModelUtils.envAsList(env))\n .endContainer()\n .endSpec()\n .endTemplate()\n .endSpec()\n .build();\n\n \/\/ Reconcile the SS and perform a rolling update of the pods\n log.info(\"{}: Upgrade: Patch + rolling update of {}\", reconciliation, name);\n return kafkaSetOperations.reconcile(namespace, KafkaCluster.kafkaClusterName(name), newSs)\n .compose(ignored -> kafkaSetOperations.maybeRollingUpdate(ss, pod -> {\n log.info(\"{}: Upgrade: Patch + rolling update of {}: Pod {}\", reconciliation, name, 
pod.getMetadata().getName());\n return true;\n }))\n .compose(ignored -> {\n log.info(\"{}: {}, phase 2 of 2 completed\", reconciliation, upgrade);\n return Future.succeededFuture();\n });\n }\n\n \/**\n *

<p>Initial downgrade phase.\n * <ol>\n *    <li>Set the log.message.format.version to the old version<\/li>\n *    <li>Set the inter.broker.protocol.version to the old version<\/li>\n *    <li>Set the strimzi.io\/upgrade-phase=1 (to record progress of the upgrade in case of CO failure)<\/li>\n *
    4. Reconcile the SS and perform a rolling update of the pods<\/li>\n * <\/ol>\n *\/\n private Future kafkaDowngradePhase1(StatefulSet ss, KafkaUpgrade upgrade) {\n log.info(\"{}: {}, phase 1\", reconciliation, upgrade);\n\n Map annotations = Annotations.annotations(ss);\n Map env = ModelUtils.getKafkaContainerEnv(ss);\n KafkaConfiguration currentKafkaConfig = KafkaConfiguration.unvalidated(env.getOrDefault(ENV_VAR_KAFKA_CONFIGURATION, \"\"));\n\n String oldMessageFormat = currentKafkaConfig.getConfigOption(LOG_MESSAGE_FORMAT_VERSION);\n \/\/ Force the user to explicitly set log.message.format.version\n \/\/ (Controller shouldn't break clients)\n if (oldMessageFormat == null || !oldMessageFormat.equals(upgrade.to().messageVersion())) {\n return Future.failedFuture(new KafkaUpgradeException(\n String.format(\"Cannot downgrade Kafka cluster %s in namespace %s to version %s \" +\n \"because the current cluster is configured with %s=%s. \" +\n \"Downgraded brokers would not be able to understand existing \" +\n \"messages with the message version %s. \",\n name, namespace, upgrade.to(),\n LOG_MESSAGE_FORMAT_VERSION, oldMessageFormat,\n oldMessageFormat)));\n }\n\n String lowerVersionProtocol = currentKafkaConfig.getConfigOption(INTERBROKER_PROTOCOL_VERSION);\n String phases;\n if (lowerVersionProtocol == null\n || compareDottedVersions(lowerVersionProtocol, upgrade.to().protocolVersion()) > 0) {\n phases = \"2 (change in \" + INTERBROKER_PROTOCOL_VERSION + \" requires 2nd phase)\";\n \/\/ Set proto version and message version in Kafka config, if they're not already set\n lowerVersionProtocol = currentKafkaConfig.getConfigOption(INTERBROKER_PROTOCOL_VERSION, upgrade.to().protocolVersion());\n log.info(\"{}: Downgrade: Setting {} to {}\", reconciliation, INTERBROKER_PROTOCOL_VERSION, lowerVersionProtocol);\n currentKafkaConfig.setConfigOption(INTERBROKER_PROTOCOL_VERSION, lowerVersionProtocol);\n env.put(ENV_VAR_KAFKA_CONFIGURATION, currentKafkaConfig.getConfiguration());\n \/\/ Store upgrade state in annotations\n annotations.put(ANNO_STRIMZI_IO_FROM_VERSION, upgrade.from().version());\n annotations.put(ANNO_STRIMZI_IO_TO_VERSION, upgrade.to().version());\n } else {\n \/\/ In this case there's no need for this phase of update, because the both old and new\n \/\/ brokers speaking protocol of the lower version.\n phases = \"2 (1st phase skips rolling update)\";\n log.info(\"{}: {}, phase 1 of {} completed\", reconciliation, upgrade, phases);\n return Future.succeededFuture(ss);\n }\n\n \/\/ update the annotations, image and environment\n StatefulSet newSs = new StatefulSetBuilder(ss)\n .editMetadata()\n .withAnnotations(annotations)\n .endMetadata()\n .editSpec()\n .editTemplate()\n .editSpec()\n .editFirstContainer()\n .withEnv(ModelUtils.envAsList(env))\n .endContainer()\n .endSpec()\n .endTemplate()\n .endSpec()\n .build();\n\n \/\/ patch and rolling upgrade\n String name = KafkaCluster.kafkaClusterName(this.name);\n log.info(\"{}: Downgrade: Patch + rolling update of {}\", reconciliation, name);\n return kafkaSetOperations.reconcile(namespace, name, newSs)\n .compose(result -> kafkaSetOperations.maybeRollingUpdate(ss, pod -> {\n log.info(\"{}: Downgrade: Patch + rolling update of {}: Pod {}\", reconciliation, name, pod.getMetadata().getName());\n return true;\n }).map(result.resource()))\n .compose(ss2 -> {\n log.info(\"{}: {}, phase 1 of {} completed\", reconciliation, upgrade, phases);\n return Future.succeededFuture(ss2);\n });\n }\n\n \/**\n *

<p>Final downgrade phase\n * <ol>\n *      <li>Update the strimzi.io\/kafka-version to the new version<\/li>\n *      <li>Remove the strimzi.io\/from-kafka-version since this is the last phase<\/li>\n *      <li>Remove the strimzi.io\/to-kafka-version since this is the last phase<\/li>\n *      <li>Remove inter.broker.protocol.version (so the new version's default is used)<\/li>\n *      <li>Update the image in the SS<\/li>\n *
      6. Reconcile the SS and perform a rolling update of the pods<\/li>\n * <\/ol>\n *\/\n private Future kafkaDowngradePhase2(StatefulSet ss, KafkaUpgrade downgrade, String downgradedImage) {\n log.info(\"{}: {}, phase 2\", reconciliation, downgrade);\n \/\/ Remove the strimzi.io\/from-version and strimzi.io\/to-version since this is the last phase\n\n Map annotations = Annotations.annotations(ss);\n\n log.info(\"{}: Upgrade: Removing annotations {}, {}\",\n reconciliation, ANNO_STRIMZI_IO_FROM_VERSION, ANNO_STRIMZI_IO_TO_VERSION);\n annotations.remove(ANNO_STRIMZI_IO_FROM_VERSION);\n annotations.remove(ANNO_STRIMZI_IO_TO_VERSION);\n annotations.put(ANNO_STRIMZI_IO_KAFKA_VERSION, downgrade.to().version());\n\n \/\/ Remove inter.broker.protocol.version (so the new version's default is used)\n Map env = ModelUtils.getKafkaContainerEnv(ss);\n KafkaConfiguration currentKafkaConfig = KafkaConfiguration.unvalidated(env.getOrDefault(ENV_VAR_KAFKA_CONFIGURATION, \"\"));\n log.info(\"{}: Upgrade: Removing Kafka config {}, will default to {}\",\n reconciliation, INTERBROKER_PROTOCOL_VERSION, downgrade.to().protocolVersion());\n currentKafkaConfig.removeConfigOption(INTERBROKER_PROTOCOL_VERSION);\n env.put(ENV_VAR_KAFKA_CONFIGURATION, currentKafkaConfig.getConfiguration());\n\n StatefulSet newSs = new StatefulSetBuilder(ss)\n .editMetadata()\n .withAnnotations(annotations)\n .endMetadata()\n .editSpec()\n .editTemplate()\n .editSpec()\n .editFirstContainer()\n .withImage(downgradedImage)\n .withEnv(ModelUtils.envAsList(env))\n .endContainer()\n .endSpec()\n .endTemplate()\n .endSpec()\n .build();\n\n \/\/ Reconcile the SS and perform a rolling update of the pods\n log.info(\"{}: Upgrade: Patch + rolling update of {}\", reconciliation, name);\n return kafkaSetOperations.reconcile(namespace, KafkaCluster.kafkaClusterName(name), newSs)\n .compose(ignored -> kafkaSetOperations.maybeRollingUpdate(ss, pod -> {\n log.info(\"{}: Upgrade: Patch + rolling update of {}: Pod {}\", reconciliation, name, pod.getMetadata().getName());\n return true;\n }))\n .compose(ignored -> {\n log.info(\"{}: {}, phase 2 of 2 completed\", reconciliation, downgrade);\n return Future.succeededFuture();\n });\n }\n\n Future getZookeeperDescription() {\n Future fut = Future.future();\n\n vertx.createSharedWorkerExecutor(\"kubernetes-ops-pool\").executeBlocking(\n future -> {\n try {\n this.zkCluster = ZookeeperCluster.fromCrd(kafkaAssembly, versions);\n\n ConfigMap logAndMetricsConfigMap = zkCluster.generateMetricsAndLogConfigMap(zkCluster.getLogging() instanceof ExternalLogging ?\n configMapOperations.get(kafkaAssembly.getMetadata().getNamespace(), ((ExternalLogging) zkCluster.getLogging()).getName()) :\n null);\n\n this.zkService = zkCluster.generateService();\n this.zkHeadlessService = zkCluster.generateHeadlessService();\n this.zkMetricsAndLogsConfigMap = zkCluster.generateMetricsAndLogConfigMap(logAndMetricsConfigMap);\n\n future.complete(this);\n } catch (Throwable e) {\n future.fail(e);\n }\n }, true,\n res -> {\n if (res.succeeded()) {\n fut.complete((ReconciliationState) res.result());\n } else {\n fut.fail(res.cause());\n }\n }\n );\n\n return fut;\n }\n\n Future withZkDiff(Future> r) {\n return r.map(rr -> {\n this.zkDiffs = rr;\n return this;\n });\n }\n\n Future withVoid(Future r) {\n return r.map(this);\n }\n\n Future zkScaleDown() {\n return withVoid(zkSetOperations.scaleDown(namespace, zkCluster.getName(), zkCluster.getReplicas()));\n }\n\n Future zkService() {\n return 
withVoid(serviceOperations.reconcile(namespace, zkCluster.getServiceName(), zkService));\n }\n\n Future zkHeadlessService() {\n return withVoid(serviceOperations.reconcile(namespace, zkCluster.getHeadlessServiceName(), zkHeadlessService));\n }\n\n Future getReconciliationStateOfConfigMap(AbstractModel cluster, ConfigMap configMap, BiFunction>, Future> function) {\n Future result = Future.future();\n\n vertx.createSharedWorkerExecutor(\"kubernetes-ops-pool\").executeBlocking(\n future -> {\n ConfigMap current = configMapOperations.get(namespace, cluster.getAncillaryConfigName());\n boolean onlyMetricsSettingChanged = onlyMetricsSettingChanged(current, configMap);\n future.complete(onlyMetricsSettingChanged);\n }, res -> {\n if (res.succeeded()) {\n boolean onlyMetricsSettingChanged = res.result();\n function.apply(onlyMetricsSettingChanged, configMapOperations.reconcile(namespace, cluster.getAncillaryConfigName(), configMap)).setHandler(res2 -> {\n if (res2.succeeded()) {\n result.complete(res2.result());\n } else {\n result.fail(res2.cause());\n }\n });\n } else {\n result.fail(res.cause());\n }\n });\n return result;\n }\n\n Future zkAncillaryCm() {\n return getReconciliationStateOfConfigMap(zkCluster, zkMetricsAndLogsConfigMap, this::withZkAncillaryCmChanged);\n }\n\n Future zkNodesSecret() {\n return withVoid(secretOperations.reconcile(namespace, ZookeeperCluster.nodesSecretName(name),\n zkCluster.generateNodesSecret(clusterCa, kafkaAssembly)));\n }\n\n Future zkNetPolicy() {\n return withVoid(networkPolicyOperator.reconcile(namespace, ZookeeperCluster.policyName(name), zkCluster.generateNetworkPolicy()));\n }\n\n Future zkPodDisruptionBudget() {\n return withVoid(podDisruptionBudgetOperator.reconcile(namespace, zkCluster.getName(), zkCluster.generatePodDisruptionBudget()));\n }\n\n Future zkStatefulSet() {\n StatefulSet zkSs = zkCluster.generateStatefulSet(isOpenShift, imagePullPolicy);\n Annotations.annotations(zkSs.getSpec().getTemplate()).put(Ca.ANNO_STRIMZI_IO_CLUSTER_CA_CERT_GENERATION, String.valueOf(getCaCertGeneration(this.clusterCa)));\n return withZkDiff(zkSetOperations.reconcile(namespace, zkCluster.getName(), zkSs));\n }\n\n Future zkRollingUpdate(Supplier dateSupplier) {\n return withVoid(zkSetOperations.maybeRollingUpdate(zkDiffs.resource(), pod ->\n isPodToRestart(zkDiffs.resource(), pod, zkAncillaryCmChange, dateSupplier, this.clusterCa)\n ));\n }\n\n \/**\n * Scale up is divided by scaling up Zookeeper cluster in steps.\n * Scaling up from N to M (N > 0 and M>N) replicas is done in M-N steps.\n * Each step performs scale up by one replica and full tolling update of Zookeeper cluster.\n * This approach ensures a valid configuration of each Zk pod.\n * Together with modified `maybeRollingUpdate` the quorum is not lost after the scale up operation is performed.\n * There is one special case of scaling from standalone (single one) Zookeeper pod.\n * In this case quorum cannot be preserved.\n *\/\n Future zkScaleUpStep() {\n Future futss = zkSetOperations.getAsync(namespace, ZookeeperCluster.zookeeperClusterName(name));\n return withVoid(futss.map(ss -> ss == null ? 
0 : ss.getSpec().getReplicas())\n .compose(currentReplicas -> {\n if (currentReplicas > 0 && zkCluster.getReplicas() > currentReplicas) {\n zkCluster.setReplicas(currentReplicas + 1);\n }\n Future result = Future.succeededFuture(zkCluster.getReplicas() + 1);\n return result;\n }));\n }\n\n Future zkScaleUp() {\n return withVoid(zkSetOperations.scaleUp(namespace, zkCluster.getName(), zkCluster.getReplicas()));\n }\n\n Future zkServiceEndpointReadiness() {\n return withVoid(serviceOperations.endpointReadiness(namespace, zkService, 1_000, operationTimeoutMs));\n }\n\n Future zkHeadlessServiceEndpointReadiness() {\n return withVoid(serviceOperations.endpointReadiness(namespace, zkHeadlessService, 1_000, operationTimeoutMs));\n }\n\n Future withZkAncillaryCmChanged(boolean onlyMetricsSettingChanged, Future> r) {\n return r.map(rr -> {\n if (onlyMetricsSettingChanged) {\n log.debug(\"Only metrics setting changed - not triggering rolling update\");\n this.zkAncillaryCmChange = false;\n } else {\n this.zkAncillaryCmChange = rr instanceof ReconcileResult.Patched;\n }\n return this;\n });\n }\n\n Future zkManualPodCleaning() {\n String reason = \"manual pod cleaning\";\n Future futss = zkSetOperations.getAsync(namespace, ZookeeperCluster.zookeeperClusterName(name));\n if (futss != null) {\n return futss.compose(ss -> {\n if (ss != null) {\n log.debug(\"{}: Cleaning Pods for StatefulSet {} to {}\", reconciliation, ss.getMetadata().getName(), reason);\n return zkSetOperations.maybeDeletePodAndPvc(ss);\n }\n return Future.succeededFuture();\n }).map(i -> this);\n }\n return Future.succeededFuture(this);\n }\n\n Future zkPersistentClaimDeletion() {\n return persistentClaimDeletion(zkCluster.getStorage(), zkCluster.getReplicas(),\n (storage, i) -> AbstractModel.VOLUME_NAME + \"-\" + ZookeeperCluster.zookeeperClusterName(reconciliation.name()) + \"-\" + i);\n }\n\n private Future getKafkaClusterDescription() {\n Future fut = Future.future();\n\n vertx.createSharedWorkerExecutor(\"kubernetes-ops-pool\").executeBlocking(\n future -> {\n try {\n this.kafkaCluster = KafkaCluster.fromCrd(kafkaAssembly, versions);\n\n ConfigMap logAndMetricsConfigMap = kafkaCluster.generateMetricsAndLogConfigMap(\n kafkaCluster.getLogging() instanceof ExternalLogging ?\n configMapOperations.get(kafkaAssembly.getMetadata().getNamespace(), ((ExternalLogging) kafkaCluster.getLogging()).getName()) :\n null);\n this.kafkaService = kafkaCluster.generateService();\n this.kafkaHeadlessService = kafkaCluster.generateHeadlessService();\n this.kafkaMetricsAndLogsConfigMap = logAndMetricsConfigMap;\n\n future.complete(this);\n } catch (Throwable e) {\n future.fail(e);\n }\n }, true,\n res -> {\n if (res.succeeded()) {\n fut.complete(res.result());\n } else {\n fut.fail(res.cause());\n }\n }\n );\n return fut;\n }\n\n Future withKafkaDiff(Future> r) {\n return r.map(rr -> {\n this.kafkaDiffs = rr;\n return this;\n });\n }\n\n Future withKafkaAncillaryCmChanged(boolean onlyMetricsSettingChanged, Future> r) {\n return r.map(rr -> {\n if (onlyMetricsSettingChanged) {\n log.debug(\"Only metrics setting changed - not triggering rolling update\");\n this.kafkaAncillaryCmChange = false;\n } else {\n this.kafkaAncillaryCmChange = rr instanceof ReconcileResult.Patched;\n }\n return this;\n });\n }\n\n Future kafkaInitServiceAccount() {\n return withVoid(serviceAccountOperator.reconcile(namespace,\n KafkaCluster.initContainerServiceAccountName(kafkaCluster.getCluster()),\n kafkaCluster.generateInitContainerServiceAccount()));\n }\n\n Future 
kafkaInitClusterRoleBinding() {\n ClusterRoleBindingOperator.ClusterRoleBinding desired = kafkaCluster.generateClusterRoleBinding(namespace);\n Future fut = clusterRoleBindingOperator.reconcile(\n KafkaCluster.initContainerClusterRoleBindingName(namespace, name),\n desired);\n\n Future replacementFut = Future.future();\n\n fut.setHandler(res -> {\n if (res.failed()) {\n if (desired == null && res.cause().getMessage().contains(\"403: Forbidden\")) {\n log.debug(\"Ignoring forbidden access to ClusterRoleBindings which seems not needed while Kafka rack awareness is disabled.\");\n replacementFut.complete();\n } else {\n replacementFut.fail(res.cause());\n }\n } else {\n replacementFut.complete();\n }\n });\n\n return withVoid(replacementFut);\n }\n\n Future kafkaScaleDown() {\n return withVoid(kafkaSetOperations.scaleDown(namespace, kafkaCluster.getName(), kafkaCluster.getReplicas()));\n }\n\n Future kafkaService() {\n return withVoid(serviceOperations.reconcile(namespace, kafkaCluster.getServiceName(), kafkaService));\n }\n\n Future kafkaHeadlessService() {\n return withVoid(serviceOperations.reconcile(namespace, kafkaCluster.getHeadlessServiceName(), kafkaHeadlessService));\n }\n\n Future kafkaExternalBootstrapService() {\n return withVoid(serviceOperations.reconcile(namespace, KafkaCluster.externalBootstrapServiceName(name), kafkaCluster.generateExternalBootstrapService()));\n }\n\n Future kafkaReplicaServices() {\n int replicas = kafkaCluster.getReplicas();\n List serviceFutures = new ArrayList<>(replicas);\n\n for (int i = 0; i < replicas; i++) {\n serviceFutures.add(serviceOperations.reconcile(namespace, KafkaCluster.externalServiceName(name, i), kafkaCluster.generateExternalService(i)));\n }\n\n return withVoid(CompositeFuture.join(serviceFutures));\n }\n\n Future kafkaBootstrapRoute() {\n Route route = kafkaCluster.generateExternalBootstrapRoute();\n\n if (routeOperations != null) {\n return withVoid(routeOperations.reconcile(namespace, KafkaCluster.serviceName(name), route));\n } else if (route != null) {\n log.warn(\"{}: Exposing Kafka cluster {} using OpenShift Routes is available only on OpenShift\", reconciliation, name);\n return withVoid(Future.failedFuture(\"Exposing Kafka cluster \" + name + \" using OpenShift Routes is available only on OpenShift\"));\n }\n\n return withVoid(Future.succeededFuture());\n }\n\n Future kafkaReplicaRoutes() {\n int replicas = kafkaCluster.getReplicas();\n List routeFutures = new ArrayList<>(replicas);\n\n for (int i = 0; i < replicas; i++) {\n Route route = kafkaCluster.generateExternalRoute(i);\n\n if (routeOperations != null) {\n routeFutures.add(routeOperations.reconcile(namespace, KafkaCluster.externalServiceName(name, i), route));\n } else if (route != null) {\n log.warn(\"{}: Exposing Kafka cluster {} using OpenShift Routes is available only on OpenShift\", reconciliation, name);\n return withVoid(Future.failedFuture(\"Exposing Kafka cluster \" + name + \" using OpenShift Routes is available only on OpenShift\"));\n }\n }\n\n return withVoid(CompositeFuture.join(routeFutures));\n }\n\n Future kafkaExternalBootstrapServiceReady() {\n if (!kafkaCluster.isExposedWithLoadBalancer() && !kafkaCluster.isExposedWithNodePort()) {\n return withVoid(Future.succeededFuture());\n }\n\n if (kafkaCluster.getExternalListenerBootstrapOverride() != null && kafkaCluster.getExternalListenerBootstrapOverride().getAddress() != null) {\n log.trace(\"{}: Adding address {} from overrides to certificate DNS names\", reconciliation, 
kafkaCluster.getExternalListenerBootstrapOverride().getAddress());\n this.kafkaExternalBootstrapDnsName.add(kafkaCluster.getExternalListenerBootstrapOverride().getAddress());\n }\n\n Future blockingFuture = Future.future();\n\n vertx.createSharedWorkerExecutor(\"kubernetes-ops-pool\").executeBlocking(\n future -> {\n String serviceName = KafkaCluster.externalBootstrapServiceName(name);\n Future address = null;\n\n if (kafkaCluster.isExposedWithNodePort()) {\n address = serviceOperations.hasNodePort(namespace, serviceName, 1_000, operationTimeoutMs);\n } else {\n address = serviceOperations.hasIngressAddress(namespace, serviceName, 1_000, operationTimeoutMs);\n }\n\n address.setHandler(res -> {\n if (res.succeeded()) {\n if (kafkaCluster.isExposedWithLoadBalancer()) {\n String bootstrapAddress = null;\n\n if (serviceOperations.get(namespace, serviceName).getStatus().getLoadBalancer().getIngress().get(0).getHostname() != null) {\n bootstrapAddress = serviceOperations.get(namespace, serviceName).getStatus().getLoadBalancer().getIngress().get(0).getHostname();\n } else {\n bootstrapAddress = serviceOperations.get(namespace, serviceName).getStatus().getLoadBalancer().getIngress().get(0).getIp();\n }\n\n if (log.isTraceEnabled()) {\n log.trace(\"{}: Found address {} for Service {}\", reconciliation, bootstrapAddress, serviceName);\n }\n\n this.kafkaExternalBootstrapDnsName.add(bootstrapAddress);\n }\n\n future.complete();\n } else {\n log.warn(\"{}: No address found for Service {}\", reconciliation, serviceName);\n future.fail(\"No address found for Service \" + serviceName);\n }\n });\n }, res -> {\n if (res.succeeded()) {\n blockingFuture.complete();\n } else {\n blockingFuture.fail(res.cause());\n }\n });\n\n return withVoid(blockingFuture);\n }\n\n Future kafkaReplicaServicesReady() {\n if (!kafkaCluster.isExposedWithLoadBalancer() && !kafkaCluster.isExposedWithNodePort()) {\n return withVoid(Future.succeededFuture());\n }\n\n Future blockingFuture = Future.future();\n\n vertx.createSharedWorkerExecutor(\"kubernetes-ops-pool\").executeBlocking(\n future -> {\n int replicas = kafkaCluster.getReplicas();\n List routeFutures = new ArrayList<>(replicas);\n\n for (int i = 0; i < replicas; i++) {\n String serviceName = KafkaCluster.externalServiceName(name, i);\n Future routeFuture = Future.future();\n\n Future address = null;\n Set dnsNames = new HashSet<>();\n\n String dnsOverride = kafkaCluster.getExternalServiceAdvertisedHostOverride(i);\n if (dnsOverride != null) {\n dnsNames.add(dnsOverride);\n }\n\n if (kafkaCluster.isExposedWithNodePort()) {\n address = serviceOperations.hasNodePort(namespace, serviceName, 1_000, operationTimeoutMs);\n } else {\n address = serviceOperations.hasIngressAddress(namespace, serviceName, 1_000, operationTimeoutMs);\n }\n\n int podNumber = i;\n\n address.setHandler(res -> {\n if (res.succeeded()) {\n if (kafkaCluster.isExposedWithLoadBalancer()) {\n \/\/ Get the advertised URL\n String serviceAddress = null;\n\n if (serviceOperations.get(namespace, serviceName).getStatus().getLoadBalancer().getIngress().get(0).getHostname() != null) {\n serviceAddress = serviceOperations.get(namespace, serviceName).getStatus().getLoadBalancer().getIngress().get(0).getHostname();\n } else {\n serviceAddress = serviceOperations.get(namespace, serviceName).getStatus().getLoadBalancer().getIngress().get(0).getIp();\n }\n\n if (log.isTraceEnabled()) {\n log.trace(\"{}: Found address {} for Service {}\", reconciliation, serviceAddress, serviceName);\n }\n\n 
this.kafkaExternalAddresses.add(kafkaCluster.getExternalAdvertisedUrl(podNumber, serviceAddress, \"9094\"));\n\n \/\/ Collect the DNS names for certificates\n for (LoadBalancerIngress ingress : serviceOperations.get(namespace, serviceName).getStatus().getLoadBalancer().getIngress()) {\n if (ingress.getHostname() != null) {\n dnsNames.add(ingress.getHostname());\n } else {\n dnsNames.add(ingress.getIp());\n }\n }\n } else if (kafkaCluster.isExposedWithNodePort()) {\n \/\/ Get the advertised URL\n String port = serviceOperations.get(namespace, serviceName).getSpec().getPorts()\n .get(0).getNodePort().toString();\n\n if (log.isTraceEnabled()) {\n log.trace(\"{}: Found port {} for Service {}\", reconciliation, port, serviceName);\n }\n\n this.kafkaExternalAddresses.add(kafkaCluster.getExternalAdvertisedUrl(podNumber, \"\", port));\n }\n\n this.kafkaExternalDnsNames.put(podNumber, dnsNames);\n\n routeFuture.complete();\n } else {\n log.warn(\"{}: No address found for Service {}\", reconciliation, serviceName);\n routeFuture.fail(\"No address found for Service \" + serviceName);\n }\n });\n\n routeFutures.add(routeFuture);\n }\n\n CompositeFuture.join(routeFutures).setHandler(res -> {\n if (res.succeeded()) {\n future.complete();\n } else {\n future.fail(res.cause());\n }\n });\n }, res -> {\n if (res.succeeded()) {\n blockingFuture.complete();\n } else {\n blockingFuture.fail(res.cause());\n }\n });\n\n return withVoid(blockingFuture);\n }\n\n Future kafkaBootstrapRouteReady() {\n if (routeOperations == null || !kafkaCluster.isExposedWithRoute()) {\n return withVoid(Future.succeededFuture());\n }\n\n if (kafkaCluster.getExternalListenerBootstrapOverride() != null && kafkaCluster.getExternalListenerBootstrapOverride().getAddress() != null) {\n log.trace(\"{}: Adding address {} from overrides to certificate DNS names\", reconciliation, kafkaCluster.getExternalListenerBootstrapOverride().getAddress());\n this.kafkaExternalBootstrapDnsName.add(kafkaCluster.getExternalListenerBootstrapOverride().getAddress());\n }\n\n Future blockingFuture = Future.future();\n\n vertx.createSharedWorkerExecutor(\"kubernetes-ops-pool\").executeBlocking(\n future -> {\n String routeName = KafkaCluster.serviceName(name);\n \/\/Future future = Future.future();\n Future address = routeOperations.hasAddress(namespace, routeName, 1_000, operationTimeoutMs);\n\n address.setHandler(res -> {\n if (res.succeeded()) {\n String bootstrapAddress = routeOperations.get(namespace, routeName).getStatus().getIngress().get(0).getHost();\n this.kafkaExternalBootstrapDnsName.add(bootstrapAddress);\n\n if (log.isTraceEnabled()) {\n log.trace(\"{}: Found address {} for Route {}\", reconciliation, bootstrapAddress, routeName);\n }\n\n future.complete();\n } else {\n log.warn(\"{}: No address found for Route {}\", reconciliation, routeName);\n future.fail(\"No address found for Route \" + routeName);\n }\n });\n }, res -> {\n if (res.succeeded()) {\n blockingFuture.complete();\n } else {\n blockingFuture.fail(res.cause());\n }\n });\n\n return withVoid(blockingFuture);\n }\n\n Future kafkaReplicaRoutesReady() {\n if (routeOperations == null || !kafkaCluster.isExposedWithRoute()) {\n return withVoid(Future.succeededFuture());\n }\n\n Future blockingFuture = Future.future();\n\n vertx.createSharedWorkerExecutor(\"kubernetes-ops-pool\").executeBlocking(\n future -> {\n int replicas = kafkaCluster.getReplicas();\n List routeFutures = new ArrayList<>(replicas);\n\n for (int i = 0; i < replicas; i++) {\n String routeName = 
KafkaCluster.externalServiceName(name, i);\n Future routeFuture = Future.future();\n Future address = routeOperations.hasAddress(namespace, routeName, 1_000, operationTimeoutMs);\n int podNumber = i;\n\n Set dnsNames = new HashSet<>();\n\n String dnsOverride = kafkaCluster.getExternalServiceAdvertisedHostOverride(i);\n if (dnsOverride != null) {\n dnsNames.add(dnsOverride);\n }\n\n address.setHandler(res -> {\n if (res.succeeded()) {\n Route route = routeOperations.get(namespace, routeName);\n\n \/\/ Get the advertised URL\n String routeAddress = route.getStatus().getIngress().get(0).getHost();\n this.kafkaExternalAddresses.add(kafkaCluster.getExternalAdvertisedUrl(podNumber, routeAddress, \"443\"));\n\n if (log.isTraceEnabled()) {\n log.trace(\"{}: Found address {} for Route {}\", reconciliation, routeAddress, routeName);\n }\n\n \/\/ Collect the DNS names for certificates\n for (RouteIngress ingress : route.getStatus().getIngress()) {\n dnsNames.add(ingress.getHost());\n }\n\n this.kafkaExternalDnsNames.put(podNumber, dnsNames);\n\n routeFuture.complete();\n } else {\n log.warn(\"{}: No address found for Route {}\", reconciliation, routeName);\n routeFuture.fail(\"No address found for Route \" + routeName);\n }\n });\n\n routeFutures.add(routeFuture);\n }\n\n CompositeFuture.join(routeFutures).setHandler(res -> {\n if (res.succeeded()) {\n future.complete();\n } else {\n future.fail(res.cause());\n }\n });\n }, res -> {\n if (res.succeeded()) {\n blockingFuture.complete();\n } else {\n blockingFuture.fail(res.cause());\n }\n });\n\n return withVoid(blockingFuture);\n }\n\n Future kafkaGenerateCertificates() {\n Future result = Future.future();\n vertx.createSharedWorkerExecutor(\"kubernetes-ops-pool\").executeBlocking(\n future -> {\n try {\n kafkaCluster.generateCertificates(kafkaAssembly,\n clusterCa, kafkaExternalBootstrapDnsName, kafkaExternalDnsNames);\n future.complete(this);\n } catch (Throwable e) {\n future.fail(e);\n }\n },\n true,\n result.completer());\n return result;\n }\n\n Future kafkaAncillaryCm() {\n return getReconciliationStateOfConfigMap(kafkaCluster, kafkaMetricsAndLogsConfigMap, this::withKafkaAncillaryCmChanged);\n }\n\n Future kafkaBrokersSecret() {\n return withVoid(secretOperations.reconcile(namespace, KafkaCluster.brokersSecretName(name), kafkaCluster.generateBrokersSecret()));\n }\n\n Future kafkaNetPolicy() {\n return withVoid(networkPolicyOperator.reconcile(namespace, KafkaCluster.policyName(name), kafkaCluster.generateNetworkPolicy()));\n }\n\n Future kafkaPodDisruptionBudget() {\n return withVoid(podDisruptionBudgetOperator.reconcile(namespace, kafkaCluster.getName(), kafkaCluster.generatePodDisruptionBudget()));\n }\n\n Future kafkaStatefulSet() {\n kafkaCluster.setExternalAddresses(kafkaExternalAddresses);\n StatefulSet kafkaSs = kafkaCluster.generateStatefulSet(isOpenShift, imagePullPolicy);\n PodTemplateSpec template = kafkaSs.getSpec().getTemplate();\n Annotations.annotations(template).put(\n Ca.ANNO_STRIMZI_IO_CLUSTER_CA_CERT_GENERATION,\n String.valueOf(getCaCertGeneration(this.clusterCa)));\n Annotations.annotations(template).put(\n Ca.ANNO_STRIMZI_IO_CLIENTS_CA_CERT_GENERATION,\n String.valueOf(getCaCertGeneration(this.clientsCa)));\n return withKafkaDiff(kafkaSetOperations.reconcile(namespace, kafkaCluster.getName(), kafkaSs));\n }\n\n Future kafkaRollingUpdate(Supplier dateSupplier) {\n return withVoid(kafkaSetOperations.maybeRollingUpdate(kafkaDiffs.resource(), pod ->\n isPodToRestart(kafkaDiffs.resource(), pod, kafkaAncillaryCmChange, 
dateSupplier, this.clusterCa, this.clientsCa)\n ));\n }\n\n Future kafkaScaleUp() {\n return withVoid(kafkaSetOperations.scaleUp(namespace, kafkaCluster.getName(), kafkaCluster.getReplicas()));\n }\n\n Future kafkaServiceEndpointReady() {\n return withVoid(serviceOperations.endpointReadiness(namespace, kafkaService, 1_000, operationTimeoutMs));\n }\n\n Future kafkaHeadlessServiceEndpointReady() {\n return withVoid(serviceOperations.endpointReadiness(namespace, kafkaHeadlessService, 1_000, operationTimeoutMs));\n }\n\n Future kafkaManualPodCleaning() {\n String reason = \"manual pod cleaning\";\n Future futss = kafkaSetOperations.getAsync(namespace, KafkaCluster.kafkaClusterName(name));\n if (futss != null) {\n return futss.compose(ss -> {\n if (ss != null) {\n log.debug(\"{}: Cleaning Pods for StatefulSet {} to {}\", reconciliation, ss.getMetadata().getName(), reason);\n return kafkaSetOperations.maybeDeletePodAndPvc(ss);\n }\n return Future.succeededFuture();\n }).map(i -> this);\n }\n return Future.succeededFuture(this);\n }\n\n Future kafkaPersistentClaimDeletion() {\n return persistentClaimDeletion(kafkaCluster.getStorage(), kafkaCluster.getReplicas(),\n (storage, i) -> {\n String name = ModelUtils.getVolumePrefix(storage.getId());\n return name + \"-\" + KafkaCluster.kafkaClusterName(reconciliation.name()) + \"-\" + i;\n });\n }\n\n private final Future getTopicOperatorDescription() {\n Future fut = Future.future();\n\n vertx.createSharedWorkerExecutor(\"kubernetes-ops-pool\").executeBlocking(\n future -> {\n try {\n this.topicOperator = TopicOperator.fromCrd(kafkaAssembly);\n\n if (topicOperator != null) {\n ConfigMap logAndMetricsConfigMap = topicOperator.generateMetricsAndLogConfigMap(\n topicOperator.getLogging() instanceof ExternalLogging ?\n configMapOperations.get(kafkaAssembly.getMetadata().getNamespace(), ((ExternalLogging) topicOperator.getLogging()).getName()) :\n null);\n this.toDeployment = topicOperator.generateDeployment(isOpenShift, imagePullPolicy);\n this.toMetricsAndLogsConfigMap = logAndMetricsConfigMap;\n Annotations.annotations(this.toDeployment.getSpec().getTemplate()).put(\n ANNO_STRIMZI_IO_LOGGING,\n this.toMetricsAndLogsConfigMap.getData().get(\"log4j2.properties\"));\n } else {\n this.toDeployment = null;\n this.toMetricsAndLogsConfigMap = null;\n }\n\n future.complete(this);\n } catch (Throwable e) {\n future.fail(e);\n }\n }, true,\n res -> {\n if (res.succeeded()) {\n fut.complete(res.result());\n } else {\n fut.fail(res.cause());\n }\n }\n );\n return fut;\n }\n\n Future topicOperatorServiceAccount() {\n return withVoid(serviceAccountOperator.reconcile(namespace,\n TopicOperator.topicOperatorServiceAccountName(name),\n toDeployment != null ? topicOperator.generateServiceAccount() : null));\n }\n\n Future topicOperatorRoleBinding() {\n String watchedNamespace = topicOperator != null ? topicOperator.getWatchedNamespace() : null;\n return withVoid(roleBindingOperator.reconcile(\n watchedNamespace != null && !watchedNamespace.isEmpty() ?\n watchedNamespace : namespace,\n TopicOperator.roleBindingName(name),\n toDeployment != null ? topicOperator.generateRoleBinding(namespace) : null));\n }\n\n Future topicOperatorAncillaryCm() {\n return withVoid(configMapOperations.reconcile(namespace,\n toDeployment != null ? 
topicOperator.getAncillaryConfigName() : TopicOperator.metricAndLogConfigsName(name),\n toMetricsAndLogsConfigMap));\n }\n\n Future topicOperatorDeployment(Supplier dateSupplier) {\n if (this.topicOperator != null) {\n Future future = deploymentOperations.getAsync(namespace, this.topicOperator.getName());\n return future.compose(dep -> {\n \/\/ getting the current cluster CA generation from the current deployment, if exists\n int caCertGeneration = getDeploymentCaCertGeneration(dep, this.clusterCa);\n \/\/ if maintenance windows are satisfied, the cluster CA generation could be changed\n \/\/ and EO needs a rolling update updating the related annotation\n boolean isSatisfiedBy = isMaintenanceTimeWindowsSatisfied(dateSupplier);\n if (isSatisfiedBy) {\n caCertGeneration = getCaCertGeneration(this.clusterCa);\n }\n Annotations.annotations(toDeployment.getSpec().getTemplate()).put(\n Ca.ANNO_STRIMZI_IO_CLUSTER_CA_CERT_GENERATION, String.valueOf(caCertGeneration));\n return withVoid(deploymentOperations.reconcile(namespace, TopicOperator.topicOperatorName(name), toDeployment));\n }).map(i -> this);\n } else {\n return withVoid(deploymentOperations.reconcile(namespace, TopicOperator.topicOperatorName(name), null));\n }\n }\n\n Future topicOperatorSecret() {\n return withVoid(secretOperations.reconcile(namespace, TopicOperator.secretName(name), topicOperator == null ? null : topicOperator.generateSecret(clusterCa)));\n }\n\n private final Future getEntityOperatorDescription() {\n Future fut = Future.future();\n\n vertx.createSharedWorkerExecutor(\"kubernetes-ops-pool\").executeBlocking(\n future -> {\n try {\n EntityOperator entityOperator = EntityOperator.fromCrd(kafkaAssembly);\n\n if (entityOperator != null) {\n EntityTopicOperator topicOperator = entityOperator.getTopicOperator();\n EntityUserOperator userOperator = entityOperator.getUserOperator();\n\n ConfigMap topicOperatorLogAndMetricsConfigMap = topicOperator != null ?\n topicOperator.generateMetricsAndLogConfigMap(topicOperator.getLogging() instanceof ExternalLogging ?\n configMapOperations.get(kafkaAssembly.getMetadata().getNamespace(), ((ExternalLogging) topicOperator.getLogging()).getName()) :\n null) : null;\n\n ConfigMap userOperatorLogAndMetricsConfigMap = userOperator != null ?\n userOperator.generateMetricsAndLogConfigMap(userOperator.getLogging() instanceof ExternalLogging ?\n configMapOperations.get(kafkaAssembly.getMetadata().getNamespace(), ((ExternalLogging) userOperator.getLogging()).getName()) :\n null) : null;\n\n Map annotations = new HashMap();\n annotations.put(ANNO_STRIMZI_IO_LOGGING, topicOperatorLogAndMetricsConfigMap.getData().get(\"log4j2.properties\") + userOperatorLogAndMetricsConfigMap.getData().get(\"log4j2.properties\"));\n\n this.entityOperator = entityOperator;\n this.eoDeployment = entityOperator.generateDeployment(isOpenShift, annotations, imagePullPolicy);\n this.topicOperatorMetricsAndLogsConfigMap = topicOperatorLogAndMetricsConfigMap;\n this.userOperatorMetricsAndLogsConfigMap = userOperatorLogAndMetricsConfigMap;\n }\n\n future.complete(this);\n } catch (Throwable e) {\n future.fail(e);\n }\n }, true,\n res -> {\n if (res.succeeded()) {\n fut.complete(res.result());\n } else {\n fut.fail(res.cause());\n }\n }\n );\n return fut;\n }\n\n Future entityOperatorServiceAccount() {\n return withVoid(serviceAccountOperator.reconcile(namespace,\n EntityOperator.entityOperatorServiceAccountName(name),\n eoDeployment != null ? 
entityOperator.generateServiceAccount() : null));\n }\n\n Future entityOperatorTopicOpRoleBinding() {\n String watchedNamespace = entityOperator != null && entityOperator.getTopicOperator() != null ?\n entityOperator.getTopicOperator().getWatchedNamespace() : null;\n return withVoid(roleBindingOperator.reconcile(\n watchedNamespace != null && !watchedNamespace.isEmpty() ?\n watchedNamespace : namespace,\n EntityTopicOperator.roleBindingName(name),\n eoDeployment != null && entityOperator.getTopicOperator() != null ?\n entityOperator.getTopicOperator().generateRoleBinding(namespace) : null));\n }\n\n Future entityOperatorUserOpRoleBinding() {\n String watchedNamespace = entityOperator != null && entityOperator.getUserOperator() != null ?\n entityOperator.getUserOperator().getWatchedNamespace() : null;\n return withVoid(roleBindingOperator.reconcile(\n watchedNamespace != null && !watchedNamespace.isEmpty() ?\n watchedNamespace : namespace,\n EntityUserOperator.roleBindingName(name),\n eoDeployment != null && entityOperator.getUserOperator() != null ?\n entityOperator.getUserOperator().generateRoleBinding(namespace) : null));\n }\n\n Future entityOperatorTopicOpAncillaryCm() {\n return withVoid(configMapOperations.reconcile(namespace,\n eoDeployment != null && entityOperator.getTopicOperator() != null ?\n entityOperator.getTopicOperator().getAncillaryConfigName() : EntityTopicOperator.metricAndLogConfigsName(name),\n topicOperatorMetricsAndLogsConfigMap));\n }\n\n Future entityOperatorUserOpAncillaryCm() {\n return withVoid(configMapOperations.reconcile(namespace,\n eoDeployment != null && entityOperator.getUserOperator() != null ?\n entityOperator.getUserOperator().getAncillaryConfigName() : EntityUserOperator.metricAndLogConfigsName(name),\n userOperatorMetricsAndLogsConfigMap));\n }\n\n Future entityOperatorDeployment(Supplier dateSupplier) {\n if (this.entityOperator != null) {\n Future future = deploymentOperations.getAsync(namespace, this.entityOperator.getName());\n return future.compose(dep -> {\n \/\/ getting the current cluster CA generation from the current deployment, if exists\n int clusterCaCertGeneration = getDeploymentCaCertGeneration(dep, this.clusterCa);\n int clientsCaCertGeneration = getDeploymentCaCertGeneration(dep, this.clientsCa);\n \/\/ if maintenance windows are satisfied, the cluster CA generation could be changed\n \/\/ and EO needs a rolling update updating the related annotation\n boolean isSatisfiedBy = isMaintenanceTimeWindowsSatisfied(dateSupplier);\n if (isSatisfiedBy) {\n clusterCaCertGeneration = getCaCertGeneration(this.clusterCa);\n clientsCaCertGeneration = getCaCertGeneration(this.clientsCa);\n }\n Annotations.annotations(eoDeployment.getSpec().getTemplate()).put(\n Ca.ANNO_STRIMZI_IO_CLUSTER_CA_CERT_GENERATION, String.valueOf(clusterCaCertGeneration));\n Annotations.annotations(eoDeployment.getSpec().getTemplate()).put(\n Ca.ANNO_STRIMZI_IO_CLIENTS_CA_CERT_GENERATION, String.valueOf(clientsCaCertGeneration));\n return withVoid(deploymentOperations.reconcile(namespace, EntityOperator.entityOperatorName(name), eoDeployment));\n }).map(i -> this);\n } else {\n return withVoid(deploymentOperations.reconcile(namespace, EntityOperator.entityOperatorName(name), null));\n }\n }\n\n Future entityOperatorSecret() {\n return withVoid(secretOperations.reconcile(namespace, EntityOperator.secretName(name),\n entityOperator == null ? 
null : entityOperator.generateSecret(clusterCa)));\n }\n\n private boolean isPodUpToDate(StatefulSet ss, Pod pod) {\n final int ssGeneration = StatefulSetOperator.getSsGeneration(ss);\n final int podGeneration = StatefulSetOperator.getPodGeneration(pod);\n log.debug(\"Rolling update of {}\/{}: pod {} has {}={}; ss has {}={}\",\n ss.getMetadata().getNamespace(), ss.getMetadata().getName(), pod.getMetadata().getName(),\n StatefulSetOperator.ANNO_STRIMZI_IO_GENERATION, podGeneration,\n StatefulSetOperator.ANNO_STRIMZI_IO_GENERATION, ssGeneration);\n return ssGeneration == podGeneration;\n }\n\n private boolean isPodCaCertUpToDate(Pod pod, Ca ca) {\n final int caCertGeneration = getCaCertGeneration(ca);\n String podAnnotation = getCaCertAnnotation(ca);\n final int podCaCertGeneration =\n Annotations.intAnnotation(pod, podAnnotation, Ca.INIT_GENERATION);\n return caCertGeneration == podCaCertGeneration;\n }\n\n private boolean isPodToRestart(StatefulSet ss, Pod pod, boolean isAncillaryCmChange, Supplier dateSupplier, Ca... cas) {\n boolean isPodUpToDate = isPodUpToDate(ss, pod);\n boolean isPodCaCertUpToDate = true;\n boolean isCaCertsChanged = false;\n for (Ca ca: cas) {\n isCaCertsChanged |= ca.certRenewed() || ca.certsRemoved();\n isPodCaCertUpToDate &= isPodCaCertUpToDate(pod, ca);\n }\n\n boolean isPodToRestart = !isPodUpToDate || !isPodCaCertUpToDate || isAncillaryCmChange || isCaCertsChanged;\n boolean isSatisfiedBy = true;\n \/\/ it makes sense to check maintenance windows if pod restarting is needed\n if (isPodToRestart) {\n isSatisfiedBy = isMaintenanceTimeWindowsSatisfied(dateSupplier);\n }\n\n if (log.isDebugEnabled()) {\n List reasons = new ArrayList<>();\n for (Ca ca: cas) {\n if (ca.certRenewed()) {\n reasons.add(ca + \" certificate renewal\");\n }\n if (ca.certsRemoved()) {\n reasons.add(ca + \" certificate removal\");\n }\n if (!isPodCaCertUpToDate(pod, ca)) {\n reasons.add(\"Pod has old \" + ca + \" certificate generation\");\n }\n }\n if (isAncillaryCmChange) {\n reasons.add(\"ancillary CM change\");\n }\n if (!isPodUpToDate) {\n reasons.add(\"Pod has old generation\");\n }\n if (!reasons.isEmpty()) {\n if (isSatisfiedBy) {\n log.debug(\"{}: Rolling pod {} due to {}\",\n reconciliation, pod.getMetadata().getName(), reasons);\n } else {\n log.debug(\"{}: Potential pod {} rolling due to {} but maintenance time windows not satisfied\",\n reconciliation, pod.getMetadata().getName(), reasons);\n }\n }\n }\n return isSatisfiedBy && isPodToRestart;\n }\n\n private boolean isMaintenanceTimeWindowsSatisfied(Supplier dateSupplier) {\n String currentCron = null;\n try {\n boolean isSatisfiedBy = getMaintenanceTimeWindows() == null || getMaintenanceTimeWindows().isEmpty();\n if (!isSatisfiedBy) {\n Date date = dateSupplier.get();\n for (String cron : getMaintenanceTimeWindows()) {\n currentCron = cron;\n CronExpression cronExpression = new CronExpression(cron);\n \/\/ the user defines the cron expression in \"UTC\/GMT\" timezone but CO pod\n \/\/ can be running on a different one, so setting it on the cron expression\n cronExpression.setTimeZone(TimeZone.getTimeZone(\"GMT\"));\n if (cronExpression.isSatisfiedBy(date)) {\n isSatisfiedBy = true;\n break;\n }\n }\n }\n return isSatisfiedBy;\n } catch (ParseException e) {\n log.warn(\"The provided maintenance time windows list contains {} which is not a valid cron expression\", currentCron);\n return false;\n }\n }\n\n private List getMaintenanceTimeWindows() {\n return kafkaAssembly.getSpec().getMaintenanceTimeWindows();\n }\n\n private int 
getDeploymentCaCertGeneration(Deployment dep, Ca ca) {\n int caCertGeneration = 0;\n if (dep != null) {\n caCertGeneration =\n Annotations.intAnnotation(\n dep.getSpec().getTemplate(), getCaCertAnnotation(ca), 0);\n }\n return caCertGeneration;\n }\n\n private int getCaCertGeneration(Ca ca) {\n return Annotations.intAnnotation(ca.caCertSecret(), Ca.ANNO_STRIMZI_IO_CA_CERT_GENERATION,\n Ca.INIT_GENERATION);\n }\n\n private String getCaCertAnnotation(Ca ca) {\n return ca instanceof ClientsCa ?\n Ca.ANNO_STRIMZI_IO_CLIENTS_CA_CERT_GENERATION :\n Ca.ANNO_STRIMZI_IO_CLUSTER_CA_CERT_GENERATION;\n }\n\n private PersistentVolumeClaim annotateDeleteClaim(String namespace, String pvcName, boolean isDeleteClaim) {\n PersistentVolumeClaim pvc = pvcOperations.get(namespace, pvcName);\n \/\/ this is called during a reconcile even when user is trying to change from ephemeral to persistent which\n \/\/ is not allowed, so the PVC doesn't exist\n if (pvc != null) {\n Annotations.annotations(pvc).put(AbstractModel.ANNO_STRIMZI_IO_DELETE_CLAIM, String.valueOf(isDeleteClaim));\n }\n return pvc;\n }\n\n Future clusterOperatorSecret() {\n Labels labels = Labels.userLabels(kafkaAssembly.getMetadata().getLabels()).withKind(reconciliation.type().toString()).withCluster(reconciliation.name());\n\n OwnerReference ownerRef = new OwnerReferenceBuilder()\n .withApiVersion(kafkaAssembly.getApiVersion())\n .withKind(kafkaAssembly.getKind())\n .withName(kafkaAssembly.getMetadata().getName())\n .withUid(kafkaAssembly.getMetadata().getUid())\n .withBlockOwnerDeletion(false)\n .withController(false)\n .build();\n\n Secret secret = ModelUtils.buildSecret(clusterCa, clusterCa.clusterOperatorSecret(), namespace, ClusterOperator.secretName(name), \"cluster-operator\", \"cluster-operator\", labels, ownerRef);\n\n return withVoid(secretOperations.reconcile(namespace, ClusterOperator.secretName(name),\n secret));\n }\n\n private Future persistentClaimDeletion(Storage storage, int replicas, BiFunction pvcName) {\n if (storage instanceof PersistentClaimStorage) {\n for (int i = 0; i < replicas; i++) {\n PersistentVolumeClaim pvc = annotateDeleteClaim(reconciliation.namespace(),\n pvcName.apply((PersistentClaimStorage) storage, i),\n ((PersistentClaimStorage) storage).isDeleteClaim());\n if (pvc != null) {\n pvcOperations.reconcile(namespace, pvc.getMetadata().getName(), pvc);\n }\n }\n } else if (storage instanceof JbodStorage) {\n JbodStorage jbodStorage = (JbodStorage) storage;\n for (int i = 0; i < replicas; i++) {\n for (SingleVolumeStorage volume : jbodStorage.getVolumes()) {\n if (volume instanceof PersistentClaimStorage) {\n PersistentVolumeClaim pvc = annotateDeleteClaim(reconciliation.namespace(),\n pvcName.apply((PersistentClaimStorage) volume, i),\n ((PersistentClaimStorage) volume).isDeleteClaim());\n if (pvc != null) {\n pvcOperations.reconcile(namespace, pvc.getMetadata().getName(), pvc);\n }\n }\n }\n }\n }\n return Future.succeededFuture(this);\n }\n }\n\n private final Future deleteKafka(Reconciliation reconciliation) {\n String namespace = reconciliation.namespace();\n String name = reconciliation.name();\n String kafkaSsName = KafkaCluster.kafkaClusterName(name);\n\n Labels pvcSelector = Labels.forCluster(name).withKind(Kafka.RESOURCE_KIND).withName(kafkaSsName);\n return deletePersistentVolumeClaim(namespace, pvcSelector);\n }\n\n private final Future deleteZk(Reconciliation reconciliation) {\n String namespace = reconciliation.namespace();\n String name = reconciliation.name();\n String zkSsName = 
ZookeeperCluster.zookeeperClusterName(name);\n\n Labels pvcSelector = Labels.forCluster(name).withKind(Kafka.RESOURCE_KIND).withName(zkSsName);\n return deletePersistentVolumeClaim(namespace, pvcSelector);\n }\n\n @Override\n protected Future delete(Reconciliation reconciliation) {\n return deleteKafka(reconciliation)\n .compose(i -> deleteZk(reconciliation))\n .map((Void) null);\n }\n\n @Override\n protected List getResources(String namespace, Labels selector) {\n \/\/ TODO: Search for PVCs!\n return Collections.EMPTY_LIST;\n }\n\n private Date dateSupplier() {\n return new Date();\n }\n\n \/**\n * Delete Persistent Volume Claims in the specified {@code namespace} and having the\n * labels described by {@code pvcSelector} if the related {@link AbstractModel#ANNO_STRIMZI_IO_DELETE_CLAIM}\n * annotation is\n *\n * @param namespace namespace where the Persistent Volume Claims to delete are\n * @param pvcSelector labels to select the Persistent Volume Claims to delete\n * @return\n *\/\n private Future deletePersistentVolumeClaim(String namespace, Labels pvcSelector) {\n List pvcs = pvcOperations.list(namespace, pvcSelector);\n List result = new ArrayList<>();\n\n for (PersistentVolumeClaim pvc: pvcs) {\n if (Annotations.booleanAnnotation(pvc, AbstractModel.ANNO_STRIMZI_IO_DELETE_CLAIM,\n false, AbstractModel.ANNO_CO_STRIMZI_IO_DELETE_CLAIM)) {\n log.debug(\"Delete selected PVCs with labels\", pvcSelector);\n result.add(pvcOperations.reconcile(namespace, pvc.getMetadata().getName(), null));\n }\n }\n return CompositeFuture.join(result);\n }\n}","avg_line_length":54.3095482546,"max_line_length":213,"alphanum_fraction":0.5777021598} {"size":700,"ext":"java","lang":"Java","max_stars_count":11.0,"content":"package com.kenshoo.matcher;\r\n\r\nimport org.hamcrest.Description;\r\nimport org.hamcrest.TypeSafeMatcher;\r\nimport java.util.Collection;\r\n\r\nimport static org.jooq.lambda.Seq.seq;\r\n\r\npublic class AllItemsAreDifferent extends TypeSafeMatcher> {\r\n\r\n @Override\r\n protected boolean matchesSafely(Collection items) {\r\n return ((int)seq(items).distinct().count()) == items.size();\r\n }\r\n\r\n @Override\r\n public void describeTo(Description description) {\r\n description.appendText(\"There are duplicate items in the collection\");\r\n }\r\n\r\n public static AllItemsAreDifferent allItemsAreDifferent() {\r\n return new AllItemsAreDifferent<>();\r\n }\r\n}\r\n","avg_line_length":28.0,"max_line_length":79,"alphanum_fraction":0.6957142857} {"size":2270,"ext":"java","lang":"Java","max_stars_count":1.0,"content":"\/*\n * Licensed to the Apache Software Foundation (ASF) under one or more contributor license\n * agreements. See the NOTICE file distributed with this work for additional information regarding\n * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance with the License. You may obtain a\n * copy of the License at\n *\n * http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software distributed under the License\n * is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express\n * or implied. 
See the License for the specific language governing permissions and limitations under\n * the License.\n *\/\npackage org.apache.geode.internal.cache;\n\nimport static org.assertj.core.api.Assertions.assertThat;\nimport static org.mockito.Mockito.mock;\nimport static org.mockito.Mockito.times;\nimport static org.mockito.Mockito.verify;\nimport static org.mockito.Mockito.when;\n\nimport java.util.HashMap;\nimport java.util.Map;\n\nimport org.junit.Test;\nimport org.junit.experimental.categories.Category;\n\nimport org.apache.geode.distributed.internal.membership.InternalDistributedMember;\nimport org.apache.geode.internal.cache.DistributedCacheOperation.CacheOperationMessage;\nimport org.apache.geode.internal.cache.persistence.PersistentMemberID;\nimport org.apache.geode.test.junit.categories.UnitTest;\n\n@Category(UnitTest.class)\npublic class DistributedCacheOperationTest {\n\n @Test\n public void shouldBeMockable() throws Exception {\n DistributedCacheOperation mockDistributedCacheOperation = mock(DistributedCacheOperation.class);\n CacheOperationMessage mockCacheOperationMessage = mock(CacheOperationMessage.class);\n Map persistentIds = new HashMap<>();\n when(mockDistributedCacheOperation.supportsDirectAck()).thenReturn(false);\n\n mockDistributedCacheOperation.waitForAckIfNeeded(mockCacheOperationMessage, persistentIds);\n\n verify(mockDistributedCacheOperation, times(1)).waitForAckIfNeeded(mockCacheOperationMessage,\n persistentIds);\n\n assertThat(mockDistributedCacheOperation.supportsDirectAck()).isFalse();\n }\n}\n","avg_line_length":43.6538461538,"max_line_length":100,"alphanum_fraction":0.8088105727} {"size":1269,"ext":"java","lang":"Java","max_stars_count":4.0,"content":"package com.organization.commons.base;\n\nimport java.util.*;\n\npublic class LangUtil {\n static final Random RANDOM = new Random();\n\n public static T[] concatenateArrays(T[] first, T[]... remaining) {\n int len = first.length;\n for (T[] array : remaining) {\n len += array.length;\n }\n T[] finalArr = Arrays.copyOf(first, len);\n\n int offset = first.length;\n for (T[] array : remaining) {\n System.arraycopy(array, 0, finalArr, offset, array.length);\n offset += array.length;\n }\n return finalArr;\n }\n\n \/**\n * Returns a pseudo-random number between min and max inclusive.\n * The difference between min and max can be at most\n * Integer.MAX_VALUE - 1<\/code>.\n *\/\n public static int randInt(int min, int max) {\n return RANDOM.nextInt((max - min) + 1) + min;\n }\n\n \/**\n * Get the keys of a Map by value.\n *\/\n public static Set getKeysByValue(Map map, E value) {\n Set keys = new HashSet();\n for (Map.Entry entry : map.entrySet()) {\n if (Objects.equals(value, entry.getValue())) {\n keys.add(entry.getKey());\n }\n }\n return keys;\n }\n\n}\n","avg_line_length":27.5869565217,"max_line_length":74,"alphanum_fraction":0.5476753349} {"size":7457,"ext":"java","lang":"Java","max_stars_count":1.0,"content":"\/**\r\n * $RCSfile: ,v $\r\n * $Revision: $\r\n * $Date: $\r\n * \r\n * Copyright (C) 2004-2011 Jive Software. 
All rights reserved.\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http:\/\/www.apache.org\/licenses\/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n *\/\r\n\r\npackage net.java.sipmack.media;\r\n\r\nimport java.io.IOException;\r\nimport java.net.ServerSocket;\r\n\r\nimport javax.media.format.AudioFormat;\r\nimport javax.media.rtp.ReceiveStreamListener;\r\n\r\nimport org.jivesoftware.spark.phone.PhoneManager;\r\nimport org.jivesoftware.sparkimpl.settings.local.SettingsManager;\r\n\r\n\r\n\/**\r\n * This Class implements a complete JingleMediaSession.\r\n * It sould be used to transmit and receive audio captured from the Mic.\r\n * This Class should be automaticly controlled by JingleSession.\r\n * But you could also use in any VOIP application.\r\n * For better NAT Traversal support this implementation don\ufffdt support only receive or only transmit.\r\n * To receive you MUST transmit. So the only implemented and functionally methods are startTransmit() and stopTransmit()\r\n *\r\n * @author Thiago Camargo\r\n *\/\r\npublic class AudioMediaSession {\r\n\r\n private AudioChannel audioChannel;\r\n private String locator = \"javasound:\/\/\";\r\n \/\/ AudioFormat of the Session\r\n private AudioFormat audioFormat;\r\n \/\/ Local Transport details\r\n private TransportCandidate local;\r\n \/\/ Remote Transport details\r\n private TransportCandidate remote;\r\n\r\n \/**\r\n * Creates a AudioMediaSession with defined payload type, remote and local candidates\r\n *\r\n * @param audioFormat jmf AudioFormat\r\n * @param remote The remote information. The candidate that the jmf will be sent to.\r\n * @param local The local information. The candidate that will receive the jmf\r\n *\/\r\n public AudioMediaSession(final AudioFormat audioFormat, final TransportCandidate remote,\r\n final TransportCandidate local) {\r\n this(audioFormat, remote, local, SettingsManager.getLocalPreferences().getAudioDevice());\r\n }\r\n\r\n \/**\r\n * Creates a AudioMediaSession with defined payload type, remote and local candidates\r\n *\r\n * @param audioFormat Payload of the jmf\r\n * @param remote The remote information. The candidate that the jmf will be sent to.\r\n * @param local The local information. 
The candidate that will receive the jmf\r\n *\/\r\n public AudioMediaSession(final AudioFormat audioFormat, final TransportCandidate remote,\r\n final TransportCandidate local, String locator) {\r\n this.local = local;\r\n this.remote = remote;\r\n this.audioFormat = audioFormat;\r\n if (locator != null && !locator.equals(\"\"))\r\n this.locator = locator;\r\n \r\n initialize();\r\n }\r\n\r\n\r\n \/**\r\n * Returns the AudioFormat of the Media Session\r\n *\r\n * @return\r\n *\/\r\n public AudioFormat getAudioFormat() {\r\n return audioFormat;\r\n }\r\n\r\n \/**\r\n * Returns the Media Session local Candidate\r\n *\r\n * @return\r\n *\/\r\n public TransportCandidate getLocal() {\r\n return local;\r\n }\r\n\r\n \/**\r\n * Returns the Media Session remote Candidate\r\n *\r\n * @return\r\n *\/\r\n public TransportCandidate getRemote() {\r\n return remote;\r\n }\r\n\r\n \/**\r\n * Initialize the Audio Channel to make it able to send and receive audio\r\n *\/\r\n public void initialize() {\r\n\r\n String ip;\r\n String localIp;\r\n int localPort;\r\n int remotePort;\r\n\r\n if (this.getLocal().getSymmetric() != null) {\r\n ip = this.getLocal().getIp();\r\n localIp = this.getLocal().getLocalIp();\r\n localPort = getFreePort();\r\n remotePort = this.getLocal().getSymmetric().getPort();\r\n\r\n System.out.println(this.getLocal().getConnection() + \" \" + ip + \": \" + localPort + \"->\" + remotePort);\r\n\r\n } else {\r\n ip = this.getRemote().getIp();\r\n localIp = this.getLocal().getLocalIp();\r\n localPort = this.getLocal().getPort();\r\n remotePort = this.getRemote().getPort();\r\n }\r\n\r\n audioChannel = new AudioChannel(PhoneManager.getMediaLocator(locator), localIp, ip, localPort, remotePort, audioFormat);\r\n }\r\n\r\n \/**\r\n * Add Receive Listeners. It monitors RTCP packets and signalling.\r\n *\r\n * @param listener listener to add\r\n *\/\r\n public void addReceiverListener(ReceiveStreamListener listener) {\r\n audioChannel.addReceiverListener(listener);\r\n }\r\n\r\n \/**\r\n * Removes Receive Listener.\r\n *\r\n * @param listener listener to remove\r\n *\/\r\n public void removeReceiverListener(ReceiveStreamListener listener) {\r\n audioChannel.removeReceiverListener(listener);\r\n }\r\n\r\n \/**\r\n * Starts transmission and for NAT Traversal reasons start receiving also.\r\n *\/\r\n public void startTrasmit() {\r\n audioChannel.start();\r\n }\r\n\r\n \/**\r\n * Set transmit activity. If the active is true, the instance should trasmit.\r\n * If it is set to false, the instance should pause transmit.\r\n *\r\n * @param active\r\n *\/\r\n public void setTrasmit(boolean active) {\r\n audioChannel.setTrasmit(active);\r\n }\r\n\r\n \/**\r\n * For NAT Reasons this method does nothing. Use startTransmit() to start transmit and receive jmf\r\n *\/\r\n public void startReceive() {\r\n \/\/ Do nothing\r\n }\r\n\r\n \/**\r\n * Stops transmission and for NAT Traversal reasons stop receiving also.\r\n *\/\r\n public void stopTrasmit() {\r\n if (audioChannel != null)\r\n audioChannel.stop();\r\n }\r\n\r\n \/**\r\n * For NAT Reasons this method does nothing. 
Use startTransmit() to start transmit and receive jmf\r\n *\/\r\n public void stopReceive() {\r\n \/\/ Do nothing\r\n }\r\n\r\n \/**\r\n * Closes and finalizes the session.\r\n * Very important to release static MediaLocator.\r\n *\/\r\n public void close(){\r\n\r\n stopTrasmit();\r\n stopReceive();\r\n PhoneManager.setUsingMediaLocator(false);\r\n\r\n }\r\n\r\n \/**\r\n * Obtain a free port we can use.\r\n *\r\n * @return A free port number.\r\n *\/\r\n protected int getFreePort() {\r\n ServerSocket ss;\r\n int freePort = 0;\r\n\r\n for (int i = 0; i < 10; i++) {\r\n freePort = (int) (10000 + Math.round(Math.random() * 10000));\r\n freePort = freePort % 2 == 0 ? freePort : freePort + 1;\r\n try {\r\n ss = new ServerSocket(freePort);\r\n freePort = ss.getLocalPort();\r\n ss.close();\r\n return freePort;\r\n }\r\n catch (IOException e) {\r\n e.printStackTrace();\r\n }\r\n }\r\n try {\r\n ss = new ServerSocket(0);\r\n freePort = ss.getLocalPort();\r\n ss.close();\r\n }\r\n catch (IOException e) {\r\n e.printStackTrace();\r\n }\r\n return freePort;\r\n }\r\n}\r\n","avg_line_length":30.8140495868,"max_line_length":129,"alphanum_fraction":0.6030575298} {"size":3242,"ext":"java","lang":"Java","max_stars_count":null,"content":"package ch.zhaw.securitylab.DIMBA.activity.stock;\n\nimport android.app.TaskStackBuilder;\nimport android.content.Intent;\nimport android.content.SharedPreferences;\nimport android.os.Bundle;\nimport android.preference.PreferenceManager;\nimport android.view.View;\nimport android.widget.AdapterView;\nimport android.widget.ArrayAdapter;\nimport android.widget.ListView;\n\nimport ch.zhaw.securitylab.DIMBA.DIMBA;\nimport ch.zhaw.securitylab.DIMBA.R;\nimport ch.zhaw.securitylab.DIMBA.activity.ActivityDIMBAAbstract;\nimport ch.zhaw.securitylab.DIMBA.activity.ToolbarMode;\n\npublic class ActivityAuthStockList extends ActivityDIMBAAbstract implements AdapterView.OnItemClickListener {\n\n private SharedPreferences defaultPreferences;\n private ListView stockListView;\n private ListView forexListView;\n private ListView forexValuesView;\n private ListView commodityListView;\n private String[] stockNames;\n private String[] forexNames;\n private String[] commodityNames;\n\n public ActivityAuthStockList() {\n super(R.layout.activity_auth_stock_list, ToolbarMode.NAV_AUTH, R.id.nav_go_Stock);\n }\n\n @Override\n protected void onCreate(Bundle savedInstanceState) {\n super.onCreate(savedInstanceState);\n\n defaultPreferences = PreferenceManager.getDefaultSharedPreferences(DIMBA.get());\n\n runOnUiThread(() -> {\n forexNames = new String[]{\"USD\/CHF + 1.2%\",\n \"CHF\/EUR + 0.1%\",\n \"EUR\/USD - 0.7%\"};\n addList(forexListView,forexNames, R.id.ForexList);\n\n commodityNames = new String[]{\n \"Crude Oil - 0.1%\",\n \"Gold + 0.5%\",\n \"Gas + 4.5%\"};\n addList(commodityListView,commodityNames, R.id.CommodityList);\n\n stockNames = new String[]{\"APPL + 0.3%\",\n \"GOOGL + 0.3%\",\n \"MSFT + 0.8%\"};\n addList(stockListView,stockNames, R.id.StockList);\n });\n }\n\n public void addList(ListView lv, String[] list, int id) {\n lv = findViewById(id);\n ArrayAdapter stockAdapter = new ArrayAdapter<>(this, android.R.layout.simple_list_item_1, list);\n lv.setAdapter(stockAdapter);\n lv.setOnItemClickListener(this);\n }\n\n @Override\n public void onItemClick(AdapterView parent, View view, int position, long id) {\n Class clazz = ActivityAuthStock.class;\n\n TaskStackBuilder.create(this)\n .addParentStack(clazz)\n .addNextIntent(new Intent(this, clazz))\n .startActivities();\n 
}\n}\n","avg_line_length":43.2266666667,"max_line_length":116,"alphanum_fraction":0.5311536089} {"size":2231,"ext":"java","lang":"Java","max_stars_count":null,"content":"package org.hisp.dhis.schema.descriptors;\n\n\/*\n * Copyright (c) 2004-2018, University of Oslo\n * All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions are met:\n * Redistributions of source code must retain the above copyright notice, this\n * list of conditions and the following disclaimer.\n *\n * Redistributions in binary form must reproduce the above copyright notice,\n * this list of conditions and the following disclaimer in the documentation\n * and\/or other materials provided with the distribution.\n * Neither the name of the HISP project nor the names of its contributors may\n * be used to endorse or promote products derived from this software without\n * specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND\n * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED\n * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR\n * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES\n * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;\n * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON\n * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS\n * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n *\/\n\nimport org.hisp.dhis.program.ProgramStageInstance;\nimport org.hisp.dhis.schema.Schema;\nimport org.hisp.dhis.schema.SchemaDescriptor;\n\npublic class ProgramStageInstanceSchemaDescriptor implements SchemaDescriptor\n{\n\n public static final String SINGULAR = \"programStageInstance\";\n\n public static final String PLURAL = \"programStageInstances\";\n\n public static final String API_ENDPOINT = \"\/\" + PLURAL;\n\n @Override\n public Schema getSchema()\n {\n Schema schema = new Schema( ProgramStageInstance.class, SINGULAR, PLURAL );\n schema.setRelativeApiEndpoint( API_ENDPOINT );\n\n return schema;\n }\n}\n","avg_line_length":42.0943396226,"max_line_length":83,"alphanum_fraction":0.7691618108} {"size":6150,"ext":"java","lang":"Java","max_stars_count":null,"content":"\/*\n * Copyright (C) 2018-2019 Lightbend Inc. \n *\/\n\n\/\/ Protocol Buffers - Google's data interchange format\n\/\/ Copyright 2008 Google Inc. All rights reserved.\n\/\/ http:\/\/code.google.com\/p\/protobuf\/\n\/\/\n\/\/ Redistribution and use in source and binary forms, with or without\n\/\/ modification, are permitted provided that the following conditions are\n\/\/ met:\n\/\/\n\/\/ * Redistributions of source code must retain the above copyright\n\/\/ notice, this list of conditions and the following disclaimer.\n\/\/ * Redistributions in binary form must reproduce the above\n\/\/ copyright notice, this list of conditions and the following disclaimer\n\/\/ in the documentation and\/or other materials provided with the\n\/\/ distribution.\n\/\/ * Neither the name of Google Inc. 
nor the names of its\n\/\/ contributors may be used to endorse or promote products derived from\n\/\/ this software without specific prior written permission.\n\/\/\n\/\/ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\/\/ \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n\/\/ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n\/\/ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n\/\/ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n\/\/ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n\/\/ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n\/\/ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n\/\/ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n\/\/ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n\/\/ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\npackage akka.protobuf;\n\n\/**\n * This class is used internally by the Protocol Buffer library and generated message\n * implementations. It is public only because those generated messages do not reside in the {@code\n * protobuf} package. Others should not use this class directly.\n *\n *

        This class contains constants and helper functions useful for dealing with the Protocol Buffer\n * wire format.\n *\n * @author kenton@google.com Kenton Varda\n *\/\npublic final class WireFormat {\n \/\/ Do not allow instantiation.\n private WireFormat() {}\n\n public static final int WIRETYPE_VARINT = 0;\n public static final int WIRETYPE_FIXED64 = 1;\n public static final int WIRETYPE_LENGTH_DELIMITED = 2;\n public static final int WIRETYPE_START_GROUP = 3;\n public static final int WIRETYPE_END_GROUP = 4;\n public static final int WIRETYPE_FIXED32 = 5;\n\n static final int TAG_TYPE_BITS = 3;\n static final int TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1;\n\n \/** Given a tag value, determines the wire type (the lower 3 bits). *\/\n static int getTagWireType(final int tag) {\n return tag & TAG_TYPE_MASK;\n }\n\n \/** Given a tag value, determines the field number (the upper 29 bits). *\/\n public static int getTagFieldNumber(final int tag) {\n return tag >>> TAG_TYPE_BITS;\n }\n\n \/** Makes a tag value given a field number and wire type. *\/\n static int makeTag(final int fieldNumber, final int wireType) {\n return (fieldNumber << TAG_TYPE_BITS) | wireType;\n }\n\n \/**\n * Lite equivalent to {@link Descriptors.FieldDescriptor.JavaType}. This is only here to support\n * the lite runtime and should not be used by users.\n *\/\n public enum JavaType {\n INT(0),\n LONG(0L),\n FLOAT(0F),\n DOUBLE(0D),\n BOOLEAN(false),\n STRING(\"\"),\n BYTE_STRING(ByteString.EMPTY),\n ENUM(null),\n MESSAGE(null);\n\n JavaType(final Object defaultDefault) {\n this.defaultDefault = defaultDefault;\n }\n\n \/** The default default value for fields of this type, if it's a primitive type. *\/\n Object getDefaultDefault() {\n return defaultDefault;\n }\n\n private final Object defaultDefault;\n }\n\n \/**\n * Lite equivalent to {@link Descriptors.FieldDescriptor.Type}. 
This is only here to support the\n * lite runtime and should not be used by users.\n *\/\n public enum FieldType {\n DOUBLE(JavaType.DOUBLE, WIRETYPE_FIXED64),\n FLOAT(JavaType.FLOAT, WIRETYPE_FIXED32),\n INT64(JavaType.LONG, WIRETYPE_VARINT),\n UINT64(JavaType.LONG, WIRETYPE_VARINT),\n INT32(JavaType.INT, WIRETYPE_VARINT),\n FIXED64(JavaType.LONG, WIRETYPE_FIXED64),\n FIXED32(JavaType.INT, WIRETYPE_FIXED32),\n BOOL(JavaType.BOOLEAN, WIRETYPE_VARINT),\n STRING(JavaType.STRING, WIRETYPE_LENGTH_DELIMITED) {\n public boolean isPackable() {\n return false;\n }\n },\n GROUP(JavaType.MESSAGE, WIRETYPE_START_GROUP) {\n public boolean isPackable() {\n return false;\n }\n },\n MESSAGE(JavaType.MESSAGE, WIRETYPE_LENGTH_DELIMITED) {\n public boolean isPackable() {\n return false;\n }\n },\n BYTES(JavaType.BYTE_STRING, WIRETYPE_LENGTH_DELIMITED) {\n public boolean isPackable() {\n return false;\n }\n },\n UINT32(JavaType.INT, WIRETYPE_VARINT),\n ENUM(JavaType.ENUM, WIRETYPE_VARINT),\n SFIXED32(JavaType.INT, WIRETYPE_FIXED32),\n SFIXED64(JavaType.LONG, WIRETYPE_FIXED64),\n SINT32(JavaType.INT, WIRETYPE_VARINT),\n SINT64(JavaType.LONG, WIRETYPE_VARINT);\n\n FieldType(final JavaType javaType, final int wireType) {\n this.javaType = javaType;\n this.wireType = wireType;\n }\n\n private final JavaType javaType;\n private final int wireType;\n\n public JavaType getJavaType() {\n return javaType;\n }\n\n public int getWireType() {\n return wireType;\n }\n\n public boolean isPackable() {\n return true;\n }\n }\n\n \/\/ Field numbers for fields in MessageSet wire format.\n static final int MESSAGE_SET_ITEM = 1;\n static final int MESSAGE_SET_TYPE_ID = 2;\n static final int MESSAGE_SET_MESSAGE = 3;\n\n \/\/ Tag numbers.\n static final int MESSAGE_SET_ITEM_TAG = makeTag(MESSAGE_SET_ITEM, WIRETYPE_START_GROUP);\n static final int MESSAGE_SET_ITEM_END_TAG = makeTag(MESSAGE_SET_ITEM, WIRETYPE_END_GROUP);\n static final int MESSAGE_SET_TYPE_ID_TAG = makeTag(MESSAGE_SET_TYPE_ID, WIRETYPE_VARINT);\n static final int MESSAGE_SET_MESSAGE_TAG =\n makeTag(MESSAGE_SET_MESSAGE, WIRETYPE_LENGTH_DELIMITED);\n}\n","avg_line_length":34.9431818182,"max_line_length":100,"alphanum_fraction":0.7211382114} {"size":5989,"ext":"java","lang":"Java","max_stars_count":null,"content":"\/*\n * android-spackle https:\/\/github.com\/twofortyfouram\/android-spackle\n * Copyright (C) 2009\u20132017 two forty four a.m. LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use\n * this file except in compliance with the License. You may obtain a copy of the\n * License at\n *\n * http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software distributed\n * under the License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR\n * CONDITIONS OF ANY KIND, either express or implied. 
See the License for the\n * specific language governing permissions and limitations under the License.\n *\/\n\npackage com.twofortyfouram.spackle;\n\nimport android.content.Context;\nimport android.content.pm.ApplicationInfo;\nimport android.content.pm.PackageInfo;\nimport android.content.pm.PackageManager;\nimport android.content.pm.PackageManager.NameNotFoundException;\n\nimport androidx.annotation.NonNull;\nimport net.jcip.annotations.ThreadSafe;\n\nimport com.twofortyfouram.log.Lumberjack;\n\nimport static com.twofortyfouram.assertion.Assertions.assertNotNull;\n\n\/**\n * Determines information about the build of the app that is\n * running.\n *\/\n@ThreadSafe\npublic final class AppBuildInfo {\n\n \/**\n * Determines whether the application running is debuggable. This is determined from the\n * application info object, as an alternative to {@code BuildInfo} which is useless for\n * libraries.\n *\n * @param context Application context.\n * @return True if the application is debuggable.\n *\/\n public static boolean isDebuggable(@NonNull final Context context) {\n final PackageInfo packageInfo = getMyPackageInfo(context, 0);\n\n final boolean isDebuggable = (0 != (packageInfo.applicationInfo.flags\n & ApplicationInfo.FLAG_DEBUGGABLE));\n\n return isDebuggable;\n }\n\n \/**\n * Gets the \"versionCode\" in the AndroidManifest.\n *\n * @param context Application context.\n * @return versionCode of the app.\n * @see android.content.pm.PackageInfo#versionCode\n *\/\n public static int getVersionCode(@NonNull final Context context) {\n final int versionCode = getMyPackageInfo(context, 0).versionCode;\n\n Lumberjack.v(\"versionCode=%d\", versionCode); \/\/$NON-NLS-1$\n\n return versionCode;\n }\n\n \/**\n * Gets the \"versionName\" in the AndroidManifest.\n *\n * @param context Application context.\n * @return versionName of the app.\n * @see android.content.pm.PackageInfo#versionName\n *\/\n @NonNull\n public static String getVersionName(@NonNull final Context context) {\n String versionName = getMyPackageInfo(context, 0).versionName;\n\n if (null == versionName) {\n versionName = \"\"; \/\/$NON-NLS-1$\n }\n\n Lumberjack.v(\"versionName=%s\", versionName); \/\/$NON-NLS-1$\n\n return versionName;\n }\n\n \/**\n * Gets the name of the application or the package name if the application has no name.\n *\n * @param context Application context.\n * @return Label of the application from the Android Manifest or the package name if no label\n * was set.\n *\/\n @NonNull\n public static String getApplicationName(@NonNull final Context context) {\n assertNotNull(context, \"context\");\n\n final ApplicationInfo info = context.getApplicationInfo();\n\n CharSequence name = context.getPackageManager().getApplicationLabel(info);\n\n if (null == name) {\n name = context.getPackageName();\n }\n\n final String nameString = name.toString();\n\n return nameString;\n }\n\n \/**\n * Note: this method is known to throw RuntimeException on some Android devices when the\n * Android Package Manager dies. 
There's nothing we can do about that error.\n *\n * @param context Application context.\n * @param flags Flags to pass to the package manager.\n * @return PackageInfo for the current package.\n *\/\n @NonNull\n \/*package*\/ static PackageInfo getMyPackageInfo(@NonNull final Context context,\n final int flags) {\n final PackageManager packageManager = context.getPackageManager();\n final String packageName = context.getPackageName();\n\n try {\n return packageManager.getPackageInfo(packageName, flags);\n } catch (final NameNotFoundException e) {\n \/\/ The app's own package must exist, so this should never occur.\n throw new AssertionError(e);\n }\n }\n\n \/**\n * Gets the time in epoch milliseconds when the app was last updated.\n *\n * @param context Application context.\n * @return long representing the Epoch timestamp in milliseconds when the\n * app was last updated.\n *\/\n public static long getLastUpdateWallTimeMillis(@NonNull final Context context) {\n final long lastUpdateTimeMillis = getMyPackageInfo(context, 0).lastUpdateTime;\n\n Lumberjack.v(\"Last update time was %d [milliseconds]\", lastUpdateTimeMillis); \/\/$NON-NLS-1$\n\n return lastUpdateTimeMillis;\n }\n\n \/**\n * Gets the time in epoch milliseconds when the app was installed.\n *\n * @param context Application context.\n * @return long representing the Epoch timestamp in milliseconds when the\n * app was installed.\n *\/\n public static long getInstallWallTimeMillis(@NonNull final Context context) {\n final long installTimeMillis = getMyPackageInfo(context, 0).firstInstallTime;\n\n Lumberjack.v(\"Install time was %d [milliseconds]\", installTimeMillis); \/\/$NON-NLS-1$\n\n return installTimeMillis;\n }\n\n \/**\n * Private constructor prevents instantiation.\n *\n * @throws UnsupportedOperationException because this class cannot be\n * instantiated.\n *\/\n private AppBuildInfo() {\n throw new UnsupportedOperationException(\"This class is non-instantiable\"); \/\/$NON-NLS-1$\n }\n}\n","avg_line_length":33.8361581921,"max_line_length":99,"alphanum_fraction":0.6797462014} {"size":6561,"ext":"java","lang":"Java","max_stars_count":null,"content":"\/*\n * Copyright (c) Facebook, Inc. 
and its affiliates.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\/\n\npackage com.facebook.litho.specmodels.generator;\n\nimport static com.facebook.litho.specmodels.generator.ComponentBodyGenerator.LOCAL_STATE_CONTAINER_NAME;\nimport static com.facebook.litho.specmodels.generator.ComponentBodyGenerator.PREDICATE_NEEDS_STATE;\nimport static com.facebook.litho.specmodels.generator.GeneratorConstants.STATE_CONTAINER_IMPL_GETTER;\n\nimport com.facebook.litho.annotations.OnCreateTreeProp;\nimport com.facebook.litho.annotations.State;\nimport com.facebook.litho.annotations.TreeProp;\nimport com.facebook.litho.specmodels.model.ClassNames;\nimport com.facebook.litho.specmodels.model.DelegateMethod;\nimport com.facebook.litho.specmodels.model.MethodParamModelUtils;\nimport com.facebook.litho.specmodels.model.SpecMethodModel;\nimport com.facebook.litho.specmodels.model.SpecModel;\nimport com.facebook.litho.specmodels.model.SpecModelUtils;\nimport com.facebook.litho.specmodels.model.TreePropModel;\nimport com.squareup.javapoet.CodeBlock;\nimport com.squareup.javapoet.MethodSpec;\nimport com.squareup.javapoet.ParameterizedTypeName;\nimport com.squareup.javapoet.TypeName;\nimport java.util.List;\nimport javax.lang.model.element.Modifier;\n\n\/** Class that generates the tree prop methods for a Component. 
*\/\npublic class TreePropGenerator {\n\n private TreePropGenerator() {}\n\n public static TypeSpecDataHolder generate(SpecModel specModel) {\n return TypeSpecDataHolder.newBuilder()\n .addTypeSpecDataHolder(generatePopulateTreeProps(specModel))\n .addTypeSpecDataHolder(generateGetTreePropsForChildren(specModel))\n .build();\n }\n\n static TypeSpecDataHolder generatePopulateTreeProps(SpecModel specModel) {\n if (specModel.getTreeProps().isEmpty()) {\n return TypeSpecDataHolder.newBuilder().build();\n }\n\n final MethodSpec.Builder method =\n MethodSpec.methodBuilder(\"populateTreeProps\")\n .addAnnotation(Override.class)\n .addModifiers(Modifier.PROTECTED)\n .addParameter(ClassNames.TREE_PROPS, \"treeProps\")\n .beginControlFlow(\"if (treeProps == null)\")\n .addStatement(\"return\")\n .endControlFlow();\n\n for (TreePropModel treeProp : specModel.getTreeProps()) {\n method.addStatement(\n \"$L = treeProps.get($L.class)\",\n treeProp.getName(),\n findTypeByTypeName(treeProp.getTypeName()));\n }\n\n return TypeSpecDataHolder.newBuilder().addMethod(method.build()).build();\n }\n\n static TypeSpecDataHolder generateGetTreePropsForChildren(SpecModel specModel) {\n List> onCreateTreePropsMethods =\n SpecModelUtils.getMethodModelsWithAnnotation(specModel, OnCreateTreeProp.class);\n\n if (onCreateTreePropsMethods.isEmpty()) {\n return TypeSpecDataHolder.newBuilder().build();\n }\n\n final String delegateName = SpecModelUtils.getSpecAccessor(specModel);\n final MethodSpec.Builder builder =\n MethodSpec.methodBuilder(\"getTreePropsForChildren\")\n .addAnnotation(Override.class)\n .addModifiers(Modifier.PROTECTED)\n .returns(ClassNames.TREE_PROPS)\n .addParameter(specModel.getContextClass(), \"c\")\n .addParameter(ClassNames.TREE_PROPS, \"parentTreeProps\")\n .addStatement(\n \"final $T childTreeProps = $T.acquire(parentTreeProps)\",\n ClassNames.TREE_PROPS,\n ClassNames.TREE_PROPS);\n\n final boolean requiresState =\n onCreateTreePropsMethods.stream()\n .anyMatch(method -> method.methodParams.stream().anyMatch(PREDICATE_NEEDS_STATE));\n\n if (requiresState) {\n builder.addStatement(\n \"$L $L = $L\",\n StateContainerGenerator.getStateContainerClassName(specModel),\n LOCAL_STATE_CONTAINER_NAME,\n STATE_CONTAINER_IMPL_GETTER + \"(c)\");\n }\n\n for (SpecMethodModel onCreateTreePropsMethod : onCreateTreePropsMethods) {\n final CodeBlock.Builder block = CodeBlock.builder();\n block\n .add(\n \"childTreeProps.put($L.class, $L.$L(\\n\",\n findTypeByTypeName(onCreateTreePropsMethod.returnType),\n delegateName,\n onCreateTreePropsMethod.name)\n .indent()\n .indent();\n\n for (int i = 0, size = onCreateTreePropsMethod.methodParams.size(); i < size; i++) {\n if (i == 0) {\n block.add(\"($T) $L\", specModel.getContextClass(), \"c\");\n } else if (MethodParamModelUtils.isAnnotatedWith(\n onCreateTreePropsMethod.methodParams.get(i), State.class)) {\n block.add(\n \"$L.$L\",\n LOCAL_STATE_CONTAINER_NAME,\n onCreateTreePropsMethod.methodParams.get(i).getName());\n } else if (MethodParamModelUtils.isAnnotatedWith(\n onCreateTreePropsMethod.methodParams.get(i), TreeProp.class)) {\n block.add(\n \"useTreePropsFromContext() ? 
(($T) $T.getTreePropFromParent(parentTreeProps,\"\n + TreePropGenerator.findTypeByTypeName(\n onCreateTreePropsMethod.methodParams.get(i).getTypeName())\n + \".class\"\n + \")) : $L\",\n onCreateTreePropsMethod.methodParams.get(i).getTypeName(),\n ClassNames.COMPONENT,\n onCreateTreePropsMethod.methodParams.get(i).getName());\n } else {\n block.add(\"$L\", onCreateTreePropsMethod.methodParams.get(i).getName());\n }\n\n if (i < size - 1) {\n block.add(\",\\n\");\n }\n }\n\n builder.addCode(block.add(\"));\\n\").unindent().unindent().build());\n }\n\n builder.addStatement(\"return childTreeProps\");\n\n return TypeSpecDataHolder.newBuilder().addMethod(builder.build()).build();\n }\n\n public static TypeName findTypeByTypeName(final TypeName typeName) {\n if (typeName instanceof ParameterizedTypeName) {\n return ((ParameterizedTypeName) typeName).rawType;\n }\n return typeName;\n }\n}\n","avg_line_length":40.006097561,"max_line_length":104,"alphanum_fraction":0.6927297668} {"size":432,"ext":"java","lang":"Java","max_stars_count":null,"content":"package com.vidolima.doco.annotation;\n\nimport java.lang.annotation.Documented;\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\n\n\/**\n * Use this annotation on embedded fields.\n *\/\n@Documented\n@Target(ElementType.FIELD)\n@Retention(RetentionPolicy.RUNTIME)\npublic @interface DocumentEmbed {\n String name() default \"\";\n}","avg_line_length":25.4117647059,"max_line_length":44,"alphanum_fraction":0.7962962963} {"size":7187,"ext":"java","lang":"Java","max_stars_count":377.0,"content":"\/*\n * Licensed to the Technische Universit\u00e4t Darmstadt under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. 
The Technische Universit\u00e4t Darmstadt \n * licenses this file to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License.\n * \n * http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\/\npackage de.tudarmstadt.ukp.inception.ui.kb.search;\n\nimport static de.tudarmstadt.ukp.clarin.webanno.api.WebAnnoConst.SPAN_TYPE;\nimport static de.tudarmstadt.ukp.clarin.webanno.model.AnchoringMode.TOKENS;\nimport static de.tudarmstadt.ukp.clarin.webanno.model.OverlapMode.NO_OVERLAP;\nimport static java.util.Arrays.asList;\nimport static org.assertj.core.api.Assertions.assertThat;\nimport static org.mockito.ArgumentMatchers.any;\nimport static org.mockito.Mockito.when;\nimport static org.mockito.MockitoAnnotations.initMocks;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Optional;\n\nimport org.apache.uima.cas.CAS;\nimport org.apache.uima.fit.factory.JCasBuilder;\nimport org.apache.uima.fit.factory.JCasFactory;\nimport org.apache.uima.jcas.JCas;\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\nimport org.mockito.Mock;\n\nimport de.tudarmstadt.ukp.clarin.webanno.api.AnnotationSchemaService;\nimport de.tudarmstadt.ukp.clarin.webanno.api.annotation.feature.BooleanFeatureSupport;\nimport de.tudarmstadt.ukp.clarin.webanno.api.annotation.feature.FeatureSupportRegistryImpl;\nimport de.tudarmstadt.ukp.clarin.webanno.api.annotation.feature.NumberFeatureSupport;\nimport de.tudarmstadt.ukp.clarin.webanno.api.annotation.feature.StringFeatureSupport;\nimport de.tudarmstadt.ukp.clarin.webanno.model.AnnotationFeature;\nimport de.tudarmstadt.ukp.clarin.webanno.model.AnnotationLayer;\nimport de.tudarmstadt.ukp.clarin.webanno.model.Project;\nimport de.tudarmstadt.ukp.dkpro.core.api.ner.type.NamedEntity;\nimport de.tudarmstadt.ukp.dkpro.core.api.segmentation.type.Token;\nimport de.tudarmstadt.ukp.inception.kb.KnowledgeBaseService;\nimport de.tudarmstadt.ukp.inception.kb.config.KnowledgeBasePropertiesImpl;\nimport de.tudarmstadt.ukp.inception.kb.graph.KBHandle;\nimport de.tudarmstadt.ukp.inception.kb.graph.KBInstance;\nimport de.tudarmstadt.ukp.inception.kb.model.KnowledgeBase;\nimport de.tudarmstadt.ukp.inception.search.FeatureIndexingSupportRegistryImpl;\nimport de.tudarmstadt.ukp.inception.search.PrimitiveUimaIndexingSupport;\nimport de.tudarmstadt.ukp.inception.search.index.mtas.MtasUimaParser;\nimport de.tudarmstadt.ukp.inception.search.index.mtas.MtasUtils;\nimport de.tudarmstadt.ukp.inception.ui.kb.feature.ConceptFeatureSupport;\nimport mtas.analysis.token.MtasToken;\nimport mtas.analysis.token.MtasTokenCollection;\n\npublic class ConceptFeatureIndexingSupportTest\n{\n private Project project;\n private KnowledgeBase kb;\n private @Mock AnnotationSchemaService annotationSchemaService;\n private @Mock KnowledgeBaseService kbService;\n private FeatureSupportRegistryImpl featureSupportRegistry;\n private FeatureIndexingSupportRegistryImpl featureIndexingSupportRegistry;\n private JCas jcas;\n\n @BeforeEach\n public void setup() throws Exception\n {\n initMocks(this);\n\n project = new Project();\n project.setId(1l);\n project.setName(\"test project\");\n\n kb = 
new KnowledgeBase();\n\n featureSupportRegistry = new FeatureSupportRegistryImpl(asList(new StringFeatureSupport(),\n new BooleanFeatureSupport(), new NumberFeatureSupport(),\n new ConceptFeatureSupport(kbService, new KnowledgeBasePropertiesImpl())));\n featureSupportRegistry.init();\n\n featureIndexingSupportRegistry = new FeatureIndexingSupportRegistryImpl(\n asList(new PrimitiveUimaIndexingSupport(featureSupportRegistry),\n new ConceptFeatureIndexingSupport(featureSupportRegistry, kbService)));\n featureIndexingSupportRegistry.init();\n\n \/\/ Resetting the JCas is faster than re-creating it\n if (jcas == null) {\n jcas = JCasFactory.createJCas();\n }\n else {\n jcas.reset();\n }\n }\n\n @Test\n public void testConceptFeature() throws Exception\n {\n JCasBuilder builder = new JCasBuilder(jcas);\n builder.add(\"I\", Token.class);\n builder.add(\" \");\n builder.add(\"am\", Token.class);\n builder.add(\" \");\n int begin = builder.getPosition();\n builder.add(\"John\", Token.class);\n builder.add(\" \");\n builder.add(\"Smith\", Token.class);\n NamedEntity ne = new NamedEntity(jcas, begin, builder.getPosition());\n ne.setIdentifier(\"urn:dummy-concept\");\n ne.addToIndexes();\n builder.add(\" \");\n builder.add(\".\", Token.class);\n\n AnnotationLayer layer = new AnnotationLayer(NamedEntity.class.getName(), \"Named Entity\",\n SPAN_TYPE, project, true, TOKENS, NO_OVERLAP);\n when(annotationSchemaService.listAnnotationLayer(any(Project.class)))\n .thenReturn(asList(layer));\n\n when(kbService.readInstance(any(Project.class), any(String.class)))\n .thenReturn(Optional.of(new KBInstance(\"urn:dummy-concept\", \"Dummy concept\")));\n\n KBHandle kbHandle = new KBHandle(\"urn:dummy-concept\", \"Dummy concept\");\n kbHandle.setKB(kb);\n when(kbService.readHandle(any(Project.class), any(String.class)))\n .thenReturn(Optional.of(kbHandle));\n\n List dummyValue = new ArrayList();\n dummyValue.add(new KBHandle(\"urn:dummy-parent-concept\", \"Dummy Parent Concept\"));\n\n when(kbService.getParentConceptList(any(KnowledgeBase.class), any(String.class),\n any(Boolean.class))).thenReturn(dummyValue);\n\n MtasUimaParser sut = new MtasUimaParser(\n asList(new AnnotationFeature(1l, layer, \"value\", CAS.TYPE_NAME_STRING),\n new AnnotationFeature(2l, layer, \"identifier\", \"kb:\")),\n annotationSchemaService, featureIndexingSupportRegistry);\n MtasTokenCollection tc = sut.createTokenCollection(jcas.getCas());\n MtasUtils.print(tc);\n\n List tokens = new ArrayList<>();\n tc.iterator().forEachRemaining(tokens::add);\n\n assertThat(tokens).filteredOn(t -> t.getPrefix().startsWith(\"Named_Entity\"))\n .extracting(MtasToken::getPrefix) \/\/\n .contains(\"Named_Entity\", \"Named_Entity.identifier\",\n \"Named_Entity.identifier-exact\");\n\n assertThat(tokens).filteredOn(t -> t.getPrefix().startsWith(\"Named_Entity\"))\n .extracting(MtasToken::getPostfix)\n .contains(\"\", \"urn:dummy-concept\", \"Dummy concept\");\n }\n}\n","avg_line_length":44.6397515528,"max_line_length":98,"alphanum_fraction":0.7253374148} {"size":2252,"ext":"java","lang":"Java","max_stars_count":null,"content":"import java.util.Date;\n\nimport org.junit.Test;\nimport org.sdmlib.models.classes.Card;\nimport org.sdmlib.models.classes.ClassModel;\nimport org.sdmlib.models.classes.Clazz;\nimport org.sdmlib.models.classes.DataType;\n\npublic class modelgen {\n\t@Test\n\tpublic void genhmsModel() {\n\t\tClassModel model = new ClassModel();\n\t\tClazz hmsModel = model.createClazz(\"HMSModel\");\n\t\thmsModel.withAttribute(\"id\", 
DataType.INT);\n\t\thmsModel.withAttribute(\"modTime\", DataType.ref(Date.class));\n\t\t\n\t\tClazz role = model.createClazz(\"Role\");\n\t\thmsModel.withAttribute(\"value\", DataType.STRING);\n\t\t\t\t\n\t\tClazz user = model.createClazz(\"User\");\n\t\tuser.withSuperClazz(hmsModel);\n\t\tuser.withAttribute(\"email\", DataType.STRING);\n\t\tuser.withAttribute(\"firstName\", DataType.STRING);\n\t\tuser.withAttribute(\"lastName\", DataType.STRING);\n\t\tuser.withAttribute(\"password\", DataType.STRING);\n\t\tuser.withAttribute(\"matrikelNumber\", DataType.STRING);\n\t\tuser.withAttribute(\"lastLogin\", DataType.ref(Date.class));\n\t\tuser.withAttribute(\"emailValidated\", DataType.STRING);\n\t\t\n\t\tuser.withAssoc(role, \"role\", Card.MANY);\n\t\t\n\t\tClazz lecture = model.createClazz(\"Lecture\");\n\t\tlecture.withSuperClazz(hmsModel);\n\t\tlecture.withAttribute(\"name\", DataType.STRING);\n\t\tlecture.withAttribute(\"description\", DataType.STRING);\n\t\tlecture.withAttribute(\"questionsDuty ???\", DataType.STRING); \/\/ ???\n\t\tlecture.withAttribute(\"closingdate\", DataType.ref(Date.class));\n\t\tlecture.withAttribute(\"optionalDuties\", DataType.INT);\n\t\tlecture.withAttribute(\"lowerProcentualBounderyOfDuties\", DataType.INT);\n\t\tlecture.withAttribute(\"minimumPercentageForExamination\", DataType.INT);\n\t\t\n\t\tClazz semester = model.createClazz(\"Semester\");\n\t\tsemester.withSuperClazz(hmsModel);\n\t\tsemester.withAttribute(\"semester\", DataType.STRING);\n\t\tsemester.withAssoc(lecture, \"lectures\", Card.MANY, \"semester\", Card.ONE);\n\n\t\t\/\/ Message\n\t\tClazz message = model.createClazz(\"Message\");\n\t\tmessage.withSuperClazz(hmsModel);\n\t\tmessage.withAttribute(\"body\", DataType.STRING);\n\t\tmessage.withAttribute(\"date\", DataType.ref(Date.class));\n\t\tmessage.withAssoc(hmsModel, \"parent\", Card.ONE);\n\/\/\t\tmessage.withAttribute(\"parent\", DataType.ref(hmsModel));\n\t\tmessage.withAssoc(user, \"sender\", Card.ONE, \"messages\",Card.MANY);\n\t\t\n\t\tmodel.dumpHTML(\"model\");\n\t}\n}\n","avg_line_length":38.1694915254,"max_line_length":75,"alphanum_fraction":0.7504440497} {"size":832,"ext":"java","lang":"Java","max_stars_count":null,"content":"package com.webank.wedpr.utils;\n\n\/**\n * @author aaronchu\n * @Description\n * @data 2020\/06\/19\n *\/\npublic class EnvironmentUtils {\n\n public static String getResourceTailByOs(String osName){\n if(osName == null || osName.isEmpty()){\n throw new IllegalArgumentException(\"osName cannot be null or empty\");\n }\n osName = osName.toLowerCase();\n if(osName.contains(\"windows\")) return \".dll\";\n if(osName.contains(\"linux\")) {\n String osArch = System.getProperty(\"os.arch\").toLowerCase();\n if(\"aarch64\".equals(osArch)){\n return \"_arm.so\";\n }\n return \".so\";\n }\n if(osName.contains(\"mac\")) return \".dylib\";\n throw new IllegalArgumentException(\"does not support os :\"+osName);\n }\n\n private EnvironmentUtils(){}\n}\n","avg_line_length":28.6896551724,"max_line_length":81,"alphanum_fraction":0.5901442308} {"size":10467,"ext":"java","lang":"Java","max_stars_count":null,"content":"package gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements;\n\n\/***************************************************************************\/\n\/* THIS IS AUTOGENERATED CODE - DO NOT MODIFY OR YOUR CHANGES WILL BE LOST *\/\n\/* THIS CODE CAN BE REGENERATED USING 'xsd-codegen' *\/\n\/***************************************************************************\/\npublic class 
SimpleRestrictionType_AttributeGroup extends gw.xml.XmlElement implements gw.internal.xml.IXmlGeneratedClass {\n\n public static final javax.xml.namespace.QName $ATTRIBUTE_QNAME_Id = new javax.xml.namespace.QName( \"\", \"id\", \"\" );\n public static final javax.xml.namespace.QName $ATTRIBUTE_QNAME_Ref = new javax.xml.namespace.QName( \"\", \"ref\", \"\" );\n public static final javax.xml.namespace.QName $ELEMENT_QNAME_Annotation = new javax.xml.namespace.QName( \"http:\/\/www.w3.org\/2001\/XMLSchema\", \"annotation\", \"xs\" );\n public static final javax.xml.namespace.QName $QNAME = new javax.xml.namespace.QName( \"http:\/\/www.w3.org\/2001\/XMLSchema\", \"attributeGroup\", \"xs\" );\n public static final gw.util.concurrent.LockingLazyVar TYPE = new gw.util.concurrent.LockingLazyVar( gw.lang.reflect.TypeSystem.getGlobalLock() ) {\n @Override\n protected gw.lang.reflect.IType init() {\n return gw.lang.reflect.TypeSystem.getByFullName( \"gw.xsd.w3c.xmlschema.anonymous.elements.SimpleRestrictionType_AttributeGroup\" );\n }\n };\n private static final gw.util.concurrent.LockingLazyVar TYPEINSTANCETYPE = new gw.util.concurrent.LockingLazyVar( gw.lang.reflect.TypeSystem.getGlobalLock() ) {\n @Override\n protected gw.lang.reflect.IType init() {\n return gw.lang.reflect.TypeSystem.getByFullName( \"gw.xsd.w3c.xmlschema.types.complex.AttributeGroupRef\" );\n }\n };\n\n public SimpleRestrictionType_AttributeGroup() {\n this( new gw.internal.schema.gw.xsd.w3c.xmlschema.types.complex.AttributeGroupRef() );\n }\n\n public SimpleRestrictionType_AttributeGroup( gw.internal.schema.gw.xsd.w3c.xmlschema.types.complex.AttributeGroupRef typeInstance ) {\n super( $QNAME, TYPE.get(), TYPEINSTANCETYPE.get(), typeInstance );\n }\n\n protected SimpleRestrictionType_AttributeGroup( javax.xml.namespace.QName qname, gw.lang.reflect.IType type, gw.lang.reflect.IType schemaDefinedTypeInstanceType, gw.internal.schema.gw.xsd.w3c.xmlschema.types.complex.AnyType typeInstance ) {\n super( qname, type, schemaDefinedTypeInstanceType, typeInstance );\n }\n\n\n public gw.internal.schema.gw.xsd.w3c.xmlschema.types.complex.AttributeGroupRef getTypeInstance() {\n \/\/noinspection RedundantCast\n return (gw.internal.schema.gw.xsd.w3c.xmlschema.types.complex.AttributeGroupRef) super.getTypeInstance();\n }\n\n public void setTypeInstance( gw.internal.schema.gw.xsd.w3c.xmlschema.types.complex.AttributeGroupRef param ) {\n super.setTypeInstance( param );\n }\n\n\n public gw.internal.schema.gw.xsd.w3c.xmlschema.Annotation Annotation() {\n return (gw.internal.schema.gw.xsd.w3c.xmlschema.Annotation) TYPE.get().getTypeInfo().getProperty( \"Annotation\" ).getAccessor().getValue( this );\n }\n\n public void setAnnotation$( gw.internal.schema.gw.xsd.w3c.xmlschema.Annotation param ) {\n TYPE.get().getTypeInfo().getProperty( \"Annotation\" ).getAccessor().setValue( this, param );\n }\n\n\n public java.lang.String Id() {\n return (java.lang.String) TYPE.get().getTypeInfo().getProperty( \"Id\" ).getAccessor().getValue( this );\n }\n\n public void setId$( java.lang.String param ) {\n TYPE.get().getTypeInfo().getProperty( \"Id\" ).getAccessor().setValue( this, param );\n }\n\n\n public javax.xml.namespace.QName Ref() {\n return (javax.xml.namespace.QName) TYPE.get().getTypeInfo().getProperty( \"Ref\" ).getAccessor().getValue( this );\n }\n\n public void setRef$( javax.xml.namespace.QName param ) {\n TYPE.get().getTypeInfo().getProperty( \"Ref\" ).getAccessor().setValue( this, param );\n }\n\n public static 
gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.SimpleRestrictionType_AttributeGroup parse( byte[] byteArray ) {\n \/\/noinspection RedundantArrayCreation\n return (gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.SimpleRestrictionType_AttributeGroup) TYPE.get().getTypeInfo().getMethod( \"parse\", gw.lang.reflect.TypeSystem.get( byte[].class ) ).getCallHandler().handleCall( null, new java.lang.Object[] { byteArray } );\n }\n\n public static gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.SimpleRestrictionType_AttributeGroup parse( byte[] byteArray, gw.xml.XmlParseOptions options ) {\n \/\/noinspection RedundantArrayCreation\n return (gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.SimpleRestrictionType_AttributeGroup) TYPE.get().getTypeInfo().getMethod( \"parse\", gw.lang.reflect.TypeSystem.get( byte[].class ), gw.lang.reflect.TypeSystem.get( gw.xml.XmlParseOptions.class ) ).getCallHandler().handleCall( null, new java.lang.Object[] { byteArray, options } );\n }\n\n public static gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.SimpleRestrictionType_AttributeGroup parse( java.io.File file ) {\n \/\/noinspection RedundantArrayCreation\n return (gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.SimpleRestrictionType_AttributeGroup) TYPE.get().getTypeInfo().getMethod( \"parse\", gw.lang.reflect.TypeSystem.get( java.io.File.class ) ).getCallHandler().handleCall( null, new java.lang.Object[] { file } );\n }\n\n public static gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.SimpleRestrictionType_AttributeGroup parse( java.io.File file, gw.xml.XmlParseOptions options ) {\n \/\/noinspection RedundantArrayCreation\n return (gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.SimpleRestrictionType_AttributeGroup) TYPE.get().getTypeInfo().getMethod( \"parse\", gw.lang.reflect.TypeSystem.get( java.io.File.class ), gw.lang.reflect.TypeSystem.get( gw.xml.XmlParseOptions.class ) ).getCallHandler().handleCall( null, new java.lang.Object[] { file, options } );\n }\n\n public static gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.SimpleRestrictionType_AttributeGroup parse( java.io.InputStream inputStream ) {\n \/\/noinspection RedundantArrayCreation\n return (gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.SimpleRestrictionType_AttributeGroup) TYPE.get().getTypeInfo().getMethod( \"parse\", gw.lang.reflect.TypeSystem.get( java.io.InputStream.class ) ).getCallHandler().handleCall( null, new java.lang.Object[] { inputStream } );\n }\n\n public static gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.SimpleRestrictionType_AttributeGroup parse( java.io.InputStream inputStream, gw.xml.XmlParseOptions options ) {\n \/\/noinspection RedundantArrayCreation\n return (gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.SimpleRestrictionType_AttributeGroup) TYPE.get().getTypeInfo().getMethod( \"parse\", gw.lang.reflect.TypeSystem.get( java.io.InputStream.class ), gw.lang.reflect.TypeSystem.get( gw.xml.XmlParseOptions.class ) ).getCallHandler().handleCall( null, new java.lang.Object[] { inputStream, options } );\n }\n\n public static gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.SimpleRestrictionType_AttributeGroup parse( java.io.Reader reader ) {\n \/\/noinspection RedundantArrayCreation\n return (gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.SimpleRestrictionType_AttributeGroup) TYPE.get().getTypeInfo().getMethod( \"parse\", gw.lang.reflect.TypeSystem.get( java.io.Reader.class ) 
).getCallHandler().handleCall( null, new java.lang.Object[] { reader } );\n }\n\n public static gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.SimpleRestrictionType_AttributeGroup parse( java.io.Reader reader, gw.xml.XmlParseOptions options ) {\n \/\/noinspection RedundantArrayCreation\n return (gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.SimpleRestrictionType_AttributeGroup) TYPE.get().getTypeInfo().getMethod( \"parse\", gw.lang.reflect.TypeSystem.get( java.io.Reader.class ), gw.lang.reflect.TypeSystem.get( gw.xml.XmlParseOptions.class ) ).getCallHandler().handleCall( null, new java.lang.Object[] { reader, options } );\n }\n\n public static gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.SimpleRestrictionType_AttributeGroup parse( java.lang.String xmlString ) {\n \/\/noinspection RedundantArrayCreation\n return (gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.SimpleRestrictionType_AttributeGroup) TYPE.get().getTypeInfo().getMethod( \"parse\", gw.lang.reflect.TypeSystem.get( java.lang.String.class ) ).getCallHandler().handleCall( null, new java.lang.Object[] { xmlString } );\n }\n\n public static gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.SimpleRestrictionType_AttributeGroup parse( java.lang.String xmlString, gw.xml.XmlParseOptions options ) {\n \/\/noinspection RedundantArrayCreation\n return (gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.SimpleRestrictionType_AttributeGroup) TYPE.get().getTypeInfo().getMethod( \"parse\", gw.lang.reflect.TypeSystem.get( java.lang.String.class ), gw.lang.reflect.TypeSystem.get( gw.xml.XmlParseOptions.class ) ).getCallHandler().handleCall( null, new java.lang.Object[] { xmlString, options } );\n }\n\n public static gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.SimpleRestrictionType_AttributeGroup parse( java.net.URL url ) {\n \/\/noinspection RedundantArrayCreation\n return (gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.SimpleRestrictionType_AttributeGroup) TYPE.get().getTypeInfo().getMethod( \"parse\", gw.lang.reflect.TypeSystem.get( java.net.URL.class ) ).getCallHandler().handleCall( null, new java.lang.Object[] { url } );\n }\n\n public static gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.SimpleRestrictionType_AttributeGroup parse( java.net.URL url, gw.xml.XmlParseOptions options ) {\n \/\/noinspection RedundantArrayCreation\n return (gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.SimpleRestrictionType_AttributeGroup) TYPE.get().getTypeInfo().getMethod( \"parse\", gw.lang.reflect.TypeSystem.get( java.net.URL.class ), gw.lang.reflect.TypeSystem.get( gw.xml.XmlParseOptions.class ) ).getCallHandler().handleCall( null, new java.lang.Object[] { url, options } );\n }\n\n @SuppressWarnings( {\"UnusedDeclaration\"} )\n private static final long FINGERPRINT = 2110283714877373226L;\n\n}\n","avg_line_length":75.3021582734,"max_line_length":365,"alphanum_fraction":0.7556128786} {"size":3457,"ext":"java","lang":"Java","max_stars_count":null,"content":"\/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n *\/\n\npackage io.opentelemetry.sdk.trace;\n\nimport io.opentelemetry.api.trace.Span;\nimport io.opentelemetry.context.Context;\nimport io.opentelemetry.sdk.common.CompletableResultCode;\nimport java.io.Closeable;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.concurrent.TimeUnit;\n\n\/**\n * SpanProcessor is the interface {@code TracerSdk} uses to allow 
synchronous hooks for when a\n * {@code Span} is started or when a {@code Span} is ended.\n *\/\npublic interface SpanProcessor extends Closeable {\n\n \/**\n * Returns a {@link SpanProcessor} which simply delegates all processing to the {@code processors}\n * in order.\n *\/\n static SpanProcessor composite(SpanProcessor... processors) {\n return composite(Arrays.asList(processors));\n }\n\n \/**\n * Returns a {@link SpanProcessor} which simply delegates all processing to the {@code processors}\n * in order.\n *\/\n static SpanProcessor composite(Iterable processors) {\n List processorsList = new ArrayList<>();\n for (SpanProcessor processor : processors) {\n processorsList.add(processor);\n }\n if (processorsList.isEmpty()) {\n return NoopSpanProcessor.getInstance();\n }\n if (processorsList.size() == 1) {\n return processorsList.get(0);\n }\n return MultiSpanProcessor.create(processorsList);\n }\n\n \/**\n * Called when a {@link io.opentelemetry.api.trace.Span} is started, if the {@link\n * Span#isRecording()} returns true.\n *\n *
        This method is called synchronously on the execution thread, should not throw or block the\n * execution thread.\n *\n * @param parentContext the parent {@code Context} of the span that just started.\n * @param span the {@code ReadableSpan} that just started.\n *\/\n void onStart(Context parentContext, ReadWriteSpan span);\n\n \/**\n * Returns {@code true} if this {@link SpanProcessor} requires start events.\n *\n * @return {@code true} if this {@link SpanProcessor} requires start events.\n *\/\n boolean isStartRequired();\n\n \/**\n * Called when a {@link io.opentelemetry.api.trace.Span} is ended, if the {@link\n * Span#isRecording()} returns true.\n *\n *
        This method is called synchronously on the execution thread, should not throw or block the\n * execution thread.\n *\n * @param span the {@code ReadableSpan} that just ended.\n *\/\n void onEnd(ReadableSpan span);\n\n \/**\n * Returns {@code true} if this {@link SpanProcessor} requires end events.\n *\n * @return {@code true} if this {@link SpanProcessor} requires end events.\n *\/\n boolean isEndRequired();\n\n \/**\n * Processes all span events that have not yet been processed and closes used resources.\n *\n * @return a {@link CompletableResultCode} which completes when shutdown is finished.\n *\/\n default CompletableResultCode shutdown() {\n return forceFlush();\n }\n\n \/**\n * Processes all span events that have not yet been processed.\n *\n * @return a {@link CompletableResultCode} which completes when currently queued spans are\n * finished processing.\n *\/\n default CompletableResultCode forceFlush() {\n return CompletableResultCode.ofSuccess();\n }\n\n \/**\n * Closes this {@link SpanProcessor} after processing any remaining spans, releasing any\n * resources.\n *\/\n @Override\n default void close() {\n shutdown().join(10, TimeUnit.SECONDS);\n }\n}\n","avg_line_length":30.3245614035,"max_line_length":100,"alphanum_fraction":0.7000289268} {"size":459,"ext":"java","lang":"Java","max_stars_count":null,"content":"package Infrastructure;\n\npublic class CloudWebDriverFactory implements WebDriverFactory {\n public String create() {\n String testBrowser = ConfigurationManager.getInstance().getTestbrowser();\n switch (testBrowser) {\n case \"chrome\":\n return \"cloud new chrome driver\";\n case \"firefox\":\n return \"cloud new firefox driver\";\n default:\n return \"\";\n }\n\n\n }\n}\n","avg_line_length":25.5,"max_line_length":81,"alphanum_fraction":0.5729847495} {"size":7610,"ext":"java","lang":"Java","max_stars_count":1.0,"content":"\/*\n * Copyright (c) 2017 Ampool, Inc. All rights reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file except\n * in compliance with the License. You may obtain a copy of the License at\n *\n * http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software distributed under the License\n * is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express\n * or implied. See the License for the specific language governing permissions and limitations under\n * the License. 
See accompanying LICENSE file.\n *\/\npackage io.ampool.monarch.table.coprocessor;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertNull;\nimport static org.junit.Assert.assertTrue;\n\nimport io.ampool.monarch.table.Admin;\nimport io.ampool.monarch.table.Bytes;\nimport io.ampool.monarch.table.MTable;\nimport io.ampool.monarch.table.MTableDUnitHelper;\nimport io.ampool.monarch.table.MTableDescriptor;\nimport io.ampool.monarch.table.MTableType;\nimport io.ampool.monarch.table.Put;\nimport io.ampool.monarch.table.Row;\nimport io.ampool.monarch.table.Scan;\nimport io.ampool.monarch.table.client.MClientCache;\nimport io.ampool.monarch.table.client.MClientCacheFactory;\nimport io.ampool.monarch.table.internal.SingleVersionRow;\nimport io.ampool.monarch.types.BasicTypes;\nimport org.apache.geode.internal.logging.LogService;\nimport org.apache.geode.test.dunit.standalone.DUnitLauncher;\nimport org.apache.geode.test.junit.categories.MonarchTest;\nimport org.apache.logging.log4j.Logger;\nimport org.junit.Test;\nimport org.junit.experimental.categories.Category;\nimport org.junit.runner.RunWith;\n\nimport java.util.Iterator;\nimport java.util.Map;\nimport junitparams.JUnitParamsRunner;\nimport junitparams.Parameters;\n\n@Category(MonarchTest.class)\n@RunWith(JUnitParamsRunner.class)\npublic class MTableCoprocessorSCDType2PutObserverDUnitTest2 extends MTableDUnitHelper {\n private static final Logger logger = LogService.getLogger();\n private final int numRecords = 100;\n\n public MTableCoprocessorSCDType2PutObserverDUnitTest2() {\n super();\n }\n\n @Override\n public void postSetUp() throws Exception {\n super.postSetUp();\n startServerOn(vm0, DUnitLauncher.getLocatorString());\n startServerOn(vm1, DUnitLauncher.getLocatorString());\n startServerOn(vm2, DUnitLauncher.getLocatorString());\n createClientCache();\n }\n\n @Override\n public void tearDown2() throws Exception {\n closeMClientCache();\n super.tearDown2();\n }\n\n private Object[] getParameters() {\n return new Object[][] {{1}, {2}, {3}, {4}, {5}};\n }\n\n \/**\n * Create the table with this schema customer_id customer_name location start_date end_date\n * ------------------------------------------------------------------ 1 Marston Illions\n * 01-Mar-2010 20-Fdb-2011 1 Marston Seattle 21-Feb-2011 NULL\n * ------------------------------------------------------------------\n *\/\n private MTableDescriptor getTableDescriptor(final String tableName) {\n MTableDescriptor mTableDescriptor = new MTableDescriptor(MTableType.ORDERED_VERSIONED);\n mTableDescriptor.setTableName(tableName);\n mTableDescriptor.addColumn(\"customer_id\", BasicTypes.INT);\n mTableDescriptor.addColumn(\"customer_name\", BasicTypes.STRING);\n mTableDescriptor.addColumn(\"location\", BasicTypes.STRING);\n mTableDescriptor.addColumn(\"start_date\", BasicTypes.STRING);\n mTableDescriptor.addColumn(\"end_date\", BasicTypes.STRING);\n mTableDescriptor.addCoprocessor(\"io.ampool.monarch.table.coprocessor.MTableSCDType2Observer2\");\n mTableDescriptor.setMaxVersions(5);\n return mTableDescriptor;\n }\n\n\n private void createMTableWithCoprocessor(final String tableName) {\n MClientCache anyInstance = MClientCacheFactory.getAnyInstance();\n Admin admin = anyInstance.getAdmin();\n MTable mTable = admin.createMTable(tableName, getTableDescriptor(tableName));\n assertNotNull(mTable);\n assertTrue(admin.existsMTable(tableName));\n }\n\n private void 
ingestRecords(final String tableName, final int numberOfVersions) {\n MTable mTable = MClientCacheFactory.getAnyInstance().getMTable(tableName);\n for (int j = 1; j <= numberOfVersions; j++) {\n for (int i = 0; i < numRecords; i++) {\n Put put = new Put(Bytes.toBytes(i));\n put.addColumn(\"customer_id\", i);\n put.addColumn(\"customer_name\", \"ABC\" + i);\n put.addColumn(\"location\", \"Location\" + (j));\n put.addColumn(\"start_date\", String.valueOf(10000 + (j * 1000)));\n put.addColumn(\"end_date\", null);\n put.setTimeStamp(j);\n mTable.put(put);\n }\n }\n }\n\n private void scanRecords(final String tableName, final int numberOfVersions) {\n MTable mTable = MClientCacheFactory.getAnyInstance().getMTable(tableName);\n Scan scan = new Scan();\n scan.setMaxVersions();\n Iterator iterator = mTable.getScanner(scan).iterator();\n int records = 0;\n while (iterator.hasNext()) {\n records++;\n Row res = iterator.next();\n Map allVersions = res.getAllVersions();\n assertEquals(numberOfVersions, allVersions.size());\n String startdate = null;\n for (long i = numberOfVersions; i >= 1; i--) {\n SingleVersionRow singleVersionRow = allVersions.get(i);\n if (i == numberOfVersions) {\n startdate = assertlatestVersion(singleVersionRow, i);\n } else {\n String expectedEndDate = String.valueOf(Long.parseLong(startdate) - 1);\n startdate = assertNextVersion(singleVersionRow, i, expectedEndDate);\n }\n System.out.println(singleVersionRow.getCells());\n }\n }\n assertEquals(numRecords, records);\n }\n\n private String assertlatestVersion(SingleVersionRow singleVersionRow, long version) {\n final StringBuilder sb = new StringBuilder();\n assertNotNull(singleVersionRow);\n assertFalse(singleVersionRow.getCells().isEmpty());\n singleVersionRow.getCells().forEach((C) -> {\n if (Bytes.toString(C.getColumnName()).equals(\"end_date\")) {\n assertNull(C.getColumnValue());\n }\n if (Bytes.toString(C.getColumnName()).equals(\"start_date\")) {\n assertNotNull(C.getColumnValue());\n assertEquals(C.getColumnValue(), String.valueOf(10000 + (version * 1000)));\n sb.append(C.getColumnValue());\n }\n });\n return sb.toString();\n }\n\n private String assertNextVersion(SingleVersionRow singleVersionRow, long version,\n String expectedEndDate) {\n final StringBuilder sb = new StringBuilder();\n assertNotNull(singleVersionRow);\n assertFalse(singleVersionRow.getCells().isEmpty());\n singleVersionRow.getCells().forEach((C) -> {\n if (Bytes.toString(C.getColumnName()).equals(\"end_date\")) {\n assertNotNull(C.getColumnValue());\n assertEquals(C.getColumnValue(), expectedEndDate);\n }\n if (Bytes.toString(C.getColumnName()).equals(\"start_date\")) {\n assertNotNull(C.getColumnValue());\n assertEquals(C.getColumnValue(), String.valueOf(10000 + (version * 1000)));\n sb.append(C.getColumnValue());\n }\n });\n return sb.toString();\n }\n\n @Test\n @Parameters(method = \"getParameters\")\n public void testSCDType2UpdateInObserver(final int numberOfVersions) {\n final String tableName = getTestMethodName();\n createMTableWithCoprocessor(tableName);\n ingestRecords(tableName, numberOfVersions);\n scanRecords(tableName, numberOfVersions);\n deleteMTable(tableName);\n }\n}\n","avg_line_length":39.4300518135,"max_line_length":100,"alphanum_fraction":0.7139290407} {"size":1905,"ext":"java","lang":"Java","max_stars_count":3.0,"content":"\/*\n * Copyright 2002-2013 the original author or authors.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * 
You may obtain a copy of the License at\n *\n * http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\/\npackage org.springframework.security.samples.config;\n\nimport static org.junit.Assert.assertNotNull;\n\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.springframework.beans.factory.annotation.Autowired;\nimport org.springframework.context.annotation.ComponentScan;\nimport org.springframework.context.annotation.Configuration;\nimport org.springframework.security.oauth.examples.sparklr.config.SecurityConfiguration;\nimport org.springframework.security.oauth2.provider.endpoint.AuthorizationEndpoint;\nimport org.springframework.security.web.FilterChainProxy;\nimport org.springframework.test.context.ContextConfiguration;\nimport org.springframework.test.context.junit4.SpringJUnit4ClassRunner;\nimport org.springframework.test.context.web.WebAppConfiguration;\n\n\/**\n * @author Rob Winch\n *\n *\/\n@RunWith(SpringJUnit4ClassRunner.class)\n@ContextConfiguration\n@WebAppConfiguration\npublic class ApplicationConfigurationTests {\n\n @Configuration\n @ComponentScan(basePackageClasses = SecurityConfiguration.class)\n public static class Config {}\n\n @Autowired\n private FilterChainProxy springSecurityFilterChain;\n\n @Autowired\n private AuthorizationEndpoint endpoint;\n\n @Test\n public void securityConfigurationLoads() {\n \tassertNotNull(endpoint);\n }\n}\n","avg_line_length":34.0178571429,"max_line_length":88,"alphanum_fraction":0.7963254593} {"size":4118,"ext":"java","lang":"Java","max_stars_count":1.0,"content":"\/**\r\n * Copyright 2013 Canada Health Infoway, Inc.\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http:\/\/www.apache.org\/licenses\/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n *\r\n * Author: $LastChangedBy$\r\n * Last modified: $LastChangedDate$\r\n * Revision: $LastChangedRevision$\r\n *\/\r\n\r\npackage ca.infoway.messagebuilder.marshalling.hl7.formatter;\r\n\r\nimport java.math.BigDecimal;\r\n\r\nimport ca.infoway.messagebuilder.datatype.BareANY;\r\nimport ca.infoway.messagebuilder.error.Hl7Error;\r\nimport ca.infoway.messagebuilder.error.Hl7ErrorCode;\r\nimport ca.infoway.messagebuilder.marshalling.hl7.DataTypeHandler;\r\nimport ca.infoway.messagebuilder.marshalling.hl7.ModelToXmlResult;\r\nimport ca.infoway.messagebuilder.marshalling.hl7.RealConfFormat;\r\nimport ca.infoway.messagebuilder.marshalling.hl7.RealFormat;\r\nimport ca.infoway.messagebuilder.platform.NumberFormatter;\r\n\/**\r\n * REAL.CONF - BigDecimal [0,1]\r\n *\r\n * Represents a REAL.CONF object as an element:\r\n *\r\n * <element-name value=\"0.1234\"><\/element-name>\r\n *\r\n * If an object is null, value is replaced by a nullFlavor. 
So the element would look\r\n * like this:\r\n *\r\n * <element-name nullFlavor=\"something\" \/>\r\n *\r\n * http:\/\/www.hl7.org\/v3ballot\/html\/infrastructure\/itsxml\/datatypes-its-xml.htm#dtimpl-REAL\r\n *\r\n * The REAL.CONF variant defined by CHI can only contain positive values between 0 to 1 (inclusive). CHI also \r\n * defines maximum length 1 character to the left of the decimal point and 4 characters to the right.\r\n *\/\r\n@DataTypeHandler({\"REAL.CONF\"})\r\npublic class RealConfPropertyFormatter extends AbstractValueNullFlavorPropertyFormatter{\r\n\r\n\tprivate NumberFormatter numberFormatter = new NumberFormatter();\r\n\tprivate RealFormat realFormat = new RealConfFormat();\r\n\t\r\n @Override\r\n protected String getValue(BigDecimal bigDecimal, FormatContext context, BareANY bareAny) {\r\n \tvalidate(context, bigDecimal);\r\n \treturn this.numberFormatter.format(\r\n \t\t\tbigDecimal, \r\n \t\t\tthis.realFormat.getMaxValueLength(),\r\n \t\t\tthis.realFormat.getMaxIntegerPartLength(),\r\n \t\t\tdetermineScale(bigDecimal), \r\n \t\t\ttrue);\r\n }\r\n\r\n\tprivate int determineScale(BigDecimal bigDecimal) {\r\n\t\tboolean useBigDecimalScale = (bigDecimal.scale() >= 0 && bigDecimal.scale() < this.realFormat.getMaxDecimalPartLength());\r\n\t\treturn useBigDecimalScale ? bigDecimal.scale() : this.realFormat.getMaxDecimalPartLength();\r\n\t}\r\n \r\n\tprivate void validate(FormatContext context, BigDecimal bigDecimal) {\r\n\t\tModelToXmlResult modelToXmlResult = context.getModelToXmlResult();\r\n \tif (bigDecimal.compareTo(BigDecimal.ZERO) < 0 || bigDecimal.compareTo(BigDecimal.ONE) > 0){\r\n \t\trecordValueMustBeBetweenZeroAndOneError(context.getPropertyPath(), modelToXmlResult);\r\n \t}\r\n \tif (bigDecimal.scale() > realFormat.getMaxDecimalPartLength()) {\r\n \t\trecordTooManyDigitsToRightOfDecimalError(context.getPropertyPath(), modelToXmlResult);\r\n \t}\r\n\t}\r\n\r\n\tprivate void recordValueMustBeBetweenZeroAndOneError(String propertyPath, ModelToXmlResult modelToXmlResult) {\r\n\t\tmodelToXmlResult.addHl7Error(new Hl7Error(Hl7ErrorCode.DATA_TYPE_ERROR, \"Value for REAL.CONF must be between 0 and 1 (inclusive). Value may have been modified to fit format requirements.\", propertyPath));\r\n\t}\r\n\t\r\n\tprivate void recordTooManyDigitsToRightOfDecimalError(String propertyPath, ModelToXmlResult modelToXmlResult) {\r\n\t\tmodelToXmlResult.addHl7Error(new Hl7Error(Hl7ErrorCode.DATA_TYPE_ERROR, \"Value for REAL.CONF must have no more than \" + realFormat.getMaxDecimalPartLength() + \" digits to the right of the decimal. Value has been modified to fit format requirements.\", propertyPath));\r\n\t}\r\n\t\r\n}\r\n","avg_line_length":45.2527472527,"max_line_length":269,"alphanum_fraction":0.7532782904}
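A minimal sketch, purely illustrative and not part of any record above: the io.opentelemetry.sdk.trace SpanProcessor record earlier in this dump documents synchronous onStart/onEnd hooks, and a trivial implementation makes that contract concrete. The class name LoggingSpanProcessor is hypothetical, and the sketch assumes the opentelemetry-sdk-trace artifact is on the classpath.

import io.opentelemetry.context.Context;
import io.opentelemetry.sdk.trace.ReadWriteSpan;
import io.opentelemetry.sdk.trace.ReadableSpan;
import io.opentelemetry.sdk.trace.SpanProcessor;

// Hypothetical processor that logs span lifecycle events. Both callbacks run
// synchronously on the execution thread, so they must stay cheap and non-blocking.
public class LoggingSpanProcessor implements SpanProcessor {

    @Override
    public void onStart(Context parentContext, ReadWriteSpan span) {
        System.out.println("span started: " + span.getName());
    }

    @Override
    public boolean isStartRequired() {
        return true; // opt in to onStart callbacks
    }

    @Override
    public void onEnd(ReadableSpan span) {
        System.out.println("span ended: " + span.getName());
    }

    @Override
    public boolean isEndRequired() {
        return true; // opt in to onEnd callbacks
    }
}

Several such processors can be chained with SpanProcessor.composite(...), which the interface shown above defines to delegate to each processor in order.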