Columns: name (string, lengths 12-178), code_snippet (string, lengths 8-36.5k), score (float64, range 3.26-3.68)
incubator-hugegraph-toolchain_JDBCVendor_buildGetHeaderSql_rdh
/** * NOTE: don't add a semicolon (;) at the end of the Oracle SQL */@Override public String buildGetHeaderSql(JDBCSource source) { return String.format((("SELECT COLUMN_NAME " + "FROM USER_TAB_COLUMNS ") + "WHERE TABLE_NAME = %s ") + "ORDER BY COLUMN_ID", this.escape(source.table())); }
3.26
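A minimal, self-contained sketch of the SQL string the buildGetHeaderSql snippet above produces. The escape() helper below is a hypothetical stand-in (the real JDBCVendor.escape() is not shown in this row) that assumes single-quote wrapping; note that, per the snippet's NOTE, no trailing semicolon is appended for Oracle.

public class OracleHeaderSqlSketch {
    // Hypothetical stand-in for JDBCVendor.escape(); assumes single-quote wrapping
    static String escape(String value) {
        return "'" + value + "'";
    }

    public static void main(String[] args) {
        String sql = String.format("SELECT COLUMN_NAME " +
                                   "FROM USER_TAB_COLUMNS " +
                                   "WHERE TABLE_NAME = %s " +
                                   "ORDER BY COLUMN_ID", escape("ORDERS"));
        // Prints: SELECT COLUMN_NAME FROM USER_TAB_COLUMNS WHERE TABLE_NAME = 'ORDERS' ORDER BY COLUMN_ID
        System.out.println(sql);
    }
}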
incubator-hugegraph-toolchain_JDBCVendor_buildGteClauseInFlattened_rdh
/** * For databases that don't support selecting by WHERE (a, b, c) >= (va, vb, vc), * the condition (a, b, c) >= (va, vb, vc) will be converted as follows: * ("a" = va AND "b" = vb AND "c" >= vc) * OR * ("a" = va AND "b" > vb) * OR * ("a" > va) */ public String buildGteClauseInFlattened(Line nextStartRow) { E.checkNotNull(nextStartRow, "nextStartRow"); StringBuilder builder = new StringBuilder(); String[] names = nextStartRow.names(); Object[] values = nextStartRow.values(); for (int i = 0, n = names.length; i < n; i++) { builder.append("("); for (int j = 0; j < (n - i); j++) { String name = names[j]; Object value = values[j]; String operator = " = "; boolean appendAnd = true; if (j == ((n - i) - 1)) { appendAnd = false; if (i == 0) { operator = " >= "; } else { operator = " > "; } } builder.append("\"").append(name).append("\"").append(operator).append(this.escapeIfNeeded(value)); if (appendAnd) { builder.append(" AND "); } } builder.append(")"); if (i != (n - 1)) { builder.append(" OR "); } } return builder.toString(); }
3.26
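The flattening rule described in the Javadoc above is easiest to see on concrete values. Below is a self-contained sketch of the same expansion over plain arrays, assuming columns a, b, c and start values 1, 2, 3; the toolchain's Line and escapeIfNeeded() helpers are left out and values are appended as-is.

public class GteClauseSketch {
    static String buildGteClauseInFlattened(String[] names, Object[] values) {
        StringBuilder builder = new StringBuilder();
        for (int i = 0, n = names.length; i < n; i++) {
            builder.append("(");
            for (int j = 0; j < n - i; j++) {
                // The last column of each disjunct uses ">=" (first disjunct only) or ">", all others use "="
                String operator = (j == n - i - 1) ? (i == 0 ? " >= " : " > ") : " = ";
                builder.append('"').append(names[j]).append('"')
                       .append(operator).append(values[j]);
                if (j != n - i - 1) {
                    builder.append(" AND ");
                }
            }
            builder.append(")");
            if (i != n - 1) {
                builder.append(" OR ");
            }
        }
        return builder.toString();
    }

    public static void main(String[] args) {
        // Prints: ("a" = 1 AND "b" = 2 AND "c" >= 3) OR ("a" = 1 AND "b" > 2) OR ("a" > 1)
        System.out.println(buildGteClauseInFlattened(new String[]{"a", "b", "c"},
                                                     new Object[]{1, 2, 3}));
    }
}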
incubator-hugegraph-toolchain_JDBCVendor_m0_rdh
/** * For databases that support selecting by WHERE (a, b, c) >= (va, vb, vc) */ public String m0(Line nextStartRow) { E.checkNotNull(nextStartRow, "nextStartRow"); StringBuilder builder = new StringBuilder(); String[] names = nextStartRow.names(); Object[] values = nextStartRow.values(); builder.append("("); for (int i = 0, n = names.length; i < n; i++) { builder.append(names[i]); if (i != (n - 1)) { builder.append(", "); } } builder.append(") >= ("); for (int i = 0, n = values.length; i < n; i++) { Object value = values[i]; builder.append(this.escapeIfNeeded(value)); if (i != (n - 1)) { builder.append(", "); } } builder.append(")"); return builder.toString(); }
3.26
incubator-hugegraph-toolchain_MetricsManager_all_rdh
/** * The nesting level is too deep; the server may need to be optimized first */ public Map<String, Map<String, Object>> all() { return this.metricsAPI.all(); }
3.26
incubator-hugegraph-toolchain_ResultSet_parseResultClass_rdh
/** * TODO: Still need to constantly add and optimize */ private Class<?> parseResultClass(Object object) { if (object.getClass().equals(LinkedHashMap.class)) { @SuppressWarnings("unchecked") Map<String, Object> map = ((Map<String, Object>) (object)); String type = ((String) (map.get("type"))); if (type != null) { if ("vertex".equals(type)) { return Vertex.class; } else if ("edge".equals(type)) { return Edge.class; } } else if (map.get("labels") != null) { return Path.class; } } return object.getClass(); }
3.26
incubator-hugegraph-toolchain_FailLogger_writeHeaderIfNeeded_rdh
/** * Write the header to a dedicated file; every input struct has one */ private void writeHeaderIfNeeded() { // header() == null means no header is needed if (this.struct.input().header() == null) { return; } String header = JsonUtil.toJson(this.struct.input().header()); /* The files under failure path are like: mapping/failure-data/input-1.header */ String fileName = this.struct.id() + Constants.HEADER_SUFFIX; String filePath = Paths.get(this.file.getParent(), fileName).toString(); File headerFile = new File(filePath); String charset = this.struct.input().charset(); try { FileUtils.writeStringToFile(headerFile, header, charset); } catch (IOException e) { throw new LoadException("Failed to write header '%s'", e); } }
3.26
incubator-hugegraph-toolchain_ElementBuilder_retainField_rdh
/** * Retain only the key-value pairs needed by the current vertex or edge */ protected boolean retainField(String fieldName, Object fieldValue) { ElementMapping mapping = this.mapping(); Set<String> selectedFields = mapping.selectedFields(); Set<String> ignoredFields = mapping.ignoredFields(); // Retain selected fields or remove ignored fields if ((!selectedFields.isEmpty()) && (!selectedFields.contains(fieldName))) { return false; } if ((!ignoredFields.isEmpty()) && ignoredFields.contains(fieldName)) { return false; } String mappedKey = mapping.mappingField(fieldName); Set<String> nullableKeys = this.schemaLabel().nullableKeys(); Set<Object> nullValues = mapping.nullValues(); if (nullableKeys.isEmpty() || nullValues.isEmpty()) { return true; }return (!nullableKeys.contains(mappedKey)) || (!nullValues.contains(fieldValue)); }
3.26
incubator-hugegraph-toolchain_FileUtil_countLines_rdh
/** * NOTE: If there is no blank line at the end of the file, * one line will be missing */ public static int countLines(File file) { if (!file.exists()) { throw new IllegalArgumentException(String.format("The file %s doesn't exist", file)); } long fileLength = file.length(); try (FileInputStream fis = new FileInputStream(file);BufferedInputStream bis = new BufferedInputStream(fis)) { /* The last character may be an EOL or a non-EOL character. If it is the EOL, need to add 1 line; if it is the non-EOL, also need to add 1 line, because the next character means the EOF and should also be counted as a line. */ int number = 0; for (int i = 0; i < (fileLength - 1); i++) { if (bis.read() == '\n') { number++; } } if (fileLength > 0) { number++; } return number; } catch (IOException e) { throw new InternalException("Failed to count lines of file %s", file); } }
3.26
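The off-by-one NOTE in FileUtil.countLines above is easier to follow against its loop: only the first fileLength - 1 bytes are scanned for '\n', and one extra line is added for the final byte whether or not it is an EOL. A small sketch of the same counting rule over an in-memory byte array (no File handling or error wrapping, purely illustrative):

import java.nio.charset.StandardCharsets;

public class CountLinesSketch {
    // Same rule as FileUtil.countLines, applied to bytes already in memory
    static int countLines(byte[] content) {
        int number = 0;
        // Scan all bytes except the last one for EOL characters
        for (int i = 0; i < content.length - 1; i++) {
            if (content[i] == '\n') {
                number++;
            }
        }
        // The last byte counts as one more line, EOL or not
        if (content.length > 0) {
            number++;
        }
        return number;
    }

    public static void main(String[] args) {
        // Both return 2: the last byte is never scanned for '\n' but always adds one line
        System.out.println(countLines("a\nb\n".getBytes(StandardCharsets.UTF_8))); // 2
        System.out.println(countLines("a\nb".getBytes(StandardCharsets.UTF_8)));   // 2
    }
}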
incubator-hugegraph-toolchain_FileLineFetcher_checkMatchHeader_rdh
/** * Just match header for second or subsequent file first line */ private boolean checkMatchHeader(String line) { if ((!this.source().format().needHeader()) || (this.offset() != FIRST_LINE_OFFSET)) { return false; } assert this.source().header() != null; String[] columns = this.parser.split(line); return Arrays.equals(this.source().header(), columns); }
3.26
incubator-hugegraph-toolchain_FileLineFetcher_readHeader_rdh
/** * Read the first line of the first non-empty file as a header */ @Override public String[] readHeader(List<Readable> readables) { String[] header = null; for (Readable readable : readables) { this.openReader(readable); assert this.reader != null; try { String line = this.reader.readLine(); if (!StringUtils.isEmpty(line)) {header = this.parser.split(line); break; } } catch (IOException e) { throw new LoadException("Failed to read header from '%s'", e, readable); } finally { try { this.closeReader(); } catch (IOException e) { LOG.warn("Failed to close reader of '{}'", readable); } } } return header; }
3.26
incubator-hugegraph-toolchain_SplicingIdGenerator_splicing_rdh
/** * Concat multiple parts into a single id with ID_SPLITOR * * @param parts * the string id values to be spliced * @return spliced id object */ public static Id splicing(String... parts) { String escaped = IdUtil.escape(ID_SPLITOR, ESCAPE, parts); return IdGenerator.of(escaped); }
3.26
incubator-hugegraph-toolchain_SplicingIdGenerator_split_rdh
/** * Split a composite id into multiple ids with IDS_SPLITOR * * @param ids * the string id value to be split * @return split string values */ public static String[] split(String ids) { return IdUtil.unescape(ids, IDS_SPLITOR_STR, ESCAPE_STR); }
3.26
incubator-hugegraph-toolchain_SplicingIdGenerator_m0_rdh
/** * Concat property values with NAME_SPLITOR * * @param values * the property values to be concatenated * @return concatenated string value */ public static String m0(Object... values) { return concatValues(Arrays.asList(values)); }
3.26
incubator-hugegraph-toolchain_SplicingIdGenerator_parse_rdh
/** * Parse a single id into multiple parts with ID_SPLITOR * * @param id * the id object to be parsed * @return parsed string id parts */ public static String[] parse(Id id) { return IdUtil.unescape(id.asString(), ID_SPLITOR_STR, ESCAPE_STR); }
3.26
incubator-hugegraph-toolchain_SplicingIdGenerator_concat_rdh
/** * Generate a string id of HugeVertex from Vertex name */ // public Id generate(HugeVertex vertex) { // /* // * Hash for row-key which will be evenly distributed. // * We can also use LongEncoding.encode() to encode the int/long hash // * if needed. // * id = String.format("%s%s%s", HashUtil.hash(id), ID_SPLITOR, id); // */ // // TODO: use binary Id with binary fields instead of string id // return splicing(vertex.schemaLabel().id().asString(), vertex.name()); // } /** * Concat multiple ids into one composite id with IDS_SPLITOR * * @param ids * the string id values to be concatenated * @return concatenated string value */ public static String concat(String... ids) { // NOTE: must support string id when using this method return IdUtil.escape(IDS_SPLITOR, ESCAPE, ids); }
3.26
incubator-hugegraph-toolchain_SplicingIdGenerator_concatValues_rdh
/** * Concat property values with NAME_SPLITOR * * @param values * the property values to be concatenated * @return concatenated string value */ public static String concatValues(List<?> values) { // Convert the object list to string array int valuesSize = values.size(); String[] parts = new String[valuesSize]; for (int i = 0; i < valuesSize; i++) { parts[i] = values.get(i).toString(); } return IdUtil.escape(NAME_SPLITOR, ESCAPE, parts); }
3.26
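All of the SplicingIdGenerator helpers above delegate to IdUtil.escape/unescape with a splitter character and an escape character, neither of which is shown in these rows. The sketch below only illustrates the escape-then-join idea with hypothetical splitter and escape characters; it is not the real IdUtil implementation.

import java.util.StringJoiner;

public class EscapeJoinSketch {
    // Hypothetical splitter/escape characters; the real ID_SPLITOR/ESCAPE constants are defined elsewhere
    static final char SPLITTER = '>';
    static final char ESCAPE = '\\';

    // Escape any escape/splitter characters inside each part, then join with the splitter
    static String escapeAndJoin(String... parts) {
        StringJoiner joiner = new StringJoiner(String.valueOf(SPLITTER));
        for (String part : parts) {
            String escaped = part.replace(String.valueOf(ESCAPE), "" + ESCAPE + ESCAPE)
                                 .replace(String.valueOf(SPLITTER), "" + ESCAPE + SPLITTER);
            joiner.add(escaped);
        }
        return joiner.toString();
    }

    public static void main(String[] args) {
        // Prints: 1>marko\>jr  (the '>' inside the name is escaped so the id can be split back safely)
        System.out.println(escapeAndJoin("1", "marko>jr"));
    }
}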
incubator-hugegraph-toolchain_LicenseService_m0_rdh
/** * Keep these 2 methods for future use for now */ private static long m0(HugeClient client, String graph) { Map<String, Object> metrics = client.metrics().backend(graph); Object dataSize = metrics.get(METRICS_DATA_SIZE); if (dataSize == null) { return 0L; } Ex.check(dataSize instanceof String, "The backend metrics data_size must be String type, " + "but got '%s'(%s)", dataSize, dataSize.getClass()); // Unit is MB return displaySizeToMB(((String) (dataSize))); }
3.26
incubator-hugegraph-toolchain_PropertyKeyController_delete_rdh
/** * Should request "check_using" before delete */ @DeleteMapping public void delete(@PathVariable("connId") int connId, @RequestParam List<String> names, @RequestParam(name = "skip_using", defaultValue = "false") boolean skipUsing) { for (String name : names) { this.service.checkExist(name, connId);if (this.service.checkUsing(name, connId)) { if (skipUsing) { continue; } else { throw new ExternalException("schema.propertykey.in-using", name); } }this.service.remove(name, connId); } }
3.26
incubator-hugegraph-toolchain_DataTypeUtils_checkDataType_rdh
/** * Check whether the type of the value is valid */ private static boolean checkDataType(String key, Object value, DataType dataType) { if ((value instanceof Number) && dataType.isNumber()) { return parseNumber(key, value, dataType) != null; } return dataType.clazz().isInstance(value); }
3.26
incubator-hugegraph-toolchain_DataTypeUtil_checkDataType_rdh
/** * Check whether the type of the value is valid */ private static boolean checkDataType(String key, Object value, DataType dataType) { if ((value instanceof Number) && dataType.isNumber()) { return parseNumber(key, value, dataType) != null; } return dataType.clazz().isInstance(value); }
3.26
incubator-hugegraph-toolchain_DataTypeUtil_checkCollectionDataType_rdh
/** * Check whether the types of all the values (maybe some list properties) are valid */ private static boolean checkCollectionDataType(String key, Collection<?> values, DataType dataType) { for (Object value : values) { if (!checkDataType(key, value, dataType)) { return false; } } return true; }
3.26
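The checkDataType/checkCollectionDataType pair above special-cases numbers (delegating to parseNumber) and otherwise relies on DataType.clazz().isInstance(value). The sketch below mimics that dispatch with a stripped-down DataType enum and a rough stand-in for parseNumber; the real loader enum and parser are only assumed here.

public class DataTypeCheckSketch {
    // Stripped-down stand-in for the loader's DataType enum
    enum DataType {
        INT(Integer.class), DOUBLE(Double.class), TEXT(String.class), BOOLEAN(Boolean.class);

        private final Class<?> clazz;
        DataType(Class<?> clazz) { this.clazz = clazz; }
        Class<?> clazz() { return this.clazz; }
        boolean isNumber() { return this == INT || this == DOUBLE; }
    }

    // Numbers are accepted if they can be narrowed to the target numeric type; others must match the class
    static boolean checkDataType(Object value, DataType dataType) {
        if (value instanceof Number && dataType.isNumber()) {
            Number number = (Number) value;
            // Very rough stand-in for parseNumber(): accept any Number for DOUBLE,
            // and only whole values within int range for INT
            if (dataType == DataType.DOUBLE) {
                return true;
            }
            double d = number.doubleValue();
            return d == Math.rint(d) && d >= Integer.MIN_VALUE && d <= Integer.MAX_VALUE;
        }
        return dataType.clazz().isInstance(value);
    }

    public static void main(String[] args) {
        System.out.println(checkDataType(42L, DataType.INT));    // true (whole value fits in int)
        System.out.println(checkDataType(3.5, DataType.INT));    // false
        System.out.println(checkDataType("abc", DataType.TEXT)); // true
        System.out.println(checkDataType("abc", DataType.INT));  // false
    }
}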
incubator-hugegraph-toolchain_DataTypeUtil_parseMultiValues_rdh
/** * Collection format: "obj1,obj2,...,objn" or "[obj1,obj2,...,objn]" etc. * TODO: After parsing to json, the order of the collection may change * in some cases (such as list<date>) */ private static Object parseMultiValues(String key, Object values, DataType dataType, Cardinality cardinality, InputSource source) { // A JSON file should not be parsed again if ((values instanceof Collection) && checkCollectionDataType(key, ((Collection<?>) (values)), dataType)) { return values; } E.checkState(values instanceof String, "The value(key='%s') must be String type, " + "but got '%s'(%s)", key, values, values.getClass()); String rawValue = ((String) (values)); List<Object> valueColl = split(key, rawValue, source); Collection<Object> results = (cardinality == Cardinality.LIST) ? InsertionOrderUtil.newList() : InsertionOrderUtil.newSet(); valueColl.forEach(value -> { results.add(parseSingleValue(key, value, dataType, source)); }); E.checkArgument(checkCollectionDataType(key, results, dataType), "Not all collection elems %s match with data type %s", results, dataType); return results; }
3.26
incubator-hugegraph-toolchain_PropertyIndexService_list_rdh
/** * The sort result like that, content is 'name' * --------------+------------------------+--------------------------------- * base_value | index label name | fields * --------------+------------------------+--------------------------------- * xxxname | xxxByName | name * --------------+------------------------+--------------------------------- * | personByName | name * person +------------------------+--------------------------------- * | personByAgeAndName | age name * --------------+------------------------+--------------------------------- * | softwareByName | name * software +------------------------+--------------------------------- * | softwareByPriveAndName | price name * --------------+------------------------+--------------------------------- */ public IPage<PropertyIndex> list(int connId, HugeType type, String content, int pageNo, int pageSize) { HugeClient client = this.client(connId); List<IndexLabel> indexLabels = client.schema().getIndexLabels(); Map<String, List<PropertyIndex>> matchedResults = new HashMap<>(); Map<String, List<PropertyIndex>> unMatchResults = new HashMap<>(); for (IndexLabel indexLabel : indexLabels) {if (!indexLabel.baseType().equals(type)) { continue; } String v14 = indexLabel.baseValue(); List<PropertyIndex> groupedIndexes; // Collect indexlabels that contains content boolean match = v14.contains(content); if (match) { groupedIndexes = matchedResults.computeIfAbsent(v14, k -> new ArrayList<>()); } else { groupedIndexes = unMatchResults.computeIfAbsent(v14, k -> new ArrayList<>()); } match = (match || indexLabel.name().contains(content)) || indexLabel.indexFields().stream().anyMatch(f -> f.contains(content)); if (match) { groupedIndexes.add(convert(indexLabel)); } } // Sort matched results by relevance if (!StringUtils.isEmpty(content)) { for (Map.Entry<String, List<PropertyIndex>> entry : matchedResults.entrySet()) { List<PropertyIndex> groupedIndexes = entry.getValue(); groupedIndexes.sort(new Comparator<PropertyIndex>() { final int highScore = 2; final int lowScore = 1; @Override public int compare(PropertyIndex o1, PropertyIndex o2) { int o1Score = 0; if (o1.getName().contains(content)) { o1Score += highScore;} if (o1.getFields().stream().anyMatch(field -> field.contains(content))) { o1Score += lowScore; }int o2Score = 0; if (o2.getName().contains(content)) { o2Score += highScore; } if (o2.getFields().stream().anyMatch(field -> field.contains(content))) {o2Score += lowScore; } return o2Score - o1Score; } }); } } List<PropertyIndex> all = new ArrayList<>(); matchedResults.values().forEach(all::addAll); unMatchResults.values().forEach(all::addAll); return PageUtil.page(all, pageNo, pageSize); }
3.26
incubator-hugegraph-toolchain_LoadTaskService_updateLoadTaskProgress_rdh
/** * Update progress periodically */ @Async @Scheduled(fixedDelay = 1 * 1000) @Transactional(isolation = Isolation.READ_COMMITTED) public void updateLoadTaskProgress() { for (LoadTask task : this.runningTaskContainer.values()) { if (!task.getStatus().inRunning()) { continue; } task.lock(); try { if (task.getStatus().inRunning()) { LoadContext context = task.context(); long readLines = context.newProgress().totalInputRead(); if (readLines == 0L) { /* When the Context is just constructed, newProgress is empty. Only after parsing is started will use oldProgress and incrementally update newProgress, if get totalInputRead value during this process, it will return 0, so need read it from oldProgress */ readLines = context.oldProgress().totalInputRead(); } task.setFileReadLines(readLines); task.setCurrDuration(context.summary().totalTime()); this.update(task); } } finally { task.unlock(); } } }
3.26
incubator-hugegraph-toolchain_PropertyKeyService_checkUsing_rdh
/** * Check the property key is being used, used means that there is * any vertex label or edge label contains the property(name) */ public boolean checkUsing(String name, int connId) { HugeClient client = this.client(connId); List<VertexLabel> vertexLabels = client.schema().getVertexLabels(); for (VertexLabel vertexLabel : vertexLabels) { if (vertexLabel.properties().contains(name)) { return true; } } List<EdgeLabel> edgeLabels = client.schema().getEdgeLabels(); for (EdgeLabel edgeLabel : edgeLabels) { if (edgeLabel.properties().contains(name)) { return true; } } return false; }
3.26
incubator-hugegraph-toolchain_EdgeLabelController_checkDisplayFields_rdh
/** * TODO:merge with VertexLabelController.checkDisplayFields */ private static void checkDisplayFields(EdgeLabelEntity entity) {EdgeLabelStyle style = entity.getStyle();List<String> displayFields = style.getDisplayFields(); if (!CollectionUtils.isEmpty(displayFields)) { Set<String> nullableProps = entity.getNullableProps(); Ex.check(!CollectionUtil.hasIntersection(displayFields, nullableProps), "schema.display-fields.cannot-be-nullable"); } }
3.26
incubator-hugegraph-toolchain_EdgeLabelController_delete_rdh
/** * Deleting an edge label doesn't need to call checkUsing */ @DeleteMapping public void delete(@PathVariable("connId") int connId, @RequestParam("names") List<String> names) { for (String name : names) { this.elService.checkExist(name, connId); this.elService.remove(name, connId); } }
3.26
incubator-hugegraph-toolchain_HugeGraphLoader_stopThenShutdown_rdh
/** * TODO: How to distinguish load task finished normally or abnormally */ private synchronized void stopThenShutdown() { if (this.context.closed()) { return; } f0.info("Stop loading then shutdown HugeGraphLoader"); try { this.context.stopLoading(); if (this.manager != null) { // Wait all insert tasks stopped before exit this.manager.waitFinished(); this.manager.shutdown(); }} finally { try { this.context.unsetLoadingMode(); } finally { this.context.close(); } } }
3.26
incubator-hugegraph-toolchain_HugeGraphLoader_loadStruct_rdh
/** * TODO: Separate classes: ReadHandler -> ParseHandler -> InsertHandler * Let load task worked in pipeline mode */ private void loadStruct(InputStruct struct, InputReader reader) { f0.info("Start loading '{}'", struct); LoadMetrics metrics = this.context.summary().metrics(struct); metrics.startInFlight(); ParseTaskBuilder taskBuilder = new ParseTaskBuilder(this.context, struct); final int batchSize = this.context.options().batchSize; List<Line> lines = new ArrayList<>(batchSize); for (boolean finished = false; !finished;) { if (this.context.stopped()) { break; } try {// Read next line from data source if (reader.hasNext()) { Line next = reader.next(); if (Objects.nonNull(next)) {lines.add(next); metrics.increaseReadSuccess(); } } else { finished = true; } } catch (ReadException e) { metrics.increaseReadFailure(); this.handleReadFailure(struct, e); } // If read max allowed lines, stop loading boolean reachedMaxReadLines = this.reachedMaxReadLines(); if (reachedMaxReadLines) { finished = true; } if ((lines.size() >= batchSize) || finished) { List<ParseTaskBuilder.ParseTask> tasks = taskBuilder.build(lines); for (ParseTaskBuilder.ParseTask task : tasks) { this.executeParseTask(struct, task.mapping(), task); } // Confirm offset to avoid lost records reader.confirmOffset(); this.context.newProgress().markLoaded(struct, finished); this.handleParseFailure(); if (reachedMaxReadLines) { f0.warn("Read lines exceed limit, stopped loading tasks"); this.context.stopLoading(); } lines = new ArrayList<>(batchSize); } } metrics.stopInFlight(); f0.info("Finish loading '{}'", struct); }
3.26
incubator-hugegraph-toolchain_HugeGraphLoader_executeParseTask_rdh
/** * Execute parse task sync */ private void executeParseTask(InputStruct struct, ElementMapping mapping, ParseTaskBuilder.ParseTask task) { long start = System.currentTimeMillis(); // Sync parse List<List<Record>> batches = task.get(); long end = System.currentTimeMillis(); this.context.summary().addTimeRange(mapping.type(), start, end); if (this.context.options().dryRun || CollectionUtils.isEmpty(batches)) { return; } // Async load for (List<Record> batch : batches) { this.manager.submitBatch(struct, mapping, batch); } }
3.26
hibernate-validator_ConstraintDefinitionContribution_getConstraintType_rdh
/** * Returns the constraint annotation type for which this instance provides constraint validator instances. */ public Class<A> getConstraintType() { return constraintType; }
3.26
hibernate-validator_ConstraintDefinitionContribution_getValidatorDescriptors_rdh
/** * Returns a list of constraint validator descriptors for the constraint type of this instance. */ public List<ConstraintValidatorDescriptor<A>> getValidatorDescriptors() { return validatorDescriptors; } /** * Whether or not the existing constraint validators should be kept or not. * * @return {@code true} if the existing constraint validators for the constraint type wrapped by this instance should be kept, {@code false}
3.26
hibernate-validator_GroupSequenceCheck_getGroupSequence_rdh
/** * Find a {@code jakarta.validation.GroupSequence} annotation if one is present on given type ({@link TypeMirror}). */ private AnnotationMirror getGroupSequence(TypeMirror typeMirror) { // the annotation can be present only on TypeKind.DECLARED elements if (TypeKind.DECLARED.equals(typeMirror.getKind())) { for (AnnotationMirror annotationMirror : typeUtils.asElement(typeMirror).getAnnotationMirrors()) { if (AnnotationType.GROUP_SEQUENCE_ANNOTATION.equals(constraintHelper.getAnnotationType(annotationMirror))) { return annotationMirror; } } } return null; }
3.26
hibernate-validator_GroupSequenceCheck_redefinesDefaultGroupSequence_rdh
/** * Check if the given {@link TypeMirror} redefines the default group sequence for the annotated class. * <p> * Note that it is only the case if the annotated element is a class. */ private boolean redefinesDefaultGroupSequence(TypeElement annotatedElement, TypeMirror typeMirror) { return ElementKind.CLASS.equals(annotatedElement.getKind()) && typeUtils.isSameType(annotatedElement.asType(), typeMirror); }
3.26
hibernate-validator_HibernateConstraintViolationBuilder_enableExpressionLanguage_rdh
/** * Enable Expression Language with the default Expression Language feature level for the constraint violation * created by this builder if the chosen {@code MessageInterpolator} supports it. * <p> * If you enable this, you need to make sure your message template does not contain any unescaped user input (such as * the validated value): use {@code addExpressionVariable()} to inject properly escaped variables into the template. * * @since 6.2 */ @Incubating default HibernateConstraintViolationBuilder enableExpressionLanguage() { return enableExpressionLanguage(ExpressionLanguageFeatureLevel.DEFAULT); } /** * Enable Expression Language for the constraint violation created by this builder if the chosen * {@code MessageInterpolator} supports it. * <p> * If you enable this, you need to make sure your message template does not contain any unescaped user input (such as * the validated value): use {@code addExpressionVariable()}
3.26
hibernate-validator_ExecutableMetaData_addToExecutablesByDeclaringType_rdh
/** * Merges the given executable with the metadata contributed by other * providers for the same executable in the hierarchy. * * @param executable * The executable to merge. */ private void addToExecutablesByDeclaringType(ConstrainedExecutable executable) { Class<?> beanClass = executable.getCallable().getDeclaringClass(); ConstrainedExecutable mergedExecutable = executablesByDeclaringType.get(beanClass); if (mergedExecutable != null) {mergedExecutable = mergedExecutable.merge(executable); } else { mergedExecutable = executable; } executablesByDeclaringType.put(beanClass, mergedExecutable); }
3.26
hibernate-validator_ExecutableMetaData_assertCorrectnessOfConfiguration_rdh
/** * <p> * Checks the configuration of this method for correctness as per the * rules outlined in the Bean Validation specification, section 4.5.5 * ("Method constraints in inheritance hierarchies"). * </p> * <p> * In particular, overriding methods in sub-types may not add parameter * constraints and the return value of an overriding method may not be * marked as cascaded if the return value is marked as cascaded already * on the overridden method. * </p> * * @throws jakarta.validation.ConstraintDeclarationException * In case any of the rules mandated by the * specification are violated. */ private void assertCorrectnessOfConfiguration() { for (Entry<Class<?>, ConstrainedExecutable> entry : executablesByDeclaringType.entrySet()) { for (Entry<Class<?>, ConstrainedExecutable> otherEntry : executablesByDeclaringType.entrySet()) { for (MethodConfigurationRule rule : rules) { rule.apply(entry.getValue(), otherEntry.getValue()); } } } }
3.26
hibernate-validator_ExecutableMetaData_findParameterMetaData_rdh
/** * Finds the one executable from the underlying hierarchy with parameter * constraints. If no executable in the hierarchy is parameter constrained, * the parameter meta data from this builder's base executable is returned. * * @return The parameter meta data for this builder's executable. */ private List<ParameterMetaData> findParameterMetaData() { List<ParameterMetaData.Builder> parameterBuilders = null; for (ConstrainedExecutable oneExecutable : constrainedExecutables) { if (parameterBuilders == null) { parameterBuilders = newArrayList(); for (ConstrainedParameter oneParameter : oneExecutable.getAllParameterMetaData()) { parameterBuilders.add(new ParameterMetaData.Builder(callable.getDeclaringClass(), oneParameter, constraintCreationContext, parameterNameProvider)); } } else { int i = 0; for (ConstrainedParameter oneParameter : oneExecutable.getAllParameterMetaData()) { parameterBuilders.get(i).add(oneParameter); i++; } } } List<ParameterMetaData> parameterMetaDatas = newArrayList(); for (ParameterMetaData.Builder oneBuilder : parameterBuilders) { parameterMetaDatas.add(oneBuilder.build()); } return parameterMetaDatas; }
3.26
hibernate-validator_ExecutableMetaData_getParameterMetaData_rdh
/** * Returns meta data for the specified parameter of the represented executable. * * @param parameterIndex * the index of the parameter * @return Meta data for the specified parameter. Will never be {@code null}. */ public ParameterMetaData getParameterMetaData(int parameterIndex) { return parameterMetaDataList.get(parameterIndex); }
3.26
hibernate-validator_AnnotationMessageCheck_checkMessage_rdh
/** * Verifies that message passed as parameter is valid (passes a regexp check). * * @param message * a message to verify * @return {@code true} if message is valid, {@code false} otherwise */ protected boolean checkMessage(String message) { return MESSAGE_PATTERN.matcher(message).matches(); }
3.26
hibernate-validator_ModCheckValidator_isCheckDigitValid_rdh
/** * Check if the input passes the Mod10 (Luhn algorithm implementation only) or Mod11 test * * @param digits * the digits over which to calculate the Mod10 or Mod11 checksum * @param checkDigit * the check digit * @return {@code true} if the mod 10/11 result matches the check digit, {@code false} otherwise */@Override public boolean isCheckDigitValid(List<Integer> digits, char checkDigit) { int modResult = -1; int checkValue = extractDigit(checkDigit); if (f0.equals(ModType.MOD11)) { modResult = ModUtil.calculateMod11Check(digits, multiplier); if ((modResult == 10) || (modResult == 11)) { modResult = 0; } } else {modResult = ModUtil.calculateLuhnMod10Check(digits); } return checkValue == modResult; }
3.26
hibernate-validator_ModUtil_calculateLuhnMod10Check_rdh
/** * Calculate Luhn Modulo 10 checksum (Luhn algorithm implementation) * * @param digits * The digits over which to calculate the checksum * @return the result of the mod10 checksum calculation */ public static int calculateLuhnMod10Check(final List<Integer> digits) { int sum = 0; boolean even = true; for (int index = digits.size() - 1; index >= 0; index--) { int digit = digits.get(index); if (even) {digit <<= 1; } if (digit > 9) { digit -= 9; } sum += digit; even = !even; } return (10 - (sum % 10)) % 10; }
3.26
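The Luhn method above computes the check digit to append to the given digits (the rightmost digit is doubled first). A self-contained worked example using the classic 7992739871 case, whose Luhn check digit is 3:

import java.util.ArrayList;
import java.util.List;

public class LuhnMod10Sketch {
    // Same algorithm as calculateLuhnMod10Check: double every second digit starting from the right
    static int calculateLuhnMod10Check(List<Integer> digits) {
        int sum = 0;
        boolean even = true;
        for (int index = digits.size() - 1; index >= 0; index--) {
            int digit = digits.get(index);
            if (even) {
                digit <<= 1;    // double the digit
            }
            if (digit > 9) {
                digit -= 9;     // same as summing the two digits of the doubled value
            }
            sum += digit;
            even = !even;
        }
        return (10 - (sum % 10)) % 10;
    }

    public static void main(String[] args) {
        List<Integer> digits = new ArrayList<>();
        for (char c : "7992739871".toCharArray()) {
            digits.add(c - '0');
        }
        // The weighted sum is 67, so the check digit is (10 - 67 % 10) % 10 = 3
        // and the full Luhn-valid number is 79927398713
        System.out.println(calculateLuhnMod10Check(digits)); // 3
    }
}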
hibernate-validator_ModUtil_calculateMod10Check_rdh
/** * Calculate Generic Modulo 10 checksum * * @param digits * The digits over which to calculate the checksum * @param multiplier * Multiplier used for the odd digits in the algorithm * @param weight * Multiplier used for the even digits in the algorithm * @return the result of the mod10 checksum calculation */ public static int calculateMod10Check(final List<Integer> digits, int multiplier, int weight) { int sum = 0; boolean even = true; for (int index = digits.size() - 1; index >= 0; index--) { int digit = digits.get(index); if (even) { digit *= multiplier; } else { digit *= weight; } sum += digit; even = !even; }return (10 - (sum % 10)) % 10; }
3.26
hibernate-validator_ModUtil_calculateMod11Check_rdh
/** * Calculate Modulo 11 checksum assuming that the threshold is Integer.MAX_VALUE * * @param digits * the digits for which to calculate the checksum * @return the result of the mod11 checksum calculation */ public static int calculateMod11Check(final List<Integer> digits) { return calculateMod11Check(digits, Integer.MAX_VALUE); }
3.26
hibernate-validator_ModUtil_calculateModXCheckWithWeights_rdh
/** * Calculate Modulo {@code moduloParam} checksum with given weights. If no weights are provided, weights similar to the Modulo 11 checksum will be used. * If not enough weights are provided, the given weights will be reused in a looped manner. * * @param digits * the digits for which to calculate the checksum * @param moduloParam * modulo parameter to be used * @param weights * weights for the sum. * @return the result of mod checksum calculation */ public static int calculateModXCheckWithWeights(final List<Integer> digits, int moduloParam, final int threshold, int... weights) { int sum = 0; int multiplier = 1; for (int index = digits.size() - 1; index >= 0; index--) { if (weights.length != 0) { multiplier = weights[(weights.length - (index % weights.length)) - 1]; } else { multiplier++; if (multiplier > threshold) { multiplier = 2; } } sum += digits.get(index) * multiplier; } return moduloParam - (sum % moduloParam); }
3.26
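When calculateModXCheckWithWeights is called without weights, it falls back to the classic Modulo 11 multipliers 2, 3, 4, ... from the rightmost digit (restarting at 2 once the threshold is exceeded) and returns moduloParam - (sum % moduloParam), which can be 10 or 11; callers such as ModCheckValidator map those values to 0. A small sketch of just that fallback path, with a worked example:

import java.util.Arrays;
import java.util.List;

public class Mod11Sketch {
    // Same fallback as calculateModXCheckWithWeights with no weights and modulo 11:
    // multipliers 2, 3, 4, ... from the rightmost digit, restarting at 2 above the threshold
    static int calculateMod11Check(List<Integer> digits, int threshold) {
        int sum = 0;
        int multiplier = 1;
        for (int index = digits.size() - 1; index >= 0; index--) {
            multiplier++;
            if (multiplier > threshold) {
                multiplier = 2;
            }
            sum += digits.get(index) * multiplier;
        }
        return 11 - (sum % 11);
    }

    public static void main(String[] args) {
        // 4*2 + 3*3 + 2*4 = 25, and 25 % 11 = 3, so the method returns 11 - 3 = 8
        System.out.println(calculateMod11Check(Arrays.asList(2, 3, 4), Integer.MAX_VALUE));
    }
}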
hibernate-validator_ConstraintValidatorFactoryImpl_run_rdh
/** * Runs the given privileged action, using a privileged block if required. * <p> * <b>NOTE:</b> This must never be changed into a publicly available method to avoid execution of arbitrary * privileged actions within HV's protection domain. */ @IgnoreForbiddenApisErrors(reason = "SecurityManager is deprecated in JDK17") private <T> T run(PrivilegedAction<T> action) { return System.getSecurityManager() != null ? AccessController.doPrivileged(action) : action.run(); }
3.26
hibernate-validator_INNValidator_checkChecksumPersonalINN_rdh
/** * Check the digits for personal INN using algorithm from * <a href="https://ru.wikipedia.org/wiki/%D0%98%D0%B4%D0%B5%D0%BD%D1%82%D0%B8%D1%84%D0%B8%D0%BA%D0%B0%D1%86%D0%B8%D0%BE%D0%BD%D0%BD%D1%8B%D0%B9_%D0%BD%D0%BE%D0%BC%D0%B5%D1%80_%D0%BD%D0%B0%D0%BB%D0%BE%D0%B3%D0%BE%D0%BF%D0%BB%D0%B0%D1%82%D0%B5%D0%BB%D1%8C%D1%89%D0%B8%D0%BA%D0%B0#%D0%92%D1%8B%D1%87%D0%B8%D1%81%D0%BB%D0%B5%D0%BD%D0%B8%D0%B5_%D0%BA%D0%BE%D0%BD%D1%82%D1%80%D0%BE%D0%BB%D1%8C%D0%BD%D1%8B%D1%85_%D1%86%D0%B8%D1%84%D1%80">Wikipedia</a>. */ private static boolean checkChecksumPersonalINN(int[] digits) { final int checkSum11 = getCheckSum(digits, INDIVIDUAL_WEIGHTS_11); final int checkSum12 = getCheckSum(digits, INDIVIDUAL_WEIGHTS_12); final boolean isCheckSum11Correct = checkSum11 == digits[digits.length - 2]; final boolean isCheckSum12Correct = checkSum12 == digits[digits.length - 1]; return isCheckSum11Correct && isCheckSum12Correct; }
3.26
hibernate-validator_INNValidator_checkChecksumJuridicalINN_rdh
/** * Check the digits for juridical INN using algorithm from * <a href="https://ru.wikipedia.org/wiki/%D0%98%D0%B4%D0%B5%D0%BD%D1%82%D0%B8%D1%84%D0%B8%D0%BA%D0%B0%D1%86%D0%B8%D0%BE%D0%BD%D0%BD%D1%8B%D0%B9_%D0%BD%D0%BE%D0%BC%D0%B5%D1%80_%D0%BD%D0%B0%D0%BB%D0%BE%D0%B3%D0%BE%D0%BF%D0%BB%D0%B0%D1%82%D0%B5%D0%BB%D1%8C%D1%89%D0%B8%D0%BA%D0%B0#%D0%92%D1%8B%D1%87%D0%B8%D1%81%D0%BB%D0%B5%D0%BD%D0%B8%D0%B5_%D0%BA%D0%BE%D0%BD%D1%82%D1%80%D0%BE%D0%BB%D1%8C%D0%BD%D1%8B%D1%85_%D1%86%D0%B8%D1%84%D1%80">Wikipedia</a>. */ private static boolean checkChecksumJuridicalINN(int[] digits) { final int checkSum = getCheckSum(digits, JURIDICAL_WEIGHTS); return digits[digits.length - 1] == checkSum; }
3.26
hibernate-validator_AnnotationProxy_equals_rdh
/** * Performs an equality check as described in {@link Annotation#equals(Object)}. * * @param obj * The object to compare * @return Whether the given object is equal to this annotation proxy or not * @see Annotation#equals(Object) */ @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (!descriptor.getType().isInstance(obj)) { return false; }Annotation other = descriptor.getType().cast(obj); Map<String, Object> otherAttributes = getAnnotationAttributes(other); if (descriptor.getAttributes().size() != otherAttributes.size()) { return false; } // compare annotation member values for (Entry<String, Object> member : descriptor.getAttributes().entrySet()) { Object value = member.getValue(); Object otherValue = otherAttributes.get(member.getKey()); if (!areEqual(value, otherValue)) { return false; } } return true; } /** * Calculates the hash code of this annotation proxy as described in * {@link Annotation#hashCode()}
3.26
hibernate-validator_AnnotationProxy_run_rdh
/** * Runs the given privileged action, using a privileged block if required. * <p> * <b>NOTE:</b> This must never be changed into a publicly available method to avoid execution of arbitrary * privileged actions within HV's protection domain. */ @IgnoreForbiddenApisErrors(reason = "SecurityManager is deprecated in JDK17") private <T> T run(PrivilegedAction<T> action) { return System.getSecurityManager() != null ? AccessController.doPrivileged(action) : action.run(); }
3.26
hibernate-validator_PathImpl_hashCode_rdh
// deferred hash code building @Override public int hashCode() { if (f1 == (-1)) { f1 = buildHashCode(); } return f1; }
3.26
hibernate-validator_PathImpl_isValidJavaIdentifier_rdh
/** * Validate that the given identifier is a valid Java identifier according to the Java Language Specification, * <a href="http://docs.oracle.com/javase/specs/jls/se8/html/jls-3.html#jls-3.8">chapter 3.8</a> * * @param identifier * string identifier to validate * @return true if the given identifier is a valid Java Identifier * @throws IllegalArgumentException * if the given identifier is {@code null} */ private static boolean isValidJavaIdentifier(String identifier) { Contracts.assertNotNull(identifier, "identifier param cannot be null"); if ((identifier.length() == 0) || (!Character.isJavaIdentifierStart(((int) (identifier.charAt(0)))))) { return false; } for (int i = 1; i < identifier.length(); i++) { if (!Character.isJavaIdentifierPart(((int) (identifier.charAt(i))))) { return false; } } return true; }
3.26
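isValidJavaIdentifier above is a direct application of Character.isJavaIdentifierStart/Part. A minimal sketch of the same rule (without the Contracts null check) and a few sample inputs:

public class JavaIdentifierSketch {
    static boolean isValidJavaIdentifier(String identifier) {
        // The first character must be a valid identifier start, the rest valid identifier parts
        if (identifier.isEmpty() || !Character.isJavaIdentifierStart(identifier.charAt(0))) {
            return false;
        }
        for (int i = 1; i < identifier.length(); i++) {
            if (!Character.isJavaIdentifierPart(identifier.charAt(i))) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        System.out.println(isValidJavaIdentifier("myField1")); // true
        System.out.println(isValidJavaIdentifier("_cache"));   // true
        System.out.println(isValidJavaIdentifier("1abc"));     // false (digit cannot start an identifier)
        System.out.println(isValidJavaIdentifier("my-field")); // false ('-' is not an identifier part)
    }
}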
hibernate-validator_CollectionHelper_iterableFromArray_rdh
/** * Builds an {@link Iterable} for a given array. It is (un)necessarily ugly because we have to deal with array of primitives. * * @param object * a given array * @return an {@code Iterable} providing iterators over the array */ // Reflection is used to ensure the correct types are used @SuppressWarnings({ "unchecked", "rawtypes" }) public static Iterable<?> iterableFromArray(Object object) {return new ArrayIterable(accessorFromArray(object), object); }
3.26
hibernate-validator_CollectionHelper_getInitialCapacityFromExpectedSize_rdh
/** * As the default loadFactor is of 0.75, we need to calculate the initial capacity from the expected size to avoid * resizing the collection when we populate the collection with all the initial elements. We use a calculation * similar to what is done in {@link HashMap#putAll(Map)}. * * @param expectedSize * the expected size of the collection * @return the initial capacity of the collection */ private static int getInitialCapacityFromExpectedSize(int expectedSize) { if (expectedSize < 3) { return expectedSize + 1; }return ((int) ((((float) (expectedSize)) / 0.75F) + 1.0F)); }
3.26
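The capacity formula above mirrors HashMap.putAll: with the default 0.75 load factor, sizing to expectedSize / 0.75 + 1 keeps the expected number of entries under the resize threshold. A quick sketch showing the values it produces (purely arithmetic, no HashMap involved):

public class InitialCapacitySketch {
    static int getInitialCapacityFromExpectedSize(int expectedSize) {
        if (expectedSize < 3) {
            return expectedSize + 1;
        }
        return (int) ((float) expectedSize / 0.75F + 1.0F);
    }

    public static void main(String[] args) {
        // expectedSize -> initial capacity: 2 -> 3, 12 -> 17, 100 -> 134
        for (int size : new int[]{2, 12, 100}) {
            System.out.println(size + " -> " + getInitialCapacityFromExpectedSize(size));
        }
    }
}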
hibernate-validator_CollectionHelper_iteratorFromArray_rdh
/** * Builds an {@link Iterator} for a given array. It is (un)necessarily ugly because we have to deal with array of primitives. * * @param object * a given array * @return an {@code Iterator} iterating over the array */ // Reflection is used to ensure the correct types are used @SuppressWarnings({ "unchecked", "rawtypes" }) public static Iterator<?> iteratorFromArray(Object object) { return new ArrayIterator(accessorFromArray(object), object);}
3.26
hibernate-validator_REGONValidator_getWeights_rdh
/** * * @param digits * a list of digits to be verified. They are used to determine the size of the REGON number - whether it is a 9- or 14-digit number * @return an array of weights to be used to calculate a checksum */ @Override protected int[] getWeights(List<Integer> digits) { if (digits.size() == 8) { return WEIGHTS_REGON_9; } else if (digits.size() == 13) { return WEIGHTS_REGON_14; } else { return new int[]{ }; } }
3.26
hibernate-validator_ValidationBootstrapParameters_run_rdh
/** * Runs the given privileged action, using a privileged block if required. * <p> * <b>NOTE:</b> This must never be changed into a publicly available method to avoid execution of arbitrary * privileged actions within HV's protection domain. */ @IgnoreForbiddenApisErrors(reason = "SecurityManager is deprecated in JDK17") private <T> T run(PrivilegedAction<T> action) { return System.getSecurityManager() != null ? AccessController.doPrivileged(action) : action.run(); }
3.26
hibernate-validator_ExecutableHelper_run_rdh
/** * Runs the given privileged action, using a privileged block if required. * <p> * <b>NOTE:</b> This must never be changed into a publicly available method to avoid execution of arbitrary * privileged actions within HV's protection domain. */ @IgnoreForbiddenApisErrors(reason = "SecurityManager is deprecated in JDK17") private <T> T run(PrivilegedAction<T> action) { return System.getSecurityManager() != null ? AccessController.doPrivileged(action) : action.run(); }
3.26
hibernate-validator_ExecutableHelper_instanceMethodParametersResolveToSameTypes_rdh
/** * Whether the parameters of the two given instance methods resolve to the same types or not. Takes type parameters into account. * * @param subTypeMethod * a method on a subtype * @param superTypeMethod * a method on a supertype * @return {@code true} if the parameters of the two methods resolve to the same types, {@code false} otherwise. */ private boolean instanceMethodParametersResolveToSameTypes(Method subTypeMethod, Method superTypeMethod) { return instanceMethodParametersResolveToSameTypes(subTypeMethod.getDeclaringClass(), subTypeMethod, superTypeMethod); }
3.26
hibernate-validator_ExecutableHelper_getExecutableAsString_rdh
/** * Returns a string representation of an executable with the given name and parameter types in the form * {@code <name>(<parameterType 0> ... <parameterType n>)}, e.g. for logging purposes. * * @param name * the name of the executable * @param parameterTypes * the types of the executable's parameters * @return A string representation of the given executable. */ public static String getExecutableAsString(String name, Class<?>... parameterTypes) {StringBuilder signature = new StringBuilder((name.length() + 2) + (parameterTypes.length * 25)); signature.append(name).append('('); boolean separator = false; for (Class<?> parameterType : parameterTypes) { if (separator) { signature.append(", "); } else { separator = true; } signature.append(parameterType.getSimpleName()); } signature.append(')'); return signature.toString(); }
3.26
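getExecutableAsString above only builds a display string (the StringBuilder size is just a capacity hint). A short usage sketch showing the output format:

public class ExecutableAsStringSketch {
    static String getExecutableAsString(String name, Class<?>... parameterTypes) {
        StringBuilder signature = new StringBuilder(name.length() + 2 + parameterTypes.length * 25);
        signature.append(name).append('(');
        boolean separator = false;
        for (Class<?> parameterType : parameterTypes) {
            if (separator) {
                signature.append(", ");
            } else {
                separator = true;
            }
            // Simple names only, e.g. "String" instead of "java.lang.String"
            signature.append(parameterType.getSimpleName());
        }
        signature.append(')');
        return signature.toString();
    }

    public static void main(String[] args) {
        // Prints: setName(String, int)
        System.out.println(getExecutableAsString("setName", String.class, int.class));
    }
}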
hibernate-validator_ConfigurationSource_getPriority_rdh
/** * Returns this sources priority. Can be used to determine which * configuration shall apply in case of conflicting configurations by * several providers. * * @return This source's priority. */public int getPriority() { return priority; } /** * Returns that configuration source from the given two sources, which has * the higher priority. * * @param a * A configuration source. * @param b * Another configuration source. * @return The source with the higher priority. Will be source {@code a}
3.26
hibernate-validator_PredefinedScopeBeanMetaDataManager_createBeanMetaData_rdh
/** * Creates a {@link org.hibernate.validator.internal.metadata.aggregated.BeanMetaData} containing the meta data from all meta * data providers for the given type and its hierarchy. * * @param <T> * The type of interest. * @param clazz * The type's class. * @return A bean meta data object for the given type. */ private static <T> BeanMetaDataImpl<T> createBeanMetaData(ConstraintCreationContext constraintCreationContext, ExecutableHelper executableHelper, ExecutableParameterNameProvider parameterNameProvider, JavaBeanHelper javaBeanHelper, ValidationOrderGenerator validationOrderGenerator, List<MetaDataProvider> optionalMetaDataProviders, MethodValidationConfiguration methodValidationConfiguration, List<MetaDataProvider> metaDataProviders, Class<T> clazz) { BeanMetaDataBuilder<T> builder = BeanMetaDataBuilder.getInstance(constraintCreationContext, executableHelper, parameterNameProvider, validationOrderGenerator, clazz, methodValidationConfiguration); for (MetaDataProvider provider : metaDataProviders) { for (BeanConfiguration<? super T> beanConfiguration : m0(provider, clazz)) { builder.add(beanConfiguration); } }return builder.build(); }
3.26
hibernate-validator_PredefinedScopeBeanMetaDataManager_getAnnotationProcessingOptionsFromNonDefaultProviders_rdh
/** * * @return returns the annotation ignores from the non annotation based meta data providers */ private static AnnotationProcessingOptions getAnnotationProcessingOptionsFromNonDefaultProviders(List<MetaDataProvider> optionalMetaDataProviders) { AnnotationProcessingOptions options = new AnnotationProcessingOptionsImpl(); for (MetaDataProvider metaDataProvider : optionalMetaDataProviders) { options.merge(metaDataProvider.getAnnotationProcessingOptions()); } return options; } /** * Returns a list with the configurations for all types contained in the given type's hierarchy (including * implemented interfaces) starting at the specified type. * * @param beanClass * The type of interest. * @param <T> * The type of the class to get the configurations for. * @return A set with the configurations for the complete hierarchy of the given type. May be empty, but never {@code null}
3.26
hibernate-validator_JavaBeanField_run_rdh
/** * Runs the given privileged action, using a privileged block if required. * <p> * <b>NOTE:</b> This must never be changed into a publicly available method to avoid execution of arbitrary * privileged actions within HV's protection domain. */ @IgnoreForbiddenApisErrors(reason = "SecurityManager is deprecated in JDK17") private static <T> T run(PrivilegedAction<T> action) { return System.getSecurityManager() != null ? AccessController.doPrivileged(action) : action.run(); }
3.26
hibernate-validator_JavaBeanField_getAccessible_rdh
/** * Returns an accessible copy of the given member. */ @IgnoreForbiddenApisErrors(reason = "SecurityManager is deprecated in JDK17") private static Field getAccessible(Field original) { SecurityManager sm = System.getSecurityManager(); if (sm != null) { sm.checkPermission(HibernateValidatorPermission.ACCESS_PRIVATE_MEMBERS); } Class<?> clazz = original.getDeclaringClass(); return run(GetDeclaredField.andMakeAccessible(clazz, original.getName())); }
3.26
hibernate-validator_Contracts_assertNotNull_rdh
/** * Asserts that the given object is not {@code null}. * * @param o * The object to check. * @param message * A message text which will be used as message of the resulting * exception if the given object is {@code null}. * @throws IllegalArgumentException * In case the given object is {@code null}. */ public static void assertNotNull(Object o, String message) { if (o == null) { throw LOG.getIllegalArgumentException(message); }}
3.26
hibernate-validator_Contracts_assertValueNotNull_rdh
/** * Asserts that the given object is not {@code null}. * * @param o * The object to check. * @param name * The name of the value to check. A message of the form * "&lt;name&gt; must not be null" will be used as message of * the resulting exception if the given object is {@code null}. * @throws IllegalArgumentException * In case the given object is {@code null}. */ public static void assertValueNotNull(Object o, String name) { if (o == null) { throw LOG.getIllegalArgumentException(MESSAGES.mustNotBeNull(name)); } }
3.26
hibernate-validator_NotEmptyValidatorForArraysOfInt_isValid_rdh
/** * Checks the array is not {@code null} and not empty. * * @param array * the array to validate * @param constraintValidatorContext * context in which the constraint is evaluated * @return returns {@code true} if the array is not {@code null} and the array is not empty */ @Override public boolean isValid(int[] array, ConstraintValidatorContext constraintValidatorContext) { if (array == null) { return false; } return array.length > 0; }
3.26
hibernate-validator_MetaConstraints_getWrappedValueType_rdh
/** * Returns the sub-types binding for the single type parameter of the super-type. E.g. for {@code IntegerProperty} * and {@code Property<T>}, {@code Integer} would be returned. */ private static Class<?> getWrappedValueType(TypeResolutionHelper typeResolutionHelper, Type declaredType, ValueExtractorDescriptor valueExtractorDescriptor) { ResolvedType resolvedType = typeResolutionHelper.getTypeResolver().resolve(declaredType); List<ResolvedType> resolvedTypeParameters = resolvedType.typeParametersFor(valueExtractorDescriptor.getContainerType()); if ((resolvedTypeParameters == null) || resolvedTypeParameters.isEmpty()) { throw LOG.getNoValueExtractorFoundForUnwrapException(declaredType); } return resolvedTypeParameters.get(TypeVariables.getTypeParameterIndex(valueExtractorDescriptor.getExtractedTypeParameter())).getErasedType(); }
3.26
hibernate-validator_TypeResolutionHelper_getTypeResolver_rdh
/** * * @return the typeResolver */ public TypeResolver getTypeResolver() { return typeResolver; }
3.26
hibernate-validator_AnnotationFactory_run_rdh
/** * Runs the given privileged action, using a privileged block if required. * <p> * <b>NOTE:</b> This must never be changed into a publicly available method to avoid execution of arbitrary * privileged actions within HV's protection domain. */ @IgnoreForbiddenApisErrors(reason = "SecurityManager is deprecated in JDK17") private static <T> T run(PrivilegedAction<T> action) { return System.getSecurityManager() != null ? AccessController.doPrivileged(action) : action.run(); }
3.26
hibernate-validator_ValidatorImpl_validateCascadedConstraints_rdh
/** * Validates all cascaded constraints for the given bean using the current group set in the execution context. * This method must always be called after validateConstraints for the same context. * * @param validationContext * The execution context * @param valueContext * Collected information for single validation */ private void validateCascadedConstraints(BaseBeanValidationContext<?> validationContext, ValueContext<?, Object> valueContext) { Validatable validatable = valueContext.getCurrentValidatable(); ValueContext.ValueState<Object> v54 = valueContext.getCurrentValueState(); for (Cascadable cascadable : validatable.getCascadables()) { valueContext.appendNode(cascadable);if (isCascadeRequired(validationContext, valueContext.getCurrentBean(), valueContext.getPropertyPath(), cascadable.getConstraintLocationKind())) { Object value = getCascadableValue(validationContext, valueContext.getCurrentBean(), cascadable); CascadingMetaData cascadingMetaData = cascadable.getCascadingMetaData(); if (value != null) { CascadingMetaData effectiveCascadingMetaData = cascadingMetaData.addRuntimeContainerSupport(valueExtractorManager, value.getClass()); // validate cascading on the annotated object if (effectiveCascadingMetaData.isCascading()) { validateCascadedAnnotatedObjectForCurrentGroup(value, validationContext, valueContext, effectiveCascadingMetaData); } if (effectiveCascadingMetaData.isContainer()) { ContainerCascadingMetaData containerCascadingMetaData = effectiveCascadingMetaData.as(ContainerCascadingMetaData.class); if (containerCascadingMetaData.hasContainerElementsMarkedForCascading()) { // validate cascading on the container elements validateCascadedContainerElementsForCurrentGroup(value, validationContext, valueContext, containerCascadingMetaData.getContainerElementTypesCascadingMetaData()); } } } } // reset the value context valueContext.resetValueState(v54); } }
3.26
hibernate-validator_ValidatorImpl_validateInContext_rdh
/** * Validates the given object using the available context information. * * @param validationContext * the global validation context * @param valueContext * the current validation context * @param validationOrder * Contains the information which and in which order groups have to be executed * @param <T> * The root bean type * @return Set of constraint violations or the empty set if there were no violations. */ private <T, U> Set<ConstraintViolation<T>> validateInContext(BaseBeanValidationContext<T> validationContext, BeanValueContext<U, Object> valueContext, ValidationOrder validationOrder) { if (valueContext.getCurrentBean() == null) { return Collections.emptySet(); } BeanMetaData<U> beanMetaData = valueContext.getCurrentBeanMetaData(); if (beanMetaData.isDefaultGroupSequenceRedefined()) { validationOrder.assertDefaultGroupSequenceIsExpandable(beanMetaData.getDefaultGroupSequence(valueContext.getCurrentBean())); } // process first single groups. For these we can optimise object traversal by first running all validations on the current bean // before traversing the object. Iterator<Group> groupIterator = validationOrder.getGroupIterator(); while (groupIterator.hasNext()) { Group group = groupIterator.next(); valueContext.setCurrentGroup(group.getDefiningClass()); validateConstraintsForCurrentGroup(validationContext, valueContext); if (shouldFailFast(validationContext)) { return validationContext.getFailingConstraints(); } } groupIterator = validationOrder.getGroupIterator(); while (groupIterator.hasNext()) { Group group = groupIterator.next(); valueContext.setCurrentGroup(group.getDefiningClass()); validateCascadedConstraints(validationContext, valueContext); if (shouldFailFast(validationContext)) { return validationContext.getFailingConstraints(); } } // now we process sequences. For sequences I have to traverse the object graph since I have to stop processing when an error occurs. Iterator<Sequence> sequenceIterator = validationOrder.getSequenceIterator(); while (sequenceIterator.hasNext()) { Sequence sequence = sequenceIterator.next(); for (GroupWithInheritance groupOfGroups : sequence) { int numberOfViolations = validationContext.getFailingConstraints().size(); for (Group group : groupOfGroups) { valueContext.setCurrentGroup(group.getDefiningClass()); validateConstraintsForCurrentGroup(validationContext, valueContext); if (shouldFailFast(validationContext)) { return validationContext.getFailingConstraints(); } validateCascadedConstraints(validationContext, valueContext); if (shouldFailFast(validationContext)) {return validationContext.getFailingConstraints(); } }if (validationContext.getFailingConstraints().size() > numberOfViolations) { break; } } } return validationContext.getFailingConstraints(); }
3.26
hibernate-validator_ValidatorImpl_validateReturnValueForGroup_rdh
// TODO GM: if possible integrate with validateParameterForGroup() private <T> void validateReturnValueForGroup(BaseBeanValidationContext<T> validationContext, ExecutableMetaData executableMetaData, T bean, Object value, Group group) { Contracts.assertNotNull(executableMetaData, "executableMetaData may not be null"); // TODO GM: define behavior with respect to redefined default sequences. Should only the // sequence from the validated bean be honored or also default sequence definitions up in // the inheritance tree? // For now a redefined default sequence will only be considered if specified at the bean // hosting the validated itself, but no other default sequence from parent types if (group.isDefaultGroup()) { Iterator<Sequence> defaultGroupSequence = validationContext.getRootBeanMetaData().getDefaultValidationSequence(bean); while (defaultGroupSequence.hasNext()) { Sequence sequence = defaultGroupSequence.next(); int numberOfViolations = validationContext.getFailingConstraints().size(); for (GroupWithInheritance expandedGroup : sequence) { for (Group defaultGroupSequenceElement : expandedGroup) { validateReturnValueForSingleGroup(validationContext, executableMetaData, bean, value, defaultGroupSequenceElement.getDefiningClass()); if (shouldFailFast(validationContext)) { return; } } // stop processing after first group with errors occurred if (validationContext.getFailingConstraints().size() > numberOfViolations) { return; } } } } else { validateReturnValueForSingleGroup(validationContext, executableMetaData, bean, value, group.getDefiningClass()); } }
3.26
hibernate-validator_AbstractMessageInterpolator_interpolateMessage_rdh
/** * Runs the message interpolation according to algorithm specified in the Bean Validation specification. * <p> * Note: * <p> * Look-ups in user bundles is recursive whereas look-ups in default bundle are not! * * @param message * the message to interpolate * @param context * the context for this interpolation * @param locale * the {@code Locale} to use for the resource bundle. * @return the interpolated message. */ private String interpolateMessage(String message, Context context, Locale locale) throws MessageDescriptorFormatException { // if the message does not contain any message parameter, we can ignore the next steps and just return // the unescaped message. It avoids storing the message in the cache and a cache lookup. if (message.indexOf('{') < 0) { return replaceEscapedLiterals(message); } String resolvedMessage = null; // either retrieve message from cache, or if message is not yet there or caching is disabled, // perform message resolution algorithm (step 1) if (cachingEnabled) { resolvedMessage = resolvedMessages.computeIfAbsent(new LocalizedMessage(message, locale), lm -> m0(message, locale)); } else { resolvedMessage = m0(message, locale); } // there's no need for steps 2-3 unless there's `{param}`/`${expr}` in the message if (resolvedMessage.indexOf('{') > (-1)) { // resolve parameter expressions (step 2) resolvedMessage = interpolateExpression(new TokenIterator(getParameterTokens(resolvedMessage, tokenizedParameterMessages, InterpolationTermType.PARAMETER)), context, locale); // resolve EL expressions (step 3) // in the standard Hibernate Validator execution flow, the context is always an instance of // HibernateMessageInterpolatorContext // but it can be a spec Context in the Jakarta Bean Validation TCK. if ((!(context instanceof HibernateMessageInterpolatorContext)) || (((HibernateMessageInterpolatorContext) (context)).getExpressionLanguageFeatureLevel() != ExpressionLanguageFeatureLevel.NONE)) { resolvedMessage = interpolateExpression(new TokenIterator(getParameterTokens(resolvedMessage, tokenizedELMessages, InterpolationTermType.EL)), context, locale); }
3.26
hibernate-validator_ValidationXmlTestHelper_runWithCustomValidationXml_rdh
/** * Executes the given runnable, using the specified file as replacement for * {@code META-INF/validation.xml}. * * @param validationXmlName * The file to be used as validation.xml file. * @param runnable * The runnable to execute. */ public void runWithCustomValidationXml(final String validationXmlName, Runnable runnable) { ClassLoader previousContextCl = Thread.currentThread().getContextClassLoader(); try { Thread.currentThread().setContextClassLoader(new ClassLoader(previousContextCl) { @Override public InputStream getResourceAsStream(String name) { if ("META-INF/validation.xml".equals(name)) { return clazz.getResourceAsStream(validationXmlName); } return super.getResourceAsStream(name); } }); runnable.run(); } finally { Thread.currentThread().setContextClassLoader(previousContextCl); } }
3.26
hibernate-validator_AnnotationParametersAbstractCheck_canCheckThisAnnotation_rdh
/** * Verify that this check class can process such annotation. * * @param annotation * annotation you want to process by this class * @return {@code true} if such annotation can be processed, {@code false} otherwise. */ protected boolean canCheckThisAnnotation(AnnotationMirror annotation) { return annotationClasses.contains(annotation.getAnnotationType().asElement().toString()); }
3.26
hibernate-validator_ConstraintAnnotationVisitor_visitTypeAsClass_rdh
/** * <p> * Checks whether the given annotations are correctly specified at the given * class type declaration. The following checks are performed: * </p> * <ul> * <li> * Constraint annotations may at types supported by the constraints.</li> * <li> * </ul> */ @Override public Void visitTypeAsClass(TypeElement e, List<AnnotationMirror> p) { checkConstraints(e, p); return null; }
3.26
hibernate-validator_ConstraintAnnotationVisitor_checkConstraints_rdh
/** * Retrieves the checks required for the given element and annotations, * executes them and reports all occurred errors. * * @param annotatedElement * The element to check. * @param mirrors * The annotations to check. */ private void checkConstraints(Element annotatedElement, List<AnnotationMirror> mirrors) { for (AnnotationMirror oneAnnotationMirror : mirrors) { try { ConstraintChecks constraintChecks = constraintCheckFactory.getConstraintChecks(annotatedElement, oneAnnotationMirror); reportIssues(constraintChecks.execute(annotatedElement, oneAnnotationMirror)); }// HV-293: if single constraints can't be properly checked, report this and // proceed with next constraints catch (Exception e) { if (verbose) { messager.getDelegate().printMessage(Kind.NOTE, e.getMessage() != null ? e.getMessage() : e.toString(), annotatedElement, oneAnnotationMirror); } } } }
3.26
hibernate-validator_ConstraintAnnotationVisitor_visitTypeAsInterface_rdh
/** * <p> * Checks whether the given annotations are correctly specified at the given * interface type declaration. The following checks are performed: * </p> * <ul> * <li> * Constraint annotations may at types supported by the constraints.</li> * <li> * </ul> */ @Override public Void visitTypeAsInterface(TypeElement e, List<AnnotationMirror> p) { checkConstraints(e, p); return null; }
3.26
hibernate-validator_ConstraintAnnotationVisitor_visitTypeAsEnum_rdh
/** * <p> * Checks whether the given annotations are correctly specified at the given * enum type declaration. The following checks are performed: * </p> * <ul> * <li> * Constraint annotations may at types supported by the constraints.</li> * <li> * </ul> */ @Override public Void visitTypeAsEnum(TypeElement e, List<AnnotationMirror> p) { checkConstraints(e, p); return null; }
3.26
hibernate-validator_ConstraintAnnotationVisitor_visitTypeAsAnnotationType_rdh
/** * <p> * Checks whether the given annotations are correctly specified at the given * annotation type declaration. The following checks are performed: * </p> * <ul> * <li> * The only annotation types allowed to be annotated with other constraint * annotations are composed constraint annotation type declarations.</li> * </ul> */ @Override public Void visitTypeAsAnnotationType(TypeElement annotationType, List<AnnotationMirror> mirrors) { checkConstraints(annotationType, mirrors); return null; }
3.26
hibernate-validator_ConstraintAnnotationVisitor_visitVariableAsParameter_rdh
/** * <p> * Checks whether the given annotations are correctly specified at the given * method parameter. The following checks are performed: * </p> * <ul> * <li> * Constraint annotation parameter values are meaningful and valid. * </li> * </ul> */ @Override public Void visitVariableAsParameter(VariableElement annotatedField, List<AnnotationMirror> mirrors) {checkConstraints(annotatedField, mirrors); return null; }
3.26
hibernate-validator_ConstraintAnnotationVisitor_visitExecutableAsMethod_rdh
/** * <p> * Checks whether the given annotations are correctly specified at the given * method. The following checks are performed: * </p> * <ul> * <li> * Constraint annotations may only be given at non-static, JavaBeans getter * methods which's return type is supported by the constraints.</li> * <li> * The {@code @Valid} annotation may only be given at non-static, * non-primitive JavaBeans getter methods.</li> * </ul> */ @Override public Void visitExecutableAsMethod(ExecutableElement method, List<AnnotationMirror> mirrors) { checkConstraints(method, mirrors);return null; } /** * <p> * Checks whether the given annotations are correctly specified at the given * field. The following checks are performed: * </p> * <ul> * <li> * Constraint annotations may only be given at non-static fields which's * type is supported by the constraints.</li> * <li> * The {@code @Valid}
3.26
hibernate-validator_MessagerAdapter_reportWarning_rdh
/** * Reports the given warning. Message parameters will be put into the template * retrieved from the resource bundle if applicable. * * @param warning * The warning to report. */ private void reportWarning(ConstraintCheckIssue warning) { report(warning, Kind.WARNING); }
3.26
hibernate-validator_MessagerAdapter_report_rdh
/** * Reports the given issue. Message parameters will be put into the template * retrieved from the resource bundle if applicable. * * @param issue * The issue to report. * @param kind * Kind of diagnostics to be used for reporting a given issue. */ private void report(ConstraintCheckIssue issue, Kind kind) { String message = errorMessages.getString(issue.getMessageKey()); if (issue.getMessageParameters() != null) { MessageFormat messageFormat = new MessageFormat(message, Locale.getDefault()); message = messageFormat.format(issue.getMessageParameters()); } messager.printMessage(kind, message, issue.getElement(), issue.getAnnotationMirror()); }
3.26
hibernate-validator_MessagerAdapter_reportErrors_rdh
/** * Reports the given errors against the underlying {@link Messager} using * the specified {@link Kind}. * * @param errors * A set with errors to report. May be empty but must not be * null. */ public void reportErrors(Collection<ConstraintCheckIssue> errors) { for (ConstraintCheckIssue error : errors) { reportError(error); } }
3.26
hibernate-validator_MessagerAdapter_reportError_rdh
/** * Reports the given error. Message parameters will be put into the template * retrieved from the resource bundle if applicable. * * @param error * The error to report. */ private void reportError(ConstraintCheckIssue error) { report(error, diagnosticKind); }
3.26
hibernate-validator_MessagerAdapter_getDelegate_rdh
/** * Returns the messager used by this adapter. * * @return The underlying messager. */ public Messager getDelegate() { return messager; }
3.26
hibernate-validator_MessagerAdapter_reportWarnings_rdh
/** * Reports the given warnings against the underlying {@link Messager} using * the specified {@link Kind}. * * @param warnings * A set with errors to report. May be empty but must not be * null. */public void reportWarnings(Collection<ConstraintCheckIssue> warnings) {for (ConstraintCheckIssue warning : warnings) { reportWarning(warning); } }
3.26
hibernate-validator_PositiveOrZeroValidatorForFloat_isValid_rdh
/** * Check that the number being validated is positive or zero. * * @author Hardy Ferentschik * @author Xavier Sosnovsky * @author Guillaume Smet * @author Marko Bekhta */public class PositiveOrZeroValidatorForFloat implements ConstraintValidator<PositiveOrZero, Float> { @Override public boolean isValid(Float value, ConstraintValidatorContext context) { // null values are valid if (value == null) { return true; } return NumberSignHelper.signum(value, InfinityNumberComparatorHelper.LESS_THAN) >= 0; }
3.26
hibernate-validator_GetDeclaredField_andMakeAccessible_rdh
/** * Before using this method, you need to check the {@code HibernateValidatorPermission.ACCESS_PRIVATE_MEMBERS} * permission against the security manager. */ public static GetDeclaredField andMakeAccessible(Class<?> clazz, String fieldName) { return new GetDeclaredField(clazz, fieldName, true); }
3.26
hibernate-validator_MethodValidationConfiguration_allowMultipleCascadedValidationOnReturnValues_rdh
/** * Define whether more than one constraint on a return value may be marked for cascading validation are allowed. * The default value is {@code false}, i.e. do not allow. * * "One must not mark a method return value for cascaded validation more than once in a line of a class hierarchy. * In other words, overriding methods on sub types (be it sub classes/interfaces or interface implementations) * cannot mark the return value for cascaded validation if the return value has already been marked on the * overridden method of the super type or interface." * * @param allow * flag determining whether validation will allow multiple cascaded validation on return values. * @return {@code this} following the chaining method pattern */ public Builder allowMultipleCascadedValidationOnReturnValues(boolean allow) { this.allowMultipleCascadedValidationOnReturnValues = allow; return this; }
3.26
hibernate-validator_MethodValidationConfiguration_getConfiguredRuleSet_rdh
/** * Return an unmodifiable Set of MethodConfigurationRule that are to be * enforced based on the configuration. * * @return a set of method configuration rules based on this configuration state */ public Set<MethodConfigurationRule> getConfiguredRuleSet() { return configuredRuleSet; }
3.26
hibernate-validator_MethodValidationConfiguration_allowOverridingMethodAlterParameterConstraint_rdh
/** * Define whether overriding methods that override constraints should throw a {@code ConstraintDefinitionException}. * The default value is {@code false}, i.e. do not allow. * * See Section 5.6.5 of the Jakarta Bean Validation Specification, specifically * <pre> * "In sub types (be it sub classes/interfaces or interface implementations), no parameter constraints may * be declared on overridden or implemented methods, nor may parameters be marked for cascaded validation. * This would pose a strengthening of preconditions to be fulfilled by the caller." * </pre> * * @param allow * flag determining whether validation will allow overriding to alter parameter constraints. * @return {@code this} following the chaining method pattern */ public Builder allowOverridingMethodAlterParameterConstraint(boolean allow) { this.allowOverridingMethodAlterParameterConstraint = allow; return this; }
3.26
hibernate-validator_MethodValidationConfiguration_isAllowParallelMethodsDefineParameterConstraints_rdh
/** * * @return {@code true} if constraints on methods in parallel class hierarchy are allowed, {@code false} otherwise. */ public boolean isAllowParallelMethodsDefineParameterConstraints() { return this.f0; }
3.26
hibernate-validator_MethodValidationConfiguration_allowParallelMethodsDefineParameterConstraints_rdh
/** * Define whether parallel methods that define constraints should throw a {@code ConstraintDefinitionException}. The * default value is {@code false}, i.e. do not allow. * * See Section 5.6.5 of the Jakarta Bean Validation Specification, specifically * "If a sub type overrides/implements a method originally defined in several parallel types of the hierarchy * (e.g. two interfaces not extending each other, or a class and an interface not implemented by said class), * no parameter constraints may be declared for that method at all nor parameters be marked for cascaded validation. * This again is to avoid an unexpected strengthening of preconditions to be fulfilled by the caller." * * @param allow * flag determining whether validation will allow parameter constraints in parallel hierarchies * @return {@code this} following the chaining method pattern */ public Builder allowParallelMethodsDefineParameterConstraints(boolean allow) { this.allowParallelMethodsDefineParameterConstraints = allow; return this; }
3.26
hibernate-validator_JavaBeanHelper_run_rdh
/** * Runs the given privileged action, using a privileged block if required. * * <b>NOTE:</b> This must never be changed into a publicly available method to avoid execution of arbitrary * privileged actions within HV's protection domain. */ @IgnoreForbiddenApisErrors(reason = "SecurityManager is deprecated in JDK17") private <T> T run(PrivilegedAction<T> action) { return System.getSecurityManager() != null ? AccessController.doPrivileged(action) : action.run();}
3.26
hibernate-validator_AbstractElementVisitor_reportIssues_rdh
/** * Reports provided issues using {@link javax.annotation.processing.Messager} API based on their * kind ({@link ConstraintCheckIssue.IssueKind}). * * @param foundIssues * a collection of issues to be reported */ protected void reportIssues(Collection<ConstraintCheckIssue> foundIssues) { Set<ConstraintCheckIssue> warnings = CollectionHelper.newHashSet(); Set<ConstraintCheckIssue> errors = CollectionHelper.newHashSet(); for (ConstraintCheckIssue issue : foundIssues) { if (issue.isError()) { errors.add(issue); } else if (issue.isWarning()) { warnings.add(issue); } } messager.reportErrors(errors); messager.reportWarnings(warnings); }
3.26
hibernate-validator_ConstraintTypeStaxBuilder_run_rdh
/** * Runs the given privileged action, using a privileged block if required. * * <b>NOTE:</b> This must never be changed into a publicly available method to avoid execution of arbitrary * privileged actions within HV's protection domain. */ @IgnoreForbiddenApisErrors(reason = "SecurityManager is deprecated in JDK17") private static <T> T run(PrivilegedAction<T> action) { return System.getSecurityManager() != null ? AccessController.doPrivileged(action) : action.run(); }
3.26

Java Code Readability Merged & Modified

This dataset contains 69,276 Java code snippets, each with a readability score, mined from GitHub and automatically processed and labelled.

You can download the dataset using the Hugging Face datasets library:

from datasets import load_dataset
ds = load_dataset("se2p/code-readability-krod")

The snippets are not split into train, test, and validation sets; the whole dataset is contained in the train split:

ds = ds['train']
ds_as_list = ds.to_list() # Convert the dataset to whatever format suits you best

The dataset is structured as follows:

{
  "code_snippet": ...,  # Java source code snippet
  "score": ...,         # Readability score
  "name": ...           # Name of the code snippet, referring to its origin
}
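
For example, a single entry can be inspected as follows. This is a minimal sketch that assumes the dataset was loaded as shown above; the variable names are only illustrative:

entry = ds[0]                         # first row of the train split
print(entry["name"])                  # origin of the snippet
print(entry["score"])                 # 3.68 for mined snippets, 3.26 for modified ones
print(entry["code_snippet"][:200])    # beginning of the Java source code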

The main goal of this dataset is to enable the training of code readability classifiers for Java source code.

Dataset Details

Uses

The dataset can be used for training Java code readability classifiers.
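
As an illustration only (this is not the training pipeline used by the dataset authors), the following sketch fits a simple regression baseline on TF-IDF features of the snippets and reports its score on a held-out split:

# Baseline sketch: predict the readability score from TF-IDF features of the source code.
from datasets import load_dataset
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.linear_model import Ridge
from sklearn.model_selection import train_test_split

ds = load_dataset("se2p/code-readability-krod")["train"]
X_train, X_test, y_train, y_test = train_test_split(ds["code_snippet"], ds["score"], test_size=0.2, random_state=42)

vectorizer = TfidfVectorizer(max_features=20000, token_pattern=r"\S+")
model = Ridge()
model.fit(vectorizer.fit_transform(X_train), y_train)
print("R^2 on the held-out split:", model.score(vectorizer.transform(X_test), y_test))

Because only two score values occur in the dataset (3.68 for mined and 3.26 for modified snippets), this regression effectively separates the two groups; a binary classifier would work just as well.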

Dataset Structure

Each entry of the dataset consists of a code_snippet and a score (and a name). The code_snippet (string) is a Java method that was mined from GitHub, possibly with readability-reducing modifications applied (see Data Collection and Processing below). Each snippet has a readability score assigned. The score is based on a five-point Likert scale, with 1 being very unreadable and 5 being very readable.
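
Although the score field uses this five-point scale, only the two automatically assigned values described under Data Collection and Processing occur in this dataset. This can be checked directly with a small sketch, reusing ds from above:

from collections import Counter
print(Counter(ds["score"]))    # expected to show only the values 3.26 and 3.68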

Dataset Creation

Curation Rationale

To advance code readability classification, the creation of datasets in this research field is of high importance. We provide a new dataset generated with a new approach. Previous datasets for code readability classification were mostly created by humans manually annotating the readability of code; those datasets are relatively small, with a combined size of only 421 samples. Because our approach can be automated, we can provide code snippets at a much larger scale. We share this dataset on Hugging Face to make it easy to access and use.

Source Data

The code snippets were initially mined from 100 public GitHub repositories, which are listed in the Appendix at the end of this file.

Data Collection and Processing

Data collection and preprocessing for this Hugging Face dataset involved two main steps. First, GitHub repositories known for high code quality were mined, and the extracted methods were treated as highly readable and labelled with a score of 3.68. Second, the code was intentionally modified to reduce readability, and the resulting snippets were labelled with a score of 3.26. This results in an automatically generated training dataset for source code readability classification.
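
Since these two steps produce exactly two score values, the dataset can easily be turned into a binary classification dataset. The following sketch is one possible mapping (the 3.5 threshold is a choice made here, not something prescribed by the dataset):

# 1 = mined snippet (score 3.68, treated as readable), 0 = modified snippet (score 3.26).
from datasets import load_dataset

ds = load_dataset("se2p/code-readability-krod")["train"]
binary = ds.map(lambda row: {"label": int(row["score"] > 3.5)})
print(binary[0]["score"], binary[0]["label"])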

Who are the source data producers?

The source data producers are the developers who wrote the open-source Java projects that were mined.

Personal and Sensitive Information

The readability scores of the code snippets are assigned automatically, and the snippets themselves are source code taken from public repositories. Thus, no personal or sensitive information is contained in this dataset.

Bias, Risks, and Limitations

The assigned labels are not exact: they are average estimates derived from a survey rather than ratings of the individual snippets. In that survey, the mined code snippets received an average score of 3.68 and the modified code snippets an average score of 3.26; these averages are applied uniformly to all snippets of the respective group.

Recommendations

The dataset should be used to train Java code readability classifiers. We recommend fine-tuning and evaluation on manually labeled data.
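
One way to follow this recommendation is sketched below. It reuses the model and vectorizer from the baseline sketch in the Uses section, and manually_labelled.csv is a hypothetical file with human ratings that you would have to provide yourself; it is not part of this dataset:

# Compare model predictions with human readability ratings.
import csv
from scipy.stats import pearsonr

with open("manually_labelled.csv", newline="") as f:
    rows = list(csv.DictReader(f))    # expected columns: code_snippet, score

human = [float(r["score"]) for r in rows]
predicted = [model.predict(vectorizer.transform([r["code_snippet"]]))[0] for r in rows]
print("Pearson correlation with human ratings:", pearsonr(human, predicted)[0])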

Dataset Card Authors

Lukas Krodinger, Chair of Software Engineering II, University of Passau.

Dataset Card Contact

Feel free to contact me via e-mail if you have any questions or remarks.

Appendix

Origin Repositories
