diff --git "a/validation.csv" "b/validation.csv" new file mode 100644--- /dev/null +++ "b/validation.csv" @@ -0,0 +1,11455 @@ +,label,code +0,0," private static void initKeyPair(SecureRandom prng) throws NoSuchAlgorithmException { + String sigAlg = signatureAlgorithm.toLowerCase(); + if ( sigAlg.endsWith(""withdsa"") ) { + // + // Admittedly, this is a kludge. However for Sun JCE, even though + // ""SHA1withDSA"" is a valid signature algorithm name, if one calls + // KeyPairGenerator kpg = KeyPairGenerator.getInstance(""SHA1withDSA""); + // that will throw a NoSuchAlgorithmException with an exception + // message of ""SHA1withDSA KeyPairGenerator not available"". Since + // SHA1withDSA and DSA keys should be identical, we use ""DSA"" + // in the case that SHA1withDSA or SHAwithDSA was specified. This is + // all just to make these 2 work as expected. Sigh. (Note: + // this was tested with JDK 1.6.0_21, but likely fails with earlier + // versions of the JDK as well.) + // + sigAlg = ""DSA""; + } else if ( sigAlg.endsWith(""withrsa"") ) { + // Ditto for RSA. + sigAlg = ""RSA""; + } + KeyPairGenerator keyGen = KeyPairGenerator.getInstance(sigAlg); + keyGen.initialize(signatureKeyLength, prng); + KeyPair pair = keyGen.generateKeyPair(); + privateKey = pair.getPrivate(); + publicKey = pair.getPublic(); + } +" +1,0," public ZipArchiveEntry getNextZipEntry() throws IOException { + uncompressedCount = 0; + + boolean firstEntry = true; + if (closed || hitCentralDirectory) { + return null; + } + if (current != null) { + closeEntry(); + firstEntry = false; + } + + long currentHeaderOffset = getBytesRead(); + try { + if (firstEntry) { + // split archives have a special signature before the + // first local file header - look for it and fail with + // the appropriate error message if this is a split + // archive. + readFirstLocalFileHeader(lfhBuf); + } else { + readFully(lfhBuf); + } + } catch (final EOFException e) { + return null; + } + + final ZipLong sig = new ZipLong(lfhBuf); + if (!sig.equals(ZipLong.LFH_SIG)) { + if (sig.equals(ZipLong.CFH_SIG) || sig.equals(ZipLong.AED_SIG) || isApkSigningBlock(lfhBuf)) { + hitCentralDirectory = true; + skipRemainderOfArchive(); + return null; + } + throw new ZipException(String.format(""Unexpected record signature: 0X%X"", sig.getValue())); + } + + int off = WORD; + current = new CurrentEntry(); + + final int versionMadeBy = ZipShort.getValue(lfhBuf, off); + off += SHORT; + current.entry.setPlatform((versionMadeBy >> ZipFile.BYTE_SHIFT) & ZipFile.NIBLET_MASK); + + final GeneralPurposeBit gpFlag = GeneralPurposeBit.parse(lfhBuf, off); + final boolean hasUTF8Flag = gpFlag.usesUTF8ForNames(); + final ZipEncoding entryEncoding = hasUTF8Flag ? 
ZipEncodingHelper.UTF8_ZIP_ENCODING : zipEncoding; + current.hasDataDescriptor = gpFlag.usesDataDescriptor(); + current.entry.setGeneralPurposeBit(gpFlag); + + off += SHORT; + + current.entry.setMethod(ZipShort.getValue(lfhBuf, off)); + off += SHORT; + + final long time = ZipUtil.dosToJavaTime(ZipLong.getValue(lfhBuf, off)); + current.entry.setTime(time); + off += WORD; + + ZipLong size = null, cSize = null; + if (!current.hasDataDescriptor) { + current.entry.setCrc(ZipLong.getValue(lfhBuf, off)); + off += WORD; + + cSize = new ZipLong(lfhBuf, off); + off += WORD; + + size = new ZipLong(lfhBuf, off); + off += WORD; + } else { + off += 3 * WORD; + } + + final int fileNameLen = ZipShort.getValue(lfhBuf, off); + + off += SHORT; + + final int extraLen = ZipShort.getValue(lfhBuf, off); + off += SHORT; // NOSONAR - assignment as documentation + + final byte[] fileName = new byte[fileNameLen]; + readFully(fileName); + current.entry.setName(entryEncoding.decode(fileName), fileName); + if (hasUTF8Flag) { + current.entry.setNameSource(ZipArchiveEntry.NameSource.NAME_WITH_EFS_FLAG); + } + + final byte[] extraData = new byte[extraLen]; + readFully(extraData); + current.entry.setExtra(extraData); + + if (!hasUTF8Flag && useUnicodeExtraFields) { + ZipUtil.setNameAndCommentFromExtraFields(current.entry, fileName, null); + } + + processZip64Extra(size, cSize); + + current.entry.setLocalHeaderOffset(currentHeaderOffset); + current.entry.setDataOffset(getBytesRead()); + current.entry.setStreamContiguous(true); + + ZipMethod m = ZipMethod.getMethodByCode(current.entry.getMethod()); + if (current.entry.getCompressedSize() != ArchiveEntry.SIZE_UNKNOWN) { + if (ZipUtil.canHandleEntryData(current.entry) && m != ZipMethod.STORED && m != ZipMethod.DEFLATED) { + InputStream bis = new BoundedInputStream(in, current.entry.getCompressedSize()); + switch (m) { + case UNSHRINKING: + current.in = new UnshrinkingInputStream(bis); + break; + case IMPLODING: + current.in = new ExplodingInputStream( + current.entry.getGeneralPurposeBit().getSlidingDictionarySize(), + current.entry.getGeneralPurposeBit().getNumberOfShannonFanoTrees(), + bis); + break; + case BZIP2: + current.in = new BZip2CompressorInputStream(bis); + break; + case ENHANCED_DEFLATED: + current.in = new Deflate64CompressorInputStream(bis); + break; + default: + // we should never get here as all supported methods have been covered + // will cause an error when read is invoked, don't throw an exception here so people can + // skip unsupported entries + break; + } + } + } else if (m == ZipMethod.ENHANCED_DEFLATED) { + current.in = new Deflate64CompressorInputStream(in); + } + + entriesRead++; + return current.entry; + } + + /** + * Fills the given array with the first local file header and + * deals with splitting/spanning markers that may prefix the first + * LFH. 
+ */ +" +2,0," private static void assertCorrectConfig(User user, String unixPath) { + assertThat(user.getConfigFile().getFile().getPath(), endsWith(unixPath.replace('/', File.separatorChar))); + } + +" +3,0," public void testRepositoryCreation() throws Exception { + Client client = client(); + + File location = randomRepoPath(); + + logger.info(""--> creating repository""); + PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository(""test-repo-1"") + .setType(""fs"").setSettings(ImmutableSettings.settingsBuilder() + .put(""location"", location) + ).get(); + assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true)); + + logger.info(""--> verify the repository""); + int numberOfFiles = location.listFiles().length; + VerifyRepositoryResponse verifyRepositoryResponse = client.admin().cluster().prepareVerifyRepository(""test-repo-1"").get(); + assertThat(verifyRepositoryResponse.getNodes().length, equalTo(cluster().numDataAndMasterNodes())); + + logger.info(""--> verify that we didn't leave any files as a result of verification""); + assertThat(location.listFiles().length, equalTo(numberOfFiles)); + + logger.info(""--> check that repository is really there""); + ClusterStateResponse clusterStateResponse = client.admin().cluster().prepareState().clear().setMetaData(true).get(); + MetaData metaData = clusterStateResponse.getState().getMetaData(); + RepositoriesMetaData repositoriesMetaData = metaData.custom(RepositoriesMetaData.TYPE); + assertThat(repositoriesMetaData, notNullValue()); + assertThat(repositoriesMetaData.repository(""test-repo-1""), notNullValue()); + assertThat(repositoriesMetaData.repository(""test-repo-1"").type(), equalTo(""fs"")); + + logger.info(""--> creating another repository""); + putRepositoryResponse = client.admin().cluster().preparePutRepository(""test-repo-2"") + .setType(""fs"").setSettings(ImmutableSettings.settingsBuilder() + .put(""location"", randomRepoPath()) + ).get(); + assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true)); + + logger.info(""--> check that both repositories are in cluster state""); + clusterStateResponse = client.admin().cluster().prepareState().clear().setMetaData(true).get(); + metaData = clusterStateResponse.getState().getMetaData(); + repositoriesMetaData = metaData.custom(RepositoriesMetaData.TYPE); + assertThat(repositoriesMetaData, notNullValue()); + assertThat(repositoriesMetaData.repositories().size(), equalTo(2)); + assertThat(repositoriesMetaData.repository(""test-repo-1""), notNullValue()); + assertThat(repositoriesMetaData.repository(""test-repo-1"").type(), equalTo(""fs"")); + assertThat(repositoriesMetaData.repository(""test-repo-2""), notNullValue()); + assertThat(repositoriesMetaData.repository(""test-repo-2"").type(), equalTo(""fs"")); + + logger.info(""--> check that both repositories can be retrieved by getRepositories query""); + GetRepositoriesResponse repositoriesResponse = client.admin().cluster().prepareGetRepositories().get(); + assertThat(repositoriesResponse.repositories().size(), equalTo(2)); + assertThat(findRepository(repositoriesResponse.repositories(), ""test-repo-1""), notNullValue()); + assertThat(findRepository(repositoriesResponse.repositories(), ""test-repo-2""), notNullValue()); + + logger.info(""--> delete repository test-repo-1""); + client.admin().cluster().prepareDeleteRepository(""test-repo-1"").get(); + repositoriesResponse = client.admin().cluster().prepareGetRepositories().get(); + assertThat(repositoriesResponse.repositories().size(), 
equalTo(1)); + assertThat(findRepository(repositoriesResponse.repositories(), ""test-repo-2""), notNullValue()); + + logger.info(""--> delete repository test-repo-2""); + client.admin().cluster().prepareDeleteRepository(""test-repo-2"").get(); + repositoriesResponse = client.admin().cluster().prepareGetRepositories().get(); + assertThat(repositoriesResponse.repositories().size(), equalTo(0)); + } + +" +4,0," protected Blob getBlob(ResultSet resultSet, int columnIndex, Metadata m) throws SQLException { + byte[] bytes = resultSet.getBytes(columnIndex); + if (!resultSet.wasNull()) { + return new SerialBlob(bytes); + } + return null; + } +" +5,0," private T run(PrivilegedAction action) { + return System.getSecurityManager() != null ? AccessController.doPrivileged( action ) : action.run(); + } +" +6,0," protected boolean statusDropsConnection(int status) { + return status == 400 /* SC_BAD_REQUEST */ || + status == 408 /* SC_REQUEST_TIMEOUT */ || + status == 411 /* SC_LENGTH_REQUIRED */ || + status == 413 /* SC_REQUEST_ENTITY_TOO_LARGE */ || + status == 414 /* SC_REQUEST_URI_TOO_LARGE */ || + status == 500 /* SC_INTERNAL_SERVER_ERROR */ || + status == 503 /* SC_SERVICE_UNAVAILABLE */ || + status == 501 /* SC_NOT_IMPLEMENTED */; + } + +" +7,0," private ControllerInfo getControllerInfo(Annotations annotation, String s) { + String[] data = s.split("":""); + if (data.length != 3) { + print(""Wrong format of the controller %s, should be in the format ::"", s); + return null; + } + String type = data[0]; + IpAddress ip = IpAddress.valueOf(data[1]); + int port = Integer.parseInt(data[2]); + if (annotation != null) { + return new ControllerInfo(ip, port, type, annotation); + } + return new ControllerInfo(ip, port, type); + } +" +8,0," public Authentication authenticate(Authentication authentication) throws AuthenticationException { + Assert.isInstanceOf(UsernamePasswordAuthenticationToken.class, authentication, + messages.getMessage(""LdapAuthenticationProvider.onlySupports"", + ""Only UsernamePasswordAuthenticationToken is supported"")); + + final UsernamePasswordAuthenticationToken userToken = (UsernamePasswordAuthenticationToken)authentication; + + String username = userToken.getName(); + String password = (String) authentication.getCredentials(); + + if (logger.isDebugEnabled()) { + logger.debug(""Processing authentication request for user: "" + username); + } + + if (!StringUtils.hasLength(username)) { + throw new BadCredentialsException(messages.getMessage(""LdapAuthenticationProvider.emptyUsername"", + ""Empty Username"")); + } + + if (!StringUtils.hasLength(password)) { + throw new BadCredentialsException(messages.getMessage(""AbstractLdapAuthenticationProvider.emptyPassword"", + ""Empty Password"")); + } + + Assert.notNull(password, ""Null password was supplied in authentication token""); + + DirContextOperations userData = doAuthentication(userToken); + + UserDetails user = userDetailsContextMapper.mapUserFromContext(userData, authentication.getName(), + loadUserAuthorities(userData, authentication.getName(), (String)authentication.getCredentials())); + + return createSuccessfulAuthentication(userToken, user); + } + +" +9,0," protected IgnoreCsrfProtectionRegistry chainRequestMatchers( + List requestMatchers) { + CsrfConfigurer.this.ignoredCsrfProtectionMatchers.addAll(requestMatchers); + return this; + } + } +}" +10,0," private String[] getParts(String encodedJWT) { + String[] parts = encodedJWT.split(""\\.""); + // Secured JWT XXXXX.YYYYY.ZZZZZ, Unsecured JWT XXXXX.YYYYY. 
+ if (parts.length == 3 || (parts.length == 2 && encodedJWT.endsWith("".""))) { + return parts; + } + + throw new InvalidJWTException(""The encoded JWT is not properly formatted. Expected a three part dot separated string.""); + } +" +11,0," public String getAlgorithm() { + + return (this.algorithm); + + } + + + /** + * Set the message digest algorithm for this Manager. + * + * @param algorithm The new message digest algorithm + */ +" +12,0," public ChannelRequestMatcherRegistry getRegistry() { + return REGISTRY; + } + + @Override +" +13,0," public String getDefaultWebXml() { + if( defaultWebXml == null ) { + defaultWebXml=Constants.DefaultWebXml; + } + + return (this.defaultWebXml); + + } + + + /** + * Set the location of the default deployment descriptor + * + * @param path Absolute/relative path to the default web.xml + */ +" +14,0," public Collection getRequiredPermissions(String regionName) { + return Collections.singletonList(ResourcePermissions.DATA_MANAGE); + } + +" +15,0," private void enableAllocation(String index) { + client().admin().indices().prepareUpdateSettings(index).setSettings(ImmutableSettings.builder().put( + ""index.routing.allocation.enable"", ""all"" + )).get(); + } +" +16,0," public void parse(InputStream stream, ContentHandler ignore, + Metadata metadata, ParseContext context) throws IOException, + SAXException, TikaException { + //Test to see if we should avoid parsing + if (parserState.recursiveParserWrapperHandler.hasHitMaximumEmbeddedResources()) { + return; + } + // Work out what this thing is + String objectName = getResourceName(metadata, parserState); + String objectLocation = this.location + objectName; + + metadata.add(AbstractRecursiveParserWrapperHandler.EMBEDDED_RESOURCE_PATH, objectLocation); + + + //get a fresh handler + ContentHandler localHandler = parserState.recursiveParserWrapperHandler.getNewContentHandler(); + parserState.recursiveParserWrapperHandler.startEmbeddedDocument(localHandler, metadata); + + Parser preContextParser = context.get(Parser.class); + context.set(Parser.class, new EmbeddedParserDecorator(getWrappedParser(), objectLocation, parserState)); + long started = System.currentTimeMillis(); + try { + super.parse(stream, localHandler, metadata, context); + } catch (SAXException e) { + boolean wlr = isWriteLimitReached(e); + if (wlr == true) { + metadata.add(WRITE_LIMIT_REACHED, ""true""); + } else { + if (catchEmbeddedExceptions) { + ParserUtils.recordParserFailure(this, e, metadata); + } else { + throw e; + } + } + } catch(CorruptedFileException e) { + throw e; + } catch (TikaException e) { + if (catchEmbeddedExceptions) { + ParserUtils.recordParserFailure(this, e, metadata); + } else { + throw e; + } + } finally { + context.set(Parser.class, preContextParser); + long elapsedMillis = System.currentTimeMillis() - started; + metadata.set(RecursiveParserWrapperHandler.PARSE_TIME_MILLIS, Long.toString(elapsedMillis)); + parserState.recursiveParserWrapperHandler.endEmbeddedDocument(localHandler, metadata); + } + } + } + + /** + * This tracks the state of the parse of a single document. + * In future versions, this will allow the RecursiveParserWrapper to be thread safe. 
+ */ + private class ParserState { + private int unknownCount = 0; + private final AbstractRecursiveParserWrapperHandler recursiveParserWrapperHandler; + private ParserState(AbstractRecursiveParserWrapperHandler handler) { + this.recursiveParserWrapperHandler = handler; + } + + + } +} +" +17,0," public static void beforeTests() throws Exception { + initCore(""solrconfig.xml"",""schema.xml""); + handler = new UpdateRequestHandler(); + } + + @Test +" +18,0," public void setUp() throws Exception { + interceptor = new I18nInterceptor(); + interceptor.init(); + params = new HashMap(); + session = new HashMap(); + + Map ctx = new HashMap(); + ctx.put(ActionContext.PARAMETERS, params); + ctx.put(ActionContext.SESSION, session); + ac = new ActionContext(ctx); + + Action action = new Action() { + public String execute() throws Exception { + return SUCCESS; + } + }; + mai = new MockActionInvocation(); + ((MockActionInvocation) mai).setAction(action); + ((MockActionInvocation) mai).setInvocationContext(ac); + } + + @After +" +19,0," private MockHttpServletRequestBuilder createChangePasswordRequest(ScimUser user, String code, boolean useCSRF, String password, String passwordConfirmation) throws Exception { + MockHttpServletRequestBuilder post = post(""/reset_password.do""); + if (useCSRF) { + post.with(csrf()); + } + post.param(""code"", code) + .param(""email"", user.getPrimaryEmail()) + .param(""password"", password) + .param(""password_confirmation"", passwordConfirmation); + return post; + } +" +20,0," protected Log getLog() { + return log; + } + + // ----------------------------------------------------------- Constructors + + +" +21,0," public int getCount() { + return iCount; + } + +" +22,0," public BeanDefinitionHolder decorate(Node node, BeanDefinitionHolder holder, + ParserContext parserContext) { + BeanDefinition filterChainProxy = holder.getBeanDefinition(); + + ManagedList securityFilterChains = new ManagedList(); + Element elt = (Element) node; + + MatcherType matcherType = MatcherType.fromElement(elt); + + List filterChainElts = DomUtils.getChildElementsByTagName(elt, + Elements.FILTER_CHAIN); + + for (Element chain : filterChainElts) { + String path = chain + .getAttribute(HttpSecurityBeanDefinitionParser.ATT_PATH_PATTERN); + String filters = chain + .getAttribute(HttpSecurityBeanDefinitionParser.ATT_FILTERS); + + if (!StringUtils.hasText(path)) { + parserContext.getReaderContext().error( + ""The attribute '"" + + HttpSecurityBeanDefinitionParser.ATT_PATH_PATTERN + + ""' must not be empty"", elt); + } + + if (!StringUtils.hasText(filters)) { + parserContext.getReaderContext().error( + ""The attribute '"" + HttpSecurityBeanDefinitionParser.ATT_FILTERS + + ""'must not be empty"", elt); + } + + BeanDefinition matcher = matcherType.createMatcher(parserContext, path, null); + + if (filters.equals(HttpSecurityBeanDefinitionParser.OPT_FILTERS_NONE)) { + securityFilterChains.add(createSecurityFilterChain(matcher, + new ManagedList(0))); + } + else { + String[] filterBeanNames = StringUtils + .tokenizeToStringArray(filters, "",""); + ManagedList filterChain = new ManagedList(filterBeanNames.length); + + for (String name : filterBeanNames) { + filterChain.add(new RuntimeBeanReference(name)); + } + + securityFilterChains.add(createSecurityFilterChain(matcher, filterChain)); + } + } + + filterChainProxy.getConstructorArgumentValues().addGenericArgumentValue( + securityFilterChains); + + return holder; + } + +" +23,0," public XObject execute(XPathContext xctxt) throws 
javax.xml.transform.TransformerException + { + + String fullName = m_arg0.execute(xctxt).str(); + int indexOfNSSep = fullName.indexOf(':'); + String result = null; + String propName = """"; + + // List of properties where the name of the + // property argument is to be looked for. + Properties xsltInfo = new Properties(); + + loadPropertyFile(XSLT_PROPERTIES, xsltInfo); + + if (indexOfNSSep > 0) + { + String prefix = (indexOfNSSep >= 0) + ? fullName.substring(0, indexOfNSSep) : """"; + String namespace; + + namespace = xctxt.getNamespaceContext().getNamespaceForPrefix(prefix); + propName = (indexOfNSSep < 0) + ? fullName : fullName.substring(indexOfNSSep + 1); + + if (namespace.startsWith(""http://www.w3.org/XSL/Transform"") + || namespace.equals(""http://www.w3.org/1999/XSL/Transform"")) + { + result = xsltInfo.getProperty(propName); + + if (null == result) + { + warn(xctxt, XPATHErrorResources.WG_PROPERTY_NOT_SUPPORTED, + new Object[]{ fullName }); //""XSL Property not supported: ""+fullName); + + return XString.EMPTYSTRING; + } + } + else + { + warn(xctxt, XPATHErrorResources.WG_DONT_DO_ANYTHING_WITH_NS, + new Object[]{ namespace, + fullName }); //""Don't currently do anything with namespace ""+namespace+"" in property: ""+fullName); + + try + { + //if secure procession is enabled only handle required properties do not not map any valid system property + if(!xctxt.isSecureProcessing()) + { + result = System.getProperty(fullName); + } + else + { + warn(xctxt, XPATHErrorResources.WG_SECURITY_EXCEPTION, + new Object[]{ fullName }); //""SecurityException when trying to access XSL system property: ""+fullName); + result = xsltInfo.getProperty(propName); + } + if (null == result) + { + return XString.EMPTYSTRING; + } + } + catch (SecurityException se) + { + warn(xctxt, XPATHErrorResources.WG_SECURITY_EXCEPTION, + new Object[]{ fullName }); //""SecurityException when trying to access XSL system property: ""+fullName); + + return XString.EMPTYSTRING; + } + } + } + else + { + try + { + //if secure procession is enabled only handle required properties do not not map any valid system property + if(!xctxt.isSecureProcessing()) + { + result = System.getProperty(fullName); + } + else + { + warn(xctxt, XPATHErrorResources.WG_SECURITY_EXCEPTION, + new Object[]{ fullName }); //""SecurityException when trying to access XSL system property: ""+fullName); + result = xsltInfo.getProperty(propName); + } + if (null == result) + { + return XString.EMPTYSTRING; + } + } + catch (SecurityException se) + { + warn(xctxt, XPATHErrorResources.WG_SECURITY_EXCEPTION, + new Object[]{ fullName }); //""SecurityException when trying to access XSL system property: ""+fullName); + + return XString.EMPTYSTRING; + } + } + + if (propName.equals(""version"") && result.length() > 0) + { + try + { + // Needs to return the version number of the spec we conform to. + return new XString(""1.0""); + } + catch (Exception ex) + { + return new XString(result); + } + } + else + return new XString(result); + } + + /** + * Retrieve a propery bundle from a specified file + * + * @param file The string name of the property file. The name + * should already be fully qualified as path/filename + * @param target The target property bag the file will be placed into. 
+ */ +" +24,0," public boolean equals(Object obj) { + if ( this == obj ) { + return true; + } + if ( !super.equals( obj ) ) { + return false; + } + if ( getClass() != obj.getClass() ) { + return false; + } + ConstrainedExecutable other = (ConstrainedExecutable) obj; + if ( executable == null ) { + if ( other.executable != null ) { + return false; + } + } + else if ( !executable.equals( other.executable ) ) { + return false; + } + return true; + } +" +25,0," public static HierarchicalConfiguration loadXml(InputStream xmlStream) { + try { + XMLConfiguration cfg = new XMLConfiguration(); + DocumentBuilderFactory dbfactory = DocumentBuilderFactory.newInstance(); + //Disabling DTDs in order to avoid XXE xml-based attacks. + disableFeature(dbfactory, DISALLOW_DTD_FEATURE); + disableFeature(dbfactory, DISALLOW_EXTERNAL_DTD); + dbfactory.setXIncludeAware(false); + dbfactory.setExpandEntityReferences(false); + cfg.setDocumentBuilder(dbfactory.newDocumentBuilder()); + cfg.load(xmlStream); + return cfg; + } catch (ConfigurationException | ParserConfigurationException e) { + throw new IllegalArgumentException(""Cannot load xml from Stream"", e); + } + } + +" +26,0," void noteBytesRead(int pBytes) { + /* Indicates, that the given number of bytes have been read from + * the input stream. + */ + bytesRead += pBytes; + notifyListener(); + } + + /** + * Called to indicate, that a new file item has been detected. + */ +" +27,0," private static DocumentBuilder getBuilder() throws ParserConfigurationException { + ClassLoader loader = Thread.currentThread().getContextClassLoader(); + if (loader == null) { + loader = DOMUtils.class.getClassLoader(); + } + if (loader == null) { + DocumentBuilderFactory dbf = createDocumentBuilderFactory(); + return dbf.newDocumentBuilder(); + } + DocumentBuilder builder = DOCUMENT_BUILDERS.get(loader); + if (builder == null) { + DocumentBuilderFactory dbf = createDocumentBuilderFactory(); + builder = dbf.newDocumentBuilder(); + DOCUMENT_BUILDERS.put(loader, builder); + } + return builder; + } + + /** + * This function is much like getAttribute, but returns null, not """", for a nonexistent attribute. + * + * @param e + * @param attributeName + */ +" +28,0," public StandardInterceptUrlRegistry getRegistry() { + return REGISTRY; + } + + /** + * Adds an {@link ObjectPostProcessor} for this class. + * + * @param objectPostProcessor + * @return the {@link UrlAuthorizationConfigurer} for further customizations + */ +" +29,0," public synchronized XPathRecordReader addField(String name, String xpath, boolean multiValued, int flags) { + addField0(xpath, name, multiValued, false, flags); + return this; + } + + /** + * Splits the XPATH into a List of xpath segments and calls build() to + * construct a tree of Nodes representing xpath segments. The resulting + * tree structure ends up describing all the Xpaths we are interested in. 
+ * + * @param xpath The xpath expression for this field + * @param name The name for this field in the emitted record + * @param multiValued If 'true' then the emitted record will have values in + * a List<String> + * @param isRecord Flags that this XPATH is from a forEach statement + * @param flags The only supported flag is 'FLATTEN' + */ +" +30,0," public void setConstants(ContainerBuilder builder) { + for (Object keyobj : keySet()) { + String key = (String)keyobj; + builder.factory(String.class, key, + new LocatableConstantFactory(getProperty(key), getPropertyLocation(key))); + } + } + } +} +" +31,0," public CommandLauncher launch(String host, TaskListener listener) throws IOException, InterruptedException { + return new CommandLauncher(command,new EnvVars(""SLAVE"",host)); + } + + @Extension @Symbol(""command"") +" +32,0," private void parseCSSStyleSheet(String sheet) throws SVGParseException + { + CSSParser cssp = new CSSParser(MediaType.screen); + svgDocument.addCSSRules(cssp.parse(sheet)); + } + +" +33,0," public void test_unsecuredJWT_validation() throws Exception { + JWT jwt = new JWT().setSubject(""123456789""); + Signer signer = new UnsecuredSigner(); + Verifier hmacVerifier = HMACVerifier.newVerifier(""too many secrets""); + + String encodedUnsecuredJWT = JWTEncoder.getInstance().encode(jwt, signer); + + // Ensure that attempting to decode an un-secured JWT fails when we provide a verifier + expectException(MissingVerifierException.class, () -> JWT.getDecoder().decode(encodedUnsecuredJWT, hmacVerifier)); + + String encodedUnsecuredJWT_withKid = JWTEncoder.getInstance().encode(jwt, signer, (header) -> header.set(""kid"", ""abc"")); + String encodedUnsecuredJWT_withoutKid = JWTEncoder.getInstance().encode(jwt, signer); + + Map verifierMap = new HashMap<>(); + verifierMap.put(null, hmacVerifier); + verifierMap.put(""abc"", hmacVerifier); + + // Ensure that attempting to decode an un-secured JWT fails when we provide a verifier with or without using a kid + expectException(MissingVerifierException.class, () -> JWT.getDecoder().decode(encodedUnsecuredJWT_withKid, verifierMap)); + expectException(MissingVerifierException.class, () -> JWT.getDecoder().decode(encodedUnsecuredJWT_withoutKid, verifierMap)); + } + + @Test +" +34,0," public String getConnectionName() { + return connectionName; + } + + /** + * Set the username to use to connect to the database. 
+ * + * @param connectionName Username + */ +" +35,0," protected Log getLog() { + return log; + } + + @Override +" +36,0," public BigInteger[] decode( + byte[] encoding) + throws IOException + { + BigInteger[] sig = new BigInteger[2]; + + byte[] first = new byte[encoding.length / 2]; + byte[] second = new byte[encoding.length / 2]; + + System.arraycopy(encoding, 0, first, 0, first.length); + System.arraycopy(encoding, first.length, second, 0, second.length); + + sig[0] = new BigInteger(1, first); + sig[1] = new BigInteger(1, second); + + return sig; + } + } +}" +37,0," public ScimGroup mapRow(ResultSet rs, int rowNum) throws SQLException { + int pos = 1; + String id = rs.getString(pos++); + String name = rs.getString(pos++); + String description = rs.getString(pos++); + Date created = rs.getTimestamp(pos++); + Date modified = rs.getTimestamp(pos++); + int version = rs.getInt(pos++); + String zoneId = rs.getString(pos++); + ScimGroup group = new ScimGroup(id, name, zoneId); + group.setDescription(description); + ScimMeta meta = new ScimMeta(created, modified, version); + group.setMeta(meta); + return group; + } + } +} +" +38,0," public void testSendStringMessage() throws Exception { + sendMessageAndHaveItTransformed(""HeyHello world!""); + } + +" +39,0," public void testSingletonPatternInSerialization() { + final Object[] singletones = new Object[] { + ExceptionFactory.INSTANCE, + }; + + for (final Object original : singletones) { + TestUtils.assertSameAfterSerialization( + ""Singletone patern broken for "" + original.getClass(), + original + ); + } + } + +" +40,0," private void doTestParameterNameLengthRestriction( ParametersInterceptor parametersInterceptor, + int paramNameMaxLength ) { + StringBuilder sb = new StringBuilder(); + for (int i = 0; i < paramNameMaxLength + 1; i++) { + sb.append(""x""); + } + + Map actual = new LinkedHashMap(); + parametersInterceptor.setValueStackFactory(createValueStackFactory(actual)); + ValueStack stack = createStubValueStack(actual); + + Map parameters = new HashMap(); + parameters.put(sb.toString(), """"); + parameters.put(""huuhaa"", """"); + + Action action = new SimpleAction(); + parametersInterceptor.setParameters(action, stack, parameters); + assertEquals(1, actual.size()); + } + +" +41,0," public boolean equals(Object obj) { + if (obj instanceof CharEntry) { + return value.equals(((CharEntry) obj).value); + } + return false; + } + + } + + +} +" +42,0," protected boolean readMessage(AjpMessage message) + throws IOException { + + byte[] buf = message.getBuffer(); + int headerLength = message.getHeaderLength(); + + read(buf, 0, headerLength); + + int messageLength = message.processHeader(true); + if (messageLength < 0) { + // Invalid AJP header signature + // TODO: Throw some exception and close the connection to frontend. + return false; + } + else if (messageLength == 0) { + // Zero length message. 
+ return true; + } + else { + if (messageLength > buf.length) { + // Message too long for the buffer + // Need to trigger a 400 response + throw new IllegalArgumentException(sm.getString( + ""ajpprocessor.header.tooLong"", + Integer.valueOf(messageLength), + Integer.valueOf(buf.length))); + } + read(buf, headerLength, messageLength); + return true; + } + } +" +43,0," public boolean allPresentAndPositive() { + return lockoutPeriodSeconds >= 0 && lockoutAfterFailures >= 0 && countFailuresWithin >= 0; + } +" +44,0," public boolean event(org.apache.coyote.Request req, + org.apache.coyote.Response res, SocketStatus status) { + + Request request = (Request) req.getNote(ADAPTER_NOTES); + Response response = (Response) res.getNote(ADAPTER_NOTES); + + if (request.getWrapper() != null) { + + boolean error = false; + try { + if (status == SocketStatus.OPEN) { + request.getEvent().setEventType(CometEvent.EventType.READ); + request.getEvent().setEventSubType(null); + } else if (status == SocketStatus.DISCONNECT) { + request.getEvent().setEventType(CometEvent.EventType.ERROR); + request.getEvent().setEventSubType(CometEvent.EventSubType.CLIENT_DISCONNECT); + error = true; + } else if (status == SocketStatus.ERROR) { + request.getEvent().setEventType(CometEvent.EventType.ERROR); + request.getEvent().setEventSubType(CometEvent.EventSubType.IOEXCEPTION); + error = true; + } else if (status == SocketStatus.STOP) { + request.getEvent().setEventType(CometEvent.EventType.END); + request.getEvent().setEventSubType(CometEvent.EventSubType.SERVER_SHUTDOWN); + } else if (status == SocketStatus.TIMEOUT) { + request.getEvent().setEventType(CometEvent.EventType.ERROR); + request.getEvent().setEventSubType(CometEvent.EventSubType.TIMEOUT); + } + + // Calling the container + connector.getContainer().getPipeline().getFirst().event(request, response, request.getEvent()); + + if (response.isClosed() || !request.isComet()) { + res.action(ActionCode.ACTION_COMET_END, null); + } + return (!error); + } catch (Throwable t) { + if (!(t instanceof IOException)) { + log.error(sm.getString(""coyoteAdapter.service""), t); + } + error = true; + // FIXME: Since there's likely some structures kept in the servlet or elsewhere, + // a cleanup event of some sort could be needed ? + return false; + } finally { + // Recycle the wrapper request and response + if (error || response.isClosed() || !request.isComet()) { + request.recycle(); + request.setFilterChain(null); + response.recycle(); + } + } + + } else { + return false; + } + } + + + /** + * Service method. 
+ */ +" +45,0," public void testSnapshotAndRestore() throws ExecutionException, InterruptedException, IOException { + logger.info(""--> creating repository""); + assertAcked(client().admin().cluster().preparePutRepository(""test-repo"") + .setType(""fs"").setSettings(ImmutableSettings.settingsBuilder() + .put(""location"", randomRepoPath().getAbsolutePath()) + .put(""compress"", randomBoolean()) + .put(""chunk_size"", randomIntBetween(100, 1000)))); + String[] indicesBefore = new String[randomIntBetween(2,5)]; + String[] indicesAfter = new String[randomIntBetween(2,5)]; + for (int i = 0; i < indicesBefore.length; i++) { + indicesBefore[i] = ""index_before_"" + i; + createIndex(indicesBefore[i]); + } + for (int i = 0; i < indicesAfter.length; i++) { + indicesAfter[i] = ""index_after_"" + i; + createIndex(indicesAfter[i]); + } + String[] indices = new String[indicesBefore.length + indicesAfter.length]; + System.arraycopy(indicesBefore, 0, indices, 0, indicesBefore.length); + System.arraycopy(indicesAfter, 0, indices, indicesBefore.length, indicesAfter.length); + ensureYellow(); + logger.info(""--> indexing some data""); + IndexRequestBuilder[] buildersBefore = new IndexRequestBuilder[randomIntBetween(10, 200)]; + for (int i = 0; i < buildersBefore.length; i++) { + buildersBefore[i] = client().prepareIndex(RandomPicks.randomFrom(getRandom(), indicesBefore), ""foo"", Integer.toString(i)).setSource(""{ \""foo\"" : \""bar\"" } ""); + } + IndexRequestBuilder[] buildersAfter = new IndexRequestBuilder[randomIntBetween(10, 200)]; + for (int i = 0; i < buildersAfter.length; i++) { + buildersAfter[i] = client().prepareIndex(RandomPicks.randomFrom(getRandom(), indicesBefore), ""bar"", Integer.toString(i)).setSource(""{ \""foo\"" : \""bar\"" } ""); + } + indexRandom(true, buildersBefore); + indexRandom(true, buildersAfter); + assertThat(client().prepareCount(indices).get().getCount(), equalTo((long) (buildersBefore.length + buildersAfter.length))); + long[] counts = new long[indices.length]; + for (int i = 0; i < indices.length; i++) { + counts[i] = client().prepareCount(indices[i]).get().getCount(); + } + + logger.info(""--> snapshot subset of indices before upgrage""); + CreateSnapshotResponse createSnapshotResponse = client().admin().cluster().prepareCreateSnapshot(""test-repo"", ""test-snap-1"").setWaitForCompletion(true).setIndices(""index_before_*"").get(); + assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0)); + assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards())); + + assertThat(client().admin().cluster().prepareGetSnapshots(""test-repo"").setSnapshots(""test-snap-1"").get().getSnapshots().get(0).state(), equalTo(SnapshotState.SUCCESS)); + + logger.info(""--> delete some data from indices that were already snapshotted""); + int howMany = randomIntBetween(1, buildersBefore.length); + + for (int i = 0; i < howMany; i++) { + IndexRequestBuilder indexRequestBuilder = RandomPicks.randomFrom(getRandom(), buildersBefore); + IndexRequest request = indexRequestBuilder.request(); + client().prepareDelete(request.index(), request.type(), request.id()).get(); + } + refresh(); + final long numDocs = client().prepareCount(indices).get().getCount(); + assertThat(client().prepareCount(indices).get().getCount(), lessThan((long) (buildersBefore.length + buildersAfter.length))); + + + 
client().admin().indices().prepareUpdateSettings(indices).setSettings(ImmutableSettings.builder().put(EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE, ""none"")).get(); + backwardsCluster().allowOnAllNodes(indices); + logClusterState(); + boolean upgraded; + do { + logClusterState(); + CountResponse countResponse = client().prepareCount().get(); + assertHitCount(countResponse, numDocs); + upgraded = backwardsCluster().upgradeOneNode(); + ensureYellow(); + countResponse = client().prepareCount().get(); + assertHitCount(countResponse, numDocs); + } while (upgraded); + client().admin().indices().prepareUpdateSettings(indices).setSettings(ImmutableSettings.builder().put(EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE, ""all"")).get(); + + logger.info(""--> close indices""); + client().admin().indices().prepareClose(""index_before_*"").get(); + + logger.info(""--> verify repository""); + client().admin().cluster().prepareVerifyRepository(""test-repo"").get(); + + logger.info(""--> restore all indices from the snapshot""); + RestoreSnapshotResponse restoreSnapshotResponse = client().admin().cluster().prepareRestoreSnapshot(""test-repo"", ""test-snap-1"").setWaitForCompletion(true).execute().actionGet(); + assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0)); + + ensureYellow(); + assertThat(client().prepareCount(indices).get().getCount(), equalTo((long) (buildersBefore.length + buildersAfter.length))); + for (int i = 0; i < indices.length; i++) { + assertThat(counts[i], equalTo(client().prepareCount(indices[i]).get().getCount())); + } + + logger.info(""--> snapshot subset of indices after upgrade""); + createSnapshotResponse = client().admin().cluster().prepareCreateSnapshot(""test-repo"", ""test-snap-2"").setWaitForCompletion(true).setIndices(""index_*"").get(); + assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0)); + assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards())); + + // Test restore after index deletion + logger.info(""--> delete indices""); + String index = RandomPicks.randomFrom(getRandom(), indices); + cluster().wipeIndices(index); + logger.info(""--> restore one index after deletion""); + restoreSnapshotResponse = client().admin().cluster().prepareRestoreSnapshot(""test-repo"", ""test-snap-2"").setWaitForCompletion(true).setIndices(index).execute().actionGet(); + assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0)); + ensureYellow(); + assertThat(client().prepareCount(indices).get().getCount(), equalTo((long) (buildersBefore.length + buildersAfter.length))); + for (int i = 0; i < indices.length; i++) { + assertThat(counts[i], equalTo(client().prepareCount(indices[i]).get().getCount())); + } + } + +" +46,0," public XMSSPrivateKeyParameters getNextKey() + { + /* prepare authentication path for next leaf */ + int treeHeight = this.params.getHeight(); + if (this.getIndex() < ((1 << treeHeight) - 1)) + { + return new XMSSPrivateKeyParameters.Builder(params) + .withSecretKeySeed(secretKeySeed).withSecretKeyPRF(secretKeyPRF) + .withPublicSeed(publicSeed).withRoot(root) + .withBDSState(bdsState.getNextState(publicSeed, secretKeySeed, (OTSHashAddress)new OTSHashAddress.Builder().build())).build(); + } + else + { + return new XMSSPrivateKeyParameters.Builder(params) + .withSecretKeySeed(secretKeySeed).withSecretKeyPRF(secretKeyPRF) + .withPublicSeed(publicSeed).withRoot(root) + .withBDSState(new 
BDS(params, getIndex() + 1)).build(); // no more nodes left. + } + } + +" +47,0," public void addRecipients(final ExtendedEmailPublisherContext context, EnvVars env, Set to, Set cc, Set bcc) { + final class Debug implements RecipientProviderUtilities.IDebug { + private final ExtendedEmailPublisherDescriptor descriptor + = Jenkins.getActiveInstance().getDescriptorByType(ExtendedEmailPublisherDescriptor.class); + + private final PrintStream logger = context.getListener().getLogger(); + + public void send(final String format, final Object... args) { + descriptor.debug(logger, format, args); + } + } + final Debug debug = new Debug(); + // looking for Upstream build. + Run cur = context.getRun(); + Cause.UpstreamCause upc = cur.getCause(Cause.UpstreamCause.class); + while (upc != null) { + // UpstreamCause.getUpStreamProject() returns the full name, so use getItemByFullName + Job p = (Job) Jenkins.getActiveInstance().getItemByFullName(upc.getUpstreamProject()); + if (p == null) { + context.getListener().getLogger().print(""There is a break in the project linkage, could not retrieve upstream project information""); + break; + } + cur = p.getBuildByNumber(upc.getUpstreamBuild()); + upc = cur.getCause(Cause.UpstreamCause.class); + } + addUserTriggeringTheBuild(cur, to, cc, bcc, env, context, debug); + } + +" +48,0," public void setSslSupport(SSLSupport sslSupport) { + this.sslSupport = sslSupport; + } +" +49,0," public void loadPropertyFile(String file, Properties target) + { + try + { + // Use SecuritySupport class to provide privileged access to property file + InputStream is = SecuritySupport.getResourceAsStream(ObjectFactory.findClassLoader(), + file); + + // get a buffered version + BufferedInputStream bis = new BufferedInputStream(is); + + target.load(bis); // and load up the property bag from this + bis.close(); // close out after reading + } + catch (Exception ex) + { + // ex.printStackTrace(); + throw new org.apache.xml.utils.WrappedRuntimeException(ex); + } + } +" +50,0," protected MimeTypes getMimeTypes() { + return embeddedDocumentUtil.getMimeTypes(); + } + +" +51,0," public int getKDFInfo() { + final int unusedBit28 = 0x8000000; // 1000000000000000000000000000 + + // kdf version is bits 1-27, bit 28 (reserved) should be 0, and + // bits 29-32 are the MAC algorithm indicating which PRF to use for the KDF. + int kdfVers = getKDFVersion(); + assert kdfVers > 0 && kdfVers <= 99991231 : ""KDF version (YYYYMMDD, max 99991231) out of range: "" + kdfVers; + int kdfInfo = kdfVers; + int macAlg = kdfPRFAsInt(); + assert macAlg >= 0 && macAlg <= 15 : ""MAC algorithm indicator must be between 0 to 15 inclusion; value is: "" + macAlg; + + // Make sure bit28 is cleared. (Reserved for future use.) + kdfInfo &= ~unusedBit28; + + // Set MAC algorithm bits in high (MSB) nibble. 
+ kdfInfo |= (macAlg << 28); + + return kdfInfo; + } +" +52,0," private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException { + stream.defaultReadObject(); + EVIL_BIT = 1; + } + + } + +} +" +53,0," public void onStartup(Set> c, ServletContext ctx) + throws ServletException { + Servlet s; + + if (createServlet) { + s = ctx.createServlet(servlet.getClass()); + } else { + s = servlet; + } + ServletRegistration.Dynamic r = ctx.addServlet(""servlet"", s); + r.addMapping(""/""); + } + } +} +" +54,0," public ResetPasswordResponse resetPassword(String code, String newPassword) throws InvalidPasswordException { + try { + passwordValidator.validate(newPassword); + return changePasswordCodeAuthenticated(code, newPassword); + } catch (RestClientException e) { + throw new UaaException(e.getMessage()); + } + } + +" +55,0," public Api getApi() { + return new Api(this); + } + + /** + * Returns the instance of this class. + * If {@link jenkins.model.Jenkins#getInstance()} isn't available + * or the plugin class isn't registered null will be returned. + * + * @return the instance. + */ + @CheckForNull +" +56,0," protected int readMessage(AjpMessage message, boolean blockFirstRead) + throws IOException { + + byte[] buf = message.getBuffer(); + int headerLength = message.getHeaderLength(); + + int bytesRead = read(buf, 0, headerLength, blockFirstRead); + + if (bytesRead == 0) { + return 0; + } + + int messageLength = message.processHeader(true); + if (messageLength < 0) { + // Invalid AJP header signature + throw new IOException(sm.getString(""ajpmessage.invalidLength"", + Integer.valueOf(messageLength))); + } + else if (messageLength == 0) { + // Zero length message. + return bytesRead; + } + else { + if (messageLength > buf.length) { + // Message too long for the buffer + // Need to trigger a 400 response + throw new IllegalArgumentException(sm.getString( + ""ajpprocessor.header.tooLong"", + Integer.valueOf(messageLength), + Integer.valueOf(buf.length))); + } + bytesRead += read(buf, headerLength, messageLength, true); + return bytesRead; + } + } + + +" +57,0," public boolean isHA() { + return false; + } + + @Override +" +58,0," public XMSSMTPrivateKeyParameters getNextKey() + { + BDSStateMap newState = new BDSStateMap(bdsState, params, this.getIndex(), publicSeed, secretKeySeed); + + return new XMSSMTPrivateKeyParameters.Builder(params).withIndex(index + 1) + .withSecretKeySeed(secretKeySeed).withSecretKeyPRF(secretKeyPRF) + .withPublicSeed(publicSeed).withRoot(root) + .withBDSState(newState).build(); + } +" +59,0," public TransformerFactory createTransformerFactory() { + TransformerFactory factory = TransformerFactory.newInstance(); + // Enable the Security feature by default + try { + factory.setFeature(javax.xml.XMLConstants.FEATURE_SECURE_PROCESSING, true); + } catch (TransformerConfigurationException e) { + LOG.warn(""TransformerFactory doesn't support the feature {} with value {}, due to {}."", new Object[]{javax.xml.XMLConstants.FEATURE_SECURE_PROCESSING, ""true"", e}); + } + factory.setErrorListener(new XmlErrorListener()); + return factory; + } + +" +60,0," long getTimeStamp(); + +" +61,0," public void init(KeyGenerationParameters param) + { + this.param = (RSAKeyGenerationParameters)param; + } + +" +62,0," public Document getMetaData(Idp config) throws RuntimeException { + try { + //Return as text/xml + Crypto crypto = CertsUtils.createCrypto(config.getCertificate()); + + W3CDOMStreamWriter writer = new W3CDOMStreamWriter(); + + 
writer.writeStartDocument(""UTF-8"", ""1.0""); + + String referenceID = IDGenerator.generateID(""_""); + writer.writeStartElement(""md"", ""EntityDescriptor"", SAML2_METADATA_NS); + writer.writeAttribute(""ID"", referenceID); + + writer.writeAttribute(""entityID"", config.getIdpUrl().toString()); + + writer.writeNamespace(""md"", SAML2_METADATA_NS); + writer.writeNamespace(""fed"", WS_FEDERATION_NS); + writer.writeNamespace(""wsa"", WS_ADDRESSING_NS); + writer.writeNamespace(""auth"", WS_FEDERATION_NS); + writer.writeNamespace(""xsi"", SCHEMA_INSTANCE_NS); + + writeFederationMetadata(writer, config, crypto); + + writer.writeEndElement(); // EntityDescriptor + + writer.writeEndDocument(); + + writer.close(); + + if (LOG.isDebugEnabled()) { + String out = DOM2Writer.nodeToString(writer.getDocument()); + LOG.debug(""***************** unsigned ****************""); + LOG.debug(out); + LOG.debug(""***************** unsigned ****************""); + } + + Document result = SignatureUtils.signMetaInfo(crypto, null, config.getCertificatePassword(), + writer.getDocument(), referenceID); + if (result != null) { + return result; + } else { + throw new RuntimeException(""Failed to sign the metadata document: result=null""); + } + } catch (Exception e) { + LOG.error(""Error creating service metadata information "", e); + throw new RuntimeException(""Error creating service metadata information: "" + e.getMessage()); + } + + } + +" +63,0," protected void publish(ApplicationEvent event) { + if (publisher!=null) { + publisher.publishEvent(event); + } + } +" +64,0," private static File getUnsanitizedLegacyConfigFileFor(String id) { + return new File(getRootDir(), idStrategy().legacyFilenameOf(id) + ""/config.xml""); + } + + /** + * Gets the directory where Hudson stores user information. + */ +" +65,0," protected void _initFactories(XMLInputFactory xmlIn, XMLOutputFactory xmlOut) + { + // Better ensure namespaces get built properly, so: + xmlOut.setProperty(XMLOutputFactory.IS_REPAIRING_NAMESPACES, Boolean.TRUE); + // and for parser, force coalescing as well (much simpler to use) + xmlIn.setProperty(XMLInputFactory.IS_COALESCING, Boolean.TRUE); + } + + /** + * Note: compared to base implementation by {@link JsonFactory}, + * here the copy will actually share underlying XML input and + * output factories, as there is no way to make copies of those. + * + * @since 2.1 + */ + @Override +" +66,0," public ScimUser retrieve(String id) { + try { + ScimUser u = jdbcTemplate.queryForObject(USER_BY_ID_QUERY, mapper, id); + return u; + } catch (EmptyResultDataAccessException e) { + throw new ScimResourceNotFoundException(""User "" + id + "" does not exist""); + } + } + + @Override +" +67,0," private T run(PrivilegedAction action) { + return System.getSecurityManager() != null ? 
AccessController.doPrivileged( action ) : action.run(); + } +" +68,0," public boolean getMapperDirectoryRedirectEnabled() { return false; } +" +69,0," protected void setUpResources(JAXRSServerFactoryBean sf) { + sf.setResourceClasses(TikaResource.class); + sf.setResourceProvider(TikaResource.class, + new SingletonResourceProvider(new TikaResource())); + } + + @Override +" +70,0," public HttpBinding getBinding() { + if (this.binding == null) { + this.binding = new AttachmentHttpBinding(); + this.binding.setTransferException(isTransferException()); + if (getComponent() != null) { + this.binding.setAllowJavaSerializedObject(getComponent().isAllowJavaSerializedObject()); + } + this.binding.setHeaderFilterStrategy(getHeaderFilterStrategy()); + } + return this.binding; + } + + @Override +" +71,0," public void testNamedEntity() throws Exception { + assertU(""\n""+ + ""\n]>""+ + """"+ + ""1""+ + ""&wacky;"" + + """"); + + assertU(""""); + assertQ(req(""foo_s:zzz""), + ""//*[@numFound='1']"" + ); + } + + @Test +" +72,0," private FreeStyleProject createDownstreamProject() throws Exception { + FreeStyleProject dp = createFreeStyleProject(""downstream""); + + // Hm, no setQuietPeriod, have to submit form.. + WebClient webClient = new WebClient(); + HtmlPage page = webClient.getPage(dp,""configure""); + HtmlForm form = page.getFormByName(""config""); + form.getInputByName(""hasCustomQuietPeriod"").click(); + form.getInputByName(""quiet_period"").setValueAttribute(""0""); + submit(form); + assertEquals(""set quiet period"", 0, dp.getQuietPeriod()); + + return dp; + } + +" +73,0," public void setOkStatusCodeRange(String okStatusCodeRange) { + this.okStatusCodeRange = okStatusCodeRange; + } +" +74,0," public boolean shouldParseEmbedded(Metadata metadata) { + DocumentSelector selector = context.get(DocumentSelector.class); + if (selector != null) { + return selector.select(metadata); + } + + FilenameFilter filter = context.get(FilenameFilter.class); + if (filter != null) { + String name = metadata.get(Metadata.RESOURCE_NAME_KEY); + if (name != null) { + return filter.accept(ABSTRACT_PATH, name); + } + } + + return true; + } + +" +75,0," public void testLockTryingToDelete() throws Exception { + String lockType = ""native""; // test does not work with simple locks + Settings nodeSettings = ImmutableSettings.builder() + .put(""gateway.type"", ""local"") // don't delete things! 
+ .put(""index.store.fs.fs_lock"", lockType) + .build(); + String IDX = ""test""; + logger.info(""--> lock type: {}"", lockType); + + internalCluster().startNode(nodeSettings); + Settings idxSettings = ImmutableSettings.builder() + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .put(""index.store.fs.fs_lock"", lockType) + .build(); + + // only one node, so all primaries will end up on node1 + prepareCreate(IDX).setSettings(idxSettings).addMapping(""doc"", ""foo"", ""type=string,index=not_analyzed"").get(); + ensureGreen(IDX); + client().prepareIndex(IDX, ""doc"").setSource(""foo"", ""bar"").get(); + flushAndRefresh(IDX); + NodeEnvironment env = internalCluster().getDataNodeInstance(NodeEnvironment.class); + logger.info(""--> data paths: [{}]"", env.nodeDataPaths()); + Path[] shardPaths = env.shardDataPaths(new ShardId(""test"", 0), ImmutableSettings.builder().build()); + logger.info(""--> paths: [{}]"", shardPaths); + // Should not be able to acquire the lock because it's already open + try { + NodeEnvironment.acquireFSLockForPaths(ImmutableSettings.EMPTY, shardPaths); + fail(""should not have been able to acquire the lock""); + } catch (ElasticsearchException e) { + assertTrue(""msg: "" + e.getMessage(), e.getMessage().contains(""unable to acquire write.lock"")); + } + // Test without the regular shard lock to assume we can acquire it + // (worst case, meaning that the shard lock could be acquired and + // we're green to delete the shard's directory) + ShardLock sLock = new DummyShardLock(new ShardId(""test"", 0)); + try { + env.deleteShardDirectoryUnderLock(sLock, ImmutableSettings.builder().build()); + fail(""should not have been able to delete the directory""); + } catch (ElasticsearchException e) { + assertTrue(""msg: "" + e.getMessage(), e.getMessage().contains(""unable to acquire write.lock"")); + } + } +" +76,0," public KeyPair generateKeyPair() + { + if (!initialised) + { + Integer paramStrength = Integers.valueOf(strength); + + if (params.containsKey(paramStrength)) + { + param = (DSAKeyGenerationParameters)params.get(paramStrength); + } + else + { + synchronized (lock) + { + // we do the check again in case we were blocked by a generator for + // our key size. + if (params.containsKey(paramStrength)) + { + param = (DSAKeyGenerationParameters)params.get(paramStrength); + } + else + { + DSAParametersGenerator pGen; + DSAParameterGenerationParameters dsaParams; + + // Typical combination of keysize and size of q. + // keysize = 1024, q's size = 160 + // keysize = 2048, q's size = 224 + // keysize = 2048, q's size = 256 + // keysize = 3072, q's size = 256 + // For simplicity if keysize is greater than 1024 then we choose q's size to be 256. 
+ // For legacy keysize that is less than 1024-bit, we just use the 186-2 style parameters + if (strength == 1024) + { + pGen = new DSAParametersGenerator(); + if (Properties.isOverrideSet(""org.bouncycastle.dsa.FIPS186-2for1024bits"")) + { + pGen.init(strength, certainty, random); + } + else + { + dsaParams = new DSAParameterGenerationParameters(1024, 160, certainty, random); + pGen.init(dsaParams); + } + } + else if (strength > 1024) + { + dsaParams = new DSAParameterGenerationParameters(strength, 256, certainty, random); + pGen = new DSAParametersGenerator(new SHA256Digest()); + pGen.init(dsaParams); + } + else + { + pGen = new DSAParametersGenerator(); + pGen.init(strength, certainty, random); + } + param = new DSAKeyGenerationParameters(random, pGen.generateParameters()); + + params.put(paramStrength, param); + } + } + } + + engine.init(param); + initialised = true; + } + + AsymmetricCipherKeyPair pair = engine.generateKeyPair(); + DSAPublicKeyParameters pub = (DSAPublicKeyParameters)pair.getPublic(); + DSAPrivateKeyParameters priv = (DSAPrivateKeyParameters)pair.getPrivate(); + + return new KeyPair(new BCDSAPublicKey(pub), new BCDSAPrivateKey(priv)); + } +" +77,0," Attributes setPropertiesFromAttributes( + StylesheetHandler handler, String rawName, Attributes attributes, + ElemTemplateElement target, boolean throwError) + throws org.xml.sax.SAXException + { + + XSLTElementDef def = getElemDef(); + AttributesImpl undefines = null; + boolean isCompatibleMode = ((null != handler.getStylesheet() + && handler.getStylesheet().getCompatibleMode()) + || !throwError); + if (isCompatibleMode) + undefines = new AttributesImpl(); + + + // Keep track of which XSLTAttributeDefs have been processed, so + // I can see which default values need to be set. + List processedDefs = new ArrayList(); + + // Keep track of XSLTAttributeDefs that were invalid + List errorDefs = new ArrayList(); + int nAttrs = attributes.getLength(); + + for (int i = 0; i < nAttrs; i++) + { + String attrUri = attributes.getURI(i); + // Hack for Crimson. -sb + if((null != attrUri) && (attrUri.length() == 0) + && (attributes.getQName(i).startsWith(""xmlns:"") || + attributes.getQName(i).equals(""xmlns""))) + { + attrUri = org.apache.xalan.templates.Constants.S_XMLNAMESPACEURI; + } + String attrLocalName = attributes.getLocalName(i); + XSLTAttributeDef attrDef = def.getAttributeDef(attrUri, attrLocalName); + + if (null == attrDef) + { + if (!isCompatibleMode) + { + + // Then barf, because this element does not allow this attribute. + handler.error(XSLTErrorResources.ER_ATTR_NOT_ALLOWED, new Object[]{attributes.getQName(i), rawName}, null);//""\""""+attributes.getQName(i)+""\"""" + //+ "" attribute is not allowed on the "" + rawName + // + "" element!"", null); + } + else + { + undefines.addAttribute(attrUri, attrLocalName, + attributes.getQName(i), + attributes.getType(i), + attributes.getValue(i)); + } + } + else + { + //handle secure processing + if(handler.getStylesheetProcessor()==null) + System.out.println(""stylesheet processor null""); + if(attrDef.getName().compareTo(""*"")==0 && handler.getStylesheetProcessor().isSecureProcessing()) + { + //foreign attributes are not allowed in secure processing mode + // Then barf, because this element does not allow this attribute. 
+ handler.error(XSLTErrorResources.ER_ATTR_NOT_ALLOWED, new Object[]{attributes.getQName(i), rawName}, null);//""\""""+attributes.getQName(i)+""\"""" + //+ "" attribute is not allowed on the "" + rawName + // + "" element!"", null); + } + else + { + + + boolean success = attrDef.setAttrValue(handler, attrUri, attrLocalName, + attributes.getQName(i), attributes.getValue(i), + target); + + // Now we only add the element if it passed a validation check + if (success) + processedDefs.add(attrDef); + else + errorDefs.add(attrDef); + } + } + } + + XSLTAttributeDef[] attrDefs = def.getAttributes(); + int nAttrDefs = attrDefs.length; + + for (int i = 0; i < nAttrDefs; i++) + { + XSLTAttributeDef attrDef = attrDefs[i]; + String defVal = attrDef.getDefault(); + + if (null != defVal) + { + if (!processedDefs.contains(attrDef)) + { + attrDef.setDefAttrValue(handler, target); + } + } + + if (attrDef.getRequired()) + { + if ((!processedDefs.contains(attrDef)) && (!errorDefs.contains(attrDef))) + handler.error( + XSLMessages.createMessage( + XSLTErrorResources.ER_REQUIRES_ATTRIB, new Object[]{ rawName, + attrDef.getName() }), null); + } + } + + return undefines; + } +" +78,0," public static void beforeClass() throws Exception { + SUITE_SEED = randomLong(); + initializeSuiteScope(); + } + +" +79,0," public void connect(HttpConsumer consumer) throws Exception { + } + + /** + * Disconnects the URL specified on the endpoint from the specified processor. + * + * @param consumer the consumer + * @throws Exception can be thrown + */ +" +80,0," private void writeAttribute(SessionAttribute sessionAttribute) throws IOException { + write(""""); + writeDirectly(htmlEncodeButNotSpace(sessionAttribute.getName())); + write(""""); + write(String.valueOf(sessionAttribute.getType())); + write(""""); + if (sessionAttribute.isSerializable()) { + write(""#oui#""); + } else { + write(""#non#""); + } + write(""""); + write(integerFormat.format(sessionAttribute.getSerializedSize())); + write(""""); + writeDirectly(htmlEncodeButNotSpace(String.valueOf(sessionAttribute.getContent()))); + write(""""); + } +" +81,0," public boolean isUseRouteBuilder() { + return false; + } + + @Test + @Ignore +" +82,0," public String getDisplayName() { + return ""Developers""; + } + } +} +" +83,0," private T run(PrivilegedExceptionAction action) throws JAXBException { + try { + return System.getSecurityManager() != null ? AccessController.doPrivileged( action ) : action.run(); + } + catch ( JAXBException e ) { + throw e; + } + catch ( Exception e ) { + throw log.getErrorParsingMappingFileException( e ); + } + } + + // JAXB closes the underlying input stream +" +84,0," protected void checkIllegalTypes(DeserializationContext ctxt, JavaType type, + BeanDescription beanDesc) + throws JsonMappingException + { + // There are certain nasty classes that could cause problems, mostly + // via default typing -- catch them here. 
+ String full = type.getRawClass().getName(); + + if (_cfgIllegalClassNames.contains(full)) { + throw JsonMappingException.from(ctxt, + String.format(""Illegal type (%s) to deserialize: prevented for security reasons"", full)); + } + } +" +85,0," public FileVisitResult postVisitDirectory(Path dir, IOException ioe) + throws IOException { + // NO-OP + return FileVisitResult.CONTINUE; + }}); + } +} +" +86,0," public JettyHttpEndpoint getEndpoint() { + return (JettyHttpEndpoint) super.getEndpoint(); + } + +" +87,0," public static File randomRepoPath(Settings settings) { + Environment environment = new Environment(settings); + File[] repoFiles = environment.repoFiles(); + assert repoFiles.length > 0; + File path; + do { + path = new File(repoFiles[0], randomAsciiOfLength(10)); + } while (path.exists()); + return path; + } + +" +88,0," public void setup() { + this.request = new MockHttpServletRequest(); + this.request.setMethod(""GET""); + this.response = new MockHttpServletResponse(); + this.chain = new MockFilterChain(); + } + + @After +" +89,0," public static Context getCurrentContext() + { + return __context.get(); + } + +" +90,0," protected Log getLog() { + return log; + } + + // ----------------------------------------------------------- Constructors + + +" +91,0," protected void checkIllegalTypes(DeserializationContext ctxt, JavaType type, + BeanDescription beanDesc) + throws JsonMappingException + { + // There are certain nasty classes that could cause problems, mostly + // via default typing -- catch them here. + String full = type.getRawClass().getName(); + + if (_cfgIllegalClassNames.contains(full)) { + ctxt.reportBadTypeDefinition(beanDesc, + ""Illegal type (%s) to deserialize: prevented for security reasons"", full); + } + } +" +92,0," public String toString() { + return xpathExpression; + } +" +93,0," protected void setLocale(HttpServletRequest request) { + if (defaultLocale == null) { + defaultLocale = request.getLocale(); + } + } + +" +94,0," protected boolean isWithinLengthLimit( String name ) { + return name.length() <= paramNameMaxLength; + } + +" +95,0," private CoderResult decodeHasArray(ByteBuffer in, CharBuffer out) { + int outRemaining = out.remaining(); + int pos = in.position(); + int limit = in.limit(); + final byte[] bArr = in.array(); + final char[] cArr = out.array(); + final int inIndexLimit = limit + in.arrayOffset(); + int inIndex = pos + in.arrayOffset(); + int outIndex = out.position() + out.arrayOffset(); + // if someone would change the limit in process, + // he would face consequences + for (; inIndex < inIndexLimit && outRemaining > 0; inIndex++) { + int jchar = bArr[inIndex]; + if (jchar < 0) { + jchar = jchar & 0x7F; + int tail = remainingBytes[jchar]; + if (tail == -1) { + in.position(inIndex - in.arrayOffset()); + out.position(outIndex - out.arrayOffset()); + return CoderResult.malformedForLength(1); + } + if (inIndexLimit - inIndex < 1 + tail) { + // Apache Tomcat added tests - detect invalid sequences as + // early as possible + if (jchar == 0x74 && inIndexLimit > inIndex + 1) { + if ((bArr[inIndex + 1] & 0xFF) > 0x8F) { + // 11110100 1yyyxxxx xxxxxxxx xxxxxxxx + // Any non-zero y is > max code point + return CoderResult.unmappableForLength(4); + } + } + if (jchar == 0x60 && inIndexLimit > inIndex +1) { + if ((bArr[inIndex + 1] & 0x7F) == 0) { + // 11100000 10000000 10xxxxxx + // should have been + // 00xxxxxx + return CoderResult.malformedForLength(3); + } + } + if (jchar == 0x70 && inIndexLimit > inIndex +1) { + if ((bArr[inIndex + 1] & 0x7F) < 
0x10) { + // 11110000 1000zzzz 1oyyyyyy 1oxxxxxx + // should have been + // 111ozzzz 1oyyyyyy 1oxxxxxx + return CoderResult.malformedForLength(4); + } + } + break; + } + for (int i = 0; i < tail; i++) { + int nextByte = bArr[inIndex + i + 1] & 0xFF; + if ((nextByte & 0xC0) != 0x80) { + in.position(inIndex - in.arrayOffset()); + out.position(outIndex - out.arrayOffset()); + return CoderResult.malformedForLength(1 + i); + } + jchar = (jchar << 6) + nextByte; + } + jchar -= remainingNumbers[tail]; + if (jchar < lowerEncodingLimit[tail]) { + // Should have been encoded in fewer octets + in.position(inIndex - in.arrayOffset()); + out.position(outIndex - out.arrayOffset()); + return CoderResult.malformedForLength(1); + } + inIndex += tail; + } + // Apache Tomcat added test + if (jchar >= 0xD800 && jchar <= 0xDFFF) { + return CoderResult.unmappableForLength(3); + } + // Apache Tomcat added test + if (jchar > 0x10FFFF) { + return CoderResult.unmappableForLength(4); + } + if (jchar <= 0xffff) { + cArr[outIndex++] = (char) jchar; + outRemaining--; + } else { + if (outRemaining < 2) { + return CoderResult.OVERFLOW; + } + cArr[outIndex++] = (char) ((jchar >> 0xA) + 0xD7C0); + cArr[outIndex++] = (char) ((jchar & 0x3FF) + 0xDC00); + outRemaining -= 2; + } + } + in.position(inIndex - in.arrayOffset()); + out.position(outIndex - out.arrayOffset()); + return (outRemaining == 0 && inIndex < inIndexLimit) ? CoderResult.OVERFLOW + : CoderResult.UNDERFLOW; + } +" +96,0," public static CipherText fromPortableSerializedBytes(byte[] bytes) + throws EncryptionException + { + CipherTextSerializer cts = new CipherTextSerializer(bytes); + return cts.asCipherText(); + } + + ///////////////////////// P U B L I C M E T H O D S //////////////////// + + /** + * Obtain the String representing the cipher transformation used to encrypt + * the plaintext. The cipher transformation represents the cipher algorithm, + * the cipher mode, and the padding scheme used to do the encryption. An + * example would be ""AES/CBC/PKCS5Padding"". See Appendix A in the + * + * Java Cryptography Architecture Reference Guide + * for information about standard supported cipher transformation names. + *

+ * The cipher transformation name is usually sufficient to be passed to + * {@link javax.crypto.Cipher#getInstance(String)} to create a + * Cipher object to decrypt the ciphertext. + * + * @return The cipher transformation name used to encrypt the plaintext + * resulting in this ciphertext. + */ +" +97,0," protected void _verifyException(Throwable t, Class expExcType, + String... patterns) throws Exception + { + Class actExc = t.getClass(); + if (!expExcType.isAssignableFrom(actExc)) { + fail(""Expected Exception of type '""+expExcType.getName()+""', got '"" + +actExc.getName()+""', message: ""+t.getMessage()); + } + for (String pattern : patterns) { + verifyException(t, pattern); + } + } +" +98,0," public String getCompression() { + switch (compressionLevel) { + case 0: + return ""off""; + case 1: + return ""on""; + case 2: + return ""force""; + } + return ""off""; + } + + + /** + * Set compression level. + */ +" +99,0," public void close() throws IOException { + if (authFilter != null) { + authFilter.destroy(); + } + } +" +100,0," public void destroy() { + } + }; + +" +101,0," public O getOrBuild() { + if (isUnbuilt()) { + try { + return build(); + } + catch (Exception e) { + logger.debug(""Failed to perform build. Returning null"", e); + return null; + } + } + else { + return getObject(); + } + } + + /** + * Applies a {@link SecurityConfigurerAdapter} to this {@link SecurityBuilder} and + * invokes {@link SecurityConfigurerAdapter#setBuilder(SecurityBuilder)}. + * + * @param configurer + * @return + * @throws Exception + */ + @SuppressWarnings(""unchecked"") +" +102,0," private void testModified() + throws Exception + { + ECNamedCurveParameterSpec namedCurve = ECNamedCurveTable.getParameterSpec(""P-256""); + org.bouncycastle.jce.spec.ECPublicKeySpec pubSpec = new org.bouncycastle.jce.spec.ECPublicKeySpec(namedCurve.getCurve().createPoint(PubX, PubY), namedCurve); + KeyFactory kFact = KeyFactory.getInstance(""EC"", ""BC""); + PublicKey pubKey = kFact.generatePublic(pubSpec); + Signature sig = Signature.getInstance(""SHA256WithECDSA"", ""BC""); + + for (int i = 0; i != MODIFIED_SIGNATURES.length; i++) + { + sig.initVerify(pubKey); + + sig.update(Strings.toByteArray(""Hello"")); + + boolean failed; + + try + { + failed = !sig.verify(Hex.decode(MODIFIED_SIGNATURES[i])); + System.err.println(ASN1Dump.dumpAsString(ASN1Primitive.fromByteArray(Hex.decode(MODIFIED_SIGNATURES[i])))); + } + catch (SignatureException e) + { + failed = true; + } + + isTrue(""sig verified when shouldn't: "" + i, failed); + } + } + +" +103,0," protected void populateParams() { + super.populateParams(); + + UIBean uiBean = (UIBean) component; + uiBean.setCssClass(cssClass); + uiBean.setCssStyle(cssStyle); + uiBean.setCssErrorClass(cssErrorClass); + uiBean.setCssErrorStyle(cssErrorStyle); + uiBean.setTitle(title); + uiBean.setDisabled(disabled); + uiBean.setLabel(label); + uiBean.setLabelSeparator(labelSeparator); + uiBean.setLabelposition(labelposition); + uiBean.setRequiredPosition(requiredPosition); + uiBean.setErrorPosition(errorPosition); + uiBean.setName(name); + uiBean.setRequiredLabel(requiredLabel); + uiBean.setTabindex(tabindex); + uiBean.setValue(value); + uiBean.setTemplate(template); + uiBean.setTheme(theme); + uiBean.setTemplateDir(templateDir); + uiBean.setOnclick(onclick); + uiBean.setOndblclick(ondblclick); + uiBean.setOnmousedown(onmousedown); + uiBean.setOnmouseup(onmouseup); + uiBean.setOnmouseover(onmouseover); + uiBean.setOnmousemove(onmousemove); + uiBean.setOnmouseout(onmouseout); + 
uiBean.setOnfocus(onfocus); + uiBean.setOnblur(onblur); + uiBean.setOnkeypress(onkeypress); + uiBean.setOnkeydown(onkeydown); + uiBean.setOnkeyup(onkeyup); + uiBean.setOnselect(onselect); + uiBean.setOnchange(onchange); + uiBean.setTooltip(tooltip); + uiBean.setTooltipConfig(tooltipConfig); + uiBean.setJavascriptTooltip(javascriptTooltip); + uiBean.setTooltipCssClass(tooltipCssClass); + uiBean.setTooltipDelay(tooltipDelay); + uiBean.setTooltipIconPath(tooltipIconPath); + uiBean.setAccesskey(accesskey); + uiBean.setKey(key); + uiBean.setId(id); + + uiBean.setDynamicAttributes(dynamicAttributes); + } + +" +104,0," public long getTimestamp() { + return timestamp; + } + } +} +" +105,0," public boolean isUseRouteBuilder() { + return false; + } + + @Test +" +106,0," public ServerSocket createSocket (int port, int backlog, + InetAddress ifAddress) + throws IOException + { + if (!initialized) init(); + ServerSocket socket = sslProxy.createServerSocket(port, backlog, + ifAddress); + initServerSocket(socket); + return socket; + } + +" +107,0," public static void init() throws Exception { + + idpHttpsPort = System.getProperty(""idp.https.port""); + Assert.assertNotNull(""Property 'idp.https.port' null"", idpHttpsPort); + rpHttpsPort = System.getProperty(""rp.https.port""); + Assert.assertNotNull(""Property 'rp.https.port' null"", rpHttpsPort); + + idpServer = startServer(true, idpHttpsPort); + + WSSConfig.init(); + } + +" +108,0," public void testPrivateKeySerialisation() + throws Exception + { + String stream = ""AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAArO0ABXNyACJzdW4ucm1pLnNlcnZlci5BY3RpdmF0aW9uR3JvdXBJbXBsT+r9SAwuMqcCAARaAA1ncm91cEluYWN0aXZlTAAGYWN0aXZldAAVTGphdmEvdXRpbC9IYXNodGFibGU7TAAHZ3JvdXBJRHQAJ0xqYXZhL3JtaS9hY3RpdmF0aW9uL0FjdGl2YXRpb25Hcm91cElEO0wACWxvY2tlZElEc3QAEExqYXZhL3V0aWwvTGlzdDt4cgAjamF2YS5ybWkuYWN0aXZhdGlvbi5BY3RpdmF0aW9uR3JvdXCVLvKwBSnVVAIAA0oAC2luY2FybmF0aW9uTAAHZ3JvdXBJRHEAfgACTAAHbW9uaXRvcnQAJ0xqYXZhL3JtaS9hY3RpdmF0aW9uL0FjdGl2YXRpb25Nb25pdG9yO3hyACNqYXZhLnJtaS5zZXJ2ZXIuVW5pY2FzdFJlbW90ZU9iamVjdEUJEhX14n4xAgADSQAEcG9ydEwAA2NzZnQAKExqYXZhL3JtaS9zZXJ2ZXIvUk1JQ2xpZW50U29ja2V0RmFjdG9yeTtMAANzc2Z0AChMamF2YS9ybWkvc2VydmVyL1JNSVNlcnZlclNvY2tldEZhY3Rvcnk7eHIAHGphdmEucm1pLnNlcnZlci5SZW1vdGVTZXJ2ZXLHGQcSaPM5+wIAAHhyABxqYXZhLnJtaS5zZXJ2ZXIuUmVtb3RlT2JqZWN002G0kQxhMx4DAAB4cHcSABBVbmljYXN0U2VydmVyUmVmeAAAFbNwcAAAAAAAAAAAcHAAcHBw""; + + XMSSParameters params = new XMSSParameters(10, new SHA256Digest()); + + byte[] output = Base64.decode(new String(stream).getBytes(""UTF-8"")); + + + //Simple Exploit + + try + { + new XMSSPrivateKeyParameters.Builder(params).withPrivateKey(output, params).build(); + } + catch (IllegalArgumentException e) + { + assertTrue(e.getCause() instanceof IOException); + } + + //Same Exploit other method + + XMSS xmss2 = new XMSS(params, new SecureRandom()); + + xmss2.generateKeys(); + + byte[] publicKey = xmss2.exportPublicKey(); + + try + { + xmss2.importState(output, publicKey); + } + catch (IllegalArgumentException e) + { + assertTrue(e.getCause() instanceof IOException); + } + } + +" +109,0," public void setAllowJavaSerializedObject(boolean allowJavaSerializedObject) { + this.allowJavaSerializedObject = allowJavaSerializedObject; + } + +" +110,0," public Collection getRequiredPermissions(String regionName) { + return Collections.singletonList(new ResourcePermission(ResourcePermission.Resource.DATA, + 
ResourcePermission.Operation.READ, regionName)); + } + +" +111,0," abstract protected JDBCTableReader getTableReader(Connection connection, + String tableName, + EmbeddedDocumentUtil embeddedDocumentUtil); + +" +112,0," public FederationConfig getFederationConfig() { + return federationConfig; + } + +" +113,0," protected void doFilterInternal(HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) + throws ServletException, IOException { + + String identityZoneId = request.getHeader(HEADER); + if (StringUtils.hasText(identityZoneId)) { + if (!isAuthorizedToSwitchToIdentityZone(identityZoneId)) { + response.sendError(HttpServletResponse.SC_FORBIDDEN, ""User is not authorized to switch to IdentityZone with id ""+identityZoneId); + return; + } + IdentityZone originalIdentityZone = IdentityZoneHolder.get(); + try { + + IdentityZone identityZone = null; + try { + identityZone = dao.retrieve(identityZoneId); + } catch (ZoneDoesNotExistsException ex) { + } catch (EmptyResultDataAccessException ex) { + } catch (Exception ex) { + throw ex; + } + if (identityZone == null) { + response.sendError(HttpServletResponse.SC_NOT_FOUND, ""Identity zone with id ""+identityZoneId+"" does not exist""); + return; + } + stripScopesFromAuthentication(identityZoneId, request); + IdentityZoneHolder.set(identityZone); + filterChain.doFilter(request, response); + } finally { + IdentityZoneHolder.set(originalIdentityZone); + } + } else { + filterChain.doFilter(request, response); + } + } +" +114,0," public BeanDefinition createMatcher(ParserContext pc, String path, String method) { + if ((""/**"".equals(path) || ""**"".equals(path)) && method == null) { + return new RootBeanDefinition(AnyRequestMatcher.class); + } + + BeanDefinitionBuilder matcherBldr = BeanDefinitionBuilder + .rootBeanDefinition(type); + + if (this == mvc) { + if (!pc.getRegistry().isBeanNameInUse(HANDLER_MAPPING_INTROSPECTOR_BEAN_NAME)) { + BeanDefinitionBuilder hmifb = BeanDefinitionBuilder + .rootBeanDefinition(HandlerMappingIntrospectorFactoryBean.class); + pc.getRegistry().registerBeanDefinition(HANDLER_MAPPING_INTROSPECTOR_FACTORY_BEAN_NAME, + hmifb.getBeanDefinition()); + + RootBeanDefinition hmi = new RootBeanDefinition(HANDLER_MAPPING_INTROSPECTOR_BEAN_NAME); + hmi.setFactoryBeanName(HANDLER_MAPPING_INTROSPECTOR_FACTORY_BEAN_NAME); + hmi.setFactoryMethodName(""createHandlerMappingIntrospector""); + pc.getRegistry().registerBeanDefinition(HANDLER_MAPPING_INTROSPECTOR_BEAN_NAME, hmi); + } + matcherBldr.addConstructorArgReference(HANDLER_MAPPING_INTROSPECTOR_BEAN_NAME); + } + + matcherBldr.addConstructorArgValue(path); + if (this == mvc) { + matcherBldr.addPropertyValue(""method"", method); + } + else { + matcherBldr.addConstructorArgValue(method); + } + + if (this == ciRegex) { + matcherBldr.addConstructorArgValue(true); + } + + return matcherBldr.getBeanDefinition(); + } + +" +115,0," protected Settings nodeSettings(int nodeOrdinal) { + return ImmutableSettings.builder() + // we really need local GW here since this also checks for corruption etc. 
+ // and we need to make sure primaries are not just trashed if we don't have replicas + .put(super.nodeSettings(nodeOrdinal)).put(""gateway.type"", ""local"") + .put(TransportModule.TRANSPORT_SERVICE_TYPE_KEY, MockTransportService.class.getName()) + // speed up recoveries + .put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS, 10) + .put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, 10) + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES, 5) + .build(); + } + + /** + * Tests that we can actually recover from a corruption on the primary given that we have replica shards around. + */ + @Test +" +116,0," private void writeFederationMetadata( + XMLStreamWriter writer, Idp config, Crypto crypto + ) throws XMLStreamException { + + writer.writeStartElement(""md"", ""RoleDescriptor"", WS_FEDERATION_NS); + writer.writeAttribute(SCHEMA_INSTANCE_NS, ""type"", ""fed:SecurityTokenServiceType""); + writer.writeAttribute(""protocolSupportEnumeration"", WS_FEDERATION_NS); + if (config.getServiceDescription() != null && config.getServiceDescription().length() > 0 ) { + writer.writeAttribute(""ServiceDescription"", config.getServiceDescription()); + } + if (config.getServiceDisplayName() != null && config.getServiceDisplayName().length() > 0 ) { + writer.writeAttribute(""ServiceDisplayName"", config.getServiceDisplayName()); + } + + //http://docs.oasis-open.org/security/saml/v2.0/saml-schema-metadata-2.0.xsd + //missing organization, contactperson + + //KeyDescriptor + writer.writeStartElement("""", ""KeyDescriptor"", SAML2_METADATA_NS); + writer.writeAttribute(""use"", ""signing""); + writer.writeStartElement("""", ""KeyInfo"", ""http://www.w3.org/2000/09/xmldsig#""); + writer.writeStartElement("""", ""X509Data"", ""http://www.w3.org/2000/09/xmldsig#""); + writer.writeStartElement("""", ""X509Certificate"", ""http://www.w3.org/2000/09/xmldsig#""); + + try { + String keyAlias = crypto.getDefaultX509Identifier(); + X509Certificate cert = CertsUtils.getX509Certificate(crypto, keyAlias); + writer.writeCharacters(Base64.encode(cert.getEncoded())); + } catch (Exception ex) { + LOG.error(""Failed to add certificate information to metadata. 
Metadata incomplete"", ex); + } + + writer.writeEndElement(); // X509Certificate + writer.writeEndElement(); // X509Data + writer.writeEndElement(); // KeyInfo + writer.writeEndElement(); // KeyDescriptor + + + // SecurityTokenServiceEndpoint + writer.writeStartElement(""fed"", ""SecurityTokenServiceEndpoint"", WS_FEDERATION_NS); + writer.writeStartElement(""wsa"", ""EndpointReference"", WS_ADDRESSING_NS); + + writer.writeStartElement(""wsa"", ""Address"", WS_ADDRESSING_NS); + writer.writeCharacters(config.getStsUrl().toString()); + + writer.writeEndElement(); // Address + writer.writeEndElement(); // EndpointReference + writer.writeEndElement(); // SecurityTokenServiceEndpoint + + + // PassiveRequestorEndpoint + writer.writeStartElement(""fed"", ""PassiveRequestorEndpoint"", WS_FEDERATION_NS); + writer.writeStartElement(""wsa"", ""EndpointReference"", WS_ADDRESSING_NS); + + writer.writeStartElement(""wsa"", ""Address"", WS_ADDRESSING_NS); + writer.writeCharacters(config.getIdpUrl().toString()); + + writer.writeEndElement(); // Address + writer.writeEndElement(); // EndpointReference + writer.writeEndElement(); // PassiveRequestorEndpoint + + + // create ClaimsType section + if (config.getClaimTypesOffered() != null && config.getClaimTypesOffered().size() > 0) { + writer.writeStartElement(""fed"", ""ClaimTypesOffered"", WS_FEDERATION_NS); + for (Claim claim : config.getClaimTypesOffered()) { + + writer.writeStartElement(""auth"", ""ClaimType"", WS_FEDERATION_NS); + writer.writeAttribute(""Uri"", claim.getClaimType().toString()); + writer.writeAttribute(""Optional"", ""true""); + writer.writeEndElement(); // ClaimType + + } + writer.writeEndElement(); // ClaimTypesOffered + } + + writer.writeEndElement(); // RoleDescriptor + } + + +" +117,0," public Set getSupportedTypes(ParseContext context) { + return null; + } + + @Override +" +118,0," public Container getContainer() { + + return (container); + + } + + + /** + * Set the Container with which this Realm has been associated. + * + * @param container The associated Container + */ +" +119,0," public Set getSupportedTypes(ParseContext context) { + return null; + } + + @Override +" +120,0," public void testSendEntityMessage() throws Exception { + + MockEndpoint endpoint = getMockEndpoint(""mock:result""); + endpoint.expectedMessageCount(1); + //String message = ""]>&xxe;""; + + String message = """"; + template.sendBody(""direct:start2"", message); + + assertMockEndpointsSatisfied(); + + List list = endpoint.getReceivedExchanges(); + Exchange exchange = list.get(0); + String xml = exchange.getIn().getBody(String.class); + + System.out.println(xml); + } + +" +121,0," public void execute(FunctionContext context) { + RegionFunctionContext rfc = (RegionFunctionContext) context; + context.getResultSender().lastResult(rfc.getDataSet().size()); + } + + @Override +" +122,0," public DescriptorImpl getDescriptor() { + return Hudson.getInstance().getDescriptorByType(DescriptorImpl.class); + } + + /** + * Returns the Missed Events playback manager. 
+ * @return GerritMissedEventsPlaybackManager + */ +" +123,0," public void setAllowJavaSerializedObject(boolean allowJavaSerializedObject) { + this.allowJavaSerializedObject = allowJavaSerializedObject; + } + +" +124,0," public int doRead(ByteChunk chunk, Request req) throws IOException { + checkError(); + + if (endChunk) { + return -1; + } + + if(needCRLFParse) { + needCRLFParse = false; + parseCRLF(false); + } + + if (remaining <= 0) { + if (!parseChunkHeader()) { + throwIOException(sm.getString(""chunkedInputFilter.invalidHeader"")); + } + if (endChunk) { + parseEndChunk(); + return -1; + } + } + + int result = 0; + + if (pos >= lastValid) { + if (readBytes() < 0) { + throwIOException(sm.getString(""chunkedInputFilter.eos"")); + } + } + + if (remaining > (lastValid - pos)) { + result = lastValid - pos; + remaining = remaining - result; + chunk.setBytes(buf, pos, result); + pos = lastValid; + } else { + result = remaining; + chunk.setBytes(buf, pos, remaining); + pos = pos + remaining; + remaining = 0; + //we need a CRLF + if ((pos+1) >= lastValid) { + //if we call parseCRLF we overrun the buffer here + //so we defer it to the next call BZ 11117 + needCRLFParse = true; + } else { + parseCRLF(false); //parse the CRLF immediately + } + } + + return result; + } + + + // ---------------------------------------------------- InputFilter Methods + + /** + * Read the content length from the request. + */ + @Override +" +125,0," public byte getValue() { return value; } +" +126,0," public final Permission getRequiredPermission() { + return Jenkins.ADMINISTER; + } + + /** + * Starts the server's project list updater, send command queue and event manager. + * + */ +" +127,0," public Object compile(String expression, Map context) throws OgnlException { + Object tree; + if (enableExpressionCache) { + tree = expressions.get(expression); + if (tree == null) { + tree = Ognl.parseExpression(expression); + expressions.putIfAbsent(expression, tree); + } + } else { + tree = Ognl.parseExpression(expression); + } + + if (!enableEvalExpression && isEvalExpression(tree, context)) { + throw new OgnlException(""Eval expressions has been disabled""); + } + + return tree; + } + + /** + * Copies the properties in the object ""from"" and sets them in the object ""to"" + * using specified type converter, or {@link com.opensymphony.xwork2.conversion.impl.XWorkConverter} if none + * is specified. + * + * @param from the source object + * @param to the target object + * @param context the action context we're running under + * @param exclusions collection of method names to excluded from copying ( can be null) + * @param inclusions collection of method names to included copying (can be null) + * note if exclusions AND inclusions are supplied and not null nothing will get copied. 
+ */ +" +128,0," public SolrInputDocument readDoc(XMLStreamReader parser) throws XMLStreamException { + SolrInputDocument doc = new SolrInputDocument(); + + String attrName = """"; + for (int i = 0; i < parser.getAttributeCount(); i++) { + attrName = parser.getAttributeLocalName(i); + if (""boost"".equals(attrName)) { + doc.setDocumentBoost(Float.parseFloat(parser.getAttributeValue(i))); + } else { + log.warn(""Unknown attribute doc/@"" + attrName); + } + } + + StringBuilder text = new StringBuilder(); + String name = null; + float boost = 1.0f; + boolean isNull = false; + String update = null; + + while (true) { + int event = parser.next(); + switch (event) { + // Add everything to the text + case XMLStreamConstants.SPACE: + case XMLStreamConstants.CDATA: + case XMLStreamConstants.CHARACTERS: + text.append(parser.getText()); + break; + + case XMLStreamConstants.END_ELEMENT: + if (""doc"".equals(parser.getLocalName())) { + return doc; + } else if (""field"".equals(parser.getLocalName())) { + Object v = isNull ? null : text.toString(); + if (update != null) { + Map extendedValue = new HashMap(1); + extendedValue.put(update, v); + v = extendedValue; + } + doc.addField(name, v, boost); + boost = 1.0f; + } + break; + + case XMLStreamConstants.START_ELEMENT: + text.setLength(0); + String localName = parser.getLocalName(); + if (!""field"".equals(localName)) { + log.warn(""unexpected XML tag doc/"" + localName); + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + ""unexpected XML tag doc/"" + localName); + } + boost = 1.0f; + update = null; + String attrVal = """"; + for (int i = 0; i < parser.getAttributeCount(); i++) { + attrName = parser.getAttributeLocalName(i); + attrVal = parser.getAttributeValue(i); + if (""name"".equals(attrName)) { + name = attrVal; + } else if (""boost"".equals(attrName)) { + boost = Float.parseFloat(attrVal); + } else if (""null"".equals(attrName)) { + isNull = StrUtils.parseBoolean(attrVal); + } else if (""update"".equals(attrName)) { + update = attrVal; + } else { + log.warn(""Unknown attribute doc/field/@"" + attrName); + } + } + break; + } + } + } +" +129,0," private AsymmetricCipherKeyPair genKeyPair() + { + if (!initialized) + { + initializeDefault(); + } + + // initialize authenticationPaths and treehash instances + byte[][][] currentAuthPaths = new byte[numLayer][][]; + byte[][][] nextAuthPaths = new byte[numLayer - 1][][]; + Treehash[][] currentTreehash = new Treehash[numLayer][]; + Treehash[][] nextTreehash = new Treehash[numLayer - 1][]; + + Vector[] currentStack = new Vector[numLayer]; + Vector[] nextStack = new Vector[numLayer - 1]; + + Vector[][] currentRetain = new Vector[numLayer][]; + Vector[][] nextRetain = new Vector[numLayer - 1][]; + + for (int i = 0; i < numLayer; i++) + { + currentAuthPaths[i] = new byte[heightOfTrees[i]][mdLength]; + currentTreehash[i] = new Treehash[heightOfTrees[i] - K[i]]; + + if (i > 0) + { + nextAuthPaths[i - 1] = new byte[heightOfTrees[i]][mdLength]; + nextTreehash[i - 1] = new Treehash[heightOfTrees[i] - K[i]]; + } + + currentStack[i] = new Vector(); + if (i > 0) + { + nextStack[i - 1] = new Vector(); + } + } + + // initialize roots + byte[][] currentRoots = new byte[numLayer][mdLength]; + byte[][] nextRoots = new byte[numLayer - 1][mdLength]; + // initialize seeds + byte[][] seeds = new byte[numLayer][mdLength]; + // initialize seeds[] by copying starting-seeds of first trees of each + // layer + for (int i = 0; i < numLayer; i++) + { + System.arraycopy(currentSeeds[i], 0, seeds[i], 0, mdLength); + } + + // 
initialize rootSigs + currentRootSigs = new byte[numLayer - 1][mdLength]; + + // ------------------------- + // ------------------------- + // --- calculation of current authpaths and current rootsigs (AUTHPATHS, + // SIG)------ + // from bottom up to the root + for (int h = numLayer - 1; h >= 0; h--) + { + GMSSRootCalc tree; + + // on lowest layer no lower root is available, so just call + // the method with null as first parameter + if (h == numLayer - 1) + { + tree = this.generateCurrentAuthpathAndRoot(null, currentStack[h], seeds[h], h); + } + else + // otherwise call the method with the former computed root + // value + { + tree = this.generateCurrentAuthpathAndRoot(currentRoots[h + 1], currentStack[h], seeds[h], h); + } + + // set initial values needed for the private key construction + for (int i = 0; i < heightOfTrees[h]; i++) + { + System.arraycopy(tree.getAuthPath()[i], 0, currentAuthPaths[h][i], 0, mdLength); + } + currentRetain[h] = tree.getRetain(); + currentTreehash[h] = tree.getTreehash(); + System.arraycopy(tree.getRoot(), 0, currentRoots[h], 0, mdLength); + } + + // --- calculation of next authpaths and next roots (AUTHPATHS+, ROOTS+) + // ------ + for (int h = numLayer - 2; h >= 0; h--) + { + GMSSRootCalc tree = this.generateNextAuthpathAndRoot(nextStack[h], seeds[h + 1], h + 1); + + // set initial values needed for the private key construction + for (int i = 0; i < heightOfTrees[h + 1]; i++) + { + System.arraycopy(tree.getAuthPath()[i], 0, nextAuthPaths[h][i], 0, mdLength); + } + nextRetain[h] = tree.getRetain(); + nextTreehash[h] = tree.getTreehash(); + System.arraycopy(tree.getRoot(), 0, nextRoots[h], 0, mdLength); + + // create seed for the Merkle tree after next (nextNextSeeds) + // SEEDs++ + System.arraycopy(seeds[h + 1], 0, this.nextNextSeeds[h], 0, mdLength); + } + // ------------ + + // generate JDKGMSSPublicKey + GMSSPublicKeyParameters publicKey = new GMSSPublicKeyParameters(currentRoots[0], gmssPS); + + // generate the JDKGMSSPrivateKey + GMSSPrivateKeyParameters privateKey = new GMSSPrivateKeyParameters(currentSeeds, nextNextSeeds, currentAuthPaths, + nextAuthPaths, currentTreehash, nextTreehash, currentStack, nextStack, currentRetain, nextRetain, nextRoots, currentRootSigs, gmssPS, digestProvider); + + // return the KeyPair + return (new AsymmetricCipherKeyPair(publicKey, privateKey)); + } + + /** + * calculates the authpath for tree in layer h which starts with seed[h] + * additionally computes the rootSignature of underlaying root + * + * @param currentStack stack used for the treehash instance created by this method + * @param lowerRoot stores the root of the lower tree + * @param seed starting seeds + * @param h actual layer + */ +" +130,0," public static void redirectToSavedRequest(ServletRequest request, ServletResponse response, String fallbackUrl) + throws IOException { + String successUrl = null; + boolean contextRelative = true; + SavedRequest savedRequest = WebUtils.getAndClearSavedRequest(request); + if (savedRequest != null && savedRequest.getMethod().equalsIgnoreCase(AccessControlFilter.GET_METHOD)) { + successUrl = savedRequest.getRequestUrl(); + contextRelative = false; + } + + if (successUrl == null) { + successUrl = fallbackUrl; + } + + if (successUrl == null) { + throw new IllegalStateException(""Success URL not available via saved request or via the "" + + ""successUrlFallback method parameter. 
One of these must be non-null for "" + + ""issueSuccessRedirect() to work.""); + } + + WebUtils.issueRedirect(request, response, successUrl, null, contextRelative); + } + +" +131,0," private void doTestRewrite(String config, String request, String expectedURI) throws Exception { + Tomcat tomcat = getTomcatInstance(); + + // No file system docBase required + Context ctx = tomcat.addContext("""", null); + + RewriteValve rewriteValve = new RewriteValve(); + ctx.getPipeline().addValve(rewriteValve); + + rewriteValve.setConfiguration(config); + + // Note: URLPatterns should be URL encoded + // (http://svn.apache.org/r285186) + Tomcat.addServlet(ctx, ""snoop"", new SnoopServlet()); + ctx.addServletMapping(""/a/%255A"", ""snoop""); + ctx.addServletMapping(""/c/*"", ""snoop""); + Tomcat.addServlet(ctx, ""default"", new DefaultServlet()); + ctx.addServletMapping(""/"", ""default""); + + tomcat.start(); + + ByteChunk res = getUrl(""http://localhost:"" + getPort() + request); + + String body = res.toString(); + RequestDescriptor requestDesc = SnoopResult.parse(body); + String requestURI = requestDesc.getRequestInfo(""REQUEST-URI""); + Assert.assertEquals(expectedURI, requestURI); + } +" +132,0," protected void setLocation(String location) { + this.location = location; + } + +" +133,0," protected String adjustFilterForJoin(String filter) { + if (StringUtils.hasText(filter)) { + filter = filter.replace(""displayName"", ""g.displayName""); + filter = filter.replace(""externalGroup"", ""gm.external_group""); + filter = filter.replace(""groupId"", ""g.id""); + filter = filter.replace(""origin"", ""gm.origin""); + } + return filter; + } + + @Override +" +134,0," private static void disableFeature(DocumentBuilderFactory dbfactory, String feature) { + try { + dbfactory.setFeature(feature, true); + } catch (ParserConfigurationException e) { + // This should catch a failed setFeature feature + log.info(""ParserConfigurationException was thrown. 
The feature '"" + + feature + ""' is probably not supported by your XML processor.""); + } + } +" +135,0," private static LinkedHashMap> processMap( + LinkedHashMap> requestMap, + ExpressionParser parser) { + Assert.notNull(parser, ""SecurityExpressionHandler returned a null parser object""); + + LinkedHashMap> requestToExpressionAttributesMap = new LinkedHashMap>( + requestMap); + + for (Map.Entry> entry : requestMap + .entrySet()) { + RequestMatcher request = entry.getKey(); + Assert.isTrue(entry.getValue().size() == 1, + ""Expected a single expression attribute for "" + request); + ArrayList attributes = new ArrayList(1); + String expression = entry.getValue().toArray(new ConfigAttribute[1])[0] + .getAttribute(); + logger.debug(""Adding web access control expression '"" + expression + ""', for "" + + request); + + AbstractVariableEvaluationContextPostProcessor postProcessor = createPostProcessor( + request); + try { + attributes.add(new WebExpressionConfigAttribute( + parser.parseExpression(expression), postProcessor)); + } + catch (ParseException e) { + throw new IllegalArgumentException( + ""Failed to parse expression '"" + expression + ""'""); + } + + requestToExpressionAttributesMap.put(request, attributes); + } + + return requestToExpressionAttributesMap; + } + +" +136,0," public T create() { + final ByteArrayOutputStream baos = new ByteArrayOutputStream(512); + ByteArrayInputStream bais = null; + try { + final ObjectOutputStream out = new ObjectOutputStream(baos); + out.writeObject(iPrototype); + + bais = new ByteArrayInputStream(baos.toByteArray()); + final ObjectInputStream in = new ObjectInputStream(bais); + return (T) in.readObject(); + + } catch (final ClassNotFoundException ex) { + throw new FunctorException(ex); + } catch (final IOException ex) { + throw new FunctorException(ex); + } finally { + try { + if (bais != null) { + bais.close(); + } + } catch (final IOException ex) { + // ignore + } + try { + baos.close(); + } catch (final IOException ex) { + // ignore + } + } + } + } + +} +" +137,0," private File getFile(String fn) throws IOException { + File tmp = null; + + String path = System.getProperty(""java.io.tmpdir"") + TMP; + File dir = new File(path); + if (!dir.exists()) { + LOGGER.fine(""Creating directory. 
Path: "" + dir.getCanonicalPath()); + Files.createDirectories(dir.toPath()); + } + tmp = new File(dir, fn); + LOGGER.fine(""Temp file: "" + tmp.getCanonicalPath()); + + return tmp; + } +" +138,0," private Object readResolve() { + throw new UnsupportedOperationException(); + } + + /* Traverseproc implementation */ + @Override +" +139,0," public Document getMetaData(Idp config, TrustedIdp serviceConfig) throws ProcessingException { + + try { + Crypto crypto = CertsUtils.createCrypto(config.getCertificate()); + + W3CDOMStreamWriter writer = new W3CDOMStreamWriter(); + + writer.writeStartDocument(""UTF-8"", ""1.0""); + + String referenceID = IDGenerator.generateID(""_""); + writer.writeStartElement(""md"", ""EntityDescriptor"", SAML2_METADATA_NS); + writer.writeAttribute(""ID"", referenceID); + + String serviceURL = config.getIdpUrl().toString(); + writer.writeAttribute(""entityID"", serviceURL); + + writer.writeNamespace(""md"", SAML2_METADATA_NS); + writer.writeNamespace(""fed"", WS_FEDERATION_NS); + writer.writeNamespace(""wsa"", WS_ADDRESSING_NS); + writer.writeNamespace(""auth"", WS_FEDERATION_NS); + writer.writeNamespace(""xsi"", SCHEMA_INSTANCE_NS); + + if (""http://docs.oasis-open.org/wsfed/federation/200706"".equals(serviceConfig.getProtocol())) { + writeFederationMetadata(writer, serviceConfig, serviceURL); + } else if (""urn:oasis:names:tc:SAML:2.0:profiles:SSO:browser"".equals(serviceConfig.getProtocol())) { + writeSAMLMetadata(writer, serviceConfig, serviceURL, crypto); + } + + writer.writeEndElement(); // EntityDescriptor + + writer.writeEndDocument(); + + writer.close(); + + if (LOG.isDebugEnabled()) { + String out = DOM2Writer.nodeToString(writer.getDocument()); + LOG.debug(""***************** unsigned ****************""); + LOG.debug(out); + LOG.debug(""***************** unsigned ****************""); + } + + Document result = SignatureUtils.signMetaInfo(crypto, null, config.getCertificatePassword(), + writer.getDocument(), referenceID); + if (result != null) { + return result; + } else { + throw new RuntimeException(""Failed to sign the metadata document: result=null""); + } + } catch (ProcessingException e) { + throw e; + } catch (Exception e) { + LOG.error(""Error creating service metadata information "", e); + throw new ProcessingException(""Error creating service metadata information: "" + e.getMessage()); + } + + } + +" +140,0," protected BlobStore blobStore() { + return blobStore; + } + + /** + * {@inheritDoc} + */ + @Override +" +141,0," public void recycle(boolean socketClosing) { + super.recycle(socketClosing); + + inputBuffer.clear(); + inputBuffer.limit(0); + outputBuffer.clear(); + + } +" +142,0," public void winzipBackSlashWorkaround() throws Exception { + ZipArchiveInputStream in = null; + try { + in = new ZipArchiveInputStream(new FileInputStream(getFile(""test-winzip.zip""))); + ZipArchiveEntry zae = in.getNextZipEntry(); + zae = in.getNextZipEntry(); + zae = in.getNextZipEntry(); + assertEquals(""\u00e4/"", zae.getName()); + } finally { + if (in != null) { + in.close(); + } + } + } + + /** + * @see ""https://issues.apache.org/jira/browse/COMPRESS-189"" + */ + @Test +" +143,0," public long getAvailable() { + + return (this.available); + + } + + + /** + * Set the available date/time for this servlet, in milliseconds since the + * epoch. If this date/time is Long.MAX_VALUE, it is considered to mean + * that unavailability is permanent and any request for this servlet will return + * an SC_NOT_FOUND error. 
If this date/time is in the future, any request for + * this servlet will return an SC_SERVICE_UNAVAILABLE error. + * + * @param available The new available date/time + */ + @Override +" +144,0," public String toString() + { + return name; + } + } + +} +" +145,0," public T transform(final Class input) { + try { + if (input == null) { + throw new FunctorException( + ""InstantiateTransformer: Input object was not an instanceof Class, it was a null object""); + } + final Constructor con = input.getConstructor(iParamTypes); + return con.newInstance(iArgs); + } catch (final NoSuchMethodException ex) { + throw new FunctorException(""InstantiateTransformer: The constructor must exist and be public ""); + } catch (final InstantiationException ex) { + throw new FunctorException(""InstantiateTransformer: InstantiationException"", ex); + } catch (final IllegalAccessException ex) { + throw new FunctorException(""InstantiateTransformer: Constructor must be public"", ex); + } catch (final InvocationTargetException ex) { + throw new FunctorException(""InstantiateTransformer: Constructor threw an exception"", ex); + } + } + +" +146,0," public String getTreeDigest() + { + return DigestUtil.getXMSSDigestName(treeDigest); + } +" +147,0," public void run() { + try { + LOGGER.info(""Initiating a re-keying of secrets. See ""+getLogFile()); + StreamTaskListener listener = new StreamTaskListener(getLogFile()); + try { + PrintStream log = listener.getLogger(); + log.println(""Started re-keying "" + new Date()); + int count = rewriter.rewriteRecursive(Jenkins.getInstance().getRootDir(), listener); + log.printf(""Completed re-keying %d files on %s\n"",count,new Date()); + new RekeySecretAdminMonitor().done.on(); + LOGGER.info(""Secret re-keying completed""); + } catch (Exception e) { + LOGGER.log(Level.SEVERE, ""Fatal failure in re-keying secrets"",e); + e.printStackTrace(listener.error(""Fatal failure in rewriting secrets"")); + } + } catch (IOException e) { + LOGGER.log(Level.SEVERE, ""Catastrophic failure to rewrite secrets"",e); + } + } + } + + private static final Logger LOGGER = Logger.getLogger(RekeySecretAdminMonitor.class.getName()); + +} +" +148,0," protected BlobPath basePath() { + return basePath; + } +" +149,0," public ServerSocket createServerSocket(int port) throws IOException { + SSLServerSocket sslServerSocket = + (SSLServerSocket) sslServerSocketFactory.createServerSocket(port, 0, bindAddress); + if (getEnabledCipherSuites() != null) { + sslServerSocket.setEnabledCipherSuites(getEnabledCipherSuites()); + } + if (getEnabledProtocols() == null) { + sslServerSocket.setEnabledProtocols(defaultProtocols); + } else { + sslServerSocket.setEnabledProtocols(getEnabledProtocols()); + } + sslServerSocket.setNeedClientAuth(getNeedClientAuth()); + return sslServerSocket; + } + } +} +" +150,0," protected boolean statusDropsConnection(int status) { + return status == 400 /* SC_BAD_REQUEST */ || + status == 408 /* SC_REQUEST_TIMEOUT */ || + status == 411 /* SC_LENGTH_REQUIRED */ || + status == 413 /* SC_REQUEST_ENTITY_TOO_LARGE */ || + status == 414 /* SC_REQUEST_URI_TOO_LARGE */ || + status == 500 /* SC_INTERNAL_SERVER_ERROR */ || + status == 503 /* SC_SERVICE_UNAVAILABLE */ || + status == 501 /* SC_NOT_IMPLEMENTED */; + } + +" +151,0," public void setExpressionParser(ExpressionParser expressionParser) { + this.expressionParser = expressionParser; + } + + /** + * Sets the service to use to expose formatters for field values. 
+ * @param conversionService the conversion service + */ +" +152,0," void setAllowJavaSerializedObject(boolean allowJavaSerializedObject); + + /** + * Gets the header filter strategy + * + * @return the strategy + */ +" +153,0," protected String savedRequestURL(Session session) { + + SavedRequest saved = + (SavedRequest) session.getNote(Constants.FORM_REQUEST_NOTE); + if (saved == null) { + return (null); + } + StringBuilder sb = new StringBuilder(saved.getRequestURI()); + if (saved.getQueryString() != null) { + sb.append('?'); + sb.append(saved.getQueryString()); + } + return (sb.toString()); + + } + + +" +154,0," public static String getAttributeValueEmptyNull(Element e, String attributeName) { + Attr node = e.getAttributeNode(attributeName); + if (node == null) { + return null; + } + return node.getValue(); + } + + /** + * Get the trimmed text content of a node or null if there is no text + */ +" +155,0," private boolean evaluate(String text) { + try { + InputSource inputSource = new InputSource(new StringReader(text)); + Document inputDocument = builder.parse(inputSource); + return ((Boolean) xpath.evaluate(xpathExpression, inputDocument, XPathConstants.BOOLEAN)).booleanValue(); + } catch (Exception e) { + return false; + } + } + + @Override +" +156,0," public void destroy() { + // NO-OP + } + +" +157,0," public void initialize( + AlgorithmParameterSpec params, + SecureRandom random) + throws InvalidAlgorithmParameterException + { + if (!(params instanceof DSAParameterSpec)) + { + throw new InvalidAlgorithmParameterException(""parameter object not a DSAParameterSpec""); + } + DSAParameterSpec dsaParams = (DSAParameterSpec)params; + + param = new DSAKeyGenerationParameters(random, new DSAParameters(dsaParams.getP(), dsaParams.getQ(), dsaParams.getG())); + + engine.init(param); + initialised = true; + } + +" +158,0," public static String nodeMode() { + Builder builder = ImmutableSettings.builder(); + if (Strings.isEmpty(System.getProperty(""es.node.mode"")) && Strings.isEmpty(System.getProperty(""es.node.local""))) { + return ""local""; // default if nothing is specified + } + if (Strings.hasLength(System.getProperty(""es.node.mode""))) { + builder.put(""node.mode"", System.getProperty(""es.node.mode"")); + } + if (Strings.hasLength(System.getProperty(""es.node.local""))) { + builder.put(""node.local"", System.getProperty(""es.node.local"")); + } + if (DiscoveryNode.localNode(builder.build())) { + return ""local""; + } else { + return ""network""; + } + } + + @Override +" +159,0," public AsymmetricCipherKeyPair generateKeyPair() + { + return genKeyPair(); + } +" +160,0," public synchronized void save() throws IOException { + super.save(); + updateTransientActions(); + } + + @Override +" +161,0," public static ASN1Integer getInstance( + ASN1TaggedObject obj, + boolean explicit) + { + ASN1Primitive o = obj.getObject(); + + if (explicit || o instanceof ASN1Integer) + { + return getInstance(o); + } + else + { + return new ASN1Integer(ASN1OctetString.getInstance(obj.getObject()).getOctets()); + } + } + +" +162,0," public void testNoRewrite() throws Exception { + doTestRewrite("""", ""/a/%255A"", ""/a/%255A""); + } + + @Test +" +163,0," public void execute(String key, ActionMapping mapping) { + String name = key.substring(ACTION_PREFIX.length()); + if (allowDynamicMethodCalls) { + int bang = name.indexOf('!'); + if (bang != -1) { + String method = name.substring(bang + 1); + mapping.setMethod(method); + name = name.substring(0, bang); + } + } + mapping.setName(cleanupActionName(name)); + } + 
}); + + } + }; + } + + /** + * Adds a parameter action. Should only be called during initialization + * + * @param prefix The string prefix to trigger the action + * @param parameterAction The parameter action to execute + * @since 2.1.0 + */ +" +164,0," public Collection getRequiredPermissions(String regionName) { + return Collections.singletonList(new ResourcePermission(ResourcePermission.Resource.DATA, + ResourcePermission.Operation.READ, regionName)); + } + +" +165,0," public T create() { + // needed for post-serialization + if (iConstructor == null) { + findConstructor(); + } + + try { + return iConstructor.newInstance(iArgs); + } catch (final InstantiationException ex) { + throw new FunctorException(""InstantiateFactory: InstantiationException"", ex); + } catch (final IllegalAccessException ex) { + throw new FunctorException(""InstantiateFactory: Constructor must be public"", ex); + } catch (final InvocationTargetException ex) { + throw new FunctorException(""InstantiateFactory: Constructor threw an exception"", ex); + } + } + +" +166,0," public void parseEmbedded( + InputStream stream, ContentHandler handler, Metadata metadata, boolean outputHtml) + throws SAXException, IOException { + if(outputHtml) { + AttributesImpl attributes = new AttributesImpl(); + attributes.addAttribute("""", ""class"", ""class"", ""CDATA"", ""package-entry""); + handler.startElement(XHTML, ""div"", ""div"", attributes); + } + + String name = metadata.get(Metadata.RESOURCE_NAME_KEY); + if (name != null && name.length() > 0 && outputHtml) { + handler.startElement(XHTML, ""h1"", ""h1"", new AttributesImpl()); + char[] chars = name.toCharArray(); + handler.characters(chars, 0, chars.length); + handler.endElement(XHTML, ""h1"", ""h1""); + } + + // Use the delegate parser to parse this entry + try (TemporaryResources tmp = new TemporaryResources()) { + final TikaInputStream newStream = TikaInputStream.get(new CloseShieldInputStream(stream), tmp); + if (stream instanceof TikaInputStream) { + final Object container = ((TikaInputStream) stream).getOpenContainer(); + if (container != null) { + newStream.setOpenContainer(container); + } + } + DELEGATING_PARSER.parse( + newStream, + new EmbeddedContentHandler(new BodyContentHandler(handler)), + metadata, context); + } catch (EncryptedDocumentException ede) { + // TODO: can we log a warning that we lack the password? + // For now, just skip the content + } catch (CorruptedFileException e) { + throw new IOExceptionWithCause(e); + } catch (TikaException e) { + // TODO: can we log a warning somehow? 
+ // Could not parse the entry, just skip the content + } + + if(outputHtml) { + handler.endElement(XHTML, ""div"", ""div""); + } + } + +" +167,0," private void checkParams() + { + if (vi == null) + { + throw new IllegalArgumentException(""no layers defined.""); + } + if (vi.length > 1) + { + for (int i = 0; i < vi.length - 1; i++) + { + if (vi[i] >= vi[i + 1]) + { + throw new IllegalArgumentException( + ""v[i] has to be smaller than v[i+1]""); + } + } + } + else + { + throw new IllegalArgumentException( + ""Rainbow needs at least 1 layer, such that v1 < v2.""); + } + } + + /** + * Getter for the number of layers + * + * @return the number of layers + */ +" +168,0," public void markPaused() { + paused = true; + } + } + + // ---------------------------------------------------- Wrapper Inner Class + + + protected static class MappedWrapper extends MapElement { + + public final boolean jspWildCard; + public final boolean resourceOnly; + + public MappedWrapper(String name, Wrapper wrapper, boolean jspWildCard, + boolean resourceOnly) { + super(name, wrapper); + this.jspWildCard = jspWildCard; + this.resourceOnly = resourceOnly; + } + } +} +" +169,0," public String getUsername() { + return username; + } + +" +170,0," public boolean isMapHeaders() { + return mapHeaders; + } + + /** + * If this option is enabled, then during binding from Spark to Camel Message then the headers will be mapped as well + * (eg added as header to the Camel Message as well). You can turn off this option to disable this. + * The headers can still be accessed from the org.apache.camel.component.sparkrest.SparkMessage message with the + * method getRequest() that returns the Spark HTTP request instance. + */ +" +171,0," public int[] getVi() + { + return this.vi; + } +" +172,0," public void setUp() throws Exception { + int randomInt = new SecureRandom().nextInt(); + + String adminAccessToken = testClient.getOAuthAccessToken(""admin"", ""adminsecret"", ""client_credentials"", ""clients.read clients.write clients.secret""); + + String scimClientId = ""scim"" + randomInt; + testClient.createScimClient(adminAccessToken, scimClientId); + + String scimAccessToken = testClient.getOAuthAccessToken(scimClientId, ""scimsecret"", ""client_credentials"", ""scim.read scim.write password.write""); + + userEmail = ""user"" + randomInt + ""@example.com""; + testClient.createUser(scimAccessToken, userEmail, userEmail, PASSWORD, true); + } + + @Test +" +173,0," public final void invoke(Request request, Response response) + throws IOException, ServletException { + + // Select the Context to be used for this Request + Context context = request.getContext(); + if (context == null) { + response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, + sm.getString(""standardHost.noContext"")); + return; + } + + if (request.isAsyncSupported()) { + request.setAsyncSupported(context.getPipeline().isAsyncSupported()); + } + + boolean asyncAtStart = request.isAsync(); + boolean asyncDispatching = request.isAsyncDispatching(); + + try { + context.bind(Globals.IS_SECURITY_ENABLED, MY_CLASSLOADER); + + if (!asyncAtStart && !context.fireRequestInitEvent(request.getRequest())) { + // Don't fire listeners during async processing (the listener + // fired for the request that called startAsync()). + // If a request init listener throws an exception, the request + // is aborted. + return; + } + + // Ask this Context to process this request. 
Requests that are in + // async mode and are not being dispatched to this resource must be + // in error and have been routed here to check for application + // defined error pages. + try { + if (!asyncAtStart || asyncDispatching) { + context.getPipeline().getFirst().invoke(request, response); + } else { + // Make sure this request/response is here because an error + // report is required. + if (!response.isErrorReportRequired()) { + throw new IllegalStateException(sm.getString(""standardHost.asyncStateError"")); + } + } + } catch (Throwable t) { + ExceptionUtils.handleThrowable(t); + container.getLogger().error(""Exception Processing "" + request.getRequestURI(), t); + // If a new error occurred while trying to report a previous + // error allow the original error to be reported. + if (!response.isErrorReportRequired()) { + request.setAttribute(RequestDispatcher.ERROR_EXCEPTION, t); + throwable(request, response, t); + } + } + + // Now that the request/response pair is back under container + // control lift the suspension so that the error handling can + // complete and/or the container can flush any remaining data + response.setSuspended(false); + + Throwable t = (Throwable) request.getAttribute(RequestDispatcher.ERROR_EXCEPTION); + + // Protect against NPEs if the context was destroyed during a + // long running request. + if (!context.getState().isAvailable()) { + return; + } + + // Look for (and render if found) an application level error page + if (response.isErrorReportRequired()) { + if (t != null) { + throwable(request, response, t); + } else { + status(request, response); + } + } + + if (!request.isAsync() && !asyncAtStart) { + context.fireRequestDestroyEvent(request.getRequest()); + } + } finally { + // Access a session (if present) to update last accessed time, based + // on a strict interpretation of the specification + if (ACCESS_SESSION) { + request.getSession(false); + } + + context.unbind(Globals.IS_SECURITY_ENABLED, MY_CLASSLOADER); + } + } + + + // -------------------------------------------------------- Private Methods + + /** + * Handle the HTTP status code (and corresponding message) generated + * while processing the specified Request to produce the specified + * Response. Any exceptions that occur during generation of the error + * report are logged and swallowed. + * + * @param request The request being processed + * @param response The response being generated + */ +" +174,0," private T run(PrivilegedExceptionAction action) throws Exception { + return System.getSecurityManager() != null ? AccessController.doPrivileged( action ) : action.run(); + } +" +175,0," public void setUp() throws Exception { + sshKey = SshdServerMock.generateKeyPair(); + System.setProperty(PluginImpl.TEST_SSH_KEYFILE_LOCATION_PROPERTY, sshKey.getPrivateKey().getAbsolutePath()); + server = new SshdServerMock(); + sshd = SshdServerMock.startServer(server); + server.returnCommandFor(""gerrit ls-projects"", SshdServerMock.EofCommandMock.class); + server.returnCommandFor(GERRIT_STREAM_EVENTS, SshdServerMock.CommandMock.class); + server.returnCommandFor(""gerrit review.*"", SshdServerMock.EofCommandMock.class); + server.returnCommandFor(""gerrit version"", SshdServerMock.EofCommandMock.class); + } + + /** + * Runs after test method. + * + * @throws Exception throw if so. 
+ */ + @After +" +176,0," public boolean getSource_location() { + return m_source_location; + } + +" +177,0," public void setCoyoteRequest(org.apache.coyote.Request coyoteRequest) { + this.coyoteRequest = coyoteRequest; + inputBuffer.setRequest(coyoteRequest); + } + + /** + * Get the Coyote request. + */ +" +178,0," public ExpressionInterceptUrlRegistry access(String attribute) { + if (not) { + attribute = ""!"" + attribute; + } + interceptUrl(requestMatchers, SecurityConfig.createList(attribute)); + return ExpressionUrlAuthorizationConfigurer.this.REGISTRY; + } + } +} +" +179,0," private @CheckForNull File getDirectChild(File parentFile, String childPath){ + File current = new File(parentFile, childPath); + while (current != null && !parentFile.equals(current.getParentFile())) { + current = current.getParentFile(); + } + return current; + } + } + + private static final SoloFilePathFilter UNRESTRICTED = SoloFilePathFilter.wrap(FilePathFilter.UNRESTRICTED); +} +" +180,0," public static boolean containsExpression(String expr) { + return expr.contains(""%{"") && expr.contains(""}""); + } + +" +181,0," public void derIntegerTest() + throws Exception + { + try + { + new ASN1Integer(new byte[] { 0, 0, 0, 1}); + } + catch (IllegalArgumentException e) + { + isTrue(""wrong exc"", ""malformed integer"".equals(e.getMessage())); + } + + try + { + new ASN1Integer(new byte[] {(byte)0xff, (byte)0x80, 0, 1}); + } + catch (IllegalArgumentException e) + { + isTrue(""wrong exc"", ""malformed integer"".equals(e.getMessage())); + } + + try + { + new ASN1Enumerated(new byte[] { 0, 0, 0, 1}); + } + catch (IllegalArgumentException e) + { + isTrue(""wrong exc"", ""malformed enumerated"".equals(e.getMessage())); + } + + try + { + new ASN1Enumerated(new byte[] {(byte)0xff, (byte)0x80, 0, 1}); + } + catch (IllegalArgumentException e) + { + isTrue(""wrong exc"", ""malformed enumerated"".equals(e.getMessage())); + } + } + +" +182,0," public void cleanUp() { + Set names = files.keySet(); + for (String name : names) { + List items = files.get(name); + for (FileItem item : items) { + if (LOG.isDebugEnabled()) { + String msg = LocalizedTextUtil.findText(this.getClass(), ""struts.messages.removing.file"", + Locale.ENGLISH, ""no.message.found"", new Object[]{name, item}); + LOG.debug(msg); + } + if (!item.isInMemory()) { + item.delete(); + } + } + } + } + +" +183,0," protected JDBCTableReader getTableReader(Connection connection, String tableName, EmbeddedDocumentUtil embeddedDocumentUtil) { + return new SQLite3TableReader(connection, tableName, embeddedDocumentUtil); + } +" +184,0," public Character decodeCharacter( PushbackString input ) { + input.mark(); + Character first = input.next(); + if ( first == null ) { + input.reset(); + return null; + } + + // if this is not an encoded character, return null + if (first != '%' ) { + input.reset(); + return null; + } + + // Search for exactly 2 hex digits following + StringBuilder sb = new StringBuilder(); + for ( int i=0; i<2; i++ ) { + Character c = input.nextHex(); + if ( c != null ) sb.append( c ); + } + if ( sb.length() == 2 ) { + try { + // parse the hex digit and create a character + int i = Integer.parseInt(sb.toString(), 16); + if (Character.isValidCodePoint(i)) { + return (char) i; + } + } catch( NumberFormatException ignored ) { } + } + input.reset(); + return null; + } + +" +185,0," public String getPathname() { + + return pathname; + + } + + + /** + * Set the pathname of our XML file containing user definitions. 
If a + * relative pathname is specified, it is resolved against ""catalina.base"". + * + * @param pathname The new pathname + */ +" +186,0," public boolean isSecureProcessing() + { + return m_isSecureProcessing; + } +" +187,0," public void write(OutputStream outputStream) + throws IOException, WebApplicationException { + Writer writer = new OutputStreamWriter(outputStream, UTF_8); + ContentHandler content; + + try { + SAXTransformerFactory factory = (SAXTransformerFactory) SAXTransformerFactory.newInstance(); + TransformerHandler handler = factory.newTransformerHandler(); + handler.getTransformer().setOutputProperty(OutputKeys.METHOD, format); + handler.getTransformer().setOutputProperty(OutputKeys.INDENT, ""yes""); + handler.getTransformer().setOutputProperty(OutputKeys.ENCODING, UTF_8.name()); + handler.setResult(new StreamResult(writer)); + content = new ExpandedTitleContentHandler(handler); + } catch (TransformerConfigurationException e) { + throw new WebApplicationException(e); + } + + parse(parser, LOG, info.getPath(), is, content, metadata, context); + } + }; + } +} +" +188,0," private boolean isSameAs( + byte[] a, + byte[] b) + { + if (a.length != b.length) + { + return false; + } + + for (int i = 0; i != a.length; i++) + { + if (a[i] != b[i]) + { + return false; + } + } + + return true; + } + +" +189,0," private void testModified() + throws Exception + { + KeyFactory kFact = KeyFactory.getInstance(""DSA"", ""BC""); + PublicKey pubKey = kFact.generatePublic(PUBLIC_KEY); + Signature sig = Signature.getInstance(""DSA"", ""BC""); + + for (int i = 0; i != MODIFIED_SIGNATURES.length; i++) + { + sig.initVerify(pubKey); + + sig.update(Strings.toByteArray(""Hello"")); + + boolean failed; + + try + { + failed = !sig.verify(Hex.decode(MODIFIED_SIGNATURES[i])); + } + catch (SignatureException e) + { + failed = true; + } + + isTrue(""sig verified when shouldn't"", failed); + } + } + +" +190,0," public LockoutPolicyRetriever getLockoutPolicyRetriever() { + return lockoutPolicyRetriever; + } +" +191,0," public void close() throws IOException { + if (!closed) { + synchronized (this) { + if (!closed) { + closed = true; + socket.close(); + } + } + } + } + +" +192,0," protected abstract void recycleInternal(); +" +193,0," public static JWTDecoder getInstance() { + if (instance == null) { + instance = new JWTDecoder(); + } + + return instance; + } + + /** + * Decode the JWT using one of they provided verifiers. One more verifiers may be provided, the first verifier found + * supporting the algorithm reported by the JWT header will be utilized. + *

+ * A JWT that is expired or not yet valid will not be decoded, instead a {@link JWTExpiredException} or {@link + * JWTUnavailableForProcessingException} exception will be thrown respectively. + * + * @param encodedJWT The encoded JWT in string format. + * @param verifiers A map of verifiers. + * @return a decoded JWT. + */ +" +194,0," public boolean check(String path, Resource resource) + { + int slash = path.lastIndexOf('/'); + if (slash<0 || resource.exists()) + return false; + String suffix=path.substring(slash); + return resource.getAlias().toString().endsWith(suffix); + } + } +} +" +195,0," public void setMaxParameterCount(int maxParameterCount) { + this.maxParameterCount = maxParameterCount; + } + + + /** + * Return the maximum size of a POST which will be automatically + * parsed by the container. + */ +" +196,0," protected boolean requiresAuthentication(final HttpServletRequest request, final HttpServletResponse response) { + boolean result = request.getRequestURI().contains(getFilterProcessesUrl()); + result |= isTokenExpired(); + if (logger.isDebugEnabled()) { + logger.debug(""requiresAuthentication = "" + result); + } + return result; + } + +" +197,0," public JettyContentExchange createContentExchange() { + return new JettyContentExchange9(); + } +" +198,0," protected void populateResponse(Exchange exchange, JettyContentExchange httpExchange, + Message in, HeaderFilterStrategy strategy, int responseCode) throws IOException { + Message answer = exchange.getOut(); + + answer.setHeader(Exchange.HTTP_RESPONSE_CODE, responseCode); + + // must use response fields to get the http headers as + // httpExchange.getHeaders() does not work well with multi valued headers + Map> headers = httpExchange.getResponseHeaders(); + for (Map.Entry> ent : headers.entrySet()) { + String name = ent.getKey(); + Collection values = ent.getValue(); + for (String value : values) { + if (name.toLowerCase().equals(""content-type"")) { + name = Exchange.CONTENT_TYPE; + exchange.setProperty(Exchange.CHARSET_NAME, IOHelper.getCharsetNameFromContentType(value)); + } + if (strategy != null && !strategy.applyFilterToExternalHeaders(name, value, exchange)) { + HttpHelper.appendHeader(answer.getHeaders(), name, value); + } + } + } + + // preserve headers from in by copying any non existing headers + // to avoid overriding existing headers with old values + // We also need to apply the httpProtocolHeaderFilterStrategy to filter the http protocol header + MessageHelper.copyHeaders(exchange.getIn(), answer, httpProtocolHeaderFilterStrategy, false); + + // extract body after headers has been set as we want to ensure content-type from Jetty HttpExchange + // has been populated first + answer.setBody(extractResponseBody(exchange, httpExchange)); + } + +" +199,0," protected boolean isAuthorizedToSwitchToIdentityZone(String identityZoneId) { + Authentication authentication = SecurityContextHolder.getContext().getAuthentication(); + boolean hasScope = OAuth2ExpressionUtils.hasAnyScope(authentication, getZoneSwitchingScopes(identityZoneId)); + boolean isUaa = IdentityZoneHolder.isUaa(); + boolean isTokenAuth = (authentication instanceof OAuth2Authentication); + return isTokenAuth && isUaa && hasScope; + } + +" +200,0," protected static void copyBytes(byte[] b, int dest, int src, int len) { + for (int pos = 0; pos < len; pos++) { + b[pos + dest] = b[pos + src]; + } + } + + +" +201,0," public String getSessionId() + throws IOException { + // Look up the current SSLSession + SSLSession session = ssl.getSession(); + if (session 
== null) + return null; + // Expose ssl_session (getId) + byte [] ssl_session = session.getId(); + if ( ssl_session == null) + return null; + StringBuffer buf=new StringBuffer(""""); + for(int x=0; x2) digit=digit.substring(digit.length()-2); + buf.append(digit); + } + return buf.toString(); + } + + +" +202,0," private void login(String clientId) throws Exception { + getMockMvc().perform(post(""/oauth/token"") + .accept(MediaType.APPLICATION_JSON_VALUE) + .header(""Authorization"", ""Basic "" + new String(Base64.encode((clientId + "":"" + SECRET).getBytes()))) + .param(""grant_type"", ""client_credentials"") + ) + .andExpect(status().isOk()) + .andReturn().getResponse().getContentAsString(); + } + + @Test +" +203,0," public abstract JettyContentExchange createContentExchange(); + +" +204,0," public void testParameterNameAware() { + ParametersInterceptor pi = createParametersInterceptor(); + final Map actual = injectValueStackFactory(pi); + ValueStack stack = createStubValueStack(actual); + final Map expected = new HashMap() { + { + put(""fooKey"", ""fooValue""); + put(""barKey"", ""barValue""); + } + }; + Object a = new ParameterNameAware() { + public boolean acceptableParameterName(String parameterName) { + return expected.containsKey(parameterName); + } + }; + Map parameters = new HashMap() { + { + put(""fooKey"", ""fooValue""); + put(""barKey"", ""barValue""); + put(""error"", ""error""); + } + }; + pi.setParameters(a, stack, parameters); + assertEquals(expected, actual); + } + +" +205,0," protected CloseButtonBehavior newCloseButtonBehavior() + { + return new CloseButtonBehavior(); + } +" +206,0," public void repositoryVerificationTimeoutTest() throws Exception { + Client client = client(); + + Settings settings = ImmutableSettings.settingsBuilder() + .put(""location"", randomRepoPath()) + .put(""random_control_io_exception_rate"", 1.0).build(); + logger.info(""--> creating repository that cannot write any files - should fail""); + assertThrows(client.admin().cluster().preparePutRepository(""test-repo-1"") + .setType(MockRepositoryModule.class.getCanonicalName()).setSettings(settings), + RepositoryVerificationException.class); + + logger.info(""--> creating repository that cannot write any files, but suppress verification - should be acked""); + assertAcked(client.admin().cluster().preparePutRepository(""test-repo-1"") + .setType(MockRepositoryModule.class.getCanonicalName()).setSettings(settings).setVerify(false)); + + logger.info(""--> verifying repository""); + assertThrows(client.admin().cluster().prepareVerifyRepository(""test-repo-1""), RepositoryVerificationException.class); + + File location = randomRepoPath(); + + logger.info(""--> creating repository""); + try { + client.admin().cluster().preparePutRepository(""test-repo-1"") + .setType(MockRepositoryModule.class.getCanonicalName()) + .setSettings(ImmutableSettings.settingsBuilder() + .put(""location"", location) + .put(""localize_location"", true) + ).get(); + fail(""RepositoryVerificationException wasn't generated""); + } catch (RepositoryVerificationException ex) { + assertThat(ex.getMessage(), containsString(""is not shared"")); + } + } + +" +207,0," private static void log( String s ) { + if (log.isDebugEnabled()) + log.debug(""URLDecoder: "" + s ); + } + +" +208,0," private boolean evaluate(String text) { + try { + InputSource inputSource = new InputSource(new StringReader(text)); + Document inputDocument = builder.parse(inputSource); + return ((Boolean)xpath.evaluate(xpathExpression, inputDocument, 
XPathConstants.BOOLEAN)).booleanValue(); + } catch (Exception e) { + return false; + } + } + + @Override +" +209,0," public void basicWorkFlowTest() throws Exception { + Client client = client(); + + logger.info(""--> creating repository""); + assertAcked(client.admin().cluster().preparePutRepository(""test-repo"") + .setType(""fs"").setSettings(ImmutableSettings.settingsBuilder() + .put(""location"", randomRepoPath()) + .put(""compress"", randomBoolean()) + .put(""chunk_size"", randomIntBetween(100, 1000)))); + + createIndex(""test-idx-1"", ""test-idx-2"", ""test-idx-3""); + ensureGreen(); + + logger.info(""--> indexing some data""); + for (int i = 0; i < 100; i++) { + index(""test-idx-1"", ""doc"", Integer.toString(i), ""foo"", ""bar"" + i); + index(""test-idx-2"", ""doc"", Integer.toString(i), ""foo"", ""baz"" + i); + index(""test-idx-3"", ""doc"", Integer.toString(i), ""foo"", ""baz"" + i); + } + refresh(); + assertHitCount(client.prepareCount(""test-idx-1"").get(), 100L); + assertHitCount(client.prepareCount(""test-idx-2"").get(), 100L); + assertHitCount(client.prepareCount(""test-idx-3"").get(), 100L); + + ListenableActionFuture flushResponseFuture = null; + if (randomBoolean()) { + ArrayList indicesToFlush = newArrayList(); + for (int i = 1; i < 4; i++) { + if (randomBoolean()) { + indicesToFlush.add(""test-idx-"" + i); + } + } + if (!indicesToFlush.isEmpty()) { + String[] indices = indicesToFlush.toArray(new String[indicesToFlush.size()]); + logger.info(""--> starting asynchronous flush for indices {}"", Arrays.toString(indices)); + flushResponseFuture = client.admin().indices().prepareFlush(indices).execute(); + } + } + logger.info(""--> snapshot""); + CreateSnapshotResponse createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot(""test-repo"", ""test-snap"").setWaitForCompletion(true).setIndices(""test-idx-*"", ""-test-idx-3"").get(); + assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0)); + assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards())); + + assertThat(client.admin().cluster().prepareGetSnapshots(""test-repo"").setSnapshots(""test-snap"").get().getSnapshots().get(0).state(), equalTo(SnapshotState.SUCCESS)); + + logger.info(""--> delete some data""); + for (int i = 0; i < 50; i++) { + client.prepareDelete(""test-idx-1"", ""doc"", Integer.toString(i)).get(); + } + for (int i = 50; i < 100; i++) { + client.prepareDelete(""test-idx-2"", ""doc"", Integer.toString(i)).get(); + } + for (int i = 0; i < 100; i += 2) { + client.prepareDelete(""test-idx-3"", ""doc"", Integer.toString(i)).get(); + } + refresh(); + assertHitCount(client.prepareCount(""test-idx-1"").get(), 50L); + assertHitCount(client.prepareCount(""test-idx-2"").get(), 50L); + assertHitCount(client.prepareCount(""test-idx-3"").get(), 50L); + + logger.info(""--> close indices""); + client.admin().indices().prepareClose(""test-idx-1"", ""test-idx-2"").get(); + + logger.info(""--> restore all indices from the snapshot""); + RestoreSnapshotResponse restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot(""test-repo"", ""test-snap"").setWaitForCompletion(true).execute().actionGet(); + assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0)); + + ensureGreen(); + for (int i=0; i<5; i++) { + assertHitCount(client.prepareCount(""test-idx-1"").get(), 100L); + assertHitCount(client.prepareCount(""test-idx-2"").get(), 100L); + 
assertHitCount(client.prepareCount(""test-idx-3"").get(), 50L); + } + + // Test restore after index deletion + logger.info(""--> delete indices""); + cluster().wipeIndices(""test-idx-1"", ""test-idx-2""); + logger.info(""--> restore one index after deletion""); + restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot(""test-repo"", ""test-snap"").setWaitForCompletion(true).setIndices(""test-idx-*"", ""-test-idx-2"").execute().actionGet(); + assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0)); + + ensureGreen(); + for (int i=0; i<5; i++) { + assertHitCount(client.prepareCount(""test-idx-1"").get(), 100L); + } + ClusterState clusterState = client.admin().cluster().prepareState().get().getState(); + assertThat(clusterState.getMetaData().hasIndex(""test-idx-1""), equalTo(true)); + assertThat(clusterState.getMetaData().hasIndex(""test-idx-2""), equalTo(false)); + + if (flushResponseFuture != null) { + // Finish flush + flushResponseFuture.actionGet(); + } + } + + + @Test +" +210,0," public ReturnValueDescriptor asDescriptor(boolean defaultGroupSequenceRedefined, List> defaultGroupSequence) { + return new ReturnValueDescriptorImpl( + getType(), + asDescriptors( getConstraints() ), + isCascading(), + defaultGroupSequenceRedefined, + defaultGroupSequence, + groupConversionHelper.asDescriptors() + ); + } +" +211,0," static final PyObject function___new__(PyNewWrapper new_, boolean init, PyType subtype, + PyObject[] args, String[] keywords) { + ArgParser ap = new ArgParser(""function"", args, keywords, + new String[] {""code"", ""globals"", ""name"", ""argdefs"", + ""closure""}, 0); + PyObject code = ap.getPyObject(0); + PyObject globals = ap.getPyObject(1); + PyObject name = ap.getPyObject(2, Py.None); + PyObject defaults = ap.getPyObject(3, Py.None); + PyObject closure = ap.getPyObject(4, Py.None); + + if (!(code instanceof PyBaseCode)) { + throw Py.TypeError(""function() argument 1 must be code, not "" + + code.getType().fastGetName()); + } + if (name != Py.None && !Py.isInstance(name, PyString.TYPE)) { + throw Py.TypeError(""arg 3 (name) must be None or string""); + } + if (defaults != Py.None && !(defaults instanceof PyTuple)) { + throw Py.TypeError(""arg 4 (defaults) must be None or tuple""); + } + + PyBaseCode tcode = (PyBaseCode)code; + int nfree = tcode.co_freevars == null ? 0 : tcode.co_freevars.length; + if (!(closure instanceof PyTuple)) { + if (nfree > 0 && closure == Py.None) { + throw Py.TypeError(""arg 5 (closure) must be tuple""); + } else if (closure != Py.None) { + throw Py.TypeError(""arg 5 (closure) must be None or tuple""); + } + } + + int nclosure = closure == Py.None ? 0 : closure.__len__(); + if (nfree != nclosure) { + throw Py.ValueError(String.format(""%s requires closure of length %d, not %d"", + tcode.co_name, nfree, nclosure)); + } + if (nclosure > 0) { + for (PyObject o : ((PyTuple)closure).asIterable()) { + if (!(o instanceof PyCell)) { + throw Py.TypeError(String.format(""arg 5 (closure) expected cell, found %s"", + o.getType().fastGetName())); + } + } + } + + PyFunction function = new PyFunction(globals, + defaults == Py.None + ? null : ((PyTuple)defaults).getArray(), + tcode, null, + closure == Py.None + ? 
null : ((PyTuple)closure).getArray()); + if (name != Py.None) { + function.__name__ = name.toString(); + } + return function; + } + + @ExposedSet(name = ""__name__"") +" +212,0," private static byte[] toUtf8Bytes(String str) + { + try + { + return str.getBytes(""UTF-8""); + } + catch(UnsupportedEncodingException e) + { + throw new IllegalStateException(""The Java spec requires UTF-8 support."", e); + } + } + + /** + * Append the two upper case hex characters for a byte. + * @param sb The string buffer to append to. + * @param b The byte to hexify + * @return sb with the hex characters appended. + */ + // rfc3986 2.1: For consistency, URI producers + // should use uppercase hexadecimal digits for all percent- + // encodings. +" +213,0," void getElUnsupported(String expression); + +" +214,0," public String toString() { + return xpathExpression; + } +" +215,0," public Descriptor getDescriptor() { + throw new UnsupportedOperationException(); + } + } +} +" +216,1," public void getAllPropertiesRequiresAdmin() { + j.jenkins.setAuthorizationStrategy(new MockAuthorizationStrategy() + .grant(Jenkins.ADMINISTER).everywhere().to(""admin"") + .grant(Jenkins.READ).everywhere().toEveryone()); + j.jenkins.setSecurityRealm(j.createDummySecurityRealm()); + + User admin = User.get(""admin""); + User alice = User.get(""alice""); + User bob = User.get(""bob""); + + // Admin can access user properties for all users + try (ACLContext as = ACL.as(admin)) { + assertThat(alice.getAllProperties(), not(empty())); + assertThat(bob.getAllProperties(), not(empty())); + assertThat(admin.getAllProperties(), not(empty())); + } + + // Non admins can only view their own + try (ACLContext as = ACL.as(alice)) { + assertThat(alice.getAllProperties(), not(empty())); + assertThat(bob.getAllProperties(), empty()); + assertThat(admin.getAllProperties(), empty()); + } + } + +" +217,1," protected

<P> P postProcess(P object) {
+ return (P) this.objectPostProcessor.postProcess(object);
+ }
+
+ /**
+ * Executes the build using the {@link SecurityConfigurer}'s that have been applied
+ * using the following steps:
+ *
+ * <ul>
+ * <li>Invokes {@link #beforeInit()} for any subclass to hook into</li>
+ * <li>Invokes {@link SecurityConfigurer#init(SecurityBuilder)} for any
+ * {@link SecurityConfigurer} that was applied to this builder.</li>
+ * <li>Invokes {@link #beforeConfigure()} for any subclass to hook into</li>
+ * <li>Invokes {@link #performBuild()} which actually builds the Object</li>
+ * </ul>
+ */ + @Override +" +218,1," public SAXSource toSAXSourceFromStream(StreamSource source, Exchange exchange) throws SAXException { + InputSource inputSource; + if (source.getReader() != null) { + inputSource = new InputSource(source.getReader()); + } else { + inputSource = new InputSource(source.getInputStream()); + } + inputSource.setSystemId(source.getSystemId()); + inputSource.setPublicId(source.getPublicId()); + XMLReader xmlReader = null; + SAXParserFactory sfactory = null; + //Need to setup XMLReader security feature by default + try { + // use the SAXPaserFactory which is set from exchange + if (exchange != null) { + sfactory = exchange.getProperty(Exchange.SAXPARSER_FACTORY, SAXParserFactory.class); + } + if (sfactory == null) { + sfactory = SAXParserFactory.newInstance(); + try { + sfactory.setFeature(javax.xml.XMLConstants.FEATURE_SECURE_PROCESSING, true); + } catch (Exception e) { + LOG.warn(""SAXParser doesn't support the feature {} with value {}, due to {}."", new Object[]{javax.xml.XMLConstants.FEATURE_SECURE_PROCESSING, ""true"", e}); + } + } + sfactory.setNamespaceAware(true); + SAXParser parser = sfactory.newSAXParser(); + xmlReader = parser.getXMLReader(); + } catch (Exception ex) { + LOG.warn(""Cannot create the SAXParser XMLReader, due to {}"", ex); + } + return new SAXSource(xmlReader, inputSource); + } + + /** + * @deprecated will be removed in Camel 3.0. Use the method which has 2 parameters. + */ + @Deprecated +" +219,1," public String getDisplayName() { + return Messages.CommandLauncher_displayName(); + } + } +} +" +220,1," public SSOValidatorResponse validateSamlResponse( + org.opensaml.saml.saml2.core.Response samlResponse, + boolean postBinding + ) throws WSSecurityException { + // Check the Issuer + validateIssuer(samlResponse.getIssuer()); + + // The Response must contain at least one Assertion. 
+ if (samlResponse.getAssertions() == null || samlResponse.getAssertions().isEmpty()) { + LOG.fine(""The Response must contain at least one Assertion""); + throw new WSSecurityException(WSSecurityException.ErrorCode.FAILURE, ""invalidSAMLsecurity""); + } + + // The Response must contain a Destination that matches the assertionConsumerURL if it is + // signed + String destination = samlResponse.getDestination(); + if (samlResponse.isSigned() + && (destination == null || !destination.equals(assertionConsumerURL))) { + LOG.fine(""The Response must contain a destination that matches the assertion consumer URL""); + throw new WSSecurityException(WSSecurityException.ErrorCode.FAILURE, ""invalidSAMLsecurity""); + } + + // Validate Assertions + boolean foundValidSubject = false; + Date sessionNotOnOrAfter = null; + for (org.opensaml.saml.saml2.core.Assertion assertion : samlResponse.getAssertions()) { + // Check the Issuer + if (assertion.getIssuer() == null) { + LOG.fine(""Assertion Issuer must not be null""); + throw new WSSecurityException(WSSecurityException.ErrorCode.FAILURE, ""invalidSAMLsecurity""); + } + validateIssuer(assertion.getIssuer()); + + if (enforceAssertionsSigned && postBinding && assertion.getSignature() == null) { + LOG.fine(""If the HTTP Post binding is used to deliver the Response, "" + + ""the enclosed assertions must be signed""); + throw new WSSecurityException(WSSecurityException.ErrorCode.FAILURE, ""invalidSAMLsecurity""); + } + + // Check for AuthnStatements and validate the Subject accordingly + if (assertion.getAuthnStatements() != null + && !assertion.getAuthnStatements().isEmpty()) { + org.opensaml.saml.saml2.core.Subject subject = assertion.getSubject(); + if (validateAuthenticationSubject(subject, assertion.getID(), postBinding)) { + validateAudienceRestrictionCondition(assertion.getConditions()); + foundValidSubject = true; + // Store Session NotOnOrAfter + for (AuthnStatement authnStatment : assertion.getAuthnStatements()) { + if (authnStatment.getSessionNotOnOrAfter() != null) { + sessionNotOnOrAfter = authnStatment.getSessionNotOnOrAfter().toDate(); + } + } + } + } + + } + + if (!foundValidSubject) { + LOG.fine(""The Response did not contain any Authentication Statement that matched "" + + ""the Subject Confirmation criteria""); + throw new WSSecurityException(WSSecurityException.ErrorCode.FAILURE, ""invalidSAMLsecurity""); + } + + SSOValidatorResponse validatorResponse = new SSOValidatorResponse(); + validatorResponse.setResponseId(samlResponse.getID()); + validatorResponse.setSessionNotOnOrAfter(sessionNotOnOrAfter); + if (samlResponse.getIssueInstant() != null) { + validatorResponse.setCreated(samlResponse.getIssueInstant().toDate()); + } + + // the assumption for now is that SAMLResponse will contain only a single assertion + Element assertionElement = samlResponse.getAssertions().get(0).getDOM(); + Element clonedAssertionElement = (Element)assertionElement.cloneNode(true); + validatorResponse.setAssertionElement(clonedAssertionElement); + validatorResponse.setAssertion(DOM2Writer.nodeToString(clonedAssertionElement)); + + return validatorResponse; + } + + /** + * Validate the Issuer (if it exists) + */ +" +221,1," private JWT decode(String encodedJWT, Header header, String[] parts, Verifier verifier) { + int index = encodedJWT.lastIndexOf("".""); + // The message comprises the first two segments of the entire JWT, the signature is the last segment. 
+ byte[] message = encodedJWT.substring(0, index).getBytes(StandardCharsets.UTF_8); + + // If a signature is provided and verifier must be provided. + if (parts.length == 3 && verifier == null) { + throw new MissingVerifierException(""No Verifier has been provided for verify a signature signed using ["" + header.algorithm.getName() + ""]""); + } + + // A verifier was provided but no signature exists, this is treated as an invalid signature. + if (parts.length == 2 && verifier != null) { + throw new InvalidJWTSignatureException(); + } + + if (parts.length == 3) { + // Verify the signature before de-serializing the payload. + byte[] signature = base64Decode(parts[2].getBytes(StandardCharsets.UTF_8)); + verifier.verify(header.algorithm, message, signature); + } + + JWT jwt = Mapper.deserialize(base64Decode(parts[1].getBytes(StandardCharsets.UTF_8)), JWT.class); + + // Verify expiration claim + if (jwt.isExpired()) { + throw new JWTExpiredException(); + } + + // Verify the notBefore claim + if (jwt.isUnavailableForProcessing()) { + throw new JWTUnavailableForProcessingException(); + } + + return jwt; + } + +" +222,1," String postProcessVariableName(String variableName) { + return this.matcher.postProcessVariableName(variableName); + } + } + +} +" +223,1," public String toString() { + + StringBuilder sb = new StringBuilder("" 0) { + sb.append("" groups=\""""); + int n = 0; + Iterator values = groups.iterator(); + while (values.hasNext()) { + if (n > 0) { + sb.append(','); + } + n++; + sb.append(RequestUtil.filter(values.next().getGroupname())); + } + sb.append(""\""""); + } + } + synchronized (roles) { + if (roles.size() > 0) { + sb.append("" roles=\""""); + int n = 0; + Iterator values = roles.iterator(); + while (values.hasNext()) { + if (n > 0) { + sb.append(','); + } + n++; + sb.append(RequestUtil.filter(values.next().getRolename())); + } + sb.append(""\""""); + } + } + sb.append(""/>""); + return (sb.toString()); + + } + + +" +224,1," public void processParameters( String str ) { + int end=str.length(); + int pos=0; + if( debug > 0) + log(""String: "" + str ); + + do { + boolean noEq=false; + int valStart=-1; + int valEnd=-1; + + int nameStart=pos; + int nameEnd=str.indexOf('=', nameStart ); + int nameEnd2=str.indexOf('&', nameStart ); + if( nameEnd2== -1 ) nameEnd2=end; + if( (nameEnd2!=-1 ) && + ( nameEnd==-1 || nameEnd > nameEnd2) ) { + nameEnd=nameEnd2; + noEq=true; + valStart=nameEnd; + valEnd=nameEnd; + if(debug>0) log(""no equal "" + nameStart + "" "" + nameEnd + "" "" + + str.substring(nameStart, nameEnd)); + } + + if( nameEnd== -1 ) nameEnd=end; + + if( ! noEq ) { + valStart=nameEnd+1; + valEnd=str.indexOf('&', valStart); + if( valEnd== -1 ) valEnd = (valStart < end) ? 
end : valStart; + } + + pos=valEnd+1; + + if( nameEnd<=nameStart ) { + continue; + } + if( debug>0) + log( ""XXX "" + nameStart + "" "" + nameEnd + "" "" + + valStart + "" "" + valEnd ); + + try { + tmpNameC.append(str, nameStart, nameEnd-nameStart ); + tmpValueC.append(str, valStart, valEnd-valStart ); + + if( debug > 0 ) + log( tmpNameC + ""= "" + tmpValueC); + + if( urlDec==null ) { + urlDec=new UDecoder(); + } + + urlDec.convert( tmpNameC ); + urlDec.convert( tmpValueC ); + + if( debug > 0 ) + log( tmpNameC + ""= "" + tmpValueC); + + addParam( tmpNameC.toString(), tmpValueC.toString() ); + } catch( IOException ex ) { + ex.printStackTrace(); + } + + tmpNameC.recycle(); + tmpValueC.recycle(); + + } while( pos users = userProvisioning.query(""username eq \""marissa\""""); + assertNotNull(users); + assertEquals(1, users.size()); + ScimUser user = users.get(0); + + ExpiringCode code = codeStore.generateCode(user.getId(), new Timestamp(System.currentTimeMillis() + UaaResetPasswordService.PASSWORD_RESET_LIFETIME), null); + getMockMvc().perform(createChangePasswordRequest(user, code, true, ""d3faultPasswd"", ""d3faultPasswd"")); + + code = codeStore.generateCode(user.getId(), new Timestamp(System.currentTimeMillis() + UaaResetPasswordService.PASSWORD_RESET_LIFETIME), null); + getMockMvc().perform(createChangePasswordRequest(user, code, true, ""d3faultPasswd"", ""d3faultPasswd"")) + .andExpect(status().isUnprocessableEntity()) + .andExpect(view().name(""forgot_password"")) + .andExpect(model().attribute(""message"", ""Your new password cannot be the same as the old password."")); + } + +" +227,1," public void recycle() { + if (buffered.getBuffer().length > 65536) { + buffered = null; + } else { + buffered.recycle(); + } + tempRead.recycle(); + hasRead = false; + buffer = null; + } + +" +228,1," public XMLLoader init(SolrParams args) { + inputFactory = XMLInputFactory.newInstance(); + try { + // The java 1.6 bundled stax parser (sjsxp) does not currently have a thread-safe + // XMLInputFactory, as that implementation tries to cache and reuse the + // XMLStreamReader. Setting the parser-specific ""reuse-instance"" property to false + // prevents this. + // All other known open-source stax parsers (and the bea ref impl) + // have thread-safe factories. + inputFactory.setProperty(""reuse-instance"", Boolean.FALSE); + } + catch (IllegalArgumentException ex) { + // Other implementations will likely throw this exception since ""reuse-instance"" + // isimplementation specific. 
+ log.debug(""Unable to set the 'reuse-instance' property for the input chain: "" + inputFactory); + } + inputFactory.setXMLReporter(xmllog); + + xsltCacheLifetimeSeconds = XSLT_CACHE_DEFAULT; + if(args != null) { + xsltCacheLifetimeSeconds = args.getInt(XSLT_CACHE_PARAM,XSLT_CACHE_DEFAULT); + log.info(""xsltCacheLifetimeSeconds="" + xsltCacheLifetimeSeconds); + } + return this; + } + +" +229,1," protected void execute() { + + Arrays.asList(controllersListStrings).forEach( + cInfoString -> controllers.add(parseCInfoString(cInfoString))); + DriverService service = get(DriverService.class); + deviceId = DeviceId.deviceId(uri); + DriverHandler h = service.createHandler(deviceId); + ControllerConfig config = h.behaviour(ControllerConfig.class); + print(""before:""); + config.getControllers().forEach(c -> print(c.target())); + try { + if (removeAll) { + if (!controllers.isEmpty()) { + print(""Controllers list should be empty to remove all controllers""); + } else { + List controllersToRemove = config.getControllers(); + controllersToRemove.forEach(c -> print(""Will remove "" + c.target())); + config.removeControllers(controllersToRemove); + } + } else { + if (controllers.isEmpty()) { + print(""Controllers list is empty, cannot set/remove empty controllers""); + } else { + if (removeCont) { + print(""Will remove specified controllers""); + config.removeControllers(controllers); + } else { + print(""Will add specified controllers""); + config.setControllers(controllers); + } + } + } + } catch (NullPointerException e) { + print(""No Device with requested parameters {} "", uri); + } + print(""after:""); + config.getControllers().forEach(c -> print(c.target())); + print(""size %d"", config.getControllers().size()); + } + + +" +230,1," protected boolean parseChunkHeader() + throws IOException { + + int result = 0; + boolean eol = false; + boolean readDigit = false; + boolean trailer = false; + + while (!eol) { + + if (pos >= lastValid) { + if (readBytes() <= 0) + return false; + } + + if (buf[pos] == Constants.CR || buf[pos] == Constants.LF) { + parseCRLF(false); + eol = true; + } else if (buf[pos] == Constants.SEMI_COLON) { + trailer = true; + } else if (!trailer) { + //don't read data after the trailer + int charValue = HexUtils.getDec(buf[pos]); + if (charValue != -1) { + readDigit = true; + result *= 16; + result += charValue; + } else { + //we shouldn't allow invalid, non hex characters + //in the chunked header + return false; + } + } + + // Parsing the CRLF increments pos + if (!eol) { + pos++; + } + + } + + if (!readDigit) + return false; + + if (result == 0) + endChunk = true; + + remaining = result; + if (remaining < 0) + return false; + + return true; + + } + + + /** + * Parse CRLF at end of chunk. + * + * @param tolerant Should tolerant parsing (LF and CRLF) be used? This + * is recommended (RFC2616, section 19.3) for message + * headers. + */ +" +231,1," private static DocumentBuilder getBuilder() throws ParserConfigurationException { + ClassLoader loader = Thread.currentThread().getContextClassLoader(); + if (loader == null) { + loader = DOMUtils.class.getClassLoader(); + } + if (loader == null) { + return XMLUtils.getParser(); + } + DocumentBuilder builder = DOCUMENT_BUILDERS.get(loader); + if (builder == null) { + builder = XMLUtils.getParser(); + DOCUMENT_BUILDERS.put(loader, builder); + } + return builder; + } + + /** + * This function is much like getAttribute, but returns null, not """", for a nonexistent attribute. 
+ * + * @param e + * @param attributeName + */ +" +232,1," public void init(Map pluginConfig) { + try { + String delegationTokenEnabled = (String)pluginConfig.getOrDefault(DELEGATION_TOKEN_ENABLED_PROPERTY, ""false""); + authFilter = (Boolean.parseBoolean(delegationTokenEnabled)) ? new HadoopAuthFilter() : new AuthenticationFilter(); + + // Initialize kerberos before initializing curator instance. + boolean initKerberosZk = Boolean.parseBoolean((String)pluginConfig.getOrDefault(INIT_KERBEROS_ZK, ""false"")); + if (initKerberosZk) { + (new Krb5HttpClientBuilder()).getBuilder(); + } + + FilterConfig conf = getInitFilterConfig(pluginConfig); + authFilter.init(conf); + + } catch (ServletException e) { + throw new SolrException(ErrorCode.SERVER_ERROR, ""Error initializing "" + getClass().getName() + "": ""+e); + } + } + + @SuppressWarnings(""unchecked"") +" +233,1," public void addRecipients(final ExtendedEmailPublisherContext context, EnvVars env, Set to, Set cc, Set bcc) { + final class Debug implements RecipientProviderUtilities.IDebug { + private final ExtendedEmailPublisherDescriptor descriptor + = Jenkins.getActiveInstance().getDescriptorByType(ExtendedEmailPublisherDescriptor.class); + + private final PrintStream logger = context.getListener().getLogger(); + + public void send(final String format, final Object... args) { + descriptor.debug(logger, format, args); + } + } + final Debug debug = new Debug(); + debug.send(""Sending email to upstream committer(s).""); + Run cur; + Cause.UpstreamCause upc = context.getRun().getCause(Cause.UpstreamCause.class); + while (upc != null) { + Job p = (Job) Jenkins.getActiveInstance().getItemByFullName(upc.getUpstreamProject()); + if(p == null) { + context.getListener().getLogger().print(""There is a break in the project linkage, could not retrieve upstream project information""); + break; + } + cur = p.getBuildByNumber(upc.getUpstreamBuild()); + upc = cur.getCause(Cause.UpstreamCause.class); + addUpstreamCommittersTriggeringBuild(cur, to, cc, bcc, env, context.getListener(), debug); + } + } + + /** + * Adds for the given upstream build the committers to the recipient list for each commit in the upstream build. + * + * @param build the upstream build + * @param to the to recipient list + * @param cc the cc recipient list + * @param bcc the bcc recipient list + * @param env + * @param listener + */ +" +234,1," public void test_noVerification() throws Exception { + // Sign a JWT and then attempt to verify it using None. 
+ JWT jwt = new JWT().setSubject(""art""); + String encodedJWT = JWT.getEncoder().encode(jwt, HMACSigner.newSHA256Signer(""secret"")); + + expectException(MissingVerifierException.class, () + -> JWT.getDecoder().decode(encodedJWT)); + } + + @Test +" +235,1," protected void configure(HttpSecurity http) throws Exception { + // This config is also on UrlAuthorizationConfigurer javadoc + http + .apply(new UrlAuthorizationConfigurer()).getRegistry() + .antMatchers(""/users**"",""/sessions/**"").hasRole(""USER"") + .antMatchers(""/signup"").hasRole(""ANONYMOUS"") + .anyRequest().hasRole(""USER""); + } + // @formatter:on +" +236,1," public void register(ContainerBuilder builder, LocatableProperties props) + throws ConfigurationException { + + builder.factory(com.opensymphony.xwork2.ObjectFactory.class) + .factory(ActionProxyFactory.class, DefaultActionProxyFactory.class, Scope.SINGLETON) + .factory(ObjectTypeDeterminer.class, DefaultObjectTypeDeterminer.class, Scope.SINGLETON) + + .factory(XWorkConverter.class, Scope.SINGLETON) + .factory(XWorkBasicConverter.class, Scope.SINGLETON) + .factory(ConversionPropertiesProcessor.class, DefaultConversionPropertiesProcessor.class, Scope.SINGLETON) + .factory(ConversionFileProcessor.class, DefaultConversionFileProcessor.class, Scope.SINGLETON) + .factory(ConversionAnnotationProcessor.class, DefaultConversionAnnotationProcessor.class, Scope.SINGLETON) + .factory(TypeConverterCreator.class, DefaultTypeConverterCreator.class, Scope.SINGLETON) + .factory(TypeConverterHolder.class, DefaultTypeConverterHolder.class, Scope.SINGLETON) + + .factory(FileManager.class, ""system"", DefaultFileManager.class, Scope.SINGLETON) + .factory(FileManagerFactory.class, DefaultFileManagerFactory.class, Scope.SINGLETON) + .factory(ValueStackFactory.class, OgnlValueStackFactory.class, Scope.SINGLETON) + .factory(ValidatorFactory.class, DefaultValidatorFactory.class, Scope.SINGLETON) + .factory(ValidatorFileParser.class, DefaultValidatorFileParser.class, Scope.SINGLETON) + .factory(PatternMatcher.class, WildcardHelper.class, Scope.SINGLETON) + .factory(ReflectionProvider.class, OgnlReflectionProvider.class, Scope.SINGLETON) + .factory(ReflectionContextFactory.class, OgnlReflectionContextFactory.class, Scope.SINGLETON) + .factory(PropertyAccessor.class, CompoundRoot.class.getName(), CompoundRootAccessor.class, Scope.SINGLETON) + .factory(PropertyAccessor.class, Object.class.getName(), ObjectAccessor.class, Scope.SINGLETON) + .factory(PropertyAccessor.class, Iterator.class.getName(), XWorkIteratorPropertyAccessor.class, Scope.SINGLETON) + .factory(PropertyAccessor.class, Enumeration.class.getName(), XWorkEnumerationAccessor.class, Scope.SINGLETON) + .factory(UnknownHandlerManager.class, DefaultUnknownHandlerManager.class, Scope.SINGLETON) + + // silly workarounds for ognl since there is no way to flush its caches + .factory(PropertyAccessor.class, List.class.getName(), XWorkListPropertyAccessor.class, Scope.SINGLETON) + .factory(PropertyAccessor.class, ArrayList.class.getName(), XWorkListPropertyAccessor.class, Scope.SINGLETON) + .factory(PropertyAccessor.class, HashSet.class.getName(), XWorkCollectionPropertyAccessor.class, Scope.SINGLETON) + .factory(PropertyAccessor.class, Set.class.getName(), XWorkCollectionPropertyAccessor.class, Scope.SINGLETON) + .factory(PropertyAccessor.class, HashMap.class.getName(), XWorkMapPropertyAccessor.class, Scope.SINGLETON) + .factory(PropertyAccessor.class, Map.class.getName(), XWorkMapPropertyAccessor.class, Scope.SINGLETON) + 
.factory(PropertyAccessor.class, Collection.class.getName(), XWorkCollectionPropertyAccessor.class, Scope.SINGLETON) + .factory(PropertyAccessor.class, ObjectProxy.class.getName(), ObjectProxyPropertyAccessor.class, Scope.SINGLETON) + .factory(MethodAccessor.class, Object.class.getName(), XWorkMethodAccessor.class, Scope.SINGLETON) + .factory(MethodAccessor.class, CompoundRoot.class.getName(), CompoundRootAccessor.class, Scope.SINGLETON) + + .factory(TextParser.class, OgnlTextParser.class, Scope.SINGLETON) + + .factory(NullHandler.class, Object.class.getName(), InstantiatingNullHandler.class, Scope.SINGLETON) + .factory(ActionValidatorManager.class, AnnotationActionValidatorManager.class, Scope.SINGLETON) + .factory(ActionValidatorManager.class, ""no-annotations"", DefaultActionValidatorManager.class, Scope.SINGLETON) + .factory(TextProvider.class, ""system"", DefaultTextProvider.class, Scope.SINGLETON) + .factory(TextProvider.class, TextProviderSupport.class, Scope.SINGLETON) + .factory(LocaleProvider.class, DefaultLocaleProvider.class, Scope.SINGLETON) + .factory(OgnlUtil.class, Scope.SINGLETON) + .factory(CollectionConverter.class, Scope.SINGLETON) + .factory(ArrayConverter.class, Scope.SINGLETON) + .factory(DateConverter.class, Scope.SINGLETON) + .factory(NumberConverter.class, Scope.SINGLETON) + .factory(StringConverter.class, Scope.SINGLETON); + props.setProperty(XWorkConstants.DEV_MODE, Boolean.FALSE.toString()); + props.setProperty(XWorkConstants.LOG_MISSING_PROPERTIES, Boolean.FALSE.toString()); + props.setProperty(XWorkConstants.ENABLE_OGNL_EXPRESSION_CACHE, Boolean.TRUE.toString()); + props.setProperty(XWorkConstants.RELOAD_XML_CONFIGURATION, Boolean.FALSE.toString()); + } + +" +237,1," public void fireOnComplete() { + List listenersCopy = new ArrayList<>(); + listenersCopy.addAll(listeners); + + ClassLoader oldCL = context.bind(Globals.IS_SECURITY_ENABLED, null); + try { + for (AsyncListenerWrapper listener : listenersCopy) { + try { + listener.fireOnComplete(event); + } catch (Throwable t) { + ExceptionUtils.handleThrowable(t); + log.warn(""onComplete() failed for listener of type ["" + + listener.getClass().getName() + ""]"", t); + } + } + } finally { + context.fireRequestDestroyEvent(request); + clearServletRequestResponse(); + context.unbind(Globals.IS_SECURITY_ENABLED, oldCL); + } + } + + +" +238,1," public void testExcludedTrickyParameters() throws Exception { + Map params = new HashMap() { + { + put(""blah"", ""This is blah""); + put(""name"", ""try_1""); + put(""(name)"", ""try_2""); + put(""['name']"", ""try_3""); + put(""['na' + 'me']"", ""try_4""); + put(""{name}[0]"", ""try_5""); + put(""(new string{'name'})[0]"", ""try_6""); + put(""#{key: 'name'}.key"", ""try_7""); + + } + }; + + HashMap extraContext = new HashMap(); + extraContext.put(ActionContext.PARAMETERS, params); + + ActionProxy proxy = actionProxyFactory.createActionProxy("""", MockConfigurationProvider.PARAM_INTERCEPTOR_ACTION_NAME, extraContext); + + ActionConfig config = configuration.getRuntimeConfiguration().getActionConfig("""", MockConfigurationProvider.PARAM_INTERCEPTOR_ACTION_NAME); + ParametersInterceptor pi =(ParametersInterceptor) config.getInterceptors().get(0).getInterceptor(); + pi.setExcludeParams(""name""); + + proxy.execute(); + + SimpleAction action = (SimpleAction) proxy.getAction(); + assertNull(action.getName()); + assertEquals(""This is blah"", (action).getBlah()); + } + +" +239,1," protected BindResult process(final LDAPConnection connection, final int depth) + throws LDAPException 
+ { + if (connection.synchronousMode()) + { + @SuppressWarnings(""deprecation"") + final boolean autoReconnect = + connection.getConnectionOptions().autoReconnect(); + return processSync(connection, autoReconnect); + } + + // See if a bind DN was provided without a password. If that is the case + // and this should not be allowed, then throw an exception. + if (password != null) + { + if ((bindDN.getValue().length > 0) && (password.getValue().length == 0) && + connection.getConnectionOptions().bindWithDNRequiresPassword()) + { + final LDAPException le = new LDAPException(ResultCode.PARAM_ERROR, + ERR_SIMPLE_BIND_DN_WITHOUT_PASSWORD.get()); + debugCodingError(le); + throw le; + } + } + + + // Create the LDAP message. + messageID = connection.nextMessageID(); + final LDAPMessage message = new LDAPMessage(messageID, this, getControls()); + + + // Register with the connection reader to be notified of responses for the + // request that we've created. + connection.registerResponseAcceptor(messageID, this); + + + try + { + // Send the request to the server. + final long responseTimeout = getResponseTimeoutMillis(connection); + debugLDAPRequest(Level.INFO, this, messageID, connection); + final long requestTime = System.nanoTime(); + connection.getConnectionStatistics().incrementNumBindRequests(); + connection.sendMessage(message, responseTimeout); + + // Wait for and process the response. + final LDAPResponse response; + try + { + if (responseTimeout > 0) + { + response = responseQueue.poll(responseTimeout, TimeUnit.MILLISECONDS); + } + else + { + response = responseQueue.take(); + } + } + catch (final InterruptedException ie) + { + debugException(ie); + Thread.currentThread().interrupt(); + throw new LDAPException(ResultCode.LOCAL_ERROR, + ERR_BIND_INTERRUPTED.get(connection.getHostPort()), ie); + } + + return handleResponse(connection, response, requestTime, false); + } + finally + { + connection.deregisterResponseAcceptor(messageID); + } + } + + + + /** + * Processes this bind operation in synchronous mode, in which the same + * thread will send the request and read the response. + * + * @param connection The connection to use to communicate with the directory + * server. + * @param allowRetry Indicates whether the request may be re-tried on a + * re-established connection if the initial attempt fails + * in a way that indicates the connection is no longer + * valid and autoReconnect is true. + * + * @return An LDAP result object that provides information about the result + * of the bind processing. + * + * @throws LDAPException If a problem occurs while sending the request or + * reading the response. 
+ */ +" +240,1," public Set getControllerServices(final boolean recursive) { + readLock.lock(); + try { + final Set services = new HashSet<>(); + services.addAll(controllerServices.values()); + + if (recursive && parent.get() != null) { + services.addAll(parent.get().getControllerServices(true)); + } + + return services; + } finally { + readLock.unlock(); + } + } + + @Override +" +241,1," public boolean getValidateClientProvidedNewSessionId() { + return validateClientProvidedNewSessionId; + } + + @Override +" +242,1," private static void doZipDir(File dir, ZipOutputStream out) throws IOException { + File[] children = dir.listFiles(); + if (children == null) { + throw new IllegalStateException(""Fail to list files of directory "" + dir.getAbsolutePath()); + } + for (File child : children) { + doZip(child.getName(), child, out); + } + } + + /** + * @see #unzip(File, File, Predicate) + * @deprecated replaced by {@link Predicate} in 6.2. + */ + @Deprecated + @FunctionalInterface +" +243,1," public H and() { + return ChannelSecurityConfigurer.this.and(); + } + +" +244,1," public boolean isLockoutEnabled() { + return countFailuresWithin > 0; + } + +" +245,1," protected synchronized void authenticatorConfig() { + + // Always need an authenticator to support @ServletSecurity annotations + LoginConfig loginConfig = context.getLoginConfig(); + if (loginConfig == null) { + loginConfig = DUMMY_LOGIN_CONFIG; + context.setLoginConfig(loginConfig); + } + + // Has an authenticator been configured already? + if (context.getAuthenticator() != null) + return; + + if (!(context instanceof ContainerBase)) { + return; // Cannot install a Valve even if it would be needed + } + + // Has a Realm been configured for us to authenticate against? + if (context.getRealm() == null) { + log.error(sm.getString(""contextConfig.missingRealm"")); + ok = false; + return; + } + + /* + * First check to see if there is a custom mapping for the login + * method. If so, use it. Otherwise, check if there is a mapping in + * org/apache/catalina/startup/Authenticators.properties. 
+ */ + Valve authenticator = null; + if (customAuthenticators != null) { + authenticator = (Valve) + customAuthenticators.get(loginConfig.getAuthMethod()); + } + if (authenticator == null) { + // Load our mapping properties if necessary + if (authenticators == null) { + try { + InputStream is=this.getClass().getClassLoader().getResourceAsStream(""org/apache/catalina/startup/Authenticators.properties""); + if( is!=null ) { + authenticators = new Properties(); + authenticators.load(is); + } else { + log.error(sm.getString( + ""contextConfig.authenticatorResources"")); + ok=false; + return; + } + } catch (IOException e) { + log.error(sm.getString( + ""contextConfig.authenticatorResources""), e); + ok = false; + return; + } + } + + // Identify the class name of the Valve we should configure + String authenticatorName = null; + authenticatorName = + authenticators.getProperty(loginConfig.getAuthMethod()); + if (authenticatorName == null) { + log.error(sm.getString(""contextConfig.authenticatorMissing"", + loginConfig.getAuthMethod())); + ok = false; + return; + } + + // Instantiate and install an Authenticator of the requested class + try { + Class authenticatorClass = Class.forName(authenticatorName); + authenticator = (Valve) authenticatorClass.newInstance(); + } catch (Throwable t) { + ExceptionUtils.handleThrowable(t); + log.error(sm.getString( + ""contextConfig.authenticatorInstantiate"", + authenticatorName), + t); + ok = false; + } + } + + if (authenticator != null && context instanceof ContainerBase) { + Pipeline pipeline = ((ContainerBase) context).getPipeline(); + if (pipeline != null) { + ((ContainerBase) context).getPipeline().addValve(authenticator); + if (log.isDebugEnabled()) { + log.debug(sm.getString( + ""contextConfig.authenticatorConfigured"", + loginConfig.getAuthMethod())); + } + } + } + + } + + + /** + * Create (if necessary) and return a Digester configured to process the + * web application deployment descriptor (web.xml). 
+ */ +" +246,1," private void processInternal(Exchange exchange, AsyncCallback callback) throws Exception { + // creating the url to use takes 2-steps + String url = HttpHelper.createURL(exchange, getEndpoint()); + URI uri = HttpHelper.createURI(exchange, url, getEndpoint()); + // get the url from the uri + url = uri.toASCIIString(); + + // execute any custom url rewrite + String rewriteUrl = HttpHelper.urlRewrite(exchange, url, getEndpoint(), this); + if (rewriteUrl != null) { + // update url and query string from the rewritten url + url = rewriteUrl; + } + + String methodName = HttpHelper.createMethod(exchange, getEndpoint(), exchange.getIn().getBody() != null).name(); + + JettyContentExchange httpExchange = getEndpoint().createContentExchange(); + httpExchange.init(exchange, getBinding(), client, callback); + httpExchange.setURL(url); // Url has to be set first + httpExchange.setMethod(methodName); + + if (getEndpoint().getHttpClientParameters() != null) { + // For jetty 9 these parameters can not be set on the client + // so we need to set them on the httpExchange + String timeout = (String)getEndpoint().getHttpClientParameters().get(""timeout""); + if (timeout != null) { + httpExchange.setTimeout(new Long(timeout)); + } + String supportRedirect = (String)getEndpoint().getHttpClientParameters().get(""supportRedirect""); + if (supportRedirect != null) { + httpExchange.setSupportRedirect(Boolean.valueOf(supportRedirect)); + } + } + + LOG.trace(""Using URL: {} with method: {}"", url, methodName); + + // if there is a body to send as data + if (exchange.getIn().getBody() != null) { + String contentType = ExchangeHelper.getContentType(exchange); + if (contentType != null) { + httpExchange.setRequestContentType(contentType); + } + + if (contentType != null && HttpConstants.CONTENT_TYPE_JAVA_SERIALIZED_OBJECT.equals(contentType)) { + // serialized java object + Serializable obj = exchange.getIn().getMandatoryBody(Serializable.class); + // write object to output stream + ByteArrayOutputStream bos = new ByteArrayOutputStream(); + try { + HttpHelper.writeObjectToStream(bos, obj); + httpExchange.setRequestContent(bos.toByteArray()); + } finally { + IOHelper.close(bos, ""body"", LOG); + } + } else { + Object body = exchange.getIn().getBody(); + if (body instanceof String) { + String data = (String) body; + // be a bit careful with String as any type can most likely be converted to String + // so we only do an instanceof check and accept String if the body is really a String + // do not fallback to use the default charset as it can influence the request + // (for example application/x-www-form-urlencoded forms being sent) + String charset = IOHelper.getCharsetName(exchange, false); + httpExchange.setRequestContent(data, charset); + } else { + // then fallback to input stream + InputStream is = exchange.getContext().getTypeConverter().mandatoryConvertTo(InputStream.class, exchange, exchange.getIn().getBody()); + httpExchange.setRequestContent(is); + // setup the content length if it is possible + String length = exchange.getIn().getHeader(Exchange.CONTENT_LENGTH, String.class); + if (ObjectHelper.isNotEmpty(length)) { + httpExchange.addRequestHeader(Exchange.CONTENT_LENGTH, length); + } + } + } + } + + // if we bridge endpoint then we need to skip matching headers with the HTTP_QUERY to avoid sending + // duplicated headers to the receiver, so use this skipRequestHeaders as the list of headers to skip + Map skipRequestHeaders = null; + if (getEndpoint().isBridgeEndpoint()) { + 
exchange.setProperty(Exchange.SKIP_GZIP_ENCODING, Boolean.TRUE); + String queryString = exchange.getIn().getHeader(Exchange.HTTP_QUERY, String.class); + if (queryString != null) { + skipRequestHeaders = URISupport.parseQuery(queryString, false, true); + } + // Need to remove the Host key as it should be not used + exchange.getIn().getHeaders().remove(""host""); + } + + // propagate headers as HTTP headers + Message in = exchange.getIn(); + HeaderFilterStrategy strategy = getEndpoint().getHeaderFilterStrategy(); + for (Map.Entry entry : in.getHeaders().entrySet()) { + String key = entry.getKey(); + Object headerValue = in.getHeader(key); + + if (headerValue != null) { + // use an iterator as there can be multiple values. (must not use a delimiter, and allow empty values) + final Iterator it = ObjectHelper.createIterator(headerValue, null, true); + + // the values to add as a request header + final List values = new ArrayList(); + + // if its a multi value then check each value if we can add it and for multi values they + // should be combined into a single value + while (it.hasNext()) { + String value = exchange.getContext().getTypeConverter().convertTo(String.class, it.next()); + + // we should not add headers for the parameters in the uri if we bridge the endpoint + // as then we would duplicate headers on both the endpoint uri, and in HTTP headers as well + if (skipRequestHeaders != null && skipRequestHeaders.containsKey(key)) { + continue; + } + if (value != null && strategy != null && !strategy.applyFilterToCamelHeaders(key, value, exchange)) { + values.add(value); + } + } + + // add the value(s) as a http request header + if (values.size() > 0) { + // use the default toString of a ArrayList to create in the form [xxx, yyy] + // if multi valued, for a single value, then just output the value as is + String s = values.size() > 1 ? values.toString() : values.get(0); + httpExchange.addRequestHeader(key, s); + } + } + } + + // set the callback, which will handle all the response logic + if (LOG.isDebugEnabled()) { + LOG.debug(""Sending HTTP request to: {}"", httpExchange.getUrl()); + } + httpExchange.send(client); + } + +" +247,1," public void send(final String format, final Object... 
args) { + descriptor.debug(logger, format, args); + } + } + final Debug debug = new Debug(); + + Set users = null; + + final Run currentRun = context.getRun(); + if (currentRun == null) { + debug.send(""currentRun was null""); + } else { + if (!Objects.equals(currentRun.getResult(), Result.FAILURE)) { + debug.send(""currentBuild did not fail""); + } else { + users = new HashSet<>(); + debug.send(""Collecting builds with suspects...""); + final HashSet> buildsWithSuspects = new HashSet<>(); + Run firstFailedBuild = currentRun; + Run candidate = currentRun; + while (candidate != null) { + final Result candidateResult = candidate.getResult(); + if ( candidateResult == null || !candidateResult.isWorseOrEqualTo(Result.FAILURE) ) { + break; + } + firstFailedBuild = candidate; + candidate = candidate.getPreviousCompletedBuild(); + } + if (firstFailedBuild instanceof AbstractBuild) { + buildsWithSuspects.add(firstFailedBuild); + } else { + debug.send("" firstFailedBuild was not an instance of AbstractBuild""); + } + debug.send(""Collecting suspects...""); + users.addAll(RecipientProviderUtilities.getChangeSetAuthors(buildsWithSuspects, debug)); + users.addAll(RecipientProviderUtilities.getUsersTriggeringTheBuilds(buildsWithSuspects, debug)); + } + } + if (users != null) { + RecipientProviderUtilities.addUsers(users, context.getListener(), env, to, cc, bcc, debug); + } + } + + @Extension + public static final class DescriptorImpl extends RecipientProviderDescriptor { + @Override + public String getDisplayName() { + return ""Suspects Causing the Build to Begin Failing""; + } + } + +} +" +248,1," public Document getMetaData(Idp config) throws RuntimeException { + //Return as text/xml + try { + Crypto crypto = CertsUtils.createCrypto(config.getCertificate()); + + ByteArrayOutputStream bout = new ByteArrayOutputStream(4096); + Writer streamWriter = new OutputStreamWriter(bout, ""UTF-8""); + XMLStreamWriter writer = XML_OUTPUT_FACTORY.createXMLStreamWriter(streamWriter); + + writer.writeStartDocument(""UTF-8"", ""1.0""); + + String referenceID = IDGenerator.generateID(""_""); + writer.writeStartElement(""md"", ""EntityDescriptor"", SAML2_METADATA_NS); + writer.writeAttribute(""ID"", referenceID); + + writer.writeAttribute(""entityID"", config.getIdpUrl().toString()); + + writer.writeNamespace(""md"", SAML2_METADATA_NS); + writer.writeNamespace(""fed"", WS_FEDERATION_NS); + writer.writeNamespace(""wsa"", WS_ADDRESSING_NS); + writer.writeNamespace(""auth"", WS_FEDERATION_NS); + writer.writeNamespace(""xsi"", SCHEMA_INSTANCE_NS); + + writeFederationMetadata(writer, config, crypto); + + writer.writeEndElement(); // EntityDescriptor + + writer.writeEndDocument(); + streamWriter.flush(); + bout.flush(); + + if (LOG.isDebugEnabled()) { + String out = new String(bout.toByteArray()); + LOG.debug(""***************** unsigned ****************""); + LOG.debug(out); + LOG.debug(""***************** unsigned ****************""); + } + + InputStream is = new ByteArrayInputStream(bout.toByteArray()); + + Document result = SignatureUtils.signMetaInfo(crypto, null, config.getCertificatePassword(), is, referenceID); + if (result != null) { + return result; + } else { + throw new RuntimeException(""Failed to sign the metadata document: result=null""); + } + } catch (RuntimeException e) { + throw e; + } catch (Exception e) { + LOG.error(""Error creating service metadata information "", e); + throw new RuntimeException(""Error creating service metadata information: "" + e.getMessage()); + } + + } + +" +249,1," public void 
chaosSnapshotTest() throws Exception { + final List indices = new CopyOnWriteArrayList<>(); + Settings settings = settingsBuilder().put(""action.write_consistency"", ""one"").build(); + int initialNodes = between(1, 3); + logger.info(""--> start {} nodes"", initialNodes); + for (int i = 0; i < initialNodes; i++) { + internalCluster().startNode(settings); + } + + logger.info(""--> creating repository""); + assertAcked(client().admin().cluster().preparePutRepository(""test-repo"") + .setType(""fs"").setSettings(ImmutableSettings.settingsBuilder() + .put(""location"", newTempDir(LifecycleScope.SUITE)) + .put(""compress"", randomBoolean()) + .put(""chunk_size"", randomIntBetween(100, 1000)))); + + int initialIndices = between(1, 3); + logger.info(""--> create {} indices"", initialIndices); + for (int i = 0; i < initialIndices; i++) { + createTestIndex(""test-"" + i); + indices.add(""test-"" + i); + } + + int asyncNodes = between(0, 5); + logger.info(""--> start {} additional nodes asynchronously"", asyncNodes); + ListenableFuture> asyncNodesFuture = internalCluster().startNodesAsync(asyncNodes, settings); + + int asyncIndices = between(0, 10); + logger.info(""--> create {} additional indices asynchronously"", asyncIndices); + Thread[] asyncIndexThreads = new Thread[asyncIndices]; + for (int i = 0; i < asyncIndices; i++) { + final int cur = i; + asyncIndexThreads[i] = new Thread(new Runnable() { + @Override + public void run() { + createTestIndex(""test-async-"" + cur); + indices.add(""test-async-"" + cur); + + } + }); + asyncIndexThreads[i].start(); + } + + logger.info(""--> snapshot""); + + ListenableActionFuture snapshotResponseFuture = client().admin().cluster().prepareCreateSnapshot(""test-repo"", ""test-snap"").setWaitForCompletion(true).setIndices(""test-*"").setPartial(true).execute(); + + long start = System.currentTimeMillis(); + // Produce chaos for 30 sec or until snapshot is done whatever comes first + int randomIndices = 0; + while (System.currentTimeMillis() - start < 30000 && !snapshotIsDone(""test-repo"", ""test-snap"")) { + Thread.sleep(100); + int chaosType = randomInt(10); + if (chaosType < 4) { + // Randomly delete an index + if (indices.size() > 0) { + String index = indices.remove(randomInt(indices.size() - 1)); + logger.info(""--> deleting random index [{}]"", index); + internalCluster().wipeIndices(index); + } + } else if (chaosType < 6) { + // Randomly shutdown a node + if (cluster().size() > 1) { + logger.info(""--> shutting down random node""); + internalCluster().stopRandomDataNode(); + } + } else if (chaosType < 8) { + // Randomly create an index + String index = ""test-rand-"" + randomIndices; + logger.info(""--> creating random index [{}]"", index); + createTestIndex(index); + randomIndices++; + } else { + // Take a break + logger.info(""--> noop""); + } + } + + logger.info(""--> waiting for async indices creation to finish""); + for (int i = 0; i < asyncIndices; i++) { + asyncIndexThreads[i].join(); + } + + logger.info(""--> update index settings to back to normal""); + assertAcked(client().admin().indices().prepareUpdateSettings(""test-*"").setSettings(ImmutableSettings.builder() + .put(AbstractIndexStore.INDEX_STORE_THROTTLE_TYPE, ""node"") + )); + + // Make sure that snapshot finished - doesn't matter if it failed or succeeded + try { + CreateSnapshotResponse snapshotResponse = snapshotResponseFuture.get(); + SnapshotInfo snapshotInfo = snapshotResponse.getSnapshotInfo(); + assertNotNull(snapshotInfo); + logger.info(""--> snapshot is done with state [{}], 
total shards [{}], successful shards [{}]"", snapshotInfo.state(), snapshotInfo.totalShards(), snapshotInfo.successfulShards()); + } catch (Exception ex) { + logger.info(""--> snapshot didn't start properly"", ex); + } + + asyncNodesFuture.get(); + logger.info(""--> done""); + } + +" +250,1," public Principal authenticate(String username, String credentials) { + + // No user or no credentials + // Can't possibly authenticate, don't bother the database then + if (username == null || credentials == null) { + if (log.isDebugEnabled()) + log.debug(sm.getString(""memoryRealm.authenticateFailure"", username)); + return null; + } + + GenericPrincipal principal = principals.get(username); + + if(principal == null || principal.getPassword() == null) { + // User was not found in the database of the password was null + + if (log.isDebugEnabled()) + log.debug(sm.getString(""memoryRealm.authenticateFailure"", username)); + return null; + } + + boolean validated = getCredentialHandler().matches(credentials, principal.getPassword()); + + if (validated) { + if (log.isDebugEnabled()) + log.debug(sm.getString(""memoryRealm.authenticateSuccess"", username)); + return principal; + } else { + if (log.isDebugEnabled()) + log.debug(sm.getString(""memoryRealm.authenticateFailure"", username)); + return null; + } + } + + + // -------------------------------------------------------- Package Methods + + + /** + * Add a new user to the in-memory database. + * + * @param username User's username + * @param password User's password (clear text) + * @param roles Comma-delimited set of roles associated with this user + */ +" +251,1," public Object getValue(Object o) { + if ( location.getMember() == null ) { + return o; + } + else { + return ReflectionHelper.getValue( location.getMember(), o ); + } + } + + @Override +" +252,1," private String buildErrorMessage(Throwable e, Object[] args) { + String errorKey = ""struts.message.upload.error."" + e.getClass().getSimpleName(); + if (LOG.isDebugEnabled()) + LOG.debug(""Preparing error message for key: [#0]"", errorKey); + return LocalizedTextUtil.findText(this.getClass(), errorKey, defaultLocale, e.getMessage(), args); + } + + /** + * Build action message. + * + * @param e + * @param args + * @return + */ +" +253,1," private void resetContext() throws Exception { + // Restore the original state ( pre reading web.xml in start ) + // If you extend this - override this method and make sure to clean up + + // Don't reset anything that is read from a element since + // elements are read at initialisation will not be read + // again for this object + children = new HashMap(); + startupTime = 0; + startTime = 0; + tldScanTime = 0; + + // Bugzilla 32867 + distributable = false; + + applicationListeners = new String[0]; + applicationEventListenersObjects = new Object[0]; + applicationLifecycleListenersObjects = new Object[0]; + jspConfigDescriptor = new ApplicationJspConfigDescriptor(); + + initializers.clear(); + + if(log.isDebugEnabled()) + log.debug(""resetContext "" + getObjectName()); + } + + /** + * Return a String representation of this component. 
+ */ + @Override +" +254,1," public void testRestoreToShadow() throws ExecutionException, InterruptedException { + Settings nodeSettings = nodeSettings(); + + internalCluster().startNodesAsync(3, nodeSettings).get(); + final Path dataPath = newTempDir().toPath(); + Settings idxSettings = ImmutableSettings.builder() + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0).build(); + assertAcked(prepareCreate(""foo"").setSettings(idxSettings)); + ensureGreen(); + final int numDocs = randomIntBetween(10, 100); + for (int i = 0; i < numDocs; i++) { + client().prepareIndex(""foo"", ""doc"", """"+i).setSource(""foo"", ""bar"").get(); + } + assertNoFailures(client().admin().indices().prepareFlush().setForce(true).setWaitIfOngoing(true).execute().actionGet()); + + assertAcked(client().admin().cluster().preparePutRepository(""test-repo"") + .setType(""fs"").setSettings(ImmutableSettings.settingsBuilder() + .put(""location"", newTempDir().toPath()))); + CreateSnapshotResponse createSnapshotResponse = client().admin().cluster().prepareCreateSnapshot(""test-repo"", ""test-snap"").setWaitForCompletion(true).setIndices(""foo"").get(); + assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0)); + assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards())); + assertThat(client().admin().cluster().prepareGetSnapshots(""test-repo"").setSnapshots(""test-snap"").get().getSnapshots().get(0).state(), equalTo(SnapshotState.SUCCESS)); + + Settings shadowSettings = ImmutableSettings.builder() + .put(IndexMetaData.SETTING_DATA_PATH, dataPath.toAbsolutePath().toString()) + .put(IndexMetaData.SETTING_SHADOW_REPLICAS, true) + .put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 2).build(); + + logger.info(""--> restore the index into shadow replica index""); + RestoreSnapshotResponse restoreSnapshotResponse = client().admin().cluster().prepareRestoreSnapshot(""test-repo"", ""test-snap"") + .setIndexSettings(shadowSettings).setWaitForCompletion(true) + .setRenamePattern(""(.+)"").setRenameReplacement(""$1-copy"") + .execute().actionGet(); + assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0)); + ensureGreen(); + refresh(); + + for (IndicesService service : internalCluster().getDataNodeInstances(IndicesService.class)) { + if (service.hasIndex(""foo-copy"")) { + IndexShard shard = service.indexServiceSafe(""foo-copy"").shard(0); + if (shard.routingEntry().primary()) { + assertFalse(shard instanceof ShadowIndexShard); + } else { + assertTrue(shard instanceof ShadowIndexShard); + } + } + } + logger.info(""--> performing query""); + SearchResponse resp = client().prepareSearch(""foo-copy"").setQuery(matchAllQuery()).get(); + assertHitCount(resp, numDocs); + + } + + @Test +" +255,1," public KeyPair generateKeyPair() + { + if (!initialised) + { + DSAParametersGenerator pGen = new DSAParametersGenerator(); + + pGen.init(strength, certainty, random); + param = new DSAKeyGenerationParameters(random, pGen.generateParameters()); + engine.init(param); + initialised = true; + } + + AsymmetricCipherKeyPair pair = engine.generateKeyPair(); + DSAPublicKeyParameters pub = (DSAPublicKeyParameters)pair.getPublic(); + DSAPrivateKeyParameters priv = (DSAPrivateKeyParameters)pair.getPrivate(); + + return new KeyPair(new BCDSAPublicKey(pub), new BCDSAPrivateKey(priv)); + } +" +256,1," public void testInvalidate() 
throws IOException { + ProjectWorkspace workspace = + TestDataHelper.createProjectWorkspaceForScenario(this, ""parser_with_cell"", tmp); + workspace.setUp(); + + // Warm the parser cache. + TestContext context = new TestContext(); + ProcessResult runBuckResult = + workspace.runBuckdCommand(context, ""query"", ""deps(//Apps:TestAppsLibrary)""); + runBuckResult.assertSuccess(); + assertThat( + runBuckResult.getStdout(), + Matchers.containsString( + ""//Apps:TestAppsLibrary\n"" + + ""//Libraries/Dep1:Dep1_1\n"" + + ""//Libraries/Dep1:Dep1_2\n"" + + ""bar//Dep2:Dep2"")); + + // Save the parser cache to a file. + NamedTemporaryFile tempFile = new NamedTemporaryFile(""parser_data"", null); + runBuckResult = + workspace.runBuckdCommand(context, ""parser-cache"", ""--save"", tempFile.get().toString()); + runBuckResult.assertSuccess(); + + // Write an empty content to Apps/BUCK. + Path path = tmp.getRoot().resolve(""Apps/BUCK""); + byte[] data = {}; + Files.write(path, data); + + // Write an empty content to Apps/BUCK. + Path invalidationJsonPath = tmp.getRoot().resolve(""invalidation-data.json""); + String jsonData = ""[{\""path\"":\""Apps/BUCK\"",\""status\"":\""M\""}]""; + Files.write(invalidationJsonPath, jsonData.getBytes(StandardCharsets.UTF_8)); + + context = new TestContext(); + // Load the parser cache to a new buckd context. + runBuckResult = + workspace.runBuckdCommand( + context, + ""parser-cache"", + ""--load"", + tempFile.get().toString(), + ""--changes"", + invalidationJsonPath.toString()); + runBuckResult.assertSuccess(); + + // Perform the query again. + try { + workspace.runBuckdCommand(context, ""query"", ""deps(//Apps:TestAppsLibrary)""); + } catch (HumanReadableException e) { + assertThat( + e.getMessage(), Matchers.containsString(""//Apps:TestAppsLibrary could not be found"")); + } + } +" +257,1," public void send(final String format, final Object... args) { + descriptor.debug(logger, format, args); + } + } + final Debug debug = new Debug(); + Set users = RecipientProviderUtilities.getChangeSetAuthors(Collections.>singleton(context.getRun()), debug); + RecipientProviderUtilities.addUsers(users, context.getListener(), env, to, cc, bcc, debug); + } + + @Extension + public static final class DescriptorImpl extends RecipientProviderDescriptor { + @Override + public String getDisplayName() { + return ""Developers""; + } + } +} +" +258,1," public void testRejectBindWithDNButNoPassword() + throws Exception + { + if (! isDirectoryInstanceAvailable()) + { + return; + } + + LDAPConnection conn = getUnauthenticatedConnection(); + SimpleBindRequest bindRequest = new SimpleBindRequest(getTestBindDN(), """"); + + try + { + bindRequest.process(conn, 1); + fail(""Expected an exception when binding with a DN but no password""); + } + catch (LDAPException le) + { + assertEquals(le.getResultCode(), ResultCode.PARAM_ERROR); + } + + + // Reconfigure the connection so that it will allow binds with a DN but no + // password. + conn.getConnectionOptions().setBindWithDNRequiresPassword(false); + try + { + bindRequest.process(conn, 1); + } + catch (LDAPException le) + { + // The server will still likely reject the operation, but we should at + // least verify that it wasn't a parameter error. 
+ assertFalse(le.getResultCode() == ResultCode.PARAM_ERROR); + } + + conn.getConnectionOptions().setBindWithDNRequiresPassword(true); + conn.close(); + } +" +259,1," public LockoutPolicy getLockoutPolicy() { + LockoutPolicy res = IdentityZoneHolder.get().getConfig().getClientLockoutPolicy(); + return res.getLockoutAfterFailures() != -1 ? res : defaultLockoutPolicy; + } + + @Override +" +260,1," private void detectJPA() { + // check whether we have Persistence on the classpath + Class persistenceClass; + try { + persistenceClass = run( LoadClass.action( PERSISTENCE_CLASS_NAME, this.getClass() ) ); + } + catch ( ValidationException e ) { + log.debugf( + ""Cannot find %s on classpath. Assuming non JPA 2 environment. All properties will per default be traversable."", + PERSISTENCE_CLASS_NAME + ); + return; + } + + // check whether Persistence contains getPersistenceUtil + Method persistenceUtilGetter = run( GetMethod.action( persistenceClass, PERSISTENCE_UTIL_METHOD ) ); + if ( persistenceUtilGetter == null ) { + log.debugf( + ""Found %s on classpath, but no method '%s'. Assuming JPA 1 environment. All properties will per default be traversable."", + PERSISTENCE_CLASS_NAME, + PERSISTENCE_UTIL_METHOD + ); + return; + } + + // try to invoke the method to make sure that we are dealing with a complete JPA2 implementation + // unfortunately there are several incomplete implementations out there (see HV-374) + try { + Object persistence = run( NewInstance.action( persistenceClass, ""persistence provider"" ) ); + ReflectionHelper.getValue(persistenceUtilGetter, persistence ); + } + catch ( Exception e ) { + log.debugf( + ""Unable to invoke %s.%s. Inconsistent JPA environment. All properties will per default be traversable."", + PERSISTENCE_CLASS_NAME, + PERSISTENCE_UTIL_METHOD + ); + } + + log.debugf( + ""Found %s on classpath containing '%s'. Assuming JPA 2 environment. Trying to instantiate JPA aware TraversableResolver"", + PERSISTENCE_CLASS_NAME, + PERSISTENCE_UTIL_METHOD + ); + + try { + @SuppressWarnings(""unchecked"") + Class jpaAwareResolverClass = (Class) + run( LoadClass.action( JPA_AWARE_TRAVERSABLE_RESOLVER_CLASS_NAME, this.getClass() ) ); + jpaTraversableResolver = run( NewInstance.action( jpaAwareResolverClass, """" ) ); + log.debugf( + ""Instantiated JPA aware TraversableResolver of type %s."", JPA_AWARE_TRAVERSABLE_RESOLVER_CLASS_NAME + ); + } + catch ( ValidationException e ) { + log.debugf( + ""Unable to load or instantiate JPA aware resolver %s. All properties will per default be traversable."", + JPA_AWARE_TRAVERSABLE_RESOLVER_CLASS_NAME + ); + } + } + + @Override +" +261,1," public void addRecipients(final ExtendedEmailPublisherContext context, EnvVars env, Set to, Set cc, Set bcc) { + final class Debug implements RecipientProviderUtilities.IDebug { + private final ExtendedEmailPublisherDescriptor descriptor + = Jenkins.getActiveInstance().getDescriptorByType(ExtendedEmailPublisherDescriptor.class); + + private final PrintStream logger = context.getListener().getLogger(); + + public void send(final String format, final Object... 
args) { + descriptor.debug(logger, format, args); + } + } + final Debug debug = new Debug(); + Run run = context.getRun(); + final Result runResult = run.getResult(); + if (run instanceof AbstractBuild) { + Set users = ((AbstractBuild)run).getCulprits(); + RecipientProviderUtilities.addUsers(users, context.getListener(), env, to, cc, bcc, debug); + } else if (runResult != null) { + List> builds = new ArrayList<>(); + Run build = run; + builds.add(build); + build = build.getPreviousCompletedBuild(); + while (build != null) { + final Result buildResult = build.getResult(); + if (buildResult != null) { + if (buildResult.isWorseThan(Result.SUCCESS)) { + debug.send(""Including build %s with status %s"", build.getId(), buildResult); + builds.add(build); + } else { + break; + } + } + build = build.getPreviousCompletedBuild(); + } + Set users = RecipientProviderUtilities.getChangeSetAuthors(builds, debug); + RecipientProviderUtilities.addUsers(users, context.getListener(), env, to, cc, bcc, debug); + } + } + + @Extension +" +262,1," protected boolean isProbablePrime(BigInteger x) + { + /* + * Primes class for FIPS 186-4 C.3 primality checking + */ + return !Primes.hasAnySmallFactors(x) && Primes.isMRProbablePrime(x, param.getRandom(), iterations); + } + +" +263,1," public void batchingShardUpdateTaskTest() throws Exception { + + final Client client = client(); + + logger.info(""--> creating repository""); + assertAcked(client.admin().cluster().preparePutRepository(""test-repo"") + .setType(""fs"").setSettings(ImmutableSettings.settingsBuilder() + .put(""location"", newTempDir()) + .put(""compress"", randomBoolean()) + .put(""chunk_size"", randomIntBetween(100, 1000)))); + + assertAcked(prepareCreate(""test-idx"", 0, settingsBuilder().put(""number_of_shards"", between(1, 20)) + .put(""number_of_replicas"", 0))); + ensureGreen(); + + logger.info(""--> indexing some data""); + final int numdocs = randomIntBetween(10, 100); + IndexRequestBuilder[] builders = new IndexRequestBuilder[numdocs]; + for (int i = 0; i < builders.length; i++) { + builders[i] = client().prepareIndex(""test-idx"", ""type1"", Integer.toString(i)).setSource(""field1"", ""bar "" + i); + } + indexRandom(true, builders); + flushAndRefresh(); + + final int numberOfShards = getNumShards(""test-idx"").numPrimaries; + logger.info(""number of shards: {}"", numberOfShards); + + final ClusterService clusterService = internalCluster().clusterService(internalCluster().getMasterName()); + BlockingClusterStateListener snapshotListener = new BlockingClusterStateListener(clusterService, ""update_snapshot ["", ""update snapshot state"", Priority.HIGH); + try { + clusterService.addFirst(snapshotListener); + logger.info(""--> snapshot""); + ListenableActionFuture snapshotFuture = client.admin().cluster().prepareCreateSnapshot(""test-repo"", ""test-snap"").setWaitForCompletion(true).setIndices(""test-idx"").execute(); + + // Await until shard updates are in pending state. 
+ assertBusyPendingTasks(""update snapshot state"", numberOfShards); + snapshotListener.unblock(); + + // Check that the snapshot was successful + CreateSnapshotResponse createSnapshotResponse = snapshotFuture.actionGet(); + assertEquals(SnapshotState.SUCCESS, createSnapshotResponse.getSnapshotInfo().state()); + assertEquals(numberOfShards, createSnapshotResponse.getSnapshotInfo().totalShards()); + assertEquals(numberOfShards, createSnapshotResponse.getSnapshotInfo().successfulShards()); + + } finally { + clusterService.remove(snapshotListener); + } + + // Check that we didn't timeout + assertFalse(snapshotListener.timedOut()); + // Check that cluster state update task was called only once + assertEquals(1, snapshotListener.count()); + + logger.info(""--> close indices""); + client.admin().indices().prepareClose(""test-idx"").get(); + + BlockingClusterStateListener restoreListener = new BlockingClusterStateListener(clusterService, ""restore_snapshot["", ""update snapshot state"", Priority.HIGH); + + try { + clusterService.addFirst(restoreListener); + logger.info(""--> restore snapshot""); + ListenableActionFuture futureRestore = client.admin().cluster().prepareRestoreSnapshot(""test-repo"", ""test-snap"").setWaitForCompletion(true).execute(); + + // Await until shard updates are in pending state. + assertBusyPendingTasks(""update snapshot state"", numberOfShards); + restoreListener.unblock(); + + RestoreSnapshotResponse restoreSnapshotResponse = futureRestore.actionGet(); + assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), equalTo(numberOfShards)); + + } finally { + clusterService.remove(restoreListener); + } + + // Check that we didn't timeout + assertFalse(restoreListener.timedOut()); + // Check that cluster state update task was called only once + assertEquals(1, restoreListener.count()); + } + +" +264,1," private File getLocationUnderBuild(AbstractBuild build) { + return new File(build.getRootDir(), ""fileParameters/"" + location); + } + + /** + * Default implementation from {@link File}. + */ +" +265,1," protected Details authenticate(String username, String password) throws AuthenticationException { + Details u = loadUserByUsername(username); + if (!u.isPasswordCorrect(password)) + throw new BadCredentialsException(""Failed to login as ""+username); + return u; + } + + /** + * Show the sign up page with the data from the identity. 
+ */ + @Override +" +266,1," public void resetPassword_InvalidPasswordException_NewPasswordSameAsOld() { + ScimUser user = new ScimUser(""user-id"", ""username"", ""firstname"", ""lastname""); + user.setMeta(new ScimMeta(new Date(), new Date(), 0)); + user.setPrimaryEmail(""foo@example.com""); + ExpiringCode expiringCode = new ExpiringCode(""good_code"", + new Timestamp(System.currentTimeMillis() + UaaResetPasswordService.PASSWORD_RESET_LIFETIME), ""user-id"", null); + when(codeStore.retrieveCode(""good_code"")).thenReturn(expiringCode); + when(scimUserProvisioning.retrieve(""user-id"")).thenReturn(user); + when(scimUserProvisioning.checkPasswordMatches(""user-id"", ""Passwo3dAsOld"")) + .thenThrow(new InvalidPasswordException(""Your new password cannot be the same as the old password."", UNPROCESSABLE_ENTITY)); + SecurityContext securityContext = mock(SecurityContext.class); + when(securityContext.getAuthentication()).thenReturn(new MockAuthentication()); + SecurityContextHolder.setContext(securityContext); + try { + emailResetPasswordService.resetPassword(""good_code"", ""Passwo3dAsOld""); + fail(); + } catch (InvalidPasswordException e) { + assertEquals(""Your new password cannot be the same as the old password."", e.getMessage()); + assertEquals(UNPROCESSABLE_ENTITY, e.getStatus()); + } + } + + @Test +" +267,1," public boolean getValidateClientProvidedNewSessionId() { return false; } +" +268,1," protected Http11AprProcessor createProcessor() { + Http11AprProcessor processor = new Http11AprProcessor( + proto.getMaxHttpHeaderSize(), (AprEndpoint)proto.endpoint, + proto.getMaxTrailerSize()); + processor.setAdapter(proto.getAdapter()); + processor.setMaxKeepAliveRequests(proto.getMaxKeepAliveRequests()); + processor.setKeepAliveTimeout(proto.getKeepAliveTimeout()); + processor.setConnectionUploadTimeout( + proto.getConnectionUploadTimeout()); + processor.setDisableUploadTimeout(proto.getDisableUploadTimeout()); + processor.setCompressionMinSize(proto.getCompressionMinSize()); + processor.setCompression(proto.getCompression()); + processor.setNoCompressionUserAgents(proto.getNoCompressionUserAgents()); + processor.setCompressableMimeTypes(proto.getCompressableMimeTypes()); + processor.setRestrictedUserAgents(proto.getRestrictedUserAgents()); + processor.setSocketBuffer(proto.getSocketBuffer()); + processor.setMaxSavePostSize(proto.getMaxSavePostSize()); + processor.setServer(proto.getServer()); + processor.setClientCertProvider(proto.getClientCertProvider()); + register(processor); + return processor; + } + + @Override +" +269,1," public synchronized Servlet loadServlet() throws ServletException { + + // Nothing to do if we already have an instance or an instance pool + if (!singleThreadModel && (instance != null)) + return instance; + + PrintStream out = System.out; + if (swallowOutput) { + SystemLogHandler.startCapture(); + } + + Servlet servlet; + try { + long t1=System.currentTimeMillis(); + // Complain if no servlet class has been specified + if (servletClass == null) { + unavailable(null); + throw new ServletException + (sm.getString(""standardWrapper.notClass"", getName())); + } + + InstanceManager instanceManager = ((StandardContext)getParent()).getInstanceManager(); + try { + servlet = (Servlet) instanceManager.newInstance(servletClass); + } catch (ClassCastException e) { + unavailable(null); + // Restore the context ClassLoader + throw new ServletException + (sm.getString(""standardWrapper.notServlet"", servletClass), e); + } catch (Throwable e) { + 
ExceptionUtils.handleThrowable(e); + unavailable(null); + + // Added extra log statement for Bugzilla 36630: + // http://issues.apache.org/bugzilla/show_bug.cgi?id=36630 + if(log.isDebugEnabled()) { + log.debug(sm.getString(""standardWrapper.instantiate"", servletClass), e); + } + + // Restore the context ClassLoader + throw new ServletException + (sm.getString(""standardWrapper.instantiate"", servletClass), e); + } + + if (multipartConfigElement == null) { + MultipartConfig annotation = + servlet.getClass().getAnnotation(MultipartConfig.class); + if (annotation != null) { + multipartConfigElement = + new MultipartConfigElement(annotation); + } + } + + ServletSecurity secAnnotation = + servlet.getClass().getAnnotation(ServletSecurity.class); + Context ctxt = (Context) getParent(); + if (secAnnotation != null) { + ctxt.addServletSecurity( + new ApplicationServletRegistration(this, ctxt), + new ServletSecurityElement(secAnnotation)); + } + + + // Special handling for ContainerServlet instances + if ((servlet instanceof ContainerServlet) && + (isContainerProvidedServlet(servletClass) || + ctxt.getPrivileged() )) { + ((ContainerServlet) servlet).setWrapper(this); + } + + classLoadTime=(int) (System.currentTimeMillis() -t1); + + initServlet(servlet); + + // Register our newly initialized instance + singleThreadModel = servlet instanceof SingleThreadModel; + if (singleThreadModel) { + if (instancePool == null) + instancePool = new Stack(); + } + fireContainerEvent(""load"", this); + + loadTime=System.currentTimeMillis() -t1; + } finally { + if (swallowOutput) { + String log = SystemLogHandler.stopCapture(); + if (log != null && log.length() > 0) { + if (getServletContext() != null) { + getServletContext().log(log); + } else { + out.println(log); + } + } + } + } + return servlet; + + } + +" +270,1," public void testWelcomeFileStrict() throws Exception { + + Tomcat tomcat = getTomcatInstance(); + + File appDir = new File(""test/webapp""); + + StandardContext ctxt = (StandardContext) tomcat.addWebapp(null, ""/test"", + appDir.getAbsolutePath()); + ctxt.setReplaceWelcomeFiles(true); + ctxt.addWelcomeFile(""index.jsp""); + // Mapping for *.do is defined in web.xml + ctxt.addWelcomeFile(""index.do""); + + // Simulate STRICT_SERVLET_COMPLIANCE + ctxt.setResourceOnlyServlets(""""); + + tomcat.start(); + ByteChunk bc = new ByteChunk(); + int rc = getUrl(""http://localhost:"" + getPort() + + ""/test/welcome-files"", bc, new HashMap>()); + Assert.assertEquals(HttpServletResponse.SC_OK, rc); + Assert.assertTrue(bc.toString().contains(""JSP"")); + + rc = getUrl(""http://localhost:"" + getPort() + + ""/test/welcome-files/sub"", bc, + new HashMap>()); + Assert.assertEquals(HttpServletResponse.SC_NOT_FOUND, rc); + } + + /** + * Prepare a string to search in messages that contain a timestamp, when it + * is known that the timestamp was printed between {@code timeA} and + * {@code timeB}. 
+ */ +" +271,1," protected void stripScopesFromAuthentication(String identityZoneId, HttpServletRequest servletRequest) { + OAuth2Authentication oa = (OAuth2Authentication)SecurityContextHolder.getContext().getAuthentication(); + + Object oaDetails = oa.getDetails(); + + //strip client scopes + OAuth2Request request = oa.getOAuth2Request(); + Collection requestAuthorities = UaaStringUtils.getStringsFromAuthorities(request.getAuthorities()); + Set clientScopes = new HashSet<>(); + Set clientAuthorities = new HashSet<>(); + for (String s : getZoneSwitchingScopes(identityZoneId)) { + String scope = stripPrefix(s, identityZoneId); + if (request.getScope().contains(s)) { + clientScopes.add(scope); + } + if (requestAuthorities.contains(s)) { + clientAuthorities.add(scope); + } + } + request = new OAuth2Request( + request.getRequestParameters(), + request.getClientId(), + UaaStringUtils.getAuthoritiesFromStrings(clientAuthorities), + request.isApproved(), + clientScopes, + request.getResourceIds(), + request.getRedirectUri(), + request.getResponseTypes(), + request.getExtensions() + ); + + + UaaAuthentication userAuthentication = (UaaAuthentication)oa.getUserAuthentication(); + if (userAuthentication!=null) { + userAuthentication = new UaaAuthentication( + userAuthentication.getPrincipal(), + null, + UaaStringUtils.getAuthoritiesFromStrings(clientScopes), + new UaaAuthenticationDetails(servletRequest), + true); + } + oa = new OAuth2Authentication(request, userAuthentication); + oa.setDetails(oaDetails); + SecurityContextHolder.getContext().setAuthentication(oa); + } + +" +272,1," public void testHandleNoInitialResponseNull() throws Exception + { + final AuthenticationResult result = _negotiator.handleResponse(null); + assertEquals(""Unexpected authentication status"", AuthenticationResult.AuthenticationStatus.CONTINUE, result.getStatus()); + assertArrayEquals(""Unexpected authentication challenge"", new byte[0], result.getChallenge()); + + final AuthenticationResult firstResult = _negotiator.handleResponse(VALID_RESPONSE.getBytes()); + assertEquals(""Unexpected first authentication result"", _expectedResult, firstResult); + } +" +273,1," private int readStored(final byte[] buffer, final int offset, final int length) throws IOException { + + if (current.hasDataDescriptor) { + if (lastStoredEntry == null) { + readStoredEntry(); + } + return lastStoredEntry.read(buffer, offset, length); + } + + final long csize = current.entry.getSize(); + if (current.bytesRead >= csize) { + return -1; + } + + if (buf.position() >= buf.limit()) { + buf.position(0); + final int l = in.read(buf.array()); + if (l == -1) { + return -1; + } + buf.limit(l); + + count(l); + current.bytesReadFromStream += l; + } + + int toRead = Math.min(buf.remaining(), length); + if ((csize - current.bytesRead) < toRead) { + // if it is smaller than toRead then it fits into an int + toRead = (int) (csize - current.bytesRead); + } + buf.get(buffer, offset, toRead); + current.bytesRead += toRead; + return toRead; + } + + /** + * Implementation of read for DEFLATED entries. + */ +" +274,1," private Object readResolve() { + return INSTANCE; + } + +" +275,1," private static ZipOutputStream getZipOutputStream(OutputStream out, Charset charset) { + charset = (null == charset) ? 
DEFAULT_CHARSET : charset; + return new ZipOutputStream(out, charset); + } + + /** + * Recursively compresses a folder + * + * @param out the zip output stream the archive is written to + * @param srcRootDir sub-path relative to the root directory being compressed + * @param file the file or directory currently being compressed recursively + * @throws UtilException in case of an IO exception + */ +" +276,1," public boolean isFinished() { + return endChunk; + } +" +277,1," public Object compile(String expression) throws OgnlException { + if (enableExpressionCache) { + Object o = expressions.get(expression); + if (o == null) { + o = Ognl.parseExpression(expression); + expressions.putIfAbsent(expression, o); + } + return o; + } else + return Ognl.parseExpression(expression); + } + + /** + * Copies the properties in the object ""from"" and sets them in the object ""to"" + * using specified type converter, or {@link com.opensymphony.xwork2.conversion.impl.XWorkConverter} if none + * is specified. + * + * @param from the source object + * @param to the target object + * @param context the action context we're running under + * @param exclusions collection of method names to excluded from copying ( can be null) + * @param inclusions collection of method names to included copying (can be null) + * note if exclusions AND inclusions are supplied and not null nothing will get copied. + */ +" +278,1," String hash(String plaintext, String salt, int iterations) throws EncryptionException; + + /** + * Encrypts the provided plaintext and returns a ciphertext string using the + * master secret key and default cipher transformation. + *
+ * Compatibility with earlier ESAPI versions: The symmetric encryption + * in ESAPI 2.0 and later is not compatible with the encryption in ESAPI 1.4 + * or earlier. Not only are the interfaces slightly different, but they format + * of the serialized encrypted data is incompatible. Therefore, if you have + * encrypted data with ESAPI 1.4 or earlier, you must first encrypt it and + * then re-encrypt it with ESAPI 2.0. Backward compatibility with ESAPI 1.4 + * was proposed to both the ESAPI Developers and ESAPI Users mailing lists + * and voted down. More details are available in the ESAPI document + * + * Why Is OWASP Changing ESAPI Encryption? + *
+ * Why this method is deprecated: Most cryptographers strongly suggest + * that if you are creating crypto functionality for general-purpose use, + * at a minimum you should ensure that it provides authenticity, integrity, + * and confidentiality. This method only provides confidentiality, but not + * authenticity or integrity. Therefore, you are encouraged to use + * one of the other encryption methods referenced below. Because this + * method provides neither authenticity nor integrity, it may be + * removed in some future ESAPI Java release. Note: there are some cases + * where authenticity / integrity are not that important. For instance, consider + * a case where the encrypted data is never out of your application's control. For + * example, if you receive data that your application is encrypting itself and then + * storing the encrypted data in its own database for later use (and no other + * applications can query or update that column of the database), providing + * confidentiality alone might be sufficient. However, if there are cases + * where your application will be sending or receiving already encrypted data + * over an insecure, unauthenticated channel, in such cases authenticity and + * integrity of the encrypted data likely is important and this method should + * be avoided in favor of one of the other two. + * + * @param plaintext + * the plaintext {@code String} to encrypt. Note that if you are encrypting + * general bytes, you should encypt that byte array to a String using + * ""UTF-8"" encoding. + * + * @return + * the encrypted, base64-encoded String representation of 'plaintext' plus + * the random IV used. + * + * @throws EncryptionException + * if the specified encryption algorithm could not be found or another problem exists with + * the encryption of 'plaintext' + * + * @see #encrypt(PlainText) + * @see #encrypt(SecretKey, PlainText) + * + * @deprecated As of 1.4.2; use {@link #encrypt(PlainText)} instead, which + * also ensures message authenticity. This method will be + * completely removed as of the next major release or point + * release (3.0 or 2.1, whichever comes first) as per OWASP + * deprecation policy. 
+ */ +" +279,1," public void handleDialog(ActionRequest req, ActionResponse resp) throws IOException, PortletException { + List lines = new ArrayList(); + req.getPortletSession().setAttribute(""lines"", lines); + + lines.add(""handling dialog""); + StringBuilder txt = new StringBuilder(128); + + String clr = req.getActionParameters().getValue(""color""); + txt.append(""Color: "").append(clr); + lines.add(txt.toString()); + LOGGER.fine(txt.toString()); + + resp.getRenderParameters().setValue(""color"", clr); + + txt.setLength(0); + Part part = null; + try { + part = req.getPart(""file""); + } catch (Throwable t) {} + + if ((part != null) && (part.getSubmittedFileName() != null) && + (part.getSubmittedFileName().length() > 0)) { + txt.append(""Uploaded file name: "").append(part.getSubmittedFileName()); + txt.append("", part name: "").append(part.getName()); + txt.append("", size: "").append(part.getSize()); + txt.append("", content type: "").append(part.getContentType()); + lines.add(txt.toString()); + LOGGER.fine(txt.toString()); + txt.setLength(0); + txt.append(""Headers: ""); + String sep = """"; + for (String hdrname : part.getHeaderNames()) { + txt.append(sep).append(hdrname).append(""="").append(part.getHeaders(hdrname)); + sep = "", ""; + } + lines.add(txt.toString()); + LOGGER.fine(txt.toString()); + + // Store the file in a temporary location in the webapp where it can be served. + // Note that this is, in general, not what you want to do in production, as + // there can be problems serving the resource. Did it this way for a + // quick solution that doesn't require additional Tomcat configuration. + + try { + String path = req.getPortletContext().getRealPath(TMP); + File dir = new File(path); + lines.add(""Temp path: "" + dir.getCanonicalPath()); + if (!dir.exists()) { + lines.add(""Creating directory. Path: "" + dir.getCanonicalPath()); + Files.createDirectories(dir.toPath()); + } + String fn = TMP + part.getSubmittedFileName(); + lines.add(""Temp file: "" + fn); + path = req.getPortletContext().getRealPath(fn); + File img = new File(path); + if (img.exists()) { + lines.add(""deleting existing temp file.""); + img.delete(); + } + InputStream is = part.getInputStream(); + Files.copy(is, img.toPath(), StandardCopyOption.REPLACE_EXISTING); + + resp.getRenderParameters().setValue(""fn"", fn); + resp.getRenderParameters().setValue(""ct"", part.getContentType()); + + } catch (Exception e) { + lines.add(""Exception doing I/O: "" + e.toString()); + } + } else { + lines.add(""file part was null""); + } + + } + + @RenderMethod(portletNames = ""MultipartPortlet"") +" +280,1," protected void doFilterInternal(HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) + throws ServletException, IOException { + + HttpServletRequest requestToUse = request; + + if (""POST"".equals(request.getMethod()) && request.getAttribute(WebUtils.ERROR_EXCEPTION_ATTRIBUTE) == null) { + String paramValue = request.getParameter(this.methodParam); + if (StringUtils.hasLength(paramValue)) { + requestToUse = new HttpMethodRequestWrapper(request, paramValue); + } + } + + filterChain.doFilter(requestToUse, response); + } + + + /** + * Simple {@link HttpServletRequest} wrapper that returns the supplied method for + * {@link HttpServletRequest#getMethod()}. 
+ */ +" +281,1," private ScimGroupExternalMember getExternalGroupMap(final String groupId, + final String externalGroup, + final String origin) + throws ScimResourceNotFoundException { + try { + ScimGroupExternalMember u = jdbcTemplate.queryForObject(GET_GROUPS_WITH_EXTERNAL_GROUP_MAPPINGS_SQL, + rowMapper, groupId, origin, externalGroup); + return u; + } catch (EmptyResultDataAccessException e) { + throw new ScimResourceNotFoundException(""The mapping between groupId "" + groupId + "" and external group "" + + externalGroup + "" does not exist""); + } + } + +" +282,1," protected void serveResource(HttpServletRequest request, + HttpServletResponse response, + boolean content, + String encoding) + throws IOException, ServletException { + + boolean serveContent = content; + + // Identify the requested resource path + String path = getRelativePath(request); + if (debug > 0) { + if (serveContent) + log(""DefaultServlet.serveResource: Serving resource '"" + + path + ""' headers and data""); + else + log(""DefaultServlet.serveResource: Serving resource '"" + + path + ""' headers only""); + } + + WebResource resource = resources.getResource(path); + + if (!resource.exists()) { + // Check if we're included so we can return the appropriate + // missing resource name in the error + String requestUri = (String) request.getAttribute( + RequestDispatcher.INCLUDE_REQUEST_URI); + if (requestUri == null) { + requestUri = request.getRequestURI(); + } else { + // We're included + // SRV.9.3 says we must throw a FNFE + throw new FileNotFoundException(sm.getString( + ""defaultServlet.missingResource"", requestUri)); + } + + response.sendError(HttpServletResponse.SC_NOT_FOUND, requestUri); + return; + } + + if (!resource.canRead()) { + // Check if we're included so we can return the appropriate + // missing resource name in the error + String requestUri = (String) request.getAttribute( + RequestDispatcher.INCLUDE_REQUEST_URI); + if (requestUri == null) { + requestUri = request.getRequestURI(); + } else { + // We're included + // Spec doesn't say what to do in this case but a FNFE seems + // reasonable + throw new FileNotFoundException(sm.getString( + ""defaultServlet.missingResource"", requestUri)); + } + + response.sendError(HttpServletResponse.SC_FORBIDDEN, requestUri); + return; + } + + // If the resource is not a collection, and the resource path + // ends with ""/"" or ""\"", return NOT FOUND + if (resource.isFile() && (path.endsWith(""/"") || path.endsWith(""\\""))) { + // Check if we're included so we can return the appropriate + // missing resource name in the error + String requestUri = (String) request.getAttribute( + RequestDispatcher.INCLUDE_REQUEST_URI); + if (requestUri == null) { + requestUri = request.getRequestURI(); + } + response.sendError(HttpServletResponse.SC_NOT_FOUND, requestUri); + return; + } + + boolean isError = response.getStatus() >= HttpServletResponse.SC_BAD_REQUEST; + + boolean included = false; + // Check if the conditions specified in the optional If headers are + // satisfied. + if (resource.isFile()) { + // Checking If headers + included = (request.getAttribute( + RequestDispatcher.INCLUDE_CONTEXT_PATH) != null); + if (!included && !isError && !checkIfHeaders(request, response, resource)) { + return; + } + } + + // Find content type. 
+ String contentType = resource.getMimeType(); + if (contentType == null) { + contentType = getServletContext().getMimeType(resource.getName()); + resource.setMimeType(contentType); + } + + // These need to reflect the original resource, not the potentially + // gzip'd version of the resource so get them now if they are going to + // be needed later + String eTag = null; + String lastModifiedHttp = null; + if (resource.isFile() && !isError) { + eTag = resource.getETag(); + lastModifiedHttp = resource.getLastModifiedHttp(); + } + + + // Serve a gzipped version of the file if present + boolean usingGzippedVersion = false; + if (gzip && !included && resource.isFile() && !path.endsWith("".gz"")) { + WebResource gzipResource = resources.getResource(path + "".gz""); + if (gzipResource.exists() && gzipResource.isFile()) { + Collection varyHeaders = response.getHeaders(""Vary""); + boolean addRequired = true; + for (String varyHeader : varyHeaders) { + if (""*"".equals(varyHeader) || + ""accept-encoding"".equalsIgnoreCase(varyHeader)) { + addRequired = false; + break; + } + } + if (addRequired) { + response.addHeader(""Vary"", ""accept-encoding""); + } + if (checkIfGzip(request)) { + response.addHeader(""Content-Encoding"", ""gzip""); + resource = gzipResource; + usingGzippedVersion = true; + } + } + } + + ArrayList ranges = null; + long contentLength = -1L; + + if (resource.isDirectory()) { + // Skip directory listings if we have been configured to + // suppress them + if (!listings) { + response.sendError(HttpServletResponse.SC_NOT_FOUND, + request.getRequestURI()); + return; + } + contentType = ""text/html;charset=UTF-8""; + } else { + if (!isError) { + if (useAcceptRanges) { + // Accept ranges header + response.setHeader(""Accept-Ranges"", ""bytes""); + } + + // Parse range specifier + ranges = parseRange(request, response, resource); + + // ETag header + response.setHeader(""ETag"", eTag); + + // Last-Modified header + response.setHeader(""Last-Modified"", lastModifiedHttp); + } + + // Get content length + contentLength = resource.getContentLength(); + // Special case for zero length files, which would cause a + // (silent) ISE when setting the output buffer size + if (contentLength == 0L) { + serveContent = false; + } + } + + ServletOutputStream ostream = null; + PrintWriter writer = null; + + if (serveContent) { + // Trying to retrieve the servlet output stream + try { + ostream = response.getOutputStream(); + } catch (IllegalStateException e) { + // If it fails, we try to get a Writer instead if we're + // trying to serve a text file + if (!usingGzippedVersion && + ((contentType == null) || + (contentType.startsWith(""text"")) || + (contentType.endsWith(""xml"")) || + (contentType.contains(""/javascript""))) + ) { + writer = response.getWriter(); + // Cannot reliably serve partial content with a Writer + ranges = FULL; + } else { + throw e; + } + } + } + + // Check to see if a Filter, Valve of wrapper has written some content. + // If it has, disable range requests and setting of a content length + // since neither can be done reliably. 
+ ServletResponse r = response; + long contentWritten = 0; + while (r instanceof ServletResponseWrapper) { + r = ((ServletResponseWrapper) r).getResponse(); + } + if (r instanceof ResponseFacade) { + contentWritten = ((ResponseFacade) r).getContentWritten(); + } + if (contentWritten > 0) { + ranges = FULL; + } + + if (resource.isDirectory() || + isError || + ( (ranges == null || ranges.isEmpty()) + && request.getHeader(""Range"") == null ) || + ranges == FULL ) { + + // Set the appropriate output headers + if (contentType != null) { + if (debug > 0) + log(""DefaultServlet.serveFile: contentType='"" + + contentType + ""'""); + response.setContentType(contentType); + } + if (resource.isFile() && contentLength >= 0 && + (!serveContent || ostream != null)) { + if (debug > 0) + log(""DefaultServlet.serveFile: contentLength="" + + contentLength); + // Don't set a content length if something else has already + // written to the response. + if (contentWritten == 0) { + response.setContentLengthLong(contentLength); + } + } + + if (serveContent) { + try { + response.setBufferSize(output); + } catch (IllegalStateException e) { + // Silent catch + } + InputStream renderResult = null; + if (ostream == null) { + // Output via a writer so can't use sendfile or write + // content directly. + if (resource.isDirectory()) { + renderResult = render(getPathPrefix(request), resource); + } else { + renderResult = resource.getInputStream(); + } + copy(resource, renderResult, writer, encoding); + } else { + // Output is via an InputStream + if (resource.isDirectory()) { + renderResult = render(getPathPrefix(request), resource); + } else { + // Output is content of resource + if (!checkSendfile(request, response, resource, + contentLength, null)) { + // sendfile not possible so check if resource + // content is available directly + byte[] resourceBody = resource.getContent(); + if (resourceBody == null) { + // Resource content not available, use + // inputstream + renderResult = resource.getInputStream(); + } else { + // Use the resource content directly + ostream.write(resourceBody); + } + } + } + // If a stream was configured, it needs to be copied to + // the output (this method closes the stream) + if (renderResult != null) { + copy(resource, renderResult, ostream); + } + } + } + + } else { + + if ((ranges == null) || (ranges.isEmpty())) + return; + + // Partial content response. 
+ + response.setStatus(HttpServletResponse.SC_PARTIAL_CONTENT); + + if (ranges.size() == 1) { + + Range range = ranges.get(0); + response.addHeader(""Content-Range"", ""bytes "" + + range.start + + ""-"" + range.end + ""/"" + + range.length); + long length = range.end - range.start + 1; + response.setContentLengthLong(length); + + if (contentType != null) { + if (debug > 0) + log(""DefaultServlet.serveFile: contentType='"" + + contentType + ""'""); + response.setContentType(contentType); + } + + if (serveContent) { + try { + response.setBufferSize(output); + } catch (IllegalStateException e) { + // Silent catch + } + if (ostream != null) { + if (!checkSendfile(request, response, resource, + range.end - range.start + 1, range)) + copy(resource, ostream, range); + } else { + // we should not get here + throw new IllegalStateException(); + } + } + } else { + response.setContentType(""multipart/byteranges; boundary="" + + mimeSeparation); + if (serveContent) { + try { + response.setBufferSize(output); + } catch (IllegalStateException e) { + // Silent catch + } + if (ostream != null) { + copy(resource, ostream, ranges.iterator(), contentType); + } else { + // we should not get here + throw new IllegalStateException(); + } + } + } + } + } + + + /** + * Parse the content-range header. + * + * @param request The servlet request we a)re processing + * @param response The servlet response we are creating + * @return Range + */ +" +283,1," private void findCloneMethod() { + try { + iCloneMethod = iPrototype.getClass().getMethod(""clone"", (Class[]) null); + } catch (final NoSuchMethodException ex) { + throw new IllegalArgumentException(""PrototypeCloneFactory: The clone method must exist and be public ""); + } + } + + /** + * Creates an object by calling the clone method. + * + * @return the new object + */ + @SuppressWarnings(""unchecked"") +" +284,1," public boolean isSequenceType() { return false; } + + + /* Traverseproc implementation */ + @Override +" +285,1," public void setDynamicAttribute(String uri, String localName, Object value) throws JspException { + if (ComponentUtils.altSyntax(getStack()) && ComponentUtils.isExpression(value)) { + dynamicAttributes.put(localName, String.valueOf(ObjectUtils.defaultIfNull(findValue(value.toString()), value))); + } else { + dynamicAttributes.put(localName, value); + } + } + +" +286,1," protected void onModified() throws IOException { + super.onModified(); + Jenkins.getInstance().trimLabels(); + } + } + + /** + * Set of installed cluster nodes. + *
+ * We use this field with copy-on-write semantics. + * This field has mutable list (to keep the serialization look clean), + * but it shall never be modified. Only new completely populated slave + * list can be set here. + *
+ * The field name should be really {@code nodes}, but again the backward compatibility + * prevents us from renaming. + */ + protected volatile NodeList slaves; + + /** + * Quiet period. + * + * This is {@link Integer} so that we can initialize it to '5' for upgrading users. + */ + /*package*/ Integer quietPeriod; + + /** + * Global default for {@link AbstractProject#getScmCheckoutRetryCount()} + */ + /*package*/ int scmCheckoutRetryCount; + + /** + * {@link View}s. + */ + private final CopyOnWriteArrayList views = new CopyOnWriteArrayList(); + + /** + * Name of the primary view. + *
+ * Start with null, so that we can upgrade pre-1.269 data well. + * @since 1.269 + */ + private volatile String primaryView; + + private transient final ViewGroupMixIn viewGroupMixIn = new ViewGroupMixIn(this) { + protected List views() { return views; } + protected String primaryView() { return primaryView; } + protected void primaryView(String name) { primaryView=name; } + }; + + + private transient final FingerprintMap fingerprintMap = new FingerprintMap(); + + /** + * Loaded plugins. + */ + public transient final PluginManager pluginManager; + + public transient volatile TcpSlaveAgentListener tcpSlaveAgentListener; + + private transient UDPBroadcastThread udpBroadcastThread; + + private transient DNSMultiCast dnsMultiCast; + + /** + * List of registered {@link SCMListener}s. + */ + private transient final CopyOnWriteList scmListeners = new CopyOnWriteList(); + + /** + * TCP slave agent port. + * 0 for random, -1 to disable. + */ + private int slaveAgentPort =0; + + /** + * Whitespace-separated labels assigned to the master as a {@link Node}. + */ + private String label=""""; + + /** + * {@link hudson.security.csrf.CrumbIssuer} + */ + private volatile CrumbIssuer crumbIssuer; + + /** + * All labels known to Jenkins. This allows us to reuse the same label instances + * as much as possible, even though that's not a strict requirement. + */ + private transient final ConcurrentHashMap labels = new ConcurrentHashMap(); + + /** + * Load statistics of the entire system. + * + * This includes every executor and every job in the system. + */ + @Exported + public transient final OverallLoadStatistics overallLoad = new OverallLoadStatistics(); + + /** + * Load statistics of the free roaming jobs and slaves. + * + * This includes all executors on {@link Mode#NORMAL} nodes and jobs that do not have any assigned nodes. + * + * @since 1.467 + */ + @Exported + public transient final LoadStatistics unlabeledLoad = new UnlabeledLoadStatistics(); + + /** + * {@link NodeProvisioner} that reacts to {@link #unlabeledLoad}. + * @since 1.467 + */ + public transient final NodeProvisioner unlabeledNodeProvisioner = new NodeProvisioner(null,unlabeledLoad); + + /** + * @deprecated as of 1.467 + * Use {@link #unlabeledNodeProvisioner}. + * This was broken because it was tracking all the executors in the system, but it was only tracking + * free-roaming jobs in the queue. So {@link Cloud} fails to launch nodes when you have some exclusive + * slaves and free-roaming jobs in the queue. + */ + @Restricted(NoExternalUse.class) + public transient final NodeProvisioner overallNodeProvisioner = unlabeledNodeProvisioner; + + + public transient final ServletContext servletContext; + + /** + * Transient action list. Useful for adding navigation items to the navigation bar + * on the left. + */ + private transient final List actions = new CopyOnWriteArrayList(); + + /** + * List of master node properties + */ + private DescribableList,NodePropertyDescriptor> nodeProperties = new DescribableList,NodePropertyDescriptor>(this); + + /** + * List of global properties + */ + private DescribableList,NodePropertyDescriptor> globalNodeProperties = new DescribableList,NodePropertyDescriptor>(this); + + /** + * {@link AdministrativeMonitor}s installed on this system. + * + * @see AdministrativeMonitor + */ + public transient final List administrativeMonitors = getExtensionList(AdministrativeMonitor.class); + + /** + * Widgets on Hudson. 
+ */ + private transient final List widgets = getExtensionList(Widget.class); + + /** + * {@link AdjunctManager} + */ + private transient final AdjunctManager adjuncts; + + /** + * Code that handles {@link ItemGroup} work. + */ + private transient final ItemGroupMixIn itemGroupMixIn = new ItemGroupMixIn(this,this) { + @Override + protected void add(TopLevelItem item) { + items.put(item.getName(),item); + } + + @Override + protected File getRootDirFor(String name) { + return Jenkins.this.getRootDirFor(name); + } + + /** + * Send the browser to the config page. + * use View to trim view/{default-view} from URL if possible + */ + @Override + protected String redirectAfterCreateItem(StaplerRequest req, TopLevelItem result) throws IOException { + String redirect = result.getUrl()+""configure""; + List ancestors = req.getAncestors(); + for (int i = ancestors.size() - 1; i >= 0; i--) { + Object o = ancestors.get(i).getObject(); + if (o instanceof View) { + redirect = req.getContextPath() + '/' + ((View)o).getUrl() + redirect; + break; + } + } + return redirect; + } + }; + + + /** + * Hook for a test harness to intercept Jenkins.getInstance() + * + * Do not use in the production code as the signature may change. + */ + public interface JenkinsHolder { + Jenkins getInstance(); + } + + static JenkinsHolder HOLDER = new JenkinsHolder() { + public Jenkins getInstance() { + return theInstance; + } + }; + + @CLIResolver + public static Jenkins getInstance() { + return HOLDER.getInstance(); + } + + /** + * Secret key generated once and used for a long time, beyond + * container start/stop. Persisted outside config.xml to avoid + * accidental exposure. + */ + private transient final String secretKey; + + private transient final UpdateCenter updateCenter = new UpdateCenter(); + + /** + * True if the user opted out from the statistics tracking. We'll never send anything if this is true. + */ + private Boolean noUsageStatistics; + + /** + * HTTP proxy configuration. + */ + public transient volatile ProxyConfiguration proxy; + + /** + * Bound to ""/log"". + */ + private transient final LogRecorderManager log = new LogRecorderManager(); + + protected Jenkins(File root, ServletContext context) throws IOException, InterruptedException, ReactorException { + this(root,context,null); + } + + /** + * @param pluginManager + * If non-null, use existing plugin manager. create a new one. + */ + @edu.umd.cs.findbugs.annotations.SuppressWarnings({ + ""SC_START_IN_CTOR"", // bug in FindBugs. 
It flags UDPBroadcastThread.start() call but that's for another class + ""ST_WRITE_TO_STATIC_FROM_INSTANCE_METHOD"" // Trigger.timer + }) + protected Jenkins(File root, ServletContext context, PluginManager pluginManager) throws IOException, InterruptedException, ReactorException { + long start = System.currentTimeMillis(); + + // As Jenkins is starting, grant this process full control + ACL.impersonate(ACL.SYSTEM); + try { + this.root = root; + this.servletContext = context; + computeVersion(context); + if(theInstance!=null) + throw new IllegalStateException(""second instance""); + theInstance = this; + + if (!new File(root,""jobs"").exists()) { + // if this is a fresh install, use more modern default layout that's consistent with slaves + workspaceDir = ""${JENKINS_HOME}/workspace/${ITEM_FULLNAME}""; + } + + // doing this early allows InitStrategy to set environment upfront + final InitStrategy is = InitStrategy.get(Thread.currentThread().getContextClassLoader()); + + Trigger.timer = new Timer(""Jenkins cron thread""); + queue = new Queue(LoadBalancer.CONSISTENT_HASH); + + try { + dependencyGraph = DependencyGraph.EMPTY; + } catch (InternalError e) { + if(e.getMessage().contains(""window server"")) { + throw new Error(""Looks like the server runs without X. Please specify -Djava.awt.headless=true as JVM option"",e); + } + throw e; + } + + // get or create the secret + TextFile secretFile = new TextFile(new File(getRootDir(),""secret.key"")); + if(secretFile.exists()) { + secretKey = secretFile.readTrim(); + } else { + SecureRandom sr = new SecureRandom(); + byte[] random = new byte[32]; + sr.nextBytes(random); + secretKey = Util.toHexString(random); + secretFile.write(secretKey); + } + + try { + proxy = ProxyConfiguration.load(); + } catch (IOException e) { + LOGGER.log(SEVERE, ""Failed to load proxy configuration"", e); + } + + if (pluginManager==null) + pluginManager = new LocalPluginManager(this); + this.pluginManager = pluginManager; + // JSON binding needs to be able to see all the classes from all the plugins + WebApp.get(servletContext).setClassLoader(pluginManager.uberClassLoader); + + adjuncts = new AdjunctManager(servletContext, pluginManager.uberClassLoader,""adjuncts/""+SESSION_HASH); + + // initialization consists of ... + executeReactor( is, + pluginManager.initTasks(is), // loading and preparing plugins + loadTasks(), // load jobs + InitMilestone.ordering() // forced ordering among key milestones + ); + + if(KILL_AFTER_LOAD) + System.exit(0); + + if(slaveAgentPort!=-1) { + try { + tcpSlaveAgentListener = new TcpSlaveAgentListener(slaveAgentPort); + } catch (BindException e) { + new AdministrativeError(getClass().getName()+"".tcpBind"", + ""Failed to listen to incoming slave connection"", + ""Failed to listen to incoming slave connection. 
Change the port number to solve the problem."",e); + } + } else + tcpSlaveAgentListener = null; + + try { + udpBroadcastThread = new UDPBroadcastThread(this); + udpBroadcastThread.start(); + } catch (IOException e) { + LOGGER.log(Level.WARNING, ""Failed to broadcast over UDP"",e); + } + dnsMultiCast = new DNSMultiCast(this); + + Timer timer = Trigger.timer; + if (timer != null) { + timer.scheduleAtFixedRate(new SafeTimerTask() { + @Override + protected void doRun() throws Exception { + trimLabels(); + } + }, TimeUnit2.MINUTES.toMillis(5), TimeUnit2.MINUTES.toMillis(5)); + } + + updateComputerList(); + + {// master is online now + Computer c = toComputer(); + if(c!=null) + for (ComputerListener cl : ComputerListener.all()) + cl.onOnline(c,StreamTaskListener.fromStdout()); + } + + for (ItemListener l : ItemListener.all()) { + long itemListenerStart = System.currentTimeMillis(); + l.onLoaded(); + if (LOG_STARTUP_PERFORMANCE) + LOGGER.info(String.format(""Took %dms for item listener %s startup"", + System.currentTimeMillis()-itemListenerStart,l.getClass().getName())); + } + + if (LOG_STARTUP_PERFORMANCE) + LOGGER.info(String.format(""Took %dms for complete Jenkins startup"", + System.currentTimeMillis()-start)); + } finally { + SecurityContextHolder.clearContext(); + } + } + + /** + * Executes a reactor. + * + * @param is + * If non-null, this can be consulted for ignoring some tasks. Only used during the initialization of Hudson. + */ + private void executeReactor(final InitStrategy is, TaskBuilder... builders) throws IOException, InterruptedException, ReactorException { + Reactor reactor = new Reactor(builders) { + /** + * Sets the thread name to the task for better diagnostics. + */ + @Override + protected void runTask(Task task) throws Exception { + if (is!=null && is.skipInitTask(task)) return; + + ACL.impersonate(ACL.SYSTEM); // full access in the initialization thread + String taskName = task.getDisplayName(); + + Thread t = Thread.currentThread(); + String name = t.getName(); + if (taskName !=null) + t.setName(taskName); + try { + long start = System.currentTimeMillis(); + super.runTask(task); + if(LOG_STARTUP_PERFORMANCE) + LOGGER.info(String.format(""Took %dms for %s by %s"", + System.currentTimeMillis()-start, taskName, name)); + } finally { + t.setName(name); + SecurityContextHolder.clearContext(); + } + } + }; + + new InitReactorRunner() { + @Override + protected void onInitMilestoneAttained(InitMilestone milestone) { + initLevel = milestone; + } + }.run(reactor); + } + + + public TcpSlaveAgentListener getTcpSlaveAgentListener() { + return tcpSlaveAgentListener; + } + + /** + * Makes {@link AdjunctManager} URL-bound. + * The dummy parameter allows us to use different URLs for the same adjunct, + * for proper cache handling. + */ + public AdjunctManager getAdjuncts(String dummy) { + return adjuncts; + } + + @Exported + public int getSlaveAgentPort() { + return slaveAgentPort; + } + + /** + * @param port + * 0 to indicate random available TCP port. -1 to disable this service. 
+ */ + public void setSlaveAgentPort(int port) throws IOException { + this.slaveAgentPort = port; + + // relaunch the agent + if(tcpSlaveAgentListener==null) { + if(slaveAgentPort!=-1) + tcpSlaveAgentListener = new TcpSlaveAgentListener(slaveAgentPort); + } else { + if(tcpSlaveAgentListener.configuredPort!=slaveAgentPort) { + tcpSlaveAgentListener.shutdown(); + tcpSlaveAgentListener = null; + if(slaveAgentPort!=-1) + tcpSlaveAgentListener = new TcpSlaveAgentListener(slaveAgentPort); + } + } + } + + public void setNodeName(String name) { + throw new UnsupportedOperationException(); // not allowed + } + + public String getNodeDescription() { + return Messages.Hudson_NodeDescription(); + } + + @Exported + public String getDescription() { + return systemMessage; + } + + public PluginManager getPluginManager() { + return pluginManager; + } + + public UpdateCenter getUpdateCenter() { + return updateCenter; + } + + public boolean isUsageStatisticsCollected() { + return noUsageStatistics==null || !noUsageStatistics; + } + + public void setNoUsageStatistics(Boolean noUsageStatistics) throws IOException { + this.noUsageStatistics = noUsageStatistics; + save(); + } + + public View.People getPeople() { + return new View.People(this); + } + + /** + * @since 1.484 + */ + public View.AsynchPeople getAsynchPeople() { + return new View.AsynchPeople(this); + } + + /** + * Does this {@link View} has any associated user information recorded? + */ + public boolean hasPeople() { + return View.People.isApplicable(items.values()); + } + + public Api getApi() { + return new Api(this); + } + + /** + * Returns a secret key that survives across container start/stop. + *
+ * This value is useful for implementing some of the security features. + * + * @deprecated + * Due to the past security advisory, this value should not be used any more to protect sensitive information. + * See {@link ConfidentialStore} and {@link ConfidentialKey} for how to store secrets. + */ + public String getSecretKey() { + return secretKey; + } + + /** + * Gets {@linkplain #getSecretKey() the secret key} as a key for AES-128. + * @since 1.308 + * @deprecated + * See {@link #getSecretKey()}. + */ + public SecretKey getSecretKeyAsAES128() { + return Util.toAes128Key(secretKey); + } + + /** + * Returns the unique identifier of this Jenkins that has been historically used to identify + * this Jenkins to the outside world. + * + *
+ * This form of identifier is weak in that it can be impersonated by others. See + * https://wiki.jenkins-ci.org/display/JENKINS/Instance+Identity for more modern form of instance ID + * that can be challenged and verified. + * + * @since 1.498 + */ + @SuppressWarnings(""deprecation"") + public String getLegacyInstanceId() { + return Util.getDigestOf(getSecretKey()); + } + + /** + * Gets the SCM descriptor by name. Primarily used for making them web-visible. + */ + public Descriptor getScm(String shortClassName) { + return findDescriptor(shortClassName,SCM.all()); + } + + /** + * Gets the repository browser descriptor by name. Primarily used for making them web-visible. + */ + public Descriptor> getRepositoryBrowser(String shortClassName) { + return findDescriptor(shortClassName,RepositoryBrowser.all()); + } + + /** + * Gets the builder descriptor by name. Primarily used for making them web-visible. + */ + public Descriptor getBuilder(String shortClassName) { + return findDescriptor(shortClassName, Builder.all()); + } + + /** + * Gets the build wrapper descriptor by name. Primarily used for making them web-visible. + */ + public Descriptor getBuildWrapper(String shortClassName) { + return findDescriptor(shortClassName, BuildWrapper.all()); + } + + /** + * Gets the publisher descriptor by name. Primarily used for making them web-visible. + */ + public Descriptor getPublisher(String shortClassName) { + return findDescriptor(shortClassName, Publisher.all()); + } + + /** + * Gets the trigger descriptor by name. Primarily used for making them web-visible. + */ + public TriggerDescriptor getTrigger(String shortClassName) { + return (TriggerDescriptor) findDescriptor(shortClassName, Trigger.all()); + } + + /** + * Gets the retention strategy descriptor by name. Primarily used for making them web-visible. + */ + public Descriptor> getRetentionStrategy(String shortClassName) { + return findDescriptor(shortClassName, RetentionStrategy.all()); + } + + /** + * Gets the {@link JobPropertyDescriptor} by name. Primarily used for making them web-visible. + */ + public JobPropertyDescriptor getJobProperty(String shortClassName) { + // combining these two lines triggers javac bug. See issue #610. + Descriptor d = findDescriptor(shortClassName, JobPropertyDescriptor.all()); + return (JobPropertyDescriptor) d; + } + + /** + * @deprecated + * UI method. Not meant to be used programatically. + */ + public ComputerSet getComputer() { + return new ComputerSet(); + } + + /** + * Exposes {@link Descriptor} by its name to URL. + * + * After doing all the {@code getXXX(shortClassName)} methods, I finally realized that + * this just doesn't scale. + * + * @param id + * Either {@link Descriptor#getId()} (recommended) or the short name of a {@link Describable} subtype (for compatibility) + * @throws IllegalArgumentException if a short name was passed which matches multiple IDs (fail fast) + */ + @SuppressWarnings({""unchecked"", ""rawtypes""}) // too late to fix + public Descriptor getDescriptor(String id) { + // legacy descriptors that are reigstered manually doesn't show up in getExtensionList, so check them explicitly. 
+ Iterable descriptors = Iterators.sequence(getExtensionList(Descriptor.class), DescriptorExtensionList.listLegacyInstances()); + for (Descriptor d : descriptors) { + if (d.getId().equals(id)) { + return d; + } + } + Descriptor candidate = null; + for (Descriptor d : descriptors) { + String name = d.getId(); + if (name.substring(name.lastIndexOf('.') + 1).equals(id)) { + if (candidate == null) { + candidate = d; + } else { + throw new IllegalArgumentException(id + "" is ambiguous; matches both "" + name + "" and "" + candidate.getId()); + } + } + } + return candidate; + } + + /** + * Alias for {@link #getDescriptor(String)}. + */ + public Descriptor getDescriptorByName(String id) { + return getDescriptor(id); + } + + /** + * Gets the {@link Descriptor} that corresponds to the given {@link Describable} type. + *
+ * If you have an instance of {@code type} and call {@link Describable#getDescriptor()}, + * you'll get the same instance that this method returns. + */ + public Descriptor getDescriptor(Class type) { + for( Descriptor d : getExtensionList(Descriptor.class) ) + if(d.clazz==type) + return d; + return null; + } + + /** + * Works just like {@link #getDescriptor(Class)} but don't take no for an answer. + * + * @throws AssertionError + * If the descriptor is missing. + * @since 1.326 + */ + public Descriptor getDescriptorOrDie(Class type) { + Descriptor d = getDescriptor(type); + if (d==null) + throw new AssertionError(type+"" is missing its descriptor""); + return d; + } + + /** + * Gets the {@link Descriptor} instance in the current Hudson by its type. + */ + public T getDescriptorByType(Class type) { + for( Descriptor d : getExtensionList(Descriptor.class) ) + if(d.getClass()==type) + return type.cast(d); + return null; + } + + /** + * Gets the {@link SecurityRealm} descriptors by name. Primarily used for making them web-visible. + */ + public Descriptor getSecurityRealms(String shortClassName) { + return findDescriptor(shortClassName,SecurityRealm.all()); + } + + /** + * Finds a descriptor that has the specified name. + */ + private > + Descriptor findDescriptor(String shortClassName, Collection> descriptors) { + String name = '.'+shortClassName; + for (Descriptor d : descriptors) { + if(d.clazz.getName().endsWith(name)) + return d; + } + return null; + } + + protected void updateComputerList() throws IOException { + updateComputerList(AUTOMATIC_SLAVE_LAUNCH); + } + + /** + * Gets all the installed {@link SCMListener}s. + */ + public CopyOnWriteList getSCMListeners() { + return scmListeners; + } + + /** + * Gets the plugin object from its short name. + * + *
+ * This allows URL hudson/plugin/ID to be served by the views + * of the plugin class. + */ + public Plugin getPlugin(String shortName) { + PluginWrapper p = pluginManager.getPlugin(shortName); + if(p==null) return null; + return p.getPlugin(); + } + + /** + * Gets the plugin object from its class. + * + *
+ * This allows easy storage of plugin information in the plugin singleton without + * every plugin reimplementing the singleton pattern. + * + * @param clazz The plugin class (beware class-loader fun, this will probably only work + * from within the jpi that defines the plugin class, it may or may not work in other cases) + * + * @return The plugin instance. + */ + @SuppressWarnings(""unchecked"") + public
P getPlugin(Class
clazz) { + PluginWrapper p = pluginManager.getPlugin(clazz); + if(p==null) return null; + return (P) p.getPlugin(); + } + + /** + * Gets the plugin objects from their super-class. + * + * @param clazz The plugin class (beware class-loader fun) + * + * @return The plugin instances. + */ + public
List
getPlugins(Class
clazz) { + List
result = new ArrayList
(); + for (PluginWrapper w: pluginManager.getPlugins(clazz)) { + result.add((P)w.getPlugin()); + } + return Collections.unmodifiableList(result); + } + + /** + * Synonym for {@link #getDescription}. + */ + public String getSystemMessage() { + return systemMessage; + } + + /** + * Gets the markup formatter used in the system. + * + * @return + * never null. + * @since 1.391 + */ + public MarkupFormatter getMarkupFormatter() { + return markupFormatter!=null ? markupFormatter : RawHtmlMarkupFormatter.INSTANCE; + } + + /** + * Sets the markup formatter used in the system globally. + * + * @since 1.391 + */ + public void setMarkupFormatter(MarkupFormatter f) { + this.markupFormatter = f; + } + + /** + * Sets the system message. + */ + public void setSystemMessage(String message) throws IOException { + this.systemMessage = message; + save(); + } + + public FederatedLoginService getFederatedLoginService(String name) { + for (FederatedLoginService fls : FederatedLoginService.all()) { + if (fls.getUrlName().equals(name)) + return fls; + } + return null; + } + + public List getFederatedLoginServices() { + return FederatedLoginService.all(); + } + + public Launcher createLauncher(TaskListener listener) { + return new LocalLauncher(listener).decorateFor(this); + } + + + public String getFullName() { + return """"; + } + + public String getFullDisplayName() { + return """"; + } + + /** + * Returns the transient {@link Action}s associated with the top page. + * + *
+ * Adding {@link Action} is primarily useful for plugins to contribute + * an item to the navigation bar of the top page. See existing {@link Action} + * implementation for it affects the GUI. + * + *
+ * To register an {@link Action}, implement {@link RootAction} extension point, or write code like + * {@code Hudson.getInstance().getActions().add(...)}. + * + * @return + * Live list where the changes can be made. Can be empty but never null. + * @since 1.172 + */ + public List getActions() { + return actions; + } + + /** + * Gets just the immediate children of {@link Jenkins}. + * + * @see #getAllItems(Class) + */ + @Exported(name=""jobs"") + public List getItems() { + if (authorizationStrategy instanceof AuthorizationStrategy.Unsecured || + authorizationStrategy instanceof FullControlOnceLoggedInAuthorizationStrategy) { + return new ArrayList(items.values()); + } + + List viewableItems = new ArrayList(); + for (TopLevelItem item : items.values()) { + if (item.hasPermission(Item.READ)) + viewableItems.add(item); + } + + return viewableItems; + } + + /** + * Returns the read-only view of all the {@link TopLevelItem}s keyed by their names. + *
+ * This method is efficient, as it doesn't involve any copying. + * + * @since 1.296 + */ + public Map getItemMap() { + return Collections.unmodifiableMap(items); + } + + /** + * Gets just the immediate children of {@link Jenkins} but of the given type. + */ + public List getItems(Class type) { + List r = new ArrayList(); + for (TopLevelItem i : getItems()) + if (type.isInstance(i)) + r.add(type.cast(i)); + return r; + } + + /** + * Gets all the {@link Item}s recursively in the {@link ItemGroup} tree + * and filter them by the given type. + */ + public List getAllItems(Class type) { + List r = new ArrayList(); + + Stack q = new Stack(); + q.push(this); + + while(!q.isEmpty()) { + ItemGroup parent = q.pop(); + for (Item i : parent.getItems()) { + if(type.isInstance(i)) { + if (i.hasPermission(Item.READ)) + r.add(type.cast(i)); + } + if(i instanceof ItemGroup) + q.push((ItemGroup)i); + } + } + + return r; + } + + /** + * Gets all the items recursively. + * + * @since 1.402 + */ + public List getAllItems() { + return getAllItems(Item.class); + } + + /** + * Gets a list of simple top-level projects. + * @deprecated This method will ignore Maven and matrix projects, as well as projects inside containers such as folders. + * You may prefer to call {@link #getAllItems(Class)} on {@link AbstractProject}, + * perhaps also using {@link Util#createSubList} to consider only {@link TopLevelItem}s. + * (That will also consider the caller's permissions.) + * If you really want to get just {@link Project}s at top level, ignoring permissions, + * you can filter the values from {@link #getItemMap} using {@link Util#createSubList}. + */ + @Deprecated + public List getProjects() { + return Util.createSubList(items.values(),Project.class); + } + + /** + * Gets the names of all the {@link Job}s. + */ + public Collection getJobNames() { + List names = new ArrayList(); + for (Job j : getAllItems(Job.class)) + names.add(j.getFullName()); + return names; + } + + public List getViewActions() { + return getActions(); + } + + /** + * Gets the names of all the {@link TopLevelItem}s. + */ + public Collection getTopLevelItemNames() { + List names = new ArrayList(); + for (TopLevelItem j : items.values()) + names.add(j.getName()); + return names; + } + + public View getView(String name) { + return viewGroupMixIn.getView(name); + } + + /** + * Gets the read-only list of all {@link View}s. + */ + @Exported + public Collection getViews() { + return viewGroupMixIn.getViews(); + } + + public void addView(View v) throws IOException { + viewGroupMixIn.addView(v); + } + + public boolean canDelete(View view) { + return viewGroupMixIn.canDelete(view); + } + + public synchronized void deleteView(View view) throws IOException { + viewGroupMixIn.deleteView(view); + } + + public void onViewRenamed(View view, String oldName, String newName) { + viewGroupMixIn.onViewRenamed(view,oldName,newName); + } + + /** + * Returns the primary {@link View} that renders the top-page of Hudson. 
+ */ + @Exported + public View getPrimaryView() { + return viewGroupMixIn.getPrimaryView(); + } + + public void setPrimaryView(View v) { + this.primaryView = v.getViewName(); + } + + public ViewsTabBar getViewsTabBar() { + return viewsTabBar; + } + + public void setViewsTabBar(ViewsTabBar viewsTabBar) { + this.viewsTabBar = viewsTabBar; + } + + public Jenkins getItemGroup() { + return this; + } + + public MyViewsTabBar getMyViewsTabBar() { + return myViewsTabBar; + } + + public void setMyViewsTabBar(MyViewsTabBar myViewsTabBar) { + this.myViewsTabBar = myViewsTabBar; + } + + /** + * Returns true if the current running Hudson is upgraded from a version earlier than the specified version. + * + *
+ * This method continues to return true until the system configuration is saved, at which point + * {@link #version} will be overwritten and Hudson forgets the upgrade history. + * + *
+ * To handle SNAPSHOTS correctly, pass in ""1.N.*"" to test if it's upgrading from the version + * equal or younger than N. So say if you implement a feature in 1.301 and you want to check + * if the installation upgraded from pre-1.301, pass in ""1.300.*"" + * + * @since 1.301 + */ + public boolean isUpgradedFromBefore(VersionNumber v) { + try { + return new VersionNumber(version).isOlderThan(v); + } catch (IllegalArgumentException e) { + // fail to parse this version number + return false; + } + } + + /** + * Gets the read-only list of all {@link Computer}s. + */ + public Computer[] getComputers() { + Computer[] r = computers.values().toArray(new Computer[computers.size()]); + Arrays.sort(r,new Comparator() { + final Collator collator = Collator.getInstance(); + public int compare(Computer lhs, Computer rhs) { + if(lhs.getNode()==Jenkins.this) return -1; + if(rhs.getNode()==Jenkins.this) return 1; + return collator.compare(lhs.getDisplayName(), rhs.getDisplayName()); + } + }); + return r; + } + + @CLIResolver + public Computer getComputer(@Argument(required=true,metaVar=""NAME"",usage=""Node name"") String name) { + if(name.equals(""(master)"")) + name = """"; + + for (Computer c : computers.values()) { + if(c.getName().equals(name)) + return c; + } + return null; + } + + /** + * Gets the label that exists on this system by the name. + * + * @return null if name is null. + * @see Label#parseExpression(String) (String) + */ + public Label getLabel(String expr) { + if(expr==null) return null; + while(true) { + Label l = labels.get(expr); + if(l!=null) + return l; + + // non-existent + try { + labels.putIfAbsent(expr,Label.parseExpression(expr)); + } catch (ANTLRException e) { + // laxly accept it as a single label atom for backward compatibility + return getLabelAtom(expr); + } + } + } + + /** + * Returns the label atom of the given name. + * @return non-null iff name is non-null + */ + public @Nullable LabelAtom getLabelAtom(@CheckForNull String name) { + if (name==null) return null; + + while(true) { + Label l = labels.get(name); + if(l!=null) + return (LabelAtom)l; + + // non-existent + LabelAtom la = new LabelAtom(name); + if (labels.putIfAbsent(name, la)==null) + la.load(); + } + } + + /** + * Gets all the active labels in the current system. + */ + public Set
+ * This method first tries to use the manually configured value, then + * fall back to {@link StaplerRequest#getRootPath()}. + * It is done in this order so that it can work correctly even in the face + * of a reverse proxy. + * + * @return + * This method returns null if this parameter is not configured by the user. + * The caller must gracefully deal with this situation. + * The returned URL will always have the trailing '/'. + * @since 1.66 + * @see Descriptor#getCheckUrl(String) + * @see #getRootUrlFromRequest() + */ + public String getRootUrl() { + // for compatibility. the actual data is stored in Mailer + String url = JenkinsLocationConfiguration.get().getUrl(); + if(url!=null) { + if (!url.endsWith(""/"")) url += '/'; + return url; + } + + StaplerRequest req = Stapler.getCurrentRequest(); + if(req!=null) + return getRootUrlFromRequest(); + return null; + } + + /** + * Is Jenkins running in HTTPS? + * + * Note that we can't really trust {@link StaplerRequest#isSecure()} because HTTPS might be terminated + * in the reverse proxy. + */ + public boolean isRootUrlSecure() { + String url = getRootUrl(); + return url!=null && url.startsWith(""https""); + } + + /** + * Gets the absolute URL of Hudson top page, such as ""http://localhost/hudson/"". + * + *
+ * Unlike {@link #getRootUrl()}, which uses the manually configured value, + * this one uses the current request to reconstruct the URL. The benefit is + * that this is immune to the configuration mistake (users often fail to set the root URL + * correctly, especially when a migration is involved), but the downside + * is that unless you are processing a request, this method doesn't work. + * + * Please note that this will not work in all cases if Jenkins is running behind a + * reverse proxy (e.g. when user has switched off ProxyPreserveHost, which is + * default setup or the actual url uses https) and you should use getRootUrl if + * you want to be sure you reflect user setup. + * See https://wiki.jenkins-ci.org/display/JENKINS/Running+Jenkins+behind+Apache + * + * @since 1.263 + */ + public String getRootUrlFromRequest() { + StaplerRequest req = Stapler.getCurrentRequest(); + StringBuilder buf = new StringBuilder(); + buf.append(req.getScheme()+""://""); + buf.append(req.getServerName()); + if(req.getServerPort()!=80) + buf.append(':').append(req.getServerPort()); + buf.append(req.getContextPath()).append('/'); + return buf.toString(); + } + + public File getRootDir() { + return root; + } + + public FilePath getWorkspaceFor(TopLevelItem item) { + return new FilePath(expandVariablesForDirectory(workspaceDir, item)); + } + + public File getBuildDirFor(Job job) { + return expandVariablesForDirectory(buildsDir, job); + } + + private File expandVariablesForDirectory(String base, Item item) { + return new File(Util.replaceMacro(base, ImmutableMap.of( + ""JENKINS_HOME"", getRootDir().getPath(), + ""ITEM_ROOTDIR"", item.getRootDir().getPath(), + ""ITEM_FULLNAME"", item.getFullName()))); + } + + public String getRawWorkspaceDir() { + return workspaceDir; + } + + public String getRawBuildsDir() { + return buildsDir; + } + + public FilePath getRootPath() { + return new FilePath(getRootDir()); + } + + @Override + public FilePath createPath(String absolutePath) { + return new FilePath((VirtualChannel)null,absolutePath); + } + + public ClockDifference getClockDifference() { + return ClockDifference.ZERO; + } + + /** + * For binding {@link LogRecorderManager} to ""/log"". + * Everything below here is admin-only, so do the check here. + */ + public LogRecorderManager getLog() { + checkPermission(ADMINISTER); + return log; + } + + /** + * A convenience method to check if there's some security + * restrictions in place. + */ + @Exported + public boolean isUseSecurity() { + return securityRealm!=SecurityRealm.NO_AUTHENTICATION || authorizationStrategy!=AuthorizationStrategy.UNSECURED; + } + + public boolean isUseProjectNamingStrategy(){ + return projectNamingStrategy != DefaultProjectNamingStrategy.DEFAULT_NAMING_STRATEGY; + } + + /** + * If true, all the POST requests to Hudson would have to have crumb in it to protect + * Hudson from CSRF vulnerabilities. + */ + @Exported + public boolean isUseCrumbs() { + return crumbIssuer!=null; + } + + /** + * Returns the constant that captures the three basic security modes + * in Hudson. + */ + public SecurityMode getSecurity() { + // fix the variable so that this code works under concurrent modification to securityRealm. + SecurityRealm realm = securityRealm; + + if(realm==SecurityRealm.NO_AUTHENTICATION) + return SecurityMode.UNSECURED; + if(realm instanceof LegacySecurityRealm) + return SecurityMode.LEGACY; + return SecurityMode.SECURED; + } + + /** + * @return + * never null. 
+ */ + public SecurityRealm getSecurityRealm() { + return securityRealm; + } + + public void setSecurityRealm(SecurityRealm securityRealm) { + if(securityRealm==null) + securityRealm= SecurityRealm.NO_AUTHENTICATION; + this.useSecurity = true; + this.securityRealm = securityRealm; + // reset the filters and proxies for the new SecurityRealm + try { + HudsonFilter filter = HudsonFilter.get(servletContext); + if (filter == null) { + // Fix for #3069: This filter is not necessarily initialized before the servlets. + // when HudsonFilter does come back, it'll initialize itself. + LOGGER.fine(""HudsonFilter has not yet been initialized: Can't perform security setup for now""); + } else { + LOGGER.fine(""HudsonFilter has been previously initialized: Setting security up""); + filter.reset(securityRealm); + LOGGER.fine(""Security is now fully set up""); + } + } catch (ServletException e) { + // for binary compatibility, this method cannot throw a checked exception + throw new AcegiSecurityException(""Failed to configure filter"",e) {}; + } + } + + public void setAuthorizationStrategy(AuthorizationStrategy a) { + if (a == null) + a = AuthorizationStrategy.UNSECURED; + useSecurity = true; + authorizationStrategy = a; + } + + public void disableSecurity() { + useSecurity = null; + setSecurityRealm(SecurityRealm.NO_AUTHENTICATION); + authorizationStrategy = AuthorizationStrategy.UNSECURED; + markupFormatter = null; + } + + public void setProjectNamingStrategy(ProjectNamingStrategy ns) { + if(ns == null){ + ns = DefaultProjectNamingStrategy.DEFAULT_NAMING_STRATEGY; + } + projectNamingStrategy = ns; + } + + public Lifecycle getLifecycle() { + return Lifecycle.get(); + } + + /** + * Gets the dependency injection container that hosts all the extension implementations and other + * components in Jenkins. + * + * @since 1.GUICE + */ + public Injector getInjector() { + return lookup(Injector.class); + } + + /** + * Returns {@link ExtensionList} that retains the discovered instances for the given extension type. + * + * @param extensionType + * The base type that represents the extension point. Normally {@link ExtensionPoint} subtype + * but that's not a hard requirement. + * @return + * Can be an empty list but never null. + */ + @SuppressWarnings({""unchecked""}) + public ExtensionList getExtensionList(Class extensionType) { + return extensionLists.get(extensionType); + } + + /** + * Used to bind {@link ExtensionList}s to URLs. + * + * @since 1.349 + */ + public ExtensionList getExtensionList(String extensionType) throws ClassNotFoundException { + return getExtensionList(pluginManager.uberClassLoader.loadClass(extensionType)); + } + + /** + * Returns {@link ExtensionList} that retains the discovered {@link Descriptor} instances for the given + * kind of {@link Describable}. + * + * @return + * Can be an empty list but never null. + */ + @SuppressWarnings({""unchecked""}) + public ,D extends Descriptor> DescriptorExtensionList getDescriptorList(Class type) { + return descriptorLists.get(type); + } + + /** + * Refresh {@link ExtensionList}s by adding all the newly discovered extensions. + * + * Exposed only for {@link PluginManager#dynamicLoad(File)}. 
+ */ + public void refreshExtensions() throws ExtensionRefreshException { + ExtensionList finders = getExtensionList(ExtensionFinder.class); + for (ExtensionFinder ef : finders) { + if (!ef.isRefreshable()) + throw new ExtensionRefreshException(ef+"" doesn't support refresh""); + } + + List fragments = Lists.newArrayList(); + for (ExtensionFinder ef : finders) { + fragments.add(ef.refresh()); + } + ExtensionComponentSet delta = ExtensionComponentSet.union(fragments).filtered(); + + // if we find a new ExtensionFinder, we need it to list up all the extension points as well + List> newFinders = Lists.newArrayList(delta.find(ExtensionFinder.class)); + while (!newFinders.isEmpty()) { + ExtensionFinder f = newFinders.remove(newFinders.size()-1).getInstance(); + + ExtensionComponentSet ecs = ExtensionComponentSet.allOf(f).filtered(); + newFinders.addAll(ecs.find(ExtensionFinder.class)); + delta = ExtensionComponentSet.union(delta, ecs); + } + + for (ExtensionList el : extensionLists.values()) { + el.refresh(delta); + } + for (ExtensionList el : descriptorLists.values()) { + el.refresh(delta); + } + + // TODO: we need some generalization here so that extension points can be notified when a refresh happens? + for (ExtensionComponent ea : delta.find(RootAction.class)) { + Action a = ea.getInstance(); + if (!actions.contains(a)) actions.add(a); + } + } + + /** + * Returns the root {@link ACL}. + * + * @see AuthorizationStrategy#getRootACL() + */ + @Override + public ACL getACL() { + return authorizationStrategy.getRootACL(); + } + + /** + * @return + * never null. + */ + public AuthorizationStrategy getAuthorizationStrategy() { + return authorizationStrategy; + } + + /** + * The strategy used to check the project names. + * @return never null + */ + public ProjectNamingStrategy getProjectNamingStrategy() { + return projectNamingStrategy == null ? ProjectNamingStrategy.DEFAULT_NAMING_STRATEGY : projectNamingStrategy; + } + + /** + * Returns true if Hudson is quieting down. + *
+ * No further jobs will be executed unless it + * can be finished while other current pending builds + * are still in progress. + */ + @Exported + public boolean isQuietingDown() { + return isQuietingDown; + } + + /** + * Returns true if the container initiated the termination of the web application. + */ + public boolean isTerminating() { + return terminating; + } + + /** + * Gets the initialization milestone that we've already reached. + * + * @return + * {@link InitMilestone#STARTED} even if the initialization hasn't been started, so that this method + * never returns null. + */ + public InitMilestone getInitLevel() { + return initLevel; + } + + public void setNumExecutors(int n) throws IOException { + this.numExecutors = n; + save(); + } + + + + /** + * {@inheritDoc}. + * + * Note that the look up is case-insensitive. + */ + public TopLevelItem getItem(String name) { + if (name==null) return null; + TopLevelItem item = items.get(name); + if (item==null) + return null; + if (!item.hasPermission(Item.READ)) { + if (item.hasPermission(Item.DISCOVER)) { + throw new AccessDeniedException(""Please login to access job "" + name); + } + return null; + } + return item; + } + + /** + * Gets the item by its path name from the given context + * + *
Path Names
+ *
+ * If the name starts from '/', like ""/foo/bar/zot"", then it's interpreted as absolute. + * Otherwise, the name should be something like ""foo/bar"" and it's interpreted like + * relative path name in the file system is, against the given context. + * + * @param context + * null is interpreted as {@link Jenkins}. Base 'directory' of the interpretation. + * @since 1.406 + */ + public Item getItem(String pathName, ItemGroup context) { + if (context==null) context = this; + if (pathName==null) return null; + + if (pathName.startsWith(""/"")) // absolute + return getItemByFullName(pathName); + + Object/*Item|ItemGroup*/ ctx = context; + + StringTokenizer tokens = new StringTokenizer(pathName,""/""); + while (tokens.hasMoreTokens()) { + String s = tokens.nextToken(); + if (s.equals("".."")) { + if (ctx instanceof Item) { + ctx = ((Item)ctx).getParent(); + continue; + } + + ctx=null; // can't go up further + break; + } + if (s.equals(""."")) { + continue; + } + + if (ctx instanceof ItemGroup) { + ItemGroup g = (ItemGroup) ctx; + Item i = g.getItem(s); + if (i==null || !i.hasPermission(Item.READ)) { // XXX consider DISCOVER + ctx=null; // can't go up further + break; + } + ctx=i; + } else { + return null; + } + } + + if (ctx instanceof Item) + return (Item)ctx; + + // fall back to the classic interpretation + return getItemByFullName(pathName); + } + + public final Item getItem(String pathName, Item context) { + return getItem(pathName,context!=null?context.getParent():null); + } + + public final T getItem(String pathName, ItemGroup context, Class type) { + Item r = getItem(pathName, context); + if (type.isInstance(r)) + return type.cast(r); + return null; + } + + public final T getItem(String pathName, Item context, Class type) { + return getItem(pathName,context!=null?context.getParent():null,type); + } + + public File getRootDirFor(TopLevelItem child) { + return getRootDirFor(child.getName()); + } + + private File getRootDirFor(String name) { + return new File(new File(getRootDir(),""jobs""), name); + } + + /** + * Gets the {@link Item} object by its full name. + * Full names are like path names, where each name of {@link Item} is + * combined by '/'. + * + * @return + * null if either such {@link Item} doesn't exist under the given full name, + * or it exists but it's no an instance of the given type. + */ + public @CheckForNull T getItemByFullName(String fullName, Class type) { + StringTokenizer tokens = new StringTokenizer(fullName,""/""); + ItemGroup parent = this; + + if(!tokens.hasMoreTokens()) return null; // for example, empty full name. + + while(true) { + Item item = parent.getItem(tokens.nextToken()); + if(!tokens.hasMoreTokens()) { + if(type.isInstance(item)) + return type.cast(item); + else + return null; + } + + if(!(item instanceof ItemGroup)) + return null; // this item can't have any children + + if (!item.hasPermission(Item.READ)) + return null; // XXX consider DISCOVER + + parent = (ItemGroup) item; + } + } + + public @CheckForNull Item getItemByFullName(String fullName) { + return getItemByFullName(fullName,Item.class); + } + + /** + * Gets the user of the given name. 
+ * + * @return the user of the given name, if that person exists or the invoker {@link #hasPermission} on {@link #ADMINISTER}; else null + * @see User#get(String,boolean) + */ + public @CheckForNull User getUser(String name) { + return User.get(name,hasPermission(ADMINISTER)); + } + + public synchronized TopLevelItem createProject( TopLevelItemDescriptor type, String name ) throws IOException { + return createProject(type, name, true); + } + + public synchronized TopLevelItem createProject( TopLevelItemDescriptor type, String name, boolean notify ) throws IOException { + return itemGroupMixIn.createProject(type,name,notify); + } + + /** + * Overwrites the existing item by new one. + * + *
+ * This is a short cut for deleting an existing job and adding a new one. + */ + public synchronized void putItem(TopLevelItem item) throws IOException, InterruptedException { + String name = item.getName(); + TopLevelItem old = items.get(name); + if (old ==item) return; // noop + + checkPermission(Item.CREATE); + if (old!=null) + old.delete(); + items.put(name,item); + ItemListener.fireOnCreated(item); + } + + /** + * Creates a new job. + * + *
+ * This version infers the descriptor from the type of the top-level item. + * + * @throws IllegalArgumentException + * if the project of the given name already exists. + */ + public synchronized T createProject( Class type, String name ) throws IOException { + return type.cast(createProject((TopLevelItemDescriptor)getDescriptor(type),name)); + } + + /** + * Called by {@link Job#renameTo(String)} to update relevant data structure. + * assumed to be synchronized on Hudson by the caller. + */ + public void onRenamed(TopLevelItem job, String oldName, String newName) throws IOException { + items.remove(oldName); + items.put(newName,job); + + for (View v : views) + v.onJobRenamed(job, oldName, newName); + save(); + } + + /** + * Called in response to {@link Job#doDoDelete(StaplerRequest, StaplerResponse)} + */ + public void onDeleted(TopLevelItem item) throws IOException { + for (ItemListener l : ItemListener.all()) + l.onDeleted(item); + + items.remove(item.getName()); + for (View v : views) + v.onJobRenamed(item, item.getName(), null); + save(); + } + + public FingerprintMap getFingerprintMap() { + return fingerprintMap; + } + + // if no finger print matches, display ""not found page"". + public Object getFingerprint( String md5sum ) throws IOException { + Fingerprint r = fingerprintMap.get(md5sum); + if(r==null) return new NoFingerprintMatch(md5sum); + else return r; + } + + /** + * Gets a {@link Fingerprint} object if it exists. + * Otherwise null. + */ + public Fingerprint _getFingerprint( String md5sum ) throws IOException { + return fingerprintMap.get(md5sum); + } + + /** + * The file we save our configuration. + */ + private XmlFile getConfigFile() { + return new XmlFile(XSTREAM, new File(root,""config.xml"")); + } + + public int getNumExecutors() { + return numExecutors; + } + + public Mode getMode() { + return mode; + } + + public void setMode(Mode m) throws IOException { + this.mode = m; + save(); + } + + public String getLabelString() { + return fixNull(label).trim(); + } + + @Override + public void setLabelString(String label) throws IOException { + this.label = label; + save(); + } + + @Override + public LabelAtom getSelfLabel() { + return getLabelAtom(""master""); + } + + public Computer createComputer() { + return new Hudson.MasterComputer(); + } + + private synchronized TaskBuilder loadTasks() throws IOException { + File projectsDir = new File(root,""jobs""); + if(!projectsDir.getCanonicalFile().isDirectory() && !projectsDir.mkdirs()) { + if(projectsDir.exists()) + throw new IOException(projectsDir+"" is not a directory""); + throw new IOException(""Unable to create ""+projectsDir+""\nPermission issue? Please create this directory manually.""); + } + File[] subdirs = projectsDir.listFiles(new FileFilter() { + public boolean accept(File child) { + return child.isDirectory() && Items.getConfigFile(child).exists(); + } + }); + + TaskGraphBuilder g = new TaskGraphBuilder(); + Handle loadHudson = g.requires(EXTENSIONS_AUGMENTED).attains(JOB_LOADED).add(""Loading global config"", new Executable() { + public void run(Reactor session) throws Exception { + // JENKINS-8043: some slaves (eg. swarm slaves) are not saved into the config file + // and will get overwritten when reloading. Make a backup copy now, and re-add them later + NodeList oldSlaves = slaves; + + XmlFile cfg = getConfigFile(); + if (cfg.exists()) { + // reset some data that may not exist in the disk file + // so that we can take a proper compensation action later. 
+ primaryView = null; + views.clear(); + + // load from disk + cfg.unmarshal(Jenkins.this); + } + + // if we are loading old data that doesn't have this field + if (slaves == null) slaves = new NodeList(); + + clouds.setOwner(Jenkins.this); + items.clear(); + + // JENKINS-8043: re-add the slaves which were not saved into the config file + // and are now missing, but still connected. + if (oldSlaves != null) { + ArrayList newSlaves = new ArrayList(slaves); + for (Node n: oldSlaves) { + if (n instanceof EphemeralNode) { + if(!newSlaves.contains(n)) { + newSlaves.add(n); + } + } + } + setNodes(newSlaves); + } + } + }); + + for (final File subdir : subdirs) { + g.requires(loadHudson).attains(JOB_LOADED).notFatal().add(""Loading job ""+subdir.getName(),new Executable() { + public void run(Reactor session) throws Exception { + TopLevelItem item = (TopLevelItem) Items.load(Jenkins.this, subdir); + items.put(item.getName(), item); + } + }); + } + + g.requires(JOB_LOADED).add(""Finalizing set up"",new Executable() { + public void run(Reactor session) throws Exception { + rebuildDependencyGraph(); + + {// recompute label objects - populates the labels mapping. + for (Node slave : slaves) + // Note that not all labels are visible until the slaves have connected. + slave.getAssignedLabels(); + getAssignedLabels(); + } + + // initialize views by inserting the default view if necessary + // this is both for clean Hudson and for backward compatibility. + if(views.size()==0 || primaryView==null) { + View v = new AllView(Messages.Hudson_ViewName()); + setViewOwner(v); + views.add(0,v); + primaryView = v.getViewName(); + } + + // read in old data that doesn't have the security field set + if(authorizationStrategy==null) { + if(useSecurity==null || !useSecurity) + authorizationStrategy = AuthorizationStrategy.UNSECURED; + else + authorizationStrategy = new LegacyAuthorizationStrategy(); + } + if(securityRealm==null) { + if(useSecurity==null || !useSecurity) + setSecurityRealm(SecurityRealm.NO_AUTHENTICATION); + else + setSecurityRealm(new LegacySecurityRealm()); + } else { + // force the set to proxy + setSecurityRealm(securityRealm); + } + + if(useSecurity!=null && !useSecurity) { + // forced reset to the unsecure mode. + // this works as an escape hatch for people who locked themselves out. + authorizationStrategy = AuthorizationStrategy.UNSECURED; + setSecurityRealm(SecurityRealm.NO_AUTHENTICATION); + } + + // Initialize the filter with the crumb issuer + setCrumbIssuer(crumbIssuer); + + // auto register root actions + for (Action a : getExtensionList(RootAction.class)) + if (!actions.contains(a)) actions.add(a); + } + }); + + return g; + } + + /** + * Save the settings to a file. + */ + public synchronized void save() throws IOException { + if(BulkChange.contains(this)) return; + getConfigFile().write(this); + SaveableListener.fireOnChange(this, getConfigFile()); + } + + + /** + * Called to shut down the system. 
+ */ + @edu.umd.cs.findbugs.annotations.SuppressWarnings(""ST_WRITE_TO_STATIC_FROM_INSTANCE_METHOD"") + public void cleanUp() { + for (ItemListener l : ItemListener.all()) + l.onBeforeShutdown(); + + Set> pending = new HashSet>(); + terminating = true; + for( Computer c : computers.values() ) { + c.interrupt(); + killComputer(c); + pending.add(c.disconnect(null)); + } + if(udpBroadcastThread!=null) + udpBroadcastThread.shutdown(); + if(dnsMultiCast!=null) + dnsMultiCast.close(); + interruptReloadThread(); + Timer timer = Trigger.timer; + if (timer != null) { + timer.cancel(); + } + // TODO: how to wait for the completion of the last job? + Trigger.timer = null; + if(tcpSlaveAgentListener!=null) + tcpSlaveAgentListener.shutdown(); + + if(pluginManager!=null) // be defensive. there could be some ugly timing related issues + pluginManager.stop(); + + if(getRootDir().exists()) + // if we are aborting because we failed to create JENKINS_HOME, + // don't try to save. Issue #536 + getQueue().save(); + + threadPoolForLoad.shutdown(); + for (Future f : pending) + try { + f.get(10, TimeUnit.SECONDS); // if clean up operation didn't complete in time, we fail the test + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + break; // someone wants us to die now. quick! + } catch (ExecutionException e) { + LOGGER.log(Level.WARNING, ""Failed to shut down properly"",e); + } catch (TimeoutException e) { + LOGGER.log(Level.WARNING, ""Failed to shut down properly"",e); + } + + LogFactory.releaseAll(); + + theInstance = null; + } + + public Object getDynamic(String token) { + for (Action a : getActions()) { + String url = a.getUrlName(); + if (url==null) continue; + if (url.equals(token) || url.equals('/' + token)) + return a; + } + for (Action a : getManagementLinks()) + if(a.getUrlName().equals(token)) + return a; + return null; + } + + +// +// +// actions +// +// + /** + * Accepts submission from the configuration page. + */ + public synchronized void doConfigSubmit( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException, FormException { + BulkChange bc = new BulkChange(this); + try { + checkPermission(ADMINISTER); + + JSONObject json = req.getSubmittedForm(); + + workspaceDir = json.getString(""rawWorkspaceDir""); + buildsDir = json.getString(""rawBuildsDir""); + + systemMessage = Util.nullify(req.getParameter(""system_message"")); + + jdks.clear(); + jdks.addAll(req.bindJSONToList(JDK.class,json.get(""jdks""))); + + boolean result = true; + for( Descriptor d : Functions.getSortedDescriptorsForGlobalConfig() ) + result &= configureDescriptor(req,json,d); + + version = VERSION; + + save(); + updateComputerList(); + if(result) + FormApply.success(req.getContextPath()+'/').generateResponse(req, rsp, null); + else + FormApply.success(""configure"").generateResponse(req, rsp, null); // back to config + } finally { + bc.commit(); + } + } + + /** + * Gets the {@link CrumbIssuer} currently in use. + * + * @return null if none is in use. + */ + public CrumbIssuer getCrumbIssuer() { + return crumbIssuer; + } + + public void setCrumbIssuer(CrumbIssuer issuer) { + crumbIssuer = issuer; + } + + public synchronized void doTestPost( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException { + rsp.sendRedirect(""foo""); + } + + private boolean configureDescriptor(StaplerRequest req, JSONObject json, Descriptor d) throws FormException { + // collapse the structure to remain backward compatible with the JSON structure before 1. 
+ String name = d.getJsonSafeClassName(); + JSONObject js = json.has(name) ? json.getJSONObject(name) : new JSONObject(); // if it doesn't have the property, the method returns invalid null object. + json.putAll(js); + return d.configure(req, js); + } + + /** + * Accepts submission from the node configuration page. + */ + public synchronized void doConfigExecutorsSubmit( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException, FormException { + checkPermission(ADMINISTER); + + BulkChange bc = new BulkChange(this); + try { + JSONObject json = req.getSubmittedForm(); + + MasterBuildConfiguration mbc = MasterBuildConfiguration.all().get(MasterBuildConfiguration.class); + if (mbc!=null) + mbc.configure(req,json); + + getNodeProperties().rebuild(req, json.optJSONObject(""nodeProperties""), NodeProperty.all()); + } finally { + bc.commit(); + } + + rsp.sendRedirect(req.getContextPath()+'/'+toComputer().getUrl()); // back to the computer page + } + + /** + * Accepts the new description. + */ + public synchronized void doSubmitDescription( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException { + getPrimaryView().doSubmitDescription(req, rsp); + } + + public synchronized HttpRedirect doQuietDown() throws IOException { + try { + return doQuietDown(false,0); + } catch (InterruptedException e) { + throw new AssertionError(); // impossible + } + } + + @CLIMethod(name=""quiet-down"") + public HttpRedirect doQuietDown( + @Option(name=""-block"",usage=""Block until the system really quiets down and no builds are running"") @QueryParameter boolean block, + @Option(name=""-timeout"",usage=""If non-zero, only block up to the specified number of milliseconds"") @QueryParameter int timeout) throws InterruptedException, IOException { + synchronized (this) { + checkPermission(ADMINISTER); + isQuietingDown = true; + } + if (block) { + if (timeout > 0) timeout += System.currentTimeMillis(); + while (isQuietingDown + && (timeout <= 0 || System.currentTimeMillis() < timeout) + && !RestartListener.isAllReady()) { + Thread.sleep(1000); + } + } + return new HttpRedirect("".""); + } + + @CLIMethod(name=""cancel-quiet-down"") + public synchronized HttpRedirect doCancelQuietDown() { + checkPermission(ADMINISTER); + isQuietingDown = false; + getQueue().scheduleMaintenance(); + return new HttpRedirect("".""); + } + + /** + * Backward compatibility. Redirect to the thread dump. + */ + public void doClassicThreadDump(StaplerResponse rsp) throws IOException, ServletException { + rsp.sendRedirect2(""threadDump""); + } + + /** + * Obtains the thread dump of all slaves (including the master.) + * + *
+ * Since this is for diagnostics, it has a built-in precautionary measure against hang slaves. + */ + public Map> getAllThreadDumps() throws IOException, InterruptedException { + checkPermission(ADMINISTER); + + // issue the requests all at once + Map>> future = new HashMap>>(); + + for (Computer c : getComputers()) { + try { + future.put(c.getName(), RemotingDiagnostics.getThreadDumpAsync(c.getChannel())); + } catch(Exception e) { + LOGGER.info(""Failed to get thread dump for node "" + c.getName() + "": "" + e.getMessage()); + } + } + if (toComputer() == null) { + future.put(""master"", RemotingDiagnostics.getThreadDumpAsync(MasterComputer.localChannel)); + } + + // if the result isn't available in 5 sec, ignore that. + // this is a precaution against hang nodes + long endTime = System.currentTimeMillis() + 5000; + + Map> r = new HashMap>(); + for (Entry>> e : future.entrySet()) { + try { + r.put(e.getKey(), e.getValue().get(endTime-System.currentTimeMillis(), TimeUnit.MILLISECONDS)); + } catch (Exception x) { + StringWriter sw = new StringWriter(); + x.printStackTrace(new PrintWriter(sw,true)); + r.put(e.getKey(), Collections.singletonMap(""Failed to retrieve thread dump"",sw.toString())); + } + } + return r; + } + + public synchronized TopLevelItem doCreateItem( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException { + return itemGroupMixIn.createTopLevelItem(req, rsp); + } + + /** + * @since 1.319 + */ + public TopLevelItem createProjectFromXML(String name, InputStream xml) throws IOException { + return itemGroupMixIn.createProjectFromXML(name, xml); + } + + + @SuppressWarnings({""unchecked""}) + public T copy(T src, String name) throws IOException { + return itemGroupMixIn.copy(src, name); + } + + // a little more convenient overloading that assumes the caller gives us the right type + // (or else it will fail with ClassCastException) + public > T copy(T src, String name) throws IOException { + return (T)copy((TopLevelItem)src,name); + } + + public synchronized void doCreateView( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException, FormException { + checkPermission(View.CREATE); + addView(View.create(req,rsp, this)); + } + + /** + * Check if the given name is suitable as a name + * for job, view, etc. + * + * @throws ParseException + * if the given name is not good + */ + public static void checkGoodName(String name) throws Failure { + if(name==null || name.length()==0) + throw new Failure(Messages.Hudson_NoName()); + + for( int i=0; i[]:;"".indexOf(ch)!=-1) + throw new Failure(Messages.Hudson_UnsafeChar(ch)); + } + + // looks good + } + + /** + * Makes sure that the given name is good as a job name. + * @return trimmed name if valid; throws ParseException if not + */ + private String checkJobName(String name) throws Failure { + checkGoodName(name); + name = name.trim(); + projectNamingStrategy.checkName(name); + if(getItem(name)!=null) + throw new Failure(Messages.Hudson_JobAlreadyExists(name)); + // looks good + return name; + } + + private static String toPrintableName(String name) { + StringBuilder printableName = new StringBuilder(); + for( int i=0; i args = new ArrayList(); + while (true) + args.add(new byte[1024*1024]); + } + + private transient final Map duplexChannels = new HashMap(); + + /** + * Handles HTTP requests for duplex channels for CLI. 
+ */ + public void doCli(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException, InterruptedException { + if (!""POST"".equals(req.getMethod())) { + // for GET request, serve _cli.jelly, assuming this is a browser + checkPermission(READ); + req.getView(this,""_cli.jelly"").forward(req,rsp); + return; + } + + // do not require any permission to establish a CLI connection + // the actual authentication for the connecting Channel is done by CLICommand + + UUID uuid = UUID.fromString(req.getHeader(""Session"")); + rsp.setHeader(""Hudson-Duplex"",""""); // set the header so that the client would know + + FullDuplexHttpChannel server; + if(req.getHeader(""Side"").equals(""download"")) { + duplexChannels.put(uuid,server=new FullDuplexHttpChannel(uuid, !hasPermission(ADMINISTER)) { + protected void main(Channel channel) throws IOException, InterruptedException { + // capture the identity given by the transport, since this can be useful for SecurityRealm.createCliAuthenticator() + channel.setProperty(CLICommand.TRANSPORT_AUTHENTICATION,getAuthentication()); + channel.setProperty(CliEntryPoint.class.getName(),new CliManagerImpl(channel)); + } + }); + try { + server.download(req,rsp); + } finally { + duplexChannels.remove(uuid); + } + } else { + duplexChannels.get(uuid).upload(req,rsp); + } + } + + /** + * Binds /userContent/... to $JENKINS_HOME/userContent. + */ + public DirectoryBrowserSupport doUserContent() { + return new DirectoryBrowserSupport(this,getRootPath().child(""userContent""),""User content"",""folder.png"",true); + } + + /** + * Perform a restart of Hudson, if we can. + * + * This first replaces ""app"" to {@link HudsonIsRestarting} + */ + @CLIMethod(name=""restart"") + public void doRestart(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException, RestartNotSupportedException { + checkPermission(ADMINISTER); + if (req != null && req.getMethod().equals(""GET"")) { + req.getView(this,""_restart.jelly"").forward(req,rsp); + return; + } + + restart(); + + if (rsp != null) // null for CLI + rsp.sendRedirect2("".""); + } + + /** + * Queues up a restart of Hudson for when there are no builds running, if we can. + * + * This first replaces ""app"" to {@link HudsonIsRestarting} + * + * @since 1.332 + */ + @CLIMethod(name=""safe-restart"") + public HttpResponse doSafeRestart(StaplerRequest req) throws IOException, ServletException, RestartNotSupportedException { + checkPermission(ADMINISTER); + if (req != null && req.getMethod().equals(""GET"")) + return HttpResponses.forwardToView(this,""_safeRestart.jelly""); + + safeRestart(); + + return HttpResponses.redirectToDot(); + } + + /** + * Performs a restart. 
+ */ + public void restart() throws RestartNotSupportedException { + final Lifecycle lifecycle = Lifecycle.get(); + lifecycle.verifyRestartable(); // verify that Hudson is restartable + servletContext.setAttribute(""app"", new HudsonIsRestarting()); + + new Thread(""restart thread"") { + final String exitUser = getAuthentication().getName(); + @Override + public void run() { + try { + ACL.impersonate(ACL.SYSTEM); + + // give some time for the browser to load the ""reloading"" page + Thread.sleep(5000); + LOGGER.severe(String.format(""Restarting VM as requested by %s"",exitUser)); + for (RestartListener listener : RestartListener.all()) + listener.onRestart(); + lifecycle.restart(); + } catch (InterruptedException e) { + LOGGER.log(Level.WARNING, ""Failed to restart Hudson"",e); + } catch (IOException e) { + LOGGER.log(Level.WARNING, ""Failed to restart Hudson"",e); + } + } + }.start(); + } + + /** + * Queues up a restart to be performed once there are no builds currently running. + * @since 1.332 + */ + public void safeRestart() throws RestartNotSupportedException { + final Lifecycle lifecycle = Lifecycle.get(); + lifecycle.verifyRestartable(); // verify that Hudson is restartable + // Quiet down so that we won't launch new builds. + isQuietingDown = true; + + new Thread(""safe-restart thread"") { + final String exitUser = getAuthentication().getName(); + @Override + public void run() { + try { + ACL.impersonate(ACL.SYSTEM); + + // Wait 'til we have no active executors. + doQuietDown(true, 0); + + // Make sure isQuietingDown is still true. + if (isQuietingDown) { + servletContext.setAttribute(""app"",new HudsonIsRestarting()); + // give some time for the browser to load the ""reloading"" page + LOGGER.info(""Restart in 10 seconds""); + Thread.sleep(10000); + LOGGER.severe(String.format(""Restarting VM as requested by %s"",exitUser)); + for (RestartListener listener : RestartListener.all()) + listener.onRestart(); + lifecycle.restart(); + } else { + LOGGER.info(""Safe-restart mode cancelled""); + } + } catch (InterruptedException e) { + LOGGER.log(Level.WARNING, ""Failed to restart Hudson"",e); + } catch (IOException e) { + LOGGER.log(Level.WARNING, ""Failed to restart Hudson"",e); + } + } + }.start(); + } + + /** + * Shutdown the system. + * @since 1.161 + */ + @CLIMethod(name=""shutdown"") + public void doExit( StaplerRequest req, StaplerResponse rsp ) throws IOException { + checkPermission(ADMINISTER); + LOGGER.severe(String.format(""Shutting down VM as requested by %s from %s"", + getAuthentication().getName(), req!=null?req.getRemoteAddr():""???"")); + if (rsp!=null) { + rsp.setStatus(HttpServletResponse.SC_OK); + rsp.setContentType(""text/plain""); + PrintWriter w = rsp.getWriter(); + w.println(""Shutting down""); + w.close(); + } + + System.exit(0); + } + + + /** + * Shutdown the system safely. + * @since 1.332 + */ + @CLIMethod(name=""safe-shutdown"") + public HttpResponse doSafeExit(StaplerRequest req) throws IOException { + checkPermission(ADMINISTER); + isQuietingDown = true; + final String exitUser = getAuthentication().getName(); + final String exitAddr = req!=null ? req.getRemoteAddr() : ""unknown""; + new Thread(""safe-exit thread"") { + @Override + public void run() { + try { + ACL.impersonate(ACL.SYSTEM); + LOGGER.severe(String.format(""Shutting down VM as requested by %s from %s"", + exitUser, exitAddr)); + // Wait 'til we have no active executors. 
+ while (isQuietingDown + && (overallLoad.computeTotalExecutors() > overallLoad.computeIdleExecutors())) { + Thread.sleep(5000); + } + // Make sure isQuietingDown is still true. + if (isQuietingDown) { + cleanUp(); + System.exit(0); + } + } catch (InterruptedException e) { + LOGGER.log(Level.WARNING, ""Failed to shutdown Hudson"",e); + } + } + }.start(); + + return HttpResponses.plainText(""Shutting down as soon as all jobs are complete""); + } + + /** + * Gets the {@link Authentication} object that represents the user + * associated with the current request. + */ + public static Authentication getAuthentication() { + Authentication a = SecurityContextHolder.getContext().getAuthentication(); + // on Tomcat while serving the login page, this is null despite the fact + // that we have filters. Looking at the stack trace, Tomcat doesn't seem to + // run the request through filters when this is the login request. + // see http://www.nabble.com/Matrix-authorization-problem-tp14602081p14886312.html + if(a==null) + a = ANONYMOUS; + return a; + } + + /** + * For system diagnostics. + * Run arbitrary Groovy script. + */ + public void doScript(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException { + doScript(req, rsp, req.getView(this, ""_script.jelly"")); + } + + /** + * Run arbitrary Groovy script and return result as plain text. + */ + public void doScriptText(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException { + doScript(req, rsp, req.getView(this, ""_scriptText.jelly"")); + } + + private void doScript(StaplerRequest req, StaplerResponse rsp, RequestDispatcher view) throws IOException, ServletException { + // ability to run arbitrary script is dangerous + checkPermission(RUN_SCRIPTS); + + String text = req.getParameter(""script""); + if (text != null) { + try { + req.setAttribute(""output"", + RemotingDiagnostics.executeGroovy(text, MasterComputer.localChannel)); + } catch (InterruptedException e) { + throw new ServletException(e); + } + } + + view.forward(req, rsp); + } + + /** + * Evaluates the Jelly script submitted by the client. + * + * This is useful for system administration as well as unit testing. + */ + @RequirePOST + public void doEval(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException { + checkPermission(ADMINISTER); + + try { + MetaClass mc = WebApp.getCurrent().getMetaClass(getClass()); + Script script = mc.classLoader.loadTearOff(JellyClassLoaderTearOff.class).createContext().compileScript(new InputSource(req.getReader())); + new JellyRequestDispatcher(this,script).forward(req,rsp); + } catch (JellyException e) { + throw new ServletException(e); + } + } + + /** + * Sign up for the user account. + */ + public void doSignup( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException { + req.getView(getSecurityRealm(), ""signup.jelly"").forward(req, rsp); + } + + /** + * Changes the icon size by changing the cookie + */ + public void doIconSize( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException { + String qs = req.getQueryString(); + if(qs==null || !ICON_SIZE.matcher(qs).matches()) + throw new ServletException(); + Cookie cookie = new Cookie(""iconSize"", qs); + cookie.setMaxAge(/* ~4 mo. 
*/9999999); // #762 + rsp.addCookie(cookie); + String ref = req.getHeader(""Referer""); + if(ref==null) ref="".""; + rsp.sendRedirect2(ref); + } + + public void doFingerprintCleanup(StaplerResponse rsp) throws IOException { + FingerprintCleanupThread.invoke(); + rsp.setStatus(HttpServletResponse.SC_OK); + rsp.setContentType(""text/plain""); + rsp.getWriter().println(""Invoked""); + } + + public void doWorkspaceCleanup(StaplerResponse rsp) throws IOException { + WorkspaceCleanupThread.invoke(); + rsp.setStatus(HttpServletResponse.SC_OK); + rsp.setContentType(""text/plain""); + rsp.getWriter().println(""Invoked""); + } + + /** + * If the user chose the default JDK, make sure we got 'java' in PATH. + */ + public FormValidation doDefaultJDKCheck(StaplerRequest request, @QueryParameter String value) { + if(!value.equals(""(Default)"")) + // assume the user configured named ones properly in system config --- + // or else system config should have reported form field validation errors. + return FormValidation.ok(); + + // default JDK selected. Does such java really exist? + if(JDK.isDefaultJDKValid(Jenkins.this)) + return FormValidation.ok(); + else + return FormValidation.errorWithMarkup(Messages.Hudson_NoJavaInPath(request.getContextPath())); + } + + /** + * Makes sure that the given name is good as a job name. + */ + public FormValidation doCheckJobName(@QueryParameter String value) { + // this method can be used to check if a file exists anywhere in the file system, + // so it should be protected. + checkPermission(Item.CREATE); + + if(fixEmpty(value)==null) + return FormValidation.ok(); + + try { + checkJobName(value); + return FormValidation.ok(); + } catch (Failure e) { + return FormValidation.error(e.getMessage()); + } + } + + /** + * Checks if a top-level view with the given name exists. + */ + public FormValidation doViewExistsCheck(@QueryParameter String value) { + checkPermission(View.CREATE); + + String view = fixEmpty(value); + if(view==null) return FormValidation.ok(); + + if(getView(view)==null) + return FormValidation.ok(); + else + return FormValidation.error(Messages.Hudson_ViewAlreadyExists(view)); + } + + /** + * Serves static resources placed along with Jelly view files. + *
+ * This method can serve a lot of files, so care needs to be taken + * to make this method secure. It's not clear to me what's the best + * strategy here, though the current implementation is based on + * file extensions. + */ + public void doResources(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException { + String path = req.getRestOfPath(); + // cut off the ""..."" portion of /resources/.../path/to/file + // as this is only used to make path unique (which in turn + // allows us to set a long expiration date + path = path.substring(path.indexOf('/',1)+1); + + int idx = path.lastIndexOf('.'); + String extension = path.substring(idx+1); + if(ALLOWED_RESOURCE_EXTENSIONS.contains(extension)) { + URL url = pluginManager.uberClassLoader.getResource(path); + if(url!=null) { + long expires = MetaClass.NO_CACHE ? 0 : 365L * 24 * 60 * 60 * 1000; /*1 year*/ + rsp.serveFile(req,url,expires); + return; + } + } + rsp.sendError(HttpServletResponse.SC_NOT_FOUND); + } + + /** + * Extension list that {@link #doResources(StaplerRequest, StaplerResponse)} can serve. + * This set is mutable to allow plugins to add additional extensions. + */ + public static final Set ALLOWED_RESOURCE_EXTENSIONS = new HashSet(Arrays.asList( + ""js|css|jpeg|jpg|png|gif|html|htm"".split(""\\|"") + )); + + /** + * Checks if container uses UTF-8 to decode URLs. See + * http://wiki.jenkins-ci.org/display/JENKINS/Tomcat#Tomcat-i18n + */ + public FormValidation doCheckURIEncoding(StaplerRequest request) throws IOException { + // expected is non-ASCII String + final String expected = ""\u57f7\u4e8b""; + final String value = fixEmpty(request.getParameter(""value"")); + if (!expected.equals(value)) + return FormValidation.warningWithMarkup(Messages.Hudson_NotUsesUTF8ToDecodeURL()); + return FormValidation.ok(); + } + + /** + * Does not check when system default encoding is ""ISO-8859-1"". + */ + public static boolean isCheckURIEncodingEnabled() { + return !""ISO-8859-1"".equalsIgnoreCase(System.getProperty(""file.encoding"")); + } + + /** + * Rebuilds the dependency map. + */ + public void rebuildDependencyGraph() { + DependencyGraph graph = new DependencyGraph(); + graph.build(); + // volatile acts a as a memory barrier here and therefore guarantees + // that graph is fully build, before it's visible to other threads + dependencyGraph = graph; + } + + public DependencyGraph getDependencyGraph() { + return dependencyGraph; + } + + // for Jelly + public List getManagementLinks() { + return ManagementLink.all(); + } + + /** + * Exposes the current user to /me URL. + */ + public User getMe() { + User u = User.current(); + if (u == null) + throw new AccessDeniedException(""/me is not available when not logged in""); + return u; + } + + /** + * Gets the {@link Widget}s registered on this object. + * + *
+ * Plugins who wish to contribute boxes on the side panel can add widgets + * by {@code getWidgets().add(new MyWidget())} from {@link Plugin#start()}. + */ + public List getWidgets() { + return widgets; + } + + public Object getTarget() { + try { + checkPermission(READ); + } catch (AccessDeniedException e) { + String rest = Stapler.getCurrentRequest().getRestOfPath(); + if(rest.startsWith(""/login"") + || rest.startsWith(""/logout"") + || rest.startsWith(""/accessDenied"") + || rest.startsWith(""/adjuncts/"") + || rest.startsWith(""/signup"") + || rest.startsWith(""/tcpSlaveAgentListener"") + || rest.startsWith(""/cli"") + || rest.startsWith(""/federatedLoginService/"") + || rest.startsWith(""/securityRealm"")) + return this; // URLs that are always visible without READ permission + + for (String name : getUnprotectedRootActions()) { + if (rest.startsWith(""/"" + name + ""/"") || rest.equals(""/"" + name)) { + return this; + } + } + + throw e; + } + return this; + } + + /** + * Gets a list of unprotected root actions. + * These URL prefixes should be exempted from access control checks by container-managed security. + * Ideally would be synchronized with {@link #getTarget}. + * @return a list of {@linkplain Action#getUrlName URL names} + * @since 1.495 + */ + public Collection getUnprotectedRootActions() { + Set names = new TreeSet(); + names.add(""jnlpJars""); // XXX cleaner to refactor doJnlpJars into a URA + // XXX consider caching (expiring cache when actions changes) + for (Action a : getActions()) { + if (a instanceof UnprotectedRootAction) { + names.add(a.getUrlName()); + } + } + return names; + } + + /** + * Fallback to the primary view. + */ + public View getStaplerFallback() { + return getPrimaryView(); + } + + /** + * This method checks all existing jobs to see if displayName is + * unique. It does not check the displayName against the displayName of the + * job that the user is configuring though to prevent a validation warning + * if the user sets the displayName to what it currently is. + * @param displayName + * @param currentJobName + * @return + */ + boolean isDisplayNameUnique(String displayName, String currentJobName) { + Collection itemCollection = items.values(); + + // if there are a lot of projects, we'll have to store their + // display names in a HashSet or something for a quick check + for(TopLevelItem item : itemCollection) { + if(item.getName().equals(currentJobName)) { + // we won't compare the candidate displayName against the current + // item. 
This is to prevent an validation warning if the user + // sets the displayName to what the existing display name is + continue; + } + else if(displayName.equals(item.getDisplayName())) { + return false; + } + } + + return true; + } + + /** + * True if there is no item in Jenkins that has this name + * @param name The name to test + * @param currentJobName The name of the job that the user is configuring + * @return + */ + boolean isNameUnique(String name, String currentJobName) { + Item item = getItem(name); + + if(null==item) { + // the candidate name didn't return any items so the name is unique + return true; + } + else if(item.getName().equals(currentJobName)) { + // the candidate name returned an item, but the item is the item + // that the user is configuring so this is ok + return true; + } + else { + // the candidate name returned an item, so it is not unique + return false; + } + } + + /** + * Checks to see if the candidate displayName collides with any + * existing display names or project names + * @param displayName The display name to test + * @param jobName The name of the job the user is configuring + * @return + */ + public FormValidation doCheckDisplayName(@QueryParameter String displayName, + @QueryParameter String jobName) { + displayName = displayName.trim(); + + if(LOGGER.isLoggable(Level.FINE)) { + LOGGER.log(Level.FINE, ""Current job name is "" + jobName); + } + + if(!isNameUnique(displayName, jobName)) { + return FormValidation.warning(Messages.Jenkins_CheckDisplayName_NameNotUniqueWarning(displayName)); + } + else if(!isDisplayNameUnique(displayName, jobName)){ + return FormValidation.warning(Messages.Jenkins_CheckDisplayName_DisplayNameNotUniqueWarning(displayName)); + } + else { + return FormValidation.ok(); + } + } + + public static class MasterComputer extends Computer { + protected MasterComputer() { + super(Jenkins.getInstance()); + } + + /** + * Returns """" to match with {@link Jenkins#getNodeName()}. + */ + @Override + public String getName() { + return """"; + } + + @Override + public boolean isConnecting() { + return false; + } + + @Override + public String getDisplayName() { + return Messages.Hudson_Computer_DisplayName(); + } + + @Override + public String getCaption() { + return Messages.Hudson_Computer_Caption(); + } + + @Override + public String getUrl() { + return ""computer/(master)/""; + } + + public RetentionStrategy getRetentionStrategy() { + return RetentionStrategy.NOOP; + } + + /** + * Report an error. + */ + @Override + public HttpResponse doDoDelete() throws IOException { + throw HttpResponses.status(SC_BAD_REQUEST); + } + + @Override + public void doConfigSubmit(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException, FormException { + Jenkins.getInstance().doConfigExecutorsSubmit(req, rsp); + } + + @Override + public boolean hasPermission(Permission permission) { + // no one should be allowed to delete the master. + // this hides the ""delete"" link from the /computer/(master) page. + if(permission==Computer.DELETE) + return false; + // Configuration of master node requires ADMINISTER permission + return super.hasPermission(permission==Computer.CONFIGURE ? 
Jenkins.ADMINISTER : permission); + } + + @Override + public VirtualChannel getChannel() { + return localChannel; + } + + @Override + public Charset getDefaultCharset() { + return Charset.defaultCharset(); + } + + public List getLogRecords() throws IOException, InterruptedException { + return logRecords; + } + + public void doLaunchSlaveAgent(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException { + // this computer never returns null from channel, so + // this method shall never be invoked. + rsp.sendError(SC_NOT_FOUND); + } + + protected Future _connect(boolean forceReconnect) { + return Futures.precomputed(null); + } + + /** + * {@link LocalChannel} instance that can be used to execute programs locally. + */ + public static final LocalChannel localChannel = new LocalChannel(threadPoolForRemoting); + } + + /** + * Shortcut for {@code Hudson.getInstance().lookup.get(type)} + */ + public static T lookup(Class type) { + return Jenkins.getInstance().lookup.get(type); + } + + /** + * Live view of recent {@link LogRecord}s produced by Hudson. + */ + public static List logRecords = Collections.emptyList(); // initialized to dummy value to avoid NPE + + /** + * Thread-safe reusable {@link XStream}. + */ + public static final XStream XSTREAM = new XStream2(); + + /** + * Alias to {@link #XSTREAM} so that one can access additional methods on {@link XStream2} more easily. + */ + public static final XStream2 XSTREAM2 = (XStream2)XSTREAM; + + private static final int TWICE_CPU_NUM = Math.max(4, Runtime.getRuntime().availableProcessors() * 2); + + /** + * Thread pool used to load configuration in parallel, to improve the start up time. + *
+ * The idea here is to overlap the CPU and I/O, so we want more threads than CPU numbers. + */ + /*package*/ transient final ExecutorService threadPoolForLoad = new ThreadPoolExecutor( + TWICE_CPU_NUM, TWICE_CPU_NUM, + 5L, TimeUnit.SECONDS, new LinkedBlockingQueue(), new DaemonThreadFactory()); + + + private static void computeVersion(ServletContext context) { + // set the version + Properties props = new Properties(); + try { + InputStream is = Jenkins.class.getResourceAsStream(""jenkins-version.properties""); + if(is!=null) + props.load(is); + } catch (IOException e) { + e.printStackTrace(); // if the version properties is missing, that's OK. + } + String ver = props.getProperty(""version""); + if(ver==null) ver=""?""; + VERSION = ver; + context.setAttribute(""version"",ver); + + VERSION_HASH = Util.getDigestOf(ver).substring(0, 8); + SESSION_HASH = Util.getDigestOf(ver+System.currentTimeMillis()).substring(0, 8); + + if(ver.equals(""?"") || Boolean.getBoolean(""hudson.script.noCache"")) + RESOURCE_PATH = """"; + else + RESOURCE_PATH = ""/static/""+SESSION_HASH; + + VIEW_RESOURCE_PATH = ""/resources/""+ SESSION_HASH; + } + + /** + * Version number of this Hudson. + */ + public static String VERSION=""?""; + + /** + * Parses {@link #VERSION} into {@link VersionNumber}, or null if it's not parseable as a version number + * (such as when Hudson is run with ""mvn hudson-dev:run"") + */ + public static VersionNumber getVersion() { + try { + return new VersionNumber(VERSION); + } catch (NumberFormatException e) { + try { + // for non-released version of Hudson, this looks like ""1.345 (private-foobar), so try to approximate. + int idx = VERSION.indexOf(' '); + if (idx>0) + return new VersionNumber(VERSION.substring(0,idx)); + } catch (NumberFormatException _) { + // fall through + } + + // totally unparseable + return null; + } catch (IllegalArgumentException e) { + // totally unparseable + return null; + } + } + + /** + * Hash of {@link #VERSION}. + */ + public static String VERSION_HASH; + + /** + * Unique random token that identifies the current session. + * Used to make {@link #RESOURCE_PATH} unique so that we can set long ""Expires"" header. + * + * We used to use {@link #VERSION_HASH}, but making this session local allows us to + * reuse the same {@link #RESOURCE_PATH} for static resources in plugins. + */ + public static String SESSION_HASH; + + /** + * Prefix to static resources like images and javascripts in the war file. + * Either """" or strings like ""/static/VERSION"", which avoids Hudson to pick up + * stale cache when the user upgrades to a different version. + *
+ * Value computed in {@link WebAppMain}. + */ + public static String RESOURCE_PATH = """"; + + /** + * Prefix to resources alongside view scripts. + * Strings like ""/resources/VERSION"", which avoids Hudson to pick up + * stale cache when the user upgrades to a different version. + *
+ * Value computed in {@link WebAppMain}. + */ + public static String VIEW_RESOURCE_PATH = ""/resources/TBD""; + + public static boolean PARALLEL_LOAD = Configuration.getBooleanConfigParameter(""parallelLoad"", true); + public static boolean KILL_AFTER_LOAD = Configuration.getBooleanConfigParameter(""killAfterLoad"", false); + /** + * Enabled by default as of 1.337. Will keep it for a while just in case we have some serious problems. + */ + public static boolean FLYWEIGHT_SUPPORT = Configuration.getBooleanConfigParameter(""flyweightSupport"", true); + + /** + * Tentative switch to activate the concurrent build behavior. + * When we merge this back to the trunk, this allows us to keep + * this feature hidden for a while until we iron out the kinks. + * @see AbstractProject#isConcurrentBuild() + * @deprecated as of 1.464 + * This flag will have no effect. + */ + @Restricted(NoExternalUse.class) + public static boolean CONCURRENT_BUILD = true; + + /** + * Switch to enable people to use a shorter workspace name. + */ + private static final String WORKSPACE_DIRNAME = Configuration.getStringConfigParameter(""workspaceDirName"", ""workspace""); + + /** + * Automatically try to launch a slave when Jenkins is initialized or a new slave is created. + */ + public static boolean AUTOMATIC_SLAVE_LAUNCH = true; + + private static final Logger LOGGER = Logger.getLogger(Jenkins.class.getName()); + + private static final Pattern ICON_SIZE = Pattern.compile(""\\d+x\\d+""); + + public static final PermissionGroup PERMISSIONS = Permission.HUDSON_PERMISSIONS; + public static final Permission ADMINISTER = Permission.HUDSON_ADMINISTER; + public static final Permission READ = new Permission(PERMISSIONS,""Read"",Messages._Hudson_ReadPermission_Description(),Permission.READ,PermissionScope.JENKINS); + public static final Permission RUN_SCRIPTS = new Permission(PERMISSIONS, ""RunScripts"", Messages._Hudson_RunScriptsPermission_Description(),ADMINISTER,PermissionScope.JENKINS); + + /** + * {@link Authentication} object that represents the anonymous user. + * Because Acegi creates its own {@link AnonymousAuthenticationToken} instances, the code must not + * expect the singleton semantics. This is just a convenient instance. + * + * @since 1.343 + */ + public static final Authentication ANONYMOUS = new AnonymousAuthenticationToken( + ""anonymous"",""anonymous"",new GrantedAuthority[]{new GrantedAuthorityImpl(""anonymous"")}); + + static { + XSTREAM.alias(""jenkins"",Jenkins.class); + XSTREAM.alias(""slave"", DumbSlave.class); + XSTREAM.alias(""jdk"",JDK.class); + // for backward compatibility with <1.75, recognize the tag name ""view"" as well. + XSTREAM.alias(""view"", ListView.class); + XSTREAM.alias(""listView"", ListView.class); + // this seems to be necessary to force registration of converter early enough + Mode.class.getEnumConstants(); + + // double check that initialization order didn't do any harm + assert PERMISSIONS!=null; + assert ADMINISTER!=null; + } + +} +" +287,1," Object getValue(Object parent); + + /** + * Returns the name of this cascadable element. + * + * @return The name of this cascadable element. 
+ */ +" +288,1," public String getInfo() { + + return (info); + + } + + + +" +289,1," protected boolean addInputFilter(InputFilter[] inputFilters, + String encodingName) { + if (encodingName.equals(""identity"")) { + // Skip + } else if (encodingName.equals(""chunked"")) { + inputBuffer.addActiveFilter + (inputFilters[Constants.CHUNKED_FILTER]); + contentDelimitation = true; + } else { + for (int i = 2; i < inputFilters.length; i++) { + if (inputFilters[i].getEncodingName() + .toString().equals(encodingName)) { + inputBuffer.addActiveFilter(inputFilters[i]); + return true; + } + } + return false; + } + return true; + } + + + /** + * Specialized utility method: find a sequence of lower case bytes inside + * a ByteChunk. + */ +" +290,1," public void changePassword_Resets_Session() throws Exception { + ScimUser user = createUser(); + + MockHttpSession session = new MockHttpSession(); + MockHttpSession afterLoginSession = (MockHttpSession) getMockMvc().perform(post(""/login.do"") + .session(session) + .accept(TEXT_HTML_VALUE) + .param(""username"", user.getUserName()) + .param(""password"", ""secr3T"")) + .andExpect(status().isFound()) + .andExpect(redirectedUrl(""/"")) + .andReturn().getRequest().getSession(false); + + assertTrue(session.isInvalid()); + assertNotNull(afterLoginSession); + assertNotNull(afterLoginSession.getAttribute(HttpSessionSecurityContextRepository.SPRING_SECURITY_CONTEXT_KEY)); + + MockHttpSession afterPasswordChange = (MockHttpSession) getMockMvc().perform(post(""/change_password.do"") + .session(afterLoginSession) + .with(csrf()) + .accept(TEXT_HTML_VALUE) + .param(""current_password"", ""secr3T"") + .param(""new_password"", ""secr3T1"") + .param(""confirm_password"", ""secr3T1"")) + .andExpect(status().isFound()) + .andExpect(redirectedUrl(""profile"")) + .andReturn().getRequest().getSession(false); + + assertTrue(afterLoginSession.isInvalid()); + assertNotNull(afterPasswordChange); + assertNotNull(afterPasswordChange.getAttribute(HttpSessionSecurityContextRepository.SPRING_SECURITY_CONTEXT_KEY)); + assertNotSame(afterLoginSession, afterPasswordChange); + + } + +" +291,1," public String changePassword( + Model model, + @RequestParam(""current_password"") String currentPassword, + @RequestParam(""new_password"") String newPassword, + @RequestParam(""confirm_password"") String confirmPassword, + HttpServletResponse response, + HttpServletRequest request) { + + PasswordConfirmationValidation validation = new PasswordConfirmationValidation(newPassword, confirmPassword); + if (!validation.valid()) { + model.addAttribute(""message_code"", validation.getMessageCode()); + response.setStatus(HttpStatus.UNPROCESSABLE_ENTITY.value()); + return ""change_password""; + } + + SecurityContext securityContext = SecurityContextHolder.getContext(); + Authentication authentication = securityContext.getAuthentication(); + String username = authentication.getName(); + + try { + changePasswordService.changePassword(username, currentPassword, newPassword); + request.getSession().invalidate(); + request.getSession(true); + securityContext.setAuthentication(authentication); + return ""redirect:profile""; + } catch (BadCredentialsException e) { + model.addAttribute(""message_code"", ""unauthorized""); + } catch (InvalidPasswordException e) { + model.addAttribute(""message"", e.getMessagesAsOneString()); + } + response.setStatus(HttpStatus.UNPROCESSABLE_ENTITY.value()); + return ""change_password""; + } +" +292,1," private XPathEvaluator createEvaluator(String xpath2) { + try { + return 
(XPathEvaluator)EVALUATOR_CONSTRUCTOR.newInstance(new Object[] {xpath}); + } catch (InvocationTargetException e) { + Throwable cause = e.getCause(); + if (cause instanceof RuntimeException) { + throw (RuntimeException)cause; + } + throw new RuntimeException(""Invalid XPath Expression: "" + xpath + "" reason: "" + e.getMessage(), e); + } catch (Throwable e) { + throw new RuntimeException(""Invalid XPath Expression: "" + xpath + "" reason: "" + e.getMessage(), e); + } + } + +" +293,1," protected RequestEntity createRequestEntity(Exchange exchange) throws CamelExchangeException { + Message in = exchange.getIn(); + if (in.getBody() == null) { + return null; + } + + RequestEntity answer = in.getBody(RequestEntity.class); + if (answer == null) { + try { + Object data = in.getBody(); + if (data != null) { + String contentType = ExchangeHelper.getContentType(exchange); + + if (contentType != null && HttpConstants.CONTENT_TYPE_JAVA_SERIALIZED_OBJECT.equals(contentType)) { + // serialized java object + Serializable obj = in.getMandatoryBody(Serializable.class); + // write object to output stream + ByteArrayOutputStream bos = new ByteArrayOutputStream(); + HttpHelper.writeObjectToStream(bos, obj); + answer = new ByteArrayRequestEntity(bos.toByteArray(), HttpConstants.CONTENT_TYPE_JAVA_SERIALIZED_OBJECT); + IOHelper.close(bos); + } else if (data instanceof File || data instanceof GenericFile) { + // file based (could potentially also be a FTP file etc) + File file = in.getBody(File.class); + if (file != null) { + answer = new FileRequestEntity(file, contentType); + } + } else if (data instanceof String) { + // be a bit careful with String as any type can most likely be converted to String + // so we only do an instanceof check and accept String if the body is really a String + // do not fallback to use the default charset as it can influence the request + // (for example application/x-www-form-urlencoded forms being sent) + String charset = IOHelper.getCharsetName(exchange, false); + answer = new StringRequestEntity((String) data, contentType, charset); + } + // fallback as input stream + if (answer == null) { + // force the body as an input stream since this is the fallback + InputStream is = in.getMandatoryBody(InputStream.class); + answer = new InputStreamRequestEntity(is, contentType); + } + } + } catch (UnsupportedEncodingException e) { + throw new CamelExchangeException(""Error creating RequestEntity from message body"", exchange, e); + } catch (IOException e) { + throw new CamelExchangeException(""Error serializing message body"", exchange, e); + } + } + return answer; + } + +" +294,1," public void execute(String key, ActionMapping mapping) { + String location = key.substring(REDIRECT_ACTION_PREFIX + .length()); + ServletRedirectResult redirect = new ServletRedirectResult(); + container.inject(redirect); + String extension = getDefaultExtension(); + if (extension != null && extension.length() > 0) { + location += ""."" + extension; + } + redirect.setLocation(location); + mapping.setResult(redirect); + } + }); + } + }; + } + + /** + * Adds a parameter action. 
Should only be called during initialization + * + * @param prefix The string prefix to trigger the action + * @param parameterAction The parameter action to execute + * @since 2.1.0 + */ +" +295,1," private void findConstructor() { + try { + iConstructor = iClassToInstantiate.getConstructor(iParamTypes); + } catch (final NoSuchMethodException ex) { + throw new IllegalArgumentException(""InstantiateFactory: The constructor must exist and be public ""); + } + } + + /** + * Creates an object using the stored constructor. + * + * @return the new object + */ +" +296,1," private void checkParams() + throws Exception + { + if (vi == null) + { + throw new Exception(""no layers defined.""); + } + if (vi.length > 1) + { + for (int i = 0; i < vi.length - 1; i++) + { + if (vi[i] >= vi[i + 1]) + { + throw new Exception( + ""v[i] has to be smaller than v[i+1]""); + } + } + } + else + { + throw new Exception( + ""Rainbow needs at least 1 layer, such that v1 < v2.""); + } + } + + /** + * Getter for the number of layers + * + * @return the number of layers + */ +" +297,1," public boolean isTransferException() { + return transferException; + } + + /** + * If enabled and an Exchange failed processing on the consumer side, and if the caused Exception was send back serialized + * in the response as a application/x-java-serialized-object content type (for example using Jetty or Servlet Camel components). + * On the producer side the exception will be deserialized and thrown as is, instead of the AhcOperationFailedException. + * The caused exception is required to be serialized. + */ +" +298,1," public void setMethods(Set methods) { + this.methods = new HashSet(); + for (String method : methods) { + this.methods.add(method.toUpperCase()); + } + } + + /** + * @param authenticationEntryPoint the authenticationEntryPoint to set + */ +" +299,1," private EnumSet getValidatedExecutableTypes(DefaultValidatedExecutableTypesType validatedExecutables) { + if ( validatedExecutables == null ) { + return null; + } + + EnumSet executableTypes = EnumSet.noneOf( ExecutableType.class ); + executableTypes.addAll( validatedExecutables.getExecutableType() ); + + return executableTypes; + } +" +300,1," public boolean getValidateClientProvidedNewSessionId(); +" +301,1," private File mkdirsE(File dir) throws IOException { + if (dir.exists()) { + return dir; + } + filterNonNull().mkdirs(dir); + return IOUtils.mkdirs(dir); + } + +" +302,1," public static boolean isExpression(Object value) { + String expr = value.toString(); + return expr.startsWith(""%{"") && expr.endsWith(""}""); + } + +" +303,1," protected final GF2Polynomial[] invertMatrix(GF2Polynomial[] matrix) + { + GF2Polynomial[] a = new GF2Polynomial[matrix.length]; + GF2Polynomial[] inv = new GF2Polynomial[matrix.length]; + GF2Polynomial dummy; + int i, j; + // initialize a as a copy of matrix and inv as E(inheitsmatrix) + for (i = 0; i < mDegree; i++) + { + try + { + a[i] = new GF2Polynomial(matrix[i]); + inv[i] = new GF2Polynomial(mDegree); + inv[i].setBit(mDegree - 1 - i); + } + catch (RuntimeException BDNEExc) + { + BDNEExc.printStackTrace(); + } + } + // construct triangle matrix so that for each a[i] the first i bits are + // zero + for (i = 0; i < mDegree - 1; i++) + { + // find column where bit i is set + j = i; + while ((j < mDegree) && !a[j].testBit(mDegree - 1 - i)) + { + j++; + } + if (j >= mDegree) + { + throw new RuntimeException( + ""GF2nField.invertMatrix: Matrix cannot be inverted!""); + } + if (i != j) + { // swap a[i]/a[j] and inv[i]/inv[j] + dummy = a[i]; + 
a[i] = a[j]; + a[j] = dummy; + dummy = inv[i]; + inv[i] = inv[j]; + inv[j] = dummy; + } + for (j = i + 1; j < mDegree; j++) + { // add column i to all columns>i + // having their i-th bit set + if (a[j].testBit(mDegree - 1 - i)) + { + a[j].addToThis(a[i]); + inv[j].addToThis(inv[i]); + } + } + } + // construct Einheitsmatrix from a + for (i = mDegree - 1; i > 0; i--) + { + for (j = i - 1; j >= 0; j--) + { // eliminate the i-th bit in all + // columns < i + if (a[j].testBit(mDegree - 1 - i)) + { + a[j].addToThis(a[i]); + inv[j].addToThis(inv[i]); + } + } + } + return inv; + } + + /** + * Converts the given element in representation according to this field to a + * new element in representation according to B1 using the change-of-basis + * matrix calculated by computeCOBMatrix. + * + * @param elem the GF2nElement to convert + * @param basis the basis to convert elem to + * @return elem converted to a new element representation + * according to basis + * @see GF2nField#computeCOBMatrix + * @see GF2nField#getRandomRoot + * @see GF2nPolynomial + * @see ""P1363 A.7 p109ff"" + */ +" +304,1," public JDBCTableReader getTableReader(Connection connection, String tableName, ParseContext context) { + return new SQLite3TableReader(connection, tableName, context); + } +" +305,1," public Source getAssociatedStylesheet( + Source source, String media, String title, String charset) + throws TransformerConfigurationException + { + + String baseID; + InputSource isource = null; + Node node = null; + XMLReader reader = null; + + if (source instanceof DOMSource) + { + DOMSource dsource = (DOMSource) source; + + node = dsource.getNode(); + baseID = dsource.getSystemId(); + } + else + { + isource = SAXSource.sourceToInputSource(source); + baseID = isource.getSystemId(); + } + + // What I try to do here is parse until the first startElement + // is found, then throw a special exception in order to terminate + // the parse. + StylesheetPIHandler handler = new StylesheetPIHandler(baseID, media, + title, charset); + + // Use URIResolver. Patch from Dmitri Ilyin + if (m_uriResolver != null) + { + handler.setURIResolver(m_uriResolver); + } + + try + { + if (null != node) + { + TreeWalker walker = new TreeWalker(handler, new org.apache.xml.utils.DOM2Helper(), baseID); + + walker.traverse(node); + } + else + { + + // Use JAXP1.1 ( if possible ) + try + { + javax.xml.parsers.SAXParserFactory factory = + javax.xml.parsers.SAXParserFactory.newInstance(); + + factory.setNamespaceAware(true); + + if (m_isSecureProcessing) + { + try + { + factory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true); + } + catch (org.xml.sax.SAXException e) {} + } + + javax.xml.parsers.SAXParser jaxpParser = factory.newSAXParser(); + + reader = jaxpParser.getXMLReader(); + } + catch (javax.xml.parsers.ParserConfigurationException ex) + { + throw new org.xml.sax.SAXException(ex); + } + catch (javax.xml.parsers.FactoryConfigurationError ex1) + { + throw new org.xml.sax.SAXException(ex1.toString()); + } + catch (NoSuchMethodError ex2){} + catch (AbstractMethodError ame){} + + if (null == reader) + { + reader = XMLReaderFactory.createXMLReader(); + } + + // Need to set options! + reader.setContentHandler(handler); + reader.parse(isource); + } + } + catch (StopParseException spe) + { + + // OK, good. 
+ } + catch (org.xml.sax.SAXException se) + { + throw new TransformerConfigurationException( + ""getAssociatedStylesheets failed"", se); + } + catch (IOException ioe) + { + throw new TransformerConfigurationException( + ""getAssociatedStylesheets failed"", ioe); + } + + return handler.getAssociatedStylesheet(); + } + + /** + * Create a new Transformer object that performs a copy + * of the source to the result. + * + * @return A Transformer object that may be used to perform a transformation + * in a single thread, never null. + * + * @throws TransformerConfigurationException May throw this during + * the parse when it is constructing the + * Templates object and fails. + */ +" +306,1," public Calendar ceil(Calendar cal) { + Calendar twoYearsFuture = (Calendar) cal.clone(); + twoYearsFuture.add(Calendar.YEAR, 2); + OUTER: + while (true) { + if (cal.compareTo(twoYearsFuture) > 0) { + // we went too far into the future + throw new RareOrImpossibleDateException(); + } + for (CalendarField f : CalendarField.ADJUST_ORDER) { + int cur = f.valueOf(cal); + int next = f.ceil(this,cur); + if (cur==next) continue; // this field is already in a good shape. move on to next + + // we are modifying this field, so clear all the lower level fields + for (CalendarField l=f.lowerField; l!=null; l=l.lowerField) + l.clear(cal); + + if (next<0) { + // we need to roll over to the next field. + f.rollUp(cal, 1); + f.setTo(cal,f.first(this)); + // since higher order field is affected by this, we need to restart from all over + continue OUTER; + } else { + f.setTo(cal,next); + if (f.redoAdjustmentIfModified) + continue OUTER; // when we modify DAY_OF_MONTH and DAY_OF_WEEK, do it all over from the top + } + } + return cal; // all fields adjusted + } + } + + /** + * Computes the nearest past timestamp that matched this cron tab. + *
+ * More precisely, given the time 't', computes another smallest time x such that: + * + *
+ * • x <= t (inclusive)
+ * • x matches this crontab
+ * + *
+ * Note that if t already matches this cron, it's returned as is. + */ +" +307,1," public void run() { + try { + ObjectInputStream in = new ObjectInputStream(socket.getInputStream()); + while (!closed) { + Message msg = (Message) in.readObject(); + handle(msg); + } + } catch (EOFException eof) { + // Remote side has closed the connection, just cleanup. + try { + close(); + } catch (Exception unused) { + // no-op. + } + } catch (Exception e) { + if (!closed) { + LOG.log(Level.WARNING, ""Error in inbound message handling."", e); + try { + close(); + } catch (Exception unused) { + // no-op. + } + } + } + } + +" +308,1," public void setAllowJavaSerializedObject(boolean allowJavaSerializedObject) { + this.allowJavaSerializedObject = allowJavaSerializedObject; + } +" +309,1," private void updateGraph(ActionRequest actionRequest, + ActionResponse actionResponse) { + DBManager DBase = new DBManager(); + Connection con = DBase.getConnection(); + String graph_id = actionRequest.getParameter(""graph_id""); + actionResponse.setRenderParameter(""graph_id"", graph_id); + + String name = actionRequest.getParameter(""name""); + String description = actionRequest.getParameter(""description""); + String server_id = actionRequest.getParameter(""server_id""); + String xlabel = actionRequest.getParameter(""xlabel""); + String ylabel = actionRequest.getParameter(""ylabel""); + String timeframe = actionRequest.getParameter(""timeframe""); + String mbean = actionRequest.getParameter(""mbean""); + String dataname1 = actionRequest.getParameter(""dataname1""); + String data1operation = actionRequest.getParameter(""data1operation""); + String operation = actionRequest.getParameter(""operation""); + int archive = 0; + if (actionRequest.getParameter(""showArchive"") != null + && actionRequest.getParameter(""showArchive"").equals(""on"")) { + archive = 1; + } + + if (operation.equals(""other"")) { + operation = actionRequest.getParameter(""othermath""); + } + String dataname2 = actionRequest.getParameter(""dataname2""); + String data2operation = actionRequest.getParameter(""data2operation""); + if (data2operation == null) + data2operation = ""A""; + try { + PreparedStatement pStmt = con + .prepareStatement(""UPDATE graphs SET server_id="" + + server_id + + "", name='"" + + name + + ""', description='"" + + description + + ""', timeframe="" + + timeframe + + "", mbean='"" + + mbean + + ""', dataname1='"" + + dataname1 + + ""', xlabel='"" + + xlabel + + ""', ylabel='"" + + ylabel + + ""', data1operation='"" + + data1operation + + ""', operation='"" + + operation + + ""', data2operation='"" + + data2operation + + ""', dataname2='"" + + dataname2 + + ""', warninglevel1=0, warninglevel2=0, modified=CURRENT_TIMESTAMP, archive="" + + archive + "" WHERE graph_id="" + graph_id); + pStmt.executeUpdate(); + con.close(); + actionResponse.setRenderParameter(""message"", + ""Graph "" + name + + "" has been updated.""); + return; + + } catch (Exception e) { + actionResponse.setRenderParameter(""message"", + ""Error editing graph "" + + e.getMessage()); + return; + } + } + +" +310,1," protected void onSuccessfulAuthentication(HttpServletRequest request, HttpServletResponse response, Authentication authResult) throws IOException { + super.onSuccessfulAuthentication(request,response,authResult); + // make sure we have a session to store this successful authentication, given that we no longer + // let HttpSessionContextIntegrationFilter2 to create sessions. 
+ // HttpSessionContextIntegrationFilter stores the updated SecurityContext object into this session later + // (either when a redirect is issued, via its HttpResponseWrapper, or when the execution returns to its + // doFilter method. + request.getSession(); + } + + /** + * Leave the information about login failure. + * + *
+ * Otherwise it seems like Acegi doesn't really leave the detail of the failure anywhere. + */ + @Override +" +311,1," private Cache verifyCacheExists() { + int timeToWait = 0; + Cache cache = null; + while (timeToWait < TIME_TO_WAIT_FOR_CACHE) { + try { + cache = CacheFactory.getAnyInstance(); + break; + } catch (Exception ignore) { + // keep trying and hope for the best + } + try { + Thread.sleep(250); + } catch (InterruptedException ie) { + Thread.currentThread().interrupt(); + break; + } + timeToWait += 250; + } + + if (cache == null) { + cache = new CacheFactory().create(); + } + + return cache; + } + +" +312,1," public void clientCredentials_byDefault_WillNotLockoutDuringFailedBasicAuthAndFormData() throws Exception { + String clientId = ""testclient"" + generator.generate(); + String scopes = ""space.*.developer,space.*.admin,org.*.reader,org.123*.admin,*.*,*""; + setUpClients(clientId, scopes, scopes, GRANT_TYPES, true); + + String body = null; + for(int i = 0; i < 3; i++){ + body = getMockMvc().perform(post(""/oauth/token"") + .accept(MediaType.APPLICATION_JSON_VALUE) + .header(""Authorization"", ""Basic "" + new String(Base64.encode((clientId + "":"" + BADSECRET).getBytes()))) + .param(""grant_type"", ""client_credentials"") + ) + .andExpect(status().isUnauthorized()) + .andReturn().getResponse().getContentAsString(); + + body = getMockMvc().perform(post(""/oauth/token"") + .accept(MediaType.APPLICATION_JSON_VALUE) + .contentType(MediaType.APPLICATION_FORM_URLENCODED_VALUE) + .param(""grant_type"", ""client_credentials"") + .param(""client_id"", clientId) + .param(""client_secret"", BADSECRET) + ) + .andExpect(status().isUnauthorized()) + .andReturn().getResponse().getContentAsString(); + + } + + body = getMockMvc().perform(post(""/oauth/token"") + .accept(MediaType.APPLICATION_JSON_VALUE) + .header(""Authorization"", ""Basic "" + new String(Base64.encode((clientId + "":"" + SECRET).getBytes()))) + .param(""grant_type"", ""client_credentials"") + ) + .andExpect(status().isOk()) + .andReturn().getResponse().getContentAsString(); + } + + @Test +" +313,1," public String toStringInternal() { + String strValue=null; + try { + if( enc==null ) enc=DEFAULT_CHARACTER_ENCODING; + strValue = new String( buff, start, end-start, enc ); + /* + Does not improve the speed too much on most systems, + it's safer to use the ""clasical"" new String(). + + Most overhead is in creating char[] and copying, + the internal implementation of new String() is very close to + what we do. 
The decoder is nice for large buffers and if + we don't go to String ( so we can take advantage of reduced GC) + + // Method is commented out, in: + return B2CConverter.decodeString( enc ); + */ + } catch (java.io.UnsupportedEncodingException e) { + // Use the platform encoding in that case; the usage of a bad + // encoding will have been logged elsewhere already + strValue = new String(buff, start, end-start); + } + return strValue; + } + +" +314,1," public void repositoryVerificationTimeoutTest() throws Exception { + Client client = client(); + + Settings settings = ImmutableSettings.settingsBuilder() + .put(""location"", newTempDir(LifecycleScope.SUITE)) + .put(""random_control_io_exception_rate"", 1.0).build(); + logger.info(""--> creating repository that cannot write any files - should fail""); + assertThrows(client.admin().cluster().preparePutRepository(""test-repo-1"") + .setType(MockRepositoryModule.class.getCanonicalName()).setSettings(settings), + RepositoryVerificationException.class); + + logger.info(""--> creating repository that cannot write any files, but suppress verification - should be acked""); + assertAcked(client.admin().cluster().preparePutRepository(""test-repo-1"") + .setType(MockRepositoryModule.class.getCanonicalName()).setSettings(settings).setVerify(false)); + + logger.info(""--> verifying repository""); + assertThrows(client.admin().cluster().prepareVerifyRepository(""test-repo-1""), RepositoryVerificationException.class); + + File location = newTempDir(LifecycleScope.SUITE); + + logger.info(""--> creating repository""); + try { + client.admin().cluster().preparePutRepository(""test-repo-1"") + .setType(MockRepositoryModule.class.getCanonicalName()) + .setSettings(ImmutableSettings.settingsBuilder() + .put(""location"", location) + .put(""localize_location"", true) + ).get(); + fail(""RepositoryVerificationException wasn't generated""); + } catch (RepositoryVerificationException ex) { + assertThat(ex.getMessage(), containsString(""is not shared"")); + } + } + +" +315,1," public void execute(String key, ActionMapping mapping) { + String location = key.substring(REDIRECT_ACTION_PREFIX + .length()); + ServletRedirectResult redirect = new ServletRedirectResult(); + container.inject(redirect); + String extension = getDefaultExtension(); + if (extension != null && extension.length() > 0) { + location += ""."" + extension; + } + redirect.setLocation(location); + mapping.setResult(redirect); + } + }); + } + }; + } + + /** + * Adds a parameter action. 
Should only be called during initialization + * + * @param prefix The string prefix to trigger the action + * @param parameterAction The parameter action to execute + * @since 2.1.0 + */ +" +316,1," public void noUserSearchCausesUsernameNotFound() throws Exception { + DirContext ctx = mock(DirContext.class); + when(ctx.getNameInNamespace()).thenReturn(""""); + when(ctx.search(any(Name.class), any(String.class), any(Object[].class), any(SearchControls.class))) + .thenReturn(new EmptyEnumeration()); + + provider.contextFactory = createContextFactoryReturning(ctx); + + provider.authenticate(joe); + } + + @SuppressWarnings(""unchecked"") + @Test(expected = IncorrectResultSizeDataAccessException.class) +" +317,1," protected void addEmptyValueMapping(DefaultMapper mapper, String field, Object model) { + ParserContext parserContext = new FluentParserContext().evaluate(model.getClass()); + Expression target = expressionParser.parseExpression(field, parserContext); + try { + Class propertyType = target.getValueType(model); + Expression source = new StaticExpression(getEmptyValue(propertyType)); + DefaultMapping mapping = new DefaultMapping(source, target); + if (logger.isDebugEnabled()) { + logger.debug(""Adding empty value mapping for parameter '"" + field + ""'""); + } + mapper.addMapping(mapping); + } catch (EvaluationException e) { + } + } + + /** + * Adds a {@link DefaultMapping} between the given request parameter name and a matching model field. + * + * @param mapper the mapper to add the mapping to + * @param parameter the request parameter name + * @param model the model + */ +" +318,1," private boolean evaluate(String text) { + try { + InputSource inputSource = new InputSource(new StringReader(text)); + return ((Boolean)expression.evaluate(inputSource, XPathConstants.BOOLEAN)).booleanValue(); + } catch (XPathExpressionException e) { + return false; + } + } + + @Override +"