gt
stringclasses
1 value
context
stringlengths
2.05k
161k
package com.planet_ink.coffee_mud.WebMacros;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
   Copyright 2000-2010 Bo Zimmerman

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/
/**
 * Web macro that renders pieces of a player (MOB) record for the admin web
 * pages: basic stats, inventory, clan/deity/race HTML OPTION lists, title
 * editor rows, etc.  Which pieces are emitted is driven by the macro's
 * parameter list (see {@link #BASICS}) plus the incoming HTTP request.
 * Most values are emitted with a trailing ", " separator; runMacro() strips
 * the final one before returning.
 */
@SuppressWarnings("unchecked")
public class PlayerData extends StdWebMacro
{
	/** Macro name: the simple (unqualified) class name, i.e. "PlayerData". */
	public String name() {return this.getClass().getName().substring(this.getClass().getName().lastIndexOf('.')+1);}

	/**
	 * Keyword list understood by this macro.  The array index of each keyword
	 * is exactly the switch-case number handled in {@link #getBasic}.
	 */
	public final static String[] BASICS={
		"NAME", "DESCRIPTION", "LASTDATETIME", "EMAIL", "RACENAME",
		"CHARCLASS", "LEVEL", "LEVELSTR", "CLASSLEVEL", "CLASSES",
		"MAXCARRY", "ATTACKNAME", "ARMORNAME", "DAMAGENAME", "HOURS",
		"PRACTICES", "EXPERIENCE", "EXPERIENCELEVEL", "TRAINS", "MONEY",
		"DEITYNAME", "LIEGE", "CLANNAME", "CLANROLE", "ALIGNMENTNAME",
		"ALIGNMENTSTRING", "WIMP", "STARTROOM", "LOCATION", "STARTROOMID",
		"LOCATIONID", "INVENTORY", "WEIGHT", "ENCUMBRANCE", "GENDERNAME",
		"LASTDATETIMEMILLIS", "HITPOINTS", "MANA", "MOVEMENT", "RIDING",
		"HEIGHT", "LASTIP", "QUESTPOINTS", "BASEHITPOINTS", "BASEMANA",
		"BASEMOVEMENT", "IMAGE", "MAXITEMS", "IMGURL", "HASIMG",
		"NOTES", "LEVELS", "ATTACK", "DAMAGE", "ARMOR",
		"SPEEDNAME", "SPEED", "EXPERTISE", "TATTOOS", "SECURITY",
		"TITLES", "FACTIONNAMES", "ACCTEXPUSED", "ACCTEXP", "FOLLOWERNAMES",
		"ACCOUNT"
	};

	/**
	 * Returns the index of the given keyword in {@link #BASICS}
	 * (case-insensitive), or -1 if the keyword is unknown.
	 */
	public static int getBasicCode(String val)
	{
		for(int i=0;i<BASICS.length;i++)
			if(val.equalsIgnoreCase(BASICS[i]))
				return i;
		return -1;
	}

	/**
	 * Renders the BASICS field with index {@code i} for mob {@code M} as a
	 * string, usually followed by ", ".  Unknown indexes and null sub-objects
	 * (e.g. no playerStats) yield an empty string.
	 *
	 * @param M  the mob/player whose data to render
	 * @param i  index into {@link #BASICS}
	 * @return the rendered value (possibly empty), typically ", "-terminated
	 */
	public static String getBasic(MOB M, int i)
	{
		StringBuffer str=new StringBuffer("");
		switch(i)
		{
		case 0: str.append(M.Name()+", "); break; // NAME
		case 1: str.append(M.description()+", "); break; // DESCRIPTION
		case 2: // LASTDATETIME (formatted)
			if(M.playerStats()!=null)
				str.append(CMLib.time().date2String(M.playerStats().lastDateTime())+", ");
			break;
		case 3: // EMAIL
			if(M.playerStats()!=null)
				str.append(M.playerStats().getEmail()+", ");
			break;
		case 4: str.append(M.baseCharStats().getMyRace().name()+", "); break; // RACENAME
		case 5: str.append(M.baseCharStats().getCurrentClass().name(M.baseCharStats().getCurrentClassLevel())+", "); break; // CHARCLASS
		case 6: str.append(M.baseEnvStats().level()+", "); break; // LEVEL
		case 7: str.append(M.baseCharStats().displayClassLevel(M,true)+", "); break; // LEVELSTR
		case 8: str.append(M.baseCharStats().getClassLevel(M.baseCharStats().getCurrentClass())+", "); break; // CLASSLEVEL
		case 9: // CLASSES: every class this mob has, newest first, "Name (level) "
		{
			for(int c=M.charStats().numClasses()-1;c>=0;c--)
			{
				CharClass C=M.charStats().getMyClass(c);
				str.append(C.name(M.baseCharStats().getCurrentClassLevel())+" ("+M.charStats().getClassLevel(C)+") ");
			}
			str.append(", ");
			break;
		}
		case 10: // MAXCARRY: huge values (> MAX_VALUE/3) shown as "NA"
			if(M.maxCarry()>(Integer.MAX_VALUE/3))
				str.append("NA, ");
			else
				str.append(M.maxCarry()+", ");
			break;
		case 11: str.append(CMStrings.capitalizeAndLower(CMLib.combat().fightingProwessStr(M))+", "); break; // ATTACKNAME
		case 12: str.append(CMStrings.capitalizeAndLower(CMLib.combat().armorStr(M))+", "); break; // ARMORNAME
		case 13: str.append(CMLib.combat().adjustedDamage(M,null,null)+", "); break; // DAMAGENAME
		case 14: str.append(Math.round(CMath.div(M.getAgeHours(),60.0))+", "); break; // HOURS: age-hours/60 — presumably converts to a larger unit; TODO confirm units
		case 15: str.append(M.getPractices()+", "); break; // PRACTICES
		case 16: str.append(M.getExperience()+", "); break; // EXPERIENCE
		case 17: // EXPERIENCELEVEL: xp still needed, or N/A when capped
			if(M.getExpNeededLevel()==Integer.MAX_VALUE)
				str.append("N/A, ");
			else
				str.append(M.getExpNextLevel()+", ");
			break;
		case 18: str.append(M.getTrains()+", "); break; // TRAINS
		case 19: str.append(CMLib.beanCounter().getMoney(M)+", "); break; // MONEY
		case 20: str.append(M.getWorshipCharID()+", "); break; // DEITYNAME
		case 21: str.append(M.getLiegeID()+", "); break; // LIEGE
		case 22: str.append(M.getClanID()+", "); break; // CLANNAME
		case 23: // CLANROLE: role name looked up from the clan's government
			if(M.getClanID().length()>0)
			{
				Clan C=CMLib.clans().getClan(M.getClanID());
				if(C!=null)
					str.append(CMLib.clans().getRoleName(C.getGovernment(),M.getClanRole(),true,false)+", ");
			}
			break;
		case 24: str.append(M.fetchFaction(CMLib.factions().AlignID())+", "); break; // ALIGNMENTNAME (numeric faction value)
		case 25: // ALIGNMENTSTRING: range name if one matches the value
		{
			Faction.FactionRange FR=CMLib.factions().getRange(CMLib.factions().AlignID(),M.fetchFaction(CMLib.factions().AlignID()));
			if(FR!=null)
				str.append(FR.name()+", ");
			else
				str.append(M.fetchFaction(CMLib.factions().AlignID())); // NOTE(review): no ", " on this branch — verify intentional
			break;
		}
		case 26: str.append(M.getWimpHitPoint()+", "); break; // WIMP
		case 27: // STARTROOM (display text)
			if(M.getStartRoom()!=null)
				str.append(M.getStartRoom().displayText()+", ");
			break;
		case 28: // LOCATION (display text)
			if(M.location()!=null)
				str.append(M.location().displayText()+", ");
			break;
		case 29: // STARTROOMID
			if(M.getStartRoom()!=null)
				str.append(M.getStartRoom().roomID()+", ");
			break;
		case 30: // LOCATIONID
			if(M.location()!=null)
				str.append(M.location().roomID()+", ");
			break;
		case 31: // INVENTORY: top-level (uncontained) items only
		{
			for(int inv=0;inv<M.inventorySize();inv++)
			{
				Item I=M.fetchInventory(inv);
				if((I!=null)&&(I.container()==null))
					str.append(I.name()+", ");
			}
			break;
		}
		case 32: str.append(M.baseEnvStats().weight()+", "); break; // WEIGHT (base)
		case 33: str.append(M.envStats().weight()+", "); break; // ENCUMBRANCE (current/effective weight)
		case 34: str.append(CMStrings.capitalizeAndLower(M.baseCharStats().genderName())+", "); break; // GENDERNAME
		case 35: // LASTDATETIMEMILLIS (raw long)
			if(M.playerStats()!=null)
				str.append(M.playerStats().lastDateTime()+", ");
			break;
		case 36: str.append(M.curState().getHitPoints()+", "); break; // HITPOINTS
		case 37: str.append(M.curState().getMana()+", "); break; // MANA
		case 38: str.append(M.curState().getMovement()+", "); break; // MOVEMENT
		case 39: // RIDING
			if(M.riding()!=null)
				str.append(M.riding().name()+", ");
			break;
		case 40: str.append(M.baseEnvStats().height()+", "); break; // HEIGHT
		case 41: // LASTIP: live session address if online, else stored last IP
			if(!M.isMonster())
				str.append(M.session().getAddress()+", ");
			else
			if(M.playerStats()!=null)
				str.append(M.playerStats().lastIP()+", ");
			break;
		case 42: str.append(M.getQuestPoint()+", "); break; // QUESTPOINTS
		case 43: str.append(M.maxState().getHitPoints()+", "); break; // BASEHITPOINTS
		case 44: str.append(M.maxState().getMana()+", "); break; // BASEMANA
		case 45: str.append(M.maxState().getMovement()+", "); break; // BASEMOVEMENT
		case 46: str.append(M.rawImage()+", "); break; // IMAGE
		case 47: str.append(M.maxItems()+", "); break; // MAXITEMS
		case 48: // IMGURL: path+file when an MXP image path is configured
		{
			String[] paths=CMProps.mxpImagePath(M.image());
			if(paths[0].length()>0)
				str.append(paths[0]+paths[1]+", ");
			break;
		}
		case 49: // HASIMG
			if(CMProps.mxpImagePath(M.image())[0].length()>0)
				str.append("true, ");
			else
				str.append("false, ");
			break;
		case 50: // NOTES
			if(M.playerStats()!=null)
				str.append(M.playerStats().notes()+", ");
			break;
		case 51: // LEVELS: HTML table rows of level-up dates (level 0 = "Created")
			if(M.playerStats()!=null)
			{
				long lastDateTime=-1;
				// NOTE(review): lastDateTime is never reassigned inside the
				// loop, so the dateTime!=lastDateTime dedup check can never
				// filter anything — verify against upstream source.
				for(int level=0;level<=M.envStats().level();level++)
				{
					long dateTime=M.playerStats().leveledDateTime(level);
					// 1529122205: magic cutoff — presumably "a plausible real
					// date" sanity check; TODO confirm meaning/units.
					if((dateTime>1529122205)&&(dateTime!=lastDateTime))
					{
						str.append("<TR>");
						if(level==0)
							str.append("<TD><FONT COLOR=WHITE>Created</FONT></TD>");
						else
							str.append("<TD><FONT COLOR=WHITE>"+level+"</FONT></TD>");
						str.append("<TD><FONT COLOR=WHITE>"+CMLib.time().date2String(dateTime)+"</FONT></TD></TR>");
					}
				}
				str.append(", ");
			}
			break;
		case 52: str.append(M.baseEnvStats().attackAdjustment()+", "); break; // ATTACK
		case 53: str.append(M.baseEnvStats().damage()+", "); break; // DAMAGE
		case 54: str.append(M.baseEnvStats().armor()+", "); break; // ARMOR
		case 55: str.append(M.envStats().speed()+", "); break; // SPEEDNAME (current speed)
		case 56: str.append(M.baseEnvStats().speed()+", "); break; // SPEED (base speed)
		case 57: // EXPERTISE: friendly names where a definition exists
		{
			for(int e=0;e<M.numExpertises();e++)
			{
				String E=M.fetchExpertise(e);
				ExpertiseLibrary.ExpertiseDefinition X=CMLib.expertises().getDefinition(E);
				if(X==null)
					str.append(E+", ");
				else
					str.append(X.name+", ");
			}
			break;
		}
		case 58: // TATTOOS
		{
			for(int t=0;t<M.numTattoos();t++)
			{
				String E=M.fetchTattoo(t);
				str.append(E+", ");
			}
			break;
		}
		case 59: // SECURITY: security group memberships
		{
			if(M.playerStats()!=null)
				for(int b=0;b<M.playerStats().getSecurityGroups().size();b++)
				{
					String B=(String)M.playerStats().getSecurityGroups().elementAt(b);
					if(B!=null)
						str.append(B+", ");
				}
			break;
		}
		case 60: // TITLES
		{
			if(M.playerStats()!=null)
				for(int b=0;b<M.playerStats().getTitles().size();b++)
				{
					String B=(String)M.playerStats().getTitles().elementAt(b);
					if(B!=null)
						str.append(B+", ");
				}
			break;
		}
		case 61: // FACTIONNAMES: "Name (value), " for each known faction
		{
			for(Enumeration e=M.fetchFactions();e.hasMoreElements();)
			{
				String FID=(String)e.nextElement();
				Faction F=CMLib.factions().getFaction(FID);
				int value=M.fetchFaction(FID);
				if(F!=null)
					str.append(F.name()+" ("+value+"), ");
			}
			break;
		}
		case 62: str.append(CMProps.getBoolVar(CMProps.SYSTEMB_ACCOUNTEXPIRATION)?"true":"false"); break; // ACCTEXPUSED (no trailing ", ")
		case 63: // ACCTEXP (no trailing ", ")
			if(M.playerStats()!=null)str.append(CMLib.time().date2String(M.playerStats().getAccountExpiration()));
			break;
		case 64: // FOLLOWERNAMES
		{
			for(int f=0;f<M.numFollowers();f++)
				str.append(M.fetchFollower(f).name()).append(", ");
			//Vector V=CMLib.database().DBScanFollowers(M);
			//for(int v=0;v<V.size();v++)
			//	str.append(((MOB)V.elementAt(v)).name()).append(", ");
			break;
		}
		case 65: // ACCOUNT (no trailing ", ")
			if((M.playerStats()!=null)&&(M.playerStats().getAccount()!=null))
				str.append(M.playerStats().getAccount().accountName());
			break;
		}
		return str.toString();
	}

	/**
	 * Macro entry point.  Loads the player named by the "PLAYER" request
	 * parameter (falling back to the authenticated mob if it matches), then
	 * appends one fragment per recognized macro parameter: AUTODESC flags,
	 * current and base char stats, BASICS fields, and HTML OPTION/INPUT lists
	 * for RACE/DEITY/TITLELIST/CLAN/ALIGNMENT/BASEGENDER, plus delegated
	 * MobData/AreaData/ExitData sections.  Returns " @break@" when no player
	 * can be resolved, and "" when PLAYER is present but empty.
	 */
	public String runMacro(ExternalHTTPRequests httpReq, String parm)
	{
		// Don't serve player data until the mud has finished booting.
		if(!CMProps.getBoolVar(CMProps.SYSTEMB_MUDSTARTED))
			return CMProps.getVar(CMProps.SYSTEM_MUDSTATUS);
		Hashtable parms=parseParms(parm);
		String last=httpReq.getRequestParameter("PLAYER");
		if(last==null)
			return " @break@";
		if(last.length()>0)
		{
			MOB M=CMLib.players().getLoadPlayer(last);
			if(M==null)
			{
				// Fall back to the logged-in web user if the names match.
				MOB authM=Authenticate.getAuthenticatedMob(httpReq);
				if((authM!=null)&&(authM.Name().equalsIgnoreCase(last)))
					M=authM;
				else
					return " @break@";
			}
			// First render of the form: no ACTION parameter yet, or an
			// explicit ACTION=FIRSTTIME.
			boolean firstTime=(!httpReq.isRequestParameter("ACTION")) ||(httpReq.getRequestParameter("ACTION")).equals("FIRSTTIME");
			StringBuffer str=new StringBuffer("");
			// AUTODESC bitmap flags, rendered "ON,"/"OFF," (AUTOREV inverts).
			for(int i=0;i<MOB.AUTODESC.length;i++)
			{
				if(parms.containsKey(MOB.AUTODESC[i]))
				{
					boolean set=CMath.isSet(M.getBitmap(),i);
					if(MOB.AUTOREV[i])
						set=!set;
					str.append((set?"ON":"OFF")+",");
				}
			}
			// Current char stats (GENDER handled separately below).
			// NOTE(review): if the stat parameter is absent, getRequestParameter
			// may return null and old.length() would NPE when !firstTime — verify.
			for(int i : CharStats.CODES.ALL())
			{
				String stat=CharStats.CODES.NAME(i);
				if(!stat.equalsIgnoreCase("GENDER"))
				{
					CharStats C=M.charStats();
					if(parms.containsKey(stat))
					{
						String old=httpReq.getRequestParameter(stat);
						if((firstTime)||(old.length()==0))
						{
							if((!CharStats.CODES.isBASE(i))&&(i!=CharStats.STAT_GENDER))
								old=""+C.getSave(i);
							else
								old=""+C.getStat(i);
						}
						str.append(old+", ");
					}
				}
			}
			// Base char stats, parameter names prefixed "BASE".
			for(int i : CharStats.CODES.ALL())
			{
				String stat=CharStats.CODES.NAME(i);
				if(!stat.equalsIgnoreCase("GENDER"))
				{
					CharStats C=M.baseCharStats();
					if(parms.containsKey("BASE"+stat))
					{
						String old=httpReq.getRequestParameter("BASE"+stat);
						if((firstTime)||(old.length()==0))
							old=""+C.getStat(i);
						str.append(old+", ");
					}
				}
			}
			// BASICS fields: echo the submitted value if present, else render
			// the stored value via getBasic().
			for(int i=0;i<BASICS.length;i++)
			{
				if(parms.containsKey(BASICS[i]))
				{
					if(httpReq.isRequestParameter(BASICS[i]))
						str.append(httpReq.getRequestParameter(BASICS[i])+", ");
					else
						str.append(getBasic(M,i));
				}
			}
			// RACE: OPTION list of all races, current race SELECTED.
			if(parms.containsKey("RACE"))
			{
				String old=httpReq.getRequestParameter("RACE");
				if((firstTime)||(old.length()==0))
					old=""+M.baseCharStats().getMyRace().ID();
				for(Enumeration r=CMClass.races();r.hasMoreElements();)
				{
					Race R2=(Race)r.nextElement();
					str.append("<OPTION VALUE=\""+R2.ID()+"\"");
					if(R2.ID().equals(old))
						str.append(" SELECTED");
					str.append(">"+R2.name());
				}
			}
			// DEITY: OPTION list, leading "Godless" entry for no deity.
			if(parms.containsKey("DEITY"))
			{
				String old=httpReq.getRequestParameter("DEITY");
				if(firstTime)
					old=M.getWorshipCharID();
				str.append("<OPTION "+((old.length()==0)?"SELECTED":"")+" VALUE=\"\">Godless");
				for(Enumeration e=CMLib.map().deities();e.hasMoreElements();)
				{
					Deity E=(Deity)e.nextElement();
					str.append("<OPTION VALUE=\""+E.Name()+"\"");
					if(E.Name().equalsIgnoreCase(old))
						str.append(" SELECTED");
					str.append(">"+E.Name());
				}
			}
			// TITLELIST: one text INPUT per existing title (quotes escaped),
			// plus one trailing blank INPUT for adding a new title.
			if(parms.containsKey("TITLELIST"))
			{
				if(M.playerStats()!=null)
				{
					int b=0;
					Vector titles=new Vector();
					if(firstTime)
						CMParms.addToVector(M.playerStats().getTitles(),titles);
					else
					while(httpReq.isRequestParameter("TITLE"+b))
					{
						String B=httpReq.getRequestParameter("TITLE"+b);
						if((B!=null)&&(B.trim().length()>0))
							titles.addElement(B);
						b++;
					}
					for(b=0;b<titles.size();b++)
					{
						String B=(String)titles.elementAt(b);
						if(B!=null)
							str.append("<INPUT TYPE=TEXT NAME=TITLE"+b+" SIZE="+B.length()+" VALUE=\""+CMStrings.replaceAll(B,"\"","&quot;")+"\"><BR>");
					}
					str.append("<INPUT TYPE=TEXT NAME=TITLE"+titles.size()+" SIZE=60 VALUE=\"\">");
				}
			}
			// CLAN: OPTION list, leading "Clanless" entry.
			if(parms.containsKey("CLAN"))
			{
				String old=httpReq.getRequestParameter("CLAN");
				if(firstTime)
					old=M.getClanID();
				str.append("<OPTION "+((old.length()==0)?"SELECTED":"")+" VALUE=\"\">Clanless");
				for(Enumeration e=CMLib.clans().allClans();e.hasMoreElements();)
				{
					Clan C=(Clan)e.nextElement();
					str.append("<OPTION VALUE=\""+C.clanID()+"\"");
					if(C.clanID().equalsIgnoreCase(old))
						str.append(" SELECTED");
					str.append(">"+C.getName());
				}
			}
			// ALIGNMENT: OPTION list of named alignments (index 0 skipped),
			// only when the alignment faction is actually loaded.
			if(parms.containsKey("ALIGNMENT"))
			{
				String old=httpReq.getRequestParameter("ALIGNMENT");
				if((firstTime)||(old.length()==0))
					old=""+M.fetchFaction(CMLib.factions().AlignID());
				if(CMLib.factions().getFaction(CMLib.factions().AlignID())!=null)
				{
					for(int v=1;v<Faction.ALIGN_NAMES.length;v++)
					{
						str.append("<OPTION VALUE="+Faction.ALIGN_NAMES[v]);
						if(old.equalsIgnoreCase(Faction.ALIGN_NAMES[v]))
							str.append(" SELECTED");
						str.append(">"+CMStrings.capitalizeAndLower(Faction.ALIGN_NAMES[v].toLowerCase()));
					}
				}
			}
			// BASEGENDER: fixed M/F/N OPTION list.
			if(parms.containsKey("BASEGENDER"))
			{
				String old=httpReq.getRequestParameter("BASEGENDER");
				if(firstTime)
					old=""+M.baseCharStats().getStat(CharStats.STAT_GENDER);
				str.append("<OPTION VALUE=M "+((old.equalsIgnoreCase("M"))?"SELECTED":"")+">M");
				str.append("<OPTION VALUE=F "+((old.equalsIgnoreCase("F"))?"SELECTED":"")+">F");
				str.append("<OPTION VALUE=N "+((old.equalsIgnoreCase("N"))?"SELECTED":"")+">N");
			}
			// Delegated sections handled by sibling web macro helpers.
			str.append(MobData.expertiseList(M,httpReq,parms));
			str.append(MobData.classList(M,httpReq,parms));
			str.append(MobData.itemList(M,M,httpReq,parms,0));
			str.append(MobData.abilities(M,httpReq,parms,0));
			str.append(MobData.factions(M,httpReq,parms,0));
			str.append(AreaData.affectsNBehaves(M,httpReq,parms,0));
			str.append(ExitData.dispositions(M,firstTime,httpReq,parms));
			str.append(MobData.senses(M,firstTime,httpReq,parms));
			// Trim the final ", " separator before expanding nested macros.
			String strstr=str.toString();
			if(strstr.endsWith(", "))
				strstr=strstr.substring(0,strstr.length()-2);
			return clearWebMacros(strstr);
		}
		return "";
	}
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.classroom.model; /** * Announcement created by a teacher for students of the course * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Google Classroom API. For a detailed explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class Announcement extends com.google.api.client.json.GenericJson { /** * Absolute link to this announcement in the Classroom web UI. This is only populated if `state` * is `PUBLISHED`. * * Read-only. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String alternateLink; /** * Assignee mode of the announcement. If unspecified, the default value is `ALL_STUDENTS`. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String assigneeMode; /** * Identifier of the course. * * Read-only. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String courseId; /** * Timestamp when this announcement was created. * * Read-only. 
* The value may be {@code null}. */ @com.google.api.client.util.Key private String creationTime; /** * Identifier for the user that created the announcement. * * Read-only. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String creatorUserId; /** * Classroom-assigned identifier of this announcement, unique per course. * * Read-only. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String id; /** * Identifiers of students with access to the announcement. This field is set only if * `assigneeMode` is `INDIVIDUAL_STUDENTS`. If the `assigneeMode` is `INDIVIDUAL_STUDENTS`, then * only students specified in this field will be able to see the announcement. * The value may be {@code null}. */ @com.google.api.client.util.Key private IndividualStudentsOptions individualStudentsOptions; /** * Additional materials. * * Announcements must have no more than 20 material items. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<Material> materials; /** * Optional timestamp when this announcement is scheduled to be published. * The value may be {@code null}. */ @com.google.api.client.util.Key private String scheduledTime; /** * Status of this announcement. If unspecified, the default state is `DRAFT`. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String state; /** * Description of this announcement. The text must be a valid UTF-8 string containing no more than * 30,000 characters. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String text; /** * Timestamp of the most recent change to this announcement. * * Read-only. * The value may be {@code null}. */ @com.google.api.client.util.Key private String updateTime; /** * Absolute link to this announcement in the Classroom web UI. This is only populated if `state` * is `PUBLISHED`. * * Read-only. 
* @return value or {@code null} for none */ public java.lang.String getAlternateLink() { return alternateLink; } /** * Absolute link to this announcement in the Classroom web UI. This is only populated if `state` * is `PUBLISHED`. * * Read-only. * @param alternateLink alternateLink or {@code null} for none */ public Announcement setAlternateLink(java.lang.String alternateLink) { this.alternateLink = alternateLink; return this; } /** * Assignee mode of the announcement. If unspecified, the default value is `ALL_STUDENTS`. * @return value or {@code null} for none */ public java.lang.String getAssigneeMode() { return assigneeMode; } /** * Assignee mode of the announcement. If unspecified, the default value is `ALL_STUDENTS`. * @param assigneeMode assigneeMode or {@code null} for none */ public Announcement setAssigneeMode(java.lang.String assigneeMode) { this.assigneeMode = assigneeMode; return this; } /** * Identifier of the course. * * Read-only. * @return value or {@code null} for none */ public java.lang.String getCourseId() { return courseId; } /** * Identifier of the course. * * Read-only. * @param courseId courseId or {@code null} for none */ public Announcement setCourseId(java.lang.String courseId) { this.courseId = courseId; return this; } /** * Timestamp when this announcement was created. * * Read-only. * @return value or {@code null} for none */ public String getCreationTime() { return creationTime; } /** * Timestamp when this announcement was created. * * Read-only. * @param creationTime creationTime or {@code null} for none */ public Announcement setCreationTime(String creationTime) { this.creationTime = creationTime; return this; } /** * Identifier for the user that created the announcement. * * Read-only. * @return value or {@code null} for none */ public java.lang.String getCreatorUserId() { return creatorUserId; } /** * Identifier for the user that created the announcement. * * Read-only. 
* @param creatorUserId creatorUserId or {@code null} for none */ public Announcement setCreatorUserId(java.lang.String creatorUserId) { this.creatorUserId = creatorUserId; return this; } /** * Classroom-assigned identifier of this announcement, unique per course. * * Read-only. * @return value or {@code null} for none */ public java.lang.String getId() { return id; } /** * Classroom-assigned identifier of this announcement, unique per course. * * Read-only. * @param id id or {@code null} for none */ public Announcement setId(java.lang.String id) { this.id = id; return this; } /** * Identifiers of students with access to the announcement. This field is set only if * `assigneeMode` is `INDIVIDUAL_STUDENTS`. If the `assigneeMode` is `INDIVIDUAL_STUDENTS`, then * only students specified in this field will be able to see the announcement. * @return value or {@code null} for none */ public IndividualStudentsOptions getIndividualStudentsOptions() { return individualStudentsOptions; } /** * Identifiers of students with access to the announcement. This field is set only if * `assigneeMode` is `INDIVIDUAL_STUDENTS`. If the `assigneeMode` is `INDIVIDUAL_STUDENTS`, then * only students specified in this field will be able to see the announcement. * @param individualStudentsOptions individualStudentsOptions or {@code null} for none */ public Announcement setIndividualStudentsOptions(IndividualStudentsOptions individualStudentsOptions) { this.individualStudentsOptions = individualStudentsOptions; return this; } /** * Additional materials. * * Announcements must have no more than 20 material items. * @return value or {@code null} for none */ public java.util.List<Material> getMaterials() { return materials; } /** * Additional materials. * * Announcements must have no more than 20 material items. 
* @param materials materials or {@code null} for none */ public Announcement setMaterials(java.util.List<Material> materials) { this.materials = materials; return this; } /** * Optional timestamp when this announcement is scheduled to be published. * @return value or {@code null} for none */ public String getScheduledTime() { return scheduledTime; } /** * Optional timestamp when this announcement is scheduled to be published. * @param scheduledTime scheduledTime or {@code null} for none */ public Announcement setScheduledTime(String scheduledTime) { this.scheduledTime = scheduledTime; return this; } /** * Status of this announcement. If unspecified, the default state is `DRAFT`. * @return value or {@code null} for none */ public java.lang.String getState() { return state; } /** * Status of this announcement. If unspecified, the default state is `DRAFT`. * @param state state or {@code null} for none */ public Announcement setState(java.lang.String state) { this.state = state; return this; } /** * Description of this announcement. The text must be a valid UTF-8 string containing no more than * 30,000 characters. * @return value or {@code null} for none */ public java.lang.String getText() { return text; } /** * Description of this announcement. The text must be a valid UTF-8 string containing no more than * 30,000 characters. * @param text text or {@code null} for none */ public Announcement setText(java.lang.String text) { this.text = text; return this; } /** * Timestamp of the most recent change to this announcement. * * Read-only. * @return value or {@code null} for none */ public String getUpdateTime() { return updateTime; } /** * Timestamp of the most recent change to this announcement. * * Read-only. 
* @param updateTime updateTime or {@code null} for none */ public Announcement setUpdateTime(String updateTime) { this.updateTime = updateTime; return this; } @Override public Announcement set(String fieldName, Object value) { return (Announcement) super.set(fieldName, value); } @Override public Announcement clone() { return (Announcement) super.clone(); } }
// Copyright 2018 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.skydoc.fakebuildapi; import com.google.common.collect.ImmutableList; import com.google.devtools.build.lib.starlarkbuildapi.StarlarkAttrModuleApi; import com.google.devtools.build.lib.starlarkbuildapi.core.ProviderApi; import com.google.devtools.build.skydoc.rendering.proto.StardocOutputProtos.AttributeType; import java.util.ArrayList; import java.util.List; import java.util.Map; import net.starlark.java.eval.Dict; import net.starlark.java.eval.EvalException; import net.starlark.java.eval.Module; import net.starlark.java.eval.Printer; import net.starlark.java.eval.Sequence; import net.starlark.java.eval.StarlarkInt; import net.starlark.java.eval.StarlarkThread; /** * Fake implementation of {@link StarlarkAttrModuleApi}. */ public class FakeStarlarkAttrModuleApi implements StarlarkAttrModuleApi { @Override public Descriptor intAttribute( StarlarkInt defaultInt, String doc, Boolean mandatory, Sequence<?> values, StarlarkThread thread) throws EvalException { return new FakeDescriptor(AttributeType.INT, doc, mandatory, ImmutableList.of(), defaultInt); } @Override public Descriptor stringAttribute( String defaultString, String doc, Boolean mandatory, Sequence<?> values, StarlarkThread thread) throws EvalException { return new FakeDescriptor( AttributeType.STRING, doc, mandatory, ImmutableList.of(), defaultString != null ? 
"\"" + defaultString + "\"" : null); } @Override public Descriptor labelAttribute( Object defaultO, String doc, Boolean executable, Object allowFiles, Object allowSingleFile, Boolean mandatory, Sequence<?> providers, Object allowRules, Object cfg, Sequence<?> aspects, StarlarkThread thread) throws EvalException { List<List<String>> allNameGroups = new ArrayList<>(); if (providers != null) { allNameGroups = allProviderNameGroups(providers, thread); } return new FakeDescriptor(AttributeType.LABEL, doc, mandatory, allNameGroups, defaultO); } @Override public Descriptor stringListAttribute( Boolean mandatory, Boolean allowEmpty, Sequence<?> defaultList, String doc, StarlarkThread thread) throws EvalException { return new FakeDescriptor( AttributeType.STRING_LIST, doc, mandatory, ImmutableList.of(), defaultList); } @Override public Descriptor intListAttribute( Boolean mandatory, Boolean allowEmpty, Sequence<?> defaultList, String doc, StarlarkThread thread) throws EvalException { return new FakeDescriptor( AttributeType.INT_LIST, doc, mandatory, ImmutableList.of(), defaultList); } @Override public Descriptor labelListAttribute( Boolean allowEmpty, Object defaultList, String doc, Object allowFiles, Object allowRules, Sequence<?> providers, Sequence<?> flags, Boolean mandatory, Object cfg, Sequence<?> aspects, StarlarkThread thread) throws EvalException { List<List<String>> allNameGroups = new ArrayList<>(); if (providers != null) { allNameGroups = allProviderNameGroups(providers, thread); } return new FakeDescriptor(AttributeType.LABEL_LIST, doc, mandatory, allNameGroups, defaultList); } @Override public Descriptor labelKeyedStringDictAttribute( Boolean allowEmpty, Object defaultList, String doc, Object allowFiles, Object allowRules, Sequence<?> providers, Sequence<?> flags, Boolean mandatory, Object cfg, Sequence<?> aspects, StarlarkThread thread) throws EvalException { List<List<String>> allNameGroups = new ArrayList<>(); if (providers != null) { allNameGroups = 
allProviderNameGroups(providers, thread); } return new FakeDescriptor( AttributeType.LABEL_STRING_DICT, doc, mandatory, allNameGroups, defaultList); } @Override public Descriptor boolAttribute( Boolean defaultO, String doc, Boolean mandatory, StarlarkThread thread) throws EvalException { return new FakeDescriptor( AttributeType.BOOLEAN, doc, mandatory, ImmutableList.of(), Boolean.TRUE.equals(defaultO) ? "True" : "False"); } @Override public Descriptor outputAttribute(String doc, Boolean mandatory, StarlarkThread thread) throws EvalException { return new FakeDescriptor(AttributeType.OUTPUT, doc, mandatory, ImmutableList.of(), ""); } @Override public Descriptor outputListAttribute( Boolean allowEmpty, String doc, Boolean mandatory, StarlarkThread thread) throws EvalException { return new FakeDescriptor(AttributeType.OUTPUT_LIST, doc, mandatory, ImmutableList.of(), ""); } @Override public Descriptor stringDictAttribute( Boolean allowEmpty, Dict<?, ?> defaultO, String doc, Boolean mandatory, StarlarkThread thread) throws EvalException { return new FakeDescriptor( AttributeType.STRING_DICT, doc, mandatory, ImmutableList.of(), defaultO); } @Override public Descriptor stringListDictAttribute( Boolean allowEmpty, Dict<?, ?> defaultO, String doc, Boolean mandatory, StarlarkThread thread) throws EvalException { return new FakeDescriptor( AttributeType.STRING_LIST_DICT, doc, mandatory, ImmutableList.of(), defaultO); } @Override public Descriptor licenseAttribute( Object defaultO, String doc, Boolean mandatory, StarlarkThread thread) throws EvalException { return new FakeDescriptor( AttributeType.STRING_LIST, doc, mandatory, ImmutableList.of(), defaultO); } @Override public void repr(Printer printer) {} /** * Returns a list of provider name groups, given the value of a Starlark attribute's "providers" * argument. * * <p>{@code providers} can either be a list of providers or a list of lists of providers, where * each provider is represented by a ProviderApi or by a String. 
In the case of a single-level * list, the whole list is considered a single group, while in the case of a double-level list, * each of the inner lists is a separate group. */ private static List<List<String>> allProviderNameGroups( Sequence<?> providers, StarlarkThread thread) { List<List<String>> allNameGroups = new ArrayList<>(); for (Object object : providers) { List<String> providerNameGroup; if (object instanceof Sequence) { Sequence<?> group = (Sequence<?>) object; providerNameGroup = parseProviderGroup(group, thread); allNameGroups.add(providerNameGroup); } else { providerNameGroup = parseProviderGroup(providers, thread); allNameGroups.add(providerNameGroup); break; } } return allNameGroups; } /** * Returns the names of the providers in the given group. * * <p>Each item in the group may be either a {@link ProviderApi} or a {@code String} (representing * a legacy provider). */ private static List<String> parseProviderGroup(Sequence<?> group, StarlarkThread thread) { List<String> providerNameGroup = new ArrayList<>(); for (Object object : group) { if (object instanceof ProviderApi) { ProviderApi provider = (ProviderApi) object; String providerName = providerName(provider, thread); providerNameGroup.add(providerName); } else if (object instanceof String) { String legacyProvider = (String) object; providerNameGroup.add(legacyProvider); } } return providerNameGroup; } /** * Returns the name of {@code provider}. * * <p>{@code thread} contains a {@code Map<String, Object>} where the values are built-in objects * or objects defined in the file and the keys are the names of these objects. If a {@code * provider} is in the map, the name of the provider is set as the key of this object in {@code * bindings}. If it is not in the map, the provider may be part of a module in the map and the * name will be set to "Unknown Provider". 
*/ private static String providerName(ProviderApi provider, StarlarkThread thread) { Module bzl = Module.ofInnermostEnclosingStarlarkFunction(thread); // user-defined provider? for (Map.Entry<String, Object> e : bzl.getGlobals().entrySet()) { if (provider.equals(e.getValue())) { return e.getKey(); } } // predeclared provider? (e.g. DefaultInfo) for (Map.Entry<String, Object> e : bzl.getPredeclaredBindings().entrySet()) { if (provider.equals(e.getValue())) { return e.getKey(); } } return "Unknown Provider"; } }
/** * Copyright (c) 2003-2016 The Apereo Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://opensource.org/licenses/ecl2 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sakaiproject.content.entityproviders; import java.util.ArrayList; import java.util.List; import org.apache.commons.lang3.StringUtils; import org.sakaiproject.authz.api.SecurityService; import org.sakaiproject.content.api.ContentHostingService; import org.sakaiproject.content.api.ContentResource; import org.sakaiproject.entity.api.Entity; import org.sakaiproject.entity.api.ResourceProperties; import org.sakaiproject.entitybroker.EntityView; import org.sakaiproject.entitybroker.entityprovider.EntityProvider; import org.sakaiproject.entitybroker.entityprovider.annotations.EntityCustomAction; import org.sakaiproject.entitybroker.entityprovider.capabilities.ActionsExecutable; import org.sakaiproject.entitybroker.entityprovider.capabilities.AutoRegisterEntityProvider; import org.sakaiproject.entitybroker.entityprovider.capabilities.Describeable; import org.sakaiproject.entitybroker.entityprovider.capabilities.Outputable; import org.sakaiproject.entitybroker.entityprovider.extension.Formats; import org.sakaiproject.entitybroker.exception.EntityNotFoundException; import org.sakaiproject.entitybroker.util.AbstractEntityProvider; import org.sakaiproject.exception.IdUnusedException; import org.sakaiproject.exception.PermissionException; import org.sakaiproject.site.api.Site; import org.sakaiproject.site.api.SiteService; import org.sakaiproject.site.api.ToolConfiguration; 
import org.sakaiproject.tool.api.ToolManager;
import org.sakaiproject.user.api.User;
import org.sakaiproject.user.api.UserDirectoryService;
import org.sakaiproject.user.api.UserNotDefinedException;

import lombok.Getter;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;

/**
 * Entity provider for the Dropbox tool.
 *
 * <p>Exposes a read-only view of a user's dropbox in a site via the entity broker,
 * e.g. {@code /direct/dropbox/site/{siteId}/user/{userEid}.json}. Output formats are
 * XML and JSON (see {@link #getHandledOutputFormats()}). Access is restricted to the
 * dropbox owner, dropbox maintainers, and admins (see {@code canAccessDropbox}).</p>
 */
@Slf4j
public class DropboxEntityProvider extends AbstractEntityProvider implements EntityProvider, AutoRegisterEntityProvider, ActionsExecutable, Outputable, Describeable {

	// Prefix under which this provider is registered with the entity broker.
	public final static String ENTITY_PREFIX = "dropbox";

	@Override
	public String getEntityPrefix() {
		return ENTITY_PREFIX;
	}

	/**
	 * site/siteId/user/userEid
	 *
	 * Handles GET /dropbox/site/{siteId}/user/{userEid} and returns the flattened
	 * list of resources in that user's dropbox for that site.
	 *
	 * @param view entity view whose path segments 2 and 4 carry siteId and userEid
	 * @return list of simplified {@link DropboxItem}s for every resource in the dropbox
	 * @throws IllegalArgumentException if siteId or userEid is missing from the URL
	 * @throws EntityNotFoundException if the site, tool, or user cannot be resolved/accessed
	 * @throws SecurityException if the current user may not view the requested dropbox
	 */
	@EntityCustomAction(action = "site", viewKey = EntityView.VIEW_LIST)
	public List<DropboxItem> getDropboxCollectionForSiteAndUser(EntityView view) {

		// get siteId (path segment 2: /dropbox/site/{siteId}/user/{userEid})
		String siteId = view.getPathSegment(2);

		// get userId (actually the user EID, path segment 4)
		String userEid = view.getPathSegment(4);

		if(log.isDebugEnabled()) {
			log.debug("Dropbox for site: " + siteId + " and user: " + userEid);
		}

		// check siteId and userEid supplied
		if (StringUtils.isBlank(siteId) || StringUtils.isBlank(userEid)) {
			throw new IllegalArgumentException(
					"siteId and userEid must be set in order to get the dropbox for a site, via the URL /dropbox/site/siteId/user/userId");
		}

		//check user can access this site
		Site site;
		try {
			site = siteService.getSiteVisit(siteId);
		} catch (IdUnusedException e) {
			throw new EntityNotFoundException("Invalid siteId: " + siteId, siteId);
		} catch (PermissionException e) {
			throw new EntityNotFoundException("No access to site: " + siteId, siteId);
		}

		//check user can access the tool, it might be hidden
		ToolConfiguration toolConfig = site.getToolForCommonId("sakai.dropbox");
		if(toolConfig == null || !toolManager.isVisible(site, toolConfig)) {
			throw new EntityNotFoundException("No access to tool in site: " + siteId, siteId);
		}

		//get Id for user based on supplied eid
		// NOTE(review): if getUserByEid returns null (rather than throwing), userId stays
		// null and is passed on to canAccessDropbox/getDropBoxCollectionId — confirm the
		// directory service contract, or reject null explicitly.
		String userId = null;
		try {
			User u = userDirectoryService.getUserByEid(userEid);
			if(u != null){
				userId = u.getId();
			}
		} catch (UserNotDefinedException e) {
			throw new EntityNotFoundException("Invalid user: " + userEid, userEid);
		}

		//check user has permission to this dropbox in this site
		boolean isAllowed = canAccessDropbox(siteId, userId);
		if(!isAllowed) {
			throw new SecurityException("No access to site: " + siteId + " and dropbox: " + userEid);
		}

		//get collectionId for the dropbox
		String collectionId = getDropBoxCollectionId(siteId, userId);

		//get list of resources in dropbox
		List<ContentResource> resources = contentHostingService.getAllResources(collectionId);

		List<DropboxItem> dropboxItems = new ArrayList<DropboxItem>();
		for(ContentResource resource: resources) {

			//convert to our simplified object
			DropboxItem item = new DropboxItem();

			ResourceProperties props = resource.getProperties();

			item.setTitle(props.getProperty(ResourceProperties.PROP_DISPLAY_NAME));
			item.setDescription(props.getProperty(ResourceProperties.PROP_DESCRIPTION));
			item.setType(props.getProperty(ResourceProperties.PROP_CONTENT_TYPE));
			// NOTE(review): parseLong throws NumberFormatException (and NPE if the
			// property is absent) — presumably CONTENT_LENGTH is always set for a
			// ContentResource; confirm, or guard with a default.
			item.setSize(Long.parseLong(props.getProperty(ResourceProperties.PROP_CONTENT_LENGTH)));
			item.setUrl(resource.getUrl());

			dropboxItems.add(item);
		}

		return dropboxItems;
	}

	@Override
	public String[] getHandledOutputFormats() {
		return new String[] { Formats.XML, Formats.JSON};
	}

	// Injected Sakai services (set by the component manager / Spring).
	@Setter
	private ContentHostingService contentHostingService;

	@Setter
	private SiteService siteService;

	@Setter
	private ToolManager toolManager;

	@Setter
	private SecurityService securityService;

	@Setter
	private UserDirectoryService userDirectoryService;

	/**
	 * Simplified helper class to represent an individual item in a user's dropbox
	 */
	public static class DropboxItem {

		// display name of the resource
		@Getter @Setter
		private String title;

		// resource description property
		@Getter @Setter
		private String description;

		// direct URL to the resource
		@Getter @Setter
		private String url;

		// content (MIME) type
		@Getter @Setter
		private String type;

		// content length in bytes
		@Getter @Setter
		private long size;
	}

	/**
	 * Can the current user actually access the requested dropbox?
	 * Admin, dropbox.maintain and dropbox.own are allowed
	 *
	 * @param siteId - siteId specified
	 * @param dropboxUserId - userId of the dropbox
	 * @return true if the current user is an admin, the dropbox owner with
	 *         dropbox.own in the site, or holds dropbox.maintain in the site
	 */
	private boolean canAccessDropbox(String siteId, String dropboxUserId) {

		String currentUserId = userDirectoryService.getCurrentUser().getId();

		//admin
		if (securityService.isSuperUser(currentUserId)) {
			return true;
		}

		// Build the site reference the security checks are unlocked against.
		// NOTE(review): if siteId already starts with REFERENCE_ROOT, siteRef stays ""
		// and both unlock() calls are checked against an empty reference — looks like
		// it should fall back to siteRef = siteId in that case; confirm against callers
		// (the only caller here always passes a bare siteId).
		String siteRef = "";
		if(siteId != null && !siteId.startsWith(SiteService.REFERENCE_ROOT)) {
			siteRef = SiteService.REFERENCE_ROOT + Entity.SEPARATOR + siteId;
		}

		//owner - current user must match dropboxid, and have permission in the site
		if(StringUtils.equals(currentUserId, dropboxUserId) && securityService.unlock(currentUserId, ContentHostingService.AUTH_DROPBOX_OWN, siteRef)) {
			return true;
		}

		//maintainer, must have permission in the site
		if(securityService.unlock(currentUserId, ContentHostingService.AUTH_DROPBOX_MAINTAIN, siteRef)) {
			return true;
		}

		return false;
	}

	/**
	 * Get the collection Id for the dropbox
	 * @param siteId site whose dropbox root is wanted
	 * @param userId internal user id (not EID) owning the dropbox
	 * @return collection id of the form {COLLECTION_DROPBOX}{siteId}/{userId}/
	 */
	private String getDropBoxCollectionId(String siteId, String userId) {
		return ContentHostingService.COLLECTION_DROPBOX + siteId + "/" + userId + "/";
	}
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.pinot.controller; import com.google.common.annotations.VisibleForTesting; import com.google.common.primitives.Longs; import com.google.common.util.concurrent.ThreadFactoryBuilder; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.Executor; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import org.apache.commons.httpclient.HttpConnectionManager; import org.apache.commons.httpclient.MultiThreadedHttpConnectionManager; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; import org.apache.helix.HelixManager; import org.apache.helix.HelixManagerFactory; import org.apache.helix.InstanceType; import org.apache.helix.SystemPropertyKeys; import org.apache.helix.api.listeners.ControllerChangeListener; import org.apache.helix.model.ClusterConstraints; import org.apache.helix.model.ConstraintItem; import org.apache.helix.model.MasterSlaveSMD; import org.apache.helix.model.Message; import 
org.apache.helix.task.TaskDriver; import org.apache.pinot.common.Utils; import org.apache.pinot.common.function.FunctionRegistry; import org.apache.pinot.common.metrics.ControllerMeter; import org.apache.pinot.common.metrics.ControllerMetrics; import org.apache.pinot.spi.metrics.PinotMetricsRegistry; import org.apache.pinot.common.metrics.PinotMetricUtils; import org.apache.pinot.common.metrics.ValidationMetrics; import org.apache.pinot.common.utils.CommonConstants; import org.apache.pinot.common.utils.NetUtil; import org.apache.pinot.common.utils.ServiceStatus; import org.apache.pinot.common.utils.fetcher.SegmentFetcherFactory; import org.apache.pinot.common.utils.helix.LeadControllerUtils; import org.apache.pinot.controller.api.ControllerAdminApiApplication; import org.apache.pinot.controller.api.access.AccessControlFactory; import org.apache.pinot.controller.api.events.MetadataEventNotifierFactory; import org.apache.pinot.controller.helix.core.minion.MinionInstancesCleanupTask; import org.apache.pinot.core.transport.ListenerConfig; import org.apache.pinot.controller.api.resources.ControllerFilePathProvider; import org.apache.pinot.controller.api.resources.InvalidControllerConfigException; import org.apache.pinot.controller.helix.SegmentStatusChecker; import org.apache.pinot.controller.helix.core.PinotHelixResourceManager; import org.apache.pinot.controller.helix.core.minion.PinotHelixTaskResourceManager; import org.apache.pinot.controller.helix.core.minion.PinotTaskManager; import org.apache.pinot.controller.helix.core.realtime.PinotLLCRealtimeSegmentManager; import org.apache.pinot.controller.helix.core.realtime.PinotRealtimeSegmentManager; import org.apache.pinot.controller.helix.core.realtime.SegmentCompletionManager; import org.apache.pinot.controller.helix.core.relocation.SegmentRelocator; import org.apache.pinot.controller.helix.core.retention.RetentionManager; import 
org.apache.pinot.controller.helix.core.statemodel.LeadControllerResourceMasterSlaveStateModelFactory;
import org.apache.pinot.controller.helix.core.util.HelixSetupUtils;
import org.apache.pinot.controller.helix.starter.HelixConfig;
import org.apache.pinot.core.util.ListenerConfigUtil;
import org.apache.pinot.controller.validation.BrokerResourceValidationManager;
import org.apache.pinot.controller.validation.OfflineSegmentIntervalChecker;
import org.apache.pinot.controller.validation.RealtimeSegmentValidationManager;
import org.apache.pinot.core.periodictask.PeriodicTask;
import org.apache.pinot.core.periodictask.PeriodicTaskScheduler;
import org.apache.pinot.core.transport.TlsConfig;
import org.apache.pinot.core.util.TlsUtils;
import org.apache.pinot.spi.crypt.PinotCrypterFactory;
import org.apache.pinot.spi.env.PinotConfiguration;
import org.apache.pinot.spi.filesystem.PinotFSFactory;
import org.apache.pinot.spi.services.ServiceRole;
import org.apache.pinot.spi.services.ServiceStartable;
import org.glassfish.hk2.utilities.binding.AbstractBinder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


/**
 * Entry point for the Pinot controller service.
 *
 * <p>Depending on {@code ControllerConf.ControllerMode} this process runs as a Helix
 * controller only (HELIX_ONLY), a Pinot controller only (PINOT_ONLY), or both (DUAL).
 * {@link #start()} wires up metrics, Helix managers, the resource manager, periodic
 * tasks, and the admin REST application; {@link #stop()} tears them down in reverse.</p>
 */
public class ControllerStarter implements ServiceStartable {
  private static final Logger LOGGER = LoggerFactory.getLogger(ControllerStarter.class);

  private static final String METRICS_REGISTRY_NAME = "pinot.controller.metrics";
  // Sentinel gauge values emitted by the dataDir.fileOpLatencyMs callback when the
  // data directory is missing or the latency probe throws.
  private static final Long DATA_DIRECTORY_MISSING_VALUE = 1000000L;
  private static final Long DATA_DIRECTORY_EXCEPTION_VALUE = 1100000L;
  private static final String METADATA_EVENT_NOTIFIER_PREFIX = "metadata.event.notifier";
  // Name of the Helix message constraint installed by setupHelixClusterConstraints().
  private static final String MAX_STATE_TRANSITIONS_PER_INSTANCE = "MaxStateTransitionsPerInstance";

  private final ControllerConf _config;
  private final List<ListenerConfig> _listenerConfigs;
  private final ControllerAdminApiApplication _adminApp;
  // TODO: rename this variable once it's fully separated from the Helix controller.
  private final PinotHelixResourceManager _helixResourceManager;
  private final ExecutorService _executorService;
  private final String _helixZkURL;
  private final String _helixClusterName;
  private final String _helixControllerInstanceId;
  private final String _helixParticipantInstanceId;
  private final boolean _isUpdateStateModel;
  private final boolean _enableBatchMessageMode;
  private final ControllerConf.ControllerMode _controllerMode;

  // Helix manager connected as CONTROLLER (HELIX_ONLY/DUAL) and as PARTICIPANT (PINOT_ONLY/DUAL).
  private HelixManager _helixControllerManager;
  private HelixManager _helixParticipantManager;

  private PinotMetricsRegistry _metricsRegistry;
  private ControllerMetrics _controllerMetrics;

  // Can only be constructed after resource manager getting started
  private OfflineSegmentIntervalChecker _offlineSegmentIntervalChecker;
  private RealtimeSegmentValidationManager _realtimeSegmentValidationManager;
  private BrokerResourceValidationManager _brokerResourceValidationManager;
  private SegmentRelocator _segmentRelocator;
  private RetentionManager _retentionManager;
  private SegmentStatusChecker _segmentStatusChecker;
  private PinotTaskManager _taskManager;
  private PeriodicTaskScheduler _periodicTaskScheduler;
  private PinotHelixTaskResourceManager _helixTaskResourceManager;
  private PinotRealtimeSegmentManager _realtimeSegmentsManager;
  private PinotLLCRealtimeSegmentManager _pinotLLCRealtimeSegmentManager;
  private SegmentCompletionManager _segmentCompletionManager;
  private LeadControllerManager _leadControllerManager;
  private List<ServiceStatus.ServiceStatusCallback> _serviceStatusCallbackList;
  private MinionInstancesCleanupTask _minionInstancesCleanupTask;

  /**
   * Builds the starter from configuration. In HELIX_ONLY mode the Pinot-side
   * components (admin app, resource manager, executor) are left null.
   */
  public ControllerStarter(ControllerConf conf) {
    _config = conf;
    inferHostnameIfNeeded(_config);
    setupHelixSystemProperties();

    _controllerMode = conf.getControllerMode();

    // Helix related settings.
    _helixZkURL = HelixConfig.getAbsoluteZkPathForHelix(_config.getZkStr());
    _helixClusterName = _config.getHelixClusterName();
    _listenerConfigs = ListenerConfigUtil.buildControllerConfigs(_config);
    String host = conf.getControllerHost();
    // Instance ids are derived from host + the first listener's port.
    int port = _listenerConfigs.get(0).getPort();
    _helixControllerInstanceId = host + "_" + port;
    _helixParticipantInstanceId = LeadControllerUtils.generateParticipantInstanceId(host, port);
    _isUpdateStateModel = _config.isUpdateSegmentStateModel();
    _enableBatchMessageMode = _config.getEnableBatchMessageMode();

    _serviceStatusCallbackList = new ArrayList<>();
    if (_controllerMode == ControllerConf.ControllerMode.HELIX_ONLY) {
      _adminApp = null;
      _helixResourceManager = null;
      _executorService = null;
    } else {
      // Initialize FunctionRegistry before starting the admin application (PinotQueryResource requires it to compile
      // queries)
      FunctionRegistry.init();
      _adminApp = new ControllerAdminApiApplication();
      // Do not use this before the invocation of {@link PinotHelixResourceManager::start()}, which happens in
      // {@link ControllerStarter::start()}
      _helixResourceManager = new PinotHelixResourceManager(_config);
      _executorService =
          Executors.newCachedThreadPool(new ThreadFactoryBuilder().setNameFormat("restapi-multiget-thread-%d").build());
    }
  }

  /**
   * If no controller host is configured and SET_INSTANCE_ID_TO_HOSTNAME is enabled,
   * resolves and sets the local hostname; fails fast when resolution is impossible.
   */
  private void inferHostnameIfNeeded(ControllerConf config) {
    if (config.getControllerHost() == null) {
      if (config.getProperty(CommonConstants.Helix.SET_INSTANCE_ID_TO_HOSTNAME_KEY, false)) {
        final String inferredHostname = NetUtil.getHostnameOrAddress();
        if (inferredHostname != null) {
          config.setControllerHost(inferredHostname);
        } else {
          throw new RuntimeException(
              "Failed to infer controller hostname, please set controller instanceId explicitly in config file.");
        }
      }
    }
  }

  private void setupHelixSystemProperties() {
    // NOTE: Helix will disconnect the manager and disable the instance if it detects flapping (too frequent disconnect
    // from ZooKeeper). Setting flapping time window to a small value can avoid this from happening. Helix ignores the
    // non-positive value, so set the default value as 1.
    System.setProperty(SystemPropertyKeys.FLAPPING_TIME_WINDOW, _config
        .getProperty(CommonConstants.Helix.CONFIG_OF_CONTROLLER_FLAPPING_TIME_WINDOW_MS,
            CommonConstants.Helix.DEFAULT_FLAPPING_TIME_WINDOW_MS));
  }

  /**
   * Installs a cluster-wide Helix message constraint limiting concurrent state
   * transitions per instance. Requires _helixControllerManager to be connected.
   */
  private void setupHelixClusterConstraints() {
    String maxStateTransitions = _config
        .getProperty(CommonConstants.Helix.CONFIG_OF_HELIX_INSTANCE_MAX_STATE_TRANSITIONS,
            CommonConstants.Helix.DEFAULT_HELIX_INSTANCE_MAX_STATE_TRANSITIONS);
    Map<ClusterConstraints.ConstraintAttribute, String> constraintAttributes = new HashMap<>();
    constraintAttributes.put(ClusterConstraints.ConstraintAttribute.INSTANCE, ".*");
    constraintAttributes
        .put(ClusterConstraints.ConstraintAttribute.MESSAGE_TYPE, Message.MessageType.STATE_TRANSITION.name());
    ConstraintItem constraintItem = new ConstraintItem(constraintAttributes, maxStateTransitions);

    // "getClusterManagmentTool" is the Helix API's own (misspelled) method name.
    _helixControllerManager.getClusterManagmentTool()
        .setConstraint(_helixClusterName, ClusterConstraints.ConstraintType.MESSAGE_CONSTRAINT,
            MAX_STATE_TRANSITIONS_PER_INSTANCE, constraintItem);
  }

  public PinotHelixResourceManager getHelixResourceManager() {
    return _helixResourceManager;
  }

  /**
   * Gets the Helix Manager connected as Helix controller.
   */
  public HelixManager getHelixControllerManager() {
    return _helixControllerManager;
  }

  public LeadControllerManager getLeadControllerManager() {
    return _leadControllerManager;
  }

  public OfflineSegmentIntervalChecker getOfflineSegmentIntervalChecker() {
    return _offlineSegmentIntervalChecker;
  }

  public RealtimeSegmentValidationManager getRealtimeSegmentValidationManager() {
    return _realtimeSegmentValidationManager;
  }

  public BrokerResourceValidationManager getBrokerResourceValidationManager() {
    return _brokerResourceValidationManager;
  }

  public PinotHelixTaskResourceManager getHelixTaskResourceManager() {
    return _helixTaskResourceManager;
  }

  public PinotTaskManager getTaskManager() {
    return _taskManager;
  }

  public MinionInstancesCleanupTask getMinionInstancesCleanupTask() {
    return _minionInstancesCleanupTask;
  }

  @Override
  public ServiceRole getServiceRole() {
    return ServiceRole.CONTROLLER;
  }

  @Override
  public String getInstanceId() {
    return _helixParticipantInstanceId;
  }

  @Override
  public PinotConfiguration getConfig() {
    return _config;
  }

  /**
   * Starts the controller according to the configured mode, then registers a
   * combined service-status callback for all started components.
   */
  @Override
  public void start() {
    LOGGER.info("Starting Pinot controller in mode: {}.", _controllerMode.name());
    Utils.logVersions();

    // Set up controller metrics
    initControllerMetrics();

    switch (_controllerMode) {
      case DUAL:
        setUpHelixController();
        setUpPinotController();
        break;
      case PINOT_ONLY:
        setUpPinotController();
        break;
      case HELIX_ONLY:
        setUpHelixController();
        break;
      default:
        LOGGER.error("Invalid mode: " + _controllerMode);
    }

    ServiceStatus.setServiceStatusCallback(_helixParticipantInstanceId,
        new ServiceStatus.MultipleCallbackServiceStatusCallback(_serviceStatusCallbackList));
  }

  private void setUpHelixController() {
    // Register and connect instance as Helix controller.
    LOGGER.info("Starting Helix controller");
    _helixControllerManager =
        HelixSetupUtils.setupHelixController(_helixClusterName, _helixZkURL, _helixControllerInstanceId);

    // Emit helix controller metrics
    _controllerMetrics.addCallbackGauge(CommonConstants.Helix.INSTANCE_CONNECTED_METRIC_NAME,
        () -> _helixControllerManager.isConnected() ? 1L : 0L);
    // Deprecated, since getting the leadership of Helix does not mean Helix has been ready for pinot.
    _controllerMetrics.addCallbackGauge("helix.leader", () -> _helixControllerManager.isLeader() ? 1L : 0L);
    _helixControllerManager.addPreConnectCallback(
        () -> _controllerMetrics.addMeteredGlobalValue(ControllerMeter.HELIX_ZOOKEEPER_RECONNECTS, 1L));

    _serviceStatusCallbackList.add(generateServiceStatusCallback(_helixControllerManager));

    // setup up constraint
    setupHelixClusterConstraints();
  }

  /**
   * Brings up the Pinot-side controller: TLS defaults, cluster setup, factories,
   * Helix participant connection, segment managers, periodic tasks, and the admin
   * REST application. Ordering here is load-bearing — see inline comments.
   */
  private void setUpPinotController() {
    // install default SSL context if necessary (even if not force-enabled everywhere)
    TlsConfig tlsDefaults = TlsUtils.extractTlsConfig(_config, ControllerConf.CONTROLLER_TLS_PREFIX);
    if (StringUtils.isNotBlank(tlsDefaults.getKeyStorePath()) || StringUtils
        .isNotBlank(tlsDefaults.getTrustStorePath())) {
      LOGGER.info("Installing default SSL context for any client requests");
      TlsUtils.installDefaultSSLSocketFactory(tlsDefaults);
    }

    // Set up Pinot cluster in Helix if needed
    HelixSetupUtils.setupPinotCluster(_helixClusterName, _helixZkURL, _isUpdateStateModel, _enableBatchMessageMode,
        _config.getLeadControllerResourceRebalanceStrategy());

    // Start all components
    initPinotFSFactory();
    initControllerFilePathProvider();
    initSegmentFetcherFactory();
    initPinotCrypterFactory();

    LOGGER.info("Initializing Helix participant manager");
    _helixParticipantManager = HelixManagerFactory
        .getZKHelixManager(_helixClusterName, _helixParticipantInstanceId, InstanceType.PARTICIPANT, _helixZkURL);

    // LeadControllerManager needs to be initialized before registering as Helix participant.
    LOGGER.info("Initializing lead controller manager");
    _leadControllerManager = new LeadControllerManager(_helixParticipantManager, _controllerMetrics);

    LOGGER.info("Registering and connecting Helix participant manager as Helix Participant role");
    registerAndConnectAsHelixParticipant();

    // LeadControllerManager needs to be started after the connection
    // as it can check Helix leadership and resource config only after connecting to Helix cluster.
    LOGGER.info("Starting lead controller manager");
    _leadControllerManager.start();

    LOGGER.info("Starting Pinot Helix resource manager and connecting to Zookeeper");
    _helixResourceManager.start(_helixParticipantManager);

    LOGGER.info("Starting task resource manager");
    _helixTaskResourceManager = new PinotHelixTaskResourceManager(new TaskDriver(_helixParticipantManager));

    // Helix resource manager must be started in order to create PinotLLCRealtimeSegmentManager
    LOGGER.info("Starting realtime segment manager");
    _pinotLLCRealtimeSegmentManager = new PinotLLCRealtimeSegmentManager(_helixResourceManager, _config,
        _controllerMetrics);
    // TODO: Need to put this inside HelixResourceManager when HelixControllerLeadershipManager is removed.
    _helixResourceManager.registerPinotLLCRealtimeSegmentManager(_pinotLLCRealtimeSegmentManager);
    _segmentCompletionManager =
        new SegmentCompletionManager(_helixParticipantManager, _pinotLLCRealtimeSegmentManager, _controllerMetrics,
            _leadControllerManager, _config.getSegmentCommitTimeoutSeconds());

    if (_config.getHLCTablesAllowed()) {
      LOGGER.info("Realtime tables with High Level consumers will be supported");
      _realtimeSegmentsManager = new PinotRealtimeSegmentManager(_helixResourceManager, _leadControllerManager);
      _realtimeSegmentsManager.start(_controllerMetrics);
    } else {
      LOGGER.info("Realtime tables with High Level consumers will NOT be supported");
      _realtimeSegmentsManager = null;
    }

    // Setting up periodic tasks
    List<PeriodicTask> controllerPeriodicTasks = setupControllerPeriodicTasks();
    LOGGER.info("Init controller periodic tasks scheduler");
    _periodicTaskScheduler = new PeriodicTaskScheduler();
    _periodicTaskScheduler.init(controllerPeriodicTasks);
    _periodicTaskScheduler.start();

    String accessControlFactoryClass = _config.getAccessControlFactoryClass();
    LOGGER.info("Use class: {} as the AccessControlFactory", accessControlFactoryClass);
    final AccessControlFactory accessControlFactory;
    try {
      // NOTE(review): Class.newInstance() is deprecated (wraps no cause, propagates
      // checked exceptions from the ctor) — consider getDeclaredConstructor().newInstance().
      accessControlFactory = (AccessControlFactory) Class.forName(accessControlFactoryClass).newInstance();
    } catch (Exception e) {
      throw new RuntimeException("Caught exception while creating new AccessControlFactory instance", e);
    }

    final MetadataEventNotifierFactory metadataEventNotifierFactory =
        MetadataEventNotifierFactory.loadFactory(_config.subset(METADATA_EVENT_NOTIFIER_PREFIX));

    LOGGER.info("Controller download url base: {}", _config.generateVipUrl());
    LOGGER.info("Injecting configuration and resource managers to the API context");
    final MultiThreadedHttpConnectionManager connectionManager = new MultiThreadedHttpConnectionManager();
    connectionManager.getParams().setConnectionTimeout(_config.getServerAdminRequestTimeoutSeconds() * 1000);
    // register all the controller objects for injection to jersey resources
    _adminApp.registerBinder(new AbstractBinder() {
      @Override
      protected void configure() {
        bind(_config).to(ControllerConf.class);
        bind(_helixResourceManager).to(PinotHelixResourceManager.class);
        bind(_helixTaskResourceManager).to(PinotHelixTaskResourceManager.class);
        bind(_segmentCompletionManager).to(SegmentCompletionManager.class);
        bind(_taskManager).to(PinotTaskManager.class);
        bind(connectionManager).to(HttpConnectionManager.class);
        bind(_executorService).to(Executor.class);
        bind(_controllerMetrics).to(ControllerMetrics.class);
        bind(accessControlFactory).to(AccessControlFactory.class);
        bind(metadataEventNotifierFactory).to(MetadataEventNotifierFactory.class);
        bind(_leadControllerManager).to(LeadControllerManager.class);
      }
    });

    LOGGER.info("Starting controller admin application on: {}", ListenerConfigUtil.toString(_listenerConfigs));
    _adminApp.start(_listenerConfigs);

    // Data-directory health gauges: existence flag plus a write/delete latency probe.
    _controllerMetrics.addCallbackGauge("dataDir.exists", () -> new File(_config.getDataDir()).exists() ? 1L : 0L);
    _controllerMetrics.addCallbackGauge("dataDir.fileOpLatencyMs", () -> {
      File dataDir = new File(_config.getDataDir());
      if (dataDir.exists()) {
        try {
          long startTime = System.currentTimeMillis();
          File testFile = new File(dataDir, _config.getControllerHost());
          try (OutputStream outputStream = new FileOutputStream(testFile, false)) {
            outputStream.write(Longs.toByteArray(System.currentTimeMillis()));
          }
          FileUtils.deleteQuietly(testFile);
          return System.currentTimeMillis() - startTime;
        } catch (IOException e) {
          LOGGER.warn("Caught exception while checking the data directory operation latency", e);
          return DATA_DIRECTORY_EXCEPTION_VALUE;
        }
      } else {
        return DATA_DIRECTORY_MISSING_VALUE;
      }
    });

    _serviceStatusCallbackList.add(generateServiceStatusCallback(_helixParticipantManager));
  }

  /**
   * Builds a status callback for the given Helix manager: STARTING until first
   * connected, then GOOD/BAD tracking the live ZK connection.
   */
  private ServiceStatus.ServiceStatusCallback generateServiceStatusCallback(HelixManager helixManager) {
    return new ServiceStatus.ServiceStatusCallback() {
      private boolean _isStarted = false;
      private String _statusDescription = "Helix ZK Not connected as " + helixManager.getInstanceType();

      @Override
      public ServiceStatus.Status getServiceStatus() {
        if (_isStarted) {
          // If we've connected to Helix at some point, the instance status depends on being connected to ZK
          if (helixManager.isConnected()) {
            return ServiceStatus.Status.GOOD;
          } else {
            return ServiceStatus.Status.BAD;
          }
        }

        // Return starting until zk is connected
        if (!helixManager.isConnected()) {
          return ServiceStatus.Status.STARTING;
        } else {
          _isStarted = true;
          _statusDescription = ServiceStatus.STATUS_DESCRIPTION_NONE;
          return ServiceStatus.Status.GOOD;
        }
      }

      @Override
      public String getStatusDescription() {
        return _statusDescription;
      }
    };
  }

  // Initializes the metrics registry and global controller meters.
  private void initControllerMetrics() {
    PinotConfiguration metricsConfiguration = _config.subset(METRICS_REGISTRY_NAME);
    PinotMetricUtils.init(metricsConfiguration);
    _metricsRegistry = PinotMetricUtils.getPinotMetricsRegistry();
    _controllerMetrics = new ControllerMetrics(_config.getMetricsPrefix(), _metricsRegistry);
    _controllerMetrics.initializeGlobalMeters();
  }

  private void initPinotFSFactory() {
    LOGGER.info("Initializing PinotFSFactory");
    PinotFSFactory.init(_config.subset(CommonConstants.Controller.PREFIX_OF_CONFIG_OF_PINOT_FS_FACTORY));
  }

  private void initControllerFilePathProvider() {
    LOGGER.info("Initializing ControllerFilePathProvider");
    try {
      ControllerFilePathProvider.init(_config);
    } catch (InvalidControllerConfigException e) {
      throw new RuntimeException("Caught exception while initializing ControllerFilePathProvider", e);
    }
  }

  private void initSegmentFetcherFactory() {
    PinotConfiguration segmentFetcherFactoryConfig =
        _config.subset(CommonConstants.Controller.PREFIX_OF_CONFIG_OF_SEGMENT_FETCHER_FACTORY);
    LOGGER.info("Initializing SegmentFetcherFactory");
    try {
      SegmentFetcherFactory.init(segmentFetcherFactoryConfig);
    } catch (Exception e) {
      throw new RuntimeException("Caught exception while initializing SegmentFetcherFactory", e);
    }
  }

  private void initPinotCrypterFactory() {
    PinotConfiguration pinotCrypterConfig =
        _config.subset(CommonConstants.Controller.PREFIX_OF_CONFIG_OF_PINOT_CRYPTER);
    LOGGER.info("Initializing PinotCrypterFactory");
    try {
      PinotCrypterFactory.init(pinotCrypterConfig);
    } catch (Exception e) {
      throw new RuntimeException("Caught exception while initializing PinotCrypterFactory", e);
    }
  }

  /**
   * Registers, connects to Helix cluster as PARTICIPANT role, and adds listeners.
   */
  private void registerAndConnectAsHelixParticipant() {
    // Registers customized Master-Slave state model to state machine engine, which is for calculating participant
    // assignment in lead controller resource.
    _helixParticipantManager.getStateMachineEngine().registerStateModelFactory(MasterSlaveSMD.name,
        new LeadControllerResourceMasterSlaveStateModelFactory(_leadControllerManager));

    // Connects to cluster.
    try {
      _helixParticipantManager.connect();
    } catch (Exception e) {
      String errorMsg =
          String.format("Exception when connecting the instance %s as Participant role to Helix.",
              _helixParticipantInstanceId);
      LOGGER.error(errorMsg, e);
      // NOTE(review): the cause 'e' is logged but not chained into the rethrown
      // RuntimeException — consider new RuntimeException(errorMsg, e).
      throw new RuntimeException(errorMsg);
    }

    LOGGER.info("Registering helix controller listener");
    // This registration is not needed when the leadControllerResource is enabled.
    // However, the resource can be disabled sometime while the cluster is in operation, so we keep it here. Plus,
    // it does not add much overhead. At some point in future when we stop supporting the disabled resource, we will
    // remove this line altogether and the logic that goes with it.
    _helixParticipantManager.addControllerListener(
        (ControllerChangeListener) changeContext -> _leadControllerManager.onHelixControllerChange());

    LOGGER.info("Registering resource config listener");
    try {
      _helixParticipantManager.addResourceConfigChangeListener(
          (resourceConfigList, changeContext) -> _leadControllerManager.onResourceConfigChange());
    } catch (Exception e) {
      throw new RuntimeException(
          "Error registering resource config listener for " + CommonConstants.Helix.LEAD_CONTROLLER_RESOURCE_NAME, e);
    }
  }

  public ControllerConf.ControllerMode getControllerMode() {
    return _controllerMode;
  }

  /**
   * Creates all periodic tasks (task manager, retention, validation, status checks,
   * relocation, minion cleanup) and returns them for the scheduler. Also populates
   * the corresponding member fields exposed via getters.
   */
  @VisibleForTesting
  protected List<PeriodicTask> setupControllerPeriodicTasks() {
    LOGGER.info("Setting up periodic tasks");
    List<PeriodicTask> periodicTasks = new ArrayList<>();
    _taskManager =
        new PinotTaskManager(_helixTaskResourceManager, _helixResourceManager, _leadControllerManager, _config,
            _controllerMetrics);
    periodicTasks.add(_taskManager);
    _retentionManager =
        new RetentionManager(_helixResourceManager, _leadControllerManager, _config, _controllerMetrics);
    periodicTasks.add(_retentionManager);
    _offlineSegmentIntervalChecker =
        new OfflineSegmentIntervalChecker(_config, _helixResourceManager, _leadControllerManager,
            new ValidationMetrics(_metricsRegistry), _controllerMetrics);
    periodicTasks.add(_offlineSegmentIntervalChecker);
    _realtimeSegmentValidationManager =
        new RealtimeSegmentValidationManager(_config, _helixResourceManager, _leadControllerManager,
            _pinotLLCRealtimeSegmentManager, new ValidationMetrics(_metricsRegistry), _controllerMetrics);
    periodicTasks.add(_realtimeSegmentValidationManager);
    _brokerResourceValidationManager =
        new BrokerResourceValidationManager(_config, _helixResourceManager, _leadControllerManager,
            _controllerMetrics);
    periodicTasks.add(_brokerResourceValidationManager);
    _segmentStatusChecker =
        new SegmentStatusChecker(_helixResourceManager, _leadControllerManager, _config, _controllerMetrics);
    periodicTasks.add(_segmentStatusChecker);
    _segmentRelocator =
        new SegmentRelocator(_helixResourceManager, _leadControllerManager, _config, _controllerMetrics,
            _executorService);
    periodicTasks.add(_segmentRelocator);
    _minionInstancesCleanupTask =
        new MinionInstancesCleanupTask(_helixResourceManager, _config, _controllerMetrics);
    periodicTasks.add(_minionInstancesCleanupTask);

    return periodicTasks;
  }

  /**
   * Stops the components started for the configured mode, deregisters the service
   * status handler, and shuts down the metrics registry.
   */
  @Override
  public void stop() {
    switch (_controllerMode) {
      case DUAL:
        stopPinotController();
        stopHelixController();
        break;
      case PINOT_ONLY:
        stopPinotController();
        break;
      case HELIX_ONLY:
        stopHelixController();
        break;
    }
    LOGGER.info("Deregistering service status handler");
    ServiceStatus.removeServiceStatusCallback(_helixParticipantInstanceId);
    LOGGER.info("Shutdown Controller Metrics Registry");
    _metricsRegistry.shutdown();
    LOGGER.info("Finish shutting down Pinot controller for {}", _helixParticipantInstanceId);
  }

  private void stopHelixController() {
    LOGGER.info("Disconnecting helix controller zk manager");
    _helixControllerManager.disconnect();
  }

  private void stopPinotController() {
    try {
      // Stopping periodic tasks has to be done before stopping HelixResourceManager.
      // Stop controller periodic task.
      LOGGER.info("Stopping controller periodic tasks");
      _periodicTaskScheduler.stop();

      LOGGER.info("Stopping lead controller manager");
      _leadControllerManager.stop();

      // Stop PinotLLCSegmentManager before stopping Jersey API. It is possible that stopping Jersey API
      // may interrupt the handlers waiting on an I/O.
      _pinotLLCRealtimeSegmentManager.stop();

      LOGGER.info("Closing PinotFS classes");
      PinotFSFactory.shutdown();

      LOGGER.info("Stopping Jersey admin API");
      _adminApp.stop();

      if (_realtimeSegmentsManager != null) {
        LOGGER.info("Stopping realtime segment manager");
        _realtimeSegmentsManager.stop();
      }

      LOGGER.info("Stopping resource manager");
      _helixResourceManager.stop();

      LOGGER.info("Disconnecting helix participant zk manager");
      _helixParticipantManager.disconnect();

      LOGGER.info("Shutting down executor service");
      _executorService.shutdownNow();
      _executorService.awaitTermination(10L, TimeUnit.SECONDS);
    } catch (final Exception e) {
      // Best-effort shutdown: log and continue rather than propagate.
      LOGGER.error("Caught exception while shutting down", e);
    }
  }

  public PinotMetricsRegistry getMetricsRegistry() {
    return _metricsRegistry;
  }

  @VisibleForTesting
  public ControllerMetrics getControllerMetrics() {
    return _controllerMetrics;
  }

  public static ControllerStarter startDefault() {
    return startDefault(null);
  }

  /**
   * Starts a controller with quickstart-style local defaults (localhost:9000,
   * ZK at localhost:2122, cluster "quickstart"). Intended for dev/testing.
   */
  public static ControllerStarter startDefault(File webappPath) {
    final ControllerConf conf = new ControllerConf();

    conf.setControllerHost("localhost");
    conf.setControllerPort("9000");
    conf.setDataDir("/tmp/PinotController");
    conf.setZkStr("localhost:2122");
    conf.setHelixClusterName("quickstart");
    if (webappPath == null) {
      String path = ControllerStarter.class.getClassLoader().getResource("webapp").getFile();
      if (!path.startsWith("file://")) {
        path = "file://" + path;
      }
      conf.setQueryConsolePath(path);
    } else {
      conf.setQueryConsolePath("file://" + webappPath.getAbsolutePath());
    }

    conf.setControllerVipHost("localhost");
    conf.setControllerVipProtocol(CommonConstants.HTTP_PROTOCOL);
    conf.setRetentionControllerFrequencyInSeconds(3600 * 6);
    conf.setOfflineSegmentIntervalCheckerFrequencyInSeconds(3600);
    conf.setRealtimeSegmentValidationFrequencyInSeconds(3600);
    conf.setBrokerResourceValidationFrequencyInSeconds(3600);
    conf.setStatusCheckerFrequencyInSeconds(5 * 60);
    conf.setSegmentRelocatorFrequencyInSeconds(3600);
    conf.setStatusCheckerWaitForPushTimeInSeconds(10 * 60);
    conf.setTenantIsolationEnabled(true);

    final ControllerStarter starter = new ControllerStarter(conf);

    starter.start();
    return starter;
  }

  public static void main(String[] args) {
    startDefault();
  }
}
/*
 * Copyright (c) 2015-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 */

package com.facebook.imageformat;

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;

import com.facebook.common.internal.ByteStreams;
import com.facebook.common.internal.Closeables;
import com.facebook.common.internal.Ints;
import com.facebook.common.internal.Preconditions;
import com.facebook.common.internal.Throwables;
import com.facebook.common.webp.WebpSupportStatus;

/**
 * Detects the format of an encoded image by sniffing the leading "magic number"
 * bytes of its data. Static utility class; not instantiable.
 *
 * NOTE: the header-pattern constants and MAX_HEADER_LENGTH below rely on Java's
 * textual static-initialization order (MAX_HEADER_LENGTH is computed from the
 * constants declared above it) — do not reorder the static fields.
 */
public class ImageFormatChecker {

  // Utility class: prevent instantiation.
  private ImageFormatChecker() {}

  /**
   * Tries to match imageHeaderBytes and headerSize against every known image format.
   * If any match succeeds, the corresponding ImageFormat is returned.
   * @param imageHeaderBytes buffer holding the image header
   * @param headerSize number of valid bytes at the start of imageHeaderBytes
   * @return ImageFormat for given imageHeaderBytes or UNKNOWN if no such type could be recognized
   */
  private static ImageFormat doGetImageFormat(
      final byte[] imageHeaderBytes,
      final int headerSize) {
    Preconditions.checkNotNull(imageHeaderBytes);
    // WebP is tested first and then refined into its sub-formats (simple,
    // lossless, extended, animated).
    if (WebpSupportStatus.isWebpHeader(imageHeaderBytes, 0, headerSize)) {
      return getWebpFormat(imageHeaderBytes, headerSize);
    }
    if (isJpegHeader(imageHeaderBytes, headerSize)) {
      return ImageFormat.JPEG;
    }
    if (isPngHeader(imageHeaderBytes, headerSize)) {
      return ImageFormat.PNG;
    }
    if (isGifHeader(imageHeaderBytes, headerSize)) {
      return ImageFormat.GIF;
    }
    if (isBmpHeader(imageHeaderBytes, headerSize)) {
      return ImageFormat.BMP;
    }
    return ImageFormat.UNKNOWN;
  }

  /**
   * Reads up to MAX_HEADER_LENGTH bytes from is InputStream. If mark is supported by is, it is
   * used to restore content of the stream after the appropriate amount of data is read.
   * Read bytes are stored in imageHeaderBytes, which should be capable of storing
   * MAX_HEADER_LENGTH bytes.
   * @param is stream to read the header from
   * @param imageHeaderBytes destination buffer; must hold at least MAX_HEADER_LENGTH bytes
   * @return number of bytes read from is
   * @throws IOException
   */
  private static int readHeaderFromStream(
      final InputStream is,
      final byte[] imageHeaderBytes)
      throws IOException {
    Preconditions.checkNotNull(is);
    Preconditions.checkNotNull(imageHeaderBytes);
    Preconditions.checkArgument(imageHeaderBytes.length >= MAX_HEADER_LENGTH);

    // If mark is supported by the stream, use it to let the owner of the stream re-read the same
    // data. Otherwise, just consume some data.
    if (is.markSupported()) {
      try {
        is.mark(MAX_HEADER_LENGTH);
        return ByteStreams.read(is, imageHeaderBytes, 0, MAX_HEADER_LENGTH);
      } finally {
        // reset() restores the stream to the marked position so the caller can
        // re-read the header bytes.
        is.reset();
      }
    } else {
      return ByteStreams.read(is, imageHeaderBytes, 0, MAX_HEADER_LENGTH);
    }
  }

  /**
   * Tries to read up to MAX_HEADER_LENGTH bytes from InputStream is and use the read bytes to
   * determine the type of the image contained in is. If the provided input stream does not support
   * mark, then this method consumes data from is and it is not safe to read further bytes from is
   * after this method returns. Otherwise, if mark is supported, it will be used to preserve the
   * original content of is.
   * @param is stream positioned at the start of the image data
   * @return ImageFormat matching content of is InputStream or UNKNOWN if no type is suitable
   * @throws IOException if exception happens during read
   */
  public static ImageFormat getImageFormat(final InputStream is) throws IOException {
    Preconditions.checkNotNull(is);
    final byte[] imageHeaderBytes = new byte[MAX_HEADER_LENGTH];
    final int headerSize = readHeaderFromStream(is, imageHeaderBytes);
    return doGetImageFormat(imageHeaderBytes, headerSize);
  }

  /*
   * A variant of getImageFormat that wraps IOException with RuntimeException.
   * This relieves clients of implementing a dummy rethrow try-catch block.
   */
  public static ImageFormat getImageFormat_WrapIOException(final InputStream is) {
    try {
      return getImageFormat(is);
    } catch (IOException ioe) {
      throw Throwables.propagate(ioe);
    }
  }

  /**
   * Reads the image header from a file indicated by the provided filename and determines
   * its format. This method does not throw IOException if one occurs. In this case,
   * ImageFormat.UNKNOWN will be returned.
   * @param filename path of the image file to inspect
   * @return ImageFormat for image stored in filename
   */
  public static ImageFormat getImageFormat(String filename) {
    FileInputStream fileInputStream = null;
    try {
      fileInputStream = new FileInputStream(filename);
      return getImageFormat(fileInputStream);
    } catch (IOException ioe) {
      // Deliberately best-effort: unreadable file is reported as UNKNOWN.
      return ImageFormat.UNKNOWN;
    } finally {
      Closeables.closeQuietly(fileInputStream);
    }
  }

  /**
   * Checks if byteArray interpreted as a sequence of bytes has a subsequence equal to pattern
   * starting at position equal to offset.
   * @param byteArray buffer to search in
   * @param offset starting position in byteArray
   * @param pattern bytes that must appear at offset
   * @return true if match succeeds, false otherwise
   */
  private static boolean matchBytePattern(
      final byte[] byteArray,
      final int offset,
      final byte[] pattern) {
    Preconditions.checkNotNull(byteArray);
    Preconditions.checkNotNull(pattern);
    Preconditions.checkArgument(offset >= 0);
    if (pattern.length + offset > byteArray.length) {
      return false;
    }

    for (int i = 0; i < pattern.length; ++i) {
      if (byteArray[i + offset] != pattern[i]) {
        return false;
      }
    }

    return true;
  }

  /**
   * Helper method that transforms the provided string into its byte representation
   * using ASCII encoding.
   * @param value string to encode
   * @return byte array representing ascii encoded value
   */
  private static byte[] asciiBytes(String value) {
    Preconditions.checkNotNull(value);
    try {
      return value.getBytes("ASCII");
    } catch (UnsupportedEncodingException uee) {
      // won't happen — ASCII is a mandatory charset on every JVM
      throw new RuntimeException("ASCII not found!", uee);
    }
  }

  /**
   * Each WebP header should consist of at least 20 bytes and start
   * with "RIFF" bytes followed by some 4 bytes and "WEBP" bytes.
   * A more detailed description of WebP can be found here:
   * <a href="https://developers.google.com/speed/webp/docs/riff_container">
   *   https://developers.google.com/speed/webp/docs/riff_container</a>
   */
  private static final int SIMPLE_WEBP_HEADER_LENGTH = 20;

  /**
   * Each VP8X WebP image has a "features" byte following its ChunkHeader('VP8X')
   */
  private static final int EXTENDED_WEBP_HEADER_LENGTH = 21;

  /**
   * Determines the type of WebP image. imageHeaderBytes has to be the header of a WebP image.
   */
  private static ImageFormat getWebpFormat(final byte[] imageHeaderBytes, final int headerSize) {
    Preconditions.checkArgument(WebpSupportStatus.isWebpHeader(imageHeaderBytes, 0, headerSize));
    if (WebpSupportStatus.isSimpleWebpHeader(imageHeaderBytes, 0)) {
      return ImageFormat.WEBP_SIMPLE;
    }

    if (WebpSupportStatus.isLosslessWebpHeader(imageHeaderBytes, 0)) {
      return ImageFormat.WEBP_LOSSLESS;
    }

    if (WebpSupportStatus.isExtendedWebpHeader(imageHeaderBytes, 0, headerSize)) {
      if (WebpSupportStatus.isAnimatedWebpHeader(imageHeaderBytes, 0)) {
        return ImageFormat.WEBP_ANIMATED;
      }
      if (WebpSupportStatus.isExtendedWebpHeaderWithAlpha(imageHeaderBytes, 0)) {
        return ImageFormat.WEBP_EXTENDED_WITH_ALPHA;
      }
      return ImageFormat.WEBP_EXTENDED;
    }

    return ImageFormat.UNKNOWN;
  }

  /**
   * Every JPEG image should start with SOI mark (0xFF, 0xD8) followed by the beginning
   * of another segment (0xFF).
   */
  private static final byte[] JPEG_HEADER = new byte[] {(byte) 0xFF, (byte) 0xD8, (byte) 0xFF};

  /**
   * Checks if imageHeaderBytes starts with SOI (start of image) marker, followed by 0xFF.
   * If headerSize is lower than 3 false is returned.
   * A description of the jpeg format can be found here:
   * <a href="http://www.w3.org/Graphics/JPEG/itu-t81.pdf">
   *   http://www.w3.org/Graphics/JPEG/itu-t81.pdf</a>
   * Annex B deals with the compressed data format.
   * @param imageHeaderBytes buffer holding the image header
   * @param headerSize number of valid bytes in imageHeaderBytes
   * @return true if imageHeaderBytes starts with SOI_BYTES and headerSize >= 3
   */
  private static boolean isJpegHeader(final byte[] imageHeaderBytes, final int headerSize) {
    return headerSize >= JPEG_HEADER.length &&
        matchBytePattern(imageHeaderBytes, 0, JPEG_HEADER);
  }

  /**
   * Every PNG image starts with an 8 byte signature consisting of
   * the following bytes.
   */
  private static final byte[] PNG_HEADER = new byte[] {
      (byte) 0x89,
      'P', 'N', 'G',
      (byte) 0x0D, (byte) 0x0A, (byte) 0x1A, (byte) 0x0A};

  /**
   * Checks if the array consisting of the first headerSize bytes of imageHeaderBytes
   * starts with the png signature. More information on PNG can be found here:
   * <a href="http://en.wikipedia.org/wiki/Portable_Network_Graphics">
   *   http://en.wikipedia.org/wiki/Portable_Network_Graphics</a>
   * @param imageHeaderBytes buffer holding the image header
   * @param headerSize number of valid bytes in imageHeaderBytes
   * @return true if imageHeaderBytes starts with PNG_HEADER
   */
  private static boolean isPngHeader(final byte[] imageHeaderBytes, final int headerSize) {
    return headerSize >= PNG_HEADER.length &&
        matchBytePattern(imageHeaderBytes, 0, PNG_HEADER);
  }

  /**
   * Every gif image starts with "GIF" bytes followed by
   * bytes indicating the version of the gif standard.
   */
  private static final byte[] GIF_HEADER_87A = asciiBytes("GIF87a");
  private static final byte[] GIF_HEADER_89A = asciiBytes("GIF89a");
  private static final int GIF_HEADER_LENGTH = 6;

  /**
   * Checks if the first headerSize bytes of imageHeaderBytes constitute a valid header for a gif
   * image. Details on the GIF header can be found
   * <a href="http://www.w3.org/Graphics/GIF/spec-gif89a.txt"> on page 7</a>.
   * @param imageHeaderBytes buffer holding the image header
   * @param headerSize number of valid bytes in imageHeaderBytes
   * @return true if imageHeaderBytes is a valid header for a gif image
   */
  private static boolean isGifHeader(final byte[] imageHeaderBytes, final int headerSize) {
    if (headerSize < GIF_HEADER_LENGTH) {
      return false;
    }
    return matchBytePattern(imageHeaderBytes, 0, GIF_HEADER_87A) ||
        matchBytePattern(imageHeaderBytes, 0, GIF_HEADER_89A);
  }

  /**
   * Every bmp image starts with "BM" bytes.
   */
  private static final byte[] BMP_HEADER = asciiBytes("BM");

  /**
   * Checks if the first headerSize bytes of imageHeaderBytes constitute a valid header for a bmp
   * image. Details on the BMP header can be found
   * <a href="http://www.onicos.com/staff/iz/formats/bmp.html"></a>
   * @param imageHeaderBytes buffer holding the image header
   * @param headerSize number of valid bytes in imageHeaderBytes
   * @return true if imageHeaderBytes is a valid header for a bmp image
   */
  private static boolean isBmpHeader(final byte[] imageHeaderBytes, final int headerSize) {
    if (headerSize < BMP_HEADER.length) {
      return false;
    }
    return matchBytePattern(imageHeaderBytes, 0, BMP_HEADER);
  }

  /**
   * Maximum header size for any image type.
   *
   * <p>This determines how much data {@link #getImageFormat(InputStream)}
   * reads from a stream. After changing any of the type detection algorithms, or adding a new one,
   * this value should be edited.
   *
   * <p>Must be declared after every header constant it references
   * (static initializers run in textual order).
   */
  private static final int MAX_HEADER_LENGTH = Ints.max(
      EXTENDED_WEBP_HEADER_LENGTH,
      SIMPLE_WEBP_HEADER_LENGTH,
      JPEG_HEADER.length,
      PNG_HEADER.length,
      GIF_HEADER_LENGTH,
      BMP_HEADER.length);
}
/**
 * \file Human.java
 *
 * \author G.D Godfrey
 *
 * \date 27 Feb '14
 *
 * \brief Unit test class for the Human class.
 */

import static org.junit.Assert.*;

import java.io.IOException;

import org.junit.Test;

public class HumanTest {

	/**
	 * Creates and initialises a ProgramController for a test.
	 *
	 * Centralises the construction boilerplate previously duplicated in every
	 * test. An initialisation failure is now rethrown so the test fails at the
	 * point of the problem, instead of being printed and silently continuing
	 * with a half-initialised controller.
	 *
	 * @return a fully initialised ProgramController
	 */
	private static ProgramController createController() {
		ProgramController pc = new ProgramController();
		try {
			pc.ProgramController();
		} catch (IOException e) {
			throw new RuntimeException("Could not initialise ProgramController", e);
		}
		return pc;
	}

	/** Test shows moving piece to position (1,1) at start of game is a valid move. */
	@Test
	public void test1Connect4() {
		ProgramController pc = createController();
		Player testPlayer = pc.getGame().getPlayer(0);
		assertTrue(testPlayer.move(1, 1, pc));
	}

	/** Test shows moving piece to position (4,5) at start of game is a valid move. */
	@Test
	public void test2Connect4() {
		ProgramController pc = createController();
		Player testPlayer = pc.getGame().getPlayer(0);
		assertTrue(testPlayer.move(4, 5, pc));
	}

	/** Test shows a move that is not on the board is not a valid move. */
	@Test(expected = ArrayIndexOutOfBoundsException.class)
	public void test3Connect4() {
		ProgramController pc = createController();
		Player testPlayer = pc.getGame().getPlayer(0);
		// No assertion needed: ArrayIndexOutOfBoundsException is expected.
		testPlayer.move(12, 12, pc);
	}

	/**
	 * Test shows a piece can not be placed on another piece:
	 * if a column is full then any move made there is an invalid move.
	 */
	@Test
	public void test4Connect4() {
		ProgramController pc = createController();
		Piece testRedPiece = new Piece("Red");
		Piece testYellowPiece = new Piece("Yellow");
		// Fill column 0 completely with alternating colours.
		for (int row = 0; row < pc.getGame().getBoard().getBoardHeight(); row++) {
			if (row % 2 == 0) {
				pc.getGame().getBoard().setPiece(testRedPiece, 0, row);
			} else {
				pc.getGame().getBoard().setPiece(testYellowPiece, 0, row);
			}
		}
		assertFalse(pc.getGame().getPlayer(1).move(0, 0, pc));
	}

	/** Test a Piece drops to the bottom of the column. */
	@Test
	public void test5Connect4() {
		ProgramController pc = createController();
		Piece testRedPiece = new Piece("Red");
		Piece testYellowPiece = new Piece("Yellow");
		// Fill column 0 with alternating colours (loop variable renamed from
		// `col` to `row`: it iterates the board height and is passed in the
		// same row position as in test4Connect4).
		for (int row = 0; row < pc.getGame().getBoard().getBoardHeight(); row++) {
			if (row % 2 == 0) {
				pc.getGame().getBoard().setPiece(testRedPiece, 0, row);
			} else {
				pc.getGame().getBoard().setPiece(testYellowPiece, 0, row);
			}
		}
		assertTrue(pc.getGame().getPlayer(1).move(3, 6, pc));
	}

	/** Shows invalid move at start of game. */
	@Test
	public void Test1Othelo() {
		ProgramController pc = createController();
		Player testPlayer = pc.getGame().getPlayer(0);
		assertFalse(testPlayer.move(3, 0, pc));
	}

	/** Shows invalid move at start of game. */
	@Test
	public void Test2Othello() {
		ProgramController pc = createController();
		Player testPlayer = pc.getGame().getPlayer(1);
		assertFalse(testPlayer.move(2, 2, pc));
	}

	/** Shows valid move at start of game. */
	@Test
	public void Test3Othello() {
		ProgramController pc = createController();
		Player testPlayer = pc.getGame().getPlayer(1);
		assertTrue(testPlayer.move(4, 2, pc));
	}

	/** Shows valid move at start of game. */
	@Test
	public void Test4Othello() {
		ProgramController pc = createController();
		Player testPlayer = pc.getGame().getPlayer(1);
		assertTrue(testPlayer.move(2, 4, pc));
	}

	/** Test to ensure player cannot place a piece outside the board. */
	@Test(expected = ArrayIndexOutOfBoundsException.class)
	public void Test5Othello() {
		ProgramController pc = createController();
		Player testPlayer = pc.getGame().getPlayer(1);
		// No assertion needed: ArrayIndexOutOfBoundsException is expected.
		testPlayer.move(12, 15, pc);
	}

	/**
	 * Test shows invalid move:
	 * if a row is full then any move made there is an invalid move.
	 */
	@Test
	public void Test6othello() {
		ProgramController pc = createController();
		Piece testBlackPiece = new Piece("Black");
		Piece testWhitePiece = new Piece("White");
		// Fill column 0 completely with alternating colours.
		for (int row = 0; row < pc.getGame().getBoard().getBoardHeight(); row++) {
			if (row % 2 == 0) {
				pc.getGame().getBoard().setPiece(testBlackPiece, 0, row);
			} else {
				pc.getGame().getBoard().setPiece(testWhitePiece, 0, row);
			}
		}
		assertFalse(pc.getGame().getPlayer(1).move(2, 2, pc));
	}
}
/*******************************************************************************
 * The MIT License (MIT)
 * Copyright (c) 2015 University of Twente
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *******************************************************************************/
package hmi.graphics.util;

import hmi.animation.ConfigList;
import hmi.animation.SkeletonInterpolator;
import hmi.graphics.collada.Collada;
import hmi.graphics.collada.Scene;
import hmi.graphics.collada.scenegraph.ColladaTranslator;
import hmi.graphics.scenegraph.GScene;
import hmi.graphics.scenegraph.GSkinnedMesh;
import hmi.math.Mat4f;
import hmi.math.Quat4f;
import hmi.xml.XMLTokenizer;

import java.io.File;
import java.io.PrintWriter;
import java.util.HashMap;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A class for conversion of SkeletonInterpolator animation data, necessary when a Collada skeleton
 * needs adaptation of bind pose matrices, must be rotated into position, and must be put into HAnim
 * pose as its neutral position. The constructor reads a Collada file, supposed to include a Collada
 * scene containing the virtual character, including the skeleton to be used for the conversion
 * data. Thereafter, SkeletonInterpolators can be converted directly using the
 * adaptSkeletonInterpolator method.
 * @author Job Zwiers
 */
public class AnimationConverter
{
    private String[] jointSIDs; // used to resolve the GNodes from the skeleton
    private int[] parentIndex; // parentIndex[i] = index of the parent joint of joint i, or -1 if joint i is a root joint.
    private float[][] inverseBindMatrices; // inverse bind matrices, one for every joint, each matrix containing 16 floats, in row major order.
    private float[][] originalInverseBindMatrices; // addition, not essential for main framework, used for adapting skeleton interpolators
    private static Logger logger = LoggerFactory.getLogger(AnimationConverter.class.getName());

    /**
     * Creates a new AnimationConverter for converting animation data (i.e. SkeletonInterpolator
     * data), using the bind-pose adaptation, the rotation, and the HAnim conversion data from a
     * Collada skeleton that is to be found in the Collada file specified as a resource file within
     * a specified resource dir. (The latter can be null or empty.)
     */
    public AnimationConverter(String resourceDir, String resourceFileName)
    {
        readSkeletonAdaptationData(resourceDir, resourceFileName);
    }

    /*
     * Extracts the original as well as current bind matrices, jointSIDs, and parentIndex from
     * the (first) skeleton found in a Collada scene, within the specified resource file, within
     * the specified resource directory. (The latter can be null or empty.)
     * Mimics SceneIO readGScene, to some extent.
     */
    private void readSkeletonAdaptationData(String resourceDir, String resourceFileName)
    {
        String resDir = (resourceDir == null || resourceDir.equals("")) ? "" : resourceDir.replace('\\', '/') + "/";
        String file = resDir + resourceFileName;
        try
        {
            // only Collada (.dae) input is supported
            if (file.endsWith(".dae") || file.endsWith(".DAE"))
            {
                Collada col = Collada.forResource(file);
                if (col == null) throw new RuntimeException("AnimationConverter: null Collada input");
                getSkeletonAdaptationData(col);
            }
            else
            {
                logger.error("AnimationConverter: scene file must be a Collada dae file");
            }
        }
        catch (Exception ioe)
        {
            // NOTE(review): rewrapping with only getMessage() loses the original stack trace.
            throw new RuntimeException(ioe.getMessage());
        }
    }

    /*
     * Extracts the original as well as current bind matrices, jointSIDs, and parentIndex from
     * the (first) skeleton found in the Collada scene.
     * This method mimics the processing from ColladaTranslator methods like
     * colladaToGSkinnedMeshScene and SceneIO methods like readGScene.
     */
    private void getSkeletonAdaptationData(Collada collada)
    {
        Scene scene = collada.getScene();
        if (scene == null)
        {
            throw new RuntimeException("AnimationConverter: Collada document without scene.");
        }
        GScene gscene = ColladaTranslator.colladaSceneToGScene(collada, scene);
        // gscene.normalizeMeshes(); // ensure triangle meshes with unified indices: not needed here for animation conversion
        gscene.collectSkinnedMeshes();
        gscene.resolveSkinnedMeshJoints();
        Map<String, String> renaming = new HashMap<String, String>();
        // Rotate the scene so that Y is "up", depending on the Collada asset's declared up axis.
        String upAxis = collada.getAsset().getUpAxis();
        if (upAxis.equals("X_UP"))
        {
            gscene.rotate(0f, 0f, 1f, (float) (Math.PI / 2.0)); // from X-up to Y-up
        }
        else if (upAxis.equals("Y_UP") || upAxis.equals(""))
        {
            // ok, do nothing
        }
        else if (upAxis.equals("Z_UP"))
        {
            gscene.rotate(1f, 0f, 0f, (float) (-Math.PI / 2.0)); // from Z-up to Y-up
        }
        else
        { // unknown up axis.
            logger.error("ColladaTranslator: Collada Asset with unknown or incorrect UP axis: " + upAxis);
        }
        // Scale from the asset's unit to meters, if necessary.
        float scale = collada.getAsset().getUnitMeter();
        if (scale != 1.0f) gscene.scale(scale);
        GSkinnedMesh gmesh = gscene.getSkinnedMeshes().get(0);
        // Snapshot the inverse bind matrices *before* simplifyBindPose/HAnim processing,
        // so both the original and the adapted versions are available for conversion.
        inverseBindMatrices = gmesh.getInvBindMatrices(); // before adjustBindPose/HAnim, so original
        originalInverseBindMatrices = new float[inverseBindMatrices.length][Mat4f.MAT4F_SIZE];
        for (int i = 0; i < inverseBindMatrices.length; i++)
        {
            Mat4f.set(originalInverseBindMatrices[i], inverseBindMatrices[i]);
        }
        gmesh.simplifyBindPose();
        String renamingList = collada.getRenamingList();
        if (renamingList != null)
        {
            renaming = ColladaTranslator.getColladaRenaming(renamingList, gscene.getRootNodes());
            gscene.renameJoints(renaming);
        }
        gscene.setSkeletonHAnimPoses();
        // Re-read joint data after the HAnim adaptation: these are the "current" values.
        this.jointSIDs = gmesh.getJointSIDs();
        this.parentIndex = gmesh.getParentIndex();
        this.inverseBindMatrices = gmesh.getInvBindMatrices();
    }

    /**
     * Adapts the poses from the specified SkeletonInterpolator, using the adaptation data taken
     * from the (first) skeleton within the scene specified as argument for the constructor.
     */
    public SkeletonInterpolator adaptSkeletonInterpolator(SkeletonInterpolator skel)
    {
        ConfigList configs = skel.getConfigList();
        int configSize = configs.getConfigSize();
        String configType = skel.getConfigType();
        String[] partIds = skel.getPartIds();
        int nrOfParts = partIds.length;

        // Per animated part: bind data from the *original* Collada skeleton ...
        float[][] origBindParentMat = new float[nrOfParts][Mat4f.MAT4F_SIZE]; // original bind matrix, from Collada file
        float[][] origInvBindMat = new float[nrOfParts][]; // original inverse bind matrix, from Collada file
        float[][] origBindParentQuat = new float[nrOfParts][Quat4f.QUAT4F_SIZE];
        float[][] origInvBindQuat = new float[nrOfParts][Quat4f.QUAT4F_SIZE];
        // A = -90 degree rotation about the X axis (and its inverse), used to conjugate each pose rotation.
        float[] Aquat = Quat4f.getQuat4fFromAxisAngleDegrees(1.0f, 0.0f, 0.0f, -90.0f);
        float[] invAquat = Quat4f.getQuat4fFromAxisAngleDegrees(1.0f, 0.0f, 0.0f, 90.0f);
        // ... and from the "current" (adapted) skeleton.
        float[][] bindMat = new float[nrOfParts][Mat4f.MAT4F_SIZE]; // "current" bind matrix, after adjustBindPoses/setHAnim etc
        float[][] invBindParentMat = new float[nrOfParts][Mat4f.MAT4F_SIZE]; // "current" inverse bind mat from parent
        float[][] bindQuat = new float[nrOfParts][Quat4f.QUAT4F_SIZE];
        float[][] invBindParentQuat = new float[nrOfParts][Quat4f.QUAT4F_SIZE];

        for (int pi = 0; pi < nrOfParts; pi++)
        {
            String pid = partIds[pi];
            // search for this part in the skeleton joints
            for (int ji = 0; ji < jointSIDs.length; ji++)
            {
                if (jointSIDs[ji].equals(pid))
                {
                    origInvBindMat[pi] = originalInverseBindMatrices[ji];
                    Quat4f.setFromMat4f(origInvBindQuat[pi], origInvBindMat[pi]);
                    int parIndex = parentIndex[ji];
                    float[] origInvBindParent = null;
                    if (parIndex >= 0)
                    {
                        origInvBindParent = originalInverseBindMatrices[parIndex];
                        invBindParentMat[pi] = inverseBindMatrices[parIndex];
                    }
                    else
                    {
                        // root joint: no parent, use identity matrices
                        origInvBindParent = Mat4f.getIdentity();
                        invBindParentMat[pi] = Mat4f.getIdentity();
                    }
                    Mat4f.invertAffine(origBindParentMat[pi], origInvBindParent);
                    Quat4f.setFromMat4f(origBindParentQuat[pi], origBindParentMat[pi]);
                    Quat4f.setFromMat4f(invBindParentQuat[pi], invBindParentMat[pi]);
                    Mat4f.invertAffine(bindMat[pi], inverseBindMatrices[ji]);
                    Quat4f.setFromMat4f(bindQuat[pi], bindMat[pi]);
                }
            }
        }
        int nrOfJoints = configSize / 4; // each joint rotation occupies 4 floats (a quaternion)
        if (nrOfJoints != nrOfParts)
        {
            logger.error("GLSkinnedMesh.adaptSkeletonInterpolator: nrOfParts differs from nrOfJoints");
        }
        int offset = configType.equals("T1R") ? 3 : 0; // assume config type is R (offset 0) or T1R (3 translation floats first)
        int stride = 4;
        float[] q = Quat4f.getQuat4f();
        float[] newq = Quat4f.getQuat4f();
        ConfigList adapted = new ConfigList(configSize);
        for (int ci = 0; ci < configs.size(); ci++)
        {
            float[] cfg = configs.getConfig(ci);
            float[] newcfg = new float[configSize];
            for (int pi = 0; pi < nrOfJoints; pi++)
            {
                q[Quat4f.S] = cfg[offset + stride * pi];
                q[Quat4f.X] = cfg[offset + stride * pi + 1];
                q[Quat4f.Y] = cfg[offset + stride * pi + 2];
                q[Quat4f.Z] = cfg[offset + stride * pi + 3];
                // newq = invBindParent * origBindParent * (A q A^-1) * origInvBind * bind
                // The multiplication order below is essential; do not reorder.
                Quat4f.mul(newq, Aquat, q);
                Quat4f.mul(newq, newq, invAquat);
                Quat4f.mul(newq, origBindParentQuat[pi], newq);
                Quat4f.mul(newq, newq, origInvBindQuat[pi]);
                Quat4f.mul(newq, invBindParentQuat[pi], newq);
                Quat4f.mul(newq, newq, bindQuat[pi]);
                newcfg[offset + stride * pi] = newq[Quat4f.S];
                newcfg[offset + stride * pi + 1] = newq[Quat4f.X];
                newcfg[offset + stride * pi + 2] = newq[Quat4f.Y];
                newcfg[offset + stride * pi + 3] = newq[Quat4f.Z];
            }
            adapted.addConfig(configs.getTime(ci), newcfg);
        }
        SkeletonInterpolator result = new SkeletonInterpolator(skel.getPartIds(), adapted, skel.getConfigType());
        return result;
    }

    /**
     * Converts a SkeletonInterpolator, read from file, and writes the adapted
     * version as XML to the specified output file (both resolved relative to
     * resourceDir, which can be null or empty).
     */
    public void convertSkeletonInterpolator(String resourceDir, String skeletonInterpolatorFileName, String convertedInterpolatorFileName)
    {
        String resDir = (resourceDir == null || resourceDir.equals("")) ? "" : resourceDir.replace('\\', '/') + "/";
        String infileName = resDir + skeletonInterpolatorFileName;
        String outfile = resDir + convertedInterpolatorFileName;
        try
        {
            File inFile = new File(infileName);
            XMLTokenizer tk = new XMLTokenizer(inFile);
            SkeletonInterpolator ski = new SkeletonInterpolator(tk);
            // tk.closeReader();
            SkeletonInterpolator adaptedSki = adaptSkeletonInterpolator(ski);
            PrintWriter out = new PrintWriter(outfile);
            int tab = 3;
            adaptedSki.writeXML(out, tab);
            out.close();
        }
        catch (Exception e)
        {
            logger.error("AnimationConvertor: " + e);
        }
    }

    /**
     * Command-line entry point: converts a SkeletonInterpolator file using the
     * (hard-coded) Armandia Collada skeleton.
     * Usage: &lt;SkeletonInterpolator file&gt; [&lt;output file&gt;]
     */
    public static void main(String[] arg)
    {
        String humanoidResources = "Humanoids/armandia/dae";
        String colladafile = "armandia-toplevel.dae";
        String infile = "";
        String outfile = "";
        switch (arg.length)
        {
        case 1:
            infile = arg[0];
            outfile = "converted-" + infile;
            break;
        case 2:
            infile = arg[0];
            outfile = arg[1];
            break;
        default:
            System.out.println("provide conversion arguments: <SkeletonInterpolator file> [<output file>] ");
            System.exit(0);
        }
        logger.info("AnimationConvertor for " + colladafile + ", converting " + infile + " to " + outfile);
        AnimationConverter converter = new AnimationConverter(humanoidResources, colladafile);
        converter.convertSkeletonInterpolator(null, infile, outfile);
        System.out.println("Conversion finished");
    }
}
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.trans.steps.xmloutput; import java.text.DecimalFormat; import java.text.SimpleDateFormat; import java.util.Date; import java.util.List; import java.util.Map; import org.apache.commons.vfs2.FileObject; import org.pentaho.di.core.CheckResult; import org.pentaho.di.core.CheckResultInterface; import org.pentaho.di.core.Const; import org.pentaho.di.core.util.Utils; import org.pentaho.di.core.annotations.Step; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.injection.Injection; import org.pentaho.di.core.injection.InjectionDeep; import org.pentaho.di.core.injection.InjectionSupported; import org.pentaho.di.core.row.RowMeta; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMeta; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import 
org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.di.resource.ResourceDefinition; import org.pentaho.di.resource.ResourceNamingInterface; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.step.StepMetaInterface; import org.pentaho.di.trans.steps.xmloutput.XMLField.ContentType; import org.pentaho.metastore.api.IMetaStore; import org.w3c.dom.Node; /** * This class knows how to handle the MetaData for the XML output step * * @since 14-jan-2006 * */ @Step( id = "XMLOutput", image = "XOU.svg", i18nPackageName = "org.pentaho.di.trans.steps.xmloutput", name = "XMLOutput.name", description = "XMLOutput.description", categoryDescription = "XMLOutput.category", documentationUrl = "http://wiki.pentaho.com/display/EAI/XML+Output" ) @InjectionSupported( localizationPrefix = "XMLOutput.Injection.", groups = "OUTPUT_FIELDS" ) public class XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { private static Class<?> PKG = XMLOutputMeta.class; // for i18n purposes, needed by Translator2!! 
/** The base name of the output file */ @Injection( name = "FILENAME" ) private String fileName; /** The file extention in case of a generated filename */ @Injection( name = "EXTENSION" ) private String extension; /** Whether to push the output into the output of a servlet with the executeTrans Carte/DI-Server servlet */ @Injection( name = "PASS_TO_SERVLET" ) private boolean servletOutput; /** * if this value is larger then 0, the text file is split up into parts of this number of lines */ @Injection( name = "SPLIT_EVERY" ) private int splitEvery; /** Flag: add the stepnr in the filename */ @Injection( name = "INC_STEPNR_IN_FILENAME" ) private boolean stepNrInFilename; /** Flag: add the date in the filename */ @Injection( name = "INC_DATE_IN_FILENAME" ) private boolean dateInFilename; /** Flag: add the time in the filename */ @Injection( name = "INC_TIME_IN_FILENAME" ) private boolean timeInFilename; /** Flag: put the destination file in a zip archive */ @Injection( name = "ZIPPED" ) private boolean zipped; /** * The encoding to use for reading: null or empty string means system default encoding */ @Injection( name = "ENCODING" ) private String encoding; /** * The name space for the XML document: null or empty string means no xmlns is written */ @Injection( name = "NAMESPACE" ) private String nameSpace; /** The name of the parent XML element */ @Injection( name = "MAIN_ELEMENT" ) private String mainElement; /** The name of the repeating row XML element */ @Injection( name = "REPEAT_ELEMENT" ) private String repeatElement; /** Flag: add the filenames to result filenames */ @Injection( name = "ADD_TO_RESULT" ) private boolean addToResultFilenames; /* THE FIELD SPECIFICATIONS ... 
*/ /** The output fields */ @InjectionDeep private XMLField[] outputFields; /** Flag : Do not open new file when transformation start */ @Injection( name = "DO_NOT_CREATE_FILE_AT_STARTUP" ) private boolean doNotOpenNewFileInit; /** Omit null elements from xml output */ @Injection( name = "OMIT_NULL_VALUES" ) private boolean omitNullValues; @Injection( name = "SPEFICY_FORMAT" ) private boolean SpecifyFormat; @Injection( name = "DATE_FORMAT" ) private String date_time_format; public XMLOutputMeta() { super(); // allocate BaseStepMeta } /** * @return Returns the dateInFilename. */ public boolean isDateInFilename() { return dateInFilename; } /** * @param dateInFilename * The dateInFilename to set. */ public void setDateInFilename( boolean dateInFilename ) { this.dateInFilename = dateInFilename; } /** * @return Returns the extension. */ public String getExtension() { return extension; } /** * @param extension * The extension to set. */ public void setExtension( String extension ) { this.extension = extension; } /** * @return Returns the "do not open new file at init" flag. */ public boolean isDoNotOpenNewFileInit() { return doNotOpenNewFileInit; } /** * @param doNotOpenNewFileInit * The "do not open new file at init" flag to set. */ public void setDoNotOpenNewFileInit( boolean doNotOpenNewFileInit ) { this.doNotOpenNewFileInit = doNotOpenNewFileInit; } /** * @return Returns the fileName. */ public String getFileName() { return fileName; } /** * @param fileName * The fileName to set. */ public void setFileName( String fileName ) { this.fileName = fileName; } /** * @return Returns the splitEvery. */ public int getSplitEvery() { return splitEvery; } /** * @param splitEvery * The splitEvery to set. */ public void setSplitEvery( int splitEvery ) { this.splitEvery = splitEvery; } /** * @return Returns the stepNrInFilename. */ public boolean isStepNrInFilename() { return stepNrInFilename; } /** * @param stepNrInFilename * The stepNrInFilename to set. 
*/ public void setStepNrInFilename( boolean stepNrInFilename ) { this.stepNrInFilename = stepNrInFilename; } /** * @return Returns the timeInFilename. */ public boolean isTimeInFilename() { return timeInFilename; } /** * @param timeInFilename * The timeInFilename to set. */ public void setTimeInFilename( boolean timeInFilename ) { this.timeInFilename = timeInFilename; } /** * @return Returns the add to result filesname. */ public boolean isAddToResultFiles() { return addToResultFilenames; } /** * @param addtoresultfilenamesin * The addtoresultfilenames to set. */ public void setAddToResultFiles( boolean addtoresultfilenamesin ) { this.addToResultFilenames = addtoresultfilenamesin; } public boolean isSpecifyFormat() { return SpecifyFormat; } public void setSpecifyFormat( boolean SpecifyFormat ) { this.SpecifyFormat = SpecifyFormat; } public String getDateTimeFormat() { return date_time_format; } public void setDateTimeFormat( String date_time_format ) { this.date_time_format = date_time_format; } /** * @return Returns the zipped. */ public boolean isZipped() { return zipped; } /** * @param zipped * The zipped to set. */ public void setZipped( boolean zipped ) { this.zipped = zipped; } /** * @return Returns the outputFields. */ public XMLField[] getOutputFields() { return outputFields; } /** * @param outputFields * The outputFields to set. 
*/ public void setOutputFields( XMLField[] outputFields ) { this.outputFields = outputFields; } public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException { readData( stepnode ); } public void allocate( int nrfields ) { outputFields = new XMLField[nrfields]; } public Object clone() { XMLOutputMeta retval = (XMLOutputMeta) super.clone(); int nrfields = outputFields.length; retval.allocate( nrfields ); for ( int i = 0; i < nrfields; i++ ) { retval.outputFields[i] = (XMLField) outputFields[i].clone(); } return retval; } private void readData( Node stepnode ) throws KettleXMLException { try { setEncoding( XMLHandler.getTagValue( stepnode, "encoding" ) ); setNameSpace( XMLHandler.getTagValue( stepnode, "name_space" ) ); setMainElement( XMLHandler.getTagValue( stepnode, "xml_main_element" ) ); setRepeatElement( XMLHandler.getTagValue( stepnode, "xml_repeat_element" ) ); setFileName( XMLHandler.getTagValue( stepnode, "file", "name" ) ); setExtension( XMLHandler.getTagValue( stepnode, "file", "extention" ) ); setServletOutput( "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "file", "servlet_output" ) ) ); setDoNotOpenNewFileInit( "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "file", "do_not_open_newfile_init" ) ) ); setStepNrInFilename( "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "file", "split" ) ) ); setDateInFilename( "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "file", "add_date" ) ) ); setTimeInFilename( "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "file", "add_time" ) ) ); setSpecifyFormat( "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "file", "SpecifyFormat" ) ) ); setOmitNullValues( "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "file", "omit_null_values" ) ) ); setDateTimeFormat( XMLHandler.getTagValue( stepnode, "file", "date_time_format" ) ); setAddToResultFiles( "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "file", 
"add_to_result_filenames" ) ) ); setZipped( "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "file", "zipped" ) ) ); setSplitEvery( Const.toInt( XMLHandler.getTagValue( stepnode, "file", "splitevery" ), 0 ) ); Node fields = XMLHandler.getSubNode( stepnode, "fields" ); int nrfields = XMLHandler.countNodes( fields, "field" ); allocate( nrfields ); for ( int i = 0; i < nrfields; i++ ) { Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i ); outputFields[i] = new XMLField(); String contentTypeString = Const.NVL( XMLHandler.getTagValue( fnode, "content_type" ), ContentType.Element.name() ); outputFields[i].setContentType( ContentType.valueOf( contentTypeString ) ); String fieldName = XMLHandler.getTagValue( fnode, "name" ); outputFields[i].setFieldName( fieldName ); String elementName = XMLHandler.getTagValue( fnode, "element" ); outputFields[i].setElementName( elementName == null ? "" : elementName ); outputFields[i].setType( XMLHandler.getTagValue( fnode, "type" ) ); outputFields[i].setFormat( XMLHandler.getTagValue( fnode, "format" ) ); outputFields[i].setCurrencySymbol( XMLHandler.getTagValue( fnode, "currency" ) ); outputFields[i].setDecimalSymbol( XMLHandler.getTagValue( fnode, "decimal" ) ); outputFields[i].setGroupingSymbol( XMLHandler.getTagValue( fnode, "group" ) ); outputFields[i].setNullString( XMLHandler.getTagValue( fnode, "nullif" ) ); outputFields[i].setLength( Const.toInt( XMLHandler.getTagValue( fnode, "length" ), -1 ) ); outputFields[i].setPrecision( Const.toInt( XMLHandler.getTagValue( fnode, "precision" ), -1 ) ); } } catch ( Exception e ) { throw new KettleXMLException( "Unable to load step info from XML", e ); } } public String getNewLine( String fformat ) { String nl = System.getProperty( "line.separator" ); if ( fformat != null ) { if ( fformat.equalsIgnoreCase( "DOS" ) ) { nl = "\r\n"; } else if ( fformat.equalsIgnoreCase( "UNIX" ) ) { nl = "\n"; } } return nl; } public void setDefault() { fileName = "file"; extension = "xml"; 
stepNrInFilename = false; doNotOpenNewFileInit = false; dateInFilename = false; timeInFilename = false; addToResultFilenames = false; zipped = false; splitEvery = 0; encoding = Const.XML_ENCODING; nameSpace = ""; date_time_format = null; SpecifyFormat = false; omitNullValues = false; mainElement = "Rows"; repeatElement = "Row"; int nrfields = 0; allocate( nrfields ); } public String[] getFiles( VariableSpace space ) { int copies = 1; int splits = 1; if ( stepNrInFilename ) { copies = 3; } if ( splitEvery != 0 ) { splits = 3; } int nr = copies * splits; if ( nr > 1 ) { nr++; } String[] retval = new String[nr]; int i = 0; for ( int copy = 0; copy < copies; copy++ ) { for ( int split = 0; split < splits; split++ ) { retval[i] = buildFilename( space, copy, split, false ); i++; } } if ( i < nr ) { retval[i] = "..."; } return retval; } public String buildFilename( VariableSpace space, int stepnr, int splitnr, boolean ziparchive ) { SimpleDateFormat daf = new SimpleDateFormat(); DecimalFormat df = new DecimalFormat( "00000" ); // Replace possible environment variables... String retval = space.environmentSubstitute( fileName ); String realextension = space.environmentSubstitute( extension ); Date now = new Date(); if ( SpecifyFormat && !Utils.isEmpty( date_time_format ) ) { daf.applyPattern( date_time_format ); String dt = daf.format( now ); retval += dt; } else { if ( dateInFilename ) { daf.applyPattern( "yyyyMMdd" ); String d = daf.format( now ); retval += "_" + d; } if ( timeInFilename ) { daf.applyPattern( "HHmmss" ); String t = daf.format( now ); retval += "_" + t; } } if ( stepNrInFilename ) { retval += "_" + stepnr; } if ( splitEvery > 0 ) { retval += "_" + df.format( splitnr + 1 ); } if ( zipped ) { if ( ziparchive ) { retval += ".zip"; } else { if ( realextension != null && realextension.length() != 0 ) { retval += "." + realextension; } } } else { if ( realextension != null && realextension.length() != 0 ) { retval += "." 
+ realextension; } } return retval; } public void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) { // No values are added to the row in this type of step // However, in case of Fixed length records, // the field precisions and lengths are altered! for ( int i = 0; i < outputFields.length; i++ ) { XMLField field = outputFields[i]; ValueMetaInterface v = row.searchValueMeta( field.getFieldName() ); if ( v != null ) { v.setLength( field.getLength(), field.getPrecision() ); } } } public RowMetaInterface getRequiredFields( VariableSpace space ) throws KettleException { RowMeta row = new RowMeta(); for ( int i = 0; i < outputFields.length; i++ ) { XMLField field = outputFields[i]; row.addValueMeta( new ValueMeta( field.getFieldName(), field.getType(), field.getLength(), field.getPrecision() ) ); } return row; } public String getXML() { StringBuffer retval = new StringBuffer( 600 ); retval.append( " " ).append( XMLHandler.addTagValue( "encoding", encoding ) ); retval.append( " " ).append( XMLHandler.addTagValue( "name_space", nameSpace ) ); retval.append( " " ).append( XMLHandler.addTagValue( "xml_main_element", mainElement ) ); retval.append( " " ).append( XMLHandler.addTagValue( "xml_repeat_element", repeatElement ) ); retval.append( " <file>" ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "name", fileName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "extention", extension ) ); retval.append( " " ).append( XMLHandler.addTagValue( "servlet_output", servletOutput ) ); retval.append( " " ).append( XMLHandler.addTagValue( "do_not_open_newfile_init", doNotOpenNewFileInit ) ); retval.append( " " ).append( XMLHandler.addTagValue( "split", stepNrInFilename ) ); retval.append( " " ).append( XMLHandler.addTagValue( "add_date", dateInFilename ) ); retval.append( " " ).append( XMLHandler.addTagValue( "add_time", timeInFilename ) ); 
retval.append( " " ).append( XMLHandler.addTagValue( "SpecifyFormat", SpecifyFormat ) ); retval.append( " " ).append( XMLHandler.addTagValue( "omit_null_values", omitNullValues ) ); retval.append( " " ).append( XMLHandler.addTagValue( "date_time_format", date_time_format ) ); retval.append( " " ).append( XMLHandler.addTagValue( "add_to_result_filenames", addToResultFilenames ) ); retval.append( " " ).append( XMLHandler.addTagValue( "zipped", zipped ) ); retval.append( " " ).append( XMLHandler.addTagValue( "splitevery", splitEvery ) ); retval.append( " </file>" ).append( Const.CR ); retval.append( " <fields>" ).append( Const.CR ); for ( int i = 0; i < outputFields.length; i++ ) { XMLField field = outputFields[i]; if ( field.getFieldName() != null && field.getFieldName().length() != 0 ) { retval.append( " <field>" ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "content_type", field.getContentType().name() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "name", field.getFieldName() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "element", field.getElementName() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "type", field.getTypeDesc() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "format", field.getFormat() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "currency", field.getCurrencySymbol() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "decimal", field.getDecimalSymbol() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "group", field.getGroupingSymbol() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "nullif", field.getNullString() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "length", field.getLength() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "precision", field.getPrecision() ) ); retval.append( " </field>" ).append( Const.CR ); } } retval.append( " </fields>" ).append( Const.CR ); return retval.toString(); } public void 
readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException { try { encoding = rep.getStepAttributeString( id_step, "encoding" ); nameSpace = rep.getStepAttributeString( id_step, "name_space" ); mainElement = rep.getStepAttributeString( id_step, "xml_main_element" ); repeatElement = rep.getStepAttributeString( id_step, "xml_repeat_element" ); fileName = rep.getStepAttributeString( id_step, "file_name" ); extension = rep.getStepAttributeString( id_step, "file_extention" ); servletOutput = rep.getStepAttributeBoolean( id_step, "file_servlet_output" ); doNotOpenNewFileInit = rep.getStepAttributeBoolean( id_step, "do_not_open_newfile_init" ); splitEvery = (int) rep.getStepAttributeInteger( id_step, "file_split" ); stepNrInFilename = rep.getStepAttributeBoolean( id_step, "file_add_stepnr" ); dateInFilename = rep.getStepAttributeBoolean( id_step, "file_add_date" ); timeInFilename = rep.getStepAttributeBoolean( id_step, "file_add_time" ); SpecifyFormat = rep.getStepAttributeBoolean( id_step, "SpecifyFormat" ); omitNullValues = rep.getStepAttributeBoolean( id_step, "omit_null_values" ); date_time_format = rep.getStepAttributeString( id_step, "date_time_format" ); addToResultFilenames = rep.getStepAttributeBoolean( id_step, "add_to_result_filenames" ); zipped = rep.getStepAttributeBoolean( id_step, "file_zipped" ); int nrfields = rep.countNrStepAttributes( id_step, "field_name" ); allocate( nrfields ); for ( int i = 0; i < nrfields; i++ ) { outputFields[i] = new XMLField(); outputFields[i].setContentType( ContentType.valueOf( Const.NVL( rep.getStepAttributeString( id_step, i, "field_content_type" ), ContentType.Element.name() ) ) ); outputFields[i].setFieldName( rep.getStepAttributeString( id_step, i, "field_name" ) ); outputFields[i].setElementName( rep.getStepAttributeString( id_step, i, "field_element" ) ); outputFields[i].setType( rep.getStepAttributeString( id_step, i, "field_type" ) ); 
outputFields[i].setFormat( rep.getStepAttributeString( id_step, i, "field_format" ) ); outputFields[i].setCurrencySymbol( rep.getStepAttributeString( id_step, i, "field_currency" ) ); outputFields[i].setDecimalSymbol( rep.getStepAttributeString( id_step, i, "field_decimal" ) ); outputFields[i].setGroupingSymbol( rep.getStepAttributeString( id_step, i, "field_group" ) ); outputFields[i].setNullString( rep.getStepAttributeString( id_step, i, "field_nullif" ) ); outputFields[i].setLength( (int) rep.getStepAttributeInteger( id_step, i, "field_length" ) ); outputFields[i].setPrecision( (int) rep.getStepAttributeInteger( id_step, i, "field_precision" ) ); } } catch ( Exception e ) { throw new KettleException( "Unexpected error reading step information from the repository", e ); } } public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException { try { rep.saveStepAttribute( id_transformation, id_step, "encoding", encoding ); rep.saveStepAttribute( id_transformation, id_step, "name_space", nameSpace ); rep.saveStepAttribute( id_transformation, id_step, "xml_main_element", mainElement ); rep.saveStepAttribute( id_transformation, id_step, "xml_repeat_element", repeatElement ); rep.saveStepAttribute( id_transformation, id_step, "file_name", fileName ); rep.saveStepAttribute( id_transformation, id_step, "file_extention", extension ); rep.saveStepAttribute( id_transformation, id_step, "file_servlet_output", servletOutput ); rep.saveStepAttribute( id_transformation, id_step, "do_not_open_newfile_init", doNotOpenNewFileInit ); rep.saveStepAttribute( id_transformation, id_step, "file_split", splitEvery ); rep.saveStepAttribute( id_transformation, id_step, "file_add_stepnr", stepNrInFilename ); rep.saveStepAttribute( id_transformation, id_step, "file_add_date", dateInFilename ); rep.saveStepAttribute( id_transformation, id_step, "file_add_time", timeInFilename ); rep.saveStepAttribute( id_transformation, id_step, 
"SpecifyFormat", SpecifyFormat ); rep.saveStepAttribute( id_transformation, id_step, "omit_null_values", omitNullValues ); rep.saveStepAttribute( id_transformation, id_step, "date_time_format", date_time_format ); rep.saveStepAttribute( id_transformation, id_step, "add_to_result_filenames", addToResultFilenames ); rep.saveStepAttribute( id_transformation, id_step, "file_zipped", zipped ); for ( int i = 0; i < outputFields.length; i++ ) { XMLField field = outputFields[i]; rep.saveStepAttribute( id_transformation, id_step, i, "field_content_type", field.getContentType().name() ); rep.saveStepAttribute( id_transformation, id_step, i, "field_name", field.getFieldName() ); rep.saveStepAttribute( id_transformation, id_step, i, "field_element", field.getElementName() ); rep.saveStepAttribute( id_transformation, id_step, i, "field_type", field.getTypeDesc() ); rep.saveStepAttribute( id_transformation, id_step, i, "field_format", field.getFormat() ); rep.saveStepAttribute( id_transformation, id_step, i, "field_currency", field.getCurrencySymbol() ); rep.saveStepAttribute( id_transformation, id_step, i, "field_decimal", field.getDecimalSymbol() ); rep.saveStepAttribute( id_transformation, id_step, i, "field_group", field.getGroupingSymbol() ); rep.saveStepAttribute( id_transformation, id_step, i, "field_nullif", field.getNullString() ); rep.saveStepAttribute( id_transformation, id_step, i, "field_length", field.getLength() ); rep.saveStepAttribute( id_transformation, id_step, i, "field_precision", field.getPrecision() ); } } catch ( Exception e ) { throw new KettleException( "Unable to save step information to the repository for id_step=" + id_step, e ); } } public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { CheckResult cr; // Check output fields if ( prev != null && prev.size() > 0 ) { 
cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.FieldsReceived", "" + prev.size() ), stepinfo ); remarks.add( cr ); String error_message = ""; boolean error_found = false; // Starting from selected fields in ... for ( int i = 0; i < outputFields.length; i++ ) { int idx = prev.indexOfValue( outputFields[i].getFieldName() ); if ( idx < 0 ) { error_message += "\t\t" + outputFields[i].getFieldName() + Const.CR; error_found = true; } } if ( error_found ) { error_message = BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.FieldsNotFound", error_message ); cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepinfo ); remarks.add( cr ); } else { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.AllFieldsFound" ), stepinfo ); remarks.add( cr ); } } // See if we have input streams leading to this step! if ( input.length > 0 ) { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.ExpectedInputOk" ), stepinfo ); remarks.add( cr ); } else { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.ExpectedInputError" ), stepinfo ); remarks.add( cr ); } cr = new CheckResult( CheckResultInterface.TYPE_RESULT_COMMENT, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.FilesNotChecked" ), stepinfo ); remarks.add( cr ); } public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ) { return new XMLOutput( stepMeta, stepDataInterface, cnr, transMeta, trans ); } public StepDataInterface getStepData() { return new XMLOutputData(); } public String getEncoding() { return encoding; } public void setEncoding( String encoding ) { this.encoding = encoding; } /** * @return Returns the mainElement. 
*/ public String getMainElement() { return mainElement; } /** * @param mainElement * The mainElement to set. */ public void setMainElement( String mainElement ) { this.mainElement = mainElement; } /** * @return Returns the repeatElement. */ public String getRepeatElement() { return repeatElement; } /** * @param repeatElement * The repeatElement to set. */ public void setRepeatElement( String repeatElement ) { this.repeatElement = repeatElement; } /** * @return Returns the nameSpace. */ public String getNameSpace() { return nameSpace; } /** * @param nameSpace * The nameSpace to set. */ public void setNameSpace( String nameSpace ) { this.nameSpace = nameSpace; } public void setOmitNullValues( boolean omitNullValues ) { this.omitNullValues = omitNullValues; } public boolean isOmitNullValues() { return omitNullValues; } public boolean isServletOutput() { return servletOutput; } public void setServletOutput( boolean servletOutput ) { this.servletOutput = servletOutput; } /** * Since the exported transformation that runs this will reside in a ZIP file, we can't reference files relatively. So * what this does is turn the name of the base path into an absolute path. * * @param space * the variable space to use * @param definitions * @param resourceNamingInterface * @param repository * The repository to optionally load other resources from (to be converted to XML) * @param metaStore * the metaStore in which non-kettle metadata could reside. * * @return the filename of the exported resource */ public String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ) throws KettleException { try { // The object that we're modifying here is a copy of the original! // So let's change the filename from relative to absolute by grabbing the file object... 
// if ( !Utils.isEmpty( fileName ) ) { FileObject fileObject = KettleVFS.getFileObject( space.environmentSubstitute( fileName ), space ); fileName = resourceNamingInterface.nameResource( fileObject, space, true ); } return null; } catch ( Exception e ) { throw new KettleException( e ); } } }
/*
 *
 *  Copyright 2016 Netflix, Inc.
 *
 *     Licensed under the Apache License, Version 2.0 (the "License");
 *     you may not use this file except in compliance with the License.
 *     You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 *     Unless required by applicable law or agreed to in writing, software
 *     distributed under the License is distributed on an "AS IS" BASIS,
 *     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *     See the License for the specific language governing permissions and
 *     limitations under the License.
 *
 */
package com.netflix.genie.web.tasks.leader;

import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.AtomicDouble;
import com.netflix.genie.common.dto.UserResourcesSummary;
import com.netflix.genie.common.external.dtos.v4.JobStatus;
import com.netflix.genie.web.data.services.DataServices;
import com.netflix.genie.web.data.services.PersistenceService;
import com.netflix.genie.web.properties.UserMetricsProperties;
import com.netflix.genie.web.tasks.GenieTaskScheduleType;
import com.netflix.genie.web.util.MetricsConstants;
import io.micrometer.core.instrument.Gauge;
import io.micrometer.core.instrument.MeterRegistry;
import lombok.extern.slf4j.Slf4j;

import java.util.Map;
import java.util.Set;

/**
 * A task which publishes user metrics.
 *
 * @author mprimi
 * @since 4.0.0
 */
@Slf4j
public class UserMetricsTask extends LeaderTask {
    private static final String USER_ACTIVE_JOBS_METRIC_NAME = "genie.user.active-jobs.gauge";
    private static final String USER_ACTIVE_MEMORY_METRIC_NAME = "genie.user.active-memory.gauge";
    private static final String USER_ACTIVE_USERS_METRIC_NAME = "genie.user.active-users.gauge";
    // Placeholder returned for users whose record was removed; its NaN counters make the gauges report NaN.
    private static final UserResourcesRecord USER_RECORD_PLACEHOLDER = new UserResourcesRecord("nobody");
    private final MeterRegistry registry;
    private final PersistenceService persistenceService;
    private final UserMetricsProperties userMetricsProperties;
    // Concurrent map: run()/cleanup() mutate it on the scheduled task thread while the
    // per-user gauge callbacks read it from metrics-scraping threads.
    private final Map<String, UserResourcesRecord> userResourcesRecordMap = Maps.newConcurrentMap();
    private final AtomicDouble activeUsersCount;

    /**
     * Constructor.
     *
     * @param registry              the metrics registry
     * @param dataServices          The {@link DataServices} instance to use
     * @param userMetricsProperties the properties that configure this task
     */
    public UserMetricsTask(
        final MeterRegistry registry,
        final DataServices dataServices,
        final UserMetricsProperties userMetricsProperties
    ) {
        this.registry = registry;
        this.persistenceService = dataServices.getPersistenceService();
        this.userMetricsProperties = userMetricsProperties;
        this.activeUsersCount = new AtomicDouble(Double.NaN);

        // Register gauge for count of distinct users with active jobs.
        Gauge.builder(USER_ACTIVE_USERS_METRIC_NAME, this::getUsersCount)
            .register(registry);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public GenieTaskScheduleType getScheduleType() {
        return GenieTaskScheduleType.FIXED_RATE;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public long getFixedRate() {
        return this.userMetricsProperties.getRefreshInterval();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void run() {
        log.debug("Publishing user metrics");

        // For now just report the API jobs as they're using resources on Genie web nodes
        // Get us unblocked for now on agent migration but in future we may want to change this to further dice or
        // combine reports by CLI vs. API
        final Map<String, UserResourcesSummary> summaries = this.persistenceService.getUserResourcesSummaries(
            JobStatus.getActiveStatuses(),
            true
        );

        // Update number of active users
        log.debug("Number of users with active jobs: {}", summaries.size());
        this.activeUsersCount.set(summaries.size());

        // Track users who previously had jobs but no longer do
        final Set<String> usersToReset = Sets.newHashSet(this.userResourcesRecordMap.keySet());
        usersToReset.removeAll(summaries.keySet());

        for (final String user : usersToReset) {
            // Remove user. If gauge is polled, it'll return NaN
            this.userResourcesRecordMap.remove(user);
        }

        // Update existing user metrics
        for (final UserResourcesSummary userResourcesSummary : summaries.values()) {
            final String user = userResourcesSummary.getUser();
            final long jobs = userResourcesSummary.getRunningJobsCount();
            final long memory = userResourcesSummary.getUsedMemory();

            log.debug("User {}: {} jobs running, using {}MB", user, jobs, memory);

            this.userResourcesRecordMap.computeIfAbsent(
                userResourcesSummary.getUser(),
                userName -> {
                    // Register gauges for this user.
                    // Gauge creation is idempotent so it doesn't matter if the user is new or seen before.
                    // Registry holds a reference to the gauge so no need to save it.
                    Gauge.builder(
                        USER_ACTIVE_JOBS_METRIC_NAME,
                        () -> this.getUserJobCount(userName)
                    )
                        .tags(MetricsConstants.TagKeys.USER, userName)
                        .register(registry);

                    Gauge.builder(
                        USER_ACTIVE_MEMORY_METRIC_NAME,
                        () -> this.getUserMemoryAmount(userName)
                    )
                        .tags(MetricsConstants.TagKeys.USER, userName)
                        .register(registry);

                    return new UserResourcesRecord(userName);
                }
            ).update(jobs, memory);
        }

        log.debug("Done publishing user metrics");
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void cleanup() {
        log.debug("Cleaning up user metrics publishing");

        // Reset all users
        this.userResourcesRecordMap.clear();

        // Reset active users count
        this.activeUsersCount.set(Double.NaN);
    }

    // Gauge callback: current job count for the user, NaN once the user's record was removed.
    private Number getUserJobCount(final String userName) {
        final UserResourcesRecord record =
            this.userResourcesRecordMap.getOrDefault(userName, USER_RECORD_PLACEHOLDER);
        final double jobCount = record.jobCount.get();
        log.debug("Current jobs count for user '{}' is {}", userName, (long) jobCount);
        return jobCount;
    }

    // Gauge callback: current memory usage (MB) for the user, NaN once the user's record was removed.
    private Number getUserMemoryAmount(final String userName) {
        final UserResourcesRecord record =
            this.userResourcesRecordMap.getOrDefault(userName, USER_RECORD_PLACEHOLDER);
        final double memoryAmount = record.memoryAmount.get();
        log.debug("Current memory amount for user '{}' is {}MB", userName, (long) memoryAmount);
        return memoryAmount;
    }

    // Gauge callback: number of distinct users with active jobs (NaN before first run / after cleanup).
    private Number getUsersCount() {
        return activeUsersCount.get();
    }

    /** Per-user mutable counters backing the per-user gauges. */
    private static class UserResourcesRecord {
        private final String userName;
        private final AtomicDouble jobCount = new AtomicDouble(Double.NaN);
        private final AtomicDouble memoryAmount = new AtomicDouble(Double.NaN);

        UserResourcesRecord(
            final String userName
        ) {
            this.userName = userName;
        }

        void update(final long runningJobsCount, final long usedMemory) {
            log.debug(
                "Updating usage of user '{}': {} jobs totalling {}MB",
                this.userName,
                runningJobsCount,
                usedMemory
            );
            this.jobCount.set(runningJobsCount);
            this.memoryAmount.set(usedMemory);
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.jmeter.functions; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.concurrent.ThreadLocalRandom; import org.apache.jmeter.engine.util.CompoundVariable; import org.apache.jmeter.samplers.SampleResult; import org.apache.jmeter.samplers.Sampler; import org.apache.jmeter.threads.JMeterVariables; import org.apache.jmeter.util.JMeterUtils; import org.apache.jorphan.logging.LoggingManager; import org.apache.log.Logger; import org.apache.oro.text.MalformedCachePatternException; import org.apache.oro.text.regex.MatchResult; import org.apache.oro.text.regex.Pattern; import org.apache.oro.text.regex.PatternMatcher; import org.apache.oro.text.regex.PatternMatcherInput; import org.apache.oro.text.regex.Perl5Compiler; import org.apache.oro.text.regex.Util; /** * Implements regular expression parsing of sample results and variables * @since 1.X */ // @see TestRegexFunction for unit tests public class RegexFunction extends AbstractFunction { private static final Logger log = LoggingManager.getLoggerForClass(); public static final String ALL = "ALL"; //$NON-NLS-1$ public static final String 
RAND = "RAND"; //$NON-NLS-1$ // NOTE(review): start of this declaration (and the class header / ALL constant) precedes this chunk

public static final String KEY = "__regexFunction"; //$NON-NLS-1$

private Object[] values;// Parameters are stored here

// Argument descriptions shown in the GUI, populated once in the static block below.
private static final List<String> desc = new LinkedList<>();

// Matches template placeholders of the form $1$, $2$, ... (group 1 = the digit string).
private static final String TEMPLATE_PATTERN = "\\$(\\d+)\\$"; //$NON-NLS-1$

/** initialised to the regex \$(\d+)\$ */
private final Pattern templatePattern;

// Number of parameters expected - used to reject invalid calls
private static final int MIN_PARAMETER_COUNT = 2;
private static final int MAX_PARAMETER_COUNT = 7;

static {
    desc.add(JMeterUtils.getResString("regexfunc_param_1"));// regex //$NON-NLS-1$
    desc.add(JMeterUtils.getResString("regexfunc_param_2"));// template //$NON-NLS-1$
    desc.add(JMeterUtils.getResString("regexfunc_param_3"));// which match //$NON-NLS-1$
    desc.add(JMeterUtils.getResString("regexfunc_param_4"));// between text //$NON-NLS-1$
    desc.add(JMeterUtils.getResString("regexfunc_param_5"));// default text //$NON-NLS-1$
    desc.add(JMeterUtils.getResString("function_name_paropt")); // output variable name //$NON-NLS-1$
    desc.add(JMeterUtils.getResString("regexfunc_param_7"));// input variable //$NON-NLS-1$
}

public RegexFunction() {
    // Compile the $n$ placeholder pattern once per instance; READ_ONLY_MASK makes it shareable.
    templatePattern = JMeterUtils.getPatternCache().getPattern(TEMPLATE_PATTERN,
            Perl5Compiler.READ_ONLY_MASK);
}

/** {@inheritDoc} */
@Override
public String execute(SampleResult previousResult, Sampler currentSampler)
        throws InvalidVariableException {
    String valueIndex = ""; //$NON-NLS-1$
    String defaultValue = ""; //$NON-NLS-1$
    String between = ""; //$NON-NLS-1$
    String name = ""; //$NON-NLS-1$
    String inputVariable = ""; //$NON-NLS-1$
    Pattern searchPattern;
    Object[] tmplt;
    try {
        // Mandatory parameters: the search regex and the result template.
        searchPattern = JMeterUtils.getPatternCache().getPattern(((CompoundVariable) values[0]).execute(),
                Perl5Compiler.READ_ONLY_MASK);
        tmplt = generateTemplate(((CompoundVariable) values[1]).execute());
        // Optional parameters 3..7; match number defaults to "1" (first match) when blank.
        if (values.length > 2) {
            valueIndex = ((CompoundVariable) values[2]).execute();
        }
        if (valueIndex.length() == 0) {
            valueIndex = "1"; //$NON-NLS-1$
        }
        if (values.length > 3) {
            between = ((CompoundVariable) values[3]).execute();
        }
        if (values.length > 4) {
            String dv = ((CompoundVariable) values[4]).execute();
            if (dv.length() != 0) {
                defaultValue = dv;
            }
        }
        if (values.length > 5) {
            name = ((CompoundVariable) values[5]).execute();
        }
        if (values.length > 6) {
            inputVariable = ((CompoundVariable) values[6]).execute();
        }
    } catch (MalformedCachePatternException e) {
        log.error("Malformed cache pattern:"+values[0], e);
        throw new InvalidVariableException("Malformed cache pattern:"+values[0], e);
    }

    // Relatively expensive operation, so do it once
    JMeterVariables vars = getVariables();

    if (vars == null){// Can happen if called during test closedown
        return defaultValue;
    }

    // Pre-seed the output variable with the default so it exists even if no match is found.
    if (name.length() > 0) {
        vars.put(name, defaultValue);
    }

    String textToMatch=null;

    // Input is either a named variable or the previous sampler's response body.
    if (inputVariable.length() > 0){
        textToMatch=vars.get(inputVariable);
    } else if (previousResult != null){
        textToMatch = previousResult.getResponseDataAsString();
    }

    if (textToMatch == null || textToMatch.length() == 0) {
        return defaultValue;
    }

    // Collect every match; the finally block records the match count even if matching throws.
    List<MatchResult> collectAllMatches = new ArrayList<>();
    try {
        PatternMatcher matcher = JMeterUtils.getMatcher();
        PatternMatcherInput input = new PatternMatcherInput(textToMatch);
        while (matcher.contains(input, searchPattern)) {
            MatchResult match = matcher.getMatch();
            collectAllMatches.add(match);
        }
    } finally {
        if (name.length() > 0){
            vars.put(name + "_matchNr", Integer.toString(collectAllMatches.size())); //$NON-NLS-1$
        }
    }

    if (collectAllMatches.size() == 0) {
        return defaultValue;
    }

    // ALL: concatenate every match separated by 'between'. (ALL declared before this chunk.)
    if (valueIndex.equals(ALL)) {
        StringBuilder value = new StringBuilder();
        Iterator<MatchResult> it = collectAllMatches.iterator();
        boolean first = true;
        while (it.hasNext()) {
            if (!first) {
                value.append(between);
            } else {
                first = false;
            }
            value.append(generateResult(it.next(), name, tmplt, vars));
        }
        return value.toString();
    } else if (valueIndex.equals(RAND)) {
        // RAND: pick one match uniformly at random.
        MatchResult result = collectAllMatches.get(ThreadLocalRandom.current().nextInt(collectAllMatches.size()));
        return generateResult(result, name, tmplt, vars);
    } else {
        try {
            // 1-based integer index into the match list.
            int index = Integer.parseInt(valueIndex) - 1;
            MatchResult result = collectAllMatches.get(index);
            return generateResult(result, name, tmplt, vars);
        } catch (NumberFormatException e) {
            // Not an integer: treat as a float ratio of the match count (rounded).
            float ratio = Float.parseFloat(valueIndex);
            MatchResult result = collectAllMatches
                    .get((int) (collectAllMatches.size() * ratio + .5) - 1);
            return generateResult(result, name, tmplt, vars);
        } catch (IndexOutOfBoundsException e) {
            // Index out of range (e.g. 0 or > match count): fall back to the default.
            return defaultValue;
        }
    }
}

/**
 * Store each regex group of {@code result} into variables named
 * {@code namep}_g0, {@code namep}_g1, ...
 */
private void saveGroups(MatchResult result, String namep, JMeterVariables vars) {
    if (result != null) {
        for (int x = 0; x < result.groups(); x++) {
            vars.put(namep + "_g" + x, result.group(x)); //$NON-NLS-1$
        }
    }
}

/** {@inheritDoc} */
@Override
public List<String> getArgumentDesc() {
    return desc;
}

/**
 * Expand one match through the parsed template: String pieces are appended
 * literally, Integer pieces select the corresponding match group.
 * Also saves the groups and (if named) the final result into variables.
 */
private String generateResult(MatchResult match, String namep, Object[] template,
        JMeterVariables vars) {
    saveGroups(match, namep, vars);
    StringBuilder result = new StringBuilder();
    for (Object t : template) {
        if (t instanceof String) {
            result.append(t);
        } else {
            result.append(match.group(((Integer) t).intValue()));
        }
    }
    if (namep.length() > 0){
        vars.put(namep, result.toString());
    }
    return result.toString();
}

/** {@inheritDoc} */
@Override
public String getReferenceKey() {
    return KEY;
}

/** {@inheritDoc} */
@Override
public void setParameters(Collection<CompoundVariable> parameters) throws InvalidVariableException {
    checkParameterCount(parameters, MIN_PARAMETER_COUNT, MAX_PARAMETER_COUNT);
    values = parameters.toArray();
}

/**
 * Split the raw template into an alternating array of literal Strings and
 * Integer group numbers, e.g. "pre_$2$_post" -> ["pre_", 2, "_post"].
 */
private Object[] generateTemplate(String rawTemplate) {
    List<String> pieces = new ArrayList<>();
    // String or Integer
    List<Object> combined = new LinkedList<>();
    PatternMatcher matcher = JMeterUtils.getMatcher();
    Util.split(pieces, matcher, templatePattern, rawTemplate);
    PatternMatcherInput input = new PatternMatcherInput(rawTemplate);
    boolean startsWith = isFirstElementGroup(rawTemplate);
    if (startsWith) {
        pieces.remove(0);// Remove initial empty entry
    }
    // Interleave literals with the $n$ group numbers found in order.
    Iterator<String> iter = pieces.iterator();
    while (iter.hasNext()) {
        boolean matchExists = matcher.contains(input, templatePattern);
        if (startsWith) {
            if (matchExists) {
                combined.add(Integer.valueOf(matcher.getMatch().group(1)));
            }
            combined.add(iter.next());
        } else {
            combined.add(iter.next());
            if (matchExists) {
                combined.add(Integer.valueOf(matcher.getMatch().group(1)));
            }
        }
    }
    // A trailing $n$ has no literal after it; pick it up here.
    if (matcher.contains(input, templatePattern)) {
        combined.add(Integer.valueOf(matcher.getMatch().group(1)));
    }
    return combined.toArray();
}

/** @return true if the template begins with a $n$ placeholder. */
private boolean isFirstElementGroup(String rawData) {
    Pattern pattern = JMeterUtils.getPatternCache().getPattern("^\\$\\d+\\$", //$NON-NLS-1$
            Perl5Compiler.READ_ONLY_MASK);
    return JMeterUtils.getMatcher().contains(rawData, pattern);
}

}
package org.adligo.i.log.shared; import org.adligo.i.util.shared.AppenderFactory; import org.adligo.i.util.shared.ArrayIterator; import org.adligo.i.util.shared.CollectionFactory; import org.adligo.i.util.shared.I_Appender; import org.adligo.i.util.shared.I_Collection; import org.adligo.i.util.shared.I_ImmutableMap; import org.adligo.i.util.shared.I_Iterator; /** * this class tracks all logging * until it has * received notification that the * log levels have been set, * at which point it logs everything in order * and then defers new calls to the * a SimpleLog obtained by LogFactory * * @author scott * */ public class ProxyLog implements I_LogMutant, I_ProxyLog, I_Log { protected I_LogDelegate single_delegate = null; private I_Collection delegates = null; private String logName; private short level = I_LogDelegate.LOG_LEVEL_OFF; private boolean enabled = true; public ProxyLog(Class c) { if (c == null) { throw new NullPointerException("ProxyLog " + "constructor can not accept a null Class"); } logName = c.getName(); } public ProxyLog(String name) { if (name == null) { throw new NullPointerException("ProxyLog " + "constructor can not accept a String name"); } logName = name; } public synchronized void addDelegate(I_LogDelegate p) { if (LogPlatform.isDebug()) { LogPlatform.log("LogFactory","entering add delegate " + p + " in " + this + " for class " + logName); } if (p != null) { if (single_delegate == null) { single_delegate = p; } else { if (delegates == null) { delegates = CollectionFactory.create(); delegates.add(single_delegate); } delegates.add(p); } } } /** * returns a iterator of * I_LogDelegates * * @return */ public I_Iterator getDelegates() { if (delegates == null && single_delegate != null) { return new ArrayIterator(new I_LogDelegate[] {single_delegate}); } else if (delegates != null) { return delegates.getIterator(); } else { return new ArrayIterator(new I_LogDelegate[] {}); } } public void debug(Object message, Throwable t) { if (isDebugEnabled()) { if 
(delegates == null && single_delegate != null) { single_delegate.log(I_LogDelegate.LOG_LEVEL_DEBUG, message, t); if (LogPlatform.isDebug()) { LogPlatform.log("LogFactory","sending to " + single_delegate ); } } else if (delegates != null) { I_Iterator it = delegates.getIterator(); while (it.hasNext()) { I_LogDelegate delegate = (I_LogDelegate) it.next(); if (LogPlatform.isDebug()) { LogPlatform.log("LogFactory","sending to " + delegate ); } delegate.log(I_LogDelegate.LOG_LEVEL_DEBUG, message, t); } } } } public void debug(Object message) { if (isDebugEnabled()) { if (delegates == null && single_delegate != null) { single_delegate.log(I_LogDelegate.LOG_LEVEL_DEBUG, message, null); } else if (delegates != null) { I_Iterator it = delegates.getIterator(); while (it.hasNext()) { I_LogDelegate delegate = (I_LogDelegate) it.next(); delegate.log(I_LogDelegate.LOG_LEVEL_DEBUG, message, null); } } } } public void error(Object message, Throwable t) { if (isErrorEnabled()) { if (delegates == null && single_delegate != null) { single_delegate.log(I_LogDelegate.LOG_LEVEL_ERROR, message, t); } else if (delegates != null) { I_Iterator it = delegates.getIterator(); while (it.hasNext()) { I_LogDelegate delegate = (I_LogDelegate) it.next(); delegate.log(I_LogDelegate.LOG_LEVEL_ERROR, message, t); } } } } public void error(Object message) { if (isErrorEnabled()) { if (delegates == null && single_delegate != null) { single_delegate.log(I_LogDelegate.LOG_LEVEL_ERROR, message, null); } else if (delegates != null) { I_Iterator it = delegates.getIterator(); while (it.hasNext()) { I_LogDelegate delegate = (I_LogDelegate) it.next(); delegate.log(I_LogDelegate.LOG_LEVEL_ERROR, message, null); } } } } public void fatal(Object message, Throwable t) { if (isFatalEnabled()) { if (delegates == null && single_delegate != null) { single_delegate.log(I_LogDelegate.LOG_LEVEL_FATAL, message, t); } else if (delegates != null) { I_Iterator it = delegates.getIterator(); while (it.hasNext()) { I_LogDelegate 
delegate = (I_LogDelegate) it.next(); delegate.log(I_LogDelegate.LOG_LEVEL_FATAL, message, t); } } } } public void fatal(Object message) { if (isFatalEnabled()) { if (delegates == null && single_delegate != null) { single_delegate.log(I_LogDelegate.LOG_LEVEL_FATAL, message, null); } else if (delegates != null) { I_Iterator it = delegates.getIterator(); while (it.hasNext()) { I_LogDelegate delegate = (I_LogDelegate) it.next(); delegate.log(I_LogDelegate.LOG_LEVEL_FATAL, message, null); } } } } public void info(Object message, Throwable t) { if (isInfoEnabled()) { if (delegates == null && single_delegate != null) { single_delegate.log(I_LogDelegate.LOG_LEVEL_INFO, message, t); } else if (delegates != null) { I_Iterator it = delegates.getIterator(); while (it.hasNext()) { I_LogDelegate delegate = (I_LogDelegate) it.next(); delegate.log(I_LogDelegate.LOG_LEVEL_INFO, message, t); } } } } public void info(Object message) { if (isInfoEnabled()) { if (delegates == null && single_delegate != null) { single_delegate.log(I_LogDelegate.LOG_LEVEL_INFO, message, null); } else if (delegates != null) { I_Iterator it = delegates.getIterator(); while (it.hasNext()) { I_LogDelegate delegate = (I_LogDelegate) it.next(); delegate.log(I_LogDelegate.LOG_LEVEL_INFO, message, null); } } } } public void setLevel(short p) { this.level = p; } public void setLogLevel(I_ImmutableMap props) { // the delegates don't need a log level this.level = SimpleLog.getLogLevel(props, logName); } public boolean isDebugEnabled() { return SimpleLog.isLevelEnabled(I_LogDelegate.LOG_LEVEL_DEBUG, this.level); } public boolean isErrorEnabled() { return SimpleLog.isLevelEnabled(I_LogDelegate.LOG_LEVEL_ERROR, this.level); } public boolean isFatalEnabled() { return SimpleLog.isLevelEnabled(I_LogDelegate.LOG_LEVEL_FATAL, this.level); } public boolean isInfoEnabled() { return SimpleLog.isLevelEnabled(I_LogDelegate.LOG_LEVEL_INFO, this.level); } public boolean isTraceEnabled() { return 
SimpleLog.isLevelEnabled(I_LogDelegate.LOG_LEVEL_TRACE, this.level); } public boolean isWarnEnabled() { return SimpleLog.isLevelEnabled(I_LogDelegate.LOG_LEVEL_WARN, this.level); } public void trace(Object message, Throwable t) { if (isTraceEnabled()) { if (delegates == null && single_delegate != null) { single_delegate.log(I_LogDelegate.LOG_LEVEL_TRACE, message, t); } else if (delegates != null) { I_Iterator it = delegates.getIterator(); while (it.hasNext()) { I_LogDelegate delegate = (I_LogDelegate) it.next(); delegate.log(I_LogDelegate.LOG_LEVEL_TRACE, message, t); } } } } public void trace(Object message) { if (isTraceEnabled()) { if (delegates == null && single_delegate != null) { single_delegate.log(I_LogDelegate.LOG_LEVEL_TRACE, message, null); } else if (delegates != null) { I_Iterator it = delegates.getIterator(); while (it.hasNext()) { I_LogDelegate delegate = (I_LogDelegate) it.next(); delegate.log(I_LogDelegate.LOG_LEVEL_TRACE, message, null); } } } } public void warn(Object message, Throwable t) { if (isWarnEnabled()) { if (delegates == null && single_delegate != null) { single_delegate.log(I_LogDelegate.LOG_LEVEL_WARN, message, t); } else if (delegates != null) { I_Iterator it = delegates.getIterator(); while (it.hasNext()) { I_LogDelegate delegate = (I_LogDelegate) it.next(); delegate.log(I_LogDelegate.LOG_LEVEL_WARN, message, t); } } } } public void warn(Object message) { if (isWarnEnabled()) { if (delegates == null && single_delegate != null) { single_delegate.log(I_LogDelegate.LOG_LEVEL_WARN, message, null); } else if (delegates != null) { I_Iterator it = delegates.getIterator(); while (it.hasNext()) { I_LogDelegate delegate = (I_LogDelegate) it.next(); delegate.log(I_LogDelegate.LOG_LEVEL_WARN, message, null); } } } } public short getLevel() { return level; } public String getLogName() { return logName; } public void log(short type, Object message, Throwable t) { if (LogPlatform.isDebug()) { LogPlatform.log("LogFactory"," in log with type " + 
type + " isEnabled " + isEnabled()); } if (isEnabled()) { switch (type) { case I_LogDelegate.LOG_LEVEL_TRACE: this.trace(message, t); break; case I_LogDelegate.LOG_LEVEL_DEBUG: this.debug(message, t); break; case I_LogDelegate.LOG_LEVEL_INFO: this.info(message, t); break; case I_LogDelegate.LOG_LEVEL_WARN: this.warn(message, t); break; case I_LogDelegate.LOG_LEVEL_ERROR: this.error(message, t); break; case I_LogDelegate.LOG_LEVEL_FATAL: this.fatal(message, t); break; } } } public int hashCode() { if (logName == null) { return 0; } return logName.hashCode(); } public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() == obj.getClass()) { return equals2((I_Log) obj); } try { return equals2((I_Log) obj); } catch (ClassCastException x) { // do nothing; } return false; } private boolean equals2(I_Log obj) { if (logName == null) { if (obj.getLogName() != null) return false; } else if (!logName.equals(obj.getLogName())) return false; return true; } public boolean isEnabled() { return enabled; } public void setEnabled(boolean enabled) { this.enabled = enabled; } public String toString() { I_Appender sb = AppenderFactory.create(); sb.append("ProxyLog [name="); sb.append(logName); sb.append(",delegates="); sb.append(this.delegates); sb.append("]"); return sb.toString(); } }
/*
 * Copyright 2012-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.android;

import static org.junit.Assert.assertEquals;

import com.facebook.buck.cli.BuildTargetNodeToBuildRuleTransformer;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.jvm.java.JavaLibraryBuilder;
import com.facebook.buck.jvm.java.KeystoreBuilder;
import com.facebook.buck.jvm.java.PrebuiltJarBuilder;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.FakeSourcePath;
import com.facebook.buck.rules.PathSourcePath;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.testutil.FakeProjectFilesystem;
import com.facebook.buck.util.BuckConstant;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;

import org.junit.Test;

import java.nio.file.Path;
import java.nio.file.Paths;

public class AndroidPackageableCollectorTest {

  /**
   * This is a regression test to ensure that an additional 1 second startup cost is not
   * re-introduced to fb4a.
   */
  @Test
  public void testFindTransitiveDependencies() throws Exception {
    // --- Setup: fake filesystem plus a rule graph with prebuilt JARs, NDK and
    // prebuilt native libraries, a java_library, an android_resource, and a keystore.
    BuildRuleResolver ruleResolver =
        new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer());
    SourcePathResolver pathResolver = new SourcePathResolver(ruleResolver);
    ProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
    Path prebuiltNativeLibraryPath = Paths.get("java/com/facebook/prebuilt_native_library/libs");
    projectFilesystem.mkdirs(prebuiltNativeLibraryPath);

    // Create an AndroidBinaryRule that transitively depends on two prebuilt JARs. One of the two
    // prebuilt JARs will be listed in the AndroidBinaryRule's no_dx list.
    BuildTarget guavaTarget = BuildTargetFactory.newInstance("//third_party/guava:guava");
    PrebuiltJarBuilder
        .createBuilder(guavaTarget)
        .setBinaryJar(Paths.get("third_party/guava/guava-10.0.1.jar"))
        .build(ruleResolver);

    BuildTarget jsr305Target = BuildTargetFactory.newInstance("//third_party/jsr-305:jsr-305");
    PrebuiltJarBuilder
        .createBuilder(jsr305Target)
        .setBinaryJar(Paths.get("third_party/jsr-305/jsr305.jar"))
        .build(ruleResolver);

    BuildRule ndkLibrary = new NdkLibraryBuilder(
        BuildTargetFactory.newInstance("//java/com/facebook/native_library:library"))
        .build(ruleResolver, projectFilesystem);

    BuildTarget prebuiltNativeLibraryTarget =
        BuildTargetFactory.newInstance("//java/com/facebook/prebuilt_native_library:library");
    BuildRule prebuiltNativeLibraryBuild =
        PrebuiltNativeLibraryBuilder.newBuilder(prebuiltNativeLibraryTarget)
            .setNativeLibs(prebuiltNativeLibraryPath)
            .setIsAsset(true)
            .build(ruleResolver, projectFilesystem);

    BuildTarget libraryRuleTarget =
        BuildTargetFactory.newInstance("//java/src/com/facebook:example");
    JavaLibraryBuilder
        .createBuilder(libraryRuleTarget)
        .setProguardConfig(Paths.get("debug.pro"))
        .addSrc(Paths.get("Example.java"))
        .addDep(guavaTarget)
        .addDep(jsr305Target)
        .addDep(prebuiltNativeLibraryBuild.getBuildTarget())
        .addDep(ndkLibrary.getBuildTarget())
        .build(ruleResolver);

    BuildTarget manifestTarget = BuildTargetFactory.newInstance("//java/src/com/facebook:res");
    AndroidResource manifestRule = AndroidResourceRuleBuilder
        .newBuilder()
        .setResolver(pathResolver)
        .setBuildTarget(manifestTarget)
        .setManifest(
            new PathSourcePath(
                projectFilesystem,
                Paths.get("java/src/com/facebook/module/AndroidManifest.xml")))
        .setAssets(new FakeSourcePath("assets"))
        .build();
    ruleResolver.addToIndex(manifestRule);

    BuildTarget keystoreTarget = BuildTargetFactory.newInstance("//keystore:debug");
    KeystoreBuilder.createBuilder(keystoreTarget)
        .setStore(new FakeSourcePath(projectFilesystem, "keystore/debug.keystore"))
        .setProperties(new FakeSourcePath(projectFilesystem, "keystore/debug.keystore.properties"))
        .build(ruleResolver);

    // --- Exercise: build the android_binary with guava excluded from dexing (no_dx).
    ImmutableSortedSet<BuildTarget> originalDepsTargets =
        ImmutableSortedSet.of(libraryRuleTarget, manifestTarget);
    ruleResolver.getAllRules(originalDepsTargets);
    AndroidBinary binaryRule = (AndroidBinary) AndroidBinaryBuilder.createBuilder(
            BuildTargetFactory.newInstance("//java/src/com/facebook:app"))
        .setOriginalDeps(originalDepsTargets)
        .setBuildTargetsToExcludeFromDex(
            ImmutableSet.of(BuildTargetFactory.newInstance("//third_party/guava:guava")))
        .setManifest(new FakeSourcePath("java/src/com/facebook/AndroidManifest.xml"))
        .setKeystore(keystoreTarget)
        .build(ruleResolver);

    // Verify that the correct transitive dependencies are found.
    AndroidPackageableCollection packageableCollection =
        binaryRule.getAndroidPackageableCollection();
    assertEquals(
        "Because guava was passed to no_dx, it should not be in the classpathEntriesToDex list",
        ImmutableSet.of(
            Paths.get("buck-out/gen/third_party/jsr-305/jsr-305.jar"),
            BuckConstant.GEN_PATH.resolve(
                "java/src/com/facebook/lib__example__output/example.jar")),
        FluentIterable.from(packageableCollection.getClasspathEntriesToDex())
            .transform(pathResolver.deprecatedPathFunction())
            .toSet());
    assertEquals(
        "Because guava was passed to no_dx, it should not be treated as a third-party JAR whose " +
            "resources need to be extracted and repacked in the APK. If this is done, then code " +
            "in the guava-10.0.1.dex.1.jar in the APK's assets/ tmp may try to load the resource " +
            "from the APK as a ZipFileEntry rather than as a resource within " +
            "guava-10.0.1.dex.1.jar. Loading a resource in this way could take substantially " +
            "longer. Specifically, this was observed to take over one second longer to load " +
            "the resource in fb4a. Because the resource was loaded on startup, this introduced a " +
            "substantial regression in the startup time for the fb4a app.",
        ImmutableSet.of(Paths.get("buck-out/gen/third_party/jsr-305/jsr-305.jar")),
        FluentIterable.from(packageableCollection.getPathsToThirdPartyJars())
            .transform(pathResolver.deprecatedPathFunction())
            .toSet());
    assertEquals(
        "Because assets directory was passed an AndroidResourceRule it should be added to the " +
            "transitive dependencies",
        ImmutableSet.of(new FakeSourcePath("assets")),
        packageableCollection.getAssetsDirectories());
    assertEquals(
        "Because a native library was declared as a dependency, it should be added to the " +
            "transitive dependencies.",
        ImmutableSet.<SourcePath>of(
            new PathSourcePath(
                new FakeProjectFilesystem(),
                ((NativeLibraryBuildRule) ndkLibrary).getLibraryPath())),
        packageableCollection.getNativeLibsDirectories());
    // NOTE(review): "dependecies" typo below is in the original assertion message; left unchanged.
    assertEquals(
        "Because a prebuilt native library was declared as a dependency (and asset), it should " +
            "be added to the transitive dependecies.",
        ImmutableSet.<SourcePath>of(
            new PathSourcePath(
                new FakeProjectFilesystem(),
                ((NativeLibraryBuildRule) prebuiltNativeLibraryBuild).getLibraryPath())),
        packageableCollection.getNativeLibAssetsDirectories());
    assertEquals(
        ImmutableSet.of(new FakeSourcePath("debug.pro")),
        packageableCollection.getProguardConfigs());
  }

  /**
   * Create the following dependency graph of {@link AndroidResource}s:
   * <pre>
   *    A
   *  / | \
   * B  |  D
   *  \ | /
   *    C
   * </pre>
   * Note that an ordinary breadth-first traversal would yield either {@code A B C D} or
   * {@code A D C B}. However, either of these would be <em>wrong</em> in this case because we need
   * to be sure that we perform a topological sort, the resulting traversal of which is either
   * {@code A B D C} or {@code A D B C}.
   * <p>
   * The reason for the correct result being reversed is because we want the resources with the most
   * dependencies listed first on the path, so that they're used in preference to the ones that they
   * depend on (presumably, the reason for extending the initial set of resources was to override
   * values).
   */
  @Test
  public void testGetAndroidResourceDeps() throws Exception {
    // Build the diamond-shaped resource graph described in the javadoc above.
    BuildRuleResolver ruleResolver =
        new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer());
    SourcePathResolver pathResolver = new SourcePathResolver(ruleResolver);
    BuildRule c = ruleResolver.addToIndex(
        AndroidResourceRuleBuilder.newBuilder()
            .setResolver(pathResolver)
            .setBuildTarget(BuildTargetFactory.newInstance("//:c"))
            .setRes(new FakeSourcePath("res_c"))
            .setRDotJavaPackage("com.facebook")
            .build());

    BuildRule b = ruleResolver.addToIndex(
        AndroidResourceRuleBuilder.newBuilder()
            .setResolver(pathResolver)
            .setBuildTarget(BuildTargetFactory.newInstance("//:b"))
            .setRes(new FakeSourcePath("res_b"))
            .setRDotJavaPackage("com.facebook")
            .setDeps(ImmutableSortedSet.of(c))
            .build());

    BuildRule d = ruleResolver.addToIndex(
        AndroidResourceRuleBuilder.newBuilder()
            .setResolver(pathResolver)
            .setBuildTarget(BuildTargetFactory.newInstance("//:d"))
            .setRes(new FakeSourcePath("res_d"))
            .setRDotJavaPackage("com.facebook")
            .setDeps(ImmutableSortedSet.of(c))
            .build());

    AndroidResource a = ruleResolver.addToIndex(
        AndroidResourceRuleBuilder.newBuilder()
            .setResolver(pathResolver)
            .setBuildTarget(BuildTargetFactory.newInstance("//:a"))
            .setRes(new FakeSourcePath("res_a"))
            .setRDotJavaPackage("com.facebook")
            .setDeps(ImmutableSortedSet.of(b, c, d))
            .build());

    AndroidPackageableCollector collector = new AndroidPackageableCollector(a.getBuildTarget());
    collector.addPackageables(ImmutableList.<AndroidPackageable>of(a));

    // Note that a topological sort for a DAG is not guaranteed to be unique, but we order nodes
    // within the same depth of the search.
    ImmutableList<BuildTarget> result = FluentIterable.from(ImmutableList.of(a, d, b, c))
        .transform(BuildTarget.TO_TARGET)
        .toList();

    assertEquals(
        "Android resources should be topologically sorted.",
        result,
        collector.build().getResourceDetails().getResourcesWithNonEmptyResDir());

    // Introduce an AndroidBinaryRule that depends on A and C and verify that the same topological
    // sort results. This verifies that both AndroidResourceRule.getAndroidResourceDeps does the
    // right thing when it gets a non-AndroidResourceRule as well as an AndroidResourceRule.
    BuildTarget keystoreTarget = BuildTargetFactory.newInstance("//keystore:debug");
    KeystoreBuilder.createBuilder(keystoreTarget)
        .setStore(new FakeSourcePath("keystore/debug.keystore"))
        .setProperties(new FakeSourcePath("keystore/debug.keystore.properties"))
        .build(ruleResolver);

    ImmutableSortedSet<BuildTarget> declaredDepsTargets =
        ImmutableSortedSet.of(a.getBuildTarget(), c.getBuildTarget());
    // NOTE(review): "AndroidManfiest.xml" typo is in the original fixture path; left unchanged.
    AndroidBinary androidBinary = (AndroidBinary) AndroidBinaryBuilder
        .createBuilder(BuildTargetFactory.newInstance("//:e"))
        .setManifest(new FakeSourcePath("AndroidManfiest.xml"))
        .setKeystore(keystoreTarget)
        .setOriginalDeps(declaredDepsTargets)
        .build(ruleResolver);

    assertEquals(
        "Android resources should be topologically sorted.",
        result,
        androidBinary
            .getAndroidPackageableCollection()
            .getResourceDetails()
            .getResourcesWithNonEmptyResDir());
  }

  /**
   * If the keystore rule depends on an android_library, and an android_binary uses that keystore,
   * the keystore's android_library should not contribute to the classpath of the android_binary.
   */
  @Test
  public void testGraphForAndroidBinaryExcludesKeystoreDeps() throws Exception {
    BuildRuleResolver ruleResolver =
        new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer());
    SourcePathResolver pathResolver = new SourcePathResolver(ruleResolver);

    // The keystore depends on this library; it must NOT reach the binary's classpath.
    BuildTarget androidLibraryKeystoreTarget =
        BuildTargetFactory.newInstance("//java/com/keystore/base:base");
    BuildRule androidLibraryKeystore = AndroidLibraryBuilder
        .createBuilder(androidLibraryKeystoreTarget)
        .addSrc(Paths.get("java/com/facebook/keystore/Base.java"))
        .build(ruleResolver);

    BuildTarget keystoreTarget = BuildTargetFactory.newInstance("//keystore:debug");
    KeystoreBuilder.createBuilder(keystoreTarget)
        .setStore(new FakeSourcePath("keystore/debug.keystore"))
        .setProperties(new FakeSourcePath("keystore/debug.keystore.properties"))
        .addDep(androidLibraryKeystore.getBuildTarget())
        .build(ruleResolver);

    BuildTarget androidLibraryTarget =
        BuildTargetFactory.newInstance("//java/com/facebook/base:base");
    BuildRule androidLibrary = AndroidLibraryBuilder.createBuilder(androidLibraryTarget)
        .addSrc(Paths.get("java/com/facebook/base/Base.java"))
        .build(ruleResolver);

    ImmutableSortedSet<BuildTarget> originalDepsTargets =
        ImmutableSortedSet.of(androidLibrary.getBuildTarget());
    AndroidBinary androidBinary = (AndroidBinary) AndroidBinaryBuilder.createBuilder(
            BuildTargetFactory.newInstance("//apps/sample:app"))
        .setManifest(new FakeSourcePath("apps/sample/AndroidManifest.xml"))
        .setOriginalDeps(originalDepsTargets)
        .setKeystore(keystoreTarget)
        .build(ruleResolver);

    AndroidPackageableCollection packageableCollection =
        androidBinary.getAndroidPackageableCollection();
    assertEquals(
        "Classpath entries should include facebook/base but not keystore/base.",
        ImmutableSet.of(
            BuckConstant.GEN_PATH.resolve("java/com/facebook/base/lib__base__output/base.jar")),
        FluentIterable.from(packageableCollection.getClasspathEntriesToDex())
            .transform(pathResolver.deprecatedPathFunction())
            .toSet());
  }
}
/* * Copyright 2014 Plain Solutions * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.tt.platform.servlet; import org.tt.core.dm.TTDeliveryManager; import org.tt.core.dm.TTFactory; import org.tt.core.entity.datafetcher.Department; import org.tt.core.entity.datafetcher.Group; import org.tt.core.entity.db.TTEntity; import org.tt.core.sql.ex.NoSuchDepartmentException; import org.tt.core.sql.ex.NoSuchGroupException; import org.tt.platform.convert.AbstractDataConverter; import org.tt.platform.convert.json.JSONConverter; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Response; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.sql.SQLException; import java.util.List; @Path("/") public class Resources { final TTFactory ttf = TTFactory.getInstance(); final AbstractDataConverter dconv = new JSONConverter(); final int ResponseSQLErrorCode = 209; final int ResponseNoSuchDepartmentCode = 309; final int ResponseNoSuchGroupCode = 399; final int ResponseUnsupportedEncodingCode = 509; @GET @Path("/2/department/{tag}/group/{name}") public Response getPlainerTT(@PathParam("tag") String tag, @PathParam("name") String name) { Response.ResponseBuilder r= Response.ok(); r.header("Access-Control-Allow-Origin", "*"); r.header("Access-Control-Allow-Methods", "GET"); r.type("application/json;charset=UTF-8"); String groupName; try { groupName = URLDecoder.decode(name, 
// NOTE(review): this chunk begins mid-method. The fragment below is the tail of a
// sibling endpoint whose signature and URLDecoder.decode(name, ...) call sit above
// this view; it finishes that method's try/catch ladder and returns the response.
            "UTF-8");
            TTDeliveryManager ttdm = ttf.produceDeliveryManager();
            try {
                TTEntity result = ttdm.getTT(tag, groupName);
                r.status(Response.Status.OK);
                // "Plainer" variant of the timetable-to-JSON conversion (see convertTT below)
                r.entity(dconv.convertTTPlainer(result));
            } catch (SQLException e) {
                e.printStackTrace();
                r.status(this.ResponseSQLErrorCode);
                r.entity(dconv.returnSQLErrMsg(e.getSQLState()));
            } catch (NoSuchDepartmentException e) {
                e.printStackTrace();
                r.status(this.ResponseNoSuchDepartmentCode);
                r.entity(dconv.returnNoSuchDepEx());
            } catch (NoSuchGroupException e) {
                e.printStackTrace();
                r.status(this.ResponseNoSuchGroupCode);
                r.entity(dconv.returnNoSuchGrEx());
            }
        } catch (UnsupportedEncodingException e) {
            // decode of the path segment failed; report a JSON-ish error body
            e.printStackTrace();
            r.status(this.ResponseUnsupportedEncodingCode);
            r.entity("{errmsg: Invalid encoding}");
        }
        return r.build();
    }

    /**
     * Returns the timetable for one group of a department as JSON.
     *
     * @param tag  department tag (path parameter)
     * @param name URL-encoded group name (path parameter)
     * @return 200 with the converted timetable, or an error status/body for
     *         SQL, unknown-department, unknown-group, or bad-encoding failures
     */
    @GET
    @Path("/1/department/{tag}/group/{name}")
    public Response getTT(@PathParam("tag") String tag, @PathParam("name") String name) {
        Response.ResponseBuilder r = Response.ok();
        // CORS headers: the API is consumed by browsers on other origins
        r.header("Access-Control-Allow-Origin", "*");
        r.header("Access-Control-Allow-Methods", "GET");
        r.type("application/json;charset=UTF-8");
        String groupName;
        try {
            // group names may contain percent-encoded characters
            groupName = URLDecoder.decode(name, "UTF-8");
            TTDeliveryManager ttdm = ttf.produceDeliveryManager();
            try {
                TTEntity result = ttdm.getTT(tag, groupName);
                r.status(Response.Status.OK);
                r.entity(dconv.convertTT(result));
            } catch (SQLException e) {
                e.printStackTrace();
                r.status(this.ResponseSQLErrorCode);
                r.entity(dconv.returnSQLErrMsg(e.getSQLState()));
            } catch (NoSuchDepartmentException e) {
                e.printStackTrace();
                r.status(this.ResponseNoSuchDepartmentCode);
                r.entity(dconv.returnNoSuchDepEx());
            } catch (NoSuchGroupException e) {
                e.printStackTrace();
                r.status(this.ResponseNoSuchGroupCode);
                r.entity(dconv.returnNoSuchGrEx());
            }
        } catch (UnsupportedEncodingException e) {
            e.printStackTrace();
            r.status(this.ResponseUnsupportedEncodingCode);
            r.entity("{errmsg: Invalid encoding}");
        }
        return r.build();
    }

    /**
     * Lists the groups of a department as JSON.
     *
     * @param tag      department tag (path parameter)
     * @param fullfill when 1, only groups that actually have timetable entries
     *                 are returned; otherwise all groups
     * @return 200 with the group list, or an error status/body
     */
    @GET
    @Path("/1/department/{tag}/groups")
    public Response getAllGroups(@PathParam("tag") String tag, @QueryParam("filled") int fullfill) {
        Response.ResponseBuilder r = Response.ok();
        r.header("Access-Control-Allow-Origin", "*");
        r.header("Access-Control-Allow-Methods", "GET");
        r.type("application/json;charset=UTF-8");
        TTDeliveryManager ttdm = ttf.produceDeliveryManager();
        try {
            List<Group> result;
            if (fullfill == 1)
                result = ttdm.getNonEmptyGroups(tag);
            else
                result = ttdm.getGroups(tag);
            r.status(Response.Status.OK);
            r.entity(dconv.convertGroupList(result));
        } catch (NoSuchDepartmentException e) {
            e.printStackTrace();
            r.status(this.ResponseNoSuchDepartmentCode);
            r.entity(dconv.returnNoSuchDepEx());
        } catch (SQLException e) {
            e.printStackTrace();
            r.status(this.ResponseSQLErrorCode);
            r.entity(dconv.returnSQLErrMsg(e.getSQLState()));
        } catch (NoSuchGroupException e) {
            e.printStackTrace();
            r.status(this.ResponseNoSuchGroupCode);
            r.entity(dconv.returnNoSuchGrEx());
        }
        return r.build();
    }

    /** v2 alias for {@code /1/department/{tag}/groups}; delegates unchanged. */
    @GET
    @Path("/2/department/{tag}/groups")
    public Response getAllGroupsForward(@PathParam("tag") String tag, @QueryParam("filled") int fullfill) {
        return this.getAllGroups(tag, fullfill);
    }

    /**
     * Returns the informational message attached to a department as JSON.
     *
     * @param tag department tag (path parameter)
     * @return 200 with the message, or an error status/body
     */
    @GET
    @Path("/1/department/{tag}/msg")
    public Response getDepartmentMessage(@PathParam("tag") String tag) {
        Response.ResponseBuilder r = Response.ok();
        r.header("Access-Control-Allow-Origin", "*");
        r.header("Access-Control-Allow-Methods", "GET");
        r.type("application/json;charset=UTF-8");
        TTDeliveryManager ttdm = ttf.produceDeliveryManager();
        try {
            String result = ttdm.getDepartmentMessage(tag);
            r.status(Response.Status.OK);
            r.entity(dconv.convertDepartmentMessage(result));
        } catch (NoSuchDepartmentException e) {
            e.printStackTrace();
            r.status(this.ResponseNoSuchDepartmentCode);
            r.entity(dconv.returnNoSuchDepEx());
        } catch (SQLException e) {
            e.printStackTrace();
            r.status(this.ResponseSQLErrorCode);
            r.entity(dconv.returnSQLErrMsg(e.getSQLState()));
        }
        return r.build();
    }

    /** v2 alias for {@code /1/department/{tag}/msg}; delegates unchanged. */
    @GET
    @Path("/2/department/{tag}/msg")
    public Response getDepartmentMessageForward(@PathParam("tag") String tag) {
        return this.getDepartmentMessage(tag);
    }

    /**
     * Lists all departments as JSON.
     *
     * @return 200 with the department list, or an SQL error status/body
     */
    @GET
    @Path("/1/departments")
    public Response getDepartments() {
        Response.ResponseBuilder r = Response.ok();
        r.header("Access-Control-Allow-Origin", "*");
        r.header("Access-Control-Allow-Methods", "GET");
        r.type("application/json;charset=UTF-8");
        TTDeliveryManager ttdm = ttf.produceDeliveryManager();
        try {
            List<Department> result = ttdm.getDepartments();
            r.status(Response.Status.OK);
            r.entity(dconv.convertDepartmentList(result));
        } catch (SQLException e) {
            e.printStackTrace();
            r.status(this.ResponseSQLErrorCode);
            r.entity(dconv.returnSQLErrMsg(e.getSQLState()));
        }
        return r.build();
    }

    /** v2 alias for {@code /1/departments}; delegates unchanged. */
    @GET
    @Path("/2/departments")
    public Response getDepartmentsForward() {
        return this.getDepartments();
    }
}
/* * Copyright 2015 Adaptris Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.adaptris.core; import static org.apache.commons.lang3.StringUtils.defaultIfEmpty; import org.apache.commons.lang3.BooleanUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.adaptris.annotation.AdvancedConfig; import com.adaptris.annotation.InputFieldDefault; import com.adaptris.core.util.Args; import com.adaptris.util.GuidGenerator; /** * <p> * Implementation of default / common behaviour for <code>Service</code>s. * Includes basic implementation of <code>MessageEventGenerator</code> which * returns the fully qualified name of the class. * </p> */ public abstract class ServiceImp implements Service { // protected transient Logger log = LoggerFactory.getLogger(this.getClass().getName()); protected transient Logger log = LoggerFactory.getLogger(this.getClass().getName()); private transient ComponentState serviceState; private transient boolean prepared = false; private String uniqueId; private transient boolean isBranching; // defaults to false @AdvancedConfig @InputFieldDefault(value = "false") private Boolean continueOnFail; @AdvancedConfig(rare = true) @InputFieldDefault(value = "false") private Boolean isTrackingEndpoint; /** * <p> * Creates a new instance. Default unique ID is autogenerated using {@link GuidGenerator#getUUID()}. 
* </p> */ public ServiceImp() { setUniqueId(new GuidGenerator().getUUID()); changeState(ClosedState.getInstance()); } @Override public final void init() throws CoreException { if (!prepared) prepare(); initService(); } protected abstract void initService() throws CoreException; @Override public final void close() { closeService(); prepared = false; } protected abstract void closeService(); @Override public void stop() { // over-ride if required } @Override public void start() throws CoreException { // over-ride if required } @Override public String createName() { return this.getClass().getName(); } @Override public String createQualifier() { return defaultIfEmpty(getUniqueId(), ""); } @Override public String getUniqueId() { return uniqueId; } @Override public void setUniqueId(String s) { uniqueId = Args.notNull(s, "uniqueId"); } @Override public boolean isBranching() { return isBranching; } @Override public boolean continueOnFailure() { return BooleanUtils.toBooleanDefaultIfNull(getContinueOnFail(), false); } /** * @return whether or not this service is configured to continue on failure. * @see #continueOnFailure() */ public Boolean getContinueOnFail() { return continueOnFail; } /** * whether or not this service is configured to continue on failure. * * @param b true/false, default if not specified is false. */ public void setContinueOnFail(Boolean b) { continueOnFail = b; } public Boolean getIsTrackingEndpoint() { return isTrackingEndpoint; } /** * whether or not this service is is a tracking endpoint. * * @param b true/false, default if not specified is false. */ public void setIsTrackingEndpoint(Boolean b) { isTrackingEndpoint = b; } @Override public boolean isTrackingEndpoint() { return BooleanUtils.toBooleanDefaultIfNull(getIsTrackingEndpoint(), false); } /** * <p> * Updates the state for the component <code>ComponentState</code>. 
* </p> */ @Override public void changeState(ComponentState newState) { serviceState = newState; } /** * <p> * Returns the last record <code>ComponentState</code>. * </p> * @return the current <code>ComponentState</code> */ @Override public ComponentState retrieveComponentState() { return serviceState; } /** * <p> * Request this component is init'd. * </p> * @throws CoreException wrapping any underlying Exceptions */ @Override public void requestInit() throws CoreException { serviceState.requestInit(this); } /** * <p> * Request this component is started. * </p> * @throws CoreException wrapping any underlying Exceptions */ @Override public void requestStart() throws CoreException { serviceState.requestStart(this); } /** * <p> * Request this component is stopped. * </p> */ @Override public void requestStop() { serviceState.requestStop(this); } /** * <p> * Request this component is closed. * </p> */ @Override public void requestClose() { serviceState.requestClose(this); } }
/*
 * Copyright 2016 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.keycloak.services.resources.admin;

import static javax.ws.rs.core.Response.Status.BAD_REQUEST;

import org.jboss.logging.Logger;
import org.jboss.resteasy.annotations.cache.NoCache;
import javax.ws.rs.NotFoundException;
import org.keycloak.authorization.model.Resource;
import org.keycloak.authorization.model.ResourceServer;
import org.keycloak.broker.provider.IdentityProvider;
import org.keycloak.broker.provider.IdentityProviderFactory;
import org.keycloak.broker.provider.IdentityProviderMapper;
import org.keycloak.broker.social.SocialIdentityProvider;
import org.keycloak.events.admin.OperationType;
import org.keycloak.events.admin.ResourceType;
import org.keycloak.models.ClientModel;
import org.keycloak.models.FederatedIdentityModel;
import org.keycloak.models.IdentityProviderMapperModel;
import org.keycloak.models.IdentityProviderModel;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.KeycloakSessionFactory;
import org.keycloak.models.ModelDuplicateException;
import org.keycloak.models.RealmModel;
import org.keycloak.models.UserModel;
import org.keycloak.models.utils.ModelToRepresentation;
import org.keycloak.models.utils.RepresentationToModel;
import org.keycloak.models.utils.StripSecretsUtils;
import org.keycloak.provider.ProviderConfigProperty;
import org.keycloak.provider.ProviderFactory;
import org.keycloak.representations.idm.ComponentRepresentation;
import org.keycloak.representations.idm.ConfigPropertyRepresentation;
import org.keycloak.representations.idm.IdentityProviderMapperRepresentation;
import org.keycloak.representations.idm.IdentityProviderMapperTypeRepresentation;
import org.keycloak.representations.idm.IdentityProviderRepresentation;
import org.keycloak.representations.idm.ManagementPermissionReference;
import org.keycloak.services.ErrorResponse;
import org.keycloak.services.resources.admin.permissions.AdminPermissionEvaluator;
import org.keycloak.services.resources.admin.permissions.AdminPermissionManagement;
import org.keycloak.services.resources.admin.permissions.AdminPermissions;

import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Admin REST sub-resource for a single identity provider of a realm:
 * CRUD on the provider itself, its mappers, config export, and
 * fine-grained admin permissions.
 *
 * @resource Identity Providers
 * @author Pedro Igor
 */
public class IdentityProviderResource {

    protected static final Logger logger = Logger.getLogger(IdentityProviderResource.class);

    // Evaluates whether the calling admin may view/manage identity providers.
    private final AdminPermissionEvaluator auth;
    private final RealmModel realm;
    private final KeycloakSession session;
    // The provider this sub-resource operates on; null when the alias did not resolve.
    private final IdentityProviderModel identityProviderModel;
    private final AdminEventBuilder adminEvent;

    public IdentityProviderResource(AdminPermissionEvaluator auth, RealmModel realm, KeycloakSession session, IdentityProviderModel identityProviderModel, AdminEventBuilder adminEvent) {
        this.realm = realm;
        this.session = session;
        this.identityProviderModel = identityProviderModel;
        this.auth = auth;
        // Scope all admin events emitted here to the IDENTITY_PROVIDER resource type.
        this.adminEvent = adminEvent.resource(ResourceType.IDENTITY_PROVIDER);
    }

    /**
     * Get the identity provider.
     *
     * @return the representation with secret config values stripped/masked
     */
    @GET
    @NoCache
    @Produces(MediaType.APPLICATION_JSON)
    public IdentityProviderRepresentation getIdentityProvider() {
        this.auth.realm().requireViewIdentityProviders();

        if (identityProviderModel == null) {
            throw new javax.ws.rs.NotFoundException();
        }

        IdentityProviderRepresentation rep = ModelToRepresentation.toRepresentation(realm, this.identityProviderModel);
        // Never return raw secrets to the admin console.
        return StripSecretsUtils.strip(rep);
    }

    /**
     * Delete the identity provider, its user links (via preRemove) and all of
     * its mappers; emits a DELETE admin event.
     *
     * @return 204 No Content
     */
    @DELETE
    @NoCache
    public Response delete() {
        this.auth.realm().requireManageIdentityProviders();

        if (identityProviderModel == null) {
            throw new javax.ws.rs.NotFoundException();
        }

        String alias = this.identityProviderModel.getAlias();
        // Let the user store clean up federated-identity links first.
        session.users().preRemove(realm, identityProviderModel);
        this.realm.removeIdentityProviderByAlias(alias);

        // Mappers are keyed by alias, so they must be removed explicitly.
        Set<IdentityProviderMapperModel> mappers = this.realm.getIdentityProviderMappersByAlias(alias);
        for (IdentityProviderMapperModel mapper : mappers) {
            this.realm.removeIdentityProviderMapper(mapper);
        }

        adminEvent.operation(OperationType.DELETE).resourcePath(session.getContext().getUri()).success();

        return Response.noContent().build();
    }

    /**
     * Update the identity provider.
     *
     * @param providerRep new representation (internalId is used to locate the provider)
     * @return 204 on success, 400 on invalid input or duplicate alias
     */
    @PUT
    @Consumes(MediaType.APPLICATION_JSON)
    @NoCache
    public Response update(IdentityProviderRepresentation providerRep) {
        this.auth.realm().requireManageIdentityProviders();

        if (identityProviderModel == null) {
            throw new javax.ws.rs.NotFoundException();
        }

        try {
            updateIdpFromRep(providerRep, realm, session);

            adminEvent.operation(OperationType.UPDATE).resourcePath(session.getContext().getUri()).representation(providerRep).success();

            return Response.noContent().build();
        } catch (IllegalArgumentException e) {
            String message = e.getMessage();
            if (message == null) {
                message = "Invalid request";
            }
            return ErrorResponse.error(message, BAD_REQUEST);
        } catch (ModelDuplicateException e) {
            return ErrorResponse.exists("Identity Provider " + providerRep.getAlias() + " already exists");
        }
    }

    // Applies the representation to the stored model, preserving the existing client
    // secret when the client sent back the masked placeholder, and relinking users
    // if the alias (providerId) was changed.
    private void updateIdpFromRep(IdentityProviderRepresentation providerRep, RealmModel realm, KeycloakSession session) {
        String internalId = providerRep.getInternalId();
        String newProviderId = providerRep.getAlias();
        String oldProviderId = getProviderIdByInternalId(realm, internalId);

        if (oldProviderId == null) {
            // internalId did not match; fall back to resolving by alias (404 if absent).
            lookUpProviderIdByAlias(realm, providerRep);
        }

        IdentityProviderModel updated = RepresentationToModel.toModel(realm, providerRep, session);

        if (updated.getConfig() != null && ComponentRepresentation.SECRET_VALUE.equals(updated.getConfig().get("clientSecret"))) {
            // The console echoes back a masked secret; keep the previously stored value.
            updated.getConfig().put("clientSecret", identityProviderModel.getConfig() != null ? identityProviderModel.getConfig().get("clientSecret") : null);
        }

        realm.updateIdentityProvider(updated);

        if (oldProviderId != null && !oldProviderId.equals(newProviderId)) {

            // Admin changed the ID (alias) of identity provider. We must update all clients and users
            logger.debug("Changing providerId in all clients and linked users. oldProviderId=" + oldProviderId + ", newProviderId=" + newProviderId);

            updateUsersAfterProviderAliasChange(session.users().getUsers(realm, false), oldProviderId, newProviderId, realm, session);
        }
    }

    // return ID of IdentityProvider from realm based on internalId of this provider
    private static String getProviderIdByInternalId(RealmModel realm, String providerInternalId) {
        List<IdentityProviderModel> providerModels = realm.getIdentityProviders();
        for (IdentityProviderModel providerModel : providerModels) {
            if (providerModel.getInternalId().equals(providerInternalId)) {
                return providerModel.getAlias();
            }
        }

        return null;
    }

    // sets internalId to IdentityProvider based on alias
    private static void lookUpProviderIdByAlias(RealmModel realm, IdentityProviderRepresentation providerRep) {
        List<IdentityProviderModel> providerModels = realm.getIdentityProviders();
        for (IdentityProviderModel providerModel : providerModels) {
            if (providerModel.getAlias().equals(providerRep.getAlias())) {
                providerRep.setInternalId(providerModel.getInternalId());
                return;
            }
        }
        throw new javax.ws.rs.NotFoundException();
    }

    // Re-links each user's federated identity from the old alias to the new one
    // (remove-then-add, since the provider id is part of the link's key).
    private static void updateUsersAfterProviderAliasChange(List<UserModel> users, String oldProviderId, String newProviderId, RealmModel realm, KeycloakSession session) {
        for (UserModel user : users) {
            FederatedIdentityModel federatedIdentity = session.users().getFederatedIdentity(user, oldProviderId, realm);
            if (federatedIdentity != null) {
                // Remove old link first
                session.users().removeFederatedIdentity(realm, user, oldProviderId);

                // And create new
                FederatedIdentityModel newFederatedIdentity = new FederatedIdentityModel(newProviderId, federatedIdentity.getUserId(), federatedIdentity.getUserName(),
                        federatedIdentity.getToken());
                session.users().addFederatedIdentity(realm, user, newFederatedIdentity);
            }
        }
    }

    // Finds the factory matching this provider's providerId across both plain and
    // social identity-provider factories; null if none matches.
    private IdentityProviderFactory getIdentityProviderFactory() {
        List<ProviderFactory> allProviders = new ArrayList<ProviderFactory>();
        allProviders.addAll(this.session.getKeycloakSessionFactory().getProviderFactories(IdentityProvider.class));
        allProviders.addAll(this.session.getKeycloakSessionFactory().getProviderFactories(SocialIdentityProvider.class));

        for (ProviderFactory providerFactory : allProviders) {
            if (providerFactory.getId().equals(identityProviderModel.getProviderId())) return (IdentityProviderFactory)providerFactory;
        }

        return null;
    }

    /**
     * Export public broker configuration for identity provider.
     *
     * @param format Format to use
     * @return the provider's export response, or 404 if export fails
     */
    @GET
    @Path("export")
    @NoCache
    public Response export(@QueryParam("format") String format) {
        this.auth.realm().requireViewIdentityProviders();

        if (identityProviderModel == null) {
            throw new javax.ws.rs.NotFoundException();
        }

        try {
            IdentityProviderFactory factory = getIdentityProviderFactory();
            return factory.create(session, identityProviderModel).export(session.getContext().getUri(), realm, format);
        } catch (Exception e) {
            return ErrorResponse.error("Could not export public broker configuration for identity provider [" + identityProviderModel.getProviderId() + "].", Response.Status.NOT_FOUND);
        }
    }

    /**
     * Get mapper types compatible with this identity provider, keyed by mapper id.
     */
    @GET
    @Path("mapper-types")
    @NoCache
    public Map<String, IdentityProviderMapperTypeRepresentation> getMapperTypes() {
        this.auth.realm().requireViewIdentityProviders();

        if (identityProviderModel == null) {
            throw new javax.ws.rs.NotFoundException();
        }

        KeycloakSessionFactory sessionFactory = session.getKeycloakSessionFactory();
        Map<String, IdentityProviderMapperTypeRepresentation> types = new HashMap<>();
        List<ProviderFactory> factories = sessionFactory.getProviderFactories(IdentityProviderMapper.class);
        for (ProviderFactory factory : factories) {
            IdentityProviderMapper mapper = (IdentityProviderMapper)factory;
            for (String type : mapper.getCompatibleProviders()) {
                // A mapper is included if it supports any provider, or this one specifically.
                if (IdentityProviderMapper.ANY_PROVIDER.equals(type) || type.equals(identityProviderModel.getProviderId())) {
                    IdentityProviderMapperTypeRepresentation rep = new IdentityProviderMapperTypeRepresentation();
                    rep.setId(mapper.getId());
                    rep.setCategory(mapper.getDisplayCategory());
                    rep.setName(mapper.getDisplayType());
                    rep.setHelpText(mapper.getHelpText());
                    List<ProviderConfigProperty> configProperties = mapper.getConfigProperties();
                    for (ProviderConfigProperty prop : configProperties) {
                        ConfigPropertyRepresentation propRep = ModelToRepresentation.toRepresentation(prop);
                        rep.getProperties().add(propRep);
                    }
                    types.put(rep.getId(), rep);
                    break;
                }
            }
        }
        return types;
    }

    /**
     * Get mappers configured for this identity provider.
     */
    @GET
    @Path("mappers")
    @Produces(MediaType.APPLICATION_JSON)
    @NoCache
    public List<IdentityProviderMapperRepresentation> getMappers() {
        this.auth.realm().requireViewIdentityProviders();

        if (identityProviderModel == null) {
            throw new javax.ws.rs.NotFoundException();
        }

        List<IdentityProviderMapperRepresentation> mappers = new LinkedList<>();
        for (IdentityProviderMapperModel model : realm.getIdentityProviderMappersByAlias(identityProviderModel.getAlias())) {
            mappers.add(ModelToRepresentation.toRepresentation(model));
        }
        return mappers;
    }

    /**
     * Add a mapper to the identity provider.
     *
     * @param mapper representation of the mapper to create
     * @return 201 with Location header of the new mapper, or 400 on failure
     */
    @POST
    @Path("mappers")
    @Consumes(MediaType.APPLICATION_JSON)
    public Response addMapper(IdentityProviderMapperRepresentation mapper) {
        this.auth.realm().requireManageIdentityProviders();

        if (identityProviderModel == null) {
            throw new javax.ws.rs.NotFoundException();
        }

        IdentityProviderMapperModel model = RepresentationToModel.toModel(mapper);
        try {
            model = realm.addIdentityProviderMapper(model);
        } catch (Exception e) {
            return ErrorResponse.error("Failed to add mapper '" + model.getName() + "' to identity provider [" + identityProviderModel.getProviderId() + "].", Response.Status.BAD_REQUEST);
        }

        adminEvent.operation(OperationType.CREATE).resource(ResourceType.IDENTITY_PROVIDER_MAPPER).resourcePath(session.getContext().getUri(), model.getId())
                .representation(mapper).success();

        return Response.created(session.getContext().getUri().getAbsolutePathBuilder().path(model.getId()).build()).build();
    }

    /**
     * Get mapper by id for the identity provider.
     *
     * @param id mapper id
     * @return the mapper representation; 404 if unknown
     */
    @GET
    @NoCache
    @Path("mappers/{id}")
    @Produces(MediaType.APPLICATION_JSON)
    public IdentityProviderMapperRepresentation getMapperById(@PathParam("id") String id) {
        this.auth.realm().requireViewIdentityProviders();

        if (identityProviderModel == null) {
            throw new javax.ws.rs.NotFoundException();
        }

        IdentityProviderMapperModel model = realm.getIdentityProviderMapperById(id);
        if (model == null) throw new NotFoundException("Model not found");
        return ModelToRepresentation.toRepresentation(model);
    }

    /**
     * Update a mapper for the identity provider.
     *
     * @param id  Mapper id (must reference an existing mapper)
     * @param rep new mapper state
     */
    @PUT
    @NoCache
    @Path("mappers/{id}")
    @Consumes(MediaType.APPLICATION_JSON)
    public void update(@PathParam("id") String id, IdentityProviderMapperRepresentation rep) {
        this.auth.realm().requireManageIdentityProviders();

        if (identityProviderModel == null) {
            throw new javax.ws.rs.NotFoundException();
        }

        IdentityProviderMapperModel model = realm.getIdentityProviderMapperById(id);
        if (model == null) throw new NotFoundException("Model not found");
        // The fetched model is only used as an existence check; the stored state
        // is replaced wholesale by the representation.
        model = RepresentationToModel.toModel(rep);
        realm.updateIdentityProviderMapper(model);
        adminEvent.operation(OperationType.UPDATE).resource(ResourceType.IDENTITY_PROVIDER_MAPPER).resourcePath(session.getContext().getUri()).representation(rep).success();

    }

    /**
     * Delete a mapper for the identity provider.
     *
     * @param id Mapper id
     */
    @DELETE
    @NoCache
    @Path("mappers/{id}")
    public void delete(@PathParam("id") String id) {
        this.auth.realm().requireManageIdentityProviders();

        if (identityProviderModel == null) {
            throw new javax.ws.rs.NotFoundException();
        }

        IdentityProviderMapperModel model = realm.getIdentityProviderMapperById(id);
        if (model == null) throw new NotFoundException("Model not found");
        realm.removeIdentityProviderMapper(model);
        adminEvent.operation(OperationType.DELETE).resource(ResourceType.IDENTITY_PROVIDER_MAPPER).resourcePath(session.getContext().getUri()).success();

    }

    /**
     * Return object stating whether client Authorization permissions have been initialized or not and a reference
     *
     * @return the management permission reference (disabled placeholder if not enabled)
     */
    @Path("management/permissions")
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    @NoCache
    public ManagementPermissionReference getManagementPermissions() {
        this.auth.realm().requireViewIdentityProviders();

        AdminPermissionManagement permissions = AdminPermissions.management(session, realm);
        if (!permissions.idps().isPermissionsEnabled(identityProviderModel)) {
            return new ManagementPermissionReference();
        }
        return toMgmtRef(identityProviderModel, permissions);
    }

    // Builds an enabled permission reference for the given provider.
    public static ManagementPermissionReference toMgmtRef(IdentityProviderModel model, AdminPermissionManagement permissions) {
        ManagementPermissionReference ref = new ManagementPermissionReference();
        ref.setEnabled(true);
        ref.setResource(permissions.idps().resource(model).getId());
        ref.setScopePermissions(permissions.idps().getPermissions(model));
        return ref;
    }

    /**
     * Return object stating whether client Authorization permissions have been initialized or not and a reference
     *
     * @return initialized manage permissions reference
     */
    @Path("management/permissions")
    @PUT
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    @NoCache
    public ManagementPermissionReference setManagementPermissionsEnabled(ManagementPermissionReference ref) {
        this.auth.realm().requireManageIdentityProviders();

        AdminPermissionManagement permissions = AdminPermissions.management(session, realm);
        permissions.idps().setPermissionsEnabled(identityProviderModel, ref.isEnabled());
        if (ref.isEnabled()) {
            return toMgmtRef(identityProviderModel, permissions);
        } else {
            return new ManagementPermissionReference();
        }
    }
}
/*
 * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.stepfunctions.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * Result of the ListActivities call: the page of activities plus an optional
 * pagination token for retrieving the following page.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/states-2016-11-23/ListActivities" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListActivitiesResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** The list of activities returned in this page. */
    private java.util.List<ActivityListItem> activities;
    /**
     * Pagination token; non-null when more results are available. Pass it back
     * unchanged (with all other arguments unchanged) to fetch the next page.
     */
    private String nextToken;

    /**
     * Returns the list of activities.
     *
     * @return The list of activities.
     */
    public java.util.List<ActivityListItem> getActivities() {
        return activities;
    }

    /**
     * Replaces the list of activities with a defensive copy of the argument.
     *
     * @param activities The list of activities (null clears the list).
     */
    public void setActivities(java.util.Collection<ActivityListItem> activities) {
        // Copy defensively; a null argument clears the field.
        this.activities = (activities == null) ? null : new java.util.ArrayList<ActivityListItem>(activities);
    }

    /**
     * Appends the given activities to the existing list (if any).
     * <p>
     * <b>NOTE:</b> use {@link #setActivities(java.util.Collection)} or
     * {@link #withActivities(java.util.Collection)} to replace rather than append.
     * </p>
     *
     * @param activities The list of activities.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListActivitiesResult withActivities(ActivityListItem... activities) {
        if (this.activities == null) {
            this.activities = new java.util.ArrayList<ActivityListItem>(activities.length);
        }
        java.util.Collections.addAll(this.activities, activities);
        return this;
    }

    /**
     * Replaces the list of activities and returns this object for chaining.
     *
     * @param activities The list of activities.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListActivitiesResult withActivities(java.util.Collection<ActivityListItem> activities) {
        setActivities(activities);
        return this;
    }

    /**
     * Sets the pagination token for retrieving the next page of results.
     *
     * @param nextToken pagination token returned by a previous call; keep all other arguments unchanged when reusing it.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * Returns the pagination token, or null when no further results exist.
     *
     * @return the pagination token for the next page of results.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * Sets the pagination token and returns this object for chaining.
     *
     * @param nextToken pagination token returned by a previous call.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListActivitiesResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder out = new StringBuilder("{");
        if (getActivities() != null) {
            out.append("Activities: ").append(getActivities()).append(",");
        }
        if (getNextToken() != null) {
            out.append("NextToken: ").append(getNextToken());
        }
        return out.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ListActivitiesResult)) {
            return false;
        }
        ListActivitiesResult that = (ListActivitiesResult) obj;
        // Field-wise null-safe comparison.
        return java.util.Objects.equals(this.getActivities(), that.getActivities())
                && java.util.Objects.equals(this.getNextToken(), that.getNextToken());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        // Same accumulation scheme the generated code uses: 31 * acc + fieldHash.
        result = prime * result + java.util.Objects.hashCode(getActivities());
        result = prime * result + java.util.Objects.hashCode(getNextToken());
        return result;
    }

    @Override
    public ListActivitiesResult clone() {
        try {
            return (ListActivitiesResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
/**
 * Copyright (c) 2015 Source Auditor Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package org.spdx.spdxspreadsheet;

import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;

import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.CellStyle;
import org.apache.poi.ss.usermodel.CellType;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.spdx.rdfparser.InvalidSPDXAnalysisException;
import org.spdx.rdfparser.SPDXCreatorInformation;
import org.spdx.rdfparser.SpdxRdfConstants;
import org.spdx.rdfparser.license.AnyLicenseInfo;
import org.spdx.rdfparser.license.SimpleLicensingInfo;
import org.spdx.rdfparser.model.Checksum;
import org.spdx.rdfparser.model.ExternalDocumentRef;
import org.spdx.rdfparser.model.SpdxDocument;
import org.spdx.rdfparser.model.SpdxItem;
import org.spdx.tag.BuildDocument;
import org.spdx.tag.InvalidSpdxTagFileException;

/**
 * "Origins" / document-info worksheet for the SPDX 2.0 spreadsheet format:
 * one column per document-level field (version, data license, creators, ...).
 *
 * @author Gary
 */
public class OriginsSheetV2d0 extends DocumentInfoSheet {
	static final int NUM_COLS = 14;
	// Column indexes are chained so inserting a column only requires editing one line.
	static final int SPDX_VERSION_COL = SPREADSHEET_VERSION_COL + 1;
	static final int DATA_LICENSE_COL = SPDX_VERSION_COL + 1;
	static final int SPDX_ID_COL = DATA_LICENSE_COL + 1;
	static final int LICENSE_LIST_VERSION_COL = SPDX_ID_COL + 1;
	static final int DOCUMENT_NAME_COL = LICENSE_LIST_VERSION_COL + 1;
	static final int NAMESPACE_COL = DOCUMENT_NAME_COL + 1;
	static final int DOCUMENT_DESCRIBES_COL = NAMESPACE_COL + 1;
	static final int EXTERNAL_DOC_REFS_COL = DOCUMENT_DESCRIBES_COL + 1;
	static final int DOCUMENT_COMMENT_COL = EXTERNAL_DOC_REFS_COL + 1;
	static final int CREATED_BY_COL = DOCUMENT_COMMENT_COL + 1;
	static final int CREATED_COL = CREATED_BY_COL + 1;
	static final int AUTHOR_COMMENTS_COL = CREATED_COL + 1;
	static final int USER_DEFINED_COL = AUTHOR_COMMENTS_COL + 1;

	// Per-column metadata; all four arrays are indexed by the column constants above.
	static final boolean[] REQUIRED = new boolean[] {true, true, true, true,
		false, true, true, true, false, false, true, true, false, false};
	static final String[] HEADER_TITLES = new String[] {"Spreadsheet Version",
		"SPDX Version", "Data License", "SPDX Identifier", "License List Version",
		"Document Name", "Document Namespace", "Document Contents",
		"External Document References", "Document Comment", "Creator", "Created",
		"Creator Comment", "Optional User Defined Columns..."};
	static final int[] COLUMN_WIDTHS = new int[] {20, 16, 20, 20, 16, 40, 80, 50, 140,
		70, 60, 20, 70, 60};
	static final boolean[] LEFT_WRAP = new boolean[] {false, false, false, false, false,
		true, true, true, true, true, true, false, true, true};
	static final boolean[] CENTER_NOWRAP = new boolean[] {true, true, true, true, true,
		false, false, false, false, false, false, true, false, false};

	public OriginsSheetV2d0(Workbook workbook, String sheetName, String version) {
		super(workbook, sheetName, version);
	}

	/**
	 * Validates the worksheet: presence, supported spreadsheet version,
	 * header titles, and each data row.
	 *
	 * @return null when valid, otherwise a human-readable error message
	 */
	@Override
	public String verify() {
		try {
			if (sheet == null) {
				return "Worksheet for SPDX Origins does not exist";
			}
			// validate version
			version = getDataCellStringValue(SPREADSHEET_VERSION_COL);
			if (version == null) {
				return "Invalid origins spreadsheet - no spreadsheet version found";
			}
			if (!SPDXSpreadsheet.verifyVersion(version)) {
				return "Spreadsheet version "+version+" not supported.";
			}
			Row firstRow = sheet.getRow(firstRowNum);
			for (int i = 0; i < NUM_COLS-1; i++) { 	// don't check the last col - which is the user defined column
				Cell cell = firstRow.getCell(i+firstCellNum);
				if (cell == null ||
						cell.getStringCellValue() == null ||
						!cell.getStringCellValue().equals(HEADER_TITLES[i])) {
					return "Column "+HEADER_TITLES[i]+" missing for SPDX Origins worksheet";
				}
			}
			// validate rows
			boolean done = false;
			int rowNum = firstRowNum + 1;
			while (!done) {
				Row row = sheet.getRow(rowNum);
				// A row with no SPDX-version cell marks the end of the data region.
				if (row == null || row.getCell(SPDX_VERSION_COL) == null) {
					done = true;
				} else {
					String error = validateRow(row);
					if (error != null) {
						return error;
					}
					rowNum++;
				}
			}
			return null;
		} catch (Exception ex) {
			return "Error in verifying SPDX Origins work sheet: "+ex.getMessage();
		}
	}

	// Checks one data row: required cells present, Created column is a date (numeric).
	@SuppressWarnings("deprecation")
	private String validateRow(Row row) {
		for (int i = 0; i < NUM_COLS; i++) {
			Cell cell = row.getCell(i);
			if (cell == null) {
				if (REQUIRED[i]) {
					// NOTE(review): String.valueOf(...) wraps the whole concatenation here
					// (parenthesis placement looks unintended) — output is identical either way.
					return "Required cell "+HEADER_TITLES[i]+" missing for row "+String.valueOf(row.getRowNum()+" in Origins Spreadsheet");
				}
			} else {
				if (i == CREATED_COL) {
					// POI stores dates as numeric cells.
					if (!(cell.getCellTypeEnum() == CellType.NUMERIC)) {
						return "Created column in origin spreadsheet is not of type Date";
					}
				}
			}
		}
		return null;
	}

	/**
	 * (Re)creates the Origins worksheet in the workbook with headers, column
	 * styles/widths, and a first data row holding the spreadsheet version.
	 */
	public static void create(Workbook wb, String sheetName) {
		int sheetNum = wb.getSheetIndex(sheetName);
		if (sheetNum >= 0) {
			// Replace any pre-existing sheet of the same name.
			wb.removeSheetAt(sheetNum);
		}
		CellStyle headerStyle = AbstractSheet.createHeaderStyle(wb);
		CellStyle centerStyle = AbstractSheet.createCenterStyle(wb);
		CellStyle wrapStyle = AbstractSheet.createLeftWrapStyle(wb);
		Sheet sheet = wb.createSheet(sheetName);
		Row row = sheet.createRow(0);
		for (int i = 0; i < HEADER_TITLES.length; i++) {
			// POI column width unit is 1/256th of a character width.
			sheet.setColumnWidth(i, COLUMN_WIDTHS[i]*256);
			if (LEFT_WRAP[i]) {
				sheet.setDefaultColumnStyle(i, wrapStyle);
			} else if (CENTER_NOWRAP[i]) {
				sheet.setDefaultColumnStyle(i, centerStyle);
			}
			Cell cell = row.createCell(i);
			cell.setCellStyle(headerStyle);
			cell.setCellValue(HEADER_TITLES[i]);
		}
		Row dataRow = sheet.createRow(1);
		Cell ssVersionCell = dataRow.createCell(SPREADSHEET_VERSION_COL);
		ssVersionCell.setCellValue(SPDXSpreadsheet.CURRENT_VERSION);
	}

	public void setAuthorComments(String comments) {
		setDataCellStringValue(AUTHOR_COMMENTS_COL, comments);
	}

	public void setCreatedBy(String createdBy) {
		setDataCellStringValue(CREATED_BY_COL, createdBy);
	}

	public void setDataLicense(String dataLicense) {
		setDataCellStringValue(DATA_LICENSE_COL, dataLicense);
	}

	public void setSPDXVersion(String version) {
		setDataCellStringValue(SPDX_VERSION_COL, version);
	}

	public void setSpreadsheetVersion(String version) {
		setDataCellStringValue(SPREADSHEET_VERSION_COL, version);
	}

	public String getAuthorComments() {
		return getDataCellStringValue(AUTHOR_COMMENTS_COL);
	}

	public Date getCreated() {
		return getDataCellDateValue(CREATED_COL);
	}

	public String getDataLicense() {
		return getDataCellStringValue(DATA_LICENSE_COL);
	}

	public String getSPDXVersion() {
		return getDataCellStringValue(SPDX_VERSION_COL);
	}

	public String getSpreadsheetVersion() {
		return getDataCellStringValue(SPREADSHEET_VERSION_COL);
	}

	/**
	 * Writes the creator list, one creator per row starting at the data row;
	 * an empty/null array clears the column, and rows beyond the new list
	 * length have their creator cells removed.
	 */
	public void setCreatedBy(String[] createdBy) {
		if (createdBy == null || createdBy.length < 1) {
			setDataCellStringValue(CREATED_BY_COL, "");
			int i = firstRowNum + DATA_ROW_NUM + 1;
			Row nextRow = sheet.getRow(i);
			while (nextRow != null) {
				Cell createdByCell = nextRow.getCell(CREATED_BY_COL);
				if (createdByCell != null) {
					createdByCell.setCellValue("");
				}
				i++;
				nextRow = sheet.getRow(i);
			}
			return;
		}
		setDataCellStringValue(CREATED_BY_COL, createdBy[0]);
		for (int i = 1; i < createdBy.length; i++) {
			Row row = getDataRow(i);
			Cell cell = row.getCell(CREATED_BY_COL);
			if (cell == null) {
				cell = row.createCell(CREATED_BY_COL);
			}
			cell.setCellValue(createdBy[i]);
		}
		// delete any remaining rows
		for (int i = firstRowNum + DATA_ROW_NUM + createdBy.length; i <= this.lastRowNum; i++) {
			// NOTE(review): sheet.getRow(i) could be null for a gap row — TODO confirm
			// rows in this range always exist before dereferencing.
			Row row = sheet.getRow(i);
			Cell cell = row.getCell(CREATED_BY_COL);
			if (cell != null) {
				row.removeCell(cell);
			}
		}
	}

	// NOTE(review): chunk ends mid-method — the remainder of getCreatedBy() is
	// outside this view; the fragment below is reproduced unchanged.
	public String[] getCreatedBy() {
		// first count rows
		int numRows = 0;
		while
(sheet.getRow(firstRowNum + DATA_ROW_NUM + numRows) != null && sheet.getRow(firstRowNum + DATA_ROW_NUM + numRows).getCell(CREATED_BY_COL) != null && !sheet.getRow(firstRowNum + DATA_ROW_NUM + numRows).getCell(CREATED_BY_COL).getStringCellValue().isEmpty()) { numRows ++; } String[] retval = new String[numRows]; for (int i = 0; i < numRows; i++) { retval[i] = sheet.getRow(firstRowNum + DATA_ROW_NUM + i).getCell(CREATED_BY_COL).getStringCellValue(); } return retval; } public void setCreated(Date created) { setDataCellDateValue(CREATED_COL, created); } /* (non-Javadoc) * @see org.spdx.spdxspreadsheet.OriginsSheet#getDocumentDomment() */ @Override public String getDocumentComment() { return getDataCellStringValue(DOCUMENT_COMMENT_COL); } /* (non-Javadoc) * @see org.spdx.spdxspreadsheet.OriginsSheet#setDocumentComment(java.lang.String) */ @Override public void setDocumentComment(String docComment) { setDataCellStringValue(DOCUMENT_COMMENT_COL, docComment); } /* (non-Javadoc) * @see org.spdx.spdxspreadsheet.OriginsSheet#getLicenseListVersion() */ @Override public String getLicenseListVersion() { return getDataCellStringValue(LICENSE_LIST_VERSION_COL); } /* (non-Javadoc) * @see org.spdx.spdxspreadsheet.OriginsSheet#setLicenseListVersion(java.lang.String) */ @Override public void setLicenseListVersion(String licenseVersion) { setDataCellStringValue(LICENSE_LIST_VERSION_COL, licenseVersion); } /* (non-Javadoc) * @see org.spdx.spdxspreadsheet.OriginsSheet#getNamespace() */ @Override public String getNamespace() { return getDataCellStringValue(NAMESPACE_COL); } /* (non-Javadoc) * @see org.spdx.spdxspreadsheet.OriginsSheet#addDocument(org.spdx.rdfparser.model.SpdxDocument) */ @Override public void addDocument(SpdxDocument doc) throws SpreadsheetException { // SPDX Version setSPDXVersion(doc.getSpecVersion()); // Created by SPDXCreatorInformation creator; try { creator = doc.getCreationInfo(); } catch (InvalidSPDXAnalysisException e1) { throw(new SpreadsheetException("Error 
getting the creation info: "+e1.getMessage())); } String[] createdBys = creator.getCreators(); setCreatedBy(createdBys); // Data license AnyLicenseInfo dataLicense; try { dataLicense = doc.getDataLicense(); } catch (InvalidSPDXAnalysisException e1) { throw(new SpreadsheetException("Error getting the data license info: "+e1.getMessage())); } if (dataLicense != null && (dataLicense instanceof SimpleLicensingInfo)) { setDataLicense(((SimpleLicensingInfo)dataLicense).getLicenseId()); } // Author Comments String comments = creator.getComment(); if (comments != null && !comments.isEmpty()) { setAuthorComments(comments); } String created = creator.getCreated(); if (created == null) { throw(new SpreadsheetException("Missing created date")); } DateFormat dateFormat = new SimpleDateFormat(SpdxRdfConstants.SPDX_DATE_FORMAT); try { setCreated(dateFormat.parse(created)); } catch (ParseException e) { throw(new SpreadsheetException("Invalid created date - unable to parse")); } // Document comments String docComment = doc.getComment(); if (docComment != null) { setDocumentComment(docComment); } // License List Version String licenseListVersion; try { licenseListVersion = doc.getCreationInfo().getLicenseListVersion(); } catch (InvalidSPDXAnalysisException e) { throw(new SpreadsheetException("Error getting the license list info: "+e.getMessage())); } if (licenseListVersion != null) { setLicenseListVersion(licenseListVersion); } setSpdxId(doc.getId()); setDocumentName(doc.getName()); try { setNamespace(doc.getDocumentNamespace()); } catch (InvalidSPDXAnalysisException e) { throw(new SpreadsheetException("Error getting the document namespace: "+e.getMessage())); } SpdxItem[] contents = null; try { contents = doc.getDocumentDescribes(); } catch (InvalidSPDXAnalysisException e1) { throw(new SpreadsheetException("Error getting the document describes: "+e1.getMessage())); } String[] contentIds = new String[contents.length]; for (int i = 0; i < contents.length; i++) { contentIds[i] = 
contents[i].getId(); } Arrays.sort(contentIds); setDocumentDescribes(contentIds); try { setExternalDocumentRefs(doc.getExternalDocumentRefs()); } catch (InvalidSPDXAnalysisException e) { throw(new SpreadsheetException("Error getting the external document references: "+e.getMessage())); } } /** * @param namespace */ private void setNamespace(String namespace) { setDataCellStringValue(NAMESPACE_COL, namespace); } /* (non-Javadoc) * @see org.spdx.spdxspreadsheet.OriginsSheet#getSpdxId() */ @Override public String getSpdxId() { return getDataCellStringValue(SPDX_ID_COL); } /* (non-Javadoc) * @see org.spdx.spdxspreadsheet.OriginsSheet#setSpdxId(java.lang.String) */ @Override public void setSpdxId(String id) { setDataCellStringValue(SPDX_ID_COL, id); } /* (non-Javadoc) * @see org.spdx.spdxspreadsheet.OriginsSheet#getDocumentName() */ @Override public String getDocumentName() { return getDataCellStringValue(DOCUMENT_NAME_COL); } /* (non-Javadoc) * @see org.spdx.spdxspreadsheet.OriginsSheet#setDocumentName(java.lang.String) */ @Override public void setDocumentName(String documentName) { setDataCellStringValue(DOCUMENT_NAME_COL, documentName); } /* (non-Javadoc) * @see org.spdx.spdxspreadsheet.OriginsSheet#getDocumentContents() */ @Override public String[] getDocumentContents() { // first count rows int numRows = 0; while (sheet.getRow(firstRowNum + DATA_ROW_NUM + numRows) != null && sheet.getRow(firstRowNum + DATA_ROW_NUM + numRows).getCell(DOCUMENT_DESCRIBES_COL) != null && !sheet.getRow(firstRowNum + DATA_ROW_NUM + numRows).getCell(DOCUMENT_DESCRIBES_COL).getStringCellValue().isEmpty()) { numRows ++; } String[] retval = new String[numRows]; for (int i = 0; i < numRows; i++) { retval[i] = sheet.getRow(firstRowNum + DATA_ROW_NUM + i).getCell(DOCUMENT_DESCRIBES_COL).getStringCellValue(); } return retval; } /* (non-Javadoc) * @see org.spdx.spdxspreadsheet.OriginsSheet#setDocumentContents(java.lang.String[]) */ @Override public void setDocumentDescribes(String[] contents) { 
if (contents == null || contents.length < 1) { setDataCellStringValue(DOCUMENT_DESCRIBES_COL, ""); int i = firstRowNum + DATA_ROW_NUM + 1; Row nextRow = sheet.getRow(i); while (nextRow != null) { Cell documentDescribesCell = nextRow.getCell(DOCUMENT_DESCRIBES_COL); if (documentDescribesCell != null) { documentDescribesCell.setCellValue(""); } i++; nextRow = sheet.getRow(i); } return; } setDataCellStringValue(DOCUMENT_DESCRIBES_COL, contents[0]); for (int i = 1; i < contents.length; i++) { Row row = getDataRow(i); Cell cell = row.getCell(DOCUMENT_DESCRIBES_COL); if (cell == null) { cell = row.createCell(DOCUMENT_DESCRIBES_COL); } cell.setCellValue(contents[i]); } // delete any remaining rows for (int i = firstRowNum + DATA_ROW_NUM + contents.length; i <= this.lastRowNum; i++) { Row row = sheet.getRow(i); Cell cell = row.getCell(DOCUMENT_DESCRIBES_COL); if (cell != null) { row.removeCell(cell); } } } /* (non-Javadoc) * @see org.spdx.spdxspreadsheet.OriginsSheet#getExternalDocumentRefs() */ @Override public ExternalDocumentRef[] getExternalDocumentRefs() throws SpreadsheetException { int numRows = 0; while (sheet.getRow(firstRowNum + DATA_ROW_NUM + numRows) != null && sheet.getRow(firstRowNum + DATA_ROW_NUM + numRows).getCell(EXTERNAL_DOC_REFS_COL) != null && !sheet.getRow(firstRowNum + DATA_ROW_NUM + numRows).getCell(EXTERNAL_DOC_REFS_COL).getStringCellValue().isEmpty()) { numRows ++; } ExternalDocumentRef[] retval = new ExternalDocumentRef[numRows]; for (int i = 0; i < numRows; i++) { try { retval[i] = BuildDocument.parseExternalDocumentRef(sheet.getRow( firstRowNum + DATA_ROW_NUM + i). getCell(EXTERNAL_DOC_REFS_COL).getStringCellValue(), firstRowNum + DATA_ROW_NUM + i); } catch (InvalidSpdxTagFileException e) { throw(new SpreadsheetException("Invalid external document reference string: "+sheet.getRow( firstRowNum + DATA_ROW_NUM + i). 
getCell(EXTERNAL_DOC_REFS_COL).getStringCellValue())); } } return retval; } /* (non-Javadoc) * @see org.spdx.spdxspreadsheet.OriginsSheet#setExternalDocumentRefs(org.spdx.rdfparser.model.ExternalDocumentRef[]) */ @Override public void setExternalDocumentRefs(ExternalDocumentRef[] externalDocumentRefs) throws SpreadsheetException { if (externalDocumentRefs == null || externalDocumentRefs.length < 1) { setDataCellStringValue(EXTERNAL_DOC_REFS_COL, ""); int i = firstRowNum + DATA_ROW_NUM + 1; Row nextRow = sheet.getRow(i); while (nextRow != null) { Cell externalDocRefsCell = nextRow.getCell(EXTERNAL_DOC_REFS_COL); if (externalDocRefsCell != null) { externalDocRefsCell.setCellValue(""); } i++; nextRow = sheet.getRow(i); } return; } try { setDataCellStringValue(EXTERNAL_DOC_REFS_COL, externalDocRefToStr(externalDocumentRefs[0])); } catch (InvalidSPDXAnalysisException e) { throw(new SpreadsheetException("Error getting external document reference",e)); } for (int i = 1; i < externalDocumentRefs.length; i++) { Row row = getDataRow(i); Cell cell = row.getCell(EXTERNAL_DOC_REFS_COL); if (cell == null) { cell = row.createCell(EXTERNAL_DOC_REFS_COL); } try { cell.setCellValue(externalDocRefToStr(externalDocumentRefs[i])); } catch (InvalidSPDXAnalysisException e) { throw(new SpreadsheetException("Error getting external document reference",e)); } } // delete any remaining rows for (int i = firstRowNum + DATA_ROW_NUM + externalDocumentRefs.length; i <= this.lastRowNum; i++) { Row row = sheet.getRow(i); Cell cell = row.getCell(EXTERNAL_DOC_REFS_COL); if (cell != null) { row.removeCell(cell); } } } /** * @param externalDocumentRef * @return * @throws InvalidSPDXAnalysisException */ private String externalDocRefToStr(ExternalDocumentRef externalDocumentRef) throws InvalidSPDXAnalysisException { if (externalDocumentRef == null) { return ""; } return externalDocumentRef.getExternalDocumentId() + " " + externalDocumentRef.getSpdxDocumentNamespace() + " " + 
Checksum.CHECKSUM_ALGORITHM_TO_TAG.get(externalDocumentRef.getChecksum().getAlgorithm()) + " " + externalDocumentRef.getChecksum().getValue(); } }
package nachos.threads;

import nachos.machine.*;

import java.util.PriorityQueue;

/**
 * Uses the hardware timer to provide preemption, and to allow threads to sleep
 * until a certain time.
 */
public class Alarm {
    /**
     * Allocate a new Alarm. Set the machine's timer interrupt handler to this
     * alarm's callback.
     *
     * <p><b>Note</b>: Nachos will not function correctly with more than one
     * alarm.
     */
    public Alarm() {
        Machine.timer().setInterruptHandler(new Runnable() {
            public void run() { timerInterrupt(); }
        });
    }

    /**
     * The timer interrupt handler. This is called by the machine's timer
     * periodically (approximately every 500 clock ticks). Wakes every sleeping
     * thread whose wake time has arrived, then causes the current thread to
     * yield, forcing a context switch if there is another thread that should
     * be run.
     */
    public void timerInterrupt() {
        boolean intStatus = Machine.interrupt().disable();
        long currentTime = Machine.timer().getTime();
        // The queue is ordered by wake time, so once the head's deadline is in
        // the future we can stop: every later entry is later still.
        while (alarmQueue.peek() != null) {
            if (currentTime >= alarmQueue.peek().getfirst()) {
                AlarmTuple expired = alarmQueue.poll();
                expired.getsecond().ready();
            } else {
                break;
            }
        }
        KThread.currentThread().yield();
        Machine.interrupt().restore(intStatus);
    }

    /**
     * Put the current thread to sleep for at least <i>x</i> ticks,
     * waking it up in the timer interrupt handler. The thread must be
     * woken up (placed in the scheduler ready set) during the first timer
     * interrupt where
     *
     * <p><blockquote>
     * (current time) &gt;= (WaitUntil called time)+(x)
     * </blockquote>
     *
     * @param x the minimum number of clock ticks to wait.
     *
     * @see nachos.machine.Timer#getTime()
     */
    public void waitUntil(long x) {
        // Record (wakeTime, thread) in the priority queue and block the
        // current thread; timerInterrupt() readies it once wakeTime passes.
        // Interrupts are disabled so the enqueue+sleep pair is atomic (and
        // KThread.sleep() requires interrupts to be off).
        boolean intStatus = Machine.interrupt().disable();
        long wakeTime = Machine.timer().getTime() + x;
        alarmQueue.add(new AlarmTuple(wakeTime, KThread.currentThread()));
        KThread.sleep();
        Machine.interrupt().restore(intStatus);
    }

    // Sleeping threads ordered by wake time (earliest first). Guarded by the
    // disabled-interrupt sections in waitUntil/timerInterrupt.
    private static PriorityQueue<AlarmTuple> alarmQueue = new PriorityQueue<AlarmTuple>();

    /**
     * A (wakeTime, thread) pair ordered by wake time for the priority queue.
     *
     * <p>FIX: the original declared this as {@code AlarmTuple<Long, KThread>},
     * where "Long" and "KThread" were type-parameter <i>names</i> shadowing
     * the real {@code java.lang.Long} and {@code nachos.threads.KThread}
     * classes. That made every use of the class raw and forced an unchecked
     * cast in {@code timerInterrupt}. It is now a plain static nested class
     * (static so the static {@code alarmQueue} field carries no hidden
     * enclosing-instance reference), and the cast is gone.
     */
    private static class AlarmTuple implements Comparable<AlarmTuple> {
        private final long first;     // absolute wake time in clock ticks
        private final KThread second; // the sleeping thread

        public AlarmTuple(long x, KThread y) {
            this.first = x;
            this.second = y;
        }

        /** Orders tuples by wake time, earliest first (same result as the original ladder). */
        @Override
        public int compareTo(AlarmTuple comparing) {
            return Long.compare(this.first, comparing.first);
        }

        /** @return the absolute wake time */
        public long getfirst() {
            return this.first;
        }

        /** @return the thread waiting for that time */
        public KThread getsecond() {
            return this.second;
        }
    }

    ///////////////////////////////////////////////////////////////////////////
    // Self tests for this class are implemented here; this method is called
    // from ThreadedKernel.

    /**
     * Forks three test threads exercising waitUntil; assertions live in the
     * Runnable classes below.
     *
     * @param curAlarm the kernel's alarm instance to test against
     */
    public static void selfTest(Alarm curAlarm) {
        // KThread status codes referenced by the helpers below:
        // 0=new, 1=ready, 2=running, 3=blocked, 4=finished.

        // Test 1: a single sleeping thread is actually blocked (status == 3)
        // during its wait, and when it wakes the current time is at least the
        // requested wake time.
        firstTestRun firstTest = new firstTestRun(curAlarm);
        KThread firstThread = new KThread(firstTest);
        firstThread.fork();

        // Test 2: two threads scheduled to wake at the same time both wake
        // together (when one runs, the other is no longer blocked).
        secondTestRun secondTest = new secondTestRun(curAlarm);
        KThread secondThread = new KThread(secondTest);
        secondThread.fork();

        // Test 3: with wake times far apart (1000 ticks), when the earlier
        // thread runs the later one is still asleep.
        thirdTestRun thirdTest = new thirdTestRun(curAlarm);
        KThread thirdThread = new KThread(thirdTest);
        thirdThread.fork();
    }
}

/**
 * Test 1: sleeps for 1000 ticks and verifies it wakes as the same thread no
 * earlier than the requested wake time; a forked watcher thread checks that
 * this thread is blocked during the wait window.
 */
class firstTestRun implements Runnable {
    private Alarm alarm;

    public firstTestRun(Alarm curAlarm) {
        this.alarm = curAlarm;
    }

    public void run() {
        int originID = KThread.currentThread().getID();
        long wakeTime = Machine.timer().getTime() + 1000;
        // Watcher checks that this thread really is asleep during the wait.
        statusCheckRun statusCheck = new statusCheckRun(KThread.currentThread(), wakeTime);
        KThread checkThread = new KThread(statusCheck);
        checkThread.fork();
        int checkThreadID = checkThread.getID();
        Lib.assertTrue(checkThreadID != originID);
        alarm.waitUntil(1000); // current thread is put to sleep
        // Woke up as the same thread, and not before the deadline.
        Lib.assertTrue(KThread.currentThread().getID() == originID);
        Lib.assertTrue(Machine.timer().getTime() >= wakeTime);
    }
}

/**
 * Watcher used by test 1: if it runs before the target's wake time, the target
 * thread must still be blocked (status 3).
 */
class statusCheckRun implements Runnable {
    private KThread checkingThread;
    private long wakeTime;

    public statusCheckRun(KThread currentThread, long time) {
        this.checkingThread = currentThread;
        this.wakeTime = time;
    }

    public void run() {
        if (Machine.timer().getTime() <= wakeTime) {
            // 3 == blocked in KThread's status encoding
            Lib.assertTrue(checkingThread.getStatus() == 3);
        }
    }
}

/**
 * Test 2: forks a companion that sleeps until the same wake time; after our
 * own wait, the companion must also be awake (status != blocked).
 */
class secondTestRun implements Runnable {
    private Alarm alarm;

    public secondTestRun(Alarm curAlarm) {
        this.alarm = curAlarm;
    }

    public void run() {
        int originID = KThread.currentThread().getID();
        long wakeTime = Machine.timer().getTime() + 1000;
        sleepRun sleepThread = new sleepRun(alarm, wakeTime);
        KThread sameTimeThread = new KThread(sleepThread);
        sameTimeThread.fork();
        int sameTimeID = sameTimeThread.getID();
        Lib.assertTrue(sameTimeID != originID);
        alarm.waitUntil(1000); // current thread is put to sleep
        Lib.assertTrue(KThread.currentThread().getID() == originID);
        Lib.assertTrue(Machine.timer().getTime() >= wakeTime);
        // The companion slept until the same deadline, so it should be awake too.
        Lib.assertTrue(sameTimeThread.getStatus() != 3);
    }
}

/** Helper: sleeps via the alarm until the given absolute time, then asserts it woke on time. */
class sleepRun implements Runnable {
    private Alarm alarm;
    private long wakeTime;

    public sleepRun(Alarm machineAlarm, long time) {
        this.alarm = machineAlarm;
        this.wakeTime = time;
    }

    public void run() {
        // Convert the absolute wake time into the relative wait that waitUntil expects.
        alarm.waitUntil(wakeTime - Machine.timer().getTime());
        Lib.assertTrue(Machine.timer().getTime() >= wakeTime);
    }
}

/**
 * Test 3: sleeps 20 ticks while a companion sleeps ~1000 ticks longer; when we
 * wake, the companion must still be blocked.
 */
class thirdTestRun implements Runnable {
    private Alarm alarm;

    public thirdTestRun(Alarm curAlarm) {
        this.alarm = curAlarm;
    }

    public void run() {
        int originID = KThread.currentThread().getID();
        long wakeTime = Machine.timer().getTime() + 20;
        // Companion wakes 1000 ticks after our own deadline.
        sleepRun sleepThread = new sleepRun(alarm, wakeTime + 1000);
        KThread sameTimeThread = new KThread(sleepThread);
        sameTimeThread.fork();
        int sameTimeID = sameTimeThread.getID();
        Lib.assertTrue(sameTimeID != originID);
        alarm.waitUntil(20); // current thread is put to sleep
        Lib.assertTrue(KThread.currentThread().getID() == originID);
        Lib.assertTrue(Machine.timer().getTime() >= wakeTime);
        // The companion's deadline is far in the future, so it must still be asleep.
        Lib.assertTrue(sameTimeThread.getStatus() == 3);
    }
}
/* * Copyright (c) 2007, 2015, Oracle and/or its affiliates. All rights reserved. * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms. */ /* * Copyright 2004,2005 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.sun.org.apache.xerces.internal.util; import java.io.IOException; import org.xml.sax.InputSource; import org.xml.sax.SAXException; import org.xml.sax.ext.EntityResolver2; import org.w3c.dom.ls.LSInput; import org.w3c.dom.ls.LSResourceResolver; import javax.xml.parsers.SAXParserFactory; import com.sun.org.apache.xerces.internal.dom.DOMInputImpl; import com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl; import com.sun.org.apache.xerces.internal.xni.XNIException; import com.sun.org.apache.xerces.internal.xni.XMLResourceIdentifier; import com.sun.org.apache.xerces.internal.xni.parser.XMLEntityResolver; import com.sun.org.apache.xerces.internal.xni.parser.XMLInputSource; import com.sun.org.apache.xml.internal.resolver.Catalog; import com.sun.org.apache.xml.internal.resolver.CatalogManager; import com.sun.org.apache.xml.internal.resolver.readers.OASISXMLCatalogReader; import com.sun.org.apache.xml.internal.resolver.readers.SAXCatalogReader; /** * <p>The catalog resolver handles the resolution of external * identifiers and URI references through XML catalogs. 
This * component supports XML catalogs defined by the * <a href="http://www.oasis-open.org/committees/entity/spec.html"> * OASIS XML Catalogs Specification</a>. It encapsulates the * <a href="http://xml.apache.org/commons/">XML Commons</a> resolver. * An instance of this class may be registered on the parser * as a SAX entity resolver, as a DOM LSResourceResolver or * as an XNI entity resolver by setting the property * (http://apache.org/xml/properties/internal/entity-resolver).</p> * * <p>It is intended that this class may be used standalone to perform * catalog resolution outside of a parsing context. It may be shared * between several parsers and the application.</p> * * @author Michael Glavassevich, IBM * */ public class XMLCatalogResolver implements XMLEntityResolver, EntityResolver2, LSResourceResolver { /** Internal catalog manager for Apache catalogs. **/ private CatalogManager fResolverCatalogManager = null; /** Internal catalog structure. **/ private Catalog fCatalog = null; /** An array of catalog URIs. **/ private String [] fCatalogsList = null; /** * Indicates whether the list of catalogs has * changed since it was processed. */ private boolean fCatalogsChanged = true; /** Application specified prefer public setting. **/ private boolean fPreferPublic = true; /** * Indicates whether the application desires that * the parser or some other component performing catalog * resolution should use the literal system identifier * instead of the expanded system identifier. 
*/ private boolean fUseLiteralSystemId = true; /** * <p>Constructs a catalog resolver with a default configuration.</p> */ public XMLCatalogResolver () { this(null, true); } /** * <p>Constructs a catalog resolver with the given * list of entry files.</p> * * @param catalogs an ordered array list of absolute URIs */ public XMLCatalogResolver (String [] catalogs) { this(catalogs, true); } /** * <p>Constructs a catalog resolver with the given * list of entry files and the preference for whether * system or public matches are preferred.</p> * * @param catalogs an ordered array list of absolute URIs * @param preferPublic the prefer public setting */ public XMLCatalogResolver (String [] catalogs, boolean preferPublic) { init(catalogs, preferPublic); } /** * <p>Returns the initial list of catalog entry files.</p> * * @return the initial list of catalog entry files */ public final synchronized String [] getCatalogList () { return (fCatalogsList != null) ? (String[]) fCatalogsList.clone() : null; } /** * <p>Sets the initial list of catalog entry files. * If there were any catalog mappings cached from * the previous list they will be replaced by catalog * mappings from the new list the next time the catalog * is queried.</p> * * @param catalogs an ordered array list of absolute URIs */ public final synchronized void setCatalogList (String [] catalogs) { fCatalogsChanged = true; fCatalogsList = (catalogs != null) ? (String[]) catalogs.clone() : null; } /** * <p>Forces the cache of catalog mappings to be cleared.</p> */ public final synchronized void clear () { fCatalog = null; } /** * <p>Returns the preference for whether system or public * matches are preferred. This is used in the absence * of any occurence of the <code>prefer</code> attribute * on the <code>catalog</code> entry of a catalog. 
If this * property has not yet been explicitly set its value is * <code>true</code>.</p> * * @return the prefer public setting */ public final boolean getPreferPublic () { return fPreferPublic; } /** * <p>Sets the preference for whether system or public * matches are preferred. This is used in the absence * of any occurence of the <code>prefer</code> attribute * on the <code>catalog</code> entry of a catalog.</p> * * @param preferPublic the prefer public setting */ public final void setPreferPublic (boolean preferPublic) { fPreferPublic = preferPublic; fResolverCatalogManager.setPreferPublic(preferPublic); } /** * <p>Returns the preference for whether the literal system * identifier should be used when resolving system * identifiers when both it and the expanded system * identifier are available. If this property has not yet * been explicitly set its value is <code>true</code>.</p> * * @return the preference for using literal system identifers * for catalog resolution * * @see #setUseLiteralSystemId */ public final boolean getUseLiteralSystemId () { return fUseLiteralSystemId; } /** * <p>Sets the preference for whether the literal system * identifier should be used when resolving system * identifiers when both it and the expanded system * identifier are available.</p> * * <p>The literal system identifier is the URI as it was * provided before absolutization. It may be embedded within * an entity. It may be provided externally or it may be the * result of redirection. 
For example, redirection may
 * have come from the protocol level through HTTP or from
 * an application's entity resolver.</p>
 *
 * <p>The expanded system identifier is an absolute URI
 * which is the result of resolving the literal system
 * identifier against a base URI.</p>
 *
 * @param useLiteralSystemId the preference for using
 * literal system identifers for catalog resolution
 */
public final void setUseLiteralSystemId (boolean useLiteralSystemId) {
    // Simple flag write; read back via getUseLiteralSystemId() and consulted
    // by the resolve* entry points below before base-URI expansion.
    fUseLiteralSystemId = useLiteralSystemId;
}

/**
 * <p>Resolves an external entity. If the entity cannot be
 * resolved, this method should return <code>null</code>. This
 * method returns an input source if an entry was found in the
 * catalog for the given external identifier. It should be
 * overrided if other behaviour is required.</p>
 *
 * @param publicId the public identifier, or <code>null</code> if none was supplied
 * @param systemId the system identifier
 *
 * @throws SAXException any SAX exception, possibly wrapping another exception
 * @throws IOException thrown if some i/o error occurs
 */
public InputSource resolveEntity(String publicId, String systemId)
    throws SAXException, IOException {

    String resolvedId = null;
    // Prefer a PUBLIC lookup when both identifiers are present; otherwise
    // fall back to a SYSTEM-only lookup.
    if (publicId != null && systemId != null) {
        resolvedId = resolvePublic(publicId, systemId);
    }
    else if (systemId != null) {
        resolvedId = resolveSystem(systemId);
    }

    if (resolvedId != null) {
        // Catalog hit: hand back a source for the mapped URI, keeping the
        // caller's public identifier attached for diagnostics.
        InputSource source = new InputSource(resolvedId);
        source.setPublicId(publicId);
        return source;
    }
    // No catalog mapping: let the parser use its default resolution.
    return null;
}

/**
 * <p>Resolves an external entity. If the entity cannot be
 * resolved, this method should return <code>null</code>. This
 * method returns an input source if an entry was found in the
 * catalog for the given external identifier. It should be
 * overrided if other behaviour is required.</p>
 *
 * @param name the identifier of the external entity
 * @param publicId the public identifier, or <code>null</code> if none was supplied
 * @param baseURI the URI with respect to which relative systemIDs are interpreted.
 * @param systemId the system identifier
 *
 * @throws SAXException any SAX exception, possibly wrapping another exception
 * @throws IOException thrown if some i/o error occurs
 */
public InputSource resolveEntity(String name, String publicId,
    String baseURI, String systemId) throws SAXException, IOException {

    String resolvedId = null;

    // Unless the caller asked for literal system ids, expand the system
    // identifier relative to the supplied base URI before lookup.
    if (!getUseLiteralSystemId() && baseURI != null) {
        // Attempt to resolve the system identifier against the base URI.
        try {
            URI uri = new URI(new URI(baseURI), systemId);
            systemId = uri.toString();
        }
        // Ignore the exception. Fallback to the literal system identifier.
        catch (URI.MalformedURIException ex) {}
    }

    if (publicId != null && systemId != null) {
        resolvedId = resolvePublic(publicId, systemId);
    }
    else if (systemId != null) {
        resolvedId = resolveSystem(systemId);
    }

    if (resolvedId != null) {
        InputSource source = new InputSource(resolvedId);
        source.setPublicId(publicId);
        return source;
    }
    return null;
}

/**
 * <p>Locates an external subset for documents which do not explicitly
 * provide one. This method always returns <code>null</code>. It
 * should be overrided if other behaviour is required.</p>
 *
 * @param name the identifier of the document root element
 * @param baseURI the document's base URI
 *
 * @throws SAXException any SAX exception, possibly wrapping another exception
 * @throws IOException thrown if some i/o error occurs
 */
public InputSource getExternalSubset(String name, String baseURI)
    throws SAXException, IOException {
    // Deliberate no-op: this resolver never synthesizes an external subset.
    return null;
}

/**
 * <p>Resolves a resource using the catalog. This method interprets that
 * the namespace URI corresponds to uri entries in the catalog.
 * Where both a namespace and an external identifier exist, the namespace
 * takes precedence.</p>
 *
 * @param type the type of the resource being resolved
 * @param namespaceURI the namespace of the resource being resolved,
 * or <code>null</code> if none was supplied
 * @param publicId the public identifier of the resource being resolved,
 * or <code>null</code> if none was supplied
 * @param systemId the system identifier of the resource being resolved,
 * or <code>null</code> if none was supplied
 * @param baseURI the absolute base URI of the resource being parsed,
 * or <code>null</code> if there is no base URI
 */
public LSInput resolveResource(String type, String namespaceURI,
    String publicId, String systemId, String baseURI) {

    String resolvedId = null;

    try {
        // The namespace is useful for resolving namespace aware
        // grammars such as XML schema. Let it take precedence over
        // the external identifier if one exists.
        if (namespaceURI != null) {
            resolvedId = resolveURI(namespaceURI);
        }

        if (!getUseLiteralSystemId() && baseURI != null) {
            // Attempt to resolve the system identifier against the base URI.
            try {
                URI uri = new URI(new URI(baseURI), systemId);
                systemId = uri.toString();
            }
            // Ignore the exception. Fallback to the literal system identifier.
            catch (URI.MalformedURIException ex) {}
        }

        // Resolve against an external identifier if one exists. This
        // is useful for resolving DTD external subsets and other
        // external entities. For XML schemas if there was no namespace
        // mapping we might be able to resolve a system identifier
        // specified as a location hint.
        if (resolvedId == null) {
            if (publicId != null && systemId != null) {
                resolvedId = resolvePublic(publicId, systemId);
            }
            else if (systemId != null) {
                resolvedId = resolveSystem(systemId);
            }
        }
    }
    // Ignore IOException. It cannot be thrown from this method.
    catch (IOException ex) {}

    if (resolvedId != null) {
        return new DOMInputImpl(publicId, resolvedId, baseURI);
    }
    return null;
}

/**
 * <p>Resolves an external entity.
If the entity cannot be
 * resolved, this method should return <code>null</code>. This
 * method only calls <code>resolveIdentifier</code> and returns
 * an input source if an entry was found in the catalog. It
 * should be overrided if other behaviour is required.</p>
 *
 * @param resourceIdentifier location of the XML resource to resolve
 *
 * @throws XNIException thrown on general error
 * @throws IOException thrown if some i/o error occurs
 */
public XMLInputSource resolveEntity(XMLResourceIdentifier resourceIdentifier)
    throws XNIException, IOException {

    // All lookup logic is delegated to resolveIdentifier(); this method
    // only wraps a successful mapping in an XMLInputSource.
    String resolvedId = resolveIdentifier(resourceIdentifier);
    if (resolvedId != null) {
        return new XMLInputSource(resourceIdentifier.getPublicId(),
                                  resolvedId,
                                  resourceIdentifier.getBaseSystemId());
    }
    return null;
}

/**
 * <p>Resolves an identifier using the catalog. This method interprets that
 * the namespace of the identifier corresponds to uri entries in the catalog.
 * Where both a namespace and an external identifier exist, the namespace
 * takes precedence.</p>
 *
 * @param resourceIdentifier the identifier to resolve
 *
 * @throws XNIException thrown on general error
 * @throws IOException thrown if some i/o error occurs
 */
public String resolveIdentifier(XMLResourceIdentifier resourceIdentifier)
    throws IOException, XNIException {

    String resolvedId = null;

    // The namespace is useful for resolving namespace aware
    // grammars such as XML schema. Let it take precedence over
    // the external identifier if one exists.
    String namespace = resourceIdentifier.getNamespace();
    if (namespace != null) {
        resolvedId = resolveURI(namespace);
    }

    // Resolve against an external identifier if one exists. This
    // is useful for resolving DTD external subsets and other
    // external entities. For XML schemas if there was no namespace
    // mapping we might be able to resolve a system identifier
    // specified as a location hint.
    if (resolvedId == null) {
        String publicId = resourceIdentifier.getPublicId();
        // Honour the literal/expanded preference set via setUseLiteralSystemId().
        String systemId = getUseLiteralSystemId()
            ? resourceIdentifier.getLiteralSystemId()
            : resourceIdentifier.getExpandedSystemId();
        if (publicId != null && systemId != null) {
            resolvedId = resolvePublic(publicId, systemId);
        }
        else if (systemId != null) {
            resolvedId = resolveSystem(systemId);
        }
    }
    return resolvedId;
}

/**
 * <p>Returns the URI mapping in the catalog for the given
 * external identifier or <code>null</code> if no mapping
 * exists. If the system identifier is an URN in the
 * <code>publicid</code> namespace it is converted into
 * a public identifier by URN "unwrapping" as specified
 * in the XML Catalogs specification.</p>
 *
 * @param systemId the system identifier to locate in the catalog
 *
 * @return the mapped URI or <code>null</code> if no mapping
 * was found in the catalog
 *
 * @throws IOException if an i/o error occurred while reading
 * the catalog
 */
public final synchronized String resolveSystem (String systemId)
    throws IOException {

    // Lazily (re)parse the catalog list if it changed since the last lookup;
    // synchronized so the parse/flag-clear pair is atomic across callers.
    if (fCatalogsChanged) {
        parseCatalogs();
        fCatalogsChanged = false;
    }
    return (fCatalog != null)
        ? fCatalog.resolveSystem(systemId) : null;
}

/**
 * <p>Returns the URI mapping in the catalog for the given
 * external identifier or <code>null</code> if no mapping
 * exists. Public identifiers are normalized before
 * comparison.</p>
 *
 * @param publicId the public identifier to locate in the catalog
 * @param systemId the system identifier to locate in the catalog
 *
 * @return the mapped URI or <code>null</code> if no mapping
 * was found in the catalog
 *
 * @throws IOException if an i/o error occurred while reading
 * the catalog
 */
public final synchronized String resolvePublic (String publicId, String systemId)
    throws IOException {

    // Same lazy re-parse discipline as resolveSystem().
    if (fCatalogsChanged) {
        parseCatalogs();
        fCatalogsChanged = false;
    }
    return (fCatalog != null)
        ? fCatalog.resolvePublic(publicId, systemId) : null;
}

/**
 * <p>Returns the URI mapping in the catalog for the given URI
 * reference or <code>null</code> if no mapping exists.
 * URI comparison is case sensitive.
If the URI reference
 * is an URN in the <code>publicid</code> namespace
 * it is converted into a public identifier by URN "unwrapping"
 * as specified in the XML Catalogs specification and then
 * resolution is performed following the semantics of
 * external identifier resolution.</p>
 *
 * @param uri the URI to locate in the catalog
 *
 * @return the mapped URI or <code>null</code> if no mapping
 * was found in the catalog
 *
 * @throws IOException if an i/o error occurred while reading
 * the catalog
 */
public final synchronized String resolveURI (String uri)
    throws IOException {

    // Lazily (re)parse the catalog list if it changed since the last lookup.
    if (fCatalogsChanged) {
        parseCatalogs();
        fCatalogsChanged = false;
    }
    return (fCatalog != null)
        ? fCatalog.resolveURI(uri) : null;
}

/**
 * Initialization. Create a CatalogManager and set all
 * the properties upfront. This prevents JVM wide system properties
 * or a property file somewhere in the environment from affecting
 * the behaviour of this catalog resolver.
 */
private void init (String [] catalogs, boolean preferPublic) {
    // Defensive copy so later mutation of the caller's array has no effect.
    fCatalogsList = (catalogs != null) ? (String[]) catalogs.clone() : null;
    fPreferPublic = preferPublic;
    // Configure the manager explicitly so ambient system properties /
    // CatalogManager.properties files cannot change resolver behaviour.
    fResolverCatalogManager = new CatalogManager();
    fResolverCatalogManager.setAllowOasisXMLCatalogPI(false);
    fResolverCatalogManager.setCatalogClassName("com.sun.org.apache.xml.internal.resolver.Catalog");
    fResolverCatalogManager.setCatalogFiles("");
    fResolverCatalogManager.setIgnoreMissingProperties(true);
    fResolverCatalogManager.setPreferPublic(fPreferPublic);
    fResolverCatalogManager.setRelativeCatalogs(false);
    fResolverCatalogManager.setUseStaticCatalog(false);
    fResolverCatalogManager.setVerbosity(0);
}

/**
 * Instruct the <code>Catalog</code> to parse each of the
 * catalogs in the list. Only the first catalog will actually be
 * parsed immediately. The others will be queued and read if
 * they are needed later.
 */
private void parseCatalogs () throws IOException {
    if (fCatalogsList != null) {
        fCatalog = new Catalog(fResolverCatalogManager);
        attachReaderToCatalog(fCatalog);
        for (int i = 0; i < fCatalogsList.length; ++i) {
            String catalog = fCatalogsList[i];
            // Skip null/empty entries rather than failing.
            if (catalog != null && catalog.length() > 0) {
                fCatalog.parseCatalog(catalog);
            }
        }
    }
    else {
        // No catalog list configured: all resolve* methods will return null.
        fCatalog = null;
    }
}

/**
 * Attaches the reader to the catalog.
 */
private void attachReaderToCatalog (Catalog catalog) {

    // Namespace-aware, non-validating parser for reading OASIS XML catalogs.
    SAXParserFactory spf = new SAXParserFactoryImpl();
    spf.setNamespaceAware(true);
    spf.setValidating(false);

    SAXCatalogReader saxReader = new SAXCatalogReader(spf);
    saxReader.setCatalogParser(OASISXMLCatalogReader.namespaceName, "catalog",
        "com.sun.org.apache.xml.internal.resolver.readers.OASISXMLCatalogReader");
    catalog.addReader("application/xml", saxReader);
}
}
package com.logginghub.logging.frontend.views.logeventdetail; import com.logginghub.logging.DefaultLogEvent; import com.logginghub.logging.LogEvent; import com.logginghub.logging.filters.CompositeAndFilter; import com.logginghub.logging.filters.MessageContainsFilter; import com.logginghub.logging.frontend.Utils; import com.logginghub.logging.frontend.images.Icons; import com.logginghub.logging.frontend.images.Icons.IconIdentifier; import com.logginghub.logging.frontend.model.EnvironmentController; import com.logginghub.logging.frontend.model.EventTableColumnModel; import com.logginghub.logging.frontend.model.LevelNamesModel; import com.logginghub.logging.frontend.model.LogEventContainer; import com.logginghub.logging.frontend.model.LogEventContainerController; import com.logginghub.logging.listeners.LogEventListener; import com.logginghub.utils.Is; import com.logginghub.utils.ObjectUtils; import com.logginghub.utils.Out; import com.logginghub.utils.Pair; import com.logginghub.utils.Stopwatch; import com.logginghub.utils.filter.Filter; import com.logginghub.utils.logging.Logger; import javax.swing.*; import javax.swing.table.DefaultTableModel; import java.util.Map; public class DetailedLogEventTableModel extends DefaultTableModel implements LogEventListener { public static final int COLUMN_CHANNEL = 10; public static final int COLUMN_CLASS_METHOD = 5; public static final int COLUMN_DIAGNOSTIC_CONTEXT = 7; public static final int COLUMN_HOST = 2; public static final int COLUMN_LEVEL = 3; public static final int COLUMN_LOCKED = 8; public static final int COLUMN_MESSAGE = 6; public static final int COLUMN_PID = 9; public static final int COLUMN_SOURCE = 1; public static final int COLUMN_THREAD = 4; public static final int COLUMN_TIME = 0; public static final int NUMBER_OF_COLUMNS = 11; private static final Logger logger = Logger.getLoggerFor(DetailedLogEventTableModel.class); private static final long serialVersionUID = 1L; private static final String BLANK = ""; // 
private final EventTableColumnModel eventTableColumnModel;
    private final LevelNamesModel levelNamesModel;
    // Guards all access to eventController's live event collections.
    private Object eventLock = new Object();
    private CompositeAndFilter filters = new CompositeAndFilter();
    private LogEventContainerController eventController;
    private boolean[] isColumnEditable = new boolean[100];
    // When false (paused), incoming events are handed to the controller
    // with isPlaying == false rather than being surfaced immediately.
    private boolean isPlaying = true;
    // private Map<Integer, String> metadataColumns = new HashMap<Integer, String>();
    // private Map<Integer, String> metadataColumnNames = new HashMap<Integer, String>();
    // Display-ordered column descriptors; may grow/shrink at runtime via
    // addMetadataColumn/removeColumn.
    private ColumnTarget[] visibleColumns = new ColumnTarget[NUMBER_OF_COLUMNS];
    private EnvironmentController environmentController;

    public DetailedLogEventTableModel(EventTableColumnModel eventTableColumnModel, LevelNamesModel levelNamesModel, LogEventContainerController eventController) {
        //
        this.eventTableColumnModel = eventTableColumnModel;
        this.levelNamesModel = levelNamesModel;
        this.eventController = eventController;

        // Default built-in columns, in display order.
        visibleColumns[0] = new ColumnTarget("Time", COLUMN_TIME, null, ColumnTarget.Renderer.Normal);
        visibleColumns[1] = new ColumnTarget("Source", COLUMN_SOURCE, null, ColumnTarget.Renderer.Normal);
        visibleColumns[2] = new ColumnTarget("Host", COLUMN_HOST, null, ColumnTarget.Renderer.Normal);
        visibleColumns[3] = new ColumnTarget("Level", COLUMN_LEVEL, null, ColumnTarget.Renderer.Normal);
        visibleColumns[4] = new ColumnTarget("Thread", COLUMN_THREAD, null, ColumnTarget.Renderer.Normal);
        visibleColumns[5] = new ColumnTarget("Method", COLUMN_CLASS_METHOD, null, ColumnTarget.Renderer.Normal);
        visibleColumns[6] = new ColumnTarget("Message", COLUMN_MESSAGE, null, ColumnTarget.Renderer.Normal);
        visibleColumns[7] = new ColumnTarget("DC", COLUMN_DIAGNOSTIC_CONTEXT, null, ColumnTarget.Renderer.Normal);
        visibleColumns[8] = new ColumnTarget("Locked", COLUMN_LOCKED, null, ColumnTarget.Renderer.Normal);
        visibleColumns[9] = new ColumnTarget("PID", COLUMN_PID, null, ColumnTarget.Renderer.Normal);
        visibleColumns[10] = new ColumnTarget("Channel", COLUMN_CHANNEL,
                null, ColumnTarget.Renderer.Normal);
    }

    public void addFilter(Filter<LogEvent> filter, LogEvent currentSelection) {
        filters.addFilter(filter);
        refilter(currentSelection);
    }

    public EnvironmentController getEnvironmentController() {
        return environmentController;
    }

    /**
     * Re-applies the current filter set on the EDT and tells the views all
     * data may have changed.
     */
    private void refilter(final LogEvent currentSelection) {
        SwingUtilities.invokeLater(new Runnable() {
            @Override public void run() {
                synchronized (eventLock) {
                    Stopwatch refiltering = Stopwatch.start("Refiltering");
                    // NOTE(review): debug-style dump on every refilter - looks
                    // like leftover diagnostics; confirm it is still wanted.
                    Out.out("Filters : {}", ObjectUtils.recursiveDump(filters));
                    eventController.refilter(filters);
                    logger.info(refiltering);
                    // NOTE(review): the result of this indexOf is discarded -
                    // presumably intended to restore the selection; verify.
                    eventController.getLiveEventsThatPassFilter().indexOf(currentSelection);
                }
                fireTableDataChanged();
            }
        });
    }

    public void fireTableDataChanged() {
        super.fireTableDataChanged();
    }

    public void fireTableStructureChanged() {
        super.fireTableStructureChanged();
    }

    /**
     * Inserts a metadata-backed column at the given display index and
     * notifies the views of the structure change.
     */
    public void addMetadataColumn(int index, String metadataKey, String name, ColumnTarget.Renderer renderer) {
        // ColumnTarget[] newVisibleColumns;
        // if(visibleColumns.length == 0) {
        //     newVisibleColumns = new ColumnTarget[1];
        //     newVisibleColumns[0] = new ColumnTarget(name, -1, metadataKey);
        // }else {
        //
        //     int newLength = visibleColumns.length + 1;
        //     newVisibleColumns = new ColumnTarget[newLength];
        //
        //     // Copy the first chunk up to the insertion point
        //     System.arraycopy(visibleColumns, 0, newVisibleColumns, 0, index);
        //
        //     // Add the new item
        //     newVisibleColumns[index] = new ColumnTarget(name, -1, metadataKey);
        //
        //     // Copy the second chunk passed the removed item to the end
        //     System.arraycopy(visibleColumns, index, newVisibleColumns, index + 1, visibleColumns.length - index);
        // }
        //
        // // Switch over the arrays
        // visibleColumns = newVisibleColumns;
        //
        // // Tell the views the model has changed
        // fireTableStructureChanged();

        int newLength = visibleColumns.length + 1;
        ColumnTarget[] newVisibleColumns = new ColumnTarget[newLength];

        // Copy the first chunk up to the insertion point
        System.arraycopy(visibleColumns, 0,
newVisibleColumns, 0, index); // Add the new item newVisibleColumns[index] = new ColumnTarget(name, -1, metadataKey, renderer); // Copy the second chunk passed the removed item to the end System.arraycopy(visibleColumns, index, newVisibleColumns, index + 1, visibleColumns.length - index); // Switch over the arrays visibleColumns = newVisibleColumns; // Tell the views the model has changed fireTableStructureChanged(); } public void clear() { fireTableDataChanged(); } public int findFirstTime(long time) { int found = -1; synchronized (eventLock) { int rowCount = getRowCount(); for (int i = 0; i < rowCount; i++) { LogEvent next = eventController.getLiveEventsThatPassFilter().get(i); long entryTime = next.getOriginTime(); logger.finest("Comparing entryTime '{}' vs search time '{}'", Logger.toDateString(entryTime), Logger.toDateString(time)); if (entryTime >= time) { found = i; break; } } } return found; } /** * Search the visible rows for the next row that matches the filter. * * @param selectedRow * @param filter * @return */ public int findNextEvent(int selectedRow, MessageContainsFilter filter) { int found = -1; synchronized (eventLock) { for (int i = selectedRow; i < getRowCount(); i++) { LogEvent next = eventController.getLiveEventsThatPassFilter().get(i); if (filter.passes(next)) { found = i; break; } } } return found; } public int findPreviousEvent(int selectedRow, MessageContainsFilter filter) { int found = -1; synchronized (eventLock) { for (int i = selectedRow; i > 0; i--) { LogEvent next = eventController.getLiveEventsThatPassFilter().get(i); if (filter.passes(next)) { found = i; break; } } } return found; } public int getAllEventsSize() { synchronized (eventLock) { return eventController.getLiveEventsSize(); } } public CompositeAndFilter getFilters() { return filters; } public LogEvent getLogEventAtRow(int rowIndex) { synchronized (eventLock) { return eventController.getLiveEventsThatPassFilter().get(rowIndex); } } public int 
getVisibleIndexForEvent(LogEvent event) { synchronized (eventLock) { int index = eventController.getLiveEventsThatPassFilter().indexOf(event); return index; } } public boolean isColumnVisible(String column) { return getColumnIndex(column) != -1; } public int getColumnIndex(String column) { int index = -1; for (int i = 0; i < visibleColumns.length; i++) { ColumnTarget visibleColumn = visibleColumns[i]; if (visibleColumn.columnName.equalsIgnoreCase(column)) { index = i; break; } } return index; } public void onNewLogEvent(LogEventContainer newEvents) { int startAddedRow; int endAddedRow; int removedRows = 0; Is.swingEventThread(); synchronized (eventLock) { startAddedRow = eventController.getLiveEventsThatPassFilter().size(); endAddedRow = startAddedRow; for (LogEvent event : newEvents) { Pair<Boolean, Boolean> state = eventController.add(event, filters, isPlaying); boolean visible = state.getA(); boolean removedVisible = state.getB(); if (visible) { endAddedRow++; } if (removedVisible) { removedRows++; } } } if (isPlaying) { if (removedRows > 0) { fireTableRowsDeleted(0, removedRows - 1); } // We've just got rid of some, so we need to offset the added rows back startAddedRow -= removedRows; endAddedRow -= removedRows; fireTableRowsInserted(startAddedRow, endAddedRow); } } public void onNewLogEvent(LogEvent event) { if (event == null) { throw new RuntimeException("Please dont add null events"); } int rowCount = -1; boolean removedVisible; Is.swingEventThread(); synchronized (eventLock) { // TODO : this is leaky and nasty, maybe we should house the filters // and the playing state in the controller? 
Pair<Boolean, Boolean> pair = eventController.add(event, filters, isPlaying); boolean isVisible = pair.getA(); removedVisible = pair.getB(); if (isVisible) { rowCount = eventController.getLiveEventsThatPassFilter().size(); } } } public void pause() { this.isPlaying = false; } public void play() { this.isPlaying = true; synchronized (eventLock) { eventController.play(); } } public void refreshFilters(LogEvent currentSelection) { refilter(currentSelection); } public void removeColumn(String column) { int columnIndex = getColumnIndex(column); if (columnIndex != -1) { int currentLength = visibleColumns.length; int newLength = currentLength - 1; ColumnTarget[] newVisibleColumns = new ColumnTarget[newLength]; // Copy the first chunk up to the cut off point System.arraycopy(visibleColumns, 0, newVisibleColumns, 0, columnIndex); // Copy the second chunk passed the removed item to the end System.arraycopy(visibleColumns, columnIndex + 1, newVisibleColumns, columnIndex, currentLength - (columnIndex + 1)); // Switch over the arrays visibleColumns = newVisibleColumns; } // Tell the views the model has changed fireTableStructureChanged(); } // ////////////////////////////////////////////////////////////////// // LogEventListener implementations // ////////////////////////////////////////////////////////////////// public void removeFilter(Filter<LogEvent> filter, LogEvent currentSelection) { filters.removeFilter(filter); refilter(currentSelection); } @Override public void removeRow(int rowIndex) { Is.swingEventThread(); synchronized (eventLock) { eventController.removeLiveEvent(rowIndex); } fireTableRowsDeleted(rowIndex, rowIndex); } @Override public int getRowCount() { int rowCount = 0; // gah - the super class ctor calls get row count :/ if (eventLock != null) { synchronized (eventLock) { rowCount = eventController.getLiveEventsThatPassFilter().size(); } } return rowCount; } @Override public int getColumnCount() { return visibleColumns.length; // NUMBER_OF_COLUMNS + 
metadataColumns.size(); } @Override public String getColumnName(int column) { return visibleColumns[column].columnName; // String name = eventTableColumnModel.getColumnNameMappings().get(column); // if (name == null) { // name = metadataColumnNames.get(column); // } // // return name; } @Override public boolean isCellEditable(int row, int columns) { return isColumnEditable(columns); } private boolean isColumnEditable(int column) { return isColumnEditable[column]; } public void setColumnEditable(int column, boolean isEditable) { isColumnEditable[column] = isEditable; } // @Override // public Object getValueAt(int row, int column) { // LogEvent logEvent; // synchronized (eventLock) { // logEvent = eventController.getLiveEventsThatPassFilter().get(row); // } // // if (logEvent == null) { // throw new RuntimeException("The row returned from the visible events collection was null. Not sure how this is possible. Index was " + // row + // " row count was " + // getRowCount()); // } // // Object value; // // switch (column) { // case COLUMN_LOCKED: { // if (logEvent instanceof DefaultLogEvent) { // DefaultLogEvent defaultLogEvent = (DefaultLogEvent) logEvent; // Map<String, String> metadata = defaultLogEvent.getMetadata(); // if (metadata.containsKey("locked")) { // if (metadata.get("locked").equalsIgnoreCase("true")) { // return Icons.get(IconIdentifier.Locked); // } else { // return Icons.get(IconIdentifier.Unlocked); // } // } else { // value = ""; // } // } else { // value = ""; // } // break; // } // case COLUMN_CLASS_METHOD: { // // String sourceClassName = logEvent.getSourceClassName(); // String sourceMethodName = logEvent.getSourceMethodName(); // if (sourceClassName != null && sourceMethodName != null) { // value = sourceClassName + "." 
+ sourceMethodName; // } else if (sourceClassName != null) { // value = sourceClassName; // } else if (sourceMethodName != null) { // value = sourceMethodName; // } else { // value = "[Not captured]"; // } // break; // } // case COLUMN_SOURCE: { // value = logEvent.getSourceApplication(); // break; // } // case COLUMN_HOST: { // value = logEvent.getSourceHost(); // break; // } // case COLUMN_TIME: { // value = Utils.formatTime(logEvent.getOriginTime()); // break; // } // case COLUMN_THREAD: { // value = logEvent.getThreadName(); // break; // } // case COLUMN_MESSAGE: { // value = logEvent.getMessage(); // break; // } // case COLUMN_LEVEL: { // value = levelNamesModel.getLevelName(logEvent.getLevel()); // break; // } // case COLUMN_DIAGNOSTIC_CONTEXT: { // value = formatDiagnosticContext(logEvent.getFormattedObject()); // break; // } // case COLUMN_PID: { // value = logEvent.getPid(); // break; // } // case COLUMN_CHANNEL: { // value = logEvent.getChannel(); // break; // } // default: { // logger.fine("Custom column id {}", column); // String metadatakey = metadataColumns.get(column); // if (metadatakey != null && logEvent.getMetadata() != null) { // value = logEvent.getMetadata().get(metadatakey); // if (value == null) { // value = ""; // } // } else { // value = "???"; // } // } // } // // return value; // } @Override public Object getValueAt(int row, int column) { LogEvent logEvent; synchronized (eventLock) { logEvent = eventController.getLiveEventsThatPassFilter().get(row); } if (logEvent == null) { throw new RuntimeException("The row returned from the visible events collection was null. Not sure how this is possible. 
Index was " + row + " row count was " + getRowCount()); } Object value; ColumnTarget visibleColumn = visibleColumns[column]; if (visibleColumn.metadata == null) { value = extractEventField(logEvent, visibleColumn.eventFieldIndex); } else { logger.fine("Custom column id {}", column); String metadatakey = visibleColumn.metadata; Map<String, String> metadata = logEvent.getMetadata(); if (metadatakey != null && metadata != null) { value = metadata.get(metadatakey); if (value == null) { value = ""; }else { if (visibleColumn.renderer == ColumnTarget.Renderer.Date) { value = Logger.toLocalDateString(Long.parseLong(value.toString())).toString(); } } } else { value = "???"; } } return value; } private Object extractEventField(LogEvent logEvent, int eventFieldIndex) { Object value; switch (eventFieldIndex) { case COLUMN_LOCKED: { if (logEvent instanceof DefaultLogEvent) { DefaultLogEvent defaultLogEvent = (DefaultLogEvent) logEvent; Map<String, String> metadata = defaultLogEvent.getMetadata(); if (metadata.containsKey("locked")) { if (metadata.get("locked").equalsIgnoreCase("true")) { return Icons.get(IconIdentifier.Locked); } else { return Icons.get(IconIdentifier.Unlocked); } } else { value = ""; } } else { value = ""; } break; } case COLUMN_CLASS_METHOD: { String sourceClassName = logEvent.getSourceClassName(); String sourceMethodName = logEvent.getSourceMethodName(); if (sourceClassName != null && sourceMethodName != null) { value = sourceClassName + "." 
+ sourceMethodName; } else if (sourceClassName != null) { value = sourceClassName; } else if (sourceMethodName != null) { value = sourceMethodName; } else { value = "[Not captured]"; } break; } case COLUMN_SOURCE: { value = logEvent.getSourceApplication(); break; } case COLUMN_HOST: { value = logEvent.getSourceHost(); break; } case COLUMN_TIME: { value = Utils.formatTime(logEvent.getOriginTime()); break; } case COLUMN_THREAD: { value = logEvent.getThreadName(); break; } case COLUMN_MESSAGE: { value = logEvent.getMessage(); break; } case COLUMN_LEVEL: { value = levelNamesModel.getLevelName(logEvent.getLevel()); break; } case COLUMN_DIAGNOSTIC_CONTEXT: { value = formatDiagnosticContext(logEvent.getFormattedObject()); break; } case COLUMN_PID: { value = logEvent.getPid(); break; } case COLUMN_CHANNEL: { value = logEvent.getChannel(); break; } default: { value = "???"; } } return value; } private String formatDiagnosticContext(String[] formattedObject) { String formatted; if (formattedObject != null && formattedObject.length > 0) { StringBuilder context = new StringBuilder(); for (int i = 0; i < formattedObject.length; i++) { context.append("[").append(formattedObject[i]).append("]"); } formatted = context.toString(); } else { formatted = BLANK; } return formatted; } public void setEnvironmentController(EnvironmentController environmentController) { this.environmentController = environmentController; } @Override public void setValueAt(Object item, int row, int column) { } public final static class ColumnTarget { int eventFieldIndex; String metadata; String columnName; Renderer renderer = Renderer.Normal; public ColumnTarget(String columnName, int eventFieldIndex, String metadata, Renderer renderer) { this.columnName = columnName; this.eventFieldIndex = eventFieldIndex; this.metadata = metadata; this.renderer = renderer; } public enum Renderer { Normal, Date, Action } } }
package net.fushizen.invokedynamic.proxy; import jdk.nashorn.internal.codegen.CompilerConstants; import org.openjdk.jmh.annotations.*; import java.lang.invoke.*; import java.util.concurrent.TimeUnit; /* @Measurement(iterations = 2) @Fork(value = 2) @Warmup(iterations = 2) */ @State(Scope.Benchmark) public class IndyProxyBenchmark { private static final MethodHandles.Lookup LOOKUP = MethodHandles.lookup(); // Do _not_ make this a constant private int one = 1; public interface VoidMethod { public void method(); } private static void noop() {} private static final VoidMethod voidMethodProxy; static { try { final MethodHandle noopHandle = LOOKUP.findStatic(IndyProxyBenchmark.class, "noop", MethodType.methodType(Void.TYPE)); voidMethodProxy = (VoidMethod) DynamicProxy.builder() .withInterfaces(VoidMethod.class) .withInvocationHandler( (lookup, name, type, superMethod) -> { if (superMethod != null) return new ConstantCallSite(superMethod); MethodHandle coercedHandle = MethodHandles.dropArguments(noopHandle, 0, Object.class); return new ConstantCallSite(coercedHandle.asType(type)); } ) .build() .constructor() .invoke(); } catch (Throwable e) { throw new RuntimeException(e); } } @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.NANOSECONDS) public void proxy_voidMethod() { voidMethodProxy.method(); } public interface NonPrimsMethod { public String method(String input); } private static String noop_String(String input) { return input; } private static final NonPrimsMethod nonPrimsProxy; static { try { final MethodHandle noopHandle = LOOKUP.findStatic( IndyProxyBenchmark.class, "noop_String", MethodType.methodType(String.class, String.class) ); nonPrimsProxy = (NonPrimsMethod) DynamicProxy.builder() .withInterfaces(NonPrimsMethod.class) .withInvocationHandler( (lookup, name, type, superMethod) -> { if (superMethod != null) return new ConstantCallSite(superMethod); MethodHandle coercedHandle = MethodHandles.dropArguments(noopHandle, 0, Object.class); return 
new ConstantCallSite(coercedHandle.asType(type)); } ) .build() .constructor() .invoke(); } catch (Throwable e) { throw new RuntimeException(e); } } @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.NANOSECONDS) public String proxy_nonPrims() { return nonPrimsProxy.method("Hello world"); } public interface PrimsMethod { public int increment(int x); } private static int increment(int x) { return x + 1; } private static final PrimsMethod primsMethodProxy; static { try { final MethodHandle incrementHandle = LOOKUP.findStatic( IndyProxyBenchmark.class, "increment", MethodType.methodType(Integer.TYPE, Integer.TYPE) ); primsMethodProxy = (PrimsMethod) DynamicProxy.builder() .withInterfaces(PrimsMethod.class) .withInvocationHandler( (lookup, name, type, superMethod) -> { if (superMethod != null) return new ConstantCallSite(superMethod); MethodHandle coercedHandle = MethodHandles.dropArguments(incrementHandle, 0, Object.class); return new ConstantCallSite(coercedHandle.asType(type)); } ) .build() .constructor() .invoke(); } catch (Throwable e) { throw new RuntimeException(e); } } @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.NANOSECONDS) public int proxy_prims() { return primsMethodProxy.increment(one); } private static class PrimsPassThrough implements PrimsMethod { @Override public int increment(int x) { return x + 1; } } public static class PassThroughBase { PrimsPassThrough invokee; public final void $$setInvokee(PrimsPassThrough invokee) { this.invokee = invokee; } public PassThroughBase() {} } private static final DynamicProxy primsPassthroughProxyTemplate; static { try { primsPassthroughProxyTemplate = DynamicProxy.builder() .withInterfaces(PrimsMethod.class) .withSuperclass(PassThroughBase.class) .withInvocationHandler((lookup, name, type, superMethod) -> { MethodType withoutReceiverType = type.dropParameterTypes(0, 1); MethodHandle receiverHandle = LOOKUP.findVirtual(PrimsPassThrough.class, name, withoutReceiverType); 
MethodHandle getReceiver = LOOKUP.findGetter(PassThroughBase.class, "invokee", PrimsPassThrough.class); MethodHandle invokedHandle = MethodHandles.filterArguments(receiverHandle, 0, getReceiver); return new ConstantCallSite(invokedHandle.asType(type)); }) .build(); } catch (Throwable t) { throw new RuntimeException(t); } } private static PrimsMethod makePassThrough() { try { Object proxyInstance = primsPassthroughProxyTemplate.supplier().get(); ((PassThroughBase)proxyInstance).$$setInvokee(new PrimsPassThrough()); return (PrimsMethod) proxyInstance; } catch (Throwable t) { throw new RuntimeException(t); } } private static final PrimsMethod passthroughStaticProxy = makePassThrough(); @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.NANOSECONDS) public int proxy_passThrough() { return passthroughStaticProxy.increment(1); } @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.NANOSECONDS) public Object proxy_passThrough_ctor_cached() { return makePassThrough(); } public interface IncrementDecrement { public int increment(int x); public int decrement(int x); public int noop(int x); } private static CallSite easyDispatch(String name, MethodType type) { try { MethodHandle method = LOOKUP.findStatic(IndyProxyBenchmark.class, name, type.dropParameterTypes(0, 1)); method = MethodHandles.dropArguments(method, 0, Object.class); return new ConstantCallSite(method.asType(type)); } catch (Throwable t) { throw new RuntimeException(t); } } private static CallSite incDecDispatchHandler(MethodHandles.Lookup proxyLookup, String name, MethodType type, MethodHandle superMethod) { if (superMethod != null) return new ConstantCallSite(superMethod.asType(type)); switch (name) { case "increment": return easyDispatch("increment", type); case "decrement": return easyDispatch("decrement", type); case "noop": return easyDispatch("noop_int", type); default: throw new RuntimeException("unknown method " + name); } } // increment is already defined above private 
static int decrement(int x) { return x - 1; } private static int noop_int(int x) { return x; } private static final IncrementDecrement incDecProxy; static { try { incDecProxy = (IncrementDecrement)DynamicProxy.builder() .withInterfaces(IncrementDecrement.class) .withInvocationHandler(IndyProxyBenchmark::incDecDispatchHandler) .build() .supplier().get(); } catch (Exception e) { throw new RuntimeException(e); } } @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.NANOSECONDS) @Group("incDec") public int incdec_increment() { return incDecProxy.increment(one); } @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.NANOSECONDS) @Group("incDec") public int incdec_decrement() { return incDecProxy.decrement(one); } @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.NANOSECONDS) @Group("incDec") public int incdec_noop() { return incDecProxy.noop(one); } }
/** * Copyright (c) 2000-present Liferay, Inc. All rights reserved. * * This library is free software; you can redistribute it and/or modify it under * the terms of the GNU Lesser General Public License as published by the Free * Software Foundation; either version 2.1 of the License, or (at your option) * any later version. * * This library is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS * FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more * details. */ package com.liferay.knowledgebase.service; import com.liferay.portal.service.InvokableService; /** * @author Brian Wing Shun Chan * @generated */ public class KBArticleServiceClp implements KBArticleService { public KBArticleServiceClp(InvokableService invokableService) { _invokableService = invokableService; _methodName0 = "getBeanIdentifier"; _methodParameterTypes0 = new String[] { }; _methodName1 = "setBeanIdentifier"; _methodParameterTypes1 = new String[] { "java.lang.String" }; _methodName3 = "addKBArticle"; _methodParameterTypes3 = new String[] { "java.lang.String", "long", "long", "java.lang.String", "java.lang.String", "java.lang.String", "java.lang.String", "java.lang.String", "java.lang.String[][]", "java.lang.String[][]", "com.liferay.portal.service.ServiceContext" }; _methodName4 = "addKBArticlesMarkdown"; _methodParameterTypes4 = new String[] { "long", "long", "java.lang.String", "boolean", "java.io.InputStream", "com.liferay.portal.service.ServiceContext" }; _methodName5 = "addTempAttachment"; _methodParameterTypes5 = new String[] { "long", "long", "java.lang.String", "java.lang.String", "java.io.InputStream", "java.lang.String" }; _methodName6 = "deleteKBArticle"; _methodParameterTypes6 = new String[] { "long" }; _methodName7 = "deleteKBArticles"; _methodParameterTypes7 = new String[] { "long", "long[][]" }; _methodName8 = "deleteTempAttachment"; _methodParameterTypes8 = new 
String[] { "long", "long", "java.lang.String", "java.lang.String" }; _methodName9 = "fetchLatestKBArticle"; _methodParameterTypes9 = new String[] { "long", "int" }; _methodName10 = "getGroupKBArticles"; _methodParameterTypes10 = new String[] { "long", "int", "int", "int", "com.liferay.portal.kernel.util.OrderByComparator" }; _methodName11 = "getGroupKBArticlesCount"; _methodParameterTypes11 = new String[] { "long", "int" }; _methodName12 = "getGroupKBArticlesRSS"; _methodParameterTypes12 = new String[] { "int", "int", "java.lang.String", "java.lang.String", "com.liferay.portal.theme.ThemeDisplay" }; _methodName13 = "getKBArticle"; _methodParameterTypes13 = new String[] { "long", "int" }; _methodName14 = "getKBArticleAndAllDescendantKBArticles"; _methodParameterTypes14 = new String[] { "long", "long", "int", "com.liferay.portal.kernel.util.OrderByComparator" }; _methodName15 = "getKBArticleAndAllDescendants"; _methodParameterTypes15 = new String[] { "long", "long", "int", "com.liferay.portal.kernel.util.OrderByComparator" }; _methodName16 = "getKBArticleRSS"; _methodParameterTypes16 = new String[] { "long", "int", "int", "java.lang.String", "java.lang.String", "com.liferay.portal.theme.ThemeDisplay" }; _methodName17 = "getKBArticles"; _methodParameterTypes17 = new String[] { "long", "long", "int", "int", "int", "com.liferay.portal.kernel.util.OrderByComparator" }; _methodName18 = "getKBArticles"; _methodParameterTypes18 = new String[] { "long", "long[][]", "int", "int", "int", "com.liferay.portal.kernel.util.OrderByComparator" }; _methodName19 = "getKBArticles"; _methodParameterTypes19 = new String[] { "long", "long[][]", "int", "com.liferay.portal.kernel.util.OrderByComparator" }; _methodName20 = "getKBArticlesCount"; _methodParameterTypes20 = new String[] { "long", "long", "int" }; _methodName21 = "getKBArticlesCount"; _methodParameterTypes21 = new String[] { "long", "long[][]", "int" }; _methodName22 = "getKBArticleSearchDisplay"; _methodParameterTypes22 = new 
String[] { "long", "java.lang.String", "java.lang.String", "int", "java.util.Date", "java.util.Date", "boolean", "int[][]", "int", "int", "com.liferay.portal.kernel.util.OrderByComparator" }; _methodName23 = "getKBArticleVersions"; _methodParameterTypes23 = new String[] { "long", "long", "int", "int", "int", "com.liferay.portal.kernel.util.OrderByComparator" }; _methodName24 = "getKBArticleVersionsCount"; _methodParameterTypes24 = new String[] { "long", "long", "int" }; _methodName25 = "getLatestKBArticle"; _methodParameterTypes25 = new String[] { "long", "int" }; _methodName26 = "getSectionsKBArticles"; _methodParameterTypes26 = new String[] { "long", "java.lang.String[][]", "int", "int", "int", "com.liferay.portal.kernel.util.OrderByComparator" }; _methodName27 = "getSectionsKBArticlesCount"; _methodParameterTypes27 = new String[] { "long", "java.lang.String[][]", "int" }; _methodName28 = "getSiblingKBArticles"; _methodParameterTypes28 = new String[] { "long", "long", "int", "int", "int", "com.liferay.portal.kernel.util.OrderByComparator" }; _methodName29 = "getSiblingKBArticlesCount"; _methodParameterTypes29 = new String[] { "long", "long", "int" }; _methodName30 = "getTempAttachmentNames"; _methodParameterTypes30 = new String[] { "long", "java.lang.String" }; _methodName31 = "moveKBArticle"; _methodParameterTypes31 = new String[] { "long", "long", "long", "double" }; _methodName32 = "subscribeGroupKBArticles"; _methodParameterTypes32 = new String[] { "long", "java.lang.String" }; _methodName33 = "subscribeKBArticle"; _methodParameterTypes33 = new String[] { "long", "long" }; _methodName34 = "unsubscribeGroupKBArticles"; _methodParameterTypes34 = new String[] { "long", "java.lang.String" }; _methodName35 = "unsubscribeKBArticle"; _methodParameterTypes35 = new String[] { "long" }; _methodName36 = "updateKBArticle"; _methodParameterTypes36 = new String[] { "long", "java.lang.String", "java.lang.String", "java.lang.String", "java.lang.String", 
"java.lang.String[][]", "java.lang.String[][]", "long[][]", "com.liferay.portal.service.ServiceContext" }; _methodName37 = "updateKBArticlesPriorities"; _methodParameterTypes37 = new String[] { "long", "java.util.Map" }; } @Override public java.lang.String getBeanIdentifier() { Object returnObj = null; try { returnObj = _invokableService.invokeMethod(_methodName0, _methodParameterTypes0, new Object[] { }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return (java.lang.String)ClpSerializer.translateOutput(returnObj); } @Override public void setBeanIdentifier(java.lang.String beanIdentifier) { try { _invokableService.invokeMethod(_methodName1, _methodParameterTypes1, new Object[] { ClpSerializer.translateInput(beanIdentifier) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } } @Override public java.lang.Object invokeMethod(java.lang.String name, java.lang.String[] parameterTypes, java.lang.Object[] arguments) throws java.lang.Throwable { throw new UnsupportedOperationException(); } @Override public com.liferay.knowledgebase.model.KBArticle addKBArticle( java.lang.String portletId, long parentResourceClassNameId, long parentResourcePrimKey, java.lang.String title, java.lang.String urlTitle, java.lang.String content, java.lang.String description, java.lang.String sourceURL, java.lang.String[] sections, java.lang.String[] selectedFileNames, com.liferay.portal.service.ServiceContext serviceContext) throws com.liferay.portal.kernel.exception.PortalException, com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableService.invokeMethod(_methodName3, _methodParameterTypes3, 
new Object[] { ClpSerializer.translateInput(portletId), parentResourceClassNameId, parentResourcePrimKey, ClpSerializer.translateInput(title), ClpSerializer.translateInput(urlTitle), ClpSerializer.translateInput(content), ClpSerializer.translateInput(description), ClpSerializer.translateInput(sourceURL), ClpSerializer.translateInput(sections), ClpSerializer.translateInput(selectedFileNames), ClpSerializer.translateInput(serviceContext) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.PortalException) { throw (com.liferay.portal.kernel.exception.PortalException)t; } if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return (com.liferay.knowledgebase.model.KBArticle)ClpSerializer.translateOutput(returnObj); } @Override public int addKBArticlesMarkdown(long groupId, long parentKBFolderId, java.lang.String fileName, boolean prioritizeByNumericalPrefix, java.io.InputStream inputStream, com.liferay.portal.service.ServiceContext serviceContext) throws com.liferay.portal.kernel.exception.PortalException, com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableService.invokeMethod(_methodName4, _methodParameterTypes4, new Object[] { groupId, parentKBFolderId, ClpSerializer.translateInput(fileName), prioritizeByNumericalPrefix, ClpSerializer.translateInput(inputStream), ClpSerializer.translateInput(serviceContext) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.PortalException) { throw (com.liferay.portal.kernel.exception.PortalException)t; } if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw 
(com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return ((Integer)returnObj).intValue(); } @Override public void addTempAttachment(long groupId, long resourcePrimKey, java.lang.String fileName, java.lang.String tempFolderName, java.io.InputStream inputStream, java.lang.String mimeType) throws com.liferay.portal.kernel.exception.PortalException, com.liferay.portal.kernel.exception.SystemException { try { _invokableService.invokeMethod(_methodName5, _methodParameterTypes5, new Object[] { groupId, resourcePrimKey, ClpSerializer.translateInput(fileName), ClpSerializer.translateInput(tempFolderName), ClpSerializer.translateInput(inputStream), ClpSerializer.translateInput(mimeType) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.PortalException) { throw (com.liferay.portal.kernel.exception.PortalException)t; } if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } } @Override public com.liferay.knowledgebase.model.KBArticle deleteKBArticle( long resourcePrimKey) throws com.liferay.portal.kernel.exception.PortalException, com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableService.invokeMethod(_methodName6, _methodParameterTypes6, new Object[] { resourcePrimKey }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.PortalException) { throw (com.liferay.portal.kernel.exception.PortalException)t; } if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw 
(com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return (com.liferay.knowledgebase.model.KBArticle)ClpSerializer.translateOutput(returnObj); } @Override public void deleteKBArticles(long groupId, long[] resourcePrimKeys) throws com.liferay.portal.kernel.exception.PortalException, com.liferay.portal.kernel.exception.SystemException { try { _invokableService.invokeMethod(_methodName7, _methodParameterTypes7, new Object[] { groupId, ClpSerializer.translateInput(resourcePrimKeys) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.PortalException) { throw (com.liferay.portal.kernel.exception.PortalException)t; } if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } } @Override public void deleteTempAttachment(long groupId, long resourcePrimKey, java.lang.String fileName, java.lang.String tempFolderName) throws com.liferay.portal.kernel.exception.PortalException, com.liferay.portal.kernel.exception.SystemException { try { _invokableService.invokeMethod(_methodName8, _methodParameterTypes8, new Object[] { groupId, resourcePrimKey, ClpSerializer.translateInput(fileName), ClpSerializer.translateInput(tempFolderName) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.PortalException) { throw (com.liferay.portal.kernel.exception.PortalException)t; } if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw 
(RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } } @Override public com.liferay.knowledgebase.model.KBArticle fetchLatestKBArticle( long resourcePrimKey, int status) throws com.liferay.portal.kernel.exception.PortalException, com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableService.invokeMethod(_methodName9, _methodParameterTypes9, new Object[] { resourcePrimKey, status }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.PortalException) { throw (com.liferay.portal.kernel.exception.PortalException)t; } if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return (com.liferay.knowledgebase.model.KBArticle)ClpSerializer.translateOutput(returnObj); } @Override public java.util.List<com.liferay.knowledgebase.model.KBArticle> getGroupKBArticles( long groupId, int status, int start, int end, com.liferay.portal.kernel.util.OrderByComparator orderByComparator) throws com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableService.invokeMethod(_methodName10, _methodParameterTypes10, new Object[] { groupId, status, start, end, ClpSerializer.translateInput(orderByComparator) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return 
(java.util.List<com.liferay.knowledgebase.model.KBArticle>)ClpSerializer.translateOutput(returnObj); } @Override public int getGroupKBArticlesCount(long groupId, int status) throws com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableService.invokeMethod(_methodName11, _methodParameterTypes11, new Object[] { groupId, status }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return ((Integer)returnObj).intValue(); } @Override public java.lang.String getGroupKBArticlesRSS(int status, int rssDelta, java.lang.String rssDisplayStyle, java.lang.String rssFormat, com.liferay.portal.theme.ThemeDisplay themeDisplay) throws com.liferay.portal.kernel.exception.PortalException, com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableService.invokeMethod(_methodName12, _methodParameterTypes12, new Object[] { status, rssDelta, ClpSerializer.translateInput(rssDisplayStyle), ClpSerializer.translateInput(rssFormat), ClpSerializer.translateInput(themeDisplay) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.PortalException) { throw (com.liferay.portal.kernel.exception.PortalException)t; } if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return (java.lang.String)ClpSerializer.translateOutput(returnObj); } @Override public 
com.liferay.knowledgebase.model.KBArticle getKBArticle( long resourcePrimKey, int version) throws com.liferay.portal.kernel.exception.PortalException, com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableService.invokeMethod(_methodName13, _methodParameterTypes13, new Object[] { resourcePrimKey, version }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.PortalException) { throw (com.liferay.portal.kernel.exception.PortalException)t; } if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return (com.liferay.knowledgebase.model.KBArticle)ClpSerializer.translateOutput(returnObj); } @Override public java.util.List<com.liferay.knowledgebase.model.KBArticle> getKBArticleAndAllDescendantKBArticles( long groupId, long resourcePrimKey, int status, com.liferay.portal.kernel.util.OrderByComparator orderByComparator) throws com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableService.invokeMethod(_methodName14, _methodParameterTypes14, new Object[] { groupId, resourcePrimKey, status, ClpSerializer.translateInput(orderByComparator) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return (java.util.List<com.liferay.knowledgebase.model.KBArticle>)ClpSerializer.translateOutput(returnObj); } @Override public 
java.util.List<com.liferay.knowledgebase.model.KBArticle> getKBArticleAndAllDescendants( long groupId, long resourcePrimKey, int status, com.liferay.portal.kernel.util.OrderByComparator orderByComparator) throws com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableService.invokeMethod(_methodName15, _methodParameterTypes15, new Object[] { groupId, resourcePrimKey, status, ClpSerializer.translateInput(orderByComparator) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return (java.util.List<com.liferay.knowledgebase.model.KBArticle>)ClpSerializer.translateOutput(returnObj); } @Override public java.lang.String getKBArticleRSS(long resourcePrimKey, int status, int rssDelta, java.lang.String rssDisplayStyle, java.lang.String rssFormat, com.liferay.portal.theme.ThemeDisplay themeDisplay) throws com.liferay.portal.kernel.exception.PortalException, com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableService.invokeMethod(_methodName16, _methodParameterTypes16, new Object[] { resourcePrimKey, status, rssDelta, ClpSerializer.translateInput(rssDisplayStyle), ClpSerializer.translateInput(rssFormat), ClpSerializer.translateInput(themeDisplay) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.PortalException) { throw (com.liferay.portal.kernel.exception.PortalException)t; } if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new 
RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return (java.lang.String)ClpSerializer.translateOutput(returnObj); } @Override public java.util.List<com.liferay.knowledgebase.model.KBArticle> getKBArticles( long groupId, long parentResourcePrimKey, int status, int start, int end, com.liferay.portal.kernel.util.OrderByComparator orderByComparator) throws com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableService.invokeMethod(_methodName17, _methodParameterTypes17, new Object[] { groupId, parentResourcePrimKey, status, start, end, ClpSerializer.translateInput(orderByComparator) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return (java.util.List<com.liferay.knowledgebase.model.KBArticle>)ClpSerializer.translateOutput(returnObj); } @Override public java.util.List<com.liferay.knowledgebase.model.KBArticle> getKBArticles( long groupId, long[] resourcePrimKeys, int status, int start, int end, com.liferay.portal.kernel.util.OrderByComparator orderByComparator) throws com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableService.invokeMethod(_methodName18, _methodParameterTypes18, new Object[] { groupId, ClpSerializer.translateInput(resourcePrimKeys), status, start, end, ClpSerializer.translateInput(orderByComparator) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new 
RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return (java.util.List<com.liferay.knowledgebase.model.KBArticle>)ClpSerializer.translateOutput(returnObj); } @Override public java.util.List<com.liferay.knowledgebase.model.KBArticle> getKBArticles( long groupId, long[] resourcePrimKeys, int status, com.liferay.portal.kernel.util.OrderByComparator orderByComparator) throws com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableService.invokeMethod(_methodName19, _methodParameterTypes19, new Object[] { groupId, ClpSerializer.translateInput(resourcePrimKeys), status, ClpSerializer.translateInput(orderByComparator) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return (java.util.List<com.liferay.knowledgebase.model.KBArticle>)ClpSerializer.translateOutput(returnObj); } @Override public int getKBArticlesCount(long groupId, long parentResourcePrimKey, int status) throws com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableService.invokeMethod(_methodName20, _methodParameterTypes20, new Object[] { groupId, parentResourcePrimKey, status }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return ((Integer)returnObj).intValue(); } @Override public int getKBArticlesCount(long groupId, long[] resourcePrimKeys, int status) throws 
com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableService.invokeMethod(_methodName21, _methodParameterTypes21, new Object[] { groupId, ClpSerializer.translateInput(resourcePrimKeys), status }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return ((Integer)returnObj).intValue(); } @Override public com.liferay.knowledgebase.model.KBArticleSearchDisplay getKBArticleSearchDisplay( long groupId, java.lang.String title, java.lang.String content, int status, java.util.Date startDate, java.util.Date endDate, boolean andOperator, int[] curStartValues, int cur, int delta, com.liferay.portal.kernel.util.OrderByComparator orderByComparator) throws com.liferay.portal.kernel.exception.PortalException, com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableService.invokeMethod(_methodName22, _methodParameterTypes22, new Object[] { groupId, ClpSerializer.translateInput(title), ClpSerializer.translateInput(content), status, ClpSerializer.translateInput(startDate), ClpSerializer.translateInput(endDate), andOperator, ClpSerializer.translateInput(curStartValues), cur, delta, ClpSerializer.translateInput(orderByComparator) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.PortalException) { throw (com.liferay.portal.kernel.exception.PortalException)t; } if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new 
// NOTE(review): this chunk begins mid-method — the opening of the enclosing
// auto-generated Liferay service CLP proxy class (and of the method whose tail
// follows) lies above the visible region.  Every method below follows the same
// generated pattern: delegate reflectively through _invokableService using a
// pre-registered method name / parameter-type pair, translate arguments and
// results across class loaders with ClpSerializer, and rethrow translated
// throwables as their declared checked types or RuntimeException.
RuntimeException(t.getClass().getName() +
    " is not a valid exception");
        }
    }

    // Translate the remote result back into a local class-loader type.
    return (com.liferay.knowledgebase.model.KBArticleSearchDisplay)ClpSerializer.translateOutput(returnObj);
}

// Reflective delegate for method #23: paged, ordered version history of one article.
@Override
public java.util.List<com.liferay.knowledgebase.model.KBArticle> getKBArticleVersions(
        long groupId, long resourcePrimKey, int status, int start, int end,
        com.liferay.portal.kernel.util.OrderByComparator orderByComparator)
    throws com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableService.invokeMethod(_methodName23,
                _methodParameterTypes23,
                new Object[] {
                    groupId, resourcePrimKey, status, start, end,
                    ClpSerializer.translateInput(orderByComparator)
                });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }
        else {
            // Unknown checked throwable from the remote side — surface it loudly.
            throw new RuntimeException(t.getClass().getName() +
                " is not a valid exception");
        }
    }

    return (java.util.List<com.liferay.knowledgebase.model.KBArticle>)ClpSerializer.translateOutput(returnObj);
}

// Reflective delegate for method #24: count of versions matching the status.
@Override
public int getKBArticleVersionsCount(long groupId, long resourcePrimKey,
        int status)
    throws com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableService.invokeMethod(_methodName24,
                _methodParameterTypes24,
                new Object[] { groupId, resourcePrimKey, status });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }
        else {
            throw new RuntimeException(t.getClass().getName() +
                " is not a valid exception");
        }
    }

    // Generated code autoboxes primitives across the reflective boundary.
    return ((Integer)returnObj).intValue();
}

// Reflective delegate for method #25: latest article version for the key/status.
@Override
public com.liferay.knowledgebase.model.KBArticle getLatestKBArticle(
        long resourcePrimKey, int status)
    throws com.liferay.portal.kernel.exception.PortalException,
        com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableService.invokeMethod(_methodName25,
                _methodParameterTypes25,
                new Object[] { resourcePrimKey, status });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
            throw (com.liferay.portal.kernel.exception.PortalException)t;
        }

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }
        else {
            throw new RuntimeException(t.getClass().getName() +
                " is not a valid exception");
        }
    }

    return (com.liferay.knowledgebase.model.KBArticle)ClpSerializer.translateOutput(returnObj);
}

// Reflective delegate for method #26: paged, ordered articles in the given sections.
@Override
public java.util.List<com.liferay.knowledgebase.model.KBArticle> getSectionsKBArticles(
        long groupId, java.lang.String[] sections, int status, int start,
        int end,
        com.liferay.portal.kernel.util.OrderByComparator orderByComparator)
    throws com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableService.invokeMethod(_methodName26,
                _methodParameterTypes26,
                new Object[] {
                    groupId, ClpSerializer.translateInput(sections), status,
                    start, end, ClpSerializer.translateInput(orderByComparator)
                });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }
        else {
            throw new RuntimeException(t.getClass().getName() +
                " is not a valid exception");
        }
    }

    return (java.util.List<com.liferay.knowledgebase.model.KBArticle>)ClpSerializer.translateOutput(returnObj);
}

// Reflective delegate for method #27: count variant of getSectionsKBArticles.
@Override
public int getSectionsKBArticlesCount(long groupId,
        java.lang.String[] sections, int status)
    throws com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableService.invokeMethod(_methodName27,
                _methodParameterTypes27,
                new Object[] {
                    groupId, ClpSerializer.translateInput(sections), status
                });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }
        else {
            throw new RuntimeException(t.getClass().getName() +
                " is not a valid exception");
        }
    }

    return ((Integer)returnObj).intValue();
}

// Reflective delegate for method #28: paged, ordered children of one parent key.
@Override
public java.util.List<com.liferay.knowledgebase.model.KBArticle> getSiblingKBArticles(
        long groupId, long parentResourcePrimKey, int status, int start,
        int end,
        com.liferay.portal.kernel.util.OrderByComparator orderByComparator)
    throws com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableService.invokeMethod(_methodName28,
                _methodParameterTypes28,
                new Object[] {
                    groupId, parentResourcePrimKey, status, start, end,
                    ClpSerializer.translateInput(orderByComparator)
                });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }
        else {
            throw new RuntimeException(t.getClass().getName() +
                " is not a valid exception");
        }
    }

    return (java.util.List<com.liferay.knowledgebase.model.KBArticle>)ClpSerializer.translateOutput(returnObj);
}

// Reflective delegate for method #29: count variant of getSiblingKBArticles.
@Override
public int getSiblingKBArticlesCount(long groupId, long parentResourcePrimKey,
        int status)
    throws com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableService.invokeMethod(_methodName29,
                _methodParameterTypes29,
                new Object[] { groupId, parentResourcePrimKey, status });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }
        else {
            throw new RuntimeException(t.getClass().getName() +
                " is not a valid exception");
        }
    }

    return ((Integer)returnObj).intValue();
}

// Reflective delegate for method #30: names of files in a temp-upload folder.
@Override
public java.lang.String[] getTempAttachmentNames(long groupId,
        java.lang.String tempFolderName)
    throws com.liferay.portal.kernel.exception.PortalException,
        com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableService.invokeMethod(_methodName30,
                _methodParameterTypes30,
                new Object[] { groupId, ClpSerializer.translateInput(tempFolderName) });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
            throw (com.liferay.portal.kernel.exception.PortalException)t;
        }

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }
        else {
            throw new RuntimeException(t.getClass().getName() +
                " is not a valid exception");
        }
    }

    return (java.lang.String[])ClpSerializer.translateOutput(returnObj);
}

// Reflective delegate for method #31: re-parent/re-prioritize an article (void).
@Override
public void moveKBArticle(long resourcePrimKey,
        long parentResourceClassNameId, long parentResourcePrimKey,
        double priority)
    throws com.liferay.portal.kernel.exception.PortalException,
        com.liferay.portal.kernel.exception.SystemException {
    try {
        _invokableService.invokeMethod(_methodName31,
            _methodParameterTypes31,
            new Object[] {
                resourcePrimKey, parentResourceClassNameId,
                parentResourcePrimKey, priority
            });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
            throw (com.liferay.portal.kernel.exception.PortalException)t;
        }

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }
        else {
            throw new RuntimeException(t.getClass().getName() +
                " is not a valid exception");
        }
    }
}

// Reflective delegate for method #32: subscribe caller to a group's articles.
@Override
public void subscribeGroupKBArticles(long groupId, java.lang.String portletId)
    throws com.liferay.portal.kernel.exception.PortalException,
        com.liferay.portal.kernel.exception.SystemException {
    try {
        _invokableService.invokeMethod(_methodName32,
            _methodParameterTypes32,
            new Object[] { groupId, ClpSerializer.translateInput(portletId) });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
            throw (com.liferay.portal.kernel.exception.PortalException)t;
        }

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }
        else {
            throw new RuntimeException(t.getClass().getName() +
                " is not a valid exception");
        }
    }
}

// Reflective delegate for method #33: subscribe caller to a single article.
@Override
public void subscribeKBArticle(long groupId, long resourcePrimKey)
    throws com.liferay.portal.kernel.exception.PortalException,
        com.liferay.portal.kernel.exception.SystemException {
    try {
        _invokableService.invokeMethod(_methodName33,
            _methodParameterTypes33, new Object[] { groupId, resourcePrimKey });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
            throw (com.liferay.portal.kernel.exception.PortalException)t;
        }

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }
        else {
            throw new RuntimeException(t.getClass().getName() +
                " is not a valid exception");
        }
    }
}

// Reflective delegate for method #34: inverse of subscribeGroupKBArticles.
@Override
public void unsubscribeGroupKBArticles(long groupId,
        java.lang.String portletId)
    throws com.liferay.portal.kernel.exception.PortalException,
        com.liferay.portal.kernel.exception.SystemException {
    try {
        _invokableService.invokeMethod(_methodName34,
            _methodParameterTypes34,
            new Object[] { groupId, ClpSerializer.translateInput(portletId) });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
            throw (com.liferay.portal.kernel.exception.PortalException)t;
        }

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }
        else {
            throw new RuntimeException(t.getClass().getName() +
                " is not a valid exception");
        }
    }
}

// Reflective delegate for method #35: inverse of subscribeKBArticle.
@Override
public void unsubscribeKBArticle(long resourcePrimKey)
    throws com.liferay.portal.kernel.exception.PortalException,
        com.liferay.portal.kernel.exception.SystemException {
    try {
        _invokableService.invokeMethod(_methodName35,
            _methodParameterTypes35, new Object[] { resourcePrimKey });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
            throw (com.liferay.portal.kernel.exception.PortalException)t;
        }

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }
        else {
            throw new RuntimeException(t.getClass().getName() +
                " is not a valid exception");
        }
    }
}

// Reflective delegate for method #36: full article update; every reference-typed
// argument is translated before crossing the class-loader boundary.
@Override
public com.liferay.knowledgebase.model.KBArticle updateKBArticle(
        long resourcePrimKey, java.lang.String title,
        java.lang.String content, java.lang.String description,
        java.lang.String sourceURL, java.lang.String[] sections,
        java.lang.String[] selectedFileNames, long[] removeFileEntryIds,
        com.liferay.portal.service.ServiceContext serviceContext)
    throws com.liferay.portal.kernel.exception.PortalException,
        com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableService.invokeMethod(_methodName36,
                _methodParameterTypes36,
                new Object[] {
                    resourcePrimKey, ClpSerializer.translateInput(title),
                    ClpSerializer.translateInput(content),
                    ClpSerializer.translateInput(description),
                    ClpSerializer.translateInput(sourceURL),
                    ClpSerializer.translateInput(sections),
                    ClpSerializer.translateInput(selectedFileNames),
                    ClpSerializer.translateInput(removeFileEntryIds),
                    ClpSerializer.translateInput(serviceContext)
                });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
            throw (com.liferay.portal.kernel.exception.PortalException)t;
        }

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }
        else {
            throw new RuntimeException(t.getClass().getName() +
                " is not a valid exception");
        }
    }

    return (com.liferay.knowledgebase.model.KBArticle)ClpSerializer.translateOutput(returnObj);
}

// Reflective delegate for method #37: bulk priority update keyed by resourcePrimKey.
@Override
public void updateKBArticlesPriorities(long groupId,
        java.util.Map<java.lang.Long, java.lang.Double> resourcePrimKeyToPriorityMap)
    throws com.liferay.portal.kernel.exception.PortalException,
        com.liferay.portal.kernel.exception.SystemException {
    try {
        _invokableService.invokeMethod(_methodName37,
            _methodParameterTypes37,
            new Object[] {
                groupId,
                ClpSerializer.translateInput(resourcePrimKeyToPriorityMap)
            });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
            throw (com.liferay.portal.kernel.exception.PortalException)t;
        }

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }
        else {
            throw new RuntimeException(t.getClass().getName() +
                " is not a valid exception");
        }
    }
}

// Target of every reflective invocation above; populated outside this chunk.
private InvokableService _invokableService;

// Generated name/parameter-type pairs, indexed to match the delegating methods.
// NOTE(review): the generator skips index 2 — the gap is present in the
// original generated source, not an omission introduced here.
private String _methodName0;
private String[] _methodParameterTypes0;
private String _methodName1;
private String[] _methodParameterTypes1;
private String _methodName3;
private String[] _methodParameterTypes3;
private String _methodName4;
private String[] _methodParameterTypes4;
private String _methodName5;
private String[] _methodParameterTypes5;
private String _methodName6;
private String[] _methodParameterTypes6;
private String _methodName7;
private String[] _methodParameterTypes7;
private String _methodName8;
private String[] _methodParameterTypes8;
private String _methodName9;
private String[] _methodParameterTypes9;
private String _methodName10;
private String[] _methodParameterTypes10;
private String _methodName11;
private String[] _methodParameterTypes11;
private String _methodName12;
private String[] _methodParameterTypes12;
private String _methodName13;
private String[] _methodParameterTypes13;
private String _methodName14;
private String[] _methodParameterTypes14;
private String _methodName15;
private String[] _methodParameterTypes15;
private String _methodName16;
private String[] _methodParameterTypes16;
private String _methodName17;
private String[] _methodParameterTypes17;
private String _methodName18;
private String[] _methodParameterTypes18;
private String _methodName19;
private String[] _methodParameterTypes19;
private String _methodName20;
private String[] _methodParameterTypes20;
private String _methodName21;
private String[] _methodParameterTypes21;
private String _methodName22;
private String[] _methodParameterTypes22;
private String _methodName23;
private String[] _methodParameterTypes23;
private String _methodName24;
private String[] _methodParameterTypes24;
private String _methodName25;
private String[] _methodParameterTypes25;
private String _methodName26;
private String[] _methodParameterTypes26;
private String _methodName27;
private String[] _methodParameterTypes27;
private String _methodName28;
private String[] _methodParameterTypes28;
private String _methodName29;
private String[] _methodParameterTypes29;
private String _methodName30;
private String[] _methodParameterTypes30;
private String _methodName31;
private String[] _methodParameterTypes31;
private String _methodName32;
private String[] _methodParameterTypes32;
private String _methodName33;
private String[] _methodParameterTypes33;
private String _methodName34;
private String[] _methodParameterTypes34;
private String _methodName35;
private String[] _methodParameterTypes35;
private String _methodName36;
private String[] _methodParameterTypes36;
private String _methodName37;
private String[] _methodParameterTypes37;
}
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.medialive.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * The settings for a PUSH type input. * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/medialive-2017-10-14/InputDestination" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class InputDestination implements Serializable, Cloneable, StructuredPojo { /** * The system-generated static IP address of endpoint. It remains fixed for the lifetime of the input. */ private String ip; /** The port number for the input. */ private String port; /** * This represents the endpoint that the customer stream will be pushed to. */ private String url; private InputDestinationVpc vpc; /** * The system-generated static IP address of endpoint. It remains fixed for the lifetime of the input. * * @param ip * The system-generated static IP address of endpoint. It remains fixed for the lifetime of the input. */ public void setIp(String ip) { this.ip = ip; } /** * The system-generated static IP address of endpoint. It remains fixed for the lifetime of the input. * * @return The system-generated static IP address of endpoint. It remains fixed for the lifetime of the input. 
*/ public String getIp() { return this.ip; } /** * The system-generated static IP address of endpoint. It remains fixed for the lifetime of the input. * * @param ip * The system-generated static IP address of endpoint. It remains fixed for the lifetime of the input. * @return Returns a reference to this object so that method calls can be chained together. */ public InputDestination withIp(String ip) { setIp(ip); return this; } /** * The port number for the input. * * @param port * The port number for the input. */ public void setPort(String port) { this.port = port; } /** * The port number for the input. * * @return The port number for the input. */ public String getPort() { return this.port; } /** * The port number for the input. * * @param port * The port number for the input. * @return Returns a reference to this object so that method calls can be chained together. */ public InputDestination withPort(String port) { setPort(port); return this; } /** * This represents the endpoint that the customer stream will be pushed to. * * @param url * This represents the endpoint that the customer stream will be pushed to. */ public void setUrl(String url) { this.url = url; } /** * This represents the endpoint that the customer stream will be pushed to. * * @return This represents the endpoint that the customer stream will be pushed to. */ public String getUrl() { return this.url; } /** * This represents the endpoint that the customer stream will be pushed to. * * @param url * This represents the endpoint that the customer stream will be pushed to. * @return Returns a reference to this object so that method calls can be chained together. */ public InputDestination withUrl(String url) { setUrl(url); return this; } /** * @param vpc */ public void setVpc(InputDestinationVpc vpc) { this.vpc = vpc; } /** * @return */ public InputDestinationVpc getVpc() { return this.vpc; } /** * @param vpc * @return Returns a reference to this object so that method calls can be chained together. 
*/ public InputDestination withVpc(InputDestinationVpc vpc) { setVpc(vpc); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getIp() != null) sb.append("Ip: ").append(getIp()).append(","); if (getPort() != null) sb.append("Port: ").append(getPort()).append(","); if (getUrl() != null) sb.append("Url: ").append(getUrl()).append(","); if (getVpc() != null) sb.append("Vpc: ").append(getVpc()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof InputDestination == false) return false; InputDestination other = (InputDestination) obj; if (other.getIp() == null ^ this.getIp() == null) return false; if (other.getIp() != null && other.getIp().equals(this.getIp()) == false) return false; if (other.getPort() == null ^ this.getPort() == null) return false; if (other.getPort() != null && other.getPort().equals(this.getPort()) == false) return false; if (other.getUrl() == null ^ this.getUrl() == null) return false; if (other.getUrl() != null && other.getUrl().equals(this.getUrl()) == false) return false; if (other.getVpc() == null ^ this.getVpc() == null) return false; if (other.getVpc() != null && other.getVpc().equals(this.getVpc()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getIp() == null) ? 0 : getIp().hashCode()); hashCode = prime * hashCode + ((getPort() == null) ? 0 : getPort().hashCode()); hashCode = prime * hashCode + ((getUrl() == null) ? 0 : getUrl().hashCode()); hashCode = prime * hashCode + ((getVpc() == null) ? 
0 : getVpc().hashCode()); return hashCode; } @Override public InputDestination clone() { try { return (InputDestination) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.medialive.model.transform.InputDestinationMarshaller.getInstance().marshall(this, protocolMarshaller); } }
/*
 * Copyright 2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package app.controllers;

import leap.core.annotation.Inject;
import leap.core.annotation.M;
import leap.lang.Assert;
import leap.lang.New;
import leap.lang.http.HTTP;
import leap.web.App;
import leap.web.Content;
import leap.web.Contents;
import leap.web.Request;
import leap.web.Response;
import leap.web.Results;
import leap.web.annotation.*;
import leap.web.annotation.http.*;
import leap.web.format.ResponseFormat;
import leap.web.view.ViewData;

import app.Global;
import app.beans.TestBean;
import app.controllers.products.ProductController;
import app.beans.TestPrimaryBean;

/**
 * Sample/test controller for the leap web framework.
 *
 * NOTE(review): routes, views and response formats here are derived by the
 * framework from method names, return types and annotations — the method
 * bodies alone do not show the full behavior, so the per-method comments below
 * rely on the original inline javadoc where present.
 */
public class HomeController {

    // Framework-injected collaborators; @M presumably marks a managed bean —
    // TODO confirm against leap.core.annotation.M docs.
    public @Inject @M App app;
    public @Inject @M Global global;
    public @Inject @M ProductController productController;
    public @Inject @M TestBean testBean1;

    private @Inject TestBean testBean2;
    private @Inject TestPrimaryBean testPrimaryBean;

    // Sanity checks on dependency injection: the two TestBean fields must
    // resolve to the same singleton, and the primary bean must wrap it.
    public void test_inject() {
        Assert.isTrue(testBean1 == testBean2);
        Assert.isTrue(app == testBean1.app);
        Assert.notNull(testPrimaryBean);
        Assert.isTrue(testBean1 == testPrimaryBean.getTestBean());
    }

    /**
     * View /WEB-INF/views/index.jsp
     */
    public String index(){
        return "Hello world!";
    }

    /**
     * No view; String return is rendered as text/plain.
     */
    public String text(){
        return "Hello world!";
    }

    // Raw byte response.
    public byte[] bytes() {
        return new byte[]{0x01,0x02};
    }

    // Returns an explicit HTML Content object.
    public Content html(){
        return Contents.html("<h1>Hello world!</h1>");
    }

    /**
     * Explicitly annotated with response format 'html'; this will set the
     * Content-Type to 'text/html'.
     */
    @Produces("html")
    public String html1() {
        return "<h1>Hello world!</h1>";
    }

    /**
     * Render as format 'html'.
     */
    public void html3(){
        Results.render("<h1>Hello world!</h1>",ResponseFormat.HTML);
    }

    /**
     * Response format : text/html
     */
    public void html2(){
        Results.render(Contents.html("<h1>Hello world!</h1>"));
    }

    // String forced to JSON via @Produces.
    @Produces("json")
    public String jsonString() {
        return "Hello json";
    }

    // 204 No Content.
    public void noContent(){
        Results.noContent();
    }

    // 501 Not Implemented.
    public void notImplemented(){
        Results.notImplemented();
    }

    // 200 OK with empty body.
    public void nothing(){
        Results.ok();
    }

    // POST-only action echoing the request method.
    @POST
    public void postAction(){
        Results.text("METHOD:" + Request.current().getMethod());
    }

    // Bypasses the result pipeline and writes directly to the servlet response.
    public void rawResponse(){
        Request  request  = Request.current();
        Response response = request.response();

        response.setStatus(HTTP.SC_OK); //status must be setted
        response.setContentType("text/plain;charset=UTF-8");
        response.getWriter().write("RawResponse");
    }

    // Renders an explicit view with a return value attached.
    public void renderView(){
        Results.renderView("/test_render_view").setReturnValue("Hello world!");
    }

    // Redirect variants: context-relative ('^' prefix), absolute path,
    // path + query params, and "redirect:" return-value convention.
    public void redirectTo(){
        Results.redirect("^/redirect_to_notfound_url");
    }

    public void redirectTo1(){
        Results.redirect("/redirect_to_notfound_url");
    }

    public void redirectTo2(){
        Results.redirect("/redirect_to_notfound_url",New.hashMap("p","1"));
    }

    public String redirectTo3(){
        return "redirect:/redirect_to_notfound_url";
    }

    // Forward variants: with and without the "views:" prefix.
    public void forwardTo(){
        Results.forward("views:/test_forward.jsp");
    }

    public void forwardTo1(){
        Results.forward("/test_forward.jsp");
    }

    // Used to exercise the framework's error handling.
    public void exception1() {
        throw new RuntimeException("Test Exception");
    }

    // Exposes the resolved controller path of the current route.
    public String controllerPath() {
        return Request.current().getActionContext().getRoute().getControllerPath();
    }

    // Only reachable over HTTPS.
    @HttpsOnly
    public String httpsOnly() {
        return "OK";
    }

    // Excluded from routing despite being public.
    @NonAction
    public void nonAction() {

    }

    // Wildcard sub-path capture; echoes the matched remainder.
    @Path("/arbitrary_path/{subPath:.*}")
    public String arbitraryPath(String subPath) {
        return subPath;
    }

    // Same explicit path bound once per HTTP verb.
    @GET("method_with_path")
    public void methodWithPathGet() {

    }

    @POST("method_with_path")
    public void methodWithPathPost() {

    }

    @PUT("method_with_path")
    public void methodWithPathPut() {

    }

    @DELETE("method_with_path")
    public void methodWithPathDelete() {

    }

    @HEAD("method_with_path")
    public void methodWithPathHead() {

    }

    @OPTIONS("method_with_path")
    public void methodWithPathOptions() {

    }

    // ViewData parameter is injected by the framework for JSP rendering.
    public void jsp(ViewData vd) {

    }

    // Overrides the success status for a void action.
    @Success(status=HTTP.Status.NO_CONTENT)
    public void successStatus() {
        System.out.println();
    }

    // Two ways of declaring a non-default view for a void action.
    @DefaultView("/test_default_view1")
    public void defaultView1() {

    }

    @Success(defaultView = "/test_default_view2")
    public void defaultView2() {

    }

    // Nested controller; presumably routed under this controller's path —
    // TODO confirm nested-controller routing rules.
    public static final class NestedController {

        public void index() {

        }

    }
}
import java.awt.*; import javax.swing.*; /** * The class Mosaic makes available a window made up of a grid * of colored rectangles. Routines are provided for opening and * closing the window and for setting and testing the color of rectangles * in the grid. * * Each rectangle in the grid has a color. The color can be * specified by red, green, and blue amounts in the range from * 0 to 255. It can also be given as an object belonging * to the class Color. */ public class Mosaic { private static JFrame window; // A mosaic window, null if no window is open. private static MosaicPanel canvas; // A component that actually manages and displays the rectangles. private static boolean use3DEffect = true; // When true, 3D Rects and "grouting" are used on the mosaic. private static int mosaicRows; // The number of rows in the mosaic, if the window is open. private static int mosaicCols; // The number of cols in the mosaic, if the window is open. /** * Open a mosaic window with a 20-by-20 grid of squares, where each * square is 15 pixel on a side. */ public static void open() { open(20,20,15,15); } /** * Opens a mosaic window containing a specified number of rows and * a specified number of columns of square. Each square is 15 pixels * on a side. */ public static void open(int rows, int columns) { open(rows,columns,15,15); } /** * Opens a "mosaic" window on the screen. If another mosaic window was * already open, that one is closed and a new one is created. * * Precondition: The parameters rows, cols, w, and h are positive integers. * Postcondition: A window is open on the screen that can display rows and * columns of colored rectangles. Each rectangle is w pixels * wide and h pixels high. The number of rows is given by * the first parameter and the number of columns by the * second. Initially, all rectangles are black. * Note: The rows are numbered from 0 to rows - 1, and the columns are * numbered from 0 to cols - 1. 
*/ public static void open(int rows, int columns, int blockWidth, int blockHeight) { if (window != null) window.dispose(); canvas = new MosaicPanel(rows,columns,blockWidth,blockHeight); mosaicRows = rows; mosaicCols = columns; if ( ! use3DEffect ) { canvas.setGroutingColor(null); canvas.setUse3D(false); } window = new JFrame("Mosaic Window"); window.setContentPane(canvas); window.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); window.pack(); Dimension screen = Toolkit.getDefaultToolkit().getScreenSize(); if (window.getWidth() > screen.width - 20 || window.getHeight() > screen.height - 100) { // change size to fit on screen int w = window.getWidth(); int h = window.getHeight(); if (window.getWidth() > screen.width - 20) w = screen.width - 20; if (window.getHeight() > screen.height - 100) h = screen.height - 100; window.setSize(w,h); } window.setLocation( (screen.width - window.getWidth())/2, (screen.height - window.getHeight())/2 ); window.setVisible(true); } /** * Close the mosaic window, if one is open. */ public static void close() { if (window != null) { window.dispose(); window = null; canvas = null; } } /** * Tests whether the mosaic window is currently open. * * Precondition: None. * Postcondition: The return value is true if the window is open when this * function is called, and it is false if the window is * closed. */ public static boolean isOpen() { return (window != null); } /** * Inserts a delay in the program (to regulate the speed at which the colors * are changed, for example). * * Precondition: milliseconds is a positive integer. * Postcondition: The program has paused for at least the specified number * of milliseconds, where one second is equal to 1000 * milliseconds. */ public static void delay(int milliseconds) { if (milliseconds > 0) { try { Thread.sleep(milliseconds); } catch (InterruptedException e) { } } } /** * Gets the color of one of the rectangles in the mosaic. 
* * Precondition: row and col are in the valid range of row and column numbers. * Postcondition: The color of the specified rectangle is returned as * object of type color. */ public static Color getColor(int row, int col) { if (canvas == null) return Color.black; return canvas.getColor(row, col); } /** * Gets the red component of the color of one of the rectangles. * * Precondition: row and col are in the valid range of row and column numbers. * Postcondition: The red component of the color of the specified rectangle is * returned as an integer in the range 0 to 255 inclusive. */ public static int getRed(int row, int col) { if (canvas == null) return 0; if (row < 0 || row >= mosaicRows || col < 0 || col >= mosaicCols) { throw new IllegalArgumentException("(row,col) = (" + row + "," + col + ") is not in the mosaic."); } return canvas.getRed(row, col); } /** * Like getRed, but returns the green component of the color. */ public static int getGreen(int row, int col) { if (canvas == null) return 0; if (row < 0 || row >= mosaicRows || col < 0 || col >= mosaicCols) { throw new IllegalArgumentException("(row,col) = (" + row + "," + col + ") is not in the mosaic."); } return canvas.getGreen(row, col); } /** * Like getRed, but returns the blue component of the color. */ public static int getBlue(int row, int col) { if (canvas == null) return 0; if (row < 0 || row >= mosaicRows || col < 0 || col >= mosaicCols) { throw new IllegalArgumentException("(row,col) = (" + row + "," + col + ") is not in the mosaic."); } return canvas.getBlue(row, col); } /** * Sets the color of one of the rectangles in the window. * * Precondition: row and col are in the valid range of row and column numbers. * Postcondition: The color of the rectangle in row number row and column * number col has been set to the color specified by c. * If c is null, the color of the rectangle is set to black. 
*/ public static void setColor(int row, int col, Color c) { if (canvas == null) return; if (row < 0 || row >= mosaicRows || col < 0 || col >= mosaicCols) { throw new IllegalArgumentException("(row,col) = (" + row + "," + col + ") is not in the mosaic."); } canvas.setColor(row,col,c); } /** * Sets the color of one of the rectangles in the window. * * Precondition: row and col are in the valid range of row and column numbers, * and r, g, and b are in the range 0 to 255, inclusive. * Postcondition: The color of the rectangle in row number row and column * number col has been set to the color specified by r, g, * and b. r gives the amount of red in the color with 0 * representing no red and 255 representing the maximum * possible amount of red. The larger the value of r, the * more red in the color. g and b work similarly for the * green and blue color components. */ public static void setColor(int row, int col, int red, int green, int blue) { if (canvas == null) return; if (row < 0 || row >= mosaicRows || col < 0 || col >= mosaicCols) { throw new IllegalArgumentException("(row,col) = (" + row + "," + col + ") is not in the mosaic."); } canvas.setColor(row,col,red,green,blue); } /** * Fills the entire mosaic with a specified color. If c is null, the mosaic * is filled with black. * * Precondition: The mosaic window must be open. */ public static void fill(Color c) { canvas.fill(c); } /** * Fills the entire mosaic with a color that is specified by giving its * red, green, and blue components (numbers in the range 0 to 255). * * Precondition: The mosaic window must be open. */ public static void fill(int red, int green, int blue) { canvas.fill(red,green,blue); } /** * Fill the entire mosaic window with random colors by setting * the color of each rectangle to a randomly selected red/blue/green * values. * * Precondition: The mosaic window must be open. 
*/ public static void fillRandomly() { canvas.fillRandomly(); } /** * If use3DEffect is true, which is the default, then rectangles are drawn * as "3D" rects, which is supposed to make them look raised up from their * background, and a 1-pixel gray border is drawn around the outside of * the rectangles, giving better definition to the rows and columns. If * use3DEffect is set to false, ordinary "flat" rects are used, with no * border between them. The mosaic window does not have to be open when * this is called. */ public static void setUse3DEffect(boolean use3D) { use3DEffect = use3D; if (canvas != null) { canvas.setGroutingColor(use3DEffect? Color.GRAY : null); canvas.setUse3D(use3DEffect); canvas.repaint(); } } /** * Returns the value of the use3DEffect property. * @return */ public boolean getUse3DEffect() { return use3DEffect; } } // end of class Mosaic
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.accumulo.proxy; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.util.Properties; import org.apache.accumulo.core.cli.Help; import org.apache.accumulo.core.client.ClientConfiguration; import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty; import org.apache.accumulo.core.client.impl.ClientContext; import org.apache.accumulo.core.client.security.tokens.KerberosToken; import org.apache.accumulo.core.conf.AccumuloConfiguration; import org.apache.accumulo.core.conf.Property; import org.apache.accumulo.core.rpc.SslConnectionParams; import org.apache.accumulo.minicluster.MiniAccumuloCluster; import org.apache.accumulo.proxy.thrift.AccumuloProxy; import org.apache.accumulo.server.metrics.MetricsFactory; import org.apache.accumulo.server.rpc.RpcWrapper; import org.apache.accumulo.server.rpc.SaslServerConnectionParams; import org.apache.accumulo.server.rpc.ServerAddress; import org.apache.accumulo.server.rpc.TServerUtils; import org.apache.accumulo.server.rpc.ThriftServerType; import org.apache.accumulo.server.rpc.TimedProcessor; import org.apache.accumulo.server.rpc.UGIAssumingProcessor; import 
org.apache.accumulo.start.spi.KeywordExecutable;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.thrift.TProcessor;
import org.apache.thrift.protocol.TCompactProtocol;
import org.apache.thrift.protocol.TProtocolFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.beust.jcommander.IStringConverter;
import com.beust.jcommander.Parameter;
import com.google.auto.service.AutoService;
import com.google.common.io.Files;
import com.google.common.net.HostAndPort;

/**
 * Command-line entry point that starts the Accumulo Thrift proxy server.
 *
 * Reads a properties file (-p), optionally spins up a MiniAccumuloCluster,
 * then builds and serves the {@link AccumuloProxy} Thrift service until the
 * server stops serving.
 */
@AutoService(KeywordExecutable.class)
public class Proxy implements KeywordExecutable {

  private static final Logger log = LoggerFactory.getLogger(Proxy.class);

  public static final String USE_MINI_ACCUMULO_KEY = "useMiniAccumulo";
  public static final String USE_MINI_ACCUMULO_DEFAULT = "false";
  public static final String USE_MOCK_INSTANCE_KEY = "useMockInstance";
  public static final String USE_MOCK_INSTANCE_DEFAULT = "false";
  public static final String ACCUMULO_INSTANCE_NAME_KEY = "instance";
  public static final String ZOOKEEPERS_KEY = "zookeepers";
  public static final String THRIFT_THREAD_POOL_SIZE_KEY = "numThreads";
  // Default number of threads from THsHaServer.Args
  public static final String THRIFT_THREAD_POOL_SIZE_DEFAULT = "5";
  public static final String THRIFT_MAX_FRAME_SIZE_KEY = "maxFrameSize";
  public static final String THRIFT_MAX_FRAME_SIZE_DEFAULT = "16M";

  // Type of thrift server to create
  public static final String THRIFT_SERVER_TYPE = "thriftServerType";
  public static final String THRIFT_SERVER_TYPE_DEFAULT = "";
  public static final ThriftServerType DEFAULT_SERVER_TYPE = ThriftServerType.getDefault();

  public static final String KERBEROS_PRINCIPAL = "kerberosPrincipal";
  public static final String KERBEROS_KEYTAB = "kerberosKeytab";

  public static final String THRIFT_SERVER_HOSTNAME = "thriftServerHostname";
  public static final String THRIFT_SERVER_HOSTNAME_DEFAULT = "0.0.0.0";

  /**
   * JCommander converter that loads the file named by the "-p" argument into a
   * {@link Properties} object.  Any I/O failure is rethrown as an unchecked
   * exception with the original cause attached.
   */
  public static class PropertiesConverter implements IStringConverter<Properties> {

    @Override
    public Properties convert(String fileName) {
      Properties prop = new Properties();
      // try-with-resources closes the stream even if load() throws,
      // replacing the previous manual try/finally close.
      try (InputStream is = new FileInputStream(fileName)) {
        prop.load(is);
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
      return prop;
    }
  }

  /** Command-line options: a single required "-p <properties file>" argument. */
  public static class Opts extends Help {
    @Parameter(names = "-p", required = true, description = "properties file name", converter = PropertiesConverter.class)
    Properties prop;
  }

  @Override
  public String keyword() {
    return "proxy";
  }

  /**
   * Validates the supplied properties, optionally starts a mini cluster, then
   * starts the proxy Thrift server and blocks until it stops serving.
   *
   * @param args command line arguments; must include -p &lt;properties file&gt;
   * @throws Exception on any startup failure
   */
  @Override
  public void execute(final String[] args) throws Exception {
    Opts opts = new Opts();
    opts.parseArgs(Proxy.class.getName(), args);

    boolean useMini = Boolean.parseBoolean(opts.prop.getProperty(USE_MINI_ACCUMULO_KEY, USE_MINI_ACCUMULO_DEFAULT));
    boolean useMock = Boolean.parseBoolean(opts.prop.getProperty(USE_MOCK_INSTANCE_KEY, USE_MOCK_INSTANCE_DEFAULT));
    String instance = opts.prop.getProperty(ACCUMULO_INSTANCE_NAME_KEY);
    String zookeepers = opts.prop.getProperty(ZOOKEEPERS_KEY);

    // Exactly one way of locating an instance must be configured.
    if (!useMini && !useMock && instance == null) {
      System.err.println("Properties file must contain one of : useMiniAccumulo=true, useMockInstance=true, or instance=<instance name>");
      System.exit(1);
    }

    if (instance != null && zookeepers == null) {
      System.err.println("When instance is set in properties file, zookeepers must also be set.");
      System.exit(1);
    }

    if (!opts.prop.containsKey("port")) {
      System.err.println("No port property");
      System.exit(1);
    }

    if (useMini) {
      log.info("Creating mini cluster");
      final File folder = Files.createTempDir();
      final MiniAccumuloCluster accumulo = new MiniAccumuloCluster(folder, "secret");
      accumulo.start();
      opts.prop.setProperty("instance", accumulo.getConfig().getInstanceName());
      opts.prop.setProperty("zookeepers", accumulo.getZooKeepers());
      Runtime.getRuntime().addShutdownHook(new Thread() {
        // FIX: this hook previously overrode start() instead of run().  The JVM
        // invokes start() on registered shutdown hooks; the cleanup work belongs
        // in run() so the hook executes with normal Thread semantics.
        @Override
        public void run() {
          try {
            accumulo.stop();
          } catch (Exception e) {
            // FIX: preserve the original cause instead of throwing a bare,
            // message-less RuntimeException that discarded it.
            throw new RuntimeException(e);
          } finally {
            // NOTE(review): File.delete() cannot remove a non-empty directory,
            // so this warning fires whenever the mini cluster leaves files
            // behind in the temp folder.
            if (!folder.delete())
              log.warn("Unexpected error removing " + folder);
          }
        }
      });
    }

    Class<? extends TProtocolFactory> protoFactoryClass = Class.forName(
        opts.prop.getProperty("protocolFactory", TCompactProtocol.Factory.class.getName()))
        .asSubclass(TProtocolFactory.class);
    // getDeclaredConstructor().newInstance() replaces the deprecated
    // Class.newInstance(), which rethrows checked exceptions unwrapped.
    TProtocolFactory protoFactory = protoFactoryClass.getDeclaredConstructor().newInstance();
    int port = Integer.parseInt(opts.prop.getProperty("port"));
    String hostname = opts.prop.getProperty(THRIFT_SERVER_HOSTNAME, THRIFT_SERVER_HOSTNAME_DEFAULT);
    HostAndPort address = HostAndPort.fromParts(hostname, port);
    ServerAddress server = createProxyServer(address, protoFactory, opts.prop);
    // Wait for the server to come up
    while (!server.server.isServing()) {
      Thread.sleep(100);
    }

    log.info("Proxy server started on " + server.getAddress());

    // Block until the server shuts down.
    while (server.server.isServing()) {
      Thread.sleep(1000);
    }
  }

  public static void main(String[] args) throws Exception {
    new Proxy().execute(args);
  }

  /**
   * Convenience overload that uses the default client configuration.
   */
  public static ServerAddress createProxyServer(HostAndPort address, TProtocolFactory protocolFactory, Properties properties) throws Exception {
    return createProxyServer(address, protocolFactory, properties, ClientConfiguration.loadDefault());
  }

  /**
   * Builds the Thrift processor chain (proxy impl, RPC exception wrapper,
   * optional SASL/SSL transport security, timing/metrics) and starts the
   * configured flavor of Thrift server.
   *
   * @param address host and port to bind
   * @param protocolFactory Thrift protocol factory to serve with
   * @param properties proxy configuration (thread pool size, frame size, kerberos, ...)
   * @param clientConf Accumulo client configuration (SSL/SASL settings)
   * @return the address and handle of the started server
   * @throws Exception if login or server startup fails
   */
  public static ServerAddress createProxyServer(HostAndPort address, TProtocolFactory protocolFactory, Properties properties,
      ClientConfiguration clientConf) throws Exception {
    final int numThreads = Integer.parseInt(properties.getProperty(THRIFT_THREAD_POOL_SIZE_KEY, THRIFT_THREAD_POOL_SIZE_DEFAULT));
    final long maxFrameSize = AccumuloConfiguration.getMemoryInBytes(properties.getProperty(THRIFT_MAX_FRAME_SIZE_KEY, THRIFT_MAX_FRAME_SIZE_DEFAULT));
    final int simpleTimerThreadpoolSize = Integer.parseInt(Property.GENERAL_SIMPLETIMER_THREADPOOL_SIZE.getDefaultValue());
    // How frequently to try to resize the thread pool
    final long threadpoolResizeInterval = 1000l * 5;
    // No timeout
    final long serverSocketTimeout = 0l;
    // Use the new hadoop metrics2 support
    final MetricsFactory metricsFactory = new MetricsFactory(false);
    final String serverName = "Proxy", threadName = "Accumulo Thrift Proxy";

    // create the implementation of the proxy interface
    ProxyServer impl = new ProxyServer(properties);

    // Wrap the implementation -- translate some exceptions
    AccumuloProxy.Iface wrappedImpl = RpcWrapper.service(impl, new AccumuloProxy.Processor<AccumuloProxy.Iface>(impl));

    // Create the processor from the implementation
    TProcessor processor = new AccumuloProxy.Processor<>(wrappedImpl);

    // Get the type of thrift server to instantiate
    final String serverTypeStr = properties.getProperty(THRIFT_SERVER_TYPE, THRIFT_SERVER_TYPE_DEFAULT);
    ThriftServerType serverType = DEFAULT_SERVER_TYPE;
    if (!THRIFT_SERVER_TYPE_DEFAULT.equals(serverTypeStr)) {
      serverType = ThriftServerType.get(serverTypeStr);
    }

    SslConnectionParams sslParams = null;
    SaslServerConnectionParams saslParams = null;
    switch (serverType) {
      case SSL:
        sslParams = SslConnectionParams.forClient(ClientContext.convertClientConfig(clientConf));
        break;
      case SASL:
        if (!clientConf.getBoolean(ClientProperty.INSTANCE_RPC_SASL_ENABLED.getKey(), false)) {
          // ACCUMULO-3651 Changed level to error and added FATAL to message for slf4j capability
          log.error("FATAL: SASL thrift server was requested but it is disabled in client configuration");
          throw new RuntimeException("SASL is not enabled in configuration");
        }

        // Kerberos needs to be enabled to use it
        if (!UserGroupInformation.isSecurityEnabled()) {
          // ACCUMULO-3651 Changed level to error and added FATAL to message for slf4j capability
          log.error("FATAL: Hadoop security is not enabled");
          // FIX: give the exception a message instead of throwing it bare.
          throw new RuntimeException("Hadoop security is not enabled");
        }

        // Login via principal and keytab
        final String kerberosPrincipal = properties.getProperty(KERBEROS_PRINCIPAL, ""),
            kerberosKeytab = properties.getProperty(KERBEROS_KEYTAB, "");
        if (StringUtils.isBlank(kerberosPrincipal) || StringUtils.isBlank(kerberosKeytab)) {
          // ACCUMULO-3651 Changed level to error and added FATAL to message for slf4j capability
          log.error("FATAL: Kerberos principal and keytab must be provided");
          // FIX: give the exception a message instead of throwing it bare.
          throw new RuntimeException("Kerberos principal and keytab must be provided");
        }
        UserGroupInformation.loginUserFromKeytab(kerberosPrincipal, kerberosKeytab);
        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
        log.info("Logged in as " + ugi.getUserName());

        // The kerberosPrimary set in the SASL server needs to match the principal we're logged in as.
        final String shortName = ugi.getShortUserName();
        log.info("Setting server primary to {}", shortName);
        clientConf.setProperty(ClientProperty.KERBEROS_SERVER_PRIMARY, shortName);

        KerberosToken token = new KerberosToken();
        saslParams = new SaslServerConnectionParams(clientConf, token, null);

        // Make the proxy impl aware of the authenticated caller identity.
        processor = new UGIAssumingProcessor(processor);

        break;
      default:
        // nothing to do -- no extra configuration necessary
        break;
    }

    // Hook up support for tracing for thrift calls
    TimedProcessor timedProcessor = new TimedProcessor(metricsFactory, processor, serverName, threadName);

    // Create the thrift server with our processor and properties
    ServerAddress serverAddr = TServerUtils.startTServer(address, serverType, timedProcessor, protocolFactory, serverName, threadName, numThreads,
        simpleTimerThreadpoolSize, threadpoolResizeInterval, maxFrameSize, sslParams, saslParams, serverSocketTimeout);

    return serverAddr;
  }
}
/* * This file is part of NeptuneCommon, licensed under the MIT License (MIT). * * Copyright (c) 2015-2016, Jamie Mansfield <https://github.com/jamierocks> * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package org.neptunepowered.common.mixin.minecraft.server.management; import com.mojang.authlib.GameProfile; import io.netty.buffer.Unpooled; import net.canarymod.Canary; import net.canarymod.api.ConfigurationManager; import net.canarymod.api.PlayerListAction; import net.canarymod.api.PlayerListData; import net.canarymod.api.entity.living.humanoid.Player; import net.canarymod.api.packet.Packet; import net.canarymod.api.world.DimensionType; import net.canarymod.api.world.World; import net.canarymod.chat.MessageReceiver; import net.canarymod.hook.player.ConnectionHook; import net.canarymod.hook.player.PlayerListHook; import net.minecraft.entity.Entity; import net.minecraft.entity.EntityList; import net.minecraft.entity.player.EntityPlayerMP; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.network.NetHandlerPlayServer; import net.minecraft.network.NetworkManager; import net.minecraft.network.PacketBuffer; import net.minecraft.network.play.server.S01PacketJoinGame; import net.minecraft.network.play.server.S05PacketSpawnPosition; import net.minecraft.network.play.server.S09PacketHeldItemChange; import net.minecraft.network.play.server.S1DPacketEntityEffect; import net.minecraft.network.play.server.S38PacketPlayerListItem; import net.minecraft.network.play.server.S39PacketPlayerAbilities; import net.minecraft.network.play.server.S3FPacketCustomPayload; import net.minecraft.network.play.server.S41PacketServerDifficulty; import net.minecraft.potion.PotionEffect; import net.minecraft.scoreboard.ServerScoreboard; import net.minecraft.server.MinecraftServer; import net.minecraft.server.management.PlayerProfileCache; import net.minecraft.server.management.ServerConfigurationManager; import net.minecraft.stats.StatList; import net.minecraft.util.BlockPos; import net.minecraft.util.ChatComponentTranslation; import net.minecraft.util.EnumChatFormatting; import net.minecraft.util.IChatComponent; import net.minecraft.world.WorldServer; import 
net.minecraft.world.WorldSettings;
import net.minecraft.world.storage.WorldInfo;
import org.neptunepowered.common.wrapper.chat.NeptuneChatComponent;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Overwrite;
import org.spongepowered.asm.mixin.Shadow;

import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
 * Mixin into vanilla {@code ServerConfigurationManager} that re-implements the
 * player login/logout paths so Canary hooks (PlayerListHook, ConnectionHook)
 * can observe or cancel the corresponding tab-list and chat broadcasts.
 * Also implements Canary's {@link ConfigurationManager} interface.
 */
@Mixin(ServerConfigurationManager.class)
public abstract class MixinServerConfigurationManager implements ConfigurationManager {

    // Shadowed vanilla fields (raw types exactly as declared by Minecraft).
    @Shadow public List playerEntityList;
    @Shadow public Map uuidToPlayerMap;
    @Shadow private MinecraftServer mcServer;
    @Shadow private Map playerStatFiles;

    // Shadowed vanilla methods used by the overwritten bodies below.
    @Shadow public abstract NBTTagCompound readPlayerDataFromFile(EntityPlayerMP playerIn);
    @Shadow protected abstract void setPlayerGameTypeBasedOnOther(EntityPlayerMP p_72381_1_, EntityPlayerMP p_72381_2_, net.minecraft.world.World worldIn);
    @Shadow protected abstract void sendScoreboard(ServerScoreboard scoreboardIn, EntityPlayerMP playerIn);
    @Shadow public abstract void sendChatMsg(IChatComponent component);
    @Shadow public abstract void playerLoggedIn(EntityPlayerMP playerIn);
    @Shadow public abstract void updateTimeAndWeatherForPlayer(EntityPlayerMP playerIn, WorldServer worldIn);
    @Shadow protected abstract void writePlayerData(EntityPlayerMP playerIn);
    @Shadow public abstract int getCurrentPlayerCount();

    /**
     * Vanilla logout handling, overwritten so that the REMOVE_PLAYER tab-list
     * update is sent per-recipient through Canary's PlayerListHook (which may
     * cancel or alter it) instead of as a single broadcast.
     */
    @Overwrite
    public void playerLoggedOut(EntityPlayerMP playerIn) {
        // Vanilla bookkeeping: stats, persistence, world/map removal.
        playerIn.triggerAchievement(StatList.leaveGameStat);
        this.writePlayerData(playerIn);
        WorldServer worldserver = playerIn.getServerForPlayer();

        if (playerIn.ridingEntity != null) {
            // Remove the mount too, so it does not linger after the rider leaves.
            worldserver.removePlayerEntityDangerously(playerIn.ridingEntity);
            ServerConfigurationManager.logger.debug("removing player mount");
        }

        worldserver.removeEntity(playerIn);
        worldserver.getPlayerManager().removePlayer(playerIn);
        this.playerEntityList.remove(playerIn);
        this.uuidToPlayerMap.remove(playerIn.getUniqueID());
        this.playerStatFiles.remove(playerIn.getUniqueID());

        // Neptune: start
        // Fire a PlayerListHook per remaining player; each uncancelled hook
        // gets its own (possibly modified) REMOVE_PLAYER packet.
        PlayerListData playerListData = ((Player) playerIn).getPlayerListData(PlayerListAction.REMOVE_PLAYER);
        for (int i = 0; i < playerEntityList.size(); i++) {
            EntityPlayerMP playerMP = (EntityPlayerMP) this.playerEntityList.get(i);
            PlayerListHook playerListHook = new PlayerListHook(playerListData.copy(), (Player) playerMP);
            if (!playerListHook.call().isCanceled()) {
                S38PacketPlayerListItem packet = new S38PacketPlayerListItem();
                // Canary PlayerListAction names mirror the vanilla enum names.
                packet.action = S38PacketPlayerListItem.Action.valueOf(PlayerListAction.REMOVE_PLAYER.name());
                WorldSettings.GameType gameType =
                        WorldSettings.GameType.getByID(playerListHook.getData().getMode().getId());
                // Use the hook's display name if one was set; otherwise null.
                IChatComponent iChatComponent = playerListHook.getData().displayNameSet()
                        ? ((NeptuneChatComponent) playerListHook.getData().getDisplayName()).getHandle() : null;
                packet.players.add(packet.new AddPlayerData(playerListHook.getData()
                        .getProfile(), playerListHook.getData().getPing(), gameType, iChatComponent));
                playerMP.playerNetServerHandler.sendPacket(packet);
            }
        }
        // Neptune: end
        //this.sendPacketToAllPlayers(new S38PacketPlayerListItem(S38PacketPlayerListItem.Action.REMOVE_PLAYER, new
        // EntityPlayerMP[] {playerIn})); // Neptune: replaced by above code
    }

    /**
     * Vanilla connection initialization, overwritten so the join message is
     * routed through Canary's ConnectionHook (which may hide it) and the
     * Canary MOTD is sent at the end.  The packet sequence is otherwise kept
     * identical to vanilla -- clients depend on this exact order.
     */
    @Overwrite
    public void initializeConnectionToPlayer(NetworkManager netManager, EntityPlayerMP playerIn) {
        GameProfile gameprofile = playerIn.getGameProfile();
        PlayerProfileCache playerprofilecache = this.mcServer.getPlayerProfileCache();
        GameProfile gameprofile1 = playerprofilecache.getProfileByUUID(gameprofile.getId());
        // s = last known name for this UUID (falls back to the login name).
        String s = gameprofile1 == null ? gameprofile.getName() : gameprofile1.getName();
        playerprofilecache.addEntry(gameprofile);
        NBTTagCompound nbttagcompound = this.readPlayerDataFromFile(playerIn);
        playerIn.setWorld(this.mcServer.worldServerForDimension(playerIn.dimension));
        playerIn.theItemInWorldManager.setWorld((WorldServer) playerIn.worldObj);
        String s1 = "local";

        if (netManager.getRemoteAddress() != null) {
            s1 = netManager.getRemoteAddress().toString();
        }

        ServerConfigurationManager.logger
                .info(playerIn.getCommandSenderName() + "[" + s1 + "] logged in with entity id " + playerIn
                        .getEntityId() + " at (" + playerIn.posX + ", " + playerIn.posY + ", " + playerIn.posZ + ")");
        WorldServer worldserver = this.mcServer.worldServerForDimension(playerIn.dimension);
        WorldInfo worldinfo = worldserver.getWorldInfo();
        BlockPos blockpos = worldserver.getSpawnPoint();
        this.setPlayerGameTypeBasedOnOther(playerIn, null, worldserver);
        NetHandlerPlayServer nethandlerplayserver = new NetHandlerPlayServer(this.mcServer, netManager, playerIn);
        // Initial login packet burst (order matters to the client).
        nethandlerplayserver.sendPacket(
                new S01PacketJoinGame(playerIn.getEntityId(), playerIn.theItemInWorldManager.getGameType(),
                        worldinfo.isHardcoreModeEnabled(), worldserver.provider.getDimensionId(),
                        worldserver.getDifficulty(), this.getMaxPlayers(), worldinfo.getTerrainType(),
                        worldserver.getGameRules().getGameRuleBooleanValue("reducedDebugInfo")));
        nethandlerplayserver
                .sendPacket(new S3FPacketCustomPayload("MC|Brand", (new PacketBuffer(Unpooled.buffer())).writeString(
                        this.mcServer.getServerModName())));
        nethandlerplayserver
                .sendPacket(new S41PacketServerDifficulty(worldinfo.getDifficulty(), worldinfo.isDifficultyLocked()));
        nethandlerplayserver.sendPacket(new S05PacketSpawnPosition(blockpos));
        nethandlerplayserver.sendPacket(new S39PacketPlayerAbilities(playerIn.capabilities));
        nethandlerplayserver.sendPacket(new S09PacketHeldItemChange(playerIn.inventory.currentItem));
        playerIn.getStatFile().func_150877_d();
        playerIn.getStatFile().sendAchievements(playerIn);
        this.sendScoreboard((ServerScoreboard) worldserver.getScoreboard(), playerIn);
        this.mcServer.refreshStatusNextTick();
        ChatComponentTranslation chatcomponenttranslation;

        // Announce a rename if the login name differs from the cached name.
        if (!playerIn.getCommandSenderName().equalsIgnoreCase(s)) {
            chatcomponenttranslation = new ChatComponentTranslation("multiplayer.player.joined.renamed",
                    new Object[]{playerIn.getDisplayName(), s});
        } else {
            chatcomponenttranslation = new ChatComponentTranslation("multiplayer.player.joined",
                    new Object[]{playerIn.getDisplayName()});
        }

        chatcomponenttranslation.getChatStyle().setColor(EnumChatFormatting.YELLOW);

        // Neptune: start
        // The ConnectionHook may mark the join message as hidden.
        ConnectionHook hook = (ConnectionHook) new ConnectionHook(
                (Player) playerIn, chatcomponenttranslation.getKey(), false
        ).call();
        if (!hook.isHidden()) {
            this.sendChatMsg(chatcomponenttranslation);
        }
        // Neptune: end
        //this.sendChatMsg(chatcomponenttranslation); // Neptune: Called above

        this.playerLoggedIn(playerIn);
        nethandlerplayserver.setPlayerLocation(playerIn.posX, playerIn.posY, playerIn.posZ, playerIn.rotationYaw,
                playerIn.rotationPitch);
        this.updateTimeAndWeatherForPlayer(playerIn, worldserver);

        if (this.mcServer.getResourcePackUrl().length() > 0) {
            playerIn.loadResourcePack(this.mcServer.getResourcePackUrl(), this.mcServer.getResourcePackHash());
        }

        // Re-send any active potion effects to the joining client.
        Iterator iterator = playerIn.getActivePotionEffects().iterator();

        while (iterator.hasNext()) {
            PotionEffect potioneffect = (PotionEffect) iterator.next();
            nethandlerplayserver.sendPacket(new S1DPacketEntityEffect(playerIn.getEntityId(), potioneffect));
        }

        playerIn.addSelfToInternalCraftingInventory();

        // Restore the entity the player was riding, if saved (NBT tag type 10 = compound).
        if (nbttagcompound != null && nbttagcompound.hasKey("Riding", 10)) {
            Entity entity = EntityList.createEntityFromNBT(nbttagcompound.getCompoundTag("Riding"), worldserver);

            if (entity != null) {
                // forceSpawn bypasses normal spawn checks while re-adding the mount.
                entity.forceSpawn = true;
                worldserver.spawnEntityInWorld(entity);
                playerIn.mountEntity(entity);
                entity.forceSpawn = false;
            }
        }

        // Neptune: start
        Canary.motd().sendMOTD((MessageReceiver) playerIn);
        // Neptune: end
    }

    // NOTE(review): not implemented -- silently does nothing.
    @Override
    public void sendPacketToAllInWorld(String world, Packet packet) {
    }

    @Override
    public int getNumPlayersOnline() {
        return getCurrentPlayerCount();
    }

    // NOTE(review): unimplemented stub -- always returns null.
    @Override
    public Player getPlayerByName(String name) {
        return null;
    }

    // NOTE(review): unimplemented stub -- always returns null rather than an empty list.
    @Override
    public List<Player> getAllPlayers() {
        return null;
    }

    // Satisfies both the Canary interface and the shadowed vanilla method.
    @Override
    @Shadow
    public abstract int getMaxPlayers();

    // NOTE(review): not implemented -- silently does nothing.
    @Override
    public void markBlockNeedsUpdate(int x, int y, int z, DimensionType dimension, String world) {
    }

    // NOTE(review): not implemented -- silently does nothing.
    @Override
    public void switchDimension(Player player, World world, boolean createPortal) {
    }
}
/* * Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.spi.impl; import com.hazelcast.internal.partition.FragmentedMigrationAwareService; import com.hazelcast.internal.partition.PartitionMigrationEvent; import com.hazelcast.internal.partition.PartitionReplicationEvent; import com.hazelcast.internal.services.ServiceNamespace; import com.hazelcast.spi.impl.operationservice.Operation; import com.hazelcast.test.HazelcastParallelParametersRunnerFactory; import com.hazelcast.test.HazelcastParametrizedRunner; import com.hazelcast.test.annotation.ParallelJVMTest; import com.hazelcast.test.annotation.QuickTest; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import java.util.Arrays; import java.util.Collection; import static com.hazelcast.spi.impl.CountingMigrationAwareService.IN_FLIGHT_MIGRATION_STAMP; import static com.hazelcast.spi.impl.CountingMigrationAwareService.PRIMARY_REPLICA_INDEX; import static com.hazelcast.spi.impl.CountingMigrationAwareService.isPrimaryReplicaMigrationEvent; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertTrue; import static 
org.junit.runners.Parameterized.UseParametersRunnerFactory;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Test count-tracking functionality of CountingMigrationAwareService.
 *
 * Parameterized over (wrapped service, migration event): the wrapped service
 * either behaves normally or always throws, and the event either involves the
 * primary replica (promotion/demotion, which must move the migration stamp)
 * or only backups (which must not).
 */
@RunWith(HazelcastParametrizedRunner.class)
@UseParametersRunnerFactory(HazelcastParallelParametersRunnerFactory.class)
@Category({QuickTest.class, ParallelJVMTest.class})
public class CountingMigrationAwareServiceTest {

    @Parameterized.Parameter
    public FragmentedMigrationAwareService wrappedMigrationAwareService;

    @Parameterized.Parameter(1)
    public PartitionMigrationEvent event;

    @Rule
    public ExpectedException expectedException = ExpectedException.none();

    // Service under test; wraps wrappedMigrationAwareService.
    private CountingMigrationAwareService countingMigrationAwareService;
    // Stamp captured before any migration activity, for comparison in tests.
    private int initialMigrationStamp;

    @Parameterized.Parameters(name = "{0}, replica: {1}")
    public static Collection<Object> parameters() {
        // promotion: backup replica 1 becomes primary (involves primary replica)
        PartitionMigrationEvent promotionEvent = mock(PartitionMigrationEvent.class);
        when(promotionEvent.getNewReplicaIndex()).thenReturn(PRIMARY_REPLICA_INDEX);
        when(promotionEvent.getCurrentReplicaIndex()).thenReturn(1);
        when(promotionEvent.toString()).thenReturn("1 > 0");
        // demotion: primary becomes backup replica 1 (involves primary replica)
        PartitionMigrationEvent demotionEvent = mock(PartitionMigrationEvent.class);
        when(demotionEvent.getNewReplicaIndex()).thenReturn(1);
        when(demotionEvent.getCurrentReplicaIndex()).thenReturn(PRIMARY_REPLICA_INDEX);
        when(demotionEvent.toString()).thenReturn("0 > 1");
        // backups-only: replica 2 becomes replica 1 (primary not involved)
        PartitionMigrationEvent backupsEvent = mock(PartitionMigrationEvent.class);
        when(backupsEvent.getNewReplicaIndex()).thenReturn(1);
        when(backupsEvent.getCurrentReplicaIndex()).thenReturn(2);
        when(backupsEvent.toString()).thenReturn("2 > 1");

        return Arrays.asList(new Object[]{
                new Object[]{new NoOpMigrationAwareService(), promotionEvent},
                new Object[]{new NoOpMigrationAwareService(), demotionEvent},
                new Object[]{new NoOpMigrationAwareService(), backupsEvent},
                new Object[]{new ExceptionThrowingMigrationAwareService(), promotionEvent},
                new Object[]{new ExceptionThrowingMigrationAwareService(), demotionEvent},
                new Object[]{new ExceptionThrowingMigrationAwareService(), backupsEvent},
        });
    }

    @Before
    public void setUp() throws Exception {
        // setup the counting migration aware service and execute 1 prepareReplicationOperation (which does not
        // affect the counter)
        countingMigrationAwareService = new CountingMigrationAwareService(wrappedMigrationAwareService);
        initialMigrationStamp = countingMigrationAwareService.getMigrationStamp();
        countingMigrationAwareService.prepareReplicationOperation(null);
        // also execute the first part of migration: beforeMigration
        // (every test below therefore starts with one beforeMigration already counted)
        try {
            countingMigrationAwareService.beforeMigration(event);
        } catch (RuntimeException e) {
            // we do not care whether the wrapped service throws an exception
        }
    }

    @Test
    public void beforeMigration() throws Exception {
        // when: countingMigrationAwareService.beforeMigration was invoked (in setUp method)
        // then: if event involves primary replica, stamp should change.
        if (isPrimaryReplicaMigrationEvent(event)) {
            assertEquals(IN_FLIGHT_MIGRATION_STAMP, countingMigrationAwareService.getMigrationStamp());
            assertFalse(countingMigrationAwareService.validateMigrationStamp(IN_FLIGHT_MIGRATION_STAMP));
        } else {
            assertEquals(initialMigrationStamp, countingMigrationAwareService.getMigrationStamp());
            assertTrue(countingMigrationAwareService.validateMigrationStamp(initialMigrationStamp));
        }
    }

    @Test
    public void commitMigration() throws Exception {
        // when: before - commit migration methods have been executed
        try {
            countingMigrationAwareService.commitMigration(event);
        } catch (RuntimeException e) {
            // we do not care whether the wrapped service throws an exception
        }
        int currentMigrationStamp = countingMigrationAwareService.getMigrationStamp();
        // then: if event involves primary replica, stamp should change.
        if (isPrimaryReplicaMigrationEvent(event)) {
            assertNotEquals(initialMigrationStamp, currentMigrationStamp);
        } else {
            assertEquals(initialMigrationStamp, currentMigrationStamp);
        }
        // after a completed migration the current stamp is valid again
        assertTrue(countingMigrationAwareService.validateMigrationStamp(currentMigrationStamp));
    }

    @Test
    public void rollbackMigration() throws Exception {
        // when: before - rollback migration methods have been executed
        try {
            countingMigrationAwareService.rollbackMigration(event);
        } catch (RuntimeException e) {
            // we do not care whether the wrapped service throws an exception
        }
        int currentMigrationStamp = countingMigrationAwareService.getMigrationStamp();
        // then: if event involves primary replica, stamp should change.
        if (isPrimaryReplicaMigrationEvent(event)) {
            assertNotEquals(initialMigrationStamp, currentMigrationStamp);
        } else {
            assertEquals(initialMigrationStamp, currentMigrationStamp);
        }
        // after a rolled-back migration the current stamp is valid again
        assertTrue(countingMigrationAwareService.validateMigrationStamp(currentMigrationStamp));
    }

    @Test
    public void commitMigration_invalidCount_throwsAssertionError() {
        // when: invalid sequence of beforeMigration, commitMigration, commitMigration is executed
        // and
        try {
            countingMigrationAwareService.commitMigration(event);
        } catch (RuntimeException e) {
            // we do not care whether the wrapped service throws an exception
        }
        // on second commitMigration, if event involves partition owner assertion error is thrown
        // (ExpectedException must be armed BEFORE the failing call)
        if (isPrimaryReplicaMigrationEvent(event)) {
            expectedException.expect(AssertionError.class);
        }
        try {
            countingMigrationAwareService.commitMigration(event);
        } catch (RuntimeException e) {
            // we do not care whether the wrapped service throws an exception
        }
    }

    @Test
    public void rollbackMigration_invalidCount_throwsAssertionError() {
        // when: invalid sequence of beforeMigration, rollbackMigration, rollbackMigration is executed
        try {
            countingMigrationAwareService.rollbackMigration(event);
        } catch (RuntimeException e) {
            // we do not care whether the wrapped service throws an exception
        }
        // on second rollbackMigration, if event involves partition owner assertion error is thrown
        // (ExpectedException must be armed BEFORE the failing call)
        if (isPrimaryReplicaMigrationEvent(event)) {
            expectedException.expect(AssertionError.class);
        }
        try {
            countingMigrationAwareService.rollbackMigration(event);
        } catch (RuntimeException e) {
            // we do not care whether the wrapped service throws an exception
        }
    }

    /**
     * Wrapped-service stub whose migration callbacks always throw, used to
     * verify the counter stays consistent even when the delegate fails.
     */
    static class ExceptionThrowingMigrationAwareService implements FragmentedMigrationAwareService {
        @Override
        public Collection<ServiceNamespace> getAllServiceNamespaces(PartitionReplicationEvent event) {
            return null;
        }

        @Override
        public boolean isKnownServiceNamespace(ServiceNamespace namespace) {
            return false;
        }

        @Override
        public Operation prepareReplicationOperation(PartitionReplicationEvent event,
                                                     Collection<ServiceNamespace> namespaces) {
            return null;
        }

        @Override
        public Operation prepareReplicationOperation(PartitionReplicationEvent event) {
            return null;
        }

        @Override
        public void beforeMigration(PartitionMigrationEvent event) {
            throw new RuntimeException("");
        }

        @Override
        public void commitMigration(PartitionMigrationEvent event) {
            throw new RuntimeException("");
        }

        @Override
        public void rollbackMigration(PartitionMigrationEvent event) {
            throw new RuntimeException("");
        }

        @Override
        public String toString() {
            // toString feeds the parameterized test name
            return "ExceptionThrowingMigrationAwareService";
        }
    }

    /**
     * Wrapped-service stub whose migration callbacks do nothing, used as the
     * happy-path delegate.
     */
    static class NoOpMigrationAwareService implements FragmentedMigrationAwareService {
        @Override
        public Collection<ServiceNamespace> getAllServiceNamespaces(PartitionReplicationEvent event) {
            return null;
        }

        @Override
        public boolean isKnownServiceNamespace(ServiceNamespace namespace) {
            return false;
        }

        @Override
        public Operation prepareReplicationOperation(PartitionReplicationEvent event,
                                                     Collection<ServiceNamespace> namespaces) {
            return null;
        }

        @Override
        public Operation prepareReplicationOperation(PartitionReplicationEvent event) {
            return null;
        }

        @Override
        public void beforeMigration(PartitionMigrationEvent event) {
        }

        @Override
        public void commitMigration(PartitionMigrationEvent event) {
        }

        @Override
        public void rollbackMigration(PartitionMigrationEvent event) {
        }

        @Override
        public String toString() {
            // toString feeds the parameterized test name
            return "NoOpMigrationAwareService";
        }
    }
}
package net.ME1312.SubServers.Velocity.Network;

import net.ME1312.Galaxi.Library.Container.Pair;
import net.ME1312.Galaxi.Library.Map.ObjectMap;
import net.ME1312.Galaxi.Library.Try;
import net.ME1312.Galaxi.Library.Util;
import net.ME1312.Galaxi.Library.Version.Version;
import net.ME1312.SubData.Client.SubDataClient;
import net.ME1312.SubData.Client.SubDataProtocol;
import net.ME1312.SubServers.Client.Common.Network.API.RemotePlayer;
import net.ME1312.SubServers.Client.Common.Network.API.Server;
import net.ME1312.SubServers.Client.Common.Network.Packet.*;
import net.ME1312.SubServers.Velocity.Event.SubNetworkConnectEvent;
import net.ME1312.SubServers.Velocity.Event.SubNetworkDisconnectEvent;
import net.ME1312.SubServers.Velocity.Event.SubRemoveServerEvent;
import net.ME1312.SubServers.Velocity.ExProxy;
import net.ME1312.SubServers.Velocity.Network.Packet.*;
import net.ME1312.SubServers.Velocity.Server.CachedPlayer;
import net.ME1312.SubServers.Velocity.Server.ServerData;
import net.ME1312.SubServers.Velocity.SubAPI;

import com.velocitypowered.api.proxy.server.RegisteredServer;

import java.io.File;
import java.io.IOException;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.UUID;
import java.util.function.Consumer;
import java.util.logging.Logger;

/**
 * SubServers Protocol Class.
 *
 * Singleton that declares the SubData wire protocol used by this proxy:
 * every packet ID is registered twice — once with its {@code Class} (for
 * decoding) and once with a live instance (for handling).
 */
public class SubProtocol extends SubDataProtocol {
    // Lazily-created singleton; see get()
    private static SubProtocol instance;

    @SuppressWarnings("deprecation")
    protected SubProtocol() {
        ExProxy plugin = SubAPI.getInstance().getInternals();

        setName("SubServers 2");
        addVersion(new Version("2.18a+"));

        // 00-0F: Object Link Packets
        registerPacket(0x0000, PacketLinkProxy.class);
        registerPacket(0x0000, new PacketLinkProxy(plugin));

        // 10-2F: Download Packets
        registerPacket(0x0010, PacketDownloadLang.class);
        registerPacket(0x0011, PacketDownloadPlatformInfo.class);
        registerPacket(0x0012, PacketDownloadProxyInfo.class);
        registerPacket(0x0013, PacketDownloadHostInfo.class);
        registerPacket(0x0014, PacketDownloadGroupInfo.class);
        registerPacket(0x0015, PacketDownloadServerInfo.class);
        registerPacket(0x0016, PacketDownloadPlayerInfo.class);
        registerPacket(0x0017, PacketCheckPermission.class);
        registerPacket(0x0018, PacketCheckPermissionResponse.class);

        registerPacket(0x0010, new PacketDownloadLang(plugin));
        registerPacket(0x0011, new PacketDownloadPlatformInfo());
        registerPacket(0x0012, new PacketDownloadProxyInfo());
        registerPacket(0x0013, new PacketDownloadHostInfo());
        registerPacket(0x0014, new PacketDownloadGroupInfo());
        registerPacket(0x0015, new PacketDownloadServerInfo());
        registerPacket(0x0016, new PacketDownloadPlayerInfo());
        registerPacket(0x0017, new PacketCheckPermission());
        registerPacket(0x0018, new PacketCheckPermissionResponse());

        // 30-4F: Control Packets (note: 0x003A is intentionally unassigned here)
        registerPacket(0x0030, PacketCreateServer.class);
        registerPacket(0x0031, PacketAddServer.class);
        registerPacket(0x0032, PacketStartServer.class);
        registerPacket(0x0033, PacketUpdateServer.class);
        registerPacket(0x0034, PacketEditServer.class);
        registerPacket(0x0035, PacketRestartServer.class);
        registerPacket(0x0036, PacketCommandServer.class);
        registerPacket(0x0037, PacketStopServer.class);
        registerPacket(0x0038, PacketRemoveServer.class);
        registerPacket(0x0039, PacketDeleteServer.class);
        registerPacket(0x003B, PacketTransferPlayer.class);
        registerPacket(0x003C, PacketDisconnectPlayer.class);
        registerPacket(0x003D, PacketMessagePlayer.class);

        registerPacket(0x0030, new PacketCreateServer());
        registerPacket(0x0031, new PacketAddServer());
        registerPacket(0x0032, new PacketStartServer());
        registerPacket(0x0033, new PacketUpdateServer());
        registerPacket(0x0034, new PacketEditServer());
        registerPacket(0x0035, new PacketRestartServer());
        registerPacket(0x0036, new PacketCommandServer());
        registerPacket(0x0037, new PacketStopServer());
        registerPacket(0x0038, new PacketRemoveServer());
        registerPacket(0x0039, new PacketDeleteServer());
        registerPacket(0x003B, new PacketTransferPlayer());
        registerPacket(0x003C, new PacketDisconnectPlayer());
        registerPacket(0x003D, new PacketMessagePlayer());

        // 70-7F: External Sync Packets
        //registerPacket(0x0070, PacketInExRunEvent.class);
        //registerPacket(0x0071, PacketInExReset.class);
        //registerPacket(0x0073, PacketInExReload.class);
        registerPacket(0x0074, PacketExSyncPlayer.class);
        registerPacket(0x0075, PacketExTransferPlayer.class);
        registerPacket(0x0076, PacketExDisconnectPlayer.class);
        registerPacket(0x0077, PacketExMessagePlayer.class);

        registerPacket(0x0070, new PacketInExRunEvent(plugin));
        registerPacket(0x0071, new PacketInExReset());
        // NOTE(review): 0x0073 is handled by PacketInExEditServer while the
        // commented-out listing above names PacketInExReload — confirm which
        // one the wire protocol actually expects on this channel.
        registerPacket(0x0073, new PacketInExEditServer(plugin));
        registerPacket(0x0074, new PacketExSyncPlayer(plugin));
        registerPacket(0x0075, new PacketExTransferPlayer(plugin));
        registerPacket(0x0076, new PacketExDisconnectPlayer(plugin));
        registerPacket(0x0077, new PacketExMessagePlayer(plugin));
    }

    /**
     * Get (and lazily create) the shared protocol instance.
     *
     * @return the SubProtocol singleton
     */
    public static SubProtocol get() {
        if (instance == null) instance = new SubProtocol();
        return instance;
    }

    /**
     * Build a per-channel logger; channel 0 uses the bare "SubData" name,
     * every other channel is suffixed with "/+<channel>".
     *
     * @param channel SubData sub-channel number
     * @return channel-specific logger
     */
    private Logger getLogger(int channel) {
        return net.ME1312.SubServers.Velocity.Library.Compatibility.Logger.get("SubData" + ((channel != 0)?File.separator+"+"+channel:""));
    }

    /**
     * Open an additional (non-primary) SubData sub-channel. The channel
     * number is the lowest positive integer not already in use; the channel
     * deregisters itself from the plugin's connection map on close.
     */
    @Override
    protected SubDataClient sub(Consumer<Runnable> scheduler, Logger logger, InetAddress address, int port, ObjectMap<?> login) throws IOException {
        ExProxy plugin = SubAPI.getInstance().getInternals();
        // Reflectively grab the plugin's channel-number -> connection map
        HashMap<Integer, SubDataClient> map = Try.all.get(() -> Util.reflect(ExProxy.class.getDeclaredField("subdata"), plugin), null);

        // Pick the first free positive channel number
        int channel = 1;
        while (map.containsKey(channel)) channel++;
        final int fc = channel;

        SubDataClient subdata = super.open(scheduler, getLogger(fc), address, port, login);
        map.put(fc, subdata);
        subdata.sendPacket(new PacketLinkProxy(plugin, fc));
        subdata.on.closed(client -> map.remove(fc));

        return subdata;
    }

    /**
     * Open the primary SubData connection (channel 0) and run the full
     * post-connect handshake: link the proxy, download language data, then
     * resynchronize local server and remote-player caches against the host.
     * On close, either reconnects (while the plugin is running) or clears
     * the channel-0 slot.
     */
    @SuppressWarnings("deprecation")
    @Override
    public SubDataClient open(Consumer<Runnable> scheduler, Logger logger, InetAddress address, int port) throws IOException {
        ExProxy plugin = SubAPI.getInstance().getInternals();
        SubDataClient subdata = super.open(scheduler, logger, address, port);
        HashMap<Integer, SubDataClient> map = Try.all.get(() -> Util.reflect(ExProxy.class.getDeclaredField("subdata"), plugin));

        subdata.sendPacket(new PacketLinkProxy(plugin, 0));
        subdata.sendPacket(new PacketDownloadLang());
        subdata.sendPacket(new PacketDownloadPlatformInfo(platform -> {
            // Host was reloaded since we last synced: drop all cached server registrations
            if (plugin.lastReload != platform.getMap("subservers").getLong("last-reload")) {
                net.ME1312.SubServers.Velocity.Library.Compatibility.Logger.get("SubServers").info("Resetting Server Data");
                for (RegisteredServer server : ExProxy.getInstance().getAllServers()) ExProxy.getInstance().unregisterServer(server.getServerInfo());
                plugin.servers.clear();
                plugin.lastReload = platform.getMap("subservers").getLong("last-reload");
            }

            /*
            try {
                ProxyConfig config = ExProxy.getInstance().getConfiguration(); // TODO maybe?
                if (plugin.config.get().getMap("Sync", new ObjectMap<>()).getBoolean("Forced-Hosts", true))
                    Util.reflect(ListenerInfo.class.getDeclaredField("forcedHosts"), listeners.get(i), platform.getMap("bungee").getMapList("listeners").get(i).getMap("forced-hosts").get());
                if (plugin.config.get().getMap("Sync", new ObjectMap<>()).getBoolean("Motd", false))
                    Util.reflect(ListenerInfo.class.getDeclaredField("motd"), listeners.get(i), platform.getMap("bungee").getMapList("listeners").get(i).getString("motd"));
                if (plugin.config.get().getMap("Sync", new ObjectMap<>()).getBoolean("Player-Limit", false))
                    Util.reflect(ListenerInfo.class.getDeclaredField("maxPlayers"), listeners.get(i), platform.getMap("bungee").getMapList("listeners").get(i).getInt("player-limit"));
                if (plugin.config.get().getMap("Sync", new ObjectMap<>()).getBoolean("Server-Priorities", true))
                    Util.reflect(ListenerInfo.class.getDeclaredField("serverPriority"), listeners.get(i), platform.getMap("bungee").getMapList("listeners").get(i).getStringList("priorities"));
                if (plugin.config.get().getMap("Sync", new ObjectMap<>()).getBoolean("Disabled-Commands", false))
                    Util.reflect(Configuration.class.getDeclaredField("disabledCommands"), plugin.getConfig(), platform.getMap("bungee").getStringList("disabled-cmds"));
                if (plugin.config.get().getMap("Sync", new ObjectMap<>()).getBoolean("Player-Limit", false))
                    Util.reflect(Configuration.class.getDeclaredField("playerLimit"), plugin.getConfig(), platform.getMap("bungee").getInt("player-limit"));
            } catch (Exception e) {
                net.ME1312.SubServers.Velocity.Library.Compatibility.Logger.get("SubServers").info("Problem converting synced BungeeCord configuration options");
                e.printStackTrace();
            }
            */

            // Keep cached players that are still connected locally; evict the rest
            ArrayList<CachedPlayer> localPlayers = new ArrayList<CachedPlayer>();
            for (UUID id : new ArrayList<UUID>(plugin.rPlayers.keySet())) {
                if (ExProxy.getInstance().getPlayer(id).isPresent()) {
                    localPlayers.add(plugin.rPlayers.get(id));
                } else {
                    plugin.rPlayerLinkS.remove(id);
                    plugin.rPlayerLinkP.remove(id);
                    plugin.rPlayers.remove(id);
                }
            }
            subdata.sendPacket(new PacketExSyncPlayer(null, localPlayers.toArray(new CachedPlayer[0])));

            plugin.api.getServers(servers -> {
                // Merge host-side server list; anything left in localServers no longer exists remotely
                ArrayList<ServerData> localServers = new ArrayList<ServerData>(plugin.servers.values());
                for (Server server : servers.values()) {
                    ExProxy.getInstance().getServer(server.getName()).map(RegisteredServer::getServerInfo).map(plugin::getData).ifPresent(localServers::remove);
                    plugin.merge(server);
                }
                for (ServerData server : localServers) {
                    plugin.remove(new SubRemoveServerEvent(null, null, server.getName()));
                }

                // Rebuild the remote-player caches from the authoritative host list
                plugin.api.getRemotePlayers(players -> {
                    synchronized (plugin.rPlayers) {
                        plugin.rPlayerLinkS.clear();
                        plugin.rPlayerLinkP.clear();
                        plugin.rPlayers.clear();
                        for (RemotePlayer player : players.values()) {
                            plugin.rPlayerLinkP.put(player.getUniqueId(), player.getProxyName().toLowerCase());
                            plugin.rPlayers.put(player.getUniqueId(), (CachedPlayer) player);
                            ExProxy.getInstance().getServer(player.getServerName()).map(RegisteredServer::getServerInfo).map(plugin::getData).ifPresent(server ->
                                    plugin.rPlayerLinkS.put(player.getUniqueId(), server)
                            );
                        }
                    }
                });
            });
        }));
        subdata.on.ready(client -> ExProxy.getInstance().getEventManager().fire(new SubNetworkConnectEvent((SubDataClient) client)));
        subdata.on.closed(client -> {
            SubNetworkDisconnectEvent event = new SubNetworkDisconnectEvent(client.value(), client.key());
            ExProxy.getInstance().getEventManager().fire(event);

            if (plugin.running) {
                // Still running: schedule a reconnect via the plugin's private connect() method
                Logger log = net.ME1312.SubServers.Velocity.Library.Compatibility.Logger.get("SubData");
                Try.all.run(() -> Util.reflect(ExProxy.class.getDeclaredMethod("connect", Logger.class, Pair.class), plugin, log, client));
            } else map.put(0, null);
        });

        return subdata;
    }

    /**
     * Convenience overload: open the primary connection with the default
     * channel-0 logger.
     */
    public SubDataClient open(InetAddress address, int port) throws IOException {
        return open(getLogger(0), address, port);
    }
}
package SubSystems; import Utilities.TrajectorySmoother; import edu.wpi.first.wpilibj.util.BoundaryException; /** * Class implements a PIV Control Loop. * * Does all computation synchronously (i.e. the calculate() function must be * called by the user from his own thread) */ public class FeedforwardPIV { private double m_P; // factor for "proportional" control private double m_I; // factor for "integral" control private double m_V; // factor for "derivative of velocity" control private double m_ffV; // feedforward velocity private double m_ffA; // feedforward acceleration private double m_maximumOutput = 1.0; // |maximum output| private double m_minimumOutput = -1.0; // |minimum output| private double m_maximumInput = 0.0; // maximum input - limit setpoint to this private double m_minimumInput = 0.0; // minimum input - limit setpoint to this private double m_prevError = 0.0; // the prior sensor input (used to compute velocity) private double m_integralError = 0.0; //the sum of the errors for use in the integral calc protected double m_setpoint = 0.0; private double m_error = 0.0; private double m_result = 0.0; private double m_last_input = Double.NaN; public FeedforwardPIV() { m_P = m_I = m_V = m_ffV = m_ffA = 0.0; } /** * Allocate a PID object with the given constants for P, I, V * @param Kp the proportional coefficient * @param Ki the integral coefficient * @param Kv the velocity differential coefficient * @param KffV the feedforward velocity gain * @param KffA the feedforward acceleration gain */ public FeedforwardPIV(double Kp, double Ki, double Kv, double KffV, double KffA) { setParams(Kp,Ki,Kv,KffV,KffA); } public synchronized double calculate(double desiredPosition, double desiredSpeed, double desiredAccel, double currentPosition, double currentVelocity, double dt) { m_last_input = currentPosition; m_error = desiredPosition - currentPosition; m_integralError += m_error - currentPosition; m_result = (m_P * m_error + m_I * m_integralError + m_V * ((m_error - 
m_prevError)/dt - desiredSpeed)) + m_ffV * desiredSpeed + m_ffA * desiredAccel; m_prevError = m_error; if (m_result > m_maximumOutput) { m_result = m_maximumOutput; } else if (m_result < m_minimumOutput) { m_result = m_minimumOutput; } return m_result; } /** * Read the input, calculate the output accordingly, and write to the output. * This should be called at a constant rate by the user (ex. in a timed thread) */ public synchronized double calculate(TrajectorySmoother trajectory, double currentPosition, double currentVelocity, double dt) { return calculate(trajectory.getPosition(), trajectory.getVelocity(), trajectory.getAcceleration(), currentPosition, currentVelocity, dt); } /** * Return the current PID result * This is always centered on zero and constrained the the max and min outs * @return the latest calculated output */ public synchronized double get() { return m_result; } /** * Sets the maximum and minimum values expected from the input. * * @param minimumInput the minimum value expected from the input * @param maximumInput the maximum value expected from the output */ public synchronized void setInputRange(double minimumInput, double maximumInput) { if (minimumInput > maximumInput) { throw new BoundaryException("Lower bound is greater than upper bound"); } m_minimumInput = minimumInput; m_maximumInput = maximumInput; setSetpoint(m_setpoint); } /** * Sets the minimum and maximum values to write. 
* * @param minimumOutput the minimum value to write to the output * @param maximumOutput the maximum value to write to the output */ public synchronized void setOutputRange(double minimumOutput, double maximumOutput) { if (minimumOutput > maximumOutput) { throw new BoundaryException("Lower bound is greater than upper bound"); } m_minimumOutput = minimumOutput; m_maximumOutput = maximumOutput; } /** * Set the setpoint for the PID controller * * @param setpoint the desired setpoint */ public synchronized void setSetpoint(double setpoint) { if (m_maximumInput > m_minimumInput) { if (setpoint > m_maximumInput) { m_setpoint = m_maximumInput; } else if (setpoint < m_minimumInput) { m_setpoint = m_minimumInput; } else { m_setpoint = setpoint; } } else { m_setpoint = setpoint; } } /** * Returns the current setpoint of the PID controller * * @return the current setpoint */ public synchronized double getSetpoint() { return m_setpoint; } /** * Returns the current difference of the input from the setpoint * * @return the current error */ public synchronized double getError() { return m_error; } /** * Return true if the error is within the tolerance * * @return true if the error is less than the tolerance */ public synchronized boolean onTarget(double tolerance) { if( m_last_input != Double.NaN && Math.abs(m_last_input - m_setpoint ) < tolerance ) { return true; } else { return false; } } /** * Reset all internal terms. */ public synchronized void reset() { m_last_input = Double.NaN; m_prevError = 0; m_integralError = 0; m_result = 0; m_setpoint = 0; } public synchronized String getState() { String lState = ""; lState += "Kp: " + m_P + "\n"; lState += "Ki: " + m_I + "\n"; lState += "Kv: " + m_V + "\n"; lState += "KffV: " + m_ffV + "\n"; lState += "KffA: " + m_ffA + "\n"; return lState; } public synchronized final void setParams(double Kp, double Ki, double Kv, double KffV, double KffA) { m_P = Kp; m_I = Ki; m_V = Kv; m_ffV = KffV; m_ffA = KffA; } }
/*
 * Copyright (c) 2006 Pyxis Technologies inc.
 *
 * This is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This software is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA,
 * or see the FSF site: http://www.fsf.org.
 */
package com.greenpepper.interpreter.collection;

import com.greenpepper.*;
import com.greenpepper.annotation.Annotations;
import com.greenpepper.call.Annotate;
import com.greenpepper.call.Compile;
import com.greenpepper.interpreter.AbstractTableInterpreter;
import com.greenpepper.interpreter.CollectionHeaderForm;
import com.greenpepper.interpreter.column.Column;
import com.greenpepper.interpreter.column.ExpectedColumn;
import com.greenpepper.interpreter.column.NullColumn;
import com.greenpepper.reflect.CollectionProvider;
import com.greenpepper.reflect.Fixture;
import com.greenpepper.reflect.NoSuchMessageException;
import com.greenpepper.util.StringUtil;

import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

import static com.greenpepper.GreenPepper.canContinue;
import static com.greenpepper.GreenPepper.shouldStop;
import static com.greenpepper.annotation.Annotations.exception;
import static com.greenpepper.annotation.Annotations.ignored;
import static com.greenpepper.util.ExampleUtil.contentOf;

// TODO: STATS compile stats here and in derived classes ( no test for that yet )

/**
 * <p>Abstract CollectionInterpreter class.</p>
 *
 * Base class for table interpreters that compare a table of expected rows
 * against a collection of objects produced by the fixture. Subclasses decide
 * whether missing and/or surplus rows count as failures via
 * {@link #mustProcessMissing()} / {@link #mustProcessSurplus()}.
 *
 * @author oaouattara
 * @version $Id: $Id
 */
public abstract class CollectionInterpreter extends AbstractTableInterpreter {

    /**
     * <p>statistics.</p>
     *
     * @return a {@link com.greenpepper.Statistics} object.
     */
    public Statistics statistics() {
        return this.stats;
    }

    /**
     * <p>Constructor for CollectionInterpreter.</p>
     *
     * @param fixture a {@link com.greenpepper.reflect.Fixture} object.
     */
    protected CollectionInterpreter( Fixture fixture ) {
        super(fixture);
    }

    /**
     * Parse a single header cell into an expected column. Parse failures are
     * annotated on the cell, counted as exceptions, and a {@link NullColumn}
     * placeholder is returned so interpretation can continue.
     */
    protected ExpectedColumn parseColumn(Example header ) {
        try {
            return CollectionHeaderForm.parse(header.getContent()).selectColumn();
        } catch (Exception e) {
            header.annotate( exception( e ) );
            stats.exception();
            return new NullColumn();
        }
    }

    /**
     * <p>toList.</p>
     *
     * Normalizes an array or a Collection to a List; any other input
     * (including null) yields null.
     *
     * @param results a {@link java.lang.Object} object.
     * @return a {@link java.util.List} object, or null if not convertible.
     */
    private List<?> toList(Object results) {
        if (results instanceof Object[]) {
            return Arrays.asList( (Object[]) results );
        }
        if (results instanceof Collection) {
            return new ArrayList<Object>( (Collection<?>) results );
        }
        return null;
    }

    /**
     * <p>executeRow.</p>
     *
     * Checks each cell of a matched row against the corresponding expected
     * column; cells beyond the header width are marked ignored. The row's
     * aggregate result is folded into the interpreter statistics.
     *
     * @param valuesRow a {@link com.greenpepper.Example} object.
     * @param headers a {@link com.greenpepper.Example} object.
     * @param rowFixtureAdapter a {@link com.greenpepper.reflect.Fixture} object.
     */
    private void executeRow(Example valuesRow, Example headers, Fixture rowFixtureAdapter) {
        valuesRow.annotate( Annotations.right() );

        Statistics rowStats = new Statistics();

        for (int i = 0; i != valuesRow.remainings(); ++i) {
            Example cell = valuesRow.at( i );

            if (i < headers.remainings()) {
                // We can do the cast because #parseColumn returns an ExpectedColumn
                ExpectedColumn column = (ExpectedColumn) columns[i];
                try {
                    chekOneCell(rowFixtureAdapter, rowStats, cell, column);
                } catch (Exception e) {
                    cell.annotate( exception( e ) );
                    stats.exception();
                }
            } else {
                cell.annotate( ignored( cell.getContent() ) );
            }
        }

        applyRowStatistic(rowStats);
    }

    /**
     * <p>addSurplusRow.</p>
     *
     * Appends a row rendering a fixture object that was present in the
     * results but not expected in the table; the row is annotated as surplus
     * and counted as one wrong result.
     *
     * @param example a {@link com.greenpepper.Example} object.
     * @param headers a {@link com.greenpepper.Example} object.
     * @param rowFixtureAdapter a {@link com.greenpepper.reflect.Fixture} object.
     */
    protected void addSurplusRow( Example example, Example headers, Fixture rowFixtureAdapter) {
        Example row = example.addSibling();

        for (int i = 0; i < headers.remainings(); i++) {
            ExpectedColumn column = (ExpectedColumn) columns[i];
            Example cell = row.addChild();
            try {
                Call call = new Call( rowFixtureAdapter.check( column.header() ) );
                Object actual = call.execute();
                cell.setContent( TypeConversion.toString(actual));
                cell.annotate( Annotations.surplus() );
                if (i == 0) // Notify test listener on first cell only
                {
                    stats.wrong();
                }
            } catch (Exception e) {
                // TODO: STATS count stats?
                cell.annotate( ignored( e ) );
            }
        }
    }

    /**
     * <p>missingRow.</p>
     *
     * Annotates a table row that had no matching object in the results as
     * missing and counts it as one wrong result.
     *
     * @param row a {@link com.greenpepper.Example} object.
     */
    protected void missingRow( Example row ) {
        Example firstChild = row.firstChild();
        firstChild.annotate( Annotations.missing() );
        stats.wrong();

        if (firstChild.hasSibling()) {
            for (Example cell : firstChild.nextSibling()) {
                cell.annotate( Annotations.missing() );
            }
        }
    }

    /**
     * Looks for a no-arg method on the fixture target annotated with
     * {@code @CollectionProvider} and, if found, invokes it and converts
     * the result to a list; returns null when no such method exists.
     */
    private List<?> getCollectionProvider() {
        Object target = fixture.getTarget();

        for (Method method : target.getClass().getMethods()) {
            if (method.isAnnotationPresent( CollectionProvider.class )) {
                return toList(invoke( target, method ));
            }
        }

        return null;
    }

    // Best-effort reflective call; any failure is deliberately swallowed and
    // reported as "no result" (null) so the caller can try the next source.
    private Object invoke( Object target, Method method ) {
        try {
            return method.invoke( target );
        } catch (Exception e) {
            return null;
        }
    }

    /**
     * <p>getFixtureList.</p>
     *
     * Resolves the collection under test, trying in order: a
     * {@code @CollectionProvider} method, the fixture target itself (if it is
     * an array or Collection), then a "query" message on the fixture. Each
     * element is wrapped in its own fixture.
     *
     * @return a {@link java.util.List} object.
     * @throws java.lang.Exception if any.
     * @throws IllegalArgumentException if no source yields an array or Collection.
     */
    public List<Fixture> getFixtureList() throws Exception {
        // Raw type replaced with a bounded wildcard; element type is unknown by design.
        List<?> results = getCollectionProvider();

        if (results == null) {
            results = toList( fixture.getTarget() );
        }

        if (results == null) {
            try {
                Call query = new Call( fixture.check( "query" ) );
                results = toList( query.execute() );
            } catch (NoSuchMessageException ignored) {}
        }

        if (results == null) throw new IllegalArgumentException( "results parameter is neither an Object[] nor a Collection" );

        List<Fixture> fixtures = new ArrayList<Fixture>();
        for (Object object : results) {
            fixtures.add( fixture.fixtureFor( object ) );
        }
        return fixtures;
    }

    /**
     * <p>mustProcessMissing.</p>
     *
     * @return a boolean.
     */
    protected boolean mustProcessMissing() {
        return false;
    }

    /**
     * <p>mustProcessSurplus.</p>
     *
     * @return a boolean.
     */
    protected boolean mustProcessSurplus() {
        return false;
    }

    /** {@inheritDoc} */
    public void interpret( Specification specification ) {
        stats = new Statistics();

        Example table = specification.nextExample();
        columns = parseColumns(table);
        execute( table.at( 0, 1 ) );
        specification.exampleDone( stats );
    }

    /**
     * <p>execute.</p>
     *
     * Matches the table rows against the fixture collection, then checks
     * matched rows and (per subclass policy) reports missing and surplus
     * rows. Honors the global stop condition after every row.
     *
     * @param example a {@link com.greenpepper.Example} object.
     */
    public void execute( Example example ) {
        try {
            List<Fixture> fixtures = getFixtureList();

            Example headers = example.at( 0, 0 );
            if (columns == null) {
                columns = getHeaderColumns(headers);
            }
            if (example.hasSibling()) {
                RowFixtureSplitter splitter = new RowFixtureSplitter();

                splitter.split(example.at(1), fixtures, columns);

                for (RowFixture rowFixture : splitter.getMatch()) {
                    Example row = rowFixture.getRow();
                    executeRow(row.firstChild(), headers, rowFixture.getAdapter());
                    if (shouldStop(stats)) {
                        row.addChild().annotate(Annotations.stopped());
                        break;
                    }
                }

                if (mustProcessMissing() && canContinue(stats)) {
                    for (Example row : splitter.getMissing()) {
                        missingRow(row);
                        if (shouldStop(stats)) {
                            row.addChild().annotate(Annotations.stopped());
                            break;
                        }
                    }
                }

                if (mustProcessSurplus() && canContinue(stats)) {
                    for (Fixture adapter : splitter.getSurplus()) {
                        addSurplusRow(example, headers, adapter);
                        if (shouldStop(stats)) {
                            example.lastSibling().addChild().annotate(Annotations.stopped());
                            break;
                        }
                    }
                }
            }
        } catch (Exception e) {
            stats.exception();
            example.firstChild().annotate( exception( e ) );
            if (shouldStop( stats )) {
                example.addChild().annotate(Annotations.stopped());
            }
        }
    }

    // Parses every header cell into a column; unlike parseColumn(), a parse
    // failure here propagates to the caller.
    private Column[] getHeaderColumns(Example headersRow) throws Exception {
        ArrayList<Column> columns = new ArrayList<Column>();
        for (Example header: headersRow) {
            columns.add(CollectionHeaderForm.parse(header.getContent()).selectColumn());
        }
        return columns.toArray(new Column[columns.size()]);
    }

    /**
     * <p>applyRowStatistic.</p>
     *
     * Collapses a row's cell-level statistics into a single per-row verdict:
     * exception beats wrong beats right beats ignored.
     *
     * @param rowStats a {@link com.greenpepper.Statistics} object.
     */
    protected void applyRowStatistic(Statistics rowStats) {
        if (rowStats.exceptionCount() > 0) {
            stats.exception();
        } else if (rowStats.wrongCount() > 0) {
            stats.wrong();
        } else if (rowStats.rightCount() > 0) {
            stats.right();
        } else {
            stats.ignored();
        }
    }

    // NOTE(review): method name has a typo ("chek") but it is protected —
    // renaming could break subclass overrides, so the name is kept.
    // Blank cells are delegated to the column (which fills in the actual
    // value); non-blank cells are checked as an expectation.
    protected void chekOneCell(Fixture rowFixtureAdapter, Statistics rowStats, Example cell, ExpectedColumn column) throws Exception {
        if (StringUtil.isBlank(contentOf(cell))) {
            column.setCheck(rowFixtureAdapter.check(column.header()));
            column.doCell(cell, rowStats);
        } else {
            Call call = new Call( rowFixtureAdapter.check( column.header() ) );
            call.expect( cell.getContent() );
            call.will( Annotate.withDetails( cell ) );
            call.will( Compile.statistics( rowStats ) );
            call.execute();
        }
    }
}
package javagene.io;

import javagene.seq.*;
import javagene.util.*;

import java.io.*;
import java.util.*;

/**
 * Read and write sequences as Fasta files. A Fasta file has a single header line
 * of the following format:
 *<pre> >id description </pre>
 * where the > symbol is required, the id is a single word, and the description can
 * be many words, extending to the end of line. The proper interpretation of the id and
 * description fields is not fixed. Data lines immediately follow the header line. Multiple
 * sequences in a single file are separated from one another by a blank line.
 *
 * @author Hanno Hinsch
 */
public class Fasta {

    // Utility class: not instantiable.
    private Fasta() {}

    /**
     * Read a Fasta file containing a single sequence into a SeqBig object.
     *
     * @param filename The path to the file.
     * @return The SeqBig object.
     * @throws IOException Something went wrong -- check exception detail message.
     */
    public static SeqBig readBig( String filename ) throws IOException {
        return new SeqBig( filename );
    }

    /**
     * Read a file containing (possibly) multiple sequences.
     *
     * @param filename The path to the file.
     * @param bufferSize An estimate of the size of the largest sequence (hint: unless
     * you're short on RAM, generously round up. )
     * @return An ArrayList of SeqMem objects.
     * @throws IOException Something went wrong -- check exception detail message.
     */
    public static ArrayList<SeqI> readMany( String filename, int bufferSize ) throws IOException {
        Log.log( "Fasta: Reading " + filename );

        BufferedReader br = new BufferedReader(new FileReader( filename ));
        // Bug fix: the reader was previously only closed on the success path,
        // leaking the file handle when a parse error threw mid-read.
        try {
            ArrayList<SeqI> sequences = new ArrayList<SeqI>();
            StringBuffer temp = new StringBuffer( bufferSize );
            SeqFast seq = null;
            String id = null;
            String description = null;

            for( String s = br.readLine(); null != s; s = br.readLine() ) {
                s = s.trim();
                if( 0 < s.length() )  // skip blank lines
                {
                    if( s.charAt(0) == '>' ) {
                        // header line: first save the sequence accumulated so far, if any
                        if( id != null ) {
                            seq = new SeqFast(temp.toString(), id, description );
                            sequences.add( seq );
                            Log.log( "Fasta: " + seq.id() + " loaded." );
                            id = null;
                            temp.setLength( 0 );  // empty the buffer
                        }

                        // split ">id description" at the first space
                        int i = s.indexOf( ' ' );
                        if( i < 0 ) {
                            id = s.substring( 1 );  // whole line after >
                            description = "";
                        } else {
                            id = s.substring( 1, i );
                            // NOTE(review): this keeps the leading space in the
                            // description (substring(i), not i+1); write() then
                            // emits a doubled space. Kept as-is since stored
                            // descriptions may be compared elsewhere — confirm.
                            description = s.substring( i );
                        }
                    } else {
                        // sequence data must follow a header
                        if( id != null ) {
                            temp.append( s );
                        } else {
                            throw new IOException( "Unexpected char before >" );
                        }
                    }
                }
            }

            if( id != null )  // grab final sequence
            {
                seq = new SeqFast(temp.toString(), id, description );
                sequences.add( seq );
                Log.log( "Fasta: " + seq.id() + " loaded." );
            }

            return sequences;
        } finally {
            br.close();
        }
    }

    /**
     * Read a file containing a single sequence.
     *
     * @param filename The path to the file.
     * @param bufferSize An estimate of the size of the sequence (hint: unless
     * you're short on RAM, generously round up. )
     * @return A SeqMem object.
     * @throws IOException Something went wrong -- check exception detail message.
     */
    public static SeqI read( String filename, int bufferSize ) throws IOException {
        return ( readMany( filename, bufferSize )).get(0);
    }

    /**
     * Write an ArrayList of sequences to file.
     *
     * @param sequences The ArrayList of SeqI sequences.
     * @param filename The path to the file.
     * @throws IOException Something went wrong -- check exception detail message.
     */
    public static void write( ArrayList<SeqI> sequences, String filename ) throws IOException {
        Log.log( "Fasta: Writing " + filename );

        BufferedWriter bw = new BufferedWriter( new FileWriter( filename ));
        // Bug fix: close the writer even when a write throws, so partial
        // output is flushed/released and the handle does not leak.
        try {
            for( SeqI seq: sequences ) {
                // header: ">id description start-end"
                bw.write( ">" + seq.id() + " " + seq.description() + " " + (seq.bounds().bioStart()) + "-" + seq.bounds().bioEnd() );
                bw.newLine();

                // emit sequence data in 60-character lines, then the remainder
                LocIterator li = seq.bounds().iterator( 60, 60 );
                while( li.hasNext() ) {
                    bw.write( seq.toString( li.next()) );
                    bw.newLine();
                }
                bw.write( seq.toString( li.remainder()) );
                bw.newLine();

                bw.newLine();  // blank line separates sequences
            }
        } finally {
            bw.close();
        }
    }

    /**
     * Write a sequence to file.
     *
     * @param sequence The SeqI object to write.
     * @param filename The path to the file.
     * @throws IOException Something went wrong -- check exception detail message.
     */
    public static void write( SeqI sequence, String filename ) throws IOException {
        ArrayList<SeqI> list = new ArrayList<SeqI>();
        list.add( sequence );
        write( list, filename );
    }

    /**
     * Ad-hoc smoke test: loads a hard-coded file and logs the first sequences.
     * @deprecated
     */
    public static void main( String args[] ) throws IOException {
        ArrayList<SeqI> seqs = Fasta.readMany( "../rfam/rfam.fasta", 500 );

        int max = (seqs.size() < 10)?seqs.size():10;
        for( int i = 0; i < max; i++ ) {
            Log.log( seqs.get(i).id() + seqs.get(i).description() + "\n" );
            Log.log( seqs.get(i).toString() );
        }
    }
}
/*
 * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
 * under one or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information regarding copyright
 * ownership. Camunda licenses this file to you under the Apache License,
 * Version 2.0; you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.camunda.bpm.engine.test.bpmn.event.message;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.util.List;

import org.camunda.bpm.engine.ProcessEngineException;
import org.camunda.bpm.engine.RepositoryService;
import org.camunda.bpm.engine.RuntimeService;
import org.camunda.bpm.engine.TaskService;
import org.camunda.bpm.engine.impl.persistence.entity.EventSubscriptionEntity;
import org.camunda.bpm.engine.repository.DeploymentWithDefinitions;
import org.camunda.bpm.engine.repository.ProcessDefinition;
import org.camunda.bpm.engine.runtime.EventSubscription;
import org.camunda.bpm.engine.runtime.ProcessInstance;
import org.camunda.bpm.engine.task.Task;
import org.camunda.bpm.engine.test.ProcessEngineRule;
import org.camunda.bpm.engine.test.util.ProcessEngineTestRule;
import org.camunda.bpm.engine.test.util.ProvidedProcessEngineRule;
import org.camunda.bpm.model.bpmn.Bpmn;
import org.camunda.bpm.model.bpmn.BpmnModelInstance;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.rules.RuleChain;

import junit.framework.AssertionFailedError;

/**
 * Tests that message start event subscriptions are created, moved and removed
 * correctly when process definition versions are deployed and deleted.
 *
 * Invariant under test: at most one event subscription exists per message
 * start event, and it always points (via its configuration field) at the
 * latest still-existing version of the process definition.
 */
public class MessageStartEventSubscriptionTest {

  private static final String SINGLE_MESSAGE_START_EVENT_XML = "org/camunda/bpm/engine/test/bpmn/event/message/MessageStartEventTest.testSingleMessageStartEvent.bpmn20.xml";
  private static final String ONE_TASK_PROCESS = "org/camunda/bpm/engine/test/api/oneTaskProcess.bpmn20.xml";
  private static final String MESSAGE_EVENT_PROCESS = "singleMessageStartEvent";

  // Same process key as the XML resource, but WITHOUT a message start event;
  // deploying this as a new version must remove the message subscription.
  private static final BpmnModelInstance MODEL_WITHOUT_MESSAGE = Bpmn.createExecutableProcess(MESSAGE_EVENT_PROCESS)
      .startEvent()
      .userTask()
      .endEvent()
      .done();

  // An unrelated second process with its own message start event.
  private static final BpmnModelInstance MODEL = Bpmn.createExecutableProcess("another")
      .startEvent()
      .message("anotherMessage")
      .userTask()
      .endEvent()
      .done();

  protected ProcessEngineRule engineRule = new ProvidedProcessEngineRule();
  protected ProcessEngineTestRule testRule = new ProcessEngineTestRule(engineRule);

  // testRule must run inside engineRule so the engine exists before deployments.
  @Rule
  public RuleChain ruleChain = RuleChain.outerRule(engineRule).around(testRule);

  @Rule
  public ExpectedException thrown= ExpectedException.none();

  protected RepositoryService repositoryService;
  protected RuntimeService runtimeService;
  protected TaskService taskService;

  @Before
  public void setUp() throws Exception {
    repositoryService = engineRule.getRepositoryService();
    runtimeService = engineRule.getRuntimeService();
    taskService = engineRule.getTaskService();
  }

  @Test
  public void testUpdateProcessVersionCancelsSubscriptions() {
    // given one deployed version with exactly one message subscription
    testRule.deploy(SINGLE_MESSAGE_START_EVENT_XML);

    List<EventSubscription> eventSubscriptions = runtimeService.createEventSubscriptionQuery().list();
    List<ProcessDefinition> processDefinitions = repositoryService.createProcessDefinitionQuery().list();

    assertEquals(1, eventSubscriptions.size());
    assertEquals(1, processDefinitions.size());

    // when
    testRule.deploy(SINGLE_MESSAGE_START_EVENT_XML);

    // then: still one subscription, now pointing at version 2 only
    List<EventSubscription> newEventSubscriptions = runtimeService.createEventSubscriptionQuery().list();
    List<ProcessDefinition> newProcessDefinitions = repositoryService.createProcessDefinitionQuery().list();

    assertEquals(1, newEventSubscriptions.size());
    assertEquals(2, newProcessDefinitions.size());
    for (ProcessDefinition processDefinition : newProcessDefinitions) {
      if (processDefinition.getVersion() == 1) {
        for (EventSubscription subscription : newEventSubscriptions) {
          EventSubscriptionEntity subscriptionEntity = (EventSubscriptionEntity) subscription;
          // the subscription's configuration holds the process definition id
          assertFalse(subscriptionEntity.getConfiguration().equals(processDefinition.getId()));
        }
      } else {
        for (EventSubscription subscription : newEventSubscriptions) {
          EventSubscriptionEntity subscriptionEntity = (EventSubscriptionEntity) subscription;
          assertTrue(subscriptionEntity.getConfiguration().equals(processDefinition.getId()));
        }
      }
    }
    assertFalse(eventSubscriptions.equals(newEventSubscriptions));
  }

  @Test
  public void testEventSubscriptionAfterDeleteLatestProcessVersion() {
    // given a deployed process
    testRule.deploy(SINGLE_MESSAGE_START_EVENT_XML);
    ProcessDefinition processDefinitionV1 = repositoryService.createProcessDefinitionQuery().singleResult();
    assertNotNull(processDefinitionV1);

    // deploy second version of the process
    String deploymentId = testRule.deploy(SINGLE_MESSAGE_START_EVENT_XML).getId();

    // when
    repositoryService.deleteDeployment(deploymentId, true);

    // then: the subscription falls back to version 1
    ProcessDefinition processDefinition = repositoryService.createProcessDefinitionQuery().processDefinitionKey(MESSAGE_EVENT_PROCESS).singleResult();
    assertEquals(processDefinitionV1.getId(), processDefinition.getId());

    EventSubscriptionEntity eventSubscription = (EventSubscriptionEntity) runtimeService.createEventSubscriptionQuery().singleResult();
    assertNotNull(eventSubscription);

    assertEquals(processDefinitionV1.getId(), eventSubscription.getConfiguration());
  }

  @Test
  public void testStartInstanceAfterDeleteLatestProcessVersionByIds() {
    // given a deployed process
    testRule.deploy(SINGLE_MESSAGE_START_EVENT_XML);
    // deploy second version of the process
    DeploymentWithDefinitions deployment = testRule.deploy(SINGLE_MESSAGE_START_EVENT_XML);
    ProcessDefinition processDefinition = deployment.getDeployedProcessDefinitions().get(0);

    // delete it
    repositoryService.deleteProcessDefinitions()
      .byIds(processDefinition.getId())
      .delete();

    // when: starting by message must reach the remaining version 1
    ProcessInstance processInstance = runtimeService.startProcessInstanceByMessage("newInvoiceMessage");

    // then
    assertFalse(processInstance.isEnded());
    Task task = taskService.createTaskQuery().singleResult();
    assertNotNull(task);

    taskService.complete(task.getId());

    ProcessInstance completedInstance = runtimeService
        .createProcessInstanceQuery()
        .processInstanceId(processInstance.getId())
        .singleResult();

    if (completedInstance != null) {
      throw new AssertionFailedError("Expected finished process instance '" + completedInstance + "' but it was still in the db");
    }
  }

  @Test
  public void testStartInstanceAfterDeleteLatestProcessVersion() {
    // given a deployed process
    testRule.deploy(SINGLE_MESSAGE_START_EVENT_XML);
    // deploy second version of the process
    String deploymentId = testRule.deploy(SINGLE_MESSAGE_START_EVENT_XML).getId();
    org.camunda.bpm.engine.repository.Deployment deployment = repositoryService.createDeploymentQuery().deploymentId(deploymentId).singleResult();

    // delete it
    repositoryService.deleteDeployment(deployment.getId(), true);

    // when: starting by key resolves to the remaining version 1
    ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("singleMessageStartEvent");

    assertFalse(processInstance.isEnded());

    Task task = taskService.createTaskQuery().singleResult();
    assertNotNull(task);

    taskService.complete(task.getId());

    ProcessInstance completedInstance = runtimeService
        .createProcessInstanceQuery()
        .processInstanceId(processInstance.getId())
        .singleResult();

    if (completedInstance != null) {
      throw new AssertionFailedError("Expected finished process instance '" + completedInstance + "' but it was still in the db");
    }
  }

  @Test
  public void testVersionWithoutConditionAfterDeleteLatestProcessVersionWithCondition() {
    // given a process
    testRule.deploy(MODEL_WITHOUT_MESSAGE);

    // deploy second version of the process
    String deploymentId = testRule.deploy(SINGLE_MESSAGE_START_EVENT_XML).getId();
    org.camunda.bpm.engine.repository.Deployment deployment = repositoryService.createDeploymentQuery().deploymentId(deploymentId).singleResult();

    // delete it
    repositoryService.deleteDeployment(deployment.getId(), true);

    thrown.expect(ProcessEngineException.class);
    thrown.expectMessage("No subscriptions were found during evaluation of the conditional start events.");

    // when: version 1 has no condition, so evaluation must fail
    runtimeService
      .createConditionEvaluation()
      .setVariable("foo", 1)
      .evaluateStartConditions();
  }

  @Test
  public void testSubscriptionsWhenDeletingProcessDefinitionsInOneTransactionByKeys() {
    // given three versions of the process
    testRule.deploy(SINGLE_MESSAGE_START_EVENT_XML);
    testRule.deploy(SINGLE_MESSAGE_START_EVENT_XML);
    testRule.deploy(SINGLE_MESSAGE_START_EVENT_XML);

    // when
    repositoryService.deleteProcessDefinitions()
      .byKey(MESSAGE_EVENT_PROCESS)
      .delete();

    // then: all versions gone, so no subscription may remain
    assertEquals(0, runtimeService.createEventSubscriptionQuery().count());
  }

  @Test
  public void testSubscriptionsWhenDeletingGroupsProcessDefinitionsByIds() {
    // given: three process keys, several versions each; only some are deleted
    String processDefId11 = deployProcess(SINGLE_MESSAGE_START_EVENT_XML);
    String processDefId12 = deployProcess(SINGLE_MESSAGE_START_EVENT_XML);
    String processDefId13 = testRule.deployAndGetDefinition(MODEL_WITHOUT_MESSAGE).getId();

    String processDefId21 = deployModel(MODEL);
    String processDefId22 = deployModel(MODEL);
    String processDefId23 = deployModel(MODEL);

    String processDefId31 = deployProcess(ONE_TASK_PROCESS);
    @SuppressWarnings("unused")
    String processDefId32 = deployProcess(ONE_TASK_PROCESS);

    // assume
    assertEquals(1, runtimeService.createEventSubscriptionQuery().count());

    // when
    repositoryService.deleteProcessDefinitions()
      .byIds(processDefId21,processDefId23,processDefId13,
          processDefId12,processDefId31)
      .delete();

    // then: surviving latest-with-message versions are 11 and 22
    List<EventSubscription> list = runtimeService.createEventSubscriptionQuery().list();
    assertEquals(2, list.size());
    for (EventSubscription eventSubscription : list) {
      EventSubscriptionEntity eventSubscriptionEntity = (EventSubscriptionEntity) eventSubscription;
      if (!eventSubscriptionEntity.getConfiguration().equals(processDefId11)
          && !eventSubscriptionEntity.getConfiguration().equals(processDefId22)) {
        fail("This process definition '" + eventSubscriptionEntity.getConfiguration() + "' and the respective event subscription should not exist.");
      }
    }
  }

  @Test
  public void testSubscriptionsWhenDeletingProcessDefinitionsInOneTransactionByIdOrdered() {
    // given
    String definitionId1 = deployProcess(SINGLE_MESSAGE_START_EVENT_XML);
    String definitionId2 = deployProcess(SINGLE_MESSAGE_START_EVENT_XML);
    String definitionId3 = deployProcess(SINGLE_MESSAGE_START_EVENT_XML);

    // when: ids passed oldest-first
    repositoryService.deleteProcessDefinitions()
      .byIds(definitionId1, definitionId2, definitionId3)
      .delete();

    // then
    assertEquals(0, runtimeService.createEventSubscriptionQuery().count());
  }

  @Test
  public void testSubscriptionsWhenDeletingProcessDefinitionsInOneTransactionByIdReverseOrder() {
    // given
    String definitionId1 = deployProcess(SINGLE_MESSAGE_START_EVENT_XML);
    String definitionId2 = deployProcess(SINGLE_MESSAGE_START_EVENT_XML);
    String definitionId3 = deployProcess(SINGLE_MESSAGE_START_EVENT_XML);

    // when: ids passed newest-first; result must be order-independent
    repositoryService.deleteProcessDefinitions()
      .byIds(definitionId3, definitionId2, definitionId1)
      .delete();

    // then
    assertEquals(0, runtimeService.createEventSubscriptionQuery().count());
  }

  @Test
  public void testMixedSubscriptionsWhenDeletingProcessDefinitionsInOneTransactionById1() {
    // given first version without condition
    String definitionId1 = deployModel(MODEL_WITHOUT_MESSAGE);
    String definitionId2 = deployProcess(SINGLE_MESSAGE_START_EVENT_XML);
    String definitionId3 = deployProcess(SINGLE_MESSAGE_START_EVENT_XML);

    // when
    repositoryService.deleteProcessDefinitions()
      .byIds(definitionId1, definitionId2, definitionId3)
      .delete();

    // then
    assertEquals(0, runtimeService.createEventSubscriptionQuery().count());
  }

  @Test
  public void testMixedSubscriptionsWhenDeletingProcessDefinitionsInOneTransactionById2() {
    // given second version without condition
    String definitionId1 = deployProcess(SINGLE_MESSAGE_START_EVENT_XML);
    String definitionId2 = deployModel(MODEL_WITHOUT_MESSAGE);
    String definitionId3 = deployProcess(SINGLE_MESSAGE_START_EVENT_XML);

    // when
    repositoryService.deleteProcessDefinitions()
      .byIds(definitionId1, definitionId2, definitionId3)
      .delete();

    // then
    assertEquals(0, runtimeService.createEventSubscriptionQuery().count());
  }

  @Test
  public void testMixedSubscriptionsWhenDeletingProcessDefinitionsInOneTransactionById3() {
    // given third version without condition
    String definitionId1 = deployProcess(SINGLE_MESSAGE_START_EVENT_XML);
    String definitionId2 = deployProcess(SINGLE_MESSAGE_START_EVENT_XML);
    String definitionId3 = deployModel(MODEL_WITHOUT_MESSAGE);

    // when
    repositoryService.deleteProcessDefinitions()
      .byIds(definitionId1, definitionId2, definitionId3)
      .delete();

    // then
    assertEquals(0, runtimeService.createEventSubscriptionQuery().count());
  }

  @Test
  public void testMixedSubscriptionsWhenDeletingTwoProcessDefinitionsInOneTransaction1() {
    // given first version without condition
    String definitionId1 = deployModel(MODEL_WITHOUT_MESSAGE);
    String definitionId2 = deployProcess(SINGLE_MESSAGE_START_EVENT_XML);
    String definitionId3 = deployProcess(SINGLE_MESSAGE_START_EVENT_XML);

    // when
    repositoryService.deleteProcessDefinitions()
      .byIds(definitionId2, definitionId3)
      .delete();

    // then: remaining version 1 has no message event, so no subscription
    assertEquals(0, runtimeService.createEventSubscriptionQuery().count());
    assertEquals(definitionId1, repositoryService.createProcessDefinitionQuery().singleResult().getId());
  }

  @Test
  public void testMixedSubscriptionsWhenDeletingTwoProcessDefinitionsInOneTransaction2() {
    // given second version without condition
    String definitionId1 = deployProcess(SINGLE_MESSAGE_START_EVENT_XML);
    String definitionId2 = deployModel(MODEL_WITHOUT_MESSAGE);
    String definitionId3 = deployProcess(SINGLE_MESSAGE_START_EVENT_XML);

    // when
    repositoryService.deleteProcessDefinitions()
      .byIds(definitionId2, definitionId3)
      .delete();

    // then: subscription falls back to version 1 (which has the message event)
    assertEquals(1, runtimeService.createEventSubscriptionQuery().count());
    assertEquals(definitionId1, ((EventSubscriptionEntity) runtimeService.createEventSubscriptionQuery().singleResult()).getConfiguration());
  }

  @Test
  public void testMixedSubscriptionsWhenDeletingTwoProcessDefinitionsInOneTransaction3() {
    // given third version without condition
    String definitionId1 = deployProcess(SINGLE_MESSAGE_START_EVENT_XML);
    String definitionId2 = deployProcess(SINGLE_MESSAGE_START_EVENT_XML);
    String definitionId3 = deployModel(MODEL_WITHOUT_MESSAGE);

    // when
    repositoryService.deleteProcessDefinitions()
      .byIds(definitionId2, definitionId3)
      .delete();

    // then
    assertEquals(1, runtimeService.createEventSubscriptionQuery().count());
    assertEquals(definitionId1, ((EventSubscriptionEntity) runtimeService.createEventSubscriptionQuery().singleResult()).getConfiguration());
  }

  /**
   * Tests the case, when no new subscription is needed, as it is not the latest version, that is being deleted.
   */
  @Test
  public void testDeleteNotLatestVersion() {
    @SuppressWarnings("unused")
    String definitionId1 = deployProcess(SINGLE_MESSAGE_START_EVENT_XML);
    String definitionId2 = deployProcess(SINGLE_MESSAGE_START_EVENT_XML);
    String definitionId3 = deployProcess(SINGLE_MESSAGE_START_EVENT_XML);

    // when
    repositoryService.deleteProcessDefinitions()
      .byIds(definitionId2)
      .delete();

    // then: subscription still points at the untouched latest version 3
    assertEquals(1, runtimeService.createEventSubscriptionQuery().count());
    assertEquals(definitionId3, ((EventSubscriptionEntity) runtimeService.createEventSubscriptionQuery().singleResult()).getConfiguration());
  }

  /**
   * Tests the case when the previous of the previous version will be needed.
   */
  @Test
  public void testSubscribePreviousPreviousVersion() {

    String definitionId1 = deployProcess(SINGLE_MESSAGE_START_EVENT_XML);
    String definitionId2 = deployProcess(SINGLE_MESSAGE_START_EVENT_XML);
    String definitionId3 = deployProcess(SINGLE_MESSAGE_START_EVENT_XML); //we're deleting version 3, but as version 2 is already deleted, we must subscribe version 1

    // when
    repositoryService.deleteProcessDefinitions()
      .byIds(definitionId2, definitionId3)
      .delete();

    // then
    assertEquals(1, runtimeService.createEventSubscriptionQuery().count());
    assertEquals(definitionId1, ((EventSubscriptionEntity) runtimeService.createEventSubscriptionQuery().singleResult()).getConfiguration());
  }

  /**
   * Deploys the classpath resource and returns the id of the single
   * process definition it contains.
   */
  protected String deployProcess(String resourcePath) {
    List<ProcessDefinition> deployedProcessDefinitions = testRule.deploy(resourcePath).getDeployedProcessDefinitions();
    assertEquals(1, deployedProcessDefinitions.size());
    return deployedProcessDefinitions.get(0).getId();
  }

  /**
   * Deploys the given BPMN model instance and returns the id of the single
   * process definition it contains.
   */
  protected String deployModel(BpmnModelInstance model) {
    List<ProcessDefinition> deployedProcessDefinitions = testRule.deploy(model).getDeployedProcessDefinitions();
    assertEquals(1, deployedProcessDefinitions.size());
    String definitionId2 = deployedProcessDefinitions.get(0).getId();
    return definitionId2;
  }
}
/**
 * Copyright 2005-2013 Restlet S.A.S.
 *
 * The contents of this file are subject to the terms of one of the following
 * open source licenses: Apache 2.0 or LGPL 3.0 or LGPL 2.1 or CDDL 1.0 or EPL
 * 1.0 (the "Licenses"). You can select the license that you prefer but you may
 * not use this file except in compliance with one of these Licenses.
 *
 * You can obtain a copy of the Apache 2.0 license at
 * http://www.opensource.org/licenses/apache-2.0
 *
 * You can obtain a copy of the LGPL 3.0 license at
 * http://www.opensource.org/licenses/lgpl-3.0
 *
 * You can obtain a copy of the LGPL 2.1 license at
 * http://www.opensource.org/licenses/lgpl-2.1
 *
 * You can obtain a copy of the CDDL 1.0 license at
 * http://www.opensource.org/licenses/cddl1
 *
 * You can obtain a copy of the EPL 1.0 license at
 * http://www.opensource.org/licenses/eclipse-1.0
 *
 * See the Licenses for the specific language governing permissions and
 * limitations under the Licenses.
 *
 * Alternatively, you can obtain a royalty free commercial license with less
 * limitations, transferable or non-transferable, directly at
 * http://www.restlet.com/products/restlet-framework
 *
 * Restlet is a registered trademark of Restlet S.A.S.
 */

package org.restlet.ext.html.internal;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;

import org.restlet.Context;
import org.restlet.data.CharacterSet;
import org.restlet.ext.html.FormData;
import org.restlet.representation.Representation;
import org.restlet.util.Series;

/**
 * Form reader. Incrementally parses "name=value" entries separated by a
 * configurable character (typically '&amp;') from an input stream.
 *
 * @author Jerome Louvel
 */
public class FormReader {
    /** The encoding to use, decoding is enabled, see {@link #decoding}. */
    private volatile CharacterSet characterSet;

    /** Indicates if the entries should be decoded. */
    private volatile boolean decoding;

    /** The separator character used between entries. */
    private volatile char separator;

    /** The form stream. */
    private volatile InputStream stream;

    /**
     * Constructor.<br>
     * In case the representation does not define a character set, the UTF-8
     * character set is used.
     *
     * @param representation
     *            The web form content.
     * @throws IOException
     *             if the stream of the representation could not be opened.
     */
    public FormReader(Representation representation) throws IOException {
        this.decoding = true;
        this.stream = representation.getStream();
        this.separator = '&';

        if (representation.getCharacterSet() != null) {
            this.characterSet = representation.getCharacterSet();
        } else {
            this.characterSet = CharacterSet.UTF_8;
        }
    }

    /**
     * Constructor. Will leave the parsed data encoded.
     *
     * @param queryString
     *            The query string.
     * @param separator
     *            The separator character used between entries.
     */
    public FormReader(String queryString, char separator) {
        this.decoding = false;
        // NOTE(review): getBytes() uses the platform default charset, which
        // may mangle non-ASCII query strings on some platforms — TODO confirm
        // whether an explicit charset should be used here.
        this.stream = new ByteArrayInputStream(queryString.getBytes());
        // org.restlet.engine.io.StringInputStream(queryString);
        this.characterSet = null;
        this.separator = separator;
    }

    /**
     * Constructor.
     *
     * @param queryString
     *            The query string.
     * @param characterSet
     *            The supported character encoding. Set to null to leave the
     *            data encoded.
     * @param separator
     *            The separator character used between entries.
     */
    public FormReader(String queryString, CharacterSet characterSet,
            char separator) {
        this.decoding = true;
        // NOTE(review): platform-default charset, see the two-arg constructor.
        this.stream = new ByteArrayInputStream(queryString.getBytes());
        // org.restlet.engine.io.StringInputStream(queryString);
        this.characterSet = characterSet;
        this.separator = separator;
    }

    /**
     * Adds the entries into a given series. Parsing errors are logged and
     * abort the remaining entries; the stream is always closed.
     *
     * @param entries
     *            The target series of entries.
     */
    public void addEntries(Series<FormData> entries) {
        boolean readNext = true;
        FormData entry = null;

        if (this.stream != null) {
            // Let's read all form data entries
            try {
                while (readNext) {
                    entry = readNextEntry();

                    if (entry != null) {
                        // Add parsed entry to the form
                        entries.add(entry);
                    } else {
                        // Last entry parsed
                        readNext = false;
                    }
                }
            } catch (IOException ioe) {
                Context.getCurrentLogger()
                        .log(Level.WARNING,
                                "Unable to parse a form entry. Skipping the remaining entries.",
                                ioe);
            }

            try {
                this.stream.close();
            } catch (IOException ioe) {
                Context.getCurrentLogger().log(Level.WARNING,
                        "Unable to close the form input stream", ioe);
            }
        }
    }

    /**
     * Reads all the entries.
     *
     * @return The form read.
     * @throws IOException
     *             If the entries could not be read.
     */
    public Series<FormData> read() throws IOException {
        Series<FormData> result = new Series<FormData>(FormData.class);

        // try/finally so the stream is released even if parsing fails mid-way
        try {
            FormData entry = readNextEntry();

            while (entry != null) {
                result.add(entry);
                entry = readNextEntry();
            }
        } finally {
            this.stream.close();
        }

        return result;
    }

    /**
     * Reads the entries whose name is a key in the given map. If a matching
     * entry is found, its value is put in the map. If multiple values are
     * found, a list is created and set in the map.
     *
     * @param entries
     *            The entries map controlling the reading.
     * @throws IOException
     *             If the entries could not be read.
     */
    @SuppressWarnings("unchecked")
    public void readEntries(Map<String, Object> entries) throws IOException {
        try {
            FormData entry = readNextEntry();
            Object currentValue = null;

            while (entry != null) {
                if (entries.containsKey(entry.getName())) {
                    currentValue = entries.get(entry.getName());

                    if (currentValue != null) {
                        List<Object> values = null;

                        if (currentValue instanceof List) {
                            // Multiple values already found for this entry
                            values = (List<Object>) currentValue;
                        } else {
                            // Second value found for this entry
                            // Create a list of values
                            values = new ArrayList<Object>();
                            values.add(currentValue);
                            entries.put(entry.getName(), values);
                        }

                        if (entry.getValue() == null) {
                            values.add(Series.EMPTY_VALUE);
                        } else {
                            values.add(entry.getValue());
                        }
                    } else {
                        if (entry.getValue() == null) {
                            entries.put(entry.getName(), Series.EMPTY_VALUE);
                        } else {
                            entries.put(entry.getName(), entry.getValue());
                        }
                    }
                }

                entry = readNextEntry();
            }
        } finally {
            // close even when readNextEntry() throws mid-loop
            this.stream.close();
        }
    }

    /**
     * Reads the entries with the given name. If multiple values are found, a
     * list is returned created.
     *
     * @param name
     *            The entry name to match.
     * @return The entry value or list of values.
     * @throws IOException
     *             If the entry could not be read.
     */
    @SuppressWarnings("unchecked")
    public Object readEntry(String name) throws IOException {
        Object result = null;

        try {
            FormData entry = readNextEntry();

            while (entry != null) {
                if (entry.getName().equals(name)) {
                    if (result != null) {
                        List<Object> values = null;

                        if (result instanceof List) {
                            // Multiple values already found for this entry
                            values = (List<Object>) result;
                        } else {
                            // Second value found for this entry
                            // Create a list of values
                            values = new ArrayList<Object>();
                            values.add(result);
                            result = values;
                        }

                        if (entry.getValue() == null) {
                            values.add(Series.EMPTY_VALUE);
                        } else {
                            values.add(entry.getValue());
                        }
                    } else {
                        if (entry.getValue() == null) {
                            result = Series.EMPTY_VALUE;
                        } else {
                            result = entry.getValue();
                        }
                    }
                }

                entry = readNextEntry();
            }
        } finally {
            // close even when readNextEntry() throws mid-loop
            this.stream.close();
        }

        return result;
    }

    /**
     * Reads the first entry with the given name.
     *
     * @param name
     *            The entry name to match.
     * @return The entry value.
     * @throws IOException
     *             If the entry could not be read.
     */
    public FormData readFirstEntry(String name) throws IOException {
        FormData result = null;

        try {
            FormData entry = readNextEntry();

            while ((entry != null) && (result == null)) {
                if (entry.getName().equals(name)) {
                    result = entry;
                }

                entry = readNextEntry();
            }
        } finally {
            // close even when readNextEntry() throws mid-loop
            this.stream.close();
        }

        return result;
    }

    /**
     * Reads the next entry available or null.
     *
     * @return The next entry available or null.
     * @throws IOException
     *             If the next entry could not be read.
     */
    public FormData readNextEntry() throws IOException {
        FormData result = null;

        try {
            boolean readingName = true;
            boolean readingValue = false;
            final StringBuilder nameBuffer = new StringBuilder();
            final StringBuilder valueBuffer = new StringBuilder();

            int nextChar = 0;

            while ((result == null) && (nextChar != -1)) {
                nextChar = this.stream.read();

                if (readingName) {
                    if (nextChar == '=') {
                        if (nameBuffer.length() > 0) {
                            readingName = false;
                            readingValue = true;
                        } else {
                            throw new IOException(
                                    "Empty entry name detected. Please check your form data");
                        }
                    } else if ((nextChar == this.separator) || (nextChar == -1)) {
                        if (nameBuffer.length() > 0) {
                            // Name without '=' maps to an entry with null value
                            result = FormUtils.create(nameBuffer, null,
                                    this.decoding, this.characterSet);
                        } else if (nextChar == -1) {
                            // End of stream with nothing read: no more entries
                        } else {
                            Context.getCurrentLogger()
                                    .fine("Empty entry name detected. Please check your form data");
                        }
                    } else {
                        nameBuffer.append((char) nextChar);
                    }
                } else if (readingValue) {
                    if ((nextChar == this.separator) || (nextChar == -1)) {
                        if (valueBuffer.length() > 0) {
                            result = FormUtils.create(nameBuffer, valueBuffer,
                                    this.decoding, this.characterSet);
                        } else {
                            result = FormUtils.create(nameBuffer, null,
                                    this.decoding, this.characterSet);
                        }
                    } else {
                        valueBuffer.append((char) nextChar);
                    }
                }
            }
        } catch (UnsupportedEncodingException uee) {
            // Preserve the original exception as the cause instead of dropping it
            throw new IOException(
                    "Unsupported encoding. Please contact the administrator",
                    uee);
        }

        return result;
    }
}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.psi.impl.source.tree;

import com.intellij.ide.util.EditSourceUtil;
import com.intellij.lang.ASTNode;
import com.intellij.lang.Language;
import com.intellij.navigation.ItemPresentation;
import com.intellij.navigation.NavigationItem;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vcs.FileStatus;
import com.intellij.psi.*;
import com.intellij.psi.impl.CheckUtil;
import com.intellij.psi.impl.SharedPsiElementImplUtil;
import com.intellij.psi.impl.source.PsiElementArrayConstructor;
import com.intellij.psi.impl.source.SourceTreeToPsiMap;
import com.intellij.psi.impl.source.codeStyle.CodeEditUtil;
import com.intellij.psi.scope.PsiScopeProcessor;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.SearchScope;
import com.intellij.psi.tree.IElementType;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NotNull;

/**
 * Base class for composite AST nodes that are themselves PSI elements.
 * Implements the {@link PsiElement} contract mostly by delegating to the
 * underlying tree structure ({@link CompositeElement}) and to the shared
 * implementation utilities.
 */
public abstract class CompositePsiElement extends CompositeElement implements PsiElement, NavigationItem {
  private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.source.tree.CompositePsiElement");
  // Shared hash-code counter; presumably incremented by subclasses to assign
  // unique hash codes — TODO confirm usage outside this file.
  protected static int ourHC = 0;

  protected CompositePsiElement(IElementType type) {
    super(type);
  }

  @NotNull
  public PsiElement[] getChildren() {
    return getChildrenAsPsiElements(null, PsiElementArrayConstructor.PSI_ELEMENT_ARRAY_CONSTRUCTOR);
  }

  public PsiElement getFirstChild() {
    ASTNode node = getFirstChildNode();
    return node != null ? node.getPsi() : null;
  }

  public PsiElement getLastChild() {
    ASTNode node = getLastChildNode();
    return node != null ? node.getPsi() : null;
  }

  // Visits direct children only (no recursion into grandchildren).
  public void acceptChildren(@NotNull PsiElementVisitor visitor) {
    PsiElement child = getFirstChild();
    while (child != null) {
      child.accept(visitor);
      child = child.getNextSibling();
    }
  }

  public PsiElement getParent() {
    final CompositeElement parentNode = getTreeParent();
    return parentNode != null ? parentNode.getPsi() : null;
  }

  public PsiElement getNextSibling() {
    ASTNode node = getTreeNext();
    return node != null ? node.getPsi() : null;
  }

  public PsiElement getPrevSibling() {
    ASTNode node = getTreePrev();
    return node != null ? node.getPsi() : null;
  }

  // Fails fast for detached/invalidated elements instead of returning null.
  public PsiFile getContainingFile() {
    PsiFile file = SharedImplUtil.getContainingFile(this);
    if (file == null) throw new PsiInvalidElementAccessException(this);
    return file;
  }

  public PsiElement findElementAt(int offset) {
    ASTNode leaf = findLeafElementAt(offset);
    return SourceTreeToPsiMap.treeElementToPsi(leaf);
  }

  public PsiReference findReferenceAt(int offset) {
    return SharedPsiElementImplUtil.findReferenceAt(this, offset);
  }

  public PsiElement copy() {
    ASTNode elementCopy = copyElement();
    return SourceTreeToPsiMap.treeElementToPsi(elementCopy);
  }

  public boolean isValid() {
    return SharedImplUtil.isValid(this);
  }

  public boolean isWritable() {
    return SharedImplUtil.isWritable(this);
  }

  // No reference by default; subclasses representing reference nodes override.
  public PsiReference getReference() {
    return null;
  }

  @NotNull
  public PsiReference[] getReferences() {
    return SharedPsiElementImplUtil.getReferences(this);
  }

  // add() appends at the end: addInnerBefore with a null anchor.
  public PsiElement add(@NotNull PsiElement element) throws IncorrectOperationException {
    return addInnerBefore(element, null);
  }

  public PsiElement addBefore(@NotNull PsiElement element, PsiElement anchor) throws IncorrectOperationException {
    return addInnerBefore(element, anchor);
  }

  public PsiElement addAfter(@NotNull PsiElement element, PsiElement anchor) throws IncorrectOperationException {
    CheckUtil.checkWritable(this);
    TreeElement elementCopy = ChangeUtil.copyToElement(element);
    // Boolean.FALSE = insert after the anchor (TRUE would mean before).
    TreeElement treeElement = addInternal(elementCopy, elementCopy, SourceTreeToPsiMap.psiElementToTree(anchor), Boolean.FALSE);
    return ChangeUtil.decodeInformation(treeElement).getPsi();
  }

  public final void checkAdd(@NotNull PsiElement element) throws IncorrectOperationException {
    CheckUtil.checkWritable(this);
  }

  public final PsiElement addRange(PsiElement first, PsiElement last) throws IncorrectOperationException {
    return SharedImplUtil.addRange(this, first, last, null, null);
  }

  public final PsiElement addRangeBefore(@NotNull PsiElement first, @NotNull PsiElement last, PsiElement anchor)
    throws IncorrectOperationException {
    return SharedImplUtil.addRange(this, first, last, SourceTreeToPsiMap.psiElementToTree(anchor), Boolean.TRUE);
  }

  public final PsiElement addRangeAfter(PsiElement first, PsiElement last, PsiElement anchor)
    throws IncorrectOperationException {
    return SharedImplUtil.addRange(this, first, last, SourceTreeToPsiMap.psiElementToTree(anchor), Boolean.FALSE);
  }

  // Detaches this element from its parent, then invalidates it; order matters.
  public void delete() throws IncorrectOperationException {
    LOG.assertTrue(getTreeParent() != null, "Parent not found for " + this);
    CheckUtil.checkWritable(this);
    getTreeParent().deleteChildInternal(this);
    invalidate();
  }

  public void checkDelete() throws IncorrectOperationException {
    CheckUtil.checkWritable(this);
  }

  // Both endpoints must be direct children of this element.
  public void deleteChildRange(PsiElement first, PsiElement last) throws IncorrectOperationException {
    CheckUtil.checkWritable(this);
    ASTNode firstElement = SourceTreeToPsiMap.psiElementToTree(first);
    ASTNode lastElement = SourceTreeToPsiMap.psiElementToTree(last);

    LOG.assertTrue(firstElement.getTreeParent() == this);
    LOG.assertTrue(lastElement.getTreeParent() == this);
    CodeEditUtil.removeChildren(this, firstElement, lastElement);
  }

  public PsiElement replace(@NotNull PsiElement newElement) throws IncorrectOperationException {
    return SharedImplUtil.doReplace(this, this, newElement);
  }

  public void accept(@NotNull PsiElementVisitor visitor) { //TODO: remove this method!!
    visitor.visitElement(this);
  }

  // Default: declarations are transparent here; subclasses narrow the scope.
  public boolean processDeclarations(@NotNull PsiScopeProcessor processor,
                                     @NotNull ResolveState state,
                                     PsiElement lastParent,
                                     @NotNull PsiElement place) {
    return true;
  }

  public String toString() {
    return "PsiElement" + "(" + getElementType().toString() + ")";
  }

  public PsiElement getContext() {
    return getParent();
  }

  @NotNull
  public PsiElement getNavigationElement() {
    return this;
  }

  public PsiElement getOriginalElement() {
    return this;
  }

  public boolean isPhysical() {
    PsiFile file = getContainingFile();
    return file != null && file.isPhysical();
  }

  @NotNull
  public GlobalSearchScope getResolveScope() {
    return getManager().getFileManager().getResolveScope(this);
  }

  @NotNull
  public SearchScope getUseScope() {
    return getManager().getFileManager().getUseScope(this);
  }

  // Navigation-item support: no presentation or name by default.
  public ItemPresentation getPresentation() {
    return null;
  }

  public String getName() {
    return null;
  }

  public void navigate(boolean requestFocus) {
    EditSourceUtil.getDescriptor(this).navigate(requestFocus);
  }

  public boolean canNavigate() {
    return EditSourceUtil.canNavigate(this);
  }

  public boolean canNavigateToSource() {
    return canNavigate();
  }

  public FileStatus getFileStatus() {
    return SharedImplUtil.getFileStatus(this);
  }

  // Fails fast when the element has been detached from its manager.
  @NotNull
  public Project getProject() {
    final PsiManager manager = getManager();
    if (manager == null) throw new PsiInvalidElementAccessException(this);

    return manager.getProject();
  }

  @NotNull
  public Language getLanguage() {
    return getElementType().getLanguage();
  }

  // This composite IS its own AST node.
  @NotNull
  public ASTNode getNode() {
    return this;
  }

  @Override
  protected PsiElement createPsiNoLock() {
    return this;
  }

  private PsiElement addInnerBefore(final PsiElement element, final PsiElement anchor) throws IncorrectOperationException {
    CheckUtil.checkWritable(this);
    TreeElement elementCopy = ChangeUtil.copyToElement(element);
    // Boolean.TRUE = insert before the anchor (null anchor appends at the end).
    TreeElement treeElement = addInternal(elementCopy, elementCopy, SourceTreeToPsiMap.psiElementToTree(anchor), Boolean.TRUE);
    if (treeElement != null) return ChangeUtil.decodeInformation(treeElement).getPsi();
    throw new IncorrectOperationException("Element cannot be added");
  }

  // Identity-based equivalence by default.
  public boolean isEquivalentTo(final PsiElement another) {
    return this == another;
  }
}
package com.jamesmorrisstudios.appbaselibrary.sound;

import android.media.AudioManager;
import android.media.MediaPlayer;
import android.net.Uri;
import android.speech.tts.TextToSpeech;
import android.support.annotation.NonNull;
import android.util.Log;

import com.jamesmorrisstudios.appbaselibrary.UtilsLocale;
import com.jamesmorrisstudios.appbaselibrary.app.AppBase;

import java.util.HashMap;
import java.util.Locale;

/**
 * Supports loading and playing one specific audio stream at a time.
 * Playback starts as soon as loading is complete. Can be stopped but cannot be paused.
 * <p/>
 * Created by James on 12/11/2015.
 */
public final class SoundInstant {
    private static SoundInstant instance = null;
    private MediaPlayer music;   // currently loading/playing clip, if any
    private TextToSpeech tts;    // currently speaking TTS engine, if any

    /**
     * Private constructor
     */
    private SoundInstant() {
    }

    /**
     * @return SoundInstant instance
     */
    @NonNull
    public static SoundInstant getInstance() {
        if (instance == null) {
            instance = new SoundInstant();
        }
        return instance;
    }

    /**
     * Loads and plays sound in notification stream
     *
     * @param uri Uri of file
     */
    public final void loadAndPlayNotification(@NonNull final Uri uri) {
        loadAndPlay(uri, AudioManager.STREAM_NOTIFICATION);
    }

    /**
     * Loads and plays sound in alarm stream
     *
     * @param uri Uri of file
     */
    public final void loadAndPlayAlarm(@NonNull final Uri uri) {
        loadAndPlay(uri, AudioManager.STREAM_ALARM);
    }

    /**
     * Loads and plays sound in ring stream
     *
     * @param uri Uri of file
     */
    public final void loadAndPlayRing(@NonNull final Uri uri) {
        loadAndPlay(uri, AudioManager.STREAM_RING);
    }

    /**
     * Loads and plays sound in music stream
     *
     * @param uri Uri of file
     */
    public final void loadAndPlayMusic(@NonNull final Uri uri) {
        loadAndPlay(uri, AudioManager.STREAM_MUSIC);
    }

    /**
     * Loads and plays sound in the given music stream type.
     * Any sound still playing from a previous call is stopped and its player
     * released first, so only one clip is ever active at a time.
     *
     * @param uri        Uri of file
     * @param streamType Stream to play on
     */
    public final void loadAndPlay(@NonNull final Uri uri, final int streamType) {
        // FIX: release any player left over from a previous call. The old code
        // reassigned the field directly, orphaning the previous MediaPlayer
        // without release() — a native-resource leak.
        stopPlayback();
        music = new MediaPlayer();
        music.setAudioStreamType(streamType);
        try {
            music.setDataSource(AppBase.getContext(), uri);
            music.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
                @Override
                public void onPrepared(MediaPlayer mp) {
                    music.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
                        @Override
                        public void onCompletion(MediaPlayer mp) {
                            try {
                                // Playback finished: tear the player down.
                                music.stop();
                                music.release();
                            } catch (Exception e) {
                                e.printStackTrace();
                            }
                            // Clear the field so stopPlayback() does not touch a released player.
                            music = null;
                        }
                    });
                    music.setLooping(false);
                    music.setVolume(1.0f, 1.0f);
                    music.start();
                }
            });
            // Asynchronous prepare; playback begins in onPrepared.
            music.prepareAsync();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Stops any currently playing sounds and releases the player.
     */
    public final void stopPlayback() {
        try {
            if (music != null) {
                if (music.isPlaying()) {
                    music.stop();
                }
                music.release();
            }
        } catch (Exception e) {
            e.printStackTrace();
            //Do nothing as the media player is stupid.
        }
        // FIX: drop the reference after release so later calls don't operate
        // on an already-released MediaPlayer.
        music = null;
    }

    /**
     * Speaks text in notification stream
     *
     * @param text Text to speak
     */
    public final void speakTextNotification(@NonNull final String text) {
        speakText(text, AudioManager.STREAM_NOTIFICATION);
    }

    /**
     * Speaks text in alarm stream
     *
     * @param text Text to speak
     */
    public final void speakTextAlarm(@NonNull final String text) {
        speakText(text, AudioManager.STREAM_ALARM);
    }

    /**
     * Speaks text in ring stream
     *
     * @param text Text to speak
     */
    public final void speakTextRing(@NonNull final String text) {
        speakText(text, AudioManager.STREAM_RING);
    }

    /**
     * Speaks text in music stream
     *
     * @param text Text to speak
     */
    public final void speakTextMusic(@NonNull final String text) {
        speakText(text, AudioManager.STREAM_MUSIC);
    }

    /**
     * Speaks text in given stream. If the current language is not supported it falls back to English.
     * If English is not supported or the TTS engine is not set up it says nothing.
     *
     * @param text       Text to speak
     * @param streamType Stream to play on
     */
    public final void speakText(@NonNull final String text, final int streamType) {
        // FIX: shut down any engine left over from a previous call. The old
        // code reassigned the field directly, leaking the prior TextToSpeech
        // instance (its own shutdown only ran after a completed utterance).
        if (tts != null) {
            try {
                tts.shutdown();
            } catch (Exception e) {
                e.printStackTrace();
            }
            tts = null;
        }
        tts = new TextToSpeech(AppBase.getContext(), new TextToSpeech.OnInitListener() {
            @Override
            public void onInit(int status) {
                if (status == TextToSpeech.SUCCESS) {
                    int result = tts.setLanguage(UtilsLocale.getLocale());
                    if (result == TextToSpeech.LANG_MISSING_DATA || result == TextToSpeech.LANG_NOT_SUPPORTED) {
                        Log.e("SoundInstant", "This Language is not supported");
                        // Fall back to US English before giving up entirely.
                        result = tts.setLanguage(Locale.US);
                    }
                    if (result == TextToSpeech.LANG_MISSING_DATA || result == TextToSpeech.LANG_NOT_SUPPORTED) {
                        Log.e("SoundInstant", "English not supported");
                        return;
                    }
                    // Route the spoken audio onto the requested stream.
                    HashMap<String, String> myHashAlarm = new HashMap<>();
                    myHashAlarm.put(TextToSpeech.Engine.KEY_PARAM_STREAM, String.valueOf(streamType));
                    tts.setOnUtteranceCompletedListener(new TextToSpeech.OnUtteranceCompletedListener() {
                        @Override
                        public void onUtteranceCompleted(String utteranceId) {
                            // Release the engine once it has finished speaking.
                            tts.shutdown();
                        }
                    });
                    tts.speak(text, TextToSpeech.QUEUE_ADD, myHashAlarm);
                }
            }
        });
    }
}
package com.sibilantsolutions.iptools.net;

import com.sibilantsolutions.iptools.event.LostConnectionEvt;
import com.sibilantsolutions.iptools.event.ReceiveEvt;
import com.sibilantsolutions.iptools.event.SocketListenerI;
import com.sibilantsolutions.utils.util.Convert;
import com.sibilantsolutions.utils.util.HexDump;
import com.sibilantsolutions.utils.util.HexDumpDeferred;
import com.sibilantsolutions.utils.util.HexUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.nio.BufferOverflowException;
import java.nio.ByteOrder;

/**
 * Reassembles a raw byte stream into complete, length-prefixed messages.
 * <p>
 * Incoming receive events are accumulated in an internal buffer; whenever the
 * buffer holds at least one complete packet (as declared by the packet's own
 * length bytes), that packet is copied out and delivered to the downstream
 * {@link SocketListenerI} as a single {@link ReceiveEvt}. Partial data is kept
 * until more bytes arrive. Not thread-safe; assumes one socket-reader thread.
 */
public class LengthByteBuffer implements SocketListenerI
{
    final static private Logger log = LoggerFactory.getLogger( LengthByteBuffer.class );

    static public enum LengthByteType
    {
        /**
         * The length bytes describe the length of the entire packet including the number of length
         * bytes, any other header bytes, and the payload.
         * <p>
         * The length bytes will never be 0 even if there is no payload data; the length bytes
         * will always account for themselves and any other fixed-length header bytes if any.
         */
        LENGTH_OF_ENTIRE_PACKET,

        /**
         * The length bytes describe the length of the payload only, and do not include the
         * number of length bytes or the length of any other header bytes.
         * <p>
         * The payload length may be 0, indicating that the packet is only comprised of header bytes.
         */
        LENGTH_OF_PAYLOAD;
    }

    final private int lengthBytesOffset;        // byte offset of the length field within a packet
    final private int numLengthBytes;           // width of the length field in bytes
    final private LengthByteType lengthByteType;
    final private ByteOrder byteOrder;          // endianness of the length field
    final private int padBytes;                 // pad between length field and payload (LENGTH_OF_PAYLOAD only)
    final private SocketListenerI receiver;     // downstream consumer of complete packets

    //final private ByteBuffer buf;
    final private byte[] buf;                   // accumulation buffer for partial packets
    private int curOff;                         // number of valid bytes currently in buf

    /**
     *
     * @param lengthBytesOffset Number of bytes from start of message at which length byte(s) begin.
     * @param numLengthBytes Number of bytes used to represent the length of the message.
     * @param lengthByteType Length byte type
     * @param byteOrder Byte order of length bytes
     * @param padBytes
     *      Number of bytes after the length byte(s) and before the data; only used when
     *      lengthByteType is LENGTH_OF_PAYLOAD.
     * @param bufferCapacity Capacity of internal buffer used to hold partial messages
     * @param receiver Receiver that will be given single, complete messages.
     */
    public LengthByteBuffer( int lengthBytesOffset, int numLengthBytes,
            LengthByteType lengthByteType, ByteOrder byteOrder, int padBytes, int bufferCapacity,
            SocketListenerI receiver )
    {
        this.lengthBytesOffset = lengthBytesOffset;
        this.numLengthBytes = numLengthBytes;
        this.lengthByteType = lengthByteType;
        this.byteOrder = byteOrder;
        this.padBytes = padBytes;
        this.receiver = receiver;

        //buf = ByteBuffer.allocate( bufferCapacity );
        buf = new byte[bufferCapacity];
    }

    /**
     * Copies the received bytes into the accumulation buffer and fires one
     * {@link ReceiveEvt} per complete packet found, compacting any trailing
     * partial packet to the front of the buffer.
     * <p>
     * NOTE(review): the outer loop bound compares {@code rawOffset} (initialized
     * from {@code evt.getOffset()}) against {@code rawLength} alone. This is only
     * correct if the event's offset is always 0; for a nonzero offset the end
     * bound would need to be offset+length — confirm against ReceiveEvt's contract.
     */
    private void doReceiveArray( ReceiveEvt evt )
    {
        int rawOffset = evt.getOffset();
        final int rawLength = evt.getLength();

        log.trace("======== doReceiveBuffer: offset={}, length={}.", rawOffset, rawLength );

        while ( rawOffset < rawLength )
        {
            int remaining = buf.length - curOff;
            // Copy as much of the incoming data as fits in the buffer this pass.
            int len = Math.min( rawLength - rawOffset, remaining );

            if ( len == 0 )
            {
                // Buffer is full but no complete packet could be extracted below:
                // either the capacity is too small for this packet or the stream is corrupt.
                // throw new BufferOverflowException();
                //After lost network connection (wifi dropped):
                throw new RuntimeException( "offset=" + rawOffset + ", length=" + rawLength +
                        ", buf len=" + buf.length + ", cur offset=" + curOff +
                        ", remaining=" + remaining, new BufferOverflowException() );
            }

            System.arraycopy( evt.getData(), rawOffset, buf, curOff, len );
            curOff += len;
            rawOffset += len;

            // Extract every complete packet currently in the buffer.
            // numFired counts packets delivered this pass (used only for log verbosity).
            int numFired = 0;
            for ( boolean keepChecking = true; keepChecking; numFired++)
            {
                // Minimum bytes needed before the length field can even be read.
                final int minNeeded;
                switch ( lengthByteType )
                {
                    case LENGTH_OF_ENTIRE_PACKET:
                        minNeeded = lengthBytesOffset + numLengthBytes;
                        break;

                    case LENGTH_OF_PAYLOAD:
                        minNeeded = lengthBytesOffset + numLengthBytes + padBytes;
                        break;

                    default:
                        throw new RuntimeException( "Unexpected lengthByteType=" + lengthByteType );
                }

                if ( curOff >= minNeeded )
                {
                    // Decode the length field using the configured endianness.
                    final int lengthBytesVal = (int)Convert.toNum( buf, lengthBytesOffset, numLengthBytes, byteOrder );

                    // Total packet size implied by the length field.
                    final int packetLen;
                    switch ( lengthByteType )
                    {
                        case LENGTH_OF_ENTIRE_PACKET:
                            packetLen = lengthBytesVal;
                            break;

                        case LENGTH_OF_PAYLOAD:
                            packetLen = minNeeded + lengthBytesVal;
                            break;

                        default:
                            throw new RuntimeException( "Unexpected lengthByteType=" + lengthByteType );
                    }

                    if ( curOff >= packetLen )
                    {
                        // A full packet is buffered; copy it out for delivery.
                        byte[] singlePacket = new byte[packetLen];
                        System.arraycopy( buf, 0, singlePacket, 0, packetLen );

                        if ( curOff > packetLen )
                        {
                            // Compact leftover bytes (start of next packet) to the front.
                            System.arraycopy( buf, packetLen, buf, 0, curOff - packetLen );
                            curOff -= packetLen;
                        }
                        else
                        {
                            // Buffer exactly consumed; nothing further to check.
                            curOff = 0;
                            keepChecking = false;
                        }

                        ReceiveEvt packetEvt = new ReceiveEvt( singlePacket, evt.getSource() );

                        //Log the single packet, but only if there was more than one in the receive.
                        //The receive was already logged so we don't need to log again for the
                        //normal case of having received a single complete packet.
                        if ( keepChecking || numFired > 0 )
                        {
                            log.trace("Firing single packet=0x{}/{} to receiver: \n{}",
                                    HexUtils.numToHex( singlePacket.length ), singlePacket.length,
                                    HexDumpDeferred.prettyDump( singlePacket ) );
                        }
                        else
                        {
                            log.trace("Firing single packet=0x{}/{} to receiver.",
                                    HexUtils.numToHex( singlePacket.length ), singlePacket.length );
                        }

                        try
                        {
                            receiver.onReceive( packetEvt );
                        }
                        catch ( Exception e )
                        {
                            // Deliberately swallow downstream failures so one bad packet
                            // does not abort processing of the rest of the stream.
                            //throw new RuntimeException( "Trouble processing packet (exception follows data): \n" +
                            //        HexDump.prettyDump( singlePacket ), e );
                            log.error( "Trouble processing packet (exception follows data): \n" +
                                    HexDump.prettyDump( singlePacket ), e );
                        }
                    }
                    else
                    {
                        log.trace("Received length bytes, but not all data yet ({} of {} bytes).",
                                curOff, packetLen );
                        keepChecking = false;
                    }
                }
                else
                {
                    log.trace("Received data, but not length bytes yet ({} bytes).", curOff);
                    keepChecking = false;
                }
            }
        }

        // if ( curOff != 0 )
        // {
        //     log.debug( "Have a partial message; waiting for more data." );
        // }
    }

    /** Pass-through: lost-connection events go straight to the downstream receiver. */
    @Override
    public void onLostConnection( LostConnectionEvt evt )
    {
        receiver.onLostConnection( evt );
    }

    /** Entry point for raw socket data; delegates to the array-based reassembler. */
    @Override
    public void onReceive( ReceiveEvt evt )
    {
        //doReceiveBuffer( evt );
        doReceiveArray( evt );
    }

}
package net.techmastary.plugins.chatmaster;

import java.io.*;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.util.Enumeration;
import java.util.logging.Level;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.plugin.Plugin;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;

/**
 * Check dev.bukkit.org to find updates for a given plugin, and download the updates if needed.
 * <p/>
 * <b>VERY, VERY IMPORTANT</b>: Because there are no standards for adding auto-update toggles in your plugin's config, this system provides NO CHECK WITH YOUR CONFIG to make sure the user has allowed auto-updating.
 * <br>
 * It is a <b>BUKKIT POLICY</b> that you include a boolean value in your config that prevents the auto-updater from running <b>AT ALL</b>.
 * <br>
 * If you fail to include this option in your config, your plugin will be <b>REJECTED</b> when you attempt to submit it to dev.bukkit.org.
 * <p/>
 * An example of a good configuration option would be something similar to 'auto-update: true' - if this value is set to false you may NOT run the auto-updater.
 * <br>
 * If you are unsure about these rules, please read the plugin submission guidelines: http://goo.gl/8iU5l
 *
 * @author Gravity
 * @version 2.1
 */
public class Updater {

    private Plugin plugin;
    private UpdateType type;
    private String versionName;        // remote file's title, set by read()
    private String versionLink;        // remote file's download URL, set by read()
    private String versionType;        // remote file's release type, set by read()
    private String versionGameVersion; // remote file's game version, set by read()

    private boolean announce; // Whether to announce file downloads

    private URL url; // Connecting to RSS
    private File file; // The plugin's file
    private Thread thread; // Updater thread

    private int id = -1; // Project's Curse ID
    private String apiKey = null; // BukkitDev ServerMods API key
    private static final String TITLE_VALUE = "name"; // Gets remote file's title
    private static final String LINK_VALUE = "downloadUrl"; // Gets remote file's download link
    private static final String TYPE_VALUE = "releaseType"; // Gets remote file's release type
    private static final String VERSION_VALUE = "gameVersion"; // Gets remote file's build version
    private static final String QUERY = "/servermods/files?projectIds="; // Path to GET
    private static final String HOST = "https://api.curseforge.com"; // Slugs will be appended to this to get to the project's RSS feed

    private static final String USER_AGENT = "Updater (by Gravity)";
    private static final String delimiter = "^v|[\\s_-]v"; // Used for locating version numbers in file names
    private static final String[] NO_UPDATE_TAG = { "-DEV", "-PRE", "-SNAPSHOT" }; // If the version number contains one of these, don't update.
    private static final int BYTE_SIZE = 1024; // Used for downloading files
    private final YamlConfiguration config = new YamlConfiguration(); // Config file
    private String updateFolder;// The folder that downloads will be placed in
    private Updater.UpdateResult result = Updater.UpdateResult.SUCCESS; // Used for determining the outcome of the update process

    /**
     * Gives the developer the result of the update process. Can be obtained by calling {@link #getResult()}
     */
    public enum UpdateResult {
        /**
         * The updater found an update, and has readied it to be loaded the next time the server restarts/reloads.
         */
        SUCCESS,
        /**
         * The updater did not find an update, and nothing was downloaded.
         */
        NO_UPDATE,
        /**
         * The server administrator has disabled the updating system.
         */
        DISABLED,
        /**
         * The updater found an update, but was unable to download it.
         */
        FAIL_DOWNLOAD,
        /**
         * For some reason, the updater was unable to contact dev.bukkit.org to download the file.
         */
        FAIL_DBO,
        /**
         * When running the version check, the file on DBO did not contain a recognizable version.
         */
        FAIL_NOVERSION,
        /**
         * The id provided by the plugin running the updater was invalid and doesn't exist on DBO.
         */
        FAIL_BADID,
        /**
         * The server administrator has improperly configured their API key in the configuration.
         */
        FAIL_APIKEY,
        /**
         * The updater found an update, but because of the UpdateType being set to NO_DOWNLOAD, it wasn't downloaded.
         */
        UPDATE_AVAILABLE
    }

    /**
     * Allows the developer to specify the type of update that will be run.
     */
    public enum UpdateType {
        /**
         * Run a version check, and then if the file is out of date, download the newest version.
         */
        DEFAULT,
        /**
         * Don't run a version check, just find the latest update and download it.
         */
        NO_VERSION_CHECK,
        /**
         * Get information about the version and the download size, but don't actually download anything.
         */
        NO_DOWNLOAD
    }

    /**
     * Represents the various release types of a file on BukkitDev.
     */
    public enum ReleaseType {
        /**
         * An "alpha" file.
         */
        ALPHA,
        /**
         * A "beta" file.
         */
        BETA,
        /**
         * A "release" file.
         */
        RELEASE
    }

    /**
     * Initialize the updater.
     * <p>
     * NOTE(review): this constructor starts a background thread on the last line,
     * letting {@code this} escape before construction completes; getters guard
     * against this by joining the thread via waitForThread().
     *
     * @param plugin The plugin that is checking for an update.
     * @param id The dev.bukkit.org id of the project.
     * @param file The file that the plugin is running from, get this by doing this.getFile() from within your main class.
     * @param type Specify the type of update this will be. See {@link UpdateType}
     * @param announce True if the program should announce the progress of new updates in console.
     */
    public Updater(Plugin plugin, int id, File file, UpdateType type, boolean announce) {
        this.plugin = plugin;
        this.type = type;
        this.announce = announce;
        this.file = file;
        this.id = id;
        this.updateFolder = plugin.getServer().getUpdateFolder();

        final File pluginFile = plugin.getDataFolder().getParentFile();
        final File updaterFile = new File(pluginFile, "Updater");
        final File updaterConfigFile = new File(updaterFile, "config.yml");

        // Shared Updater config (one per server, not per plugin).
        this.config.options().header("This configuration file affects all plugins using the Updater system (version 2+ - http://forums.bukkit.org/threads/96681/ )" + '\n'
                + "If you wish to use your API key, read http://wiki.bukkit.org/ServerMods_API and place it below." + '\n'
                + "Some updating systems will not adhere to the disabled value, but these may be turned off in their plugin's configuration.");
        this.config.addDefault("api-key", "PUT_API_KEY_HERE");
        this.config.addDefault("disable", false);

        if (!updaterFile.exists()) {
            updaterFile.mkdir();
        }

        boolean createFile = !updaterConfigFile.exists();
        try {
            if (createFile) {
                updaterConfigFile.createNewFile();
                this.config.options().copyDefaults(true);
                this.config.save(updaterConfigFile);
            } else {
                this.config.load(updaterConfigFile);
            }
        } catch (final Exception e) {
            if (createFile) {
                plugin.getLogger().severe("The updater could not create configuration at " + updaterFile.getAbsolutePath());
            } else {
                plugin.getLogger().severe("The updater could not load configuration at " + updaterFile.getAbsolutePath());
            }
            plugin.getLogger().log(Level.SEVERE, null, e);
        }

        // Server-wide kill switch: admin has disabled all Updater-based updating.
        if (this.config.getBoolean("disable")) {
            this.result = UpdateResult.DISABLED;
            return;
        }

        String key = this.config.getString("api-key");
        if (key.equalsIgnoreCase("PUT_API_KEY_HERE") || key.equals("")) {
            key = null;
        }

        this.apiKey = key;

        try {
            this.url = new URL(Updater.HOST + Updater.QUERY + id);
        } catch (final MalformedURLException e) {
            plugin.getLogger().log(Level.SEVERE, "The project ID provided for updating, " + id + " is invalid.", e);
            this.result = UpdateResult.FAIL_BADID;
        }

        // Kick off the check/download asynchronously; results are read via the getters.
        this.thread = new Thread(new UpdateRunnable());
        this.thread.start();
    }

    /**
     * Get the result of the update process.
     *
     * @return result of the update process.
     * @see UpdateResult
     */
    public Updater.UpdateResult getResult() {
        this.waitForThread();
        return this.result;
    }

    /**
     * Get the latest version's release type.
     *
     * @return latest version's release type.
     * @see ReleaseType
     */
    public ReleaseType getLatestType() {
        this.waitForThread();
        if (this.versionType != null) {
            for (ReleaseType type : ReleaseType.values()) {
                if (this.versionType.equals(type.name().toLowerCase())) {
                    return type;
                }
            }
        }
        return null;
    }

    /**
     * Get the latest version's game version (such as "CB 1.2.5-R1.0").
     *
     * @return latest version's game version.
     */
    public String getLatestGameVersion() {
        this.waitForThread();
        return this.versionGameVersion;
    }

    /**
     * Get the latest version's name (such as "Project v1.0").
     *
     * @return latest version's name.
     */
    public String getLatestName() {
        this.waitForThread();
        return this.versionName;
    }

    /**
     * Get the latest version's direct file link.
     *
     * @return latest version's file link.
     */
    public String getLatestFileLink() {
        this.waitForThread();
        return this.versionLink;
    }

    /**
     * As the result of Updater output depends on the thread's completion, it is necessary to wait for the thread to finish
     * before allowing anyone to check the result.
     */
    private void waitForThread() {
        if ((this.thread != null) && this.thread.isAlive()) {
            try {
                this.thread.join();
            } catch (final InterruptedException e) {
                // NOTE(review): interrupt status is not restored here
                // (Thread.currentThread().interrupt()) — consider adding.
                plugin.getLogger().log(Level.SEVERE, null, e);
            }
        }
    }

    /**
     * Save an update from dev.bukkit.org into the server's update folder.
     *
     * @param folder the updates folder location.
     * @param file the name of the file to save it as.
     * @param link the url of the file.
     */
    private void saveFile(File folder, String file, String link) {
        if (!folder.exists()) {
            folder.mkdir();
        }
        BufferedInputStream in = null;
        FileOutputStream fout = null;
        try {
            // Download the file
            final URL url = new URL(link);
            // NOTE(review): getContentLength() may return -1/0 (chunked responses),
            // which would make the percent division below misbehave — confirm
            // against the ServerMods CDN's response headers.
            final int fileLength = url.openConnection().getContentLength();
            in = new BufferedInputStream(url.openStream());
            fout = new FileOutputStream(folder.getAbsolutePath() + File.separator + file);

            final byte[] data = new byte[Updater.BYTE_SIZE];
            int count;
            if (this.announce) {
                this.plugin.getLogger().info("About to download a new update: " + this.versionName);
            }
            long downloaded = 0;
            while ((count = in.read(data, 0, Updater.BYTE_SIZE)) != -1) {
                downloaded += count;
                fout.write(data, 0, count);
                final int percent = (int) ((downloaded * 100) / fileLength);
                if (this.announce && ((percent % 10) == 0)) {
                    this.plugin.getLogger().info("Downloading update: " + percent + "% of " + fileLength + " bytes.");
                }
            }
            //Just a quick check to make sure we didn't leave any files from last time...
            // NOTE(review): listFiles() returns null if the folder does not exist,
            // which would NPE this loop — the enclosing catch would mask it as FAIL_DOWNLOAD.
            for (final File xFile : new File(this.plugin.getDataFolder().getParent(), this.updateFolder).listFiles()) {
                if (xFile.getName().endsWith(".zip")) {
                    xFile.delete();
                }
            }
            // Check to see if it's a zip file, if it is, unzip it.
            final File dFile = new File(folder.getAbsolutePath() + File.separator + file);
            if (dFile.getName().endsWith(".zip")) {
                // Unzip
                this.unzip(dFile.getCanonicalPath());
            }
            if (this.announce) {
                this.plugin.getLogger().info("Finished updating.");
            }
        } catch (final Exception ex) {
            this.plugin.getLogger().warning("The auto-updater tried to download a new update, but was unsuccessful.");
            this.result = Updater.UpdateResult.FAIL_DOWNLOAD;
        } finally {
            try {
                if (in != null) {
                    in.close();
                }
                if (fout != null) {
                    fout.close();
                }
            } catch (final Exception ex) {
                // Best-effort close; nothing useful to do if close itself fails.
            }
        }
    }

    /**
     * Part of Zip-File-Extractor, modified by Gravity for use with Updater.
     * <p>
     * NOTE(review): entry names are used to build destination paths without a
     * traversal check — an archive entry containing ".." could escape zipPath
     * ("zip slip"). The source is the official ServerMods CDN, but a guard
     * (normalize + startsWith(zipPath)) would be prudent. Also, the per-entry
     * streams are not closed if an exception occurs mid-copy.
     *
     * @param file the location of the file to extract.
     */
    private void unzip(String file) {
        try {
            final File fSourceZip = new File(file);
            // Strip the trailing ".zip" to get the extraction directory.
            final String zipPath = file.substring(0, file.length() - 4);
            ZipFile zipFile = new ZipFile(fSourceZip);
            Enumeration<? extends ZipEntry> e = zipFile.entries();
            while (e.hasMoreElements()) {
                ZipEntry entry = e.nextElement();
                File destinationFilePath = new File(zipPath, entry.getName());
                destinationFilePath.getParentFile().mkdirs();
                if (entry.isDirectory()) {
                    continue;
                } else {
                    final BufferedInputStream bis = new BufferedInputStream(zipFile.getInputStream(entry));
                    int b;
                    final byte buffer[] = new byte[Updater.BYTE_SIZE];
                    final FileOutputStream fos = new FileOutputStream(destinationFilePath);
                    final BufferedOutputStream bos = new BufferedOutputStream(fos, Updater.BYTE_SIZE);
                    while ((b = bis.read(buffer, 0, Updater.BYTE_SIZE)) != -1) {
                        bos.write(buffer, 0, b);
                    }
                    bos.flush();
                    bos.close();
                    bis.close();
                    final String name = destinationFilePath.getName();
                    // Jars matching an installed plugin go straight into the server's update folder.
                    if (name.endsWith(".jar") && this.pluginFile(name)) {
                        destinationFilePath.renameTo(new File(this.plugin.getDataFolder().getParent(), this.updateFolder + File.separator + name));
                    }
                }
                entry = null;
                destinationFilePath = null;
            }
            e = null;
            zipFile.close();
            zipFile = null;

            // Move any plugin data folders that were included to the right place, Bukkit won't do this for us.
            for (final File dFile : new File(zipPath).listFiles()) {
                if (dFile.isDirectory()) {
                    if (this.pluginFile(dFile.getName())) {
                        final File oFile = new File(this.plugin.getDataFolder().getParent(), dFile.getName()); // Get current dir
                        final File[] contents = oFile.listFiles(); // List of existing files in the current dir
                        for (final File cFile : dFile.listFiles()) // Loop through all the files in the new dir
                        {
                            boolean found = false;
                            for (final File xFile : contents) // Loop through contents to see if it exists
                            {
                                if (xFile.getName().equals(cFile.getName())) {
                                    found = true;
                                    break;
                                }
                            }
                            if (!found) {
                                // Move the new file into the current dir
                                cFile.renameTo(new File(oFile.getCanonicalFile() + File.separator + cFile.getName()));
                            } else {
                                // This file already exists, so we don't need it anymore.
                                cFile.delete();
                            }
                        }
                    }
                }
                dFile.delete();
            }
            new File(zipPath).delete();
            fSourceZip.delete();
        } catch (final IOException e) {
            this.plugin.getLogger().log(Level.SEVERE, "The auto-updater tried to unzip a new update file, but was unsuccessful.", e);
            this.result = Updater.UpdateResult.FAIL_DOWNLOAD;
        }
        // Remove the downloaded zip regardless of extraction success.
        new File(file).delete();
    }

    /**
     * Check if the name of a jar is one of the plugins currently installed, used for extracting the correct files out of a zip.
     *
     * @param name a name to check for inside the plugins folder.
     * @return true if a file inside the plugins folder is named this.
     */
    private boolean pluginFile(String name) {
        // NOTE(review): assumes the server's working directory contains "plugins"
        // and that listFiles() is non-null — verify for non-standard server layouts.
        for (final File file : new File("plugins").listFiles()) {
            if (file.getName().equals(name)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Check to see if the program should continue by evaluating whether the plugin is already updated, or shouldn't be updated.
     *
     * @param title the plugin's title.
     * @return true if the version was located and is not the same as the remote's newest.
     */
    private boolean versionCheck(String title) {
        if (this.type != UpdateType.NO_VERSION_CHECK) {
            final String localVersion = this.plugin.getDescription().getVersion();
            // Remote titles are expected to look like "PluginName vVERSION".
            if (title.split(delimiter).length == 2) {
                final String remoteVersion = title.split(delimiter)[1].split(" ")[0]; // Get the newest file's version number

                if (this.hasTag(localVersion) || !this.shouldUpdate(localVersion, remoteVersion)) {
                    // We already have the latest version, or this build is tagged for no-update
                    this.result = Updater.UpdateResult.NO_UPDATE;
                    return false;
                }
            } else {
                // The file's name did not contain the string 'vVersion'
                final String authorInfo = this.plugin.getDescription().getAuthors().size() == 0 ? "" : " (" + this.plugin.getDescription().getAuthors().get(0) + ")";
                this.plugin.getLogger().warning("The author of this plugin" + authorInfo + " has misconfigured their Auto Update system");
                this.plugin.getLogger().warning("File versions should follow the format 'PluginName vVERSION'");
                this.plugin.getLogger().warning("Please notify the author of this error.");
                this.result = Updater.UpdateResult.FAIL_NOVERSION;
                return false;
            }
        }
        return true;
    }

    /**
     * <b>If you wish to run mathematical versioning checks, edit this method.</b>
     * <p>
     * With default behavior, Updater will NOT verify that a remote version available on BukkitDev
     * which is not this version is indeed an "update".
     * If a version is present on BukkitDev that is not the version that is currently running,
     * Updater will assume that it is a newer version.
     * This is because there is no standard versioning scheme, and creating a calculation that can
     * determine whether a new update is actually an update is sometimes extremely complicated.
     * </p>
     * <p>
     * Updater will call this method from {@link #versionCheck(String)} before deciding whether
     * the remote version is actually an update.
     * If you have a specific versioning scheme with which a mathematical determination can
     * be reliably made to decide whether one version is higher than another, you may
     * revise this method, using the local and remote version parameters, to execute the
     * appropriate check.
     * </p>
     * <p>
     * Returning a value of <b>false</b> will tell the update process that this is NOT a new version.
     * Without revision, this method will always consider a remote version at all different from
     * that of the local version a new update.
     * </p>
     * @param localVersion the current version
     * @param remoteVersion the remote version
     * @return true if Updater should consider the remote version an update, false if not.
     */
    public boolean shouldUpdate(String localVersion, String remoteVersion) {
        return !localVersion.equalsIgnoreCase(remoteVersion);
    }

    /**
     * Evaluate whether the version number is marked showing that it should not be updated by this program.
     *
     * @param version a version number to check for tags in.
     * @return true if updating should be disabled.
     */
    private boolean hasTag(String version) {
        for (final String string : Updater.NO_UPDATE_TAG) {
            if (version.contains(string)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Make a connection to the BukkitDev API and request the newest file's details.
     * <p>
     * NOTE(review): the BufferedReader (and thus the connection's input stream)
     * is never closed; and if the response is not valid JSON, JSONValue.parse
     * returns null and array.size() would NPE inside the IOException-only catch.
     *
     * @return true if successful.
     */
    private boolean read() {
        try {
            final URLConnection conn = this.url.openConnection();
            conn.setConnectTimeout(5000);

            if (this.apiKey != null) {
                conn.addRequestProperty("X-API-Key", this.apiKey);
            }
            conn.addRequestProperty("User-Agent", Updater.USER_AGENT);

            conn.setDoOutput(true);

            final BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
            final String response = reader.readLine();

            final JSONArray array = (JSONArray) JSONValue.parse(response);

            if (array.size() == 0) {
                this.plugin.getLogger().warning("The updater could not find any files for the project id " + this.id);
                this.result = UpdateResult.FAIL_BADID;
                return false;
            }

            // The newest file is the last element of the feed.
            this.versionName = (String) ((JSONObject) array.get(array.size() - 1)).get(Updater.TITLE_VALUE);
            this.versionLink = (String) ((JSONObject) array.get(array.size() - 1)).get(Updater.LINK_VALUE);
            this.versionType = (String) ((JSONObject) array.get(array.size() - 1)).get(Updater.TYPE_VALUE);
            this.versionGameVersion = (String) ((JSONObject) array.get(array.size() - 1)).get(Updater.VERSION_VALUE);

            return true;
        } catch (final IOException e) {
            if (e.getMessage().contains("HTTP response code: 403")) {
                this.plugin.getLogger().severe("dev.bukkit.org rejected the API key provided in plugins/Updater/config.yml");
                this.plugin.getLogger().severe("Please double-check your configuration to ensure it is correct.");
                this.result = UpdateResult.FAIL_APIKEY;
            } else {
                this.plugin.getLogger().severe("The updater could not contact dev.bukkit.org for updating.");
                this.plugin.getLogger().severe("If you have not recently modified your configuration and this is the first time you are seeing this message, the site may be experiencing temporary downtime.");
                this.result = UpdateResult.FAIL_DBO;
            }
            this.plugin.getLogger().log(Level.SEVERE, null, e);
            return false;
        }
    }

    /**
     * Background worker: fetches feed details, runs the version check, and
     * downloads the file unless the UpdateType forbids it.
     */
    private class UpdateRunnable implements Runnable {

        @Override
        public void run() {
            if (Updater.this.url != null) {
                // Obtain the results of the project's file feed
                if (Updater.this.read()) {
                    if (Updater.this.versionCheck(Updater.this.versionName)) {
                        if ((Updater.this.versionLink != null) && (Updater.this.type != UpdateType.NO_DOWNLOAD)) {
                            String name = Updater.this.file.getName();
                            // If it's a zip file, it shouldn't be downloaded as the plugin's name
                            if (Updater.this.versionLink.endsWith(".zip")) {
                                final String[] split = Updater.this.versionLink.split("/");
                                name = split[split.length - 1];
                            }
                            Updater.this.saveFile(new File(Updater.this.plugin.getDataFolder().getParent(), Updater.this.updateFolder), name, Updater.this.versionLink);
                        } else {
                            Updater.this.result = UpdateResult.UPDATE_AVAILABLE;
                        }
                    }
                }
            }
        }
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.test; import org.elasticsearch.Version; import org.elasticsearch.common.collect.Tuple; import java.util.ArrayList; import java.util.Arrays; import java.util.LinkedHashSet; import java.util.List; import static java.util.Collections.singletonList; import static java.util.stream.Collectors.toCollection; import static java.util.stream.Collectors.toList; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; /** * Tests VersionUtils. Note: this test should remain unchanged across major versions * it uses the hardcoded versions on purpose. 
*/
public class VersionUtilsTests extends ESTestCase {

    /** All released versions must be reported in strictly ascending order. */
    public void testAllVersionsSorted() {
        List<Version> allVersions = VersionUtils.allReleasedVersions();
        for (int i = 0, j = 1; j < allVersions.size(); ++i, ++j) {
            assertTrue(allVersions.get(i).before(allVersions.get(j)));
        }
    }

    /**
     * {@code randomVersionBetween} must honor its inclusive bounds; a {@code null}
     * lower bound means "first known version" and a {@code null} upper bound means
     * {@code Version.CURRENT}.
     */
    public void testRandomVersionBetween() {
        // full range
        Version got = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), Version.CURRENT);
        assertTrue(got.onOrAfter(VersionUtils.getFirstVersion()));
        assertTrue(got.onOrBefore(Version.CURRENT));
        got = VersionUtils.randomVersionBetween(random(), null, Version.CURRENT);
        assertTrue(got.onOrAfter(VersionUtils.getFirstVersion()));
        assertTrue(got.onOrBefore(Version.CURRENT));
        got = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), null);
        assertTrue(got.onOrAfter(VersionUtils.getFirstVersion()));
        assertTrue(got.onOrBefore(Version.CURRENT));

        // sub range
        got = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.V_6_0_0_beta1);
        assertTrue(got.onOrAfter(Version.V_5_0_0));
        assertTrue(got.onOrBefore(Version.V_6_0_0_beta1));

        // unbounded lower
        got = VersionUtils.randomVersionBetween(random(), null, Version.V_6_0_0_beta1);
        assertTrue(got.onOrAfter(VersionUtils.getFirstVersion()));
        assertTrue(got.onOrBefore(Version.V_6_0_0_beta1));
        got = VersionUtils.randomVersionBetween(random(), null, VersionUtils.allReleasedVersions().get(0));
        assertTrue(got.onOrAfter(VersionUtils.getFirstVersion()));
        assertTrue(got.onOrBefore(VersionUtils.allReleasedVersions().get(0)));

        // unbounded upper
        got = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, null);
        assertTrue(got.onOrAfter(Version.V_5_0_0));
        assertTrue(got.onOrBefore(Version.CURRENT));
        got = VersionUtils.randomVersionBetween(random(), VersionUtils.getPreviousVersion(), null);
        assertTrue(got.onOrAfter(VersionUtils.getPreviousVersion()));
        assertTrue(got.onOrBefore(Version.CURRENT));

        // range of one
        got = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), VersionUtils.getFirstVersion());
        assertEquals(got, VersionUtils.getFirstVersion());
        got = VersionUtils.randomVersionBetween(random(), Version.CURRENT, Version.CURRENT);
        assertEquals(got, Version.CURRENT);
        got = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0_beta1, Version.V_6_0_0_beta1);
        assertEquals(got, Version.V_6_0_0_beta1);

        // implicit range of one (null bound collapses onto the explicit bound)
        got = VersionUtils.randomVersionBetween(random(), null, VersionUtils.getFirstVersion());
        assertEquals(got, VersionUtils.getFirstVersion());
        got = VersionUtils.randomVersionBetween(random(), Version.CURRENT, null);
        assertEquals(got, Version.CURRENT);

        // max or min can be an unreleased version
        Version unreleased = randomFrom(VersionUtils.allUnreleasedVersions());
        assertThat(VersionUtils.randomVersionBetween(random(), null, unreleased), lessThanOrEqualTo(unreleased));
        assertThat(VersionUtils.randomVersionBetween(random(), unreleased, null), greaterThanOrEqualTo(unreleased));
        assertEquals(unreleased, VersionUtils.randomVersionBetween(random(), unreleased, unreleased));
    }

    /** Fixture: a release branch where only the head (5.4.1) is unreleased. */
    public static class TestReleaseBranch {
        public static final Version V_5_3_0 = Version.fromString("5.3.0");
        public static final Version V_5_3_1 = Version.fromString("5.3.1");
        public static final Version V_5_3_2 = Version.fromString("5.3.2");
        public static final Version V_5_4_0 = Version.fromString("5.4.0");
        public static final Version V_5_4_1 = Version.fromString("5.4.1");
        public static final Version CURRENT = V_5_4_1;
    }

    public void testResolveReleasedVersionsForReleaseBranch() {
        Tuple<List<Version>, List<Version>> t =
            VersionUtils.resolveReleasedVersions(TestReleaseBranch.CURRENT, TestReleaseBranch.class);
        List<Version> released = t.v1();
        List<Version> unreleased = t.v2();
        assertEquals(Arrays.asList(TestReleaseBranch.V_5_3_0, TestReleaseBranch.V_5_3_1, TestReleaseBranch.V_5_3_2,
            TestReleaseBranch.V_5_4_0), released);
        assertEquals(singletonList(TestReleaseBranch.V_5_4_1), unreleased);
    }

    /** Fixture: an unreleased stable branch — both the maintenance head (5.3.2) and CURRENT (5.4.0) are unreleased. */
    public static class TestStableBranch {
        public static final Version V_5_3_0 = Version.fromString("5.3.0");
        public static final Version V_5_3_1 = Version.fromString("5.3.1");
        public static final Version V_5_3_2 = Version.fromString("5.3.2");
        public static final Version V_5_4_0 = Version.fromString("5.4.0");
        public static final Version CURRENT = V_5_4_0;
    }

    public void testResolveReleasedVersionsForUnreleasedStableBranch() {
        Tuple<List<Version>, List<Version>> t =
            VersionUtils.resolveReleasedVersions(TestStableBranch.CURRENT, TestStableBranch.class);
        List<Version> released = t.v1();
        List<Version> unreleased = t.v2();
        assertEquals(Arrays.asList(TestStableBranch.V_5_3_0, TestStableBranch.V_5_3_1), released);
        assertEquals(Arrays.asList(TestStableBranch.V_5_3_2, TestStableBranch.V_5_4_0), unreleased);
    }

    /** Fixture: CURRENT (5.5.0) sits on a stable branch with another unreleased stable branch (5.4.0) behind it. */
    public static class TestStableBranchBehindStableBranch {
        public static final Version V_5_3_0 = Version.fromString("5.3.0");
        public static final Version V_5_3_1 = Version.fromString("5.3.1");
        public static final Version V_5_3_2 = Version.fromString("5.3.2");
        public static final Version V_5_4_0 = Version.fromString("5.4.0");
        public static final Version V_5_5_0 = Version.fromString("5.5.0");
        public static final Version CURRENT = V_5_5_0;
    }

    public void testResolveReleasedVersionsForStableBranchBehindStableBranch() {
        Tuple<List<Version>, List<Version>> t =
            VersionUtils.resolveReleasedVersions(TestStableBranchBehindStableBranch.CURRENT,
                TestStableBranchBehindStableBranch.class);
        List<Version> released = t.v1();
        List<Version> unreleased = t.v2();
        assertEquals(Arrays.asList(TestStableBranchBehindStableBranch.V_5_3_0, TestStableBranchBehindStableBranch.V_5_3_1),
            released);
        assertEquals(Arrays.asList(TestStableBranchBehindStableBranch.V_5_3_2, TestStableBranchBehindStableBranch.V_5_4_0,
            TestStableBranchBehindStableBranch.V_5_5_0), unreleased);
    }

    /** Fixture: CURRENT is a pre-release (beta) on an unstable branch; released alphas stay released. */
    public static class TestUnstableBranch {
        public static final Version V_5_3_0 = Version.fromString("5.3.0");
        public static final Version V_5_3_1 = Version.fromString("5.3.1");
        public static final Version V_5_3_2 = Version.fromString("5.3.2");
        public static final Version V_5_4_0 = Version.fromString("5.4.0");
        public static final Version V_6_0_0_alpha1 = Version.fromString("6.0.0-alpha1");
        public static final Version V_6_0_0_alpha2 = Version.fromString("6.0.0-alpha2");
        public static final Version V_6_0_0_beta1 = Version.fromString("6.0.0-beta1");
        public static final Version CURRENT = V_6_0_0_beta1;
    }

    public void testResolveReleasedVersionsForUnstableBranch() {
        Tuple<List<Version>, List<Version>> t =
            VersionUtils.resolveReleasedVersions(TestUnstableBranch.CURRENT, TestUnstableBranch.class);
        List<Version> released = t.v1();
        List<Version> unreleased = t.v2();
        assertEquals(Arrays.asList(TestUnstableBranch.V_5_3_0, TestUnstableBranch.V_5_3_1,
            TestUnstableBranch.V_6_0_0_alpha1, TestUnstableBranch.V_6_0_0_alpha2), released);
        assertEquals(Arrays.asList(TestUnstableBranch.V_5_3_2, TestUnstableBranch.V_5_4_0,
            TestUnstableBranch.V_6_0_0_beta1), unreleased);
    }

    /** Fixture: CURRENT is the first maintenance release (6.0.1) after a new major release. */
    public static class TestNewMajorRelease {
        public static final Version V_5_6_0 = Version.fromString("5.6.0");
        public static final Version V_5_6_1 = Version.fromString("5.6.1");
        public static final Version V_5_6_2 = Version.fromString("5.6.2");
        public static final Version V_6_0_0_alpha1 = Version.fromString("6.0.0-alpha1");
        public static final Version V_6_0_0_alpha2 = Version.fromString("6.0.0-alpha2");
        public static final Version V_6_0_0_beta1 = Version.fromString("6.0.0-beta1");
        public static final Version V_6_0_0_beta2 = Version.fromString("6.0.0-beta2");
        public static final Version V_6_0_0 = Version.fromString("6.0.0");
        public static final Version V_6_0_1 = Version.fromString("6.0.1");
        public static final Version CURRENT = V_6_0_1;
    }

    public void testResolveReleasedVersionsAtNewMajorRelease() {
        Tuple<List<Version>, List<Version>> t =
            VersionUtils.resolveReleasedVersions(TestNewMajorRelease.CURRENT, TestNewMajorRelease.class);
        List<Version> released = t.v1();
        List<Version> unreleased = t.v2();
        assertEquals(Arrays.asList(TestNewMajorRelease.V_5_6_0, TestNewMajorRelease.V_5_6_1,
            TestNewMajorRelease.V_6_0_0_alpha1, TestNewMajorRelease.V_6_0_0_alpha2,
            TestNewMajorRelease.V_6_0_0_beta1, TestNewMajorRelease.V_6_0_0_beta2,
            TestNewMajorRelease.V_6_0_0), released);
        assertEquals(Arrays.asList(TestNewMajorRelease.V_5_6_2, TestNewMajorRelease.V_6_0_1), unreleased);
    }

    /** Fixture: a version bump (6.1.0) on the 6.x branch after the 6.0 major release. */
    public static class TestVersionBumpIn6x {
        public static final Version V_5_6_0 = Version.fromString("5.6.0");
        public static final Version V_5_6_1 = Version.fromString("5.6.1");
        public static final Version V_5_6_2 = Version.fromString("5.6.2");
        public static final Version V_6_0_0_alpha1 = Version.fromString("6.0.0-alpha1");
        public static final Version V_6_0_0_alpha2 = Version.fromString("6.0.0-alpha2");
        public static final Version V_6_0_0_beta1 = Version.fromString("6.0.0-beta1");
        public static final Version V_6_0_0_beta2 = Version.fromString("6.0.0-beta2");
        public static final Version V_6_0_0 = Version.fromString("6.0.0");
        public static final Version V_6_0_1 = Version.fromString("6.0.1");
        public static final Version V_6_1_0 = Version.fromString("6.1.0");
        public static final Version CURRENT = V_6_1_0;
    }

    public void testResolveReleasedVersionsAtVersionBumpIn6x() {
        Tuple<List<Version>, List<Version>> t =
            VersionUtils.resolveReleasedVersions(TestVersionBumpIn6x.CURRENT, TestVersionBumpIn6x.class);
        List<Version> released = t.v1();
        List<Version> unreleased = t.v2();
        assertEquals(Arrays.asList(TestVersionBumpIn6x.V_5_6_0, TestVersionBumpIn6x.V_5_6_1,
            TestVersionBumpIn6x.V_6_0_0_alpha1, TestVersionBumpIn6x.V_6_0_0_alpha2,
            TestVersionBumpIn6x.V_6_0_0_beta1, TestVersionBumpIn6x.V_6_0_0_beta2,
            TestVersionBumpIn6x.V_6_0_0), released);
        assertEquals(Arrays.asList(TestVersionBumpIn6x.V_5_6_2, TestVersionBumpIn6x.V_6_0_1,
            TestVersionBumpIn6x.V_6_1_0), unreleased);
    }

    /** Fixture: a brand new minor branch (6.2.0) with 6.1.x still maintained behind it. */
    public static class TestNewMinorBranchIn6x {
        public static final Version V_5_6_0 = Version.fromString("5.6.0");
        public static final Version V_5_6_1 = Version.fromString("5.6.1");
        public static final Version V_5_6_2 = Version.fromString("5.6.2");
        public static final Version V_6_0_0_alpha1 = Version.fromString("6.0.0-alpha1");
        public static final Version V_6_0_0_alpha2 = Version.fromString("6.0.0-alpha2");
        public static final Version V_6_0_0_beta1 = Version.fromString("6.0.0-beta1");
        public static final Version V_6_0_0_beta2 = Version.fromString("6.0.0-beta2");
        public static final Version V_6_0_0 = Version.fromString("6.0.0");
        public static final Version V_6_0_1 = Version.fromString("6.0.1");
        public static final Version V_6_1_0 = Version.fromString("6.1.0");
        public static final Version V_6_1_1 = Version.fromString("6.1.1");
        public static final Version V_6_1_2 = Version.fromString("6.1.2");
        public static final Version V_6_2_0 = Version.fromString("6.2.0");
        public static final Version CURRENT = V_6_2_0;
    }

    public void testResolveReleasedVersionsAtNewMinorBranchIn6x() {
        Tuple<List<Version>, List<Version>> t =
            VersionUtils.resolveReleasedVersions(TestNewMinorBranchIn6x.CURRENT, TestNewMinorBranchIn6x.class);
        List<Version> released = t.v1();
        List<Version> unreleased = t.v2();
        assertEquals(Arrays.asList(TestNewMinorBranchIn6x.V_5_6_0, TestNewMinorBranchIn6x.V_5_6_1,
            TestNewMinorBranchIn6x.V_6_0_0_alpha1, TestNewMinorBranchIn6x.V_6_0_0_alpha2,
            TestNewMinorBranchIn6x.V_6_0_0_beta1, TestNewMinorBranchIn6x.V_6_0_0_beta2,
            TestNewMinorBranchIn6x.V_6_0_0, TestNewMinorBranchIn6x.V_6_1_0,
            TestNewMinorBranchIn6x.V_6_1_1), released);
        assertEquals(Arrays.asList(TestNewMinorBranchIn6x.V_5_6_2, TestNewMinorBranchIn6x.V_6_0_1,
            TestNewMinorBranchIn6x.V_6_1_2, TestNewMinorBranchIn6x.V_6_2_0), unreleased);
    }

    /**
     * Tests that {@link Version#minimumCompatibilityVersion()} and {@link VersionUtils#allReleasedVersions()}
     * agree with the list of wire and index compatible versions we build in gradle.
     */
    public void testGradleVersionsMatchVersionUtils() {
        // First check the index compatible versions
        VersionsFromProperty indexCompatible = new VersionsFromProperty("tests.gradle_index_compat_versions");
        List<Version> released = VersionUtils.allReleasedVersions().stream()
            /* Java lists all versions from the 5.x series onwards, but we only want to consider
             * ones that we're supposed to be compatible with. */
            .filter(v -> v.onOrAfter(Version.CURRENT.minimumIndexCompatibilityVersion()))
            /* Gradle will never include *released* alphas or betas because it will prefer
             * the unreleased branch head. Gradle is willing to use branch heads that are
             * beta or rc so that we have *something* to test against even though we
             * do not offer backwards compatibility for alphas, betas, or rcs. */
            .filter(Version::isRelease)
            .collect(toList());
        List<String> releasedIndexCompatible = released.stream()
            .map(Object::toString)
            .collect(toList());
        assertEquals(releasedIndexCompatible, indexCompatible.released);

        List<String> unreleasedIndexCompatible = new ArrayList<>(VersionUtils.allUnreleasedVersions().stream()
            /* Gradle skips the current version because being backwards compatible
             * with yourself is implied. Java lists the version because it is useful. */
            .filter(v -> v != Version.CURRENT)
            /* Java lists all versions from the 5.x series onwards, but we only want to consider
             * ones that we're supposed to be compatible with. */
            .filter(v -> v.onOrAfter(Version.CURRENT.minimumIndexCompatibilityVersion()))
            /* Note that gradle skips alphas because they don't have any backwards
             * compatibility guarantees but keeps the last beta and rc in a branch
             * when there are only betas and RCs in that branch so that we have
             * *something* to test that branch against. There is no need to recreate
             * that logic here because allUnreleasedVersions already only contains
             * the heads of branches so it should be good enough to just keep all
             * the non-alphas. */
            .filter(v -> false == v.isAlpha())
            .map(Object::toString)
            .collect(toCollection(LinkedHashSet::new)));
        assertEquals(unreleasedIndexCompatible, indexCompatible.unreleased);

        // Now the wire compatible versions
        VersionsFromProperty wireCompatible = new VersionsFromProperty("tests.gradle_wire_compat_versions");
        Version minimumCompatibleVersion = Version.CURRENT.minimumCompatibilityVersion();
        List<String> releasedWireCompatible = released.stream()
            .filter(v -> v.onOrAfter(minimumCompatibleVersion))
            .map(Object::toString)
            .collect(toList());
        assertEquals(releasedWireCompatible, wireCompatible.released);
        List<String> unreleasedWireCompatible = VersionUtils.allUnreleasedVersions().stream()
            /* Gradle skips the current version because being backwards compatible
             * with yourself is implied. Java lists the version because it is useful. */
            .filter(v -> v != Version.CURRENT)
            .filter(v -> v.onOrAfter(minimumCompatibleVersion))
            .map(Object::toString)
            .collect(toList());
        assertEquals(unreleasedWireCompatible, wireCompatible.unreleased);
    }

    /**
     * Read a versions system property as set by gradle into a tuple of {@code (releasedVersion, unreleasedVersion)}.
     */
    private class VersionsFromProperty {
        private final List<String> released = new ArrayList<>();
        private final List<String> unreleased = new ArrayList<>();

        private VersionsFromProperty(String property) {
            String versions = System.getProperty(property);
            assertNotNull("Couldn't find [" + property + "]. Gradle should set these before running the tests.", versions);
            logger.info("Looked up versions [{}={}]", property, versions);

            for (String version : versions.split(",")) {
                // Gradle marks not-yet-released versions with a -SNAPSHOT suffix.
                if (version.endsWith("-SNAPSHOT")) {
                    unreleased.add(version.replace("-SNAPSHOT", ""));
                } else {
                    released.add(version);
                }
            }
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.distributed.dht.topology; import java.util.Collection; import java.util.Comparator; import java.util.HashSet; import java.util.Map; import java.util.Queue; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.PriorityBlockingQueue; import java.util.concurrent.atomic.AtomicInteger; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.IgniteInternalFuture; import org.apache.ignite.internal.managers.communication.GridIoPolicy; import org.apache.ignite.internal.processors.cache.CacheGroupContext; import org.apache.ignite.internal.processors.cache.GridCacheSharedManagerAdapter; import org.apache.ignite.internal.util.future.GridFutureAdapter; import org.apache.ignite.internal.util.typedef.internal.LT; import org.apache.ignite.internal.util.typedef.internal.U; import static org.apache.ignite.IgniteSystemProperties.IGNITE_EVICTION_PERMITS; import static org.apache.ignite.IgniteSystemProperties.getInteger; import static org.apache.ignite.IgniteSystemProperties.getLong; /** * Class that serves asynchronous part eviction process. 
 * Multiple partition from group can be evicted at the same time.
 */
public class PartitionsEvictManager extends GridCacheSharedManagerAdapter {
    /** Default eviction progress show frequency. */
    private static final int DEFAULT_SHOW_EVICTION_PROGRESS_FREQ_MS = 2 * 60 * 1000; // 2 Minutes.

    /** Eviction progress frequency property name. */
    private static final String SHOW_EVICTION_PROGRESS_FREQ = "SHOW_EVICTION_PROGRESS_FREQ";

    /** Eviction thread pool policy. */
    private static final byte EVICT_POOL_PLC = GridIoPolicy.SYSTEM_POOL;

    /** Eviction progress frequency in ms. */
    private final long evictionProgressFreqMs =
        getLong(SHOW_EVICTION_PROGRESS_FREQ, DEFAULT_SHOW_EVICTION_PROGRESS_FREQ_MS);

    /** Configured eviction permits; {@code -1} means "derive from system pool size" (see {@link #start0()}). */
    private final int confPermits = getInteger(IGNITE_EVICTION_PERMITS, -1);

    /** Next time of show eviction progress. */
    private long nextShowProgressTime;

    /** Per-group eviction contexts, keyed by cache group id. */
    private final Map<Integer, GroupEvictionContext> evictionGroupsMap = new ConcurrentHashMap<>();

    /** Flag indicates that eviction process has stopped. */
    private volatile boolean stop;

    /** Shared stop-check context: reports {@code true} once this manager is stopped. */
    private final EvictionContext sharedEvictionContext = () -> stop;

    /** Number of maximum concurrent operations. */
    private volatile int threads;

    /** How many eviction tasks may execute concurrently. */
    private volatile int permits;

    /**
     * Bucket queue for load-balancing partitions to the threads by partition size.
     * Is not thread-safe: all methods should be called under {@link #mux} synchronization.
     */
    private volatile BucketQueue evictionQueue;

    /** Lock object. */
    private final Object mux = new Object();

    /**
     * Stops eviction process for group.
     *
     * Method awaits last offered partition eviction.
     *
     * @param grp Group context.
     */
    public void onCacheGroupStopped(CacheGroupContext grp) {
        GroupEvictionContext groupEvictionContext = evictionGroupsMap.remove(grp.groupId());

        if (groupEvictionContext != null) {
            groupEvictionContext.stop();

            groupEvictionContext.awaitFinishAll();
        }
    }

    /**
     * Adds partition to eviction queue and starts eviction process if permit available.
     *
     * @param grp Group context.
     * @param part Partition to evict.
     */
    public void evictPartitionAsync(CacheGroupContext grp, GridDhtLocalPartition part) {
        GroupEvictionContext groupEvictionContext = evictionGroupsMap.computeIfAbsent(
            grp.groupId(), (k) -> new GroupEvictionContext(grp));

        // Check node stop.
        if (groupEvictionContext.shouldStop())
            return;

        int bucket;

        synchronized (mux) {
            // partIds de-duplicates: a partition already queued for this group is not offered twice.
            if (!groupEvictionContext.partIds.add(part.id()))
                return;

            bucket = evictionQueue.offer(new PartitionEvictionTask(part, groupEvictionContext));
        }

        groupEvictionContext.totalTasks.incrementAndGet();

        if (log.isDebugEnabled())
            log.debug("Partition has been scheduled for eviction [grp=" + grp.cacheOrGroupName()
                + ", p=" + part.id() + ", state=" + part.state() + "]");

        scheduleNextPartitionEviction(bucket);
    }

    /**
     * Gets next partition from the queue and schedules it for eviction.
     *
     * @param bucket Bucket.
     */
    private void scheduleNextPartitionEviction(int bucket) {
        // Check node stop.
        if (sharedEvictionContext.shouldStop())
            return;

        synchronized (mux) {
            // Check that we have permits for next operation.
            if (permits > 0) {
                // If queue is empty, nothing to do.
                if (evictionQueue.isEmpty())
                    return;

                // Get tasks while we have permits.
                // NOTE(review): loop condition is `permits >= 0` while the guard above is `permits > 0`,
                // so one extra task may be scheduled (permits can drop to -1) — confirm this is intended.
                while (permits >= 0) {
                    // Get task from bucket.
                    PartitionEvictionTask evictionTask = evictionQueue.poll(bucket);

                    // If bucket is empty, try to get from another.
                    if (evictionTask == null) {
                        // While queue still has tasks.
                        while (!evictionQueue.isEmpty()) {
                            // Get task from any other bucket.
                            evictionTask = evictionQueue.pollAny();

                            // Stop iteration if we found a task.
                            if (evictionTask != null)
                                break;
                        }

                        // If no task was found, nothing more to do.
                        if (evictionTask == null)
                            return;
                    }

                    // Print current eviction progress.
                    showProgress();

                    GroupEvictionContext groupEvictionContext = evictionTask.groupEvictionCtx;

                    // Check that group or node is stopping; skip (and drop) the task in that case.
                    if (groupEvictionContext.shouldStop())
                        continue;

                    // Take a permit for this task.
                    permits--;

                    // Register task future; needed if group or node is stopped later.
                    groupEvictionContext.taskScheduled(evictionTask);

                    evictionTask.finishFut.listen(f -> {
                        synchronized (mux) {
                            // Return permit after task completed.
                            permits++;
                        }

                        // Re-schedule a new task from the same bucket.
                        scheduleNextPartitionEviction(bucket);
                    });

                    // Submit task to executor.
                    cctx.kernalContext()
                        .closure()
                        .runLocalSafe(evictionTask, EVICT_POOL_PLC);
                }
            }
        }
    }

    /**
     * Shows progress of eviction.
     * Called under {@link #mux} (from {@link #scheduleNextPartitionEviction(int)}), which also
     * guards {@link #nextShowProgressTime}.
     */
    private void showProgress() {
        if (U.currentTimeMillis() >= nextShowProgressTime) {
            int size = evictionQueue.size() + 1; // Queue size plus current partition.

            if (log.isInfoEnabled())
                log.info("Eviction in progress [permits=" + permits +
                    ", threads=" + threads +
                    ", groups=" + evictionGroupsMap.keySet().size() +
                    ", remainingPartsToEvict=" + size + "]");

            evictionGroupsMap.values().forEach(GroupEvictionContext::showProgress);

            nextShowProgressTime = U.currentTimeMillis() + evictionProgressFreqMs;
        }
    }

    /** {@inheritDoc} */
    @Override protected void start0() throws IgniteCheckedException {
        super.start0();

        // If property is not set up, calculate permits as a quarter of the system pool.
        if (confPermits == -1) {
            int sysPoolSize = cctx.kernalContext().config().getSystemThreadPoolSize();

            threads = permits = sysPoolSize / 4;
        }
        else
            threads = permits = confPermits;

        // Avoid 0 permits if sys pool size is less than 4.
        if (threads == 0)
            threads = permits = 1;

        log.info("Evict partition permits=" + permits);

        evictionQueue = new BucketQueue(threads);
    }

    /** {@inheritDoc} */
    @Override protected void stop0(boolean cancel) {
        super.stop0(cancel);

        stop = true;

        Collection<GroupEvictionContext> evictionGrps = evictionGroupsMap.values();

        // Stop all groups first, then await — so no group keeps scheduling while others are drained.
        evictionGrps.forEach(GroupEvictionContext::stop);

        evictionGrps.forEach(GroupEvictionContext::awaitFinishAll);
    }

    /**
     * Per-group eviction state: tracks queued partition ids and futures of scheduled eviction tasks.
     */
    private class GroupEvictionContext implements EvictionContext {
        /** Cache group this context belongs to. */
        private final CacheGroupContext grp;

        /** Deduplication set of queued partition ids. Guarded by {@link #mux} on add. */
        private final Set<Integer> partIds = new HashSet<>();

        /** Future for currently running partition eviction task, keyed by partition id. */
        private final Map<Integer, IgniteInternalFuture<?>> partsEvictFutures = new ConcurrentHashMap<>();

        /** Flag indicates that eviction process has stopped for this group. */
        private volatile boolean stop;

        /** Total partitions to evict. */
        private AtomicInteger totalTasks = new AtomicInteger();

        /** Total partition evictions in progress. Guarded by {@code this}. */
        private int taskInProgress;

        /**
         * @param grp Group context.
         */
        private GroupEvictionContext(CacheGroupContext grp) {
            this.grp = grp;
        }

        /** {@inheritDoc} */
        @Override public boolean shouldStop() {
            return stop || sharedEvictionContext.shouldStop();
        }

        /**
         * Registers a scheduled eviction task so it can be awaited on stop.
         *
         * @param task Partition eviction task.
         */
        private synchronized void taskScheduled(PartitionEvictionTask task) {
            if (shouldStop())
                return;

            taskInProgress++;

            GridFutureAdapter<?> fut = task.finishFut;

            int partId = task.part.id();

            partIds.remove(partId);

            partsEvictFutures.put(partId, fut);

            fut.listen(f -> {
                synchronized (this) {
                    taskInProgress--;

                    partsEvictFutures.remove(partId, f);

                    // Last finished task for the group removes the whole context.
                    if (totalTasks.decrementAndGet() == 0)
                        evictionGroupsMap.remove(grp.groupId());
                }
            });
        }

        /**
         * Stop eviction for group.
         */
        private void stop() {
            stop = true;
        }

        /**
         * Await evict finish.
         */
        private void awaitFinishAll() {
            partsEvictFutures.forEach(this::awaitFinish);

            evictionGroupsMap.remove(grp.groupId());
        }

        /**
         * Await evict finish partition.
         */
        private void awaitFinish(Integer part, IgniteInternalFuture<?> fut) {
            // Wait for last offered partition eviction completion
            try {
                log.info("Await partition evict, grpName=" + grp.cacheOrGroupName() +
                    ", grpId=" + grp.groupId() + ", partId=" + part);

                fut.get();
            }
            catch (IgniteCheckedException e) {
                // NOTE(review): warning is only emitted when debug logging is enabled — confirm gating is intended.
                if (log.isDebugEnabled())
                    log.warning("Failed to await partition eviction during stopping.", e);
            }
        }

        /**
         * Shows eviction progress for this group.
         */
        private void showProgress() {
            if (log.isInfoEnabled())
                log.info("Group eviction in progress [grpName=" + grp.cacheOrGroupName() +
                    ", grpId=" + grp.groupId() +
                    ", remainingPartsToEvict=" + (totalTasks.get() - taskInProgress) +
                    ", partsEvictInProgress=" + taskInProgress +
                    ", totalParts= " + grp.topology().localPartitions().size() + "]");
        }
    }

    /**
     * Task for self-scheduled partition eviction / clearing.
     */
    private class PartitionEvictionTask implements Runnable {
        /** Partition to evict. */
        private final GridDhtLocalPartition part;

        /** Partition size at scheduling time; used by {@link BucketQueue} for load-balancing. */
        private final long size;

        /** Eviction context. */
        private final GroupEvictionContext groupEvictionCtx;

        /** Completes when the eviction attempt finishes, successfully or not. */
        private final GridFutureAdapter<?> finishFut = new GridFutureAdapter<>();

        /**
         * @param part Partition.
         * @param groupEvictionCtx Eviction context.
         */
        private PartitionEvictionTask(
            GridDhtLocalPartition part,
            GroupEvictionContext groupEvictionCtx
        ) {
            this.part = part;
            this.groupEvictionCtx = groupEvictionCtx;

            size = part.fullSize();
        }

        /** {@inheritDoc} */
        @Override public void run() {
            if (groupEvictionCtx.shouldStop()) {
                finishFut.onDone();

                return;
            }

            try {
                boolean success = part.tryClear(groupEvictionCtx);

                if (success) {
                    if (part.state() == GridDhtPartitionState.EVICTED && part.markForDestroy())
                        part.destroy();
                }

                // Complete eviction future before schedule new to prevent deadlock with
                // simultaneous eviction stopping and scheduling new eviction.
                finishFut.onDone();

                // Re-offer partition if clear was unsuccessful due to partition reservation.
                if (!success)
                    evictPartitionAsync(groupEvictionCtx.grp, part);
            }
            catch (Throwable ex) {
                finishFut.onDone(ex);

                if (cctx.kernalContext().isStopping()) {
                    LT.warn(log, ex, "Partition eviction failed (current node is stopping).",
                        false,
                        true);
                }
                else {
                    LT.error(log, ex, "Partition eviction failed, this can cause grid hang.");
                }
            }
        }
    }

    /**
     * Queue of eviction tasks split into buckets (one per eviction thread),
     * balanced by cumulative partition size. Not thread-safe; callers synchronize on {@link #mux}.
     */
    private class BucketQueue {
        /** Queues containing partitions scheduled for eviction. */
        private final Queue<PartitionEvictionTask>[] buckets;

        /** Cumulative size of tasks currently held by each bucket. */
        private final long[] bucketSizes;

        /**
         * @param buckets Number of buckets.
         */
        BucketQueue(int buckets) {
            this.buckets = new Queue[buckets];

            for (int i = 0; i < buckets; i++)
                this.buckets[i] = createEvictPartitionQueue();

            bucketSizes = new long[buckets];
        }

        /**
         * Poll eviction task from queue for specific bucket.
         *
         * @param bucket Bucket index.
         * @return Partition evict task, or {@code null} if bucket queue is empty.
         */
        PartitionEvictionTask poll(int bucket) {
            PartitionEvictionTask task = buckets[bucket].poll();

            if (task != null)
                bucketSizes[bucket] -= task.size;

            return task;
        }

        /**
         * Poll eviction task from queue (bucket is not specific).
         *
         * @return Partition evict task, or {@code null} if all buckets are empty.
         */
        PartitionEvictionTask pollAny() {
            for (int bucket = 0; bucket < bucketSizes.length; bucket++) {
                if (!buckets[bucket].isEmpty())
                    return poll(bucket);
            }

            return null;
        }

        /**
         * Offer task to queue (to the bucket with the smallest cumulative size).
         *
         * @param task Eviction task.
         * @return Bucket index.
         */
        int offer(PartitionEvictionTask task) {
            int bucket = calculateBucket();

            buckets[bucket].offer(task);

            bucketSizes[bucket] += task.size;

            return bucket;
        }

        /**
         * @return {@code True} if queue is empty, {@code false} if not empty.
         */
        boolean isEmpty() {
            return size() == 0;
        }

        /**
         * @return Queue size (sum over all buckets).
         */
        int size() {
            int size = 0;

            for (Queue<PartitionEvictionTask> queue : buckets) {
                size += queue.size();
            }

            return size;
        }

        /**
         * @return Index of the bucket with the smallest cumulative size.
         */
        private int calculateBucket() {
            int min = 0;

            for (int bucket = min; bucket < bucketSizes.length; bucket++) {
                if (bucketSizes[min] > bucketSizes[bucket])
                    min = bucket;
            }

            return min;
        }

        /**
         * Queue type selector: {@code 1} - priority queue (ordered by partition size, largest-first
         * per the comparator below), any other value - FIFO.
         * NOTE(review): an earlier comment said "0 - PRIORITY QUEUE" which contradicts the switch;
         * the code treats case 1 as the priority queue.
         */
        private static final byte QUEUE_TYPE = 1;

        /**
         * @return Queue for evict partitions.
         */
        private Queue<PartitionEvictionTask> createEvictPartitionQueue() {
            switch (QUEUE_TYPE) {
                case 1:
                    return new PriorityBlockingQueue<>(
                        1000, Comparator.comparingLong(p -> p.part.fullSize()));
                default:
                    return new LinkedBlockingQueue<>();
            }
        }
    }
}
package org.robolectric.shadows;

import android.content.res.Resources;
import android.util.TypedValue;
import org.robolectric.res.AttrData;
import org.robolectric.res.Attribute;
import org.robolectric.res.DrawableNode;
import org.robolectric.res.DrawableResourceLoader;
import org.robolectric.res.FsFile;
import org.robolectric.res.ResName;
import org.robolectric.res.ResType;
import org.robolectric.res.ResourceIndex;
import org.robolectric.res.ResourceLoader;
import org.robolectric.res.TypedResource;
import org.robolectric.util.Util;

import java.util.LinkedHashMap;
import java.util.Map;

/**
 * Converts raw resource attribute values (strings from resource XML) into
 * filled-in Android {@link TypedValue} instances. Each supported value kind
 * (boolean, color, dimension, enum, flag, ...) has its own {@code Converter}
 * subclass; {@link #getConverter(ResType)} picks the right one.
 *
 * @param <T> type of the raw data accepted by {@link #fillTypedValue}.
 */
public class Converter<T> {
  // Fake asset cookie counter; a fresh cookie per value defeats Android's
  // caching of loaded resources (see comment in convertAndFill below).
  private static int nextStringCookie = 0xbaaa5;

  // Maps attr "format" keywords to the ResType each one converts through.
  private static final Map<String, ResType> ATTR_TYPE_MAP = new LinkedHashMap<>();

  static {
    ATTR_TYPE_MAP.put("boolean", ResType.BOOLEAN);
    ATTR_TYPE_MAP.put("color", ResType.COLOR);
    ATTR_TYPE_MAP.put("dimension", ResType.DIMEN);
    ATTR_TYPE_MAP.put("float", ResType.FLOAT);
    ATTR_TYPE_MAP.put("integer", ResType.INTEGER);
    ATTR_TYPE_MAP.put("string", ResType.CHAR_SEQUENCE);
    ATTR_TYPE_MAP.put("fraction", ResType.FRACTION);
  }

  /** @return a fresh, unique asset cookie (synchronized: counter is shared mutable state). */
  synchronized private static int getNextStringCookie() {
    return nextStringCookie++;
  }

  /**
   * Looks up the attr definition for {@code attribute} and delegates to the
   * overload below. Null/empty attributes fill {@code outValue} as TYPE_NULL.
   */
  public static void convertAndFill(Attribute attribute, TypedValue outValue, ResourceLoader resourceLoader, String qualifiers, boolean resolveRefs) {
    if (attribute == null || attribute.isNull() || attribute.isEmpty()) {
      outValue.type = TypedValue.TYPE_NULL;
      // Distinguish explicitly-empty values from entirely missing ones.
      if (attribute != null && attribute.isEmpty()) {
        outValue.data = TypedValue.DATA_NULL_EMPTY;
      } else {
        outValue.data = TypedValue.DATA_NULL_UNDEFINED;
      }
      return;
    }

    TypedResource attrTypeData = resourceLoader.getValue(attribute.resName, qualifiers);
    // No attr definition found - leave outValue untouched.
    if (attrTypeData == null) {
      return;
    }

    AttrData attrData = (AttrData) attrTypeData.getData();
    convertAndFill(attribute, outValue, resourceLoader, qualifiers, attrData, resolveRefs);
  }

  /**
   * Converts {@code attribute}'s value into {@code outValue}, following
   * resource references (possibly chained) and trying each type listed in the
   * attr's "format" string until one converter succeeds.
   */
  public static void convertAndFill(Attribute attribute, TypedValue outValue, ResourceLoader resourceLoader, String qualifiers, AttrData attrData, boolean resolveRefs) {
    // short-circuit Android caching of loaded resources cuz our string positions don't remain stable...
    outValue.assetCookie = getNextStringCookie();

    String format = attrData.getFormat();
    String[] types = format.split("\\|");

    // TODO: Handle resource and style references
    if (attribute.isStyleReference()) {
      return;
    }

    ResourceIndex resourceIndex = resourceLoader.getResourceIndex();

    // Follow @resource references; loops because a reference may point at
    // another reference (the "continue" below re-enters with the new target).
    while (attribute.isResourceReference()) {
      ResName resName = attribute.getResourceReference();
      Integer resourceId = resourceIndex.getResourceId(resName);
      if (resourceId == null) {
        throw new Resources.NotFoundException("unknown resource " + resName);
      }
      outValue.type = TypedValue.TYPE_REFERENCE;
      outValue.resourceId = resourceId;

      TypedResource dereferencedRef = resourceLoader.getValue(resName, qualifiers);
      if (dereferencedRef == null) {
        // No value to dereference; for several resource kinds the id alone suffices.
        if (resName.type.equals("id")) {
          return;
        } else if (resName.type.equals("layout")) {
          return; // resourceId is good enough, right?
        } else if (resName.type.equals("dimen")) {
          return;
        } else if (resName.type.equals("transition")) {
          return;
        } else if (resName.type.equals("interpolator")) {
          return;
        } else if (resName.type.equals("menu")) {
          return;
        } else if (DrawableResourceLoader.isStillHandledHere(resName)) {
          // wtf. color and drawable references reference are all kinds of stupid.
          DrawableNode drawableNode = resourceLoader.getDrawableNode(resName, qualifiers);
          if (drawableNode == null) {
            throw new Resources.NotFoundException("can't find file for " + resName);
          } else {
            outValue.type = TypedValue.TYPE_STRING;
            outValue.data = 0;
            outValue.assetCookie = getNextStringCookie();
            outValue.string = drawableNode.getFsFile().getPath();
            return;
          }
        } else {
          throw new RuntimeException("huh? " + resName);
        }
      } else {
        if (dereferencedRef.isFile()) {
          // File-backed resource: hand back its path as a string value.
          outValue.type = TypedValue.TYPE_STRING;
          outValue.data = 0;
          outValue.assetCookie = getNextStringCookie();
          outValue.string = dereferencedRef.asString();
          return;
        } else if (dereferencedRef.getData() instanceof String) {
          attribute = new Attribute(attribute.resName, dereferencedRef.asString(), resName.packageName);
          // The dereferenced value is itself a reference - resolve again.
          if (attribute.isResourceReference()) {
            continue;
          }
          if (resolveRefs) {
            getConverter(dereferencedRef.getResType()).fillTypedValue(attribute.value, outValue);
            return;
          }
        }
      }
      break;
    }

    if (attribute.isNull()) {
      outValue.type = TypedValue.TYPE_NULL;
      return;
    }

    // Special case for attrs that can be integers or enums, like numColumns.
    // todo: generalize this!
    if (format.equals("integer|enum") || format.equals("dimension|enum")) {
      if (attribute.value.matches("^\\d.*")) {
        types = new String[]{types[0]};
      } else {
        types = new String[]{"enum"};
      }
    }

    // Try each declared format type until one converter accepts the value.
    for (String type : types) {
      if ("reference".equals(type)) continue; // already handled above

      Converter converter = ATTR_TYPE_MAP.containsKey(type) ? getConverter(ATTR_TYPE_MAP.get(type)) : null;

      if (converter == null) {
        // enum/flag formats need the attr's own value mapping, so they are
        // built per-attr instead of living in ATTR_TYPE_MAP.
        if (type.equals("enum")) {
          converter = new EnumConverter(attrData);
        } else if (type.equals("flag")) {
          converter = new FlagConverter(attrData);
        }
      }

      if (converter != null) {
        try {
          converter.fillTypedValue(attribute.value, outValue);
        } catch (Exception e) {
          throw new RuntimeException("error converting " + attribute.value + " using " + converter.getClass().getSimpleName(), e);
        }
        return;
      }
    }
  }

  // TODO: Handle 'anim' resources
  /**
   * @return the converter instance for the given resource type.
   * @throws UnsupportedOperationException for types with no converter.
   */
  public static Converter getConverter(ResType resType) {
    switch (resType) {
      case ATTR_DATA:
        return new FromAttrData();
      case BOOLEAN:
        return new FromBoolean();
      case CHAR_SEQUENCE:
        return new FromCharSequence();
      case COLOR:
        return new FromColor();
      case COLOR_STATE_LIST:
        return new FromFilePath();
      case DIMEN:
        return new FromDimen();
      case FILE:
        return new FromFile();
      case FLOAT:
        return new FromFloat();
      case INTEGER:
        return new FromInt();
      case FRACTION:
        return new FromFraction();
      case DRAWABLE:
        // TODO: maybe call this DRAWABLE_VALUE instead to avoid confusion?
        return new FromDrawableValue();
      case LAYOUT:
        // TODO: LOLLIPOP: should we rename this? it's also used for drawable xml files
        return new FromFilePath();
      case CHAR_SEQUENCE_ARRAY:
      case INTEGER_ARRAY:
        return new FromArray();
      default:
        throw new UnsupportedOperationException(resType.name());
    }
  }

  // Base implementations: every accessor fails; subclasses override the ones
  // that make sense for their value kind.

  public CharSequence asCharSequence(TypedResource typedResource) {
    throw cantDo("asCharSequence");
  }

  public int asInt(TypedResource typedResource) {
    throw cantDo("asInt");
  }

  public TypedResource[] getItems(TypedResource typedResource) {
    throw cantDo("getItems");
  }

  public void fillTypedValue(T data, TypedValue typedValue) {
    throw cantDo("fillTypedValue");
  }

  private UnsupportedOperationException cantDo(String operation) {
    return new UnsupportedOperationException(getClass().getName() + " doesn't support " + operation);
  }

  public static class FromAttrData extends Converter<AttrData> {
    @Override
    public CharSequence asCharSequence(TypedResource typedResource) {
      return typedResource.asString();
    }

    @Override
    public void fillTypedValue(AttrData data, TypedValue typedValue) {
      typedValue.type = TypedValue.TYPE_STRING;
      // NOTE(review): deliberately unimplemented in the original - always throws.
      throw new RuntimeException("huh?");
    }
  }

  public static class FromCharSequence extends Converter<String> {
    @Override
    public CharSequence asCharSequence(TypedResource typedResource) {
      return typedResource.asString().trim();
    }

    @Override
    public int asInt(TypedResource typedResource) {
      return convertInt(typedResource.asString().trim());
    }

    @Override
    public void fillTypedValue(String data, TypedValue typedValue) {
      typedValue.type = TypedValue.TYPE_STRING;
      typedValue.data = 0;
      typedValue.assetCookie = getNextStringCookie();
      typedValue.string = data;
    }
  }

  public static class FromColor extends Converter<String> {
    @Override
    public void fillTypedValue(String data, TypedValue typedValue) {
      typedValue.type = TypedValue.TYPE_INT_COLOR_ARGB8;
      typedValue.data = ResourceHelper.getColor(data);
      typedValue.assetCookie = 0;
    }

    @Override
    public int asInt(TypedResource typedResource) {
      return ResourceHelper.getColor(typedResource.asString().trim());
    }
  }

  // Same conversion as FromColor: drawable *values* (as opposed to drawable
  // files) are treated as colors here.
  public static class FromDrawableValue extends Converter<String> {
    @Override
    public void fillTypedValue(String data, TypedValue typedValue) {
      typedValue.type = TypedValue.TYPE_INT_COLOR_ARGB8;
      typedValue.data = ResourceHelper.getColor(data);
      typedValue.assetCookie = 0;
    }

    @Override
    public int asInt(TypedResource typedResource) {
      return ResourceHelper.getColor(typedResource.asString().trim());
    }
  }

  private static class FromFilePath extends Converter<String> {
    @Override
    public void fillTypedValue(String data, TypedValue typedValue) {
      typedValue.type = TypedValue.TYPE_STRING;
      typedValue.data = 0;
      typedValue.string = data;
      typedValue.assetCookie = getNextStringCookie();
    }
  }

  public static class FromArray extends Converter {
    @Override
    public TypedResource[] getItems(TypedResource typedResource) {
      return (TypedResource[]) typedResource.getData();
    }
  }

  private static class FromInt extends Converter<String> {
    @Override
    public void fillTypedValue(String data, TypedValue typedValue) {
      typedValue.type = TypedValue.TYPE_INT_HEX;
      typedValue.data = convertInt(data);
      typedValue.assetCookie = 0;
    }

    @Override
    public int asInt(TypedResource typedResource) {
      return convertInt(typedResource.asString().trim());
    }
  }

  private static class FromFraction extends Converter<String> {
    @Override
    public void fillTypedValue(String data, TypedValue typedValue) {
      ResourceHelper.parseFloatAttribute(null, data, typedValue, false);
    }
  }

  private static class FromFile extends Converter<FsFile> {
    @Override
    public void fillTypedValue(FsFile data, TypedValue typedValue) {
      typedValue.type = TypedValue.TYPE_STRING;
      typedValue.data = 0;
      typedValue.string = data.getPath();
      typedValue.assetCookie = getNextStringCookie();
    }
  }

  private static class FromFloat extends Converter<String> {
    @Override
    public void fillTypedValue(String data, TypedValue typedValue) {
      ResourceHelper.parseFloatAttribute(null, data, typedValue, false);
    }
  }

  private static class FromBoolean extends Converter<String> {
    @Override
    public void fillTypedValue(String data, TypedValue typedValue) {
      typedValue.type = TypedValue.TYPE_INT_BOOLEAN;
      typedValue.data = convertBool(data) ? 1 : 0;
      typedValue.assetCookie = 0;
    }
  }

  private static class FromDimen extends Converter<String> {
    @Override
    public void fillTypedValue(String data, TypedValue typedValue) {
      ResourceHelper.parseFloatAttribute(null, data, typedValue, false);
    }
  }

  /**
   * Parses a decimal or hex integer string, tolerating values that only fit
   * when read as an unsigned 32-bit quantity (e.g. 0xFFFF0000).
   */
  private static int convertInt(String rawValue) {
    try {
      // Decode into long, because there are some large hex values in the android resource files
      // (e.g. config_notificationsBatteryLowARGB = 0xFFFF0000 in sdk 14).
      // Integer.decode() does not support large, i.e. negative values in hex numbers.
      // try parsing decimal number
      return (int) Long.parseLong(rawValue);
    } catch (NumberFormatException nfe) {
      // try parsing hex number
      try {
        return Long.decode(rawValue).intValue();
      } catch (NumberFormatException e) {
        throw new RuntimeException(rawValue + " is not an integer.", nfe);
      }
    }
  }

  /**
   * Parses "true"/"false" (case-insensitive) or any integer (non-zero = true).
   */
  private static boolean convertBool(String rawValue) {
    if ("true".equalsIgnoreCase(rawValue)) {
      return true;
    } else if ("false".equalsIgnoreCase(rawValue)) {
      return false;
    }

    try {
      int intValue = Integer.parseInt(rawValue);
      return intValue != 0;
    } catch (NumberFormatException e) {
      throw new RuntimeException(e);
    }
  }

  private static class EnumConverter extends EnumOrFlagConverter {
    public EnumConverter(AttrData attrData) {
      super(attrData);
    }

    @Override
    public void fillTypedValue(String data, TypedValue typedValue) {
      typedValue.type = TypedValue.TYPE_INT_HEX;
      typedValue.data = findValueFor(data);
      typedValue.assetCookie = 0;
    }
  }

  private static class FlagConverter extends EnumOrFlagConverter {
    public FlagConverter(AttrData attrData) {
      super(attrData);
    }

    @Override
    public void fillTypedValue(String data, TypedValue typedValue) {
      // A flag value is a '|'-separated list of flag names OR-ed together.
      int flags = 0;
      for (String key : data.split("\\|")) {
        flags |= findValueFor(key);
      }

      typedValue.type = TypedValue.TYPE_INT_HEX;
      typedValue.data = flags;
      typedValue.assetCookie = 0;
    }
  }

  private static class EnumOrFlagConverter extends Converter<String> {
    private final AttrData attrData;

    public EnumOrFlagConverter(AttrData attrData) {
      this.attrData = attrData;
    }

    /** Resolves an enum/flag name (or literal numeric value) to its int value. */
    protected int findValueFor(String key) {
      String valueFor = attrData.getValueFor(key);
      if (valueFor == null) {
        // Maybe they have passed in the value directly, rather than the name.
        if (attrData.isValue(key)) {
          valueFor = key;
        } else {
          throw new RuntimeException("no value found for " + key);
        }
      }
      return Util.parseInt(valueFor);
    }
  }
}
// "Therefore those skilled at the unorthodox
// are infinite as heaven and earth,
// inexhaustible as the great rivers.
// When they come to an end,
// they begin again,
// like the days and months;
// they die and are reborn,
// like the four seasons."
//
// - Sun Tsu,
// "The Art of War"

package net.crofis.ui.custom.cropper;

import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.PorterDuff;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.WindowManager;
import android.widget.ImageView;
import android.widget.TextView;

import net.crofis.ui.R;
import net.crofis.ui.custom.actionitem.ActionItem;
import net.crofis.ui.custom.actionitem.ActionItemClickListener;
import net.crofis.ui.dialog.ActionDialog;
import net.crofis.ui.dialog.DialogManager;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;

/**
 * Built-in activity for image cropping.<br>
 * Use {@link CropImage#activity(Uri)} to create a builder to start this activity.
 */
public class CropImageActivity extends AppCompatActivity
    implements CropImageView.OnSetImageUriCompleteListener, CropImageView.OnSaveCroppedImageCompleteListener {

  /** The crop image view library widget used in the activity. */
  private CropImageView mCropImageView;

  /** The options that were set for the crop image. */
  private CropImageOptions mOptions;

  /** Aspect-ratio labels ("x:y") shown in the ratio picker; first entry is "original", last is "cancel". */
  private String[] ratios;

  @Override
  public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.crop_image_activity);

    ratios = getResources().getStringArray(R.array.aspect_ratios);

    mCropImageView = (CropImageView) findViewById(R.id.cropImageView);

    Intent intent = getIntent();
    Uri source = intent.getParcelableExtra(CropImage.CROP_IMAGE_EXTRA_SOURCE);
    mOptions = intent.getParcelableExtra(CropImage.CROP_IMAGE_EXTRA_OPTIONS);

    // Only load the image on first creation; a config change keeps the view's state.
    if (savedInstanceState == null) {
      mCropImageView.setImageUriAsync(source);
    }

    findViewById(R.id.action_rotate_left).setOnClickListener(new View.OnClickListener() {
      @Override
      public void onClick(View v) {
        rotateImage(-90);
      }
    });
    findViewById(R.id.action_rotate_right).setOnClickListener(new View.OnClickListener() {
      @Override
      public void onClick(View v) {
        rotateImage(90);
      }
    });
    findViewById(R.id.action_restore).setOnClickListener(new View.OnClickListener() {
      @Override
      public void onClick(View v) {
        mCropImageView.resetCropRect();
      }
    });
    findViewById(R.id.action_done).setOnClickListener(new View.OnClickListener() {
      @Override
      public void onClick(View v) {
        cropImage();
      }
    });
    findViewById(R.id.action_cancel).setOnClickListener(new View.OnClickListener() {
      @Override
      public void onClick(View v) {
        setResultCancel();
      }
    });
    findViewById(R.id.action_change_ratio).setOnClickListener(new View.OnClickListener() {
      @Override
      public void onClick(View view) {
        // Build a bottom action dialog with one entry per configured aspect ratio.
        ArrayList<ActionItem> items = new ArrayList<ActionItem>();
        for (int i = 0; i < ratios.length; i++) {
          final int finalI = i;
          items.add(new UIAlertAction(null, ratios[i], new ActionItemClickListener() {
            @Override
            public void onActionSelected() {
              if (finalI == 0) {
                // "Original" was clicked - free-form crop rectangle.
                mCropImageView.setFixedAspectRatio(false);
              } else if (finalI == ratios.length - 1) {
                // "Cancel" was clicked - keep the current aspect ratio.
              } else {
                mCropImageView.setFixedAspectRatio(true);
                int ratio2;
                int ratio1;
                try {
                  // Validate aspect ratio format: exactly one ':' separator.
                  if (ratios[finalI].length() - 1 != ratios[finalI].replace(":", "").length())
                    throw new NumberFormatException("Invalid Aspect! Correct format is 'x:y'!");
                  // Parse String to Integers.
                  ratio1 = Integer.parseInt(ratios[finalI].split(":")[0]);
                  ratio2 = Integer.parseInt(ratios[finalI].split(":")[1]);
                  // Reject zero/negative components to avoid division problems downstream.
                  if (ratio1 <= 0 || ratio2 <= 0)
                    throw new ArithmeticException("Ratio Cannot contain zero or negative value!");
                  mCropImageView.setAspectRatio(ratio1, ratio2);
                } catch (NumberFormatException e) {
                  // Malformed entry in the ratios array: fall back to free-form.
                  e.printStackTrace();
                  Log.e(getClass().getName(), "Invalid Aspect Format.");
                  mCropImageView.setFixedAspectRatio(false);
                } catch (ArithmeticException e) {
                  // Zero/negative ratio component: fall back to free-form.
                  e.printStackTrace();
                  Log.e(getClass().getName(), "Invalid Aspect given.");
                  mCropImageView.setFixedAspectRatio(false);
                }
              }
              hide();
            }
          }));
        }
        final ActionDialog dialog = new ActionDialog(CropImageActivity.this, items);
        DialogManager.setDialogPosition(dialog, dialog.getDialog(), Gravity.BOTTOM);
        dialog.show();
      }
    });

    hide();
  }

  /** Puts the activity into immersive full-screen mode (nav bar hidden on API 19+). */
  private void hide() {
    if (Build.VERSION.SDK_INT >= 19) {
      View v = this.getWindow().getDecorView();
      v.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_STABLE
          | View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
          | View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
          | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY);
    }
    WindowManager.LayoutParams attrs = this.getWindow().getAttributes();
    attrs.flags |= WindowManager.LayoutParams.FLAG_FULLSCREEN;
    this.getWindow().setAttributes(attrs);
  }

  @Override
  protected void onStart() {
    super.onStart();
    mCropImageView.setOnSetImageUriCompleteListener(this);
    mCropImageView.setOnSaveCroppedImageCompleteListener(this);
  }

  @Override
  protected void onStop() {
    super.onStop();
    // Detach listeners to avoid callbacks into a stopped activity.
    mCropImageView.setOnSetImageUriCompleteListener(null);
    mCropImageView.setOnSaveCroppedImageCompleteListener(null);
  }

  @Override
  public boolean onCreateOptionsMenu(Menu menu) {
    getMenuInflater().inflate(R.menu.crop_image_menu, menu);

    if (!mOptions.allowRotation) {
      menu.removeItem(R.id.crop_image_menu_rotate);
    }

    Drawable cropIcon = null;
    try {
      cropIcon = ContextCompat.getDrawable(this, R.drawable.crop_image_menu_crop);
      if (cropIcon != null) {
        menu.findItem(R.id.crop_image_menu_crop).setIcon(cropIcon);
      }
    } catch (Exception ignored) {
      // Best-effort: missing icon resource just leaves the default menu item.
    }

    if (mOptions.activityMenuIconColor != 0) {
      updateMenuItemIconColor(menu, R.id.crop_image_menu_rotate, mOptions.activityMenuIconColor);
      if (cropIcon != null) {
        updateMenuItemIconColor(menu, R.id.crop_image_menu_crop, mOptions.activityMenuIconColor);
      }
    }
    return true;
  }

  @Override
  public boolean onOptionsItemSelected(MenuItem item) {
    if (item.getItemId() == R.id.crop_image_menu_crop) {
      cropImage();
      return true;
    }
    if (item.getItemId() == R.id.crop_image_menu_rotate) {
      rotateImage();
      return true;
    }
    if (item.getItemId() == android.R.id.home) {
      setResultCancel();
      return true;
    }
    return super.onOptionsItemSelected(item);
  }

  @Override
  public void onBackPressed() {
    super.onBackPressed();
    setResultCancel();
  }

  @Override
  public void onSetImageUriComplete(CropImageView view, Uri uri, Exception error) {
    if (error == null) {
      if (mOptions.initialCropWindowRectangle != null) {
        mCropImageView.setCropRect(mOptions.initialCropWindowRectangle);
      }
      if (mOptions.initialRotation > -1) {
        mCropImageView.setRotatedDegrees(mOptions.initialRotation);
      }
    } else {
      setResult(null, error);
    }
  }

  @Override
  public void onSaveCroppedImageComplete(CropImageView view, Uri uri, Exception error) {
    setResult(uri, error);
  }

  //region: Private methods

  /**
   * Execute crop image and save the result to the output uri.
   */
  protected void cropImage() {
    if (mOptions.noOutputImage) {
      // Caller only wants the crop rectangle/points, not a saved bitmap.
      setResult(null, null);
    } else {
      Uri outputUri = getOutputUri();
      mCropImageView.saveCroppedImageAsync(outputUri,
          mOptions.outputCompressFormat,
          mOptions.outputCompressQuality,
          mOptions.outputRequestWidth,
          mOptions.outputRequestHeight);
    }
  }

  /**
   * Rotate the image in the crop image view by 90 degrees clockwise.
   */
  protected void rotateImage() {
    mCropImageView.rotateImage(90);
  }

  /**
   * Rotate the image in the crop image view by the given amount.
   *
   * @param degrees rotation amount; negative rotates counter-clockwise.
   */
  private void rotateImage(int degrees) {
    mCropImageView.rotateImage(degrees);
  }

  /**
   * Get Android uri to save the cropped image into.<br>
   * Use the given in options or create a temp file.
   */
  protected Uri getOutputUri() {
    Uri outputUri = mOptions.outputUri;
    if (outputUri.equals(Uri.EMPTY)) {
      try {
        // FIX: WEBP extension was misspelled ".wepb" in the original.
        String ext = mOptions.outputCompressFormat == Bitmap.CompressFormat.JPEG ? ".jpg"
            : mOptions.outputCompressFormat == Bitmap.CompressFormat.PNG ? ".png" : ".webp";
        outputUri = Uri.fromFile(File.createTempFile("cropped", ext, getCacheDir()));
      } catch (IOException e) {
        throw new RuntimeException("Failed to create temp file for output image", e);
      }
    }
    return outputUri;
  }

  /**
   * Result with cropped image data or error if failed.
   */
  protected void setResult(Uri uri, Exception error) {
    int resultCode = error == null ? RESULT_OK : CropImage.CROP_IMAGE_ACTIVITY_RESULT_ERROR_CODE;
    setResult(resultCode, getResultIntent(uri, error));
    finish();
  }

  /**
   * Cancel of cropping activity.
   */
  protected void setResultCancel() {
    setResult(RESULT_CANCELED);
    finish();
  }

  /**
   * Get intent instance to be used for the result of this activity.
   */
  protected Intent getResultIntent(Uri uri, Exception error) {
    CropImage.ActivityResult result = new CropImage.ActivityResult(uri,
        error,
        mCropImageView.getCropPoints(),
        mCropImageView.getCropRect(),
        mCropImageView.getRotatedDegrees());
    Intent intent = new Intent();
    intent.putExtra(CropImage.CROP_IMAGE_EXTRA_RESULT, result);
    return intent;
  }

  /**
   * Update the color of a specific menu item to the given color.
   */
  private void updateMenuItemIconColor(Menu menu, int itemId, int color) {
    MenuItem menuItem = menu.findItem(itemId);
    if (menuItem != null) {
      Drawable menuItemIcon = menuItem.getIcon();
      if (menuItemIcon != null) {
        try {
          menuItemIcon.mutate();
          menuItemIcon.setColorFilter(color, PorterDuff.Mode.SRC_ATOP);
          menuItem.setIcon(menuItemIcon);
        } catch (Exception ignored) {
          // Best-effort: tinting failure leaves the untinted icon in place.
        }
      }
    }
  }
  //endregion
}

/**
 * Text-only action item used by the aspect-ratio picker dialog; the icon slot
 * is always hidden.
 */
class UIAlertAction extends ActionItem {

  @Override
  public String getTitle() {
    return super.getTitle();
  }

  /**
   * Default constructor.
   *
   * @param icon - Action Icon (unused - the icon view is always hidden)
   * @param title - Action Title
   * @param listener - Action Listener
   */
  public UIAlertAction(Drawable icon, String title, ActionItemClickListener listener) {
    super(icon, title, listener);
  }

  @Override
  public View getView(Context context, boolean showActionIcons) {
    View convertView = LayoutInflater.from(context).inflate(R.layout.ui_general_action, null, false);
    TextView title = (TextView) convertView.findViewById(R.id.action_title);
    title.setText(getTitle());
    title.setGravity(Gravity.CENTER);
    title.setTextColor(context.getResources().getColor(R.color.blue));
    // FIX: removed dead "if (false) setImageDrawable(...)" branch - the icon
    // was unconditionally hidden in the original; behavior is unchanged.
    convertView.findViewById(R.id.action_icon).setVisibility(View.GONE);
    return convertView;
  }
}
package hu.sztaki.phytree;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.StreamTokenizer;
import java.util.ArrayList;
import java.util.EmptyStackException;
import java.util.Stack;
import java.util.Vector;

import javax.swing.JProgressBar;

import hu.sztaki.phytree.tree.*;

/**
 * @author James
 *
 *         Parses the newick portion of a file. For nexus files, additional
 *         node-number mapping is needed to rename files. Identification of a
 *         file as either newick or nexus determines contents.
 */
/**
 * Modified by ador:
 *
 * GUI stuff removed
 */
public class TreeParser {
  /**
   * Nexus file identifier. We look for this as the first token to identify a
   * tree file as Nexus, or other.
   */
  private static final String nexusFileID = "#NEXUS";
  /** Begin tag. */
  private static final String beginTag = "begin";
  /** End tag. */
  private static final String endTag = "end";
  // trees section
  /** Tree section. */
  private static final String treeSectionTag = "trees";
  /** Tree ID. */
  private static final String treeID = "tree";
  /** Tree ID (same or similar to {@link #treeID}?). */
  private static final String utreeID = "utree"; // two different tree IDs?
  /** Line (and tree information) termination. */
  private static final char lineTerminator = ';';
  /** Equality sign. */
  private static final char equals = '=';
  /** Nexus comment open. */
  private static final char commentOpen = '[';
  /** Nexus comment close. */
  private static final char commentClose = ']';
  /**
   * True: show debug output. False: suppress printing.
   */
  private static boolean debugOutput = false;

  // Tokenizer over the input; configured in the constructor, consumed by tokenize().
  private StreamTokenizer tokenizer;

  /**
   * Root node of the tree being parsed. Must be initialized outside the
   * tokenizer.
   */
  private TreeNode rootNode;

  /**
   * Guess the type of treeFile based on the presence of nexus identifiers.
   *
   * @param fileName
   *          The name of the file.
   * @return true when file is nexus format, false if nexus strings weren't
   *         found.
   */
  public boolean isNexusFile(String fileName) {
    boolean returnValue = false;
    BufferedReader r;
    try {
      r = new BufferedReader(new FileReader(fileName));
      // Only the first line is inspected for the "#NEXUS" marker.
      String line = r.readLine();
      if (line.indexOf(nexusFileID) != -1)
        returnValue = true;
      r.close();
    } catch (FileNotFoundException e) {
      System.err.println("Could not find file to identify: " + fileName);
    } catch (IOException e) {
      System.out.println("Couldn't identify file: " + fileName);
    }
    return returnValue;
  }

  /**
   * Parses names of trees in nexus file.
   *
   * @param fileName
   *          Name of nexus file.
   * @return List of all tree names found in nexus file
   */
  public static ArrayList<String> nexusFileTreeNames(String fileName) {
    ArrayList<String> returnList = null;
    BufferedReader r;
    try {
      r = new BufferedReader(new FileReader(fileName));
      StreamTokenizer st = new StreamTokenizer(r);
      // Let '#' be part of words so "#NEXUS" tokenizes as one word.
      st.wordChars('#', '#');
      st.nextToken();
      returnList = new ArrayList<String>();
      while (st.ttype != StreamTokenizer.TT_EOF) {
        if (st.ttype == StreamTokenizer.TT_WORD) {
          if (st.sval.equalsIgnoreCase(beginTag)) {
            st.nextToken();
            if (st.ttype == StreamTokenizer.TT_WORD
                && st.sval.equalsIgnoreCase(treeSectionTag)) {
              // found a tree section, huzzah
              boolean endOfTreeList = false;
              st.nextToken();
              while (st.ttype != StreamTokenizer.TT_EOF && !endOfTreeList) {
                // expect either a tree/utree id or the end tag
                if (st.ttype == StreamTokenizer.TT_WORD) {
                  if (st.sval.equalsIgnoreCase(endTag))
                    endOfTreeList = true;
                  else if (st.sval.equalsIgnoreCase(treeID)
                      || st.sval.equalsIgnoreCase(utreeID)) {
                    // found the start of a tree
                    st.nextToken();
                    if (st.ttype == StreamTokenizer.TT_WORD) {
                      returnList.add(st.sval); // found a tree name
                    }
                    while (st.nextToken() != StreamTokenizer.TT_EOF
                        && st.ttype != ';')
                      ; // find the end of the tree
                  }
                } else
                  st.nextToken(); // eat a non-word while looking for first tree
                                  // word
                // System.out.println("Not a word while looking for a tree start tag: "
                // + st.ttype);
              }
            }
            // not a tree section, find the end tag or the next start tag
            else
              while (st.nextToken() != StreamTokenizer.TT_EOF
                  && st.ttype != StreamTokenizer.TT_WORD
                  || (!st.sval.equalsIgnoreCase(beginTag) && !st.sval
                      .equalsIgnoreCase(endTag)))
                ;
          } else
            st.nextToken();
        } else
          st.nextToken();
      }
      r.close();
    } catch (FileNotFoundException e) {
      System.err.println("Could not find file to identify: " + fileName);
    } catch (IOException e) {
      System.out.println("Couldn't identify file: " + fileName);
    }
    return returnList;
  }

  /**
   * Initializes parsing of a tree by creating a tokenizer and setting default
   * properties (such as spacing, quoting characters).
   * {@link #tokenize(long, String, JProgressBar)} is required to start the
   * parsing.
   *
   * @param b
   *          Buffered reader that could start in the middle of a nexus file or
   *          the start of a newick file (basically the beginning of a newick
   *          tree, is run for each tree in a nexus file)
   */
  public TreeParser(BufferedReader b) {
    tokenizer = new StreamTokenizer(b);
    tokenizer.eolIsSignificant(false);
    tokenizer.quoteChar('"');
    // tokenizer.quoteChar('\''); // TODO: check quote layering, quoted quotes
    tokenizer.wordChars('\'', '\''); // quote problem, turn this into a prime
                                     // symbol?
    // Treat most printable ASCII as word characters, EXCEPT the characters
    // that carry newick/nexus structure: ( ) , : ; = [ ] and the quote.
    // 32 = space
    tokenizer.wordChars('!', '!'); // 33
    // 34 = "
    tokenizer.wordChars('#', '&'); // 35-38
    // 39-41 = '() newick
    tokenizer.wordChars('*', '+'); // 42-43
    // 44 = , newick
    tokenizer.wordChars('-', '/'); // 45-47
    // 48-59 = [0-9]:;
    tokenizer.wordChars('<', '<'); // 60
    // 61 = = nexus
    tokenizer.wordChars('>', '@'); // 62-64
    // 65-90 = [A-Z]
    // tokenizer.wordChars('[', '['); // 91 [ nexus comment character, treat as
    // char
    // 92 = \ (esc, support esc'd spaces)
    // 93 = ] nexus comment character
    tokenizer.wordChars('^', '`'); // 93-96
    // 97-122 = [a-z]
    tokenizer.wordChars('{', '~'); // 123-126
    // 127 = del
  }

  /**
   * Debug printout function. Avoid using the system calls and use this, and set
   * flag {@link #debugOutput} depending on debugging or not.
   *
   * @param s
   *          Display the string, for debugging.
   */
  public void debugOutput(String s) {
    if (debugOutput)
      System.out.println(s);
  }

  /**
   * Adds node at the top of the stack to the tree. TreeNode is already created
   * based on Newick properties.
   *
   * @param name
   *          Name of the node.
   * @param nodeStack
   *          Stack of nodes that haven't been added to the tree yet. Nodes are
   *          popped when they have names and all children are processed.
   * @return Newly added treeNode linked into the tree.
   */
  private TreeNode popAndName(String name, Stack<TreeNode> nodeStack) {
    TreeNode topNode = (TreeNode) nodeStack.pop();
    if (name == null) {
      // Unnamed (internal) node: use empty label.
      topNode.label = "";
      topNode.setName("");
    } else {
      topNode.label = name;
      topNode.setName(name);
    }
    try {
      TreeNode parent = (TreeNode) nodeStack.peek();
      parent.addChild(topNode);
    } catch (EmptyStackException e) {
      // Only the root legitimately has no parent on the stack.
      if (topNode != rootNode)
        System.out.println("Parser error on node " + topNode);
    }
    topNode.setExtremeLeaves(); // sets leftmost and rightmost leaf,
                                // non-recursive
    topNode.setNumberLeaves(); // sets number of leaves, non-recursive
    topNode.linkNodesInPreorder();
    topNode.linkNodesInPostorder();
    return topNode;
  }

  /**
   * Newick tokenizer: converts a string (tree as a string) into a tree object.
   * The stream tokenizer should be initialized before calling this function.
   *
   * @return Tree parsed from the stream.
*/ public Tree tokenize() { final char openBracket = '(', closeBracket = ')', childSeparator = ',', treeTerminator = lineTerminator, doubleQuote = '"', infoSeparator = ':'; Tree t = new Tree(); rootNode = new TreeNode(t); t.setRootNode(rootNode); Stack<TreeNode> nodeStack = new Stack<TreeNode>(); nodeStack.push(rootNode); int thisToken; TreeNode lastNamed = null; boolean EOT = false; boolean nameNext = true; // int percentage = 0; double lastnum = 0.0; try { while (EOT == false && (thisToken = tokenizer.nextToken()) != StreamTokenizer.TT_EOF) { switch (thisToken) { // case quote: case doubleQuote: case StreamTokenizer.TT_WORD: if (tokenizer.sval.matches("[eE]-?[0-9]+")) { String numberToParse = tokenizer.sval; int exponent = Integer.parseInt(numberToParse.trim().substring(1)); // update branch length if (lastNamed != null) { for (int ee = 0; ee > Math.abs(exponent); ee++) { if (exponent < 0) { lastnum /= 10.0; } else { lastnum *= 10.0; } } lastNamed.setWeight(lastnum); } } else if (!nameNext) { System.err.println("Error: didn't expect this name here: " + tokenizer.sval); } else { lastNamed = popAndName(tokenizer.sval, nodeStack); } nameNext = false; break; case StreamTokenizer.TT_NUMBER: if (nameNext) lastNamed = popAndName(tokenizer.sval, nodeStack); else { if (lastNamed != null) lastNamed.setWeight(tokenizer.nval); else System.err.println("Error: can't set value " + tokenizer.nval + " to a null node"); lastNamed = null; } lastnum = new Double(tokenizer.nval); nameNext = false; break; case infoSeparator: if (nameNext) lastNamed = popAndName(null, nodeStack); nameNext = false; break; case treeTerminator: case StreamTokenizer.TT_EOF: if (nameNext) lastNamed = popAndName(null, nodeStack); EOT = true; nameNext = false; break; case openBracket: nodeStack.push(new TreeNode(t)); nameNext = true; break; case closeBracket: if (nameNext) lastNamed = popAndName(null, nodeStack); nameNext = true; break; case childSeparator: if (nameNext) lastNamed = popAndName(null, 
nodeStack); nodeStack.push(new TreeNode(t)); nameNext = true; break; default: debugOutput("default " + (char) thisToken); break; } } } catch (IOException e) { } if (!nodeStack.isEmpty()) System.err .println("Node stack still has " + nodeStack.size() + " things"); t.postProcess(); return t; } /** * Tokenize the tree section of a nexus file only, uses newick tokenizer. * * @param treeNumbers * Vector of Integers for commandline-based input of nexus trees; * assume this vector is in ascending order * @return arraylist of trees parsed from the tree file. */ private ArrayList<Tree> nexusTreeTokenize(Vector<Integer> treeNumbers) { ArrayList<Tree> treeArray = new ArrayList<Tree>(); // newick tree subsection stuff (newick encoding) debugOutput("tokenizing tree section"); boolean readAllTrees = true; boolean treeSectionEnd = false; boolean nextTreeID = false; int nextNumber = -1; int thisToken; int currTree = 0; String currTreeName = null; if (treeNumbers != null && treeNumbers.size() > 0) { readAllTrees = false; nextNumber = ((Integer) treeNumbers.get(0)).intValue(); treeNumbers.remove(0); } while ((readAllTrees || nextNumber != -1) && !treeSectionEnd) try { while (!treeSectionEnd && (thisToken = tokenizer.nextToken()) != StreamTokenizer.TT_EOF) { switch (thisToken) { case StreamTokenizer.TT_WORD: if (nextTreeID) { currTreeName = tokenizer.sval; debugOutput("found tree ID: " + currTreeName); nextTreeID = false; } else if (tokenizer.sval.equalsIgnoreCase(treeID) || tokenizer.sval.equalsIgnoreCase(utreeID)) { debugOutput("new tree"); nextTreeID = true; // tree tag found, next word is a tree name } else if (tokenizer.sval.equalsIgnoreCase(endTag)) treeSectionEnd = true; // debugOutput("TWord: " + tokenizer.sval); break; case equals: { if (treeNumbers == null || currTree == nextNumber) { Tree t = tokenize(); treeArray.add(t); if (treeNumbers != null && !treeNumbers.isEmpty()) { nextNumber = ((Integer) treeNumbers.get(0)).intValue(); treeNumbers.remove(0); } else nextNumber = 
-1; } currTree++; } break; // eat the equals case commentOpen: debugOutput("TEating comment"); while (thisToken != StreamTokenizer.TT_EOF && thisToken != commentClose) { thisToken = tokenizer.nextToken(); // eat the comments } break; default: debugOutput("Tdefault " + (char) thisToken); break; } } } catch (IOException e) { System.err.println("Nexus tokenizer error: " + e); } return treeArray; } /** * Tokenize a nexus file, uses newick tokenizer after identifying the region * with the tree information. * * @param treeNumbers * Vector of Integers for commandline-based input of nexus trees; * assume this vector is in ascending order. * @return arraylist of trees parsed from the nexus file. */ public ArrayList<Tree> nexusTokenize(Vector<Integer> treeNumbers, JProgressBar progressBar) { System.out.println("Nexus tokenize: " + treeNumbers.toString()); ArrayList<Tree> treeArray = null; boolean EOF = false; int thisToken; try { while (EOF == false && (thisToken = tokenizer.nextToken()) != StreamTokenizer.TT_EOF) { switch (thisToken) { case StreamTokenizer.TT_WORD: if (tokenizer.sval.equalsIgnoreCase(nexusFileID)) ; // ignore else if (tokenizer.sval.equalsIgnoreCase(beginTag)) { debugOutput("beginning new section: " + tokenizer.sval); thisToken = tokenizer.nextToken(); if (tokenizer.sval.equalsIgnoreCase(treeSectionTag)) treeArray = nexusTreeTokenize(treeNumbers); } else debugOutput("Word: " + tokenizer.sval); break; case commentOpen: debugOutput("Eating comment"); while (thisToken != StreamTokenizer.TT_EOF && thisToken != commentClose) { thisToken = tokenizer.nextToken(); // eat the comments } break; default: debugOutput("default " + (char) thisToken); break; } } } catch (IOException e) { System.err.println("Nexus tokenizer error: " + e); } return treeArray; } /** * Test application function. * * @param args * Program arguments. Only first argument used (for filename). 
*/ public static void main(String[] args) { String fileName = args[0]; long start = System.currentTimeMillis(); File f = new File(fileName); try { BufferedReader r = new BufferedReader(new FileReader(f)); TreeParser tp = new TreeParser(r); Tree t = tp.tokenize(); System.out.println("Tree is : " + t.drawTreeString(false, false)); } catch (FileNotFoundException e) { System.out.println("Couldn't find file: " + fileName); } System.out.println("Parsed in " + ((System.currentTimeMillis() - start) / 1000.0) + " s"); System.exit(0); } }
package jp.sourceforge.ea2ddl.dao.bsentity; import java.io.Serializable; import java.util.*; import org.seasar.dbflute.Entity; import org.seasar.dbflute.dbmeta.DBMeta; import jp.sourceforge.ea2ddl.dao.allcommon.DBMetaInstanceHandler; /** * The entity of t_connectorconstraint that the type is TABLE. <br /> * <pre> * [primary-key] * * * [column] * ConnectorID, Constraint, ConstraintType, Notes * * [sequence] * * * [identity] * * * [version-no] * * * [foreign-table] * * * [referrer-table] * * * [foreign-property] * * * [referrer-property] * * </pre> * @author DBFlute(AutoGenerator) */ public abstract class BsTConnectorconstraint implements Entity, Serializable { // =================================================================================== // Definition // ========== /** Serial version UID. (Default) */ private static final long serialVersionUID = 1L; // =================================================================================== // Attribute // ========= // ----------------------------------------------------- // Column // ------ /** ConnectorID: {UQ : INTEGER} */ protected java.lang.Integer _connectorid; /** Constraint: {UQ : VARCHAR(255)} */ protected String _constraint; /** ConstraintType: {VARCHAR(50)} */ protected String _constrainttype; /** Notes: {LONGCHAR(2147483647)} */ protected String _notes; // ----------------------------------------------------- // Internal // -------- /** The attribute of entity modified properties. 
(for S2Dao) */ protected EntityModifiedProperties _modifiedProperties = newEntityModifiedProperties(); // =================================================================================== // Table Name // ========== public String getTableDbName() { return "t_connectorconstraint"; } public String getTablePropertyName() { // as JavaBeansRule return "TConnectorconstraint"; } // =================================================================================== // DBMeta // ====== public DBMeta getDBMeta() { return DBMetaInstanceHandler.findDBMeta(getTableDbName()); } // =================================================================================== // Classification Classifying // ========================== // =================================================================================== // Classification Determination // ============================ // =================================================================================== // Classification Name/Alias // ========================= // =================================================================================== // Foreign Property // ================ // =================================================================================== // Referrer Property // ================= // =================================================================================== // Determination // ============= public boolean hasPrimaryKeyValue() { return false; } // =================================================================================== // Modified Properties // =================== public Set<String> getModifiedPropertyNames() { return _modifiedProperties.getPropertyNames(); } protected EntityModifiedProperties newEntityModifiedProperties() { return new EntityModifiedProperties(); } public void clearModifiedPropertyNames() { _modifiedProperties.clear(); } public boolean hasModification() { return !_modifiedProperties.isEmpty(); } // 
=================================================================================== // Basic Override // ============== /** * If the all-column value of the other is same as this one, returns true. * @param other Other entity. (Nullable) * @return Comparing result. If other is null, returns false. */ public boolean equals(Object other) { if (other == null || !(other instanceof BsTConnectorconstraint)) { return false; } final BsTConnectorconstraint otherEntity = (BsTConnectorconstraint)other; if (!helpComparingValue(getConnectorid(), otherEntity.getConnectorid())) { return false; } if (!helpComparingValue(getConstraint(), otherEntity.getConstraint())) { return false; } if (!helpComparingValue(getConstrainttype(), otherEntity.getConstrainttype())) { return false; } if (!helpComparingValue(getNotes(), otherEntity.getNotes())) { return false; } return true; } protected boolean helpComparingValue(Object value1, Object value2) { if (value1 == null && value2 == null) { return true; } return value1 != null && value2 != null && value1.equals(value2); } /** * Calculates hash-code from all columns. * @return Hash-code from all-columns. */ public int hashCode() { int result = 17; if (this.getConnectorid() != null) { result = result + this.getConnectorid().hashCode(); } if (this.getConstraint() != null) { result = result + this.getConstraint().hashCode(); } if (this.getConstrainttype() != null) { result = result + this.getConstrainttype().hashCode(); } if (this.getNotes() != null) { result = result + this.getNotes().hashCode(); } return result; } /** * @return The view string of columns. 
(NotNull) */ public String toString() { String delimiter = ","; StringBuilder sb = new StringBuilder(); sb.append(delimiter).append(getConnectorid()); sb.append(delimiter).append(getConstraint()); sb.append(delimiter).append(getConstrainttype()); sb.append(delimiter).append(getNotes()); if (sb.length() > 0) { sb.delete(0, delimiter.length()); } sb.insert(0, "{").append("}"); return sb.toString(); } // =================================================================================== // Accessor // ======== /** * ConnectorID: {UQ : INTEGER} <br /> * @return The value of the column 'ConnectorID'. (Nullable) */ public java.lang.Integer getConnectorid() { return _connectorid; } /** * ConnectorID: {UQ : INTEGER} <br /> * @param connectorid The value of the column 'ConnectorID'. (Nullable) */ public void setConnectorid(java.lang.Integer connectorid) { _modifiedProperties.addPropertyName("connectorid"); this._connectorid = connectorid; } /** * Constraint: {UQ : VARCHAR(255)} <br /> * @return The value of the column 'Constraint'. (Nullable) */ public String getConstraint() { return _constraint; } /** * Constraint: {UQ : VARCHAR(255)} <br /> * @param constraint The value of the column 'Constraint'. (Nullable) */ public void setConstraint(String constraint) { _modifiedProperties.addPropertyName("constraint"); this._constraint = constraint; } /** * ConstraintType: {VARCHAR(50)} <br /> * @return The value of the column 'ConstraintType'. (Nullable) */ public String getConstrainttype() { return _constrainttype; } /** * ConstraintType: {VARCHAR(50)} <br /> * @param constrainttype The value of the column 'ConstraintType'. (Nullable) */ public void setConstrainttype(String constrainttype) { _modifiedProperties.addPropertyName("constrainttype"); this._constrainttype = constrainttype; } /** * Notes: {LONGCHAR(2147483647)} <br /> * @return The value of the column 'Notes'. 
(Nullable) */ public String getNotes() { return _notes; } /** * Notes: {LONGCHAR(2147483647)} <br /> * @param notes The value of the column 'Notes'. (Nullable) */ public void setNotes(String notes) { _modifiedProperties.addPropertyName("notes"); this._notes = notes; } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.exec.vector.mapjoin; import java.io.IOException; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.exec.JoinUtil; import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.OperatorDesc; // Single-Column String hash table import. import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinBytesHashMap; // Single-Column String specific imports. import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; /* * Specialized class for doing a vectorized map join that is an outer join on a Single-Column String * using a hash map. 
 */
public class VectorMapJoinOuterStringOperator extends VectorMapJoinOuterGenerateResultOperator {

  private static final long serialVersionUID = 1L;

  private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinOuterStringOperator.class.getName());
  private static final String CLASS_NAME = VectorMapJoinOuterStringOperator.class.getName();

  // (none)

  // The above members are initialized by the constructor and must not be
  // transient.
  //---------------------------------------------------------------------------

  // The hash map for this specialized class.
  private transient VectorMapJoinBytesHashMap hashMap;

  //---------------------------------------------------------------------------
  // Single-Column String specific members.
  //

  // The column number for this one column join specialization.
  private transient int singleJoinColumn;

  //---------------------------------------------------------------------------
  // Pass-thru constructors.
  //

  /** Kryo ctor. */
  protected VectorMapJoinOuterStringOperator() {
    super();
  }

  /** Constructor used when only the compilation context is available. */
  public VectorMapJoinOuterStringOperator(CompilationOpContext ctx) {
    super(ctx);
  }

  /** Full constructor: passes vectorization context and operator descriptor to the base class. */
  public VectorMapJoinOuterStringOperator(CompilationOpContext ctx,
      VectorizationContext vContext, OperatorDesc conf) throws HiveException {
    super(ctx, vContext, conf);
  }

  //---------------------------------------------------------------------------
  // Process Single-Column String Outer Join on a vectorized row batch.
  //

  /**
   * Processes one vectorized row batch for the single-column-string outer join.
   * Performs lazy one-time setup, evaluates big-table filter/key expressions,
   * then either does a single repeated-key lookup or walks the batch grouping
   * runs of equal keys to minimize hash map lookups, and finally generates
   * outer-join results for matching and non-matching rows.
   *
   * @param row the incoming {@code VectorizedRowBatch} (declared Object by the operator API)
   * @param tag the operator tag; stored into {@code alias}
   * @throws HiveException wrapping any I/O or runtime failure
   */
  @Override
  public void process(Object row, int tag) throws HiveException {

    try {
      VectorizedRowBatch batch = (VectorizedRowBatch) row;

      alias = (byte) tag;

      if (needCommonSetup) {
        // Our one time process method initialization.
        commonSetup(batch);

        /*
         * Initialize Single-Column String members for this specialized class.
         */
        singleJoinColumn = bigTableKeyColumnMap[0];

        needCommonSetup = false;
      }

      if (needHashTableSetup) {
        // Setup our hash table specialization.  It will be the first time the process
        // method is called, or after a Hybrid Grace reload.

        /*
         * Get our Single-Column String hash map information for this specialized class.
         */
        hashMap = (VectorMapJoinBytesHashMap) vectorMapJoinHashTable;

        needHashTableSetup = false;
      }

      batchCounter++;

      final int inputLogicalSize = batch.size;

      if (inputLogicalSize == 0) {
        if (isLogDebugEnabled) {
          LOG.debug(CLASS_NAME + " batch #" + batchCounter + " empty");
        }
        return;
      }

      // Do the per-batch setup for an outer join.
      outerPerBatchSetup(batch);

      // For outer join, remember our input rows before ON expression filtering or before
      // hash table matching so we can generate results for all rows (matching and non matching)
      // later.
      boolean inputSelectedInUse = batch.selectedInUse;
      if (inputSelectedInUse) {
        // if (!verifyMonotonicallyIncreasing(batch.selected, batch.size)) {
        //   throw new HiveException("batch.selected is not in sort order and unique");
        // }
        System.arraycopy(batch.selected, 0, inputSelected, 0, inputLogicalSize);
      }

      // Filtering for outer join just removes rows available for hash table matching.
      boolean someRowsFilteredOut = false;
      if (bigTableFilterExpressions.length > 0) {
        // Since the input
        for (VectorExpression ve : bigTableFilterExpressions) {
          ve.evaluate(batch);
        }
        // Filter expressions shrink batch.size / rewrite batch.selected in place;
        // a size change means at least one row was filtered out.
        someRowsFilteredOut = (batch.size != inputLogicalSize);
        if (isLogDebugEnabled) {
          if (batch.selectedInUse) {
            if (inputSelectedInUse) {
              LOG.debug(CLASS_NAME +
                  " inputSelected " + intArrayToRangesString(inputSelected, inputLogicalSize) +
                  " filtered batch.selected " + intArrayToRangesString(batch.selected, batch.size));
            } else {
              LOG.debug(CLASS_NAME +
                  " inputLogicalSize " + inputLogicalSize +
                  " filtered batch.selected " + intArrayToRangesString(batch.selected, batch.size));
            }
          }
        }
      }

      // Perform any key expressions.  Results will go into scratch columns.
      if (bigTableKeyExpressions != null) {
        for (VectorExpression ve : bigTableKeyExpressions) {
          ve.evaluate(batch);
        }
      }

      /*
       * Single-Column String specific declarations.
       */

      // The one join column for this specialized class.
      BytesColumnVector joinColVector = (BytesColumnVector) batch.cols[singleJoinColumn];
      byte[][] vector = joinColVector.vector;
      int[] start = joinColVector.start;
      int[] length = joinColVector.length;

      /*
       * Single-Column String check for repeating.
       */

      // Check single column for repeating.
      boolean allKeyInputColumnsRepeating = joinColVector.isRepeating;

      if (allKeyInputColumnsRepeating) {

        /*
         * Repeating.
         */

        // All key input columns are repeating.  Generate key once.  Lookup once.
        // Since the key is repeated, we must use entry 0 regardless of selectedInUse.

        /*
         * Single-Column String specific repeated lookup.
         */

        JoinUtil.JoinResult joinResult;
        if (batch.size == 0) {
          // Whole repeated key batch was filtered out.
          joinResult = JoinUtil.JoinResult.NOMATCH;
        } else if (!joinColVector.noNulls && joinColVector.isNull[0]) {
          // Any (repeated) null key column is no match for whole batch.
          joinResult = JoinUtil.JoinResult.NOMATCH;
        } else {
          // Handle *repeated* join key, if found.
          byte[] keyBytes = vector[0];
          int keyStart = start[0];
          int keyLength = length[0];
          joinResult = hashMap.lookup(keyBytes, keyStart, keyLength, hashMapResults[0]);
        }

        /*
         * Common repeated join result processing.
         */

        if (isLogDebugEnabled) {
          LOG.debug(CLASS_NAME + " batch #" + batchCounter + " repeated joinResult " + joinResult.name());
        }

        finishOuterRepeated(batch, joinResult, hashMapResults[0], someRowsFilteredOut,
            inputSelectedInUse, inputLogicalSize);
      } else {

        /*
         * NOT Repeating.
         */

        if (isLogDebugEnabled) {
          LOG.debug(CLASS_NAME + " batch #" + batchCounter + " non-repeated");
        }

        int selected[] = batch.selected;
        boolean selectedInUse = batch.selectedInUse;

        int hashMapResultCount = 0;
        int allMatchCount = 0;
        int equalKeySeriesCount = 0;
        int spillCount = 0;

        boolean atLeastOneNonMatch = someRowsFilteredOut;

        /*
         * Single-Column String specific variables.
         */

        int saveKeyBatchIndex = -1;

        // We optimize performance by only looking up the first key in a series of equal keys.
        boolean haveSaveKey = false;
        JoinUtil.JoinResult saveJoinResult = JoinUtil.JoinResult.NOMATCH;

        // Logical loop over the rows in the batch since the batch may have selected in use.
        for (int logical = 0; logical < batch.size; logical++) {
          int batchIndex = (selectedInUse ? selected[logical] : logical);
          // VectorizedBatchUtil.debugDisplayOneRow(batch, batchIndex, taskName + ", " + getOperatorId() + " candidate " + CLASS_NAME + " batch");

          /*
           * Single-Column String outer null detection.
           */

          boolean isNull = !joinColVector.noNulls && joinColVector.isNull[batchIndex];

          if (isNull) {

            // Have that the NULL does not interfere with the current equal key series, if there
            // is one. We do not set saveJoinResult.
            //
            //    Let a current MATCH equal key series keep going, or
            //    Let a current SPILL equal key series keep going, or
            //    Let a current NOMATCH keep not matching.

            atLeastOneNonMatch = true;

            // LOG.debug(CLASS_NAME + " logical " + logical + " batchIndex " + batchIndex + " NULL");
          } else {

            /*
             * Single-Column String outer get key.
             */

            // Implicit -- use batchIndex.

            /*
             * Equal key series checking.
             */

            if (!haveSaveKey ||
                StringExpr.equal(vector[saveKeyBatchIndex], start[saveKeyBatchIndex], length[saveKeyBatchIndex],
                                 vector[batchIndex], start[batchIndex], length[batchIndex]) == false) {

              // New key.

              if (haveSaveKey) {
                // Move on with our counts.
                switch (saveJoinResult) {
                case MATCH:
                  hashMapResultCount++;
                  equalKeySeriesCount++;
                  break;
                case SPILL:
                  hashMapResultCount++;
                  break;
                case NOMATCH:
                  break;
                }
              }

              // Regardless of our matching result, we keep that information to make multiple use
              // of it for a possible series of equal keys.
              haveSaveKey = true;

              /*
               * Single-Column String specific save key.
               */

              saveKeyBatchIndex = batchIndex;

              /*
               * Single-Column Long specific lookup key.
               */

              byte[] keyBytes = vector[batchIndex];
              int keyStart = start[batchIndex];
              int keyLength = length[batchIndex];
              saveJoinResult = hashMap.lookup(keyBytes, keyStart, keyLength, hashMapResults[hashMapResultCount]);

              /*
               * Common outer join result processing.
               */

              switch (saveJoinResult) {
              case MATCH:
                equalKeySeriesHashMapResultIndices[equalKeySeriesCount] = hashMapResultCount;
                equalKeySeriesAllMatchIndices[equalKeySeriesCount] = allMatchCount;
                equalKeySeriesIsSingleValue[equalKeySeriesCount] = hashMapResults[hashMapResultCount].isSingleRow();
                equalKeySeriesDuplicateCounts[equalKeySeriesCount] = 1;
                allMatchs[allMatchCount++] = batchIndex;
                // VectorizedBatchUtil.debugDisplayOneRow(batch, batchIndex, CLASS_NAME + " MATCH isSingleValue " + equalKeySeriesIsSingleValue[equalKeySeriesCount] + " currentKey " + currentKey);
                break;

              case SPILL:
                spills[spillCount] = batchIndex;
                spillHashMapResultIndices[spillCount] = hashMapResultCount;
                spillCount++;
                break;

              case NOMATCH:
                atLeastOneNonMatch = true;
                // VectorizedBatchUtil.debugDisplayOneRow(batch, batchIndex, CLASS_NAME + " NOMATCH" + " currentKey " + currentKey);
                break;
              }
            } else {
              // LOG.debug(CLASS_NAME + " logical " + logical + " batchIndex " + batchIndex + " Key Continues " + saveKey + " " + saveJoinResult.name());

              // Series of equal keys.

              switch (saveJoinResult) {
              case MATCH:
                equalKeySeriesDuplicateCounts[equalKeySeriesCount]++;
                allMatchs[allMatchCount++] = batchIndex;
                // VectorizedBatchUtil.debugDisplayOneRow(batch, batchIndex, CLASS_NAME + " MATCH duplicate");
                break;

              case SPILL:
                spills[spillCount] = batchIndex;
                spillHashMapResultIndices[spillCount] = hashMapResultCount;
                spillCount++;
                break;

              case NOMATCH:
                // VectorizedBatchUtil.debugDisplayOneRow(batch, batchIndex, CLASS_NAME + " NOMATCH duplicate");
                break;
              }
            }
            // if (!verifyMonotonicallyIncreasing(allMatchs, allMatchCount)) {
            //   throw new HiveException("allMatchs is not in sort order and unique");
            // }
          }
        }

        if (haveSaveKey) {
          // Update our counts for the last key.
          switch (saveJoinResult) {
          case MATCH:
            hashMapResultCount++;
            equalKeySeriesCount++;
            break;
          case SPILL:
            hashMapResultCount++;
            break;
          case NOMATCH:
            break;
          }
        }

        if (isLogDebugEnabled) {
          LOG.debug(CLASS_NAME + " batch #" + batchCounter +
              " allMatchs " + intArrayToRangesString(allMatchs,allMatchCount) +
              " equalKeySeriesHashMapResultIndices " + intArrayToRangesString(equalKeySeriesHashMapResultIndices, equalKeySeriesCount) +
              " equalKeySeriesAllMatchIndices " + intArrayToRangesString(equalKeySeriesAllMatchIndices, equalKeySeriesCount) +
              " equalKeySeriesIsSingleValue " + Arrays.toString(Arrays.copyOfRange(equalKeySeriesIsSingleValue, 0, equalKeySeriesCount)) +
              " equalKeySeriesDuplicateCounts " + Arrays.toString(Arrays.copyOfRange(equalKeySeriesDuplicateCounts, 0, equalKeySeriesCount)) +
              " atLeastOneNonMatch " + atLeastOneNonMatch +
              " inputSelectedInUse " + inputSelectedInUse +
              " inputLogicalSize " + inputLogicalSize +
              " spills " + intArrayToRangesString(spills, spillCount) +
              " spillHashMapResultIndices " + intArrayToRangesString(spillHashMapResultIndices, spillCount) +
              " hashMapResults " + Arrays.toString(Arrays.copyOfRange(hashMapResults, 0, hashMapResultCount)));
        }

        // We will generate results for all matching and non-matching rows.
        finishOuter(batch,
            allMatchCount, equalKeySeriesCount, atLeastOneNonMatch,
            inputSelectedInUse, inputLogicalSize,
            spillCount, hashMapResultCount);
      }

      if (batch.size > 0) {
        // Forward any remaining selected rows.
        forwardBigTableBatch(batch);
      }

    } catch (IOException e) {
      throw new HiveException(e);
    } catch (Exception e) {
      throw new HiveException(e);
    }
  }
}
package net.bull.javamelody.swing.util; import java.awt.AlphaComposite; import java.awt.Component; import java.awt.Composite; import java.awt.Dimension; import java.awt.Graphics; import java.awt.Graphics2D; import java.awt.LayoutManager; import java.awt.Rectangle; import java.awt.image.AffineTransformOp; import java.awt.image.BufferedImage; import java.awt.image.BufferedImageOp; import java.awt.image.ConvolveOp; import java.awt.image.Kernel; import javax.swing.JComponent; import javax.swing.JPanel; /** * @author Alexander Potochkin * * https://swinghelper.dev.java.net/ * http://weblogs.java.net/blog/alexfromsun/ */ public class JXLayer extends JPanel { private static final long serialVersionUID = -180838202361369295L; // layering related properties private final JComponent contentPane; private final JComponent glassPane = new JXGlassPane(); // painting related properties private float alpha = 1; private transient BufferedImageOp bio; private transient BufferedImage tempSrc; private transient BufferedImage tempDst; private static class JXGlassPane extends JPanel { private static final long serialVersionUID = -6311466609547970582L; /** * Constructeur. */ JXGlassPane() { super(); setOpaque(false); } /** {@inheritDoc} */ @Override public boolean contains(int x, int y) { if (getMouseListeners().length == 0 && getMouseMotionListeners().length == 0 && getMouseWheelListeners().length == 0 && !isCursorSet()) { return false; } return super.contains(x, y); } } /** * Constructeur. 
* @param c JComponent */ public JXLayer(JComponent c) { super(null); super.addImpl(glassPane, null, 0); super.addImpl(c, null, 1); this.contentPane = c; } /** {@inheritDoc} */ @Override public void doLayout() { if (contentPane != null) { setPreferredSize(contentPane.getPreferredSize()); contentPane.setLocation(0, 0); contentPane.setSize(getWidth(), getHeight()); } if (glassPane != null) { glassPane.setLocation(0, 0); glassPane.setSize(getWidth(), getHeight()); } } /** {@inheritDoc} */ @Override public boolean isOptimizedDrawingEnabled() { return false; } /** {@inheritDoc} */ @Override protected void addImpl(Component comp, Object constraints, int index) { contentPane.add(comp, constraints, index); doLayout(); } /** {@inheritDoc} */ @Override public void remove(Component comp) { contentPane.remove(comp); } /** {@inheritDoc} */ @Override public void removeAll() { contentPane.removeAll(); } /** {@inheritDoc} */ @Override public void setLayout(LayoutManager mgr) { if (contentPane != null) { contentPane.setLayout(mgr); } } /** {@inheritDoc} */ @Override public LayoutManager getLayout() { return contentPane.getLayout(); } /** {@inheritDoc} */ @Override public void setPreferredSize(Dimension preferredSize) { contentPane.setPreferredSize(preferredSize); } /** {@inheritDoc} */ @Override public Dimension getPreferredSize() { return contentPane.getPreferredSize(); } /** {@inheritDoc} */ @Override public Dimension getMaximumSize() { return contentPane.getMaximumSize(); } /** {@inheritDoc} */ @Override public void setMaximumSize(Dimension maximumSize) { contentPane.setMaximumSize(maximumSize); } /** {@inheritDoc} */ @Override public Dimension getMinimumSize() { return contentPane.getMinimumSize(); } /** {@inheritDoc} */ @Override public void setMinimumSize(Dimension minimumSize) { contentPane.setMinimumSize(minimumSize); } // painting /** * @return BufferedImageOp */ public BufferedImageOp getBufferedImageOp() { return bio; } /** * @param bufferedImageOp BufferedImageOp */ 
public void setBufferedImageOp(BufferedImageOp bufferedImageOp) { if (bufferedImageOp instanceof AffineTransformOp) { throw new IllegalArgumentException("AffineTransformOp is not supported"); } this.bio = bufferedImageOp; repaint(); } /** * @return float */ public float getAlpha() { return alpha; } /** * @param alpha float */ public void setAlpha(float alpha) { if (alpha < 0 || alpha > 1) { throw new IllegalArgumentException(); } this.alpha = alpha; repaint(); } /** {@inheritDoc} */ @Override public void paint(Graphics g) { if (bio == null && alpha == 1 || !(g instanceof Graphics2D)) { super.paint(g); return; } final Graphics2D g2 = (Graphics2D) g.create(); Rectangle clipBounds = g2.getClipBounds(); if (clipBounds == null) { clipBounds = new Rectangle(getSize()); } if (clipBounds.isEmpty()) { return; } final boolean isConvolveOp = bio instanceof ConvolveOp; if (isConvolveOp) { final ConvolveOp cop = (ConvolveOp) bio; final Kernel kernel = cop.getKernel(); clipBounds.grow(kernel.getWidth() / 2, kernel.getHeight() / 2); } createTempImagesIfNecessary(clipBounds); final Graphics2D bufg = (Graphics2D) tempSrc.getGraphics(); bufg.translate(-clipBounds.x, -clipBounds.y); bufg.setClip(clipBounds); super.paint(bufg); bufg.dispose(); applyFilter(g2, clipBounds, isConvolveOp); } private void applyFilter(Graphics2D g2, Rectangle clipBounds, boolean isConvolveOp) { if (isConvolveOp) { tempDst = bio.filter(tempSrc, tempDst); } else { tempDst = bio.filter(tempSrc, tempSrc); } if (isOpaque()) { g2.clearRect(clipBounds.x, clipBounds.y, clipBounds.width, clipBounds.height); } final Composite oldComposite = g2.getComposite(); if (alpha != 1) { g2.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, alpha)); } g2.drawImage(tempDst, clipBounds.x, clipBounds.y, null); g2.setComposite(oldComposite); g2.dispose(); } private void createTempImagesIfNecessary(Rectangle clipBounds) { if (tempSrc == null || tempSrc.getWidth() != clipBounds.width || tempSrc.getHeight() != 
clipBounds.height) { tempSrc = getGraphicsConfiguration().createCompatibleImage(clipBounds.width, clipBounds.height); // TYPE_4BYTE_ABGR the only type which works properly on Linux and Solaris ? // tempSrc = new BufferedImage(clipBounds.width, clipBounds.height, BufferedImage.TYPE_4BYTE_ABGR); if (bio instanceof ConvolveOp) { tempDst = getGraphicsConfiguration().createCompatibleImage(clipBounds.width, clipBounds.height); // idem ? } } } }
/** * $RCSfile: RosterManager.java,v $ * $Revision: 3138 $ * $Date: 2005-12-01 02:13:26 -0300 (Thu, 01 Dec 2005) $ * * Copyright (C) 2004-2008 Jive Software. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jivesoftware.openfire.roster; import org.jivesoftware.openfire.RoutingTable; import org.jivesoftware.openfire.SharedGroupException; import org.jivesoftware.openfire.XMPPServer; import org.jivesoftware.openfire.container.BasicModule; import org.jivesoftware.openfire.event.GroupEventDispatcher; import org.jivesoftware.openfire.event.GroupEventListener; import org.jivesoftware.openfire.event.UserEventDispatcher; import org.jivesoftware.openfire.event.UserEventListener; import org.jivesoftware.openfire.group.Group; import org.jivesoftware.openfire.group.GroupManager; import org.jivesoftware.openfire.group.GroupNotFoundException; import org.jivesoftware.openfire.user.User; import org.jivesoftware.openfire.user.UserManager; import org.jivesoftware.openfire.user.UserNotFoundException; import org.jivesoftware.util.ClassUtils; import org.jivesoftware.util.JiveGlobals; import org.jivesoftware.util.PropertyEventDispatcher; import org.jivesoftware.util.PropertyEventListener; import org.jivesoftware.util.cache.Cache; import org.jivesoftware.util.cache.CacheFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.xmpp.packet.JID; import org.xmpp.packet.Presence; import java.util.*; /** * A simple service that allows components to 
 retrieve a roster based solely on the ID
 * of the owner. Users have convenience methods for obtaining a roster associated with
 * the owner. However there are many components that need to retrieve the roster
 * based solely on the generic ID owner key. This interface defines a service that can
 * do that. This allows classes that generically manage resource for resource owners
 * (such as presence updates) to generically offer their services without knowing or
 * caring if the roster owner is a user, chatbot, etc.
 *
 * @author Iain Shigeoka
 */
public class RosterManager extends BasicModule implements GroupEventListener, UserEventListener {

    private static final Logger Log = LoggerFactory.getLogger(RosterManager.class);

    // cache of rosters keyed by username; shared across cluster nodes via CacheFactory
    private Cache<String, Roster> rosterCache = null;
    private XMPPServer server;
    private RoutingTable routingTable;
    // NOTE(review): initProvider() (defined later in this file) assigns this field
    private RosterItemProvider provider;

    /**
     * Returns true if the roster service is enabled. When disabled it is not possible to
     * retrieve users rosters or broadcast presence packets to roster contacts.
     *
     * @return true if the roster service is enabled.
     */
    public static boolean isRosterServiceEnabled() {
        return JiveGlobals.getBooleanProperty("xmpp.client.roster.active", true);
    }

    /**
     * Creates the manager, its roster cache, and registers a property listener that
     * re-initializes the roster item provider when its class name property changes.
     */
    public RosterManager() {
        super("Roster Manager");
        rosterCache = CacheFactory.createCache("Roster");
        initProvider();
        PropertyEventDispatcher.addListener(new PropertyEventListener() {
            @Override
            public void propertySet(String property, Map params) {
                // hot-swap the provider implementation when reconfigured
                if (property.equals("provider.roster.className")) {
                    initProvider();
                }
            }
            @Override
            public void propertyDeleted(String property, Map params) {}
            @Override
            public void xmlPropertySet(String property, Map params) {}
            @Override
            public void xmlPropertyDeleted(String property, Map params) {}
        });
    }

    /**
     * Returns the roster for the given username.
     *
     * @param username the username to search for.
     * @return the roster associated with the ID.
     * @throws org.jivesoftware.openfire.user.UserNotFoundException if the ID does not correspond
     *         to a known entity on the server.
     */
    public Roster getRoster(String username) throws UserNotFoundException {
        Roster roster = rosterCache.get(username);
        if (roster == null) {
            // Synchronize using a unique key so that other threads loading the User
            // and not the Roster cannot produce a deadlock
            // NOTE(review): locking on an interned String is effectively a JVM-global
            // lock keyed by (username + " ro"); any other code interning the same
            // string would contend on it
            synchronized ((username + " ro").intern()) {
                // double-checked: another thread may have loaded it while we waited
                roster = rosterCache.get(username);
                if (roster == null) {
                    // Not in cache so load a new one:
                    roster = new Roster(username);
                    rosterCache.put(username, roster);
                }
            }
        }
        return roster;
    }

    /**
     * Removes the entire roster of a given user. This is necessary when a user
     * account is being deleted from the server. Also scrubs the deleted user from
     * every other local roster that referenced them.
     *
     * @param user the user.
     */
    public void deleteRoster(JID user) {
        if (!server.isLocal(user)) {
            // Ignore request if user is not a local user
            return;
        }
        try {
            String username = user.getNode();
            // Get the roster of the deleted user
            Roster roster = getRoster(username);
            // Remove each roster item from the user's roster
            for (RosterItem item : roster.getRosterItems()) {
                try {
                    roster.deleteRosterItem(item.getJid(), false);
                } catch (SharedGroupException e) {
                    // Do nothing. We shouldn't have this exception since we disabled the checkings
                }
            }
            // Remove the cached roster from memory
            rosterCache.remove(username);
            // Get the rosters that have a reference to the deleted user
            Iterator<String> usernames = provider.getUsernames(user.toBareJID());
            while (usernames.hasNext()) {
                username = usernames.next();
                try {
                    // Get the roster that has a reference to the deleted user
                    roster = getRoster(username);
                    // Remove the deleted user reference from this roster
                    roster.deleteRosterItem(user, false);
                } catch (SharedGroupException e) {
                    // Do nothing. We shouldn't have this exception since we disabled the checkings
                } catch (UserNotFoundException e) {
                    // Do nothing.
                }
            }
        } catch (UnsupportedOperationException | UserNotFoundException e) {
            // Do nothing
        }
    }

    /**
     * Returns a collection with all the groups that the user may include in his roster. The
     * following criteria will be used to select the groups: 1) Groups that are configured so that
     * everybody can include in his roster, 2) Groups that are configured so that its users may
     * include the group in their rosters and the user is a group user of the group and 3) User
     * belongs to a Group that may see a Group that whose members may include the Group in their
     * rosters.
     *
     * @param username the username of the user to return his shared groups.
     * @return a collection with all the groups that the user may include in his roster.
     */
    public Collection<Group> getSharedGroups(String username) {
        Collection<Group> answer = new HashSet<>();
        Collection<Group> groups = GroupManager.getInstance().getSharedGroups(username);
        for (Group group : groups) {
            String showInRoster = group.getProperties().get("sharedRoster.showInRoster");
            if ("onlyGroup".equals(showInRoster)) {
                if (group.isUser(username)) {
                    // The user belongs to the group so add the group to the answer
                    answer.add(group);
                } else {
                    // Check if the user belongs to a group that may see this group
                    Collection<Group> groupList =
                            parseGroups(group.getProperties().get("sharedRoster.groupList"));
                    for (Group groupInList : groupList) {
                        if (groupInList.isUser(username)) {
                            answer.add(group);
                        }
                    }
                }
            } else if ("everybody".equals(showInRoster)) {
                // Anyone can see this group so add the group to the answer
                answer.add(group);
            }
        }
        return answer;
    }

    /**
     * Returns the list of shared groups whose visibility is public.
     *
     * @return the list of shared groups whose visibility is public.
     */
    public Collection<Group> getPublicSharedGroups() {
        return GroupManager.getInstance().getPublicSharedGroups();
    }

    /**
     * Returns a collection of Groups obtained by parsing a comma delimited String with the name
     * of groups.
     *
     * @param groupNames a comma delimited string with group names.
     * @return a collection of Groups obtained by parsing a comma delimited String with the name
     *         of groups.
     */
    private Collection<Group> parseGroups(String groupNames) {
        Collection<Group> answer = new HashSet<>();
        for (String groupName : parseGroupNames(groupNames)) {
            try {
                answer.add(GroupManager.getInstance().getGroup(groupName));
            } catch (GroupNotFoundException e) {
                // Do nothing. Silently ignore the invalid reference to the group
            }
        }
        return answer;
    }

    /**
     * Returns a collection of Groups obtained by parsing a comma delimited String with the name
     * of groups.
     *
     * @param groupNames a comma delimited string with group names.
     * @return a collection of Groups obtained by parsing a comma delimited String with the name
     *         of groups.
     */
    private static Collection<String> parseGroupNames(String groupNames) {
        Collection<String> answer = new HashSet<>();
        if (groupNames != null) {
            StringTokenizer tokenizer = new StringTokenizer(groupNames, ",");
            while (tokenizer.hasMoreTokens()) {
                answer.add(tokenizer.nextToken());
            }
        }
        return answer;
    }

    /** New groups require no roster changes until they gain members or visibility. */
    @Override
    public void groupCreated(Group group, Map params) {
        //Do nothing
    }

    /**
     * Removes every member/admin of the deleted group from the rosters of all
     * users affected by the group's shared-roster configuration.
     */
    @Override
    public void groupDeleting(Group group, Map params) {
        // Get group members
        Collection<JID> users = new HashSet<>(group.getMembers());
        users.addAll(group.getAdmins());
        // Get users whose roster will be updated
        Collection<JID> affectedUsers = getAffectedUsers(group);
        // Iterate on group members and update rosters of affected users
        for (JID deletedUser : users) {
            groupUserDeleted(group, affectedUsers, deletedUser);
        }
    }

    /**
     * Reacts to a change of the group's shared-roster properties. For visibility or
     * group-list changes, the affected users are computed against the ORIGINAL value,
     * members are removed from those rosters, then re-added under the new configuration.
     */
    @Override
    public void groupModified(Group group, Map params) {
        // Do nothing if no group property has been modified
        if ("propertyDeleted".equals(params.get("type"))) {
            return;
        }
        String keyChanged = (String) params.get("propertyKey");
        String originalValue = (String) params.get("originalValue");
        if ("sharedRoster.showInRoster".equals(keyChanged)) {
            String currentValue = group.getProperties().get("sharedRoster.showInRoster");
            // Nothing has changed so do nothing.
            // NOTE(review): currentValue.equals(...) would NPE if the property were
            // missing; the propertyDeleted guard above presumably prevents that — confirm
            if (currentValue.equals(originalValue)) {
                return;
            }
            // Get the users of the group
            Collection<JID> users = new HashSet<>(group.getMembers());
            users.addAll(group.getAdmins());
            // Get the users whose roster will be affected
            Collection<JID> affectedUsers = getAffectedUsers(group, originalValue,
                    group.getProperties().get("sharedRoster.groupList"));
            // Remove the group members from the affected rosters
            for (JID deletedUser : users) {
                groupUserDeleted(group, affectedUsers, deletedUser);
            }
            // Simulate that the group users has been added to the group. This will cause to push
            // roster items to the "affected" users for the group users
            for (JID user : users) {
                groupUserAdded(group, user);
            }
        } else if ("sharedRoster.groupList".equals(keyChanged)) {
            String currentValue = group.getProperties().get("sharedRoster.groupList");
            // Nothing has changed so do nothing.
            if (currentValue.equals(originalValue)) {
                return;
            }
            // Get the users of the group
            Collection<JID> users = new HashSet<>(group.getMembers());
            users.addAll(group.getAdmins());
            // Get the users whose roster will be affected
            Collection<JID> affectedUsers = getAffectedUsers(group,
                    group.getProperties().get("sharedRoster.showInRoster"), originalValue);
            // Remove the group members from the affected rosters
            for (JID deletedUser : users) {
                groupUserDeleted(group, affectedUsers, deletedUser);
            }
            // Simulate that the group users has been added to the group. This will cause to push
            // roster items to the "affected" users for the group users
            for (JID user : users) {
                groupUserAdded(group, user);
            }
        } else if ("sharedRoster.displayName".equals(keyChanged)) {
            String currentValue = group.getProperties().get("sharedRoster.displayName");
            // Nothing has changed so do nothing.
            if (currentValue.equals(originalValue)) {
                return;
            }
            // Do nothing if the group is not being shown in users' rosters
            if (!isSharedGroup(group)) {
                return;
            }
            // Get all the affected users
            Collection<JID> users = getAffectedUsers(group);
            // Iterate on all the affected users and update their rosters
            for (JID updatedUser : users) {
                // Get the roster to update.
                Roster roster = null;
                if (server.isLocal(updatedUser)) {
                    roster = rosterCache.get(updatedUser.getNode());
                }
                if (roster != null) {
                    // Update the roster with the new group display name
                    roster.shareGroupRenamed(users);
                }
            }
        }
    }

    /**
     * Wires the manager to the server and registers a roster listener that keeps the
     * cluster cache refreshed on every contact change.
     */
    @Override
    public void initialize(XMPPServer server) {
        super.initialize(server);
        this.server = server;
        this.routingTable = server.getRoutingTable();
        RosterEventDispatcher.addListener(new RosterEventListener() {
            @Override
            public void rosterLoaded(Roster roster) {
                // Do nothing
            }
            @Override
            public boolean addingContact(Roster roster, RosterItem item, boolean persistent) {
                // Do nothing
                return true;
            }
            @Override
            public void contactAdded(Roster roster, RosterItem item) {
                // Set object again in cache. This is done so that other cluster nodes
                // get refreshed with latest version of the object
                rosterCache.put(roster.getUsername(), roster);
            }
            @Override
            public void contactUpdated(Roster roster, RosterItem item) {
                // Set object again in cache. This is done so that other cluster nodes
                // get refreshed with latest version of the object
                rosterCache.put(roster.getUsername(), roster);
            }
            @Override
            public void contactDeleted(Roster roster, RosterItem item) {
                // Set object again in cache. This is done so that other cluster nodes
                // get refreshed with latest version of the object
                rosterCache.put(roster.getUsername(), roster);
            }
        });
    }

    /**
     * Returns true if the specified Group may be included in a user roster. The decision is made
     * based on the group properties that are configurable through the Admin Console.
     *
     * @param group the group to check if it may be considered a shared group.
     * @return true if the specified Group may be included in a user roster.
     */
    public static boolean isSharedGroup(Group group) {
        String showInRoster = group.getProperties().get("sharedRoster.showInRoster");
        if ("onlyGroup".equals(showInRoster) || "everybody".equals(showInRoster)) {
            return true;
        }
        return false;
    }

    /**
     * Returns true if the specified Group may be seen by all users in the system. The decision
     * is made based on the group properties that are configurable through the Admin Console.
     *
     * @param group the group to check if it may be seen by all users in the system.
     * @return true if the specified Group may be seen by all users in the system.
     */
    public static boolean isPublicSharedGroup(Group group) {
        String showInRoster = group.getProperties().get("sharedRoster.showInRoster");
        if ("everybody".equals(showInRoster)) {
            return true;
        }
        return false;
    }

    /**
     * Pushes the new member into the rosters affected by this group (or, for a
     * non-shared group, by every group that can see it).
     */
    @Override
    public void memberAdded(Group group, Map params) {
        JID addedUser = new JID((String) params.get("member"));
        // Do nothing if the user was an admin that became a member
        if (group.getAdmins().contains(addedUser)) {
            return;
        }
        if (!isSharedGroup(group)) {
            for (Group visibleGroup : getVisibleGroups(group)) {
                // Get the list of affected users
                Collection<JID> users = new HashSet<>(visibleGroup.getMembers());
                users.addAll(visibleGroup.getAdmins());
                groupUserAdded(visibleGroup, users, addedUser);
            }
        } else {
            groupUserAdded(group, addedUser);
        }
    }

    /**
     * Removes the departed member from the rosters affected by this group (or, for a
     * non-shared group, by every group that can see it).
     */
    @Override
    public void memberRemoved(Group group, Map params) {
        String member = (String) params.get("member");
        if (member == null) {
            return;
        }
        JID deletedUser = new JID(member);
        // Do nothing if the user is still an admin
        if (group.getAdmins().contains(deletedUser)) {
            return;
        }
        if (!isSharedGroup(group)) {
            for (Group visibleGroup : getVisibleGroups(group)) {
                // Get the list of affected users
                Collection<JID> users = new HashSet<>(visibleGroup.getMembers());
                users.addAll(visibleGroup.getAdmins());
                groupUserDeleted(visibleGroup, users, deletedUser);
            }
        } else {
            groupUserDeleted(group, deletedUser);
        }
    }

    /**
     * Mirrors {@link #memberAdded} for admins: pushes the new admin into affected rosters
     * unless they were already present as a member.
     */
    @Override
    public void adminAdded(Group group, Map params) {
        JID addedUser = new JID((String) params.get("admin"));
        // Do nothing if the user was a member that became an admin
        if (group.getMembers().contains(addedUser)) {
            return;
        }
        if (!isSharedGroup(group)) {
            for (Group visibleGroup : getVisibleGroups(group)) {
                // Get the list of affected users
                Collection<JID> users = new HashSet<>(visibleGroup.getMembers());
                users.addAll(visibleGroup.getAdmins());
                groupUserAdded(visibleGroup, users, addedUser);
            }
        } else {
            groupUserAdded(group, addedUser);
        }
    }

    /**
     * Mirrors {@link #memberRemoved} for admins: removes the former admin from affected
     * rosters unless they remain a regular member.
     */
    @Override
    public void adminRemoved(Group group, Map params) {
        JID deletedUser = new JID((String) params.get("admin"));
        // Do nothing if the user is still a member
        if (group.getMembers().contains(deletedUser)) {
            return;
        }
        // Do nothing if the group is not being shown in group members' rosters
        if (!isSharedGroup(group)) {
            for (Group visibleGroup : getVisibleGroups(group)) {
                // Get the list of affected users
                Collection<JID> users = new HashSet<>(visibleGroup.getMembers());
                users.addAll(visibleGroup.getAdmins());
                groupUserDeleted(visibleGroup, users, deletedUser);
            }
        } else {
            groupUserDeleted(group, deletedUser);
        }
    }

    /**
     * A new user has been created so members of public shared groups need to have
     * their rosters updated. Members of public shared groups need to have a roster
     * item with subscription FROM for the new user since the new user can see them.
     *
     * @param newUser the newly created user.
     * @param params event parameters.
     */
    @Override
    public void userCreated(User newUser, Map<String,Object> params) {
        JID newUserJID = server.createJID(newUser.getUsername(), null);
        // Shared public groups that are public should have a presence subscription
        // of type FROM for the new user
        for (Group group : getPublicSharedGroups()) {
            // Get group members of public group
            Collection<JID> users = new HashSet<>(group.getMembers());
            users.addAll(group.getAdmins());
            // Update the roster of each group member to include a subscription of type FROM
            for (JID userToUpdate : users) {
                // Get the roster to update
                Roster roster = null;
                if (server.isLocal(userToUpdate)) {
                    // Check that the user exists, if not then continue with the next user
                    try {
                        UserManager.getInstance().getUser(userToUpdate.getNode());
                    } catch (UserNotFoundException e) {
                        continue;
                    }
                    roster = rosterCache.get(userToUpdate.getNode());
                }
                // Only update rosters in memory
                if (roster != null) {
                    roster.addSharedUser(group, newUserJID);
                }
                if (!server.isLocal(userToUpdate)) {
                    // Subscribe to the presence of the remote user. This is only necessary for
                    // remote users and may only work with remote users that **automatically**
                    // accept presence subscription requests
                    sendSubscribeRequest(newUserJID, userToUpdate, true);
                }
            }
        }
    }

    /**
     * Removes all shared-group references to a user being deleted, then deletes the
     * user's own roster.
     */
    @Override
    public void userDeleting(User user, Map<String,Object> params) {
        // Shared public groups that have a presence subscription of type FROM
        // for the deleted user should no longer have a reference to the deleted user
        JID userJID = server.createJID(user.getUsername(), null);
        // Shared public groups that are public should have a presence subscription
        // of type FROM for the new user
        for (Group group : getPublicSharedGroups()) {
            // Get group members of public group
            Collection<JID> users = new HashSet<>(group.getMembers());
            users.addAll(group.getAdmins());
            // Update the roster of each group member to include a subscription of type FROM
            for (JID userToUpdate : users) {
                // Get the roster to update
                Roster roster = null;
                if (server.isLocal(userToUpdate)) {
                    // Check that the user exists, if not then continue with the next user
                    try {
                        UserManager.getInstance().getUser(userToUpdate.getNode());
                    } catch (UserNotFoundException e) {
                        continue;
                    }
                    roster = rosterCache.get(userToUpdate.getNode());
                }
                // Only update rosters in memory
                if (roster != null) {
                    roster.deleteSharedUser(group, userJID);
                }
                if (!server.isLocal(userToUpdate)) {
                    // Unsubscribe from the presence of the remote user. This is only necessary for
                    // remote users and may only work with remote users that **automatically**
                    // accept presence subscription requests
                    sendSubscribeRequest(userJID, userToUpdate, false);
                }
            }
        }
        deleteRoster(userJID);
    }

    /**
     * When a user is renamed, evicts the cached rosters of everyone sharing a group
     * with them so the new name is picked up on next load.
     */
    @Override
    public void userModified(User user, Map<String,Object> params) {
        if ("nameModified".equals(params.get("type"))) {
            for (Group group : getSharedGroups(user.getUsername())) {
                ArrayList<JID> groupUsers = new ArrayList<>();
                groupUsers.addAll(group.getAdmins());
                groupUsers.addAll(group.getMembers());
                for (JID groupUser : groupUsers) {
                    rosterCache.remove(groupUser.getNode());
                }
            }
        }
    }

    /**
     * Notification that a Group user has been added. Update the group users' roster accordingly.
     *
     * @param group the group where the user was added.
     * @param addedUser the username of the user that has been added to the group.
     */
    private void groupUserAdded(Group group, JID addedUser) {
        groupUserAdded(group, getAffectedUsers(group), addedUser);
    }

    /**
     * Notification that a Group user has been added. Update the group users' roster accordingly.
     *
     * @param group the group where the user was added.
     * @param users the users to update their rosters
     * @param addedUser the username of the user that has been added to the group.
     */
    private void groupUserAdded(Group group, Collection<JID> users, JID addedUser) {
        // Get the roster of the added user.
        Roster addedUserRoster = null;
        if (server.isLocal(addedUser)) {
            addedUserRoster = rosterCache.get(addedUser.getNode());
        }
        // Iterate on all the affected users and update their rosters
        for (JID userToUpdate : users) {
            if (!addedUser.equals(userToUpdate)) {
                // Get the roster to update
                Roster roster = null;
                if (server.isLocal(userToUpdate)) {
                    // Check that the user exists, if not then continue with the next user
                    try {
                        UserManager.getInstance().getUser(userToUpdate.getNode());
                    } catch (UserNotFoundException e) {
                        continue;
                    }
                    roster = rosterCache.get(userToUpdate.getNode());
                }
                // Only update rosters in memory
                if (roster != null) {
                    roster.addSharedUser(group, addedUser);
                }
                // Check if the roster is still not in memory
                // (an earlier iteration may have caused it to be loaded)
                if (addedUserRoster == null && server.isLocal(addedUser)) {
                    addedUserRoster = rosterCache.get(addedUser.getNode());
                }
                // Update the roster of the newly added group user.
                if (addedUserRoster != null) {
                    Collection<Group> groups = GroupManager.getInstance().getGroups(userToUpdate);
                    addedUserRoster.addSharedUser(userToUpdate, groups, group);
                }
                if (!server.isLocal(addedUser)) {
                    // Subscribe to the presence of the remote user. This is only necessary for
                    // remote users and may only work with remote users that **automatically**
                    // accept presence subscription requests
                    sendSubscribeRequest(userToUpdate, addedUser, true);
                }
                if (!server.isLocal(userToUpdate)) {
                    // Subscribe to the presence of the remote user. This is only necessary for
                    // remote users and may only work with remote users that **automatically**
                    // accept presence subscription requests
                    sendSubscribeRequest(addedUser, userToUpdate, true);
                }
            }
        }
    }

    /**
     * Notification that a Group user has been deleted. Update the group users' roster accordingly.
     *
     * @param group the group from where the user was deleted.
     * @param deletedUser the username of the user that has been deleted from the group.
     */
    private void groupUserDeleted(Group group, JID deletedUser) {
        groupUserDeleted(group, getAffectedUsers(group), deletedUser);
    }

    /**
     * Notification that a Group user has been deleted. Update the group users' roster accordingly.
     *
     * @param group the group from where the user was deleted.
     * @param users the users to update their rosters
     * @param deletedUser the username of the user that has been deleted from the group.
     */
    private void groupUserDeleted(Group group, Collection<JID> users, JID deletedUser) {
        // Get the roster of the deleted user.
        Roster deletedUserRoster = null;
        if (server.isLocal(deletedUser)) {
            deletedUserRoster = rosterCache.get(deletedUser.getNode());
        }
        // Iterate on all the affected users and update their rosters
        for (JID userToUpdate : users) {
            // Get the roster to update
            Roster roster = null;
            if (server.isLocal(userToUpdate)) {
                // Check that the user exists, if not then continue with the next user
                try {
                    UserManager.getInstance().getUser(userToUpdate.getNode());
                } catch (UserNotFoundException e) {
                    continue;
                }
                roster = rosterCache.get(userToUpdate.getNode());
            }
            // Only update rosters in memory
            if (roster != null) {
                roster.deleteSharedUser(group, deletedUser);
            }
            // Check if the roster is still not in memory
            if (deletedUserRoster == null && server.isLocal(deletedUser)) {
                deletedUserRoster = rosterCache.get(deletedUser.getNode());
            }
            // Update the roster of the newly deleted group user.
            if (deletedUserRoster != null) {
                deletedUserRoster.deleteSharedUser(userToUpdate, group);
            }
            if (!server.isLocal(deletedUser)) {
                // Unsubscribe from the presence of the remote user. This is only necessary for
                // remote users and may only work with remote users that **automatically**
                // accept presence subscription requests
                sendSubscribeRequest(userToUpdate, deletedUser, false);
            }
            if (!server.isLocal(userToUpdate)) {
                // Unsubscribe from the presence of the remote user. This is only necessary for
                // remote users and may only work with remote users that **automatically**
                // accept presence subscription requests
                sendSubscribeRequest(deletedUser, userToUpdate, false);
            }
        }
    }

    /**
     * Routes a presence subscribe or unsubscribe stanza from sender to recipient.
     */
    private void sendSubscribeRequest(JID sender, JID recipient, boolean isSubscribe) {
        Presence presence = new Presence();
        presence.setFrom(sender);
        presence.setTo(recipient);
        if (isSubscribe) {
            presence.setType(Presence.Type.subscribe);
        } else {
            presence.setType(Presence.Type.unsubscribe);
        }
        routingTable.routePacket(recipient, presence, false);
    }

    /** Groups that can see the given group in their members' rosters. */
    private Collection<Group> getVisibleGroups(Group groupToCheck) {
        return GroupManager.getInstance().getVisibleGroups(groupToCheck);
    }

    /**
     * Returns true if a given group is visible to a given user. That means, if the user can
     * see the group in his roster.
     *
     * @param group the group to check if the user can see.
     * @param user the JID of the user to check if he may see the group.
     * @return true if a given group is visible to a given user.
     */
    public boolean isGroupVisible(Group group, JID user) {
        String showInRoster = group.getProperties().get("sharedRoster.showInRoster");
        if ("everybody".equals(showInRoster)) {
            return true;
        } else if ("onlyGroup".equals(showInRoster)) {
            if (group.isUser(user)) {
                return true;
            }
            // Check if the user belongs to a group that may see this group
            Collection<Group> groupList = parseGroups(group.getProperties().get(
                    "sharedRoster.groupList"));
            for (Group groupInList : groupList) {
                if (groupInList.isUser(user)) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Returns all the users that are related to a shared group. This is the logic that we are
     * using: 1) If the group visibility is configured as "Everybody" then all users in the system
     * will be returned, 2) if the group visibility is configured as "onlyGroup" then all the group
     * users will be included in the answer and 3) if the group visibility is configured as
     * "onlyGroup" and the group allows other groups to include the group in the groups users'
     * roster then all the users of the allowed groups will be included in the answer.
     */
    private Collection<JID> getAffectedUsers(Group group) {
        return getAffectedUsers(group, group.getProperties().get("sharedRoster.showInRoster"),
                group.getProperties().get("sharedRoster.groupList"));
    }

    /**
     * This method is similar to {@link #getAffectedUsers(Group)} except that it receives
     * some group properties. The group properties are passed as parameters since the caller of
     * this method may want to obtain the related users of the group based in some properties
     * values.
     *
     * This is useful when the group is being edited and some properties has changed and we need to
     * obtain the related users of the group based on the previous group state.
     */
    private Collection<JID> getAffectedUsers(Group group, String showInRoster, String groupNames) {
        // Answer an empty collection if the group is not being shown in users' rosters
        if (!"onlyGroup".equals(showInRoster) && !"everybody".equals(showInRoster)) {
            return new ArrayList<>();
        }
        // Add the users of the group
        Collection<JID> users = new HashSet<>(group.getMembers());
        users.addAll(group.getAdmins());
        // Check if anyone can see this shared group
        if ("everybody".equals(showInRoster)) {
            // Add all users in the system
            for (String username : UserManager.getInstance().getUsernames()) {
                users.add(server.createJID(username, null, true));
            }
            // Add all logged users. We don't need to add all users in the system since only the
            // logged ones will be affected.
            //users.addAll(SessionManager.getInstance().getSessionUsers());
        } else {
            // Add the users that may see the group
            Collection<Group> groupList = parseGroups(groupNames);
            for (Group groupInList : groupList) {
                users.addAll(groupInList.getMembers());
                users.addAll(groupInList.getAdmins());
            }
        }
        return users;
    }

    /**
     * Returns the users whose rosters should contain the given shared group, relative to the
     * owner of the supplied roster. Package-private: used by Roster when (re)building itself.
     */
    Collection<JID> getSharedUsersForRoster(Group group, Roster roster) {
        String showInRoster = group.getProperties().get("sharedRoster.showInRoster");
        String groupNames = group.getProperties().get("sharedRoster.groupList");
        // Answer an empty collection if the group is not being shown in users' rosters
        if (!"onlyGroup".equals(showInRoster) && !"everybody".equals(showInRoster)) {
            return new ArrayList<>();
        }
        // Add the users of the group
        Collection<JID> users = new HashSet<>(group.getMembers());
        users.addAll(group.getAdmins());
        // If the user of the roster belongs to the shared group then we should return
        // users that need to be in the roster with subscription "from"
        if (group.isUser(roster.getUsername())) {
            // Check if anyone can see this shared group
            if ("everybody".equals(showInRoster)) {
                // Add all users in the system
                for (String username : UserManager.getInstance().getUsernames()) {
                    users.add(server.createJID(username, null, true));
                }
            } else {
                // Add the users that may see the group
                Collection<Group> groupList = parseGroups(groupNames);
                for (Group groupInList : groupList) {
                    users.addAll(groupInList.getMembers());
                    users.addAll(groupInList.getAdmins());
                }
            }
        }
        return users;
    }

    /**
     * Returns true if a group in the first collection may mutually see a group of the
     * second collection. More precisely, return true if both collections contain a public
     * group (i.e. anybody can see the group) or if both collection have a group that may see
     * each other and the users are members of those groups or if one group is public and the
     * other group allowed the public group to see it.
     *
     * @param user the name of the user associated to the first collection of groups.
This is always a local user. * @param groups a collection of groups to check against the other collection of groups. * @param otherUser the JID of the user associated to the second collection of groups. * @param otherGroups the other collection of groups to check against the first collection. * @return true if a group in the first collection may mutually see a group of the * second collection. */ boolean hasMutualVisibility(String user, Collection<Group> groups, JID otherUser, Collection<Group> otherGroups) { for (Group group : groups) { for (Group otherGroup : otherGroups) { // Skip this groups if the users are not group users of the groups if (!group.isUser(user) || !otherGroup.isUser(otherUser)) { continue; } if (group.equals(otherGroup)) { return true; } String showInRoster = group.getProperties().get("sharedRoster.showInRoster"); String otherShowInRoster = otherGroup.getProperties().get("sharedRoster.showInRoster"); // Return true if both groups are public groups (i.e. anybody can see them) if ("everybody".equals(showInRoster) && "everybody".equals(otherShowInRoster)) { return true; } else if ("onlyGroup".equals(showInRoster) && "onlyGroup".equals(otherShowInRoster)) { String groupNames = group.getProperties().get("sharedRoster.groupList"); String otherGroupNames = otherGroup.getProperties().get("sharedRoster.groupList"); // Return true if each group may see the other group if (groupNames != null && otherGroupNames != null) { if (groupNames.contains(otherGroup.getName()) && otherGroupNames.contains(group.getName())) { return true; } // Check if each shared group can be seen by a group where each user belongs Collection<Group> groupList = parseGroups(groupNames); Collection<Group> otherGroupList = parseGroups(otherGroupNames); for (Group groupName : groupList) { if (groupName.isUser(otherUser)) { for (Group otherGroupName : otherGroupList) { if (otherGroupName.isUser(user)) { return true; } } } } } } else if ("everybody".equals(showInRoster) && 
"onlyGroup".equals(otherShowInRoster)) { // Return true if one group is public and the other group allowed the public // group to see him String otherGroupNames = otherGroup.getProperties().get("sharedRoster.groupList"); if (otherGroupNames != null && otherGroupNames.contains(group.getName())) { return true; } } else if ("onlyGroup".equals(showInRoster) && "everybody".equals(otherShowInRoster)) { // Return true if one group is public and the other group allowed the public // group to see him String groupNames = group.getProperties().get("sharedRoster.groupList"); // Return true if each group may see the other group if (groupNames != null && groupNames.contains(otherGroup.getName())) { return true; } } } } return false; } @Override public void start() throws IllegalStateException { super.start(); // Add this module as a user event listener so we can update // rosters when users are created or deleted UserEventDispatcher.addListener(this); // Add the new instance as a listener of group events GroupEventDispatcher.addListener(this); } @Override public void stop() { super.stop(); // Remove this module as a user event listener UserEventDispatcher.removeListener(this); // Remove this module as a listener of group events GroupEventDispatcher.removeListener(this); } public static RosterItemProvider getRosterItemProvider() { return XMPPServer.getInstance().getRosterManager().provider; } private void initProvider() { JiveGlobals.migrateProperty("provider.roster.className"); String className = JiveGlobals.getProperty("provider.roster.className", "org.jivesoftware.openfire.roster.DefaultRosterItemProvider"); if (provider == null || !className.equals(provider.getClass().getName())) { try { Class c = ClassUtils.forName(className); provider = (RosterItemProvider) c.newInstance(); } catch (Exception e) { Log.error("Error loading roster provider: " + className, e); provider = new DefaultRosterItemProvider(); } } } }
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 */

package com.microsoft.azure.management.network.v2020_03_01.implementation;

import com.microsoft.azure.arm.collection.InnerSupportsGet;
import com.microsoft.azure.arm.collection.InnerSupportsDelete;
import com.microsoft.azure.arm.collection.InnerSupportsListing;
import retrofit2.Retrofit;
import com.google.common.reflect.TypeToken;
import com.microsoft.azure.AzureServiceFuture;
import com.microsoft.azure.CloudException;
import com.microsoft.azure.ListOperationCallback;
import com.microsoft.azure.Page;
import com.microsoft.azure.PagedList;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.ServiceResponse;
import com.microsoft.rest.Validator;
import java.io.IOException;
import java.util.List;
import okhttp3.ResponseBody;
import retrofit2.http.Body;
import retrofit2.http.GET;
import retrofit2.http.Header;
import retrofit2.http.Headers;
import retrofit2.http.HTTP;
import retrofit2.http.Path;
import retrofit2.http.PUT;
import retrofit2.http.Query;
import retrofit2.http.Url;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;

/**
 * An instance of this class provides access to all the operations defined
 * in WebApplicationFirewallPolicies.
 */
public class WebApplicationFirewallPoliciesInner implements InnerSupportsGet<WebApplicationFirewallPolicyInner>, InnerSupportsDelete<Void>, InnerSupportsListing<WebApplicationFirewallPolicyInner> {
    /** The Retrofit service to perform REST calls. */
    private WebApplicationFirewallPoliciesService service;
    /** The service client containing this operation class. */
    private NetworkManagementClientImpl client;

    /**
     * Initializes an instance of WebApplicationFirewallPoliciesInner.
     *
     * @param retrofit the Retrofit instance built from a Retrofit Builder.
     * @param client the instance of the service client containing this operation class.
     */
    public WebApplicationFirewallPoliciesInner(Retrofit retrofit, NetworkManagementClientImpl client) {
        this.service = retrofit.create(WebApplicationFirewallPoliciesService.class);
        this.client = client;
    }

    /**
     * The interface defining all the services for WebApplicationFirewallPolicies to be
     * used by Retrofit to actually perform REST calls.
     */
    interface WebApplicationFirewallPoliciesService {
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2020_03_01.WebApplicationFirewallPolicies listByResourceGroup" })
        @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ApplicationGatewayWebApplicationFirewallPolicies")
        Observable<Response<ResponseBody>> listByResourceGroup(@Path("resourceGroupName") String resourceGroupName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2020_03_01.WebApplicationFirewallPolicies list" })
        @GET("subscriptions/{subscriptionId}/providers/Microsoft.Network/ApplicationGatewayWebApplicationFirewallPolicies")
        Observable<Response<ResponseBody>> list(@Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2020_03_01.WebApplicationFirewallPolicies getByResourceGroup" })
        @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ApplicationGatewayWebApplicationFirewallPolicies/{policyName}")
        Observable<Response<ResponseBody>> getByResourceGroup(@Path("resourceGroupName") String resourceGroupName, @Path("policyName") String policyName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2020_03_01.WebApplicationFirewallPolicies createOrUpdate" })
        @PUT("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ApplicationGatewayWebApplicationFirewallPolicies/{policyName}")
        Observable<Response<ResponseBody>> createOrUpdate(@Path("resourceGroupName") String resourceGroupName, @Path("policyName") String policyName, @Path("subscriptionId") String subscriptionId, @Body WebApplicationFirewallPolicyInner parameters, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        // DELETE is declared via @HTTP with hasBody=true because Retrofit's @DELETE does not allow a body.
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2020_03_01.WebApplicationFirewallPolicies delete" })
        @HTTP(path = "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ApplicationGatewayWebApplicationFirewallPolicies/{policyName}", method = "DELETE", hasBody = true)
        Observable<Response<ResponseBody>> delete(@Path("resourceGroupName") String resourceGroupName, @Path("policyName") String policyName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2020_03_01.WebApplicationFirewallPolicies beginDelete" })
        @HTTP(path = "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ApplicationGatewayWebApplicationFirewallPolicies/{policyName}", method = "DELETE", hasBody = true)
        Observable<Response<ResponseBody>> beginDelete(@Path("resourceGroupName") String resourceGroupName, @Path("policyName") String policyName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2020_03_01.WebApplicationFirewallPolicies listByResourceGroupNext" })
        @GET
        Observable<Response<ResponseBody>> listByResourceGroupNext(@Url String nextUrl, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2020_03_01.WebApplicationFirewallPolicies listNext" })
        @GET
        Observable<Response<ResponseBody>> listNext(@Url String nextUrl, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
    }

    /**
     * Lists all of the protection policies within a resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList&lt;WebApplicationFirewallPolicyInner&gt; object if successful.
     */
    public PagedList<WebApplicationFirewallPolicyInner> listByResourceGroup(final String resourceGroupName) {
        // Blocking call: fetch the first page, then page lazily on iteration.
        ServiceResponse<Page<WebApplicationFirewallPolicyInner>> response = listByResourceGroupSinglePageAsync(resourceGroupName).toBlocking().single();
        return new PagedList<WebApplicationFirewallPolicyInner>(response.body()) {
            @Override
            public Page<WebApplicationFirewallPolicyInner> nextPage(String nextPageLink) {
                return listByResourceGroupNextSinglePageAsync(nextPageLink).toBlocking().single().body();
            }
        };
    }

    /**
     * Lists all of the protection policies within a resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<WebApplicationFirewallPolicyInner>> listByResourceGroupAsync(final String resourceGroupName, final ListOperationCallback<WebApplicationFirewallPolicyInner> serviceCallback) {
        return AzureServiceFuture.fromPageResponse(
            listByResourceGroupSinglePageAsync(resourceGroupName),
            new Func1<String, Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>> call(String nextPageLink) {
                    return listByResourceGroupNextSinglePageAsync(nextPageLink);
                }
            },
            serviceCallback);
    }

    /**
     * Lists all of the protection policies within a resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;WebApplicationFirewallPolicyInner&gt; object
     */
    public Observable<Page<WebApplicationFirewallPolicyInner>> listByResourceGroupAsync(final String resourceGroupName) {
        // Strip the ServiceResponse wrapper, emitting only the page payloads.
        return listByResourceGroupWithServiceResponseAsync(resourceGroupName)
            .map(new Func1<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>, Page<WebApplicationFirewallPolicyInner>>() {
                @Override
                public Page<WebApplicationFirewallPolicyInner> call(ServiceResponse<Page<WebApplicationFirewallPolicyInner>> response) {
                    return response.body();
                }
            });
    }

    /**
     * Lists all of the protection policies within a resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;WebApplicationFirewallPolicyInner&gt; object
     */
    public Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>> listByResourceGroupWithServiceResponseAsync(final String resourceGroupName) {
        // Recursively follow nextPageLink until the server stops returning one.
        return listByResourceGroupSinglePageAsync(resourceGroupName)
            .concatMap(new Func1<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>, Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>> call(ServiceResponse<Page<WebApplicationFirewallPolicyInner>> page) {
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        return Observable.just(page);
                    }
                    return Observable.just(page).concatWith(listByResourceGroupNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }

    /**
     * Lists all of the protection policies within a resource group.
     * Issues a single request and returns one page wrapped in a
     * {@code ServiceResponse<Page<WebApplicationFirewallPolicyInner>>}.
     *
     * @param resourceGroupName The name of the resource group.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the PagedList&lt;WebApplicationFirewallPolicyInner&gt; object wrapped in {@link ServiceResponse} if successful.
     */
    public Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>> listByResourceGroupSinglePageAsync(final String resourceGroupName) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        final String apiVersion = "2020-03-01";
        return service.listByResourceGroup(resourceGroupName, this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<PageImpl<WebApplicationFirewallPolicyInner>> result = listByResourceGroupDelegate(response);
                        return Observable.just(new ServiceResponse<Page<WebApplicationFirewallPolicyInner>>(result.body(), result.response()));
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Deserializes the raw HTTP response: 200 -> a page of policies; anything else -> CloudException.
    private ServiceResponse<PageImpl<WebApplicationFirewallPolicyInner>> listByResourceGroupDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<PageImpl<WebApplicationFirewallPolicyInner>, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<PageImpl<WebApplicationFirewallPolicyInner>>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }

    /**
     * Gets all the WAF policies in a subscription.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList&lt;WebApplicationFirewallPolicyInner&gt; object if successful.
     */
    public PagedList<WebApplicationFirewallPolicyInner> list() {
        // Blocking call: fetch the first page, then page lazily on iteration.
        ServiceResponse<Page<WebApplicationFirewallPolicyInner>> response = listSinglePageAsync().toBlocking().single();
        return new PagedList<WebApplicationFirewallPolicyInner>(response.body()) {
            @Override
            public Page<WebApplicationFirewallPolicyInner> nextPage(String nextPageLink) {
                return listNextSinglePageAsync(nextPageLink).toBlocking().single().body();
            }
        };
    }

    /**
     * Gets all the WAF policies in a subscription.
     *
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<WebApplicationFirewallPolicyInner>> listAsync(final ListOperationCallback<WebApplicationFirewallPolicyInner> serviceCallback) {
        return AzureServiceFuture.fromPageResponse(
            listSinglePageAsync(),
            new Func1<String, Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>> call(String nextPageLink) {
                    return listNextSinglePageAsync(nextPageLink);
                }
            },
            serviceCallback);
    }

    /**
     * Gets all the WAF policies in a subscription.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;WebApplicationFirewallPolicyInner&gt; object
     */
    public Observable<Page<WebApplicationFirewallPolicyInner>> listAsync() {
        // Strip the ServiceResponse wrapper, emitting only the page payloads.
        return listWithServiceResponseAsync()
            .map(new Func1<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>, Page<WebApplicationFirewallPolicyInner>>() {
                @Override
                public Page<WebApplicationFirewallPolicyInner> call(ServiceResponse<Page<WebApplicationFirewallPolicyInner>> response) {
                    return response.body();
                }
            });
    }

    /**
     * Gets all the WAF policies in a subscription.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;WebApplicationFirewallPolicyInner&gt; object
     */
    public Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>> listWithServiceResponseAsync() {
        // Recursively follow nextPageLink until the server stops returning one.
        return listSinglePageAsync()
            .concatMap(new Func1<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>, Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>> call(ServiceResponse<Page<WebApplicationFirewallPolicyInner>> page) {
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        return Observable.just(page);
                    }
                    return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }

    /**
     * Gets all the WAF policies in a subscription.
     * Issues a single request and returns one page wrapped in a ServiceResponse.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the PagedList&lt;WebApplicationFirewallPolicyInner&gt; object wrapped in {@link ServiceResponse} if successful.
     */
    public Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>> listSinglePageAsync() {
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        final String apiVersion = "2020-03-01";
        return service.list(this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<PageImpl<WebApplicationFirewallPolicyInner>> result = listDelegate(response);
                        return Observable.just(new ServiceResponse<Page<WebApplicationFirewallPolicyInner>>(result.body(), result.response()));
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Deserializes the raw HTTP response: 200 -> a page of policies; anything else -> CloudException.
    private ServiceResponse<PageImpl<WebApplicationFirewallPolicyInner>> listDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<PageImpl<WebApplicationFirewallPolicyInner>, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<PageImpl<WebApplicationFirewallPolicyInner>>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }

    /**
     * Retrieve protection policy with specified name within a resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param policyName The name of the policy.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the WebApplicationFirewallPolicyInner object if successful.
     */
    public WebApplicationFirewallPolicyInner getByResourceGroup(String resourceGroupName, String policyName) {
        // Blocking convenience wrapper over the async variant.
        return getByResourceGroupWithServiceResponseAsync(resourceGroupName, policyName).toBlocking().single().body();
    }

    /**
     * Retrieve protection policy with specified name within a resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param policyName The name of the policy.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<WebApplicationFirewallPolicyInner> getByResourceGroupAsync(String resourceGroupName, String policyName, final ServiceCallback<WebApplicationFirewallPolicyInner> serviceCallback) {
        return ServiceFuture.fromResponse(getByResourceGroupWithServiceResponseAsync(resourceGroupName, policyName), serviceCallback);
    }

    /**
     * Retrieve protection policy with specified name within a resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param policyName The name of the policy.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the WebApplicationFirewallPolicyInner object
     */
    public Observable<WebApplicationFirewallPolicyInner> getByResourceGroupAsync(String resourceGroupName, String policyName) {
        // Strip the ServiceResponse wrapper, emitting only the policy payload.
        return getByResourceGroupWithServiceResponseAsync(resourceGroupName, policyName).map(new Func1<ServiceResponse<WebApplicationFirewallPolicyInner>, WebApplicationFirewallPolicyInner>() {
            @Override
            public WebApplicationFirewallPolicyInner call(ServiceResponse<WebApplicationFirewallPolicyInner> response) {
                return response.body();
            }
        });
    }

    /**
     * Retrieve protection policy with specified name within a resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param policyName The name of the policy.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the WebApplicationFirewallPolicyInner object
     */
    public Observable<ServiceResponse<WebApplicationFirewallPolicyInner>> getByResourceGroupWithServiceResponseAsync(String resourceGroupName, String policyName) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (policyName == null) {
            throw new IllegalArgumentException("Parameter policyName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        final String apiVersion = "2020-03-01";
        return service.getByResourceGroup(resourceGroupName, policyName, this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<WebApplicationFirewallPolicyInner>>>() {
                @Override
                public Observable<ServiceResponse<WebApplicationFirewallPolicyInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<WebApplicationFirewallPolicyInner> clientResponse = getByResourceGroupDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Deserializes the raw HTTP response: 200 -> a policy; anything else -> CloudException.
    private ServiceResponse<WebApplicationFirewallPolicyInner> getByResourceGroupDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<WebApplicationFirewallPolicyInner, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<WebApplicationFirewallPolicyInner>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }

    /**
     * Creates or update policy with specified rule set name within a resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param policyName The name of the policy.
     * @param parameters Policy to be created.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the WebApplicationFirewallPolicyInner object if successful.
     */
    public WebApplicationFirewallPolicyInner createOrUpdate(String resourceGroupName, String policyName, WebApplicationFirewallPolicyInner parameters) {
        // Blocking convenience wrapper over the async variant.
        return createOrUpdateWithServiceResponseAsync(resourceGroupName, policyName, parameters).toBlocking().single().body();
    }

    /**
     * Creates or update policy with specified rule set name within a resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param policyName The name of the policy.
     * @param parameters Policy to be created.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<WebApplicationFirewallPolicyInner> createOrUpdateAsync(String resourceGroupName, String policyName, WebApplicationFirewallPolicyInner parameters, final ServiceCallback<WebApplicationFirewallPolicyInner> serviceCallback) {
        return ServiceFuture.fromResponse(createOrUpdateWithServiceResponseAsync(resourceGroupName, policyName, parameters), serviceCallback);
    }

    /**
     * Creates or update policy with specified rule set name within a resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param policyName The name of the policy.
     * @param parameters Policy to be created.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the WebApplicationFirewallPolicyInner object
     */
    public Observable<WebApplicationFirewallPolicyInner> createOrUpdateAsync(String resourceGroupName, String policyName, WebApplicationFirewallPolicyInner parameters) {
        // Strip the ServiceResponse wrapper, emitting only the policy payload.
        return createOrUpdateWithServiceResponseAsync(resourceGroupName, policyName, parameters).map(new Func1<ServiceResponse<WebApplicationFirewallPolicyInner>, WebApplicationFirewallPolicyInner>() {
            @Override
            public WebApplicationFirewallPolicyInner call(ServiceResponse<WebApplicationFirewallPolicyInner> response) {
                return response.body();
            }
        });
    }

    /**
     * Creates or update policy with specified rule set name within a resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param policyName The name of the policy.
     * @param parameters Policy to be created.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the WebApplicationFirewallPolicyInner object
     */
    public Observable<ServiceResponse<WebApplicationFirewallPolicyInner>> createOrUpdateWithServiceResponseAsync(String resourceGroupName, String policyName, WebApplicationFirewallPolicyInner parameters) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (policyName == null) {
            throw new IllegalArgumentException("Parameter policyName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (parameters == null) {
            throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
        }
        // Client-side validation of the request body before sending.
        Validator.validate(parameters);
        final String apiVersion = "2020-03-01";
        return service.createOrUpdate(resourceGroupName, policyName, this.client.subscriptionId(), parameters, apiVersion, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<WebApplicationFirewallPolicyInner>>>() {
                @Override
                public Observable<ServiceResponse<WebApplicationFirewallPolicyInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<WebApplicationFirewallPolicyInner> clientResponse = createOrUpdateDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Deserializes the raw HTTP response: 200 (updated) or 201 (created) -> a policy; anything else -> CloudException.
    private ServiceResponse<WebApplicationFirewallPolicyInner> createOrUpdateDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<WebApplicationFirewallPolicyInner, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<WebApplicationFirewallPolicyInner>() { }.getType())
                .register(201, new TypeToken<WebApplicationFirewallPolicyInner>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }

    /**
     * Deletes Policy.
     *
     * @param resourceGroupName The name of the resource group.
     * @param policyName The name of the policy.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void delete(String resourceGroupName, String policyName) {
        // last() rather than single(): waits for the final emission of the long-running delete.
        deleteWithServiceResponseAsync(resourceGroupName, policyName).toBlocking().last().body();
    }

    /**
     * Deletes Policy.
     *
     * @param resourceGroupName The name of the resource group.
     * @param policyName The name of the policy.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */ public ServiceFuture<Void> deleteAsync(String resourceGroupName, String policyName, final ServiceCallback<Void> serviceCallback) { return ServiceFuture.fromResponse(deleteWithServiceResponseAsync(resourceGroupName, policyName), serviceCallback); } /** * Deletes Policy. * * @param resourceGroupName The name of the resource group. * @param policyName The name of the policy. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable for the request */ public Observable<Void> deleteAsync(String resourceGroupName, String policyName) { return deleteWithServiceResponseAsync(resourceGroupName, policyName).map(new Func1<ServiceResponse<Void>, Void>() { @Override public Void call(ServiceResponse<Void> response) { return response.body(); } }); } /** * Deletes Policy. * * @param resourceGroupName The name of the resource group. * @param policyName The name of the policy. 
* @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable for the request */ public Observable<ServiceResponse<Void>> deleteWithServiceResponseAsync(String resourceGroupName, String policyName) { if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (policyName == null) { throw new IllegalArgumentException("Parameter policyName is required and cannot be null."); } if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } final String apiVersion = "2020-03-01"; Observable<Response<ResponseBody>> observable = service.delete(resourceGroupName, policyName, this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent()); return client.getAzureClient().getPostOrDeleteResultAsync(observable, new TypeToken<Void>() { }.getType()); } /** * Deletes Policy. * * @param resourceGroupName The name of the resource group. * @param policyName The name of the policy. * @throws IllegalArgumentException thrown if parameters fail the validation * @throws CloudException thrown if the request is rejected by server * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent */ public void beginDelete(String resourceGroupName, String policyName) { beginDeleteWithServiceResponseAsync(resourceGroupName, policyName).toBlocking().single().body(); } /** * Deletes Policy. * * @param resourceGroupName The name of the resource group. * @param policyName The name of the policy. * @param serviceCallback the async ServiceCallback to handle successful and failed responses. 
* @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */ public ServiceFuture<Void> beginDeleteAsync(String resourceGroupName, String policyName, final ServiceCallback<Void> serviceCallback) { return ServiceFuture.fromResponse(beginDeleteWithServiceResponseAsync(resourceGroupName, policyName), serviceCallback); } /** * Deletes Policy. * * @param resourceGroupName The name of the resource group. * @param policyName The name of the policy. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceResponse} object if successful. */ public Observable<Void> beginDeleteAsync(String resourceGroupName, String policyName) { return beginDeleteWithServiceResponseAsync(resourceGroupName, policyName).map(new Func1<ServiceResponse<Void>, Void>() { @Override public Void call(ServiceResponse<Void> response) { return response.body(); } }); } /** * Deletes Policy. * * @param resourceGroupName The name of the resource group. * @param policyName The name of the policy. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceResponse} object if successful. 
*/ public Observable<ServiceResponse<Void>> beginDeleteWithServiceResponseAsync(String resourceGroupName, String policyName) { if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (policyName == null) { throw new IllegalArgumentException("Parameter policyName is required and cannot be null."); } if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } final String apiVersion = "2020-03-01"; return service.beginDelete(resourceGroupName, policyName, this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() { @Override public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) { try { ServiceResponse<Void> clientResponse = beginDeleteDelegate(response); return Observable.just(clientResponse); } catch (Throwable t) { return Observable.error(t); } } }); } private ServiceResponse<Void> beginDeleteDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException { return this.client.restClient().responseBuilderFactory().<Void, CloudException>newInstance(this.client.serializerAdapter()) .register(200, new TypeToken<Void>() { }.getType()) .register(202, new TypeToken<Void>() { }.getType()) .register(204, new TypeToken<Void>() { }.getType()) .registerError(CloudException.class) .build(response); } /** * Lists all of the protection policies within a resource group. * * @param nextPageLink The NextLink from the previous successful call to List operation. 
* @throws IllegalArgumentException thrown if parameters fail the validation * @throws CloudException thrown if the request is rejected by server * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @return the PagedList&lt;WebApplicationFirewallPolicyInner&gt; object if successful. */ public PagedList<WebApplicationFirewallPolicyInner> listByResourceGroupNext(final String nextPageLink) { ServiceResponse<Page<WebApplicationFirewallPolicyInner>> response = listByResourceGroupNextSinglePageAsync(nextPageLink).toBlocking().single(); return new PagedList<WebApplicationFirewallPolicyInner>(response.body()) { @Override public Page<WebApplicationFirewallPolicyInner> nextPage(String nextPageLink) { return listByResourceGroupNextSinglePageAsync(nextPageLink).toBlocking().single().body(); } }; } /** * Lists all of the protection policies within a resource group. * * @param nextPageLink The NextLink from the previous successful call to List operation. * @param serviceFuture the ServiceFuture object tracking the Retrofit calls * @param serviceCallback the async ServiceCallback to handle successful and failed responses. 
* @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */ public ServiceFuture<List<WebApplicationFirewallPolicyInner>> listByResourceGroupNextAsync(final String nextPageLink, final ServiceFuture<List<WebApplicationFirewallPolicyInner>> serviceFuture, final ListOperationCallback<WebApplicationFirewallPolicyInner> serviceCallback) { return AzureServiceFuture.fromPageResponse( listByResourceGroupNextSinglePageAsync(nextPageLink), new Func1<String, Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>>>() { @Override public Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>> call(String nextPageLink) { return listByResourceGroupNextSinglePageAsync(nextPageLink); } }, serviceCallback); } /** * Lists all of the protection policies within a resource group. * * @param nextPageLink The NextLink from the previous successful call to List operation. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the PagedList&lt;WebApplicationFirewallPolicyInner&gt; object */ public Observable<Page<WebApplicationFirewallPolicyInner>> listByResourceGroupNextAsync(final String nextPageLink) { return listByResourceGroupNextWithServiceResponseAsync(nextPageLink) .map(new Func1<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>, Page<WebApplicationFirewallPolicyInner>>() { @Override public Page<WebApplicationFirewallPolicyInner> call(ServiceResponse<Page<WebApplicationFirewallPolicyInner>> response) { return response.body(); } }); } /** * Lists all of the protection policies within a resource group. * * @param nextPageLink The NextLink from the previous successful call to List operation. 
* @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the PagedList&lt;WebApplicationFirewallPolicyInner&gt; object */ public Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>> listByResourceGroupNextWithServiceResponseAsync(final String nextPageLink) { return listByResourceGroupNextSinglePageAsync(nextPageLink) .concatMap(new Func1<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>, Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>>>() { @Override public Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>> call(ServiceResponse<Page<WebApplicationFirewallPolicyInner>> page) { String nextPageLink = page.body().nextPageLink(); if (nextPageLink == null) { return Observable.just(page); } return Observable.just(page).concatWith(listByResourceGroupNextWithServiceResponseAsync(nextPageLink)); } }); } /** * Lists all of the protection policies within a resource group. * ServiceResponse<PageImpl<WebApplicationFirewallPolicyInner>> * @param nextPageLink The NextLink from the previous successful call to List operation. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the PagedList&lt;WebApplicationFirewallPolicyInner&gt; object wrapped in {@link ServiceResponse} if successful. 
*/ public Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>> listByResourceGroupNextSinglePageAsync(final String nextPageLink) { if (nextPageLink == null) { throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null."); } String nextUrl = String.format("%s", nextPageLink); return service.listByResourceGroupNext(nextUrl, this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>>>() { @Override public Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>> call(Response<ResponseBody> response) { try { ServiceResponse<PageImpl<WebApplicationFirewallPolicyInner>> result = listByResourceGroupNextDelegate(response); return Observable.just(new ServiceResponse<Page<WebApplicationFirewallPolicyInner>>(result.body(), result.response())); } catch (Throwable t) { return Observable.error(t); } } }); } private ServiceResponse<PageImpl<WebApplicationFirewallPolicyInner>> listByResourceGroupNextDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException { return this.client.restClient().responseBuilderFactory().<PageImpl<WebApplicationFirewallPolicyInner>, CloudException>newInstance(this.client.serializerAdapter()) .register(200, new TypeToken<PageImpl<WebApplicationFirewallPolicyInner>>() { }.getType()) .registerError(CloudException.class) .build(response); } /** * Gets all the WAF policies in a subscription. * * @param nextPageLink The NextLink from the previous successful call to List operation. * @throws IllegalArgumentException thrown if parameters fail the validation * @throws CloudException thrown if the request is rejected by server * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @return the PagedList&lt;WebApplicationFirewallPolicyInner&gt; object if successful. 
*/ public PagedList<WebApplicationFirewallPolicyInner> listNext(final String nextPageLink) { ServiceResponse<Page<WebApplicationFirewallPolicyInner>> response = listNextSinglePageAsync(nextPageLink).toBlocking().single(); return new PagedList<WebApplicationFirewallPolicyInner>(response.body()) { @Override public Page<WebApplicationFirewallPolicyInner> nextPage(String nextPageLink) { return listNextSinglePageAsync(nextPageLink).toBlocking().single().body(); } }; } /** * Gets all the WAF policies in a subscription. * * @param nextPageLink The NextLink from the previous successful call to List operation. * @param serviceFuture the ServiceFuture object tracking the Retrofit calls * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */ public ServiceFuture<List<WebApplicationFirewallPolicyInner>> listNextAsync(final String nextPageLink, final ServiceFuture<List<WebApplicationFirewallPolicyInner>> serviceFuture, final ListOperationCallback<WebApplicationFirewallPolicyInner> serviceCallback) { return AzureServiceFuture.fromPageResponse( listNextSinglePageAsync(nextPageLink), new Func1<String, Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>>>() { @Override public Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>> call(String nextPageLink) { return listNextSinglePageAsync(nextPageLink); } }, serviceCallback); } /** * Gets all the WAF policies in a subscription. * * @param nextPageLink The NextLink from the previous successful call to List operation. 
* @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the PagedList&lt;WebApplicationFirewallPolicyInner&gt; object */ public Observable<Page<WebApplicationFirewallPolicyInner>> listNextAsync(final String nextPageLink) { return listNextWithServiceResponseAsync(nextPageLink) .map(new Func1<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>, Page<WebApplicationFirewallPolicyInner>>() { @Override public Page<WebApplicationFirewallPolicyInner> call(ServiceResponse<Page<WebApplicationFirewallPolicyInner>> response) { return response.body(); } }); } /** * Gets all the WAF policies in a subscription. * * @param nextPageLink The NextLink from the previous successful call to List operation. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the PagedList&lt;WebApplicationFirewallPolicyInner&gt; object */ public Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>> listNextWithServiceResponseAsync(final String nextPageLink) { return listNextSinglePageAsync(nextPageLink) .concatMap(new Func1<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>, Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>>>() { @Override public Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>> call(ServiceResponse<Page<WebApplicationFirewallPolicyInner>> page) { String nextPageLink = page.body().nextPageLink(); if (nextPageLink == null) { return Observable.just(page); } return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink)); } }); } /** * Gets all the WAF policies in a subscription. * ServiceResponse<PageImpl<WebApplicationFirewallPolicyInner>> * @param nextPageLink The NextLink from the previous successful call to List operation. 
* @throws IllegalArgumentException thrown if parameters fail the validation * @return the PagedList&lt;WebApplicationFirewallPolicyInner&gt; object wrapped in {@link ServiceResponse} if successful. */ public Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>> listNextSinglePageAsync(final String nextPageLink) { if (nextPageLink == null) { throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null."); } String nextUrl = String.format("%s", nextPageLink); return service.listNext(nextUrl, this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>>>() { @Override public Observable<ServiceResponse<Page<WebApplicationFirewallPolicyInner>>> call(Response<ResponseBody> response) { try { ServiceResponse<PageImpl<WebApplicationFirewallPolicyInner>> result = listNextDelegate(response); return Observable.just(new ServiceResponse<Page<WebApplicationFirewallPolicyInner>>(result.body(), result.response())); } catch (Throwable t) { return Observable.error(t); } } }); } private ServiceResponse<PageImpl<WebApplicationFirewallPolicyInner>> listNextDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException { return this.client.restClient().responseBuilderFactory().<PageImpl<WebApplicationFirewallPolicyInner>, CloudException>newInstance(this.client.serializerAdapter()) .register(200, new TypeToken<PageImpl<WebApplicationFirewallPolicyInner>>() { }.getType()) .registerError(CloudException.class) .build(response); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache;

import org.apache.ignite.configuration.*;
import org.apache.ignite.internal.*;
import org.apache.ignite.internal.processors.plugin.*;
import org.apache.ignite.internal.util.tostring.*;
import org.apache.ignite.internal.util.typedef.internal.*;
import org.apache.ignite.lang.*;

import java.util.*;

/**
 * Cache start descriptor. Captures a dynamically (or statically) started cache's
 * configuration, deployment identity and lifecycle flags.
 *
 * <p>Thread-safety: {@code rmtCfgs} is published via a volatile field using a
 * copy-on-write scheme (see {@link #addRemoteConfiguration}); the remaining flags
 * follow the original single-writer discovery-thread usage.
 */
public class DynamicCacheDescriptor {
    /** Cache start ID. */
    private IgniteUuid deploymentId;

    /** Cache configuration. */
    @GridToStringExclude
    private CacheConfiguration cacheCfg;

    /** Cancelled flag. */
    private boolean cancelled;

    /** Locally configured flag. */
    private boolean locCfg;

    /** Statically configured flag. */
    private boolean staticCfg;

    /** Started flag. */
    private boolean started;

    /** Cache type. */
    private CacheType cacheType;

    /** Per-node remote configurations, published copy-on-write through this volatile field. */
    private volatile Map<UUID, CacheConfiguration> rmtCfgs;

    /** Template configuration flag. */
    private boolean template;

    /** Cache plugin manager. */
    private final CachePluginManager pluginMgr;

    /** Updates allowed flag (defaults to {@code true}). */
    private boolean updatesAllowed = true;

    /**
     * @param ctx Context.
     * @param cacheCfg Cache configuration.
     * @param cacheType Cache type.
     * @param template {@code True} if this is template configuration.
     * @param deploymentId Deployment ID.
     */
    public DynamicCacheDescriptor(GridKernalContext ctx,
        CacheConfiguration cacheCfg,
        CacheType cacheType,
        boolean template,
        IgniteUuid deploymentId) {
        this.cacheCfg = cacheCfg;
        this.cacheType = cacheType;
        this.template = template;
        this.deploymentId = deploymentId;

        pluginMgr = new CachePluginManager(ctx, cacheCfg);
    }

    /**
     * @return {@code True} if this is template configuration.
     */
    public boolean template() {
        return template;
    }

    /**
     * @return Cache type.
     */
    public CacheType cacheType() {
        return cacheType;
    }

    /**
     * @return Start ID.
     */
    public IgniteUuid deploymentId() {
        return deploymentId;
    }

    /**
     * @param deploymentId Deployment ID.
     */
    public void deploymentId(IgniteUuid deploymentId) {
        this.deploymentId = deploymentId;
    }

    /**
     * @return Locally configured flag.
     */
    public boolean locallyConfigured() {
        return locCfg;
    }

    /**
     * @param locCfg Locally configured flag.
     */
    public void locallyConfigured(boolean locCfg) {
        this.locCfg = locCfg;
    }

    /**
     * @return {@code True} if statically configured.
     */
    public boolean staticallyConfigured() {
        return staticCfg;
    }

    /**
     * @param staticCfg {@code True} if statically configured.
     */
    public void staticallyConfigured(boolean staticCfg) {
        this.staticCfg = staticCfg;
    }

    /**
     * Flips the started flag exactly once.
     *
     * @return {@code True} if started flag was flipped by this call.
     */
    public boolean onStart() {
        if (!started) {
            started = true;

            return true;
        }

        return false;
    }

    /**
     * @return Cache configuration.
     */
    public CacheConfiguration cacheConfiguration() {
        return cacheCfg;
    }

    /**
     * @return Cache plugin manager.
     */
    public CachePluginManager pluginManager() {
        return pluginMgr;
    }

    /**
     * Sets cancelled flag.
     */
    public void onCancelled() {
        cancelled = true;
    }

    /**
     * @return Cancelled flag.
     */
    public boolean cancelled() {
        return cancelled;
    }

    /**
     * @param nodeId Remote node ID.
     * @return Configuration, or {@code null} if none was registered for the node.
     */
    public CacheConfiguration remoteConfiguration(UUID nodeId) {
        Map<UUID, CacheConfiguration> cfgs = rmtCfgs;

        return cfgs == null ? null : cfgs.get(nodeId);
    }

    /**
     * Registers a remote node's configuration.
     *
     * @param nodeId Remote node ID.
     * @param cfg Remote node configuration.
     */
    public void addRemoteConfiguration(UUID nodeId, CacheConfiguration cfg) {
        // Fix: the previous implementation assigned the new HashMap to the volatile
        // field *before* inserting the entry and then mutated it in place, so a
        // concurrent reader (remoteConfiguration) could observe a map missing the
        // entry or see an unsafely published/partially-updated HashMap. Build the
        // updated map privately and publish it with a single volatile write.
        Map<UUID, CacheConfiguration> cfgs = rmtCfgs;

        Map<UUID, CacheConfiguration> updated = cfgs == null ? new HashMap<>() :
            new HashMap<>(cfgs);

        updated.put(nodeId, cfg);

        rmtCfgs = updated;
    }

    /**
     * Drops all collected remote configurations.
     */
    public void clearRemoteConfigurations() {
        rmtCfgs = null;
    }

    /**
     * @return Updates allowed flag.
     */
    public boolean updatesAllowed() {
        return updatesAllowed;
    }

    /**
     * @param updatesAllowed Updates allowed flag.
     */
    public void updatesAllowed(boolean updatesAllowed) {
        this.updatesAllowed = updatesAllowed;
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(DynamicCacheDescriptor.class, this, "cacheName", U.maskName(cacheCfg.getName()));
    }
}
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.network;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;

import javax.ejb.Local;
import javax.inject.Inject;
import javax.naming.ConfigurationException;

import org.apache.cloudstack.api.command.admin.network.CreateStorageNetworkIpRangeCmd;
import org.apache.cloudstack.api.command.admin.network.DeleteStorageNetworkIpRangeCmd;
import org.apache.cloudstack.api.command.admin.network.ListStorageNetworkIpRangeCmd;
import org.apache.cloudstack.api.command.admin.network.UpdateStorageNetworkIpRangeCmd;
import org.apache.log4j.Logger;
import org.springframework.stereotype.Component;

import com.cloud.dc.HostPodVO;
import com.cloud.dc.StorageNetworkIpAddressVO;
import com.cloud.dc.StorageNetworkIpRange;
import com.cloud.dc.StorageNetworkIpRangeVO;
import com.cloud.dc.dao.HostPodDao;
import com.cloud.dc.dao.StorageNetworkIpAddressDao;
import com.cloud.dc.dao.StorageNetworkIpRangeDao;
import com.cloud.exception.InvalidParameterValueException;
import com.cloud.network.Networks.TrafficType;
import com.cloud.network.dao.NetworkDao;
import com.cloud.network.dao.NetworkVO;
import com.cloud.utils.component.ManagerBase;
import com.cloud.utils.db.DB;
import com.cloud.utils.db.SearchCriteria.Op;
import com.cloud.utils.db.SearchCriteria2;
import com.cloud.utils.db.SearchCriteriaService;
import com.cloud.utils.db.Transaction;
import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.utils.net.NetUtils;
import com.cloud.vm.SecondaryStorageVmVO;
import com.cloud.vm.VirtualMachine;
import com.cloud.vm.dao.SecondaryStorageVmDao;

/**
 * Manages storage-network IP ranges: CRUD of ranges per pod and allocation /
 * release of individual storage IPs out of those ranges.
 */
@Component
@Local(value = { StorageNetworkManager.class, StorageNetworkService.class })
public class StorageNetworkManagerImpl extends ManagerBase implements StorageNetworkManager, StorageNetworkService {
    private static final Logger s_logger = Logger.getLogger(StorageNetworkManagerImpl.class);

    @Inject
    StorageNetworkIpAddressDao _sNwIpDao;
    @Inject
    StorageNetworkIpRangeDao _sNwIpRangeDao;
    @Inject
    NetworkDao _networkDao;
    @Inject
    HostPodDao _podDao;
    @Inject
    SecondaryStorageVmDao _ssvmDao;

    /**
     * Rejects ranges overlapping the pod's private IP range. The private range is
     * parsed out of the pod description (formatted as "startIp-endIp").
     *
     * @throws InvalidParameterValueException if [startIp, endIp] overlaps the pod's private range.
     */
    private void checkOverlapPrivateIpRange(long podId, String startIp, String endIp) {
        HostPodVO pod = _podDao.findById(podId);
        if (pod == null) {
            throw new CloudRuntimeException("Cannot find pod " + podId);
        }
        String[] IpRange = pod.getDescription().split("-");
        // Fix: a description without a '-' yields a single-element array; the old code
        // then read IpRange[1] and threw ArrayIndexOutOfBoundsException. Treat any
        // malformed description as "no private range to check", matching the intent of
        // the existing null/validity guards.
        if ((IpRange.length < 2) || (IpRange[0] == null || IpRange[1] == null) || (!NetUtils.isValidIp(IpRange[0]) || !NetUtils.isValidIp(IpRange[1]))) {
            return;
        }
        if (NetUtils.ipRangesOverlap(startIp, endIp, IpRange[0], IpRange[1])) {
            throw new InvalidParameterValueException("The Storage network Start IP and endIP address range overlap with private IP :" + IpRange[0] + ":" + IpRange[1]);
        }
    }

    /**
     * Rejects ranges overlapping any storage IP range already defined in the pod.
     *
     * @throws InvalidParameterValueException on overlap with an existing range.
     */
    private void checkOverlapStorageIpRange(long podId, String startIp, String endIp) {
        List<StorageNetworkIpRangeVO> curRanges = _sNwIpRangeDao.listByPodId(podId);
        for (StorageNetworkIpRangeVO range : curRanges) {
            if (NetUtils.ipRangesOverlap(startIp, endIp, range.getStartIp(), range.getEndIp())) {
                throw new InvalidParameterValueException("The Storage network Start IP and endIP address range overlap with private IP :" + range.getStartIp() + " - " + range.getEndIp());
            }
        }
    }

    /**
     * Inserts one row per IP in [startIp, endIp] into op_dc_storage_network_ip_address,
     * pulling a MAC address from the zone's data_center counter and bumping it per IP.
     * A NULL "taken" column marks the address as free.
     */
    private void createStorageIpEntires(Transaction txn, long rangeId, String startIp, String endIp, long zoneId) throws SQLException {
        long startIPLong = NetUtils.ip2Long(startIp);
        long endIPLong = NetUtils.ip2Long(endIp);
        String insertSql = "INSERT INTO `cloud`.`op_dc_storage_network_ip_address` (range_id, ip_address, mac_address, taken) VALUES (?, ?, (select mac_address from `cloud`.`data_center` where id=?), ?)";
        String updateSql = "UPDATE `cloud`.`data_center` set mac_address = mac_address+1 where id=?";
        PreparedStatement stmt = null;
        Connection conn = txn.getConnection();
        while (startIPLong <= endIPLong) {
            stmt = conn.prepareStatement(insertSql);
            stmt.setLong(1, rangeId);
            stmt.setString(2, NetUtils.long2Ip(startIPLong++));
            stmt.setLong(3, zoneId);
            // NULL taken => address is available for allocation.
            stmt.setNull(4, java.sql.Types.DATE);
            stmt.executeUpdate();
            stmt.close();
            // NOTE(review): the insert goes through conn.prepareStatement while this
            // update goes through txn.prepareStatement — presumably both target the same
            // transactional connection; verify against the Transaction implementation.
            stmt = txn.prepareStatement(updateSql);
            stmt.setLong(1, zoneId);
            stmt.executeUpdate();
            stmt.close();
        }
    }

    /**
     * Updates vlan/start/end/netmask of an existing range. Refused while any IP in
     * the range is in use. Overlap checks rerun when the boundaries change.
     */
    @Override
    @DB
    public StorageNetworkIpRange updateIpRange(UpdateStorageNetworkIpRangeCmd cmd) {
        Integer vlan = cmd.getVlan();
        Long rangeId = cmd.getId();
        String startIp = cmd.getStartIp();
        String endIp = cmd.getEndIp();
        String netmask = cmd.getNetmask();

        if (netmask != null && !NetUtils.isValidNetmask(netmask)) {
            throw new CloudRuntimeException("Invalid netmask:" + netmask);
        }

        if (_sNwIpDao.countInUseIpByRangeId(rangeId) > 0) {
            throw new CloudRuntimeException("Cannot update the range," + getInUseIpAddress(rangeId));
        }

        StorageNetworkIpRangeVO range = _sNwIpRangeDao.findById(rangeId);
        if (range == null) {
            throw new CloudRuntimeException("Cannot find storage ip range " + rangeId);
        }

        if (startIp != null || endIp != null) {
            long podId = range.getPodId();
            // Fall back to the stored boundary for whichever end was not supplied.
            startIp = startIp == null ? range.getStartIp() : startIp;
            endIp = endIp == null ? range.getEndIp() : endIp;
            checkOverlapPrivateIpRange(podId, startIp, endIp);
            checkOverlapStorageIpRange(podId, startIp, endIp);
        }

        Transaction txn = Transaction.currentTxn();
        txn.start();
        try {
            range = _sNwIpRangeDao.acquireInLockTable(range.getId());
            if (range == null) {
                throw new CloudRuntimeException("Cannot acquire lock on storage ip range " + rangeId);
            }
            StorageNetworkIpRangeVO vo = _sNwIpRangeDao.createForUpdate();
            if (vlan != null) {
                vo.setVlan(vlan);
            }
            if (startIp != null) {
                vo.setStartIp(startIp);
            }
            if (endIp != null) {
                vo.setEndIp(endIp);
            }
            if (netmask != null) {
                vo.setNetmask(netmask);
            }
            _sNwIpRangeDao.update(rangeId, vo);
        } finally {
            if (range != null) {
                _sNwIpRangeDao.releaseFromLockTable(range.getId());
            }
        }
        // NOTE(review): the commit happens after the lock is released in the finally
        // block above; looks like a window where another updater could interleave —
        // confirm the intended lock/commit ordering.
        txn.commit();

        return _sNwIpRangeDao.findById(rangeId);
    }

    /**
     * Creates a storage IP range in the (single) storage network of the pod's zone
     * and pre-populates one allocation row per IP. endIp defaults to startIp.
     */
    @Override
    @DB
    public StorageNetworkIpRange createIpRange(CreateStorageNetworkIpRangeCmd cmd) throws SQLException {
        Long podId = cmd.getPodId();
        String startIp = cmd.getStartIp();
        String endIp = cmd.getEndIp();
        Integer vlan = cmd.getVlan();
        String netmask = cmd.getNetmask();

        if (endIp == null) {
            endIp = startIp;
        }

        if (!NetUtils.isValidNetmask(netmask)) {
            throw new CloudRuntimeException("Invalid netmask:" + netmask);
        }

        HostPodVO pod = _podDao.findById(podId);
        if (pod == null) {
            throw new CloudRuntimeException("Cannot find pod " + podId);
        }
        Long zoneId = pod.getDataCenterId();

        List<NetworkVO> nws = _networkDao.listByZoneAndTrafficType(zoneId, TrafficType.Storage);
        if (nws.size() == 0) {
            throw new CloudRuntimeException("Cannot find storage network in zone " + zoneId);
        }
        if (nws.size() > 1) {
            throw new CloudRuntimeException("Find more than one storage network in zone " + zoneId + "," + nws.size() + " found");
        }
        NetworkVO nw = nws.get(0);

        checkOverlapPrivateIpRange(podId, startIp, endIp);
        checkOverlapStorageIpRange(podId, startIp, endIp);

        Transaction txn = Transaction.currentTxn();
        StorageNetworkIpRangeVO range = null;

        txn.start();
        range = new StorageNetworkIpRangeVO(zoneId, podId, nw.getId(), startIp, endIp, vlan, netmask, cmd.getGateWay());
        _sNwIpRangeDao.persist(range);
        try {
            createStorageIpEntires(txn, range.getId(), startIp, endIp, zoneId);
        } catch (SQLException e) {
            txn.rollback();
            StringBuilder err = new StringBuilder();
            err.append("Create storage network range failed.");
            err.append("startIp=" + startIp);
            err.append("endIp=" + endIp);
            err.append("netmask=" + netmask);
            err.append("zoneId=" + zoneId);
            s_logger.debug(err.toString(), e);
            throw e;
        }
        txn.commit();

        return range;
    }

    /** Builds a human-readable list of the range's still-allocated IPs for error messages. */
    private String getInUseIpAddress(long rangeId) {
        List<String> ips = _sNwIpDao.listInUseIpByRangeId(rangeId);
        StringBuilder res = new StringBuilder();
        res.append("Below IP of range " + rangeId + " is still in use:");
        for (String ip : ips) {
            res.append(ip).append(",");
        }
        return res.toString();
    }

    /**
     * Deletes a range (refused while any of its IPs are in use). Address rows are
     * removed by the fk_storage_ip_address__range_id cascade.
     */
    @Override
    @DB
    public void deleteIpRange(DeleteStorageNetworkIpRangeCmd cmd) {
        long rangeId = cmd.getId();
        StorageNetworkIpRangeVO range = _sNwIpRangeDao.findById(rangeId);
        if (range == null) {
            throw new CloudRuntimeException("Can not find storage network ip range " + rangeId);
        }

        if (_sNwIpDao.countInUseIpByRangeId(rangeId) > 0) {
            throw new CloudRuntimeException(getInUseIpAddress(rangeId));
        }

        final Transaction txn = Transaction.currentTxn();
        txn.start();
        try {
            range = _sNwIpRangeDao.acquireInLockTable(rangeId);
            if (range == null) {
                String msg = "Unable to acquire lock on storage network ip range id=" + rangeId + ", delete failed";
                s_logger.warn(msg);
                throw new CloudRuntimeException(msg);
            }
            /*
             * entries in op_dc_storage_network_ip_address will be deleted automatically due to
             * fk_storage_ip_address__range_id constraint key
             */
            _sNwIpRangeDao.remove(rangeId);
        } finally {
            if (range != null) {
                _sNwIpRangeDao.releaseFromLockTable(rangeId);
            }
        }
        txn.commit();
    }

    /**
     * Lists ranges filtered, in priority order, by rangeId, podId, zoneId, or all.
     */
    @Override
    public List<StorageNetworkIpRange> listIpRange(ListStorageNetworkIpRangeCmd cmd) {
        Long rangeId = cmd.getRangeId();
        Long podId = cmd.getPodId();
        Long zoneId = cmd.getZoneId();

        // Raw List bridges List<StorageNetworkIpRangeVO> (DAO) to
        // List<StorageNetworkIpRange> (interface) without a copy.
        List result = null;
        if (rangeId != null) {
            result = _sNwIpRangeDao.listByRangeId(rangeId);
        } else if (podId != null) {
            result = _sNwIpRangeDao.listByPodId(podId);
        } else if (zoneId != null) {
            result = _sNwIpRangeDao.listByDataCenterId(zoneId);
        } else {
            result = _sNwIpRangeDao.listAll();
        }

        return result;
    }

    /** Marks the given storage IP as free again. */
    @Override
    public void releaseIpAddress(String ip) {
        _sNwIpDao.releaseIpAddress(ip);
    }

    /**
     * Allocates one free storage IP from the pod's ranges, locking each range row
     * while taking an address. Returns null when every range is exhausted.
     */
    @Override
    public StorageNetworkIpAddressVO acquireIpAddress(long podId) {
        List<StorageNetworkIpRangeVO> ranges = _sNwIpRangeDao.listByPodId(podId);
        for (StorageNetworkIpRangeVO r : ranges) {
            try {
                Long rangeId = r.getId();
                r = _sNwIpRangeDao.acquireInLockTable(rangeId);
                if (r == null) {
                    String msg = "Unable to acquire lock on storage network ip range id=" + rangeId + ", delete failed";
                    s_logger.warn(msg);
                    throw new CloudRuntimeException(msg);
                }

                StorageNetworkIpAddressVO ip = _sNwIpDao.takeIpAddress(r.getId());
                if (ip != null) {
                    return ip;
                }
            } finally {
                if (r != null) {
                    _sNwIpRangeDao.releaseFromLockTable(r.getId());
                }
            }
        }

        return null;
    }

    /** @return true when the zone has at least one storage IP range configured. */
    @Override
    public boolean isStorageIpRangeAvailable(long zoneId) {
        SearchCriteriaService<StorageNetworkIpRangeVO, StorageNetworkIpRangeVO> sc = SearchCriteria2.create(StorageNetworkIpRangeVO.class);
        sc.addAnd(sc.getEntity().getDataCenterId(), Op.EQ, zoneId);
        List<StorageNetworkIpRangeVO> entries = sc.list();
        return entries.size() > 0;
    }

    /**
     * @return active (starting/running/stopping) secondary storage VMs in the zone.
     *         The "with no storage network" filtering implied by the name is not
     *         visible here — the DAO is queried by state only.
     */
    @Override
    public List<SecondaryStorageVmVO> getSSVMWithNoStorageNetwork(long zoneId) {
        List<SecondaryStorageVmVO> ssvms = _ssvmDao.getSecStorageVmListInStates(null, zoneId, VirtualMachine.State.Starting, VirtualMachine.State.Running, VirtualMachine.State.Stopping);
        return ssvms;
    }

    /** @return true when any range in the zone still has an allocated IP. */
    @Override
    public boolean isAnyStorageIpInUseInZone(long zoneId) {
        List<StorageNetworkIpRangeVO> ranges = _sNwIpRangeDao.listByDataCenterId(zoneId);
        for (StorageNetworkIpRangeVO r : ranges) {
            if (_sNwIpDao.countInUseIpByRangeId(r.getId()) > 0) {
                return true;
            }
        }
        return false;
    }
}
// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.android.desugar;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static java.lang.invoke.MethodHandles.publicLookup;

import com.google.auto.value.AutoValue;
import com.google.common.collect.ImmutableSet;
import com.google.devtools.build.android.desugar.io.BitFlags;
import java.io.IOException;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles.Lookup;
import java.lang.invoke.MethodType;
import java.lang.reflect.Constructor;
import java.lang.reflect.Executable;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.Map;
import javax.annotation.Nullable;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.Handle;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import org.objectweb.asm.tree.AbstractInsnNode;
import org.objectweb.asm.tree.FieldInsnNode;
import org.objectweb.asm.tree.InsnNode;
import org.objectweb.asm.tree.MethodNode;
import org.objectweb.asm.tree.TypeInsnNode;

/**
 * Visitor that desugars classes with uses of lambdas into Java 7-looking code. This includes
 * rewriting lambda-related invokedynamic instructions as well as fixing accessibility of methods
 * that javac emits for lambda bodies.
 *
 * <p>Implementation note: {@link InvokeDynamicLambdaMethodCollector} needs to detect any class
 * that this visitor may rewrite, as we conditionally apply this visitor based on it.
 */
class LambdaDesugaring extends ClassVisitor {

  /** Class loader that can load the classes being desugared (possibly as stubs). */
  private final ClassLoader targetLoader;
  /** Sink that generates one lambda implementation class per rewritten invokedynamic. */
  private final LambdaClassMaker lambdas;
  /** Accumulates "owner#name" of interface lambda$ bodies that must be moved elsewhere. */
  private final ImmutableSet.Builder<String> aggregateInterfaceLambdaMethods;
  /** Bridge methods queued during method visits and emitted in {@link #visitEnd}. */
  private final Map<Handle, MethodReferenceBridgeInfo> bridgeMethods = new LinkedHashMap<>();
  // NOTE(review): "Dyanmic" is a long-standing typo in this field name; kept as-is here.
  private final ImmutableSet<MethodInfo> lambdaMethodsUsedInInvokeDyanmic;
  private final boolean allowDefaultMethods;

  /** Internal name of the class being visited; set once in {@link #visit}. */
  private String internalName;
  private boolean isInterface;
  /** Per-class counter used to give generated lambda classes stable, unique names. */
  private int lambdaCount;

  public LambdaDesugaring(
      ClassVisitor dest,
      ClassLoader targetLoader,
      LambdaClassMaker lambdas,
      ImmutableSet.Builder<String> aggregateInterfaceLambdaMethods,
      ImmutableSet<MethodInfo> lambdaMethodsUsedInInvokeDyanmic,
      boolean allowDefaultMethods) {
    super(Opcodes.ASM7, dest);
    this.targetLoader = targetLoader;
    this.lambdas = lambdas;
    this.aggregateInterfaceLambdaMethods = aggregateInterfaceLambdaMethods;
    this.lambdaMethodsUsedInInvokeDyanmic = lambdaMethodsUsedInInvokeDyanmic;
    this.allowDefaultMethods = allowDefaultMethods;
  }

  @Override
  public void visit(
      int version,
      int access,
      String name,
      String signature,
      String superName,
      String[] interfaces) {
    // This visitor carries per-class state, so each instance may only process one class.
    checkState(internalName == null, "not intended for reuse but reused for %s", name);
    internalName = name;
    isInterface = BitFlags.isSet(access, Opcodes.ACC_INTERFACE);
    super.visit(version, access, name, signature, superName, interfaces);
  }

  /**
   * Emits all bridge methods queued up by {@link #queueUpBridgeMethodIfNeeded} before
   * finishing the class. Each bridge simply forwards its arguments to the (otherwise
   * invisible) referenced method, or acts as a factory for an invisible constructor.
   */
  @Override
  public void visitEnd() {
    for (Map.Entry<Handle, MethodReferenceBridgeInfo> bridge : bridgeMethods.entrySet()) {
      Handle original = bridge.getKey();
      Handle neededMethod = bridge.getValue().bridgeMethod();
      checkState(
          neededMethod.getTag() == Opcodes.H_INVOKESTATIC
              || neededMethod.getTag() == Opcodes.H_INVOKEVIRTUAL,
          "Cannot generate bridge method %s to reach %s",
          neededMethod,
          original);
      checkState(
          bridge.getValue().referenced() != null,
          "Need referenced method %s to generate bridge %s",
          original,
          neededMethod);

      int access = Opcodes.ACC_BRIDGE | Opcodes.ACC_SYNTHETIC | Opcodes.ACC_FINAL;
      if (neededMethod.getTag() == Opcodes.H_INVOKESTATIC) {
        access |= Opcodes.ACC_STATIC;
      }
      MethodVisitor bridgeMethod =
          super.visitMethod(
              access,
              neededMethod.getName(),
              neededMethod.getDesc(),
              (String) null,
              toInternalNames(bridge.getValue().referenced().getExceptionTypes()));

      // Bridge is a factory method calling a constructor
      if (original.getTag() == Opcodes.H_NEWINVOKESPECIAL) {
        bridgeMethod.visitTypeInsn(Opcodes.NEW, original.getOwner());
        bridgeMethod.visitInsn(Opcodes.DUP);
      }

      int slot = 0;
      if (neededMethod.getTag() != Opcodes.H_INVOKESTATIC) {
        // Non-static bridges receive the receiver in slot 0; load it first.
        bridgeMethod.visitVarInsn(Opcodes.ALOAD, slot++);
      }
      Type neededType = Type.getMethodType(neededMethod.getDesc());
      for (Type arg : neededType.getArgumentTypes()) {
        // Forward every parameter with the load opcode matching its type; wide types take 2 slots.
        bridgeMethod.visitVarInsn(arg.getOpcode(Opcodes.ILOAD), slot);
        slot += arg.getSize();
      }
      bridgeMethod.visitMethodInsn(
          invokeOpcode(original),
          original.getOwner(),
          original.getName(),
          original.getDesc(),
          original.isInterface());
      bridgeMethod.visitInsn(neededType.getReturnType().getOpcode(Opcodes.IRETURN));

      bridgeMethod.visitMaxs(0, 0); // rely on class writer to compute these
      bridgeMethod.visitEnd();
    }
    super.visitEnd();
  }

  // If this method changes then InvokeDynamicLambdaMethodCollector may need changes as well
  @Override
  public MethodVisitor visitMethod(
      int access, String name, String desc, String signature, String[] exceptions) {
    if (name.equals("$deserializeLambda$") && BitFlags.isSet(access, Opcodes.ACC_SYNTHETIC)) {
      // Android doesn't do anything special for lambda serialization so drop the special
      // deserialization hook that javac generates.  This also makes sure we don't reference
      // java/lang/invoke/SerializedLambda, which doesn't exist on Android.
      return null;
    }
    if (name.startsWith("lambda$")
        && BitFlags.isSet(access, Opcodes.ACC_SYNTHETIC)
        && lambdaMethodsUsedInInvokeDyanmic.contains(MethodInfo.create(internalName, name, desc))) {
      if (!allowDefaultMethods && isInterface && BitFlags.isSet(access, Opcodes.ACC_STATIC)) {
        // There must be a lambda in the interface (which in the absence of hand-written default or
        // static interface methods must mean it's in the <clinit> method or inside another lambda).
        // We'll move this method out of this class, so just record and drop it here.
        // (Note lambda body methods have unique names, so we don't need to remember desc here.)
        aggregateInterfaceLambdaMethods.add(internalName + '#' + name);
        return null;
      }
      if (BitFlags.isSet(access, Opcodes.ACC_PRIVATE)) {
        // Make lambda body method accessible from lambda class
        access &= ~Opcodes.ACC_PRIVATE;
        if (allowDefaultMethods && isInterface) {
          // java 8 requires interface methods to have exactly one of ACC_PUBLIC and ACC_PRIVATE
          access |= Opcodes.ACC_PUBLIC;
        } else {
          // Method was private so it can be final, which should help VMs perform dispatch.
          access |= Opcodes.ACC_FINAL;
        }
      }
      // Guarantee unique lambda body method name to avoid accidental overriding.  This wouldn't
      // be necessary for static methods but in visitOuterClass we don't know whether a potential
      // outer lambda$ method is static or not, so we just always do it.
      name = uniqueInPackage(internalName, name);
    }
    MethodVisitor dest = super.visitMethod(access, name, desc, signature, exceptions);
    return dest != null
        ? new InvokedynamicRewriter(dest, access, name, desc, signature, exceptions)
        : null;
  }

  // If this method changes then InvokeDynamicLambdaMethodCollector may need changes as well
  @Override
  public void visitOuterClass(String owner, String name, String desc) {
    if (name != null && name.startsWith("lambda$")) {
      // Reflect renaming of lambda$ methods.  Proguard gets grumpy if we leave this inconsistent.
      name = uniqueInPackage(owner, name);
    }
    super.visitOuterClass(owner, name, desc);
  }

  // When adding visitXxx methods here then InvokeDynamicLambdaMethodCollector may need changes
  // as well

  /**
   * Appends the owner's simple name as a suffix to {@code name} so lambda body methods
   * cannot accidentally collide/override across classes of the same package.
   */
  static String uniqueInPackage(String owner, String name) {
    String suffix = "$" + owner.substring(owner.lastIndexOf('/') + 1);
    // For idempotency, we only attach the package-unique suffix if it isn't there already.  This
    // prevents a cumulative effect when processing a class more than once (which can happen with
    // Bazel, e.g., when re-importing a deploy.jar).  During reprocessing, invokedynamics are
    // already removed, so lambda$ methods have regular call sites that we would also have to re-
    // adjust if we just blindly appended something to lambda$ method names every time we see them.
    return name.endsWith(suffix) ? name : name + suffix;
  }

  /**
   * Makes {@link #visitEnd} generate a bridge method for the given method handle if the referenced
   * method will be invisible to the generated lambda class.
   *
   * @return struct containing either {@code invokedMethod} or {@code invokedMethod} and a handle
   *     representing the bridge method that will be generated for {@code invokedMethod}.
   */
  private MethodReferenceBridgeInfo queueUpBridgeMethodIfNeeded(Handle invokedMethod)
      throws ClassNotFoundException {
    if (invokedMethod.getName().startsWith("lambda$")) {
      // We adjust lambda bodies to be visible
      return MethodReferenceBridgeInfo.noBridge(invokedMethod);
    }

    // invokedMethod is a method reference if we get here
    Executable invoked = findTargetMethod(invokedMethod);
    if (isVisibleToLambdaClass(invoked, invokedMethod.getOwner())) {
      // Referenced method is visible to the generated class, so nothing to do
      return MethodReferenceBridgeInfo.noBridge(invokedMethod);
    }

    // We need a bridge method if we get here
    checkState(
        !isInterface,
        "%s is an interface and shouldn't need bridge to %s",
        internalName,
        invokedMethod);
    checkState(
        !invokedMethod.isInterface(),
        "%s's lambda classes can't see interface method: %s",
        internalName,
        invokedMethod);
    MethodReferenceBridgeInfo result = bridgeMethods.get(invokedMethod);
    if (result != null) {
      return result; // we're already queued up a bridge method for this method reference
    }

    String name = uniqueInPackage(internalName, "bridge$lambda$" + bridgeMethods.size());
    Handle bridgeMethod;
    switch (invokedMethod.getTag()) {
      case Opcodes.H_INVOKESTATIC:
        bridgeMethod =
            new Handle(
                invokedMethod.getTag(), internalName, name, invokedMethod.getDesc(),
                /*itf*/ false);
        break;
      case Opcodes.H_INVOKEVIRTUAL:
      case Opcodes.H_INVOKESPECIAL: // we end up calling these using invokevirtual
        bridgeMethod =
            new Handle(
                Opcodes.H_INVOKEVIRTUAL, internalName, name, invokedMethod.getDesc(),
                /*itf*/ false);
        break;
      case Opcodes.H_NEWINVOKESPECIAL:
        {
          // Call invisible constructor through generated bridge "factory" method, so we need to
          // compute the descriptor for the bridge method from the constructor's descriptor
          String desc =
              Type.getMethodDescriptor(
                  Type.getObjectType(invokedMethod.getOwner()),
                  Type.getArgumentTypes(invokedMethod.getDesc()));
          bridgeMethod =
              new Handle(Opcodes.H_INVOKESTATIC, internalName, name, desc, /*itf*/ false);
          break;
        }
      case Opcodes.H_INVOKEINTERFACE: // Shouldn't get here
      default:
        throw new UnsupportedOperationException("Cannot bridge " + invokedMethod);
    }
    result = MethodReferenceBridgeInfo.bridge(invokedMethod, invoked, bridgeMethod);
    MethodReferenceBridgeInfo old = bridgeMethods.put(invokedMethod, result);
    checkState(old == null, "Already had bridge %s so we don't also want %s", old, result);
    return result;
  }

  /**
   * Checks whether the referenced method would be visible by an unrelated class in the same package
   * as the currently visited class.
   */
  private boolean isVisibleToLambdaClass(Executable invoked, String owner) {
    int modifiers = invoked.getModifiers();
    if (Modifier.isPrivate(modifiers)) {
      return false;
    }
    if (Modifier.isPublic(modifiers)) {
      return true;
    }
    // invoked is protected or package-private, either way we need it to be in the same package
    // because the additional visibility protected gives doesn't help lambda classes, which are in
    // a different class hierarchy (and typically just extend Object)
    return packageName(internalName).equals(packageName(owner));
  }

  /**
   * Resolves an ASM {@link Handle} to the reflective constructor or method it names,
   * by loading the owner through {@link #targetLoader} and matching descriptors.
   *
   * @throws IllegalArgumentException if no declared member matches the handle
   */
  private Executable findTargetMethod(Handle invokedMethod) throws ClassNotFoundException {
    Type descriptor = Type.getMethodType(invokedMethod.getDesc());
    Class<?> owner = loadFromInternal(invokedMethod.getOwner());
    if (invokedMethod.getTag() == Opcodes.H_NEWINVOKESPECIAL) {
      for (Constructor<?> c : owner.getDeclaredConstructors()) {
        if (Type.getType(c).equals(descriptor)) {
          return c;
        }
      }
    } else {
      for (Method m : owner.getDeclaredMethods()) {
        if (m.getName().equals(invokedMethod.getName()) && Type.getType(m).equals(descriptor)) {
          return m;
        }
      }
    }
    throw new IllegalArgumentException("Referenced method not found: " + invokedMethod);
  }

  /** Loads a class by its JVM-internal (slash-separated) name via {@link #targetLoader}. */
  private Class<?> loadFromInternal(String internalName) throws ClassNotFoundException {
    return targetLoader.loadClass(internalName.replace('/', '.'));
  }

  /** Maps a method handle's tag to the invocation opcode used to call it directly. */
  static int invokeOpcode(Handle invokedMethod) {
    switch (invokedMethod.getTag()) {
      case Opcodes.H_INVOKESTATIC:
        return Opcodes.INVOKESTATIC;
      case Opcodes.H_INVOKEVIRTUAL:
        return Opcodes.INVOKEVIRTUAL;
      case Opcodes.H_INVOKESPECIAL:
      case Opcodes.H_NEWINVOKESPECIAL: // Must be preceded by NEW
        return Opcodes.INVOKESPECIAL;
      case Opcodes.H_INVOKEINTERFACE:
        return Opcodes.INVOKEINTERFACE;
      default:
        throw new UnsupportedOperationException("Don't know how to call " + invokedMethod);
    }
  }

  /** Converts reflective classes to their JVM-internal names. */
  private static String[] toInternalNames(Class<?>[] classes) {
    String[] result = new String[classes.length];
    for (int i = 0; i < classes.length; ++i) {
      result[i] = Type.getInternalName(classes[i]);
    }
    return result;
  }

  /** Returns the package prefix of an internal class name, or "" for the default package. */
  private static String packageName(String internalClassName) {
    int lastSlash = internalClassName.lastIndexOf('/');
    return lastSlash > 0 ? internalClassName.substring(0, lastSlash) : "";
  }

  /**
   * Desugaring that replaces invokedynamics for {@link java.lang.invoke.LambdaMetafactory} with
   * static factory method invocations and triggers a class to be generated for each invokedynamic.
   */
  private class InvokedynamicRewriter extends MethodNode {

    private final MethodVisitor dest;

    public InvokedynamicRewriter(
        MethodVisitor dest,
        int access,
        String name,
        String desc,
        String signature,
        String[] exceptions) {
      super(Opcodes.ASM7, access, name, desc, signature, exceptions);
      this.dest = checkNotNull(dest, "Null destination for %s.%s : %s", internalName, name, desc);
    }

    @Override
    public void visitEnd() {
      // MethodNode buffers the whole method; replay the (rewritten) instructions into dest now.
      accept(dest);
    }

    @Override
    public void visitInvokeDynamicInsn(String name, String desc, Handle bsm, Object... bsmArgs) {
      if (!"java/lang/invoke/LambdaMetafactory".equals(bsm.getOwner())) {
        // Not an invokedynamic for a lambda expression
        super.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs);
        return;
      }

      try {
        Lookup lookup = createLookup(internalName);
        ArrayList<Object> args = new ArrayList<>(bsmArgs.length + 3);
        args.add(lookup);
        args.add(name);
        args.add(MethodType.fromMethodDescriptorString(desc, targetLoader));
        for (Object bsmArg : bsmArgs) {
          args.add(toJvmMetatype(lookup, bsmArg));
        }

        // Both bootstrap methods in LambdaMetafactory expect a MethodHandle as their 5th argument
        // so we can assume bsmArgs[1] (the 5th arg) to be a Handle.
        MethodReferenceBridgeInfo bridgeInfo = queueUpBridgeMethodIfNeeded((Handle) bsmArgs[1]);

        // Resolve the bootstrap method in "host configuration" (this tool's default classloader)
        // since targetLoader may only contain stubs that we can't actually execute.
        // generateLambdaClass() below will invoke the bootstrap method, so a stub isn't enough,
        // and ultimately we don't care if the bootstrap method was even on the bootclasspath
        // when this class was compiled (although it must've been since javac is unhappy otherwise).
        MethodHandle bsmMethod = toMethodHandle(publicLookup(), bsm, /*target*/ false);
        // Give generated classes more stable names (b/35643761).  Use BSM's naming scheme
        // but with a separate counter for each surrounding class.
        String lambdaClassName = internalName + "$$Lambda$" + (lambdaCount++);
        Type[] capturedTypes = Type.getArgumentTypes(desc);
        boolean needFactory =
            capturedTypes.length != 0
                && !attemptAllocationBeforeArgumentLoads(lambdaClassName, capturedTypes);
        lambdas.generateLambdaClass(
            internalName,
            LambdaInfo.create(
                lambdaClassName,
                desc,
                needFactory,
                bridgeInfo.methodReference(),
                bridgeInfo.bridgeMethod()),
            bsmMethod,
            args);
        if (desc.startsWith("()")) {
          // For stateless lambda classes we'll generate a singleton instance that we can just load
          checkState(capturedTypes.length == 0);
          super.visitFieldInsn(
              Opcodes.GETSTATIC,
              lambdaClassName,
              LambdaClassFixer.SINGLETON_FIELD_NAME,
              desc.substring("()".length()));
        } else if (needFactory) {
          // If we were unable to inline the allocation of the generated lambda class then
          // invoke factory method of generated lambda class with the arguments on the stack
          super.visitMethodInsn(
              Opcodes.INVOKESTATIC,
              lambdaClassName,
              LambdaClassFixer.FACTORY_METHOD_NAME,
              desc,
              /*itf*/ false);
        } else {
          // Otherwise we inserted a new/dup pair of instructions above and now just need to invoke
          // the constructor of generated lambda class with the arguments on the stack
          super.visitMethodInsn(
              Opcodes.INVOKESPECIAL,
              lambdaClassName,
              "<init>",
              Type.getMethodDescriptor(Type.VOID_TYPE, capturedTypes),
              /*itf*/ false);
        }
      } catch (IOException | ReflectiveOperationException e) {
        throw new IllegalStateException(
            "Couldn't desugar invokedynamic for "
                + internalName
                + "."
                + name
                + " using "
                + bsm
                + " with arguments "
                + Arrays.toString(bsmArgs),
            e);
      }
    }

    /**
     * Tries to insert a new/dup for the given class name before expected existing instructions that
     * set up arguments for an invokedynamic factory method with the given types.
     *
     * <p>For lambda expressions and simple method references we can assume that arguments are set
     * up with loads of the captured (effectively) final variables.  But method references can in
     * general capture an expression, such as in {@code myObject.toString()::charAt} (a {@code
     * Function&lt;Integer, Character&gt;}), which can also cause null checks to be inserted.  In
     * such more complicated cases this method may fail to insert a new/dup pair and returns {@code
     * false}.
     *
     * @param internalName internal name of the class to instantiate
     * @param paramTypes expected invokedynamic argument types, which also must be the parameters of
     *     {@code internalName}'s constructor.
     * @return {@code true} if we were able to insert a new/dup, {@code false} otherwise
     */
    private boolean attemptAllocationBeforeArgumentLoads(String internalName, Type[] paramTypes) {
      checkArgument(paramTypes.length > 0, "Expected at least one param for %s", internalName);
      // Walk backwards past loads corresponding to constructor arguments to find the instruction
      // after which we need to insert our NEW/DUP pair
      AbstractInsnNode insn = instructions.getLast();
      for (int i = paramTypes.length - 1; 0 <= i; --i) {
        if (insn.getOpcode() == Opcodes.GETFIELD) {
          // Lambdas in anonymous inner classes have to load outer scope variables from fields,
          // which manifest as an ALOAD followed by one or more GETFIELDs
          FieldInsnNode getfield = (FieldInsnNode) insn;
          checkState(
              getfield.desc.length() == 1
                  ? getfield.desc.equals(paramTypes[i].getDescriptor())
                  : paramTypes[i].getDescriptor().length() > 1,
              "Expected getfield for %s to set up parameter %s for %s but got %s : %s",
              paramTypes[i],
              i,
              internalName,
              getfield.name,
              getfield.desc);
          insn = insn.getPrevious();

          while (insn.getOpcode() == Opcodes.GETFIELD) {
            // Nested inner classes can cause a cascade of getfields from the outermost one inwards
            checkState(
                ((FieldInsnNode) insn).desc.startsWith("L"),
                "expect object type getfields to get to %s to set up parameter %s for %s, not: %s",
                paramTypes[i],
                i,
                internalName,
                ((FieldInsnNode) insn).desc);
            insn = insn.getPrevious();
          }

          checkState(
              insn.getOpcode() == Opcodes.ALOAD, // should be a this pointer to be precise
              "Expected aload before getfield for %s to set up parameter %s for %s but got %s",
              getfield.name,
              i,
              internalName,
              insn.getOpcode());
        } else if (!isPushForType(insn, paramTypes[i])) {
          // Otherwise expect load of a (effectively) final local variable or a constant. Not seeing
          // that means we're dealing with a method reference on some arbitrary expression,
          // <expression>::m. In that case we give up and keep using the factory method for now,
          // since inserting the NEW/DUP so the new object ends up in the right stack slot is hard
          // in that case. Note this still covers simple cases such as this::m or x::m, where x is a
          // local.
          checkState(
              paramTypes.length == 1,
              "Expected a load for %s to set up parameter %s for %s but got %s",
              paramTypes[i],
              i,
              internalName,
              insn.getOpcode());
          return false;
        }
        insn = insn.getPrevious();
      }

      TypeInsnNode newInsn = new TypeInsnNode(Opcodes.NEW, internalName);
      if (insn == null) {
        // Ran off the front of the instruction list
        instructions.insert(newInsn);
      } else {
        instructions.insert(insn, newInsn);
      }
      instructions.insert(newInsn, new InsnNode(Opcodes.DUP));
      return true;
    }

    /** Returns whether a given instruction can be used to push argument of {@code type} on stack. */
    private /* static */ boolean isPushForType(AbstractInsnNode insn, Type type) {
      int opcode = insn.getOpcode();
      if (opcode == type.getOpcode(Opcodes.ILOAD)) {
        return true;
      }
      // b/62060793: AsyncAwait rewrites bytecode to convert java methods into state machine with
      // support of lambdas. Constant zero values are pushed on stack for all yet uninitialized
      // local variables. And SIPUSH instruction is used to advance an internal state of a state
      // machine.
      switch (type.getSort()) {
        case Type.BOOLEAN:
          return opcode == Opcodes.ICONST_0 || opcode == Opcodes.ICONST_1;
        case Type.BYTE:
        case Type.CHAR:
        case Type.SHORT:
        case Type.INT:
          return opcode == Opcodes.SIPUSH
              || opcode == Opcodes.ICONST_0
              || opcode == Opcodes.ICONST_1
              || opcode == Opcodes.ICONST_2
              || opcode == Opcodes.ICONST_3
              || opcode == Opcodes.ICONST_4
              || opcode == Opcodes.ICONST_5
              || opcode == Opcodes.ICONST_M1;
        case Type.LONG:
          return opcode == Opcodes.LCONST_0 || opcode == Opcodes.LCONST_1;
        case Type.FLOAT:
          return opcode == Opcodes.FCONST_0
              || opcode == Opcodes.FCONST_1
              || opcode == Opcodes.FCONST_2;
        case Type.DOUBLE:
          return opcode == Opcodes.DCONST_0 || opcode == Opcodes.DCONST_1;
        case Type.OBJECT:
        case Type.ARRAY:
          return opcode == Opcodes.ACONST_NULL;
        default:
          // Support for BIPUSH and LDC* opcodes is not implemented as there is no known use case.
          return false;
      }
    }

    /**
     * Creates a {@link Lookup} for the given class via its package-private constructor,
     * giving the bootstrap method the same lookup context the JVM would have supplied.
     */
    private Lookup createLookup(String lookupClass) throws ReflectiveOperationException {
      Class<?> clazz = loadFromInternal(lookupClass);
      Constructor<Lookup> constructor = Lookup.class.getDeclaredConstructor(Class.class);
      constructor.setAccessible(true);
      return constructor.newInstance(clazz);
    }

    /**
     * Produces a {@link MethodHandle} or {@link MethodType} using {@link #targetLoader} for the
     * given ASM {@link Handle} or {@link Type}.  {@code lookup} is only used for resolving {@link
     * Handle}s.
     */
    private Object toJvmMetatype(Lookup lookup, Object asm) throws ReflectiveOperationException {
      if (asm instanceof Number) {
        return asm;
      }
      if (asm instanceof Type) {
        Type type = (Type) asm;
        switch (type.getSort()) {
          case Type.OBJECT:
            return loadFromInternal(type.getInternalName());
          case Type.METHOD:
            return MethodType.fromMethodDescriptorString(type.getDescriptor(), targetLoader);
          default:
            throw new IllegalArgumentException("Cannot convert: " + asm);
        }
      }
      if (asm instanceof Handle) {
        return toMethodHandle(lookup, (Handle) asm, /*target*/ true);
      }
      throw new IllegalArgumentException("Cannot convert: " + asm);
    }

    /**
     * Produces a {@link MethodHandle} using either the context or {@link #targetLoader} class
     * loader, depending on {@code target}.
     */
    private MethodHandle toMethodHandle(Lookup lookup, Handle asmHandle, boolean target)
        throws ReflectiveOperationException {
      Class<?> owner = loadFromInternal(asmHandle.getOwner());
      MethodType signature =
          MethodType.fromMethodDescriptorString(
              asmHandle.getDesc(),
              target ? targetLoader : Thread.currentThread().getContextClassLoader());
      switch (asmHandle.getTag()) {
        case Opcodes.H_INVOKESTATIC:
          return lookup.findStatic(owner, asmHandle.getName(), signature);
        case Opcodes.H_INVOKEVIRTUAL:
        case Opcodes.H_INVOKEINTERFACE:
          return lookup.findVirtual(owner, asmHandle.getName(), signature);
        case Opcodes.H_INVOKESPECIAL: // we end up calling these using invokevirtual
          return lookup.findSpecial(owner, asmHandle.getName(), signature, owner);
        case Opcodes.H_NEWINVOKESPECIAL:
          return lookup.findConstructor(owner, signature);
        default:
          throw new UnsupportedOperationException("Cannot resolve " + asmHandle);
      }
    }
  }

  /**
   * Record of how a lambda class can reach its referenced method through a possibly-different
   * bridge method.
   *
   * <p>In a JVM, lambda classes are allowed to call the referenced methods directly, but we don't
   * have that luxury when the generated lambda class is evaluated using normal visibility rules.
   */
  @AutoValue
  abstract static class MethodReferenceBridgeInfo {
    public static MethodReferenceBridgeInfo noBridge(Handle methodReference) {
      return new AutoValue_LambdaDesugaring_MethodReferenceBridgeInfo(
          methodReference, (Executable) null, methodReference);
    }

    public static MethodReferenceBridgeInfo bridge(
        Handle methodReference, Executable referenced, Handle bridgeMethod) {
      checkArgument(!bridgeMethod.equals(methodReference));
      return new AutoValue_LambdaDesugaring_MethodReferenceBridgeInfo(
          methodReference, checkNotNull(referenced), bridgeMethod);
    }

    public abstract Handle methodReference();

    /** Returns {@code null} iff {@link #bridgeMethod} equals {@link #methodReference}. */
    @Nullable
    public abstract Executable referenced();

    public abstract Handle bridgeMethod();
  }
}
// Copyright 2017 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.exec.apple; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.exec.local.LocalEnvProvider; import com.google.devtools.build.lib.rules.apple.AppleConfiguration; import com.google.devtools.build.lib.rules.apple.DottedVersion; import com.google.devtools.build.lib.shell.AbnormalTerminationException; import com.google.devtools.build.lib.shell.Command; import com.google.devtools.build.lib.shell.CommandException; import com.google.devtools.build.lib.shell.CommandResult; import com.google.devtools.build.lib.shell.TerminationStatus; import com.google.devtools.build.lib.util.OS; import com.google.devtools.build.lib.vfs.Path; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.Map; /** * Adds to the given environment all variables that are dependent on system state of the host * machine. * * <p>Admittedly, hermeticity is "best effort" in such cases; these environment values should be * as tied to configuration parameters as possible. 
* * <p>For example, underlying iOS toolchains require that SDKROOT resolve to an absolute system * path, but, when selecting which SDK to resolve, the version number comes from build * configuration. */ public final class XCodeLocalEnvProvider implements LocalEnvProvider { private static final String XCRUN_CACHE_FILENAME = "__xcruncache"; private static final String XCODE_LOCATOR_CACHE_FILENAME = "__xcodelocatorcache"; @Override public Map<String, String> rewriteLocalEnv( Map<String, String> env, Path execRoot, String productName) throws IOException { boolean containsXcodeVersion = env.containsKey(AppleConfiguration.XCODE_VERSION_ENV_NAME); boolean containsAppleSdkVersion = env.containsKey(AppleConfiguration.APPLE_SDK_VERSION_ENV_NAME); if (!containsXcodeVersion && !containsAppleSdkVersion) { return env; } ImmutableMap.Builder<String, String> newEnvBuilder = ImmutableMap.builder(); newEnvBuilder.putAll(env); // Empty developer dir indicates to use the system default. // TODO(bazel-team): Bazel's view of the xcode version and developer dir should be explicitly // set for build hermeticity. String developerDir = ""; if (containsXcodeVersion) { String version = env.get(AppleConfiguration.XCODE_VERSION_ENV_NAME); developerDir = getDeveloperDir(execRoot, DottedVersion.fromString(version), productName); newEnvBuilder.put("DEVELOPER_DIR", developerDir); } if (containsAppleSdkVersion) { // The Apple platform is needed to select the appropriate SDK. 
// NOTE(review): this block begins inside a method whose opening lines are above this
// excerpt; the preceding (unseen) code populates `env` and `newEnvBuilder` before the
// SDKROOT entry is added here.
if (!env.containsKey(AppleConfiguration.APPLE_SDK_PLATFORM_ENV_NAME)) {
  throw new IOException("Could not resolve apple platform for determining SDK");
}
String iosSdkVersion = env.get(AppleConfiguration.APPLE_SDK_VERSION_ENV_NAME);
String appleSdkPlatform = env.get(AppleConfiguration.APPLE_SDK_PLATFORM_ENV_NAME);
newEnvBuilder.put(
    "SDKROOT", getSdkRoot(execRoot, developerDir, iosSdkVersion, appleSdkPlatform,
        productName));
}
return newEnvBuilder.build();
}

/**
 * Returns the absolute root path of the target Apple SDK on the host system for a given
 * version of xcode (as defined by the given {@code developerDir}). This may spawn a
 * process and use the {@code /usr/bin/xcrun} binary to locate the target SDK. This uses a local
 * cache file under {@code bazel-out}, and will only spawn a new {@code xcrun} process in the case
 * of a cache miss.
 *
 * @param execRoot the execution root path, used to locate the cache file
 * @param developerDir the value of {@code DEVELOPER_DIR} for the target version of xcode
 * @param sdkVersion the sdk version, for example, "9.1"
 * @param appleSdkPlatform the sdk platform, for example, "iPhoneOS"
 * @param productName the product name
 * @throws IOException if there is an issue with obtaining the root from the spawned
 *     process, either because the SDK platform/version pair doesn't exist, or there was an
 *     unexpected issue finding or running the tool
 */
private static String getSdkRoot(Path execRoot, String developerDir,
    String sdkVersion, String appleSdkPlatform, String productName) throws IOException {
  // xcrun only exists on macOS; fail fast everywhere else.
  if (OS.getCurrent() != OS.DARWIN) {
    throw new IOException("Cannot locate iOS SDK on non-darwin operating system");
  }
  try {
    CacheManager cacheManager =
        new CacheManager(execRoot.getRelative(
            BlazeDirectories.getRelativeOutputPath(productName)),
            XCRUN_CACHE_FILENAME);

    // Cache key, e.g. "iphoneos9.1" -- the same spelling xcrun accepts via --sdk.
    String sdkString = appleSdkPlatform.toLowerCase() + sdkVersion;
    String cacheResult = cacheManager.getValue(developerDir, sdkString);
    if (cacheResult != null) {
      return cacheResult;
    } else {
      // DEVELOPER_DIR tells xcrun which xcode installation's SDKs to search.
      Map<String, String> env = Strings.isNullOrEmpty(developerDir)
          ? ImmutableMap.<String, String>of()
          : ImmutableMap.of("DEVELOPER_DIR", developerDir);
      CommandResult xcrunResult = new Command(
          new String[] {"/usr/bin/xcrun", "--sdk", sdkString, "--show-sdk-path"},
          env, null).execute();

      // calling xcrun via Command returns a value with a newline on the end.
      String sdkRoot = new String(xcrunResult.getStdout(), StandardCharsets.UTF_8).trim();

      cacheManager.writeEntry(ImmutableList.of(developerDir, sdkString), sdkRoot);
      return sdkRoot;
    }
  } catch (AbnormalTerminationException e) {
    TerminationStatus terminationStatus = e.getResult().getTerminationStatus();

    // A regular (non-signal) exit most likely means the SDK/platform pair is unknown to
    // this xcode; surface a targeted message instead of the raw tool output.
    if (terminationStatus.exited()) {
      throw new IOException(
          String.format("xcrun failed with code %s.\n"
              + "This most likely indicates that SDK version [%s] for platform [%s] is "
              + "unsupported for the target version of xcode.\n"
              + "%s\n"
              + "Stderr: %s",
              terminationStatus.getExitCode(),
              sdkVersion,
              appleSdkPlatform,
              terminationStatus.toString(),
              new String(e.getResult().getStderr(), StandardCharsets.UTF_8)));
    }
    String message = String.format("xcrun failed.\n%s\n%s",
        e.getResult().getTerminationStatus(),
        new String(e.getResult().getStderr(), StandardCharsets.UTF_8));
    throw new IOException(message, e);
  } catch (CommandException e) {
    throw new IOException(e);
  }
}

/**
 * Returns the absolute root path of the xcode developer directory on the host system for
 * the given xcode version. This may spawn a process and use the {@code xcode-locator} binary.
 * This uses a local cache file under {@code bazel-out}, and will only spawn a new process in the
 * case of a cache miss.
 *
 * @param execRoot the execution root path, used to locate the cache file
 * @param version the xcode version number to look up
 * @param productName the product name
 * @throws IOException if there is an issue with obtaining the path from the spawned
 *     process, either because there is no installed xcode with the given version, or
 *     there was an unexpected issue finding or running the tool
 */
private static String getDeveloperDir(Path execRoot, DottedVersion version, String productName)
    throws IOException {
  // xcode-locator is a darwin-only helper binary.
  if (OS.getCurrent() != OS.DARWIN) {
    throw new IOException(
        "Cannot locate xcode developer directory on non-darwin operating system");
  }
  try {
    CacheManager cacheManager = new CacheManager(
        execRoot.getRelative(BlazeDirectories.getRelativeOutputPath(productName)),
        XCODE_LOCATOR_CACHE_FILENAME);

    String cacheResult = cacheManager.getValue(version.toString());
    if (cacheResult != null) {
      return cacheResult;
    } else {
      // Cache miss: run the bundled locator under the exec root and trim the trailing
      // newline it prints.
      CommandResult xcodeLocatorResult = new Command(new String[] {
          execRoot.getRelative("_bin/xcode-locator").getPathString(),
          version.toString()})
          .execute();

      String developerDir =
          new String(xcodeLocatorResult.getStdout(), StandardCharsets.UTF_8).trim();

      cacheManager.writeEntry(ImmutableList.of(version.toString()), developerDir);
      return developerDir;
    }
  } catch (AbnormalTerminationException e) {
    TerminationStatus terminationStatus = e.getResult().getTerminationStatus();

    String message;
    // A clean non-zero exit most likely means the requested xcode version is not
    // installed on this machine.
    if (e.getResult().getTerminationStatus().exited()) {
      message = String.format("xcode-locator failed with code %s.\n"
          + "This most likely indicates that xcode version %s is not available on the host "
          + "machine.\n"
          + "%s\n"
          + "stderr: %s",
          terminationStatus.getExitCode(),
          version,
          terminationStatus.toString(),
          new String(e.getResult().getStderr(), StandardCharsets.UTF_8));
    } else {
      message = String.format("xcode-locator failed.\n%s\nstderr: %s",
          e.getResult().getTerminationStatus(),
          new String(e.getResult().getStderr(), StandardCharsets.UTF_8));
    }
    throw new IOException(message, e);
  } catch (CommandException e) {
    throw new IOException(e);
  }
}
}
/* * Copyright (C) 2015 Oleg Akimov * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.maritimus.atem4j.protocol.udp; import com.sun.istack.internal.NotNull; import io.maritimus.atem4j.protocol.Packet; import io.maritimus.atem4j.protocol.ParseException; import io.maritimus.atem4j.protocol.Utils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.net.*; import java.nio.ByteBuffer; import java.nio.channels.DatagramChannel; import java.util.Random; import java.util.concurrent.ConcurrentLinkedQueue; /** * Created by Oleg Akimov on 30/07/15. 
*/ public class UdpClient implements AutoCloseable { private static final Logger log = LoggerFactory.getLogger(UdpClient.class); public static final int DEFAULT_ATEM_PORT = 9910; public static final int MAX_SEND_BUF = 4096; public static final int MAX_RECEIVE_BUF = 4096; public static final int SO_RECEIVE_BUF = 64000; // socket option public static final int SEND_TIMEOUT = 1; // timeout between send ops in non-blocking mode public static final int RECEIVE_TIMEOUT = 1; public final InetSocketAddress atemAddress; public final int localPort; public final DatagramChannel channel; public final DatagramSocket socket; public final byte[] inBuf = new byte[MAX_RECEIVE_BUF]; public final ByteBuffer inBB = ByteBuffer.wrap(inBuf); public final ByteBuffer outBB = ByteBuffer.allocateDirect(MAX_SEND_BUF); public final ConcurrentLinkedQueue<Packet> inQueue = new ConcurrentLinkedQueue<>(); public final IUdpClientListener listener; public volatile boolean isStopped = false; public static UdpClient create(InetSocketAddress atemAddress, IUdpClientListener listener) throws java.io.IOException { return new UdpClient(atemAddress, 0, listener); } public static UdpClient create(String atemHost, IUdpClientListener listener) throws java.io.IOException { return create(createAtemAddress(atemHost, DEFAULT_ATEM_PORT), listener); } public static UdpClient create(String atemHost, int localPortMin, int localPortMax, IUdpClientListener listener) throws java.io.IOException { return create(createAtemAddress(atemHost), localPortMin, localPortMax, listener); } public static UdpClient create(InetSocketAddress atemAddresss, int localPortMin, int localPortMax, IUdpClientListener listener) throws java.io.IOException { if (localPortMax < localPortMin) { throw new IllegalArgumentException(String.format( "localPortMin = %d must be lesser or equal to localPortMax = %d", localPortMin, localPortMax )); } int localPort = localPortMax == localPortMin ? 
localPortMin : Utils.random(localPortMin, localPortMax); return new UdpClient(atemAddresss, localPort, listener); } public static InetSocketAddress createAtemAddress(@NotNull String atemHost) { return createAtemAddress(atemHost, DEFAULT_ATEM_PORT); } public static InetSocketAddress createAtemAddress(@NotNull String atemHost, int atemPort) { if (atemHost == null) { throw new IllegalArgumentException("atemHost must be not null"); } if (atemPort < 0 || atemPort > 65535) { throw new IllegalArgumentException(String.format("atemPort = %d must be in [1,65535]", atemPort)); } return new InetSocketAddress(atemHost, atemPort); } public UdpClient(@NotNull InetSocketAddress atemAddress, int localPort, @NotNull IUdpClientListener listener) throws java.io.IOException { if (atemAddress == null) { throw new IllegalArgumentException("atemAddress must be not null"); } if (localPort < 0 || localPort > 65535) { throw new IllegalArgumentException(String.format("localPort = %d must be in [1,65535]", localPort)); } if (listener == null) { throw new IllegalArgumentException("listener must be not null"); } this.atemAddress = atemAddress; this.listener = listener; SocketAddress localAddress = new InetSocketAddress(localPort); channel = DatagramChannel.open(); channel.configureBlocking(false); channel.bind(localAddress); channel.connect(atemAddress); if (channel.getOption(StandardSocketOptions.SO_RCVBUF) < SO_RECEIVE_BUF) { channel.setOption(StandardSocketOptions.SO_RCVBUF, SO_RECEIVE_BUF); } log.debug(String.format( "binding to %s with buf %d", localAddress, channel.getOption(StandardSocketOptions.SO_RCVBUF) )); socket = channel.socket(); this.localPort = socket.getLocalPort(); } public void stop() { inQueue.clear(); listener.onClientStop(); isStopped = true; if (channel.isOpen()) { try { channel.close(); } catch (IOException e) { log.error("Can't close udp client channel", e); } } } public void loop() { int num = 0; int bytesRead; while(true) { if (isStopped) { log.debug("udp client is 
stopped"); break; } bytesRead = 0; try { inBB.clear(); bytesRead = channel.read(inBB); // inBB.position === bytesRead } catch (PortUnreachableException ex) { listener.onPortUnreachableException(ex); } catch (Exception ex) { log.error("Error reading from server", ex); } finally { if (bytesRead > 0) { inBB.flip(); } else { inBB.clear(); inBB.limit(0); } } if (inBB.hasRemaining()) { num = num + 1; try { Packet packet = Packet.read(inBB); synchronized (inQueue) { inQueue.add(packet); inQueue.notifyAll(); } listener.onPacketReceived(packet); /* log.debug(String.format( "UDP packet #%d received from server: %s", num, packet )); */ } catch (ParseException ex) { listener.onParseException(ex); } } else { try { Thread.sleep(RECEIVE_TIMEOUT); } catch (InterruptedException ex) { if (Thread.interrupted()) { stop(); break; } } } } } synchronized public int send(@NotNull Packet p) throws IOException { if (p == null) { throw new IllegalArgumentException("packet p can't be null"); } outBB.clear(); p.write(outBB); outBB.flip(); if (!outBB.hasRemaining()) { throw new IllegalArgumentException(String.format("packet don't have data: ", p)); } int remaining = outBB.remaining(); int bytesSent = 0; while(!isStopped && (bytesSent = channel.send(outBB, atemAddress)) == 0) { try { Thread.sleep(SEND_TIMEOUT); } catch (InterruptedException ex) { if (Thread.interrupted()) { stop(); return -1; } } } if (bytesSent != remaining) { log.debug(String.format("Amount of bytes sent = %d is not equal to buffer size = %d", bytesSent, remaining)); } return bytesSent; } @Override public void close() throws Exception { stop(); listener.onClientClose();// TODO: move after channel close if (channel != null) { if (channel.isOpen()) { channel.close(); } } } }
/*-
 * See the file LICENSE for redistribution information.
 *
 * Copyright (c) 2002-2010 Oracle.  All rights reserved.
 *
 * $Id: RepGroupProtocol.java,v 1.17 2010/01/04 15:50:46 cwl Exp $
 */

package com.sleepycat.je.rep.impl;

import com.sleepycat.je.rep.NodeType;
import com.sleepycat.je.rep.impl.node.NameIdPair;

/**
 * Defines the protocol used in support of group membership.
 *
 * API to Master
 *   ENSURE_NODE -> ENSURE_OK | FAIL
 *   REMOVE_MEMBER -> OK | FAIL
 *
 * Monitor to Master
 *   GROUP_REQ -> GROUP
 */
public class RepGroupProtocol extends TextProtocol {

    /** Protocol version string embedded in every message prefix. */
    public static final String VERSION = "2";

    /** Reason codes carried by {@link Fail} responses. */
    public static enum FailReason {
        DEFAULT, MEMBER_NOT_FOUND, IS_MASTER;
    }

    /* The messages defined by this class. */
    // Each MessageOp pairs a short wire opcode with the message class that parses it.
    public final MessageOp ENSURE_NODE =
        new MessageOp("ENREQ", EnsureNode.class);
    public final MessageOp ENSURE_OK =
        new MessageOp("ENRESP", EnsureOK.class);
    public final MessageOp REMOVE_MEMBER =
        new MessageOp("RMREQ", RemoveMember.class);
    public final MessageOp GROUP_REQ =
        new MessageOp("GREQ", GroupRequest.class);
    public final MessageOp GROUP_RESP =
        new MessageOp("GRESP", GroupResponse.class);
    public final MessageOp RGFAIL_RESP =
        new MessageOp("GRFAIL", Fail.class);

    /**
     * Registers this protocol's message ops with the TextProtocol machinery and
     * configures the socket open/read timeouts from the rep parameters.
     */
    public RepGroupProtocol(String groupName,
                            NameIdPair nameIdPair,
                            RepImpl repImpl) {

        super(VERSION, groupName, nameIdPair, repImpl);

        this.initializeMessageOps(new MessageOp[] {
                ENSURE_NODE,
                ENSURE_OK,
                REMOVE_MEMBER,
                GROUP_REQ,
                GROUP_RESP,
                RGFAIL_RESP
        });

        setTimeouts(repImpl,
                    RepParams.REP_GROUP_OPEN_TIMEOUT,
                    RepParams.REP_GROUP_READ_TIMEOUT);
    }

    /** Request asking the master to remove the named member from the group. */
    public class RemoveMember extends RequestMessage {
        final String nodeName;

        public RemoveMember(String memberName) {
            this.nodeName = memberName;
        }

        // Wire-format constructor: the node name is the next payload token.
        public RemoveMember(String requestLine, String[] tokens)
            throws InvalidMessageException {

            super(requestLine, tokens);
            nodeName = nextPayloadToken();
        }

        @Override
        public MessageOp getOp() {
            return REMOVE_MEMBER;
        }

        @Override
        protected String getMessagePrefix() {
            return messagePrefixNocheck;
        }

        public String wireFormat() {
            return wireFormatPrefix() + SEPARATOR + nodeName;
        }

        public String getNodeName() {
            return nodeName;
        }
    }

    /** Payload-less request for the current group composition. */
    public class GroupRequest extends RequestMessage {

        public GroupRequest() {
        }

        public GroupRequest(String line, String[] tokens)
            throws InvalidMessageException {

            super(line, tokens);
        }

        @Override
        public MessageOp getOp() {
            return GROUP_REQ;
        }

        @Override
        protected String getMessagePrefix() {
            return messagePrefixNocheck;
        }

        public String wireFormat() {
            return wireFormatPrefix();
        }
    }

    /** Request ensuring a (monitor-only) node is present in the group. */
    public class EnsureNode extends RequestMessage {
        final RepNodeImpl node;

        public EnsureNode(RepNodeImpl node) {
            // Only MONITOR nodes may be ensured via this message.
            assert(node.getType() == NodeType.MONITOR);
            this.node = node;
        }

        // Wire-format constructor: node is hex-serialized in the next payload token.
        public EnsureNode(String line, String[] tokens)
            throws InvalidMessageException {

            super(line, tokens);
            node = RepGroupImpl.hexDeserializeNode(nextPayloadToken());
        }

        public RepNodeImpl getNode() {
            return node;
        }

        @Override
        public MessageOp getOp() {
            return ENSURE_NODE;
        }

        @Override
        protected String getMessagePrefix() {
            return messagePrefixNocheck;
        }

        public String wireFormat() {
            return wireFormatPrefix() + SEPARATOR +
                   RepGroupImpl.serializeHex(node);
        }
    }

    /** OK response to {@link EnsureNode}, carrying the node's assigned name/id pair. */
    public class EnsureOK extends OK {

        private final NameIdPair nameIdPair;

        public EnsureOK(NameIdPair nameIdPair) {
            super();
            this.nameIdPair = nameIdPair;
        }

        // Wire-format constructor: name token followed by the numeric id token.
        public EnsureOK(String line, String[] tokens)
            throws InvalidMessageException {

            super(line, tokens);
            nameIdPair = new NameIdPair(nextPayloadToken(),
                                        Integer.parseInt(nextPayloadToken()));
        }

        public NameIdPair getNameIdPair() {
            return nameIdPair;
        }

        @Override
        public MessageOp getOp() {
            return ENSURE_OK;
        }

        @Override
        public String wireFormat() {
            return wireFormatPrefix() + SEPARATOR +
                   nameIdPair.getName() + SEPARATOR +
                   Integer.toString(nameIdPair.getId());
        }
    }

    /** Response to {@link GroupRequest}, carrying the hex-serialized group. */
    public class GroupResponse extends ResponseMessage {
        final RepGroupImpl group;

        public GroupResponse(RepGroupImpl group) {
            this.group = group;
        }

        // Wire-format constructor: the group occupies the remaining tokens.
        public GroupResponse(String line, String[] tokens)
            throws InvalidMessageException {

            super(line, tokens);
            group = RepGroupImpl.deserializeHex
                (tokens, getCurrentTokenPosition());
        }

        public RepGroupImpl getGroup() {
            return group;
        }

        @Override
        public MessageOp getOp() {
            return GROUP_RESP;
        }

        @Override
        protected String getMessagePrefix() {
            return messagePrefixNocheck;
        }

        public String wireFormat() {
            return wireFormatPrefix() + SEPARATOR + group.serializeHex();
        }
    }

    /**
     * Extends the class Fail, adding a reason code to distinguish amongst
     * different types of failures.
     */
    public class Fail extends TextProtocol.Fail {
        final FailReason reason;

        public Fail(FailReason reason, String message) {
            super(message);
            this.reason = reason;
        }

        // Wire-format constructor: the reason enum name is the next payload token.
        public Fail(String line, String[] tokens)
            throws InvalidMessageException {

            super(line, tokens);
            reason = FailReason.valueOf(nextPayloadToken());
        }

        @Override
        public String wireFormat() {
            return super.wireFormat() + SEPARATOR + reason.toString();
        }

        public FailReason getReason() {
            return reason;
        }
    }
}
/*
 * Copyright 2002-2004,2008-2009 Jeremias Maerki.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.krysalis.barcode4j.impl.code128;

import org.krysalis.barcode4j.BarGroup;
import org.krysalis.barcode4j.ClassicBarcodeLogicHandler;
import org.krysalis.barcode4j.tools.MessageUtil;

/**
 * This class is an implementation of the Code 128 barcode.
 *
 * @version $Id$
 */
public class Code128LogicImpl {

    // The four FNC commands are mapped onto reserved char values 0xF1-0xF4 so they can
    // travel inside ordinary Java Strings alongside the ASCII payload.
    /** The function 1 command. */
    public static final char FNC_1 = 0xF1;
    /** The function 2 command. */
    public static final char FNC_2 = 0xF2;
    /** The function 3 command. */
    public static final char FNC_3 = 0xF3;
    /** The function 4 command. */
    public static final char FNC_4 = 0xF4;

    // Module-width table for the 103 symbol characters plus the three start characters.
    // Each row is the alternating bar/space width sequence (bar, space, bar, space, bar,
    // space) for one symbol index; values are fixed by the Code 128 symbology spec.
    private static final byte[][] CHARSET =
                           {{2, 1, 2, 2, 2, 2},  //000, SP, #032
                            {2, 2, 2, 1, 2, 2},
                            {2, 2, 2, 2, 2, 1},
                            {1, 2, 1, 2, 2, 3},
                            {1, 2, 1, 3, 2, 2},
                            {1, 3, 1, 2, 2, 2},
                            {1, 2, 2, 2, 1, 3},
                            {1, 2, 2, 3, 1, 2},
                            {1, 3, 2, 2, 1, 2},
                            {2, 2, 1, 2, 1, 3},
                            {2, 2, 1, 3, 1, 2},
                            {2, 3, 1, 2, 1, 2},
                            {1, 1, 2, 2, 3, 2},
                            {1, 2, 2, 1, 3, 2},
                            {1, 2, 2, 2, 3, 1},
                            {1, 1, 3, 2, 2, 2},
                            {1, 2, 3, 1, 2, 2},  //016, '0', #048
                            {1, 2, 3, 2, 2, 1},
                            {2, 2, 3, 2, 1, 1},
                            {2, 2, 1, 1, 3, 2},
                            {2, 2, 1, 2, 3, 1},
                            {2, 1, 3, 2, 1, 2},
                            {2, 2, 3, 1, 1, 2},
                            {3, 1, 2, 1, 3, 1},
                            {3, 1, 1, 2, 2, 2},
                            {3, 2, 1, 1, 2, 2},  //025, '9', #057
                            {3, 2, 1, 2, 2, 1},  //026, ':', #058
                            {3, 1, 2, 2, 1, 2},
                            {3, 2, 2, 1, 1, 2},
                            {3, 2, 2, 2, 1, 1},
                            {2, 1, 2, 1, 2, 3},
                            {2, 1, 2, 3, 2, 1},
                            {2, 3, 2, 1, 2, 1},
                            {1, 1, 1, 3, 2, 3},  //033, 'A', #065
                            {1, 3, 1, 1, 2, 3},
                            {1, 3, 1, 3, 2, 1},
                            {1, 1, 2, 3, 1, 3},
                            {1, 3, 2, 1, 1, 3},
                            {1, 3, 2, 3, 1, 1},
                            {2, 1, 1, 3, 1, 3},
                            {2, 3, 1, 1, 1, 3},
                            {2, 3, 1, 3, 1, 1},
                            {1, 1, 2, 1, 3, 3},
                            {1, 1, 2, 3, 3, 1},
                            {1, 3, 2, 1, 3, 1},
                            {1, 1, 3, 1, 2, 3},
                            {1, 1, 3, 3, 2, 1},
                            {1, 3, 3, 1, 2, 1},
                            {3, 1, 3, 1, 2, 1},
                            {2, 1, 1, 3, 3, 1},
                            {2, 3, 1, 1, 3, 1},
                            {2, 1, 3, 1, 1, 3},
                            {2, 1, 3, 3, 1, 1},
                            {2, 1, 3, 1, 3, 1},
                            {3, 1, 1, 1, 2, 3},
                            {3, 1, 1, 3, 2, 1},
                            {3, 3, 1, 1, 2, 1},
                            {3, 1, 2, 1, 1, 3},
                            {3, 1, 2, 3, 1, 1},  //058, 'Z', #090
                            {3, 3, 2, 1, 1, 1},  //059, '[', #091
                            {3, 1, 4, 1, 1, 1},
                            {2, 2, 1, 4, 1, 1},
                            {4, 3, 1, 1, 1, 1},
                            {1, 1, 1, 2, 2, 4},  //063, '_', #095
                            {1, 1, 1, 4, 2, 2},  //064, A:NUL/B:'`', #000/#096
                            {1, 2, 1, 1, 2, 4},  //065, A:SOH/B:'a'. #001/#097
                            {1, 2, 1, 4, 2, 1},
                            {1, 4, 1, 1, 2, 2},
                            {1, 4, 1, 2, 2, 1},
                            {1, 1, 2, 2, 1, 4},
                            {1, 1, 2, 4, 1, 2},
                            {1, 2, 2, 1, 1, 4},
                            {1, 2, 2, 4, 1, 1},
                            {1, 4, 2, 1, 1, 2},
                            {1, 4, 2, 2, 1, 1},
                            {2, 4, 1, 2, 1, 1},
                            {2, 2, 1, 1, 1, 4},
                            {4, 1, 3, 1, 1, 1},
                            {2, 4, 1, 1, 1, 2},
                            {1, 3, 4, 1, 1, 1},
                            {1, 1, 1, 2, 4, 2},
                            {1, 2, 1, 1, 4, 2},
                            {1, 2, 1, 2, 4, 1},
                            {1, 1, 4, 2, 1, 2},
                            {1, 2, 4, 1, 1, 2},
                            {1, 2, 4, 2, 1, 1},
                            {4, 1, 1, 2, 1, 2},
                            {4, 2, 1, 1, 1, 2},
                            {4, 2, 1, 2, 1, 1},
                            {2, 1, 2, 1, 4, 1},
                            {2, 1, 4, 1, 2, 1},  //090, A:SUB/B:'z', #026/#122
                            {4, 1, 2, 1, 2, 1},
                            {1, 1, 1, 1, 4, 3},
                            {1, 1, 1, 3, 4, 1},
                            {1, 3, 1, 1, 4, 1},  //094, A:RS/B:tilde, #030/#126
                            {1, 1, 4, 1, 1, 3},  //095, A:US/B:DEL, #031/#127
                            {1, 1, 4, 3, 1, 1},
                            {4, 1, 1, 1, 1, 3},
                            {4, 1, 1, 3, 1, 1},
                            {1, 1, 3, 1, 4, 1},
                            {1, 1, 4, 1, 3, 1},
                            {3, 1, 1, 1, 4, 1},
                            {4, 1, 1, 1, 3, 1},
                            {2, 1, 1, 4, 1, 2},  //103, Start A
                            {2, 1, 1, 2, 1, 4},  //104, Start B
                            {2, 1, 1, 2, 3, 2}}; //105, Start C

    // Stop character: seven elements (ends on a bar) rather than six.
    private static final byte[] STOP = {2, 3, 3, 1, 1, 1, 2}; //106, STOP

    // Which codesets (A/B/C) the encoder may use; set at construction time.
    private Code128Constants codeset;

    /**
     * Default constructor.
     */
    public Code128LogicImpl() {
        this(Code128Constants.CODESET_ALL);
    }

    /**
     * Main constructor.
     *
     * @param codeset the enabled codeset
     */
    public Code128LogicImpl(Code128Constants codeset) {
        this.codeset = codeset;
    }

    /**
     * Determines whether a character can be encoded in Code 128.
     * Valid characters are the 128 ASCII code points plus the FNC_1..FNC_4 commands.
     *
     * @param ch the character to check
     * @return true if it is a valid character
     */
    public static boolean isValidChar(char ch) {
        return (ch >= 0 && ch <= 127) || (ch >= FNC_1 && ch <= FNC_4);
    }

    /**
     * Determines whether a character is defined in codeset A
     * (control characters and upper-case ASCII, plus the FNC commands).
     *
     * @param ch the character to check
     * @return true if it is found in codeset A
     */
    public static boolean isInCodeSetA(char ch) {
        return (ch >= 0 && ch <= 95) || (ch >= FNC_1 && ch <= FNC_4);
    }

    /**
     * Determines whether a character is defined in codeset B
     * (printable ASCII including lower case, plus the FNC commands).
     *
     * @param ch the character to check
     * @return true if it is found in codeset B
     */
    public static boolean isInCodeSetB(char ch) {
        return (ch >= 32 && ch <= 127) || (ch >= FNC_1 && ch <= FNC_4);
    }

    /**
     * Determines whether a character is a digit or a function 1 command.
     * Codeset C encodes digit pairs; FNC_1 is only legal in the first position of a pair.
     *
     * @param ch the character to check
     * @param second true if checking the character for the second position in a
     *      duo.
     * @return true if the above condition is met
     */
    public static boolean canBeInCodeSetC(char ch, boolean second) {
        if (second) {
            return ch >= '0' && ch <= '9';
        } else {
            return (ch >= '0' && ch <= '9') || (ch == FNC_1);
        }
    }

    /**
     * Converts a character set index to a String representation. This is
     * primarily used for debugging purposes.
     *
     * @param index the character set index
     * @return the String representation
     */
    public static String symbolCharToString(int index) {
        // Indexes 96-105 are control/shift symbols with no printable form.
        if (index >= 96 && index <= 105) {
            final String[] replacementString = new String[]{
                "FNC3/96", "FNC2/97", "Shift/98", "CodeC/99",
                "CodeB/FNC4", "CodeA/FNC4", "FNC1", "StartA", "StartB", "StartC"};
            return replacementString[index - 96];
        } else {
            return "idx" + Integer.toString(index);
        }
    }

    /**
     * Converts an encoded Code 128 message into a String for debugging
     * purposes.
     *
     * @param encodedMsg the encoded message
     * @return the String representation
     */
    public static String toString(int... encodedMsg) {
        if (encodedMsg == null) {
            return "";
        }
        final StringBuilder sb = new StringBuilder();
        for (int i = 0; i < encodedMsg.length; i++) {
            if (i > 0) {
                sb.append("|");
            }
            sb.append(symbolCharToString(encodedMsg[i]));
        }
        return sb.toString();
    }

    /**
     * Encodes a character: emits the six alternating bar/space widths for the
     * symbol at {@code index}, wrapped in a MSG_CHARACTER bar group.
     *
     * @param logic LogicHandler to send the barcode events to
     * @param index index within the character set of the character to encode
     */
    protected void encodeChar(ClassicBarcodeLogicHandler logic, int index) {
        logic.startBarGroup(BarGroup.MSG_CHARACTER, symbolCharToString(index));
        for (byte i = 0; i < 6; i++) {
            final int width = CHARSET[index][i];
            final boolean black = isBlack(i);
            logic.addBar(black, width);
        }
        logic.endBarGroup();
    }

    /**
     * Encodes the special stop character (seven elements, see {@link #STOP}).
     *
     * @param logic LogicHandler to send the barcode events to
     */
    protected void encodeStop(ClassicBarcodeLogicHandler logic) {
        logic.startBarGroup(BarGroup.STOP_CHARACTER, null);
        for (byte i = 0; i < 7; i++) {
            final int width = STOP[i];
            final boolean black = isBlack(i);
            logic.addBar(black, width);
        }
        logic.endBarGroup();
    }

    // Elements alternate bar/space starting with a bar, so even positions are black.
    private boolean isBlack(byte i) {
        return (i % 2) == 0;
    }

    /**
     * Returns the encoder to be used. The encoder is responsible for turning a
     * String message into an array of character set indexes.
     * <p>
     * Override this method to supply your own implementation.
     *
     * @return the requested encoder
     */
    protected Code128Encoder getEncoder() {
        return new Code128Encoder(this.codeset);
    }

    /**
     * Encodes a message into an array of character set indexes.
     *
     * @param msg the message to encode
     * @return the requested array of character set indexes
     * @see #getEncoder()
     */
    int[] createEncodedMessage(String msg) {
        return getEncoder().encode(msg);
    }

    /**
     * Generates the barcode logic: encodes the message symbols, then the
     * weighted mod-103 checksum character, then the stop character.
     *
     * @param logic the logic handler to receive the generated events
     * @param msg the message to encode
     */
    public void generateBarcodeLogic(ClassicBarcodeLogicHandler logic, String msg) {
        logic.startBarcode(msg, MessageUtil.filterNonPrintableCharacters(msg));

        final int[] encodedMsg = createEncodedMessage(msg);
        for (int i = 0; i < encodedMsg.length; i++) {
            encodeChar(logic, encodedMsg[i]);
        }

        //Calculate checksum
        // Per the Code 128 spec: start char has weight 1, data chars weight i, sum mod 103.
        int checksum = encodedMsg[0];
        for (int i = 1; i < encodedMsg.length; i++) {
            checksum += i * encodedMsg[i];
        }
        checksum = checksum % 103;
        encodeChar(logic, checksum);

        encodeStop(logic);
        logic.endBarcode();
    }

}
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution

// Automatically generated by LOXI from template of_class.java
// Do not modify

package org.projectfloodlight.openflow.protocol.ver12;

import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;

/**
 * OpenFlow 1.2 wire representation of the BSN (Big Switch Networks)
 * experimenter "virtual port create request" message.
 * Immutable; instances are produced by the Reader or one of the Builders.
 * Generated code -- do not modify by hand.
 */
class OFBsnVirtualPortCreateRequestVer12 implements OFBsnVirtualPortCreateRequest {
    private static final Logger logger = LoggerFactory.getLogger(OFBsnVirtualPortCreateRequestVer12.class);
    // version: 1.2
    final static byte WIRE_VERSION = 3;
    final static int MINIMUM_LENGTH = 20;

        private final static long DEFAULT_XID = 0x0L;

    // OF message fields
    private final long xid;
    private final OFBsnVport vport;
//
    // package private constructor - used by readers, builders, and factory
    OFBsnVirtualPortCreateRequestVer12(long xid, OFBsnVport vport) {
        if(vport == null) {
            throw new NullPointerException("OFBsnVirtualPortCreateRequestVer12: property vport cannot be null");
        }
        this.xid = xid;
        this.vport = vport;
    }

    // Accessors for OF message fields
    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_12;
    }

    @Override
    public OFType getType() {
        return OFType.EXPERIMENTER;
    }

    @Override
    public long getXid() {
        return xid;
    }

    @Override
    public long getExperimenter() {
        return 0x5c16c7L;
    }

    @Override
    public long getSubtype() {
        return 0xfL;
    }

    @Override
    public OFBsnVport getVport() {
        return vport;
    }

    /** Returns a builder pre-populated with this message's field values. */
    public OFBsnVirtualPortCreateRequest.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    // Builder that falls back to a parent message for any field not explicitly set.
    static class BuilderWithParent implements OFBsnVirtualPortCreateRequest.Builder {
        final OFBsnVirtualPortCreateRequestVer12 parentMessage;

        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean vportSet;
        private OFBsnVport vport;

        BuilderWithParent(OFBsnVirtualPortCreateRequestVer12 parentMessage) {
            this.parentMessage = parentMessage;
        }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_12;
    }

    @Override
    public OFType getType() {
        return OFType.EXPERIMENTER;
    }

    @Override
    public long getXid() {
        return xid;
    }

    @Override
    public OFBsnVirtualPortCreateRequest.Builder setXid(long xid) {
        this.xid = xid;
        this.xidSet = true;
        return this;
    }

    @Override
    public long getExperimenter() {
        return 0x5c16c7L;
    }

    @Override
    public long getSubtype() {
        return 0xfL;
    }

    @Override
    public OFBsnVport getVport() {
        return vport;
    }

    @Override
    public OFBsnVirtualPortCreateRequest.Builder setVport(OFBsnVport vport) {
        this.vport = vport;
        this.vportSet = true;
        return this;
    }

        @Override
        public OFBsnVirtualPortCreateRequest build() {
                // Unset fields inherit the parent message's values.
                long xid = this.xidSet ? this.xid : parentMessage.xid;
                OFBsnVport vport = this.vportSet ? this.vport : parentMessage.vport;
                if(vport == null)
                    throw new NullPointerException("Property vport must not be null");

                //
                return new OFBsnVirtualPortCreateRequestVer12(
                    xid,
                    vport
                );
        }
    }

    // Stand-alone builder with generated defaults for unset fields.
    static class Builder implements OFBsnVirtualPortCreateRequest.Builder {
        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean vportSet;
        private OFBsnVport vport;

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_12;
    }

    @Override
    public OFType getType() {
        return OFType.EXPERIMENTER;
    }

    @Override
    public long getXid() {
        return xid;
    }

    @Override
    public OFBsnVirtualPortCreateRequest.Builder setXid(long xid) {
        this.xid = xid;
        this.xidSet = true;
        return this;
    }

    @Override
    public long getExperimenter() {
        return 0x5c16c7L;
    }

    @Override
    public long getSubtype() {
        return 0xfL;
    }

    @Override
    public OFBsnVport getVport() {
        return vport;
    }

    @Override
    public OFBsnVirtualPortCreateRequest.Builder setVport(OFBsnVport vport) {
        this.vport = vport;
        this.vportSet = true;
        return this;
    }
//
        @Override
        public OFBsnVirtualPortCreateRequest build() {
            long xid = this.xidSet ? this.xid : DEFAULT_XID;
            // vport has no default value: it must have been set explicitly.
            if(!this.vportSet)
                throw new IllegalStateException("Property vport doesn't have default value -- must be set");
            if(vport == null)
                throw new NullPointerException("Property vport must not be null");

            return new OFBsnVirtualPortCreateRequestVer12(
                    xid,
                    vport
                );
        }
    }

    final static Reader READER = new Reader();
    // Deserializer: validates each fixed-value header field, then reads xid and vport.
    static class Reader implements OFMessageReader<OFBsnVirtualPortCreateRequest> {
        @Override
        public OFBsnVirtualPortCreateRequest readFrom(ByteBuf bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property version == 3
            byte version = bb.readByte();
            if(version != (byte) 0x3)
                throw new OFParseError("Wrong version: Expected=OFVersion.OF_12(3), got="+version);
            // fixed value property type == 4
            byte type = bb.readByte();
            if(type != (byte) 0x4)
                throw new OFParseError("Wrong type: Expected=OFType.EXPERIMENTER(4), got="+type);
            int length = U16.f(bb.readShort());
            if(length < MINIMUM_LENGTH)
                throw new OFParseError("Wrong length: Expected to be >= " + MINIMUM_LENGTH + ", was: " + length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            long xid = U32.f(bb.readInt());
            // fixed value property experimenter == 0x5c16c7L
            int experimenter = bb.readInt();
            if(experimenter != 0x5c16c7)
                throw new OFParseError("Wrong experimenter: Expected=0x5c16c7L(0x5c16c7L), got="+experimenter);
            // fixed value property subtype == 0xfL
            int subtype = bb.readInt();
            if(subtype != 0xf)
                throw new OFParseError("Wrong subtype: Expected=0xfL(0xfL), got="+subtype);
            OFBsnVport vport = OFBsnVportVer12.READER.readFrom(bb);

            OFBsnVirtualPortCreateRequestVer12 bsnVirtualPortCreateRequestVer12 = new OFBsnVirtualPortCreateRequestVer12(
                    xid,
                      vport
                    );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", bsnVirtualPortCreateRequestVer12);
            return bsnVirtualPortCreateRequestVer12;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFBsnVirtualPortCreateRequestVer12Funnel FUNNEL = new OFBsnVirtualPortCreateRequestVer12Funnel();
    // Guava Funnel: feeds all semantically relevant fields into a hash sink.
    static class OFBsnVirtualPortCreateRequestVer12Funnel implements Funnel<OFBsnVirtualPortCreateRequestVer12> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFBsnVirtualPortCreateRequestVer12 message, PrimitiveSink sink) {
            // fixed value property version = 3
            sink.putByte((byte) 0x3);
            // fixed value property type = 4
            sink.putByte((byte) 0x4);
            // FIXME: skip funnel of length
            sink.putLong(message.xid);
            // fixed value property experimenter = 0x5c16c7L
            sink.putInt(0x5c16c7);
            // fixed value property subtype = 0xfL
            sink.putInt(0xf);
            message.vport.putTo(sink);
        }
    }

    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();
    // Serializer: writes the fixed header, a length placeholder, the fields, then
    // back-patches the real length.
    static class Writer implements OFMessageWriter<OFBsnVirtualPortCreateRequestVer12> {
        @Override
        public void write(ByteBuf bb, OFBsnVirtualPortCreateRequestVer12 message) {
            int startIndex = bb.writerIndex();
            // fixed value property version = 3
            bb.writeByte((byte) 0x3);
            // fixed value property type = 4
            bb.writeByte((byte) 0x4);
            // length is length of variable message, will be updated at the end
            int lengthIndex = bb.writerIndex();
            bb.writeShort(U16.t(0));

            bb.writeInt(U32.t(message.xid));
            // fixed value property experimenter = 0x5c16c7L
            bb.writeInt(0x5c16c7);
            // fixed value property subtype = 0xfL
            bb.writeInt(0xf);
            message.vport.writeTo(bb);

            // update length field
            int length = bb.writerIndex() - startIndex;
            bb.setShort(lengthIndex, length);

        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFBsnVirtualPortCreateRequestVer12(");
        b.append("xid=").append(xid);
        b.append(", ");
        b.append("vport=").append(vport);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFBsnVirtualPortCreateRequestVer12 other = (OFBsnVirtualPortCreateRequestVer12) obj;

        if( xid != other.xid)
            return false;
        if (vport == null) {
            if (other.vport != null)
                return false;
        } else if (!vport.equals(other.vport))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;

        // NOTE(review): the usual generated pattern is `prime * result + (int)(...)`;
        // here `result +` is absent for the xid term. Still a valid hashCode (consistent
        // with equals), but verify against the LoxiGen template before relying on values.
        result = prime *  (int) (xid ^ (xid >>> 32));
        result = prime * result + ((vport == null) ? 0 : vport.hashCode());
        return result;
    }

}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.monitor.jvm;

import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;

import java.io.IOException;
import java.lang.management.GarbageCollectorMXBean;
import java.lang.management.ManagementFactory;
import java.lang.management.ManagementPermission;
import java.lang.management.MemoryMXBean;
import java.lang.management.MemoryPoolMXBean;
import java.lang.management.PlatformManagedObject;
import java.lang.management.RuntimeMXBean;
import java.lang.reflect.Method;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Immutable snapshot of static information about the running JVM (pid, version,
 * memory limits, GC collector names, input arguments, class path, etc.).
 * Captured once in a static initializer and exposed as a singleton via
 * {@link #jvmInfo()}; also serializable over the transport layer (Streamable)
 * and renderable as XContent.
 */
public class JvmInfo implements Streamable, ToXContent {

    // Singleton populated once by the static initializer below.
    private static JvmInfo INSTANCE;

    static {
        RuntimeMXBean runtimeMXBean = ManagementFactory.getRuntimeMXBean();
        MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();

        // RuntimeMXBean#getName() conventionally returns the <process id>@<host>;
        // -1 is used when the name does not follow that (undocumented) convention.
        long pid;
        String xPid = runtimeMXBean.getName();
        try {
            xPid = xPid.split("@")[0];
            pid = Long.parseLong(xPid);
        } catch (Exception e) {
            pid = -1;
        }

        JvmInfo info = new JvmInfo();
        info.pid = pid;
        info.startTime = runtimeMXBean.getStartTime();
        info.version = System.getProperty("java.version");
        info.vmName = runtimeMXBean.getVmName();
        info.vmVendor = runtimeMXBean.getVmVendor();
        info.vmVersion = runtimeMXBean.getVmVersion();

        // Negative values mean "undefined" in the MemoryUsage API; normalize to 0.
        info.mem = new Mem();
        info.mem.heapInit = memoryMXBean.getHeapMemoryUsage().getInit() < 0 ? 0 : memoryMXBean.getHeapMemoryUsage().getInit();
        info.mem.heapMax = memoryMXBean.getHeapMemoryUsage().getMax() < 0 ? 0 : memoryMXBean.getHeapMemoryUsage().getMax();
        info.mem.nonHeapInit = memoryMXBean.getNonHeapMemoryUsage().getInit() < 0 ? 0 : memoryMXBean.getNonHeapMemoryUsage().getInit();
        info.mem.nonHeapMax = memoryMXBean.getNonHeapMemoryUsage().getMax() < 0 ? 0 : memoryMXBean.getNonHeapMemoryUsage().getMax();

        // sun.misc.VM is a JDK-internal class; accessed reflectively so the code
        // still loads on JVMs where it does not exist. Failure leaves the default 0.
        try {
            Class<?> vmClass = Class.forName("sun.misc.VM");
            info.mem.directMemoryMax = (Long) vmClass.getMethod("maxDirectMemory").invoke(null);
        } catch (Throwable t) {
            // ignore
        }

        info.inputArguments = runtimeMXBean.getInputArguments().toArray(new String[runtimeMXBean.getInputArguments().size()]);
        try {
            info.bootClassPath = runtimeMXBean.getBootClassPath();
        } catch (UnsupportedOperationException e) {
            // oracle java 9 no longer supports getBootClassPath(); fall back to the
            // legacy system property, then to a sentinel.
            info.bootClassPath = System.getProperty("sun.boot.class.path");
            if (info.bootClassPath == null) {
                // something else
                info.bootClassPath = "<unknown>";
            }
        }
        info.classPath = runtimeMXBean.getClassPath();
        info.systemProperties = Collections.unmodifiableMap(runtimeMXBean.getSystemProperties());

        List<GarbageCollectorMXBean> gcMxBeans = ManagementFactory.getGarbageCollectorMXBeans();
        info.gcCollectors = new String[gcMxBeans.size()];
        for (int i = 0; i < gcMxBeans.size(); i++) {
            GarbageCollectorMXBean gcMxBean = gcMxBeans.get(i);
            info.gcCollectors[i] = gcMxBean.getName();
        }

        List<MemoryPoolMXBean> memoryPoolMXBeans = ManagementFactory.getMemoryPoolMXBeans();
        info.memoryPools = new String[memoryPoolMXBeans.size()];
        for (int i = 0; i < memoryPoolMXBeans.size(); i++) {
            MemoryPoolMXBean memoryPoolMXBean = memoryPoolMXBeans.get(i);
            info.memoryPools[i] = memoryPoolMXBean.getName();
        }

        // HotSpot-specific diagnostic bean, reached reflectively so non-HotSpot
        // JVMs still work; any failure records "unknown" rather than propagating.
        try {
            @SuppressWarnings("unchecked") Class<? extends PlatformManagedObject> clazz = (Class<? extends PlatformManagedObject>)Class.forName("com.sun.management.HotSpotDiagnosticMXBean");
            Class<?> vmOptionClazz = Class.forName("com.sun.management.VMOption");
            PlatformManagedObject hotSpotDiagnosticMXBean = ManagementFactory.getPlatformMXBean(clazz);
            Method vmOptionMethod = clazz.getMethod("getVMOption", String.class);
            Object useCompressedOopsVmOption = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "UseCompressedOops");
            Method valueMethod = vmOptionClazz.getMethod("getValue");
            info.useCompressedOops = (String)valueMethod.invoke(useCompressedOopsVmOption);
        } catch (Throwable t) {
            // unable to deduce the state of compressed oops
            info.useCompressedOops = "unknown";
        }

        INSTANCE = info;
    }

    /**
     * Returns the singleton snapshot, after checking the "monitor" management
     * permission and property read access with any installed SecurityManager.
     */
    public static JvmInfo jvmInfo() {
        SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            sm.checkPermission(new ManagementPermission("monitor"));
            sm.checkPropertyAccess("*");
        }
        return INSTANCE;
    }

    // Package-visible so the static initializer and readFrom() can populate them;
    // effectively immutable after construction.
    long pid = -1;

    String version = "";
    String vmName = "";
    String vmVersion = "";
    String vmVendor = "";

    long startTime = -1;

    Mem mem;

    String[] inputArguments;

    String bootClassPath;

    String classPath;

    Map<String, String> systemProperties;

    String[] gcCollectors = Strings.EMPTY_ARRAY;
    String[] memoryPools = Strings.EMPTY_ARRAY;

    private String useCompressedOops;

    private JvmInfo() {
    }

    /**
     * The process id.
     */
    public long pid() {
        return this.pid;
    }

    /**
     * The process id.
*/ public long getPid() { return pid; } public String version() { return this.version; } public String getVersion() { return this.version; } public int versionAsInteger() { try { int i = 0; String sVersion = ""; for (; i < version.length(); i++) { if (!Character.isDigit(version.charAt(i)) && version.charAt(i) != '.') { break; } if (version.charAt(i) != '.') { sVersion += version.charAt(i); } } if (i == 0) { return -1; } return Integer.parseInt(sVersion); } catch (Exception e) { return -1; } } public int versionUpdatePack() { try { int i = 0; String sVersion = ""; for (; i < version.length(); i++) { if (!Character.isDigit(version.charAt(i)) && version.charAt(i) != '.') { break; } if (version.charAt(i) != '.') { sVersion += version.charAt(i); } } if (i == 0) { return -1; } Integer.parseInt(sVersion); int from; if (version.charAt(i) == '_') { // 1.7.0_4 from = ++i; } else if (version.charAt(i) == '-' && version.charAt(i + 1) == 'u') { // 1.7.0-u2-b21 i = i + 2; from = i; } else { return -1; } for (; i < version.length(); i++) { if (!Character.isDigit(version.charAt(i)) && version.charAt(i) != '.') { break; } } if (from == i) { return -1; } return Integer.parseInt(version.substring(from, i)); } catch (Exception e) { return -1; } } public String getVmName() { return this.vmName; } public String getVmVersion() { return this.vmVersion; } public String getVmVendor() { return this.vmVendor; } public long getStartTime() { return this.startTime; } public Mem getMem() { return this.mem; } public String[] getInputArguments() { return this.inputArguments; } public String getBootClassPath() { return this.bootClassPath; } public String getClassPath() { return this.classPath; } public Map<String, String> getSystemProperties() { return this.systemProperties; } /** * The value of the JVM flag UseCompressedOops, if available otherwise * "unknown". The value "unknown" indicates that an attempt was * made to obtain the value of the flag on this JVM and the attempt * failed. 
     *
     * @return the value of the JVM flag UseCompressedOops or "unknown"
     */
    public String useCompressedOops() {
        return this.useCompressedOops;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(Fields.JVM);
        builder.field(Fields.PID, pid);
        builder.field(Fields.VERSION, version);
        builder.field(Fields.VM_NAME, vmName);
        builder.field(Fields.VM_VERSION, vmVersion);
        builder.field(Fields.VM_VENDOR, vmVendor);
        builder.dateValueField(Fields.START_TIME_IN_MILLIS, Fields.START_TIME, startTime);

        builder.startObject(Fields.MEM);
        builder.byteSizeField(Fields.HEAP_INIT_IN_BYTES, Fields.HEAP_INIT, mem.heapInit);
        builder.byteSizeField(Fields.HEAP_MAX_IN_BYTES, Fields.HEAP_MAX, mem.heapMax);
        builder.byteSizeField(Fields.NON_HEAP_INIT_IN_BYTES, Fields.NON_HEAP_INIT, mem.nonHeapInit);
        builder.byteSizeField(Fields.NON_HEAP_MAX_IN_BYTES, Fields.NON_HEAP_MAX, mem.nonHeapMax);
        builder.byteSizeField(Fields.DIRECT_MAX_IN_BYTES, Fields.DIRECT_MAX, mem.directMemoryMax);
        builder.endObject();

        builder.field(Fields.GC_COLLECTORS, gcCollectors);
        builder.field(Fields.MEMORY_POOLS, memoryPools);

        builder.field(Fields.USING_COMPRESSED_OOPS, useCompressedOops);

        builder.endObject();
        return builder;
    }

    // XContent field-name constants, cached to avoid repeated string conversion.
    static final class Fields {
        static final XContentBuilderString JVM = new XContentBuilderString("jvm");
        static final XContentBuilderString PID = new XContentBuilderString("pid");
        static final XContentBuilderString VERSION = new XContentBuilderString("version");
        static final XContentBuilderString VM_NAME = new XContentBuilderString("vm_name");
        static final XContentBuilderString VM_VERSION = new XContentBuilderString("vm_version");
        static final XContentBuilderString VM_VENDOR = new XContentBuilderString("vm_vendor");
        static final XContentBuilderString START_TIME = new XContentBuilderString("start_time");
        static final XContentBuilderString START_TIME_IN_MILLIS = new XContentBuilderString("start_time_in_millis");

        static final XContentBuilderString MEM = new XContentBuilderString("mem");
        static final XContentBuilderString HEAP_INIT = new XContentBuilderString("heap_init");
        static final XContentBuilderString HEAP_INIT_IN_BYTES = new XContentBuilderString("heap_init_in_bytes");
        static final XContentBuilderString HEAP_MAX = new XContentBuilderString("heap_max");
        static final XContentBuilderString HEAP_MAX_IN_BYTES = new XContentBuilderString("heap_max_in_bytes");
        static final XContentBuilderString NON_HEAP_INIT = new XContentBuilderString("non_heap_init");
        static final XContentBuilderString NON_HEAP_INIT_IN_BYTES = new XContentBuilderString("non_heap_init_in_bytes");
        static final XContentBuilderString NON_HEAP_MAX = new XContentBuilderString("non_heap_max");
        static final XContentBuilderString NON_HEAP_MAX_IN_BYTES = new XContentBuilderString("non_heap_max_in_bytes");
        static final XContentBuilderString DIRECT_MAX = new XContentBuilderString("direct_max");
        static final XContentBuilderString DIRECT_MAX_IN_BYTES = new XContentBuilderString("direct_max_in_bytes");
        static final XContentBuilderString GC_COLLECTORS = new XContentBuilderString("gc_collectors");
        static final XContentBuilderString MEMORY_POOLS = new XContentBuilderString("memory_pools");
        static final XContentBuilderString USING_COMPRESSED_OOPS = new XContentBuilderString("using_compressed_ordinary_object_pointers");
    }

    public static JvmInfo readJvmInfo(StreamInput in) throws IOException {
        JvmInfo jvmInfo = new JvmInfo();
        jvmInfo.readFrom(in);
        return jvmInfo;
    }

    // NOTE: the read order below must stay the exact mirror of writeTo() —
    // the wire format has no field tags.
    @Override
    public void readFrom(StreamInput in) throws IOException {
        pid = in.readLong();
        version = in.readString();
        vmName = in.readString();
        vmVersion = in.readString();
        vmVendor = in.readString();
        startTime = in.readLong();
        inputArguments = new String[in.readInt()];
        for (int i = 0; i < inputArguments.length; i++) {
            inputArguments[i] = in.readString();
        }
        bootClassPath = in.readString();
        classPath = in.readString();
        systemProperties = new HashMap<>();
        int size = in.readInt();
        for (int i = 0; i < size; i++) {
            systemProperties.put(in.readString(), in.readString());
        }
        mem = new Mem();
        mem.readFrom(in);
        gcCollectors = in.readStringArray();
        memoryPools = in.readStringArray();
        useCompressedOops = in.readString();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeLong(pid);
        out.writeString(version);
        out.writeString(vmName);
        out.writeString(vmVersion);
        out.writeString(vmVendor);
        out.writeLong(startTime);
        out.writeInt(inputArguments.length);
        for (String inputArgument : inputArguments) {
            out.writeString(inputArgument);
        }
        out.writeString(bootClassPath);
        out.writeString(classPath);
        out.writeInt(systemProperties.size());
        for (Map.Entry<String, String> entry : systemProperties.entrySet()) {
            out.writeString(entry.getKey());
            out.writeString(entry.getValue());
        }
        mem.writeTo(out);
        out.writeStringArray(gcCollectors);
        out.writeStringArray(memoryPools);
        out.writeString(useCompressedOops);
    }

    /** Heap / non-heap / direct memory limits captured at JVM start, in bytes. */
    public static class Mem implements Streamable {

        long heapInit = 0;
        long heapMax = 0;
        long nonHeapInit = 0;
        long nonHeapMax = 0;
        long directMemoryMax = 0;

        Mem() {
        }

        public ByteSizeValue getHeapInit() {
            return new ByteSizeValue(heapInit);
        }

        public ByteSizeValue getHeapMax() {
            return new ByteSizeValue(heapMax);
        }

        public ByteSizeValue getNonHeapInit() {
            return new ByteSizeValue(nonHeapInit);
        }

        public ByteSizeValue getNonHeapMax() {
            return new ByteSizeValue(nonHeapMax);
        }

        public ByteSizeValue getDirectMemoryMax() {
            return new ByteSizeValue(directMemoryMax);
        }

        // Same contract as the enclosing class: readFrom/writeTo orders mirror.
        @Override
        public void readFrom(StreamInput in) throws IOException {
            heapInit = in.readVLong();
            heapMax = in.readVLong();
            nonHeapInit = in.readVLong();
            nonHeapMax = in.readVLong();
            directMemoryMax = in.readVLong();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeVLong(heapInit);
            out.writeVLong(heapMax);
            out.writeVLong(nonHeapInit);
            out.writeVLong(nonHeapMax);
            out.writeVLong(directMemoryMax);
        }
    }
}
/* * Copyright (C) 2007 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package com.google.common.io; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkPositionIndex; import static com.google.common.base.Preconditions.checkPositionIndexes; import com.google.common.annotations.Beta; import com.google.common.annotations.GwtIncompatible; import com.google.common.math.IntMath; import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.DataInput; import java.io.DataInputStream; import java.io.DataOutput; import java.io.DataOutputStream; import java.io.EOFException; import java.io.FilterInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; import java.nio.channels.ReadableByteChannel; import java.nio.channels.WritableByteChannel; import java.util.ArrayDeque; import java.util.Arrays; import java.util.Queue; /** * Provides utility methods for working with byte arrays and I/O streams. * * @author Chris Nokleberg * @author Colin Decker * @since 1.0 */ @GwtIncompatible public final class ByteStreams { private static final int BUFFER_SIZE = 8192; /** Creates a new byte array for buffering reads or writes. 
   */
  static byte[] createBuffer() {
    return new byte[BUFFER_SIZE];
  }

  /**
   * There are three methods to implement {@link FileChannel#transferTo(long, long,
   * WritableByteChannel)}:
   *
   * <ol>
   *   <li>Use sendfile(2) or equivalent. Requires that both the input channel and the output
   *       channel have their own file descriptors. Generally this only happens when both channels
   *       are files or sockets. This performs zero copies - the bytes never enter userspace.
   *   <li>Use mmap(2) or equivalent. Requires that either the input channel or the output channel
   *       have file descriptors. Bytes are copied from the file into a kernel buffer, then directly
   *       into the other buffer (userspace). Note that if the file is very large, a naive
   *       implementation will effectively put the whole file in memory. On many systems with paging
   *       and virtual memory, this is not a problem - because it is mapped read-only, the kernel
   *       can always page it to disk "for free". However, on systems where killing processes
   *       happens all the time in normal conditions (i.e., android) the OS must make a tradeoff
   *       between paging memory and killing other processes - so allocating a gigantic buffer and
   *       then sequentially accessing it could result in other processes dying. This is solvable
   *       via madvise(2), but that obviously doesn't exist in java.
   *   <li>Ordinary copy. Kernel copies bytes into a kernel buffer, from a kernel buffer into a
   *       userspace buffer (byte[] or ByteBuffer), then copies them from that buffer into the
   *       destination channel.
   * </ol>
   *
   * This value is intended to be large enough to make the overhead of system calls negligible,
   * without being so large that it causes problems for systems with atypical memory management if
   * approaches 2 or 3 are used.
   */
  private static final int ZERO_COPY_CHUNK_SIZE = 512 * 1024;

  // Static utility class; never instantiated.
  private ByteStreams() {}

  /**
   * Copies all bytes from the input stream to the output stream. Does not close or flush either
   * stream.
* * @param from the input stream to read from * @param to the output stream to write to * @return the number of bytes copied * @throws IOException if an I/O error occurs */ @CanIgnoreReturnValue public static long copy(InputStream from, OutputStream to) throws IOException { checkNotNull(from); checkNotNull(to); byte[] buf = createBuffer(); long total = 0; while (true) { int r = from.read(buf); if (r == -1) { break; } to.write(buf, 0, r); total += r; } return total; } /** * Copies all bytes from the readable channel to the writable channel. Does not close or flush * either channel. * * @param from the readable channel to read from * @param to the writable channel to write to * @return the number of bytes copied * @throws IOException if an I/O error occurs */ @CanIgnoreReturnValue public static long copy(ReadableByteChannel from, WritableByteChannel to) throws IOException { checkNotNull(from); checkNotNull(to); if (from instanceof FileChannel) { FileChannel sourceChannel = (FileChannel) from; long oldPosition = sourceChannel.position(); long position = oldPosition; long copied; do { copied = sourceChannel.transferTo(position, ZERO_COPY_CHUNK_SIZE, to); position += copied; sourceChannel.position(position); } while (copied > 0 || position < sourceChannel.size()); return position - oldPosition; } ByteBuffer buf = ByteBuffer.wrap(createBuffer()); long total = 0; while (from.read(buf) != -1) { Java8Compatibility.flip(buf); while (buf.hasRemaining()) { total += to.write(buf); } Java8Compatibility.clear(buf); } return total; } /** Max array length on JVM. */ private static final int MAX_ARRAY_LEN = Integer.MAX_VALUE - 8; /** Large enough to never need to expand, given the geometric progression of buffer sizes. 
   */
  private static final int TO_BYTE_ARRAY_DEQUE_SIZE = 20;

  /**
   * Returns a byte array containing the bytes from the buffers already in {@code bufs} (which have
   * a total combined length of {@code totalLen} bytes) followed by all bytes remaining in the given
   * input stream.
   */
  private static byte[] toByteArrayInternal(InputStream in, Queue<byte[]> bufs, int totalLen)
      throws IOException {
    // Starting with an 8k buffer, double the size of each successive buffer. Buffers are retained
    // in a deque so that there's no copying between buffers while reading and so all of the bytes
    // in each new allocated buffer are available for reading from the stream.
    for (int bufSize = BUFFER_SIZE;
        totalLen < MAX_ARRAY_LEN;
        bufSize = IntMath.saturatedMultiply(bufSize, 2)) {
      byte[] buf = new byte[Math.min(bufSize, MAX_ARRAY_LEN - totalLen)];
      bufs.add(buf);
      int off = 0;
      while (off < buf.length) {
        // always OK to fill buf; its size plus the rest of bufs is never more than MAX_ARRAY_LEN
        int r = in.read(buf, off, buf.length - off);
        if (r == -1) {
          return combineBuffers(bufs, totalLen);
        }
        off += r;
        totalLen += r;
      }
    }

    // read MAX_ARRAY_LEN bytes without seeing end of stream
    if (in.read() == -1) {
      // oh, there's the end of the stream
      return combineBuffers(bufs, MAX_ARRAY_LEN);
    } else {
      throw new OutOfMemoryError("input is too large to fit in a byte array");
    }
  }

  /**
   * Drains the queued buffers into one array of exactly {@code totalLen} bytes.
   * Consumes (removes) the buffers from {@code bufs} as it copies.
   */
  private static byte[] combineBuffers(Queue<byte[]> bufs, int totalLen) {
    byte[] result = new byte[totalLen];
    int remaining = totalLen;
    while (remaining > 0) {
      byte[] buf = bufs.remove();
      int bytesToCopy = Math.min(remaining, buf.length);
      int resultOffset = totalLen - remaining;
      System.arraycopy(buf, 0, result, resultOffset, bytesToCopy);
      remaining -= bytesToCopy;
    }
    return result;
  }

  /**
   * Reads all bytes from an input stream into a byte array. Does not close the stream.
* * @param in the input stream to read from * @return a byte array containing all the bytes from the stream * @throws IOException if an I/O error occurs */ public static byte[] toByteArray(InputStream in) throws IOException { checkNotNull(in); return toByteArrayInternal(in, new ArrayDeque<byte[]>(TO_BYTE_ARRAY_DEQUE_SIZE), 0); } /** * Reads all bytes from an input stream into a byte array. The given expected size is used to * create an initial byte array, but if the actual number of bytes read from the stream differs, * the correct result will be returned anyway. */ static byte[] toByteArray(InputStream in, long expectedSize) throws IOException { checkArgument(expectedSize >= 0, "expectedSize (%s) must be non-negative", expectedSize); if (expectedSize > MAX_ARRAY_LEN) { throw new OutOfMemoryError(expectedSize + " bytes is too large to fit in a byte array"); } byte[] bytes = new byte[(int) expectedSize]; int remaining = (int) expectedSize; while (remaining > 0) { int off = (int) expectedSize - remaining; int read = in.read(bytes, off, remaining); if (read == -1) { // end of stream before reading expectedSize bytes // just return the bytes read so far return Arrays.copyOf(bytes, off); } remaining -= read; } // bytes is now full int b = in.read(); if (b == -1) { return bytes; } // the stream was longer, so read the rest normally Queue<byte[]> bufs = new ArrayDeque<byte[]>(TO_BYTE_ARRAY_DEQUE_SIZE + 2); bufs.add(bytes); bufs.add(new byte[] {(byte) b}); return toByteArrayInternal(in, bufs, bytes.length + 1); } /** * Reads and discards data from the given {@code InputStream} until the end of the stream is * reached. Returns the total number of bytes read. Does not close the stream. 
* * @since 20.0 */ @CanIgnoreReturnValue @Beta public static long exhaust(InputStream in) throws IOException { long total = 0; long read; byte[] buf = createBuffer(); while ((read = in.read(buf)) != -1) { total += read; } return total; } /** * Returns a new {@link ByteArrayDataInput} instance to read from the {@code bytes} array from the * beginning. */ @Beta public static ByteArrayDataInput newDataInput(byte[] bytes) { return newDataInput(new ByteArrayInputStream(bytes)); } /** * Returns a new {@link ByteArrayDataInput} instance to read from the {@code bytes} array, * starting at the given position. * * @throws IndexOutOfBoundsException if {@code start} is negative or greater than the length of * the array */ @Beta public static ByteArrayDataInput newDataInput(byte[] bytes, int start) { checkPositionIndex(start, bytes.length); return newDataInput(new ByteArrayInputStream(bytes, start, bytes.length - start)); } /** * Returns a new {@link ByteArrayDataInput} instance to read from the given {@code * ByteArrayInputStream}. The given input stream is not reset before being read from by the * returned {@code ByteArrayDataInput}. 
   *
   * @since 17.0
   */
  @Beta
  public static ByteArrayDataInput newDataInput(ByteArrayInputStream byteArrayInputStream) {
    return new ByteArrayDataInputStream(checkNotNull(byteArrayInputStream));
  }

  /**
   * {@link ByteArrayDataInput} implementation that delegates to a {@link DataInputStream} over an
   * in-memory stream. Because the data is in memory, checked {@link IOException}s cannot occur for
   * normal reads, so they are rethrown unchecked: premature end-of-data surfaces as
   * {@link IllegalStateException}, and "impossible" I/O failures as {@link AssertionError}.
   */
  private static class ByteArrayDataInputStream implements ByteArrayDataInput {
    final DataInput input;

    ByteArrayDataInputStream(ByteArrayInputStream byteArrayInputStream) {
      this.input = new DataInputStream(byteArrayInputStream);
    }

    @Override
    public void readFully(byte b[]) {
      try {
        input.readFully(b);
      } catch (IOException e) {
        throw new IllegalStateException(e);
      }
    }

    @Override
    public void readFully(byte b[], int off, int len) {
      try {
        input.readFully(b, off, len);
      } catch (IOException e) {
        throw new IllegalStateException(e);
      }
    }

    @Override
    public int skipBytes(int n) {
      try {
        return input.skipBytes(n);
      } catch (IOException e) {
        throw new IllegalStateException(e);
      }
    }

    @Override
    public boolean readBoolean() {
      try {
        return input.readBoolean();
      } catch (IOException e) {
        throw new IllegalStateException(e);
      }
    }

    @Override
    public byte readByte() {
      try {
        return input.readByte();
      } catch (EOFException e) {
        // Running off the end of the array is a caller error, not an I/O failure.
        throw new IllegalStateException(e);
      } catch (IOException impossible) {
        // An in-memory stream cannot fail with any other IOException.
        throw new AssertionError(impossible);
      }
    }

    @Override
    public int readUnsignedByte() {
      try {
        return input.readUnsignedByte();
      } catch (IOException e) {
        throw new IllegalStateException(e);
      }
    }

    @Override
    public short readShort() {
      try {
        return input.readShort();
      } catch (IOException e) {
        throw new IllegalStateException(e);
      }
    }

    @Override
    public int readUnsignedShort() {
      try {
        return input.readUnsignedShort();
      } catch (IOException e) {
        throw new IllegalStateException(e);
      }
    }

    @Override
    public char readChar() {
      try {
        return input.readChar();
      } catch (IOException e) {
        throw new IllegalStateException(e);
      }
    }

    @Override
    public int readInt() {
      try {
        return input.readInt();
      } catch (IOException e) {
        throw new IllegalStateException(e);
      }
    }

    @Override
    public long readLong() {
      try {
        return input.readLong();
      } catch (IOException e) {
        throw new IllegalStateException(e);
      }
    }

    @Override
    public float readFloat() {
      try {
        return input.readFloat();
      } catch (IOException e) {
        throw new IllegalStateException(e);
      }
    }

    @Override
    public double readDouble() {
      try {
        return input.readDouble();
      } catch (IOException e) {
        throw new IllegalStateException(e);
      }
    }

    @Override
    public String readLine() {
      try {
        return input.readLine();
      } catch (IOException e) {
        throw new IllegalStateException(e);
      }
    }

    @Override
    public String readUTF() {
      try {
        return input.readUTF();
      } catch (IOException e) {
        throw new IllegalStateException(e);
      }
    }
  }

  /** Returns a new {@link ByteArrayDataOutput} instance with a default size. */
  @Beta
  public static ByteArrayDataOutput newDataOutput() {
    return newDataOutput(new ByteArrayOutputStream());
  }

  /**
   * Returns a new {@link ByteArrayDataOutput} instance sized to hold {@code size} bytes before
   * resizing.
   *
   * @throws IllegalArgumentException if {@code size} is negative
   */
  @Beta
  public static ByteArrayDataOutput newDataOutput(int size) {
    // When called at high frequency, boxing size generates too much garbage,
    // so avoid doing that if we can.
    if (size < 0) {
      throw new IllegalArgumentException(String.format("Invalid size: %s", size));
    }
    return newDataOutput(new ByteArrayOutputStream(size));
  }

  /**
   * Returns a new {@link ByteArrayDataOutput} instance which writes to the given {@code
   * ByteArrayOutputStream}. The given output stream is not reset before being written to by the
   * returned {@code ByteArrayDataOutput} and new data will be appended to any existing content.
   *
   * <p>Note that if the given output stream was not empty or is modified after the {@code
   * ByteArrayDataOutput} is created, the contract for {@link ByteArrayDataOutput#toByteArray} will
   * not be honored (the bytes returned in the byte array may not be exactly what was written via
   * calls to {@code ByteArrayDataOutput}).
   *
   * @since 17.0
   */
  @Beta
  public static ByteArrayDataOutput newDataOutput(ByteArrayOutputStream byteArrayOutputStream) {
    return new ByteArrayDataOutputStream(checkNotNull(byteArrayOutputStream));
  }

  /**
   * {@link ByteArrayDataOutput} implementation backed by a {@link ByteArrayOutputStream}. Writes
   * to an in-memory buffer cannot raise a real {@link IOException}, so any such exception is
   * rethrown as an {@link AssertionError}.
   */
  private static class ByteArrayDataOutputStream implements ByteArrayDataOutput {

    final DataOutput output;
    final ByteArrayOutputStream byteArrayOutputStream;

    ByteArrayDataOutputStream(ByteArrayOutputStream byteArrayOutputStream) {
      this.byteArrayOutputStream = byteArrayOutputStream;
      output = new DataOutputStream(byteArrayOutputStream);
    }

    @Override
    public void write(int b) {
      try {
        output.write(b);
      } catch (IOException impossible) {
        throw new AssertionError(impossible);
      }
    }

    @Override
    public void write(byte[] b) {
      try {
        output.write(b);
      } catch (IOException impossible) {
        throw new AssertionError(impossible);
      }
    }

    @Override
    public void write(byte[] b, int off, int len) {
      try {
        output.write(b, off, len);
      } catch (IOException impossible) {
        throw new AssertionError(impossible);
      }
    }

    @Override
    public void writeBoolean(boolean v) {
      try {
        output.writeBoolean(v);
      } catch (IOException impossible) {
        throw new AssertionError(impossible);
      }
    }

    @Override
    public void writeByte(int v) {
      try {
        output.writeByte(v);
      } catch (IOException impossible) {
        throw new AssertionError(impossible);
      }
    }

    @Override
    public void writeBytes(String s) {
      try {
        output.writeBytes(s);
      } catch (IOException impossible) {
        throw new AssertionError(impossible);
      }
    }

    @Override
    public void writeChar(int v) {
      try {
        output.writeChar(v);
      } catch (IOException impossible) {
        throw new AssertionError(impossible);
      }
    }

    @Override
    public void writeChars(String s) {
      try {
        output.writeChars(s);
      } catch (IOException impossible) {
        throw new AssertionError(impossible);
      }
    }

    @Override
    public void writeDouble(double v) {
      try {
        output.writeDouble(v);
      } catch (IOException impossible) {
        throw new AssertionError(impossible);
      }
    }

    @Override
    public void writeFloat(float v) {
      try {
        output.writeFloat(v);
      } catch (IOException impossible) {
        throw new AssertionError(impossible);
      }
    }

    @Override
    public void writeInt(int v) {
      try {
        output.writeInt(v);
      } catch (IOException impossible) {
        throw new AssertionError(impossible);
      }
    }

    @Override
    public void writeLong(long v) {
      try {
        output.writeLong(v);
      } catch (IOException impossible) {
        throw new AssertionError(impossible);
      }
    }

    @Override
    public void writeShort(int v) {
      try {
        output.writeShort(v);
      } catch (IOException impossible) {
        throw new AssertionError(impossible);
      }
    }

    @Override
    public void writeUTF(String s) {
      try {
        output.writeUTF(s);
      } catch (IOException impossible) {
        throw new AssertionError(impossible);
      }
    }

    @Override
    public byte[] toByteArray() {
      return byteArrayOutputStream.toByteArray();
    }
  }

  // Shared stateless sink returned by nullOutputStream().
  private static final OutputStream NULL_OUTPUT_STREAM =
      new OutputStream() {
        /** Discards the specified byte. */
        @Override
        public void write(int b) {}

        /** Discards the specified byte array. */
        @Override
        public void write(byte[] b) {
          checkNotNull(b);
        }

        /** Discards the specified byte array. */
        @Override
        public void write(byte[] b, int off, int len) {
          checkNotNull(b);
        }

        @Override
        public String toString() {
          return "ByteStreams.nullOutputStream()";
        }
      };

  /**
   * Returns an {@link OutputStream} that simply discards written bytes.
   *
   * @since 14.0 (since 1.0 as com.google.common.io.NullOutputStream)
   */
  @Beta
  public static OutputStream nullOutputStream() {
    return NULL_OUTPUT_STREAM;
  }

  /**
   * Wraps a {@link InputStream}, limiting the number of bytes which can be read.
   *
   * @param in the input stream to be wrapped
   * @param limit the maximum number of bytes to be read
   * @return a length-limited {@link InputStream}
   * @since 14.0 (since 1.0 as com.google.common.io.LimitInputStream)
   */
  @Beta
  public static InputStream limit(InputStream in, long limit) {
    return new LimitedInputStream(in, limit);
  }

  /**
   * Stream wrapper that reports EOF after {@code limit} bytes have been read,
   * tracking the remaining budget in {@code left} and restoring it on reset().
   */
  private static final class LimitedInputStream extends FilterInputStream {

    private long left;
    private long mark = -1;

    LimitedInputStream(InputStream in, long limit) {
      super(in);
      checkNotNull(in);
      checkArgument(limit >= 0, "limit must be non-negative");
      left = limit;
    }

    @Override
    public int available() throws IOException {
      return (int) Math.min(in.available(), left);
    }

    // it's okay to mark even if mark isn't supported, as reset won't work
    @Override
    public synchronized void mark(int readLimit) {
      in.mark(readLimit);
      mark = left;
    }

    @Override
    public int read() throws IOException {
      if (left == 0) {
        return -1;
      }

      int result = in.read();
      if (result != -1) {
        --left;
      }
      return result;
    }

    @Override
    public int read(byte[] b, int off, int len) throws IOException {
      if (left == 0) {
        return -1;
      }

      // Clamp the request to the remaining budget before delegating.
      len = (int) Math.min(len, left);
      int result = in.read(b, off, len);
      if (result != -1) {
        left -= result;
      }
      return result;
    }

    @Override
    public synchronized void reset() throws IOException {
      if (!in.markSupported()) {
        throw new IOException("Mark not supported");
      }
      if (mark == -1) {
        throw new IOException("Mark not set");
      }

      in.reset();
      // Restore the budget captured when mark() was called.
      left = mark;
    }

    @Override
    public long skip(long n) throws IOException {
      n = Math.min(n, left);
      long skipped = in.skip(n);
      left -= skipped;
      return skipped;
    }
  }

  /**
   * Attempts to read enough bytes from the stream to fill the given byte array, with the same
   * behavior as {@link DataInput#readFully(byte[])}. Does not close the stream.
   *
   * @param in the input stream to read from.
   * @param b the buffer into which the data is read.
   * @throws EOFException if this stream reaches the end before reading all the bytes.
* @throws IOException if an I/O error occurs. */ @Beta public static void readFully(InputStream in, byte[] b) throws IOException { readFully(in, b, 0, b.length); } /** * Attempts to read {@code len} bytes from the stream into the given array starting at {@code * off}, with the same behavior as {@link DataInput#readFully(byte[], int, int)}. Does not close * the stream. * * @param in the input stream to read from. * @param b the buffer into which the data is read. * @param off an int specifying the offset into the data. * @param len an int specifying the number of bytes to read. * @throws EOFException if this stream reaches the end before reading all the bytes. * @throws IOException if an I/O error occurs. */ @Beta public static void readFully(InputStream in, byte[] b, int off, int len) throws IOException { int read = read(in, b, off, len); if (read != len) { throw new EOFException( "reached end of stream after reading " + read + " bytes; " + len + " bytes expected"); } } /** * Discards {@code n} bytes of data from the input stream. This method will block until the full * amount has been skipped. Does not close the stream. * * @param in the input stream to read from * @param n the number of bytes to skip * @throws EOFException if this stream reaches the end before skipping all the bytes * @throws IOException if an I/O error occurs, or the stream does not support skipping */ @Beta public static void skipFully(InputStream in, long n) throws IOException { long skipped = skipUpTo(in, n); if (skipped < n) { throw new EOFException( "reached end of stream after skipping " + skipped + " bytes; " + n + " bytes expected"); } } /** * Discards up to {@code n} bytes of data from the input stream. This method will block until * either the full amount has been skipped or until the end of the stream is reached, whichever * happens first. Returns the total number of bytes skipped. 
*/
static long skipUpTo(InputStream in, final long n) throws IOException {
  long totalSkipped = 0;
  // A buffer is allocated lazily, and only if skipSafely does not skip any bytes.
  byte[] buf = null;

  while (totalSkipped < n) {
    long remaining = n - totalSkipped;
    long skipped = skipSafely(in, remaining);

    if (skipped == 0) {
      // Do a buffered read since skipSafely could return 0 repeatedly, for example if
      // in.available() always returns 0 (the default).
      int skip = (int) Math.min(remaining, BUFFER_SIZE);
      if (buf == null) {
        // Allocate a buffer bounded by the maximum size that can be requested, for
        // example an array of BUFFER_SIZE is unnecessary when the value of remaining
        // is smaller. Since 'remaining' only shrinks across iterations, later reads
        // never request more than buf.length bytes.
        buf = new byte[skip];
      }
      // In-place assignment: the read count flows into totalSkipped below.
      if ((skipped = in.read(buf, 0, skip)) == -1) {
        // Reached EOF
        break;
      }
    }
    totalSkipped += skipped;
  }
  return totalSkipped;
}

/**
 * Attempts to skip up to {@code n} bytes from the given input stream, but not more than {@code
 * in.available()} bytes. This prevents {@code FileInputStream} from skipping more bytes than
 * actually remain in the file, something that it {@linkplain java.io.FileInputStream#skip(long)
 * specifies} it can do in its Javadoc despite the fact that it is violating the contract of
 * {@code InputStream.skip()}.
 */
private static long skipSafely(InputStream in, long n) throws IOException {
  // available() == 0 gives no information about remaining data, so report zero bytes
  // skipped rather than trusting in.skip() blindly.
  int available = in.available();
  return available == 0 ? 0 : in.skip(Math.min(available, n));
}

/**
 * Process the bytes of the given input stream using the given processor.
* * @param input the input stream to process * @param processor the object to which to pass the bytes of the stream * @return the result of the byte processor * @throws IOException if an I/O error occurs * @since 14.0 */ @Beta @CanIgnoreReturnValue // some processors won't return a useful result public static <T> T readBytes(InputStream input, ByteProcessor<T> processor) throws IOException { checkNotNull(input); checkNotNull(processor); byte[] buf = createBuffer(); int read; do { read = input.read(buf); } while (read != -1 && processor.processBytes(buf, 0, read)); return processor.getResult(); } /** * Reads some bytes from an input stream and stores them into the buffer array {@code b}. This * method blocks until {@code len} bytes of input data have been read into the array, or end of * file is detected. The number of bytes read is returned, possibly zero. Does not close the * stream. * * <p>A caller can detect EOF if the number of bytes read is less than {@code len}. All subsequent * calls on the same stream will return zero. * * <p>If {@code b} is null, a {@code NullPointerException} is thrown. If {@code off} is negative, * or {@code len} is negative, or {@code off+len} is greater than the length of the array {@code * b}, then an {@code IndexOutOfBoundsException} is thrown. If {@code len} is zero, then no bytes * are read. Otherwise, the first byte read is stored into element {@code b[off]}, the next one * into {@code b[off+1]}, and so on. The number of bytes read is, at most, equal to {@code len}. 
* * @param in the input stream to read from * @param b the buffer into which the data is read * @param off an int specifying the offset into the data * @param len an int specifying the number of bytes to read * @return the number of bytes read * @throws IOException if an I/O error occurs * @throws IndexOutOfBoundsException if {@code off} is negative, if {@code len} is negative, or if * {@code off + len} is greater than {@code b.length} */ @Beta @CanIgnoreReturnValue // Sometimes you don't care how many bytes you actually read, I guess. // (You know that it's either going to read len bytes or stop at EOF.) public static int read(InputStream in, byte[] b, int off, int len) throws IOException { checkNotNull(in); checkNotNull(b); if (len < 0) { throw new IndexOutOfBoundsException(String.format("len (%s) cannot be negative", len)); } checkPositionIndexes(off, off + len, b.length); int total = 0; while (total < len) { int result = in.read(b, off + total, len - total); if (result == -1) { break; } total += result; } return total; } }
/* * ColumnChanger.java * * This file is part of SQL Workbench/J, http://www.sql-workbench.net * * Copyright 2002-2015, Thomas Kellerer * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at. * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * To contact the author please send an email to: support@sql-workbench.net * */ package workbench.db.sqltemplates; import java.util.List; import workbench.resource.ResourceMgr; import workbench.resource.Settings; import workbench.db.ColumnIdentifier; import workbench.db.CommentSqlManager; import workbench.db.DbObject; import workbench.db.DbSettings; import workbench.db.MetaDataSqlManager; import workbench.db.TableIdentifier; import workbench.db.TableSourceBuilder; import workbench.db.WbConnection; import workbench.db.oracle.OracleUtils; import workbench.util.CollectionUtil; import workbench.util.SqlUtil; import workbench.util.StringUtil; /** * A class to generate ALTER statements for changes to column definitions * of a table. 
The necessary DBMS specific SQL statements are retrieved
 * through DbSettings
 *
 * @author Thomas Kellerer
 */
public class ColumnChanger
{
  public static final String PARAM_TABLE_NAME = MetaDataSqlManager.TABLE_NAME_PLACEHOLDER;
  public static final String PARAM_COL_NAME = MetaDataSqlManager.COLUMN_NAME_PLACEHOLDER;
  public static final String PARAM_NEW_COL_NAME = "%new_column_name%";
  public static final String PARAM_DATATYPE = "%datatype%";
  public static final String PARAM_NEW_DATATYPE = "%new_datatype%";

  /**
   * The placeholder for the complete DEFAULT xxx expression when adding a new column
   */
  public static final String PARAM_DEFAULT_EXPR = "%default_expression%";

  public static final String PARAM_NULLABLE = "%nullable%";

  /**
   * The placeholder for the default <b>value</b> for generating ALTER column
   * statements (the DEFAULT keyword is already part of the template string)
   */
  public static final String PARAM_DEFAULT_VALUE = "%default_value%";

  // The connection and the DbSettings are stored in two different variables
  // so that a ColumnChanger can be initialized in a unit test without a connection.
  private WbConnection dbConn;
  private DbSettings dbSettings;
  private CommentSqlManager commentMgr;

  public ColumnChanger(WbConnection con)
  {
    dbConn = con;
    dbSettings = (con != null ? con.getDbSettings() : null);
    commentMgr = new CommentSqlManager(dbSettings != null ? dbSettings.getDbId() : "");
  }

  /**
   * For unit testing
   * @param settings the DB configuration to be used
   */
  ColumnChanger(DbSettings settings)
  {
    dbConn = null;
    dbSettings = settings;
    commentMgr = new CommentSqlManager(dbSettings != null ? dbSettings.getDbId() : "");
  }

  /**
   * Generates a complete ALTER script (statements separated by ";") for the changes between
   * the old and the new column definition, or null if nothing needs to be changed.
   */
  public String getAlterScript(TableIdentifier table, ColumnIdentifier oldDefinition, ColumnIdentifier newDefinition)
  {
    List<String> statements = getAlterStatements(table, oldDefinition, newDefinition);
    if (statements.isEmpty()) return null;

    StringBuilder result = new StringBuilder(statements.size() * 50);

    // Oracle only stores column remarks if they have been enabled for the connection;
    // warn the user when a comment change would be silently lost.
    if (dbConn != null && dbConn.getMetadata().isOracle() && oldDefinition != null)
    {
      String oldComment = oldDefinition.getComment();
      String newComment = newDefinition.getComment();
      if (!StringUtil.equalStringOrEmpty(oldComment, newComment) && !OracleUtils.remarksEnabled(dbConn))
      {
        result.append("-- ");
        result.append(ResourceMgr.getString("MsgSchemaReporterOracleRemarksWarning"));
        result.append('\n');
      }
    }

    for (String sql : statements)
    {
      result.append(sql);
      result.append(";\n");
    }
    return result.toString();
  }

  /**
   * Returns the individual ALTER statements (without trailing semicolons) needed to turn
   * oldDefinition into newDefinition. When oldDefinition is null the column is added.
   */
  public List<String> getAlterStatements(TableIdentifier table, ColumnIdentifier oldDefinition, ColumnIdentifier newDefinition)
  {
    List<String> result = CollectionUtil.arrayList();

    if (oldDefinition == null && canAddColumn())
    {
      // Adding a new column; a comment (if any) needs its own statement.
      String sql = addColumn(table, newDefinition);
      if (sql != null) result.add(sql);
      if (StringUtil.isNonBlank(newDefinition.getComment()))
      {
        String comment = changeRemarks(table, null, newDefinition);
        if (comment != null) result.add(comment);
      }
    }
    else if (oldDefinition != null)
    {
      String sql = changeDataType(table, oldDefinition, newDefinition);
      if (sql != null) result.add(SqlUtil.trimSemicolon(sql));

      sql = changeDefault(table, oldDefinition, newDefinition);
      if (sql != null) result.add(SqlUtil.trimSemicolon(sql));

      sql = changeNullable(table, oldDefinition, newDefinition);
      if (sql != null) result.add(SqlUtil.trimSemicolon(sql));

      sql = changeRemarks(table, oldDefinition, newDefinition);
      if (sql != null) result.add(SqlUtil.trimSemicolon(sql));

      sql = renameColumn(table, oldDefinition, newDefinition);
      if (sql != null) result.add(SqlUtil.trimSemicolon(sql));
    }
    return result;
  }

  /**
   * Replaces the placeholders that may occur in any column template.
   * Some DBMS require the full definition of the column (including nullable, default and so on)
   * even if only the type should be changed or if the column is only renamed.
   */
  protected String changeCommonPlaceholders(String sql, ColumnIdentifier newCol)
  {
    sql = sql.replace(PARAM_NULLABLE, nullableSql(newCol.isNullable()));

    String comment = newCol.getComment();
    if (comment == null) comment = "";
    sql = sql.replace(CommentSqlManager.COMMENT_PLACEHOLDER, comment.replace("'", "''"));

    String defaultValue = newCol.getDefaultValue();
    if (StringUtil.isBlank(defaultValue))
    {
      // Remove the whole "DEFAULT xxx" part when no default is defined.
      sql = sql.replace("DEFAULT " + PARAM_DEFAULT_VALUE, "");
      sql = sql.replace(PARAM_DEFAULT_VALUE, "");
    }
    else
    {
      sql = sql.replace(PARAM_DEFAULT_VALUE, defaultValue);
    }

    String dataType = newCol.getDbmsType();
    sql = sql.replace(PARAM_NEW_DATATYPE, dataType);
    sql = sql.replace(PARAM_DATATYPE, dataType);
    return sql;
  }

  public boolean canAlterType()
  {
    return (dbSettings.getAlterColumnDataTypeSql() != null);
  }

  public boolean canRenameColumn()
  {
    return (dbSettings.getRenameColumnSql() != null);
  }

  public boolean canChangeNullable()
  {
    String dropNotNull = dbSettings.getAlterColumnDropNotNull();
    String setNotNull = dbSettings.getAlterColumnSetNotNull();
    return (dropNotNull != null && setNotNull != null);
  }

  public boolean canChangeDefault()
  {
    String alterDefault = dbSettings.getAlterColumnDefaultSql();
    String setDefault = dbSettings.getSetColumnDefaultSql();
    String dropDefault = dbSettings.getDropColumnDefaultSql();
    return (alterDefault != null || (setDefault != null && dropDefault != null));
  }

  public boolean canAddColumn()
  {
    return (dbSettings.getAddColumnSql() != null);
  }

  public boolean canChangeComment()
  {
    return (commentMgr.getCommentSqlTemplate("column", null) != null);
  }

  /**
   * Returns the column name, quoted if it is a reserved word for the current connection.
   */
  protected String getColumnExpression(ColumnIdentifier column)
  {
    String colname = column.getColumnName();
    if (dbConn == null) return colname;
    if (dbConn.getMetadata().isReservedWord(colname)) return "\"" + colname + "\"";
    return colname;
  }

  /**
   * Generates the ADD COLUMN statement for the given definition, or null if none can be built.
   */
  protected String addColumn(TableIdentifier table, ColumnIdentifier newDefinition)
  {
    if (newDefinition == null) return null;

    String sql = dbSettings.getAddColumnSql();
    sql = sql.replace(PARAM_TABLE_NAME, table.getTableExpression(dbConn));
    sql = sql.replace(PARAM_COL_NAME, getColumnExpression(newDefinition));
    sql = sql.replace(PARAM_DATATYPE, newDefinition.getDbmsType());
    if (StringUtil.isBlank(newDefinition.getDefaultValue()))
    {
      sql = sql.replace(PARAM_DEFAULT_EXPR, "");
    }
    else
    {
      sql = sql.replace(PARAM_DEFAULT_EXPR, "DEFAULT " + newDefinition.getDefaultValue());
    }
    sql = ColumnDefinitionTemplate.replaceNullable(sql, dbSettings.getDbId(), newDefinition.isNullable(), null);
    return sql;
  }

  protected boolean dataTypeChanged(ColumnIdentifier oldDefinition, ColumnIdentifier newDefinition)
  {
    String oldType = oldDefinition.getDbmsType();
    String newType = newDefinition.getDbmsType();
    return !(oldType.trim().equalsIgnoreCase(newType.trim()));
  }

  /**
   * Generates an ALTER statement for a changed data type, or null if the type did not change
   * or the DBMS does not support it.
   */
  protected String changeDataType(TableIdentifier table, ColumnIdentifier oldDefinition, ColumnIdentifier newDefinition)
  {
    String sql = dbSettings.getAlterColumnDataTypeSql();
    if (StringUtil.isBlank(sql)) return null;
    if (!dataTypeChanged(oldDefinition, newDefinition)) return null;

    sql = sql.replace(PARAM_COL_NAME, getColumnExpression(oldDefinition));
    sql = sql.replace(PARAM_TABLE_NAME, table.getTableExpression(dbConn));
    sql = sql.replace(PARAM_DATATYPE, oldDefinition.getDbmsType());
    sql = sql.replace(PARAM_NEW_DATATYPE, newDefinition.getDbmsType());
    sql = changeCommonPlaceholders(sql, newDefinition);
    return sql;
  }

  /**
   * Generates a RENAME statement, or null if the name did not change or renaming is not supported.
   */
  protected String renameColumn(TableIdentifier table, ColumnIdentifier oldDefinition, ColumnIdentifier newDefinition)
  {
    String sql = dbSettings.getRenameColumnSql();
    if (StringUtil.isBlank(sql)) return null;

    String oldName = getColumnExpression(oldDefinition);
    String newName = getColumnExpression(newDefinition);
    if (oldName.trim().equalsIgnoreCase(newName.trim())) return null;

    sql = sql.replace(PARAM_COL_NAME, getColumnExpression(oldDefinition));
    sql = sql.replace(PARAM_TABLE_NAME, table.getTableExpression(dbConn));
    sql = sql.replace(PARAM_NEW_COL_NAME, getColumnExpression(newDefinition));

    // Some DBMS require the full data type definition of the column even if it should only be renamed...
    sql = changeCommonPlaceholders(sql, newDefinition);
    return sql;
  }

  // Currently not called from within this class; kept for subclass/maintenance use.
  private boolean nullableChanged(ColumnIdentifier oldDefinition, ColumnIdentifier newDefinition)
  {
    boolean wasNullable = oldDefinition.isNullable();
    boolean isNowNullable = newDefinition.isNullable();
    return (wasNullable != isNowNullable);
  }

  /**
   * Generates the SET NOT NULL / DROP NOT NULL statement, or null if the nullability
   * did not change or the DBMS does not supply a matching template.
   */
  private String changeNullable(TableIdentifier table, ColumnIdentifier oldDefinition, ColumnIdentifier newDefinition)
  {
    boolean wasNullable = oldDefinition.isNullable();
    boolean isNowNullable = newDefinition.isNullable();
    if (wasNullable == isNowNullable) return null;

    String dropNotNull = dbSettings.getAlterColumnDropNotNull();
    String setNotNull = dbSettings.getAlterColumnSetNotNull();
    String sql = null;

    if (wasNullable && !isNowNullable)
    {
      // need to SET NOT NULL
      if (setNotNull == null) return null;
      sql = setNotNull;
    }
    else if (!wasNullable && isNowNullable)
    {
      sql = dropNotNull;
    }

    // Bugfix: previously changeCommonPlaceholders() was invoked even when no template
    // was configured (sql == null), causing a NullPointerException instead of returning null.
    if (sql == null) return null;

    sql = sql.replace(PARAM_TABLE_NAME, table.getTableExpression(dbConn));
    sql = sql.replace(PARAM_COL_NAME, getColumnExpression(oldDefinition));
    sql = sql.replace(PARAM_DATATYPE, oldDefinition.getDbmsType());
    sql = sql.replace(PARAM_NEW_DATATYPE, newDefinition.getDbmsType());
    sql = changeCommonPlaceholders(sql, newDefinition);
    return sql;
  }

  /**
   * Generates the COMMENT statement for a changed column remark, or null if the remark
   * did not change or no template is configured.
   */
  private String changeRemarks(TableIdentifier table, ColumnIdentifier oldDefinition, ColumnIdentifier newDefinition)
  {
    String oldRemarks = (oldDefinition == null ? "" : oldDefinition.getComment());
    String newRemarks = newDefinition.getComment();
    if (StringUtil.equalStringOrEmpty(oldRemarks, newRemarks)) return null;
    if (StringUtil.isBlank(newRemarks)) newRemarks = "";

    String action = CommentSqlManager.getAction(oldRemarks, newRemarks);
    String sql = commentMgr.getCommentSqlTemplate("column", action);
    if (StringUtil.isBlank(sql)) return null;

    sql = sql.replace(CommentSqlManager.COMMENT_FQ_OBJECT_NAME_PLACEHOLDER, table.getTableExpression(dbConn));
    sql = sql.replace(CommentSqlManager.COMMENT_OBJECT_NAME_PLACEHOLDER, table.getTableName());
    sql = sql.replace(PARAM_TABLE_NAME, table.getTableName());
    sql = sql.replace(TableSourceBuilder.SCHEMA_PLACEHOLDER, table.getSchema() == null ? "" : table.getSchema());
    sql = sql.replace(CommentSqlManager.COMMENT_COLUMN_PLACEHOLDER, getColumnExpression(oldDefinition == null ? newDefinition : oldDefinition));
    sql = sql.replace(CommentSqlManager.COMMENT_PLACEHOLDER, newRemarks.replace("'", "''"));

    // NOTE(review): a second replace(PARAM_DATATYPE, oldDefinition.getDbmsType()) used to follow,
    // but it was dead code because the placeholder was already consumed by the line above.
    // The no-op was removed; behavior (new definition's type) is unchanged.
    sql = sql.replace(PARAM_DATATYPE, newDefinition.getDbmsType());
    sql = sql.replace(PARAM_NEW_DATATYPE, newDefinition.getDbmsType());
    return sql;
  }

  /**
   * Generates the COMMENT statement for the given column (SET action), based on the
   * configured template for this DBMS.
   */
  public String getColumnCommentSql(DbObject table, ColumnIdentifier column)
  {
    String remarks = column.getComment();
    if (StringUtil.isBlank(remarks)) remarks = "";

    String sql = commentMgr.getCommentSqlTemplate("column", CommentSqlManager.COMMENT_ACTION_SET);
    sql = sql.replace(CommentSqlManager.COMMENT_FQ_OBJECT_NAME_PLACEHOLDER, table.getObjectExpression(dbConn));
    sql = sql.replace(CommentSqlManager.COMMENT_OBJECT_NAME_PLACEHOLDER, table.getObjectName());
    sql = sql.replace(PARAM_TABLE_NAME, table.getObjectName());
    sql = sql.replace(TableSourceBuilder.SCHEMA_PLACEHOLDER, table.getSchema() == null ? "" : table.getSchema());
    sql = sql.replace(CommentSqlManager.COMMENT_COLUMN_PLACEHOLDER, getColumnExpression(column));
    sql = sql.replace(CommentSqlManager.COMMENT_PLACEHOLDER, remarks.replace("'", "''"));
    sql = sql.replace(PARAM_DATATYPE, column.getDbmsType());
    // Removed a dead "if (column != null)" guard: column was already dereferenced above.
    sql = sql.replace(PARAM_NEW_DATATYPE, column.getDbmsType());
    return sql;
  }

  /**
   * Generates the statement for a changed default value (SET/DROP/ALTER DEFAULT),
   * or null if the default did not change or no template is available.
   */
  private String changeDefault(TableIdentifier table, ColumnIdentifier oldDefinition, ColumnIdentifier newDefinition)
  {
    String alterDefault = dbSettings.getAlterColumnDefaultSql();
    String setDefault = dbSettings.getSetColumnDefaultSql();
    String dropDefault = dbSettings.getDropColumnDefaultSql();

    String oldDefault = oldDefinition.getDefaultValue();
    String newDefault = newDefinition.getDefaultValue();

    String sql = null;

    if (oldDefault == null && newDefault == null) return null;
    if (oldDefault != null && oldDefault.equals(newDefault)) return null;

    if (oldDefault != null && newDefault == null)
    {
      // drop default
      if (dropDefault == null) return null;
      sql = dropDefault.replace(PARAM_TABLE_NAME, table.getTableExpression(dbConn));
    }

    // Cannot alter, need SET DEFAULT or DROP DEFAULT
    if (newDefault != null)
    {
      if (setDefault != null)
      {
        sql = setDefault.replace(PARAM_TABLE_NAME, table.getTableExpression(dbConn));
        sql = sql.replace(PARAM_DEFAULT_VALUE, newDefault);
      }
      else if (alterDefault != null)
      {
        sql = alterDefault.replace(PARAM_TABLE_NAME, table.getTableExpression(dbConn));
        sql = sql.replace(PARAM_DEFAULT_VALUE, newDefault);
      }
    }

    if (sql != null)
    {
      sql = sql.replace(PARAM_COL_NAME, getColumnExpression(oldDefinition));
      sql = sql.replace(PARAM_DATATYPE, oldDefinition.getDbmsType());
      sql = sql.replace(PARAM_NEW_DATATYPE, newDefinition.getDbmsType());
    }
    return sql;
  }

  /**
   * Returns the SQL keyword for the given nullability flag; the NULL keyword is
   * configurable per DBMS.
   */
  private String nullableSql(boolean flag)
  {
    if (flag)
    {
      return Settings.getInstance().getProperty("workbench.db." + dbSettings.getDbId() + ".nullkeyword", "NULL");
    }
    else
    {
      return "NOT NULL";
    }
  }
}
/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */
package org.apache.qpid.management.common.sasl;

import org.apache.harmony.javax.security.auth.callback.Callback;
import org.apache.harmony.javax.security.auth.callback.CallbackHandler;
import org.apache.harmony.javax.security.auth.callback.NameCallback;
import org.apache.harmony.javax.security.auth.callback.PasswordCallback;
import org.apache.harmony.javax.security.auth.callback.UnsupportedCallbackException;
import de.measite.smack.Sasl;
import org.apache.harmony.javax.security.sasl.SaslClient;
import org.apache.harmony.javax.security.sasl.SaslException;
import java.io.IOException;
import java.io.UnsupportedEncodingException;

/**
 * A {@link SaslClient} for the SASL PLAIN mechanism (RFC 4616).
 *
 * <p>The initial response has the form {@code [authzid] NUL authcid NUL passwd};
 * the password is zeroed out as soon as the response has been built.
 * PLAIN provides neither integrity nor privacy, so wrap/unwrap are unsupported.
 */
public class PlainSaslClient implements SaslClient
{
    // true once evaluateChallenge() has produced the (single) initial response
    private boolean completed;
    // callback handler used to obtain the authentication id and the password
    private CallbackHandler cbh;
    private String authorizationID;
    private String authenticationID;
    private byte password[];
    // NUL byte separating authzid / authcid / password in the PLAIN response.
    // Fix: declared final - it is a protocol constant and must never be reassigned.
    private static final byte SEPARATOR = 0;

    public PlainSaslClient(String authorizationID, CallbackHandler cbh) throws SaslException
    {
        completed = false;
        this.cbh = cbh;
        Object[] userInfo = getUserInfo();
        this.authorizationID = authorizationID;
        this.authenticationID = (String) userInfo[0];
        this.password = (byte[]) userInfo[1];
        if (authenticationID == null || password == null)
        {
            throw new SaslException("PLAIN: authenticationID and password must be specified");
        }
    }

    /**
     * Builds the PLAIN initial response ({@code [authzid] NUL authcid NUL passwd}).
     * May be called only once; the stored password is cleared afterwards.
     *
     * @throws SaslException if the ids cannot be encoded as UTF-8
     */
    public byte[] evaluateChallenge(byte[] challenge) throws SaslException
    {
        if (completed)
        {
            throw new IllegalStateException("PLAIN: authentication already completed");
        }
        completed = true;
        try
        {
            byte authzid[] = authorizationID == null ? null : authorizationID.getBytes("UTF8");
            byte authnid[] = authenticationID.getBytes("UTF8");
            // two separators plus optional authzid prefix
            byte response[] = new byte[
                    password.length +
                    authnid.length +
                    2 + // SEPARATOR
                    (authzid != null ? authzid.length : 0)];
            int size = 0;
            if (authzid != null)
            {
                System.arraycopy(authzid, 0, response, 0, authzid.length);
                size = authzid.length;
            }
            response[size++] = SEPARATOR;
            System.arraycopy(authnid, 0, response, size, authnid.length);
            size += authnid.length;
            response[size++] = SEPARATOR;
            System.arraycopy(password, 0, response, size, password.length);
            clearPassword(); // don't keep the secret in memory longer than needed
            return response;
        }
        catch (UnsupportedEncodingException e)
        {
            throw new SaslException("PLAIN: Cannot get UTF-8 encoding of ids", e);
        }
    }

    public String getMechanismName()
    {
        return "PLAIN";
    }

    /** PLAIN sends its data in the initial response, without waiting for a server challenge. */
    public boolean hasInitialResponse()
    {
        return true;
    }

    public boolean isComplete()
    {
        return completed;
    }

    /** Always throws: PLAIN negotiates no security layer. */
    public byte[] unwrap(byte[] incoming, int offset, int len) throws SaslException
    {
        if (completed)
        {
            throw new IllegalStateException("PLAIN: this mechanism supports neither integrity nor privacy");
        }
        else
        {
            throw new IllegalStateException("PLAIN: authentication not completed");
        }
    }

    /** Always throws: PLAIN negotiates no security layer. */
    public byte[] wrap(byte[] outgoing, int offset, int len) throws SaslException
    {
        if (completed)
        {
            throw new IllegalStateException("PLAIN: this mechanism supports neither integrity nor privacy");
        }
        else
        {
            throw new IllegalStateException("PLAIN: authentication not completed");
        }
    }

    /** Only {@code Sasl.QOP} is defined (always "auth"); anything else yields null. */
    public Object getNegotiatedProperty(String propName)
    {
        if (completed)
        {
            if (propName.equals(Sasl.QOP))
            {
                return "auth";
            }
            else
            {
                return null;
            }
        }
        else
        {
            throw new IllegalStateException("PLAIN: authentication not completed");
        }
    }

    /** Overwrites the stored password bytes with zeros and drops the reference. */
    private void clearPassword()
    {
        if (password != null)
        {
            for (int i = 0; i < password.length; i++)
            {
                password[i] = 0;
            }
            password = null;
        }
    }

    public void dispose() throws SaslException
    {
        clearPassword();
    }

    // Best-effort cleanup; finalize() is deprecated but kept for compatibility.
    protected void finalize()
    {
        clearPassword();
    }

    /**
     * Collects authentication id and password via the callback handler.
     *
     * @return {userid, password-bytes}; password chars are cleared after encoding
     * @throws SaslException if the callbacks fail or are unsupported
     */
    private Object[] getUserInfo() throws SaslException
    {
        try
        {
            final String userPrompt = "PLAIN authentication id: ";
            final String pwPrompt = "PLAIN password: ";
            NameCallback nameCb = new NameCallback(userPrompt);
            PasswordCallback passwordCb = new PasswordCallback(pwPrompt, false);
            cbh.handle(new Callback[] { nameCb, passwordCb });
            String userid = nameCb.getName();
            char pwchars[] = passwordCb.getPassword();
            byte pwbytes[];
            if (pwchars != null)
            {
                pwbytes = (new String(pwchars)).getBytes("UTF8");
                passwordCb.clearPassword();
            }
            else
            {
                pwbytes = null;
            }
            return (new Object[] { userid, pwbytes });
        }
        catch (IOException e)
        {
            throw new SaslException("Cannot get password", e);
        }
        catch (UnsupportedCallbackException e)
        {
            throw new SaslException("Cannot get userid/password", e);
        }
    }
}
package org.tutor.struts2.pagehandlers;

import java.sql.Time;

import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;

import org.tutor.struts2.model.Comment;
import org.tutor.struts2.model.Course;
import org.tutor.struts2.model.Instructor;
import org.tutor.struts2.model.Lecture;
import org.tutor.struts2.model.Media;
import org.tutor.struts2.model.Question;
import org.tutor.struts2.model.Quiz;
import org.tutor.struts2.model.Script;
import org.tutor.struts2.model.Student;
import org.tutor.struts2.model.Users;

/**
 * Manual smoke test: builds a small object graph (students, instructor, courses,
 * lectures, quizzes, comments), persists it via Hibernate, then reads it back
 * and prints the retrieved data to the console.
 */
public class DataEntryTest {

    public static void main(String[] args) {
        // --- users -------------------------------------------------------
        Student user = new Student("abc@gmail.com", "iamabc");
        user.setFirstname("Abc");
        user.setLastname("Def");
        user.setBio("I am Abc Def");
        user.setPhotoPath("no-photo-male.png");

        Instructor userI = new Instructor("xyz@gmail.com", "iamxyz");
        userI.setFirstname("Xyz");
        userI.setLastname("Uvw");
        userI.setBio("I am Xyz Uvw");
        userI.setPhotoPath("no-photo-male.png");

        // --- courses -----------------------------------------------------
        Course course = new Course("CSE470");
        course.setCourseTitle("Software Engineering");
        course.setCourseDescription("Concepts of software engineering: requirements definition, modular, structure design, data specifications, functional specifications, verification, documentation, software maintenance, Software support tools.");
        course.setInstructor(userI);
        user.getEnrolledCourses().add(course);
        userI.getInstructedCourses().add(course);

        Student user2 = new Student("pqr@gmail.com", "iampqr");
        user2.setFirstname("Pqr");
        user2.setLastname("Mno");
        user2.setBio("I am Pqr Mno");
        user2.setPhotoPath("no-photo-male.png");

        Course course2 = new Course("CSE460");
        course2.setCourseTitle("VLSI Design");
        course2.setCourseDescription("Concepts of VLSI");
        course2.setInstructor(userI);
        user2.getEnrolledCourses().add(course2);
        userI.getInstructedCourses().add(course2);

        // --- lecture 1 with materials and a 15-question quiz -------------
        Lecture lecture = new Lecture();
        lecture.setLectureTitle("UML Diagrams");
        lecture.setLectureDescription("A basic lecture on UML");
        lecture.setCourse(course);

        Media slide = new Media();
        slide.setMediaTitle("Use Case and Diagram");
        slide.setType(".ppt");
        slide.setPath("Usecase.ppt");

        Media video = new Media();
        video.setMediaTitle("Class Diagram");
        video.setType(".mp4");
        video.setPath("Classdiagram.mp4");

        lecture.getMaterials().add(slide);
        lecture.getMaterials().add(video);

        Quiz test = new Quiz();
        test.setQuizName("Quiz 1");
        test.setDuration(new Time(0, 1, 0));
        test.setLecture(lecture);

        Script s = new Script();
        s.setStudent(user);
        s.setQuiz(test);
        test.getScripts().add(s);

        // Data-driven question setup; produces exactly "Question 1".."Question 15"
        // with the same options and correct answers as before.
        String[] answers = {"A", "C", "B", "D", "C", "A", "D", "B", "C", "B", "D", "A", "C", "A", "B"};
        for (int i = 0; i < answers.length; i++) {
            test.getQuestions().add(new Question("Question " + (i + 1), i + 1, "A", "B", "C", "D", answers[i]));
        }
        lecture.setQuiz(test);

        // --- lecture 2 with materials and a one-question quiz ------------
        Lecture lecture1 = new Lecture();
        lecture1.setLectureTitle("Process");
        lecture1.setLectureDescription("A basic lecture on Process");
        lecture1.setCourse(course);

        Media slide1 = new Media();
        slide1.setMediaTitle("Different Processes");
        slide1.setType(".ppt");
        slide1.setPath("process.ppt");

        Media video1 = new Media();
        video1.setMediaTitle("Spiral Process");
        video1.setType(".mp4");
        video1.setPath("spiralprocess.mp4");

        lecture1.getMaterials().add(slide1);
        lecture1.getMaterials().add(video1);

        Quiz test1 = new Quiz();
        test1.setQuizName("Quiz 2");
        test1.setDuration(new Time(0, 30, 0));
        test1.setLecture(lecture1);

        Script s1 = new Script();
        s1.setStudent(user);
        s1.setQuiz(test1);
        test1.getScripts().add(s1);

        Question question1 = new Question();
        question1.setQuestionNum(1);
        question1.setQuestion("What is Process?");
        question1.setOption1("A");
        question1.setOption2("B");
        question1.setOption3("C");
        question1.setOption4("D");
        question1.setAnswer("B");
        test1.getQuestions().add(question1);
        lecture1.setQuiz(test1);

        // --- comments ----------------------------------------------------
        Comment comment = new Comment();
        comment.setUser(user);
        comment.setComment("This resource is helpful");
        comment.setLecture(lecture);
        lecture.getComments().add(comment);

        Comment comment1 = new Comment();
        comment1.setUser(userI);
        comment1.setComment("This resource will give you proper idea");
        comment1.setLecture(lecture1);
        lecture1.getComments().add(comment1);

        Comment comment2 = new Comment();
        comment2.setUser(userI);
        comment2.setComment("Your feedback is appriciated");
        comment2.setLecture(lecture);
        lecture.getComments().add(comment2);

        Comment comment3 = new Comment();
        comment3.setUser(user);
        comment3.setComment("Thank you");
        comment3.setLecture(lecture1);
        lecture1.getComments().add(comment3);

        // --- persist the graph -------------------------------------------
        SessionFactory sessionFactory = new Configuration().configure().buildSessionFactory();
        Session session = sessionFactory.openSession();
        session.beginTransaction();
        session.persist(user);
        session.persist(userI);
        session.persist(user2);
        // session.save(course);
        session.persist(lecture);
        session.persist(lecture1);
        // session.save(test);
        // session.save(test1);
        // session.save(comment);
        // session.save(comment1);
        // session.save(comment2);
        // session.save(comment3);
        try {
            session.flush();
        } catch (Exception e) {
            // Bugfix: the exception used to be swallowed with a bare "Exception" print;
            // include the exception itself so failures are diagnosable.
            System.out.println("Exception during flush: " + e);
        }
        session.getTransaction().commit();
        session.close();

        // --- read the data back and print it -----------------------------
        session = sessionFactory.openSession();
        session.beginTransaction();
        String email = "abc@gmail.com";
        Users userF = (Users) session.get(Users.class, email);
        System.out.println("\n\nRetrieved from User:");
        System.out.println("Name: " + userF.getFirstname() + " " + userF.getLastname());
        // Bugfix: the bio was printed with an "Email:" label.
        System.out.println("Bio: " + userF.getBio());
        System.out.println("Email: " + userF.getEmail());
        System.out.println("Password: " + userF.getPassword());
        System.out.println("Photo Path: " + userF.getPhotoPath());

        if (userF.getRole().equalsIgnoreCase("student")) {
            Student userS = (Student) session.get(Student.class, email);
            System.out.println("\n\nRetrieved from Student:");
            System.out.println("Name: " + userS.getFirstname() + " " + userS.getLastname());
            System.out.println("Bio: " + userS.getBio());
            System.out.println("Email: " + userS.getEmail());
            System.out.println("Password: " + userS.getPassword());
            System.out.println("Photo Path: " + userS.getPhotoPath());
            System.out.print("Enrolled courses: ");
            for (int i = 0; i < userS.getEnrolledCourses().size(); i++)
                System.out.print(userS.getEnrolledCourses().get(i).getCourseID());
            System.out.println();
        } else {
            Instructor userI2 = (Instructor) session.get(Instructor.class, email);
            System.out.println("\n\nRetrieved from Instructor:");
            System.out.println("Name: " + userI2.getFirstname() + " " + userI2.getLastname());
            System.out.println("Bio: " + userI2.getBio());
            System.out.println("Email: " + userI2.getEmail());
            System.out.println("Password: " + userI2.getPassword());
            System.out.println("Photo Path: " + userI2.getPhotoPath());
            System.out.print("Instructed courses: ");
            for (int i = 0; i < userI2.getInstructedCourses().size(); i++)
                System.out.print(userI2.getInstructedCourses().get(i).getCourseID());
            System.out.println();
        }

        Lecture l = (Lecture) session.get(Lecture.class, 2);
        System.out.println(l.getLectureID() + " " + l.getLectureTitle());
        //session.delete(l);
        // test1 = new Quiz();
        // test1.setQuizName("Quiz 3");
        // test1.setDuration(new Time(0,30,0));
        // test1.setLecture(l);
        //
        // Script s = new Script();
        // s.setStudent(user);
        // test.getScripts().add(s);
        //
        // Question question2=new Question();
        // question2.setQuestionNum(1);
        // question2.setQuestion("What is System?");
        // question2.setOption1("A");
        // question2.setOption2("B");
        // question2.setOption3("C");
        // question2.setOption4("D");
        // question2.setAnswer("C");
        //
        // test1.getQuestions().add(question2);
        // l.setQuiz(test1);
        // System.out.println(l.getLectureTitle()+" "+l.getQuiz().getQuizName());
        session.getTransaction().commit();
        session.close();

        // session = sessionFactory.openSession();
        // session.beginTransaction();
        //
        // Quiz q = (Quiz) session.get(Quiz.class, 1);
        // session.delete(q);
        //
        // session.getTransaction().commit();
        // session.close();
    }
}
/* * Copyright (C) 2015 Square, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package okhttp3.internal.http; import java.io.IOException; import java.io.InterruptedIOException; import java.lang.ref.Reference; import java.lang.ref.WeakReference; import java.net.ProtocolException; import java.net.SocketTimeoutException; import java.security.cert.CertificateException; import javax.net.ssl.SSLHandshakeException; import javax.net.ssl.SSLPeerUnverifiedException; import okhttp3.Address; import okhttp3.ConnectionPool; import okhttp3.Route; import okhttp3.internal.Internal; import okhttp3.internal.RouteDatabase; import okhttp3.internal.Util; import okhttp3.internal.io.RealConnection; import okio.Sink; import static java.util.concurrent.TimeUnit.MILLISECONDS; /** * This class coordinates the relationship between three entities: * * <ul> * <li><strong>Connections:</strong> physical socket connections to remote servers. These are * potentially slow to establish so it is necessary to be able to cancel a connection * currently being connected. * <li><strong>Streams:</strong> logical HTTP request/response pairs that are layered on * connections. Each connection has its own allocation limit, which defines how many * concurrent streams that connection can carry. HTTP/1.x connections can carry 1 stream * at a time, SPDY and HTTP/2 typically carry multiple. * <li><strong>Calls:</strong> a logical sequence of streams, typically an initial request and * its follow up requests. 
We prefer to keep all streams of a single call on the same
 *       connection for better behavior and locality.
 * </ul>
 *
 * <p>Instances of this class act on behalf of the call, using one or more streams over one or more
 * connections. This class has APIs to release each of the above resources:
 *
 * <ul>
 *     <li>{@link #noNewStreams()} prevents the connection from being used for new streams in the
 *         future. Use this after a {@code Connection: close} header, or when the connection may be
 *         inconsistent.
 *     <li>{@link #streamFinished streamFinished()} releases the active stream from this allocation.
 *         Note that only one stream may be active at a given time, so it is necessary to call
 *         {@link #streamFinished streamFinished()} before creating a subsequent stream with {@link
 *         #newStream newStream()}.
 *     <li>{@link #release()} removes the call's hold on the connection. Note that this won't
 *         immediately free the connection if there is a stream still lingering. That happens when a
 *         call is complete but its response body has yet to be fully consumed.
 * </ul>
 *
 * <p>This class supports {@linkplain #cancel asynchronous canceling}. This is intended to have the
 * smallest blast radius possible. If an HTTP/2 stream is active, canceling will cancel that stream
 * but not the other streams sharing its connection. But if the TLS handshake is still in progress
 * then canceling may break the entire connection.
 */
public final class StreamAllocation {
  /** Remote endpoint (host plus proxy/TLS configuration) this allocation targets. Immutable. */
  public final Address address;

  /** Route of the current connection attempt. Guarded by {@code connectionPool}. */
  private Route route;

  /** The pool whose monitor guards all mutable state of this allocation. */
  private final ConnectionPool connectionPool;

  // State guarded by connectionPool.
  private RouteSelector routeSelector;
  private RealConnection connection;
  private boolean released;
  private boolean canceled;
  private HttpStream stream;

  public StreamAllocation(ConnectionPool connectionPool, Address address) {
    this.connectionPool = connectionPool;
    this.address = address;
    this.routeSelector = new RouteSelector(address, routeDatabase());
  }

  /**
   * Returns a new stream hosted on a healthy connection, finding or building one as needed.
   * For HTTP/1.x the read/write timeouts are applied to the connection's socket, source and
   * sink; framed (HTTP/2 / SPDY) connections get a multiplexed stream instead.
   *
   * @throws RouteException if connecting failed (wraps the underlying {@link IOException})
   */
  public HttpStream newStream(int connectTimeout, int readTimeout, int writeTimeout,
      boolean connectionRetryEnabled, boolean doExtensiveHealthChecks)
      throws RouteException, IOException {
    try {
      RealConnection resultConnection = findHealthyConnection(connectTimeout, readTimeout,
          writeTimeout, connectionRetryEnabled, doExtensiveHealthChecks);

      HttpStream resultStream;
      if (resultConnection.framedConnection != null) {
        resultStream = new Http2xStream(this, resultConnection.framedConnection);
      } else {
        // Per-stream timeouts only make sense for non-multiplexed HTTP/1.x connections.
        resultConnection.socket().setSoTimeout(readTimeout);
        resultConnection.source.timeout().timeout(readTimeout, MILLISECONDS);
        resultConnection.sink.timeout().timeout(writeTimeout, MILLISECONDS);
        resultStream = new Http1xStream(this, resultConnection.source, resultConnection.sink);
      }

      synchronized (connectionPool) {
        stream = resultStream;
        return resultStream;
      }
    } catch (IOException e) {
      throw new RouteException(e);
    }
  }

  /**
   * Finds a connection and returns it if it is healthy. If it is unhealthy the process is repeated
   * until a healthy connection is found.
   */
  private RealConnection findHealthyConnection(int connectTimeout, int readTimeout,
      int writeTimeout, boolean connectionRetryEnabled, boolean doExtensiveHealthChecks)
      throws IOException, RouteException {
    while (true) {
      RealConnection candidate = findConnection(connectTimeout, readTimeout, writeTimeout,
          connectionRetryEnabled);

      // If this is a brand new connection, we can skip the extensive health checks.
      synchronized (connectionPool) {
        if (candidate.successCount == 0) {
          return candidate;
        }
      }

      // Otherwise do a potentially-slow check to confirm that the pooled connection is still good.
      if (candidate.isHealthy(doExtensiveHealthChecks)) {
        return candidate;
      }

      // Unhealthy candidate: tear it down (and poison its route) before retrying the loop.
      connectionFailed(new IOException());
    }
  }

  /**
   * Returns a connection to host a new stream. This prefers the existing connection if it exists,
   * then the pool, finally building a new connection.
   */
  private RealConnection findConnection(int connectTimeout, int readTimeout, int writeTimeout,
      boolean connectionRetryEnabled) throws IOException, RouteException {
    Route selectedRoute;
    synchronized (connectionPool) {
      if (released) throw new IllegalStateException("released");
      if (stream != null) throw new IllegalStateException("stream != null");
      if (canceled) throw new IOException("Canceled");

      // Reuse this allocation's own connection if it still accepts new streams.
      RealConnection allocatedConnection = this.connection;
      if (allocatedConnection != null && !allocatedConnection.noNewStreams) {
        return allocatedConnection;
      }

      // Attempt to get a connection from the pool.
      RealConnection pooledConnection = Internal.instance.get(connectionPool, address, this);
      if (pooledConnection != null) {
        this.connection = pooledConnection;
        return pooledConnection;
      }

      selectedRoute = route;
    }

    // Route selection may block (DNS, proxy selection) so it happens outside the pool lock.
    if (selectedRoute == null) {
      selectedRoute = routeSelector.next();
      synchronized (connectionPool) {
        route = selectedRoute;
      }
    }
    RealConnection newConnection = new RealConnection(selectedRoute);
    acquire(newConnection);

    synchronized (connectionPool) {
      Internal.instance.put(connectionPool, newConnection);
      this.connection = newConnection;
      // Re-check for a cancel that raced with connection creation.
      if (canceled) throw new IOException("Canceled");
    }

    // The TCP/TLS handshake is slow; deliberately performed outside the pool lock.
    newConnection.connect(connectTimeout, readTimeout, writeTimeout, address.connectionSpecs(),
        connectionRetryEnabled);
    routeDatabase().connected(newConnection.route());

    return newConnection;
  }

  /**
   * Reports that {@code stream} (which must be this allocation's active stream) has completed.
   * When {@code noNewStreams} is false the completed stream counts as a success for its
   * connection; otherwise the connection is poisoned against future streams.
   *
   * @throws IllegalStateException if {@code stream} is not the active stream
   */
  public void streamFinished(boolean noNewStreams, HttpStream stream) {
    synchronized (connectionPool) {
      if (stream == null || stream != this.stream) {
        throw new IllegalStateException("expected " + this.stream + " but was " + stream);
      }
      if (!noNewStreams) {
        connection.successCount++;
      }
    }
    deallocate(noNewStreams, false, true);
  }

  /** Returns the currently active stream, or null if there is none. */
  public HttpStream stream() {
    synchronized (connectionPool) {
      return stream;
    }
  }

  private RouteDatabase routeDatabase() {
    return Internal.instance.routeDatabase(connectionPool);
  }

  // NOTE(review): this synchronizes on the allocation itself, not on connectionPool which
  // guards writes to `connection` elsewhere — confirm this asymmetry is intended.
  public synchronized RealConnection connection() {
    return connection;
  }

  /** Releases the call's hold on the connection (the connection itself may linger in the pool). */
  public void release() {
    deallocate(false, true, false);
  }

  /** Forbid new streams from being created on the connection that hosts this allocation. */
  public void noNewStreams() {
    deallocate(true, false, false);
  }

  /**
   * Releases resources held by this allocation. If sufficient resources are allocated, the
   * connection will be detached or closed.
   */
  private void deallocate(boolean noNewStreams, boolean released, boolean streamFinished) {
    RealConnection connectionToClose = null;
    synchronized (connectionPool) {
      if (streamFinished) {
        this.stream = null;
      }
      if (released) {
        this.released = true;
      }
      if (connection != null) {
        if (noNewStreams) {
          connection.noNewStreams = true;
        }
        // Detach only when there is no lingering stream and the connection is done for us.
        if (this.stream == null && (this.released || connection.noNewStreams)) {
          release(connection);
          if (connection.allocations.isEmpty()) {
            connection.idleAtNanos = System.nanoTime();
            if (Internal.instance.connectionBecameIdle(connectionPool, connection)) {
              connectionToClose = connection;
            }
          }
          connection = null;
        }
      }
    }
    // Socket close happens outside the pool lock to avoid blocking other pool users.
    if (connectionToClose != null) {
      Util.closeQuietly(connectionToClose.socket());
    }
  }

  /**
   * Asynchronously cancels this allocation: the active stream if there is one, otherwise the
   * in-flight connection. Snapshot-then-cancel keeps the cancel calls outside the pool lock.
   */
  public void cancel() {
    HttpStream streamToCancel;
    RealConnection connectionToCancel;
    synchronized (connectionPool) {
      canceled = true;
      streamToCancel = stream;
      connectionToCancel = connection;
    }
    if (streamToCancel != null) {
      streamToCancel.cancel();
    } else if (connectionToCancel != null) {
      connectionToCancel.cancel();
    }
  }

  /**
   * Reports a connection failure. The failed route is recorded (so the selector avoids it) only
   * when the connection never completed a successful call, then the connection is torn down.
   */
  public void connectionFailed(IOException e) {
    synchronized (connectionPool) {
      // Avoid this route if it's never seen a successful call.
      if (connection != null && connection.successCount == 0) {
        if (route != null && e != null) {
          routeSelector.connectFailed(route, e);
        }
        route = null;
      }
    }
    deallocate(true, false, true);
  }

  /**
   * Use this allocation to hold {@code connection}. Each call to this must be paired with a call to
   * {@link #release} on the same connection.
   */
  public void acquire(RealConnection connection) {
    // Weak reference: the connection must not keep a dead allocation (and its call) alive.
    connection.allocations.add(new WeakReference<>(this));
  }

  /** Remove this allocation from the connection's list of allocations. */
  private void release(RealConnection connection) {
    for (int i = 0, size = connection.allocations.size(); i < size; i++) {
      Reference<StreamAllocation> reference = connection.allocations.get(i);
      if (reference.get() == this) {
        connection.allocations.remove(i);
        return;
      }
    }
    // Reaching here means acquire/release were unbalanced — a programming error.
    throw new IllegalStateException();
  }

  /**
   * Returns true if the current failure {@code e} may be recovered from by retrying on another
   * route. The request body must be null or replayable ({@link RetryableSink}) for a retry.
   */
  public boolean recover(IOException e, Sink requestBodyOut) {
    if (connection != null) {
      connectionFailed(e);
    }

    boolean canRetryRequestBody = requestBodyOut == null || requestBodyOut instanceof RetryableSink;
    if ((routeSelector != null && !routeSelector.hasNext()) // No more routes to attempt.
        || !isRecoverable(e)
        || !canRetryRequestBody) {
      return false;
    }

    return true;
  }

  /** Classifies {@code e}: true when a different route might succeed, false for fatal failures. */
  private boolean isRecoverable(IOException e) {
    // If there was a protocol problem, don't recover.
    if (e instanceof ProtocolException) {
      return false;
    }

    // If there was an interruption don't recover, but if there was a timeout
    // we should try the next route (if there is one).
    if (e instanceof InterruptedIOException) {
      return e instanceof SocketTimeoutException;
    }

    // Look for known client-side or negotiation errors that are unlikely to be fixed by trying
    // again with a different route.
    if (e instanceof SSLHandshakeException) {
      // If the problem was a CertificateException from the X509TrustManager,
      // do not retry.
      if (e.getCause() instanceof CertificateException) {
        return false;
      }
    }
    if (e instanceof SSLPeerUnverifiedException) {
      // e.g. a certificate pinning error.
      return false;
    }

    // An example of one we might want to retry with a different route is a problem connecting to a
    // proxy and would manifest as a standard IOException. Unless it is one we know we should not
    // retry, we return true and try a new route.
    return true;
  }

  @Override public String toString() {
    return address.toString();
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.sqoop.manager.sqlserver; import java.io.IOException; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import org.apache.commons.cli.ParseException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.FileInputFormat; import org.apache.hadoop.mapred.FileOutputFormat; import org.apache.hadoop.mapred.JobClient; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.util.StringUtils; import com.cloudera.sqoop.SqoopOptions; import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException; import com.cloudera.sqoop.config.ConfigurationHelper; import com.cloudera.sqoop.orm.CompilationManager; import com.cloudera.sqoop.testutil.CommonArgs; import com.cloudera.sqoop.testutil.ImportJobTestCase; import com.cloudera.sqoop.testutil.ReparseMapper; import com.cloudera.sqoop.tool.ImportTool; import com.cloudera.sqoop.util.ClassLoaderStack; import org.junit.After; import org.junit.Before; import org.junit.Test; 
import static org.junit.Assert.fail; /** * Test that the parse() methods generated in user SqoopRecord implementations * work in SQL Server. * * This uses JDBC to import data from an SQLServer database to HDFS. * * Since this requires an SQLServer installation, * this class is named in such a way that Sqoop's default QA process does * not run it. You need to run this manually with * -Dtestcase=SQLServerParseMethodsTest or -Dthirdparty=true. * * You need to put SQL Server JDBC driver library (sqljdbc4.jar) in a location * where Sqoop will be able to access it (since this library cannot be checked * into Apache's tree for licensing reasons) and set it's path through -Dsqoop.thirdparty.lib.dir. * * To set up your test environment: * Install SQL Server Express 2012 * Create a database SQOOPTEST * Create a login SQOOPUSER with password PASSWORD and grant all * access for SQOOPTEST to SQOOPUSER. * Set these through -Dsqoop.test.sqlserver.connectstring.host_url, -Dsqoop.test.sqlserver.database and * -Dms.sqlserver.password */ public class SQLServerParseMethodsTest extends ImportJobTestCase { @Before public void setUp() { super.setUp(); Path p = new Path(getWarehouseDir()); try { FileSystem fs = FileSystem.get(new Configuration()); fs.delete(p); } catch (IOException e) { LOG.error("Setup fail with IOException: " + StringUtils.stringifyException(e)); fail("Setup fail with IOException: " + StringUtils.stringifyException(e)); } } @After public void tearDown() { try { dropTableIfExists(getTableName()); } catch (SQLException sqle) { LOG.info("Table clean-up failed: " + sqle); } finally { super.tearDown(); } } /** * Create the argv to pass to Sqoop. * * @return the argv as an array of strings. 
*/ private String[] getArgv(boolean includeHadoopFlags, String fieldTerminator, String lineTerminator, String encloser, String escape, boolean encloserRequired) { ArrayList<String> args = new ArrayList<String>(); if (includeHadoopFlags) { CommonArgs.addHadoopFlags(args); } args.add("--table"); args.add(getTableName()); args.add("--warehouse-dir"); args.add(getWarehouseDir()); args.add("--connect"); args.add(getConnectString()); args.add("--as-textfile"); args.add("--split-by"); args.add("DATA_COL0"); // always split by first column. args.add("--fields-terminated-by"); args.add(fieldTerminator); args.add("--lines-terminated-by"); args.add(lineTerminator); args.add("--escaped-by"); args.add(escape); if (encloserRequired) { args.add("--enclosed-by"); } else { args.add("--optionally-enclosed-by"); } args.add(encloser); args.add("--num-mappers"); args.add("1"); return args.toArray(new String[0]); } public void runParseTest(String fieldTerminator, String lineTerminator, String encloser, String escape, boolean encloseRequired) throws IOException { ClassLoader prevClassLoader = null; String[] argv = getArgv(true, fieldTerminator, lineTerminator, encloser, escape, encloseRequired); runImport(argv); try { String tableClassName = getTableName(); argv = getArgv(false, fieldTerminator, lineTerminator, encloser, escape, encloseRequired); SqoopOptions opts = new ImportTool().parseArguments(argv, null, null, true); CompilationManager compileMgr = new CompilationManager(opts); String jarFileName = compileMgr.getJarFilename(); // Make sure the user's class is loaded into our address space. prevClassLoader = ClassLoaderStack.addJarFile(jarFileName, tableClassName); JobConf job = new JobConf(); job.setJar(jarFileName); // Tell the job what class we're testing. job.set(ReparseMapper.USER_TYPE_NAME_KEY, tableClassName); // use local mode in the same JVM. 
ConfigurationHelper.setJobtrackerAddr(job, "local"); job.set("fs.default.name", "file:///"); String warehouseDir = getWarehouseDir(); Path warehousePath = new Path(warehouseDir); Path inputPath = new Path(warehousePath, getTableName()); Path outputPath = new Path(warehousePath, getTableName() + "-out"); job.setMapperClass(ReparseMapper.class); job.setNumReduceTasks(0); FileInputFormat.addInputPath(job, inputPath); FileOutputFormat.setOutputPath(job, outputPath); job.setOutputKeyClass(Text.class); job.setOutputValueClass(NullWritable.class); JobClient.runJob(job); } catch (InvalidOptionsException ioe) { LOG.error(StringUtils.stringifyException(ioe)); fail(ioe.toString()); } catch (ParseException pe) { LOG.error(StringUtils.stringifyException(pe)); fail(pe.toString()); } finally { if (null != prevClassLoader) { ClassLoaderStack.setCurrentClassLoader(prevClassLoader); } } } @Test public void testDefaults() throws IOException { String[] types = { "INTEGER", "VARCHAR(32)", "INTEGER" }; String[] vals = { "64", "'foo'", "128" }; createTableWithColTypes(types, vals); runParseTest(",", "\\n", "\\\"", "\\", false); } @Test public void testRequiredEnclose() throws IOException { String[] types = { "INTEGER", "VARCHAR(32)", "INTEGER" }; String[] vals = { "64", "'foo'", "128" }; createTableWithColTypes(types, vals); runParseTest(",", "\\n", "\\\"", "\\", true); } @Test public void testStringEscapes() throws IOException { String[] types = { "VARCHAR(32)", "VARCHAR(32)", "VARCHAR(32)", "VARCHAR(32)", "VARCHAR(32)", }; String[] vals = { "'foo'", "'foo,bar'", "'foo''bar'", "'foo\\bar'", "'foo,bar''baz'", }; createTableWithColTypes(types, vals); runParseTest(",", "\\n", "\\\'", "\\", false); } @Test public void testNumericTypes() throws IOException { String[] types = { "INTEGER", "REAL", "FLOAT", "DATE", "TIME", "BIT", }; String[] vals = { "42", "36.0", "127.1", "'2009-07-02'", "'11:24:00'", "1", }; createTableWithColTypes(types, vals); runParseTest(",", "\\n", "\\\'", "\\", false); 
} protected boolean useHsqldbTestServer() { return false; } protected String getConnectString() { return MSSQLTestUtils.getDBConnectString(); } /** * Drop a table if it already exists in the database. * * @param table * the name of the table to drop. * @throws SQLException * if something goes wrong. */ protected void dropTableIfExists(String table) throws SQLException { Connection conn = getManager().getConnection(); String sqlStmt = "IF OBJECT_ID('" + table + "') IS NOT NULL DROP TABLE " + table; PreparedStatement statement = conn.prepareStatement(sqlStmt, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); try { statement.executeUpdate(); conn.commit(); } finally { statement.close(); } } protected SqoopOptions getSqoopOptions(Configuration conf) { String username = MSSQLTestUtils.getDBUserName(); String password = MSSQLTestUtils.getDBPassWord(); SqoopOptions opts = new SqoopOptions(conf); opts.setUsername(username); opts.setPassword(password); return opts; } }
package me.dotteam.dotprod;

import android.content.Context;
import android.content.SharedPreferences;
import android.location.Location;
import android.location.LocationManager;
import android.preference.PreferenceManager;
import android.util.Log;

import java.lang.reflect.Method;
import java.util.Random;

import me.dotteam.dotprod.data.HikeDataDirector;
import me.dotteam.dotprod.hw.HikeHardwareManager;
import me.dotteam.dotprod.hw.SensorListenerInterface;
import me.dotteam.dotprod.loc.HikeLocationEntity;

/**
 * Demo driver that replays a canned hike (a fixed GPS track near Georgetown, ON)
 * by injecting synthetic location, environment-sensor and step-count updates
 * into the app's normal pipelines via reflection.
 *
 * Created by foxtrot on 01/12/15.
 */
public class DemoShow extends Thread {
    private static final String TAG = "DEMOMAN";

    // Fixed GPS track replayed by the location driver (24 points).
    private final double[] latitude = {43.657630, 43.657710, 43.657760, 43.657930, 43.658060,
            43.658210, 43.658380, 43.659150, 43.659190, 43.659500, 43.659640, 43.660310,
            43.660440, 43.660530, 43.660670, 43.660930, 43.661170, 43.661340, 43.661470,
            43.661530, 43.661620, 43.661680, 43.661730, 43.661750};
    private final double[] longitude = {-79.920150, -79.920240, -79.920410, -79.920580,
            -79.920730, -79.920950, -79.921210, -79.922790, -79.922980, -79.923690, -79.924080,
            -79.925620, -79.925930, -79.926140, -79.926490, -79.927170, -79.927970, -79.928680,
            -79.929400, -79.929770, -79.931190, -79.932320, -79.934180, -79.934570};
    private final int LOCATION_POINTS = 24;
    private final int MAX_TIME = 900000; //15 Minutes = 15 * 60 seconds
    private final int AVG_TIME_PER_POINT = 3750;

    //Environmental Conditions in Demo Place Georgetown, ON
    private double startTemp = 3; //Temperature in Degrees
    private double startHumidity = 95; //Humidity in percentage
    private double startPressure = 101.1; //Pressure in KiloPascals

    private Context mContext;

    //The guys running the show
    private HikeDataDirector mHDD;
    private HikeHardwareManager mHHM;
    private HikeLocationEntity mHLE;
    private int mSensorFrequency = 500;

    // Non-public hooks obtained via reflection; see getMethodsThroughReflection().
    private Method broadcastHW;
    private Method broadcastLoc;

    //Check what the directives are
    private SharedPreferences mPrefMan;
    private boolean driveSensors = true;
    private boolean driveLocation = true;
    private boolean driveStepCount = true;

    private Thread mSensorWorker;
    private Thread mStepWorker;
    private Thread mLocationWorker;
    private Random valueGenerator;

    // Global flag the worker threads poll to know when the demo is over.
    public static boolean isRunning = false;

    public DemoShow(Context runningContext) {
        mContext = runningContext;
        valueGenerator = new Random();
    }

    @Override
    public void run() {
        isRunning = true;
        //Get all the needed methods/objects
        mHDD = HikeDataDirector.getInstance(mContext);
        mHHM = HikeHardwareManager.getInstance(mContext);
        mHLE = HikeLocationEntity.getInstance(mContext);
        mPrefMan = PreferenceManager.getDefaultSharedPreferences(mContext);
        fetchDemoParams();

        // NOTE(review): this waits for a listener OR a location request before
        // starting — confirm the && (rather than ||) matches the intended trigger.
        while (mHHM.getListenerCount() < 1 && !mHLE.isRequestingLocationUpdates()) {
            //Just do some sleeping until its time to spring into action!
            try {
                sleep(10000);
            } catch (Exception e) {
                Log.wtf(TAG, "Got Interrupted, What the heck man?");
            }
            //Leaving this loop means a hike has probably started
        }

        //Stop all Default Behaviour
        mHHM.stopSensors();
        mHHM.startCompass();
        mHLE.stopLocationUpdates();

        //Get the Methods
        getMethodsThroughReflection();

        // BUGFIX: the sensor and step drivers were previously nested inside the
        // driveLocation branch (so they never ran without location), and the
        // unconditional join() below threw a NullPointerException whenever the
        // location demo was disabled. Each driver now starts independently and
        // the join is guarded.
        if (driveLocation) {
            mLocationWorker = beginLocationDriver();
        }
        if (driveSensors) {
            mSensorWorker = beginSensorDriver();
        }
        if (driveStepCount) {
            mStepWorker = beginStepDriver();
        }

        try {
            if (mLocationWorker != null) {
                mLocationWorker.join();
            }
            //After this, there are only 10 seconds remaining to the demo
            sleep(30000);
        } catch (Exception e) {
            Log.wtf(TAG, "Got Interrupted, I'll clean up my mess now");
            if (mLocationWorker != null) {
                mLocationWorker.interrupt();
            }
            if (mSensorWorker != null) {
                mSensorWorker.interrupt();
            }
            if (mStepWorker != null) {
                mStepWorker.interrupt();
            }
        }
        isRunning = false;
    }

    /** Reads the demo directives (which drivers to run, sensor period) from preferences. */
    private void fetchDemoParams() {
        if (mPrefMan.contains("demodrive_sensors")) {
            driveSensors = mPrefMan.getBoolean("demodrive_sensors", driveSensors);
        }
        if (mPrefMan.contains("demodrive_location")) {
            driveLocation = mPrefMan.getBoolean("demodrive_location", driveLocation);
        }
        if (mPrefMan.contains("demodrive_stepcount")) {
            driveStepCount = mPrefMan.getBoolean("demodrive_stepcount", driveStepCount);
        }
        if (mPrefMan.contains("extsensor_period")) {
            mSensorFrequency = mPrefMan.getInt("extsensor_period", mSensorFrequency);
        }
    }

    /**
     * Looks up the non-public broadcast hooks on HikeHardwareManager and
     * HikeLocationEntity so the demo can push synthetic readings into them.
     */
    private void getMethodsThroughReflection() {
        try {
            broadcastLoc = mHLE.getClass().getMethod("onLocationChanged", Location.class);
            broadcastHW = mHHM.getClass().getDeclaredMethod("broadcastUpdate",
                    new Class[]{SensorListenerInterface.HikeSensors.class, Double.TYPE});
            broadcastHW.setAccessible(true);
            broadcastLoc.setAccessible(true);
        } catch (Exception e) {
            Log.wtf(TAG, e);
        }
    }

    /** Starts a worker that injects randomized humidity/pressure/temperature readings. */
    private Thread beginSensorDriver() {
        Thread worker = new Thread() {
            @Override
            public void run() {
                int sensorToUpdate;
                //Run for as long as we are updating the UI with Location points
                while (isRunning) {
                    try {
                        // BUGFIX: was nextInt() % 3, which is negative about half
                        // the time and silently skipped the switch (no default).
                        sensorToUpdate = valueGenerator.nextInt(3);
                        switch (sensorToUpdate) {
                            case 0: {
                                broadcastHW.invoke(mHHM,
                                        SensorListenerInterface.HikeSensors.HUMIDITY,
                                        startHumidity + 3 * Math.sin(valueGenerator.nextDouble()));
                                break;
                            }
                            case 1: {
                                broadcastHW.invoke(mHHM,
                                        SensorListenerInterface.HikeSensors.PRESSURE,
                                        startPressure + valueGenerator.nextDouble());
                                break;
                            }
                            case 2: {
                                broadcastHW.invoke(mHHM,
                                        SensorListenerInterface.HikeSensors.TEMPERATURE,
                                        startTemp + 0.3 * Math.sin(System.currentTimeMillis()));
                                break;
                            }
                        }
                        //Very unlikely the same sensor will get called twice
                        sleep(mSensorFrequency / 3);
                    } catch (Exception e) {
                        Log.wtf(TAG + "-SensorWorker", e);
                    }
                }
            }
        };
        worker.start();
        return worker;
    }

    /** Starts a worker that replays the canned GPS track, one point per AVG_TIME_PER_POINT ms. */
    private Thread beginLocationDriver() {
        Thread worker = new Thread() {
            @Override
            public void run() {
                try {
                    double startingAltitude = 258.5;
                    for (int i = 0; i < LOCATION_POINTS; i++) {
                        Location newLocation = new Location(LocationManager.GPS_PROVIDER); //DEMOMAN is the provider
                        newLocation.setAccuracy(7.0f);
                        newLocation.setLatitude(latitude[i]);
                        newLocation.setLongitude(longitude[i]);
                        newLocation.setAltitude(startingAltitude);
                        newLocation.setTime(System.currentTimeMillis());
                        broadcastLoc.invoke(mHLE, newLocation);
                        //We might change this, though we really only need it 15 minutes.
                        sleep(AVG_TIME_PER_POINT);
                        // Descend slightly for the first half of the track, then climb.
                        if (LOCATION_POINTS / 2 > i) {
                            startingAltitude -=
                                    (3 * Math.sin(valueGenerator.nextDouble() * System.currentTimeMillis()));
                        } else {
                            startingAltitude += i * valueGenerator.nextDouble();
                        }
                    }
                } catch (Exception e) {
                    Log.wtf(TAG + "-LocationWorker", e);
                }
            }
        };
        worker.start();
        return worker;
    }

    /** Starts a worker that emits one pedometer step roughly every 777 ms. */
    private Thread beginStepDriver() {
        Thread worker = new Thread() {
            @Override
            public void run() {
                try {
                    double steps = 0;
                    while (isRunning) {
                        broadcastHW.invoke(mHHM,
                                SensorListenerInterface.HikeSensors.PEDOMETER, steps);
                        steps += 1;
                        sleep(777);
                    }
                } catch (Exception e) {
                    Log.wtf(TAG + "-SensorWorker", e);
                }
            }
        };
        worker.start();
        return worker;
    }
}
/* * Copyright (c) 1997, 2004, Oracle and/or its affiliates. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * - Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * - Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * - Neither the name of Oracle nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ import javax.swing.*; import javax.swing.event.*; import java.awt.BorderLayout; import java.awt.Color; import java.awt.Dimension; import java.awt.FlowLayout; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.WindowAdapter; import java.awt.event.WindowEvent; import java.util.*; import javax.swing.border.*; import javax.swing.tree.*; /** * A demo for illustrating how to do different things with JTree. 
 * The data that this displays is rather boring, that is each node will
 * have 7 children that have random names based on the fonts. Each node
 * is then drawn with that font and in a different color.
 * While the data isn't interesting the example illustrates a number
 * of things:
 *
 * For an example of dynamically loading children refer to DynamicTreeNode.
 * For an example of adding/removing/inserting/reloading refer to the inner
 * classes of this class, AddAction, RemoveAction, InsertAction and
 * ReloadAction.
 * For an example of creating your own cell renderer refer to
 * SampleTreeCellRenderer.
 * For an example of subclassing JTreeModel for editing refer to
 * SampleTreeModel.
 *
 * @author Scott Violet
 */
public class SampleTree {
    /** Window for showing Tree. */
    protected JFrame frame;
    /** Tree used for the example. */
    protected JTree tree;
    /** Tree model. */
    protected DefaultTreeModel treeModel;

    /**
     * Constructs a new instance of SampleTree: builds the menu bar, the
     * tree (with its model and custom renderer), the options panel, and
     * shows the frame. Closing the window exits the JVM.
     */
    public SampleTree() {
        // Force SampleTree to come up in the Cross Platform L&F
        try {
            UIManager.setLookAndFeel(UIManager.getCrossPlatformLookAndFeelClassName());
            // If you want the System L&F instead, comment out the above line and
            // uncomment the following:
            // UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
        } catch (Exception exc) {
            System.err.println("Error loading L&F: " + exc);
        }

        JMenuBar menuBar = constructMenuBar();
        // true -> double-buffered panel
        JPanel panel = new JPanel(true);

        frame = new JFrame("SampleTree");
        frame.getContentPane().add("Center", panel);
        frame.setJMenuBar(menuBar);
        frame.setBackground(Color.lightGray);

        /* Create the JTreeModel. */
        DefaultMutableTreeNode root = createNewNode("Root");
        treeModel = new SampleTreeModel(root);

        /* Create the tree. */
        tree = new JTree(treeModel);

        /* Enable tool tips for the tree, without this tool tips will not
           be picked up. */
        ToolTipManager.sharedInstance().registerComponent(tree);

        /* Make the tree use an instance of SampleTreeCellRenderer for
           drawing. */
        tree.setCellRenderer(new SampleTreeCellRenderer());

        /* Make tree ask for the height of each row. */
        tree.setRowHeight(-1);

        /* Put the Tree in a scroller. */
        JScrollPane sp = new JScrollPane();
        sp.setPreferredSize(new Dimension(300, 300));
        sp.getViewport().add(tree);

        /* And show it. */
        panel.setLayout(new BorderLayout());
        panel.add("Center", sp);
        panel.add("South", constructOptionsPanel());

        frame.addWindowListener( new WindowAdapter() {
            public void windowClosing(WindowEvent e) {System.exit(0);}});

        frame.pack();
        // NOTE(review): JFrame.show() is deprecated — setVisible(true) is the
        // modern replacement (behavior identical here).
        frame.show();
    }

    /** Constructs a JPanel containing check boxes for the different
     * options that tree supports, plus a radio-button group selecting the
     * tree's selection mode (single/contiguous/discontiguous). */
    private JPanel constructOptionsPanel() {
        JCheckBox aCheckbox;
        JPanel retPanel = new JPanel(false);
        JPanel borderPane = new JPanel(false);

        borderPane.setLayout(new BorderLayout());
        retPanel.setLayout(new FlowLayout());

        aCheckbox = new JCheckBox("show top level handles");
        aCheckbox.setSelected(tree.getShowsRootHandles());
        aCheckbox.addChangeListener(new ShowHandlesChangeListener());
        retPanel.add(aCheckbox);

        aCheckbox = new JCheckBox("show root");
        aCheckbox.setSelected(tree.isRootVisible());
        aCheckbox.addChangeListener(new ShowRootChangeListener());
        retPanel.add(aCheckbox);

        aCheckbox = new JCheckBox("editable");
        aCheckbox.setSelected(tree.isEditable());
        aCheckbox.addChangeListener(new TreeEditableChangeListener());
        aCheckbox.setToolTipText("Triple click to edit");
        retPanel.add(aCheckbox);

        borderPane.add(retPanel, BorderLayout.CENTER);

        /* Create a set of radio buttons that dictate what selection should
           be allowed in the tree. */
        ButtonGroup group = new ButtonGroup();
        JPanel buttonPane = new JPanel(false);
        JRadioButton button;

        buttonPane.setLayout(new FlowLayout());
        buttonPane.setBorder(new TitledBorder("Selection Mode"));
        button = new JRadioButton("Single");
        button.addActionListener(new AbstractAction() {
            public boolean isEnabled() { return true; }
            public void actionPerformed(ActionEvent e) {
                tree.getSelectionModel().setSelectionMode
                    (TreeSelectionModel.SINGLE_TREE_SELECTION);
            }
        });
        group.add(button);
        buttonPane.add(button);
        button = new JRadioButton("Contiguous");
        button.addActionListener(new AbstractAction() {
            public boolean isEnabled() { return true; }
            public void actionPerformed(ActionEvent e) {
                tree.getSelectionModel().setSelectionMode
                    (TreeSelectionModel.CONTIGUOUS_TREE_SELECTION);
            }
        });
        group.add(button);
        buttonPane.add(button);
        button = new JRadioButton("Discontiguous");
        button.addActionListener(new AbstractAction() {
            public boolean isEnabled() { return true; }
            public void actionPerformed(ActionEvent e) {
                tree.getSelectionModel().setSelectionMode
                    (TreeSelectionModel.DISCONTIGUOUS_TREE_SELECTION);
            }
        });
        // Discontiguous is the default selection mode for this demo.
        button.setSelected(true);
        group.add(button);
        buttonPane.add(button);

        borderPane.add(buttonPane, BorderLayout.SOUTH);

        // NOTE: This will be enabled in a future release.
        // Create a label and combobox to determine how many clicks are
        // needed to expand.
/*
        JPanel               clickPanel = new JPanel();
        Object[]             values = { "Never", new Integer(1),
                                        new Integer(2), new Integer(3) };
        final JComboBox      clickCBox = new JComboBox(values);

        clickPanel.setLayout(new FlowLayout());
        clickPanel.add(new JLabel("Click count to expand:"));
        clickCBox.setSelectedIndex(2);
        clickCBox.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent ae) {
                Object       selItem = clickCBox.getSelectedItem();

                if(selItem instanceof Integer)
                    tree.setToggleClickCount(((Integer)selItem).intValue());
                else // Don't toggle
                    tree.setToggleClickCount(0);
            }
        });
        clickPanel.add(clickCBox);
        borderPane.add(clickPanel, BorderLayout.NORTH);
*/
        return borderPane;
    }

    /** Construct a menu: File (Exit) and Tree (Add/Insert/Reload/Remove),
     * the latter wired to the inner Action classes below. */
    private JMenuBar constructMenuBar() {
        JMenu menu;
        JMenuBar menuBar = new JMenuBar();
        JMenuItem menuItem;

        /* Good ol exit. */
        menu = new JMenu("File");
        menuBar.add(menu);

        menuItem = menu.add(new JMenuItem("Exit"));
        menuItem.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                System.exit(0);
            }});

        /* Tree related stuff. */
        menu = new JMenu("Tree");
        menuBar.add(menu);

        menuItem = menu.add(new JMenuItem("Add"));
        menuItem.addActionListener(new AddAction());

        menuItem = menu.add(new JMenuItem("Insert"));
        menuItem.addActionListener(new InsertAction());

        menuItem = menu.add(new JMenuItem("Reload"));
        menuItem.addActionListener(new ReloadAction());

        menuItem = menu.add(new JMenuItem("Remove"));
        menuItem.addActionListener(new RemoveAction());

        return menuBar;
    }

    /**
     * Returns the TreeNode instance that is selected in the tree.
     * If nothing is selected, null is returned.
     */
    protected DefaultMutableTreeNode getSelectedNode() {
        TreePath selPath = tree.getSelectionPath();

        if(selPath != null)
            return (DefaultMutableTreeNode)selPath.getLastPathComponent();
        return null;
    }

    /**
     * Returns the selected TreePaths in the tree, may return null if
     * nothing is selected.
     */
    protected TreePath[] getSelectedPaths() {
        return tree.getSelectionPaths();
    }

    /** Creates a DynamicTreeNode wrapping a SampleData with the given name,
     * no font (null) and black text. */
    protected DefaultMutableTreeNode createNewNode(String name) {
        return new DynamicTreeNode(new SampleData(null, Color.black, name));
    }

    /**
     * AddAction is used to add a new item after the selected item.
     */
    class AddAction extends Object implements ActionListener {
        /** Number of nodes that have been added. */
        public int addCount;

        /**
         * Messaged when the user clicks on the Add menu item.
         * Determines the selection from the Tree and adds an item
         * after that. If nothing is selected, an item is added to
         * the root.
         */
        public void actionPerformed(ActionEvent e) {
            DefaultMutableTreeNode lastItem = getSelectedNode();
            DefaultMutableTreeNode parent;

            /* Determine where to create the new node. */
            if(lastItem != null) {
                parent = (DefaultMutableTreeNode)lastItem.getParent();
                if(parent == null) {
                    // Selected node was the root: add under the root itself.
                    parent = (DefaultMutableTreeNode)treeModel.getRoot();
                    lastItem = null;
                }
            }
            else
                parent = (DefaultMutableTreeNode)treeModel.getRoot();
            if (parent == null) {
                // new root
                treeModel.setRoot(createNewNode("Added " +
                                                Integer.toString(addCount++)));
            }
            else {
                int newIndex;
                if(lastItem == null)
                    newIndex = treeModel.getChildCount(parent);
                else
                    newIndex = parent.getIndex(lastItem) + 1;

                /* Let the treemodel know. */
                treeModel.insertNodeInto(createNewNode("Added " +
                                             Integer.toString(addCount++)),
                                         parent, newIndex);
            }
        }
    } // End of SampleTree.AddAction

    /**
     * InsertAction is used to insert a new item before the selected item.
     */
    class InsertAction extends Object implements ActionListener {
        /** Number of nodes that have been added. */
        public int insertCount;

        /**
         * Messaged when the user clicks on the Insert menu item.
         * Determines the selection from the Tree and inserts an item
         * after that. If nothing is selected, an item is added to
         * the root.
         */
        public void actionPerformed(ActionEvent e) {
            DefaultMutableTreeNode lastItem = getSelectedNode();
            DefaultMutableTreeNode parent;

            /* Determine where to create the new node. */
            if(lastItem != null) {
                parent = (DefaultMutableTreeNode)lastItem.getParent();
                if(parent == null) {
                    // Selected node was the root: insert under the root.
                    parent = (DefaultMutableTreeNode)treeModel.getRoot();
                    lastItem = null;
                }
            }
            else
                parent = (DefaultMutableTreeNode)treeModel.getRoot();
            if (parent == null) {
                // new root
                treeModel.setRoot(createNewNode("Inserted " +
                                             Integer.toString(insertCount++)));
            }
            else {
                int newIndex;

                if(lastItem == null)
                    newIndex = treeModel.getChildCount(parent);
                else
                    // Unlike AddAction (index + 1), insert takes the
                    // selected node's own index, i.e. inserts before it.
                    newIndex = parent.getIndex(lastItem);

                /* Let the treemodel know. */
                treeModel.insertNodeInto(createNewNode("Inserted " +
                                             Integer.toString(insertCount++)),
                                         parent, newIndex);
            }
        }
    } // End of SampleTree.InsertAction

    /**
     * ReloadAction is used to reload from the selected node. If nothing
     * is selected, reload is not issued.
     */
    class ReloadAction extends Object implements ActionListener {
        /**
         * Messaged when the user clicks on the Reload menu item.
         * Determines the selection from the Tree and asks the treemodel
         * to reload from that node.
         */
        public void actionPerformed(ActionEvent e) {
            DefaultMutableTreeNode lastItem = getSelectedNode();

            if(lastItem != null)
                treeModel.reload(lastItem);
        }
    } // End of SampleTree.ReloadAction

    /**
     * RemoveAction removes the selected node from the tree. If
     * The root or nothing is selected nothing is removed.
     */
    class RemoveAction extends Object implements ActionListener {
        /**
         * Removes the selected item as long as it isn't root.
         */
        public void actionPerformed(ActionEvent e) {
            TreePath[] selected = getSelectedPaths();

            if (selected != null && selected.length > 0) {
                TreePath shallowest;

                // The remove process consists of the following steps:
                // 1 - find the shallowest selected TreePath, the shallowest
                //     path is the path with the smallest number of path
                //     components.
                // 2 - Find the siblings of this TreePath
                // 3 - Remove from selected the TreePaths that are descendants
                //     of the paths that are going to be removed. They will
                //     be removed as a result of their ancestors being
                //     removed.
                // 4 - continue until selected contains only null paths.
                while ((shallowest = findShallowestPath(selected)) != null) {
                    removeSiblings(shallowest, selected);
                }
            }
        }

        /**
         * Removes the sibling TreePaths of <code>path</code>, that are
         * located in <code>paths</code>. Entries consumed (or implicitly
         * removed as descendants) are nulled out in <code>paths</code>.
         */
        private void removeSiblings(TreePath path, TreePath[] paths) {
            // Find the siblings
            if (path.getPathCount() == 1) {
                // Special case, set the root to null
                for (int counter = paths.length - 1; counter >= 0;
                     counter--) {
                    paths[counter] = null;
                }
                treeModel.setRoot(null);
            }
            else {
                // Find the siblings of path.
                TreePath parent = path.getParentPath();
                MutableTreeNode parentNode = (MutableTreeNode)parent.
                                getLastPathComponent();
                // NOTE(review): raw ArrayList — generified collections would
                // remove the casts below, but the demo predates generics.
                ArrayList toRemove = new ArrayList();
                // NOTE(review): depth is never read — dead local.
                int depth = parent.getPathCount();

                // First pass, find paths with a parent TreePath of parent
                for (int counter = paths.length - 1; counter >= 0;
                     counter--) {
                    if (paths[counter] != null && paths[counter].
                              getParentPath().equals(parent)) {
                        toRemove.add(paths[counter]);
                        paths[counter] = null;
                    }
                }

                // Second pass, remove any paths that are descendants of the
                // paths that are going to be removed. These paths are
                // implicitly removed as a result of removing the paths in
                // toRemove
                int rCount = toRemove.size();
                for (int counter = paths.length - 1; counter >= 0;
                     counter--) {
                    if (paths[counter] != null) {
                        for (int rCounter = rCount - 1; rCounter >= 0;
                             rCounter--) {
                            if (((TreePath)toRemove.get(rCounter)).
                                           isDescendant(paths[counter])) {
                                paths[counter] = null;
                            }
                        }
                    }
                }

                // Sort the siblings based on position in the model
                if (rCount > 1) {
                    Collections.sort(toRemove, new PositionComparator());
                }
                int[] indices = new int[rCount];
                Object[] removedNodes = new Object[rCount];
                // Walk backwards so earlier removals don't shift the indices
                // of nodes not yet removed.
                for (int counter = rCount - 1; counter >= 0; counter--) {
                    removedNodes[counter] = ((TreePath)toRemove.get(counter)).
                                            getLastPathComponent();
                    indices[counter] = treeModel.getIndexOfChild
                                       (parentNode, removedNodes[counter]);
                    parentNode.remove(indices[counter]);
                }
                treeModel.nodesWereRemoved(parentNode, indices, removedNodes);
            }
        }

        /**
         * Returns the TreePath with the smallest path count in
         * <code>paths</code>. Will return null if there is no non-null
         * TreePath in <code>paths</code>.
         */
        private TreePath findShallowestPath(TreePath[] paths) {
            int shallowest = -1;
            TreePath shallowestPath = null;

            for (int counter = paths.length - 1; counter >= 0; counter--) {
                if (paths[counter] != null) {
                    if (shallowest != -1) {
                        if (paths[counter].getPathCount() < shallowest) {
                            shallowest = paths[counter].getPathCount();
                            shallowestPath = paths[counter];
                            if (shallowest == 1) {
                                // Path count 1 is the root — can't get
                                // shallower, stop early.
                                return shallowestPath;
                            }
                        }
                    }
                    else {
                        shallowestPath = paths[counter];
                        shallowest = paths[counter].getPathCount();
                    }
                }
            }
            return shallowestPath;
        }

        /**
         * An Comparator that bases the return value on the index of the
         * passed in objects in the TreeModel.
         * <p>
         * This is actually rather expensive, it would be more efficient
         * to extract the indices and then do the comparison.
         */
        private class PositionComparator implements Comparator {
            public int compare(Object o1, Object o2) {
                TreePath p1 = (TreePath)o1;
                int o1Index = treeModel.getIndexOfChild(p1.getParentPath().
                              getLastPathComponent(), p1.getLastPathComponent());
                TreePath p2 = (TreePath)o2;
                int o2Index = treeModel.getIndexOfChild(p2.getParentPath().
                              getLastPathComponent(), p2.getLastPathComponent());
                return o1Index - o2Index;
            }

            public boolean equals(Object obj) {
                return super.equals(obj);
            }
        }
    } // End of SampleTree.RemoveAction

    /**
     * ShowHandlesChangeListener implements the ChangeListener interface
     * to toggle the state of showing the handles in the tree.
     */
    class ShowHandlesChangeListener extends Object implements ChangeListener {
        public void stateChanged(ChangeEvent e) {
            tree.setShowsRootHandles(((JCheckBox)e.getSource()).isSelected());
        }
    } // End of class SampleTree.ShowHandlesChangeListener

    /**
     * ShowRootChangeListener implements the ChangeListener interface
     * to toggle the state of showing the root node in the tree.
     */
    class ShowRootChangeListener extends Object implements ChangeListener {
        public void stateChanged(ChangeEvent e) {
            tree.setRootVisible(((JCheckBox)e.getSource()).isSelected());
        }
    } // End of class SampleTree.ShowRootChangeListener

    /**
     * TreeEditableChangeListener implements the ChangeListener interface
     * to toggle between allowing editing and not allowing editing in
     * the tree.
     */
    class TreeEditableChangeListener extends Object implements ChangeListener {
        public void stateChanged(ChangeEvent e) {
            tree.setEditable(((JCheckBox)e.getSource()).isSelected());
        }
    } // End of class SampleTree.TreeEditableChangeListener

    /** Entry point: builds and shows the demo window. */
    static public void main(String args[]) {
        new SampleTree();
    }
}
/*
 * Copyright 2014 University of Southern California
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package edu.usc.pgroup.floe.resourcemanager;

import edu.usc.pgroup.floe.container.ContainerInfo;
import edu.usc.pgroup.floe.thriftgen.AlternateNotFoundException;
import edu.usc.pgroup.floe.thriftgen.ScaleDirection;
import edu.usc.pgroup.floe.thriftgen.TFloeApp;
import edu.usc.pgroup.floe.thriftgen.TPellet;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Random;

/**
 * Cluster resource manager: maps pellet instances onto available
 * containers, preferring to co-locate instances of the same pellet on
 * the same container, and packing containers before spilling over.
 *
 * @author kumbhare
 */
public class ClusterResourceManager extends ResourceManager {

    /**
     * Logger.
     * FIX: was LoggerFactory.getLogger(ResourceMapping.class), which made
     * every log line from this class appear under the wrong logger name.
     */
    private static final Logger LOGGER =
            LoggerFactory.getLogger(ClusterResourceManager.class);

    /**
     * default constructor.
     */
    public ClusterResourceManager() {

    }

    /**
     * Gets the initial resource mapping for the Floe Application.
     * @param appName Application name.
     * @param app The fully configured floe application,
     *            with optional estimated input data rates and processing
     *            costs for different pellets.
     * @return returns an initial mapping from pellet instances to
     *         containers, or null if no containers could be acquired or
     *         a pellet could not be placed on any container.
     */
    @Override
    public final ResourceMapping getInitialMapping(
            final String appName, final TFloeApp app) {
        ResourceMapping mapping = new ResourceMapping(appName, app);
        List<ContainerInfo> containers = getAvailableContainersWithRetry();
        if (containers == null || containers.size() <= 0) {
            LOGGER.error("Error occurred while acquiring and all attempts to "
                    + "retry have failed.");
            return null;
        }

        for (Map.Entry<String, TPellet> pelletEntry
                : app.get_pellets().entrySet()) {
            // Parallelism <= 0 means "use the default of one instance".
            int numInstances = 1;
            if (pelletEntry.getValue().get_parallelism() > 0) {
                numInstances = pelletEntry.getValue().get_parallelism();
            }

            for (int cnt = 0; cnt < numInstances; cnt++) {
                ContainerInfo container = getFreeContainer(
                        pelletEntry.getKey(), containers, mapping);
                // Robustness FIX: getFreeContainer may return null when no
                // container is feasible; previously that null was handed
                // straight to createNewInstance.
                if (container == null) {
                    LOGGER.error("No feasible container found for pellet: {}.",
                            pelletEntry.getKey());
                    return null;
                }
                mapping.createNewInstance(pelletEntry.getKey(),
                        container, null);
            }
        }
        return mapping;
    }

    /**
     * This tries to isolate pellets on different containers. So if a
     * container already contains a certain type of pellet,
     * it is preferred to keep the same type of pellet on that container.
     * A container is feasible when it has free cores, hosts no flakes of
     * a different pellet, and the in-progress mapping has not already
     * claimed all of its cores. Among feasible containers the one with
     * the most cores already used by the mapping wins (bin-packing).
     * @param pelletId pelletid.
     * @param containers list of available containers (from ZK).
     * @param mapping Current resource mapping.
     * @return free container for the given Pellet, or null if none is
     *         feasible.
     */
    public final ContainerInfo getFreeContainer(
            final String pelletId,
            final List<ContainerInfo> containers,
            final ResourceMapping mapping) {
        List<ContainerInfo> feasible = new ArrayList<>();
        for (ContainerInfo container : containers) {
            // If there are no cores available, move to next container.
            if (container.getAvailableCores() == 0) {
                LOGGER.info("No available cores: {}",
                        container.getContainerId());
                continue;
            }

            LOGGER.info("Flakes on {} : {}", container.getContainerId(),
                    container.getCurrentFlakes());

            // If the container already hosts flakes, require one of them to
            // belong to this pellet; otherwise skip it (keeps one pellet
            // type per container).
            if (container.getCurrentFlakes() != null
                    && container.getCurrentFlakes().size() > 0) {
                if (!container.getCurrentFlakes().keySet()
                        .contains(pelletId)) {
                    LOGGER.info("Other pellet running: {}, expected {}",
                            container.getContainerId(), pelletId);
                    continue;
                }
            }

            // The container has free cores physically, but the current
            // (in-progress) mapping may already have claimed them all.
            int usedCores = mapping.getUsedCores(container.getContainerId());
            if (usedCores >= container.getNumCores()) {
                LOGGER.info("Resource full: {}, used:{}, available:{}",
                        container.getContainerId(), usedCores,
                        container.getNumCores());
                continue;
            }

            LOGGER.info("Feasible: {}", container.getContainerId());
            feasible.add(container);
        }

        // Find the feasible container with the MOST cores already used by
        // this mapping. FIX: the accumulator was misleadingly named "min"
        // under a comment saying "Most cores used"; the loop has always
        // selected the maximum — renamed to match the actual behavior.
        int mostUsed = -1;
        ContainerInfo best = null;
        for (ContainerInfo c : feasible) {
            int usedCores = mapping.getUsedCores(c.getContainerId());
            if (usedCores > mostUsed) {
                LOGGER.info("best: {}", c.getContainerId());
                best = c;
                mostUsed = usedCores;
            }
        }
        return best;
    }

    /**
     * Updates the resource mapping based on the current floe application
     * status (and perf. numbers).
     * NOTE: not implemented yet — always returns null.
     *
     * @param app The current application status. (the perf numbers are not
     *            included, which can be fetched from the Zookeeper)
     * @param current Current resource mapping.
     * @return will return ONLY the updated (added/removed/changed) mapping
     * parameters. anything not included should remain the same.
     */
    @Override
    public final ResourceMapping updateResourceMapping(
            final TFloeApp app, final ResourceMapping current) {
        return null;
    }

    /**
     * Scales the given pellet up or down for the given number of instances.
     *
     * @param current the current resource mapping.
     * @param direction direction of scaling.
     * @param pelletName name of the pellet to scale.
     * @param count the number of instances to scale up/down.
     * @return the updated resource mapping with the ResourceMappingDelta set
     * appropriately, or null on failure.
     */
    @Override
    public final ResourceMapping scale(final ResourceMapping current,
                                       final ScaleDirection direction,
                                       final String pelletName,
                                       final int count) {
        current.resetDelta();
        if (direction == ScaleDirection.up) {
            List<ContainerInfo> containers = getAvailableContainersWithRetry();
            if (containers == null || containers.size() <= 0) {
                // FIX: the two concatenated string constants were missing a
                // space, producing "attemptsto retry" in the log.
                LOGGER.error("Error occurred while acquiring and all attempts "
                        + "to retry have failed.");
                return null;
            }

            // TODO: Order containers w.r.t availability.
            // Add to containers chosen by getFreeContainer (packing).
            for (int i = 0; i < count; i++) {
                ContainerInfo container = getFreeContainer(
                        pelletName, containers, current);
                current.createNewInstance(pelletName, container, null);
            }
        } else if (direction == ScaleDirection.down) {
            List<ResourceMapping.FlakeInstance> flakes =
                    current.getFlakeInstancesForPellet(pelletName);
            if (flakes == null || flakes.size() <= 0) {
                LOGGER.error("No flakes executing the given pellet exist.");
                return null;
            }

            LOGGER.info("Flakes:{}", flakes);

            // TODO: Order containers w.r.t availability.
            // Randomly select a flake to remove an instance from, then
            // continue round-robin over the flakes.
            int cindex = new Random().nextInt(flakes.size());
            for (int i = 0; i < count; i++) {
                current.removePelletInstance(pelletName, flakes.get(cindex++));
                if (cindex == flakes.size()) {
                    cindex = 0;
                }
            }
        }
        return current;
    }

    /**
     * Switches the active alternate for the pellet.
     *
     * @param currentMapping the current resource mapping (this is for a
     *                       particular app, so no need floe app parameter).
     * @param pelletName name of the pellet to switch alternate for.
     * @param alternateName the name of the alternate to switch to.
     * @return the updated resource mapping with the ResourceMappingDelta set
     * appropriately.
     * @throws edu.usc.pgroup.floe.thriftgen.AlternateNotFoundException if
     * the alternate is not found for the given pellet.
     */
    @Override
    public final ResourceMapping switchAlternate(
            final ResourceMapping currentMapping,
            final String pelletName,
            final String alternateName) throws AlternateNotFoundException {
        currentMapping.resetDelta();
        if (!currentMapping.switchAlternate(pelletName, alternateName)) {
            // FIX: pellet and alternate names were swapped in both the log
            // statement and the exception message ("The given alternate:
            // <pellet> for pellet: <alternate>").
            LOGGER.error("The given alternate: {} for pellet: {} does not "
                    + "exist.", alternateName, pelletName);
            throw new AlternateNotFoundException("The given alternate: "
                    + alternateName + " for pellet: " + pelletName
                    + " does not exist.");
        }
        return currentMapping;
    }

    /**
     * Prepares the application to be killed by removing all pelletinstances,
     * all flakes.
     *
     * @param currentMapping current resource mapping for the application to be
     *                       killed.
     * @return the updated resource mapping with the ResourceMappingDelta set
     * appropriately.
     */
    @Override
    public final ResourceMapping kill(final ResourceMapping currentMapping) {
        currentMapping.resetDelta();

        for (String peName
                : currentMapping.getFloeApp().get_pellets().keySet()) {
            List<ResourceMapping.FlakeInstance> flakes =
                    currentMapping.getFlakeInstancesForPellet(peName);

            if (flakes == null || flakes.size() <= 0) {
                LOGGER.warn("No flakes executing the given pellet exist.");
                continue;
            }

            LOGGER.info("Flakes:{}", flakes);

            // NOTE(review): this loop assumes removePelletInstance removes
            // the flake from the live `flakes` list once its last instance
            // is gone — otherwise the while loop would not terminate.
            // TODO confirm against ResourceMapping.
            ResourceMapping.FlakeInstance flake;
            while (flakes.size() > 0) {
                flake = flakes.get(0);
                int numInstances = flake.getNumPelletInstances();
                for (int i = 0; i < numInstances; i++) {
                    currentMapping.removePelletInstance(peName, flake);
                }
            }
        }
        return currentMapping;
    }
}
package org.hibernate.connection;

import java.lang.management.ManagementFactory;
import java.lang.reflect.Method;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;

import javax.management.MBeanServer;
import javax.management.ObjectName;

import org.apache.tomcat.jdbc.pool.DataSource;
import org.apache.tomcat.jdbc.pool.PoolConfiguration;
import org.apache.tomcat.jdbc.pool.PoolProperties;
import org.hibernate.HibernateException;
import org.hibernate.cfg.Environment;
import org.hibernate.service.jdbc.connections.internal.ConnectionProviderInitiator;
import org.hibernate.service.jdbc.connections.spi.ConnectionProvider;
import org.hibernate.service.spi.Configurable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * <p>A connection provider that uses the Tomcat JDBC connection pool outside Tomcat container</p>
 *
 * <p>To use this connection provider set:<br>
 * <code>hibernate.connection.provider_class&nbsp;org.hibernate.connection.TomcatJDBCConnectionProvider</code></p>
 *
 * <pre>Supported Hibernate properties:
 *   hibernate.connection.driver_class
 *   hibernate.connection.url
 *   hibernate.connection.username
 *   hibernate.connection.password
 *   hibernate.connection.isolation
 *   hibernate.connection.autocommit
 *   hibernate.connection.pool_size
 *   hibernate.connection (JDBC driver properties)</pre>
 * <br>
 *
 * N.B.: All Tomcat JDBC connection pool properties are also supported by using
 * the hibernate.tomcatJdbcPool prefix.
 *
 * @author Guenther Demetz
 */
public class TomcatJDBCConnectionProvider implements ConnectionProvider, Configurable {

    private static final Logger log = LoggerFactory.getLogger(TomcatJDBCConnectionProvider.class);

    /** Prefix for pass-through Tomcat JDBC pool properties. */
    private static final String PREFIX = "hibernate.tomcatJdbcPool.";

    /** The pool-backed data source; created in configure(), released in close(). */
    private DataSource ds;

    /** Pool configuration assembled from the Hibernate properties. */
    PoolProperties tomcatJdbcPoolProperties;

    /**
     * Builds the Tomcat JDBC pool from the Hibernate configuration
     * properties, applies any "hibernate.tomcatJdbcPool.*" pass-through
     * settings via reflection, creates the pool and (if JMX is enabled)
     * registers its MBean.
     *
     * @param props Hibernate configuration properties.
     * @throws HibernateException if the pool cannot be created; the
     *         partially-created pool is closed before rethrowing.
     */
    @Override
    public void configure(Map props) throws HibernateException {
        try {
            log.debug("Configure TomcatJDBCConnectionProvider");

            // Tomcat JDBC connection pool properties used to create the DataSource
            tomcatJdbcPoolProperties = new PoolProperties();

            // DriverClass & url
            String jdbcDriverClass = (String) props.get(Environment.DRIVER);
            String jdbcUrl = (String) props.get(Environment.URL);
            tomcatJdbcPoolProperties.setDriverClassName(jdbcDriverClass);
            tomcatJdbcPoolProperties.setUrl(jdbcUrl);
            //tomcatJdbcPoolProperties.setJmxEnabled(true); thats the default

            // Username / password
            String username = (String) props.get(Environment.USER);
            String password = (String) props.get(Environment.PASS);
            tomcatJdbcPoolProperties.setUsername(username);
            tomcatJdbcPoolProperties.setPassword(password);

            // Isolation level
            String isolationLevel = (String) props.get(Environment.ISOLATION);
            if ((isolationLevel != null) && (isolationLevel.trim().length() > 0)) {
                tomcatJdbcPoolProperties.setDefaultTransactionIsolation(Integer.parseInt(isolationLevel));
            }

            // Pool size
            String poolSize = (String) props.get(Environment.POOL_SIZE);
            if ((poolSize != null) && (poolSize.trim().length() > 0)) {
                tomcatJdbcPoolProperties.setMaxActive(Integer.parseInt(poolSize));
            }

            // Copy all "driver" properties into "connectionProperties"
            // (semicolon-separated key=value list, as Tomcat JDBC expects).
            Properties driverProps = ConnectionProviderInitiator.getConnectionProperties(props);
            if (driverProps.size() > 0) {
                StringBuffer connectionProperties = new StringBuffer();
                for (Iterator iter = driverProps.entrySet().iterator(); iter.hasNext();) {
                    Map.Entry entry = (Map.Entry) iter.next();
                    String key = (String) entry.getKey();
                    String value = (String) entry.getValue();
                    connectionProperties.append(key).append('=').append(value);
                    if (iter.hasNext()) {
                        connectionProperties.append(';');
                    }
                }
                tomcatJdbcPoolProperties.setConnectionProperties(connectionProperties.toString());
            }

            // Copy all Tomcat JDBCPool properties, removing the prefix, and
            // apply each one via the matching PoolConfiguration setter.
            for (Iterator iter = props.entrySet().iterator(); iter.hasNext();) {
                Map.Entry entry = (Map.Entry) iter.next();
                String key = (String) entry.getKey();
                if (key.startsWith(PREFIX)) {
                    applyPoolProperty(key.substring(PREFIX.length()), key,
                            (String) entry.getValue());
                }
            }

            // Let the factory create the pool
            ds = new DataSource();
            ds.setPoolProperties(tomcatJdbcPoolProperties);
            ds.createPool();

            if (ds.getPoolProperties().isJmxEnabled()) {
                MBeanServer mBeanServer = ManagementFactory.getPlatformMBeanServer();
                ObjectName objectname = null;
                try {
                    objectname = new ObjectName("ConnectionPool:name=" + tomcatJdbcPoolProperties.getName());
                    if (!mBeanServer.isRegistered(objectname)) {
                        mBeanServer.registerMBean(ds.getPool().getJmxPool(), objectname);
                    }
                } catch (Exception e) {
                    // JMX registration is best-effort; the pool itself works
                    // without the MBean. FIX: log instead of printStackTrace.
                    log.warn("Unable to register connection pool MBean", e);
                }
            }

            // Log pool statistics before continuing.
            logStatistics();
        } catch (Exception e) {
            String message = "Could not create a TomcatJDBC pool";
            log.error(message, e);
            if (ds != null) {
                try {
                    ds.close();
                } catch (Exception e2) {
                    // ignore — already failing; close is best-effort cleanup
                }
                ds = null;
            }
            throw new HibernateException(message, e);
        }
        log.debug("Configure TomcatJDBCConnectionProvider complete");
    }

    /**
     * Applies a single pass-through pool property by locating the matching
     * setter on PoolConfiguration (case-insensitive "set" + property).
     *
     * @param property property name with the prefix stripped.
     * @param key      full original key, used in error messages.
     * @param value    raw string value; converted to the setter's type.
     * @throws RuntimeException if no setter matches or invoking it fails.
     *         FIX: the original swallowed the invocation exception and
     *         rethrew without the cause; the cause is now preserved.
     */
    private void applyPoolProperty(String property, String key, String value) {
        for (Method m : PoolConfiguration.class.getMethods()) {
            if (m.getName().equalsIgnoreCase("set" + property)) {
                Object parameter = convertIntoTypedValue(m.getParameterTypes()[0], value);
                try {
                    m.invoke(tomcatJdbcPoolProperties, new Object[]{ parameter });
                } catch (Exception e) {
                    log.error("Unable to parse property " + key + " with value: " + value, e);
                    throw new RuntimeException("Unable to parse property " + key + " with value: " + value, e);
                }
                return;
            }
        }
        log.error("Unable to parse property " + key + " with value: " + value);
        throw new RuntimeException("Unable to parse property " + key + " with value: " + value);
    }

    /**
     * Converts a string value to the (primitive or String) type expected by
     * a PoolConfiguration setter.
     *
     * @throws RuntimeException for unsupported parameter types.
     */
    private Object convertIntoTypedValue(Class clazz, String value) {
        if (clazz.isAssignableFrom(boolean.class)) {
            return Boolean.parseBoolean(value);
        }
        else if (clazz.isAssignableFrom(int.class)) {
            return Integer.parseInt(value);
        }
        else if (clazz.isAssignableFrom(long.class)) {
            return Long.parseLong(value);
        }
        else if (clazz.equals(String.class)) {
            return value;
        }
        else {
            throw new RuntimeException("Unsupported Parameter type " + clazz);
        }
    }

    /**
     * Borrows a connection from the pool. Statistics are logged even when
     * the borrow fails.
     */
    public Connection getConnection() throws SQLException {
        Connection conn = null;
        try {
            conn = ds.getConnection();
        } finally {
            logStatistics();
        }
        return conn;
    }

    /** Returns a connection to the pool. */
    public void closeConnection(Connection conn) throws SQLException {
        try {
            conn.close();
        } finally {
            logStatistics();
        }
    }

    /**
     * Closes the pool and releases the data source.
     *
     * @throws HibernateException if closing the pool fails.
     */
    public void close() throws HibernateException {
        log.debug("Close TomcatJDBCConnectionProvider");
        logStatistics();
        try {
            if (ds != null) {
                ds.close();
                ds = null;
            }
            else {
                log.warn("Cannot close TomcatJDBCConnectionProvider, pool (not initialized)");
            }
        } catch (Exception e) {
            // FIX: message said "DBCP pool" — copy-paste from the DBCP
            // provider; this is the Tomcat JDBC pool.
            throw new HibernateException("Could not close TomcatJDBC pool", e);
        }
        log.debug("Close TomcatJDBCConnectionProvider complete");
    }

    /**
     * Logs pool usage counters.
     * FIX: the original guarded with isDebugEnabled() but then logged at
     * INFO level; guard and call now agree (debug).
     */
    protected void logStatistics() {
        if (log.isDebugEnabled()) {
            log.debug("active: " + ds.getNumActive() + " (max: " + ds.getMaxActive() + ")   "
                    + "idle: " + ds.getNumIdle() + "(max: " + ds.getMaxIdle() + ")");
        }
    }

    /** This provider never hands out aggressively-released connections. */
    public boolean supportsAggressiveRelease() {
        return false;
    }

    @Override
    public boolean isUnwrappableAs(Class unwrapType) {
        return false;
    }

    @Override
    public <T> T unwrap(Class<T> unwrapType) {
        return null;
    }
}
/* * Copyright (c) 2013-2015 Cinchapi Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.cinchapi.concourse.security; import java.io.File; import java.nio.ByteBuffer; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.TimeUnit; import org.cinchapi.concourse.ConcourseBaseTest; import org.cinchapi.concourse.server.io.FileSystem; import org.cinchapi.concourse.testing.Variables; import org.cinchapi.concourse.thrift.AccessToken; import org.cinchapi.concourse.time.Time; import org.cinchapi.concourse.util.ByteBuffers; import org.cinchapi.concourse.util.TestData; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestRule; import org.junit.rules.TestWatcher; import org.junit.runner.Description; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; /** * Unit tests for {@link AccessManager}. 
* * @author Jeff Nelson */ public class AccessManagerTest extends ConcourseBaseTest { private String current = null; private AccessManager manager = null; @Rule public TestRule watcher = new TestWatcher() { @Override protected void finished(Description desc) { FileSystem.deleteFile(current); } @Override protected void starting(Description desc) { current = TestData.DATA_DIR + File.separator + Time.now(); manager = AccessManager.create(current); } }; @Test public void testDefaultAdminLogin() { ByteBuffer username = ByteBuffer.wrap("admin".getBytes()); ByteBuffer password = ByteBuffer.wrap("admin".getBytes()); Assert.assertTrue(manager.isExistingUsernamePasswordCombo(username, password)); } @Test public void testChangeAdminPassword() { ByteBuffer username = ByteBuffer.wrap("admin".getBytes()); ByteBuffer password = ByteBuffer.wrap("admin".getBytes()); ByteBuffer newPassword = getSecurePassword(); manager.createUser(username, newPassword); Assert.assertFalse(manager.isExistingUsernamePasswordCombo(username, password)); Assert.assertTrue(manager.isExistingUsernamePasswordCombo(username, newPassword)); } @Test public void testAddUsers() { Map<ByteBuffer, ByteBuffer> users = Maps.newHashMap(); for (int i = 0; i < TestData.getScaleCount(); i++) { ByteBuffer username = getAcceptableUsername(); ByteBuffer password = getSecurePassword(); users.put(username, password); manager.createUser(username, password); } for (Entry<ByteBuffer, ByteBuffer> entry : users.entrySet()) { Assert.assertTrue(manager.isExistingUsernamePasswordCombo( entry.getKey(), entry.getValue())); } } @Test public void testAllUsersHaveUniqueUids() { Set<ByteBuffer> emptySet = Sets.newHashSet(); Set<ByteBuffer> users = (Set<ByteBuffer>) addMoreUsers(emptySet, manager); Set<Short> uniqueUids = Sets.newHashSet(); for (ByteBuffer username : users) { short uid = manager.getUidByUsername(username); Assert.assertFalse(uniqueUids.contains(uid)); // check uniqueness uniqueUids.add(uid); } AccessManager manager2 = 
AccessManager.create(current); // simulate // server // restart by // creating new // manager users = (Set<ByteBuffer>) addMoreUsers(users, manager2); uniqueUids = Sets.newHashSet(); for (ByteBuffer username : users) { short uid = manager2.getUidByUsername(username); Assert.assertFalse(uniqueUids.contains(uid)); // check uniqueness uniqueUids.add(uid); } } @Test public void testAllUsersHaveUniqueUidsAfterSomeUserDeletions() { List<ByteBuffer> emptyList = Lists.newArrayList(); List<ByteBuffer> users = (List<ByteBuffer>) addMoreUsers(emptyList, manager); users = deleteSomeUsers(users, manager); Set<Short> uniqueUids = Sets.newHashSet(); for (ByteBuffer username : users) { short uid = manager.getUidByUsername(username); Assert.assertFalse(uniqueUids.contains(uid)); // check uniqueness uniqueUids.add(uid); } AccessManager manager2 = AccessManager.create(current); // simulate // server // restart by // creating new // manager Variables.register("users", users); users = deleteSomeUsers(users, manager2); Variables.register("users_after_delete", Lists.newArrayList(users)); users = (List<ByteBuffer>) addMoreUsers(users, manager2); Variables.register("users_after_add", Lists.newArrayList(users)); uniqueUids = Sets.newHashSet(); Variables.register("uniqueUids", uniqueUids); for (ByteBuffer username : users) { short uid = manager2.getUidByUsername(username); Variables.register("uid", uid); Assert.assertFalse(uniqueUids.contains(uid)); // check uniqueness uniqueUids.add(uid); } } @Test public void testUsersHaveSameUidsAsBeforeSomeUserDeletions() { List<ByteBuffer> emptySet = Lists.newArrayList(); List<ByteBuffer> users = (List<ByteBuffer>) addMoreUsers(emptySet, manager); Map<ByteBuffer, Short> uids = Maps.newHashMap(); for (ByteBuffer username : users) { // retrieve short uid = manager.getUidByUsername(username); // valid uids uids.put(username, uid); // after add users } users = deleteSomeUsers(users, manager); uids = Maps.newHashMap(); for (ByteBuffer username : users) { // 
retrieve short uid = manager.getUidByUsername(username); // valid uids uids.put(username, uid); // after delete users } for (ByteBuffer username : users) { short uid = manager.getUidByUsername(username); Assert.assertEquals((short) uids.get(username), uid);// check // uniqueness } AccessManager manager2 = AccessManager.create(current); // simulate // server // restart by // creating new // manager users = (List<ByteBuffer>) addMoreUsers(users, manager2); for (ByteBuffer username : users) { // retrieve short uid = manager2.getUidByUsername(username); // valid uids uids.put(username, uid); // after add users } for (ByteBuffer username : users) { short uid = manager2.getUidByUsername(username); Assert.assertEquals((short) uids.get(username), uid);// check // uniqueness } } @Test public void testAllUsersHaveSameUidsAsBeforeServerRestarts() { Set<ByteBuffer> emptySet = Sets.newHashSet(); Set<ByteBuffer> users = (Set<ByteBuffer>) addMoreUsers(emptySet, manager); Map<ByteBuffer, Short> uids = Maps.newHashMap(); for (ByteBuffer username : users) { // retrieve valid short uid = manager.getUidByUsername(username); // uids after uids.put(username, uid); // add users } AccessManager manager2 = AccessManager.create(current); // simulate // server // restart by // creating new // manager for (ByteBuffer username : users) { short uid = manager2.getUidByUsername(username); Assert.assertEquals((short) uids.get(username), uid); } } @Test public void testAllUsersHaveSameUidsAsBeforePasswordChange() { Set<ByteBuffer> emptySet = Sets.newHashSet(); Set<ByteBuffer> users = (Set<ByteBuffer>) addMoreUsers(emptySet, manager); Map<ByteBuffer, Short> uids = Maps.newHashMap(); for (ByteBuffer username : users) { // retrieve valid short uid = manager.getUidByUsername(username); // uids after uids.put(username, uid); // add users } for (ByteBuffer username : users) { // change password manager.createUser(username, getSecurePassword()); } for (ByteBuffer username : users) { short uid = 
manager.getUidByUsername(username); Assert.assertEquals((short) uids.get(username), uid); } AccessManager manager2 = AccessManager.create(current); // simulate // server // restart by // creating new // manager for (ByteBuffer username : users) { manager2.createUser(username, getSecurePassword()); // change // password } for (ByteBuffer username : users) { short uid = manager2.getUidByUsername(username); Assert.assertEquals((short) uids.get(username), uid); } } @Test(expected = IllegalArgumentException.class) public void testCantRevokeAdmin() { manager.deleteUser(toByteBuffer("admin")); } @Test public void testRevokeUser() { ByteBuffer username = getAcceptableUsername(); ByteBuffer password = getSecurePassword(); manager.createUser(username, password); manager.deleteUser(username); Assert.assertFalse(manager.isExistingUsernamePasswordCombo(username, password)); } @Test public void testIsValidUsernameAndPassword() { ByteBuffer username = getAcceptableUsername(); ByteBuffer password = getSecurePassword(); ByteBuffer badpassword = toByteBuffer(TestData.getString() + "bad"); manager.createUser(username, password); Assert.assertTrue(manager.isExistingUsernamePasswordCombo(username, password)); Assert.assertFalse(manager.isExistingUsernamePasswordCombo(username, badpassword)); } @Test public void testDiskSync() { Map<ByteBuffer, ByteBuffer> users = Maps.newHashMap(); for (int i = 0; i < TestData.getScaleCount(); i++) { ByteBuffer username = getAcceptableUsername(); ByteBuffer password = getSecurePassword(); users.put(username, password); manager.createUser(username, password); } AccessManager manager2 = AccessManager.create(current); for (Entry<ByteBuffer, ByteBuffer> entry : users.entrySet()) { Assert.assertTrue(manager2.isExistingUsernamePasswordCombo( entry.getKey(), entry.getValue())); } } @Test(expected = IllegalArgumentException.class) public void testCantCreateAccessTokenForInvalidUser() { manager.getNewAccessToken(toByteBuffer(TestData.getString() + "foo")); } 
@Test public void testCanCreateAccessTokenForValidUser() { ByteBuffer username = getAcceptableUsername(); ByteBuffer password = getSecurePassword(); manager.createUser(username, password); AccessToken token = manager.getNewAccessToken(username); Assert.assertTrue(manager.isValidAccessToken(token)); } @Test public void testAccessTokenIsNotValidIfServerRestarts() { ByteBuffer username = getAcceptableUsername(); ByteBuffer password = getSecurePassword(); manager.createUser(username, password); AccessToken token = manager.getNewAccessToken(username); AccessManager manager2 = AccessManager.create(current); // simulate // server // restart by // creating new // manager Assert.assertFalse(manager2.isValidAccessToken(token)); } @Test public void testAccessTokenIsNotValidIfPasswordChanges() { ByteBuffer username = getAcceptableUsername(); ByteBuffer password = getSecurePassword(); ByteBuffer password2 = getSecurePassword(); manager.createUser(username, password); AccessToken token = manager.getNewAccessToken(username); manager.createUser(username, password2); Assert.assertFalse(manager.isValidAccessToken(token)); } @Test public void testAccessTokenIsNotValidIfAccessIsRevoked() { ByteBuffer username = getAcceptableUsername(); ByteBuffer password = getSecurePassword(); manager.createUser(username, password); AccessToken token = manager.getNewAccessToken(username); manager.deleteUser(username); Assert.assertFalse(manager.isValidAccessToken(token)); } @Test public void testAccessTokenAutoExpiration() throws InterruptedException { manager = AccessManager.createForTesting(current, 60, TimeUnit.MILLISECONDS); ByteBuffer username = getAcceptableUsername(); ByteBuffer password = getSecurePassword(); manager.createUser(username, password); AccessToken token = manager.getNewAccessToken(username); TimeUnit.MILLISECONDS.sleep(60); Assert.assertFalse(manager.isValidAccessToken(token)); } @Test public void testInvalidateAccessToken() { ByteBuffer username = getAcceptableUsername(); 
ByteBuffer password = getSecurePassword(); manager.createUser(username, password); AccessToken token = manager.getNewAccessToken(username); manager.expireAccessToken(token); Assert.assertFalse(manager.isValidAccessToken(token)); } @Test public void testTwoAccessTokensForSameUser() { ByteBuffer username = getAcceptableUsername(); ByteBuffer password = getSecurePassword(); manager.createUser(username, password); AccessToken token1 = manager.getNewAccessToken(username); AccessToken token2 = manager.getNewAccessToken(username); Assert.assertNotEquals(token1, token2); Assert.assertTrue(manager.isValidAccessToken(token1)); Assert.assertTrue(manager.isValidAccessToken(token2)); } @Test public void testInvalidatingOneAccessTokenDoesNotAffectOther() { ByteBuffer username = getAcceptableUsername(); ByteBuffer password = getSecurePassword(); manager.createUser(username, password); AccessToken token1 = manager.getNewAccessToken(username); AccessToken token2 = manager.getNewAccessToken(username); manager.expireAccessToken(token2); Assert.assertTrue(manager.isValidAccessToken(token1)); } @Test public void testRevokingAccessInvalidatesAllAccessTokens() { ByteBuffer username = getAcceptableUsername(); ByteBuffer password = getSecurePassword(); manager.createUser(username, password); List<AccessToken> tokens = Lists.newArrayList(); for (int i = 0; i < TestData.getScaleCount(); i++) { tokens.add(manager.getNewAccessToken(username)); } manager.deleteUser(username); for (AccessToken token : tokens) { Assert.assertFalse(manager.isValidAccessToken(token)); } } @Test public void testChangingPasswordInvalidatesAllAccessTokens() { ByteBuffer username = getAcceptableUsername(); ByteBuffer password = getSecurePassword(); manager.createUser(username, password); List<AccessToken> tokens = Lists.newArrayList(); for (int i = 0; i < TestData.getScaleCount(); i++) { tokens.add(manager.getNewAccessToken(username)); } manager.createUser(username, getSecurePassword()); for (AccessToken token : 
tokens) { Assert.assertFalse(manager.isValidAccessToken(token)); } } @Test public void testEmptyPasswordNotSecure() { Assert.assertFalse(AccessManager.isSecurePassword(ByteBuffers .fromString(""))); } @Test public void testAllWhitespacePasswordNotSecure() { Assert.assertFalse(AccessManager.isSecurePassword(ByteBuffers .fromString(" "))); } @Test public void testUsernameWithWhitespaceNotAcceptable() { Assert.assertFalse(AccessManager.isAcceptableUsername(ByteBuffers .fromString(" f "))); } /** * Convert a string to a ByteBuffer. * * @param string * @return the bytebuffer */ protected static ByteBuffer toByteBuffer(String string) { return ByteBuffer.wrap(string.getBytes()); } /** * Return a username that will pass the acceptance test. * * @return username */ protected static ByteBuffer getAcceptableUsername() { ByteBuffer username = null; while (username == null || !AccessManager.isAcceptableUsername(username)) { username = toByteBuffer(TestData.getString()); } return username; } /** * Return a password that will pass the security test. * * @return password */ protected static ByteBuffer getSecurePassword() { ByteBuffer password = null; while (password == null || !AccessManager.isSecurePassword(password)) { password = toByteBuffer(TestData.getString()); } return password; } /** * Return a collection of unique binary usernames that is * added to the specified {@code manager}, which is also a * superset of the {@code existingUsers} and newly added * usernames. 
* * @param existingUsers * @param manager * @return the valid usernames */ private static Collection<ByteBuffer> addMoreUsers( Collection<ByteBuffer> existingUsers, AccessManager manager) { Set<ByteBuffer> usernames = Sets.newHashSet(); int count = TestData.getScaleCount(); while (usernames.size() < count) { ByteBuffer username = getAcceptableUsername(); if(!usernames.contains(username)) { ByteBuffer password = getSecurePassword(); manager.createUser(username, password); existingUsers.add(username); usernames.add(username); } } return existingUsers; } /** * Return a list of binary usernames that is still valid * after some usernames in {@code existingUsers} has been * randomly deleted from {@code manager}. * * @param existingUsers * @param manager * @return the valid usernames */ private static List<ByteBuffer> deleteSomeUsers( List<ByteBuffer> existingUsers, AccessManager manager) { java.util.Random rand = new java.util.Random(); Set<ByteBuffer> removedUsers = Sets.newHashSet(); int count = rand.nextInt(existingUsers.size()); for (int i = 0; i < count; i++) { ByteBuffer username = existingUsers.get(rand.nextInt(existingUsers .size())); removedUsers.add(username); } for (ByteBuffer username : removedUsers) { manager.deleteUser(username); existingUsers.remove(username); } return existingUsers; } }
/******************************************************************************* * (c) Copyright 2017 Hewlett-Packard Development Company, L.P. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License v2.0 which accompany this distribution. * * The Apache License is available at * http://www.apache.org/licenses/LICENSE-2.0 * *******************************************************************************/ package io.cloudslang.content.mail.services; import com.sun.mail.util.ASCIIUtility; import io.cloudslang.content.mail.entities.GetMailMessageInputs; import io.cloudslang.content.mail.entities.SimpleAuthenticator; import io.cloudslang.content.mail.entities.StringOutputStream; import io.cloudslang.content.mail.sslconfig.EasyX509TrustManager; import io.cloudslang.content.mail.sslconfig.SSLUtils; import org.bouncycastle.cms.RecipientId; import org.bouncycastle.cms.RecipientInformation; import org.bouncycastle.cms.RecipientInformationStore; import org.bouncycastle.jce.provider.BouncyCastleProvider; import org.bouncycastle.mail.smime.SMIMEEnveloped; import javax.mail.Authenticator; import javax.mail.BodyPart; import javax.mail.Flags; import javax.mail.Folder; import javax.mail.Message; import javax.mail.MessagingException; import javax.mail.Multipart; import javax.mail.NoSuchProviderException; import javax.mail.Part; import javax.mail.Session; import javax.mail.Store; import javax.mail.URLName; import javax.mail.internet.MimeBodyPart; import javax.mail.internet.MimeMultipart; import javax.mail.internet.MimeUtility; import javax.net.ssl.KeyManager; import javax.net.ssl.SSLContext; import javax.net.ssl.TrustManager; import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.UnsupportedEncodingException; import java.net.URL; import java.security.KeyStore; import java.security.SecureRandom; import java.security.Security; import 
java.security.cert.X509Certificate;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;
import java.util.Properties;

import static org.apache.commons.lang3.StringUtils.isEmpty;
import static org.apache.commons.lang3.StringUtils.isNotEmpty;
import static org.bouncycastle.mail.smime.SMIMEUtil.toMimeBodyPart;

/**
 * Operation that retrieves a single email message from a POP3/IMAP mailbox
 * (optionally over SSL/TLS, optionally S/MIME-decrypted).
 * <p>
 * Created by giloan on 11/3/2014.
 */
public class GetMailMessage {

    // ---- Keys of the result map returned by execute() ----
    public static final String RETURN_RESULT = "returnResult";
    // Name of the MIME header that carries the message subject.
    public static final String SUBJECT_HEADER = "Subject";
    public static final String SUBJECT = "subject";
    public static final String BODY_RESULT = "body";
    public static final String PLAIN_TEXT_BODY_RESULT = "plainTextBody";
    public static final String ATTACHED_FILE_NAMES_RESULT = "attachedFileNames";
    public static final String RETURN_CODE = "returnCode";
    public static final String EXCEPTION = "exception";

    // ---- Generic operation status values ----
    public static final String SUCCESS = "success";
    public static final String FAILURE = "failure";
    public static final String SUCCESS_RETURN_CODE = "0";
    public static final String FAILURE_RETURN_CODE = "-1";

    // URL scheme prefixes used when building keystore URLs.
    public static final String FILE = "file:";
    public static final String HTTP = "http";
    // Default password of the JRE's bundled cacerts keystore.
    public static final String DEFAULT_PASSWORD_FOR_STORE = "changeit";

    // ---- Supported retrieval protocols and their default ports ----
    public static final String POP3 = "pop3";
    public static final String IMAP = "imap";
    public static final String IMAP_4 = "imap4";
    public static final String IMAP_PORT = "143";
    public static final String POP3_PORT = "110";

    // ---- Input-validation error messages ----
    public static final String SPECIFY_PORT_FOR_PROTOCOL = "Please specify the port for the indicated protocol.";
    public static final String SPECIFY_PORT_OR_PROTOCOL_OR_BOTH = "Please specify the port, the protocol, or both.";
    public static final String SPECIFY_PROTOCOL_FOR_GIVEN_PORT = "Please specify the protocol for the indicated port.";

    // ---- MIME content types ----
    public static final String TEXT_PLAIN = "text/plain";
    public static final String TEXT_HTML = "text/html";
    private static final String MULTIPART_MIXED = "multipart/mixed";
    private static final String MULTIPART_RELATED = "multipart/related";
    public static final String CONTENT_TYPE = "Content-Type";

    public static final String SSL = "SSL";
    public static final String STR_FALSE = "false";
    public static final String STR_TRUE = "true";
    public static final String MESSAGES_ARE_NUMBERED_STARTING_AT_1 = "Messages are numbered starting at 1 through "
            + "the total number of messages in the folder!";
    public static final String STR_COMMA = ",";
    public static final String THE_SPECIFIED_FOLDER_DOES_NOT_EXIST_ON_THE_REMOTE_SERVER = "The specified folder does "
            + "not exist on the remote server.";
    public static final String UNRECOGNIZED_SSL_MESSAGE = "Unrecognized SSL message";
    public static final String UNRECOGNIZED_SSL_MESSAGE_PLAINTEXT_CONNECTION = "Unrecognized SSL message, plaintext "
            + "connection?";
    public static final String SSL_FACTORY = "javax.net.ssl.SSLSocketFactory";
    private static final String HOST_NOT_SPECIFIED = "The required host input is not specified!";
    private static final String MESSAGE_NUMBER_NOT_SPECIFIED = "The required messageNumber input is not specified!";
    private static final String USERNAME_NOT_SPECIFIED = "The required username input is not specified!";
    private static final String FOLDER_NOT_SPECIFIED = "The required folder input is not specified!";

    // ---- S/MIME decryption constants ----
    public static final String PKCS_KEYSTORE_TYPE = "PKCS12";
    public static final String BOUNCY_CASTLE_PROVIDER = "BC";
    public static final String ENCRYPTED_CONTENT_TYPE = "application/pkcs7-mime; name=\"smime.p7m\"; "
            + "smime-type=enveloped-data";
    // Appending "s" to pop3/imap selects the secure JavaMail provider (pop3s/imaps).
    public static final String SECURE_SUFFIX_FOR_POP3_AND_IMAP = "s";
    public static final int ONE_SECOND = 1000;

    //Operation inputs
    private String host;
    private String port;
    private String protocol;
    private String username;
    private String password;
    private String folder;
    private boolean trustAllRoots;
    /**
     * The relative position of the message in the folder. Numbering starts from 1.
     */
    private int messageNumber;
    // When true only the subject is returned; when false body/attachments too.
    private boolean subjectOnly = true;
    private boolean enableSSL;
    private boolean enableTLS;
    private String keystore;
    private String keystorePassword;
    private String trustKeystoreFile;
    private String trustPassword;
    private String characterSet;
    private String decryptionKeystore;
    private String decryptionKeyAlias;
    private String decryptionKeystorePass;
    private boolean deleteUponRetrieval;
    private boolean decryptMessage;
    // NOTE(review): unit of timeout is not visible here (passed straight into
    // the "mail.<protocol>.timeout" property) -- confirm against callers.
    private int timeout = -1;
    private boolean verifyCertificate = false;
    // S/MIME recipient id derived from the decryption certificate.
    private RecipientId recId = null;
    // Keystore holding the S/MIME decryption key pair.
    private KeyStore ks = null;

    /**
     * Fetch the requested message and build the operation result map
     * (subject, body, plain-text body, attachment names, raw message,
     * return code).
     *
     * @param getMailMessageInputs the validated operation inputs
     * @return map of output keys (SUBJECT, BODY_RESULT, ...) to values
     * @throws Exception on any connection, folder or decoding failure; an
     *         "Unrecognized SSL message" error is rewrapped with a hint that
     *         the connection was probably plaintext
     */
    public Map<String, String> execute(GetMailMessageInputs getMailMessageInputs) throws Exception {
        Map<String, String> result = new HashMap<>();
        try {
            processInputs(getMailMessageInputs);
            Message message = getMessage();
            if (decryptMessage) {
                // Prepare keystore/recipient state used for S/MIME decryption.
                addDecryptionSettings();
            }
            // Mark the message deleted on the server if requested.
            if (deleteUponRetrieval) {
                message.setFlag(Flags.Flag.DELETED, true);
            }
            if (subjectOnly) {
                String subject;
                if ((characterSet != null) && (characterSet.trim().length() > 0)) {
                    // Force the decode charset by rewriting the encoded-word header.
                    // NOTE(review): the subject is decoded here AND again in the
                    // decodeText() calls below -- confirm double-decoding is intended.
                    subject = message.getHeader(SUBJECT_HEADER)[0];
                    subject = changeHeaderCharset(subject, characterSet);
                    subject = MimeUtility.decodeText(subject);
                } else {
                    subject = message.getSubject();
                }
                if (subject == null) {
                    subject = "";
                }
                result.put(SUBJECT, MimeUtility.decodeText(subject));
                result.put(RETURN_RESULT, MimeUtility.decodeText(subject));
            } else {
                try {
                    // Get subject and attachedFileNames.
                    if ((characterSet != null) && (characterSet.trim().length() > 0)) {
                        // Force the decode charset.
                        String subject = message.getHeader(SUBJECT_HEADER)[0];
                        subject = changeHeaderCharset(subject, characterSet);
                        result.put(SUBJECT, MimeUtility.decodeText(subject));
                        String attachedFileNames = changeHeaderCharset(getAttachedFileNames(message), characterSet);
                        result.put(ATTACHED_FILE_NAMES_RESULT, decodeAttachedFileNames(attachedFileNames));
                    } else {
                        // Leave everything as the sender intended it to be :)
                        String subject = message.getSubject();
                        if (subject == null) {
                            subject = "";
                        }
                        result.put(SUBJECT, MimeUtility.decodeText(subject));
                        result.put(ATTACHED_FILE_NAMES_RESULT, decodeAttachedFileNames((getAttachedFileNames(message))));
                    }
                    // Get the message body; the "body" output is the LAST
                    // content-type section found, the plain-text output is the
                    // text/plain section (empty string when absent).
                    Map<String, String> messageByTypes = getMessageByContentTypes(message, characterSet);
                    String lastMessageBody = "";
                    if (!messageByTypes.isEmpty()) {
                        lastMessageBody = new LinkedList<>(messageByTypes.values()).getLast();
                    }
                    if (lastMessageBody == null) {
                        lastMessageBody = "";
                    }
                    result.put(BODY_RESULT, MimeUtility.decodeText(lastMessageBody));
                    String plainTextBody = messageByTypes.containsKey(TEXT_PLAIN) ? messageByTypes.get(TEXT_PLAIN) : "";
                    result.put(PLAIN_TEXT_BODY_RESULT, MimeUtility.decodeText(plainTextBody));
                    // Raw message goes into returnResult, with NUL bytes stripped.
                    StringOutputStream stream = new StringOutputStream();
                    message.writeTo(stream);
                    result.put(RETURN_RESULT, stream.toString().replaceAll("" + (char) 0, ""));
                } catch (UnsupportedEncodingException except) {
                    throw new UnsupportedEncodingException("The given encoding (" + characterSet + ") is invalid or not supported.");
                }
            }
            // Closing with expunge=true commits any DELETED flag set above;
            // close failures are deliberately best-effort.
            try {
                message.getFolder().close(true);
            } catch (Throwable ignore) {
            }
            result.put(RETURN_CODE, SUCCESS_RETURN_CODE);
        } catch (Exception e) {
            if (e.toString().contains(UNRECOGNIZED_SSL_MESSAGE)) {
                throw new Exception(UNRECOGNIZED_SSL_MESSAGE_PLAINTEXT_CONNECTION);
            } else {
                throw e;
            }
        }
        return result;
    }

    /**
     * Open the configured folder and return the message at
     * {@link #messageNumber} (1-based).
     *
     * @throws Exception if the folder does not exist
     * @throws IndexOutOfBoundsException if messageNumber exceeds the folder size
     */
    protected Message getMessage() throws Exception {
        Store store = createMessageStore();
        // Local "folder" shadows the field of the same name; the field is
        // accessed explicitly via this.folder.
        Folder folder = store.getFolder(this.folder);
        if (!folder.exists()) {
            throw new Exception(THE_SPECIFIED_FOLDER_DOES_NOT_EXIST_ON_THE_REMOTE_SERVER);
        }
        folder.open(getFolderOpenMode());
        if (messageNumber > folder.getMessageCount()) {
            throw new IndexOutOfBoundsException("message value was: " + messageNumber + " there are only " + folder.getMessageCount() + " messages in folder");
        }
        return folder.getMessage(messageNumber);
    }

    /**
     * Create and connect a JavaMail {@link Store} according to the
     * enableTLS/enableSSL flags (TLS first with SSL fallback, SSL only,
     * or plaintext).
     */
    protected Store createMessageStore() throws Exception {
        Properties props = new Properties();
        if (timeout > 0) {
            // NOTE(review): timeout is put as an Integer, but JavaMail session
            // properties are documented as strings -- confirm this value is
            // actually honored.
            props.put("mail." + protocol + ".timeout", timeout);
        }
        Authenticator auth = new SimpleAuthenticator(username, password);
        Store store;
        if (enableTLS || enableSSL) {
            addSSLSettings(trustAllRoots, keystore, keystorePassword, trustKeystoreFile, trustPassword);
        }
        if (enableTLS) {
            store = tryTLSOtherwiseTrySSL(props, auth);
        } else if (enableSSL) {
            store = connectUsingSSL(props, auth);
        } else {
            store = configureStoreWithoutSSL(props, auth);
            store.connect();
        }
        return store;
    }

    /**
     * Attempt a STARTTLS connection; if it fails and SSL is also enabled,
     * strip the TLS properties and retry over SSL, otherwise rethrow.
     */
    private Store tryTLSOtherwiseTrySSL(Properties props, Authenticator auth) throws MessagingException {
        Store store = configureStoreWithTLS(props, auth);
        try {
            store.connect(host, username, password);
        } catch (Exception e) {
            if (enableSSL) {
                clearTLSProperties(props);
                store = connectUsingSSL(props, auth);
            } else {
                throw e;
            }
        }
        return store;
    }

    /** Configure and connect a store over SSL. */
    private Store connectUsingSSL(Properties props, Authenticator auth) throws MessagingException {
        Store store = configureStoreWithSSL(props, auth);
        store.connect();
        return store;
    }

    /** Remove the STARTTLS-related session properties set by configureStoreWithTLS(). */
    private void clearTLSProperties(Properties props) {
        props.remove("mail." + protocol + ".ssl.enable");
        props.remove("mail." + protocol + ".starttls.enable");
        props.remove("mail." + protocol + ".starttls.required");
    }

    /**
     * Configure a store that connects through an SSL socket factory on the
     * configured port (no fallback to plaintext).
     */
    protected Store configureStoreWithSSL(Properties props, Authenticator auth) throws NoSuchProviderException {
        props.setProperty("mail." + protocol + ".socketFactory.class", SSL_FACTORY);
        props.setProperty("mail." + protocol + ".socketFactory.fallback", STR_FALSE);
        props.setProperty("mail." + protocol + ".port", port);
        props.setProperty("mail." + protocol + ".socketFactory.port", port);
        URLName url = new URLName(protocol, host, Integer.parseInt(port), "", username, password);
        Session session = Session.getInstance(props, auth);
        return session.getStore(url);
    }

    /**
     * Configure a store that requires STARTTLS.
     * NOTE(review): ssl.enable is set to "false" yet the store is obtained for
     * the secure provider (protocol + "s") -- confirm this combination is
     * intentional.
     */
    protected Store configureStoreWithTLS(Properties props, Authenticator auth) throws NoSuchProviderException {
        props.setProperty("mail." + protocol + ".ssl.enable", STR_FALSE);
        props.setProperty("mail." + protocol + ".starttls.enable", STR_TRUE);
        props.setProperty("mail." + protocol + ".starttls.required", STR_TRUE);
        Session session = Session.getInstance(props, auth);
        return session.getStore(protocol + SECURE_SUFFIX_FOR_POP3_AND_IMAP);
    }

    /** Configure a plaintext (non-SSL/TLS) store; caller connects it. */
    protected Store configureStoreWithoutSSL(Properties props, Authenticator auth) throws NoSuchProviderException {
        props.put("mail." + protocol + ".host", host);
        props.put("mail." + protocol + ".port", port);
        Session session = Session.getInstance(props, auth);
        return session.getStore(protocol);
    }

    /**
     * Install the process-wide default SSLContext from the given client
     * keystore and trust keystore, falling back to the JRE cacerts store
     * (password "changeit") when none is supplied and trustAllRoots is false.
     * When trustAllRoots is true, an accept-everything trust manager is used.
     *
     * @throws Exception on keystore load or SSL context initialization failure
     */
    protected void addSSLSettings(boolean trustAllRoots, String keystore, String keystorePassword,
                                  String trustKeystore, String trustPassword) throws Exception {
        boolean useClientCert = false;
        boolean useTrustCert = false;
        String separator = getSystemFileSeparator();
        String javaKeystore = getSystemJavaHome() + separator + "lib" + separator + "security" + separator + "cacerts";
        if (keystore.length() == 0 && !trustAllRoots) {
            // No client keystore given: fall back to the JRE cacerts if present.
            boolean storeExists = new File(javaKeystore).exists();
            keystore = (storeExists) ? FILE + javaKeystore : null;
            if (null != keystorePassword) {
                if ("".equals(keystorePassword)) {
                    keystorePassword = DEFAULT_PASSWORD_FOR_STORE;
                }
            }
            useClientCert = storeExists;
        } else {
            if (!trustAllRoots) {
                // Prefix plain paths with "file:" so they parse as URLs.
                if (!keystore.startsWith(HTTP)) {
                    keystore = FILE + keystore;
                }
                useClientCert = true;
            }
        }
        if (trustKeystore.length() == 0 && !trustAllRoots) {
            // Same fallback logic for the trust store.
            boolean storeExists = new File(javaKeystore).exists();
            trustKeystore = (storeExists) ? FILE + javaKeystore : null;
            if (storeExists) {
                if (isEmpty(trustPassword)) {
                    trustPassword = DEFAULT_PASSWORD_FOR_STORE;
                }
            } else {
                trustPassword = null;
            }
            useTrustCert = storeExists;
        } else {
            if (!trustAllRoots) {
                if (!trustKeystore.startsWith(HTTP)) {
                    trustKeystore = FILE + trustKeystore;
                }
                useTrustCert = true;
            }
        }
        TrustManager[] trustManagers = null;
        KeyManager[] keyManagers = null;
        if (trustAllRoots) {
            // Accepts any server certificate -- intended for test environments.
            trustManagers = new TrustManager[]{new EasyX509TrustManager()};
        }
        if (useTrustCert) {
            KeyStore trustKeyStore = SSLUtils.createKeyStore(new URL(trustKeystore), trustPassword);
            trustManagers = SSLUtils.createAuthTrustManagers(trustKeyStore);
        }
        if (useClientCert) {
            KeyStore clientKeyStore = SSLUtils.createKeyStore(new URL(keystore), keystorePassword);
            keyManagers = SSLUtils.createKeyManagers(clientKeyStore, keystorePassword);
        }
        SSLContext context = SSLContext.getInstance(SSL);
        context.init(keyManagers, trustManagers, new SecureRandom());
        // Mutates JVM-global state: affects every subsequent SSL connection.
        SSLContext.setDefault(context);
    }

    /**
     * Load the PKCS12 decryption keystore (BouncyCastle provider), resolve
     * the private-key alias if none was given, and build the S/MIME
     * recipient id from the matching certificate.
     *
     * @throws Exception if no private key or certificate can be found
     */
    private void addDecryptionSettings() throws Exception {
        char[] smimePw = new String(decryptionKeystorePass).toCharArray();
        Security.addProvider(new BouncyCastleProvider());
        ks = KeyStore.getInstance(PKCS_KEYSTORE_TYPE, BOUNCY_CASTLE_PROVIDER);
        InputStream decryptionStream = new URL(decryptionKeystore).openStream();
        try {
            ks.load(decryptionStream, smimePw);
        } finally {
            decryptionStream.close();
        }
        if ("".equals(decryptionKeyAlias)) {
            // No alias supplied: take the last key entry found in the store.
            Enumeration aliases = ks.aliases();
            while (aliases.hasMoreElements()) {
                String alias = (String) aliases.nextElement();
                if (ks.isKeyEntry(alias)) {
                    decryptionKeyAlias = alias;
                }
            }
            if ("".equals(decryptionKeyAlias)) {
                throw new Exception("Can't find a private key!");
            }
        }
        //
        // find the certificate for the private key and generate a
        // suitable recipient identifier.
        //
        X509Certificate cert = (X509Certificate)ks.getCertificate(decryptionKeyAlias);
        if (null == cert) {
            throw new Exception("Can't find a key pair with alias \"" + decryptionKeyAlias + "\" in the given keystore");
        }
        if (verifyCertificate) {
            cert.checkValidity();
        }
        recId = new RecipientId();
        recId.setSerialNumber(cert.getSerialNumber());
        recId.setIssuer(cert.getIssuerX500Principal().getEncoded());
    }

    /** Return the platform file separator (overridable for tests). */
    protected String getSystemFileSeparator() {
        return System.getProperty("file.separator");
    }

    /** Return the JRE home directory (overridable for tests). */
    protected String getSystemJavaHome() {
        return System.getProperty("java.home");
    }

    /**
     * Validate the raw operation inputs and populate the instance fields,
     * including protocol/port cross-defaulting (imap<->143, pop3<->110).
     *
     * @throws Exception when a required input is missing or the
     *         protocol/port combination is ambiguous
     */
    protected void processInputs(GetMailMessageInputs getMailMessageInputs) throws Exception {
        String strHost = getMailMessageInputs.getHostname();
        if (isEmpty(strHost)) {
            throw new Exception(HOST_NOT_SPECIFIED);
        } else {
            host = strHost.trim();
        }
        port = getMailMessageInputs.getPort();
        protocol = getMailMessageInputs.getProtocol();
        String strUsername = getMailMessageInputs.getUsername();
        if (isEmpty(strUsername)) {
            throw new Exception(USERNAME_NOT_SPECIFIED);
        } else {
            username = strUsername.trim();
        }
        String strPassword = getMailMessageInputs.getPassword();
        if (null == strPassword) {
            password = "";
        } else {
            password = strPassword.trim();
        }
        String strFolder = getMailMessageInputs.getFolder();
        if (isEmpty(strFolder)) {
            throw new Exception(FOLDER_NOT_SPECIFIED);
        } else {
            folder = strFolder.trim();
        }
        String trustAll = getMailMessageInputs.getTrustAllRoots();
        // Default value of trustAllRoots is true
        trustAllRoots = !(null != trustAll && trustAll.equalsIgnoreCase(STR_FALSE));
        String strMessageNumber = getMailMessageInputs.getMessageNumber();
        if (isEmpty(strMessageNumber)) {
            throw new Exception(MESSAGE_NUMBER_NOT_SPECIFIED);
        } else {
            messageNumber = Integer.parseInt(strMessageNumber);
        }
        String strSubOnly = getMailMessageInputs.getSubjectOnly();
        // Default value of subjectOnly is false
        subjectOnly = (strSubOnly != null && strSubOnly.equalsIgnoreCase(STR_TRUE));
        String strEnableSSL = getMailMessageInputs.getEnableSSL();
        // Default value of enableSSL is false;
        enableSSL = (null != strEnableSSL && strEnableSSL.equalsIgnoreCase(STR_TRUE));
        String strEnableTLS = getMailMessageInputs.getEnableTLS();
        enableTLS = (null != strEnableTLS && strEnableTLS.equalsIgnoreCase(STR_TRUE));
        keystore = getMailMessageInputs.getKeystore();
        keystorePassword = getMailMessageInputs.getKeystorePassword();
        trustKeystoreFile = getMailMessageInputs.getTrustKeystore();
        trustPassword = getMailMessageInputs.getTrustPassword();
        characterSet = getMailMessageInputs.getCharacterSet();
        String strDeleteUponRetrieval = getMailMessageInputs.getDeleteUponRetrieval();
        // Default value for deleteUponRetrieval is false
        deleteUponRetrieval = (null != strDeleteUponRetrieval && strDeleteUponRetrieval.equalsIgnoreCase(STR_TRUE));
        if (messageNumber < 1) {
            throw new Exception(MESSAGES_ARE_NUMBERED_STARTING_AT_1);
        }
        // Protocol/port reconciliation: reject ambiguous combinations, then
        // default the missing half for the known imap/imap4/pop3 pairs.
        if ((isEmpty(protocol)) && (isEmpty(port))) {
            throw new Exception(SPECIFY_PORT_OR_PROTOCOL_OR_BOTH);
        } else if ((protocol != null && !"".equals(protocol)) && (!protocol.equalsIgnoreCase(IMAP)) && (!protocol.equalsIgnoreCase(POP3)) && (!protocol.equalsIgnoreCase(IMAP_4)) && (isEmpty(port))) {
            throw new Exception(SPECIFY_PORT_FOR_PROTOCOL);
        } else if ((isEmpty(protocol)) && (port != null && !"".equals(port)) && (!port.equalsIgnoreCase(IMAP_PORT)) && (!port.equalsIgnoreCase(POP3_PORT))) {
            throw new Exception(SPECIFY_PROTOCOL_FOR_GIVEN_PORT);
        } else if ((isEmpty(protocol)) && (port.trim().equalsIgnoreCase(IMAP_PORT))) {
            protocol = IMAP;
        } else if ((isEmpty(protocol)) && (port.trim().equalsIgnoreCase(POP3_PORT))) {
            protocol = POP3;
        } else if ((protocol.trim().equalsIgnoreCase(POP3)) && (isEmpty(port))) {
            port = POP3_PORT;
        } else if ((protocol.trim().equalsIgnoreCase(IMAP)) && (isEmpty(port))) {
            port = IMAP_PORT;
        } else if ((protocol.trim().equalsIgnoreCase(IMAP_4)) && (isEmpty(port))) {
            port = IMAP_PORT;
        }
        //The protocol should be given in lowercase to be recognised.
protocol = protocol.toLowerCase(); if (protocol.trim().equalsIgnoreCase(IMAP_4)) { protocol = IMAP; } this.decryptionKeystore = getMailMessageInputs.getDecryptionKeystore(); if (isNotEmpty(this.decryptionKeystore)) { if (!decryptionKeystore.startsWith(HTTP)) { decryptionKeystore = FILE + decryptionKeystore; } decryptMessage = true; decryptionKeyAlias = getMailMessageInputs.getDecryptionKeyAlias(); if (null == decryptionKeyAlias) { decryptionKeyAlias = ""; } decryptionKeystorePass = getMailMessageInputs.getDecryptionKeystorePassword(); if (null == decryptionKeystorePass) { decryptionKeystorePass = ""; } } else { decryptMessage = false; } String timeout = getMailMessageInputs.getTimeout(); if (isNotEmpty(timeout)) { this.timeout = Integer.parseInt(timeout); if (this.timeout <= 0) { throw new Exception("timeout value must be a positive number"); } this.timeout *= ONE_SECOND; //timeouts in seconds } String verifyCertStr = getMailMessageInputs.getVerifyCertificate(); if (!isEmpty(verifyCertStr)) { verifyCertificate = Boolean.parseBoolean(verifyCertStr); } } protected Map<String, String> getMessageByContentTypes(Message message, String characterSet) throws Exception { Map<String, String> messageMap = new HashMap<>(); if (message.isMimeType(TEXT_PLAIN)) { messageMap.put(TEXT_PLAIN, MimeUtility.decodeText(message.getContent().toString())); } else if (message.isMimeType(TEXT_HTML)) { messageMap.put(TEXT_HTML, MimeUtility.decodeText(convertMessage(message.getContent().toString()))); } else if (message.isMimeType(MULTIPART_MIXED) || message.isMimeType(MULTIPART_RELATED)) { messageMap.put(MULTIPART_MIXED, extractMultipartMixedMessage(message, characterSet)); } else { Object obj = message.getContent(); Multipart mpart = (Multipart) obj; for (int i = 0, n = mpart.getCount(); i < n; i++) { Part part = mpart.getBodyPart(i); if (decryptMessage && part.getContentType() != null && part.getContentType().equals(ENCRYPTED_CONTENT_TYPE)) { part = decryptPart((MimeBodyPart)part); } String 
disposition = part.getDisposition(); String partContentType = part.getContentType().substring(0, part.getContentType().indexOf(";")); if (disposition == null) { if (part.getContent() instanceof MimeMultipart) { // multipart with attachment MimeMultipart mm = (MimeMultipart) part.getContent(); for (int j = 0; j < mm.getCount(); j++) { if (mm.getBodyPart(j).getContent() instanceof String) { BodyPart bodyPart = mm.getBodyPart(j); if ((characterSet != null) && (characterSet.trim().length() > 0)) { String contentType = bodyPart.getHeader(CONTENT_TYPE)[0]; contentType = contentType .replace(contentType.substring(contentType.indexOf("=") + 1), characterSet); bodyPart.setHeader(CONTENT_TYPE, contentType); } String partContentType1 = bodyPart .getContentType().substring(0, bodyPart.getContentType().indexOf(";")); messageMap.put(partContentType1, MimeUtility.decodeText(bodyPart.getContent().toString())); } } } else { //multipart - w/o attachment //if the user has specified a certain characterSet we decode his way if ((characterSet != null) && (characterSet.trim().length() > 0)) { InputStream istream = part.getInputStream(); ByteArrayInputStream bis = new ByteArrayInputStream(ASCIIUtility.getBytes(istream)); int count = bis.available(); byte[] bytes = new byte[count]; count = bis.read(bytes, 0, count); messageMap.put(partContentType, MimeUtility.decodeText(new String(bytes, 0, count, characterSet))); } else { messageMap.put(partContentType, MimeUtility.decodeText(part.getContent().toString())); } } } } //for } //else return messageMap; } private String extractMultipartMixedMessage(Message message, String characterSet) throws Exception { Object obj = message.getContent(); Multipart mpart = (Multipart) obj; for (int i = 0, n = mpart.getCount(); i < n; i++) { Part part = mpart.getBodyPart(i); if (decryptMessage && part.getContentType() != null && part.getContentType().equals(ENCRYPTED_CONTENT_TYPE)) { part = decryptPart((MimeBodyPart)part); } String disposition = 
part.getDisposition(); if (disposition != null) { // this means the part is not an inline image or attached file. continue; } if (part.isMimeType("multipart/related")) { // if related content then check it's parts String content = processMultipart(part); if (content != null) { return content; } } if (part.isMimeType("multipart/alternative")) { return extractAlternativeContent(part); } if (part.isMimeType("text/plain") || part.isMimeType("text/html")) { return part.getContent().toString(); } } return null; } private String processMultipart(Part part) throws IOException, MessagingException { Multipart relatedparts = (Multipart)part.getContent(); for (int j = 0; j < relatedparts.getCount(); j++) { Part rel = relatedparts.getBodyPart(j); if (rel.getDisposition() == null) { // again, if it's not an image or attachment(only those have disposition not null) if (rel.isMimeType("multipart/alternative")) { // last crawl through the alternative formats. return extractAlternativeContent(rel); } } } return null; } private String extractAlternativeContent(Part part) throws IOException, MessagingException { Multipart alternatives = (Multipart)part.getContent(); Object content = ""; for (int k = 0; k < alternatives.getCount(); k++) { Part alternative = alternatives.getBodyPart(k); if (alternative.getDisposition() == null) { content = alternative.getContent(); } } return content.toString(); } private MimeBodyPart decryptPart(MimeBodyPart part) throws Exception { SMIMEEnveloped smimeEnveloped = new SMIMEEnveloped(part); RecipientInformationStore recipients = smimeEnveloped.getRecipientInfos(); RecipientInformation recipient = recipients.get(recId); if (null == recipient) { StringBuilder errorMessage = new StringBuilder(); errorMessage.append("This email wasn't encrypted with \"" + recId.toString() + "\".\n"); errorMessage.append("The encryption recId is: "); for (Object rec : recipients.getRecipients()) { if (rec instanceof RecipientInformation) { RecipientId recipientId = 
((RecipientInformation) rec).getRID(); errorMessage.append("\"" + recipientId.toString() + "\"\n"); } } throw new Exception(errorMessage.toString()); } return toMimeBodyPart(recipient.getContent(ks.getKey(decryptionKeyAlias, null), BOUNCY_CASTLE_PROVIDER)); } protected String getAttachedFileNames(Part part) throws Exception { String fileNames = ""; Object content = part.getContent(); if (!(content instanceof Multipart)) { if (decryptMessage && part.getContentType() != null && part.getContentType().equals(ENCRYPTED_CONTENT_TYPE)) { part = decryptPart((MimeBodyPart) part); } // non-Multipart MIME part ... // is the file name set for this MIME part? (i.e. is it an attachment?) if (part.getFileName() != null && !part.getFileName().equals("") && part.getInputStream() != null) { String fileName = part.getFileName(); // is the file name encoded? (consider it is if it's in the =?charset?encoding?encoded text?= format) if (fileName.indexOf('?') == -1) { // not encoded (i.e. a simple file name not containing '?')-> just return the file name return fileName; } // encoded file name -> remove any chars before the first "=?" and after the last "?=" return fileName.substring(fileName.indexOf("=?"), fileName.length() - ((new StringBuilder(fileName)).reverse()).indexOf("=?")); } } else { // a Multipart type of MIME part Multipart mpart = (Multipart) content; // iterate through all the parts in this Multipart ... for (int i = 0, n = mpart.getCount(); i < n; i++) { if (!"".equals(fileNames)) { fileNames += STR_COMMA; } // to the list of attachments built so far append the list of attachments in the current MIME part ... fileNames += getAttachedFileNames(mpart.getBodyPart(i)); } } return fileNames; } protected String decodeAttachedFileNames(String attachedFileNames) throws Exception { StringBuilder sb = new StringBuilder(); String delimiter = ""; // splits the input into comma-separated chunks and decodes each chunk according to its encoding ... 
for (String fileName : attachedFileNames.split(STR_COMMA)) { sb.append(delimiter).append(MimeUtility.decodeText(fileName)); delimiter = STR_COMMA; } // return the concatenation of the decoded chunks ... return sb.toString(); } protected String convertMessage(String msg) throws Exception { StringBuilder sb = new StringBuilder(); for (int i = 0; i < msg.length(); i++) { char currentChar = msg.charAt(i); if (currentChar == '\n') { sb.append("<br>"); } else { sb.append(currentChar); } } return sb.toString(); } protected int getFolderOpenMode() { return Folder.READ_WRITE; } /** * This method addresses the mail headers which contain encoded words. The syntax for an encoded word is defined in * RFC 2047 section 2: http://www.faqs.org/rfcs/rfc2047.html In some cases the header is marked as having a certain * charset but at decode not all the characters a properly decoded. This is why it can be useful to force it to * decode the text with a different charset. * For example when sending an email using Mozilla Thunderbird and JIS X 0213 characters the subject and attachment * headers are marked as =?Shift_JIS? but the JIS X 0213 characters are only supported in windows-31j. * <p/> * This method replaces the charset tag of the header with the new charset provided by the user. * * @param header - The header in which the charset will be replaced. * @param newCharset - The new charset that will be replaced in the given header. * @return The header with the new charset. */ public String changeHeaderCharset(String header, String newCharset) { //match for =?charset? return header.replaceAll("=\\?[^\\(\\)<>@,;:/\\[\\]\\?\\.= ]+\\?", "=?" + newCharset + "?"); } }
package com.teamcenter.TcLoadSimulate.Core;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import javax.xml.bind.UnmarshalException;

import org.eclipse.swt.SWT;
import org.eclipse.swt.events.FocusEvent;
import org.eclipse.swt.events.FocusListener;
import org.eclipse.swt.events.KeyEvent;
import org.eclipse.swt.events.KeyListener;
import org.eclipse.swt.events.MouseEvent;
import org.eclipse.swt.events.MouseListener;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.FileDialog;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.MessageBox;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.TableColumn;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Text;

import com.teamcenter.TcLoadSimulate.TcLoadSimulate;
import com.teamcenter.TcLoadSimulate.Core.Events.EventListener;
import com.teamcenter.TcLoadSimulate.Core.Events.Logger;

/**
 * GUI class that initiates display and widgets.
 */
public class UserInterface implements EventListener {
    /** The display object. */
    private static Display display;
    /** The shell object. */
    private static Shell shell;
    /** The table in which all worker information is shown. */
    private static Table table;
    /** The start button. */
    private static Button startButton;
    /** The stop button. */
    private static Button stopButton;
    /** Re-reads the current configuration file. */
    private static Button rereadButton;
    /** Opens a configuration file chooser. */
    private static Button loadButton;
    /** Opens the output-file chooser. */
    private static Button outputButton;
    /** Text field holding the output (CSV) file path. */
    private static Text outputFile;

    /** @return the worker table widget. */
    public final static Table getTable() {
        return table;
    }

    /** Shows a modal error dialog with the given message. */
    public final static void DisplayError(String msg) {
        MessageBox msgBox = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR);
        msgBox.setText("An error has occurred");
        msgBox.setMessage(msg);
        msgBox.open();
    }

    /**
     * Shows a modal error dialog with the given message, appending the linked
     * JAXB cause (for UnmarshalException) and the exception's stack trace.
     */
    public final static void DisplayError(String msg, Exception e) {
        MessageBox msgBox = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR);
        msgBox.setText("An error has occurred");
        if (e instanceof UnmarshalException) {
            UnmarshalException ume = (UnmarshalException) e;
            msg += "\n\nMessage:\n\n" + ume.getLinkedException().getMessage();
        }
        if (e.getStackTrace().length > 0) {
            msg += "\n\nStack trace:\n\n";
            for (StackTraceElement ste : e.getStackTrace()) {
                msg += ste.toString();
            }
        }
        msgBox.setMessage(msg);
        msgBox.open();
    }

    /**
     * Setup the application window and all its widgets: the worker table with
     * its columns, the output-file field, the OUTPUT/OPEN/REREAD/START/STOP
     * buttons, and all mouse/focus/key/selection listeners; finally opens the shell.
     *
     * @throws Exception on initialization failure
     */
    public static void init() throws Exception {
        display = new Display();
        shell = new Shell(display);
        shell.setText("TcLoadSimulate");
        final Image imgTeamcenter = new Image(display, TcLoadSimulate.class
                .getClassLoader().getResourceAsStream(
                        "com/teamcenter/TcLoadSimulate/Images/teamcenter.png"));
        shell.setImage(imgTeamcenter);
        shell.setLayout(new GridLayout(6, false));
        shell.setSize(990, 700);
        shell.setMinimumSize(990, 700);

        // Worker table spanning the full shell width.
        table = new Table(shell, SWT.MULTI | SWT.FULL_SELECTION | SWT.FLAT);
        table.setHeaderVisible(true);
        table.setLinesVisible(true);
        GridData gd = new GridData(SWT.FILL, SWT.FILL, true, true);
        gd.horizontalSpan = 6;
        table.setLayoutData(gd);
        // Zero-width first column; the second narrow column carries the status color.
        TableColumn dummy = new TableColumn(table, SWT.NONE);
        dummy.setWidth(0);
        dummy.setMoveable(false);
        dummy.setResizable(false);
        TableColumn status = new TableColumn(table, SWT.NONE);
        status.setWidth(9);
        status.setMoveable(false);
        status.setResizable(false);
        TableColumn date = new TableColumn(table, SWT.NONE);
        date.setText("Date");
        date.setWidth(110);
        date.setMoveable(true);
        date.setResizable(false);
        TableColumn worker = new TableColumn(table, SWT.NONE);
        worker.setText("Worker");
        worker.setWidth(80);
        worker.setMoveable(true);
        worker.setResizable(true);
        TableColumn module = new TableColumn(table, SWT.NONE);
        module.setText("Module");
        module.setWidth(110);
        module.setMoveable(true);
        module.setResizable(true);
        TableColumn iteration = new TableColumn(table, SWT.RIGHT);
        iteration.setText("Iteration");
        iteration.setWidth(70);
        iteration.setMoveable(true);
        iteration.setResizable(false);
        TableColumn time = new TableColumn(table, SWT.RIGHT);
        time.setText("Elapsed");
        time.setWidth(70);
        time.setMoveable(true);
        time.setResizable(false);
        TableColumn progress = new TableColumn(table, SWT.RIGHT);
        progress.setText("Progress");
        progress.setWidth(58);
        progress.setMoveable(true);
        progress.setResizable(false);
        TableColumn misc = new TableColumn(table, SWT.NONE);
        misc.setText("Extra information");
        misc.setWidth(434);
        misc.setMoveable(false);
        misc.setResizable(true);

        // Output-file label + text field in their own composite.
        final Composite c = new Composite(shell, SWT.NONE);
        GridLayout gl = new GridLayout(1, false);
        gl.marginHeight = 0;
        gl.marginWidth = 0;
        c.setLayout(gl);
        c.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, false));
        final Label outputLabel = new Label(c, SWT.NONE);
        outputLabel.setLayoutData(new GridData(SWT.LEFT, SWT.TOP, true, false));
        outputLabel.setText("Output file:");
        outputFile = new Text(c, SWT.SINGLE | SWT.BORDER);
        outputFile
                .setLayoutData(new GridData(SWT.FILL, SWT.BOTTOM, true, false));
        if (TcLoadSimulate.configurationFile != null)
            outputFile.setText(TcLoadSimulate.configurationFile
                    .getAbsolutePath());

        // Button row (shared 80x30 GridData).
        outputButton = new Button(shell, SWT.FLAT);
        gd = new GridData(80, 30);
        gd.verticalAlignment = SWT.BOTTOM;
        outputButton.setLayoutData(gd);
        outputButton.setText("OUTPUT...");
        outputButton.setToolTipText("Set output file.");
        outputButton.setFont(new Font(display, display.getSystemFont()
                .getFontData()[0].getName(), 8, SWT.BOLD));
        loadButton = new Button(shell, SWT.FLAT);
        loadButton.setLayoutData(gd);
        loadButton.setText("OPEN...");
        loadButton.setToolTipText("Open configuration file.");
        loadButton.setFont(new Font(display, display.getSystemFont()
                .getFontData()[0].getName(), 8, SWT.BOLD));
        rereadButton = new Button(shell, SWT.FLAT);
        rereadButton.setLayoutData(gd);
        rereadButton.setText("REREAD");
        rereadButton.setToolTipText("Reread configuration file.");
        rereadButton.setFont(new Font(display, display.getSystemFont()
                .getFontData()[0].getName(), 8, SWT.BOLD));
        rereadButton.setEnabled(false);
        startButton = new Button(shell, SWT.FLAT);
        startButton.setLayoutData(gd);
        startButton.setImage(new Image(display, TcLoadSimulate.class
                .getClassLoader().getResourceAsStream(
                        "com/teamcenter/TcLoadSimulate/Images/start.png")));
        startButton.setText("START");
        startButton.setFont(new Font(display, display.getSystemFont()
                .getFontData()[0].getName(), 8, SWT.BOLD));
        startButton.setEnabled(false);
        stopButton = new Button(shell, SWT.FLAT);
        stopButton.setLayoutData(gd);
        stopButton.setImage(new Image(display, TcLoadSimulate.class
                .getClassLoader().getResourceAsStream(
                        "com/teamcenter/TcLoadSimulate/Images/stop.png")));
        stopButton.setText("STOP");
        stopButton.setFont(new Font(display, display.getSystemFont()
                .getFontData()[0].getName(), 8, SWT.BOLD));
        stopButton.setEnabled(false);

        //
        // Listeners
        //
        // OPEN...: pick a configuration XML, then restart the application with it.
        loadButton.addMouseListener(new MouseListener() {
            @Override
            public void mouseUp(MouseEvent e) {
                table.deselectAll();
                table.notifyListeners(SWT.Selection, new Event());
                final FileDialog dlg = new FileDialog(shell, SWT.OPEN);
                dlg.setText("Select configuration file");
                dlg.setFilterExtensions(new String[] { "*.xml" });
                if (TcLoadSimulate.configurationFile != null
                        && TcLoadSimulate.configurationFile.exists())
                    dlg.setFilterPath(TcLoadSimulate.configurationFile
                            .getParent());
                else
                    dlg.setFilterPath(TcLoadSimulate.appPath.getPath());
                String file = dlg.open();
                if (file != null) {
                    TcLoadSimulate.configurationFile = new File(file);
                    table.removeAll();
                    TcLoadSimulate.start();
                }
                table.setFocus();
            }

            @Override
            public void mouseDown(MouseEvent e) {
            }

            @Override
            public void mouseDoubleClick(MouseEvent e) {
            }
        });
        // Output-file field: on focus loss, (re)create the file and reset the logger.
        outputFile.addFocusListener(new FocusListener() {
            @Override
            public void focusLost(FocusEvent e) {
                Text t = (Text) e.widget;
                if (t.getText().isEmpty())
                    return;
                table.deselectAll();
                table.notifyListeners(SWT.Selection, new Event());
                File file = new File(t.getText());
                Logger.reset();
                try {
                    file.createNewFile();
                    TcLoadSimulate.outputFile = file;
                } catch (IOException ex) {
                    TcLoadSimulate.outputFile = null;
                    DisplayError("File could not be created.");
                }
            }

            @Override
            public void focusGained(FocusEvent e) {
            }
        });
        // Enter in the output field moves focus to the table (triggering focusLost).
        outputFile.addKeyListener(new KeyListener() {
            @Override
            public void keyReleased(KeyEvent e) {
                if (e.keyCode == 13) {
                    table.setFocus();
                }
            }

            @Override
            public void keyPressed(KeyEvent e) {
            }
        });
        // OUTPUT...: pick a CSV output file and propagate via the focusLost handler.
        outputButton.addMouseListener(new MouseListener() {
            @Override
            public void mouseUp(MouseEvent e) {
                table.deselectAll();
                table.notifyListeners(SWT.Selection, new Event());
                final FileDialog dlg = new FileDialog(shell, SWT.SAVE);
                dlg.setText("Select output file");
                dlg.setFilterExtensions(new String[] { "*.csv" });
                String file = dlg.open();
                if (file != null) {
                    TcLoadSimulate.outputFile = new File(file);
                    outputFile.setText(TcLoadSimulate.outputFile
                            .getAbsolutePath());
                    outputFile.notifyListeners(SWT.FocusOut, new Event());
                }
                table.setFocus();
            }

            @Override
            public void mouseDown(MouseEvent e) {
            }

            @Override
            public void mouseDoubleClick(MouseEvent e) {
            }
        });
        // REREAD: clear the table and restart with the current configuration.
        rereadButton.addMouseListener(new MouseListener() {
            @Override
            public void mouseUp(MouseEvent e) {
                table.deselectAll();
                table.notifyListeners(SWT.Selection, new Event());
                rereadButton.setEnabled(false);
                table.removeAll();
                table.setFocus();
                TcLoadSimulate.start();
            }

            @Override
            public void mouseDown(MouseEvent e) {
            }

            @Override
            public void mouseDoubleClick(MouseEvent e) {
            }
        });
        // START: launch the workers currently selected in the table.
        startButton.addMouseListener(new MouseListener() {
            @Override
            public void mouseUp(MouseEvent e) {
                final List<Worker> wl = new ArrayList<Worker>();
                for (TableItem item : table.getSelection()) {
                    wl.add((Worker) item.getData("worker"));
                }
                startButton.setEnabled(false);
                table.deselectAll();
                Application.startWorkers(wl);
                table.setFocus();
            }

            @Override
            public void mouseDown(MouseEvent e) {
            }

            @Override
            public void mouseDoubleClick(MouseEvent e) {
            }
        });
        // STOP: stop the workers currently selected in the table.
        stopButton.addMouseListener(new MouseListener() {
            @Override
            public void mouseUp(MouseEvent e) {
                final List<Worker> wl = new ArrayList<Worker>();
                for (TableItem item : table.getSelection()) {
                    wl.add((Worker) item.getData("worker"));
                }
                stopButton.setEnabled(false);
                table.deselectAll();
                Application.stopWorkers(wl);
                table.setFocus();
            }

            @Override
            public void mouseDown(MouseEvent e) {
            }

            @Override
            public void mouseDoubleClick(MouseEvent e) {
            }
        });
        // Selection changes toggle START/STOP depending on selected workers' modes.
        table.addSelectionListener(new SelectionListener() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                if (table.getSelectionCount() == 0) {
                    if (startButton.isEnabled())
                        startButton.setEnabled(false);
                    if (stopButton.isEnabled())
                        stopButton.setEnabled(false);
                } else {
                    boolean started = false;
                    boolean stopped = false;
                    for (TableItem item : table.getSelection()) {
                        Worker w = (Worker) item.getData("worker");
                        if (w.mode == Mode.STARTED)
                            started = true;
                        else
                            stopped = true;
                    }
                    if (started && !stopButton.isEnabled())
                        stopButton.setEnabled(true);
                    else if (!started)
                        stopButton.setEnabled(false);
                    if (stopped && !startButton.isEnabled())
                        startButton.setEnabled(true);
                    else if (!stopped)
                        startButton.setEnabled(false);
                }
            }

            @Override
            public void widgetDefaultSelected(SelectionEvent e) {
            }
        });
        // Ctrl+A toggles select-all / deselect-all.
        table.addKeyListener(new KeyListener() {
            @Override
            public void keyReleased(KeyEvent e) {
                if (e.stateMask == SWT.CTRL && e.keyCode == 'a') {
                    if (table.getSelectionCount() == table.getItems().length)
                        table.deselectAll();
                    else
                        table.selectAll();
                    table.notifyListeners(SWT.Selection, new Event());
                }
            }

            @Override
            public void keyPressed(KeyEvent e) {
            }
        });
        shell.open();
    }

    /**
     * Runs the SWT event loop until the shell is disposed.
     * Re-enables the reread button first if a reread was in progress.
     */
    public static final void loop() {
        // Need to reset reread button if it has been pressed
        if (!rereadButton.isDisposed() && !rereadButton.isEnabled())
            rereadButton.setEnabled(true);
        while (!shell.isDisposed())
            if (!display.readAndDispatch())
                display.sleep();
    }

    /**
     * Enables/disables the configuration widgets: while ANY worker is started
     * they are disabled; once no worker is started they are re-enabled.
     * Must be called on the UI thread.
     */
    public static final void updateUI() {
        for (TableItem item : table.getItems()) {
            Worker w = (Worker) item.getData("worker");
            if (w.mode == Mode.STARTED) {
                if (rereadButton.isEnabled())
                    rereadButton.setEnabled(false);
                if (outputButton.isEnabled())
                    outputButton.setEnabled(false);
                if (loadButton.isEnabled())
                    loadButton.setEnabled(false);
                if (outputFile.isEnabled())
                    outputFile.setEnabled(false);
                return;
            }
            if (!rereadButton.isEnabled())
                rereadButton.setEnabled(true);
            if (!outputButton.isEnabled())
                outputButton.setEnabled(true);
            if (!loadButton.isEnabled())
                loadButton.setEnabled(true);
            if (!outputFile.isEnabled())
                outputFile.setEnabled(true);
        }
    }

    /**
     * Refreshes a worker's table row (status color + cell texts) on the UI
     * thread according to the worker's current status, then refreshes the
     * widget enablement via {@link #updateUI()}.
     */
    private static final void updateWorkerStatus(
            final com.teamcenter.TcLoadSimulate.Core.Events.Event e) {
        final Worker w = (Worker) e.getSource();
        switch (w.status) {
        case RUNNING:
            // Yellow: worker is actively running an iteration.
            display.asyncExec(new Runnable() {
                @Override
                public void run() {
                    TableItem t = w.getTableItem();
                    t.setBackground(1, new Color(display, 255, 255, 150));
                    t.setText(new String[] { null, null, e.getDate(), w.id,
                            w.getModuleType(), w.getIterations(), "---",
                            w.getPercent(), "" });
                }
            });
            break;
        case SLEEPING:
            // Blue: worker is waiting between iterations.
            display.asyncExec(new Runnable() {
                @Override
                public void run() {
                    TableItem t = w.getTableItem();
                    t.setBackground(1, new Color(display, 150, 150, 255));
                    t.setText(new String[] { null, null, e.getDate(), w.id,
                            w.getModuleType(), w.getIterations(),
                            w.getModuleTimeDelta(), w.getPercent(),
                            w.getModuleMiscInfo() });
                }
            });
            break;
        case FINISHED:
            // Green: worker completed its workload.
            display.asyncExec(new Runnable() {
                @Override
                public void run() {
                    TableItem t = w.getTableItem();
                    t.setBackground(1, new Color(display, 150, 255, 150));
                    t.setText(new String[] { null, null, e.getDate(), w.id,
                            w.getModuleType(), w.getIterations(),
                            w.getModuleTimeDelta(), w.getPercent(),
                            w.getModuleMiscInfo() });
                }
            });
            break;
        case ERROR:
            // Red: worker failed; details go to standard error.
            display.asyncExec(new Runnable() {
                @Override
                public void run() {
                    TableItem t = w.getTableItem();
                    t.setBackground(1, new Color(display, 255, 150, 150));
                    t.setText(new String[] { null, null, e.getDate(), w.id,
                            w.getModuleType(), null, null, null,
                            "Please see standard error console output for detailed error message" });
                }
            });
            break;
        case NONE:
            break;
        default:
            break;
        }
        display.asyncExec(new Runnable() {
            @Override
            public void run() {
                updateUI();
            }
        });
    }

    /** Event sink: routes worker events to {@link #updateWorkerStatus}. */
    @Override
    public void handleWorkerEvent(
            com.teamcenter.TcLoadSimulate.Core.Events.Event e) {
        if (e.getSource() instanceof Worker) {
            updateWorkerStatus(e);
        }
    }
}
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.flowable.engine.test.bpmn.event.error.mapError;

import java.util.HashMap;
import java.util.Map;

import org.flowable.engine.impl.test.PluggableFlowableTestCase;
import org.flowable.engine.test.Deployment;
import org.flowable.standalone.testing.helpers.ServiceTaskTestMock;

/**
 * Tests that Java exceptions thrown from service tasks are mapped to BPMN
 * errors (and caught by boundary error events) only when the exception type
 * matches the configured mapping — directly or by inheritance.
 *
 * @author Saeid Mirzaei
 */
public class BoundaryErrorMapTest extends PluggableFlowableTestCase {

    // exception matches the only mapping, directly (class-delegate service task)
    @Deployment
    public void testClassDelegateSingleDirectMap() {
        FlagDelegate.reset();
        Map<String, Object> vars = new HashMap<>();
        vars.put("exceptionClass", BoundaryErrorParentException.class.getName());

        runtimeService.startProcessInstanceByKey("processWithSingleExceptionMap", vars);
        // FlagDelegate is only reached via the boundary error path.
        assertTrue(FlagDelegate.isVisited());
    }

    // same scenario, service task referenced through an expression
    @Deployment
    public void testExpressionSingleDirectMap() {
        FlagDelegate.reset();
        Map<String, Object> vars = new HashMap<>();
        vars.put("exceptionClass", BoundaryErrorParentException.class.getName());

        runtimeService.startProcessInstanceByKey("processWithSingleExceptionMap", vars);
        assertTrue(FlagDelegate.isVisited());
    }

    // same scenario, service task referenced through a delegate expression
    @Deployment
    public void testDelegateExpressionSingleDirectMap() {
        FlagDelegate.reset();
        Map<String, Object> vars = new HashMap<>();
        vars.put("exceptionClass", BoundaryErrorParentException.class.getName());

        runtimeService.startProcessInstanceByKey("processWithSingleExceptionMap", vars);
        assertTrue(FlagDelegate.isVisited());
    }

    // exception does not match the single mapping
    @Deployment(resources = "org/flowable/engine/test/bpmn/event/error/mapError/BoundaryErrorMapTest.testClassDelegateSingleDirectMap.bpmn20.xml")
    public void testClassDelegateSingleDirectMapNotMatchingException() {
        FlagDelegate.reset();
        Map<String, Object> vars = new HashMap<>();
        // Unmapped exception type: the error must NOT be caught.
        vars.put("exceptionClass", IllegalStateException.class.getName());
        assertEquals(0, ServiceTaskTestMock.CALL_COUNT.get());

        try {
            runtimeService.startProcessInstanceByKey("processWithSingleExceptionMap", vars);
            fail("exception expected, as there is no matching exception map");
        } catch (Exception e) {
            assertFalse(FlagDelegate.isVisited());
        }
    }

    @Deployment(resources = "org/flowable/engine/test/bpmn/event/error/mapError/BoundaryErrorMapTest.testExpressionSingleDirectMap.bpmn20.xml")
    public void testExpressionSingleDirectMapNotMatchingException() {
        FlagDelegate.reset();
        Map<String, Object> vars = new HashMap<>();
        vars.put("exceptionClass", IllegalArgumentException.class.getName());
        assertEquals(0, ServiceTaskTestMock.CALL_COUNT.get());

        try {
            runtimeService.startProcessInstanceByKey("processWithSingleExceptionMap", vars);
            fail("exception expected, as there is no matching exception map");
        } catch (Exception e) {
            assertFalse(FlagDelegate.isVisited());
        }
    }

    @Deployment(resources = "org/flowable/engine/test/bpmn/event/error/mapError/BoundaryErrorMapTest.testDelegateExpressionSingleDirectMap.bpmn20.xml")
    public void testDelegateExpressionSingleDirectMapNotMatchingException() {
        FlagDelegate.reset();
        Map<String, Object> vars = new HashMap<>();
        vars.put("exceptionClass", IllegalArgumentException.class.getName());
        assertEquals(0, ServiceTaskTestMock.CALL_COUNT.get());

        try {
            runtimeService.startProcessInstanceByKey("processWithSingleExceptionMap", vars);
            fail("exception expected, as there is no matching exception map");
        } catch (Exception e) {
            assertFalse(FlagDelegate.isVisited());
        }
    }

    // exception matches by inheritance
    // NOTE(review): this method is truncated at the end of the visible chunk.
    @Deployment
    public void testClassDelegateSingleInheritedMap() {
        Map<String, Object> vars =
new HashMap<>(); vars.put("exceptionClass", BoundaryEventChildException.class.getName()); FlagDelegate.reset(); runtimeService.startProcessInstanceByKey("processWithSingleExceptionMap", vars); assertTrue(FlagDelegate.isVisited()); } // check the default map @Deployment public void testClassDelegateDefaultMap() { Map<String, Object> vars = new HashMap<>(); vars.put("exceptionClass", Exception.class.getName()); FlagDelegate.reset(); runtimeService.startProcessInstanceByKey("processWithSingleExceptionMap", vars); assertTrue(FlagDelegate.isVisited()); } @Deployment public void testExpressionDefaultMap() { Map<String, Object> vars = new HashMap<>(); vars.put("exceptionClass", Exception.class.getName()); FlagDelegate.reset(); runtimeService.startProcessInstanceByKey("processWithSingleExceptionMap", vars); assertTrue(FlagDelegate.isVisited()); } @Deployment public void testDelegateExpressionDefaultMap() { Map<String, Object> vars = new HashMap<>(); vars.put("exceptionClass", Exception.class.getName()); FlagDelegate.reset(); runtimeService.startProcessInstanceByKey("processWithSingleExceptionMap", vars); assertTrue(FlagDelegate.isVisited()); } @Deployment public void testSeqMultInstanceSingleDirectMap() { FlagDelegate.reset(); Map<String, Object> vars = new HashMap<>(); vars.put("exceptionClass", BoundaryErrorParentException.class.getName()); runtimeService.startProcessInstanceByKey("processWithSingleExceptionMap", vars); assertTrue(FlagDelegate.isVisited()); } @Deployment public void testSubProcessSingleDirectMap() { FlagDelegate.reset(); Map<String, Object> vars = new HashMap<>(); vars.put("exceptionClass", BoundaryErrorParentException.class.getName()); runtimeService.startProcessInstanceByKey("subprocssWithSingleExceptionMap", vars); assertTrue(FlagDelegate.isVisited()); } @Deployment(resources = { "org/flowable/engine/test/bpmn/event/error/mapError/BoundaryErrorMapTest.testCallProcessSingleDirectMap.bpmn20.xml", 
"org/flowable/engine/test/bpmn/event/error/mapError/BoundaryErrorMapTest.testCallProcessCalee.bpmn20.xml" }) public void testCallProcessSingleDirectMap() { FlagDelegate.reset(); Map<String, Object> vars = new HashMap<>(); vars.put("exceptionClass", BoundaryErrorParentException.class.getName()); runtimeService.startProcessInstanceByKey("callProcssWithSingleExceptionMap", vars); assertTrue(FlagDelegate.isVisited()); } @Deployment(resources = { "org/flowable/engine/test/bpmn/event/error/mapError/BoundaryErrorMapTest.testExpressionCallProcessSingleDirectMap.bpmn20.xml", "org/flowable/engine/test/bpmn/event/error/mapError/BoundaryErrorMapTest.testCallProcessExpressionSubProcess.bpmn20.xml" }) public void testCallProcessExpressionSingleDirectMap() { FlagDelegate.reset(); Map<String, Object> vars = new HashMap<>(); vars.put("exceptionClass", BoundaryErrorParentException.class.getName()); runtimeService.startProcessInstanceByKey("callProcssWithSingleExceptionMap", vars); assertTrue(FlagDelegate.isVisited()); } @Deployment(resources = { "org/flowable/engine/test/bpmn/event/error/mapError/BoundaryErrorMapTest.testDelegateExpressionCallProcessSingleDirectMap.bpmn20.xml", "org/flowable/engine/test/bpmn/event/error/mapError/BoundaryErrorMapTest.testCallProcessDelegateExpressionSubProcess.bpmn20.xml" }) public void testCallProcessDelegateExpressionSingleDirectMap() { FlagDelegate.reset(); Map<String, Object> vars = new HashMap<>(); vars.put("exceptionClass", BoundaryErrorParentException.class.getName()); runtimeService.startProcessInstanceByKey("callProcssWithSingleExceptionMap", vars); assertTrue(FlagDelegate.isVisited()); } }
package org.basex.query.func;

import static org.basex.query.QueryError.*;
import static org.basex.query.QueryText.*;
import static org.basex.util.Token.*;

import org.basex.query.*;
import org.basex.query.ann.*;
import org.basex.query.expr.*;
import org.basex.query.expr.Expr.Flag;
import org.basex.query.util.*;
import org.basex.query.util.list.*;
import org.basex.query.value.item.*;
import org.basex.query.value.type.*;
import org.basex.query.value.type.SeqType.Occ;
import org.basex.query.var.*;
import org.basex.util.*;
import org.basex.util.hash.*;

/**
 * This class provides access to built-in and user-defined functions.
 *
 * @author BaseX Team 2005-15, BSD License
 * @author Christian Gruen
 */
public final class Functions extends TokenSet {
  /** Singleton instance. */
  private static final Functions INSTANCE = new Functions();
  /** Function classes, indexed by the id assigned by the underlying token set. */
  private Function[] funcs = new Function[Array.CAPACITY];

  /**
   * Returns the singleton instance.
   * @return instance
   */
  public static Functions get() {
    return INSTANCE;
  }

  /**
   * Constructor, registering built-in XQuery functions.
   */
  private Functions() {
    for(final Function sig : Function.VALUES) {
      final String desc = sig.desc;
      // the local name is the part of the descriptor preceding the '(' of the signature
      final byte[] ln = token(desc.substring(0, desc.indexOf('(')));
      final int i = put(new QNm(ln, sig.uri()).id());
      if(funcs[i] != null) throw Util.notExpected("Function defined twice: " + sig);
      funcs[i] = sig;
    }
  }

  /**
   * Tries to resolve the specified function with xs namespace as a cast.
   * @param name function name
   * @param arity number of arguments
   * @param ii input info
   * @return cast type (never {@code null}; an exception is raised if the type cannot be resolved)
   * @throws QueryException if no constructor function exists for the name, an abstract type was
   *   specified, or the arity is not 1
   */
  private static Type getCast(final QNm name, final long arity, final InputInfo ii)
      throws QueryException {

    final byte[] ln = name.local();
    Type type = ListType.find(name);
    if(type == null) type = AtomType.find(name, false);
    // constructor function found and type is not abstract: it must take exactly one argument
    if(type != null && type != AtomType.NOT && type != AtomType.AAT) {
      if(arity == 1) return type;
      throw FUNCTYPES_X_X_X_X.get(ii, name.string(), arity, "s", 1);
    }

    // no constructor function found, or abstract type specified:
    // include a similar function name in the error message, if one exists
    final Levenshtein ls = new Levenshtein();
    for(final AtomType t : AtomType.VALUES) {
      if(t.parent == null) continue;
      final byte[] u = t.name.uri();
      if(eq(u, XS_URI) && t != AtomType.NOT && t != AtomType.AAT && ls.similar(
          lc(ln), lc(t.string()))) throw FUNCSIMILAR_X_X.get(ii, name.string(), t.string());
    }
    // no similar name found: report the function as unknown
    throw FUNCUNKNOWN_X.get(ii, name.string());
  }

  /**
   * Tries to resolve the specified function as a built-in one.
   * @param name function name
   * @param arity number of arguments
   * @param ii input info
   * @return function spec if found, {@code null} otherwise
   * @throws QueryException if the name matches a built-in function but the arity is out of range
   */
  private Function getBuiltIn(final QNm name, final long arity, final InputInfo ii)
      throws QueryException {
    final int id = id(name.id());
    // id 0 means the name is not registered in this token set
    if(id == 0) return null;
    final Function fl = funcs[id];
    if(!eq(fl.uri(), name.uri())) return null;
    // check number of arguments
    if(arity >= fl.minMax[0] && arity <= fl.minMax[1]) return fl;
    throw FUNCARGNUM_X_X_X.get(ii, fl, arity, arity == 1 ? "" : "s");
  }

  /**
   * Returns the specified built-in function, instantiated with the given arguments.
   * @param name function qname
   * @param args optional arguments
   * @param sc static context
   * @param ii input info
   * @return function instance, or {@code null} if the name does not denote a built-in function
   * @throws QueryException query exception
   */
  public StandardFunc get(final QNm name, final Expr[] args, final StaticContext sc,
      final InputInfo ii) throws QueryException {
    final Function fl = getBuiltIn(name, args.length, ii);
    return fl == null ? null : fl.get(sc, ii, args);
  }

  /**
   * Creates either a {@link FuncItem} or a {@link Closure} depending on when the method is called.
   * At parse and compile time a closure is generated to enable inlining and compilation, at
   * runtime we directly generate a function item.
   * @param anns function annotations
   * @param name function name, may be {@code null}
   * @param params formal parameters
   * @param ft function type
   * @param expr function body
   * @param scp variable scope
   * @param sc static context
   * @param ii input info
   * @param runtime run-time flag
   * @param updating flag for updating functions
   * @return the function expression
   */
  private static Expr closureOrFItem(final AnnList anns, final QNm name, final Var[] params,
      final FuncType ft, final Expr expr, final VarScope scp, final StaticContext sc,
      final InputInfo ii, final boolean runtime, final boolean updating) {
    // updating closures carry no declared return type (null) so the type is not checked
    return runtime ? new FuncItem(sc, anns, name, params, ft, expr, scp.stackSize()) :
      new Closure(ii, name, updating ? null : ft.retType, params, expr, anns, null, sc, scp);
  }

  /**
   * Gets a function literal for a known function.
   * @param name function name
   * @param arity number of arguments
   * @param qc query context
   * @param sc static context
   * @param ii input info
   * @param runtime {@code true} if this method is called at run-time, {@code false} otherwise
   * @return function literal if found, {@code null} otherwise
   * @throws QueryException query exception
   */
  public static Expr getLiteral(final QNm name, final int arity, final QueryContext qc,
      final StaticContext sc, final InputInfo ii, final boolean runtime) throws QueryException {

    // parse type constructors: xs:TYPE#1 becomes a single-argument cast wrapper
    if(eq(name.uri(), XS_URI)) {
      final Type type = getCast(name, arity, ii);
      final VarScope scp = new VarScope(sc);
      final Var[] args = { scp.newLocal(qc, new QNm(ITEMM, ""), SeqType.AAT_ZO, true) };
      final Expr e = new Cast(sc, ii, new VarRef(ii, args[0]), type.seqType());
      final AnnList anns = new AnnList();
      final FuncType ft = FuncType.get(anns, args, e.seqType());
      return closureOrFItem(anns, name, args, ft, e, scp, sc, ii, runtime, false);
    }

    // built-in functions
    final Function fn = get().getBuiltIn(name, arity, ii);
    if(fn != null) {
      final AnnList anns = new AnnList();
      final VarScope scp = new VarScope(sc);
      final FuncType ft = fn.type(arity, anns);
      final QNm[] argNames = fn.argNames(arity);

      // create fresh parameter variables and matching references for the call
      final Var[] args = new Var[arity];
      final Expr[] calls = new Expr[arity];
      for(int i = 0; i < arity; i++) {
        args[i] = scp.newLocal(qc, argNames[i], ft.argTypes[i], true);
        calls[i] = new VarRef(ii, args[i]);
      }

      final StandardFunc sf = fn.get(sc, ii, calls);
      final boolean upd = sf.has(Flag.UPD);
      if(upd) {
        qc.updating();
        anns.add(new Ann(ii, Annotation.UPDATING));
      }
      // context/position-dependent functions need a FuncLit so the context is bound at call time
      if(!sf.has(Flag.CTX) && !sf.has(Flag.POS)) return closureOrFItem(
          anns, name, args, fn.type(arity, anns), sf, scp, sc, ii, runtime, upd);
      return new FuncLit(anns, name, args, sf, ft, scp, sc, ii);
    }

    // user-defined function
    final StaticFunc sf = qc.funcs.get(name, arity, ii, true);
    if(sf != null) {
      final FuncType ft = sf.funcType();
      final VarScope scp = new VarScope(sc);
      final Var[] args = new Var[arity];
      final Expr[] calls = new Expr[arity];
      for(int a = 0; a < arity; a++) {
        args[a] = scp.newLocal(qc, sf.argName(a), ft.argTypes[a], true);
        calls[a] = new VarRef(ii, args[a]);
      }
      final boolean upd = sf.updating;
      final TypedFunc tf = qc.funcs.getFuncRef(sf.name, calls, sc, ii);
      final Expr f = closureOrFItem(tf.anns, sf.name, args, ft, tf.fun, scp, sc, ii,
          runtime, upd);
      if(upd) qc.updating();
      return f;
    }

    // Java function (only allowed with administrator permissions)
    final VarScope scp = new VarScope(sc);
    final FuncType jt = FuncType.arity(arity);
    final Var[] vs = new Var[arity];
    final Expr[] refs = new Expr[vs.length];
    final int vl = vs.length;
    for(int v = 0; v < vl; v++) {
      // Java mappings get generic item parameters named arg1, arg2, ...
      vs[v] = scp.newLocal(qc, new QNm(ARG + (v + 1), ""), SeqType.ITEM_ZM, true);
      refs[v] = new VarRef(ii, vs[v]);
    }
    final Expr jm = JavaMapping.get(name, refs, qc, sc, ii);
    return jm == null ? null : new FuncLit(new AnnList(), name, vs, jm, jt, scp, sc, ii);
  }

  /**
   * Returns a function item for a user-defined function.
   * @param sf static function
   * @param qc query context
   * @param sc static context
   * @param info input info
   * @return resulting value
   * @throws QueryException query exception
   */
  public static FuncItem getUser(final StaticFunc sf, final QueryContext qc,
      final StaticContext sc, final InputInfo info) throws QueryException {
    final FuncType ft = sf.funcType();
    final VarScope scp = new VarScope(sc);
    final int arity = sf.args.length;
    // fresh parameter variables and references forwarding them to the function reference
    final Var[] args = new Var[arity];
    final int al = args.length;
    final Expr[] calls = new Expr[al];
    for(int a = 0; a < al; a++) {
      args[a] = scp.newLocal(qc, sf.argName(a), ft.argTypes[a], true);
      calls[a] = new VarRef(info, args[a]);
    }
    final TypedFunc tf = qc.funcs.getFuncRef(sf.name, calls, sc, info);
    return new FuncItem(sc, tf.anns, sf.name, args, ft, tf.fun, scp.stackSize());
  }

  /**
   * Returns a function with the specified name and number of arguments,
   * or {@code null}.
   * @param name name of the function
   * @param args optional arguments
   * @param qc query context
   * @param sc static context
   * @param ii input info
   * @return function instance
   * @throws QueryException query exception
   */
  public static TypedFunc get(final QNm name, final Expr[] args, final QueryContext qc,
      final StaticContext sc, final InputInfo ii) throws QueryException {

    // parse type constructors: xs:TYPE(arg) is compiled to a cast
    if(eq(name.uri(), XS_URI)) {
      final Type type = getCast(name, args.length, ii);
      final SeqType to = SeqType.get(type, Occ.ZERO_ONE);
      return TypedFunc.constr(new Cast(sc, ii, args[0], to));
    }

    // built-in functions
    final StandardFunc fun = get().get(name, args, sc, ii);
    if(fun != null) {
      final AnnList anns = new AnnList();
      if(fun.sig.has(Flag.UPD)) {
        qc.updating();
        anns.add(new Ann(ii, Annotation.UPDATING));
      }
      return new TypedFunc(fun, anns);
    }

    // user-defined function
    final TypedFunc tf = qc.funcs.getRef(name, args, sc, ii);
    if(tf != null) return tf;

    // Java function (only allowed with administrator permissions)
    final JavaMapping jf = JavaMapping.get(name, args, qc, sc, ii);
    if(jf != null) return TypedFunc.java(jf);

    // add user-defined function that has not been declared yet
    if(FuncType.find(name) == null) return qc.funcs.getFuncRef(name, args, sc, ii);

    // no function found
    return null;
  }

  /**
   * Returns an exception if the name of a built-in function is similar to the specified name.
   * Keys in this token set have the form {@code xy{URI}LOCAL} — positions 2..i hold the URI and
   * everything after the closing brace holds the local name.
   * @param name name of input function
   * @param ii input info
   * @return query exception or {@code null}
   */
  QueryException similarError(final QNm name, final InputInfo ii) {
    // find functions with identical URIs and similar local names
    final byte[] local = name.local(), uri = name.uri();
    final Levenshtein ls = new Levenshtein();
    for(final byte[] key : this) {
      final int i = indexOf(key, '}');
      if(eq(uri, substring(key, 2, i)) && ls.similar(local, substring(key, i + 1)))
        return similarError(name, ii, key);
    }
    // find functions with identical local names
    for(final byte[] key : this) {
      final int i = indexOf(key, '}');
      if(eq(local, substring(key, i + 1))) return similarError(name, ii, key);
    }
    // find functions with identical URIs and local names that start with the specified name
    for(final byte[] key : this) {
      final int i = indexOf(key, '}');
      if(eq(uri, substring(key, 2, i)) && startsWith(substring(key, i + 1), local))
        return similarError(name, ii, key);
    }
    return null;
  }

  /**
   * Returns an exception for the specified function.
   * @param name name of input function
   * @param ii input info
   * @param key key of built-in function
   * @return query exception
   */
  private static QueryException similarError(final QNm name, final InputInfo ii,
      final byte[] key) {
    final int i = indexOf(key, '}');
    return FUNCSIMILAR_X_X.get(ii, name.prefixId(FN_URI), new TokenBuilder(
        NSGlobal.prefix(substring(key, 2, i))).add(':').add(substring(key, i + 1)).finish());
  }

  @Override
  protected void rehash(final int s) {
    super.rehash(s);
    // keep the parallel function array in sync with the resized token set
    funcs = Array.copy(funcs, new Function[s]);
  }
}
/*
 * Copyright 2021 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.dialogflow.v2beta1;

import com.google.api.core.ApiFuture;
import com.google.api.core.ApiFutures;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.paging.AbstractFixedSizeCollection;
import com.google.api.gax.paging.AbstractPage;
import com.google.api.gax.paging.AbstractPagedListResponse;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.dialogflow.v2beta1.stub.AnswerRecordsStub;
import com.google.cloud.dialogflow.v2beta1.stub.AnswerRecordsStubSettings;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.protobuf.FieldMask;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;

// AUTO-GENERATED DOCUMENTATION AND CLASS.
// NOTE(review): this file is generated by gapic-generator-java; do not hand-edit logic,
// as changes will be overwritten on the next regeneration.
/**
 * Service Description: Service for managing
 * [AnswerRecords][google.cloud.dialogflow.v2beta1.AnswerRecord].
 *
 * <p>This class provides the ability to make remote calls to the backing service through method
 * calls that map to API methods. Sample code to get started:
 *
 * <pre>{@code
 * try (AnswerRecordsClient answerRecordsClient = AnswerRecordsClient.create()) {
 *   GetAnswerRecordRequest request =
 *       GetAnswerRecordRequest.newBuilder().setName("name3373707").build();
 *   AnswerRecord response = answerRecordsClient.getAnswerRecord(request);
 * }
 * }</pre>
 *
 * <p>Note: close() needs to be called on the AnswerRecordsClient object to clean up resources such
 * as threads. In the example above, try-with-resources is used, which automatically calls close().
 *
 * <p>The surface of this class includes several types of Java methods for each of the API's
 * methods:
 *
 * <ol>
 *   <li>A "flattened" method. With this type of method, the fields of the request type have been
 *       converted into function parameters. It may be the case that not all fields are available as
 *       parameters, and not every API method will have a flattened method entry point.
 *   <li>A "request object" method. This type of method only takes one parameter, a request object,
 *       which must be constructed before the call. Not every API method will have a request object
 *       method.
 *   <li>A "callable" method. This type of method takes no parameters and returns an immutable API
 *       callable object, which can be used to initiate calls to the service.
 * </ol>
 *
 * <p>See the individual methods for example code.
 *
 * <p>Many parameters require resource names to be formatted in a particular way. To assist with
 * these names, this class includes a format method for each type of name, and additionally a parse
 * method to extract the individual identifiers contained within names that are returned.
 *
 * <p>This class can be customized by passing in a custom instance of AnswerRecordsSettings to
 * create(). For example:
 *
 * <p>To customize credentials:
 *
 * <pre>{@code
 * AnswerRecordsSettings answerRecordsSettings =
 *     AnswerRecordsSettings.newBuilder()
 *         .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
 *         .build();
 * AnswerRecordsClient answerRecordsClient = AnswerRecordsClient.create(answerRecordsSettings);
 * }</pre>
 *
 * <p>To customize the endpoint:
 *
 * <pre>{@code
 * AnswerRecordsSettings answerRecordsSettings =
 *     AnswerRecordsSettings.newBuilder().setEndpoint(myEndpoint).build();
 * AnswerRecordsClient answerRecordsClient = AnswerRecordsClient.create(answerRecordsSettings);
 * }</pre>
 *
 * <p>Please refer to the GitHub repository's samples for more quickstart code snippets.
 */
@BetaApi
@Generated("by gapic-generator-java")
public class AnswerRecordsClient implements BackgroundResource {
  // Client configuration; null when the client was built directly from a pre-made stub.
  private final AnswerRecordsSettings settings;
  // Transport-level stub that all public methods delegate to.
  private final AnswerRecordsStub stub;

  /** Constructs an instance of AnswerRecordsClient with default settings. */
  public static final AnswerRecordsClient create() throws IOException {
    return create(AnswerRecordsSettings.newBuilder().build());
  }

  /**
   * Constructs an instance of AnswerRecordsClient, using the given settings. The channels are
   * created based on the settings passed in, or defaults for any settings that are not set.
   */
  public static final AnswerRecordsClient create(AnswerRecordsSettings settings)
      throws IOException {
    return new AnswerRecordsClient(settings);
  }

  /**
   * Constructs an instance of AnswerRecordsClient, using the given stub for making calls. This is
   * for advanced usage - prefer using create(AnswerRecordsSettings).
   */
  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  public static final AnswerRecordsClient create(AnswerRecordsStub stub) {
    return new AnswerRecordsClient(stub);
  }

  /**
   * Constructs an instance of AnswerRecordsClient, using the given settings. This is protected so
   * that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   */
  protected AnswerRecordsClient(AnswerRecordsSettings settings) throws IOException {
    this.settings = settings;
    this.stub = ((AnswerRecordsStubSettings) settings.getStubSettings()).createStub();
  }

  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  protected AnswerRecordsClient(AnswerRecordsStub stub) {
    // Stub-based construction bypasses settings entirely; getSettings() will return null.
    this.settings = null;
    this.stub = stub;
  }

  public final AnswerRecordsSettings getSettings() {
    return settings;
  }

  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  public AnswerRecordsStub getStub() {
    return stub;
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Deprecated. Retrieves a specific answer record.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * try (AnswerRecordsClient answerRecordsClient = AnswerRecordsClient.create()) {
   *   GetAnswerRecordRequest request =
   *       GetAnswerRecordRequest.newBuilder().setName("name3373707").build();
   *   AnswerRecord response = answerRecordsClient.getAnswerRecord(request);
   * }
   * }</pre>
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   * @deprecated This method is deprecated and will be removed in the next major version update.
   */
  @Deprecated
  public final AnswerRecord getAnswerRecord(GetAnswerRecordRequest request) {
    return getAnswerRecordCallable().call(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Deprecated. Retrieves a specific answer record.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * try (AnswerRecordsClient answerRecordsClient = AnswerRecordsClient.create()) {
   *   GetAnswerRecordRequest request =
   *       GetAnswerRecordRequest.newBuilder().setName("name3373707").build();
   *   ApiFuture<AnswerRecord> future =
   *       answerRecordsClient.getAnswerRecordCallable().futureCall(request);
   *   // Do something.
   *   AnswerRecord response = future.get();
   * }
   * }</pre>
   *
   * @deprecated This method is deprecated and will be removed in the next major version update.
   */
  @Deprecated
  public final UnaryCallable<GetAnswerRecordRequest, AnswerRecord> getAnswerRecordCallable() {
    return stub.getAnswerRecordCallable();
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Returns the list of all answer records in the specified project in reverse chronological order.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * try (AnswerRecordsClient answerRecordsClient = AnswerRecordsClient.create()) {
   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
   *   for (AnswerRecord element : answerRecordsClient.listAnswerRecords(parent).iterateAll()) {
   *     // doThingsWith(element);
   *   }
   * }
   * }</pre>
   *
   * @param parent Required. The project to list all answer records for in reverse chronological
   *     order. Format: `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;`.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final ListAnswerRecordsPagedResponse listAnswerRecords(LocationName parent) {
    ListAnswerRecordsRequest request =
        ListAnswerRecordsRequest.newBuilder()
            .setParent(parent == null ? null : parent.toString())
            .build();
    return listAnswerRecords(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Returns the list of all answer records in the specified project in reverse chronological order.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * try (AnswerRecordsClient answerRecordsClient = AnswerRecordsClient.create()) {
   *   ProjectName parent = ProjectName.of("[PROJECT]");
   *   for (AnswerRecord element : answerRecordsClient.listAnswerRecords(parent).iterateAll()) {
   *     // doThingsWith(element);
   *   }
   * }
   * }</pre>
   *
   * @param parent Required. The project to list all answer records for in reverse chronological
   *     order. Format: `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;`.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final ListAnswerRecordsPagedResponse listAnswerRecords(ProjectName parent) {
    ListAnswerRecordsRequest request =
        ListAnswerRecordsRequest.newBuilder()
            .setParent(parent == null ? null : parent.toString())
            .build();
    return listAnswerRecords(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Returns the list of all answer records in the specified project in reverse chronological order.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * try (AnswerRecordsClient answerRecordsClient = AnswerRecordsClient.create()) {
   *   String parent = ProjectName.of("[PROJECT]").toString();
   *   for (AnswerRecord element : answerRecordsClient.listAnswerRecords(parent).iterateAll()) {
   *     // doThingsWith(element);
   *   }
   * }
   * }</pre>
   *
   * @param parent Required. The project to list all answer records for in reverse chronological
   *     order. Format: `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;`.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final ListAnswerRecordsPagedResponse listAnswerRecords(String parent) {
    ListAnswerRecordsRequest request =
        ListAnswerRecordsRequest.newBuilder().setParent(parent).build();
    return listAnswerRecords(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Returns the list of all answer records in the specified project in reverse chronological order.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * try (AnswerRecordsClient answerRecordsClient = AnswerRecordsClient.create()) {
   *   ListAnswerRecordsRequest request =
   *       ListAnswerRecordsRequest.newBuilder()
   *           .setParent(ProjectName.of("[PROJECT]").toString())
   *           .setPageSize(883849137)
   *           .setPageToken("pageToken873572522")
   *           .build();
   *   for (AnswerRecord element : answerRecordsClient.listAnswerRecords(request).iterateAll()) {
   *     // doThingsWith(element);
   *   }
   * }
   * }</pre>
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final ListAnswerRecordsPagedResponse listAnswerRecords(ListAnswerRecordsRequest request) {
    return listAnswerRecordsPagedCallable().call(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Returns the list of all answer records in the specified project in reverse chronological order.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * try (AnswerRecordsClient answerRecordsClient = AnswerRecordsClient.create()) {
   *   ListAnswerRecordsRequest request =
   *       ListAnswerRecordsRequest.newBuilder()
   *           .setParent(ProjectName.of("[PROJECT]").toString())
   *           .setPageSize(883849137)
   *           .setPageToken("pageToken873572522")
   *           .build();
   *   ApiFuture<AnswerRecord> future =
   *       answerRecordsClient.listAnswerRecordsPagedCallable().futureCall(request);
   *   // Do something.
   *   for (AnswerRecord element : future.get().iterateAll()) {
   *     // doThingsWith(element);
   *   }
   * }
   * }</pre>
   */
  public final UnaryCallable<ListAnswerRecordsRequest, ListAnswerRecordsPagedResponse>
      listAnswerRecordsPagedCallable() {
    return stub.listAnswerRecordsPagedCallable();
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Returns the list of all answer records in the specified project in reverse chronological order.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * try (AnswerRecordsClient answerRecordsClient = AnswerRecordsClient.create()) {
   *   ListAnswerRecordsRequest request =
   *       ListAnswerRecordsRequest.newBuilder()
   *           .setParent(ProjectName.of("[PROJECT]").toString())
   *           .setPageSize(883849137)
   *           .setPageToken("pageToken873572522")
   *           .build();
   *   while (true) {
   *     ListAnswerRecordsResponse response =
   *         answerRecordsClient.listAnswerRecordsCallable().call(request);
   *     for (AnswerRecord element : response.getResponsesList()) {
   *       // doThingsWith(element);
   *     }
   *     String nextPageToken = response.getNextPageToken();
   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
   *       request = request.toBuilder().setPageToken(nextPageToken).build();
   *     } else {
   *       break;
   *     }
   *   }
   * }
   * }</pre>
   */
  public final UnaryCallable<ListAnswerRecordsRequest, ListAnswerRecordsResponse>
      listAnswerRecordsCallable() {
    return stub.listAnswerRecordsCallable();
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Updates the specified answer record.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * try (AnswerRecordsClient answerRecordsClient = AnswerRecordsClient.create()) {
   *   AnswerRecord answerRecord = AnswerRecord.newBuilder().build();
   *   FieldMask updateMask = FieldMask.newBuilder().build();
   *   AnswerRecord response = answerRecordsClient.updateAnswerRecord(answerRecord, updateMask);
   * }
   * }</pre>
   *
   * @param answerRecord Required. Answer record to update.
   * @param updateMask Required. The mask to control which fields get updated.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final AnswerRecord updateAnswerRecord(AnswerRecord answerRecord, FieldMask updateMask) {
    UpdateAnswerRecordRequest request =
        UpdateAnswerRecordRequest.newBuilder()
            .setAnswerRecord(answerRecord)
            .setUpdateMask(updateMask)
            .build();
    return updateAnswerRecord(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Updates the specified answer record.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * try (AnswerRecordsClient answerRecordsClient = AnswerRecordsClient.create()) {
   *   UpdateAnswerRecordRequest request =
   *       UpdateAnswerRecordRequest.newBuilder()
   *           .setAnswerRecord(AnswerRecord.newBuilder().build())
   *           .setUpdateMask(FieldMask.newBuilder().build())
   *           .build();
   *   AnswerRecord response = answerRecordsClient.updateAnswerRecord(request);
   * }
   * }</pre>
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final AnswerRecord updateAnswerRecord(UpdateAnswerRecordRequest request) {
    return updateAnswerRecordCallable().call(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Updates the specified answer record.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * try (AnswerRecordsClient answerRecordsClient = AnswerRecordsClient.create()) {
   *   UpdateAnswerRecordRequest request =
   *       UpdateAnswerRecordRequest.newBuilder()
   *           .setAnswerRecord(AnswerRecord.newBuilder().build())
   *           .setUpdateMask(FieldMask.newBuilder().build())
   *           .build();
   *   ApiFuture<AnswerRecord> future =
   *       answerRecordsClient.updateAnswerRecordCallable().futureCall(request);
   *   // Do something.
   *   AnswerRecord response = future.get();
   * }
   * }</pre>
   */
  public final UnaryCallable<UpdateAnswerRecordRequest, AnswerRecord> updateAnswerRecordCallable() {
    return stub.updateAnswerRecordCallable();
  }

  @Override
  public final void close() {
    stub.close();
  }

  @Override
  public void shutdown() {
    stub.shutdown();
  }

  @Override
  public boolean isShutdown() {
    return stub.isShutdown();
  }

  @Override
  public boolean isTerminated() {
    return stub.isTerminated();
  }

  @Override
  public void shutdownNow() {
    stub.shutdownNow();
  }

  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return stub.awaitTermination(duration, unit);
  }

  /** Paged response wrapper returned by the listAnswerRecords flattened/paged methods. */
  public static class ListAnswerRecordsPagedResponse
      extends AbstractPagedListResponse<
          ListAnswerRecordsRequest,
          ListAnswerRecordsResponse,
          AnswerRecord,
          ListAnswerRecordsPage,
          ListAnswerRecordsFixedSizeCollection> {

    public static ApiFuture<ListAnswerRecordsPagedResponse> createAsync(
        PageContext<ListAnswerRecordsRequest, ListAnswerRecordsResponse, AnswerRecord> context,
        ApiFuture<ListAnswerRecordsResponse> futureResponse) {
      ApiFuture<ListAnswerRecordsPage> futurePage =
          ListAnswerRecordsPage.createEmptyPage().createPageAsync(context, futureResponse);
      // Wrap the first-page future into a paged-response future without blocking.
      return ApiFutures.transform(
          futurePage,
          input -> new ListAnswerRecordsPagedResponse(input),
          MoreExecutors.directExecutor());
    }

    private ListAnswerRecordsPagedResponse(ListAnswerRecordsPage page) {
      super(page, ListAnswerRecordsFixedSizeCollection.createEmptyCollection());
    }
  }

  /** Single page of listAnswerRecords results, with navigation to subsequent pages. */
  public static class ListAnswerRecordsPage
      extends AbstractPage<
          ListAnswerRecordsRequest,
          ListAnswerRecordsResponse,
          AnswerRecord,
          ListAnswerRecordsPage> {

    private ListAnswerRecordsPage(
        PageContext<ListAnswerRecordsRequest, ListAnswerRecordsResponse, AnswerRecord> context,
        ListAnswerRecordsResponse response) {
      super(context, response);
    }

    private static ListAnswerRecordsPage createEmptyPage() {
      return new ListAnswerRecordsPage(null, null);
    }

    @Override
    protected ListAnswerRecordsPage createPage(
        PageContext<ListAnswerRecordsRequest, ListAnswerRecordsResponse, AnswerRecord> context,
        ListAnswerRecordsResponse response) {
      return new ListAnswerRecordsPage(context, response);
    }

    @Override
    public ApiFuture<ListAnswerRecordsPage> createPageAsync(
        PageContext<ListAnswerRecordsRequest, ListAnswerRecordsResponse, AnswerRecord> context,
        ApiFuture<ListAnswerRecordsResponse> futureResponse) {
      return super.createPageAsync(context, futureResponse);
    }
  }

  /** Fixed-size collection view over listAnswerRecords pages (for expandToFixedSizeCollection). */
  public static class ListAnswerRecordsFixedSizeCollection
      extends AbstractFixedSizeCollection<
          ListAnswerRecordsRequest,
          ListAnswerRecordsResponse,
          AnswerRecord,
          ListAnswerRecordsPage,
          ListAnswerRecordsFixedSizeCollection> {

    private ListAnswerRecordsFixedSizeCollection(
        List<ListAnswerRecordsPage> pages, int collectionSize) {
      super(pages, collectionSize);
    }

    private static ListAnswerRecordsFixedSizeCollection createEmptyCollection() {
      return new ListAnswerRecordsFixedSizeCollection(null, 0);
    }

    @Override
    protected ListAnswerRecordsFixedSizeCollection createCollection(
        List<ListAnswerRecordsPage> pages, int collectionSize) {
      return new ListAnswerRecordsFixedSizeCollection(pages, collectionSize);
    }
  }
}
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.psi.impl;

import com.intellij.lang.PsiBuilderFactory;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressIndicatorProvider;
import com.intellij.openapi.progress.util.AbstractProgressIndicatorExBase;
import com.intellij.openapi.progress.util.ProgressWrapper;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.FileIndexFacade;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.NotNullLazyValue;
import com.intellij.openapi.vfs.NonPhysicalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileFilter;
import com.intellij.openapi.wm.ex.ProgressIndicatorEx;
import com.intellij.psi.*;
import com.intellij.psi.impl.file.impl.FileManager;
import com.intellij.psi.impl.file.impl.FileManagerImpl;
import com.intellij.psi.util.PsiModificationTracker;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.messages.Topic;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;

/**
 * Project-level PSI manager implementation.
 *
 * <p>Responsibilities visible in this class:
 * <ul>
 *   <li>owns the {@link FileManagerImpl} used to map {@link VirtualFile}s to PSI
 *       ({@link #findFile}, {@link #findViewProvider}, {@link #findDirectory}, {@link #reloadFromDisk});</li>
 *   <li>dispatches PSI tree-change events to registered preprocessors and listeners
 *       (the {@code beforeXxx}/{@code xxxChanged} methods funnel into {@link #fireEvent});</li>
 *   <li>publishes coarse-grained "something changed" notifications on
 *       {@link #ANY_PSI_CHANGE_TOPIC} via {@link #beforeChange}/{@link #afterChange};</li>
 *   <li>tracks a batch-files-processing counter and a test-only file-loading assertion filter.</li>
 * </ul>
 */
public final class PsiManagerImpl extends PsiManagerEx implements Disposable {
  private static final Logger LOG = Logger.getInstance(PsiManagerImpl.class);

  private final Project myProject;
  // Lazily resolved to avoid requesting FileIndexFacade before it is needed.
  private final NotNullLazyValue<? extends FileIndexFacade> myFileIndex;
  private final PsiModificationTracker myModificationTracker;
  private final FileManagerImpl myFileManager;

  // Lock-free copy-on-write lists: events are fired far more often than listeners are (un)registered.
  private final List<PsiTreeChangePreprocessor> myTreeChangePreprocessors = ContainerUtil.createLockFreeCopyOnWriteList();
  private final List<PsiTreeChangeListener> myTreeChangeListeners = ContainerUtil.createLockFreeCopyOnWriteList();
  // Guards against re-entrant PSI modification while a real tree-change event is being dispatched; see fireEvent().
  private boolean myTreeChangeEventIsFiring;

  // Test-only filter; when it accepts a file, loading that file's AST is considered an error (see isAssertOnFileLoading).
  private VirtualFileFilter myAssertOnFileLoadingFilter = VirtualFileFilter.NONE;

  // Nesting counter for start/finishBatchFilesProcessingMode; > 0 means batch mode is active.
  private final AtomicInteger myBatchFilesProcessingModeCount = new AtomicInteger(0);

  /** Message-bus topic notified around every PSI change; see {@link #beforeChange} and {@link #afterChange}. */
  public static final Topic<AnyPsiChangeListener> ANY_PSI_CHANGE_TOPIC =
    new Topic<>(AnyPsiChangeListener.class, Topic.BroadcastDirection.TO_PARENT);

  public PsiManagerImpl(@NotNull Project project) {
    // we need to initialize PsiBuilderFactory service, so it won't initialize under PsiLock from ChameleonTransform
    PsiBuilderFactory.getInstance();

    myProject = project;
    myFileIndex = NotNullLazyValue.createValue(() -> FileIndexFacade.getInstance(project));
    myModificationTracker = PsiModificationTracker.SERVICE.getInstance(project);
    myFileManager = new FileManagerImpl(this, myFileIndex);

    // The modification tracker doubles as a tree-change preprocessor so it sees every PSI change first-hand.
    myTreeChangePreprocessors.add((PsiTreeChangePreprocessor)myModificationTracker);
  }

  @Override
  public void dispose() {
    myFileManager.dispose();
  }

  @Override
  public boolean isDisposed() {
    // Lifetime is tied to the project; no separate disposed flag is kept here.
    return myProject.isDisposed();
  }

  @Override
  public void dropResolveCaches() {
    myFileManager.processQueue();
    // Firing a "before physical change" notification prompts subscribers to drop their resolve caches.
    beforeChange(true);
  }

  @Override
  public void dropPsiCaches() {
    dropResolveCaches();
    // Property-change events for unloaded PSI must be fired under write action.
    WriteAction.run(myFileManager::firePropertyChangedForUnloadedPsi);
  }

  @Override
  public boolean isInProject(@NotNull PsiElement element) {
    if (element instanceof PsiDirectoryContainer) {
      // A directory container is "in project" only if every one of its directories is.
      PsiDirectory[] dirs = ((PsiDirectoryContainer)element).getDirectories();
      for (PsiDirectory dir : dirs) {
        if (!isInProject(dir)) return false;
      }
      return true;
    }

    PsiFile file = element.getContainingFile();
    VirtualFile virtualFile = null;
    if (file != null) {
      virtualFile = file.getViewProvider().getVirtualFile();
    }
    else if (element instanceof PsiFileSystemItem) {
      virtualFile = ((PsiFileSystemItem)element).getVirtualFile();
    }
    // Physical PSI backed by a non-physical file system (e.g. in-memory) is treated as in-project.
    if (file != null && file.isPhysical() && virtualFile.getFileSystem() instanceof NonPhysicalFileSystem) return true;

    return virtualFile != null && myFileIndex.getValue().isInContent(virtualFile);
  }

  @Override
  @TestOnly
  public void setAssertOnFileLoadingFilter(@NotNull VirtualFileFilter filter, @NotNull Disposable parentDisposable) {
    // Find something to ensure there are no changed files waiting to be processed in repository indices.
    myAssertOnFileLoadingFilter = filter;
    // Reset automatically when the owning disposable goes away so one test cannot affect the next.
    Disposer.register(parentDisposable, () -> myAssertOnFileLoadingFilter = VirtualFileFilter.NONE);
  }

  @Override
  public boolean isAssertOnFileLoading(@NotNull VirtualFile file) {
    return myAssertOnFileLoadingFilter.accept(file);
  }

  @Override
  @NotNull
  public Project getProject() {
    return myProject;
  }

  @Override
  @NotNull
  public FileManager getFileManager() {
    return myFileManager;
  }

  @Override
  public boolean areElementsEquivalent(PsiElement element1, PsiElement element2) {
    ProgressIndicatorProvider.checkCanceled(); // We hope this method is being called often enough to cancel daemon processes smoothly

    if (element1 == element2) return true;
    if (element1 == null || element2 == null) {
      return false;
    }

    // Equivalence is checked symmetrically because isEquivalentTo is not required to be symmetric.
    return element1.equals(element2) || element1.isEquivalentTo(element2) || element2.isEquivalentTo(element1);
  }

  @Override
  public PsiFile findFile(@NotNull VirtualFile file) {
    ProgressIndicatorProvider.checkCanceled();
    return myFileManager.findFile(file);
  }

  @NotNull
  @Override
  public FileViewProvider findViewProvider(@NotNull VirtualFile file) {
    ProgressIndicatorProvider.checkCanceled();
    return myFileManager.findViewProvider(file);
  }

  @Override
  public PsiDirectory findDirectory(@NotNull VirtualFile file) {
    ProgressIndicatorProvider.checkCanceled();
    return myFileManager.findDirectory(file);
  }

  @Override
  public void reloadFromDisk(@NotNull PsiFile file) {
    myFileManager.reloadFromDisk(file);
  }

  @Override
  public void addPsiTreeChangeListener(@NotNull PsiTreeChangeListener listener) {
    myTreeChangeListeners.add(listener);
  }

  @Override
  public void addPsiTreeChangeListener(@NotNull final PsiTreeChangeListener listener, @NotNull Disposable parentDisposable) {
    addPsiTreeChangeListener(listener);
    // Auto-unsubscribe when the parent disposable is disposed.
    Disposer.register(parentDisposable, () -> removePsiTreeChangeListener(listener));
  }

  @Override
  public void removePsiTreeChangeListener(@NotNull PsiTreeChangeListener listener) {
    myTreeChangeListeners.remove(listener);
  }

  // Compact, null-safe element description used only in debug logging below.
  private static @NonNls String logPsi(@Nullable PsiElement element) {
    return element == null ? " null" : element.getClass().getName();
  }

  @Override
  public void beforeChildAddition(@NotNull PsiTreeChangeEventImpl event) {
    beforeChange(true);
    event.setCode(PsiTreeChangeEventImpl.PsiEventType.BEFORE_CHILD_ADDITION);
    if (LOG.isDebugEnabled()) {
      LOG.debug("beforeChildAddition: event = " + event);
    }
    fireEvent(event);
  }

  @Override
  public void beforeChildRemoval(@NotNull PsiTreeChangeEventImpl event) {
    beforeChange(true);
    event.setCode(PsiTreeChangeEventImpl.PsiEventType.BEFORE_CHILD_REMOVAL);
    if (LOG.isDebugEnabled()) {
      LOG.debug("beforeChildRemoval: child = " + logPsi(event.getChild()) + ", parent = " + logPsi(event.getParent()));
    }
    fireEvent(event);
  }

  @Override
  public void beforeChildReplacement(@NotNull PsiTreeChangeEventImpl event) {
    beforeChange(true);
    event.setCode(PsiTreeChangeEventImpl.PsiEventType.BEFORE_CHILD_REPLACEMENT);
    if (LOG.isDebugEnabled()) {
      LOG.debug("beforeChildReplacement: oldChild = " + logPsi(event.getOldChild()));
    }
    fireEvent(event);
  }

  public void beforeChildrenChange(@NotNull PsiTreeChangeEventImpl event) {
    beforeChange(true);
    event.setCode(PsiTreeChangeEventImpl.PsiEventType.BEFORE_CHILDREN_CHANGE);
    if (LOG.isDebugEnabled()) {
      LOG.debug("beforeChildrenChange: parent = " + logPsi(event.getParent()));
    }
    fireEvent(event);
  }

  public void beforeChildMovement(@NotNull PsiTreeChangeEventImpl event) {
    beforeChange(true);
    event.setCode(PsiTreeChangeEventImpl.PsiEventType.BEFORE_CHILD_MOVEMENT);
    if (LOG.isDebugEnabled()) {
      LOG.debug("beforeChildMovement: child = " + logPsi(event.getChild()) + ", oldParent = " + logPsi(event.getOldParent()) + ", newParent = " + logPsi(event.getNewParent()));
    }
    fireEvent(event);
  }

  public void beforePropertyChange(@NotNull PsiTreeChangeEventImpl event) {
    beforeChange(true);
    event.setCode(PsiTreeChangeEventImpl.PsiEventType.BEFORE_PROPERTY_CHANGE);
    if (LOG.isDebugEnabled()) {
      LOG.debug("beforePropertyChange: element = " + logPsi(event.getElement()) + ", propertyName = " + event.getPropertyName() + ", oldValue = " + arrayToString(event.getOldValue()));
    }
    fireEvent(event);
  }

  // Renders array property values readably in logs; passes other values through unchanged.
  private static Object arrayToString(Object value) {
    return value instanceof Object[] ? Arrays.deepToString((Object[])value) : value;
  }

  public void childAdded(@NotNull PsiTreeChangeEventImpl event) {
    event.setCode(PsiTreeChangeEventImpl.PsiEventType.CHILD_ADDED);
    if (LOG.isDebugEnabled()) {
      LOG.debug("childAdded: child = " + logPsi(event.getChild()) + ", parent = " + logPsi(event.getParent()));
    }
    fireEvent(event);
    afterChange(true);
  }

  public void childRemoved(@NotNull PsiTreeChangeEventImpl event) {
    event.setCode(PsiTreeChangeEventImpl.PsiEventType.CHILD_REMOVED);
    if (LOG.isDebugEnabled()) {
      LOG.debug("childRemoved: child = " + logPsi(event.getChild()) + ", parent = " + logPsi(event.getParent()));
    }
    fireEvent(event);
    afterChange(true);
  }

  public void childReplaced(@NotNull PsiTreeChangeEventImpl event) {
    event.setCode(PsiTreeChangeEventImpl.PsiEventType.CHILD_REPLACED);
    if (LOG.isDebugEnabled()) {
      LOG.debug("childReplaced: oldChild = " + logPsi(event.getOldChild()) + ", newChild = " + logPsi(event.getNewChild()) + ", parent = " + logPsi(event.getParent()));
    }
    fireEvent(event);
    afterChange(true);
  }

  public void childMoved(@NotNull PsiTreeChangeEventImpl event) {
    event.setCode(PsiTreeChangeEventImpl.PsiEventType.CHILD_MOVED);
    if (LOG.isDebugEnabled()) {
      LOG.debug("childMoved: child = " + logPsi(event.getChild()) + ", oldParent = " + logPsi(event.getOldParent()) + ", newParent = " + logPsi(event.getNewParent()));
    }
    fireEvent(event);
    afterChange(true);
  }

  public void childrenChanged(@NotNull PsiTreeChangeEventImpl event) {
    event.setCode(PsiTreeChangeEventImpl.PsiEventType.CHILDREN_CHANGED);
    if (LOG.isDebugEnabled()) {
      LOG.debug("childrenChanged: parent = " + logPsi(event.getParent()));
    }
    fireEvent(event);
    afterChange(true);
  }

  public void propertyChanged(@NotNull PsiTreeChangeEventImpl event) {
    event.setCode(PsiTreeChangeEventImpl.PsiEventType.PROPERTY_CHANGED);
    if (LOG.isDebugEnabled()) {
      LOG.debug(
        "propertyChanged: element = " + logPsi(event.getElement())
        + ", propertyName = " + event.getPropertyName()
        + ", oldValue = " + arrayToString(event.getOldValue())
        + ", newValue = " + arrayToString(event.getNewValue())
      );
    }
    fireEvent(event);
    afterChange(true);
  }

  public void addTreeChangePreprocessor(@NotNull PsiTreeChangePreprocessor preprocessor) {
    myTreeChangePreprocessors.add(preprocessor);
  }

  public void removeTreeChangePreprocessor(@NotNull PsiTreeChangePreprocessor preprocessor) {
    myTreeChangePreprocessors.remove(preprocessor);
  }

  /**
   * Dispatches {@code event} to: registered preprocessors, extension-point preprocessors,
   * registered listeners, and extension-point listeners — in that order.
   *
   * <p>For events about physical files (or events with no file) write access is asserted.
   * For "real" tree changes (everything except property changes) re-entrant PSI modification
   * during dispatch is forbidden, enforced via {@code myTreeChangeEventIsFiring}.
   */
  private void fireEvent(@NotNull PsiTreeChangeEventImpl event) {
    boolean isRealTreeChange = event.getCode() != PsiTreeChangeEventImpl.PsiEventType.PROPERTY_CHANGED
                               && event.getCode() != PsiTreeChangeEventImpl.PsiEventType.BEFORE_PROPERTY_CHANGE;

    PsiFile file = event.getFile();
    if (file == null || file.isPhysical()) {
      ApplicationManager.getApplication().assertWriteAccessAllowed();
    }
    if (isRealTreeChange) {
      LOG.assertTrue(!myTreeChangeEventIsFiring, "Changes to PSI are not allowed inside event processing");
      myTreeChangeEventIsFiring = true;
    }
    try {
      for (PsiTreeChangePreprocessor preprocessor : myTreeChangePreprocessors) {
        preprocessor.treeChanged(event);
      }
      // Extension-point preprocessors are isolated: a failing plugin must not break event delivery.
      for (PsiTreeChangePreprocessor preprocessor : PsiTreeChangePreprocessor.EP.getExtensions(myProject)) {
        try {
          preprocessor.treeChanged(event);
        }
        catch (Throwable e) {
          LOG.error(e);
        }
      }
      for (PsiTreeChangeListener listener : myTreeChangeListeners) {
        notifyPsiTreeChangeListener(event, listener);
      }
      for (PsiTreeChangeListener listener : PsiTreeChangeListener.EP.getExtensions(myProject)) {
        notifyPsiTreeChangeListener(event, listener);
      }
    }
    finally {
      if (isRealTreeChange) {
        myTreeChangeEventIsFiring = false;
      }
    }
  }

  // Routes the event to the listener callback matching the event code; listener failures are logged, not propagated.
  private static void notifyPsiTreeChangeListener(@NotNull PsiTreeChangeEventImpl event, PsiTreeChangeListener listener) {
    try {
      switch (event.getCode()) {
        case BEFORE_CHILD_ADDITION:
          listener.beforeChildAddition(event);
          break;

        case BEFORE_CHILD_REMOVAL:
          listener.beforeChildRemoval(event);
          break;

        case BEFORE_CHILD_REPLACEMENT:
          listener.beforeChildReplacement(event);
          break;

        case BEFORE_CHILD_MOVEMENT:
          listener.beforeChildMovement(event);
          break;

        case BEFORE_CHILDREN_CHANGE:
          listener.beforeChildrenChange(event);
          break;

        case BEFORE_PROPERTY_CHANGE:
          listener.beforePropertyChange(event);
          break;

        case CHILD_ADDED:
          listener.childAdded(event);
          break;

        case CHILD_REMOVED:
          listener.childRemoved(event);
          break;

        case CHILD_REPLACED:
          listener.childReplaced(event);
          break;

        case CHILD_MOVED:
          listener.childMoved(event);
          break;

        case CHILDREN_CHANGED:
          listener.childrenChanged(event);
          break;

        case PROPERTY_CHANGED:
          listener.propertyChanged(event);
          break;
      }
    }
    catch (Throwable e) {
      LOG.error(e);
    }
  }

  @Override
  public void beforeChange(boolean isPhysical) {
    myProject.getMessageBus().syncPublisher(ANY_PSI_CHANGE_TOPIC).beforePsiChanged(isPhysical);
  }

  @Override
  public void afterChange(boolean isPhysical) {
    myProject.getMessageBus().syncPublisher(ANY_PSI_CHANGE_TOPIC).afterPsiChanged(isPhysical);
  }

  @Override
  @NotNull
  public PsiModificationTracker getModificationTracker() {
    return myModificationTracker;
  }

  @Override
  public void startBatchFilesProcessingMode() {
    myBatchFilesProcessingModeCount.incrementAndGet();
  }

  @Override
  public void finishBatchFilesProcessingMode() {
    // Counter must never go negative — that would mean unbalanced start/finish calls.
    int after = myBatchFilesProcessingModeCount.decrementAndGet();
    LOG.assertTrue(after >= 0);
  }

  @Override
  public boolean isBatchFilesProcessingMode() {
    return myBatchFilesProcessingModeCount.get() > 0;
  }

  @TestOnly
  public void cleanupForNextTest() {
    assert ApplicationManager.getApplication().isUnitTestMode();
    myFileManager.cleanupForNextTest();
    dropPsiCaches();
  }

  /**
   * Hooks a delegate into {@code indicator} that drops resolve caches at most every 500 ms
   * as the indicator's fraction advances (fraction changes as files are processed).
   */
  public void dropResolveCacheRegularly(@NotNull ProgressIndicator indicator) {
    indicator = ProgressWrapper.unwrap(indicator);
    if (indicator instanceof ProgressIndicatorEx) {
      ((ProgressIndicatorEx)indicator).addStateDelegate(new AbstractProgressIndicatorExBase() {
        private final AtomicLong lastClearedTimeStamp = new AtomicLong();

        @Override
        public void setFraction(double fraction) {
          long current = System.currentTimeMillis();
          long last = lastClearedTimeStamp.get();
          // compareAndSet makes the 500 ms throttle safe under concurrent fraction updates.
          if (current - last >= 500 && lastClearedTimeStamp.compareAndSet(last, current)) {
            // fraction is changed when each file is processed =>
            // resolve caches used when searching in that file are likely to be not needed anymore
            dropResolveCaches();
          }
        }
      });
    }
  }
}
package org.robolectric.shadows; import static android.os.Build.VERSION_CODES.JELLY_BEAN_MR1; import static android.os.Build.VERSION_CODES.JELLY_BEAN_MR2; import static android.os.Build.VERSION_CODES.M; import static android.os.Build.VERSION_CODES.N; import static com.google.common.truth.Truth.assertThat; import static org.robolectric.Shadows.shadowOf; import android.app.Notification; import android.app.PendingIntent; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.graphics.drawable.Icon; import android.text.SpannableString; import androidx.test.core.app.ApplicationProvider; import androidx.test.ext.junit.runners.AndroidJUnit4; import org.junit.Test; import org.junit.runner.RunWith; import org.robolectric.R; import org.robolectric.annotation.Config; @RunWith(AndroidJUnit4.class) public class ShadowNotificationBuilderTest { private final Notification.Builder builder = new Notification.Builder(ApplicationProvider.getApplicationContext()); @Test public void build_setsContentTitleOnNotification() throws Exception { Notification notification = builder.setContentTitle("Hello").build(); assertThat(shadowOf(notification).getContentTitle().toString()).isEqualTo("Hello"); } @Test public void build_whenSetOngoingNotSet_leavesSetOngoingAsFalse() { Notification notification = builder.build(); assertThat(shadowOf(notification).isOngoing()).isFalse(); } @Test public void build_whenSetOngoing_setsOngoingToTrue() { Notification notification = builder.setOngoing(true).build(); assertThat(shadowOf(notification).isOngoing()).isTrue(); } @Test @Config(minSdk = JELLY_BEAN_MR1) public void build_whenShowWhenNotSet_setsShowWhenOnNotificationToTrue() { Notification notification = builder.setWhen(100).setShowWhen(true).build(); assertThat(shadowOf(notification).isWhenShown()).isTrue(); } @Test @Config(minSdk = JELLY_BEAN_MR1) public void build_setShowWhenOnNotification() { Notification notification = builder.setShowWhen(false).build(); 
assertThat(shadowOf(notification).isWhenShown()).isFalse(); } @Test public void build_setsContentTextOnNotification() throws Exception { Notification notification = builder.setContentText("Hello Text").build(); assertThat(shadowOf(notification).getContentText().toString()).isEqualTo("Hello Text"); } @Test public void build_setsTickerOnNotification() throws Exception { Notification notification = builder.setTicker("My ticker").build(); assertThat(notification.tickerText).isEqualTo("My ticker"); } @Test public void build_setsContentInfoOnNotification() throws Exception { builder.setContentInfo("11"); Notification notification = builder.build(); assertThat(shadowOf(notification).getContentInfo().toString()).isEqualTo("11"); } @Test @Config(minSdk = M) public void build_setsIconOnNotification() throws Exception { Notification notification = builder.setSmallIcon(R.drawable.an_image).build(); assertThat(notification.getSmallIcon().getResId()).isEqualTo(R.drawable.an_image); } @Test public void build_setsWhenOnNotification() throws Exception { Notification notification = builder.setWhen(11L).build(); assertThat(notification.when).isEqualTo(11L); } @Test public void build_setsProgressOnNotification_true() throws Exception { Notification notification = builder.setProgress(36, 57, true).build(); // If indeterminate then max and progress values are ignored. 
assertThat(shadowOf(notification).isIndeterminate()).isTrue(); } @Test public void build_setsProgressOnNotification_false() throws Exception { Notification notification = builder.setProgress(50, 10, false).build(); assertThat(shadowOf(notification).getMax()).isEqualTo(50); assertThat(shadowOf(notification).getProgress()).isEqualTo(10); assertThat(shadowOf(notification).isIndeterminate()).isFalse(); } @Test @Config(minSdk = JELLY_BEAN_MR1) public void build_setsUsesChronometerOnNotification_true() throws Exception { Notification notification = builder.setUsesChronometer(true).setWhen(10).setShowWhen(true).build(); assertThat(shadowOf(notification).usesChronometer()).isTrue(); } @Test @Config(minSdk = JELLY_BEAN_MR1) public void build_setsUsesChronometerOnNotification_false() throws Exception { Notification notification = builder.setUsesChronometer(false).setWhen(10).setShowWhen(true).build(); assertThat(shadowOf(notification).usesChronometer()).isFalse(); } @Test @Config(maxSdk = M) public void build_handlesNullContentTitle() { Notification notification = builder.setContentTitle(null).build(); assertThat(shadowOf(notification).getContentTitle().toString()).isEmpty(); } @Test @Config(minSdk = N) public void build_handlesNullContentTitle_atLeastN() { Notification notification = builder.setContentTitle(null).build(); assertThat(shadowOf(notification).getContentTitle()).isNull(); } @Test @Config(maxSdk = M) public void build_handlesNullContentText() { Notification notification = builder.setContentText(null).build(); assertThat(shadowOf(notification).getContentText().toString()).isEmpty(); } @Test @Config(minSdk = N) public void build_handlesNullContentText_atLeastN() { Notification notification = builder.setContentText(null).build(); assertThat(shadowOf(notification).getContentText()).isNull(); } @Test public void build_handlesNullTicker() { Notification notification = builder.setTicker(null).build(); assertThat(notification.tickerText).isNull(); } @Test @Config(maxSdk 
= M) public void build_handlesNullContentInfo() { Notification notification = builder.setContentInfo(null).build(); assertThat(shadowOf(notification).getContentInfo().toString()).isEmpty(); } @Test @Config(minSdk = N) public void build_handlesNullContentInfo_atLeastN() { Notification notification = builder.setContentInfo(null).build(); assertThat(shadowOf(notification).getContentInfo()).isNull(); } @Test @Config(maxSdk = M) public void build_handlesNonStringContentText() { Notification notification = builder.setContentText(new SpannableString("Hello")).build(); assertThat(shadowOf(notification).getContentText().toString()).isEqualTo("Hello"); } @Test @Config(minSdk = N) public void build_handlesNonStringContentText_atLeastN() { Notification notification = builder.setContentText(new SpannableString("Hello")).build(); assertThat(shadowOf(notification).getContentText().toString()).isEqualTo("Hello"); } @Test @Config(maxSdk = M) public void build_handlesNonStringContentTitle() { Notification notification = builder.setContentTitle(new SpannableString("My title")).build(); assertThat(shadowOf(notification).getContentTitle().toString()).isEqualTo("My title"); } @Test @Config(minSdk = N) public void build_handlesNonStringContentTitle_atLeastN() { Notification notification = builder.setContentTitle(new SpannableString("My title")).build(); assertThat(shadowOf(notification).getContentTitle().toString()).isEqualTo("My title"); } @Test @Config(minSdk = JELLY_BEAN_MR2) public void build_addsActionToNotification() throws Exception { PendingIntent action = PendingIntent.getBroadcast(ApplicationProvider.getApplicationContext(), 0, null, 0); Notification notification = builder.addAction(0, "Action", action).build(); assertThat(notification.actions[0].actionIntent).isEqualTo(action); } @Test public void withBigTextStyle() { Notification notification = builder.setStyle(new Notification.BigTextStyle(builder) .bigText("BigText") .setBigContentTitle("Title") .setSummaryText("Summary")) 
.build(); assertThat(shadowOf(notification).getBigText()).isEqualTo("BigText"); assertThat(shadowOf(notification).getBigContentTitle()).isEqualTo("Title"); assertThat(shadowOf(notification).getBigContentText()).isEqualTo("Summary"); assertThat(shadowOf(notification).getBigPicture()).isNull(); } @Test @Config(minSdk = M) public void withBigPictureStyle() { Bitmap bigPicture = BitmapFactory.decodeResource( ApplicationProvider.getApplicationContext().getResources(), R.drawable.an_image); Icon bigLargeIcon = Icon.createWithBitmap(bigPicture); Notification notification = builder.setStyle(new Notification.BigPictureStyle(builder) .bigPicture(bigPicture) .bigLargeIcon(bigLargeIcon)) .build(); assertThat(shadowOf(notification).getBigPicture()).isEqualTo(bigPicture); } }
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.util.containers; import com.intellij.openapi.util.Condition; import com.intellij.util.Function; import com.intellij.util.Functions; import com.intellij.util.NotNullizer; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.ArrayDeque; import java.util.Iterator; import java.util.Map; import java.util.NoSuchElementException; import static com.intellij.openapi.util.Conditions.not; /** * A redesigned version of com.google.common.collect.TreeTraversal. * <p/> * The original JavaDoc: * <p/> * Views elements of a type {@code T} as nodes in a tree, and provides methods to traverse the trees * induced by this traverser. * * <p>For example, the tree * * <pre>{@code * h * / | \ * / e \ * d g * /|\ | * / | \ f * a b c }</pre> * * <p>can be iterated over in pre-order (hdabcegf), post-order (abcdefgh), or breadth-first order * (hdegabcf). * * <p>Null nodes are strictly forbidden. * * @author Louis Wasserman * <p/> * * @author gregsh */ public abstract class TreeTraversal { private static final NotNullizer ourNotNullizer = new NotNullizer("TreeTraversal.NotNull"); private final String debugName; protected TreeTraversal(@NotNull String debugName) { this.debugName = debugName; } @NotNull public final <T> JBIterable<T> traversal(@NotNull final Iterable<? extends T> roots, @NotNull final Function<? super T, ? extends Iterable<? extends T>> tree) { return new JBIterable<T>() { @NotNull @Override public Iterator<T> iterator() { return createIterator(roots, tree); } }; } @NotNull public final <T> JBIterable<T> traversal(@Nullable final T root, @NotNull final Function<? super T, ? extends Iterable<? extends T>> tree) { return traversal(ContainerUtil.createMaybeSingletonList(root), tree); } @NotNull public final <T> Function<T, JBIterable<T>> traversal(@NotNull final Function<? 
super T, ? extends Iterable<? extends T>> tree) { return t -> traversal(t, tree); } /** * Configures the traversal to skip already visited nodes. * @see TreeTraversal#unique(Function) */ @NotNull public final TreeTraversal unique() { return unique(Function.ID); } /** * Configures the traversal to skip already visited nodes. * @param identity function */ @NotNull public TreeTraversal unique(@NotNull final Function<?, ?> identity) { final TreeTraversal original = this; return new TreeTraversal(debugName + " (UNIQUE)") { @NotNull @Override public <T> It<T> createIterator(@NotNull Iterable<? extends T> roots, @NotNull final Function<? super T, ? extends Iterable<? extends T>> tree) { class WrappedTree implements Condition<T>, Function<T, Iterable<? extends T>> { java.util.HashSet<Object> visited; @Override public boolean value(T e) { if (visited == null) visited = new java.util.HashSet<>(); //noinspection unchecked return visited.add(((Function<T, Object>)identity).fun(e)); } @Override public Iterable<? extends T> fun(T t) { return JBIterable.from(tree.fun(t)).filter(this); } } if (tree instanceof WrappedTree) return original.createIterator(roots, tree); WrappedTree wrappedTree = new WrappedTree(); return original.createIterator(JBIterable.from(roots).filter(wrappedTree), wrappedTree); } }; } /** * Configures the traversal to expand and return the nodes within the range only. * It is an optimized version of expand-and-filter operation. * It skips all the nodes "before" the {@code rangeCondition} return true for the first time, * processes as usual the nodes while the condition return true and * stops when the {@code rangeCondition} return false after that. */ @NotNull public <T> TreeTraversal onRange(@NotNull final Condition<T> rangeCondition) { final TreeTraversal original = this; return new TreeTraversal(original.toString() + " (ON_RANGE)") { @NotNull @Override public <T> It<T> createIterator(@NotNull Iterable<? extends T> roots, @NotNull final Function<? super T, ? 
extends Iterable<? extends T>> tree) { final Condition<? super T> inRangeCondition = (Condition<? super T>)rangeCondition; final Condition<? super T> notInRangeCondition = (Condition<? super T>)not(rangeCondition); class WrappedTree implements Function<T, Iterable<? extends T>> { @Override public Iterable<? extends T> fun(T t) { return JBIterable.from(tree.fun(t)) .skipWhile(notInRangeCondition) .takeWhile(inRangeCondition); } } if (tree instanceof WrappedTree) return original.createIterator(roots, tree); WrappedTree wrappedTree = new WrappedTree(); return original.createIterator(JBIterable.from(roots).filter(inRangeCondition), wrappedTree); } }; } /** * Creates a new iterator for this type of traversal. * @param roots tree roots * @param tree tree structure the children for parent function. * May return null (useful for map representation). */ @NotNull public abstract <T> It<T> createIterator(@NotNull Iterable<? extends T> roots, @NotNull Function<? super T, ? extends Iterable<? extends T>> tree); @Override public final String toString() { return debugName; } public static abstract class It<T> extends JBIterator<T> { protected final Function<? super T, ? extends Iterable<? extends T>> tree; protected It(Function<? super T, ? extends Iterable<? extends T>> tree) { this.tree = tree; } } public static abstract class TracingIt<T> extends It<T> { @Nullable public T parent() { throw new UnsupportedOperationException(); } @NotNull public JBIterable<T> backtrace() { throw new UnsupportedOperationException(); } public boolean isDescending() { return true; } protected TracingIt(Function<? super T, ? extends Iterable<? 
extends T>> tree) { super(tree); }

    /** Applies every registered element transformation, in registration order, to an iterable. */
    protected JBIterable<T> _transform(JBIterable<?> original) {
      JBIterable<?> result = original;
      for (Function<Object, Object> f : getTransformations()) {
        result = result.map(f);
      }
      //noinspection unchecked
      return (JBIterable<T>)result;
    }

    /** Single-element counterpart of {@link #_transform(JBIterable)}. */
    protected T _transform(Object original) {
      Object result = original;
      for (Function<Object, ?> f : getTransformations()) {
        result = f.fun(result);
      }
      //noinspection unchecked
      return (T)result;
    }
  }

  /**
   * Iterator whose traversal order is driven by a client-supplied {@link Guide} callback.
   * The guide inspects the current parent/child and queues further nodes via
   * {@link #queueNext}/{@link #queueLast}, reporting results via {@link #result}.
   */
  public static abstract class GuidedIt<T> extends It<T> {
    public interface Guide<T> {
      void guide(@NotNull GuidedIt<T> guidedIt);
    }

    // Current traversal state exposed to the guide on each step.
    @Nullable
    public T curChild, curParent;
    @Nullable
    public Iterable<? extends T> curChildren;
    public boolean curNoChildren;

    public abstract GuidedIt<T> queueNext(@Nullable T child);
    public abstract GuidedIt<T> result(@Nullable T node);
    public abstract GuidedIt<T> queueLast(@Nullable T child);

    protected GuidedIt(Function<? super T, ? extends Iterable<? extends T>> tree) { super(tree); }
  }

  /** Factory for a traversal controlled entirely by the given {@link GuidedIt.Guide}. */
  @NotNull
  public static TreeTraversal GUIDED_TRAVERSAL(@NotNull final GuidedIt.Guide<?> guide) {
    return new TreeTraversal("GUIDED_TRAVERSAL") {
      @NotNull
      @Override
      public <T> It<T> createIterator(@NotNull Iterable<? extends T> roots, @NotNull Function<? super T, ? extends Iterable<? extends T>> tree) {
        //noinspection unchecked
        return new GuidedItImpl<>(roots, tree, (GuidedIt.Guide<T>)guide);
      }
    };
  }

  /**
   * Returns an iterator over the nodes in a tree structure, using bi-order traversal.
   * That is, each node is returned before and after its subtrees are traversed.
   * Direction can be retrieved through TracingIt#isDescending()
   *
   * <p>No guarantees are made about the behavior of the traversal when nodes change while
   * iteration is in progress or when the iterators generated by {@code tree} are advanced.
   */
  @NotNull
  public static final TreeTraversal BI_ORDER_DFS = new TreeTraversal("BI_ORDER_DFS") {
    @NotNull
    @Override
    public <T> It<T> createIterator(@NotNull Iterable<? extends T> roots, @NotNull Function<? super T, ? extends Iterable<? extends T>> tree) {
      return new BiOrderIt<>(roots, tree, BiOrderIt.Order.BOTH);
    }
  };

  /**
   * Returns an iterator over the nodes in a tree structure, using pre-order traversal.
   * That is, each node's subtrees are traversed after the node itself is returned.
   *
   * <p>No guarantees are made about the behavior of the traversal when nodes change while
   * iteration is in progress or when the iterators generated by {@code tree} are advanced.
   */
  @NotNull
  public static final TreeTraversal PRE_ORDER_DFS = new TreeTraversal("PRE_ORDER_DFS") {
    @NotNull
    @Override
    public <T> It<T> createIterator(@NotNull Iterable<? extends T> roots, @NotNull Function<? super T, ? extends Iterable<? extends T>> tree) {
      return new BiOrderIt<>(roots, tree, BiOrderIt.Order.PRE);
    }
  };

  /**
   * Returns an iterator over the nodes in a tree structure, using post-order DFS traversal.
   * That is, each node's subtrees are traversed before the node itself is returned.
   * <p/>
   * <p>No guarantees are made about the behavior of the traversal when nodes change while
   * iteration is in progress or when the iterators generated by {@code tree} are advanced.
   */
  @NotNull
  public static final TreeTraversal POST_ORDER_DFS = new TreeTraversal("POST_ORDER_DFS") {
    @NotNull
    @Override
    public <T> It<T> createIterator(@NotNull Iterable<? extends T> roots, @NotNull Function<? super T, ? extends Iterable<? extends T>> tree) {
      return new BiOrderIt<>(roots, tree, BiOrderIt.Order.POST);
    }
  };

  /**
   * Returns an iterator over the leaf nodes only in a tree structure, using DFS traversal.
   * That is, each node's subtrees are traversed before the node itself is returned.
   */
  @NotNull
  public static final TreeTraversal LEAVES_DFS = new TreeTraversal("LEAVES_DFS") {
    @NotNull
    @Override
    public <T> It<T> createIterator(@NotNull Iterable<? extends T> roots, @NotNull Function<? super T, ? extends Iterable<? extends T>> tree) {
      return new LeavesDfsIt<>(roots, tree);
    }
  };

  /**
   * Returns an iterator over the nodes in a tree structure, using interlaced pre-order DFS traversal.
   * That is, all paths are traversed in an interlaced manner that is suitable for infinite and cyclic graphs
   * and each node's subtrees are traversed before the node itself is returned.
   * <p/>
   * <p>No guarantees are made about the behavior of the traversal when nodes change while
   * iteration is in progress or when the iterators generated by {@code tree} are advanced.
   */
  @NotNull
  public static final TreeTraversal INTERLEAVED_DFS = new TreeTraversal("INTERLEAVED_DFS") {
    @NotNull
    @Override
    public <T> It<T> createIterator(@NotNull Iterable<? extends T> roots, @NotNull Function<? super T, ? extends Iterable<? extends T>> tree) {
      return new InterleavedIt<>(roots, tree);
    }
  };

  /**
   * Returns an iterator over the nodes in a tree structure, using breadth-first traversal.
   * That is, all the nodes of depth 0 are returned, then depth 1, then 2, and so on.
   * <p/>
   * <p>No guarantees are made about the behavior of the traversal when nodes change while
   * iteration is in progress or when the iterators generated by {@code tree} are advanced.
   */
  @NotNull
  public static final TreeTraversal PLAIN_BFS = new TreeTraversal("PLAIN_BFS") {
    @NotNull
    @Override
    public <T> It<T> createIterator(@NotNull Iterable<? extends T> roots, @NotNull Function<? super T, ? extends Iterable<? extends T>> tree) {
      return new PlainBfsIt<>(roots, tree);
    }
  };

  /**
   * Same as {@code PLAIN_BFS} but with {@code TracingIt}.
   * That is, a path to the current node can be retrieved during some traversal.
   * @see TreeTraversal.TracingIt
   */
  @NotNull
  public static final TreeTraversal TRACING_BFS = new TreeTraversal("TRACING_BFS") {
    @NotNull
    @Override
    public <T> It<T> createIterator(@NotNull Iterable<? extends T> roots, @NotNull Function<? super T, ? extends Iterable<? extends T>> tree) {
      return new TracingBfsIt<>(roots, tree);
    }
  };

  /**
   * Returns an iterator over the leaf nodes only in a tree structure, using BFS traversal.
   * That is, all the leaves of depth 0 are returned, then depth 1, then 2, and so on.
   */
  @NotNull
  public static final TreeTraversal LEAVES_BFS = new TreeTraversal("LEAVES_BFS") {
    @NotNull
    @Override
    public <T> It<T> createIterator(@NotNull Iterable<? extends T> roots, @NotNull Function<? super T, ? extends Iterable<? extends T>> tree) {
      return new LeavesBfsIt<>(roots, tree);
    }
  };

  // -----------------------------------------------------------------------------
  // Iterators: DFS
  // -----------------------------------------------------------------------------

  /**
   * Base for DFS iterators: maintains an explicit parent-linked stack of {@link P} frames.
   * {@code last} is the top of the stack; {@code cur} is the frame of the most recently
   * returned element (captured in {@link #currentChanged()}).
   */
  private abstract static class DfsIt<T, H extends P<T, H>> extends TracingIt<T> {
    H last;
    H cur;

    protected DfsIt(Function<? super T, ? extends Iterable<? extends T>> tree) {
      super(tree);
    }

    @Override
    protected void currentChanged() { cur = last; }

    @Override
    @Nullable
    public T parent() {
      if (cur == null) throw new NoSuchElementException();
      H p = cur.parent;
      // A frame with a null node is a synthetic roots-holder, not a real parent.
      return p == null ? null : p.node == null ? null : _transform(p.node);
    }

    @Override
    @NotNull
    public JBIterable<T> backtrace() {
      if (cur == null) throw new NoSuchElementException();
      return _transform(JBIterable.generate(cur, P.toPrev()).filterMap(P.toNode()));
    }
  }

  /** DFS iterator supporting pre-, post-, and bi-order depending on {@link Order}. */
  private static final class BiOrderIt<T> extends DfsIt<T, P1<T>> {
    private enum Order { PRE, POST, BOTH }

    private final Order order;
    private boolean curDescending;   // direction at the time of the last returned element
    private boolean descending = true;

    BiOrderIt(@NotNull Iterable<? extends T> roots, @NotNull Function<? super T, ? extends Iterable<? extends T>> tree, @NotNull Order order) {
      super(tree);
      this.order = order;
      last = P1.create(roots);
    }

    @Override
    protected void currentChanged() {
      super.currentChanged();
      curDescending = descending;
    }

    @Nullable
    @Override
    public T parent() {
      // On the ascending visit the stack has already been popped, so the current
      // frame itself holds the parent node.
      return curDescending || cur == null ? super.parent() : cur.node == null ? null : _transform(cur.node);
    }

    @NotNull
    @Override
    public JBIterable<T> backtrace() {
      return curDescending ? super.backtrace() : JBIterable.of(current()).append(super.backtrace());
    }

    @Override
    public boolean isDescending() {
      return curDescending;
    }

    @Override
    public T nextImpl() {
      while (last != null) {
        Iterator<? extends T> it = last.iterator(tree);
        if (it.hasNext()) {
          // Descend: push the next child; report it unless POST-only.
          T result = it.next();
          last = last.add(P1.create(result));
          descending = true;
          if (order != Order.POST) return result;
        }
        else {
          // Ascend: children exhausted; pop and report unless PRE-only.
          T result = last.node;
          last = last.remove();
          descending = false;
          if (order != Order.PRE && last != null) return result;
        }
      }
      descending = true;
      return stop();
    }
  }

  /** DFS iterator that returns only childless nodes. */
  private final static class LeavesDfsIt<T> extends DfsIt<T, P1<T>> {
    LeavesDfsIt(@NotNull Iterable<? extends T> roots, Function<? super T, ? extends Iterable<? extends T>> tree) {
      super(tree);
      last = P1.create(roots);
    }

    @Override
    public T nextImpl() {
      while (last != null) {
        P1<T> top = last;
        if (top.iterator(tree).hasNext() && !top.empty) {
          T child = top.iterator(tree).next();
          last = last.add(P1.create(child));
        }
        else {
          // Pop; report the popped frame only if it turned out to be a leaf.
          last = last.remove();
          if (top.empty) return last == null ? stop() : top.node;
        }
      }
      return stop();
    }
  }

  /** Interlaced pre-order DFS over a doubly-linked frame list; safe for infinite/cyclic graphs. */
  private final static class InterleavedIt<T> extends DfsIt<T, P2<T>> {
    P2<T> cur, max;   // NOTE: this 'cur' intentionally shadows DfsIt.cur for the sweep cursor

    InterleavedIt(@NotNull Iterable<? extends T> roots, Function<? super T, ? extends Iterable<? extends T>> tree) {
      super(tree);
      last = P2.create(roots);
      cur = max = last;
    }

    @Override
    public T nextImpl() {
      while (last != null) {
        if (cur == null) {
          // Sweep wrapped around: restart from the deepest unexplored frame.
          cur = max;
          max = max.next;
        }
        Iterator<? extends T> it = cur.iterator(tree);
        if (it.hasNext()) {
          T result = it.next();
          last = last.add(P2.create(result));
          last.parent = cur;
          cur = cur.prev;
          if (max == null) {
            max = last;
          }
          return result;
        }
        else {
          if (cur == last) {
            last = cur.prev;
          }
          cur = cur.remove();
        }
      }
      return stop();
    }
  }

  // -----------------------------------------------------------------------------
  // Iterators: BFS
  // -----------------------------------------------------------------------------

  /** Plain queue-based BFS; children of the last returned node are enqueued lazily. */
  private static final class PlainBfsIt<T> extends It<T> {
    final ArrayDeque<T> queue = new ArrayDeque<>();
    P1<T> top;

    PlainBfsIt(@NotNull Iterable<? extends T> roots, Function<? super T, ? extends Iterable<? extends T>> tree) {
      super(tree);
      JBIterable.from(roots).map(ourNotNullizer::notNullize).addAllTo(queue);
    }

    @Override
    public T nextImpl() {
      if (top != null) {
        JBIterable.from(top.iterable(tree)).map(ourNotNullizer::notNullize).addAllTo(queue);
        top = null;
      }
      if (queue.isEmpty()) return stop();
      top = P1.create(ourNotNullizer.nullize(queue.remove()));
      return top.node;
    }
  }

  /** BFS that returns only leaves; inner nodes just feed their children into the queue. */
  private static final class LeavesBfsIt<T> extends TracingIt<T> {
    final ArrayDeque<T> queue = new ArrayDeque<>();

    LeavesBfsIt(@NotNull Iterable<? extends T> roots, Function<? super T, ? extends Iterable<? extends T>> tree) {
      super(tree);
      JBIterable.from(roots).map(ourNotNullizer::notNullize).addAllTo(queue);
    }

    @Override
    public T nextImpl() {
      while (!queue.isEmpty()) {
        T result = ourNotNullizer.nullize(queue.remove());
        Iterable<? extends T> children = tree.fun(result);
        Iterator<? extends T> it = children == null ? null: children.iterator();
        if (it == null || !it.hasNext()) return result;
        while (it.hasNext()) queue.add(ourNotNullizer.notNullize(it.next()));
      }
      return stop();
    }
  }

  /** BFS that records each node's discoverer in {@code paths} so parent()/backtrace() work. */
  private final static class TracingBfsIt<T> extends TracingIt<T> {
    final ArrayDeque<T> queue = new ArrayDeque<>();
    final Map<T, T> paths = ContainerUtil.newIdentityHashMap();
    P1<T> top;
    P1<T> cur;

    TracingBfsIt(@NotNull Iterable<? extends T> roots, Function<? super T, ? extends Iterable<? extends T>> tree) {
      super(tree);
      JBIterable.from(roots).map(ourNotNullizer::notNullize).addAllTo(queue);
    }

    @Override
    protected void currentChanged() { cur = top; }

    @Override
    public T nextImpl() {
      if (top != null) {
        for (T t : top.iterable(tree)) {
          // Skip already-discovered nodes: identity map doubles as a visited set.
          if (paths.containsKey(t)) continue;
          queue.add(ourNotNullizer.notNullize(t));
          paths.put(t, top.node);
        }
        top = null;
      }
      if (queue.isEmpty()) return stop();
      top = P1.create(ourNotNullizer.nullize(queue.remove()));
      return top.node;
    }

    @Override
    public T parent() {
      if (cur == null) throw new NoSuchElementException();
      return _transform(paths.get(cur.node));
    }

    @NotNull
    @Override
    public JBIterable<T> backtrace() {
      if (cur == null) throw new NoSuchElementException();
      return _transform(JBIterable.generate(cur.node, Functions.fromMap(paths)));
    }
  }

  // -----------------------------------------------------------------------------
  // Misc
  // -----------------------------------------------------------------------------

  /** Implementation of {@link GuidedIt}: defers all ordering decisions to the guide callback. */
  private static final class GuidedItImpl<T> extends GuidedIt<T> {
    final Guide<T> guide;

    P1<T> first, last;
    T curResult;

    GuidedItImpl(@NotNull Iterable<? extends T> roots, @NotNull Function<? super T, ? extends Iterable<? extends T>> tree, @NotNull Guide<T> guide) {
      super(tree);
      first = last = P1.create(roots);
      this.guide = guide;
    }

    @Override
    public GuidedIt<T> queueNext(T child) {
      if (child != null) last = last.add(P1.create(child));
      return this;
    }

    @Override
    public GuidedIt<T> queueLast(T child) {
      if (child != null) first = first.addBefore(P1.create(child));
      return this;
    }

    @Override
    public GuidedIt<T> result(T node) {
      curResult = node;
      return this;
    }

    @Override
    public T nextImpl() {
      if (guide == null) return stop();
      while (last != null) {
        P<T, ?> top = last;
        Iterator<? extends T> it = top.iterator(tree);
        boolean hasNext = it.hasNext();
        curResult = null;
        if (top.node != null || hasNext) {
          // Publish the state, then let the guide decide what to queue/return.
          curChild = hasNext ? it.next() : null;
          curParent = top.node;
          curChildren = top.itle;
          curNoChildren = top.empty;
          guide.guide(this);
        }
        if (!hasNext) {
          last = last.remove();
        }
        if (curResult != null) {
          return curResult;
        }
      }
      return stop();
    }
  }

  /**
   * Traversal stack frame: a node (or a synthetic roots iterable), its lazily-created
   * child iterator, and a parent link. Self-typed so subclasses link to their own kind.
   */
  private static class P<T, Self extends P<T, Self>> {
    T node;
    Iterable<? extends T> itle;   // children iterable; also used for the synthetic roots frame
    Iterator<? extends T> it;     // lazily created from itle
    boolean empty;                // true once the frame is known to have no children
    Self parent;

    static <T, Self extends P<T, Self>> Self create(Self p, T node) {
      p.node = node;
      return p;
    }

    static <T, Self extends P<T, Self>> Self create(Self p, Iterable<? extends T> it) {
      p.itle = it;
      return p;
    }

    final Iterator<? extends T> iterator(@NotNull Function<? super T, ? extends Iterable<? extends T>> tree) {
      if (it != null) return it;
      it = iterable(tree).iterator();
      empty = itle == null || !it.hasNext();
      return it;
    }

    final Iterable<? extends T> iterable(@NotNull Function<? super T, ? extends Iterable<? extends T>> tree) {
      return itle != null ? itle : JBIterable.from(itle = tree.fun(node));
    }

    /** @noinspection unchecked */
    static <T> Function<P<T, ?>, T> toNode() { return TO_NODE; }

    /** @noinspection unchecked */
    static <T> Function<P<T, ?>, P<T, ?>> toPrev() { return TO_PREV; }

    static final Function TO_NODE = (Function<P<?, ?>, Object>)tp -> tp.node;
    static final Function TO_PREV = new Function.Mono<P<?, ?>>() {
      @Override
      public P<?, ?> fun(P<?, ?> tp) {
        return tp.parent;
      }
    };
  }

  /** Singly-linked frame used by the plain DFS/BFS iterators. */
  private static final class P1<T> extends P<T, P1<T>> {
    static <T> P1<T> create(T node) { return create(new P1<>(), node); }
    static <T> P1<T> create(Iterable<? extends T> it) { return create(new P1<T>(), it); }

    P1<T> add(@NotNull P1<T> next) {
      next.parent = this;
      return next;
    }

    P1<T> addBefore(@NotNull P1<T> next) {
      next.parent = null;
      this.parent = next;
      return next;
    }

    P1<T> remove() {
      P1<T> p = parent;
      //parent = null;
      return p;
    }

    @Override
    public String toString() {
      int h = 0;
      for (P1<T> p = parent; p != null; p = p.parent) h++;
      return h + ": " + node;
    }
  }

  /** Doubly-linked frame used by {@link InterleavedIt} so the sweep cursor can move both ways. */
  private static final class P2<T> extends P<T, P2<T>> {
    P2<T> next, prev;

    static <T> P2<T> create(T node) { return create(new P2<>(), node); }
    static <T> P2<T> create(Iterable<? extends T> it) { return create(new P2<T>(), it); }

    P2<T> add(@NotNull P2<T> next) {
      next.next = this.next;
      next.prev = this;
      this.next = next;
      return next;
    }

    P2<T> remove() {
      P2<T> p = prev;
      P2<T> n = next;
      prev = next = null;
      if (p != null) p.next = n;
      if (n != null) n.prev = p;
      return p;
    }

    @Override
    public String toString() {
      int h = 0, t = 0;
      for (P2<T> p = prev; p != null; p = p.prev) h++;
      for (P2<T> p = next; p != null; p = p.next) t++;
      return h + " of " + (h + t + 1) + ": " + node;
    }
  }
}
/*
 * Copyright 2013 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package io.netty.channel.socket.nio;

import io.netty.bootstrap.Bootstrap;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.EventLoop;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.util.CharsetUtil;
import io.netty.util.NetUtil;
import io.netty.util.internal.PlatformDependent;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;

import java.io.DataInput;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketAddress;
import java.net.SocketOption;
import java.net.StandardSocketOptions;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.NetworkChannel;
import java.util.Queue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

import static org.hamcrest.CoreMatchers.*;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.assertNotSame;

/** Integration tests for {@link NioSocketChannel} flush/close ordering and re-registration. */
public class NioSocketChannelTest extends AbstractNioChannelTest<NioSocketChannel> {

    /**
     * Reproduces the issue #1600
     */
    @Test
    public void testFlushCloseReentrance() throws Exception {
        NioEventLoopGroup group = new NioEventLoopGroup(1);
        try {
            final Queue<ChannelFuture> futures = new LinkedBlockingQueue<ChannelFuture>();

            ServerBootstrap sb = new ServerBootstrap();
            sb.group(group).channel(NioServerSocketChannel.class);
            // Small send buffer so the writes below cannot complete in one flush.
            sb.childOption(ChannelOption.SO_SNDBUF, 1024);
            sb.childHandler(new ChannelInboundHandlerAdapter() {
                @Override
                public void channelActive(ChannelHandlerContext ctx) throws Exception {
                    // Write a large enough data so that it is split into two loops.
                    futures.add(ctx.write(
                            ctx.alloc().buffer().writeZero(1048576)).addListener(ChannelFutureListener.CLOSE));
                    futures.add(ctx.write(ctx.alloc().buffer().writeZero(1048576)));
                    ctx.flush();
                    futures.add(ctx.write(ctx.alloc().buffer().writeZero(1048576)));
                    ctx.flush();
                }
            });

            SocketAddress address = sb.bind(0).sync().channel().localAddress();

            Socket s = new Socket(NetUtil.LOCALHOST, ((InetSocketAddress) address).getPort());

            InputStream in = s.getInputStream();
            byte[] buf = new byte[8192];
            for (;;) {
                if (in.read(buf) == -1) {
                    break;
                }

                // Wait a little bit so that the write attempts are split into multiple flush attempts.
                Thread.sleep(10);
            }
            s.close();

            // The first write closes the channel, so the two later writes must fail.
            assertThat(futures.size(), is(3));
            ChannelFuture f1 = futures.poll();
            ChannelFuture f2 = futures.poll();
            ChannelFuture f3 = futures.poll();
            assertThat(f1.isSuccess(), is(true));
            assertThat(f2.isDone(), is(true));
            assertThat(f2.isSuccess(), is(false));
            assertThat(f2.cause(), is(instanceOf(ClosedChannelException.class)));
            assertThat(f3.isDone(), is(true));
            assertThat(f3.isSuccess(), is(false));
            assertThat(f3.cause(), is(instanceOf(ClosedChannelException.class)));
        } finally {
            group.shutdownGracefully().sync();
        }
    }

    /**
     * Reproduces the issue #1679
     */
    @Test
    public void testFlushAfterGatheredFlush() throws Exception {
        NioEventLoopGroup group = new NioEventLoopGroup(1);
        try {
            ServerBootstrap sb = new ServerBootstrap();
            sb.group(group).channel(NioServerSocketChannel.class);
            sb.childHandler(new ChannelInboundHandlerAdapter() {
                @Override
                public void channelActive(final ChannelHandlerContext ctx) throws Exception {
                    // Trigger a gathering write by writing two buffers.
                    ctx.write(Unpooled.wrappedBuffer(new byte[] { 'a' }));
                    ChannelFuture f = ctx.write(Unpooled.wrappedBuffer(new byte[] { 'b' }));
                    f.addListener(new ChannelFutureListener() {
                        @Override
                        public void operationComplete(ChannelFuture future) throws Exception {
                            // This message must be flushed
                            ctx.writeAndFlush(Unpooled.wrappedBuffer(new byte[]{'c'}));
                        }
                    });
                    ctx.flush();
                }
            });

            SocketAddress address = sb.bind(0).sync().channel().localAddress();

            Socket s = new Socket(NetUtil.LOCALHOST, ((InetSocketAddress) address).getPort());

            DataInput in = new DataInputStream(s.getInputStream());
            byte[] buf = new byte[3];
            in.readFully(buf);

            // All three bytes must arrive, in order, despite the reentrant flush.
            assertThat(new String(buf, CharsetUtil.US_ASCII), is("abc"));

            s.close();
        } finally {
            group.shutdownGracefully().sync();
        }
    }

    // Test for https://github.com/netty/netty/issues/4805
    @Test
    @Timeout(value = 3000, unit = TimeUnit.MILLISECONDS)
    public void testChannelReRegisterReadSameEventLoop() throws Exception {
        testChannelReRegisterRead(true);
    }

    @Test
    @Timeout(value = 3000, unit = TimeUnit.MILLISECONDS)
    public void testChannelReRegisterReadDifferentEventLoop() throws Exception {
        testChannelReRegisterRead(false);
    }

    /** Deregisters a channel on activation, re-registers it elsewhere, and verifies data still arrives. */
    private static void testChannelReRegisterRead(final boolean sameEventLoop) throws Exception {
        final EventLoopGroup group = new NioEventLoopGroup(2);
        final CountDownLatch latch = new CountDownLatch(1);

        // Just some random bytes
        byte[] bytes = new byte[1024];
        PlatformDependent.threadLocalRandom().nextBytes(bytes);

        Channel sc = null;
        Channel cc = null;
        ServerBootstrap b = new ServerBootstrap();
        try {
            b.group(group)
             .channel(NioServerSocketChannel.class)
             .childOption(ChannelOption.SO_KEEPALIVE, true)
             .childHandler(new ChannelInitializer<Channel>() {
                 @Override
                 protected void initChannel(Channel ch) throws Exception {
                     ChannelPipeline pipeline = ch.pipeline();
                     pipeline.addLast(new SimpleChannelInboundHandler<ByteBuf>() {
                         @Override
                         protected void channelRead0(ChannelHandlerContext ctx, ByteBuf byteBuf) {
                             // We were able to read something from the Channel after reregister.
                             latch.countDown();
                         }

                         @Override
                         public void channelActive(final ChannelHandlerContext ctx) throws Exception {
                             final EventLoop loop = group.next();
                             if (sameEventLoop) {
                                 deregister(ctx, loop);
                             } else {
                                 loop.execute(new Runnable() {
                                     @Override
                                     public void run() {
                                         deregister(ctx, loop);
                                     }
                                 });
                             }
                         }

                         private void deregister(ChannelHandlerContext ctx, final EventLoop loop) {
                             // As soon as the channel becomes active re-register it to another
                             // EventLoop. After this is done we should still receive the data that
                             // was written to the channel.
                             ctx.deregister().addListener(new ChannelFutureListener() {
                                 @Override
                                 public void operationComplete(ChannelFuture cf) {
                                     Channel channel = cf.channel();
                                     assertNotSame(loop, channel.eventLoop());
                                     group.next().register(channel);
                                 }
                             });
                         }
                     });
                 }
             });

            sc = b.bind(0).syncUninterruptibly().channel();

            Bootstrap bootstrap = new Bootstrap();
            bootstrap.group(group).channel(NioSocketChannel.class);
            bootstrap.handler(new ChannelInboundHandlerAdapter());
            cc = bootstrap.connect(sc.localAddress()).syncUninterruptibly().channel();
            cc.writeAndFlush(Unpooled.wrappedBuffer(bytes)).syncUninterruptibly();
            latch.await();
        } finally {
            if (cc != null) {
                cc.close();
            }
            if (sc != null) {
                sc.close();
            }
            group.shutdownGracefully();
        }
    }

    @Test
    @Timeout(value = 3000, unit = TimeUnit.MILLISECONDS)
    public void testShutdownOutputAndClose() throws IOException {
        NioEventLoopGroup group = new NioEventLoopGroup(1);
        ServerSocket socket = new ServerSocket();
        socket.bind(new InetSocketAddress(0));
        Socket accepted = null;
        try {
            Bootstrap sb = new Bootstrap();
            sb.group(group).channel(NioSocketChannel.class);
            sb.handler(new ChannelInboundHandlerAdapter());

            SocketChannel channel = (SocketChannel) sb.connect(socket.getLocalSocketAddress())
                    .syncUninterruptibly().channel();
            accepted = socket.accept();
            // Half-close must not prevent a subsequent full close.
            channel.shutdownOutput().syncUninterruptibly();

            channel.close().syncUninterruptibly();
        } finally {
            if (accepted != null) {
                try {
                    accepted.close();
                } catch (IOException ignore) {
                    // ignore
                }
            }
            try {
                socket.close();
            } catch (IOException ignore) {
                // ignore
            }
            group.shutdownGracefully();
        }
    }

    @Override
    protected NioSocketChannel newNioChannel() {
        return new NioSocketChannel();
    }

    @Override
    protected NetworkChannel jdkChannel(NioSocketChannel channel) {
        return channel.javaChannel();
    }

    @Override
    protected SocketOption<?> newInvalidOption() {
        return StandardSocketOptions.IP_MULTICAST_IF;
    }
}
/*
 * Copyright (c) 2010-2016 Evolveum
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.evolveum.midpoint.web.component.wizard.resource.component.schemahandling.modal;

import com.evolveum.midpoint.gui.api.model.LoadableModel;
import com.evolveum.midpoint.gui.api.model.NonEmptyModel;
import com.evolveum.midpoint.gui.api.page.PageBase;
import com.evolveum.midpoint.gui.api.util.WebComponentUtil;
import com.evolveum.midpoint.web.component.form.CheckFormGroup;
import com.evolveum.midpoint.web.component.form.TextFormGroup;
import com.evolveum.midpoint.web.component.input.DropDownChoicePanel;
import com.evolveum.midpoint.web.component.wizard.resource.dto.PropertyLimitationsTypeDto;
import com.evolveum.midpoint.web.page.admin.configuration.component.EmptyOnBlurAjaxFormUpdatingBehaviour;
import com.evolveum.midpoint.web.page.admin.configuration.component.EmptyOnChangeAjaxFormUpdatingBehavior;
import com.evolveum.midpoint.web.util.InfoTooltipBehavior;
import com.evolveum.midpoint.xml.ns._public.common.common_3.LayerType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.PropertyAccessType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.PropertyLimitationsType;
import org.apache.commons.lang3.BooleanUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.wicket.AttributeModifier;
import org.apache.wicket.Component;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.form.AjaxFormComponentUpdatingBehavior;
import org.apache.wicket.ajax.markup.html.AjaxLink;
import org.apache.wicket.extensions.ajax.markup.html.modal.ModalWindow;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.form.FormComponent;
import org.apache.wicket.markup.html.list.ListItem;
import org.apache.wicket.markup.html.list.ListView;
import org.apache.wicket.model.AbstractReadOnlyModel;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.PropertyModel;
import org.apache.wicket.model.StringResourceModel;
import org.jetbrains.annotations.NotNull;

import java.util.ArrayList;
import java.util.List;

/**
 * Modal dialog for editing a list of {@link PropertyLimitationsType} entries
 * (layers, access flags, occurrence bounds, ignore flag). Edits are made on DTO
 * copies and written back to {@code inputModel} only on save.
 *
 * @author shood
 * */
public class LimitationsEditorDialog extends ModalWindow{

    // Controls which repeater item is rendered expanded after an add/delete.
    private enum ChangeState{
        SKIP, FIRST, LAST
    }

    // Tri-state rendering of a Boolean access flag (null = Inherit).
    private enum PropertyAccess{
        Allow, Inherit, Deny
    }

    private static final String ID_REPEATER = "repeater";
    private static final String ID_LIMITATIONS_LINK = "limitationsLink";
    private static final String ID_LIMITATIONS_LABEL = "limitationsLinkName";
    private static final String ID_LIMITATION_DELETE = "deleteLimitation";
    private static final String ID_BODY = "accountBodyContainer";
    private static final String ID_LAYER_SCHEMA = "layerSchema";
    private static final String ID_LAYER_MODEL = "layerModel";
    private static final String ID_LAYER_PRESENTATION = "layerPresentation";
    private static final String ID_ACCESS_ADD = "addAccess";
    private static final String ID_ACCESS_READ = "readAccess";
    private static final String ID_ACCESS_MODIFY = "modifyAccess";
    private static final String ID_MIN_OCCURS = "minOccurs";
    private static final String ID_MAX_OCCURS = "maxOccurs";
    private static final String ID_IGNORE = "ignore";
    private static final String ID_BUTTON_ADD = "addButton";
    private static final String ID_BUTTON_SAVE = "saveButton";
    private static final String ID_BUTTON_CANCEL = "cancelButton";
    private static final String ID_MAIN_FORM = "mainForm";
    private static final String ID_T_LAYERS = "layersTooltip";
    private static final String ID_T_PROPERTY = "propertyAccessTooltip";

    private static final String ID_LABEL_SIZE = "col-md-4";
    private static final String ID_INPUT_SIZE = "col-md-8";

    private ChangeState changeState = ChangeState.FIRST;
    private boolean initialized;
    private IModel<List<PropertyLimitationsTypeDto>> model;       // editable DTO copies
    private IModel<List<PropertyLimitationsType>> inputModel;     // caller's model, written on save
    @NotNull final private NonEmptyModel<Boolean> readOnlyModel;

    public LimitationsEditorDialog(String id, final IModel<List<PropertyLimitationsType>> limitation, NonEmptyModel<Boolean> readOnlyModel) {
        super(id);
        this.readOnlyModel = readOnlyModel;
        inputModel = limitation;
        model = new LoadableModel<List<PropertyLimitationsTypeDto>>(false) {

            @Override
            protected List<PropertyLimitationsTypeDto> load() {
                return loadLimitationsModel(limitation);
            }
        };

        setOutputMarkupId(true);
        setTitle(createStringResource("LimitationsEditorDialog.label"));
        showUnloadConfirmation(false);
        setCssClassName(ModalWindow.CSS_CLASS_GRAY);
        setCookieName(LimitationsEditorDialog.class.getSimpleName() + ((int) (Math.random() * 100)));
        setInitialWidth(600);
        setInitialHeight(700);
        setWidthUnit("px");

        WebMarkupContainer content = new WebMarkupContainer(getContentId());
        content.setOutputMarkupId(true);
        setContent(content);
    }

    /** Wraps each limitation in an editable DTO so changes can be cancelled. */
    private List<PropertyLimitationsTypeDto> loadLimitationsModel(IModel<List<PropertyLimitationsType>> limList){
        List<PropertyLimitationsTypeDto> limitations = new ArrayList<>();
        List<PropertyLimitationsType> limitationTypeList = limList.getObject();

        for(PropertyLimitationsType limitation: limitationTypeList){
            limitations.add(new PropertyLimitationsTypeDto(limitation));
        }

        return limitations;
    }

    @Override
    protected void onBeforeRender(){
        super.onBeforeRender();

        // Lazy one-time layout init: the dialog content is built on first render only.
        if (initialized) {
            return;
        }

        initLayout((WebMarkupContainer) get(getContentId()));
        initialized = true;
    }

    public void initLayout(WebMarkupContainer content) {
        Form form = new Form(ID_MAIN_FORM);
        form.setOutputMarkupId(true);
        content.add(form);

        ListView repeater = new ListView<PropertyLimitationsTypeDto>(ID_REPEATER, model){

            @Override
            protected void populateItem(final ListItem<PropertyLimitationsTypeDto> item){
                WebMarkupContainer linkContainer = new WebMarkupContainer(ID_LIMITATIONS_LINK);
                linkContainer.setOutputMarkupId(true);
                // Bootstrap collapse toggle: href points at the item's body container.
                linkContainer.add(new AttributeModifier("href", createCollapseItemId(item, true)));
                item.add(linkContainer);

                Label linkLabel = new Label(ID_LIMITATIONS_LABEL, createLimitationsLabelModel(item));
                linkContainer.add(linkLabel);

                AjaxLink delete = new AjaxLink(ID_LIMITATION_DELETE) {

                    @Override
                    public void onClick(AjaxRequestTarget target) {
                        deleteLimitationPerformed(target, item);
                    }
                };
                delete.add(WebComponentUtil.visibleIfFalse(readOnlyModel));
                linkContainer.add(delete);

                WebMarkupContainer limitationBody = new WebMarkupContainer(ID_BODY);
                limitationBody.setOutputMarkupId(true);
                limitationBody.setMarkupId(createCollapseItemId(item, false).getObject());
                if (changeState != ChangeState.SKIP) {
                    // Expand the first item initially, or the last one right after an add.
                    limitationBody.add(new AttributeModifier("class", new AbstractReadOnlyModel<String>() {

                        @Override
                        public String getObject() {
                            if (changeState == ChangeState.FIRST && item.getIndex() == 0) {
                                return "panel-collapse collapse in";
                            } else if (changeState == ChangeState.LAST && item.getIndex() == (getModelObject().size()-1)) {
                                return "panel-collapse collapse in";
                            } else {
                                return "panel-collapse collapse";
                            }
                        }
                    }));
                }
                limitationBody.add(WebComponentUtil.enabledIfFalse(readOnlyModel));
                item.add(limitationBody);
                initLimitationBody(limitationBody, item);

            }
        };
        repeater.setOutputMarkupId(true);
        form.add(repeater);

        initButtons(form);
    }

    private void initButtons(Form form) {
        AjaxLink add = new AjaxLink(ID_BUTTON_ADD) {

            @Override
            public void onClick(AjaxRequestTarget target) {
                addLimitationsPerformed(target);
            }
        };
        add.add(WebComponentUtil.visibleIfFalse(readOnlyModel));
        form.add(add);

        AjaxLink cancel = new AjaxLink(ID_BUTTON_CANCEL) {

            @Override
            public void onClick(AjaxRequestTarget target) {
                cancelPerformed(target);
            }
        };
        form.add(cancel);

        AjaxLink save = new AjaxLink(ID_BUTTON_SAVE) {

            @Override
            public void onClick(AjaxRequestTarget target) {
                savePerformed(target);
            }
        };
        save.add(WebComponentUtil.visibleIfFalse(readOnlyModel));
        form.add(save);
    }

    /** Builds one collapsible editor panel: layer checkboxes, access dropdowns, occurrence fields. */
    private void initLimitationBody(final WebMarkupContainer body, ListItem<PropertyLimitationsTypeDto> item) {
        CheckFormGroup schema = new CheckFormGroup(ID_LAYER_SCHEMA, new PropertyModel<Boolean>(item.getModelObject(), PropertyLimitationsTypeDto.F_SCHEMA),
                createStringResource("LimitationsEditorDialog.label.schema"), ID_LABEL_SIZE, ID_INPUT_SIZE);
        schema.getCheck().add(prepareAjaxOnComponentTagUpdateBehavior());
        body.add(schema);

        CheckFormGroup model = new CheckFormGroup(ID_LAYER_MODEL, new PropertyModel<Boolean>(item.getModelObject(), PropertyLimitationsTypeDto.F_MODEL),
                createStringResource("LimitationsEditorDialog.label.model"), ID_LABEL_SIZE, ID_INPUT_SIZE);
        model.getCheck().add(prepareAjaxOnComponentTagUpdateBehavior());
        body.add(model);

        CheckFormGroup presentation = new CheckFormGroup(ID_LAYER_PRESENTATION, new PropertyModel<Boolean>(item.getModelObject(), PropertyLimitationsTypeDto.F_PRESENTATION),
                createStringResource("LimitationsEditorDialog.label.presentation"), ID_LABEL_SIZE, ID_INPUT_SIZE);
        presentation.getCheck().add(prepareAjaxOnComponentTagUpdateBehavior());
        body.add(presentation);

        DropDownChoicePanel add = new DropDownChoicePanel(ID_ACCESS_ADD, getAddPropertyAccessModel(item.getModel()),
                WebComponentUtil.createReadonlyModelFromEnum(PropertyAccess.class), false);
        FormComponent<PropertyAccess> addInput = add.getBaseFormComponent();
        addInput.add(new EmptyOnBlurAjaxFormUpdatingBehaviour());
        addInput.add(new EmptyOnChangeAjaxFormUpdatingBehavior());

        DropDownChoicePanel read = new DropDownChoicePanel(ID_ACCESS_READ, getReadPropertyAccessModel(item.getModel()),
                WebComponentUtil.createReadonlyModelFromEnum(PropertyAccess.class), false);
        FormComponent<PropertyAccess> readInput = read.getBaseFormComponent();
        readInput.add(new EmptyOnBlurAjaxFormUpdatingBehaviour());
        readInput.add(new EmptyOnChangeAjaxFormUpdatingBehavior());

        DropDownChoicePanel modify = new DropDownChoicePanel(ID_ACCESS_MODIFY, getModifyPropertyAccessModel(item.getModel()),
                WebComponentUtil.createReadonlyModelFromEnum(PropertyAccess.class), false);
        FormComponent<PropertyAccess> modifyInput = modify.getBaseFormComponent();
        modifyInput.add(new EmptyOnBlurAjaxFormUpdatingBehaviour());
        modifyInput.add(new EmptyOnChangeAjaxFormUpdatingBehavior());

        body.add(add);
        body.add(read);
        body.add(modify);

        TextFormGroup minOccurs = new TextFormGroup(ID_MIN_OCCURS, new PropertyModel<String>(item.getModelObject(), PropertyLimitationsTypeDto.F_LIMITATION + ".minOccurs"),
                createStringResource("LimitationsEditorDialog.label.minOccurs"), "SchemaHandlingStep.limitations.tooltip.minOccurs", true,
                ID_LABEL_SIZE, ID_INPUT_SIZE, false, false);
        minOccurs.getField().add(prepareAjaxOnComponentTagUpdateBehavior());
        body.add(minOccurs);

        TextFormGroup maxOccurs = new TextFormGroup(ID_MAX_OCCURS, new PropertyModel<String>(item.getModelObject(), PropertyLimitationsTypeDto.F_LIMITATION + ".maxOccurs"),
                createStringResource("LimitationsEditorDialog.label.maxOccurs"), "SchemaHandlingStep.limitations.tooltip.maxOccurs", true,
                ID_LABEL_SIZE, ID_INPUT_SIZE, false, false);
        maxOccurs.getField().add(prepareAjaxOnComponentTagUpdateBehavior());
        body.add(maxOccurs);

        CheckFormGroup ignore = new CheckFormGroup(ID_IGNORE, new PropertyModel<Boolean>(item.getModelObject(), PropertyLimitationsTypeDto.F_LIMITATION + ".ignore"),
                createStringResource("LimitationsEditorDialog.label.ignore"), "SchemaHandlingStep.limitations.tooltip.ignore", true,
                ID_LABEL_SIZE, ID_INPUT_SIZE);
        ignore.getCheck().add(prepareAjaxOnComponentTagUpdateBehavior());
        body.add(ignore);

        Label layersTooltip = new Label(ID_T_LAYERS);
        layersTooltip.add(new InfoTooltipBehavior(true) {

            @Override
            public String getModalContainer(Component component) {
                return body.getMarkupId();
            }
        });
        body.add(layersTooltip);

        Label propertyTooltip = new Label(ID_T_PROPERTY);
        propertyTooltip.add(new InfoTooltipBehavior(true) {

            @Override
            public String getModalContainer(Component component) {
                return body.getMarkupId();
            }
        });
        body.add(propertyTooltip);
    }

    /** Behavior that pushes the form component's value to the model on blur. */
    private AjaxFormComponentUpdatingBehavior prepareAjaxOnComponentTagUpdateBehavior(){
        return new AjaxFormComponentUpdatingBehavior("blur") {

            @Override
            protected void onUpdate(AjaxRequestTarget target) {}
        };
    }

    /** Builds the header label, e.g. "#1 - MODEL, SCHEMA: Read, Add". */
    private IModel<String> createLimitationsLabelModel(final ListItem<PropertyLimitationsTypeDto> item){
        return new AbstractReadOnlyModel<String>() {

            @Override
            public String getObject() {
                StringBuilder sb = new StringBuilder();
                PropertyLimitationsTypeDto dto = item.getModelObject();
                sb.append("#").append(item.getIndex()+1).append(" - ");
                List<LayerType> layers = new ArrayList<>();
                if (dto.isModel()) {
                    layers.add(LayerType.MODEL);
                }
                if (dto.isPresentation()) {
                    layers.add(LayerType.PRESENTATION);
                }
                if (dto.isSchema()) {
                    layers.add(LayerType.SCHEMA);
                }
                sb.append(StringUtils.join(layers, ", "));
                sb.append(":");

                if (dto.getLimitationObject().getAccess() != null) {
                    List<String> accesses = new ArrayList<>();
                    PropertyAccessType access = dto.getLimitationObject().getAccess();
                    if (BooleanUtils.isTrue(access.isRead())) {
                        accesses.add(getString("LimitationsEditorDialog.label.read"));
                    }
                    if (BooleanUtils.isTrue(access.isAdd())) {
                        accesses.add(getString("LimitationsEditorDialog.label.add"));
                    }
                    if (BooleanUtils.isTrue(access.isModify())) {
                        accesses.add(getString("LimitationsEditorDialog.label.modify"));
                    }
                    sb.append(StringUtils.join(accesses, ", "));
                }

                return sb.toString();
            }
        };
    }

    /** Markup id for a repeater item's collapse body; with leading '#' when used as a selector. */
    private IModel<String> createCollapseItemId(final ListItem<PropertyLimitationsTypeDto> item, final boolean appendSelector){
        return new AbstractReadOnlyModel<String>() {

            @Override
            public String getObject() {
                StringBuilder sb = new StringBuilder();
                if (appendSelector) {
                    sb.append("#");
                }
                sb.append("collapse").append(item.getId());
                return sb.toString();
            }
        };
    }

    public StringResourceModel createStringResource(String resourceKey, Object... objects) {
        return PageBase.createStringResourceStatic(this, resourceKey, objects);
    }

    private void addLimitationsPerformed(AjaxRequestTarget target){
        changeState = ChangeState.LAST;
        model.getObject().add(new PropertyLimitationsTypeDto(new PropertyLimitationsType()));
        target.add(getContent());
    }

    private void deleteLimitationPerformed(AjaxRequestTarget target, ListItem<PropertyLimitationsTypeDto> item){
        changeState = ChangeState.SKIP;
        model.getObject().remove(item.getModelObject());
        target.add(getContent());
    }

    private void cancelPerformed(AjaxRequestTarget target){
        close(target);
    }

    /** Converts the edited DTOs back to {@link PropertyLimitationsType} and updates the caller's model. */
    protected void savePerformed(AjaxRequestTarget target){
        List<PropertyLimitationsTypeDto> list = model.getObject();
        List<PropertyLimitationsType> outputList = new ArrayList<>();

        for (PropertyLimitationsTypeDto dto: list) {
            outputList.add(dto.prepareDtoForSave());
        }

        inputModel.setObject(outputList);
        close(target);
    }

    // NOTE(review): this model dereferences getAccess() without a null check, while
    // createLimitationsLabelModel guards against getAccess() == null — presumably the DTO
    // always pre-populates the access bean here; confirm, otherwise this can NPE.
    private IModel<PropertyAccess> getAddPropertyAccessModel(final IModel<PropertyLimitationsTypeDto> model){
        return new IModel<PropertyAccess>() {

            @Override
            public PropertyAccess getObject() {
                Boolean add = model.getObject().getLimitationObject().getAccess().isAdd();

                if (add == null){
                    return PropertyAccess.Inherit;
                } else if (add) {
                    return PropertyAccess.Allow;
                } else {
                    return PropertyAccess.Deny;
                }
            }

            @Override
            public void setObject(PropertyAccess propertyAccess) {
                if (propertyAccess.equals(PropertyAccess.Allow)) {
                    model.getObject().getLimitationObject().getAccess().setAdd(true);
                } else if (propertyAccess.equals(PropertyAccess.Deny)) {
                    model.getObject().getLimitationObject().getAccess().setAdd(false);
                } else {
                    model.getObject().getLimitationObject().getAccess().setAdd(null);
                }
            }

            @Override
            public void detach() {
            }
        };
    }

    private IModel<PropertyAccess> getReadPropertyAccessModel(final IModel<PropertyLimitationsTypeDto> model) {
        return new
IModel<PropertyAccess>() { @Override public PropertyAccess getObject() { Boolean read = model.getObject().getLimitationObject().getAccess().isRead(); if (read == null) { return PropertyAccess.Inherit; } else if (read){ return PropertyAccess.Allow; } else { return PropertyAccess.Deny; } } @Override public void setObject(PropertyAccess propertyAccess) { if (propertyAccess.equals(PropertyAccess.Allow)) { model.getObject().getLimitationObject().getAccess().setRead(true); } else if (propertyAccess.equals(PropertyAccess.Deny)) { model.getObject().getLimitationObject().getAccess().setRead(false); } else { model.getObject().getLimitationObject().getAccess().setRead(null); } } @Override public void detach() { } }; } private IModel<PropertyAccess> getModifyPropertyAccessModel(final IModel<PropertyLimitationsTypeDto> model) { return new IModel<PropertyAccess>() { @Override public PropertyAccess getObject() { Boolean modify = model.getObject().getLimitationObject().getAccess().isModify(); if (modify == null) { return PropertyAccess.Inherit; } else if (modify) { return PropertyAccess.Allow; } else { return PropertyAccess.Deny; } } @Override public void setObject(PropertyAccess propertyAccess) { if (propertyAccess.equals(PropertyAccess.Allow)) { model.getObject().getLimitationObject().getAccess().setModify(true); } else if (propertyAccess.equals(PropertyAccess.Deny)) { model.getObject().getLimitationObject().getAccess().setModify(false); } else { model.getObject().getLimitationObject().getAccess().setModify(null); } } @Override public void detach() { } }; } }
/*
 * Copyright 2016 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.rsocket.frame;

import static io.rsocket.frame.FrameHeaderFlyweight.FLAGS_M;

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.rsocket.FrameType;
import java.nio.charset.StandardCharsets;

/**
 * Stateless encoder/decoder (flyweight) for RSocket SETUP frames.
 *
 * <p>Layout after the common frame header: version (int), keep-alive interval
 * (int), max lifetime (int), an optional short-prefixed resume token (only when
 * {@link #FLAGS_RESUME_ENABLE} is set), then two byte-length-prefixed UTF-8
 * MIME type strings (metadata, data), followed by metadata and data payloads.
 *
 * <p>NOTE(review): all field offsets are absolute constants from index 0, so
 * this flyweight assumes the frame starts at the beginning of the buffer —
 * confirm against callers.
 */
public class SetupFrameFlyweight {
  private SetupFrameFlyweight() {} // static utility; not instantiable

  public static final int FLAGS_RESUME_ENABLE = 0b00_1000_0000;
  public static final int FLAGS_WILL_HONOR_LEASE = 0b00_0100_0000;
  public static final int FLAGS_STRICT_INTERPRETATION = 0b00_0010_0000;

  // Mask of all flag bits a SETUP frame may legally carry (incl. metadata flag).
  public static final int VALID_FLAGS =
      FLAGS_RESUME_ENABLE | FLAGS_WILL_HONOR_LEASE | FLAGS_STRICT_INTERPRETATION | FLAGS_M;

  public static final int CURRENT_VERSION = VersionFlyweight.encode(1, 0);

  // relative to start of passed offset
  private static final int VERSION_FIELD_OFFSET = FrameHeaderFlyweight.FRAME_HEADER_LENGTH;
  private static final int KEEPALIVE_INTERVAL_FIELD_OFFSET = VERSION_FIELD_OFFSET + Integer.BYTES;
  private static final int MAX_LIFETIME_FIELD_OFFSET =
      KEEPALIVE_INTERVAL_FIELD_OFFSET + Integer.BYTES;
  private static final int VARIABLE_DATA_OFFSET = MAX_LIFETIME_FIELD_OFFSET + Integer.BYTES;

  /**
   * Computes the total encoded length of a SETUP frame without a resume token.
   *
   * @return total frame length in bytes
   */
  public static int computeFrameLength(
      final int flags,
      final String metadataMimeType,
      final String dataMimeType,
      final int metadataLength,
      final int dataLength) {
    return computeFrameLength(flags, 0, metadataMimeType, dataMimeType, metadataLength, dataLength);
  }

  /**
   * Computes the total encoded length: header + three int fields + optional
   * (short length + token) + two 1-byte-length-prefixed UTF-8 MIME strings.
   */
  private static int computeFrameLength(
      final int flags,
      final int resumeTokenLength,
      final String metadataMimeType,
      final String dataMimeType,
      final int metadataLength,
      final int dataLength) {
    int length =
        FrameHeaderFlyweight.computeFrameHeaderLength(FrameType.SETUP, metadataLength, dataLength);

    length += Integer.BYTES * 3; // version, keep-alive interval, max lifetime

    if ((flags & FLAGS_RESUME_ENABLE) != 0) {
      length += Short.BYTES + resumeTokenLength;
    }

    // Each MIME type: 1 length byte + UTF-8 bytes.
    length += 1 + metadataMimeType.getBytes(StandardCharsets.UTF_8).length;
    length += 1 + dataMimeType.getBytes(StandardCharsets.UTF_8).length;

    return length;
  }

  /**
   * Encodes a SETUP frame without resumption support.
   *
   * @throws IllegalArgumentException if {@link #FLAGS_RESUME_ENABLE} is set
   * @return number of bytes written
   */
  public static int encode(
      final ByteBuf byteBuf,
      int flags,
      final int keepaliveInterval,
      final int maxLifetime,
      final String metadataMimeType,
      final String dataMimeType,
      final ByteBuf metadata,
      final ByteBuf data) {
    if ((flags & FLAGS_RESUME_ENABLE) != 0) {
      throw new IllegalArgumentException("RESUME_ENABLE not supported");
    }

    return encode(
        byteBuf,
        flags,
        keepaliveInterval,
        maxLifetime,
        Unpooled.EMPTY_BUFFER,
        metadataMimeType,
        dataMimeType,
        metadata,
        data);
  }

  // Only exposed for testing, other code shouldn't create frames with resumption tokens for now
  static int encode(
      final ByteBuf byteBuf,
      int flags,
      final int keepaliveInterval,
      final int maxLifetime,
      final ByteBuf resumeToken,
      final String metadataMimeType,
      final String dataMimeType,
      final ByteBuf metadata,
      final ByteBuf data) {
    final int frameLength =
        computeFrameLength(
            flags,
            resumeToken.readableBytes(),
            metadataMimeType,
            dataMimeType,
            metadata.readableBytes(),
            data.readableBytes());

    int length =
        FrameHeaderFlyweight.encodeFrameHeader(byteBuf, frameLength, flags, FrameType.SETUP, 0);

    // Fixed-offset fields; 'length' is advanced past them in one step below.
    byteBuf.setInt(VERSION_FIELD_OFFSET, CURRENT_VERSION);
    byteBuf.setInt(KEEPALIVE_INTERVAL_FIELD_OFFSET, keepaliveInterval);
    byteBuf.setInt(MAX_LIFETIME_FIELD_OFFSET, maxLifetime);

    length += Integer.BYTES * 3;

    if ((flags & FLAGS_RESUME_ENABLE) != 0) {
      byteBuf.setShort(length, resumeToken.readableBytes());
      length += Short.BYTES;
      int resumeTokenLength = resumeToken.readableBytes();
      byteBuf.setBytes(length, resumeToken, resumeTokenLength);
      length += resumeTokenLength;
    }

    length += putMimeType(byteBuf, length, metadataMimeType);
    length += putMimeType(byteBuf, length, dataMimeType);

    length += FrameHeaderFlyweight.encodeMetadata(byteBuf, FrameType.SETUP, length, metadata);
    length += FrameHeaderFlyweight.encodeData(byteBuf, length, data);

    return length;
  }

  /** Reads the protocol version field. */
  public static int version(final ByteBuf byteBuf) {
    return byteBuf.getInt(VERSION_FIELD_OFFSET);
  }

  /** Reads the keep-alive interval field. */
  public static int keepaliveInterval(final ByteBuf byteBuf) {
    return byteBuf.getInt(KEEPALIVE_INTERVAL_FIELD_OFFSET);
  }

  /** Reads the max lifetime field. */
  public static int maxLifetime(final ByteBuf byteBuf) {
    return byteBuf.getInt(MAX_LIFETIME_FIELD_OFFSET);
  }

  /** Decodes the metadata MIME type string (UTF-8). */
  public static String metadataMimeType(final ByteBuf byteBuf) {
    final byte[] bytes = getMimeType(byteBuf, metadataMimetypeOffset(byteBuf));
    return new String(bytes, StandardCharsets.UTF_8);
  }

  /** Decodes the data MIME type string, which follows the metadata MIME type. */
  public static String dataMimeType(final ByteBuf byteBuf) {
    int fieldOffset = metadataMimetypeOffset(byteBuf);
    // Skip over the metadata MIME type (1 length byte + contents).
    fieldOffset += 1 + byteBuf.getByte(fieldOffset);

    final byte[] bytes = getMimeType(byteBuf, fieldOffset);
    return new String(bytes, StandardCharsets.UTF_8);
  }

  /** Returns the offset where the metadata/data payload section begins. */
  public static int payloadOffset(final ByteBuf byteBuf) {
    int fieldOffset = metadataMimetypeOffset(byteBuf);

    final int metadataMimeTypeLength = byteBuf.getByte(fieldOffset);
    fieldOffset += 1 + metadataMimeTypeLength;

    final int dataMimeTypeLength = byteBuf.getByte(fieldOffset);
    fieldOffset += 1 + dataMimeTypeLength;

    return fieldOffset;
  }

  /** Offset of the metadata MIME type, accounting for an optional resume token. */
  private static int metadataMimetypeOffset(final ByteBuf byteBuf) {
    return VARIABLE_DATA_OFFSET + resumeTokenTotalLength(byteBuf);
  }

  /** Total bytes occupied by the resume token field (0 when the flag is unset). */
  private static int resumeTokenTotalLength(final ByteBuf byteBuf) {
    if ((FrameHeaderFlyweight.flags(byteBuf) & FLAGS_RESUME_ENABLE) == 0) {
      return 0;
    } else {
      return Short.BYTES + byteBuf.getShort(VARIABLE_DATA_OFFSET);
    }
  }

  /**
   * Writes a MIME type as 1 length byte + UTF-8 bytes; returns bytes written.
   *
   * <p>NOTE(review): the length is cast to a byte, so a MIME type longer than
   * 255 UTF-8 bytes would be silently corrupted — callers apparently never pass
   * such values; confirm upstream validation.
   */
  private static int putMimeType(
      final ByteBuf byteBuf, final int fieldOffset, final String mimeType) {
    byte[] bytes = mimeType.getBytes(StandardCharsets.UTF_8);

    byteBuf.setByte(fieldOffset, (byte) bytes.length);
    byteBuf.setBytes(fieldOffset + 1, bytes);

    return 1 + bytes.length;
  }

  /** Reads a 1-byte-length-prefixed MIME type field as raw bytes. */
  private static byte[] getMimeType(final ByteBuf byteBuf, final int fieldOffset) {
    final int length = byteBuf.getByte(fieldOffset);
    final byte[] bytes = new byte[length];

    byteBuf.getBytes(fieldOffset + 1, bytes);
    return bytes;
  }
}
package com.fsck.k9.mail.internet; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.Locale; import java.util.regex.Pattern; import android.support.annotation.NonNull; import android.util.Log; import com.fsck.k9.mail.Body; import com.fsck.k9.mail.BodyPart; import com.fsck.k9.mail.Message; import com.fsck.k9.mail.MessagingException; import com.fsck.k9.mail.Multipart; import com.fsck.k9.mail.Part; import org.apache.commons.io.IOUtils; import org.apache.james.mime4j.codec.Base64InputStream; import org.apache.james.mime4j.codec.QuotedPrintableInputStream; import org.apache.james.mime4j.util.MimeUtil; import static com.fsck.k9.mail.K9MailLib.LOG_TAG; public class MimeUtility { public static final String DEFAULT_ATTACHMENT_MIME_TYPE = "application/octet-stream"; public static final String K9_SETTINGS_MIME_TYPE = "application/x-k9settings"; /* * http://www.w3schools.com/media/media_mimeref.asp * + * http://www.stdicon.com/mimetypes */ private static final String[][] MIME_TYPE_BY_EXTENSION_MAP = new String[][] { //* Do not delete the next three lines { "", DEFAULT_ATTACHMENT_MIME_TYPE }, { "k9s", K9_SETTINGS_MIME_TYPE}, { "txt", "text/plain"}, //* Do not delete the previous three lines { "123", "application/vnd.lotus-1-2-3"}, { "323", "text/h323"}, { "3dml", "text/vnd.in3d.3dml"}, { "3g2", "video/3gpp2"}, { "3gp", "video/3gpp"}, { "aab", "application/x-authorware-bin"}, { "aac", "audio/x-aac"}, { "aam", "application/x-authorware-map"}, { "a", "application/octet-stream"}, { "aas", "application/x-authorware-seg"}, { "abw", "application/x-abiword"}, { "acc", "application/vnd.americandynamics.acc"}, { "ace", "application/x-ace-compressed"}, { "acu", "application/vnd.acucobol"}, { "acutc", "application/vnd.acucorp"}, { "acx", "application/internet-property-stream"}, { "adp", "audio/adpcm"}, { "aep", "application/vnd.audiograph"}, { "afm", "application/x-font-type1"}, { "afp", "application/vnd.ibm.modcap"}, { "ai", 
"application/postscript"}, { "aif", "audio/x-aiff"}, { "aifc", "audio/x-aiff"}, { "aiff", "audio/x-aiff"}, { "air", "application/vnd.adobe.air-application-installer-package+zip"}, { "ami", "application/vnd.amiga.ami"}, { "apk", "application/vnd.android.package-archive"}, { "application", "application/x-ms-application"}, { "apr", "application/vnd.lotus-approach"}, { "asc", "application/pgp-signature"}, { "asf", "video/x-ms-asf"}, { "asm", "text/x-asm"}, { "aso", "application/vnd.accpac.simply.aso"}, { "asr", "video/x-ms-asf"}, { "asx", "video/x-ms-asf"}, { "atc", "application/vnd.acucorp"}, { "atom", "application/atom+xml"}, { "atomcat", "application/atomcat+xml"}, { "atomsvc", "application/atomsvc+xml"}, { "atx", "application/vnd.antix.game-component"}, { "au", "audio/basic"}, { "avi", "video/x-msvideo"}, { "aw", "application/applixware"}, { "axs", "application/olescript"}, { "azf", "application/vnd.airzip.filesecure.azf"}, { "azs", "application/vnd.airzip.filesecure.azs"}, { "azw", "application/vnd.amazon.ebook"}, { "bas", "text/plain"}, { "bat", "application/x-msdownload"}, { "bcpio", "application/x-bcpio"}, { "bdf", "application/x-font-bdf"}, { "bdm", "application/vnd.syncml.dm+wbxml"}, { "bh2", "application/vnd.fujitsu.oasysprs"}, { "bin", "application/octet-stream"}, { "bmi", "application/vnd.bmi"}, { "bmp", "image/bmp"}, { "book", "application/vnd.framemaker"}, { "box", "application/vnd.previewsystems.box"}, { "boz", "application/x-bzip2"}, { "bpk", "application/octet-stream"}, { "btif", "image/prs.btif"}, { "bz2", "application/x-bzip2"}, { "bz", "application/x-bzip"}, { "c4d", "application/vnd.clonk.c4group"}, { "c4f", "application/vnd.clonk.c4group"}, { "c4g", "application/vnd.clonk.c4group"}, { "c4p", "application/vnd.clonk.c4group"}, { "c4u", "application/vnd.clonk.c4group"}, { "cab", "application/vnd.ms-cab-compressed"}, { "car", "application/vnd.curl.car"}, { "cat", "application/vnd.ms-pki.seccat"}, { "cct", "application/x-director"}, { "cc", 
"text/x-c"}, { "ccxml", "application/ccxml+xml"}, { "cdbcmsg", "application/vnd.contact.cmsg"}, { "cdf", "application/x-cdf"}, { "cdkey", "application/vnd.mediastation.cdkey"}, { "cdx", "chemical/x-cdx"}, { "cdxml", "application/vnd.chemdraw+xml"}, { "cdy", "application/vnd.cinderella"}, { "cer", "application/x-x509-ca-cert"}, { "cgm", "image/cgm"}, { "chat", "application/x-chat"}, { "chm", "application/vnd.ms-htmlhelp"}, { "chrt", "application/vnd.kde.kchart"}, { "cif", "chemical/x-cif"}, { "cii", "application/vnd.anser-web-certificate-issue-initiation"}, { "cla", "application/vnd.claymore"}, { "class", "application/java-vm"}, { "clkk", "application/vnd.crick.clicker.keyboard"}, { "clkp", "application/vnd.crick.clicker.palette"}, { "clkt", "application/vnd.crick.clicker.template"}, { "clkw", "application/vnd.crick.clicker.wordbank"}, { "clkx", "application/vnd.crick.clicker"}, { "clp", "application/x-msclip"}, { "cmc", "application/vnd.cosmocaller"}, { "cmdf", "chemical/x-cmdf"}, { "cml", "chemical/x-cml"}, { "cmp", "application/vnd.yellowriver-custom-menu"}, { "cmx", "image/x-cmx"}, { "cod", "application/vnd.rim.cod"}, { "com", "application/x-msdownload"}, { "conf", "text/plain"}, { "cpio", "application/x-cpio"}, { "cpp", "text/x-c"}, { "cpt", "application/mac-compactpro"}, { "crd", "application/x-mscardfile"}, { "crl", "application/pkix-crl"}, { "crt", "application/x-x509-ca-cert"}, { "csh", "application/x-csh"}, { "csml", "chemical/x-csml"}, { "csp", "application/vnd.commonspace"}, { "css", "text/css"}, { "cst", "application/x-director"}, { "csv", "text/csv"}, { "c", "text/plain"}, { "cu", "application/cu-seeme"}, { "curl", "text/vnd.curl"}, { "cww", "application/prs.cww"}, { "cxt", "application/x-director"}, { "cxx", "text/x-c"}, { "daf", "application/vnd.mobius.daf"}, { "dataless", "application/vnd.fdsn.seed"}, { "davmount", "application/davmount+xml"}, { "dcr", "application/x-director"}, { "dcurl", "text/vnd.curl.dcurl"}, { "dd2", 
"application/vnd.oma.dd2+xml"}, { "ddd", "application/vnd.fujixerox.ddd"}, { "deb", "application/x-debian-package"}, { "def", "text/plain"}, { "deploy", "application/octet-stream"}, { "der", "application/x-x509-ca-cert"}, { "dfac", "application/vnd.dreamfactory"}, { "dic", "text/x-c"}, { "diff", "text/plain"}, { "dir", "application/x-director"}, { "dis", "application/vnd.mobius.dis"}, { "dist", "application/octet-stream"}, { "distz", "application/octet-stream"}, { "djv", "image/vnd.djvu"}, { "djvu", "image/vnd.djvu"}, { "dll", "application/x-msdownload"}, { "dmg", "application/octet-stream"}, { "dms", "application/octet-stream"}, { "dna", "application/vnd.dna"}, { "doc", "application/msword"}, { "docm", "application/vnd.ms-word.document.macroenabled.12"}, { "docx", "application/vnd.openxmlformats-officedocument.wordprocessingml.document"}, { "dot", "application/msword"}, { "dotm", "application/vnd.ms-word.template.macroenabled.12"}, { "dotx", "application/vnd.openxmlformats-officedocument.wordprocessingml.template"}, { "dp", "application/vnd.osgi.dp"}, { "dpg", "application/vnd.dpgraph"}, { "dsc", "text/prs.lines.tag"}, { "dtb", "application/x-dtbook+xml"}, { "dtd", "application/xml-dtd"}, { "dts", "audio/vnd.dts"}, { "dtshd", "audio/vnd.dts.hd"}, { "dump", "application/octet-stream"}, { "dvi", "application/x-dvi"}, { "dwf", "model/vnd.dwf"}, { "dwg", "image/vnd.dwg"}, { "dxf", "image/vnd.dxf"}, { "dxp", "application/vnd.spotfire.dxp"}, { "dxr", "application/x-director"}, { "ecelp4800", "audio/vnd.nuera.ecelp4800"}, { "ecelp7470", "audio/vnd.nuera.ecelp7470"}, { "ecelp9600", "audio/vnd.nuera.ecelp9600"}, { "ecma", "application/ecmascript"}, { "edm", "application/vnd.novadigm.edm"}, { "edx", "application/vnd.novadigm.edx"}, { "efif", "application/vnd.picsel"}, { "ei6", "application/vnd.pg.osasli"}, { "elc", "application/octet-stream"}, { "eml", "message/rfc822"}, { "emma", "application/emma+xml"}, { "eol", "audio/vnd.digital-winds"}, { "eot", 
"application/vnd.ms-fontobject"}, { "eps", "application/postscript"}, { "epub", "application/epub+zip"}, { "es3", "application/vnd.eszigno3+xml"}, { "esf", "application/vnd.epson.esf"}, { "espass", "application/vnd.espass-espass+zip"}, { "et3", "application/vnd.eszigno3+xml"}, { "etx", "text/x-setext"}, { "evy", "application/envoy"}, { "exe", "application/octet-stream"}, { "ext", "application/vnd.novadigm.ext"}, { "ez2", "application/vnd.ezpix-album"}, { "ez3", "application/vnd.ezpix-package"}, { "ez", "application/andrew-inset"}, { "f4v", "video/x-f4v"}, { "f77", "text/x-fortran"}, { "f90", "text/x-fortran"}, { "fbs", "image/vnd.fastbidsheet"}, { "fdf", "application/vnd.fdf"}, { "fe_launch", "application/vnd.denovo.fcselayout-link"}, { "fg5", "application/vnd.fujitsu.oasysgp"}, { "fgd", "application/x-director"}, { "fh4", "image/x-freehand"}, { "fh5", "image/x-freehand"}, { "fh7", "image/x-freehand"}, { "fhc", "image/x-freehand"}, { "fh", "image/x-freehand"}, { "fif", "application/fractals"}, { "fig", "application/x-xfig"}, { "fli", "video/x-fli"}, { "flo", "application/vnd.micrografx.flo"}, { "flr", "x-world/x-vrml"}, { "flv", "video/x-flv"}, { "flw", "application/vnd.kde.kivio"}, { "flx", "text/vnd.fmi.flexstor"}, { "fly", "text/vnd.fly"}, { "fm", "application/vnd.framemaker"}, { "fnc", "application/vnd.frogans.fnc"}, { "for", "text/x-fortran"}, { "fpx", "image/vnd.fpx"}, { "frame", "application/vnd.framemaker"}, { "fsc", "application/vnd.fsc.weblaunch"}, { "fst", "image/vnd.fst"}, { "ftc", "application/vnd.fluxtime.clip"}, { "f", "text/x-fortran"}, { "fti", "application/vnd.anser-web-funds-transfer-initiation"}, { "fvt", "video/vnd.fvt"}, { "fzs", "application/vnd.fuzzysheet"}, { "g3", "image/g3fax"}, { "gac", "application/vnd.groove-account"}, { "gdl", "model/vnd.gdl"}, { "geo", "application/vnd.dynageo"}, { "gex", "application/vnd.geometry-explorer"}, { "ggb", "application/vnd.geogebra.file"}, { "ggt", "application/vnd.geogebra.tool"}, { "ghf", 
"application/vnd.groove-help"}, { "gif", "image/gif"}, { "gim", "application/vnd.groove-identity-message"}, { "gmx", "application/vnd.gmx"}, { "gnumeric", "application/x-gnumeric"}, { "gph", "application/vnd.flographit"}, { "gqf", "application/vnd.grafeq"}, { "gqs", "application/vnd.grafeq"}, { "gram", "application/srgs"}, { "gre", "application/vnd.geometry-explorer"}, { "grv", "application/vnd.groove-injector"}, { "grxml", "application/srgs+xml"}, { "gsf", "application/x-font-ghostscript"}, { "gtar", "application/x-gtar"}, { "gtm", "application/vnd.groove-tool-message"}, { "gtw", "model/vnd.gtw"}, { "gv", "text/vnd.graphviz"}, { "gz", "application/x-gzip"}, { "h261", "video/h261"}, { "h263", "video/h263"}, { "h264", "video/h264"}, { "hbci", "application/vnd.hbci"}, { "hdf", "application/x-hdf"}, { "hh", "text/x-c"}, { "hlp", "application/winhlp"}, { "hpgl", "application/vnd.hp-hpgl"}, { "hpid", "application/vnd.hp-hpid"}, { "hps", "application/vnd.hp-hps"}, { "hqx", "application/mac-binhex40"}, { "hta", "application/hta"}, { "htc", "text/x-component"}, { "h", "text/plain"}, { "htke", "application/vnd.kenameaapp"}, { "html", "text/html"}, { "htm", "text/html"}, { "htt", "text/webviewhtml"}, { "hvd", "application/vnd.yamaha.hv-dic"}, { "hvp", "application/vnd.yamaha.hv-voice"}, { "hvs", "application/vnd.yamaha.hv-script"}, { "icc", "application/vnd.iccprofile"}, { "ice", "x-conference/x-cooltalk"}, { "icm", "application/vnd.iccprofile"}, { "ico", "image/x-icon"}, { "ics", "text/calendar"}, { "ief", "image/ief"}, { "ifb", "text/calendar"}, { "ifm", "application/vnd.shana.informed.formdata"}, { "iges", "model/iges"}, { "igl", "application/vnd.igloader"}, { "igs", "model/iges"}, { "igx", "application/vnd.micrografx.igx"}, { "iif", "application/vnd.shana.informed.interchange"}, { "iii", "application/x-iphone"}, { "imp", "application/vnd.accpac.simply.imp"}, { "ims", "application/vnd.ms-ims"}, { "ins", "application/x-internet-signup"}, { "in", "text/plain"}, { "ipk", 
"application/vnd.shana.informed.package"}, { "irm", "application/vnd.ibm.rights-management"}, { "irp", "application/vnd.irepository.package+xml"}, { "iso", "application/octet-stream"}, { "isp", "application/x-internet-signup"}, { "itp", "application/vnd.shana.informed.formtemplate"}, { "ivp", "application/vnd.immervision-ivp"}, { "ivu", "application/vnd.immervision-ivu"}, { "jad", "text/vnd.sun.j2me.app-descriptor"}, { "jam", "application/vnd.jam"}, { "jar", "application/java-archive"}, { "java", "text/x-java-source"}, { "jfif", "image/pipeg"}, { "jisp", "application/vnd.jisp"}, { "jlt", "application/vnd.hp-jlyt"}, { "jnlp", "application/x-java-jnlp-file"}, { "joda", "application/vnd.joost.joda-archive"}, { "jpeg", "image/jpeg"}, { "jpe", "image/jpeg"}, { "jpg", "image/jpeg"}, { "jpgm", "video/jpm"}, { "jpgv", "video/jpeg"}, { "jpm", "video/jpm"}, { "js", "application/x-javascript"}, { "json", "application/json"}, { "kar", "audio/midi"}, { "karbon", "application/vnd.kde.karbon"}, { "kfo", "application/vnd.kde.kformula"}, { "kia", "application/vnd.kidspiration"}, { "kil", "application/x-killustrator"}, { "kml", "application/vnd.google-earth.kml+xml"}, { "kmz", "application/vnd.google-earth.kmz"}, { "kne", "application/vnd.kinar"}, { "knp", "application/vnd.kinar"}, { "kon", "application/vnd.kde.kontour"}, { "kpr", "application/vnd.kde.kpresenter"}, { "kpt", "application/vnd.kde.kpresenter"}, { "ksh", "text/plain"}, { "ksp", "application/vnd.kde.kspread"}, { "ktr", "application/vnd.kahootz"}, { "ktz", "application/vnd.kahootz"}, { "kwd", "application/vnd.kde.kword"}, { "kwt", "application/vnd.kde.kword"}, { "latex", "application/x-latex"}, { "lbd", "application/vnd.llamagraphics.life-balance.desktop"}, { "lbe", "application/vnd.llamagraphics.life-balance.exchange+xml"}, { "les", "application/vnd.hhe.lesson-player"}, { "lha", "application/octet-stream"}, { "link66", "application/vnd.route66.link66+xml"}, { "list3820", "application/vnd.ibm.modcap"}, { "listafp", 
"application/vnd.ibm.modcap"}, { "list", "text/plain"}, { "log", "text/plain"}, { "lostxml", "application/lost+xml"}, { "lrf", "application/octet-stream"}, { "lrm", "application/vnd.ms-lrm"}, { "lsf", "video/x-la-asf"}, { "lsx", "video/x-la-asf"}, { "ltf", "application/vnd.frogans.ltf"}, { "lvp", "audio/vnd.lucent.voice"}, { "lwp", "application/vnd.lotus-wordpro"}, { "lzh", "application/octet-stream"}, { "m13", "application/x-msmediaview"}, { "m14", "application/x-msmediaview"}, { "m1v", "video/mpeg"}, { "m2a", "audio/mpeg"}, { "m2v", "video/mpeg"}, { "m3a", "audio/mpeg"}, { "m3u", "audio/x-mpegurl"}, { "m4u", "video/vnd.mpegurl"}, { "m4v", "video/x-m4v"}, { "ma", "application/mathematica"}, { "mag", "application/vnd.ecowin.chart"}, { "maker", "application/vnd.framemaker"}, { "man", "text/troff"}, { "mathml", "application/mathml+xml"}, { "mb", "application/mathematica"}, { "mbk", "application/vnd.mobius.mbk"}, { "mbox", "application/mbox"}, { "mc1", "application/vnd.medcalcdata"}, { "mcd", "application/vnd.mcd"}, { "mcurl", "text/vnd.curl.mcurl"}, { "mdb", "application/x-msaccess"}, { "mdi", "image/vnd.ms-modi"}, { "mesh", "model/mesh"}, { "me", "text/troff"}, { "mfm", "application/vnd.mfmp"}, { "mgz", "application/vnd.proteus.magazine"}, { "mht", "message/rfc822"}, { "mhtml", "message/rfc822"}, { "mid", "audio/midi"}, { "midi", "audio/midi"}, { "mif", "application/vnd.mif"}, { "mime", "message/rfc822"}, { "mj2", "video/mj2"}, { "mjp2", "video/mj2"}, { "mlp", "application/vnd.dolby.mlp"}, { "mmd", "application/vnd.chipnuts.karaoke-mmd"}, { "mmf", "application/vnd.smaf"}, { "mmr", "image/vnd.fujixerox.edmics-mmr"}, { "mny", "application/x-msmoney"}, { "mobi", "application/x-mobipocket-ebook"}, { "movie", "video/x-sgi-movie"}, { "mov", "video/quicktime"}, { "mp2a", "audio/mpeg"}, { "mp2", "video/mpeg"}, { "mp3", "audio/mpeg"}, { "mp4a", "audio/mp4"}, { "mp4s", "application/mp4"}, { "mp4", "video/mp4"}, { "mp4v", "video/mp4"}, { "mpa", "video/mpeg"}, { "mpc", 
"application/vnd.mophun.certificate"}, { "mpeg", "video/mpeg"}, { "mpe", "video/mpeg"}, { "mpg4", "video/mp4"}, { "mpga", "audio/mpeg"}, { "mpg", "video/mpeg"}, { "mpkg", "application/vnd.apple.installer+xml"}, { "mpm", "application/vnd.blueice.multipass"}, { "mpn", "application/vnd.mophun.application"}, { "mpp", "application/vnd.ms-project"}, { "mpt", "application/vnd.ms-project"}, { "mpv2", "video/mpeg"}, { "mpy", "application/vnd.ibm.minipay"}, { "mqy", "application/vnd.mobius.mqy"}, { "mrc", "application/marc"}, { "mscml", "application/mediaservercontrol+xml"}, { "mseed", "application/vnd.fdsn.mseed"}, { "mseq", "application/vnd.mseq"}, { "msf", "application/vnd.epson.msf"}, { "msh", "model/mesh"}, { "msi", "application/x-msdownload"}, { "ms", "text/troff"}, { "msty", "application/vnd.muvee.style"}, { "mts", "model/vnd.mts"}, { "mus", "application/vnd.musician"}, { "musicxml", "application/vnd.recordare.musicxml+xml"}, { "mvb", "application/x-msmediaview"}, { "mxf", "application/mxf"}, { "mxl", "application/vnd.recordare.musicxml"}, { "mxml", "application/xv+xml"}, { "mxs", "application/vnd.triscape.mxs"}, { "mxu", "video/vnd.mpegurl"}, { "nb", "application/mathematica"}, { "nc", "application/x-netcdf"}, { "ncx", "application/x-dtbncx+xml"}, { "n-gage", "application/vnd.nokia.n-gage.symbian.install"}, { "ngdat", "application/vnd.nokia.n-gage.data"}, { "nlu", "application/vnd.neurolanguage.nlu"}, { "nml", "application/vnd.enliven"}, { "nnd", "application/vnd.noblenet-directory"}, { "nns", "application/vnd.noblenet-sealer"}, { "nnw", "application/vnd.noblenet-web"}, { "npx", "image/vnd.net-fpx"}, { "nsf", "application/vnd.lotus-notes"}, { "nws", "message/rfc822"}, { "oa2", "application/vnd.fujitsu.oasys2"}, { "oa3", "application/vnd.fujitsu.oasys3"}, { "o", "application/octet-stream"}, { "oas", "application/vnd.fujitsu.oasys"}, { "obd", "application/x-msbinder"}, { "obj", "application/octet-stream"}, { "oda", "application/oda"}, { "odb", 
"application/vnd.oasis.opendocument.database"}, { "odc", "application/vnd.oasis.opendocument.chart"}, { "odf", "application/vnd.oasis.opendocument.formula"}, { "odft", "application/vnd.oasis.opendocument.formula-template"}, { "odg", "application/vnd.oasis.opendocument.graphics"}, { "odi", "application/vnd.oasis.opendocument.image"}, { "odp", "application/vnd.oasis.opendocument.presentation"}, { "ods", "application/vnd.oasis.opendocument.spreadsheet"}, { "odt", "application/vnd.oasis.opendocument.text"}, { "oga", "audio/ogg"}, { "ogg", "audio/ogg"}, { "ogv", "video/ogg"}, { "ogx", "application/ogg"}, { "onepkg", "application/onenote"}, { "onetmp", "application/onenote"}, { "onetoc2", "application/onenote"}, { "onetoc", "application/onenote"}, { "opf", "application/oebps-package+xml"}, { "oprc", "application/vnd.palm"}, { "org", "application/vnd.lotus-organizer"}, { "osf", "application/vnd.yamaha.openscoreformat"}, { "osfpvg", "application/vnd.yamaha.openscoreformat.osfpvg+xml"}, { "otc", "application/vnd.oasis.opendocument.chart-template"}, { "otf", "application/x-font-otf"}, { "otg", "application/vnd.oasis.opendocument.graphics-template"}, { "oth", "application/vnd.oasis.opendocument.text-web"}, { "oti", "application/vnd.oasis.opendocument.image-template"}, { "otm", "application/vnd.oasis.opendocument.text-master"}, { "otp", "application/vnd.oasis.opendocument.presentation-template"}, { "ots", "application/vnd.oasis.opendocument.spreadsheet-template"}, { "ott", "application/vnd.oasis.opendocument.text-template"}, { "oxt", "application/vnd.openofficeorg.extension"}, { "p10", "application/pkcs10"}, { "p12", "application/x-pkcs12"}, { "p7b", "application/x-pkcs7-certificates"}, { "p7c", "application/x-pkcs7-mime"}, { "p7m", "application/x-pkcs7-mime"}, { "p7r", "application/x-pkcs7-certreqresp"}, { "p7s", "application/x-pkcs7-signature"}, { "pas", "text/x-pascal"}, { "pbd", "application/vnd.powerbuilder6"}, { "pbm", "image/x-portable-bitmap"}, { "pcf", 
"application/x-font-pcf"}, { "pcl", "application/vnd.hp-pcl"}, { "pclxl", "application/vnd.hp-pclxl"}, { "pct", "image/x-pict"}, { "pcurl", "application/vnd.curl.pcurl"}, { "pcx", "image/x-pcx"}, { "pdb", "application/vnd.palm"}, { "pdf", "application/pdf"}, { "pfa", "application/x-font-type1"}, { "pfb", "application/x-font-type1"}, { "pfm", "application/x-font-type1"}, { "pfr", "application/font-tdpfr"}, { "pfx", "application/x-pkcs12"}, { "pgm", "image/x-portable-graymap"}, { "pgn", "application/x-chess-pgn"}, { "pgp", "application/pgp-encrypted"}, { "pic", "image/x-pict"}, { "pkg", "application/octet-stream"}, { "pki", "application/pkixcmp"}, { "pkipath", "application/pkix-pkipath"}, { "pkpass", "application/vnd-com.apple.pkpass"}, { "pko", "application/ynd.ms-pkipko"}, { "plb", "application/vnd.3gpp.pic-bw-large"}, { "plc", "application/vnd.mobius.plc"}, { "plf", "application/vnd.pocketlearn"}, { "pls", "application/pls+xml"}, { "pl", "text/plain"}, { "pma", "application/x-perfmon"}, { "pmc", "application/x-perfmon"}, { "pml", "application/x-perfmon"}, { "pmr", "application/x-perfmon"}, { "pmw", "application/x-perfmon"}, { "png", "image/png"}, { "pnm", "image/x-portable-anymap"}, { "portpkg", "application/vnd.macports.portpkg"}, { "pot,", "application/vnd.ms-powerpoint"}, { "pot", "application/vnd.ms-powerpoint"}, { "potm", "application/vnd.ms-powerpoint.template.macroenabled.12"}, { "potx", "application/vnd.openxmlformats-officedocument.presentationml.template"}, { "ppa", "application/vnd.ms-powerpoint"}, { "ppam", "application/vnd.ms-powerpoint.addin.macroenabled.12"}, { "ppd", "application/vnd.cups-ppd"}, { "ppm", "image/x-portable-pixmap"}, { "pps", "application/vnd.ms-powerpoint"}, { "ppsm", "application/vnd.ms-powerpoint.slideshow.macroenabled.12"}, { "ppsx", "application/vnd.openxmlformats-officedocument.presentationml.slideshow"}, { "ppt", "application/vnd.ms-powerpoint"}, { "pptm", "application/vnd.ms-powerpoint.presentation.macroenabled.12"}, { 
"pptx", "application/vnd.openxmlformats-officedocument.presentationml.presentation"}, { "pqa", "application/vnd.palm"}, { "prc", "application/x-mobipocket-ebook"}, { "pre", "application/vnd.lotus-freelance"}, { "prf", "application/pics-rules"}, { "ps", "application/postscript"}, { "psb", "application/vnd.3gpp.pic-bw-small"}, { "psd", "image/vnd.adobe.photoshop"}, { "psf", "application/x-font-linux-psf"}, { "p", "text/x-pascal"}, { "ptid", "application/vnd.pvi.ptid1"}, { "pub", "application/x-mspublisher"}, { "pvb", "application/vnd.3gpp.pic-bw-var"}, { "pwn", "application/vnd.3m.post-it-notes"}, { "pwz", "application/vnd.ms-powerpoint"}, { "pya", "audio/vnd.ms-playready.media.pya"}, { "pyc", "application/x-python-code"}, { "pyo", "application/x-python-code"}, { "py", "text/x-python"}, { "pyv", "video/vnd.ms-playready.media.pyv"}, { "qam", "application/vnd.epson.quickanime"}, { "qbo", "application/vnd.intu.qbo"}, { "qfx", "application/vnd.intu.qfx"}, { "qps", "application/vnd.publishare-delta-tree"}, { "qt", "video/quicktime"}, { "qwd", "application/vnd.quark.quarkxpress"}, { "qwt", "application/vnd.quark.quarkxpress"}, { "qxb", "application/vnd.quark.quarkxpress"}, { "qxd", "application/vnd.quark.quarkxpress"}, { "qxl", "application/vnd.quark.quarkxpress"}, { "qxt", "application/vnd.quark.quarkxpress"}, { "ra", "audio/x-pn-realaudio"}, { "ram", "audio/x-pn-realaudio"}, { "rar", "application/x-rar-compressed"}, { "ras", "image/x-cmu-raster"}, { "rcprofile", "application/vnd.ipunplugged.rcprofile"}, { "rdf", "application/rdf+xml"}, { "rdz", "application/vnd.data-vision.rdz"}, { "rep", "application/vnd.businessobjects"}, { "res", "application/x-dtbresource+xml"}, { "rgb", "image/x-rgb"}, { "rif", "application/reginfo+xml"}, { "rl", "application/resource-lists+xml"}, { "rlc", "image/vnd.fujixerox.edmics-rlc"}, { "rld", "application/resource-lists-diff+xml"}, { "rm", "application/vnd.rn-realmedia"}, { "rmi", "audio/midi"}, { "rmp", "audio/x-pn-realaudio-plugin"}, { 
"rms", "application/vnd.jcp.javame.midlet-rms"}, { "rnc", "application/relax-ng-compact-syntax"}, { "roff", "text/troff"}, { "rpm", "application/x-rpm"}, { "rpss", "application/vnd.nokia.radio-presets"}, { "rpst", "application/vnd.nokia.radio-preset"}, { "rq", "application/sparql-query"}, { "rs", "application/rls-services+xml"}, { "rsd", "application/rsd+xml"}, { "rss", "application/rss+xml"}, { "rtf", "application/rtf"}, { "rtx", "text/richtext"}, { "saf", "application/vnd.yamaha.smaf-audio"}, { "sbml", "application/sbml+xml"}, { "sc", "application/vnd.ibm.secure-container"}, { "scd", "application/x-msschedule"}, { "scm", "application/vnd.lotus-screencam"}, { "scq", "application/scvp-cv-request"}, { "scs", "application/scvp-cv-response"}, { "sct", "text/scriptlet"}, { "scurl", "text/vnd.curl.scurl"}, { "sda", "application/vnd.stardivision.draw"}, { "sdc", "application/vnd.stardivision.calc"}, { "sdd", "application/vnd.stardivision.impress"}, { "sdkd", "application/vnd.solent.sdkm+xml"}, { "sdkm", "application/vnd.solent.sdkm+xml"}, { "sdp", "application/sdp"}, { "sdw", "application/vnd.stardivision.writer"}, { "see", "application/vnd.seemail"}, { "seed", "application/vnd.fdsn.seed"}, { "sema", "application/vnd.sema"}, { "semd", "application/vnd.semd"}, { "semf", "application/vnd.semf"}, { "ser", "application/java-serialized-object"}, { "setpay", "application/set-payment-initiation"}, { "setreg", "application/set-registration-initiation"}, { "sfd-hdstx", "application/vnd.hydrostatix.sof-data"}, { "sfs", "application/vnd.spotfire.sfs"}, { "sgl", "application/vnd.stardivision.writer-global"}, { "sgml", "text/sgml"}, { "sgm", "text/sgml"}, { "sh", "application/x-sh"}, { "shar", "application/x-shar"}, { "shf", "application/shf+xml"}, { "sic", "application/vnd.wap.sic"}, { "sig", "application/pgp-signature"}, { "silo", "model/mesh"}, { "sis", "application/vnd.symbian.install"}, { "sisx", "application/vnd.symbian.install"}, { "sit", "application/x-stuffit"}, { "si", 
"text/vnd.wap.si"}, { "sitx", "application/x-stuffitx"}, { "skd", "application/vnd.koan"}, { "skm", "application/vnd.koan"}, { "skp", "application/vnd.koan"}, { "skt", "application/vnd.koan"}, { "slc", "application/vnd.wap.slc"}, { "sldm", "application/vnd.ms-powerpoint.slide.macroenabled.12"}, { "sldx", "application/vnd.openxmlformats-officedocument.presentationml.slide"}, { "slt", "application/vnd.epson.salt"}, { "sl", "text/vnd.wap.sl"}, { "smf", "application/vnd.stardivision.math"}, { "smi", "application/smil+xml"}, { "smil", "application/smil+xml"}, { "snd", "audio/basic"}, { "snf", "application/x-font-snf"}, { "so", "application/octet-stream"}, { "spc", "application/x-pkcs7-certificates"}, { "spf", "application/vnd.yamaha.smaf-phrase"}, { "spl", "application/x-futuresplash"}, { "spot", "text/vnd.in3d.spot"}, { "spp", "application/scvp-vp-response"}, { "spq", "application/scvp-vp-request"}, { "spx", "audio/ogg"}, { "src", "application/x-wais-source"}, { "srx", "application/sparql-results+xml"}, { "sse", "application/vnd.kodak-descriptor"}, { "ssf", "application/vnd.epson.ssf"}, { "ssml", "application/ssml+xml"}, { "sst", "application/vnd.ms-pkicertstore"}, { "stc", "application/vnd.sun.xml.calc.template"}, { "std", "application/vnd.sun.xml.draw.template"}, { "s", "text/x-asm"}, { "stf", "application/vnd.wt.stf"}, { "sti", "application/vnd.sun.xml.impress.template"}, { "stk", "application/hyperstudio"}, { "stl", "application/vnd.ms-pki.stl"}, { "stm", "text/html"}, { "str", "application/vnd.pg.format"}, { "stw", "application/vnd.sun.xml.writer.template"}, { "sus", "application/vnd.sus-calendar"}, { "susp", "application/vnd.sus-calendar"}, { "sv4cpio", "application/x-sv4cpio"}, { "sv4crc", "application/x-sv4crc"}, { "svd", "application/vnd.svd"}, { "svg", "image/svg+xml"}, { "svgz", "image/svg+xml"}, { "swa", "application/x-director"}, { "swf", "application/x-shockwave-flash"}, { "swi", "application/vnd.arastra.swi"}, { "sxc", "application/vnd.sun.xml.calc"}, { 
"sxd", "application/vnd.sun.xml.draw"}, { "sxg", "application/vnd.sun.xml.writer.global"}, { "sxi", "application/vnd.sun.xml.impress"}, { "sxm", "application/vnd.sun.xml.math"}, { "sxw", "application/vnd.sun.xml.writer"}, { "tao", "application/vnd.tao.intent-module-archive"}, { "t", "application/x-troff"}, { "tar", "application/x-tar"}, { "tcap", "application/vnd.3gpp2.tcap"}, { "tcl", "application/x-tcl"}, { "teacher", "application/vnd.smart.teacher"}, { "tex", "application/x-tex"}, { "texi", "application/x-texinfo"}, { "texinfo", "application/x-texinfo"}, { "text", "text/plain"}, { "tfm", "application/x-tex-tfm"}, { "tgz", "application/x-gzip"}, { "tiff", "image/tiff"}, { "tif", "image/tiff"}, { "tmo", "application/vnd.tmobile-livetv"}, { "torrent", "application/x-bittorrent"}, { "tpl", "application/vnd.groove-tool-template"}, { "tpt", "application/vnd.trid.tpt"}, { "tra", "application/vnd.trueapp"}, { "trm", "application/x-msterminal"}, { "tr", "text/troff"}, { "tsv", "text/tab-separated-values"}, { "ttc", "application/x-font-ttf"}, { "ttf", "application/x-font-ttf"}, { "twd", "application/vnd.simtech-mindmapper"}, { "twds", "application/vnd.simtech-mindmapper"}, { "txd", "application/vnd.genomatix.tuxedo"}, { "txf", "application/vnd.mobius.txf"}, { "txt", "text/plain"}, { "u32", "application/x-authorware-bin"}, { "udeb", "application/x-debian-package"}, { "ufd", "application/vnd.ufdl"}, { "ufdl", "application/vnd.ufdl"}, { "uls", "text/iuls"}, { "umj", "application/vnd.umajin"}, { "unityweb", "application/vnd.unity"}, { "uoml", "application/vnd.uoml+xml"}, { "uris", "text/uri-list"}, { "uri", "text/uri-list"}, { "urls", "text/uri-list"}, { "ustar", "application/x-ustar"}, { "utz", "application/vnd.uiq.theme"}, { "uu", "text/x-uuencode"}, { "vcd", "application/x-cdlink"}, { "vcf", "text/x-vcard"}, { "vcg", "application/vnd.groove-vcard"}, { "vcs", "text/x-vcalendar"}, { "vcx", "application/vnd.vcx"}, { "vis", "application/vnd.visionary"}, { "viv", 
"video/vnd.vivo"}, { "vor", "application/vnd.stardivision.writer"}, { "vox", "application/x-authorware-bin"}, { "vrml", "x-world/x-vrml"}, { "vsd", "application/vnd.visio"}, { "vsf", "application/vnd.vsf"}, { "vss", "application/vnd.visio"}, { "vst", "application/vnd.visio"}, { "vsw", "application/vnd.visio"}, { "vtu", "model/vnd.vtu"}, { "vxml", "application/voicexml+xml"}, { "w3d", "application/x-director"}, { "wad", "application/x-doom"}, { "wav", "audio/x-wav"}, { "wax", "audio/x-ms-wax"}, { "wbmp", "image/vnd.wap.wbmp"}, { "wbs", "application/vnd.criticaltools.wbs+xml"}, { "wbxml", "application/vnd.wap.wbxml"}, { "wcm", "application/vnd.ms-works"}, { "wdb", "application/vnd.ms-works"}, { "wiz", "application/msword"}, { "wks", "application/vnd.ms-works"}, { "wma", "audio/x-ms-wma"}, { "wmd", "application/x-ms-wmd"}, { "wmf", "application/x-msmetafile"}, { "wmlc", "application/vnd.wap.wmlc"}, { "wmlsc", "application/vnd.wap.wmlscriptc"}, { "wmls", "text/vnd.wap.wmlscript"}, { "wml", "text/vnd.wap.wml"}, { "wm", "video/x-ms-wm"}, { "wmv", "video/x-ms-wmv"}, { "wmx", "video/x-ms-wmx"}, { "wmz", "application/x-ms-wmz"}, { "wpd", "application/vnd.wordperfect"}, { "wpl", "application/vnd.ms-wpl"}, { "wps", "application/vnd.ms-works"}, { "wqd", "application/vnd.wqd"}, { "wri", "application/x-mswrite"}, { "wrl", "x-world/x-vrml"}, { "wrz", "x-world/x-vrml"}, { "wsdl", "application/wsdl+xml"}, { "wspolicy", "application/wspolicy+xml"}, { "wtb", "application/vnd.webturbo"}, { "wvx", "video/x-ms-wvx"}, { "x32", "application/x-authorware-bin"}, { "x3d", "application/vnd.hzn-3d-crossword"}, { "xaf", "x-world/x-vrml"}, { "xap", "application/x-silverlight-app"}, { "xar", "application/vnd.xara"}, { "xbap", "application/x-ms-xbap"}, { "xbd", "application/vnd.fujixerox.docuworks.binder"}, { "xbm", "image/x-xbitmap"}, { "xdm", "application/vnd.syncml.dm+xml"}, { "xdp", "application/vnd.adobe.xdp+xml"}, { "xdw", "application/vnd.fujixerox.docuworks"}, { "xenc", 
"application/xenc+xml"}, { "xer", "application/patch-ops-error+xml"}, { "xfdf", "application/vnd.adobe.xfdf"}, { "xfdl", "application/vnd.xfdl"}, { "xht", "application/xhtml+xml"}, { "xhtml", "application/xhtml+xml"}, { "xhvml", "application/xv+xml"}, { "xif", "image/vnd.xiff"}, { "xla", "application/vnd.ms-excel"}, { "xlam", "application/vnd.ms-excel.addin.macroenabled.12"}, { "xlb", "application/vnd.ms-excel"}, { "xlc", "application/vnd.ms-excel"}, { "xlm", "application/vnd.ms-excel"}, { "xls", "application/vnd.ms-excel"}, { "xlsb", "application/vnd.ms-excel.sheet.binary.macroenabled.12"}, { "xlsm", "application/vnd.ms-excel.sheet.macroenabled.12"}, { "xlsx", "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"}, { "xlt", "application/vnd.ms-excel"}, { "xltm", "application/vnd.ms-excel.template.macroenabled.12"}, { "xltx", "application/vnd.openxmlformats-officedocument.spreadsheetml.template"}, { "xlw", "application/vnd.ms-excel"}, { "xml", "application/xml"}, { "xo", "application/vnd.olpc-sugar"}, { "xof", "x-world/x-vrml"}, { "xop", "application/xop+xml"}, { "xpdl", "application/xml"}, { "xpi", "application/x-xpinstall"}, { "xpm", "image/x-xpixmap"}, { "xpr", "application/vnd.is-xpr"}, { "xps", "application/vnd.ms-xpsdocument"}, { "xpw", "application/vnd.intercon.formnet"}, { "xpx", "application/vnd.intercon.formnet"}, { "xsl", "application/xml"}, { "xslt", "application/xslt+xml"}, { "xsm", "application/vnd.syncml+xml"}, { "xspf", "application/xspf+xml"}, { "xul", "application/vnd.mozilla.xul+xml"}, { "xvm", "application/xv+xml"}, { "xvml", "application/xv+xml"}, { "xwd", "image/x-xwindowdump"}, { "xyz", "chemical/x-xyz"}, { "z", "application/x-compress"}, { "zaz", "application/vnd.zzazz.deck+xml"}, { "zip", "application/zip"}, { "zir", "application/vnd.zul"}, { "zirz", "application/vnd.zul"}, { "zmm", "application/vnd.handheld-entertainment+xml"} }; public static String unfold(String s) { if (s == null) { return null; } return 
s.replaceAll("\r|\n", ""); // completes unfold(): strip every CR and LF (RFC 2822 unfolding)
}

/**
 * Decodes any RFC 2047 encoded-words contained in the given header text.
 *
 * @param s header text to decode; may be null.
 * @param message message supplying decoder context (e.g. charset fallback); may be null.
 * @return the decoded text, or null if {@code s} was null.
 */
private static String decode(String s, Message message) {
    if (s == null) {
        return null;
    } else {
        return DecoderUtil.decodeEncodedWords(s, message);
    }
}

/** Convenience overload of {@link #unfoldAndDecode(String, Message)} with no message context. */
public static String unfoldAndDecode(String s) {
    return unfoldAndDecode(s, null);
}

/** Unfolds the header value (removes CR/LF) and then decodes RFC 2047 encoded-words in it. */
public static String unfoldAndDecode(String s, Message message) {
    return decode(unfold(s), message);
}

// TODO implement proper foldAndEncode
/** Placeholder: returns the input unchanged until real folding/encoding is implemented. */
public static String foldAndEncode(String s) {
    return s;
}

/**
 * Returns the named parameter of a header field. If name is null the first
 * parameter is returned, or if there are no additional parameters in the
 * field the entire field is returned. Otherwise the named parameter is
 * searched for in a case insensitive fashion and returned.
 *
 * @param headerValue the header value
 * @param parameterName the parameter name
 * @return the value. if the parameter cannot be found the method returns null.
 */
public static String getHeaderParameter(String headerValue, String parameterName) {
    if (headerValue == null) {
        return null;
    }
    // Unfold the raw header before splitting on ';' parameter separators.
    headerValue = headerValue.replaceAll("\r|\n", "");
    String[] parts = headerValue.split(";");
    if (parameterName == null && parts.length > 0) {
        // No name requested: the first segment is the field's base value.
        return parts[0].trim();
    }
    for (String part : parts) {
        // Case-insensitive prefix match. NOTE(review): a prefix match also accepts
        // e.g. "charsetx=" when asked for "charset" -- presumably acceptable for the
        // header values seen in practice; confirm against callers.
        if (parameterName != null && part.trim().toLowerCase(Locale.US).startsWith(parameterName.toLowerCase(Locale.US))) {
            String[] partParts = part.split("=", 2);
            if (partParts.length == 2) {
                String parameter = partParts[1].trim();
                int len = parameter.length();
                if (len >= 2 && parameter.startsWith("\"") && parameter.endsWith("\"")) {
                    // Strip the surrounding double quotes from a quoted parameter value.
                    return parameter.substring(1, len - 1);
                } else {
                    return parameter;
                }
            }
        }
    }
    return null;
}

/**
 * Depth-first search for the first part whose MIME type matches {@code mimeType}.
 * Recurses into multipart bodies; for a leaf part the type comparison is made directly.
 */
public static Part findFirstPartByMimeType(Part part, String mimeType) {
    if (part.getBody() instanceof Multipart) {
        Multipart multipart = (Multipart)part.getBody();
        for (BodyPart bodyPart : multipart.getBodyParts()) {
            Part ret = MimeUtility.findFirstPartByMimeType(bodyPart, mimeType);
            if (ret != null) {
                return ret;
            }
        }
    } else if
(isSameMimeType(part.getMimeType(), mimeType)) {
    // Leaf part: return it when its MIME type matches the requested one.
    return part;
}
return null;
}

/**
 * Returns true if the given mimeType matches the matchAgainst specification.
 * @param mimeType A MIME type to check.
 * @param matchAgainst A MIME type to check against. May include wildcards such as image/* or
 * * /*.
 * @return true when {@code mimeType} matches the pattern (case-insensitively).
 */
public static boolean mimeTypeMatches(String mimeType, String matchAgainst) {
    // NOTE(review): only '*' is rewritten to '.*'; other regex metacharacters in
    // matchAgainst (e.g. '+' in "image/svg+xml") pass through unescaped -- assumed that
    // callers only supply simple type/subtype patterns; confirm.
    Pattern p = Pattern.compile(matchAgainst.replaceAll("\\*", "\\.\\*"), Pattern.CASE_INSENSITIVE);
    return p.matcher(mimeType).matches();
}

/** True when the given type equals the default attachment MIME type (case-insensitive). */
public static boolean isDefaultMimeType(String mimeType) {
    return isSameMimeType(mimeType, DEFAULT_ATTACHMENT_MIME_TYPE);
}

/**
 * Streams {@code in} into a temp-file-backed body, recording the declared
 * content-transfer-encoding so the data can be decoded later.
 *
 * @param in source of the (still encoded) body data; fully consumed, not closed here.
 * @param contentTransferEncoding raw Content-Transfer-Encoding header value; may be null.
 * @param contentType used only to pick a message-aware temp body for message types.
 * @return the populated temp-file body.
 * @throws IOException if copying to the temporary file fails.
 */
public static Body createBody(InputStream in, String contentTransferEncoding, String contentType) throws IOException {
    if (contentTransferEncoding != null) {
        // Normalize e.g. "base64; x=y" down to just the leading encoding token.
        contentTransferEncoding = MimeUtility.getHeaderParameter(contentTransferEncoding, null);
    }
    BinaryTempFileBody tempBody;
    if (MimeUtil.isMessage(contentType)) {
        tempBody = new BinaryTempFileMessageBody(contentTransferEncoding);
    } else {
        tempBody = new BinaryTempFileBody(contentTransferEncoding);
    }
    OutputStream out = tempBody.getOutputStream();
    try {
        IOUtils.copy(in, out);
    } finally {
        // Always close the temp-file sink, even when the copy fails.
        out.close();
    }
    return tempBody;
}

/**
 * Get decoded contents of a body.
 * <p/>
 * Right now only some classes retain the original encoding of the body contents. Those classes have to implement
 * the {@link RawDataBody} interface in order for this method to decode the data delivered by
 * {@link Body#getInputStream()}.
 * <p/>
 * The ultimate goal is to get to a point where all classes retain the original data and {@code RawDataBody} can be
 * merged into {@link Body}.
 */
public static InputStream decodeBody(Body body) throws MessagingException {
    InputStream inputStream;
    if (body instanceof RawDataBody) {
        RawDataBody rawDataBody = (RawDataBody) body;
        String encoding = rawDataBody.getEncoding();
        final InputStream rawInputStream = rawDataBody.getInputStream();
        if (MimeUtil.ENC_7BIT.equalsIgnoreCase(encoding)
                || MimeUtil.ENC_8BIT.equalsIgnoreCase(encoding)
                || MimeUtil.ENC_BINARY.equalsIgnoreCase(encoding)) {
            // Identity encodings: the raw stream already carries the decoded content.
            inputStream = rawInputStream;
        } else if (MimeUtil.ENC_BASE64.equalsIgnoreCase(encoding)) {
            inputStream = new Base64InputStream(rawInputStream, false) {
                @Override
                public void close() throws IOException {
                    super.close();
                    // Close the underlying stream but keep any backing temp file on disk.
                    closeInputStreamWithoutDeletingTemporaryFiles(rawInputStream);
                }
            };
        } else if (MimeUtil.ENC_QUOTED_PRINTABLE.equalsIgnoreCase(encoding)) {
            inputStream = new QuotedPrintableInputStream(rawInputStream) {
                @Override
                public void close() throws IOException {
                    super.close();
                    closeInputStreamWithoutDeletingTemporaryFiles(rawInputStream);
                }
            };
        } else {
            // Unknown encoding: log a warning and fall back to the raw (undecoded) stream.
            Log.w(LOG_TAG, "Unsupported encoding: " + encoding);
            inputStream = rawInputStream;
        }
    } else {
        // Body does not expose raw data; its own stream is assumed already decoded.
        inputStream = body.getInputStream();
    }
    return inputStream;
}

/**
 * Closes the stream; when it is backed by a temporary body file, the file itself is
 * preserved so it can be reopened later.
 */
public static void closeInputStreamWithoutDeletingTemporaryFiles(InputStream rawInputStream) throws IOException {
    if (rawInputStream instanceof BinaryTempFileBody.BinaryTempFileBodyInputStream) {
        ((BinaryTempFileBody.BinaryTempFileBodyInputStream) rawInputStream).closeWithoutDeleting();
    } else {
        rawInputStream.close();
    }
}

/**
 * Best-effort MIME type lookup for a filename, based on its extension.
 * Falls back to the class's own extension table when the platform lookup
 * yields nothing specific (see continuation below).
 */
public static String getMimeTypeByExtension(String filename) {
    String returnedType = null;
    String extension = null;
    if (filename != null && filename.lastIndexOf('.') != -1) {
        extension = filename.substring(filename.lastIndexOf('.') + 1).toLowerCase(Locale.US);
        // Ask the platform first; this may return null or a generic type.
        returnedType = android.webkit.MimeTypeMap.getSingleton().getMimeTypeFromExtension(extension);
    }
    // If the MIME type set by the user's mailer is application/octet-stream, try to figure
    // out whether there's a sane file type extension.
if (returnedType != null && !isSameMimeType(returnedType, DEFAULT_ATTACHMENT_MIME_TYPE)) {
    // The platform returned something more specific than the generic default; trust it.
    return returnedType;
} else if (extension != null) {
    // Fall back to the bundled extension -> MIME type table.
    for (String[] contentTypeMapEntry : MIME_TYPE_BY_EXTENSION_MAP) {
        if (contentTypeMapEntry[0].equals(extension)) {
            return contentTypeMapEntry[1];
        }
    }
}
return DEFAULT_ATTACHMENT_MIME_TYPE;
}

/**
 * Reverse lookup: returns the first extension in the bundled table that maps to the
 * given MIME type.
 *
 * @param mimeType MIME type to look up (compared lower-cased).
 * @return the matching extension, or null when the type is not in the table.
 */
public static String getExtensionByMimeType(@NonNull String mimeType) {
    String lowerCaseMimeType = mimeType.toLowerCase(Locale.US);
    for (String[] contentTypeMapEntry : MIME_TYPE_BY_EXTENSION_MAP) {
        if (contentTypeMapEntry[1].equals(lowerCaseMimeType)) {
            return contentTypeMapEntry[0];
        }
    }
    return null;
}

/**
 * Get a default content-transfer-encoding for use with a given content-type
 * when adding an unencoded attachment. It's possible that 8bit encodings
 * may later be converted to 7bit for 7bit transport.
 * <ul>
 * <li>null: base64
 * <li>message/rfc822: 8bit
 * <li>message/*: 7bit
 * <li>multipart/signed: 7bit
 * <li>multipart/*: 8bit
 * <li>*&#47;*: base64
 * </ul>
 *
 * @param type
 *            A String representing a MIME content-type
 * @return A String representing a MIME content-transfer-encoding
 */
public static String getEncodingforType(String type) {
    if (type == null) {
        return (MimeUtil.ENC_BASE64);
    } else if (MimeUtil.isMessage(type)) {
        return (MimeUtil.ENC_8BIT);
    } else if (isSameMimeType(type, "multipart/signed") || isMessage(type)) {
        // NOTE(review): this branch mixes MimeUtil.isMessage (used above) with the local
        // isMessage ("message/rfc822" only). Per the javadoc, message/* should yield 7bit;
        // whether that holds depends on MimeUtil.isMessage's semantics -- confirm.
        return (MimeUtil.ENC_7BIT);
    } else if (isMultipart(type)) {
        return (MimeUtil.ENC_8BIT);
    } else {
        return (MimeUtil.ENC_BASE64);
    }
}

/** True when the type is any multipart/* (case-insensitive); null never matches. */
public static boolean isMultipart(String mimeType) {
    return mimeType != null && mimeType.toLowerCase(Locale.US).startsWith("multipart/");
}

/** True when the type is exactly message/rfc822 (case-insensitive). */
public static boolean isMessage(String mimeType) {
    return isSameMimeType(mimeType, "message/rfc822");
}

/** Null-safe case-insensitive MIME type comparison; a null {@code mimeType} never matches. */
public static boolean isSameMimeType(String mimeType, String otherMimeType) {
    return mimeType != null && mimeType.equalsIgnoreCase(otherMimeType);
}
}
package com.hsr.hemant.ppp; import android.app.Activity; import android.content.Intent; import android.content.IntentSender; import android.location.Location; import android.os.Bundle; import android.support.v7.app.AppCompatActivity; import android.util.Log; import android.view.View; import android.widget.Toast; import com.google.android.gms.common.ConnectionResult; import com.google.android.gms.common.api.GoogleApiClient; import com.google.android.gms.common.api.GoogleApiClient.ConnectionCallbacks; import com.google.android.gms.common.api.GoogleApiClient.OnConnectionFailedListener; import com.google.android.gms.common.api.PendingResult; import com.google.android.gms.common.api.ResultCallback; import com.google.android.gms.common.api.Status; import com.google.android.gms.location.LocationListener; import com.google.android.gms.location.LocationRequest; import com.google.android.gms.location.LocationServices; import com.google.android.gms.location.LocationSettingsRequest; import com.google.android.gms.location.LocationSettingsResult; import com.google.android.gms.location.LocationSettingsStatusCodes; import java.text.DateFormat; import java.util.Date; /** * Using location settings. * <p> * Uses the {@link com.google.android.gms.location.SettingsApi} to ensure that the device's system * settings are properly configured for the app's location needs. When making a request to * Location services, the device's system settings may be in a state that prevents the app from * obtaining the location data that it needs. For example, GPS or Wi-Fi scanning may be switched * off. The {@code SettingsApi} makes it possible to determine if a device's system settings are * adequate for the location request, and to optionally invoke a dialog that allows the user to * enable the necessary settings. * <p> * This sample allows the user to request location updates using the ACCESS_FINE_LOCATION setting * (as specified in AndroidManifest.xml). 
The sample requires that the device has location enabled * and set to the "High accuracy" mode. If location is not enabled, or if the location mode does * not permit high accuracy determination of location, the activity uses the {@code SettingsApi} * to invoke a dialog without requiring the developer to understand which settings are needed for * different Location requirements. */ public class FusedLocationManager extends AppCompatActivity implements ConnectionCallbacks, OnConnectionFailedListener, LocationListener, ResultCallback<LocationSettingsResult> { /** * The desired interval for location updates. Inexact. Updates may be more or less frequent. */ public static final long UPDATE_INTERVAL_IN_MILLISECONDS = 10000; /** * The fastest rate for active location updates. Exact. Updates will never be more frequent * than this value. */ public static final long FASTEST_UPDATE_INTERVAL_IN_MILLISECONDS = UPDATE_INTERVAL_IN_MILLISECONDS / 2; protected static final String TAG = "FusedLocatioManager"; /** * Constant used in the location settings dialog. */ protected static final int REQUEST_CHECK_SETTINGS = 0x1; // Keys for storing activity state in the Bundle. protected final static String KEY_REQUESTING_LOCATION_UPDATES = "requesting-location-updates"; protected final static String KEY_LOCATION = "location"; protected final static String KEY_LAST_UPDATED_TIME_STRING = "last-updated-time-string"; /** * Provides the entry point to Google Play services. */ protected GoogleApiClient mGoogleApiClient; /** * Stores parameters for requests to the FusedLocationProviderApi. */ protected LocationRequest mLocationRequest; /** * Stores the types of location services the client is interested in using. Used for checking * settings to determine if the device has optimal location settings. */ protected LocationSettingsRequest mLocationSettingsRequest; /** * Represents a geographical location. */ protected Location mCurrentLocation; /** * Tracks the status of the location updates request. 
Value changes when the user presses the * Start Updates and Stop Updates buttons. */ protected Boolean mRequestingLocationUpdates; /** * Time when the location was updated represented as a String. */ protected String mLastUpdateTime; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); // Locate the UI widgets. mRequestingLocationUpdates = false; mLastUpdateTime = ""; // Update values using data stored in the Bundle. updateValuesFromBundle(savedInstanceState); // Kick off the process of building the GoogleApiClient, LocationRequest, and // LocationSettingsRequest objects. buildGoogleApiClient(); createLocationRequest(); buildLocationSettingsRequest(); } /** * Updates fields based on data stored in the bundle. * * @param savedInstanceState The activity state saved in the Bundle. */ private void updateValuesFromBundle(Bundle savedInstanceState) { if (savedInstanceState != null) { // Update the value of mRequestingLocationUpdates from the Bundle, and make sure that // the Start Updates and Stop Updates buttons are correctly enabled or disabled. if (savedInstanceState.keySet().contains(KEY_REQUESTING_LOCATION_UPDATES)) { mRequestingLocationUpdates = savedInstanceState.getBoolean( KEY_REQUESTING_LOCATION_UPDATES); } // Update the value of mCurrentLocation from the Bundle and update the UI to show the // correct latitude and longitude. if (savedInstanceState.keySet().contains(KEY_LOCATION)) { // Since KEY_LOCATION was found in the Bundle, we can be sure that mCurrentLocation // is not null. mCurrentLocation = savedInstanceState.getParcelable(KEY_LOCATION); } // Update the value of mLastUpdateTime from the Bundle and update the UI. if (savedInstanceState.keySet().contains(KEY_LAST_UPDATED_TIME_STRING)) { mLastUpdateTime = savedInstanceState.getString(KEY_LAST_UPDATED_TIME_STRING); } updateUI(); } } /** * Builds a GoogleApiClient. Uses the {@code #addApi} method to request the * LocationServices API. 
*/ protected synchronized void buildGoogleApiClient() { Log.i(TAG, "Building GoogleApiClient"); mGoogleApiClient = new GoogleApiClient.Builder(this) .addConnectionCallbacks(this) .addOnConnectionFailedListener(this) .addApi(LocationServices.API) .build(); } /** * Sets up the location request. Android has two location request settings: * {@code ACCESS_COARSE_LOCATION} and {@code ACCESS_FINE_LOCATION}. These settings control * the accuracy of the current location. This sample uses ACCESS_FINE_LOCATION, as defined in * the AndroidManifest.xml. * <p> * When the ACCESS_FINE_LOCATION setting is specified, combined with a fast update * interval (5 seconds), the Fused Location Provider API returns location updates that are * accurate to within a few feet. * <p> * These settings are appropriate for mapping applications that show real-time location * updates. */ protected void createLocationRequest() { mLocationRequest = new LocationRequest(); // Sets the desired interval for active location updates. This interval is // inexact. You may not receive updates at all if no location sources are available, or // you may receive them slower than requested. You may also receive updates faster than // requested if other applications are requesting location at a faster interval. mLocationRequest.setInterval(UPDATE_INTERVAL_IN_MILLISECONDS); // Sets the fastest rate for active location updates. This interval is exact, and your // application will never receive updates faster than this value. mLocationRequest.setFastestInterval(FASTEST_UPDATE_INTERVAL_IN_MILLISECONDS); mLocationRequest.setPriority(LocationRequest.PRIORITY_HIGH_ACCURACY); } /** * Uses a {@link com.google.android.gms.location.LocationSettingsRequest.Builder} to build * a {@link com.google.android.gms.location.LocationSettingsRequest} that is used for checking * if a device has the needed location settings. 
*/ protected void buildLocationSettingsRequest() { LocationSettingsRequest.Builder builder = new LocationSettingsRequest.Builder(); builder.addLocationRequest(mLocationRequest); mLocationSettingsRequest = builder.build(); } /** * Check if the device's location settings are adequate for the app's needs using the * {@link com.google.android.gms.location.SettingsApi#checkLocationSettings(GoogleApiClient, * LocationSettingsRequest)} method, with the results provided through a {@code PendingResult}. */ protected void checkLocationSettings() { PendingResult<LocationSettingsResult> result = LocationServices.SettingsApi.checkLocationSettings( mGoogleApiClient, mLocationSettingsRequest ); result.setResultCallback(this); } /** * The callback invoked when * {@link com.google.android.gms.location.SettingsApi#checkLocationSettings(GoogleApiClient, * LocationSettingsRequest)} is called. Examines the * {@link com.google.android.gms.location.LocationSettingsResult} object and determines if * location settings are adequate. If they are not, begins the process of presenting a location * settings dialog to the user. */ @Override public void onResult(LocationSettingsResult locationSettingsResult) { final Status status = locationSettingsResult.getStatus(); switch (status.getStatusCode()) { case LocationSettingsStatusCodes.SUCCESS: Log.i(TAG, "All location settings are satisfied."); startLocationUpdates(); break; case LocationSettingsStatusCodes.RESOLUTION_REQUIRED: Log.i(TAG, "Location settings are not satisfied. Show the user a dialog to" + "upgrade location settings "); try { // Show the dialog by calling startResolutionForResult(), and check the result // in onActivityResult(). 
status.startResolutionForResult(FusedLocationManager.this, REQUEST_CHECK_SETTINGS); } catch (IntentSender.SendIntentException e) { Log.i(TAG, "PendingIntent unable to execute request."); } break; case LocationSettingsStatusCodes.SETTINGS_CHANGE_UNAVAILABLE: Log.i(TAG, "Location settings are inadequate, and cannot be fixed here. Dialog " + "not created."); break; } } @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { switch (requestCode) { // Check for the integer request code originally supplied to startResolutionForResult(). case REQUEST_CHECK_SETTINGS: switch (resultCode) { case Activity.RESULT_OK: Log.i(TAG, "User agreed to make required location settings changes."); startLocationUpdates(); break; case Activity.RESULT_CANCELED: Log.i(TAG, "User chose not to make required location settings changes."); break; } break; } } /** * Handles the Start Updates button and requests start of location updates. Does nothing if * updates have already been requested. */ public void startUpdatesButtonHandler(View view) { checkLocationSettings(); } /** * Handles the Stop Updates button, and requests removal of location updates. */ public void stopUpdatesButtonHandler(View view) { // It is a good practice to remove location requests when the activity is in a paused or // stopped state. Doing so helps battery performance and is especially // recommended in applications that request frequent location updates. stopLocationUpdates(); } /** * Requests location updates from the FusedLocationApi. */ protected void startLocationUpdates() { LocationServices.FusedLocationApi.requestLocationUpdates( mGoogleApiClient, mLocationRequest, this ).setResultCallback(new ResultCallback<Status>() { @Override public void onResult(Status status) { mRequestingLocationUpdates = true; setButtonsEnabledState(); } }); } /** * Updates all UI fields. 
 */
// NOTE(review): methods below are the lifecycle / callback tail of the enclosing
// Activity (header not visible in this chunk). Comments only — code unchanged.
private void updateUI() {
    // Refresh both the button enable-state and the location text fields.
    setButtonsEnabledState();
    updateLocationUI();
}

/**
 * Disables both buttons when functionality is disabled due to insufficient location settings.
 * Otherwise ensures that only one button is enabled at any time: Start Updates is enabled
 * when updates are NOT being requested, Stop Updates when they are.
 */
private void setButtonsEnabledState() {
    if (mRequestingLocationUpdates) {
        // NOTE(review): branch intentionally empty in this snapshot — the button
        // enable/disable calls appear to have been stripped out. TODO confirm.
    } else {
        // NOTE(review): same — empty stub, presumably mirrored button wiring.
    }
}

/**
 * Sets the value of the UI fields for the location latitude, longitude and last update time.
 */
private void updateLocationUI() {
    if (mCurrentLocation != null) {
        // NOTE(review): UI field updates stripped out in this snapshot — TODO confirm.
    }
}

/**
 * Removes location updates from the FusedLocationApi.
 * The result callback flips mRequestingLocationUpdates and refreshes the buttons.
 */
protected void stopLocationUpdates() {
    // It is a good practice to remove location requests when the activity is in a paused or
    // stopped state. Doing so helps battery performance and is especially
    // recommended in applications that request frequent location updates.
    LocationServices.FusedLocationApi.removeLocationUpdates(
            mGoogleApiClient,
            this
    ).setResultCallback(new ResultCallback<Status>() {
        @Override
        public void onResult(Status status) {
            // Only mark updates as stopped once the API confirms removal.
            mRequestingLocationUpdates = false;
            setButtonsEnabledState();
        }
    });
}

@Override
protected void onStart() {
    super.onStart();
    // Connect the client here; it is disconnected symmetrically in onStop().
    mGoogleApiClient.connect();
}

@Override
public void onResume() {
    super.onResume();
    // Within onPause() we pause location updates but leave the GoogleApiClient
    // connection intact; resume receiving updates here if the user requested them.
    if (mGoogleApiClient.isConnected() && mRequestingLocationUpdates) {
        startLocationUpdates();
    }
}

@Override
protected void onPause() {
    super.onPause();
    // Stop location updates to save battery, but don't disconnect the GoogleApiClient object.
    if (mGoogleApiClient.isConnected()) {
        stopLocationUpdates();
    }
}

@Override
protected void onStop() {
    super.onStop();
    mGoogleApiClient.disconnect();
}

/**
 * Runs when the GoogleApiClient object successfully connects.
 * Fetches the last known location only if none was restored from the saved
 * instance state, so a rotation keeps showing the originally fetched fix.
 */
@Override
public void onConnected(Bundle connectionHint) {
    Log.i(TAG, "Connected to GoogleApiClient");
    if (mCurrentLocation == null) {
        mCurrentLocation = LocationServices.FusedLocationApi.getLastLocation(mGoogleApiClient);
        mLastUpdateTime = DateFormat.getTimeInstance().format(new Date());
        updateLocationUI();
    }
}

/**
 * Callback that fires when the location changes: caches the fix and timestamp,
 * then refreshes the UI.
 */
@Override
public void onLocationChanged(Location location) {
    mCurrentLocation = location;
    mLastUpdateTime = DateFormat.getTimeInstance().format(new Date());
    updateLocationUI();
    // NOTE(review): the resource name refers to a Cast notification message —
    // looks like a copy-pasted string id; confirm this is the intended toast text.
    Toast.makeText(this, getResources().getString(R.string.cast_notification_connected_message),
            Toast.LENGTH_SHORT).show();
}

@Override
public void onConnectionSuspended(int cause) {
    Log.i(TAG, "Connection suspended");
}

@Override
public void onConnectionFailed(ConnectionResult result) {
    // Refer to the javadoc for ConnectionResult to see what error codes might be
    // returned in onConnectionFailed.
    Log.i(TAG, "Connection failed: ConnectionResult.getErrorCode() = " + result.getErrorCode());
}

/**
 * Stores activity state in the Bundle so the request flag, last fix and its
 * timestamp survive configuration changes.
 * NOTE(review): missing {@code @Override} — this overrides
 * Activity#onSaveInstanceState(Bundle); confirm and annotate.
 */
public void onSaveInstanceState(Bundle savedInstanceState) {
    savedInstanceState.putBoolean(KEY_REQUESTING_LOCATION_UPDATES, mRequestingLocationUpdates);
    savedInstanceState.putParcelable(KEY_LOCATION, mCurrentLocation);
    savedInstanceState.putString(KEY_LAST_UPDATED_TIME_STRING, mLastUpdateTime);
    super.onSaveInstanceState(savedInstanceState);
}
}
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.job.entries.folderisempty; import org.pentaho.di.job.entry.validator.AndValidator; import org.pentaho.di.job.entry.validator.JobEntryValidatorUtils; import java.io.IOException; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.vfs2.FileObject; import org.apache.commons.vfs2.FileSelectInfo; import org.apache.commons.vfs2.FileSelector; import org.apache.commons.vfs2.FileType; import org.pentaho.di.cluster.SlaveServer; import org.pentaho.di.core.CheckResultInterface; import org.pentaho.di.core.Const; import org.pentaho.di.core.util.Utils; import org.pentaho.di.core.Result; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleDatabaseException; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.job.JobMeta; import 
org.pentaho.di.job.entry.JobEntryBase; import org.pentaho.di.job.entry.JobEntryInterface; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.metastore.api.IMetaStore; import org.w3c.dom.Node; /** * This defines a 'create folder' job entry. Its main use would be to create empty folder that can be used to control * the flow in ETL cycles. * * @author Sven/Samatar * @since 18-10-2007 * */ public class JobEntryFolderIsEmpty extends JobEntryBase implements Cloneable, JobEntryInterface { private static Class<?> PKG = JobEntryFolderIsEmpty.class; // for i18n purposes, needed by Translator2!! private String foldername; private int filescount; private int folderscount; private boolean includeSubfolders; private boolean specifywildcard; private String wildcard; private Pattern pattern; public JobEntryFolderIsEmpty( String n ) { super( n, "" ); foldername = null; wildcard = null; includeSubfolders = false; specifywildcard = false; } public JobEntryFolderIsEmpty() { this( "" ); } public Object clone() { JobEntryFolderIsEmpty je = (JobEntryFolderIsEmpty) super.clone(); return je; } public String getXML() { StringBuilder retval = new StringBuilder( 50 ); retval.append( super.getXML() ); retval.append( " " ).append( XMLHandler.addTagValue( "foldername", foldername ) ); retval.append( " " ).append( XMLHandler.addTagValue( "include_subfolders", includeSubfolders ) ); retval.append( " " ).append( XMLHandler.addTagValue( "specify_wildcard", specifywildcard ) ); retval.append( " " ).append( XMLHandler.addTagValue( "wildcard", wildcard ) ); return retval.toString(); } public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, Repository rep, IMetaStore metaStore ) throws KettleXMLException { try { super.loadXML( entrynode, databases, slaveServers ); foldername = XMLHandler.getTagValue( entrynode, "foldername" ); includeSubfolders = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, 
"include_subfolders" ) ); specifywildcard = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "specify_wildcard" ) ); wildcard = XMLHandler.getTagValue( entrynode, "wildcard" ); } catch ( KettleXMLException xe ) { throw new KettleXMLException( "Unable to load job entry of type 'create folder' from XML node", xe ); } } public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers ) throws KettleException { try { foldername = rep.getJobEntryAttributeString( id_jobentry, "foldername" ); includeSubfolders = rep.getJobEntryAttributeBoolean( id_jobentry, "include_subfolders" ); specifywildcard = rep.getJobEntryAttributeBoolean( id_jobentry, "specify_wildcard" ); wildcard = rep.getJobEntryAttributeString( id_jobentry, "wildcard" ); } catch ( KettleException dbe ) { throw new KettleException( "Unable to load job entry of type 'create Folder' from the repository for id_jobentry=" + id_jobentry, dbe ); } } public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException { try { rep.saveJobEntryAttribute( id_job, getObjectId(), "foldername", foldername ); rep.saveJobEntryAttribute( id_job, getObjectId(), "include_subfolders", includeSubfolders ); rep.saveJobEntryAttribute( id_job, getObjectId(), "specify_wildcard", specifywildcard ); rep.saveJobEntryAttribute( id_job, getObjectId(), "wildcard", wildcard ); } catch ( KettleDatabaseException dbe ) { throw new KettleException( "Unable to save job entry of type 'create Folder' to the repository for id_job=" + id_job, dbe ); } } public void setSpecifyWildcard( boolean specifywildcard ) { this.specifywildcard = specifywildcard; } public boolean isSpecifyWildcard() { return specifywildcard; } public void setFoldername( String foldername ) { this.foldername = foldername; } public String getFoldername() { return foldername; } public String getRealFoldername() { return environmentSubstitute( getFoldername() ); } 
public String getWildcard() { return wildcard; } public String getRealWildcard() { return environmentSubstitute( getWildcard() ); } public void setWildcard( String wildcard ) { this.wildcard = wildcard; } public boolean isIncludeSubFolders() { return includeSubfolders; } public void setIncludeSubFolders( boolean includeSubfolders ) { this.includeSubfolders = includeSubfolders; } public Result execute( Result previousResult, int nr ) { // see PDI-10270 for details boolean oldBehavior = "Y".equalsIgnoreCase( getVariable( Const.KETTLE_COMPATIBILITY_SET_ERROR_ON_SPECIFIC_JOB_ENTRIES, "N" ) ); Result result = previousResult; result.setResult( false ); result.setNrErrors( oldBehavior ? 1 : 0 ); filescount = 0; folderscount = 0; pattern = null; if ( !Utils.isEmpty( getWildcard() ) ) { pattern = Pattern.compile( getRealWildcard() ); } if ( foldername != null ) { String realFoldername = getRealFoldername(); FileObject folderObject = null; try { folderObject = KettleVFS.getFileObject( realFoldername, this ); if ( folderObject.exists() ) { // Check if it's a folder if ( folderObject.getType() == FileType.FOLDER ) { // File provided is a folder, so we can process ... try { folderObject.findFiles( new TextFileSelector( folderObject.toString() ) ); } catch ( Exception ex ) { if ( !( ex.getCause() instanceof ExpectedException ) ) { throw ex; } } if ( log.isBasic() ) { log.logBasic( "Total files", "We found : " + filescount + " file(s)" ); } if ( filescount == 0 ) { result.setResult( true ); result.setNrLinesInput( folderscount ); } } else { // Not a folder, fail log.logError( "[" + realFoldername + "] is not a folder, failing." ); result.setNrErrors( 1 ); } } else { // No Folder found if ( log.isBasic() ) { logBasic( "we can not find [" + realFoldername + "] !" 
); } result.setNrErrors( 1 ); } } catch ( Exception e ) { logError( "Error checking folder [" + realFoldername + "]", e ); result.setResult( false ); result.setNrErrors( 1 ); } finally { if ( folderObject != null ) { try { folderObject.close(); folderObject = null; } catch ( IOException ex ) { /* Ignore */ } } } } else { logError( "No Foldername is defined." ); result.setNrErrors( 1 ); } return result; } private class ExpectedException extends Exception { private static final long serialVersionUID = -692662556327569162L; } private class TextFileSelector implements FileSelector { String root_folder = null; public TextFileSelector( String rootfolder ) { if ( rootfolder != null ) { root_folder = rootfolder; } } public boolean includeFile( FileSelectInfo info ) throws ExpectedException { boolean returncode = false; FileObject file_name = null; boolean rethrow = false; try { if ( !info.getFile().toString().equals( root_folder ) ) { // Pass over the Base folder itself if ( ( info.getFile().getType() == FileType.FILE ) ) { if ( info.getFile().getParent().equals( info.getBaseFolder() ) ) { // We are in the Base folder if ( ( isSpecifyWildcard() && GetFileWildcard( info.getFile().getName().getBaseName() ) ) || !isSpecifyWildcard() ) { if ( log.isDetailed() ) { log.logDetailed( "We found file : " + info.getFile().toString() ); } filescount++; } } else { // We are not in the base Folder...ONLY if Use sub folders // We are in the Base folder if ( isIncludeSubFolders() ) { if ( ( isSpecifyWildcard() && GetFileWildcard( info.getFile().getName().getBaseName() ) ) || !isSpecifyWildcard() ) { if ( log.isDetailed() ) { log.logDetailed( "We found file : " + info.getFile().toString() ); } filescount++; } } } } else { folderscount++; } } if ( filescount > 0 ) { rethrow = true; throw new ExpectedException(); } return true; } catch ( Exception e ) { if ( !rethrow ) { log.logError( BaseMessages.getString( PKG, "JobFolderIsEmpty.Error" ), BaseMessages.getString( PKG, 
"JobFolderIsEmpty.Error.Exception", info.getFile().toString(), e.getMessage() ) ); returncode = false; } else { throw (ExpectedException) e; } } finally { if ( file_name != null ) { try { file_name.close(); file_name = null; } catch ( IOException ex ) { /* Ignore */ } } } return returncode; } public boolean traverseDescendents( FileSelectInfo info ) { return true; } } /********************************************************** * * @param selectedfile * @param sourceWildcard * @return True if the selectedfile matches the wildcard **********************************************************/ private boolean GetFileWildcard( String selectedfile ) { boolean getIt = true; // First see if the file matches the regular expression! if ( pattern != null ) { Matcher matcher = pattern.matcher( selectedfile ); getIt = matcher.matches(); } return getIt; } public boolean evaluates() { return true; } @Override public void check( List<CheckResultInterface> remarks, JobMeta jobMeta, VariableSpace space, Repository repository, IMetaStore metaStore ) { JobEntryValidatorUtils.andValidator().validate( this, "filename", remarks, AndValidator.putValidators( JobEntryValidatorUtils.notBlankValidator() ) ); } }
package org.transpeg;

/**
 * Base class for template functors: a named, optionally typed code-generation
 * rule that can be chained with alternatives via {@link #nextChoice}.
 * NOTE(review): FuncType/PegObject/MetaType/MetaEngine are project types not
 * visible here — comments about their semantics are inferences to confirm.
 */
public abstract class Functor {
	public String name;
	public FuncType funcType;
	// Next alternative tried when this functor does not match (singly linked list).
	public Functor nextChoice = null;

	public Functor(String name, FuncType funcType) {
		this.name = name;
		this.funcType = funcType;
	}

	/**
	 * Lookup key: "name*" for untyped functors, otherwise "name:paramCount".
	 */
	public String key() {
		if(this.funcType == null) {
			return this.name + "*";
		}
		else {
			return this.name + ":" + this.funcType.getFuncParamSize();
		}
	}

	@Override
	public String toString() {
		return this.key();
	}

	// Subclasses set node.matched = this when they accept the node.
	protected abstract void matchSubNode(PegObject node);

	/**
	 * Tries this functor and each alternative in the nextChoice chain until one
	 * marks the node as matched. Returns false if the node was already matched
	 * (no re-matching) or no alternative accepted it.
	 */
	public final boolean match(PegObject node) {
		if(node.matched == null) {
			Functor cur = this;
			while(cur != null) {
				cur.matchSubNode(node);
				if(node.matched != null) {
					return true;
				}
				cur = cur.nextChoice;
			}
		}
		return false;
	}

	// Emits output for a previously matched node through the driver.
	public abstract void build(PegObject node, MetaEngine driver);

	/**
	 * @return the functor's declared return type, or defaultType when untyped.
	 */
	public MetaType getReturnType(MetaType defaultType) {
		if(this.funcType != null) {
			return this.funcType.getReturnType();
		}
		return defaultType;
	}
}

/**
 * Fallback functor that reports a syntax error for the node it matches.
 */
class ErrorFunctor extends Functor {
	public ErrorFunctor() {
		super(BunSymbol.PerrorFunctor, null);
	}

	@Override
	protected void matchSubNode(PegObject node) {
		// Always matches — used as a catch-all.
		node.matched = this;
	}

	@Override
	public void build(PegObject node, MetaEngine driver) {
		PegObject msgNode = node.get(0, null);
		if(msgNode != null) {
			// Use the first child's text as the error message when present.
			String errorMessage = node.getTextAt(0, "*error*");
			driver.pushErrorMessage(msgNode.source, errorMessage);
		}
		else {
			driver.pushErrorMessage(node.source, "syntax error");
		}
	}
}

/**
 * Functor standing for a type name; building it emits the node's type.
 */
class TypeFunctor extends Functor {
	public TypeFunctor(String name, MetaType type) {
		super(name, MetaType._LookupFuncType2(type));
	}

	@Override
	protected void matchSubNode(PegObject node) {
		node.matched = this;
	}

	@Override
	public void build(PegObject node, MetaEngine driver) {
		driver.pushTypeOf(node);
	}
}

/**
 * Functor standing for a named symbol resolved by index (presumably a local
 * variable / parameter slot — TODO confirm against MetaEngine.pushName).
 */
class NameFunctor extends Functor {
	private final int nameIndex;

	public NameFunctor(String name, int nameIndex, MetaType type) {
		super(name, MetaType._LookupFuncType2(type));
		this.nameIndex = nameIndex;
	}

	@Override
	protected void matchSubNode(PegObject node) {
		node.matched = this;
	}

	@Override
	public void build(PegObject node, MetaEngine driver) {
		driver.pushName(this.name, this.nameIndex);
	}
}

/**
 * Functor for integer literals; building it emits the node's raw text.
 */
class IntegerFunctor extends Functor {
	public IntegerFunctor(String name, SymbolTable gamma, String typeName) {
		super(name, gamma.getFuncType(typeName));
	}

	@Override
	protected void matchSubNode(PegObject node) {
		node.matched = this;
	}

	@Override
	public void build(PegObject node, MetaEngine driver) {
		driver.pushInteger(node.getText());
	}
}

/**
 * Handles a "define" node: parses a functor signature plus template sections
 * and registers the resulting SectionFunctor in the symbol table.
 * NOTE(review): contains System.out.println debug output and an empty build()
 * (TODO stub) — candidates for cleanup once behavior is confirmed.
 */
class DefineFunctor extends Functor {
	public DefineFunctor() {
		super("define", null);
	}

	@Override
	protected void matchSubNode(PegObject node) {
		SymbolTable gamma = node.getSymbolTable();
		System.out.println(node);
		SectionFunctor f = this.newFunctor(gamma, node);
		if(f != null) {
			gamma.addFunctor(f);
		}
		node.matched = this;
	}

	@Override
	public void build(PegObject node, MetaEngine driver) {
		// TODO Auto-generated method stub
	}

	/**
	 * Builds a SectionFunctor from a define node: child 0 is the signature
	 * (name, params, return type); children 1..n are template sections.
	 */
	private SectionFunctor newFunctor(SymbolTable gamma, PegObject defineNode) {
		PegObject sig = defineNode.get(0);
		String name = sig.getTextAt(0, null);
		UniMap<Integer> nameMap = new UniMap<Integer>();
		FuncType funcType = this.newFuncType(gamma, sig.get(1), sig.get(2,null), nameMap);
		SectionFunctor functor = new SectionFunctor(name, funcType);
		for(int i = 1; i < defineNode.size(); i++) {
			Section section = this.newSection(defineNode.get(i), nameMap);
			functor.add(section);
		}
		return functor;
	}

	/**
	 * Resolves a type node to a MetaType: plain "T" nodes are looked up by name,
	 * other nodes via their matching functor; falls back to UntypedType.
	 */
	private MetaType newType(SymbolTable gamma, PegObject typeNode) {
		if(typeNode != null) {
			System.out.println(typeNode);
			if(typeNode.is("T")) {
				String typeName = typeNode.getText();
				System.out.println("typename :" + typeName);
				Functor f = gamma.getSymbol(typeName);
				if(f != null) {
					return f.getReturnType(MetaType.UntypedType);
				}
			}
			else {
				Functor f = gamma.getFunctor(typeNode);
				if(f != null) {
					if(f.match(typeNode)) {
						return f.getReturnType(MetaType.UntypedType);
					}
				}
			}
			System.out.println("debug: newType " + typeNode);
		}
		return MetaType.UntypedType;
	}

	/**
	 * Builds the functor's FuncType from the parameter list and return type,
	 * recording each parameter name's position in nameMap.
	 */
	private FuncType newFuncType(SymbolTable gamma, PegObject paramNode, PegObject returnTypeNode, UniMap<Integer> nameMap) {
		UniArray<MetaType> typeList = new UniArray<MetaType>(new MetaType[paramNode.size()+1]);
		for(int i = 0; i < paramNode.size(); i++) {
			PegObject p = paramNode.get(i);
			String name = p.getTextAt(0, null);
			typeList.add(this.newType(gamma, p.get(1, null)));
			nameMap.put(name, i);
		}
		typeList.add(this.newType(gamma, returnTypeNode));
		return MetaType._LookupFuncType2(typeList);
	}

	/**
	 * Builds one Section from a section node; "section.line" children become
	 * template lines ("section.label" is still TODO).
	 */
	private Section newSection(PegObject sectionNode, UniMap<Integer> nameMap) {
		Section section = new Section();
		int line = 0;
		for(int i = 0; i < sectionNode.size(); i++) {
			PegObject subNode = sectionNode.get(i);
			// System.out.println(subNode);
			if(subNode.is("section.label")) {
				System.out.println("TODO: section.label");
			}
			if(subNode.is("section.line")) {
				if(line > 0) {
					section.addNewLine();
				}
				section.addLineNode(subNode, nameMap);
				line = line + 1;
			}
		}
		return section;
	}
}

/**
 * A user-defined functor whose body is a linked list of template Sections.
 */
class SectionFunctor extends Functor {
	Section section;

	public SectionFunctor(String name, FuncType funcType) {
		super(name, funcType);
		this.section = null;
	}

	@Override
	protected void matchSubNode(PegObject node) {
		// Matches only if every child that has a functor also matches.
		// NOTE(review): children with no functor at all are skipped, which
		// still lets the node match — confirm that is intended.
		SymbolTable gamma = node.getSymbolTable();
		for(int i = 0; i < node.size(); i++) {
			PegObject subNode = node.get(i);
			Functor f = gamma.getFunctor(subNode);
			if(f != null) {
				if(!f.match(subNode)) {
					return;
				}
			}
		}
		node.matched = this;
	}

	@Override
	public void build(PegObject node, MetaEngine driver) {
		// Emits every section in order (nextChoice here chains sections, not
		// alternatives to try — all are built).
		Section cur = this.section;
		while(cur != null) {
			cur.build(node, driver);
			cur = cur.nextChoice;
		}
	}

	// Appends a section at the tail of the section chain.
	public void add(Section section) {
		Section sec = this.section;
		if(sec == null) {
			this.section = section;
		}
		else {
			while(sec.nextChoice != null) {
				sec = sec.nextChoice;
			}
			sec.nextChoice = section;
		}
	}
}

/**
 * One template section: a chain of ChunkCommands (literal text, newlines,
 * child-node references and commands) executed in order when building.
 */
class Section {
	String label;
	UniArray<String> requirements;
	ChunkCommand chunks = null;
	Section nextChoice = null;

	public Section() {
	}

	// Appends a chunk at the tail of the command chain.
	void add(ChunkCommand chunk) {
		if(this.chunks == null) {
			this.chunks = chunk;
		}
		else {
			ChunkCommand cur = this.chunks;
			while(cur.next != null) {
				cur = cur.next;
			}
			cur.next = chunk;
		}
	}

	// Runs every chunk command against the node through the driver.
	public void build(PegObject node, MetaEngine driver) {
		ChunkCommand cur = this.chunks;
		//System.out.println("debug command: " + cur);
		while(cur != null) {
			cur.push(node, driver);
			cur = cur.next;
			//System.out.println("debug command: " + cur);
		}
	}

	/**
	 * Parses one template line node into chunk commands. "$$" is an escape for
	 * a literal "$". Returns false when a referenced name is not a parameter
	 * (error reported on stdout — NOTE(review): should go to an error channel).
	 */
	boolean addLineNode(PegObject lineNode, UniMap<Integer> nameMap) {
		for(int j = 0; j < lineNode.size(); j++) {
			PegObject chunkNode = lineNode.get(j);
			//System.out.println("debug: chunk: " + chunkNode);
			if(chunkNode.is("section.chunk")) {
				String s = chunkNode.getText();
				if(s.equals("$$")) {
					s = "$";
				}
				this.addChunk(s);
			}
			else if(chunkNode.is("section.command")) {
				if(chunkNode.size() == 1) {
					// Bare reference: emit the referenced child node.
					String name = chunkNode.getTextAt(0, null);
					Integer index = nameMap.get(name, null);
					if(index == null) {
						System.out.println("undefined name: " + name);
						return false;
					}
					this.addNode(index);
				}
				else {
					// Command applied to a referenced child node.
					String cmd = chunkNode.getTextAt(0, null);
					String name = chunkNode.getTextAt(1, null);
					Integer index = nameMap.get(name, null);
					if(index == null) {
						System.out.println("undefined name: " + name);
						return false;
					}
					this.addCommand(cmd, index);
				}
			}
		}
		return true;
	}

	void addNewLine() {
		this.add(new NewLineChunk());
	}

	void addChunk(String text) {
		this.add(new Chunk(text));
	}

	void addNode(int index) {
		this.add(new NodeCommand(null, index));
	}

	// "typeof" gets a dedicated command; everything else is a generic command.
	void addCommand(String cmd, int index) {
		if(cmd.equals("typeof")) {
			this.add(new TypeOfNodeCommand(cmd, index));
			return;
		}
		this.add(new Command(cmd, index));
	}
}

/**
 * One step in a section's output: pushes something into the MetaEngine.
 */
abstract class ChunkCommand {
	ChunkCommand next = null;
	public abstract void push(PegObject node, MetaEngine d);
}

/** Emits a literal text chunk. */
class Chunk extends ChunkCommand {
	String text;

	Chunk(String text) {
		this.text = text;
	}

	@Override
	public void push(PegObject node, MetaEngine d) {
		d.pushCode(this.text);
	}
}

/** Emits a line break. */
class NewLineChunk extends ChunkCommand {
	@Override
	public void push(PegObject node, MetaEngine d) {
		d.pushNewLine();
	}
}

/** Applies a named command to the node's child at {@code index}. */
class Command extends ChunkCommand {
	String name;
	int index;

	Command(String name, int index) {
		this.name = name;
		this.index = index;
	}

	@Override
	public void push(PegObject node, MetaEngine d) {
		d.pushCommand(this.name, node.get(this.index));
	}
}

/** Emits the child node at {@code index} itself (name is unused). */
class NodeCommand extends Command {
	NodeCommand(String name, int index) {
		super(name, index);
	}

	@Override
	public void push(PegObject node, MetaEngine d) {
		d.pushNode(node.get(this.index));
	}
}

/** Emits the type of the child node at {@code index}. */
class TypeOfNodeCommand extends Command {
	TypeOfNodeCommand(String name, int index) {
		super(name, index);
	}

	@Override
	public void push(PegObject node, MetaEngine d) {
		d.pushTypeOf(node.get(this.index));
	}
}
/** * OLAT - Online Learning and Training<br> * http://www.olat.org * <p> * Licensed under the Apache License, Version 2.0 (the "License"); <br> * you may not use this file except in compliance with the License.<br> * You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing,<br> * software distributed under the License is distributed on an "AS IS" BASIS, <br> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br> * See the License for the specific language governing permissions and <br> * limitations under the License. * <p> * Copyright (c) since 2004 at Multimedia- & E-Learning Services (MELS),<br> * University of Zurich, Switzerland. * <p> */ package org.olat.group.ui.main; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import org.olat.basesecurity.BaseSecurity; import org.olat.basesecurity.BaseSecurityManager; import org.olat.basesecurity.SecurityGroup; import org.olat.commons.calendar.CalendarManager; import org.olat.commons.calendar.CalendarManagerFactory; import org.olat.commons.calendar.ui.components.KalendarRenderWrapper; import org.olat.core.commons.fullWebApp.LayoutMain3ColsController; import org.olat.core.commons.persistence.PersistenceHelper; import org.olat.core.gui.UserRequest; import org.olat.core.gui.components.Component; import org.olat.core.gui.components.table.BooleanColumnDescriptor; import org.olat.core.gui.components.table.DefaultColumnDescriptor; import org.olat.core.gui.components.table.Table; import org.olat.core.gui.components.table.TableController; import org.olat.core.gui.components.table.TableEvent; import org.olat.core.gui.components.table.TableGuiConfiguration; import org.olat.core.gui.components.tree.GenericTreeModel; import org.olat.core.gui.components.tree.GenericTreeNode; import org.olat.core.gui.components.tree.MenuTree; import org.olat.core.gui.components.tree.TreeEvent; 
import org.olat.core.gui.components.tree.TreeModel; import org.olat.core.gui.components.tree.TreeNode; import org.olat.core.gui.components.velocity.VelocityContainer; import org.olat.core.gui.control.Controller; import org.olat.core.gui.control.Event; import org.olat.core.gui.control.WindowControl; import org.olat.core.gui.control.controller.MainLayoutBasicController; import org.olat.core.gui.control.generic.closablewrapper.CloseableModalController; import org.olat.core.gui.control.generic.dtabs.Activateable; import org.olat.core.gui.control.generic.modal.DialogBoxController; import org.olat.core.gui.control.generic.modal.DialogBoxUIFactory; import org.olat.core.gui.control.generic.tool.ToolController; import org.olat.core.gui.control.generic.tool.ToolFactory; import org.olat.core.gui.control.state.ControllerState; import org.olat.core.id.Identity; import org.olat.core.id.change.ChangeManager; import org.olat.core.logging.Tracing; import org.olat.core.logging.activity.ThreadLocalUserActivityLogger; import org.olat.core.util.Util; import org.olat.core.util.mail.ContactList; import org.olat.core.util.notifications.NotificationsManager; import org.olat.core.util.notifications.Publisher; import org.olat.core.util.notifications.SubscriptionContext; import org.olat.core.util.tree.TreeHelper; import org.olat.group.BusinessGroup; import org.olat.group.BusinessGroupManager; import org.olat.group.BusinessGroupManagerImpl; import org.olat.group.GroupLoggingAction; import org.olat.group.delete.TabbedPaneController; import org.olat.group.ui.BGConfigFlags; import org.olat.group.ui.BGControllerFactory; import org.olat.group.ui.BGTranslatorFactory; import org.olat.group.ui.BusinessGroupFormController; import org.olat.group.ui.run.BusinessGroupMainRunController; import org.olat.util.logging.activity.LoggingResourceable; import de.bps.olat.util.notifications.SubscriptionProvider; import de.bps.olat.util.notifications.SubscriptionProviderImpl; /** * Description: <br> * Controller to 
list all groups where the user is owner or participant. This controller does also feature create and delete methods for groups of type buddyGroup <br> * * <pre> * Possible activation messages: * &quot;cmd.menu.index&quot; : show groups overview * &quot;cmd.menu.buddy&quot; : show all buddy groups * &quot;cmd.menu.learn&quot; : show all leanringgroups * &quot;cmd.menu.right&quot; : show all right groups * &quot;addBuddyGroup&quot; : start add group workflow * </pre> * <P> * Initial Date: Aug 5, 2004 * * @author patrick */ public class BGMainController extends MainLayoutBasicController implements Activateable { private static final String PACKAGE = Util.getPackageName(BGMainController.class); /* * things a controller needs during its lifetime */ private final VelocityContainer main; private final LayoutMain3ColsController columnLayoutCtr; private final ToolController mainToolC; private final MenuTree menuTree; private static final String ACTION_ADD_BUDDYGROUP = "addBuddyGroup"; private static final String ACTION_DELETE_UNUSEDGROUP = "deleteunusedgroup"; private TableController groupListCtr; private BusinessGroupTableModelWithType groupListModel; private BusinessGroupFormController createBuddyGroupController; private BusinessGroup currBusinessGroup; private final Identity identity; private final BusinessGroupManager bgm; private TabbedPaneController deleteTabPaneCtr; private CloseableModalController cmc; private DialogBoxController deleteDialogBox; private DialogBoxController leaveDialogBox; // group list table rows private static final String TABLE_ACTION_LEAVE = "bgTblLeave"; private static final String TABLE_ACTION_DELETE = "bgTblDelete"; private static final String TABLE_ACTION_LAUNCH = "bgTblLaunch"; private static final String CMD_MENU_INDEX = "cmd.menu.index"; private static final String CMD_MENU_BUDDY = "cmd.menu.buddy"; private static final String CMD_MENU_LEARN = "cmd.menu.learn"; private static final String CMD_MENU_RIGHT = "cmd.menu.right"; /** * @param 
ureq * @param wControl * @param flags configuration flags * @param initialViewIdentifier */ public BGMainController(final UserRequest ureq, final WindowControl wControl, final String initialViewIdentifier) { super(ureq, wControl); identity = ureq.getIdentity(); setTranslator(BGTranslatorFactory.createBGPackageTranslator(PACKAGE, BusinessGroup.TYPE_BUDDYGROUP, ureq.getLocale())); bgm = BusinessGroupManagerImpl.getInstance(); // main component layed out in panel main = createVelocityContainer("index"); // toolboxes mainToolC = ToolFactory.createToolController(getWindowControl()); listenTo(mainToolC); mainToolC.addHeader(translate("tools.add.header")); mainToolC.addLink(ACTION_ADD_BUDDYGROUP, translate("tools.add.buddygroup")); if (ureq.getUserSession().getRoles().isOLATAdmin()) { mainToolC.addHeader(translate("tools.delete.header")); mainToolC.addLink(ACTION_DELETE_UNUSEDGROUP, translate("tools.delete.unusedgroup")); } // menu menuTree = new MenuTree("buddyGroupTree"); menuTree.setTreeModel(buildTreeModel()); menuTree.setSelectedNodeId(menuTree.getTreeModel().getRootNode().getIdent()); menuTree.addListener(this); // layout columnLayoutCtr = new LayoutMain3ColsController(ureq, getWindowControl(), menuTree, mainToolC.getInitialComponent(), main, "groumain"); columnLayoutCtr.addCssClassToMain("o_groups"); listenTo(columnLayoutCtr); putInitialPanel(columnLayoutCtr.getInitialComponent()); // start with list of all groups doAllGroupList(ureq, getWindowControl()); } /** * @see org.olat.core.gui.control.DefaultController#event(org.olat.core.gui.UserRequest, org.olat.core.gui.components.Component, org.olat.core.gui.control.Event) */ @Override public void event(final UserRequest ureq, final Component source, final Event event) { if (source == menuTree) { if (event.getCommand().equals(MenuTree.COMMAND_TREENODE_CLICKED)) { final TreeEvent te = (TreeEvent) event; final TreeNode clickedNode = menuTree.getTreeModel().getNodeById(te.getNodeId()); final Object userObject = 
clickedNode.getUserObject(); // end of the menu-tree click handler; its opening lines are above this excerpt
            activateContent(ureq, userObject);
        }
    }
}

/**
 * Activate the content in the content area based on a user object representing the
 * identifier of the content (one of the CMD_MENU_* constants), then record it as
 * the controller state.
 *
 * @param ureq the current user request
 * @param userObject CMD_MENU_INDEX, CMD_MENU_BUDDY, CMD_MENU_LEARN or CMD_MENU_RIGHT
 */
private void activateContent(final UserRequest ureq, final Object userObject) {
    if (userObject.equals(CMD_MENU_INDEX)) {
        doAllGroupList(ureq, getWindowControl());
    } else if (userObject.equals(CMD_MENU_BUDDY)) {
        doBuddyGroupList(ureq, getWindowControl());
    } else if (userObject.equals(CMD_MENU_LEARN)) {
        doLearningGroupList(ureq, getWindowControl());
    } else if (userObject.equals(CMD_MENU_RIGHT)) {
        doRightGroupList(ureq, getWindowControl());
    }
    setState(userObject.toString());
}

/**
 * Restore a previously serialized controller state: re-activate the matching content
 * and select the corresponding node in the menu tree.
 */
@Override
protected void adjustState(final ControllerState cstate, final UserRequest ureq) {
    final String cmd = cstate.getSerializedState();
    activateContent(ureq, cmd);
    // adjust the menu
    final TreeNode rootNode = this.menuTree.getTreeModel().getRootNode();
    final TreeNode activatedNode = TreeHelper.findNodeByUserObject(cmd, rootNode);
    this.menuTree.setSelectedNode(activatedNode);
}

/**
 * Handle row actions fired by the group table (launch, delete, leave).
 * Delete and leave are confirmed via modal yes/no dialogs and are only
 * offered for buddy groups.
 *
 * @param ureq the current user request
 * @param event the table event to dispatch
 */
private void handleEventsGroupTables(final UserRequest ureq, final Event event) {
    if (event.getCommand().equals(Table.COMMANDLINK_ROWACTION_CLICKED)) {
        final TableEvent te = (TableEvent) event;
        final String actionid = te.getActionId();
        final int rowid = te.getRowId();
        // remember the clicked group; the dialog callbacks below operate on it
        currBusinessGroup = groupListModel.getBusinessGroupAt(rowid);
        final String trnslP = currBusinessGroup.getName();
        if (actionid.equals(TABLE_ACTION_LAUNCH)) {
            BGControllerFactory.getInstance().createRunControllerAsTopNavTab(currBusinessGroup, ureq, getWindowControl(), false, null);
        } else if (actionid.equals(TABLE_ACTION_DELETE) && currBusinessGroup.getType().equals(BusinessGroup.TYPE_BUDDYGROUP)) {
            // only for buddygroups allowed
            deleteDialogBox = activateYesNoDialog(ureq, null, translate("dialog.modal.bg.delete.text", trnslP), deleteDialogBox);
        } else if (actionid.equals(TABLE_ACTION_LEAVE) && currBusinessGroup.getType().equals(BusinessGroup.TYPE_BUDDYGROUP)) {
            // only for buddygroups allowed
            leaveDialogBox = activateYesNoDialog(ureq, null, translate("dialog.modal.bg.leave.text", trnslP), leaveDialogBox);
        }
    }
}

/**
 * Dispatch events from child controllers: the group table, the toolbox, the two
 * confirmation dialogs and the buddy-group creation form.
 *
 * @see org.olat.core.gui.control.DefaultController#event(org.olat.core.gui.UserRequest, org.olat.core.gui.control.Controller, org.olat.core.gui.control.Event)
 */
@Override
public void event(final UserRequest ureq, final Controller source, final Event event) {
    if (source == groupListCtr) {
        // an action from the groupList was clicked, e.g. LEAVE, DELETE, LAUNCH
        handleEventsGroupTables(ureq, event);
    } else if (source == mainToolC) {
        if (event.getCommand().startsWith(ACTION_ADD_BUDDYGROUP)) {
            initAddBuddygroupWorkflow(ureq);
        } else if (event.getCommand().startsWith(ACTION_DELETE_UNUSEDGROUP)) {
            initDeleteGroupWorkflow(ureq);
        }
    } else if (source == deleteDialogBox) {
        if (DialogBoxUIFactory.isOkEvent(event)) {
            doBuddyGroupDelete(ureq);
        } // else cancel was clicked or box closed
    } else if (source == leaveDialogBox) {
        if (event != Event.CANCELLED_EVENT) {
            if (DialogBoxUIFactory.isYesEvent(event)) {
                doBuddyGroupLeave(ureq);
            }
        } // else dialog was simply closed
    } else if (source == this.createBuddyGroupController) {
        this.cmc.deactivate(); // remove modal dialog
        removeAsListenerAndDispose(this.cmc);
        if (event == Event.DONE_EVENT) {
            // create new buddy group with the values taken from the createBuddyGroupForm
            this.currBusinessGroup = createBuddyGroup(ureq);
            updateGroupListModelAll();
            ChangeManager.changed(ChangeManager.ACTION_CREATE, this.currBusinessGroup);
            // after successfully creating a buddygroup 'launch' it
            final BusinessGroupMainRunController groupRunCtr = BGControllerFactory.getInstance().createRunControllerAsTopNavTab(this.currBusinessGroup, ureq,
                    getWindowControl(), false, null);
            if (groupRunCtr != null) {
                groupRunCtr.activateAdministrationMode(ureq);
            }
        } else if (event == Event.FAILED_EVENT) {
            // form submission failed: re-open the modal so the user can correct the input
            this.cmc = new CloseableModalController(getWindowControl(), translate("close"), this.createBuddyGroupController.getInitialComponent(), true,
                    translate("create.form.title"));
            this.cmc.activate();
            listenTo(this.cmc);
        } else if (event == Event.CANCELLED_EVENT) {
            // nothing to do
        }
    }
}

/** Show the delete-unused-groups workflow in a tabbed pane inside the main area. */
private void initDeleteGroupWorkflow(final UserRequest ureq) {
    removeAsListenerAndDispose(deleteTabPaneCtr);
    deleteTabPaneCtr = new TabbedPaneController(ureq, getWindowControl());
    listenTo(deleteTabPaneCtr);
    main.setPage(Util.getPackageVelocityRoot(this.getClass()) + "/delete.html");
    main.put("deleteTabs", deleteTabPaneCtr.getInitialComponent());
}

/** Open the "create buddy group" form in a closeable modal dialog. */
private void initAddBuddygroupWorkflow(final UserRequest ureq) {
    final BGConfigFlags flags = BGConfigFlags.createBuddyGroupDefaultFlags();
    if (this.createBuddyGroupController != null) {
        removeAsListenerAndDispose(this.createBuddyGroupController);
    }
    this.createBuddyGroupController = new BusinessGroupFormController(ureq, getWindowControl(), null, flags.isEnabled(BGConfigFlags.GROUP_MINMAX_SIZE));
    listenTo(this.createBuddyGroupController);
    this.cmc = new CloseableModalController(getWindowControl(), translate("close"), this.createBuddyGroupController.getInitialComponent(), true,
            translate("create.form.title"));
    this.cmc.activate();
    listenTo(this.cmc);
}

/**
 * Deletes this.currBusinessGroup. Checks if the user is in the owner group,
 * otherwise does nothing.
 *
 * @param ureq the current user request
 */
private void doBuddyGroupDelete(final UserRequest ureq) {
    // 1) send notification mails to users
    final BaseSecurity securityManager = BaseSecurityManager.getInstance();
    final ContactList owners = new ContactList(translate("userlist.owners.title"));
    final List ow = securityManager.getIdentitiesOfSecurityGroup(currBusinessGroup.getOwnerGroup());
    owners.addAllIdentites(ow);
    final ContactList participants = new ContactList(translate("userlist.participants.title"));
    participants.addAllIdentites(securityManager.getIdentitiesOfSecurityGroup(currBusinessGroup.getPartipiciantGroup()));
    // check if user is in owner group (could fake link in table)
    if (!PersistenceHelper.listContainsObjectByKey(ow, ureq.getIdentity())) {
        Tracing.logWarn("User tried to delete a group but he was not owner of the group", null, BGMainController.class);
        return;
    }
    final List everybody = new ArrayList();
    everybody.add(owners);
    everybody.add(participants);
    // inform Indexer about change
    ChangeManager.changed(ChangeManager.ACTION_DELETE, currBusinessGroup);
    // 3) delete the group
    currBusinessGroup = bgm.loadBusinessGroup(currBusinessGroup);
    // change state of publisher so that notifications of deleted group calendars make no problems
    final CalendarManager calMan = CalendarManagerFactory.getInstance().getCalendarManager();
    final NotificationsManager nfm = NotificationsManager.getInstance();
    final KalendarRenderWrapper calRenderWrapper = calMan.getGroupCalendar(currBusinessGroup);
    final SubscriptionProvider subProvider = new SubscriptionProviderImpl(calRenderWrapper);
    final SubscriptionContext subsContext = subProvider.getSubscriptionContext();
    final Publisher pub = nfm.getPublisher(subsContext);
    if (pub != null) {
        pub.setState(1); // int 0 is OK -> all other is not OK
    }
    bgm.deleteBusinessGroupWithMail(currBusinessGroup, getWindowControl(), ureq, getTranslator(), everybody);
    // do Logging
    ThreadLocalUserActivityLogger.log(GroupLoggingAction.GROUP_DELETED, getClass(), LoggingResourceable.wrap(currBusinessGroup));
    // 4) update Tables
    doAllGroupList(ureq, getWindowControl());
    showInfo("info.group.deleted");
}

/**
 * Removes the user from this.currBusinessGroup's owner and participant group. If no
 * other owner is found the user won't be removed from the owner group (a group must
 * keep at least one owner).
 *
 * @param ureq the current user request
 */
private void doBuddyGroupLeave(final UserRequest ureq) {
    final BaseSecurity securityManager = BaseSecurityManager.getInstance();
    final BGConfigFlags flags = BGConfigFlags.createBuddyGroupDefaultFlags();
    // 1) remove as owner
    final SecurityGroup owners = currBusinessGroup.getOwnerGroup();
    if (securityManager.isIdentityInSecurityGroup(identity, owners)) {
        final List ownerList = securityManager.getIdentitiesOfSecurityGroup(owners);
        if (ownerList.size() > 1) {
            bgm.removeOwnerAndFireEvent(ureq.getIdentity(), ureq.getIdentity(), currBusinessGroup, flags, false);
            // update model
            updateGroupListModelAll();
        } else {
            // he is the last owner, but there must be at least one owner;
            // give him a warning — as long as he tries to leave, he gets this warning
            getWindowControl().setError(translate("msg.atleastone"));
            return;
        }
    }
    // if identity was also owner it must have been successfully removed to end here;
    // now remove the identity also as participant
    // 2) remove as participant
    final List<Identity> identities = new ArrayList<Identity>(1);
    identities.add(ureq.getIdentity());
    bgm.removeParticipantsAndFireEvent(ureq.getIdentity(), identities, currBusinessGroup, flags);
    // update Tables
    doAllGroupList(ureq, getWindowControl());
}

/**
 * Creates a new business group of type buddy group and adds this.identity as owner
 * to the new group.
 *
 * @return the newly persisted BusinessGroup
 */
private BusinessGroup createBuddyGroup(final UserRequest ureq) {
    final String bgName = this.createBuddyGroupController.getGroupName();
    final String bgDesc = this.createBuddyGroupController.getGroupDescription();
    final Integer bgMin = this.createBuddyGroupController.getGroupMin();
    final Integer bgMax = this.createBuddyGroupController.getGroupMax();
    /*
     * this creates a BusinessGroup as BuddyGroup with the specified name and description;
     * the CollaborationTools are enabled during creation. The GroupContext is null in the
     * case of BuddyGroups.
     */
    final BusinessGroup newGroup = bgm.createAndPersistBusinessGroup(BusinessGroup.TYPE_BUDDYGROUP, identity, bgName, bgDesc, bgMin, bgMax, null, null, null);
    // create buddylist for group
    // 2. Add user to group, fire events, do logging etc.
    final BGConfigFlags flags = BGConfigFlags.createBuddyGroupDefaultFlags();
    // do Logging
    ThreadLocalUserActivityLogger.addLoggingResourceInfo(LoggingResourceable.wrap(newGroup));
    ThreadLocalUserActivityLogger.log(GroupLoggingAction.GROUP_CREATED, getClass());
    bgm.addOwnerAndFireEvent(ureq.getIdentity(), ureq.getIdentity(), newGroup, flags, true);
    return newGroup;
}

/**
 * Prepare everything and show all groups.
 *
 * @param ureq the current user request
 * @param wControl the window control
 */
private void doAllGroupList(final UserRequest ureq, final WindowControl wControl) {
    // 1) initialize list controller and datamodel
    initGroupListCtrAndModel(true, ureq);
    // 2) load data into model
    updateGroupListModelAll();
    // 3) set correct page
    main.setPage(Util.getPackageVelocityRoot(this.getClass()) + "/index.html");
    // 4) update toolbox
    columnLayoutCtr.hideCol2(false);
}

/**
 * Prepare everything and show all buddy groups.
 *
 * @param ureq the current user request
 * @param wControl the window control
 */
private void doBuddyGroupList(final UserRequest ureq, final WindowControl wControl) {
    // 1) initialize list controller and datamodel
    initGroupListCtrAndModel(true, ureq);
    // 2) load data into model
    updateGroupListModelBuddygroups();
    // 3) set correct page
    main.setPage(Util.getPackageVelocityRoot(this.getClass()) + "/buddy.html");
    // 4) update toolbox
    columnLayoutCtr.hideCol2(false);
}

/**
 * Prepare everything and show all learning groups.
 *
 * @param ureq the current user request
 * @param wControl the window control
 */
private void doLearningGroupList(final UserRequest ureq, final WindowControl wControl) {
    // 1) initialize list controller and datamodel (no leave/delete columns)
    initGroupListCtrAndModel(false, ureq);
    // 2) load data into model
    updateGroupListModelLearninggroups();
    // 3) set correct page
    main.setPage(Util.getPackageVelocityRoot(this.getClass()) + "/learning.html");
    // 4) update toolbox
    columnLayoutCtr.hideCol2(true);
}

/**
 * Prepare everything and show all right groups.
 *
 * @param ureq the current user request
 * @param wControl the window control
 */
private void doRightGroupList(final UserRequest ureq, final WindowControl wControl) {
    // 1) initialize list controller and datamodel (no leave/delete columns)
    initGroupListCtrAndModel(false, ureq);
    // 2) load data into model
    updateGroupListModelRightgroups();
    // 3) set correct page
    main.setPage(Util.getPackageVelocityRoot(this.getClass()) + "/right.html");
    // 4) update toolbox
    columnLayoutCtr.hideCol2(true);
}

/**
 * Prepare everything and show the delete-groups workflow.
 * NOTE(review): intentionally empty in the original source — the workflow is
 * started via initDeleteGroupWorkflow instead.
 */
private void doDeleteGroups(final UserRequest ureq, final WindowControl wControl) {}

/**
 * Initialize the group list controller and the group list model.
 *
 * @param withLeaveAndDelete config flag: true: leave and delete buttons are shown, false: not shown
 * @param ureq the current user request
 */
private void initGroupListCtrAndModel(final boolean withLeaveAndDelete, final UserRequest ureq) {
    // 1) init listing controller
    final TableGuiConfiguration tableConfig = new TableGuiConfiguration();
    tableConfig.setTableEmptyMessage(translate("index.table.nogroup"));
    removeAsListenerAndDispose(groupListCtr);
    groupListCtr = new TableController(tableConfig, ureq, getWindowControl(), getTranslator());
    listenTo(groupListCtr);
    groupListCtr.addColumnDescriptor(new DefaultColumnDescriptor("table.header.bgname", 0, TABLE_ACTION_LAUNCH, getLocale()));
    groupListCtr.addColumnDescriptor(new DefaultColumnDescriptor("table.header.description", 1, null, getLocale()));
    groupListCtr.addColumnDescriptor(new DefaultColumnDescriptor("table.header.type", 2, null, getLocale()));
    if (withLeaveAndDelete) {
        groupListCtr.addColumnDescriptor(new BooleanColumnDescriptor("table.header.leave", 3, TABLE_ACTION_LEAVE, translate("table.header.leave"), null));
        groupListCtr.addColumnDescriptor(new BooleanColumnDescriptor("table.header.delete", 4, TABLE_ACTION_DELETE, translate("table.header.delete"), null));
    }
    // 2) init list model
    groupListModel = new BusinessGroupTableModelWithType(new ArrayList(), getTranslator());
    groupListCtr.setTableDataModel(groupListModel);
    main.put("groupList", groupListCtr.getInitialComponent());
}

/**
 * Get most recent data from the database and init the group list model with data for
 * all groups (owned and attended buddy groups, learning groups and right groups).
 */
private void updateGroupListModelAll() {
    final List wrapped = new ArrayList();
    // buddy groups: owners may leave and delete
    List groups = bgm.findBusinessGroupsOwnedBy(BusinessGroup.TYPE_BUDDYGROUP, identity, null);
    Iterator iter = groups.iterator();
    while (iter.hasNext()) {
        final BusinessGroup group = (BusinessGroup) iter.next();
        wrapped.add(wrapGroup(group, Boolean.TRUE, Boolean.TRUE));
    }
    // attended buddy groups: may leave but not delete
    groups = bgm.findBusinessGroupsAttendedBy(BusinessGroup.TYPE_BUDDYGROUP, identity, null);
    iter = groups.iterator();
    while (iter.hasNext()) {
        final BusinessGroup group = (BusinessGroup) iter.next();
        wrapped.add(wrapGroup(group, Boolean.TRUE, null));
    }
    // learning groups
    groups = bgm.findBusinessGroupsOwnedBy(BusinessGroup.TYPE_LEARNINGROUP, identity, null);
    iter = groups.iterator();
    while (iter.hasNext()) {
        final BusinessGroup group = (BusinessGroup) iter.next();
        wrapped.add(wrapGroup(group, null, null));
    }
    groups = bgm.findBusinessGroupsAttendedBy(BusinessGroup.TYPE_LEARNINGROUP, identity, null);
    iter = groups.iterator();
    while (iter.hasNext()) {
        final BusinessGroup group = (BusinessGroup) iter.next();
        wrapped.add(wrapGroup(group, null, null));
    }
    // right groups
    groups = bgm.findBusinessGroupsAttendedBy(BusinessGroup.TYPE_RIGHTGROUP, identity, null);
    iter = groups.iterator();
    while (iter.hasNext()) {
        final BusinessGroup group = (BusinessGroup) iter.next();
        wrapped.add(wrapGroup(group, null, null));
    }
    groupListModel.setEntries(wrapped);
    groupListCtr.modelChanged();
}

/**
 * Get most recent data from the database and init the group list model with data for
 * buddy groups.
 */
private void updateGroupListModelBuddygroups() {
    final List wrapped = new ArrayList();
    // owned buddy groups: may leave and delete
    List groups = bgm.findBusinessGroupsOwnedBy(BusinessGroup.TYPE_BUDDYGROUP, identity, null);
    Iterator iter = groups.iterator();
    while (iter.hasNext()) {
        final BusinessGroup group = (BusinessGroup) iter.next();
        wrapped.add(wrapGroup(group, Boolean.TRUE, Boolean.TRUE));
    }
    // attended buddy groups: may leave but not delete
    groups = bgm.findBusinessGroupsAttendedBy(BusinessGroup.TYPE_BUDDYGROUP, identity, null);
    iter = groups.iterator();
    while (iter.hasNext()) {
        final BusinessGroup group = (BusinessGroup) iter.next();
        wrapped.add(wrapGroup(group, Boolean.TRUE, null));
    }
    groupListModel.setEntries(wrapped);
    groupListCtr.modelChanged();
}

/**
 * Get most recent data from the database and init the group list model with data for
 * learning groups.
 */
private void updateGroupListModelLearninggroups() {
    final List wrapped = new ArrayList();
    // learning groups (owned and attended); no leave/delete from this view
    List groups = bgm.findBusinessGroupsOwnedBy(BusinessGroup.TYPE_LEARNINGROUP, identity, null);
    Iterator iter = groups.iterator();
    while (iter.hasNext()) {
        final BusinessGroup group = (BusinessGroup) iter.next();
        wrapped.add(wrapGroup(group, null, null));
    }
    groups = bgm.findBusinessGroupsAttendedBy(BusinessGroup.TYPE_LEARNINGROUP, identity, null);
    iter = groups.iterator();
    while (iter.hasNext()) {
        final BusinessGroup group = (BusinessGroup) iter.next();
        wrapped.add(wrapGroup(group, null, null));
    }
    groupListModel.setEntries(wrapped);
    groupListCtr.modelChanged();
}

/**
 * Get most recent data from the database and init the group list model with data for
 * right groups.
 */
private void updateGroupListModelRightgroups() {
    final List wrapped = new ArrayList();
    // right groups (attended only); no leave/delete from this view
    final List groups = bgm.findBusinessGroupsAttendedBy(BusinessGroup.TYPE_RIGHTGROUP, identity, null);
    final Iterator iter = groups.iterator();
    while (iter.hasNext()) {
        final BusinessGroup group = (BusinessGroup) iter.next();
        wrapped.add(wrapGroup(group, null, null));
    }
    groupListModel.setEntries(wrapped);
    groupListCtr.modelChanged();
}

/**
 * Wraps a group and some data into an Object[] that can be displayed by the group
 * list model.
 *
 * @param group the group to wrap
 * @param allowLeave true: user can leave
 * @param allowDelete true: user can delete
 * @return Object[] of {group, allowLeave, allowDelete}
 */
private Object[] wrapGroup(final BusinessGroup group, final Boolean allowLeave, final Boolean allowDelete) {
    return new Object[] { group, allowLeave, allowDelete };
}

/**
 * Build the static navigation menu: root (all groups) with buddy, learning and
 * right group children, identified by the CMD_MENU_* user objects.
 *
 * @return TreeModel
 */
private TreeModel buildTreeModel() {
    final GenericTreeModel gtm = new GenericTreeModel();
    final GenericTreeNode rootNode = new GenericTreeNode();
    rootNode.setTitle(translate("menu.index"));
    rootNode.setUserObject(CMD_MENU_INDEX);
    rootNode.setAltText(translate("menu.index.alt"));
    gtm.setRootNode(rootNode);
    GenericTreeNode myEntriesTn = new GenericTreeNode();
    myEntriesTn.setTitle(translate("menu.buddygroups"));
    myEntriesTn.setUserObject(CMD_MENU_BUDDY);
    myEntriesTn.setAltText(translate("menu.buddygroups.alt"));
    rootNode.addChild(myEntriesTn);
    myEntriesTn = new GenericTreeNode();
    myEntriesTn.setTitle(translate("menu.learninggroups"));
    myEntriesTn.setUserObject(CMD_MENU_LEARN);
    myEntriesTn.setAltText(translate("menu.learninggroups.alt"));
    rootNode.addChild(myEntriesTn);
    myEntriesTn = new GenericTreeNode();
    myEntriesTn.setTitle(translate("menu.rightgroups"));
    myEntriesTn.setUserObject(CMD_MENU_RIGHT);
    myEntriesTn.setAltText(translate("menu.rightgroups.alt"));
    rootNode.addChild(myEntriesTn);
    return gtm;
}

/**
 * Activate the menu node matching the view identifier; fall back to the root node
 * when no node matches, in which case toolbox activation points are also checked.
 *
 * @see org.olat.core.gui.control.generic.dtabs.Activateable#activate(org.olat.core.gui.UserRequest, java.lang.String)
 */
@Override
public void activate(final UserRequest ureq, final String viewIdentifier) {
    // find the menu node that has the user object that represents the viewIdentifier
    final GenericTreeNode rootNode = (GenericTreeNode) this.menuTree.getTreeModel().getRootNode();
    final TreeNode activatedNode = TreeHelper.findNodeByUserObject(viewIdentifier, rootNode);
    if (activatedNode != null) {
        this.menuTree.setSelectedNodeId(activatedNode.getIdent());
        activateContent(ureq, activatedNode.getUserObject());
    } else {
        // not found, activate the root node
        this.menuTree.setSelectedNodeId(rootNode.getIdent());
        activateContent(ureq, rootNode.getUserObject());
        // check for toolbox activation points
        if (viewIdentifier.equals(ACTION_ADD_BUDDYGROUP)) {
            initAddBuddygroupWorkflow(ureq);
        }
    }
}

/**
 * @see org.olat.core.gui.control.DefaultController#doDispose(boolean)
 */
@Override
protected void doDispose() {
    // nothing to dispose; child controllers are cleaned up via listenTo/removeAsListenerAndDispose
}
}
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.worklink.model;

import java.io.Serializable;
import java.util.Objects;

import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for associating a website certificate authority (CA) with a WorkLink fleet.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/worklink-2018-09-25/AssociateWebsiteCertificateAuthority"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AssociateWebsiteCertificateAuthorityRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * The ARN of the fleet.
     * </p>
     */
    private String fleetArn;
    /**
     * <p>
     * The root certificate of the CA.
     * </p>
     */
    private String certificate;
    /**
     * <p>
     * The certificate name to display.
     * </p>
     */
    private String displayName;

    /**
     * <p>
     * The ARN of the fleet.
     * </p>
     *
     * @param fleetArn
     *        The ARN of the fleet.
     */
    public void setFleetArn(String fleetArn) {
        this.fleetArn = fleetArn;
    }

    /**
     * <p>
     * The ARN of the fleet.
     * </p>
     *
     * @return The ARN of the fleet.
     */
    public String getFleetArn() {
        return this.fleetArn;
    }

    /**
     * <p>
     * The ARN of the fleet.
     * </p>
     *
     * @param fleetArn
     *        The ARN of the fleet.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AssociateWebsiteCertificateAuthorityRequest withFleetArn(String fleetArn) {
        setFleetArn(fleetArn);
        return this;
    }

    /**
     * <p>
     * The root certificate of the CA.
     * </p>
     *
     * @param certificate
     *        The root certificate of the CA.
     */
    public void setCertificate(String certificate) {
        this.certificate = certificate;
    }

    /**
     * <p>
     * The root certificate of the CA.
     * </p>
     *
     * @return The root certificate of the CA.
     */
    public String getCertificate() {
        return this.certificate;
    }

    /**
     * <p>
     * The root certificate of the CA.
     * </p>
     *
     * @param certificate
     *        The root certificate of the CA.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AssociateWebsiteCertificateAuthorityRequest withCertificate(String certificate) {
        setCertificate(certificate);
        return this;
    }

    /**
     * <p>
     * The certificate name to display.
     * </p>
     *
     * @param displayName
     *        The certificate name to display.
     */
    public void setDisplayName(String displayName) {
        this.displayName = displayName;
    }

    /**
     * <p>
     * The certificate name to display.
     * </p>
     *
     * @return The certificate name to display.
     */
    public String getDisplayName() {
        return this.displayName;
    }

    /**
     * <p>
     * The certificate name to display.
     * </p>
     *
     * @param displayName
     *        The certificate name to display.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AssociateWebsiteCertificateAuthorityRequest withDisplayName(String displayName) {
        setDisplayName(displayName);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getFleetArn() != null)
            sb.append("FleetArn: ").append(getFleetArn()).append(",");
        if (getCertificate() != null)
            sb.append("Certificate: ").append(getCertificate()).append(",");
        if (getDisplayName() != null)
            sb.append("DisplayName: ").append(getDisplayName());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is null-safe, so no separate null check is needed
        if (!(obj instanceof AssociateWebsiteCertificateAuthorityRequest))
            return false;
        AssociateWebsiteCertificateAuthorityRequest other = (AssociateWebsiteCertificateAuthorityRequest) obj;
        return Objects.equals(getFleetArn(), other.getFleetArn())
                && Objects.equals(getCertificate(), other.getCertificate())
                && Objects.equals(getDisplayName(), other.getDisplayName());
    }

    @Override
    public int hashCode() {
        // Objects.hash applies the same 31-based accumulation (seed 1, null -> 0) as the
        // previous hand-rolled implementation, so hash values are unchanged.
        return Objects.hash(getFleetArn(), getCertificate(), getDisplayName());
    }

    @Override
    public AssociateWebsiteCertificateAuthorityRequest clone() {
        return (AssociateWebsiteCertificateAuthorityRequest) super.clone();
    }

}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.fs; import java.io.FileNotFoundException; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.URI; import java.net.URISyntaxException; import java.net.URLDecoder; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.TreeMap; import java.util.HashMap; import java.util.concurrent.ConcurrentHashMap; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.io.Text; import org.apache.hadoop.util.LineReader; import org.apache.hadoop.util.Progressable; /** * This is an implementation of the Hadoop Archive * Filesystem. This archive Filesystem has index files * of the form _index* and has contents of the form * part-*. The index files store the indexes of the * real files. The index files are of the form _masterindex * and _index. The master index is a level of indirection * in to the index file to make the look ups faster. the index * file is sorted with hash code of the paths that it contains * and the master index contains pointers to the positions in * index for ranges of hashcodes. 
*/ public class HarFileSystem extends FilterFileSystem { public static final int VERSION = 3; private static final Map<URI, HarMetaData> harMetaCache = new ConcurrentHashMap<URI, HarMetaData>(); // uri representation of this Har filesystem private URI uri; // the top level path of the archive // in the underlying file system private Path archivePath; // the har auth private String harAuth; // pointer into the static metadata cache private HarMetaData metadata; /** * public construction of harfilesystem * */ public HarFileSystem() { } /** * Constructor to create a HarFileSystem with an * underlying filesystem. * @param fs */ public HarFileSystem(FileSystem fs) { super(fs); } /** * Initialize a Har filesystem per har archive. The * archive home directory is the top level directory * in the filesystem that contains the HAR archive. * Be careful with this method, you do not want to go * on creating new Filesystem instances per call to * path.getFileSystem(). * the uri of Har is * har://underlyingfsscheme-host:port/archivepath. * or * har:///archivepath. This assumes the underlying filesystem * to be used in case not specified. */ public void initialize(URI name, Configuration conf) throws IOException { // decode the name URI underLyingURI = decodeHarURI(name, conf); // we got the right har Path- now check if this is // truly a har filesystem Path harPath = archivePath( new Path(name.getScheme(), name.getAuthority(), name.getPath())); if (harPath == null) { throw new IOException("Invalid path for the Har Filesystem. 
" + name.toString()); } if (fs == null) { fs = FileSystem.get(underLyingURI, conf); } uri = harPath.toUri(); archivePath = new Path(uri.getPath()); harAuth = getHarAuth(underLyingURI); //check for the underlying fs containing // the index file Path masterIndexPath = new Path(archivePath, "_masterindex"); Path archiveIndexPath = new Path(archivePath, "_index"); if (!fs.exists(masterIndexPath) || !fs.exists(archiveIndexPath)) { throw new IOException("Invalid path for the Har Filesystem. " + "No index file in " + harPath); } metadata = harMetaCache.get(uri); if (metadata != null) { FileStatus mStat = fs.getFileStatus(masterIndexPath); FileStatus aStat = fs.getFileStatus(archiveIndexPath); if (mStat.getModificationTime() != metadata.getMasterIndexTimestamp() || aStat.getModificationTime() != metadata.getArchiveIndexTimestamp()) { // the archive has been overwritten since we last read it // remove the entry from the meta data cache metadata = null; harMetaCache.remove(uri); } } if (metadata == null) { metadata = new HarMetaData(fs, masterIndexPath, archiveIndexPath); metadata.parseMetaData(); harMetaCache.put(uri, metadata); } } // get the version of the filesystem from the masterindex file // the version is currently not useful since its the first version // of archives public int getHarVersion() throws IOException { if (metadata != null) { return metadata.getVersion(); } else { throw new IOException("Invalid meta data for the Har Filesystem"); } } /* * find the parent path that is the * archive path in the path. The last * path segment that ends with .har is * the path that will be returned. 
*/ private Path archivePath(Path p) { Path retPath = null; Path tmp = p; for (int i=0; i< p.depth(); i++) { if (tmp.toString().endsWith(".har")) { retPath = tmp; break; } tmp = tmp.getParent(); } return retPath; } /** * decode the raw URI to get the underlying URI * @param rawURI raw Har URI * @return filtered URI of the underlying fileSystem */ private URI decodeHarURI(URI rawURI, Configuration conf) throws IOException { String tmpAuth = rawURI.getAuthority(); //we are using the default file //system in the config //so create a underlying uri and //return it if (tmpAuth == null) { //create a path return FileSystem.getDefaultUri(conf); } String host = rawURI.getHost(); if (host == null) { throw new IOException("URI: " + rawURI + " is an invalid Har URI since host==null." + " Expecting har://<scheme>-<host>/<path>."); } int i = host.indexOf('-'); if (i < 0) { throw new IOException("URI: " + rawURI + " is an invalid Har URI since '-' not found." + " Expecting har://<scheme>-<host>/<path>."); } final String underLyingScheme = host.substring(0, i); i++; final String underLyingHost = i == host.length()? null: host.substring(i); int underLyingPort = rawURI.getPort(); String auth = (underLyingHost == null && underLyingPort == -1)? null:(underLyingHost+":"+underLyingPort); URI tmp = null; if (rawURI.getQuery() != null) { // query component not allowed throw new IOException("query component in Path not supported " + rawURI); } try { tmp = new URI(underLyingScheme, auth, rawURI.getPath(), rawURI.getQuery(), rawURI.getFragment()); } catch (URISyntaxException e) { // do nothing should not happen } return tmp; } private static String decodeString(String str) throws UnsupportedEncodingException { return URLDecoder.decode(str, "UTF-8"); } private String decodeFileName(String fname) throws UnsupportedEncodingException { int version = metadata.getVersion(); if (version == 2 || version == 3){ return decodeString(fname); } return fname; } /** * return the top level archive. 
*/
public Path getWorkingDirectory() {
  return new Path(uri.toString());
}

/**
 * Create a har specific auth
 * har-underlyingfs:port
 * @param underLyingUri the uri of underlying
 * filesystem
 * @return har specific auth
 */
private String getHarAuth(URI underLyingUri) {
  String auth = underLyingUri.getScheme() + "-";
  if (underLyingUri.getHost() != null) {
    auth += underLyingUri.getHost() + ":";
    if (underLyingUri.getPort() != -1) {
      auth += underLyingUri.getPort();
    }
  } else {
    // no host: keep the "<scheme>-:" shape so the authority still parses
    auth += ":";
  }
  return auth;
}

/**
 * Returns the uri of this filesystem.
 * The uri is of the form
 * har://underlyingfsschema-host:port/pathintheunderlyingfs
 */
@Override
public URI getUri() {
  return this.uri;
}

// no delegation tokens for a har filesystem
@Override
public String getCanonicalServiceName() {
  return null;
}

/**
 * this method returns the path
 * inside the har filesystem.
 * this is relative path inside
 * the har filesystem.
 * @param path the fully qualified path in the har filesystem.
 * @return relative path in the filesystem, or null when the path is
 * not under the archive root.
 */
private Path getPathInHar(Path path) {
  Path harPath = new Path(path.toUri().getPath());
  // the archive root itself maps to "/"
  if (archivePath.compareTo(harPath) == 0)
    return new Path(Path.SEPARATOR);
  // walk up from path accumulating name components until the archive
  // root is reached; bail out with null if "/" is hit first
  Path tmp = new Path(harPath.getName());
  Path parent = harPath.getParent();
  while (!(parent.compareTo(archivePath) == 0)) {
    if (parent.toString().equals(Path.SEPARATOR)) {
      tmp = null;
      break;
    }
    tmp = new Path(parent.getName(), tmp);
    parent = parent.getParent();
  }
  if (tmp != null)
    tmp = new Path(Path.SEPARATOR, tmp);
  return tmp;
}

//the relative path of p. basically
// getting rid of /. Parsing and doing
// string manipulation is not good - so
// just use the path api to do it.
private Path makeRelative(String initial, Path p) {
  String scheme = this.uri.getScheme();
  String authority = this.uri.getAuthority();
  Path root = new Path(Path.SEPARATOR);
  if (root.compareTo(p) == 0)
    return new Path(scheme, authority, initial);
  // rebuild p's components on top of "initial"
  Path retPath = new Path(p.getName());
  Path parent = p.getParent();
  for (int i=0; i < p.depth()-1; i++) {
    retPath = new Path(parent.getName(), retPath);
    parent = parent.getParent();
  }
  return new Path(new Path(scheme, authority, initial),
    retPath.toString());
}

/* this makes a path qualified in the har filesystem
 * (non-Javadoc)
 * @see org.apache.hadoop.fs.FilterFileSystem#makeQualified(
 * org.apache.hadoop.fs.Path)
 */
@Override
public Path makeQualified(Path path) {
  // make sure that we just get the
  // path component
  Path fsPath = path;
  if (!path.isAbsolute()) {
    fsPath = new Path(archivePath, path);
  }
  URI tmpURI = fsPath.toUri();
  //change this to Har uri
  return new Path(uri.getScheme(), harAuth, tmpURI.getPath());
}

/**
 * Fix offset and length of block locations.
 * Note that this method modifies the original array.
* @param locations block locations of har part file
 * @param start the start of the desired range in the contained file
 * @param len the length of the desired range
 * @param fileOffsetInHar the offset of the desired file in the har part file
 * @return block locations with fixed offset and length
 */
static BlockLocation[] fixBlockLocations(BlockLocation[] locations,
                                         long start,
                                         long len,
                                         long fileOffsetInHar) {
  // offset 1 past last byte of desired range
  long end = start + len;

  for (BlockLocation location : locations) {
    // offset of part block relative to beginning of desired file
    // (may be negative if file starts in this part block)
    long harBlockStart = location.getOffset() - fileOffsetInHar;
    // offset 1 past last byte of har block relative to beginning of
    // desired file
    long harBlockEnd = harBlockStart + location.getLength();

    if (start > harBlockStart) {
      // desired range starts after beginning of this har block
      // fix offset to beginning of relevant range (relative to desired file)
      location.setOffset(start);
      // fix length to relevant portion of har block
      location.setLength(location.getLength() - (start - harBlockStart));
    } else {
      // desired range includes beginning of this har block
      location.setOffset(harBlockStart);
    }

    if (harBlockEnd > end) {
      // range ends before end of this har block
      // fix length to remove irrelevant portion at the end
      location.setLength(location.getLength() - (harBlockEnd - end));
    }
  }

  return locations;
}

/**
 * Get block locations from the underlying fs and fix their
 * offsets and lengths.
* @param file the input filestatus to get block locations
 * @param start the start of the desired range in the contained file
 * @param len the length of the desired range
 * @return block locations for this segment of file
 * @throws IOException
 */
@Override
public BlockLocation[] getFileBlockLocations(FileStatus file, long start,
    long len) throws IOException {
  // translate the har path into (part file, offset) and query the
  // underlying filesystem for the part file's block locations
  HarStatus hstatus = getFileHarStatus(file.getPath());
  Path partPath = new Path(archivePath, hstatus.getPartName());
  FileStatus partStatus = metadata.getPartFileStatus(partPath);

  // get all part blocks that overlap with the desired file blocks
  BlockLocation[] locations =
    fs.getFileBlockLocations(partStatus,
                             hstatus.getStartIndex() + start, len);

  return fixBlockLocations(locations, start, len, hstatus.getStartIndex());
}

/**
 * the hash of the path p inside
 * the filesystem
 * @param p the path in the harfilesystem
 * @return the hash code of the path.
 */
public static int getHarHash(Path p) {
  // mask the sign bit so the hash is always non-negative
  return (p.toString().hashCode() & 0x7fffffff);
}

// A (begin, end) byte range in the archive index together with the
// hash range of the entries it covers; parsed from the master index.
static class Store {
  public Store() {
    begin = end = startHash = endHash = 0;
  }
  public Store(long begin, long end, int startHash, int endHash) {
    this.begin = begin;
    this.end = end;
    this.startHash = startHash;
    this.endHash = endHash;
  }
  public long begin;
  public long end;
  public int startHash;
  public int endHash;
}

/**
 * Get filestatuses of all the children of a given directory. This just reads
 * through index file and reads line by line to get all statuses for children
 * of a directory.
Its a brute force way of getting all such filestatuses * * @param parent * the parent path directory * @param statuses * the list to add the children filestatuses to * @param children * the string list of children for this parent * @param archiveIndexStat * the archive index filestatus */ private void fileStatusesInIndex(HarStatus parent, List<FileStatus> statuses, List<String> children) throws IOException { String parentString = parent.getName(); if (!parentString.endsWith(Path.SEPARATOR)){ parentString += Path.SEPARATOR; } Path harPath = new Path(parentString); int harlen = harPath.depth(); final Map<String, FileStatus> cache = new TreeMap<String, FileStatus>(); for (HarStatus hstatus : metadata.archive.values()) { String child = hstatus.getName(); if ((child.startsWith(parentString))) { Path thisPath = new Path(child); if (thisPath.depth() == harlen + 1) { statuses.add(toFileStatus(hstatus, cache)); } } } } /** * Combine the status stored in the index and the underlying status. * @param h status stored in the index * @param cache caching the underlying file statuses * @return the combined file status * @throws IOException */ private FileStatus toFileStatus(HarStatus h, Map<String, FileStatus> cache) throws IOException { FileStatus underlying = null; if (cache != null) { underlying = cache.get(h.partName); } if (underlying == null) { final Path p = h.isDir? archivePath: new Path(archivePath, h.partName); underlying = fs.getFileStatus(p); if (cache != null) { cache.put(h.partName, underlying); } } long modTime = 0; int version = metadata.getVersion(); if (version < 3) { modTime = underlying.getModificationTime(); } else if (version == 3) { modTime = h.getModificationTime(); } return new FileStatus( h.isDir()? 
0L: h.getLength(), h.isDir(), underlying.getReplication(), underlying.getBlockSize(), modTime, underlying.getAccessTime(), underlying.getPermission(), underlying.getOwner(), underlying.getGroup(), makeRelative(this.uri.getPath(), new Path(h.name))); } // a single line parser for hadoop archives status // stored in a single line in the index files // the format is of the form // filename "dir"/"file" partFileName startIndex length // <space seperated children> private class HarStatus { boolean isDir; String name; List<String> children; String partName; long startIndex; long length; long modificationTime = 0; public HarStatus(String harString) throws UnsupportedEncodingException { String[] splits = harString.split(" "); this.name = decodeFileName(splits[0]); this.isDir = "dir".equals(splits[1]) ? true: false; // this is equal to "none" if its a directory this.partName = splits[2]; this.startIndex = Long.parseLong(splits[3]); this.length = Long.parseLong(splits[4]); int version = metadata.getVersion(); String[] propSplits = null; // propSplits is used to retrieve the metainformation that Har versions // 1 & 2 missed (modification time, permission, owner group). // These fields are stored in an encoded string placed in different // locations depending on whether it's a file or directory entry. // If it's a directory, the string will be placed at the partName // location (directories have no partName because they don't have data // to be stored). This is done because the number of fields in a // directory entry is unbounded (all children are listed at the end) // If it's a file, the string will be the last field. 
if (isDir) { if (version == 3){ propSplits = decodeString(this.partName).split(" "); } children = new ArrayList<String>(); for (int i = 5; i < splits.length; i++) { children.add(decodeFileName(splits[i])); } } else if (version == 3) { propSplits = decodeString(splits[5]).split(" "); } if (propSplits != null && propSplits.length >= 4) { modificationTime = Long.parseLong(propSplits[0]); // the fields below are stored in the file but are currently not used // by HarFileSystem // permission = new FsPermission(Short.parseShort(propSplits[1])); // owner = decodeString(propSplits[2]); // group = decodeString(propSplits[3]); } } public boolean isDir() { return isDir; } public String getName() { return name; } public List<String> getChildren() { return children; } public String getFileName() { return name; } public String getPartName() { return partName; } public long getStartIndex() { return startIndex; } public long getLength() { return length; } public long getModificationTime() { return modificationTime; } } /** * return the filestatus of files in har archive. * The permission returned are that of the archive * index files. The permissions are not persisted * while creating a hadoop archive. * @param f the path in har filesystem * @return filestatus. * @throws IOException */ @Override public FileStatus getFileStatus(Path f) throws IOException { HarStatus hstatus = getFileHarStatus(f); return toFileStatus(hstatus, null); } private HarStatus getFileHarStatus(Path f) throws IOException { // get the fs DataInputStream for the underlying file // look up the index. Path p = makeQualified(f); Path harPath = getPathInHar(p); if (harPath == null) { throw new IOException("Invalid file name: " + f + " in " + uri); } HarStatus hstatus = metadata.archive.get(harPath); if (hstatus == null) { throw new FileNotFoundException("File: " + f + " does not exist in " + uri); } return hstatus; } /** * @return null since no checksum algorithm is implemented. 
*/
public FileChecksum getFileChecksum(Path f) {
  return null;
}

/**
 * Returns a har input stream which fakes end of
 * file. It reads the index files to get the part
 * file name and the size and start of the file.
 */
@Override
public FSDataInputStream open(Path f, int bufferSize) throws IOException {
  // get the fs DataInputStream for the underlying file
  HarStatus hstatus = getFileHarStatus(f);
  // directories have no byte range to stream
  if (hstatus.isDir()) {
    throw new FileNotFoundException(f + " : not a file in " + archivePath);
  }
  return new HarFSDataInputStream(fs, new Path(archivePath,
      hstatus.getPartName()),
      hstatus.getStartIndex(), hstatus.getLength(), bufferSize);
}

/*
 * create throws an exception in Har filesystem.
 * The archive once created cannot be changed.
 */
public FSDataOutputStream create(Path f, int bufferSize)
                                  throws IOException {
  throw new IOException("Har: Create not allowed");
}

public FSDataOutputStream create(Path f,
    FsPermission permission,
    boolean overwrite,
    int bufferSize,
    short replication,
    long blockSize,
    Progressable progress) throws IOException {
  throw new IOException("Har: create not allowed.");
}

// Closes the cached underlying filesystem; close errors are ignored
// because the delegate may already have been closed elsewhere.
@Override
public void close() throws IOException {
  if (fs != null) {
    try {
      fs.close();
    } catch(IOException ie) {
      //this might already be closed
      // ignore
    }
  }
}

/**
 * Not implemented.
 */
@Override
public boolean setReplication(Path src, short replication) throws IOException{
  throw new IOException("Har: setreplication not allowed");
}

/**
 * Not implemented.
 */
@Override
public boolean delete(Path f, boolean recursive) throws IOException {
  throw new IOException("Har: delete not allowed");
}

/**
 * liststatus returns the children of a directory
 * after looking up the index files.
*/
@Override
public FileStatus[] listStatus(Path f) throws IOException {
  //need to see if the file is an index in file
  //get the filestatus of the archive directory
  // we will create fake filestatuses to return
  // to the client
  List<FileStatus> statuses = new ArrayList<FileStatus>();
  Path tmpPath = makeQualified(f);
  Path harPath = getPathInHar(tmpPath);
  HarStatus hstatus = metadata.archive.get(harPath);
  if (hstatus == null) {
    throw new FileNotFoundException("File " + f + " not found in " + archivePath);
  }
  if (hstatus.isDir()) {
    fileStatusesInIndex(hstatus, statuses, hstatus.children);
  } else {
    statuses.add(toFileStatus(hstatus, null));
  }

  return statuses.toArray(new FileStatus[statuses.size()]);
}

/**
 * return the top level archive path.
 */
public Path getHomeDirectory() {
  return new Path(uri.toString());
}

// the working directory of a har filesystem is fixed
public void setWorkingDirectory(Path newDir) {
  //does nothing.
}

/**
 * not implemented.
 */
public boolean mkdirs(Path f, FsPermission permission) throws IOException {
  throw new IOException("Har: mkdirs not allowed");
}

/**
 * not implemented.
 */
public void copyFromLocalFile(boolean delSrc, Path src, Path dst) throws
      IOException {
  throw new IOException("Har: copyfromlocalfile not allowed");
}

/**
 * copies the file in the har filesystem to a local file.
 */
public void copyToLocalFile(boolean delSrc, Path src, Path dst)
  throws IOException {
  FileUtil.copy(this, src, getLocal(getConf()), dst, false, getConf());
}

/**
 * not implemented.
 */
public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile)
  throws IOException {
  throw new IOException("Har: startLocalOutput not allowed");
}

/**
 * not implemented.
 */
public void completeLocalOutput(Path fsOutputFile, Path tmpLocalFile)
  throws IOException {
  throw new IOException("Har: completeLocalOutput not allowed");
}

/**
 * not implemented.
 */
public void setOwner(Path p, String username, String groupname)
  throws IOException {
  throw new IOException("Har: setowner not allowed");
}

/**
 * Not implemented.
*/ public void setPermission(Path p, FsPermission permisssion) throws IOException { throw new IOException("Har: setPermission not allowed"); } /** * Hadoop archives input stream. This input stream fakes EOF * since archive files are part of bigger part files. */ private static class HarFSDataInputStream extends FSDataInputStream { /** * Create an input stream that fakes all the reads/positions/seeking. */ private static class HarFsInputStream extends FSInputStream { private long position, start, end; //The underlying data input stream that the // underlying filesystem will return. private FSDataInputStream underLyingStream; //one byte buffer private byte[] oneBytebuff = new byte[1]; HarFsInputStream(FileSystem fs, Path path, long start, long length, int bufferSize) throws IOException { underLyingStream = fs.open(path, bufferSize); underLyingStream.seek(start); // the start of this file in the part file this.start = start; // the position pointer in the part file this.position = start; // the end pointer in the part file this.end = start + length; } public synchronized int available() throws IOException { long remaining = end - underLyingStream.getPos(); if (remaining > (long)Integer.MAX_VALUE) { return Integer.MAX_VALUE; } return (int) remaining; } public synchronized void close() throws IOException { underLyingStream.close(); super.close(); } //not implemented @Override public void mark(int readLimit) { // do nothing } /** * reset is not implemented */ public void reset() throws IOException { throw new IOException("reset not implemented."); } public synchronized int read() throws IOException { int ret = read(oneBytebuff, 0, 1); return (ret <= 0) ? 
-1: (oneBytebuff[0] & 0xff); } public synchronized int read(byte[] b) throws IOException { int ret = read(b, 0, b.length); if (ret != -1) { position += ret; } return ret; } /** * */ public synchronized int read(byte[] b, int offset, int len) throws IOException { int newlen = len; int ret = -1; if (position + len > end) { newlen = (int) (end - position); } // end case if (newlen == 0) return ret; ret = underLyingStream.read(b, offset, newlen); position += ret; return ret; } public synchronized long skip(long n) throws IOException { long tmpN = n; if (tmpN > 0) { if (position + tmpN > end) { tmpN = end - position; } underLyingStream.seek(tmpN + position); position += tmpN; return tmpN; } return (tmpN < 0)? -1 : 0; } public synchronized long getPos() throws IOException { return (position - start); } public synchronized void seek(long pos) throws IOException { if (pos < 0 || (start + pos > end)) { throw new IOException("Failed to seek: EOF"); } position = start + pos; underLyingStream.seek(position); } public boolean seekToNewSource(long targetPos) throws IOException { //do not need to implement this // hdfs in itself does seektonewsource // while reading. return false; } /** * implementing position readable. */ public int read(long pos, byte[] b, int offset, int length) throws IOException { int nlength = length; if (start + nlength + pos > end) { nlength = (int) (end - (start + pos)); } return underLyingStream.read(pos + start , b, offset, nlength); } /** * position readable again. */ public void readFully(long pos, byte[] b, int offset, int length) throws IOException { if (start + length + pos > end) { throw new IOException("Not enough bytes to read."); } underLyingStream.readFully(pos + start, b, offset, length); } public void readFully(long pos, byte[] b) throws IOException { readFully(pos, b, 0, b.length); } } /** * constructors for har input stream. 
* @param fs the underlying filesystem
 * @param p The path in the underlying filesystem
 * @param start the start position in the part file
 * @param length the length of valid data in the part file
 * @param bufsize the buffer size
 * @throws IOException
 */
public HarFSDataInputStream(FileSystem fs, Path p, long start,
    long length, int bufsize) throws IOException {
  super(new HarFsInputStream(fs, p, start, length, bufsize));
}

/**
 * constructor for har input stream.
 * @param fs the underlying filesystem
 * @param p the path in the underlying file system
 * @param start the start position in the part file
 * @param length the length of valid data in the part file.
 * @throws IOException
 */
public HarFSDataInputStream(FileSystem fs, Path p, long start, long length)
  throws IOException {
  super(new HarFsInputStream(fs, p, start, length, 0));
}
}

// Parsed representation of the archive's master index and archive
// index files, plus a cache of part-file statuses.
private class HarMetaData {
  private FileSystem fs;
  // index format version, read from the first line of the master index
  private int version;
  // the masterIndex of the archive
  private Path masterIndexPath;
  // the index file
  private Path archiveIndexPath;

  private long masterIndexTimestamp;
  private long archiveIndexTimestamp;

  List<Store> stores = new ArrayList<Store>();
  Map<Path, HarStatus> archive = new HashMap<Path, HarStatus>();
  // lazily populated cache used by getPartFileStatus()
  private Map<Path, FileStatus> partFileStatuses = new HashMap<Path, FileStatus>();

  public HarMetaData(FileSystem fs, Path masterIndexPath, Path archiveIndexPath) {
    this.fs = fs;
    this.masterIndexPath = masterIndexPath;
    this.archiveIndexPath = archiveIndexPath;
  }

  public FileStatus getPartFileStatus(Path partPath) throws IOException {
    FileStatus status;
    status = partFileStatuses.get(partPath);
    if (status == null) {
      status = fs.getFileStatus(partPath);
      partFileStatuses.put(partPath, status);
    }
    return status;
  }

  public long getMasterIndexTimestamp() {
    return masterIndexTimestamp;
  }

  public long getArchiveIndexTimestamp() {
    return archiveIndexTimestamp;
  }

  private int getVersion() {
    return version;
  }

  // Reads the master index (version line followed by store ranges) and
  // then the archive index (one HarStatus per line, sliced by the store
  // byte ranges).
  // NOTE(review): if LineReader.readLine() ever returned 0 the while
  // loops below would spin forever; presumably the index files are
  // always well-formed — confirm upstream guarantees.
  private void parseMetaData() throws IOException {
    FSDataInputStream in = fs.open(masterIndexPath);
    FileStatus masterStat = fs.getFileStatus(masterIndexPath);
    masterIndexTimestamp = masterStat.getModificationTime();
    LineReader lin = new LineReader(in, getConf());
    Text line = new Text();
    long read = lin.readLine(line);

    // the first line contains the version of the index file
    String versionLine = line.toString();
    String[] arr = versionLine.split(" ");
    version = Integer.parseInt(arr[0]);
    // make it always backwards-compatible
    if (this.version > HarFileSystem.VERSION) {
      throw new IOException("Invalid version " +
          this.version + " expected " + HarFileSystem.VERSION);
    }

    // each line contains a hashcode range and the index file name
    String[] readStr = null;
    while(read < masterStat.getLen()) {
      int b = lin.readLine(line);
      read += b;
      readStr = line.toString().split(" ");
      int startHash = Integer.parseInt(readStr[0]);
      int endHash = Integer.parseInt(readStr[1]);
      stores.add(new Store(Long.parseLong(readStr[2]),
          Long.parseLong(readStr[3]), startHash,
          endHash));
      line.clear();
    }
    try {
      // close the master index
      lin.close();
    } catch(IOException io){
      // do nothing just a read.
    }

    FSDataInputStream aIn = fs.open(archiveIndexPath);
    FileStatus archiveStat = fs.getFileStatus(archiveIndexPath);
    archiveIndexTimestamp = archiveStat.getModificationTime();
    LineReader aLin;

    // now start reading the real index file
    for (Store s: stores) {
      read = 0;
      aIn.seek(s.begin);
      aLin = new LineReader(aIn, getConf());
      while (read + s.begin < s.end) {
        int tmp = aLin.readLine(line);
        read += tmp;
        String lineFeed = line.toString();
        String[] parsed = lineFeed.split(" ");
        parsed[0] = decodeFileName(parsed[0]);
        archive.put(new Path(parsed[0]), new HarStatus(lineFeed));
        line.clear();
      }
    }
    try {
      // close the archive index
      aIn.close();
    } catch(IOException io) {
      // do nothing just a read.
    }
  }
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ambari.server.controller.metrics; import java.lang.reflect.Type; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import javax.annotation.Nullable; import org.apache.ambari.server.AmbariException; import org.apache.ambari.server.controller.AmbariManagementController; import org.apache.ambari.server.controller.internal.PropertyInfo; import org.apache.ambari.server.controller.internal.StackDefinedPropertyProvider; import org.apache.ambari.server.controller.spi.Predicate; import org.apache.ambari.server.controller.spi.Request; import org.apache.ambari.server.controller.spi.Resource; import org.apache.ambari.server.controller.spi.SystemException; import org.apache.ambari.server.controller.utilities.StreamProvider; import org.apache.ambari.server.state.Cluster; import org.apache.ambari.server.state.Clusters; import org.apache.ambari.server.state.services.MetricsRetrievalService; import org.apache.ambari.server.state.services.MetricsRetrievalService.MetricSourceType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.gson.Gson; import com.google.gson.JsonElement; import 
com.google.gson.reflect.TypeToken;
import com.google.inject.Inject;
import com.google.inject.assistedinject.Assisted;
import com.google.inject.assistedinject.AssistedInject;

/**
 * Resolves metrics like api/cluster/summary/nimbus.uptime. For every metric,
 * finds a relevant JSON value and returns it as a resource property.
 * <p/>
 * This class will delegate responsibility for actually retrieving JSON data
 * from a remote URL to the {@link MetricsRetrievalService}. It will also
 * leverage the {@link MetricsRetrievalService} to provide cached {@link Map}
 * instances for given URLs.
 * <p/>
 * This is because the REST API workflow will attempt to read data from this
 * provider during the context of a live Jetty thread. As a result, any attempt
 * to read remote resources will cause a delay in returning a response code. On
 * small clusters this normally isn't a problem. However, as the cluster
 * increases in size, the thread pool would not be able to keep pace and would
 * eventually cause REST API request threads to wait while remote JSON data is
 * retrieved.
 */
public class RestMetricsPropertyProvider extends ThreadPoolEnabledPropertyProvider {

  protected final static Logger LOG =
      LoggerFactory.getLogger(RestMetricsPropertyProvider.class);

  @Inject
  private AmbariManagementController amc;

  @Inject
  private Clusters clusters;

  /**
   * Used to parse the REST JSON metrics.
   */
  @Inject
  private Gson gson;

  /**
   * Used to submit asynchronous requests for remote metrics as well as querying
   * cached metrics.
   */
  @Inject
  private MetricsRetrievalService metricsRetrievalService;

  // per-component settings from metrics.json (port, protocol, config types)
  private final Map<String, String> metricsProperties;
  private final StreamProvider streamProvider;
  private final String clusterNamePropertyId;
  private final String componentNamePropertyId;
  private final String statePropertyId;
  private final String componentName;

  // keys understood inside metricsProperties
  private static final String DEFAULT_PORT_PROPERTY = "default_port";
  private static final String PORT_CONFIG_TYPE_PROPERTY = "port_config_type";
  private static final String PORT_PROPERTY_NAME_PROPERTY = "port_property_name";
  private static final String HTTPS_PORT_PROPERTY_NAME_PROPERTY = "https_port_property_name";

  /**
   * Protocol to use when connecting
   */
  private static final String PROTOCOL_OVERRIDE_PROPERTY = "protocol";
  private static final String HTTPS_PROTOCOL_PROPERTY = "https_property_name";
  private static final String HTTP_PROTOCOL = "http";
  private static final String HTTPS_PROTOCOL = "https";
  private static final String DEFAULT_PROTOCOL = HTTP_PROTOCOL;

  /**
   * String that separates JSON URL from path inside JSON in metrics path
   */
  public static final String URL_PATH_SEPARATOR = "##";

  /**
   * Symbol that separates names of nested JSON sections in metrics path
   */
  public static final String DOCUMENT_PATH_SEPARATOR = "#";

  /**
   * Create a REST property provider.
*
   * @param metricsProperties the map of per-component metrics properties
   * @param componentMetrics the map of supported metrics for component
   * @param streamProvider the stream provider
   * @param metricHostProvider metricsHostProvider instance
   * @param clusterNamePropertyId the cluster name property id
   * @param hostNamePropertyId the host name property id
   * @param componentNamePropertyId the component name property id
   * @param statePropertyId the state property id
   * @param componentName the name of the component this provider serves
   */
  @AssistedInject
  RestMetricsPropertyProvider(
      @Assisted("metricsProperties") Map<String, String> metricsProperties,
      @Assisted("componentMetrics") Map<String, Map<String, PropertyInfo>> componentMetrics,
      @Assisted("streamProvider") StreamProvider streamProvider,
      @Assisted("metricHostProvider") MetricHostProvider metricHostProvider,
      @Assisted("clusterNamePropertyId") String clusterNamePropertyId,
      @Assisted("hostNamePropertyId") @Nullable String hostNamePropertyId,
      @Assisted("componentNamePropertyId") String componentNamePropertyId,
      @Assisted("statePropertyId") @Nullable String statePropertyId,
      @Assisted("componentName") @Nullable String componentName) {
    super(componentMetrics, hostNamePropertyId, metricHostProvider, clusterNamePropertyId);
    this.metricsProperties = metricsProperties;
    this.streamProvider = streamProvider;
    this.clusterNamePropertyId = clusterNamePropertyId;
    this.componentNamePropertyId = componentNamePropertyId;
    this.statePropertyId = statePropertyId;
    this.componentName = componentName;
  }

  // ----- MetricsProvider implementation ------------------------------------

  /**
   * Populate a resource by obtaining the requested REST properties.
* * @param resource the resource to be populated * @param request the request * @param predicate the predicate * @return the populated resource; null if the resource should NOT be * part of the result set for the given predicate */ @Override protected Resource populateResource(Resource resource, Request request, Predicate predicate, Ticket ticket) throws SystemException { // Remove request properties that request temporal information Set<String> ids = getRequestPropertyIds(request, predicate); Set<String> temporalIds = new HashSet<>(); String resourceComponentName = (String) resource.getPropertyValue(componentNamePropertyId); if (!componentName.equals(resourceComponentName)) { return resource; } for (String id : ids) { if (request.getTemporalInfo(id) != null) { temporalIds.add(id); } } ids.removeAll(temporalIds); if (ids.isEmpty()) { // no properties requested return resource; } // Don't attempt to get REST properties if the resource is in // an unhealthy state if (statePropertyId != null) { String state = (String) resource.getPropertyValue(statePropertyId); if (state != null && !healthyStates.contains(state)) { return resource; } } Map<String, PropertyInfo> propertyInfos = getComponentMetrics().get(StackDefinedPropertyProvider.WRAPPED_METRICS_KEY); if (propertyInfos == null) { // If there are no metrics defined for the given component then there is nothing to do. return resource; } String protocol = null; String port = "-1"; String hostname = null; try { String clusterName = (String) resource.getPropertyValue(clusterNamePropertyId); Cluster cluster = clusters.getCluster(clusterName); hostname = getHost(resource, clusterName, resourceComponentName); if (hostname == null) { String msg = String.format("Unable to get component REST metrics. 
" + "No host name for %s.", resourceComponentName); LOG.warn(msg); return resource; } protocol = resolveProtocol(cluster, hostname); port = resolvePort(cluster, hostname, resourceComponentName, metricsProperties, protocol); } catch (Exception e) { rethrowSystemException(e); } Set<String> resultIds = new HashSet<>(); for (String id : ids){ for (String metricId : propertyInfos.keySet()){ if (metricId.startsWith(id)){ resultIds.add(metricId); } } } // Extract set of URLs for metrics HashMap<String, Set<String>> urls = extractPropertyURLs(resultIds, propertyInfos); for (String url : urls.keySet()) { String spec = getSpec(protocol, hostname, port, url); // always submit a request to cache the latest data metricsRetrievalService.submitRequest(MetricSourceType.REST, streamProvider, spec); // check to see if there is a cached value and use it if there is Map<String, String> jsonMap = metricsRetrievalService.getCachedRESTMetric(spec); if (null == jsonMap) { return resource; } if (!ticket.isValid()) { return resource; } try { extractValuesFromJSON(jsonMap, urls.get(url), resource, propertyInfos); } catch (AmbariException ambariException) { AmbariException detailedException = new AmbariException(String.format( "Unable to get REST metrics from the for %s at %s", resourceComponentName, spec), ambariException); logException(detailedException); } } return resource; } @Override public Set<String> checkPropertyIds(Set<String> propertyIds) { Set<String> unsupported = new HashSet<>(); for (String propertyId : propertyIds) { if (!getComponentMetrics(). get(StackDefinedPropertyProvider.WRAPPED_METRICS_KEY). 
containsKey(propertyId)) { unsupported.add(propertyId); } } return unsupported; } // ----- helper methods ---------------------------------------------------- /** * If protocol is equal to HTTPS_PROTOCOL than returns HTTPS_PORT_PROPERTY_NAME_PROPERTY value from PORT_CONFIG_TYPE_PROPERTY * else uses port_config_type, port_property_name, default_port parameters from * metricsProperties to find out right port value for service * * @return determines REST port for service */ protected String resolvePort(Cluster cluster, String hostname, String componentName, Map<String, String> metricsProperties, String protocol) throws AmbariException { String portConfigType = null; String portPropertyNameInMetricsProperties = protocol.equalsIgnoreCase(HTTPS_PROTOCOL) ? HTTPS_PORT_PROPERTY_NAME_PROPERTY : PORT_PROPERTY_NAME_PROPERTY; String portPropertyName = null; if (metricsProperties.containsKey(PORT_CONFIG_TYPE_PROPERTY) && metricsProperties.containsKey(portPropertyNameInMetricsProperties)) { portConfigType = metricsProperties.get(PORT_CONFIG_TYPE_PROPERTY); portPropertyName = metricsProperties.get(portPropertyNameInMetricsProperties); } String portStr = getPropertyValueByNameAndConfigType(portPropertyName, portConfigType, cluster, hostname); if (portStr == null && metricsProperties.containsKey(DEFAULT_PORT_PROPERTY)) { if (metricsProperties.containsKey(DEFAULT_PORT_PROPERTY)) { portStr = metricsProperties.get(DEFAULT_PORT_PROPERTY); } else { String message = String.format("Can not determine REST port for " + "component %s. 
" + "Default REST port property %s is not defined at metrics.json " + "file for service, and there is no any other available ways " + "to determine port information.", componentName, DEFAULT_PORT_PROPERTY); throw new AmbariException(message); } } return portStr; } /** * Tries to get propertyName property from configType config for specified cluster and hostname * @param propertyName * @param configType * @param cluster * @param hostname * @return */ private String getPropertyValueByNameAndConfigType(String propertyName, String configType, Cluster cluster, String hostname){ String result = null; if (configType != null && propertyName != null) { try { Map<String, Map<String, String>> configTags = amc.findConfigurationTagsWithOverrides(cluster, hostname); if (configTags.containsKey(configType)) { Map<String, Map<String, String>> properties = amc.getConfigHelper().getEffectiveConfigProperties(cluster, Collections.singletonMap(configType, configTags.get(configType))); Map<String, String> config = properties.get(configType); if (config != null && config.containsKey(propertyName)) { result = config.get(propertyName); } } } catch (AmbariException e) { String message = String.format("Can not extract configs for " + "component = %s, hostname = %s, config type = %s, property name = %s", componentName, hostname, configType, propertyName); LOG.warn(message, e); } if (result == null) { String message = String.format( "Can not extract property for " + "component %s from configurations. " + "Config tag = %s, config key name = %s, " + "hostname = %s. Probably metrics.json file for " + "service is misspelled.", componentName, configType, propertyName, hostname); LOG.debug(message); } } return result; } /** * if HTTPS_PROTOCOL_PROPERTY is present in metrics properties then checks if it is present in PORT_CONFIG_TYPE_PROPERTY and returns "https" if it is. * * Otherwise extracts protocol type from metrics properties. If no protocol is defined, * uses default protocol. 
*/ private String resolveProtocol(Cluster cluster, String hostname) { String protocol = DEFAULT_PROTOCOL; if (metricsProperties.containsKey(PORT_CONFIG_TYPE_PROPERTY) && metricsProperties.containsKey(HTTPS_PROTOCOL_PROPERTY)) { String configType = metricsProperties.get(PORT_CONFIG_TYPE_PROPERTY); String propertyName = metricsProperties.get(HTTPS_PROTOCOL_PROPERTY); String value = getPropertyValueByNameAndConfigType(propertyName, configType, cluster, hostname); if (value != null) { return HTTPS_PROTOCOL; } } if (metricsProperties.containsKey(PROTOCOL_OVERRIDE_PROPERTY)) { protocol = metricsProperties.get(PROTOCOL_OVERRIDE_PROPERTY).toLowerCase(); if (!protocol.equals(HTTP_PROTOCOL) && !protocol.equals(HTTPS_PROTOCOL)) { String message = String.format( "Unsupported protocol type %s, falling back to %s", protocol, DEFAULT_PROTOCOL); LOG.warn(message); protocol = DEFAULT_PROTOCOL; } } else { protocol = DEFAULT_PROTOCOL; } return protocol; } /** * Extracts JSON URL from metricsPath */ private String extractMetricsURL(String metricsPath) throws IllegalArgumentException { return validateAndExtractPathParts(metricsPath)[0]; } /** * Extracts part of metrics path that contains path through nested * JSON sections */ private String extractDocumentPath(String metricsPath) throws IllegalArgumentException { return validateAndExtractPathParts(metricsPath)[1]; } /** * Returns [MetricsURL, DocumentPath] or throws an exception * if metricsPath is invalid. */ private String[] validateAndExtractPathParts(String metricsPath) throws IllegalArgumentException { String[] pathParts = metricsPath.split(URL_PATH_SEPARATOR); if (pathParts.length == 2) { return pathParts; } else { // This warning is expected to occur only on development phase String message = String.format( "Metrics path %s does not contain or contains" + "more than one %s sequence. That probably " + "means that the mentioned metrics path is misspelled. 
" + "Please check the relevant metrics.json file", metricsPath, URL_PATH_SEPARATOR); throw new IllegalArgumentException(message); } } /** * Returns a map <document_url, requested_property_ids>. * requested_property_ids contain a set of property IDs * that should be fetched for this URL. Doing * that allows us to extract document only once when getting few properties * from this document. * * @param ids set of property IDs that should be fetched */ private HashMap<String, Set<String>> extractPropertyURLs(Set<String> ids, Map<String, PropertyInfo> propertyInfos) { HashMap<String, Set<String>> result = new HashMap<>(); for (String requestedPropertyId : ids) { PropertyInfo propertyInfo = propertyInfos.get(requestedPropertyId); String metricsPath = propertyInfo.getPropertyId(); String url = extractMetricsURL(metricsPath); Set<String> set; if (!result.containsKey(url)) { set = new HashSet<>(); result.put(url, set); } else { set = result.get(url); } set.add(requestedPropertyId); } return result; } /** * Extracts requested properties from a parsed {@link Map} of {@link String}. 
* * @param requestedPropertyIds * a set of property IDs that should be fetched for this URL * @param resource * all extracted values are placed into resource */ private void extractValuesFromJSON(Map<String, String> jsonMap, Set<String> requestedPropertyIds, Resource resource, Map<String, PropertyInfo> propertyInfos) throws AmbariException { Type type = new TypeToken<Map<Object, Object>>() {}.getType(); for (String requestedPropertyId : requestedPropertyIds) { PropertyInfo propertyInfo = propertyInfos.get(requestedPropertyId); String metricsPath = propertyInfo.getPropertyId(); String documentPath = extractDocumentPath(metricsPath); String[] docPath = documentPath.split(DOCUMENT_PATH_SEPARATOR); Map<String, String> subMap = jsonMap; for (int i = 0; i < docPath.length; i++) { String pathElement = docPath[i]; if (!subMap.containsKey(pathElement)) { String message = String.format( "Can not fetch %dth element of document path (%s) " + "from json. Wrong metrics path: %s", i, pathElement, metricsPath); throw new AmbariException(message); } Object jsonSubElement = jsonMap.get(pathElement); if (i == docPath.length - 1) { // Reached target document section // Extract property value resource.setProperty(requestedPropertyId, jsonSubElement); } else { // Navigate to relevant document section subMap = gson.fromJson((JsonElement) jsonSubElement, type); } } } } }
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.execution.testframework.sm.runner; import com.intellij.execution.Location; import com.intellij.execution.process.ProcessHandler; import com.intellij.execution.process.ProcessOutputTypes; import com.intellij.execution.testframework.*; import com.intellij.execution.testframework.sm.SMStacktraceParser; import com.intellij.execution.testframework.sm.SMStacktraceParserEx; import com.intellij.execution.testframework.sm.runner.events.TestFailedEvent; import com.intellij.execution.testframework.sm.runner.states.*; import com.intellij.execution.testframework.sm.runner.ui.TestsPresentationUtil; import com.intellij.execution.testframework.stacktrace.DiffHyperlink; import com.intellij.ide.util.EditSourceUtil; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.project.DumbService; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VirtualFileManager; import com.intellij.pom.Navigatable; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.ContainerUtilRt; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.ArrayList; import java.util.Collections; import java.util.List; /** * Represents a test result tree node. * Not thread-safe. All methods should be called in EDT only. 
 *
 * @author Roman Chernyatchik
 */
public class SMTestProxy extends AbstractTestProxy {
  public static final Key<String> NODE_ID = Key.create("test.proxy.id");
  private static final Logger LOG = Logger.getInstance(SMTestProxy.class.getName());

  // Immutable identity of this node.
  private final String myName;
  private boolean myIsSuite; // may be promoted to suite later, see setSuiteStarted()
  private final String myLocationUrl;
  private final String myMetainfo;
  private final boolean myPreservePresentableName;

  // Tree structure; myChildren is lazily created in addChild().
  private List<SMTestProxy> myChildren;
  private SMTestProxy myParent;

  // Current run state; starts as "not run".
  private AbstractState myState = NotRunState.getInstance();
  private Long myDuration = null; // duration is unknown
  private boolean myDurationIsCached = false; // is used for separating unknown and unset duration
  private boolean myHasCriticalErrors = false;
  private boolean myHasPassedTests = false;
  private boolean myHasPassedTestsCached = false;
  private String myStacktrace;
  private String myErrorMessage;
  private boolean myIsEmptyIsCached = false; // is used for separating unknown and unset values
  private boolean myIsEmpty = true;
  private SMTestLocator myLocator = null;
  private Printer myPreferredPrinter = null;
  private String myPresentableName;
  private boolean myConfig = false;
  // false:: printables appear as soon as they are discovered in the output;
  // true :: predefined test structure
  private boolean myTreeBuildBeforeStart = false;

  public SMTestProxy(String testName, boolean isSuite, @Nullable String locationUrl) {
    this(testName, isSuite, locationUrl, false);
  }

  public SMTestProxy(String testName, boolean isSuite, @Nullable String locationUrl, boolean preservePresentableName) {
    this(testName, isSuite, locationUrl, null, preservePresentableName);
  }

  public SMTestProxy(String testName, boolean isSuite, @Nullable String locationUrl,
                     @Nullable String metainfo, boolean preservePresentableName) {
    myName = testName;
    myIsSuite = isSuite;
    myLocationUrl = locationUrl;
    myMetainfo = metainfo;
    myPreservePresentableName = preservePresentableName;
  }

  public boolean isPreservePresentableName() {
    return myPreservePresentableName;
  }

  public void setLocator(@NotNull SMTestLocator testLocator) {
    myLocator = testLocator;
  }

  public void setConfig(boolean config) {
    myConfig = config;
  }

  public void setPreferredPrinter(@NotNull Printer preferredPrinter) {
    myPreferredPrinter = preferredPrinter;
  }

  public boolean isInProgress() {
    return myState.isInProgress();
  }

  public boolean isDefect() {
    return myState.isDefect();
  }

  public boolean shouldRun() {
    return true;
  }

  public int getMagnitude() {
    // Is used by some of Tests Filters
    //WARN: It is Hack, see PoolOfTestStates, API is necessary
    return getMagnitudeInfo().getValue();
  }

  public TestStateInfo.Magnitude getMagnitudeInfo() {
    return myState.getMagnitude();
  }

  public boolean hasErrors() {
    return myHasCriticalErrors;
  }

  /**
   * @return true if the state is final (PASSED, FAILED, IGNORED, TERMINATED)
   */
  public boolean isFinal() {
    return myState.isFinal();
  }

  // Only the first stacktrace reported for this node is kept.
  private void setStacktraceIfNotSet(@Nullable String stacktrace) {
    if (myStacktrace == null) myStacktrace = stacktrace;
  }

  @Nullable
  public String getStacktrace() {
    return myStacktrace;
  }

  public String getErrorMessage() {
    return myErrorMessage;
  }

  public SMTestLocator getLocator() {
    return myLocator;
  }

  public boolean isLeaf() {
    return myChildren == null || myChildren.isEmpty();
  }

  /**
   * Whether this node or any descendant passed. The answer is cached once
   * the node's state is final (no longer in progress).
   */
  @Override
  public boolean hasPassedTests() {
    if (myHasPassedTestsCached) {
      return myHasPassedTests;
    }
    boolean hasPassedTests = calcPassedTests();
    // Caching is only safe once the run can no longer change the answer.
    boolean canCache = !myState.isInProgress();
    if (canCache) {
      myHasPassedTests = hasPassedTests;
      myHasPassedTestsCached = true;
    }
    return hasPassedTests;
  }

  @Override
  public boolean isInterrupted() {
    return myState.wasTerminated();
  }

  // Recursive, uncached computation behind hasPassedTests().
  private boolean calcPassedTests() {
    if (isPassed()) {
      return true;
    }
    for (SMTestProxy child : getChildren()) {
      if (child.hasPassedTests()) {
        return true;
      }
    }
    return false;
  }

  @Override
  public boolean isIgnored() {
    return myState.getMagnitude() == TestStateInfo.Magnitude.IGNORED_INDEX;
  }

  // SKIPPED, COMPLETE and PASSED all count as "passed" here.
  public boolean isPassed() {
    return myState.getMagnitude() == TestStateInfo.Magnitude.SKIPPED_INDEX ||
           myState.getMagnitude() == TestStateInfo.Magnitude.COMPLETE_INDEX ||
           myState.getMagnitude() == TestStateInfo.Magnitude.PASSED_INDEX;
  }

  /**
   * Attaches a child node to this proxy (EDT only) and wires up its printing:
   * the child's future output is linked into this node's printable sequence,
   * and printers are propagated unless the root prints its own content only.
   */
  public void addChild(@NotNull SMTestProxy child) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    if (myChildren == null) {
      myChildren = ContainerUtil.newArrayListWithCapacity(4);
    }
    myChildren.add(child);

    // add printable
    //
    // add link to child's future output in correct place
    // actually if after this suite will obtain output
    // it will place it after this child and before future child
    addLast(child);

    // add child
    //
    //TODO reset children cache
    child.setParent(this);

    boolean printOwnContentOnly = this instanceof SMRootTestProxy && ((SMRootTestProxy)this).shouldPrintOwnContentOnly();
    if (!printOwnContentOnly) {
      child.setPrinter(myPrinter);
    }
    if (myPreferredPrinter != null && child.myPreferredPrinter == null) {
      child.setPreferredPrinter(myPreferredPrinter);
    }
  }

  // The preferred printer wins over the passed-in one, but only when a
  // printer is actually available.
  @Nullable
  private Printer getRightPrinter(@Nullable Printer printer) {
    if (myPreferredPrinter != null && printer != null) {
      return myPreferredPrinter;
    }
    return printer;
  }

  public void setPrinter(Printer printer) {
    super.setPrinter(getRightPrinter(printer));
  }

  public String getName() {
    return myName;
  }

  @Override
  public boolean isConfig() {
    return myConfig;
  }

  @Nullable
  public Location getLocation(@NotNull Project project, @NotNull GlobalSearchScope searchScope) {
    //determines location of test proxy
    return getLocation(project, searchScope, myLocationUrl);
  }

  /**
   * Resolves a location URL to a source {@link Location} via the configured
   * {@link SMTestLocator}. Skipped during indexing unless the locator is
   * dumb-aware; returns the first location the locator yields, or null.
   */
  protected Location getLocation(@NotNull Project project, @NotNull GlobalSearchScope searchScope, String locationUrl) {
    if (locationUrl != null && myLocator != null) {
      String protocolId = VirtualFileManager.extractProtocol(locationUrl);
      if (protocolId != null) {
        String path = VirtualFileManager.extractPath(locationUrl);
        if (!DumbService.isDumb(project) || DumbService.isDumbAware(myLocator)) {
          return DumbService.getInstance(project).computeWithAlternativeResolveEnabled(() -> {
            List<Location> locations = myLocator.getLocation(protocolId, path, myMetainfo, project, searchScope);
            return !locations.isEmpty() ? locations.get(0) : null;
          });
        }
      }
    }
    return null;
  }

  @Nullable
  public Navigatable getDescriptor(@Nullable Location location, @NotNull TestConsoleProperties properties) {
    // by location gets navigatable element.
    // It can be file or place in file (e.g. when OPEN_FAILURE_LINE is enabled)
    if (location == null) return null;

    String stacktrace = myStacktrace;
    if (stacktrace != null && properties instanceof SMStacktraceParser && isLeaf()) {
      // Let the stacktrace parser pick a more precise navigation target
      // (e.g. the failing line) before falling back to the plain element.
      Navigatable result = properties instanceof SMStacktraceParserEx ?
                           ((SMStacktraceParserEx)properties).getErrorNavigatable(location, stacktrace) :
                           ((SMStacktraceParser)properties).getErrorNavigatable(location.getProject(), stacktrace);
      if (result != null) {
        return result;
      }
    }
    return EditSourceUtil.getDescriptor(location.getPsiElement());
  }

  public boolean isSuite() {
    return myIsSuite;
  }

  public SMTestProxy getParent() {
    return myParent;
  }

  public List<? extends SMTestProxy> getChildren() {
    return myChildren != null ? myChildren : Collections.emptyList();
  }

  // Returns this node plus all descendants, depth-first.
  public List<SMTestProxy> getAllTests() {
    final List<SMTestProxy> allTests = new ArrayList<>();
    allTests.add(this);
    for (SMTestProxy child : getChildren()) {
      allTests.addAll(child.getAllTests());
    }
    return allTests;
  }

  public void setStarted() {
    myState = !myIsSuite ? TestInProgressState.TEST : new SuiteInProgressState(this);
  }

  // Marks this node as a running suite, promoting it from "test" if needed.
  public void setSuiteStarted() {
    myState = new SuiteInProgressState(this);
    if (!myIsSuite) {
      myIsSuite = true;
    }
  }

  /**
   * Calculates and caches duration of test or suite
   *
   * @return null if duration is unknown, otherwise duration value in milliseconds;
   */
  @Nullable
  @Override
  public Long getDuration() {
    // Returns duration value for tests
    // or cached duration for suites
    if (myDurationIsCached || !isSuite()) {
      return myDuration;
    }

    //For suites counts and caches durations of its children. Also it evaluates partial duration,
    //i.e. if duration is unknown it will be ignored in summary value.
    //If duration for all children is unknown summary duration will be also unknown
    //if one of children is ignored - it's duration will be 0 and if child wasn't run,
    //then it's duration will be unknown
    myDuration = calcSuiteDuration();
    myDurationIsCached = true;

    return myDuration;
  }

  @Nullable
  @Override
  public String getDurationString(TestConsoleProperties consoleProperties) {
    switch (getMagnitudeInfo()) {
      case PASSED_INDEX:
      case RUNNING_INDEX:
        // Hidden when "hide passed tests" is on and this node isn't a defect.
        return !isSubjectToHide(consoleProperties) ? getDurationString() : null;
      case COMPLETE_INDEX:
      case FAILED_INDEX:
      case ERROR_INDEX:
      case IGNORED_INDEX:
      case SKIPPED_INDEX:
      case TERMINATED_INDEX:
        return getDurationString();
      default:
        return null;
    }
  }

  private boolean isSubjectToHide(TestConsoleProperties consoleProperties) {
    return TestConsoleProperties.HIDE_PASSED_TESTS.value(consoleProperties) && getParent() != null && !isDefect();
  }

  private String getDurationString() {
    final Long duration = getDuration();
    // "\u2009" is a thin space between the number and the unit.
    return duration != null ? StringUtil.formatDuration(duration.longValue(), "\u2009") : null;
  }

  @Override
  public boolean shouldSkipRootNodeForExport() {
    return true;
  }

  /**
   * Sets duration of test
   *
   * @param duration In milliseconds
   */
  public void setDuration(final long duration) {
    if (!isSuite()) {
      // Propagate the delta up so cached parent-suite sums stay correct.
      invalidateCachedDurationForContainerSuites(duration - (myDuration != null ? myDuration : 0));
      myDurationIsCached = true;
      myDuration = (duration >= 0) ? duration : null;
      return;
    }
    else {
      invalidateCachedDurationForContainerSuites(-1);
    }

    // Not allow to directly set duration for suites.
    // It should be the sum of children. This requirement is only
    // for safety of current model and may be changed
    LOG.warn("Unsupported operation");
  }

  public void setFinished() {
    if (myState.isFinal()) {
      // we shouldn't fire new printable because final state
      // has been already fired
      return;
    }

    if (!isSuite()) {
      // if isn't in other finished state (ignored, failed or passed)
      myState = TestPassedState.INSTANCE;
    }
    else {
      //Test Suite
      myState = determineSuiteStateOnFinished();
    }
    // prints final state additional info
    fireOnNewPrintable(myState);
  }

  /**
   * Records a failure for this test. Multiple failures are accumulated:
   * a comparison failure plus a plain failure becomes a compound state,
   * further failures are added to the existing (compound) failed state.
   */
  public void setTestFailed(@NotNull String localizedMessage, @Nullable String stackTrace, boolean testError) {
    setStacktraceIfNotSet(stackTrace);
    myErrorMessage = localizedMessage;
    TestFailedState failedState = new TestFailedState(localizedMessage, stackTrace);
    if (myState instanceof TestComparisionFailedState) {
      CompoundTestFailedState states = new CompoundTestFailedState(localizedMessage, stackTrace);
      states.addFailure((TestFailedState)myState);
      states.addFailure(failedState);
      fireOnNewPrintable(failedState);
      myState = states;
    }
    else if (myState instanceof CompoundTestFailedState) {
      ((CompoundTestFailedState)myState).addFailure(failedState);
      fireOnNewPrintable(failedState);
    }
    else if (myState instanceof TestFailedState) {
      ((TestFailedState)myState).addError(localizedMessage, stackTrace, myPrinter);
    }
    else {
      myState = testError ? new TestErrorState(localizedMessage, stackTrace) : failedState;
      fireOnNewPrintable(myState);
    }
  }

  public void setTestComparisonFailed(@NotNull final String localizedMessage, @Nullable final String stackTrace,
                                      @NotNull final String actualText, @NotNull final String expectedText) {
    setTestComparisonFailed(localizedMessage, stackTrace, actualText, expectedText, null, null);
  }

  public void setTestComparisonFailed(@NotNull final String localizedMessage, @Nullable final String stackTrace,
                                      @NotNull final String actualText, @NotNull final String expectedText,
                                      @NotNull final TestFailedEvent event) {
    TestComparisionFailedState comparisionFailedState =
      setTestComparisonFailed(localizedMessage, stackTrace, actualText, expectedText,
                              event.getExpectedFilePath(), event.getActualFilePath());
    // Temp expected/actual files are deleted together with the state.
    comparisionFailedState.setToDeleteExpectedFile(event.isExpectedFileTemp());
    comparisionFailedState.setToDeleteActualFile(event.isActualFileTemp());
  }

  /**
   * Records an expected/actual comparison failure; accumulates with existing
   * failures the same way as {@link #setTestFailed}.
   *
   * @return the created comparison-failed state (for further configuration)
   */
  public TestComparisionFailedState setTestComparisonFailed(@NotNull final String localizedMessage,
                                                            @Nullable final String stackTrace,
                                                            @NotNull final String actualText,
                                                            @NotNull final String expectedText,
                                                            @Nullable final String expectedFilePath,
                                                            @Nullable final String actualFilePath) {
    setStacktraceIfNotSet(stackTrace);
    myErrorMessage = localizedMessage;
    final TestComparisionFailedState comparisionFailedState =
      new TestComparisionFailedState(localizedMessage, stackTrace, actualText, expectedText, expectedFilePath, actualFilePath);
    if (myState instanceof CompoundTestFailedState) {
      ((CompoundTestFailedState)myState).addFailure(comparisionFailedState);
    }
    else if (myState instanceof TestFailedState) {
      final CompoundTestFailedState states = new CompoundTestFailedState(localizedMessage, stackTrace);
      states.addFailure((TestFailedState)myState);
      states.addFailure(comparisionFailedState);
      myState = states;
    }
    else {
      myState = comparisionFailedState;
    }
    fireOnNewPrintable(comparisionFailedState);
    return comparisionFailedState;
  }

  @Override
  public void dispose() {
    // Failed states hold disposable resources (e.g. temp diff files).
    if (myState instanceof TestFailedState) {
      Disposer.dispose((TestFailedState)myState);
    }
    super.dispose();
  }

  public void setTestIgnored(@Nullable String ignoreComment, @Nullable String stackTrace) {
    setStacktraceIfNotSet(stackTrace);
    myState = new TestIgnoredState(ignoreComment, stackTrace);
    fireOnNewPrintable(myState);
  }

  public void setParent(@Nullable final SMTestProxy parent) {
    myParent = parent;
  }

  public List<? extends SMTestProxy> collectChildren(@Nullable final Filter<SMTestProxy> filter) {
    return filterChildren(filter, collectChildren());
  }

  // All descendants (not including this node), depth-first.
  public List<? extends SMTestProxy> collectChildren() {
    final List<? extends SMTestProxy> allChildren = getChildren();

    final List<SMTestProxy> result = ContainerUtilRt.newArrayList();
    result.addAll(allChildren);

    for (SMTestProxy p : allChildren) {
      result.addAll(p.collectChildren());
    }

    return result;
  }

  public List<? extends SMTestProxy> getChildren(@Nullable Filter<? super SMTestProxy> filter) {
    return filterChildren(filter, getChildren());
  }

  /**
   * Inserts a printable after the last already-finished child when the tree
   * was built up-front; otherwise simply appends it.
   */
  protected void addAfterLastPassed(Printable printable) {
    if (myTreeBuildBeforeStart) {
      int idx = 0;
      synchronized (myNestedPrintables) {
        for (Printable proxy : myNestedPrintables) {
          if (proxy instanceof SMTestProxy && !((SMTestProxy)proxy).isFinal()) {
            break;
          }
          idx++;
        }
      }
      insert(printable, idx);
    }
    else {
      addLast(printable);
    }
  }

  public void setTreeBuildBeforeStart() {
    myTreeBuildBeforeStart = true;
  }

  private static List<? extends SMTestProxy> filterChildren(@Nullable Filter<? super SMTestProxy> filter,
                                                            List<? extends SMTestProxy> allChildren) {
    if (filter == Filter.NO_FILTER || filter == null) {
      return allChildren;
    }

    final List<SMTestProxy> selectedChildren = new ArrayList<>();
    for (SMTestProxy child : allChildren) {
      if (filter.shouldAccept(child)) {
        selectedChildren.add(child);
      }
    }

    if ((selectedChildren.isEmpty())) {
      return Collections.emptyList();
    }
    return selectedChildren;
  }

  public boolean wasLaunched() {
    return myState.wasLaunched();
  }

  /**
   * Prints this proxy and all its children on given printer
   *
   * @param printer Printer
   */
  public void printOn(final Printer printer) {
    final Printer rightPrinter = getRightPrinter(printer);
    super.printOn(rightPrinter);
    printState(myState, rightPrinter);
  }

  @Override
  public void printOwnPrintablesOn(Printer printer) {
    if (isLeaf()) {
      super.printOn(printer);
    }
    else {
      super.printOwnPrintablesOn(printer);
    }
    printState(myState, printer);
  }

  private static void printState(final AbstractState oldState, final Printer rightPrinter) {
    invokeInAlarm(() -> {
      //Tests State, that provide and formats additional output
      oldState.printOn(rightPrinter);
    });
  }

  /**
   * @deprecated use {@link #addOutput(String, Key)}
   */
  @Deprecated
  public void addStdOutput(final String output, final Key outputType) {
    addOutput(output, outputType);
  }

  public final void addStdOutput(@NotNull String output) {
    addOutput(output, ProcessOutputTypes.STDOUT);
  }

  public final void addStdErr(@NotNull String output) {
    addOutput(output, ProcessOutputTypes.STDERR);
  }

  public final void addSystemOutput(final String output) {
    addOutput(output, ProcessOutputTypes.SYSTEM);
  }

  // Output is stored as a deferred printable so ANSI coloring is applied
  // when it is actually printed.
  public void addOutput(@NotNull String output, @NotNull Key outputType) {
    addAfterLastPassed(new Printable() {
      public void printOn(@NotNull Printer printer) {
        printer.printWithAnsiColoring(output, outputType);
      }
    });
  }

  public void addError(final String output, @Nullable final String stackTrace, boolean isCritical) {
    myHasCriticalErrors = isCritical;
    if (isCritical) {
      invalidateCachedHasErrorMark();
    }
    setStacktraceIfNotSet(stackTrace);

    addAfterLastPassed(new Printable() {
      public void printOn(final Printer printer) {
        String errorText = TestFailedState.buildErrorPresentationText(output, stackTrace);
        if (errorText != null) {
          TestFailedState.printError(printer, Collections.singletonList(errorText));
        }
      }
    });
  }

  // Marks this node and all ancestors (up to the first already-marked one)
  // as having critical errors.
  private void invalidateCachedHasErrorMark() {
    myHasCriticalErrors = true;
    // Invalidates hasError state of container suite
    final SMTestProxy containerSuite = getParent();
    if (containerSuite != null && !containerSuite.hasErrors()) {
      containerSuite.invalidateCachedHasErrorMark();
    }
  }

  // Lazily computed and cached display name.
  @NotNull
  public String getPresentableName() {
    if (myPresentableName == null) {
      if (myPreservePresentableName) {
        myPresentableName = TestsPresentationUtil.getPresentableNameTrimmedOnly(this);
      }
      else {
        myPresentableName = TestsPresentationUtil.getPresentableName(this);
      }
    }
    return myPresentableName;
  }

  @Override
  @Nullable
  public DiffHyperlink getDiffViewerProvider() {
    if (myState instanceof TestComparisionFailedState) {
      return ((TestComparisionFailedState)myState).getHyperlink();
    }

    if (myState instanceof CompoundTestFailedState) {
      return ((CompoundTestFailedState)myState).getHyperlinks().get(0);
    }
    return null;
  }

  @NotNull
  @Override
  public List<DiffHyperlink> getDiffViewerProviders() {
    if (myState instanceof CompoundTestFailedState) {
      return ((CompoundTestFailedState)myState).getHyperlinks();
    }
    return super.getDiffViewerProviders();
  }

  @Override
  public String toString() {
    return getPresentableName();
  }

  /**
   * Process was terminated
   */
  public void setTerminated() {
    if (myState.isFinal()) {
      return;
    }

    myState = TerminatedState.INSTANCE;
    // Termination propagates to the whole subtree.
    final List<? extends SMTestProxy> children = getChildren();
    for (SMTestProxy child : children) {
      child.setTerminated();
    }
    fireOnNewPrintable(myState);
  }

  public boolean wasTerminated() {
    return myState.wasTerminated();
  }

  @Nullable
  public String getLocationUrl() {
    return myLocationUrl;
  }

  @Nullable
  public String getMetainfo() {
    return myMetainfo;
  }

  /**
   * Check if suite contains error tests or suites
   *
   * @return True if contains
   */
  private boolean containsErrorTests() {
    final List<? extends SMTestProxy> children = getChildren();
    for (SMTestProxy child : children) {
      if (child.getMagnitudeInfo() == TestStateInfo.Magnitude.ERROR_INDEX) {
        return true;
      }
    }
    return false;
  }

  // Direct children only — not recursive.
  private boolean containsFailedTests() {
    final List<? extends SMTestProxy> children = getChildren();
    for (SMTestProxy child : children) {
      if (child.getMagnitudeInfo() == TestStateInfo.Magnitude.FAILED_INDEX) {
        return true;
      }
    }
    return false;
  }

  /**
   * Determines site state after it has been finished
   *
   * @return New state
   */
  protected AbstractState determineSuiteStateOnFinished() {
    final AbstractState state;
    if (isLeaf()) {
      state = SuiteFinishedState.EMPTY_LEAF_SUITE;
    }
    else if (isEmptySuite()) {
      state = SuiteFinishedState.EMPTY_SUITE;
    }
    else {
      if (isDefect()) {
        // Test suit contains errors if at least one of its tests contains error
        if (containsErrorTests()) {
          state = SuiteFinishedState.ERROR_SUITE;
        }
        else {
          // if suite contains failed tests - all suite should be
          // consider as failed
          state = containsFailedTests() ? SuiteFinishedState.FAILED_SUITE : SuiteFinishedState.WITH_IGNORED_TESTS_SUITE;
        }
      }
      else {
        state = SuiteFinishedState.PASSED_SUITE;
      }
    }
    return state;
  }

  /**
   * A suite is empty when it (transitively) contains no tests. The result is
   * cached; for the "all child suites empty" case it is cached only once this
   * node's state is final, because more children may still be added.
   */
  public boolean isEmptySuite() {
    if (myIsEmptyIsCached) {
      return myIsEmpty;
    }

    if (!isSuite()) {
      // test - no matter what we will return
      myIsEmpty = true;
      myIsEmptyIsCached = true;
      return true;
    }

    myIsEmpty = true;
    final List<? extends SMTestProxy> allTestCases = getChildren();
    for (SMTestProxy testOrSuite : allTestCases) {
      if (testOrSuite.isSuite()) {
        // suite
        if (!testOrSuite.isEmptySuite()) {
          // => parent suite isn't empty
          myIsEmpty = false;
          myIsEmptyIsCached = true;
          break;
        }
        // all suites are empty
        myIsEmpty = true;
        // we can cache only final state, otherwise test may be added
        myIsEmptyIsCached = myState.isFinal();
      }
      else {
        // test => parent suite isn't empty
        myIsEmpty = false;
        myIsEmptyIsCached = true;
        break;
      }
    }
    return myIsEmpty;
  }

  // Sums known child durations; null only when no child duration is known.
  @Nullable
  private Long calcSuiteDuration() {
    long partialDuration = 0;
    boolean durationOfChildrenIsUnknown = true;

    for (SMTestProxy child : getChildren()) {
      final Long duration = child.getDuration();
      if (duration != null) {
        durationOfChildrenIsUnknown = false;
        partialDuration += duration.longValue();
      }
    }
    // Lets convert partial duration in integer object. Negative partial duration
    // means that duration of all children is unknown
    return durationOfChildrenIsUnknown ? null : partialDuration;
  }

  /**
   * Recursively invalidates cached duration for container(parent) suites or updates their value
   * @param duration delta in milliseconds to add; a negative value means
   *                 "invalidate" — the cached sums are dropped instead
   */
  private void invalidateCachedDurationForContainerSuites(long duration) {
    if (duration >= 0) {
      if (myDuration == null) {
        myDuration = duration;
      }
      else {
        myDuration += duration;
      }
    }
    else {
      // Invalidates duration of this suite
      myDuration = null;
      myDurationIsCached = false;
    }

    // Invalidates duration of container suite
    final SMTestProxy containerSuite = getParent();
    if (containerSuite != null) {
      containerSuite.invalidateCachedDurationForContainerSuites(duration);
    }
  }

  // Walks up the parent chain to the enclosing root node, if any.
  public SMRootTestProxy getRoot() {
    SMTestProxy parent = getParent();
    while (parent != null && !(parent instanceof SMRootTestProxy)) {
      parent = parent.getParent();
    }
    return parent != null ? (SMRootTestProxy)parent : null;
  }

  /**
   * The invisible "[root]" node of a test tree. Tracks whether a test reporter
   * actually attached, the run's presentation/comment, and the process handler.
   */
  public static class SMRootTestProxy extends SMTestProxy implements TestProxyRoot {
    private boolean myTestsReporterAttached; // false by default

    private String myPresentation;
    private String myComment;
    private String myRootLocationUrl;
    private ProcessHandler myHandler;
    private boolean myShouldPrintOwnContentOnly = false;

    public SMRootTestProxy() {
      this(false);
    }

    public SMRootTestProxy(boolean preservePresentableName) {
      super("[root]", true, null, preservePresentableName);
    }

    public void setTestsReporterAttached() {
      myTestsReporterAttached = true;
    }

    public boolean isTestsReporterAttached() {
      return myTestsReporterAttached;
    }

    @Override
    public String getPresentation() {
      return myPresentation;
    }

    public void setPresentation(String presentation) {
      myPresentation = presentation;
    }

    public void setComment(String comment) {
      myComment = comment;
    }

    @Override
    public String getComment() {
      return myComment;
    }

    public void setRootLocationUrl(String locationUrl) {
      myRootLocationUrl = locationUrl;
    }

    @Override
    public String getRootLocation() {
      return myRootLocationUrl;
    }

    public ProcessHandler getHandler() {
      return myHandler;
    }

    @Override
    public void setHandler(ProcessHandler handler) {
      myHandler = handler;
    }

    @Nullable
    @Override
    public Location getLocation(@NotNull Project project, @NotNull GlobalSearchScope searchScope) {
      // The root may carry its own location URL set from outside.
      return myRootLocationUrl != null ? super.getLocation(project, searchScope, myRootLocationUrl)
                                       : super.getLocation(project, searchScope);
    }

    @Override
    protected AbstractState determineSuiteStateOnFinished() {
      // No children and no reporter ever attached => surface that as a
      // dedicated diagnostic state instead of "empty suite".
      if (isLeaf() && !isTestsReporterAttached()) {
        return SuiteFinishedState.TESTS_REPORTER_NOT_ATTACHED;
      }
      return super.determineSuiteStateOnFinished();
    }

    public void testingRestarted() {
      if (!getChildren().isEmpty()) {
        getChildren().clear();
      }
      clear();
    }

    boolean shouldPrintOwnContentOnly() {
      return myShouldPrintOwnContentOnly;
    }

    public void setShouldPrintOwnContentOnly(boolean shouldPrintOwnContentOnly) {
      myShouldPrintOwnContentOnly = shouldPrintOwnContentOnly;
    }

    public void printOn(@NotNull Printer printer) {
      if (myShouldPrintOwnContentOnly) {
        printOwnPrintablesOn(printer, false);
      }
      else {
        super.printOn(printer);
      }
    }
  }
}
/*
 * Copyright 2000-2012 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.codeInsight.actions;

import com.intellij.application.options.editor.EditorOptions;
import com.intellij.codeInsight.CodeInsightBundle;
import com.intellij.formatting.FormattingModelBuilder;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.lang.LanguageFormatting;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationListener;
import com.intellij.notification.NotificationType;
import com.intellij.notification.Notifications;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.SelectionModel;
import com.intellij.openapi.editor.ex.EditorSettingsExternalizable;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.options.ShowSettingsUtil;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vfs.ReadonlyStatusHandler;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.IdeFrame;
import com.intellij.openapi.wm.ex.WindowManagerEx;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.arrangement.engine.ArrangementEngine;
import com.intellij.psi.util.PsiUtilCore;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;

import javax.swing.*;
import javax.swing.event.HyperlinkEvent;
import java.util.ArrayList;
import java.util.Collections;

/**
 * IDE action that reformats code in the current selection, file, set of selected files,
 * module, or whole project, optionally optimizing imports and rearranging (arranging)
 * entries afterwards. The scope is derived from the {@link DataContext}: an open editor
 * wins, then a multi-file selection, then a module/project context, then a bare PSI element.
 * Marked {@link DumbAware} so it stays available during indexing (though import optimization
 * is explicitly skipped while dumb).
 */
public class ReformatCodeAction extends AnAction implements DumbAware {
  // Help topic shown from the layout-code dialog.
  private static final @NonNls String HELP_ID = "editing.codeReformatting";

  @Override
  public void actionPerformed(AnActionEvent event) {
    DataContext dataContext = event.getDataContext();
    final Project project = CommonDataKeys.PROJECT.getData(dataContext);
    if (project == null) {
      return;
    }
    // Flush in-memory document changes into PSI before any processing.
    PsiDocumentManager.getInstance(project).commitAllDocuments();
    final Editor editor = CommonDataKeys.EDITOR.getData(dataContext);
    final VirtualFile[] files = CommonDataKeys.VIRTUAL_FILE_ARRAY.getData(dataContext);
    if (files == null) {
      return;
    }
    PsiFile file = null;
    final PsiDirectory dir;
    boolean hasSelection = false;
    if (editor != null){
      // Case 1: invoked from an open editor — operate on that file (or its selection).
      file = PsiDocumentManager.getInstance(project).getPsiFile(editor.getDocument());
      if (file == null) return;
      dir = file.getContainingDirectory();
      hasSelection = editor.getSelectionModel().hasSelection();
    }
    else if (areFiles(files)) {
      // Case 2: a multi-file (>= 2, no directories) selection — show the batch dialog
      // and process the chosen files, then return; nothing else to do below.
      final ReadonlyStatusHandler.OperationStatus operationStatus = ReadonlyStatusHandler.getInstance(project).ensureFilesWritable(files);
      if (!operationStatus.hasReadonlyFiles()) {
        final ReformatFilesDialog reformatFilesDialog = new ReformatFilesDialog(project, files);
        reformatFilesDialog.show();
        if (!reformatFilesDialog.isOK()) return;
        // Import optimization requires indices, so it is skipped in dumb mode.
        if (reformatFilesDialog.optimizeImports() && !DumbService.getInstance(project).isDumb()) {
          new ReformatAndOptimizeImportsProcessor(
            project, convertToPsiFiles(files, project), reformatFilesDialog.isProcessOnlyChangedText()
          ).run();
        }
        else {
          new ReformatCodeProcessor(project, convertToPsiFiles(files, project), null, reformatFilesDialog.isProcessOnlyChangedText()).run();
        }
      }
      return;
    }
    else {
      // Case 3: module/project context (e.g. Project view on a module or project node).
      Project projectContext = PlatformDataKeys.PROJECT_CONTEXT.getData(dataContext);
      Module moduleContext = LangDataKeys.MODULE_CONTEXT.getData(dataContext);
      if (projectContext != null || moduleContext != null) {
        final String text;
        if (moduleContext != null) {
          text = CodeInsightBundle.message("process.scope.module", moduleContext.getModuleFilePath());
        }
        else {
          text = CodeInsightBundle.message("process.scope.project", project.getPresentableUrl());
        }
        LayoutProjectCodeDialog dialog = new LayoutProjectCodeDialog(project, moduleContext, CodeInsightBundle.message("process.reformat.code"), text, true);
        dialog.show();
        if (!dialog.isOK()) return;
        if (dialog.isOptimizeImports() && !DumbService.getInstance(project).isDumb()) {
          if (moduleContext != null) {
            new ReformatAndOptimizeImportsProcessor(project, moduleContext, dialog.isProcessOnlyChangedText()).run();
          }
          else {
            new ReformatAndOptimizeImportsProcessor(project, dialog.isProcessOnlyChangedText()).run();
          }
        }
        else {
          if (moduleContext != null) {
            new ReformatCodeProcessor(project, moduleContext, dialog.isProcessOnlyChangedText()).run();
          }
          else {
            new ReformatCodeProcessor(project, dialog.isProcessOnlyChangedText()).run();
          }
        }
        return;
      }
      // Case 4: fall back to the focused PSI element (file or directory node).
      PsiElement element = CommonDataKeys.PSI_ELEMENT.getData(dataContext);
      if (element == null) return;
      if (element instanceof PsiDirectoryContainer) {
        // e.g. a package node spanning several source roots; first directory is used.
        dir = ((PsiDirectoryContainer)element).getDirectories()[0];
      }
      else if (element instanceof PsiDirectory) {
        dir = (PsiDirectory)element;
      }
      else {
        file = element.getContainingFile();
        if (file == null) return;
        dir = file.getContainingDirectory();
      }
    }
    // Defaults for the single-file/directory path; may be overridden by the dialog below.
    boolean optimizeImports = ReformatFilesDialog.isOptmizeImportsOptionOn();
    boolean processWholeFile = false;
    boolean processChangedTextOnly = PropertiesComponent.getInstance().getBoolean(LayoutCodeConstants.PROCESS_CHANGED_TEXT_KEY, false);
    boolean rearrangeEntries = PropertiesComponent.getInstance().getBoolean(LayoutCodeConstants.REARRANGE_ENTRIES_KEY, false);
    final boolean showDialog = EditorSettingsExternalizable.getInstance().getOptions().SHOW_REFORMAT_DIALOG;
    // The dialog is forced when only a directory is available (there is nothing to
    // reformat silently without asking for the directory scope options).
    if (showDialog || (file == null && dir != null)) {
      final LayoutCodeDialog dialog = new LayoutCodeDialog(project, CodeInsightBundle.message("process.reformat.code"), file, dir, hasSelection ? Boolean.TRUE : Boolean.FALSE, HELP_ID);
      dialog.show();
      if (!dialog.isOK()) return;
      // Persist the "do not ask me again" choice and notify the user how to undo it.
      final boolean showDialogAtFuture = !dialog.isDoNotAskMe();
      EditorSettingsExternalizable.getInstance().getOptions().SHOW_REFORMAT_DIALOG = showDialogAtFuture;
      updateShowDialogSetting(dialog, "\"Reformat Code\" dialog disabled");
      optimizeImports = dialog.isOptimizeImports();
      rearrangeEntries = dialog.isRearrangeEntries();
      processWholeFile = dialog.isProcessWholeFile();
      processChangedTextOnly = dialog.isProcessOnlyChangedText();
      if (dialog.isProcessDirectory()){
        // Directory scope chosen: process it (optionally recursively) and stop here.
        if (optimizeImports) {
          new ReformatAndOptimizeImportsProcessor(project, dir, dialog.isIncludeSubdirectories(), processChangedTextOnly).run();
        }
        else {
          new ReformatCodeProcessor(project, dir, dialog.isIncludeSubdirectories(), processChangedTextOnly).run();
        }
        return;
      }
    }
    // Restrict to the editor selection unless the user asked for the whole file.
    final TextRange range;
    if (!processWholeFile && editor != null && editor.getSelectionModel().hasSelection()){
      range = TextRange.create(editor.getSelectionModel().getSelectionStart(), editor.getSelectionModel().getSelectionEnd());
    }
    else{
      range = null;
    }
    // Imports are only optimized for whole-file runs (range == null).
    if (optimizeImports && range == null) {
      if (file != null || dir == null) {
        new ReformatAndOptimizeImportsProcessor(project, file, processChangedTextOnly).run();
      }
      else {
        new ReformatAndOptimizeImportsProcessor(project, dir, true, processChangedTextOnly).run();
      }
    }
    else {
      new ReformatCodeProcessor(project, file, range, processChangedTextOnly).run();
    }
    // Optional post-step: rearrange members/entries over the selection (or whole document).
    if (rearrangeEntries && file != null && editor != null) {
      final ArrangementEngine engine = ServiceManager.getService(project, ArrangementEngine.class);
      try {
        final PsiFile finalFile = file;
        SelectionModel selectionModel = editor.getSelectionModel();
        final TextRange rangeToUse = selectionModel.hasSelection()
                                     ? TextRange.create(selectionModel.getSelectionStart(), selectionModel.getSelectionEnd())
                                     : TextRange.create(0, editor.getDocument().getTextLength());
        // Run as a command so the rearrangement is a single undoable step.
        CommandProcessor.getInstance().executeCommand(project, new Runnable() {
          @Override
          public void run() {
            engine.arrange(finalFile, Collections.singleton(rangeToUse));
          }
        }, getTemplatePresentation().getText(), null);
      }
      finally {
        // Keep document and PSI in sync even if arranging failed.
        PsiDocumentManager.getInstance(project).commitDocument(editor.getDocument());
      }
    }
  }

  /**
   * If the user checked "do not ask me again", posts a notification explaining where
   * the dialog can be re-enabled (IDE Settings -> Editor), with a clickable link that
   * opens that settings pane.
   *
   * @param dialog the layout-code dialog that was just closed
   * @param title  notification title describing which dialog was disabled
   */
  public static void updateShowDialogSetting(LayoutCodeDialog dialog, String title) {
    if (dialog.isDoNotAskMe()) {
      Notifications.Bus.notify(new Notification("Reformat Code", title,
                                                "<html>You can re-enable the dialog on the <a href=''>IDE Settings -> Editor</a> pane</html>",
                                                NotificationType.INFORMATION, new NotificationListener() {
        @Override
        public void hyperlinkUpdate(@NotNull Notification notification, @NotNull HyperlinkEvent e) {
          if (e.getEventType() == HyperlinkEvent.EventType.ACTIVATED) {
            final ShowSettingsUtil util = ShowSettingsUtil.getInstance();
            // null frame: find the best frame to parent the settings dialog against.
            IdeFrame ideFrame = WindowManagerEx.getInstanceEx().findFrameFor(null);
            util.editConfigurable((JFrame)ideFrame, new EditorOptions());
          }
        }
      }));
    }
  }

  /**
   * Maps virtual files to their PSI files, silently dropping files with no PSI
   * counterpart (e.g. binary or excluded files).
   *
   * @param files   virtual files to convert
   * @param project project used to look up the {@link PsiManager}
   * @return PSI files for every convertible input file
   */
  public static PsiFile[] convertToPsiFiles(final VirtualFile[] files,Project project) {
    final PsiManager manager = PsiManager.getInstance(project);
    final ArrayList<PsiFile> result = new ArrayList<PsiFile>();
    for (VirtualFile virtualFile : files) {
      final PsiFile psiFile = manager.findFile(virtualFile);
      if (psiFile != null) result.add(psiFile);
    }
    return PsiUtilCore.toPsiFileArray(result);
  }

  @Override
  public void update(AnActionEvent event){
    // Enable the action only when the context yields something formattable.
    Presentation presentation = event.getPresentation();
    DataContext dataContext = event.getDataContext();
    Project project = CommonDataKeys.PROJECT.getData(dataContext);
    if (project == null){
      presentation.setEnabled(false);
      return;
    }
    Editor editor = CommonDataKeys.EDITOR.getData(dataContext);
    final VirtualFile[] files = CommonDataKeys.VIRTUAL_FILE_ARRAY.getData(dataContext);
    if (editor != null){
      PsiFile file = PsiDocumentManager.getInstance(project).getPsiFile(editor.getDocument());
      if (file == null || file.getVirtualFile() == null) {
        presentation.setEnabled(false);
        return;
      }
      // Enabled if any formatter supports this file's language.
      if (LanguageFormatting.INSTANCE.forContext(file) != null) {
        presentation.setEnabled(true);
        return;
      }
    }
    else if (files!= null && areFiles(files)) {
      // Multi-file selection: require plain files and at least one with a formatter.
      boolean anyFormatters = false;
      for (VirtualFile virtualFile : files) {
        if (virtualFile.isDirectory()) {
          presentation.setEnabled(false);
          return;
        }
        final PsiFile psiFile = PsiManager.getInstance(project).findFile(virtualFile);
        if (psiFile == null) {
          presentation.setEnabled(false);
          return;
        }
        final FormattingModelBuilder builder = LanguageFormatting.INSTANCE.forContext(psiFile);
        if (builder != null) {
          anyFormatters = true;
        }
      }
      if (!anyFormatters) {
        presentation.setEnabled(false);
        return;
      }
    }
    else if (files != null && files.length == 1) {
      // skip. Both directories and single files are supported.
    }
    else if (LangDataKeys.MODULE_CONTEXT.getData(dataContext) == null &&
             PlatformDataKeys.PROJECT_CONTEXT.getData(dataContext) == null) {
      PsiElement element = CommonDataKeys.PSI_ELEMENT.getData(dataContext);
      if (element == null) {
        presentation.setEnabled(false);
        return;
      }
      if (!(element instanceof PsiDirectory)) {
        PsiFile file = element.getContainingFile();
        if (file == null || LanguageFormatting.INSTANCE.forContext(file) == null) {
          presentation.setEnabled(false);
          return;
        }
      }
    }
    presentation.setEnabled(true);
  }

  /**
   * @return {@code true} only for a selection of two or more plain files
   *         (no directories); single files are handled via the editor/PSI paths.
   */
  public static boolean areFiles(final VirtualFile[] files) {
    if (files == null) return false;
    if (files.length < 2) return false;
    for (VirtualFile virtualFile : files) {
      if (virtualFile.isDirectory()) return false;
    }
    return true;
  }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.composer.v1.model; /** * Specifies the selection and configuration of software inside the environment. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Cloud Composer API. For a detailed explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class SoftwareConfig extends com.google.api.client.json.GenericJson { /** * Optional. Apache Airflow configuration properties to override. Property keys contain the * section and property names, separated by a hyphen, for example "core- * dags_are_paused_at_creation". Section names must not contain hyphens ("-"), opening square * brackets ("["), or closing square brackets ("]"). The property name must not be empty and must * not contain an equals sign ("=") or semicolon (";"). Section and property names must not * contain a period ("."). Apache Airflow configuration property names must be written in * [snake_case](https://en.wikipedia.org/wiki/Snake_case). 
Property values can contain any * character, and can be written in any lower/upper case format. Certain Apache Airflow * configuration property values are [blocked](/composer/docs/concepts/airflow-configurations), * and cannot be overridden. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.Map<String, java.lang.String> airflowConfigOverrides; /** * Optional. Additional environment variables to provide to the Apache Airflow scheduler, worker, * and webserver processes. Environment variable names must match the regular expression `a-zA- * Z_*`. They cannot specify Apache Airflow software configuration overrides (they cannot match * the regular expression `AIRFLOW__[A-Z0-9_]+__[A-Z0-9_]+`), and they cannot match any of the * following reserved names: * `AIRFLOW_HOME` * `C_FORCE_ROOT` * `CONTAINER_NAME` * `DAGS_FOLDER` * * `GCP_PROJECT` * `GCS_BUCKET` * `GKE_CLUSTER_NAME` * `SQL_DATABASE` * `SQL_INSTANCE` * * `SQL_PASSWORD` * `SQL_PROJECT` * `SQL_REGION` * `SQL_USER` * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.Map<String, java.lang.String> envVariables; /** * The version of the software running in the environment. This encapsulates both the version of * Cloud Composer functionality and the version of Apache Airflow. It must match the regular * expression `composer-([0-9]+\.[0-9]+\.[0-9]+|latest)-airflow-[0-9]+\.[0-9]+(\.[0-9]+.*)?`. When * used as input, the server also checks if the provided version is supported and denies the * request for an unsupported version. The Cloud Composer portion of the version is a [semantic * version](https://semver.org) or `latest`. When the patch version is omitted, the current Cloud * Composer patch version is selected. When `latest` is provided instead of an explicit version * number, the server replaces `latest` with the current Cloud Composer version and stores that * version number in the same field. 
The portion of the image version that follows *airflow-* is * an official Apache Airflow repository [release name](https://github.com/apache/incubator- * airflow/releases). See also [Version List](/composer/docs/concepts/versioning/composer- * versions). * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String imageVersion; /** * Optional. Custom Python Package Index (PyPI) packages to be installed in the environment. Keys * refer to the lowercase package name such as "numpy" and values are the lowercase extras and * version specifier such as "==1.12.0", "[devel,gcp_api]", or "[devel]>=1.8.2, <1.9.2". To * specify a package without pinning it to a version specifier, use the empty string as the value. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.Map<String, java.lang.String> pypiPackages; /** * Optional. The major version of Python used to run the Apache Airflow scheduler, worker, and * webserver processes. Can be set to '2' or '3'. If not specified, the default is '3'. Cannot be * updated. This field is only supported for Cloud Composer environments in versions * composer-1.*.*-airflow-*.*.*. Environments in newer versions always use Python major version 3. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String pythonVersion; /** * Optional. The number of schedulers for Airflow. This field is supported for Cloud Composer * environments in versions composer-1.*.*-airflow-2.*.*. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Integer schedulerCount; /** * Optional. Apache Airflow configuration properties to override. Property keys contain the * section and property names, separated by a hyphen, for example "core- * dags_are_paused_at_creation". Section names must not contain hyphens ("-"), opening square * brackets ("["), or closing square brackets ("]"). 
The property name must not be empty and must * not contain an equals sign ("=") or semicolon (";"). Section and property names must not * contain a period ("."). Apache Airflow configuration property names must be written in * [snake_case](https://en.wikipedia.org/wiki/Snake_case). Property values can contain any * character, and can be written in any lower/upper case format. Certain Apache Airflow * configuration property values are [blocked](/composer/docs/concepts/airflow-configurations), * and cannot be overridden. * @return value or {@code null} for none */ public java.util.Map<String, java.lang.String> getAirflowConfigOverrides() { return airflowConfigOverrides; } /** * Optional. Apache Airflow configuration properties to override. Property keys contain the * section and property names, separated by a hyphen, for example "core- * dags_are_paused_at_creation". Section names must not contain hyphens ("-"), opening square * brackets ("["), or closing square brackets ("]"). The property name must not be empty and must * not contain an equals sign ("=") or semicolon (";"). Section and property names must not * contain a period ("."). Apache Airflow configuration property names must be written in * [snake_case](https://en.wikipedia.org/wiki/Snake_case). Property values can contain any * character, and can be written in any lower/upper case format. Certain Apache Airflow * configuration property values are [blocked](/composer/docs/concepts/airflow-configurations), * and cannot be overridden. * @param airflowConfigOverrides airflowConfigOverrides or {@code null} for none */ public SoftwareConfig setAirflowConfigOverrides(java.util.Map<String, java.lang.String> airflowConfigOverrides) { this.airflowConfigOverrides = airflowConfigOverrides; return this; } /** * Optional. Additional environment variables to provide to the Apache Airflow scheduler, worker, * and webserver processes. Environment variable names must match the regular expression `a-zA- * Z_*`. 
They cannot specify Apache Airflow software configuration overrides (they cannot match * the regular expression `AIRFLOW__[A-Z0-9_]+__[A-Z0-9_]+`), and they cannot match any of the * following reserved names: * `AIRFLOW_HOME` * `C_FORCE_ROOT` * `CONTAINER_NAME` * `DAGS_FOLDER` * * `GCP_PROJECT` * `GCS_BUCKET` * `GKE_CLUSTER_NAME` * `SQL_DATABASE` * `SQL_INSTANCE` * * `SQL_PASSWORD` * `SQL_PROJECT` * `SQL_REGION` * `SQL_USER` * @return value or {@code null} for none */ public java.util.Map<String, java.lang.String> getEnvVariables() { return envVariables; } /** * Optional. Additional environment variables to provide to the Apache Airflow scheduler, worker, * and webserver processes. Environment variable names must match the regular expression `a-zA- * Z_*`. They cannot specify Apache Airflow software configuration overrides (they cannot match * the regular expression `AIRFLOW__[A-Z0-9_]+__[A-Z0-9_]+`), and they cannot match any of the * following reserved names: * `AIRFLOW_HOME` * `C_FORCE_ROOT` * `CONTAINER_NAME` * `DAGS_FOLDER` * * `GCP_PROJECT` * `GCS_BUCKET` * `GKE_CLUSTER_NAME` * `SQL_DATABASE` * `SQL_INSTANCE` * * `SQL_PASSWORD` * `SQL_PROJECT` * `SQL_REGION` * `SQL_USER` * @param envVariables envVariables or {@code null} for none */ public SoftwareConfig setEnvVariables(java.util.Map<String, java.lang.String> envVariables) { this.envVariables = envVariables; return this; } /** * The version of the software running in the environment. This encapsulates both the version of * Cloud Composer functionality and the version of Apache Airflow. It must match the regular * expression `composer-([0-9]+\.[0-9]+\.[0-9]+|latest)-airflow-[0-9]+\.[0-9]+(\.[0-9]+.*)?`. When * used as input, the server also checks if the provided version is supported and denies the * request for an unsupported version. The Cloud Composer portion of the version is a [semantic * version](https://semver.org) or `latest`. 
When the patch version is omitted, the current Cloud * Composer patch version is selected. When `latest` is provided instead of an explicit version * number, the server replaces `latest` with the current Cloud Composer version and stores that * version number in the same field. The portion of the image version that follows *airflow-* is * an official Apache Airflow repository [release name](https://github.com/apache/incubator- * airflow/releases). See also [Version List](/composer/docs/concepts/versioning/composer- * versions). * @return value or {@code null} for none */ public java.lang.String getImageVersion() { return imageVersion; } /** * The version of the software running in the environment. This encapsulates both the version of * Cloud Composer functionality and the version of Apache Airflow. It must match the regular * expression `composer-([0-9]+\.[0-9]+\.[0-9]+|latest)-airflow-[0-9]+\.[0-9]+(\.[0-9]+.*)?`. When * used as input, the server also checks if the provided version is supported and denies the * request for an unsupported version. The Cloud Composer portion of the version is a [semantic * version](https://semver.org) or `latest`. When the patch version is omitted, the current Cloud * Composer patch version is selected. When `latest` is provided instead of an explicit version * number, the server replaces `latest` with the current Cloud Composer version and stores that * version number in the same field. The portion of the image version that follows *airflow-* is * an official Apache Airflow repository [release name](https://github.com/apache/incubator- * airflow/releases). See also [Version List](/composer/docs/concepts/versioning/composer- * versions). * @param imageVersion imageVersion or {@code null} for none */ public SoftwareConfig setImageVersion(java.lang.String imageVersion) { this.imageVersion = imageVersion; return this; } /** * Optional. Custom Python Package Index (PyPI) packages to be installed in the environment. 
Keys * refer to the lowercase package name such as "numpy" and values are the lowercase extras and * version specifier such as "==1.12.0", "[devel,gcp_api]", or "[devel]>=1.8.2, <1.9.2". To * specify a package without pinning it to a version specifier, use the empty string as the value. * @return value or {@code null} for none */ public java.util.Map<String, java.lang.String> getPypiPackages() { return pypiPackages; } /** * Optional. Custom Python Package Index (PyPI) packages to be installed in the environment. Keys * refer to the lowercase package name such as "numpy" and values are the lowercase extras and * version specifier such as "==1.12.0", "[devel,gcp_api]", or "[devel]>=1.8.2, <1.9.2". To * specify a package without pinning it to a version specifier, use the empty string as the value. * @param pypiPackages pypiPackages or {@code null} for none */ public SoftwareConfig setPypiPackages(java.util.Map<String, java.lang.String> pypiPackages) { this.pypiPackages = pypiPackages; return this; } /** * Optional. The major version of Python used to run the Apache Airflow scheduler, worker, and * webserver processes. Can be set to '2' or '3'. If not specified, the default is '3'. Cannot be * updated. This field is only supported for Cloud Composer environments in versions * composer-1.*.*-airflow-*.*.*. Environments in newer versions always use Python major version 3. * @return value or {@code null} for none */ public java.lang.String getPythonVersion() { return pythonVersion; } /** * Optional. The major version of Python used to run the Apache Airflow scheduler, worker, and * webserver processes. Can be set to '2' or '3'. If not specified, the default is '3'. Cannot be * updated. This field is only supported for Cloud Composer environments in versions * composer-1.*.*-airflow-*.*.*. Environments in newer versions always use Python major version 3. 
* @param pythonVersion pythonVersion or {@code null} for none */ public SoftwareConfig setPythonVersion(java.lang.String pythonVersion) { this.pythonVersion = pythonVersion; return this; } /** * Optional. The number of schedulers for Airflow. This field is supported for Cloud Composer * environments in versions composer-1.*.*-airflow-2.*.*. * @return value or {@code null} for none */ public java.lang.Integer getSchedulerCount() { return schedulerCount; } /** * Optional. The number of schedulers for Airflow. This field is supported for Cloud Composer * environments in versions composer-1.*.*-airflow-2.*.*. * @param schedulerCount schedulerCount or {@code null} for none */ public SoftwareConfig setSchedulerCount(java.lang.Integer schedulerCount) { this.schedulerCount = schedulerCount; return this; } @Override public SoftwareConfig set(String fieldName, Object value) { return (SoftwareConfig) super.set(fieldName, value); } @Override public SoftwareConfig clone() { return (SoftwareConfig) super.clone(); } }
/* * Copyright (c) 2019, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.apimgt.impl.importexport.utils; import com.google.common.collect.Sets; import com.google.gson.Gson; import com.google.gson.JsonArray; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonParser; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.apimgt.api.APIDefinition; import org.wso2.carbon.apimgt.api.APIManagementException; import org.wso2.carbon.apimgt.api.APIMgtAuthorizationFailedException; import org.wso2.carbon.apimgt.api.APIMgtResourceAlreadyExistsException; import org.wso2.carbon.apimgt.api.APIMgtResourceNotFoundException; import org.wso2.carbon.apimgt.api.APIProvider; import org.wso2.carbon.apimgt.api.FaultGatewaysException; import org.wso2.carbon.apimgt.api.doc.model.APIResource; import org.wso2.carbon.apimgt.api.model.API; import org.wso2.carbon.apimgt.api.model.APIIdentifier; import org.wso2.carbon.apimgt.api.model.APIStatus; import org.wso2.carbon.apimgt.api.model.ApiTypeWrapper; import org.wso2.carbon.apimgt.api.model.Scope; import org.wso2.carbon.apimgt.api.model.Tier; import org.wso2.carbon.apimgt.api.model.URITemplate; import 
org.wso2.carbon.apimgt.impl.APIConstants; import org.wso2.carbon.apimgt.impl.certificatemgt.ResponseCode; import org.wso2.carbon.apimgt.impl.definitions.OASParserUtil; import org.wso2.carbon.apimgt.impl.importexport.APIImportExportException; import org.wso2.carbon.apimgt.impl.importexport.APIImportExportConstants; import org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder; import org.wso2.carbon.apimgt.impl.utils.APIUtil; import org.wso2.carbon.apimgt.impl.wsdl.util.SOAPToRESTConstants; import org.wso2.carbon.registry.core.Registry; import org.wso2.carbon.registry.core.RegistryConstants; import org.wso2.carbon.registry.core.Resource; import org.wso2.carbon.registry.api.RegistryException; import org.wso2.carbon.registry.core.session.UserRegistry; import org.wso2.carbon.utils.multitenancy.MultitenantUtils; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.net.MalformedURLException; import java.net.URL; import java.nio.file.DirectoryIteratorException; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.List; import java.util.Set; /** * This class provides the functions utilized to import an API from an API archive. */ public final class APIImportUtil { private static final Log log = LogFactory.getLog(APIImportUtil.class); private static final String IN = "in"; private static final String OUT = "out"; private static final String SOAPTOREST = "SoapToRest"; /** * Load the graphQL schema definition from archive. 
* * @param pathToArchive Path to archive * @return Schema definition content * @throws IOException When SDL file not found */ private static String loadGraphqlSDLFile(String pathToArchive) throws IOException { if (CommonUtil.checkFileExistence(pathToArchive + APIImportExportConstants.GRAPHQL_SCHEMA_DEFINITION_LOCATION)) { if (log.isDebugEnabled()) { log.debug("Found graphQL sdl file " + pathToArchive + APIImportExportConstants.GRAPHQL_SCHEMA_DEFINITION_LOCATION); } return FileUtils.readFileToString( new File(pathToArchive, APIImportExportConstants.GRAPHQL_SCHEMA_DEFINITION_LOCATION)); } throw new IOException("Missing graphQL schema definition file. schema.graphql should be present."); } /** * This method imports an API. * * @param pathToArchive location of the extracted folder of the API * @param currentUser the current logged in user * @param isDefaultProviderAllowed decision to keep or replace the provider * @throws APIImportExportException if there is an error in importing an API */ public static void importAPI(String pathToArchive, String currentUser, boolean isDefaultProviderAllowed, APIProvider apiProvider, Boolean overwrite) throws APIImportExportException { String jsonContent = null; API importedApi = null; API targetApi = null; //target API when overwrite is ApiTypeWrapper apiTypeWrapper; String prevProvider; String apiName; String apiVersion; String currentTenantDomain; String currentStatus; String targetStatus; String lifecycleAction = null; UserRegistry registry; int tenantId = APIUtil.getTenantId(currentUser); try { registry = ServiceReferenceHolder.getInstance().getRegistryService().getGovernanceSystemRegistry(tenantId); // Get API Definition as JSON jsonContent = APIAndAPIProductCommonUtil.getAPIDefinitionAsJson(pathToArchive); if (jsonContent == null) { throw new IOException("Cannot find API definition. 
api.json or api.yaml should present"); } JsonElement configElement = new JsonParser().parse(jsonContent); JsonObject configObject = configElement.getAsJsonObject(); //locate the "providerName" within the "id" and set it as the current user JsonObject apiId = configObject.getAsJsonObject(APIImportExportConstants.ID_ELEMENT); prevProvider = apiId.get(APIImportExportConstants.PROVIDER_ELEMENT).getAsString(); apiName = apiId.get(APIImportExportConstants.API_NAME_ELEMENT).getAsString(); apiVersion = apiId.get(APIImportExportConstants.VERSION_ELEMENT).getAsString(); // Remove spaces of API Name/version if present if (apiName != null && apiVersion != null) { apiId.addProperty(APIImportExportConstants.API_NAME_ELEMENT, apiName = apiName.replace(" ", "")); apiId.addProperty(APIImportExportConstants.VERSION_ELEMENT, apiVersion = apiVersion.replace(" ", "")); } else { throw new IOException("API Name (id.apiName) and Version (id.version) must be provided in api.yaml"); } String prevTenantDomain = MultitenantUtils .getTenantDomain(APIUtil.replaceEmailDomainBack(prevProvider)); currentTenantDomain = MultitenantUtils .getTenantDomain(APIUtil.replaceEmailDomainBack(currentUser)); // If the original provider is preserved, if (isDefaultProviderAllowed) { if (!StringUtils.equals(prevTenantDomain, currentTenantDomain)) { String errorMessage = "Tenant mismatch! 
Please enable preserveProvider property " + "for cross tenant API Import."; throw new APIMgtAuthorizationFailedException(errorMessage); } importedApi = new Gson().fromJson(configElement, API.class); } else { String prevProviderWithDomain = APIUtil.replaceEmailDomain(prevProvider); String currentUserWithDomain = APIUtil.replaceEmailDomain(currentUser); apiId.addProperty(APIImportExportConstants.PROVIDER_ELEMENT, currentUserWithDomain); if (configObject.get(APIImportExportConstants.WSDL_URL) != null) { // If original provider is not preserved, replace provider name in the wsdl URL // with the current user with domain name configObject.addProperty(APIImportExportConstants.WSDL_URL, configObject.get(APIImportExportConstants.WSDL_URL).getAsString() .replace(prevProviderWithDomain, currentUserWithDomain)); } importedApi = new Gson().fromJson(configElement, API.class); //Replace context to match with current provider apiTypeWrapper = new ApiTypeWrapper(importedApi); APIAndAPIProductCommonUtil.setCurrentProviderToAPIProperties(apiTypeWrapper, currentTenantDomain, prevTenantDomain); } // Store imported API status targetStatus = importedApi.getStatus(); if (Boolean.TRUE.equals(overwrite)) { String provider = APIUtil .getAPIProviderFromAPINameVersionTenant(apiName, apiVersion, currentTenantDomain); APIIdentifier apiIdentifier = new APIIdentifier(APIUtil.replaceEmailDomain(provider), apiName, apiVersion); // Checking whether the API exists if (!apiProvider.isAPIAvailable(apiIdentifier)) { String errorMessage = "Error occurred while updating. 
API: " + apiName + StringUtils.SPACE + APIConstants.API_DATA_VERSION + ": " + apiVersion + " not found"; throw new APIMgtResourceNotFoundException(errorMessage); } targetApi = apiProvider.getAPI(apiIdentifier); // Store target API status currentStatus = targetApi.getStatus(); // Since the overwrite should be done, the imported API Identifier should be equal to the target API Identifier importedApi.setId(targetApi.getId()); } else { if (apiProvider.isAPIAvailable(importedApi.getId()) || apiProvider.isApiNameWithDifferentCaseExist(apiName)) { String errorMessage = "Error occurred while adding the API. A duplicate API already exists " + "for " + importedApi.getId().getApiName() + '-' + importedApi.getId().getVersion(); throw new APIMgtResourceAlreadyExistsException(errorMessage); } if (apiProvider.isContextExist(importedApi.getContext())) { String errMsg = "Error occurred while adding the API [" + importedApi.getId().getApiName() + '-' + importedApi.getId().getVersion() + "]. A duplicate context[" + importedApi.getContext() + "] already exists"; throw new APIMgtResourceAlreadyExistsException(errMsg); } // Initialize to CREATED when import currentStatus = APIStatus.CREATED.toString(); } //set the status of imported API to CREATED (importing API) or current status of target API when updating importedApi.setStatus(currentStatus); // check whether targetStatus is reachable from current status, if not throw an exception if (!currentStatus.equals(targetStatus)) { lifecycleAction = APIAndAPIProductCommonUtil.getLifeCycleAction(currentTenantDomain, currentStatus, targetStatus, apiProvider); if (lifecycleAction == null) { String errMsg = "Error occurred while importing the API. 
" + targetStatus + " is not reachable from " + currentStatus; throw new APIImportExportException(errMsg); } } Set<Tier> allowedTiers; Set<Tier> unsupportedTiersList; allowedTiers = apiProvider.getTiers(); if (!(allowedTiers.isEmpty())) { unsupportedTiersList = Sets.difference(importedApi.getAvailableTiers(), allowedTiers); //If at least one unsupported tier is found, it should be removed before adding API if (!(unsupportedTiersList.isEmpty())) { //Process is continued with a warning and only supported tiers are added to the importer API unsupportedTiersList.forEach(unsupportedTier -> log.warn("Tier name : " + unsupportedTier.getName() + " is not supported.")); //Remove the unsupported tiers before adding the API importedApi.removeAvailableTiers(unsupportedTiersList); } } if (Boolean.FALSE.equals(overwrite)) { //Add API in CREATED state importedApi.setAsDefaultVersion(false); apiProvider.addAPI(importedApi); } //Swagger definition will only be available of API type HTTP. Web socket API does not have it. 
if (!APIConstants.APITransportType.WS.toString().equalsIgnoreCase(importedApi.getType())) { String swaggerContent = APIAndAPIProductCommonUtil.loadSwaggerFile(pathToArchive); // Check whether any of the resources should be removed from the API when updating, // that has already been used in API Products List<APIResource> resourcesToRemove = apiProvider.getResourcesToBeRemovedFromAPIProducts(importedApi.getId(), swaggerContent); // Do not allow to remove resources from API Products, hence throw an exception if (!resourcesToRemove.isEmpty()) { throw new APIImportExportException("Cannot remove following resource paths " + resourcesToRemove.toString() + " because they are used by one or more API Products"); } //preProcess swagger definition swaggerContent = OASParserUtil.preProcess(swaggerContent); addSwaggerDefinition(importedApi.getId(), swaggerContent, apiProvider); //If graphQL API, import graphQL schema definition to registry if (StringUtils.equals(importedApi.getType(), APIConstants.APITransportType.GRAPHQL.toString())) { String schemaDefinition = loadGraphqlSDLFile(pathToArchive); addGraphqlSchemaDefinition(importedApi, schemaDefinition, apiProvider); } else { //Load required properties from swagger to the API APIDefinition apiDefinition = OASParserUtil.getOASParser(swaggerContent); Set<URITemplate> uriTemplates = apiDefinition.getURITemplates(swaggerContent); for (URITemplate uriTemplate : uriTemplates) { Scope scope = uriTemplate.getScope(); if (scope != null && !(APIUtil.isWhiteListedScope(scope.getKey())) && apiProvider.isScopeKeyAssignedLocally(importedApi.getId(), scope.getKey(), tenantId)) { String errorMessage = "Error in adding API. 
Scope " + scope.getKey() + " is already assigned by another API."; throw new APIImportExportException(errorMessage); } } importedApi.setUriTemplates(uriTemplates); Set<Scope> scopes = apiDefinition.getScopes(swaggerContent); importedApi.setScopes(scopes); boolean isBasepathExtractedFromSwagger = true; //Setup vendor extensions to API when importing through CTL tool importedApi = OASParserUtil.setExtensionsToAPI(swaggerContent, importedApi, isBasepathExtractedFromSwagger); } } // This is required to make url templates and scopes get effected apiProvider.updateAPI(importedApi); //Since Image, documents, sequences and WSDL are optional, exceptions are logged and ignored in implementation ApiTypeWrapper apiTypeWrapperWithUpdatedApi = new ApiTypeWrapper(importedApi); APIAndAPIProductCommonUtil.addAPIOrAPIProductImage(pathToArchive, apiTypeWrapperWithUpdatedApi, apiProvider); APIAndAPIProductCommonUtil.addAPIOrAPIProductDocuments(pathToArchive, apiTypeWrapperWithUpdatedApi, apiProvider); addAPISequences(pathToArchive, importedApi, registry); addAPISpecificSequences(pathToArchive, importedApi, registry); addAPIWsdl(pathToArchive, importedApi, apiProvider, registry); addEndpointCertificates(pathToArchive, importedApi, apiProvider, tenantId); addSOAPToREST(pathToArchive, importedApi, registry); if (apiProvider.isClientCertificateBasedAuthenticationConfigured()) { if (log.isDebugEnabled()) { log.debug("Mutual SSL enabled. 
Importing client certificates."); } APIAndAPIProductCommonUtil.addClientCertificates(pathToArchive, apiProvider); } // Change API lifecycle if state transition is required if (StringUtils.isNotEmpty(lifecycleAction)) { log.info("Changing lifecycle from " + currentStatus + " to " + targetStatus); apiProvider.changeAPILCCheckListItems(importedApi.getId(), APIImportExportConstants.REFER_REQUIRE_RE_SUBSCRIPTION_CHECK_ITEM, true); apiProvider.changeLifeCycleStatus(importedApi.getId(), lifecycleAction); //Change the status of the imported API to targetStatus importedApi.setStatus(targetStatus); } } catch (IOException e) { //Error is logged and APIImportExportException is thrown because adding API and swagger are mandatory steps String errorMessage = "Error while reading API meta information from path: " + pathToArchive; throw new APIImportExportException(errorMessage, e); } catch (FaultGatewaysException e) { String errorMessage = "Error while updating API: " + importedApi.getId().getApiName(); throw new APIImportExportException(errorMessage, e); } catch (RegistryException e) { String errorMessage = "Error while getting governance registry for tenant: " + tenantId; throw new APIImportExportException(errorMessage, e); } catch (APIManagementException e) { String errorMessage = "Error while importing API: "; if (importedApi != null) { errorMessage += importedApi.getId().getApiName() + StringUtils.SPACE + APIConstants.API_DATA_VERSION + ": " + importedApi.getId().getVersion(); } throw new APIImportExportException(errorMessage, e); } } /** * This method adds API sequences to the imported API. If the sequence is a newly defined one, it is added. 
* * @param pathToArchive location of the extracted folder of the API * @param importedApi the imported API object */ private static void addAPISequences(String pathToArchive, API importedApi, Registry registry) { String inSequenceFileName = importedApi.getInSequence() + APIConstants.XML_EXTENSION; String inSequenceFileLocation = pathToArchive + APIImportExportConstants.IN_SEQUENCE_LOCATION + inSequenceFileName; String regResourcePath; //Adding in-sequence, if any if (CommonUtil.checkFileExistence(inSequenceFileLocation)) { regResourcePath = APIConstants.API_CUSTOM_INSEQUENCE_LOCATION + inSequenceFileName; addSequenceToRegistry(false, registry, inSequenceFileLocation, regResourcePath); } String outSequenceFileName = importedApi.getOutSequence() + APIConstants.XML_EXTENSION; String outSequenceFileLocation = pathToArchive + APIImportExportConstants.OUT_SEQUENCE_LOCATION + outSequenceFileName; //Adding out-sequence, if any if (CommonUtil.checkFileExistence(outSequenceFileLocation)) { regResourcePath = APIConstants.API_CUSTOM_OUTSEQUENCE_LOCATION + outSequenceFileName; addSequenceToRegistry(false, registry, outSequenceFileLocation, regResourcePath); } String faultSequenceFileName = importedApi.getFaultSequence() + APIConstants.XML_EXTENSION; String faultSequenceFileLocation = pathToArchive + APIImportExportConstants.FAULT_SEQUENCE_LOCATION + faultSequenceFileName; //Adding fault-sequence, if any if (CommonUtil.checkFileExistence(faultSequenceFileLocation)) { regResourcePath = APIConstants.API_CUSTOM_FAULTSEQUENCE_LOCATION + faultSequenceFileName; addSequenceToRegistry(false, registry, faultSequenceFileLocation, regResourcePath); } } /** * This method adds API Specific sequences added through the Publisher to the imported API. If the specific * sequence already exists, it is updated. 
 *
 * @param pathToArchive location of the extracted folder of the API
 * @param importedApi   the imported API object
 * @param registry      the registry instance the sequences are stored in
 */
private static void addAPISpecificSequences(String pathToArchive, API importedApi, Registry registry) {

    // Registry collection specific to this exact API: .../<provider>/<apiName>/<version>/
    String regResourcePath = APIConstants.API_ROOT_LOCATION + RegistryConstants.PATH_SEPARATOR
            + importedApi.getId().getProviderName() + RegistryConstants.PATH_SEPARATOR
            + importedApi.getId().getApiName() + RegistryConstants.PATH_SEPARATOR
            + importedApi.getId().getVersion() + RegistryConstants.PATH_SEPARATOR;

    String inSequenceFileName = importedApi.getInSequence();
    String inSequenceFileLocation = pathToArchive + APIImportExportConstants.IN_SEQUENCE_LOCATION
            + APIImportExportConstants.CUSTOM_TYPE + File.separator + inSequenceFileName;
    //Adding in-sequence, if any
    // NOTE(review): unlike the out/fault cases below, the in-sequence registry name is built WITHOUT
    // the .xml extension (the extension is only appended for the existence check and the source
    // location) — confirm this asymmetry is intentional.
    if (CommonUtil.checkFileExistence(inSequenceFileLocation + APIConstants.XML_EXTENSION)) {
        String inSequencePath = APIConstants.API_CUSTOM_SEQUENCE_TYPE_IN
                + RegistryConstants.PATH_SEPARATOR + inSequenceFileName;
        addSequenceToRegistry(true, registry, inSequenceFileLocation + APIConstants.XML_EXTENSION,
                regResourcePath + inSequencePath);
    }

    String outSequenceFileName = importedApi.getOutSequence() + APIConstants.XML_EXTENSION;
    String outSequenceFileLocation = pathToArchive + APIImportExportConstants.OUT_SEQUENCE_LOCATION
            + APIImportExportConstants.CUSTOM_TYPE + File.separator + outSequenceFileName;
    //Adding out-sequence, if any
    if (CommonUtil.checkFileExistence(outSequenceFileLocation)) {
        String outSequencePath = APIConstants.API_CUSTOM_SEQUENCE_TYPE_OUT
                + RegistryConstants.PATH_SEPARATOR + outSequenceFileName;
        addSequenceToRegistry(true, registry, outSequenceFileLocation, regResourcePath + outSequencePath);
    }

    String faultSequenceFileName = importedApi.getFaultSequence() + APIConstants.XML_EXTENSION;
    String faultSequenceFileLocation = pathToArchive + APIImportExportConstants.FAULT_SEQUENCE_LOCATION
            + APIImportExportConstants.CUSTOM_TYPE + File.separator + faultSequenceFileName;
    //Adding fault-sequence, if any
    if (CommonUtil.checkFileExistence(faultSequenceFileLocation)) {
        String faultSequencePath = APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT
                + RegistryConstants.PATH_SEPARATOR + faultSequenceFileName;
        addSequenceToRegistry(true, registry, faultSequenceFileLocation, regResourcePath + faultSequencePath);
    }
}

/**
 * This method adds the sequence files to the registry. This updates the API specific sequences if already
 * exists.
 *
 * @param isAPISpecific        whether the adding sequence is API specific
 * @param registry             the registry instance
 * @param sequenceFileLocation location of the sequence file
 * @param regResourcePath      registry path the sequence is stored under
 */
private static void addSequenceToRegistry(Boolean isAPISpecific, Registry registry, String sequenceFileLocation,
        String regResourcePath) {
    try {
        // Common (non API-specific) sequences are never overwritten; API-specific ones are updated in place.
        if (registry.resourceExists(regResourcePath) && !isAPISpecific) {
            if (log.isDebugEnabled()) {
                log.debug("Sequence already exists in registry path: " + regResourcePath);
            }
        } else {
            if (log.isDebugEnabled()) {
                log.debug("Adding Sequence to the registry path : " + regResourcePath);
            }
            File sequenceFile = new File(sequenceFileLocation);
            // try-with-resources closes the stream even when the registry write fails
            try (InputStream seqStream = new FileInputStream(sequenceFile);) {
                byte[] inSeqData = IOUtils.toByteArray(seqStream);
                Resource inSeqResource = registry.newResource();
                inSeqResource.setContent(inSeqData);
                registry.put(regResourcePath, inSeqResource);
            }
        }
    } catch (RegistryException e) {
        //this is logged and ignored because sequences are optional
        log.error("Failed to add sequences into the registry : " + regResourcePath, e);
    } catch (IOException e) {
        //this is logged and ignored because sequences are optional
        log.error("I/O error while writing sequence data to the registry : " + regResourcePath, e);
    }
}

/**
 * This method adds the WSDL to the registry, if there is a WSDL associated with the API.
* * @param pathToArchive location of the extracted folder of the API * @param importedApi the imported API object */ private static void addAPIWsdl(String pathToArchive, API importedApi, APIProvider apiProvider, Registry registry) { String wsdlFileName = importedApi.getId().getApiName() + "-" + importedApi.getId().getVersion() + APIConstants.WSDL_FILE_EXTENSION; String wsdlPath = pathToArchive + APIImportExportConstants.WSDL_LOCATION + wsdlFileName; if (CommonUtil.checkFileExistence(wsdlPath)) { try { URL wsdlFileUrl = new File(wsdlPath).toURI().toURL(); importedApi.setWsdlUrl(wsdlFileUrl.toString()); APIUtil.createWSDL(registry, importedApi); apiProvider.updateAPI(importedApi); } catch (MalformedURLException e) { //this exception is logged and ignored since WSDL is optional for an API log.error("Error in getting WSDL URL. ", e); } catch (org.wso2.carbon.registry.core.exceptions.RegistryException e) { //this exception is logged and ignored since WSDL is optional for an API log.error("Error in putting the WSDL resource to registry. ", e); } catch (APIManagementException e) { //this exception is logged and ignored since WSDL is optional for an API log.error("Error in creating the WSDL resource in the registry. ", e); } catch (FaultGatewaysException e) { //This is logged and process is continued because WSDL is optional for an API log.error("Failed to update API after adding WSDL. ", e); } } } /** * This method adds Swagger API definition to registry. 
* * @param apiId Identifier of the imported API * @param swaggerContent Content of Swagger file * @throws APIImportExportException if there is an error occurs when adding Swagger definition */ private static void addSwaggerDefinition(APIIdentifier apiId, String swaggerContent, APIProvider apiProvider) throws APIImportExportException { try { apiProvider.saveSwagger20Definition(apiId, swaggerContent); } catch (APIManagementException e) { String errorMessage = "Error in adding Swagger definition for the API: " + apiId.getApiName() + StringUtils.SPACE + APIConstants.API_DATA_VERSION + ": " + apiId.getVersion(); throw new APIImportExportException(errorMessage, e); } } /** * This method adds GraphQL schema definition to the registry. * * @param api API to import * @param schemaDefinition Content of schema definition * @param apiProvider API Provider * @throws APIManagementException if there is an error occurs when adding schema definition */ private static void addGraphqlSchemaDefinition(API api, String schemaDefinition, APIProvider apiProvider) throws APIManagementException { apiProvider.saveGraphqlSchemaDefinition(api, schemaDefinition); } /** * This method import endpoint certificate. 
 *
 * @param pathToArchive location of the extracted folder of the API
 * @param importedApi   the imported API object
 * @param apiProvider   provider used to persist the certificates
 * @param tenantId      tenant the certificates are imported for
 * @throws APIImportExportException If an error occurs while importing endpoint certificates from file
 */
private static void addEndpointCertificates(String pathToArchive, API importedApi, APIProvider apiProvider,
        int tenantId) throws APIImportExportException {
    String jsonContent = null;
    String pathToYamlFile = pathToArchive + APIImportExportConstants.YAML_ENDPOINTS_CERTIFICATE_FILE;
    String pathToJsonFile = pathToArchive + APIImportExportConstants.JSON_ENDPOINTS_CERTIFICATE_FILE;
    try {
        // try loading file as YAML; YAML takes precedence and is converted to JSON so both
        // formats share a single parse path below
        if (CommonUtil.checkFileExistence(pathToYamlFile)) {
            if (log.isDebugEnabled()) {
                log.debug("Found certificate file " + pathToYamlFile);
            }
            // NOTE(review): readFileToString without an explicit charset uses the platform
            // default encoding — confirm archives are always written in that encoding.
            String yamlContent = FileUtils.readFileToString(new File(pathToYamlFile));
            jsonContent = CommonUtil.yamlToJson(yamlContent);
        } else if (CommonUtil.checkFileExistence(pathToJsonFile)) {
            // load as a json fallback
            if (log.isDebugEnabled()) {
                log.debug("Found certificate file " + pathToJsonFile);
            }
            jsonContent = FileUtils.readFileToString(new File(pathToJsonFile));
        }
        if (jsonContent == null) {
            // Certificates are optional; nothing to do when neither file exists.
            log.debug("No certificate file found to be added, skipping certificate import.");
            return;
        }
        JsonElement configElement = new JsonParser().parse(jsonContent);
        // NOTE(review): the second getAsJsonArray() call is redundant — it returns the same array.
        JsonArray certificates = configElement.getAsJsonArray().getAsJsonArray();
        certificates.forEach(certificate -> updateAPIWithCertificate(certificate, apiProvider, importedApi,
                tenantId));
    } catch (IOException e) {
        String errorMessage = "Error in reading " + APIImportExportConstants.YAML_ENDPOINTS_CERTIFICATE_FILE
                + " file";
        throw new APIImportExportException(errorMessage, e);
    }
}

/**
 * Update API with the certificate.
 * If certificate alias already exists for tenant in database, certificate content will be
 * updated in trust store. If cert alias does not exist in database for that tenant, add the certificate to
 * publisher and gateway nodes. In such case if alias already exists in the trust store, update the certificate
 * content for that alias. Failures are logged and ignored because certificates are optional.
 *
 * @param certificate Certificate JSON element
 * @param apiProvider API Provider
 * @param importedApi API to import
 * @param tenantId    Tenant Id
 */
private static void updateAPIWithCertificate(JsonElement certificate, APIProvider apiProvider, API importedApi,
        int tenantId) {
    String certificateContent = certificate.getAsJsonObject()
            .get(APIImportExportConstants.CERTIFICATE_CONTENT_JSON_KEY).getAsString();
    String alias = certificate.getAsJsonObject().get(APIImportExportConstants.ALIAS_JSON_KEY).getAsString();
    String endpoint = certificate.getAsJsonObject().get(APIImportExportConstants.HOSTNAME_JSON_KEY)
            .getAsString();
    try {
        // Update when the alias is already known to this tenant, or when adding reports that the
        // alias already exists in the trust store.
        if (apiProvider.isCertificatePresent(tenantId, alias)
                || (ResponseCode.ALIAS_EXISTS_IN_TRUST_STORE.getResponseCode()
                == (apiProvider.addCertificate(APIUtil.replaceEmailDomainBack(importedApi.getId().getProviderName()),
                certificateContent, alias, endpoint)))) {
            apiProvider.updateCertificate(certificateContent, alias);
        }
    } catch (APIManagementException e) {
        // logged and ignored because endpoint certificates are optional
        String errorMessage = "Error while importing certificate endpoint [" + endpoint + " ]"
                + "alias [" + alias + " ] tenant user ["
                + APIUtil.replaceEmailDomainBack(importedApi.getId().getProviderName()) + "]";
        log.error(errorMessage, e);
    }
}

/**
 * This method adds the SOAP to REST converted mediation logic (in/out flows) of the API to the registry.
* * @param pathToArchive location of the extracted folder of the API */ private static void addSOAPToREST(String pathToArchive, API importedApi, Registry registry) throws APIImportExportException { String inFlowFileLocation = pathToArchive + File.separator + SOAPTOREST + File.separator + IN; String outFlowFileLocation = pathToArchive + File.separator + SOAPTOREST + File.separator + OUT; //Adding in-sequence, if any if (CommonUtil.checkFileExistence(inFlowFileLocation)) { APIIdentifier apiId = importedApi.getId(); String soapToRestLocationIn = APIConstants.API_ROOT_LOCATION + RegistryConstants.PATH_SEPARATOR + apiId.getProviderName() + RegistryConstants.PATH_SEPARATOR + apiId.getApiName() + RegistryConstants.PATH_SEPARATOR + apiId.getVersion() + RegistryConstants.PATH_SEPARATOR + SOAPToRESTConstants.SequenceGen.SOAP_TO_REST_IN_RESOURCE; String soapToRestLocationOut = APIConstants.API_ROOT_LOCATION + RegistryConstants.PATH_SEPARATOR + apiId.getProviderName() + RegistryConstants.PATH_SEPARATOR + apiId.getApiName() + RegistryConstants.PATH_SEPARATOR + apiId.getVersion() + RegistryConstants.PATH_SEPARATOR + SOAPToRESTConstants.SequenceGen.SOAP_TO_REST_OUT_RESOURCE; try { // Import inflow mediation logic Path inFlowDirectory = Paths.get(inFlowFileLocation); ImportMediationLogic(inFlowDirectory, registry, soapToRestLocationIn); // Import outflow mediation logic Path outFlowDirectory = Paths.get(outFlowFileLocation); ImportMediationLogic(outFlowDirectory, registry, soapToRestLocationOut); } catch (DirectoryIteratorException e) { throw new APIImportExportException("Error in importing SOAP to REST mediation logic", e); } } } /** * Method created to add inflow and outflow mediation logic * * @param flowDirectory inflow and outflow directory * @param registry Registry * @param soapToRestLocation folder location * @throws APIImportExportException */ private static void ImportMediationLogic(Path flowDirectory, Registry registry, String soapToRestLocation) throws 
APIImportExportException { InputStream inputFlowStream = null; try (DirectoryStream<Path> stream = Files.newDirectoryStream(flowDirectory)) { for (Path file : stream) { String fileName = file.getFileName().toString(); String method = ""; if (fileName.split(".xml").length != 0) { method = fileName.split(".xml")[0] .substring(file.getFileName().toString().lastIndexOf("_") + 1); } inputFlowStream = new FileInputStream(file.toFile()); byte[] inSeqData = IOUtils.toByteArray(inputFlowStream); Resource inSeqResource = (Resource) registry.newResource(); inSeqResource.setContent(inSeqData); inSeqResource.addProperty(SOAPToRESTConstants.METHOD, method); inSeqResource.setMediaType("text/xml"); registry.put(soapToRestLocation + RegistryConstants.PATH_SEPARATOR + file.getFileName(), inSeqResource); IOUtils.closeQuietly(inputFlowStream); } } catch (IOException | DirectoryIteratorException e) { throw new APIImportExportException("Error in importing SOAP to REST mediation logic", e); } catch (org.wso2.carbon.registry.core.exceptions.RegistryException e) { throw new APIImportExportException("Error in storing imported SOAP to REST mediation logic", e); } finally { IOUtils.closeQuietly(inputFlowStream); } } }
package org.testng.internal.annotations;

import java.lang.annotation.Annotation;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.testng.ITestNGMethod;
import org.testng.annotations.IAnnotation;
import org.testng.annotations.IConfigurationAnnotation;
import org.testng.annotations.IDataProviderAnnotation;
import org.testng.annotations.IFactoryAnnotation;
import org.testng.annotations.IParametersAnnotation;
import org.testng.annotations.ITestAnnotation;
import org.testng.collections.Maps;
import org.testng.internal.ConstructorOrMethod;
import org.testng.internal.TestNGMethod;
import org.testng.internal.Utils;
import org.testng.internal.reflect.ReflectionHelper;
import org.testng.log4testng.Logger;
import org.testng.xml.XmlTest;

/**
 * Helper methods to find @Test and @Configuration tags. They minimize the amount of casting we need
 * to do.
 */
public class AnnotationHelper {

  private static final Logger LOGGER = Logger.getLogger(AnnotationHelper.class);

  // Every annotation type this helper can look up, test and configuration alike.
  private static final List<Class<? extends IAnnotation>> ALL_ANNOTATIONS = Arrays.asList(
      ITestAnnotation.class, IBeforeClass.class, IAfterClass.class, IBeforeMethod.class,
      IAfterMethod.class, IDataProviderAnnotation.class, IFactoryAnnotation.class,
      IParametersAnnotation.class, IBeforeSuite.class, IAfterSuite.class, IBeforeTest.class,
      IAfterTest.class, IBeforeGroups.class, IAfterGroups.class);

  // The subset of annotation types that mark configuration (before/after) methods.
  private static final List<Class<? extends IAnnotation>> CONFIGURATION_CLASSES = Arrays.asList(
      IBeforeSuite.class, IAfterSuite.class, IBeforeTest.class, IAfterTest.class,
      IBeforeGroups.class, IAfterGroups.class, IBeforeClass.class, IAfterClass.class,
      IBeforeMethod.class, IAfterMethod.class);

  private AnnotationHelper() {
    // Utility class; defeat instantiation.
  }

  /** Returns the @Test annotation found on the given class, or null if there is none. */
  public static ITestAnnotation findTest(IAnnotationFinder finder, Class<?> cls) {
    return finder.findAnnotation(cls, ITestAnnotation.class);
  }

  /** Returns the @Test annotation found on the given method, or null if there is none. */
  public static ITestAnnotation findTest(IAnnotationFinder finder, Method m) {
    return finder.findAnnotation(m, ITestAnnotation.class);
  }

  /** Returns the @Test annotation found on the given TestNG method, or null if there is none. */
  public static ITestAnnotation findTest(IAnnotationFinder finder, ITestNGMethod m) {
    return finder.findAnnotation(m, ITestAnnotation.class);
  }

  /** Returns the @Factory annotation found on the given method, or null if there is none. */
  public static IFactoryAnnotation findFactory(IAnnotationFinder finder, Method m) {
    return finder.findAnnotation(m, IFactoryAnnotation.class);
  }

  /** Returns the @Factory annotation found on the given constructor, or null if there is none. */
  public static IFactoryAnnotation findFactory(IAnnotationFinder finder, Constructor c) {
    return finder.findAnnotation(c, IFactoryAnnotation.class);
  }

  /**
   * Looks up all ten before/after annotation types on the given constructor-or-method and merges
   * whichever are present into one synthetic configuration annotation.
   *
   * @return the merged configuration annotation, or null when none of the ten types is present
   */
  public static IConfigurationAnnotation findConfiguration(
      IAnnotationFinder finder, ConstructorOrMethod m) {
    IConfigurationAnnotation result = null;

    IConfigurationAnnotation bs =
        (IConfigurationAnnotation) finder.findAnnotation(m, IBeforeSuite.class);
    IConfigurationAnnotation as =
        (IConfigurationAnnotation) finder.findAnnotation(m, IAfterSuite.class);
    IConfigurationAnnotation bt =
        (IConfigurationAnnotation) finder.findAnnotation(m, IBeforeTest.class);
    IConfigurationAnnotation at =
        (IConfigurationAnnotation) finder.findAnnotation(m, IAfterTest.class);
    IConfigurationAnnotation bg =
        (IConfigurationAnnotation) finder.findAnnotation(m, IBeforeGroups.class);
    IConfigurationAnnotation ag =
        (IConfigurationAnnotation) finder.findAnnotation(m, IAfterGroups.class);
    IConfigurationAnnotation bc =
        (IConfigurationAnnotation) finder.findAnnotation(m, IBeforeClass.class);
    IConfigurationAnnotation ac =
        (IConfigurationAnnotation) finder.findAnnotation(m, IAfterClass.class);
    IConfigurationAnnotation bm =
        (IConfigurationAnnotation) finder.findAnnotation(m, IBeforeMethod.class);
    IConfigurationAnnotation am =
        (IConfigurationAnnotation) finder.findAnnotation(m, IAfterMethod.class);

    if (bs != null
        || as != null
        || bt != null
        || at != null
        || bg != null
        || ag != null
        || bc != null
        || ac != null
        || bm != null
        || am != null) {
      result = createConfiguration(bs, as, bt, at, bg, ag, bc, ac, bm, am);
    }

    return result;
  }

  /** Convenience overload of {@link #findConfiguration(IAnnotationFinder, ConstructorOrMethod)}. */
  public static IConfigurationAnnotation findConfiguration(IAnnotationFinder finder, Method m) {
    return findConfiguration(finder, new ConstructorOrMethod(m));
  }

  // Merges the individual before/after annotations into one ConfigurationAnnotation, setting the
  // corresponding flag for each annotation that was present.
  private static IConfigurationAnnotation createConfiguration(
      IConfigurationAnnotation bs,
      IConfigurationAnnotation as,
      IConfigurationAnnotation bt,
      IConfigurationAnnotation at,
      IConfigurationAnnotation bg,
      IConfigurationAnnotation ag,
      IConfigurationAnnotation bc,
      IConfigurationAnnotation ac,
      IConfigurationAnnotation bm,
      IConfigurationAnnotation am) {
    ConfigurationAnnotation result = new ConfigurationAnnotation();

    if (bs != null) {
      result.setBeforeSuite(true);
      finishInitialize(result, bs);
    }
    if (as != null) {
      result.setAfterSuite(true);
      finishInitialize(result, as);
    }
    if (bt != null) {
      result.setBeforeTest(true);
      finishInitialize(result, bt);
    }
    if (at != null) {
      result.setAfterTest(true);
      finishInitialize(result, at);
    }
    if (bg != null) {
      result.setBeforeGroups(bg.getBeforeGroups());
      finishInitialize(result, bg);
    }
    if (ag != null) {
      result.setAfterGroups(ag.getAfterGroups());
      finishInitialize(result, ag);
    }
    if (bc != null) {
      result.setBeforeTestClass(true);
      finishInitialize(result, bc);
    }
    if (ac != null) {
      result.setAfterTestClass(true);
      finishInitialize(result, ac);
    }
    if (bm != null) {
      result.setBeforeTestMethod(true);
      finishInitialize(result, bm);
    }
    if (am != null) {
      result.setAfterTestMethod(true);
      finishInitialize(result, am);
    }

    return result;
  }

  // Copies the attributes shared by all configuration annotations onto the merged result.
  private static void finishInitialize(
      ConfigurationAnnotation result, IConfigurationAnnotation bs) {
    result.setFakeConfiguration(true);
    result.setAlwaysRun(bs.getAlwaysRun());
    result.setDependsOnGroups(bs.getDependsOnGroups());
    result.setDependsOnMethods(bs.getDependsOnMethods());
    result.setDescription(bs.getDescription());
    result.setEnabled(bs.getEnabled());
    result.setGroups(bs.getGroups());
    result.setInheritGroups(bs.getInheritGroups());
    result.setTimeOut(bs.getTimeOut());
  }

  /** Returns every annotation type this helper knows about. */
  public static List<Class<? extends IAnnotation>> getAllAnnotations() {
    return ALL_ANNOTATIONS;
  }

  /** Delegation method for creating the list of <CODE>ITestMethod</CODE>s to be analysed. */
  public static ITestNGMethod[] findMethodsWithAnnotation(
      Class<?> rootClass,
      Class<? extends IAnnotation> annotationClass,
      IAnnotationFinder annotationFinder,
      XmlTest xmlTest) {
    // Keep a map of the methods we saw so that we ignore a method in a superclass if it's
    // already been seen in a child class
    Map<String, ITestNGMethod> vResult = Maps.newHashMap();
    try {
      vResult = Maps.newHashMap();
      Class<?> cls = rootClass;

      //
      // If the annotation is on the class or superclass, it applies to all public methods
      // except methods marked with @Configuration
      //

      //
      // Otherwise walk through all the methods and keep those
      // that have the annotation
      //
      while (null != cls) {
        boolean hasClassAnnotation = isAnnotationPresent(annotationFinder, cls, annotationClass);
        Method[] methods = ReflectionHelper.getLocalMethods(cls);
        for (Method m : methods) {
          boolean hasMethodAnnotation = isAnnotationPresent(annotationFinder, m, annotationClass);
          boolean hasTestNGAnnotation =
              isAnnotationPresent(annotationFinder, m, IFactoryAnnotation.class)
                  || isAnnotationPresent(annotationFinder, m, ITestAnnotation.class)
                  || isAnnotationPresent(annotationFinder, m);
          boolean isPublic = Modifier.isPublic(m.getModifiers());
          boolean isSynthetic = m.isSynthetic();
          if ((isPublic && hasClassAnnotation && !isSynthetic && (!hasTestNGAnnotation))
              || hasMethodAnnotation) {
            // Small hack to allow users to specify @Configuration classes even though
            // a class-level @Test annotation is present. In this case, don't count
            // that method as a @Test
            if (isAnnotationPresent(annotationFinder, m, IConfigurationAnnotation.class)
                && isAnnotationPresent(annotationFinder, cls, ITestAnnotation.class)) {
              Utils.log(
                  "",
                  3,
                  "Method "
                      + m
                      + " has a configuration annotation"
                      + " and a class-level @Test. This method will only be kept as a"
                      + " configuration method.");
              continue;
            }
            // Skip the method if it has a return type
            if (m.getReturnType() != void.class && !xmlTest.getAllowReturnValues()) {
              Utils.log(
                  "",
                  2,
                  "Method "
                      + m
                      + " has a @Test annotation"
                      + " but also a return value:"
                      + " ignoring it. Use <suite allow-return-values=\"true\"> to fix this");
              continue;
            }
            String key = createMethodKey(m);
            if (null == vResult.get(key)) {
              ITestNGMethod tm = new TestNGMethod(m, annotationFinder, xmlTest, null);
              vResult.put(key, tm);
            }
          }
        } // for
        // Now explore the superclass
        cls = cls.getSuperclass();
      } // while
    } catch (SecurityException e) {
      LOGGER.error(e.getMessage(), e);
    }
    return vResult.values().toArray(new ITestNGMethod[0]);
  }

  /**
   * Walks up the superclass chain of parameterClass looking for annotationClass.
   *
   * @return the first annotation found, or null if no class in the chain carries it
   */
  public static <A extends Annotation> A findAnnotationSuperClasses(
      Class<A> annotationClass, Class<?> parameterClass) {
    Class<?> c = parameterClass;
    while (c != null) {
      A result = c.getAnnotation(annotationClass);
      if (result != null) {
        return result;
      } else {
        c = c.getSuperclass();
      }
    }
    return null;
  }

  // True when the method carries any of the configuration (before/after) annotations.
  private static boolean isAnnotationPresent(
      IAnnotationFinder annotationFinder, Method m) {
    for (Class<? extends IAnnotation> a : AnnotationHelper.CONFIGURATION_CLASSES) {
      if (annotationFinder.findAnnotation(m, a) != null) {
        return true;
      }
    }
    return false;
  }

  // True when the method carries annotationClass; asking for IConfigurationAnnotation is treated
  // as "any of the configuration annotation types".
  private static boolean isAnnotationPresent(
      IAnnotationFinder annotationFinder, Method m, Class<? extends IAnnotation> annotationClass) {
    if (!annotationClass.equals(IConfigurationAnnotation.class)) {
      return annotationFinder.findAnnotation(m, annotationClass) != null;
    }
    boolean found = false;
    for (Class<? extends IAnnotation> clazz : CONFIGURATION_CLASSES) {
      if (annotationFinder.findAnnotation(m, clazz) != null) {
        found = true;
        break;
      }
    }
    return found;
  }

  // True when the class carries annotationClass.
  private static boolean isAnnotationPresent(
      IAnnotationFinder annotationFinder, Class<?> cls, Class<? extends IAnnotation> annotationClass) {
    return annotationFinder.findAnnotation(cls, annotationClass) != null;
  }

  /**
   * @return A key made of the name of this method and its parameter types, but without its
   *     class — so an override seen in a subclass shadows the superclass version
   */
  private static String createMethodKey(Method m) {
    StringBuilder result = new StringBuilder(m.getName());
    for (Class paramClass : m.getParameterTypes()) {
      result.append(' ').append(paramClass.toString());
    }
    return result.toString();
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package flex.messaging.config;

import flex.messaging.log.Log;
import flex.messaging.log.Logger;
import flex.messaging.util.PropertyStringResourceLoader;
import flex.messaging.util.ResourceLoader;
import flex.messaging.util.WatchedObject;

import java.io.File;
import java.io.FileNotFoundException;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;

import javax.servlet.ServletContext;

/**
 * Holder for the server's system-level configuration: locale, manageability,
 * redeploy/watch settings, UUID generator and .NET framework version.
 * Most string setters deliberately parse loose boolean values (see the
 * individual setters for the exact rules).
 *
 * @exclude
 */
public class SystemSettings
{
    // Loads localized strings; replaced via setResourceLoader().
    private ResourceLoader resourceLoader;
    private Locale defaultLocale;
    private boolean enforceEndpointValidation;
    private boolean manageable;
    private boolean redeployEnabled;
    // Polling interval (seconds, presumably — TODO confirm against the watch service).
    private int watchInterval;
    // NOTE(review): raw lists; after setPaths() `watches` holds WatchedObject entries while
    // `touches` may hold a MIX of String and WatchedObject entries (see setPaths below).
    private List watches;
    private List touches;
    private String uuidGeneratorClassName;
    private String dotNetFrameworkVersion;

    /** Creates settings with defaults: manageable, no redeploy, 20-unit watch interval. */
    public SystemSettings()
    {
        enforceEndpointValidation = false;
        manageable = true;
        redeployEnabled = false;
        resourceLoader = new PropertyStringResourceLoader();
        touches = new ArrayList();
        watches = new ArrayList();
        watchInterval = 20;
        dotNetFrameworkVersion = null;
    }

    /** Sets the default locale and propagates it to the resource loader. */
    public void setDefaultLocale(Locale locale)
    {
        defaultLocale = locale;
        resourceLoader.setDefaultLocale(defaultLocale);
    }

    public Locale getDefaultLocale()
    {
        return defaultLocale;
    }

    public boolean isManageable()
    {
        return manageable;
    }

    /**
     * Disables manageability when the value starts with 'f' (case-insensitive);
     * any other value leaves the current setting untouched.
     */
    public void setManageable(String manageable)
    {
        // NOTE(review): the parameter shadows the boolean field of the same name;
        // only the "f..." case ever writes the field.
        manageable = manageable.toLowerCase();
        if (manageable.startsWith("f"))
            this.manageable = false;
    }

    public boolean isEnforceEndpointValidation()
    {
        return enforceEndpointValidation;
    }

    /**
     * Enables endpoint validation when the value starts with 't' (case-insensitive).
     * Null/empty values and anything else leave the setting untouched.
     */
    public void setEnforceEndpointValidation(String enforceEndpointValidation)
    {
        if (enforceEndpointValidation == null || enforceEndpointValidation.length() == 0)
            return;
        if (enforceEndpointValidation.toLowerCase().startsWith("t"))
            this.enforceEndpointValidation = true;
    }

    public ResourceLoader getResourceLoader()
    {
        return resourceLoader;
    }

    public void setResourceLoader(ResourceLoader resourceLoader)
    {
        this.resourceLoader = resourceLoader;
    }

    /** Enables redeploy when the value starts with 't' (case-insensitive); never disables. */
    public void setRedeployEnabled(String enabled)
    {
        enabled = enabled.toLowerCase();
        if (enabled.startsWith("t"))
            this.redeployEnabled = true;
    }

    public boolean getRedeployEnabled()
    {
        return redeployEnabled;
    }

    /**
     * Parses the watch interval.
     * NOTE(review): Integer.parseInt throws NumberFormatException on bad config input.
     */
    public void setWatchInterval(String interval)
    {
        this.watchInterval = Integer.parseInt(interval);
    }

    public int getWatchInterval()
    {
        return watchInterval;
    }

    public void addWatchFile(String watch)
    {
        this.watches.add(watch);
    }

    public List getWatchFiles()
    {
        return watches;
    }

    public void addTouchFile(String touch)
    {
        this.touches.add(touch);
    }

    public List getTouchFiles()
    {
        return touches;
    }

    /**
     * Resolves the configured watch/touch paths against the servlet context.
     * Paths prefixed with "{context.root}" or "{context-root}" (both 14 chars, hence the
     * substring(14)) are resolved via ServletContext.getRealPath(); unresolvable or missing
     * files are logged and dropped. No-op unless redeploy is enabled.
     */
    public void setPaths(ServletContext context)
    {
        if (redeployEnabled)
        {
            List resolvedWatches = new ArrayList();
            for (int i = 0; i < watches.size(); i++)
            {
                String path = (String)watches.get(i);
                String resolvedPath = null;
                if (path.startsWith("{context.root}") || path.startsWith("{context-root}"))
                {
                    // Strip the 14-character context-root token before resolving.
                    path = path.substring(14);
                    resolvedPath = context.getRealPath(path);
                    if (resolvedPath != null)
                    {
                        try
                        {
                            resolvedWatches.add(new WatchedObject(resolvedPath));
                        }
                        catch (FileNotFoundException fnfe)
                        {
                            Logger logger = Log.getLogger(ConfigurationManager.LOG_CATEGORY);
                            if (logger != null)
                            {
                                logger.warn("The watch-file, " + path + ", could not be found and will be ignored.");
                            }
                        }
                    }
                    else
                    {
                        Logger logger = Log.getLogger(ConfigurationManager.LOG_CATEGORY);
                        logger.warn("The watch-file, " + path + ", could not be resolved to a path and will be ignored.");
                    }
                }
                else
                {
                    try
                    {
                        resolvedWatches.add(new WatchedObject(path));
                    }
                    catch (FileNotFoundException fnfe)
                    {
                        Logger logger = Log.getLogger(ConfigurationManager.LOG_CATEGORY);
                        if (logger != null)
                        {
                            logger.warn("The watch-file, " + path + ", could not be found and will be ignored.");
                        }
                    }
                }
            }
            watches = resolvedWatches;

            List resolvedTouches = new ArrayList();
            for (int i = 0; i < touches.size(); i++)
            {
                String path = (String)touches.get(i);
                String resolvedPath = null;
                if (path.startsWith("{context.root}") || path.startsWith("{context-root}"))
                {
                    path = path.substring(14);
                    resolvedPath = context.getRealPath(path);
                    if (resolvedPath != null)
                    {
                        File file = new File(resolvedPath);
                        if (!file.exists() || (!file.isFile() && !file.isDirectory()) || (!file.isAbsolute()))
                        {
                            Logger logger = Log.getLogger(ConfigurationManager.LOG_CATEGORY);
                            logger.warn("The touch-file, " + path + ", could not be found and will be ignored.");
                        }
                        else
                        {
                            // NOTE(review): adds a String here, but the else-branch below adds a
                            // WatchedObject — getTouchFiles() consumers must handle both types.
                            resolvedTouches.add(resolvedPath);
                        }
                    }
                    else
                    {
                        Logger logger = Log.getLogger(ConfigurationManager.LOG_CATEGORY);
                        logger.warn("The touch-file, " + path + ", could not be resolved to a path and will be ignored.");
                    }
                }
                else
                {
                    try
                    {
                        resolvedTouches.add(new WatchedObject(path));
                    }
                    catch (FileNotFoundException fnfe)
                    {
                        Logger logger = Log.getLogger(ConfigurationManager.LOG_CATEGORY);
                        if (logger != null)
                        {
                            logger.warn("The touch-file, " + path + ", could not be found and will be ignored.");
                        }
                    }
                }
            }
            touches = resolvedTouches;
        }
    }

    /**
     * Returns the UUID generator class name.
     *
     * @return The UUID generator class name.
     */
    public String getUUIDGeneratorClassName()
    {
        return uuidGeneratorClassName;
    }

    /**
     * Sets the UUID generator class name.
     *
     * @param value The UUID generator class name.
     */
    public void setUUIDGeneratorClassName(String value)
    {
        uuidGeneratorClassName = value;
    }

    /**
     * Set the dotnet framework version to use.
     * @param version the configured dotnet framework version
     */
    public void setDotNetFrameworkVersion(String version)
    {
        dotNetFrameworkVersion = version;
    }

    /**
     * Get the dotnet framework version.
     * @return String the dotnet framework version
     */
    public String getDotNetFrameworkVersion()
    {
        return dotNetFrameworkVersion;
    }

    /**
     * Clean up static member variables.
     */
    public void clear()
    {
        resourceLoader = null;
        defaultLocale = null;
        watches = null;
        touches = null;
        dotNetFrameworkVersion = null;
    }
}
/* * Copyright 2014-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.rules; import com.facebook.buck.artifact_cache.ArtifactCache; import com.facebook.buck.artifact_cache.CacheResult; import com.facebook.buck.artifact_cache.CacheResultType; import com.facebook.buck.event.ArtifactCompressionEvent; import com.facebook.buck.event.ConsoleEvent; import com.facebook.buck.event.ThrowableConsoleEvent; import com.facebook.buck.io.MoreFiles; import com.facebook.buck.io.MorePaths; import com.facebook.buck.io.ProjectFilesystem; import com.facebook.buck.log.Logger; import com.facebook.buck.model.BuildTarget; import com.facebook.buck.rules.keys.AbiRule; import com.facebook.buck.rules.keys.AbiRuleKeyBuilderFactory; import com.facebook.buck.rules.keys.DependencyFileRuleKeyBuilderFactory; import com.facebook.buck.rules.keys.InputBasedRuleKeyBuilderFactory; import com.facebook.buck.rules.keys.SupportsDependencyFileRuleKey; import com.facebook.buck.rules.keys.SupportsInputBasedRuleKey; import com.facebook.buck.step.Step; import com.facebook.buck.step.StepFailedException; import com.facebook.buck.step.StepRunner; import com.facebook.buck.util.HumanReadableException; import com.facebook.buck.util.cache.FileHashCache; import com.facebook.buck.util.cache.StackedFileHashCache; import com.facebook.buck.util.concurrent.MoreFutures; import com.facebook.buck.zip.Unzip; import com.google.common.annotations.VisibleForTesting; import 
com.google.common.base.Function;
import com.google.common.base.Functions;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.hash.HashCode;
import com.google.common.util.concurrent.AsyncFunction;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.SettableFuture;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutionException;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;

/**
 * A build engine used to build a {@link BuildRule} which also caches the results. If the current
 * {@link RuleKey} of the build rules matches the one on disk, it does not do any work. It also
 * tries to fetch its output from an {@link ArtifactCache} to avoid doing any computation.
 */
public class CachingBuildEngine implements BuildEngine {

  private static final Logger LOG = Logger.get(CachingBuildEngine.class);

  /**
   * These are the values returned by {@link #build(BuildContext, BuildRule)}.
   * This must always return the same value for the build of each target.
   */
  private final ConcurrentMap<BuildTarget, ListenableFuture<BuildResult>> results =
      Maps.newConcurrentMap();

  // Memoized rule-key futures, one per build target (see calculateRuleKey).
  private final ConcurrentMap<BuildTarget, ListenableFuture<RuleKey>> ruleKeys =
      Maps.newConcurrentMap();

  // Memoized dependency-set futures (static deps plus runtime deps), one per build target.
  private final ConcurrentMap<BuildTarget, ListenableFuture<ImmutableSortedSet<BuildRule>>>
      ruleDeps = Maps.newConcurrentMap();

  // First failure observed; lets in-flight rules cancel early when not in keep-going mode.
  @Nullable
  private volatile Throwable firstFailure = null;

  private final ListeningExecutorService service;
  private final FileHashCache fileHashCache;
  private final BuildMode buildMode;
  private final DepFiles depFiles;
  private final RuleKeyFactories ruleKeyFactories;

  /** Production constructor: derives the rule-key factories from the resolver. */
  public CachingBuildEngine(
      ListeningExecutorService service,
      FileHashCache fileHashCache,
      BuildMode buildMode,
      DepFiles depFiles,
      BuildRuleResolver resolver) {
    this.service = service;
    this.fileHashCache = fileHashCache;
    this.buildMode = buildMode;
    this.depFiles = depFiles;
    this.ruleKeyFactories = RuleKeyFactories.build(fileHashCache, resolver);
  }

  /** Test-only constructor: injects the three rule-key builder factories directly. */
  @VisibleForTesting
  CachingBuildEngine(
      ListeningExecutorService service,
      FileHashCache fileHashCache,
      BuildMode buildMode,
      DepFiles depFiles,
      RuleKeyBuilderFactory inputBasedRuleKeyBuilderFactory,
      RuleKeyBuilderFactory abiRuleKeyBuilderFactory,
      RuleKeyBuilderFactory depFileRuleKeyBuilderFactory) {
    this.service = service;
    this.fileHashCache = fileHashCache;
    this.buildMode = buildMode;
    this.depFiles = depFiles;
    this.ruleKeyFactories =
        new RuleKeyFactories(
            inputBasedRuleKeyBuilderFactory,
            abiRuleKeyBuilderFactory,
            depFileRuleKeyBuilderFactory);
  }

  /** Test-only hook to pre-seed a completed result for a rule. */
  @VisibleForTesting
  void setBuildRuleResult(
      BuildRule buildRule,
      BuildRuleSuccessType success,
      CacheResult cacheResult) {
    results.put(
        buildRule.getBuildTarget(),
        Futures.immediateFuture(BuildResult.success(buildRule, success, cacheResult)));
  }

  /** Returns true only if the target's result future exists and completed successfully. */
  @Override
  public boolean isRuleBuilt(BuildTarget buildTarget) throws InterruptedException {
    ListenableFuture<BuildResult> resultFuture = results.get(buildTarget);
    return resultFuture != null && MoreFutures.isSuccess(resultFuture);
  }

  @Nullable
  @Override
  public RuleKey getRuleKey(BuildTarget buildTarget) {
    // NOTE(review): throws if no rule-key future was ever registered for this target.
    return Futures.getUnchecked(ruleKeys.get(buildTarget));
  }

  // Dispatch and return a future resolving to a list of all results of this rules dependencies.
  private ListenableFuture<List<BuildResult>> getDepResults(
      BuildRule rule,
      BuildContext context,
      ConcurrentLinkedQueue<ListenableFuture<Void>> asyncCallbacks) {
    List<ListenableFuture<BuildResult>> depResults =
        Lists.newArrayListWithExpectedSize(rule.getDeps().size());
    for (BuildRule dep : rule.getDeps()) {
      depResults.add(getBuildRuleResultWithRuntimeDeps(dep, context, asyncCallbacks));
    }
    return Futures.allAsList(depResults);
  }

  /**
   * Core decision chain for a single rule: try (1) matching on-disk rule key, (2) artifact
   * cache, then build deps and try (3) dep-file key, input-based key, (4) ABI key, and
   * finally (5) a local build — returning the cheapest success available.
   */
  private ListenableFuture<BuildResult> processBuildRule(
      final BuildRule rule,
      final BuildContext context,
      final OnDiskBuildInfo onDiskBuildInfo,
      final BuildInfoRecorder buildInfoRecorder,
      final BuildableContext buildableContext,
      ConcurrentLinkedQueue<ListenableFuture<Void>> asyncCallbacks)
      throws InterruptedException {

    // If we've already seen a failure, exit early.
    if (!context.isKeepGoing() && firstFailure != null) {
      return Futures.immediateFuture(BuildResult.canceled(rule, firstFailure));
    }

    // 1. Check if it's already built.
    Optional<RuleKey> cachedRuleKey =
        onDiskBuildInfo.getRuleKey(BuildInfo.METADATA_KEY_FOR_RULE_KEY);
    if (rule.getRuleKey().equals(cachedRuleKey.orNull())) {
      return Futures.immediateFuture(
          BuildResult.success(
              rule,
              BuildRuleSuccessType.MATCHING_RULE_KEY,
              CacheResult.localKeyUnchangedHit()));
    }

    // 2. Rule key cache lookup.
    final CacheResult cacheResult =
        tryToFetchArtifactFromBuildCacheAndOverlayOnTopOfProjectFilesystem(
            rule,
            rule.getRuleKey(),
            buildInfoRecorder,
            context.getArtifactCache(),
            // TODO(simons): This should be a shared between all tests, not one per cell
            rule.getProjectFilesystem(),
            context);
    if (cacheResult.getType().isSuccess()) {
      return Futures.immediateFuture(
          BuildResult.success(rule, BuildRuleSuccessType.FETCHED_FROM_CACHE, cacheResult));
    }

    // Log to the event bus.
    context.getEventBus().logVerboseAndPost(LOG, BuildRuleEvent.suspended(rule));

    // 3. Build deps.
    return Futures.transform(
        getDepResults(rule, context, asyncCallbacks),
        new AsyncFunction<List<BuildResult>, BuildResult>() {
          @Override
          public ListenableFuture<BuildResult> apply(@Nonnull List<BuildResult> depResults)
              throws Exception {

            // Log to the event bus.
            context.getEventBus().logVerboseAndPost(LOG, BuildRuleEvent.resumed(rule));

            // If any dependency wasn't successful, cancel ourselves.
            for (BuildResult depResult : depResults) {
              if (buildMode != BuildMode.POPULATE_FROM_REMOTE_CACHE &&
                  depResult.getStatus() != BuildRuleStatus.SUCCESS) {
                return Futures.immediateFuture(
                    BuildResult.canceled(
                        rule,
                        Preconditions.checkNotNull(depResult.getFailure())));
              }
            }

            // If we've already seen a failure, exit early.
            if (buildMode != BuildMode.POPULATE_FROM_REMOTE_CACHE &&
                !context.isKeepGoing() &&
                firstFailure != null) {
              return Futures.immediateFuture(BuildResult.canceled(rule, firstFailure));
            }

            // Dep-file rule keys.
            if (useDependencyFileRuleKey(rule)) {

              // Try to get the current dep-file rule key.
              Optional<RuleKey> depFileRuleKey =
                  calculateDepFileRuleKey(
                      rule,
                      onDiskBuildInfo.getValues(BuildInfo.METADATA_KEY_FOR_DEP_FILE),
                      onDiskBuildInfo.getMultimap(BuildInfo.METADATA_KEY_FOR_INPUT_MAP),
                      /* allowMissingInputs */ true);
              if (depFileRuleKey.isPresent()) {

                // Check the input-based rule key says we're already built.
                Optional<RuleKey> lastDepFileRuleKey =
                    onDiskBuildInfo.getRuleKey(BuildInfo.METADATA_KEY_FOR_DEP_FILE_RULE_KEY);
                if (depFileRuleKey.equals(lastDepFileRuleKey)) {
                  return Futures.immediateFuture(
                      BuildResult.success(
                          rule,
                          BuildRuleSuccessType.MATCHING_DEP_FILE_RULE_KEY,
                          CacheResult.localKeyUnchangedHit()));
                }
              }
            }

            RuleKeyFactories cellData = CachingBuildEngine.this.ruleKeyFactories;
            Preconditions.checkNotNull(cellData);

            // Input-based rule keys.
            if (rule instanceof SupportsInputBasedRuleKey) {

              // Calculate the input-based rule key and record it in the metadata.
              RuleKey inputRuleKey = cellData.inputBasedRuleKeyBuilderFactory.build(rule);
              buildInfoRecorder.addBuildMetadata(
                  BuildInfo.METADATA_KEY_FOR_INPUT_BASED_RULE_KEY,
                  inputRuleKey.toString());

              // Check the input-based rule key says we're already built.
              Optional<RuleKey> lastInputRuleKey =
                  onDiskBuildInfo.getRuleKey(BuildInfo.METADATA_KEY_FOR_INPUT_BASED_RULE_KEY);
              if (inputRuleKey.equals(lastInputRuleKey.orNull())) {
                return Futures.immediateFuture(
                    BuildResult.success(
                        rule,
                        BuildRuleSuccessType.MATCHING_INPUT_BASED_RULE_KEY,
                        CacheResult.localKeyUnchangedHit()));
              }

              // Try to fetch the artifact using the input-based rule key.
              CacheResult cacheResult =
                  tryToFetchArtifactFromBuildCacheAndOverlayOnTopOfProjectFilesystem(
                      rule,
                      inputRuleKey,
                      buildInfoRecorder,
                      context.getArtifactCache(),
                      // TODO(simons): This should be a shared between all tests, not one per cell
                      rule.getProjectFilesystem(),
                      context);
              if (cacheResult.getType().isSuccess()) {
                return Futures.immediateFuture(
                    BuildResult.success(
                        rule,
                        BuildRuleSuccessType.FETCHED_FROM_CACHE_INPUT_BASED,
                        cacheResult));
              }
            }

            // 4. ABI check
            // Deciding whether we need to rebuild is tricky business. We want to rebuild as little
            // as possible while always being sound.
            //
            // For java_library rules that depend only on their first-order deps,
            // they only need to rebuild themselves if any of the following conditions hold:
            // (1) The definition of the build rule has changed.
            // (2) Any of the input files (which includes resources as well as .java files) have
            // changed.
            // (3) The ABI of any of its dependent java_library rules has changed.
            //
            // For other types of build rules, we have to be more conservative when rebuilding. In
            // those cases, we rebuild if any of the following conditions hold:
            // (1) The definition of the build rule has changed.
            // (2) Any of the input files have changed.
            // (3) Any of the RuleKeys of this rule's deps have changed.
            //
            // Because a RuleKey for a rule will change if any of its transitive deps have changed,
            // that means a change in one of the leaves can result in almost all rules being
            // rebuilt, which is slow. Fortunately, we limit the effects of this when building Java
            // code when checking the ABI of deps instead of the RuleKey for deps.
            if (rule instanceof AbiRule) {
              RuleKey abiRuleKey = cellData.abiRuleKeyBuilderFactory.build(rule);
              buildInfoRecorder.addBuildMetadata(
                  BuildInfo.METADATA_KEY_FOR_ABI_RULE_KEY,
                  abiRuleKey.toString());

              Optional<RuleKey> lastAbiRuleKey =
                  onDiskBuildInfo.getRuleKey(BuildInfo.METADATA_KEY_FOR_ABI_RULE_KEY);
              if (abiRuleKey.equals(lastAbiRuleKey.orNull())) {
                return Futures.immediateFuture(
                    BuildResult.success(
                        rule,
                        BuildRuleSuccessType.MATCHING_ABI_RULE_KEY,
                        CacheResult.localKeyUnchangedHit()));
              }
            }

            if (buildMode != BuildMode.POPULATE_FROM_REMOTE_CACHE) {
              // 5. build the rule
              executeCommandsNowThatDepsAreBuilt(rule, context, buildableContext);
              return Futures.immediateFuture(
                  BuildResult.success(rule, BuildRuleSuccessType.BUILT_LOCALLY, cacheResult));
            } else {
              LOG.info("Cannot populate cache for " +
                  rule.getBuildTarget().getFullyQualifiedName());
              return Futures.immediateFuture(BuildResult.canceled(rule,
                  new HumanReadableException("Skipping %s: in cache population mode " +
                      "local builds are disabled", rule)));
            }
          }
        },
        service);
  }

  /**
   * Wraps the decision chain above with per-rule bookkeeping: on-disk build info, metadata
   * recording, deep-build chaining, post-build steps, cache upload and event logging.
   */
  private ListenableFuture<BuildResult> processBuildRule(
      final BuildRule rule,
      final BuildContext context,
      ConcurrentLinkedQueue<ListenableFuture<Void>> asyncCallbacks)
      throws InterruptedException {

    // Log to the event bus.
    context.getEventBus().logVerboseAndPost(LOG, BuildRuleEvent.resumed(rule));

    final OnDiskBuildInfo onDiskBuildInfo =
        context.createOnDiskBuildInfoFor(
            rule.getBuildTarget(),
            rule.getProjectFilesystem());
    final BuildInfoRecorder buildInfoRecorder =
        context.createBuildInfoRecorder(rule.getBuildTarget(), rule.getProjectFilesystem())
            .addBuildMetadata(
                BuildInfo.METADATA_KEY_FOR_RULE_KEY,
                rule.getRuleKey().toString());
    final BuildableContext buildableContext = new DefaultBuildableContext(buildInfoRecorder);

    // Dispatch the build job for this rule.
    ListenableFuture<BuildResult> buildResult =
        processBuildRule(
            rule,
            context,
            onDiskBuildInfo,
            buildInfoRecorder,
            buildableContext,
            asyncCallbacks);

    // If we're performing a deep build, guarantee that all dependencies will *always* get
    // materialized locally by chaining up to our result future.
    if (buildMode == BuildMode.DEEP || buildMode == BuildMode.POPULATE_FROM_REMOTE_CACHE) {
      buildResult =
          MoreFutures.chainExceptions(
              getDepResults(rule, context, asyncCallbacks),
              buildResult);
    }

    // Setup a callback to handle either the cached or built locally cases.
    AsyncFunction<BuildResult, BuildResult> callback =
        new AsyncFunction<BuildResult, BuildResult>() {
          @Override
          public ListenableFuture<BuildResult> apply(@Nonnull BuildResult input)
              throws Exception {

            // If we weren't successful, exit now.
            if (input.getStatus() != BuildRuleStatus.SUCCESS) {
              return Futures.immediateFuture(input);
            }

            // We shouldn't see any build fail result at this point.
            BuildRuleSuccessType success = Preconditions.checkNotNull(input.getSuccess());

            // If we didn't build the rule locally, reload the recorded paths from the build
            // metadata.
            if (success != BuildRuleSuccessType.BUILT_LOCALLY) {
              for (String str :
                   onDiskBuildInfo.getValues(BuildInfo.METADATA_KEY_FOR_RECORDED_PATHS).get()) {
                buildInfoRecorder.recordArtifact(Paths.get(str));
              }
            }

            // If the success type means the rule has potentially changed it's outputs...
            if (success.outputsHaveChanged()) {

              // The build has succeeded, whether we've fetched from cache, or built locally.
              // So run the post-build steps.
              if (rule instanceof HasPostBuildSteps) {
                executePostBuildSteps(
                    rule,
                    ((HasPostBuildSteps) rule).getPostBuildSteps(context, buildableContext),
                    context);
              }

              // Invalidate any cached hashes for the output paths, since we've updated them.
              for (Path path : buildInfoRecorder.getRecordedPaths()) {
                fileHashCache.invalidate(path);
              }
            }

            // If this rule uses dep files and we built locally, make sure we store the new dep file
            // list and re-calculate the dep file rule key.
            if (useDependencyFileRuleKey(rule) && success == BuildRuleSuccessType.BUILT_LOCALLY) {

              // Query the rule for the actual inputs it used, and verify these are relative.
              ImmutableList<Path> inputs =
                  ((SupportsDependencyFileRuleKey) rule).getInputsAfterBuildingLocally();
              for (Path path : inputs) {
                Preconditions.checkState(
                    !path.isAbsolute(),
                    String.format(
                        "%s: reported absolute path as an input: %s",
                        rule.getBuildTarget(),
                        path));
              }

              // Record the inputs into our metadata for next time.
              ImmutableList<String> inputStrings =
                  FluentIterable.from(inputs)
                      .transform(Functions.toStringFunction())
                      .toList();
              buildInfoRecorder.addMetadata(
                  BuildInfo.METADATA_KEY_FOR_DEP_FILE,
                  inputStrings);

              // Get the input from the rule if applicable
              Optional<ImmutableMultimap<String, String>> inputMap =
                  ((SupportsDependencyFileRuleKey) rule).getSymlinkTreeInputMap();
              if (inputMap.isPresent()) {
                buildInfoRecorder.addMetadata(BuildInfo.METADATA_KEY_FOR_INPUT_MAP, inputMap.get());
              }

              // Re-calculate and store the depfile rule key for next time.
              Optional<RuleKey> depFileRuleKey =
                  calculateDepFileRuleKey(
                      rule,
                      Optional.of(inputStrings),
                      inputMap,
                      /* allowMissingInputs */ false);
              Preconditions.checkState(depFileRuleKey.isPresent());
              buildInfoRecorder.addBuildMetadata(
                  BuildInfo.METADATA_KEY_FOR_DEP_FILE_RULE_KEY,
                  depFileRuleKey.get().toString());
            }

            // Make sure that all of the local files have the same values they would as if the
            // rule had been built locally.
            buildInfoRecorder.addBuildMetadata(
                BuildInfo.METADATA_KEY_FOR_TARGET,
                rule.getBuildTarget().toString());
            buildInfoRecorder.addMetadata(
                BuildInfo.METADATA_KEY_FOR_RECORDED_PATHS,
                FluentIterable.from(buildInfoRecorder.getRecordedPaths())
                    .transform(Functions.toStringFunction()));
            if (success.shouldWriteRecordedMetadataToDiskAfterBuilding()) {
              try {
                boolean clearExistingMetadata = success.shouldClearAndOverwriteMetadataOnDisk();
                buildInfoRecorder.writeMetadataToDisk(clearExistingMetadata);
              } catch (IOException e) {
                throw new IOException(
                    String.format("Failed to write metadata to disk for %s.", rule),
                    e);
              }
            }

            // Give the rule a chance to populate its internal data structures now that all of
            // the files should be in a valid state.
            try {
              if (rule instanceof InitializableFromDisk) {
                doInitializeFromDisk((InitializableFromDisk<?>) rule, onDiskBuildInfo);
              }
            } catch (IOException e) {
              throw new IOException(String.format("Error initializing %s from disk.", rule), e);
            }

            return Futures.immediateFuture(input);
          }
        };
    buildResult = Futures.transform(buildResult, callback);

    // Handle either build success or failure.
    final SettableFuture<BuildResult> result = SettableFuture.create();
    asyncCallbacks.add(
        MoreFutures.addListenableCallback(
            buildResult,
            new FutureCallback<BuildResult>() {

              // TODO(mbolin): Delete all files produced by the rule, as they are not guaranteed
              // to be valid at this point?
              private void cleanupAfterError() {
                try {
                  onDiskBuildInfo.deleteExistingMetadata();
                } catch (Throwable t) {
                  context.getEventBus().post(
                      ThrowableConsoleEvent.create(
                          t,
                          "Error when deleting metadata for %s.",
                          rule));
                }
              }

              // Pushes the artifact to the cache under every applicable rule key.
              private void uploadToCache(BuildRuleSuccessType success) {

                // Collect up all the rule keys we have index the artifact in the cache with.
                Set<RuleKey> ruleKeys = Sets.newHashSet();

                // If the rule key has changed (and is not already in the cache), we need to push
                // the artifact to cache using the new key.
                if (success.shouldUploadResultingArtifact()) {
                  ruleKeys.add(rule.getRuleKey());
                }

                // If the input-based rule key has changed, we need to push the artifact to cache
                // using the new key.
                if (rule instanceof SupportsInputBasedRuleKey &&
                    success.shouldUploadResultingArtifactInputBased()) {
                  ruleKeys.add(
                      onDiskBuildInfo.getRuleKey(
                          BuildInfo.METADATA_KEY_FOR_INPUT_BASED_RULE_KEY).get());
                }

                // If we have any rule keys to push to the cache with, do the upload now.
                if (!ruleKeys.isEmpty()) {
                  try {
                    buildInfoRecorder.performUploadToArtifactCache(
                        ImmutableSet.copyOf(ruleKeys),
                        context.getArtifactCache(),
                        context.getEventBus());
                  } catch (Throwable t) {
                    context.getEventBus().post(
                        ThrowableConsoleEvent.create(
                            t,
                            "Error uploading to cache for %s.",
                            rule));
                  }
                }
              }

              // Records failure/success, unblocks dependents, uploads, and logs the result.
              private void handleResult(BuildResult input) {
                Optional<Long> outputSize = Optional.absent();
                Optional<HashCode> outputHash = Optional.absent();
                Optional<BuildRuleSuccessType> successType = Optional.absent();

                if (input.getStatus() == BuildRuleStatus.FAIL) {

                  // Make this failure visible for other rules, so that they can stop early.
                  firstFailure = input.getFailure();

                  // If we failed, cleanup the state of this rule.
                  cleanupAfterError();
                }

                // Unblock dependents.
                result.set(input);

                if (input.getStatus() == BuildRuleStatus.SUCCESS) {
                  BuildRuleSuccessType success = Preconditions.checkNotNull(input.getSuccess());
                  successType = Optional.of(success);
                  uploadToCache(success);

                  // Calculate the hash and size of the rule outputs that we built locally.
                  if (success == BuildRuleSuccessType.BUILT_LOCALLY) {
                    try {
                      outputSize = Optional.of(buildInfoRecorder.getOutputSize());
                      outputHash = Optional.of(buildInfoRecorder.getOutputHash(fileHashCache));
                    } catch (IOException e) {
                      context.getEventBus().post(
                          ThrowableConsoleEvent.create(
                              e,
                              "Error getting output hash and size for %s.",
                              rule));
                    }
                  }
                }

                // Log the result to the event bus.
                context.getEventBus().logVerboseAndPost(
                    LOG,
                    BuildRuleEvent.finished(
                        rule,
                        input.getStatus(),
                        input.getCacheResult(),
                        successType,
                        outputHash,
                        outputSize));
              }

              @Override
              public void onSuccess(BuildResult input) {
                handleResult(input);
              }

              @Override
              public void onFailure(@Nonnull Throwable thrown) {
                handleResult(BuildResult.failure(rule, thrown));

                // Reset interrupted flag once failure has been recorded.
                if (thrown instanceof InterruptedException) {
                  Thread.currentThread().interrupt();
                }
              }
            }));
    return result;
  }

  // Provide a future that resolve to the result of executing this rule and its runtime
  // dependencies.
  private ListenableFuture<BuildResult> getBuildRuleResultWithRuntimeDepsUnlocked(
      final BuildRule rule,
      final BuildContext context,
      final ConcurrentLinkedQueue<ListenableFuture<Void>> asyncCallbacks) {

    // If the rule is already executing, return it's result future from the cache.
    ListenableFuture<BuildResult> existingResult = results.get(rule.getBuildTarget());
    if (existingResult != null) {
      return existingResult;
    }

    // Get the future holding the result for this rule and, if we have no additional runtime deps
    // to attach, return it.
    ListenableFuture<RuleKey> ruleKey = calculateRuleKey(rule, context);
    ListenableFuture<BuildResult> result =
        Futures.transform(
            ruleKey,
            new AsyncFunction<RuleKey, BuildResult>() {
              @Override
              public ListenableFuture<BuildResult> apply(@Nonnull RuleKey input)
                  throws Exception {
                return processBuildRule(rule, context, asyncCallbacks);
              }
            },
            service);
    if (!(rule instanceof HasRuntimeDeps)) {
      results.put(rule.getBuildTarget(), result);
      return result;
    }

    // Collect any runtime deps we have into a list of futures.
    ImmutableSortedSet<BuildRule> runtimeDeps = ((HasRuntimeDeps) rule).getRuntimeDeps();
    List<ListenableFuture<BuildResult>> runtimeDepResults =
        Lists.newArrayListWithExpectedSize(runtimeDeps.size());
    for (BuildRule dep : runtimeDeps) {
      runtimeDepResults.add(
          getBuildRuleResultWithRuntimeDepsUnlocked(dep, context, asyncCallbacks));
    }

    // Create a new combined future, which runs the original rule and all the runtime deps in
    // parallel, but which propagates an error if any one of them fails.
    result =
        MoreFutures.chainExceptions(
            Futures.allAsList(runtimeDepResults),
            result);
    results.put(rule.getBuildTarget(), result);
    return result;
  }

  /** Thread-safe wrapper: double-checked lookup before taking the `results` lock. */
  private ListenableFuture<BuildResult> getBuildRuleResultWithRuntimeDeps(
      final BuildRule rule,
      final BuildContext context,
      final ConcurrentLinkedQueue<ListenableFuture<Void>> asyncCallbacks) {

    // If the rule is already executing, return it's result future from the cache without acquiring
    // the lock.
    ListenableFuture<BuildResult> existingResult = results.get(rule.getBuildTarget());
    if (existingResult != null) {
      return existingResult;
    }

    // Otherwise, grab the lock and delegate to the real method,
    synchronized (results) {
      return getBuildRuleResultWithRuntimeDepsUnlocked(rule, context, asyncCallbacks);
    }
  }

  /**
   * Asynchronously walks the dep graph rooted at {@code rule}, marking each rule in
   * {@code seen} exactly once; the returned future completes when the walk is done.
   */
  public ListenableFuture<?> walkRule(
      BuildRule rule,
      final ConcurrentMap<BuildRule, Integer> seen) {
    ListenableFuture<?> result = Futures.immediateFuture(null);
    if (seen.putIfAbsent(rule, 0) == null) {
      result =
          Futures.transform(
              getRuleDeps(rule),
              new AsyncFunction<ImmutableSortedSet<BuildRule>, List<Object>>() {
                @Override
                public ListenableFuture<List<Object>> apply(
                    @Nonnull ImmutableSortedSet<BuildRule> deps) {
                  List<ListenableFuture<?>> results =
                      Lists.newArrayListWithExpectedSize(deps.size());
                  for (BuildRule dep : deps) {
                    results.add(walkRule(dep, seen));
                  }
                  return Futures.allAsList(results);
                }
              });
    }
    return result;
  }

  /** Counts the transitive closure of rules (including runtime deps) reachable from `rules`. */
  @Override
  public int getNumRulesToBuild(Iterable<BuildRule> rules) {
    ConcurrentMap<BuildRule, Integer> seen = Maps.newConcurrentMap();
    ListenableFuture<Void> result = Futures.immediateFuture(null);
    for (BuildRule rule : rules) {
      result = MoreFutures.chainExceptions(walkRule(rule, seen), result);
    }
    Futures.getUnchecked(result);
    return seen.size();
  }

  /** Memoized, async computation of a rule's static deps plus its runtime deps. */
  private synchronized ListenableFuture<ImmutableSortedSet<BuildRule>> getRuleDeps(
      final BuildRule rule) {
    ListenableFuture<ImmutableSortedSet<BuildRule>> deps = ruleDeps.get(rule.getBuildTarget());
    if (deps == null) {
      deps =
          service.submit(
              new Callable<ImmutableSortedSet<BuildRule>>() {
                @Override
                public ImmutableSortedSet<BuildRule> call() throws Exception {
                  ImmutableSortedSet.Builder<BuildRule> deps = ImmutableSortedSet.naturalOrder();
                  deps.addAll(rule.getDeps());
                  if (rule instanceof HasRuntimeDeps) {
                    deps.addAll(((HasRuntimeDeps) rule).getRuntimeDeps());
                  }
                  return deps.build();
                }
              });
      ruleDeps.put(rule.getBuildTarget(), deps);
    }
    return deps;
  }

  /**
   * Memoized, async rule-key computation: waits for all dep rule keys first so the
   * (dep-dependent) key can be computed, emitting started/suspended events around it.
   */
  private synchronized ListenableFuture<RuleKey> calculateRuleKey(
      final BuildRule rule,
      final BuildContext context) {
    ListenableFuture<RuleKey> ruleKey = ruleKeys.get(rule.getBuildTarget());
    if (ruleKey == null) {

      // Grab all the dependency rule key futures.  Since our rule key calculation depends on this
      // one, we need to wait for them to complete.
      ListenableFuture<List<RuleKey>> depKeys =
          Futures.transform(
              getRuleDeps(rule),
              new AsyncFunction<ImmutableSortedSet<BuildRule>, List<RuleKey>>() {
                @Override
                public ListenableFuture<List<RuleKey>> apply(
                    @Nonnull ImmutableSortedSet<BuildRule> deps) {
                  List<ListenableFuture<RuleKey>> depKeys =
                      Lists.newArrayListWithExpectedSize(rule.getDeps().size());
                  for (BuildRule dep : deps) {
                    depKeys.add(calculateRuleKey(dep, context));
                  }
                  return Futures.allAsList(depKeys);
                }
              });

      // Setup a future to calculate this rule key once the dependencies have been calculated.
      ruleKey =
          Futures.transform(
              depKeys,
              new Function<List<RuleKey>, RuleKey>() {
                @Override
                public RuleKey apply(List<RuleKey> input) {
                  context.getEventBus().logVerboseAndPost(
                      LOG,
                      BuildRuleEvent.started(rule));
                  try {
                    return rule.getRuleKey();
                  } finally {
                    context.getEventBus().logVerboseAndPost(
                        LOG,
                        BuildRuleEvent.suspended(rule));
                  }
                }
              },
              service);

      // Record the rule key future.
      ruleKeys.put(rule.getBuildTarget(), ruleKey);
    }
    return ruleKey;
  }

  @Override
  public ListenableFuture<BuildResult> build(BuildContext context, BuildRule rule) {
    // Keep track of all jobs that run asynchronously with respect to the build dep chain.  We want
    // to make sure we wait for these before calling yielding the final build result.
    final ConcurrentLinkedQueue<ListenableFuture<Void>> asyncCallbacks =
        new ConcurrentLinkedQueue<>();
    final ListenableFuture<BuildResult> resultFuture =
        getBuildRuleResultWithRuntimeDeps(rule, context, asyncCallbacks);
    // Only yield the final result once every async callback has completed.
    return Futures.transform(
        resultFuture,
        new AsyncFunction<BuildResult, BuildResult>() {
          @Override
          public ListenableFuture<BuildResult> apply(@Nonnull BuildResult result)
              throws Exception {
            return Futures.transform(
                Futures.allAsList(asyncCallbacks),
                Functions.constant(result));
          }
        });
  }

  /**
   * Fetches the artifact zip for {@code ruleKey} from the cache and, on a hit, unpacks it
   * over the project filesystem (plus any cached build metadata). Returns the raw
   * {@link CacheResult}; a failed unzip is downgraded to a miss so the rule builds locally.
   */
  private CacheResult tryToFetchArtifactFromBuildCacheAndOverlayOnTopOfProjectFilesystem(
      BuildRule rule,
      RuleKey ruleKey,
      BuildInfoRecorder buildInfoRecorder,
      ArtifactCache artifactCache,
      ProjectFilesystem filesystem,
      BuildContext buildContext)
      throws InterruptedException {

    // Create a temp file whose extension must be ".zip" for Filesystems.newFileSystem() to infer
    // that we are creating a zip-based FileSystem.
    Path zipFile;
    try {
      zipFile =
          Files.createTempFile(
              "buck_artifact_" + MoreFiles.sanitize(rule.getBuildTarget().getShortName()),
              ".zip");
    } catch (IOException e) {
      throw new RuntimeException(e);
    }

    // TODO(mbolin): Change ArtifactCache.fetch() so that it returns a File instead of takes one.
    // Then we could download directly from the remote cache into the on-disk cache and unzip it
    // from there.
    CacheResult cacheResult =
        buildInfoRecorder.fetchArtifactForBuildable(ruleKey, zipFile, artifactCache);
    if (!cacheResult.getType().isSuccess()) {
      try {
        Files.delete(zipFile);
      } catch (IOException e) {
        LOG.warn(e, "failed to delete %s", zipFile);
      }
      return cacheResult;
    }
    LOG.debug("Fetched '%s' from cache with rulekey '%s'", rule, ruleKey);

    // We unzip the file in the root of the project directory.
    // Ideally, the following would work:
    //
    // Path pathToZip = Paths.get(zipFile.getAbsolutePath());
    // FileSystem fs = FileSystems.newFileSystem(pathToZip, /* loader */ null);
    // Path root = Iterables.getOnlyElement(fs.getRootDirectories());
    // MoreFiles.copyRecursively(root, projectRoot);
    //
    // Unfortunately, this does not appear to work, in practice, because MoreFiles fails when trying
    // to resolve a Path for a zip entry against a file Path on disk.
    ArtifactCompressionEvent.Started started =
        ArtifactCompressionEvent.started(
            ArtifactCompressionEvent.Operation.DECOMPRESS,
            ImmutableSet.of(ruleKey));
    buildContext.getEventBus().post(started);
    try {
      Unzip.extractZipFile(
          zipFile.toAbsolutePath(),
          filesystem,
          Unzip.ExistingFileMode.OVERWRITE_AND_CLEAN_DIRECTORIES);

      // We only delete the ZIP file when it has been unzipped successfully. Otherwise, we leave it
      // around for debugging purposes.
      Files.delete(zipFile);

      if (cacheResult.getType() == CacheResultType.HIT) {

        // If we have a hit, also write out the build metadata.
        Path metadataDir = BuildInfo.getPathToMetadataDirectory(rule.getBuildTarget());
        for (Map.Entry<String, String> ent : cacheResult.getMetadata().entrySet()) {
          Path dest = metadataDir.resolve(ent.getKey());
          filesystem.createParentDirs(dest);
          filesystem.writeContentsToPath(ent.getValue(), dest);
        }
      }
    } catch (IOException e) {
      // In the wild, we have seen some inexplicable failures during this step. For now, we try to
      // give the user as much information as we can to debug the issue, but return CacheResult.MISS
      // so that Buck will fall back on doing a local build.
      // NOTE(review): rule.getBuildTarget() is concatenated INTO the format string instead of
      // being passed as the first %s argument, so the %s placeholders are filled with the wrong
      // values (zipFile path and stacktrace shifted) — looks like a bug; verify against callers.
      buildContext.getEventBus().post(ConsoleEvent.warning(
          "Failed to unzip the artifact for %s at %s.\n" +
              "The rule will be built locally, " +
              "but here is the stacktrace of the failed unzip call:\n" +
              rule.getBuildTarget(),
          zipFile.toAbsolutePath(),
          Throwables.getStackTraceAsString(e)));
      return CacheResult.miss();
    } finally {
      buildContext.getEventBus().post(ArtifactCompressionEvent.finished(started));
    }
    return cacheResult;
  }

  /**
   * Execute the commands for this build rule. Requires all dependent rules are already built
   * successfully.
   */
  private void executeCommandsNowThatDepsAreBuilt(
      BuildRule rule,
      BuildContext context,
      BuildableContext buildableContext)
      throws InterruptedException, StepFailedException {

    LOG.debug("Building locally: %s", rule);

    // Attempt to get an approximation of how long it takes to actually run the command.
    @SuppressWarnings("PMD.PrematureDeclaration")
    long start = System.nanoTime();

    // Get and run all of the commands.
    List<Step> steps = rule.getBuildSteps(context, buildableContext);
    StepRunner stepRunner = context.getStepRunner();
    Optional<BuildTarget> optionalTarget = Optional.of(rule.getBuildTarget());
    for (Step step : steps) {
      stepRunner.runStepForBuildTarget(step, optionalTarget);

      // Check for interruptions that may have been ignored by step.
if (Thread.interrupted()) { Thread.currentThread().interrupt(); throw new InterruptedException(); } } long end = System.nanoTime(); LOG.debug("Build completed: %s %s (%dns)", rule.getType(), rule.getFullyQualifiedName(), end - start); } private void executePostBuildSteps( BuildRule rule, Iterable<Step> postBuildSteps, BuildContext context) throws InterruptedException, StepFailedException { LOG.debug("Running post-build steps for %s", rule); StepRunner stepRunner = context.getStepRunner(); Optional<BuildTarget> optionalTarget = Optional.of(rule.getBuildTarget()); for (Step step : postBuildSteps) { stepRunner.runStepForBuildTarget(step, optionalTarget); // Check for interruptions that may have been ignored by step. if (Thread.interrupted()) { Thread.currentThread().interrupt(); throw new InterruptedException(); } } LOG.debug("Finished running post-build steps for %s", rule); } private <T> void doInitializeFromDisk( InitializableFromDisk<T> initializable, OnDiskBuildInfo onDiskBuildInfo) throws IOException { BuildOutputInitializer<T> buildOutputInitializer = initializable.getBuildOutputInitializer(); T buildOutput = buildOutputInitializer.initializeFromDisk(onDiskBuildInfo); buildOutputInitializer.setBuildOutput(buildOutput); } @Nullable @Override public BuildResult getBuildRuleResult(BuildTarget buildTarget) throws ExecutionException, InterruptedException { ListenableFuture<BuildResult> result = results.get(buildTarget); if (result == null) { return null; } return result.get(); } private boolean useDependencyFileRuleKey(BuildRule rule) { return depFiles == DepFiles.ENABLED && rule instanceof SupportsDependencyFileRuleKey && ((SupportsDependencyFileRuleKey) rule).useDependencyFileRuleKeys(); } private Optional<RuleKey> calculateDepFileRuleKey( BuildRule rule, Optional<ImmutableList<String>> depFile, Optional<ImmutableMultimap<String, String>> inputMap, boolean allowMissingInputs) throws IOException { Preconditions.checkState(useDependencyFileRuleKey(rule)); // Extract 
the dep file from the last build. If we don't find one, abort. if (!depFile.isPresent()) { return Optional.absent(); } RuleKeyFactories cellData = this.ruleKeyFactories; Preconditions.checkNotNull(cellData); // Add in the inputs explicitly listed in the dep file. If any inputs are no longer on disk, // this means something changed and a dep-file based rule key can't be calculated. ImmutableList<Path> inputs = FluentIterable.from(depFile.get()).transform(MorePaths.TO_PATH).toList(); RuleKeyBuilder builder = cellData.depFileRuleKeyBuilderFactory.newInstance(rule); for (Path input : inputs) { try { builder.setPath(input); } catch (NoSuchFileException e) { if (!allowMissingInputs) { throw e; } return Optional.absent(); } } // If there is an input map, use that to represent the mapping of inputs done by symlink trees. if (inputMap.isPresent()) { for (BuildRule dep : rule.getDeps()) { if (dep instanceof SymlinkTree) { continue; } builder.setReflectively("buck.deps", dep); } builder.setReflectively("buck.input-map", inputMap); } else { builder.setReflectively("buck.deps", rule.getDeps()); } return Optional.of(builder.build()); } /** * The mode in which to build rules. */ public enum BuildMode { // Perform a shallow build, only locally materializing the bare minimum needed to build the // top-level build targets. SHALLOW, // Perform a deep build, locally materializing all the transitive dependencies of the top-level // build targets. DEEP, // Perform local cache population by only loading all the transitive dependencies of // the top-level build targets from the remote cache, without building missing or changed // dependencies locally. POPULATE_FROM_REMOTE_CACHE, } /** * Whether to use dependency files or not. 
*/ public enum DepFiles { ENABLED, DISABLED, } @VisibleForTesting static class RuleKeyFactories { public final RuleKeyBuilderFactory inputBasedRuleKeyBuilderFactory; public final RuleKeyBuilderFactory abiRuleKeyBuilderFactory; public final RuleKeyBuilderFactory depFileRuleKeyBuilderFactory; public static RuleKeyFactories build( FileHashCache sharedHashCache, BuildRuleResolver ruleResolver) { ImmutableList.Builder<FileHashCache> caches = ImmutableList.builder(); caches.add(sharedHashCache); StackedFileHashCache fileHashCache = new StackedFileHashCache(caches.build()); SourcePathResolver pathResolver = new SourcePathResolver(ruleResolver); return new RuleKeyFactories( new InputBasedRuleKeyBuilderFactory( fileHashCache, pathResolver), new AbiRuleKeyBuilderFactory( fileHashCache, pathResolver), new DependencyFileRuleKeyBuilderFactory( fileHashCache, pathResolver)); } @VisibleForTesting RuleKeyFactories( RuleKeyBuilderFactory inputBasedRuleKeyBuilderFactory, RuleKeyBuilderFactory abiRuleKeyBuilderFactory, RuleKeyBuilderFactory depFileRuleKeyBuilderFactory) { this.inputBasedRuleKeyBuilderFactory = inputBasedRuleKeyBuilderFactory; this.abiRuleKeyBuilderFactory = abiRuleKeyBuilderFactory; this.depFileRuleKeyBuilderFactory = depFileRuleKeyBuilderFactory; } } }
/*
 * Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
 */
package com.intellij.execution;

import com.intellij.execution.actions.RunContextAction;
import com.intellij.execution.compound.CompoundRunConfiguration;
import com.intellij.execution.compound.SettingsAndEffectiveTarget;
import com.intellij.execution.configurations.RunConfiguration;
import com.intellij.execution.executors.DefaultRunExecutor;
import com.intellij.execution.impl.ExecutionManagerImpl;
import com.intellij.execution.process.ProcessHandler;
import com.intellij.execution.runners.ExecutionEnvironment;
import com.intellij.execution.runners.ExecutionEnvironmentBuilder;
import com.intellij.execution.runners.ExecutionUtil;
import com.intellij.execution.runners.ProgramRunner;
import com.intellij.execution.ui.RunContentDescriptor;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.ApplicationComponent;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.*;
import com.intellij.openapi.util.Trinity;
import com.intellij.util.IconUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.messages.MessageBusConnection;
import gnu.trove.THashMap;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.util.*;

/**
 * Application-level registry of {@link Executor}s (Run, Debug, ...).  For each registered
 * executor it creates and registers two actions: a main toolbar action ({@link #RUNNERS_GROUP})
 * and a context-menu action ({@link #RUN_CONTEXT_GROUP}).  It also tracks which
 * (project, executor, runner) triples currently have a process start in flight.
 */
public class ExecutorRegistryImpl extends ExecutorRegistry implements Disposable, ApplicationComponent {
  private static final Logger LOG = Logger.getInstance(ExecutorRegistryImpl.class);

  @NonNls public static final String RUNNERS_GROUP = "RunnerActions";
  @NonNls public static final String RUN_CONTEXT_GROUP = "RunContextGroupInner";

  // Non-final: nulled out in dispose(); see NOTE(review) there.
  private List<Executor> myExecutors = new ArrayList<>();
  private ActionManager myActionManager;
  private final Map<String, Executor> myId2Executor = new THashMap<>();
  private final Set<String> myContextActionIdSet = new THashSet<>();
  // Actions that THIS registry created (as opposed to pre-existing ones found in ActionManager);
  // only these are unregistered again in unregisterAction().
  private final Map<String, AnAction> myId2Action = new THashMap<>();
  private final Map<String, AnAction> myContextActionId2Action = new THashMap<>();

  // [Project, ExecutorId, RunnerId]
  private final Set<Trinity<Project, String, String>> myInProgress = Collections.synchronizedSet(new THashSet<>());

  public ExecutorRegistryImpl(ActionManager actionManager) {
    myActionManager = actionManager;
  }

  /**
   * Registers {@code executor} and creates its toolbar and context actions.
   * Logs an error (but continues) if the id or context-action id is already taken.
   */
  synchronized void initExecutor(@NotNull final Executor executor) {
    if (myId2Executor.get(executor.getId()) != null) {
      LOG.error("Executor with id: \"" + executor.getId() + "\" was already registered!");
    }

    if (myContextActionIdSet.contains(executor.getContextActionId())) {
      LOG.error("Executor with context action id: \"" + executor.getContextActionId() + "\" was already registered!");
    }

    myExecutors.add(executor);
    myId2Executor.put(executor.getId(), executor);
    myContextActionIdSet.add(executor.getContextActionId());

    registerAction(executor.getId(), new ExecutorAction(executor), RUNNERS_GROUP, myId2Action);
    registerAction(executor.getContextActionId(), new RunContextAction(executor), RUN_CONTEXT_GROUP, myContextActionId2Action);
  }

  // Registers anAction under actionId unless an action with that id already exists, then adds
  // whichever action ends up bound to the id into the given action group.  Only actions this
  // method actually registered are remembered in `map` (so they can be unregistered later).
  private void registerAction(@NotNull final String actionId,
                              @NotNull final AnAction anAction,
                              @NotNull final String groupId,
                              @NotNull final Map<String, AnAction> map) {
    AnAction action = myActionManager.getAction(actionId);
    if (action == null) {
      myActionManager.registerAction(actionId, anAction);
      map.put(actionId, anAction);
      action = anAction;
    }

    ((DefaultActionGroup)myActionManager.getAction(groupId)).add(action);
  }

  /** Reverse of {@link #initExecutor}: removes the executor and its two actions. */
  synchronized void deinitExecutor(@NotNull final Executor executor) {
    myExecutors.remove(executor);
    myId2Executor.remove(executor.getId());
    myContextActionIdSet.remove(executor.getContextActionId());

    unregisterAction(executor.getId(), RUNNERS_GROUP, myId2Action);
    unregisterAction(executor.getContextActionId(), RUN_CONTEXT_GROUP, myContextActionId2Action);
  }

  // Removes the action from the group; unregisters it from ActionManager only if this registry
  // created it (i.e. it is present in `map`).
  private void unregisterAction(@NotNull final String actionId,
                                @NotNull final String groupId,
                                @NotNull final Map<String, AnAction> map) {
    final DefaultActionGroup group = (DefaultActionGroup)myActionManager.getAction(groupId);
    if (group != null) {
      group.remove(myActionManager.getAction(actionId));
      final AnAction action = map.get(actionId);
      if (action != null) {
        myActionManager.unregisterAction(actionId);
        map.remove(actionId);
      }
    }
  }

  @Override
  @NotNull
  public synchronized Executor[] getRegisteredExecutors() {
    return myExecutors.toArray(new Executor[0]);
  }

  @Override
  public Executor getExecutorById(final String executorId) {
    return myId2Executor.get(executorId);
  }

  /**
   * Subscribes to execution and project-lifecycle events to maintain {@link #myInProgress},
   * then registers all executors contributed via the extension point.
   */
  @Override
  public void initComponent() {
    MessageBusConnection connection = ApplicationManager.getApplication().getMessageBus().connect();
    connection.subscribe(ExecutionManager.EXECUTION_TOPIC, new ExecutionListener() {
      @Override
      public void processStartScheduled(@NotNull String executorId, @NotNull ExecutionEnvironment environment) {
        myInProgress.add(createExecutionId(executorId, environment));
      }

      @Override
      public void processNotStarted(@NotNull String executorId, @NotNull ExecutionEnvironment environment) {
        myInProgress.remove(createExecutionId(executorId, environment));
      }

      @Override
      public void processStarted(@NotNull String executorId, @NotNull ExecutionEnvironment environment, @NotNull ProcessHandler handler) {
        myInProgress.remove(createExecutionId(executorId, environment));
      }
    });

    connection.subscribe(ProjectManager.TOPIC, new ProjectManagerListener() {
      @Override
      public void projectClosed(final Project project) {
        // perform cleanup
        // Manual synchronization: the iterator of a synchronizedSet is not itself thread-safe.
        synchronized (myInProgress) {
          for (Iterator<Trinity<Project, String, String>> it = myInProgress.iterator(); it.hasNext(); ) {
            if (project == it.next().first) {
              it.remove();
            }
          }
        }
      }
    });

    for (Executor executor : Executor.EXECUTOR_EXTENSION_NAME.getExtensions()) {
      initExecutor(executor);
    }
  }

  // Key identifying one scheduled/ongoing process start.
  @NotNull
  private static Trinity<Project, String, String> createExecutionId(String executorId, @NotNull ExecutionEnvironment environment) {
    return Trinity.create(environment.getProject(), executorId, environment.getRunner().getRunnerId());
  }

  @Override
  public boolean isStarting(Project project, final String executorId, final String runnerId) {
    return myInProgress.contains(Trinity.create(project, executorId, runnerId));
  }

  @Override
  public boolean isStarting(@NotNull ExecutionEnvironment environment) {
    return isStarting(environment.getProject(), environment.getExecutor().getId(), environment.getRunner().getRunnerId());
  }

  @Override
  public synchronized void dispose() {
    if (!myExecutors.isEmpty()) {
      // Copy to avoid ConcurrentModificationException: deinitExecutor mutates myExecutors.
      for (Executor executor : new ArrayList<>(myExecutors)) {
        deinitExecutor(executor);
      }
    }
    // NOTE(review): fields are nulled here, so any call after dispose() would NPE —
    // presumably the platform guarantees no such calls; confirm before relying on it.
    myExecutors = null;
    myActionManager = null;
  }

  /**
   * Toolbar action for one executor: runs the currently selected run configuration with it,
   * and keeps its presentation (icon/text/enabled state) in sync in {@link #update}.
   */
  private class ExecutorAction extends AnAction implements DumbAware {
    private final Executor myExecutor;

    private ExecutorAction(@NotNull final Executor executor) {
      super(executor.getStartActionText(), executor.getDescription(), executor.getIcon());
      myExecutor = executor;
    }

    // True if every (settings, target) pair can be run by this executor; compound
    // configurations are checked recursively.
    private boolean canRun(@NotNull Project project, @NotNull List<SettingsAndEffectiveTarget> pairs) {
      if (pairs.isEmpty()) {
        return false;
      }

      for (SettingsAndEffectiveTarget pair : pairs) {
        RunnerAndConfigurationSettings runnerAndConfigurationSettings = pair.getSettings();
        RunConfiguration configuration = runnerAndConfigurationSettings.getConfiguration();
        if (configuration instanceof CompoundRunConfiguration) {
          if (!canRun(project, ((CompoundRunConfiguration)configuration).getConfigurationsWithEffectiveRunTargets())) {
            return false;
          }
        }

        final ProgramRunner runner = RunnerRegistry.getInstance().getRunner(myExecutor.getId(), configuration);
        // NOTE(review): this condition allows the case runner == null (no runner found) to pass —
        // that looks inverted compared to the usual "no runner => cannot run"; verify intent.
        if (!ExecutionTargetManager.canRun(runnerAndConfigurationSettings, pair.getTarget())
            && runner != null && !isStarting(project, myExecutor.getId(), runner.getRunnerId())) {
          return false;
        }
      }
      return true;
    }

    @Override
    public void update(final AnActionEvent e) {
      final Presentation presentation = e.getPresentation();
      final Project project = e.getProject();

      if (project == null || !project.isInitialized() || project.isDisposed()) {
        presentation.setEnabled(false);
        return;
      }

      final RunnerAndConfigurationSettings selectedConfiguration = getSelectedConfiguration(project);
      boolean enabled = false;
      String text;
      if (selectedConfiguration != null) {
        // During indexing, only dumb-aware configuration types stay available.
        if (DumbService.isDumb(project) && !selectedConfiguration.getType().isDumbAware()) {
          presentation.setEnabled(false);
          return;
        }

        presentation.setIcon(getInformativeIcon(project, selectedConfiguration));

        RunConfiguration configuration = selectedConfiguration.getConfiguration();
        if (configuration instanceof CompoundRunConfiguration) {
          enabled = canRun(project, ((CompoundRunConfiguration)configuration).getConfigurationsWithEffectiveRunTargets());
        }
        else {
          enabled = canRun(project, Collections.singletonList(
            new SettingsAndEffectiveTarget(selectedConfiguration, ExecutionTargetManager.getActiveTarget(project))));
        }

        if (enabled) {
          presentation.setDescription(myExecutor.getDescription());
        }
        text = myExecutor.getStartActionText(selectedConfiguration.getName());
      }
      else {
        text = getTemplatePresentation().getTextWithMnemonic();
      }

      presentation.setEnabled(enabled);
      presentation.setText(text);
    }

    // Chooses the icon reflecting how many instances of the selected configuration are running
    // under this executor (plain icon, live indicator, count badge, or restart for singletons).
    private Icon getInformativeIcon(Project project, final RunnerAndConfigurationSettings selectedConfiguration) {
      final ExecutionManagerImpl executionManager = ExecutionManagerImpl.getInstance(project);

      RunConfiguration configuration = selectedConfiguration.getConfiguration();
      if (configuration instanceof RunnerIconProvider) {
        // The configuration may supply its own executor-specific icon; it wins if non-null.
        RunnerIconProvider provider = (RunnerIconProvider)configuration;
        Icon icon = provider.getExecutorIcon(configuration, myExecutor);
        if (icon != null) {
          return icon;
        }
      }

      List<RunContentDescriptor> runningDescriptors =
        executionManager.getRunningDescriptors(s -> s != null && s.getConfiguration() == selectedConfiguration.getConfiguration());
      // Keep only descriptors whose content actually belongs to this executor.
      runningDescriptors = ContainerUtil.filter(runningDescriptors, descriptor -> {
        RunContentDescriptor contentDescriptor =
          executionManager.getContentManager().findContentDescriptor(myExecutor, descriptor.getProcessHandler());
        return contentDescriptor != null && executionManager.getExecutors(contentDescriptor).contains(myExecutor);
      });

      if (!runningDescriptors.isEmpty() && DefaultRunExecutor.EXECUTOR_ID.equals(myExecutor.getId()) && selectedConfiguration.isSingleton()) {
        // A running singleton under "Run" would be restarted, so show the restart icon.
        return AllIcons.Actions.Restart;
      }
      if (runningDescriptors.isEmpty()) {
        return myExecutor.getIcon();
      }

      if (runningDescriptors.size() == 1) {
        return ExecutionUtil.getLiveIndicator(myExecutor.getIcon());
      }
      else {
        return IconUtil.addText(myExecutor.getIcon(), String.valueOf(runningDescriptors.size()));
      }
    }

    @Nullable
    private RunnerAndConfigurationSettings getSelectedConfiguration(@NotNull final Project project) {
      return RunManager.getInstance(project).getSelectedConfiguration();
    }

    // Launches the configuration; compound configurations are expanded recursively and each
    // child is launched separately.
    private void run(@NotNull Project project, @Nullable RunnerAndConfigurationSettings configuration, @NotNull DataContext dataContext) {
      if (configuration != null && configuration.getConfiguration() instanceof CompoundRunConfiguration) {
        for (SettingsAndEffectiveTarget pair : ((CompoundRunConfiguration)configuration.getConfiguration()).getConfigurationsWithEffectiveRunTargets()) {
          run(project, pair.getSettings(), dataContext);
        }
      }
      else {
        ExecutionEnvironmentBuilder builder =
          configuration == null ? null : ExecutionEnvironmentBuilder.createOrNull(myExecutor, configuration);
        if (builder == null) {
          return;
        }
        ExecutionManager.getInstance(project).restartRunProfile(builder.activeTarget().dataContext(dataContext).build());
      }
    }

    @Override
    public void actionPerformed(final AnActionEvent e) {
      final Project project = e.getProject();
      if (project == null || project.isDisposed()) {
        return;
      }

      run(project, getSelectedConfiguration(project), e.getDataContext());
    }
  }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.network;

import org.elasticsearch.action.support.replication.ReplicationTask;
import org.elasticsearch.cluster.routing.allocation.command.AllocateEmptyPrimaryAllocationCommand;
import org.elasticsearch.cluster.routing.allocation.command.AllocateReplicaAllocationCommand;
import org.elasticsearch.cluster.routing.allocation.command.AllocateStalePrimaryAllocationCommand;
import org.elasticsearch.cluster.routing.allocation.command.AllocationCommand;
import org.elasticsearch.cluster.routing.allocation.command.CancelAllocationCommand;
import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.http.HttpServerTransport;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.plugins.NetworkPlugin;
import org.elasticsearch.tasks.RawTaskStatus;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportInterceptor;
import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.transport.TransportRequestHandler;
import org.elasticsearch.transport.local.LocalTransport;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Supplier;

/**
 * A module to handle registering and binding all network related classes.
 */
public final class NetworkModule {

    public static final String TRANSPORT_TYPE_KEY = "transport.type";
    public static final String HTTP_TYPE_KEY = "http.type";
    public static final String LOCAL_TRANSPORT = "local";
    public static final String HTTP_TYPE_DEFAULT_KEY = "http.type.default";
    public static final String TRANSPORT_TYPE_DEFAULT_KEY = "transport.type.default";

    public static final Setting<String> TRANSPORT_DEFAULT_TYPE_SETTING =
        Setting.simpleString(TRANSPORT_TYPE_DEFAULT_KEY, Property.NodeScope);
    public static final Setting<String> HTTP_DEFAULT_TYPE_SETTING =
        Setting.simpleString(HTTP_TYPE_DEFAULT_KEY, Property.NodeScope);
    public static final Setting<String> HTTP_TYPE_SETTING =
        Setting.simpleString(HTTP_TYPE_KEY, Property.NodeScope);
    public static final Setting<Boolean> HTTP_ENABLED =
        Setting.boolSetting("http.enabled", true, Property.NodeScope);
    public static final Setting<String> TRANSPORT_TYPE_SETTING =
        Setting.simpleString(TRANSPORT_TYPE_KEY, Property.NodeScope);

    private final Settings settings;
    private final boolean transportClient;

    // Registries shared by all NetworkModule instances: allocation commands and task statuses
    // that must be (de)serializable on both node and transport client.
    private static final List<NamedWriteableRegistry.Entry> namedWriteables = new ArrayList<>();
    private static final List<NamedXContentRegistry.Entry> namedXContents = new ArrayList<>();

    static {
        registerAllocationCommand(CancelAllocationCommand::new, CancelAllocationCommand::fromXContent,
            CancelAllocationCommand.COMMAND_NAME_FIELD);
        registerAllocationCommand(MoveAllocationCommand::new, MoveAllocationCommand::fromXContent,
            MoveAllocationCommand.COMMAND_NAME_FIELD);
        registerAllocationCommand(AllocateReplicaAllocationCommand::new, AllocateReplicaAllocationCommand::fromXContent,
            AllocateReplicaAllocationCommand.COMMAND_NAME_FIELD);
        registerAllocationCommand(AllocateEmptyPrimaryAllocationCommand::new, AllocateEmptyPrimaryAllocationCommand::fromXContent,
            AllocateEmptyPrimaryAllocationCommand.COMMAND_NAME_FIELD);
        registerAllocationCommand(AllocateStalePrimaryAllocationCommand::new, AllocateStalePrimaryAllocationCommand::fromXContent,
            AllocateStalePrimaryAllocationCommand.COMMAND_NAME_FIELD);
        namedWriteables.add(
            new NamedWriteableRegistry.Entry(Task.Status.class, ReplicationTask.Status.NAME, ReplicationTask.Status::new));
        namedWriteables.add(
            new NamedWriteableRegistry.Entry(Task.Status.class, RawTaskStatus.NAME, RawTaskStatus::new));
    }

    private final Map<String, Supplier<Transport>> transportFactories = new HashMap<>();
    private final Map<String, Supplier<HttpServerTransport>> transportHttpFactories = new HashMap<>();
    // Fixed typo: field was previously misspelled "transportIntercetors".
    private final List<TransportInterceptor> transportInterceptors = new ArrayList<>();

    /**
     * Creates a network module that custom networking classes can be plugged into.
     * @param settings The settings for the node
     * @param transportClient True if only transport classes should be allowed to be registered, false otherwise.
     */
    public NetworkModule(Settings settings, boolean transportClient, List<NetworkPlugin> plugins, ThreadPool threadPool,
                         BigArrays bigArrays,
                         CircuitBreakerService circuitBreakerService,
                         NamedWriteableRegistry namedWriteableRegistry,
                         NamedXContentRegistry xContentRegistry,
                         NetworkService networkService,
                         HttpServerTransport.Dispatcher dispatcher) {
        this.settings = settings;
        this.transportClient = transportClient;
        // The built-in local transport is always available.
        registerTransport(LOCAL_TRANSPORT, () -> new LocalTransport(settings, threadPool, namedWriteableRegistry,
            circuitBreakerService));
        for (NetworkPlugin plugin : plugins) {
            // HTTP transports only apply to nodes (not transport clients), and only when HTTP is enabled.
            if (transportClient == false && HTTP_ENABLED.get(settings)) {
                Map<String, Supplier<HttpServerTransport>> pluginHttpTransportFactories = plugin.getHttpTransports(settings, threadPool,
                    bigArrays, circuitBreakerService, namedWriteableRegistry, xContentRegistry, networkService, dispatcher);
                for (Map.Entry<String, Supplier<HttpServerTransport>> entry : pluginHttpTransportFactories.entrySet()) {
                    registerHttpTransport(entry.getKey(), entry.getValue());
                }
            }
            // Renamed local (was confusingly also called "httpTransportFactory"): these are plain transports.
            Map<String, Supplier<Transport>> pluginTransportFactories = plugin.getTransports(settings, threadPool, bigArrays,
                circuitBreakerService, namedWriteableRegistry, networkService);
            for (Map.Entry<String, Supplier<Transport>> entry : pluginTransportFactories.entrySet()) {
                registerTransport(entry.getKey(), entry.getValue());
            }
            List<TransportInterceptor> interceptors = plugin.getTransportInterceptors(threadPool.getThreadContext());
            for (TransportInterceptor interceptor : interceptors) {
                registerTransportInterceptor(interceptor);
            }
        }
    }

    /** True if this module was built for a transport client rather than a full node. */
    public boolean isTransportClient() {
        return transportClient;
    }

    /** Adds a transport implementation that can be selected by setting {@link #TRANSPORT_TYPE_KEY}. */
    private void registerTransport(String key, Supplier<Transport> factory) {
        if (transportFactories.putIfAbsent(key, factory) != null) {
            throw new IllegalArgumentException("transport for name: " + key + " is already registered");
        }
    }

    /** Adds an http transport implementation that can be selected by setting {@link #HTTP_TYPE_KEY}. */
    // TODO: we need another name than "http transport"....so confusing with transportClient...
    private void registerHttpTransport(String key, Supplier<HttpServerTransport> factory) {
        if (transportClient) {
            throw new IllegalArgumentException("Cannot register http transport " + key + " for transport client");
        }
        if (transportHttpFactories.putIfAbsent(key, factory) != null) {
            throw new IllegalArgumentException("transport for name: " + key + " is already registered");
        }
    }

    /**
     * Register an allocation command.
     * <p>
     * This lives here instead of the more aptly named ClusterModule because the Transport client needs these to be registered.
     * </p>
     * @param reader the reader to read it from a stream
     * @param parser the parser to read it from XContent
     * @param commandName the names under which the command should be parsed. The {@link ParseField#getPreferredName()} is special because
     *        it is the name under which the command's reader is registered.
     */
    private static <T extends AllocationCommand> void registerAllocationCommand(Writeable.Reader<T> reader,
            CheckedFunction<XContentParser, T, IOException> parser, ParseField commandName) {
        namedXContents.add(new NamedXContentRegistry.Entry(AllocationCommand.class, commandName, parser));
        namedWriteables.add(new NamedWriteableRegistry.Entry(AllocationCommand.class, commandName.getPreferredName(), reader));
    }

    /** Returns an unmodifiable view of the statically registered named writeables. */
    public static List<NamedWriteableRegistry.Entry> getNamedWriteables() {
        return Collections.unmodifiableList(namedWriteables);
    }

    /** Returns an unmodifiable view of the statically registered named xcontent parsers. */
    public static List<NamedXContentRegistry.Entry> getNamedXContents() {
        return Collections.unmodifiableList(namedXContents);
    }

    /**
     * Returns the factory for the configured http transport type
     * ({@link #HTTP_TYPE_SETTING}, falling back to {@link #HTTP_DEFAULT_TYPE_SETTING}).
     * @throws IllegalStateException if no transport is registered under the configured type
     */
    public Supplier<HttpServerTransport> getHttpServerTransportSupplier() {
        final String name;
        if (HTTP_TYPE_SETTING.exists(settings)) {
            name = HTTP_TYPE_SETTING.get(settings);
        } else {
            name = HTTP_DEFAULT_TYPE_SETTING.get(settings);
        }
        final Supplier<HttpServerTransport> factory = transportHttpFactories.get(name);
        if (factory == null) {
            throw new IllegalStateException("Unsupported http.type [" + name + "]");
        }
        return factory;
    }

    /** True if HTTP should be served: never on a transport client, otherwise per {@link #HTTP_ENABLED}. */
    public boolean isHttpEnabled() {
        return transportClient == false && HTTP_ENABLED.get(settings);
    }

    /**
     * Returns the factory for the configured transport type
     * ({@link #TRANSPORT_TYPE_SETTING}, falling back to {@link #TRANSPORT_DEFAULT_TYPE_SETTING}).
     * @throws IllegalStateException if no transport is registered under the configured type
     */
    public Supplier<Transport> getTransportSupplier() {
        final String name;
        if (TRANSPORT_TYPE_SETTING.exists(settings)) {
            name = TRANSPORT_TYPE_SETTING.get(settings);
        } else {
            name = TRANSPORT_DEFAULT_TYPE_SETTING.get(settings);
        }
        final Supplier<Transport> factory = transportFactories.get(name);
        if (factory == null) {
            throw new IllegalStateException("Unsupported transport.type [" + name + "]");
        }
        return factory;
    }

    /**
     * Registers a new {@link TransportInterceptor}
     */
    private void registerTransportInterceptor(TransportInterceptor interceptor) {
        this.transportInterceptors.add(Objects.requireNonNull(interceptor, "interceptor must not be null"));
    }

    /**
     * Returns a composite {@link TransportInterceptor} containing all registered interceptors
     * @see #registerTransportInterceptor(TransportInterceptor)
     */
    public TransportInterceptor getTransportInterceptor() {
        return new CompositeTransportInterceptor(this.transportInterceptors);
    }

    /** Chains all registered interceptors: each wraps the handler/sender produced by the previous one. */
    static final class CompositeTransportInterceptor implements TransportInterceptor {
        final List<TransportInterceptor> transportInterceptors;

        private CompositeTransportInterceptor(List<TransportInterceptor> transportInterceptors) {
            // Defensive copy so later registrations don't affect an already-built composite.
            this.transportInterceptors = new ArrayList<>(transportInterceptors);
        }

        @Override
        public <T extends TransportRequest> TransportRequestHandler<T> interceptHandler(String action, String executor,
                                                                                        boolean forceExecution,
                                                                                        TransportRequestHandler<T> actualHandler) {
            for (TransportInterceptor interceptor : this.transportInterceptors) {
                actualHandler = interceptor.interceptHandler(action, executor, forceExecution, actualHandler);
            }
            return actualHandler;
        }

        @Override
        public AsyncSender interceptSender(AsyncSender sender) {
            for (TransportInterceptor interceptor : this.transportInterceptors) {
                sender = interceptor.interceptSender(sender);
            }
            return sender;
        }
    }
}
package water.api;

import water.H2O;
import water.Iced;
import water.IcedWrapper;
import water.Weaver;
import water.api.SchemaMetadataBase.FieldMetadataBase;
import water.exceptions.H2OIllegalArgumentException;
import water.util.IcedHashMapBase;
import water.util.Log;
import water.util.ReflectionUtils;

import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

/**
 * The metadata info on all the fields in a Schema.  This is used to help Schema be self-documenting,
 * and to generate language bindings for route handlers and entities.
 */
public final class SchemaMetadata extends Iced {
  // Schema version number, e.g. 3 for a V3 schema.
  public int version;
  // Simple name of the schema class.
  public String name;
  // Simple name of the schema's superclass.
  public String superclass;
  // Schema type name, as reported by the schema's __meta block.
  public String type;

  // Metadata for every @API-annotated field of the schema.
  public List<FieldMetadata> fields;
  // Generated markdown documentation for the schema.
  public String markdown;

  // TODO: combine with ModelParameterSchemaV2.
  static public final class FieldMetadata extends Iced {
    /**
     * Field name in the POJO.    Set through reflection.
     */
    public String name;

    /**
     * Type for this field.  Set through reflection.
     */
    public String type;

    /**
     * Type for this field is itself a Schema.  Set through reflection.
     */
    public boolean is_schema;

    /**
     * Schema name for this field, if it is_schema.  Set through reflection.
     */
    public String schema_name;

    /**
     * Value for this field.  Set through reflection.
     */
    public Iced value;

    /**
     * A short help description to appear alongside the field in a UI.  Set from the @API annotation.
     */
    String help;

    /**
     * The label that should be displayed for the field if the name is insufficient.  Set from the @API annotation.
     */
    String label;

    /**
     * Is this field required, or is the default value generally sufficient?  Set from the @API annotation.
     */
    boolean required;

    /**
     * How important is this field?  The web UI uses the level to do a slow reveal of the parameters.  Set from the @API annotation.
     */
    API.Level level;

    /**
     * Is this field an input, output or inout?  Set from the @API annotation.
     */
    API.Direction direction;

    /**
     * Is this field inherited from a class higher in the hierarchy?
     */
    public boolean is_inherited;

    /**
     * Is this field gridable?  Set from the @API annotation.
     */
    public boolean is_gridable;

    // The following are markers for *input* fields.

    /**
     * For enum-type fields the allowed values are specified using the values annotation.
     * This is used in UIs to tell the user the allowed values, and for validation.
     * Set from the @API annotation.
     */
    String[] values;

    /**
     * Should this field be rendered in the JSON representation?  Set from the @API annotation.
     */
    boolean json;

    /**
     * For Vec-type fields this is the set of Frame-type fields which must contain the named column.
     * For example, for a SupervisedModel the response_column must be in both the training_frame
     * and (if it's set) the validation_frame.
     */
    String[] is_member_of_frames;

    /**
     * For Vec-type fields this is the set of other Vec-type fields which must contain
     * mutually exclusive values.  For example, for a SupervisedModel the response_column
     * must be mutually exclusive with the weights_column.
     */
    String[] is_mutually_exclusive_with;

    public FieldMetadata() { }

    /**
     * Create a new FieldMetadata object for the given Field of the given Schema.
     * @param schema water.api.Schema object
     * @param f java.lang.reflect.Field for the Schema class
     */
    public FieldMetadata(Schema schema, Field f, List<Field> superclassFields) {
      super();
      try {
        f.setAccessible(true); // handle private and protected fields
        this.name = f.getName();
        Object o = f.get(schema);
        this.value = consValue(o);

        // Either the field itself or the component type of an array field may be
        // an enum or a Schema; the checks below handle both shapes.
        boolean is_enum = Enum.class.isAssignableFrom(f.getType()) || (f.getType().isArray() && Enum.class.isAssignableFrom(f.getType().getComponentType()));
        this.is_schema = Schema.class.isAssignableFrom(f.getType()) || (f.getType().isArray() && Schema.class.isAssignableFrom(f.getType().getComponentType()));

        this.type = consType(schema, ReflectionUtils.findActualFieldClass(schema.getClass(), f), f.getName());

        // Note, this has to work when the field is null.  In addition, if the field's type is a base class
        // we want to see if we have a versioned schema for its Iced type and, if so, use it.
        if (this.is_schema) {
          // First, get the class of the field: NOTE: this gets the actual type for genericized fields, but not for arrays of genericized fields
          Class<? extends Schema> schema_class = f.getType().isArray() ? (Class<? extends Schema>)f.getType().getComponentType() : ReflectionUtils.findActualFieldClass(schema.getClass(), f);

          // Now see if we have a versioned schema for its Iced type:
          Class<? extends Schema> versioned_schema_class = Schema.schemaClass(schema.getSchemaVersion(), Schema.getImplClass(schema_class));

          // If we found a versioned schema class for its iced type use it, else fall back to the type of the field:
          if (null != versioned_schema_class) {
            this.schema_name = versioned_schema_class.getSimpleName();
          } else {
            this.schema_name = schema_class.getSimpleName();
          }
        } else if (is_enum && !f.getType().isArray()) {
          this.schema_name = f.getType().getSimpleName();
        } else if (is_enum && f.getType().isArray()) {
          this.schema_name = f.getType().getComponentType().getSimpleName();
        }

        this.is_inherited = (superclassFields.contains(f));

        API annotation = f.getAnnotation(API.class);
        if (null != annotation) {
          String l = annotation.label();
          this.help = annotation.help();
          // Fall back to the raw field name when no label was supplied.
          this.label = (null == l || l.isEmpty() ? f.getName() : l);
          this.required = annotation.required();
          this.level = annotation.level();
          this.direction = annotation.direction();
          this.is_gridable = annotation.gridable();
          this.values = annotation.values();
          this.json = annotation.json();
          this.is_member_of_frames = annotation.is_member_of_frames();
          this.is_mutually_exclusive_with = annotation.is_mutually_exclusive_with(); // TODO: need to form the transitive closure

          // If the field is an enum then the values annotation field had better be set. . .
          if (is_enum && (null == this.values || 0 == this.values.length)) {
            throw H2O.fail("Didn't find values annotation for enum field: " + this.name);
          }
        }
      } catch (Exception e) {
        // NOTE(review): this drops the original stack trace (only e.toString() is
        // kept); acceptable here only because H2O.fail aborts anyway.
        throw H2O.fail("Caught exception accessing field: " + f + " for schema object: " + schema + ": " + e.toString());
      }
    } // FieldMetadata(Schema, Field)

    /**
     * Factory method to create a new FieldMetadata instance if the Field has an @API annotation.
     * @param schema water.api.Schema object
     * @param f java.lang.reflect.Field for the Schema class
     * @return a new FieldMetadata instance if the Field has an @API annotation, else null
     */
    public static FieldMetadata createIfApiAnnotation(Schema schema, Field f, List<Field> superclassFields) {
      f.setAccessible(true); // handle private and protected fields

      if (null != f.getAnnotation(API.class))
        return new FieldMetadata(schema, f, superclassFields);

      Log.warn("Skipping field that lacks an annotation: " + schema.toString() + "." + f);
      return null;
    }

    /** For a given Class generate a client-friendly type name (e.g., int[][] or Frame). */
    public static String consType(Schema schema, Class clz, String field_name) {
      boolean is_enum = Enum.class.isAssignableFrom(clz);
      boolean is_array = clz.isArray();

      // built-in Java types:
      if (is_enum)
        return "enum";

      if (String.class.isAssignableFrom(clz))
        return "string"; // lower-case, to be less Java-centric

      if (clz.equals(Boolean.TYPE) || clz.equals(Byte.TYPE) || clz.equals(Short.TYPE) || clz.equals(Integer.TYPE) || clz.equals(Long.TYPE) || clz.equals(Float.TYPE) || clz.equals(Double.TYPE))
        return clz.toString();

      // Arrays recurse on the component type and append "[]".
      if (is_array)
        return consType(schema, clz.getComponentType(), field_name) + "[]";

      if (Map.class.isAssignableFrom(clz)) {
        if (IcedHashMapBase.class.isAssignableFrom(clz)) {
          String type0 = ReflectionUtils.findActualClassParameter(clz, 0).getSimpleName();
          String type1 = ReflectionUtils.findActualClassParameter(clz, 1).getSimpleName();
          if ("String".equals(type0)) type0 = "string";
          if ("String".equals(type1)) type1 = "string";
          return "Map<" + type0 + "," + type1 + ">";
        } else {
          Log.warn("Schema Map field isn't a subclass of IcedHashMap, so its metadata won't have type parameters: " + schema.getClass().getSimpleName() + "." + field_name);
          return "Map";
        }
      }

      if (List.class.isAssignableFrom(clz))
        return "List";

      // H2O-specific types:
      // TODO: NOTE, this is a mix of Schema types and Iced types; that's not right. . .
      // Should ONLY have schema types.
      // Also, this mapping could/should be moved to Schema.
      if (water.Key.class.isAssignableFrom(clz)) {
        Log.warn("Raw Key (not KeySchema) in Schema: " + schema.getClass() + " field: " + field_name);
        return "Key";
      }

      if (KeyV3.class.isAssignableFrom(clz)) {
        return "Key<" + KeyV3.getKeyedClassType((Class<? extends KeyV3>) clz) + ">";
      }

      if (Schema.class.isAssignableFrom(clz)) {
        return Schema.getImplClass((Class<Schema>)clz).getSimpleName(); // same as Schema.schema_type
      }

      if (Iced.class.isAssignableFrom(clz)) {
        if (clz == Schema.Meta.class) {
          // Special case where we allow an Iced in a Schema so we don't get infinite meta-regress:
          return "Schema.Meta";
        } else {
          // Special cases: polymorphic metadata fields that can contain scalars, Schemas (any Iced, actually), or arrays of these:
          if (schema instanceof ModelParameterSchemaV3 && ("default_value".equals(field_name) || "actual_value".equals(field_name)))
            return "Polymorphic";

          if ((schema instanceof FieldMetadataV3 || schema instanceof FieldMetadataBase) && "value".equals(field_name))
            return "Polymorphic";

          if (((schema instanceof TwoDimTableBase || schema instanceof TwoDimTableV3) && "data".equals(field_name))) // IcedWrapper
            return "Polymorphic";

          Log.warn("WARNING: found non-Schema Iced field: " + clz.toString() + " in Schema: " + schema.getClass() + " field: " + field_name);
          return clz.getSimpleName();
        }
      }

      String msg = "Don't know how to generate a client-friendly type name for class: " + clz.toString() + " in Schema: " + schema.getClass() + " field: " + field_name;
      Log.warn(msg);
      throw H2O.fail(msg);
    }

    /**
     * Wraps a raw field value into an Iced so it can serialize with the rest of
     * the metadata.  Icks pass through; arrays, primitives, boxed numbers,
     * booleans, Strings and enums are wrapped in IcedWrapper.
     */
    public static Iced consValue(Object o) {
      if (null == o)
        return null;

      Class clz = o.getClass();

      if (water.Iced.class.isAssignableFrom(clz))
        return (Iced)o;

      if (clz.isArray()) {
        return new IcedWrapper(o);
      }

/*
      if (water.Keyed.class.isAssignableFrom(o.getClass())) {
        Keyed k = (Keyed)o;
        return k._key.toString();
      }

      if (! o.getClass().isArray()) {
        if (Schema.class.isAssignableFrom(o.getClass())) {
          return new String(((Schema)o).writeJSON(new AutoBuffer()).buf());
        } else {
          return o.toString();
        }
      }

      StringBuilder sb = new StringBuilder();
      sb.append("[");
      for (int i = 0; i < Array.getLength(o); i++) {
        if (i > 0) sb.append(", ");
        sb.append(consValue(Array.get(o, i)));
      }
      sb.append("]");
      return sb.toString();
*/

      // Primitive type
      if (clz.isPrimitive())
        return new IcedWrapper(o);

      if (o instanceof Number)
        return new IcedWrapper(o);

      if (o instanceof Boolean)
        return new IcedWrapper(o);

      if (o instanceof String)
        return new IcedWrapper(o);

      if (o instanceof Enum)
        return new IcedWrapper(o);

      // Anything else is unsupported here.
      throw new H2OIllegalArgumentException("o", "consValue", o);
    }

  } // FieldMetadata

  public SchemaMetadata() {
    fields = new ArrayList<>();
  }

  /**
   * Builds the full metadata for the given schema instance: version/name/type
   * from its __meta block, per-field metadata, and the generated markdown.
   */
  public SchemaMetadata(Schema schema) {
    version = schema.get__meta().getSchema_version();
    name = schema.get__meta().getSchema_name();
    type = schema.get__meta().getSchema_type();

    superclass = schema.getClass().getSuperclass().getSimpleName();
    // Get metadata of all annotated fields
    fields = getFieldMetadata(schema);
    // Also generates markdown
    markdown = schema.markdown(this, true, true).toString();
  }

  /**
   * Returns metadata of all annotated fields.
   *
   * @param schema a schema instance
   * @return list of field metadata
   */
  public static List<FieldMetadata> getFieldMetadata(Schema schema) {
    // Fields declared on the superclass are used to mark is_inherited.
    List<Field> superclassFields = Arrays.asList(Weaver.getWovenFields(schema.getClass().getSuperclass()));

    List<FieldMetadata> fields = new ArrayList<>();
    // Fields up to but not including Schema
    for (Field field : Weaver.getWovenFields(schema.getClass())) {
      FieldMetadata fmd = FieldMetadata.createIfApiAnnotation(schema, field, superclassFields);
      if (null != fmd) // skip transient or other non-annotated fields
        fields.add(fmd);  // NOTE: we include non-JSON fields here; remove them later if we don't want them
    }
    return fields;
  }

  /**
   * Instantiates the named schema class reflectively and builds its metadata.
   *
   * @param classname fully-qualified schema class name
   * @throws IllegalArgumentException if the class cannot be loaded or instantiated
   */
  public static SchemaMetadata createSchemaMetadata(String classname) throws IllegalArgumentException {
    try {
      Class<? extends Schema> clz = (Class<? extends Schema>) Class.forName(classname);
      Schema s = clz.newInstance();
      return new SchemaMetadata(s);
    } catch (Exception e) {
      String msg = "Caught exception fetching schema: " + classname + ": " + e;
      Log.warn(msg);
      throw new IllegalArgumentException(msg);
    }
  }
}
package eu.qualimaster.TimeTravelPip.topology; import java.util.*; import java.io.IOException; import java.lang.NoSuchMethodException; import java.lang.reflect.InvocationTargetException; import java.io.OutputStream; import java.io.Serializable; import java.net.Socket; import java.io.PrintWriter; import eu.qualimaster.common.logging.DataLogger; import java.net.UnknownHostException; import com.esotericsoftware.kryo.io.Output; import org.apache.log4j.Logger; import backtype.storm.tuple.*; import backtype.storm.task.*; import backtype.storm.topology.*; import eu.qualimaster.events.EventManager; import eu.qualimaster.monitoring.events.AlgorithmChangedMonitoringEvent; import eu.qualimaster.monitoring.events.ParameterChangedMonitoringEvent; import eu.qualimaster.families.inf.*; import eu.qualimaster.families.inf.IFTimeGraphIndexer.*; import eu.qualimaster.families.imp.*; import eu.qualimaster.common.signal.*; import eu.qualimaster.base.algorithm.*; import eu.qualimaster.base.algorithm.IFamily.State; import eu.qualimaster.infrastructure.PipelineOptions; import eu.qualimaster.pipeline.DefaultModeException; import eu.qualimaster.pipeline.DefaultModeMonitoringEvent; import eu.qualimaster.base.serializer.KryoSwitchTupleSerializer; import backtype.storm.Config; import eu.qualimaster.base.pipeline.CollectingTopologyInfo; import eu.qualimaster.pipeline.AlgorithmChangeParameter; import eu.qualimaster.timegraph.TimeGraphIndexer; import eu.qualimaster.families.inf.IFTimeGraphMapper.*; import eu.qualimaster.families.inf.IFTimeGraphIndexer.*; import eu.qualimaster.families.inf.IFTimeGraphQueryExecutor.*; import eu.qualimaster.base.algorithm.IItemEmitter; /** * Defines the FamilyElment in the pipeline(GEN). 
**/
@SuppressWarnings({ "rawtypes", "serial" })
public class PipelineVar_10_FamilyElement4FamilyElement extends BaseSignalBolt {

    final static Logger logger = Logger.getLogger(PipelineVar_10_FamilyElement4FamilyElement.class);
    transient OutputCollector _collector;
    // True once an algorithm has been assigned (statically or via signal).
    private boolean algChange = false;
    // True once the first tuple has been seen; used together with 'record'.
    private boolean firstTuple = false;
    // Timestamp used to rate-limit error logging to once per second.
    private long record = 0;
    private int taskId;
    // NOTE(review): initializers on transient fields are lost on deserialization;
    // harmless here because prepare() re-creates both outputs.
    private transient FTimeGraphIndexer.IFTimeGraphIndexerExternalResponsesOutput externalResponsesResult = new FTimeGraphIndexer.IFTimeGraphIndexerExternalResponsesOutput();
    private transient FTimeGraphIndexer.IFTimeGraphIndexerInternalRequestsOutput internalRequestsResult = new FTimeGraphIndexer.IFTimeGraphIndexerInternalRequestsOutput();
    // Currently active algorithm implementation (null until assigned).
    private transient IFTimeGraphIndexer alg = null;
    private transient List<Integer> taskIdTimeGraphIndexer;
    private transient List<Integer> taskIdTimeGraphQueryExecutor;
    private int PipelineVar_10_FamilyElement4TaskId;
    // Per-stream input wrappers reused across tuples (re-created in forwardTuple).
    transient IIFTimeGraphMapperDataStreamOutput iTupleDataStream = null;
    transient IIFTimeGraphIndexerInternalRequestsOutput iTupleInternalRequests = null;
    transient IIFTimeGraphQueryExecutorUnicastRequestsOutput iTupleUnicastRequests = null;
    transient IIFTimeGraphQueryExecutorBroadcastRequestsOutput iTupleBroadcastRequests = null;
    transient IIFTimeGraphIndexerDataStreamInput inputDataStream = null;
    transient IIFTimeGraphIndexerInternalRequestsInput inputInternalRequests = null;
    transient IIFTimeGraphIndexerUnicastRequestsInput inputUnicastRequests = null;
    transient IIFTimeGraphIndexerBroadcastRequestsInput inputBroadcastRequests = null;

    public PipelineVar_10_FamilyElement4FamilyElement(String name, String namespace) {
        super(name, namespace, true);
    }

    /**
     * Sends an algorithm change event and considers whether the coordination layer shall be bypassed for direct
     * testing.
     * @param algorithm the new algorithm
     * @param causeMsgId the message id of the causing message (may be empty or null)
     */
    private void sendAlgorithmChangeEvent(String algorithm, String causeMsgId) {
        EventManager.send(new AlgorithmChangedMonitoringEvent(getPipeline(), getName(), algorithm, causeMsgId));
    }

    /**
     * Sends a parameter change event and considers whether the coordination layer shall be bypassed for direct
     * testing.
     * @param parameter the parameter to be changed
     * @param value the new value
     * @param causeMsgId the message id of the causing message (may be empty or null)
     */
    private void sendParameterChangeEvent(String parameter, Serializable value, String causeMsgId) {
        EventManager.send(new ParameterChangedMonitoringEvent(getPipeline(), getName(), parameter, value, causeMsgId));
    }

    /**
     * Sends a default mode monitoring event with a DefaultModeException case.
     * @param exceptionCase the DefaultModeException case
     */
    private void sendDefaultModeMonitoringEvent(DefaultModeException exceptionCase) {
        EventManager.send(new DefaultModeMonitoringEvent(getPipeline(), getName(), exceptionCase));
    }

    /**
     * Storm lifecycle: resolves peer task ids, wires the direct-emit callbacks
     * for both output streams and, in STATIC init mode, reflectively creates and
     * activates the TimeGraphIndexer algorithm.
     */
    public void prepare(Map map, TopologyContext topologyContext, OutputCollector collector) {
        super.prepare(map, topologyContext, collector);
        _collector = collector;
        taskId = topologyContext.getThisTaskId();
        algChange = false;
        taskIdTimeGraphIndexer = topologyContext.getComponentTasks("PipelineVar_10_FamilyElement4");
        logger.info("taskIdsTimeGraphIndexer = " + taskIdTimeGraphIndexer);
        taskIdTimeGraphQueryExecutor = topologyContext.getComponentTasks("PipelineVar_10_FamilyElement5");
        logger.info("taskIdsTimeGraphQueryExecutor = " + taskIdTimeGraphQueryExecutor);
        PipelineVar_10_FamilyElement4TaskId = topologyContext.getThisTaskId();
        logger.info("PipelineVar_10_FamilyElement4TaskId = " + PipelineVar_10_FamilyElement4TaskId);
        externalResponsesResult = new FTimeGraphIndexer.IFTimeGraphIndexerExternalResponsesOutput();
        // Emitter callback: routes each item directly to the task id the item names.
        IItemEmitter<IIFTimeGraphIndexerExternalResponsesOutput> externalResponsesEmitter = new IItemEmitter<IIFTimeGraphIndexerExternalResponsesOutput>() {
            @Override
            public void emitDirect(String streamId, IIFTimeGraphIndexerExternalResponsesOutput item) {
                _collector.emitDirect(item.getTaskId(), "PipelineVar_10_FamilyElement4ExternalResponses", new Values(item));
            }
        };
        externalResponsesResult.setEmitter(externalResponsesEmitter);
        internalRequestsResult = new FTimeGraphIndexer.IFTimeGraphIndexerInternalRequestsOutput();
        IItemEmitter<IIFTimeGraphIndexerInternalRequestsOutput> internalRequestsEmitter = new IItemEmitter<IIFTimeGraphIndexerInternalRequestsOutput>() {
            @Override
            public void emitDirect(String streamId, IIFTimeGraphIndexerInternalRequestsOutput item) {
                _collector.emitDirect(item.getTaskId(), "PipelineVar_10_FamilyElement4InternalRequests", new Values(item));
            }
        };
        internalRequestsResult.setEmitter(internalRequestsEmitter);
        if("STATIC".equals(map.get(Constants.CONFIG_KEY_INIT_MODE))) {
            try {
                try {
                    // Reflective construction keeps the generated bolt decoupled
                    // from the concrete algorithm implementation at compile time.
                    Class cls = Class.forName("eu.qualimaster.timegraph.TimeGraphIndexer");
                    alg = (IFTimeGraphIndexer) cls.getConstructor(List.class, List.class, int.class).newInstance(taskIdTimeGraphIndexer, taskIdTimeGraphQueryExecutor, PipelineVar_10_FamilyElement4TaskId);
                } catch (ClassNotFoundException e) {
                    e.printStackTrace();
                } catch (InstantiationException e) {
                    e.printStackTrace();
                } catch (IllegalAccessException e) {
                    e.printStackTrace();
                } catch (NoSuchMethodException e) {
                    e.printStackTrace();
                } catch (InvocationTargetException e) {
                    e.printStackTrace();
                }
                if (alg != null) {
                    alg.switchState(State.ACTIVATE); //activate the current algorithm
                }
                sendAlgorithmChangeEvent("TimeGraphIndexer", null);
                algChange = true;
            } catch (Throwable e) {
                if (e instanceof DefaultModeException) {
                    logger.error("Throwable caught - turning to default mode");
                    e.printStackTrace();
                    sendDefaultModeMonitoringEvent((DefaultModeException) e);
                }
            }
        }
        initMonitor();
        logger.info("The end of the prepare method.");
    }

    @Override
    protected boolean initMonitorDuringPrepare() {
        return false;
    }

    /**
     * Dispatches one incoming tuple payload to the algorithm.  Each of the four
     * recognized payload types is converted to the matching indexer input and
     * passed to alg.calculate(); errors are rate-limited to one log per second
     * and DefaultModeException is forwarded to monitoring.  Unless the algorithm
     * creates its own sub-topology, pending outputs are then drained and emitted
     * directly to their target tasks.
     */
    public void forwardTuple(Object tupleItem) {
        startMonitoring();
        // delegate to family "fTimeGraphIndexer"
        if (!firstTuple) {//the first tuple arrived
            firstTuple = true;
            record = System.currentTimeMillis();
        }
        if(tupleItem instanceof IIFTimeGraphMapperDataStreamOutput) {
            iTupleDataStream = (IIFTimeGraphMapperDataStreamOutput)tupleItem;
            inputDataStream = new FTimeGraphIndexer.IFTimeGraphIndexerDataStreamInput();
            inputDataStream.setUpdate(iTupleDataStream.getUpdate());
            inputDataStream.setIsAddition(iTupleDataStream.getIsAddition());
            try {
                if(alg != null) {
                    alg.calculate(inputDataStream, externalResponsesResult, internalRequestsResult);
                } else {
                    long now = System.currentTimeMillis();
                    if ((now - record) > 1000) {//print this error log once per second
                        logger.error("Algorithm is not assigned!");
                        record = now;
                    }
                }
            } catch(Throwable e) {
                long now = System.currentTimeMillis();
                if ((now - record) > 1000) {//print this error log once per second
                    e.printStackTrace();
                    record = now;
                }
                // Clear partial outputs so nothing stale is emitted below.
                externalResponsesResult.setExternalResponse(null);
                internalRequestsResult.setInternalRequest(null);
                if (e instanceof DefaultModeException) {
                    sendDefaultModeMonitoringEvent((DefaultModeException) e);
                }
            }
        }
        if(tupleItem instanceof IIFTimeGraphIndexerInternalRequestsOutput) {
            iTupleInternalRequests = (IIFTimeGraphIndexerInternalRequestsOutput)tupleItem;
            inputInternalRequests = new FTimeGraphIndexer.IFTimeGraphIndexerInternalRequestsInput();
            inputInternalRequests.setInternalRequest(iTupleInternalRequests.getInternalRequest());
            try {
                if(alg != null) {
                    alg.calculate(inputInternalRequests, externalResponsesResult, internalRequestsResult);
                } else {
                    long now = System.currentTimeMillis();
                    if ((now - record) > 1000) {//print this error log once per second
                        logger.error("Algorithm is not assigned!");
                        record = now;
                    }
                }
            } catch(Throwable e) {
                long now = System.currentTimeMillis();
                if ((now - record) > 1000) {//print this error log once per second
                    e.printStackTrace();
                    record = now;
                }
                externalResponsesResult.setExternalResponse(null);
                internalRequestsResult.setInternalRequest(null);
                if (e instanceof DefaultModeException) {
                    sendDefaultModeMonitoringEvent((DefaultModeException) e);
                }
            }
        }
        if(tupleItem instanceof IIFTimeGraphQueryExecutorUnicastRequestsOutput) {
            iTupleUnicastRequests = (IIFTimeGraphQueryExecutorUnicastRequestsOutput)tupleItem;
            inputUnicastRequests = new FTimeGraphIndexer.IFTimeGraphIndexerUnicastRequestsInput();
            inputUnicastRequests.setUnicastExternalRequest(iTupleUnicastRequests.getUnicastExternalRequest());
            try {
                if(alg != null) {
                    alg.calculate(inputUnicastRequests, externalResponsesResult, internalRequestsResult);
                } else {
                    long now = System.currentTimeMillis();
                    if ((now - record) > 1000) {//print this error log once per second
                        logger.error("Algorithm is not assigned!");
                        record = now;
                    }
                }
            } catch(Throwable e) {
                long now = System.currentTimeMillis();
                if ((now - record) > 1000) {//print this error log once per second
                    e.printStackTrace();
                    record = now;
                }
                externalResponsesResult.setExternalResponse(null);
                internalRequestsResult.setInternalRequest(null);
                if (e instanceof DefaultModeException) {
                    sendDefaultModeMonitoringEvent((DefaultModeException) e);
                }
            }
        }
        if(tupleItem instanceof IIFTimeGraphQueryExecutorBroadcastRequestsOutput) {
            iTupleBroadcastRequests = (IIFTimeGraphQueryExecutorBroadcastRequestsOutput)tupleItem;
            inputBroadcastRequests = new FTimeGraphIndexer.IFTimeGraphIndexerBroadcastRequestsInput();
            inputBroadcastRequests.setBroadcastExternalRequest(iTupleBroadcastRequests.getBroadcastExternalRequest());
            try {
                if(alg != null) {
                    alg.calculate(inputBroadcastRequests, externalResponsesResult, internalRequestsResult);
                } else {
                    long now = System.currentTimeMillis();
                    if ((now - record) > 1000) {//print this error log once per second
                        logger.error("Algorithm is not assigned!");
                        record = now;
                    }
                }
            } catch(Throwable e) {
                long now = System.currentTimeMillis();
                if ((now - record) > 1000) {//print this error log once per second
                    e.printStackTrace();
                    record = now;
                }
                externalResponsesResult.setExternalResponse(null);
                internalRequestsResult.setInternalRequest(null);
                if (e instanceof DefaultModeException) {
                    sendDefaultModeMonitoringEvent((DefaultModeException) e);
                }
            }
        }
        if(!(alg instanceof ITopologyCreate)) {
            // Drain any queued outputs and emit each item directly to its task.
            eu.qualimaster.base.algorithm.IOutputItemIterator<IIFTimeGraphIndexerExternalResponsesOutput> iterExternalResponses = externalResponsesResult.iterator();
            iterExternalResponses.reset();
            while (iterExternalResponses.hasNext()) {
                IIFTimeGraphIndexerExternalResponsesOutput out = iterExternalResponses.next();
                _collector.emitDirect(out.getTaskId(), "PipelineVar_10_FamilyElement4ExternalResponses", new Values(out));
            }
            eu.qualimaster.base.algorithm.IOutputItemIterator<IIFTimeGraphIndexerInternalRequestsOutput> iterInternalRequests = internalRequestsResult.iterator();
            iterInternalRequests.reset();
            while (iterInternalRequests.hasNext()) {
                IIFTimeGraphIndexerInternalRequestsOutput out = iterInternalRequests.next();
                _collector.emitDirect(out.getTaskId(), "PipelineVar_10_FamilyElement4InternalRequests", new Values(out));
            }
        }
        // _collector.ack(tuple);
        endMonitoring();
    }

    @Override
    public void execute(Tuple tuple) {
        // The payload always travels in field 0 of the tuple.
        forwardTuple(tuple.getValue(0));
    }

    /**
     * Handles an algorithm-switch signal: passivates the old algorithm,
     * reflectively instantiates the requested one, reports the change and
     * activates the new instance.
     */
    @Override
    public void notifyAlgorithmChange(AlgorithmChangeSignal signal) {
        logger.info("Received algorithm switching signal " + signal.getAlgorithm());
        try {
            switch (signal.getAlgorithm()) {
            case "TimeGraphIndexer":
                if (!algChange || !(alg instanceof TimeGraphIndexer)) {
                    algChange = true;
                    if(alg != null) {
                        alg.switchState(State.PASSIVATE); //passivate the previous algorithm
                    }
                    try {
                        Class cls = Class.forName("eu.qualimaster.timegraph.TimeGraphIndexer");
                        alg = (IFTimeGraphIndexer) cls.getConstructor(List.class, List.class, int.class).newInstance(taskIdTimeGraphIndexer, taskIdTimeGraphQueryExecutor, PipelineVar_10_FamilyElement4TaskId);
                    } catch (ClassNotFoundException e) {
                        e.printStackTrace();
                    } catch (InstantiationException e) {
                        e.printStackTrace();
                    } catch (IllegalAccessException e) {
                        e.printStackTrace();
                    } catch (NoSuchMethodException e) {
                        e.printStackTrace();
                    } catch (InvocationTargetException e) {
                        e.printStackTrace();
                    }
                    sendAlgorithmChangeEvent("TimeGraphIndexer", signal.getCauseMessageId());
                    if(alg != null) {
                        alg.switchState(State.ACTIVATE); //activate the current algorithm
                    }
                }
                break;
            }
            super.notifyAlgorithmChange(signal);
        } catch (Throwable e) {
            if (e instanceof DefaultModeException) {
                logger.error("Throwable caught - turning to default mode");
                e.printStackTrace();
                sendDefaultModeMonitoringEvent((DefaultModeException) e);
            }
        }
    }

    @Override
    public void cleanup() {
        super.cleanup();
        if(alg != null) {
            alg.switchState(State.TERMINATING);
        }
    }

    /** Declares the two direct output streams this bolt emits on. */
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declareStream("PipelineVar_10_FamilyElement4ExternalResponses", new Fields("PipelineVar_10_FamilyElement4ExternalResponsesFields"));
        declarer.declareStream("PipelineVar_10_FamilyElement4InternalRequests", new Fields("PipelineVar_10_FamilyElement4InternalRequestsFields"));
    }

    @Override
    protected void prepareShutdown(ShutdownSignal signal) {
        super.prepareShutdown(signal);
        if(alg != null) {
            alg.switchState(State.TERMINATING);
        }
    }
}
/** * Copyright 2005-2014 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.rice.krms.impl.repository; import org.apache.commons.lang.StringUtils; import org.kuali.rice.core.api.mo.common.Versioned; import org.kuali.rice.krad.data.jpa.PortableSequenceGenerator; import org.kuali.rice.krms.api.repository.action.ActionDefinition; import org.kuali.rice.krms.api.repository.action.ActionDefinitionContract; import org.kuali.rice.krms.api.repository.type.KrmsAttributeDefinition; import javax.persistence.CascadeType; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; import javax.persistence.OneToMany; import javax.persistence.Table; import javax.persistence.Version; import java.io.Serializable; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; /** * The Action Business Object is the Action mutable class. 
* * @author Kuali Rice Team (rice.collab@kuali.org) * @see ActionDefinition * @see ActionDefinitionContract * @see org.kuali.rice.krms.framework.engine.Action */ @Entity @Table(name = "KRMS_ACTN_T") public class ActionBo implements ActionDefinitionContract, Versioned, Serializable { private static final long serialVersionUID = 1l; @PortableSequenceGenerator(name = "KRMS_ACTN_S") @GeneratedValue(generator = "KRMS_ACTN_S") @Id @Column(name = "ACTN_ID") private String id; @Column(name = "NMSPC_CD") private String namespace; @Column(name = "NM") private String name; @Column(name = "DESC_TXT") private String description; @Column(name = "TYP_ID") private String typeId; @ManyToOne() @JoinColumn(name = "RULE_ID") private RuleBo rule; @Column(name = "SEQ_NO") private Integer sequenceNumber; @Column(name = "VER_NBR") @Version private Long versionNumber; @OneToMany(orphanRemoval = true, mappedBy = "action", cascade = { CascadeType.MERGE, CascadeType.REMOVE, CascadeType.PERSIST }) private List<ActionAttributeBo> attributeBos; @Override public Map<String, String> getAttributes() { HashMap<String, String> attributes = new HashMap<String, String>(); if (attributeBos != null) for (ActionAttributeBo attr : attributeBos) { if (attr.getAttributeDefinition() == null) { attributes.put("", ""); } else { attributes.put(attr.getAttributeDefinition().getName(), attr.getValue()); } } return attributes; } /** * Set the Action Attributes * * @param attributes to add to this Action */ public void setAttributes(Map<String, String> attributes) { this.attributeBos = new ArrayList<ActionAttributeBo>(); if (!StringUtils.isBlank(this.typeId)) { List<KrmsAttributeDefinition> attributeDefinitions = KrmsRepositoryServiceLocator.getKrmsAttributeDefinitionService().findAttributeDefinitionsByType(this.getTypeId()); Map<String, KrmsAttributeDefinition> attributeDefinitionsByName = new HashMap<String, KrmsAttributeDefinition>(); if (attributeDefinitions != null) { for (KrmsAttributeDefinition 
attributeDefinition : attributeDefinitions) { attributeDefinitionsByName.put(attributeDefinition.getName(), attributeDefinition); } } for (Map.Entry<String, String> attr : attributes.entrySet()) { KrmsAttributeDefinition attributeDefinition = attributeDefinitionsByName.get(attr.getKey()); if (attributeDefinition != null) { ActionAttributeBo attributeBo = new ActionAttributeBo(); attributeBo.setAction(this); attributeBo.setValue(attr.getValue()); attributeBo.setAttributeDefinition(KrmsAttributeDefinitionBo.from(attributeDefinition)); attributeBos.add(attributeBo); } } } } /** * Converts a mutable bo to it's immutable counterpart * * @param bo the mutable business object * @return the immutable object */ public static ActionDefinition to(ActionBo bo) { if (bo == null) { return null; } return ActionDefinition.Builder.create(bo).build(); } /** * Converts a immutable object to it's mutable bo counterpart * * @param im immutable object * @return the mutable bo */ public static ActionBo from(ActionDefinition im) { if (im == null) { return null; } ActionBo bo = new ActionBo(); bo.id = im.getId(); bo.namespace = im.getNamespace(); bo.name = im.getName(); bo.typeId = im.getTypeId(); bo.description = im.getDescription(); // we don't set the rule because we only have the ruleId in the ActionDefinition. If you need the RuleBo as // well, use RuleBo.from to convert the RuleDefinition and all it's children as well. 
bo.sequenceNumber = im.getSequenceNumber(); bo.setVersionNumber(im.getVersionNumber()); // build the list of action attribute BOs List<ActionAttributeBo> attrs = new ArrayList<ActionAttributeBo>(); // for each converted pair, build an ActionAttributeBo and add it to the set for (Map.Entry<String, String> entry : im.getAttributes().entrySet()) { KrmsAttributeDefinitionBo attrDefBo = KrmsRepositoryServiceLocator.getKrmsAttributeDefinitionService().getKrmsAttributeBo(entry.getKey(), im.getNamespace()); ActionAttributeBo attributeBo = new ActionAttributeBo(); attributeBo.setAction(bo); attributeBo.setValue(entry.getValue()); attributeBo.setAttributeDefinition(attrDefBo); attrs.add(attributeBo); } bo.setAttributeBos(attrs); return bo; } public String getId() { return id; } public void setId(String id) { this.id = id; } public String getNamespace() { return namespace; } public void setNamespace(String namespace) { this.namespace = namespace; } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } public String getTypeId() { return typeId; } public void setTypeId(String typeId) { this.typeId = typeId; } public String getRuleId() { if (rule != null) { return rule.getId(); } return null; } public RuleBo getRule() { return rule; } public void setRule(RuleBo rule) { this.rule = rule; } public Integer getSequenceNumber() { return sequenceNumber; } public void setSequenceNumber(Integer sequenceNumber) { this.sequenceNumber = sequenceNumber; } public Long getVersionNumber() { return versionNumber; } public void setVersionNumber(Long versionNumber) { this.versionNumber = versionNumber; } public List<ActionAttributeBo> getAttributeBos() { return attributeBos; } public void setAttributeBos(List<ActionAttributeBo> attributeBos) { this.attributeBos = attributeBos; } }