package edu.duke.cabig.c3pr.service.impl; import java.util.List; import org.apache.log4j.Logger; import edu.duke.cabig.c3pr.dao.ParticipantDao; import edu.duke.cabig.c3pr.domain.DiseaseHistory; import edu.duke.cabig.c3pr.domain.Participant; import edu.duke.cabig.c3pr.domain.StudyParticipantAssignment; import edu.duke.cabig.c3pr.domain.SubjectEligibilityAnswer; import edu.duke.cabig.c3pr.domain.SubjectStratificationAnswer; import edu.duke.cabig.c3pr.esb.impl.MessageBroadcastServiceImpl; import edu.duke.cabig.c3pr.service.ParticipantService; import edu.duke.cabig.c3pr.utils.XMLUtils; /** * @author Kulasekaran, Ramakrishna * @version 1.0 * */ public class ParticipantServiceImpl implements ParticipantService { private static final Logger logger = Logger.getLogger(ParticipantServiceImpl.class); private String isBroadcastEnable="true"; private MessageBroadcastServiceImpl messageBroadcaster; public MessageBroadcastServiceImpl getMessageBroadcaster() { return messageBroadcaster; } public void setMessageBroadcaster( MessageBroadcastServiceImpl messageBroadcaster) { this.messageBroadcaster = messageBroadcaster; } public String getIsBroadcastEnable() { return isBroadcastEnable; } public void setIsBroadcastEnable(String isBroadcastEnable) { this.isBroadcastEnable = isBroadcastEnable; } ParticipantDao participantDao; public ParticipantDao getParticipantDao() { return participantDao; } public void setParticipantDao(ParticipantDao participantDao) { this.participantDao = participantDao; } /** * Search using a sample. Populate a Participant object * @param Participant object * @return List of Participant objects based on the sample participant object * @throws Runtime exception */ public List<Participant> search(Participant participant) throws Exception { return participantDao.searchByExample(participant, true); } public void createRegistration(StudyParticipantAssignment studyParticipantAssignment) { if (logger.isDebugEnabled()) { logger.debug("processFinish(HttpServletRequest, HttpServletResponse, Object, BindException) - in process finish"); //$NON-NLS-1$ } studyParticipantAssignment.getParticipant().getStudyParticipantAssignments().size(); studyParticipantAssignment.getParticipant().addStudyParticipantAssignment(studyParticipantAssignment); studyParticipantAssignment.setRegistrationStatus(evaluateStatus(studyParticipantAssignment)); if(!hasDiseaseHistory(studyParticipantAssignment.getDiseaseHistory())){ studyParticipantAssignment.setDiseaseHistory(null); } if(studyParticipantAssignment.getScheduledArms().get(studyParticipantAssignment.getScheduledArms().size()-1).getArm()==null){ studyParticipantAssignment.getScheduledArms().remove(studyParticipantAssignment.getScheduledArms().size()-1); } if(studyParticipantAssignment.getEligibilityIndicator()){ List<SubjectEligibilityAnswer> criterias=studyParticipantAssignment.getSubjectEligibilityAnswers(); if (logger.isDebugEnabled()) { logger.debug("createRegistration(StudyParticipantAssignment) - studyParticipantAssignment.getEligibilityIndicator():" + studyParticipantAssignment.getEligibilityIndicator()); //$NON-NLS-1$ } studyParticipantAssignment.setEligibilityWaiverReasonText(""); if (logger.isDebugEnabled()) { logger.debug("createRegistration(StudyParticipantAssignment) - printing answers....."); //$NON-NLS-1$ } for(int i=0 ; i<criterias.size() ; i++){ if (logger.isDebugEnabled()) { logger.debug("createRegistration(StudyParticipantAssignment) - question : " + criterias.get(i).getEligibilityCriteria().getQuestionText()); //$NON-NLS-1$ } if 
(logger.isDebugEnabled()) { logger.debug("createRegistration(StudyParticipantAssignment) - ----- answer : " + criterias.get(i).getAnswerText()); //$NON-NLS-1$ } } } if (logger.isDebugEnabled()) { logger.debug("createRegistration(HttpServletRequest, HttpServletResponse, Object, BindException) - Calling participant service"); //$NON-NLS-1$ } participantDao.save(studyParticipantAssignment.getParticipant()); studyParticipantAssignment.setStudyParticipantIdentifier(studyParticipantAssignment.getId()+ ""); if(isBroadcastEnable.equalsIgnoreCase("true")){ String xml = ""; try { xml = XMLUtils.toXml(studyParticipantAssignment); if (logger.isDebugEnabled()) { logger.debug(" - XML for Registration"); //$NON-NLS-1$ } if (logger.isDebugEnabled()) { logger.debug(" - " + xml); //$NON-NLS-1$ } messageBroadcaster.initialize(); messageBroadcaster.broadcast(xml); } catch (Exception e) { // TODO Auto-generated catch block logger.error("", e); //$NON-NLS-1$ } } } public static String evaluateStatus(StudyParticipantAssignment studyParticipantAssignment){ String status="Complete"; if(studyParticipantAssignment.getInformedConsentSignedDateStr().equals("")){ return "Incomplete"; }else if(studyParticipantAssignment.getTreatingPhysician()==null){ return "Incomplete"; }else if(studyParticipantAssignment.getScheduledArms().get(studyParticipantAssignment.getScheduledArms().size()-1).getArm()==null){ return "Incomplete"; }else if(!evaluateStratificationIndicator(studyParticipantAssignment)){ return "Incomplete"; }else if(!studyParticipantAssignment.getEligibilityIndicator()){ return "Incomplete"; } return status; } private static boolean evaluateStratificationIndicator(StudyParticipantAssignment studyParticipantAssignment){ List<SubjectStratificationAnswer> answers=studyParticipantAssignment.getSubjectStratificationAnswers(); for(SubjectStratificationAnswer subjectStratificationAnswer:answers){ if(subjectStratificationAnswer.getStratificationCriterionAnswer()==null){ return false; } } return true; } private boolean hasDiseaseHistory(DiseaseHistory diseaseHistory){ if(diseaseHistory.getAnatomicSite()==null&&(diseaseHistory.getOtherPrimaryDiseaseSiteCode()==null||diseaseHistory.getOtherPrimaryDiseaseSiteCode().equals(""))&& (diseaseHistory.getOtherPrimaryDiseaseCode()==null||diseaseHistory.getOtherPrimaryDiseaseCode().equals(""))&&diseaseHistory.getStudyDisease()==null) return false; return true; } }
package maxwell_lt.mobblocker.blocks; import java.util.List; import java.util.Random; import net.minecraft.block.state.IBlockState; import net.minecraft.entity.monster.AbstractSkeleton; import net.minecraft.entity.monster.EntityMob; import net.minecraft.entity.monster.EntityWitch; import net.minecraft.entity.passive.EntityWolf; import net.minecraft.entity.projectile.EntityArrow; import net.minecraft.entity.projectile.EntityPotion; import net.minecraft.tileentity.TileEntity; import net.minecraft.util.ITickable; import net.minecraft.util.math.AxisAlignedBB; import net.minecraft.util.math.BlockPos; import net.minecraft.world.World; public class TileEntityChunkProtector extends TileEntity implements ITickable { // Used to get random teleportation coords: Random rand; int ticksInWorld; int ticksBeforeDestroyed = 72000; // 72000 = 1 IRL hour AxisAlignedBB chunkBounds; public TileEntityChunkProtector() { super(); this.rand = new Random(); this.ticksInWorld = 0; this.chunkBounds = getChunk(getPos()); } @Override public void update() { if (!world.isRemote) { teleportMobs(chunkBounds); killArrows(chunkBounds); killPotions(chunkBounds); calmAngryWolves(chunkBounds); // Set metadata this.ticksInWorld++; if (ticksInWorld <= ticksBeforeDestroyed * 0.3F) { world.setBlockState(getPos(), world.getBlockState(getPos()).withProperty(BlockChunkProtector.DECAYLEVEL, 0)); } else if (ticksInWorld > ticksBeforeDestroyed * 0.3F && ticksInWorld <= ticksBeforeDestroyed * 0.7F) { world.setBlockState(getPos(), world.getBlockState(getPos()).withProperty(BlockChunkProtector.DECAYLEVEL, 1)); } else if (ticksInWorld > ticksBeforeDestroyed * 0.7F && ticksInWorld < ticksBeforeDestroyed) { world.setBlockState(getPos(), world.getBlockState(getPos()).withProperty(BlockChunkProtector.DECAYLEVEL, 2)); } else { world.setBlockToAir(getPos()); } } } // Teleports every hostile mob in the chunk like endermen. private void teleportMobs(AxisAlignedBB chunkBounds) { // Gets a list of all the entities in the same chunk as this block List<EntityMob> list = world.getEntitiesWithinAABB(EntityMob.class, chunkBounds); for (EntityMob entity : list) { boolean moved = false; // Stores the status of teleportation attempts. int counter = 0; // Used to prevent infinite loops. while (!moved) { counter++; if (counter > 10) break; // Breaks out of a possible infinite loop. // Implementation of Enderman random teleport code: double newX = entity.posX + (this.rand.nextDouble() - 0.5D) * 64.0D; double newY = entity.posY + (double)(this.rand.nextInt(64) - 32); double newZ = entity.posZ + (this.rand.nextDouble() - 0.5D) * 64.0D; moved = entity.attemptTeleport(newX, world.getTopSolidOrLiquidBlock(new BlockPos(newX, newY, newZ)).getY(), newZ); } // Reset loop controllers: counter = 0; moved = false; } } private void killArrows(AxisAlignedBB chunkBounds) { List<EntityArrow> list = world.getEntitiesWithinAABB(EntityArrow.class, chunkBounds); for (EntityArrow arrow : list) { if (arrow.shootingEntity instanceof AbstractSkeleton) { if (arrow.isBurning()) { arrow.setDead(); } else { arrow.setFire(1); arrow.setVelocity(0, 0, 0); } } } } private void killPotions(AxisAlignedBB chunkBounds) { List<EntityPotion> list = world.getEntitiesWithinAABB(EntityPotion.class, chunkBounds); for (EntityPotion potion : list) { if (potion.getThrower() instanceof EntityWitch) { potion.setDead(); } } } private void calmAngryWolves(AxisAlignedBB chunkBounds) { // Currently broken, only the helper wolves are calmed. 
List<EntityWolf> list = world.getEntitiesWithinAABB(EntityWolf.class, chunkBounds); for (EntityWolf wolf : list) { if (wolf.isAngry()) { wolf.setAttackTarget(null); wolf.setRevengeTarget(null); wolf.setAngry(false); } } } // Returns an AxisAlignedBB that surrounds the entire chunk a given BlockPos is in. private AxisAlignedBB getChunk(BlockPos blockpos) { return new AxisAlignedBB(blockpos.getX() & ~0xF, 0, blockpos.getZ() & ~0xF, (blockpos.getX() & ~0xF) + 16, 256, (blockpos.getZ() & ~0xF) + 16); } @Override public boolean shouldRefresh(World world, BlockPos pos, IBlockState oldState, IBlockState newState) { return (oldState.getBlock() != newState.getBlock()); } }
package io.mgba.Data.DTOs; import android.os.Parcel; import android.os.Parcelable; import com.arlib.floatingsearchview.suggestions.model.SearchSuggestion; import java.io.File; import io.mgba.Data.ContentProvider.game.GameCursor; import io.mgba.Data.Platform; import io.mgba.Services.IO.FilesService; public class Game implements Parcelable, SearchSuggestion { public static final Creator<Game> CREATOR = new Creator<Game>() { @Override public Game createFromParcel(Parcel source) { return new Game(source); } @Override public Game[] newArray(int size) { return new Game[size]; } }; private final File file; private final Platform platform; private String name; private String description; private String released; private String developer; private String genre; private String coverURL = null; private String MD5; private boolean favourite; public Game(String path, String name, String description, String released, String developer, String genre, String coverURL, String MD5, boolean favourite, Platform platform) { this.file = new File(path); this.name = name; this.description = description; this.released = released; this.developer = developer; this.genre = genre; this.coverURL = coverURL; this.MD5 = MD5; this.favourite = favourite; this.platform = platform; } public Game(String path, Platform platform) { this.file = new File(path); this.platform = platform; } protected Game(Parcel in) { this.file = (File) in.readSerializable(); this.name = in.readString(); this.description = in.readString(); this.released = in.readString(); this.developer = in.readString(); this.genre = in.readString(); this.coverURL = in.readString(); this.MD5 = in.readString(); this.favourite = in.readByte() != 0; int tmpPlatform = in.readInt(); this.platform = tmpPlatform == -1 ? null : Platform.values()[tmpPlatform]; } public String getName() { if(name == null) setName(FilesService.getFileWithoutExtension(file)); return name; } public void setName(String name) { this.name = name; } public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } public String getReleased() { return released; } public void setReleased(String released) { this.released = released; } public String getDeveloper() { return developer; } public void setDeveloper(String developer) { this.developer = developer; } public String getGenre() { return genre; } public void setGenre(String genre) { this.genre = genre; } public String getCoverURL() { return coverURL; } public void setCoverURL(String coverURL) { this.coverURL = coverURL; } public File getFile() { return file; } public String getMD5() { return MD5; } public void setMD5(String MD5) { this.MD5 = MD5; } public boolean isFavourite() { return favourite; } public void setFavourite(boolean favourite) { this.favourite = favourite; } public Platform getPlatform() { return platform; } public boolean isAdvanced() { return platform.getValue() == Platform.GBA.getValue(); } public void compare(GameCursor dbVersion){ if(name == null) setName(dbVersion.getName()); if(description == null) setDescription(dbVersion.getDescription()); if(released == null) setReleased(dbVersion.getReleased()); if(developer == null) setDeveloper(dbVersion.getDeveloper()); if(genre == null) setGenre(dbVersion.getGenre()); if(coverURL == null) setCoverURL(dbVersion.getCover()); if(MD5 == null) setMD5(dbVersion.getMd5()); if(!isFavourite()) setFavourite(dbVersion.getIsfavourite()); } @Override public String getBody() { return getName(); } @Override public int 
describeContents() { return 0; } @Override public void writeToParcel(Parcel dest, int flags) { dest.writeSerializable(this.file); dest.writeString(this.name); dest.writeString(this.description); dest.writeString(this.released); dest.writeString(this.developer); dest.writeString(this.genre); dest.writeString(this.coverURL); dest.writeString(this.MD5); dest.writeByte(this.favourite ? (byte) 1 : (byte) 0); dest.writeInt(this.platform == null ? -1 : this.platform.ordinal()); } }
package net.geforcemods.securitycraft.blocks; import java.util.Random; import net.geforcemods.securitycraft.api.IOwnable; import net.geforcemods.securitycraft.api.LinkableBlockEntity; import net.minecraft.core.BlockPos; import net.minecraft.server.level.ServerLevel; import net.minecraft.world.entity.LivingEntity; import net.minecraft.world.entity.player.Player; import net.minecraft.world.item.Item; import net.minecraft.world.item.ItemStack; import net.minecraft.world.level.BlockGetter; import net.minecraft.world.level.Level; import net.minecraft.world.level.block.Block; import net.minecraft.world.level.block.DoorBlock; import net.minecraft.world.level.block.EntityBlock; import net.minecraft.world.level.block.LevelEvent; import net.minecraft.world.level.block.entity.BlockEntity; import net.minecraft.world.level.block.state.BlockState; import net.minecraft.world.level.block.state.properties.DoubleBlockHalf; import net.minecraft.world.level.material.PushReaction; import net.minecraft.world.phys.HitResult; public abstract class SpecialDoorBlock extends DoorBlock implements EntityBlock { public SpecialDoorBlock(Block.Properties properties) { super(properties); } @Override public void setPlacedBy(Level level, BlockPos pos, BlockState state, LivingEntity placer, ItemStack stack) { super.setPlacedBy(level, pos, state, placer, stack); if (level.getBlockEntity(pos) instanceof IOwnable lowerBe && level.getBlockEntity(pos.above()) instanceof IOwnable upperBe) { if (placer instanceof Player player) { lowerBe.setOwner(player.getGameProfile().getId().toString(), player.getName().getString()); upperBe.setOwner(player.getGameProfile().getId().toString(), player.getName().getString()); } if (lowerBe instanceof LinkableBlockEntity linkable1 && upperBe instanceof LinkableBlockEntity linkable2) LinkableBlockEntity.link(linkable1, linkable2); } } @Override public void tick(BlockState state, ServerLevel level, BlockPos pos, Random rand) { BlockState upperState = level.getBlockState(pos); if (!upperState.getValue(DoorBlock.OPEN)) return; BlockState lowerState; if (upperState.getValue(DoorBlock.HALF) == DoubleBlockHalf.LOWER) { lowerState = upperState; pos = pos.above(); upperState = level.getBlockState(pos); } else lowerState = level.getBlockState(pos.below()); level.setBlock(pos, upperState.setValue(DoorBlock.OPEN, false), 3); level.setBlock(pos.below(), lowerState.setValue(DoorBlock.OPEN, false), 3); level.levelEvent(null, LevelEvent.SOUND_CLOSE_IRON_DOOR, pos, 0); } @Override public void onRemove(BlockState state, Level level, BlockPos pos, BlockState newState, boolean isMoving) { super.onRemove(state, level, pos, newState, isMoving); if (state.getBlock() != newState.getBlock()) level.removeBlockEntity(pos); } @Override public boolean triggerEvent(BlockState state, Level level, BlockPos pos, int id, int param) { super.triggerEvent(state, level, pos, id, param); BlockEntity blockEntity = level.getBlockEntity(pos); return blockEntity == null ? false : blockEntity.triggerEvent(id, param); } @Override public ItemStack getCloneItemStack(BlockState state, HitResult target, BlockGetter level, BlockPos pos, Player player) { return new ItemStack(getDoorItem()); } @Override public PushReaction getPistonPushReaction(BlockState state) { return PushReaction.BLOCK; } public abstract Item getDoorItem(); }
package net.openhft.chronicle.queue.impl.single; import net.openhft.chronicle.bytes.*; import net.openhft.chronicle.core.Jvm; import net.openhft.chronicle.core.StackTrace; import net.openhft.chronicle.core.annotation.UsedViaReflection; import net.openhft.chronicle.core.io.AbstractCloseable; import net.openhft.chronicle.core.io.IORuntimeException; import net.openhft.chronicle.queue.ChronicleQueue; import net.openhft.chronicle.queue.ExcerptAppender; import net.openhft.chronicle.queue.QueueSystemProperties; import net.openhft.chronicle.queue.impl.ExcerptContext; import net.openhft.chronicle.queue.impl.WireStore; import net.openhft.chronicle.queue.impl.WireStorePool; import net.openhft.chronicle.queue.impl.table.AbstractTSQueueLock; import net.openhft.chronicle.wire.*; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.io.EOFException; import java.io.File; import java.io.IOException; import java.io.StreamCorruptedException; import java.nio.BufferOverflowException; import static net.openhft.chronicle.wire.Wires.*; class StoreAppender extends AbstractCloseable implements ExcerptAppender, ExcerptContext, InternalAppender { @NotNull private final SingleChronicleQueue queue; @NotNull private final WriteLock writeLock; private final WriteLock appendLock; @NotNull private final StoreAppenderContext writeContext; private final WireStorePool storePool; private final boolean checkInterrupts; @Nullable SingleChronicleQueueStore store; private int cycle = Integer.MIN_VALUE; @Nullable private Wire wire; @Nullable private Wire wireForIndex; private long positionOfHeader = 0; private long lastIndex = Long.MIN_VALUE; private long lastPosition; private int lastCycle; @Nullable private Pretoucher pretoucher = null; private NativeBytesStore<Void> batchTmp; private Wire bufferWire = null; @UsedViaReflection private final Finalizer finalizer; private boolean disableThreadSafetyCheck; private int count = 0; StoreAppender(@NotNull final SingleChronicleQueue queue, @NotNull final WireStorePool storePool, final boolean checkInterrupts) { this.queue = queue; this.storePool = storePool; this.checkInterrupts = checkInterrupts; this.writeLock = queue.writeLock(); this.appendLock = queue.appendLock(); this.writeContext = new StoreAppenderContext(); // always put references to "this" last. queue.addCloseListener(this); queue.cleanupStoreFilesWithNoData(); int cycle = queue.cycle(); int lastCycle = queue.lastCycle(); if (lastCycle != cycle && lastCycle >= 0) // ensure that the EOF is written on the last cycle setCycle2(lastCycle, false); finalizer = Jvm.isResourceTracing() ? new Finalizer() : null; } private void checkAppendLock() { checkAppendLock(false); } /** * check the appendLock * * @param allowMyProcess this will only be true for any writes coming from the sink replicator */ private void checkAppendLock(boolean allowMyProcess) { if (appendLock.locked()) checkAppendLockLocked(allowMyProcess); } private void checkAppendLockLocked(boolean allowMyProcess) { // separate method as this is in fast path if (appendLock instanceof AbstractTSQueueLock) { final AbstractTSQueueLock appendLock = (AbstractTSQueueLock) this.appendLock; final long lockedBy = appendLock.lockedBy(); if (lockedBy == AbstractTSQueueLock.UNLOCKED) return; boolean myPID = lockedBy == Jvm.getProcessId(); if (allowMyProcess && myPID) return; throw new IllegalStateException("locked: unable to append because a lock is being held by pid=" + (myPID ? 
"me" : lockedBy) + ", file=" + queue.file()); } else throw new IllegalStateException("locked: unable to append, file=" + queue.file()); } private static void releaseBytesFor(Wire w) { if (w != null) { w.bytes().release(INIT); } } @Deprecated // Should not be providing accessors to reference-counted objects @NotNull WireStore store() { if (store == null) setCycle(cycle()); return store; } /** * @param marshallable to write to excerpt. */ @Override public void writeBytes(@NotNull final WriteBytesMarshallable marshallable) { throwExceptionIfClosed(); try (DocumentContext dc = writingDocument()) { Bytes<?> bytes = dc.wire().bytes(); long wp = bytes.writePosition(); marshallable.writeMarshallable(bytes); if (wp == bytes.writePosition()) dc.rollbackOnClose(); } } @Override protected void performClose() { releaseBytesFor(wireForIndex); releaseBytesFor(wire); releaseBytesFor(bufferWire); if (pretoucher != null) pretoucher.close(); if (store != null) { storePool.closeStore(store); store = null; } storePool.close(); pretoucher = null; wireForIndex = null; wire = null; bufferWire = null; } @Override public void pretouch() { throwExceptionIfClosed(); try { if (pretoucher == null) pretoucher = new Pretoucher(queue()); pretoucher.execute(); } catch (Throwable e) { Jvm.warn().on(getClass(), e); throw Jvm.rethrow(e); } } @Nullable @Override public Wire wire() { return wire; } @Nullable @Override public Wire wireForIndex() { return wireForIndex; } @Override public long timeoutMS() { return queue.timeoutMS; } void lastIndex(long index) { this.lastIndex = index; } @Override public boolean recordHistory() { return sourceId() != 0; } void setCycle(int cycle) { if (cycle != this.cycle) setCycle2(cycle, true); } private void setCycle2(final int cycle, final boolean createIfAbsent) { queue.throwExceptionIfClosed(); if (cycle < 0) throw new IllegalArgumentException("You can not have a cycle that starts " + "before Epoch. cycle=" + cycle); SingleChronicleQueue queue = this.queue; SingleChronicleQueueStore oldStore = this.store; SingleChronicleQueueStore newStore = storePool.acquire(cycle, queue.epoch(), createIfAbsent, oldStore); if (newStore != oldStore) { this.store = newStore; if (oldStore != null) storePool.closeStore(oldStore); } resetWires(queue); // only set the cycle after the wire is set. this.cycle = cycle; if (this.store == null) return; assert wire.startUse(); wire.parent(this); wire.pauser(queue.pauserSupplier.get()); resetPosition(); queue.onRoll(cycle); } private void resetWires(@NotNull final ChronicleQueue queue) { WireType wireType = queue.wireType(); { Wire oldw = this.wire; this.wire = store == null ? null : createWire(wireType); assert wire != oldw || wire == null; releaseBytesFor(oldw); } { Wire old = this.wireForIndex; this.wireForIndex = store == null ? 
null : createWire(wireType); assert wire != old || wire == null; releaseBytesFor(old); } } private Wire createWire(@NotNull final WireType wireType) { final Wire w = wireType.apply(store.bytes()); if (store.dataVersion() > 0) w.usePadding(true); return w; } /** * @return true if the header number is changed, otherwise false * @throws UnrecoverableTimeoutException todo */ private boolean resetPosition() { long originalHeaderNumber = wire.headerNumber(); try { if (store == null || wire == null) return false; long position = store.writePosition(); position(position, position); Bytes<?> bytes = wire.bytes(); assert !QueueSystemProperties.CHECK_INDEX || checkPositionOfHeader(bytes); final long headerNumber = store.lastSequenceNumber(this); wire.headerNumber(queue.rollCycle().toIndex(cycle, headerNumber + 1) - 1); assert !QueueSystemProperties.CHECK_INDEX || wire.headerNumber() != -1 || checkIndex(wire.headerNumber(), positionOfHeader); bytes.writeLimit(bytes.capacity()); assert !QueueSystemProperties.CHECK_INDEX || checkWritePositionHeaderNumber(); return originalHeaderNumber != wire.headerNumber(); } catch (@NotNull BufferOverflowException | StreamCorruptedException e) { throw new AssertionError(e); } } private boolean checkPositionOfHeader(final Bytes<?> bytes) { if (positionOfHeader == 0) { return true; } int header = bytes.readVolatileInt(positionOfHeader); // ready or an incomplete message header? return isReadyData(header) || isNotComplete(header); } @NotNull @Override // throws UnrecoverableTimeoutException public DocumentContext writingDocument() { return writingDocument(false); // avoid overhead of a default method. } @NotNull @Override // throws UnrecoverableTimeoutException public DocumentContext writingDocument(final boolean metaData) { throwExceptionIfClosed(); // we allow the sink process to write metaData checkAppendLock(metaData); count++; if (count > 1) { assert metaData == writeContext.metaData; return writeContext; } if (queue.doubleBuffer && writeLock.locked() && !metaData) { writeContext.isClosed = false; writeContext.rollbackOnClose = false; writeContext.buffered = true; if (bufferWire == null) { Bytes bufferBytes = Bytes.allocateElasticOnHeap(); bufferWire = queue().wireType().apply(bufferBytes); } writeContext.wire = bufferWire; writeContext.metaData(false); } else { writeLock.lock(); int cycle = queue.cycle(); if (wire == null) setWireIfNull(cycle); if (this.cycle != cycle) rollCycleTo(cycle); int safeLength = (int) queue.overlapSize(); resetPosition(); assert !QueueSystemProperties.CHECK_INDEX || checkWritePositionHeaderNumber(); // sets the writeLimit based on the safeLength openContext(metaData, safeLength); // Move readPosition to the start of the context. i.e. 
readRemaining() == 0 wire.bytes().readPosition(wire.bytes().writePosition()); } return writeContext; } @Override public DocumentContext acquireWritingDocument(boolean metaData) { if (!DISABLE_THREAD_SAFETY) this.threadSafetyCheck(true); if (writeContext.wire != null && writeContext.isOpen() && writeContext.chainedElement()) return writeContext; return writingDocument(metaData); } public void normaliseEOFs() { final WriteLock writeLock = queue.writeLock(); writeLock.lock(); try { normaliseEOFs0(); } finally { writeLock.unlock(); } } private void normaliseEOFs0() { int last = queue.lastCycle(); int first = queue.firstCycle(); for(int cycle = first; cycle < last; ++cycle) { setCycle2(cycle, false); if(wire != null) store.writeEOF(wire, timeoutMS()); } } private void setWireIfNull(final int cycle) { int lastCycle = queue.lastCycle(); if (lastCycle == Integer.MIN_VALUE) lastCycle = cycle; else { int cur = lastCycle - 1; int firstCycle = queue.firstCycle(); while (cur >= firstCycle) { setCycle2(cur, false); if (wire != null) { if (!store.writeEOF(wire, timeoutMS())) break; } cur--; } } setCycle2(lastCycle, true); } private long writeHeader(@NotNull final Wire wire, final int safeLength) { Bytes<?> bytes = wire.bytes(); // writePosition points at the last record in the queue, so we can just skip it and we're ready for write long pos = positionOfHeader; long lastPos = store.writePosition(); if (pos < lastPos) { // queue moved since we last touched it - recalculate header number try { wire.headerNumber(queue.rollCycle().toIndex(cycle, store.lastSequenceNumber(this))); } catch (StreamCorruptedException ex) { Jvm.warn().on(getClass(), "Couldn't find last sequence", ex); } } int header = bytes.readVolatileInt(lastPos); assert header != NOT_INITIALIZED; lastPos += lengthOf(bytes.readVolatileInt(lastPos)) + SPB_HEADER_SIZE; bytes.writePosition(lastPos); return wire.enterHeader(safeLength); } private void openContext(final boolean metaData, final int safeLength) { assert wire != null; this.positionOfHeader = writeHeader(wire, safeLength); // sets wire.bytes().writePosition = position + 4; writeContext.isClosed = false; writeContext.rollbackOnClose = false; writeContext.buffered = false; writeContext.wire = wire; // Jvm.isDebug() ?
acquireBufferWire() : wire; writeContext.metaData(metaData); } boolean checkWritePositionHeaderNumber() { if (wire == null || wire.headerNumber() == Long.MIN_VALUE) return true; try { long pos = positionOfHeader; long seq1 = queue.rollCycle().toSequenceNumber(wire.headerNumber() + 1) - 1; long seq2 = store.sequenceForPosition(this, pos, true); if (seq1 != seq2) { String message = "~~~~~~~~~~~~~~ " + "thread: " + Thread.currentThread().getName() + " pos: " + pos + " header: " + wire.headerNumber() + " seq1: " + seq1 + " seq2: " + seq2; AssertionError ae = new AssertionError(message); throw ae; } } catch (Exception e) { // TODO FIX Jvm.warn().on(getClass(), e); throw Jvm.rethrow(e); } return true; } @Override public int sourceId() { return queue.sourceId; } @Override public void writeBytes(@NotNull final BytesStore bytes) { throwExceptionIfClosed(); checkAppendLock(); writeLock.lock(); try { int cycle = queue.cycle(); if (wire == null) setWireIfNull(cycle); if (this.cycle != cycle) rollCycleTo(cycle); this.positionOfHeader = writeHeader(wire, (int) queue.overlapSize()); // writeHeader sets wire.byte().writePosition assert ((AbstractWire) wire).isInsideHeader(); beforeAppend(wire, wire.headerNumber() + 1); Bytes<?> wireBytes = wire.bytes(); wireBytes.write(bytes); wire.updateHeader(positionOfHeader, false, 0); lastIndex(wire.headerNumber()); lastPosition = positionOfHeader; lastCycle = cycle; store.writePosition(positionOfHeader); writeIndexForPosition(lastIndex, positionOfHeader); } catch (StreamCorruptedException e) { throw new AssertionError(e); } finally { writeLock.unlock(); } } public void writeBytes(final long index, @NotNull final BytesStore bytes) { throwExceptionIfClosed(); checkAppendLock(); writeLock.lock(); try { writeBytesInternal(index, bytes); } finally { writeLock.unlock(); } } /** * Appends bytes without write lock. Should only be used if write lock is acquired externally. Never use without write locking as it WILL corrupt * the queue file and cause data loss. */ protected void writeBytesInternal(final long index, @NotNull final BytesStore bytes) { writeBytesInternal(index, bytes, false); } protected void writeBytesInternal(final long index, @NotNull final BytesStore bytes, boolean metadata) { checkAppendLock(true); final int cycle = queue.rollCycle().toCycle(index); if (wire == null) setWireIfNull(cycle); if (this.cycle != cycle) rollCycleTo(cycle); long headerNumber = wire.headerNumber(); boolean isNextIndex = index == headerNumber + 1; if (!isNextIndex) { // in case our cached headerNumber is incorrect. 
if (resetPosition()) { headerNumber = wire.headerNumber(); /// if the header number has changed then we will have to roll if (queue.rollCycle().toCycle(headerNumber) != cycle) { rollCycleTo(cycle); headerNumber = wire.headerNumber(); } } isNextIndex = index == headerNumber + 1; if (!isNextIndex) { if (index > headerNumber + 1) throw new IllegalStateException("Unable to move to index " + Long.toHexString(index) + " beyond the end of the queue, current: " + Long.toHexString(headerNumber)); // this can happen when using queue replication when we are back filling from a number of sinks at the same time // it's normal behaviour in this use case so should not be a WARN if (Jvm.isDebugEnabled(getClass())) Jvm.debug().on(getClass(), "Trying to overwrite index " + Long.toHexString(index) + " which is before the end of the queue"); return; } } writeBytesInternal(bytes, metadata); headerNumber = wire.headerNumber(); boolean isIndex = index == headerNumber; if (!isIndex) { writeBytesInternal(bytes, metadata); Thread.yield(); } } private void writeBytesInternal(@NotNull final BytesStore bytes, boolean metadata) { assert writeLock.locked(); try { int safeLength = (int) queue.overlapSize(); assert count == 0 : "count=" + count; openContext(metadata, safeLength); try { writeContext.wire().bytes().write(bytes); } finally { writeContext.close(false); count = 0; } } finally { writeContext.isClosed = true; } } private void position(final long position, final long startOfMessage) { // did the position jump too far forward. if (position > store.writePosition() + queue.blockSize()) throw new IllegalArgumentException("pos: " + position + ", store.writePosition()=" + store.writePosition() + " queue.blockSize()=" + queue.blockSize()); position0(position, startOfMessage, wire.bytes()); } @Override public long lastIndexAppended() { if (lastIndex != Long.MIN_VALUE) return lastIndex; if (lastPosition == Long.MIN_VALUE || wire == null) { throw new IllegalStateException("nothing has been appended, so there is no last index"); } try { long sequenceNumber = store.sequenceForPosition(this, lastPosition, true); long index = queue.rollCycle().toIndex(lastCycle, sequenceNumber); lastIndex(index); return index; } catch (Exception e) { throw Jvm.rethrow(e); } } @Override public int cycle() { if (cycle == Integer.MIN_VALUE) { int cycle = this.queue.lastCycle(); if (cycle < 0) cycle = queue.cycle(); return cycle; } return cycle; } @Override @NotNull public SingleChronicleQueue queue() { return queue; } /* * overridden in delta wire */ @SuppressWarnings("unused") void beforeAppend(final Wire wire, final long index) { } /* * wire must be not null when this method is called */ // throws UnrecoverableTimeoutException private void rollCycleTo(final int cycle) { // only a valid check if the wire was set. if (this.cycle == cycle) throw new AssertionError(); store.writeEOF(wire, timeoutMS()); int lastCycle = queue.lastCycle(); if (lastCycle < cycle && lastCycle != this.cycle && lastCycle >= 0) { setCycle2(lastCycle, false); rollCycleTo(cycle); } else { setCycle2(cycle, true); } } /** * Write an EOF marker on the current cycle if it is about to roll. It would do this any way if a new message was written, but this doesn't create * a new cycle or add a message. Only used by tests.
*/ void writeEndOfCycleIfRequired() { if (wire != null && queue.cycle() != cycle) store.writeEOF(wire, timeoutMS()); } // throws UnrecoverableTimeoutException void writeIndexForPosition(final long index, final long position) throws StreamCorruptedException { long sequenceNumber = queue.rollCycle().toSequenceNumber(index); store.setPositionForSequenceNumber(this, sequenceNumber, position); } boolean checkIndex(final long index, final long position) { try { final long seq1 = queue.rollCycle().toSequenceNumber(index + 1) - 1; final long seq2 = store.sequenceForPosition(this, position, true); if (seq1 != seq2) { final long seq3 = store.indexing .linearScanByPosition(wireForIndex(), position, 0, 0, true); System.out.println("Thread=" + Thread.currentThread().getName() + " pos: " + position + " seq1: " + Long.toHexString(seq1) + " seq2: " + Long.toHexString(seq2) + " seq3: " + Long.toHexString(seq3)); System.out.println(store.dump()); assert seq1 == seq3 : "seq1=" + seq1 + ", seq3=" + seq3; assert seq1 == seq2 : "seq1=" + seq1 + ", seq2=" + seq2; } } catch (@NotNull EOFException | UnrecoverableTimeoutException | StreamCorruptedException e) { throw new AssertionError(e); } return true; } @Override public String toString() { return "StoreAppender{" + "queue=" + queue + ", cycle=" + cycle + ", position=" + positionOfHeader + ", lastIndex=" + lastIndex + ", lastPosition=" + lastPosition + ", lastCycle=" + lastCycle + '}'; } void position0(final long position, final long startOfMessage, Bytes<?> bytes) { this.positionOfHeader = position; bytes.writePosition(startOfMessage); } @Override public @NotNull ExcerptAppender disableThreadSafetyCheck(boolean disableThreadSafetyCheck) { this.disableThreadSafetyCheck = disableThreadSafetyCheck; return this; } @Override protected boolean threadSafetyCheck(boolean isUsed) { return disableThreadSafetyCheck || super.threadSafetyCheck(isUsed); } @Override public File currentFile() { SingleChronicleQueueStore store = this.store; return store == null ? 
null : store.currentFile(); } private class Finalizer { @Override protected void finalize() throws Throwable { super.finalize(); writeContext.rollbackOnClose(); warnAndCloseIfNotClosed(); } } final class StoreAppenderContext implements WriteDocumentContext { boolean isClosed = true; private boolean metaData = false; private boolean rollbackOnClose = false; private boolean buffered = false; @Nullable private Wire wire; private boolean alreadyClosedFound; private StackTrace closedHere; private boolean chainedElement; @Override public int sourceId() { return StoreAppender.this.sourceId(); } @Override public boolean isPresent() { return false; } @Override public Wire wire() { return wire; } @Override public boolean isMetaData() { return metaData; } /** * Call this if you have detected an error condition and you want the context rolled back when it is closed, rather than committed */ @Override public void rollbackOnClose() { this.rollbackOnClose = true; } @Override public void close() { close(true); } public void close(boolean unlock) { if (chainedElement) return; if (isClosed) { Jvm.warn().on(getClass(), "Already Closed, close was called twice.", new StackTrace("Second close", closedHere)); alreadyClosedFound = true; return; } count--; if (count > 0) return; if (alreadyClosedFound) { closedHere = new StackTrace("Closed here"); } try { // historically there have been problems with an interrupted thread causing exceptions // in calls below, and we saw half-written messages final boolean interrupted = checkInterrupts && Thread.currentThread().isInterrupted(); if (interrupted) throw new InterruptedException(); if (rollbackOnClose) { doRollback(); return; } if (wire == StoreAppender.this.wire) { try { wire.updateHeader(positionOfHeader, metaData, 0); } catch (IllegalStateException e) { if (queue.isClosed()) return; throw e; } lastPosition = positionOfHeader; lastCycle = cycle; if (!metaData) { lastIndex(wire.headerNumber()); store.writePosition(positionOfHeader); if (lastIndex != Long.MIN_VALUE) writeIndexForPosition(lastIndex, positionOfHeader); } } else if (wire != null) { if (buffered) { writeBytes(wire.bytes()); unlock = false; wire.clear(); } else { writeBytesInternal(wire.bytes(), metaData); wire = StoreAppender.this.wire; } } } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new IllegalStateException(e); } catch (StreamCorruptedException | UnrecoverableTimeoutException e) { throw new IllegalStateException(e); } finally { isClosed = true; if (unlock) try { writeLock.unlock(); } catch (Exception ex) { Jvm.warn().on(getClass(), "Exception while unlocking: ", ex); } } } private void doRollback() { if (buffered) { assert wire != StoreAppender.this.wire; wire.clear(); } else { // zero out all contents...
final Bytes<?> bytes = wire.bytes(); try { for (long i = positionOfHeader; i <= bytes.writePosition(); i++) bytes.writeByte(i, (byte) 0); long lastPosition = StoreAppender.this.lastPosition; position0(lastPosition, lastPosition, bytes); ((AbstractWire) wire).forceNotInsideHeader(); } catch (BufferOverflowException | IllegalStateException e) { if (bytes instanceof MappedBytes && ((MappedBytes) bytes).isClosed()) { Jvm.warn().on(getClass(), "Unable to roll back excerpt as it is closed."); return; } throw e; } } } @Override public long index() { if (buffered) { throw new IndexNotAvailableException("Index is unavailable when double buffering"); } if (this.wire.headerNumber() == Long.MIN_VALUE) { try { wire.headerNumber(queue.rollCycle().toIndex(cycle, store.lastSequenceNumber(StoreAppender.this))); long headerNumber0 = wire.headerNumber(); assert (((AbstractWire) this.wire).isInsideHeader()); return isMetaData() ? headerNumber0 : headerNumber0 + 1; } catch (IOException e) { throw new IORuntimeException(e); } } return isMetaData() ? Long.MIN_VALUE : this.wire.headerNumber() + 1; } @Override public boolean isOpen() { return !isClosed; } @Override public boolean isNotComplete() { return !isClosed; } @Override public void start(boolean metaData) { throw new UnsupportedOperationException(); } public void metaData(boolean metaData) { this.metaData = metaData; } @Override public boolean chainedElement() { return chainedElement; } @Override public void chainedElement(boolean chainedElement) { this.chainedElement = chainedElement; } } }
package org.videolan.jvlc; import java.awt.Dimension; import org.videolan.jvlc.internal.LibVlc; import org.videolan.jvlc.internal.LibVlc.LibVlcInstance; import org.videolan.jvlc.internal.LibVlc.libvlc_exception_t; public class Video { private final LibVlcInstance libvlcInstance; private final LibVlc libvlc; public Video( JVLC jvlc) { this.libvlcInstance = jvlc.getInstance(); this.libvlc = jvlc.getLibvlc(); } /* (non-Javadoc) * @see org.videolan.jvlc.VideoIntf#destroyVideo() */ public void destroyVideo(MediaInstance media) { libvlc_exception_t exception = new libvlc_exception_t(); libvlc.libvlc_video_destroy(media.getInstance(), exception ); } /* (non-Javadoc) * @see org.videolan.jvlc.VideoIntf#getFullscreen() */ public boolean getFullscreen(MediaInstance media) { libvlc_exception_t exception = new libvlc_exception_t(); return libvlc.libvlc_get_fullscreen(media.getInstance(), exception) == 1 ? true : false; } /* (non-Javadoc) * @see org.videolan.jvlc.VideoIntf#getSnapshot(java.lang.String) */ public void getSnapshot(MediaInstance media, String filepath, int width, int height) { libvlc_exception_t exception = new libvlc_exception_t(); libvlc.libvlc_video_take_snapshot(media.getInstance(), filepath, width, height, exception); } /* (non-Javadoc) * @see org.videolan.jvlc.VideoIntf#getVideoHeight() */ public int getHeight(MediaInstance media) { libvlc_exception_t exception = new libvlc_exception_t(); return libvlc.libvlc_video_get_height(media.getInstance(), exception); } /* (non-Javadoc) * @see org.videolan.jvlc.VideoIntf#getVideoWidth() */ public int getWidth(MediaInstance media) { libvlc_exception_t exception = new libvlc_exception_t(); return libvlc.libvlc_video_get_width(media.getInstance(), exception); } /* (non-Javadoc) * @see org.videolan.jvlc.VideoIntf#reparentVideo(java.awt.Component) */ public void reparent(MediaInstance media, java.awt.Canvas canvas) { libvlc_exception_t exception = new libvlc_exception_t(); long drawable = com.sun.jna.Native.getComponentID(canvas); libvlc.libvlc_video_reparent(media.getInstance(), drawable, exception); } /* (non-Javadoc) * @see org.videolan.jvlc.VideoIntf#resizeVideo(int, int) */ public void setSize(int width, int height) { libvlc_exception_t exception = new libvlc_exception_t(); libvlc.libvlc_video_set_size(libvlcInstance, width, height, exception); } /* (non-Javadoc) * @see org.videolan.jvlc.VideoIntf#setFullscreen(boolean) */ public void setFullscreen(MediaInstance media, boolean fullscreen) { libvlc_exception_t exception = new libvlc_exception_t(); libvlc.libvlc_set_fullscreen(media.getInstance(), fullscreen? 1 : 0, exception); } /* (non-Javadoc) * @see org.videolan.jvlc.VideoIntf#toggleFullscreen() */ public void toggleFullscreen(MediaInstance media) { libvlc_exception_t exception = new libvlc_exception_t(); libvlc.libvlc_toggle_fullscreen(media.getInstance(), exception); } /* (non-Javadoc) * @see org.videolan.jvlc.VideoIntf#getSize() */ public Dimension getSize(MediaInstance media) { return new Dimension (getWidth(media), getHeight(media)); } /* (non-Javadoc) * @see org.videolan.jvlc.VideoIntf#setSize(java.awt.Dimension) */ public void setSize(Dimension d) { setSize(d.width, d.height); } }
package net.sf.katta.index.indexer.merge; import java.io.FileNotFoundException; import java.io.IOException; import net.sf.katta.util.Logger; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.MD5Hash; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.FileSplit; import org.apache.hadoop.mapred.InputSplit; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.RecordReader; import org.apache.lucene.document.Document; import org.apache.lucene.index.IndexReader; public class DfsIndexRecordReader implements RecordReader<Text, DocumentInformation> { private IDocumentDuplicateInformation _duplicateInformation; private IndexReader _indexReader; private int _maxDoc; private int _doc; private Path _indexPath; public static final String INVALID = "INVALID"; private FileSplit _fileSplit; public DfsIndexRecordReader(JobConf jobConf, InputSplit inputSplit, IDocumentDuplicateInformation duplicateInformation) throws IOException { _duplicateInformation = duplicateInformation; FileSystem fileSystem = FileSystem.get(jobConf); _fileSplit = (FileSplit) inputSplit; Path indexPath = _fileSplit.getPath(); //we use md5 for uncompressed folder, because some shards can have the same name String md5 = MD5Hash.digest(indexPath.toString()).toString(); Path workingFolder = new Path(jobConf.getOutputPath(), ".indexes/" + indexPath.getName() + "-" + md5 + "-uncompress"); _indexPath = new Path(jobConf.getOutputPath().getParent().getParent(), ".indexes/" + indexPath.getName() + "-" + md5 + "-uncompress"); try { _indexReader = IndexReader.open(new DfsIndexDirectory(fileSystem, indexPath, workingFolder)); _maxDoc = _indexReader.maxDoc(); } catch (FileNotFoundException e) { Logger.warn("can not open index '" + indexPath + "', ignore this index.", e); } } public boolean next(Text key, DocumentInformation value) throws IOException { boolean ret = false; if (_doc < _maxDoc) { ret = true; String keyInfo = null; String sortValue = null; try { Document document = _indexReader.document(_doc); keyInfo = _duplicateInformation.getKey(document); sortValue = _duplicateInformation.getSortValue(document); } catch (Exception e) { Logger.warn("can not read document from split '" + _fileSplit.getPath() + "'", e); } if ((keyInfo == null || keyInfo.trim().equals(""))) { keyInfo = INVALID; } if ((sortValue == null || sortValue.trim().equals(""))) { sortValue = "" + Integer.MIN_VALUE; } key.set(keyInfo); value.setDocId(_doc); value.setSortValue(sortValue); value.setIndexPath(_indexPath.toString()); _doc++; } return ret; } public Text createKey() { return new Text(); } public DocumentInformation createValue() { return new DocumentInformation(); } public long getPos() throws IOException { return _doc; } public void close() throws IOException { if (_indexReader != null) { _indexReader.close(); } } public float getProgress() throws IOException { return 0; } }
package net.yeputons.cscenter.dbfall2013.scaling; import net.yeputons.cscenter.dbfall2013.engines.hashtrie.HashTrieEngine; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import net.yeputons.cscenter.dbfall2013.util.DataInputStream; import net.yeputons.cscenter.dbfall2013.util.DataOutputStream; import java.io.EOFException; import java.io.File; import java.io.IOException; import java.net.*; import java.nio.ByteBuffer; import java.util.*; public class ShardingNode { static final Logger log = LoggerFactory.getLogger(ShardingNode.class); File storage; HashTrieEngine engine; protected void processClient(Socket clientSocket) throws Exception { DataInputStream in = new DataInputStream(clientSocket.getInputStream()); DataOutputStream out = new DataOutputStream(clientSocket.getOutputStream()); while (true) { byte[] cmd = new byte[3]; in.readFully(cmd); if (Arrays.equals(cmd, "clr".getBytes())) { synchronized (engine) { try { engine.clear(); out.write("ok".getBytes()); } catch (IllegalStateException e) { String message = "Illegal state: " + e.getCause(); out.write("no".getBytes()); out.writeArray(message.getBytes()); } } } else if (Arrays.equals(cmd, "siz".getBytes())) { out.write("ok".getBytes()); int siz; synchronized (engine) { siz = engine.size(); } out.writeInt(siz); } else if (Arrays.equals(cmd, "del".getBytes())) { byte[] key = in.readArray(); synchronized (engine) { engine.remove(ByteBuffer.wrap(key)); } out.write("ok".getBytes()); } else if (Arrays.equals(cmd, "put".getBytes())) { byte[] value = in.readArray(); byte[] key = in.readArray(); synchronized (engine) { engine.put(ByteBuffer.wrap(key), ByteBuffer.wrap(value)); } out.write("ok".getBytes()); } else if (Arrays.equals(cmd, "get".getBytes())) { byte[] key = in.readArray(); ByteBuffer res; synchronized (engine) { res = engine.get(ByteBuffer.wrap(key)); } out.write("ok".getBytes()); out.writeArray(res == null ? 
null : res.array()); } else if (Arrays.equals(cmd, "hi!".getBytes())) { out.write("ok".getBytes()); } else if (Arrays.equals(cmd, "key".getBytes())) { synchronized (engine) { if (engine.isCompactionInProgress()) { out.write("no".getBytes()); out.writeArray("Compaction is in progress".getBytes()); } else { out.write("ok".getBytes()); out.writeInt(engine.size()); for (Map.Entry<ByteBuffer, ByteBuffer> key : engine) out.writeArray(key.getKey().array()); } } } else if (Arrays.equals(cmd, "its".getBytes())) { synchronized (engine) { if (engine.isCompactionInProgress()) { out.write("no".getBytes()); out.writeArray("Compaction is in progress".getBytes()); } else { out.write("ok".getBytes()); out.writeInt(engine.size()); for (Map.Entry<ByteBuffer, ByteBuffer> entry : engine.entrySet()) { out.writeArray(entry.getKey().array()); out.writeArray(entry.getValue().array()); } } } } else if (Arrays.equals(cmd, "pak".getBytes())) { synchronized (engine) { if (engine.isCompactionInProgress()) { out.write("no".getBytes()); out.writeArray("Compaction is in progress".getBytes()); } else { engine.runCompaction(); out.write("ok".getBytes()); } } } else if (Arrays.equals(cmd, "dwn".getBytes())) { out.write("ok".getBytes()); this.stop(); } else { out.write("no".getBytes()); out.writeArray("Invalid command".getBytes()); } out.flush(); } } Set<Socket> clients; protected ServerSocket serverSocket; protected volatile boolean isRunning; public void run(File storage_, InetSocketAddress bindTo) throws Exception { storage = storage_; log.info("Starting node on {}, please be patient...", bindTo); isRunning = true; engine = new HashTrieEngine(storage); serverSocket = new ServerSocket(bindTo.getPort(), 0, bindTo.getAddress()); clients = new HashSet<Socket>(); log.info("Node is up and ready to accept connections"); while (true) { try { final Socket clientSocket = serverSocket.accept(); log.debug("New client from {}:{} connected", clientSocket.getInetAddress(), clientSocket.getPort()); new Thread(new Runnable() { @Override public void run() { synchronized (clients) { if (!isRunning) { try { clientSocket.close(); } catch (IOException e) { e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates. } return; } clients.add(clientSocket); } try { processClient(clientSocket); } catch (EOFException e) { } catch (SocketException e) { } catch (Exception e) { e.printStackTrace(); } log.debug("Client from {}:{} disconnected", clientSocket.getInetAddress(), clientSocket.getPort()); try { clientSocket.close(); } catch (SocketException e) { } catch (IOException e) { e.printStackTrace(); } synchronized (clients) { clients.remove(clientSocket); } } }).start(); } catch (SocketException e) { } if (!isRunning) break; } log.info("Terminating connections..."); synchronized (clients) { for (Socket s : clients) s.close(); } serverSocket.close(); log.info("Terminating engine..."); synchronized (engine) { engine.close(); } isRunning = false; log.info("Node is down"); } public void stop() { isRunning = false; try { serverSocket.close(); } catch (IOException e) { e.printStackTrace(); } } public static void main(String[] args) throws Exception { if (args.length != 3) { System.err.println("Arguments: <storage file> <ip to listen on> <port>\n"); System.exit(1); } String host = args[1]; int port = Integer.parseInt(args[2]); new ShardingNode().run(new File(args[0]), new InetSocketAddress(host, port)); } }
package nonapi.io.github.classgraph.classpath; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.security.AccessController; import java.security.PrivilegedAction; import java.util.ArrayList; import java.util.List; import nonapi.io.github.classgraph.utils.LogNode; import nonapi.io.github.classgraph.utils.VersionFinder; /** A class to find the unique ordered classpath elements. */ class CallStackReader { /** * Constructor. */ private CallStackReader() { // Cannot be constructed } /** * Get the call stack via the StackWalker API (JRE 9+). * * @return the call stack, or null if it could not be obtained. */ private static Class<?>[] getCallStackViaStackWalker() { try { // // Implement the following via reflection, for JDK7 compatibility: // List<Class<?>> stackFrameClasses = new ArrayList<>(); // StackWalker.getInstance(Option.RETAIN_CLASS_REFERENCE) // .forEach(sf -> stackFrameClasses.add(sf.getDeclaringClass())); final Class<?> consumerClass = Class.forName("java.util.function.Consumer"); final List<Class<?>> stackFrameClasses = new ArrayList<>(); final Class<?> stackWalkerOptionClass = Class.forName("java.lang.StackWalker$Option"); final Object retainClassReference = Class.forName("java.lang.Enum") .getMethod("valueOf", Class.class, String.class) .invoke(null, stackWalkerOptionClass, "RETAIN_CLASS_REFERENCE"); final Class<?> stackWalkerClass = Class.forName("java.lang.StackWalker"); final Object stackWalkerInstance = stackWalkerClass.getMethod("getInstance", stackWalkerOptionClass) .invoke(null, retainClassReference); final Method stackFrameGetDeclaringClassMethod = Class.forName("java.lang.StackWalker$StackFrame") .getMethod("getDeclaringClass"); stackWalkerClass.getMethod("forEach", consumerClass).invoke(stackWalkerInstance, // InvocationHandler proxy for Consumer<StackFrame> Proxy.newProxyInstance(consumerClass.getClassLoader(), new Class[] { consumerClass }, new InvocationHandler() { @Override public Object invoke(final Object proxy, final Method method, final Object[] args) throws Throwable { // Consumer<StackFrame> has only one method: void accept(StackFrame) final Class<?> declaringClass = (Class<?>) stackFrameGetDeclaringClassMethod .invoke(args[0]); stackFrameClasses.add(declaringClass); return null; } })); return stackFrameClasses.toArray(new Class<?>[0]); } catch (Exception | LinkageError e) { return null; } } private static final class CallerResolver extends SecurityManager { /* (non-Javadoc) * @see java.lang.SecurityManager#getClassContext() */ @Override protected Class<?>[] getClassContext() { return super.getClassContext(); } } /** * Get the call stack via the SecurityManager API. * * @param log * the log * @return the call stack. */ private static Class<?>[] getCallStackViaSecurityManager(final LogNode log) { try { return new CallerResolver().getClassContext(); } catch (final SecurityException e) { // Creating a SecurityManager can fail if the current SecurityManager does not allow if (log != null) { log.log("Exception while trying to obtain call stack via SecurityManager", e); } return null; } } /** * Get the class context. * * @param log * the log * @return The classes in the call stack. */ static Class<?>[] getClassContext(final LogNode log) { // For JRE 9+, use StackWalker to get call stack. // N.B. 
need to work around StackWalker bug fixed in JDK 13, and backported to 12.0.2 and 11.0.4 // (probably introduced in JDK 9, when StackWalker was introduced): Class<?>[] stackClasses = null; if ((VersionFinder.JAVA_MAJOR_VERSION == 11 && (VersionFinder.JAVA_MINOR_VERSION >= 1 || VersionFinder.JAVA_SUB_VERSION >= 4) && !VersionFinder.JAVA_IS_EA_VERSION) || (VersionFinder.JAVA_MAJOR_VERSION == 12 && (VersionFinder.JAVA_MINOR_VERSION >= 1 || VersionFinder.JAVA_SUB_VERSION >= 2) && !VersionFinder.JAVA_IS_EA_VERSION) || (VersionFinder.JAVA_MAJOR_VERSION == 13 && !VersionFinder.JAVA_IS_EA_VERSION) || VersionFinder.JAVA_MAJOR_VERSION > 13) { // Invoke with doPrivileged -- see: stackClasses = AccessController.doPrivileged(new PrivilegedAction<Class<?>[]>() { @Override public Class<?>[] run() { return getCallStackViaStackWalker(); } }); } // For JRE 7 and 8, use SecurityManager to get call stack if (stackClasses == null || stackClasses.length == 0) { stackClasses = AccessController.doPrivileged(new PrivilegedAction<Class<?>[]>() { @Override public Class<?>[] run() { return getCallStackViaSecurityManager(log); } }); } // As a fallback, use getStackTrace() to try to get the call stack if (stackClasses == null || stackClasses.length == 0) { StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace(); if (stackTrace == null || stackTrace.length == 0) { try { throw new Exception(); } catch (final Exception e) { stackTrace = e.getStackTrace(); } } final List<Class<?>> stackClassesList = new ArrayList<>(); for (final StackTraceElement elt : stackTrace) { try { stackClassesList.add(Class.forName(elt.getClassName())); } catch (final ClassNotFoundException | LinkageError ignored) { // Ignored } } if (!stackClassesList.isEmpty()) { stackClasses = stackClassesList.toArray(new Class<?>[0]); } else { // Last-ditch effort -- include just this class in the call stack stackClasses = new Class<?>[] { CallStackReader.class }; } } return stackClasses; } }
package online.zhaopei.myproject.schedule; import com.github.pagehelper.PageHelper; import online.zhaopei.myproject.common.tool.PaymentTool; import online.zhaopei.myproject.config.ApplicationProp; import online.zhaopei.myproject.domain.ecssent.InvtHead; import online.zhaopei.myproject.domain.gjent.ImpPayHead; import online.zhaopei.myproject.domain.gjpayment.*; import online.zhaopei.myproject.service.ecssent.InvtHeadService; import online.zhaopei.myproject.service.ecssent.PubRtnService; import online.zhaopei.myproject.service.ecssent.ServerSystemService; import online.zhaopei.myproject.service.ecssent.VeHeadService; import online.zhaopei.myproject.service.gjent.ImpPayHeadService; import online.zhaopei.myproject.service.gjent.PersonalInfoService; import online.zhaopei.myproject.service.gjpayment.PaymentMessageService; import online.zhaopei.myproject.service.para.SyncPaymentInfoService; import org.apache.commons.io.FileUtils; import org.springframework.beans.BeanUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Configuration; import org.springframework.mail.javamail.JavaMailSender; import org.springframework.scheduling.annotation.EnableAsync; import org.springframework.scheduling.annotation.EnableScheduling; import org.springframework.scheduling.annotation.Scheduled; import java.io.File; import java.io.PrintWriter; import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.List; import java.util.logging.Logger; @Configuration @EnableAsync @EnableScheduling public class ScheduledTaskConfig { @Autowired private ImpPayHeadService impPayHeadService; @Autowired private PaymentMessageService paymentMessageService; @Autowired private SyncPaymentInfoService syncPaymentInfoService; @Autowired private InvtHeadService invtHeadService; @Autowired private PersonalInfoService personalInfoService; @Autowired private ServerSystemService serverSystemService; @Autowired private JavaMailSender mailSender; @Autowired private VeHeadService veHeadService; @Autowired private PubRtnService pubRtnService; @Autowired private ApplicationProp app; @Scheduled(cron = "0 0 1 * * *") public void deleteExportFile() throws Exception { SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd"); String regex = "^[a-z_]{1,}" + sdf.format(Calendar.getInstance().getTime()) + "[0-9]{9}.csv$"; File file = new File("export"); if (file.isDirectory()) { File[] children = file.listFiles(); for (File f : children) if (!f.getName().matches(regex)) f.delete(); } } // @Scheduled(cron = "0 0 */1 * * *") public void clearErrorCount() throws Exception { this.personalInfoService.clearErrorCount(); } /** * * @throws Exception */ @Scheduled(cron = "0 0/30 * * * *") public void reissueNonSyncInvtList() throws Exception { SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmssSSS"); SimpleDateFormat sdf1 = new SimpleDateFormat("yyyyMMddHHmm"); SimpleDateFormat sdf2 = new SimpleDateFormat("yyyyMMdd"); Calendar calendar = Calendar.getInstance(); calendar.add(Calendar.MINUTE, -30); String endDate = sdf1.format(calendar.getTime()); calendar.add(Calendar.DATE, -15); String startDate = sdf2.format(calendar.getTime()); PageHelper.startPage(1, 1000); List<InvtHead> invtList = this.invtHeadService.getNonSyncInvtList(startDate, endDate); String suffix = "BuFaZbq.txt"; String reissueFileName = null; File reissueTmpFile = null; File reissueFile = null; PrintWriter reissuePw = null; if (null != invtList && !invtList.isEmpty()) { try { reissueFileName = 
sdf.format(Calendar.getInstance().getTime()) + "_" + suffix; reissueTmpFile = new File(this.app.getReissueTmpDir() + reissueFileName); reissueFile = new File(this.app.getReissueDir() + reissueFileName); reissuePw = new PrintWriter(reissueTmpFile); for (InvtHead ih : invtList) { reissuePw.println(ih.getInvtNo()); } reissuePw.flush(); reissuePw.close(); reissuePw = null; FileUtils.copyFile(reissueTmpFile, reissueFile); } catch(Exception e) { e.printStackTrace(); } finally { if (null != reissuePw) { reissuePw.close(); } } } } @Scheduled(fixedDelay = 600000) public void deleteRepeatInvtNo() throws Exception { List<InvtHead> invtHeadList = this.invtHeadService.getInvtHeadListByRepeatInvtNo(); if (null != invtHeadList && !invtHeadList.isEmpty()) { for (InvtHead ih : invtHeadList) { if (0 == this.pubRtnService.countPubRtnByBizGuid(ih.getHeadGuid())) { this.invtHeadService.deleteInvtHeadByHeadGuid(ih.getHeadGuid()); } } } } /** * 10 * @throws Exception */ @Scheduled(fixedDelay = 600000) public void syncInvtNoStatus() throws Exception { this.invtHeadService.syncInvtNoStatus("26", "800"); this.invtHeadService.syncInvtNoStatus("24", "500"); } @Scheduled(cron = "0 0 */2 * * *") public void modifyInvtStatus() throws Exception { List<String> headGuidList = this.invtHeadService.getReleaseBackStaggeredInvtList(); if (null != headGuidList && !headGuidList.isEmpty()) { for (String headGuid : headGuidList) { this.invtHeadService.updateInvtHeadStatus(headGuid, "100"); } } } /** * 10 * @throws Exception */ @Scheduled(fixedDelay = 600000) public void checkServer() throws Exception { this.veHeadService.syncVeENo(); // List<ServerSystem> serverSystemList = this.serverSystemService.getServerSystemList(new ServerSystem()); // String url = null; // Mem mem = null; // CpuPerc cpuPerc = null; // List<FileSystemInfo> fileSystemInfoList = null; // for (ServerSystem ss : serverSystemList) { // try { // mem = HttpClientTool.getMemJson(url); // if (80 < mem.getUsedPercent()) { // MailTool.sendDefaultMail(mailSender, "85%", "[" + ss.getIp() + "]\r\n 80%"); // cpuPerc = HttpClientTool.getCpuPercJson(url); // if (0.2 > cpuPerc.getIdle()) { // MailTool.sendDefaultMail(mailSender, "CPU85%", "[" + ss.getIp() + "]\r\n CPU85%"); // fileSystemInfoList = HttpClientTool.getFileSystemInfoListJson(url); // for (FileSystemInfo fsi : fileSystemInfoList) { // if (0.8 < fsi.getFileSystemUsage().getUsePercent()) { // MailTool.sendDefaultMail(mailSender, ":[" + fsi.getFileSystem().getDirName() + "] 85%", "[" + ss.getIp() + "]\r\n :[" // + fsi.getFileSystem().getDirName() + "]85%"); // } catch (Exception e) { // e.printStackTrace(); } @Scheduled(initialDelay = 10000, fixedDelay = 60000) public void syncPaymentInfo() throws Exception { SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); SimpleDateFormat sdfDay = new SimpleDateFormat("yyyyMMdd"); ImpPayHead insertImpPayHead = null; ImpPayHead searchImpPayHead = null; CbecMessage cbecMessage = null; CbecMessageCiq cbecMessageCiq = new CbecMessageCiq(); MessageHeadCiq messageHeadCiq = new MessageHeadCiq(); MessageBodyCiq messageBodyCiq = new MessageBodyCiq(); BodyMasterCiq bodyMasterCiq = new BodyMasterCiq(); PaymentMessage searchPm = new PaymentMessage(); List<PaymentMessage> resultPaymentMessageList = null; Long lastSyncTime = this.syncPaymentInfoService.getSyncTime(); if (null != lastSyncTime && 10000000000000L < lastSyncTime) { searchPm.setBeginCreateDate(String.valueOf(lastSyncTime)); } Calendar yesterdayCalendar = Calendar.getInstance(); 
yesterdayCalendar.add(Calendar.DAY_OF_YEAR, -1); searchPm.setBeginDateNum(Long.valueOf(sdfDay.format(yesterdayCalendar.getTime()) + "00")); searchPm.setXmlContent("<BILLMODE>0</BILLMODE>"); searchPm.setOrderBy("created_date asc"); resultPaymentMessageList = this.paymentMessageService.getPaymentMessageList(searchPm); if (null != resultPaymentMessageList && !resultPaymentMessageList.isEmpty()) { for (PaymentMessage pm : resultPaymentMessageList) { cbecMessage = PaymentTool.buildCbecMessageByString(pm.getXmlContent(), pm.getCreatedDate()); insertImpPayHead = PaymentTool.buildImpPayHeadByCbecMessage(cbecMessage); if (null != insertImpPayHead) { try { searchImpPayHead = new ImpPayHead(insertImpPayHead.getUuid()); if (0 < this.impPayHeadService.countImpPayHead(searchImpPayHead)) { continue; } BeanUtils.copyProperties(cbecMessage.getMessageBody().getBodyMaster(), bodyMasterCiq); BeanUtils.copyProperties(cbecMessage.getMessageHead(), messageHeadCiq); bodyMasterCiq.setCoinInsp(bodyMasterCiq.getMonetaryType()); bodyMasterCiq.setMonetaryType(null); messageBodyCiq.setBodyMaster(bodyMasterCiq); cbecMessageCiq.setMessageHead(messageHeadCiq); cbecMessageCiq.setMessageBody(messageBodyCiq); PaymentTool.generateCbecMessageCiq(cbecMessageCiq, this.app.getCiqDir(), this.app.getBackDir()); cbecMessage.getMessageBody().getBodyMaster().setPayEnterpriseCode("4100300536"); cbecMessage.getMessageBody().getBodyMaster().setMonetaryType("156"); PaymentTool.generateCbecMessageCiq(cbecMessage, this.app.getUnifiedCiqDir(), this.app.getUnifiedBackDir()); searchImpPayHead = new ImpPayHead(insertImpPayHead.getPayCode(), insertImpPayHead.getPayTransactionId()); if (1 == this.impPayHeadService.countImpPayHead(searchImpPayHead)) { this.impPayHeadService.updateImpPayHead(insertImpPayHead); } else { this.impPayHeadService.insertPayHead(insertImpPayHead); this.syncPaymentInfoService.updateSyncTime(Long.valueOf(sdf.format(pm.getCreatedDate()))); } } catch (Exception e) { e.printStackTrace(); } } } } } }
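/*
 * Illustrative sketch (not project code): the file-writing block in reissueNonSyncInvtList()
 * closes its PrintWriter manually in a finally block; the same write can be expressed with
 * try-with-resources so the writer is always closed even when an exception is thrown, before
 * the temp file is copied to its final directory with FileUtils.copyFile as the code above does.
 * The helper name and the use of plain Strings for the invt numbers below are assumptions.
 */
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.List;

class ReissueFileSketch {
    /** Write one invt number per line into the given reissue file. */
    static void writeReissueFile(File target, List<String> invtNos) throws IOException {
        try (PrintWriter pw = new PrintWriter(target)) { // closed automatically
            for (String invtNo : invtNos) {
                pw.println(invtNo);
            }
            pw.flush();
        }
    }
}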
package org.cloudfoundry.samples.handson.ex5; import org.springframework.stereotype.Controller; import org.springframework.validation.Errors; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.servlet.ModelAndView; import org.springframework.web.servlet.mvc.support.RedirectAttributes; import java.util.List; /** * A very basic controller for listing and inserting {@link Person} objects in an RDBMS. * * @author Eric Bottard * @author Florent Biville */ @Controller public class PersonController { // TODO: configure the jdbcTemplate with an injected DataSource // private NamedParameterJdbcTemplate jdbcTemplate; // @Inject // public void setDataSource(DataSource dataSource) { // this.jdbcTemplate = ... @RequestMapping(value = "/ex5", method=RequestMethod.GET) public ModelAndView show(Person command) { ModelAndView mav = new ModelAndView("ex5-form"); // TODO: retrieve persons and add them to the model // under key "persons" List<Person> persons = null; mav.addObject("persons", persons); return mav; } public String add(Person person, Errors errors, RedirectAttributes redirectAttributes) { // TODO: save the person POJO in the db using a JdbcTemplate return "redirect:/ex5"; } }
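/*
 * Illustrative sketch (not the exercise's official solution): one way the TODOs in
 * PersonController could be completed with a NamedParameterJdbcTemplate. The table name
 * "person", the columns first_name/last_name and the Person properties firstName/lastName
 * are assumptions about the exercise schema; in the real controller the add(...) handler
 * would also need its own request mapping for form posts.
 */
import java.util.Collections;
import java.util.List;
import javax.inject.Inject;
import javax.sql.DataSource;
import org.springframework.jdbc.core.BeanPropertyRowMapper;
import org.springframework.jdbc.core.namedparam.BeanPropertySqlParameterSource;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;

class PersonJdbcSketch {
    private NamedParameterJdbcTemplate jdbcTemplate;

    @Inject
    public void setDataSource(DataSource dataSource) {
        this.jdbcTemplate = new NamedParameterJdbcTemplate(dataSource);
    }

    /** Retrieve all persons, mapping first_name/last_name columns onto Person properties. */
    public List<Person> findAll() {
        return jdbcTemplate.query("SELECT * FROM person",
                Collections.<String, Object>emptyMap(),
                new BeanPropertyRowMapper<>(Person.class));
    }

    /** Insert a person using its bean properties as named parameters. */
    public void save(Person person) {
        jdbcTemplate.update(
                "INSERT INTO person (first_name, last_name) VALUES (:firstName, :lastName)",
                new BeanPropertySqlParameterSource(person));
    }
}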
package org.jenkinsci.plugins.IBM_zOS_Connector; import org.apache.commons.net.PrintCommandListener; import org.apache.commons.net.ftp.*; import java.io.*; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * <h1>zFTPConnector</h1> * FTP-based communication with z/OS-like systems. * Used for submitting jobs, fetching job log and extraction of MaxCC. * * @author <a href="mailto:[email protected]">Alexander Shcherbakov</a> * * @version 1.0 */ public class zFTPConnector { // Server info. /** * LPAR name or IP to connect to. */ private String server; /** * FTP port for connection */ private int port; // Credentials. /** * UserID. */ private String userID; /** * User password. */ private String password; // Wait parameters. /** * Time to wait before giving up in milliseconds. If set to <code>0</code> will wait forever. */ private long waitTime; // Job info from JES-like system. /** * JobID in JES. */ private String jobID; /** * Job's MaxCC. */ private String jobCC; // Work elements. /** * Will ask LPAR once in 10 seconds. */ private static final long waitInterval = 10*1000; /** * FTPClient from <i>Apache Commons-Net</i>. Used for FTP communication. */ private FTPClient FTPClient; /** * Pattern for search of jobName */ private static final Pattern JesJobName = Pattern.compile("250-It is known to JES as (.*)"); /** * Pattern for check of job status. */ private static final Pattern JobNotFinished = Pattern.compile(".*No spool files available for.*"); /** * Basic constructor with minimal parameters required. * * @param server LPAR name or IP address to connect to. * @param port FTP port for connection. * @param userID UserID. * @param password User password. */ public zFTPConnector (String server, int port, String userID, String password) { // Copy values this.server = server; this.port = port; this.userID = userID; this.password = password; // Create FTPClient this.FTPClient = new FTPClient(); // Make password invisible from log this.FTPClient.addProtocolCommandListener(new PrintCommandListener(new PrintWriter(System.out), true)); } /** * Try to connect to the <b><code>server</code></b> using the parameters passed to the constructor. * * @return Whether the connection was established using the parameters passed to the constructor. * * @see zFTPConnector#zFTPConnector(java.lang.String, int, java.lang.String, java.lang.String) */ private boolean connect() { // Perform the connection. try { int reply; // Temp value to contain server response. // Try to connect. this.FTPClient.connect(this.server, this.port); // After connection attempt, check the reply code to verify success. reply = this.FTPClient.getReplyCode(); if (!FTPReply.isPositiveCompletion(reply)) { // Bad reply code. this.FTPClient.disconnect(); // Disconnect from LPAR. System.err.println("FTP server refused connection."); // Print error. return false; // Finish with failure. } } // IOException handling catch (IOException e) { // Close the connection if it's still open. if (this.FTPClient.isConnected()) { try { this.FTPClient.disconnect(); } catch (IOException f) { // Do nothing } } System.err.println("Could not connect to server."); e.printStackTrace(); return false; } // Finally, return with success. return true; } /** * Try to logon to the <b><code>server</code></b> using the parameters passed to the constructor. * Also, <code>SITE FILETYPE=JES JESJOBNAME=*</code> command is invoked. * * @return Whether the credentials supplied are valid and the connection was established. 
* * @see zFTPConnector#zFTPConnector(java.lang.String, int, java.lang.String, java.lang.String) * @see zFTPConnector#connect() */ private boolean logon() { // Check whether we are already connected. If not, try to reconnect. if (!this.FTPClient.isConnected()) if(!this.connect()) return false; // Couldn't connect to the server. Can't check the credentials. // Perform the login process. try { int reply; // Temp value for server reply code. // Try to login. if (!this.FTPClient.login(this.userID, this.password)) { // If couldn't login, we should logout and return failure. this.FTPClient.logout(); return false; } // Try to set filetype and jesjobname. if (!this.FTPClient.doCommand("site filetype=jes jesjobname=* jesjobowner=*", "")) { this.FTPClient.disconnect(); System.err.println("Couldn't set FileType and JESJobName"); return false; } // Check reply. reply = this.FTPClient.getReplyCode(); if (!FTPReply.isPositiveCompletion(reply)) { this.FTPClient.disconnect(); System.err.println("FTP server refused to change FileType and JESJobName."); return false; } } catch (IOException e) { if (this.FTPClient.isConnected()) { try { this.FTPClient.disconnect(); } catch (IOException f) { // do nothing } } System.err.println("Could not connect to server."); e.printStackTrace(); return false; } // If we got here, everything went fine. return true; } /** * Submit job for execution. * * @param inputStream JCL text of the job. * @param wait Whether we need to wait for the job to complete. * @param waitTime Maximum wait time in minutes. If set to <code>0</code>, will wait forever. * @param outputStream Stream to put job log. Can be <code>Null</code>. * @param deleteLogFromSpool Whether the job log should be deleted from spool upon job end. * * @return Whether the job was successfully submitted and the job log was fetched. * <br><b><code>jobCC</code></b> holds the response of the operation (including errors). * * @see zFTPConnector#connect() * @see zFTPConnector#logon() * @see zFTPConnector#waitForCompletion(OutputStream) * @see zFTPConnector#deleteJobLog() */ public boolean submit(InputStream inputStream, boolean wait, int waitTime, OutputStream outputStream, boolean deleteLogFromSpool) { this.waitTime = ((long)waitTime) * 60 * 1000; // Minutes to milliseconds. // Verify connection. if(!this.FTPClient.isConnected()) if(!this.logon()) { this.jobCC = "COULD_NOT_CONNECT"; return false; } this.FTPClient.enterLocalPassiveMode(); try { // Submit the job. this.FTPClient.storeFile("jenkins.sub", inputStream); // Scan reply from server to get JobID. for (String s : this.FTPClient.getReplyStrings()) { Matcher matcher = JesJobName.matcher(s); if(matcher.matches()) { // Set jobID this.jobID = matcher.group(1); break; } } inputStream.close(); } catch (FTPConnectionClosedException e) { System.err.println("Server closed connection."); e.printStackTrace(); this.jobCC = "SERVER_CLOSED_CONNECTION"; return false; } catch (IOException e) { e.printStackTrace(); this.jobCC = "IO_ERROR"; return false; } if (wait) { // Wait for completion. if(this.waitForCompletion(outputStream)) { if (deleteLogFromSpool) // Delete job log from spool. this.deleteJobLog(); return true; } else { if (this.jobCC == null) this.jobCC = "JOB_DID_NOT_FINISH_IN_TIME"; return false; } } // If we are here, everything went fine. return true; } /** * Wait for the completion of the job. * * @param outputStream Stream to hold job log. * * @return Whether the job finished in time. 
* * @see zFTPConnector#submit(InputStream, boolean, int, OutputStream, boolean) * @see zFTPConnector#fetchJobLog(OutputStream) */ private boolean waitForCompletion(OutputStream outputStream) { // Initialize current time and estimated time. long curr = System.currentTimeMillis(); long jobEndTime = System.currentTimeMillis() + this.waitTime; boolean eternal = (waitTime == 0); // Perform wait while (eternal || (curr <= jobEndTime)) { // Try to fetch job log. if (this.fetchJobLog(outputStream)) return true; else { // Couldn't fetch the job log. Need to wait. try { Thread.sleep(waitInterval); curr = System.currentTimeMillis(); } catch (InterruptedException e) { System.err.println("Interrupted."); this.jobCC = "WAIT_INTERRUPTED"; return false; } } } // Exit with wait error. this.jobCC = "WAIT_ERROR"; return false; } /** * Fetch job log from spool. * * @param outputStream Stream to hold the job log. * * @return Whether the job log was fetched from the LPAR. * * @see zFTPConnector#waitForCompletion(OutputStream) */ private boolean fetchJobLog(OutputStream outputStream) { // Initialize temp variables. InputStreamReader tempInputStreamReader = null; OutputStreamWriter tempOutputStreamWriter = null; BufferedReader tempReader = null; BufferedWriter tempWriter = null; // Verify connection. if(!this.FTPClient.isConnected()) if(!this.logon()) { this.jobCC = "FETCH_LOG_ERROR_LOGIN"; return false; } this.FTPClient.enterLocalPassiveMode(); // Try fetching. try { // Temp variables. int reply; boolean foundRC = false; File tempFile; // Create temp file to hold job log. Need this to scan for MaxCC. try { tempFile = File.createTempFile("Jenkins", "tmp"); tempFile.deleteOnExit(); } catch(Exception e){ // if any error occurs e.printStackTrace(); this.jobCC = "ERROR_CREATING_TEMP_FILE"; return false; } FileOutputStream tempFileOutputStream = new FileOutputStream(tempFile,false); // Try fetching the log. if(!this.FTPClient.retrieveFile(this.jobID,tempFileOutputStream)) { this.jobCC = "RETR_ERR_JOB_NOT_FINISHED_OR_NOT_FOUND"; return false; } Pattern JobRC = Pattern.compile(".*?\\d{2}\\.\\d{2}\\.\\d{2} "+jobID+" .{8} RC (.*?) ET .*"); reply = this.FTPClient.getReplyCode(); if (FTPReply.isPositiveCompletion(reply)) { // If job hasn't finished we need to exit. for (String s : this.FTPClient.getReplyStrings()) { Matcher matcher = JobNotFinished.matcher(s); if (matcher.matches()) { this.jobCC = "JOB_NOT_FINISHED_OR_NOT_FOUND"; return false; } } // Prepare to scan for MaxCC and copy job log. String tempLine; FileInputStream tempInpStream = new FileInputStream(tempFile); tempInputStreamReader = new InputStreamReader(tempInpStream); tempReader = new BufferedReader(tempInputStreamReader); if(outputStream != null) { tempOutputStreamWriter = new OutputStreamWriter(outputStream); tempWriter = new BufferedWriter(tempOutputStreamWriter); } // Scan while ((tempLine = tempReader.readLine()) != null) { // Chack line if(!foundRC) { Matcher matcher = JobRC.matcher(tempLine); if (matcher.matches()) { jobCC = matcher.group(1); foundRC = true; } } // If need output - copy the line. if(outputStream != null) { tempWriter.write(tempLine); tempWriter.newLine(); } } // Close everything. tempInputStreamReader.close(); tempReader.close(); if(tempWriter != null) tempWriter.close(); if(tempOutputStreamWriter != null) tempOutputStreamWriter.close(); if(outputStream != null) outputStream.close(); // Finish with success. return true; } // Close everything and return failure. 
if(outputStream != null) outputStream.close(); this.jobCC = "FETCH_LOG_FETCH_ERROR"; return false; } catch (IOException e) { try { if(tempInputStreamReader != null) tempInputStreamReader.close(); if(tempReader != null) tempReader.close(); if(tempWriter != null) tempWriter.close(); if(tempOutputStreamWriter != null) tempOutputStreamWriter.close(); if(outputStream != null) outputStream.close(); } catch (IOException ignored) {} this.jobCC = "FETCH_LOG_IO_ERROR"; return false; } } /** * Delete job log from spool. Job is distinguished by previously obtained <code>jobID</code>. * * @see zFTPConnector#submit(InputStream, boolean, int, OutputStream, boolean) */ private void deleteJobLog () { // Verify connection. if(!this.FTPClient.isConnected()) if(!this.logon()) { return; } this.FTPClient.enterLocalPassiveMode(); // Delete log. try { this.FTPClient.deleteFile(this.jobID); } catch (IOException e) { // Do nothing. } } /** * Get JobID. * * @return Current <b><code>jobID</code></b>. */ public String getJobID() { return this.jobID; } /** * Get JobCC. * * @return Current <b><code>jobCC</code></b>. */ public String getJobCC() { return this.jobCC; } }
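/*
 * Illustrative usage sketch for zFTPConnector (host, port, credentials and the JCL text are
 * placeholders): submit a job, wait up to 5 minutes for it, capture the job log and then read
 * back the JES job id and MaxCC through the public accessors shown above.
 */
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;

class ZftpConnectorUsageSketch {
    public static void main(String[] args) {
        zFTPConnector connector = new zFTPConnector("lpar.example.com", 21, "USER1", "secret");
        ByteArrayInputStream jcl = new ByteArrayInputStream(
                "//HELLO JOB (ACCT),'EXAMPLE'".getBytes(StandardCharsets.UTF_8)); // placeholder JCL
        ByteArrayOutputStream jobLog = new ByteArrayOutputStream();
        // wait=true, waitTime=5 minutes, deleteLogFromSpool=true
        boolean ok = connector.submit(jcl, true, 5, jobLog, true);
        System.out.println("submitted ok: " + ok);
        System.out.println("job id: " + connector.getJobID());
        System.out.println("max cc: " + connector.getJobCC());
    }
}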
package org.kumoricon.presenter.utility; import org.kumoricon.model.badge.Badge; import org.kumoricon.model.badge.BadgeFactory; import org.kumoricon.model.badge.BadgeRepository; import org.kumoricon.model.role.Right; import org.kumoricon.model.role.RightRepository; import org.kumoricon.model.role.Role; import org.kumoricon.model.role.RoleRepository; import org.kumoricon.model.user.User; import org.kumoricon.model.user.UserRepository; import org.kumoricon.view.utility.LoadBaseDataView; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Scope; import org.springframework.stereotype.Controller; import java.util.HashMap; @Controller @Scope("request") public class LoadBaseDataPresenter { @Autowired private UserRepository userRepository; @Autowired private RoleRepository roleRepository; @Autowired private BadgeRepository badgeRepository; @Autowired private RightRepository rightRepository; private LoadBaseDataView view; public LoadBaseDataPresenter() { } public void loadDataButtonClicked() { if (targetTablesAreEmpty()) { addRights(); addRoles(); addUsers(); addBadges(); } } private Boolean targetTablesAreEmpty() { // Abort if there is more than one right, role, or user - it should just have the admin user // with the Admin role and super_admin right. if (rightRepository.count() > 1) { view.addResult("Error: rights table not empty. Aborting."); return false; } else if (roleRepository.count() > 1) { view.addResult("Error: roles table not empty. Aborting."); return false; } else if (userRepository.count() > 1) { view.addResult("Error: users table not empty. Aborting."); return false; } else if (badgeRepository.count() > 0) { view.addResult("Error: badges table not empty. Aborting."); return false; } return true; } public void setView(LoadBaseDataView view) { this.view = view; } private void addRights() { view.addResult("Creating rights"); String[] rights = {"at_con_registration", "pre_reg_check_in", "attendee_search", "attendee_edit", "attendee_edit_notes", "attendee_override_price", "print_badge", "reprint_badge", "reprint_badge_with_override", "badge_type_press", "view_attendance_report", "view_revenue_report", "view_staff_report", "view_role_report", "manage_staff", "manage_pass_types", "manage_roles", "manage_devices", "import_pre_reg_data", "load_base_data"}; for (String right : rights) { rightRepository.save(new Right(right)); } } private void addRoles() { view.addResult("Creating roles"); HashMap<String, String[]> roles = new HashMap<>(); roles.put("Staff", new String[] {"at_con_registration", "pre_reg_check_in", "attendee_search", "print_badge", "reprint_badge_with_override"}); roles.put("Coordinator", new String[] {"at_con_registration", "pre_reg_check_in", "attendee_search", "print_badge", "attendee_edit", "attendee_edit_notes", "attendee_override_price", "reprint_badge", "view_staff_report"}); roles.put("Manager", new String[] {"at_con_registration", "pre_reg_check_in", "attendee_search", "print_badge", "attendee_edit", "attendee_edit_notes", "attendee_override_price", "reprint_badge", "manage_staff", "view_staff_report"}); roles.put("Director", new String[] {"at_con_registration", "pre_reg_check_in", "attendee_search", "print_badge", "attendee_edit", "attendee_edit_notes", "attendee_override_price", "reprint_badge", "manage_staff", "manage_pass_types", "view_role_report", "view_attendance_report", "view_revenue_report", "view_staff_report"}); roles.put("Ops", new String[] {"attendee_search", "attendee_edit_notes"}); HashMap<String, 
Right> rightMap = getRightsHashMap(); for (String roleName : roles.keySet()) { Role role = new Role(roleName); for (String rightName : roles.get(roleName)) { if (rightMap.containsKey(rightName)) { role.addRight(rightMap.get(rightName)); } else { view.addResult("Error creating role " + roleName + ". Right " + rightName + " not found"); } } view.addResult(" Creating " + role.toString()); roleRepository.save(role); } } private void addUsers() { view.addResult("Creating users"); String[][] userList = { {"Staff", "User", "Staff"}, {"Coordinator", "User", "Coordinator"}, {"Manager", "User", "Manager"}, {"Director", "User", "Director"}, {"Ops", "User", "ops"}}; for (String[] currentUser : userList) { User user = new User(currentUser[0], currentUser[1]); user.setUsername(currentUser[0]); Role role = roleRepository.findByNameIgnoreCase(currentUser[2]); if (role == null) { view.addResult(" Error creating user " + currentUser[0] + ". Role " + currentUser[2] + " not found"); } else { user.setRole(role); view.addResult(" Creating " + user.toString()); userRepository.save(user); } } } private void addBadges() { view.addResult("Creating badges"); String[][] badgeList = { {"Weekend", "60", "60", "45"}, {"Friday", "40", "40", "30"}, {"Saturday", "40", "40", "30"}, {"Sunday", "30", "30", "20"}}; for (String[] currentBadge : badgeList) { Badge badge = BadgeFactory.badgeFactory(currentBadge[0], currentBadge[0], Float.parseFloat(currentBadge[1]), Float.parseFloat(currentBadge[2]), Float.parseFloat(currentBadge[3])); view.addResult(" Creating " + badge.toString()); badgeRepository.save(badge); } // Create VIP badge with warning message Badge vip = BadgeFactory.badgeFactory("VIP", "VIP", 300, 300, 300); vip.setWarningMessage("VIP check in. See your coordinator"); view.addResult(" Creating " + vip.toString()); badgeRepository.save(vip); // Create badge types with security restrictions below Badge press = BadgeFactory.badgeFactory("Press", "Weekend", 0f, 0f, 0f); press.setRequiredRight("badge_type_press"); view.addResult(" Creating " + press.toString()); badgeRepository.save(press); } private HashMap<String, Right> getRightsHashMap() { HashMap<String, Right> rightHashMap = new HashMap<>(); for (Right r : rightRepository.findAll()) { rightHashMap.put(r.getName(), r); } return rightHashMap; } }
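/*
 * Illustrative wiring sketch (hypothetical, not the project's actual view class): the presenter
 * above is request-scoped and only seeds rights, roles, users and badges when the target tables
 * are effectively empty, so a view simply registers itself and triggers the load; results are
 * reported back through view.addResult(...). The class and method names below are placeholders.
 */
class LoadBaseDataWiringSketch {
    static void onLoadDataButtonClick(LoadBaseDataView view, LoadBaseDataPresenter presenter) {
        presenter.setView(view);           // presenter reports progress via view.addResult(...)
        presenter.loadDataButtonClicked(); // aborts with an error message if tables are not empty
    }
}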
package org.ligoj.app.plugin.prov.model; import javax.persistence.FetchType; import javax.persistence.ManyToOne; import javax.persistence.MappedSuperclass; import javax.persistence.Transient; import javax.validation.constraints.NotNull; import javax.validation.constraints.PositiveOrZero; import org.ligoj.bootstrap.core.model.AbstractDescribedEntity; import com.fasterxml.jackson.annotation.JsonIgnore; import lombok.AccessLevel; import lombok.Getter; import lombok.NoArgsConstructor; import lombok.Setter; /** * A resource with floating cost. * * @param <P> * Price configuration type. */ @Getter @Setter @MappedSuperclass @NoArgsConstructor(access = AccessLevel.PROTECTED) public abstract class AbstractQuoteResource<P extends AbstractPrice<?>> extends AbstractDescribedEntity<Integer> implements Costed { /** * SID */ private static final long serialVersionUID = 1L; /** * The minimal computed monthly cost of the resource. */ @NotNull @PositiveOrZero private double cost; /** * Maximal determined monthly cost of the resource. When there is an unbound maximal (<code>null</code>) quantity, * the minimal cost is used. */ @NotNull @PositiveOrZero private double maxCost; /** * The parent quote. */ @NotNull @ManyToOne(fetch = FetchType.LAZY) @JsonIgnore private ProvQuote configuration; /** * Optional expected location for this resource. */ @ManyToOne private ProvLocation location; /** * Return resolved price configuration. * * @return Resolved price configuration. */ public abstract P getPrice(); /** * Set the resolved price configuration. * * @param price * The resolved price. */ public abstract void setPrice(P price); /** * Return the effective location applied to the current resource. * * @return The related location. Never <code>null</code>. */ @Transient @JsonIgnore public ProvLocation getResolvedLocation() { return location == null ? getConfiguration().getLocation() : location; } /** * Return the resource type. * * @return The resource type. */ public abstract ResourceType getResourceType(); }
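/*
 * Illustrative consumer sketch (hypothetical helper, not part of the plugin): code that only
 * needs the cost contract can work against AbstractQuoteResource without knowing the concrete
 * resource or price type, using the Lombok-generated getCost()/getMaxCost() accessors documented
 * above (maxCost falls back to the minimal cost when the quantity is unbounded).
 */
import java.util.List;

class QuoteCostSketch {
    /** Sum the minimal and maximal monthly costs over any mix of quoted resources. */
    static double[] totalCost(List<? extends AbstractQuoteResource<?>> resources) {
        double min = 0;
        double max = 0;
        for (AbstractQuoteResource<?> r : resources) {
            min += r.getCost();    // minimal computed monthly cost
            max += r.getMaxCost(); // maximal monthly cost
        }
        return new double[] { min, max };
    }
}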
package org.mitallast.queue.queue.service; import org.fusesource.leveldbjni.JniDBFactory; import org.iq80.leveldb.*; import org.mitallast.queue.QueueException; import org.mitallast.queue.QueueRuntimeException; import org.mitallast.queue.common.bigqueue.Files; import org.mitallast.queue.common.settings.Settings; import org.mitallast.queue.queue.*; import org.mitallast.queue.queues.stats.QueueStats; import java.io.File; import java.io.IOException; import java.util.Map; import java.util.UUID; import java.util.concurrent.locks.ReentrantLock; public class LevelDbQueueService extends AbstractQueueComponent implements QueueService<String> { private final static String LEVEL_DB_DIR = "level_db"; private final Options options = new Options(); private final WriteOptions writeOptions = new WriteOptions(); private final ReadOptions readOptions = new ReadOptions(); { options.createIfMissing(true); options.maxOpenFiles(4096); options.blockSize(65536); options.verifyChecksums(false); options.paranoidChecks(false); writeOptions.snapshot(false); writeOptions.sync(false); readOptions.fillCache(true); readOptions.verifyChecksums(false); } private final ReentrantLock lock = new ReentrantLock(); private String workDir; private String levelDbDir; private DB levelDb; public LevelDbQueueService(Settings settings, Settings queueSettings, Queue queue) { super(settings, queueSettings, queue); workDir = this.settings.get("work_dir"); if (!workDir.endsWith(File.separator)) { workDir += File.separator; } workDir += queue.getName() + File.separator; levelDbDir = workDir + LEVEL_DB_DIR; } @Override public long enqueue(QueueMessage<String> message) { if (message.getUid() == null) { // write without lock message.setUid(UUID.randomUUID().toString()); byte[] uid = message.getUid().getBytes(); byte[] msg = message.getMessage().getBytes(); levelDb.put(uid, msg, writeOptions); return 0; } else { // write with lock byte[] uid = message.getUid().getBytes(); byte[] msg = message.getMessage().getBytes(); lock.lock(); try { if (levelDb.get(uid, readOptions) != null) { throw new QueueMessageUidDuplicateException(message.getUid()); } levelDb.put(uid, msg, writeOptions); return 0; } finally { lock.unlock(); } } } @Override public QueueMessage<String> dequeue() { lock.lock(); try { try (DBIterator iterator = levelDb.iterator(readOptions)) { if (!iterator.hasNext()) { return null; } Map.Entry<byte[], byte[]> entry = iterator.next(); levelDb.delete(entry.getKey(), writeOptions); return new QueueMessage<>(new String(entry.getValue()), new String(entry.getKey())); } } catch (IOException e) { throw new QueueRuntimeException(e); } finally { lock.unlock(); } } @Override public QueueMessage<String> peek() { lock.lock(); try { try (DBIterator iterator = levelDb.iterator(readOptions)) { if (!iterator.hasNext()) { return null; } Map.Entry<byte[], byte[]> entry = iterator.next(); return new QueueMessage<>(new String(entry.getValue()), new String(entry.getKey())); } } catch (IOException e) { throw new QueueRuntimeException(e); } finally { lock.unlock(); } } @Override public long size() { return 0; } @Override public QueueType type() { return QueueType.LEVEL_DB; } @Override public void removeQueue() { lock.lock(); try { logger.info("close queue"); close(); logger.info("delete directory"); Files.deleteDirectory(new File(levelDbDir)); logger.info("directory deleted"); } catch (Throwable e) { throw new QueueRuntimeException(e); } finally { lock.unlock(); } } @Override public boolean isSupported(QueueMessage message) { return message.getMessage() 
instanceof String; } @Override protected void doStart() throws QueueException { lock.lock(); try { try { File levelDbDirFile = new File(levelDbDir); if (!levelDbDirFile.exists()) { if (!levelDbDirFile.mkdirs()) { throw new IOException("Error create " + levelDbDirFile); } } levelDb = JniDBFactory.factory.open(levelDbDirFile, options); } catch (IOException e) { throw new QueueRuntimeException(e); } } finally { lock.unlock(); } } @Override protected void doStop() throws QueueException { lock.lock(); try { this.levelDb.close(); this.levelDb = null; } catch (IOException e) { throw new QueueRuntimeException(e); } finally { lock.unlock(); } } @Override protected void doClose() throws QueueException { } @Override public QueueStats stats() { QueueStats stats = new QueueStats(); stats.setQueue(queue); stats.setSize(size()); return stats; } }
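/*
 * Illustrative sketch of the LevelDB access pattern used by LevelDbQueueService: keys are
 * message UIDs, the "oldest" entry is whatever the iterator yields first, and dequeue is
 * "read the first entry, then delete its key". The database path and payload strings below
 * are placeholders; the API calls mirror those in the service above.
 */
import java.io.File;
import java.util.Map;
import org.fusesource.leveldbjni.JniDBFactory;
import org.iq80.leveldb.DB;
import org.iq80.leveldb.DBIterator;
import org.iq80.leveldb.Options;

class LevelDbPatternSketch {
    public static void main(String[] args) throws Exception {
        Options options = new Options().createIfMissing(true);
        try (DB db = JniDBFactory.factory.open(new File("/tmp/level_db_sketch"), options)) {
            db.put("uid-1".getBytes(), "hello".getBytes()); // enqueue
            try (DBIterator it = db.iterator()) {
                it.seekToFirst();
                if (it.hasNext()) {
                    Map.Entry<byte[], byte[]> first = it.next();
                    System.out.println(new String(first.getValue()));
                    db.delete(first.getKey()); // dequeue: remove the entry just read
                }
            }
        }
    }
}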
package org.purl.wf4ever.robundle.fs; import java.io.FileNotFoundException; import java.io.IOException; import java.lang.ref.WeakReference; import java.net.URI; import java.net.URISyntaxException; import java.nio.channels.SeekableByteChannel; import java.nio.charset.Charset; import java.nio.file.AccessMode; import java.nio.file.CopyOption; import java.nio.file.DirectoryStream; import java.nio.file.DirectoryStream.Filter; import java.nio.file.FileStore; import java.nio.file.FileSystem; import java.nio.file.FileSystemAlreadyExistsException; import java.nio.file.FileSystemNotFoundException; import java.nio.file.FileSystems; import java.nio.file.Files; import java.nio.file.LinkOption; import java.nio.file.OpenOption; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardOpenOption; import java.nio.file.attribute.BasicFileAttributes; import java.nio.file.attribute.FileAttribute; import java.nio.file.attribute.FileAttributeView; import java.nio.file.spi.FileSystemProvider; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.zip.CRC32; import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; public class BundleFileSystemProvider extends FileSystemProvider { private static final String APPLICATION_VND_WF4EVER_ROBUNDLE_ZIP = "application/vnd.wf4ever.robundle+zip"; private static final Charset UTF8 = Charset.forName("UTF-8"); private static final String WIDGET = "widget"; protected static void addMimeTypeToZip(ZipOutputStream out, String mimetype) throws IOException { if (mimetype == null) { mimetype = APPLICATION_VND_WF4EVER_ROBUNDLE_ZIP; } // FIXME: Make the mediatype a parameter byte[] bytes = mimetype.getBytes(UTF8); // We'll have to do the mimetype file quite low-level // in order to ensure it is STORED and not COMPRESSED ZipEntry entry = new ZipEntry("mimetype"); entry.setMethod(ZipEntry.STORED); entry.setSize(bytes.length); CRC32 crc = new CRC32(); crc.update(bytes); entry.setCrc(crc.getValue()); out.putNextEntry(entry); out.write(bytes); out.closeEntry(); } protected static void createBundleAsZip(Path bundle, String mimetype) throws FileNotFoundException, IOException { // Create ZIP file as try (ZipOutputStream out = new ZipOutputStream(Files.newOutputStream( bundle, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING))) { addMimeTypeToZip(out, mimetype); } } public static BundleFileSystemProvider getInstance() { for (FileSystemProvider provider : FileSystemProvider .installedProviders()) { if (provider instanceof BundleFileSystemProvider) { return (BundleFileSystemProvider) provider; } } throw new IllegalStateException("FileSystemProvider has not been installed: " + BundleFileSystemProvider.class); } public static BundleFileSystem newFileSystemFromExisting(Path bundle) throws FileNotFoundException, IOException { URI w; try { w = new URI("widget", bundle.toUri().toASCIIString(), null); } catch (URISyntaxException e) { throw new IllegalArgumentException("Can't create widget: URI for " + bundle); } FileSystem fs = FileSystems.newFileSystem(w, Collections.<String,Object>emptyMap()); return (BundleFileSystem) fs; } public static BundleFileSystem newFileSystemFromNew(Path bundle) throws FileNotFoundException, IOException { return newFileSystemFromNew(bundle, APPLICATION_VND_WF4EVER_ROBUNDLE_ZIP); } public static BundleFileSystem newFileSystemFromNew(Path bundle, String mimetype) throws FileNotFoundException, IOException { 
createBundleAsZip(bundle, mimetype); return newFileSystemFromExisting(bundle); } Map<URI, WeakReference<BundleFileSystem>> openFilesystems = new HashMap<>(); protected URI baseURIFor(URI uri) { if (!(uri.getScheme().equals(WIDGET))) { throw new IllegalArgumentException("Unsupported scheme in: " + uri); } if (!uri.isOpaque()) { return uri.resolve("/"); } Path localPath = localPathFor(uri); Path realPath; try { realPath = localPath.toRealPath(); } catch (IOException ex) { realPath = localPath.toAbsolutePath(); } // Generate a UUID from the MD5 of the URI of the real path (!) UUID uuid = UUID.nameUUIDFromBytes(realPath.toUri().toASCIIString() .getBytes(UTF8)); try { return new URI(WIDGET, uuid.toString(), "/", null); } catch (URISyntaxException e) { throw new IllegalStateException("Can't create widget:// URI for: " + uuid); } } @Override public void checkAccess(Path path, AccessMode... modes) throws IOException { BundleFileSystem fs = (BundleFileSystem) path.getFileSystem(); origProvider(path).checkAccess(fs.unwrap(path), modes); } @Override public void copy(Path source, Path target, CopyOption... options) throws IOException { BundleFileSystem fs = (BundleFileSystem) source.getFileSystem(); origProvider(source).copy(fs.unwrap(source), fs.unwrap(target), options); } @Override public void createDirectory(Path dir, FileAttribute<?>... attrs) throws IOException { BundleFileSystem fs = (BundleFileSystem) dir.getFileSystem(); origProvider(dir).createDirectory(fs.unwrap(dir), attrs); } @Override public void delete(Path path) throws IOException { BundleFileSystem fs = (BundleFileSystem) path.getFileSystem(); origProvider(path).delete(fs.unwrap(path)); } @Override public <V extends FileAttributeView> V getFileAttributeView(Path path, Class<V> type, LinkOption... options) { BundleFileSystem fs = (BundleFileSystem) path.getFileSystem(); return origProvider(path).getFileAttributeView(fs.unwrap(path), type, options); } @Override public FileStore getFileStore(Path path) throws IOException { BundlePath bpath = (BundlePath)path; return bpath.getFileSystem().getFileStore(); } @Override public BundleFileSystem getFileSystem(URI uri) { WeakReference<BundleFileSystem> ref = openFilesystems .get(baseURIFor(uri)); if (ref == null) { throw new FileSystemNotFoundException(uri.toString()); } BundleFileSystem fs = ref.get(); if (fs == null) { throw new FileSystemNotFoundException(uri.toString()); } return fs; } @Override public Path getPath(URI uri) { BundleFileSystem fs = getFileSystem(uri); Path r = fs.getRootDirectory(); if (uri.isOpaque()) { return r; } else { return r.resolve(uri.getPath()); } } @Override public String getScheme() { return WIDGET; } @Override public boolean isHidden(Path path) throws IOException { BundleFileSystem fs = (BundleFileSystem) path.getFileSystem(); return origProvider(path).isHidden(fs.unwrap(path)); } @Override public boolean isSameFile(Path path, Path path2) throws IOException { BundleFileSystem fs = (BundleFileSystem) path.getFileSystem(); return origProvider(path).isSameFile(fs.unwrap(path), fs.unwrap(path2)); } private Path localPathFor(URI uri) { URI localUri = URI.create(uri.getSchemeSpecificPart()); return Paths.get(localUri); } @Override public void move(Path source, Path target, CopyOption... options) throws IOException { BundleFileSystem fs = (BundleFileSystem) source.getFileSystem(); origProvider(source).copy(fs.unwrap(source), fs.unwrap(target), options); } @Override public SeekableByteChannel newByteChannel(Path path, Set<? extends OpenOption> options, FileAttribute<?>... 
attrs) throws IOException { final BundleFileSystem fs = (BundleFileSystem) path.getFileSystem(); return origProvider(path).newByteChannel(fs.unwrap(path), options, attrs); } @Override public DirectoryStream<Path> newDirectoryStream(Path dir, final Filter<? super Path> filter) throws IOException { final BundleFileSystem fs = (BundleFileSystem) dir.getFileSystem(); final DirectoryStream<Path> stream = origProvider(dir) .newDirectoryStream(fs.unwrap(dir), new Filter<Path>() { @Override public boolean accept(Path entry) throws IOException { return filter.accept(fs.wrap(entry)); } }); return new DirectoryStream<Path>() { @Override public void close() throws IOException { stream.close(); } @Override public Iterator<Path> iterator() { return fs.wrapIterator(stream.iterator()); } }; } @Override public BundleFileSystem newFileSystem(URI uri, Map<String, ?> env) throws IOException { Path localPath = localPathFor(uri); URI baseURI = baseURIFor(uri); FileSystem origFs = FileSystems.newFileSystem(localPath, null); BundleFileSystem fs; synchronized (openFilesystems) { WeakReference<BundleFileSystem> existingRef = openFilesystems .get(baseURI); if (existingRef != null) { BundleFileSystem existing = existingRef.get(); if (existing.isOpen()) { throw new FileSystemAlreadyExistsException( baseURI.toASCIIString()); } } fs = new BundleFileSystem(origFs, this, baseURI); openFilesystems.put(baseURI, new WeakReference<BundleFileSystem>(fs)); } return fs; } private FileSystemProvider origProvider(Path path) { return ((BundlePath) path).getFileSystem().origFS.provider(); } @Override public <A extends BasicFileAttributes> A readAttributes(Path path, Class<A> type, LinkOption... options) throws IOException { BundleFileSystem fs = (BundleFileSystem) path.getFileSystem(); return origProvider(path).readAttributes(fs.unwrap(path), type, options); } @Override public Map<String, Object> readAttributes(Path path, String attributes, LinkOption... options) throws IOException { BundleFileSystem fs = (BundleFileSystem) path.getFileSystem(); return origProvider(path).readAttributes(fs.unwrap(path), attributes, options); } @Override public void setAttribute(Path path, String attribute, Object value, LinkOption... options) throws IOException { BundleFileSystem fs = (BundleFileSystem) path.getFileSystem(); origProvider(path).setAttribute(fs.unwrap(path), attribute, value, options); } }
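/*
 * Illustrative usage sketch for BundleFileSystemProvider (the bundle path is a placeholder):
 * create a new RO bundle zip on disk, open it as a widget: file system, and write an entry
 * through the standard java.nio.file API before the file system is closed.
 */
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

class BundleUsageSketch {
    public static void main(String[] args) throws Exception {
        Path zip = Paths.get("/tmp/example.bundle.zip"); // placeholder location
        try (BundleFileSystem fs = BundleFileSystemProvider.newFileSystemFromNew(zip)) {
            Path readme = fs.getRootDirectory().resolve("README.txt");
            Files.write(readme, "hello bundle".getBytes(StandardCharsets.UTF_8));
        } // closing the file system flushes the underlying zip
    }
}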
package stream.flarebot.flarebot.commands.music; import com.arsenarsen.lavaplayerbridge.player.Player; import net.dv8tion.jda.core.entities.Member; import net.dv8tion.jda.core.entities.Message; import net.dv8tion.jda.core.entities.TextChannel; import net.dv8tion.jda.core.entities.User; import stream.flarebot.flarebot.FlareBot; import stream.flarebot.flarebot.commands.*; import stream.flarebot.flarebot.objects.GuildWrapper; import stream.flarebot.flarebot.util.MessageUtils; public class ResumeCommand implements Command { @Override public void onCommand(User sender, GuildWrapper guild, TextChannel channel, Message message, String[] args, Member member) { Player player = FlareBot.getInstance().getMusicManager().getPlayer(guild.getGuildId()); if (player.getPlayingTrack() == null) { MessageUtils.sendErrorMessage("There is no music playing!", channel); } else if (!player.getPaused()) { MessageUtils.sendErrorMessage("The music is already playing!", channel); } else { player.play(); MessageUtils.sendSuccessMessage("Resuming...!", channel); } } @Override public String getCommand() { return "resume"; } @Override public String getDescription() { return "Resumes your playlist"; } @Override public String getUsage() { return "`{%}resume` - Resumes the playlist."; } @Override public CommandType getType() { return CommandType.MUSIC; } }
package uk.ac.ebi.phenotype.stats.graphs; import java.io.IOException; import java.net.URISyntaxException; import java.util.HashMap; import java.util.Map; import org.apache.commons.lang.WordUtils; import org.apache.log4j.Logger; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import uk.ac.ebi.phenotype.bean.StatisticalResultBean; import uk.ac.ebi.phenotype.pojo.Parameter; import uk.ac.ebi.phenotype.pojo.Pipeline; import uk.ac.ebi.phenotype.pojo.Procedure; public class PhenomeChartProvider { private static final Logger logger = Logger .getLogger(PhenomeChartProvider.class); public String createChart(String alleleAccession, JSONArray series, JSONArray categories) { String chartString=" $(function () { " +" phenomeChart = new Highcharts.Chart({ " +" chart: {" +"renderTo: 'chart" + alleleAccession+"'," +" type: 'scatter'," +" zoomType: 'xy'" +" }," +" title: {" +" text: ' "+"P-values Overview" +"' }," +" subtitle: {" +" text: ' "+"Parameter by parameter"+" ' " +" }," +" xAxis: {" +" categories: "+ categories.toString() + "," +" title: {" +" enabled: true," +" text: 'Parameters' " +" }, " +" labels: { " +" rotation: -90, " +" align: 'right', " +" style: { " +" fontSize: '11px', " +" fontFamily: 'Verdana, sans-serif' " +" } " +" }, " +" showLastLabel: true " +" }, " +" yAxis: { " +" title: { " +" text: '"+"-Log10(p-value)"+"' " +" }, " + "plotLines : [{" + "value : " + -Math.log10( 0.0001 ) + "," + "color : 'green', " + "dashStyle : 'shortdash'," + "width : 2," + "label : { useHTML: true, text : 'Significance threshold 1.00&#215;10<sup>-4</sup>' }" + "}]" +" }, " +" credits: { " +" enabled: false " +" }, " +" plotOptions: { " +" scatter: { " +" marker: { " +" radius: 5, " +" states: { " +" hover: { " +" enabled: true, " +" lineColor: 'rgb(100,100,100)' " +" } " +" } " +" }, " +" states: { " +" hover: { " +" marker: { " +" enabled: false " +" } " +" } " +" } " +" } " +" }, " +" series: "+ series.toString() +" }); " +" }); "; return chartString; } public String generatePhenomeChart( String alleleAccession, Map<String, StatisticalResultBean> statisticalResults, Pipeline pipeline) throws IOException, URISyntaxException { JSONArray series=new JSONArray(); JSONArray categories = new JSONArray(); try { int index = 0; // Create a statistical series for every procedure in the pipeline // Start from the pipeline so that there is no need to keep this // information from the caller side // get All procedures and generate a Map Parameter => Procedure Map<Parameter, Procedure> parametersToProcedure = new HashMap<Parameter, Procedure>(); Map<String, Parameter> parametersMap = new HashMap<String, Parameter>(); for (Procedure procedure: pipeline.getProcedures()) { JSONObject scatterJsonObject=new JSONObject(); // Tooltip first for correct formatting /* tooltip: { headerFormat: '<b>{series.name}</b><br>', pointFormat: '{point.name}<br/>value: {point.y}' },*/ JSONObject tooltip=new JSONObject(); tooltip.put("headerFormat", "<b>{series.name}</b><br>"); tooltip.put("pointFormat", "{point.name}<br/>p-value: {point.pValue}"); scatterJsonObject.put("tooltip", tooltip); scatterJsonObject.put("type", "scatter"); scatterJsonObject.put("name", procedure.getName()); JSONArray dataArray=new JSONArray(); // create a series here for (Parameter parameter: procedure.getParameters()) { /* data: [{ name: 'IMPC_...', x: 1, y: 2 }, { name: 'IMPC_...', x: 2, y: 5 }] */ if (statisticalResults.containsKey(parameter.getStableId()) && statisticalResults.get(parameter.getStableId()).getIsSuccessful() ) 
{ categories.put(parameter.getStableId()); JSONObject dataPoint=new JSONObject(); dataPoint.put("name", parameter.getName()); dataPoint.put("stableId", parameter.getStableId()); dataPoint.put("x", index); dataPoint.put("y", statisticalResults.get(parameter.getStableId()).getLogValue()); dataPoint.put("pValue", statisticalResults.get(parameter.getStableId()).getpValue()); dataArray.put(dataPoint); index++; } } if (dataArray.length() > 0) { scatterJsonObject.put("data", dataArray); series.put(scatterJsonObject); } } } catch (JSONException e) { // TODO Auto-generated catch block e.printStackTrace(); } String chartString=createChart(alleleAccession, series, categories); return chartString; } }
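/*
 * Illustrative sketch of the JSON shape that generatePhenomeChart() builds for one Highcharts
 * scatter series: each procedure becomes a series whose data points carry the parameter name,
 * stable id, x index and -log10(p-value). Field names match the code above; the values are
 * made up for illustration.
 */
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

class PhenomeSeriesSketch {
    static JSONObject sampleSeries() throws JSONException {
        JSONObject point = new JSONObject();
        point.put("name", "Body Weight");          // parameter name (placeholder)
        point.put("stableId", "IMPC_XXX_001_001"); // placeholder stable id
        point.put("x", 0);                         // parameter index on the x axis
        point.put("y", 4.2);                       // -log10(p-value)
        point.put("pValue", 0.000063);

        JSONObject series = new JSONObject();
        series.put("type", "scatter");
        series.put("name", "Example Procedure");   // placeholder procedure name
        series.put("data", new JSONArray().put(point));
        return series;
    }
}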
// modification, are permitted provided that the following conditions are met: // documentation and/or other materials provided with the distribution. // * Neither the name of the <organization> nor the // names of its contributors may be used to endorse or promote products // ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED // WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE // DISCLAIMED. IN NO EVENT SHALL DAVID J. PEARCE BE LIABLE FOR ANY // DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES // (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; // LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND // ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. package wyc.builder; import java.util.*; import static wyc.lang.WhileyFile.internalFailure; import static wyc.lang.WhileyFile.syntaxError; import static wyil.util.ErrorMessages.*; import wybs.lang.Attribute; import wybs.lang.NameID; import wybs.lang.Path; import wybs.lang.SyntacticElement; import wybs.lang.SyntaxError; import wybs.util.Pair; import wybs.util.ResolveError; import wybs.util.Triple; import wyc.lang.*; import wyc.lang.Stmt.*; import wyc.lang.WhileyFile.Context; import wyil.lang.*; /** * <p> * Responsible for compiling the declarations, statements and expression found * in a WhileyFile into WyIL declarations and bytecode blocks. For example: * </p> * * <pre> * type nat is (int x) where x >= 0 * * function f(nat x) => int: * return x-1 * </pre> * * <p> * The code generator is responsible for generating the code for the constraint * on <code>nat</code>, as well as compiling the function's statements into * their corresponding WyIL bytecodes. For example, the code generated * constraint on type <code>nat</code> would look like this: * </p> * * <pre> * type nat is int * where: * load x * const 0 * ifge goto exit * fail("type constraint not satisfied") * .exit: * </pre> * * This WyIL bytecode simply compares the local variable x against 0. Here, x * represents the value held in a variable of type <code>nat</code>. If the * constraint fails, then the given message is printed. * * @author David J. Pearce * */ public final class CodeGenerator { /** * The lambdas are anonymous functions used within statements and * expressions in the source file. These are compiled into anonymised WyIL * functions, since WyIL does not have an internal notion of a lambda. */ private final ArrayList<WyilFile.MethodDeclaration> lambdas = new ArrayList<WyilFile.MethodDeclaration>(); /** * The scopes stack is used for determining the correct scoping for continue * and break statements. Whenever we begin translating a loop of some kind, * a <code>LoopScope</code> is pushed on the stack. Once the translation of * that loop is complete, this is then popped off the stack. */ private Stack<Scope> scopes = new Stack<Scope>(); // WhileyFile /** * Generate a WyilFile from a given WhileyFile by translating all of the * declarations, statements and expressions into WyIL declarations and * bytecode blocks. * * @param wf * The WhileyFile to be translated. * @return */ public WyilFile generate(WhileyFile wf) { ArrayList<WyilFile.Declaration> declarations = new ArrayList<WyilFile.Declaration>(); // Go through each declaration and translate in the order of appearance. 
for (WhileyFile.Declaration d : wf.declarations) { try { if (d instanceof WhileyFile.Type) { declarations.add(generate((WhileyFile.Type) d)); } else if (d instanceof WhileyFile.Constant) { declarations.add(generate((WhileyFile.Constant) d)); } else if (d instanceof WhileyFile.FunctionOrMethod) { declarations.add(generate((WhileyFile.FunctionOrMethod) d)); } } catch (SyntaxError se) { throw se; } catch (Throwable ex) { WhileyFile.internalFailure(ex.getMessage(), (WhileyFile.Context) d, d, ex); } } // Add any lambda functions which were used within some expression. Each // of these is guaranteed to have been given a unique and valid WyIL // name. declarations.addAll(lambdas); // Done return new WyilFile(wf.module, wf.filename, declarations); } // Constant Declarations /** * Generate a WyilFile constant declaration from a WhileyFile constant * declaration. This requires evaluating the given expression to produce a * constant value. If this cannot be done, then a syntax error is raised to * indicate an invalid constant declaration was encountered. */ private WyilFile.ConstantDeclaration generate(WhileyFile.Constant cd) { // TODO: this the point where were should an evaluator return new WyilFile.ConstantDeclaration(cd.modifiers, cd.name, cd.resolvedValue); } // Type Declarations /** * Generate a WyilFile type declaration from a WhileyFile type declaration. * If a type invariant is given, then this will need to be translated into * Wyil bytecode. * * @param td * @return * @throws Exception */ private WyilFile.TypeDeclaration generate(WhileyFile.Type td) throws Exception { Block invariant = null; if (td.invariant != null) { // Create an empty invariant block to be populated during constraint // generation. invariant = new Block(1); // Setup the environment which maps source variables to block // registers. This is determined by allocating the root variable to // register 0, and then creating any variables declared in the type // pattern by from this root. Environment environment = new Environment(); int root = environment.allocate(td.resolvedType.raw()); addDeclaredVariables(root, td.pattern, td.resolvedType.raw(), environment, invariant); // Finally, translate the invariant expression. int target = generate(td.invariant, environment, invariant, td); // TODO: assign target register to something? } return new WyilFile.TypeDeclaration(td.modifiers, td.name(), td.resolvedType.nominal(), invariant); } // Function / Method Declarations private WyilFile.MethodDeclaration generate( WhileyFile.FunctionOrMethod fd) throws Exception { Type.FunctionOrMethod ftype = fd.resolvedType().raw(); // The environment maintains the mapping from source-level variables to // the registers in WyIL block(s). Environment environment = new Environment(); // Generate pre-condition // First, allocate parameters to registers in the current block for (int i=0;i!=fd.parameters.size();++i) { WhileyFile.Parameter p = fd.parameters.get(i); environment.allocate(ftype.params().get(i), p.name()); } // TODO: actually translate pre-condition Block precondition = null; // Generate post-condition Block postcondition = null; if (fd.ensures.size() > 0) { // This indicates one or more explicit ensures clauses are given. // Therefore, we must translate each of these into Wyil bytecodes. // First, we need to create an appropriate environment within which // to translate the post-conditions. Environment postEnv = new Environment(); int root = postEnv.allocate(fd.resolvedType().ret().raw()); // FIXME: can't we reuse the original environment? 
Well, if we // allocated the return variable after the parameters then we // probably could. for (int i = 0; i != fd.parameters.size(); ++i) { WhileyFile.Parameter p = fd.parameters.get(i); postEnv.allocate(ftype.params().get(i), p.name()); } postcondition = new Block(postEnv.size()); addDeclaredVariables(root, fd.ret, fd.resolvedType().ret().raw(), postEnv, postcondition); for (Expr condition : fd.ensures) { // TODO: actually translate these conditions. } } // Generate body Block body = new Block(fd.parameters.size()); for (Stmt s : fd.statements) { generate(s, environment, body, fd); } // The following is sneaky. It guarantees that every method ends in a // return. For methods that actually need a value, this is either // removed as dead-code or remains and will cause an error. body.append(Code.Return(),attributes(fd)); List<WyilFile.Case> ncases = new ArrayList<WyilFile.Case>(); ArrayList<String> locals = new ArrayList<String>(); ncases.add(new WyilFile.Case(body,precondition,postcondition,locals)); // Done return new WyilFile.MethodDeclaration(fd.modifiers, fd.name(), fd .resolvedType().raw(), ncases); } // Statements public void generate(Stmt stmt, Environment environment, Block codes, Context context) { try { if (stmt instanceof VariableDeclaration) { generate((VariableDeclaration) stmt, environment, codes, context); } else if (stmt instanceof Assign) { generate((Assign) stmt, environment, codes, context); } else if (stmt instanceof Assert) { generate((Assert) stmt, environment, codes, context); } else if (stmt instanceof Assume) { generate((Assume) stmt, environment, codes, context); } else if (stmt instanceof Return) { generate((Return) stmt, environment, codes, context); } else if (stmt instanceof Debug) { generate((Debug) stmt, environment, codes, context); } else if (stmt instanceof IfElse) { generate((IfElse) stmt, environment, codes, context); } else if (stmt instanceof Switch) { generate((Switch) stmt, environment, codes, context); } else if (stmt instanceof TryCatch) { generate((TryCatch) stmt, environment, codes, context); } else if (stmt instanceof Break) { generate((Break) stmt, environment, codes, context); } else if (stmt instanceof Throw) { generate((Throw) stmt, environment, codes, context); } else if (stmt instanceof While) { generate((While) stmt, environment, codes, context); } else if (stmt instanceof DoWhile) { generate((DoWhile) stmt, environment, codes, context); } else if (stmt instanceof ForAll) { generate((ForAll) stmt, environment, codes, context); } else if (stmt instanceof Expr.MethodCall) { generate((Expr.MethodCall) stmt, Code.NULL_REG, environment, codes, context); } else if (stmt instanceof Expr.FunctionCall) { generate((Expr.FunctionCall) stmt, Code.NULL_REG, environment, codes, context); } else if (stmt instanceof Expr.IndirectMethodCall) { generate((Expr.IndirectMethodCall) stmt, Code.NULL_REG, environment, codes, context); } else if (stmt instanceof Expr.IndirectFunctionCall) { generate((Expr.IndirectFunctionCall) stmt, Code.NULL_REG, environment, codes, context); } else if (stmt instanceof Expr.New) { generate((Expr.New) stmt, environment, codes, context); } else if (stmt instanceof Skip) { generate((Skip) stmt, environment, codes, context); } else { // should be dead-code WhileyFile.internalFailure("unknown statement: " + stmt.getClass().getName(), context, stmt); } } catch (ResolveError rex) { WhileyFile.syntaxError(rex.getMessage(), context, stmt, rex); } catch (SyntaxError sex) { throw sex; } catch (Exception ex) { 
WhileyFile.internalFailure(ex.getMessage(), context, stmt, ex); } } public void generate(VariableDeclaration s, Environment environment, Block codes, Context context) { // First, we allocate this variable to a given slot in the environment. int target = environment.allocate(s.type.raw(), s.name); // Second, translate initialiser expression if it exists. if(s.expr != null) { int operand = generate(s.expr, environment, codes, context); codes.append(Code.Assign(s.expr.result().raw(), target, operand), attributes(s)); } } public void generate(Assign s, Environment environment, Block codes, Context context) { // First, we translate the right-hand side expression and assign it to a // temporary register. int operand = generate(s.rhs, environment, codes, context); // Second, we update the left-hand side of this assignment // appropriately. if (s.lhs instanceof Expr.AssignedVariable) { Expr.AssignedVariable v = (Expr.AssignedVariable) s.lhs; // This is the easiest case. Having translated the right-hand side // expression, we now assign it directly to the register allocated // for variable on the left-hand side. int target = environment.get(v.var); codes.append(Code.Assign(s.rhs.result().raw(), target, operand), attributes(s)); } else if(s.lhs instanceof Expr.RationalLVal) { Expr.RationalLVal tg = (Expr.RationalLVal) s.lhs; // Having translated the right-hand side expression, we now // destructure it using the numerator and denominator unary // bytecodes. Expr.AssignedVariable lv = (Expr.AssignedVariable) tg.numerator; Expr.AssignedVariable rv = (Expr.AssignedVariable) tg.denominator; codes.append(Code.UnArithOp(s.rhs.result() .raw(), environment.get(lv.var), operand, Code.UnArithKind.NUMERATOR), attributes(s)); codes.append(Code.UnArithOp(s.rhs.result().raw(), environment.get(rv.var), operand, Code.UnArithKind.DENOMINATOR), attributes(s)); } else if(s.lhs instanceof Expr.Tuple) { Expr.Tuple tg = (Expr.Tuple) s.lhs; // Having translated the right-hand side expression, we now // destructure it using tupleload bytecodes and assign to those // variables on the left-hand side. ArrayList<Expr> fields = new ArrayList<Expr>(tg.fields); for (int i = 0; i != fields.size(); ++i) { Expr.AssignedVariable v = (Expr.AssignedVariable) fields.get(i); codes.append(Code.TupleLoad((Type.EffectiveTuple) s.rhs .result().raw(), environment.get(v.var), operand, i), attributes(s)); } } else if (s.lhs instanceof Expr.IndexOf || s.lhs instanceof Expr.FieldAccess) { // This is the more complicated case, since the left-hand side // expression is recursive. However, the WyIL update bytecode comes // to the rescue here. All we need to do is extract the variable // being updated and give this to the update bytecode. For example, // in the expression "x.y.f = e" we have that variable "x" is being // updated. ArrayList<String> fields = new ArrayList<String>(); ArrayList<Integer> operands = new ArrayList<Integer>(); Expr.AssignedVariable lhs = extractLVal(s.lhs, fields, operands, environment, codes, context); int target = environment.get(lhs.var); codes.append(Code.Update(lhs.type.raw(), target, operand, operands, lhs.afterType.raw(), fields), attributes(s)); } else { WhileyFile.syntaxError("invalid assignment", context, s); } } /** * This function recurses down the left-hand side of an assignment (e.g. * x[i] = e, x.f = e, etc) with a complex lval. The primary goal is to * identify the left-most variable which is actually being updated. 
A * secondary goal is to collect the sequence of field names being updated, * and translate any index expressions and store them in temporary * registers. * * @param e * The LVal being extract from. * @param fields * The list of fields being used in the assignment. * Initially, this is empty and is filled by this method as it * traverses the lval. * @param operands * The list of temporary registers in which evaluated index * expression are stored. Initially, this is empty and is filled * by this method as it traverses the lval. * @param environment * Mapping from variable names to block registers. * @param codes * Code block into which this statement is to be translated. * @param context * Enclosing context of this statement (i.e. type, constant, * function or method declaration). The context is used to aid * with error reporting as it determines the enclosing file. * @return */ private Expr.AssignedVariable extractLVal(Expr.LVal e, ArrayList<String> fields, ArrayList<Integer> operands, Environment environment, Block codes, Context context) { if (e instanceof Expr.AssignedVariable) { Expr.AssignedVariable v = (Expr.AssignedVariable) e; return v; } else if (e instanceof Expr.Dereference) { Expr.Dereference pa = (Expr.Dereference) e; return extractLVal((Expr.LVal) pa.src, fields, operands, environment, codes, context); } else if (e instanceof Expr.IndexOf) { Expr.IndexOf la = (Expr.IndexOf) e; int operand = generate(la.index, environment, codes, context); Expr.AssignedVariable l = extractLVal((Expr.LVal) la.src, fields, operands, environment, codes, context); operands.add(operand); return l; } else if (e instanceof Expr.FieldAccess) { Expr.FieldAccess ra = (Expr.FieldAccess) e; Expr.AssignedVariable r = extractLVal((Expr.LVal) ra.src, fields, operands, environment, codes, context); fields.add(ra.name); return r; } else { WhileyFile.syntaxError(errorMessage(INVALID_LVAL_EXPRESSION), context, e); return null; // dead code } } public void generate(Assert s, Environment environment, Block codes, Context context) { // TODO: implement me } public void generate(Assume s, Environment environment, Block codes, Context context) { // TODO: need to implement this translation. } public void generate(Return s, Environment environment, Block codes, Context context) { if (s.expr != null) { int operand = generate(s.expr, environment, codes, context); // Here, we don't put the type propagated for the return expression. // Instead, we use the declared return type of this function. This // has the effect of forcing an implicit coercion between the // actual value being returned and its required type. Type ret = ((WhileyFile.FunctionOrMethod) context).resolvedType() .raw().ret(); codes.append(Code.Return(ret, operand), attributes(s)); } else { codes.append(Code.Return(), attributes(s)); } } public void generate(Skip s, Environment environment, Block codes, Context context) { codes.append(Code.Nop, attributes(s)); } public void generate(Debug s, Environment environment, Block codes, Context context) { int operand = generate(s.expr, environment, codes, context); codes.append(Code.Debug(operand), attributes(s)); } public void generate(IfElse s, Environment environment, Block codes, Context context) { String falseLab = Block.freshLabel(); String exitLab = s.falseBranch.isEmpty() ? 
falseLab : Block .freshLabel(); generateCondition(falseLab, invert(s.condition), environment, codes, context); for (Stmt st : s.trueBranch) { generate(st, environment, codes, context); } if (!s.falseBranch.isEmpty()) { codes.append(Code.Goto(exitLab)); codes.append(Code.Label(falseLab)); for (Stmt st : s.falseBranch) { generate(st, environment, codes, context); } } codes.append(Code.Label(exitLab)); } public void generate(Throw s, Environment environment, Block codes, Context context) { int operand = generate(s.expr, environment, codes, context); codes.append(Code.Throw(s.expr.result().raw(), operand), s.attributes()); } public void generate(Break s, Environment environment, Block codes, Context context) { BreakScope scope = findEnclosingScope(BreakScope.class); if (scope == null) { WhileyFile.syntaxError(errorMessage(BREAK_OUTSIDE_LOOP), context, s); } codes.append(Code.Goto(scope.label)); } public void generate(Switch s, Environment environment, Block codes, Context context) throws Exception { String exitLab = Block.freshLabel(); int operand = generate(s.expr, environment, codes, context); String defaultTarget = exitLab; HashSet<Constant> values = new HashSet(); ArrayList<Pair<Constant, String>> cases = new ArrayList(); int start = codes.size(); for (Stmt.Case c : s.cases) { if (c.expr.isEmpty()) { // A case with an empty match represents the default label. We // must check that we have not already seen a case with an empty // match (otherwise, we'd have two default labels ;) if (defaultTarget != exitLab) { WhileyFile.syntaxError( errorMessage(DUPLICATE_DEFAULT_LABEL), context, c); } else { defaultTarget = Block.freshLabel(); codes.append(Code.Label(defaultTarget), attributes(c)); for (Stmt st : c.stmts) { generate(st, environment, codes, context); } codes.append(Code.Goto(exitLab), attributes(c)); } } else if (defaultTarget == exitLab) { String target = Block.freshLabel(); codes.append(Code.Label(target), attributes(c)); // Case statements in Whiley may have multiple matching constant // values. Therefore, we iterate each matching value and // construct a mapping from that to a label indicating the start // of the case body. for (Constant constant : c.constants) { // Check whether this case constant has already been used as // a case constant elsewhere. If so, then report an error. if (values.contains(constant)) { WhileyFile.syntaxError( errorMessage(DUPLICATE_CASE_LABEL), context, c); } cases.add(new Pair(constant, target)); values.add(constant); } for (Stmt st : c.stmts) { generate(st, environment, codes, context); } codes.append(Code.Goto(exitLab), attributes(c)); } else { // This represents the case where we have another non-default // case after the default case. Such code cannot be executed, // and is therefore reported as an error. 
WhileyFile.syntaxError(errorMessage(UNREACHABLE_CODE), context, c); } } codes.insert(start, Code.Switch(s.expr.result().raw(), operand, defaultTarget, cases), attributes(s)); codes.append(Code.Label(exitLab), attributes(s)); } public void generate(TryCatch s, Environment environment, Block codes, Context context) throws Exception { int start = codes.size(); int exceptionRegister = environment.allocate(Type.T_ANY); String exitLab = Block.freshLabel(); for (Stmt st : s.body) { generate(st, environment, codes, context); } codes.append(Code.Goto(exitLab),attributes(s)); String endLab = null; ArrayList<Pair<Type,String>> catches = new ArrayList<Pair<Type,String>>(); for(Stmt.Catch c : s.catches) { Code.Label lab; if(endLab == null) { endLab = Block.freshLabel(); lab = Code.TryEnd(endLab); } else { lab = Code.Label(Block.freshLabel()); } Type pt = c.type.raw(); // TODO: deal with exception type constraints catches.add(new Pair<Type,String>(pt,lab.label)); codes.append(lab, attributes(c)); environment.put(exceptionRegister, c.variable); for (Stmt st : c.stmts) { generate(st, environment, codes, context); } codes.append(Code.Goto(exitLab),attributes(c)); } codes.insert(start, Code.TryCatch(exceptionRegister,endLab,catches),attributes(s)); codes.append(Code.Label(exitLab), attributes(s)); } public void generate(While s, Environment environment, Block codes, Context context) { String label = Block.freshLabel(); String exit = Block.freshLabel(); codes.append(Code.Loop(label, Collections.EMPTY_SET), attributes(s)); generateCondition(exit, invert(s.condition), environment, codes, context); scopes.push(new BreakScope(exit)); for (Stmt st : s.body) { generate(st, environment, codes, context); } scopes.pop(); // break // Must add NOP before loop end to ensure labels at the boundary // get written into Wyil files properly. See Issue #253. codes.append(Code.Nop); codes.append(Code.LoopEnd(label), attributes(s)); codes.append(Code.Label(exit), attributes(s)); } public void generate(DoWhile s, Environment environment, Block codes, Context context) { String label = Block.freshLabel(); String exit = Block.freshLabel(); codes.append(Code.Loop(label, Collections.EMPTY_SET), attributes(s)); scopes.push(new BreakScope(exit)); for (Stmt st : s.body) { generate(st, environment, codes, context); } scopes.pop(); // break generateCondition(exit, invert(s.condition), environment, codes, context); // Must add NOP before loop end to ensure labels at the boundary // get written into Wyil files properly. See Issue #253. 
codes.append(Code.Nop); codes.append(Code.LoopEnd(label), attributes(s)); codes.append(Code.Label(exit), attributes(s)); } public void generate(ForAll s, Environment environment, Block codes, Context context) { String label = Block.freshLabel(); String exit = Block.freshLabel(); int sourceRegister = generate(s.source, environment, codes, context); // FIXME: loss of nominal information Type.EffectiveCollection rawSrcType = s.srcType.raw(); if (s.variables.size() > 1) { // this is the destructuring case // FIXME: support destructuring of lists and sets if (!(rawSrcType instanceof Type.EffectiveMap)) { WhileyFile.syntaxError(errorMessage(INVALID_MAP_EXPRESSION), context, s.source); } Type.EffectiveMap dict = (Type.EffectiveMap) rawSrcType; Type.Tuple element = (Type.Tuple) Type.Tuple(dict.key(), dict.value()); int indexRegister = environment.allocate(element); codes.append(Code .ForAll((Type.EffectiveMap) rawSrcType, sourceRegister, indexRegister, Collections.EMPTY_SET, label), attributes(s)); for (int i = 0; i < s.variables.size(); ++i) { String var = s.variables.get(i); int varReg = environment.allocate(element.element(i), var); codes.append(Code.TupleLoad(element, varReg, indexRegister, i), attributes(s)); } } else { // easy case. int indexRegister = environment.allocate(rawSrcType.element(), s.variables.get(0)); codes.append(Code.ForAll(s.srcType.raw(), sourceRegister, indexRegister, Collections.EMPTY_SET, label), attributes(s)); } // FIXME: add a continue scope scopes.push(new BreakScope(exit)); for (Stmt st : s.body) { generate(st, environment, codes, context); } scopes.pop(); // break // Must add NOP before loop end to ensure labels at the boundary // get written into Wyil files properly. See Issue #253. codes.append(Code.Nop); codes.append(Code.LoopEnd(label), attributes(s)); codes.append(Code.Label(exit), attributes(s)); } // Conditions public void generateCondition(String target, Expr condition, Environment environment, Block codes, Context context) { try { if (condition instanceof Expr.Constant) { generateCondition(target, (Expr.Constant) condition, environment, codes, context); } else if (condition instanceof Expr.UnOp) { generateCondition(target, (Expr.UnOp) condition, environment, codes, context); } else if (condition instanceof Expr.BinOp) { generateCondition(target, (Expr.BinOp) condition, environment, codes, context); } else if (condition instanceof Expr.Comprehension) { generateCondition(target, (Expr.Comprehension) condition, environment, codes, context); } else if (condition instanceof Expr.ConstantAccess || condition instanceof Expr.LocalVariable || condition instanceof Expr.AbstractInvoke || condition instanceof Expr.AbstractIndirectInvoke || condition instanceof Expr.FieldAccess || condition instanceof Expr.IndexOf) { // The default case simply compares the computed value against // true. In some cases, we could do better. For example, !(x < // 5) could be rewritten into x>=5. 
int r1 = generate(condition, environment, codes, context); int r2 = environment.allocate(Type.T_BOOL); codes.append(Code.Const(r2, Constant.V_BOOL(true)), attributes(condition)); codes.append(Code.If(Type.T_BOOL, r1, r2, Code.Comparator.EQ, target), attributes(condition)); } else { syntaxError(errorMessage(INVALID_BOOLEAN_EXPRESSION), context, condition); } } catch (SyntaxError se) { throw se; } catch (Exception ex) { internalFailure(ex.getMessage(), context, condition, ex); } } public void generateCondition(String target, Expr.Constant c, Environment environment, Block codes, Context context) { Constant.Bool b = (Constant.Bool) c.value; if (b.value) { codes.append(Code.Goto(target)); } else { // do nout } } public void generateCondition(String target, Expr.BinOp v, Environment environment, Block codes, Context context) throws Exception { Expr.BOp bop = v.op; if (bop == Expr.BOp.OR) { generateCondition(target, v.lhs, environment, codes, context); generateCondition(target, v.rhs, environment, codes, context); } else if (bop == Expr.BOp.AND) { String exitLabel = Block.freshLabel(); generateCondition(exitLabel, invert(v.lhs), environment, codes, context); generateCondition(target, v.rhs, environment, codes, context); codes.append(Code.Label(exitLabel)); } else if (bop == Expr.BOp.IS) { generateTypeCondition(target, v, environment, codes, context); } else { Code.Comparator cop = OP2COP(bop, v, context); if (cop == Code.Comparator.EQ && v.lhs instanceof Expr.LocalVariable && v.rhs instanceof Expr.Constant && ((Expr.Constant) v.rhs).value == Constant.V_NULL) { // this is a simple rewrite to enable type inference. Expr.LocalVariable lhs = (Expr.LocalVariable) v.lhs; if (environment.get(lhs.var) == null) { syntaxError(errorMessage(UNKNOWN_VARIABLE), context, v.lhs); } int slot = environment.get(lhs.var); codes.append( Code.IfIs(v.srcType.raw(), slot, Type.T_NULL, target), attributes(v)); } else if (cop == Code.Comparator.NEQ && v.lhs instanceof Expr.LocalVariable && v.rhs instanceof Expr.Constant && ((Expr.Constant) v.rhs).value == Constant.V_NULL) { // this is a simple rewrite to enable type inference. 
String exitLabel = Block.freshLabel(); Expr.LocalVariable lhs = (Expr.LocalVariable) v.lhs; if (environment.get(lhs.var) == null) { syntaxError(errorMessage(UNKNOWN_VARIABLE), context, v.lhs); } int slot = environment.get(lhs.var); codes.append(Code.IfIs(v.srcType.raw(), slot, Type.T_NULL, exitLabel), attributes(v)); codes.append(Code.Goto(target)); codes.append(Code.Label(exitLabel)); } else { int lhs = generate(v.lhs, environment, codes, context); int rhs = generate(v.rhs, environment, codes, context); codes.append(Code.If(v.srcType.raw(), lhs, rhs, cop, target), attributes(v)); } } } public void generateTypeCondition(String target, Expr.BinOp v, Environment environment, Block codes, Context context) throws Exception { int leftOperand; if (v.lhs instanceof Expr.LocalVariable) { Expr.LocalVariable lhs = (Expr.LocalVariable) v.lhs; if (environment.get(lhs.var) == null) { syntaxError(errorMessage(UNKNOWN_VARIABLE), context, v.lhs); } leftOperand = environment.get(lhs.var); } else { leftOperand = generate(v.lhs, environment, codes, context); } Expr.TypeVal rhs = (Expr.TypeVal) v.rhs; codes.append(Code.IfIs(v.srcType.raw(), leftOperand, rhs.type.raw(), target), attributes(v)); } public void generateCondition(String target, Expr.UnOp v, Environment environment, Block codes, Context context) { Expr.UOp uop = v.op; switch (uop) { case NOT: String label = Block.freshLabel(); generateCondition(label, v.mhs, environment, codes, context); codes.append(Code.Goto(target)); codes.append(Code.Label(label)); return; } syntaxError(errorMessage(INVALID_BOOLEAN_EXPRESSION), context, v); } public void generateCondition(String target, Expr.Comprehension e, Environment environment, Block codes, Context context) { if (e.cop != Expr.COp.NONE && e.cop != Expr.COp.SOME && e.cop != Expr.COp.ALL) { syntaxError(errorMessage(INVALID_BOOLEAN_EXPRESSION), context, e); } ArrayList<Triple<Integer, Integer, Type.EffectiveCollection>> slots = new ArrayList(); for (Pair<String, Expr> src : e.sources) { Nominal.EffectiveCollection srcType = (Nominal.EffectiveCollection) src .second().result(); int srcSlot; int varSlot = environment.allocate(srcType.raw().element(), src.first()); if (src.second() instanceof Expr.LocalVariable) { // this is a little optimisation to produce slightly better // code. Expr.LocalVariable v = (Expr.LocalVariable) src.second(); if (environment.get(v.var) != null) { srcSlot = environment.get(v.var); } else { // fall-back plan ... srcSlot = generate(src.second(), environment, codes, context); } } else { srcSlot = generate(src.second(), environment, codes, context); } slots.add(new Triple<Integer, Integer, Type.EffectiveCollection>( varSlot, srcSlot, srcType.raw())); } ArrayList<String> labels = new ArrayList<String>(); String loopLabel = Block.freshLabel(); for (Triple<Integer, Integer, Type.EffectiveCollection> p : slots) { Type.EffectiveCollection srcType = p.third(); String lab = loopLabel + "$" + p.first(); codes.append(Code.ForAll(srcType, p.second(), p.first(), Collections.EMPTY_LIST, lab), attributes(e)); labels.add(lab); } if (e.cop == Expr.COp.NONE) { String exitLabel = Block.freshLabel(); generateCondition(exitLabel, e.condition, environment, codes, context); for (int i = (labels.size() - 1); i >= 0; --i) { // Must add NOP before loop end to ensure labels at the boundary // get written into Wyil files properly. See Issue #253. 
codes.append(Code.Nop); codes.append(Code.LoopEnd(labels.get(i))); } codes.append(Code.Goto(target)); codes.append(Code.Label(exitLabel)); } else if (e.cop == Expr.COp.SOME) { generateCondition(target, e.condition, environment, codes, context); for (int i = (labels.size() - 1); i >= 0; --i) { // Must add NOP before loop end to ensure labels at the boundary // get written into Wyil files properly. See Issue #253. codes.append(Code.Nop); codes.append(Code.LoopEnd(labels.get(i))); } } else if (e.cop == Expr.COp.ALL) { String exitLabel = Block.freshLabel(); generateCondition(exitLabel, invert(e.condition), environment, codes, context); for (int i = (labels.size() - 1); i >= 0; --i) { // Must add NOP before loop end to ensure labels at the boundary // get written into Wyil files properly. See Issue #253. codes.append(Code.Nop); codes.append(Code.LoopEnd(labels.get(i))); } codes.append(Code.Goto(target)); codes.append(Code.Label(exitLabel)); } // LONE and ONE will be harder } // Expressions public int generate(Expr expression, Environment environment, Block codes, Context context) { try { if (expression instanceof Expr.Constant) { return generate((Expr.Constant) expression, environment, codes, context); } else if (expression instanceof Expr.LocalVariable) { return generate((Expr.LocalVariable) expression, environment, codes, context); } else if (expression instanceof Expr.ConstantAccess) { return generate((Expr.ConstantAccess) expression, environment, codes, context); } else if (expression instanceof Expr.Set) { return generate((Expr.Set) expression, environment, codes, context); } else if (expression instanceof Expr.List) { return generate((Expr.List) expression, environment, codes, context); } else if (expression instanceof Expr.SubList) { return generate((Expr.SubList) expression, environment, codes, context); } else if (expression instanceof Expr.SubString) { return generate((Expr.SubString) expression, environment, codes, context); } else if (expression instanceof Expr.BinOp) { return generate((Expr.BinOp) expression, environment, codes, context); } else if (expression instanceof Expr.LengthOf) { return generate((Expr.LengthOf) expression, environment, codes, context); } else if (expression instanceof Expr.Dereference) { return generate((Expr.Dereference) expression, environment, codes, context); } else if (expression instanceof Expr.Cast) { return generate((Expr.Cast) expression, environment, codes, context); } else if (expression instanceof Expr.IndexOf) { return generate((Expr.IndexOf) expression, environment, codes, context); } else if (expression instanceof Expr.UnOp) { return generate((Expr.UnOp) expression, environment, codes, context); } else if (expression instanceof Expr.FunctionCall) { return generate((Expr.FunctionCall) expression, environment, codes, context); } else if (expression instanceof Expr.MethodCall) { return generate((Expr.MethodCall) expression, environment, codes, context); } else if (expression instanceof Expr.IndirectFunctionCall) { return generate((Expr.IndirectFunctionCall) expression, environment, codes, context); } else if (expression instanceof Expr.IndirectMethodCall) { return generate((Expr.IndirectMethodCall) expression, environment, codes, context); } else if (expression instanceof Expr.Comprehension) { return generate((Expr.Comprehension) expression, environment, codes, context); } else if (expression instanceof Expr.FieldAccess) { return generate((Expr.FieldAccess) expression, environment, codes, context); } else if (expression instanceof Expr.Record) { 
return generate((Expr.Record) expression, environment, codes, context); } else if (expression instanceof Expr.Tuple) { return generate((Expr.Tuple) expression, environment, codes, context); } else if (expression instanceof Expr.Map) { return generate((Expr.Map) expression, environment, codes, context); } else if (expression instanceof Expr.FunctionOrMethod) { return generate((Expr.FunctionOrMethod) expression, environment, codes, context); } else if (expression instanceof Expr.Lambda) { return generate((Expr.Lambda) expression, environment, codes, context); } else if (expression instanceof Expr.New) { return generate((Expr.New) expression, environment, codes, context); } else { // should be dead-code internalFailure("unknown expression: " + expression.getClass().getName(), context, expression); } } catch (ResolveError rex) { syntaxError(rex.getMessage(), context, expression, rex); } catch (SyntaxError se) { throw se; } catch (Exception ex) { internalFailure(ex.getMessage(), context, expression, ex); } return -1; // deadcode } public int generate(Expr.MethodCall expr, Environment environment, Block codes, Context context) throws ResolveError { int target = environment.allocate(expr.result().raw()); generate(expr, target, environment, codes, context); return target; } public void generate(Expr.MethodCall expr, int target, Environment environment, Block codes, Context context) throws ResolveError { int[] operands = generate(expr.arguments, environment, codes, context); codes.append(Code.Invoke(expr.methodType.raw(), target, operands, expr.nid()), attributes(expr)); } public int generate(Expr.FunctionCall expr, Environment environment, Block codes, Context context) throws ResolveError { int target = environment.allocate(expr.result().raw()); generate(expr, target, environment, codes, context); return target; } public void generate(Expr.FunctionCall expr, int target, Environment environment, Block codes, Context context) throws ResolveError { int[] operands = generate(expr.arguments, environment, codes, context); codes.append( Code.Invoke(expr.functionType.raw(), target, operands, expr.nid()), attributes(expr)); } public int generate(Expr.IndirectFunctionCall expr, Environment environment, Block codes, Context context) throws ResolveError { int target = environment.allocate(expr.result().raw()); generate(expr, target, environment, codes, context); return target; } public void generate(Expr.IndirectFunctionCall expr, int target, Environment environment, Block codes, Context context) throws ResolveError { int operand = generate(expr.src, environment, codes, context); int[] operands = generate(expr.arguments, environment, codes, context); codes.append(Code.IndirectInvoke(expr.functionType.raw(), target, operand, operands), attributes(expr)); } public int generate(Expr.IndirectMethodCall expr, Environment environment, Block codes, Context context) throws ResolveError { int target = environment.allocate(expr.result().raw()); generate(expr, target, environment, codes, context); return target; } public void generate(Expr.IndirectMethodCall expr, int target, Environment environment, Block codes, Context context) throws ResolveError { int operand = generate(expr.src, environment, codes, context); int[] operands = generate(expr.arguments, environment, codes, context); codes.append(Code.IndirectInvoke(expr.methodType.raw(), target, operand, operands), attributes(expr)); } private int generate(Expr.Constant expr, Environment environment, Block codes, Context context) { Constant val = expr.value; int target 
= environment.allocate(val.type()); codes.append(Code.Const(target, expr.value), attributes(expr)); return target; } private int generate(Expr.FunctionOrMethod expr, Environment environment, Block codes, Context context) { Type.FunctionOrMethod type = expr.type.raw(); int target = environment.allocate(type); codes.append( Code.Lambda(type, target, Collections.EMPTY_LIST, expr.nid), attributes(expr)); return target; } private int generate(Expr.Lambda expr, Environment environment, Block codes, Context context) { Type.FunctionOrMethod tfm = expr.type.raw(); List<Type> tfm_params = tfm.params(); List<WhileyFile.Parameter> expr_params = expr.parameters; // Create environment for the lambda body. ArrayList<Integer> operands = new ArrayList<Integer>(); ArrayList<Type> paramTypes = new ArrayList<Type>(); Environment benv = new Environment(); for (int i = 0; i != tfm_params.size(); ++i) { Type type = tfm_params.get(i); benv.allocate(type, expr_params.get(i).name); paramTypes.add(type); operands.add(Code.NULL_REG); } for(Pair<Type,String> v : Exprs.uses(expr.body,context)) { if(benv.get(v.second()) == null) { Type type = v.first(); benv.allocate(type,v.second()); paramTypes.add(type); operands.add(environment.get(v.second())); } } // Generate body based on current environment Block body = new Block(expr_params.size()); if(tfm.ret() != Type.T_VOID) { int target = generate(expr.body, benv, body, context); body.append(Code.Return(tfm.ret(), target), attributes(expr)); } else { body.append(Code.Return(), attributes(expr)); } // Create concrete type for private lambda function Type.FunctionOrMethod cfm; if(tfm instanceof Type.Function) { cfm = Type.Function(tfm.ret(),tfm.throwsClause(),paramTypes); } else { cfm = Type.Method(tfm.ret(),tfm.throwsClause(),paramTypes); } // Construct private lambda function using generated body int id = expr.attribute(Attribute.Source.class).start; String name = "$lambda" + id; ArrayList<Modifier> modifiers = new ArrayList<Modifier>(); modifiers.add(Modifier.PRIVATE); ArrayList<WyilFile.Case> cases = new ArrayList<WyilFile.Case>(); cases.add(new WyilFile.Case(body, null, null, Collections.EMPTY_LIST, attributes(expr))); WyilFile.MethodDeclaration lambda = new WyilFile.MethodDeclaration( modifiers, name, cfm, cases, attributes(expr)); lambdas.add(lambda); Path.ID mid = context.file().module; NameID nid = new NameID(mid, name); // Finally, create the lambda int target = environment.allocate(tfm); codes.append( Code.Lambda(cfm, target, operands, nid), attributes(expr)); return target; } private int generate(Expr.ConstantAccess expr, Environment environment, Block codes, Context context) throws ResolveError { Constant val = expr.value; int target = environment.allocate(val.type()); codes.append(Code.Const(target, val), attributes(expr)); return target; } private int generate(Expr.LocalVariable expr, Environment environment, Block codes, Context context) throws ResolveError { if (environment.get(expr.var) != null) { Type type = expr.result().raw(); int operand = environment.get(expr.var); int target = environment.allocate(type); codes.append(Code.Assign(type, target, operand), attributes(expr)); return target; } else { syntaxError(errorMessage(VARIABLE_POSSIBLY_UNITIALISED), context, expr); return -1; } } private int generate(Expr.UnOp expr, Environment environment, Block codes, Context context) { int operand = generate(expr.mhs, environment, codes, context); int target = environment.allocate(expr.result().raw()); switch (expr.op) { case NEG: 
codes.append(Code.UnArithOp(expr.result().raw(), target, operand, Code.UnArithKind.NEG), attributes(expr)); break; case INVERT: codes.append(Code.Invert(expr.result().raw(), target, operand), attributes(expr)); break; case NOT: String falseLabel = Block.freshLabel(); String exitLabel = Block.freshLabel(); generateCondition(falseLabel, expr.mhs, environment, codes, context); codes.append(Code.Const(target, Constant.V_BOOL(true)), attributes(expr)); codes.append(Code.Goto(exitLabel)); codes.append(Code.Label(falseLabel)); codes.append(Code.Const(target, Constant.V_BOOL(false)), attributes(expr)); codes.append(Code.Label(exitLabel)); break; default: // should be dead-code internalFailure("unexpected unary operator encountered", context, expr); return -1; } return target; } private int generate(Expr.LengthOf expr, Environment environment, Block codes, Context context) { int operand = generate(expr.src, environment, codes, context); int target = environment.allocate(expr.result().raw()); codes.append(Code.LengthOf(expr.srcType.raw(), target, operand), attributes(expr)); return target; } private int generate(Expr.Dereference expr, Environment environment, Block codes, Context context) { int operand = generate(expr.src, environment, codes, context); int target = environment.allocate(expr.result().raw()); codes.append(Code.Dereference(expr.srcType.raw(), target, operand), attributes(expr)); return target; } private int generate(Expr.IndexOf expr, Environment environment, Block codes, Context context) { int srcOperand = generate(expr.src, environment, codes, context); int idxOperand = generate(expr.index, environment, codes, context); int target = environment.allocate(expr.result().raw()); codes.append(Code.IndexOf(expr.srcType.raw(), target, srcOperand, idxOperand), attributes(expr)); return target; } private int generate(Expr.Cast expr, Environment environment, Block codes, Context context) { int operand = generate(expr.expr, environment, codes, context); Type from = expr.expr.result().raw(); Type to = expr.result().raw(); int target = environment.allocate(to); // TODO: include constraints codes.append(Code.Convert(from, target, operand, to), attributes(expr)); return target; } private int generate(Expr.BinOp v, Environment environment, Block codes, Context context) throws Exception { // could probably use a range test for this somehow if (v.op == Expr.BOp.EQ || v.op == Expr.BOp.NEQ || v.op == Expr.BOp.LT || v.op == Expr.BOp.LTEQ || v.op == Expr.BOp.GT || v.op == Expr.BOp.GTEQ || v.op == Expr.BOp.SUBSET || v.op == Expr.BOp.SUBSETEQ || v.op == Expr.BOp.ELEMENTOF || v.op == Expr.BOp.AND || v.op == Expr.BOp.OR) { String trueLabel = Block.freshLabel(); String exitLabel = Block.freshLabel(); generateCondition(trueLabel, v, environment, codes, context); int target = environment.allocate(Type.T_BOOL); codes.append(Code.Const(target, Constant.V_BOOL(false)), attributes(v)); codes.append(Code.Goto(exitLabel)); codes.append(Code.Label(trueLabel)); codes.append(Code.Const(target, Constant.V_BOOL(true)), attributes(v)); codes.append(Code.Label(exitLabel)); return target; } else { Expr.BOp bop = v.op; int leftOperand = generate(v.lhs, environment, codes, context); int rightOperand = generate(v.rhs, environment, codes, context); Type result = v.result().raw(); int target = environment.allocate(result); switch (bop) { case UNION: codes.append(Code.BinSetOp((Type.EffectiveSet) result, target, leftOperand, rightOperand, Code.BinSetKind.UNION), attributes(v)); break; case INTERSECTION: codes.append(Code 
.BinSetOp((Type.EffectiveSet) result, target, leftOperand, rightOperand, Code.BinSetKind.INTERSECTION), attributes(v)); break; case DIFFERENCE: codes.append(Code.BinSetOp((Type.EffectiveSet) result, target, leftOperand, rightOperand, Code.BinSetKind.DIFFERENCE), attributes(v)); break; case LISTAPPEND: codes.append(Code.BinListOp((Type.EffectiveList) result, target, leftOperand, rightOperand, Code.BinListKind.APPEND), attributes(v)); break; case STRINGAPPEND: Type lhs = v.lhs.result().raw(); Type rhs = v.rhs.result().raw(); Code.BinStringKind op; if (lhs == Type.T_STRING && rhs == Type.T_STRING) { op = Code.BinStringKind.APPEND; } else if (lhs == Type.T_STRING && Type.isSubtype(Type.T_CHAR, rhs)) { op = Code.BinStringKind.LEFT_APPEND; } else if (rhs == Type.T_STRING && Type.isSubtype(Type.T_CHAR, lhs)) { op = Code.BinStringKind.RIGHT_APPEND; } else { // this indicates that one operand must be explicitly // converted // into a string. op = Code.BinStringKind.APPEND; } codes.append( Code.BinStringOp(target, leftOperand, rightOperand, op), attributes(v)); break; default: codes.append(Code.BinArithOp(result, target, leftOperand, rightOperand, OP2BOP(bop, v, context)), attributes(v)); } return target; } } private int generate(Expr.Set expr, Environment environment, Block codes, Context context) { int[] operands = generate(expr.arguments, environment, codes, context); int target = environment.allocate(expr.result().raw()); codes.append(Code.NewSet(expr.type.raw(), target, operands), attributes(expr)); return target; } private int generate(Expr.List expr, Environment environment, Block codes, Context context) { int[] operands = generate(expr.arguments, environment, codes, context); int target = environment.allocate(expr.result().raw()); codes.append(Code.NewList(expr.type.raw(), target, operands), attributes(expr)); return target; } private int generate(Expr.SubList expr, Environment environment, Block codes, Context context) { int srcOperand = generate(expr.src, environment, codes, context); int startOperand = generate(expr.start, environment, codes, context); int endOperand = generate(expr.end, environment, codes, context); int target = environment.allocate(expr.result().raw()); codes.append(Code.SubList(expr.type.raw(), target, srcOperand, startOperand, endOperand), attributes(expr)); return target; } private int generate(Expr.SubString v, Environment environment, Block codes, Context context) { int srcOperand = generate(v.src, environment, codes, context); int startOperand = generate(v.start, environment, codes, context); int endOperand = generate(v.end, environment, codes, context); int target = environment.allocate(v.result().raw()); codes.append( Code.SubString(target, srcOperand, startOperand, endOperand), attributes(v)); return target; } private int generate(Expr.Comprehension e, Environment environment, Block codes, Context context) { // First, check for boolean cases which are handled mostly by // generateCondition. if (e.cop == Expr.COp.SOME || e.cop == Expr.COp.NONE || e.cop == Expr.COp.ALL) { String trueLabel = Block.freshLabel(); String exitLabel = Block.freshLabel(); generateCondition(trueLabel, e, environment, codes, context); int target = environment.allocate(Type.T_BOOL); codes.append(Code.Const(target, Constant.V_BOOL(false)), attributes(e)); codes.append(Code.Goto(exitLabel)); codes.append(Code.Label(trueLabel)); codes.append(Code.Const(target, Constant.V_BOOL(true)), attributes(e)); codes.append(Code.Label(exitLabel)); return target; } else { // Ok, non-boolean case. 
ArrayList<Triple<Integer, Integer, Type.EffectiveCollection>> slots = new ArrayList(); for (Pair<String, Expr> p : e.sources) { Expr src = p.second(); Type.EffectiveCollection rawSrcType = (Type.EffectiveCollection) src .result().raw(); int varSlot = environment.allocate(rawSrcType.element(), p.first()); int srcSlot; if (src instanceof Expr.LocalVariable) { // this is a little optimisation to produce slightly better // code. Expr.LocalVariable v = (Expr.LocalVariable) src; if (environment.get(v.var) != null) { srcSlot = environment.get(v.var); } else { // fall-back plan ... srcSlot = generate(src, environment, codes, context); } } else { srcSlot = generate(src, environment, codes, context); } slots.add(new Triple(varSlot, srcSlot, rawSrcType)); } Type resultType; int target = environment.allocate(e.result().raw()); if (e.cop == Expr.COp.LISTCOMP) { resultType = e.type.raw(); codes.append(Code.NewList((Type.List) resultType, target, Collections.EMPTY_LIST), attributes(e)); } else { resultType = e.type.raw(); codes.append(Code.NewSet((Type.Set) resultType, target, Collections.EMPTY_LIST), attributes(e)); } // At this point, it would be good to determine an appropriate loop // invariant for a set comprehension. This is easy enough in the // case of // a single variable comprehension, but actually rather difficult // for a // multi-variable comprehension. // For example, consider <code>{x+y | x in xs, y in ys, x<0 && // y<0}</code> // What is an appropriate loop invariant here? String continueLabel = Block.freshLabel(); ArrayList<String> labels = new ArrayList<String>(); String loopLabel = Block.freshLabel(); for (Triple<Integer, Integer, Type.EffectiveCollection> p : slots) { String label = loopLabel + "$" + p.first(); codes.append(Code.ForAll(p.third(), p.second(), p.first(), Collections.EMPTY_LIST, label), attributes(e)); labels.add(label); } if (e.condition != null) { generateCondition(continueLabel, invert(e.condition), environment, codes, context); } int operand = generate(e.value, environment, codes, context); // FIXME: following broken for list comprehensions codes.append(Code.BinSetOp((Type.Set) resultType, target, target, operand, Code.BinSetKind.LEFT_UNION), attributes(e)); if (e.condition != null) { codes.append(Code.Label(continueLabel)); } for (int i = (labels.size() - 1); i >= 0; --i) { // Must add NOP before loop end to ensure labels at the boundary // get written into Wyil files properly. See Issue #253. 
codes.append(Code.Nop); codes.append(Code.LoopEnd(labels.get(i))); } return target; } } private int generate(Expr.Record expr, Environment environment, Block codes, Context context) { ArrayList<String> keys = new ArrayList<String>(expr.fields.keySet()); Collections.sort(keys); int[] operands = new int[expr.fields.size()]; for (int i = 0; i != operands.length; ++i) { String key = keys.get(i); Expr arg = expr.fields.get(key); operands[i] = generate(arg, environment, codes, context); } int target = environment.allocate(expr.result().raw()); codes.append(Code.NewRecord(expr.result().raw(), target, operands), attributes(expr)); return target; } private int generate(Expr.Tuple expr, Environment environment, Block codes, Context context) { int[] operands = generate(expr.fields, environment, codes, context); int target = environment.allocate(expr.result().raw()); codes.append(Code.NewTuple(expr.result().raw(), target, operands), attributes(expr)); return target; } private int generate(Expr.Map expr, Environment environment, Block codes, Context context) { int[] operands = new int[expr.pairs.size() * 2]; for (int i = 0; i != expr.pairs.size(); ++i) { Pair<Expr, Expr> e = expr.pairs.get(i); operands[i << 1] = generate(e.first(), environment, codes, context); operands[(i << 1) + 1] = generate(e.second(), environment, codes, context); } int target = environment.allocate(expr.result().raw()); codes.append(Code.NewMap(expr.result().raw(), target, operands), attributes(expr)); return target; } private int generate(Expr.FieldAccess expr, Environment environment, Block codes, Context context) { int operand = generate(expr.src, environment, codes, context); int target = environment.allocate(expr.result().raw()); codes.append( Code.FieldLoad(expr.srcType.raw(), target, operand, expr.name), attributes(expr)); return target; } private int generate(Expr.New expr, Environment environment, Block codes, Context context) throws ResolveError { int operand = generate(expr.expr, environment, codes, context); int target = environment.allocate(expr.result().raw()); codes.append(Code.NewObject(expr.type.raw(), target, operand)); return target; } private int[] generate(List<Expr> arguments, Environment environment, Block codes, Context context) { int[] operands = new int[arguments.size()]; for (int i = 0; i != operands.length; ++i) { Expr arg = arguments.get(i); operands[i] = generate(arg, environment, codes, context); } return operands; } // Helpers @SuppressWarnings("incomplete-switch") private Code.BinArithKind OP2BOP(Expr.BOp bop, SyntacticElement elem, Context context) { switch (bop) { case ADD: return Code.BinArithKind.ADD; case SUB: return Code.BinArithKind.SUB; case MUL: return Code.BinArithKind.MUL; case DIV: return Code.BinArithKind.DIV; case REM: return Code.BinArithKind.REM; case RANGE: return Code.BinArithKind.RANGE; case BITWISEAND: return Code.BinArithKind.BITWISEAND; case BITWISEOR: return Code.BinArithKind.BITWISEOR; case BITWISEXOR: return Code.BinArithKind.BITWISEXOR; case LEFTSHIFT: return Code.BinArithKind.LEFTSHIFT; case RIGHTSHIFT: return Code.BinArithKind.RIGHTSHIFT; } syntaxError(errorMessage(INVALID_BINARY_EXPRESSION), context, elem); return null; } @SuppressWarnings("incomplete-switch") private Code.Comparator OP2COP(Expr.BOp bop, SyntacticElement elem, Context context) { switch (bop) { case EQ: return Code.Comparator.EQ; case NEQ: return Code.Comparator.NEQ; case LT: return Code.Comparator.LT; case LTEQ: return Code.Comparator.LTEQ; case GT: return Code.Comparator.GT; case GTEQ: return 
Code.Comparator.GTEQ; case SUBSET: return Code.Comparator.SUBSET; case SUBSETEQ: return Code.Comparator.SUBSETEQ; case ELEMENTOF: return Code.Comparator.ELEMOF; } syntaxError(errorMessage(INVALID_BOOLEAN_EXPRESSION), context, elem); return null; } /** * The purpose of this method is to construct aliases for variables declared * as part of type patterns. For example: * * <pre> * type tup as {int x, int y} where x < y * </pre> * * Here, variables <code>x</code> and <code>y</code> are declared as part of * the type pattern, and we translate them into the aliases : $.x and $.y, * where "$" is the root variable passed as a parameter. * * @param src * @param t * @param environment */ public static void addDeclaredVariables(int root, TypePattern pattern, Type type, Environment environment, Block blk) { if(pattern instanceof TypePattern.Record) { TypePattern.Record tp = (TypePattern.Record) pattern; Type.Record tt = (Type.Record) type; for(TypePattern element : tp.elements) { String fieldName = element.var; Type fieldType = tt.field(fieldName); int target = environment.allocate(fieldType); blk.append(Code.FieldLoad(tt, target, root, fieldName)); addDeclaredVariables(target, element, fieldType, environment, blk); } } else if(pattern instanceof TypePattern.Tuple){ TypePattern.Tuple tp = (TypePattern.Tuple) pattern; Type.Tuple tt = (Type.Tuple) type; for(int i=0;i!=tp.elements.size();++i) { TypePattern element = tp.elements.get(i); Type elemType = tt.element(i); int target = environment.allocate(elemType); blk.append(Code.TupleLoad(tt, target, root, i)); addDeclaredVariables(target, element, elemType, environment, blk); } } else { // do nothing for leaf } if (pattern.var != null) { environment.put(root, pattern.var); } } @SuppressWarnings("incomplete-switch") private static Expr invert(Expr e) { if (e instanceof Expr.BinOp) { Expr.BinOp bop = (Expr.BinOp) e; Expr.BinOp nbop = null; switch (bop.op) { case AND: nbop = new Expr.BinOp(Expr.BOp.OR, invert(bop.lhs), invert(bop.rhs), attributes(e)); break; case OR: nbop = new Expr.BinOp(Expr.BOp.AND, invert(bop.lhs), invert(bop.rhs), attributes(e)); break; case EQ: nbop = new Expr.BinOp(Expr.BOp.NEQ, bop.lhs, bop.rhs, attributes(e)); break; case NEQ: nbop = new Expr.BinOp(Expr.BOp.EQ, bop.lhs, bop.rhs, attributes(e)); break; case LT: nbop = new Expr.BinOp(Expr.BOp.GTEQ, bop.lhs, bop.rhs, attributes(e)); break; case LTEQ: nbop = new Expr.BinOp(Expr.BOp.GT, bop.lhs, bop.rhs, attributes(e)); break; case GT: nbop = new Expr.BinOp(Expr.BOp.LTEQ, bop.lhs, bop.rhs, attributes(e)); break; case GTEQ: nbop = new Expr.BinOp(Expr.BOp.LT, bop.lhs, bop.rhs, attributes(e)); break; } if (nbop != null) { nbop.srcType = bop.srcType; return nbop; } } else if (e instanceof Expr.UnOp) { Expr.UnOp uop = (Expr.UnOp) e; switch (uop.op) { case NOT: return uop.mhs; } } Expr.UnOp r = new Expr.UnOp(Expr.UOp.NOT, e); r.type = Nominal.T_BOOL; return r; } /** * The attributes method extracts those attributes of relevance to WyIL, and * discards those which are only used for the wyc front end. 
* * @param elem * @return */ private static Collection<Attribute> attributes(SyntacticElement elem) { ArrayList<Attribute> attrs = new ArrayList<Attribute>(); attrs.add(elem.attribute(Attribute.Source.class)); return attrs; } public static final class Environment { private final HashMap<String, Integer> var2idx; private final ArrayList<Type> idx2type; public Environment() { var2idx = new HashMap<String, Integer>(); idx2type = new ArrayList<Type>(); } public Environment(Environment env) { var2idx = new HashMap<String, Integer>(env.var2idx); idx2type = new ArrayList<Type>(env.idx2type); } public int allocate(Type t) { int idx = idx2type.size(); idx2type.add(t); return idx; } public int allocate(Type t, String v) { int r = allocate(t); var2idx.put(v, r); return r; } public int size() { return idx2type.size(); } public Integer get(String v) { return var2idx.get(v); } public String get(int idx) { for (Map.Entry<String, Integer> e : var2idx.entrySet()) { int jdx = e.getValue(); if (jdx == idx) { return e.getKey(); } } return null; } public void put(int idx, String v) { var2idx.put(v, idx); } public ArrayList<Type> asList() { return idx2type; } public String toString() { return idx2type.toString() + "," + var2idx.toString(); } } @SuppressWarnings("unchecked") private <T extends Scope> T findEnclosingScope(Class<T> c) { for(int i=scopes.size()-1;i>=0;--i) { Scope s = scopes.get(i); if(c.isInstance(s)) { return (T) s; } } return null; } private abstract class Scope {} private class BreakScope extends Scope { public String label; public BreakScope(String l) { label = l; } } private class ContinueScope extends Scope { public String label; public ContinueScope(String l) { label = l; } } }
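// The Environment class above is the heart of the translation: every WyIL operand is just an
// index handed out in allocation order, and named source-level variables are pinned to those
// indices. The standalone sketch below re-creates that allocation scheme in isolation so it can
// be run without the rest of the compiler; the class name RegisterAllocator and the use of plain
// strings in place of wyil Type objects are illustrative choices, not part of the Whiley code base.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class RegisterAllocator {
    private final Map<String, Integer> var2idx = new HashMap<String, Integer>();
    private final List<String> idx2type = new ArrayList<String>();

    /** Allocate an anonymous register holding a value of the given type. */
    public int allocate(String type) {
        idx2type.add(type);
        return idx2type.size() - 1;
    }

    /** Allocate a register and bind a source-level variable name to it. */
    public int allocate(String type, String var) {
        int r = allocate(type);
        var2idx.put(var, r);
        return r;
    }

    /** Look up the register bound to a variable, or null if it is unbound. */
    public Integer get(String var) {
        return var2idx.get(var);
    }

    public static void main(String[] args) {
        RegisterAllocator env = new RegisterAllocator();
        int p0 = env.allocate("int", "x");  // parameters occupy the first registers
        int p1 = env.allocate("int", "y");
        int tmp = env.allocate("bool");     // temporaries are anonymous
        System.out.println("x=" + p0 + " y=" + p1 + " tmp=" + tmp
                + " -> lookup of x gives register " + env.get("x"));
    }
}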
package xyz.brassgoggledcoders.boilerplate.blocks; import net.minecraft.block.ITileEntityProvider; import net.minecraft.block.material.Material; import net.minecraft.block.state.IBlockState; import net.minecraft.tileentity.TileEntity; import net.minecraft.util.math.BlockPos; import net.minecraft.world.World; import javax.annotation.Nonnull; public abstract class BlockTEBase<T extends TileEntity> extends BlockBase implements IHasTileEntity, ITileEntityProvider { public BlockTEBase(Material material, String name) { super(material, name); this.isBlockContainer = true; } @Override public void breakBlock(@Nonnull World world, @Nonnull BlockPos pos, @Nonnull IBlockState state) { super.breakBlock(world, pos, state); world.removeTileEntity(pos); } @SuppressWarnings("deprecation") @Override public boolean eventReceived(IBlockState state, World world, BlockPos pos, int id, int param) { super.eventReceived(state, world, pos, id, param); TileEntity tileentity = world.getTileEntity(pos); return tileentity != null && tileentity.receiveClientEvent(id, param); } @SuppressWarnings("unchecked") public T getTileEntity(World world, BlockPos pos) { TileEntity tileEntity = world.getTileEntity(pos); if(tileEntity != null && tileEntity.getClass() == this.getTileEntityClass()) { return (T)tileEntity; } return null; } @Override @Nonnull @SuppressWarnings("deprecation") public TileEntity createNewTileEntity(@Nonnull World world, int meta) { return createTileEntity(world, getStateFromMeta(meta)); } @Override @Nonnull public abstract TileEntity createTileEntity(@Nonnull World world, @Nonnull IBlockState blockState); }
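// BlockTEBase above is abstract, so a concrete block must at least supply createTileEntity plus
// whatever IHasTileEntity requires (the generic getTileEntity helper relies on a
// getTileEntityClass method). The sketch below shows one plausible subclass; TileEntityExample,
// the registry name, and the exact signature of getTileEntityClass are assumptions, since neither
// IHasTileEntity nor any tile entity class appears in this file.

import javax.annotation.Nonnull;
import net.minecraft.block.material.Material;
import net.minecraft.block.state.IBlockState;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.world.World;

public class BlockExampleMachine extends BlockTEBase<TileEntityExample> {

    public BlockExampleMachine() {
        // Material.IRON and the name "example_machine" are illustrative values.
        super(Material.IRON, "example_machine");
    }

    @Override
    @Nonnull
    public TileEntity createTileEntity(@Nonnull World world, @Nonnull IBlockState blockState) {
        return new TileEntityExample();
    }

    // Assumed to satisfy IHasTileEntity, whose declaration is not shown here.
    @Override
    public Class<? extends TileEntity> getTileEntityClass() {
        return TileEntityExample.class;
    }
}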
package jlibs.nblr.rules;

import java.util.*;

/**
 * @author Santhosh Kumar T
 */
public class Paths extends ArrayList<Path>{
    public final Path owner;
    public final int depth;

    public Paths(Path owner){
        this.owner = owner;
        if(owner!=null){
            owner.children = this;
            depth = owner.depth+1;
        }else
            depth = 1;
    }

    public boolean add(Path path){
        if(owner==null)
            path.branch = size();
        else
            path.branch = owner.branch;
        path.parent = owner;
        path.depth = depth;
        return super.add(path);
    }

    public List<Path> leafs(){
        List<Path> list = new ArrayList<Path>();
        leafs(list);
        return list;
    }

    private void leafs(List<Path> list){
        for(Path path: this){
            if(path.children==null)
                list.add(path);
            else
                path.children.leafs(list);
        }
    }

    @SuppressWarnings({"SimplifiableIfStatement"})
    private static boolean clashes(Path p1, Path p2){
        if(p1.matcher()==null && p2.matcher()==null)
            throw new IllegalStateException("Ambiguous Routes: "+p1+" AND "+p2);
        if(p1.matcher()!=null && p2.matcher()!=null){
            if(p1.fallback() || p2.fallback())
                return false;
            else
                return p1.clashesWith(p2);
        }
        return false;
    }

    public static Paths travel(Node fromNode){
        Paths rootPaths = new Paths(null);
        List<Path> list = new ArrayList<Path>();
        while(true){
            if(list.size()==0){
                rootPaths.populate(fromNode);
                list.addAll(rootPaths);
            }else{
                List<Path> newList = new ArrayList<Path>();
                for(Path path: list){
                    if(path.matcher()!=null){
                        Paths paths = new Paths(path);
                        paths.populate((Node)path.get(path.size()-1));
                        newList.addAll(paths);
                    }
                }
                list = newList;
            }

            TreeSet<Integer> clashingIndexes = new TreeSet<Integer>();
            for(int ibranch=0; ibranch<rootPaths.size()-1; ibranch++){
                for(int jbranch=ibranch+1; jbranch<rootPaths.size(); jbranch++){
                    int i = 0;
                    for(Path ipath: list){
                        if(ipath.branch==ibranch){
                            int j = 0;
                            for(Path jpath: list){
                                if(jpath.branch==jbranch){
                                    if(clashes(ipath, jpath)){
                                        if(ipath.hasLoop() && jpath.hasLoop())
                                            throw new IllegalStateException("Infinite lookAhead needed: "+ipath+" and "+jpath);
                                        clashingIndexes.add(i);
                                        clashingIndexes.add(j);
                                    }
                                }
                                j++;
                            }
                        }
                        i++;
                    }
                }
            }

            if(clashingIndexes.size()==0)
                return rootPaths;

            List<Path> clashingPaths = new ArrayList<Path>(clashingIndexes.size());
            for(int id: clashingIndexes)
                clashingPaths.add(list.get(id));
            list = clashingPaths;
        }
    }

    private void populate(Node fromNode){
        populate(fromNode, new ArrayDeque<Object>());
    }

    private void populate(Node fromNode, Deque<Object> stack){
        if(stack.contains(fromNode))
            throw new IllegalStateException("infinite loop detected");
        stack.push(fromNode);
        if(fromNode.outgoing.size()>0){
            for(Edge edge: fromNode.outgoing){
                stack.push(edge);
                if(edge.matcher!=null){
                    stack.push(edge.target);
                    add(new Path(stack));
                    stack.pop();
                }else if(edge.ruleTarget!=null)
                    populate(edge.ruleTarget.node(), stack);
                else
                    populate(edge.target, stack);
                stack.pop();
            }
        }else{
            int rulesPopped = 0;
            Node target = null;
            Path temp = new Path(stack);
            temp.parent = this.owner;
            Path p = temp;
            while(p!=null && target==null){
                boolean wasNode = false;
                for(int i=p.size()-1; i>=0; i--){
                    Object obj = p.get(i);
                    if(obj instanceof Node){
                        if(wasNode)
                            rulesPopped++;
                        wasNode = true;
                    }else if(obj instanceof Edge){
                        wasNode = false;
                        Edge edge = (Edge)obj;
                        if(edge.ruleTarget!=null){
                            if(rulesPopped==0){
                                target = edge.target;
                                break;
                            }else
                                rulesPopped--;
                        }
                    }
                }
                p = p.parent;
            }
            if(target==null){
                add(new Path(stack));
            }else
                populate(target, stack);
        }
        stack.pop();
    }
}
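// For context, this is roughly how the class above is driven: Paths.travel walks a rule's node
// graph and repeatedly extends only the branches that still clash, until every alternative can be
// separated by lookahead (or an ambiguity / infinite-lookahead error is raised). The demo class
// and the startNode parameter below are illustrative; only travel() and leafs() come from the API
// shown above.

import java.util.List;

import jlibs.nblr.rules.Node;
import jlibs.nblr.rules.Path;
import jlibs.nblr.rules.Paths;

public class PathsDemo {
    /** Prints one line per distinguishable lookahead path from the given node. */
    public static void printLookaheadPaths(Node startNode) {
        Paths rootPaths = Paths.travel(startNode);
        List<Path> leafs = rootPaths.leafs();
        System.out.println(leafs.size() + " lookahead path(s):");
        for (Path leaf : leafs) {
            System.out.println("  " + leaf);
        }
    }
}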
package au.edu.griffith.ict;

import java.util.HashMap;
import java.util.LinkedList;
import java.util.Scanner;

public class Main {

    // custManager: CustomerManager
    // itemManager: Menu
    // orders: Order[*]
    // users: User[*]
    private CustomerManager customers;
    private Menu menu;
    private LinkedList<Order> orders;
    private HashMap<Integer, User> users;
    private float dayTotal; // This wasn't in the design docs.

    public static void main(String[] args){
        System.out.println("Hello World");
    }

    //void addOrder(Order order){ // Adds an order to the database/array
    //void removeOrder(Order order){ // Removes an Order from the database/array
    //Order[] getOrders(Customer customer){ // Gets an array of Order objects for a given Customer.
    //float getDayTotal(){ // Return the total takings for the day...
    //void display(){ // Display... what?

    /*private Order buildOrder(){ Order }*/

    /** Prints the menu out to the user. */
    private void displayMenu(){
        for(int i = 0; i < menu.getItems(); i++){
            MenuItem item = menu.getItem(i);
            if(item == null) continue;
            // %s prints the item number whatever its type; %n terminates the line.
            System.out.printf("%s %.20s %.2f%n", item.getItemNo(), item.getName(), item.getPrice());
        }
    }

    /**
     * Displays the menu to the user and requests that they enter an item number.
     * @return the selected MenuItem, or null if the user cancelled or the input
     *         was not a valid number.
     */
    private MenuItem requestItem(){
        System.out.println("Please select a menu item. To cancel, just press enter.");
        displayMenu();
        System.out.println("Menu item ID: ");
        // Deliberately not closed: closing a Scanner over System.in also closes
        // System.in and breaks any later console prompts.
        Scanner sc = new Scanner(System.in);
        String s = sc.nextLine();
        if(s.isEmpty()) return null;
        try{
            return menu.getItem(Integer.parseInt(s));
        } catch(NumberFormatException e){
            System.out.println(s + " is not a valid number! Cancelling.");
        }
        return null;
    }

    /**
     * Requests that the user input a new integer.
     * @return The int they entered
     */
    private int requestNumber(){
        // As above, the Scanner is left open so System.in stays usable.
        Scanner sc = new Scanner(System.in);
        return sc.nextInt();
    }
}
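// A small design note on the two request* methods above: because each opens its own Scanner over
// System.in, input buffered by one prompt can be lost before the next. A common alternative,
// sketched below under the assumption that all prompts run on the same console, is to share a
// single Scanner for the program's lifetime; the class and method names here are illustrative.

import java.util.Scanner;

public class ConsolePrompts {
    // One Scanner for the whole program; it is never closed, so System.in stays open.
    private static final Scanner IN = new Scanner(System.in);

    static String promptLine(String message) {
        System.out.println(message);
        return IN.nextLine();
    }

    static int promptInt(String message) {
        System.out.println(message);
        while (!IN.hasNextInt()) {
            System.out.println("That is not a valid number, try again:");
            IN.next(); // discard the invalid token
        }
        int value = IN.nextInt();
        IN.nextLine(); // consume the rest of the line
        return value;
    }

    public static void main(String[] args) {
        int id = promptInt("Menu item ID:");
        System.out.println("You entered " + id);
    }
}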
package edu.colorado.csdms.heat; import static org.junit.Assert.*; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.junit.After; import org.junit.Before; import org.junit.Test; /** * JUnit tests for the Heat class. */ public class HeatTest { private List<Integer> shape; private List<Double> spacing; private List<Double> origin; private Double alpha; private Double time; private Double timeStep; private double[][] temperature; private Heat heat; /** * @throws java.lang.Exception */ @Before public void setUp() throws Exception { Integer nRows = 10; Integer nCols = 20; Double dx = 1.0; Double dy = 1.0; Double xStart = 0.0; Double yStart = 0.0; alpha = 1.0; heat = new Heat(nRows, nCols, dx, dy, xStart, yStart, alpha); time = 0.0; timeStep = 0.25; shape = new ArrayList<Integer>(Arrays.asList(nRows, nCols)); spacing = new ArrayList<Double>(Arrays.asList(dx, dy)); origin = new ArrayList<Double>(Arrays.asList(xStart, yStart)); // Initialize temperature of plate. The top row is hot. temperature = new double[shape.get(1)][shape.get(0)]; for (int i = 0; i < shape.get(1); i++) { temperature[i][0] = 20.0; } } /** * @throws java.lang.Exception */ @After public void tearDown() throws Exception { } /** * Test method for {@link edu.colorado.csdms.heat.Heat#Heat(java.lang.Integer, java.lang.Integer, java.lang.Double, java.lang.Double, java.lang.Double, java.lang.Double, java.lang.Double)}. */ @Test public final void testHeatIntegerIntegerDoubleDoubleDoubleDoubleDouble() { Integer nRows = 10; Integer nCols = 20; Double dx = 1.0; Double dy = 1.0; Double xStart = 0.0; Double yStart = 0.0; alpha = 1.0; Heat newHeat = new Heat(nRows, nCols, dx, dy, xStart, yStart, alpha); assertNotNull(newHeat); } /** * Test method for {@link edu.colorado.csdms.heat.Heat#Heat()}. */ @Test public final void testHeat() { Heat newHeat = new Heat(); assertNotNull(newHeat); } /** * Test method for {@link edu.colorado.csdms.heat.Heat#Heat(java.lang.String)}. */ @Test public final void testHeatString() { fail("Not yet implemented"); // TODO } /** * Test method for {@link edu.colorado.csdms.heat.Heat#getShape()}. */ @Test public final void testGetShape() { assertEquals(shape, heat.getShape()); } /** * Test method for {@link edu.colorado.csdms.heat.Heat#setShape(java.util.List)}. */ @Test public final void testSetShape() { List<Integer> newShape = new ArrayList<Integer>(); newShape.add(100); newShape.add(50); heat.setShape(newShape); assertEquals(newShape, heat.getShape()); } /** * Test method for {@link edu.colorado.csdms.heat.Heat#getSpacing()}. */ @Test public final void testGetSpacing() { assertEquals(spacing, heat.getSpacing()); } /** * Test method for {@link edu.colorado.csdms.heat.Heat#setSpacing(java.util.List)}. */ @Test public final void testSetSpacing() { List<Double> newSpacing = new ArrayList<Double>(); newSpacing.add(500.0); newSpacing.add(250.0); heat.setSpacing(newSpacing); assertEquals(newSpacing, heat.getSpacing()); } /** * Test method for {@link edu.colorado.csdms.heat.Heat#getOrigin()}. */ @Test public final void testGetOrigin() { assertEquals(origin, heat.getOrigin()); } /** * Test method for {@link edu.colorado.csdms.heat.Heat#setOrigin(java.util.List)}. */ @Test public final void testSetOrigin() { List<Double> newOrigin = new ArrayList<Double>(); newOrigin.add(10.0); newOrigin.add(15.0); heat.setOrigin(newOrigin); assertEquals(newOrigin, heat.getOrigin()); } /** * Test method for {@link edu.colorado.csdms.heat.Heat#getAlpha()}. 
*/ @Test public final void testGetAlpha() { assertEquals(alpha, heat.getAlpha()); } /** * Test method for {@link edu.colorado.csdms.heat.Heat#setAlpha(java.lang.Double)}. */ @Test public final void testSetAlpha() { Double newAlpha = 0.2; heat.setAlpha(newAlpha); assertEquals(newAlpha, heat.getAlpha()); } /** * Test method for {@link edu.colorado.csdms.heat.Heat#getTime()}. */ @Test public final void testGetTime() { assertEquals(time, heat.getTime()); } /** * Test method for {@link edu.colorado.csdms.heat.Heat#setTime(java.lang.Double)}. */ @Test public final void testSetTime() { Double newTime = 42.0; heat.setTime(newTime); assertEquals(newTime, heat.getTime()); } /** * Test method for {@link edu.colorado.csdms.heat.Heat#getTimeStep()}. */ @Test public final void testGetTimeStep() { assertTrue(heat.getTimeStep() > 0.0); } /** * Test method for {@link edu.colorado.csdms.heat.Heat#setTimeStep(java.lang.Double)}. */ @Test public final void testSetTimeStep() { Double newTimeStep = 5.0; heat.setTimeStep(newTimeStep); assertEquals(newTimeStep, heat.getTimeStep()); } /** * Test method for {@link edu.colorado.csdms.heat.Heat#getTemperature()}. */ @Test public final void testGetTemperature() { // Check that the first column matches. double expected[] = temperature[0]; double actual[] = heat.getTemperature()[0]; assertArrayEquals(expected, actual, 0); } /** * Test method for {@link edu.colorado.csdms.heat.Heat#setTemperature(double[][])}. */ @Test public final void testSetTemperature() { double[][] newTemperature = new double[shape.get(1)][shape.get(0)]; for (int i = 0; i < shape.get(1); i++) { double[] iCol = newTemperature[i]; for (int j = 0; j < iCol.length; j++) { iCol[j] = j + 1; } } heat.setTemperature(newTemperature); // Check that the first column matches. double expected[] = newTemperature[0]; double actual[] = heat.getTemperature()[0]; assertArrayEquals(expected, actual, 0); } /** * Test method for {@link edu.colorado.csdms.heat.Heat#advanceInTime()}. */ @Test public final void testAdvanceInTime() { Double finalTime = heat.getTime() + heat.getTimeStep(); heat.advanceInTime(); assertEquals(finalTime, heat.getTime()); } }
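The tests above exercise a Heat class whose source is not included here. Judging only from the calls made in HeatTest (the seven-argument constructor, getTemperature(), setTemperature(), advanceInTime(), getTime()), a minimal driver for that API might look like the sketch below; the grid size, boundary value, and step count are arbitrary.

// Hypothetical driver for the Heat class exercised by HeatTest.
// Constructor and method signatures are inferred from the tests above.
package edu.colorado.csdms.heat;

public class HeatExample {
    public static void main(String[] args) {
        // nRows, nCols, dx, dy, xStart, yStart, alpha -- same argument order as in setUp().
        Heat heat = new Heat(10, 20, 1.0, 1.0, 0.0, 0.0, 1.0);

        // Heat one boundary, mirroring the initialization done in setUp().
        double[][] temperature = heat.getTemperature();
        for (int i = 0; i < temperature.length; i++) {
            temperature[i][0] = 20.0;
        }
        heat.setTemperature(temperature);

        // Step the model forward a few times and report the model time.
        for (int step = 0; step < 4; step++) {
            heat.advanceInTime();
        }
        System.out.println("Model time after 4 steps: " + heat.getTime());
    }
}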
package org.apache.hadoop.mapred.workflow.scheduling; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.hadoop.io.Writable; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.ResourceStatus; import org.apache.hadoop.mapred.workflow.MachineType; import org.apache.hadoop.mapred.workflow.TimePriceTable.TableEntry; import org.apache.hadoop.mapred.workflow.TimePriceTable.TableKey; import org.apache.hadoop.mapred.workflow.WorkflowConf; /** * Class representing a workflow directed acyclic graph. */ public class WorkflowDAG implements Writable { private Set<WorkflowNode> nodes; private Set<WorkflowNode> entryNodes = null; private Set<WorkflowNode> exitNodes = null; private Map<WorkflowNode, Set<WorkflowNode>> successors; private Map<WorkflowNode, Set<WorkflowNode>> predecessors; public WorkflowDAG() { nodes = new HashSet<WorkflowNode>(); successors = new HashMap<WorkflowNode, Set<WorkflowNode>>(); predecessors = new HashMap<WorkflowNode, Set<WorkflowNode>>(); } private void addNode(WorkflowNode node) { // Initialize the successors and predecessors of a node before it is added. successors.put(node, new HashSet<WorkflowNode>()); predecessors.put(node, new HashSet<WorkflowNode>()); // Add the node to the workflow. nodes.add(node); } private void removeNode(WorkflowNode node) { // Remove pointers to the node, and pointers from the node. for (WorkflowNode linkedNode : nodes) { successors.get(linkedNode).remove(node); predecessors.get(linkedNode).remove(node); } successors.remove(node); predecessors.remove(node); // Remove the node itself. nodes.remove(node); } private void addSuccessor(WorkflowNode node, WorkflowNode successor) { successors.get(node).add(successor); } private void addPredecessor(WorkflowNode node, WorkflowNode predecessor) { predecessors.get(node).add(predecessor); } /** * Return the predecessors of the specified {@link WorkflowNode}. */ public Set<WorkflowNode> getPredecessors(WorkflowNode node) { return predecessors.get(node); } /** * Return the successors of the specified {@link WorkflowNode}. */ public Set<WorkflowNode> getSuccessors(WorkflowNode node) { return successors.get(node); } /** * Return the set of nodes for the workflow. */ public Set<WorkflowNode> getNodes() { return nodes; } /** * Get the set of entry nodes for the workflow. */ public Set<WorkflowNode> getEntryNodes() { if (entryNodes == null) { entryNodes = new HashSet<WorkflowNode>(); for (WorkflowNode node : nodes) { if (predecessors.get(node).size() == 0) { entryNodes.add(node); } } } return entryNodes; } /** * Get the set of exit nodes for the workflow. */ public Set<WorkflowNode> getExitNodes() { if (exitNodes == null) { exitNodes = new HashSet<WorkflowNode>(); for (WorkflowNode node : nodes) { if (successors.get(node).size() == 0) { exitNodes.add(node); } } } return exitNodes; } /** * Using the current node-machineType pairings, return a critical path. * * @param table A time-price table to use for computation of times/prices. * * @return A list of {@link WorkflowNode}s on the critical path. */ public List<WorkflowNode> getCriticalPath(Map<TableKey, TableEntry> table) throws IOException { Map<WorkflowNode, Float> distances = getWorkflowNodeWeights(table); List<WorkflowNode> criticalPath = new ArrayList<WorkflowNode>(); // Get the exit nodes before adding the fake node so it is not included. 
Set<WorkflowNode> exitNodes = getExitNodes(); // Add a fake exit node connecting to all real exit nodes. // This allows computation of only one path, rather than multiple paths. WorkflowNode fakeExitNode = new WorkflowNode("fakeNode", 0, 0); this.addNode(fakeExitNode); for (WorkflowNode exit : exitNodes) { this.addPredecessor(fakeExitNode, exit); } WorkflowNode criticalNode = fakeExitNode; do { criticalNode = getNextCriticalNode(distances, criticalNode); criticalPath.add(0, criticalNode); } while (criticalNode != null); // Null check is after adding, so remove the null element. criticalPath.remove(0); removeNode(fakeExitNode); return criticalPath; } private WorkflowNode getNextCriticalNode( Map<WorkflowNode, Float> distances, WorkflowNode current) { float maxDistance = 0; WorkflowNode criticalNode = null; for (WorkflowNode predecessor : getPredecessors(current)) { float distance = distances.get(predecessor); if (distance > maxDistance) { maxDistance = distance; criticalNode = predecessor; } } return criticalNode; } /** * Compute the weights of WorkflowNodes. Each distance is the value of the * longest path from a source node to the node, as measured by maximum * execution time. * * @param table A time-price table to use for time/price computations. * * @return A map of {@link WorkflowNode} to their weightings. */ private Map<WorkflowNode, Float> getWorkflowNodeWeights( Map<TableKey, TableEntry> table) throws IOException { Map<WorkflowNode, Float> distances = new HashMap<WorkflowNode, Float>(); List<WorkflowNode> ordering = getTopologicalOrdering(); // Initialize (time) distances. for (WorkflowNode node : getNodes()) { distances.put(node, Float.MIN_VALUE); } for (WorkflowNode entry : getEntryNodes()) { float maxTime = getNodeMaxTime(table, entry); distances.put(entry, maxTime); } // Relax the nodes to find their proper weight. for (WorkflowNode node : ordering) { for (WorkflowNode next : getSuccessors(node)) { // Add the weight of executing the next node. float otherPath = distances.get(node) + getNodeMaxTime(table, next); if (distances.get(next) < otherPath) { distances.put(next, otherPath); } } } return distances; } // Get the execution time of a node. // This is the sum of the slowest contained map + reduce tasks. private float getNodeMaxTime(Map<TableKey, TableEntry> table, WorkflowNode node) throws IOException { float maxMapWeight = 0f; float maxRedWeight = 0f; for (WorkflowTask task : node.getTasks()) { String type = task.getMachineType(); TableKey key = new TableKey(node.getJobName(), type, task.isMapTask()); TableEntry entry = table.get(key); if (entry != null) { float weight = entry.execTime; if (task.isMapTask() && weight > maxMapWeight) { maxMapWeight = weight; } else if (!task.isMapTask() && weight > maxRedWeight) { maxRedWeight = weight; } } else { // IOException because info wasn't read from the configuration file. throw new IOException("Entry for " + node.getJobName() + " (" + (task.isMapTask() ? "map" : "reduce") + ") / " + type + " does not exist in the time price table."); } } return (maxMapWeight + maxRedWeight); } /** * Compute and return the total execution time (makespan) of a WorkflowDAG. */ public float getTime(Map<TableKey, TableEntry> table) throws IOException { // Get the critical path. List<WorkflowNode> criticalPath = getCriticalPath(table); // Compute the execution time along the path. float time = 0f; for (WorkflowNode node : criticalPath) { time += getNodeMaxTime(table, node); } return time; } /** * Compute and return the total cost of a WorkflowDAG. 
*/ public float getCost(Map<TableKey, TableEntry> table) throws IOException { // Add up the cost of all the nodes/tasks in the dag. float cost = 0f; for (WorkflowNode node : getNodes()) { for (WorkflowTask task : node.getTasks()) { String type = task.getMachineType(); TableKey key = new TableKey(node.getJobName(), type, task.isMapTask()); TableEntry entry = table.get(key); if (entry != null) { cost += entry.cost; } else { // IOException because info wasn't read from the configuration file. throw new IOException("Entry for " + node.getJobName() + " (" + (task.isMapTask() ? "map" : "reduce") + ") / " + type + " does not exist in the time price table."); } } } return cost; } /** * Compute and return a topological ordering on the input Workflow Dag. * * @return A list of {@link WorkflowNode}. */ private List<WorkflowNode> getTopologicalOrdering() { Set<WorkflowNode> nodes = getEntryNodes(); Set<WorkflowNode> marked = new HashSet<WorkflowNode>(); List<WorkflowNode> ordering = new ArrayList<WorkflowNode>(); for (WorkflowNode node : nodes) { constructTopologicalOrdering(node, marked, ordering); } return ordering; } private void constructTopologicalOrdering( WorkflowNode node, Set<WorkflowNode> marked, List<WorkflowNode> ordering) { marked.add(node); for (WorkflowNode next : getSuccessors(node)) { if (!marked.contains(next)) { constructTopologicalOrdering(next, marked, ordering); } } ordering.add(0, node); } /** * Construct a basic workflow DAG, with jobs/tasks initial state being * assigned to the least expensive machine type. * * @param machineTypes A set of {@link MachineType}. * @param machines A map of hadoop-named cluster machines/nodes, represented * by their {@link ResourceStatus}. * @param workflow A {@link WorkflowConf}. * * @return A directed acyclic graph representing the workflow. */ public static WorkflowDAG construct(Set<MachineType> machineTypes, Map<String, ResourceStatus> machines, WorkflowConf workflow) { WorkflowDAG dag = new WorkflowDAG(); // A temporary mapping to help with DAG creation. Map<JobConf, WorkflowNode> confToNode = new HashMap<JobConf, WorkflowNode>(); // Create a WorkflowNode for each JobConf. Map<String, JobConf> workflowJobs = workflow.getJobs(); for (String jobName : workflowJobs.keySet()) { JobConf workflowJob = workflowJobs.get(jobName); int maps = workflowJob.getNumMapTasks(); int reduces = workflowJob.getNumReduceTasks(); WorkflowNode node = new WorkflowNode(jobName, maps, reduces); dag.addNode(node); confToNode.put(workflowJob, node); } // Copy over dependencies, add successors & find entry/exit jobs. Map<String, Set<String>> workflowDependencies = workflow.getDependencies(); for (String successor : workflowDependencies.keySet()) { Set<String> dependencies = workflowDependencies.get(successor); for (String dependency : dependencies) { WorkflowNode node = confToNode.get(workflowJobs.get(successor)); WorkflowNode dep = confToNode.get(workflowJobs.get(dependency)); dag.addPredecessor(node, dep); dag.addSuccessor(dep, node); } } return dag; } @Override public void readFields(DataInput in) throws IOException { // Only need to read nodes, predecessors, & successors. // We'll use the same functions as during ordinary graph construction. HashMap<String, WorkflowNode> nodeMap = new HashMap<String, WorkflowNode>(); int numNodes = in.readInt(); for (int i = 0; i < numNodes; i++) { WorkflowNode node = new WorkflowNode(); node.readFields(in); this.addNode(node); nodeMap.put(node.getJobName(), node); } // Predecessors. 
int numPredKeys = in.readInt(); // Size of the map. for (int i = 0; i < numPredKeys; i++) { WorkflowNode keyNode = new WorkflowNode(); keyNode.readFields(in); // Map key. int numValues = in.readInt(); // Size of Map values (set). for (int j = 0; j < numValues; j++) { WorkflowNode valueNode = new WorkflowNode(); valueNode.readFields(in); // Map value. // Add the predecessor (use the nodes already present in the nodes set). WorkflowNode key = nodeMap.get(keyNode.getJobName()); WorkflowNode value = nodeMap.get(valueNode.getJobName()); predecessors.get(key).add(value); } } // Successors. int numSuccKeys = in.readInt(); // Size of the map. for (int i = 0; i < numSuccKeys; i++) { WorkflowNode keyNode = new WorkflowNode(); keyNode.readFields(in); // Map key. int numValues = in.readInt(); // Size of Map values (set). for (int j = 0; j < numValues; j++) { WorkflowNode valueNode = new WorkflowNode(); valueNode.readFields(in); // Map value. // Add the successor. WorkflowNode key = nodeMap.get(keyNode.getJobName()); WorkflowNode value = nodeMap.get(valueNode.getJobName()); successors.get(key).add(value); } } } @Override public void write(DataOutput out) throws IOException { // Only need to write nodes, predecessors, & successors. out.writeInt(nodes.size()); for (WorkflowNode node : nodes) { node.write(out); } writeWorkflowNodeMap(out, predecessors); writeWorkflowNodeMap(out, successors); } private void writeWorkflowNodeMap(DataOutput out, Map<WorkflowNode, Set<WorkflowNode>> map) throws IOException { out.writeInt(map.size()); // Size of the map. for (WorkflowNode key : map.keySet()) { key.write(out); // Map key. Set<WorkflowNode> values = map.get(key); out.writeInt(values.size()); // Size of Map values (set). for (WorkflowNode value : values) { value.write(out); // Map value. } } } }
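getWorkflowNodeWeights() is a standard longest-path computation over a DAG: visit nodes in topological order and relax each successor with the current node's accumulated weight. Because WorkflowDAG's mutators are private and its types depend on Hadoop classes, the self-contained sketch below shows the same relaxation step on a tiny DAG of plain strings with made-up weights; it illustrates the idea, not the scheduler's actual code.

// Standalone illustration of the longest-path relaxation used by
// WorkflowDAG.getWorkflowNodeWeights(): process nodes in topological order,
// relaxing each successor with the node's own weight added.
import java.util.*;

public class LongestPathSketch {
    public static void main(String[] args) {
        // A tiny DAG: a -> b -> d and a -> c -> d. Node weights model execution times.
        Map<String, List<String>> successors = new LinkedHashMap<>();
        successors.put("a", Arrays.asList("b", "c"));
        successors.put("b", Arrays.asList("d"));
        successors.put("c", Arrays.asList("d"));
        successors.put("d", Collections.emptyList());
        Map<String, Float> weight = Map.of("a", 1f, "b", 5f, "c", 2f, "d", 1f);

        // Topological order for this small example is simply a, b, c, d.
        List<String> order = Arrays.asList("a", "b", "c", "d");

        // Initialize distances; the entry node starts with its own weight.
        Map<String, Float> distance = new HashMap<>();
        order.forEach(n -> distance.put(n, Float.MIN_VALUE));
        distance.put("a", weight.get("a"));

        // Relaxation, as in getWorkflowNodeWeights().
        for (String node : order) {
            for (String next : successors.get(node)) {
                float candidate = distance.get(node) + weight.get(next);
                if (distance.get(next) < candidate) {
                    distance.put(next, candidate);
                }
            }
        }
        // Expected weights: a=1.0, b=6.0, c=3.0, d=7.0, so the critical path is a, b, d.
        System.out.println(distance);
    }
}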
package model.supervised.generative; import algorithms.parameterestimate.DataPointSet; import algorithms.parameterestimate.MixtureGaussianEM; import data.DataSet; import model.Predictable; import model.Trainable; import org.apache.commons.math3.distribution.MultivariateNormalDistribution; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import utils.random.RandomUtils; import utils.sort.SortIntDoubleUtils; //import org.neu.util.rand.RandomUtils; //import org.neu.util.sort.SortIntDoubleUtils; import java.util.HashMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.stream.IntStream; public class MixtureGaussianDiscriminantAnalysis implements Trainable, Predictable{ public static int COMPONENTS = 3; public static int MAX_THREADS = 1; private static final Logger log = LogManager.getLogger(MixtureGaussianDiscriminantAnalysis.class); private DataSet data = null; private HashMap<Integer, Object> indexClassMap = null; private MultivariateNormalDistribution[][] models = null; private double[][] componentsPi = null; private double[] priors = null; private int classCount = Integer.MIN_VALUE; private int featureLength = Integer.MIN_VALUE; private ExecutorService service = null; private CountDownLatch countDownLatch = null; @Override public double predict(double[] feature) { double[] probabilities = probs(feature); int[] index = RandomUtils.getIndexes(classCount); SortIntDoubleUtils.sort(index, probabilities); log.debug(probabilities[probabilities.length - 1]); return index[index.length - 1]; } public double[] probs(double[] feature) { double[] probabilities = new double[classCount]; for (int i = 0; i < models.length; i++) { probabilities[i] = mixtureDensity(feature, i) * priors[i]; } return probabilities; } @Override public double score(double[] feature) { double[] probabilities = new double[classCount]; for (int i = 0; i < models.length; i++) { probabilities[i] = mixtureDensity(feature, i) * priors[i]; } double score = probabilities[1] - probabilities[0]; return score; } @Override public void train() { for (int classIndex : indexClassMap.keySet()) { priors[classIndex] = data.getCategoryProportion(classIndex); } service = Executors.newFixedThreadPool(MAX_THREADS); countDownLatch = new CountDownLatch(classCount); log.info("Task Count: {}", countDownLatch.getCount()); for (int classIndex : indexClassMap.keySet()) { final int CLASS_INDEX = classIndex; service.submit(() -> { try { DataPointSet dataPointSet = new DataPointSet(data, CLASS_INDEX, RandomUtils.getIndexes(featureLength)); MixtureGaussianEM em = new MixtureGaussianEM(dataPointSet, COMPONENTS); em.run(); double[][][] sigma = em.getSigma(); double[][] mu = em.getMu(); double[] pi = em.getPi(); MultivariateNormalDistribution[] mixtureDistribution = new MultivariateNormalDistribution[COMPONENTS]; for (int i = 0; i < COMPONENTS; i++) { mixtureDistribution[i] = new MultivariateNormalDistribution(mu[i], sigma[i]); } componentsPi[CLASS_INDEX] = pi; models[CLASS_INDEX] = mixtureDistribution; log.info("class: {} finished EM ...", CLASS_INDEX); }catch (Throwable t) { log.error(t.getMessage(), t); } countDownLatch.countDown(); }); } try { TimeUnit.MILLISECONDS.sleep(10); countDownLatch.await(); }catch (Throwable t) { log.error(t.getMessage(), t); } service.shutdown(); log.info("MixtureGaussianDiscriminantAnalysis Training finished, service shutdown ..."); } @Override public void 
initialize(DataSet d) { this.data = d; indexClassMap = d.getLabels().getIndexClassMap(); classCount = indexClassMap.size(); featureLength = data.getFeatureLength(); models = new MultivariateNormalDistribution[classCount][]; componentsPi = new double[classCount][]; priors = new double[classCount]; } private double mixtureDensity(double[] feature, int modelIndex) { return IntStream.range(0, COMPONENTS).mapToDouble(i -> models[modelIndex][i].density(feature) * componentsPi[modelIndex][i]).sum(); } }
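probs() and mixtureDensity() above implement Bayes' rule with a Gaussian-mixture likelihood per class: the unnormalized score is prior(class) multiplied by the sum over components of pi_k times the multivariate normal density. A small self-contained sketch of that scoring rule, using the same commons-math3 MultivariateNormalDistribution but with made-up parameters for a single class, is shown below.

// Sketch of the per-class mixture likelihood used in probs()/mixtureDensity():
// score = prior * sum_k pi_k * N(x; mu_k, sigma_k). Parameters are illustrative.
import org.apache.commons.math3.distribution.MultivariateNormalDistribution;

public class MixtureDensitySketch {
    public static void main(String[] args) {
        // Two components with hand-picked means and identity covariances.
        double[] pi = {0.6, 0.4};
        MultivariateNormalDistribution[] components = {
            new MultivariateNormalDistribution(new double[]{0.0, 0.0},
                new double[][]{{1.0, 0.0}, {0.0, 1.0}}),
            new MultivariateNormalDistribution(new double[]{3.0, 3.0},
                new double[][]{{1.0, 0.0}, {0.0, 1.0}})
        };

        double prior = 0.5;       // class prior, playing the role of priors[classIndex]
        double[] x = {0.5, 0.2};  // feature vector to score

        double mixture = 0.0;
        for (int k = 0; k < components.length; k++) {
            mixture += pi[k] * components[k].density(x);
        }
        System.out.println("unnormalized class score: " + mixture * prior);
    }
}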
package net.drewke.tdme.engine.subsystems.object; import net.drewke.tdme.engine.Timing; import net.drewke.tdme.engine.model.AnimationSetup; import net.drewke.tdme.engine.model.Face; import net.drewke.tdme.engine.model.FacesEntity; import net.drewke.tdme.engine.model.Group; import net.drewke.tdme.engine.model.Material; import net.drewke.tdme.engine.model.Model; import net.drewke.tdme.engine.primitives.BoundingBox; import net.drewke.tdme.math.Vector3; import net.drewke.tdme.utils.HashMap; /** * Model utilities * @author Andreas Drewke * @version $Id$ */ public class ModelUtilitiesInternal { /** * Model statistics class * @author Andreas Drewke * @version $Id$ */ public static class ModelStatistics { private int opaqueFaceCount; private int transparentFaceCount; private int materialCount; /** * Constructor * @param solid face count * @param transparent face count * @param material count */ public ModelStatistics(int opaqueFaceCount, int transparentFaceCount, int materialCount) { super(); this.opaqueFaceCount = opaqueFaceCount; this.transparentFaceCount = transparentFaceCount; this.materialCount = materialCount; } /** * @return opaque face count */ public int getOpaqueFaceCount() { return opaqueFaceCount; } /** * @return transparent face count */ public int getTransparentFaceCount() { return transparentFaceCount; } /** * @return material count */ public int getMaterialCount() { return materialCount; } /* * (non-Javadoc) * @see java.lang.Object#toString() */ public String toString() { return "ModelStatistics [opaqueFaceCount=" + opaqueFaceCount + ", transparentFaceCount=" + transparentFaceCount + ", materialCount=" + materialCount + "]"; } } /** * Creates a bounding box from given model * @param model * @return axis aligned bounding box */ public static BoundingBox createBoundingBox(Model model) { return ModelUtilitiesInternal.createBoundingBox(new Object3DModelInternal(model)); } /** * Creates a bounding box from given object3d model * @param model * @return axis aligned bounding box */ public static BoundingBox createBoundingBox(Object3DModelInternal object3DModelInternal) { Model model = object3DModelInternal.getModel(); AnimationSetup defaultAnimation = model.getAnimationSetup(Model.ANIMATIONSETUP_DEFAULT); float minX = 0f, minY = 0f, minZ = 0f; float maxX = 0f, maxY = 0f, maxZ = 0f; boolean firstVertex = true; // create bounding box for whole animation at 60fps AnimationState animationState = new AnimationState(); animationState.setup = defaultAnimation; animationState.lastAtTime = Timing.UNDEFINED; animationState.currentAtTime = 0L; animationState.time = 0.0f; animationState.finished = false; for (float t = 0.0f; t <= (defaultAnimation != null?defaultAnimation.getFrames():0.0f) / model.getFPS(); t+= 1f / model.getFPS()) { // calculate transformations matrices without world transformations object3DModelInternal.computeTransformationsMatrices( model.getSubGroups(), object3DModelInternal.getModel().getImportTransformationsMatrix().clone().multiply(object3DModelInternal.getTransformationsMatrix()), animationState, 0 ); Object3DGroup.computeTransformations( object3DModelInternal.object3dGroups, object3DModelInternal.transformationsMatrices ); // parse through object groups to determine min, max for(Object3DGroup object3DGroup: object3DModelInternal.object3dGroups) { for(Vector3 vertex: object3DGroup.mesh.transformedVertices) { // vertex xyz array float[] vertexXYZ = vertex.getArray(); // determine min, max if (firstVertex == true) { minX = vertexXYZ[0]; minY = vertexXYZ[1]; minZ = 
vertexXYZ[2]; maxX = vertexXYZ[0]; maxY = vertexXYZ[1]; maxZ = vertexXYZ[2]; firstVertex = false; } else { if (vertexXYZ[0] < minX) minX = vertexXYZ[0]; if (vertexXYZ[1] < minY) minY = vertexXYZ[1]; if (vertexXYZ[2] < minZ) minZ = vertexXYZ[2]; if (vertexXYZ[0] > maxX) maxX = vertexXYZ[0]; if (vertexXYZ[1] > maxY) maxY = vertexXYZ[1]; if (vertexXYZ[2] > maxZ) maxZ = vertexXYZ[2]; } } } animationState.currentAtTime = (long)(t * 1000f); animationState.lastAtTime = (long)(t * 1000f); } // skip on models without meshes to be rendered if (firstVertex == true) return null; // otherwise go with bounding box return new BoundingBox( new Vector3(minX, minY, minZ), new Vector3(maxX, maxY, maxZ) ); } /** * Invert normals of a model * @param model */ public static void invertNormals(Model model) { invertNormals(model.getSubGroups()); } /** * Invert normals recursive * @param groups */ private static void invertNormals(HashMap<String, Group> groups) { for (Group group: groups.getValuesIterator()) { // invert for(Vector3 normal: group.getNormals()) { normal.scale(-1f); } // process sub groups invertNormals(group.getSubGroups()); } } /** * Compute model statistics * @param model * @return model statistics */ public static ModelStatistics computeModelStatistics(Model model) { return ModelUtilitiesInternal.computeModelStatistics(new Object3DModelInternal(model)); } /** * Compute model statistics * @param object 3d model internal * @return model statistics */ public static ModelStatistics computeModelStatistics(Object3DModelInternal object3DModelInternal) { HashMap<String, Integer> materialCountById = new HashMap<String, Integer>(); int opaqueFaceCount = 0; int transparentFaceCount = 0; for(Object3DGroup object3DGroup: object3DModelInternal.object3dGroups) { // check each faces entity FacesEntity[] facesEntities = object3DGroup.group.getFacesEntities(); int facesEntityIdxCount = facesEntities.length; for (int faceEntityIdx = 0; faceEntityIdx < facesEntityIdxCount; faceEntityIdx++) { FacesEntity facesEntity = facesEntities[faceEntityIdx]; int faces = facesEntity.getFaces().length; // material Material material = facesEntity.getMaterial(); // determine if transparent boolean transparentFacesEntity = false; // via material if (material != null) { if (material.hasTransparency() == true) transparentFacesEntity = true; } // setup material usage String materialId = material == null?"tdme.material.none":material.getId(); Integer materialCount = materialCountById.get(materialId); if (materialCount == null) { materialCountById.put(materialId, 1); } else { materialCountById.put(materialId, materialCount + 1); /* Integer values are immutable, so write the incremented count back into the map */ } // skip, if requested if (transparentFacesEntity == true) { // keep track of rendered faces transparentFaceCount+= faces; // skip to next entity continue; } opaqueFaceCount+= faces; } } // determine final material count int materialCount = 0; for (Integer material: materialCountById.getValuesIterator()) { materialCount++; } return new ModelStatistics(opaqueFaceCount, transparentFaceCount, materialCount); } /** * Compute if model 1 equals model 2 * @param model 1 * @param model 2 * @return model1 equals model2 */ public static boolean equals(Model model1, Model model2) { return ModelUtilitiesInternal.equals(new Object3DModelInternal(model1), new Object3DModelInternal(model2)); } /** * Compute if model 1 equals model 2 * @param model 1 * @param model 2 * @return model1 equals model2 */ public static boolean equals(Object3DModelInternal object3DModel1Internal, Object3DModelInternal object3DModel2Internal) { // check number of object 3d groups if
(object3DModel1Internal.object3dGroups.length != object3DModel2Internal.object3dGroups.length) return false; for(int i = 0; i < object3DModel1Internal.object3dGroups.length; i++) { Object3DGroup object3DGroupModel1 = object3DModel1Internal.object3dGroups[i]; Object3DGroup object3DGroupModel2 = object3DModel2Internal.object3dGroups[i]; FacesEntity[] facesEntitiesModel1 = object3DGroupModel1.group.getFacesEntities(); FacesEntity[] facesEntitiesModel2 = object3DGroupModel2.group.getFacesEntities(); // check transformation matrix if (object3DGroupModel1.group.getTransformationsMatrix().equals(object3DGroupModel2.group.getTransformationsMatrix()) == false) return false; // check number of faces entities if (facesEntitiesModel1.length != facesEntitiesModel2.length) return false; // check each faces entity for (int j = 0; j < facesEntitiesModel1.length; j++) { FacesEntity facesEntityModel1 = facesEntitiesModel1[j]; FacesEntity facesEntityModel2 = facesEntitiesModel2[j]; // check material // TODO: check if it should be allowed to have NULL material if (facesEntityModel1.getMaterial() == null && facesEntityModel2.getMaterial() != null) return false; if (facesEntityModel1.getMaterial() != null && facesEntityModel2.getMaterial() == null) return false; if (facesEntityModel1.getMaterial() != null && facesEntityModel2.getMaterial() != null && facesEntityModel1.getMaterial().getId().equals(facesEntityModel2.getMaterial().getId()) == false) { return false; } // check faces Face[] facesModel1 = facesEntityModel1.getFaces(); Face[] facesModel2 = facesEntityModel2.getFaces(); // number of faces in faces entity if (facesModel1.length != facesModel2.length) return false; // face indices for (int k = 0; k < facesModel1.length; k++) { // vertex indices int[] vertexIndicesModel1 = facesModel1[k].getVertexIndices(); int[] vertexIndicesModel2 = facesModel2[k].getVertexIndices(); if (vertexIndicesModel1[0] != vertexIndicesModel2[0] || vertexIndicesModel1[1] != vertexIndicesModel2[1] || vertexIndicesModel1[2] != vertexIndicesModel2[2]) { return false; } // TODO: maybe other indices } // TODO: check vertices, normals and such } } return true; } }
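computeModelStatistics() above tallies how often each material id occurs with a map of counters (get, then put the updated value back). With java.util.HashMap the same tally can be written with merge(), which avoids the explicit get/put-back step; the short sketch below uses the standard library map, not TDME's own HashMap (which has a different API such as getValuesIterator()).

// Counting occurrences per key with java.util.HashMap.merge(), the same
// tallying idea as materialCountById in computeModelStatistics().
import java.util.HashMap;
import java.util.Map;

public class MaterialTallySketch {
    public static void main(String[] args) {
        String[] materialIds = {"wood", "metal", "wood", "tdme.material.none", "wood"};

        Map<String, Integer> countById = new HashMap<>();
        for (String id : materialIds) {
            countById.merge(id, 1, Integer::sum); // insert 1, or add 1 to the existing count
        }

        System.out.println(countById);        // e.g. {wood=3, metal=1, tdme.material.none=1} (order not guaranteed)
        System.out.println(countById.size()); // distinct material count, as used above
    }
}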
package net.java.sip.communicator.impl.gui.main.contactlist; import java.awt.*; import java.awt.event.*; import javax.swing.*; import javax.swing.event.*; import net.java.sip.communicator.impl.gui.*; import net.java.sip.communicator.impl.gui.main.*; import net.java.sip.communicator.util.*; import net.java.sip.communicator.util.swing.*; import net.java.sip.communicator.util.swing.event.*; /** * The field shown on the top of the main window, which allows the user to * search for users. * @author Yana Stamcheva */ public class SearchField extends SIPCommTextField implements TextFieldChangeListener, FilterQueryListener { /** * The logger used by this class. */ private final Logger logger = Logger.getLogger(SearchField.class); /** * The main application window. */ private final MainFrame mainFrame; /** * Creates the <tt>SearchField</tt>. * @param frame the main application window */ public SearchField(MainFrame frame) { super(GuiActivator.getResources() .getI18NString("service.gui.ENTER_NAME_OR_NUMBER")); this.mainFrame = frame; SearchFieldUI textFieldUI = new SearchFieldUI(); textFieldUI.setDeleteButtonEnabled(true); this.setUI(textFieldUI); this.setBorder(null); this.setOpaque(false); this.setPreferredSize(new Dimension(100, 22)); this.setDragEnabled(true); this.addTextChangeListener(this); InputMap imap = getInputMap(JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT); imap.put(KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0), "escape"); ActionMap amap = getActionMap(); amap.put("escape", new AbstractAction() { public void actionPerformed(ActionEvent e) { setText(""); SearchField.this.mainFrame.requestFocusInCenterPanel(); } }); } /** * Handles the change when a char has been inserted in the field. */ public void textInserted() { // Should explicitly check if there's a text, because the default text // triggers also an insertUpdate event. String filterString = this.getText(); if (filterString == null || filterString.length() <= 0) return; updateContactListView(); } /** * Handles the change when a char has been removed from the field. */ public void textRemoved() { updateContactListView(); } /** * Do not need this for the moment. * @param e the <tt>DocumentEvent</tt> that notified us */ public void changedUpdate(DocumentEvent e) {} /** * Schedules an update if necessary. */ private void updateContactListView() { String filterString = getText(); FilterQuery filterQuery = null; if (filterString != null && filterString.length() > 0) { TreeContactList.searchFilter .setFilterString(filterString); filterQuery = GuiActivator.getContactList() .applyFilter(TreeContactList.searchFilter); } else { filterQuery = GuiActivator.getContactList().applyDefaultFilter(); } if (logger.isDebugEnabled()) logger.debug("Is filter query for string " + filterString + " : " + filterQuery); if (filterQuery != null && !filterQuery.isCanceled()) { // If we already have a result here we update the interface. if (filterQuery.isSucceeded()) enableUnknownContactView(false); else // Otherwise we will listen for events for changes in status // of this query. filterQuery.setQueryListener(this); } else // If the query is null or is canceled, we would simply check the // contact list content. enableUnknownContactView(GuiActivator.getContactList().isEmpty()); } /** * Sets the unknown contact view to the main contact list window. * @param isEnabled indicates if the unknown contact view should be enabled * or disabled. 
*/ public void enableUnknownContactView(final boolean isEnabled) { SwingUtilities.invokeLater(new Runnable() { public void run() { mainFrame.enableUnknownContactView(isEnabled); } }); } /** * Indicates that the given <tt>query</tt> has finished with failure, i.e. * no results for the filter were found. * @param query the <tt>FilterQuery</tt>, where this listener is registered */ public void filterQueryFailed(FilterQuery query) { // If we don't have matching contacts we enter the unknown contact // view. enableUnknownContactView(true); query.setQueryListener(null); } /** * Indicates that the given <tt>query</tt> has finished with success, i.e. * the filter has returned results. * @param query the <tt>FilterQuery</tt>, where this listener is registered */ public void filterQuerySucceeded(FilterQuery query) { // If the unknown contact view was previously enabled, but we // have found matching contacts we enter the normal view. enableUnknownContactView(false); GuiActivator.getContactList().selectFirstContact(); query.setQueryListener(null); } }
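SearchField reacts to typing through the SIPCommTextField listener API (textInserted()/textRemoved()), which is specific to this code base. The same filter-as-you-type pattern on a stock Swing JTextField is usually built with a DocumentListener; a minimal, self-contained sketch with made-up list contents is below.

// Filter-as-you-type with plain Swing: a DocumentListener re-filters a list
// model whenever the field's text changes, similar to SearchField's behavior.
import javax.swing.*;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import java.util.Arrays;
import java.util.List;

public class FilterFieldSketch {
    public static void main(String[] args) {
        SwingUtilities.invokeLater(() -> {
            List<String> contacts = Arrays.asList("Alice", "Bob", "Carol", "Dave");
            DefaultListModel<String> model = new DefaultListModel<>();
            contacts.forEach(model::addElement);

            JTextField searchField = new JTextField(20);
            searchField.getDocument().addDocumentListener(new DocumentListener() {
                private void refilter() {
                    String filter = searchField.getText().toLowerCase();
                    model.clear();
                    contacts.stream()
                            .filter(c -> c.toLowerCase().contains(filter))
                            .forEach(model::addElement);
                }
                public void insertUpdate(DocumentEvent e) { refilter(); }
                public void removeUpdate(DocumentEvent e) { refilter(); }
                public void changedUpdate(DocumentEvent e) { refilter(); }
            });

            JFrame frame = new JFrame("search");
            frame.add(searchField, java.awt.BorderLayout.NORTH);
            frame.add(new JScrollPane(new JList<>(model)), java.awt.BorderLayout.CENTER);
            frame.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
            frame.pack();
            frame.setVisible(true);
        });
    }
}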
package netspy.components.gui.components.listeners; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.io.File; import java.lang.reflect.Field; import javax.swing.JButton; import javax.swing.JFileChooser; import javax.swing.JTextField; import javax.swing.plaf.metal.MetalFileChooserUI; import netspy.NetSpy; import netspy.components.config.ConfigPropertiesManager; import netspy.components.filehandling.manager.FileManager; import netspy.components.gui.components.frame.NetSpyFrame; import netspy.components.gui.components.popups.ErrorNotificationPopup; import netspy.components.mailing.EmailHandler; /** * Class NetSpyActionListener. * */ public class NetSpyActionListener implements ActionListener { /** The owner. Used for accessing the text fields. */ private NetSpyFrame owner; /** * Instantiates a new file chooser action listener. * * @param owner the owner */ public NetSpyActionListener(NetSpyFrame owner) { this.owner = owner; } /* (non-Javadoc) * @see java.awt.event.ActionListener#actionPerformed(java.awt.event.ActionEvent) */ @Override public void actionPerformed(ActionEvent e) { switch ( ((JButton) e.getSource()).getName() ) { case NetSpyFrame.BUTTON_ID_MAIL_PATH: final JFileChooser mailPathChooser = new JFileChooser(); mailPathChooser.setFileSelectionMode(JFileChooser.FILES_AND_DIRECTORIES); mailPathChooser.setCurrentDirectory(new File(this.owner.getInputMailPath().getText())); final int returnValMailPath = mailPathChooser.showOpenDialog(null); final File fileMailPath = mailPathChooser.getSelectedFile(); if (returnValMailPath == JFileChooser.APPROVE_OPTION) { // change text field accordingly after choosing a file/folder // but first check if file/folder is valid: if (fileMailPath.isDirectory()) { this.owner.getInputMailPath().setText(fileMailPath.getAbsolutePath()); new ConfigPropertiesManager().setInboxPath(fileMailPath.getAbsolutePath()); // check with file-names } else if (fileMailPath.isFile()) { if (!fileMailPath.getName().endsWith(EmailHandler.EML_FILE_EXTENSION)) { new ErrorNotificationPopup("Ungültige Dateierweiterung", "Es sind nur .eml-Dateien erlaubt!"); break; } else { this.owner.getInputMailPath().setText(fileMailPath.getAbsolutePath()); new ConfigPropertiesManager().setInboxPath(fileMailPath.getAbsolutePath()); } } } else if (returnValMailPath == JFileChooser.ERROR_OPTION) { new ErrorNotificationPopup("Unbekannter Fehler", "Es ist ein unbekannter Fehler aufgetreten!"); } break; case NetSpyFrame.BUTTON_ID_QUARANTINE_PATH: final JFileChooser quarantinePathChooser = new JFileChooser(); quarantinePathChooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY); quarantinePathChooser.setCurrentDirectory(new File(this.owner.getInputQuarantinePath().getText())); MetalFileChooserUI ui = (MetalFileChooserUI) quarantinePathChooser.getUI(); Field field = null; try { field = MetalFileChooserUI.class.getDeclaredField("fileNameTextField"); field.setAccessible(true); JTextField tf = (JTextField) field.get(ui); tf.setEditable(false); } catch (NoSuchFieldException e1) { // could not fiend textfield inside file chooser new ErrorNotificationPopup("Fehler", "Es ist ein unbekannter Fehler aufgetreten!"); } catch (SecurityException e1) { // could not access access-property of textfield inside file chooser new ErrorNotificationPopup("Fehler", "Es ist ein unbekannter Fehler aufgetreten!"); } catch (IllegalArgumentException e1) { // could not access ui component of text field inside file chooser new ErrorNotificationPopup("Fehler", "Es ist ein unbekannter Fehler 
aufgetreten!"); } catch (IllegalAccessException e1) { // could not access ui component of text field inside file chooser new ErrorNotificationPopup("Fehler", "Es ist ein unbekannter Fehler aufgetreten!"); } final int returnValQuarantine = quarantinePathChooser.showOpenDialog(null); final File fileQuarantinePath = quarantinePathChooser.getSelectedFile(); if (returnValQuarantine == JFileChooser.APPROVE_OPTION) { // change text field accordingly after choosing a folder this.owner.getInputQuarantinePath().setText(fileQuarantinePath.getPath()); new ConfigPropertiesManager().setQuarantinePath(fileQuarantinePath.getAbsolutePath()); } else if (returnValQuarantine == JFileChooser.ERROR_OPTION) { new ErrorNotificationPopup("Fehler", "Es ist ein unbekannter Fehler aufgetreten!"); } break; case NetSpyFrame.BUTTON_ID_BLACKWORD_PATH: final JFileChooser blackwordPathChooser = new JFileChooser(); blackwordPathChooser.setFileSelectionMode(JFileChooser.FILES_ONLY); blackwordPathChooser.setCurrentDirectory(new File(this.owner.getInputBlackwordPath().getText())); final int returnValBlackword = blackwordPathChooser.showOpenDialog(null); final File fileBlackwordPath = blackwordPathChooser.getSelectedFile(); if (returnValBlackword == JFileChooser.APPROVE_OPTION) { // only blacklist.txt file accepted if (!fileBlackwordPath.getName().equals(FileManager.BLACKLIST_FILE_NAME)) { new ErrorNotificationPopup("Falsche Datei", "Datei muss '" + FileManager.BLACKLIST_FILE_NAME + "' heißen!"); break; } // change text field accordingly after choosing a folder this.owner.getInputBlackwordPath().setText(fileBlackwordPath.getPath()); new ConfigPropertiesManager().setBlackwordPath(fileBlackwordPath.getAbsolutePath()); } else if (returnValBlackword == JFileChooser.ERROR_OPTION) { new ErrorNotificationPopup("Fehler", "Es ist ein unbekannter Fehler aufgetreten!"); } break; case NetSpyFrame.BUTTON_ID_LOG_PATH: final JFileChooser logPathChooser = new JFileChooser(); logPathChooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY); logPathChooser.setCurrentDirectory(new File(this.owner.getInputLogPath().getText())); MetalFileChooserUI uiLog = (MetalFileChooserUI) logPathChooser.getUI(); Field fieldLog = null; try { fieldLog = MetalFileChooserUI.class.getDeclaredField("fileNameTextField"); fieldLog.setAccessible(true); JTextField tf = (JTextField) fieldLog.get(uiLog); tf.setEditable(false); } catch (NoSuchFieldException e1) { // could not fiend textfield inside file chooser new ErrorNotificationPopup("Fehler", "Es ist ein unbekannter Fehler aufgetreten!"); } catch (SecurityException e1) { // could not access access-property of textfield inside file chooser new ErrorNotificationPopup("Fehler", "Es ist ein unbekannter Fehler aufgetreten!"); } catch (IllegalArgumentException e1) { // could not access ui component of text field inside file chooser new ErrorNotificationPopup("Fehler", "Es ist ein unbekannter Fehler aufgetreten!"); } catch (IllegalAccessException e1) { // could not access ui component of text field inside file chooser new ErrorNotificationPopup("Fehler", "Es ist ein unbekannter Fehler aufgetreten!"); } final int returnValLog = logPathChooser.showOpenDialog(null); final File fileLogPath = logPathChooser.getSelectedFile(); if (returnValLog == JFileChooser.APPROVE_OPTION) { // change text field accordingly after choosing a folder this.owner.getInputLogPath().setText(fileLogPath.getPath()); new ConfigPropertiesManager().setLogPath(fileLogPath.getAbsolutePath()); } else if (returnValLog == JFileChooser.ERROR_OPTION) { new 
ErrorNotificationPopup("Fehler", "Es ist ein unbekannter Fehler aufgetreten!"); } break; case NetSpyFrame.BUTTON_ID_START_SCAN: if (!allPathsAreSet()) { this.owner.getLogBox().append("Scan konnte nicht gestartet werden!"); new ErrorNotificationPopup("Fehlende Pfadangaben", "Bitte überprüfen Sie Ihre Eingaben bezüglich der Pfade!"); break; } else { NetSpy.run(); } break; case NetSpyFrame.BUTTON_ID_CLEAR_LOGBOX: this.owner.getLogBox().clear(); break; default: break; } } /** * All paths are set. * * @return true, if all paths are set */ private boolean allPathsAreSet() { if (this.owner.getInputMailPath().getText().equals("")) { return false; } if (this.owner.getInputBlackwordPath().getText().equals("")) { return false; } if (this.owner.getInputLogPath().getText().equals("")) { return false; } if (this.owner.getInputQuarantinePath().getText().equals("")) { return false; } return true; } /** * Contains dir eml files. * * @param dir the dir * @return true, if successful */ @SuppressWarnings("unused") private boolean containsDirEmlFiles(File dir) { if (!dir.isDirectory()) { return false; } for (File fileInDir : dir.listFiles()) { if (fileInDir.getName().endsWith(EmailHandler.EML_FILE_EXTENSION)) { return true; } else if (containsDirEmlFiles(fileInDir)) { return true; } } return false; } }
package com.archimatetool.commandline; import java.io.IOException; import java.io.PrintWriter; import java.util.Comparator; import java.util.Map; import java.util.Map.Entry; import java.util.TreeMap; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.DefaultParser; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.eclipse.core.runtime.CoreException; import org.eclipse.core.runtime.IConfigurationElement; import org.eclipse.core.runtime.IExtensionRegistry; import org.eclipse.core.runtime.Platform; import org.eclipse.equinox.app.IApplication; import org.eclipse.equinox.app.IApplicationContext; import org.eclipse.swt.widgets.Display; import com.archimatetool.editor.utils.StringUtils; /** * The main application class for standalone operation. * * @author Phillip Beauvoir */ public class CentralScrutinizer implements IApplication { /** * Constructor */ public CentralScrutinizer() { } private class ProviderInfo { String id; String name; String description; public ProviderInfo(String id, String name, String description) { this.id = id; this.name = name; this.description = description; } } private Map<ICommandLineProvider, ProviderInfo> providers; @Override public Object start(IApplicationContext context) throws Exception { // Register providers registerProviders(); // Process options CommandLine commandLine = processOptions(); // Show help if set and exit if(commandLine.hasOption("help")) { //$NON-NLS-1$ showHelp(); if(commandLine.hasOption("pause")) { //$NON-NLS-1$ pause(); } return EXIT_OK; } // Run provider options return runProviderOptions(commandLine); } // Collect registered command line providers private void registerProviders() { // Sort the providers by priority... Comparator<ICommandLineProvider> comparator = (ICommandLineProvider p1, ICommandLineProvider p2) -> { int result = p1.getPriority() - p2.getPriority(); return result == 0 ? 
Integer.compare(System.identityHashCode(p1), System.identityHashCode(p2)) : result; // break ties deterministically so providers with equal priority keep distinct TreeMap keys }; providers = new TreeMap<ICommandLineProvider, ProviderInfo>(comparator); IExtensionRegistry registry = Platform.getExtensionRegistry(); for(IConfigurationElement configurationElement : registry.getConfigurationElementsFor(ICommandLineProvider.EXTENSION_ID)) { try { String id = configurationElement.getAttribute("id"); //$NON-NLS-1$ String name = configurationElement.getAttribute("name"); //$NON-NLS-1$ String description = configurationElement.getAttribute("description"); //$NON-NLS-1$ ICommandLineProvider provider = (ICommandLineProvider)configurationElement.createExecutableExtension("class"); //$NON-NLS-1$ if(id != null && provider != null) { ProviderInfo info = new ProviderInfo(id, name, description); providers.put(provider, info); } } catch(CoreException ex) { ex.printStackTrace(); } } } private CommandLine processOptions() throws ParseException { // Get core options Options options = getCoreOptions(); // Add provider options for(ICommandLineProvider provider : providers.keySet()) { Options providerOptions = provider.getOptions(); if(providerOptions != null) { for(Option option : providerOptions.getOptions()) { options.addOption(option); } } } // Parse options CommandLineParser parser = new DefaultParser(); return parser.parse(options, Platform.getApplicationArgs(), false); } private Options getCoreOptions() { Options options = new Options(); options.addOption("h", "help", false, Messages.CentralScrutinizer_0); //$NON-NLS-1$ //$NON-NLS-2$ options.addOption("a", "abortOnException", false, Messages.CentralScrutinizer_1); //$NON-NLS-1$ //$NON-NLS-2$ options.addOption("p", "pause", false, Messages.CentralScrutinizer_6); //$NON-NLS-1$ //$NON-NLS-2$ return options; } // Run providers' options private int runProviderOptions(CommandLine commandLine) { // Ensure Display is initialised ensureDefaultDisplay(); // Invoke providers' run() method for(ICommandLineProvider provider : providers.keySet()) { try { provider.run(commandLine); } catch(Exception ex) { ex.printStackTrace(); if(commandLine.hasOption("abortOnException")) { //$NON-NLS-1$ return -1; } else { // Consume?
} } } if(commandLine.hasOption("pause")) { //$NON-NLS-1$ pause(); } return EXIT_OK; } private void showHelp() { HelpFormatter formatter = new HelpFormatter(); //formatter.setOptionComparator(null); int width = 140; PrintWriter pw = new PrintWriter(System.out); System.out.println(Messages.CentralScrutinizer_2); System.out.println(); System.out.println(Messages.CentralScrutinizer_3); System.out.println("---------------"); //$NON-NLS-1$ formatter.printOptions(pw, width, getCoreOptions(), 1, 10); pw.flush(); System.out.println(); System.out.println(Messages.CentralScrutinizer_4); System.out.println("---------------------"); //$NON-NLS-1$ for(Entry<ICommandLineProvider, ProviderInfo> info : providers.entrySet()) { String pluginName = info.getValue().name; if(!StringUtils.isSet(pluginName)) { pluginName = info.getValue().id; } String pluginDescription = StringUtils.safeString(info.getValue().description); System.out.println(" [" + pluginName + "] " + pluginDescription); //$NON-NLS-1$//$NON-NLS-2$ } System.out.println(); System.out.println(Messages.CentralScrutinizer_5); System.out.println("--------"); //$NON-NLS-1$ Options allOptions = new Options(); for(ICommandLineProvider provider : providers.keySet()) { for(Option option : provider.getOptions().getOptions()) { allOptions.addOption(option); } } formatter.printOptions(pw, width, allOptions, 0, 10); pw.flush(); } @Override public void stop() { } private void pause() { System.out.println(); System.out.println(Messages.CentralScrutinizer_7); try { System.in.read(); } catch(IOException ex) { ex.printStackTrace(); } } /** * Some classes like ColorFactory use the Display class to do their thing * This ensures that the default display is created */ private void ensureDefaultDisplay() { if(Display.getCurrent() == null) { Display.getDefault(); } } }
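CentralScrutinizer builds its option set with Apache Commons CLI and hands the parsed CommandLine to each provider. A minimal, self-contained sketch of that Options / DefaultParser / HelpFormatter flow is below; the option names are illustrative, not the application's real options.

// Minimal Apache Commons CLI flow: declare options, parse args, react to flags.
import org.apache.commons.cli.*;

public class CliSketch {
    public static void main(String[] args) throws ParseException {
        Options options = new Options();
        options.addOption("h", "help", false, "show usage and exit");
        options.addOption(Option.builder("i").longOpt("input").hasArg()
                .desc("input file to process").build());

        CommandLine cmd = new DefaultParser().parse(options, args);

        if (cmd.hasOption("help")) {
            new HelpFormatter().printHelp("clisketch", options);
            return;
        }
        if (cmd.hasOption("input")) {
            System.out.println("input = " + cmd.getOptionValue("input"));
        }
    }
}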
package org.atrzaska.ebiznes.projekt1.gui; import org.atrzaska.ebiznes.projekt1.api.RestaurantRecommender; import org.atrzaska.ebiznes.projekt1.api.RestaurantRecommenderBuilder; public class RecommenderBuilderCreatorForm extends javax.swing.JFrame { protected RestaurantRecommender restaurantRecommender; /** * Creates new form RecommenderBuilderCreatorForm * @param restaurantRecommender */ public RecommenderBuilderCreatorForm(RestaurantRecommender restaurantRecommender) { this.restaurantRecommender = restaurantRecommender; initComponents(); } /** * This method is called from within the constructor to initialize the form. * WARNING: Do NOT modify this code. The content of this method is always * regenerated by the Form Editor. */ @SuppressWarnings("unchecked") // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents private void initComponents() { buttonGroup1 = new javax.swing.ButtonGroup(); boxRecommenderType = new javax.swing.JComboBox(); jLabel1 = new javax.swing.JLabel(); jLabel2 = new javax.swing.JLabel(); boxSimilarityType = new javax.swing.JComboBox(); panelSposobWyboru = new javax.swing.JPanel(); jLabel3 = new javax.swing.JLabel(); radioNNUserBased = new javax.swing.JRadioButton(); radioThresholdBased = new javax.swing.JRadioButton(); jLabel4 = new javax.swing.JLabel(); txtWartosc = new javax.swing.JTextField(); jSeparator1 = new javax.swing.JSeparator(); btnOK = new javax.swing.JButton(); setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE); setTitle("wybór sposobu rekomendacji"); boxRecommenderType.setModel(new javax.swing.DefaultComboBoxModel(new String[] { "na podstawie użytkowników", "na podstawie restauracji" })); boxRecommenderType.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { boxRecommenderTypeActionPerformed(evt); } }); jLabel1.setText("Generuj rekomendajce"); jLabel2.setText("Metoda obliczania podobieństwa"); boxSimilarityType.setModel(new javax.swing.DefaultComboBoxModel(new String[] { "UncenteredCosineSimilarity", "PearsonCorrelationSimilarity", "CityBlockSimilarity", "TanimotoCoefficientSimilarity", "EuclideanDistanceSimilarity" })); panelSposobWyboru.setBorder(javax.swing.BorderFactory.createTitledBorder("sposób wyboru podobnych użytkowników")); jLabel3.setText("wybierz"); buttonGroup1.add(radioNNUserBased); radioNNUserBased.setSelected(true); radioNNUserBased.setText("NearestNUserNeighborhood"); radioNNUserBased.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { radioNNUserBasedActionPerformed(evt); } }); buttonGroup1.add(radioThresholdBased); radioThresholdBased.setText("ThresholdUserNeighborhood"); radioThresholdBased.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { radioThresholdBasedActionPerformed(evt); } }); jLabel4.setText("podaj wartość"); txtWartosc.setText("10"); javax.swing.GroupLayout panelSposobWyboruLayout = new javax.swing.GroupLayout(panelSposobWyboru); panelSposobWyboru.setLayout(panelSposobWyboruLayout); panelSposobWyboruLayout.setHorizontalGroup( panelSposobWyboruLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(panelSposobWyboruLayout.createSequentialGroup() .addContainerGap() .addGroup(panelSposobWyboruLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(panelSposobWyboruLayout.createSequentialGroup() 
.addGroup(panelSposobWyboruLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(radioThresholdBased) .addComponent(jLabel4)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(txtWartosc, javax.swing.GroupLayout.PREFERRED_SIZE, 250, javax.swing.GroupLayout.PREFERRED_SIZE)) .addGroup(panelSposobWyboruLayout.createSequentialGroup() .addGroup(panelSposobWyboruLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(radioNNUserBased) .addComponent(jLabel3)) .addGap(0, 0, Short.MAX_VALUE))) .addContainerGap()) ); panelSposobWyboruLayout.setVerticalGroup( panelSposobWyboruLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(panelSposobWyboruLayout.createSequentialGroup() .addComponent(jLabel3) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(radioNNUserBased) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(radioThresholdBased) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addGroup(panelSposobWyboruLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel4) .addComponent(txtWartosc, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addGap(0, 8, Short.MAX_VALUE)) ); btnOK.setText("OK"); btnOK.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { btnOKActionPerformed(evt); } }); javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane()); getContentPane().setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addGap(29, 29, 29) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(jSeparator1) .addComponent(panelSposobWyboru, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addGroup(layout.createSequentialGroup() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(jLabel1) .addComponent(jLabel2)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 122, Short.MAX_VALUE) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false) .addComponent(boxRecommenderType, 0, 255, Short.MAX_VALUE) .addComponent(boxSimilarityType, 0, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))))) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(btnOK, javax.swing.GroupLayout.PREFERRED_SIZE, 130, javax.swing.GroupLayout.PREFERRED_SIZE))) .addContainerGap()) ); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addGap(35, 35, 35) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(boxRecommenderType, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(jLabel1)) .addGap(18, 18, 18) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) 
.addComponent(jLabel2) .addComponent(boxSimilarityType, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(panelSposobWyboru, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 45, Short.MAX_VALUE) .addComponent(jSeparator1, javax.swing.GroupLayout.PREFERRED_SIZE, 10, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(btnOK) .addContainerGap()) ); pack(); }// </editor-fold>//GEN-END:initComponents private void boxRecommenderTypeActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_boxRecommenderTypeActionPerformed String selectedValue = (String) boxRecommenderType.getSelectedItem(); if(selectedValue.equalsIgnoreCase("na podstawie restauracji")) { panelSposobWyboru.setVisible(false); } else { panelSposobWyboru.setVisible(true); } }//GEN-LAST:event_boxRecommenderTypeActionPerformed private void btnOKActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnOKActionPerformed String recommenderType = (String) boxRecommenderType.getSelectedItem(); String similarityType = (String) boxSimilarityType.getSelectedItem(); RestaurantRecommenderBuilder recommenderBuilder = (RestaurantRecommenderBuilder) restaurantRecommender.getRecommenderBuilder(); if(recommenderType.equals("na podstawie użytkowników")) { recommenderBuilder.setRecommenderType("user based"); recommenderBuilder.setSimilarityType(similarityType); if(radioNNUserBased.isSelected()) { recommenderBuilder.setUserNeighborhoodType("NearestNUserNeighborhood"); } else { recommenderBuilder.setUserNeighborhoodType("ThresholdUserNeighborhood"); } recommenderBuilder.setUserNeightborhoodValue(txtWartosc.getText()); } else { recommenderBuilder.setRecommenderType("item based"); recommenderBuilder.setSimilarityType(similarityType); } this.setVisible(false); this.dispose(); // this.restaurantRecommender. }//GEN-LAST:event_btnOKActionPerformed private void radioThresholdBasedActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_radioThresholdBasedActionPerformed this.txtWartosc.setText("0.1"); }//GEN-LAST:event_radioThresholdBasedActionPerformed private void radioNNUserBasedActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_radioNNUserBasedActionPerformed this.txtWartosc.setText("10"); }//GEN-LAST:event_radioNNUserBasedActionPerformed // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JComboBox boxRecommenderType; private javax.swing.JComboBox boxSimilarityType; private javax.swing.JButton btnOK; private javax.swing.ButtonGroup buttonGroup1; private javax.swing.JLabel jLabel1; private javax.swing.JLabel jLabel2; private javax.swing.JLabel jLabel3; private javax.swing.JLabel jLabel4; private javax.swing.JSeparator jSeparator1; private javax.swing.JPanel panelSposobWyboru; private javax.swing.JRadioButton radioNNUserBased; private javax.swing.JRadioButton radioThresholdBased; private javax.swing.JTextField txtWartosc; // End of variables declaration//GEN-END:variables }
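The form above only collects parameter choices (similarity class, neighborhood type, neighborhood size or threshold) and passes them to RestaurantRecommenderBuilder, whose source is not shown here. Assuming that builder wraps Apache Mahout's Taste API, which is where these class names come from, a user-based recommender matching the "user based" + NearestNUserNeighborhood choice is typically wired roughly as in the sketch below; the CSV path and user id are placeholders.

// Rough sketch of a Mahout Taste user-based recommender corresponding to the
// choices offered by the form above. Assumes the Mahout Taste API; paths and
// ids are placeholders, not values used by this project.
import java.io.File;
import java.util.List;

import org.apache.mahout.cf.taste.impl.model.file.FileDataModel;
import org.apache.mahout.cf.taste.impl.neighborhood.NearestNUserNeighborhood;
import org.apache.mahout.cf.taste.impl.recommender.GenericUserBasedRecommender;
import org.apache.mahout.cf.taste.impl.similarity.PearsonCorrelationSimilarity;
import org.apache.mahout.cf.taste.model.DataModel;
import org.apache.mahout.cf.taste.neighborhood.UserNeighborhood;
import org.apache.mahout.cf.taste.recommender.RecommendedItem;
import org.apache.mahout.cf.taste.recommender.Recommender;
import org.apache.mahout.cf.taste.similarity.UserSimilarity;

public class UserBasedRecommenderSketch {
    public static void main(String[] args) throws Exception {
        DataModel model = new FileDataModel(new File("ratings.csv")); // userID,itemID,rating per line
        UserSimilarity similarity = new PearsonCorrelationSimilarity(model);
        UserNeighborhood neighborhood = new NearestNUserNeighborhood(10, similarity, model);
        Recommender recommender = new GenericUserBasedRecommender(model, neighborhood, similarity);

        List<RecommendedItem> recommendations = recommender.recommend(1L, 3);
        for (RecommendedItem item : recommendations) {
            System.out.println(item.getItemID() + " -> " + item.getValue());
        }
    }
}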
package org.ensembl.healthcheck.testcase.variation; import java.sql.Connection; import org.ensembl.healthcheck.DatabaseRegistryEntry; import org.ensembl.healthcheck.DatabaseType; import org.ensembl.healthcheck.ReportManager; import org.ensembl.healthcheck.Species; import org.ensembl.healthcheck.Team; import org.ensembl.healthcheck.testcase.SingleDatabaseTestCase; /** * Check that all tables have data. */ public class EmptyVariationTables extends SingleDatabaseTestCase { /** * Creates a new instance of EmptyVariationTablesTestCase */ public EmptyVariationTables() { addToGroup("variation"); addToGroup("variation-release"); setDescription("Checks that all tables have data"); setTeamResponsible(Team.VARIATION); } /** * Define what tables are to be checked. */ private String[] getTablesToCheck(final DatabaseRegistryEntry dbre) { String[] tables = getTableNames(dbre.getConnection()); Species species = dbre.getSpecies(); String[] unusedTables = { "coord_system" }; String[] humanOnlyTables = { "protein_function_predictions", "phenotype", "tagged_variation_feature", "variation_annotation", "associate_study" }; String[] svTables = { "study", "structural_variation", "structural_variation_feature", "structural_variation_association", "structural_variation_annotation", "variation_set_structural_variation" }; String[] sampleTables = { "population_genotype", "population_structure", "sample_synonym" }; String[] setTables = { "variation_set", "variation_set_structure", "variation_set_variation" }; String[] genotypeTables = { "compressed_genotype_region", "compressed_genotype_var" }; String[] coverageTables = { "read_coverage" }; // first drop the unused tables tables = remove(tables, unusedTables); // then human specific ones unless we're running on human if (species != Species.HOMO_SAPIENS) { tables = remove(tables, humanOnlyTables); } // only these species have coverage data if (species != Species.RATTUS_NORVEGICUS && species != Species.MUS_MUSCULUS && species != Species.PONGO_ABELII && species != Species.HOMO_SAPIENS) { tables = remove(tables, coverageTables); } // only these species have structural variation data if (species != Species.HOMO_SAPIENS && species != Species.MUS_MUSCULUS && species != Species.CANIS_FAMILIARIS && species != Species.SUS_SCROFA && species != Species.MACACA_MULATTA) { tables = remove(tables, svTables); } // only these species do not have sample data if (species == Species.ANOPHELES_GAMBIAE || species == Species.ORNITHORHYNCHUS_ANATINUS || species == Species.PONGO_ABELII || species == Species.TETRAODON_NIGROVIRIDIS) { tables = remove(tables, sampleTables); } return tables; } /** * Check that every table has more than 0 rows. * * @param dbre * The database to check. * @return true if the test passed. 
*/ public boolean run(DatabaseRegistryEntry dbre) { boolean result = true; String[] tables = getTablesToCheck(dbre); Connection con = dbre.getConnection(); for (int i = 0; i < tables.length; i++) { String table = tables[i]; // logger.finest("Checking that " + table + " has rows"); if (!tableHasRows(con, table)) { ReportManager.problem(this, con, table + " has zero rows"); result = false; } } if (result) { ReportManager.correct(this, con, "All required tables have data"); } return result; } // run private String[] remove(final String[] tables, final String table) { String[] result = new String[tables.length - 1]; int j = 0; for (int i = 0; i < tables.length; i++) { if (!tables[i].equalsIgnoreCase(table)) { if (j < result.length) { result[j++] = tables[i]; } else { logger.severe("Cannot remove " + table + " since it's not in the list!"); } } } return result; } private String[] remove(final String[] src, final String[] tablesToRemove) { String[] result = src; for (int i = 0; i < tablesToRemove.length; i++) { result = remove(result, tablesToRemove[i]); } return result; } /** * This only applies to variation databases. */ public void types() { removeAppliesToType(DatabaseType.OTHERFEATURES); removeAppliesToType(DatabaseType.CDNA); removeAppliesToType(DatabaseType.CORE); removeAppliesToType(DatabaseType.VEGA); } } // EmptyVariationTablesTestCase
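/*
 * EmptyVariationTables relies on tableHasRows(con, table), which is inherited
 * from the healthcheck base classes and not shown in this file. The sketch
 * below is a plain-JDBC illustration of the kind of row-count probe such a
 * helper would typically perform; the class and method names are hypothetical
 * and not the framework's actual implementation.
 */
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

class RowCountProbe {

    /** Returns true if the given table contains at least one row. */
    static boolean hasRows(Connection con, String table) {
        try (Statement stmt = con.createStatement();
             ResultSet rs = stmt.executeQuery("SELECT COUNT(*) FROM " + table)) {
            return rs.next() && rs.getLong(1) > 0;
        } catch (SQLException e) {
            // For reporting purposes an inaccessible table is treated like an empty one.
            return false;
        }
    }
}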
package com.yahoo.vespa.config.server; import com.yahoo.cloud.config.ConfigserverConfig; import com.yahoo.config.model.api.ModelFactory; import com.yahoo.config.model.provision.Host; import com.yahoo.config.model.provision.Hosts; import com.yahoo.config.model.provision.InMemoryProvisioner; import com.yahoo.config.provision.ApplicationId; import com.yahoo.config.provision.Environment; import com.yahoo.config.provision.RegionName; import com.yahoo.component.Version; import com.yahoo.config.provision.Zone; import com.yahoo.container.handler.VipStatus; import com.yahoo.container.jdisc.config.HealthMonitorConfig; import com.yahoo.container.jdisc.state.StateMonitor; import com.yahoo.jdisc.core.SystemTimer; import com.yahoo.path.Path; import com.yahoo.text.Utf8; import com.yahoo.vespa.config.server.deploy.DeployTester; import com.yahoo.vespa.config.server.rpc.RpcServer; import com.yahoo.vespa.config.server.version.VersionState; import com.yahoo.vespa.curator.Curator; import com.yahoo.vespa.curator.mock.MockCurator; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import java.io.File; import java.io.IOException; import java.nio.file.Paths; import java.time.Clock; import java.time.Duration; import java.time.Instant; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.function.BooleanSupplier; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; /** * @author Ulf Lilleengen * @author Harald Musum */ public class ConfigServerBootstrapTest { @Rule public TemporaryFolder temporaryFolder = new TemporaryFolder(); @Test public void testBootstrap() throws Exception { ConfigserverConfig configserverConfig = createConfigserverConfig(temporaryFolder); InMemoryProvisioner provisioner = new InMemoryProvisioner(true, "host0", "host1", "host3"); DeployTester tester = new DeployTester(configserverConfig, provisioner); tester.deployApp("src/test/apps/hosted/"); File versionFile = temporaryFolder.newFile(); VersionState versionState = new VersionState(versionFile); assertTrue(versionState.isUpgraded()); RpcServer rpcServer = createRpcServer(configserverConfig); VipStatus vipStatus = new VipStatus(); // Take a host away so that there are too few for the application, to verify we can still bootstrap provisioner.allocations().values().iterator().next().remove(0); ConfigServerBootstrap bootstrap = new ConfigServerBootstrap(tester.applicationRepository(), rpcServer, versionState, createStateMonitor(), vipStatus, ConfigServerBootstrap.Mode.INITIALIZE_ONLY); assertFalse(vipStatus.isInRotation()); bootstrap.start(); waitUntil(rpcServer::isRunning, "failed waiting for Rpc server running"); waitUntil(() -> bootstrap.status() == StateMonitor.Status.up, "failed waiting for status 'up'"); waitUntil(vipStatus::isInRotation, "failed waiting for server to be in rotation"); bootstrap.deconstruct(); assertEquals(StateMonitor.Status.down, bootstrap.status()); assertFalse(rpcServer.isRunning()); assertFalse(vipStatus.isInRotation()); } @Test public void testBootstrapWhenRedeploymentFails() throws Exception { ConfigserverConfig configserverConfig = createConfigserverConfig(temporaryFolder); DeployTester tester = new DeployTester(configserverConfig); tester.deployApp("src/test/apps/hosted/"); File versionFile = temporaryFolder.newFile(); VersionState versionState = new VersionState(versionFile); assertTrue(versionState.isUpgraded()); // Manipulate 
application package so that it will fail deployment when config server starts java.nio.file.Files.delete(Paths.get(configserverConfig.configServerDBDir()) .resolve("tenants/") .resolve(tester.tenant().getName().value()) .resolve("sessions/2/services.xml")); RpcServer rpcServer = createRpcServer(configserverConfig); VipStatus vipStatus = new VipStatus(); ConfigServerBootstrap bootstrap = new ConfigServerBootstrap(tester.applicationRepository(), rpcServer, versionState, createStateMonitor(), vipStatus, ConfigServerBootstrap.Mode.INITIALIZE_ONLY); assertFalse(vipStatus.isInRotation()); // Call method directly, to be sure that it is finished redeploying all applications and we can check status bootstrap.start(); // App is invalid, bootstrapping was unsuccessful. Status should be 'initializing', // rpc server should not be running and it should be out of rotation assertEquals(StateMonitor.Status.initializing, bootstrap.status()); assertFalse(rpcServer.isRunning()); assertFalse(vipStatus.isInRotation()); bootstrap.deconstruct(); } // Tests that we do not try to create the config model version stored in zookeeper when not on hosted vespa, since // we are then only able to create the latest version @Test public void testBootstrapNonHostedOneConfigModel() throws Exception { ConfigserverConfig configserverConfig = createConfigserverConfigNonHosted(temporaryFolder); String vespaVersion = "1.2.3"; List<ModelFactory> modelFactories = Collections.singletonList(DeployTester.createModelFactory(Version.fromString(vespaVersion))); List<Host> hosts = createHosts(vespaVersion); InMemoryProvisioner provisioner = new InMemoryProvisioner(new Hosts(hosts), true); Curator curator = new MockCurator(); DeployTester tester = new DeployTester(modelFactories, configserverConfig, Clock.systemUTC(), new Zone(Environment.dev, RegionName.defaultName()), provisioner, curator); tester.deployApp("src/test/apps/app/", vespaVersion, Instant.now()); ApplicationId applicationId = tester.applicationId(); File versionFile = temporaryFolder.newFile(); VersionState versionState = new VersionState(versionFile); assertTrue(versionState.isUpgraded()); // Ugly hack, but I see no other way of doing it: // Manipulate application version in zookeeper so that it is an older version than the model we know, which is // the case when upgrading on non-hosted installations curator.set(Path.fromString("/config/v2/tenants/" + applicationId.tenant().value() + "/sessions/2/version"), Utf8.toBytes("1.2.2")); RpcServer rpcServer = createRpcServer(configserverConfig); VipStatus vipStatus = new VipStatus(); ConfigServerBootstrap bootstrap = new ConfigServerBootstrap(tester.applicationRepository(), rpcServer, versionState, createStateMonitor(), vipStatus, ConfigServerBootstrap.Mode.BOOTSTRAP_IN_SEPARATE_THREAD); waitUntil(rpcServer::isRunning, "failed waiting for Rpc server running"); waitUntil(() -> bootstrap.status() == StateMonitor.Status.up, "failed waiting for status 'up'"); waitUntil(vipStatus::isInRotation, "failed waiting for server to be in rotation"); } private void waitUntil(BooleanSupplier booleanSupplier, String messageIfWaitingFails) throws InterruptedException { Duration timeout = Duration.ofSeconds(60); Instant endTime = Instant.now().plus(timeout); while (Instant.now().isBefore(endTime)) { if (booleanSupplier.getAsBoolean()) return; Thread.sleep(10); } throw new RuntimeException(messageIfWaitingFails); } private MockRpc createRpcServer(ConfigserverConfig configserverConfig) throws IOException { return new 
MockRpc(configserverConfig.rpcport(), temporaryFolder.newFolder()); } private StateMonitor createStateMonitor() { return new StateMonitor(new HealthMonitorConfig(new HealthMonitorConfig.Builder().initialStatus("initializing")), new SystemTimer()); } private static ConfigserverConfig createConfigserverConfig(TemporaryFolder temporaryFolder) throws IOException { return createConfigserverConfig(temporaryFolder, true); } private static ConfigserverConfig createConfigserverConfigNonHosted(TemporaryFolder temporaryFolder) throws IOException { return createConfigserverConfig(temporaryFolder, false); } private static ConfigserverConfig createConfigserverConfig(TemporaryFolder temporaryFolder, boolean hosted) throws IOException { return new ConfigserverConfig(new ConfigserverConfig.Builder() .configServerDBDir(temporaryFolder.newFolder("serverdb").getAbsolutePath()) .configDefinitionsDir(temporaryFolder.newFolder("configdefinitions").getAbsolutePath()) .hostedVespa(hosted) .multitenant(hosted) .maxDurationOfBootstrap(2) /* seconds */ .sleepTimeWhenRedeployingFails(0)); /* seconds */ } private List<Host> createHosts(String vespaVersion) { return Arrays.asList(createHost("host1", vespaVersion), createHost("host2", vespaVersion), createHost("host3", vespaVersion)); } private Host createHost(String hostname, String version) { return new Host(hostname, Collections.emptyList(), Optional.empty(), Optional.of(com.yahoo.component.Version.fromString(version))); } public static class MockRpc extends com.yahoo.vespa.config.server.rpc.MockRpc { volatile boolean isRunning = false; MockRpc(int port, File tempDir) { super(port, tempDir); } @Override public void run() { isRunning = true; } @Override public void stop() { isRunning = false; } @Override public boolean isRunning() { return isRunning; } } }
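/*
 * The bootstrap tests above wait for asynchronous state (rpc server running,
 * status 'up', VIP in rotation) through the waitUntil helper. The sketch below
 * is a generalized version of that same polling pattern with a configurable
 * timeout and poll interval; the class name and parameters are illustrative
 * only, not part of the config server code.
 */
import java.time.Duration;
import java.time.Instant;
import java.util.function.BooleanSupplier;

class PollingWait {

    /** Blocks until the condition holds, or throws once the timeout expires. */
    static void await(BooleanSupplier condition, Duration timeout, Duration pollInterval, String failureMessage)
            throws InterruptedException {
        Instant deadline = Instant.now().plus(timeout);
        while (Instant.now().isBefore(deadline)) {
            if (condition.getAsBoolean()) return;
            Thread.sleep(pollInterval.toMillis());
        }
        throw new RuntimeException(failureMessage);
    }
}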
package com.yahoo.search.dispatch.rpc; import ai.vespa.searchlib.searchprotocol.protobuf.SearchProtocol; import ai.vespa.searchlib.searchprotocol.protobuf.SearchProtocol.StringProperty; import ai.vespa.searchlib.searchprotocol.protobuf.SearchProtocol.TensorProperty; import com.google.protobuf.ByteString; import com.google.protobuf.InvalidProtocolBufferException; import com.yahoo.document.GlobalId; import com.yahoo.fs4.GetDocSumsPacket; import com.yahoo.io.GrowableByteBuffer; import com.yahoo.prelude.fastsearch.DocumentDatabase; import com.yahoo.prelude.fastsearch.FastHit; import com.yahoo.prelude.fastsearch.GroupingListHit; import com.yahoo.prelude.fastsearch.VespaBackEndSearcher; import com.yahoo.search.Query; import com.yahoo.search.Result; import com.yahoo.search.grouping.vespa.GroupingExecutor; import com.yahoo.search.query.Model; import com.yahoo.search.query.QueryTree; import com.yahoo.search.query.Ranking; import com.yahoo.search.query.Sorting; import com.yahoo.search.query.Sorting.Order; import com.yahoo.search.result.Coverage; import com.yahoo.search.result.Relevance; import com.yahoo.searchlib.aggregation.Grouping; import com.yahoo.vespa.objects.BufferSerializer; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; import java.util.function.Consumer; public class ProtobufSerialization { private static final int INITIAL_SERIALIZATION_BUFFER_SIZE = 10 * 1024; public static byte[] serializeSearchRequest(Query query, String serverId) { return convertFromQuery(query, serverId).toByteArray(); } private static SearchProtocol.SearchRequest convertFromQuery(Query query, String serverId) { var builder = SearchProtocol.SearchRequest.newBuilder().setHits(query.getHits()).setOffset(query.getOffset()) .setTimeout((int) query.getTimeLeft()); var documentDb = query.getModel().getDocumentDb(); if (documentDb != null) { builder.setDocumentType(documentDb); } builder.setQueryTreeBlob(serializeQueryTree(query.getModel().getQueryTree())); if (query.getGroupingSessionCache() || query.getRanking().getQueryCache()) { // TODO verify that the session key is included whenever rank properties would have been builder.setSessionKey(query.getSessionId(serverId).toString()); } if (query.properties().getBoolean(Model.ESTIMATE)) { builder.setHits(0); } if (GroupingExecutor.hasGroupingList(query)) { List<Grouping> groupingList = GroupingExecutor.getGroupingList(query); BufferSerializer gbuf = new BufferSerializer(new GrowableByteBuffer()); gbuf.putInt(null, groupingList.size()); for (Grouping g : groupingList) { g.serialize(gbuf); } gbuf.getBuf().flip(); builder.setGroupingBlob(ByteString.copyFrom(gbuf.getBuf().getByteBuffer())); } if (query.getGroupingSessionCache()) { builder.setCacheGrouping(true); } mergeToSearchRequestFromRanking(query.getRanking(), builder); return builder.build(); } private static void mergeToSearchRequestFromRanking(Ranking ranking, SearchProtocol.SearchRequest.Builder builder) { builder.setRankProfile(ranking.getProfile()); if (ranking.getQueryCache()) { builder.setCacheQuery(true); } if (ranking.getSorting() != null) { mergeToSearchRequestFromSorting(ranking.getSorting(), builder); } if (ranking.getLocation() != null) { builder.setGeoLocation(ranking.getLocation().toString()); } var featureMap = ranking.getFeatures().asMap(); MapConverter.convertMapStrings(featureMap, builder::addFeatureOverrides); MapConverter.convertMapTensors(featureMap, builder::addTensorFeatureOverrides); mergeRankProperties(ranking, builder::addRankProperties, 
builder::addTensorRankProperties); } private static void mergeToSearchRequestFromSorting(Sorting sorting, SearchProtocol.SearchRequest.Builder builder) { for (var field : sorting.fieldOrders()) { var sortField = SearchProtocol.SortField.newBuilder() .setField(field.getSorter().getName()) .setAscending(field.getSortOrder() == Order.ASCENDING).build(); builder.addSorting(sortField); } } public static SearchProtocol.DocsumRequest.Builder createDocsumRequestBuilder(Query query, String serverId, String summaryClass, boolean includeQueryData) { var builder = SearchProtocol.DocsumRequest.newBuilder() .setTimeout((int) query.getTimeLeft()) .setDumpFeatures(query.properties().getBoolean(Ranking.RANKFEATURES, false)); if (summaryClass != null) { builder.setSummaryClass(summaryClass); } var documentDb = query.getModel().getDocumentDb(); if (documentDb != null) { builder.setDocumentType(documentDb); } var ranking = query.getRanking(); if (ranking.getQueryCache()) { builder.setCacheQuery(true); builder.setSessionKey(query.getSessionId(serverId).toString()); } builder.setRankProfile(query.getRanking().getProfile()); if (includeQueryData) { mergeQueryDataToDocsumRequest(query, builder); } return builder; } public static byte[] serializeDocsumRequest(SearchProtocol.DocsumRequest.Builder builder, List<FastHit> documents) { builder.clearGlobalIds(); for (var hit : documents) { builder.addGlobalIds(ByteString.copyFrom(hit.getGlobalId().getRawId())); } return builder.build().toByteArray(); } private static void mergeQueryDataToDocsumRequest(Query query, SearchProtocol.DocsumRequest.Builder builder) { var ranking = query.getRanking(); var featureMap = ranking.getFeatures().asMap(); builder.setQueryTreeBlob(serializeQueryTree(query.getModel().getQueryTree())); if (ranking.getLocation() != null) { builder.setGeoLocation(ranking.getLocation().toString()); } MapConverter.convertMapStrings(featureMap, builder::addFeatureOverrides); MapConverter.convertMapTensors(featureMap, builder::addTensorFeatureOverrides); MapConverter.convertStringMultiMap(query.getPresentation().getHighlight().getHighlightTerms(), builder::addHighlightTerms); mergeRankProperties(ranking, builder::addRankProperties, builder::addTensorRankProperties); } public static byte[] serializeResult(Result searchResult) { return convertFromResult(searchResult).toByteArray(); } public static Result deserializeToSearchResult(byte[] payload, Query query, VespaBackEndSearcher searcher, int partId, int distKey) throws InvalidProtocolBufferException { var protobuf = SearchProtocol.SearchReply.parseFrom(payload); var result = convertToResult(query, protobuf, searcher.getDocumentDatabase(query), partId, distKey, searcher.getName()); return result; } private static Result convertToResult(Query query, SearchProtocol.SearchReply protobuf, DocumentDatabase documentDatabase, int partId, int distKey, String source) { var result = new Result(query); result.setTotalHitCount(protobuf.getTotalHitCount()); result.setCoverage(convertToCoverage(protobuf)); int hitItems = protobuf.getHitsCount(); var haveGrouping = protobuf.getGroupingBlob() != null && !protobuf.getGroupingBlob().isEmpty(); if(haveGrouping) { hitItems++; } result.hits().ensureCapacity(hitItems); if (haveGrouping) { BufferSerializer buf = new BufferSerializer(new GrowableByteBuffer(protobuf.getGroupingBlob().asReadOnlyByteBuffer())); int cnt = buf.getInt(null); ArrayList<Grouping> list = new ArrayList<>(cnt); for (int i = 0; i < cnt; i++) { Grouping g = new Grouping(); g.deserialize(buf); list.add(g); } 
GroupingListHit hit = new GroupingListHit(list, documentDatabase.getDocsumDefinitionSet()); hit.setQuery(query); result.hits().add(hit); } var sorting = query.getRanking().getSorting(); for (var replyHit : protobuf.getHitsList()) { FastHit hit = new FastHit(); hit.setQuery(query); hit.setRelevance(new Relevance(replyHit.getRelevance())); hit.setGlobalId(new GlobalId(replyHit.getGlobalId().toByteArray())); if (!replyHit.getSortData().isEmpty()) { hit.setSortData(replyHit.getSortData().toByteArray(), sorting); } hit.setFillable(); hit.setCached(false); hit.setPartId(partId); hit.setDistributionKey(distKey); hit.setSource(source); result.hits().add(hit); } if(sorting != null) { result.hits().setSorted(true); } return result; } private static Coverage convertToCoverage(SearchProtocol.SearchReply protobuf) { var coverage = new Coverage(protobuf.getCoverageDocs(), protobuf.getActiveDocs(), 1); coverage.setNodesTried(1).setSoonActive(protobuf.getSoonActiveDocs()); int degradedReason = 0; if (protobuf.getDegradedByMatchPhase()) degradedReason |= Coverage.DEGRADED_BY_MATCH_PHASE; if (protobuf.getDegradedBySoftTimeout()) degradedReason |= Coverage.DEGRADED_BY_TIMEOUT; coverage.setDegradedReason(degradedReason); return coverage; } private static SearchProtocol.SearchReply convertFromResult(Result result) { var builder = SearchProtocol.SearchReply.newBuilder(); var coverage = result.getCoverage(false); if (coverage != null) { builder.setCoverageDocs(coverage.getDocs()).setActiveDocs(coverage.getActive()).setSoonActiveDocs(coverage.getSoonActive()) .setDegradedBySoftTimeout(coverage.isDegradedByTimeout()).setDegradedByMatchPhase(coverage.isDegradedByMatchPhase()); } result.hits().iterator().forEachRemaining(hit -> { var hitBuilder = SearchProtocol.Hit.newBuilder(); if (hit.getRelevance() != null) { hitBuilder.setRelevance(hit.getRelevance().getScore()); } if (hit instanceof FastHit) { FastHit fhit = (FastHit) hit; hitBuilder.setGlobalId(ByteString.copyFrom(fhit.getGlobalId().getRawId())); } builder.addHits(hitBuilder); }); return builder.build(); } private static ByteString serializeQueryTree(QueryTree queryTree) { int bufferSize = INITIAL_SERIALIZATION_BUFFER_SIZE; while (true) { try { ByteBuffer treeBuffer = ByteBuffer.allocate(bufferSize); queryTree.encode(treeBuffer); treeBuffer.flip(); return ByteString.copyFrom(treeBuffer); } catch (java.nio.BufferOverflowException e) { bufferSize *= 2; } } } private static void mergeRankProperties(Ranking ranking, Consumer<StringProperty.Builder> stringProperties, Consumer<TensorProperty.Builder> tensorProperties) { MapConverter.convertMultiMap(ranking.getProperties().asMap(), propB -> { if (!GetDocSumsPacket.sessionIdKey.equals(propB.getName())) { stringProperties.accept(propB); } }, tensorProperties); } }
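/*
 * serializeQueryTree above retries encoding with a doubled buffer whenever the
 * query tree does not fit the current allocation. The sketch below isolates
 * that grow-on-overflow pattern for any ByteBuffer-based writer; the functional
 * interface and class name are illustrative and not part of the Vespa API.
 */
import java.nio.BufferOverflowException;
import java.nio.ByteBuffer;

class GrowingBufferEncoder {

    interface Writer {
        void writeTo(ByteBuffer buffer); // may throw BufferOverflowException if the buffer is too small
    }

    /** Encodes with a doubling buffer until the payload fits, then returns exactly the written bytes. */
    static byte[] encode(Writer writer, int initialCapacity) {
        int capacity = initialCapacity;
        while (true) {
            try {
                ByteBuffer buffer = ByteBuffer.allocate(capacity);
                writer.writeTo(buffer);
                buffer.flip();
                byte[] out = new byte[buffer.remaining()];
                buffer.get(out);
                return out;
            } catch (BufferOverflowException e) {
                capacity *= 2; // retry with a larger buffer, as serializeQueryTree does
            }
        }
    }
}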
package org.jboss.as.controller.access.rbac; import static org.jboss.as.controller.PathAddress.pathAddress; import static org.jboss.as.controller.PathElement.pathElement; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.ADD; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.NAME; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.READ_ATTRIBUTE_OPERATION; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.RESULT; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.util.Arrays; import java.util.Collections; import java.util.List; import org.jboss.as.controller.AbstractAddStepHandler; import org.jboss.as.controller.AbstractRemoveStepHandler; import org.jboss.as.controller.AttributeDefinition; import org.jboss.as.controller.OperationContext; import org.jboss.as.controller.OperationFailedException; import org.jboss.as.controller.OperationStepHandler; import org.jboss.as.controller.ProcessType; import org.jboss.as.controller.SimpleAttributeDefinitionBuilder; import org.jboss.as.controller.SimpleResourceDefinition; import org.jboss.as.controller.access.constraint.ApplicationTypeConfig; import org.jboss.as.controller.access.constraint.SensitivityClassification; import org.jboss.as.controller.access.management.AccessConstraintDefinition; import org.jboss.as.controller.access.management.ApplicationTypeAccessConstraintDefinition; import org.jboss.as.controller.access.management.SensitiveTargetAccessConstraintDefinition; import org.jboss.as.controller.descriptions.NonResolvingResourceDescriptionResolver; import org.jboss.as.controller.operations.common.Util; import org.jboss.as.controller.operations.global.GlobalOperationHandlers; import org.jboss.as.controller.registry.ManagementResourceRegistration; import org.jboss.as.controller.registry.Resource; import org.jboss.dmr.ModelNode; import org.jboss.dmr.ModelType; import org.junit.Before; import org.junit.Test; /** * @author Ladislav Thon <[email protected]> */ public class ReadAttributeTestCase extends AbstractRbacTestBase { // ..._RESOURCE_1 -> default read attribute handler // ..._RESOURCE_2 -> own implementation of read attribute handler public static final String UNCONSTRAINED_RESOURCE_1 = "unconstrained-resource-1"; public static final String SENSITIVE_CONSTRAINED_RESOURCE_1 = "sensitive-constrained-resource-1"; public static final String APPLICATION_CONSTRAINED_RESOURCE_1 = "application-constrained-resource-1"; public static final String UNCONSTRAINED_RESOURCE_2 = "unconstrained-resource-2"; public static final String SENSITIVE_CONSTRAINED_RESOURCE_2 = "sensitive-constrained-resource-2"; public static final String APPLICATION_CONSTRAINED_RESOURCE_2 = "application-constrained-resource-2"; public static final String UNCONSTRAINED_READONLY_ATTRIBUTE = "unconstrained-readonly-attribute"; public static final String VALUE_OF_UNCONSTRAINED_READONLY_ATTRIBUTE = "value of unconstrained-readonly-attribute"; public static final String SENSITIVE_CONSTRAINED_READONLY_ATTRIBUTE = "sensitive-constrained-readonly-attribute"; public static final String VALUE_OF_SENSITIVE_CONSTRAINED_READONLY_ATTRIBUTE = "value of sensitive-constrained-readonly-attribute"; public static final String APPLICATION_CONSTRAINED_READONLY_ATTRIBUTE = "application-constrained-readonly-attribute"; public static final String VALUE_OF_APPLICATION_CONSTRAINED_READONLY_ATTRIBUTE = "value of 
application-constrained-readonly-attribute"; public static final String FOO = "foo"; @Before public void setUp() { ModelNode operation = Util.createOperation(ADD, pathAddress(UNCONSTRAINED_RESOURCE_1, FOO)); executeWithRoles(operation, StandardRole.SUPERUSER); operation = Util.createOperation(ADD, pathAddress(APPLICATION_CONSTRAINED_RESOURCE_1, FOO)); executeWithRoles(operation, StandardRole.SUPERUSER); operation = Util.createOperation(ADD, pathAddress(SENSITIVE_CONSTRAINED_RESOURCE_1, FOO)); executeWithRoles(operation, StandardRole.SUPERUSER); operation = Util.createOperation(ADD, pathAddress(UNCONSTRAINED_RESOURCE_2, FOO)); executeWithRoles(operation, StandardRole.SUPERUSER); operation = Util.createOperation(ADD, pathAddress(APPLICATION_CONSTRAINED_RESOURCE_2, FOO)); executeWithRoles(operation, StandardRole.SUPERUSER); operation = Util.createOperation(ADD, pathAddress(SENSITIVE_CONSTRAINED_RESOURCE_2, FOO)); executeWithRoles(operation, StandardRole.SUPERUSER); } @Test public void testMonitor() { test(false, StandardRole.MONITOR); } @Test public void testOperator() { test(false, StandardRole.OPERATOR); } @Test public void testMaintainer() { test(false, StandardRole.MAINTAINER); } @Test public void testDeployer() { // would be hard to reuse test(..., StandardRole.DEPLOYER) StandardRole role = StandardRole.DEPLOYER; ResultExpectation readExpectation = ResultExpectation.PERMITTED; testOperation(readExpectation, UNCONSTRAINED_RESOURCE_1, UNCONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_UNCONSTRAINED_READONLY_ATTRIBUTE, role); testOperation(readExpectation, UNCONSTRAINED_RESOURCE_2, UNCONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_UNCONSTRAINED_READONLY_ATTRIBUTE, role); testOperation(readExpectation, UNCONSTRAINED_RESOURCE_1, APPLICATION_CONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_APPLICATION_CONSTRAINED_READONLY_ATTRIBUTE, role); testOperation(readExpectation, UNCONSTRAINED_RESOURCE_2, APPLICATION_CONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_APPLICATION_CONSTRAINED_READONLY_ATTRIBUTE, role); readExpectation = ResultExpectation.DENIED; testOperation(readExpectation, UNCONSTRAINED_RESOURCE_1, SENSITIVE_CONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_SENSITIVE_CONSTRAINED_READONLY_ATTRIBUTE, role); testOperation(readExpectation, UNCONSTRAINED_RESOURCE_2, SENSITIVE_CONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_SENSITIVE_CONSTRAINED_READONLY_ATTRIBUTE, role); readExpectation = ResultExpectation.NO_ACCESS; testOperation(readExpectation, SENSITIVE_CONSTRAINED_RESOURCE_1, UNCONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_UNCONSTRAINED_READONLY_ATTRIBUTE, role); testOperation(readExpectation, SENSITIVE_CONSTRAINED_RESOURCE_2, UNCONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_UNCONSTRAINED_READONLY_ATTRIBUTE, role); testOperation(readExpectation, SENSITIVE_CONSTRAINED_RESOURCE_1, SENSITIVE_CONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_SENSITIVE_CONSTRAINED_READONLY_ATTRIBUTE, role); testOperation(readExpectation, SENSITIVE_CONSTRAINED_RESOURCE_2, SENSITIVE_CONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_SENSITIVE_CONSTRAINED_READONLY_ATTRIBUTE, role); testOperation(readExpectation, SENSITIVE_CONSTRAINED_RESOURCE_1, APPLICATION_CONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_APPLICATION_CONSTRAINED_READONLY_ATTRIBUTE, role); testOperation(readExpectation, SENSITIVE_CONSTRAINED_RESOURCE_2, APPLICATION_CONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_APPLICATION_CONSTRAINED_READONLY_ATTRIBUTE, role); readExpectation = ResultExpectation.PERMITTED; testOperation(readExpectation, APPLICATION_CONSTRAINED_RESOURCE_1, UNCONSTRAINED_READONLY_ATTRIBUTE, 
VALUE_OF_UNCONSTRAINED_READONLY_ATTRIBUTE, role); testOperation(readExpectation, APPLICATION_CONSTRAINED_RESOURCE_2, UNCONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_UNCONSTRAINED_READONLY_ATTRIBUTE, role); readExpectation = ResultExpectation.DENIED; testOperation(readExpectation, APPLICATION_CONSTRAINED_RESOURCE_1, SENSITIVE_CONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_SENSITIVE_CONSTRAINED_READONLY_ATTRIBUTE, role); testOperation(readExpectation, APPLICATION_CONSTRAINED_RESOURCE_2, SENSITIVE_CONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_SENSITIVE_CONSTRAINED_READONLY_ATTRIBUTE, role); readExpectation = ResultExpectation.PERMITTED; testOperation(readExpectation, APPLICATION_CONSTRAINED_RESOURCE_1, APPLICATION_CONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_APPLICATION_CONSTRAINED_READONLY_ATTRIBUTE, role); testOperation(readExpectation, APPLICATION_CONSTRAINED_RESOURCE_2, APPLICATION_CONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_APPLICATION_CONSTRAINED_READONLY_ATTRIBUTE, role); } @Test public void testAdministrator() { test(true, StandardRole.ADMINISTRATOR); } @Test public void testAuditor() { test(true, StandardRole.AUDITOR); } @Test public void testSuperuser() { test(true, StandardRole.SUPERUSER); } @Test public void testMonitorOperator() { test(false, StandardRole.MONITOR, StandardRole.OPERATOR); } @Test public void testMonitorAdministrator() { test(true, StandardRole.MONITOR, StandardRole.ADMINISTRATOR); } @Test public void testAdministratorAuditor() { test(true, StandardRole.ADMINISTRATOR, StandardRole.AUDITOR); } private void testOperation(ResultExpectation resultExpectation, String resourceName, String attributeName, String attributeValue, StandardRole... roles) { ModelNode operation = Util.createOperation(READ_ATTRIBUTE_OPERATION, pathAddress(resourceName, FOO)); operation.get(NAME).set(attributeName); ModelNode result = executeWithRoles(operation, roles); assertOperationResult(result, resultExpectation); if (resultExpectation == ResultExpectation.PERMITTED) { assertEquals(attributeValue, result.get(RESULT).asString()); } else { assertFalse(result.hasDefined(RESULT)); } } private void test(boolean canAccessSensitive, StandardRole... roles) { testOperation(ResultExpectation.PERMITTED, UNCONSTRAINED_RESOURCE_1, UNCONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_UNCONSTRAINED_READONLY_ATTRIBUTE, roles); testOperation(ResultExpectation.PERMITTED, UNCONSTRAINED_RESOURCE_2, UNCONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_UNCONSTRAINED_READONLY_ATTRIBUTE, roles); testOperation(canAccessSensitive ? ResultExpectation.PERMITTED : ResultExpectation.DENIED, UNCONSTRAINED_RESOURCE_1, SENSITIVE_CONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_SENSITIVE_CONSTRAINED_READONLY_ATTRIBUTE, roles); testOperation(canAccessSensitive ? ResultExpectation.PERMITTED : ResultExpectation.DENIED, UNCONSTRAINED_RESOURCE_2, SENSITIVE_CONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_SENSITIVE_CONSTRAINED_READONLY_ATTRIBUTE, roles); testOperation(ResultExpectation.PERMITTED, UNCONSTRAINED_RESOURCE_1, APPLICATION_CONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_APPLICATION_CONSTRAINED_READONLY_ATTRIBUTE, roles); testOperation(ResultExpectation.PERMITTED, UNCONSTRAINED_RESOURCE_2, APPLICATION_CONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_APPLICATION_CONSTRAINED_READONLY_ATTRIBUTE, roles); testOperation(canAccessSensitive ? ResultExpectation.PERMITTED : ResultExpectation.NO_ACCESS, SENSITIVE_CONSTRAINED_RESOURCE_1, UNCONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_UNCONSTRAINED_READONLY_ATTRIBUTE, roles); testOperation(canAccessSensitive ? 
ResultExpectation.PERMITTED : ResultExpectation.NO_ACCESS, SENSITIVE_CONSTRAINED_RESOURCE_2, UNCONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_UNCONSTRAINED_READONLY_ATTRIBUTE, roles); testOperation(canAccessSensitive ? ResultExpectation.PERMITTED : ResultExpectation.NO_ACCESS, SENSITIVE_CONSTRAINED_RESOURCE_1, SENSITIVE_CONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_SENSITIVE_CONSTRAINED_READONLY_ATTRIBUTE, roles); testOperation(canAccessSensitive ? ResultExpectation.PERMITTED : ResultExpectation.NO_ACCESS, SENSITIVE_CONSTRAINED_RESOURCE_2, SENSITIVE_CONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_SENSITIVE_CONSTRAINED_READONLY_ATTRIBUTE, roles); testOperation(canAccessSensitive ? ResultExpectation.PERMITTED : ResultExpectation.NO_ACCESS, SENSITIVE_CONSTRAINED_RESOURCE_1, APPLICATION_CONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_APPLICATION_CONSTRAINED_READONLY_ATTRIBUTE, roles); testOperation(canAccessSensitive ? ResultExpectation.PERMITTED : ResultExpectation.NO_ACCESS, SENSITIVE_CONSTRAINED_RESOURCE_2, APPLICATION_CONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_APPLICATION_CONSTRAINED_READONLY_ATTRIBUTE, roles); testOperation(ResultExpectation.PERMITTED, APPLICATION_CONSTRAINED_RESOURCE_1, UNCONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_UNCONSTRAINED_READONLY_ATTRIBUTE, roles); testOperation(ResultExpectation.PERMITTED, APPLICATION_CONSTRAINED_RESOURCE_2, UNCONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_UNCONSTRAINED_READONLY_ATTRIBUTE, roles); testOperation(canAccessSensitive ? ResultExpectation.PERMITTED : ResultExpectation.DENIED, APPLICATION_CONSTRAINED_RESOURCE_1, SENSITIVE_CONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_SENSITIVE_CONSTRAINED_READONLY_ATTRIBUTE, roles); testOperation(canAccessSensitive ? ResultExpectation.PERMITTED : ResultExpectation.DENIED, APPLICATION_CONSTRAINED_RESOURCE_2, SENSITIVE_CONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_SENSITIVE_CONSTRAINED_READONLY_ATTRIBUTE, roles); testOperation(ResultExpectation.PERMITTED, APPLICATION_CONSTRAINED_RESOURCE_1, APPLICATION_CONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_APPLICATION_CONSTRAINED_READONLY_ATTRIBUTE, roles); testOperation(ResultExpectation.PERMITTED, APPLICATION_CONSTRAINED_RESOURCE_2, APPLICATION_CONSTRAINED_READONLY_ATTRIBUTE, VALUE_OF_APPLICATION_CONSTRAINED_READONLY_ATTRIBUTE, roles); } // model definition private static final SensitivityClassification MY_SENSITIVITY = new SensitivityClassification("test", "my-sensitivity", true, true, true); private static final AccessConstraintDefinition MY_SENSITIVE_CONSTRAINT = new SensitiveTargetAccessConstraintDefinition(MY_SENSITIVITY); private static final ApplicationTypeConfig MY_APPLICATION_TYPE = new ApplicationTypeConfig("test", "my-application-type", true); private static final AccessConstraintDefinition MY_APPLICATION_CONSTRAINT = new ApplicationTypeAccessConstraintDefinition(MY_APPLICATION_TYPE); @Override protected void initModel(Resource rootResource, ManagementResourceRegistration registration) { GlobalOperationHandlers.registerGlobalOperations(registration, ProcessType.EMBEDDED_SERVER); registration.registerSubModel(new TestResourceDefinition(UNCONSTRAINED_RESOURCE_1, true)); registration.registerSubModel(new TestResourceDefinition(SENSITIVE_CONSTRAINED_RESOURCE_1, true, MY_SENSITIVE_CONSTRAINT)); registration.registerSubModel(new TestResourceDefinition(APPLICATION_CONSTRAINED_RESOURCE_1, true, MY_APPLICATION_CONSTRAINT)); registration.registerSubModel(new TestResourceDefinition(UNCONSTRAINED_RESOURCE_2, false)); registration.registerSubModel(new TestResourceDefinition(SENSITIVE_CONSTRAINED_RESOURCE_2, 
false, MY_SENSITIVE_CONSTRAINT)); registration.registerSubModel(new TestResourceDefinition(APPLICATION_CONSTRAINED_RESOURCE_2, false, MY_APPLICATION_CONSTRAINT)); } private static final class TestResourceDefinition extends SimpleResourceDefinition { private final List<AccessConstraintDefinition> constraintDefinitions; private final boolean useDefaultReadAttributeHandler; TestResourceDefinition(String path, boolean useDefaultReadAttributeHandler, AccessConstraintDefinition... constraintDefinitions) { super(pathElement(path), new NonResolvingResourceDescriptionResolver(), new AbstractAddStepHandler() {}, new AbstractRemoveStepHandler() {} ); this.useDefaultReadAttributeHandler = useDefaultReadAttributeHandler; this.constraintDefinitions = Collections.unmodifiableList(Arrays.asList(constraintDefinitions)); } @Override public void registerAttributes(ManagementResourceRegistration resourceRegistration) { super.registerAttributes(resourceRegistration); OperationStepHandler readAttributeHandler = useDefaultReadAttributeHandler ? null : new TestReadAttributeHandler(new ModelNode(VALUE_OF_UNCONSTRAINED_READONLY_ATTRIBUTE)); ModelNode defaultValue = useDefaultReadAttributeHandler ? new ModelNode(VALUE_OF_UNCONSTRAINED_READONLY_ATTRIBUTE) : null; AttributeDefinition attributeDefinition = SimpleAttributeDefinitionBuilder .create(UNCONSTRAINED_READONLY_ATTRIBUTE, ModelType.STRING) .setDefaultValue(defaultValue) .build(); resourceRegistration.registerReadOnlyAttribute(attributeDefinition, readAttributeHandler); readAttributeHandler = useDefaultReadAttributeHandler ? null : new TestReadAttributeHandler(new ModelNode(VALUE_OF_SENSITIVE_CONSTRAINED_READONLY_ATTRIBUTE)); defaultValue = useDefaultReadAttributeHandler ? new ModelNode(VALUE_OF_SENSITIVE_CONSTRAINED_READONLY_ATTRIBUTE) : null; attributeDefinition = SimpleAttributeDefinitionBuilder .create(SENSITIVE_CONSTRAINED_READONLY_ATTRIBUTE, ModelType.STRING) .setDefaultValue(defaultValue) .setAccessConstraints(MY_SENSITIVE_CONSTRAINT) .build(); resourceRegistration.registerReadOnlyAttribute(attributeDefinition, readAttributeHandler); readAttributeHandler = useDefaultReadAttributeHandler ? null : new TestReadAttributeHandler(new ModelNode(VALUE_OF_APPLICATION_CONSTRAINED_READONLY_ATTRIBUTE)); defaultValue = useDefaultReadAttributeHandler ? new ModelNode(VALUE_OF_APPLICATION_CONSTRAINED_READONLY_ATTRIBUTE) : null; attributeDefinition = SimpleAttributeDefinitionBuilder .create(APPLICATION_CONSTRAINED_READONLY_ATTRIBUTE, ModelType.STRING) .setDefaultValue(defaultValue) .setAccessConstraints(MY_APPLICATION_CONSTRAINT) .build(); resourceRegistration.registerReadOnlyAttribute(attributeDefinition, readAttributeHandler); } @Override public List<AccessConstraintDefinition> getAccessConstraints() { return constraintDefinitions; } } private static final class TestReadAttributeHandler implements OperationStepHandler { private final ModelNode value; private TestReadAttributeHandler(ModelNode value) { this.value = value; } @Override public void execute(OperationContext context, ModelNode operation) throws OperationFailedException { context.getResult().set(value); context.stepCompleted(); } } }
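/*
 * testOperation above builds its read-attribute requests through the Util and
 * PathAddress helpers. For reference, the sketch below shows the same request
 * expressed as a raw detyped ModelNode, which is what the management kernel and
 * the RBAC checks ultimately see; the address and attribute passed in are up to
 * the caller and the class name is illustrative, not taken from this test.
 */
import org.jboss.dmr.ModelNode;

class ReadAttributeOperationSketch {

    static ModelNode readAttribute(String resourceType, String resourceName, String attributeName) {
        ModelNode op = new ModelNode();
        op.get("operation").set("read-attribute");          // operation name
        op.get("address").add(resourceType, resourceName);  // single path element, e.g. (type, name)
        op.get("name").set(attributeName);                  // attribute to read
        return op;
    }
}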
package org.jivesoftware.sparkplugin; import org.jivesoftware.smack.XMPPException; import org.jivesoftware.smackx.jingle.JingleSession; import org.jivesoftware.spark.ChatManager; import org.jivesoftware.spark.SparkManager; import org.jivesoftware.spark.component.RolloverButton; import org.jivesoftware.spark.component.tabbedPane.SparkTab; import org.jivesoftware.spark.ui.ChatRoom; import javax.swing.BorderFactory; import javax.swing.JLabel; import javax.swing.JPanel; import java.awt.Color; import java.awt.Dimension; import java.awt.Font; import java.awt.GradientPaint; import java.awt.Graphics; import java.awt.Graphics2D; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.Insets; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.awt.image.BufferedImage; import java.text.SimpleDateFormat; import java.util.Date; /** * The UI for calls with Roster members. * * @author Derek DeMoro */ public class JingleRoom extends JPanel { private JLabel connectedLabel; private String phoneNumber; private JLabel phoneLabel; private PreviousConversationPanel historyPanel; private boolean onHold; private boolean muted; private CallPanelButton muteButton; private RolloverButton hangUpButton; private final SimpleDateFormat formatter = new SimpleDateFormat("MM/dd/yyyy h:mm a"); private static String CONNECTED = "Connected"; protected final Color greenColor = new Color(91, 175, 41); protected final Color orangeColor = new Color(229, 139, 11); protected final Color blueColor = new Color(64, 103, 162); protected final Color redColor = new Color(211, 0, 0); private boolean callWasTransferred; private ChatRoom chatRoom; private JingleSession session; private JavaMixer mixer = new JavaMixer(); public JingleRoom(JingleSession session, ChatRoom chatRoom) { this.session = session; this.chatRoom = chatRoom; setLayout(new GridBagLayout()); setBorder(BorderFactory.createLineBorder(Color.lightGray)); // Build Top Layer final JPanel topPanel = buildTopPanel(); add(topPanel, new GridBagConstraints(1, 5, 1, 1, 0.0, 0.0, GridBagConstraints.WEST, GridBagConstraints.HORIZONTAL, new Insets(2, 2, 2, 2), 0, 0)); // Build Control Panel final JPanel controlPanel = buildControlPanel(); add(controlPanel, new GridBagConstraints(1, 6, 1, 1, 1.0, 1.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(2, 2, 2, 2), 0, 0)); // Add Previous Conversation historyPanel = new PreviousConversationPanel(); historyPanel.addPreviousConversations(""); // add(historyPanel, new GridBagConstraints(1, 8, 1, 1, 0.0, 1.0, GridBagConstraints.SOUTH, GridBagConstraints.BOTH, new Insets(2, 2, 2, 2), 0, 100)); // Setup default settings setupDefaults(); } /** * Builds the information block. * * @return the UI representing the Information Block. 
*/ private JPanel buildTopPanel() { final JPanel panel = new JPanel(new GridBagLayout()); panel.setOpaque(false); // Add phone label phoneLabel = new JLabel(); phoneLabel.setFont(new Font("Arial", Font.BOLD, 13)); phoneLabel.setForeground(new Color(64, 103, 162)); panel.add(phoneLabel, new GridBagConstraints(0, 0, 1, 1, 0.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.NONE, new Insets(0, 2, 2, 2), 0, 0)); // Add Connected Label connectedLabel = new JLabel(CONNECTED); connectedLabel.setFont(new Font("Arial", Font.BOLD, 13)); connectedLabel.setHorizontalTextPosition(JLabel.CENTER); connectedLabel.setHorizontalAlignment(JLabel.CENTER); panel.add(connectedLabel, new GridBagConstraints(0, 1, 2, 1, 1.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, new Insets(2, 2, 2, 2), 0, 0)); return panel; } /** * Builds the Control Panel. * * @return the control panel. */ private JPanel buildControlPanel() { // Add Control Panel final JPanel mainPanel = new JPanel(new GridBagLayout()); mainPanel.setOpaque(false); // Add Input Volume To Control Panel final ControlPanel inputPanel = new ControlPanel(new GridBagLayout()); final JLabel inputIcon = new JLabel(JinglePhoneRes.getImageIcon("MICROPHONE_IMAGE")); inputPanel.add(mixer.getPrefferedInputVolume(), new GridBagConstraints(0, 0, 1, 1, 0.0, 1.0, GridBagConstraints.CENTER, GridBagConstraints.VERTICAL, new Insets(2, 2, 2, 2), 0, 0)); inputPanel.add(inputIcon, new GridBagConstraints(0, 1, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.NONE, new Insets(2, 2, 2, 2), 0, 0)); // Add Output Volume To Control Panel final ControlPanel outputPanel = new ControlPanel(new GridBagLayout()); final JLabel outputIcon = new JLabel(JinglePhoneRes.getImageIcon("SPEAKER_IMAGE")); outputPanel.add(mixer.getPrefferedMasterVolume(), new GridBagConstraints(0, 0, 1, 1, 0.0, 1.0, GridBagConstraints.CENTER, GridBagConstraints.VERTICAL, new Insets(2, 2, 2, 2), 0, 0)); outputPanel.add(outputIcon, new GridBagConstraints(0, 1, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.NONE, new Insets(2, 2, 2, 2), 0, 0)); // Build ControlPanel List muteButton = new CallPanelButton(JinglePhoneRes.getImageIcon("MUTE_IMAGE").getImage(), "Mute"); muteButton.setToolTipText("Mute this call."); // Add Components to Main Panel mainPanel.add(inputPanel, new GridBagConstraints(0, 0, 1, 1, 0.0, 0.2, GridBagConstraints.NORTHWEST, GridBagConstraints.VERTICAL, new Insets(2, 1, 2, 1), 0, 50)); mainPanel.add(outputPanel, new GridBagConstraints(1, 0, 1, 1, 0.0, 0.2, GridBagConstraints.NORTHWEST, GridBagConstraints.VERTICAL, new Insets(2, 1, 2, 1), 0, 50)); mainPanel.add(muteButton, new GridBagConstraints(2, 0, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, new Insets(2, 1, 2, 1), 0, 0)); // Add End Call button hangUpButton = new RolloverButton(" End Call", JinglePhoneRes.getImageIcon("HANG_UP_PHONE_77x24_IMAGE")); hangUpButton.setHorizontalTextPosition(JLabel.CENTER); hangUpButton.setFont(new Font("Dialog", Font.BOLD, 11)); hangUpButton.setForeground(new Color(153, 32, 10)); hangUpButton.setMargin(new Insets(0, 0, 0, 0)); mainPanel.add(hangUpButton, new GridBagConstraints(0, 1, 3, 1, 0.0, 0.8, GridBagConstraints.NORTH, GridBagConstraints.NONE, new Insets(2, 2, 2, 2), 0, 0)); return mainPanel; } public void setupDefaults() { muteButton.addMouseListener(new MouseAdapter() { public void mousePressed(MouseEvent mouseEvent) { toggleMute(); } }); hangUpButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent 
actionEvent) { hangUpButton.setEnabled(false); try { session.terminate(); } catch (XMPPException e) { e.printStackTrace(); } } }); } /** * Called when a new call is established. */ private void callStarted() { // Show History historyPanel.removeAll(); historyPanel.addPreviousConversations(phoneNumber); hangUpButton.setEnabled(true); muteButton.setEnabled(true); setStatus(CONNECTED, false); // Add notification to ChatRoom if one exists. if (chatRoom != null) { final SimpleDateFormat formatter = new SimpleDateFormat("h:mm a"); String time = formatter.format(new Date()); chatRoom.getTranscriptWindow().insertNotificationMessage("Call started at " + time, ChatManager.NOTIFICATION_COLOR); } } /** * Called when the call is ended. This does basic container cleanup. */ public void callEnded() { if (!callWasTransferred) { historyPanel.callEnded(); setStatus("Call Ended", redColor); } hangUpButton.setEnabled(false); hangUpButton.setOpaque(false); muteButton.setEnabled(false); muteButton.setOpaque(false); setStatus("Call Ended", redColor); // Add notification to ChatRoom if one exists. if (chatRoom != null) { final SimpleDateFormat formatter = new SimpleDateFormat("h:mm a"); String time = formatter.format(new Date()); chatRoom.getTranscriptWindow().insertNotificationMessage("Call ended at " + time, ChatManager.NOTIFICATION_COLOR); } } private void setStatus(String status, boolean alert) { if (alert) { connectedLabel.setForeground(orangeColor); } else { connectedLabel.setForeground(greenColor); } connectedLabel.setText(status); } private void setStatus(String status, Color color) { connectedLabel.setForeground(color); connectedLabel.setText(status); } private void toggleMute() { if (muted) { muted = false; muteButton.setToolTipText("Mute"); muteButton.setButtonSelected(false); setStatus(CONNECTED, false); // Change state JingleStateManager.getInstance().addJingleSession(chatRoom, JingleStateManager.JingleRoomState.inJingleCall); } else { muted = true; muteButton.setToolTipText("Unmute"); muteButton.setButtonSelected(true); setStatus("Muted", true); // Change state JingleStateManager.getInstance().addJingleSession(chatRoom, JingleStateManager.JingleRoomState.muted); } muteButton.invalidate(); muteButton.validate(); muteButton.repaint(); // Notify state change SparkManager.getChatManager().notifySparkTabHandlers(chatRoom); } public void actionPerformed(ActionEvent e) { } public void paintComponent(Graphics g) { BufferedImage cache = new BufferedImage(2, getHeight(), BufferedImage.TYPE_INT_RGB); Graphics2D g2d = cache.createGraphics(); GradientPaint paint = new GradientPaint(0, 0, new Color(241, 245, 250), 0, getHeight(), new Color(244, 250, 255), true); g2d.setPaint(paint); g2d.fillRect(0, 0, getWidth(), getHeight()); g2d.dispose(); g.drawImage(cache, 0, 0, getWidth(), getHeight(), null); } public Dimension getPreferredSize() { Dimension dim = super.getPreferredSize(); dim.width = 0; return dim; } public SparkTab getSparkTab() { int index = SparkManager.getChatManager().getChatContainer().indexOfComponent(chatRoom); return SparkManager.getChatManager().getChatContainer().getTabAt(index); } }
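/*
 * callStarted() and callEnded() above each build a SimpleDateFormat("h:mm a")
 * inline to timestamp the transcript notification. The sketch below simply
 * factors that duplication into a helper; it is an illustrative refactoring,
 * not code from the plugin. Creating a fresh formatter per call is deliberate,
 * since SimpleDateFormat instances are not thread-safe.
 */
import java.text.SimpleDateFormat;
import java.util.Date;

class CallTimestamps {

    /** Returns a message such as "Call started at 3:07 PM". */
    static String timestamped(String prefix) {
        return prefix + " at " + new SimpleDateFormat("h:mm a").format(new Date());
    }
}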
package io.quarkus.deployment.dev.testing; import static io.quarkus.deployment.dev.testing.TestConsoleHandler.MessageFormat.BLUE; import static io.quarkus.deployment.dev.testing.TestConsoleHandler.MessageFormat.GREEN; import static io.quarkus.deployment.dev.testing.TestConsoleHandler.MessageFormat.RED; import static io.quarkus.deployment.dev.testing.TestConsoleHandler.MessageFormat.RESET; import static io.quarkus.deployment.dev.testing.TestConsoleHandler.MessageFormat.helpOption; import static io.quarkus.deployment.dev.testing.TestConsoleHandler.MessageFormat.statusFooter; import static io.quarkus.deployment.dev.testing.TestConsoleHandler.MessageFormat.statusHeader; import java.io.IOException; import java.nio.file.Path; import java.text.SimpleDateFormat; import java.util.Date; import java.util.LinkedHashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Consumer; import org.jboss.logging.Logger; import org.junit.platform.engine.TestExecutionResult; import org.junit.platform.launcher.TestIdentifier; import io.quarkus.bootstrap.classloading.QuarkusClassLoader; import io.quarkus.deployment.dev.ClassScanResult; import io.quarkus.deployment.dev.RuntimeUpdatesProcessor; import io.quarkus.deployment.dev.console.AeshConsole; import io.quarkus.dev.console.InputHandler; import io.quarkus.dev.console.QuarkusConsole; import io.quarkus.dev.spi.DevModeType; public class TestConsoleHandler implements TestListener { private static final Logger log = Logger.getLogger("io.quarkus.test"); public static final String PAUSED_PROMPT = "Tests paused, press [" + BLUE + "r" + RESET + "] to resume, [" + BLUE + "h" + RESET + "] for more options>" + RESET; public static final String PAUSED_PROMPT_NO_HTTP = "Tests paused, press [" + BLUE + "r" + RESET + "] to resume, [" + BLUE + "s" + RESET + "] to restart with changes, [" + BLUE + "h" + RESET + "] for more options>" + RESET; public static final String FIRST_RUN_PROMPT = BLUE + "Running tests for the first time" + RESET; public static final String RUNNING_PROMPT = "Press [" + BLUE + "r" + RESET + "] to re-run, [" + BLUE + "v" + RESET + "] to view full results, [" + BLUE + "p" + RESET + "] to pause, [" + BLUE + "h" + RESET + "] for more options>"; public static final String RUNNING_PROMPT_NO_HTTP = "Press [" + BLUE + "r" + RESET + "] to re-run, [" + BLUE + "v" + RESET + "] to view full results, [" + BLUE + "p" + RESET + "] to pause, [" + BLUE + "s" + RESET + "] to restart with changes, [" + BLUE + "h" + RESET + "] for more options>"; final DevModeType devModeType; boolean firstRun = true; boolean disabled = true; boolean currentlyFailing = false; volatile InputHandler.ConsoleStatus promptHandler; volatile TestController testController; private String lastResults; /** * If HTTP is not present we add the 'press s to reload' option to the prompt * to make it clear to users they can restart their apps. 
*/ private final boolean hasHttp; public TestConsoleHandler(DevModeType devModeType, boolean hasHttp) { this.devModeType = devModeType; this.hasHttp = hasHttp; } public void install() { QuarkusConsole.INSTANCE.pushInputHandler(inputHandler); QuarkusClassLoader classLoader = (QuarkusClassLoader) getClass().getClassLoader(); classLoader.addCloseTask(new Runnable() { @Override public void run() { QuarkusConsole.INSTANCE.popInputHandler(); } }); } private final InputHandler inputHandler = new InputHandler() { @Override public void handleInput(int[] keys) { //common commands, work every time for (int k : keys) { if (k == 'h') { printUsage(); } else if (k == 'i' && devModeType != DevModeType.TEST_ONLY) { testController.toggleInstrumentation(); } else if (k == 'l' && devModeType != DevModeType.TEST_ONLY) { RuntimeUpdatesProcessor.INSTANCE.toggleLiveReloadEnabled(); } else if (k == 's' && devModeType != DevModeType.TEST_ONLY) { try { RuntimeUpdatesProcessor.INSTANCE.doScan(true, true); } catch (IOException e) { log.error("Live reload scan failed", e); } } else if (k == 'q') { //we don't call Quarkus.exit() here as that would just result //in a 'press any key to restart' prompt new Thread(new Runnable() { @Override public void run() { System.exit(0); } }, "Quarkus exit thread").run(); } else { if (disabled) { if (k == 'r') { promptHandler.setStatus(BLUE + "Starting tests" + RESET); TestSupport.instance().get().start(); } } else if (!firstRun) { //TODO: some of this is a bit yuck, this needs some work if (k == 'r') { testController.runAllTests(); } else if (k == 'f') { testController.runFailedTests(); } else if (k == 'v') { testController.printFullResults(); } else if (k == 'o' && devModeType != DevModeType.TEST_ONLY) { testController.toggleTestOutput(); } else if (k == 'p') { TestSupport.instance().get().stop(); } else if (k == 'b') { testController.toggleBrokenOnlyMode(); } } } } } @Override public void promptHandler(InputHandler.ConsoleStatus promptHandler) { TestConsoleHandler.this.promptHandler = promptHandler; } }; @Override public void listenerRegistered(TestController testController) { this.testController = testController; promptHandler.setPrompt(hasHttp ? 
PAUSED_PROMPT : PAUSED_PROMPT_NO_HTTP); } public void printUsage() { System.out.println(RESET + "\nThe following commands are available:"); if (disabled) { System.out.println(helpOption("r", "Resume testing")); } else { System.out.println(helpOption("r", "Re-run all tests")); System.out.println(helpOption("f", "Re-run failed tests")); System.out.println(helpOption("b", "Toggle 'broken only' mode, where only failing tests are run", testController.isBrokenOnlyMode())); System.out.println(helpOption("v", "Print failures from the last test run")); if (devModeType != DevModeType.TEST_ONLY) { System.out.println(helpOption("o", "Toggle test output", testController.isDisplayTestOutput())); } System.out.println(helpOption("p", "Pause tests")); } if (devModeType != DevModeType.TEST_ONLY) { System.out .println(helpOption("i", "Toggle instrumentation based reload", testController.isInstrumentationEnabled())); System.out.println(helpOption("l", "Toggle live reload", testController.isLiveReloadEnabled())); System.out.println(helpOption("s", "Force restart with any changes")); } System.out.println(helpOption("h", "Display this help")); System.out.println(helpOption("q", "Quit")); } @Override public void testsEnabled() { disabled = false; if (firstRun) { promptHandler.setStatus(null); promptHandler.setResults(null); promptHandler.setPrompt(FIRST_RUN_PROMPT); } else { promptHandler.setPrompt(hasHttp ? RUNNING_PROMPT : RUNNING_PROMPT_NO_HTTP); promptHandler.setResults(lastResults); promptHandler.setStatus(null); } } @Override public void testsDisabled() { disabled = true; promptHandler.setPrompt(hasHttp ? PAUSED_PROMPT : PAUSED_PROMPT_NO_HTTP); promptHandler.setStatus(null); promptHandler.setResults(null); } @Override public void testCompileFailed(String message) { promptHandler.setCompileError(message); } @Override public void testCompileSucceeded() { promptHandler.setCompileError(null); } @Override public void testRunStarted(Consumer<TestRunListener> listenerConsumer) { AtomicLong totalNoTests = new AtomicLong(); AtomicLong skipped = new AtomicLong(); AtomicLong methodCount = new AtomicLong(); AtomicLong failureCount = new AtomicLong(); listenerConsumer.accept(new TestRunListener() { @Override public void runStarted(long toRun) { totalNoTests.set(toRun); promptHandler.setStatus("Running 0/" + toRun + "."); } @Override public void testComplete(TestResult result) { if (result.getTestExecutionResult().getStatus() == TestExecutionResult.Status.FAILED) { failureCount.incrementAndGet(); } else if (result.getTestExecutionResult().getStatus() == TestExecutionResult.Status.ABORTED) { skipped.incrementAndGet(); } methodCount.incrementAndGet(); } @Override public void runComplete(TestRunResults results) { firstRun = false; SimpleDateFormat df = new SimpleDateFormat("kk:mm:ss"); String end = " Tests completed at " + df.format(new Date()); if (results.getTrigger() != null) { ClassScanResult trigger = results.getTrigger(); Set<Path> paths = new LinkedHashSet<>(); paths.addAll(trigger.getChangedClasses()); paths.addAll(trigger.getAddedClasses()); paths.addAll(trigger.getDeletedClasses()); if (paths.size() == 1) { end = end + " due to changes to " + paths.iterator().next().getFileName() + "."; } else if (paths.size() > 1) { end = end + " due to changes to " + paths.iterator().next().getFileName() + " and " + (paths.size() - 1) + " other files."; } else { //should never happen end = end + "."; } } else { end = end + "."; } if (results.getTotalCount() == 0) { lastResults = BLUE + "No tests found" + RESET; } else if 
(results.getFailedCount() == 0 && results.getPassedCount() == 0) { lastResults = String.format(BLUE + "All %d tests were skipped" + RESET, results.getSkippedCount()); } else if (results.getCurrentFailing().isEmpty()) { if (currentlyFailing) { log.info(GREEN + "All tests are now passing" + RESET); } currentlyFailing = false; lastResults = String.format( GREEN + "All %d " + pluralize("test is", "tests are", results.getPassedCount()) + " passing " + "(%d skipped), " + "%d " + pluralize("test was", "tests were", results.getCurrentTotalCount() - results.getCurrentSkippedCount()) + " run in %dms." + end + RESET, results.getPassedCount(), results.getSkippedCount(), results.getCurrentTotalCount() - results.getCurrentSkippedCount(), results.getTotalTime()); } else { currentlyFailing = true; //TODO: this should not use the logger, it should print a nicer status log.error(statusHeader("TEST REPORT #" + results.getId())); for (Map.Entry<String, TestClassResult> classEntry : results.getCurrentFailing().entrySet()) { for (TestResult test : classEntry.getValue().getFailing()) { log.error( RED + "Test " + test.getDisplayName() + " failed \n" + RESET, test.getTestExecutionResult().getThrowable().get()); } } log.error( statusFooter(RED + results.getCurrentFailedCount() + " TESTS FAILED")); lastResults = String.format( RED + "%d " + pluralize("test", "tests", results.getCurrentFailedCount()) + " failed" + RESET + " (" + GREEN + "%d passing" + RESET + ", " + BLUE + "%d skipped" + RESET + "), " + RED + "%d " + pluralize("test was", "tests were", results.getCurrentTotalCount() - results.getCurrentSkippedCount()) + " run in %dms." + end + RESET, results.getCurrentFailedCount(), results.getPassedCount(), results.getSkippedCount(), results.getCurrentTotalCount() - results.getCurrentSkippedCount(), results.getTotalTime()); } //this will re-print when using the basic console if (!disabled) { promptHandler.setPrompt(hasHttp ? RUNNING_PROMPT : RUNNING_PROMPT_NO_HTTP); promptHandler.setResults(lastResults); promptHandler.setStatus(null); } } @Override public void noTests(TestRunResults results) { runComplete(results); } @Override public void runAborted() { firstRun = false; } @Override public void testStarted(TestIdentifier testIdentifier, String className) { String status = "Running " + methodCount.get() + "/" + totalNoTests + (failureCount.get() == 0 ? "." : ". " + failureCount + " " + pluralize("failure", "failures", failureCount) + " so far.") + " Running: " + className + "#" + testIdentifier.getDisplayName(); if (TestSupport.instance().get().isDisplayTestOutput() && QuarkusConsole.INSTANCE instanceof AeshConsole) { log.info(status); } promptHandler.setStatus(status); } }); } static class MessageFormat { public static final String RED = "\u001B[91m"; public static final String GREEN = "\u001b[32m"; public static final String BLUE = "\u001b[34m"; public static final String RESET = "\u001b[0m"; private MessageFormat() { } public static String statusHeader(String header) { return RESET + "==================== " + header + RESET + " ===================="; } public static String statusFooter(String footer) { return RESET + ">>>>>>>>>>>>>>>>>>>> " + footer + RESET + " <<<<<<<<<<<<<<<<<<<<"; } public static String toggleStatus(boolean enabled) { return " (" + (enabled ? 
GREEN + "enabled" + RESET + "" : RED + "disabled") + RESET + ")"; } public static String helpOption(String key, String description) { return "[" + BLUE + key + RESET + "] - " + description; } public static String helpOption(String key, String description, boolean enabled) { return helpOption(key, description) + toggleStatus(enabled); } } private static String pluralize(String singular, String plural, long number) { if (number == 1L) { return singular; } return plural; } private static String pluralize(String singular, String plural, AtomicLong number) { return pluralize(singular, plural, number.get()); } }
package org.csstudio.platform.logging; import java.util.Enumeration; import java.util.Properties; import org.apache.log4j.BasicConfigurator; import org.apache.log4j.LogManager; import org.apache.log4j.Logger; import org.apache.log4j.PropertyConfigurator; import org.apache.log4j.spi.LoggerRepository; import org.csstudio.platform.CSSPlatformPlugin; import org.csstudio.platform.security.SecureStorage; /** * The central logging service of the CSS platform, based on Log4j. * The service is implemented as singleton. A reference can be obtained by * <pre> * CentralLogger.getInstance() * </pre> * * Logging is straight forward: * <pre> * final Logger log = CentralLogger.getInstance().getLogger(this); * log.debug("test log message"); * log.info("test log message"); * </pre> * Eclipse plugin log messages will also be forwarded to this Log4j log, * so existing code using the plugin log will continue to function. * <p> * <u>General Idea for using the log levels:</u> * <ul> * <li><b>Fatal:</b> Error that allows nothing more than exiting the application. * No way to bring up a dialog box or other means to tell the user * what is happening. * <li><b>Error:</b> Ran into error like "Cannot open file" * which certainly impacts the user, but it can be handled * by for example displaying the error in a dialog box * without stopping all of CSS. * <li><b>Warn:</b> For example an exception in <code>close()</code>. * A system expert should look at this, but the user * won't really notice anything. * <li><b>Info:</b> Application "start" messages from CSS, Interconnection Server, * maybe "User Fred authenticated". * <li><b>Debug:</b> Plugin start/stop, PV connected, PV received sample, * SDS display "abc" opened, ... * </ul> * It would be nice to have multiple debug levels, but there's * only one. Log4j itself allows fine-grained configuration based on * the message creator (name of class behind the <code>this</code> * in the above <code>....getLogger(this)</code>), but the CSS * logging configuration via Eclipse preferences currently only * allows a global log level selection. * <p> * Alternatively, the following style is still supported, but with the drawback * that the source file info of the log calls will not reflect where your code * actually invoked the logger. Instead, they will show where 'debug', 'info' * etc. are defined inside the CentralLogger. * <pre> * CentralLogger.getInstance().debug(this, "test log message"); * CentralLogger.getInstance().info(this, "test log message"); * </pre> * @author Alexander Will, Sven Wende, Kay Kasemir */ public final class CentralLogger { /** * Holds the only one instance of this class. */ private static CentralLogger _instance = null; /** * Log4j property for the usage of the css core console logger. */ public static final String PROP_LOG4J_CONSOLE = "css_console"; //$NON-NLS-1$ /** * Log4j property for the usage of the css core file logger. */ public static final String PROP_LOG4J_FILE = "css_file"; //$NON-NLS-1$ /** * Log4j property for the usage of the css core JMS logger. */ public static final String PROP_LOG4J_JMS = "css_jms"; //$NON-NLS-1$ /** * Log4j property for the css console appender. */ public static final String PROP_LOG4J_CONSOLE_APPENDER = "log4j.appender.css_console"; //$NON-NLS-1$ /** * Log4j property for the css console appender layout. */ public static final String PROP_LOG4J_CONSOLE_LAYOUT = "log4j.appender.css_console.layout"; //$NON-NLS-1$ /** * Log4j property for the css console appender pattern. 
*/ public static final String PROP_LOG4J_CONSOLE_PATTERN = "log4j.appender.css_console.layout.ConversionPattern"; //$NON-NLS-1$ /** * Log4j property for the css console appender threshold. */ public static final String PROP_LOG4J_CONSOLE_THRESHOLD = "log4j.appender.css_console.Threshold"; //$NON-NLS-1$ /** * Log4j property that indicates that the console appender should be aware * of changing standard system output channels. */ public static final String PROP_LOG4J_CONSOLE_FOLLOW = "log4j.appender.css_console.Follow"; //$NON-NLS-1$ /** * Log4j property for the css file appender. */ public static final String PROP_LOG4J_FILE_APPENDER = "log4j.appender.css_file"; //$NON-NLS-1$ /** * Log4j property for the css file appender layout. */ public static final String PROP_LOG4J_FILE_LAYOUT = "log4j.appender.css_file.layout"; //$NON-NLS-1$ /** * Log4j property for the css file appender pattern. */ public static final String PROP_LOG4J_FILE_PATTERN = "log4j.appender.css_file.layout.ConversionPattern"; //$NON-NLS-1$ /** * Log4j property for the css file appender log file destination. */ public static final String PROP_LOG4J_FILE_DESTINATION = "log4j.appender.css_file.File"; //$NON-NLS-1$ /** * Log4j property for the css file appender threshold. */ public static final String PROP_LOG4J_FILE_THRESHOLD = "log4j.appender.css_file.Threshold"; //$NON-NLS-1$ /** * Log4j property for the css file appender append property. */ public static final String PROP_LOG4J_FILE_APPEND = "log4j.appender.css_file.Append"; //$NON-NLS-1$ /** * Log4j property for the css file appender maximum size property. */ public static final String PROP_LOG4J_FILE_MAX_SIZE = "log4j.appender.css_file.MaxFileSize"; //$NON-NLS-1$ /** * Log4j property for the css file appender maximum backup index property. */ public static final String PROP_LOG4J_FILE_MAX_INDEX = "log4j.appender.css_file.MaxBackupIndex"; //$NON-NLS-1$ /** * Log4j property for the css JMS appender. */ public static final String PROP_LOG4J_JMS_APPENDER = "log4j.appender.css_jms"; //$NON-NLS-1$ /** * Log4j property for the css JMS appender threshold. */ public static final String PROP_LOG4J_JMS_THRESHOLD = "log4j.appender.css_jms.Threshold"; //$NON-NLS-1$ /** * Log4j property for the css JMS appender layout. */ public static final String PROP_LOG4J_JMS_LAYOUT = "log4j.appender.css_jms.layout"; //$NON-NLS-1$ /** * Log4j property for the css JMS appender pattern. */ public static final String PROP_LOG4J_JMS_PATTERN = "log4j.appender.css_jms.layout.ConversionPattern"; //$NON-NLS-1$ /** * Log4j property for the css JMS appender provider url. */ public static final String PROP_LOG4J_JMS_URL = "log4j.appender.css_jms.providerURL"; //$NON-NLS-1$ /** * Log4j property for the css JMS appender topic binding name. */ public static final String PROP_LOG4J_JMS_TOPIC = "log4j.appender.css_jms.topicBindingName"; //$NON-NLS-1$ /** * Log4j property for the css JMS appender user name. */ public static final String PROP_LOG4J_JMS_USER = "log4j.appender.css_jms.userName"; //$NON-NLS-1$ /** * Log4j property for the css JMS appender password. */ public static final String PROP_LOG4J_JMS_PASSWORD = "log4j.appender.css_jms.password"; //$NON-NLS-1$ /** * Private constructor due to singleton pattern. */ private CentralLogger() { configure(); } /** * Return the only one instance of this class. * * @return The only one instance of this class. */ public static CentralLogger getInstance() { if (_instance == null) { _instance = new CentralLogger(); } return _instance; } /** * Configure the log4j library. 
*/ public void configure() { final CSSPlatformPlugin plugin = CSSPlatformPlugin.getDefault(); if (plugin == null) { // Not running in full Eclipse environment, probably because // this is called from a JUnit test. // Setup basic console log. BasicConfigurator.configure(); return; } // Else: Configure Log4j from plugin properties final Properties p = createLog4jProperties( plugin.getPluginPreferences()); PropertyConfigurator.configure(p); } /** * Obtain a logger for the given class. * @param caller Calling class, may be <code>null</code>. * @return A Log4j <code>Logger</code>. */ public Logger getLogger(final Object caller) { if (caller == null) { return Logger.getRootLogger(); } return Logger.getLogger(caller.getClass()); } /** * Obtain a logger for the given class name (for static classes). * @param caller Calling className, must not be <code>null</code>. * @return A Log4j <code>Logger</code>. */ public Logger getLogger(final String className) { return Logger.getLogger(className); } public Logger getLogger(final Class<?> clazz) { return Logger.getLogger(clazz.getCanonicalName()); } /** * Log a message with log level <i>info</i>. The reference to the calling * object is used to automatically generate more detailled log4j messages. * * @param caller * The calling object. * @param message * The log message. */ public void info(final Object caller, final String message) { getLogger(caller).info(message); } /** * Log a throwable with log level <i>info</i>. The reference to the calling * object is used to automatically generate more detailled log4j messages. * * @param caller * The calling object. * @param throwable * The throwable. */ public void info(final Object caller, final Throwable throwable) { info(caller, null, throwable); } /** * Log a message together with a throwable with log level <i>info</i>. The * reference to the calling object is used to automatically generate more * detailled log4j messages. * * @param caller * The calling object. * @param message * The log message. * @param throwable * The throwable. */ public void info(final Object caller, final String message, final Throwable throwable) { getLogger(caller).info(message, throwable); } /** * Log a message with log level <i>debug</i>. The reference to the calling * object is used to automatically generate more detailled log4j messages. * * @param caller * The calling object. * @param message * The log message. */ public void debug(final Object caller, final String message) { getLogger(caller).debug(message); } /** * Log a throwable with log level <i>debug</i>. The reference to the * calling object is used to automatically generate more detailled log4j * messages. * * @param caller * The calling object. * @param throwable * The throwable. */ public void debug(final Object caller, final Throwable throwable) { debug(caller, null, throwable); } /** * Log a message together with a throwable with log level <i>debug</i>. The * reference to the calling object is used to automatically generate more * detailled log4j messages. * * @param caller * The calling object. * @param message * The log message. * @param throwable * The throwable. */ public void debug(final Object caller, final String message, final Throwable throwable) { getLogger(caller).debug(message, throwable); } /** * Log a message with log level <i>warn</i>. The reference to the calling * object is used to automatically generate more detailled log4j messages. * * @param caller * The calling object. * @param message * The log message. 
*/ public void warn(final Object caller, final String message) { getLogger(caller).warn(message); } /** * Log a throwable with log level <i>warn</i>. The reference to the calling * object is used to automatically generate more detailled log4j messages. * * @param caller * The calling object. * @param throwable * The throwable. */ public void warn(final Object caller, final Throwable throwable) { warn(caller, null, throwable); } /** * Log a message together with a throwable with log level <i>warn</i>. The * reference to the calling object is used to automatically generate more * detailled log4j messages. * * @param caller * The calling object. * @param message * The log message. * @param throwable * The throwable. */ public void warn(final Object caller, final String message, final Throwable throwable) { getLogger(caller).warn(message, throwable); } /** * Log a message with log level <i>error</i>. The reference to the calling * object is used to automatically generate more detailled log4j messages. * * @param caller * The calling object. * @param message * The log message. */ public void error(final Object caller, final String message) { getLogger(caller).error(message); } /** * Log a throwable with log level <i>error</i>. The reference to the * calling object is used to automatically generate more detailled log4j * messages. * * @param caller * The calling object. * @param throwable * The throwable. */ public void error(final Object caller, final Throwable throwable) { error(caller, null, throwable); } /** * Log a message together with a throwable with log level <i>error</i>. The * reference to the calling object is used to automatically generate more * detailled log4j messages. * * @param caller * The calling object. * @param message * The log message. * @param throwable * The throwable. */ public void error(final Object caller, final String message, final Throwable throwable) { getLogger(caller).error(message, throwable); } /** * Log a message with log level <i>fatal</i>. The reference to the calling * object is used to automatically generate more detailled log4j messages. * * @param caller * The calling object. * @param message * The log message. */ public void fatal(final Object caller, final String message) { getLogger(caller).fatal(message); } /** * Log a throwable with log level <i>fatal</i>. The reference to the * calling object is used to automatically generate more detailled log4j * messages. * * @param caller * The calling object. * @param throwable * The throwable. */ public void fatal(final Object caller, final Throwable throwable) { fatal(caller, null, throwable); } /** * Log a message together with a throwable with log level <i>fatal</i>. The * reference to the calling object is used to automatically generate more * detailled log4j messages. * * @param caller * The calling object. * @param message * The log message. * @param throwable * The throwable. */ public void fatal(final Object caller, final String message, final Throwable throwable) { getLogger(caller).fatal(message, throwable); } /** Log levels, ordered from 'almost all' to 'only severe errors' */ final private static String LOG_LEVELS[] = new String[] { "debug", "info", "warn", "error", "fatal"}; /** * Create the log4j properts object from the given preference store. * * @param prefs * Source preference store. * @return The log4j properts object from the given preference store. 
*/ @SuppressWarnings("deprecation") private Properties createLog4jProperties( final org.eclipse.core.runtime.Preferences prefs) { final Properties result = new Properties(); // console logger fillFromStore(result, prefs, PROP_LOG4J_CONSOLE_APPENDER); fillFromStore(result, prefs, PROP_LOG4J_CONSOLE_LAYOUT); fillFromStore(result, prefs, PROP_LOG4J_CONSOLE_PATTERN); fillFromStore(result, prefs, PROP_LOG4J_CONSOLE_THRESHOLD); // file logger fillFromStore(result, prefs, PROP_LOG4J_FILE_APPENDER); fillFromStore(result, prefs, PROP_LOG4J_FILE_LAYOUT); fillFromStore(result, prefs, PROP_LOG4J_FILE_PATTERN); fillFromStore(result, prefs, PROP_LOG4J_FILE_THRESHOLD); fillFromStore(result, prefs, PROP_LOG4J_FILE_DESTINATION); fillFromStore(result, prefs, PROP_LOG4J_FILE_APPEND); fillFromStore(result, prefs, PROP_LOG4J_FILE_MAX_SIZE); fillFromStore(result, prefs, PROP_LOG4J_FILE_MAX_INDEX); // JMS logger fillFromStore(result, prefs, PROP_LOG4J_JMS_APPENDER); fillFromStore(result, prefs, PROP_LOG4J_JMS_THRESHOLD); fillFromStore(result, prefs, PROP_LOG4J_JMS_LAYOUT); fillFromStore(result, prefs, PROP_LOG4J_JMS_PATTERN); fillFromStore(result, prefs, PROP_LOG4J_JMS_URL); fillFromStore(result, prefs, PROP_LOG4J_JMS_TOPIC); fillFromSecureStorage(result, PROP_LOG4J_JMS_USER); fillFromSecureStorage(result, PROP_LOG4J_JMS_PASSWORD); // Maximize the 'threshold' of console, file and JMS appender // and use that as the root logger level. // This way, if nobody uses "debug", the debug level // is suppressed at the root and logger.isDebugEnabled() // can be used as intended to avoid debug message formatting String rootProperty = LOG_LEVELS[LOG_LEVELS.length-1]; final boolean use_console = prefs.getBoolean(PROP_LOG4J_CONSOLE); final boolean use_file = prefs.getBoolean(PROP_LOG4J_FILE); final boolean use_jms = prefs.getBoolean(PROP_LOG4J_JMS); final String console_threshold = prefs.getString(PROP_LOG4J_CONSOLE_THRESHOLD); final String file_threshold = prefs.getString(PROP_LOG4J_FILE_THRESHOLD); final String jms_threshold = prefs.getString(PROP_LOG4J_JMS_THRESHOLD); for (int i=0; i < LOG_LEVELS.length; ++i) { if ((use_console && LOG_LEVELS[i].equalsIgnoreCase(console_threshold)) || (use_file && LOG_LEVELS[i].equalsIgnoreCase(file_threshold)) || (use_jms && LOG_LEVELS[i].equalsIgnoreCase(jms_threshold))) { rootProperty = LOG_LEVELS[i]; break; } } // create the log4j root property: // level-of-root-logger, appender1, appender2, appender3 if (use_console) { rootProperty += "," + "css_console"; //$NON-NLS-1$ //$NON-NLS-2$ } if (use_file) { rootProperty += "," + "css_file"; //$NON-NLS-1$ //$NON-NLS-2$ } if (use_jms) { rootProperty += "," + "css_jms"; //$NON-NLS-1$ //$NON-NLS-2$ } result.setProperty("log4j.rootLogger", rootProperty); //$NON-NLS-1$ return result; } /** * Fill the given properties object (java.util) with a certain property that * is read from the given plugin preference store * (org.eclipse.core.runtime.Preferences). * * @param p * Properties object to fill. * @param prefs * Plugin preference store. * @param propertyID * The ID of the certain property. */ private void fillFromStore(final Properties p, final org.eclipse.core.runtime.Preferences prefs, final String propertyID) { p.setProperty(propertyID, prefs.getString(propertyID)); } /** * Fill the given properties object (java.util) with a certain property that * is read from secure storage * (org.eclipse.core.runtime.Preferences). * * @param p * Properties object to fill. * @param propertyID * The ID of the certain property. 
*/
    private void fillFromSecureStorage(final Properties p, final String propertyID) {
        final String value = SecureStorage.retrieveSecureStorage(
                CSSPlatformPlugin.getDefault().getBundle().getSymbolicName(), propertyID);
        // Properties.setProperty() throws a NullPointerException for a null value,
        // so skip entries that are not present in secure storage.
        if (value != null) {
            p.setProperty(propertyID, value);
        }
    }
}
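// A minimal usage sketch for CentralLogger, following the two styles described in its
// class javadoc; the demo class, method and message names are illustrative and assume
// the CSS platform plugin classes are on the classpath.
package org.csstudio.platform.logging.demo;

import org.apache.log4j.Logger;

import org.csstudio.platform.logging.CentralLogger;

public class CentralLoggerUsageSketch {

    // Preferred style: fetch a Log4j logger once so log entries carry the caller's
    // source location.
    private final Logger log = CentralLogger.getInstance().getLogger(this);

    public void openDisplay(final String name) {
        log.debug("Opening display " + name);
        try {
            // ... real work would go here ...
            log.info("Opened display " + name);
        } catch (final Exception ex) {
            // 'error' level: impacts the user, but CSS keeps running.
            log.error("Cannot open display " + name, ex);
        }
    }

    public void legacyStyle() {
        // Still supported, but the reported source location points into CentralLogger
        // rather than into this class.
        CentralLogger.getInstance().info(this, "test log message");
    }
}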
package org.hisp.dhis.android.core.enrollment; import com.fasterxml.jackson.databind.ObjectMapper; import org.hisp.dhis.android.core.Inject; import org.hisp.dhis.android.core.common.BaseIdentifiableObject; import org.hisp.dhis.android.core.common.Coordinates; import org.junit.Test; import java.io.IOException; import java.text.ParseException; import static org.assertj.core.api.Java6Assertions.assertThat; public class EnrollmentIntegrationTest { @Test public void enrollment_shouldMapFromJsonString() throws IOException, ParseException { ObjectMapper objectMapper = Inject.objectMapper(); Enrollment enrollment = objectMapper.readValue("{\n" + "\"trackedEntity\": \"nEenWmSyUEp\",\n" + "\"created\": \"2015-03-28T12:27:50.740\",\n" + "\"orgUnit\": \"Rp268JB6Ne4\",\n" + "\"program\": \"ur1Edk5Oe2n\",\n" + "\"trackedEntityInstance\": \"D2dUWKQErfQ\",\n" + "\"enrollment\": \"BVJQIxoM2o4\",\n" + "\"lastUpdated\": \"2015-03-28T12:27:50.748\",\n" + "\"orgUnitName\": \"Adonkia CHP\",\n" + "\"enrollmentDate\": \"2014-08-07T12:27:50.730\",\n" + "\"followup\": false,\n" + "\"incidentDate\": \"2014-07-21T12:27:50.730\",\n" + "\"status\": \"ACTIVE\",\n" + "\"coordinate\": { \"latitude\": 12.1, \"longitude\": 22.2 },\n" + "\"notes\": [],\n" + "\"attributes\": []\n" + "}", Enrollment.class); assertThat(enrollment.created()).isEqualTo( BaseIdentifiableObject.DATE_FORMAT.parse("2015-03-28T12:27:50.740")); assertThat(enrollment.lastUpdated()).isEqualTo( BaseIdentifiableObject.DATE_FORMAT.parse("2015-03-28T12:27:50.748")); assertThat(enrollment.uid()).isEqualTo("BVJQIxoM2o4"); assertThat(enrollment.organisationUnit()).isEqualTo("Rp268JB6Ne4"); assertThat(enrollment.program()).isEqualTo("ur1Edk5Oe2n"); assertThat(enrollment.dateOfEnrollment()).isEqualTo("2014-08-07T12:27:50.730"); assertThat(enrollment.dateOfIncident()).isEqualTo("2014-07-21T12:27:50.730"); assertThat(enrollment.followUp()).isEqualTo(false); assertThat(enrollment.enrollmentStatus()).isEqualTo(EnrollmentStatus.ACTIVE); assertThat(enrollment.trackedEntityInstance()).isEqualTo("D2dUWKQErfQ"); Coordinates coordinates = enrollment.coordinate(); assertThat(coordinates.latitude()).isEqualTo(12.1); assertThat(coordinates.longitude()).isEqualTo(22.2); } }
package com.devicehive.application; import com.devicehive.json.GsonFactory; import com.google.gson.Gson; import io.swagger.jaxrs.config.BeanConfig; import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.SpringApplication; import org.springframework.boot.builder.SpringApplicationBuilder; import org.springframework.boot.context.web.SpringBootServletInitializer; import org.springframework.boot.web.servlet.ServletComponentScan; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.FilterType; import org.springframework.context.annotation.Lazy; import org.springframework.scheduling.annotation.EnableAsync; import org.springframework.scheduling.annotation.EnableScheduling; import org.springframework.stereotype.Component; import org.springframework.validation.beanvalidation.LocalValidatorFactoryBean; import javax.validation.Validator; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; @Component @ComponentScan(value = "com.devicehive", excludeFilters = { @ComponentScan.Filter(type = FilterType.REGEX, pattern = "org.springframework.transaction.*") }) @ServletComponentScan("com.devicehive.application.filter") @EnableScheduling @EnableAsync(proxyTargetClass = true) public class DeviceHiveApplication { public static final String MESSAGE_EXECUTOR = "DeviceHiveMessageService"; public static void main(String... args) { ConfigurableApplicationContext context = new SpringApplicationBuilder() .sources(DeviceHiveApplication.class) .web(true) .run(args); DeviceHiveApplication app = context.getBean(DeviceHiveApplication.class); context.registerShutdownHook(); } @Bean @Lazy(false) public BeanConfig swaggerConfig(@Value("${server.context-path}") String contextPath, @Value("${build.version}") String buildVersion) { String basePath = contextPath.equals("/") ? JerseyConfig.REST_PATH : contextPath + JerseyConfig.REST_PATH; BeanConfig beanConfig = new BeanConfig(); beanConfig.setTitle("Device Hive REST API"); beanConfig.setVersion(buildVersion); beanConfig.setBasePath(basePath); beanConfig.setResourcePackage("com.devicehive.resource"); beanConfig.setScan(true); return beanConfig; } @Lazy(false) @Bean(name = MESSAGE_EXECUTOR) public ExecutorService messageExecutorService(@Value("${app.executor.size:1}") Integer executorSize) { return Executors.newFixedThreadPool(executorSize); } @Bean public Gson gson() { return GsonFactory.createGson(); } @Bean public Validator localValidator() { return new LocalValidatorFactoryBean(); } }
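// A hypothetical consumer of the MESSAGE_EXECUTOR bean defined above, showing how the
// named, property-sized thread pool would be injected elsewhere; the class below is
// illustrative only and is not part of DeviceHive.
package com.devicehive.application.demo;

import java.util.concurrent.ExecutorService;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;

import com.devicehive.application.DeviceHiveApplication;

@Component
public class MessageDispatchSketch {

    private final ExecutorService messageExecutor;

    @Autowired
    public MessageDispatchSketch(
            @Qualifier(DeviceHiveApplication.MESSAGE_EXECUTOR) ExecutorService messageExecutor) {
        this.messageExecutor = messageExecutor;
    }

    public void dispatch(final Runnable task) {
        // Runs on the fixed pool whose size comes from the 'app.executor.size'
        // property (defaulting to 1, as declared in the bean definition above).
        messageExecutor.submit(task);
    }
}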
package net.orfjackal.dimdwarf.db; import jdave.Block; import jdave.Group; import jdave.Specification; import jdave.junit4.JDaveRunner; import net.orfjackal.dimdwarf.tx.TransactionCoordinator; import net.orfjackal.dimdwarf.tx.TransactionImpl; import org.junit.runner.RunWith; /** * @author Esko Luontola * @since 11.9.2008 */ @RunWith(JDaveRunner.class) @Group({"fast"}) public class MultipleDatabaseTablesSpec extends Specification<Object> { private static final String TABLE1 = "table1"; private static final String TABLE2 = "table2"; private InMemoryDatabase dbms; private TransactionCoordinator tx; private Database db; private DatabaseTable table1; private DatabaseTable table2; private Blob key; private Blob value1; private Blob value2; private Blob value3; public void create() throws Exception { dbms = new InMemoryDatabase(TABLE1, TABLE2); tx = new TransactionImpl(); db = dbms.openConnection(tx.getTransaction()); table1 = this.db.openTable(TABLE1); table2 = this.db.openTable(TABLE2); key = Blob.fromBytes(new byte[]{0}); value1 = Blob.fromBytes(new byte[]{1}); value2 = Blob.fromBytes(new byte[]{2}); value3 = Blob.fromBytes(new byte[]{3}); } private Blob readInNewTransaction(String table, Blob key) { TransactionCoordinator tx = new TransactionImpl(); try { return dbms.openConnection(tx.getTransaction()).openTable(table).read(key); } finally { tx.prepare(); tx.commit(); } } private void updateInNewTransaction(String table, Blob key, Blob value) { TransactionCoordinator tx = new TransactionImpl(); try { dbms.openConnection(tx.getTransaction()).openTable(table).update(key, value); } finally { tx.prepare(); tx.commit(); } } public class OpeningDatabaseTables { public Object create() { return null; } public void theSameNamesWillCorrespondTheSameTable() { specify(db.openTable(TABLE1), should.equal(table1)); } public void differentNamesWillCorrespondDifferentTables() { specify(table1, should.not().equal(table2)); } public void nonexistantTablesCanNotBeOpened() { specify(new Block() { public void run() throws Throwable { db.openTable("doesNotExist"); } }, should.raise(IllegalArgumentException.class)); } } public class DuringTransaction { public Object create() { table1.update(key, value1); return null; } public void updatesAreSeenInTheUpdatedTable() { specify(table1.read(key), should.equal(value1)); } public void updatesAreNotSeenInOtherTables() { specify(table2.read(key), should.equal(Blob.EMPTY_BLOB)); } public void updatesAreNotSeenInOtherTransactions() { specify(readInNewTransaction(TABLE1, key), should.equal(Blob.EMPTY_BLOB)); } } public class AfterTransactionIsCommitted { public Object create() { table1.update(key, value1); tx.prepare(); tx.commit(); return null; } public void updatesAreSeenInTheUpdatedTable() { specify(readInNewTransaction(TABLE1, key), should.equal(value1)); } public void updatesAreNotSeenInOtherTables() { specify(readInNewTransaction(TABLE2, key), should.equal(Blob.EMPTY_BLOB)); } } public class WhenTheSameKeyIsUpdatedInDifferentTables { public Object create() { table1.update(key, value1); table2.update(key, value2); return null; } public void itDoesNotConflict() { tx.prepare(); tx.commit(); specify(readInNewTransaction(TABLE1, key), should.equal(value1)); specify(readInNewTransaction(TABLE2, key), should.equal(value2)); } } // public class WhenOnlyOneTableIsUpdated { // private long revision; // public Object create() { // table1.update(key, value1); // table2.update(key, value2); // tx.prepare(); // tx.commit(); // revision = dbms.getCurrentRevision(); // return null; // 
//            public void tableRevisionsAreInSync1() {
//                updateInNewTransaction(TABLE1, key, value3);
//                specify(dbms.getCurrentRevision(), should.equal(revision + 1));
//                updateInNewTransaction(TABLE1, key, value3);
//                specify(dbms.getCurrentRevision(), should.equal(revision + 2));
//                specify(readInNewTransaction(TABLE1, key), should.equal(value3));
//                specify(dbms.getCurrentRevision(), should.equal(revision + 3));
//                specify(readInNewTransaction(TABLE2, key), should.equal(value2));
//                specify(dbms.getCurrentRevision(), should.equal(revision + 4));
//            }
//
//            public void tableRevisionsAreInSync2() {
//                updateInNewTransaction(TABLE2, key, value3);
//                specify(dbms.getCurrentRevision(), should.equal(revision + 1));
//                updateInNewTransaction(TABLE2, key, value3);
//                specify(dbms.getCurrentRevision(), should.equal(revision + 2));
//                specify(readInNewTransaction(TABLE1, key), should.equal(value1));
//                specify(dbms.getCurrentRevision(), should.equal(revision + 3));
//                specify(readInNewTransaction(TABLE2, key), should.equal(value3));
//                specify(dbms.getCurrentRevision(), should.equal(revision + 4));
//            }
//        }
}
package org.jboss.dna.repository.sequencers; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import javax.jcr.Node; import javax.jcr.Repository; import javax.jcr.RepositoryException; import javax.jcr.Session; import javax.jcr.observation.Event; import net.jcip.annotations.Immutable; import net.jcip.annotations.ThreadSafe; import org.jboss.dna.common.component.ClassLoaderFactory; import org.jboss.dna.common.component.ComponentLibrary; import org.jboss.dna.common.component.StandardClassLoaderFactory; import org.jboss.dna.common.monitor.LoggingProgressMonitor; import org.jboss.dna.common.monitor.ProgressMonitor; import org.jboss.dna.common.monitor.SimpleProgressMonitor; import org.jboss.dna.common.util.ArgCheck; import org.jboss.dna.common.util.HashCode; import org.jboss.dna.common.util.Logger; import org.jboss.dna.repository.RepositoryI18n; import org.jboss.dna.repository.observation.NodeChange; import org.jboss.dna.repository.observation.NodeChangeListener; import org.jboss.dna.repository.observation.NodeChanges; import org.jboss.dna.repository.services.AbstractServiceAdministrator; import org.jboss.dna.repository.services.AdministeredService; import org.jboss.dna.repository.services.ServiceAdministrator; import org.jboss.dna.repository.util.ExecutionContext; import org.jboss.dna.repository.util.JcrTools; import org.jboss.dna.repository.util.RepositoryNodePath; import org.jboss.dna.repository.util.SessionFactory; /** * A sequencing system is used to monitor changes in the content of {@link Repository JCR repositories} and to sequence the * content to extract or to generate structured information. * @author Randall Hauch */ public class SequencingService implements AdministeredService, NodeChangeListener { /** * Interface used to select the set of {@link Sequencer} instances that should be run. * @author Randall Hauch */ public static interface Selector { /** * Select the sequencers that should be used to sequence the supplied node. * @param sequencers the list of all sequencers available at the moment; never null * @param node the node to be sequenced; never null * @param nodeChange the set of node changes; never null * @return the list of sequencers that should be used; may not be null */ List<Sequencer> selectSequencers( List<Sequencer> sequencers, Node node, NodeChange nodeChange ); } /** * The default {@link Selector} implementation that selects every sequencer every time it's called, regardless of the node (or * logger) supplied. * @author Randall Hauch */ protected static class DefaultSelector implements Selector { public List<Sequencer> selectSequencers( List<Sequencer> sequencers, Node node, NodeChange nodeChange ) { return sequencers; } } /** * Interface used to determine whether a {@link NodeChange} should be processed. * @author Randall Hauch */ public static interface NodeFilter { /** * Determine whether the node represented by the supplied change should be submitted for sequencing. 
* @param nodeChange the node change event * @return true if the node should be submitted for sequencing, or false if the change should be ignored */ boolean accept( NodeChange nodeChange ); } /** * The default filter implementation, which accepts only new nodes or nodes that have new or changed properties. * @author Randall Hauch */ protected static class DefaultNodeFilter implements NodeFilter { public boolean accept( NodeChange nodeChange ) { // Only care about new nodes or nodes that have new/changed properies ... return nodeChange.includesEventTypes(Event.NODE_ADDED, Event.PROPERTY_ADDED, Event.PROPERTY_CHANGED); } } /** * The default {@link Selector} that considers every {@link Sequencer} to be used for every node. * @see SequencingService#setSequencerSelector(org.jboss.dna.repository.sequencers.SequencingService.Selector) */ public static final Selector DEFAULT_SEQUENCER_SELECTOR = new DefaultSelector(); /** * The default {@link NodeFilter} that accepts new nodes or nodes that have new/changed properties. * @see SequencingService#setSequencerSelector(org.jboss.dna.repository.sequencers.SequencingService.Selector) */ public static final NodeFilter DEFAULT_NODE_FILTER = new DefaultNodeFilter(); /** * Class loader factory instance that always returns the * {@link Thread#getContextClassLoader() current thread's context class loader} (if not null) or component library's class * loader. */ protected static final ClassLoaderFactory DEFAULT_CLASSLOADER_FACTORY = new StandardClassLoaderFactory(SequencingService.class.getClassLoader()); /** * The administrative component for this service. * @author Randall Hauch */ protected class Administrator extends AbstractServiceAdministrator { protected Administrator() { super(State.PAUSED); } /** * {@inheritDoc} */ @Override protected String serviceName() { return RepositoryI18n.sequencingServiceName.text(); } /** * {@inheritDoc} */ @Override protected void doStart( State fromState ) { super.doStart(fromState); startService(); } /** * {@inheritDoc} */ @Override protected void doShutdown( State fromState ) { super.doShutdown(fromState); shutdownService(); } /** * {@inheritDoc} */ public boolean awaitTermination( long timeout, TimeUnit unit ) throws InterruptedException { return doAwaitTermination(timeout, unit); } } private ExecutionContext executionContext; private SequencerLibrary sequencerLibrary = new SequencerLibrary(); private Selector sequencerSelector = DEFAULT_SEQUENCER_SELECTOR; private NodeFilter nodeFilter = DEFAULT_NODE_FILTER; private ExecutorService executorService; private Logger logger = Logger.getLogger(this.getClass()); private final Statistics statistics = new Statistics(); private final Administrator administrator = new Administrator(); /** * Create a new sequencing system, configured with no sequencers and not monitoring any workspaces. Upon construction, the * system is {@link ServiceAdministrator#isPaused() paused} and must be configured and then * {@link ServiceAdministrator#start() started}. */ public SequencingService() { this.sequencerLibrary.setClassLoaderFactory(DEFAULT_CLASSLOADER_FACTORY); } /** * Return the administrative component for this service. * @return the administrative component; never null */ public ServiceAdministrator getAdministrator() { return this.administrator; } /** * Get the statistics for this system. 
* @return statistics */ public Statistics getStatistics() { return this.statistics; } /** * @return sequencerLibrary */ protected ComponentLibrary<Sequencer, SequencerConfig> getSequencerLibrary() { return this.sequencerLibrary; } /** * Get the class loader factory that should be used to load sequencers. By default, this service uses a factory that will * return either the {@link Thread#getContextClassLoader() current thread's context class loader} (if not null) or the class * loader that loaded this class. * @return the class loader factory; never null * @see #setClassLoaderFactory(ClassLoaderFactory) */ public ClassLoaderFactory getClassLoaderFactory() { return this.sequencerLibrary.getClassLoaderFactory(); } /** * Set the Maven Repository that should be used to load the sequencer classes. By default, this service uses a class loader * factory that will return either the {@link Thread#getContextClassLoader() current thread's context class loader} (if not * null) or the class loader that loaded this class. * @param classLoaderFactory the class loader factory reference, or null if the default class loader factory should be used. * @see #getClassLoaderFactory() */ public void setClassLoaderFactory( ClassLoaderFactory classLoaderFactory ) { this.sequencerLibrary.setClassLoaderFactory(classLoaderFactory != null ? classLoaderFactory : DEFAULT_CLASSLOADER_FACTORY); } public boolean addSequencer( SequencerConfig config ) { return this.sequencerLibrary.add(config); } public boolean updateSequencer( SequencerConfig config ) { return this.sequencerLibrary.update(config); } public boolean removeSequencer( SequencerConfig config ) { return this.sequencerLibrary.remove(config); } /** * Get the logger for this system * @return the logger */ public Logger getLogger() { return this.logger; } /** * Set the logger for this system. * @param logger the logger, or null if the standard logging should be used */ public void setLogger( Logger logger ) { this.logger = logger != null ? logger : Logger.getLogger(this.getClass()); } /** * @return executionContext */ public ExecutionContext getExecutionContext() { return this.executionContext; } /** * @param executionContext Sets executionContext to the specified value. */ public void setExecutionContext( ExecutionContext executionContext ) { ArgCheck.isNotNull(executionContext, "execution context"); if (this.getAdministrator().isStarted()) { throw new IllegalStateException(RepositoryI18n.unableToChangeExecutionContextWhileRunning.text()); } this.executionContext = executionContext; } /** * Get the executor service used to run the sequencers. * @return the executor service * @see #setExecutorService(ExecutorService) */ public ExecutorService getExecutorService() { return this.executorService; } /** * Set the executor service that should be used by this system. By default, the system is set up with a * {@link Executors#newSingleThreadExecutor() executor that uses a single thread}. 
* @param executorService the executor service * @see #getExecutorService() * @see Executors#newCachedThreadPool() * @see Executors#newCachedThreadPool(java.util.concurrent.ThreadFactory) * @see Executors#newFixedThreadPool(int) * @see Executors#newFixedThreadPool(int, java.util.concurrent.ThreadFactory) * @see Executors#newScheduledThreadPool(int) * @see Executors#newScheduledThreadPool(int, java.util.concurrent.ThreadFactory) * @see Executors#newSingleThreadExecutor() * @see Executors#newSingleThreadExecutor(java.util.concurrent.ThreadFactory) * @see Executors#newSingleThreadScheduledExecutor() * @see Executors#newSingleThreadScheduledExecutor(java.util.concurrent.ThreadFactory) */ public void setExecutorService( ExecutorService executorService ) { ArgCheck.isNotNull(executorService, "executor service"); if (this.getAdministrator().isStarted()) { throw new IllegalStateException(RepositoryI18n.unableToChangeExecutionContextWhileRunning.text()); } this.executorService = executorService; } /** * Override this method to creates a different kind of default executor service. This method is called when the system is * {@link #start() started} without an executor service being {@link #setExecutorService(ExecutorService) set}. * <p> * This method creates a {@link Executors#newSingleThreadExecutor() single-threaded executor}. * </p> * @return */ protected ExecutorService createDefaultExecutorService() { return Executors.newSingleThreadExecutor(); } protected void startService() { if (this.getExecutionContext() == null) { throw new IllegalStateException(RepositoryI18n.unableToStartSequencingServiceWithoutExecutionContext.text()); } if (this.executorService == null) { this.executorService = createDefaultExecutorService(); } assert this.executorService != null; assert this.sequencerSelector != null; assert this.nodeFilter != null; assert this.sequencerLibrary != null; } protected void shutdownService() { if (this.executorService != null) { this.executorService.shutdown(); } } protected boolean doAwaitTermination( long timeout, TimeUnit unit ) throws InterruptedException { if (this.executorService.isShutdown()) return true; return this.executorService.awaitTermination(timeout, unit); } /** * Get the sequencing selector used by this system. * @return the sequencing selector */ public Selector getSequencerSelector() { return this.sequencerSelector; } /** * Set the sequencer selector, or null if the {@link #DEFAULT_SEQUENCER_SELECTOR default sequencer selector} should be used. * @param sequencerSelector the selector */ public void setSequencerSelector( Selector sequencerSelector ) { this.sequencerSelector = sequencerSelector != null ? sequencerSelector : DEFAULT_SEQUENCER_SELECTOR; } /** * Get the node filter used by this system. * @return the node filter */ public NodeFilter getNodeFilter() { return this.nodeFilter; } /** * Set the filter that checks which nodes are to be sequenced, or null if the {@link #DEFAULT_NODE_FILTER default node filter} * should be used. * @param nodeFilter the new node filter */ public void setNodeFilter( NodeFilter nodeFilter ) { this.nodeFilter = nodeFilter != null ? nodeFilter : DEFAULT_NODE_FILTER; } /** * {@inheritDoc} */ public void onNodeChanges( NodeChanges changes ) { NodeFilter filter = this.getNodeFilter(); for (final NodeChange changedNode : changes) { // Only care about new nodes or nodes that have new/changed properies ... 
if (filter.accept(changedNode)) { try { this.executorService.execute(new Runnable() { public void run() { processChangedNode(changedNode); } }); } catch (RejectedExecutionException e) { // The executor service has been shut down, so do nothing with this set of changes } } } } /** * Do the work of processing by sequencing the node. This method is called by the {@link #executorService executor service} * when it performs it's work on the enqueued {@link ChangedNode ChangedNode runnable objects}. * @param node the node to be processed. */ protected void processChangedNode( NodeChange changedNode ) { try { final String repositoryWorkspaceName = changedNode.getRepositoryWorkspaceName(); Session session = null; try { // Figure out which sequencers accept this path, // and track which output nodes should be passed to each sequencer... final String nodePath = changedNode.getAbsolutePath(); Map<SequencerCall, Set<RepositoryNodePath>> sequencerCalls = new HashMap<SequencerCall, Set<RepositoryNodePath>>(); List<Sequencer> allSequencers = this.sequencerLibrary.getInstances(); List<Sequencer> sequencers = new ArrayList<Sequencer>(allSequencers.size()); for (Sequencer sequencer : allSequencers) { final SequencerConfig config = sequencer.getConfiguration(); for (SequencerPathExpression pathExpression : config.getPathExpressions()) { for (String propertyName : changedNode.getModifiedProperties()) { String path = nodePath + "/@" + propertyName; SequencerPathExpression.Matcher matcher = pathExpression.matcher(path); if (matcher.matches()) { // String selectedPath = matcher.getSelectedPath(); RepositoryNodePath outputPath = RepositoryNodePath.parse(matcher.getOutputPath(), repositoryWorkspaceName); SequencerCall call = new SequencerCall(sequencer, propertyName); // Record the output path ... Set<RepositoryNodePath> outputPaths = sequencerCalls.get(call); if (outputPaths == null) { outputPaths = new HashSet<RepositoryNodePath>(); sequencerCalls.put(call, outputPaths); } outputPaths.add(outputPath); sequencers.add(sequencer); break; } } } } Node node = null; if (!sequencers.isEmpty()) { // Create a session that we'll use for all sequencing ... session = this.getExecutionContext().getSessionFactory().createSession(repositoryWorkspaceName); // Find the changed node ... String relPath = changedNode.getAbsolutePath().replaceAll("^/+", ""); node = session.getRootNode().getNode(relPath); // Figure out which sequencers should run ... sequencers = this.sequencerSelector.selectSequencers(sequencers, node, changedNode); } if (sequencers.isEmpty()) { this.statistics.recordNodeSkipped(); if (this.logger.isDebugEnabled()) { this.logger.trace("Skipping '{0}': no sequencers matched this condition", changedNode); } } else { // Run each of those sequencers ... 
ProgressMonitor progressMonitor = new SimpleProgressMonitor(RepositoryI18n.sequencerTask.text(changedNode)); if (this.logger.isTraceEnabled()) { progressMonitor = new LoggingProgressMonitor(progressMonitor, this.logger, Logger.Level.TRACE); } try { progressMonitor.beginTask(sequencerCalls.size(), RepositoryI18n.sequencerTask, changedNode); for (Map.Entry<SequencerCall, Set<RepositoryNodePath>> entry : sequencerCalls.entrySet()) { final SequencerCall sequencerCall = entry.getKey(); final Set<RepositoryNodePath> outputPaths = entry.getValue(); final Sequencer sequencer = sequencerCall.getSequencer(); final String sequencerName = sequencer.getConfiguration().getName(); final String propertyName = sequencerCall.getSequencedPropertyName(); // Get the paths to the nodes where the sequencer should write it's output ... assert outputPaths != null && outputPaths.size() != 0; // Create a new execution context for each sequencer final Context executionContext = new Context(); final ProgressMonitor sequenceMonitor = progressMonitor.createSubtask(1); try { sequenceMonitor.beginTask(100, RepositoryI18n.sequencerSubtask, sequencerName); sequencer.execute(node, propertyName, changedNode, outputPaths, executionContext, sequenceMonitor.createSubtask(80)); } catch (RepositoryException e) { this.logger.error(e, RepositoryI18n.errorInRepositoryWhileSequencingNode, sequencerName, changedNode); } catch (SequencerException e) { this.logger.error(e, RepositoryI18n.errorWhileSequencingNode, sequencerName, changedNode); } finally { try { // Save the changes made by each sequencer ... if (session != null) session.save(); sequenceMonitor.worked(10); // 90% of sequenceMonitor } finally { try { // And always close the context. executionContext.close(); } finally { sequenceMonitor.done(); // 100% of sequenceMonitor } } } } this.statistics.recordNodeSequenced(); } finally { progressMonitor.done(); } } } finally { if (session != null) session.logout(); } } catch (RepositoryException e) { this.logger.error(e, RepositoryI18n.errorInRepositoryWhileFindingSequencersToRunAgainstNode, changedNode); } catch (Throwable e) { this.logger.error(e, RepositoryI18n.errorFindingSequencersToRunAgainstNode, changedNode); } } protected class Context implements ExecutionContext { protected final SessionFactory factory; private final Set<Session> sessions = new HashSet<Session>(); protected final AtomicBoolean closed = new AtomicBoolean(false); protected Context() { final SessionFactory delegate = SequencingService.this.getExecutionContext().getSessionFactory(); this.factory = new SessionFactory() { public Session createSession( String name ) throws RepositoryException { if (closed.get()) throw new IllegalStateException(RepositoryI18n.executionContextHasBeenClosed.text()); Session session = delegate.createSession(name); recordSession(session); return session; } }; } protected synchronized void recordSession( Session session ) { if (session != null) sessions.add(session); } /** * {@inheritDoc} */ public SessionFactory getSessionFactory() { return this.factory; } /** * {@inheritDoc} */ public JcrTools getTools() { return SequencingService.this.getExecutionContext().getTools(); } public synchronized void close() { if (this.closed.get()) return; this.closed.set(true); for (Session session : sessions) { if (session != null) session.logout(); } } } /** * The statistics for the system. Each sequencing system has an instance of this class that is updated. 
* @author Randall Hauch */ @ThreadSafe public class Statistics { private final AtomicLong numberOfNodesSequenced = new AtomicLong(0); private final AtomicLong numberOfNodesSkipped = new AtomicLong(0); private final AtomicLong startTime; protected Statistics() { startTime = new AtomicLong(System.currentTimeMillis()); } public Statistics reset() { this.startTime.set(System.currentTimeMillis()); this.numberOfNodesSequenced.set(0); this.numberOfNodesSkipped.set(0); return this; } /** * @return the system time when the statistics were started */ public long getStartTime() { return this.startTime.get(); } /** * @return the number of nodes that were sequenced */ public long getNumberOfNodesSequenced() { return this.numberOfNodesSequenced.get(); } /** * @return the number of nodes that were skipped because no sequencers applied */ public long getNumberOfNodesSkipped() { return this.numberOfNodesSkipped.get(); } protected void recordNodeSequenced() { this.numberOfNodesSequenced.incrementAndGet(); } protected void recordNodeSkipped() { this.numberOfNodesSkipped.incrementAndGet(); } } @Immutable protected class SequencerCall { private final Sequencer sequencer; private final String sequencerName; private final String sequencedPropertyName; private final int hc; protected SequencerCall( Sequencer sequencer, String sequencedPropertyName ) { this.sequencer = sequencer; this.sequencerName = sequencer.getConfiguration().getName(); this.sequencedPropertyName = sequencedPropertyName; this.hc = HashCode.compute(this.sequencerName, this.sequencedPropertyName); } /** * @return sequencer */ public Sequencer getSequencer() { return this.sequencer; } /** * @return sequencedPropertyName */ public String getSequencedPropertyName() { return this.sequencedPropertyName; } /** * {@inheritDoc} */ @Override public int hashCode() { return this.hc; } /** * {@inheritDoc} */ @Override public boolean equals( Object obj ) { if (obj == this) return true; if (obj instanceof SequencerCall) { SequencerCall that = (SequencerCall)obj; if (!this.sequencerName.equals(that.sequencerName)) return false; if (!this.sequencedPropertyName.equals(that.sequencedPropertyName)) return false; return true; } return false; } } }
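// A configuration sketch for SequencingService based only on the setters and javadoc
// above; it assumes the caller supplies an ExecutionContext implementation and a
// SequencerConfig instance, and the demo class and method names are illustrative only.
package org.jboss.dna.repository.sequencers.demo;

import java.util.concurrent.Executors;

import javax.jcr.observation.Event;

import org.jboss.dna.repository.observation.NodeChange;
import org.jboss.dna.repository.sequencers.SequencerConfig;
import org.jboss.dna.repository.sequencers.SequencingService;
import org.jboss.dna.repository.util.ExecutionContext;

public class SequencingServiceSetupSketch {

    public SequencingService configure(final ExecutionContext context, final SequencerConfig sequencerConfig) {
        final SequencingService service = new SequencingService();
        // The execution context supplies the SessionFactory used to open JCR sessions.
        service.setExecutionContext(context);
        // Replace the default single-threaded executor with a small fixed pool.
        service.setExecutorService(Executors.newFixedThreadPool(2));
        // Custom filter: only newly added nodes are submitted for sequencing.
        service.setNodeFilter(new SequencingService.NodeFilter() {
            public boolean accept(final NodeChange nodeChange) {
                return nodeChange.includesEventTypes(Event.NODE_ADDED);
            }
        });
        service.addSequencer(sequencerConfig);
        // The service is created in the PAUSED state and must be started explicitly.
        service.getAdministrator().start();
        return service;
    }
}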
package org.drools.eclipse.debug; import java.io.EOFException; import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.ObjectInputStream; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Stack; import org.drools.audit.event.ActivationLogEvent; import org.drools.audit.event.LogEvent; import org.drools.audit.event.ObjectLogEvent; import org.drools.audit.event.RuleBaseLogEvent; import org.drools.audit.event.RuleFlowGroupLogEvent; import org.drools.audit.event.RuleFlowLogEvent; import org.drools.audit.event.RuleFlowNodeLogEvent; import org.drools.eclipse.DroolsEclipsePlugin; import org.drools.eclipse.DroolsPluginImages; import org.drools.eclipse.debug.actions.DeleteLogAction; import org.drools.eclipse.debug.actions.OpenLogAction; import org.drools.eclipse.debug.actions.RefreshLogAction; import org.drools.eclipse.debug.actions.ShowEventCauseAction; import org.eclipse.debug.ui.AbstractDebugView; import org.eclipse.jface.action.GroupMarker; import org.eclipse.jface.action.IAction; import org.eclipse.jface.action.IMenuManager; import org.eclipse.jface.action.IToolBarManager; import org.eclipse.jface.viewers.IColorProvider; import org.eclipse.jface.viewers.ISelection; import org.eclipse.jface.viewers.ISelectionChangedListener; import org.eclipse.jface.viewers.IStructuredSelection; import org.eclipse.jface.viewers.LabelProvider; import org.eclipse.jface.viewers.SelectionChangedEvent; import org.eclipse.jface.viewers.TreeViewer; import org.eclipse.jface.viewers.Viewer; import org.eclipse.swt.graphics.Color; import org.eclipse.swt.graphics.Image; import org.eclipse.swt.widgets.Composite; import org.eclipse.ui.IMemento; import org.eclipse.ui.IViewSite; import org.eclipse.ui.IWorkbenchActionConstants; import org.eclipse.ui.PartInitException; import com.thoughtworks.xstream.XStream; import com.thoughtworks.xstream.io.StreamException; public class AuditView extends AbstractDebugView { private static final String LOG_FILE_NAME = "LogFileName"; private static final String CAUSE_EVENT_COLOR = "CauseEventColor"; private String logFileName; private IAction deleteAction; private IAction refreshAction; protected Viewer createViewer(Composite parent) { final TreeViewer variablesViewer = new TreeViewer(parent); variablesViewer.setContentProvider(new AuditViewContentProvider()); variablesViewer.setLabelProvider(new AuditLabelProvider()); variablesViewer.setUseHashlookup(true); variablesViewer.addSelectionChangedListener(new ISelectionChangedListener() { public void selectionChanged(SelectionChangedEvent event) { getViewer().refresh(); } }); return variablesViewer; } public void setLogFile(String logFileName) { this.logFileName = logFileName; refresh(); deleteAction.setEnabled(logFileName != null); refreshAction.setEnabled(logFileName != null); } @SuppressWarnings("unchecked") public void refresh() { if (logFileName == null) { getViewer().setInput(null); return; } List<LogEvent> eventList = new ArrayList<LogEvent>(); try { XStream xstream = new XStream(); ObjectInputStream in = xstream.createObjectInputStream( new FileReader(logFileName)); try { while (true) { Object object = in.readObject(); if (object instanceof LogEvent) { eventList.add((LogEvent) object); } else if (object instanceof List) { eventList.addAll((List<LogEvent>) object); } else { throw new IllegalArgumentException("Unexpected element in log: " + object); } } } catch (StreamException 
e) { if (!(e.getCause() instanceof EOFException)) { throw e; } } catch (EOFException e) { // do nothing } } catch (FileNotFoundException e) { setLogFile(null); } catch (Throwable t) { DroolsEclipsePlugin.log(t); } getViewer().setInput(createEventList(eventList)); // TODO: this is necessary because otherwise, the show cause action // cannot find the cause event if it hasn't been shown yet ((TreeViewer) getViewer()).expandAll(); } protected List<Event> createEventList(List<LogEvent> logEvents) { Iterator<LogEvent> iterator = logEvents.iterator(); List<Event> events = new ArrayList<Event>(); Stack<Event> beforeEvents = new Stack<Event>(); List<Event> newActivations = new ArrayList<Event>(); Map<String, Event> activationMap = new HashMap<String, Event>(); Map<Long, Event> objectMap = new HashMap<Long, Event>(); while (iterator.hasNext()) { LogEvent inEvent = (LogEvent) iterator.next(); Event event = new Event(inEvent.getType()); switch (inEvent.getType()) { case LogEvent.INSERTED: ObjectLogEvent inObjectEvent = (ObjectLogEvent) inEvent; event.setString("Object inserted (" + inObjectEvent.getFactId() + "): " + inObjectEvent.getObjectToString()); if (!beforeEvents.isEmpty()) { ((Event) beforeEvents.peek()).addSubEvent(event); } else { events.add(event); } event.addSubEvents(newActivations); newActivations.clear(); objectMap.put(new Long(((ObjectLogEvent) inEvent).getFactId()), event); break; case LogEvent.UPDATED: inObjectEvent = (ObjectLogEvent) inEvent; event.setString("Object updated (" + inObjectEvent.getFactId() + "): " + inObjectEvent.getObjectToString()); if (!beforeEvents.isEmpty()) { ((Event) beforeEvents.peek()).addSubEvent(event); } else { events.add(event); } event.addSubEvents(newActivations); newActivations.clear(); Event assertEvent = (Event) objectMap.get(new Long(((ObjectLogEvent) inEvent).getFactId())); if (assertEvent != null) { event.setCauseEvent(assertEvent); } break; case LogEvent.RETRACTED: inObjectEvent = (ObjectLogEvent) inEvent; event.setString("Object removed (" + inObjectEvent.getFactId() + "): " + inObjectEvent.getObjectToString()); if (!beforeEvents.isEmpty()) { ((Event) beforeEvents.peek()).addSubEvent(event); } else { events.add(event); } event.addSubEvents(newActivations); newActivations.clear(); assertEvent = (Event) objectMap.get(new Long(((ObjectLogEvent) inEvent).getFactId())); if (assertEvent != null) { event.setCauseEvent(assertEvent); } break; case LogEvent.ACTIVATION_CREATED: ActivationLogEvent inActivationEvent = (ActivationLogEvent) inEvent; event.setString("Activation created: Rule " + inActivationEvent.getRule() + " " + inActivationEvent.getDeclarations()); newActivations.add(event); activationMap.put(((ActivationLogEvent) inEvent).getActivationId(), event); break; case LogEvent.ACTIVATION_CANCELLED: inActivationEvent = (ActivationLogEvent) inEvent; event.setString("Activation cancelled: Rule " + inActivationEvent.getRule() + " " + inActivationEvent.getDeclarations()); newActivations.add(event); event.setCauseEvent((Event) activationMap.get(((ActivationLogEvent) inEvent).getActivationId())); break; case LogEvent.BEFORE_ACTIVATION_FIRE: inActivationEvent = (ActivationLogEvent) inEvent; event.setString("Activation executed: Rule " + inActivationEvent.getRule() + " " + inActivationEvent.getDeclarations()); events.add(event); beforeEvents.push(event); event.setCauseEvent((Event) activationMap.get(((ActivationLogEvent) inEvent).getActivationId())); break; case LogEvent.AFTER_ACTIVATION_FIRE: beforeEvents.pop(); break; case 
LogEvent.BEFORE_RULEFLOW_CREATED: RuleFlowLogEvent inRuleFlowEvent = (RuleFlowLogEvent) inEvent; event.setString("RuleFlow started: " + inRuleFlowEvent.getProcessName() + "[" + inRuleFlowEvent.getProcessId() + "]"); if (!beforeEvents.isEmpty()) { ((Event) beforeEvents.peek()).addSubEvent(event); } else { events.add(event); } beforeEvents.push(event); break; case LogEvent.AFTER_RULEFLOW_CREATED: beforeEvents.pop(); break; case LogEvent.BEFORE_RULEFLOW_COMPLETED: inRuleFlowEvent = (RuleFlowLogEvent) inEvent; event.setString("RuleFlow completed: " + inRuleFlowEvent.getProcessName() + "[" + inRuleFlowEvent.getProcessId() + "]"); if (!beforeEvents.isEmpty()) { ((Event) beforeEvents.peek()).addSubEvent(event); } else { events.add(event); } beforeEvents.push(event); break; case LogEvent.AFTER_RULEFLOW_COMPLETED: beforeEvents.pop(); break; case LogEvent.BEFORE_RULEFLOW_NODE_TRIGGERED: RuleFlowNodeLogEvent inRuleFlowNodeEvent = (RuleFlowNodeLogEvent) inEvent; event.setString("RuleFlow node triggered: " + inRuleFlowNodeEvent.getNodeName() + " in process " + inRuleFlowNodeEvent.getProcessName() + "[" + inRuleFlowNodeEvent.getProcessId() + "]"); if (!beforeEvents.isEmpty()) { ((Event) beforeEvents.peek()).addSubEvent(event); } else { events.add(event); } beforeEvents.push(event); break; case LogEvent.AFTER_RULEFLOW_NODE_TRIGGERED: beforeEvents.pop(); break; case LogEvent.BEFORE_RULEFLOW_GROUP_ACTIVATED: RuleFlowGroupLogEvent inRuleFlowGroupEvent = (RuleFlowGroupLogEvent) inEvent; event.setString("RuleFlowGroup activated: " + inRuleFlowGroupEvent.getGroupName() + "[size=" + inRuleFlowGroupEvent.getSize() + "]"); if (!beforeEvents.isEmpty()) { ((Event) beforeEvents.peek()).addSubEvent(event); } else { events.add(event); } beforeEvents.push(event); break; case LogEvent.AFTER_RULEFLOW_GROUP_ACTIVATED: beforeEvents.pop(); break; case LogEvent.BEFORE_RULEFLOW_GROUP_DEACTIVATED: inRuleFlowGroupEvent = (RuleFlowGroupLogEvent) inEvent; event.setString("RuleFlowGroup deactivated: " + inRuleFlowGroupEvent.getGroupName() + "[size=" + inRuleFlowGroupEvent.getSize() + "]"); if (!beforeEvents.isEmpty()) { ((Event) beforeEvents.peek()).addSubEvent(event); } else { events.add(event); } beforeEvents.push(event); break; case LogEvent.AFTER_RULEFLOW_GROUP_DEACTIVATED: beforeEvents.pop(); break; case LogEvent.BEFORE_PACKAGE_ADDED: RuleBaseLogEvent ruleBaseEvent = (RuleBaseLogEvent) inEvent; event.setString("Package added: " + ruleBaseEvent.getPackageName()); if (!beforeEvents.isEmpty()) { ((Event) beforeEvents.peek()).addSubEvent(event); } else { events.add(event); } beforeEvents.push(event); break; case LogEvent.AFTER_PACKAGE_ADDED: beforeEvents.pop(); break; case LogEvent.BEFORE_PACKAGE_REMOVED: ruleBaseEvent = (RuleBaseLogEvent) inEvent; event.setString("Package removed: " + ruleBaseEvent.getPackageName()); if (!beforeEvents.isEmpty()) { ((Event) beforeEvents.peek()).addSubEvent(event); } else { events.add(event); } beforeEvents.push(event); break; case LogEvent.AFTER_PACKAGE_REMOVED: beforeEvents.pop(); break; case LogEvent.BEFORE_RULE_ADDED: ruleBaseEvent = (RuleBaseLogEvent) inEvent; event.setString("Rule added: " + ruleBaseEvent.getRuleName()); if (!beforeEvents.isEmpty()) { ((Event) beforeEvents.peek()).addSubEvent(event); } else { events.add(event); } beforeEvents.push(event); break; case LogEvent.AFTER_RULE_ADDED: if (!beforeEvents.isEmpty()) { Event beforeEvent = (Event) beforeEvents.pop(); beforeEvent.addSubEvents(newActivations); newActivations.clear(); } break; case LogEvent.BEFORE_RULE_REMOVED: 
ruleBaseEvent = (RuleBaseLogEvent) inEvent; event.setString("Rule removed: " + ruleBaseEvent.getRuleName()); if (!beforeEvents.isEmpty()) { ((Event) beforeEvents.peek()).addSubEvent(event); } else { events.add(event); } beforeEvents.push(event); break; case LogEvent.AFTER_RULE_REMOVED: if (!beforeEvents.isEmpty()) { Event beforeEvent = (Event) beforeEvents.pop(); beforeEvent.addSubEvents(newActivations); newActivations.clear(); } break; default: // do nothing break; } } return events; } public void deleteLog() { if (logFileName != null) { File file = new File(logFileName); try { file.delete(); // TODO delete file cause this doesn't seem to work setLogFile(null); refresh(); } catch (Throwable t) { t.printStackTrace(); DroolsEclipsePlugin.log(t); } } } protected void becomesVisible() { refresh(); } protected String getHelpContextId() { return null; } public Event getSelectedEvent() { ISelection selection = getViewer().getSelection(); if (selection instanceof IStructuredSelection) { Object selected = ((IStructuredSelection) selection).getFirstElement(); if (selected instanceof Event) { return (Event) selected; } } return null; } public void showEvent(Event event) { ((TreeViewer) getViewer()).reveal(event); } protected void fillContextMenu(IMenuManager menu) { Event selected = getSelectedEvent(); if (selected != null) { Event causeEvent = selected.getCauseEvent(); if (causeEvent != null) { menu.add(getAction("ShowEventCause")); } } menu.add(new GroupMarker(IWorkbenchActionConstants.MB_ADDITIONS)); } protected void createActions() { deleteAction = new DeleteLogAction(this); setAction("ClearLog", deleteAction); deleteAction.setEnabled(logFileName != null); refreshAction = new RefreshLogAction(this); setAction("RefreshLog", refreshAction); refreshAction.setEnabled(logFileName != null); IAction action = new OpenLogAction(this); setAction("OpenLog", action); action = new ShowEventCauseAction(this); setAction("ShowEventCause", action); } protected void configureToolBar(IToolBarManager tbm) { tbm.add(getAction("OpenLog")); tbm.add(getAction("RefreshLog")); tbm.add(getAction("ClearLog")); } public void saveState(IMemento memento) { memento.putString(LOG_FILE_NAME, logFileName); } public void init(IViewSite site, IMemento memento) throws PartInitException { super.init(site, memento); if (memento != null) { logFileName = memento.getString(LOG_FILE_NAME); } } public class Event { private String toString; private int type; private List<Event> subEvents = new ArrayList<Event>(); private Event causeEvent; public Event(int type) { this.type = type; } public void setString(String toString) { this.toString = toString; } public String toString() { return toString; } public int getType() { return type; } public void addSubEvent(Event subEvent) { subEvents.add(subEvent); } public void addSubEvents(Collection<Event> subEvents) { this.subEvents.addAll(subEvents); } public Object[] getSubEvents() { return subEvents.toArray(); } public boolean hasSubEvents() { return !subEvents.isEmpty(); } public void setCauseEvent(Event causeEvent) { this.causeEvent = causeEvent; } public Event getCauseEvent() { return causeEvent; } } public class AuditLabelProvider extends LabelProvider implements IColorProvider { public Color getForeground(Object element) { return null; } public Color getBackground(Object element) { Event selected = getSelectedEvent(); if (selected != null) { if (element.equals(selected.getCauseEvent())) { Color color = DroolsEclipsePlugin.getDefault().getColor(CAUSE_EVENT_COLOR); if (color == null) { color = new 
Color(getControl().getDisplay(), 0, 255, 0); DroolsEclipsePlugin.getDefault().setColor(CAUSE_EVENT_COLOR, color); } return color; } } return null; } public Image getImage(Object element) { if (element instanceof Event) { int type = ((Event) element).getType(); switch (type) { case LogEvent.INSERTED: return DroolsPluginImages.getImage(DroolsPluginImages.INSERT); case LogEvent.UPDATED: return DroolsPluginImages.getImage(DroolsPluginImages.UPDATE); case LogEvent.RETRACTED: return DroolsPluginImages.getImage(DroolsPluginImages.RETRACT); case LogEvent.ACTIVATION_CREATED: return DroolsPluginImages.getImage(DroolsPluginImages.CREATE_ACTIVATION); case LogEvent.ACTIVATION_CANCELLED: return DroolsPluginImages.getImage(DroolsPluginImages.CANCEL_ACTIVATION); case LogEvent.BEFORE_ACTIVATION_FIRE: return DroolsPluginImages.getImage(DroolsPluginImages.EXECUTE_ACTIVATION); case LogEvent.BEFORE_RULEFLOW_CREATED: return DroolsPluginImages.getImage(DroolsPluginImages.RULEFLOW); case LogEvent.BEFORE_RULEFLOW_COMPLETED: return DroolsPluginImages.getImage(DroolsPluginImages.RULEFLOW); case LogEvent.BEFORE_RULEFLOW_NODE_TRIGGERED: return DroolsPluginImages.getImage(DroolsPluginImages.RULEFLOW_NODE_TRIGGERED); case LogEvent.BEFORE_RULEFLOW_GROUP_ACTIVATED: return DroolsPluginImages.getImage(DroolsPluginImages.RULEFLOW_GROUP); case LogEvent.BEFORE_RULEFLOW_GROUP_DEACTIVATED: return DroolsPluginImages.getImage(DroolsPluginImages.RULEFLOW_GROUP); case LogEvent.BEFORE_PACKAGE_ADDED: return DroolsPluginImages.getImage(DroolsPluginImages.DROOLS); case LogEvent.BEFORE_PACKAGE_REMOVED: return DroolsPluginImages.getImage(DroolsPluginImages.DROOLS); case LogEvent.BEFORE_RULE_ADDED: return DroolsPluginImages.getImage(DroolsPluginImages.DROOLS); case LogEvent.BEFORE_RULE_REMOVED: return DroolsPluginImages.getImage(DroolsPluginImages.DROOLS); } return null; } return null; } } }
package io.dropwizard.client; import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.health.HealthCheck; import io.dropwizard.Application; import io.dropwizard.Configuration; import io.dropwizard.jackson.Jackson; import io.dropwizard.jersey.validation.Validators; import io.dropwizard.setup.Environment; import io.dropwizard.testing.ResourceHelpers; import io.dropwizard.testing.junit.DropwizardAppRule; import io.dropwizard.util.Duration; import org.apache.http.Header; import org.apache.http.HttpStatus; import org.apache.http.ProtocolVersion; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.conn.ConnectTimeoutException; import org.apache.http.conn.HttpHostConnectException; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.message.BasicHeader; import org.apache.http.message.BasicStatusLine; import org.assertj.core.api.AbstractLongAssert; import org.eclipse.jetty.util.component.LifeCycle; import org.glassfish.jersey.client.ClientProperties; import org.glassfish.jersey.client.ClientRequest; import org.glassfish.jersey.client.ClientResponse; import org.glassfish.jersey.client.JerseyClient; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.mockito.Matchers; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.ProcessingException; import javax.ws.rs.client.WebTarget; import javax.ws.rs.core.MultivaluedHashMap; import javax.ws.rs.core.Response; import java.net.NoRouteToHostException; import java.net.SocketTimeoutException; import java.net.URI; import java.util.concurrent.TimeUnit; import static org.assertj.core.api.Assertions.assertThat; import static org.hamcrest.CoreMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class DropwizardApacheConnectorTest { private static final int SLEEP_TIME_IN_MILLIS = 1000; private static final int DEFAULT_CONNECT_TIMEOUT_IN_MILLIS = 500; private static final int ERROR_MARGIN_IN_MILLIS = 300; private static final int INCREASE_IN_MILLIS = 100; private static final URI NON_ROUTABLE_ADDRESS = URI.create("http://10.255.255.1"); @ClassRule public static final DropwizardAppRule<Configuration> APP_RULE = new DropwizardAppRule<>( TestApplication.class, ResourceHelpers.resourceFilePath("yaml/dropwizardApacheConnectorTest.yml")); @Rule public ExpectedException thrown = ExpectedException.none(); private final URI testUri = URI.create("http://localhost:" + APP_RULE.getLocalPort()); private JerseyClient client; private Environment environment; @Before public void setup() throws Exception { JerseyClientConfiguration clientConfiguration = new JerseyClientConfiguration(); clientConfiguration.setConnectionTimeout(Duration.milliseconds(SLEEP_TIME_IN_MILLIS / 2)); clientConfiguration.setTimeout(Duration.milliseconds(DEFAULT_CONNECT_TIMEOUT_IN_MILLIS)); environment = new Environment("test-dropwizard-apache-connector", Jackson.newObjectMapper(), Validators.newValidator(), new MetricRegistry(), getClass().getClassLoader()); client = (JerseyClient) new JerseyClientBuilder(environment) .using(clientConfiguration) .build("test"); for (LifeCycle lifeCycle : environment.lifecycle().getManagedObjects()) { lifeCycle.start(); } } @After public void tearDown() throws Exception { for (LifeCycle lifeCycle : environment.lifecycle().getManagedObjects()) { lifeCycle.stop(); } assertThat(client.isClosed()).isTrue(); } @Test public 
void when_no_read_timeout_override_then_client_request_times_out() { thrown.expect(ProcessingException.class); thrown.expectCause(any(SocketTimeoutException.class)); client.target(testUri + "/long_running") .request() .get(); } @Test public void when_read_timeout_override_created_then_client_requests_completes_successfully() { client.target(testUri + "/long_running") .property(ClientProperties.READ_TIMEOUT, SLEEP_TIME_IN_MILLIS * 2) .request() .get(); } /** * <p>In first assertion we prove, that a request takes no longer than: * <em>request_time < connect_timeout + error_margin</em> (1)</p> * <p/> * </p>In the second we show that if we set <b>connect_timeout</b> to * <b>set_connect_timeout + increase + error_margin</b> then * <em>request_time > connect_timeout + increase + error_margin</em> (2)</p> * <p/> * <p>Now, (1) and (2) can hold at the same time if then connect_timeout update was successful.</p> */ @Test public void connect_timeout_override_changes_how_long_it_takes_for_a_connection_to_timeout() { // before override WebTarget target = client.target(NON_ROUTABLE_ADDRESS); //This can't be tested without a real connection try { target.request().get(Response.class); } catch (ProcessingException e) { if (e.getCause() instanceof HttpHostConnectException) { return; } } assertThatConnectionTimeoutFor(target).isLessThan(DEFAULT_CONNECT_TIMEOUT_IN_MILLIS + ERROR_MARGIN_IN_MILLIS); // after override final int newTimeout = DEFAULT_CONNECT_TIMEOUT_IN_MILLIS + INCREASE_IN_MILLIS + ERROR_MARGIN_IN_MILLIS; final WebTarget newTarget = target.property(ClientProperties.CONNECT_TIMEOUT, newTimeout); assertThatConnectionTimeoutFor(newTarget).isGreaterThan(newTimeout); } @Test public void when_no_override_then_redirected_request_successfully_redirected() { assertThat(client.target(testUri + "/redirect") .request() .get(String.class) ).isEqualTo("redirected"); } @Test public void when_configuration_overridden_to_disallow_redirects_temporary_redirect_status_returned() { assertThat(client.target(testUri + "/redirect") .property(ClientProperties.FOLLOW_REDIRECTS, false) .request() .get(Response.class) .getStatus() ).isEqualTo(HttpStatus.SC_TEMPORARY_REDIRECT); } @Test public void when_jersey_client_runtime_is_garbage_collected_apache_client_is_not_closed() { for (int j = 0; j < 5; j++) { System.gc(); // We actually want GC here final String response = client.target(testUri + "/long_running") .property(ClientProperties.READ_TIMEOUT, SLEEP_TIME_IN_MILLIS * 2) .request() .get(String.class); assertThat(response).isEqualTo("success"); } } @Test public void multiple_headers_with_the_same_name_are_processed_successfully() throws Exception { final CloseableHttpClient client = mock(CloseableHttpClient.class); final DropwizardApacheConnector dropwizardApacheConnector = new DropwizardApacheConnector(client, null, false); final Header[] apacheHeaders = { new BasicHeader("Set-Cookie", "test1"), new BasicHeader("Set-Cookie", "test2") }; final CloseableHttpResponse apacheResponse = mock(CloseableHttpResponse.class); when(apacheResponse.getStatusLine()).thenReturn(new BasicStatusLine(new ProtocolVersion("HTTP", 1, 1), 200, "OK")); when(apacheResponse.getAllHeaders()).thenReturn(apacheHeaders); when(client.execute(Matchers.any())).thenReturn(apacheResponse); final ClientRequest jerseyRequest = mock(ClientRequest.class); when(jerseyRequest.getUri()).thenReturn(URI.create("http://localhost")); when(jerseyRequest.getMethod()).thenReturn("GET"); when(jerseyRequest.getHeaders()).thenReturn(new MultivaluedHashMap<>()); final 
ClientResponse jerseyResponse = dropwizardApacheConnector.apply(jerseyRequest); assertThat(jerseyResponse.getStatus()).isEqualTo(apacheResponse.getStatusLine().getStatusCode()); } @Path("/") public static class TestResource { @GET @Path("/long_running") public String getWithSleep() throws InterruptedException { TimeUnit.MILLISECONDS.sleep(SLEEP_TIME_IN_MILLIS); return "success"; } @GET @Path("redirect") public Response getWithRedirect() { return Response.temporaryRedirect(URI.create("/redirected")).build(); } @GET @Path("redirected") public String redirectedGet() { return "redirected"; } } public static class TestApplication extends Application<Configuration> { public static void main(String[] args) throws Exception { new TestApplication().run(args); } @Override public void run(Configuration configuration, Environment environment) throws Exception { environment.jersey().register(TestResource.class); environment.healthChecks().register("dummy", new HealthCheck() { @Override protected Result check() throws Exception { return Result.healthy(); } }); } } private static AbstractLongAssert<?> assertThatConnectionTimeoutFor(WebTarget webTarget) { final long startTime = System.nanoTime(); try { webTarget.request().get(Response.class); } catch (ProcessingException e) { final long endTime = System.nanoTime(); assertThat(e).isNotNull(); //noinspection ConstantConditions assertThat(e.getCause()).isNotNull(); assertThat(e.getCause()).isInstanceOfAny(ConnectTimeoutException.class, NoRouteToHostException.class); return assertThat(TimeUnit.MILLISECONDS.convert(endTime - startTime, TimeUnit.NANOSECONDS)); } throw new AssertionError("ProcessingException expected but not thrown"); } }
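/*
 * Worked numbers behind the connect_timeout_override test above (an illustrative sketch, not part
 * of the original test). It restates the (1)/(2) reasoning from the test's javadoc using the
 * constants declared in DropwizardApacheConnectorTest.
 */
public class ConnectTimeoutArithmeticSketch {
    public static void main(String[] args) {
        final int defaultConnectTimeout = 500; // DEFAULT_CONNECT_TIMEOUT_IN_MILLIS
        final int errorMargin = 300;           // ERROR_MARGIN_IN_MILLIS
        final int increase = 100;              // INCREASE_IN_MILLIS
        // (1) Before the override, the failing request must return within the default timeout plus margin.
        System.out.println("upper bound before override = " + (defaultConnectTimeout + errorMargin) + " ms"); // 800 ms
        // (2) The override raises CONNECT_TIMEOUT to default + increase + margin, and the request must then take longer than that.
        final int overriddenTimeout = defaultConnectTimeout + increase + errorMargin;
        System.out.println("lower bound after override = " + overriddenTimeout + " ms"); // 900 ms
        // (1) and (2) can only hold together if the per-request CONNECT_TIMEOUT override actually took effect.
    }
}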
package org.elasticsearch.xpack.ml.support; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.xpack.ml.MlPlugin; import org.elasticsearch.xpack.ml.action.CloseJobAction; import org.elasticsearch.xpack.ml.action.DeleteDatafeedAction; import org.elasticsearch.xpack.ml.action.DeleteJobAction; import org.elasticsearch.xpack.ml.action.GetDatafeedsStatsAction; import org.elasticsearch.xpack.ml.action.StopDatafeedAction; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.ml.datafeed.DatafeedState; import org.elasticsearch.xpack.ml.job.config.AnalysisConfig; import org.elasticsearch.xpack.ml.job.config.DataDescription; import org.elasticsearch.xpack.ml.job.config.Detector; import org.elasticsearch.xpack.ml.job.config.Job; import org.elasticsearch.xpack.ml.job.metadata.MlMetadata; import org.elasticsearch.xpack.persistent.RemovePersistentTaskAction; import java.util.Collections; import java.util.Map; import java.util.concurrent.ExecutionException; import static org.hamcrest.Matchers.equalTo; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, numClientNodes = 0, transportClientRatio = 0, supportsDedicatedMasters = false) public abstract class BaseMlIntegTestCase extends SecurityIntegTestCase { @Override protected boolean ignoreExternalCluster() { return true; } @Override protected Settings nodeSettings(int nodeOrdinal) { Settings.Builder settings = Settings.builder().put(super.nodeSettings(nodeOrdinal)); settings.put(MlPlugin.USE_NATIVE_PROCESS_OPTION.getKey(), false); settings.put(MlPlugin.ML_ENABLED.getKey(), true); return settings.build(); } protected Job.Builder createJob(String id) { DataDescription.Builder dataDescription = new DataDescription.Builder(); dataDescription.setFormat(DataDescription.DataFormat.JSON); dataDescription.setTimeFormat(DataDescription.EPOCH_MS); Detector.Builder d = new Detector.Builder("count", null); AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(d.build())); Job.Builder builder = new Job.Builder(); builder.setId(id); builder.setAnalysisConfig(analysisConfig); builder.setDataDescription(dataDescription); return builder; } // Due to the fact that ml plugin creates the state, notifications and meta indices automatically // when the test framework removes all indices then ml plugin adds them back. 
Causing validation to fail // we should move to templates instead as that will fix the test problem protected void cleanupWorkaround(int numNodes) throws Exception { deleteAllDatafeeds(client()); deleteAllJobs(client()); for (int i = 0; i < numNodes; i++) { internalCluster().stopRandomDataNode(); } internalCluster().startNode(Settings.builder().put(MlPlugin.ML_ENABLED.getKey(), false)); } private void deleteAllDatafeeds(Client client) throws Exception { MetaData metaData = client.admin().cluster().prepareState().get().getState().getMetaData(); MlMetadata mlMetadata = metaData.custom(MlMetadata.TYPE); for (DatafeedConfig datafeed : mlMetadata.getDatafeeds().values()) { String datafeedId = datafeed.getId(); try { RemovePersistentTaskAction.Response stopResponse = client.execute(StopDatafeedAction.INSTANCE, new StopDatafeedAction.Request(datafeedId)).get(); assertTrue(stopResponse.isAcknowledged()); } catch (ExecutionException e) { // CONFLICT is ok, as it means the datafeed has already stopped, which isn't an issue at all. if (RestStatus.CONFLICT != ExceptionsHelper.status(e.getCause())) { throw new RuntimeException(e); } } assertBusy(() -> { try { GetDatafeedsStatsAction.Request request = new GetDatafeedsStatsAction.Request(datafeedId); GetDatafeedsStatsAction.Response r = client.execute(GetDatafeedsStatsAction.INSTANCE, request).get(); assertThat(r.getResponse().results().get(0).getDatafeedState(), equalTo(DatafeedState.STOPPED)); } catch (InterruptedException | ExecutionException e) { throw new RuntimeException(e); } }); DeleteDatafeedAction.Response deleteResponse = client.execute(DeleteDatafeedAction.INSTANCE, new DeleteDatafeedAction.Request(datafeedId)).get(); assertTrue(deleteResponse.isAcknowledged()); } } private void deleteAllJobs(Client client) throws Exception { MetaData metaData = client.admin().cluster().prepareState().get().getState().getMetaData(); MlMetadata mlMetadata = metaData.custom(MlMetadata.TYPE); for (Map.Entry<String, Job> entry : mlMetadata.getJobs().entrySet()) { String jobId = entry.getKey(); try { CloseJobAction.Response response = client.execute(CloseJobAction.INSTANCE, new CloseJobAction.Request(jobId)).get(); assertTrue(response.isClosed()); } catch (Exception e) { // CONFLICT is ok, as it means the job has been closed already, which isn't an issue at all. if (RestStatus.CONFLICT != ExceptionsHelper.status(e.getCause())) { throw new RuntimeException(e); } } DeleteJobAction.Response response = client.execute(DeleteJobAction.INSTANCE, new DeleteJobAction.Request(jobId)).get(); assertTrue(response.isAcknowledged()); } } }
package imcode.util.log; import org.apache.log4j.*; import java.io.*; import imcode.server.WebAppGlobalConstants; public class WebAppFileAppender extends FileAppender { private static File absoluteWebAppPath = new File(WebAppGlobalConstants.getInstance().getAbsoluteWebAppPath()); public WebAppFileAppender() { super(); } public void setFile(String fileName, boolean append ) throws IOException { super.setFile( (new File(absoluteWebAppPath, fileName)).toString(), append ); } }
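/*
 * Hypothetical usage sketch for WebAppFileAppender above (not part of the original source). It
 * assumes the code runs inside the web application so that WebAppGlobalConstants can resolve the
 * webapp root; the relative file name passed to setFile is then rewritten to a path under that
 * root. The log file name and layout pattern below are illustrative.
 */
package imcode.util.log;

import java.io.IOException;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;

public class WebAppFileAppenderExample {
    public static void main(String[] args) throws IOException {
        WebAppFileAppender appender = new WebAppFileAppender();
        appender.setLayout(new PatternLayout("%d %-5p %c - %m%n"));
        appender.setFile("WEB-INF/logs/example.log", true); // resolved relative to the webapp directory
        appender.activateOptions();
        Logger.getRootLogger().addAppender(appender);
        Logger.getRootLogger().info("logging to a file inside the web application directory");
    }
}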
package io.tilt.minka.api; import static io.tilt.minka.api.config.SchedulerSettings.THREAD_NAME_WEBSERVER_KERNEL; import static io.tilt.minka.api.config.SchedulerSettings.THREAD_NAME_WEBSERVER_WORKER; import java.io.File; import java.io.IOException; import java.io.Serializable; import java.net.ServerSocket; import java.net.URI; import java.util.Iterator; import java.util.Map; import java.util.Random; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.Validate; import org.glassfish.grizzly.http.server.HttpServer; import org.glassfish.grizzly.http.server.NetworkListener; import org.glassfish.grizzly.nio.transport.TCPNIOTransport; import org.glassfish.grizzly.threadpool.GrizzlyExecutorService; import org.glassfish.grizzly.threadpool.ThreadPoolConfig; import org.glassfish.jersey.grizzly2.httpserver.GrizzlyHttpServerFactory; import org.glassfish.jersey.server.ResourceConfig; import org.glassfish.jersey.uri.internal.JerseyUriBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.context.support.ClassPathXmlApplicationContext; import io.tilt.minka.api.config.BootstrapConfiguration; import io.tilt.minka.api.config.BrokerConfiguration; import io.tilt.minka.api.inspect.AdminEndpoint; import io.tilt.minka.domain.AwaitingDelegate; import io.tilt.minka.domain.DependencyPlaceholder; import io.tilt.minka.domain.TCPShardIdentifier; import io.tilt.minka.utils.LogUtils; /** * System initiator and holder.<br> * Use an {@linkplain EventMapper} to map required and optional sharding events, or use a custom implementation * of {@linkplain PartitionMaster} and {@linkplain PartitionDelegate} <br> * <br> * Each server instance will spawn the underlying context which connects to Zookkeeper and starts network broker services.<br> * Although many instances can coexist within the same JVM under different namespaces, is not recommended. * You should only create one instance per application willing to distribute duties .<br> * <br> * By default all Minka services excluding broker service, run several tasks within a one-thread-only ThreadPoolExecutor.<br> * After an instance is created: the context gets initialized and waits for {@linkplain EventMapper::done}. <br> * * @author Cristian Gonzalez * @since Sept 20, 2016 * @param <D> the duty payload type * @param <P> the pallet payload type */ public class Server<D extends Serializable, P extends Serializable> { private static final String CONTEXT_PATH = "classpath:io/tilt/minka/config/context-minka-spring.xml"; protected static final Logger logger = LoggerFactory.getLogger(Server.class); private final String name = getClass().getSimpleName(); /* to enable many minka shards on the same JVM */ private static final Map<String, Tenant> tenants = new ConcurrentHashMap<>(); private static Lock lock = new ReentrantLock(); /* current holder's tenant, and one for each instance held by applications */ private Tenant tenant; private EventMapper<D, P> mapper; /** * Create a Minka server. All mandatory events must be mapped to consumers/suppliers. 
* @param jsonFormatConfig with a configuration in a JSON file, * whose format must comply {@linkplain Config} class serialization * @throws Exception when given file is invalid * */ public Server(final File jsonFormatConfig) throws Exception { Validate.notNull(jsonFormatConfig); Config config = Config.fromJsonFile(jsonFormatConfig); init(config); } /** * Create a Minka server. All mandatory events must be mapped to consumers/suppliers. * @param config Create a Minka server with a specific configuration */ public Server(final Config config) { Validate.notNull(config); init(config); } /** * Create a Minka server. All mandatory events must be mapped to consumers/suppliers. * one liner to custom main TCP hostname/ports only * @param zookeeperConnectionString in the form hostname:port/chroot * @param minkaHostPort in the form hostname:port * @param namespace all cluster members must reach themselves within it */ public Server(final String zookeeperConnectionString, final String minkaHostPort, final String namespace) { Validate.notNull(zookeeperConnectionString); Validate.notNull(minkaHostPort); Validate.notNull(namespace); final Config config = new Config(zookeeperConnectionString, minkaHostPort); config.getBootstrap().setNamespace(namespace); init(config); } /** * Create a Minka server with default configuration. * All mandatory events must be mapped to consumers/suppliers. * Shard will attempt to take a port over 5748, trying increased ports if busy. * Rest API will take port 57480 * * @param zookeeperConnectionString in the zookeeper form hostname:port/chroot * @param namespace all cluster members must reach themselves within it */ public Server(final String zookeeperConnectionString, final String namespace) { Validate.notNull(zookeeperConnectionString); Validate.notNull(namespace); Config config = new Config(zookeeperConnectionString); config.getBootstrap().setNamespace(namespace); init(config); } /** * Create a Minka server with default configuration. * All mandatory events must be mapped to consumers/suppliers. * Shard will attempt to take a port over 5748, trying increased ports if busy. */ public Server() { init(new Config()); } public Config getConfig() { if (tenant!=null) { return tenant.getConfig(); } else { throw new IllegalStateException("server already shutdown"); } } /** * Used when avoiding a client implementation of {@linkplain PartitionMaster}. 
* @return the event mapper instance associated with this server */ public EventMapper<D, P> getEventMapper() { if (tenant!=null) { return this.mapper; } else { throw new IllegalStateException("server already shutdown"); } } private void init(final Config config) { createTenant(config); logger.info("{}: Initializing context for namespace: {}", name, config.getBootstrap().getNamespace()); Runtime.getRuntime().addShutdownHook(new Thread(() -> destroy(false))); tenant.setConfig(config); final ClassPathXmlApplicationContext ctx = new ClassPathXmlApplicationContext(new String[] { CONTEXT_PATH }, false); tenant.setContext(ctx); ctx.addBeanFactoryPostProcessor(beanFactory-> beanFactory.registerSingleton("config", config)); final String namespace = config.getBootstrap().getNamespace(); ctx.setDisplayName(new StringBuilder("minka-") .append(namespace) .append("-ts:") .append(System.currentTimeMillis()) .toString()); //logger.info("{}: Using configuration: {}", name, config.toString()); ctx.setId(namespace); mapper = new EventMapper<D, P>(tenant); startContext(config); } private void createTenant(final Config config) { try { lock.tryLock(500l, TimeUnit.MILLISECONDS); } catch (InterruptedException e) { throw new IllegalThreadStateException("Other Server instances are being created concurrently (lock wait exhausted)"); } final String namespace = config.getBootstrap().getNamespace(); RuntimeException runtime = null; try { // namespace cannot be default or preexistent if ZK's chroot is not set final boolean chrootUsed = config.getBootstrap().getZookeeperHostPort().indexOf('/') > 0; final boolean duplicateName = tenants.containsKey(namespace); final boolean vmLimitAware = config.getBootstrap().isDropVMLimit(); if (!chrootUsed && duplicateName && !vmLimitAware) { runtime = exceptionSameName(namespace); } else { if (!vmLimitAware) { if (!duplicateName) { final long maxTenants = config.getBootstrap().getMaxServicesPerMachine(); if (tenants.size()>maxTenants) { runtime = exceptionMaxTenants(); } } else { runtime = exceptionMaxTenantsSameVM(); } } } for (Tenant t: tenants.values()) { logger.warn("Other tenant within the VM on the namespace: {} on broker hostport: {}", t.getConfig().getBootstrap().getNamespace(), t.getConfig().getBroker().getHostPort()); } if (duplicateName && !vmLimitAware) { // client should not depend on it anyway final String newName = namespace + "_" + new Random(System.currentTimeMillis()).nextInt(999999); logger.warn("{}: Overwritting service name: {} to avoid colission with other servers within the same JVM", name, newName); config.getBootstrap().setNamespace(newName); } if (runtime == null) { tenants.put(config.getBootstrap().getNamespace(), tenant = new Tenant()); } } finally { lock.unlock(); } if (runtime != null) { throw runtime; } } private IllegalArgumentException exceptionMaxTenantsSameVM() { return new IllegalArgumentException(new StringBuilder() .append(tenant.getConnectReference()) .append(": There're ") .append(tenants.size()) .append(" server/s already") .append(" in this VM with the same service-name: set a different one") .toString()); } private IllegalStateException exceptionMaxTenants() { return new IllegalStateException(new StringBuilder() .append(tenant.getConnectReference()) .append(": There's been created ") .append(tenants.size()) .append(" server/s already in this VM. 
If you indeed want that many: ") .append(" increase bootstrap's MAX_SERVICES_PER_MACHINE default value") .toString()); } private IllegalArgumentException exceptionSameName(final String namespace) { return new IllegalArgumentException(new StringBuilder() .append(tenant.getConnectReference()) .append(" a service on the namespace: ") .append(namespace) .append(" already exists!") .toString()); } private void startContext(final Config config) { if (!tenant.getContext().isActive()) { tenant.getContext().refresh(); if (tenant.getConfig().getBootstrap().isEnableWebserver()) { startWebserver(); logger.info(LogUtils.getGreetings( config.getResolvedShardId(), config.getBootstrap().getNamespace(), config.getBootstrap().getWebServerHostPort())); } else { logger.info("{}: {} Webserver disabled by configuration. Enable for a fully functional shard", name, tenant.getConnectReference()); } } else { logger.error("{}: {} Can only load Minka once !", name); } } private void startWebserver() { final ResourceConfig res = new ResourceConfig(AdminEndpoint.class); res.property("contextConfig", tenant.getContext()); final HttpServer webServer = GrizzlyHttpServerFactory.createHttpServer( resolveWebServerBindAddress(tenant.getConfig()), res); final ThreadPoolConfig config = ThreadPoolConfig.defaultConfig() .setCorePoolSize(1) .setMaxPoolSize(1); final Iterator<NetworkListener> it = webServer.getListeners().iterator(); while (it.hasNext()) { final NetworkListener listener = it.next(); logger.info("{}: {} Reconfiguring webserver listener {}", name, tenant.getConnectReference(), listener); final TCPNIOTransport transport = listener.getTransport(); transport.setSelectorRunnersCount(1); ((GrizzlyExecutorService)transport.getWorkerThreadPool()) .reconfigure(config.copy().setPoolName(THREAD_NAME_WEBSERVER_WORKER)); ((GrizzlyExecutorService)transport.getKernelThreadPool()) .reconfigure(config.copy().setPoolName(THREAD_NAME_WEBSERVER_KERNEL)); // note the transport class has an inner channel connector disabled to configure // as an instance private field which sizes the kernel pool to 10, unmodifiable. } // TODO disable ssl etc tenants.get(tenant.getConfig().getBootstrap().getNamespace()).setWebServer(webServer); try { webServer.start(); } catch (IOException e) { logger.info("{}: {} Unable to start web server", name, tenant.getConnectReference(), e); } } /* by default bind to the same broker's host interfase and if changed use broker's port plus 100 */ private URI resolveWebServerBindAddress(final Config config) { final String[] brokerHostPort = config.getBroker().getHostPort().split(":"); final JerseyUriBuilder builder = new JerseyUriBuilder(); final BootstrapConfiguration bs = config.getBootstrap(); final String[] webHostPort = bs.getWebServerHostPort().split(":"); int webPort = Integer.parseInt(webHostPort[1]); final boolean webHostPortUntouched = bs.getWebServerHostPort().equals(BootstrapConfiguration.WEB_SERVER_HOST_PORT); String webhostport; if (webHostPortUntouched) { int brokerPort = Integer.parseInt(brokerHostPort[1]); webPort = brokerPort == BrokerConfiguration.PORT ? 
webPort: brokerPort + 100; final String host = config.getResolvedShardId().getId().split(":")[0]; builder.host(host).port(webPort); webhostport = host + ":" + webPort; } else { builder.host(webHostPort[0]).port(webPort); webhostport = webHostPort[0]+ ":" + webPort; } config.getResolvedShardId().setWebHostPort(webhostport); config.getBootstrap().setWebServerHostPort(webhostport); logger.info("{}: {} Web host:port = {}", name, tenant.getConnectReference(), webhostport); builder.path(config.getBootstrap().getWebServerContextPath()); return builder.build(); } private void checkInit() { if (tenant!=null && !tenant.getContext().isActive()) { throw new IllegalStateException(tenant.getConnectReference() + " Minka service must be started first !"); } } protected synchronized void destroy(final boolean wait) { if (tenant != null && tenant.getContext()!=null && tenant.getContext().isActive()) { try { tenant.getContext().close(); } catch (Exception e) { logger.error("{}: {} Unexpected while destroying context at client call", name, tenant.getConnectReference(), e.getMessage()); } if (tenant.getConfig().getBootstrap().isEnableWebserver() && tenant.getWebServer()!=null) { try { tenant.getWebServer().shutdown(); } catch (Exception e) { logger.error("{}: {} Unexpected while stopping server at client call", name, tenant.getConnectReference(), e.getMessage()); } } tenants.remove(tenant.getConfig().getBootstrap().getNamespace()); if (wait && !holdUntilDisconnect()) { logger.error("{}: {} Couldnt wait for finalization of resources (may still remain open)", name, tenant.getConnectReference()); } tenant = null; } } /** * sleep and block current thread 3 times with 1s delay until broker's host-port is available again * in order to properly enable further tenant systems to initiate with a clean environment */ private boolean holdUntilDisconnect() { final Config c = tenant.getConfig(); final String[] parts = c.getBroker().getHostPort().split(":"); for(int retry = 0; retry < 3; retry++) { try (ServerSocket tmp = new ServerSocket(Integer.parseInt(parts[1]))) { return true; } catch (IOException ioe) { try { Thread.sleep(c.beatToMs(c.getBootstrap().getResourceReleaseWait())); } catch (InterruptedException e) { } } } return false; } private DependencyPlaceholder getDepPlaceholder() { return tenant.getContext().getBean(DependencyPlaceholder.class); } /** * An alternative way of mapping duty and pallet events, thru an implementation class. * @param delegate a fully implementation class of a partition delegate * @return the server builder */ public void setDelegate(final PartitionDelegate<?, ?> delegate) { Validate.notNull(delegate); checkInit(); final DependencyPlaceholder holder = getDepPlaceholder(); Validate.isTrue(holder==null || (holder.getDelegate() instanceof AwaitingDelegate), "You're overwriting previous delegate or event's consumer: " + delegate.getClass().getSimpleName()); logger.info("{}: {} Using new PartitionDelegate: {}", name, tenant.getConnectReference(), delegate.getClass().getSimpleName()); holder.setDelegate(delegate); } /** * An alternative way of mapping duty and pallet events, thru an implementation class. 
* @param master a fully implementation class of a partition master * @return the server builder */ public void setMaster(final PartitionMaster<?, ?> master) { Validate.notNull(master); checkInit(); final DependencyPlaceholder holder = getDepPlaceholder(); Validate.isTrue(holder==null || (holder.getMaster() instanceof AwaitingDelegate), "You're overwriting previous delegate or event's consumer: " + master.getClass().getSimpleName()); logger.info("{}: {} Using new PartitionMaster: {}", name, tenant.getConnectReference(), master.getClass().getSimpleName()); getDepPlaceholder().setMaster(master); } /** * Minka service must be fully initialized before being able to obtain an operative client * @return an instance of a client */ @SuppressWarnings("unchecked") public Client<D, P> getClient() { checkInit(); return tenant.getContext().getBean(Client.class); } /** * <p> * Warning: This executes automatically at VM shutdown (hook), but must be called independently * when in need to release unnecesary resource consumption. * <p> * Calls the termination of the system in an orderly manner. * Closing the API webserver and system context, which in turn will trigger finalization * of all spawned processes: dropping leadership candidature at Zookeeper, * and follower's captured entities. (properly calling the passed lambda at EventMapper) */ public void shutdown(final boolean wait) { if (tenant!=null) { logger.info("{}: {} Shutting down at request", name, tenant.getConnectReference()); destroy(true); } } public void shutdown() { shutdown(true); } /** * A way to permit several Server instances running within the same VM */ protected static class Tenant { private HttpServer webServer; private String connectReference; private ClassPathXmlApplicationContext context; private Config config; private Tenant() {} public HttpServer getWebServer() { return this.webServer; } public void setWebServer(HttpServer webServer) { this.webServer = webServer; } public void setConnectReference(String connectReference) { this.connectReference = connectReference; } public String getConnectReference() { String ret = StringUtils.EMPTY; if (StringUtils.isEmpty(connectReference)) { if (getContext()!=null && getContext().isActive()) { ret = connectReference = getContext().getBean(TCPShardIdentifier.class).getConnectString(); } } else { ret = connectReference; } return ret; } public ClassPathXmlApplicationContext getContext() { return this.context; } public void setContext(ClassPathXmlApplicationContext context) { this.context = context; } public Config getConfig() { return this.config; } public void setConfig(Config config) { this.config = config; } } }
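/*
 * Hypothetical bootstrap sketch for the Server class above (not part of the original source). The
 * ZooKeeper connect string, namespace and String payload types are illustrative assumptions; per
 * the javadoc, all mandatory sharding events must be mapped on the EventMapper (finishing with its
 * done() step) before the shard becomes operative.
 */
package io.tilt.minka.api;

public class ServerBootstrapSketch {
    public static void main(String[] args) {
        // One shard per application; default broker and REST ports are picked automatically.
        Server<String, String> server = new Server<>("localhost:2181/minka-demo", "demo-namespace");
        EventMapper<String, String> mapper = server.getEventMapper();
        // ... map the mandatory duty/pallet suppliers and consumers on 'mapper' here ...
        // Orderly shutdown: drops ZooKeeper candidature, the webserver and captured entities.
        server.shutdown();
    }
}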
/* This class is part of the XP framework's EAS connectivity * * $Id$ */ package net.xp_framework.easc.protocol.standard; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.HashMap; import net.xp_framework.easc.protocol.standard.Handler; public class Serializer { private static HashMap<Class, Method> typeMap = new HashMap<Class, Method>(); static { // Set up typeMap by inspecting all class methods with @Handler annotation for (Method m : Serializer.class.getDeclaredMethods()) { if (null == m.getAnnotation(Handler.class)) continue; typeMap.put(m.getParameterTypes()[0], m); } } @Handler public static String serialize(String s) { return "s:" + s.length() + ":\"" + s + "\";"; } @Handler public static String serialize(char c) { return "s:1:\"" + c + "\";"; } @Handler public static String serialize(byte b) { return "i:" + b + ";"; } @Handler public static String serialize(short s) { return "i:" + s + ";"; } @Handler public static String serialize(int i) { return "i:" + i + ";"; } @Handler public static String serialize(long l) { return "i:" + l + ";"; } @Handler public static String serialize(double d) { return "d:" + d + ";"; } @Handler public static String serialize(float f) { return "d:" + f + ";"; } @Handler public static String serialize(boolean b) { return "b:" + (b ? 1 : 0) + ";"; } protected static ArrayList<Field> classFields(Class c) { ArrayList<Field> list= new ArrayList<Field>(); for (Field f : c.getDeclaredFields()) { if (Modifier.isTransient(f.getModifiers())) continue; list.add(f); } return list; } @Handler public static String serialize(Object o) throws Exception { StringBuffer buffer= new StringBuffer(); Class c= o.getClass(); long numFields = 0; for (Field f : classFields(c)) { buffer.append("s:"); buffer.append(f.getName().length()); buffer.append(":\""); buffer.append(f.getName()); buffer.append("\";"); f.setAccessible(true); Method m= typeMap.get(f.getType()); if (m != null) { buffer.append(m.invoke(null, new Object[] { f.get(o) })); } else { System.out.println("!!! No mapping for " + f.getType().getName()); buffer.append(serialize(f.get(o))); // Will use serialize(Object o) } numFields++; } buffer.append("}"); buffer.insert(0, "O:" + c.getName().length() + ":\"" + c.getName() + "\":" + numFields + ":{"); return buffer.toString(); } }
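/*
 * Minimal sketch of the wire format produced by Serializer above (not part of the original
 * source). The Person bean and the expected outputs are illustrative; for objects the field order
 * follows Class.getDeclaredFields(), which the JVM does not guarantee, and the class name in the
 * output is the fully qualified name, so the object line is only indicative.
 */
package net.xp_framework.easc.protocol.standard;

public class SerializerSketch {
    static class Person {
        String name = "Bo";
        int age = 7;
    }

    public static void main(String[] args) throws Exception {
        System.out.println(Serializer.serialize("Hi"));   // s:2:"Hi";
        System.out.println(Serializer.serialize(42));     // i:42;
        System.out.println(Serializer.serialize(true));   // b:1;
        // Roughly: O:<len>:"<fully qualified Person class name>":2:{s:4:"name";s:2:"Bo";s:3:"age";i:7;}
        System.out.println(Serializer.serialize(new Person()));
    }
}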
package com.google.refine.freebase.protograph.transpose; import java.util.LinkedList; import java.util.List; import com.google.refine.browsing.FilteredRows; import com.google.refine.browsing.RowVisitor; import com.google.refine.expr.ExpressionUtils; import com.google.refine.freebase.protograph.AnonymousNode; import com.google.refine.freebase.protograph.CellNode; import com.google.refine.freebase.protograph.CellTopicNode; import com.google.refine.freebase.protograph.FreebaseTopicNode; import com.google.refine.freebase.protograph.Link; import com.google.refine.freebase.protograph.Node; import com.google.refine.freebase.protograph.NodeWithLinks; import com.google.refine.freebase.protograph.Protograph; import com.google.refine.freebase.protograph.ValueNode; import com.google.refine.model.Cell; import com.google.refine.model.Column; import com.google.refine.model.Project; import com.google.refine.model.Row; import com.google.refine.model.Recon.Judgment; public class Transposer { static public void transpose( Project project, FilteredRows filteredRows, Protograph protograph, Node rootNode, TransposedNodeFactory nodeFactory ) { transpose(project, filteredRows, protograph, rootNode, nodeFactory, 20); } static public void transpose( Project project, FilteredRows filteredRows, Protograph protograph, Node rootNode, TransposedNodeFactory nodeFactory, int limit ) { Context rootContext = new Context(rootNode, null, null, limit); filteredRows.accept(project, new RowVisitor() { Context rootContext; Protograph protograph; Node rootNode; TransposedNodeFactory nodeFactory; @Override public boolean visit(Project project, int rowIndex, Row row) { if (rootContext.limit <= 0 || rootContext.count < rootContext.limit) { descend(project, protograph, nodeFactory, rowIndex, row, rootNode, rootContext); } if (rootContext.limit > 0 && rootContext.count > rootContext.limit) { return true; } return false; } @Override public void start(Project project) { // TODO Auto-generated method stub } @Override public void end(Project project) { // TODO Auto-generated method stub } public RowVisitor init( Context rootContext, Protograph protograph, Node rootNode, TransposedNodeFactory nodeFactory ) { this.rootContext = rootContext; this.protograph = protograph; this.rootNode = rootNode; this.nodeFactory = nodeFactory; return this; } }.init(rootContext, protograph, rootNode, nodeFactory)); } static protected void descend( Project project, Protograph protograph, TransposedNodeFactory nodeFactory, int rowIndex, Row row, Node node, Context context ) { List<TransposedNode> tnodes = new LinkedList<TransposedNode>(); Link link = context.parent == null ? 
null : context.link; if (node instanceof CellNode) { CellNode node2 = (CellNode) node; for (String columnName : node2.columnNames) { Column column = project.columnModel.getColumnByName(columnName); if (column != null) { int cellIndex = column.getCellIndex(); Cell cell = row.getCell(cellIndex); if (cell != null && ExpressionUtils.isNonBlankData(cell.value)) { if (node2 instanceof CellTopicNode && (cell.recon == null || cell.recon.judgment == Judgment.None)) { return; } context.count++; if (context.limit > 0 && context.count > context.limit) { return; } if (context.parent == null) { tnodes.add(nodeFactory.transposeCellNode( null, link, node2, rowIndex, cellIndex, cell )); } else { for (TransposedNode parentNode : context.parent.transposedNodes) { tnodes.add(nodeFactory.transposeCellNode( parentNode, link, node2, rowIndex, cellIndex, cell )); } } } } } } else { if (node instanceof AnonymousNode) { if (context.parent == null) { tnodes.add(nodeFactory.transposeAnonymousNode( null, link, (AnonymousNode) node, rowIndex )); } else { for (TransposedNode parentNode : context.parent.transposedNodes) { tnodes.add(nodeFactory.transposeAnonymousNode( parentNode, link, (AnonymousNode) node, rowIndex )); } } } else if (node instanceof FreebaseTopicNode) { if (context.parent == null) { tnodes.add(nodeFactory.transposeTopicNode( null, link, (FreebaseTopicNode) node, rowIndex )); } else { for (TransposedNode parentNode : context.parent.transposedNodes) { tnodes.add(nodeFactory.transposeTopicNode( parentNode, link, (FreebaseTopicNode) node, rowIndex )); } } } else if (node instanceof ValueNode) { if (context.parent == null) { tnodes.add(nodeFactory.transposeValueNode( null, link, (ValueNode) node, rowIndex )); } else { for (TransposedNode parentNode : context.parent.transposedNodes) { tnodes.add(nodeFactory.transposeValueNode( parentNode, link, (ValueNode) node, rowIndex )); } } } } if (tnodes.size() > 0) { context.transposedNodes.clear(); context.transposedNodes.addAll(tnodes); } if (node instanceof NodeWithLinks) { NodeWithLinks node2 = (NodeWithLinks) node; int linkCount = node2.getLinkCount(); for (int i = 0; i < linkCount; i++) { Link link2 = node2.getLink(i); if (link2.condition == null || link2.condition.test(project, rowIndex, row)) { descend( project, protograph, nodeFactory, rowIndex, row, link2.getTarget(), context.subContexts.get(i) ); } } } } static class Context { List<TransposedNode> transposedNodes = new LinkedList<TransposedNode>(); List<Context> subContexts; Context parent; Link link; int count; int limit; Context(Node node, Context parent, Link link, int limit) { this.parent = parent; this.link = link; this.limit = limit; if (node instanceof NodeWithLinks) { NodeWithLinks node2 = (NodeWithLinks) node; int subContextCount = node2.getLinkCount(); subContexts = new LinkedList<Context>(); for (int i = 0; i < subContextCount; i++) { Link link2 = node2.getLink(i); subContexts.add( new Context(link2.getTarget(), this, link2, -1)); } } } public void nullifySubContextNodes() { if (subContexts != null) { for (Context context : subContexts) { context.transposedNodes.clear(); context.nullifySubContextNodes(); } } } } }
package eu.amidst.reviewMeeting2016; import eu.amidst.core.conceptdrift.utils.GaussianHiddenTransitionMethod; import eu.amidst.core.datastream.Attributes; import eu.amidst.core.distribution.Normal; import eu.amidst.core.learning.parametric.bayesian.PlateuStructure; import eu.amidst.core.variables.Variable; import eu.amidst.dynamic.datastream.DynamicDataInstance; import eu.amidst.dynamic.models.DynamicDAG; import eu.amidst.dynamic.variables.DynamicVariables; import eu.amidst.flinklink.core.conceptdrift.IdentifiableIDAModel; import eu.amidst.flinklink.core.data.DataFlink; import eu.amidst.flinklink.core.io.DataFlinkLoader; import eu.amidst.flinklink.core.learning.dynamic.DynamicParallelVB; import org.apache.flink.api.java.ExecutionEnvironment; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Arrays; public class CajaMarDemo { static Logger logger = LoggerFactory.getLogger(CajaMarDemo.class); public static int seed = 5; public static int batchSize = 500; public static double transitionVariance = 0.1; public static void main(String[] args) throws Exception { /* * Create flink ExecutionEnvironment variable: * The ExecutionEnviroment is the context in which a program is executed. A local environment will cause * execution in the current JVM, a remote environment will cause execution on a remote cluster installation. */ final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment(); //String fileName = "hdfs:///tmp_conceptdrift_data"; String fileName = "./datasets/dataFlink/conceptdrift/data"; DataFlink<DynamicDataInstance> data0 = DataFlinkLoader.loadDynamicDataFromFolder(env,fileName+0+".arff", false); Attributes attributes = data0.getAttributes(); System.out.println(attributes); // Create a Variables object from the attributes of the input data stream. DynamicVariables variables = new DynamicVariables(attributes); // Define the class variable. Variable classVar = variables.getVariableById(0); // Create an empty DAG object with the defined variables. 
DynamicDAG dynamicDAG = new DynamicDAG(variables); // Link the class as parent of all attributes dynamicDAG.getParentSetsTimeT() .stream() .filter(w -> w.getMainVar() != classVar) .forEach(w -> w.addParent(classVar)); /* for (Variable var: variables){ if (var==classVar) continue; dynamicDAG.getParentSetTimeT(var).addParent(classVar); } */ // Link the class through time dynamicDAG.getParentSetTimeT(classVar).addParent(classVar.getInterfaceVariable()); System.out.println(dynamicDAG.toString()); // Set the number of available months for learning int nMonths = 5; long start = System.nanoTime(); //Parallel Bayesian learning engine DynamicParallelVB parallelVB = new DynamicParallelVB(); parallelVB.setPlateuStructure(new PlateuStructure()); parallelVB.setGlobalThreshold(0.1); parallelVB.setMaximumGlobalIterations(100); parallelVB.setLocalThreshold(0.1); parallelVB.setMaximumLocalIterations(100); parallelVB.setSeed(0); parallelVB.setBatchSize(1000); parallelVB.setDAG(dynamicDAG); // Initiate learning parallelVB.initLearning(); for (int i = 0; i < nMonths; i++) { logger.info("Updating model with data for month {}", i); //Load the data for that month DataFlink<DynamicDataInstance> dataNew = DataFlinkLoader.loadDynamicDataFromFolder(env, fileName+i+".arff", false); //Update the model with the provided data parallelVB.updateModelWithNewTimeSlice(i, dataNew); } long duration = (System.nanoTime() - start) / 1; double seconds = duration / 1000000000.0; logger.info("Running time: {} seconds.", seconds); //Show the learnt Dynamic Bayesian network System.out.println(parallelVB.getLearntDynamicBayesianNetwork()); // Define the global latent Gaussian variable. Variable globalHiddenVar = variables.newGaussianDynamicVariable("GlobalHidden"); dynamicDAG.updateDynamicVariables(variables); // Link the hidden as parent of all predictive attributes dynamicDAG.getParentSetsTimeT() .stream() .filter(w -> w.getMainVar() != classVar) .filter(w -> w.getMainVar() != globalHiddenVar) .forEach(w -> w.addParent(globalHiddenVar)); System.out.println(dynamicDAG.toString()); //Update the Plateu Structure parallelVB.setPlateuStructure(new PlateuStructure(Arrays.asList(globalHiddenVar))); //Define the transition for the global hidden GaussianHiddenTransitionMethod gaussianHiddenTransitionMethod = new GaussianHiddenTransitionMethod(Arrays.asList(globalHiddenVar), 0, 0.1); parallelVB.setTransitionMethod(gaussianHiddenTransitionMethod); //Update the Dynamic DAG parallelVB.setDAG(dynamicDAG); //Set the procedure to make the model identifiable parallelVB.setIdenitifableModelling(new IdentifiableIDAModel()); //Init learning parallelVB.initLearning(); double[] output = new double[nMonths]; for (int i = 0; i < nMonths; i++) { System.out.println("Updating drift model with data for month " + i); DataFlink<DynamicDataInstance> dataNew = DataFlinkLoader.loadDynamicDataFromFolder(env, "./datasets/dataFlink/conceptdrift/data" + i + ".arff", false); parallelVB.updateModelWithNewTimeSlice(i, dataNew); Normal normal = parallelVB.getParameterPosteriorTimeT(globalHiddenVar); output[i] = normal.getMean(); } System.out.println(parallelVB.getLearntDynamicBayesianNetwork()); for (int i = 0; i < nMonths; i++) { System.out.println("E(H_"+i+") =\t" + output[i]); } } }
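/*
 * Small illustration of the local/remote ExecutionEnvironment distinction mentioned in the demo's
 * comments above (not part of the original demo). The remote host, port and jar path are
 * placeholder values.
 */
package eu.amidst.reviewMeeting2016;

import org.apache.flink.api.java.ExecutionEnvironment;

public class ExecutionEnvironmentSketch {
    public static void main(String[] args) {
        // Picks a local environment when run from the IDE, or the cluster environment when submitted to one.
        ExecutionEnvironment auto = ExecutionEnvironment.getExecutionEnvironment();
        // Forces execution inside the current JVM.
        ExecutionEnvironment local = ExecutionEnvironment.createLocalEnvironment();
        // Submits the program to a remote JobManager; the jar containing the user code must be listed.
        ExecutionEnvironment remote = ExecutionEnvironment.createRemoteEnvironment("jobmanager-host", 6123, "/path/to/job.jar");
        System.out.println(auto + " / " + local + " / " + remote);
    }
}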
package com.github.groupon.monsoon.history.influx; import static com.github.groupon.monsoon.history.influx.InfluxUtil.TIME_COLUMN; import static com.google.common.collect.Iterators.peekingIterator; import com.google.common.collect.MultimapBuilder; import com.google.common.collect.PeekingIterator; import com.google.common.collect.SetMultimap; import com.groupon.lex.metrics.GroupName; import com.groupon.lex.metrics.Histogram; import com.groupon.lex.metrics.MetricName; import com.groupon.lex.metrics.MetricValue; import com.groupon.lex.metrics.SimpleGroupPath; import com.groupon.lex.metrics.Tags; import com.groupon.lex.metrics.lib.SimpleMapEntry; import com.groupon.lex.metrics.timeseries.ImmutableTimeSeriesValue; import com.groupon.lex.metrics.timeseries.SimpleTimeSeriesCollection; import com.groupon.lex.metrics.timeseries.TimeSeriesCollection; import com.groupon.lex.metrics.timeseries.TimeSeriesValue; import java.time.Instant; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.ListIterator; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.SortedMap; import java.util.Spliterator; import java.util.Spliterators; import java.util.TreeMap; import java.util.regex.Pattern; import java.util.stream.Stream; import java.util.stream.StreamSupport; import lombok.EqualsAndHashCode; import lombok.NonNull; import lombok.RequiredArgsConstructor; import org.influxdb.dto.QueryResult; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; /** * Handles an ordered list of series and exposes them as TimeSeriesCollections. */ public class SeriesHandler { private final SortedMap<TimestampedGroup, IntermediateTSV> datums = new TreeMap<>(); public Stream<TimeSeriesCollection> build() { return StreamSupport.stream( Spliterators.spliteratorUnknownSize( new DatumsIterator(datums.entrySet().iterator()), DatumsIterator.SPLITERATOR_CHARACTERISTICS), false); } public void addSeries(QueryResult.Series series) { final GroupName group = seriesToGroupName(series); final Optional<Histogram.Range> range = rangeFromSeries(series); final int timeColumnIdx = InfluxUtil.getColumnIndexFromSeries(series, TIME_COLUMN).orElseThrow(() -> new IllegalStateException("missing time column")); series.getValues().forEach(row -> { assert series.getColumns().size() == row.size(); final DateTime timestamp = new DateTime((Instant) row.get(timeColumnIdx)); final IntermediateTSV valueMap = new IntermediateTSV(); final ListIterator<String> columnIter = series.getColumns().listIterator(); final Iterator<Object> rowIter = row.iterator(); while (rowIter.hasNext()) { final int columnIdx = columnIter.nextIndex(); final String columnName = columnIter.next(); if (columnIdx != timeColumnIdx) valueMap.addMetric(valueKeyToMetricName(columnName), range, seriesValueToMetricValue(rowIter.next())); } datums.merge(new TimestampedGroup(timestamp, group), valueMap, IntermediateTSV::withMerged); }); } public void merge(SeriesHandler other) { other.datums.forEach((k, v) -> datums.merge(k, v, IntermediateTSV::withMerged)); } private static class DatumsIterator implements Iterator<TimeSeriesCollection> { public static final int SPLITERATOR_CHARACTERISTICS = Spliterator.DISTINCT | Spliterator.IMMUTABLE | Spliterator.NONNULL | Spliterator.ORDERED | Spliterator.SORTED; private final PeekingIterator<Map.Entry<TimestampedGroup, IntermediateTSV>> iter; public DatumsIterator(Iterator<Map.Entry<TimestampedGroup, IntermediateTSV>> iter) { this.iter = 
peekingIterator(iter); } @Override public boolean hasNext() { return iter.hasNext(); } @Override public TimeSeriesCollection next() { final List<TimeSeriesValue> collections = new ArrayList<>(); final DateTime timestamp; { final Map.Entry<TimestampedGroup, IntermediateTSV> head = iter.next(); // Handles throwing of NoSuchElementException for us. collections.add(buildTsvFromIterElem(head)); timestamp = head.getKey().getTimestamp(); } while (iter.hasNext() && Objects.equals(iter.peek().getKey().getTimestamp(), timestamp)) collections.add(buildTsvFromIterElem(iter.next())); return new SimpleTimeSeriesCollection(timestamp, collections); } private static TimeSeriesValue buildTsvFromIterElem(Map.Entry<TimestampedGroup, IntermediateTSV> iterElem) { return iterElem.getValue().build(iterElem.getKey().getGroup()); } } private static GroupName seriesToGroupName(QueryResult.Series series) { final SimpleGroupPath groupPath = pathStrToGroupPath(series.getName()); final Tags tags = Tags.valueOf(series.getTags().entrySet().stream() .filter(tagEntry -> !Objects.equals(tagEntry.getKey(), InfluxUtil.MONSOON_RANGE_TAG)) .filter(tagEntry -> tagEntry.getValue() != null) .map(tagEntry -> SimpleMapEntry.create(tagEntry.getKey(), tagValueToMetricValue(tagEntry.getValue())))); return GroupName.valueOf(groupPath, tags); } private static Optional<Histogram.Range> rangeFromSeries(QueryResult.Series series) { return series.getTags().entrySet().stream() .filter(tagEntry -> Objects.equals(tagEntry.getKey(), InfluxUtil.MONSOON_RANGE_TAG)) .map(Map.Entry::getValue) .filter(Objects::nonNull) .map(rangeStr -> rangeStr.split(Pattern.quote(".."), 2)) .map(parts -> new Histogram.Range(Double.parseDouble(parts[0]), Double.parseDouble(parts[1]))) .findAny(); } private static SimpleGroupPath pathStrToGroupPath(String str) { return SimpleGroupPath.valueOf(str.split(Pattern.quote("."))); } private static MetricName valueKeyToMetricName(String str) { return MetricName.valueOf(str.split(Pattern.quote("."))); } private static MetricValue seriesValueToMetricValue(Object obj) { if (obj instanceof Boolean) return MetricValue.fromBoolean((Boolean) obj); if (obj instanceof Number) return MetricValue.fromNumberValue((Number) obj); assert obj instanceof String; return MetricValue.fromStrValue(obj.toString()); } private static MetricValue tagValueToMetricValue(String str) { if ("true".equals(str)) return MetricValue.TRUE; if ("false".equals(str)) return MetricValue.FALSE; try { return MetricValue.fromIntValue(Long.parseLong(str)); } catch (NumberFormatException ex) { /* SKIP: value is not an integer */ } try { return MetricValue.fromDblValue(Double.parseDouble(str)); } catch (NumberFormatException ex) { /* SKIP: value is not a floating point value */ } return MetricValue.fromStrValue(str); } @RequiredArgsConstructor private static class IntermediateTSV { private final Map<MetricName, MetricValue> metrics = new HashMap<>(); private final SetMultimap<MetricName, Histogram.RangeWithCount> histograms = MultimapBuilder .hashKeys() .hashSetValues() // Handle duplicate series correctly. 
.build(); public TimeSeriesValue build(GroupName group) { final Stream<Map.Entry<MetricName, MetricValue>> metricsStream = metrics.entrySet().stream() .filter(metricEntry -> !histograms.containsKey(metricEntry.getKey())); final Stream<Map.Entry<MetricName, MetricValue>> histogramsStream = histograms.asMap().entrySet().stream() .map(histogramMetric -> { final MetricValue histogram = MetricValue.fromHistValue(new Histogram(histogramMetric.getValue().stream())); return SimpleMapEntry.create(histogramMetric.getKey(), histogram); }); return new ImmutableTimeSeriesValue(group, Stream.concat(metricsStream, histogramsStream), Map.Entry::getKey, Map.Entry::getValue); } private void addMetric(MetricName name, MetricValue value) { metrics.put(name, value); } private void addMetric(MetricName name, Histogram.Range range, MetricValue value) { final double count = value.value() .orElseThrow(() -> new IllegalArgumentException("expected floating point value for range value")) .doubleValue(); histograms.put(name, new Histogram.RangeWithCount(range, count)); } public void addMetric(MetricName name, Optional<Histogram.Range> range, MetricValue value) { if (range.isPresent()) addMetric(name, range.get(), value); else addMetric(name, value); } public IntermediateTSV withMerged(IntermediateTSV other) { metrics.putAll(other.metrics); histograms.putAll(other.histograms); return this; } } @EqualsAndHashCode private static class TimestampedGroup implements Comparable<TimestampedGroup> { private final long timestamp; private final GroupName group; public TimestampedGroup(@NonNull DateTime timestamp, @NonNull GroupName group) { this.timestamp = timestamp.getMillis(); this.group = group; } public DateTime getTimestamp() { return new DateTime(timestamp, DateTimeZone.UTC); } public GroupName getGroup() { return group; } @Override public int compareTo(TimestampedGroup o) { int cmp = Long.compare(timestamp, o.timestamp); if (cmp == 0) cmp = group.compareTo(o.group); return cmp; } } }
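/*
 * Illustrative usage sketch for the SeriesHandler above; it is not part of the original
 * sources. It assumes the plain influxdb-java QueryResult.Series bean setters
 * (setName/setTags/setColumns/setValues) and that the handler's mandatory time column is
 * named "time"; the class name, series name, tag and metric values are invented test data.
 */
package com.github.groupon.monsoon.history.influx;

import java.time.Instant;
import java.util.Arrays;
import java.util.Collections;
import org.influxdb.dto.QueryResult;

public class SeriesHandlerUsageSketch {
    public static void main(String[] args) {
        final QueryResult.Series series = new QueryResult.Series();
        series.setName("com.example.requests");                     // mapped to the group path
        series.setTags(Collections.singletonMap("host", "web01"));  // mapped to group tags
        series.setColumns(Arrays.asList("time", "latency"));        // one time column plus one metric column
        series.setValues(Collections.singletonList(
                Arrays.<Object>asList(Instant.now(), 42.0d)));      // a single row: timestamp + metric value

        final SeriesHandler handler = new SeriesHandler();
        handler.addSeries(series);

        // One TimeSeriesCollection is emitted per distinct timestamp, in order.
        handler.build().forEach(System.out::println);
    }
}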
package edu.umd.cs.findbugs.ba.npe; import java.util.BitSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import org.apache.bcel.Constants; import org.apache.bcel.classfile.LineNumber; import org.apache.bcel.classfile.LineNumberTable; import org.apache.bcel.classfile.Method; import org.apache.bcel.generic.Instruction; import org.apache.bcel.generic.InstructionHandle; import edu.umd.cs.findbugs.ba.BasicBlock; import edu.umd.cs.findbugs.ba.CFGBuilderException; import edu.umd.cs.findbugs.ba.ClassContext; import edu.umd.cs.findbugs.ba.DataflowAnalysisException; import edu.umd.cs.findbugs.ba.Edge; import edu.umd.cs.findbugs.ba.EdgeTypes; import edu.umd.cs.findbugs.ba.Location; import edu.umd.cs.findbugs.ba.vna.ValueNumber; import edu.umd.cs.findbugs.ba.vna.ValueNumberFrame; /** * A user-friendly front end for finding null pointer dereferences * and redundant null comparisions. * * @see IsNullValueAnalysis * @author David Hovemeyer */ public class NullDerefAndRedundantComparisonFinder { private static final boolean DEBUG = Boolean.getBoolean("fnd.debug"); private ClassContext classContext; private Method method; private IsNullValueDataflow invDataflow; private NullDerefAndRedundantComparisonCollector collector; private List<RedundantBranch> redundantBranchList; private BitSet definitelySameBranchSet; private BitSet definitelyDifferentBranchSet; private BitSet undeterminedBranchSet; private BitSet lineMentionedMultipleTimes; static { if (DEBUG) System.out.println("fnd.debug enabled"); } /** * Constructor. * * @param classContext the ClassContext * @param method the method to analyze * @param invDataflow the IsNullValueDataflow to use * @param collector the NullDerefAndRedundantComparisonCollector used to report * null derefs and redundant null comparisons */ public NullDerefAndRedundantComparisonFinder( ClassContext classContext, Method method, IsNullValueDataflow invDataflow, NullDerefAndRedundantComparisonCollector collector) { this.classContext = classContext; this.method = method; this.invDataflow = invDataflow; this.collector = collector; this.lineMentionedMultipleTimes = new BitSet(); BitSet foundOnce = new BitSet(); int lineNum = -1; LineNumberTable lineNumberTable = method.getLineNumberTable(); if (lineNumberTable != null) for(LineNumber line : lineNumberTable.getLineNumberTable()) { int newLine = line.getLineNumber(); if (newLine == lineNum || newLine == -1) continue; lineNum = newLine; if (foundOnce.get(lineNum)) this.lineMentionedMultipleTimes.set(lineNum); else foundOnce.set(lineNum); } this.redundantBranchList = new LinkedList<RedundantBranch>(); this.definitelySameBranchSet = new BitSet(); this.definitelyDifferentBranchSet = new BitSet(); this.undeterminedBranchSet = new BitSet(); } public void execute() throws DataflowAnalysisException, CFGBuilderException { // Look for null check blocks where the reference being checked // is definitely null, or null on some path Iterator<BasicBlock> bbIter = invDataflow.getCFG().blockIterator(); while (bbIter.hasNext()) { BasicBlock basicBlock = bbIter.next(); if (basicBlock.isNullCheck()) { analyzeNullCheck(classContext, method, invDataflow, basicBlock); } else if (!basicBlock.isEmpty()) { // Look for all reference comparisons where // - both values compared are definitely null, or // - one value is definitely null and one is definitely not null // These cases are not null dereferences, // but they are quite likely to indicate an error, so while we've got // information about null values, we may as well report 
them. InstructionHandle lastHandle = basicBlock.getLastInstruction(); Instruction last = lastHandle.getInstruction(); switch (last.getOpcode()) { case Constants.IF_ACMPEQ: case Constants.IF_ACMPNE: analyzeRefComparisonBranch(basicBlock, lastHandle); break; case Constants.IFNULL: case Constants.IFNONNULL: analyzeIfNullBranch(basicBlock, lastHandle); break; } } } Iterator<RedundantBranch> i = redundantBranchList.iterator(); while (i.hasNext()) { RedundantBranch redundantBranch = i.next(); if (DEBUG) System.out.println("Redundant branch: " + redundantBranch); int lineNumber = redundantBranch.lineNumber; // The source to bytecode compiler may sometimes duplicate blocks of // code along different control paths. So, to report the bug, // we check to ensure that the branch is REALLY determined each // place it is duplicated, and that it is determined in the same way. boolean confused = undeterminedBranchSet.get(lineNumber) || (definitelySameBranchSet.get(lineNumber) && definitelyDifferentBranchSet.get(lineNumber)); // confused if there is JSR confusion or multiple null checks with different results on the same line boolean reportIt = true; if (lineMentionedMultipleTimes.get(lineNumber) && confused) reportIt = false; if (redundantBranch.location.getBasicBlock().isInJSRSubroutine() /* occurs in a JSR */ && confused) reportIt = false; if (reportIt) { collector.foundRedundantNullCheck(redundantBranch.location, redundantBranch); } } } private void analyzeRefComparisonBranch( BasicBlock basicBlock, InstructionHandle lastHandle) throws DataflowAnalysisException { Location location = new Location(lastHandle, basicBlock); IsNullValueFrame frame = invDataflow.getFactAtLocation(location); if (!frame.isValid()) { // Probably dead code due to pruning infeasible exception edges. return; } if (frame.getStackDepth() < 2) throw new DataflowAnalysisException("Stack underflow at " + lastHandle); // Find the line number. int lineNumber = getLineNumber(method, lastHandle); if (lineNumber < 0) return; int numSlots = frame.getNumSlots(); IsNullValue top = frame.getValue(numSlots - 1); IsNullValue topNext = frame.getValue(numSlots - 2); boolean definitelySame = top.isDefinitelyNull() && topNext.isDefinitelyNull(); boolean definitelyDifferent = (top.isDefinitelyNull() && topNext.isDefinitelyNotNull()) || (top.isDefinitelyNotNull() && topNext.isDefinitelyNull()); if (definitelySame || definitelyDifferent) { if (definitelySame) { if (DEBUG) System.out.println("Line " + lineNumber + " always same"); definitelySameBranchSet.set(lineNumber); } if (definitelyDifferent) { if (DEBUG) System.out.println("Line " + lineNumber + " always different"); definitelyDifferentBranchSet.set(lineNumber); } RedundantBranch redundantBranch = new RedundantBranch(location, lineNumber, top, topNext); // Figure out which control edge is made infeasible by the redundant comparison boolean wantSame = (lastHandle.getInstruction().getOpcode() == Constants.IF_ACMPEQ); int infeasibleEdgeType = (wantSame == definitelySame) ? EdgeTypes.FALL_THROUGH_EDGE : EdgeTypes.IFCMP_EDGE; Edge infeasibleEdge = invDataflow.getCFG().getOutgoingEdgeWithType(basicBlock, infeasibleEdgeType); redundantBranch.setInfeasibleEdge(infeasibleEdge); if (DEBUG) System.out.println("Adding redundant branch: " + redundantBranch); redundantBranchList.add(redundantBranch); } else { if (DEBUG) System.out.println("Line " + lineNumber + " undetermined"); undeterminedBranchSet.set(lineNumber); } } // This is called for both IFNULL and IFNONNULL instructions. 
private void analyzeIfNullBranch( BasicBlock basicBlock, InstructionHandle lastHandle) throws DataflowAnalysisException { Location location = new Location(lastHandle, basicBlock); IsNullValueFrame frame = invDataflow.getFactAtLocation(location); if (!frame.isValid()) { // This is probably dead code due to an infeasible exception edge. return; } IsNullValue top = frame.getTopValue(); // Find the line number. int lineNumber = getLineNumber(method, lastHandle); if (lineNumber < 0) return; if (!(top.isDefinitelyNull() || top.isDefinitelyNotNull())) { if (DEBUG) System.out.println("Line " + lineNumber + " undetermined"); undeterminedBranchSet.set(lineNumber); return; } // Figure out if the branch is always taken // or always not taken. short opcode = lastHandle.getInstruction().getOpcode(); boolean definitelySame = top.isDefinitelyNull(); if (opcode != Constants.IFNULL) definitelySame = !definitelySame; if (definitelySame) { if (DEBUG) System.out.println("Line " + lineNumber + " always same"); definitelySameBranchSet.set(lineNumber); } else { if (DEBUG) System.out.println("Line " + lineNumber + " always different"); definitelyDifferentBranchSet.set(lineNumber); } RedundantBranch redundantBranch = new RedundantBranch(location, lineNumber, top); // Determine which control edge is made infeasible by the redundant comparison boolean wantNull = (opcode == Constants.IFNULL); int infeasibleEdgeType = (wantNull == top.isDefinitelyNull()) ? EdgeTypes.FALL_THROUGH_EDGE : EdgeTypes.IFCMP_EDGE; Edge infeasibleEdge = invDataflow.getCFG().getOutgoingEdgeWithType(basicBlock, infeasibleEdgeType); redundantBranch.setInfeasibleEdge(infeasibleEdge); if (DEBUG) System.out.println("Adding redundant branch: " + redundantBranch); redundantBranchList.add(redundantBranch); } private void analyzeNullCheck(ClassContext classContext, Method method, IsNullValueDataflow invDataflow, BasicBlock basicBlock) throws DataflowAnalysisException, CFGBuilderException { // Look for null checks where the value checked is definitely // null or null on some path. InstructionHandle exceptionThrowerHandle = basicBlock.getExceptionThrower(); Instruction exceptionThrower = exceptionThrowerHandle.getInstruction(); // Get the stack values at entry to the null check. IsNullValueFrame frame = invDataflow.getStartFact(basicBlock); if (!frame.isValid()) return; // Could the reference be null? IsNullValue refValue = frame.getInstance(exceptionThrower, classContext.getConstantPoolGen()); if (!refValue.mightBeNull()) return; // Get the value number ValueNumberFrame vnaFrame = classContext.getValueNumberDataflow(method).getStartFact(basicBlock); if (!vnaFrame.isValid()) return; ValueNumber valueNumber = vnaFrame.getInstance(exceptionThrower, classContext.getConstantPoolGen()); // Issue a warning collector.foundNullDeref(new Location(exceptionThrowerHandle, basicBlock), valueNumber, refValue); } private static int getLineNumber(Method method, InstructionHandle handle) { LineNumberTable table = method.getCode().getLineNumberTable(); if (table == null) return -1; return table.getSourceLine(handle.getPosition()); } }
package io.spine.server.firebase.given; import com.google.auth.oauth2.GoogleCredentials; import com.google.cloud.firestore.Firestore; import com.google.firebase.FirebaseApp; import com.google.firebase.FirebaseOptions; import com.google.firebase.cloud.FirestoreClient; import com.google.protobuf.Duration; import com.google.protobuf.Message; import com.google.protobuf.Timestamp; import com.google.protobuf.util.Timestamps; import io.spine.client.ActorRequestFactory; import io.spine.client.CommandFactory; import io.spine.core.BoundedContextName; import io.spine.core.CommandEnvelope; import io.spine.core.Event; import io.spine.core.EventContext; import io.spine.core.Subscribe; import io.spine.core.TenantId; import io.spine.core.UserId; import io.spine.people.PersonName; import io.spine.server.BoundedContext; import io.spine.server.aggregate.Aggregate; import io.spine.server.aggregate.AggregateRepository; import io.spine.server.aggregate.Apply; import io.spine.server.command.Assign; import io.spine.server.command.TestEventFactory; import io.spine.server.entity.Entity; import io.spine.server.entity.Repository; import io.spine.server.firebase.FMChangeCustomerName; import io.spine.server.firebase.FMCreateCustomer; import io.spine.server.firebase.FMCustomer; import io.spine.server.firebase.FMCustomerCreated; import io.spine.server.firebase.FMCustomerId; import io.spine.server.firebase.FMCustomerNameChanged; import io.spine.server.firebase.FMCustomerVBuilder; import io.spine.server.firebase.FMSession; import io.spine.server.firebase.FMSessionId; import io.spine.server.firebase.FMSessionVBuilder; import io.spine.server.firebase.FirebaseSubscriptionMirror; import io.spine.server.projection.Projection; import io.spine.server.projection.ProjectionRepository; import io.spine.server.stand.Stand; import io.spine.server.storage.StorageFactory; import io.spine.string.Stringifier; import io.spine.string.StringifierRegistry; import io.spine.time.Time; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.io.InputStream; import java.text.ParseException; import java.util.stream.Stream; import static com.google.common.base.Preconditions.checkArgument; import static io.spine.Identifier.newUuid; import static io.spine.client.TestActorRequestFactory.newInstance; import static io.spine.server.BoundedContext.newName; import static io.spine.server.aggregate.AggregateMessageDispatcher.dispatchCommand; import static io.spine.server.projection.ProjectionEventDispatcher.dispatch; import static io.spine.server.storage.memory.InMemoryStorageFactory.newInstance; import static org.junit.Assume.assumeNotNull; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.fail; /** * Test environment for the {@link FirebaseSubscriptionMirror FirebaseSubscriptionMirror} tests. * * @author Dmytro Dashenkov */ public final class FirebaseMirrorTestEnv { private static final String FIREBASE_SERVICE_ACC_SECRET = "serviceAccount.json"; private static final String DATABASE_URL = "https://spine-firestore-test.firebaseio.com"; private static final UserId TEST_ACTOR = UserId.newBuilder() .setValue("Firebase mirror test") .build(); private static final ActorRequestFactory defaultRequestFactory = newInstance(TEST_ACTOR); private static final TestEventFactory eventFactory = TestEventFactory.newInstance(FirebaseMirrorTestEnv.class); private static final Firestore firestore = createFirestore(); // Prevent utility class instantiation. 
private FirebaseMirrorTestEnv() { } public static FMCustomerId newId() { return FMCustomerId.newBuilder() .setUid(newUuid()) .build(); } public static FMSessionId newSessionId() { return FMSessionId.newBuilder() .setCustomerId(newId()) .setStartTime(Time.getCurrentTime()) .build(); } @SuppressWarnings("NonThreadSafeLazyInitialization") // OK for a test. public static Firestore getFirestore() { return firestore; } private static Firestore createFirestore() { final InputStream firebaseSecret = FirebaseMirrorTestEnv.class .getClassLoader() .getResourceAsStream(FIREBASE_SERVICE_ACC_SECRET); // Check if `serviceAccount.json` file exists. assumeNotNull(firebaseSecret); final GoogleCredentials credentials; try { credentials = GoogleCredentials.fromStream(firebaseSecret); } catch (IOException e) { log().error("Error while reading Firebase service account file.", e); throw new IllegalStateException(e); } final FirebaseOptions options = new FirebaseOptions.Builder() .setDatabaseUrl(DATABASE_URL) .setCredentials(credentials) .build(); FirebaseApp.initializeApp(options); final Firestore firestore = FirestoreClient.getFirestore(); return firestore; } public static void registerSessionIdStringifier() { final Stringifier<FMSessionId> stringifier = new Stringifier<FMSessionId>() { private static final String SEPARATOR = "::"; @Override protected String toString(FMSessionId genericId) { final String customerUid = genericId.getCustomerId() .getUid(); final String timestamp = Timestamps.toString(genericId.getStartTime()); return customerUid + SEPARATOR + timestamp; } @Override protected FMSessionId fromString(String stringId) { @SuppressWarnings("DynamicRegexReplaceableByCompiledPattern") // OK for tests. final String[] parts = stringId.split(SEPARATOR); checkArgument(parts.length == 2); final FMCustomerId customerId = FMCustomerId.newBuilder() .setUid(parts[0]) .build(); final Timestamp timestamp; try { timestamp = Timestamps.parse(parts[1]); } catch (ParseException e) { throw new IllegalArgumentException(e); } final FMSessionId result = FMSessionId.newBuilder() .setCustomerId(customerId) .setStartTime(timestamp) .build(); return result; } }; StringifierRegistry.getInstance() .register(stringifier, FMSessionId.class); } public static FMCustomer createCustomer(FMCustomerId customerId, BoundedContext boundedContext) { return createCustomer(customerId, boundedContext, defaultRequestFactory); } public static void createSession(FMSessionId sessionId, BoundedContext boundedContext) { final SessionProjection projection = createEntity(sessionId, boundedContext, FMSession.class); final FMCustomerCreated eventMsg = createdEvent(sessionId.getCustomerId()); final Event event = eventFactory.createEvent(eventMsg); dispatch(projection, event); final Stand stand = boundedContext.getStand(); stand.post(defaultTenant(), projection); } public static FMCustomer createCustomer(FMCustomerId customerId, BoundedContext boundedContext, TenantId tenantId) { return createCustomer(customerId, boundedContext, requestFactory(tenantId)); } private static FMCustomer createCustomer(FMCustomerId customerId, BoundedContext boundedContext, ActorRequestFactory requestFactory) { final CustomerAggregate aggregate = createEntity(customerId, boundedContext, FMCustomer.class); final CommandFactory commandFactory = requestFactory.command(); Stream.of(createCommand(customerId), updateCommand(customerId)) .map(commandFactory::create) .map(CommandEnvelope::of) .forEach(cmd -> dispatchCommand(aggregate, cmd)); final Stand stand = 
boundedContext.getStand(); final TenantId tenantId = requestFactory.getTenantId(); stand.post(tenantId == null ? defaultTenant() : tenantId, aggregate); return aggregate.getState(); } private static <I, E extends Entity<I, S>, S extends Message> E createEntity(I id, BoundedContext boundedContext, Class<S> stateClass) { @SuppressWarnings("unchecked") final Repository<I, E> repository = boundedContext.findRepository(stateClass) .orNull(); assertNotNull(repository); final E projection = repository.create(id); return projection; } private static ActorRequestFactory requestFactory(TenantId tenantId) { final ActorRequestFactory factory = newInstance(TEST_ACTOR, tenantId); return factory; } private static FMCreateCustomer createCommand(FMCustomerId id) { final FMCreateCustomer createCmd = FMCreateCustomer.newBuilder() .setId(id) .build(); return createCmd; } private static FMChangeCustomerName updateCommand(FMCustomerId id) { final PersonName newName = PersonName.newBuilder() .setGivenName("John") .setFamilyName("Doe") .build(); final FMChangeCustomerName updateCmd = FMChangeCustomerName.newBuilder() .setId(id) .setNewName(newName) .build(); return updateCmd; } private static FMCustomerCreated createdEvent(FMCustomerId id) { final FMCustomerCreated createCmd = FMCustomerCreated.newBuilder() .setId(id) .build(); return createCmd; } private static TenantId defaultTenant() { return TenantId.newBuilder() .setValue("Default tenant") .build(); } /** * Makes current thread {@linkplain Thread#sleep(long) sleep} for three seconds. * * <p>This is required to make sure that the data has been sent to Firebase and is available * for reading. */ public static void waitForConsistency() { try { Thread.sleep(3000L); } catch (InterruptedException e) { fail(e.getMessage()); } } public static BoundedContext createBoundedContext(String name, boolean multitenant) { final BoundedContextName contextName = newName(name); final StorageFactory storageFactory = newInstance(contextName, multitenant); final BoundedContext result = BoundedContext.newBuilder() .setName(name) .setMultitenant(multitenant) .setStorageFactorySupplier(() -> storageFactory) .build(); result.register(new CustomerRepository()); result.register(new SessionRepository()); return result; } public static class CustomerAggregate extends Aggregate<FMCustomerId, FMCustomer, FMCustomerVBuilder> { protected CustomerAggregate(FMCustomerId id) { super(id); } @SuppressWarnings("unused") // Reflective access. @Assign FMCustomerCreated handle(FMCreateCustomer command) { return createdEvent(command.getId()); } @SuppressWarnings("unused") // Reflective access. @Assign FMCustomerNameChanged handle(FMChangeCustomerName command) { return FMCustomerNameChanged.newBuilder() .setNewName(command.getNewName()) .build(); } @SuppressWarnings("unused") // Reflective access. @Apply private void on(FMCustomerCreated event) { getBuilder().setId(event.getId()); } @SuppressWarnings("unused") // Reflective access. @Apply private void on(FMCustomerNameChanged event) { getBuilder().setName(event.getNewName()); } } static class CustomerRepository extends AggregateRepository<FMCustomerId, CustomerAggregate> {} public static class SessionProjection extends Projection<FMSessionId, FMSession, FMSessionVBuilder> { protected SessionProjection(FMSessionId id) { super(id); } @SuppressWarnings("unused") // Reflective access. 
@Subscribe void on(FMCustomerCreated event, EventContext context) { getBuilder().setDuration(mockLogic(context)); } private static Duration mockLogic(EventContext context) { final Timestamp currentTime = Time.getCurrentTime(); final Timestamp eventTime = context.getTimestamp(); final long durationSeconds = eventTime.getSeconds() - currentTime.getSeconds(); final Duration duration = Duration.newBuilder() .setSeconds(durationSeconds) .build(); return duration; } } static class SessionRepository extends ProjectionRepository<FMSessionId, SessionProjection, FMSession> {} private static Logger log() { return LogSingleton.INSTANCE.value; } private enum LogSingleton { INSTANCE; @SuppressWarnings("NonSerializableFieldInSerializableClass") private final Logger value = LoggerFactory.getLogger(FirebaseMirrorTestEnv.class); } }
package com.oracle.graal.truffle; import com.oracle.graal.options.*; /** * Options for the Truffle compiler. */ public class TruffleCompilerOptions { // @formatter:off // configuration /** * Instructs the Truffle Compiler to compile call targets only if their name contains at least one element of a comma-separated list of includes. * Excludes are prefixed with a tilde (~). * * The format in EBNF: * <pre> * CompileOnly = Element, { ',', Element } ; * Element = Include | '~' Exclude ; * </pre> */ @Option(help = "Restrict compilation to comma-separated list of includes (or excludes prefixed with tilde)") public static final OptionValue<String> TruffleCompileOnly = new OptionValue<>(null); @Option(help = "Compile call target when call count exceeds this threshold") public static final OptionValue<Integer> TruffleCompilationThreshold = new OptionValue<>(1000); @Option(help = "Minimum number of calls before a call target is compiled") public static final OptionValue<Integer> TruffleMinInvokeThreshold = new OptionValue<>(3); @Option(help = "Delay compilation after an invalidation to allow for reprofiling") public static final OptionValue<Integer> TruffleInvalidationReprofileCount = new OptionValue<>(3); @Option(help = "Delay compilation after a node replacement") public static final OptionValue<Integer> TruffleReplaceReprofileCount = new OptionValue<>(10); @Option(help = "Enable automatic inlining of call targets") public static final OptionValue<Boolean> TruffleFunctionInlining = new OptionValue<>(true); @Option(help = "Maximum number of Graal IR nodes during partial evaluation") public static final OptionValue<Integer> TruffleGraphMaxNodes = new OptionValue<>(45000); @Option(help = "Stop inlining if caller's cumulative tree size would exceed this limit") public static final OptionValue<Integer> TruffleInliningMaxCallerSize = new OptionValue<>(2250); @Option(help = "Skip inlining candidate if its tree size exceeds this limit") public static final OptionValue<Integer> TruffleInliningMaxCalleeSize = new OptionValue<>(500); @Option(help = "Call frequency relative to call target") public static final OptionValue<Double> TruffleInliningMinFrequency = new OptionValue<>(0.3); @Option(help = "Allow inlining of less hot candidates if tree size is small") public static final OptionValue<Integer> TruffleInliningTrivialSize = new OptionValue<>(10); @Option(help = "Enable call target splitting") public static final OptionValue<Boolean> TruffleSplittingEnabled = new OptionValue<>(true); @Option(help = "Disable call target splitting if tree size exceeds this limit") public static final OptionValue<Integer> TruffleSplittingMaxCalleeSize = new OptionValue<>(100); @Option(help = "Number of most recently used methods in truffle cache") public static final OptionValue<Integer> TruffleMaxCompilationCacheSize = new OptionValue<>(512); @Option(help = "Enable asynchronous truffle compilation in background thread") public static final OptionValue<Boolean> TruffleBackgroundCompilation = new OptionValue<>(true); @Option(help = "") public static final OptionValue<Boolean> TruffleUseTimeForCompilationDecision = new OptionValue<>(false); @Option(help = "") public static final OptionValue<Integer> TruffleCompilationDecisionTime = new OptionValue<>(100); @Option(help = "") public static final OptionValue<Boolean> TruffleCompilationDecisionTimePrintFail = new OptionValue<>(false); // tracing @Option(help = "") public static final OptionValue<Boolean> TraceTruffleCompilation = new OptionValue<>(false); @Option(help = "") public 
static final OptionValue<Boolean> TraceTruffleCompilationDetails = new OptionValue<>(false); @Option(help = "") public static final OptionValue<Boolean> TraceTruffleCompilationHistogram = new OptionValue<>(false); @Option(help = "Prints out all polymorphic and generic nodes after compilation.") public static final OptionValue<Boolean> TraceTruffleCompilationPolymorphism = new OptionValue<>(false); @Option(help = "") public static final OptionValue<Boolean> TraceTruffleExpansion = new OptionValue<>(false); @Option(help = "") public static final OptionValue<Boolean> TraceTruffleExpansionSource = new OptionValue<>(false); @Option(help = "") public static final OptionValue<Boolean> TraceTruffleCacheDetails = new OptionValue<>(false); @Option(help = "") public static final OptionValue<Boolean> TraceTruffleCompilationExceptions = new OptionValue<>(true); @Option(help = "") public static final OptionValue<Boolean> TruffleCompilationExceptionsAreFatal = new OptionValue<>(true); @Option(help = "") public static final OptionValue<Boolean> TraceTruffleInlining = new OptionValue<>(false); @Option(help = "") public static final OptionValue<Boolean> TraceTruffleSplitting = new OptionValue<>(false); @Option(help = "") public static final OptionValue<Boolean> TruffleCallTargetProfiling = new StableOptionValue<>(false); // @formatter:on }
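/*
 * Illustrative sketch for TruffleCompilerOptions above; it is not part of the original file.
 * Per the EBNF on TruffleCompileOnly, the option value is a comma-separated list of name
 * fragments to include, with excludes prefixed by '~'. A hypothetical value such as
 * "add,mul,~fib" restricts compilation to call targets whose names contain "add" or "mul"
 * while never compiling ones containing "fib". Only TruffleCompileOnly.getValue() is taken
 * from the class above; the example class name and messages are invented.
 */
package com.oracle.graal.truffle;

public class TruffleCompileOnlyExample {
    public static void main(String[] args) {
        // The default is null, meaning no filter: every call target may be compiled.
        String compileOnly = TruffleCompilerOptions.TruffleCompileOnly.getValue();
        System.out.println(compileOnly == null
                ? "no compile-only filter set"
                : "compile-only filter in effect: " + compileOnly);
    }
}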
package org.jboss.as.console.client.domain.profiles; import com.google.gwt.user.client.ui.ScrollPanel; import com.google.gwt.user.client.ui.VerticalPanel; import com.google.gwt.user.client.ui.Widget; import org.jboss.as.console.client.Console; import org.jboss.as.console.client.core.NameTokens; import org.jboss.as.console.client.domain.model.ProfileRecord; import org.jboss.as.console.client.shared.model.SubsystemRecord; import org.jboss.as.console.client.shared.subsys.SubsystemTreeBuilder; import org.jboss.ballroom.client.layout.LHSNavTree; import org.jboss.ballroom.client.layout.LHSNavTreeItem; import org.jboss.ballroom.client.layout.LHSTreeSection; import java.util.ArrayList; import java.util.List; /** * LHS domain management navigation. * * @author Heiko Braun * @date 2/11/11 */ class LHSProfileNavigation { private VerticalPanel layout; private VerticalPanel stack; private ScrollPanel scroll; private LHSNavTree navigation; private LHSTreeSection subsystemLeaf; private LHSTreeSection groupsLeaf; private LHSTreeSection commonLeaf; private ProfileSelector profileSelector; public LHSProfileNavigation() { layout = new VerticalPanel(); layout.setStyleName("fill-layout-width"); stack = new VerticalPanel(); stack.setStyleName("fill-layout-width"); profileSelector = new ProfileSelector(); Widget selectorWidget = profileSelector.asWidget(); stack.add(selectorWidget); navigation = new LHSNavTree("profiles"); navigation.getElement().setAttribute("aria-label", "Profile Tasks"); subsystemLeaf = new LHSTreeSection(Console.CONSTANTS.common_label_subsystems()); navigation.addItem(subsystemLeaf); /*subsystemSection = new SubsystemSection(); Widget subsysWidget = subsystemSection.asWidget(); stack.add(subsysWidget);*/ /*DisclosurePanel groupsPanel = new DisclosureStackPanel( Console.CONSTANTS.common_label_serverGroups()).asWidget(); LHSNavTree groupsTree = new LHSNavTree("profiles"); groupsPanel.setContent(groupsTree); */ groupsLeaf = new LHSTreeSection(Console.CONSTANTS.common_label_serverGroups()); navigation.addItem(groupsLeaf); LHSNavTreeItem groupItem = new LHSNavTreeItem(Console.CONSTANTS.common_label_serverGroupConfigurations(), NameTokens.ServerGroupPresenter); groupsLeaf.addItem(groupItem); //stack.add(groupsPanel); commonLeaf = new LHSTreeSection(Console.CONSTANTS.common_label_generalConfig()); navigation.addItem(commonLeaf); LHSNavTreeItem interfaces = new LHSNavTreeItem(Console.CONSTANTS.common_label_interfaces(), NameTokens.InterfacePresenter); LHSNavTreeItem sockets = new LHSNavTreeItem(Console.CONSTANTS.common_label_socketBinding(), NameTokens.SocketBindingPresenter); LHSNavTreeItem properties = new LHSNavTreeItem(Console.CONSTANTS.common_label_systemProperties(), NameTokens.PropertiesPresenter); commonLeaf.addItem(interfaces); commonLeaf.addItem(sockets); commonLeaf.addItem(properties); navigation.expandTopLevel(); stack.add(navigation); layout.add(stack); scroll = new ScrollPanel(layout); } public Widget asWidget() { return scroll; } public void updateSubsystems(List<SubsystemRecord> subsystems) { //subsystemSection.updateSubsystems(subsystems); subsystemLeaf.removeItems(); SubsystemTreeBuilder.build(subsystemLeaf, subsystems); } public void setProfiles(List<ProfileRecord> profiles) { List<String> profileNames = new ArrayList<String>(profiles.size()); for(ProfileRecord p :profiles) { profileNames.add(p.getName()); } profileSelector.setProfiles(profileNames); } }
package com.htmlhifive.testexplorer.api; import java.awt.*; import java.awt.image.BufferedImage; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.nio.file.NoSuchFileException; import java.util.List; import java.util.Map; import java.util.Properties; import javax.imageio.ImageIO; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import com.htmlhifive.testexplorer.image.EdgeDetector; import com.htmlhifive.testexplorer.model.Capability; import com.htmlhifive.testexplorer.model.Screenshot; import com.htmlhifive.testlib.image.utlity.ImageUtility; @Controller @RequestMapping("/image") public class ImageController { // PropertyKey private static final String RESULTS_DIR = "resultsDir"; // SessionKey private static final String KEY_INDEX_MAP = "INDEX_MAP"; @Autowired private HttpServletRequest request; @Autowired private Properties apiConfig; private static Logger log = LoggerFactory.getLogger(ImageController.class); /** * Get the image from id. * * @param id test execution result id or right image id * @param response HttpServletResponse */ @RequestMapping(value = "/get", method = RequestMethod.GET) public void getImage(@RequestParam String id, HttpServletResponse response) { @SuppressWarnings("unchecked") Map<String, Screenshot> screenshotMap = (Map<String, Screenshot>) request.getSession(false).getAttribute( KEY_INDEX_MAP); // Validate Parameters. Screenshot screenshot = screenshotMap.get(id); if (screenshot == null) { log.error("id(" + id + ") is invalid parameter."); response.setStatus(HttpStatus.INTERNAL_SERVER_ERROR.value()); return; } File pngFile; try { pngFile = findPngFile(screenshot); } catch (IOException e) { response.setStatus(HttpStatus.INTERNAL_SERVER_ERROR.value()); return; } // Send png file. try { writeFileToResponse(pngFile, response); } catch (IOException e) { response.setStatus(HttpStatus.INTERNAL_SERVER_ERROR.value()); } } /** * Get edge detection result of an image. * * @param imageId id of an image to be processed by edge detector. * @param response HttpServletResponse */ @RequestMapping(value = "/getEdge", method = RequestMethod.GET) public void getEdgeImage(@RequestParam String imageId, @RequestParam(defaultValue = "-1") int colorIndex, HttpServletResponse response) { @SuppressWarnings("unchecked") Map<String, Screenshot> screenshotMap = (Map<String, Screenshot>) request.getSession(false).getAttribute( KEY_INDEX_MAP); // Validate Parameters. 
Screenshot screenshot = screenshotMap.get(imageId); if (screenshot == null) { log.error("id(" + imageId + ") is invalid parameter."); response.setStatus(HttpStatus.INTERNAL_SERVER_ERROR.value()); return; } File pngFile; try { pngFile = findPngFile(screenshot); } catch (IOException e) { response.setStatus(HttpStatus.INTERNAL_SERVER_ERROR.value()); return; } BufferedImage image; try { image = createImage(pngFile); } catch (IOException e) { response.setStatus(HttpStatus.INTERNAL_SERVER_ERROR.value()); return; } EdgeDetector edgeDetector = new EdgeDetector(0.5); switch (colorIndex) { case 0: edgeDetector.setBackgroundColor(new Color(0, 0, 0, 255)); edgeDetector.setForegroundColor(new Color(255, 0, 0, 255)); break; case 1: edgeDetector.setBackgroundColor(new Color(0, 0, 0, 0)); edgeDetector.setForegroundColor(new Color(0, 0, 255, 128)); break; } BufferedImage edgeImage = edgeDetector.DetectEdge(image); // Send png file. try { writeImageToResponse(edgeImage, response); } catch (IOException e) { response.setStatus(HttpStatus.INTERNAL_SERVER_ERROR.value()); } } /** * Get the diff image With a marker of comparison result. If there is no difference, return normal image. * * @param sourceId comparison source image id * @param targetId comparison target image id * @param response HttpServletResponse */ @RequestMapping(value = "/getDiff", method = RequestMethod.GET) public void getDiffImage(@RequestParam String sourceId, @RequestParam String targetId, HttpServletResponse response) { @SuppressWarnings("unchecked") Map<String, Screenshot> screenshotMap = (Map<String, Screenshot>) request.getSession(false).getAttribute( KEY_INDEX_MAP); // Validate Parameters. Screenshot sourceScreenshot = screenshotMap.get(sourceId); if (sourceScreenshot == null) { log.error("id(" + sourceId + ") is invalid parameter."); response.setStatus(HttpStatus.INTERNAL_SERVER_ERROR.value()); return; } Screenshot targetScreenshot = screenshotMap.get(targetId); if (targetScreenshot == null) { log.error("id(" + targetId + ") is invalid parameter."); response.setStatus(HttpStatus.INTERNAL_SERVER_ERROR.value()); return; } File sourcePngFile; File targetPngFile; try { sourcePngFile = findPngFile(sourceScreenshot); targetPngFile = findPngFile(targetScreenshot); } catch (IOException e) { response.setStatus(HttpStatus.INTERNAL_SERVER_ERROR.value()); return; } // Create a partial image. BufferedImage actualImage; BufferedImage expectedImage; try { actualImage = createImage(sourcePngFile); expectedImage = createImage(targetPngFile); } catch (IOException e) { response.setStatus(HttpStatus.INTERNAL_SERVER_ERROR.value()); return; } // Compare. List<Point> diffPoints = ImageUtility.compareImages(expectedImage, null, actualImage, null, null, null); BufferedImage image = !diffPoints.isEmpty() ? ImageUtility.getMarkedImage(actualImage, diffPoints) : actualImage; // Send png file. try { writeImageToResponse(image, response); } catch (IOException e) { response.setStatus(HttpStatus.INTERNAL_SERVER_ERROR.value()); } } private void writeFileToResponse(File file, HttpServletResponse response) throws IOException { BufferedImage image = null; try { image = ImageIO.read(file); } catch (IOException e) { log.error(e.getMessage(), e); throw e; } writeImageToResponse(image, response); } private void writeImageToResponse(BufferedImage image, HttpServletResponse response) throws IOException { // Send png file. 
response.setContentType("image/png"); try { ImageIO.write(image, "png", response.getOutputStream()); } catch (IOException e) { log.error(e.getMessage(), e); throw e; } catch (IndexOutOfBoundsException e2) { // Even if an exception occurs, no problem in the subsequent processing. log.warn(e2.getMessage(), e2); } } private File getBaseDirectory(Screenshot screenshot) throws NoSuchFileException { Capability capability = screenshot.getCapability(); String directoryName = screenshot.getTestCaseResult().getExecuteTime() + File.separatorChar + capability.getTestClass(); File base = new File(apiConfig.getProperty(RESULTS_DIR), directoryName); if (!base.exists() || !base.isDirectory()) { log.error("Directory(" + base.getAbsolutePath() + ") Not Found."); throw new NoSuchFileException(base.getAbsolutePath()); } return base; } private File findPngFile(Screenshot screenshot) throws IOException { return findFile(screenshot, ".png"); } private File findFile(Screenshot screenshot, String extension) throws IOException { File root = getBaseDirectory(screenshot); String fileName = screenshot.getFileName() + extension; File file = new File(root, fileName); if (!file.exists() || !file.isFile()) { throw new FileNotFoundException(file.getAbsolutePath() + " Not Found."); } return file; } private BufferedImage createImage(File pngFile) throws IOException { try { return ImageIO.read(pngFile); } catch (IOException e) { log.error(e.getMessage(), e); throw e; } } }
package org.intermine.web.logic.query; /** * Class to generate help messages appropriate to a given constraint for display in the QueryBuilder * and template form. * @author Richard Smith * */ public class DisplayConstraintHelpMessages { /** * Generate a context sensitive help message for a constraint. The message will explain all * available options - e.g. if a dropdown will explain how to enter multiple values, if a * string field how to use wildcards. * @param con the constraint to generate help for * @return a context sensitive help message */ public static String getHelpMessage(DisplayConstraint con) { StringBuffer sb = new StringBuffer(); if (con.isLookup()) { sb.append("Search multiple fields including: " + con.getKeyFields() + " You can enter multiple values separated by comma, use * as a wildcard."); if (con.getExtraConstraintValues() != null) { sb.append(" You can choose to limit the " + con.getPath().getType() + " to a particular " + con.getExtraConstraintClassName() + "."); } sb.append(getBagMessage(con)); } else if (con.isNullSelected()) { sb.append("Select a value."); } else if (con.getBags() != null) { sb.append("Select a value."); sb.append(getBagMessage(con)); } else if (con.getPossibleValues() != null && !con.getPossibleValues().isEmpty()) { sb.append("Choose a value from the dropdown. To choose multiple values set the" + " operation to IN or NOT IN."); if (con.getPath().isAttribute() && "String".equals(con.getPath().getType())) { sb.append(" To type text set the operation to LIKE or NOT LIKE"); sb.append(", you can use * as a wildcard"); } sb.append("."); sb.append(getBagMessage(con)); } if (sb.length() == 0) { return null; } return sb.toString(); } private static String getBagMessage(DisplayConstraint con) { if (con.getBags() != null) { return " Or use the checkbox and constrain the " + con.getBagType() + " to be in a saved list."; } return ""; } }
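/*
 * Hypothetical usage sketch; not part of the original file. getHelpMessage() is a static
 * helper, so a caller only needs the DisplayConstraint being rendered and must handle the
 * null return that signals "no help available". The "constraint" variable below is assumed
 * to come from the surrounding QueryBuilder/template-form code.
 */
String help = DisplayConstraintHelpMessages.getHelpMessage(constraint);
if (help != null) {
    System.out.println(help); // e.g. rendered as a tooltip next to the constraint editor
}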
package org.agito.activiti.jboss7.its; import java.util.Collection; import java.util.Properties; import org.agito.activiti.jboss7.engine.impl.JobExecutorProcessEngineConfiguration; import org.agito.activiti.jboss7.engine.impl.JtaProcessEngineConfiguration; import org.jboss.shrinkwrap.api.spec.ResourceAdapterArchive; import org.jboss.shrinkwrap.resolver.api.DependencyResolvers; import org.jboss.shrinkwrap.resolver.api.maven.MavenDependencyResolver; public class AbstractContainerTest { private static String JOBEXECUTOR_EE_VERSION; private static ResourceAdapterArchive CACHED_JCA_ASSET; public JobExecutorProcessEngineConfiguration getProcessEngineConfigurationImpl(String processEngineName) { JtaProcessEngineConfiguration ret = new JtaProcessEngineConfiguration(); ret.setProcessEngineName(processEngineName); ret.setDataSourceJndiName("java:jboss/datasources/" + processEngineName); ret.setJobExecutorActivate(true); ret.setDatabaseSchemaUpdate("true"); return ret; } public static ResourceAdapterArchive getActivitiResourceAdapterArchive() { if (CACHED_JCA_ASSET != null) { return CACHED_JCA_ASSET; } else { MavenDependencyResolver resolver = DependencyResolvers.use(MavenDependencyResolver.class) .loadMetadataFromPom("pom.xml"); Collection<ResourceAdapterArchive> resolvedArchives = resolver.artifact( "org.agito:activiti-jobexecutor-ee-jca-rar:rar:" + getJobExecutorVersion()).resolveAs( ResourceAdapterArchive.class); if (resolvedArchives.size() == 0) { throw new RuntimeException("could not resolve org.agito:activiti-jobexecutor-ee-jca-rar"); } else { CACHED_JCA_ASSET = resolvedArchives.iterator().next(); return CACHED_JCA_ASSET; } } } protected static String getJobExecutorVersion() { if (JOBEXECUTOR_EE_VERSION == null) JOBEXECUTOR_EE_VERSION = readCurrentVersion(); return JOBEXECUTOR_EE_VERSION; } private static String readCurrentVersion() { Properties props = new Properties(); try { props.load(AbstractContainerTest.class.getResourceAsStream("/build.properties")); } catch (Exception e) { throw new RuntimeException("Error reading build.properties. Run maven build first.", e); } return props.getProperty("current.version"); } }
package org.javarosa.workflow.activity; import java.util.Enumeration; import java.util.Hashtable; import java.util.Vector; import javax.microedition.lcdui.Command; import org.javarosa.core.Context; import org.javarosa.core.api.Constants; import org.javarosa.core.api.IActivity; import org.javarosa.core.api.ICommand; import org.javarosa.core.api.IShell; import org.javarosa.workflow.WorkflowLaunchContext; import org.javarosa.workflow.model.IWorkflowActionListener; import org.javarosa.workflow.model.Workflow; import org.javarosa.workflow.model.WorkflowAction; import org.javarosa.workflow.view.StateLaunchScreen; /** * WorkflowLaunchActivity is responsible for identifying the current state of a * workflow and presenting the appropriate User Interface for that state. * * @author Clayton Sims * @date Jan 13, 2009 * */ public class WorkflowActivity implements IActivity, IWorkflowActionListener { /** * The workflow that will be used in this activity. */ Workflow workflow; WorkflowLaunchContext context; IShell shell; StateLaunchScreen stateScreen; /** Vector<Command> **/ Vector commands = new Vector(); /* * (non-Javadoc) * * @see * org.javarosa.core.api.IActivity#contextChanged(org.javarosa.core.Context) */ public void contextChanged(Context globalContext) { // TODO Auto-generated method stub } /* * (non-Javadoc) * * @see org.javarosa.core.api.IActivity#destroy() */ public void destroy() { // TODO Auto-generated method stub } /* * (non-Javadoc) * * @see org.javarosa.core.api.IActivity#getActivityContext() */ public Context getActivityContext() { return context; } /* * (non-Javadoc) * * @see org.javarosa.core.api.IActivity#halt() */ public void halt() { // TODO Auto-generated method stub } /* * (non-Javadoc) * * @see org.javarosa.core.api.IActivity#resume(org.javarosa.core.Context) */ public void resume(Context globalContext) { shell.setDisplay(this, stateScreen); } /* * (non-Javadoc) * * @see * org.javarosa.core.api.IActivity#setShell(org.javarosa.core.api.IShell) */ public void setShell(IShell shell) { this.shell = shell; } /* * (non-Javadoc) * * @see org.javarosa.core.api.IActivity#start(org.javarosa.core.Context) */ public void start(Context context) { if (context instanceof WorkflowLaunchContext) { this.context = (WorkflowLaunchContext) context; this.workflow = this.context.getWorkflow(); this.workflow.setDataModel(this.context.getDataModel()); } stateScreen = new StateLaunchScreen("Title", this.workflow); Enumeration en = commands.elements(); while(en.hasMoreElements()) { stateScreen.addCommand((Command)en.nextElement()); } shell.setDisplay(this, stateScreen); } public void actionFired(WorkflowAction action) { Hashtable returnArgs = new Hashtable(); returnArgs.put(Constants.RETURN_ARG_KEY, action); shell.returnFromActivity(this, Constants.ACTIVITY_SUSPEND, returnArgs); } /* * (non-Javadoc) * * @see * org.javarosa.core.api.IActivity#annotateCommand(org.javarosa.core.api * .ICommand) */ public void annotateCommand(ICommand command) { if (command.getCommand() instanceof Command) { this.commands.addElement(command.getCommand()); } else { throw new RuntimeException( "Attempted to annotate a platform invalid command of class " + command.getCommand().getClass().toString() + " to the j2me platform Activity WorkflowActivity"); } } }
package org.broadinstitute.sting.playground.gatk.walkers; import org.broad.tribble.vcf.VCFConstants; import org.broadinstitute.sting.commandline.Argument; import org.broadinstitute.sting.gatk.contexts.AlignmentContext; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; import org.broadinstitute.sting.gatk.contexts.variantcontext.Allele; import org.broadinstitute.sting.gatk.contexts.variantcontext.Genotype; import org.broadinstitute.sting.gatk.contexts.variantcontext.VariantContext; import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.gatk.refdata.ReferenceOrderedDatum; import org.broadinstitute.sting.gatk.walkers.RodWalker; import org.broadinstitute.sting.gatk.walkers.RMD; import org.broadinstitute.sting.gatk.walkers.Requires; import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.MathUtils; import java.io.PrintStream; import java.util.*; /** * Produces an input file to Beagle imputation engine, listing genotype likelihoods for each sample in input variant file */ @Requires(value={},referenceMetaData=@RMD(name=ProduceBeagleInputWalker.ROD_NAME, type=ReferenceOrderedDatum.class)) public class ProduceBeagleInputWalker extends RodWalker<Integer, Integer> { public static final String ROD_NAME = "variant"; @Argument(fullName = "beagle_file", shortName = "beagle", doc = "File to print BEAGLE-specific data for use with imputation", required = true) public PrintStream beagleWriter = null; @Argument(fullName = "genotypes_file", shortName = "genotypes", doc = "File to print reference genotypes for error analysis", required = false) public PrintStream beagleGenotypesWriter = null; @Argument(fullName = "inserted_nocall_rate", shortName = "nc_rate", doc = "Rate (0-1) at which genotype no-calls will be randomly inserted, for testing", required = false) public double insertedNoCallRate = 0; private TreeSet<String> samples = null; private Random generator; private void initialize(Set<String> sampleNames) { generator = new Random(); beagleWriter.print("marker alleleA alleleB"); samples = new TreeSet<String>(sampleNames); for ( String sample : samples ) beagleWriter.print(String.format(" %s %s %s", sample, sample, sample)); beagleWriter.println(); if (beagleGenotypesWriter != null) beagleGenotypesWriter.println("dummy header"); } public Integer map( RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context ) { if( tracker != null ) { GenomeLoc loc = context.getLocation(); VariantContext vc_eval; vc_eval = tracker.getVariantContext(ref, ROD_NAME, null, loc, false); if ( vc_eval == null || vc_eval.isFiltered() ) return 0; if ( samples == null ) { initialize(vc_eval.getSampleNames()); } // output marker ID to Beagle input file beagleWriter.print(String.format("%s ", vc_eval.getLocation().toString())); if (beagleGenotypesWriter != null) beagleGenotypesWriter.print(String.format("%s ", vc_eval.getLocation().toString())); for (Allele allele: vc_eval.getAlleles()) { // TODO -- check whether this is really needed by Beagle String bglPrintString; if (allele.isNoCall() || allele.isNull()) bglPrintString = "0"; else bglPrintString = allele.toString().substring(0,1); // get rid of * in case of reference allele beagleWriter.print(String.format("%s ", bglPrintString)); } if ( !vc_eval.hasGenotypes() ) return 0; Map<String, Genotype> genotypes = vc_eval.getGenotypes(); for ( String sample : samples ) { // use sample as key into genotypes structure Genotype genotype = genotypes.get(sample); if (genotype.isCalled() && 
genotype.hasAttribute(VCFConstants.GENOTYPE_LIKELIHOODS_KEY)) { String[] glArray = genotype.getAttributeAsString(VCFConstants.GENOTYPE_LIKELIHOODS_KEY).split(","); Double maxLikelihood = -100.0; ArrayList<Double> likeArray = new ArrayList<Double>(); for (String gl : glArray) { // need to normalize likelihoods to avoid precision loss. In worst case, if all 3 log-likelihoods are too // small, we could end up with linear likelihoods of form 0.00 0.00 0.00 which will mess up imputation. Double dg = Double.valueOf(gl); if (dg> maxLikelihood) maxLikelihood = dg; likeArray.add(dg); } // see if we need to randomly mask out genotype in this position. Double d = generator.nextDouble(); if (d > insertedNoCallRate ) { // System.out.format("%5.4f ", d); for (Double likeVal: likeArray) beagleWriter.print(String.format("%5.4f ",Math.pow(10, likeVal-maxLikelihood))); } else { // we are masking out this genotype beagleWriter.print("0.33 0.33 0.33 "); } if (beagleGenotypesWriter != null) { // write both alleles of the called genotype char a = genotype.getAllele(0).toString().charAt(0); char b = genotype.getAllele(1).toString().charAt(0); beagleGenotypesWriter.format("%c %c ", a, b); } } else { beagleWriter.print("0.33 0.33 0.33 "); // write 1/3 likelihoods for uncalled genotypes. if (beagleGenotypesWriter != null) beagleGenotypesWriter.print(". . "); } } beagleWriter.println(); if (beagleGenotypesWriter != null) beagleGenotypesWriter.println(); } return 1; } public Integer reduceInit() { return 0; // Nothing to do here } public Integer reduce( Integer value, Integer sum ) { return 0; // Nothing to do here } public void onTraversalDone( Integer sum ) { } }
package org.junit.gen5.engine.support.descriptor; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.gen5.api.Assertions.assertEquals; import static org.junit.gen5.api.Assertions.assertFalse; import static org.junit.gen5.api.Assertions.assertNotSame; import static org.junit.gen5.api.Assertions.assertThrows; import static org.junit.gen5.api.Assertions.assertTrue; import java.lang.reflect.Method; import org.junit.gen5.api.Test; import org.junit.gen5.api.TestInfo; import org.junit.gen5.commons.util.PreconditionViolationException; /** * Unit tests for {@link JavaPackageSource}, {@link JavaClassSource}, and * {@link JavaMethodSource}. * * @since 5.0 */ class JavaSourceTests { @Test void packageSourceFromNullPackageName() { assertThrows(PreconditionViolationException.class, () -> new JavaPackageSource((String) null)); } @Test void packageSourceFromEmptyPackageName() { assertThrows(PreconditionViolationException.class, () -> new JavaPackageSource(" ")); } @Test void packageSourceFromPackageName() { String testPackage = getClass().getPackage().getName(); JavaPackageSource source = new JavaPackageSource(testPackage); assertThat(source.getPackageName()).isEqualTo(testPackage); } @Test void packageSourceFromNullPackageReference() { assertThrows(PreconditionViolationException.class, () -> new JavaPackageSource((Package) null)); } @Test void packageSourceFromPackageReference() { Package testPackage = getClass().getPackage(); JavaPackageSource source = new JavaPackageSource(testPackage); assertThat(source.getPackageName()).isEqualTo(testPackage.getName()); } @Test void classSource() { Class<?> testClass = getClass(); JavaClassSource source = new JavaClassSource(testClass); assertThat(source.getJavaClass()).isEqualTo(testClass); } @Test void methodSource() throws Exception { Method testMethod = getExampleMethod(); JavaMethodSource source = new JavaMethodSource(testMethod); assertThat(source.getJavaClass()).isEqualTo(getClass()); assertThat(source.getJavaMethodName()).isEqualTo(testMethod.getName()); assertThat(source.getJavaMethodParameterTypes()).containsExactly(String.class); } @Test void equalsAndHashCodeForJavaPackageSource() { Package testPackage = getClass().getPackage(); JavaPackageSource source1 = new JavaPackageSource(testPackage); JavaPackageSource source2 = new JavaPackageSource(testPackage); assertNotSame(source1, source2); assertFalse(source1.equals(null)); assertTrue(source1.equals(source1)); assertTrue(source1.equals(source2)); assertTrue(source2.equals(source1)); assertEquals(source1.hashCode(), source2.hashCode()); } @Test void equalsAndHashCodeForJavaClassSource() { Class<?> testClass = getClass(); JavaClassSource source1 = new JavaClassSource(testClass); JavaClassSource source2 = new JavaClassSource(testClass); assertNotSame(source1, source2); assertFalse(source1.equals(null)); assertTrue(source1.equals(source1)); assertTrue(source1.equals(source2)); assertTrue(source2.equals(source1)); assertEquals(source1.hashCode(), source2.hashCode()); } @Test void equalsAndHashCodeForJavaMethodSource(TestInfo testInfo) throws Exception { Method testMethod = getExampleMethod(); JavaMethodSource source1 = new JavaMethodSource(testMethod); JavaMethodSource source2 = new JavaMethodSource(testMethod); assertNotSame(source1, source2); assertFalse(source1.equals(null)); assertTrue(source1.equals(source1)); assertTrue(source1.equals(source2)); assertTrue(source2.equals(source1)); assertEquals(source1.hashCode(), source2.hashCode()); } void exampleMethod(String text) { } 
private Method getExampleMethod() throws Exception { return getClass().getDeclaredMethod("exampleMethod", String.class); } }
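/*
 * Hedged sketch (not taken from the junit5 sources): the equals/hashCode shape that the
 * contract tests above expect from a value-like source class -- equality based on the
 * wrapped value, with a matching hashCode. ExamplePackageSource is a made-up name used
 * only for illustration.
 */
import java.util.Objects;

public class ExamplePackageSource {

    private final String packageName;

    public ExamplePackageSource(String packageName) {
        this.packageName = Objects.requireNonNull(packageName, "packageName must not be null");
    }

    public String getPackageName() {
        return packageName;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        ExamplePackageSource that = (ExamplePackageSource) obj;
        return packageName.equals(that.packageName);
    }

    @Override
    public int hashCode() {
        return packageName.hashCode();
    }
}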
package org.mobicents.protocols.ss7.map.service.supplementary; import javolution.xml.XMLFormat; import javolution.xml.stream.XMLStreamException; import org.apache.log4j.Logger; import org.mobicents.protocols.ss7.map.MessageImpl; import org.mobicents.protocols.ss7.map.api.MAPException; import org.mobicents.protocols.ss7.map.api.datacoding.CBSDataCodingScheme; import org.mobicents.protocols.ss7.map.api.primitives.USSDString; import org.mobicents.protocols.ss7.map.api.service.supplementary.MAPDialogSupplementary; import org.mobicents.protocols.ss7.map.api.service.supplementary.SupplementaryMessage; import org.mobicents.protocols.ss7.map.datacoding.CBSDataCodingSchemeImpl; import org.mobicents.protocols.ss7.map.primitives.MAPAsnPrimitive; import org.mobicents.protocols.ss7.map.primitives.USSDStringImpl; /** * @author amit bhayani * */ public abstract class SupplementaryMessageImpl extends MessageImpl implements SupplementaryMessage, MAPAsnPrimitive { private static final Logger logger = Logger.getLogger(SupplementaryMessageImpl.class); private static final String DATA_CODING_SCHEME = "dataCodingScheme"; private static final String STRING = "string"; private static final byte DEFAULT_DATA_CODING_SCHEME = 0x0f; private static final String DEFAULT_USSD_STRING = ""; protected CBSDataCodingScheme ussdDataCodingSch; protected USSDString ussdString; public SupplementaryMessageImpl() { super(); } public SupplementaryMessageImpl(CBSDataCodingScheme ussdDataCodingSch, USSDString ussdString) { this.ussdDataCodingSch = ussdDataCodingSch; this.ussdString = ussdString; } public MAPDialogSupplementary getMAPDialog() { return (MAPDialogSupplementary) super.getMAPDialog(); } public CBSDataCodingScheme getDataCodingScheme() { return ussdDataCodingSch; } public USSDString getUSSDString() { return this.ussdString; } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append(", ussdDataCodingSch="); sb.append(ussdDataCodingSch); if (ussdString != null) { sb.append(", ussdString="); try { sb.append(ussdString.getString(null)); } catch (Exception e) { } } sb.append("]"); return sb.toString(); } /** * XML Serialization/Deserialization */ protected static final XMLFormat<SupplementaryMessageImpl> USSD_MESSAGE_XML = new XMLFormat<SupplementaryMessageImpl>( SupplementaryMessageImpl.class) { @Override public void read(javolution.xml.XMLFormat.InputElement xml, SupplementaryMessageImpl ussdMessage) throws XMLStreamException { MAP_MESSAGE_XML.read(xml, ussdMessage); ussdMessage.ussdDataCodingSch = new CBSDataCodingSchemeImpl(xml.getAttribute(DATA_CODING_SCHEME, DEFAULT_DATA_CODING_SCHEME)); String encodedString = xml.getAttribute(STRING, DEFAULT_USSD_STRING); try { ussdMessage.ussdString = new USSDStringImpl(encodedString, ussdMessage.ussdDataCodingSch, null); } catch (MAPException e) { logger.error("Error while trying to read ussd string", e); } } @Override public void write(SupplementaryMessageImpl ussdMessage, javolution.xml.XMLFormat.OutputElement xml) throws XMLStreamException { MAP_MESSAGE_XML.write(ussdMessage, xml); xml.setAttribute(DATA_CODING_SCHEME, ussdMessage.ussdDataCodingSch.getCode()); String ussdStr = ""; try { ussdStr = ussdMessage.ussdString.getString(null); } catch (MAPException e) { logger.error("Error while trying to write ussd string", e); } xml.setAttribute(STRING, ussdStr); } }; }
package org.languagetool.remote; import org.languagetool.AnalyzedSentence; import org.languagetool.AnalyzedToken; import org.languagetool.AnalyzedTokenReadings; import org.languagetool.JLanguageTool; import org.languagetool.Language; import org.languagetool.Languages; import org.languagetool.synthesis.Synthesizer; import org.languagetool.tools.StringTools; import org.languagetool.tools.Tools; import java.io.BufferedWriter; import java.io.File; import java.io.FileInputStream; import java.io.FileWriter; import java.io.IOException; import java.io.InputStreamReader; import java.io.StringWriter; import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.Paths; import java.text.SimpleDateFormat; import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Takes correct sentences, introduces errors (e.g. confusion pairs), and * evaluates the LT rules. */ public class ArtificialErrorEval { static String[] words = new String[2]; static String[] lemmas = new String[2]; static String[] fakeRuleIDs = new String[2]; //TP: true positive with the expected suggestion //TPns: true positive with no suggestion static List<String> classifyTypes = Arrays.asList("TP", "FP", "TN", "FN", "TPns", "TPws"); static int[][] results = new int[2][6]; // word0/word1 ; TP/FP/TN/FN/TP with no suggestion/TP wrong suggestion static int[] accumulateResults = new int[6]; // totalErrors/TP/FP/TN/FN static RemoteLanguageTool lt; static JLanguageTool localLt; static Synthesizer synth; static int maxInputSentences = 1000000; // decrease this number for testing static boolean verboseOutput = false; static boolean unidirectional = false; static boolean wholeword = true; static boolean isDoubleLetters = false; static boolean isDiacritics = false; static boolean inflected = false; static boolean isParallelCorpus = false; static int columnCorrect = 1; static int columnIncorrect = 2; static Pattern pWordboundaries = Pattern.compile("\\b.+\\b"); static int countLine = 0; static int checkedSentences = 0; static int maxCheckedSentences = 1000000; // decrease this number for testing static List<String> onlyRules = new ArrayList<String>(); static List<String> disabledRules = new ArrayList<String>(); static String summaryOutputFilename = ""; static String verboseOutputFilename = ""; static String errorCategory = ""; static String langCode = ""; static Language language; static String corpusFilePath = ""; static String outputPathRoot = ""; static HashMap<String, List<RemoteRuleMatch>> cachedMatches; static String remoteServer = "http://localhost:8081"; static String userName = ""; static String apiKey = ""; public static void main(String[] args) throws IOException { //use configuration file if (args.length==1) { String configurationFilename = args[0]; Properties prop = new Properties(); FileInputStream fis = new FileInputStream(configurationFilename); prop.load(new InputStreamReader(fis, Charset.forName("UTF-8"))); String maxInputSentencesStr = prop.getProperty("maxInputSentences"); String maxCheckedSentencesStr = prop.getProperty("maxCheckedSentences"); if (maxInputSentencesStr != null) { maxInputSentences = Integer.parseInt(maxInputSentencesStr); } if (maxCheckedSentencesStr != null) { maxCheckedSentences = Integer.parseInt(maxCheckedSentencesStr) + 1; } boolean printSummaryDetails = Boolean.parseBoolean(prop.getProperty("printSummaryDetails", "true")); boolean printHeader = Boolean.parseBoolean(prop.getProperty("printHeader", "true")); remoteServer = prop.getProperty("remoteServer", 
"http://localhost:8081"); String disabledRulesStr = prop.getProperty("disabledRules", ""); if (!disabledRulesStr.isEmpty()) { disabledRules = Arrays.asList(disabledRulesStr.split(",")); } String onlyRulesStr = prop.getProperty("onlyRules", ""); if (!onlyRulesStr.isEmpty()) { onlyRules = Arrays.asList(onlyRulesStr.split(",")); } // Only one file String analyzeOneFile = prop.getProperty("analyzeOneFile"); if (analyzeOneFile.equalsIgnoreCase("true")) { userName = prop.getProperty("userName", ""); apiKey = prop.getProperty("apiKey", ""); runEvaluationOnFile(prop.getProperty("languageCode"), prop.getProperty("inputFile")); } else { String inputFolder = prop.getProperty("inputFolder"); String outpuFolder = prop.getProperty("outputFolder"); runEvaluationOnFolders(inputFolder, outpuFolder, printSummaryDetails, printHeader); } } // language code + input file else if (args.length == 2) { runEvaluationOnFile(args[0], args[1]); } else { writeHelp(); System.exit(1); } } private static void runEvaluationOnFile(String languageCode, String inputFile) throws IOException { langCode = languageCode; corpusFilePath = inputFile; verboseOutput = true; language = Languages.getLanguageForShortCode(langCode); localLt = new JLanguageTool(language); synth = language.getSynthesizer(); lt = new RemoteLanguageTool(Tools.getUrl(remoteServer)); File corpusFile = new File(corpusFilePath); if (!corpusFile.exists() || corpusFile.isDirectory()) { throw new IOException("File not found: " + corpusFilePath); } String fileName = corpusFile.getName(); System.out.println("Analyzing file: " + fileName); fileName = fileName.substring(0, fileName.lastIndexOf('.')); verboseOutputFilename = inputFile +".results"; // reset all global Variables to default unidirectional = false; wholeword = true; isDoubleLetters = false; isDiacritics = false; inflected = false; isParallelCorpus = false; columnCorrect = 1; columnIncorrect = 2; if (fileName.startsWith("parallelcorpus") || fileName.startsWith("pc-")) { isParallelCorpus = true; unidirectional = true; wholeword = false; String parts[] = fileName.split("-"); if (parts.length > 2) { columnCorrect = Integer.parseInt(parts[1]); columnIncorrect = Integer.parseInt(parts[2]); } } else if (fileName.equals("diacritics")) { isDiacritics = true; unidirectional = true; } else if (fileName.equals("double_letters")) { isDoubleLetters = true; unidirectional = true; } else { String[] parts = fileName.split("~"); words[0] = parts[0].replaceAll("_", " "); words[1] = parts[1].replaceAll("_", " "); if (parts.length > 2) { unidirectional = parts[2].equals("u"); if (parts[2].equals("u_notwholeword")) { unidirectional = true; wholeword = false; } if (parts[2].equals("notwholeword")) { wholeword = false; } } } run(true); } private static void runEvaluationOnFolders(String inputFolder, String outputFolder, boolean printSummaryDetails, boolean printHeader) throws IOException { verboseOutput = true; SimpleDateFormat formatter= new SimpleDateFormat("yyyy-MM-dd"); Date date = new Date(System.currentTimeMillis()); outputPathRoot = outputFolder+"/"+formatter.format(date); Files.createDirectories(Paths.get(outputPathRoot)); //TODO: remove existing folder lt = new RemoteLanguageTool(Tools.getUrl(remoteServer)); File[] languageDirectories = new File(inputFolder).listFiles(File::isDirectory); for (File languageDirectory : languageDirectories) { langCode = languageDirectory.getName(); language = Languages.getLanguageForShortCode(langCode); Files.createDirectories(Paths.get(outputPathRoot+"/"+langCode)); summaryOutputFilename = 
outputPathRoot+"/"+langCode+"/"+langCode+".tsv"; if (printHeader) { appendToFile(summaryOutputFilename, "Category\tRules\tErrors\tPrecision\tRecall\tTP\tFP\tTN\tFN"); } File[] categoryDirectories = languageDirectory.listFiles(File::isDirectory); for (File categoryDirectory: categoryDirectories) { Arrays.fill(accumulateResults, 0); errorCategory = categoryDirectory.getName(); Files.createDirectories(Paths.get(outputPathRoot+"/"+langCode+"/"+errorCategory)); File[] corpusFiles = categoryDirectory.listFiles(File::isFile); for (File myCorpusFile: corpusFiles) { corpusFilePath = myCorpusFile.getAbsolutePath(); String fileName = myCorpusFile.getName(); System.out.println("Analyzing file: " + fileName); fileName = fileName.substring(0, fileName.lastIndexOf('.')); //reset all global Variables to default unidirectional = false; wholeword = true; isDoubleLetters = false; isDiacritics = false; inflected = false; isParallelCorpus = false; columnCorrect = 1; columnIncorrect = 2; if (fileName.startsWith("parallelcorpus") || fileName.startsWith("pc-")) { isParallelCorpus = true; unidirectional = true; String parts[] = fileName.split("-"); if (parts.length > 2) { columnCorrect = Integer.parseInt(parts[1]); columnIncorrect = Integer.parseInt(parts[2]); } } else if (fileName.equals("diacritics")) { isDiacritics = true; unidirectional = true; } else if (fileName.equals("double_letters")) { isDoubleLetters = true; unidirectional = true; } else { String[] parts = fileName.split("~"); words[0] = parts[0].replaceAll("_", " "); words[1] = parts[1].replaceAll("_", " "); if (parts.length > 2) { unidirectional = parts[2].equals("u"); if (parts[2].equals("u_notwholeword")) { unidirectional = true; wholeword = false; } if (parts[2].equals("notwholeword")) { wholeword = false; } } } verboseOutputFilename = outputPathRoot+"/"+langCode+"/"+errorCategory+"/"+myCorpusFile.getName(); run(printSummaryDetails); } // total by category float precision = accumulateResults[1] / (float) (accumulateResults[1] + accumulateResults[2]); float recall = accumulateResults[1] / (float) (accumulateResults[1] + accumulateResults[4]); appendToFile (summaryOutputFilename, errorCategory + "\t" + "TOTAL" + "\t" + accumulateResults[0] + "\t" + String.format(Locale.ROOT, "%.4f", precision) + "\t" + String.format(Locale.ROOT, "%.4f", recall) + "\t" + accumulateResults[1] + "\t" + accumulateResults[2] + "\t" + accumulateResults[3] + "\t" + accumulateResults[4] + "\t" ); } } System.out.println("FINISHED!"); } private static void run(boolean printSummaryDetails) throws IOException { int ignoredLines = 0; Arrays.fill(results[0], 0); Arrays.fill(results[1], 0); fakeRuleIDs[0] = "rules_" + words[0] + "->" + words[1]; // rules in one direction fakeRuleIDs[1] = "rules_" + words[1] + "->" + words[0]; // rules in the other direction CheckConfiguration config; if (!userName.isEmpty() && !apiKey.isEmpty()) { config = new CheckConfigurationBuilder(langCode) .disabledRuleIds("WHITESPACE_RULE") .textSessionID("-2") .username(userName) .apiKey(apiKey) .build(); } else { config = new CheckConfigurationBuilder(langCode) .disabledRuleIds("WHITESPACE_RULE") .textSessionID("-2") .build(); } long start = System.currentTimeMillis(); List<String> lines = Files.readAllLines(Paths.get(corpusFilePath)); if (!inflected && !isDoubleLetters && !isDiacritics && !isParallelCorpus) { final Pattern p0; Matcher mWordBoundaries = pWordboundaries.matcher(words[0]); if (mWordBoundaries.matches() && wholeword) { p0 = Pattern.compile("\\b" + words[0] + "\\b", Pattern.CASE_INSENSITIVE); 
} else { p0 = Pattern.compile(words[0], Pattern.CASE_INSENSITIVE); } final Pattern p1; mWordBoundaries = pWordboundaries.matcher(words[1]); if (mWordBoundaries.matches() && wholeword) { p1 = Pattern.compile("\\b" + words[1] + "\\b", Pattern.CASE_INSENSITIVE); } else { p1 = Pattern.compile(words[1], Pattern.CASE_INSENSITIVE); } countLine = 0; checkedSentences = 0; for (String line : lines) { cachedMatches = new HashMap<>(); countLine++; if (countLine > maxInputSentences || checkedSentences > maxCheckedSentences) { break; } boolean foundSomething = false; if (words[0].length() > 0) { Matcher m = p0.matcher(line); while (m.find()) { foundSomething = true; analyzeSentence(line, 0, m.start(), config); } } if (words[1].length() > 0) { Matcher m = p1.matcher(line); while (m.find()) { foundSomething = true; analyzeSentence(line, 1, m.start(), config); } } if (!foundSomething) { // printSentenceOutput("Ignored, no error", line, ""); } } } if (isParallelCorpus) { final Pattern p = Pattern.compile("(.*)__(.*)__(.*)"); countLine = 0; checkedSentences = 0; for (String line : lines) { cachedMatches = new HashMap<>(); countLine++; if (countLine > maxInputSentences || checkedSentences > maxCheckedSentences) { break; } String[] parts = line.split("\t"); // adjust the numbers 3 and 4 according to the source file if (parts.length < columnCorrect && parts.length < columnIncorrect) { continue; } String correctSource = parts[columnCorrect - 1]; String incorrectSource = parts[columnIncorrect - 1]; words[0] = null; words[1] = null; /*Matcher mIncorrect = p.matcher(incorrectSource); if (mIncorrect.matches()) { words[0] = mIncorrect.group(2); } int posError = -1; Matcher mCorrect = p.matcher(correctSource); if (mCorrect.matches()) { words[1] = mCorrect.group(2); correctSentence = mCorrect.group(1) + mCorrect.group(2) + mCorrect.group(3); posError = mCorrect.group(1).length(); }*/ String correctSentence = correctSource.replaceAll("__", ""); String incorrectSentence = incorrectSource.replaceAll("__", ""); if (correctSentence.equals(incorrectSentence)) { printSentenceOutput("IGNORED LINE: sentences are identical!", correctSource, 0, ""); ignoredLines++; continue; } List<String> diffs = differences(correctSentence, incorrectSentence); int posError = diffs.get(0).length(); words[1] = diffs.get(1); words[0] = diffs.get(2); if (words[1] != null) { // words[0] may be null! 
// check FN analyzeSentence(correctSentence, 1, posError, config); // check FP in the correct sentence words[0] = words[1]; words[1] = null; analyzeSentence(correctSentence, 0, posError, config); } } } if (isDoubleLetters) { // introduce error: nn -> n fakeRuleIDs[0] = "rules_double_letters"; countLine = 0; checkedSentences = 0; final Pattern p1 = Pattern.compile("([a-zA-Z])\\1+"); for (String line : lines) { cachedMatches = new HashMap<>(); countLine++; if (countLine > maxInputSentences || checkedSentences > maxCheckedSentences) { break; } Matcher m = p1.matcher(line); while (m.find()) { words[1] = m.group(0); words[0] = words[1].substring(0, 1); analyzeSentence(line, 1, m.start(), config); } } } if (isDiacritics) { // check missing diacritics countLine = 0; checkedSentences = 0; for (String line : lines) { cachedMatches = new HashMap<>(); countLine++; if (countLine > maxInputSentences || checkedSentences > maxCheckedSentences) { break; } List<String> tokens = language.getWordTokenizer().tokenize(line); int pos = 0; for (String token: tokens) { if (StringTools.hasDiacritics(token)) { words[1] = token; words[0] = StringTools.removeDiacritics(token); analyzeSentence(line, 1, pos, config); } pos += token.length(); } } } if (inflected) { // search lemma countLine = 0; checkedSentences = 0; for (String line : lines) { cachedMatches = new HashMap<>(); countLine++; if (countLine > maxInputSentences || checkedSentences > maxCheckedSentences) { break; } List<AnalyzedSentence> analyzedSentences = localLt.analyzeText(line); boolean foundSomething = false; for (AnalyzedSentence analyzedSentence: analyzedSentences) { for (AnalyzedTokenReadings token : analyzedSentence.getTokensWithoutWhitespace()) { if (lemmas[0].length() > 0) { if (token.hasLemma(lemmas[0])) { words[0] = token.getToken(); AnalyzedToken atr1 = token.readingWithLemma(lemmas[0]); AnalyzedToken atr2 = new AnalyzedToken(atr1.getToken(), atr1.getPOSTag(), lemmas[1]); String[] syntheziedWords = synth.synthesize(atr2, atr2.getPOSTag()); words[1] = syntheziedWords[0]; foundSomething = true; analyzeSentence(line, 0, token.getStartPos(), config); } } if (lemmas[1].length() > 0) { if (token.hasLemma(lemmas[1])) { words[1] = token.getToken(); AnalyzedToken atr1 = token.readingWithLemma(lemmas[1]); AnalyzedToken atr2 = new AnalyzedToken(atr1.getToken(), atr1.getPOSTag(), lemmas[0]); String[] syntheziedWords = synth.synthesize(atr2, atr2.getPOSTag()); words[0] = syntheziedWords[0]; foundSomething = true; analyzeSentence(line, 1, token.getStartPos(), config); } } } } } } // print results int oneOrTwo = (unidirectional ? 
1 : 2); for (int i = 0; i < oneOrTwo; i++) { float precision = results[i][classifyTypes.indexOf("TP")] / (float) (results[i][classifyTypes.indexOf("TP")] + results[i][classifyTypes.indexOf("FP")]); float recall = results[i][classifyTypes.indexOf("TP")] / (float) (results[i][classifyTypes.indexOf("TP")] + results[i][classifyTypes.indexOf("FN")] + results[i][classifyTypes.indexOf("TPns")] + results[i][classifyTypes.indexOf("TPws")]); // recall including empty suggestions float recall2 = (results[i][classifyTypes.indexOf("TP")] + results[i][classifyTypes.indexOf("TPns")]) / (float) (results[i][classifyTypes.indexOf("TP")] + results[i][classifyTypes.indexOf("FN")] + results[i][classifyTypes.indexOf("TPns")] + results[i][classifyTypes.indexOf("TPws")]); //float expectedSuggestionPercentage = (float) results[i][classifyTypes.indexOf("TPs")] // / results[i][classifyTypes.indexOf("TP")]; int errorsTotal = results[i][classifyTypes.indexOf("TP")] + results[i][classifyTypes.indexOf("FP")] + results[i][classifyTypes.indexOf("TN")] + results[i][classifyTypes.indexOf("FN")] + results[i][classifyTypes.indexOf("TPns")] + results[i][classifyTypes.indexOf("TPws")]; StringWriter resultsString = new StringWriter(); resultsString.append("Results for " + fakeRuleIDs[i] + "\n"); int nCorrectSentences = results[i][1] + results[i][2] ; // FP + TN int nIncorrectSentences = results[i][0] + results[i][4] + results[i][5] + results[i][3]; // TP + TPns + TPws + FN resultsString.append("Total sentences: " + String.valueOf(errorsTotal) + "\n"); resultsString.append(formattedAbsoluteAndPercentage("\nCorrect sentences", nCorrectSentences, nCorrectSentences + nIncorrectSentences)); resultsString.append(formattedAbsoluteAndPercentage("FP", results[i][1], nCorrectSentences)); resultsString.append(formattedAbsoluteAndPercentage("TN", results[i][2], nCorrectSentences)); resultsString.append(formattedAbsoluteAndPercentage("\nIncorrect sentences", nIncorrectSentences, nCorrectSentences + nIncorrectSentences)); resultsString.append(formattedAbsoluteAndPercentage("TP (total)", results[i][4] + results[i][5] + results[i][0], nIncorrectSentences)); resultsString.append(formattedAbsoluteAndPercentage(" TP (expected suggestion)", results[i][0], nIncorrectSentences)); resultsString.append(formattedAbsoluteAndPercentage(" TPns (no suggestion)", results[i][4], nIncorrectSentences)); resultsString.append(formattedAbsoluteAndPercentage(" TPws (wrong suggestion)", results[i][5], nIncorrectSentences)); resultsString.append(formattedAbsoluteAndPercentage("FN", results[i][3], nIncorrectSentences)); resultsString.append("\nPrecision: " + String.format(Locale.ROOT, "%.4f", precision) + "\n"); resultsString.append("Recall: " + String.format(Locale.ROOT, "%.4f", recall) + "\n"); resultsString.append("Recall (including empty suggestions): " + String.format(Locale.ROOT, "%.4f", recall2) + "\n"); if (ignoredLines > 0) { resultsString.append("\nIgnored lines from source: " + ignoredLines + "\n"); } resultsString.append(printTimeFromStart(start)); appendToFile(verboseOutputFilename, resultsString.toString()); if (printSummaryDetails) { appendToFile(summaryOutputFilename, errorCategory + "\t" + fakeRuleIDs[i] + "\t" + errorsTotal + "\t" + String.format(Locale.ROOT, "%.4f", precision) + "\t" + String.format(Locale.ROOT, "%.4f", recall) + "\t" + results[i][classifyTypes.indexOf("TP")] + "\t" + results[i][classifyTypes.indexOf("FP")] + "\t" + results[i][classifyTypes.indexOf("TN")] + "\t" + results[i][classifyTypes.indexOf("FN")] + 
"\t"); } accumulateResults[0] += errorsTotal; accumulateResults[1] += results[i][classifyTypes.indexOf("TP")]; accumulateResults[2] += results[i][classifyTypes.indexOf("FP")]; accumulateResults[3] += results[i][classifyTypes.indexOf("TN")]; accumulateResults[4] += results[i][classifyTypes.indexOf("FN")]; } System.out.println(printTimeFromStart(start)); System.out.println(" } private static String formattedAbsoluteAndPercentage (String tag, int i, int j) { float percentage = (float) i*100/j; StringWriter r = new StringWriter(); r.append(tag+": "); r.append(Integer.toString(i)); r.append(" ("); r.append(String.format(Locale.ROOT, "%.2f", percentage)); r.append("%)\n"); return r.toString(); } private static String printTimeFromStart(long start) { long totalSecs = (long) ((System.currentTimeMillis() - start) / 1000.0); long hours = totalSecs / 3600; int minutes = (int) ((totalSecs % 3600) / 60); int seconds = (int) (totalSecs % 60); return String.format("\nTime: %02d:%02d:%02d\n", hours, minutes, seconds); } private static void appendToFile(String FilePath, String text) throws IOException { if (!FilePath.isEmpty()) { try (BufferedWriter out = new BufferedWriter(new FileWriter(FilePath, true))) { out.write(text + "\n"); } } else { System.out.println(text); } } private static void analyzeSentence(String correctSentence, int j, int fromPos, CheckConfiguration config) throws IOException { // Correct sentence if (!unidirectional || j == 0) { List<RemoteRuleMatch> matchesCorrect; if (cachedMatches.containsKey(correctSentence)) { matchesCorrect = cachedMatches.get(correctSentence); } else { try { matchesCorrect = lt.check(correctSentence, config).getMatches(); } catch (RuntimeException e) { e.printStackTrace(); wait(1000); matchesCorrect = lt.check(correctSentence, config).getMatches(); } checkedSentences++; cachedMatches.put(correctSentence, matchesCorrect); } String replaceWith = words[1 - j]; String originalString = correctSentence.substring(fromPos, fromPos + words[j].length()); //capitalization change only makes sense with full words if (wholeword && StringTools.isCapitalizedWord(originalString) && replaceWith != null) { replaceWith = StringTools.uppercaseFirstChar(replaceWith); } List<String> ruleIDs = ruleIDsAtPos(matchesCorrect, fromPos, replaceWith); if (ruleIDs.size() > 0) { results[j][classifyTypes.indexOf("FP")]++; printSentenceOutput("FP", correctSentence, j, String.join(",", ruleIDs)); } else { results[j][classifyTypes.indexOf("TN")]++; // Too verbose... 
// printSentenceOutput("TN", correctSentence, fakeRuleIDs[j]); } } // Wrong sentence if ( (!unidirectional || j == 1) && words[1 - j] != null) { String replaceWith = words[1 - j]; String originalString = correctSentence.substring(fromPos, fromPos + words[j].length()); // capitalization change only makes sense with full words if (wholeword) { replaceWith = StringTools.preserveCase(replaceWith, originalString); } String wrongSentence = correctSentence.substring(0, fromPos) + replaceWith + correctSentence.substring(fromPos + words[j].length(), correctSentence.length()); if (wrongSentence.equals(correctSentence)) { // Should not happen printSentenceOutput("Error: word cannot be replaced", correctSentence, j, ""); return; } List<RemoteRuleMatch> matchesWrong; if (cachedMatches.containsKey(wrongSentence)) { matchesWrong = cachedMatches.get(wrongSentence); } else { try { matchesWrong = lt.check(wrongSentence, config).getMatches(); } catch (RuntimeException e) { e.printStackTrace(); wait(1000); matchesWrong = lt.check(wrongSentence, config).getMatches(); } checkedSentences++; cachedMatches.put(wrongSentence, matchesWrong); } List<String> ruleIDs = ruleIDsAtPos(matchesWrong, fromPos, originalString); if (ruleIDs.size() > 0) { if (isExpectedSuggestionAtPos(matchesWrong, fromPos, originalString, wrongSentence, correctSentence)) { results[1 - j][classifyTypes.indexOf("TP")]++; printSentenceOutput("TP", wrongSentence, 1 - j, String.join(",", ruleIDs)); } else if (isEmptySuggestionAtPos(matchesWrong, fromPos, originalString, wrongSentence, correctSentence)) { results[1 - j][classifyTypes.indexOf("TPns")]++; printSentenceOutput("TPns", wrongSentence, 1 - j, String.join(",", ruleIDs)); } else { results[1 - j][classifyTypes.indexOf("TPws")]++; printSentenceOutput("TPws", wrongSentence, 1 - j, String.join(",", ruleIDs)); } } else { results[1 - j][classifyTypes.indexOf("FN")]++; printSentenceOutput("FN", wrongSentence, 1 - j, ""); } } } private static void printSentenceOutput(String classification, String sentence, int i, String ruleIds) throws IOException { if (verboseOutput) { String fakeRuleID = ""; if (fakeRuleIDs[i].contains("null")) { fakeRuleID = "rules_" + words[i] + "->" + words[1 - i]; } else { fakeRuleID = fakeRuleIDs[i]; } if (verboseOutputFilename.isEmpty()) { System.out.println(countLine + ". 
" + classification + ": " + sentence + " –– " + fakeRuleID + ":" + ruleIds); } else { try (BufferedWriter out = new BufferedWriter(new FileWriter(verboseOutputFilename, true))) { out.write(countLine + "\t" + classification + "\t" + sentence + "\t" + fakeRuleID + ":" + ruleIds+"\n"); } } } } private static List<String> ruleIDsAtPos(List<RemoteRuleMatch> matchesCorrect, int pos, String expectedSuggestion) { List<String> ruleIDs = new ArrayList<>(); for (RemoteRuleMatch match : matchesCorrect) { if (match.getErrorOffset() <= pos && match.getErrorOffset() + match.getErrorLength() >= pos) { if (disabledRules.contains(match.getRuleId())) { continue; } if (!onlyRules.isEmpty() && !onlyRules.contains(match.getRuleId())) { continue; } String subId = null; try { subId = match.getRuleSubId().get(); } catch (NoSuchElementException e) { //System.out.println("Exception, skipping '" + countLine + "': "); //e.printStackTrace(); } if (subId != null) { ruleIDs.add(match.getRuleId() + "[" + match.getRuleSubId().get() + "]"); } else { ruleIDs.add(match.getRuleId()); } } } return ruleIDs; } private static boolean isExpectedSuggestionAtPos(List<RemoteRuleMatch> matchesCorrect, int pos, String expectedSuggestion, String wrongSentence, String correctSentence) { for (RemoteRuleMatch match : matchesCorrect) { if (match.getErrorOffset() <= pos && match.getErrorOffset() + match.getErrorLength() >= pos) { for (String s : match.getReplacements().get()) { // check that the replacement rebuilds the original correct sentence String correctedSentence = wrongSentence.substring(0, match.getErrorOffset()) + s + wrongSentence.substring(match.getErrorOffset() + match.getErrorLength(), wrongSentence.length()); if (correctedSentence.equals(correctSentence)) { return true; } } } } return false; } private static boolean isEmptySuggestionAtPos(List<RemoteRuleMatch> matchesCorrect, int pos, String expectedSuggestion, String wrongSentence, String correctSentence) { for (RemoteRuleMatch match : matchesCorrect) { if (match.getReplacements().get().size() == 0) { if (match.getErrorOffset() <= pos && match.getErrorOffset() + match.getErrorLength() >= pos) { return true; } } } return false; } private static void writeHelp() { System.out.println("Usage 1: " + ArtificialErrorEval.class.getSimpleName() + " <language code> <input file>"); System.out.println("Usage 2: " + ArtificialErrorEval.class.getSimpleName() + " <configuration file>"); } private static List<String> differences(String s1, String s2) { List<String> results = new ArrayList<>(); if (s1.equals(s2)) { results.add(s1); results.add(""); results.add(""); results.add(""); return results; } int fromStart = 0; while (s1.charAt(fromStart) == s2.charAt(fromStart)) { fromStart++; } int l1 = s1.length(); int l2 = s2.length(); int fromEnd = 0; while (s1.charAt(l1 - 1 - fromEnd) == s2.charAt(l2 - 1 - fromEnd)) { fromEnd++; } // corrections (e.g. stress vs stresses) while (fromStart > l1 - fromEnd) { fromEnd } while (fromStart > l2 - fromEnd) { fromEnd } // common string at start results.add(s1.substring(0, fromStart)); // diff in sentence 1 results.add(s1.substring(fromStart, l1 - fromEnd)); // diff in sentence 2 results.add(s2.substring(fromStart, l2 - fromEnd)); // common string at end results.add(s1.substring(l1 - fromEnd, l1)); return results; } public static void wait(int ms) { try { Thread.sleep(ms); } catch (InterruptedException ex) { Thread.currentThread().interrupt(); } } }
package org.zalando.nakadiproducer.flowid; import org.zalando.tracer.Tracer; import lombok.extern.slf4j.Slf4j; @Slf4j public class TracerFlowIdComponent implements FlowIdComponent { private static final String X_FLOW_ID = "X-Flow-ID"; private final Tracer tracer; public TracerFlowIdComponent(Tracer tracer) { this.tracer = tracer; } public String getXFlowIdKey() { return X_FLOW_ID; } @Override public String getXFlowIdValue() { if (tracer != null) { try { return tracer.get(X_FLOW_ID).getValue(); } catch (IllegalArgumentException e) { log.warn("No trace was configured for the name {}. Returning null. " + "To configure Tracer provide an application property: " + "tracer.traces.X-Flow-ID=flow-id", X_FLOW_ID); } catch (IllegalStateException e) { log.warn("Unexpected Error while receiving the Trace Id {}. Returning null. " + "Please check your tracer configuration: {}", X_FLOW_ID, e.getMessage()); } } else { log.warn("No bean of class Tracer was found. Returning null."); } return null; } @Override public void startTraceIfNoneExists() { if (tracer != null) { try { tracer.get(X_FLOW_ID).getValue(); } catch (IllegalArgumentException e) { tracer.start(); } catch (IllegalStateException e) { log.warn("Unexpected Error while checking for an existing Trace Id {}. " + "Please check your tracer configuration: {}", X_FLOW_ID, e.getMessage()); } } else { log.warn("No bean of class Tracer was found."); } } }
package li.strolch.execution; import static li.strolch.runtime.StrolchConstants.SYSTEM_USER_AGENT; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.ResourceBundle; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import li.strolch.agent.api.ComponentContainer; import li.strolch.agent.api.StrolchAgent; import li.strolch.handler.operationslog.LogMessage; import li.strolch.handler.operationslog.LogSeverity; import li.strolch.handler.operationslog.OperationsLog; import li.strolch.model.Locator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class SimpleDurationExecutionTimer implements DelayedExecutionTimer { private static final Logger logger = LoggerFactory.getLogger(SimpleDurationExecutionTimer.class); private Map<Locator, ScheduledFuture<?>> simulationTasks; private StrolchAgent agent; public SimpleDurationExecutionTimer(StrolchAgent agent) { this.agent = agent; this.simulationTasks = Collections.synchronizedMap(new HashMap<>()); } @Override public void destroy() { this.simulationTasks.values().forEach(task -> task.cancel(false)); } @Override public void cancel(Locator locator) { ScheduledFuture<?> future = this.simulationTasks.remove(locator); if (future != null) { if (!future.cancel(false)) { logger.warn("Failed to cancel task " + locator); } } } @Override public void execute(String realm, ComponentContainer container, Locator actionLocator, long duration) { synchronized (this.simulationTasks) { if (this.simulationTasks.containsKey(actionLocator)) { logger.warn("Ignoring duplicate timer for locator " + actionLocator); } else { SimulationTask task = new SimulationTask(realm, container, actionLocator); ScheduledFuture<?> future = getExecutor().schedule(task, duration, TimeUnit.MILLISECONDS); this.simulationTasks.put(actionLocator, future); logger.info("Registered execution timer for " + actionLocator); } } } private ScheduledExecutorService getExecutor() { return this.agent.getScheduledExecutor("DurationExecution"); } private void executed(String realm, ComponentContainer container, Locator locator) { this.simulationTasks.remove(locator); ExecutionHandler executionHandler = container.getComponent(ExecutionHandler.class); Controller controller = executionHandler.getController(realm, locator); if (controller == null) { logger.warn("Controller already remove for " + locator); return; } if (controller.isStopped(locator)) { logger.warn("Execution for " + locator + " is already stopped."); return; } try { controller.toExecuted(locator); } catch (Exception e) { logger.error("Failed to set " + locator + " to executed due to " + e.getMessage(), e); if (this.agent.getContainer().hasComponent(OperationsLog.class)) { this.agent.getContainer().getComponent(OperationsLog.class).addMessage( new LogMessage(realm, SYSTEM_USER_AGENT, locator, LogSeverity.Exception, ResourceBundle.getBundle("strolch-service"), "execution.handler.failed.executed") .withException(e).value("reason", e)); } } } private class SimulationTask implements Runnable { private String realm; private ComponentContainer container; private Locator locator; public SimulationTask(String realm, ComponentContainer container, Locator locator) { this.realm = realm; this.container = container; this.locator = locator; } @Override public void run() { executed(realm, container, locator); } } }
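/*
 * Hedged sketch (independent of the Strolch classes above): the basic schedule/cancel
 * pattern that SimpleDurationExecutionTimer builds on -- keep one ScheduledFuture per key
 * so a pending task can be cancelled later. Names are made up for the example.
 */
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;

public class DelayedTaskSketch {

    private final ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
    private final Map<String, ScheduledFuture<?>> tasks = new ConcurrentHashMap<>();

    void schedule(String key, Runnable action, long delayMs) {
        // a duplicate key is ignored, mirroring the "duplicate timer" guard above
        tasks.computeIfAbsent(key, k -> executor.schedule(action, delayMs, TimeUnit.MILLISECONDS));
    }

    void cancel(String key) {
        ScheduledFuture<?> future = tasks.remove(key);
        if (future != null) {
            future.cancel(false);
        }
    }

    public static void main(String[] args) throws InterruptedException {
        DelayedTaskSketch sketch = new DelayedTaskSketch();
        sketch.schedule("action-1", () -> System.out.println("executed"), 100);
        Thread.sleep(200);
        sketch.executor.shutdown();
    }
}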
package com.tngtech.java.junit.dataprovider; import static com.tngtech.java.junit.dataprovider.DataProviders.$; import static com.tngtech.java.junit.dataprovider.DataProviders.$$; import static com.tngtech.java.junit.dataprovider.DataProviders.testForEach; import static com.tngtech.java.junit.dataprovider.DataProviders.crossProduct; import static org.assertj.core.api.Assertions.assertThat; import java.util.ArrayList; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Set; import org.junit.Test; public class DataProvidersTest extends BaseTest { @Test public void test$ShouldReturnEmptyObjectArrayForNoArgs() { // Given: // When: Object[] result = $(); // Then: assertThat(result).isEqualTo(new Object[0]); } @Test public void test$ShouldReturnObjectArrayWithSingleElementForOneArg() { // Given: // When: Object[] result = $("test"); // Then: assertThat(result).isEqualTo(new Object[] { "test" }); } @Test public void test$ShouldReturnObjectArrayWithAllElementsForMultipleArg() { // Given: long millis = System.currentTimeMillis(); Date now = new Date(); // When: Object[] result = $(millis, now, "equals"); // Then: assertThat(result).containsExactly(millis, now, "equals"); } @Test public void test$$ShouldReturnEmptyObjectArrayForNoArgs() { // Given: // When: Object[][] result = $$(); // Then: assertThat(result).isEqualTo(new Object[0][0]); } @Test public void test$$ShouldReturnObjectArrayWithSingleElementForOneArg() { // Given: // When: Object[] result = $$(new Object[] { 10e-3 }); // Then: assertThat(result).isEqualTo(new Object[][] { { 0.01 } }); } @Test public void test$$ShouldReturnObjectArrayWithAllElementsForMultipleArg() { // Given: // When: Object[] result = $$(new Object[] { "test1", 1 }, new Object[] { "test2" }, new Object[] { "test3", 3 }); // Then: assertThat(result).isEqualTo(new Object[][] { { "test1", 1 }, { "test2" }, { "test3", 3 } }); } @Test public void test$$And$InCooperation() { // Given: // When: // @formatter:off Object[][] result = $$( $(0, 0, 0), $(0, 1, 1), $(1, 1, 2), $(1, 2, 3) ); // @formatter:on // Then: assertThat(result).isEqualTo(new Object[][] { { 0, 0, 0 }, { 0, 1, 1 }, { 1, 1, 2 }, { 1, 2, 3 } }); } @Test public void testTestForEachObjectArrayShouldReturnEmptyObjectArrayArrayForNoArg() { // Given: // When: Object[][] result = testForEach(); // Then: assertThat(result).isEqualTo(new Object[0][0]); } @Test public void testTestForEachObjectArrayShouldReturnObjectArrayArrayWithSingleElementForOneArg() { // Given: // When: Object[][] result = testForEach(17.25); // Then: assertThat(result).isEqualTo(new Object[][] { { 17.25 } }); } @Test public void testTestForEachObjectArrayShouldReturnObjectArrayArrayWithObjectArrayForEveryArgOnMultipleArgs() { // Given: // When: Object[][] result = testForEach('a', "aa", "aaa"); // Then: assertThat(result).isEqualTo(new Object[][] { { 'a' }, { "aa" }, { "aaa" } }); } @Test(expected = NullPointerException.class) public void testTestForEachIterableShouldThrowNullPointerExceptionForNullArg() { // Given: // When: testForEach((Iterable<Object>) null); // Then: expect exception } @Test public void testTestForEachIterableShouldReturnObjectArrayArrayWithObjectArrayForEverySetEntry() { // Given: Set<Float> set = new HashSet<Float>(); set.add(1.7f); set.add(238.78239f); // When: Object[][] result = testForEach(set); // Then: assertThat(result).contains(new Object[] { Float.valueOf(1.7f) }, new Object[] { Float.valueOf(238.78239f) }); } @Test public void 
testTestForEachIterableShouldReturnObjectArrayArrayWithObjectArrayForEveryListEntry() { // Given: List<Long> list = new ArrayList<Long>(); list.add(261l); list.add(167120l); // When: Object[][] result = testForEach(list); // Then: assertThat(result).isEqualTo(new Object[][] { { Long.valueOf(261l) }, { Long.valueOf(167120l) } }); } @Test(expected = NullPointerException.class) public void testTestForEachClassOfEnumShouldThrowNullPointerExceptionForNullArg() { // Given: // When: testForEach((Class<TestEnum>) null); // Then: expect exception } @Test public void testTestForEachClassOfEnumShouldReturnObjectArrayArrayForEachEnumValue() { // Given: // When: Object[][] result = testForEach(TestEnum.class); // Then: assertThat(result).isEqualTo(new Object[][] { { TestEnum.VAL1 }, { TestEnum.VAL2 }, { TestEnum.VAL3 } }); } @Test public void testCrossProductShouldReturnEmptyWhenLeftSideIsEmpty() { // Given: // When: Object[][] result = crossProduct(testForEach(1,2,3), testForEach()); // Then: assertThat(result).isEqualTo(new Object[][] { }); } @Test public void testCrossProductShouldReturnEmptyWhenRightSideIsEmpty() { // Given: // When: Object[][] result = crossProduct(testForEach(), testForEach(1,2,3)); // Then: assertThat(result).isEqualTo(new Object[][] { }); } @Test public void testCrossProductShouldReturnTheCrossProductOfBothSides() { // Given: // When: Object[][] result = crossProduct(testForEach(1,2,3), testForEach(4,5)); // Then: assertThat(result).isEqualTo(new Object[][] { {1, 4}, { 1, 5 }, { 2, 4 }, { 2, 5 }, { 3, 4 }, { 3, 5 } }); } }
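/*
 * Hedged usage sketch for the helpers tested above: a typical way $, $$ and the library's
 * DataProviderRunner/@DataProvider/@UseDataProvider are combined in a test class, as far
 * as I understand the junit-dataprovider API. Class and provider names are made up.
 */
import static com.tngtech.java.junit.dataprovider.DataProviders.$;
import static com.tngtech.java.junit.dataprovider.DataProviders.$$;
import static org.assertj.core.api.Assertions.assertThat;

import org.junit.Test;
import org.junit.runner.RunWith;

import com.tngtech.java.junit.dataprovider.DataProvider;
import com.tngtech.java.junit.dataprovider.DataProviderRunner;
import com.tngtech.java.junit.dataprovider.UseDataProvider;

@RunWith(DataProviderRunner.class)
public class AdditionDataProviderTest {

    @DataProvider
    public static Object[][] additionData() {
        // each inner array is one invocation of the test method
        return $$(
                $(0, 0, 0),
                $(1, 2, 3),
                $(-1, 1, 0));
    }

    @Test
    @UseDataProvider("additionData")
    public void testAddition(int a, int b, int expected) {
        assertThat(a + b).isEqualTo(expected);
    }
}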
package io.appium.uiautomator.core; import android.view.InputEvent; import android.view.MotionEvent; import android.view.MotionEvent.PointerCoords; import static io.appium.android.bootstrap.utils.ReflectionUtils.invoke; import static io.appium.android.bootstrap.utils.ReflectionUtils.method; public class InteractionController { private static final String CLASS_INTERACTION_CONTROLLER = "com.android.uiautomator.core.InteractionController"; private static final String METHOD_SEND_KEY = "sendKey"; private static final String METHOD_INJECT_EVENT_SYNC = "injectEventSync"; private static final String METHOD_TOUCH_DOWN = "touchDown"; private static final String METHOD_TOUCH_UP = "touchUp"; private static final String METHOD_TOUCH_MOVE = "touchMove"; public static final String METHOD_PERFORM_MULTI_POINTER_GESTURE = "performMultiPointerGesture"; private final Object interactionController; public InteractionController(Object interactionController) { this.interactionController = interactionController; } public boolean sendKey(int keyCode, int metaState){ return (Boolean) invoke(method(CLASS_INTERACTION_CONTROLLER, METHOD_SEND_KEY, int.class, int.class), interactionController, keyCode, metaState); } public boolean injectEventSync(InputEvent event) { return (Boolean) invoke(method(CLASS_INTERACTION_CONTROLLER, METHOD_INJECT_EVENT_SYNC, InputEvent.class), interactionController, event); } public boolean touchDown(int x, int y) { return (Boolean) invoke(method(CLASS_INTERACTION_CONTROLLER, METHOD_TOUCH_DOWN, int.class, int.class), interactionController, x, y); } public boolean touchUp(int x, int y) { return (Boolean) invoke(method(CLASS_INTERACTION_CONTROLLER, METHOD_TOUCH_UP, int.class, int.class), interactionController, x, y); } public boolean touchMove(int x, int y) { return (Boolean) invoke(method(CLASS_INTERACTION_CONTROLLER, METHOD_TOUCH_MOVE, int.class, int.class), interactionController, x, y); } public Boolean performMultiPointerGesture(MotionEvent.PointerCoords[][] pcs) { return (Boolean) invoke(method(CLASS_INTERACTION_CONTROLLER, METHOD_PERFORM_MULTI_POINTER_GESTURE, PointerCoords[][].class), interactionController, (Object) pcs); } }
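/*
 * Hedged sketch of the reflection pattern the wrapper above relies on: resolve a
 * (possibly non-public) method by name and parameter types, make it accessible, and
 * invoke it on the wrapped instance. This is plain java.lang.reflect; the bootstrap
 * ReflectionUtils helpers are assumed to do something equivalent.
 */
import java.lang.reflect.Method;

public class ReflectiveInvokeSketch {

    static Object call(Object target, String methodName, Class<?>[] types, Object... args) throws Exception {
        Method m = target.getClass().getDeclaredMethod(methodName, types);
        m.setAccessible(true); // hidden framework methods are often not public
        return m.invoke(target, args);
    }

    public static void main(String[] args) throws Exception {
        String result = (String) call("hello", "concat", new Class<?>[] { String.class }, " world");
        System.out.println(result); // prints "hello world"
    }
}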
package org.cohorte.eclipse.felix.config.generator; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.util.ArrayList; import java.util.Arrays; import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.stream.Collectors; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpGet; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClientBuilder; import org.apache.maven.plugin.AbstractMojo; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; import org.apache.maven.plugins.annotations.LifecyclePhase; import org.apache.maven.plugins.annotations.Mojo; import org.apache.maven.plugins.annotations.Parameter; import org.apache.maven.project.MavenProject; import org.psem2m.utilities.CXException; import org.psem2m.utilities.files.CXFile; import org.psem2m.utilities.files.CXFileDir; import org.psem2m.utilities.files.CXFileUtf8; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; /** * class that read a launch configuration to generator a config.properties that * is use by felix to launch framework * * @author apisu * */ @Mojo(name = "generate-config", defaultPhase = LifecyclePhase.COMPILE) public class ConfigGenerator extends AbstractMojo { @Parameter(defaultValue = "${project}", required = true, readonly = true) MavenProject project; // base new config file on this config file @Parameter(property = "base.felix.config.file.path") String sourceConfigFile; @Parameter(property = "launch.eclipse.file.path") String launchEclipseFile; @Parameter(property = "shell.felix.jar.file.path") String shellFelixJarFilePath; @Parameter(property = "shell.felix.config.file.path") String shellFelixConfigFilePath; @Parameter(property = "felix.cache.rootdir") String felixCacheRootDir; @Parameter(property = "shell.vmarguments") String overrideShellArgument; @Parameter(property = "shell.vmarguments.file.path") String overrideShellArgumentFilePath; @Parameter(property = "target.shell.file.path") String targetLaunchJvmFile; @Parameter(property = "target.config.file.path") String pathTargerConfigFile; // can express multiple folder with ";" separator. the property express also // pair of path in local disk and path in target disk for config // eg cohorte-home/repo:opt/cohorte/repo; another path @Parameter(property = "bundle.jar.directories") String pPathBundleTarget; private final Properties pProperties = new Properties(); private final Map<String, String> pMapSymbolicNameToJarPath = new HashMap<>(); /** * return the symolic bundle name in jar file if t's a bundle else null * * @return * @throws IOException */ private String getBundleSymbolicNameFromJar(String aFilePath) throws IOException { getLog().debug(String.format("getBundleSymbolicNameFromJar file =[%s]", aFilePath)); String wSymbolicName = null; final ZipFile wZipFile = new ZipFile(aFilePath); try { final Enumeration<? 
extends ZipEntry> wEntries = wZipFile.entries(); while (wEntries.hasMoreElements()) { final ZipEntry wEntry = wEntries.nextElement(); if (wEntry.getName().contains("MANIFEST.MF")) { final InputStream wStream = wZipFile.getInputStream(wEntry); final String wContent = new BufferedReader(new InputStreamReader(wStream)).lines() .collect(Collectors.joining("\n")); wStream.close(); final String[] wLines = wContent.split("\n"); for (int i = 0; i < wLines.length && wSymbolicName == null; i++) { final String wLine = wLines[i]; if (wLine.contains("Bundle-SymbolicName: ")) { wSymbolicName = wLine.replace("Bundle-SymbolicName: ", ""); getLog().debug(String.format("symbolic name=[%s] file =[%s]", wSymbolicName, aFilePath)); } } } } return wSymbolicName; } finally { wZipFile.close(); } } /** * list bundle jar file in this directory with symbolic name * * @param aDir * * @return * @throws IOException */ private void analyseDir(CXFileDir aDir) throws IOException { getLog().debug(String.format("analyseDir =[%s]", aDir.getAbsolutePath())); for (final String wFile : aDir.list()) { getLog().debug(String.format("dir=[%s] file =[%s]", aDir.getAbsolutePath(), wFile)); if (wFile.endsWith(".jar")) { // check if it's a bundle final String wFullFilePath = aDir.getAbsolutePath() + File.separatorChar + wFile; String wSymbolicBundleName = getBundleSymbolicNameFromJar(wFullFilePath); if (wSymbolicBundleName != null) { if (wSymbolicBundleName.contains(";")) { wSymbolicBundleName = wSymbolicBundleName.split(";")[0]; } getLog().debug(String.format("===>symbolicName=[%s] \n, path jar=[%s]", wSymbolicBundleName, wFullFilePath)); pMapSymbolicNameToJarPath.put(wSymbolicBundleName, wFullFilePath); } } else { final CXFileDir wSubDir = new CXFileDir(aDir, wFile); if (wSubDir.exists() && wSubDir.isDirectory()) { analyseDir(wSubDir); } } } } private Map<String, String> analyseDirectory(String aPathBundleTarget) throws MojoExecutionException { final Map<String, String> wDirsBundleLocation = new HashMap<>(); try { final String wPathBundle = aPathBundleTarget.replaceAll("\n", "").replaceAll("\t", "").replaceAll(" ", ""); getLog().info(String.format("analyseDirectory dir=[%s]!", wPathBundle)); if (wPathBundle.contains(";")) { for (final String wPathPair : wPathBundle.split(";")) { if (wPathPair.contains("=")) { final String wPathLocalDir = wPathPair.split("=")[0]; final String wPathTargetDir = wPathPair.split("=")[1]; final CXFileDir wDir = new CXFileDir(wPathLocalDir); if (wDir.isDirectory() && wDir.exists()) { getLog().info(String.format("add dir local=[%s]!", wPathLocalDir)); getLog().info(String.format("add dir target=[%s]!", wPathTargetDir)); analyseDir(wDir); wDirsBundleLocation.put(wDir.getAbsolutePath(), wPathTargetDir); } } } } else { if (wPathBundle.contains("=")) { final String wPathLocalDir = wPathBundle.split("=")[0]; final String wPathTargetDir = wPathBundle.split("=")[1]; final CXFileDir wDir = new CXFileDir(wPathLocalDir); if (wDir.isDirectory() && wDir.exists()) { getLog().info(String.format("add dir local=[%s]!", wPathLocalDir)); getLog().info(String.format("add dir target=[%s]!", wPathTargetDir)); analyseDir(wDir); wDirsBundleLocation.put(wDir.getAbsolutePath(), wPathTargetDir); } } } return wDirsBundleLocation; } catch (final Exception e) { getLog().error(String.format("fail to analyse directory %s error=[%s]!", pPathBundleTarget, CXException.eInString(e))); throw new MojoExecutionException(String.format("fail to analyse directory %s error=[%s]!", aPathBundleTarget, CXException.eInString(e))); } } private Document 
getDocumentFromLauncherFile(String aLaunchConfigFile) throws ParserConfigurationException, SAXException, IOException { final DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance(); DocumentBuilder dBuilder; dBuilder = dbFactory.newDocumentBuilder(); return dBuilder.parse(aLaunchConfigFile); } private String getVMParameter(Document wDocLauncherEclipse) throws SAXException, IOException, ParserConfigurationException { String wVmArgument = null; final NodeList wNodeList = wDocLauncherEclipse.getElementsByTagName("stringAttribute"); // checkk which string property describe list of bundle for (int x = 0; x < wNodeList.getLength() && wVmArgument == null; x++) { final Node wNode = wNodeList.item(x); if (wNode instanceof Element) { final Element wElement = (Element) wNode; final String wValue = null; if (wElement.getAttribute("key") != null) { if (wElement.getAttribute("key").equals("org.eclipse.jdt.launching.VM_ARGUMENTS")) { wVmArgument = wElement.getAttribute("value"); } } } } String wVmArgUsableInShell = ""; final String[] wLines = wVmArgument.split("\n"); final Map<String, String> wMapOverrideArgument = new HashMap<>(); if (overrideShellArgumentFilePath != null) { getLog().debug(String.format("file override argument vm %s", overrideShellArgumentFilePath)); final CXFileUtf8 wFileContentOverrideVmArg = new CXFileUtf8(overrideShellArgumentFilePath); if (wFileContentOverrideVmArg.exists()) { overrideShellArgument = wFileContentOverrideVmArg.readAll(); getLog().debug(String.format("content file override argument vm %s", overrideShellArgument)); } } else { getLog().debug(String.format("no file override argument vm %s", overrideShellArgumentFilePath)); } if (overrideShellArgument != null) { final List<String> wOverrideShellArgument = Arrays.asList(overrideShellArgument.split("\n")); for (final String wOverrideArg : wOverrideShellArgument) { if (wOverrideArg.contains("=")) { final String[] wSplit = wOverrideArg.split("="); String wArgumentKey = wSplit[0]; getLog().debug(String.format("argument override key %s", wArgumentKey)); wArgumentKey = wArgumentKey.replaceAll(" ", "").replaceAll("\t", ""); if (wArgumentKey.startsWith("-D")) { wArgumentKey = wArgumentKey.substring(2); } if (wSplit.length > 1) { wMapOverrideArgument.put(wArgumentKey, wSplit[1]); } else { wMapOverrideArgument.put(wArgumentKey, ""); } } } } else { getLog().debug(String.format("no override argumet")); } for (final String wLine : wLines) { if (wLine.trim().length() > 0) { String wArgumentKey = null; if (wLine.contains("=")) { wArgumentKey = wLine.split("=")[0]; if (wArgumentKey.startsWith("-D")) { wArgumentKey = wArgumentKey.substring(2); } } if (wArgumentKey != null) { getLog().debug(String.format("argument key %s", wArgumentKey)); if (wMapOverrideArgument.keySet().contains(wArgumentKey)) { // wVmArgUsableInShell += "# override launch eclipse vm argument by maven // task\n"; wVmArgUsableInShell += "\t-D" + wArgumentKey + "=" + wMapOverrideArgument.get(wArgumentKey) + " \\\n"; } else { wVmArgUsableInShell += "\t" + wLine + " \\\n"; } } else { wVmArgUsableInShell += "\t" + wLine + " \\\n"; } } } getLog().debug(String.format("vm arguments [%s]", wVmArgUsableInShell)); return wVmArgUsableInShell; } private List<String> getListSymbolicBundleNameToAdd(Document wDocLauncherEclipse) throws SAXException, IOException, ParserConfigurationException { final List<String> wLisSymbolicBundleName = new ArrayList<>(); final NodeList wNodeList = wDocLauncherEclipse.getElementsByTagName("stringAttribute"); // checkk which string 
property describe list of bundle for (int x = 0; x < wNodeList.getLength(); x++) { final Node wNode = wNodeList.item(x); if (wNode instanceof Element) { final Element wElement = (Element) wNode; String wValue = null; if (wElement.getAttribute("key") != null) { if (wElement.getAttribute("key").equals("workspace_bundles")) { // bundle of the current project wValue = wElement.getAttribute("value"); } else if (wElement.getAttribute("key").equals("target_bundles")) { // bundle of the current project wValue = wElement.getAttribute("value"); } } if (wValue != null) { for (final String wBundleName : wValue.split(",")) { if (wBundleName.contains("@")) { wLisSymbolicBundleName.add(wBundleName.split("@")[0]); } } } // search bundle path in repo } } getLog().debug(String.format("symbolic name bundle to add [%s]", wLisSymbolicBundleName)); return wLisSymbolicBundleName; } private void createJvmShell(Document aLauncherEclipseDom) throws MojoExecutionException, IOException, SAXException, ParserConfigurationException { final String wShellFormat = "#!/bin/sh\njava %s -Dfelix.config.properties=file:/%s -Dfile.encoding=UTF-8 -jar %s bundle-cache -consoleLog -console"; final String wVmArgument = getVMParameter(aLauncherEclipseDom); final String wShell = String.format(wShellFormat, wVmArgument, shellFelixConfigFilePath, shellFelixJarFilePath); getLog().info(String.format("shell launch jvm =[%s]", wShell)); if (targetLaunchJvmFile != null) { final CXFileUtf8 wShellFile = new CXFileUtf8(targetLaunchJvmFile); wShellFile.getParentDirectory().mkdirs(); wShellFile.writeAll(wShell); } } private void createConfigFelixFile(String aFileBaseConfifPath, Document aLauncherEclipseDom) throws MojoExecutionException, IOException, SAXException, ParserConfigurationException { final Map<String, String> wDirsBundleLocation = analyseDirectory(pPathBundleTarget); if (aFileBaseConfifPath != null) { // load property from file if (aFileBaseConfifPath.startsWith("http:")) { // get content by using http final HttpGet wGet = new HttpGet(aFileBaseConfifPath); final CloseableHttpClient wClient = HttpClientBuilder.create().build(); final HttpResponse wResponse = wClient.execute(wGet); pProperties.load(wResponse.getEntity().getContent()); } else { final CXFile wFileBaseProperty = new CXFile(aFileBaseConfifPath); if (wFileBaseProperty.exists()) { pProperties.load(wFileBaseProperty.getInputStream()); } } } else { // set default value for properties if no base pProperties.put("org.osgi.framework.storage.clean", "none"); pProperties.put("org.osgi.framework.storage", "bundle-cache"); pProperties.put("org.osgi.framework.startlevel.beginning", "4"); pProperties.put("felix.cache.rootdir", felixCacheRootDir != null ? 
felixCacheRootDir : "/opt/node/felix/rootdir"); } final List<String> wListSymbolicBundleName = getListSymbolicBundleNameToAdd(aLauncherEclipseDom); final List<String> wTreatedSymbolicNames = new ArrayList<>(); wTreatedSymbolicNames.addAll(wListSymbolicBundleName); // add felix framework that is not checked in eclipse configuration // get bundle property to add the new one String wListBundles = pProperties.getProperty("felix.auto.start.4"); if (wListBundles == null) { wListBundles = ""; } // wListBundles += "# add bundle from launch configuration \n"; for (final String wSymbolicBundleToAdd : wListSymbolicBundleName) { if (pMapSymbolicNameToJarPath.containsKey(wSymbolicBundleToAdd)) { if (wListBundles.length() > 0) { wListBundles += " \\\n"; } final String wAddBundle = "file:\\" + pMapSymbolicNameToJarPath.get(wSymbolicBundleToAdd); getLog().info(String.format("add bundle=[%s]!", wAddBundle)); // todo replace path by new location wListBundles += wAddBundle; wTreatedSymbolicNames.remove(wSymbolicBundleToAdd); } } if (wTreatedSymbolicNames.size() > 0) { getLog().error("symbolicName no treated " + wTreatedSymbolicNames); throw new MojoExecutionException("symbolicName no treated " + wTreatedSymbolicNames); } // reploace all localDir by target dir for (final String wLocalDir : wDirsBundleLocation.keySet()) { wListBundles = wListBundles.replaceAll(wLocalDir, wDirsBundleLocation.get(wLocalDir)); } getLog().info(String.format("felix.auto.start.4=[%s]!", wListBundles)); pProperties.put("felix.auto.start.4", wListBundles); getLog().debug(String.format("properties file content=[%s]", pProperties.toString())); if (pathTargerConfigFile != null) { final CXFileUtf8 pFileTargerConfigFile = new CXFileUtf8(pathTargerConfigFile); pFileTargerConfigFile.getParentDirectory().mkdirs(); pFileTargerConfigFile.openWrite(); for (final Object wKey : pProperties.keySet()) { pFileTargerConfigFile.write(wKey.toString() + "=" + pProperties.getProperty(wKey.toString()) + "\n"); } pFileTargerConfigFile.close(); } } @Override public void execute() throws MojoExecutionException, MojoFailureException { // TODO Auto-generated method stub getLog().info("execute "); if (launchEclipseFile != null) { final File wFileLaunchConfig = new File(launchEclipseFile); if (pPathBundleTarget != null) { if (!wFileLaunchConfig.exists()) { getLog().error(String.format("file %s no found !", wFileLaunchConfig.getAbsolutePath())); throw new MojoExecutionException( String.format("file %s no found !", wFileLaunchConfig.getAbsolutePath())); } else { try { getLog().info(String.format("launch file=[%s]!", wFileLaunchConfig.getAbsolutePath())); getLog().info(String.format("path directories jar=[%s]!", pPathBundleTarget)); final Document wLauncherEclipseDom = getDocumentFromLauncherFile(launchEclipseFile); createConfigFelixFile(sourceConfigFile, wLauncherEclipseDom); createJvmShell(wLauncherEclipseDom); } catch (final Exception e) { getLog().error(String.format("fail to parse xml file %s error=[%s]!", wFileLaunchConfig.getAbsolutePath(), CXException.eInString(e))); throw new MojoExecutionException(String.format("fail to parse xml file %s error=[%s]!", wFileLaunchConfig.getAbsolutePath(), CXException.eInString(e))); } } } else { getLog().info(String.format("no path bundle directory !")); } } else { getLog().info(String.format("no launch file %s!", launchEclipseFile)); } } }
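/*
 * Hedged sketch related to getBundleSymbolicNameFromJar above: the same information can
 * be read through java.util.jar, which parses the manifest (including wrapped lines)
 * instead of scanning zip entries by hand. Not part of the mojo; shown for illustration only.
 */
import java.io.IOException;
import java.util.jar.JarFile;
import java.util.jar.Manifest;

public class BundleSymbolicNameSketch {

    static String symbolicName(String jarPath) throws IOException {
        try (JarFile jar = new JarFile(jarPath)) {
            Manifest manifest = jar.getManifest();
            if (manifest == null) {
                return null; // not every jar has a manifest
            }
            String value = manifest.getMainAttributes().getValue("Bundle-SymbolicName");
            if (value == null) {
                return null; // plain jar, not an OSGi bundle
            }
            // directives such as ";singleton:=true" follow the name
            return value.split(";")[0].trim();
        }
    }

    public static void main(String[] args) throws IOException {
        System.out.println(symbolicName(args[0]));
    }
}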
package org.sagebionetworks.table.query.model; import java.util.Collections; import java.util.Iterator; import java.util.LinkedList; import java.util.List; /** * An element that can be serialized to SQL. * */ public abstract class SQLElement implements Element { Element parent; /** * Write this element to SQL. * @return */ final public String toSql(){ // By default quotes are included when writing to SQL. boolean includeQuotes = true; return toSql(includeQuotes); } private String toSql(boolean includeQuotes) { ToSqlParameters parameters = new ToSqlParameters(includeQuotes); StringBuilder builder = new StringBuilder(); toSql(builder, parameters); return builder.toString(); } @Override public String toString() { return toSql(); } /** * Create an iterator to recursively iterate over all elements of the given type. */ public <T extends Element> Iterable<T> createIterable(Class<T> type) { LinkedList<T> list = new LinkedList<T>(); SQLElement.addRecursive(list, type, this); return list; } /** * Helper to recursively add all elements from this tree that are of the given * type to the provided list. * * @param <T> * @param list * @param type * @param element */ static <T extends Element> void addRecursive(List<T> list, Class<T> type, Element element) { if (type.isInstance(element)) { list.add(type.cast(element)); } for (Element child : element.getChildren()) { addRecursive(list, type, child); } } /** * Iterate over all elements of this tree. * * @return */ public Iterable<Element> createAllElementsIterable(){ return createIterable(Element.class); } /** * Get the first element of the given type. * @param type * @return */ public <T extends Element> T getFirstElementOfType(Class<T> type){ Iterator<T> iterator = createIterable(type).iterator(); if(iterator.hasNext()){ return iterator.next(); }else{ return null; } } /** * Does this tree have any aggregate elements? This method will do a * recursive walk of the tree and return true if any element in the tree is * an aggregate. * * Note: This method is recursive. * * @return */ public boolean hasAnyAggregateElements(){ // Iterate over all elements that implement HasAggregate to find the first aggregate. for(HasAggregate has: createIterable(HasAggregate.class)){ if(has.isElementAggregate()){ return true; } } // none of the elements are aggregates. return false; } /** * Is this element equivalent to the given element. * Two elements are equivalent if they are of the same type and the SQL * of each element is equivalent. For example, * 'count(foo)' is equivalent to 'COUNT( "foo" )'. * * @param other * @return */ public boolean equivalent(SQLElement other){ if(other == null){ return false; } if(!this.getClass().isInstance(other)){ return false; } String thisSQL = this.toSqlWithoutQuotes(); String otherSQL = other.toSqlWithoutQuotes(); return thisSQL.equals(otherSQL); } /** * Get the SQL for this element without quotes. * @return */ public String toSqlWithoutQuotes(){ // do not include quotes boolean includeQuotes = false; return toSql(includeQuotes); } @Override public boolean hasQuotes(){ return false; } /** * Does this tree have a leaf with quotes? 
* * @return */ public boolean hasQuotesRecursive(){ if(this.hasQuotes()){ return true; } // Check all nodes for(Element leaf: createAllElementsIterable()){ if(leaf.hasQuotes()){ return true; } } return false; } /** * Helper to build a children Iterable * @param children * @return */ static Iterable<Element> buildChildren(Element...children){ if(children == null || children.length < 1) { return Collections.emptyList(); }else if(children.length == 1 && children[0] != null) { return Collections.singleton(children[0]); }else { List<Element> list = new LinkedList<>(); for(Element child: children) { if(child != null) { list.add(child); } } return list; } } /** * Helper to build a children Iterable. * @param children * @return */ static Iterable<Element> buildChildren(List<? extends Element> children){ if(children == null) { return Collections.emptyList(); }else { return buildChildren(children.toArray(new Element[children.size()])); } } @Override public final Element getParent() { return this.parent; } /** * Set the parent of this element. * @param parent */ @Override public final void setParent(Element parent) { this.parent = parent; } /** * Recursively set the parent element for all elements in this tree. */ @Override public final void recursiveSetParent() { for(Element child: getChildren()) { child.setParent(this); child.recursiveSetParent(); } } @Override public final void recursiveClearParent() { for(Element child: getChildren()) { child.setParent(null); child.recursiveClearParent(); } this.setParent(null); } @Override public final <T extends Element> boolean isInContext(Class<T> type) { if(this.parent == null) { return false; } if(type.isInstance(this.parent)) { return true; } return this.parent.isInContext(type); } }
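// Illustrative sketch (not part of the original sources): how the traversal helpers defined
// on SQLElement above might be used once a concrete element tree has been obtained from the
// project's SQL parser. Only the public API shown above is relied on; the printing done with
// it here is a placeholder.
import org.sagebionetworks.table.query.model.Element;
import org.sagebionetworks.table.query.model.SQLElement;

class SqlElementUsageSketch {

    static void describe(SQLElement root) {
        // Recursive walk: true if any element anywhere in the tree is an aggregate.
        System.out.println("has aggregates: " + root.hasAnyAggregateElements());
        // Visit every element of the tree, regardless of type.
        for (Element element : root.createAllElementsIterable()) {
            System.out.println(element.getClass().getSimpleName());
        }
        // Quote-insensitive rendering, as used by equivalent(SQLElement).
        System.out.println("sql without quotes: " + root.toSqlWithoutQuotes());
    }
}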
package org.openbel.framework.core.equivalence; import static org.openbel.framework.common.BELUtilities.hasItems; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.junit.Assert; import org.junit.Test; import org.openbel.framework.common.enums.RelationshipType; import org.openbel.framework.common.protonetwork.model.ProtoEdgeTable.TableProtoEdge; /** * Tests around {@link StatementEquivalencer} * * @author James McMahon {@code <[email protected]>} */ public class StatementEquivalencerTest { // TODO create a more generalized test that makes sure equivalent edges // have the same statements @Test public void testIssue10() { // test data from nick's test, this won't make sense in isolation List<TableProtoEdge> edges = new ArrayList<TableProtoEdge>(); edges.add(new TableProtoEdge(0, RelationshipType.DIRECTLY_INCREASES.getDisplayValue(), 1)); edges.add(new TableProtoEdge(0, RelationshipType.DIRECTLY_INCREASES.getDisplayValue(), 1)); edges.add(new TableProtoEdge(2, RelationshipType.ACTS_IN.getDisplayValue(), 0)); edges.add(new TableProtoEdge(2, RelationshipType.ACTS_IN.getDisplayValue(), 0)); edges.add(new TableProtoEdge(3, RelationshipType.TRANSCRIBED_TO.getDisplayValue(), 4)); edges.add(new TableProtoEdge(4, RelationshipType.TRANSLATED_TO.getDisplayValue(), 5)); edges.add(new TableProtoEdge(6, RelationshipType.TRANSCRIBED_TO.getDisplayValue(), 7)); edges.add(new TableProtoEdge(7, RelationshipType.TRANSLATED_TO.getDisplayValue(), 8)); Map<Integer, Set<Integer>> edgeStmts = new HashMap<Integer, Set<Integer>>(); edgeStmts.put(0, arrayToSet(0)); edgeStmts.put(1, arrayToSet(1)); edgeStmts.put(2, arrayToSet(0)); edgeStmts.put(3, arrayToSet(1)); edgeStmts.put(4, arrayToSet(2)); edgeStmts.put(5, arrayToSet(3)); edgeStmts.put(6, arrayToSet(4)); edgeStmts.put(7, arrayToSet(5)); Map<Integer, Integer> eqn = new HashMap<Integer, Integer>(); eqn.put(0, 0); eqn.put(1, 1); eqn.put(2, 2); eqn.put(3, 3); eqn.put(4, 4); eqn.put(5, 2); eqn.put(6, 5); eqn.put(7, 1); eqn.put(8, 6); // this will be cleared, but just to be superstitious Map<Integer, Integer> eqe = new HashMap<Integer, Integer>(); eqe.put(0, 0); eqe.put(1, 0); eqe.put(2, 2); eqe.put(3, 2); eqe.put(4, 4); eqe.put(5, 5); eqe.put(6, 6); eqe.put(7, 7); StatementEquivalencer.equivalenceInternal(edges, edgeStmts, eqn, eqe); // printIssue10(edges, edgeStmts); Assert.assertEquals(2, edgeStmts.get(1).size()); } private static Set<Integer> arrayToSet(int... array) { // damn you java Set<Integer> set = new HashSet<Integer>(); for (int v : array) { set.add(v); } return set; } private static void printIssue10(List<TableProtoEdge> edges, Map<Integer, Set<Integer>> edgeStmts) { for (int i = 0, n = edges.size(); i < n; i++) { final String ei = String.valueOf(i); final Set<Integer> stmts = edgeStmts.get(i); if (hasItems(stmts)) { for (final Integer stmt : stmts) { final String stmtstring = String.valueOf(stmt); System.out.println(ei + " " + stmtstring); } } } } }
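// Side note (not part of the original test): on Java 8+ the arrayToSet helper above can be
// written with streams, which is essentially what the "damn you java" comment is lamenting.
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

class ArrayToSetSketch {

    static Set<Integer> arrayToSet(int... array) {
        // Box each primitive int and collect the values into a Set in one expression.
        return IntStream.of(array).boxed().collect(Collectors.toSet());
    }
}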
package org.editorconfig.configmanagement.extended; import com.intellij.application.options.codeStyle.properties.AbstractCodeStylePropertyMapper; import com.intellij.application.options.codeStyle.properties.CodeStylePropertyAccessor; import com.intellij.lang.Language; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.PsiFile; import com.intellij.psi.codeStyle.CodeStyleSettings; import com.intellij.psi.codeStyle.CommonCodeStyleSettings; import com.intellij.psi.codeStyle.LanguageCodeStyleSettingsProvider; import com.intellij.psi.codeStyle.modifier.CodeStyleSettingsModifier; import com.intellij.psi.codeStyle.modifier.CodeStyleStatusBarUIContributor; import com.intellij.psi.codeStyle.modifier.TransientCodeStyleSettings; import com.intellij.util.containers.ContainerUtil; import org.editorconfig.Utils; import org.editorconfig.configmanagement.EditorConfigNavigationActionsFactory; import org.editorconfig.core.EditorConfig; import org.editorconfig.core.EditorConfigException; import org.editorconfig.core.ParserCallback; import org.editorconfig.plugincomponents.SettingsProviderComponent; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.Set; import static org.editorconfig.core.EditorConfig.OutPair; @SuppressWarnings("SameParameterValue") public class EditorConfigCodeStyleSettingsModifier implements CodeStyleSettingsModifier { private final static Map<String,List<String>> DEPENDENCIES = ContainerUtil.newHashMap(); static { addDependency("indent_size", "continuation_indent_size"); } private static void addDependency(@NotNull String name, String... 
dependentNames) { DEPENDENCIES.put(name, Arrays.asList(dependentNames)); } @Override public boolean modifySettings(@NotNull TransientCodeStyleSettings settings, @NotNull PsiFile psiFile) { final VirtualFile file = psiFile.getVirtualFile(); if (Utils.isFullIntellijSettingsSupport() && file != null) { final Project project = psiFile.getProject(); if (!project.isDisposed() && Utils.isEnabled(settings)) { // Get editorconfig settings final List<OutPair> outPairs; try { outPairs = getEditorConfigOptions(project, psiFile, EditorConfigNavigationActionsFactory.getInstance(file)); // Apply editorconfig settings for the current editor if (applyCodeStyleSettings(new MyContext(settings, outPairs, psiFile))) { settings.addDependencies(EditorConfigNavigationActionsFactory.getInstance(file).getEditorConfigFiles()); return true; } } catch (EditorConfigException e) { // TODO: Report an error, ignore for now } } } return false; } @Nullable @Override public CodeStyleStatusBarUIContributor getStatusBarUiContributor(@NotNull TransientCodeStyleSettings transientSettings) { return new EditorConfigCodeStyleStatusBarUIContributor(); } private static boolean applyCodeStyleSettings(@NotNull MyContext context) { LanguageCodeStyleSettingsProvider provider = LanguageCodeStyleSettingsProvider.forLanguage(context.getLanguage()); if (provider != null) { AbstractCodeStylePropertyMapper mapper = provider.getPropertyMapper(context.getSettings()); Set<String> processed = ContainerUtil.newHashSet(); boolean isModified = processOptions(context, mapper, false, processed); isModified = processOptions(context, mapper, true, processed) || isModified; return isModified; } return false; } private static boolean processOptions(@NotNull MyContext context, @NotNull AbstractCodeStylePropertyMapper mapper, boolean languageSpecific, Set<String> processed) { String langPrefix = languageSpecific ? 
mapper.getLanguageDomainId() + "_" : null; boolean isModified = false; for (OutPair option : context.getOptions()) { final String optionKey = option.getKey(); String intellijName = EditorConfigIntellijNameUtil.toIntellijName(optionKey); CodeStylePropertyAccessor accessor = findAccessor(mapper, intellijName, langPrefix); if (accessor != null) { final String val = preprocessValue(context, optionKey, option.getVal()); if (DEPENDENCIES.containsKey(optionKey)) { for (String dependency : DEPENDENCIES.get(optionKey)) { if (!processed.contains(dependency)) { CodeStylePropertyAccessor dependencyAccessor = findAccessor(mapper, dependency, null); if (dependencyAccessor != null) { isModified |= dependencyAccessor.setFromString(val); } } } } isModified |= accessor.setFromString(val); processed.add(intellijName); } } return isModified; } private static String preprocessValue(@NotNull MyContext context, @NotNull String optionKey, @NotNull String optionValue) { if ("indent_size".equals(optionKey) && "tab".equals(optionValue)) { return context.getTabSize(); } return optionValue; } @Nullable private static CodeStylePropertyAccessor findAccessor(@NotNull AbstractCodeStylePropertyMapper mapper, @NotNull String propertyName, @Nullable String langPrefix) { if (langPrefix != null) { if (propertyName.startsWith(langPrefix)) { final String prefixlessName = StringUtil.trimStart(propertyName, langPrefix); final EditorConfigPropertyKind propertyKind = IntellijPropertyKindMap.getPropertyKind(prefixlessName); if (propertyKind == EditorConfigPropertyKind.LANGUAGE || propertyKind == EditorConfigPropertyKind.COMMON) { return mapper.getAccessor(prefixlessName); } } } else { return mapper.getAccessor(propertyName); } return null; } private static List<OutPair> getEditorConfigOptions(@NotNull Project project, @NotNull PsiFile psiFile, @NotNull ParserCallback callback) throws EditorConfigException { String filePath = Utils.getFilePath(project, psiFile.getVirtualFile()); final Set<String> rootDirs = SettingsProviderComponent.getInstance().getRootDirs(project); return new EditorConfig().getProperties(filePath, rootDirs, callback); } private static class MyContext { private final @NotNull CodeStyleSettings mySettings; private final @NotNull List<OutPair> myOptions; private final @NotNull PsiFile myFile; private MyContext(@NotNull CodeStyleSettings settings, @NotNull List<OutPair> options, @NotNull PsiFile file) { mySettings = settings; myOptions = options; myFile = file; } @NotNull private CodeStyleSettings getSettings() { return mySettings; } @NotNull private List<OutPair> getOptions() { return myOptions; } @NotNull private Language getLanguage() { return myFile.getLanguage(); } private String getTabSize() { for (OutPair pair : myOptions) { if ("tab_width".equals(pair.getKey())) { return pair.getVal(); } } CommonCodeStyleSettings.IndentOptions indentOptions = mySettings.getIndentOptions(myFile.getFileType()); return String.valueOf(indentOptions.TAB_SIZE); } } }
package com.novoda.downloadmanager; import android.support.annotation.Nullable; import com.novoda.notils.logger.simple.Log; final class WaitForDownloadServiceThenPerform { interface Action<T> { T performAction(); } private WaitForDownloadServiceThenPerform() { // Uses static factory method. } static <T> WaitForDownloadServiceThenPerformAction<T> waitFor(@Nullable DownloadService downloadService, Object downloadServiceLock) { return new WaitForDownloadServiceThenPerformAction<>(downloadService, downloadServiceLock); } static class WaitForDownloadServiceThenPerformAction<T> { private final DownloadService downloadService; private final Object downloadServiceLock; WaitForDownloadServiceThenPerformAction(DownloadService downloadService, Object downloadServiceLock) { this.downloadService = downloadService; this.downloadServiceLock = downloadServiceLock; } T thenPerform(final Action<T> action) { if (downloadService == null) { waitForLock(); } return action.performAction(); } private void waitForLock() { try { synchronized (downloadServiceLock) { if (downloadService == null) { downloadServiceLock.wait(); } } } catch (InterruptedException e) { Log.e(e, "Interrupted waiting for download service."); } } } }
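// Illustrative usage sketch (not part of the original sources): a caller holding a
// DownloadService reference that may not be bound yet, guarded by a shared lock object.
// The field names and the action body are hypothetical; only waitFor(...).thenPerform(...)
// comes from the helper above.
package com.novoda.downloadmanager;

import android.support.annotation.Nullable;

class WaitForDownloadServiceUsageSketch {

    private final Object downloadServiceLock = new Object();
    @Nullable
    private DownloadService downloadService; // assumed to be assigned once the service binds

    Boolean isServiceBound() {
        // Blocks on downloadServiceLock until the service reference is available,
        // then runs the action on the calling thread and returns its result.
        return WaitForDownloadServiceThenPerform
                .<Boolean>waitFor(downloadService, downloadServiceLock)
                .thenPerform(() -> downloadService != null);
    }
}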
package org.yakindu.sct.model.stext.scoping; import java.util.ArrayList; import java.util.List; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.ecore.util.EcoreUtil; import org.eclipse.xtext.EcoreUtil2; import org.eclipse.xtext.naming.QualifiedName; import org.eclipse.xtext.scoping.impl.ImportNormalizer; import org.eclipse.xtext.scoping.impl.ImportedNamespaceAwareLocalScopeProvider; import org.yakindu.base.xtext.utils.jface.viewers.ContextElementAdapter; import org.yakindu.sct.model.sgraph.SGraphPackage; import org.yakindu.sct.model.sgraph.Scope; import org.yakindu.sct.model.sgraph.Statechart; import org.yakindu.sct.model.stext.stext.Import; import org.yakindu.sct.model.stext.stext.ImportScope; import org.yakindu.sct.model.stext.stext.StateSpecification; import org.yakindu.sct.model.stext.stext.TransitionSpecification; import com.google.common.collect.Lists; /** * * @author thomas kutz * */ public class StextImportAwareScopeProvider extends ImportedNamespaceAwareLocalScopeProvider { @Override protected List<ImportNormalizer> internalGetImportedNamespaceResolvers(final EObject context, boolean ignoreCase) { List<ImportNormalizer> importedNamespaceResolvers = Lists.newArrayList(); List<ImportScope> importScopes = new ArrayList<ImportScope>(); if (context instanceof StateSpecification || context instanceof TransitionSpecification) { importScopes = EcoreUtil2.getAllContentsOfType(getStatechart(context), ImportScope.class); } else if (context instanceof Statechart) { importScopes = EcoreUtil2.getAllContentsOfType(context, ImportScope.class); } for (ImportScope scope : importScopes) { importedNamespaceResolvers.addAll(createNamespaceResolver(scope, ignoreCase)); } return importedNamespaceResolvers; } @Override protected ImportNormalizer doCreateImportNormalizer(QualifiedName importedNamespace, boolean wildcard, boolean ignoreCase) { // all imports should be treated as if they had a wild card return new ImportNormalizer(importedNamespace, true, ignoreCase); } protected List<ImportNormalizer> createNamespaceResolver(Scope scope, boolean ignoreCase) { List<ImportNormalizer> importedNamespaceResolvers = Lists.newArrayList(); ImportScope importScope = (ImportScope) scope; for (Import child : importScope.getImports()) { String value = getImportedNamespace(child); ImportNormalizer resolver = createImportedNamespaceResolver(value, ignoreCase); if (resolver != null) importedNamespaceResolvers.add(resolver); } return importedNamespaceResolvers; } protected Statechart getStatechart(EObject context) { final ContextElementAdapter provider = (ContextElementAdapter) EcoreUtil.getExistingAdapter(context.eResource(), ContextElementAdapter.class); if (provider == null) { return EcoreUtil2.getContainerOfType(context, Statechart.class); } else { return (Statechart) EcoreUtil.getObjectByType(provider.getElement().eResource().getContents(), SGraphPackage.Literals.STATECHART); } } }
package dk.statsbiblioteket.newpaper.processmonitor.backend; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.MediaType; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; /** * Service class to expose retrieval of Batch (@see Batch) and Events (@see Event) objects for monitoring progress and state */ @Component @Path("/") public class Batches { @Autowired private DataSourceCombiner dataSource; /** * Retrieves a list of all known Batch objects (@see Batch). * @param details If true, will also include the available details for each event in the Batch objects. Defaults to false. * @return List<Batch> as JSON data. */ @GET @Path("/") @Produces(MediaType.APPLICATION_JSON) public List<Batch> getBatches(@QueryParam("details") @DefaultValue("false") boolean details) { return convertBatchList(dataSource.getAsOneDataSource().getBatches(details, null)); } private List<Batch> convertBatchList(List<dk.statsbiblioteket.newspaper.processmonitor.datasources.Batch> batches) { ArrayList<Batch> result = new ArrayList<Batch>(batches.size()); for (dk.statsbiblioteket.newspaper.processmonitor.datasources.Batch batch : batches) { result.add(convert(batch)); } return result; } /** * Retrieves a specific batch given it's ID. * @param batchID The ID of the specific batch * @param details If true, will also include the available details for each event in the Batch. Defaults to false. * @return Batch as JSON Object */ @GET @Path("{batchID}") @Produces(MediaType.APPLICATION_JSON) public Batch getSpecificBatch(@PathParam("batchID") String batchID, @QueryParam("details") @DefaultValue("false") boolean details) { return convert(dataSource.getAsOneDataSource().getBatch(batchID, details)); } private Batch convert(dk.statsbiblioteket.newspaper.processmonitor.datasources.Batch batch) { Batch result = new Batch(); result.setBatchID(batch.getBatchID()); result.setEvents(convert(batch.getEventList())); return result; } private Map<String, Event> convert(List<dk.statsbiblioteket.newspaper.processmonitor.datasources.Event> eventList) { Map<String, Event> result = new HashMap<String, Event>(eventList.size()); for (dk.statsbiblioteket.newspaper.processmonitor.datasources.Event event : eventList) { result.put(event.getEventID(), convert(event)); } return result; } /** * Retrieves a specific Event for a specific Batch. * @param batchID The ID of the specific batch * @param eventID The ID of the specific event * @param details If true, will also include the available details. Defaults to false. */ @GET @Path("{batchID}/{eventID}") @Produces(MediaType.APPLICATION_JSON) public Event getSpecificBatchEvent(@PathParam("batchID") String batchID, @PathParam("eventID") String eventID, @QueryParam("details") @DefaultValue("false") boolean details) { return convert(dataSource.getAsOneDataSource().getBatchEvent(batchID, eventID, details)); } private Event convert(dk.statsbiblioteket.newspaper.processmonitor.datasources.Event batchEvent) { Event result = new Event(); result.setDetails(batchEvent.getDetails()); result.setSuccess(batchEvent.isSucces()); return result; } }
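// Illustrative sketch (not part of the original sources): the three GET endpoints exposed by
// the Batches resource above, as seen from a plain HTTP client. The host, context root and
// the batch/event IDs used here are hypothetical.
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;

class BatchesClientSketch {

    public static void main(String[] args) throws Exception {
        String base = "http://localhost:8080/process-monitor"; // hypothetical deployment root
        get(base + "/?details=true");               // all batches, including event details
        get(base + "/B400022028241");               // one batch (hypothetical batchID)
        get(base + "/B400022028241/Data_Received"); // one event of that batch (hypothetical eventID)
    }

    private static void get(String url) throws Exception {
        HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
        connection.setRequestProperty("Accept", "application/json");
        try (InputStream in = connection.getInputStream()) {
            System.out.println(url + " -> HTTP " + connection.getResponseCode());
        }
    }
}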
package org.openmrs.projectbuendia.webservices.rest; import static org.openmrs.module.xforms.XformBuilder.ATTRIBUTE_BIND; import static org.openmrs.module.xforms.XformBuilder.ATTRIBUTE_CONCEPT_ID; import static org.openmrs.module.xforms.XformBuilder.ATTRIBUTE_CONSTRAINT; import static org.openmrs.module.xforms.XformBuilder.ATTRIBUTE_ID; import static org.openmrs.module.xforms.XformBuilder.ATTRIBUTE_MESSAGE; import static org.openmrs.module.xforms.XformBuilder.ATTRIBUTE_NODESET; import static org.openmrs.module.xforms.XformBuilder.ATTRIBUTE_OPENMRS_CONCEPT; import static org.openmrs.module.xforms.XformBuilder.ATTRIBUTE_TYPE; import static org.openmrs.module.xforms.XformBuilder.ATTRIBUTE_UUID; import static org.openmrs.module.xforms.XformBuilder.CONTROL_INPUT; import static org.openmrs.module.xforms.XformBuilder.CONTROL_REPEAT; import static org.openmrs.module.xforms.XformBuilder.CONTROL_SELECT; import static org.openmrs.module.xforms.XformBuilder.CONTROL_SELECT1; import static org.openmrs.module.xforms.XformBuilder.DATA_TYPE_BASE64BINARY; import static org.openmrs.module.xforms.XformBuilder.DATA_TYPE_BOOLEAN; import static org.openmrs.module.xforms.XformBuilder.DATA_TYPE_DATE; import static org.openmrs.module.xforms.XformBuilder.DATA_TYPE_DATETIME; import static org.openmrs.module.xforms.XformBuilder.DATA_TYPE_DECIMAL; import static org.openmrs.module.xforms.XformBuilder.DATA_TYPE_TEXT; import static org.openmrs.module.xforms.XformBuilder.DATA_TYPE_TIME; import static org.openmrs.module.xforms.XformBuilder.INSTANCE_ID; import static org.openmrs.module.xforms.XformBuilder.MODEL_ID; import static org.openmrs.module.xforms.XformBuilder.NAMESPACE_XFORMS; import static org.openmrs.module.xforms.XformBuilder.NAMESPACE_XML_INSTANCE; import static org.openmrs.module.xforms.XformBuilder.NAMESPACE_XML_SCHEMA; import static org.openmrs.module.xforms.XformBuilder.NODE_BIND; import static org.openmrs.module.xforms.XformBuilder.NODE_GROUP; import static org.openmrs.module.xforms.XformBuilder.NODE_HINT; import static org.openmrs.module.xforms.XformBuilder.NODE_INSTANCE; import static org.openmrs.module.xforms.XformBuilder.NODE_LABEL; import static org.openmrs.module.xforms.XformBuilder.NODE_MODEL; import static org.openmrs.module.xforms.XformBuilder.NODE_VALUE; import static org.openmrs.module.xforms.XformBuilder.NODE_XFORMS; import static org.openmrs.module.xforms.XformBuilder.PREFIX_XFORMS; import static org.openmrs.module.xforms.XformBuilder.PREFIX_XML_INSTANCES; import static org.openmrs.module.xforms.XformBuilder.PREFIX_XML_SCHEMA; import static org.openmrs.module.xforms.XformBuilder.PREFIX_XML_SCHEMA2; import java.io.StringReader; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Hashtable; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.TreeMap; import java.util.TreeSet; import java.util.Vector; import org.apache.commons.lang.StringEscapeUtils; import org.apache.commons.lang.StringUtils; import org.kxml2.kdom.Document; import org.kxml2.kdom.Element; import org.kxml2.kdom.Node; import org.openmrs.Concept; import org.openmrs.ConceptAnswer; import org.openmrs.ConceptDatatype; import org.openmrs.ConceptName; import org.openmrs.ConceptNumeric; import org.openmrs.Field; import org.openmrs.Form; import org.openmrs.FormField; import org.openmrs.api.context.Context; import org.openmrs.hl7.HL7Constants; import org.openmrs.module.xforms.RelativeBuilder; import 
org.openmrs.module.xforms.XformBuilder; import org.openmrs.module.xforms.XformConstants; import org.openmrs.module.xforms.formentry.FormEntryWrapper; import org.openmrs.module.xforms.formentry.FormSchemaFragment; import org.openmrs.module.xforms.util.XformsUtil; import org.openmrs.util.FormConstants; import org.openmrs.util.FormUtil; /** * This is a clone of the Xforms module XformBuilderEx class, allowing us to tinker with the view * creation code separately from the module itself. */ public class BuendiaXformBuilder { // Note: not a Map because we need to call into Xforms module code which requires Hashtable :( private final Hashtable<String, Element> bindings = new Hashtable<>(); private final Map<FormField, Element> formFields = new HashMap<>(); private final Map<FormField, String> fieldTokens = new HashMap<>(); private final boolean useConceptIdAsHint; private Element bodyNode; private BuendiaXformBuilder() { useConceptIdAsHint = "true".equalsIgnoreCase(Context.getAdministrationService().getGlobalProperty("xforms.useConceptIdAsHint")); } /** * Builds an xform for an given an openmrs form. This is the only * public member in the class; it constructs an instance (to avoid * nasty statics) and then invokes private methods appropriately. */ public static String buildXform(Form form) throws Exception { return new BuendiaXformBuilder().buildXformImpl(form); } private String buildXformImpl(Form form) throws Exception { boolean includeRelationshipNodes = !"false".equals(Context.getAdministrationService() .getGlobalProperty(XformConstants.GLOBAL_PROP_KEY_INCLUDE_PATIENT_RELATIONSHIPS)); //String schemaXml = XformsUtil.getSchema(form); String templateXml = FormEntryWrapper.getFormTemplate(form); //Add relationship data node if (includeRelationshipNodes) { templateXml = templateXml.replace("</patient>", " <patient_relative>\n <patient_relative.person/>\n <patient_relative.relationship/>\n </patient_relative>\n </patient>"); } Document doc = new Document(); doc.setEncoding(XformConstants.DEFAULT_CHARACTER_ENCODING); Element xformsNode = appendElement(doc, NAMESPACE_XFORMS, NODE_XFORMS); xformsNode.setPrefix(PREFIX_XFORMS, NAMESPACE_XFORMS); xformsNode.setPrefix(PREFIX_XML_SCHEMA, NAMESPACE_XML_SCHEMA); xformsNode.setPrefix(PREFIX_XML_SCHEMA2, NAMESPACE_XML_SCHEMA); xformsNode.setPrefix(PREFIX_XML_INSTANCES, NAMESPACE_XML_INSTANCE); xformsNode.setPrefix("jr", "http://openrosa.org/javarosa"); Element modelNode = appendElement(xformsNode, NAMESPACE_XFORMS, NODE_MODEL); modelNode.setAttribute(null, ATTRIBUTE_ID, MODEL_ID); // All our UI nodes are appended directly into the xforms node. // Another alternative would be to create the HTML body node here, and append // everything under that. bodyNode = xformsNode; Element instanceNode = appendElement(modelNode, NAMESPACE_XFORMS, NODE_INSTANCE); instanceNode.setAttribute(null, ATTRIBUTE_ID, INSTANCE_ID); Element formNode = (Element) XformBuilder.getDocument(new StringReader(templateXml)).getRootElement(); formNode.setAttribute(null, ATTRIBUTE_UUID, form.getUuid()); instanceNode.addChild(Element.ELEMENT, formNode); // (Note for comparison with XformBuilderEx: schema doc code removed here, as it wasn't actually used.) //TODO This block should be replaced with using database field items instead of // parsing the template document. 
Hashtable<String, String> problemList = new Hashtable<String, String>(); Hashtable<String, String> problemListItems = new Hashtable<String, String>(); XformBuilder.parseTemplate(modelNode, formNode, formNode, bindings, bodyNode, problemList, problemListItems, 0); buildUInodes(form); //find all conceptId attributes in the document and replace their value with a mapped concept String prefSourceName = Context.getAdministrationService().getGlobalProperty( XformConstants.GLOBAL_PROP_KEY_PREFERRED_CONCEPT_SOURCE); //we only use the mappings if the global property is set if (StringUtils.isNotBlank(prefSourceName)) { for (int i = 0; i < formNode.getChildCount(); i++) { Element childElement = formNode.getElement(i); if (childElement != null) { for (int j = 0; j < childElement.getChildCount(); j++) { if (childElement.getElement(j) != null) { Element grandChildElement = childElement.getElement(j); String value = grandChildElement.getAttributeValue(null, ATTRIBUTE_OPENMRS_CONCEPT); if (StringUtils.isNotBlank(value)) XformBuilder.addConceptMapAttributes(grandChildElement, value); } } } } } if (includeRelationshipNodes) { RelativeBuilder.build(modelNode, bodyNode, formNode); } return XformBuilder.fromDoc2String(doc); } private void buildUInodes(Form form) { Locale locale = Context.getLocale(); TreeMap<Integer, TreeSet<FormField>> formStructure = FormUtil.getFormStructure(form); buildUInodes(form, formStructure, 0, locale); } private void buildUInodes(Form form, TreeMap<Integer, TreeSet<FormField>> formStructure, Integer sectionId, Locale locale) { if (!formStructure.containsKey(sectionId)) return; TreeSet<FormField> section = formStructure.get(sectionId); if (section == null || section.size() < 1) return; // Note: FormUtil.getTagList needs a Vector<String>. Urgh. Vector<String> tagList = new Vector<>(); for(FormField formField : section) { Integer subSectionId = formField.getFormFieldId(); String sectionName = FormUtil.getXmlToken(formField.getField().getName()); String name = FormUtil.getNewTag(sectionName, tagList); if(formField.getParent() != null && fieldTokens.values().contains(name)){ String parentName = fieldTokens.get(formField.getParent()); String token = parentName + "_" + name; if(!bindings.containsKey(token)) { token = FormUtil.getNewTag(FormUtil.getXmlToken(formField.getParent().getField().getName()), new Vector<String>()); token = token + "_" + name; } name = token; } fieldTokens.put(formField, name); Field field = formField.getField(); boolean required = formField.isRequired(); if (field.getFieldType().getFieldTypeId().equals(FormConstants.FIELD_TYPE_CONCEPT)) { Concept concept = field.getConcept(); ConceptDatatype datatype = concept.getDatatype(); // TODO(jonskeet): Don't rely on names here? (Do we even need problem lists?) 
if ( (name.contains("problem_added") || name.contains("problem_resolved")) && formField.getParent() != null && (formField.getParent().getField().getName().contains("PROBLEM LIST")) ){ addProblemList(name, concept, required, locale, formField); } else { switch (datatype.getHl7Abbreviation()) { case HL7Constants.HL7_BOOLEAN: addUiNode(name, concept, DATA_TYPE_BOOLEAN, CONTROL_INPUT, locale, getParentNode(formField, locale)); break; case HL7Constants.HL7_DATE: addUiNode(name, concept, DATA_TYPE_DATE, CONTROL_INPUT, locale, getParentNode(formField, locale)); break; case HL7Constants.HL7_DATETIME: addUiNode(name, concept, DATA_TYPE_DATETIME, CONTROL_INPUT, locale, getParentNode(formField, locale)); break; case HL7Constants.HL7_TIME: addUiNode(name, concept, DATA_TYPE_TIME, CONTROL_INPUT, locale, getParentNode(formField, locale)); break; case HL7Constants.HL7_TEXT: addUiNode(name, concept, DATA_TYPE_TEXT, CONTROL_INPUT, locale, getParentNode(formField, locale)); break; case HL7Constants.HL7_NUMERIC: ConceptNumeric conceptNumeric = Context.getConceptService().getConceptNumeric(concept.getConceptId()); addUiNode(name, conceptNumeric, DATA_TYPE_DECIMAL, CONTROL_INPUT, locale, getParentNode(formField, locale)); break; case HL7Constants.HL7_CODED: case HL7Constants.HL7_CODED_WITH_EXCEPTIONS: addCodedField(name, formField, field, required, concept, locale); break; case "ED": // This isn't in HL7Constants as far as I can tell. addUiNode(name, concept, DATA_TYPE_BASE64BINARY, CONTROL_INPUT, locale, getParentNode(formField, locale)); break; } } } if (formStructure.containsKey(subSectionId)) { buildUInodes(form, formStructure, subSectionId, locale); } } } private Element addUiNode(String token, Concept concept, String dataType, String controlName, Locale locale, Element bodyNode) { String bindName = token; Element controlNode = appendElement(bodyNode, NAMESPACE_XFORMS, controlName); controlNode.setAttribute(null, ATTRIBUTE_BIND, bindName); Element bindNode = bindings.get(bindName); if (bindNode == null) { throw new IllegalArgumentException("No bind node for bindName " + bindName); } bindNode.setAttribute(null, ATTRIBUTE_TYPE, dataType); //create the label Element labelNode = appendElement(controlNode, NAMESPACE_XFORMS, NODE_LABEL); ConceptName name = concept.getName(locale, false); if (name == null) { name = concept.getName(); } labelNode.addChild(Element.TEXT, name.getName()); addHintNode(labelNode, concept); if(concept instanceof ConceptNumeric) { ConceptNumeric numericConcept = (ConceptNumeric)concept; if(numericConcept.isPrecise()){ Double minInclusive = numericConcept.getLowAbsolute(); Double maxInclusive = numericConcept.getHiAbsolute(); if(!(minInclusive == null && maxInclusive == null)){ String lower = (minInclusive == null ? "" : FormSchemaFragment.numericToString(minInclusive, numericConcept.isPrecise())); String upper = (maxInclusive == null ? "" : FormSchemaFragment.numericToString(maxInclusive, numericConcept.isPrecise())); bindNode.setAttribute(null, ATTRIBUTE_CONSTRAINT, ". >= " + lower + " and . <= " + upper); bindNode.setAttribute(null, (XformsUtil.isJavaRosaSaveFormat() ? 
"jr:constraintMsg" : ATTRIBUTE_MESSAGE), "value should be between " + lower + " and " + upper + " inclusive"); } } } return controlNode; } private void addCodedUiNodes(boolean multiplSel, Element controlNode, Collection<ConceptAnswer> answerList, Concept concept, String dataType, String controlName, Locale locale){ for (ConceptAnswer answer : answerList) { String conceptName = answer.getAnswerConcept().getName(locale).getName(); String conceptValue; if (answer.getAnswerConcept().getConceptClass().getConceptClassId().equals(HL7Constants.CLASS_DRUG) && answer.getAnswerDrug() != null) { conceptName = answer.getAnswerDrug().getName(); if(multiplSel) conceptValue = FormUtil.getXmlToken(conceptName); else { conceptValue = StringEscapeUtils.escapeXml(FormUtil.conceptToString(answer.getAnswerConcept(), locale)) + "^" + FormUtil.drugToString(answer.getAnswerDrug()); } } else { if(multiplSel) conceptValue = FormUtil.getXmlToken(conceptName); else conceptValue = StringEscapeUtils.escapeXml(FormUtil.conceptToString(answer.getAnswerConcept(), locale)); } Element itemNode = appendElement(controlNode, NAMESPACE_XFORMS, XformBuilder.NODE_ITEM); itemNode.setAttribute(null, ATTRIBUTE_CONCEPT_ID, concept.getConceptId().toString()); Element itemLabelNode = appendElement(itemNode, NAMESPACE_XFORMS, NODE_LABEL); itemLabelNode.addChild(Element.TEXT, conceptName); //TODO This will make sense after the form designer's OptionDef implements //the xforms hint. //addHintNode(itemLabelNode, answer.getAnswerConcept()); Element itemValNode = appendElement(itemNode, NAMESPACE_XFORMS, NODE_VALUE); itemValNode.addChild(Element.TEXT, conceptValue); } } private void addProblemList(String token, Concept concept, boolean required, Locale locale, FormField formField) { Element groupNode = appendElement(bodyNode, NAMESPACE_XFORMS, NODE_GROUP); Element labelNode = appendElement(groupNode, NAMESPACE_XFORMS, NODE_LABEL); labelNode.addChild(Element.TEXT, formField.getField().getConcept().getName(locale, false).getName()); addHintNode(labelNode, concept); Element repeatControl = appendElement(groupNode, NAMESPACE_XFORMS, CONTROL_REPEAT); repeatControl.setAttribute(null, ATTRIBUTE_BIND, token); //add the input node. Element controlNode = appendElement(repeatControl, NAMESPACE_XFORMS, CONTROL_INPUT); String nodeset = "problem_list/" + token + "/value"; String id = nodeset.replace('/', '_'); controlNode.setAttribute(null, ATTRIBUTE_BIND, id); //add the label. labelNode = appendElement(controlNode, NAMESPACE_XFORMS, NODE_LABEL); labelNode.addChild(Element.TEXT, token + " value"); addHintNode(labelNode, concept); //create bind node Element bindNode = appendElement(bindings.get(token).getParent(), NAMESPACE_XFORMS, NODE_BIND); bindNode.setAttribute(null, ATTRIBUTE_ID, id); bindNode.setAttribute(null, ATTRIBUTE_NODESET, "/form/" + nodeset); bindNode.setAttribute(null, ATTRIBUTE_TYPE, DATA_TYPE_TEXT); } /** * Returns the element representing the UI control for the parent of the given * form field - i.e. the element under which the UI control for the form field * itself should be added. */ private Element getParentNode(FormField formField, Locale locale){ formField = formField.getParent(); if (formField == null || formField.getParent() == null) { return bodyNode; // is this problem list? 
} Element node = formFields.get(formField); if (node != null) { return node; } String token = fieldTokens.get(formField); Element groupNode = appendElement(bodyNode, NAMESPACE_XFORMS, NODE_GROUP); Element labelNode = appendElement(groupNode, NAMESPACE_XFORMS, NODE_LABEL); labelNode.addChild(Element.TEXT, formField.getField().getConcept().getName(locale, false).getName()); addHintNode(labelNode, formField.getField().getConcept()); if (formField.getMaxOccurs() != null && formField.getMaxOccurs() == -1) { Element repeatControl = appendElement(groupNode, NAMESPACE_XFORMS, CONTROL_REPEAT); repeatControl.setAttribute(null, ATTRIBUTE_BIND, token); formFields.put(formField, repeatControl); return repeatControl; } else { groupNode.setAttribute(null, ATTRIBUTE_ID, token); formFields.put(formField, groupNode); return groupNode; } } private void addCodedField(String name, FormField formField, Field field, boolean required, Concept concept, Locale locale) { if (formField.getMaxOccurs() != null && formField.getMaxOccurs().intValue() == -1) { addProblemList(name, concept, required, locale, formField); } else { //Collection<ConceptAnswer> answers = concept.getAnswers(false); List<ConceptAnswer> answers = new ArrayList<ConceptAnswer>(concept.getAnswers(false)); Collections.sort(answers); String controlName = field.getSelectMultiple() ? CONTROL_SELECT : CONTROL_SELECT1; Element controlNode = addUiNode(name, concept, DATA_TYPE_TEXT, controlName, locale, getParentNode(formField, locale)); if (controlNode != null) { addCodedUiNodes(true, controlNode, answers, concept, DATA_TYPE_TEXT, CONTROL_SELECT, locale); } } } private void addHintNode(Element labelNode, Concept concept) { String hint = null; if(concept.getDescription() != null) { hint = concept.getDescription().getDescription(); } if(useConceptIdAsHint) { hint = (hint != null ? hint + " [" + concept.getConceptId() + "]" : concept.getConceptId().toString()); } if(hint != null) { Element hintNode = appendElement(labelNode.getParent(), NAMESPACE_XFORMS, NODE_HINT); hintNode.addChild(Element.TEXT, hint); } } private static Element appendElement(Node parent, String namespaceURI, String localName) { Element child = parent.createElement(namespaceURI, localName); parent.addChild(Element.ELEMENT, child); return child; } }
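// Illustrative usage sketch (not part of the original sources): the builder above exposes a
// single public entry point, buildXform(Form). Looking the form up through the OpenMRS
// FormService is an assumption here; any lookup that yields an org.openmrs.Form works.
import org.openmrs.Form;
import org.openmrs.api.context.Context;
import org.openmrs.projectbuendia.webservices.rest.BuendiaXformBuilder;

class BuendiaXformBuilderUsageSketch {

    String xformFor(String formUuid) throws Exception {
        Form form = Context.getFormService().getFormByUuid(formUuid); // assumed lookup
        // Returns the complete XForms document (model, instance and UI nodes) as an XML string.
        return BuendiaXformBuilder.buildXform(form);
    }
}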
package org.rzo.yajsw.os.ms.win.w32; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.nio.channels.FileChannel; import jnacontrib.win32.Registry; import jnacontrib.win32.Registry.REGISTRY_ROOT_KEY; import org.apache.commons.configuration.BaseConfiguration; import org.apache.commons.configuration.Configuration; import org.jboss.netty.logging.InternalLogger; import org.rzo.yajsw.boot.WrapperLoader; import org.rzo.yajsw.os.JavaHome; import com.sun.jna.Platform; // TODO: Auto-generated Javadoc /** * The Class JavaHome. */ public class WindowsJavaHome implements JavaHome { /** The _config. */ Configuration _config; InternalLogger _logger; public void setLogger(InternalLogger logger) { _logger = logger; } /** * Instantiates a new java home. * * @param config * the config */ WindowsJavaHome(Configuration config) { _config = config; } /** * Find java. * * @return the string */ public String findJava(String wrapperJava, String customProcName) { String java = null; // String wrapperJava = _config.getString("wrapper.java.command"); if (wrapperJava != null && !wrapperJava.endsWith(".exe") && !wrapperJava.endsWith("}")) wrapperJava = wrapperJava + ".exe"; try { // check if this is relative to wrapper home if (wrapperJava != null) { File f = new File(wrapperJava); if (!f.isAbsolute()) { File f2 = new File(WrapperLoader.getWrapperHome(), wrapperJava); if (f2.exists()) try { wrapperJava = f2.getCanonicalPath(); } catch (IOException e) { _logger.warn("Exception in findJava()", e); } } else { wrapperJava = new File(wrapperJava).getCanonicalPath(); if (!new File(wrapperJava).exists()) { _logger.error("java file does not exist: " + wrapperJava); return null; } } } } catch (Exception e) { _logger.warn("Error in JavaHome.findJava(): ", e); } java = wrapperJava; // String customProcName = // _config.getString("wrapper.java.customProcName"); boolean useJavaw = _config.getBoolean("wrapper.java.command.javaw", (wrapperJava != null) && (wrapperJava.endsWith("javaw.exe"))); if (java == null) { String minVersion = _config.getString("wrapper.java.command.minVersion"); String maxVersion = _config.getString("wrapper.java.command.maxVersion"); boolean b64bit = _config.getBoolean("wrapper.java.command.64bit", false); boolean jreOnly = _config.getBoolean("wrapper.java.command.jreOnly", false); boolean preferJdk = _config.getBoolean("wrapper.java.command.preferJdk", false); boolean preferJre = _config.getBoolean("wrapper.java.command.preferJre", true && !preferJdk); boolean jdkOnly = _config.getBoolean("wrapper.java.command.jdkOnly", false); String registryBase = Platform.is64Bit() && (!b64bit) ? 
"SOFTWARE\\Wow6432Node" : "SOFTWARE"; if (!jdkOnly && (jreOnly || preferJre)) { java = findJavaInRegistry(new String[] { registryBase+"\\JavaSoft\\Java Runtime Environment", registryBase+"\\IBM\\Java2 Runtime Environment" }, minVersion, maxVersion, b64bit); } if (java == null && !jreOnly) java = findJavaInRegistry(new String[] { registryBase+"\\JavaSoft\\Java Development Kit", registryBase+"\\IBM\\Java Development Kit" }, minVersion, maxVersion, b64bit); } else if (customProcName != null) { String h = java; File f = new File(h); if (f.exists()) java = f.getParentFile().getParentFile().getAbsolutePath(); else // if we have to copy java, we need to find the path to java java = findJavaFromPath(java); } else // user has given us wrapper.java and we need not rename the exe -> // return user input return java; // we have no java path -> abort if (java == null) return null; // java is now set to a path, it must exist File javaFile = new File(java); if (!javaFile.exists()) return null; java = javaFile.getAbsolutePath() + "/bin"; if (!useJavaw) { java += "/java.exe"; } else java += "/javaw.exe"; if (customProcName != null) { java = copyTotmp(java, customProcName); } if (_logger == null) System.out.println("using java: " + java); else _logger.info("using java: " + java); return java; } /** * Find java from path. * * @param java * the java * * @return the string */ private String findJavaFromPath(String java) { if (java != null) { File javaFile = new File(java); if (javaFile.exists()) return java; } // search java in environment path String[] paths = System.getenv("path").split(";"); for (String path : paths) { if (path.contains("jdk") || path.contains("jre")) { File javaFile = new File(path); if (javaFile.exists()) { return path.substring(0, path.length() - 4); } } } return null; } /** * Copy totmp. * * @param java * the java * @param customProcName * the custom proc name * * @return the string */ private String copyTotmp(String java, String customProcName) { try { boolean isTmp = true; File javaFile = new File(java); File tmpJavaFile = null; try { File fc = new File(customProcName); if (fc.isAbsolute() && fc.getParentFile().exists()) { tmpJavaFile = fc; isTmp = false; } } catch (Exception ex) { // ignore } // Create temp file. customProcName = customProcName.endsWith(".exe") ? customProcName.substring(0, customProcName.length() - 4) : customProcName; String exeName = "java_" + customProcName + "_"; if (tmpJavaFile == null) try { tmpJavaFile = File.createTempFile(exeName, ".exe"); copyFile(javaFile, tmpJavaFile); } catch (Exception ex) { _logger.error("error creating tmp file: " + exeName, ex); } // Delete temp file when program exits. if (!tmpJavaFile.exists()) copyFile(javaFile, tmpJavaFile); if (isTmp) tmpJavaFile.deleteOnExit(); return tmpJavaFile.getAbsolutePath(); } catch (Throwable e) { _logger.error("error copying java: " + java + " -> " + customProcName, e); } return null; } /** * Copy file. * * @param in * the in * @param out * the out * * @throws IOException * Signals that an I/O exception has occurred. 
*/ void copyFile(File in, File out) throws IOException { _logger.info("copying java: " + in.getAbsolutePath() + " -> " + out.getAbsolutePath()); FileChannel inChannel = new FileInputStream(in).getChannel(); FileChannel outChannel = new FileOutputStream(out).getChannel(); try { inChannel.transferTo(0, inChannel.size(), outChannel); } catch (IOException e) { throw e; } finally { if (inChannel != null) inChannel.close(); if (outChannel != null) outChannel.close(); } } /** * Find java from java home env. * * @return the string */ private String findJavaFromJavaHomeEnv() { return System.getenv("JAVA_HOME"); } /** * Find java in registry. * * @param keys * the keys * @param minVersion * the min version * @param maxVersion * the max version * @param b64bit * the b64bit * * @return the string */ private String findJavaInRegistry(String[] keys, String minVersion, String maxVersion, boolean b64bit) { String[] values = null; String result = null; String resultKey = null; String resultDir = null; minVersion = minVersion == null ? "1.1.0" : minVersion; maxVersion = maxVersion == null ? "99.99.99" : maxVersion; for (String key : keys) { try { values = Registry.getSubKeys(REGISTRY_ROOT_KEY.LOCAL_MACHINE, key); for (String value : values) { String dir = Registry.getStringValue(REGISTRY_ROOT_KEY.LOCAL_MACHINE, key + "\\" + value, "JavaHome"); boolean exists = false; try { exists = dir != null && new File(dir).exists(); } catch (Exception ex) { System.out.println("wrong registry key value: " + dir); } if (exists && value.compareTo(maxVersion) <= 0 && value.compareTo(minVersion) >= 0) { if (result == null) { result = value; resultKey = key; resultDir = dir; } else if (value.compareTo(result) >= 0) { result = value; resultKey = key; resultDir = dir; } } } } catch (Exception e) { e.printStackTrace(); } } return resultDir; } /** * The main method. * * @param args * the arguments */ public static void main(String[] args) { Configuration conf = new BaseConfiguration(); conf.setProperty("wrapper.java.command", "java"); WindowsJavaHome javaHome = new WindowsJavaHome(conf); System.out.println(javaHome.findJava(conf.getString("wrapper.java.command"), conf.getString("wrapper.java.command"))); conf.setProperty("wrapper.java.customProcName", "test"); javaHome = new WindowsJavaHome(conf); System.out.println(javaHome.findJava(conf.getString("wrapper.java.command"), conf.getString("wrapper.java.command"))); conf.setProperty("wrapper.java.command", "javaw"); javaHome = new WindowsJavaHome(conf); System.out.println(javaHome.findJava(conf.getString("wrapper.java.command"), conf.getString("wrapper.java.command"))); conf.clear(); conf.setProperty("wrapper.java.minversion", "1.5.0"); conf.setProperty("wrapper.java.maxversion", "1.5.99"); conf.setProperty("wrapper.java.customProcName", "test"); javaHome = new WindowsJavaHome(conf); System.out.println(javaHome.findJava(conf.getString("wrapper.java.command"), conf.getString("wrapper.java.command"))); conf.clear(); conf.setProperty("wrapper.java.minversion", "1.6.0"); conf.setProperty("wrapper.java.customProcName", "test"); conf.setProperty("wrapper.java.preferJdk", true); javaHome = new WindowsJavaHome(conf); System.out.println(javaHome.findJava(conf.getString("wrapper.java.command"), conf.getString("wrapper.java.command"))); } }
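// Side note (not part of the original sources): on Java 7+ the channel-based copyFile above
// can be replaced by java.nio.file.Files.copy, which handles opening and closing internally.
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;

class FileCopySketch {

    static void copyFile(File in, File out) throws IOException {
        // REPLACE_EXISTING mirrors the original behaviour of overwriting an existing target file.
        Files.copy(in.toPath(), out.toPath(), StandardCopyOption.REPLACE_EXISTING);
    }
}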
package io.logspace.agent.scheduling; import io.logspace.agent.api.order.AgentOrder; /** * The agent executor is responsible for the time-based execution of its methods. */ public interface AgentExecutor { /** * Time-based execution of the agent with its order. * * @param agentOrder - The {@link AgentOrder} of the {@link io.logspace.agent.api.Agent Agent} to execute. */ void executeScheduledAgent(AgentOrder agentOrder); /** * Time-based execution of updates. */ void update(); }
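// Illustrative sketch (not part of the original sources): a minimal AgentExecutor that only
// logs the two callbacks defined by the interface above. What a real executor does with the
// AgentOrder depends on the concrete agent; the println bodies are placeholders.
import io.logspace.agent.api.order.AgentOrder;
import io.logspace.agent.scheduling.AgentExecutor;

class LoggingAgentExecutor implements AgentExecutor {

    @Override
    public void executeScheduledAgent(AgentOrder agentOrder) {
        // Called by the scheduler whenever the order's trigger fires.
        System.out.println("executing scheduled agent for order: " + agentOrder);
    }

    @Override
    public void update() {
        // Called by the scheduler for time-based updates.
        System.out.println("time-based update requested");
    }
}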
package org.ops4j.pax.web.extender.samples.war.dispatch.jsp; import java.io.IOException; import javax.servlet.RequestDispatcher; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequestWrapper; import javax.servlet.http.HttpServletResponse; public class ExampleServlet extends HttpServlet { protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { RequestDispatcher rd = super.getServletContext().getNamedDispatcher( "jsp"); rd.forward(new HttpServletRequestFilter(request, "/subjsp/test.jsp"), response); } private static class HttpServletRequestFilter extends HttpServletRequestWrapper { private String pathInfo; public HttpServletRequestFilter(HttpServletRequest request, String pathInfo) { super(request); this.pathInfo = pathInfo; } public String getServletPath() { return "/"; } public String getPathInfo() { return pathInfo; } } }
package org.eclipse.che.selenium.pageobject.dashboard; import static java.lang.String.format; import static org.eclipse.che.selenium.core.constant.TestTimeoutsConstants.EXPECTED_MESS_IN_CONSOLE_SEC; import static org.eclipse.che.selenium.core.constant.TestTimeoutsConstants.LOAD_PAGE_TIMEOUT_SEC; import static org.eclipse.che.selenium.core.constant.TestTimeoutsConstants.REDRAW_UI_ELEMENTS_TIMEOUT_SEC; import com.google.inject.Inject; import com.google.inject.Singleton; import java.util.List; import org.eclipse.che.selenium.core.SeleniumWebDriver; import org.eclipse.che.selenium.core.constant.TestTimeoutsConstants; import org.eclipse.che.selenium.core.utils.WaitUtils; import org.eclipse.che.selenium.pageobject.Loader; import org.openqa.selenium.By; import org.openqa.selenium.WebDriver; import org.openqa.selenium.WebElement; import org.openqa.selenium.support.FindBy; import org.openqa.selenium.support.PageFactory; import org.openqa.selenium.support.ui.ExpectedConditions; import org.openqa.selenium.support.ui.WebDriverWait; /** * @author Alex * @author Andrey Chizhikov */ @Singleton public class DashboardWorkspace { private final SeleniumWebDriver seleniumWebDriver; private final Loader loader; private final Dashboard dashboard; private final DashboardProject dashboardProject; public interface TabNames { String OVERVIEW = "Overview"; String PROJECTS = "Projects"; String MACHINES = "Machines"; String SERVERS = "Servers"; String AGENTS = "Agents"; String ENV_VARIABLES = "Env Variables"; String CONFIG = "Config"; String SSH = "Ssh"; String SHARE = "Share"; String STACK_LIBRARY = "Stack library"; String STACK_IMPORT = "Stack import"; String STACK_AUTHORING = "Stack authoring"; } // names of ready-to-go stacks public interface ReadyGoToStacks { String JAVA = "java-default"; String NODE = "node-default"; String JAVA_MYSQL = "java-mysql"; } public interface RecipeTypeBtn { String COMPOSE = "compose"; String DOCKERFILE = "dockerfile"; String MACHINES_WARNING_MSG = "The environment should contain exactly one dev machine. 
" + "Switch on Dev property to have terminal, SSH and IDE tooling injected to the machine."; } public enum StackLibrary { BITNAMI_CODEIGNITER("Bitnami Codeigniter"), BITNAMI_SYMFONY("Bitnami Symfony"), BITNAMI_PLAY_FOR_JAVA("Bitnami Play for Java"), BITNAMI_RAILS("Bitnami Rails"), BITNAMI_EXPRESS("Bitnami Express"), BITNAMI_LARAVEL("Bitnami Laravel"), BITNAMI_SWIFT("Bitnami Swift"); private String name; StackLibrary(String name) { this.name = name; } public String getName() { return name; } } private interface Locators { String NEW_WORKSPACE_BTN = "//a[contains(@ng-href, 'create-workspace')]/span[text()='Add Workspace']"; String TOOLBAR_TITLE_NAME = "//div[contains(@class,'che-toolbar')]//span[contains(text(),'%s')]"; String WORKSPACES_LIST = "//ng-transclude[@class='che-list-content']"; String WORKSPACE_ITEM_NAME = "//div[contains(@class, 'workspace-name-clip')]//div[contains(@data-str, '%s')]"; String EXPORT_WORKSPACE_BTN = "//button[contains(@class, 'che-button')]/span[text()='Export as a file']"; String DOWNLOAD_WORKSPACE_BTN = "//che-button-default[@che-button-title='download']"; String CLIPBOARD_JSON_WS_BTN = "//che-button-default[@che-button-title='clipboard']"; String HIDE_JSON_WS_BTN = "//span[text()='Close']"; String WORKSPACE_JSON_CONTENT = "//div[@class='CodeMirror-code']"; String IMPORT_WORKSPACE_BTN = "//md-radio-button[@aria-label='Import an existing workspace configuration']"; String RECIPE_TYPE_BUTTON = "//button/span[text()='%s']"; String RECIPE_URL_FIELD = "//input[@placeholder='URL of the Recipe']"; String OPEN_IN_IDE_WS_BTN = "//che-button-default[@che-button-title='Open']"; String STACK_NAME_XPATH = "//md-card[contains(@ng-class,'%s')]"; String NAME_WORKSPACE_INPUT = "//input[@placeholder='Name of the workspace *']"; String RUN_WORKSPACE_BTN = "//button/span[text()='Run']"; String STOP_WORKSPACE_BTN = "//button/span[contains(text(),'Stop')]"; String DELETE_WORKSPACE_BTN = "//button/span[text()='Delete']"; String STATE_WORKSPACE = "//div[contains(@class, 'workspace-status')]/span[text()='%s']"; String WORKSPACE_TITLE = "//div[contains(@class,'toolbar-info')]/span[text()='%s']"; String DELETE_BTN_DIALOG_WIND = "//button[@ng-click='cheConfirmDialogController.hide()']//span[text()='Delete']"; String CREATE_WS_FROM_STACK = "//md-radio-button[@aria-label='Create new workspace from stack']"; String STACK_LIBRARY_ITEM = "//div[text()='%s']"; String RESENT_WS_NAVBAR = "//div[@class='admin-navbar-menu recent-workspaces']"; String LEFT_SIDE_BAR = "//div[@class='left-sidebar-container']"; String ADD_DEVLOPER_BTN = "//span[text()='Add Developer']"; String REMOVE_DEVELOPER_ICON = "//span[text()='%s']//following::div[@tooltip='Remove member'][1]"; String INPUT_SHARE_DIALOG = "//md-chips[contains(@class,'share-user-input')]//input"; String SHARE_BTN_DIALOG = "//che-button-primary[@aria-disabled='false']//span[text()='Share']"; String DEVELOPER_SHARE_ITEM = "//span[text()='%s']"; String RUNTIME_TAB = "//span[text()='Runtime']"; String DEV_MACHINE = "//div[@class='workspace-machine-config']//div[text()='dev-machine']"; String WARNING_MSG = "//div[@class='workspace-environments-input']//div[text()='%s']"; String CONFIG_MACHINE_SWITCH = "//div[text()='%s']//following::div[@class='config-dev-machine-switch ng-scope'][1]"; String TAB_NAMES_IN_WS = "//md-pagination-wrapper//span[text()='%s']"; String RECIPE_EDITOR = "//div[contains(@class, 'recipe-editor')]//div[@class='CodeMirror-code']"; String WARNING_DIALOG_DELETE = "//div[@class='ng-binding' and text()=\"%s\"]"; String 
WS_TIMEOUT_MESSAGE = "//div[@che-label-name='Idle timeout']//div[@ng-transclude]"; String SAVE_BUTTON = "//che-button-save-flat//span[text()='Save']"; String CANCEL_BUTTON = "//che-button-cancel-flat//span[text()='Cancel']"; String DELETE_BUTTON = "//che-button-primary[@che-button-title='Delete']/button"; String ADD_MACHINE_BUTTON = "//che-button-primary[@che-button-title = 'Add machine']/button"; String RAM_WORKSPACE = "//input[contains(@name, 'memory')]"; String MACHINE_NAME = "//span[@machine-name='%s']"; String MACHINE_IMAGE = "//span[@machine-image='%s']"; String EDIT_MACHINE = "//div[@edit-machine='%s']"; String DELETE_MACHINE = "//div[@delete-machine='%s']"; String NEW_MACHINE_NAME = "//div[@che-form='editMachineForm']//input"; String EDIT_MACHINE_NAME_BUTTON = "//che-button-primary[@che-button-title='Edit']/button"; String EDIT_MACHINE_DIALOG_NAME = "//md-dialog/che-popup[@title='Edit the machine']"; String REMOVE_MACHINE_DIALOG_NAME = "//md-dialog/che-popup[@title='Remove machine']"; String ADD_MACHINE_DIALOG_NAME = "//md-dialog/che-popup[@title='Add a new machine']"; String CANCEL_DIALOG_BUTTON = "//md-dialog[@role='dialog']//button/span[text()='Cancel']"; String CLOSE_DIALOG_BUTTON = "//md-dialog[@role='dialog']//button/span[text()='Close']"; String DELETE_DIALOG_BUTTON = "//md-dialog[@role='dialog']//button/span[text()='Delete']"; String UPDATE_DIALOG_BUTTON = "//md-dialog[@role='dialog']//button/span[text()='Update']"; String ADD_DIALOG_BUTTON = "//md-dialog[@role='dialog']//button/span[text()='Add']"; String MACHINE_BUTTON = "//che-machine-selector[@content-title='%s']//toggle-single-button[@id = 'workspace-machine-%s']//span"; String ENV_VARIABLE = "env-variable-name-%s"; String ADD_ENV_VARIABLE_BUTTON = "//che-button-primary[@che-button-title='Add Env Variable']/button"; String ADD_ENV_VARIABLE_DIALOG_NAME = "//md-dialog/che-popup[@title='Add a new environment variable']"; String VARIABLE_CHECKBOX = "//md-checkbox[@aria-label='Environment-Variable-%s']"; String VARIABLE_NAME = "//span[@variable-name='%s']"; String VARIABLE_VALUE = "//div[@id='env-variable-name-%s']//span[@variable-value='%s']"; String NEW_ENV_VARIABLE_NAME = "variable-name-input"; String NEW_ENV_VARIABLE_VALUE = "//textarea[@name='deskvalue']"; String EDIT_ENV_VARIABLE = "//div[@edit-variable='%s']"; String DELETE_ENV_VARIABLE = "//div[@delete-variable='%s']"; String SERVER = "server-name-%s"; String SERVER_REFERENCE = "//span[@server-reference='%s']"; String SERVER_PORT = "//div[@id='server-name-%s']//span[@server-port='%s']"; String SERVER_PROTOCOL = "//div[@id='server-name-%s']//span[@server-protocol='%s']"; String EDIT_SERVER_BUTTON = "//div[@edit-server='%s']"; String DELETE_SERVER_BUTTON = "//div[@delete-server='%s']"; String ADD_SERVER_BUTTON = "//che-button-primary[@che-button-title='Add Server']/button"; String ADD_NEW_SERVER_DIALOG_NAME = "//md-dialog/che-popup[@title='Add a new server']"; String ADD_SERVER_REFERENCE_FIELD = "server-reference-input"; String ADD_SERVER_PORT_FIELD = "server-port-input"; String ADD_SERVER_PROTOCOL_FIELD = "server-protocol-input"; String AGENT_NAME = "//span[@agent-name='%s']"; String AGENT_DESCRIPTION = "//span[@agent-description='%s']"; String AGENT_STATE = "//md-switch[@agent-switch='%s']"; String ADD_NEW_PROJECT_BUTTON = "//che-button-primary[@che-button-title='Add Project']/button"; String ADD_PROJECT_BUTTON = "//che-button-primary[@name='addButton']/button"; String SAMPLE_CHECKBOX_XPATH = "//md-checkbox[@aria-label='Sample %s']"; } public enum StateWorkspace { 
STOPPED("stopped"), STARTING("starting"), RUNNING("running"), STOPPING("stopping"); private final String status; StateWorkspace(String status) { this.status = status; } public String getStatus() { return status; } } @Inject public DashboardWorkspace( SeleniumWebDriver seleniumWebDriver, Loader loader, Dashboard dashboard, DashboardProject dashboardProject) { this.seleniumWebDriver = seleniumWebDriver; this.loader = loader; this.dashboard = dashboard; this.dashboardProject = dashboardProject; PageFactory.initElements(seleniumWebDriver, this); } @FindBy(xpath = Locators.NEW_WORKSPACE_BTN) WebElement newWorkspaceBtn; @FindBy(xpath = Locators.WORKSPACES_LIST) WebElement listOfWorkspaces; @FindBy(xpath = Locators.EXPORT_WORKSPACE_BTN) WebElement exportWsButton; @FindBy(xpath = Locators.DOWNLOAD_WORKSPACE_BTN) WebElement downloadWsJsonBtn; @FindBy(xpath = Locators.CLIPBOARD_JSON_WS_BTN) WebElement clipboardWsJsonBtn; @FindBy(xpath = Locators.HIDE_JSON_WS_BTN) WebElement hideJsonWsBtn; @FindBy(xpath = Locators.IMPORT_WORKSPACE_BTN) WebElement importWsRadioBtn; @FindBy(xpath = Locators.WORKSPACE_JSON_CONTENT) WebElement workspaceJsonContent; @FindBy(xpath = Locators.RECIPE_URL_FIELD) WebElement recipeUrlField; @FindBy(xpath = Locators.OPEN_IN_IDE_WS_BTN) WebElement openInIdeWsBtn; @FindBy(xpath = Locators.NAME_WORKSPACE_INPUT) WebElement nameWorkspaceInput; @FindBy(xpath = Locators.RUN_WORKSPACE_BTN) WebElement runWorkspaceBtn; @FindBy(xpath = Locators.STOP_WORKSPACE_BTN) WebElement stopWorkspaceBtn; @FindBy(xpath = Locators.DELETE_WORKSPACE_BTN) WebElement deleteWorkspaceBtn; @FindBy(xpath = Locators.DELETE_BTN_DIALOG_WIND) WebElement deleteItBtn; @FindBy(xpath = Locators.RAM_WORKSPACE) WebElement ramWorkspace; @FindBy(xpath = Locators.SAVE_BUTTON) WebElement saveBtn; @FindBy(xpath = Locators.DELETE_BUTTON) WebElement deleteBtn; @FindBy(xpath = Locators.CANCEL_BUTTON) WebElement cancelBtn; @FindBy(xpath = Locators.WS_TIMEOUT_MESSAGE) WebElement wsTimeotMessage; public void waitToolbarTitleName(String titleName) { new WebDriverWait(seleniumWebDriver, TestTimeoutsConstants.LOADER_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(format(Locators.TOOLBAR_TITLE_NAME, titleName)))); } public void clickOnNewWorkspaceBtn() { dashboard.waitNotificationIsClosed(); new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.visibilityOf(newWorkspaceBtn)) .click(); } public void waitListWorkspacesOnDashboard() { new WebDriverWait(seleniumWebDriver, LOAD_PAGE_TIMEOUT_SEC) .until(ExpectedConditions.visibilityOf(listOfWorkspaces)); } public void waitExpTextFromListWsOnDashboard(String expText) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until((WebDriver driver) -> getTextFromListWorkspaces().contains(expText)); } public String getTextFromListWorkspaces() { return listOfWorkspaces.getText(); } public void selectWorkspaceItemName(String wsName) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(format(Locators.WORKSPACE_ITEM_NAME, wsName)))) .click(); } public void clickExportWorkspaceBtn() { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.visibilityOf(exportWsButton)) .click(); } public void waitDownloadWorkspaceJsonFileBtn() { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.elementToBeClickable(downloadWsJsonBtn)); } public void 
waitClipboardWorkspaceJsonFileBtn() { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.elementToBeClickable(clipboardWsJsonBtn)); } public void clickOnHideWorkspaceJsonFileBtn() { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.visibilityOf(hideJsonWsBtn)) .click(); } public void clickIntoWorkspaceJsonContent() { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.visibilityOf(workspaceJsonContent)) .click(); } public void clickOnImportWorkspaceBtn() { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.visibilityOf(importWsRadioBtn)) .click(); } public void selectRecipeType(String recipeType) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(format(Locators.RECIPE_TYPE_BUTTON, recipeType)))) .click(); } /** set the focus into 'recipe-editor' form in the 'Stack authoring' */ public void clickIntoWorkspaceRecipeEditor() { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.visibilityOfElementLocated(By.xpath(Locators.RECIPE_EDITOR))) .click(); } /** * wait the warning message when there is two or more machines * * @param mess is the message into workspace machine config */ public void waitWarningMessage(String mess) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(String.format(Locators.WARNING_MSG, mess)))); } /** * switch the config machine * * @param nameMachine is the machine name */ public void switchConfigMachine(String nameMachine) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(String.format(Locators.CONFIG_MACHINE_SWITCH, nameMachine)))) .click(); } public void waitCustomRecipeUrlField() { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.visibilityOf(recipeUrlField)); } public void typeCustomRecipeUrl(String nameOfWs) { waitCustomRecipeUrlField(); recipeUrlField.clear(); recipeUrlField.sendKeys(nameOfWs); } /** wait 'Open in IDE' btn on All workspaces page' */ public void waitOpenInIdeWsEBtn() { new WebDriverWait(seleniumWebDriver, EXPECTED_MESS_IN_CONSOLE_SEC) .until(ExpectedConditions.visibilityOf(openInIdeWsBtn)); } /** click 'Open in IDE btn on All workspaces page' */ public void clickOpenInIdeWsBtn() { dashboard.waitNotificationIsClosed(); waitOpenInIdeWsEBtn(); new WebDriverWait(seleniumWebDriver, EXPECTED_MESS_IN_CONSOLE_SEC) .until(ExpectedConditions.elementToBeClickable(openInIdeWsBtn)) .click(); } /** * select defined stack from 'READY-TO-GO STACKS' tab * * @param stackName name of stack from 'READY-TO-GO STACKS' */ public void selectReadyToGoStack(String stackName) { new WebDriverWait(seleniumWebDriver, EXPECTED_MESS_IN_CONSOLE_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(format(Locators.STACK_NAME_XPATH, stackName)))) .click(); } public void enterNameWorkspace(String nameWorkspace) { new WebDriverWait(seleniumWebDriver, EXPECTED_MESS_IN_CONSOLE_SEC) .until(ExpectedConditions.visibilityOf(nameWorkspaceInput)) .clear(); new WebDriverWait(seleniumWebDriver, EXPECTED_MESS_IN_CONSOLE_SEC) .until(ExpectedConditions.visibilityOf(nameWorkspaceInput)) .sendKeys(nameWorkspace); new WebDriverWait(seleniumWebDriver, 
EXPECTED_MESS_IN_CONSOLE_SEC) .until(ExpectedConditions.textToBePresentInElementValue(nameWorkspaceInput, nameWorkspace)); } /** * enter value RAM of workspace * * @param ram is value of RAM */ public void enterRamWorkspace(int ram) { String ramValue = "//input[@name='memory']"; new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.visibilityOf(ramWorkspace)) .clear(); new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.visibilityOf(ramWorkspace)) .sendKeys(String.valueOf(ram)); new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.textToBePresentInElementLocated( By.xpath(ramValue), String.valueOf(ram))); } /** * Check state of workspace in 'Workspace Information' * * @param stateWorkspace expected state of workspace */ public void checkStateOfWorkspace(StateWorkspace stateWorkspace) { new WebDriverWait(seleniumWebDriver, EXPECTED_MESS_IN_CONSOLE_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(format(Locators.STATE_WORKSPACE, stateWorkspace.getStatus())))); } /** click on 'RUN' button in 'Workspace Information' */ public void clickOnRunWorkspace() { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.elementToBeClickable(runWorkspaceBtn)) .click(); } /** click on 'STOP' button in 'Workspace Information' */ public void clickOnStopWorkspace() { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.elementToBeClickable(stopWorkspaceBtn)) .click(); } /** click on 'DELETE' button in 'Delete workspace' */ public void clickOnDeleteWorkspace() { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.elementToBeClickable(deleteWorkspaceBtn)) .click(); } /** * Check name of workspace in 'Workspace' tab * * @param nameWorkspace expected name of workspace */ public void checkNameWorkspace(String nameWorkspace) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(format(Locators.WORKSPACE_TITLE, nameWorkspace)))); } /** Click on the delete/remove button in the dialog window */ public void clickOnDeleteItInDialogWindow() { WaitUtils.sleepQuietly(1); new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.elementToBeClickable(deleteItBtn)) .click(); } /** wait the workspace is not present on dashboard */ public void waitWorkspaceIsNotPresent(String nameWorkspace) { new WebDriverWait(seleniumWebDriver, TestTimeoutsConstants.ELEMENT_TIMEOUT_SEC) .until( ExpectedConditions.invisibilityOfElementLocated( By.xpath(format(Locators.WORKSPACE_ITEM_NAME, nameWorkspace)))); } /** Select 'Create new workspace from stack' on the 'New Workspace' page */ public void selectCreateNewWorkspaceFromStack() { loader.waitOnClosed(); new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated(By.xpath(Locators.CREATE_WS_FROM_STACK))) .click(); } /** * Select stack library by name * * @param stackLibrary name of stack */ public void selectStackLibrary(StackLibrary stackLibrary) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(format(Locators.STACK_LIBRARY_ITEM, stackLibrary.getName())))) .click(); } public void clickOnSaveBtn() { loader.waitOnClosed(); new WebDriverWait(seleniumWebDriver, 
REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.visibilityOf(saveBtn)) .click(); } public void clickOnDeleteBtn() { loader.waitOnClosed(); new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.visibilityOf(deleteBtn)) .click(); } public void clickOnCancelBtn() { loader.waitOnClosed(); new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.visibilityOf(cancelBtn)) .click(); } /** Return true if workspaces present on the navigation panel */ public boolean workspacesIsPresent() { new WebDriverWait(seleniumWebDriver, LOAD_PAGE_TIMEOUT_SEC) .until(ExpectedConditions.visibilityOfElementLocated(By.xpath(Locators.LEFT_SIDE_BAR))); List<WebElement> workspaces = seleniumWebDriver.findElements(By.xpath(Locators.RESENT_WS_NAVBAR)); return !(workspaces.size() == 0); } /** * Adds developer to share list * * @param email is an email of developer into sharing list */ public void addDeveloperToShareList(String email) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.visibilityOfElementLocated(By.xpath(Locators.ADD_DEVLOPER_BTN))) .click(); new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.visibilityOfElementLocated(By.xpath(Locators.INPUT_SHARE_DIALOG))) .sendKeys(email + ","); new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.visibilityOfElementLocated(By.xpath(Locators.SHARE_BTN_DIALOG))) .click(); } /** * Delete a developer from sharing list * * @param email is an email of developer into sharing list */ public void deleteDeveloperFromShareList(String email) { new WebDriverWait(seleniumWebDriver, LOAD_PAGE_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(String.format(Locators.REMOVE_DEVELOPER_ICON, email)))) .click(); } /** Wait the text into warning dialog delete or remove */ public void waitTextInWarningDialogDelete(String expText) { new WebDriverWait(seleniumWebDriver, LOAD_PAGE_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(String.format(Locators.WARNING_DIALOG_DELETE, expText)))); } /** * Wait the email of developer is present in 'Share' tab * * @param email the email of developer */ public void waitDeveloperIsPresentInShareTab(String email) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(format(Locators.DEVELOPER_SHARE_ITEM, email)))); } /** * Wait the email of developer is not present in 'Share' tab * * @param email the email of developer */ public void waitDeveloperIsNotPresentInShareTab(String email) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.invisibilityOfElementLocated( By.xpath(format(Locators.DEVELOPER_SHARE_ITEM, email)))); } /** Select 'Runtime' tab in workspace menu */ public void selectRuntimeTab() { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.visibilityOfElementLocated(By.xpath(Locators.RUNTIME_TAB))) .click(); } /** Expand 'DEV-MACHINE' settings on the 'Runtime' tab */ public void expandDevMachineSettings() { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.visibilityOfElementLocated(By.xpath(Locators.DEV_MACHINE))) .click(); } /** * Select tab into workspace menu * * @param tabName is the tab name into workspace menu */ public void selectTabInWorspaceMenu(String 
tabName) { loader.waitOnClosed(); new WebDriverWait(seleniumWebDriver, LOAD_PAGE_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(String.format(Locators.TAB_NAMES_IN_WS, tabName)))) .click(); } public void clickNewProjectButtonOnDashboard() throws Exception { dashboard.clickOnNewProjectLinkOnDashboard(); } public void createNewWorkspaceFromStackLibrary( StackLibrary stackLibrary, String workspaceName, String projectName) { selectCreateNewWorkspaceFromStack(); selectTabInWorspaceMenu(DashboardWorkspace.TabNames.STACK_LIBRARY); selectStackLibrary(stackLibrary); enterNameWorkspace(workspaceName); dashboardProject.selectTemplateProject(projectName); dashboardProject.clickOnCreateProject(); } public String getWsTimeoutMessage() { return new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.visibilityOf(wsTimeotMessage)) .getText(); } public void waitWsTimeoutMessage(String expectedMessage) { new WebDriverWait(seleniumWebDriver, LOAD_PAGE_TIMEOUT_SEC) .until( (WebDriver webdriver) -> webdriver.findElement(By.xpath(Locators.WS_TIMEOUT_MESSAGE))) .getText() .contains(expectedMessage); } /** * Check if machine exists in machines list * * @param machineName the name of machine */ public void checkMachineExists(String machineName) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(format(Locators.MACHINE_NAME, machineName)))); loader.waitOnClosed(); } public void checkMachineIsNotExists(String machineName) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.invisibilityOfElementLocated( By.xpath(format(Locators.MACHINE_NAME, machineName)))); loader.waitOnClosed(); } public void clickOnAddMachineButton() { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.visibilityOfElementLocated(By.xpath(Locators.ADD_MACHINE_BUTTON))) .click(); } /** * Click on the Edit Machine button * * @param machineName the name of machine */ public void clickOnEditMachineButton(String machineName) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(format(Locators.EDIT_MACHINE, machineName)))) .click(); } /** * Click on the Delete Machine button * * @param machineName the name of machine */ public void clickOnDeleteMachineButton(String machineName) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(format(Locators.DELETE_MACHINE, machineName)))) .click(); } /** Check that the Add New Machine dialog is opened */ public void checkAddNewMachineDialogIsOpen() { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(Locators.ADD_MACHINE_DIALOG_NAME))); } /** Check that the Edit Machine dialog is opened */ public void checkEditTheMachineDialogIsOpen() { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(Locators.EDIT_MACHINE_DIALOG_NAME))); } public void setMachineNameInDialog(String machineName) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.visibilityOfElementLocated(By.xpath(Locators.NEW_MACHINE_NAME))) .clear(); new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) 
.until(ExpectedConditions.visibilityOfElementLocated(By.xpath(Locators.NEW_MACHINE_NAME))) .sendKeys(machineName); } public void clickOnEditDialogButton() { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(Locators.EDIT_MACHINE_NAME_BUTTON))) .click(); } public void clickOnCloseDialogButton() { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated(By.xpath(Locators.CLOSE_DIALOG_BUTTON))) .click(); } public void clickOnCancelDialogButton() { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated(By.xpath(Locators.CANCEL_DIALOG_BUTTON))) .click(); } public void clickOnDeleteDialogButton() { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated(By.xpath(Locators.DELETE_DIALOG_BUTTON))) .click(); } public void clickOnUpdateDialogButton() { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated(By.xpath(Locators.UPDATE_DIALOG_BUTTON))) .click(); } public void clickOnAddDialogButton() { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.visibilityOfElementLocated(By.xpath(Locators.ADD_DIALOG_BUTTON))) .click(); } public void clickOnAddServerButton() { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.visibilityOfElementLocated(By.xpath(Locators.ADD_SERVER_BUTTON))) .click(); } public void waitAddServerDialogIsOpen() { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(Locators.ADD_NEW_SERVER_DIALOG_NAME))); } public void enterReference(String name) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.id(Locators.ADD_SERVER_REFERENCE_FIELD))) .clear(); new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.id(Locators.ADD_SERVER_REFERENCE_FIELD))) .sendKeys(name); } public void enterPort(String name) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.visibilityOfElementLocated(By.id(Locators.ADD_SERVER_PORT_FIELD))) .clear(); new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.visibilityOfElementLocated(By.id(Locators.ADD_SERVER_PORT_FIELD))) .sendKeys(name); } public void enterProtocol(String protocol) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.id(Locators.ADD_SERVER_PROTOCOL_FIELD))) .clear(); new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.id(Locators.ADD_SERVER_PROTOCOL_FIELD))) .sendKeys(protocol); } public void checkServerName(String serverName) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(format(Locators.SERVER_REFERENCE, serverName)))); } public void checkServerExists(String serverName, String port) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(format(Locators.SERVER_PORT, serverName, port)))); 
loader.waitOnClosed(); } public void checkServerIsNotExists(String serverName, String port) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.invisibilityOfElementLocated( By.xpath(format(Locators.SERVER_PORT, serverName, port)))); loader.waitOnClosed(); } public void clickOnDeleteServerButton(String serverName) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(format(Locators.DELETE_SERVER_BUTTON, serverName)))) .click(); } public void clickOnEditServerButton(String serverName) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(format(Locators.EDIT_SERVER_BUTTON, serverName)))) .click(); } public void selectMachine(String tabName, String machineName) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(format(Locators.MACHINE_BUTTON, tabName, machineName)))) .click(); } public void checkAgentExists(String agentName) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(format(Locators.AGENT_NAME, agentName)))); } public void switchAgentState(String agentName) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(format(Locators.AGENT_STATE, agentName)))) .click(); } public Boolean getAgentState(String agentName) { String state = new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(format(Locators.AGENT_STATE, agentName)))) .getAttribute("aria-checked"); return Boolean.parseBoolean(state); } public String checkAgentDescription(String agentName) { return new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(format(Locators.AGENT_DESCRIPTION, agentName)))) .getText(); } public void clickOnAddEnvVariableButton() { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(Locators.ADD_ENV_VARIABLE_BUTTON))) .click(); } public void checkAddNewEnvVarialbleDialogIsOpen() { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(Locators.ADD_ENV_VARIABLE_DIALOG_NAME))); } public void enterEnvVariableName(String name) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.visibilityOfElementLocated(By.id(Locators.NEW_ENV_VARIABLE_NAME))) .clear(); new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.visibilityOfElementLocated(By.id(Locators.NEW_ENV_VARIABLE_NAME))) .sendKeys(name); } public void enterEnvVariableValue(String value) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(Locators.NEW_ENV_VARIABLE_VALUE))) .clear(); new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(Locators.NEW_ENV_VARIABLE_VALUE))) .sendKeys(value); } public void addNewEnvironmentVariable(String name, String value) { enterEnvVariableName(name); enterEnvVariableValue(value); } public Boolean 
checkEnvVariableExists(String varName) { return new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(format(Locators.VARIABLE_NAME, varName)))) .getText() .equals(varName); } public Boolean checkValueExists(String varName, String varValue) { loader.waitOnClosed(); return new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(format(Locators.VARIABLE_VALUE, varName, varValue)))) .isDisplayed(); } public void checkValueIsNotExists(String varName, String varValue) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.invisibilityOfElementLocated( By.xpath(format(Locators.VARIABLE_VALUE, varName, varValue)))); loader.waitOnClosed(); } public void clickOnDeleteEnvVariableButton(String varName) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(format(Locators.DELETE_ENV_VARIABLE, varName)))) .click(); } public void clickOnEditEnvVariableButton(String varName) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(format(Locators.EDIT_ENV_VARIABLE, varName)))) .click(); } public void clickOnEnvVariableCheckbox(String varName) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(format(Locators.VARIABLE_CHECKBOX, varName)))) .click(); } public void clickOnAddNewProjectButton() { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(Locators.ADD_NEW_PROJECT_BUTTON))) .click(); } public void clickOnAddProjects() { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until(ExpectedConditions.visibilityOfElementLocated(By.xpath(Locators.ADD_PROJECT_BUTTON))) .click(); } public void selectSample(String name) { new WebDriverWait(seleniumWebDriver, REDRAW_UI_ELEMENTS_TIMEOUT_SEC) .until( ExpectedConditions.visibilityOfElementLocated( By.xpath(format(Locators.SAMPLE_CHECKBOX_XPATH, name)))) .click(); } }
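// --------------------------------------------------------------------------------------------
// Hedged usage sketch (added for illustration, not part of the Che sources above): shows how a
// Selenium test might drive the DashboardWorkspace page object to stop and delete a workspace.
// The wrapper class name, the constructor wiring and the workspace name are hypothetical; the
// methods and the StateWorkspace enum are the ones defined in the page object above, which in
// the real tests is obtained through Guice injection rather than a constructor argument.
class DashboardWorkspaceUsageSketch {
  private final DashboardWorkspace dashboardWorkspace;

  DashboardWorkspaceUsageSketch(DashboardWorkspace dashboardWorkspace) {
    this.dashboardWorkspace = dashboardWorkspace; // normally provided by the test injector
  }

  void stopAndDeleteWorkspace(String wsName) {
    // Open the workspace details page from the dashboard list.
    dashboardWorkspace.waitListWorkspacesOnDashboard();
    dashboardWorkspace.selectWorkspaceItemName(wsName);
    dashboardWorkspace.waitToolbarTitleName(wsName);
    // Stop the workspace and wait for the 'stopped' state in 'Workspace Information'.
    dashboardWorkspace.clickOnStopWorkspace();
    dashboardWorkspace.checkStateOfWorkspace(DashboardWorkspace.StateWorkspace.STOPPED);
    // Delete it, confirm the dialog, and verify it disappears from the workspace list.
    dashboardWorkspace.clickOnDeleteWorkspace();
    dashboardWorkspace.clickOnDeleteItInDialogWindow();
    dashboardWorkspace.waitWorkspaceIsNotPresent(wsName);
  }
}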
package org.sagebionetworks.repo.web.migration; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.when; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.UUID; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mockito; import org.sagebionetworks.bridge.manager.participantdata.ParticipantDataIdMappingManagerImpl; import org.sagebionetworks.bridge.model.BridgeParticipantDAO; import org.sagebionetworks.bridge.model.BridgeUserParticipantMappingDAO; import org.sagebionetworks.bridge.model.Community; import org.sagebionetworks.bridge.model.CommunityTeamDAO; import org.sagebionetworks.bridge.model.ParticipantDataDAO; import org.sagebionetworks.bridge.model.ParticipantDataDescriptorDAO; import org.sagebionetworks.bridge.model.ParticipantDataStatusDAO; import org.sagebionetworks.bridge.model.data.ParticipantDataColumnDescriptor; import org.sagebionetworks.bridge.model.data.ParticipantDataColumnType; import org.sagebionetworks.bridge.model.data.ParticipantDataDescriptor; import org.sagebionetworks.bridge.model.data.ParticipantDataRepeatType; import org.sagebionetworks.bridge.model.data.ParticipantDataRow; import org.sagebionetworks.bridge.model.data.ParticipantDataStatus; import org.sagebionetworks.bridge.model.data.value.ParticipantDataStringValue; import org.sagebionetworks.bridge.model.data.value.ParticipantDataValue; import org.sagebionetworks.evaluation.model.Evaluation; import org.sagebionetworks.evaluation.model.EvaluationStatus; import org.sagebionetworks.evaluation.model.Submission; import org.sagebionetworks.repo.manager.StorageQuotaManager; import org.sagebionetworks.repo.manager.UserManager; import org.sagebionetworks.repo.manager.UserProfileManager; import org.sagebionetworks.repo.manager.migration.MigrationManager; import org.sagebionetworks.repo.model.ACCESS_TYPE; import org.sagebionetworks.repo.model.AccessApproval; import org.sagebionetworks.repo.model.AccessRequirement; import org.sagebionetworks.repo.model.AuthenticationDAO; import org.sagebionetworks.repo.model.AuthorizationConstants.BOOTSTRAP_PRINCIPAL; import org.sagebionetworks.repo.model.CommentDAO; import org.sagebionetworks.repo.model.DatastoreException; import org.sagebionetworks.repo.model.FileEntity; import org.sagebionetworks.repo.model.Folder; import org.sagebionetworks.repo.model.GroupMembersDAO; import org.sagebionetworks.repo.model.MembershipInvtnSubmission; import org.sagebionetworks.repo.model.MembershipInvtnSubmissionDAO; import org.sagebionetworks.repo.model.MembershipRqstSubmission; import org.sagebionetworks.repo.model.MembershipRqstSubmissionDAO; import org.sagebionetworks.repo.model.MessageDAO; import org.sagebionetworks.repo.model.ObjectType; import org.sagebionetworks.repo.model.Project; import org.sagebionetworks.repo.model.RestrictableObjectDescriptor; import org.sagebionetworks.repo.model.RestrictableObjectType; import org.sagebionetworks.repo.model.StorageQuotaAdminDao; import org.sagebionetworks.repo.model.Team; import org.sagebionetworks.repo.model.TeamDAO; 
import org.sagebionetworks.repo.model.TermsOfUseAccessApproval; import org.sagebionetworks.repo.model.TermsOfUseAccessRequirement; import org.sagebionetworks.repo.model.UserGroup; import org.sagebionetworks.repo.model.UserGroupDAO; import org.sagebionetworks.repo.model.UserInfo; import org.sagebionetworks.repo.model.auth.NewUser; import org.sagebionetworks.repo.model.bootstrap.EntityBootstrapper; import org.sagebionetworks.repo.model.daemon.BackupRestoreStatus; import org.sagebionetworks.repo.model.daemon.DaemonStatus; import org.sagebionetworks.repo.model.daemon.RestoreSubmission; import org.sagebionetworks.repo.model.dao.FileHandleDao; import org.sagebionetworks.repo.model.dao.table.ColumnModelDAO; import org.sagebionetworks.repo.model.dao.table.TableRowTruthDAO; import org.sagebionetworks.repo.model.dbo.dao.table.TableModelUtils; import org.sagebionetworks.repo.model.file.FileHandle; import org.sagebionetworks.repo.model.file.PreviewFileHandle; import org.sagebionetworks.repo.model.file.S3FileHandle; import org.sagebionetworks.repo.model.jdo.KeyFactory; import org.sagebionetworks.repo.model.message.Comment; import org.sagebionetworks.repo.model.message.MessageToUser; import org.sagebionetworks.repo.model.IdList; import org.sagebionetworks.repo.model.migration.ListBucketProvider; import org.sagebionetworks.repo.model.migration.MigrationType; import org.sagebionetworks.repo.model.migration.MigrationTypeCount; import org.sagebionetworks.repo.model.migration.MigrationTypeCounts; import org.sagebionetworks.repo.model.migration.MigrationTypeList; import org.sagebionetworks.repo.model.migration.MigrationUtils; import org.sagebionetworks.repo.model.migration.RowMetadata; import org.sagebionetworks.repo.model.migration.RowMetadataResult; import org.sagebionetworks.repo.model.provenance.Activity; import org.sagebionetworks.repo.model.table.ColumnModel; import org.sagebionetworks.repo.model.table.Row; import org.sagebionetworks.repo.model.table.RowSet; import org.sagebionetworks.repo.model.v2.dao.V2WikiPageDao; import org.sagebionetworks.repo.model.v2.wiki.V2WikiPage; import org.sagebionetworks.repo.web.NotFoundException; import org.sagebionetworks.repo.web.controller.DispatchServletSingleton; import org.sagebionetworks.repo.web.controller.EntityServletTestHelper; import org.sagebionetworks.repo.web.controller.ServletTestHelper; import org.sagebionetworks.repo.web.service.ServiceProvider; import org.sagebionetworks.schema.adapter.JSONObjectAdapterException; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; /** * This is an integration test to test the migration of all tables from start to finish. * * The test does the following: * 1. the before() method creates at least one object for every type object that must migrate. * 2. Create a backup copy of all data. * 3. Delete all data in the system. * 4. Restore all data from the backup. * * NOTE: Whenever a new migration type is added this test must be extended to test that objects migration. * * * * @author jmhill * */ @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration(locations = { "classpath:test-context.xml" }) public class MigrationIntegrationAutowireTest { public static final long MAX_WAIT_MS = 10*1000; // 10 sec. 
@Autowired private EntityServletTestHelper entityServletHelper; @Autowired private UserManager userManager; @Autowired private FileHandleDao fileMetadataDao; @Autowired private UserProfileManager userProfileManager; @Autowired private ServiceProvider serviceProvider; @Autowired private EntityBootstrapper entityBootstrapper; @Autowired private MigrationManager migrationManager; @Autowired private StorageQuotaManager storageQuotaManager; @Autowired private StorageQuotaAdminDao storageQuotaAdminDao; @Autowired private UserGroupDAO userGroupDAO; @Autowired private GroupMembersDAO groupMembersDAO; @Autowired private TeamDAO teamDAO; @Autowired private CommunityTeamDAO communityTeamDAO; @Autowired private BridgeParticipantDAO bridgeParticipantDAO; @Autowired private BridgeUserParticipantMappingDAO bridgeUserParticipantMappingDAO; @Autowired private ParticipantDataDAO participantDataDAO; @Autowired private ParticipantDataDescriptorDAO participantDataDescriptorDAO; @Autowired private ParticipantDataStatusDAO participantDataStatusDAO; @Autowired private AuthenticationDAO authDAO; @Autowired private MessageDAO messageDAO; @Autowired private CommentDAO commentDAO; @Autowired private MembershipRqstSubmissionDAO membershipRqstSubmissionDAO; @Autowired private MembershipInvtnSubmissionDAO membershipInvtnSubmissionDAO; @Autowired private ColumnModelDAO columnModelDao; @Autowired private TableRowTruthDAO tableRowTruthDao; @Autowired private V2WikiPageDao v2wikiPageDAO; private Long adminUserId; private String adminUserIdString; private UserInfo adminUserInfo; // Activity private Activity activity; // Entities private Project project; private FileEntity fileEntity; private Community community; private Folder folderToTrash; // requirement private AccessRequirement accessRequirement; // approval private AccessApproval accessApproval; // V2 Wiki page private V2WikiPage v2RootWiki; private V2WikiPage v2SubWiki; // File Handles private S3FileHandle handleOne; private S3FileHandle markdownOne; private PreviewFileHandle preview; // Evaluation private Evaluation evaluation; private Submission submission; private HttpServletRequest mockRequest; private UserInfo newUser; @Before public void before() throws Exception{ mockRequest = Mockito.mock(HttpServletRequest.class); when(mockRequest.getServletPath()).thenReturn("/repo/v1"); // get user IDs adminUserId = BOOTSTRAP_PRINCIPAL.THE_ADMIN_USER.getPrincipalId(); adminUserIdString = adminUserId.toString(); adminUserInfo = userManager.getUserInfo(adminUserId); resetDatabase(); createNewUser(); String sampleFileHandleId = createFileHandles(); createActivity(); createEntities(); createFavorite(); createEvaluation(); createAccessRequirement(); createAccessApproval(); createV2WikiPages(); createDoi(); createStorageQuota(); UserGroup sampleGroup = createUserGroups(1); createTeamsRequestsAndInvitations(sampleGroup); createCredentials(sampleGroup); createMessages(sampleGroup, sampleFileHandleId); createColumnModel(); UserGroup sampleGroup2 = createUserGroups(2); createCommunity(sampleGroup2); createParticipantData(sampleGroup); } private void createColumnModel() throws DatastoreException, NotFoundException, IOException { String tableId = "syn123"; // Create some test column models List<ColumnModel> start = TableModelUtils.createOneOfEachType(); // Create each one List<ColumnModel> models = new LinkedList<ColumnModel>(); for(ColumnModel cm: start){ models.add(columnModelDao.createColumnModel(cm)); } List<String> header = TableModelUtils.getHeaders(models); // bind the columns to 
the entity columnModelDao.bindColumnToObject(header, tableId); // create some test rows. List<Row> rows = TableModelUtils.createRows(models, 5); RowSet set = new RowSet(); set.setHeaders(TableModelUtils.getHeaders(models)); set.setRows(rows); set.setTableId(tableId); // Append the rows to the table tableRowTruthDao.appendRowSetToTable(adminUserIdString, tableId, models, set); // Append some more rows rows = TableModelUtils.createRows(models, 6); set.setRows(rows); tableRowTruthDao.appendRowSetToTable(adminUserIdString, tableId, models, set); } public void createNewUser() throws NotFoundException{ NewUser user = new NewUser(); user.setUserName(UUID.randomUUID().toString()); user.setEmail(user.getUserName()+"@test.com"); Long id = userManager.createUser(user); newUser = userManager.getUserInfo(id); } private void resetDatabase() throws Exception { // This gives us a chance to also delete the S3 for table rows tableRowTruthDao.truncateAllRowData(); // Before we start this test we want to start with a clean database migrationManager.deleteAllData(adminUserInfo); // bootstrap to put back the bootstrap data entityBootstrapper.bootstrapAll(); storageQuotaAdminDao.clear(); } private void createFavorite() { userProfileManager.addFavorite(adminUserInfo, fileEntity.getId()); } private void createDoi() throws Exception { serviceProvider.getDoiService().createDoi(adminUserId, project.getId(), ObjectType.ENTITY, 1L); } private void createActivity() throws Exception { activity = new Activity(); activity.setDescription("some desc"); activity = serviceProvider.getActivityService().createActivity(adminUserId, activity); } private void createEvaluation() throws Exception { // initialize Evaluations evaluation = new Evaluation(); evaluation.setName("name"); evaluation.setDescription("description"); evaluation.setContentSource(project.getId()); evaluation.setStatus(EvaluationStatus.PLANNED); evaluation.setSubmissionInstructionsMessage("instructions"); evaluation.setSubmissionReceiptMessage("receipt"); evaluation = serviceProvider.getEvaluationService().createEvaluation(adminUserId, evaluation); evaluation = new Evaluation(); evaluation.setName("name2"); evaluation.setDescription("description"); evaluation.setContentSource(project.getId()); evaluation.setStatus(EvaluationStatus.OPEN); evaluation.setSubmissionInstructionsMessage("instructions"); evaluation.setSubmissionReceiptMessage("receipt"); evaluation = serviceProvider.getEvaluationService().createEvaluation(adminUserId, evaluation); // initialize Participants serviceProvider.getEvaluationService().addParticipant(adminUserId, evaluation.getId()); // initialize Submissions submission = new Submission(); submission.setName("submission1"); submission.setVersionNumber(1L); submission.setEntityId(fileEntity.getId()); submission.setUserId(adminUserIdString); submission.setEvaluationId(evaluation.getId()); submission = entityServletHelper.createSubmission(submission, adminUserId, fileEntity.getEtag()); } public void createAccessApproval() throws Exception { accessApproval = newToUAccessApproval(accessRequirement.getId(), adminUserIdString); accessApproval = ServletTestHelper.createAccessApproval( DispatchServletSingleton.getInstance(), accessApproval, adminUserId, new HashMap<String, String>()); } public void createAccessRequirement() throws Exception { // Add an access requirement to this entity accessRequirement = newAccessRequirement(); String entityId = project.getId(); RestrictableObjectDescriptor entitySubjectId = new RestrictableObjectDescriptor(); 
entitySubjectId.setId(entityId); entitySubjectId.setType(RestrictableObjectType.ENTITY); RestrictableObjectDescriptor evaluationSubjectId = new RestrictableObjectDescriptor(); assertNotNull(evaluation); assertNotNull(evaluation.getId()); evaluationSubjectId.setId(evaluation.getId()); evaluationSubjectId.setType(RestrictableObjectType.EVALUATION); accessRequirement.setSubjectIds(Arrays.asList(new RestrictableObjectDescriptor[]{entitySubjectId, evaluationSubjectId})); accessRequirement = ServletTestHelper.createAccessRequirement(DispatchServletSingleton.getInstance(), accessRequirement, adminUserId, new HashMap<String, String>()); } private TermsOfUseAccessApproval newToUAccessApproval(Long requirementId, String accessorId) { TermsOfUseAccessApproval aa = new TermsOfUseAccessApproval(); aa.setAccessorId(accessorId); aa.setEntityType(TermsOfUseAccessApproval.class.getName()); aa.setRequirementId(requirementId); return aa; } public void createV2WikiPages() throws NotFoundException { // Using wikiPageDao until wiki service is created // Create a V2 Wiki page v2RootWiki = new V2WikiPage(); v2RootWiki.setCreatedBy(adminUserIdString); v2RootWiki.setModifiedBy(adminUserIdString); v2RootWiki.setAttachmentFileHandleIds(new LinkedList<String>()); v2RootWiki.getAttachmentFileHandleIds().add(handleOne.getId()); v2RootWiki.setTitle("Root title"); v2RootWiki.setMarkdownFileHandleId(markdownOne.getId()); Map<String, FileHandle> map = new HashMap<String, FileHandle>(); map.put(handleOne.getFileName(), handleOne); List<String> newIds = new ArrayList<String>(); newIds.add(handleOne.getId()); v2RootWiki = v2wikiPageDAO.create(v2RootWiki, map, fileEntity.getId(), ObjectType.ENTITY, newIds); // Create a child v2SubWiki = new V2WikiPage(); v2SubWiki.setCreatedBy(adminUserIdString); v2SubWiki.setModifiedBy(adminUserIdString); v2SubWiki.setParentWikiId(v2RootWiki.getId()); v2SubWiki.setTitle("V2 Sub-wiki-title"); v2SubWiki.setMarkdownFileHandleId(markdownOne.getId()); v2SubWiki = v2wikiPageDAO.create(v2SubWiki, new HashMap<String, FileHandle>(), fileEntity.getId(), ObjectType.ENTITY, new ArrayList<String>()); } /** * Create the entities used by this test. 
* @throws JSONObjectAdapterException * @throws ServletException * @throws IOException * @throws NotFoundException */ public void createEntities() throws JSONObjectAdapterException, ServletException, IOException, NotFoundException { // Create a project project = new Project(); project.setName("MigrationIntegrationAutowireTest.Project"); project.setEntityType(Project.class.getName()); project = serviceProvider.getEntityService().createEntity(adminUserId, project, null, mockRequest); // Create a file entity fileEntity = new FileEntity(); fileEntity.setName("MigrationIntegrationAutowireTest.FileEntity"); fileEntity.setEntityType(FileEntity.class.getName()); fileEntity.setParentId(project.getId()); fileEntity.setDataFileHandleId(handleOne.getId()); fileEntity = serviceProvider.getEntityService().createEntity(adminUserId, fileEntity, activity.getId(),mockRequest); // Create a folder to trash folderToTrash = new Folder(); folderToTrash.setName("boundForTheTrashCan"); folderToTrash.setParentId(project.getId()); folderToTrash = serviceProvider.getEntityService().createEntity(adminUserId, folderToTrash, null, mockRequest); // Send it to the trash can serviceProvider.getTrashService().moveToTrash(adminUserId, folderToTrash.getId()); } private AccessRequirement newAccessRequirement() { TermsOfUseAccessRequirement dto = new TermsOfUseAccessRequirement(); dto.setEntityType(dto.getClass().getName()); dto.setAccessType(ACCESS_TYPE.DOWNLOAD); dto.setTermsOfUse("foo"); return dto; } /** * Create the file handles used by this test. * @throws NotFoundException */ public String createFileHandles() throws NotFoundException { // Create a file handle handleOne = new S3FileHandle(); handleOne.setCreatedBy(adminUserIdString); handleOne.setCreatedOn(new Date()); handleOne.setBucketName("bucket"); handleOne.setKey("mainFileKey"); handleOne.setEtag("etag"); handleOne.setFileName("foo.bar"); handleOne = fileMetadataDao.createFile(handleOne); // Create markdown content markdownOne = new S3FileHandle(); markdownOne.setCreatedBy(adminUserIdString); markdownOne.setCreatedOn(new Date()); markdownOne.setBucketName("bucket"); markdownOne.setKey("markdownFileKey"); markdownOne.setEtag("etag"); markdownOne.setFileName("markdown1"); markdownOne = fileMetadataDao.createFile(markdownOne); // Create a preview preview = new PreviewFileHandle(); preview.setCreatedBy(adminUserIdString); preview.setCreatedOn(new Date()); preview.setBucketName("bucket"); preview.setKey("previewFileKey"); preview.setEtag("etag"); preview.setFileName("bar.txt"); preview = fileMetadataDao.createFile(preview); // Set two as the preview of one fileMetadataDao.setPreviewId(handleOne.getId(), preview.getId()); return handleOne.getId(); } private void createStorageQuota() { storageQuotaManager.setQuotaForUser(adminUserInfo, adminUserInfo, 3000); } // returns a group for use in a team private UserGroup createUserGroups(int index) throws NotFoundException { List<String> adder = new ArrayList<String>(); // Make one group UserGroup parentGroup = new UserGroup(); parentGroup.setIsIndividual(false); parentGroup.setId(userGroupDAO.create(parentGroup).toString()); // Make two users UserGroup parentUser = new UserGroup(); parentUser.setIsIndividual(true); parentUser.setId(userGroupDAO.create(parentUser).toString()); UserGroup siblingUser = new UserGroup(); siblingUser.setIsIndividual(true); siblingUser.setId(userGroupDAO.create(siblingUser).toString()); // Nest one group and two users within the parent group adder.add(parentUser.getId()); adder.add(siblingUser.getId()); 
groupMembersDAO.addMembers(parentGroup.getId(), adder); return parentGroup; } private void createCredentials(UserGroup group) throws Exception { Long principalId = Long.parseLong(group.getId()); authDAO.changePassword(principalId, "ThisIsMySuperSecurePassword"); authDAO.changeSecretKey(principalId); authDAO.changeSessionToken(principalId, null); } @SuppressWarnings("serial") private void createMessages(final UserGroup group, String fileHandleId) { MessageToUser dto = new MessageToUser(); // Note: ID is auto generated dto.setCreatedBy(group.getId()); dto.setFileHandleId(fileHandleId); // Note: CreatedOn is set by the DAO dto.setSubject("See you on the other side?"); dto.setRecipients(new HashSet<String>() {{add(group.getId());}}); dto.setInReplyTo(null); // Note: InReplyToRoot is calculated by the DAO dto = messageDAO.createMessage(dto); messageDAO.createMessageStatus_NewTransaction(dto.getId(), group.getId(), null); Comment dto2 = new Comment(); dto2.setCreatedBy(group.getId()); dto2.setFileHandleId(fileHandleId); dto2.setTargetId("1337"); dto2.setTargetType(ObjectType.ENTITY); commentDAO.createComment(dto2); } private void createTeamsRequestsAndInvitations(UserGroup group) { String otherUserId = BOOTSTRAP_PRINCIPAL.ANONYMOUS_USER.getPrincipalId().toString(); Team team = new Team(); team.setId(group.getId()); team.setName(UUID.randomUUID().toString()); team.setDescription("test team"); teamDAO.create(team); // create a MembershipRqstSubmission MembershipRqstSubmission mrs = new MembershipRqstSubmission(); Date createdOn = new Date(); Date expiresOn = new Date(); mrs.setCreatedOn(createdOn); mrs.setExpiresOn(expiresOn); mrs.setMessage("Please let me join the team."); mrs.setTeamId(""+group.getId()); // need another valid user group mrs.setUserId(otherUserId); membershipRqstSubmissionDAO.create(mrs); // create a MembershipInvtnSubmission MembershipInvtnSubmission mis = new MembershipInvtnSubmission(); mis.setCreatedOn(createdOn); mis.setExpiresOn(expiresOn); mis.setMessage("Please join the team."); mis.setTeamId(""+group.getId()); // need another valid user group mis.setInviteeId(otherUserId); membershipInvtnSubmissionDAO.create(mis); } private void createCommunity(UserGroup group) throws Exception { Team team = new Team(); team.setId(group.getId()); team.setName(UUID.randomUUID().toString()); team.setDescription("test team"); team = teamDAO.create(team); // Create a community community = new Community(); community.setName("MigrationIntegrationAutowireTest.Community"); community.setEntityType(Community.class.getName()); community.setTeamId(team.getId()); community = serviceProvider.getEntityService().createEntity(adminUserId, community, null, mockRequest); communityTeamDAO.create(KeyFactory.stringToKey(community.getId()), Long.parseLong(team.getId())); } private void createParticipantData(UserGroup sampleGroup) throws Exception { Long participantId = Long.parseLong(sampleGroup.getId()) ^ -1L; bridgeParticipantDAO.create(participantId); bridgeUserParticipantMappingDAO.setParticipantIdsForUser(Long.parseLong(sampleGroup.getId()), Collections.<String> singletonList(participantId.toString())); ParticipantDataDescriptor participantDataDescriptor = new ParticipantDataDescriptor(); participantDataDescriptor.setName(participantId.toString() + "desc"); participantDataDescriptor.setRepeatType(ParticipantDataRepeatType.ALWAYS); participantDataDescriptor.setRepeatFrequency("0 0 4 * * ? 
*"); participantDataDescriptor = participantDataDescriptorDAO.createParticipantDataDescriptor(participantDataDescriptor); ParticipantDataColumnDescriptor participantDataColumnDescriptor = new ParticipantDataColumnDescriptor(); participantDataColumnDescriptor.setParticipantDataDescriptorId(participantDataDescriptor.getId()); participantDataColumnDescriptor.setName("a"); participantDataColumnDescriptor.setColumnType(ParticipantDataColumnType.STRING); participantDataDescriptorDAO.createParticipantDataColumnDescriptor(participantDataColumnDescriptor); ParticipantDataColumnDescriptor participantDataColumnDescriptor2 = new ParticipantDataColumnDescriptor(); participantDataColumnDescriptor2.setParticipantDataDescriptorId(participantDataDescriptor.getId()); participantDataColumnDescriptor2.setName("b"); participantDataColumnDescriptor2.setColumnType(ParticipantDataColumnType.STRING); participantDataDescriptorDAO.createParticipantDataColumnDescriptor(participantDataColumnDescriptor); ParticipantDataRow dataRow = new ParticipantDataRow(); ParticipantDataStringValue stringValue1 = new ParticipantDataStringValue(); stringValue1.setValue("1"); ParticipantDataStringValue stringValue2 = new ParticipantDataStringValue(); stringValue2.setValue("2"); dataRow.setData(ImmutableMap.<String, ParticipantDataValue> builder().put("a", stringValue1).put("b", stringValue2).build()); List<ParticipantDataRow> data = Lists.newArrayList(dataRow); participantDataDAO.append(participantId.toString(), participantDataDescriptor.getId(), data, Lists.newArrayList(participantDataColumnDescriptor, participantDataColumnDescriptor2)); ParticipantDataStatus status = new ParticipantDataStatus(); status.setParticipantDataDescriptorId(participantDataDescriptor.getId()); status.setLastEntryComplete(false); status.setLastPrompted(new Date()); status.setLastStarted(new Date()); participantDataStatusDAO.update(Collections.<ParticipantDataStatus> singletonList(status)); } @After public void after() throws Exception{ // to cleanup for this test we delete all in the database resetDatabase(); } /** * This is the actual test. The rest of the class is setup and tear down. * * @throws Exception */ @Test public void testRoundTrip() throws Exception{ // Get the list of primary types MigrationTypeList primaryTypesList = entityServletHelper.getPrimaryMigrationTypes(adminUserId); assertNotNull(primaryTypesList); assertNotNull(primaryTypesList.getList()); assertTrue(primaryTypesList.getList().size() > 0); // Get the counts before we start MigrationTypeCounts startCounts = entityServletHelper.getMigrationTypeCounts(adminUserId); validateStartingCount(startCounts); // This test will backup all data, delete it, then restore it. 
List<BackupInfo> backupList = new ArrayList<BackupInfo>(); for (MigrationType type : primaryTypesList.getList()) { // Backup each type backupList.addAll(backupAllOfType(type)); } // Now delete all data in reverse order for (int i = primaryTypesList.getList().size() - 1; i >= 0; i--) { MigrationType type = primaryTypesList.getList().get(i); deleteAllOfType(type); } // After deleting, the counts should be 0 except for a few special cases MigrationTypeCounts afterDeleteCounts = entityServletHelper.getMigrationTypeCounts(adminUserId); assertNotNull(afterDeleteCounts); assertNotNull(afterDeleteCounts.getList()); for (int i = 0; i < afterDeleteCounts.getList().size(); i++) { MigrationTypeCount afterDelete = afterDeleteCounts.getList().get(i); // Special cases for the not-deleted migration admin if (afterDelete.getType() == MigrationType.PRINCIPAL) { assertEquals("There should be 4 UserGroups remaining after the delete: " + BOOTSTRAP_PRINCIPAL.THE_ADMIN_USER + ", " + BOOTSTRAP_PRINCIPAL.ADMINISTRATORS_GROUP + ", " + BOOTSTRAP_PRINCIPAL.PUBLIC_GROUP + ", and " + BOOTSTRAP_PRINCIPAL.AUTHENTICATED_USERS_GROUP, new Long(4), afterDelete.getCount()); } else if (afterDelete.getType() == MigrationType.GROUP_MEMBERS || afterDelete.getType() == MigrationType.CREDENTIAL) { assertEquals("Counts do not match for: " + afterDelete.getType().name(), new Long(1), afterDelete.getCount()); } else { assertEquals("Counts are non-zero for: " + afterDelete.getType().name(), new Long(0), afterDelete.getCount()); } } // Now restore all of the data for(BackupInfo info: backupList){ String fileName = info.getFileName(); assertNotNull("Did not find a backup file name for type: "+info.getType(), fileName); restoreFromBackup(info.getType(), fileName); } // The counts should all be back MigrationTypeCounts finalCounts = entityServletHelper.getMigrationTypeCounts(adminUserId); for (int i = 1; i < finalCounts.getList().size(); i++) { MigrationTypeCount startCount = startCounts.getList().get(i); MigrationTypeCount afterRestore = finalCounts.getList().get(i); assertEquals("Count for " + startCount.getType().name() + " does not match", startCount.getCount(), afterRestore.getCount()); } } private static class BackupInfo { MigrationType type; String fileName; public BackupInfo(MigrationType type, String fileName) { super(); this.type = type; this.fileName = fileName; } public MigrationType getType() { return type; } public String getFileName() { return fileName; } } /** * There must be at least one object for every type of migratable object. * @param startCounts */ private void validateStartingCount(MigrationTypeCounts startCounts) { assertNotNull(startCounts); assertNotNull(startCounts.getList()); List<MigrationType> typesToMigrate = new LinkedList<MigrationType>(); for (MigrationType tm: MigrationType.values()) { if (migrationManager.isMigrationTypeUsed(adminUserInfo, tm)) { typesToMigrate.add(tm); } } assertEquals("This test requires at least one object to exist for each MigrationType. Please create a new object of the new MigrationType in the before() method of this test.",typesToMigrate.size(), startCounts.getList().size()); for(MigrationTypeCount count: startCounts.getList()){ assertTrue("This test requires at least one object to exist for each MigrationType. Please create a new object of type: "+count.getType()+" in the before() method of this test.", count.getCount() > 0); } } /** * Extract the filename from the full url.
* @param fullUrl * @return */ public String getFileNameFromUrl(String fullUrl){; int index = fullUrl.lastIndexOf("/"); return fullUrl.substring(index+1, fullUrl.length()); } /** * Backup all data * @param type * @return * @throws Exception */ private List<BackupInfo> backupAllOfType(MigrationType type) throws Exception { RowMetadataResult list = entityServletHelper.getRowMetadata(adminUserId, type, Long.MAX_VALUE, 0); if(list == null) return null; // Backup batches by their level in the tree ListBucketProvider provider = new ListBucketProvider(); MigrationUtils.bucketByTreeLevel(list.getList().iterator(), provider); List<BackupInfo> result = new ArrayList<BackupInfo>(); List<List<Long>> listOfBuckets = provider.getListOfBuckets(); for(List<Long> batch: listOfBuckets){ if(batch.size() > 0){ String fileName = backup(type, batch); result.add(new BackupInfo(type, fileName)); } } return result; } private String backup(MigrationType type, List<Long> tobackup) throws Exception { // Start the backup job IdList ids = new IdList(); ids.setList(tobackup); BackupRestoreStatus status = entityServletHelper.startBackup(adminUserId, type, ids); // wait for it.. waitForDaemon(status); status = entityServletHelper.getBackupRestoreStatus(adminUserId, status.getId()); assertNotNull(status.getBackupUrl()); return getFileNameFromUrl(status.getBackupUrl()); } private void restoreFromBackup(MigrationType type, String fileName) throws Exception{ RestoreSubmission sub = new RestoreSubmission(); sub.setFileName(fileName); BackupRestoreStatus status = entityServletHelper.startRestore(adminUserId, type, sub); // wait for it waitForDaemon(status); } /** * Delete all data for a type. * @param type * @throws ServletException * @throws IOException * @throws JSONObjectAdapterException */ private void deleteAllOfType(MigrationType type) throws Exception{ IdList idList = getIdListOfAllOfType(type); if(idList == null) return; MigrationTypeCount result = entityServletHelper.deleteMigrationType(adminUserId, type, idList); System.out.println("Deleted: "+result); } /** * List all of the IDs for a type. * @param type * @return * @throws ServletException * @throws IOException * @throws JSONObjectAdapterException */ private IdList getIdListOfAllOfType(MigrationType type) throws Exception{ RowMetadataResult list = entityServletHelper.getRowMetadata(adminUserId, type, Long.MAX_VALUE, 0); if(list.getTotalCount() < 1) return null; // Create the backup list List<Long> toBackup = new LinkedList<Long>(); for(RowMetadata row: list.getList()){ toBackup.add(row.getId()); } IdList idList = new IdList(); idList.setList(toBackup); return idList; } /** * Wait for a deamon to process a a job. * @param status * @throws InterruptedException * @throws JSONObjectAdapterException * @throws IOException * @throws ServletException */ private void waitForDaemon(BackupRestoreStatus status) throws Exception{ long start = System.currentTimeMillis(); while(DaemonStatus.COMPLETED != status.getStatus()){ assertFalse("Daemon failed "+status.getErrorDetails(), DaemonStatus.FAILED == status.getStatus()); System.out.println("Waiting for backup/restore daemon. Message: "+status.getProgresssMessage()); Thread.sleep(1000); long elapse = System.currentTimeMillis() - start; assertTrue("Timed out waiting for a backup/restore daemon",elapse < MAX_WAIT_MS); status = entityServletHelper.getBackupRestoreStatus(adminUserId, status.getId()); } } }
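// --------------------------------------------------------------------------------------------
// Hedged sketch (added for illustration, not part of the original test): the class-level NOTE
// says that whenever a new migration type is added, before() must create at least one object of
// that type so validateStartingCount() passes. The helper below only shows the shape such setup
// code takes, reusing the Folder entity and the createEntity(...) call already used in
// createEntities(); the class name, method name and entity name are hypothetical, and a real
// addition would persist an object of the new migration type instead and be wired into before().
final class MigrationTypeSetupSketch {
  static Folder createExampleFolder(ServiceProvider serviceProvider, Long adminUserId,
      String parentProjectId, HttpServletRequest mockRequest) throws Exception {
    // One persisted instance is enough for the backup/delete/restore round trip to cover it.
    Folder extra = new Folder();
    extra.setName("MigrationIntegrationAutowireTest.ExampleFolder");
    extra.setParentId(parentProjectId);
    return serviceProvider.getEntityService().createEntity(adminUserId, extra, null, mockRequest);
  }
}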
package metadata.etl.lhotse.extractor; import metadata.etl.lhotse.LzExecMessage; import metadata.etl.lhotse.LzTaskExecRecord; import metadata.etl.utils.ProcessUtils; import metadata.etl.utils.SshUtils; import metadata.etl.utils.XmlParser; import metadata.etl.utils.hiveparser.HiveSqlAnalyzer; import metadata.etl.utils.hiveparser.HiveSqlType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import wherehows.common.Constant; import wherehows.common.schemas.LineageRecord; import java.util.ArrayList; import java.util.List; public class Hive2HdfsLineageExtractor implements BaseLineageExtractor { private static final Logger logger = LoggerFactory.getLogger(Hive2HdfsLineageExtractor.class); @Override public List<LineageRecord> getLineageRecord(String logLocation, LzExecMessage message, int defaultDatabaseId) { LzTaskExecRecord lzTaskExecRecord = message.lzTaskExecRecord; List<LineageRecord> lineageRecords = new ArrayList<>(); try { logger.info("start to parse the log: {}", logLocation); XmlParser xmlParser = new XmlParser(logLocation); // get info from logs String destPath = xmlParser.getExtProperty("extProperties/entry/destFilePath"); String sql = xmlParser.getExtProperty("extProperties/entry/filterSQL"); long flowExecId = Long.parseLong(xmlParser.getExtProperty("curRunDate")); String databaseName = xmlParser.getExtProperty("extProperties/entry/databaseName"); // get the hdfs file name. String [] cmds = {"hdfs", "dfs", "-ls", destPath}; ArrayList<String> results = ProcessUtils.exec(cmds); // for debug logger.info("the process utils result: {}", results); if (results == null || results.size() == 0) { logger.error("process utils: no result returned"); return null; } else { if (!destPath.endsWith("/")) destPath += "/"; String raw = results.get(results.size()-1); String []tmps = raw.split(" "); destPath = tmps[tmps.length - 1]; // if (results.size() > 1) logger.info("process utils: result > 1"); } logger.info("extract props from log file finished."); logger.info("the dest path is: {}", destPath); logger.info("the sql is: {}", sql); logger.info("the flow exec id is: {}", flowExecId); logger.info("the database name is: {}", databaseName); logger.info("the job name is: {}", lzTaskExecRecord.taskName); // parse the hive table from sql List<String> isrcTableNames = new ArrayList<String>(); List<String> idesTableNames = new ArrayList<String>(); if (sql != null) { String opType = HiveSqlAnalyzer.analyzeSql(sql, isrcTableNames, idesTableNames); if (opType.equals(HiveSqlType.QUERY)) { } } logger.info("hive sql parse finished."); long taskId = Long.parseLong(lzTaskExecRecord.taskId); String taskName = lzTaskExecRecord.taskName; String flowPath = "/lhotse/hive2hdfs/" + flowExecId; String operation = null; long num = 0L; logger.info("start to create the source record: {}", isrcTableNames.toString()); // source lineage record. for (String sourcePath : isrcTableNames) { LineageRecord lineageRecord = new LineageRecord(lzTaskExecRecord.appId, flowExecId, taskName, taskId); // set lineage record details.
lineageRecord.setDatasetInfo(defaultDatabaseId, sourcePath, "hive"); lineageRecord.setOperationInfo("source", operation, num, num, num, num, lzTaskExecRecord.taskStartTime, lzTaskExecRecord.taskEndTime, flowPath); lineageRecord.setAbstractObjectName("/" + databaseName + "/" + sourcePath); lineageRecord.setFullObjectName("/" + databaseName + "/" + sourcePath); lineageRecord.setSrlNo(2); logger.info("the source record is: {}", lineageRecord.toString()); lineageRecords.add(lineageRecord); } logger.info("start to create the target record!"); // target lineage record. LineageRecord lineageRecord = new LineageRecord(lzTaskExecRecord.appId, flowExecId, taskName, taskId); // set lineage record details. lineageRecord.setDatasetInfo(defaultDatabaseId, destPath, "hdfs"); lineageRecord.setOperationInfo("target", operation, num, num, num, num, lzTaskExecRecord.taskStartTime, lzTaskExecRecord.taskEndTime, flowPath); lineageRecord.setAbstractObjectName(destPath); lineageRecord.setFullObjectName(destPath); lineageRecord.setSrlNo(3); logger.info("the target record is: {}", lineageRecord.toString()); lineageRecords.add(lineageRecord); } catch (Exception e) { logger.error("error happened in collecting lineage record.", e); } return lineageRecords; } }
package org.sagebionetworks.repo.web.migration; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.when; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.UUID; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; import org.sagebionetworks.bridge.model.BridgeParticipantDAO; import org.sagebionetworks.bridge.model.BridgeUserParticipantMappingDAO; import org.sagebionetworks.bridge.model.Community; import org.sagebionetworks.bridge.model.CommunityTeamDAO; import org.sagebionetworks.bridge.model.ParticipantDataDAO; import org.sagebionetworks.bridge.model.ParticipantDataDescriptorDAO; import org.sagebionetworks.bridge.model.ParticipantDataId; import org.sagebionetworks.bridge.model.ParticipantDataStatusDAO; import org.sagebionetworks.bridge.model.data.ParticipantDataColumnDescriptor; import org.sagebionetworks.bridge.model.data.ParticipantDataColumnType; import org.sagebionetworks.bridge.model.data.ParticipantDataDescriptor; import org.sagebionetworks.bridge.model.data.ParticipantDataRepeatType; import org.sagebionetworks.bridge.model.data.ParticipantDataRow; import org.sagebionetworks.bridge.model.data.ParticipantDataStatus; import org.sagebionetworks.bridge.model.data.value.ParticipantDataStringValue; import org.sagebionetworks.bridge.model.data.value.ParticipantDataValue; import org.sagebionetworks.evaluation.model.Evaluation; import org.sagebionetworks.evaluation.model.EvaluationStatus; import org.sagebionetworks.evaluation.model.Submission; import org.sagebionetworks.repo.manager.StorageQuotaManager; import org.sagebionetworks.repo.manager.UserManager; import org.sagebionetworks.repo.manager.UserProfileManager; import org.sagebionetworks.repo.manager.migration.MigrationManager; import org.sagebionetworks.repo.model.*; import org.sagebionetworks.repo.model.AuthorizationConstants.BOOTSTRAP_PRINCIPAL; import org.sagebionetworks.repo.model.auth.NewUser; import org.sagebionetworks.repo.model.bootstrap.EntityBootstrapper; import org.sagebionetworks.repo.model.daemon.BackupRestoreStatus; import org.sagebionetworks.repo.model.daemon.DaemonStatus; import org.sagebionetworks.repo.model.daemon.RestoreSubmission; import org.sagebionetworks.repo.model.dao.FileHandleDao; import org.sagebionetworks.repo.model.dao.table.ColumnModelDAO; import org.sagebionetworks.repo.model.dao.table.TableRowTruthDAO; import org.sagebionetworks.repo.model.dbo.DBOBasicDao; import org.sagebionetworks.repo.model.dbo.dao.table.TableModelTestUtils; import org.sagebionetworks.repo.model.dbo.persistence.DBOSessionToken; import org.sagebionetworks.repo.model.dbo.persistence.DBOTermsOfUseAgreement; import org.sagebionetworks.repo.model.file.FileHandle; import org.sagebionetworks.repo.model.file.PreviewFileHandle; import org.sagebionetworks.repo.model.file.S3FileHandle; import org.sagebionetworks.repo.model.file.UploadType; import org.sagebionetworks.repo.model.jdo.KeyFactory; import org.sagebionetworks.repo.model.message.Comment; import org.sagebionetworks.repo.model.message.MessageToUser; import 
org.sagebionetworks.repo.model.migration.ListBucketProvider; import org.sagebionetworks.repo.model.migration.MigrationType; import org.sagebionetworks.repo.model.migration.MigrationTypeCount; import org.sagebionetworks.repo.model.migration.MigrationTypeCounts; import org.sagebionetworks.repo.model.migration.MigrationTypeList; import org.sagebionetworks.repo.model.migration.MigrationUtils; import org.sagebionetworks.repo.model.migration.RowMetadata; import org.sagebionetworks.repo.model.migration.RowMetadataResult; import org.sagebionetworks.repo.model.principal.PrincipalAliasDAO; import org.sagebionetworks.repo.model.project.S3UploadDestinationSetting; import org.sagebionetworks.repo.model.project.UploadDestinationListSetting; import org.sagebionetworks.repo.model.project.UploadDestinationSetting; import org.sagebionetworks.repo.model.provenance.Activity; import org.sagebionetworks.repo.model.quiz.PassingRecord; import org.sagebionetworks.repo.model.quiz.QuizResponse; import org.sagebionetworks.repo.model.table.ColumnMapper; import org.sagebionetworks.repo.model.table.ColumnModel; import org.sagebionetworks.repo.model.table.RawRowSet; import org.sagebionetworks.repo.model.table.Row; import org.sagebionetworks.repo.model.table.RowSet; import org.sagebionetworks.repo.model.v2.dao.V2WikiPageDao; import org.sagebionetworks.repo.model.v2.wiki.V2WikiPage; import org.sagebionetworks.repo.web.NotFoundException; import org.sagebionetworks.repo.web.controller.AbstractAutowiredControllerTestBase; import org.sagebionetworks.repo.web.service.ServiceProvider; import org.sagebionetworks.schema.adapter.JSONObjectAdapterException; import org.sagebionetworks.table.cluster.utils.TableModelUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.test.annotation.DirtiesContext; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; /** * This is an integration test to test the migration of all tables from start to finish. * * The test does the following: 1. The before() method creates at least one object for every type of object that must * migrate. 2. Create a backup copy of all data. 3. Delete all data in the system. 4. Restore all data from the backup. * * NOTE: Whenever a new migration type is added this test must be extended to test that object's migration. * * * * @author jmhill * */ @DirtiesContext public class MigrationIntegrationAutowireTest extends AbstractAutowiredControllerTestBase { public static final long MAX_WAIT_MS = 30 * 1000; // 30 sec.
@Autowired private DBOBasicDao basicDao; @Autowired private UserManager userManager; @Autowired private FileHandleDao fileMetadataDao; @Autowired private UserProfileManager userProfileManager; @Autowired private ServiceProvider serviceProvider; @Autowired private EntityBootstrapper entityBootstrapper; @Autowired private MigrationManager migrationManager; @Autowired private StorageQuotaManager storageQuotaManager; @Autowired private StorageQuotaAdminDao storageQuotaAdminDao; @Autowired private UserGroupDAO userGroupDAO; @Autowired private PrincipalAliasDAO principalAliasDAO; @Autowired private GroupMembersDAO groupMembersDAO; @Autowired private TeamDAO teamDAO; @Autowired private CommunityTeamDAO communityTeamDAO; @Autowired private BridgeParticipantDAO bridgeParticipantDAO; @Autowired private BridgeUserParticipantMappingDAO bridgeUserParticipantMappingDAO; @Autowired private ParticipantDataDAO participantDataDAO; @Autowired private ParticipantDataDescriptorDAO participantDataDescriptorDAO; @Autowired private ParticipantDataStatusDAO participantDataStatusDAO; @Autowired private AuthenticationDAO authDAO; @Autowired private MessageDAO messageDAO; @Autowired private CommentDAO commentDAO; @Autowired private MembershipRqstSubmissionDAO membershipRqstSubmissionDAO; @Autowired private MembershipInvtnSubmissionDAO membershipInvtnSubmissionDAO; @Autowired private ColumnModelDAO columnModelDao; @Autowired private TableRowTruthDAO tableRowTruthDao; @Autowired private V2WikiPageDao v2wikiPageDAO; @Autowired private QuizResponseDAO quizResponseDAO; @Autowired private ProjectSettingsDAO projectSettingsDAO; @Autowired private ProjectStatsDAO projectStatsDAO; @Autowired private ChallengeDAO challengeDAO; @Autowired private ChallengeTeamDAO challengeTeamDAO; private Team team; private Long adminUserId; private String adminUserIdString; private UserInfo adminUserInfo; // Activity private Activity activity; // Entities private Project project; private FileEntity fileEntity; private Community community; private Folder folderToTrash; // requirement private AccessRequirement accessRequirement; // approval private AccessApproval accessApproval; // V2 Wiki page private V2WikiPage v2RootWiki; private V2WikiPage v2SubWiki; // File Handles private S3FileHandle handleOne; private S3FileHandle markdownOne; private PreviewFileHandle preview; // Evaluation private Evaluation evaluation; private Submission submission; private HttpServletRequest mockRequest; private Challenge challenge; private ChallengeTeam challengeTeam; @Before public void before() throws Exception { mockRequest = Mockito.mock(HttpServletRequest.class); when(mockRequest.getServletPath()).thenReturn("/repo/v1"); // get user IDs adminUserId = BOOTSTRAP_PRINCIPAL.THE_ADMIN_USER.getPrincipalId(); adminUserIdString = adminUserId.toString(); adminUserInfo = userManager.getUserInfo(adminUserId); resetDatabase(); createNewUser(); String sampleFileHandleId = createFileHandles(); createActivity(); createEntities(); createFavorite(); createProjectSetting(); createProjectStat(); createEvaluation(); createAccessRequirement(); createAccessApproval(); createV2WikiPages(); createDoi(); createStorageQuota(); UserGroup sampleGroup = createUserGroups(1); createTeamsRequestsAndInvitations(sampleGroup); createCredentials(sampleGroup); createSessionToken(sampleGroup); createTermsOfUseAgreement(sampleGroup); createMessages(sampleGroup, sampleFileHandleId); createColumnModel(); UserGroup sampleGroup2 = createUserGroups(2); createCommunity(sampleGroup2); 
createParticipantData(sampleGroup); createQuizResponse(); createChallengeAndRegisterTeam(); } private void createChallengeAndRegisterTeam() { challenge = new Challenge(); challenge.setParticipantTeamId(team.getId()); challenge.setProjectId(project.getId()); challenge = challengeDAO.create(challenge); challengeTeam = new ChallengeTeam(); challengeTeam.setChallengeId(challenge.getId()); // this is nonsensical: We are registering a team which is the challenge // participant team. However it does the job of exercising object migration. challengeTeam.setTeamId(team.getId()); // persist the registration so there is a ChallengeTeam row to migrate challengeTeam = challengeTeamDAO.create(challengeTeam); } private void createProjectSetting() { UploadDestinationListSetting settings = new UploadDestinationListSetting(); settings.setProjectId(project.getId()); settings.setSettingsType("upload"); S3UploadDestinationSetting destination = new S3UploadDestinationSetting(); destination.setUploadType(UploadType.S3); settings.setDestinations(Collections.<UploadDestinationSetting> singletonList(destination)); projectSettingsDAO.create(settings); } private void createProjectStat() { ProjectStat projectStat = new ProjectStat(KeyFactory.stringToKey(project.getId()), adminUserId, new Date()); projectStatsDAO.update(projectStat); } private void createQuizResponse() { QuizResponse dto = new QuizResponse(); PassingRecord passingRecord = new PassingRecord(); passingRecord.setPassed(true); passingRecord.setPassedOn(new Date()); passingRecord.setQuizId(101L); passingRecord.setResponseId(222L); passingRecord.setScore(7L); passingRecord.setUserId(adminUserId.toString()); adminUserId = BOOTSTRAP_PRINCIPAL.THE_ADMIN_USER.getPrincipalId(); dto.setCreatedBy(adminUserId.toString()); dto.setCreatedOn(new Date()); dto.setQuizId(101L); quizResponseDAO.create(dto, passingRecord); } private void createColumnModel() throws DatastoreException, NotFoundException, IOException { String tableId = "syn123"; // Create some test column models List<ColumnModel> start = TableModelTestUtils.createOneOfEachType(); // Create each one List<ColumnModel> models = new LinkedList<ColumnModel>(); for (ColumnModel cm : start) { models.add(columnModelDao.createColumnModel(cm)); } ColumnMapper mapper = TableModelUtils.createColumnModelColumnMapper(models, false); List<String> header = TableModelUtils.getHeaders(models); // bind the columns to the entity columnModelDao.bindColumnToObject(header, tableId); // create some test rows.
List<Row> rows = TableModelTestUtils.createRows(models, 5); RawRowSet set = new RawRowSet(TableModelUtils.getHeaders(models), null, tableId, rows); // Append the rows to the table tableRowTruthDao.appendRowSetToTable(adminUserIdString, tableId, mapper, set); // Append some more rows rows = TableModelTestUtils.createRows(models, 6); set = new RawRowSet(TableModelUtils.getHeaders(models), null, tableId, rows); tableRowTruthDao.appendRowSetToTable(adminUserIdString, tableId, mapper, set); } public void createNewUser() throws NotFoundException { NewUser user = new NewUser(); user.setUserName(UUID.randomUUID().toString()); user.setEmail(user.getUserName() + "@test.com"); Long id = userManager.createUser(user); userManager.getUserInfo(id); } private void resetDatabase() throws Exception { // This gives us a chance to also delete the S3 for table rows tableRowTruthDao.truncateAllRowData(); // Before we start this test we want to start with a clean database migrationManager.deleteAllData(adminUserInfo); // bootstrap to put back the bootstrap data entityBootstrapper.bootstrapAll(); storageQuotaAdminDao.clear(); } private void createFavorite() { userProfileManager.addFavorite(adminUserInfo, fileEntity.getId()); } private void createDoi() throws Exception { serviceProvider.getDoiService().createDoi(adminUserId, project.getId(), ObjectType.ENTITY, 1L); } private void createActivity() throws Exception { activity = new Activity(); activity.setDescription("some desc"); activity = serviceProvider.getActivityService().createActivity(adminUserId, activity); } private void createEvaluation() throws Exception { // initialize Evaluations evaluation = new Evaluation(); evaluation.setName("name"); evaluation.setDescription("description"); evaluation.setContentSource(project.getId()); evaluation.setStatus(EvaluationStatus.PLANNED); evaluation.setSubmissionInstructionsMessage("instructions"); evaluation.setSubmissionReceiptMessage("receipt"); evaluation = serviceProvider.getEvaluationService().createEvaluation(adminUserId, evaluation); evaluation = new Evaluation(); evaluation.setName("name2"); evaluation.setDescription("description"); evaluation.setContentSource(project.getId()); evaluation.setStatus(EvaluationStatus.OPEN); evaluation.setSubmissionInstructionsMessage("instructions"); evaluation.setSubmissionReceiptMessage("receipt"); evaluation = serviceProvider.getEvaluationService().createEvaluation(adminUserId, evaluation); // initialize Participants serviceProvider.getEvaluationService().addParticipant(adminUserId, evaluation.getId()); // initialize Submissions submission = new Submission(); submission.setName("submission1"); submission.setVersionNumber(1L); submission.setEntityId(fileEntity.getId()); submission.setUserId(adminUserIdString); submission.setEvaluationId(evaluation.getId()); submission = entityServletHelper.createSubmission(submission, adminUserId, fileEntity.getEtag()); } public void createAccessApproval() throws Exception { accessApproval = newToUAccessApproval(accessRequirement.getId(), adminUserIdString); accessApproval = servletTestHelper.createAccessApproval(dispatchServlet, accessApproval, adminUserId, new HashMap<String, String>()); } public void createAccessRequirement() throws Exception { // Add an access requirement to this entity accessRequirement = newAccessRequirement(); String entityId = project.getId(); RestrictableObjectDescriptor entitySubjectId = new RestrictableObjectDescriptor(); entitySubjectId.setId(entityId); entitySubjectId.setType(RestrictableObjectType.ENTITY); 
RestrictableObjectDescriptor evaluationSubjectId = new RestrictableObjectDescriptor(); assertNotNull(evaluation); assertNotNull(evaluation.getId()); evaluationSubjectId.setId(evaluation.getId()); evaluationSubjectId.setType(RestrictableObjectType.EVALUATION); accessRequirement.setSubjectIds(Arrays.asList(new RestrictableObjectDescriptor[] { entitySubjectId, evaluationSubjectId })); accessRequirement = servletTestHelper.createAccessRequirement(dispatchServlet, accessRequirement, adminUserId, new HashMap<String, String>()); } private TermsOfUseAccessApproval newToUAccessApproval(Long requirementId, String accessorId) { TermsOfUseAccessApproval aa = new TermsOfUseAccessApproval(); aa.setAccessorId(accessorId); aa.setConcreteType(TermsOfUseAccessApproval.class.getName()); aa.setRequirementId(requirementId); return aa; } public void createV2WikiPages() throws NotFoundException { // Using wikiPageDao until wiki service is created // Create a V2 Wiki page v2RootWiki = new V2WikiPage(); v2RootWiki.setCreatedBy(adminUserIdString); v2RootWiki.setModifiedBy(adminUserIdString); v2RootWiki.setAttachmentFileHandleIds(new LinkedList<String>()); v2RootWiki.getAttachmentFileHandleIds().add(handleOne.getId()); v2RootWiki.setTitle("Root title"); v2RootWiki.setMarkdownFileHandleId(markdownOne.getId()); Map<String, FileHandle> map = new HashMap<String, FileHandle>(); map.put(handleOne.getFileName(), handleOne); List<String> newIds = new ArrayList<String>(); newIds.add(handleOne.getId()); v2RootWiki = v2wikiPageDAO.create(v2RootWiki, map, fileEntity.getId(), ObjectType.ENTITY, newIds); // Create a child v2SubWiki = new V2WikiPage(); v2SubWiki.setCreatedBy(adminUserIdString); v2SubWiki.setModifiedBy(adminUserIdString); v2SubWiki.setParentWikiId(v2RootWiki.getId()); v2SubWiki.setTitle("V2 Sub-wiki-title"); v2SubWiki.setMarkdownFileHandleId(markdownOne.getId()); v2SubWiki = v2wikiPageDAO.create(v2SubWiki, new HashMap<String, FileHandle>(), fileEntity.getId(), ObjectType.ENTITY, new ArrayList<String>()); } /** * Create the entities used by this test. 
* * @throws JSONObjectAdapterException * @throws ServletException * @throws IOException * @throws NotFoundException */ public void createEntities() throws JSONObjectAdapterException, ServletException, IOException, NotFoundException { // Create a project project = new Project(); project.setName("MigrationIntegrationAutowireTest.Project"); project.setEntityType(Project.class.getName()); project = serviceProvider.getEntityService().createEntity(adminUserId, project, null, mockRequest); // Create a file entity fileEntity = new FileEntity(); fileEntity.setName("MigrationIntegrationAutowireTest.FileEntity"); fileEntity.setEntityType(FileEntity.class.getName()); fileEntity.setParentId(project.getId()); fileEntity.setDataFileHandleId(handleOne.getId()); fileEntity = serviceProvider.getEntityService().createEntity(adminUserId, fileEntity, activity.getId(), mockRequest); // Create a folder to trash folderToTrash = new Folder(); folderToTrash.setName("boundForTheTrashCan"); folderToTrash.setParentId(project.getId()); folderToTrash = serviceProvider.getEntityService().createEntity(adminUserId, folderToTrash, null, mockRequest); // Send it to the trash can serviceProvider.getTrashService().moveToTrash(adminUserId, folderToTrash.getId()); } private AccessRequirement newAccessRequirement() { TermsOfUseAccessRequirement dto = new TermsOfUseAccessRequirement(); dto.setConcreteType(dto.getClass().getName()); dto.setAccessType(ACCESS_TYPE.DOWNLOAD); dto.setTermsOfUse("foo"); return dto; } /** * Create the file handles used by this test. * * @throws NotFoundException */ public String createFileHandles() throws NotFoundException { // Create a file handle handleOne = new S3FileHandle(); handleOne.setCreatedBy(adminUserIdString); handleOne.setCreatedOn(new Date()); handleOne.setBucketName("bucket"); handleOne.setKey("mainFileKey"); handleOne.setEtag("etag"); handleOne.setFileName("foo.bar"); handleOne = fileMetadataDao.createFile(handleOne); // Create markdown content markdownOne = new S3FileHandle(); markdownOne.setCreatedBy(adminUserIdString); markdownOne.setCreatedOn(new Date()); markdownOne.setBucketName("bucket"); markdownOne.setKey("markdownFileKey"); markdownOne.setEtag("etag"); markdownOne.setFileName("markdown1"); markdownOne = fileMetadataDao.createFile(markdownOne); // Create a preview preview = new PreviewFileHandle(); preview.setCreatedBy(adminUserIdString); preview.setCreatedOn(new Date()); preview.setBucketName("bucket"); preview.setKey("previewFileKey"); preview.setEtag("etag"); preview.setFileName("bar.txt"); preview = fileMetadataDao.createFile(preview); // Set two as the preview of one fileMetadataDao.setPreviewId(handleOne.getId(), preview.getId()); return handleOne.getId(); } private void createStorageQuota() { storageQuotaManager.setQuotaForUser(adminUserInfo, adminUserInfo, 3000); } // returns a group for use in a team private UserGroup createUserGroups(int index) throws NotFoundException { List<String> adder = new ArrayList<String>(); // Make one group UserGroup parentGroup = new UserGroup(); parentGroup.setIsIndividual(false); parentGroup.setId(userGroupDAO.create(parentGroup).toString()); // Make two users UserGroup parentUser = new UserGroup(); parentUser.setIsIndividual(true); parentUser.setId(userGroupDAO.create(parentUser).toString()); UserGroup siblingUser = new UserGroup(); siblingUser.setIsIndividual(true); siblingUser.setId(userGroupDAO.create(siblingUser).toString()); // Nest one group and two users within the parent group adder.add(parentUser.getId()); 
adder.add(siblingUser.getId()); groupMembersDAO.addMembers(parentGroup.getId(), adder); return parentGroup; } private void createCredentials(UserGroup group) throws Exception { Long principalId = Long.parseLong(group.getId()); authDAO.changePassword(principalId, "ThisIsMySuperSecurePassword"); authDAO.changeSecretKey(principalId); authDAO.changeSessionToken(principalId, null, DomainType.SYNAPSE); } private void createSessionToken(UserGroup group) throws Exception { DBOSessionToken token = new DBOSessionToken(); token.setDomain(DomainType.SYNAPSE); token.setPrincipalId(Long.parseLong(group.getId())); token.setSessionToken(UUID.randomUUID().toString()); token.setValidatedOn(new Date()); basicDao.createOrUpdate(token); } private void createTermsOfUseAgreement(UserGroup group) throws Exception { DBOTermsOfUseAgreement tou = new DBOTermsOfUseAgreement(); tou.setPrincipalId(Long.parseLong(group.getId())); tou.setAgreesToTermsOfUse(Boolean.TRUE); tou.setDomain(DomainType.SYNAPSE); basicDao.createNew(tou); } @SuppressWarnings("serial") private void createMessages(final UserGroup group, String fileHandleId) { MessageToUser dto = new MessageToUser(); // Note: ID is auto generated dto.setCreatedBy(group.getId()); dto.setFileHandleId(fileHandleId); // Note: CreatedOn is set by the DAO dto.setSubject("See you on the other side?"); dto.setRecipients(new HashSet<String>() { { add(group.getId()); } }); dto.setInReplyTo(null); // Note: InReplyToRoot is calculated by the DAO dto = messageDAO.createMessage(dto); messageDAO.createMessageStatus_NewTransaction(dto.getId(), group.getId(), null); Comment dto2 = new Comment(); dto2.setCreatedBy(group.getId()); dto2.setFileHandleId(fileHandleId); dto2.setTargetId("1337"); dto2.setTargetType(ObjectType.ENTITY); commentDAO.createComment(dto2); } private void createTeamsRequestsAndInvitations(UserGroup group) { String otherUserId = BOOTSTRAP_PRINCIPAL.ANONYMOUS_USER.getPrincipalId().toString(); team = new Team(); team.setId(group.getId()); team.setName(UUID.randomUUID().toString()); team.setDescription("test team"); team = teamDAO.create(team); // create a MembershipRqstSubmission MembershipRqstSubmission mrs = new MembershipRqstSubmission(); Date createdOn = new Date(); Date expiresOn = new Date(); mrs.setCreatedOn(createdOn); mrs.setExpiresOn(expiresOn); mrs.setMessage("Please let me join the team."); mrs.setTeamId("" + group.getId()); // need another valid user group mrs.setUserId(otherUserId); membershipRqstSubmissionDAO.create(mrs); // create a MembershipInvtnSubmission MembershipInvtnSubmission mis = new MembershipInvtnSubmission(); mis.setCreatedOn(createdOn); mis.setExpiresOn(expiresOn); mis.setMessage("Please join the team."); mis.setTeamId("" + group.getId()); // need another valid user group mis.setInviteeId(otherUserId); membershipInvtnSubmissionDAO.create(mis); } private void createCommunity(UserGroup group) throws Exception { Team team = new Team(); team.setId(group.getId()); team.setName(UUID.randomUUID().toString()); team.setDescription("test team"); team = teamDAO.create(team); // Create a community community = new Community(); community.setName("MigrationIntegrationAutowireTest.Community"); community.setEntityType(Community.class.getName()); community.setTeamId(team.getId()); community = serviceProvider.getEntityService().createEntity(adminUserId, community, null, mockRequest); communityTeamDAO.create(KeyFactory.stringToKey(community.getId()), Long.parseLong(team.getId())); } private void createParticipantData(UserGroup sampleGroup) throws Exception 
{ Long participantId = Long.parseLong(sampleGroup.getId()) ^ -1L; bridgeParticipantDAO.create(participantId); bridgeUserParticipantMappingDAO.setParticipantIdsForUser(Long.parseLong(sampleGroup.getId()), Collections.<ParticipantDataId> singletonList(new ParticipantDataId(participantId))); ParticipantDataDescriptor participantDataDescriptor = new ParticipantDataDescriptor(); participantDataDescriptor.setName(participantId.toString() + "desc"); participantDataDescriptor.setRepeatType(ParticipantDataRepeatType.ALWAYS); participantDataDescriptor.setRepeatFrequency("0 0 4 * * ? *"); participantDataDescriptor = participantDataDescriptorDAO.createParticipantDataDescriptor(participantDataDescriptor); ParticipantDataColumnDescriptor participantDataColumnDescriptor = new ParticipantDataColumnDescriptor(); participantDataColumnDescriptor.setParticipantDataDescriptorId(participantDataDescriptor.getId()); participantDataColumnDescriptor.setName("a"); participantDataColumnDescriptor.setColumnType(ParticipantDataColumnType.STRING); participantDataDescriptorDAO.createParticipantDataColumnDescriptor(participantDataColumnDescriptor); ParticipantDataColumnDescriptor participantDataColumnDescriptor2 = new ParticipantDataColumnDescriptor(); participantDataColumnDescriptor2.setParticipantDataDescriptorId(participantDataDescriptor.getId()); participantDataColumnDescriptor2.setName("b"); participantDataColumnDescriptor2.setColumnType(ParticipantDataColumnType.STRING); participantDataDescriptorDAO.createParticipantDataColumnDescriptor(participantDataColumnDescriptor2); ParticipantDataRow dataRow = new ParticipantDataRow(); ParticipantDataStringValue stringValue1 = new ParticipantDataStringValue(); stringValue1.setValue("1"); ParticipantDataStringValue stringValue2 = new ParticipantDataStringValue(); stringValue2.setValue("2"); dataRow.setData(ImmutableMap.<String, ParticipantDataValue> builder().put("a", stringValue1).put("b", stringValue2).build()); List<ParticipantDataRow> data = Lists.newArrayList(dataRow); participantDataDAO.append(new ParticipantDataId(participantId), participantDataDescriptor.getId(), data, Lists.newArrayList(participantDataColumnDescriptor, participantDataColumnDescriptor2)); ParticipantDataStatus status = new ParticipantDataStatus(); status.setParticipantDataDescriptorId(participantDataDescriptor.getId()); status.setLastEntryComplete(false); status.setLastPrompted(new Date()); status.setLastStarted(new Date()); participantDataStatusDAO.update(Collections.<ParticipantDataStatus> singletonList(status), ImmutableMap .<String, ParticipantDataId> builder().put(participantDataDescriptor.getId(), new ParticipantDataId(participantId)).build()); } @After public void after() throws Exception { // to cleanup for this test we delete all in the database resetDatabase(); } // test that if we create a group with members, back it up, // add members, and restore, the extra members are removed // (This was broken in PLFM-2757) @Test public void testCertifiedUsersGroupMigration() throws Exception { String groupId = BOOTSTRAP_PRINCIPAL.CERTIFIED_USERS.getPrincipalId().toString(); List<UserGroup> members = groupMembersDAO.getMembers(groupId); List<BackupInfo> backupList = backupAllOfType(MigrationType.PRINCIPAL); // add new member(s) UserGroup yetAnotherUser = new UserGroup(); yetAnotherUser.setIsIndividual(true); yetAnotherUser.setId(userGroupDAO.create(yetAnotherUser).toString()); groupMembersDAO.addMembers(groupId, Collections.singletonList(yetAnotherUser.getId())); // membership is different because new 
user has been added assertFalse(members.equals(groupMembersDAO.getMembers(groupId))); // Now restore all of the data for (BackupInfo info : backupList) { String fileName = info.getFileName(); assertNotNull("Did not find a backup file name for type: " + info.getType(), fileName); restoreFromBackup(info.getType(), fileName); } // should be back to normal assertEquals(members, groupMembersDAO.getMembers(groupId)); } /** * This is the actual test. The rest of the class is setup and tear down. * * @throws Exception */ @Test public void testRoundTrip() throws Exception { // Get the list of primary types MigrationTypeList primaryTypesList = entityServletHelper.getPrimaryMigrationTypes(adminUserId); assertNotNull(primaryTypesList); assertNotNull(primaryTypesList.getList()); assertTrue(primaryTypesList.getList().size() > 0); // Get the counts before we start MigrationTypeCounts startCounts = entityServletHelper.getMigrationTypeCounts(adminUserId); validateStartingCount(startCounts); // This test will backup all data, delete it, then restore it. List<BackupInfo> backupList = new ArrayList<BackupInfo>(); for (MigrationType type : primaryTypesList.getList()) { // Backup each type backupList.addAll(backupAllOfType(type)); } // Now delete all data in reverse order for (int i = primaryTypesList.getList().size() - 1; i >= 0; i--) { MigrationType type = primaryTypesList.getList().get(i); deleteAllOfType(type); } // After deleting, the counts should be 0 except for a few special cases MigrationTypeCounts afterDeleteCounts = entityServletHelper.getMigrationTypeCounts(adminUserId); assertNotNull(afterDeleteCounts); assertNotNull(afterDeleteCounts.getList()); for (int i = 0; i < afterDeleteCounts.getList().size(); i++) { MigrationTypeCount afterDelete = afterDeleteCounts.getList().get(i); // Special cases for the not-deleted migration admin if (afterDelete.getType() == MigrationType.PRINCIPAL) { assertEquals("There should be 4 UserGroups remaining after the delete: " + BOOTSTRAP_PRINCIPAL.THE_ADMIN_USER + ", " + "Administrators" + ", " + BOOTSTRAP_PRINCIPAL.PUBLIC_GROUP + ", and " + BOOTSTRAP_PRINCIPAL.AUTHENTICATED_USERS_GROUP, new Long(4), afterDelete.getCount()); } else if (afterDelete.getType() == MigrationType.GROUP_MEMBERS || afterDelete.getType() == MigrationType.CREDENTIAL) { assertEquals("Counts do not match for: " + afterDelete.getType().name(), new Long(1), afterDelete.getCount()); } else { assertEquals("Counts are non-zero for: " + afterDelete.getType().name(), new Long(0), afterDelete.getCount()); } } // Now restore all of the data for (BackupInfo info : backupList) { String fileName = info.getFileName(); assertNotNull("Did not find a backup file name for type: " + info.getType(), fileName); restoreFromBackup(info.getType(), fileName); } // The counts should all be back MigrationTypeCounts finalCounts = entityServletHelper.getMigrationTypeCounts(adminUserId); for (int i = 1; i < finalCounts.getList().size(); i++) { MigrationTypeCount startCount = startCounts.getList().get(i); MigrationTypeCount afterRestore = finalCounts.getList().get(i); assertEquals("Count for " + startCount.getType().name() + " does not match", startCount.getCount(), afterRestore.getCount()); } } private static class BackupInfo { MigrationType type; String fileName; public BackupInfo(MigrationType type, String fileName) { super(); this.type = type; this.fileName = fileName; } public MigrationType getType() { return type; } public String getFileName() { return fileName; } } /** * There must be at least one object for every type of
migratable object. * * @param startCounts */ private void validateStartingCount(MigrationTypeCounts startCounts) { assertNotNull(startCounts); assertNotNull(startCounts.getList()); List<MigrationType> typesToMigrate = new LinkedList<MigrationType>(); for (MigrationType tm : MigrationType.values()) { if (migrationManager.isMigrationTypeUsed(adminUserInfo, tm)) { typesToMigrate.add(tm); } } assertEquals( "This test requires at least one object to exist for each MigrationType. Please create a new object of the new MigrationType in the before() method of this test.", typesToMigrate.size(), startCounts.getList().size()); for (MigrationTypeCount count : startCounts.getList()) { assertTrue("This test requires at least one object to exist for each MigrationType. Please create a new object of type: " + count.getType() + " in the before() method of this test.", count.getCount() > 0); } } /** * Extract the filename from the full url. * * @param fullUrl * @return */ public String getFileNameFromUrl(String fullUrl) { int index = fullUrl.lastIndexOf("/"); return fullUrl.substring(index + 1, fullUrl.length()); } /** * Backup all data * * @param type * @return * @throws Exception */ private List<BackupInfo> backupAllOfType(MigrationType type) throws Exception { RowMetadataResult list = entityServletHelper.getRowMetadata(adminUserId, type, Long.MAX_VALUE, 0); if (list == null) return null; // Backup batches by their level in the tree ListBucketProvider provider = new ListBucketProvider(); MigrationUtils.bucketByTreeLevel(list.getList().iterator(), provider); List<BackupInfo> result = new ArrayList<BackupInfo>(); List<List<Long>> listOfBuckets = provider.getListOfBuckets(); for (List<Long> batch : listOfBuckets) { if (batch.size() > 0) { String fileName = backup(type, batch); result.add(new BackupInfo(type, fileName)); } } return result; } private String backup(MigrationType type, List<Long> tobackup) throws Exception { // Start the backup job IdList ids = new IdList(); ids.setList(tobackup); BackupRestoreStatus status = entityServletHelper.startBackup(adminUserId, type, ids); // wait for it.. waitForDaemon(status); status = entityServletHelper.getBackupRestoreStatus(adminUserId, status.getId()); assertNotNull(status.getBackupUrl()); return getFileNameFromUrl(status.getBackupUrl()); } private void restoreFromBackup(MigrationType type, String fileName) throws Exception { RestoreSubmission sub = new RestoreSubmission(); sub.setFileName(fileName); BackupRestoreStatus status = entityServletHelper.startRestore(adminUserId, type, sub); // wait for it waitForDaemon(status); } /** * Delete all data for a type. * * @param type * @throws ServletException * @throws IOException * @throws JSONObjectAdapterException */ private void deleteAllOfType(MigrationType type) throws Exception { IdList idList = getIdListOfAllOfType(type); if (idList == null) return; MigrationTypeCount result = entityServletHelper.deleteMigrationType(adminUserId, type, idList); System.out.println("Deleted: " + result); } /** * List all of the IDs for a type.
* * @param type * @return * @throws ServletException * @throws IOException * @throws JSONObjectAdapterException */ private IdList getIdListOfAllOfType(MigrationType type) throws Exception { RowMetadataResult list = entityServletHelper.getRowMetadata(adminUserId, type, Long.MAX_VALUE, 0); if (list.getTotalCount() < 1) return null; // Create the backup list List<Long> toBackup = new LinkedList<Long>(); for (RowMetadata row : list.getList()) { toBackup.add(row.getId()); } IdList idList = new IdList(); idList.setList(toBackup); return idList; } /** * Wait for a daemon to process a job. * * @param status * @throws InterruptedException * @throws JSONObjectAdapterException * @throws IOException * @throws ServletException */ private void waitForDaemon(BackupRestoreStatus status) throws Exception { long start = System.currentTimeMillis(); while (DaemonStatus.COMPLETED != status.getStatus()) { assertFalse("Daemon failed " + status.getErrorDetails(), DaemonStatus.FAILED == status.getStatus()); System.out.println("Waiting for backup/restore daemon. Message: " + status.getProgresssMessage()); Thread.sleep(1000); long elapse = System.currentTimeMillis() - start; assertTrue("Timed out waiting for a backup/restore daemon", elapse < MAX_WAIT_MS); status = entityServletHelper.getBackupRestoreStatus(adminUserId, status.getId()); } } }
package org.geotools.swing.event; import org.fest.swing.edt.FailOnThreadViolationRepaintManager; import org.junit.BeforeClass; import java.awt.Rectangle; import java.awt.event.MouseEvent; import java.awt.geom.AffineTransform; import java.awt.geom.Point2D; import java.awt.geom.Rectangle2D; import org.fest.swing.edt.GuiActionRunner; import org.fest.swing.edt.GuiTask; import org.geotools.geometry.DirectPosition2D; import org.geotools.geometry.jts.ReferencedEnvelope; import org.geotools.referencing.crs.DefaultGeographicCRS; import org.geotools.swing.testutils.MockMapPane; import org.junit.Before; import org.junit.Test; import static org.junit.Assert.*; /** * Unit tests for MapMouseEvent. * * @author Michael Bedward * @since 8.0 * @source $URL$ * @version $Id$ */ public class MapMouseEventTest { private static final double TOL = 1.0e-6; // screen area with aspect 1:1 private static final Rectangle SCREEN = new Rectangle(100, 100); // world area with aspect 2:1 private static final ReferencedEnvelope WORLD = new ReferencedEnvelope( 149.0, 153.0, -33, -35, DefaultGeographicCRS.WGS84); private static final int x = SCREEN.width / 2; private static final int y = SCREEN.height / 2; private MockMapPane pane; private MouseEvent ev; private MapMouseEvent mapEv; @BeforeClass public static void setupOnce() { FailOnThreadViolationRepaintManager.install(); } @Before public void setup() throws Exception { GuiActionRunner.execute(new GuiTask() { @Override protected void executeInEDT() throws Throwable { pane = new MockMapPane(); pane.setScreenArea(SCREEN); pane.setDisplayArea(WORLD); } }); } @Test public void getSource() throws Exception { createEvent(0, 0); assertEquals(pane, mapEv.getSource()); } @Test public void getWorldPos() throws Exception { AffineTransform tr = pane.getMapContent().getViewport().getScreenToWorld(); Point2D p = new Point2D.Double(x, y); tr.transform(p, p); createEvent(x, y); DirectPosition2D pos = mapEv.getWorldPos(); assertEquals(p.getX(), pos.x, TOL); assertEquals(p.getY(), pos.y, TOL); } @Test public void getEnvelopeByPixels() throws Exception { AffineTransform tr = pane.getMapContent().getViewport().getScreenToWorld(); Rectangle2D screenRect = new Rectangle2D.Double(x - 0.5, y - 0.5, 1, 1); Rectangle2D expected = tr.createTransformedShape(screenRect).getBounds2D(); createEvent(x, y); ReferencedEnvelope actual = mapEv.getEnvelopeByPixels(1); assertRect(expected, actual); } @Test public void getEnvelopeByWorld() throws Exception { final double w = 0.1; AffineTransform tr = pane.getMapContent().getViewport().getScreenToWorld(); Point2D p = new Point2D.Double(x, y); tr.transform(p, p); Rectangle2D expected = new Rectangle2D.Double(p.getX() - w/2, p.getY() - w/2, w, w); createEvent(x, y); ReferencedEnvelope actual = mapEv.getEnvelopeByWorld(0.1); assertRect(expected, actual); } private void createEvent(final int x, final int y) throws Exception { GuiActionRunner.execute(new GuiTask() { @Override protected void executeInEDT() throws Throwable { ev = new MouseEvent(pane, MouseEvent.MOUSE_PRESSED, 0L, 0, x, y, 1, false); mapEv = new MapMouseEvent(pane, ev); } }); } private void assertRect(Rectangle2D expected, ReferencedEnvelope actual) { assertEquals(expected.getMinX(), actual.getMinX(), TOL); assertEquals(expected.getMaxX(), actual.getMaxX(), TOL); assertEquals(expected.getMinY(), actual.getMinY(), TOL); assertEquals(expected.getMaxY(), actual.getMaxY(), TOL); } }
package org.motechproject.cmslite.api.web; import org.apache.log4j.Logger; import org.ektorp.AttachmentInputStream; import org.motechproject.cmslite.api.CMSLiteService; import org.motechproject.cmslite.api.ResourceNotFoundException; import org.motechproject.cmslite.api.ResourceQuery; import org.springframework.context.ApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.sound.sampled.AudioFileFormat; import javax.sound.sampled.AudioFormat; import javax.sound.sampled.AudioInputStream; import javax.sound.sampled.AudioSystem; import java.io.DataInputStream; import java.io.IOException; import java.io.OutputStream; import java.util.Arrays; public class ResourceServlet extends HttpServlet { private static ApplicationContext context; private Logger logger = Logger.getLogger(this.getClass()); synchronized static public ApplicationContext getContext() { if (context == null) { context = new ClassPathXmlApplicationContext("applicationCmsLiteApi.xml"); } return context; } @Override protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { CMSLiteService cmsLiteService = (CMSLiteService) getContext().getBean("cmsLiteService"); ResourceQuery resourceQuery = resourceQuery(request); AttachmentInputStream contentStream = null; try { logger.info("Getting resource for : " + resourceQuery.getLanguage() + ":" + resourceQuery.getName()); contentStream = (AttachmentInputStream) cmsLiteService.getContent(resourceQuery); long contentLength = contentStream.getContentLength(); response.setStatus(HttpServletResponse.SC_OK); response.setHeader("Content-Type", "audio/x-wav"); response.setHeader("Accept-Ranges", "bytes"); response.setContentLength((int) contentLength); OutputStream fo = response.getOutputStream(); byte [] buffer = new byte [1024*4]; int read ; while((read=contentStream.read(buffer))>=0){ fo.write(buffer,0,read); } } catch (ResourceNotFoundException e) { response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); logger.error("Resource not found for : " + resourceQuery.getLanguage() + ":" + resourceQuery.getName() + "\n" + Arrays.toString(e.getStackTrace())); } finally { if(contentStream != null) contentStream.close(); request.getInputStream().close(); response.getOutputStream().flush(); response.getOutputStream().close(); } } private ResourceQuery resourceQuery(HttpServletRequest request) { String requestURL = request.getRequestURI(); String contextPathOnly = request.getContextPath(); String servletPathOnly = request.getServletPath(); String[] resourcePaths = requestURL.replace(contextPathOnly, "").replace(servletPathOnly, "").substring(1).split("/"); String language = resourcePaths[0]; String name = resourcePaths[1]; return new ResourceQuery(name, language); } }
package com.shinemo.mpush.cs.zk.listener; import java.util.List; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.recipes.cache.ChildData; import org.apache.curator.framework.recipes.cache.TreeCacheEvent; import org.apache.curator.framework.recipes.cache.TreeCacheEvent.Type; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.shinemo.mpush.common.Application; import com.shinemo.mpush.cs.manage.ServerManage; import com.shinemo.mpush.tools.GenericsUtil; import com.shinemo.mpush.tools.Jsons; import com.shinemo.mpush.tools.spi.ServiceContainer; import com.shinemo.mpush.tools.zk.ZKPath; import com.shinemo.mpush.tools.zk.ZkRegister; import com.shinemo.mpush.tools.zk.listener.DataChangeListener; public abstract class AbstractDataChangeListener<T extends Application> extends DataChangeListener { protected static ZkRegister zkRegister = ServiceContainer.getInstance(ZkRegister.class); private static final Logger log = LoggerFactory.getLogger(AbstractDataChangeListener.class); private Class<T> clazz; @SuppressWarnings("unchecked") public AbstractDataChangeListener() { clazz = (Class<T>) GenericsUtil.getSuperClassGenericType(this.getClass(), 0); } public void dataChanged(CuratorFramework client, TreeCacheEvent event, String path) throws Exception { String data = ""; if (event.getData() != null) { data = ToStringBuilder.reflectionToString(event.getData(), ToStringStyle.MULTI_LINE_STYLE); } if (Type.NODE_ADDED == event.getType()) { dataAddOrUpdate(event.getData(),clazz); } else if (Type.NODE_REMOVED == event.getType()) { dataRemove(event.getData()); } else if (Type.NODE_UPDATED == event.getType()) { dataAddOrUpdate(event.getData(),clazz); } else { log.warn(this.getClass().getSimpleName()+"other path:" + path + "," + event.getType().name() + "," + data); } } public void initData() { log.warn("start init "+ this.getClass().getSimpleName()+"server data"); _initData(); log.warn("end init "+ this.getClass().getSimpleName()+"server data"); } public abstract String getRegisterPath(); public abstract ServerManage<T> getServerManage(); private void _initData() { List<String> rawData = zkRegister.getChildrenKeys(getRegisterPath()); for (String raw : rawData) { String fullPath = ZKPath.CONNECTION_SERVER.getFullPath(raw); T app = getServerApplication(fullPath,clazz); getServerManage().addOrUpdate(fullPath, app); } } private void dataRemove(ChildData data) { String path = data.getPath(); getServerManage().remove(path); } private void dataAddOrUpdate(ChildData data,Class<T> clazz) { String path = data.getPath(); byte[] rawData = data.getData(); T serverApp = Jsons.fromJson(rawData, clazz); getServerManage().addOrUpdate(path, serverApp); } private T getServerApplication(String fullPath,Class<T> clazz) { String rawApp = zkRegister.get(fullPath); T app = Jsons.fromJson(rawApp,clazz); return app; } }
package org.mustbe.consulo.msil.lang.stubbing; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import org.consulo.lombok.annotations.Logger; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.joou.UByte; import org.joou.UInteger; import org.joou.UShort; import org.mustbe.consulo.RequiredReadAction; import org.mustbe.consulo.dotnet.DotNetTypes; import org.mustbe.consulo.dotnet.psi.DotNetAttributeUtil; import org.mustbe.consulo.dotnet.psi.DotNetExpression; import org.mustbe.consulo.dotnet.psi.DotNetNamedElement; import org.mustbe.consulo.dotnet.psi.DotNetParameter; import org.mustbe.consulo.dotnet.psi.DotNetType; import org.mustbe.consulo.dotnet.psi.DotNetTypeDeclaration; import org.mustbe.consulo.dotnet.resolve.DotNetPsiSearcher; import org.mustbe.consulo.dotnet.resolve.DotNetTypeRef; import org.mustbe.consulo.msil.lang.psi.MsilConstantValue; import org.mustbe.consulo.msil.lang.psi.MsilCustomAttribute; import org.mustbe.consulo.msil.lang.psi.MsilCustomAttributeSignature; import org.mustbe.consulo.msil.lang.psi.MsilFieldEntry; import org.mustbe.consulo.msil.lang.psi.MsilTokens; import org.mustbe.consulo.msil.lang.psi.impl.MsilNativeTypeImpl; import org.mustbe.consulo.msil.lang.psi.impl.MsilUserTypeImpl; import org.mustbe.consulo.msil.lang.stubbing.values.MsiCustomAttributeValue; import org.mustbe.consulo.msil.lang.stubbing.values.MsilCustomAttributeEnumValue; import org.mustbe.dotnet.asm.STypeSignatureParser; import org.mustbe.dotnet.msil.decompiler.textBuilder.util.XStubUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.CharsetToolkit; import com.intellij.psi.PsiElement; import com.intellij.psi.tree.IElementType; import com.intellij.psi.util.PsiUtilCore; import com.intellij.util.containers.hash.LinkedHashMap; import edu.arizona.cs.mbel.io.ByteBuffer; import edu.arizona.cs.mbel.mbel.AbstractTypeReference; import edu.arizona.cs.mbel.mbel.AssemblyTypeRef; import edu.arizona.cs.mbel.signature.ClassTypeSignature; import edu.arizona.cs.mbel.signature.TypeSignature; import edu.arizona.cs.mbel.signature.TypeSignatureParser; import edu.arizona.cs.mbel.signature.ValueTypeSignature; /** * @author VISTALL * @since 10.07.2015 */ @Logger public class MsilCustomAttributeStubber { @NotNull @RequiredReadAction public static MsilCustomAttributeArgumentList build(MsilCustomAttribute attribute) { MsilCustomAttributeSignature signature = attribute.getSignature(); byte[] bytes = signature.getBytes(); ByteBuffer byteBuffer = new ByteBuffer(bytes); List<MsiCustomAttributeValue> constructorArguments = new ArrayList<MsiCustomAttributeValue>(); Map<String, MsiCustomAttributeValue> namedArguments = new LinkedHashMap<String, MsiCustomAttributeValue>(); if(byteBuffer.canRead() && byteBuffer.getShort() == 1) { boolean failed = false; DotNetParameter[] parameters = attribute.getParameterList().getParameters(); for(DotNetParameter parameter : parameters) { try { DotNetType type = parameter.getType(); if(type == null) { throw new NullPointerException(); } TypeSignature typeSignature = toTypeSignature(type); MsiCustomAttributeValue attributeValue = buildArgument(attribute, typeSignature, byteBuffer); if(attributeValue != null) { constructorArguments.add(attributeValue); } } catch(Exception e) { failed = true; break; } } if(!failed && byteBuffer.canRead()) { try { int count = byteBuffer.getShort(); for(int i = 0; i < count; i++) { byteBuffer.get(); //type 0x53 field 
0x54 property TypeSignature typeSignature = TypeSignatureParser.parse(byteBuffer, null); if(typeSignature == null) { continue; } CharSequence name = XStubUtil.getString(byteBuffer, CharsetToolkit.UTF8_CHARSET); if(name.length() == 0) { continue; } MsiCustomAttributeValue attributeValue = buildArgument(attribute, typeSignature, byteBuffer); if(attributeValue != null) { namedArguments.put(name.toString(), attributeValue); } } } catch(Exception e) { LOGGER.warn(e); } } } return new MsilCustomAttributeArgumentList(constructorArguments, namedArguments); } @RequiredReadAction private static TypeSignature toTypeSignature(@NotNull DotNetType type) { if(type instanceof MsilNativeTypeImpl) { IElementType elementType = PsiUtilCore.getElementType(((MsilNativeTypeImpl) type).getTypeElement()); if(elementType == MsilTokens.STRING_KEYWORD) { return TypeSignature.STRING; } else if(elementType == MsilTokens.BOOL_KEYWORD) { return TypeSignature.BOOLEAN; } /*else if(elementType == MsilTokens.INT_KEYWORD) { return TypeSignature.INT_PTR; } else if(elementType == MsilTokens.UINT_KEYWORD) { return TypeSignature.UINT_PTR; } */ else if(elementType == MsilTokens.INT8_KEYWORD) { return TypeSignature.I1; } else if(elementType == MsilTokens.UINT8_KEYWORD) { return TypeSignature.U1; } else if(elementType == MsilTokens.INT16_KEYWORD) { return TypeSignature.I2; } else if(elementType == MsilTokens.UINT16_KEYWORD) { return TypeSignature.U2; } else if(elementType == MsilTokens.INT32_KEYWORD) { return TypeSignature.I4; } else if(elementType == MsilTokens.UINT32_KEYWORD) { return TypeSignature.U4; } else if(elementType == MsilTokens.INT64_KEYWORD) { return TypeSignature.I8; } else if(elementType == MsilTokens.UINT64_KEYWORD) { return TypeSignature.U8; } else if(elementType == MsilTokens.FLOAT32_KEYWORD) { return TypeSignature.R4; } else if(elementType == MsilTokens.FLOAT64_KEYWORD) { return TypeSignature.R8; } } else if(type instanceof MsilUserTypeImpl) { String referenceText = ((MsilUserTypeImpl) type).getReferenceText(); switch(((MsilUserTypeImpl) type).getTypeResoleKind()) { case STRUCT: return new ValueTypeSignature(parse(referenceText)); case UNKNOWN: case CLASS: return new ClassTypeSignature(parse(referenceText)); } } LOGGER.error("Unknown how convert: " + type); return null; } @NotNull private static AbstractTypeReference parse(String referenceText) { return new AssemblyTypeRef(null, StringUtil.getPackageName(referenceText), StringUtil.getShortName(referenceText)); } @RequiredReadAction private static MsiCustomAttributeValue buildArgument(@NotNull PsiElement scope, TypeSignature typeSignature, ByteBuffer byteBuffer) { if(typeSignature == TypeSignature.I1) { return new MsiCustomAttributeValue(typeSignature, byteBuffer.get()); } else if(typeSignature == TypeSignature.U1) { return new MsiCustomAttributeValue(typeSignature, UByte.valueOf(byteBuffer.get() & 0xFF)); } else if(typeSignature == TypeSignature.I2) { return new MsiCustomAttributeValue(typeSignature, byteBuffer.getShort()); } else if(typeSignature == TypeSignature.U2) { return new MsiCustomAttributeValue(typeSignature, UShort.valueOf(byteBuffer.getShort() & 0xFFFF)); } else if(typeSignature == TypeSignature.I4) { return new MsiCustomAttributeValue(typeSignature, byteBuffer.getInt()); } else if(typeSignature == TypeSignature.U4) { return new MsiCustomAttributeValue(typeSignature, UInteger.valueOf(byteBuffer.getInt() & 0xFFFFFFFFL)); } /*else if(typeSignature == TypeSignature.I8) { return new MsiCustomAttributeValue(typeSignature, byteBuffer.getLong()); } else 
if(typeSignature == TypeSignature.U8) { return new MsiCustomAttributeValue(typeSignature, ULong); } */ else if(typeSignature == TypeSignature.R4) { return new MsiCustomAttributeValue(typeSignature, Float.intBitsToFloat(byteBuffer.getInt())); } /*else if(typeSignature == TypeSignature.R8) { return new MsiCustomAttributeValue(typeSignature, byteBuffer.getDouble()); } */ else if(typeSignature == TypeSignature.BOOLEAN) { return new MsiCustomAttributeValue(typeSignature, byteBuffer.get() == 1); } else if(typeSignature == TypeSignature.STRING) { return new MsiCustomAttributeValue(typeSignature, XStubUtil.getString(byteBuffer, CharsetToolkit.UTF8_CHARSET)); } else if(typeSignature instanceof ClassTypeSignature) { String vmQName = ((ClassTypeSignature) typeSignature).getClassType().getFullName(); if(vmQName.equals(DotNetTypes.System.Type)) { CharSequence text = XStubUtil.getString(byteBuffer, CharsetToolkit.UTF8_CHARSET); TypeSignature stringTypeSignature = STypeSignatureParser.parse(text); return new MsiCustomAttributeValue(typeSignature, stringTypeSignature); } } else if(typeSignature instanceof ValueTypeSignature) { String vmQName = ((ValueTypeSignature) typeSignature).getValueType().getFullName(); DotNetTypeDeclaration resolvedElement = DotNetPsiSearcher.getInstance(scope.getProject()).findType(vmQName, scope.getResolveScope()); if(resolvedElement != null && resolvedElement.isEnum()) { Number value = getValue(scope, byteBuffer, resolvedElement.getTypeRefForEnumConstants()); if(value != null) { Map<Long, String> map = new HashMap<Long, String>(); long l = value.longValue(); DotNetNamedElement[] members = resolvedElement.getMembers(); for(DotNetNamedElement member : members) { if(member instanceof MsilFieldEntry && ((MsilFieldEntry) member).hasModifier(MsilTokens.LITERAL_KEYWORD)) { DotNetExpression initializer = ((MsilFieldEntry) member).getInitializer(); if(!(initializer instanceof MsilConstantValue)) { continue; } String valueText = ((MsilConstantValue) initializer).getValueText(); map.put(Long.parseLong(valueText), member.getName()); } } if(DotNetAttributeUtil.hasAttribute(resolvedElement, DotNetTypes.System.FlagsAttribute)) { List<String> fields = new ArrayList<String>(); for(Map.Entry<Long, String> entry : map.entrySet()) { if((l & entry.getKey()) == entry.getKey()) { fields.add(entry.getValue()); } } return new MsilCustomAttributeEnumValue(typeSignature, l, fields); } else { String stringValue = map.get(l); if(stringValue != null) { return new MsilCustomAttributeEnumValue(typeSignature, value, Collections.singletonList(stringValue)); } else { return new MsiCustomAttributeValue(typeSignature, value); } } } else { throw new IllegalArgumentException("Cant get value from enum: " + vmQName); } } } LOGGER.error("Cant get value for: " + typeSignature); return null; } @Nullable @RequiredReadAction private static Number getValue(PsiElement scope, ByteBuffer byteBuffer, DotNetTypeRef typeRef) { PsiElement resolvedElement = typeRef.resolve(scope).getElement(); String qName = null; if(resolvedElement instanceof DotNetTypeDeclaration) { qName = ((DotNetTypeDeclaration) resolvedElement).getVmQName(); } if(qName == null) { return null; } if(qName.equals(DotNetTypes.System.Int32)) { return byteBuffer.getInt(); } else if(qName.equals(DotNetTypes.System.UInt32)) { return byteBuffer.getInt() & 0xFFFFFFFFL; } else if(qName.equals(DotNetTypes.System.Int16)) { return byteBuffer.getShort(); } else if(qName.equals(DotNetTypes.System.UInt16)) { return byteBuffer.getShort() & 0xFFFF; } else 
if(qName.equals(DotNetTypes.System.Byte)) { return byteBuffer.get() & 0xFF; } else if(qName.equals(DotNetTypes.System.SByte)) { return byteBuffer.getShort(); } LOGGER.warn("Unknown type: " + qName); return null; } }
//This code is developed as part of the Java CoG Kit project //This message may not be removed or altered. package org.globus.cog.abstraction.coaster.service.job.manager; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import org.apache.log4j.Logger; import org.globus.cog.abstraction.coaster.service.LocalTCPService; import org.globus.cog.abstraction.coaster.service.job.manager.Worker.ShutdownCallback; import org.globus.cog.abstraction.impl.common.AbstractionFactory; import org.globus.cog.abstraction.impl.common.ProviderMethodException; import org.globus.cog.abstraction.impl.common.StatusImpl; import org.globus.cog.abstraction.impl.common.execution.WallTime; import org.globus.cog.abstraction.impl.common.task.ExecutionServiceImpl; import org.globus.cog.abstraction.impl.common.task.ExecutionTaskHandler; import org.globus.cog.abstraction.impl.common.task.InvalidProviderException; import org.globus.cog.abstraction.impl.common.task.InvalidServiceContactException; import org.globus.cog.abstraction.impl.common.task.JobSpecificationImpl; import org.globus.cog.abstraction.impl.common.task.TaskImpl; import org.globus.cog.abstraction.impl.common.task.TaskSubmissionException; import org.globus.cog.abstraction.interfaces.ExecutionService; import org.globus.cog.abstraction.interfaces.JobSpecification; import org.globus.cog.abstraction.interfaces.Status; import org.globus.cog.abstraction.interfaces.Task; import org.globus.cog.abstraction.interfaces.TaskHandler; import org.globus.cog.karajan.workflow.service.channels.ChannelContext; public class WorkerManager extends Thread { public static final Logger logger = Logger.getLogger(WorkerManager.class); /** * We allow for at least one minute of extra time compared to the requested * walltime */ public static final Seconds TIME_RESERVE = new Seconds(60); public static final File scriptDir = new File(System .getProperty("user.home") + File.separator + ".globus" + File.separator + "coasters"); public static final String SCRIPT = "worker.pl"; public static final int OVERALLOCATION_FACTOR = 10; public static final int MAX_WORKERS = 256; public static final int MAX_STARTING_WORKERS = 32; public static final List coasterAttributes = Arrays .asList(new String[] { "coasterspernode", "coasterinternalip", "coasterworkermaxwalltime" }); private SortedMap ready; private Map ids; private Set busy; private Set startingTasks; private Map requested; private boolean shutdownFlag; private LinkedList allocationRequests; private File script; private IDGenerator sr; private URI callbackURI; private LocalTCPService localService; private TaskHandler handler; private int currentWorkers; public WorkerManager(LocalTCPService localService) throws IOException { super("Worker Manager"); ready = new TreeMap(); busy = new HashSet(); ids = new HashMap(); startingTasks = new HashSet(); requested = new HashMap(); allocationRequests = new LinkedList(); this.localService = localService; this.callbackURI = localService.getContact(); writeScript(); sr = new IDGenerator(); handler = new ExecutionTaskHandler(); } private void writeScript() throws IOException { 
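// Materialize the bundled worker script: create ~/.globus/coasters if needed, copy the worker.pl classpath resource into a temp .pl file there, and keep the resulting path for the worker job submissions built later.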
scriptDir.mkdirs(); if (!scriptDir.exists()) { throw new IOException("Failed to create script dir (" + scriptDir + ")"); } script = File.createTempFile("cscript", ".pl", scriptDir); script.deleteOnExit(); InputStream is = WorkerManager.class.getClassLoader() .getResourceAsStream(SCRIPT); if (is == null) { throw new IOException("Could not find resource in class path: " + SCRIPT); } FileOutputStream fos = new FileOutputStream(script); byte[] buf = new byte[1024]; int len = is.read(buf); while (len != -1) { fos.write(buf, 0, len); len = is.read(buf); } fos.close(); is.close(); } public void run() { try { if (logger.isInfoEnabled()) { startInfoThread(); } AllocationRequest req; while (!shutdownFlag) { synchronized (allocationRequests) { while (allocationRequests.isEmpty()) { allocationRequests.wait(); } req = (AllocationRequest) allocationRequests .removeFirst(); if (logger.isInfoEnabled()) { logger.info("Got allocation request: " + req); } } try { startWorker(new Seconds(req.maxWallTime.getSeconds()) .multiply(OVERALLOCATION_FACTOR) .add(TIME_RESERVE), req.prototype); } catch (NoClassDefFoundError e) { req.prototype.setStatus(new StatusImpl(Status.FAILED, e .getMessage(), new TaskSubmissionException(e))); } catch (Exception e) { req.prototype.setStatus(new StatusImpl(Status.FAILED, e .getMessage(), e)); } } } catch (InterruptedException e) { e.printStackTrace(); } catch (Error e) { e.printStackTrace(); System.exit(126); } } private void startWorker(Seconds maxWallTime, Task prototype) throws InvalidServiceContactException, InvalidProviderException, ProviderMethodException { String numWorkersString = (String) ((JobSpecification) prototype .getSpecification()).getAttribute("coastersPerNode"); int numWorkers; if (numWorkersString == null) { numWorkers = 1; } else { numWorkers = Integer.parseInt(numWorkersString); } String workerMaxwalltimeString = (String) ((JobSpecification) prototype .getSpecification()).getAttribute("coasterWorkerMaxwalltime"); if (workerMaxwalltimeString != null) { // override the computed maxwalltime maxWallTime = new Seconds(WallTime.timeToSeconds(workerMaxwalltimeString)); logger.debug("Overridden worker maxwalltime is "+maxWallTime); } logger .info("Starting new worker set with " + numWorkers + " workers"); Task t = new TaskImpl(); t.setType(Task.JOB_SUBMISSION); t.setSpecification(buildSpecification(prototype)); copyAttributes(t, prototype, maxWallTime); t.setRequiredService(1); t.setService(0, buildService(prototype)); synchronized (this) { if (!startingTasks.contains(prototype)) { return; } } Map newlyRequested = new HashMap(); for (int n = 0; n < numWorkers; n++) { int id = sr.nextInt(); if (logger.isInfoEnabled()) { logger.info("Starting worker with id=" + id + " and maxwalltime=" + maxWallTime + "s"); } String sid = String.valueOf(id); ((JobSpecification) t.getSpecification()).addArgument(sid); try { Worker wr = new Worker(this, sid, maxWallTime, t, prototype); newlyRequested.put(sid, wr); } catch (Exception e) { prototype.setStatus(new StatusImpl(Status.FAILED, e .getMessage(), e)); } } try { handler.submit(t); } catch (Exception e) { prototype.setStatus(new StatusImpl(Status.FAILED, e.getMessage(), e)); } synchronized (this) { requested.putAll(newlyRequested); } } private JobSpecification buildSpecification(Task prototype) { JobSpecification ps = (JobSpecification) prototype.getSpecification(); JobSpecification js = new JobSpecificationImpl(); js.setExecutable("/usr/bin/perl"); js.addArgument(script.getAbsolutePath()); String internalHostname = 
(String)ps.getAttribute("coasterInternalIP"); if(internalHostname!=null) { // override automatically determined hostname // TODO detect if we've done this already for a different // value? (same non-determinism as for coastersPerWorker and // walltime handling that jobs may come in with different // values and we can only use one) try { logger.warn("original callback URI is "+callbackURI.toString()); callbackURI=new URI(callbackURI.getScheme(), callbackURI.getUserInfo(), internalHostname, callbackURI.getPort(), callbackURI.getPath(), callbackURI.getQuery(), callbackURI.getFragment()); logger.warn("callback URI has been overridden to "+callbackURI.toString()); } catch(URISyntaxException use) { throw new RuntimeException(use); } // TODO nasty exception in the line above } js.addArgument(callbackURI.toString()); // js.addArgument(id); return js; } private ExecutionService buildService(Task prototype) throws InvalidServiceContactException, InvalidProviderException, ProviderMethodException { ExecutionService s = new ExecutionServiceImpl(); s.setServiceContact(prototype.getService(0).getServiceContact()); ExecutionService p = (ExecutionService) prototype.getService(0); String jm = p.getJobManager(); int colon = jm.indexOf(':'); // remove provider used to bootstrap coasters jm = jm.substring(colon + 1); colon = jm.indexOf(':'); if (colon == -1) { s.setProvider(jm); } else { s.setJobManager(jm.substring(colon + 1)); s.setProvider(jm.substring(0, colon)); } if (p.getSecurityContext() != null) { s.setSecurityContext(p.getSecurityContext()); } else { s.setSecurityContext(AbstractionFactory.newSecurityContext(s .getProvider())); } return s; } private void copyAttributes(Task t, Task prototype, Seconds maxWallTime) { JobSpecification pspec = (JobSpecification) prototype .getSpecification(); JobSpecification tspec = (JobSpecification) t.getSpecification(); Iterator i = pspec.getAttributeNames().iterator(); while (i.hasNext()) { String name = (String) i.next(); if (!coasterAttributes.contains(name)) { tspec.setAttribute(name, pspec.getAttribute(name)); } } tspec.setAttribute("maxwalltime", new WallTime((int) maxWallTime .getSeconds()).format()); } private int k; private long last; public Worker request(WallTime maxWallTime, Task prototype) throws InterruptedException { WorkerKey key = new WorkerKey(new Seconds(maxWallTime.getSeconds()) .add(TIME_RESERVE).add(Seconds.now())); Worker w = null; if (logger.isDebugEnabled()) { logger.debug("Looking for worker for key " + key); logger.debug("Ready: " + ready); } synchronized (this) { Collection tm = ready.tailMap(key).values(); Iterator i = tm.iterator(); if (i.hasNext()) { w = (Worker) i.next(); i.remove(); busy.add(w); startingTasks.remove(prototype); } } if (w != null) { if (k == 0) { last = System.currentTimeMillis(); } if (++k % 100 == 0) { long crt = System.currentTimeMillis(); int js = 0; if (last != 0) { js = (int) (80000 / (crt - last)); } last = crt; System.err.println(" " + k / 80 + "; " + js + " J/s"); } logger.info("Using worker " + w + " for task " + prototype); w.setRunning(prototype); return w; } else { synchronized (this) { if (currentWorkers >= MAX_WORKERS) { this.wait(250); return null; } boolean alreadyThere; alreadyThere = !startingTasks.add(prototype); if (!alreadyThere) { currentWorkers++; if (logger.isInfoEnabled()) { logger .info("No suitable worker found. 
Attempting to start a new one."); } synchronized (allocationRequests) { if (allocationRequests.size() < MAX_STARTING_WORKERS) { allocationRequests.add(new AllocationRequest( maxWallTime, prototype)); allocationRequests.notify(); } else { this.wait(250); return null; } } } } return null; } } public void workerTerminated(Worker worker) { logger.warn("Worker terminated: " + worker); Status s = worker.getStatus(); if (s.getStatusCode() == Status.FAILED) { synchronized (this) { requested.remove(worker.getId()); startingTasks.remove(worker.getRunning()); // this will cause all the jobs associated with the worker to // fail ready.put(new WorkerKey(worker), worker); } } synchronized (this) { currentWorkers--; ready.remove(new WorkerKey(worker)); ids.remove(worker.getId()); } } public void registrationReceived(String id, String url, ChannelContext cc) { Worker wr; synchronized (this) { wr = (Worker) requested.remove(id); } if (wr == null) { logger.warn("Received unrequested registration (id = " + id + ", url = " + url + ")"); throw new IllegalArgumentException("Invalid worker id (" + id + "). This worker manager instance does not " + "recall requesting a worker with such an id."); } wr .setScheduledTerminationTime(Seconds.now().add( wr.getMaxWallTime())); wr.setChannelContext(cc); if (logger.isInfoEnabled()) { logger.info("Worker registration received: " + wr); } synchronized (this) { startingTasks.remove(wr.getRunning()); ready.put(new WorkerKey(wr), wr); ids.put(id, wr); wr.workerRegistered(); } } public void removeWorker(Worker worker) { synchronized (this) { ready.remove(new WorkerKey(worker)); currentWorkers--; busy.remove(worker); startingTasks.remove(worker.getRunning()); ids.remove(worker.getId()); } } public void workerTaskDone(Worker wr) { synchronized (this) { busy.remove(wr); ready.put(new WorkerKey(wr), wr); notifyAll(); wr.setRunning(null); } } public int availableWorkers() { synchronized (this) { return ready.size(); } } public ChannelContext getChannelContext(String id) { Worker wr = (Worker) ids.get(id); if (wr == null) { throw new IllegalArgumentException("No worker with id=" + id); } else { return wr.getChannelContext(); } } protected TaskHandler getTaskHandler() { return handler; } private static class AllocationRequest { public WallTime maxWallTime; public Task prototype; public AllocationRequest(WallTime maxWallTime, Task prototype) { this.maxWallTime = maxWallTime; this.prototype = prototype; } } public void shutdown() { try { synchronized (this) { Iterator i; List callbacks = new ArrayList(); i = ready.values().iterator(); while (i.hasNext()) { Worker wr = (Worker) i.next(); callbacks.add(wr.shutdown()); } i = callbacks.iterator(); while (i.hasNext()) { ShutdownCallback cb = (ShutdownCallback) i.next(); if (cb != null) { cb.waitFor(); } } i = new ArrayList(requested.values()).iterator(); while (i.hasNext()) { Worker wr = (Worker) i.next(); try { handler.cancel(wr.getWorkerTask()); } catch (Exception e) { logger.warn("Failed to cancel queued worker task " + wr.getWorkerTask(), e); } } } } catch (InterruptedException e) { logger.warn("Interrupted", e); } } private void startInfoThread() { new Thread() { { setDaemon(true); } public void run() { while (true) { try { Thread.sleep(20000); synchronized (WorkerManager.this) { logger.info("Current workers: " + currentWorkers); logger.info("Ready: " + ready); logger.info("Busy: " + busy); logger.info("Requested: " + requested); logger.info("Starting: " + startingTasks); logger.info("Ids: " + ids); } synchronized (allocationRequests) { 
logger.info("AllocationR: " + allocationRequests); } } catch (Exception e) { e.printStackTrace(); } } } }.start(); } }
package com.yahoo.vespa.hosted.node.admin.nodeadmin; import com.yahoo.component.ComponentId; import com.yahoo.component.provider.ComponentRegistry; import com.yahoo.concurrent.classlock.ClassLocking; import com.yahoo.log.LogLevel; import com.yahoo.vespa.defaults.Defaults; import com.yahoo.vespa.hosted.dockerapi.Docker; import com.yahoo.vespa.hosted.dockerapi.metrics.MetricReceiverWrapper; import com.yahoo.vespa.hosted.node.admin.component.AdminComponent; import java.io.File; import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.logging.Logger; import java.util.stream.Collectors; /** * NodeAdminMain is the main component of the node admin JDisc application: * - It will read config and check its environment to figure out its responsibilities * - It will "start" (only) the necessary components. * - Other components MUST NOT try to start (typically in constructor) since the features * they provide are NOT WANTED and possibly destructive, and/or the environment may be * incompatible. For instance, trying to contact the Docker daemon too early will * be fatal: the node admin may not have installed and started the docker daemon. */ public class NodeAdminMain implements AutoCloseable { private static final Logger logger = Logger.getLogger(NodeAdminMain.class.getName()); private final ComponentRegistry<AdminComponent> adminRegistry; private final Docker docker; private final MetricReceiverWrapper metricReceiver; private final ClassLocking classLocking; private List<AdminComponent> enabledComponents = new ArrayList<>(); private Optional<DockerAdminComponent> dockerAdmin = Optional.empty(); public NodeAdminMain(ComponentRegistry<AdminComponent> adminRegistry, Docker docker, MetricReceiverWrapper metricReceiver, ClassLocking classLocking) { this.adminRegistry = adminRegistry; this.docker = docker; this.metricReceiver = metricReceiver; this.classLocking = classLocking; } public static NodeAdminConfig getConfig() { String path = Defaults.getDefaults().underVespaHome("conf/node-admin.json"); return NodeAdminConfig.fromFile(new File(path)); } public void start() { NodeAdminConfig config = getConfig(); if (config.components.isEmpty()) { dockerAdmin = Optional.of(new DockerAdminComponent( config, docker, metricReceiver, classLocking)); enable(dockerAdmin.get()); } else { logger.log(LogLevel.INFO, () -> { String registeredComponentsList = adminRegistry .allComponentsById().keySet().stream() .map(ComponentId::stringValue) .collect(Collectors.joining(", ")); String requestedComponentsList = config.components.stream() .collect(Collectors.joining(", ")); return String.format( "Components registered = '%s', enabled = '%s'", registeredComponentsList, requestedComponentsList); }); for (String componentSpecificationString : config.components) { AdminComponent component = adminRegistry.getComponent(componentSpecificationString); if (component == null) { throw new IllegalArgumentException("There is no component named '" + componentSpecificationString + "'"); } enable(component); } } } private void enable(AdminComponent component) { component.enable(); enabledComponents.add(component); } @Override public void close() { int i = enabledComponents.size(); while (i-- > 0) { enabledComponents.remove(i).disable(); } } public NodeAdminStateUpdater getNodeAdminStateUpdater() { return dockerAdmin.get().getNodeAdminStateUpdater(); } }
package com.github.davidmoten.rx2.internal.flowable.buffertofile; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import org.reactivestreams.Subscriber; import org.reactivestreams.Subscription; import com.github.davidmoten.guavamini.Preconditions; import com.github.davidmoten.guavamini.annotations.VisibleForTesting; import com.github.davidmoten.rx2.buffertofile.Options; import com.github.davidmoten.rx2.buffertofile.Serializer; import io.reactivex.Flowable; import io.reactivex.Observable; import io.reactivex.Observer; import io.reactivex.Scheduler.Worker; import io.reactivex.disposables.Disposable; import io.reactivex.exceptions.Exceptions; import io.reactivex.internal.functions.ObjectHelper; import io.reactivex.internal.subscriptions.SubscriptionHelper; import io.reactivex.internal.util.BackpressureHelper; import io.reactivex.plugins.RxJavaPlugins; public final class FlowableOnBackpressureBufferToFile<T> extends Flowable<T> { private final Flowable<T> source; private final Observable<T> source2; private final Options options; private final Serializer<T> serializer; public FlowableOnBackpressureBufferToFile(Flowable<T> source, Observable<T> source2, Options options, Serializer<T> serializer) { // only one source should be defined Preconditions.checkArgument((source != null) ^ (source2 != null)); this.source = source; this.source2 = source2; this.options = options; this.serializer = serializer; } @Override protected void subscribeActual(Subscriber<? super T> child) { PagedQueue queue = new PagedQueue(options.fileFactory(), options.pageSizeBytes()); Worker worker = options.scheduler().createWorker(); if (source != null) { source.subscribe( new BufferToFileSubscriberFlowable<T>(child, queue, serializer, worker)); } else { source2.subscribe( new BufferToFileSubscriberObservable<T>(child, queue, serializer, worker)); } } @SuppressWarnings("serial") @VisibleForTesting public static final class BufferToFileSubscriberFlowable<T> extends BufferToFileSubscriber<T> implements Subscriber<T>, Subscription { private Subscription parent; @VisibleForTesting public BufferToFileSubscriberFlowable(Subscriber<? super T> child, PagedQueue queue, Serializer<T> serializer, Worker worker) { super(child, queue, serializer, worker); } @Override public void onSubscribe(Subscription parent) { if (SubscriptionHelper.validate(this.parent, parent)) { this.parent = parent; child.onSubscribe(this); } } @Override public void request(long n) { if (SubscriptionHelper.validate(n)) { BackpressureHelper.add(requested, n); parent.request(n); scheduleDrain(); } } @Override public void cancel() { cancelled = true; parent.cancel(); // ensure queue is closed from the worker thread // to simplify concurrency controls in PagedQueue scheduleDrain(); } @Override public void onNext(T t) { super.onNext(t); } @Override public void onError(Throwable e) { super.onError(e); } @Override public void onComplete() { super.onComplete(); } @Override public void cancelUpstream() { parent.cancel(); } } @SuppressWarnings("serial") private static final class BufferToFileSubscriberObservable<T> extends BufferToFileSubscriber<T> implements Observer<T>, Subscription { private Disposable parent; BufferToFileSubscriberObservable(Subscriber<? 
super T> child, PagedQueue queue, Serializer<T> serializer, Worker worker) { super(child, queue, serializer, worker); } @Override public void onSubscribe(Disposable d) { this.parent = d; child.onSubscribe(this); } @Override public void onNext(T t) { super.onNext(t); } @Override public void onError(Throwable e) { super.onError(e); } @Override public void onComplete() { super.onComplete(); } @Override public void cancelUpstream() { parent.dispose(); } @Override public void request(long n) { if (SubscriptionHelper.validate(n)) { BackpressureHelper.add(requested, n); scheduleDrain(); } } @Override public void cancel() { cancelled = true; parent.dispose(); // ensure queue is closed from the worker thread // to simplify concurrency controls in PagedQueue scheduleDrain(); } } @SuppressWarnings({ "serial" }) @VisibleForTesting static abstract class BufferToFileSubscriber<T> extends AtomicInteger implements Runnable { protected final Subscriber<? super T> child; private final PagedQueue queue; private final Serializer<T> serializer; private final Worker worker; protected final AtomicLong requested = new AtomicLong(); protected volatile boolean cancelled; private volatile boolean done; // `error` set just before the volatile `done` is set and read just // after `done` is read. Thus doesn't need to be volatile. private Throwable error; BufferToFileSubscriber(Subscriber<? super T> child, PagedQueue queue, Serializer<T> serializer, Worker worker) { this.child = child; this.queue = queue; this.serializer = serializer; this.worker = worker; } public void onNext(T t) { try { queue.offer(serializer.serialize(t)); } catch (Throwable e) { Exceptions.throwIfFatal(e); onError(e); return; } scheduleDrain(); } public void onError(Throwable e) { // must assign error before assign done = true to avoid race // condition in drain() and also so appropriate memory barrier in // place given error is non-volatile error = e; done = true; scheduleDrain(); } public void onComplete() { done = true; scheduleDrain(); } protected void scheduleDrain() { // only schedule a drain if current drain has finished // otherwise the drain requested counter (`this`) will be // incremented and the drain loop will ensure that another drain // cycle occurs if required if (getAndIncrement() == 0) { worker.schedule(this); } } @Override public void run() { drain(); } private void drain() { // check cancel outside of request drain loop because the drain // method is also used to serialize read with cancellation (closing // the queue) and we still want it to happen if there are no // requests if (cancelled) { close(queue); worker.dispose(); return; } int missed = 1; while (true) { long r = requested.get(); long e = 0; // emitted while (e != r) { if (cancelled) { close(queue); worker.dispose(); return; } // for visibility purposes must read error AFTER reading // done (done is volatile and error is non-volatile) boolean isDone = done; // must check isDone and error because don't want to emit an // error that is only partially visible to the current // thread if (isDone && error != null) { cancelNow(); child.onError(error); return; } byte[] bytes; try { bytes = queue.poll(); } catch (Throwable err) { Exceptions.throwIfFatal(err); cancelNow(); child.onError(err); return; } if (bytes != null) { // assumed to be fast so we don't check cancelled // after this call T t; try { t = ObjectHelper.requireNonNull( serializer.deserialize(bytes), "Serializer.deserialize should not return null (because RxJava 2 does not support streams with null items"); } 
catch (Throwable err) { Exceptions.throwIfFatal(err); cancelNow(); child.onError(err); return; } child.onNext(t); e++; } else if (isDone) { cancelNow(); child.onComplete(); return; } else { break; } } if (e != 0L && r != Long.MAX_VALUE) { requested.addAndGet(-e); } missed = addAndGet(-missed); if (missed == 0) { return; } } } private void cancelNow() { cancelled = true; cancelUpstream(); close(queue); worker.dispose(); } abstract public void cancelUpstream(); } @VisibleForTesting public static void close(PagedQueue queue) { try { queue.close(); } catch (Throwable err) { Exceptions.throwIfFatal(err); RxJavaPlugins.onError(err); } } }
package com.splicemachine.derby.impl.sql.execute.operations; import com.splicemachine.constants.bytes.BytesUtil; import com.splicemachine.derby.hbase.SpliceDriver; import com.splicemachine.derby.hbase.SpliceObserverInstructions; import com.splicemachine.derby.hbase.SpliceOperationCoprocessor; import com.splicemachine.derby.iapi.sql.execute.*; import com.splicemachine.derby.iapi.storage.RowProvider; import com.splicemachine.derby.impl.SpliceMethod; import com.splicemachine.derby.impl.job.operation.SuccessFilter; import com.splicemachine.derby.impl.sql.execute.operations.JoinUtils.JoinSide; import com.splicemachine.derby.impl.storage.DistributedClientScanProvider; import com.splicemachine.derby.impl.storage.RowProviders; import com.splicemachine.derby.impl.store.access.hbase.HBaseRowLocation; import com.splicemachine.derby.utils.*; import com.splicemachine.derby.utils.marshall.*; import com.splicemachine.encoding.Encoding; import com.splicemachine.encoding.MultiFieldDecoder; import com.splicemachine.encoding.MultiFieldEncoder; import com.splicemachine.job.JobStats; import com.splicemachine.utils.SpliceLogUtils; import org.apache.derby.iapi.error.StandardException; import org.apache.derby.iapi.services.loader.GeneratedMethod; import org.apache.derby.iapi.sql.Activation; import org.apache.derby.iapi.sql.execute.ExecRow; import org.apache.derby.iapi.sql.execute.NoPutResultSet; import org.apache.derby.iapi.types.DataValueDescriptor; import org.apache.derby.iapi.types.SQLInteger; import org.apache.hadoop.hbase.client.Scan; import org.apache.log4j.Logger; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; import java.util.ArrayList; import java.util.Arrays; import java.util.List; public class MergeSortJoinOperation extends JoinOperation implements SinkingOperation { private static final long serialVersionUID = 2l; private static Logger LOG = Logger.getLogger(MergeSortJoinOperation.class); protected boolean wasRightOuterJoin; protected int leftHashKeyItem; protected int[] leftHashKeys; protected int rightHashKeyItem; protected int[] rightHashKeys; protected ExecRow rightTemplate; protected static List<NodeType> nodeTypes; protected Scan reduceScan; protected SQLInteger rowType; public int emptyRightRowsReturned = 0; protected SpliceMethod<ExecRow> emptyRowFun; protected ExecRow emptyRow; static { nodeTypes = Arrays.asList(NodeType.REDUCE,NodeType.SCAN,NodeType.SINK); } private MergeSortJoiner joiner; public MergeSortJoinOperation() { super(); } public MergeSortJoinOperation(SpliceOperation leftResultSet, int leftNumCols, SpliceOperation rightResultSet, int rightNumCols, int leftHashKeyItem, int rightHashKeyItem, Activation activation, GeneratedMethod restriction, int resultSetNumber, boolean oneRowRightSide, boolean notExistsRightSide, double optimizerEstimatedRowCount, double optimizerEstimatedCost, String userSuppliedOptimizerOverrides) throws StandardException { super(leftResultSet, leftNumCols, rightResultSet, rightNumCols, activation, restriction, resultSetNumber, oneRowRightSide, notExistsRightSide, optimizerEstimatedRowCount, optimizerEstimatedCost, userSuppliedOptimizerOverrides); SpliceLogUtils.trace(LOG, "instantiate"); this.leftHashKeyItem = leftHashKeyItem; this.rightHashKeyItem = rightHashKeyItem; init(SpliceOperationContext.newContext(activation)); recordConstructorTime(); } @Override public void readExternal(ObjectInput in) throws IOException,ClassNotFoundException { SpliceLogUtils.trace(LOG, "readExternal"); super.readExternal(in); leftHashKeyItem = 
in.readInt(); rightHashKeyItem = in.readInt(); emptyRightRowsReturned = in.readInt(); } @Override public void writeExternal(ObjectOutput out) throws IOException { SpliceLogUtils.trace(LOG, "writeExternal"); super.writeExternal(out); out.writeInt(leftHashKeyItem); out.writeInt(rightHashKeyItem); out.writeInt(emptyRightRowsReturned); } @Override public ExecRow getNextSinkRow(SpliceRuntimeContext spliceRuntimeContext) throws StandardException, IOException { if (spliceRuntimeContext.isLeft(resultSetNumber)) return leftResultSet.nextRow(spliceRuntimeContext); return rightResultSet.nextRow(spliceRuntimeContext); } @Override public ExecRow nextRow(SpliceRuntimeContext spliceRuntimeContext) throws StandardException, IOException { return next(false,spliceRuntimeContext); } @Override public void open() throws StandardException, IOException { super.open(); if(joiner!=null){ joiner.close(); joiner = null; } } protected ExecRow next(boolean outer, SpliceRuntimeContext spliceRuntimeContext) throws StandardException, IOException { SpliceLogUtils.trace(LOG, "next"); if(joiner==null){ SpliceRuntimeContext left = SpliceRuntimeContext.generateLeftRuntimeContext(resultSetNumber); SpliceRuntimeContext right = SpliceRuntimeContext.generateRightRuntimeContext(resultSetNumber); if(spliceRuntimeContext.isSink()){ left.markAsSink(); right.markAsSink(); } RowDecoder leftDecoder = getRowEncoder(left, leftNumCols, leftHashKeys).getDual(leftResultSet.getExecRowDefinition()); RowDecoder rightDecoder = getRowEncoder(right, rightNumCols, rightHashKeys).getDual(rightResultSet.getExecRowDefinition()); StandardIterator<JoinSideExecRow> scanner = getMergeScanner(spliceRuntimeContext, leftDecoder, rightDecoder); scanner.open(); Restriction mergeRestriction = getRestriction(); joiner = getMergeJoiner(outer, scanner, mergeRestriction); } beginTime = getCurrentTimeMillis(); boolean shouldClose = true; try{ ExecRow joinedRow = joiner.nextRow(); if(joinedRow!=null){ rowsSeen++; shouldClose =false; setCurrentRow(joinedRow); if(currentRowLocation==null) currentRowLocation = new HBaseRowLocation(); currentRowLocation.setValue(joiner.lastRowLocation()); setCurrentRowLocation(currentRowLocation); }else{ clearCurrentRow(); } return joinedRow; }finally{ if(shouldClose){ if(LOG.isDebugEnabled()){ LOG.debug(String.format("Saw %s records (%s left, %s right)", rowsSeen, joiner.getLeftRowsSeen(), joiner.getRightRowsSeen())); } joiner.close(); } } } @Override public RowProvider getReduceRowProvider(SpliceOperation top,RowDecoder decoder, SpliceRuntimeContext spliceRuntimeContext) throws StandardException { byte[] start = uniqueSequenceID; byte[] finish = BytesUtil.unsignedCopyAndIncrement(start); reduceScan = Scans.newScan(start,finish,SpliceUtils.NA_TRANSACTION_ID); if(failedTasks.size()>0){ reduceScan.setFilter(new SuccessFilter(failedTasks)); } if(top!=this && top instanceof SinkingOperation){ SpliceUtils.setInstructions(reduceScan,activation,top,spliceRuntimeContext); return new DistributedClientScanProvider("mergeSortJoin",SpliceOperationCoprocessor.TEMP_TABLE,reduceScan,decoder,spliceRuntimeContext); }else{ //we need to scan the data directly on the client return RowProviders.openedSourceProvider(top,LOG,spliceRuntimeContext); } } @Override public RowProvider getMapRowProvider(SpliceOperation top, RowDecoder decoder, SpliceRuntimeContext spliceRuntimeContext) throws StandardException { return getReduceRowProvider(top,decoder,spliceRuntimeContext); } @Override public void init(SpliceOperationContext context) throws StandardException{ 
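// Initialize both child operations, decode the hash-key positions from the stored items, allocate the merged and right-side template rows, and (when a unique sequence id is present) set up the reduce scan over the intermediate TEMP results.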
SpliceLogUtils.trace(LOG, "init"); super.init(context); leftResultSet.init(context); rightResultSet.init(context); SpliceLogUtils.trace(LOG,"leftHashkeyItem=%d,rightHashKeyItem=%d",leftHashKeyItem,rightHashKeyItem); emptyRightRowsReturned = 0; leftHashKeys = generateHashKeys(leftHashKeyItem); rightHashKeys = generateHashKeys(rightHashKeyItem); mergedRow = activation.getExecutionFactory().getValueRow(leftNumCols + rightNumCols); rightTemplate = activation.getExecutionFactory().getValueRow(rightNumCols); if(uniqueSequenceID!=null){ byte[] start = new byte[uniqueSequenceID.length]; System.arraycopy(uniqueSequenceID,0,start,0,start.length); byte[] finish = BytesUtil.unsignedCopyAndIncrement(start); rowType = (SQLInteger) activation.getDataValueFactory().getNullInteger(null); if(regionScanner==null) reduceScan = Scans.newScan(start,finish, getTransactionID()); else{ reduceScan = context.getScan(); } } } @Override protected JobStats doShuffle() throws StandardException { SpliceLogUtils.trace(LOG, "executeShuffle"); long start = System.currentTimeMillis(); SpliceRuntimeContext spliceLRuntimeContext = SpliceRuntimeContext.generateLeftRuntimeContext(resultSetNumber); SpliceRuntimeContext spliceRRuntimeContext = SpliceRuntimeContext.generateRightRuntimeContext(resultSetNumber); ExecRow template = getExecRowDefinition(); RowProvider leftProvider = leftResultSet.getMapRowProvider(this, getRowEncoder(spliceLRuntimeContext).getDual(template),spliceLRuntimeContext); RowProvider rightProvider = rightResultSet.getMapRowProvider(this, getRowEncoder(spliceRRuntimeContext).getDual(template),spliceRRuntimeContext); RowProvider combined = RowProviders.combine(leftProvider, rightProvider); SpliceObserverInstructions soi = SpliceObserverInstructions.create(getActivation(),this,new SpliceRuntimeContext()); JobStats stats = combined.shuffleRows(soi); nextTime+=System.currentTimeMillis()-start; return stats; } @Override public NoPutResultSet executeScan() throws StandardException { SpliceLogUtils.trace(LOG,"executeScan"); final List<SpliceOperation> opStack = new ArrayList<SpliceOperation>(); this.generateLeftOperationStack(opStack); SpliceLogUtils.trace(LOG,"operationStack=%s",opStack); ExecRow rowDef = getExecRowDefinition(); RowEncoder encoder = RowEncoder.create(rowDef.nColumns(),null,null,null,KeyType.BARE,RowMarshaller.packed()); SpliceRuntimeContext spliceRuntimeContext = new SpliceRuntimeContext(); RowProvider provider = getReduceRowProvider(this,encoder.getDual(getExecRowDefinition()),spliceRuntimeContext); return new SpliceNoPutResultSet(activation,this,provider); } @Override public RowEncoder getRowEncoder(SpliceRuntimeContext spliceRuntimeContext) throws StandardException { if (spliceRuntimeContext.isLeft(resultSetNumber)) return getRowEncoder(spliceRuntimeContext,leftNumCols,leftHashKeys); return getRowEncoder(spliceRuntimeContext,rightNumCols,rightHashKeys); } private RowEncoder getRowEncoder(SpliceRuntimeContext spliceRuntimeContext, final int numCols,int[] keyColumns) throws StandardException { int[] rowColumns; final byte[] joinSideBytes = Encoding.encode(spliceRuntimeContext.isLeft(resultSetNumber)?JoinSide.LEFT.ordinal():JoinSide.RIGHT.ordinal()); KeyMarshall keyType = new KeyMarshall() { @Override public void encodeKey(DataValueDescriptor[] columns, int[] keyColumns, boolean[] sortOrder, byte[] keyPostfix, MultiFieldEncoder keyEncoder) throws StandardException { //noinspection RedundantCast ((KeyMarshall)KeyType.BARE).encodeKey(columns,keyColumns,sortOrder,keyPostfix,keyEncoder); //add ordinal 
position /* * add the ordinal position. * * We can safely call setRawBytes() here, because we know that joinSideBytes are encoded * prior to being set here */ keyEncoder.setRawBytes(joinSideBytes); /* * add a unique id * * We can safely call setRawBytes() here because we know that a unique key is 8 bytes, and it will * never be decoded anyway */ keyEncoder.setRawBytes(SpliceUtils.getUniqueKey()); /* * add the postfix * * We can safely call setRawBytes() here because we know that the prefix will be a fixed length and * will also never be outright decoded (it'll be used for correctness checking). */ keyEncoder.setRawBytes(keyPostfix); } @Override public void decode(DataValueDescriptor[] columns, int[] reversedKeyColumns, boolean[] sortOrder, MultiFieldDecoder rowDecoder) throws StandardException { /* * Some Join columns have key sets like [0,0], where the same field is encoded multiple * times. We need to only decode the first instance, or else we'll get incorrect answers */ rowDecoder.seek(11); //skip the query prefix int[] decodedColumns = new int[numCols]; for(int key:reversedKeyColumns){ if(key==-1) continue; if(decodedColumns[key]!=-1){ //we can decode this one, as it's not a duplicate DerbyBytesUtil.decodeInto(rowDecoder,columns[key]); decodedColumns[key] = -1; }else{ //skip this one, it's a duplicate of something else rowDecoder.skip(); } } } @Override public int getFieldCount(int[] keyColumns) { //noinspection RedundantCast return ((KeyMarshall)KeyType.FIXED_PREFIX_UNIQUE_POSTFIX).getFieldCount(keyColumns)+1; } }; RowMarshall rowType = RowMarshaller.packed(); /* * Because there may be duplicate entries in keyColumns, we need to make sure * that rowColumns deals only with the unique form. */ int[] allCols = new int[numCols]; int numSet=0; for(int keyCol:keyColumns){ int allCol = allCols[keyCol]; if(allCol!=-1){ //only set it if it hasn't already been set allCols[keyCol] = -1; numSet++; } } int pos=0; rowColumns = new int[numCols-numSet]; for(int rowPos=0;rowPos<allCols.length;rowPos++){ if(allCols[rowPos]!=-1){ rowColumns[pos] = rowPos; pos++; } } return new RowEncoder(keyColumns,null,rowColumns,uniqueSequenceID,keyType,rowType, true); } @Override public ExecRow getExecRowDefinition() throws StandardException { SpliceLogUtils.trace(LOG, "getExecRowDefinition"); JoinUtils.getMergedRow((this.leftResultSet).getExecRowDefinition(),(this.rightResultSet).getExecRowDefinition(), wasRightOuterJoin,rightNumCols,leftNumCols,mergedRow); return mergedRow; } @Override public List<NodeType> getNodeTypes() { SpliceLogUtils.trace(LOG, "getNodeTypes"); return nodeTypes; } @Override public SpliceOperation getLeftOperation() { SpliceLogUtils.trace(LOG,"getLeftOperation"); return leftResultSet; } @Override public String toString(){ return "Merge"+super.toString(); } @Override public String prettyPrint(int indentLevel) { return "MergeSortJoin:"+super.prettyPrint(indentLevel); } @Override public void close() throws StandardException, IOException { SpliceLogUtils.trace(LOG, "close in MergeSortJoin"); beginTime = getCurrentTimeMillis(); if(joiner!=null) joiner.close(); if ( isOpen ) { //delete from the temp space if(reduceScan!=null) SpliceDriver.driver().getTempCleaner().deleteRange(uniqueSequenceID,reduceScan.getStartRow(),reduceScan.getStopRow()); clearCurrentRow(); super.close(); } closeTime += getElapsedMillis(beginTime); } /*private helper methods*/ private StandardIterator<JoinSideExecRow> getMergeScanner(SpliceRuntimeContext spliceRuntimeContext, RowDecoder leftDecoder, RowDecoder rightDecoder) { 
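// Pick the scanner implementation: a region-aware scanner when this context is executing as a sink task, otherwise a client-side scanner over the reduce scan.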
StandardIterator<JoinSideExecRow> scanner; if(spliceRuntimeContext.isSink()){ scanner = ResultMergeScanner.regionAwareScanner(reduceScan, transactionID, leftDecoder, rightDecoder, region); }else{ scanner = ResultMergeScanner.clientScanner(reduceScan,leftDecoder,rightDecoder); } return scanner; } private MergeSortJoiner getMergeJoiner(boolean outer, final StandardIterator<JoinSideExecRow> scanner, final Restriction mergeRestriction) { if(outer){ StandardSupplier<ExecRow> emptyRowSupplier = new StandardSupplier<ExecRow>() { @Override public ExecRow get() throws StandardException { if (emptyRow == null) emptyRow = emptyRowFun.invoke(); return emptyRow; } }; return new MergeSortJoiner(mergedRow,scanner,mergeRestriction,wasRightOuterJoin,leftNumCols,rightNumCols, oneRowRightSide,notExistsRightSide, emptyRowSupplier){ @Override protected boolean shouldMergeEmptyRow(boolean noRecordsFound) { return noRecordsFound; } }; }else{ StandardSupplier<ExecRow> emptyRowSupplier = new StandardSupplier<ExecRow>() { @Override public ExecRow get() throws StandardException { return rightTemplate; } }; return new MergeSortJoiner(mergedRow,scanner,mergeRestriction,wasRightOuterJoin, leftNumCols,rightNumCols,oneRowRightSide, notExistsRightSide,emptyRowSupplier); } } private Restriction getRestriction() { Restriction mergeRestriction = Restriction.noOpRestriction; if(restriction!=null){ mergeRestriction = new Restriction() { @Override public boolean apply(ExecRow row) throws StandardException { activation.setCurrentRow(row,resultSetNumber); DataValueDescriptor shouldKeep = restriction.invoke(); return !shouldKeep.isNull() && shouldKeep.getBoolean(); } }; } return mergeRestriction; } }
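/*
 * Illustrative sketch only (not part of the operation above, and not the Splice API): the key
 * encoder above builds TEMP-table keys as [hash-key columns][join-side ordinal][8-byte unique id][postfix].
 * The class below shows, with a plain ByteBuffer instead of MultiFieldEncoder/Encoding, why sorting
 * such composite keys lexicographically groups all rows sharing a hash key together, with LEFT rows
 * (ordinal 0) ordering ahead of RIGHT rows (ordinal 1) -- the property the merge joiner relies on.
 * All names here (MergeSortKeySketch, JoinSideExample, buildSortKey) are hypothetical.
 */
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;

public class MergeSortKeySketch {
    enum JoinSideExample { LEFT, RIGHT }

    // Hypothetical key builder: hash-key bytes, then the join-side ordinal, then a unique row id.
    static byte[] buildSortKey(byte[] hashKey, JoinSideExample side, long uniqueId) {
        ByteBuffer buf = ByteBuffer.allocate(hashKey.length + 1 + 8);
        buf.put(hashKey);
        buf.put((byte) side.ordinal());
        buf.putLong(uniqueId);
        return buf.array();
    }

    // Unsigned lexicographic comparison, the ordering an HBase-style scan over TEMP would see.
    static int compareUnsigned(byte[] a, byte[] b) {
        int n = Math.min(a.length, b.length);
        for (int i = 0; i < n; i++) {
            int cmp = Integer.compare(a[i] & 0xFF, b[i] & 0xFF);
            if (cmp != 0) {
                return cmp;
            }
        }
        return Integer.compare(a.length, b.length);
    }

    public static void main(String[] args) {
        List<byte[]> keys = new ArrayList<byte[]>();
        keys.add(buildSortKey(new byte[]{2}, JoinSideExample.RIGHT, 11L));
        keys.add(buildSortKey(new byte[]{1}, JoinSideExample.RIGHT, 12L));
        keys.add(buildSortKey(new byte[]{1}, JoinSideExample.LEFT, 13L));
        keys.add(buildSortKey(new byte[]{2}, JoinSideExample.LEFT, 14L));
        // After sorting, the scan order is: [1,LEFT], [1,RIGHT], [2,LEFT], [2,RIGHT] --
        // every left row is immediately followed by the right rows that share its hash key.
        keys.sort(MergeSortKeySketch::compareUnsigned);
        for (byte[] key : keys) {
            System.out.println(java.util.Arrays.toString(key));
        }
    }
}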
package org.apereo.cas.web.flow; import org.apereo.cas.CasProtocolConstants; import org.apereo.cas.authentication.CoreAuthenticationTestUtils; import org.apereo.cas.services.DefaultRegisteredServiceAccessStrategy; import org.apereo.cas.services.DenyAllAttributeReleasePolicy; import org.apereo.cas.services.RegisteredServiceTestUtils; import org.apereo.cas.services.ReturnMappedAttributeReleasePolicy; import org.apereo.cas.ticket.TicketGrantingTicket; import org.apereo.cas.web.support.WebUtils; import lombok.val; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.mock.web.MockHttpServletRequest; import org.springframework.mock.web.MockHttpServletResponse; import org.springframework.mock.web.MockServletContext; import org.springframework.webflow.context.servlet.ServletExternalContext; import org.springframework.webflow.execution.Action; import org.springframework.webflow.test.MockRequestContext; import javax.servlet.http.Cookie; import java.net.URI; import java.util.Map; import java.util.Set; import java.util.UUID; import static org.junit.jupiter.api.Assertions.*; import static org.mockito.Mockito.*; /** * @author Scott Battaglia * @since 3.0.0 */ @Tag("WebflowActions") public class GenerateServiceTicketActionTests extends AbstractWebflowActionsTests { @Autowired @Qualifier(CasWebflowConstants.ACTION_ID_GENERATE_SERVICE_TICKET) private Action action; private TicketGrantingTicket ticketGrantingTicket; @BeforeEach public void onSetUp() { val authnResult = getAuthenticationSystemSupport() .finalizeAuthenticationTransaction(CoreAuthenticationTestUtils.getWebApplicationService(), CoreAuthenticationTestUtils.getCredentialsWithSameUsernameAndPassword()); this.ticketGrantingTicket = getCentralAuthenticationService().createTicketGrantingTicket(authnResult); getTicketRegistry().addTicket(this.ticketGrantingTicket); } @Test public void verifyServiceTicketFromCookie() throws Exception { val context = new MockRequestContext(); val service = RegisteredServiceTestUtils.getService(); context.getFlowScope().put(CasWebflowConstants.ATTRIBUTE_SERVICE, service); context.getFlowScope().put(WebUtils.PARAMETER_TICKET_GRANTING_TICKET_ID, this.ticketGrantingTicket.getId()); val request = new MockHttpServletRequest(); context.setExternalContext(new ServletExternalContext( new MockServletContext(), request, new MockHttpServletResponse())); request.addParameter(CasProtocolConstants.PARAMETER_SERVICE, service.getId()); request.setCookies(new Cookie("TGT", this.ticketGrantingTicket.getId())); this.action.execute(context); assertNotNull(WebUtils.getServiceTicketFromRequestScope(context)); } @Test public void verifyTicketGrantingTicketFromRequest() throws Exception { val context = new MockRequestContext(); val service = RegisteredServiceTestUtils.getService(); context.getFlowScope().put(CasWebflowConstants.ATTRIBUTE_SERVICE, service); val request = new MockHttpServletRequest(); context.setExternalContext(new ServletExternalContext(new MockServletContext(), request, new MockHttpServletResponse())); request.addParameter(CasProtocolConstants.PARAMETER_SERVICE, service.getId()); WebUtils.putTicketGrantingTicketInScopes(context, this.ticketGrantingTicket); this.action.execute(context); assertNotNull(WebUtils.getServiceTicketFromRequestScope(context)); } @Test public void verifyServiceTicketWithAccessStrategyMapped() 
throws Exception { val context = new MockRequestContext(); val serviceId = UUID.randomUUID().toString(); val registeredService = RegisteredServiceTestUtils.getRegisteredService(serviceId, Map.of("Role", Set.of(".*developer.*"))); registeredService.setAttributeReleasePolicy(new ReturnMappedAttributeReleasePolicy( Map.of("Role", "groovy { return attributes['eduPersonAffiliation'].get(0) }"))); getServicesManager().save(registeredService); context.getFlowScope().put(CasWebflowConstants.ATTRIBUTE_SERVICE, RegisteredServiceTestUtils.getService(serviceId)); val request = new MockHttpServletRequest(); context.setExternalContext(new ServletExternalContext(new MockServletContext(), request, new MockHttpServletResponse())); request.addParameter(CasProtocolConstants.PARAMETER_SERVICE, serviceId); WebUtils.putTicketGrantingTicketInScopes(context, this.ticketGrantingTicket); this.action.execute(context); assertNotNull(WebUtils.getServiceTicketFromRequestScope(context)); } @Test public void verifyServiceTicketWithAccessStrategyDenied() throws Exception { val context = new MockRequestContext(); val serviceId = UUID.randomUUID().toString(); val registeredService = RegisteredServiceTestUtils.getRegisteredService(serviceId, Map.of("eduPersonAffiliation", Set.of(".*developer.*"))); registeredService.setAttributeReleasePolicy(new DenyAllAttributeReleasePolicy()); getServicesManager().save(registeredService); context.getFlowScope().put(CasWebflowConstants.ATTRIBUTE_SERVICE, RegisteredServiceTestUtils.getService(serviceId)); val request = new MockHttpServletRequest(); context.setExternalContext(new ServletExternalContext(new MockServletContext(), request, new MockHttpServletResponse())); request.addParameter(CasProtocolConstants.PARAMETER_SERVICE, serviceId); WebUtils.putTicketGrantingTicketInScopes(context, this.ticketGrantingTicket); this.action.execute(context); assertNotNull(WebUtils.getServiceTicketFromRequestScope(context)); } @Test public void verifyServiceTicketWithAccessStrategyMultivalued() throws Exception { val context = new MockRequestContext(); val serviceId = UUID.randomUUID().toString(); val registeredService = RegisteredServiceTestUtils.getRegisteredService(serviceId, Map.of("eduPersonAffiliation", Set.of(".*developer.*"))); registeredService.setAttributeReleasePolicy(new ReturnMappedAttributeReleasePolicy( Map.of("eduPersonAffiliation", "groovy { return 'engineers' }"))); getServicesManager().save(registeredService); context.getFlowScope().put(CasWebflowConstants.ATTRIBUTE_SERVICE, RegisteredServiceTestUtils.getService(serviceId)); val request = new MockHttpServletRequest(); context.setExternalContext(new ServletExternalContext(new MockServletContext(), request, new MockHttpServletResponse())); request.addParameter(CasProtocolConstants.PARAMETER_SERVICE, serviceId); WebUtils.putTicketGrantingTicketInScopes(context, this.ticketGrantingTicket); this.action.execute(context); assertNotNull(WebUtils.getServiceTicketFromRequestScope(context)); } @Test public void verifyTicketGrantingTicketNoTgt() throws Exception { val context = new MockRequestContext(); val service = RegisteredServiceTestUtils.getService(); context.getFlowScope().put(CasWebflowConstants.ATTRIBUTE_SERVICE, service); val request = new MockHttpServletRequest(); context.setExternalContext(new ServletExternalContext(new MockServletContext(), request, new MockHttpServletResponse())); request.addParameter(CasProtocolConstants.PARAMETER_SERVICE, service.getId()); val tgt = mock(TicketGrantingTicket.class); 
when(tgt.getId()).thenReturn("bleh"); WebUtils.putTicketGrantingTicketInScopes(context, tgt); assertEquals(CasWebflowConstants.TRANSITION_ID_AUTHENTICATION_FAILURE, this.action.execute(context).getId()); } @Test public void verifyTicketGrantingTicketExpiredTgt() throws Exception { val context = new MockRequestContext(); val service = RegisteredServiceTestUtils.getService(); context.getFlowScope().put(CasWebflowConstants.ATTRIBUTE_SERVICE, service); val request = new MockHttpServletRequest(); context.setExternalContext(new ServletExternalContext(new MockServletContext(), request, new MockHttpServletResponse())); request.addParameter(CasProtocolConstants.PARAMETER_SERVICE, service.getId()); WebUtils.putTicketGrantingTicketInScopes(context, this.ticketGrantingTicket); this.ticketGrantingTicket.markTicketExpired(); getTicketRegistry().updateTicket(this.ticketGrantingTicket); assertEquals(CasWebflowConstants.TRANSITION_ID_AUTHENTICATION_FAILURE, this.action.execute(context).getId()); } @Test public void verifyTicketGrantingTicketNotTgtButGateway() throws Exception { val context = new MockRequestContext(); val service = RegisteredServiceTestUtils.getService(); context.getFlowScope().put(CasWebflowConstants.ATTRIBUTE_SERVICE, service); val request = new MockHttpServletRequest(); context.setExternalContext(new ServletExternalContext(new MockServletContext(), request, new MockHttpServletResponse())); request.addParameter(CasProtocolConstants.PARAMETER_SERVICE, service.getId()); request.addParameter(CasProtocolConstants.PARAMETER_GATEWAY, "true"); val tgt = mock(TicketGrantingTicket.class); when(tgt.getId()).thenReturn("bleh"); WebUtils.putTicketGrantingTicketInScopes(context, tgt); assertEquals(CasWebflowConstants.TRANSITION_ID_GATEWAY, this.action.execute(context).getId()); } @Test public void verifyWarnCookie() throws Exception { val context = new MockRequestContext(); val service = RegisteredServiceTestUtils.getService(UUID.randomUUID().toString()); context.getFlowScope().put(CasWebflowConstants.ATTRIBUTE_SERVICE, service); val registeredService = RegisteredServiceTestUtils.getRegisteredService(service.getId()); registeredService.setAccessStrategy(new DefaultRegisteredServiceAccessStrategy() .setUnauthorizedRedirectUrl(new URI("https://github.com"))); getServicesManager().save(registeredService); val request = new MockHttpServletRequest(); context.setExternalContext(new ServletExternalContext(new MockServletContext(), request, new MockHttpServletResponse())); WebUtils.putWarningCookie(context, Boolean.TRUE); WebUtils.putTicketGrantingTicketInScopes(context, this.ticketGrantingTicket); assertEquals(CasWebflowConstants.STATE_ID_WARN, this.action.execute(context).getId()); } }
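/*
 * Sketch only, not part of the CAS test utilities: every test above repeats the same wiring of a
 * MockRequestContext to a MockHttpServletRequest that carries the CAS "service" parameter before
 * the action is executed. A helper along these lines could centralize that setup. It uses only
 * standard Spring test / Spring Web Flow classes; the class and method names
 * (WebflowTestSupportSketch, newContextWithService) are hypothetical, and the literal "service"
 * is assumed to match CasProtocolConstants.PARAMETER_SERVICE.
 */
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.mock.web.MockHttpServletResponse;
import org.springframework.mock.web.MockServletContext;
import org.springframework.webflow.context.servlet.ServletExternalContext;
import org.springframework.webflow.test.MockRequestContext;

public final class WebflowTestSupportSketch {
    private WebflowTestSupportSketch() {
    }

    /**
     * Builds a MockRequestContext whose external context wraps a mock servlet request that already
     * carries the given service id as the "service" request parameter.
     */
    public static MockRequestContext newContextWithService(final String serviceId) {
        final MockRequestContext context = new MockRequestContext();
        final MockHttpServletRequest request = new MockHttpServletRequest();
        request.addParameter("service", serviceId);
        context.setExternalContext(new ServletExternalContext(
            new MockServletContext(), request, new MockHttpServletResponse()));
        return context;
    }
}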
package org.eclipse.birt.report.designer.ui.dialogs; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import org.eclipse.birt.core.exception.BirtException; import org.eclipse.birt.report.designer.core.model.SessionHandleAdapter; import org.eclipse.birt.report.designer.data.ui.dataset.DataSetUIUtil; import org.eclipse.birt.report.designer.internal.ui.dialogs.BaseDialog; import org.eclipse.birt.report.designer.internal.ui.dialogs.DataColumnBindingDialog; import org.eclipse.birt.report.designer.internal.ui.dialogs.IBindingDialogHelper; import org.eclipse.birt.report.designer.internal.ui.util.DataUtil; import org.eclipse.birt.report.designer.internal.ui.util.ExceptionHandler; import org.eclipse.birt.report.designer.internal.ui.util.IHelpContextIds; import org.eclipse.birt.report.designer.internal.ui.util.UIUtil; import org.eclipse.birt.report.designer.internal.ui.util.WidgetUtil; import org.eclipse.birt.report.designer.nls.Messages; import org.eclipse.birt.report.designer.ui.views.ElementAdapterManager; import org.eclipse.birt.report.designer.ui.views.attributes.providers.ChoiceSetFactory; import org.eclipse.birt.report.designer.util.DEUtil; import org.eclipse.birt.report.model.api.CachedMetaDataHandle; import org.eclipse.birt.report.model.api.CommandStack; import org.eclipse.birt.report.model.api.ComputedColumnHandle; import org.eclipse.birt.report.model.api.DataItemHandle; import org.eclipse.birt.report.model.api.DataSetHandle; import org.eclipse.birt.report.model.api.DesignElementHandle; import org.eclipse.birt.report.model.api.GroupHandle; import org.eclipse.birt.report.model.api.ImageHandle; import org.eclipse.birt.report.model.api.ReportItemHandle; import org.eclipse.birt.report.model.api.ResultSetColumnHandle; import org.eclipse.birt.report.model.api.StructureFactory; import org.eclipse.birt.report.model.api.activity.SemanticException; import org.eclipse.birt.report.model.api.elements.structures.ComputedColumn; import org.eclipse.birt.report.model.api.metadata.IChoiceSet; import org.eclipse.birt.report.model.api.metadata.PropertyValueException; import org.eclipse.birt.report.model.api.util.StringUtil; import org.eclipse.birt.report.model.elements.interfaces.IReportItemModel; import org.eclipse.jface.dialogs.Dialog; import org.eclipse.jface.dialogs.MessageDialog; import org.eclipse.jface.viewers.CheckStateChangedEvent; import org.eclipse.jface.viewers.CheckboxTableViewer; import org.eclipse.jface.viewers.ICheckStateListener; import org.eclipse.jface.viewers.ILabelProviderListener; import org.eclipse.jface.viewers.ISelectionChangedListener; import org.eclipse.jface.viewers.IStructuredContentProvider; import org.eclipse.jface.viewers.ITableLabelProvider; import org.eclipse.jface.viewers.SelectionChangedEvent; import org.eclipse.jface.viewers.TableViewer; import org.eclipse.jface.viewers.Viewer; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CLabel; import org.eclipse.swt.events.KeyAdapter; import org.eclipse.swt.events.KeyEvent; import org.eclipse.swt.events.MouseAdapter; import org.eclipse.swt.events.MouseEvent; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.graphics.Image; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Combo; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Event; 
import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.Table; import org.eclipse.swt.widgets.TableColumn; import org.eclipse.ui.ISharedImages; import org.eclipse.ui.PlatformUI; /** * The dialog to select and edit column bindings */ public class ColumnBindingDialog extends BaseDialog { public static class BindingInfo { private int bindingType; private Object bindingValue; public BindingInfo( ) { } public BindingInfo( int type, Object value ) { this.bindingType = type; this.bindingValue = value; } public int getBindingType( ) { return bindingType; } public Object getBindingValue( ) { return bindingValue; } public void setBindingType( int bindingType ) { this.bindingType = bindingType; } public void setBindingValue( Object bindingValue ) { this.bindingValue = bindingValue; } } private static final String ALL = Messages.getString( "ColumnBindingDialog.All" );//$NON-NLS-1$ private static final String CHOICE_DATASET_FROM_CONTAINER = Messages.getString( "ColumnBindingDialog.Choice.DatasetFromContainer" );//$NON-NLS-1$ private static final String CHOICE_NONE = Messages.getString( "ColumnBindingDialog.NONE" );//$NON-NLS-1$ private static final String CHOICE_REPORTITEM_FROM_CONTAINER = Messages.getString( "ColumnBindingDialog.Choice.ReportItemFromContainer" );//$NON-NLS-1$ private static final String COLUMN_AGGREGATEON = Messages.getString( "ColumnBindingDialog.Column.AggregateOn" ); //$NON-NLS-1$ private static final String COLUMN_FILTER = Messages.getString( "ColumnBindingDialog.Column.Filter" ); //$NON-NLS-1$ private static final String COLUMN_FUNCTION = Messages.getString( "ColumnBindingDialog.Column.Function" ); //$NON-NLS-1$ private static final String COLUMN_DATATYPE = Messages.getString( "ColumnBindingDialog.Column.DataType" ); //$NON-NLS-1$ private static final String COLUMN_DISPLAYNAME = Messages.getString( "ColumnBindingDialog.Column.DisplayName" ); //$NON-NLS-1$ private static final String COLUMN_EXPRESSION = Messages.getString( "ColumnBindingDialog.Column.Expression" ); //$NON-NLS-1$ // private static final String BUTTON_GENERATE = Messages.getString( // "ColumnBindingDialog.Button.Generate" ); //$NON-NLS-1$ private static final String COLUMN_NAME = Messages.getString( "ColumnBindingDialog.Column.Name" ); //$NON-NLS-1$ private static final IChoiceSet DATA_TYPE_CHOICE_SET = DEUtil.getMetaDataDictionary( ) .getStructure( ComputedColumn.COMPUTED_COLUMN_STRUCT ) .getMember( ComputedColumn.DATA_TYPE_MEMBER ) .getAllowedChoices( ); public static final String DEFAULT_DLG_TITLE = Messages.getString( "ColumnBindingDialog.DialogTitle" ); //$NON-NLS-1$ private static final String dummyChoice = "dummy"; //$NON-NLS-1$ private static final String INPUT_PROPMT = Messages.getString( "ColumnBindingDialog.InputPrompt" ); //$NON-NLS-1$ private static final String LABEL_COLUMN_BINDINGS = Messages.getString( "ColumnBindingDialog.Label.DataSet" ); //$NON-NLS-1$ private static final String MSG_ADD = Messages.getString( "ColumnBindingDialog.Text.Add" ); //$NON-NLS-1$ private static final String MSG_DELETE = Messages.getString( "ColumnBindingDialog.Text.Del" ); //$NON-NLS-1$ private static final String MSG_REFRESH = Messages.getString( "ColumnBindingDialog.Text.Refresh" ); //$NON-NLS-1$ private static final String MSG_ADDAGGREGATEON = Messages.getString( "ColumnBindingDialog.Text.AddAggr" ); //$NON-NLS-1$ private static final String MSG_EDIT = Messages.getString( "ColumnBindingDialog.Text.Edit" ); //$NON-NLS-1$ private static final String NONE_AGGREGATEON = 
Messages.getString( "ColumnBindingDialog.AGGREGATEON.NONE" );//$NON-NLS-1$ private static final String WARN_COLUMN_BINDINGS = Messages.getString( "ColumnBingingDialog.Label.Warn" ); //$NON-NLS-1$ // private Button generateButton; protected TableViewer bindingTable; protected Button btnAdd; // private List bindingList; protected Button btnDel; protected Button btnEdit; private boolean canAggregate = false; private boolean canSelect = false; private Composite composite; private IStructuredContentProvider contentProvider = new IStructuredContentProvider( ) { public void dispose( ) { } public Object[] getElements( Object inputElement ) { List elementsList = getBindingList( (DesignElementHandle) inputElement ); // elementsList.add( dummyChoice ); return elementsList.toArray( ); } public void inputChanged( Viewer viewer, Object oldInput, Object newInput ) { } }; private Combo datasetCombo; private Button datasetRadio; private transient boolean enableAutoCommit = false; protected ExpressionProvider expressionProvider; private List groupList = Collections.EMPTY_LIST; private String[] groups; protected ReportItemHandle inputElement; private boolean isDataSetVisible; private ITableLabelProvider labelProvider = new ITableLabelProvider( ) { public void addListener( ILabelProviderListener listener ) { } public void dispose( ) { } public Image getColumnImage( Object element, int columnIndex ) { return null; } public String getColumnText( Object element, int columnIndex ) { if ( element == dummyChoice ) { if ( columnIndex == 1 ) { return INPUT_PROPMT; } return ""; //$NON-NLS-1$ } ComputedColumnHandle handle = ( (ComputedColumnHandle) element ); String text = null; switch ( columnIndex ) { case 1 : text = handle.getName( ); break; case 2 : text = handle.getDisplayName( ); break; case 3 : text = ChoiceSetFactory.getDisplayNameFromChoiceSet( handle.getDataType( ), DATA_TYPE_CHOICE_SET ); break; case 4 : text = org.eclipse.birt.report.designer.data.ui.util.DataUtil.getAggregationExpression( handle ); break; case 5 : try { String function = handle.getAggregateFunction( ); if ( function != null ) text = org.eclipse.birt.report.designer.data.ui.util.DataUtil.getAggregationManager( ) .getAggregation( function ) .getDisplayName( ); } catch ( BirtException e ) { ExceptionHandler.handle( e ); text = null; } break; case 6 : text = handle.getFilterExpression( ); break; case 7 : String value = DEUtil.getAggregateOn( handle ); if ( value == null ) { if ( handle.getAggregateFunction( ) != null ) { text = ALL; } else text = NONE_AGGREGATEON; } else { text = value; } break; } if ( text == null ) { text = ""; //$NON-NLS-1$ } return text; } public boolean isLabelProperty( Object element, String property ) { return false; } public void removeListener( ILabelProviderListener listener ) { } }; private String NullDatasetChoice = null; private String NullReportItemChoice = null; private Combo reportItemCombo; private Button reportItemRadio; private String selectedColumnName = null; private int selectIndex; private CLabel warnLabel; public ColumnBindingDialog( ReportItemHandle input ) { super( DEFAULT_DLG_TITLE ); setInput( input ); } public ColumnBindingDialog( ReportItemHandle input, boolean canSelect ) { super( DEFAULT_DLG_TITLE ); setInput( input ); this.canSelect = canSelect; } public ColumnBindingDialog( ReportItemHandle input, Shell parent, boolean canSelect ) { this( input, parent, DEFAULT_DLG_TITLE, canSelect, true ); } public ColumnBindingDialog( ReportItemHandle input, Shell parent, boolean canSelect, boolean 
canAggregate ) { this( input, parent, DEFAULT_DLG_TITLE, canSelect, canAggregate ); } public ColumnBindingDialog( ReportItemHandle input, Shell parent, String title, boolean canSelect, boolean canAggregate ) { super( parent, title ); setInput( input ); this.canSelect = canSelect; this.canAggregate = canAggregate; } public ColumnBindingDialog( ReportItemHandle input, String title ) { super( title ); setInput( input ); } public ColumnBindingDialog( ReportItemHandle input, String title, boolean canAggregate ) { super( title ); setInput( input ); this.canAggregate = canAggregate; } protected void addBinding( ComputedColumn column ) { try { DEUtil.addColumn( DEUtil.getBindingHolder( inputElement ), column, false ); } catch ( SemanticException e ) { ExceptionHandler.handle( e ); } } /** * Adds buttons in Button area. * * @param cmp * parent composite * @param table * the Table widget affected by Buttons * @return the number of added buttons */ protected int addButtons( Composite cmp, final Table table ) { btnRefresh = new Button( cmp, SWT.PUSH ); btnRefresh.setText( MSG_REFRESH ); GridData data = new GridData( GridData.VERTICAL_ALIGN_BEGINNING ); data.widthHint = Math.max( 60, btnRefresh.computeSize( SWT.DEFAULT, SWT.DEFAULT, true ).x ); btnRefresh.setLayoutData( data ); btnRefresh.addListener( SWT.Selection, new Listener( ) { public void handleEvent( Event event ) { if ( inputElement != null ) { DataSetHandle datasetHandle = inputElement.getDataSet( ); if ( datasetHandle != null ) { try { CachedMetaDataHandle cmdh = DataSetUIUtil.getCachedMetaDataHandle( datasetHandle ); for ( Iterator iter = cmdh.getResultSet( ) .iterator( ); iter.hasNext( ); ) { ResultSetColumnHandle element = (ResultSetColumnHandle) iter.next( ); ComputedColumn bindingColumn = StructureFactory.newComputedColumn( inputElement, element.getColumnName( ) ); bindingColumn.setDataType( element.getDataType( ) ); bindingColumn.setExpression( DEUtil.getExpression( element ) ); inputElement.addColumnBinding( bindingColumn, false ); } } catch ( SemanticException e ) { ExceptionHandler.handle( e ); } refreshBindingTable( ); updateButtons( ); } } } } ); return 1; } private void commit( ) { if ( isEnableAutoCommit( ) ) { getActionStack( ).commit( ); } } protected Control createDialogArea( Composite parent ) { UIUtil.bindHelp( parent, IHelpContextIds.COLUMNBINDING_DIALOG_ID ); Composite parentComposite = (Composite) super.createDialogArea( parent ); if ( this.canSelect ) { composite = new Composite( parentComposite, SWT.NONE ); composite.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) ); composite.setLayout( UIUtil.createGridLayoutWithoutMargin( 2, false ) ); warnLabel = new CLabel( composite, SWT.NONE ); warnLabel.setImage( PlatformUI.getWorkbench( ) .getSharedImages( ) .getImage( ISharedImages.IMG_OBJS_WARN_TSK ) ); warnLabel.setText( WARN_COLUMN_BINDINGS ); GridData gd = new GridData( GridData.FILL_HORIZONTAL ); gd.horizontalSpan = 2; warnLabel.setLayoutData( gd ); datasetRadio = new Button( composite, SWT.RADIO ); datasetRadio.setText( LABEL_COLUMN_BINDINGS ); datasetRadio.setLayoutData( new GridData( GridData.BEGINNING ) ); datasetRadio.addSelectionListener( new SelectionAdapter( ) { public void widgetSelected( SelectionEvent e ) { refreshBinding( ); if ( datasetRadio.getSelection( ) && inputElement.getDataBindingType( ) == ReportItemHandle.DATABINDING_TYPE_REPORT_ITEM_REF && ( DEUtil.getBindingHolder( inputElement, true ) == null || DEUtil.getBindingHolder( inputElement, true ) .getDataBindingType( ) != 
ReportItemHandle.DATABINDING_TYPE_REPORT_ITEM_REF ) ) saveBinding( ); } } ); datasetCombo = new Combo( composite, SWT.READ_ONLY | SWT.BORDER ); datasetCombo.setBackground( PlatformUI.getWorkbench( ) .getDisplay( ) .getSystemColor( SWT.COLOR_LIST_BACKGROUND ) ); String[] dataSets = ChoiceSetFactory.getDataSets( ); String[] newList = new String[dataSets.length + 1]; newList[0] = NullDatasetChoice; System.arraycopy( dataSets, 0, newList, 1, dataSets.length ); datasetCombo.setItems( newList ); String dataSetName = getDataSetName( ); datasetCombo.deselectAll( ); if ( dataSetName != null ) { datasetCombo.setText( dataSetName ); } else { datasetCombo.select( 0 ); } gd = new GridData( ); gd.widthHint = 250; datasetCombo.setLayoutData( gd ); datasetCombo.addSelectionListener( new SelectionAdapter( ) { public void widgetSelected( SelectionEvent event ) { saveBinding( ); } } ); reportItemRadio = new Button( composite, SWT.RADIO ); reportItemRadio.setText( Messages.getString( "BindingPage.ReportItem.Label" ) ); //$NON-NLS-1$ reportItemRadio.addSelectionListener( new SelectionAdapter( ) { public void widgetSelected( SelectionEvent e ) { refreshBinding( ); if ( reportItemRadio.getSelection( ) && inputElement.getDataBindingType( ) == ReportItemHandle.DATABINDING_TYPE_DATA && ( DEUtil.getBindingHolder( inputElement, true ) == null || DEUtil.getBindingHolder( inputElement, true ) .getDataBindingType( ) == ReportItemHandle.DATABINDING_TYPE_REPORT_ITEM_REF ) ) saveBinding( ); } } ); reportItemCombo = new Combo( composite, SWT.READ_ONLY | SWT.BORDER ); reportItemCombo.setBackground( PlatformUI.getWorkbench( ) .getDisplay( ) .getSystemColor( SWT.COLOR_LIST_BACKGROUND ) ); gd = new GridData( ); gd.widthHint = 250; reportItemCombo.setLayoutData( gd ); reportItemCombo.addSelectionListener( new SelectionAdapter( ) { public void widgetSelected( SelectionEvent e ) { saveBinding( ); } } ); } Composite contentComposite = new Composite( parentComposite, SWT.NONE ); contentComposite.setLayoutData( new GridData( GridData.FILL_BOTH ) ); contentComposite.setLayout( UIUtil.createGridLayoutWithoutMargin( 2, false ) ); /** * Binding table */ final Table table = new Table( contentComposite, SWT.SINGLE | SWT.FULL_SELECTION | SWT.BORDER | ( canSelect ? 
SWT.CHECK : 0 ) ); GridData gd = new GridData( GridData.FILL_BOTH ); gd.heightHint = 200; gd.verticalSpan = 5; table.setLayoutData( gd ); table.setLinesVisible( true ); table.setHeaderVisible( true ); // table.addKeyListener( new KeyAdapter( ) { // /** // * @see // org.eclipse.swt.events.KeyAdapter#keyReleased(org.eclipse.swt.events.KeyEvent) // */ // public void keyReleased( KeyEvent e ) // // If Delete pressed, delete the selected row // if ( e.keyCode == SWT.DEL ) // IStructuredSelection selection = (IStructuredSelection) // bindingTable.getSelection( ); // if ( selection.getFirstElement( ) instanceof ComputedColumnHandle ) // deleteRow( (ComputedColumnHandle) selection.getFirstElement( ) ); table.addKeyListener( new KeyAdapter( ) { public void keyPressed( KeyEvent e ) { if ( e.keyCode == SWT.DEL && ( DEUtil.getBindingHolder( inputElement ) .getDataBindingType( ) == ReportItemHandle.DATABINDING_TYPE_DATA || ( DEUtil.getBindingHolder( inputElement ) .getDataBindingType( ) == ReportItemHandle.DATABINDING_TYPE_NONE && ( DEUtil.getBindingHolder( inputElement, true ) == null || DEUtil.getBindingHolder( inputElement, true ) .getDataBindingType( ) != ReportItemHandle.DATABINDING_TYPE_REPORT_ITEM_REF ) ) ) ) { int itemCount = table.getItemCount( ); if ( selectIndex == itemCount ) { return; } if ( selectIndex == itemCount - 1 ) { selectIndex--; } try { handleDelEvent( ); } catch ( Exception e1 ) { WidgetUtil.processError( getShell( ), e1 ); } refreshBindingTable( ); } } } ); table.addSelectionListener( new SelectionAdapter( ) { public void widgetSelected( SelectionEvent e ) { selectIndex = table.getSelectionIndex( ); updateButtons( ); } } ); table.addMouseListener( new MouseAdapter( ) { /** * @param e */ public void mouseDoubleClick( MouseEvent e ) { editSelectedBinding( table.getSelectionIndex( ) ); } } ); String[] columns = null; int[] columnWidth = null; groups = new String[groupList.size( ) + 1]; groups[0] = ALL; for ( int i = 0; i < groupList.size( ); i++ ) { groups[i + 1] = ( (GroupHandle) groupList.get( i ) ).getName( ); } if ( canAggregate ) { columns = new String[]{ null, COLUMN_NAME, COLUMN_DISPLAYNAME, COLUMN_DATATYPE, COLUMN_EXPRESSION, COLUMN_FUNCTION, COLUMN_FILTER, COLUMN_AGGREGATEON }; columnWidth = new int[]{ canSelect ? 25 : 20, 100, 100, 70, 100, 100, 100, 100 }; } else { columns = new String[]{ null, COLUMN_NAME, COLUMN_DISPLAYNAME, COLUMN_DATATYPE, COLUMN_EXPRESSION }; columnWidth = new int[]{ canSelect ?
25 : 20, 150, 150, 70, 150 }; } for ( int i = 0; i < columns.length; i++ ) { TableColumn column = new TableColumn( table, SWT.LEFT ); column.setResizable( columns[i] != null ); if ( columns[i] != null ) { column.setText( columns[i] ); } column.setWidth( columnWidth[i] ); } if ( canSelect ) { bindingTable = new CheckboxTableViewer( table ); ( (CheckboxTableViewer) bindingTable ).addCheckStateListener( new ICheckStateListener( ) { public void checkStateChanged( CheckStateChangedEvent event ) { if ( event.getElement( ) instanceof ComputedColumnHandle ) { ComputedColumnHandle handle = (ComputedColumnHandle) event.getElement( ); if ( handle.getName( ).equals( selectedColumnName ) ) { selectedColumnName = null; } else { selectedColumnName = handle.getName( ); } updateSelection( ); updateButtons( ); } else { ( (CheckboxTableViewer) bindingTable ).setChecked( dummyChoice, false ); } } } ); } else { bindingTable = new TableViewer( table ); } bindingTable.setColumnProperties( columns ); bindingTable.setContentProvider( contentProvider ); bindingTable.setLabelProvider( labelProvider ); // bindingTable.setCellModifier( cellModifier ); bindingTable.setInput( inputElement ); bindingTable.addSelectionChangedListener( new ISelectionChangedListener( ) { public void selectionChanged( SelectionChangedEvent event ) { updateButtons( ); } } ); btnAdd = new Button( contentComposite, SWT.PUSH ); btnAdd.setText( MSG_ADD ); GridData data = new GridData( ); data.widthHint = Math.max( 60, btnAdd.computeSize( SWT.DEFAULT, SWT.DEFAULT, true ).x ); btnAdd.setLayoutData( data ); btnAdd.addSelectionListener( new SelectionAdapter( ) { public void widgetSelected( SelectionEvent e ) { handleAddEvent( ); refreshBindingTable( ); if ( table.getItemCount( ) > 0 ) selectIndex = ( table.getItemCount( ) - 1 ); updateButtons( ); } } ); if ( canAggregate ) { btnAddAggr = new Button( contentComposite, SWT.PUSH ); btnAddAggr.setText( MSG_ADDAGGREGATEON ); //$NON-NLS-1$ data = new GridData( ); data.widthHint = Math.max( 60, btnAddAggr.computeSize( SWT.DEFAULT, SWT.DEFAULT, true ).x ); btnAddAggr.setLayoutData( data ); btnAddAggr.addListener( SWT.Selection, new Listener( ) { public void handleEvent( Event event ) { DataColumnBindingDialog dialog = new DataColumnBindingDialog( true ); dialog.setInput( inputElement ); dialog.setExpressionProvider( expressionProvider ); dialog.setAggreate( true ); if ( dialog.open( ) == Dialog.OK ) { if ( bindingTable != null ) { refreshBindingTable( ); bindingTable.getTable( ) .setSelection( bindingTable.getTable( ) .getItemCount( ) - 1 ); } } refreshBindingTable( ); if ( table.getItemCount( ) > 0 ) setSelectionInTable( table.getItemCount( ) - 1 ); updateButtons( ); } } ); } btnEdit = new Button( contentComposite, SWT.PUSH ); btnEdit.setText( MSG_EDIT ); data = new GridData( ); data.widthHint = Math.max( 60, btnEdit.computeSize( SWT.DEFAULT, SWT.DEFAULT, true ).x ); btnEdit.setLayoutData( data ); btnEdit.addSelectionListener( new SelectionAdapter( ) { public void widgetSelected( SelectionEvent e ) { handleEditEvent( ); refreshBindingTable( ); } } ); btnDel = new Button( contentComposite, SWT.PUSH ); btnDel.setText( MSG_DELETE ); data = new GridData( ); data.widthHint = Math.max( 60, btnDel.computeSize( SWT.DEFAULT, SWT.DEFAULT, true ).x ); btnDel.setLayoutData( data ); btnDel.addSelectionListener( new SelectionAdapter( ) { public void widgetSelected( SelectionEvent e ) { if ( bindingTable.isCellEditorActive( ) ) { bindingTable.cancelEditing( ); } int pos = bindingTable.getTable( ).getSelectionIndex( ); if 
( pos == -1 ) { bindingTable.getTable( ).setFocus( ); return; } selectIndex = pos; int itemCount = bindingTable.getTable( ).getItemCount( ); if ( selectIndex == itemCount - 1 ) { selectIndex--; } try { handleDelEvent( ); } catch ( Exception e1 ) { WidgetUtil.processError( getShell( ), e1 ); } refreshBindingTable( ); } } ); // initTableCellColor( ); // Add custom buttons int buttonsNumber = addButtons( contentComposite, table ); if ( buttonsNumber > 0 ) { // Adjust UI layout if ( table.getLayoutData( ) instanceof GridData ) { ( (GridData) table.getLayoutData( ) ).verticalSpan += buttonsNumber; } } if ( !isDataSetVisible ) { if ( composite != null ) ( (GridData) composite.getLayoutData( ) ).exclude = true; } return parentComposite; } private void deleteRow( ComputedColumnHandle handle ) { try { if ( handle.getName( ).equals( selectedColumnName ) ) { selectedColumnName = null; } handle.drop( ); } catch ( PropertyValueException e ) { ExceptionHandler.handle( e ); } } private void generateBindingColumns( ) throws SemanticException { List columnList = DataUtil.generateComputedColumns( inputElement ); if ( columnList.size( ) > 0 ) { for ( Iterator iter = columnList.iterator( ); iter.hasNext( ); ) { addBinding( (ComputedColumn) iter.next( ) ); } } refreshBindingTable( ); } /** * Gets the DE CommandStack instance * * @return CommandStack instance */ private CommandStack getActionStack( ) { return SessionHandleAdapter.getInstance( ).getCommandStack( ); } public String[] getAvailableDatasetItems( ) { String[] dataSets = ChoiceSetFactory.getDataSets( ); String[] newList = new String[dataSets.length + 1]; newList[0] = NullDatasetChoice; System.arraycopy( dataSets, 0, newList, 1, dataSets.length ); return newList; } protected List getBindingList( DesignElementHandle inputElement ) { return DEUtil.getVisiableColumnBindingsList( inputElement ); } private String getColumnName( String expression ) { List columnList = DEUtil.getVisiableColumnBindingsList( inputElement ); for ( Iterator iter = columnList.iterator( ); iter.hasNext( ); ) { ComputedColumnHandle cachedColumn = (ComputedColumnHandle) iter.next( ); String columnName = cachedColumn.getName( ); if ( DEUtil.getColumnExpression( columnName ).equals( expression ) ) { return columnName; } } return null; } private String getDataSetName( ) { if ( inputElement.getDataSet( ) == null ) { return null; } String dataSetName = inputElement.getDataSet( ).getQualifiedName( ); if ( StringUtil.isBlank( dataSetName ) ) { dataSetName = null; } return dataSetName; } protected Map<String, ReportItemHandle> referMap = new HashMap<String, ReportItemHandle>( ); private Button btnAddAggr; private Button btnRefresh; protected String[] getReferences( ) { List referenceList = inputElement.getAvailableDataSetBindingReferenceList( ); String[] references = new String[referenceList.size( ) + 1]; references[0] = NullReportItemChoice; referMap.put( references[0], null ); int j = 0; for ( int i = 0; i < referenceList.size( ); i++ ) { ReportItemHandle item = ( (ReportItemHandle) referenceList.get( i ) ); if ( item.getName( ) != null ) { references[++j] = item.getQualifiedName( ); referMap.put( references[j], item ); } } int tmp = j + 1; Arrays.sort( references, 1, tmp ); for ( int i = 0; i < referenceList.size( ); i++ ) { ReportItemHandle item = ( (ReportItemHandle) referenceList.get( i ) ); if ( item.getName( ) == null ) { references[++j] = item.getElement( ) .getDefn( ) .getDisplayName( ) + " (ID " //$NON-NLS-1$ + item.getID( ) + ") - " //$NON-NLS-1$ + Messages.getString(
"BindingPage.ReportItem.NoName" ); //$NON-NLS-1$ referMap.put( references[j], item ); } } Arrays.sort( references, tmp, referenceList.size( ) + 1 ); return references; } private ComputedColumnHandle getSelectColumnHandle( ) { if ( selectedColumnName != null ) { for ( int i = 0; i < bindingTable.getTable( ).getItemCount( ); i++ ) { ComputedColumnHandle handle = (ComputedColumnHandle) bindingTable.getElementAt( i ); if ( selectedColumnName.equals( handle.getName( ) ) ) { return handle; } } } return null; } protected void handleAddEvent( ) { DataColumnBindingDialog dialog = new DataColumnBindingDialog( true ); dialog.setInput( inputElement ); dialog.setExpressionProvider( expressionProvider ); if ( dialog.open( ) == Dialog.OK ) { if ( bindingTable != null ) { refreshBindingTable( ); bindingTable.getTable( ).setSelection( bindingTable.getTable( ) .getItemCount( ) - 1 ); } } } protected void handleDelEvent( ) { if ( !btnDel.isEnabled( ) ) return; int pos = bindingTable.getTable( ).getSelectionIndex( ); if ( pos > -1 ) { try { ComputedColumnHandle handle = (ComputedColumnHandle) ( DEUtil.getBindingHolder( inputElement ) ).getColumnBindings( ) .getAt( pos ); deleteRow( handle ); } catch ( Exception e1 ) { ExceptionHandler.handle( e1 ); } } } protected void handleEditEvent( ) { int pos = bindingTable.getTable( ).getSelectionIndex( ); editSelectedBinding( pos ); } /** * Edits the selected binding of table. * * @param bindingIndex */ private void editSelectedBinding( int bindingIndex ) { if ( !btnEdit.isEnabled( ) ) return; ComputedColumnHandle bindingHandle = null; if ( bindingIndex > -1 ) { bindingHandle = (ComputedColumnHandle) ( DEUtil.getBindingHolder( inputElement ) ).getColumnBindings( ) .getAt( bindingIndex ); } if ( bindingHandle == null ) return; String bindingName = bindingHandle.getName( ); DataColumnBindingDialog dialog = new DataColumnBindingDialog( false ); dialog.setInput( inputElement, bindingHandle ); dialog.setExpressionProvider( expressionProvider ); if ( dialog.open( ) == Dialog.OK ) { if ( bindingTable != null ) bindingTable.getTable( ).setSelection( bindingIndex ); if ( selectedColumnName != null && selectedColumnName.equals( bindingName ) ) selectedColumnName = bindingHandle.getName( ); } } protected boolean initDialog( ) { if ( canSelect ) { if ( inputElement instanceof DataItemHandle ) { selectedColumnName = ( (DataItemHandle) inputElement ).getResultSetColumn( ); updateSelection( ); } else if ( inputElement instanceof ImageHandle ) { selectedColumnName = getColumnName( ( (ImageHandle) inputElement ).getValueExpression( ) ); updateSelection( ); } } load( ); return super.initDialog( ); } /** * @return Returns the enableAutoCommit. 
*/ public boolean isEnableAutoCommit( ) { return enableAutoCommit; } public void load( ) { if ( canSelect ) { datasetRadio.setEnabled( true ); reportItemRadio.setEnabled( true ); BindingInfo info = (BindingInfo) loadValue( ); if ( info != null ) { refreshBindingInfo( info ); } } refreshBindingTable( ); } public Object loadValue( ) { if ( canSelect ) { int type = inputElement.getDataBindingType( ); Object value; if ( type == ReportItemHandle.DATABINDING_TYPE_NONE ) type = DEUtil.getBindingHolder( inputElement ) .getDataBindingType( ); switch ( type ) { case ReportItemHandle.DATABINDING_TYPE_DATA : DataSetHandle dataset = inputElement.getDataSet( ); if ( dataset == null ) value = NullDatasetChoice; else value = dataset.getQualifiedName( ); break; case ReportItemHandle.DATABINDING_TYPE_REPORT_ITEM_REF : ReportItemHandle reference = inputElement.getDataBindingReference( ); if ( reference == null ) value = NullReportItemChoice; else value = reference.getQualifiedName( ); break; default : value = NullDatasetChoice; } BindingInfo info = new BindingInfo( type, value ); return info; } return null; } protected void okPressed( ) { if ( canSelect ) { setResult( selectedColumnName ); if ( inputElement instanceof DataItemHandle ) { try { ( (DataItemHandle) inputElement ).setResultSetColumn( selectedColumnName ); } catch ( SemanticException e ) { ExceptionHandler.handle( e ); } } } super.okPressed( ); } private void refreshBinding( ) { if ( datasetRadio.getSelection( ) ) { datasetRadio.setSelection( true ); datasetCombo.setEnabled( true ); reportItemRadio.setSelection( false ); reportItemCombo.setEnabled( false ); if ( datasetCombo.getSelectionIndex( ) == -1 ) { datasetCombo.setItems( getAvailableDatasetItems( ) ); datasetCombo.select( 0 ); } } else { datasetRadio.setSelection( false ); datasetCombo.setEnabled( false ); reportItemRadio.setSelection( true ); reportItemCombo.setEnabled( true ); if ( reportItemCombo.getSelectionIndex( ) == -1 ) { reportItemCombo.setItems( getReferences( ) ); reportItemCombo.select( 0 ); } } } private void refreshBindingInfo( BindingInfo info ) { if ( canSelect ) { int type = info.getBindingType( ); Object value = info.getBindingValue( ); datasetCombo.setItems( getAvailableDatasetItems( ) ); reportItemCombo.setItems( getReferences( ) ); if ( type == ReportItemHandle.DATABINDING_TYPE_NONE ) type = DEUtil.getBindingHolder( inputElement ) .getDataBindingType( ); switch ( type ) { case ReportItemHandle.DATABINDING_TYPE_NONE : case ReportItemHandle.DATABINDING_TYPE_DATA : datasetRadio.setSelection( true ); datasetCombo.setEnabled( true ); datasetCombo.setText( value.toString( ) ); reportItemRadio.setSelection( false ); reportItemCombo.setEnabled( false ); break; case ReportItemHandle.DATABINDING_TYPE_REPORT_ITEM_REF : datasetRadio.setSelection( false ); datasetCombo.setEnabled( false ); reportItemRadio.setSelection( true ); reportItemCombo.setEnabled( true ); reportItemCombo.setText( value.toString( ) ); } } } protected void refreshBindingTable( ) { bindingTable.refresh( ); if ( canSelect ) { updateSelection( ); } updateButtons( ); } private void resetDataSetReference( Object value, boolean clearHistory ) { try { startTrans( "" ); //$NON-NLS-1$ inputElement.setDataBindingReference( null ); DataSetHandle dataSet = null; if ( value != null ) { dataSet = SessionHandleAdapter.getInstance( ) .getReportDesignHandle( ) .findDataSet( value.toString( ) ); } if ( inputElement.getDataBindingType( ) == ReportItemHandle.DATABINDING_TYPE_REPORT_ITEM_REF ) { 
inputElement.setDataBindingReference( null ); } inputElement.setDataSet( dataSet ); if ( clearHistory ) { inputElement.getColumnBindings( ).clearValue( ); inputElement.getPropertyHandle( ReportItemHandle.PARAM_BINDINGS_PROP ) .clearValue( ); } generateBindingColumns( ); selectedColumnName = null; commit( ); } catch ( SemanticException e ) { rollback( ); ExceptionHandler.handle( e ); } load( ); } private void resetReference( Object value ) { if ( value == null && inputElement.getDataBindingType( ) == ReportItemHandle.DATABINDING_TYPE_DATA ) { resetDataSetReference( null, true ); } else { try { startTrans( Messages.getString( "DataColumBindingDialog.stackMsg.resetReference" ) ); //$NON-NLS-1$ ReportItemHandle element = null; if ( value != null ) { element = (ReportItemHandle) SessionHandleAdapter.getInstance( ) .getReportDesignHandle( ) .findElement( value.toString( ) ); } inputElement.setDataBindingReference( element ); selectedColumnName = null; commit( ); } catch ( SemanticException e ) { rollback( ); ExceptionHandler.handle( e ); } load( ); } } private void rollback( ) { if ( isEnableAutoCommit( ) ) { getActionStack( ).rollback( ); } } public void save( Object saveValue ) throws SemanticException { if ( saveValue instanceof BindingInfo ) { BindingInfo info = (BindingInfo) saveValue; int type = info.getBindingType( ); String value = info.getBindingValue( ).toString( ); switch ( type ) { case ReportItemHandle.DATABINDING_TYPE_DATA : if ( value.equals( NullDatasetChoice ) ) { value = null; } int ret = 0; if ( !NullDatasetChoice.equals( ( (BindingInfo) loadValue( ) ).getBindingValue( ) .toString( ) ) || inputElement.getColumnBindings( ) .iterator( ) .hasNext( ) ) { MessageDialog prefDialog = new MessageDialog( UIUtil.getDefaultShell( ), Messages.getString( "dataBinding.title.changeDataSet" ),//$NON-NLS-1$ null, Messages.getString( "dataBinding.message.changeDataSet" ),//$NON-NLS-1$ MessageDialog.QUESTION, new String[]{ Messages.getString( "AttributeView.dialg.Message.Yes" ),//$NON-NLS-1$ Messages.getString( "AttributeView.dialg.Message.No" ),//$NON-NLS-1$ Messages.getString( "AttributeView.dialg.Message.Cancel" )}, 0 );//$NON-NLS-1$ ret = prefDialog.open( ); } switch ( ret ) { // Clear binding info case 0 : resetDataSetReference( value, true ); break; // Doesn't clear binding info case 1 : resetDataSetReference( value, false ); break; // Cancel. 
case 2 : load( ); } break; case ReportItemHandle.DATABINDING_TYPE_REPORT_ITEM_REF : if ( value.equals( NullReportItemChoice ) ) { value = null; } else if ( referMap.get( value ).getName( ) == null ) { MessageDialog dialog = new MessageDialog( UIUtil.getDefaultShell( ), Messages.getString( "dataBinding.title.haveNoName" ),//$NON-NLS-1$ null, Messages.getString( "dataBinding.message.haveNoName" ),//$NON-NLS-1$ MessageDialog.QUESTION, new String[]{ Messages.getString( "dataBinding.button.OK" )//$NON-NLS-1$ }, 0 ); dialog.open( ); load( ); return; } int ret1 = 0; if ( !NullReportItemChoice.equals( ( (BindingInfo) loadValue( ) ).getBindingValue( ) .toString( ) ) || inputElement.getColumnBindings( ) .iterator( ) .hasNext( ) ) { MessageDialog prefDialog = new MessageDialog( UIUtil.getDefaultShell( ), Messages.getString( "dataBinding.title.changeDataSet" ),//$NON-NLS-1$ null, Messages.getString( "dataBinding.message.changeDataSet" ),//$NON-NLS-1$ MessageDialog.QUESTION, new String[]{ Messages.getString( "AttributeView.dialg.Message.Yes" ),//$NON-NLS-1$ Messages.getString( "AttributeView.dialg.Message.Cancel" )}, 0 );//$NON-NLS-1$ ret1 = prefDialog.open( ); } switch ( ret1 ) { // Clear binding info case 0 : resetReference( value ); break; // Cancel. case 1 : load( ); } } } } private void saveBinding( ) { BindingInfo info = new BindingInfo( ); if ( datasetRadio.getSelection( ) ) { info.setBindingType( ReportItemHandle.DATABINDING_TYPE_DATA ); info.setBindingValue( datasetCombo.getText( ) ); } else { info.setBindingType( ReportItemHandle.DATABINDING_TYPE_REPORT_ITEM_REF ); info.setBindingValue( reportItemCombo.getText( ) ); } try { save( info ); } catch ( SemanticException e ) { ExceptionHandler.handle( e ); } } /** * @param enableAutoCommit * The enableAutoCommit to set. */ public void setEnableAutoCommit( boolean enableAutoCommit ) { this.enableAutoCommit = enableAutoCommit; } public void setExpressionProvider( ExpressionProvider provider ) { expressionProvider = provider; } /* * Set data for Group List */ public void setGroupList( List groupList ) { this.groupList = groupList; } /* * Set input for dialog */ private void setInput( ReportItemHandle input ) { this.inputElement = input; ReportItemHandle container = DEUtil.getBindingHolder( input.getContainer( ) ); if ( container != null && ( container.getDataSet( ) != null || container.columnBindingsIterator( ) .hasNext( ) ) ) { NullDatasetChoice = CHOICE_DATASET_FROM_CONTAINER; } else { NullDatasetChoice = CHOICE_NONE; } if ( container != null && container.getDataBindingReference( ) != null ) { NullReportItemChoice = CHOICE_REPORTITEM_FROM_CONTAINER; } else { NullReportItemChoice = CHOICE_NONE; } isDataSetVisible = DEUtil.getBindingHolder( inputElement ) .getElement( ) .getDefn( ) .isPropertyVisible( IReportItemModel.DATA_SET_PROP ); IBindingDialogHelper dialogHelper = (IBindingDialogHelper) ElementAdapterManager.getAdapter( inputElement, IBindingDialogHelper.class ); if ( dialogHelper != null ) dialogHelper.setBindingHolder( DEUtil.getBindingHolder( inputElement ) ); canAggregate = dialogHelper == null ? 
false : dialogHelper.canProcessAggregation( ); } protected void setSelectionInTable( int selectedIndex ) { this.selectIndex = selectedIndex; } private void startTrans( String name ) { if ( isEnableAutoCommit( ) ) { getActionStack( ).startTrans( name ); } } protected void updateButtons( ) { boolean okEnable = false; if ( !canSelect || ( !isDataSetVisible && selectedColumnName != null ) // || ( selectedColumnName != null && getDataSetName( ) != null ) // || ( selectedColumnName != null && DEUtil.getBindingHolder( // inputElement ) // .getDataSet( ) != null ) || getSelectColumnHandle( ) != null ) { okEnable = true; } getOkButton( ).setEnabled( okEnable ); int min = 0; int max = bindingTable.getTable( ).getItemCount( ) - 1; if ( ( min <= selectIndex ) && ( selectIndex <= max ) ) { btnDel.setEnabled( true ); if ( btnEdit != null ) btnEdit.setEnabled( true ); } else { btnDel.setEnabled( false ); if ( btnEdit != null ) btnEdit.setEnabled( false ); } bindingTable.getTable( ).select( selectIndex ); if ( DEUtil.getBindingHolder( inputElement ).getDataBindingType( ) == ReportItemHandle.DATABINDING_TYPE_DATA ) { btnAdd.setEnabled( true ); if ( btnAddAggr != null ) btnAddAggr.setEnabled( true ); if ( btnRefresh != null ) btnRefresh.setEnabled( true ); } else if ( DEUtil.getBindingHolder( inputElement ).getDataBindingType( ) == ReportItemHandle.DATABINDING_TYPE_NONE && ( DEUtil.getBindingHolder( inputElement, true ) == null || DEUtil.getBindingHolder( inputElement, true ) .getDataBindingType( ) != ReportItemHandle.DATABINDING_TYPE_REPORT_ITEM_REF ) ) { btnAdd.setEnabled( true ); if ( btnAddAggr != null ) btnAddAggr.setEnabled( true ); if ( btnRefresh != null ) btnRefresh.setEnabled( true ); } else { btnAdd.setEnabled( false ); btnEdit.setEnabled( false ); btnDel.setEnabled( false ); if ( btnAddAggr != null ) btnAddAggr.setEnabled( false ); if ( btnRefresh != null ) btnRefresh.setEnabled( false ); } } private void updateSelection( ) { if ( canSelect ) { ( (CheckboxTableViewer) bindingTable ).setAllChecked( false ); ( (CheckboxTableViewer) bindingTable ).setGrayed( dummyChoice, true ); if ( getSelectColumnHandle( ) != null ) { ( (CheckboxTableViewer) bindingTable ).setChecked( getSelectColumnHandle( ), true ); } } } }
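/*
 * Sketch only, plain Java with no BIRT/SWT dependencies: the Delete key handler and the Delete
 * button handler above keep the table selection valid by decrementing selectIndex when the last
 * row is removed. The method below (nextSelectionAfterDelete is a hypothetical name) states that
 * adjustment explicitly for a table of itemCount rows from which the row at selectIndex is deleted.
 */
public final class SelectionIndexSketch {
    private SelectionIndexSketch() {
    }

    /**
     * Returns the index to select after deleting the row at selectIndex: the same index if a
     * following row slides into the freed slot, the previous index if the last row was deleted,
     * or -1 if the table is empty afterwards.
     */
    public static int nextSelectionAfterDelete(int selectIndex, int itemCount) {
        if (itemCount <= 1) {
            return -1; // the table is empty after the delete
        }
        if (selectIndex >= itemCount - 1) {
            return itemCount - 2; // deleted the last row, so move the selection up by one
        }
        return selectIndex; // a following row takes over the deleted row's index
    }

    public static void main(String[] args) {
        System.out.println(nextSelectionAfterDelete(2, 3)); // last of three rows deleted -> 1
        System.out.println(nextSelectionAfterDelete(0, 3)); // first of three rows deleted -> 0
        System.out.println(nextSelectionAfterDelete(0, 1)); // only row deleted -> -1
    }
}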
package at.medevit.ecrit.pharmacy_at.application.part; import java.net.URL; import java.util.ArrayList; import java.util.List; import javax.annotation.PostConstruct; import javax.inject.Inject; import org.eclipse.core.commands.Command; import org.eclipse.core.commands.IParameter; import org.eclipse.core.commands.Parameterization; import org.eclipse.core.commands.ParameterizedCommand; import org.eclipse.core.commands.common.NotDefinedException; import org.eclipse.core.databinding.observable.list.IObservableList; import org.eclipse.core.databinding.observable.map.IObservableMap; import org.eclipse.core.databinding.property.Properties; import org.eclipse.core.runtime.FileLocator; import org.eclipse.core.runtime.Path; import org.eclipse.e4.core.commands.ECommandService; import org.eclipse.e4.core.commands.EHandlerService; import org.eclipse.e4.ui.workbench.modeling.ESelectionService; import org.eclipse.e4.ui.workbench.swt.modeling.EMenuService; import org.eclipse.emf.databinding.EMFProperties; import org.eclipse.emf.databinding.FeaturePath; import org.eclipse.emf.ecore.EAttribute; import org.eclipse.jface.databinding.viewers.ObservableListContentProvider; import org.eclipse.jface.databinding.viewers.ObservableMapCellLabelProvider; import org.eclipse.jface.resource.ImageDescriptor; import org.eclipse.jface.viewers.CellEditor; import org.eclipse.jface.viewers.ColumnLabelProvider; import org.eclipse.jface.viewers.ColumnViewer; import org.eclipse.jface.viewers.DoubleClickEvent; import org.eclipse.jface.viewers.EditingSupport; import org.eclipse.jface.viewers.IDoubleClickListener; import org.eclipse.jface.viewers.ISelectionChangedListener; import org.eclipse.jface.viewers.IStructuredSelection; import org.eclipse.jface.viewers.SelectionChangedEvent; import org.eclipse.jface.viewers.TableViewer; import org.eclipse.jface.viewers.TableViewerColumn; import org.eclipse.swt.SWT; import org.eclipse.swt.dnd.DND; import org.eclipse.swt.dnd.DragSourceAdapter; import org.eclipse.swt.dnd.DragSourceEvent; import org.eclipse.swt.dnd.TextTransfer; import org.eclipse.swt.dnd.Transfer; import org.eclipse.swt.events.KeyAdapter; import org.eclipse.swt.events.KeyEvent; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.graphics.Image; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Table; import org.eclipse.swt.widgets.Text; import org.osgi.framework.Bundle; import org.osgi.framework.FrameworkUtil; import at.medevit.ecrit.pharmacy_at.application.Messages; import at.medevit.ecrit.pharmacy_at.application.filter.ArticleFilter; import at.medevit.ecrit.pharmacy_at.application.filter.CriticalLevelFilter; import at.medevit.ecrit.pharmacy_at.core.SampleModel; import at.medevit.ecrit.pharmacy_at.model.ModelPackage; import at.medevit.ecrit.pharmacy_at.model.StockArticle; public class ArticleListPart { private final Image GREEN = getImage("boxGreen.png"); private final Image YELLOW = getImage("boxYellow.png"); private final Image RED = getImage("boxRed.png"); private final Image GREY = getImage("boxGrey.png"); private TableViewer tableViewer; private List<StockArticle> stockArticles; private ArticleFilter filter; private CriticalLevelFilter criticalLevelFilter; @Inject private EMenuService menuService; @Inject private ESelectionService selectionService; @Inject private ECommandService commandService; @Inject private 
EHandlerService handlerService; @Inject public ArticleListPart(){ stockArticles = SampleModel.getStock().getArticles(); } @PostConstruct public void postConstruct(Composite parent){ Composite composite = new Composite(parent, SWT.NONE); composite.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1)); composite.setLayout(new GridLayout(1, false)); // set critical stock level reached filter criticalLevelFilter = new CriticalLevelFilter(); // search Composite filterComposite = new Composite(composite, SWT.NONE); filterComposite.setLayoutData(new GridData(SWT.FILL, SWT.TOP, true, false, 1, 1)); filterComposite.setLayout(new GridLayout(2, false)); final Text txtSearch = new Text(filterComposite, SWT.BORDER | SWT.SEARCH); txtSearch.setMessage("Search"); txtSearch.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1)); txtSearch.addKeyListener(new KeyAdapter() { public void keyReleased(KeyEvent ke){ filter.setSearchText(txtSearch.getText()); tableViewer.refresh(); } }); Button btnFilterCritical = new Button(filterComposite, SWT.TOGGLE); btnFilterCritical.setText("Only Critical"); btnFilterCritical.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e){ if (((Button) e.widget).getSelection()) { tableViewer.addFilter(criticalLevelFilter); } else { tableViewer.removeFilter(criticalLevelFilter); } tableViewer.refresh(); } }); initTableViewer(composite); menuService.registerContextMenu(tableViewer.getTable(), Messages.getString("ID_POPUP_ARTICLELIST")); } private void initTableViewer(Composite composite){ tableViewer = new TableViewer(composite, SWT.BORDER | SWT.FULL_SELECTION); Table table = tableViewer.getTable(); table.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1)); table.setHeaderVisible(true); table.setLinesVisible(true); ObservableListContentProvider cp = new ObservableListContentProvider(); initColumns(cp); tableViewer.setContentProvider(cp); // add search filter filter = new ArticleFilter(); tableViewer.addFilter(filter); // add drag support Transfer[] transferTypes = new Transfer[] { TextTransfer.getInstance() }; tableViewer.addDragSupport(DND.DROP_COPY, transferTypes, new DragSourceAdapter() { @Override public void dragSetData(DragSourceEvent event){ IStructuredSelection selection = (IStructuredSelection) tableViewer.getSelection(); StockArticle a = (StockArticle) selection.getFirstElement(); selectionService.setSelection(a); if (TextTransfer.getInstance().isSupportedType(event.dataType)) { event.data = a.toString(); } } }); // add the selection change listener tableViewer.addSelectionChangedListener(new ISelectionChangedListener() { @Override public void selectionChanged(SelectionChangedEvent event){ IStructuredSelection selection = (IStructuredSelection) event.getSelection(); StockArticle stockArticle = (StockArticle) selection.getFirstElement(); selectionService.setSelection(stockArticle); } }); // add double click listener tableViewer.addDoubleClickListener(new IDoubleClickListener() { @Override public void doubleClick(DoubleClickEvent event){ Command cmd = commandService.getCommand(Messages.getString("ID_CMD_ADD_TO_INVOICE")); // ParameterizedCommand pCmd = new ParameterizedCommand(cmd, null); ParameterizedCommand pCmd = prepareCommandWithParameters(cmd); // only execute if command can be executed if (handlerService.canExecute(pCmd)) { handlerService.executeHandler(pCmd); } } }); // set model IObservableList input = Properties.selfList(StockArticle.class).observe(stockArticles);
tableViewer.setInput(input); } /** * Builds a ParameterizedCommand for the given command, using a fixed demo parameter value. * @param cmd the command to parameterize * @return the parameterized command * @deprecated for demonstration purposes */ protected ParameterizedCommand prepareCommandWithParameters(Command cmd){ ParameterizedCommand pCmd = new ParameterizedCommand(cmd, null); try { // StockArticleConverter sac = StockArticleConverter.getInstance(); // StockArticle stockArticle = (StockArticle) selectionService.getSelection(); // sac.convertToString(stockArticle); // get parameters IParameter iparam = cmd.getParameter("commandparameter.modelelement.Article"); ArrayList<Parameterization> parameters = new ArrayList<Parameterization>(); parameters.add(new Parameterization(iparam, "a stock article")); // would only be relevant if passing via converter would work properly // parameters.add(new Parameterization(iparam, stockArticle.hashCode() + "")); // create parameterized command pCmd = new ParameterizedCommand(cmd, parameters.toArray(new Parameterization[parameters .size()])); } catch (NotDefinedException e) { e.printStackTrace(); } return pCmd; } private void initColumns(ObservableListContentProvider cp){ String[] columnNames = new String[] { "AdmNr", "Name", "Availability" }; EAttribute[] columnAttributes = new EAttribute[] { ModelPackage.Literals.ARTICLE__ADMISSION_NUMBER, ModelPackage.Literals.ARTICLE__NAME, ModelPackage.Literals.ARTICLE__AVAILABILITY }; int[] columnWidths = new int[] { 80, 100, 100 }; for (int i = 0; i < columnNames.length; i++) { TableViewerColumn tvc = new TableViewerColumn(tableViewer, SWT.NONE); tvc.getColumn().setText(columnNames[i]); tvc.getColumn().setWidth(columnWidths[i]); tvc.getColumn().setResizable(true); // determine the attribute that should be observed FeaturePath path = FeaturePath.fromList(ModelPackage.Literals.STOCK_ARTICLE__ARTICLE, columnAttributes[i]); // bind the feature and setup a table column IObservableMap map = EMFProperties.value(path).observeDetail(cp.getKnownElements()); tvc.setLabelProvider(new ObservableMapCellLabelProvider(map)); } TableViewerColumn tvcOnStock = new TableViewerColumn(tableViewer, SWT.NONE); tvcOnStock.getColumn().setText("OnStock"); tvcOnStock.getColumn().setWidth(80); IObservableMap stockMap = EMFProperties.value(ModelPackage.Literals.STOCK_ARTICLE__NUMBER_ON_STOCK) .observeDetail(cp.getKnownElements()); tvcOnStock.setLabelProvider(new ObservableMapCellLabelProvider(stockMap)); TableViewerColumn tvcLowerBound = new TableViewerColumn(tableViewer, SWT.NONE); tvcLowerBound.getColumn().setText("LowerBound"); tvcLowerBound.getColumn().setWidth(80); IObservableMap lowerBoundMap = EMFProperties.value(ModelPackage.Literals.STOCK_ARTICLE__LOWER_BOUND).observeDetail( cp.getKnownElements()); tvcLowerBound.setLabelProvider(new ObservableMapCellLabelProvider(lowerBoundMap)); TableViewerColumn tvcOrdered = new TableViewerColumn(tableViewer, SWT.NONE); tvcOrdered.getColumn().setText("Ordered"); tvcOrdered.getColumn().setWidth(80); EMFProperties.value(ModelPackage.Literals.STOCK_ARTICLE__NUMBER_ORDERED).observeDetail( cp.getKnownElements()); tvcOrdered.setLabelProvider(new ColumnLabelProvider() { @Override public String getText(Object element){ return null; } @Override public Image getImage(Object element){ StockArticle stockArticle = (StockArticle) element; int value = stockArticle.getNumberOnStock() - stockArticle.getLowerBound(); if (stockArticle.getNumberOrdered() > 0) { return GREEN; } if (stockArticle.getNumberOnStock() < 1) { return RED; } if (value < 1 && stockArticle.getNumberOrdered() == 0) { return YELLOW; } return GREY; } }); } public void updatePart(){
stockArticles = SampleModel.getStock().getArticles(); tableViewer.refresh(); } private static Image getImage(String file){ Bundle bundle = FrameworkUtil.getBundle(ArticleListPart.class); URL url = FileLocator.find(bundle, new Path("icons/" + file), null); ImageDescriptor image = ImageDescriptor.createFromURL(url); return image.createImage(); } }
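/*
 * Illustrative sketch (not part of the original sources): the "Only Critical" toggle above relies on
 * CriticalLevelFilter, whose implementation is not shown here. A JFace ViewerFilter along the following
 * lines would keep only StockArticles whose stock has dropped to or below the lower bound with nothing
 * ordered yet; the class name and the exact rule are assumptions, not the project's actual code.
 */
class CriticalLevelFilterSketch extends org.eclipse.jface.viewers.ViewerFilter {
	@Override
	public boolean select(org.eclipse.jface.viewers.Viewer viewer, Object parentElement, Object element){
		at.medevit.ecrit.pharmacy_at.model.StockArticle a =
			(at.medevit.ecrit.pharmacy_at.model.StockArticle) element;
		// critical: nothing on order and the on-stock count is at or below the configured lower bound
		return a.getNumberOrdered() == 0 && a.getNumberOnStock() <= a.getLowerBound();
	}
}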
package com.rbmhtechnology.vind.elasticsearch.backend.client; import com.rbmhtechnology.vind.elasticsearch.backend.util.ElasticRequestUtils; import com.rbmhtechnology.vind.elasticsearch.backend.util.PainlessScript; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.MultiGetRequest; import org.elasticsearch.action.get.MultiGetResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.indices.CreateIndexResponse; import org.elasticsearch.client.indices.GetIndexRequest; import org.elasticsearch.client.indices.GetMappingsRequest; import org.elasticsearch.client.indices.GetMappingsResponse; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.net.URI; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; public abstract class ElasticVindClient { private static final Logger log = LoggerFactory.getLogger(ElasticVindClient.class); protected String defaultIndex; protected RestHighLevelClient client; protected int port; protected String host; protected String scheme; private long connectionTimeOut = 1000; private long clientTimOut = 1000; public RestHighLevelClient getClient() { return client; } public String getDefaultIndex() { return defaultIndex; } public ElasticVindClient setDefaultIndex(String index) { this.defaultIndex = index; return this; } public long getConnectionTimeOut() { return connectionTimeOut; } public ElasticVindClient setConnectionTimeOut(long connectionTimeOut) { this.connectionTimeOut = connectionTimeOut; return this; } public long getClientTimOut() { return clientTimOut; } public ElasticVindClient setClientTimOut(long clientTimOut) { this.clientTimOut = clientTimOut; return this; } public boolean indexExists() throws IOException { try { final RequestOptions authenticatedDefaultRequest = RequestOptions.DEFAULT; final GetIndexRequest existsRequest = new GetIndexRequest(getDefaultIndex()); return this.client.indices().exists(existsRequest, authenticatedDefaultRequest); } catch (Exception e) { throw new IOException(String.format("Index does not exist: %s", getDefaultIndex()),e); } } public boolean ping() throws IOException { try { final RequestOptions authenticatedDefaultRequest = RequestOptions.DEFAULT; return this.client.ping(authenticatedDefaultRequest); } catch (IOException e) { log.error("Unable to ping Elasticsearch server {}://{}:{}", scheme, host, port,e); throw new IOException(String.format("Unable to ping Elasticsearch server %s://%s:%s", scheme, host, port),e); } } public BulkResponse add(Map<String, Object> 
jsonDoc) throws IOException { final BulkRequest bulkIndexRequest = new BulkRequest(defaultIndex); bulkIndexRequest.add(ElasticRequestUtils.getIndexRequest(defaultIndex,jsonDoc)); bulkIndexRequest.timeout(TimeValue.timeValueMillis(connectionTimeOut)); bulkIndexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); return client.bulk(bulkIndexRequest, RequestOptions.DEFAULT); } public BulkResponse add(List<Map<String, Object>> jsonDocs) throws IOException { final BulkRequest bulkIndexRequest = new BulkRequest(defaultIndex); jsonDocs.forEach( jsonDoc -> bulkIndexRequest.add(ElasticRequestUtils.getIndexRequest(defaultIndex,jsonDoc)) ); bulkIndexRequest.timeout(TimeValue.timeValueMillis(connectionTimeOut)); bulkIndexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); return client.bulk(bulkIndexRequest, RequestOptions.DEFAULT); } public UpdateResponse update(String id, PainlessScript.ScriptBuilder script) throws IOException { final UpdateRequest request = ElasticRequestUtils.getUpdateRequest(defaultIndex, id, script); return client.update(request, RequestOptions.DEFAULT); } public GetResponse realTimeGet(String id) throws IOException { return client.get(ElasticRequestUtils.getRealTimeGetRequest(defaultIndex,id),RequestOptions.DEFAULT); } public MultiGetResponse realTimeGet(List<String> ids) throws IOException { final MultiGetRequest request = ElasticRequestUtils.getRealTimeGetRequest(defaultIndex, ids); return client.mget(request, RequestOptions.DEFAULT); } public DeleteResponse deleteById(String id) throws IOException { return client.delete(ElasticRequestUtils.getDeleteRequest(defaultIndex,id),RequestOptions.DEFAULT); } public CreateIndexResponse createIndex(String indexName) throws IOException { return client.indices().create(ElasticRequestUtils.getCreateIndexRequest(indexName), RequestOptions.DEFAULT); } public BulkByScrollResponse deleteByQuery(QueryBuilder query) throws IOException { final DeleteByQueryRequest request = ElasticRequestUtils.getDeleteByQueryRequest(defaultIndex, query); return client.deleteByQuery(request,RequestOptions.DEFAULT); } public GetMappingsResponse getMappings() throws IOException { final GetMappingsRequest request = ElasticRequestUtils.getMappingsRequest(defaultIndex); return client.indices().getMapping(request, RequestOptions.DEFAULT); } public BulkResponse addPercolateQuery(String queryId, QueryBuilder query) throws IOException { return addPercolateQuery(queryId, query, new HashMap<>()); } public BulkResponse addPercolateQuery(String queryId, QueryBuilder query, Map<String, Object> metadata) throws IOException { metadata.put("query", query); final XContentBuilder queryDoc = mapToXContentBuilder(metadata); final BulkRequest bulkIndexRequest = new BulkRequest(defaultIndex); bulkIndexRequest.add(ElasticRequestUtils.addPercolatorQueryRequest(defaultIndex, queryId, queryDoc)); bulkIndexRequest.timeout(TimeValue.timeValueMillis(connectionTimeOut)); bulkIndexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); return client.bulk(bulkIndexRequest, RequestOptions.DEFAULT); } public SearchResponse percolatorDocQuery(List<Map<String, Object>> mapDocs, QueryBuilder query) throws IOException { final List<XContentBuilder> xContentDocs = new ArrayList<>(); for (Map<String, Object> mapDoc : mapDocs) { xContentDocs.add(mapToXContentBuilder(mapDoc)); } final SearchRequest request = ElasticRequestUtils.percolateDocumentRequest(defaultIndex, xContentDocs, query); return client.search(request, RequestOptions.DEFAULT); } public SearchResponse 
percolatorDocQuery(Map<String, Object> mapDoc, QueryBuilder query) throws IOException { return percolatorDocQuery(Collections.singletonList(mapDoc), query); } public SearchResponse percolatorDocQuery(Map<String, Object> matchingDoc) throws IOException { return percolatorDocQuery(Collections.singletonList(matchingDoc)); } public SearchResponse percolatorDocQuery(List<Map<String, Object>> mapDoc) throws IOException { return percolatorDocQuery(mapDoc, null); } public void close() throws IOException { try { this.client.close(); } catch (IOException e) { log.error("Unable to close Elasticsearch client connection to {}://{}:{}", scheme, host, port,e); throw new IOException(String.format("Unable to close Elasticsearch client connection to %s://%s:%s", scheme, host, port),e); } } public SearchResponse query(SearchSourceBuilder query) throws IOException { final SearchRequest request = ElasticRequestUtils.getSearchRequest(defaultIndex, query); return client.search(request,RequestOptions.DEFAULT); } private XContentBuilder mapToXContentBuilder(Map<String, Object> doc) throws IOException { final XContentBuilder builder = jsonBuilder().startObject(); for (Map.Entry<String, Object> entry : doc.entrySet()) { String k = entry.getKey(); Object value = entry.getValue(); builder.field(k, value); } return builder.endObject(); } public static class Builder { private String defaultIndex; private final int port; private final String scheme; private final String host; public Builder(String host) { final URI elasticUri = URI.create(host); this.port = elasticUri.getPort(); this.host = elasticUri.getHost(); this.scheme = elasticUri.getScheme(); } public Builder setDefaultIndex(String index) { this.defaultIndex = index; return this; } public ElasticVindClient buildWithBasicAuth(String user, String key) { return ElasticVindClientBasicAuth.build(defaultIndex, port, scheme, host, user, key); } public ElasticVindClient buildWithApiKeyAuth(String id, String key) { return ElasticVindClientApiKeyAuth.build(defaultIndex, port, scheme, host, id, key); } public ElasticVindClient build() { return ElasticVindClientNoAuth.build(defaultIndex, port, scheme, host); } } }
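/*
 * Hedged usage sketch (added for illustration, not part of the original sources). It only uses members
 * visible in ElasticVindClient above: the nested Builder, ping(), add(Map) and close(). Host, index name
 * and the document fields are placeholder values; the exact document shape expected by
 * ElasticRequestUtils.getIndexRequest is not shown here and is therefore an assumption.
 */
class ElasticVindClientUsageSketch {
	public static void main(String[] args) throws Exception {
		ElasticVindClient client = new ElasticVindClient.Builder("http://localhost:9200")
			.setDefaultIndex("vind-demo")
			.build(); // buildWithBasicAuth(...) / buildWithApiKeyAuth(...) are the authenticated variants
		try {
			if (client.ping()) { // verify connectivity before indexing
				java.util.Map<String, Object> doc = new java.util.HashMap<>();
				doc.put("title", "hello");
				client.add(doc); // bulk-indexes a single JSON document into the default index
			}
		} finally {
			client.close();
		}
	}
}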
package org.wildfly.extension.batch.jberet.deployment; import javax.batch.operations.JobOperator; import java.util.Properties; import org.jboss.as.controller.AttributeDefinition; import org.jboss.as.controller.OperationContext; import org.jboss.as.controller.OperationFailedException; import org.jboss.as.controller.OperationStepHandler; import org.jboss.as.controller.PathAddress; import org.jboss.dmr.ModelNode; import org.jboss.msc.service.ServiceController; import org.wildfly.extension.batch.jberet.BatchServiceNames; import org.wildfly.extension.batch.jberet._private.BatchLogger; /** * A handler to assist with batch operations that require a {@linkplain JobOperator}. * * @author <a href="mailto:[email protected]">James R. Perkins</a> */ abstract class JobOperationStepHandler implements OperationStepHandler { @Override public final void execute(final OperationContext context, final ModelNode operation) throws OperationFailedException { final PathAddress address = context.getCurrentAddress(); final ServiceController<?> controller = context.getServiceRegistry(true).getService(BatchServiceNames.jobOperatorServiceName(address)); final JobOperator jobOperator = (JobOperator) controller.getService(); execute(context, operation, jobOperator); } /** * Executes the step. Includes the {@linkplain JobOperator} for convenience. * * @param context the operation context used * @param operation the operation for the step * @param jobOperator the job operator * * @throws OperationFailedException if there is a step failure */ protected abstract void execute(OperationContext context, ModelNode operation, JobOperator jobOperator) throws OperationFailedException; static ModelNode resolveValue(final OperationContext context, final ModelNode operation, final AttributeDefinition attribute) throws OperationFailedException { final ModelNode value = new ModelNode(); if (operation.has(attribute.getName())) { value.set(operation.get(attribute.getName())); } return attribute.resolveValue(context, value); } static Properties resolvePropertyValue(final OperationContext context, final ModelNode operation, final AttributeDefinition attribute) throws OperationFailedException { // Get the properties final Properties properties = new Properties(); if (operation.hasDefined(attribute.getName())) { resolveValue(context, operation, attribute).asPropertyList() .forEach(p -> properties.put(p.getName(), p.getValue().asString())); } return properties; } static OperationFailedException createOperationFailure(final Throwable cause) { final String msg = cause.getLocalizedMessage(); // OperationFailedException's don't log the cause, for debug purposes logging the failure could be useful BatchLogger.LOGGER.debugf(cause, "Failed to process batch operation: %s", msg); return new OperationFailedException(msg, cause); } }
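/*
 * Hedged illustration (not part of the original sources): a minimal concrete subclass showing how the
 * abstract JobOperationStepHandler above is intended to be used. Only the execute(...) contract and the
 * static helpers come from the original class; the "job-xml-name" attribute defined here and the empty
 * Properties are assumptions. Assumes imports of org.jboss.as.controller.SimpleAttributeDefinitionBuilder
 * and org.jboss.dmr.ModelType in addition to those of the class above.
 */
class StartJobHandlerSketch extends JobOperationStepHandler {
	// Assumed attribute; the real subsystem defines its operation parameters elsewhere.
	static final AttributeDefinition JOB_XML_NAME =
		new SimpleAttributeDefinitionBuilder("job-xml-name", ModelType.STRING, false).build();

	@Override
	protected void execute(OperationContext context, ModelNode operation, JobOperator jobOperator)
		throws OperationFailedException {
		// Resolve the job XML name from the operation model using the inherited helper.
		final String jobXmlName = resolveValue(context, operation, JOB_XML_NAME).asString();
		try {
			// JSR-352: start a new job execution and report its id as the operation result.
			final long executionId = jobOperator.start(jobXmlName, new Properties());
			context.getResult().set(executionId);
		} catch (Exception e) {
			// Wrap batch runtime failures the same way the helpers in this class do.
			throw createOperationFailure(e);
		}
	}
}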
package info.elexis.server.core.connector.elexis.billable; import java.util.List; import org.eclipse.core.runtime.IStatus; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ch.elexis.core.model.article.Constants; import ch.elexis.core.model.eigenartikel.EigenartikelTyp; import ch.rgw.tools.Money; import ch.rgw.tools.TimeTool; import info.elexis.server.core.connector.elexis.billable.optifier.DefaultOptifier; import info.elexis.server.core.connector.elexis.jpa.model.annotated.Artikel; import info.elexis.server.core.connector.elexis.jpa.model.annotated.Behandlung; import info.elexis.server.core.connector.elexis.jpa.model.annotated.Fall; import info.elexis.server.core.connector.elexis.jpa.model.annotated.Kontakt; import info.elexis.server.core.connector.elexis.jpa.model.annotated.Verrechnet; public class VerrechenbarArtikel implements IBillable<Artikel> { protected Logger log = LoggerFactory.getLogger(VerrechenbarArtikel.class); private final Artikel article; public VerrechenbarArtikel(Artikel article) { this.article = article; } @Override public String getCodeSystemName() { return article.getTyp(); } @Override public String getCodeSystemCode() { return "999"; } @Override public String getId() { return article.getId(); } @Override public String getCode() { if (Artikel.TYP_EIGENARTIKEL.equals(article.getTyp())) { return article.getSubId(); } return article.getId(); } @Override public String getText() { return article.getLabel(); } @Override public List<Object> getActions(Object context) { return null; } @Override public IStatus add(Behandlung kons, Kontakt userContact, Kontakt mandatorContact) { return new DefaultOptifier().add(this, kons, userContact, mandatorContact); } @Override public IStatus removeFromConsultation(Verrechnet vr, Kontakt mandatorContact) { return new DefaultOptifier().remove(vr); } @Override public Artikel getEntity() { return article; } @Override public int getTP(TimeTool date, Fall fall) { int vkt = 0; double vpe = 0.0; double vke = 0.0; try { Money m = new Money(); m.addCent(article.getVkPreis()); vkt = m.getCents(); } catch (Exception e) { log.warn("Error parsing public price: " + e.getMessage() + " @ " + article.getId()); } try { vpe = Double.parseDouble(article.getExtInfoAsString(Constants.FLD_EXT_PACKAGE_UNIT_INT)); } catch (Exception e) { log.warn("Error parsing package size: " + e.getMessage() + " @ " + article.getId()); } try { vke = Double.parseDouble(article.getExtInfoAsString(Artikel.FLD_EXTINFO_SELLUNIT)); } catch (Exception e) { log.warn("Error parsing sell unit: " + e.getMessage() + " @ " + article.getId()); } return determineTP(date, fall, vpe, vke, vkt); } @Override public double getFactor(TimeTool dat, Fall fall) { return 1; } @Override public VatInfo getVatInfo() { if (Artikel.TYP_EIGENARTIKEL.equalsIgnoreCase(article.getTyp())) { EigenartikelTyp eat = EigenartikelTyp.byCharSafe(article.getCodeclass()); switch (eat) { case PHARMA: case MAGISTERY: return VatInfo.VAT_CH_ISMEDICAMENT; case NONPHARMA: return VatInfo.VAT_CH_NOTMEDICAMENT; default: break; } return VatInfo.VAT_NONE; } return VatInfo.VAT_DEFAULT; } public static int determineTP(TimeTool date, Fall fall, double vpe, double vke, int vkt) { if ((vpe > 0.0) && (vke > 0.0) && (vpe != vke)) { return (int) Math.round(vke * (vkt / vpe)); } else { return vkt; } } }
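/*
 * Worked example (added for illustration, not part of the original sources): determineTP above scales the
 * package price (vkt, in cents) to the number of units actually sold (vke) out of the package size (vpe);
 * when the two sizes match or are unknown, the full package price is charged. The numbers below are made up.
 */
class VerrechenbarArtikelTpExample {
	public static void main(String[] args) {
		// Package of 100 units priced at 1250 cents, 10 units sold: 10 * (1250 / 100.0) = 125 cents.
		int scaled = VerrechenbarArtikel.determineTP(null, null, 100.0, 10.0, 1250);
		// Package size equals sell unit: the unscaled package price of 1250 cents is returned.
		int full = VerrechenbarArtikel.determineTP(null, null, 100.0, 100.0, 1250);
		System.out.println(scaled + " / " + full); // prints "125 / 1250"
	}
}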
package org.eclipse.birt.chart.reportitem.ui; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import org.eclipse.birt.chart.exception.ChartException; import org.eclipse.birt.chart.model.Chart; import org.eclipse.birt.chart.model.ChartWithAxes; import org.eclipse.birt.chart.model.DialChart; import org.eclipse.birt.chart.model.component.Series; import org.eclipse.birt.chart.model.data.Query; import org.eclipse.birt.chart.model.data.SeriesDefinition; import org.eclipse.birt.chart.model.data.impl.QueryImpl; import org.eclipse.birt.chart.model.type.BubbleSeries; import org.eclipse.birt.chart.model.type.DifferenceSeries; import org.eclipse.birt.chart.model.type.GanttSeries; import org.eclipse.birt.chart.model.type.StockSeries; import org.eclipse.birt.chart.plugin.ChartEnginePlugin; import org.eclipse.birt.chart.reportitem.ChartReportItemUtil; import org.eclipse.birt.chart.reportitem.ChartXTabUtil; import org.eclipse.birt.chart.reportitem.ui.dialogs.ChartColumnBindingDialog; import org.eclipse.birt.chart.reportitem.ui.dialogs.ExtendedItemFilterDialog; import org.eclipse.birt.chart.reportitem.ui.dialogs.ReportItemParametersDialog; import org.eclipse.birt.chart.reportitem.ui.i18n.Messages; import org.eclipse.birt.chart.reportitem.ui.views.attributes.provider.ChartCubeFilterHandleProvider; import org.eclipse.birt.chart.reportitem.ui.views.attributes.provider.ChartFilterProviderDelegate; import org.eclipse.birt.chart.ui.swt.ColorPalette; import org.eclipse.birt.chart.ui.swt.ColumnBindingInfo; import org.eclipse.birt.chart.ui.swt.CustomPreviewTable; import org.eclipse.birt.chart.ui.swt.DataDefinitionTextManager; import org.eclipse.birt.chart.ui.swt.DefaultChartDataSheet; import org.eclipse.birt.chart.ui.swt.SimpleTextTransfer; import org.eclipse.birt.chart.ui.swt.interfaces.IChartDataSheet; import org.eclipse.birt.chart.ui.swt.interfaces.IDataServiceProvider; import org.eclipse.birt.chart.ui.swt.interfaces.ISelectDataComponent; import org.eclipse.birt.chart.ui.swt.wizard.ChartAdapter; import org.eclipse.birt.chart.ui.util.ChartHelpContextIds; import org.eclipse.birt.chart.ui.util.ChartUIConstants; import org.eclipse.birt.chart.ui.util.ChartUIUtil; import org.eclipse.birt.chart.util.ChartUtil; import org.eclipse.birt.core.data.ExpressionUtil; import org.eclipse.birt.core.exception.BirtException; import org.eclipse.birt.core.ui.frameworks.taskwizard.WizardBase; import org.eclipse.birt.report.designer.internal.ui.dialogs.ExpressionFilter; import org.eclipse.birt.report.designer.internal.ui.views.ViewsTreeProvider; import org.eclipse.birt.report.designer.internal.ui.views.attributes.provider.AbstractFilterHandleProvider; import org.eclipse.birt.report.designer.ui.actions.NewDataSetAction; import org.eclipse.birt.report.designer.ui.cubebuilder.action.NewCubeAction; import org.eclipse.birt.report.designer.ui.dialogs.ColumnBindingDialog; import org.eclipse.birt.report.designer.ui.dialogs.ExpressionProvider; import org.eclipse.birt.report.designer.util.DEUtil; import org.eclipse.birt.report.item.crosstab.core.de.CrosstabReportItemHandle; import org.eclipse.birt.report.model.api.ComputedColumnHandle; import org.eclipse.birt.report.model.api.DataSetHandle; import org.eclipse.birt.report.model.api.ExtendedItemHandle; import org.eclipse.birt.report.model.api.ReportItemHandle; import org.eclipse.birt.report.model.api.metadata.IClassInfo; import org.eclipse.birt.report.model.api.olap.CubeHandle; import org.eclipse.birt.report.model.api.olap.LevelHandle; import 
org.eclipse.birt.report.model.api.olap.MeasureHandle; import org.eclipse.emf.common.util.EList; import org.eclipse.jface.action.Action; import org.eclipse.jface.action.IAction; import org.eclipse.jface.action.IContributionItem; import org.eclipse.jface.action.IMenuListener; import org.eclipse.jface.action.IMenuManager; import org.eclipse.jface.action.MenuManager; import org.eclipse.jface.resource.JFaceResources; import org.eclipse.jface.viewers.TreeViewer; import org.eclipse.jface.window.Window; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CCombo; import org.eclipse.swt.custom.StackLayout; import org.eclipse.swt.dnd.DND; import org.eclipse.swt.dnd.DragSource; import org.eclipse.swt.dnd.DragSourceEvent; import org.eclipse.swt.dnd.DragSourceListener; import org.eclipse.swt.dnd.Transfer; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.MenuItem; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.Tree; import org.eclipse.swt.widgets.TreeItem; import org.eclipse.swt.widgets.Widget; import org.eclipse.ui.PlatformUI; /** * Data sheet implementation for Standard Chart */ public class StandardChartDataSheet extends DefaultChartDataSheet implements Listener { final private ExtendedItemHandle itemHandle; final private ReportDataServiceProvider dataProvider; private Button btnInherit = null; private Button btnUseData = null; private boolean bIsInheritSelected = true; private CCombo cmbDataItems = null; private StackLayout stackLayout = null; private Composite cmpStack = null; private Composite cmpCubeTree = null; private Composite cmpDataPreview = null; private CustomPreviewTable tablePreview = null; private TreeViewer cubeTreeViewer = null; private Button btnFilters = null; private Button btnParameters = null; private Button btnBinding = null; private String currentData = null; public static final int SELECT_NONE = 1; public static final int SELECT_NEXT = 2; public static final int SELECT_DATA_SET = 4; public static final int SELECT_DATA_CUBE = 8; public static final int SELECT_REPORT_ITEM = 16; public static final int SELECT_NEW_DATASET = 32; public static final int SELECT_NEW_DATACUBE = 64; private final int iSupportedDataItems; private List<Integer> selectDataTypes = new ArrayList<Integer>( ); public StandardChartDataSheet( ExtendedItemHandle itemHandle, ReportDataServiceProvider dataProvider, int iSupportedDataItems ) { this.itemHandle = itemHandle; this.dataProvider = dataProvider; this.iSupportedDataItems = iSupportedDataItems; addListener( this ); } public StandardChartDataSheet( ExtendedItemHandle itemHandle, ReportDataServiceProvider dataProvider ) { this( itemHandle, dataProvider, 0 ); } public Composite createActionButtons( Composite parent ) { Composite composite = ChartUIUtil.createCompositeWrapper( parent ); { composite.setLayoutData( new GridData( GridData.FILL_HORIZONTAL | GridData.VERTICAL_ALIGN_END ) ); } btnFilters = new Button( composite, SWT.NONE ); { btnFilters.setAlignment( SWT.CENTER ); GridData gridData = new GridData( GridData.FILL_HORIZONTAL ); btnFilters.setLayoutData( gridData ); btnFilters.setText( Messages.getString( "StandardChartDataSheet.Label.Filters" ) ); //$NON-NLS-1$ btnFilters.addListener( SWT.Selection, this ); } btnParameters = new 
Button( composite, SWT.NONE ); { btnParameters.setAlignment( SWT.CENTER ); GridData gridData = new GridData( GridData.FILL_HORIZONTAL ); btnParameters.setLayoutData( gridData ); btnParameters.setText( Messages.getString( "StandardChartDataSheet.Label.Parameters" ) ); //$NON-NLS-1$ btnParameters.addListener( SWT.Selection, this ); } btnBinding = new Button( composite, SWT.NONE ); { btnBinding.setAlignment( SWT.CENTER ); GridData gridData = new GridData( GridData.FILL_HORIZONTAL ); btnBinding.setLayoutData( gridData ); btnBinding.setText( Messages.getString( "StandardChartDataSheet.Label.DataBinding" ) ); //$NON-NLS-1$ btnBinding.addListener( SWT.Selection, this ); } setEnabledForButtons( ); return composite; } private void setEnabledForButtons( ) { if ( isCubeMode( ) ) { // getDataServiceProvider( ).checkState( // IDataServiceProvider.SHARE_QUERY ) boolean disabled = getDataServiceProvider( ).isInXTabAggrCell( ) || getDataServiceProvider( ).isInXTabMeasureCell( ); btnFilters.setEnabled( !disabled ); btnBinding.setEnabled( getDataServiceProvider( ).isInvokingSupported( ) || getDataServiceProvider( ).isSharedBinding( ) ); btnParameters.setEnabled( false ); } else { btnFilters.setEnabled( hasDataSet( ) ); // Bugzilla#177704 Chart inheriting data from container doesn't // support parameters due to limitation in DtE btnParameters.setEnabled( getDataServiceProvider( ).getBoundDataSet( ) != null && getDataServiceProvider( ).isInvokingSupported( ) ); btnBinding.setEnabled( hasDataSet( ) && ( getDataServiceProvider( ).isInvokingSupported( ) || getDataServiceProvider( ).isSharedBinding( ) ) ); } } private boolean hasDataSet( ) { return getDataServiceProvider( ).getReportDataSet( ) != null || getDataServiceProvider( ).getBoundDataSet( ) != null; } void fireEvent( Widget widget, int eventType ) { Event event = new Event( ); event.data = this; event.widget = widget; event.type = eventType; notifyListeners( event ); } public Composite createDataDragSource( Composite parent ) { cmpStack = new Composite( parent, SWT.NONE ); cmpStack.setLayoutData( new GridData( GridData.FILL_BOTH ) ); stackLayout = new StackLayout( ); stackLayout.marginHeight = 0; stackLayout.marginWidth = 0; cmpStack.setLayout( stackLayout ); cmpCubeTree = ChartUIUtil.createCompositeWrapper( cmpStack ); cmpDataPreview = ChartUIUtil.createCompositeWrapper( cmpStack ); Label label = new Label( cmpCubeTree, SWT.NONE ); { label.setText( Messages.getString( "StandardChartDataSheet.Label.CubeTree" ) ); //$NON-NLS-1$ label.setFont( JFaceResources.getBannerFont( ) ); } if ( !dataProvider.isInXTabMeasureCell( ) && !dataProvider.isInMultiView( ) ) { // No description if dnd is disabled Label description = new Label( cmpCubeTree, SWT.WRAP ); { GridData gd = new GridData( GridData.FILL_HORIZONTAL ); description.setLayoutData( gd ); description.setText( Messages.getString( "StandardChartDataSheet.Label.DragCube" ) ); //$NON-NLS-1$ } } cubeTreeViewer = new TreeViewer( cmpCubeTree, SWT.SINGLE | SWT.H_SCROLL | SWT.V_SCROLL | SWT.BORDER ); cubeTreeViewer.getTree( ) .setLayoutData( new GridData( GridData.FILL_BOTH ) ); ( (GridData) cubeTreeViewer.getTree( ).getLayoutData( ) ).heightHint = 120; ViewsTreeProvider provider = new ViewsTreeProvider( ); cubeTreeViewer.setLabelProvider( provider ); cubeTreeViewer.setContentProvider( provider ); cubeTreeViewer.setInput( getCube( ) ); final DragSource dragSource = new DragSource( cubeTreeViewer.getTree( ), DND.DROP_COPY ); dragSource.setTransfer( new Transfer[]{ SimpleTextTransfer.getInstance( ) } ); 
dragSource.addDragListener( new DragSourceListener( ) { private String text = null; public void dragFinished( DragSourceEvent event ) { // TODO Auto-generated method stub } public void dragSetData( DragSourceEvent event ) { event.data = text; } public void dragStart( DragSourceEvent event ) { text = createCubeExpression( ); if ( text == null ) { event.doit = false; } } } ); cubeTreeViewer.getTree( ).addListener( SWT.MouseDown, new Listener( ) { public void handleEvent( Event event ) { if ( event.button == 3 && event.widget instanceof Tree ) { Tree tree = (Tree) event.widget; TreeItem treeItem = tree.getSelection( )[0]; if ( treeItem.getData( ) instanceof LevelHandle || treeItem.getData( ) instanceof MeasureHandle ) { tree.setMenu( createMenuManager( treeItem.getData( ) ).createContextMenu( tree ) ); // tree.getMenu( ).setVisible( true ); } else { tree.setMenu( null ); } } } } ); label = new Label( cmpDataPreview, SWT.NONE ); { label.setText( Messages.getString( "StandardChartDataSheet.Label.DataPreview" ) ); //$NON-NLS-1$ label.setFont( JFaceResources.getBannerFont( ) ); } if ( !dataProvider.isInXTabMeasureCell( ) && !dataProvider.isInMultiView( ) ) { // No description if dnd is disabled Label description = new Label( cmpDataPreview, SWT.WRAP ); { GridData gd = new GridData( GridData.FILL_HORIZONTAL ); description.setLayoutData( gd ); description.setText( Messages.getString( "StandardChartDataSheet.Label.ToBindADataColumn" ) ); //$NON-NLS-1$ } } tablePreview = new CustomPreviewTable( cmpDataPreview, SWT.SINGLE | SWT.H_SCROLL | SWT.V_SCROLL | SWT.FULL_SELECTION ); { GridData gridData = new GridData( GridData.FILL_BOTH ); gridData.widthHint = 400; gridData.heightHint = 120; tablePreview.setLayoutData( gridData ); tablePreview.setHeaderAlignment( SWT.LEFT ); tablePreview.addListener( CustomPreviewTable.MOUSE_RIGHT_CLICK_TYPE, this ); } updateDragDataSource( ); return cmpStack; } private void updateDragDataSource( ) { if ( isCubeMode( ) ) { stackLayout.topControl = cmpCubeTree; cubeTreeViewer.setInput( getCube( ) ); } else { stackLayout.topControl = cmpDataPreview; refreshTablePreview( ); } cmpStack.layout( ); } public Composite createDataSelector( Composite parent ) { Composite cmpDataSet = ChartUIUtil.createCompositeWrapper( parent ); { cmpDataSet.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) ); } Label label = new Label( cmpDataSet, SWT.NONE ); { label.setText( Messages.getString( "StandardChartDataSheet.Label.SelectDataSet" ) ); //$NON-NLS-1$ label.setFont( JFaceResources.getBannerFont( ) ); } Composite cmpDetail = new Composite( cmpDataSet, SWT.NONE ); { GridLayout gridLayout = new GridLayout( 3, false ); gridLayout.marginWidth = 10; gridLayout.marginHeight = 0; cmpDetail.setLayout( gridLayout ); cmpDetail.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) ); } Composite compRadios = ChartUIUtil.createCompositeWrapper( cmpDetail ); { GridData gd = new GridData( ); gd.verticalSpan = 2; compRadios.setLayoutData( gd ); } btnInherit = new Button( compRadios, SWT.RADIO ); btnInherit.setText( Messages.getString( "StandardChartDataSheet.Label.UseReportData" ) ); //$NON-NLS-1$ btnInherit.addListener( SWT.Selection, this ); btnUseData = new Button( compRadios, SWT.RADIO ); btnUseData.setText( Messages.getString( "StandardChartDataSheet.Label.UseDataSet" ) ); //$NON-NLS-1$ btnUseData.addListener( SWT.Selection, this ); new Label( cmpDetail, SWT.NONE ); new Label( cmpDetail, SWT.NONE ); cmbDataItems = new CCombo( cmpDetail, SWT.DROP_DOWN | SWT.READ_ONLY | SWT.BORDER ); 
cmbDataItems.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) ); cmbDataItems.addListener( SWT.Selection, this ); initDataSelector( ); updatePredefinedQueries( ); return cmpDataSet; } int invokeNewDataSet( ) { IAction action = new NewDataSetAction( ); PlatformUI.getWorkbench( ).getHelpSystem( ).setHelp( action, ChartHelpContextIds.DIALOG_NEW_DATA_SET ); action.run( ); // Due to the limitation of the action execution, always return ok return Window.OK; } int invokeEditFilter( ) { ExtendedItemHandle handle = getItemHandle( ); handle.getModuleHandle( ).getCommandStack( ).startTrans( null ); ExtendedItemFilterDialog page = new ExtendedItemFilterDialog( handle ); AbstractFilterHandleProvider provider = ChartFilterProviderDelegate.createFilterProvider( handle, handle ); if ( provider instanceof ChartCubeFilterHandleProvider ) { ( (ChartCubeFilterHandleProvider) provider ).setContext( getContext( ) ); } page.setFilterHandleProvider( provider ); int openStatus = page.open( ); if ( openStatus == Window.OK ) { handle.getModuleHandle( ).getCommandStack( ).commit( ); } else { handle.getModuleHandle( ).getCommandStack( ).rollback( ); } return openStatus; } int invokeEditParameter( ) { ReportItemParametersDialog page = new ReportItemParametersDialog( getItemHandle( ) ); return page.open( ); } int invokeDataBinding( ) { Shell shell = new Shell( Display.getDefault( ), SWT.DIALOG_TRIM | SWT.RESIZE | SWT.APPLICATION_MODAL ); // #194163: Do not register CS help in chart since it's registered in // super column binding dialog. // ChartUIUtil.bindHelp( shell, // ChartHelpContextIds.DIALOG_DATA_SET_COLUMN_BINDING ); ExtendedItemHandle handle = getItemHandle( ); handle.getModuleHandle( ).getCommandStack( ).startTrans( null ); ColumnBindingDialog page = new ChartColumnBindingDialog( handle, shell, getContext( ) ); ExpressionProvider ep = new ExpressionProvider( getItemHandle( ) ); ep.addFilter( new ExpressionFilter( ) { public boolean select( Object parentElement, Object element ) { // Remove unsupported expression. See bugzilla#132768 return !( parentElement.equals( ExpressionProvider.BIRT_OBJECTS ) && element instanceof IClassInfo && ( (IClassInfo) element ).getName( ) .equals( "Total" ) ); //$NON-NLS-1$ } } ); page.setExpressionProvider( ep ); // Make all bindings under share binding case read-only. 
( (ChartColumnBindingDialog) page ).setReadOnly( getDataServiceProvider( ).isSharedBinding( ) || getDataServiceProvider( ).isInheritanceOnly( ) ); int openStatus = page.open( ); if ( openStatus == Window.OK ) { handle.getModuleHandle( ).getCommandStack( ).commit( ); updatePredefinedQueries( ); } else { handle.getModuleHandle( ).getCommandStack( ).rollback( ); } return openStatus; } private void initDataSelector( ) { // create Combo items cmbDataItems.setItems( createDataComboItems( ) ); cmbDataItems.setVisibleItemCount( cmbDataItems.getItemCount( ) ); // Select report item reference // Since handle may have data set or data cube besides reference, always // check reference first String sItemRef = getDataServiceProvider( ).getReportItemReference( ); if ( sItemRef != null ) { btnUseData.setSelection( true ); bIsInheritSelected = false; cmbDataItems.setText( sItemRef ); currentData = sItemRef; return; } // Select data set String sDataSet = getDataServiceProvider( ).getBoundDataSet( ); if ( sDataSet != null && !getDataServiceProvider( ).isInheritanceOnly( ) ) { btnUseData.setSelection( true ); bIsInheritSelected = false; cmbDataItems.setText( sDataSet ); currentData = sDataSet; if ( sDataSet != null ) { switchDataTable( ); } return; } // Select data cube String sDataCube = getDataServiceProvider( ).getDataCube( ); if ( sDataCube != null && !getDataServiceProvider( ).isInheritanceOnly( ) ) { btnUseData.setSelection( true ); bIsInheritSelected = false; cmbDataItems.setText( sDataCube ); currentData = sDataCube; return; } btnInherit.setSelection( true ); bIsInheritSelected = true; if ( getDataServiceProvider( ).isInheritanceOnly( ) ) { btnUseData.setSelection( false ); btnUseData.setEnabled( false ); } cmbDataItems.select( 0 ); currentData = null; cmbDataItems.setEnabled( false ); // Initializes column bindings from container getDataServiceProvider( ).setDataSet( null ); String reportDataSet = getDataServiceProvider( ).getReportDataSet( ); if ( reportDataSet != null ) { switchDataTable( ); } // select reference item // selectItemRef( ); // if ( cmbReferences.getSelectionIndex( ) > 0 ) // cmbDataSet.setEnabled( false ); // btnUseReference.setSelection( true ); // btnUseReportData.setSelection( false ); // btnUseDataSet.setSelection( false ); // else // cmbReferences.setEnabled( false ); // String dataCube = getDataServiceProvider( ).getDataCube( ); // if ( dataCube != null ) // cmbCubes.setText( dataCube ); // btnUseReference.setSelection( false ); // btnUseReportData.setSelection( false ); // btnUseDataSet.setSelection( false ); // btnUseCubes.setSelection( true ); // else // cmbCubes.select( 0 ); } public void handleEvent( Event event ) { if ( event.data instanceof ISelectDataComponent ) { // When user select expression in drop&down list of live preview // area, the event will be handled to update related column color. if ( event.type == IChartDataSheet.EVENT_QUERY && event.detail == IChartDataSheet.DETAIL_UPDATE_COLOR ) { refreshTableColor( ); } return; } // Right click to display the menu. Menu display by clicking // application key is triggered by os, so do nothing. 
if ( event.type == CustomPreviewTable.MOUSE_RIGHT_CLICK_TYPE ) { if ( getDataServiceProvider( ).getBoundDataSet( ) != null || getDataServiceProvider( ).getReportDataSet( ) != null ) { if ( event.widget instanceof Button ) { Button header = (Button) event.widget; // Bind context menu to each header button if ( header.getMenu( ) == null ) { header.setMenu( createMenuManager( event.data ).createContextMenu( tablePreview ) ); } header.getMenu( ).setVisible( true ); } } } else if ( event.type == SWT.Selection ) { if ( event.widget instanceof MenuItem ) { MenuItem item = (MenuItem) event.widget; IAction action = (IAction) item.getData( ); action.setChecked( !action.isChecked( ) ); action.run( ); } else if ( event.widget == btnFilters ) { if ( invokeEditFilter( ) == Window.OK ) { refreshTablePreview( ); // Update preview via event fireEvent( btnFilters, EVENT_PREVIEW ); } } else if ( event.widget == btnParameters ) { if ( invokeEditParameter( ) == Window.OK ) { refreshTablePreview( ); // Update preview via event fireEvent( btnParameters, EVENT_PREVIEW ); } } else if ( event.widget == btnBinding ) { if ( invokeDataBinding( ) == Window.OK ) { refreshTablePreview( ); // Update preview via event fireEvent( btnBinding, EVENT_PREVIEW ); } } try { if ( event.widget == btnInherit ) { ColorPalette.getInstance( ).restore( ); // Skip when selection is false if ( !btnInherit.getSelection( ) ) { return; } // Avoid duplicate loading data set. if ( bIsInheritSelected ) { return; } bIsInheritSelected = true; getDataServiceProvider( ).setReportItemReference( null ); getDataServiceProvider( ).setDataSet( null ); switchDataSet( null ); cmbDataItems.select( 0 ); currentData = null; cmbDataItems.setEnabled( false ); setEnabledForButtons( ); updateDragDataSource( ); updatePredefinedQueries( ); } else if ( event.widget == btnUseData ) { // Skip when selection is false if ( !btnUseData.getSelection( ) ) { return; } // Avoid duplicate loading data set. 
if ( !bIsInheritSelected ) { return; } bIsInheritSelected = false; getDataServiceProvider( ).setReportItemReference( null ); getDataServiceProvider( ).setDataSet( null ); selectDataSet( ); cmbDataItems.setEnabled( true ); setEnabledForButtons( ); updateDragDataSource( ); updatePredefinedQueries( ); } else if ( event.widget == cmbDataItems ) { ColorPalette.getInstance( ).restore( ); int selectedIndex = cmbDataItems.getSelectionIndex( ); Integer selectState = selectDataTypes.get( selectedIndex ); switch ( selectState.intValue( ) ) { case SELECT_NONE : // Inherit data from container btnInherit.setSelection( true ); btnUseData.setSelection( false ); btnInherit.notifyListeners( SWT.Selection, new Event( ) ); break; case SELECT_NEXT : selectedIndex++; selectState = selectDataTypes.get( selectedIndex ); cmbDataItems.select( selectedIndex ); break; } switch ( selectState.intValue( ) ) { case SELECT_DATA_SET : if ( getDataServiceProvider( ).getReportItemReference( ) == null && getDataServiceProvider( ).getBoundDataSet( ) != null && getDataServiceProvider( ).getBoundDataSet( ) .equals( cmbDataItems.getText( ) ) ) { return; } getDataServiceProvider( ).setDataSet( cmbDataItems.getText( ) ); currentData = cmbDataItems.getText( ); switchDataSet( cmbDataItems.getText( ) ); setEnabledForButtons( ); updateDragDataSource( ); break; case SELECT_DATA_CUBE : getDataServiceProvider( ).setDataCube( cmbDataItems.getText( ) ); currentData = cmbDataItems.getText( ); updateDragDataSource( ); setEnabledForButtons( ); // Update preview via event DataDefinitionTextManager.getInstance( ) .refreshAll( ); fireEvent( tablePreview, EVENT_PREVIEW ); break; case SELECT_REPORT_ITEM : if ( cmbDataItems.getText( ) .equals( getDataServiceProvider( ).getReportItemReference( ) ) ) { return; } getDataServiceProvider( ).setReportItemReference( cmbDataItems.getText( ) ); // TED 10163 // Following calls will revise chart model for // report item sharing case, in older version of // chart, it is allowed to set grouping on category // series when sharing report item, but now it isn't // allowed, so this calls will revise chart model to // remove category series grouping flag for the // case. 
ChartReportItemUtil.reviseChartModel( ChartReportItemUtil.REVISE_REFERENCE_REPORT_ITEM, this.getContext( ).getModel( ), itemHandle ); currentData = cmbDataItems.getText( ); // selectDataSet( ); // switchDataSet( cmbDataItems.getText( ) ); // Update preview via event DataDefinitionTextManager.getInstance( ) .refreshAll( ); fireEvent( tablePreview, EVENT_PREVIEW ); setEnabledForButtons( ); updateDragDataSource( ); break; case SELECT_NEW_DATASET : // Bring up the dialog to create a dataset int result = invokeNewDataSet( ); if ( result == Window.CANCEL ) { return; } cmbDataItems.removeAll( ); cmbDataItems.setItems( createDataComboItems( ) ); cmbDataItems.setVisibleItemCount( cmbDataItems.getItemCount( ) ); if ( currentData == null ) { cmbDataItems.select( 0 ); } else { cmbDataItems.setText( currentData ); } break; case SELECT_NEW_DATACUBE : if ( getDataServiceProvider( ).getAllDataSets( ).length == 0 ) { invokeNewDataSet( ); } if ( getDataServiceProvider( ).getAllDataSets( ).length != 0 ) { new NewCubeAction( ).run( ); } cmbDataItems.removeAll( ); cmbDataItems.setItems( createDataComboItems( ) ); cmbDataItems.setVisibleItemCount( cmbDataItems.getItemCount( ) ); if ( currentData == null ) { cmbDataItems.select( 0 ); } else { cmbDataItems.setText( currentData ); } break; } updatePredefinedQueries( ); } // else if ( event.widget == btnUseReference ) // // Skip when selection is false // if ( !btnUseReference.getSelection( ) ) // return; // cmbDataSet.setEnabled( false ); // cmbReferences.setEnabled( true ); // selectItemRef( ); // setEnabledForButtons( ); // else if ( event.widget == cmbReferences ) // if ( cmbReferences.getSelectionIndex( ) == 0 ) // if ( getDataServiceProvider( ).getReportItemReference( ) == // null ) // return; // getDataServiceProvider( ).setReportItemReference( null ); // // Auto select the data set // selectDataSet( ); // cmbReferences.setEnabled( false ); // cmbDataSet.setEnabled( true ); // btnUseReference.setSelection( false ); // btnUseDataSet.setSelection( true ); // else // if ( cmbReferences.getText( ) // .equals( getDataServiceProvider( ).getReportItemReference( ) // return; // getDataServiceProvider( ).setReportItemReference( // cmbReferences.getText( ) ); // selectDataSet( ); // switchDataSet( cmbDataSet.getText( ) ); // setEnabledForButtons( ); } catch ( ChartException e1 ) { WizardBase.showException( e1.getLocalizedMessage( ) ); } } } private void selectDataSet( ) { String currentDS = getDataServiceProvider( ).getBoundDataSet( ); if ( currentDS == null ) { cmbDataItems.select( 0 ); currentData = null; } else { cmbDataItems.setText( currentDS ); currentData = currentDS; } } private void refreshTablePreview( ) { if ( dataProvider.getDataSetFromHandle( ) == null ) { return; } tablePreview.clearContents( ); switchDataTable( ); tablePreview.layout( ); } private void switchDataSet( String datasetName ) throws ChartException { if ( isCubeMode( ) ) { return; } try { // Clear old dataset and preview data tablePreview.clearContents( ); // Try to get report data set if ( datasetName == null ) { datasetName = getDataServiceProvider( ).getReportDataSet( ); } if ( datasetName != null ) { switchDataTable( ); } else { tablePreview.createDummyTable( ); } tablePreview.layout( ); } catch ( Throwable t ) { throw new ChartException( ChartEnginePlugin.ID, ChartException.DATA_BINDING, t ); } DataDefinitionTextManager.getInstance( ).refreshAll( ); // Update preview via event fireEvent( tablePreview, EVENT_PREVIEW ); } /** * Update column headers and data to table. 
* * @param headers * @param dataList */ private void updateTablePreview( final ColumnBindingInfo[] headers, final List dataList ) { fireEvent( tablePreview, EVENT_QUERY ); if ( tablePreview.isDisposed( ) ) { return; } if ( headers == null ) { tablePreview.setEnabled( false ); tablePreview.createDummyTable( ); } else { tablePreview.setEnabled( true ); tablePreview.setColumns( headers ); refreshTableColor( ); // Add data value if ( dataList != null ) { for ( Iterator iterator = dataList.iterator( ); iterator.hasNext( ); ) { String[] dataRow = (String[]) iterator.next( ); for ( int i = 0; i < dataRow.length; i++ ) { tablePreview.addEntry( dataRow[i], i ); } } } } tablePreview.layout( ); } private volatile boolean bIsGettingPreviewData = false; private void switchDataTable( ) { if ( isCubeMode( ) ) { return; } // 1. Create a runnable. Runnable runnable = new Runnable( ) { /* * (non-Javadoc) * * @see org.eclipse.jface.operation.IRunnableWithProgress#run(org.eclipse.core.runtime.IProgressMonitor) */ public void run( ) { if ( bIsGettingPreviewData ) { // to avoid collision caused by multiple refreshing return; } ColumnBindingInfo[] headers = null; List dataList = null; try { // Get header and data in other thread. headers = getDataServiceProvider( ).getPreviewHeadersInfo( ); bIsGettingPreviewData = true; dataList = getDataServiceProvider( ).getPreviewData( ); bIsGettingPreviewData = false; getDataServiceProvider( ).setPredefinedExpressions( headers ); final ColumnBindingInfo[] headerInfo = headers; final List data = dataList; // Execute UI operation in UI thread. Display.getDefault( ).syncExec( new Runnable( ) { public void run( ) { updateTablePreview( headerInfo, data ); } } ); } catch ( Exception e ) { final ColumnBindingInfo[] headerInfo = headers; final List data = dataList; // Catch any exception. final String msg = e.getMessage( ); Display.getDefault( ).syncExec( new Runnable( ) { /* * (non-Javadoc) * * @see java.lang.Runnable#run() */ public void run( ) { // Still update table preview in here to ensure the // column headers of table preview can be updated // and user can select expression from table preview // even if there is no preview data. updateTablePreview( headerInfo, data ); WizardBase.showException( msg ); } } ); } } }; // 2. Run it. new Thread( runnable ).start( ); } private void refreshTableColor( ) { if ( isCubeMode( ) ) { return; } // Reset column color for ( int i = 0; i < tablePreview.getColumnNumber( ); i++ ) { tablePreview.setColumnColor( i, ColorPalette.getInstance( ) .getColor( ExpressionUtil.createJSRowExpression( tablePreview.getColumnHeading( i ) ) ) ); } } /** * Returns actual expression for common and sharing query case. * * @param query * @param expr * @return */ private String getActualExpression( String expr ) { if ( !dataProvider.checkState( IDataServiceProvider.SHARE_QUERY ) ) { return expr; } // Convert to actual expression. 
Object obj = tablePreview.getCurrentColumnHeadObject( ); if ( obj instanceof ColumnBindingInfo ) { ColumnBindingInfo cbi = (ColumnBindingInfo) obj; int type = cbi.getColumnType( ); if ( type == ColumnBindingInfo.GROUP_COLUMN || type == ColumnBindingInfo.AGGREGATE_COLUMN ) { return cbi.getExpression( ); } } return expr; } protected void manageColorAndQuery( Query query, String expr ) { // If it's not used any more, remove color binding if ( DataDefinitionTextManager.getInstance( ) .getNumberOfSameDataDefinition( query.getDefinition( ) ) == 0 ) { ColorPalette.getInstance( ).retrieveColor( query.getDefinition( ) ); } // Update query, if it is sharing binding case, the specified expression // will be converted and set to query, else directly set specified // expression to query. // DataDefinitionTextManager.getInstance( ).updateQuery( query, expr ); query.setDefinition( getActualExpression( expr ) ); DataDefinitionTextManager.getInstance( ).updateText( query ); // Reset table column color refreshTableColor( ); // Refresh all data definition text DataDefinitionTextManager.getInstance( ).refreshAll( ); } /** * @param queryType * @param query * @param expr * @param seriesDefinition * @since 2.5 */ protected void manageColorAndQuery( String queryType, Query query, String expr, SeriesDefinition seriesDefinition ) { // If it's not used any more, remove color binding if ( DataDefinitionTextManager.getInstance( ) .getNumberOfSameDataDefinition( query.getDefinition( ) ) == 0 ) { ColorPalette.getInstance( ).retrieveColor( query.getDefinition( ) ); } // Update query, if it is sharing binding case, the specified expression // will be converted and set to query, else directly set specified // expression to query. updateQuery( queryType, query, expr, seriesDefinition ); DataDefinitionTextManager.getInstance( ).updateText( query ); // Reset table column color refreshTableColor( ); // Refresh all data definition text DataDefinitionTextManager.getInstance( ).refreshAll( ); } private void updateQuery( String queryType, Query query, String expr, SeriesDefinition seriesDefinition ) { String actualExpr = expr; if ( dataProvider.checkState( IDataServiceProvider.SHARE_QUERY ) ) { boolean isGroupOrAggr = false; // Convert to actual expression. Object obj = tablePreview.getCurrentColumnHeadObject( ); if ( obj instanceof ColumnBindingInfo ) { ColumnBindingInfo cbi = (ColumnBindingInfo) obj; int type = cbi.getColumnType( ); if ( type == ColumnBindingInfo.GROUP_COLUMN || type == ColumnBindingInfo.AGGREGATE_COLUMN ) { actualExpr = cbi.getExpression( ); isGroupOrAggr = true; } } // Update group state. 
if ( seriesDefinition != null && ( queryType.equals( ChartUIConstants.QUERY_CATEGORY ) || queryType.equals( ChartUIConstants.QUERY_VALUE ) ) ) { seriesDefinition.getGrouping( ).setEnabled( isGroupOrAggr ); } } query.setDefinition( actualExpr ); } class CategoryXAxisAction extends Action { Query query; String expr; private SeriesDefinition seriesDefintion; CategoryXAxisAction( String expr ) { super( getBaseSeriesTitle( getChartModel( ) ) ); seriesDefintion = (SeriesDefinition) ChartUIUtil.getBaseSeriesDefinitions( getChartModel( ) ) .get( 0 ); this.query = ( (Query) seriesDefintion.getDesignTimeSeries( ) .getDataDefinition( ) .get( 0 ) ); this.expr = expr; setEnabled( DataDefinitionTextManager.getInstance( ) .isAcceptableExpression( query, expr, dataProvider.isSharedBinding( ) ) ); } public void run( ) { manageColorAndQuery( ChartUIConstants.QUERY_CATEGORY, query, expr, seriesDefintion ); } } class GroupYSeriesAction extends Action { Query query; String expr; private SeriesDefinition seriesDefinition; GroupYSeriesAction( Query query, String expr, SeriesDefinition seriesDefinition ) { super( getGroupSeriesTitle( getChartModel( ) ) ); this.seriesDefinition = seriesDefinition; this.query = query; this.expr = expr; setEnabled( DataDefinitionTextManager.getInstance( ) .isAcceptableExpression( query, expr, dataProvider.isSharedBinding( ) ) ); } public void run( ) { // Use the first group, and copy to the all groups ChartAdapter.beginIgnoreNotifications( ); ChartUIUtil.setAllGroupingQueryExceptFirst( getChartModel( ), expr ); ChartAdapter.endIgnoreNotifications( ); manageColorAndQuery( ChartUIConstants.QUERY_OPTIONAL, query, expr, seriesDefinition ); } } class ValueYSeriesAction extends Action { Query query; String expr; ValueYSeriesAction( Query query, String expr ) { super( getOrthogonalSeriesTitle( getChartModel( ) ) ); this.query = query; this.expr = expr; // Grouping expressions can't be set on value series. 
boolean enabled = true; if ( dataProvider.checkState( IDataServiceProvider.SHARE_QUERY ) ) { Object obj = tablePreview.getCurrentColumnHeadObject( ); if ( obj instanceof ColumnBindingInfo && ( (ColumnBindingInfo) obj ).getColumnType( ) == ColumnBindingInfo.GROUP_COLUMN ) { enabled = false; } } setEnabled( enabled ); } public void run( ) { manageColorAndQuery( ChartUIConstants.QUERY_VALUE, query, expr, null ); } } class HeaderShowAction extends Action { HeaderShowAction( String header ) { super( header ); setEnabled( false ); } } ExtendedItemHandle getItemHandle( ) { return this.itemHandle; } ReportDataServiceProvider getDataServiceProvider( ) { return this.dataProvider; } protected List<Object> getActionsForTableHead( String expr ) { List<Object> actions = new ArrayList<Object>( 3 ); actions.add( getBaseSeriesMenu( getChartModel( ), expr ) ); actions.add( getOrthogonalSeriesMenu( getChartModel( ), expr ) ); actions.add( getGroupSeriesMenu( getChartModel( ), expr ) ); return actions; } private MenuManager createMenuManager( final Object data ) { MenuManager menuManager = new MenuManager( ); menuManager.setRemoveAllWhenShown( true ); menuManager.addMenuListener( new IMenuListener( ) { public void menuAboutToShow( IMenuManager manager ) { if ( data instanceof Integer ) { // Menu for table addMenu( manager, new HeaderShowAction( tablePreview.getCurrentColumnHeading( ) ) ); String expr = ExpressionUtil.createJSRowExpression( tablePreview.getCurrentColumnHeading( ) ); List<Object> actions = getActionsForTableHead( expr ); for ( Object act : actions ) { addMenu( manager, act ); } } else if ( data instanceof MeasureHandle ) { // Menu for Measure String expr = createCubeExpression( ); if ( expr != null ) { addMenu( manager, getOrthogonalSeriesMenu( getChartModel( ), expr ) ); } } else if ( data instanceof LevelHandle ) { // Menu for Level String expr = createCubeExpression( ); if ( expr != null ) { // bug#220724 if ( ( (Boolean) dataProvider.checkData( ChartUIConstants.QUERY_CATEGORY, expr ) ).booleanValue( ) ) { addMenu( manager, getBaseSeriesMenu( getChartModel( ), expr ) ); } if ( dataProvider.checkState( IDataServiceProvider.MULTI_CUBE_DIMENSIONS ) && ( (Boolean) dataProvider.checkData( ChartUIConstants.QUERY_OPTIONAL, expr ) ).booleanValue( ) ) { addMenu( manager, getGroupSeriesMenu( getChartModel( ), expr ) ); } } } } private void addMenu( IMenuManager manager, Object item ) { if ( item instanceof IAction ) { manager.add( (IAction) item ); } else if ( item instanceof IContributionItem ) { manager.add( (IContributionItem) item ); } // Do not allow customized query in xtab if ( getDataServiceProvider( ).isPartChart( ) ) { if ( item instanceof IAction ) { ( (IAction) item ).setEnabled( false ); } } } } ); return menuManager; } private Object getBaseSeriesMenu( Chart chart, String expr ) { EList sds = ChartUIUtil.getBaseSeriesDefinitions( chart ); if ( sds.size( ) == 1 ) { return new CategoryXAxisAction( expr ); } return null; } private Object getGroupSeriesMenu( Chart chart, String expr ) { IMenuManager topManager = new MenuManager( getGroupSeriesTitle( getChartModel( ) ) ); int axisNum = ChartUIUtil.getOrthogonalAxisNumber( chart ); for ( int axisIndex = 0; axisIndex < axisNum; axisIndex++ ) { List sds = ChartUIUtil.getOrthogonalSeriesDefinitions( chart, axisIndex ); for ( int i = 0; i < sds.size( ); i++ ) { SeriesDefinition sd = (SeriesDefinition) sds.get( i ); IAction action = new GroupYSeriesAction( sd.getQuery( ), expr, sd ); // ONLY USE FIRST GROUPING SERIES FOR CHART ENGINE SUPPORT // if 
( axisNum == 1 && sds.size( ) == 1 ) { // Simply cascade menu return action; } // action.setText( getSecondMenuText( axisIndex, // sd.getDesignTimeSeries( ) ) ); // topManager.add( action ); } } return topManager; } private Object getOrthogonalSeriesMenu( Chart chart, String expr ) { IMenuManager topManager = new MenuManager( getOrthogonalSeriesTitle( getChartModel( ) ) ); int axisNum = ChartUIUtil.getOrthogonalAxisNumber( chart ); for ( int axisIndex = 0; axisIndex < axisNum; axisIndex++ ) { List sds = ChartUIUtil.getOrthogonalSeriesDefinitions( chart, axisIndex ); for ( int i = 0; i < sds.size( ); i++ ) { Series series = ( (SeriesDefinition) sds.get( i ) ).getDesignTimeSeries( ); EList dataDefns = series.getDataDefinition( ); if ( series instanceof StockSeries ) { IMenuManager secondManager = new MenuManager( getSecondMenuText( axisIndex, i, series ) ); topManager.add( secondManager ); for ( int j = 0; j < dataDefns.size( ); j++ ) { IAction action = new ValueYSeriesAction( (Query) dataDefns.get( j ), expr ); action.setText( ChartUIUtil.getStockTitle( j ) + Messages.getString( "StandardChartDataSheet.Label.Component" ) ); //$NON-NLS-1$ secondManager.add( action ); } } else if ( series instanceof BubbleSeries ) { IMenuManager secondManager = new MenuManager( getSecondMenuText( axisIndex, i, series ) ); topManager.add( secondManager ); for ( int j = 0; j < dataDefns.size( ); j++ ) { IAction action = new ValueYSeriesAction( (Query) dataDefns.get( j ), expr ); action.setText( ChartUIUtil.getBubbleTitle( j ) + Messages.getString( "StandardChartDataSheet.Label.Component" ) ); //$NON-NLS-1$ secondManager.add( action ); } } else if ( series instanceof DifferenceSeries ) { IMenuManager secondManager = new MenuManager( getSecondMenuText( axisIndex, i, series ) ); topManager.add( secondManager ); for ( int j = 0; j < dataDefns.size( ); j++ ) { IAction action = new ValueYSeriesAction( (Query) dataDefns.get( j ), expr ); action.setText( ChartUIUtil.getDifferenceTitle( j ) + Messages.getString( "StandardChartDataSheet.Label.Component" ) ); //$NON-NLS-1$ secondManager.add( action ); } } else if ( series instanceof GanttSeries ) { IMenuManager secondManager = new MenuManager( getSecondMenuText( axisIndex, i, series ) ); topManager.add( secondManager ); for ( int j = 0; j < dataDefns.size( ); j++ ) { IAction action = new ValueYSeriesAction( (Query) dataDefns.get( j ), expr ); action.setText( ChartUIUtil.getGanttTitle( j ) + Messages.getString( "StandardChartDataSheet.Label.Component" ) ); //$NON-NLS-1$ secondManager.add( action ); } } else { IAction action = new ValueYSeriesAction( (Query) dataDefns.get( 0 ), expr ); if ( axisNum == 1 && sds.size( ) == 1 ) { // Simplify cascade menu return action; } action.setText( getSecondMenuText( axisIndex, i, series ) ); topManager.add( action ); } } } return topManager; } private String getSecondMenuText( int axisIndex, int seriesIndex, Series series ) { StringBuffer sb = new StringBuffer( ); if ( ChartUIUtil.getOrthogonalAxisNumber( getChartModel( ) ) > 1 ) { sb.append( Messages.getString( "StandardChartDataSheet.Label.Axis" ) ); //$NON-NLS-1$ sb.append( axisIndex + 1 ); sb.append( " - " ); //$NON-NLS-1$ } sb.append( Messages.getString( "StandardChartDataSheet.Label.Series" ) //$NON-NLS-1$ + ( seriesIndex + 1 ) + " (" + series.getDisplayName( ) + ")" ); //$NON-NLS-1$ //$NON-NLS-2$ return sb.toString( ); } private String getBaseSeriesTitle( Chart chart ) { if ( chart instanceof ChartWithAxes ) { return Messages.getString( 
"StandardChartDataSheet.Label.UseAsCategoryXAxis" ); //$NON-NLS-1$ } return Messages.getString( "StandardChartDataSheet.Label.UseAsCategorySeries" ); //$NON-NLS-1$ } private String getOrthogonalSeriesTitle( Chart chart ) { if ( chart instanceof ChartWithAxes ) { return Messages.getString( "StandardChartDataSheet.Label.PlotAsValueYSeries" ); //$NON-NLS-1$ } else if ( chart instanceof DialChart ) { return Messages.getString( "StandardChartDataSheet.Label.PlotAsGaugeValue" ); //$NON-NLS-1$ } return Messages.getString( "StandardChartDataSheet.Label.PlotAsValueSeries" ); //$NON-NLS-1$ } private String getGroupSeriesTitle( Chart chart ) { if ( chart instanceof ChartWithAxes ) { return Messages.getString( "StandardChartDataSheet.Label.UseToGroupYSeries" ); //$NON-NLS-1$ } return Messages.getString( "StandardChartDataSheet.Label.UseToGroupValueSeries" ); //$NON-NLS-1$ } private boolean isCubeMode( ) { return ChartXTabUtil.getBindingCube( itemHandle ) != null; } private CubeHandle getCube( ) { return ChartXTabUtil.getBindingCube( itemHandle ); } /** * Creates the cube expression * * @return expression */ private String createCubeExpression( ) { if ( cubeTreeViewer == null ) { return null; } TreeItem[] selection = cubeTreeViewer.getTree( ).getSelection( ); String expr = null; if ( selection.length > 0 && !dataProvider.isSharedBinding( ) && !dataProvider.isPartChart( ) ) { TreeItem treeItem = selection[0]; ComputedColumnHandle binding = null; if ( treeItem.getData( ) instanceof LevelHandle ) { binding = ChartXTabUtil.findBinding( itemHandle, ChartXTabUtil.createDimensionExpression( (LevelHandle) treeItem.getData( ) ) ); } else if ( treeItem.getData( ) instanceof MeasureHandle ) { binding = ChartXTabUtil.findBinding( itemHandle, ChartXTabUtil.createMeasureExpression( (MeasureHandle) treeItem.getData( ) ) ); } if ( binding != null ) { expr = ExpressionUtil.createJSDataExpression( binding.getName( ) ); } } return expr; } private boolean isDataItemSupported( int type ) { return iSupportedDataItems == 0 || ( iSupportedDataItems & type ) == type; } private String[] createDataComboItems( ) { List<String> items = new ArrayList<String>( ); selectDataTypes.clear( ); if ( isDataItemSupported( SELECT_NONE ) ) { if ( DEUtil.getDataSetList( itemHandle.getContainer( ) ) .size( ) > 0 ) { items.add( Messages.getString( "ReportDataServiceProvider.Option.Inherits", //$NON-NLS-1$ ( (DataSetHandle) DEUtil.getDataSetList( itemHandle.getContainer( ) ) .get( 0 ) ).getName( ) ) ); } else { items.add( ReportDataServiceProvider.OPTION_NONE ); } selectDataTypes.add( new Integer( SELECT_NONE ) ); } if ( isDataItemSupported( SELECT_DATA_SET ) ) { String[] dataSets = getDataServiceProvider( ).getAllDataSets( ); if ( dataSets.length > 0 ) { if ( isDataItemSupported( SELECT_NEXT ) ) { items.add( Messages.getString( "StandardChartDataSheet.Combo.DataSets" ) ); //$NON-NLS-1$ selectDataTypes.add( new Integer( SELECT_NEXT ) ); } for ( int i = 0; i < dataSets.length; i++ ) { items.add( dataSets[i] ); selectDataTypes.add( new Integer( SELECT_DATA_SET ) ); } } if ( isDataItemSupported( SELECT_NEW_DATASET ) ) { items.add( Messages.getString( "StandardChartDataSheet.NewDataSet" ) ); //$NON-NLS-1$ selectDataTypes.add( new Integer( SELECT_NEW_DATASET ) ); } } if ( isDataItemSupported( SELECT_DATA_CUBE ) ) { String[] dataCubes = getDataServiceProvider( ).getAllDataCubes( ); if ( dataCubes.length > 0 ) { if ( isDataItemSupported( SELECT_NEXT ) ) { items.add( Messages.getString( "StandardChartDataSheet.Combo.DataCubes" ) ); //$NON-NLS-1$ 
selectDataTypes.add( new Integer( SELECT_NEXT ) ); } for ( int i = 0; i < dataCubes.length; i++ ) { items.add( dataCubes[i] ); selectDataTypes.add( new Integer( SELECT_DATA_CUBE ) ); } } if ( isDataItemSupported( SELECT_NEW_DATACUBE ) ) { items.add( Messages.getString( "StandardChartDataSheet.NewDataCube" ) ); //$NON-NLS-1$ selectDataTypes.add( new Integer( SELECT_NEW_DATACUBE ) ); } } if ( isDataItemSupported( SELECT_REPORT_ITEM ) ) { String[] dataRefs = getDataServiceProvider( ).getAllReportItemReferences( ); if ( dataRefs.length > 0 ) { if ( isDataItemSupported( SELECT_NEXT ) ) { items.add( Messages.getString( "StandardChartDataSheet.Combo.ReportItems" ) ); //$NON-NLS-1$ selectDataTypes.add( new Integer( SELECT_NEXT ) ); } for ( int i = 0; i < dataRefs.length; i++ ) { items.add( dataRefs[i] ); selectDataTypes.add( new Integer( SELECT_REPORT_ITEM ) ); } } } return items.toArray( new String[items.size( )] ); } private void updatePredefinedQueries( ) { if ( dataProvider.isInXTabMeasureCell( ) ) { try { CrosstabReportItemHandle xtab = ChartXTabUtil.getXtabContainerCell( itemHandle ) .getCrosstab( ); if ( dataProvider.isPartChart( ) ) { List<String> levels = ChartXTabUtil.getAllLevelsBindingExpression( xtab ); String[] exprs = levels.toArray( new String[levels.size( )] ); if ( exprs.length == 2 && dataProvider.isInXTabAggrCell( ) ) { // Only one direction is valid for chart in total cell if ( ( (ChartWithAxes) getChartModel( ) ).isTransposed( ) ) { exprs = new String[]{ exprs[1] }; } else { exprs = new String[]{ exprs[0] }; } } getContext( ).addPredefinedQuery( ChartUIConstants.QUERY_CATEGORY, exprs ); } else { Iterator columnBindings = ChartXTabUtil.getAllColumnBindingsIterator( itemHandle ); List<String> levels = ChartXTabUtil.getAllLevelsBindingExpression( columnBindings ); String[] exprs = levels.toArray( new String[levels.size( )] ); getContext( ).addPredefinedQuery( ChartUIConstants.QUERY_CATEGORY, exprs ); getContext( ).addPredefinedQuery( ChartUIConstants.QUERY_OPTIONAL, exprs ); columnBindings = ChartXTabUtil.getAllColumnBindingsIterator( itemHandle ); List<String> measures = ChartXTabUtil.getAllMeasuresBindingExpression( columnBindings ); exprs = measures.toArray( new String[measures.size( )] ); getContext( ).addPredefinedQuery( ChartUIConstants.QUERY_VALUE, exprs ); } } catch ( BirtException e ) { WizardBase.displayException( e ); } } else { CubeHandle cube = getCube( ); if ( cube == null ) { getContext( ).addPredefinedQuery( ChartUIConstants.QUERY_CATEGORY, null ); getContext( ).addPredefinedQuery( ChartUIConstants.QUERY_VALUE, null ); getContext( ).addPredefinedQuery( ChartUIConstants.QUERY_OPTIONAL, null ); } else { if ( dataProvider.isInheritanceOnly( ) // Is in multiple view || dataProvider.isSharedBinding( ) ) // Is sharing // query case. { // Get all column bindings. List<String> dimensionExprs = new ArrayList<String>( ); List<String> measureExprs = new ArrayList<String>( ); ReportItemHandle reportItemHandle = dataProvider.getReportItemHandle( ); for ( Iterator iter = reportItemHandle.getColumnBindings( ) .iterator( ); iter.hasNext( ); ) { ComputedColumnHandle cch = (ComputedColumnHandle) iter.next( ); String dataExpr = ExpressionUtil.createJSDataExpression( cch.getName( ) ); if ( ChartXTabUtil.isDimensionExpresion( cch.getExpression( ) ) ) { dimensionExprs.add( dataExpr ); } else if ( ChartXTabUtil.isMeasureExpresion( cch.getExpression( ) ) ) { // Fixed issue ED 28. 
// Underlying code was reverted to the earlier than // bugzilla 246683, since we have enhanced it to // support all available measures defined in shared // item. // Bugzilla 246683. // Here if it is sharing with crosstab or // multi-view, we just put the measure expression // whose aggregate-ons is most into prepared // expression query. It will keep correct value to // shared crosstab or multi-view. measureExprs.add( dataExpr ); } } String[] categoryExprs = dimensionExprs.toArray( new String[dimensionExprs.size( )] ); String[] yOptionalExprs = categoryExprs; String[] valueExprs = measureExprs.toArray( new String[measureExprs.size( )] ); ReportItemHandle referenceHandle = ChartReportItemUtil.getReportItemReference( itemHandle ); if ( referenceHandle instanceof ExtendedItemHandle && ChartReportItemUtil.isChartReportItemHandle( referenceHandle ) ) { // If the final reference handle is cube with other // chart, the valid category and Y optional expressions // only allow those expressions defined in shared chart. Chart referenceCM = ChartReportItemUtil.getChartFromHandle( (ExtendedItemHandle) referenceHandle ); categoryExprs = ChartUtil.getCategoryExpressions( referenceCM ); yOptionalExprs = ChartUtil.getYOptoinalExpressions( referenceCM ); valueExprs = ChartUtil.getValueSeriesExpressions( referenceCM ); Chart cm = getChartModel( ); if ( categoryExprs.length > 0 ) { updateCategoryExpression( cm, categoryExprs[0] ); } if ( yOptionalExprs.length > 0 ) { updateYOptionalExpressions( cm, yOptionalExprs[0] ); } } else if ( dataProvider.checkState( IDataServiceProvider.SHARE_CROSSTAB_QUERY ) ) { // In sharing query with crosstab, the category // expression and Y optional expression is decided by // value series expression, so here set them to null. // And in UI, when the value series expression is // selected, it will trigger to set correct category and // Y optional expressions. categoryExprs = null; yOptionalExprs = null; } getContext( ).addPredefinedQuery( ChartUIConstants.QUERY_CATEGORY, categoryExprs ); getContext( ).addPredefinedQuery( ChartUIConstants.QUERY_OPTIONAL, yOptionalExprs ); getContext( ).addPredefinedQuery( ChartUIConstants.QUERY_VALUE, valueExprs ); } else { Iterator columnBindings = ChartXTabUtil.getAllColumnBindingsIterator( itemHandle ); List<String> levels = ChartXTabUtil.getAllLevelsBindingExpression( columnBindings ); String[] exprs = levels.toArray( new String[levels.size( )] ); getContext( ).addPredefinedQuery( ChartUIConstants.QUERY_CATEGORY, exprs ); getContext( ).addPredefinedQuery( ChartUIConstants.QUERY_OPTIONAL, exprs ); columnBindings = ChartXTabUtil.getAllColumnBindingsIterator( itemHandle ); List<String> measures = ChartXTabUtil.getAllMeasuresBindingExpression( columnBindings ); exprs = measures.toArray( new String[measures.size( )] ); getContext( ).addPredefinedQuery( ChartUIConstants.QUERY_VALUE, exprs ); } } } // Fire event to update predefined queries in outside UI fireEvent( btnBinding, EVENT_QUERY ); } /** * Update Y Optional expression with specified expression if current Y * optional expression is null or empty. * * @param cm * chart model. * @param expr * specified expression. 
*/ private void updateYOptionalExpressions( Chart cm, String expr ) { List<SeriesDefinition> orthSDs = ChartUtil.getAllOrthogonalSeriesDefinitions( cm ); for ( SeriesDefinition sd : orthSDs ) { Query q = sd.getQuery( ); if ( q == null ) { sd.setQuery( QueryImpl.create( expr ) ); continue; } if ( q.getDefinition( ) == null || "".equals( q.getDefinition( ).trim( ) ) ) //$NON-NLS-1$ { q.setDefinition( expr ); } } } /** * Update category expression with specified expression if current category * expression is null or empty. * * @param cm * chart model. * @param expr * specified expression. */ private void updateCategoryExpression( Chart cm, String expr ) { EList<SeriesDefinition> baseSDs = ChartUtil.getBaseSeriesDefinitions( cm ); for ( SeriesDefinition sd : baseSDs ) { EList<Query> dds = sd.getDesignTimeSeries( ).getDataDefinition( ); Query q = dds.get( 0 ); if ( q.getDefinition( ) == null || "".equals( q.getDefinition( ).trim( ) ) ) //$NON-NLS-1$ { q.setDefinition( expr ); } } } }
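// A minimal sketch of the guard shared by updateCategoryExpression( ) and
// updateYOptionalExpressions( ) above: a query definition is only filled in
// when it is currently missing or blank, so expressions already entered by the
// user are never overwritten. The helper name isBlankDefinition is illustrative
// and not part of the original class; Query.getDefinition( ) is the same
// accessor the two methods above call.
private static boolean isBlankDefinition( Query query )
{
	return query == null
			|| query.getDefinition( ) == null
			|| "".equals( query.getDefinition( ).trim( ) ); //$NON-NLS-1$
}
// The same null-or-blank test appears inline in both methods above; extracting
// it is purely a readability refactoring and does not change behaviour.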
package org.phenotips.data.rest.internal; import com.xpn.xwiki.XWiki; import com.xpn.xwiki.XWikiContext; import com.xpn.xwiki.XWikiException; import com.xpn.xwiki.doc.XWikiDocument; import net.sf.json.JSONObject; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import org.phenotips.data.Patient; import org.phenotips.data.PatientRepository; import org.phenotips.data.rest.PatientResource; import org.phenotips.data.rest.Relations; import org.slf4j.Logger; import org.xwiki.component.manager.ComponentLookupException; import org.xwiki.component.manager.ComponentManager; import org.xwiki.component.util.DefaultParameterizedType; import org.xwiki.component.util.ReflectionUtils; import org.xwiki.context.Execution; import org.xwiki.context.ExecutionContext; import org.xwiki.model.reference.DocumentReference; import org.xwiki.rest.XWikiRestException; import org.xwiki.security.authorization.AuthorizationManager; import org.xwiki.security.authorization.Right; import org.xwiki.test.mockito.MockitoComponentMockingRule; import org.xwiki.users.User; import org.xwiki.users.UserManager; import javax.inject.Provider; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import javax.ws.rs.core.UriInfo; import java.net.URI; import java.net.URISyntaxException; import java.util.List; import java.util.Map; import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasValue; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyString; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /** * Tests for the {@link DefaultPatientResourceImpl} component. 
* */ public class DefaultPatientResourceImplTest { @Rule public MockitoComponentMockingRule<PatientResource> mocker = new MockitoComponentMockingRule<PatientResource>(DefaultPatientResourceImpl.class); @Mock private User currentUser; @Mock private Patient patient; @Mock private UriInfo uriInfo; private Logger logger; private PatientRepository repository; private AuthorizationManager access; private UserManager users; private String uriString = "http://self/uri"; private String id = "00000001"; private DocumentReference patientDocument; private DocumentReference userProfileDocument; private XWikiContext context; private DefaultPatientResourceImpl patientResource; @Before public void setUp() throws ComponentLookupException, URISyntaxException { MockitoAnnotations.initMocks(this); Execution execution = mock(Execution.class); ExecutionContext executionContext = mock(ExecutionContext.class); ComponentManager componentManager = this.mocker.getInstance(ComponentManager.class, "context"); when(componentManager.getInstance(Execution.class)).thenReturn(execution); doReturn(executionContext).when(execution).getContext(); doReturn(mock(XWikiContext.class)).when(executionContext).getProperty("xwikicontext"); this.patientResource = (DefaultPatientResourceImpl)this.mocker.getComponentUnderTest(); this.logger = this.mocker.getMockedLogger(); this.repository = this.mocker.getInstance(PatientRepository.class); this.access = this.mocker.getInstance(AuthorizationManager.class); this.users = this.mocker.getInstance(UserManager.class); this.userProfileDocument = new DocumentReference("wiki", "user", "00000001"); doReturn(this.currentUser).when(this.users).getCurrentUser(); doReturn(this.userProfileDocument).when(this.currentUser).getProfileDocument(); this.patientDocument = new DocumentReference("wiki", "data", "P0000001"); doReturn(this.patient).when(this.repository).getPatientById(this.id); doReturn(this.patientDocument).when(this.patient).getDocument(); doReturn(new URI(this.uriString)).when(this.uriInfo).getRequestUri(); ReflectionUtils.setFieldValue(this.patientResource, "uriInfo", this.uriInfo); Provider<XWikiContext> provider = this.mocker.getInstance(XWikiContext.TYPE_PROVIDER); this.context = provider.get(); } @Test public void getPatientWhenRepositoryReturnsNullPatient() { doReturn(null).when(this.repository).getPatientById(anyString()); Response response = this.patientResource.getPatient(this.id); verify(this.logger).debug("No such patient record: [{}]", this.id); Assert.assertEquals(Status.NOT_FOUND.getStatusCode(), response.getStatus()); } @Test public void getPatientWhenUserDoesNotHaveAccess() { doReturn(false).when(this.access).hasAccess(Right.VIEW, this.userProfileDocument, this.patientDocument); Response response = this.patientResource.getPatient(this.id); verify(this.logger).debug("View access denied to user [{}] on patient record [{}]", this.currentUser, this.id); Assert.assertEquals(Status.FORBIDDEN.getStatusCode(), response.getStatus()); } @Test @SuppressWarnings("unchecked") public void checkGetPatientNormalBehaviour() { doReturn(true).when(this.access).hasAccess(Right.VIEW, this.userProfileDocument, this.patientDocument); doReturn(new JSONObject()).when(this.patient).toJSON(); Response response = this.patientResource.getPatient(this.id); Assert.assertTrue(response.getEntity() instanceof JSONObject); Map<String, Map<String, String>> json = (Map<String, Map<String, String>>)response.getEntity(); Assert.assertThat(json, hasValue(hasEntry("rel", Relations.SELF))); Assert.assertThat(json, 
hasValue(hasEntry("href", this.uriString))); Map<String, List<MediaType>> actualMap = (Map)response.getMetadata(); Assert.assertThat(actualMap, hasValue(hasItem(MediaType.APPLICATION_JSON_TYPE))); Assert.assertEquals(Status.OK.getStatusCode(), response.getStatus()); } @Test public void checkGetPatientAlwaysSendsLoggerMessageOnRequest() { doReturn(true).when(this.access).hasAccess(Right.VIEW, this.userProfileDocument, this.patientDocument); doReturn(new JSONObject()).when(this.patient).toJSON(); this.patientResource.getPatient(this.id); verify(this.logger).debug("Retrieving patient record [{}] via REST", this.id); } @Test public void updatePatientWhenRepositoryReturnsNullPatient() { doReturn(null).when(this.repository).getPatientById(anyString()); WebApplicationException ex = null; try { this.patientResource.updatePatient("", this.id); } catch (WebApplicationException temp) { ex = temp; } Assert.assertNotNull("updatePatient did not throw a WebApplicationException as expected " + "when the patient could not be found", ex); Assert.assertEquals(Status.NOT_FOUND.getStatusCode(), ex.getResponse().getStatus()); verify(this.logger).debug("Patient record [{}] doesn't exist yet. It can be created by POST-ing the" + " JSON to /rest/patients", this.id); } @Test public void updatePatientWhenUserDoesNotHaveAccess() { doReturn(false).when(this.access).hasAccess(Right.EDIT, this.userProfileDocument, this.patientDocument); WebApplicationException ex = null; try { this.patientResource.updatePatient("", this.id); } catch (WebApplicationException temp) { ex = temp; } Assert.assertNotNull("updatePatient did not throw a WebApplicationException as expected " + "when the User did not have edit rights", ex); Assert.assertEquals(Status.FORBIDDEN.getStatusCode(), ex.getResponse().getStatus()); verify(this.logger).debug("Edit access denied to user [{}] on patient record [{}]", currentUser, id); } @Test public void checkUpdatePatientThrowsExceptionWhenSentWrongIdInJSON() { doReturn(true).when(this.access).hasAccess(Right.EDIT, this.userProfileDocument, this.patientDocument); JSONObject json = new JSONObject(); json.put("id", "!!!!!"); doReturn(this.id).when(this.patient).getId(); WebApplicationException ex = null; try { this.patientResource.updatePatient(json.toString(), this.id); } catch (WebApplicationException temp) { ex = temp; } Assert.assertNotNull("updatePatient did not throw a WebApplicationException as expected " + "when json id did not match patient id", ex); Assert.assertEquals(Status.CONFLICT.getStatusCode(), ex.getResponse().getStatus()); } @Test public void checkUpdatePatientCatchesExceptionFromUpdateFromJSON() { doReturn(true).when(this.access).hasAccess(Right.EDIT, this.userProfileDocument, this.patientDocument); JSONObject json = new JSONObject(); json.put("id", this.id); doReturn(this.id).when(this.patient).getId(); doThrow(Exception.class).when(this.patient).updateFromJSON(any(JSONObject.class)); WebApplicationException ex = null; try { this.patientResource.updatePatient(json.toString(), this.id); } catch (WebApplicationException temp) { ex = temp; } Assert.assertNotNull("updatePatient did not throw a WebApplicationException as expected " + "when catching an Exception from Patient.updateFromJSON", ex); Assert.assertEquals(Status.INTERNAL_SERVER_ERROR.getStatusCode(), ex.getResponse().getStatus()); verify(this.logger).warn("Failed to update patient [{}] from JSON: {}. 
Source JSON was: {}", patient.getId(), ex.getMessage(), json.toString()); } @Test public void checkUpdatePatientNormalBehaviour() { doReturn(true).when(this.access).hasAccess(Right.EDIT, this.userProfileDocument, this.patientDocument); JSONObject json = new JSONObject(); json.put("id", this.id); doReturn(this.id).when(this.patient).getId(); Response response = this.patientResource.updatePatient(json.toString(), this.id); verify(this.patient).updateFromJSON(any(JSONObject.class)); Assert.assertEquals(Status.NO_CONTENT.getStatusCode(), response.getStatus()); } @Test public void checkUpdatePatientAlwaysSendsLoggerMessageOnRequest() { doReturn(true).when(this.access).hasAccess(Right.EDIT, this.userProfileDocument, this.patientDocument); JSONObject json = new JSONObject(); json.put("id", this.id); doReturn(this.id).when(this.patient).getId(); this.patientResource.updatePatient(json.toString(), this.id); verify(this.logger).debug("Updating patient record [{}] via REST with JSON: {}", this.id, json.toString()); } @Test public void deletePatientWhenRepositoryReturnsNullPatient() { doReturn(null).when(this.repository).getPatientById(anyString()); Response response = this.patientResource.deletePatient(this.id); verify(this.logger).debug("Patient record [{}] didn't exist", this.id); Assert.assertEquals(Status.NOT_FOUND.getStatusCode(), response.getStatus()); } @Test public void deletePatientWhenUserDoesNotHaveAccess() { doReturn(false).when(this.access).hasAccess(Right.DELETE, this.userProfileDocument, this.patientDocument); Response response = this.patientResource.deletePatient(this.id); verify(this.logger).debug("Delete access denied to user [{}] on patient record [{}]", this.currentUser, this.id); Assert.assertEquals(Status.FORBIDDEN.getStatusCode(), response.getStatus()); } @Test public void checkDeletePatientCatchesXWikiException() throws XWikiException { XWiki wiki = mock(XWiki.class); doReturn(wiki).when(this.context).getWiki(); doReturn(true).when(this.access).hasAccess(Right.DELETE, this.userProfileDocument, this.patientDocument); doThrow(XWikiException.class).when(wiki).deleteDocument(any(XWikiDocument.class), eq(context)); WebApplicationException ex = null; try { this.patientResource.deletePatient(this.id); } catch(WebApplicationException temp) { ex = temp; } Assert.assertNotNull("deletePatient did not throw a WebApplicationException as expected " + "when catching an XWikiException", ex); Assert.assertEquals(Status.INTERNAL_SERVER_ERROR.getStatusCode(), ex.getResponse().getStatus()); verify(this.logger).warn(eq("Failed to delete patient record [{}]: {}"), eq(this.id), anyString()); } @Test public void checkDeletePatientNormalBehaviour() throws XWikiException { XWiki wiki = mock(XWiki.class); XWikiDocument patientXWikiDoc = mock(XWikiDocument.class); doReturn(wiki).when(this.context).getWiki(); doReturn(patientXWikiDoc).when(wiki).getDocument(this.patientDocument, this.context); doReturn(true).when(this.access).hasAccess(Right.DELETE, this.userProfileDocument, this.patientDocument); Response response = this.patientResource.deletePatient(this.id); verify(wiki).getDocument(this.patientDocument, this.context); verify(wiki).deleteDocument(patientXWikiDoc, this.context); Assert.assertEquals(Status.NO_CONTENT.getStatusCode(), response.getStatus()); } @Test public void checkDeletePatientAlwaysSendsLoggerMessageOnRequest() { XWiki wiki = mock(XWiki.class); doReturn(wiki).when(this.context).getWiki(); doReturn(true).when(this.access).hasAccess(Right.DELETE, this.userProfileDocument, 
this.patientDocument); this.patientResource.deletePatient(this.id); verify(this.logger).debug("Deleting patient record [{}] via REST", this.id); } }
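// A minimal sketch of a helper that could collapse the repeated try/catch blocks
// in the updatePatient and deletePatient tests above. The method name
// expectWebApplicationException is illustrative and does not exist in the
// original test class; it relies only on org.junit.Assert.fail and the
// javax.ws.rs.WebApplicationException type this class already imports.
private static WebApplicationException expectWebApplicationException(Runnable call, String failureMessage)
{
    try {
        call.run();
    } catch (WebApplicationException expected) {
        return expected;
    }
    Assert.fail(failureMessage);
    return null; // unreachable: Assert.fail always throws
}
// Example use, mirroring updatePatientWhenRepositoryReturnsNullPatient (assumes Java 8 lambdas):
// WebApplicationException ex = expectWebApplicationException(
//     () -> this.patientResource.updatePatient("", this.id),
//     "updatePatient did not throw a WebApplicationException as expected when the patient could not be found");
// Assert.assertEquals(Status.NOT_FOUND.getStatusCode(), ex.getResponse().getStatus());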
package org.jboss.forge.addon.database.tools.generate; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.Map; import java.util.Properties; import javax.inject.Inject; import org.hibernate.cfg.JDBCMetaDataConfiguration; import org.hibernate.cfg.reveng.DefaultReverseEngineeringStrategy; import org.hibernate.cfg.reveng.ReverseEngineeringSettings; import org.hibernate.cfg.reveng.ReverseEngineeringStrategy; import org.hibernate.mapping.PersistentClass; import org.hibernate.mapping.Property; import org.hibernate.mapping.Table; import org.hibernate.tool.hbm2x.ArtifactCollector; import org.hibernate.tool.hbm2x.POJOExporter; import org.hibernate.tool.hbm2x.pojo.ComponentPOJOClass; import org.hibernate.tool.hbm2x.pojo.EntityPOJOClass; import org.hibernate.tool.hbm2x.pojo.POJOClass; import org.jboss.forge.addon.database.tools.util.HibernateToolsHelper; import org.jboss.forge.addon.parser.java.facets.JavaSourceFacet; import org.jboss.forge.addon.ui.context.UIBuilder; import org.jboss.forge.addon.ui.context.UIContext; import org.jboss.forge.addon.ui.context.UIExecutionContext; import org.jboss.forge.addon.ui.context.UINavigationContext; import org.jboss.forge.addon.ui.context.UIValidationContext; import org.jboss.forge.addon.ui.input.UISelectMany; import org.jboss.forge.addon.ui.metadata.UICommandMetadata; import org.jboss.forge.addon.ui.metadata.WithAttributes; import org.jboss.forge.addon.ui.result.NavigationResult; import org.jboss.forge.addon.ui.result.Result; import org.jboss.forge.addon.ui.result.Results; import org.jboss.forge.addon.ui.util.Metadata; import org.jboss.forge.addon.ui.wizard.UIWizardStep; public class DatabaseTableSelectionStep implements UIWizardStep { private static String NAME = "Database Table Selection"; private static String DESCRIPTION = "Select the database tables for which you want to generate entities"; @Inject @WithAttributes( label = "Database Tables", description = "The database tables for which to generate entities") private UISelectMany<String> databaseTables; @Override public NavigationResult next(UINavigationContext context) throws Exception { return null; } @Override public UICommandMetadata getMetadata(UIContext context) { return Metadata .forCommand(getClass()) .name(NAME) .description(DESCRIPTION); } @Override public boolean isEnabled(UIContext context) { return true; } @Inject private GenerateEntitiesCommandDescriptor descriptor; @Inject private HibernateToolsHelper helper; private JDBCMetaDataConfiguration jmdc; @SuppressWarnings("unchecked") @Override public void initializeUI(UIBuilder builder) throws Exception { jmdc = new JDBCMetaDataConfiguration(); jmdc.setProperties(descriptor.connectionProperties); jmdc.setReverseEngineeringStrategy(createReverseEngineeringStrategy()); helper.buildMappings(descriptor.urls, descriptor.driverClass, jmdc); Iterator<Object> iterator = jmdc.getTableMappings(); ArrayList<String> tables = new ArrayList<String>(); while (iterator.hasNext()) { Object mapping = iterator.next(); if (mapping instanceof Table) { Table table = (Table) mapping; tables.add(table.getName()); } } databaseTables.setValueChoices(tables); databaseTables.setDefaultValue(tables); builder.add(databaseTables); } @Override public Result execute(UIExecutionContext context) { Collection<String> entities = exportSelectedEntities(); return Results.success(entities.size() + " entities were generated"); } @Override public void validate(UIValidationContext context) { } private boolean isSelected(Collection<String> 
selection, POJOClass element) { boolean result = false; if (element.isComponent()) { if (element instanceof ComponentPOJOClass) { ComponentPOJOClass cpc = (ComponentPOJOClass) element; Iterator<?> iterator = cpc.getAllPropertiesIterator(); result = true; while (iterator.hasNext()) { Object object = iterator.next(); if (object instanceof Property) { Property property = (Property) object; String tableName = property.getValue().getTable().getName(); if (!selection.contains(tableName)) { result = false; break; } } } } } else { if (element instanceof EntityPOJOClass) { EntityPOJOClass epc = (EntityPOJOClass) element; Object object = epc.getDecoratedObject(); if (object instanceof PersistentClass) { PersistentClass pc = (PersistentClass) object; Table table = pc.getTable(); if (selection.contains(table.getName())) { result = true; } } } } return result; } private Collection<String> getSelectedTableNames() { ArrayList<String> result = new ArrayList<String>(); Iterator<String> iterator = databaseTables.getValue().iterator(); while (iterator.hasNext()) { result.add(iterator.next()); } return result; } private Collection<String> exportSelectedEntities() { final Collection<String> selectedTableNames = getSelectedTableNames(); JavaSourceFacet java = descriptor.selectedProject.getFacet(JavaSourceFacet.class); POJOExporter pj = new POJOExporter(jmdc, java.getSourceDirectory() .getUnderlyingResourceObject()) { @Override @SuppressWarnings("rawtypes") protected void exportPOJO(Map additionalContext, POJOClass element) { if (isSelected(selectedTableNames, element)) { super.exportPOJO(additionalContext, element); } } }; Properties pojoProperties = new Properties(); pojoProperties.setProperty("jdk5", "true"); pojoProperties.setProperty("ejb3", "true"); pj.setProperties(pojoProperties); pj.setArtifactCollector(new ArtifactCollector()); pj.start(); return selectedTableNames; } private ReverseEngineeringStrategy createReverseEngineeringStrategy() { ReverseEngineeringStrategy strategy = new DefaultReverseEngineeringStrategy(); ReverseEngineeringSettings revengsettings = new ReverseEngineeringSettings(strategy) .setDefaultPackageName(descriptor.targetPackage) .setDetectManyToMany(true) .setDetectOneToOne(true) .setDetectOptimisticLock(true); strategy.setSettings(revengsettings); return strategy; } }
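// A minimal sketch of a slightly more idiomatic form of getSelectedTableNames()
// above, assuming databaseTables.getValue() returns an Iterable<String>, which is
// what the existing Iterator-based loop implies. The method name
// getSelectedTableNamesAlt is illustrative and not part of the original step.
private Collection<String> getSelectedTableNamesAlt()
{
   Collection<String> result = new ArrayList<>();
   for (String tableName : databaseTables.getValue())
   {
      result.add(tableName);
   }
   return result;
}
// Behaviour is identical to getSelectedTableNames(); the enhanced for loop simply
// replaces the explicit Iterator handling.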
package org.monarchinitiative.exomiser.core.phenotype; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import org.junit.Before; import org.junit.Test; import java.util.*; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.junit.Assert.assertThat; /** * @author Jules Jacobsen <[email protected]> */ public class CrossSpeciesPhenotypeMatcherTest { private CrossSpeciesPhenotypeMatcher instance; //Nose phenotypes private final PhenotypeTerm bigNose = PhenotypeTerm.of("HP:0000001", "Big nose"); private final PhenotypeTerm nose = PhenotypeTerm.of("HP:0000002", "Nose"); private final PhenotypeTerm littleNose = PhenotypeTerm.of("HP:0000003", "Little nose"); private final PhenotypeMatch perfectNoseMatch = PhenotypeMatch.builder().query(bigNose).match(bigNose).lcs(bigNose).simj(1.0).score(4.0).build(); private final PhenotypeMatch noseMatch = PhenotypeMatch.builder().query(bigNose).match(littleNose).lcs(nose).simj(0.5).score(1.0).build(); //Toe phenotypes private final PhenotypeTerm toe = PhenotypeTerm.of("HP:0000004", "Toe"); private final PhenotypeTerm bigToe = PhenotypeTerm.of("HP:0000005", "Big toe"); private final PhenotypeTerm crookedToe = PhenotypeTerm.of("HP:0000006", "Crooked toe"); private final PhenotypeTerm longToe = PhenotypeTerm.of("HP:0000007", "Long toe"); private final PhenotypeMatch bestToeMatch = PhenotypeMatch.builder().query(bigToe).match(longToe).lcs(toe).score(2.0).build(); private final PhenotypeMatch bigToeCrookedToeMatch = PhenotypeMatch.builder().query(bigToe).match(crookedToe).lcs(toe).score(1.5).build(); @Before public void setUp() { Map<PhenotypeTerm, Set<PhenotypeMatch>> phenotypeMatches = new LinkedHashMap<>(); phenotypeMatches.put(bigNose, Sets.newHashSet(perfectNoseMatch, noseMatch)); phenotypeMatches.put(bigToe, Sets.newHashSet(bestToeMatch, bigToeCrookedToeMatch)); instance = new CrossSpeciesPhenotypeMatcher(Organism.HUMAN, phenotypeMatches); //TODO: would this make more sense? 
//QueryPhenotypeMatch queryPhenotypeMatch = new QueryPhenotypeMatch(Organism.HUMAN, phenotypeMatches); //CrossSpeciesPhenotypeMatcher phenotypeMatcher = new CrossSpeciesPhenotypeMatcher(queryPhenotypeMatch); } @Test public void emptyInputValues() throws Exception { CrossSpeciesPhenotypeMatcher instance = new CrossSpeciesPhenotypeMatcher(Organism.HUMAN, Collections.emptyMap()); assertThat(instance.getOrganism(), equalTo(Organism.HUMAN)); assertThat(instance.getQueryTerms(), equalTo(Collections.emptyList())); assertThat(instance.getTermPhenotypeMatches(), equalTo(Collections.emptyMap())); } @Test public void testEquals() { CrossSpeciesPhenotypeMatcher emptyHumanOne = new CrossSpeciesPhenotypeMatcher(Organism.HUMAN, Collections.emptyMap()); CrossSpeciesPhenotypeMatcher emptyMouseOne = new CrossSpeciesPhenotypeMatcher(Organism.MOUSE, Collections.emptyMap()); CrossSpeciesPhenotypeMatcher emptyHumanTwo = new CrossSpeciesPhenotypeMatcher(Organism.HUMAN, Collections.emptyMap()); assertThat(emptyHumanOne, equalTo(emptyHumanTwo)); assertThat(emptyHumanOne, not(equalTo(emptyMouseOne))); } @Test public void testToString() { System.out.println(new CrossSpeciesPhenotypeMatcher(Organism.HUMAN, Collections.emptyMap())); System.out.println(instance); } @Test public void testGetBestForwardAndReciprocalMatches_returnsEmptyListFromEmptyQuery() throws Exception { assertThat(instance.calculateBestForwardAndReciprocalMatches(Collections.emptyList()), equalTo(Collections.emptyList())); } @Test public void testGetBestForwardAndReciprocalMatches() throws Exception { List<String> modelPhenotypes = Lists.newArrayList(littleNose.getId(), longToe.getId()); List<PhenotypeMatch> expected = Lists.newArrayList(noseMatch, bestToeMatch, noseMatch, bestToeMatch); expected.forEach(match -> System.out.printf("%s-%s=%f%n", match.getQueryPhenotypeId(), match.getMatchPhenotypeId(), match.getScore())); assertThat(instance.calculateBestForwardAndReciprocalMatches(modelPhenotypes), equalTo(expected)); } @Test public void testCanCalculateBestPhenotypeMatchesByTerm() { List<PhenotypeMatch> bestForwardAndReciprocalMatches = Lists.newArrayList(noseMatch, bestToeMatch, perfectNoseMatch, bestToeMatch); List<PhenotypeMatch> result = instance.calculateBestPhenotypeMatchesByTerm(bestForwardAndReciprocalMatches); assertThat(result, containsInAnyOrder(bestToeMatch, perfectNoseMatch)); } @Test public void testCalculateBestPhenotypeMatchesByTermReturnsEmptyMapForEmptyInputList() { assertThat(instance.calculateBestPhenotypeMatchesByTerm(Collections.emptyList()), equalTo(Collections.emptyList())); } @Test public void testCanGetTheoreticalBestModel() { assertThat(instance.getQueryPhenotypeMatch(), equalTo(new QueryPhenotypeMatch(Organism.HUMAN, instance.getTermPhenotypeMatches()))); } }
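// A minimal sketch of how one more fixture could be declared alongside the nose
// and toe matches above, using only the PhenotypeTerm.of() and
// PhenotypeMatch.builder() calls already exercised in this test. The term id,
// label, simj and score values are illustrative and are not real HPO data.
private final PhenotypeTerm shortToe = PhenotypeTerm.of("HP:0000008", "Short toe");
private final PhenotypeMatch shortToeMatch = PhenotypeMatch.builder()
        .query(bigToe)
        .match(shortToe)
        .lcs(toe)
        .simj(0.5)
        .score(1.0)
        .build();
// Adding shortToeMatch to the bigToe entry in setUp() would exercise the
// best-match selection in calculateBestPhenotypeMatchesByTerm with three
// candidate toe matches instead of two.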