answer
stringlengths 17
10.2M
|
|---|
// -*- mode:java; encoding:utf-8 -*-
// vim:set fileencoding=utf-8:
// @homepage@
package example;
import java.awt.*;
import java.io.File;
import java.io.IOException;
import javax.swing.*;
/**
 * Demo panel showing three ways to detect that a file name such as
 * {@code con.txt} refers to a Windows reserved device name rather than a
 * regular file.
 * NOTE(review): results are platform-dependent - on non-Windows systems all
 * three buttons simply operate on an ordinary file; confirm on Windows.
 */
public final class MainPanel extends JPanel {
  private MainPanel() {
    super(new BorderLayout(10, 10));
    JTextArea log = new JTextArea();
    // "con" is a reserved device name on Windows; "con.txt" maps onto it.
    String deviceName = "con.txt";
    // 1) Pre-1.5 approach: createNewFile() throws IOException for device names.
    JButton b1 = new JButton("c:/" + deviceName);
    b1.addActionListener(e -> {
      File file = new File(deviceName);
      try {
        if (file.createNewFile()) {
          log.append("the named file does not exist and was successfully created.\n");
        } else {
          log.append("the named file already exists.\n");
        }
      } catch (IOException ex) {
        // ex.printStackTrace();
        Object[] msg = {ex.getMessage()};
        showMessageDialog(msg);
      }
    });
    Component p1 = makeTitledPanel("IOException: before 1.5", b1);
    // 2) Pre-1.5 approach: canonical-path resolution plus isFile() check.
    JButton b2 = new JButton("c:/" + deviceName + ":getCanonicalPath");
    b2.addActionListener(e -> {
      File file = new File(deviceName);
      if (!isCanonicalPath(file)) {
        Object[] msg = {file.getAbsolutePath() + " is not a canonical path."};
        showMessageDialog(msg);
      }
    });
    Component p2 = makeTitledPanel("getCanonicalPath: before 1.5", b2);
    // 3) JDK 1.5+ approach: isFile() alone suffices.
    JButton b3 = new JButton("c:/" + deviceName + ":isFile");
    b3.addActionListener(e -> {
      File file = new File(deviceName);
      if (!file.isFile()) {
        Object[] msg = {file.getAbsolutePath() + " is not a file."};
        showMessageDialog(msg);
      }
    });
    Component p3 = makeTitledPanel("isFile: JDK 1.5+", b3);
    JPanel p = new JPanel(new GridLayout(3, 1, 10, 10));
    p.add(p1);
    p.add(p2);
    p.add(p3);
    add(p, BorderLayout.NORTH);
    add(new JScrollPane(log));
    setBorder(BorderFactory.createEmptyBorder(10, 10, 10, 10));
    setPreferredSize(new Dimension(320, 240));
  }

  // Shows the given message objects in a dialog anchored to this panel's root pane.
  private void showMessageDialog(Object... obj) {
    JRootPane root = getRootPane();
    JOptionPane.showMessageDialog(root, obj, "Error", JOptionPane.INFORMATION_MESSAGE);
  }

  // Before 1.5
  // Returns true only if the file has a resolvable canonical path AND is a
  // regular file; null and I/O failures yield false.
  public static boolean isCanonicalPath(File file) {
    if (file == null) {
      return false;
    }
    try {
      if (file.getCanonicalPath() == null || !file.isFile()) {
        return false;
      }
    } catch (IOException ex) {
      return false;
    }
    return true;
  }

  // Wraps a component in a panel with a titled border.
  private static Component makeTitledPanel(String title, Component c) {
    JPanel p = new JPanel(new BorderLayout());
    p.setBorder(BorderFactory.createTitledBorder(title));
    p.add(c);
    return p;
  }

  public static void main(String[] args) {
    EventQueue.invokeLater(MainPanel::createAndShowGui);
  }

  // Builds the demo frame on the EDT; falls back to the default LaF on failure.
  private static void createAndShowGui() {
    try {
      UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
    } catch (ClassNotFoundException | InstantiationException | IllegalAccessException | UnsupportedLookAndFeelException ex) {
      ex.printStackTrace();
      Toolkit.getDefaultToolkit().beep();
    }
    JFrame frame = new JFrame("@title@");
    frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
    frame.getContentPane().add(new MainPanel());
    frame.pack();
    frame.setLocationRelativeTo(null);
    frame.setVisible(true);
  }
}
|
package som.interpreter.nodes;
import som.interpreter.Arguments;
import som.vmobjects.SBlock;
import com.oracle.truffle.api.frame.VirtualFrame;
import com.oracle.truffle.api.nodes.ExplodeLoop;
public abstract class ContextualNode extends ExpressionNode {
protected final int contextLevel;
public ContextualNode(final int contextLevel) {
this.contextLevel = contextLevel;
}
@ExplodeLoop
protected Arguments determineOuterArguments(final VirtualFrame frame) {
Arguments args = Arguments.get(frame);
int i = contextLevel;
while (i > 0) {
SBlock block = (SBlock) args.getSelf();
args = block.getContext();
i
}
return args;
}
@ExplodeLoop
protected Object determineOuterSelf(final VirtualFrame frame) {
Object self = Arguments.get(frame).getSelf();
int i = contextLevel;
while (i > 0) {
SBlock block = (SBlock) self;
self = block.getOuterSelf();
i
}
return self;
}
}
|
package soot.jimple.infoflow;
import heros.FlowFunction;
import heros.FlowFunctions;
import heros.InterproceduralCFG;
import heros.flowfunc.Identity;
import heros.solver.PathEdge;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import soot.Local;
import soot.NullType;
import soot.SootMethod;
import soot.Unit;
import soot.Value;
import soot.jimple.ArrayRef;
import soot.jimple.AssignStmt;
import soot.jimple.CaughtExceptionRef;
import soot.jimple.Constant;
import soot.jimple.DefinitionStmt;
import soot.jimple.IdentityStmt;
import soot.jimple.InstanceFieldRef;
import soot.jimple.InstanceInvokeExpr;
import soot.jimple.InvokeExpr;
import soot.jimple.ReturnStmt;
import soot.jimple.StaticFieldRef;
import soot.jimple.Stmt;
import soot.jimple.ThrowStmt;
import soot.jimple.infoflow.data.Abstraction;
import soot.jimple.infoflow.data.AbstractionWithPath;
import soot.jimple.infoflow.data.AccessPath;
import soot.jimple.infoflow.heros.InfoflowSolver;
import soot.jimple.infoflow.source.DefaultSourceSinkManager;
import soot.jimple.infoflow.source.ISourceSinkManager;
import soot.jimple.infoflow.util.BaseSelector;
import soot.jimple.internal.JimpleLocal;
import soot.jimple.toolkits.ide.icfg.JimpleBasedBiDiICFG;
public class InfoflowProblem extends AbstractInfoflowProblem {
InfoflowSolver bSolver;
private final static boolean DEBUG = false;
final ISourceSinkManager sourceSinkManager;
Abstraction zeroValue = null;
/**
 * Computes the taints produced by a taint wrapper object, i.e. a black-box
 * summary of a library method's taint propagation.
 * @param iStmt The call statement the taint wrapper shall check for well-
 * known methods that introduce black-box taint propagation
 * @param source The taint source
 * @return The taints computed by the wrapper; empty if no wrapper is
 * configured or the incoming taint is unrelated to this call
 */
private Set<Abstraction> computeWrapperTaints
    (final Stmt iStmt,
    Abstraction source) {
  Set<Abstraction> res = new HashSet<Abstraction>();
  if(taintWrapper == null)
    return Collections.emptySet();
  // For non-static taints the wrapper is only relevant if the tainted value
  // is the call's base object or one of its arguments.
  if (!source.getAccessPath().isStaticFieldRef())
    if(iStmt.getInvokeExpr() instanceof InstanceInvokeExpr) {
      InstanceInvokeExpr iiExpr = (InstanceInvokeExpr) iStmt.getInvokeExpr();
      boolean found = iiExpr.getBase().equals(source.getAccessPath().getPlainValue());
      if (!found)
        for (Value param : iiExpr.getArgs())
          if (source.getAccessPath().getPlainValue().equals(param)) {
            found = true;
            break;
          }
      if (!found)
        return Collections.emptySet();
    }
  Set<AccessPath> vals = taintWrapper.getTaintsForMethod(iStmt, source.getAccessPath());
  if(vals != null) {
    for (AccessPath val : vals) {
      Abstraction newAbs = source.deriveNewAbstraction(val);
      if (pathTracking == PathTrackingMethod.ForwardTracking)
        ((AbstractionWithPath) newAbs).addPathElement(iStmt);
      res.add(newAbs);
      // If the taint wrapper taints the base object (new taint), this must be propagated
      // backwards as there might be aliases for the base object
      if(iStmt.getInvokeExpr() instanceof InstanceInvokeExpr) {
        InstanceInvokeExpr iiExpr = (InstanceInvokeExpr) iStmt.getInvokeExpr();
        if(iiExpr.getBase().equals(newAbs.getAccessPath().getPlainValue())
            || newAbs.getAccessPath().isStaticFieldRef()) {
          // Inject an inactive taint into the backwards solver at every
          // predecessor of the call site.
          Abstraction bwAbs = source.deriveNewAbstraction(val,iStmt, false);
          for (Unit predUnit : interproceduralCFG().getPredsOf(iStmt))
            bSolver.processEdge(new PathEdge<Unit, Abstraction, SootMethod>(bwAbs, predUnit, bwAbs));
        }
      }
    }
  }
  return res;
}
/**
 * Checks whether the configured taint wrapper claims exclusive handling of
 * the given invocation statement for the given incoming taint.
 * @param iStmt The call statement the taint wrapper shall check for well-
 * known methods that introduce black-box taint propagation
 * @param source The taint source
 * @return True if the wrapper is exclusive, otherwise false
 */
private boolean isWrapperExclusive
    (final Stmt iStmt,
    Abstraction source) {
  // No wrapper configured means nothing can be exclusive.
  return taintWrapper != null && taintWrapper.isExclusive(iStmt, source.getAccessPath());
}
/**
 * Creates the factory for the four IFDS flow functions (normal, call,
 * return, call-to-return) that implement the forward taint-propagation
 * rules of this problem.
 * @return The flow-function factory handed to the IFDS solver
 */
@Override
public FlowFunctions<Unit, Abstraction, SootMethod> createFlowFunctionsFactory() {
  return new FlowFunctions<Unit, Abstraction, SootMethod>() {
    /**
     * Creates a new taint abstraction for the given value
     * @param src The source statement from which the taint originated
     * @param targetValue The target value that shall now be tainted
     * @param source The incoming taint abstraction from the source
     * @param taintSet The taint set to which to add all newly produced
     * taints
     * @param cutFirstField True if the first field of the incoming access
     * path shall be cut off when deriving the new abstraction
     */
    private void addTaintViaStmt
        (final Unit src,
        final Value targetValue,
        Abstraction source,
        Set<Abstraction> taintSet,
        boolean cutFirstField) {
      taintSet.add(source);
      Abstraction newAbs = source.deriveNewAbstraction(targetValue, cutFirstField, src);
      if (pathTracking == PathTrackingMethod.ForwardTracking)
        ((AbstractionWithPath) newAbs).addPathElement(src);
      taintSet.add(newAbs);
      //only heap-objects
      if (triggerInaktiveTaintOrReverseFlow(targetValue, source)) {
        // call backwards-check:
        Abstraction bwAbs = newAbs.deriveInactiveAbstraction();
        for (Unit predUnit : interproceduralCFG().getPredsOf(src)){
          bSolver.processEdge(new PathEdge<Unit, Abstraction, SootMethod>(bwAbs, predUnit, bwAbs));
        }
      }
    }

    @Override
    public FlowFunction<Abstraction> getNormalFlowFunction(final Unit src, final Unit dest) {
      // If we compute flows on parameters, we create the initial
      // flow fact here
      if (src instanceof IdentityStmt) {
        final IdentityStmt is = (IdentityStmt) src;
        return new FlowFunction<Abstraction>() {
          @Override
          public Set<Abstraction> computeTargets(Abstraction source) {
            if (stopAfterFirstFlow && !results.isEmpty())
              return Collections.emptySet();
            Set<Abstraction> res = new HashSet<Abstraction>();
            boolean addOriginal = true;
            // A caught exception re-binds a thrown taint to the catch-local.
            if (is.getRightOp() instanceof CaughtExceptionRef) {
              if (source.getExceptionThrown()) {
                res.add(source.deriveNewAbstractionOnCatch(is.getLeftOp(), is));
                addOriginal = false;
              }
            }
            if (addOriginal)
              res.add(source);
            // Parameter/this bindings may themselves be sources.
            if (sourceSinkManager.isSource(is, interproceduralCFG())) {
              if (pathTracking != PathTrackingMethod.NoTracking)
                res.add(new AbstractionWithPath(is.getLeftOp(),
                    is.getRightOp(),
                    is, false, true, is).addPathElement(is));
              else
                res.add(new Abstraction(is.getLeftOp(),
                    is.getRightOp(), is, false, true, is));
            }
            return res;
          }
        };
      }
      // taint is propagated with assignStmt
      else if (src instanceof AssignStmt) {
        final AssignStmt assignStmt = (AssignStmt) src;
        Value right = assignStmt.getRightOp();
        Value left = assignStmt.getLeftOp();
        final Value leftValue = BaseSelector.selectBase(left, false);
        final Set<Value> rightVals = BaseSelector.selectBaseList(right, true);
        return new FlowFunction<Abstraction>() {
          @Override
          public Set<Abstraction> computeTargets(Abstraction source) {
            if (stopAfterFirstFlow && !results.isEmpty())
              return Collections.emptySet();
            boolean addLeftValue = false;
            boolean cutFirstField = false;
            Set<Abstraction> res = new HashSet<Abstraction>();
            // shortcuts:
            // on NormalFlow taint cannot be created
            if (source.equals(zeroValue)) {
              return Collections.emptySet();
            }
            // Activate the abstraction if this statement is its activation unit.
            Abstraction newSource;
            if (!source.isAbstractionActive() && (src.equals(source.getActivationUnit()) || src.equals(source.getActivationUnitOnCurrentLevel()))){
              newSource = source.getActiveCopy(false);
            }else{
              newSource = source;
            }
            for (Value rightValue : rightVals) {
              // check if static variable is tainted (same name, same class)
              //y = X.f && X.f tainted --> y, X.f tainted
              if (newSource.getAccessPath().isStaticFieldRef()) {
                if (rightValue instanceof StaticFieldRef) {
                  StaticFieldRef rightRef = (StaticFieldRef) rightValue;
                  if (newSource.getAccessPath().getFirstField().equals(rightRef.getField())) {
                    addLeftValue = true;
                    cutFirstField = true;
                  }
                }
              } else {
                // if both are fields, we have to compare their fieldName via equals and their bases
                //y = x.f && x tainted --> y, x tainted
                //y = x.f && x.f tainted --> y, x tainted
                if (rightValue instanceof InstanceFieldRef) {
                  InstanceFieldRef rightRef = (InstanceFieldRef) rightValue;
                  Local rightBase = (Local) rightRef.getBase();
                  Local sourceBase = newSource.getAccessPath().getPlainLocal();
                  if (rightBase.equals(sourceBase)) {
                    if (newSource.getAccessPath().isInstanceFieldRef()) {
                      if (rightRef.getField().equals(newSource.getAccessPath().getFirstField())) {
                        addLeftValue = true;
                        cutFirstField = true;
                      }
                    } else {
                      addLeftValue = true;
                    }
                  }
                }
                // indirect taint propagation:
                // if rightvalue is local and source is instancefield of this local:
                // y = x && x.f tainted --> y.f, x.f tainted
                // y.g = x && x.f tainted --> y.g.f, x.f tainted
                if (rightValue instanceof Local && newSource.getAccessPath().isInstanceFieldRef()) {
                  Local base = newSource.getAccessPath().getPlainLocal();
                  if (rightValue.equals(base)) {
                    if (leftValue instanceof Local) {
                      if (pathTracking == PathTrackingMethod.ForwardTracking)
                        res.add(((AbstractionWithPath) newSource.deriveNewAbstraction
                            (newSource.getAccessPath().copyWithNewValue(leftValue), assignStmt)).addPathElement(src));
                      else
                        res.add(newSource.deriveNewAbstraction(newSource.getAccessPath().copyWithNewValue(leftValue), assignStmt));
                    } else {
                      // access path length = 1 - taint entire value if left is field reference
                      if (pathTracking == PathTrackingMethod.ForwardTracking)
                        res.add(((AbstractionWithPath) newSource.deriveNewAbstraction(leftValue, assignStmt))
                            .addPathElement(src));
                      else
                        res.add(newSource.deriveNewAbstraction(leftValue, assignStmt));
                    }
                  }
                }
                if (rightValue instanceof ArrayRef) {
                  //y = x[i] && x tainted -> x, y tainted
                  Local rightBase = (Local) ((ArrayRef) rightValue).getBase();
                  if (rightBase.equals(newSource.getAccessPath().getPlainValue())) {
                    addLeftValue = true;
                  }
                }
                // generic case, is true for Locals, ArrayRefs that are equal etc..
                //y = x && x tainted --> y, x tainted
                if (rightValue.equals(newSource.getAccessPath().getPlainValue())) {
                  addLeftValue = true;
                }
              }
            }
            // if one of them is true -> add leftValue
            if (addLeftValue) {
              if (sourceSinkManager.isSink(assignStmt, interproceduralCFG())) {
                if (pathTracking != PathTrackingMethod.NoTracking)
                  results.addResult(leftValue, assignStmt,
                      newSource.getSource(),
                      newSource.getSourceContext(),
                      ((AbstractionWithPath) newSource).getPropagationPath(),
                      assignStmt);
                else
                  results.addResult(leftValue, assignStmt,
                      newSource.getSource(), newSource.getSourceContext());
              }
              if(triggerInaktiveTaintOrReverseFlow(leftValue, newSource) || newSource.isAbstractionActive())
                addTaintViaStmt(src, leftValue, newSource, res, cutFirstField);
              return res;
            }
            //if leftvalue contains the tainted value -> it is overwritten - remove taint:
            //but not for arrayRefs:
            // x[i] = y --> taint is preserved since we do not distinguish between elements of collections
            //because we do not use a MUST-Alias analysis, we cannot delete aliases of taints
            if(((AssignStmt)src).getLeftOp() instanceof ArrayRef){
              return Collections.singleton(newSource);
            }
            if(newSource.getAccessPath().isInstanceFieldRef()){
              //x.f = y && x.f tainted --> no taint propagated
              if (leftValue instanceof InstanceFieldRef) {
                InstanceFieldRef leftRef = (InstanceFieldRef) leftValue;
                if (leftRef.getBase().equals(newSource.getAccessPath().getPlainValue())) {
                  if (leftRef.getField().equals(newSource.getAccessPath().getFirstField())) {
                    if(newSource.isAbstractionActive()){
                      return Collections.emptySet();
                    }else{
                      //start backward:
                      for (Value rightValue : rightVals) {
                        Abstraction newAbs = newSource.deriveNewAbstraction(rightValue, true, src);
                        if (triggerInaktiveTaintOrReverseFlow(rightValue, newAbs)) {
                          Abstraction bwAbs = newAbs.deriveInactiveAbstraction();
                          // for (Unit predUnit : interproceduralCFG().getPredsOf(src))
                          // bSolver.processEdge(new PathEdge<Unit, Abstraction, SootMethod>(bwAbs, predUnit, bwAbs));
                        }
                      }
                    }
                  }
                }
                //x = y && x.f tainted -> no taint propagated
              }else if (leftValue instanceof Local){
                if (leftValue.equals(newSource.getAccessPath().getPlainValue())) {
                  if(newSource.isAbstractionActive()){
                    return Collections.emptySet();
                  }else{
                    //start backward:
                    for (Value rightValue : rightVals) {
                      Abstraction newAbs = newSource.deriveNewAbstraction(rightValue, false, src);
                      if (triggerInaktiveTaintOrReverseFlow(rightValue, newAbs)) {
                        Abstraction bwAbs = newAbs.deriveInactiveAbstraction();
                        // for (Unit predUnit : interproceduralCFG().getPredsOf(src))
                        // bSolver.processEdge(new PathEdge<Unit, Abstraction, SootMethod>(bwAbs, predUnit, bwAbs));
                      }
                    }
                  }
                }
              }
            }else if(newSource.getAccessPath().isStaticFieldRef()){
              //X.f = y && X.f tainted -> no taint propagated
              if(leftValue instanceof StaticFieldRef && ((StaticFieldRef)leftValue).getField().equals(newSource.getAccessPath().getFirstField())){
                if(newSource.isAbstractionActive()){
                  return Collections.emptySet();
                }else{
                  //start backward:
                  for (Value rightValue : rightVals) {
                    Abstraction newAbs = newSource.deriveNewAbstraction(rightValue, false, src);
                    if (triggerInaktiveTaintOrReverseFlow(rightValue, newAbs)) {
                      Abstraction bwAbs = newAbs.deriveInactiveAbstraction();
                      // for (Unit predUnit : interproceduralCFG().getPredsOf(src))
                      // bSolver.processEdge(new PathEdge<Unit, Abstraction, SootMethod>(bwAbs, predUnit, bwAbs));
                    }
                  }
                }
              }
            }
            //when the fields of an object are tainted, but the base object is overwritten then the fields should not be tainted any more
            //x = y && x.f tainted -> no taint propagated
            if(newSource.getAccessPath().isLocal() && leftValue.equals(newSource.getAccessPath().getPlainValue())){
              if(newSource.isAbstractionActive()){
                return Collections.emptySet();
              }
            }
            //nothing applies: z = y && x tainted -> taint is preserved
            return Collections.singleton(newSource);
          }
        };
      }
      // for unbalanced problems, return statements correspond to
      // normal flows, not return flows, because there is no return
      // site we could jump to
      else if (src instanceof ReturnStmt) {
        final ReturnStmt returnStmt = (ReturnStmt) src;
        return new FlowFunction<Abstraction>() {
          @Override
          public Set<Abstraction> computeTargets(Abstraction source) {
            if (stopAfterFirstFlow && !results.isEmpty())
              return Collections.emptySet();
            if (returnStmt.getOp().equals(source.getAccessPath().getPlainValue()) && sourceSinkManager.isSink(returnStmt, interproceduralCFG())) {
              if (pathTracking != PathTrackingMethod.NoTracking)
                results.addResult(returnStmt.getOp(), returnStmt,
                    source.getSource(),
                    source.getSourceContext(),
                    ((AbstractionWithPath) source).getPropagationPath(),
                    returnStmt);
              else
                results.addResult(returnStmt.getOp(), returnStmt,
                    source.getSource(), source.getSourceContext());
            }
            return Collections.singleton(source);
          }
        };
      }
      else if (src instanceof ThrowStmt) {
        final ThrowStmt throwStmt = (ThrowStmt) src;
        return new FlowFunction<Abstraction>() {
          @Override
          public Set<Abstraction> computeTargets(Abstraction source) {
            if (stopAfterFirstFlow && !results.isEmpty())
              return Collections.emptySet();
            // Throwing a tainted value marks the abstraction as "exception thrown".
            if (throwStmt.getOp().equals(source.getAccessPath().getPlainLocal()))
              return Collections.singleton(source.deriveNewAbstractionOnThrow());
            return Collections.singleton(source);
          }
        };
      }
      return Identity.v();
    }

    @Override
    public FlowFunction<Abstraction> getCallFlowFunction(final Unit src, final SootMethod dest) {
      final Stmt stmt = (Stmt) src;
      final InvokeExpr ie = stmt.getInvokeExpr();
      final List<Value> callArgs = ie.getArgs();
      final List<Value> paramLocals = new ArrayList<Value>();
      for (int i = 0; i < dest.getParameterCount(); i++) {
        paramLocals.add(dest.getActiveBody().getParameterLocal(i));
      }
      return new FlowFunction<Abstraction>() {
        @Override
        public Set<Abstraction> computeTargets(Abstraction source) {
          if (stopAfterFirstFlow && !results.isEmpty())
            return Collections.emptySet();
          if (source.equals(zeroValue)) {
            return Collections.singleton(source);
          }
          if(isWrapperExclusive(stmt, source)) {
            //taint is propagated in CallToReturnFunction, so we do not need any taint here:
            return Collections.emptySet();
          }
          //if we do not have to look into sinks:
          if (!inspectSinks && sourceSinkManager.isSink(stmt, interproceduralCFG())) {
            return Collections.emptySet();
          }
          Abstraction newSource;
          if (!source.isAbstractionActive() && (src.equals(source.getActivationUnit()) || src.equals(source.getActivationUnitOnCurrentLevel()))){
            newSource = source.getActiveCopy(false);
          }else{
            newSource = source;
          }
          Set<Abstraction> res = new HashSet<Abstraction>();
          // check if whole object is tainted (happens with strings, for example:)
          if (!dest.isStatic() && ie instanceof InstanceInvokeExpr) {
            InstanceInvokeExpr vie = (InstanceInvokeExpr) ie;
            // this might be enough because every call must happen with a local variable which is tainted itself:
            if (vie.getBase().equals(newSource.getAccessPath().getPlainValue())) {
              Abstraction abs = newSource.deriveNewAbstraction(newSource.getAccessPath().copyWithNewValue
                  (dest.getActiveBody().getThisLocal()));
              if (pathTracking == PathTrackingMethod.ForwardTracking)
                ((AbstractionWithPath) abs).addPathElement(stmt);
              //add new callArgs:
              assert abs != newSource; // our source abstraction must be immutable
              abs.setAbstractionFromCallEdge(abs.clone());
              res.add(abs);
            }
          }
          //special treatment for clinit methods - no param mapping possible
          if(!dest.getName().equals("<clinit>")) {
            assert dest.getParameterCount() == callArgs.size();
            // check if param is tainted:
            for (int i = 0; i < callArgs.size(); i++) {
              if (callArgs.get(i).equals(newSource.getAccessPath().getPlainLocal()) &&
                  (triggerInaktiveTaintOrReverseFlow(callArgs.get(i), newSource) || newSource.isAbstractionActive())) {
                Abstraction abs = newSource.deriveNewAbstraction(newSource.getAccessPath().copyWithNewValue
                    (paramLocals.get(i)), stmt);
                if (pathTracking == PathTrackingMethod.ForwardTracking)
                  ((AbstractionWithPath) abs).addPathElement(stmt);
                assert abs != newSource; // our source abstraction must be immutable
                abs.setAbstractionFromCallEdge(abs.clone());
                res.add(abs);
              }
            }
          }
          // staticfieldRefs must be analyzed even if they are not part of the params:
          if (newSource.getAccessPath().isStaticFieldRef()) {
            Abstraction abs;
            abs = newSource.clone();
            assert (abs.equals(newSource) && abs.hashCode() == newSource.hashCode());
            assert abs != newSource; // our source abstraction must be immutable
            abs.setAbstractionFromCallEdge(abs.clone());
            res.add(abs);
          }
          return res;
        }
      };
    }

    @Override
    public FlowFunction<Abstraction> getReturnFlowFunction(final Unit callSite, final SootMethod callee, final Unit exitStmt, final Unit retSite) {
      return new FlowFunction<Abstraction>() {
        @Override
        public Set<Abstraction> computeTargets(Abstraction source) {
          if (stopAfterFirstFlow && !results.isEmpty())
            return Collections.emptySet();
          if (source.equals(zeroValue)) {
            return Collections.emptySet();
          }
          //activate taint if necessary, but in any case we have to take the previous call edge abstraction
          Abstraction newSource;
          if(!source.isAbstractionActive()){
            if(callSite.equals(source.getActivationUnit()) || callSite.equals(source.getActivationUnitOnCurrentLevel()) ){
              newSource = source.getActiveCopy(true);
            }else{
              newSource = source.cloneUsePredAbstractionOfCG();
            }
          }else{
            newSource = source.cloneUsePredAbstractionOfCG();
          }
          //if abstraction is not active and activeStmt was in this method, it will not get activated = it can be removed:
          if(!newSource.isAbstractionActive() && newSource.getActivationUnit() != null && interproceduralCFG().getMethodOf(newSource.getActivationUnit()).equals(callee)){
            return Collections.emptySet();
          }
          Set<Abstraction> res = new HashSet<Abstraction>();
          // if we have a returnStmt we have to look at the returned value:
          if (exitStmt instanceof ReturnStmt) {
            ReturnStmt returnStmt = (ReturnStmt) exitStmt;
            Value retLocal = returnStmt.getOp();
            if (callSite instanceof DefinitionStmt) {
              DefinitionStmt defnStmt = (DefinitionStmt) callSite;
              Value leftOp = defnStmt.getLeftOp();
              if (retLocal.equals(newSource.getAccessPath().getPlainLocal()) &&
                  (triggerInaktiveTaintOrReverseFlow(leftOp, newSource) || newSource.isAbstractionActive())) {
                Abstraction abs = newSource.deriveNewAbstraction(newSource.getAccessPath().copyWithNewValue(leftOp), callSite);
                if (pathTracking == PathTrackingMethod.ForwardTracking)
                  ((AbstractionWithPath) abs).addPathElement(exitStmt);
                assert abs != newSource; // our source abstraction must be immutable
                res.add(abs);
                //call backwards-solver:
                if(triggerInaktiveTaintOrReverseFlow(leftOp, abs)){
                  Abstraction bwAbs = newSource.deriveNewAbstraction(newSource.getAccessPath().copyWithNewValue(leftOp), callSite, false);
                  if (abs.isAbstractionActive())
                    bwAbs = bwAbs.getAbstractionWithNewActivationUnitOnCurrentLevel(callSite);
                  for (Unit predUnit : interproceduralCFG().getPredsOf(callSite))
                    bSolver.processEdge(new PathEdge<Unit, Abstraction, SootMethod>(bwAbs, predUnit, bwAbs));
                }
              }
            }
            // Check whether this return is treated as a sink
            assert returnStmt.getOp() == null
                || returnStmt.getOp() instanceof Local
                || returnStmt.getOp() instanceof Constant;
            if (returnStmt.getOp() != null
                && newSource.getAccessPath().isLocal()
                && newSource.getAccessPath().getPlainValue().equals(returnStmt.getOp())
                && sourceSinkManager.isSink(returnStmt, interproceduralCFG())) {
              if (pathTracking != PathTrackingMethod.NoTracking)
                results.addResult(returnStmt.getOp(), returnStmt,
                    newSource.getSource(),
                    newSource.getSourceContext(),
                    ((AbstractionWithPath) newSource).getPropagationPath(),
                    returnStmt);
              else
                results.addResult(returnStmt.getOp(), returnStmt,
                    newSource.getSource(), newSource.getSourceContext());
            }
          }
          // easy: static
          if (newSource.getAccessPath().isStaticFieldRef()) {
            Abstraction abs = newSource.clone();
            assert (abs.equals(newSource) && abs.hashCode() == newSource.hashCode());
            res.add(abs);
            // call backwards-check:
            Abstraction bwAbs = newSource.deriveInactiveAbstraction();
            if (newSource.isAbstractionActive())
              bwAbs = bwAbs.getAbstractionWithNewActivationUnitOnCurrentLevel(callSite);
            for (Unit predUnit : interproceduralCFG().getPredsOf(callSite))
              bSolver.processEdge(new PathEdge<Unit, Abstraction, SootMethod>(bwAbs, predUnit, bwAbs));
          }
          // checks: this/params/fields
          // check one of the call params are tainted (not if simple type)
          Value sourceBase = newSource.getAccessPath().getPlainLocal();
          Value originalCallArg = null;
          for (int i = 0; i < callee.getParameterCount(); i++) {
            if (callee.getActiveBody().getParameterLocal(i).equals(sourceBase)) {
              if (callSite instanceof Stmt) {
                Stmt iStmt = (Stmt) callSite;
                originalCallArg = iStmt.getInvokeExpr().getArg(i);
                //either the param is a fieldref (not possible in jimple?) or an array Or one of its fields is tainted/all fields are tainted
                if (triggerInaktiveTaintOrReverseFlow(originalCallArg, newSource)) {
                  Abstraction abs = newSource.deriveNewAbstraction(newSource.getAccessPath().copyWithNewValue(originalCallArg), callSite);
                  if (pathTracking == PathTrackingMethod.ForwardTracking)
                    abs = ((AbstractionWithPath) abs).addPathElement(exitStmt);
                  res.add(abs);
                  if(triggerInaktiveTaintOrReverseFlow(originalCallArg, abs)){
                    // call backwards-check:
                    Abstraction bwAbs = abs.deriveInactiveAbstraction();
                    if (abs.isAbstractionActive())
                      bwAbs = bwAbs.getAbstractionWithNewActivationUnitOnCurrentLevel(callSite);
                    for (Unit predUnit : interproceduralCFG().getPredsOf(callSite))
                      bSolver.processEdge(new PathEdge<Unit, Abstraction, SootMethod>(bwAbs, predUnit, bwAbs));
                  }
                }
              }
            }
          }
          Local thisL = null;
          if (!callee.isStatic()) {
            thisL = callee.getActiveBody().getThisLocal();
          }
          if (thisL != null) {
            if (thisL.equals(sourceBase)) {
              boolean param = false;
              // check if it is not one of the params (then we have already fixed it)
              for (int i = 0; i < callee.getParameterCount(); i++) {
                if (callee.getActiveBody().getParameterLocal(i).equals(sourceBase)) {
                  param = true;
                  break;
                }
              }
              if (!param) {
                if (callSite instanceof Stmt) {
                  Stmt stmt = (Stmt) callSite;
                  if (stmt.getInvokeExpr() instanceof InstanceInvokeExpr) {
                    InstanceInvokeExpr iIExpr = (InstanceInvokeExpr) stmt.getInvokeExpr();
                    Abstraction abs = newSource.deriveNewAbstraction(newSource.getAccessPath().copyWithNewValue(iIExpr.getBase()));
                    if (pathTracking == PathTrackingMethod.ForwardTracking)
                      ((AbstractionWithPath) abs).addPathElement(stmt);
                    res.add(abs);
                    if(triggerInaktiveTaintOrReverseFlow(iIExpr.getBase(), abs)){
                      Abstraction bwAbs = abs.deriveInactiveAbstraction();
                      if (abs.isAbstractionActive())
                        bwAbs = bwAbs.getAbstractionWithNewActivationUnitOnCurrentLevel(callSite);
                      for (Unit predUnit : interproceduralCFG().getPredsOf(callSite))
                        bSolver.processEdge(new PathEdge<Unit, Abstraction, SootMethod>(bwAbs, predUnit, bwAbs));
                    }
                  }
                }
              }
            }
          }
          return res;
        }
      };
    }

    @Override
    public FlowFunction<Abstraction> getCallToReturnFlowFunction(final Unit call, final Unit returnSite) {
      // special treatment for native methods:
      if (call instanceof Stmt) {
        final Stmt iStmt = (Stmt) call;
        final List<Value> callArgs = iStmt.getInvokeExpr().getArgs();
        return new FlowFunction<Abstraction>() {
          @Override
          public Set<Abstraction> computeTargets(Abstraction source) {
            if (stopAfterFirstFlow && !results.isEmpty())
              return Collections.emptySet();
            Abstraction newSource;
            //check inactive elements:
            // FIX: parenthesization - the activation-unit checks must only apply
            // to inactive abstractions. The previous code's "(!a && (b)) || c"
            // re-activated already-active taints; now consistent with
            // getNormalFlowFunction and getCallFlowFunction.
            if (!source.isAbstractionActive() && (call.equals(source.getActivationUnit()) || call.equals(source.getActivationUnitOnCurrentLevel()))){
              newSource = source.getActiveCopy(false);
            }else{
              newSource = source;
            }
            Set<Abstraction> res = new HashSet<Abstraction>();
            res.addAll(computeWrapperTaints(iStmt, newSource));
            // We can only pass on a taint if it is neither a parameter nor the
            // base object of the current call
            boolean passOn = true;
            //we only can remove the taint if we step into the call/return edges
            //otherwise we will loose taint - see ArrayTests/arrayCopyTest
            if(!interproceduralCFG().getCalleesOfCallAt(call).isEmpty() || (taintWrapper != null
                && taintWrapper.isExclusive(iStmt, newSource.getAccessPath()))) {
              if (iStmt.getInvokeExpr() instanceof InstanceInvokeExpr)
                if (((InstanceInvokeExpr) iStmt.getInvokeExpr()).getBase().equals
                    (newSource.getAccessPath().getPlainLocal())) {
                  passOn = false;
                }
              if (passOn)
                for (int i = 0; i < callArgs.size(); i++)
                  if (callArgs.get(i).equals(newSource.getAccessPath().getPlainLocal()) && isTransferableValue(callArgs.get(i))) {
                    passOn = false;
                    break;
                  }
              //static variables are always propagated if they are not overwritten. So if we have at least one call/return edge pair,
              //we can be sure that the value does not get "lost" if we do not pass it on:
              if(newSource.getAccessPath().isStaticFieldRef()){
                passOn = false;
              }
            }
            if (passOn)
              res.add(newSource);
            if (iStmt.getInvokeExpr().getMethod().isNative()) {
              if (callArgs.contains(newSource.getAccessPath().getPlainValue())) {
                // java uses call by value, but fields of complex objects can be changed (and tainted), so use this conservative approach:
                res.addAll(ncHandler.getTaintedValues(iStmt, newSource, callArgs));
              }
            }
            if (iStmt instanceof AssignStmt) {
              final AssignStmt stmt = (AssignStmt) iStmt;
              if (sourceSinkManager.isSource(stmt, interproceduralCFG())) {
                if (DEBUG)
                  System.out.println("Found source: " + stmt.getInvokeExpr().getMethod());
                if (pathTracking == PathTrackingMethod.ForwardTracking)
                  res.add(new AbstractionWithPath(stmt.getLeftOp(),
                      stmt.getInvokeExpr(),
                      stmt, false, true, iStmt).addPathElement(call));
                else
                  res.add(new Abstraction(stmt.getLeftOp(),
                      stmt.getInvokeExpr(), stmt, false, true, iStmt));
                res.remove(zeroValue);
              }
            }
            // if we have called a sink we have to store the path from the source - in case one of the params is tainted!
            if (sourceSinkManager.isSink(iStmt, interproceduralCFG())) {
              boolean taintedParam = false;
              for (int i = 0; i < callArgs.size(); i++) {
                if (callArgs.get(i).equals(newSource.getAccessPath().getPlainLocal())) {
                  taintedParam = true;
                  break;
                }
              }
              if (taintedParam) {
                if (pathTracking != PathTrackingMethod.NoTracking)
                  results.addResult(iStmt.getInvokeExpr(), iStmt,
                      newSource.getSource(),
                      newSource.getSourceContext(),
                      ((AbstractionWithPath) newSource).getPropagationPath(),
                      call);
                else
                  results.addResult(iStmt.getInvokeExpr(), iStmt,
                      newSource.getSource(), newSource.getSourceContext());
              }
              // if the base object which executes the method is tainted the sink is reached, too.
              if (iStmt.getInvokeExpr() instanceof InstanceInvokeExpr) {
                InstanceInvokeExpr vie = (InstanceInvokeExpr) iStmt.getInvokeExpr();
                if (vie.getBase().equals(newSource.getAccessPath().getPlainValue())) {
                  if (pathTracking != PathTrackingMethod.NoTracking)
                    results.addResult(iStmt.getInvokeExpr(), iStmt,
                        newSource.getSource(),
                        newSource.getSourceContext(),
                        ((AbstractionWithPath) newSource).getPropagationPath(),
                        call);
                  else
                    results.addResult(iStmt.getInvokeExpr(), iStmt,
                        newSource.getSource(), newSource.getSourceContext());
                }
              }
            }
            return res;
          }
        };
      }
      return Identity.v();
    }
  };
}
/**
 * Creates a new instance of the InfoflowProblem class using a default
 * Jimple-based bidirectional ICFG and a {@link DefaultSourceSinkManager}
 * built from the given signature lists.
 * Delegates to the (ICFG, ISourceSinkManager) constructor instead of
 * duplicating the initialization logic.
 * @param sourceList The method signatures to be treated as sources
 * @param sinkList The method signatures to be treated as sinks
 */
public InfoflowProblem(List<String> sourceList, List<String> sinkList) {
	this(new JimpleBasedBiDiICFG(), new DefaultSourceSinkManager(sourceList, sinkList));
}
/**
 * Creates a new instance of the InfoflowProblem class using a default
 * Jimple-based bidirectional ICFG.
 * Delegates to the (ICFG, ISourceSinkManager) constructor instead of
 * duplicating the initialization logic.
 * @param sourceSinkManager The manager object that decides which statements
 * are sources and which ones are sinks
 */
public InfoflowProblem(ISourceSinkManager sourceSinkManager) {
	this(new JimpleBasedBiDiICFG(), sourceSinkManager);
}
/**
 * Creates a new instance of the InfoflowProblem class on the given ICFG with
 * a {@link DefaultSourceSinkManager} built from the given signature lists.
 * Delegates to the (ICFG, ISourceSinkManager) constructor instead of
 * duplicating the initialization logic.
 * @param icfg The interprocedural control-flow graph on which to solve the problem
 * @param sourceList The method signatures to be treated as sources
 * @param sinkList The method signatures to be treated as sinks
 */
public InfoflowProblem(InterproceduralCFG<Unit, SootMethod> icfg, List<String> sourceList, List<String> sinkList) {
	this(icfg, new DefaultSourceSinkManager(sourceList, sinkList));
}
/**
 * Creates a new instance of the InfoflowProblem class. This is the base
 * constructor to which the convenience constructors delegate.
 * @param icfg The interprocedural control-flow graph on which to solve the problem
 * @param sourceSinkManager The manager object that decides which statements
 * are sources and which ones are sinks
 */
public InfoflowProblem(InterproceduralCFG<Unit, SootMethod> icfg, ISourceSinkManager sourceSinkManager) {
	super(icfg);
	this.sourceSinkManager = sourceSinkManager;
}
/**
 * Creates a new instance of the InfoflowProblem class using a default
 * Jimple-based bidirectional ICFG and an explicit set of analysis seeds.
 * Delegates to the (ICFG, ISourceSinkManager) constructor instead of
 * duplicating the initialization logic.
 * @param mySourceSinkManager The manager object that decides which statements
 * are sources and which ones are sinks
 * @param analysisSeeds The statements at which the taint analysis shall start
 */
public InfoflowProblem(ISourceSinkManager mySourceSinkManager, Set<Unit> analysisSeeds) {
	this(new JimpleBasedBiDiICFG(), mySourceSinkManager);
	this.initialSeeds.addAll(analysisSeeds);
}
/**
 * Lazily creates the singleton zero fact used by the IFDS solver. The zero
 * fact is modeled as a taint on a synthetic local named "zero"; depending on
 * the configured path tracking mode, either a plain {@link Abstraction} or an
 * {@link AbstractionWithPath} is created.
 */
@Override
public Abstraction createZeroValue() {
	if (zeroValue != null)
		return zeroValue;
	// Build the synthetic local once; only one of the two branches runs.
	JimpleLocal zeroLocal = new JimpleLocal("zero", NullType.v());
	if (this.pathTracking == PathTrackingMethod.NoTracking)
		zeroValue = new Abstraction(zeroLocal, null, null, false, true, null);
	else
		zeroValue = new AbstractionWithPath(zeroLocal, null, null, false, true, null);
	return zeroValue;
}
/**
 * Registers the solver used for backwards (alias) propagation.
 * @param solver The backwards IFDS solver
 */
public void setBackwardSolver(InfoflowSolver solver) {
	// Keep a handle so the forward flow functions can inject alias edges.
	this.bSolver = solver;
}
/**
 * Disables the solver's automatic addition of the zero fact; this problem
 * manages the zero abstraction itself.
 */
@Override
public boolean autoAddZero() {
	return false;
}
}
|
package soot.jimple.infoflow;
import heros.FlowFunction;
import heros.FlowFunctions;
import heros.InterproceduralCFG;
import heros.flowfunc.Identity;
import heros.flowfunc.KillAll;
import heros.solver.PathEdge;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import soot.Local;
import soot.SootMethod;
import soot.Unit;
import soot.Value;
import soot.jimple.ArrayRef;
import soot.jimple.AssignStmt;
import soot.jimple.CaughtExceptionRef;
import soot.jimple.Constant;
import soot.jimple.DefinitionStmt;
import soot.jimple.IdentityStmt;
import soot.jimple.InstanceFieldRef;
import soot.jimple.InstanceInvokeExpr;
import soot.jimple.InvokeExpr;
import soot.jimple.ReturnStmt;
import soot.jimple.StaticFieldRef;
import soot.jimple.Stmt;
import soot.jimple.ThrowStmt;
import soot.jimple.infoflow.data.Abstraction;
import soot.jimple.infoflow.data.AbstractionWithPath;
import soot.jimple.infoflow.data.AccessPath;
import soot.jimple.infoflow.heros.InfoflowSolver;
import soot.jimple.infoflow.source.DefaultSourceSinkManager;
import soot.jimple.infoflow.source.ISourceSinkManager;
import soot.jimple.infoflow.util.BaseSelector;
import soot.jimple.toolkits.ide.icfg.JimpleBasedBiDiICFG;
public class InfoflowProblem extends AbstractInfoflowProblem {
private InfoflowSolver bSolver;
private final ISourceSinkManager sourceSinkManager;
private Abstraction zeroValue = null;
private final Logger logger = LoggerFactory.getLogger(getClass());
/**
* Computes the taints produced by a taint wrapper object
* @param iStmt The call statement the taint wrapper shall check for well-
* known methods that introduce black-box taint propagation
* @param source The taint source
* @return The taints computed by the wrapper
*/
private Set<Abstraction> computeWrapperTaints
		(final Stmt iStmt,
		Abstraction source) {
	Set<Abstraction> res = new HashSet<Abstraction>();
	// No taint wrapper configured -> nothing to compute
	if(taintWrapper == null)
		return Collections.emptySet();
	// For instance invocations, the wrapper is only consulted if the taint
	// touches the base object or one of the actual arguments. Static field
	// taints always pass this filter.
	if (!source.getAccessPath().isStaticFieldRef())
		if(iStmt.getInvokeExpr() instanceof InstanceInvokeExpr) {
			InstanceInvokeExpr iiExpr = (InstanceInvokeExpr) iStmt.getInvokeExpr();
			boolean found = iiExpr.getBase().equals(source.getAccessPath().getPlainValue());
			if (!found)
				for (Value param : iiExpr.getArgs())
					if (source.getAccessPath().getPlainValue().equals(param)) {
						found = true;
						break;
					}
			if (!found)
				return Collections.emptySet();
		}
	Set<AccessPath> vals = taintWrapper.getTaintsForMethod(iStmt, source.getAccessPath());
	if(vals != null) {
		for (AccessPath val : vals) {
			// Derive one new forward taint per access path reported by the wrapper
			Abstraction newAbs = source.deriveNewAbstraction(val);
			if (pathTracking == PathTrackingMethod.ForwardTracking)
				((AbstractionWithPath) newAbs).addPathElement(iStmt);
			res.add(newAbs);
			// If the taint wrapper taints the base object (new taint), this must be propagated
			// backwards as there might be aliases for the base object
			if(iStmt.getInvokeExpr() instanceof InstanceInvokeExpr) {
				InstanceInvokeExpr iiExpr = (InstanceInvokeExpr) iStmt.getInvokeExpr();
				if(iiExpr.getBase().equals(newAbs.getAccessPath().getPlainValue())
						|| newAbs.getAccessPath().isStaticFieldRef()) {
					// Inactive variant seeds the backwards solver at all predecessors
					Abstraction bwAbs = source.deriveNewAbstraction(val, false);
					for (Unit predUnit : interproceduralCFG().getPredsOf(iStmt))
						bSolver.processEdge(new PathEdge<Unit, Abstraction>(bwAbs, predUnit, bwAbs));
				}
			}
		}
	}
	return res;
}
@Override
public FlowFunctions<Unit, Abstraction, SootMethod> createFlowFunctionsFactory() {
return new FlowFunctions<Unit, Abstraction, SootMethod>() {
/**
 * Creates a new taint abstraction for the given value
 * @param src The source statement from which the taint originated
 * @param targetValue The target value that shall now be tainted
 * @param source The incoming taint abstraction from the source
 * @param taintSet The taint set to which to add all newly produced
 * taints
 * @param cutFirstField Whether the first field of the source's access path
 * shall be cut off when deriving the new abstraction (passed through to
 * Abstraction.deriveNewAbstraction)
 */
private void addTaintViaStmt
		(final Unit src,
		final Value targetValue,
		Abstraction source,
		Set<Abstraction> taintSet,
		boolean cutFirstField) {
	// Keep the incoming taint alive and add the newly tainted target value
	taintSet.add(source);
	Abstraction newAbs = source.deriveNewAbstraction(targetValue, cutFirstField, src);
	if (pathTracking == PathTrackingMethod.ForwardTracking)
		((AbstractionWithPath) newAbs).addPathElement(src);
	taintSet.add(newAbs);
	//only heap-objects
	if (triggerInaktiveTaintOrReverseFlow(targetValue, source)) {
		// call backwards-check: seed the backwards solver with an inactive
		// copy at every predecessor so aliases of the target get tainted too
		Abstraction bwAbs = newAbs.deriveInactiveAbstraction();
		for (Unit predUnit : interproceduralCFG().getPredsOf(src)){
			bSolver.processEdge(new PathEdge<Unit, Abstraction>(bwAbs, predUnit, bwAbs));
		}
	}
}
/**
 * Builds the flow function for a normal (intra-procedural) edge from
 * {@code src} to {@code dest}. Identity statements bind caught exceptions
 * and may introduce new source taints; assignments implement the main
 * taint propagation and kill rules; return and throw statements get
 * special treatment.
 */
@Override
public FlowFunction<Abstraction> getNormalFlowFunction(final Unit src, final Unit dest) {
	// If we compute flows on parameters, we create the initial
	// flow fact here
	if (src instanceof IdentityStmt) {
		final IdentityStmt is = (IdentityStmt) src;
		return new FlowFunction<Abstraction>() {
			@Override
			public Set<Abstraction> computeTargets(Abstraction source) {
				if (stopAfterFirstFlow && !results.isEmpty())
					return Collections.emptySet();
				Set<Abstraction> res = new HashSet<Abstraction>();
				boolean addOriginal = true;
				// A thrown-exception taint is re-bound to the local that catches it
				if (is.getRightOp() instanceof CaughtExceptionRef) {
					if (source.getExceptionThrown()) {
						res.add(source.deriveNewAbstractionOnCatch(is.getLeftOp(), is));
						addOriginal = false;
					}
				}
				if (addOriginal)
					res.add(source);
				// Identity statements can also be sources (e.g. tainted parameters)
				if (sourceSinkManager.isSource(is, interproceduralCFG())) {
					Abstraction abs;
					if (pathTracking != PathTrackingMethod.NoTracking)
						abs = new AbstractionWithPath(is.getLeftOp(),
							is.getRightOp(),
							is, false, true, is).addPathElement(is);
					else
						abs = new Abstraction(is.getLeftOp(),
							is.getRightOp(), is, false, true, is);
					abs.setZeroAbstraction(source.getZeroAbstraction());
					res.add(abs);
				}
				return res;
			}
		};
	}
	// taint is propagated with assignStmt
	else if (src instanceof AssignStmt) {
		final AssignStmt assignStmt = (AssignStmt) src;
		Value right = assignStmt.getRightOp();
		Value left = assignStmt.getLeftOp();
		final Value leftValue = BaseSelector.selectBase(left, false);
		final Set<Value> rightVals = BaseSelector.selectBaseList(right, true);
		return new FlowFunction<Abstraction>() {
			@Override
			public Set<Abstraction> computeTargets(Abstraction source) {
				if (stopAfterFirstFlow && !results.isEmpty())
					return Collections.emptySet();
				boolean addLeftValue = false;
				boolean cutFirstField = false;
				Set<Abstraction> res = new HashSet<Abstraction>();
				// shortcuts:
				// on NormalFlow taint cannot be created
				if (source.equals(zeroValue)) {
					return Collections.emptySet();
				}
				// Activate the abstraction if this statement is its activation point
				Abstraction newSource;
				if (!source.isAbstractionActive() && (src.equals(source.getActivationUnit()) || src.equals(source.getActivationUnitOnCurrentLevel()))){
					newSource = source.getActiveCopy(false);
				}else{
					newSource = source;
				}
				for (Value rightValue : rightVals) {
					// check if static variable is tainted (same name, same class)
					//y = X.f && X.f tainted --> y, X.f tainted
					if (newSource.getAccessPath().isStaticFieldRef()) {
						if (rightValue instanceof StaticFieldRef) {
							StaticFieldRef rightRef = (StaticFieldRef) rightValue;
							if (newSource.getAccessPath().getFirstField().equals(rightRef.getField())) {
								addLeftValue = true;
								cutFirstField = true;
							}
						}
					} else {
						// if both are fields, we have to compare their fieldName via equals and their bases
						//y = x.f && x tainted --> y, x tainted
						//y = x.f && x.f tainted --> y, x tainted
						if (rightValue instanceof InstanceFieldRef) {
							InstanceFieldRef rightRef = (InstanceFieldRef) rightValue;
							Local rightBase = (Local) rightRef.getBase();
							Local sourceBase = newSource.getAccessPath().getPlainLocal();
							if (rightBase.equals(sourceBase)) {
								if (newSource.getAccessPath().isInstanceFieldRef()) {
									if (rightRef.getField().equals(newSource.getAccessPath().getFirstField())) {
										addLeftValue = true;
										cutFirstField = true;
									}
								} else {
									addLeftValue = true;
								}
							}
						}
						// indirect taint propagation:
						// if rightvalue is local and source is instancefield of this local:
						// y = x && x.f tainted --> y.f, x.f tainted
						// y.g = x && x.f tainted --> y.g.f, x.f tainted
						if (rightValue instanceof Local && newSource.getAccessPath().isInstanceFieldRef()) {
							Local base = newSource.getAccessPath().getPlainLocal();
							if (rightValue.equals(base)) {
								if (leftValue instanceof Local) {
									if (pathTracking == PathTrackingMethod.ForwardTracking)
										res.add(((AbstractionWithPath) newSource.deriveNewAbstraction
											(newSource.getAccessPath().copyWithNewValue(leftValue), assignStmt)).addPathElement(src));
									else
										res.add(newSource.deriveNewAbstraction(newSource.getAccessPath().copyWithNewValue(leftValue), assignStmt));
								} else {
									// access path length = 1 - taint entire value if left is field reference
									if (pathTracking == PathTrackingMethod.ForwardTracking)
										res.add(((AbstractionWithPath) newSource.deriveNewAbstraction(leftValue, assignStmt))
											.addPathElement(src));
									else
										res.add(newSource.deriveNewAbstraction(leftValue, assignStmt));
								}
							}
						}
						if (rightValue instanceof ArrayRef) {
							//y = x[i] && x tainted -> x, y tainted
							Local rightBase = (Local) ((ArrayRef) rightValue).getBase();
							if (rightBase.equals(newSource.getAccessPath().getPlainValue())) {
								addLeftValue = true;
							}
						}
						// generic case, is true for Locals, ArrayRefs that are equal etc..
						//y = x && x tainted --> y, x tainted
						if (rightValue.equals(newSource.getAccessPath().getPlainValue())) {
							addLeftValue = true;
						}
					}
				}
				// if one of them is true -> add leftValue
				if (addLeftValue) {
					// An assignment can itself be a sink (e.g. a tainted return value)
					if (sourceSinkManager.isSink(assignStmt, interproceduralCFG())) {
						if (pathTracking != PathTrackingMethod.NoTracking)
							results.addResult(leftValue, assignStmt,
								newSource.getSource(),
								newSource.getSourceContext(),
								((AbstractionWithPath) newSource).getPropagationPath(),
								assignStmt);
						else
							results.addResult(leftValue, assignStmt,
								newSource.getSource(), newSource.getSourceContext());
					}
					if(triggerInaktiveTaintOrReverseFlow(leftValue, newSource) || newSource.isAbstractionActive())
						addTaintViaStmt(src, leftValue, newSource, res, cutFirstField);
					return res;
				}
				//if leftvalue contains the tainted value -> it is overwritten - remove taint:
				//but not for arrayRefs:
				// x[i] = y --> taint is preserved since we do not distinguish between elements of collections
				//because we do not use a MUST-Alias analysis, we cannot delete aliases of taints
				if(((AssignStmt)src).getLeftOp() instanceof ArrayRef){
					return Collections.singleton(newSource);
				}
				if(newSource.getAccessPath().isInstanceFieldRef()){
					//x.f = y && x.f tainted --> no taint propagated
					if (leftValue instanceof InstanceFieldRef) {
						InstanceFieldRef leftRef = (InstanceFieldRef) leftValue;
						if (leftRef.getBase().equals(newSource.getAccessPath().getPlainValue())) {
							if (leftRef.getField().equals(newSource.getAccessPath().getFirstField())) {
								if(newSource.isAbstractionActive()){
									return Collections.emptySet();
								}else{
									//start backward:
									// NOTE(review): the backward-edge injection below is
									// commented out; only the derivations run here.
									for (Value rightValue : rightVals) {
										Abstraction newAbs = newSource.deriveNewAbstraction(rightValue, true, src);
										if (triggerInaktiveTaintOrReverseFlow(rightValue, newAbs)) {
											Abstraction bwAbs = newAbs.deriveInactiveAbstraction();
											// for (Unit predUnit : interproceduralCFG().getPredsOf(src))
											// bSolver.processEdge(new PathEdge<Unit, Abstraction, SootMethod>(bwAbs, predUnit, bwAbs));
										}
									}
								}
							}
						}
						//x = y && x.f tainted -> no taint propagated
					}else if (leftValue instanceof Local){
						if (leftValue.equals(newSource.getAccessPath().getPlainValue())) {
							if(newSource.isAbstractionActive()){
								return Collections.emptySet();
							}else{
								//start backward:
								for (Value rightValue : rightVals) {
									Abstraction newAbs = newSource.deriveNewAbstraction(rightValue, false, src);
									if (triggerInaktiveTaintOrReverseFlow(rightValue, newAbs)) {
										Abstraction bwAbs = newAbs.deriveInactiveAbstraction();
										// for (Unit predUnit : interproceduralCFG().getPredsOf(src))
										// bSolver.processEdge(new PathEdge<Unit, Abstraction, SootMethod>(bwAbs, predUnit, bwAbs));
									}
								}
							}
						}
					}
				}else if(newSource.getAccessPath().isStaticFieldRef()){
					//X.f = y && X.f tainted -> no taint propagated
					if(leftValue instanceof StaticFieldRef && ((StaticFieldRef)leftValue).getField().equals(newSource.getAccessPath().getFirstField())){
						if(newSource.isAbstractionActive()){
							return Collections.emptySet();
						}else{
							//start backward:
							for (Value rightValue : rightVals) {
								Abstraction newAbs = newSource.deriveNewAbstraction(rightValue, false, src);
								if (triggerInaktiveTaintOrReverseFlow(rightValue, newAbs)) {
									Abstraction bwAbs = newAbs.deriveInactiveAbstraction();
									// for (Unit predUnit : interproceduralCFG().getPredsOf(src))
									// bSolver.processEdge(new PathEdge<Unit, Abstraction, SootMethod>(bwAbs, predUnit, bwAbs));
								}
							}
						}
					}
				}
				//when the fields of an object are tainted, but the base object is overwritten then the fields should not be tainted any more
				//x = y && x.f tainted -> no taint propagated
				if(newSource.getAccessPath().isLocal() && leftValue.equals(newSource.getAccessPath().getPlainValue())){
					if(newSource.isAbstractionActive()){
						return Collections.emptySet();
					}
				}
				//nothing applies: z = y && x tainted -> taint is preserved
				return Collections.singleton(newSource);
			}
		};
	}
	// for unbalanced problems, return statements correspond to
	// normal flows, not return flows, because there is no return
	// site we could jump to
	else if (src instanceof ReturnStmt) {
		final ReturnStmt returnStmt = (ReturnStmt) src;
		return new FlowFunction<Abstraction>() {
			@Override
			public Set<Abstraction> computeTargets(Abstraction source) {
				if (stopAfterFirstFlow && !results.isEmpty())
					return Collections.emptySet();
				// Report a leak if a tainted value flows into a sink return
				if (returnStmt.getOp().equals(source.getAccessPath().getPlainValue()) && sourceSinkManager.isSink(returnStmt, interproceduralCFG())) {
					if (pathTracking != PathTrackingMethod.NoTracking)
						results.addResult(returnStmt.getOp(), returnStmt,
							source.getSource(),
							source.getSourceContext(),
							((AbstractionWithPath) source).getPropagationPath(),
							returnStmt);
					else
						results.addResult(returnStmt.getOp(), returnStmt,
							source.getSource(), source.getSourceContext());
				}
				return Collections.singleton(source);
			}
		};
	}
	else if (src instanceof ThrowStmt) {
		final ThrowStmt throwStmt = (ThrowStmt) src;
		return new FlowFunction<Abstraction>() {
			@Override
			public Set<Abstraction> computeTargets(Abstraction source) {
				if (stopAfterFirstFlow && !results.isEmpty())
					return Collections.emptySet();
				// Throwing a tainted value marks the abstraction as an exception taint
				if (throwStmt.getOp().equals(source.getAccessPath().getPlainLocal()))
					return Collections.singleton(source.deriveNewAbstractionOnThrow());
				return Collections.singleton(source);
			}
		};
	}
	return Identity.v();
}
/**
 * Builds the flow function for a call edge: maps taints on the base object
 * and on actual arguments into the callee's this-local and parameter locals,
 * and passes static field taints through unchanged.
 */
@Override
public FlowFunction<Abstraction> getCallFlowFunction(final Unit src, final SootMethod dest) {
	// Callees without a body cannot be analyzed; kill all facts on this edge
	if (!dest.isConcrete()){
		logger.debug("Call skipped because target has no body: {} -> {}", src, dest);
		return KillAll.v();
	}
	final Stmt stmt = (Stmt) src;
	final InvokeExpr ie = stmt.getInvokeExpr();
	final List<Value> callArgs = ie.getArgs();
	final List<Value> paramLocals = new ArrayList<Value>();
	for (int i = 0; i < dest.getParameterCount(); i++) {
		paramLocals.add(dest.getActiveBody().getParameterLocal(i));
	}
	return new FlowFunction<Abstraction>() {
		@Override
		public Set<Abstraction> computeTargets(Abstraction source) {
			if (stopAfterFirstFlow && !results.isEmpty())
				return Collections.emptySet();
			if (source.equals(zeroValue)) {
				return Collections.singleton(source);
			}
			if(taintWrapper != null && taintWrapper.isExclusive(stmt, source.getAccessPath())) {
				//taint is propagated in CallToReturnFunction, so we do not need any taint here:
				return Collections.emptySet();
			}
			//if we do not have to look into sinks:
			if (!inspectSinks && sourceSinkManager.isSink(stmt, interproceduralCFG())) {
				return Collections.emptySet();
			}
			// Activate the abstraction if this call site is its activation point
			Abstraction newSource;
			if (!source.isAbstractionActive() && (src.equals(source.getActivationUnit()) || src.equals(source.getActivationUnitOnCurrentLevel()))){
				newSource = source.getActiveCopy(false);
			}else{
				newSource = source;
			}
			Set<Abstraction> res = new HashSet<Abstraction>();
			// check if whole object is tainted (happens with strings, for example:)
			if (!dest.isStatic() && ie instanceof InstanceInvokeExpr) {
				InstanceInvokeExpr vie = (InstanceInvokeExpr) ie;
				// this might be enough because every call must happen with a local variable which is tainted itself:
				if (vie.getBase().equals(newSource.getAccessPath().getPlainValue())) {
					Abstraction abs = newSource.deriveNewAbstraction(newSource.getAccessPath().copyWithNewValue
							(dest.getActiveBody().getThisLocal()));
					if (pathTracking == PathTrackingMethod.ForwardTracking)
						((AbstractionWithPath) abs).addPathElement(stmt);
					//add new callArgs:
					assert abs != newSource; // our source abstraction must be immutable
					abs.setAbstractionFromCallEdge(abs.clone());
					res.add(abs);
				}
			}
			//special treatment for clinit methods - no param mapping possible
			if(!dest.getName().equals("<clinit>")) {
				assert dest.getParameterCount() == callArgs.size();
				// check if param is tainted:
				for (int i = 0; i < callArgs.size(); i++) {
					if (callArgs.get(i).equals(newSource.getAccessPath().getPlainLocal()) &&
							(triggerInaktiveTaintOrReverseFlow(callArgs.get(i), newSource) || newSource.isAbstractionActive())) {
						Abstraction abs = newSource.deriveNewAbstraction(newSource.getAccessPath().copyWithNewValue
								(paramLocals.get(i)), stmt);
						if (pathTracking == PathTrackingMethod.ForwardTracking)
							((AbstractionWithPath) abs).addPathElement(stmt);
						assert abs != newSource; // our source abstraction must be immutable
						abs.setAbstractionFromCallEdge(abs.clone());
						res.add(abs);
					}
				}
			}
			// staticfieldRefs must be analyzed even if they are not part of the params:
			if (newSource.getAccessPath().isStaticFieldRef()) {
				Abstraction abs;
				abs = newSource.clone();
				assert (abs.equals(newSource) && abs.hashCode() == newSource.hashCode());
				assert abs != newSource; // our source abstraction must be immutable
				abs.setAbstractionFromCallEdge(abs.clone());
				res.add(abs);
			}
			return res;
		}
	};
}
/**
 * Builds the flow function for a return edge: maps taints on the callee's
 * return value, this-local, parameter locals and static fields back into
 * the caller's context, seeding the backwards solver for aliasing where
 * heap objects are involved.
 */
@Override
public FlowFunction<Abstraction> getReturnFlowFunction(final Unit callSite, final SootMethod callee, final Unit exitStmt, final Unit retSite) {
	return new FlowFunction<Abstraction>() {
		@Override
		public Set<Abstraction> computeTargets(Abstraction source) {
			if (stopAfterFirstFlow && !results.isEmpty())
				return Collections.emptySet();
			if (source.equals(zeroValue)) {
				return Collections.emptySet();
			}
			//activate taint if necessary, but in any case we have to take the previous call edge abstraction
			Abstraction newSource;
			if(!source.isAbstractionActive()){
				if(callSite != null
						&& (callSite.equals(source.getActivationUnit()) || callSite.equals(source.getActivationUnitOnCurrentLevel())) ){
					newSource = source.getActiveCopy(true);
				}else{
					newSource = source.cloneUsePredAbstractionOfCG();
				}
			}else{
				newSource = source.cloneUsePredAbstractionOfCG();
			}
			//if abstraction is not active and activeStmt was in this method, it will not get activated = it can be removed:
			if(!newSource.isAbstractionActive() && newSource.getActivationUnit() != null
					&& interproceduralCFG().getMethodOf(newSource.getActivationUnit()).equals(callee))
				return Collections.emptySet();
			Set<Abstraction> res = new HashSet<Abstraction>();
			// Check whether this return is treated as a sink
			if (exitStmt instanceof ReturnStmt) {
				ReturnStmt returnStmt = (ReturnStmt) exitStmt;
				assert returnStmt.getOp() == null
						|| returnStmt.getOp() instanceof Local
						|| returnStmt.getOp() instanceof Constant;
				if (returnStmt.getOp() != null
						&& newSource.getAccessPath().isLocal()
						&& newSource.getAccessPath().getPlainValue().equals(returnStmt.getOp())
						&& sourceSinkManager.isSink(returnStmt, interproceduralCFG())) {
					if (pathTracking != PathTrackingMethod.NoTracking)
						results.addResult(returnStmt.getOp(), returnStmt,
							newSource.getSource(),
							newSource.getSourceContext(),
							((AbstractionWithPath) newSource).getPropagationPath(),
							returnStmt);
					else
						results.addResult(returnStmt.getOp(), returnStmt,
							newSource.getSource(), newSource.getSourceContext());
				}
			}
			// If we have no caller, we have nowhere to propagate. This
			// can happen when leaving the main method.
			if (callSite == null)
				return Collections.emptySet();
			// if we have a returnStmt we have to look at the returned value:
			if (exitStmt instanceof ReturnStmt) {
				ReturnStmt returnStmt = (ReturnStmt) exitStmt;
				Value retLocal = returnStmt.getOp();
				if (callSite instanceof DefinitionStmt) {
					DefinitionStmt defnStmt = (DefinitionStmt) callSite;
					Value leftOp = defnStmt.getLeftOp();
					if (retLocal.equals(newSource.getAccessPath().getPlainLocal()) &&
							(triggerInaktiveTaintOrReverseFlow(leftOp, newSource) || newSource.isAbstractionActive())) {
						// Map the tainted return value onto the caller's left-hand side
						Abstraction abs = newSource.deriveNewAbstraction(newSource.getAccessPath().copyWithNewValue(leftOp), callSite);
						if (pathTracking == PathTrackingMethod.ForwardTracking)
							((AbstractionWithPath) abs).addPathElement(exitStmt);
						assert abs != newSource; // our source abstraction must be immutable
						res.add(abs);
						//call backwards-solver:
						if(triggerInaktiveTaintOrReverseFlow(leftOp, abs)){
							Abstraction bwAbs = newSource.deriveNewAbstraction
									(newSource.getAccessPath().copyWithNewValue(leftOp), false);
							if (abs.isAbstractionActive())
								bwAbs = bwAbs.getAbstractionWithNewActivationUnitOnCurrentLevel(callSite);
							for (Unit predUnit : interproceduralCFG().getPredsOf(callSite))
								bSolver.processEdge(new PathEdge<Unit, Abstraction>(bwAbs, predUnit, bwAbs));
						}
					}
				}
			}
			// easy: static
			if (newSource.getAccessPath().isStaticFieldRef()) {
				// Simply pass on the taint
				res.add(newSource);
				// call backwards-check:
				Abstraction bwAbs = newSource.deriveInactiveAbstraction();
				if (newSource.isAbstractionActive())
					bwAbs = bwAbs.getAbstractionWithNewActivationUnitOnCurrentLevel(callSite);
				for (Unit predUnit : interproceduralCFG().getPredsOf(callSite))
					bSolver.processEdge(new PathEdge<Unit, Abstraction>(bwAbs, predUnit, bwAbs));
			}
			// checks: this/params/fields
			// check one of the call params are tainted (not if simple type)
			Value sourceBase = newSource.getAccessPath().getPlainLocal();
			Value originalCallArg = null;
			for (int i = 0; i < callee.getParameterCount(); i++) {
				if (callee.getActiveBody().getParameterLocal(i).equals(sourceBase)) {
					if (callSite instanceof Stmt) {
						Stmt iStmt = (Stmt) callSite;
						originalCallArg = iStmt.getInvokeExpr().getArg(i);
						//either the param is a fieldref (not possible in jimple?) or an array Or one of its fields is tainted/all fields are tainted
						if (triggerInaktiveTaintOrReverseFlow(originalCallArg, newSource)) {
							Abstraction abs = newSource.deriveNewAbstraction(newSource.getAccessPath().copyWithNewValue(originalCallArg), callSite);
							if (pathTracking == PathTrackingMethod.ForwardTracking)
								abs = ((AbstractionWithPath) abs).addPathElement(exitStmt);
							res.add(abs);
							if(triggerInaktiveTaintOrReverseFlow(originalCallArg, abs)){
								// call backwards-check:
								Abstraction bwAbs = abs.deriveInactiveAbstraction();
								if (abs.isAbstractionActive())
									bwAbs = bwAbs.getAbstractionWithNewActivationUnitOnCurrentLevel(callSite);
								for (Unit predUnit : interproceduralCFG().getPredsOf(callSite))
									bSolver.processEdge(new PathEdge<Unit, Abstraction>(bwAbs, predUnit, bwAbs));
							}
						}
					}
				}
			}
			Local thisL = null;
			if (!callee.isStatic()) {
				thisL = callee.getActiveBody().getThisLocal();
			}
			if (thisL != null) {
				if (thisL.equals(sourceBase)) {
					boolean param = false;
					// check if it is not one of the params (then we have already fixed it)
					for (int i = 0; i < callee.getParameterCount(); i++) {
						if (callee.getActiveBody().getParameterLocal(i).equals(sourceBase)) {
							param = true;
							break;
						}
					}
					if (!param) {
						if (callSite instanceof Stmt) {
							Stmt stmt = (Stmt) callSite;
							if (stmt.getInvokeExpr() instanceof InstanceInvokeExpr) {
								// Map the tainted this-local back onto the caller's base object
								InstanceInvokeExpr iIExpr = (InstanceInvokeExpr) stmt.getInvokeExpr();
								Abstraction abs = newSource.deriveNewAbstraction(newSource.getAccessPath().copyWithNewValue(iIExpr.getBase()));
								if (pathTracking == PathTrackingMethod.ForwardTracking)
									((AbstractionWithPath) abs).addPathElement(stmt);
								res.add(abs);
								if(triggerInaktiveTaintOrReverseFlow(iIExpr.getBase(), abs)){
									Abstraction bwAbs = abs.deriveInactiveAbstraction();
									if (abs.isAbstractionActive())
										bwAbs = bwAbs.getAbstractionWithNewActivationUnitOnCurrentLevel(callSite);
									for (Unit predUnit : interproceduralCFG().getPredsOf(callSite))
										bSolver.processEdge(new PathEdge<Unit, Abstraction>(bwAbs, predUnit, bwAbs));
								}
							}
						}
					}
				}
			}
			return res;
		}
	};
}
/**
 * Builds the call-to-return flow function: handles taint-wrapper results,
 * decides whether a taint may bypass the callee, models native calls
 * conservatively, and records source/sink hits at the call site.
 */
@Override
public FlowFunction<Abstraction> getCallToReturnFlowFunction(final Unit call, final Unit returnSite) {
	// special treatment for native methods:
	if (call instanceof Stmt) {
		final Stmt iStmt = (Stmt) call;
		final List<Value> callArgs = iStmt.getInvokeExpr().getArgs();
		return new FlowFunction<Abstraction>() {
			@Override
			public Set<Abstraction> computeTargets(Abstraction source) {
				if (stopAfterFirstFlow && !results.isEmpty())
					return Collections.emptySet();
				Abstraction newSource;
				//check inactive elements:
				// FIX: the activation-unit check must be inside the conjunction.
				// The previous parenthesization
				//   (!active && call==activationUnit) || call==activationUnitOnCurrentLevel
				// activated already-active abstractions and bypassed the
				// isAbstractionActive() guard; the corrected form matches the
				// pattern used by the normal- and call-flow functions.
				if (!source.isAbstractionActive() && (call.equals(source.getActivationUnit()) || call.equals(source.getActivationUnitOnCurrentLevel()))){
					newSource = source.getActiveCopy(false);
				}else{
					newSource = source;
				}
				Set<Abstraction> res = new HashSet<Abstraction>();
				res.addAll(computeWrapperTaints(iStmt, newSource));
				// We can only pass on a taint if it is neither a parameter nor the
				// base object of the current call. If this call overwrites the left
				// side, the taint is never passed on.
				boolean passOn = !(call instanceof AssignStmt && ((AssignStmt) call).getLeftOp().equals
						(newSource.getAccessPath().getPlainLocal()));
				//we only can remove the taint if we step into the call/return edges
				//otherwise we will loose taint - see ArrayTests/arrayCopyTest
				if (passOn)
					if(hasValidCallees(call) || (taintWrapper != null
							&& taintWrapper.isExclusive(iStmt, newSource.getAccessPath()))) {
						if (iStmt.getInvokeExpr() instanceof InstanceInvokeExpr)
							if (((InstanceInvokeExpr) iStmt.getInvokeExpr()).getBase().equals
									(newSource.getAccessPath().getPlainLocal())) {
								passOn = false;
							}
						if (passOn)
							for (int i = 0; i < callArgs.size(); i++)
								if (callArgs.get(i).equals(newSource.getAccessPath().getPlainLocal()) && isTransferableValue(callArgs.get(i))) {
									passOn = false;
									break;
								}
						//static variables are always propagated if they are not overwritten. So if we have at least one call/return edge pair,
						//we can be sure that the value does not get "lost" if we do not pass it on:
						if(newSource.getAccessPath().isStaticFieldRef()){
							passOn = false;
						}
					}
				if (passOn)
					res.add(newSource);
				if (iStmt.getInvokeExpr().getMethod().isNative()) {
					if (callArgs.contains(newSource.getAccessPath().getPlainValue())) {
						// java uses call by value, but fields of complex objects can be changed (and tainted), so use this conservative approach:
						res.addAll(ncHandler.getTaintedValues(iStmt, newSource, callArgs));
					}
				}
				if (iStmt instanceof AssignStmt) {
					final AssignStmt stmt = (AssignStmt) iStmt;
					if (sourceSinkManager.isSource(stmt, interproceduralCFG())) {
						// Parameterized logging avoids string concatenation when debug is off
						logger.debug("Found source: {}", stmt.getInvokeExpr().getMethod());
						Abstraction abs;
						if (pathTracking == PathTrackingMethod.ForwardTracking)
							abs = new AbstractionWithPath(stmt.getLeftOp(),
									stmt.getInvokeExpr(),
									stmt, false, true, iStmt).addPathElement(call);
						else
							abs = new Abstraction(stmt.getLeftOp(),
									stmt.getInvokeExpr(), stmt, false, true, iStmt);
						abs.setZeroAbstraction(source.getZeroAbstraction());
						res.add(abs);
						res.remove(zeroValue);
					}
				}
				// if we have called a sink we have to store the path from the source - in case one of the params is tainted!
				if (sourceSinkManager.isSink(iStmt, interproceduralCFG())) {
					boolean taintedParam = false;
					for (int i = 0; i < callArgs.size(); i++) {
						if (callArgs.get(i).equals(newSource.getAccessPath().getPlainLocal())) {
							taintedParam = true;
							break;
						}
					}
					if (taintedParam) {
						if (pathTracking != PathTrackingMethod.NoTracking)
							results.addResult(iStmt.getInvokeExpr(), iStmt,
								newSource.getSource(),
								newSource.getSourceContext(),
								((AbstractionWithPath) newSource).getPropagationPath(),
								call);
						else
							results.addResult(iStmt.getInvokeExpr(), iStmt,
								newSource.getSource(), newSource.getSourceContext());
					}
					// if the base object which executes the method is tainted the sink is reached, too.
					if (iStmt.getInvokeExpr() instanceof InstanceInvokeExpr) {
						InstanceInvokeExpr vie = (InstanceInvokeExpr) iStmt.getInvokeExpr();
						if (vie.getBase().equals(newSource.getAccessPath().getPlainValue())) {
							if (pathTracking != PathTrackingMethod.NoTracking)
								results.addResult(iStmt.getInvokeExpr(), iStmt,
									newSource.getSource(),
									newSource.getSourceContext(),
									((AbstractionWithPath) newSource).getPropagationPath(),
									call);
							else
								results.addResult(iStmt.getInvokeExpr(), iStmt,
									newSource.getSource(), newSource.getSourceContext());
						}
					}
				}
				return res;
			}
			/**
			 * Checks whether the given call has at least one valid target,
			 * i.e. a callee with a body.
			 * @param call The call site to check
			 * @return True if there is at least one callee implementation
			 * for the given call, otherwise false
			 */
			private boolean hasValidCallees(Unit call) {
				Set<SootMethod> callees = interproceduralCFG().getCalleesOfCallAt(call);
				for (SootMethod callee : callees)
					if (callee.isConcrete())
						return true;
				return false;
			}
		};
	}
	return Identity.v();
}
};
}
/**
 * Creates a new instance of the InfoflowProblem class using a default
 * Jimple-based bidirectional ICFG and a {@link DefaultSourceSinkManager}
 * built from the given signature lists.
 * @param sourceList The method signatures to be treated as sources
 * @param sinkList The method signatures to be treated as sinks
 */
public InfoflowProblem(List<String> sourceList, List<String> sinkList) {
	this(new JimpleBasedBiDiICFG(), new DefaultSourceSinkManager(sourceList, sinkList));
}
/**
 * Creates a new instance of the InfoflowProblem class using a default
 * Jimple-based bidirectional ICFG.
 * @param sourceSinkManager The manager object that decides which statements
 * are sources and which ones are sinks
 */
public InfoflowProblem(ISourceSinkManager sourceSinkManager) {
	this(new JimpleBasedBiDiICFG(), sourceSinkManager);
}
/**
 * Creates a new instance of the InfoflowProblem class on the given ICFG with
 * a {@link DefaultSourceSinkManager} built from the given signature lists.
 * @param icfg The interprocedural control-flow graph on which to solve the problem
 * @param sourceList The method signatures to be treated as sources
 * @param sinkList The method signatures to be treated as sinks
 */
public InfoflowProblem(InterproceduralCFG<Unit, SootMethod> icfg, List<String> sourceList, List<String> sinkList) {
	this(icfg, new DefaultSourceSinkManager(sourceList, sinkList));
}
/**
 * Creates a new instance of the InfoflowProblem class. This is the base
 * constructor to which all convenience constructors delegate.
 * @param icfg The interprocedural control-flow graph on which to solve the problem
 * @param sourceSinkManager The manager object that decides which statements
 * are sources and which ones are sinks
 */
public InfoflowProblem(InterproceduralCFG<Unit, SootMethod> icfg, ISourceSinkManager sourceSinkManager) {
	super(icfg);
	this.sourceSinkManager = sourceSinkManager;
}
/**
 * Creates a new instance of the InfoflowProblem class using a default
 * Jimple-based bidirectional ICFG and an explicit set of analysis seeds.
 * @param mySourceSinkManager The manager object that decides which statements
 * are sources and which ones are sinks
 * @param analysisSeeds The statements at which taint propagation shall start
 */
public InfoflowProblem(ISourceSinkManager mySourceSinkManager, Set<Unit> analysisSeeds) {
	this(new JimpleBasedBiDiICFG(), mySourceSinkManager);
	// NOTE(review): zeroValue is still null here (it is created lazily in
	// createZeroValue()), so every seed initially maps to singleton(null).
	// Confirm that the solver initializes/replaces the zero fact before use.
	for (Unit u : analysisSeeds)
		this.initialSeeds.put(u, Collections.singleton(zeroValue));
}
/**
 * Registers the solver used for backwards (alias) propagation.
 * @param backwardSolver The backwards IFDS solver
 */
public void setBackwardSolver(InfoflowSolver backwardSolver){
	bSolver = backwardSolver;
}
/**
 * Disables the solver's automatic addition of the zero fact; this problem
 * manages the zero abstraction itself.
 */
@Override
public boolean autoAddZero() {
	return false;
}
}
|
package soot.jimple.infoflow;
import heros.FlowFunction;
import heros.FlowFunctions;
import heros.InterproceduralCFG;
import heros.flowfunc.Identity;
import heros.solver.PathEdge;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import soot.Local;
import soot.SootMethod;
import soot.Unit;
import soot.Value;
import soot.jimple.ArrayRef;
import soot.jimple.AssignStmt;
import soot.jimple.CaughtExceptionRef;
import soot.jimple.Constant;
import soot.jimple.DefinitionStmt;
import soot.jimple.IdentityStmt;
import soot.jimple.InstanceFieldRef;
import soot.jimple.InstanceInvokeExpr;
import soot.jimple.InvokeExpr;
import soot.jimple.ReturnStmt;
import soot.jimple.StaticFieldRef;
import soot.jimple.Stmt;
import soot.jimple.ThrowStmt;
import soot.jimple.infoflow.data.Abstraction;
import soot.jimple.infoflow.data.AbstractionWithPath;
import soot.jimple.infoflow.data.AccessPath;
import soot.jimple.infoflow.heros.InfoflowSolver;
import soot.jimple.infoflow.source.DefaultSourceSinkManager;
import soot.jimple.infoflow.source.ISourceSinkManager;
import soot.jimple.infoflow.util.BaseSelector;
import soot.jimple.toolkits.ide.icfg.JimpleBasedBiDiICFG;
public class InfoflowProblem extends AbstractInfoflowProblem {
// Backwards IFDS solver used to propagate alias taints against the control flow
private InfoflowSolver bSolver;
// Verbose console logging toggle (used when a source is found)
private final static boolean DEBUG = false;
// Decides which statements count as taint sources and sinks
private final ISourceSinkManager sourceSinkManager;
// Zero (empty) fact of the IFDS problem; null here — apparently initialized elsewhere, verify
private Abstraction zeroValue = null;
/**
 * Computes the taints produced by a taint wrapper object.
 * @param iStmt The call statement the taint wrapper shall check for well-
 * known methods that introduce black-box taint propagation
 * @param source The taint source
 * @return The taints computed by the wrapper, or an empty set if no taint
 * wrapper is configured or the tainted value is not referenced by this call
 */
private Set<Abstraction> computeWrapperTaints
        (final Stmt iStmt,
        Abstraction source) {
    // Without a taint wrapper there is nothing to compute
    if (taintWrapper == null)
        return Collections.emptySet();
    // For non-static taints on instance calls, the tainted value must occur
    // as the base object or as one of the arguments for the wrapper to apply
    if (!source.getAccessPath().isStaticFieldRef())
        if (iStmt.getInvokeExpr() instanceof InstanceInvokeExpr) {
            InstanceInvokeExpr iiExpr = (InstanceInvokeExpr) iStmt.getInvokeExpr();
            boolean found = iiExpr.getBase().equals(source.getAccessPath().getPlainValue());
            if (!found)
                for (Value param : iiExpr.getArgs())
                    if (source.getAccessPath().getPlainValue().equals(param)) {
                        found = true;
                        break;
                    }
            if (!found)
                return Collections.emptySet();
        }
    Set<AccessPath> vals = taintWrapper.getTaintsForMethod(iStmt, source.getAccessPath());
    if (vals == null)
        return Collections.emptySet();
    Set<Abstraction> res = new HashSet<Abstraction>();
    for (AccessPath val : vals) {
        Abstraction newAbs = source.deriveNewAbstraction(val);
        if (pathTracking == PathTrackingMethod.ForwardTracking)
            ((AbstractionWithPath) newAbs).addPathElement(iStmt);
        res.add(newAbs);
        // If the taint wrapper taints the base object (new taint), this must be propagated
        // backwards as there might be aliases for the base object
        if (iStmt.getInvokeExpr() instanceof InstanceInvokeExpr) {
            InstanceInvokeExpr iiExpr = (InstanceInvokeExpr) iStmt.getInvokeExpr();
            if (iiExpr.getBase().equals(newAbs.getAccessPath().getPlainValue())
                    || newAbs.getAccessPath().isStaticFieldRef()) {
                Abstraction bwAbs = source.deriveNewAbstraction(val, iStmt, false);
                for (Unit predUnit : interproceduralCFG().getPredsOf(iStmt))
                    bSolver.processEdge(new PathEdge<Unit, Abstraction>(bwAbs, predUnit, bwAbs));
            }
        }
    }
    return res;
}
@Override
public FlowFunctions<Unit, Abstraction, SootMethod> createFlowFunctionsFactory() {
return new FlowFunctions<Unit, Abstraction, SootMethod>() {
/**
 * Creates a new taint abstraction for the given value and, for heap
 * objects, also seeds the backwards (alias) solver at all predecessors
 * of the source statement.
 * @param src The source statement from which the taint originated
 * @param targetValue The target value that shall now be tainted
 * @param source The incoming taint abstraction from the source
 * @param taintSet The taint set to which to add all newly produced
 * taints
 * @param cutFirstField Whether the first field of the source access path
 * shall be cut off when deriving the new abstraction
 */
private void addTaintViaStmt
        (final Unit src,
        final Value targetValue,
        Abstraction source,
        Set<Abstraction> taintSet,
        boolean cutFirstField) {
    // The incoming taint stays alive alongside the newly derived one
    taintSet.add(source);
    Abstraction newAbs = source.deriveNewAbstraction(targetValue, cutFirstField, src);
    if (pathTracking == PathTrackingMethod.ForwardTracking)
        ((AbstractionWithPath) newAbs).addPathElement(src);
    taintSet.add(newAbs);
    //only heap-objects
    if (triggerInaktiveTaintOrReverseFlow(targetValue, source)) {
        // call backwards-check:
        Abstraction bwAbs = newAbs.deriveInactiveAbstraction();
        for (Unit predUnit : interproceduralCFG().getPredsOf(src)){
            bSolver.processEdge(new PathEdge<Unit, Abstraction>(bwAbs, predUnit, bwAbs));
        }
    }
}
/**
 * Builds the flow function for a normal intra-procedural CFG edge from
 * {@code src} to {@code dest}. Identity statements bind parameters /
 * caught exceptions and may create new source facts; assignments perform
 * the actual taint propagation; return and throw statements get special
 * handling (sink reporting resp. exception tainting). Any other statement
 * propagates facts unchanged.
 */
@Override
public FlowFunction<Abstraction> getNormalFlowFunction(final Unit src, final Unit dest) {
    // If we compute flows on parameters, we create the initial
    // flow fact here
    if (src instanceof IdentityStmt) {
        final IdentityStmt is = (IdentityStmt) src;
        return new FlowFunction<Abstraction>() {
            @Override
            public Set<Abstraction> computeTargets(Abstraction source) {
                if (stopAfterFirstFlow && !results.isEmpty())
                    return Collections.emptySet();
                Set<Abstraction> res = new HashSet<Abstraction>();
                boolean addOriginal = true;
                // Binding a caught exception consumes an exception-carrying taint
                if (is.getRightOp() instanceof CaughtExceptionRef) {
                    if (source.getExceptionThrown()) {
                        res.add(source.deriveNewAbstractionOnCatch(is.getLeftOp(), is));
                        addOriginal = false;
                    }
                }
                if (addOriginal)
                    res.add(source);
                // Identity statements marked as sources spawn a fresh taint fact
                if (sourceSinkManager.isSource(is, interproceduralCFG())) {
                    Abstraction abs;
                    if (pathTracking != PathTrackingMethod.NoTracking)
                        abs = new AbstractionWithPath(is.getLeftOp(),
                            is.getRightOp(),
                            is, false, true, is).addPathElement(is);
                    else
                        abs = new Abstraction(is.getLeftOp(),
                            is.getRightOp(), is, false, true, is);
                    abs.setZeroAbstraction(source.getZeroAbstraction());
                    res.add(abs);
                }
                return res;
            }
        };
    }
    // taint is propagated with assignStmt
    else if (src instanceof AssignStmt) {
        final AssignStmt assignStmt = (AssignStmt) src;
        Value right = assignStmt.getRightOp();
        Value left = assignStmt.getLeftOp();
        final Value leftValue = BaseSelector.selectBase(left, false);
        final Set<Value> rightVals = BaseSelector.selectBaseList(right, true);
        return new FlowFunction<Abstraction>() {
            @Override
            public Set<Abstraction> computeTargets(Abstraction source) {
                if (stopAfterFirstFlow && !results.isEmpty())
                    return Collections.emptySet();
                boolean addLeftValue = false;
                boolean cutFirstField = false;
                Set<Abstraction> res = new HashSet<Abstraction>();
                // shortcuts:
                // on NormalFlow taint cannot be created
                if (source.equals(zeroValue)) {
                    return Collections.emptySet();
                }
                // Re-activate an inactive taint once its activation unit is reached
                Abstraction newSource;
                if (!source.isAbstractionActive() && (src.equals(source.getActivationUnit()) || src.equals(source.getActivationUnitOnCurrentLevel()))){
                    newSource = source.getActiveCopy(false);
                }else{
                    newSource = source;
                }
                for (Value rightValue : rightVals) {
                    // check if static variable is tainted (same name, same class)
                    //y = X.f && X.f tainted --> y, X.f tainted
                    if (newSource.getAccessPath().isStaticFieldRef()) {
                        if (rightValue instanceof StaticFieldRef) {
                            StaticFieldRef rightRef = (StaticFieldRef) rightValue;
                            if (newSource.getAccessPath().getFirstField().equals(rightRef.getField())) {
                                addLeftValue = true;
                                cutFirstField = true;
                            }
                        }
                    } else {
                        // if both are fields, we have to compare their fieldName via equals and their bases
                        //y = x.f && x tainted --> y, x tainted
                        //y = x.f && x.f tainted --> y, x tainted
                        if (rightValue instanceof InstanceFieldRef) {
                            InstanceFieldRef rightRef = (InstanceFieldRef) rightValue;
                            Local rightBase = (Local) rightRef.getBase();
                            Local sourceBase = newSource.getAccessPath().getPlainLocal();
                            if (rightBase.equals(sourceBase)) {
                                if (newSource.getAccessPath().isInstanceFieldRef()) {
                                    if (rightRef.getField().equals(newSource.getAccessPath().getFirstField())) {
                                        addLeftValue = true;
                                        cutFirstField = true;
                                    }
                                } else {
                                    addLeftValue = true;
                                }
                            }
                        }
                        // indirect taint propagation:
                        // if rightvalue is local and source is instancefield of this local:
                        // y = x && x.f tainted --> y.f, x.f tainted
                        // y.g = x && x.f tainted --> y.g.f, x.f tainted
                        if (rightValue instanceof Local && newSource.getAccessPath().isInstanceFieldRef()) {
                            Local base = newSource.getAccessPath().getPlainLocal();
                            if (rightValue.equals(base)) {
                                if (leftValue instanceof Local) {
                                    if (pathTracking == PathTrackingMethod.ForwardTracking)
                                        res.add(((AbstractionWithPath) newSource.deriveNewAbstraction
                                            (newSource.getAccessPath().copyWithNewValue(leftValue), assignStmt)).addPathElement(src));
                                    else
                                        res.add(newSource.deriveNewAbstraction(newSource.getAccessPath().copyWithNewValue(leftValue), assignStmt));
                                } else {
                                    // access path length = 1 - taint entire value if left is field reference
                                    if (pathTracking == PathTrackingMethod.ForwardTracking)
                                        res.add(((AbstractionWithPath) newSource.deriveNewAbstraction(leftValue, assignStmt))
                                            .addPathElement(src));
                                    else
                                        res.add(newSource.deriveNewAbstraction(leftValue, assignStmt));
                                }
                            }
                        }
                        if (rightValue instanceof ArrayRef) {
                            //y = x[i] && x tainted -> x, y tainted
                            Local rightBase = (Local) ((ArrayRef) rightValue).getBase();
                            if (rightBase.equals(newSource.getAccessPath().getPlainValue())) {
                                addLeftValue = true;
                            }
                        }
                        // generic case, is true for Locals, ArrayRefs that are equal etc..
                        //y = x && x tainted --> y, x tainted
                        if (rightValue.equals(newSource.getAccessPath().getPlainValue())) {
                            addLeftValue = true;
                        }
                    }
                }
                // if one of them is true -> add leftValue
                if (addLeftValue) {
                    // Report a leak if this assignment is itself a sink
                    if (sourceSinkManager.isSink(assignStmt, interproceduralCFG())) {
                        if (pathTracking != PathTrackingMethod.NoTracking)
                            results.addResult(leftValue, assignStmt,
                                newSource.getSource(),
                                newSource.getSourceContext(),
                                ((AbstractionWithPath) newSource).getPropagationPath(),
                                assignStmt);
                        else
                            results.addResult(leftValue, assignStmt,
                                newSource.getSource(), newSource.getSourceContext());
                    }
                    if(triggerInaktiveTaintOrReverseFlow(leftValue, newSource) || newSource.isAbstractionActive())
                        addTaintViaStmt(src, leftValue, newSource, res, cutFirstField);
                    return res;
                }
                //if leftvalue contains the tainted value -> it is overwritten - remove taint:
                //but not for arrayRefs:
                // x[i] = y --> taint is preserved since we do not distinguish between elements of collections
                //because we do not use a MUST-Alias analysis, we cannot delete aliases of taints
                if(((AssignStmt)src).getLeftOp() instanceof ArrayRef){
                    return Collections.singleton(newSource);
                }
                if(newSource.getAccessPath().isInstanceFieldRef()){
                    //x.f = y && x.f tainted --> no taint propagated
                    if (leftValue instanceof InstanceFieldRef) {
                        InstanceFieldRef leftRef = (InstanceFieldRef) leftValue;
                        if (leftRef.getBase().equals(newSource.getAccessPath().getPlainValue())) {
                            if (leftRef.getField().equals(newSource.getAccessPath().getFirstField())) {
                                if(newSource.isAbstractionActive()){
                                    return Collections.emptySet();
                                }else{
                                    //start backward:
                                    // NOTE(review): the backwards-solver seeding below is
                                    // commented out; bwAbs is derived but unused — verify intent
                                    for (Value rightValue : rightVals) {
                                        Abstraction newAbs = newSource.deriveNewAbstraction(rightValue, true, src);
                                        if (triggerInaktiveTaintOrReverseFlow(rightValue, newAbs)) {
                                            Abstraction bwAbs = newAbs.deriveInactiveAbstraction();
                                            // for (Unit predUnit : interproceduralCFG().getPredsOf(src))
                                            // bSolver.processEdge(new PathEdge<Unit, Abstraction, SootMethod>(bwAbs, predUnit, bwAbs));
                                        }
                                    }
                                }
                            }
                        }
                        //x = y && x.f tainted -> no taint propagated
                    }else if (leftValue instanceof Local){
                        if (leftValue.equals(newSource.getAccessPath().getPlainValue())) {
                            if(newSource.isAbstractionActive()){
                                return Collections.emptySet();
                            }else{
                                //start backward:
                                for (Value rightValue : rightVals) {
                                    Abstraction newAbs = newSource.deriveNewAbstraction(rightValue, false, src);
                                    if (triggerInaktiveTaintOrReverseFlow(rightValue, newAbs)) {
                                        Abstraction bwAbs = newAbs.deriveInactiveAbstraction();
                                        // for (Unit predUnit : interproceduralCFG().getPredsOf(src))
                                        // bSolver.processEdge(new PathEdge<Unit, Abstraction, SootMethod>(bwAbs, predUnit, bwAbs));
                                    }
                                }
                            }
                        }
                    }
                }else if(newSource.getAccessPath().isStaticFieldRef()){
                    //X.f = y && X.f tainted -> no taint propagated
                    if(leftValue instanceof StaticFieldRef && ((StaticFieldRef)leftValue).getField().equals(newSource.getAccessPath().getFirstField())){
                        if(newSource.isAbstractionActive()){
                            return Collections.emptySet();
                        }else{
                            //start backward:
                            for (Value rightValue : rightVals) {
                                Abstraction newAbs = newSource.deriveNewAbstraction(rightValue, false, src);
                                if (triggerInaktiveTaintOrReverseFlow(rightValue, newAbs)) {
                                    Abstraction bwAbs = newAbs.deriveInactiveAbstraction();
                                    // for (Unit predUnit : interproceduralCFG().getPredsOf(src))
                                    // bSolver.processEdge(new PathEdge<Unit, Abstraction, SootMethod>(bwAbs, predUnit, bwAbs));
                                }
                            }
                        }
                    }
                }
                //when the fields of an object are tainted, but the base object is overwritten then the fields should not be tainted any more
                //x = y && x.f tainted -> no taint propagated
                if(newSource.getAccessPath().isLocal() && leftValue.equals(newSource.getAccessPath().getPlainValue())){
                    if(newSource.isAbstractionActive()){
                        return Collections.emptySet();
                    }
                }
                //nothing applies: z = y && x tainted -> taint is preserved
                return Collections.singleton(newSource);
            }
        };
    }
    // for unbalanced problems, return statements correspond to
    // normal flows, not return flows, because there is no return
    // site we could jump to
    else if (src instanceof ReturnStmt) {
        final ReturnStmt returnStmt = (ReturnStmt) src;
        return new FlowFunction<Abstraction>() {
            @Override
            public Set<Abstraction> computeTargets(Abstraction source) {
                if (stopAfterFirstFlow && !results.isEmpty())
                    return Collections.emptySet();
                // Report returning a tainted value through a sink return statement
                if (returnStmt.getOp().equals(source.getAccessPath().getPlainValue()) && sourceSinkManager.isSink(returnStmt, interproceduralCFG())) {
                    if (pathTracking != PathTrackingMethod.NoTracking)
                        results.addResult(returnStmt.getOp(), returnStmt,
                            source.getSource(),
                            source.getSourceContext(),
                            ((AbstractionWithPath) source).getPropagationPath(),
                            returnStmt);
                    else
                        results.addResult(returnStmt.getOp(), returnStmt,
                            source.getSource(), source.getSourceContext());
                }
                return Collections.singleton(source);
            }
        };
    }
    else if (src instanceof ThrowStmt) {
        final ThrowStmt throwStmt = (ThrowStmt) src;
        return new FlowFunction<Abstraction>() {
            @Override
            public Set<Abstraction> computeTargets(Abstraction source) {
                if (stopAfterFirstFlow && !results.isEmpty())
                    return Collections.emptySet();
                // Throwing a tainted value marks the abstraction as exception-borne
                if (throwStmt.getOp().equals(source.getAccessPath().getPlainLocal()))
                    return Collections.singleton(source.deriveNewAbstractionOnThrow());
                return Collections.singleton(source);
            }
        };
    }
    return Identity.v();
}
/**
 * Builds the call flow function mapping caller facts at {@code src} into
 * callee {@code dest}: a tainted base object maps to the callee's
 * this-local, tainted arguments map to parameter locals, and static-field
 * taints pass through unchanged. Taint-wrapped (exclusive) calls and
 * non-inspected sinks kill all facts here.
 */
@Override
public FlowFunction<Abstraction> getCallFlowFunction(final Unit src, final SootMethod dest) {
    final Stmt stmt = (Stmt) src;
    final InvokeExpr ie = stmt.getInvokeExpr();
    final List<Value> callArgs = ie.getArgs();
    final List<Value> paramLocals = new ArrayList<Value>();
    for (int i = 0; i < dest.getParameterCount(); i++) {
        paramLocals.add(dest.getActiveBody().getParameterLocal(i));
    }
    return new FlowFunction<Abstraction>() {
        @Override
        public Set<Abstraction> computeTargets(Abstraction source) {
            if (stopAfterFirstFlow && !results.isEmpty())
                return Collections.emptySet();
            if (source.equals(zeroValue)) {
                return Collections.singleton(source);
            }
            if(taintWrapper != null && taintWrapper.isExclusive(stmt, source.getAccessPath())) {
                //taint is propagated in CallToReturnFunction, so we do not need any taint here:
                return Collections.emptySet();
            }
            // Cannot map facts into a callee without a body
            if (!dest.isConcrete())
                return Collections.emptySet();
            //if we do not have to look into sinks:
            if (!inspectSinks && sourceSinkManager.isSink(stmt, interproceduralCFG())) {
                return Collections.emptySet();
            }
            // Re-activate an inactive taint once its activation unit is reached
            Abstraction newSource;
            if (!source.isAbstractionActive() && (src.equals(source.getActivationUnit()) || src.equals(source.getActivationUnitOnCurrentLevel()))){
                newSource = source.getActiveCopy(false);
            }else{
                newSource = source;
            }
            Set<Abstraction> res = new HashSet<Abstraction>();
            // check if whole object is tainted (happens with strings, for example:)
            if (!dest.isStatic() && ie instanceof InstanceInvokeExpr) {
                InstanceInvokeExpr vie = (InstanceInvokeExpr) ie;
                // this might be enough because every call must happen with a local variable which is tainted itself:
                if (vie.getBase().equals(newSource.getAccessPath().getPlainValue())) {
                    Abstraction abs = newSource.deriveNewAbstraction(newSource.getAccessPath().copyWithNewValue
                            (dest.getActiveBody().getThisLocal()));
                    if (pathTracking == PathTrackingMethod.ForwardTracking)
                        ((AbstractionWithPath) abs).addPathElement(stmt);
                    //add new callArgs:
                    assert abs != newSource; // our source abstraction must be immutable
                    abs.setAbstractionFromCallEdge(abs.clone());
                    res.add(abs);
                }
            }
            //special treatment for clinit methods - no param mapping possible
            if(!dest.getName().equals("<clinit>")) {
                assert dest.getParameterCount() == callArgs.size();
                // check if param is tainted:
                for (int i = 0; i < callArgs.size(); i++) {
                    if (callArgs.get(i).equals(newSource.getAccessPath().getPlainLocal()) &&
                            (triggerInaktiveTaintOrReverseFlow(callArgs.get(i), newSource) || newSource.isAbstractionActive())) {
                        Abstraction abs = newSource.deriveNewAbstraction(newSource.getAccessPath().copyWithNewValue
                                (paramLocals.get(i)), stmt);
                        if (pathTracking == PathTrackingMethod.ForwardTracking)
                            ((AbstractionWithPath) abs).addPathElement(stmt);
                        assert abs != newSource; // our source abstraction must be immutable
                        abs.setAbstractionFromCallEdge(abs.clone());
                        res.add(abs);
                    }
                }
            }
            // staticfieldRefs must be analyzed even if they are not part of the params:
            if (newSource.getAccessPath().isStaticFieldRef()) {
                Abstraction abs;
                abs = newSource.clone();
                assert (abs.equals(newSource) && abs.hashCode() == newSource.hashCode());
                assert abs != newSource; // our source abstraction must be immutable
                abs.setAbstractionFromCallEdge(abs.clone());
                res.add(abs);
            }
            return res;
        }
    };
}
/**
 * Builds the return flow function mapping facts at {@code exitStmt} of
 * {@code callee} back to the caller at {@code callSite}: returned values
 * map to the assignment target, the this-reference and parameters map back
 * to caller base/argument values, static-field taints pass through, sink
 * returns are reported, and heap aliases seed the backwards solver.
 */
@Override
public FlowFunction<Abstraction> getReturnFlowFunction(final Unit callSite, final SootMethod callee, final Unit exitStmt, final Unit retSite) {
    return new FlowFunction<Abstraction>() {
        @Override
        public Set<Abstraction> computeTargets(Abstraction source) {
            if (stopAfterFirstFlow && !results.isEmpty())
                return Collections.emptySet();
            if (source.equals(zeroValue)) {
                return Collections.emptySet();
            }
            //activate taint if necessary, but in any case we have to take the previous call edge abstraction
            Abstraction newSource;
            if(!source.isAbstractionActive()){
                if(callSite != null
                        && (callSite.equals(source.getActivationUnit()) || callSite.equals(source.getActivationUnitOnCurrentLevel())) ){
                    newSource = source.getActiveCopy(true);
                }else{
                    newSource = source.cloneUsePredAbstractionOfCG();
                }
            }else{
                newSource = source.cloneUsePredAbstractionOfCG();
            }
            //if abstraction is not active and activeStmt was in this method, it will not get activated = it can be removed:
            if(!newSource.isAbstractionActive() && newSource.getActivationUnit() != null
                    && interproceduralCFG().getMethodOf(newSource.getActivationUnit()).equals(callee))
                return Collections.emptySet();
            Set<Abstraction> res = new HashSet<Abstraction>();
            // Check whether this return is treated as a sink
            if (exitStmt instanceof ReturnStmt) {
                ReturnStmt returnStmt = (ReturnStmt) exitStmt;
                assert returnStmt.getOp() == null
                        || returnStmt.getOp() instanceof Local
                        || returnStmt.getOp() instanceof Constant;
                if (returnStmt.getOp() != null
                        && newSource.getAccessPath().isLocal()
                        && newSource.getAccessPath().getPlainValue().equals(returnStmt.getOp())
                        && sourceSinkManager.isSink(returnStmt, interproceduralCFG())) {
                    if (pathTracking != PathTrackingMethod.NoTracking)
                        results.addResult(returnStmt.getOp(), returnStmt,
                                newSource.getSource(),
                                newSource.getSourceContext(),
                                ((AbstractionWithPath) newSource).getPropagationPath(),
                                returnStmt);
                    else
                        results.addResult(returnStmt.getOp(), returnStmt,
                                newSource.getSource(), newSource.getSourceContext());
                }
            }
            // If we have no caller, we have nowhere to propagate. This
            // can happen when leaving the main method.
            if (callSite == null)
                return Collections.emptySet();
            // if we have a returnStmt we have to look at the returned value:
            if (exitStmt instanceof ReturnStmt) {
                ReturnStmt returnStmt = (ReturnStmt) exitStmt;
                Value retLocal = returnStmt.getOp();
                if (callSite instanceof DefinitionStmt) {
                    DefinitionStmt defnStmt = (DefinitionStmt) callSite;
                    Value leftOp = defnStmt.getLeftOp();
                    if (retLocal.equals(newSource.getAccessPath().getPlainLocal()) &&
                            (triggerInaktiveTaintOrReverseFlow(leftOp, newSource) || newSource.isAbstractionActive())) {
                        Abstraction abs = newSource.deriveNewAbstraction(newSource.getAccessPath().copyWithNewValue(leftOp), callSite);
                        if (pathTracking == PathTrackingMethod.ForwardTracking)
                            ((AbstractionWithPath) abs).addPathElement(exitStmt);
                        assert abs != newSource; // our source abstraction must be immutable
                        res.add(abs);
                        //call backwards-solver:
                        if(triggerInaktiveTaintOrReverseFlow(leftOp, abs)){
                            Abstraction bwAbs = newSource.deriveNewAbstraction(newSource.getAccessPath().copyWithNewValue(leftOp), callSite, false);
                            if (abs.isAbstractionActive())
                                bwAbs = bwAbs.getAbstractionWithNewActivationUnitOnCurrentLevel(callSite);
                            for (Unit predUnit : interproceduralCFG().getPredsOf(callSite))
                                bSolver.processEdge(new PathEdge<Unit, Abstraction>(bwAbs, predUnit, bwAbs));
                        }
                    }
                }
            }
            // easy: static
            if (newSource.getAccessPath().isStaticFieldRef()) {
                Abstraction abs = newSource.clone();
                assert (abs.equals(newSource) && abs.hashCode() == newSource.hashCode());
                res.add(abs);
                // call backwards-check:
                Abstraction bwAbs = newSource.deriveInactiveAbstraction();
                if (newSource.isAbstractionActive())
                    bwAbs = bwAbs.getAbstractionWithNewActivationUnitOnCurrentLevel(callSite);
                for (Unit predUnit : interproceduralCFG().getPredsOf(callSite))
                    bSolver.processEdge(new PathEdge<Unit, Abstraction>(bwAbs, predUnit, bwAbs));
            }
            // checks: this/params/fields
            // check one of the call params are tainted (not if simple type)
            Value sourceBase = newSource.getAccessPath().getPlainLocal();
            Value originalCallArg = null;
            for (int i = 0; i < callee.getParameterCount(); i++) {
                if (callee.getActiveBody().getParameterLocal(i).equals(sourceBase)) {
                    if (callSite instanceof Stmt) {
                        Stmt iStmt = (Stmt) callSite;
                        originalCallArg = iStmt.getInvokeExpr().getArg(i);
                        //either the param is a fieldref (not possible in jimple?) or an array Or one of its fields is tainted/all fields are tainted
                        if (triggerInaktiveTaintOrReverseFlow(originalCallArg, newSource)) {
                            Abstraction abs = newSource.deriveNewAbstraction(newSource.getAccessPath().copyWithNewValue(originalCallArg), callSite);
                            if (pathTracking == PathTrackingMethod.ForwardTracking)
                                abs = ((AbstractionWithPath) abs).addPathElement(exitStmt);
                            res.add(abs);
                            if(triggerInaktiveTaintOrReverseFlow(originalCallArg, abs)){
                                // call backwards-check:
                                Abstraction bwAbs = abs.deriveInactiveAbstraction();
                                if (abs.isAbstractionActive())
                                    bwAbs = bwAbs.getAbstractionWithNewActivationUnitOnCurrentLevel(callSite);
                                for (Unit predUnit : interproceduralCFG().getPredsOf(callSite))
                                    bSolver.processEdge(new PathEdge<Unit, Abstraction>(bwAbs, predUnit, bwAbs));
                            }
                        }
                    }
                }
            }
            // Map a tainted this-local back to the caller's base object
            Local thisL = null;
            if (!callee.isStatic()) {
                thisL = callee.getActiveBody().getThisLocal();
            }
            if (thisL != null) {
                if (thisL.equals(sourceBase)) {
                    boolean param = false;
                    // check if it is not one of the params (then we have already fixed it)
                    for (int i = 0; i < callee.getParameterCount(); i++) {
                        if (callee.getActiveBody().getParameterLocal(i).equals(sourceBase)) {
                            param = true;
                            break;
                        }
                    }
                    if (!param) {
                        if (callSite instanceof Stmt) {
                            Stmt stmt = (Stmt) callSite;
                            if (stmt.getInvokeExpr() instanceof InstanceInvokeExpr) {
                                InstanceInvokeExpr iIExpr = (InstanceInvokeExpr) stmt.getInvokeExpr();
                                Abstraction abs = newSource.deriveNewAbstraction(newSource.getAccessPath().copyWithNewValue(iIExpr.getBase()));
                                if (pathTracking == PathTrackingMethod.ForwardTracking)
                                    ((AbstractionWithPath) abs).addPathElement(stmt);
                                res.add(abs);
                                if(triggerInaktiveTaintOrReverseFlow(iIExpr.getBase(), abs)){
                                    Abstraction bwAbs = abs.deriveInactiveAbstraction();
                                    if (abs.isAbstractionActive())
                                        bwAbs = bwAbs.getAbstractionWithNewActivationUnitOnCurrentLevel(callSite);
                                    for (Unit predUnit : interproceduralCFG().getPredsOf(callSite))
                                        bSolver.processEdge(new PathEdge<Unit, Abstraction>(bwAbs, predUnit, bwAbs));
                                }
                            }
                        }
                    }
                }
            }
            return res;
        }
    };
}
/**
 * Builds the call-to-return flow function for call site {@code call}:
 * decides which taints bypass the callee (only those not consumed by the
 * call/return edges), applies taint-wrapper and native-call summaries,
 * creates source facts for assigned call results, and reports tainted
 * arguments or base objects flowing into sinks.
 */
@Override
public FlowFunction<Abstraction> getCallToReturnFlowFunction(final Unit call, final Unit returnSite) {
    // special treatment for native methods:
    if (call instanceof Stmt) {
        final Stmt iStmt = (Stmt) call;
        final List<Value> callArgs = iStmt.getInvokeExpr().getArgs();
        return new FlowFunction<Abstraction>() {
            @Override
            public Set<Abstraction> computeTargets(Abstraction source) {
                if (stopAfterFirstFlow && !results.isEmpty())
                    return Collections.emptySet();
                Abstraction newSource;
                //check inactive elements:
                // FIX: both activation-unit checks must be guarded by
                // !isAbstractionActive(). The previous parenthesization,
                // "(!active && (A)) || B", let the OnCurrentLevel check
                // bypass the guard because && binds tighter than ||; this
                // now matches getNormalFlowFunction / getCallFlowFunction.
                if (!source.isAbstractionActive() && (call.equals(source.getActivationUnit()) || call.equals(source.getActivationUnitOnCurrentLevel()))){
                    newSource = source.getActiveCopy(false);
                }else{
                    newSource = source;
                }
                Set<Abstraction> res = new HashSet<Abstraction>();
                res.addAll(computeWrapperTaints(iStmt, newSource));
                // We can only pass on a taint if it is neither a parameter nor the
                // base object of the current call
                boolean passOn = true;
                //we only can remove the taint if we step into the call/return edges
                //otherwise we will loose taint - see ArrayTests/arrayCopyTest
                if(hasValidCallees(call) || (taintWrapper != null
                        && taintWrapper.isExclusive(iStmt, newSource.getAccessPath()))) {
                    if (iStmt.getInvokeExpr() instanceof InstanceInvokeExpr)
                        if (((InstanceInvokeExpr) iStmt.getInvokeExpr()).getBase().equals
                                (newSource.getAccessPath().getPlainLocal())) {
                            passOn = false;
                        }
                    if (passOn)
                        for (int i = 0; i < callArgs.size(); i++)
                            if (callArgs.get(i).equals(newSource.getAccessPath().getPlainLocal()) && isTransferableValue(callArgs.get(i))) {
                                passOn = false;
                                break;
                            }
                    //static variables are always propagated if they are not overwritten. So if we have at least one call/return edge pair,
                    //we can be sure that the value does not get "lost" if we do not pass it on:
                    if(newSource.getAccessPath().isStaticFieldRef()){
                        passOn = false;
                    }
                }
                if (passOn)
                    res.add(newSource);
                if (iStmt.getInvokeExpr().getMethod().isNative()) {
                    if (callArgs.contains(newSource.getAccessPath().getPlainValue())) {
                        // java uses call by value, but fields of complex objects can be changed (and tainted), so use this conservative approach:
                        res.addAll(ncHandler.getTaintedValues(iStmt, newSource, callArgs));
                    }
                }
                if (iStmt instanceof AssignStmt) {
                    final AssignStmt stmt = (AssignStmt) iStmt;
                    // A call whose result is assigned may itself be a taint source
                    if (sourceSinkManager.isSource(stmt, interproceduralCFG())) {
                        if (DEBUG)
                            System.out.println("Found source: " + stmt.getInvokeExpr().getMethod());
                        Abstraction abs;
                        if (pathTracking == PathTrackingMethod.ForwardTracking)
                            abs = new AbstractionWithPath(stmt.getLeftOp(),
                                    stmt.getInvokeExpr(),
                                    stmt, false, true, iStmt).addPathElement(call);
                        else
                            abs = new Abstraction(stmt.getLeftOp(),
                                    stmt.getInvokeExpr(), stmt, false, true, iStmt);
                        abs.setZeroAbstraction(source.getZeroAbstraction());
                        res.add(abs);
                        res.remove(zeroValue);
                    }
                }
                // if we have called a sink we have to store the path from the source - in case one of the params is tainted!
                if (sourceSinkManager.isSink(iStmt, interproceduralCFG())) {
                    boolean taintedParam = false;
                    for (int i = 0; i < callArgs.size(); i++) {
                        if (callArgs.get(i).equals(newSource.getAccessPath().getPlainLocal())) {
                            taintedParam = true;
                            break;
                        }
                    }
                    if (taintedParam) {
                        if (pathTracking != PathTrackingMethod.NoTracking)
                            results.addResult(iStmt.getInvokeExpr(), iStmt,
                                    newSource.getSource(),
                                    newSource.getSourceContext(),
                                    ((AbstractionWithPath) newSource).getPropagationPath(),
                                    call);
                        else
                            results.addResult(iStmt.getInvokeExpr(), iStmt,
                                    newSource.getSource(), newSource.getSourceContext());
                    }
                    // if the base object which executes the method is tainted the sink is reached, too.
                    if (iStmt.getInvokeExpr() instanceof InstanceInvokeExpr) {
                        InstanceInvokeExpr vie = (InstanceInvokeExpr) iStmt.getInvokeExpr();
                        if (vie.getBase().equals(newSource.getAccessPath().getPlainValue())) {
                            if (pathTracking != PathTrackingMethod.NoTracking)
                                results.addResult(iStmt.getInvokeExpr(), iStmt,
                                        newSource.getSource(),
                                        newSource.getSourceContext(),
                                        ((AbstractionWithPath) newSource).getPropagationPath(),
                                        call);
                            else
                                results.addResult(iStmt.getInvokeExpr(), iStmt,
                                        newSource.getSource(), newSource.getSourceContext());
                        }
                    }
                }
                return res;
            }
            /**
             * Checks whether the given call has at least one valid target,
             * i.e. a callee with a body.
             * @param call The call site to check
             * @return True if there is at least one callee implementation
             * for the given call, otherwise false
             */
            private boolean hasValidCallees(Unit call) {
                Set<SootMethod> callees = interproceduralCFG().getCalleesOfCallAt(call);
                for (SootMethod callee : callees)
                    if (callee.isConcrete())
                        return true;
                return false;
            }
        };
    }
    return Identity.v();
}
};
}
/** Convenience constructor: default bidirectional ICFG plus a {@link DefaultSourceSinkManager} built from the given source and sink method signatures. */
public InfoflowProblem(List<String> sourceList, List<String> sinkList) {
    this(new JimpleBasedBiDiICFG(), new DefaultSourceSinkManager(sourceList, sinkList));
}
/** Convenience constructor: default bidirectional ICFG with a caller-supplied source/sink manager. */
public InfoflowProblem(ISourceSinkManager sourceSinkManager) {
    this(new JimpleBasedBiDiICFG(), sourceSinkManager);
}
/** Convenience constructor: caller-supplied ICFG plus a {@link DefaultSourceSinkManager} built from the given signatures. */
public InfoflowProblem(InterproceduralCFG<Unit, SootMethod> icfg, List<String> sourceList, List<String> sinkList) {
    this(icfg, new DefaultSourceSinkManager(sourceList, sinkList));
}
/** Primary constructor: all other constructors delegate here. */
public InfoflowProblem(InterproceduralCFG<Unit, SootMethod> icfg, ISourceSinkManager sourceSinkManager) {
    super(icfg);
    this.sourceSinkManager = sourceSinkManager;
}
/**
 * Creates a problem with explicit analysis seeds, each seeded with the zero fact.
 * NOTE(review): the zeroValue field is still null when this constructor runs
 * (field initializer), so each seed maps to singleton(null) here — verify the
 * solver later replaces or tolerates this.
 */
public InfoflowProblem(ISourceSinkManager mySourceSinkManager, Set<Unit> analysisSeeds) {
    this(new JimpleBasedBiDiICFG(), mySourceSinkManager);
    for (Unit u : analysisSeeds)
        this.initialSeeds.put(u, Collections.singleton(zeroValue));
}
/** Injects the backwards solver used for alias propagation (must be set before solving — verify against setup code). */
public void setBackwardSolver(InfoflowSolver backwardSolver){
    bSolver = backwardSolver;
}
/** Tells the framework not to add the zero fact automatically; seeds are registered explicitly in the constructors. */
@Override
public boolean autoAddZero() {
    return false;
}
}
|
package soot.jimple.spark.pag;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import soot.SootField;
import soot.SootMethod;
import soot.jimple.spark.ondemand.genericutil.Predicate;
import soot.jimple.spark.sets.P2SetVisitor;
/**
* Utilities for dumping dot representations of parts of a {@link PAG}.
* @author msridhar
*
*/
public class PagToDotDumper {
// Maximum trace depth — presumably consumed by traceNode outside this chunk; verify
public static final int TRACE_MAX_LVL = 99;
// The pointer assignment graph being dumped
private PAG pag;
// Output stream; assigned by dump routines outside this chunk — verify
private PrintStream ps;
// Node currently being traced; set by code outside this chunk — verify
private Node curNode;
// load-source node -> matching store-source nodes (same field, intersecting base p2 sets); built by buildVmatchEdges()
private HashMap<Node, Node[]> vmatches;
// inverse mapping of vmatches; built by buildVmatchEdges()
private HashMap<Node, Node[]> invVmatches;
/** Creates a dumper for the given pointer assignment graph with empty vmatch tables. */
public PagToDotDumper(PAG pag) {
    this.pag = pag;
    this.vmatches = new HashMap<Node, Node[]>();
    this.invVmatches = new HashMap<Node, Node[]>();
}
/**
 * Debugging aid: traces one known-problematic variable and can optionally
 * dump every variable node whose points-to set is empty.
 */
public void debugEmptyP2Sets() {
    traceNode("java.util.Hashtable$ValueCollection", "contains","r0");
    // Toggle for the exhaustive empty-p2set dump below; off by default.
    // Replaces the previous "if (false)" dead-code guard with a named flag
    // so the intent (manually-enabled debug dump) is explicit.
    final boolean dumpAllEmptyP2Sets = false;
    if (dumpAllEmptyP2Sets) {
        for (Iterator iter = pag.getVarNodeNumberer().iterator(); iter.hasNext();) {
            VarNode vNode = (VarNode) iter.next();
            if (vNode.getP2Set().isEmpty()) {
                System.err.println(vNode);
                System.err.println(vNode.getP2Set());
            }
        }
    }
}
/**
 * Builds "vmatch" edges: for every load/store pair referencing the same
 * field whose base points-to sets intersect, records the store-inverse
 * source nodes for each load source node in {@code vmatches} and the
 * reverse direction in {@code invVmatches}.
 */
private void buildVmatchEdges() {
    // for each store and load pair
    for (Iterator iter = pag.loadSourcesIterator(); iter.hasNext();) {
        final FieldRefNode frn1 = (FieldRefNode) iter.next();
        for (Iterator iter2 = pag.storeInvSourcesIterator(); iter2
                .hasNext();) {
            final FieldRefNode frn2 = (FieldRefNode) iter2.next();
            VarNode base1 = frn1.getBase();
            VarNode base2 = frn2.getBase();
            // debug(frn1, frn2, base1, base2);
            // if they store & load the same field
            if (frn1.getField().equals(frn2.getField())) {
                // bases may alias only if their points-to sets overlap
                if (base1.getP2Set().hasNonEmptyIntersection(
                        base2.getP2Set())) {
                    // System.err.println("srcs:");
                    Node[] src = pag.loadLookup(frn1);
                    Node[] dst = pag.storeInvLookup(frn2);
                    for (int i = 0; i < src.length; i++) {
                        // System.err.println(src[i]);
                        vmatches.put(src[i], dst);
                    }
                    // System.err.println("dst:");
                    for (int i = 0; i < dst.length; i++) {
                        // System.err.println(dst[i]);
                        invVmatches.put(dst[i], src);
                    }
                }
            }
        }
    }
}
/**
 * Prints detailed diagnostics for one load/store field-reference pair,
 * restricted to locals of {@code java.util.Hashtable$ValueCollection}
 * ({@code contains} on the load side, {@code <init>} on the store side).
 * @param frn1 load-side field reference node
 * @param frn2 store-side field reference node
 * @param base1 base variable of {@code frn1}
 * @param base2 base variable of {@code frn2}
 */
@SuppressWarnings("unused")
private void debug(final FieldRefNode frn1, final FieldRefNode frn2,
        VarNode base1, VarNode base2) {
    if (base1 instanceof LocalVarNode && base2 instanceof LocalVarNode) {
        LocalVarNode lvn1 = (LocalVarNode) base1;
        LocalVarNode lvn2 = (LocalVarNode) base2;
        if (lvn1.getMethod().getDeclaringClass().getName().equals(
                "java.util.Hashtable$ValueCollection")
                && lvn1.getMethod().getName().equals("contains")
                && lvn2.getMethod().getDeclaringClass().getName().equals(
                        "java.util.Hashtable$ValueCollection")
                && lvn2.getMethod().getName().equals("<init>")
                ) {
            System.err.println("Method: " + lvn1.getMethod().getName());
            System.err.println(makeLabel(frn1));
            System.err.println("Base: " + base1.getVariable());
            System.err.println("Field: " + frn1.getField());
            System.err.println(makeLabel(frn2));
            System.err.println("Base: " + base2.getVariable());
            System.err.println("Field: " + frn2.getField());
            if (frn1.getField().equals(frn2.getField())) {
                System.err.println("field match");
                if (base1.getP2Set().hasNonEmptyIntersection(
                        base2.getP2Set())) {
                    System.err.println("non empty");
                } else {
                    System.err.println("b1: " + base1.getP2Set());
                    System.err.println("b2: " + base2.getP2Set());
                }
            }
        }
    }
}
/**
 * Renders one directed dot edge between two PAG nodes.
 * @param src the source node
 * @param dest the destination node
 * @param label the edge label text
 * @return a dot edge declaration of the form {@code a -> b [label="l"];}
 */
public static String translateEdge(Node src, Node dest, String label) {
    StringBuilder edge = new StringBuilder();
    edge.append(makeNodeName(src));
    edge.append(" -> ");
    edge.append(makeNodeName(dest));
    edge.append(" [label=\"");
    edge.append(label);
    edge.append("\"];");
    return edge.toString();
}
// Flags non-allocation nodes whose points-to set is empty; used by
// translateLabel(Node) to color such nodes red in the dot output.
private final static Predicate<Node> emptyP2SetPred = new Predicate<Node>() {
    public boolean test(Node n) {
        return !(n instanceof AllocNode) && n.getP2Set().isEmpty();
    }
};
/**
 * Renders one node declaration in GraphViz dot syntax, choosing a
 * human-readable label based on the node's concrete type.
 *
 * @param n the node to render
 * @param p predicate deciding whether the node is highlighted in red
 * @return a dot statement of the form {@code node_N[label="..."];}
 */
public static String translateLabel(Node n, Predicate<Node> p) {
    String color = "";
    String label;
    if (p.test(n)) {
        color = ", color=red";
    }
    if (n instanceof LocalVarNode) {
        label = makeLabel((LocalVarNode) n);
    } else if (n instanceof AllocNode) {
        label = makeLabel((AllocNode) n);
    } else if (n instanceof FieldRefNode) {
        label = makeLabel((FieldRefNode) n);
    } else {
        label = n.toString();
    }
    return makeNodeName(n) + "[label=\"" + label + "\"" + color + "];";
}
/**
 * Renders a node declaration in dot syntax with the default highlighting
 * rule: non-allocation nodes with empty points-to sets are colored red.
 */
private static String translateLabel(Node n) {
    return translateLabel(n, emptyP2SetPred);
}
/**
 * Tells whether the given local variable node belongs to the method
 * {@code mName} declared in class {@code cName}.
 *
 * @param lvNode the local variable node to inspect
 * @param cName fully qualified declaring class name
 * @param mName method name
 * @return true iff the node has a method matching both names
 */
private boolean isDefinedIn(LocalVarNode lvNode, String cName, String mName) {
    SootMethod method = lvNode.getMethod();
    if (method == null) {
        return false;
    }
    return method.getDeclaringClass().getName().equals(cName)
            && method.getName().equals(mName);
}
/** Returns the plain string form of a local variable node's variable. */
public static String format(LocalVarNode v) {
    Object variable = v.getVariable();
    return variable.toString();
}
/**
 * Looks up the variable node with the given numeric id and dumps its
 * incoming edges to stderr. Rebuilds the vmatch edges first so the dump
 * reflects the current match state.
 *
 * @param id the node number to search for
 */
public void printOneNodeByID(int id) {
    buildVmatchEdges();
    Iterator iter = pag.getVarNodeNumberer().iterator();
    while (iter.hasNext()) {
        Node candidate = (Node) iter.next();
        if (candidate.getNumber() == id) {
            printOneNode((VarNode) candidate);
        }
    }
}
/**
 * Prints the new-expression of the allocation node with the given id to
 * stderr.
 *
 * @param id the allocation node number to search for
 */
public void printOneAllocNodeByID(int id) {
    Iterator iter = pag.getAllocNodeNumberer().iterator();
    while (iter.hasNext()) {
        AllocNode candidate = (AllocNode) iter.next();
        if (candidate.getNumber() == id) {
            System.err.println(id + ": " + candidate.getNewExpr().toString());
        }
    }
}
/**
 * Dumps the four kinds of incoming edges (assign, new, load, store) of a
 * variable node to stderr.
 *
 * @param node the variable node to print
 */
private void printOneNode(VarNode node) {
    PrintStream ps = System.err;
    ps.println(makeLabel(node));
    printEdgeSection(ps, "assign", pag.simpleInvLookup(node));
    printEdgeSection(ps, "new", pag.allocInvLookup(node));
    printEdgeSection(ps, "load", pag.loadInvLookup(node));
    printEdgeSection(ps, "store", pag.storeLookup(node));
}

/** Prints one titled section listing the given nodes, one per line. */
private static void printEdgeSection(PrintStream ps, String title, Node[] nodes) {
    ps.println(title);
    ps.println("======");
    for (int i = 0; i < nodes.length; i++) {
        ps.println(nodes[i]);
    }
}
/**
 * Writes the points-to sets of all local variables of the given method to
 * {@code fName} as a GraphViz dot graph.
 *
 * @param fName output file name
 * @param cName declaring class name, forwarded to {@code dumpLocalP2Set}
 * @param mName method name whose locals are dumped
 */
public void dumpP2Set(String fName, String cName, String mName) {
    PrintStream ps = null;
    try {
        FileOutputStream fos = new FileOutputStream(new File(fName));
        ps = new PrintStream(fos);
        ps.println("digraph G {");
        dumpLocalP2Set(cName, mName, ps);
        ps.print("}");
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        // Close (and thereby flush) the stream; the original leaked it,
        // which can leave the dot file truncated.
        if (ps != null) {
            ps.close();
        }
    }
}
/**
 * Emits, for every local variable of a method named {@code mName}, a dot
 * node declaration followed by its points-to set.
 *
 * <p>Side effects: stores {@code ps} into the instance field
 * {@code this.ps} and each candidate node into {@code curNode}; both are
 * read by {@code P2SetToDotPrinter} while {@code forall} walks the set.
 *
 * @param cName declaring class name — note: not used for filtering here,
 *     only the method name is matched; TODO confirm this is intended
 * @param mName method name whose locals are dumped
 * @param ps destination stream (already inside a {@code digraph} block)
 */
private void dumpLocalP2Set(String cName, String mName, final PrintStream ps) {
    this.ps = ps;
    for (Iterator iter = pag.getVarNodeNumberer().iterator(); iter
            .hasNext();) {
        VarNode vNode = (VarNode) iter.next();
        if (vNode instanceof LocalVarNode) {
            final LocalVarNode lvNode = (LocalVarNode) vNode;
            // curNode is consumed by the visitor below.
            curNode = lvNode;
            if (lvNode.getMethod() != null
                    && lvNode.getMethod().getName().equals(mName)) {
                ps.println("\t" + makeNodeName(lvNode) + " [label=\""
                        + makeLabel(lvNode) + "\"];");
                lvNode.getP2Set().forall(new P2SetToDotPrinter());
            }
        }
    }
}
/**
 * Dumps the PAG restricted to {@code cName.mName} into a file named
 * {@code <class>.<method>.dot}; angle brackets around special method names
 * (e.g. {@code <init>}) are stripped for the file name only.
 */
public void dumpPAG(String cName, String mName) {
    boolean bracketed = mName.indexOf('<') == 0;
    String fileMethodName = bracketed
            ? mName.substring(1, mName.length() - 1)
            : mName;
    dumpPAG(cName + "." + fileMethodName + ".dot", cName, mName);
}
/**
 * Writes the pointer-assignment graph restricted to the given class and
 * method to {@code fName} as a left-to-right GraphViz dot graph.
 *
 * @param fName output file name
 * @param cName declaring class name used for filtering
 * @param mName method name used for filtering
 */
public void dumpPAG(String fName, String cName, String mName) {
    PrintStream ps = null;
    try {
        FileOutputStream fos = new FileOutputStream(new File(fName));
        ps = new PrintStream(fos);
        ps.println("digraph G {");
        ps.println("\trankdir=LR;");
        dumpLocalPAG(cName, mName, ps);
        ps.print("}");
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        // Close (and thereby flush) the stream; the original leaked it,
        // which can leave the dot file truncated.
        if (ps != null) {
            ps.close();
        }
    }
}
/**
 * Emits the forward-reachable PAG edges for every local variable node that
 * is defined in {@code cName.mName}.
 *
 * @param cName declaring class name used for filtering
 * @param mName method name used for filtering
 * @param ps destination stream (already inside a {@code digraph} block)
 */
private void dumpLocalPAG(String cName, String mName, final PrintStream ps) {
    // this.queryMethod = mName;
    // iterate over all variable nodes
    for (Iterator iter = pag.getVarNodeNumberer().iterator(); iter
            .hasNext();) {
        final Node node = (Node) iter.next();
        if (!(node instanceof LocalVarNode))
            continue;
        final LocalVarNode lvNode = (LocalVarNode) node;
        // nodes that is defined in the specified class and method
        if (isDefinedIn(lvNode, cName, mName)) {
            dumpForwardReachableNodesFrom(lvNode, ps);
        }
    }
    // Abandoned field-ref-node pass, kept for reference:
    // for (Iterator iter = pag.getFieldRefNodeNumberer().iterator(); iter
    // .hasNext();) {
    // final FieldRefNode frNode = (FieldRefNode) iter.next();
    // if (frNode.getBase().)
    // Node[] succs = pag.storeInvLookup(frNode);
    // for (int i = 0; i < succs.length; i++) {
    // ps.println("\t" + translateLabel(succs[i]));
    // // print edge
    // ps.println("\t" + translateEdge(frNode, succs[i], "store"));
}
/**
 * Emits the immediate PAG neighbors of {@code lvNode} — assign, new, load
 * and store edges — in dot syntax, one hop only (no recursion).
 *
 * @param lvNode the local variable node to expand
 * @param ps destination stream (already inside a {@code digraph} block)
 */
private void dumpForwardReachableNodesFrom(final LocalVarNode lvNode,
        final PrintStream ps) {
    ps.println("\t" + translateLabel(lvNode));
    // assign edges: values flowing into lvNode
    Node[] succs = pag.simpleInvLookup(lvNode);
    for (int i = 0; i < succs.length; i++) {
        ps.println("\t" + translateLabel(succs[i]));
        // print edge
        ps.println("\t" + translateEdge(lvNode, succs[i], "assign"));
    }
    // new edges: allocation sites assigned to lvNode
    succs = pag.allocInvLookup(lvNode);
    for (int i = 0; i < succs.length; i++) {
        ps.println("\t" + translateLabel(succs[i]));
        // print edge
        ps.println("\t" + translateEdge(lvNode, succs[i], "new"));
    }
    // load edges: field refs read into lvNode; also emit the base link
    succs = pag.loadInvLookup(lvNode);
    for (int i = 0; i < succs.length; i++) {
        final FieldRefNode frNode = (FieldRefNode) succs[i];
        ps.println("\t" + translateLabel(frNode));
        ps.println("\t" + translateLabel(frNode.getBase()));
        // print edge
        ps.println("\t" + translateEdge(lvNode, frNode, "load"));
        ps.println("\t"
                + translateEdge(frNode, frNode.getBase(), "getBase"));
    }
    // store edges: field refs written from lvNode (note reversed direction)
    succs = pag.storeLookup(lvNode);
    for (int i = 0; i < succs.length; i++) {
        final FieldRefNode frNode = (FieldRefNode) succs[i];
        ps.println("\t" + translateLabel(frNode));
        ps.println("\t" + translateLabel(frNode.getBase()));
        // print edge
        ps.println("\t" + translateEdge(frNode, lvNode, "store"));
        ps.println("\t"
                + translateEdge(frNode, frNode.getBase(), "getBase"));
    }
}
/**
 * Traces the variable node with the given numeric id into a dot file named
 * {@code trace.<id>.dot}, up to {@code TRACE_MAX_LVL} levels deep.
 *
 * <p>Side effect: assigns the instance field {@code ps} (also used by
 * other dump helpers), as the original code did.
 *
 * @param id the node number to trace
 */
public void traceNode(int id) {
    buildVmatchEdges();
    String fName = "trace." + id + ".dot";
    try {
        FileOutputStream fos = new FileOutputStream(new File(fName));
        ps = new PrintStream(fos);
        try {
            ps.println("digraph G {");
            // iterate over all variable nodes
            for (Iterator iter = pag.getVarNodeNumberer().iterator(); iter
                    .hasNext();) {
                final VarNode n = (VarNode) iter.next();
                if (n.getNumber() == id) {
                    LocalVarNode lvn = (LocalVarNode) n;
                    printOneNode(lvn);
                    trace(lvn, ps, new HashSet<Node>(), TRACE_MAX_LVL);
                }
            }
            ps.print("}");
        } finally {
            // Close (and thereby flush) the stream; the original leaked it.
            ps.close();
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}
/**
 * Traces the local variable {@code varName} of {@code cName.mName} into a
 * dot file named {@code trace.<class>.<method>.<var>.dot}; angle brackets
 * around special method names are stripped for the file name only.
 */
public void traceNode(String cName, String mName, String varName) {
    boolean bracketed = mName.indexOf('<') == 0;
    String fileMethodName = bracketed
            ? mName.substring(1, mName.length() - 1)
            : mName;
    traceLocalVarNode("trace." + cName + "." + fileMethodName + "." + varName
            + ".dot", cName, mName, varName);
}
/**
 * Traces a single named local variable of {@code cName.mName} into the
 * given dot file, up to 10 levels deep.
 *
 * @param fName output file name
 * @param cName declaring class name used for filtering
 * @param mName method name used for filtering
 * @param varName textual form of the variable to trace
 */
public void traceLocalVarNode(String fName, String cName, String mName,
        String varName) {
    PrintStream ps = null;
    buildVmatchEdges();
    try {
        FileOutputStream fos = new FileOutputStream(new File(fName));
        ps = new PrintStream(fos);
        ps.println("digraph G {");
        // iterate over all variable nodes
        for (Iterator iter = pag.getVarNodeNumberer().iterator(); iter
                .hasNext();) {
            final VarNode n = (VarNode) iter.next();
            if (!(n instanceof LocalVarNode)) {
                continue;
            }
            LocalVarNode lvn = (LocalVarNode) n;
            // HACK: some nodes carry no method; skip them to avoid NPEs.
            if (lvn.getMethod() == null) {
                continue;
            }
            if (isDefinedIn(lvn, cName, mName)
                    && lvn.getVariable().toString().equals(varName)) {
                trace(lvn, ps, new HashSet<Node>(), 10);
            }
        }
        ps.print("}");
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        // Close (and thereby flush) the stream; the original leaked it.
        if (ps != null) {
            ps.close();
        }
    }
}
/**
 * Depth-first traversal over assign, new and vmatch edges starting at
 * {@code node}, emitting dot nodes and edges as it goes. Recursion depth
 * is bounded by {@code level}; nothing is printed once it drops below 1.
 *
 * <p>NOTE(review): assign successors are added to {@code visitedNodes}
 * before recursing, but vmatch successors are not — duplicate subtrees can
 * be emitted (bounded by {@code level}); confirm whether that is intended.
 *
 * @param node starting variable node (its label is always printed)
 * @param ps destination stream (already inside a {@code digraph} block)
 * @param visitedNodes nodes already emitted, to suppress repeats
 * @param level remaining recursion depth
 */
private void trace(VarNode node, PrintStream ps, HashSet<Node> visitedNodes, int level) {
    if (level < 1)
        return;
    ps.println("\t" + translateLabel(node));
    // // assign value to others
    // Node[] preds = pag.storeLookup(node);
    // for (int i = 0; i < preds.length; i++) {
    // if (visitedNodes.contains(preds[i]))
    // continue;
    // ps.println("\t" + translateLabel(preds[i]));
    // // print edge
    // ps.println("\t" + translateEdge(preds[i], node, "store"));
    // visitedNodes.add(preds[i]);
    // // trace((VarNode) preds[i], ps, visitedNodes);
    // get other's value
    Node[] succs = pag.simpleInvLookup(node);
    for (int i = 0; i < succs.length; i++) {
        if (visitedNodes.contains(succs[i]))
            continue;
        ps.println("\t" + translateLabel(succs[i]));
        // print edge
        ps.println("\t" + translateEdge(node, succs[i], "assign"));
        visitedNodes.add(succs[i]);
        trace((VarNode) succs[i], ps, visitedNodes, level-1);
    }
    // allocation sites are leaves: print them but do not recurse
    succs = pag.allocInvLookup(node);
    for (int i = 0; i < succs.length; i++) {
        if (visitedNodes.contains(succs[i]))
            continue;
        ps.println("\t" + translateLabel(succs[i]));
        // print edge
        ps.println("\t" + translateEdge(node, succs[i], "new"));
    }
    // follow load/store match edges built by buildVmatchEdges()
    succs = vmatches.get(node);
    if (succs != null) {
        // System.err.println(succs.length);
        for (int i = 0; i < succs.length; i++) {
            // System.err.println(succs[i]);
            if (visitedNodes.contains(succs[i]))
                continue;
            ps.println("\t" + translateLabel(succs[i]));
            // print edge
            ps.println("\t" + translateEdge(node, succs[i], "vmatch"));
            trace((VarNode) succs[i], ps, visitedNodes, level-1);
        }
    }
    // Abandoned direct load/store tracing, kept for reference:
    // succs = pag.loadInvLookup(node);
    // for (int i = 0; i < succs.length; i++) {
    // if (visitedNodes.contains(succs[i]))
    // continue;
    // final FieldRefNode frNode = (FieldRefNode) succs[i];
    // // ps.println("\t" + translateLabel(frNode));
    // // print edge
    // ps.println("\t"
    // + translateEdge(node, frNode.getBase(), "getfield\\n"
    // + frNode.getField()));
    // visitedNodes.add(frNode.getBase());
    // trace(frNode.getBase(), ps, visitedNodes);
    // succs = pag.storeLookup(node);
    // for (int i = 0; i < succs.length; i++) {
    // if (visitedNodes.contains(succs[i]))
    // continue;
    // final FieldRefNode frNode = (FieldRefNode) succs[i];
    // // ps.println("\t" + translateLabel(frNode.getBase()));
    // // print edge
    // ps.println("\t"
    // + translateEdge(frNode.getBase(), node, "putfield\\n"
    // + frNode.getField()));
    // visitedNodes.add(frNode.getBase());
    // trace(frNode.getBase(), ps, visitedNodes);
}
/** Returns the dot identifier for a node, e.g. {@code node_42}. */
public static String makeNodeName(Node n) {
    StringBuilder sb = new StringBuilder("node_");
    sb.append(n.getNumber());
    return sb.toString();
}
/** Returns the allocation site's new-expression text as the node label. */
public static String makeLabel(AllocNode n) {
    Object newExpr = n.getNewExpr();
    return newExpr.toString();
}
/**
 * Builds a multi-line dot label for a local variable node: variable name,
 * node number, declaring class and method, separated by literal "\n"
 * sequences that dot renders as line breaks.
 */
public static String makeLabel(LocalVarNode n) {
    SootMethod sm = n.getMethod();
    StringBuilder label = new StringBuilder("LV ");
    label.append(n.getVariable().toString());
    label.append(' ').append(n.getNumber());
    label.append("\\n").append(sm.getDeclaringClass());
    label.append("\\n").append(sm.getName());
    return label.toString();
}
/**
 * Builds a dot label of the form {@code FNR <base>.<field>} for a field
 * reference node; uses the short field name when the field is a
 * {@code SootField}, otherwise the field object's own string form.
 *
 * @param node the field reference node
 * @return the label text
 */
public static String makeLabel(FieldRefNode node) {
    if (node.getField() instanceof SootField) {
        final SootField sf = (SootField) node.getField();
        return "FNR " + makeLabel(node.getBase()) + "." + sf.getName();
    } else {
        return "FNR " + makeLabel(node.getBase()) + "." + node.getField();
    }
}
/**
 * Builds a dot label for a variable node, delegating to the richer
 * local-variable form when possible.
 *
 * @param base the variable node to label
 * @return the label text
 */
public static String makeLabel(VarNode base) {
    return (base instanceof LocalVarNode)
            ? makeLabel((LocalVarNode) base)
            : base.toString();
}
/**
 * Points-to set visitor that prints each member node plus an edge from the
 * outer class's {@code curNode} to it, writing to the outer {@code ps}
 * stream. Both fields must be set by the caller before {@code forall}
 * runs (see {@code dumpLocalP2Set}).
 */
class P2SetToDotPrinter extends P2SetVisitor {
    public void visit(Node n) {
        // assumes every member of a points-to set is an AllocNode — TODO
        // confirm; a non-alloc member would raise ClassCastException here.
        ps.println("\t" + makeNodeName(n) + " [label=\""
                + makeLabel((AllocNode) n) + "\"];");
        ps.print("\t" + makeNodeName(curNode) + " -> ");
        ps.println(makeNodeName(n) + ";");
    }
}
}
|
package de.SweetCode.e;
import com.jogamp.opengl.*;
import com.jogamp.opengl.awt.GLCanvas;
import com.jogamp.opengl.fixedfunc.GLMatrixFunc;
import com.jogamp.opengl.util.FPSAnimator;
import com.jogamp.opengl.util.texture.Texture;
import com.jogamp.opengl.util.texture.TextureData;
import de.SweetCode.e.loop.ProfilerLoop;
import de.SweetCode.e.rendering.GameScene;
import de.SweetCode.e.rendering.layers.Layer;
import de.SweetCode.e.utils.StringUtils;
import de.SweetCode.e.utils.log.LogEntry;
import javax.swing.*;
import java.awt.*;
import java.awt.image.BufferStrategy;
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferInt;
import java.awt.image.VolatileImage;
import java.lang.management.GarbageCollectorMXBean;
import java.nio.IntBuffer;
import java.util.Comparator;
import java.util.List;
import java.util.Set;
public class EScreen extends JFrame implements GLEventListener {
/**
* @TODO:
* Experimental Feature - it will allocate VRAM instead of RAM
* to store and draw frames. This will also be used for the DynamicTextureLoader
* to reduce the used RAM.
*
* I am currently working on some bugs and on the implementation itself to ensure its
* performance.
*/
public static final boolean USE_VRAM = true;
/**
* @TODO:
* Experimental Feature: using OpenGL to render the frame.
*/
public static final boolean USE_JOGL = false;
private BufferStrategy bufferStrategy;
private GameScene current = null;
private VolatileImage volatileImage = null;
private static GraphicsConfiguration graphicConfiguration;
// OpenGL
private GLProfile glProfile = null;
public EScreen() {
Settings settings = E.getE().getSettings();
this.setTitle(settings.getName());
this.setUndecorated(!settings.isDecorated());
this.setResizable(settings.isResizable());
this.setPreferredSize(new Dimension(settings.getWidth(), settings.getHeight()));
this.setMinimumSize(new Dimension(settings.getWidth(), settings.getHeight()));
this.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
EScreen.graphicConfiguration = this.getGraphicsConfiguration();
if(USE_JOGL) {
this.glProfile = GLProfile.get(GLProfile.GL2);
GLCapabilities glCapabilities = new GLCapabilities(glProfile);
glCapabilities.setDoubleBuffered(false);
GLCanvas canvas = new GLCanvas(glCapabilities);
canvas.addGLEventListener(this);
canvas.setSize(400, 400);
FPSAnimator animator = new FPSAnimator(canvas, 200);
animator.start();
this.add(canvas);
} else {
if (!(EScreen.USE_VRAM)) {
this.createBufferStrategy(1);
this.bufferStrategy = this.getBufferStrategy();
if (this.bufferStrategy == null) {
E.getE().getLog().log(LogEntry.Builder.create().message("Failed to create BufferStrategy.").build());
}
} else {
E.getE().getLog().log(
LogEntry.Builder.create()
.message("Using images stored in VRAM to render the frames.")
.build()
);
}
}
this.pack();
this.setLocationRelativeTo(null);
this.setVisible(true);
}
public GameScene getCurrent() {
return this.current;
}
public void setScene(GameScene gameScene) {
this.current = gameScene;
this.invalidate();
this.repaint();
}
@Override
public void paint(Graphics graphics) {
if(USE_JOGL) {
super.paint(graphics);
return;
}
if (this.current == null) {
return;
}
Settings s = E.getE().getSettings();
//@TODO Work on camera.
//Camera camera = E.getE().getCamera();
do {
Graphics2D g;
if(USE_VRAM) {
if(
this.volatileImage == null ||
(
this.volatileImage != null &&
this.volatileImage.validate(super.getGraphicsConfiguration()) == VolatileImage.IMAGE_INCOMPATIBLE
)
) {
this.volatileImage = super.createVolatileImage(s.getWidth(), s.getHeight());
}
g = this.volatileImage.createGraphics();
} else {
g = (Graphics2D) this.bufferStrategy.getDrawGraphics();
}
g.setRenderingHints(E.getE().getSettings().getRenderingHints());
int x = 0;
int y = 0;
/**
@TODO
if(E.getE().getSettings().fixAspectRatio()) {
AspectRatio aspectRatio = new AspectRatio(new Dimension(1280, 720), new Dimension(this.getWidth(), this.getHeight()));
BoundingBox optimal = aspectRatio.getOptimal();
x = (int) optimal.getMin().getX();
y = (int) optimal.getMin().getY();
}**/
BufferedImage frame = this.frame();
g.drawImage(frame, x, y, null);
if(EScreen.USE_VRAM) {
graphics.drawImage(this.volatileImage, 0, 0, null);
} else {
this.bufferStrategy.show();
}
g.dispose();
E.getE().getLayers().getLayers().forEach(Layer::clean);
} while(USE_VRAM ? this.volatileImage.contentsLost() : this.bufferStrategy.contentsLost());
}
public static GraphicsConfiguration getGraphicConfiguration() {
return graphicConfiguration;
}
private BufferedImage frame() {
this.current.render(E.getE().getLayers());
E.getE().getGameComponents().forEach(k -> {
GameComponent e = k.getGameComponent();
if(e instanceof Renderable && e.isActive()) {
((Renderable) e).render(E.getE().getLayers());
}
});
Settings settings = E.getE().getSettings();
if(settings.isDebugging()) {
ProfilerLoop profilerLoop = E.getE().getProfilerLoop();
List<Settings.DebugDisplay> displays = settings.getDebugInformation();
int xOffset = 360;
int yOffset = 12;
int xStep = 1;
Layer layer = E.getE().getLayers().first();
layer.g().setColor(
EScreen.highContrast(
new Color(layer.b().getRGB(settings.getWidth() - xOffset / 2, (int) (yOffset * 1.5D)))
)
);
if(displays.contains(Settings.DebugDisplay.CPU_PROFILE)) {
layer.g().drawString(
String.format(
"CPU: %.2f%% | Cores: %d",
profilerLoop.getAverageCPU() * 100,
profilerLoop.getAvailableProcessors()
),
settings.getWidth() - xOffset,
yOffset * xStep
);
xStep++;
}
if(displays.contains(Settings.DebugDisplay.LOOP_PROFILE)) {
layer.g().drawString(
String.format(
"FPS: %d (%d) | Ticks: %d (%d)",
E.getE().getCurrentFPS(),
settings.getTargetFPS(),
E.getE().getCurrentTicks(),
settings.getTargetTicks()
),
settings.getWidth() - xOffset,
yOffset * xStep
);
xStep++;
}
if(displays.contains(Settings.DebugDisplay.MEMORY_PROFILE)) {
layer.g().drawString(
String.format(
"Heap: %.2fMB | Used: %.2fMB",
profilerLoop.getMaxMemory() * E.C.BYTES_TO_MEGABYTES,
profilerLoop.getAverageMemoryUsed() * E.C.BYTES_TO_MEGABYTES
),
settings.getWidth() - xOffset,
yOffset * xStep
);
xStep++;
}
if(displays.contains(Settings.DebugDisplay.GC_PROFILE)) {
List<GarbageCollectorMXBean> gcBeans = profilerLoop.getGCBeans();
layer.g().drawString(
String.format(
"GCs: %d",
gcBeans.size()
),
settings.getWidth() - xOffset,
yOffset * xStep
);
for (int i = 0; i < gcBeans.size(); i++) {
GarbageCollectorMXBean gc = gcBeans.get(0);
layer.g().drawString(
String.format(
"%s, %d (%dms), %s",
gc.getName(),
gc.getCollectionCount(),
gc.getCollectionTime(),
StringUtils.join(gc.getMemoryPoolNames(), ", ")
),
(int) (settings.getWidth() - xOffset * 0.95),
yOffset * ((xStep + 1) + i)
);
}
xStep += gcBeans.size();
xStep++;
}
if(displays.contains(Settings.DebugDisplay.GC_PROFILE)) {
Set<Thread> threads = profilerLoop.getThreads();
layer.g().drawString(
String.format(
"Threads: %d",
threads.size()
),
settings.getWidth() - xOffset,
yOffset * xStep
);
final int[] i = {0};
int finalXStep = xStep;
threads.stream()
.sorted(Comparator.comparingLong(value -> value.getId()))
.forEach(t -> {
layer.g().drawString(
String.format(
"%d - P: %d - %s (%s)",
t.getId(),
t.getPriority(),
t.getName(),
t.getState().name()
),
(int) (settings.getWidth() - xOffset * 0.95),
yOffset * ((finalXStep + 1) + i[0])
);
i[0]++;
});
xStep += threads.size();
xStep++;
}
}
return E.getE().getLayers().combine();
}
@Override
public void init(GLAutoDrawable drawable) {}
@Override
public void dispose(GLAutoDrawable glAutoDrawable) {}
@Override
public void display(GLAutoDrawable drawable) {
if(!(this.current == null)) {
// getting the new frame
BufferedImage frame = this.frame();
// Frame to Buffer
IntBuffer buffer = IntBuffer.allocate(frame.getWidth() * frame.getHeight() * 4);
buffer.put(((DataBufferInt) frame.getRaster().getDataBuffer()).getData());
buffer.flip();
GL2 gl = drawable.getGL().getGL2();
// clear
gl.glClearColor(0F, 0F, 0F, 0F);
gl.glClear(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT);
gl.glLoadIdentity();
// generating & binding texture
TextureData textureData = new TextureData(this.glProfile, GL.GL_RGBA, frame.getWidth(), frame.getHeight(), 0, GL.GL_RGBA, GL.GL_UNSIGNED_BYTE, false, false, false, buffer, null);
Texture texture = new Texture(gl, textureData);
texture.enable(gl);
texture.bind(gl);
// viewport
gl.glViewport(0, 0, frame.getWidth(), frame.getHeight());
gl.glMatrixMode(GLMatrixFunc.GL_PROJECTION);
gl.glLoadIdentity();
gl.glOrtho(0, frame.getWidth(), frame.getHeight(), 0, 0, 1);
gl.glMatrixMode(GLMatrixFunc.GL_MODELVIEW);
gl.glLoadIdentity();
gl.glBegin(GL2.GL_QUADS);
gl.glTexCoord2f(0, 0);
gl.glVertex2f(0, 0);
gl.glTexCoord2f(1, 0);
gl.glVertex2f(frame.getWidth(), 0);
gl.glTexCoord2f(1, 1);
gl.glVertex2f(frame.getWidth(), frame.getHeight());
gl.glTexCoord2f(0, 1);
gl.glVertex2f(0, frame.getHeight());
gl.glEnd();
gl.glFlush();
textureData.destroy();
textureData.flush();
texture.disable(gl);
texture.destroy(gl);
buffer.clear();
}
}
@Override
public void reshape(GLAutoDrawable glAutoDrawable, int i, int i1, int i2, int i3) {}
/**
* Returns a color with the highest possible contrast compared to the input color.
* @param input The input color.
* @return The complementary color.
*/
private static Color highContrast(Color input) {
float[] hsb = new float[3];
Color.RGBtoHSB(input.getRed(), input.getGreen(), input.getBlue(), hsb);
float hue = hsb[0];
float saturation = hsb[1];
float brightness = hsb[2];
if(saturation < 0.3) {
return (brightness < 0.5 ? Color.WHITE : Color.BLACK);
}
// then we get the complementary
return new Color(Color.HSBtoRGB((hue * 360 + 180) % 360 / 360, saturation, brightness));
}
}
|
package com.haxademic.demo.draw.shapes.shader;
import com.haxademic.core.app.P;
import com.haxademic.core.app.PAppletHax;
import com.haxademic.core.app.config.AppSettings;
import com.haxademic.core.app.config.Config;
import com.haxademic.core.data.constants.PBlendModes;
import com.haxademic.core.debug.DebugView;
import com.haxademic.core.draw.color.ColorUtil;
import com.haxademic.core.draw.color.ImageGradient;
import com.haxademic.core.draw.context.PG;
import com.haxademic.core.draw.image.ImageCacher;
import com.haxademic.core.draw.image.ImageUtil;
import com.haxademic.core.draw.particle.ParticleLauncherGPU;
import com.haxademic.core.hardware.depthcamera.DepthSilhouetteSmoothed;
import com.haxademic.core.hardware.depthcamera.cameras.DepthCamera;
import com.haxademic.core.hardware.depthcamera.cameras.DepthCamera.DepthCameraType;
import com.haxademic.core.hardware.depthcamera.cameras.IDepthCamera;
import com.haxademic.core.hardware.mouse.Mouse;
import com.haxademic.core.math.MathUtil;
import com.haxademic.core.ui.UI;
import processing.core.PGraphics;
import processing.core.PImage;
/**
 * Demo sketch: launches GPU particles from a smoothed depth-camera
 * silhouette and composites silhouette, particles and the RGB camera feed
 * to the screen each frame.
 */
public class Demo_VertexShader_GPUParticlesLauncher_DepthSilhouette
extends PAppletHax {
    public static void main(String args[]) { arguments = args; PAppletHax.main(Thread.currentThread().getStackTrace()[1].getClassName()); }

    // Smoothed depth-camera silhouette used as the particle launch map.
    protected DepthSilhouetteSmoothed depthSilhouetteSmoothed;
    // GPU particle system launched from non-black silhouette pixels.
    protected ParticleLauncherGPU gpuParticles;
    // Aspect-ratio-corrected, scaled-down copy of the silhouette.
    protected PGraphics silhouetteCropped;
    // Offscreen layer the particles are rendered into.
    protected PGraphics shapesLayer;
    // Gradient texture that colors the particles.
    protected ImageGradient imageGradient;
    // Launch particles every N frames (1 = every frame).
    protected int FRAME_LAUNCH_INTERVAL = 1;
    // UI slider keys.
    protected String UI_PARTICLES_LAUNCH_ATTEMPTS = "UI_MAX_LAUNCHES_PER_FRAME";
    protected String UI_PARTICLES_POINT_SIZE = "UI_PARTICLES_POINT_SIZE";
    protected String UI_SILHOUETTE_ALPHA = "UI_SILHOUETTE_ALPHA";

    // Configures the app window size before startup.
    protected void config() {
        Config.setProperty(AppSettings.WIDTH, 1280 );
        Config.setProperty(AppSettings.HEIGHT, 720 );
    }

    // One-time setup: depth camera, buffers, gradient, particle system, UI.
    protected void firstFrame () {
        // init depth cam
        DepthCamera.instance(DepthCameraType.Realsense);
        IDepthCamera depthCamera = DepthCamera.instance().camera;
        depthSilhouetteSmoothed = new DepthSilhouetteSmoothed(depthCamera, 6);
        depthSilhouetteSmoothed.buildUI(false);
        DebugView.setTexture("depthBuffer", depthSilhouetteSmoothed.depthBuffer());
        DebugView.setTexture("avgBuffer", depthSilhouetteSmoothed.avgBuffer());
        DebugView.setTexture("image", depthSilhouetteSmoothed.image());
        // build buffer to paste silhouette into that matches the aspect ratio of the app
        float silhouetteCropScaleDown = 0.25f;
        silhouetteCropped = PG.newPG(P.round(p.width * silhouetteCropScaleDown), P.round(p.height * silhouetteCropScaleDown), false, false);
        DebugView.setTexture("silhouetteCropped", silhouetteCropped);
        // build gradient
        imageGradient = new ImageGradient(ImageGradient.PASTELS());
        imageGradient.addTexturesFromPath(ImageGradient.COOLORS_PATH);
        imageGradient.randomGradientTexture();
        // build particles launcher
        shapesLayer = PG.newPG(p.width, p.height);
        // gpuParticles = new ParticleLauncherGPU(256, "haxademic/shaders/point/particle-launcher-fizz-frag.glsl");
        // NOTE(review): absolute Windows-only asset path — this will fail on
        // any other machine; consider a project-relative data path.
        PImage particle = p.loadImage("D:\\workspace\\pepsi-nitro-wall\\_assets\\court-design-assets\\bubble_tex_02_alpha_00000_00000.png");
        gpuParticles = new ParticleLauncherGPU(512, "haxademic/shaders/point/particle-launcher-fizz-frag.glsl", "haxademic/shaders/vertex/particles-launcher-textured-frag.glsl", "haxademic/shaders/vertex/particles-launcher-textured-vert.glsl", particle);
        DebugView.setValue("gpuParticles.vertices()", gpuParticles.numParticles());
        DebugView.setTexture("gpuParticles.positionBuffer()", gpuParticles.positionBuffer());
        DebugView.setTexture("gpuParticles.colorBuffer()", gpuParticles.colorBuffer());
        UI.addTitle("GPUParticlesLauncher");
        UI.addSlider(UI_PARTICLES_LAUNCH_ATTEMPTS, 1700, 1, 5000, 10, false);
        UI.addSlider(UI_PARTICLES_POINT_SIZE, 3, 0, 50, 0.1f, false);
        UI.addSlider(UI_SILHOUETTE_ALPHA, 0.1f, 0, 1, 0.01f, false);
    }

    // Per-frame: sample silhouette pixels, launch particles, simulate,
    // color, and composite all layers to the screen.
    protected void drawApp() {
        // set up context
        p.background(0);
        // TEMP REMOVE
        // NOTE(review): videoVLC is not declared in this class (presumably
        // inherited from PAppletHax); marked "TEMP REMOVE" above — confirm
        // whether this draw call should ship.
        ImageUtil.drawImageCropFill(videoVLC, p.g, true);
        // copy silhouette to aspect-ratio-corrected copy
        // and prepare pixels data
        depthSilhouetteSmoothed.update();
        ImageUtil.cropFillCopyImage(depthSilhouetteSmoothed.image(), silhouetteCropped, true);
        silhouetteCropped.loadPixels();
        // launch! need to open & close the position buffer where we're writing new launch pixels
        int startLaunchTime = p.millis();
        gpuParticles.beginLaunch();
        // look for non-black pixels to launch from
        // since we're using a scaled-down map for efficiency, scale x/y positions back up
        PGraphics launchMap = silhouetteCropped;
        float scaleLaunchMapToScreen = MathUtil.scaleToTarget(launchMap.width, p.width);
        if(p.frameCount % FRAME_LAUNCH_INTERVAL == 0) {
            int numLaunched = 0;
            int launches = UI.valueInt(UI_PARTICLES_LAUNCH_ATTEMPTS);
            for (int i = 0; i < launches; i++) {
                // sample a random pixel of the launch map
                int checkX = MathUtil.randRange(0, launchMap.width);
                int checkY = MathUtil.randRange(0, launchMap.height);
                int pixelColor = ImageUtil.getPixelColor(launchMap, checkX, checkY);
                float redColor = (float) ColorUtil.redFromColorInt(pixelColor) / 255f;
                // NOTE(review): numLaunched < launches is always true inside
                // this loop (i < launches bounds it) — redundant guard.
                if(redColor > 0.3f && numLaunched < launches) {
                    gpuParticles.launch(shapesLayer, checkX * scaleLaunchMapToScreen, checkY * scaleLaunchMapToScreen);
                    numLaunched++;
                }
            }
        }
        gpuParticles.endLaunch();
        DebugView.setValue("launchTime", p.millis() - startLaunchTime);
        // update particles buffers
        int startUpdateTime = p.millis();
        gpuParticles.updateSimulation();
        DebugView.setValue("updateTime", p.millis() - startUpdateTime);
        // update particles color map
        if(Mouse.xNorm < 0.5f) {
            ImageUtil.copyImage(ImageGradient.SPARKS_FLAMES(), gpuParticles.colorBuffer());
            // ImageUtil.copyImage(ImageGradient.BLACK_HOLE(), gpuParticles.colorBuffer());
        } else {
            gpuParticles.colorBuffer().beginDraw();
            gpuParticles.colorBuffer().background(255);
            gpuParticles.colorBuffer().endDraw();
        }
        // update/draw particles
        shapesLayer.beginDraw();
        shapesLayer.background(0,0);
        shapesLayer.blendMode(PBlendModes.ADD);
        gpuParticles.pointSize(UI.value(UI_PARTICLES_POINT_SIZE));
        gpuParticles.renderTo(shapesLayer, true);
        shapesLayer.endDraw();
        // draw to screen
        // silhouette
        p.blendMode(PBlendModes.BLEND);
        PG.setPImageAlpha(p, UI.value(UI_SILHOUETTE_ALPHA));
        p.image(silhouetteCropped, 0, 0, silhouetteCropped.width * scaleLaunchMapToScreen, silhouetteCropped.height * scaleLaunchMapToScreen);
        PG.resetPImageAlpha(p);
        // particles
        p.blendMode(PBlendModes.DIFFERENCE);
        p.image(shapesLayer, 0, 0);
        p.blendMode(PBlendModes.BLEND);
        // draw rgb camera
        PImage cameraSource = DepthCamera.instance().camera.getRgbImage();
        float camScale = 0.2f;
        float camW = cameraSource.width * camScale;
        float camH = cameraSource.height * camScale;
        p.image(cameraSource, p.width - camW - 20, 20, camW, camH);
    }

    // Spacebar picks a new random gradient texture.
    public void keyPressed() {
        super.keyPressed();
        if(p.key == ' ') imageGradient.randomGradientTexture();
    }
}
|
package org.wyona.yanel.impl.resources.search;
import org.wyona.yanel.core.attributes.viewable.View;
import org.wyona.yanel.impl.resources.BasicXMLResource;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import org.apache.log4j.Logger;
import org.wyona.meguni.parser.Parser;
import org.wyona.meguni.util.ResultSet;
import org.apache.avalon.framework.configuration.Configuration;
import org.apache.avalon.framework.configuration.ConfigurationUtil;
/**
* Search resource
*/
public class SearchResource extends BasicXMLResource {
private static Logger log = Logger.getLogger(SearchResource.class);
private static String PROVIDER_NAME = "provider";
private static String QUERY_NAME = "q";
private static String DOMAIN_NAME = "domain";
/**
* @see org.wyona.yanel.core.api.attributes.ViewableV2#getView(String)
*/
public View getView(String viewId) throws Exception {
String provider = getRequest().getParameter(PROVIDER_NAME);
if (provider != null && !provider.equals("yanel")) {
ExternalSearchProvider esp = getExternalSearchProvider(provider);
if (esp != null) {
View view = new View();
view.setResponse(false); // this resource writes the response itself
javax.servlet.http.HttpServletResponse response = getResponse();
response.setStatus(307);
String query = getRequest().getParameter(QUERY_NAME);
String domain = getRequest().getParameter(DOMAIN_NAME);
String site="";
if (domain != null) site = "+site:" + domain; // TODO: This will work for Google and bing, but is this true for all search engines?
response.setHeader("Location", esp.getURL() + query + site);
return view;
}
}
return super.getView(viewId);
}
/*
* @see org.wyona.yanel.impl.resources.BasicXMLResource#getContentXML(String)
*/
protected InputStream getContentXML(String viewId) throws Exception {
if (log.isDebugEnabled()) {
log.debug("requested viewId: " + viewId);
}
StringBuilder sb = new StringBuilder("<?xml version=\"1.0\"?>");
sb.append("<y:search xmlns:y=\"http:
String query = getRequest().getParameter(QUERY_NAME);
String provider = getRequest().getParameter(PROVIDER_NAME);
if (query != null && query.length() > 0) {
sb.append("<y:query>" + query + "</y:query>");
try {
Result[] results;
if (provider != null) {
if (provider.equals("yanel")) {
results = getLocalResults(query);
} else if (provider.equals("google")) {
results = getGoogleResults(query);
} else if (provider.equals("bing")) {
results = getMSNResults(query);
} else {
results = getLocalResults(query);
log.warn("No such provider: " + provider);
}
} else {
results = getLocalResults(query);
provider = "yanel";
log.warn("No search provider specified!");
}
sb.append("<y:provider id=\"" + provider + "\">" + provider + "</y:provider>");
if (results != null && results.length > 0) {
sb.append("<y:results>");
for (int i = 0; i < results.length; i++) {
sb.append("<y:result url=\"" + results[i].getURL() + "\">");
if (results[i].getTitle() != null) {
sb.append(" <y:title>" + results[i].getTitle() + "</y:title>");
} else {
sb.append(" <y:no-title/>");
}
sb.append("</y:result>");
}
sb.append("</y:results>");
}
} catch(org.wyona.yarep.core.search.SearchException e) {
log.error(e, e);
sb.append("<y:exception>" + e.getMessage() + "</y:exception>");
}
} else {
sb.append("<y:no-query/>");
}
sb.append("</y:search>");
return new ByteArrayInputStream(sb.toString().getBytes());
}
private Result[] getLocalResults(String query) throws Exception {
if (query != null && query.length() > 0) {
org.wyona.yarep.core.Node[] nodes = getRealm().getRepository().getSearcher().search(query);
if (nodes != null && nodes.length > 0) {
Result[] results = new Result[nodes.length];
for (int i = 0; i < nodes.length; i++) {
results[i] = new Result(nodes[i].getPath(), null, null, nodes[i].getMimeType());
}
return results;
} else {
log.info("Nothing found for query: " + query);
return new Result[0];
}
}
log.warn("No query specified!");
return new Result[0];
}
/**
 * Delegates the query to an external Google parser.
 *
 * The parser implementation class can be overridden via the "parser"
 * resource configuration property; otherwise the GoogleParser is used.
 *
 * @param query query string forwarded to the parser
 * @return one Result (URL only) per parsed hit, or an empty array
 * @throws Exception if the parser class cannot be loaded or parsing fails
 */
private Result[] getGoogleResults(String query) throws Exception {
    String className = getResourceConfigProperty("parser");
    if (className == null) {
        className = "org.wyona.meguni.parser.impl.GoogleParser";
    }
    Parser parser = (Parser) Class.forName(className).newInstance();
    ResultSet resultSet = parser.parse(query);
    if (resultSet == null || resultSet.size() <= 0) {
        return new Result[0];
    }
    Result[] results = new Result[resultSet.size()];
    for (int i = 0; i < results.length; i++) {
        results[i] = new Result(resultSet.get(i).url.toString(), null, null, null);
    }
    return results;
}
/**
 * Delegates the query to an external MSN/Bing parser.
 *
 * The parser implementation class can be overridden via the "parser"
 * resource configuration property; otherwise the MSNParser is used.
 *
 * @param query query string forwarded to the parser
 * @return one Result (URL only) per parsed hit, or an empty array
 * @throws Exception if the parser class cannot be loaded or parsing fails
 */
private Result[] getMSNResults(String query) throws Exception {
    String className = getResourceConfigProperty("parser");
    if (className == null) {
        className = "org.wyona.meguni.parser.impl.MSNParser";
    }
    Parser parser = (Parser) Class.forName(className).newInstance();
    ResultSet resultSet = parser.parse(query);
    if (resultSet == null || resultSet.size() <= 0) {
        return new Result[0];
    }
    Result[] results = new Result[resultSet.size()];
    for (int i = 0; i < results.length; i++) {
        results[i] = new Result(resultSet.get(i).url.toString(), null, null, null);
    }
    return results;
}
/**
 * Always reports this resource as existing.
 *
 * @see org.wyona.yanel.core.api.attributes.ViewableV2#exists()
 */
public boolean exists() throws Exception {
    // The search page itself always exists, independent of any query or results.
    return true;
}
/**
 * Looks up an external search provider by id within the resource's custom
 * configuration ("external-search-providers"/"provider" elements).
 *
 * @param providerId the "id" attribute value to match
 * @return a provider built from the matching configuration entry, or null
 *         when there is no custom configuration or no entry matches
 * @throws Exception if the configuration cannot be read
 */
private ExternalSearchProvider getExternalSearchProvider(String providerId) throws Exception {
    org.w3c.dom.Document customConfigDoc = getConfiguration().getCustomConfiguration();
    if (customConfigDoc == null) {
        return null;
    }
    Configuration config = ConfigurationUtil.toConfiguration(customConfigDoc.getDocumentElement());
    Configuration[] providers = config.getChild("external-search-providers").getChildren("provider");
    for (Configuration provider : providers) {
        if (provider.getAttribute("id").equals(providerId)) {
            return new ExternalSearchProvider(providerId, provider.getAttribute("url"), null);
        }
    }
    return null;
}
}
/**
 * Value object describing an external search provider (e.g. Google or Bing)
 * as configured in the resource's custom configuration.
 *
 * Fix: the original constructor silently discarded the {@code id} and
 * {@code label} arguments; they are now stored and exposed via getters.
 */
class ExternalSearchProvider {

    private String id;
    private String url;
    private String label;

    /**
     * @param id unique provider id (matches the "id" attribute in the configuration)
     * @param url base URL of the external search service
     * @param label human-readable label for the provider (may be null)
     */
    public ExternalSearchProvider(String id, String url, String label) {
        this.id = id;
        this.url = url;
        this.label = label;
    }

    /**
     * @return unique provider id, as passed to the constructor
     */
    public String getId() {
        return id;
    }

    /**
     * @return base URL of the external search service
     */
    public String getURL() {
        return url;
    }

    /**
     * @return human-readable label, or null if none was configured
     */
    public String getLabel() {
        return label;
    }
}
|
package ca.corefacility.bioinformatics.irida.pipeline.upload.galaxy;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ca.corefacility.bioinformatics.irida.exceptions.ExecutionManagerDownloadException;
import ca.corefacility.bioinformatics.irida.exceptions.ExecutionManagerException;
import ca.corefacility.bioinformatics.irida.exceptions.ExecutionManagerObjectNotFoundException;
import ca.corefacility.bioinformatics.irida.exceptions.UploadException;
import ca.corefacility.bioinformatics.irida.exceptions.WorkflowException;
import ca.corefacility.bioinformatics.irida.exceptions.galaxy.GalaxyDatasetException;
import ca.corefacility.bioinformatics.irida.exceptions.galaxy.GalaxyDatasetNotFoundException;
import ca.corefacility.bioinformatics.irida.exceptions.galaxy.NoGalaxyHistoryException;
import ca.corefacility.bioinformatics.irida.model.workflow.execution.InputFileType;
import ca.corefacility.bioinformatics.irida.model.workflow.execution.galaxy.DatasetCollectionType;
import ca.corefacility.bioinformatics.irida.model.workflow.execution.galaxy.GalaxyWorkflowStatus;
import ca.corefacility.bioinformatics.irida.pipeline.upload.ExecutionManagerSearch;
import ca.corefacility.bioinformatics.irida.pipeline.upload.Uploader.DataStorage;
import com.github.jmchilton.blend4j.galaxy.HistoriesClient;
import com.github.jmchilton.blend4j.galaxy.ToolsClient;
import com.github.jmchilton.blend4j.galaxy.ToolsClient.FileUploadRequest;
import com.github.jmchilton.blend4j.galaxy.beans.Dataset;
import com.github.jmchilton.blend4j.galaxy.beans.History;
import com.github.jmchilton.blend4j.galaxy.beans.HistoryContents;
import com.github.jmchilton.blend4j.galaxy.beans.HistoryContentsProvenance;
import com.github.jmchilton.blend4j.galaxy.beans.HistoryDataset;
import com.github.jmchilton.blend4j.galaxy.beans.HistoryDataset.Source;
import com.github.jmchilton.blend4j.galaxy.beans.HistoryDetails;
import com.github.jmchilton.blend4j.galaxy.beans.Library;
import com.github.jmchilton.blend4j.galaxy.beans.collection.request.CollectionDescription;
import com.github.jmchilton.blend4j.galaxy.beans.collection.request.CollectionElement;
import com.github.jmchilton.blend4j.galaxy.beans.collection.request.HistoryDatasetElement;
import com.github.jmchilton.blend4j.galaxy.beans.collection.response.CollectionResponse;
import com.sun.jersey.api.client.ClientHandlerException;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.UniformInterfaceException;
/**
* Class for working with Galaxy Histories.
* @author Aaron Petkau <[email protected]>
*
*/
public class GalaxyHistoriesService implements ExecutionManagerSearch<History, String> {

    private static final Logger logger = LoggerFactory
            .getLogger(GalaxyHistoriesService.class);

    // blend4j clients/services this class delegates all Galaxy REST calls to.
    private HistoriesClient historiesClient;
    private ToolsClient toolsClient;
    private GalaxyLibrariesService librariesService;

    // Element names Galaxy expects for the two reads of a paired dataset collection.
    private static final String FORWARD_PAIR_NAME = "forward";
    private static final String REVERSE_PAIR_NAME = "reverse";

    // Base name for paired elements ("file0", "file1", ...) and the name given to
    // every collection built by this service.
    private static final String BASE_NAME = "file";
    private static final String COLLECTION_NAME = "collection";

    /**
     * Builds a new GalaxyHistoriesService for working with Galaxy Histories.
     * @param historiesClient The HistoriesClient for interacting with Galaxy histories.
     * @param toolsClient The ToolsClient for interacting with tools in Galaxy.
     * @param librariesService A service for dealing with Galaxy libraries.
     */
    public GalaxyHistoriesService(HistoriesClient historiesClient,
            ToolsClient toolsClient, GalaxyLibrariesService librariesService) {
        checkNotNull(historiesClient, "historiesClient is null");
        checkNotNull(toolsClient, "toolsClient is null");
        checkNotNull(librariesService, "librariesService is null");

        this.historiesClient = historiesClient;
        this.toolsClient = toolsClient;
        this.librariesService = librariesService;
    }

    /**
     * Creates a new History for running a workflow.
     * The history is named with a freshly generated random UUID string.
     * @return A new History for running a workflow.
     */
    public History newHistoryForWorkflow() {
        History history = new History();
        history.setName(UUID.randomUUID().toString());
        return historiesClient.create(history);
    }

    /**
     * Given a history id returns the status for the given workflow.
     *
     * @param historyId
     *            The history id to use to find a workflow.
     * @return The WorkflowStatus for the given workflow.
     * @throws ExecutionManagerException
     *             If there was an exception when attempting to get the status
     *             for a history.
     */
    public GalaxyWorkflowStatus getStatusForHistory(String historyId) throws ExecutionManagerException {
        checkNotNull(historyId, "historyId is null");

        try {
            HistoryDetails details = historiesClient.showHistory(historyId);
            logger.trace("Details for history " + details.getId() + ": state=" + details.getState());

            return GalaxyWorkflowStatus.builder(details).build();
        } catch (ClientHandlerException | UniformInterfaceException e) {
            // Communication failures with Galaxy are surfaced as workflow errors.
            throw new WorkflowException(e);
        }
    }

    /**
     * Transfers a dataset from a Galaxy library into a history for a workflow.
     * @param libraryFileId The id of a file within a Galaxy library.
     * @param history The history to transfer this library dataset into.
     * @return A HistoryDetails object describing the details of the created history dataset.
     */
    public HistoryDetails libraryDatasetToHistory(String libraryFileId, History history) {
        checkNotNull(libraryFileId, "libraryFileId is null");
        checkNotNull(history, "history is null");

        HistoryDataset historyDataset = new HistoryDataset();
        historyDataset.setSource(Source.LIBRARY);
        historyDataset.setContent(libraryFileId);

        return historiesClient.createHistoryDataset(
                history.getId(), historyDataset);
    }

    /**
     * Uploads a file to a given history.
     * @param path The path to the file to upload.
     * @param fileType The file type of the file to upload.
     * @param history The history to upload the file into.
     * @return A Dataset object for the uploaded file.
     * @throws UploadException If there was an issue uploading the file to Galaxy.
     * @throws GalaxyDatasetException If there was an issue finding the corresponding Dataset for the file
     *     in the history.
     */
    public Dataset fileToHistory(Path path, InputFileType fileType, History history) throws UploadException, GalaxyDatasetException {
        checkNotNull(path, "path is null");
        checkNotNull(fileType, "fileType is null");
        checkNotNull(history, "history is null");
        checkNotNull(history.getId(), "history id is null");
        checkState(path.toFile().exists(), "path " + path + " does not exist");

        File file = path.toFile();

        FileUploadRequest uploadRequest = new FileUploadRequest(history.getId(), file);
        uploadRequest.setFileType(fileType.toString());

        ClientResponse clientResponse =
                toolsClient.uploadRequest(uploadRequest);

        if (clientResponse == null) {
            throw new UploadException("Could not upload " + file + " to history " + history.getId() +
                    " ClientResponse is null");
        } else if (!ClientResponse.Status.OK.equals(clientResponse.getClientResponseStatus())) {
            String message = "Could not upload " + file + " to history " + history.getId() +
                    " ClientResponse: " + clientResponse.getClientResponseStatus() + " " +
                    clientResponse.getEntity(String.class);
            logger.error(message);
            throw new UploadException(message);
        } else {
            // Galaxy's upload response does not directly return the dataset, so
            // look it up in the history by the uploaded file's name.
            return getDatasetForFileInHistory(file.getName(), history.getId());
        }
    }

    /**
     * Uploads a set of files to a given history through the given library.
     *
     * @param paths
     *            The set of paths to upload.
     * @param fileType
     *            The file type of the file to upload.
     * @param history
     *            The history to upload the file into.
     * @param library
     *            The library to initially upload the file into.
     * @param dataStorage
     *            The type of DataStorage strategy to use.
     * @return An {@link Map} of paths and ids for each dataset object in this
     *         history.
     * @throws UploadException
     *             If there was an issue uploading the file to Galaxy.
     */
    public Map<Path, String> filesToLibraryToHistory(Set<Path> paths,
            InputFileType fileType, History history, Library library,
            DataStorage dataStorage) throws UploadException {
        checkNotNull(paths, "paths is null");

        Map<Path, String> datasetIdsMap = new HashMap<>();

        // Stage the files into the Galaxy library first; this call blocks until done.
        Map<Path, String> datasetLibraryIdsMap = librariesService
                .filesToLibraryWait(paths, fileType, library, dataStorage);

        // Sanity check: every requested path must have a corresponding library dataset.
        if (datasetLibraryIdsMap.size() != paths.size()) {
            throw new UploadException(
                    "Error: datasets uploaded to a Galaxy library are not the same size ("
                            + datasetLibraryIdsMap.size()
                            + ") as the paths to upload (" + paths.size() + ")");
        }

        try {
            // Link each library dataset into the target history.
            for (Path path : datasetLibraryIdsMap.keySet()) {
                String datasetLibraryId = datasetLibraryIdsMap.get(path);

                HistoryDetails historyDetails = libraryDatasetToHistory(
                        datasetLibraryId, history);

                logger.debug("Transfered library dataset " + datasetLibraryId
                        + " to history " + history.getId() + " dataset id "
                        + historyDetails.getId());

                datasetIdsMap.put(path, historyDetails.getId());
            }
        } catch (RuntimeException e) {
            throw new UploadException(e);
        }

        return datasetIdsMap;
    }

    /**
     * Uploads a list of files into the given history.
     * @param dataFiles The list of files to upload.
     * @param inputFileType The type of files to upload.
     * @param history The history to upload the files into.
     * @return A list of Datasets describing each uploaded file.
     * @throws UploadException If an error occurred uploading the file.
     * @throws GalaxyDatasetException If there was an issue finding the corresponding dataset for
     *     the file in the history
     */
    public List<Dataset> uploadFilesListToHistory(List<Path> dataFiles,
            InputFileType inputFileType, History history) throws UploadException, GalaxyDatasetException {
        checkNotNull(dataFiles, "dataFiles is null");
        checkNotNull(inputFileType, "inputFileType is null");
        checkNotNull(history, "history is null");

        List<Dataset> inputDatasets = new LinkedList<Dataset>();

        // Upload sequentially; order of the returned list matches dataFiles.
        for (Path file : dataFiles) {
            Dataset inputDataset = fileToHistory(file, inputFileType, history);
            inputDatasets.add(inputDataset);
        }

        return inputDatasets;
    }

    /**
     * Constructs a collection containing a list of files from the given datasets.
     * Forward/reverse datasets are paired up positionally (index i with index i).
     * @param inputDatasetsForward The forward datasets to construct a collection from.
     * @param inputDatasetsReverse The reverse datasets to construct a collection from.
     * @param history The history to construct the collection within.
     * @return A CollectionResponse describing the dataset collection.
     * @throws ExecutionManagerException If an exception occurred constructing the collection.
     */
    public CollectionResponse constructPairedFileCollection(List<Dataset> inputDatasetsForward,
            List<Dataset> inputDatasetsReverse, History history) throws ExecutionManagerException {
        checkNotNull(inputDatasetsForward, "inputDatasetsForward is null");
        checkNotNull(inputDatasetsReverse, "inputDatasetsReverse is null");
        checkNotNull(history, "history is null");
        checkNotNull(history.getId(), "history does not have an associated id");
        checkArgument(inputDatasetsForward.size() == inputDatasetsReverse.size(),
                "inputDatasets do not have equal sizes");

        CollectionDescription collectionDescription = new CollectionDescription();
        collectionDescription.setCollectionType(DatasetCollectionType.LIST_PAIRED.toString());
        collectionDescription.setName(COLLECTION_NAME);

        for (int i = 0; i < inputDatasetsForward.size(); i++) {
            Dataset datasetForward = inputDatasetsForward.get(i);
            Dataset datasetReverse = inputDatasetsReverse.get(i);

            HistoryDatasetElement elementForward = new HistoryDatasetElement();
            elementForward.setId(datasetForward.getId());
            elementForward.setName(FORWARD_PAIR_NAME);

            HistoryDatasetElement elementReverse = new HistoryDatasetElement();
            elementReverse.setId(datasetReverse.getId());
            elementReverse.setName(REVERSE_PAIR_NAME);

            // Create an object to link together the forward and reverse reads for file2
            CollectionElement element = new CollectionElement();
            element.setName(BASE_NAME + i);
            element.setCollectionType(DatasetCollectionType.PAIRED.toString());
            element.addCollectionElement(elementForward);
            element.addCollectionElement(elementReverse);

            collectionDescription.addDatasetElement(element);
        }

        try {
            return historiesClient.createDatasetCollection(history.getId(), collectionDescription);
        } catch (RuntimeException e) {
            throw new ExecutionManagerException("Could not construct dataset collection", e);
        }
    }

    /**
     * Builds a new Dataset Collection given the description of this collection.
     * @param collectionDescription A description of the collection to build.
     * @param history The history to build the collection within.
     * @return A CollectionResponse describing the constructed collection.
     * @throws ExecutionManagerException If there was an issue constructing the collection.
     */
    public CollectionResponse constructCollection(CollectionDescription collectionDescription,
            History history) throws ExecutionManagerException {
        checkNotNull(collectionDescription, "collectionDescription is null");
        checkNotNull(history, "history is null");

        try {
            return historiesClient.createDatasetCollection(history.getId(), collectionDescription);
        } catch (RuntimeException e) {
            throw new ExecutionManagerException("Could not construct dataset collection", e);
        }
    }

    /**
     * Constructs a collection containing a list of datasets within a history.
     * @param datasets The datasets to construct a collection around.
     * @param history The history to construct the collection within.
     * @return A CollectionResponse describing the dataset collection.
     * @throws ExecutionManagerException If an exception occurred constructing the collection.
     */
    public CollectionResponse constructCollectionList(List<Dataset> datasets,
            History history) throws ExecutionManagerException {
        checkNotNull(datasets, "datasets is null");
        checkNotNull(history, "history is null");
        checkNotNull(history.getId(), "history does not have an associated id");

        CollectionDescription collectionDescription = new CollectionDescription();
        collectionDescription.setCollectionType(DatasetCollectionType.LIST.toString());
        collectionDescription.setName(COLLECTION_NAME);

        for (Dataset dataset : datasets) {
            HistoryDatasetElement element = new HistoryDatasetElement();
            element.setId(dataset.getId());
            element.setName(dataset.getName());

            collectionDescription.addDatasetElement(element);
        }

        return constructCollection(collectionDescription, history);
    }

    /**
     * Gets a Dataset object for a file with the given name in the given history.
     * Fails if the name is ambiguous (more than one matching dataset) or absent.
     * @param filename The name of the file to get a Dataset object for.
     * @param historyId The history id to look for the dataset.
     * @return The corresponding dataset for the given file name.
     * @throws GalaxyDatasetException If there was an issue when searching for a dataset.
     */
    public Dataset getDatasetForFileInHistory(String filename, String historyId) throws GalaxyDatasetException {
        checkNotNull(filename, "filename is null");
        checkNotNull(historyId, "historyId is null");

        List<HistoryContents> historyContentsList =
                historiesClient.showHistoryContents(historyId);

        List<HistoryContents> matchingHistoryContents = historyContentsList.stream().
                filter((historyContents) -> filename.equals(historyContents.getName())).collect(Collectors.toList());

        // if more than one matching history item
        if (matchingHistoryContents.size() > 1) {
            // NOTE(review): this produces a trailing comma before ']' (e.g. "[a,b,]").
            String historyIds = "[";
            for (HistoryContents content : matchingHistoryContents) {
                historyIds += content.getId() + ",";
            }
            historyIds += "]";
            throw new GalaxyDatasetException("Found " + matchingHistoryContents.size() + " datasets for file "
                    + filename + ": " + historyIds);
        } else if (matchingHistoryContents.size() == 1) {
            String dataId = matchingHistoryContents.get(0).getId();
            if (dataId != null) {
                Dataset dataset = historiesClient.showDataset(historyId, dataId);
                if (dataset != null) {
                    return dataset;
                }
            }
        }

        throw new GalaxyDatasetNotFoundException("dataset for file " + filename +
                " not found in Galaxy history " + historyId);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public History findById(String id)
            throws ExecutionManagerObjectNotFoundException {
        checkNotNull(id, "id is null");

        // Linear scan of all histories; Galaxy's API here lists rather than fetches by id.
        List<History> galaxyHistories = historiesClient.getHistories();

        if (galaxyHistories != null) {
            Optional<History> h = galaxyHistories.stream().
                    filter((history) -> id.equals(history.getId())).findFirst();
            if (h.isPresent()) {
                return h.get();
            }
        }

        throw new NoGalaxyHistoryException("No history for id " + id);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean exists(String id) {
        try {
            return findById(id) != null;
        } catch (ExecutionManagerObjectNotFoundException e) {
            // Absence is an expected outcome here, not an error.
            return false;
        }
    }

    /**
     * Given a particular dataset id within a Galaxy history download this
     * dataset to the local filesystem.
     *
     * @param historyId
     *            The id of the history containing the dataset.
     * @param datasetId
     *            The id of the dataset to download.
     * @param destination
     *            The destination to download a file to (will overwrite any
     *            existing content).
     * @throws IOException
     *             If there was an error downloading the file.
     * @throws ExecutionManagerDownloadException
     *             If there was an issue downloading the dataset.
     */
    public void downloadDatasetTo(String historyId, String datasetId,
            Path destination) throws IOException, ExecutionManagerDownloadException {
        checkNotNull(historyId, "historyId is null");
        checkNotNull(datasetId, "datasetId is null");
        checkNotNull(destination, "destination is null");

        try {
            historiesClient.downloadDataset(historyId, datasetId,
                    destination.toFile());
        } catch (RuntimeException e) {
            throw new ExecutionManagerDownloadException(
                    "Could not download dataset identified by historyId="
                            + historyId + ", datasetId=" + datasetId
                            + " to destination=" + destination, e);
        }
    }

    /**
     * Show the history contents for the specified history identifier
     *
     * @param historyId the identifier to show the history contents for.
     *
     * @return the history contents for the specified identifier.
     * @throws ExecutionManagerException on failure to communicate with Galaxy.
     */
    public List<HistoryContents> showHistoryContents(final String historyId) throws ExecutionManagerException {
        try {
            return historiesClient.showHistoryContents(historyId);
        } catch (RuntimeException e) {
            throw new ExecutionManagerException("Couldn't load history contents for id [" + historyId + "]", e);
        }
    }

    /**
     * Show the history provenance contents for the specified history identifiers.
     *
     * @param historyId the identifier to show the history contents for.
     * @param historyProvenanceId the step in the execution to show provenance for.
     *
     * @return the history provenance contents for the specified identifiers.
     * @throws ExecutionManagerException on failure to communicate with Galaxy.
     */
    public HistoryContentsProvenance showProvenance(final String historyId, final String historyProvenanceId) throws ExecutionManagerException {
        try {
            return historiesClient.showProvenance(historyId, historyProvenanceId);
        } catch (RuntimeException e) {
            throw new ExecutionManagerException(e);
        }
    }
}
|
package galaxyspace.systems.SolarSystem.planets.overworld.tile;
import java.util.HashSet;
import java.util.List;
import javax.annotation.Nullable;
import galaxyspace.api.tile.ITileEffects;
import galaxyspace.core.registers.items.GSItems;
import micdoodle8.mods.galacticraft.api.entity.IAntiGrav;
import micdoodle8.mods.galacticraft.api.item.IArmorGravity;
import micdoodle8.mods.galacticraft.api.transmission.NetworkType;
import micdoodle8.mods.galacticraft.api.vector.BlockVec3Dim;
import micdoodle8.mods.galacticraft.api.world.IGalacticraftWorldProvider;
import micdoodle8.mods.galacticraft.core.dimension.WorldProviderSpaceStation;
import micdoodle8.mods.galacticraft.core.energy.tile.EnergyStorageTile;
import micdoodle8.mods.galacticraft.core.energy.tile.TileBaseElectricBlockWithInventory;
import micdoodle8.mods.galacticraft.core.util.ConfigManagerCore;
import micdoodle8.mods.miccore.Annotations.NetworkedField;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.ItemStackHelper;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.network.NetworkManager;
import net.minecraft.network.play.server.SPacketUpdateTileEntity;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.EnumParticleTypes;
import net.minecraft.util.NonNullList;
import net.minecraft.util.math.AxisAlignedBB;
import net.minecraft.world.World;
import net.minecraftforge.fml.relauncher.Side;
public class TileEntityGravitationModule extends TileBaseElectricBlockWithInventory implements ITileEffects {

    // NOTE(review): appears unused in this class — processTimeRequired is
    // initialised to 1 in the field declaration instead; confirm intent.
    public static int PROCESS_TIME_REQUIRED = 130;

    // Ticks per process cycle, synced to the client.
    @NetworkedField(targetSide = Side.CLIENT)
    public int processTimeRequired = 1;

    // NOTE(review): this field is static, so it is shared by ALL gravitation
    // modules, yet it is annotated @NetworkedField and read/written through
    // `this.` as if it were per-tile state — verify this is intended.
    @NetworkedField(targetSide = Side.CLIENT)
    public static int processTicks = 0;

    // Positions (with dimension id) of all loaded gravitation modules.
    // NOTE(review): raw HashSet construction — should be `new HashSet<>()`.
    public static HashSet<BlockVec3Dim> loadedTiles = new HashSet();

    // Half-width (in blocks) of the gravity zone on the X/Z axes.
    private int radius;
    private AxisAlignedBB aabb;
    private boolean initialised = false;
    // NOTE(review): static flag shared across all instances; toggled in update()/smeltItem().
    public static boolean check = false;

    // Whether the client should render the boundary particle effects.
    public boolean shouldRenderEffects = false;

    public TileEntityGravitationModule() {
        super("tile.gravitation_module.name");
        this.initialised = true;
        this.radius = 4;
        this.storage.setCapacity(15000);
        this.storage.setMaxExtract(ConfigManagerCore.hardMode ? 60 : 45);
        // Slot 0 plus 4 upgrade slots.
        this.inventory = NonNullList.withSize(1 + 4, ItemStack.EMPTY);
    }

    // Register this tile's position on the server when it becomes valid.
    @Override
    public void validate()
    {
        super.validate();
        if (!this.world.isRemote) this.loadedTiles.add(new BlockVec3Dim(this));
    }

    // Also register on chunk load (validate/onLoad can both fire; the set de-duplicates).
    @Override
    public void onLoad() {
        if (!this.world.isRemote)
            this.loadedTiles.add(new BlockVec3Dim(this));
    }

    // Unregister when the containing chunk unloads.
    @Override
    public void onChunkUnload() {
        if (!this.world.isRemote)
            this.loadedTiles.remove(new BlockVec3Dim(this));
        super.onChunkUnload();
    }

    // Unregister when the tile entity is invalidated (e.g. block broken).
    @Override
    public void invalidate() {
        if (!this.world.isRemote) {
            this.loadedTiles.remove(new BlockVec3Dim(this));
        }
        super.invalidate();
    }

    // Full-NBT sync packet sent to clients tracking this tile.
    @Override
    @Nullable
    public SPacketUpdateTileEntity getUpdatePacket()
    {
        NBTTagCompound nbttagcompound = new NBTTagCompound();
        writeToNBT(nbttagcompound);
        return new SPacketUpdateTileEntity(pos, 1, nbttagcompound);
    }

    // Client side: apply the NBT payload from getUpdatePacket().
    @Override
    public void onDataPacket(NetworkManager net, SPacketUpdateTileEntity packet) {
        super.onDataPacket(net, packet);
        readFromNBT(packet.getNbtCompound());
    }

    @Override
    public void update() {
        super.update();

        //Freezing
        if (this.world.rand.nextInt(4) == 0)
            this.world.notifyLightSet(this.getPos());

        if (this.canProcess() && !this.disabled) {
            if (this.hasEnoughEnergyToRun) {
                // Count energy-upgrade items in the 4 upgrade slots (slots 1..4);
                // each one lowers the max energy draw.
                int energy_boost = 0;
                for(int i = 0; i <= 3; i++)
                {
                    if(this.getInventory().get(1 + i).isItemEqual(new ItemStack(GSItems.UPGRADES, 1, 3)))
                        energy_boost++;
                }

                this.storage.setMaxExtract(ConfigManagerCore.hardMode ? 90 + 60 - (20 * energy_boost) : 75 + 55 - (15 * energy_boost));

                if (this.processTicks == 0) {
                    this.processTicks = this.processTimeRequired;
                } else {
                    // Count down; when the cycle completes, apply the gravity effect and restart.
                    if (--this.processTicks <= 0) {
                        this.smeltItem();
                        this.processTicks = this.processTimeRequired;
                        // this.world.setBlockMetadataWithNotify(xCoord, yCoord, zCoord, 5, 2);
                    }
                }
            } else if (this.processTicks > 0 && this.processTicks < this.processTimeRequired) {
                // Apply a "cooling down" process if the electric furnace runs out of energy
                // while smelting
                if (this.world.rand.nextInt(4) == 0) {
                    this.processTicks++;
                }
                check = false;
            }
            /*
             * else { if(this.getBlockMetadata() > 4)
             * this.world.setBlockMetadataWithNotify(xCoord, yCoord, zCoord, 0, 2); }
             */
            // BlockGravitationModule.updateFurnaceBlockState(this.hasEnoughEnergyToRun,
            // this.worldObj, this.xCoord, this.yCoord, this.zCoord);
        } else {
            // if(this.getBlockMetadata() > 4) this.world.setBlockMetadataWithNotify(xCoord,
            // yCoord, zCoord, 0, 2);
            this.processTicks = 0;
        }
    }

    // NOTE(review): static and always true, but invoked through `this.` in
    // update()/shouldUseEnergy() as if it were an instance predicate — verify.
    public static boolean canProcess()
    {
        return true;
    }

    /**
     * Applies the gravity-compensation effect to entities inside this module's
     * zone (a box radius-wide on X/Z, from 4 below to 16 above the module).
     * Despite the name (inherited furnace terminology) no smelting happens here.
     */
    public void smeltItem() {
        aabb = new AxisAlignedBB(this.getPos().getX() - getGravityRadius(), this.getPos().getY() - 4,
                this.getPos().getZ() - getGravityRadius(),
                this.getPos().getX() + getGravityRadius(), this.getPos().getY() + 16,
                this.getPos().getZ() + getGravityRadius());

        if (this.world.provider instanceof IGalacticraftWorldProvider) {
            // Compensation strength: fixed for space stations, otherwise derived
            // from the dimension's gravity value.
            final double g;
            if (this.world.provider instanceof WorldProviderSpaceStation)
                g = 0.80665D;
            else
                g = (1.0 - ((IGalacticraftWorldProvider) world.provider).getGravity()) / 0.08F;

            //if(getGravityRadius() > 14) g /= 2;
            final List list = world.getEntitiesWithinAABB(Entity.class, aabb);

            if(!world.isRemote) {
                // Server side: cancel fall distance and pull non-player living
                // entities downward; anti-grav entities are exempt.
                for (Object e : list) {
                    if(e instanceof IAntiGrav) continue;
                    //Iterator iterator = list.iterator();
                    Entity entity = (Entity) e;
                    /*if(!(entity instanceof EntityPlayer))
                    {
                        entity.addVelocity(0.0D, g / 1000, 0.0D);
                    }*/
                    entity.fallDistance -= g * 10.0F;
                    if(e instanceof EntityLivingBase && !(e instanceof EntityPlayer))
                        ((EntityLivingBase)e).motionY -= (g / 200);

                    if(entity.fallDistance < 0) {
                        entity.fallDistance = 0.0F;
                    }
                }
            }
            else
            {
                // Client side: apply the downward pull to living entities, skipping
                // flying players and players wearing gravity-overriding boots (slot 0).
                for(Object e: list) {
                    if(e instanceof EntityLivingBase) {
                        EntityLivingBase living = (EntityLivingBase)e;
                        if(e instanceof EntityPlayer) {
                            EntityPlayer p = (EntityPlayer)living;
                            if (p.capabilities.isFlying)
                                continue;
                            if (!p.inventory.armorItemInSlot(0).isEmpty()
                                    && p.inventory.armorItemInSlot(0).getItem() instanceof IArmorGravity
                                    && ((IArmorGravity) p.inventory.armorItemInSlot(0).getItem())
                                            .gravityOverrideIfLow(p) > 0)
                                continue;
                        }
                        living.motionY -= (g / 200);
                    }
                }
            }

            check = true;

            // Client-only: outline the zone's walls with particles along its X and Z faces.
            if (this.shouldRenderEffects && world.isRemote) {
                for (int yy = -4; yy < 16; yy++) {
                    for (int ix = -getGravityRadius(); ix <= getGravityRadius() + 1; ix++) {
                        if(ix == -getGravityRadius() || ix == getGravityRadius() + 1 || yy == 15 || yy == -4) {
                            world.spawnParticle(EnumParticleTypes.CRIT_MAGIC, this.getPos().getX() + ix + this.world.rand.nextFloat() - 0.5F,
                                    this.getPos().getY() + yy + this.world.rand.nextFloat() - 0.5F,
                                    this.getPos().getZ() - getGravityRadius() + this.world.rand.nextFloat() - 0.5F, 0.0D, 0.0D,
                                    0.0D);
                            world.spawnParticle(EnumParticleTypes.CRIT_MAGIC, this.getPos().getX() + ix + this.world.rand.nextFloat() - 0.5F,
                                    this.getPos().getY() + yy + this.world.rand.nextFloat() - 0.5F,
                                    this.getPos().getZ() + getGravityRadius() + 1 + this.world.rand.nextFloat() - 0.5F, 0.0D, 0.0D,
                                    0.0D);
                        }
                    }

                    for (int iz = -getGravityRadius(); iz <= getGravityRadius() + 1; iz++) {
                        if(iz == -getGravityRadius() || iz == getGravityRadius() + 1 || yy == 15 || yy == -4) {
                            world.spawnParticle(EnumParticleTypes.CRIT_MAGIC,
                                    this.getPos().getX() - getGravityRadius() + this.world.rand.nextFloat() - 0.5F,
                                    this.getPos().getY() + yy + this.world.rand.nextFloat() - 0.5F,
                                    this.getPos().getZ() + iz + this.world.rand.nextFloat() - 0.5F, 0.0D, 0.0D, 0.0D);
                            world.spawnParticle(EnumParticleTypes.CRIT_MAGIC,
                                    this.getPos().getX() + getGravityRadius() + 1 + this.world.rand.nextFloat() - 0.5F,
                                    this.getPos().getY() + yy + this.world.rand.nextFloat() - 0.5F,
                                    this.getPos().getZ() + iz + this.world.rand.nextFloat() - 0.5F, 0.0D, 0.0D, 0.0D);
                        }
                    }
                }
            }
        }
    }

    @Override
    public void readFromNBT(NBTTagCompound par1NBTTagCompound)
    {
        super.readFromNBT(par1NBTTagCompound);
        if (this.storage.getEnergyStoredGC() > EnergyStorageTile.STANDARD_CAPACITY)
        {
            this.initialised = true;
        }
        else
            this.initialised = false;

        this.processTicks = par1NBTTagCompound.getInteger("smeltingTicks");
        ItemStackHelper.loadAllItems(par1NBTTagCompound, this.getInventory());

        // Radius 0 is treated as "unset"; fall back to 1.
        if(par1NBTTagCompound.hasKey("gravityradius")) {
            int grav = par1NBTTagCompound.getInteger("gravityradius");
            this.setGravityRadius(grav == 0 ? 1 : grav);
        }
    }

    @Override
    public NBTTagCompound writeToNBT(NBTTagCompound par1NBTTagCompound)
    {
        // Clamp stored energy for tier-1 modules before serialising.
        if (this.tierGC == 1 && this.storage.getEnergyStoredGC() > EnergyStorageTile.STANDARD_CAPACITY)
            this.storage.setEnergyStored(EnergyStorageTile.STANDARD_CAPACITY);

        super.writeToNBT(par1NBTTagCompound);
        par1NBTTagCompound.setInteger("smeltingTicks", this.processTicks);
        // Persisted radius is capped at 16.
        par1NBTTagCompound.setInteger("gravityradius", radius > 16 ? 16 : radius);

        ItemStackHelper.saveAllItems(par1NBTTagCompound, this.getInventory());

        return par1NBTTagCompound;
    }

    // Any item is accepted in any slot.
    @Override
    public boolean isItemValidForSlot(int slotID, ItemStack itemStack)
    {
        return true;
    }

    @Override
    public boolean shouldUseEnergy()
    {
        return this.canProcess();
    }

    @Override
    public void setEffectsVisible(boolean shouldRender) {
        this.shouldRenderEffects = shouldRender;
    }

    @Override
    public boolean getEffectsVisible() {
        return this.shouldRenderEffects;
    }

    public void setGravityRadius(int radius)
    {
        this.radius = radius;
    }

    public int getGravityRadius()
    {
        return this.radius;
    }

    @Override
    public EnumFacing getFront() {
        return EnumFacing.DOWN;
    }

    // Power can only be fed in from below.
    @Override
    public EnumFacing getElectricInputDirection() {
        return EnumFacing.DOWN;
    }

    @Override
    public boolean canConnect(EnumFacing direction, NetworkType type) {
        if (direction == null) {
            return false;
        }
        if (type == NetworkType.POWER) {
            return direction == this.getElectricInputDirection();
        }
        return false;
    }

    /**
     * Checks whether the player is strictly inside this module's gravity zone.
     * NOTE(review): the `world` parameter is unused; bounds use the same box
     * shape as smeltItem() but with exclusive comparisons on every edge.
     */
    public boolean inGravityZone(World world, EntityPlayer player)
    {
        if(player.posX > this.pos.getX() - getGravityRadius() &&
                player.posY > this.pos.getY() - 4 &&
                player.posZ > this.pos.getZ() - getGravityRadius() &&
                player.posX < this.pos.getX() + getGravityRadius() &&
                player.posY < this.pos.getY() + 16 &&
                player.posZ < this.pos.getZ() + getGravityRadius())
            return true;

        return false;
    }

    // NOTE(review): returns null rather than an empty array — callers of
    // ISidedInventory#getSlotsForFace typically expect non-null; verify.
    @Override
    public int[] getSlotsForFace(EnumFacing side) {
        return null;
    }
}
|
package io.github.opencubicchunks.cubicchunks.core.asm.mixin.core.common;
import io.github.opencubicchunks.cubicchunks.api.world.IColumn;
import io.github.opencubicchunks.cubicchunks.api.world.ICube;
import io.github.opencubicchunks.cubicchunks.api.world.IHeightMap;
import io.github.opencubicchunks.cubicchunks.core.asm.mixin.ICubicWorldInternal;
import io.github.opencubicchunks.cubicchunks.core.world.column.CubeMap;
import io.github.opencubicchunks.cubicchunks.core.world.cube.Cube;
import mcp.MethodsReturnNonnullByDefault;
import net.minecraft.world.World;
import net.minecraft.world.chunk.Chunk;
import org.spongepowered.asm.mixin.Final;
import org.spongepowered.asm.mixin.Implements;
import org.spongepowered.asm.mixin.Interface;
import org.spongepowered.asm.mixin.Intrinsic;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Shadow;
import javax.annotation.ParametersAreNonnullByDefault;
import java.util.Collection;
/**
* Implements the IColumn interface
*/
@ParametersAreNonnullByDefault
@MethodsReturnNonnullByDefault
@Mixin(value = Chunk.class, priority = 2000)
@Implements(@Interface(iface = IColumn.class, prefix = "chunk$"))
/**
 * Mixin that makes the vanilla {@link Chunk} implement the cubic-chunks
 * {@link IColumn} interface, backing it with a {@link CubeMap} of cubes and
 * a one-entry cube cache.
 */
@ParametersAreNonnullByDefault
@MethodsReturnNonnullByDefault
@Mixin(value = Chunk.class, priority = 2000)
@Implements(@Interface(iface = IColumn.class, prefix = "chunk$"))
public abstract class MixinChunk_Column implements IColumn {

    /*
     * WARNING: WHEN YOU RENAME ANY OF THESE 3 FIELDS RENAME CORRESPONDING
     * FIELDS IN "cubicchunks.asm.mixin.core.common.MixinChunk_Cubes" and
     * "cubicchunks.asm.mixin.core.client.MixinChunk_Cubes".
     */
    // Vertical storage of the cubes loaded for this column.
    private CubeMap cubeMap;
    // Height/opacity index for this column.
    private IHeightMap opacityIndex;
    // Single-entry cube cache, populated via preCacheCube(); consulted before
    // going to the world's cube cache.
    private Cube cachedCube;

    @Shadow @Final public int z;
    @Shadow @Final public int x;
    @Shadow @Final private World world;
    @Shadow public boolean unloadQueued;
    @Shadow @Final private int[] heightMap;

    // Returns the loaded cube at cubeY, preferring the cached cube when its Y matches.
    @Override public Cube getLoadedCube(int cubeY) {
        if (cachedCube != null && cachedCube.getY() == cubeY) {
            return cachedCube;
        }
        return getWorld().getCubeCache().getLoadedCube(x, cubeY, z);
    }

    // Returns the cube at cubeY from the cube cache, preferring the cached cube.
    @Override public Cube getCube(int cubeY) {
        if (cachedCube != null && cachedCube.getY() == cubeY) {
            return cachedCube;
        }
        return getWorld().getCubeCache().getCube(x, cubeY, z);
    }

    @Override public void addCube(ICube cube) {
        this.cubeMap.put((Cube) cube);
    }

    // Removes the cube at cubeY; drops the cached cube first if it is the one removed.
    @Override public Cube removeCube(int cubeY) {
        if (cachedCube != null && cachedCube.getY() == cubeY) {
            invalidateCachedCube();
        }
        return this.cubeMap.remove(cubeY);
    }

    private void invalidateCachedCube() {
        cachedCube = null;
    }

    @Override public boolean hasLoadedCubes() {
        return !cubeMap.isEmpty();
    }

    // Unchecked cast: assumes the shadowed world is a cubic world — the mixin
    // target is only expected to be used in cubic-chunks worlds.
    public <T extends World & ICubicWorldInternal> T getWorld() {
        return (T) this.world;
    }

    // A column should tick if any of its cubes holds a ticket that requires ticking.
    @Override public boolean shouldTick() {
        for (Cube cube : cubeMap) {
            if (cube.getTickets().shouldTick()) {
                return true;
            }
        }
        return false;
    }

    @Override public IHeightMap getOpacityIndex() {
        return this.opacityIndex;
    }

    @Override public Collection getLoadedCubes() {
        return this.cubeMap.all();
    }

    @Override public Iterable getLoadedCubes(int startY, int endY) {
        return this.cubeMap.cubes(startY, endY);
    }

    // Installs the given cube in the one-entry cache used by getCube/getLoadedCube.
    @Override public void preCacheCube(ICube cube) {
        this.cachedCube = (Cube) cube;
    }

    @Override public int getX() {
        return x;
    }

    @Override public int getZ() {
        return z;
    }

    // Height lookup ignores blockY entirely; heightMap is indexed by local
    // coordinates as (localZ << 4 | localX).
    @Override
    public int getHeightValue(int localX, int blockY, int localZ) {
        return this.heightMap[localZ << 4 | localX];
    }

    // @Intrinsic lets this coexist with vanilla Chunk's own getHeightValue(x, z)
    // while also satisfying the IColumn interface.
    @Intrinsic
    @Override
    public int getHeightValue(int localX, int localZ) {
        return this.heightMap[localZ << 4 | localX];
    }
}
|
package org.spongepowered.common.mixin.exploit;
import net.minecraft.entity.Entity;
import net.minecraft.nbt.NBTBase;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.world.World;
import net.minecraft.world.chunk.Chunk;
import net.minecraft.world.chunk.storage.AnvilChunkLoader;
import org.apache.logging.log4j.Level;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.ModifyArg;
import org.spongepowered.asm.mixin.injection.Redirect;
import org.spongepowered.asm.mixin.injection.Slice;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import org.spongepowered.common.SpongeImpl;
import java.util.ArrayList;
import java.util.List;
@Mixin(AnvilChunkLoader.class)
/**
 * Mixin into {@link AnvilChunkLoader} that stops entities whose position is
 * not actually inside the chunk being saved from being written into that
 * chunk's NBT (a vanilla bug), then force-updates the skipped entities so
 * they end up in their correct chunk.
 */
@Mixin(AnvilChunkLoader.class)
public class MixinAnvilChunkLoader_FilterInvalidEntities {

    // Entities skipped during the current chunk save because their position
    // was outside the chunk; drained in onWriteEntities().
    private List<Entity> toUpdate = new ArrayList<>();

    // Reset the skip list at the start of each chunk write so entities
    // recorded for a previous chunk are not re-processed.
    @Inject(method = "writeChunkToNBT", at = @At(value = "INVOKE", target = "Lnet/minecraft/world/WorldProvider;hasSkyLight()Z", shift = At.Shift.AFTER))
    private void onWriteChunk(Chunk chunkIn, World worldIn, NBTTagCompound compound, CallbackInfo ci) {
        this.toUpdate.clear();
    }

    /**
     * Replaces Entity#writeToNBTOptional during chunk save.
     *
     * <p>If the entity's chunk coordinates (floor(pos) >> 4) do not match the
     * chunk being written, the entity is skipped and remembered for a forced
     * position update later. Dead entities are also skipped. Otherwise the
     * vanilla write proceeds unchanged.
     */
    @Redirect(method = "writeChunkToNBT", at = @At(value = "INVOKE", target = "Lnet/minecraft/entity/Entity;writeToNBTOptional(Lnet/minecraft/nbt/NBTTagCompound;)Z"))
    private boolean onEntityWriteCheckForValidPosition(Entity entity, NBTTagCompound compound, Chunk chunkIn, World worldIn, NBTTagCompound chunkCompound) {
        if ((int) Math.floor(entity.posX) >> 4 != chunkIn.x || (int) Math.floor(entity.posZ) >> 4 != chunkIn.z) {
            SpongeImpl.getLogger().log(Level.WARN, "{} is not in this chunk, skipping save. This is a bug fix to a vanilla bug. Do not report this to Sponge or Forge please.", entity);
            this.toUpdate.add(entity);
            // Instead of telling the entity to write to the optional, we just return false.
            // then the compound is not added to the list, nor is the compound actually used
            // and we artificially continue.
            return false;
        }
        if (entity.isDead) {
            return false;
        }
        return entity.writeToNBTOptional(compound);
    }

    /**
     * Wraps NBTTagCompound#setTag within the entity-writing slice. When the
     * "Entities" tag is being written, first force-update every skipped
     * entity so it migrates to its correct chunk, then write the tag as
     * normal. Any other key is written through unchanged.
     */
    @Redirect(
        method = "writeChunkToNBT",
        at = @At(
            value = "INVOKE",
            target = "Lnet/minecraft/nbt/NBTTagCompound;setTag(Ljava/lang/String;Lnet/minecraft/nbt/NBTBase;)V"
        ),
        slice = @Slice(
            from = @At(
                value = "CONSTANT",
                args = "stringValue=Entities"
            ),
            to = @At(
                value = "INVOKE",
                target = "Lnet/minecraft/world/chunk/Chunk;getTileEntityMap()Ljava/util/Map;"
            )
        )
    )
    private void onWriteEntities(NBTTagCompound nbtTagCompound, String key, NBTBase value, Chunk chunk, World world, NBTTagCompound chunkCompound) {
        if (!"Entities".equals(key)) {
            nbtTagCompound.setTag(key, value);
            return;
        }
        if (!this.toUpdate.isEmpty()) {
            for (Entity entity : this.toUpdate) {
                world.updateEntityWithOptionalForce(entity, false);
            }
            this.toUpdate.clear();
        }
        nbtTagCompound.setTag(key, value);
    }
}
|
package org.sagebionetworks.web.unitclient.presenter;
import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyList;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.sagebionetworks.web.client.place.ACTDataAccessSubmissionsPlace.ACCESS_REQUIREMENT_ID_PARAM;
import static org.sagebionetworks.web.client.place.ACTDataAccessSubmissionsPlace.MAX_DATE_PARAM;
import static org.sagebionetworks.web.client.place.ACTDataAccessSubmissionsPlace.MIN_DATE_PARAM;
import static org.sagebionetworks.web.client.presenter.ACTDataAccessSubmissionsPresenter.SHOW_AR_TEXT;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.sagebionetworks.repo.model.ManagedACTAccessRequirement;
import org.sagebionetworks.repo.model.RestrictableObjectDescriptor;
import org.sagebionetworks.repo.model.dataaccess.Submission;
import org.sagebionetworks.repo.model.dataaccess.SubmissionOrder;
import org.sagebionetworks.repo.model.dataaccess.SubmissionPage;
import org.sagebionetworks.repo.model.dataaccess.SubmissionState;
import org.sagebionetworks.repo.model.file.FileHandleAssociateType;
import org.sagebionetworks.repo.model.file.FileHandleAssociation;
import org.sagebionetworks.web.client.DataAccessClientAsync;
import org.sagebionetworks.web.client.GWTWrapper;
import org.sagebionetworks.web.client.GlobalApplicationState;
import org.sagebionetworks.web.client.PortalGinInjector;
import org.sagebionetworks.web.client.place.ACTDataAccessSubmissionsPlace;
import org.sagebionetworks.web.client.presenter.ACTDataAccessSubmissionsPresenter;
import org.sagebionetworks.web.client.utils.Callback;
import org.sagebionetworks.web.client.view.ACTDataAccessSubmissionsView;
import org.sagebionetworks.web.client.widget.Button;
import org.sagebionetworks.web.client.widget.FileHandleWidget;
import org.sagebionetworks.web.client.widget.LoadMoreWidgetContainer;
import org.sagebionetworks.web.client.widget.accessrequirements.ManagedACTAccessRequirementWidget;
import org.sagebionetworks.web.client.widget.accessrequirements.SubjectsWidget;
import org.sagebionetworks.web.client.widget.accessrequirements.submission.ACTDataAccessSubmissionWidget;
import org.sagebionetworks.web.client.widget.entity.controller.SynapseAlert;
import org.sagebionetworks.web.test.helper.AsyncMockStubber;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.i18n.client.DateTimeFormat;
import com.google.gwt.i18n.client.DateTimeFormat.PredefinedFormat;
import com.google.gwt.user.client.rpc.AsyncCallback;
import com.google.gwt.user.client.ui.IsWidget;
/**
 * Unit tests for {@link ACTDataAccessSubmissionsPresenter}: construction-time
 * view wiring, loading access-requirement data and submission pages,
 * projected-expiration rendering, and load failure handling.
 */
public class ACTDataAccessSubmissionsPresenterTest {
    ACTDataAccessSubmissionsPresenter presenter;
    @Mock
    ACTDataAccessSubmissionsPlace mockPlace;
    @Mock
    ACTDataAccessSubmissionsView mockView;
    @Mock
    SynapseAlert mockSynAlert;
    @Mock
    PortalGinInjector mockGinInjector;
    @Mock
    GlobalApplicationState mockGlobalApplicationState;
    @Mock
    LoadMoreWidgetContainer mockLoadMoreContainer;
    @Mock
    ManagedACTAccessRequirementWidget mockACTAccessRequirementWidget;
    @Mock
    Button mockButton;
    @Mock
    FileHandleWidget mockDucTemplateFileHandleWidget;
    @Mock
    DataAccessClientAsync mockDataAccessClient;
    @Mock
    ManagedACTAccessRequirement mockACTAccessRequirement;
    @Mock
    SubmissionPage mockDataAccessSubmissionPage;
    @Mock
    Submission mockDataAccessSubmission;
    @Captor
    ArgumentCaptor<FileHandleAssociation> fhaCaptor;
    @Mock
    ACTDataAccessSubmissionWidget mockACTDataAccessSubmissionWidget;
    @Mock
    SubjectsWidget mockSubjectsWidget;
    @Mock
    List<RestrictableObjectDescriptor> mockSubjects;
    @Mock
    GWTWrapper mockGWT;
    @Mock
    DateTimeFormat mockDateTimeFormat;
    public static final String FILE_HANDLE_ID = "9999";
    public static final Long AR_ID = 76555L;
    public static final String NEXT_PAGE_TOKEN = "abc678";

    @Before
    public void setup() {
        MockitoAnnotations.initMocks(this);
        when(mockGWT.getDateTimeFormat(any(PredefinedFormat.class))).thenReturn(mockDateTimeFormat);
        presenter = new ACTDataAccessSubmissionsPresenter(mockView, mockSynAlert, mockGinInjector, mockGlobalApplicationState, mockLoadMoreContainer, mockACTAccessRequirementWidget, mockButton, mockDucTemplateFileHandleWidget, mockDataAccessClient, mockSubjectsWidget, mockGWT);
        // Happy-path stubs: the data access client succeeds with one submission page.
        AsyncMockStubber.callSuccessWith(mockACTAccessRequirement).when(mockDataAccessClient).getAccessRequirement(anyLong(), any(AsyncCallback.class));
        AsyncMockStubber.callSuccessWith(mockDataAccessSubmissionPage).when(mockDataAccessClient).getDataAccessSubmissions(anyLong(), anyString(), any(SubmissionState.class), any(SubmissionOrder.class), anyBoolean(), any(AsyncCallback.class));
        when(mockDataAccessSubmissionPage.getResults()).thenReturn(Collections.singletonList(mockDataAccessSubmission));
        when(mockDataAccessSubmissionPage.getNextPageToken()).thenReturn(NEXT_PAGE_TOKEN);
        when(mockACTAccessRequirement.getDucTemplateFileHandleId()).thenReturn(FILE_HANDLE_ID);
        when(mockACTAccessRequirement.getId()).thenReturn(AR_ID);
        when(mockGinInjector.getACTDataAccessSubmissionWidget()).thenReturn(mockACTDataAccessSubmissionWidget);
        when(mockACTAccessRequirement.getSubjectIds()).thenReturn(mockSubjects);
    }

    /** Verifies that construction wires all widgets and handlers into the view. */
    @Test
    public void testConstruction() {
        verify(mockView).setStates(anyList());
        verify(mockButton).setText(SHOW_AR_TEXT);
        verify(mockView).setAccessRequirementUIVisible(false);
        verify(mockView).setSynAlert(any(IsWidget.class));
        verify(mockView).setAccessRequirementWidget(any(IsWidget.class));
        verify(mockView).setLoadMoreContainer(any(IsWidget.class));
        verify(mockView).setShowHideButton(any(IsWidget.class));
        verify(mockView).setPresenter(presenter);
        verify(mockLoadMoreContainer).configure(any(Callback.class));
        verify(mockButton).addClickHandler(any(ClickHandler.class));
    }

    /** Verifies the full load path: AR fetch, DUC widget config, view flags, paging. */
    @Test
    public void testLoadData() {
        String time1 = "8765";
        String time2 = "5678";
        when(mockPlace.getParam(MIN_DATE_PARAM)).thenReturn(time1);
        when(mockPlace.getParam(MAX_DATE_PARAM)).thenReturn(time2);
        when(mockPlace.getParam(ACCESS_REQUIREMENT_ID_PARAM)).thenReturn(AR_ID.toString());
        when(mockACTAccessRequirement.getDucTemplateFileHandleId()).thenReturn(FILE_HANDLE_ID);
        when(mockACTAccessRequirement.getAreOtherAttachmentsRequired()).thenReturn(true);
        Long expirationPeriod = 0L;
        when(mockACTAccessRequirement.getExpirationPeriod()).thenReturn(expirationPeriod);
        when(mockACTAccessRequirement.getIsCertifiedUserRequired()).thenReturn(true);
        when(mockACTAccessRequirement.getIsDUCRequired()).thenReturn(false);
        when(mockACTAccessRequirement.getIsIDUPublic()).thenReturn(true);
        when(mockACTAccessRequirement.getIsIRBApprovalRequired()).thenReturn(false);
        when(mockACTAccessRequirement.getIsValidatedProfileRequired()).thenReturn(true);
        presenter.setPlace(mockPlace);
        verify(mockDataAccessClient).getAccessRequirement(eq(AR_ID), any(AsyncCallback.class));
        // verify duc template file handle widget is configured properly (based on act duc file handle id)
        verify(mockDucTemplateFileHandleWidget).configure(fhaCaptor.capture());
        FileHandleAssociation fha = fhaCaptor.getValue();
        assertEquals(FileHandleAssociateType.AccessRequirementAttachment, fha.getAssociateObjectType());
        assertEquals(AR_ID.toString(), fha.getAssociateObjectId());
        assertEquals(FILE_HANDLE_ID, fha.getFileHandleId());
        verify(mockSubjectsWidget).configure(mockSubjects, true);
        verify(mockView).setAreOtherAttachmentsRequired(true);
        verify(mockView).setExpirationPeriod(expirationPeriod);
        verify(mockView).setIsCertifiedUserRequired(true);
        verify(mockView).setIsDUCRequired(false);
        verify(mockView).setIsIDUPublic(true);
        verify(mockView).setIsIRBApprovalRequired(false);
        verify(mockView).setIsValidatedProfileRequired(true);
        verify(mockACTAccessRequirementWidget).setRequirement(mockACTAccessRequirement);
        verify(mockDataAccessClient).getDataAccessSubmissions(anyLong(), eq((String) null), any(SubmissionState.class), any(SubmissionOrder.class), anyBoolean(), any(AsyncCallback.class));
        // verify DataAccessSubmission widget is created/configured for the submission (based on the mockACTAccessRequirement configuration)
        verify(mockGinInjector).getACTDataAccessSubmissionWidget();
        verify(mockACTDataAccessSubmissionWidget).setDucColumnVisible(false);
        verify(mockACTDataAccessSubmissionWidget).setIrbColumnVisible(false);
        verify(mockACTDataAccessSubmissionWidget).setOtherAttachmentsColumnVisible(true);
        verify(mockLoadMoreContainer).setIsMore(true);
        // verify final load of empty page
        // Fix: use the typed Collections.emptyList() instead of the raw
        // Collections.EMPTY_LIST constant (avoids an unchecked assignment).
        when(mockDataAccessSubmissionPage.getResults()).thenReturn(Collections.<Submission>emptyList());
        when(mockDataAccessSubmissionPage.getNextPageToken()).thenReturn(null);
        presenter.loadMore();
        verify(mockDataAccessClient).getDataAccessSubmissions(anyLong(), eq(NEXT_PAGE_TOKEN), any(SubmissionState.class), any(SubmissionOrder.class), anyBoolean(), any(AsyncCallback.class));
        verify(mockLoadMoreContainer).setIsMore(false);
        verify(mockView).setProjectedExpirationDateVisible(false);
        verify(mockView, never()).setProjectedExpirationDateVisible(true);
    }

    /** A non-zero expiration period should show a formatted projected expiration date. */
    @Test
    public void testProjectedExpiration() {
        String formattedDateTime = "In the future";
        when(mockDateTimeFormat.format(any(Date.class))).thenReturn(formattedDateTime);
        when(mockPlace.getParam(ACCESS_REQUIREMENT_ID_PARAM)).thenReturn(AR_ID.toString());
        Long expirationPeriod = 1111L;
        when(mockACTAccessRequirement.getExpirationPeriod()).thenReturn(expirationPeriod);
        presenter.setPlace(mockPlace);
        verify(mockView).setProjectedExpirationDateVisible(false);
        verify(mockView).setProjectedExpirationDateVisible(true);
        verify(mockView).setProjectedExpirationDate(formattedDateTime);
    }

    /** A failed submissions fetch should surface via the alert and stop paging. */
    @Test
    public void testLoadDataFailure() {
        Exception ex = new Exception();
        AsyncMockStubber.callFailureWith(ex).when(mockDataAccessClient).getDataAccessSubmissions(anyLong(), anyString(), any(SubmissionState.class), any(SubmissionOrder.class), anyBoolean(), any(AsyncCallback.class));
        presenter.loadData();
        verify(mockSynAlert).handleException(ex);
        verify(mockLoadMoreContainer).setIsMore(false);
    }
}
|
package stroom.annotation.api;
import stroom.datasource.api.v2.DataSourceField;
import stroom.datasource.api.v2.DataSourceField.DataSourceFieldType;
import stroom.index.shared.IndexConstants;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
 * Field names and {@link DataSourceField} definitions for the annotations
 * query data source, plus a name-keyed lookup map over those fields.
 */
public interface AnnotationDataSource {
    // Permission name associated with annotations.
    String ANNOTATION_PERMISSION = "Annotation";
    // Function token for the current user; presumably expanded at query
    // evaluation time — confirm against the expression engine.
    String CURRENT_USER_FUNCTION = "currentUser()";
    String NAMESPACE = "annotation";
    // All annotation field names are namespaced as "annotation:<Name>".
    String ANNOTATION_FIELD_PREFIX = NAMESPACE + ":";
    String ID = ANNOTATION_FIELD_PREFIX + "Id";
    String CREATED_ON = ANNOTATION_FIELD_PREFIX + "CreatedOn";
    String CREATED_BY = ANNOTATION_FIELD_PREFIX + "CreatedBy";
    String UPDATED_ON = ANNOTATION_FIELD_PREFIX + "UpdatedOn";
    String UPDATED_BY = ANNOTATION_FIELD_PREFIX + "UpdatedBy";
    String TITLE = ANNOTATION_FIELD_PREFIX + "Title";
    String SUBJECT = ANNOTATION_FIELD_PREFIX + "Subject";
    String STATUS = ANNOTATION_FIELD_PREFIX + "Status";
    String ASSIGNED_TO = ANNOTATION_FIELD_PREFIX + "AssignedTo";
    String COMMENT = ANNOTATION_FIELD_PREFIX + "Comment";
    String HISTORY = ANNOTATION_FIELD_PREFIX + "History";

    // Queryable field definitions. STREAM_ID/EVENT_ID reuse the shared index
    // constants rather than the annotation prefix.
    DataSourceField ID_FIELD = new DataSourceField.Builder().name(ID).type(DataSourceFieldType.ID_FIELD).queryable(true).build();
    DataSourceField STREAM_ID_FIELD = new DataSourceField.Builder().name(IndexConstants.STREAM_ID).type(DataSourceFieldType.ID_FIELD).queryable(true).build();
    DataSourceField EVENT_ID_FIELD = new DataSourceField.Builder().name(IndexConstants.EVENT_ID).type(DataSourceFieldType.ID_FIELD).queryable(true).build();
    DataSourceField CREATED_ON_FIELD = new DataSourceField.Builder().name(CREATED_ON).type(DataSourceFieldType.DATE_FIELD).queryable(true).build();
    DataSourceField CREATED_BY_FIELD = new DataSourceField.Builder().name(CREATED_BY).type(DataSourceFieldType.TEXT_FIELD).queryable(true).build();
    DataSourceField UPDATED_ON_FIELD = new DataSourceField.Builder().name(UPDATED_ON).type(DataSourceFieldType.DATE_FIELD).queryable(true).build();
    DataSourceField UPDATED_BY_FIELD = new DataSourceField.Builder().name(UPDATED_BY).type(DataSourceFieldType.TEXT_FIELD).queryable(true).build();
    DataSourceField TITLE_FIELD = new DataSourceField.Builder().name(TITLE).type(DataSourceFieldType.TEXT_FIELD).queryable(true).build();
    DataSourceField SUBJECT_FIELD = new DataSourceField.Builder().name(SUBJECT).type(DataSourceFieldType.TEXT_FIELD).queryable(true).build();
    DataSourceField STATUS_FIELD = new DataSourceField.Builder().name(STATUS).type(DataSourceFieldType.TEXT_FIELD).queryable(true).build();
    DataSourceField ASSIGNED_TO_FIELD = new DataSourceField.Builder().name(ASSIGNED_TO).type(DataSourceFieldType.TEXT_FIELD).queryable(true).build();
    DataSourceField COMMENT_FIELD = new DataSourceField.Builder().name(COMMENT).type(DataSourceFieldType.TEXT_FIELD).queryable(true).build();
    DataSourceField HISTORY_FIELD = new DataSourceField.Builder().name(HISTORY).type(DataSourceFieldType.TEXT_FIELD).queryable(true).build();

    // Complete, ordered list of fields exposed by this data source.
    List<DataSourceField> FIELDS = Arrays.asList(
        ID_FIELD,
        STREAM_ID_FIELD,
        EVENT_ID_FIELD,
        CREATED_ON_FIELD,
        CREATED_BY_FIELD,
        UPDATED_ON_FIELD,
        UPDATED_BY_FIELD,
        TITLE_FIELD,
        SUBJECT_FIELD,
        STATUS_FIELD,
        ASSIGNED_TO_FIELD,
        COMMENT_FIELD,
        HISTORY_FIELD);

    // Lookup of field definition by field name.
    Map<String, DataSourceField> FIELD_MAP = FIELDS.stream().collect(Collectors.toMap(DataSourceField::getName, Function.identity()));
}
|
package stroom.config.global.impl;
import stroom.config.app.AppConfig;
import stroom.config.app.ConfigLocation;
import stroom.config.app.YamlUtil;
import stroom.config.global.impl.validation.ConfigValidator;
import stroom.util.HasHealthCheck;
import stroom.util.config.FieldMapper;
import stroom.util.logging.LogUtil;
import stroom.util.shared.AbstractConfig;
import com.codahale.metrics.health.HealthCheck;
import io.dropwizard.lifecycle.Managed;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import javax.inject.Singleton;
import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardWatchEventKinds;
import java.nio.file.WatchEvent;
import java.nio.file.WatchKey;
import java.nio.file.WatchService;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import static java.nio.file.StandardWatchEventKinds.OVERFLOW;
@Singleton
public class AppConfigMonitor implements Managed, HasHealthCheck {
private static final Logger LOGGER = LoggerFactory.getLogger(AppConfigMonitor.class);
private final AppConfig appConfig;
private final ConfigMapper configMapper;
private final ConfigValidator configValidator;
private final GlobalConfigService globalConfigService;
private final Path configFile;
private final Path dirToWatch;
private final ExecutorService executorService;
private WatchService watchService = null;
private Future<?> watcherFuture = null;
private AtomicBoolean isRunning = new AtomicBoolean(false);
private final boolean isValidFile;
private final AtomicBoolean isFileReadScheduled = new AtomicBoolean(false);
private final List<String> errors = new ArrayList<>();
private static final long DELAY_BEFORE_FILE_READ_MS = 1_000;
@Inject
public AppConfigMonitor(final AppConfig appConfig,
final ConfigLocation configLocation,
final ConfigMapper configMapper,
final ConfigValidator configValidator,
final GlobalConfigService globalConfigService) {
this.appConfig = appConfig;
this.configFile = configLocation.getConfigFilePath();
this.configMapper = configMapper;
this.configValidator = configValidator;
this.globalConfigService = globalConfigService;
if (Files.isRegularFile(configFile)) {
isValidFile = true;
dirToWatch = configFile.getParent();
if (!Files.isDirectory(dirToWatch)) {
throw new RuntimeException(LogUtil.message("{} is not a directory", dirToWatch));
}
executorService = Executors.newSingleThreadExecutor();
} else {
isValidFile = false;
dirToWatch = null;
executorService = null;
}
}
/**
* Starts the object. Called <i>before</i> the application becomes available.
*/
@Override
public void start() {
if (isValidFile) {
try {
startWatcher();
} catch (Exception e) {
// Swallow and log as we don't want to stop the app from starting just for this
errors.add(e.getMessage());
LOGGER.error("Unable to start config file monitor due to [{}]. Changes to {} will not be monitored.",
e.getMessage(), configFile.toAbsolutePath().normalize(), e);
}
} else {
LOGGER.error("Unable to start watcher as {} is not a valid file", configFile.toAbsolutePath().normalize());
}
}
private void startWatcher() throws IOException {
try {
watchService = FileSystems.getDefault().newWatchService();
} catch (IOException e) {
throw new RuntimeException(LogUtil.message("Error creating watch new service, {}", e.getMessage()), e);
}
dirToWatch.register(watchService, StandardWatchEventKinds.ENTRY_MODIFY);
// run the watcher in its own thread else it will block app startup
watcherFuture = executorService.submit(() -> {
WatchKey watchKey = null;
LOGGER.info("Starting config file modification watcher for {}", configFile.toAbsolutePath().normalize());
while (true) {
if (Thread.currentThread().isInterrupted()) {
LOGGER.debug("Thread interrupted, stopping watching directory {}", dirToWatch.toAbsolutePath().normalize());
break;
}
try {
isRunning.compareAndSet(false, true);
// block until the watch service spots a change
watchKey = watchService.take();
} catch (InterruptedException ie) {
Thread.currentThread().interrupt();
// continue to re-use the if block above
continue;
}
for (WatchEvent<?> event : watchKey.pollEvents()) {
if (LOGGER.isDebugEnabled()) {
if (event == null) {
LOGGER.debug("Event is null");
} else {
String name = event.kind() != null ? event.kind().name() : "kind==null";
String type = event.kind() != null ? event.kind().type().getSimpleName() : "kind==null";
LOGGER.debug("Dir watch event {}, {}, {}", name, type, event.context());
}
}
if (event.kind().equals(OVERFLOW)) {
LOGGER.warn("{} event detected breaking out. Retry config file change", OVERFLOW.name());
break;
}
if (event.kind() != null && Path.class.isAssignableFrom(event.kind().type())) {
handleWatchEvent((WatchEvent<Path>) event);
} else {
LOGGER.debug("Not an event we care about");
}
}
boolean isValid = watchKey.reset();
if (!isValid) {
LOGGER.warn("Watch key is no longer valid, the watch service may have been stopped");
break;
}
}
});
}
private void handleWatchEvent(final WatchEvent<Path> pathEvent) {
final WatchEvent.Kind<Path> kind = pathEvent.kind();
// Only trigger on modify events and when count is one to avoid repeated events
if (kind.equals(StandardWatchEventKinds.ENTRY_MODIFY)) {
final Path modifiedFile = dirToWatch.resolve(pathEvent.context());
try {
// we don't care about changes to other files
if (Files.isRegularFile(modifiedFile) && Files.isSameFile(configFile, modifiedFile)) {
LOGGER.info("Change detected to config file {}", configFile.toAbsolutePath().normalize());
scheduleUpdateIfRequired();
}
} catch (IOException e) {
// Swallow error so future changes can be monitored.
LOGGER.error("Error comparing paths {} and {}", configFile, modifiedFile, e);
}
}
}
private synchronized void scheduleUpdateIfRequired() {
// When a file is changed the filesystem can trigger two changes, one to change the file content
// and another to change the file access time. To prevent a duplicate read we delay the read
// a bit so we can have many changes during that delay period but with only one read of the file.
if (isFileReadScheduled.compareAndSet(false, true)) {
LOGGER.info("Scheduling update of application config from file in {}ms", DELAY_BEFORE_FILE_READ_MS);
CompletableFuture.delayedExecutor(DELAY_BEFORE_FILE_READ_MS, TimeUnit.MILLISECONDS)
.execute(() -> {
try {
updateAppConfigFromFile();
} finally {
isFileReadScheduled.set(false);
}
});
}
}
private synchronized void updateAppConfigFromFile() {
final AppConfig newAppConfig;
try {
LOGGER.info("Reading updated config file");
newAppConfig = YamlUtil.readAppConfig(configFile);
final ConfigValidator.Result result = validateNewConfig(newAppConfig);
if (result.hasErrors()) {
LOGGER.error("Unable to update application config from file {} because it failed validation. " +
"Fix the errors and save the file.", configFile.toAbsolutePath().normalize().toString());
} else {
try {
// Don't have to worry about the DB config merging that goes on in DataSourceFactoryImpl
// as that doesn't mutate the config objects
final AtomicInteger updateCount = new AtomicInteger(0);
final FieldMapper.UpdateAction updateAction = (destParent, prop, sourcePropValue, destPropValue) -> {
final String fullPath = ((AbstractConfig)destParent).getFullPath(prop.getName());
LOGGER.info(" Updating config value of {} from [{}] to [{}]",
fullPath, destPropValue, sourcePropValue);
updateCount.incrementAndGet();
};
LOGGER.info("Updating application config from file.");
// Copy changed values from the newly modified appConfig into the guice bound one
FieldMapper.copy(newAppConfig, this.appConfig, updateAction);
// Update the config objects using the DB as the removal of a yaml value may trigger
// a DB value to be effective
LOGGER.info("Completed updating application config from file. Changes: {}", updateCount.get());
globalConfigService.updateConfigObjects();
} catch (Throwable e) {
// Swallow error as we don't want to break the app because the new config is bad
// The admins can fix the problem and let it have another go.
LOGGER.error("Error updating runtime configuration from file {}",
configFile.toAbsolutePath().normalize(), e);
}
}
} catch (Throwable e) {
// Swallow error as we don't want to break the app because the file is bad.
LOGGER.error("Error parsing configuration from file {}",
configFile.toAbsolutePath().normalize(), e);
}
}
private ConfigValidator.Result validateNewConfig(final AppConfig newAppConfig) {
// Decorate the new config tree so it has all the paths,
// i.e. call setBasePath on each branch in the newAppConfig tree so if we get any violations we
// can log their locations with full paths.
ConfigMapper.decorateWithPropertyPaths(newAppConfig);
LOGGER.info("Validating modified config file");
final ConfigValidator.Result result = configValidator.validateRecursively(newAppConfig);
result.handleViolations(ConfigValidator::logConstraintViolation);
LOGGER.info("Completed validation of application configuration, errors: {}, warnings: {}",
result.getErrorCount(),
result.getWarningCount());
return result;
}
/**
* Stops the object. Called <i>after</i> the application is no longer accepting requests.
*
* @throws Exception if something goes wrong.
*/
@Override
public void stop() throws Exception {
if (isValidFile) {
LOGGER.info("Stopping file modification watcher for {}", configFile.toAbsolutePath().normalize());
if (watchService != null) {
watchService.close();
}
if (executorService != null) {
watchService.close();
if (watcherFuture != null && !watcherFuture.isCancelled() && !watcherFuture.isDone()) {
watcherFuture.cancel(true);
}
executorService.shutdown();
}
}
isRunning.set(false);
}
public boolean isRunning() {
return isRunning.get();
}
@Override
public HealthCheck.Result getHealth() {
HealthCheck.ResultBuilder resultBuilder = HealthCheck.Result.builder();
// isRunning will only be true if the file is also present and valid
if (isRunning.get()) {
resultBuilder.healthy();
} else {
resultBuilder
.unhealthy()
.withDetail("errors", errors);
}
return resultBuilder
.withDetail("configFilePath", configFile != null
? configFile.toAbsolutePath().normalize().toString()
: null)
.withDetail("isRunning", isRunning)
.withDetail("isValidFile", isValidFile)
.build();
}
}
|
package com.splicemachine.derby.impl.sql.execute.operations;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.splicemachine.constants.SpliceConstants;
import com.splicemachine.derby.iapi.sql.execute.SpliceNoPutResultSet;
import com.splicemachine.derby.iapi.sql.execute.SpliceOperation;
import com.splicemachine.derby.iapi.sql.execute.SpliceRuntimeContext;
import com.splicemachine.derby.management.StatementInfo;
import com.splicemachine.derby.utils.Exceptions;
import com.splicemachine.utils.SpliceLogUtils;
import org.apache.derby.iapi.error.StandardException;
import org.apache.log4j.Logger;
import java.util.Arrays;
import java.util.List;
import java.util.NavigableMap;
import java.util.concurrent.*;
public class OperationTree {
private static final Logger LOG = Logger.getLogger(OperationTree.class);
private static final ThreadPoolExecutor levelExecutor;
static {
ThreadFactory factory = new ThreadFactoryBuilder().setNameFormat("operation-shuffle-pool-%d")
.setDaemon(true).build();
levelExecutor = new ThreadPoolExecutor(SpliceConstants.maxTreeThreads,
SpliceConstants.maxTreeThreads, 60, TimeUnit.SECONDS,
new SynchronousQueue<Runnable>(),factory,
new ThreadPoolExecutor.CallerRunsPolicy());
}
/**
 * Executes a tree of operations level by level, bottom-up. Levels with more
 * than one shuffle run them in parallel on the shared level executor and
 * wait for all to finish before moving up; the final scan is executed on the
 * root operation.
 *
 * @param operation      root of the operation tree (highest level, owns the final scan)
 * @param runtimeContext runtime context carrying statement info
 * @param useProbe       when true, execute a probe scan instead of a regular scan
 * @return the result set produced by the root operation's scan
 * @throws StandardException if any shuffle or the final scan fails
 */
public static SpliceNoPutResultSet executeTree(SpliceOperation operation, final SpliceRuntimeContext runtimeContext, boolean useProbe) throws StandardException {
    // first form the level Map
    NavigableMap<Integer, List<SpliceOperation>> levelMap = split(operation);
    if (LOG.isDebugEnabled())
        SpliceLogUtils.debug(LOG, "OperationTree levelMap: %s \n\tfor operation %s", levelMap, operation);
    // The levelMap is sorted so that lower level number means higher on the tree, so
    // since we need to execute from bottom up, we go in descending order
    StatementInfo info = runtimeContext.getStatementInfo();
    boolean setStatement = info != null;
    long statementUuid = setStatement ? info.getStatementUuid() : 0L; // 0L: was lowercase 'l', easily misread as '1'
    for (Integer level : levelMap.descendingKeySet()) {
        List<SpliceOperation> levelOps = levelMap.get(level);
        if (levelOps.size() > 1) {
            List<Future<Void>> shuffleFutures = Lists.newArrayListWithCapacity(levelOps.size());
            for (final SpliceOperation opToShuffle : levelOps) {
                if (setStatement)
                    opToShuffle.setStatementId(statementUuid);
                shuffleFutures.add(levelExecutor.submit(new Callable<Void>() {
                    @Override
                    public Void call() throws Exception {
                        opToShuffle.executeShuffle(runtimeContext);
                        return null;
                    }
                }));
            }
            // wait for all operations to complete before proceeding to the next level
            for (Future<Void> future : shuffleFutures) {
                try {
                    future.get();
                } catch (InterruptedException e) {
                    // Restore the interrupt flag (previously swallowed) and
                    // cancel the rest of the level before rethrowing.
                    Thread.currentThread().interrupt();
                    cancelAll(shuffleFutures);
                    throw Exceptions.parseException(e);
                } catch (ExecutionException e) {
                    // One shuffle failed; cancel the remaining futures on this
                    // level (resolves the old TODO) before rethrowing.
                    cancelAll(shuffleFutures);
                    throw Exceptions.parseException(e);
                }
            }
        } else {
            for (SpliceOperation op : levelOps) {
                if (setStatement)
                    op.setStatementId(statementUuid);
                op.executeShuffle(runtimeContext);
            }
        }
    }
    // operation is the highest level, it has the final scan
    if (useProbe)
        return operation.executeProbeScan();
    else
        return operation.executeScan(runtimeContext);
}

/** Best-effort cancellation of any still-running shuffle futures in the list. */
private static void cancelAll(List<Future<Void>> futures) {
    for (Future<Void> future : futures) {
        future.cancel(true);
    }
}
private static NavigableMap<Integer, List<SpliceOperation>> split(SpliceOperation parentOperation) {
NavigableMap<Integer,List<SpliceOperation>> levelMap = Maps.newTreeMap();
if(parentOperation.getNodeTypes().contains(SpliceOperation.NodeType.REDUCE))
levelMap.put(0, Arrays.asList(parentOperation));
split(parentOperation, levelMap, 1);
return levelMap;
}
private static void split(SpliceOperation parentOp,NavigableMap<Integer,List<SpliceOperation>> levelMap, int level){
List<SpliceOperation> levelOps = levelMap.get(level);
List<SpliceOperation> children;
if(parentOp instanceof NestedLoopJoinOperation){
/*
* NestedLoopJoin shouldn't execute a shuffle on it's right side,
* but it SHOULD if there's a shuffle on the left side
*/
children = Arrays.asList(parentOp.getLeftOperation());
}else
children = parentOp.getSubOperations();
for(SpliceOperation child:children){
if(child.getNodeTypes().contains(SpliceOperation.NodeType.REDUCE)){
if(levelOps==null){
levelOps = Lists.newArrayListWithCapacity(children.size());
levelMap.put(level,levelOps);
}
levelOps.add(child);
}
split(child,levelMap,level+1);
}
}
public static int getNumSinks(SpliceOperation topOperation) {
List<SpliceOperation> children = topOperation.getSubOperations();
int numSinks = 0;
for(SpliceOperation child:children){
numSinks+=getNumSinks(child);
}
if(topOperation.getNodeTypes().contains(SpliceOperation.NodeType.REDUCE))
numSinks++;
return numSinks;
}
}
|
package org.sagebionetworks.tool.migration.v3;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sagebionetworks.client.SynapseAdminClient;
import org.sagebionetworks.client.exceptions.SynapseException;
import org.sagebionetworks.repo.model.PaginatedResults;
import org.sagebionetworks.repo.model.migration.CrowdMigrationResult;
import org.sagebionetworks.repo.model.migration.CrowdMigrationResultType;
import org.sagebionetworks.repo.model.migration.MigrationType;
import org.sagebionetworks.repo.model.migration.MigrationTypeCount;
import org.sagebionetworks.repo.model.migration.MigrationTypeCounts;
import org.sagebionetworks.repo.model.status.StackStatus;
import org.sagebionetworks.repo.model.status.StatusEnum;
import org.sagebionetworks.schema.adapter.JSONObjectAdapterException;
import org.sagebionetworks.tool.migration.Progress.BasicProgress;
import org.sagebionetworks.tool.migration.v3.stream.BufferedRowMetadataReader;
import org.sagebionetworks.tool.migration.v3.stream.BufferedRowMetadataWriter;
/**
* The V3 migration client.
* @author jmhill
*
*/
public class MigrationClient {
static private Log log = LogFactory.getLog(MigrationClient.class);
SynapseClientFactory factory;
ExecutorService threadPool;
List<Exception> deferredExceptions;
final int MAX_DEFERRED_EXCEPTIONS = 10;
/**
* New migration client.
* @param factory
*/
public MigrationClient(SynapseClientFactory factory) {
if(factory == null) throw new IllegalArgumentException("Factory cannot be null");
this.factory = factory;
threadPool = Executors.newFixedThreadPool(1);
deferredExceptions = new ArrayList<Exception>();
}
/**
* Migrate all data from the source to destination.
*
* @param finalSynchronize - If finalSynchronize is set to true then source repository will be placed in read-only mode during the migration and left in read-only
* after migration finishes successfully (failures will result in the source returning to read-write).
* If finalSynchronize is set to false, the source repository will remain in READ_WRITE mode during the migration process.
* @throws Exception
*/
public void migrate(boolean finalSynchronize, long batchSize, long timeoutMS, int retryDenominator, boolean deferExceptions) throws Exception {
// First set the destination stack status to down
setDestinationStatus(StatusEnum.DOWN, "Staging is down for data migration");
if(finalSynchronize){
// This is the final synchronize so place the source into read-only mode.
setSourceStatus(StatusEnum.READ_ONLY, "Synapse is in read-only mode for maintenance");
}
try{
this.migrateAllTypes(batchSize, timeoutMS, retryDenominator, deferExceptions);
// After migration is complete, re-enable staging
setDestinationStatus(StatusEnum.READ_WRITE, "Synapse is ready for read/write");
}catch (Exception e){
// If an error occurs the source server must be returned to read-write
if(finalSynchronize){
// This is the final synchronize so place the source into read-only mode.
log.error("Migration failed on a final synchronize, so the source stack will be set back to read/write");
setSourceStatus(StatusEnum.READ_WRITE, "Synapse returned to read/write.");
}
log.error("Migration failed", e);
throw e;
}
}
/**
*
* @param status
* @param message
* @throws SynapseException
* @throws JSONObjectAdapterException
*/
public void setDestinationStatus(StatusEnum status, String message) throws SynapseException, JSONObjectAdapterException {
setStatus(this.factory.createNewDestinationClient(), status, message);
}
/**
*
* @param status
* @param message
* @throws SynapseException
* @throws JSONObjectAdapterException
*/
public void setSourceStatus(StatusEnum status, String message) throws SynapseException, JSONObjectAdapterException {
setStatus(this.factory.createNewSourceClient(), status, message);
}
/**
*
* @param client
* @param status
* @param message
* @throws SynapseException
* @throws JSONObjectAdapterException
*/
private static void setStatus(SynapseAdminClient client, StatusEnum status, String message) throws JSONObjectAdapterException, SynapseException{
StackStatus destStatus = client.getCurrentStackStatus();
destStatus.setStatus(status);
destStatus.setCurrentMessage(message);
destStatus = client.updateCurrentStackStatus(destStatus);
}
/**
* Get the current change number of the destination.
* @return
* @throws SynapseException
* @throws JSONObjectAdapterException
*/
private long getDestinationCurrentChangeNumber() throws SynapseException, JSONObjectAdapterException{
return this.factory.createNewDestinationClient().getCurrentChangeNumber().getNextChangeNumber();
}
/**
* Migrate all types.
* @param batchSize - Max batch size
* @param timeoutMS - max time to wait for a deamon job.
* @param retryDenominator - how to divide a batch into sub-batches when errors occur.
* @throws Exception
*/
public void migrateAllTypes(long batchSize, long timeoutMS, int retryDenominator, boolean deferExceptions) throws Exception {
SynapseAdminClient source = factory.createNewSourceClient();
SynapseAdminClient destination = factory.createNewDestinationClient();
// Get the counts for all type from both the source and destination
MigrationTypeCounts startSourceCounts = source.getTypeCounts();
MigrationTypeCounts startDestCounts = destination.getTypeCounts();
log.info("Start counts:");
printCounts(startSourceCounts.getList(), startDestCounts.getList());
// Get the primary types
List<MigrationType> primaryTypes = source.getPrimaryTypes().getList();
// Do the actual migration.
migrateAll(batchSize, timeoutMS, retryDenominator, primaryTypes, deferExceptions);
// Print the final counts
MigrationTypeCounts endSourceCounts = source.getTypeCounts();
MigrationTypeCounts endDestCounts = destination.getTypeCounts();
log.info("End counts:");
printCounts(endSourceCounts.getList(), endDestCounts.getList());
log.info("Migrating crowd");
migrateCrowd();
if ((deferExceptions) && (this.deferredExceptions.size() > 0)) {
log.error("Encountered " + this.deferredExceptions.size() + " execution exceptions in the worker threads");
this.dumpDeferredExceptions();
throw this.deferredExceptions.get(deferredExceptions.size()-1);
}
}
/**
* Does the actaul migration work.
* @param batchSize
* @param timeoutMS
* @param retryDenominator
* @param primaryTypes
* @throws Exception
*/
private void migrateAll(long batchSize, long timeoutMS, int retryDenominator, List<MigrationType> primaryTypes, boolean deferExceptions)
throws Exception {
List<DeltaData> deltaList = new LinkedList<DeltaData>();
for(MigrationType type: primaryTypes){
DeltaData dd = calculateDeltaForType(type, batchSize);
deltaList.add(dd);
}
// First attempt to delete, catching any exception (for case like fileHandles)
Exception firstDeleteException = null;
try {
// Delete any data in reverse order
for(int i=deltaList.size()-1; i >= 0; i
DeltaData dd = deltaList.get(i);
long count = dd.getCounts().getDelete();
if(count > 0){
deleteFromDestination(dd.getType(), dd.getDeleteTemp(), count, batchSize, deferExceptions);
}
}
} catch (Exception e) {
firstDeleteException = e;
log.info("Exception thrown during first delete phase.", e);
}
// If exception in insert/update phase, then rethrow at end so main is aware of problem
Exception insException = null;
try {
// Now do all adds in the original order
for(int i=0; i<deltaList.size(); i++){
DeltaData dd = deltaList.get(i);
long count = dd.getCounts().getCreate();
if(count > 0){
createUpdateInDestination(dd.getType(), dd.getCreateTemp(), count, batchSize, timeoutMS, retryDenominator, deferExceptions);
}
}
} catch (Exception e) {
insException = e;
log.info("Exception thrown during insert phase", e);
}
Exception updException = null;
try {
// Now do all updates in the original order
for(int i=0; i<deltaList.size(); i++){
DeltaData dd = deltaList.get(i);
long count = dd.getCounts().getUpdate();
if(count > 0){
createUpdateInDestination(dd.getType(), dd.getUpdateTemp(), count, batchSize, timeoutMS, retryDenominator, deferExceptions);
}
}
} catch (Exception e) {
updException = e;
log.info("Exception thrown during update phases", e);
}
// Only do the post-deletes if the initial ones raised an exception
if (firstDeleteException != null) {
// Now we need to delete any data in reverse order
for(int i=deltaList.size()-1; i >= 0; i
DeltaData dd = deltaList.get(i);
long count = dd.getCounts().getDelete();
if(count > 0){
deleteFromDestination(dd.getType(), dd.getDeleteTemp(), count, batchSize, deferExceptions);
}
}
}
if (insException != null) {
throw insException;
}
if (updException != null) {
throw updException;
}
}
/**
* Create or update
* @param type
* @param createUpdateTemp
* @param create
* @param batchSize
* @throws Exception
*/
private void createUpdateInDestination(MigrationType type, File createUpdateTemp, long count, long batchSize, long timeout, int retryDenominator, boolean deferExceptions) throws Exception {
BufferedRowMetadataReader reader = new BufferedRowMetadataReader(new FileReader(createUpdateTemp));
try{
BasicProgress progress = new BasicProgress();
CreateUpdateWorker worker = new CreateUpdateWorker(type, count, reader,progress,factory.createNewDestinationClient(), factory.createNewSourceClient(), batchSize, timeout, retryDenominator);
Future<Long> future = this.threadPool.submit(worker);
while(!future.isDone()){
// Log the progress
String message = progress.getMessage();
if(message == null){
message = "";
}
log.info("Creating/updating data for type: "+type.name()+" Progress: "+progress.getCurrentStatus()+" "+message);
Thread.sleep(2000);
}
try {
Long counts = future.get();
log.info("Creating/updating the following counts for type: "+type.name()+" Counts: "+counts);
} catch (ExecutionException e) {
if (deferExceptions) {
deferException(e);
} else {
throw(e);
}
}
}finally{
reader.close();
}
}
private void printCounts(List<MigrationTypeCount> srcCounts, List<MigrationTypeCount> destCounts) {
Map<MigrationType, Long> mapSrcCounts = new HashMap<MigrationType, Long>();
for (MigrationTypeCount sMtc: srcCounts) {
mapSrcCounts.put(sMtc.getType(), sMtc.getCount());
}
// All migration types of source should be at destination
for (MigrationTypeCount mtc: destCounts) {
log.info("\t" + mtc.getType().name() + ":\t" + (mapSrcCounts.containsKey(mtc.getType()) ? mapSrcCounts.get(mtc.getType()).toString() : "NA") + "\t" + mtc.getCount());
}
}
/**
* Migrate one type.
* @param type
* @param progress
* @throws Exception
*/
public DeltaData calculateDeltaForType(MigrationType type, long batchSize) throws Exception{
// the first thing we need to do is calculate the what needs to be created, updated, or deleted.
// We need three temp file to keep track of the deltas
File createTemp = File.createTempFile("create", ".tmp");
File updateTemp = File.createTempFile("update", ".tmp");
File deleteTemp = File.createTempFile("delete", ".tmp");
// Calculate the deltas
DeltaCounts counts = calcualteDeltas(type, batchSize, createTemp, updateTemp, deleteTemp);
return new DeltaData(type, createTemp, updateTemp, deleteTemp, counts);
}
/**
* Calcaulte the deltas
* @param type
* @param batchSize
* @param createTemp
* @param updateTemp
* @param deleteTemp
* @throws SynapseException
* @throws FileNotFoundException
* @throws IOException
*/
private DeltaCounts calcualteDeltas(MigrationType type, long batchSize, File createTemp, File updateTemp, File deleteTemp) throws Exception {
BasicProgress sourceProgress = new BasicProgress();
BasicProgress destProgress = new BasicProgress();
BufferedRowMetadataWriter createOut = null;
BufferedRowMetadataWriter updateOut = null;
BufferedRowMetadataWriter deleteOut = null;
try{
createOut = new BufferedRowMetadataWriter(new FileWriter(createTemp));
updateOut = new BufferedRowMetadataWriter(new FileWriter(updateTemp));
deleteOut = new BufferedRowMetadataWriter(new FileWriter(deleteTemp));
MetadataIterator sourceIt = new MetadataIterator(type, factory.createNewSourceClient(), batchSize, sourceProgress);
MetadataIterator destIt = new MetadataIterator(type, factory.createNewDestinationClient(), batchSize, destProgress);
DeltaBuilder builder = new DeltaBuilder(sourceIt, destIt, createOut, updateOut, deleteOut);
// Do the work on a separate thread
Future<DeltaCounts> future = this.threadPool.submit(builder);
// Wait for the future to finish
while(!future.isDone()){
// Log the progress
log.info("Calculating deltas for type: "+type.name()+" Progress: "+sourceProgress.getCurrentStatus());
Thread.sleep(2000);
}
DeltaCounts counts = future.get();
log.info("Calculated the following counts for type: "+type.name()+" Counts: "+counts);
return counts;
}finally{
if(createOut != null){
try {
createOut.close();
} catch (Exception e) {}
}
if(updateOut != null){
try {
updateOut.close();
} catch (Exception e) {}
}
if(deleteOut != null){
try {
deleteOut.close();
} catch (Exception e) {}
}
}
}
/**
* Delete the requested object from the destination.
* @throws IOException
*
*/
private void deleteFromDestination(MigrationType type, File deleteTemp, long count, long batchSize, boolean deferExceptions) throws Exception{
BufferedRowMetadataReader reader = new BufferedRowMetadataReader(new FileReader(deleteTemp));
try{
BasicProgress progress = new BasicProgress();
DeleteWorker worker = new DeleteWorker(type, count, reader, progress, factory.createNewDestinationClient(), batchSize);
Future<Long> future = this.threadPool.submit(worker);
while(!future.isDone()){
// Log the progress
log.info("Deleting data for type: "+type.name()+" Progress: "+progress.getCurrentStatus());
Thread.sleep(2000);
}
try {
Long counts = future.get();
log.info("Deleted the following counts for type: "+type.name()+" Counts: "+counts);
} catch (ExecutionException e) {
if (deferExceptions) {
deferException(e);
} else {
throw(e);
}
}
}finally{
reader.close();
}
}
private void deferException(ExecutionException e) throws ExecutionException {
if (deferredExceptions.size() <= this.MAX_DEFERRED_EXCEPTIONS) {
log.debug("Deferring execution exception in MigrationClient.createUpdateInDestination()");
deferredExceptions.add(e);
} else {
log.debug("Encountered more than " + this.MAX_DEFERRED_EXCEPTIONS + " execution exceptions in the worker threads. Dumping deferred first");
this.dumpDeferredExceptions();
throw e;
}
}
private void dumpDeferredExceptions() {
int i = 0;
for (Exception e: this.deferredExceptions) {
log.error("Deferred exception " + i++, e);
}
}
public void migrateCrowd() throws SynapseException, JSONObjectAdapterException {
SynapseAdminClient client = factory.createNewDestinationClient();
long offset = 100;
PaginatedResults<CrowdMigrationResult> res = client.migrateFromCrowd(100, 0);
boolean batchFailed = this.containsFailure(res.getResults());
long crowdTotalNumRes = res.getTotalNumberOfResults();
while (offset <= crowdTotalNumRes) {
res = client.migrateFromCrowd(500, offset);
if (this.containsFailure(res.getResults())) {
batchFailed = true;
}
offset += 500;
}
if (batchFailed) {
throw new RuntimeException("Failed during Crowd migration");
}
}
public boolean containsFailure(List<CrowdMigrationResult> batch) {
boolean failed = false;
for (CrowdMigrationResult r: batch) {
if (r.getResultType() == CrowdMigrationResultType.FAILURE) {
log.error("Crowd migration failed for " + r.getUsername() + " / " + r.getUserId() + " with error '" + r.getMessage() + "'");
failed = true;
}
}
return failed;
}
}
|
package org.jboss.as.txn.subsystem;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OP;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import com.arjuna.ats.arjuna.coordinator.TxControl;
import org.jboss.as.controller.AbstractWriteAttributeHandler;
import org.jboss.as.controller.AttributeDefinition;
import org.jboss.as.controller.ModelVersion;
import org.jboss.as.controller.OperationContext;
import org.jboss.as.controller.OperationFailedException;
import org.jboss.as.controller.OperationStepHandler;
import org.jboss.as.controller.PathAddress;
import org.jboss.as.controller.ReloadRequiredWriteAttributeHandler;
import org.jboss.as.controller.SimpleAttributeDefinition;
import org.jboss.as.controller.SimpleAttributeDefinitionBuilder;
import org.jboss.as.controller.SimpleResourceDefinition;
import org.jboss.as.controller.access.management.SensitiveTargetAccessConstraintDefinition;
import org.jboss.as.controller.capability.RuntimeCapability;
import org.jboss.as.controller.client.helpers.MeasurementUnit;
import org.jboss.as.controller.descriptions.ModelDescriptionConstants;
import org.jboss.as.controller.operations.validation.IntRangeValidator;
import org.jboss.as.controller.operations.validation.StringBytesLengthValidator;
import org.jboss.as.controller.operations.validation.StringLengthValidator;
import org.jboss.as.controller.registry.AttributeAccess;
import org.jboss.as.controller.registry.ImmutableManagementResourceRegistration;
import org.jboss.as.controller.registry.ManagementResourceRegistration;
import org.jboss.as.controller.registry.OperationEntry;
import org.jboss.as.controller.registry.Resource;
import org.jboss.as.txn.logging.TransactionLogger;
import org.jboss.dmr.ModelNode;
import org.jboss.dmr.ModelType;
/**
 * Resource definition for the root resource of the transaction subsystem.
 * Declares the subsystem's runtime capability plus the attributes of the
 * recovery, core, and coordinator environments and of the object store.
 */
public class TransactionSubsystemRootResourceDefinition extends SimpleResourceDefinition {
// Runtime capability exposed by this subsystem.
public static final RuntimeCapability<Void> TRANSACTION_CAPABILITY = RuntimeCapability.Builder.of("org.wildfly.transactions")
.build();
//recovery environment
// Required socket-binding reference (non-empty string); change requires restart of all services.
public static final SimpleAttributeDefinition BINDING = new SimpleAttributeDefinitionBuilder(CommonAttributes.BINDING, ModelType.STRING, false)
.setValidator(new StringLengthValidator(1))
.setFlags(AttributeAccess.Flag.RESTART_ALL_SERVICES)
.setXmlName(Attribute.BINDING.getLocalName())
.setAllowExpression(true)
.addAccessConstraint(SensitiveTargetAccessConstraintDefinition.SOCKET_BINDING_REF)
.build();
// Required socket-binding reference for the recovery status service.
public static final SimpleAttributeDefinition STATUS_BINDING = new SimpleAttributeDefinitionBuilder(CommonAttributes.STATUS_BINDING, ModelType.STRING, false)
.setValidator(new StringLengthValidator(1))
.setFlags(AttributeAccess.Flag.RESTART_ALL_SERVICES)
.setXmlName(Attribute.STATUS_BINDING.getLocalName())
.setAllowExpression(true)
.addAccessConstraint(SensitiveTargetAccessConstraintDefinition.SOCKET_BINDING_REF)
.build();
// Optional boolean, defaults to false.
public static final SimpleAttributeDefinition RECOVERY_LISTENER = new SimpleAttributeDefinitionBuilder(CommonAttributes.RECOVERY_LISTENER, ModelType.BOOLEAN, true)
.setDefaultValue(new ModelNode().set(false))
.setFlags(AttributeAccess.Flag.RESTART_ALL_SERVICES)
.setXmlName(Attribute.RECOVERY_LISTENER.getLocalName())
.setAllowExpression(true).build();
//core environment
// Optional node identifier, defaults to "1"; validator limits it to 0-23 bytes.
public static final SimpleAttributeDefinition NODE_IDENTIFIER = new SimpleAttributeDefinitionBuilder(CommonAttributes.NODE_IDENTIFIER, ModelType.STRING, true)
.setDefaultValue(new ModelNode().set("1"))
.setFlags(AttributeAccess.Flag.RESTART_ALL_SERVICES)
.setAllowExpression(true)
.setValidator(new StringBytesLengthValidator(0,23,true,true))
.build();
// Mutually exclusive with process-id-socket-binding (see setAlternatives).
public static final SimpleAttributeDefinition PROCESS_ID_UUID = new SimpleAttributeDefinitionBuilder("process-id-uuid", ModelType.BOOLEAN, false)
.setDefaultValue(new ModelNode().set(false))
.setAlternatives("process-id-socket-binding")
.setFlags(AttributeAccess.Flag.RESTART_ALL_SERVICES)
.build();
// Mutually exclusive with process-id-uuid.
// NOTE(review): the XML name reuses Attribute.BINDING -- confirm this is intentional.
public static final SimpleAttributeDefinition PROCESS_ID_SOCKET_BINDING = new SimpleAttributeDefinitionBuilder("process-id-socket-binding", ModelType.STRING, false)
.setValidator(new StringLengthValidator(1, true))
.setAlternatives("process-id-uuid")
.setFlags(AttributeAccess.Flag.RESTART_ALL_SERVICES)
.setXmlName(Attribute.BINDING.getLocalName())
.setAllowExpression(true)
.addAccessConstraint(SensitiveTargetAccessConstraintDefinition.SOCKET_BINDING_REF)
.build();
// Optional port count (>= 1), defaults to 10; only meaningful with process-id-socket-binding.
public static final SimpleAttributeDefinition PROCESS_ID_SOCKET_MAX_PORTS = new SimpleAttributeDefinitionBuilder("process-id-socket-max-ports", ModelType.INT, true)
.setValidator(new IntRangeValidator(1, true))
.setDefaultValue(new ModelNode().set(10))
.setRequires("process-id-socket-binding")
.setFlags(AttributeAccess.Flag.RESTART_ALL_SERVICES)
.setXmlName(Attribute.SOCKET_PROCESS_ID_MAX_PORTS.getLocalName())
.setAllowExpression(true)
.addAccessConstraint(SensitiveTargetAccessConstraintDefinition.SOCKET_CONFIG)
.build();
//coordinator environment
// Optional boolean, defaults to false.
public static final SimpleAttributeDefinition STATISTICS_ENABLED = new SimpleAttributeDefinitionBuilder(CommonAttributes.STATISTICS_ENABLED, ModelType.BOOLEAN, true)
.setDefaultValue(new ModelNode().set(false))
.setFlags(AttributeAccess.Flag.RESTART_ALL_SERVICES) // TODO should be runtime-changeable
.setAllowExpression(true).build();
// Deprecated since model version 2 (see setDeprecated); presumably superseded by STATISTICS_ENABLED -- TODO confirm.
public static final SimpleAttributeDefinition ENABLE_STATISTICS = new SimpleAttributeDefinitionBuilder(CommonAttributes.ENABLE_STATISTICS, ModelType.BOOLEAN, true)
.setDefaultValue(new ModelNode().set(false))
.setFlags(AttributeAccess.Flag.RESTART_ALL_SERVICES) // TODO should be runtime-changeable
.setXmlName(Attribute.ENABLE_STATISTICS.getLocalName())
.setDeprecated(ModelVersion.create(2))
.setAllowExpression(true).build();
// Optional boolean, defaults to false.
public static final SimpleAttributeDefinition ENABLE_TSM_STATUS = new SimpleAttributeDefinitionBuilder(CommonAttributes.ENABLE_TSM_STATUS, ModelType.BOOLEAN, true)
.setDefaultValue(new ModelNode().set(false))
.setFlags(AttributeAccess.Flag.RESTART_ALL_SERVICES) // TODO is this runtime-changeable?
.setXmlName(Attribute.ENABLE_TSM_STATUS.getLocalName())
.setAllowExpression(true).build();
// Default transaction timeout in seconds (default 300); writable without restart (RESTART_NONE).
public static final SimpleAttributeDefinition DEFAULT_TIMEOUT = new SimpleAttributeDefinitionBuilder(CommonAttributes.DEFAULT_TIMEOUT, ModelType.INT, true)
.setMeasurementUnit(MeasurementUnit.SECONDS)
.setDefaultValue(new ModelNode().set(300))
.setFlags(AttributeAccess.Flag.RESTART_NONE)
.setXmlName(Attribute.DEFAULT_TIMEOUT.getLocalName())
.setAllowExpression(true).build();
//object store
// Base directory the object-store path is resolved against (default jboss.server.data.dir).
public static final SimpleAttributeDefinition OBJECT_STORE_RELATIVE_TO = new SimpleAttributeDefinitionBuilder(CommonAttributes.OBJECT_STORE_RELATIVE_TO, ModelType.STRING, true)
.setDefaultValue(new ModelNode().set("jboss.server.data.dir"))
.setFlags(AttributeAccess.Flag.RESTART_ALL_SERVICES)
.setXmlName(Attribute.RELATIVE_TO.getLocalName())
.setAllowExpression(true).build();
// Object-store directory name, default "tx-object-store".
public static final SimpleAttributeDefinition OBJECT_STORE_PATH = new SimpleAttributeDefinitionBuilder(CommonAttributes.OBJECT_STORE_PATH, ModelType.STRING, true)
.setDefaultValue(new ModelNode().set("tx-object-store"))
.setFlags(AttributeAccess.Flag.RESTART_ALL_SERVICES)
.setXmlName(Attribute.PATH.getLocalName())
.setAllowExpression(true).build();
// Changing JTS requires a JVM restart (RESTART_JVM flag); expressions not allowed.
public static final SimpleAttributeDefinition JTS = new SimpleAttributeDefinitionBuilder(CommonAttributes.JTS, ModelType.BOOLEAN, true)
.setDefaultValue(new ModelNode().set(false))
.setFlags(AttributeAccess.Flag.RESTART_JVM) //I think the use of statics in arjunta will require a JVM restart
.setAllowExpression(false).build();
// Deprecated since model version 3 (see setDeprecated).
public static final SimpleAttributeDefinition USE_HORNETQ_STORE = new SimpleAttributeDefinitionBuilder(CommonAttributes.USE_HORNETQ_STORE, ModelType.BOOLEAN, true)
.setDefaultValue(new ModelNode().set(false))
.setFlags(AttributeAccess.Flag.RESTART_JVM)
.setAllowExpression(false)
.setDeprecated(ModelVersion.create(3)).build();
// Deprecated since model version 3; only meaningful when USE_HORNETQ_STORE is set (setRequires).
public static final SimpleAttributeDefinition HORNETQ_STORE_ENABLE_ASYNC_IO = new SimpleAttributeDefinitionBuilder(CommonAttributes.HORNETQ_STORE_ENABLE_ASYNC_IO, ModelType.BOOLEAN, true)
.setDefaultValue(new ModelNode().set(false))
.setFlags(AttributeAccess.Flag.RESTART_JVM)
.setXmlName(Attribute.ENABLE_ASYNC_IO.getLocalName())
.setAllowExpression(true)
.setRequires(CommonAttributes.USE_HORNETQ_STORE)
.setDeprecated(ModelVersion.create(3)).build();
// Optional boolean, defaults to false; change requires a JVM restart.
public static final SimpleAttributeDefinition USE_JOURNAL_STORE = new SimpleAttributeDefinitionBuilder(CommonAttributes.USE_JOURNAL_STORE, ModelType.BOOLEAN, true)
.setDefaultValue(new ModelNode().set(false))
.setFlags(AttributeAccess.Flag.RESTART_JVM)
.setAllowExpression(false).build();
// Only meaningful when USE_JOURNAL_STORE is set (setRequires).
public static final SimpleAttributeDefinition JOURNAL_STORE_ENABLE_ASYNC_IO = new SimpleAttributeDefinitionBuilder(CommonAttributes.JOURNAL_STORE_ENABLE_ASYNC_IO, ModelType.BOOLEAN, true)
.setDefaultValue(new ModelNode().set(false))
.setFlags(AttributeAccess.Flag.RESTART_JVM)
.setXmlName(Attribute.ENABLE_ASYNC_IO.getLocalName())
.setAllowExpression(true)
.setRequires(CommonAttributes.USE_JOURNAL_STORE).build();
// Optional boolean, defaults to false; change requires a JVM restart.
public static final SimpleAttributeDefinition USE_JDBC_STORE = new SimpleAttributeDefinitionBuilder(CommonAttributes.USE_JDBC_STORE, ModelType.BOOLEAN, true)
.setDefaultValue(new ModelNode(false))
.setFlags(AttributeAccess.Flag.RESTART_JVM)
.setAllowExpression(false).build();
// Datasource JNDI name used by the JDBC store.
public static final SimpleAttributeDefinition JDBC_STORE_DATASOURCE = new SimpleAttributeDefinitionBuilder(CommonAttributes.JDBC_STORE_DATASOURCE, ModelType.STRING, true)
.setFlags(AttributeAccess.Flag.RESTART_JVM)
.setXmlName(Attribute.DATASOURCE_JNDI_NAME.getLocalName())
.setAllowExpression(true).build();
// JDBC action-store table prefix; only meaningful when USE_JDBC_STORE is set.
public static final SimpleAttributeDefinition JDBC_ACTION_STORE_TABLE_PREFIX =
new SimpleAttributeDefinitionBuilder(CommonAttributes.JDBC_ACTION_STORE_TABLE_PREFIX, ModelType.STRING, true)
.setFlags(AttributeAccess.Flag.RESTART_JVM)
.setXmlName(Attribute.TABLE_PREFIX.getLocalName())
.setAllowExpression(true)
.setRequires(CommonAttributes.USE_JDBC_STORE).build();
// JDBC action-store drop-table flag (default false); only meaningful when USE_JDBC_STORE is set.
public static final SimpleAttributeDefinition JDBC_ACTION_STORE_DROP_TABLE = new SimpleAttributeDefinitionBuilder(CommonAttributes.JDBC_ACTION_STORE_DROP_TABLE, ModelType.BOOLEAN, true)
.setDefaultValue(new ModelNode(false))
.setFlags(AttributeAccess.Flag.RESTART_JVM)
.setXmlName(Attribute.DROP_TABLE.getLocalName())
.setAllowExpression(true)
.setRequires(CommonAttributes.USE_JDBC_STORE).build();
// JDBC communication-store table prefix; only meaningful when USE_JDBC_STORE is set.
public static final SimpleAttributeDefinition JDBC_COMMUNICATION_STORE_TABLE_PREFIX = new SimpleAttributeDefinitionBuilder(CommonAttributes.JDBC_COMMUNICATION_STORE_TABLE_PREFIX, ModelType.STRING, true)
.setFlags(AttributeAccess.Flag.RESTART_JVM)
.setXmlName(Attribute.TABLE_PREFIX.getLocalName())
.setAllowExpression(true)
.setRequires(CommonAttributes.USE_JDBC_STORE).build();
// JDBC communication-store drop-table flag (default false); only meaningful when USE_JDBC_STORE is set.
public static final SimpleAttributeDefinition JDBC_COMMUNICATION_STORE_DROP_TABLE = new SimpleAttributeDefinitionBuilder(CommonAttributes.JDBC_COMMUNICATION_STORE_DROP_TABLE, ModelType.BOOLEAN, true)
.setDefaultValue(new ModelNode(false))
.setFlags(AttributeAccess.Flag.RESTART_JVM)
.setXmlName(Attribute.DROP_TABLE.getLocalName())
.setAllowExpression(true)
.setRequires(CommonAttributes.USE_JDBC_STORE).build();
// JDBC state-store table prefix; only meaningful when USE_JDBC_STORE is set.
public static final SimpleAttributeDefinition JDBC_STATE_STORE_TABLE_PREFIX = new SimpleAttributeDefinitionBuilder(CommonAttributes.JDBC_STATE_STORE_TABLE_PREFIX, ModelType.STRING, true)
.setFlags(AttributeAccess.Flag.RESTART_JVM)
.setXmlName(Attribute.TABLE_PREFIX.getLocalName())
.setAllowExpression(true)
.setRequires(CommonAttributes.USE_JDBC_STORE).build();
// JDBC state-store drop-table flag (default false); only meaningful when USE_JDBC_STORE is set.
public static final SimpleAttributeDefinition JDBC_STATE_STORE_DROP_TABLE = new SimpleAttributeDefinitionBuilder(CommonAttributes.JDBC_STATE_STORE_DROP_TABLE, ModelType.BOOLEAN, true)
.setDefaultValue(new ModelNode(false))
.setFlags(AttributeAccess.Flag.RESTART_JVM)
.setXmlName(Attribute.DROP_TABLE.getLocalName())
.setAllowExpression(true)
.setRequires(CommonAttributes.USE_JDBC_STORE).build();
// Whether runtime-only (metric/operation) registrations should be performed.
private final boolean registerRuntimeOnly;
/**
 * Create the root resource definition.
 *
 * @param registerRuntimeOnly whether runtime-only registrations should be performed
 */
TransactionSubsystemRootResourceDefinition(boolean registerRuntimeOnly) {
super(TransactionExtension.SUBSYSTEM_PATH,
TransactionExtension.getResourceDescriptionResolver(),
TransactionSubsystemAdd.INSTANCE, TransactionSubsystemRemove.INSTANCE,
OperationEntry.Flag.RESTART_ALL_SERVICES, OperationEntry.Flag.RESTART_ALL_SERVICES);
this.registerRuntimeOnly = registerRuntimeOnly;
}
// all attributes
// Current attribute set registered on this resource.  Note that the deprecated
// ENABLE_STATISTICS and USE_HORNETQ_STORE attributes are deliberately not listed here.
static final AttributeDefinition[] attributes = new AttributeDefinition[] {
BINDING, STATUS_BINDING, RECOVERY_LISTENER, NODE_IDENTIFIER, PROCESS_ID_UUID, PROCESS_ID_SOCKET_BINDING,
PROCESS_ID_SOCKET_MAX_PORTS, STATISTICS_ENABLED, ENABLE_TSM_STATUS, DEFAULT_TIMEOUT,
OBJECT_STORE_RELATIVE_TO, OBJECT_STORE_PATH, JTS, USE_JOURNAL_STORE, USE_JDBC_STORE, JDBC_STORE_DATASOURCE,
JDBC_ACTION_STORE_DROP_TABLE, JDBC_ACTION_STORE_TABLE_PREFIX, JDBC_COMMUNICATION_STORE_DROP_TABLE,
JDBC_COMMUNICATION_STORE_TABLE_PREFIX, JDBC_STATE_STORE_DROP_TABLE, JDBC_STATE_STORE_TABLE_PREFIX,
JOURNAL_STORE_ENABLE_ASYNC_IO
};
// Attributes that gained expression support after management model 1.1.0.
static final AttributeDefinition[] ATTRIBUTES_WITH_EXPRESSIONS_AFTER_1_1_0 = new AttributeDefinition[] {
DEFAULT_TIMEOUT, STATISTICS_ENABLED, ENABLE_STATISTICS, ENABLE_TSM_STATUS, NODE_IDENTIFIER, OBJECT_STORE_PATH, OBJECT_STORE_RELATIVE_TO,
PROCESS_ID_SOCKET_BINDING, PROCESS_ID_SOCKET_MAX_PORTS, RECOVERY_LISTENER, BINDING, STATUS_BINDING
};
// Attributes that gained expression support after management model 1.1.1.
static final AttributeDefinition[] ATTRIBUTES_WITH_EXPRESSIONS_AFTER_1_1_1 = new AttributeDefinition[] {
JTS, USE_HORNETQ_STORE
};
// JDBC-store attributes introduced in management model 1.2.
static final AttributeDefinition[] attributes_1_2 = new AttributeDefinition[] {USE_JDBC_STORE, JDBC_STORE_DATASOURCE,
JDBC_ACTION_STORE_DROP_TABLE, JDBC_ACTION_STORE_TABLE_PREFIX,
JDBC_COMMUNICATION_STORE_DROP_TABLE, JDBC_COMMUNICATION_STORE_TABLE_PREFIX,
JDBC_STATE_STORE_DROP_TABLE, JDBC_STATE_STORE_TABLE_PREFIX
};
@Override
public void registerAttributes(ManagementResourceRegistration resourceRegistration) {
    // Register all attributes except the ones that need special write handling
    // (mutually exclusive pairs, runtime-applied timeout, datasource validation).
    Set<AttributeDefinition> attributesWithoutMutuals = new HashSet<>(Arrays.asList(attributes));
    attributesWithoutMutuals.remove(USE_JOURNAL_STORE);
    attributesWithoutMutuals.remove(USE_JDBC_STORE);
    attributesWithoutMutuals.remove(DEFAULT_TIMEOUT);
    attributesWithoutMutuals.remove(JDBC_STORE_DATASOURCE); // Remove these as it also needs special write handler
    attributesWithoutMutuals.remove(PROCESS_ID_UUID);
    attributesWithoutMutuals.remove(PROCESS_ID_SOCKET_BINDING);
    attributesWithoutMutuals.remove(PROCESS_ID_SOCKET_MAX_PORTS);
    // Plain attributes: any write just requires a reload.
    OperationStepHandler writeHandler = new ReloadRequiredWriteAttributeHandler(attributesWithoutMutuals);
    for(final AttributeDefinition def : attributesWithoutMutuals) {
        resourceRegistration.registerReadWriteAttribute(def, null, writeHandler);
    }

    // Mutually exclusive object-store attributes: setting one to true clears the other.
    OperationStepHandler mutualWriteHandler = new ObjectStoreMutualWriteHandler(USE_JOURNAL_STORE, USE_JDBC_STORE);
    resourceRegistration.registerReadWriteAttribute(USE_JOURNAL_STORE, null, mutualWriteHandler);
    resourceRegistration.registerReadWriteAttribute(USE_JDBC_STORE, null, mutualWriteHandler);

    // default-timeout is applied to the runtime immediately (no reload needed).
    resourceRegistration.registerReadWriteAttribute(DEFAULT_TIMEOUT, null, new DefaultTimeoutHandler(DEFAULT_TIMEOUT));

    // jdbc-store-datasource triggers cross-attribute validation on write.
    resourceRegistration.registerReadWriteAttribute(JDBC_STORE_DATASOURCE, null, new JdbcStoreDatasourceWriteHandler(JDBC_STORE_DATASOURCE));

    // Mutually exclusive process-id attributes (uuid vs. socket-binding based).
    OperationStepHandler mutualProcessIdWriteHandler = new ProcessIdWriteHandler(PROCESS_ID_UUID, PROCESS_ID_SOCKET_BINDING, PROCESS_ID_SOCKET_MAX_PORTS);
    resourceRegistration.registerReadWriteAttribute(PROCESS_ID_UUID, null, mutualProcessIdWriteHandler);
    resourceRegistration.registerReadWriteAttribute(PROCESS_ID_SOCKET_BINDING, null, mutualProcessIdWriteHandler);
    resourceRegistration.registerReadWriteAttribute(PROCESS_ID_SOCKET_MAX_PORTS, null, mutualProcessIdWriteHandler);

    // Legacy alias attributes delegate reads and writes to their replacements.
    AliasedHandler esh = new AliasedHandler(STATISTICS_ENABLED.getName());
    resourceRegistration.registerReadWriteAttribute(ENABLE_STATISTICS, esh, esh);

    AliasedHandler hsh = new AliasedHandler(USE_JOURNAL_STORE.getName());
    resourceRegistration.registerReadWriteAttribute(USE_HORNETQ_STORE, hsh, hsh);

    AliasedHandler hseh = new AliasedHandler(JOURNAL_STORE_ENABLE_ASYNC_IO.getName());
    resourceRegistration.registerReadWriteAttribute(HORNETQ_STORE_ENABLE_ASYNC_IO, hseh, hseh);

    if (registerRuntimeOnly) {
        TxStatsHandler.INSTANCE.registerMetrics(resourceRegistration);
    }
}
/** Registers the transaction capability exposed by this subsystem. */
@Override
public void registerCapabilities(ManagementResourceRegistration resourceRegistration) {
    resourceRegistration.registerCapability(TRANSACTION_CAPABILITY);
}
/**
 * Read/write handler for a deprecated alias attribute. It re-dispatches the
 * incoming operation against the replacement attribute by cloning the
 * operation, swapping its NAME parameter, and scheduling the handler
 * registered for that operation as a new MODEL step.
 */
private static class AliasedHandler implements OperationStepHandler {

    // Fix: made final — set once in the constructor and never reassigned.
    private final String aliasedName;

    public AliasedHandler(String aliasedName) {
        this.aliasedName = aliasedName;
    }

    @Override
    public void execute(OperationContext context, ModelNode operation) throws OperationFailedException {
        ModelNode aliased = getAliasedOperation(operation);
        // addFirst=true so the delegated step runs before other queued MODEL steps.
        context.addStep(aliased, getHandlerForOperation(context, operation), OperationContext.Stage.MODEL, true);
        context.stepCompleted();
    }

    /** Returns a copy of {@code operation} with NAME replaced by the aliased attribute name. */
    private ModelNode getAliasedOperation(ModelNode operation) {
        ModelNode aliased = operation.clone();
        aliased.get(ModelDescriptionConstants.NAME).set(aliasedName);
        return aliased;
    }

    /** Looks up the handler registered for this operation at the current address. */
    private static OperationStepHandler getHandlerForOperation(OperationContext context, ModelNode operation) {
        ImmutableManagementResourceRegistration imrr = context.getResourceRegistration();
        return imrr.getOperationHandler(PathAddress.EMPTY_ADDRESS, operation.get(OP).asString());
    }
}
/**
 * Write handler for the mutually exclusive use-journal-store / use-jdbc-store
 * attributes: setting one to {@code true} silently flips the other to
 * {@code false}, then queues a validation step for the JDBC-store datasource
 * requirement.
 */
private static class ObjectStoreMutualWriteHandler extends ReloadRequiredWriteAttributeHandler {
    public ObjectStoreMutualWriteHandler(final AttributeDefinition... definitions) {
        super(definitions);
    }

    @Override
    protected void finishModelStage(final OperationContext context, final ModelNode operation, String attributeName,
                                    ModelNode newValue, ModelNode oldValue, final Resource model) throws OperationFailedException {
        super.finishModelStage(context, operation, attributeName, newValue, oldValue, model);

        // The logic below reads the new value directly; expressions would need
        // resolution first, hence the guard.
        assert !USE_JOURNAL_STORE.isAllowExpression() && !USE_JDBC_STORE.isAllowExpression() : "rework this before enabling expression";

        if (attributeName.equals(USE_JOURNAL_STORE.getName()) || attributeName.equals(USE_JDBC_STORE.getName())) {
            if (newValue.isDefined() && newValue.asBoolean()) {
                // check the value of the mutual attribute and disable it if it is set to true
                final String mutualAttributeName = attributeName.equals(USE_JDBC_STORE.getName())
                        ? USE_JOURNAL_STORE.getName()
                        : USE_JDBC_STORE.getName();

                ModelNode resourceModel = model.getModel();
                if (resourceModel.hasDefined(mutualAttributeName) && resourceModel.get(mutualAttributeName).asBoolean()) {
                    resourceModel.get(mutualAttributeName).set(new ModelNode(false));
                }
            }
        }

        // Always validate that use-jdbc-store implies jdbc-store-datasource.
        context.addStep(JdbcStoreValidationStep.INSTANCE, OperationContext.Stage.MODEL);
    }
}
/**
 * Write handler for jdbc-store-datasource: behaves like a reload-required
 * write but additionally queues the shared JDBC-store validation step.
 */
private static class JdbcStoreDatasourceWriteHandler extends ReloadRequiredWriteAttributeHandler {

    public JdbcStoreDatasourceWriteHandler(AttributeDefinition... definitions) {
        super(definitions);
    }

    @Override
    protected void validateUpdatedModel(OperationContext context, Resource model) throws OperationFailedException {
        super.validateUpdatedModel(context, model);
        context.addStep(JdbcStoreValidationStep.INSTANCE, OperationContext.Stage.MODEL);
    }
}
/**
 * Validates that if use-jdbc-store is set, jdbc-store-datasource must be also set.
 *
 * Must be added to both use-jdbc-store and jdbc-store-datasource fields.
 */
private static class JdbcStoreValidationStep implements OperationStepHandler {

    // Fix: made final for safe publication of the shared stateless singleton.
    private static final JdbcStoreValidationStep INSTANCE = new JdbcStoreValidationStep();

    // Fix: private constructor enforces singleton usage via INSTANCE.
    private JdbcStoreValidationStep() {
    }

    @Override
    public void execute(OperationContext context, ModelNode operation) throws OperationFailedException {
        ModelNode modelNode = context.readResource(PathAddress.EMPTY_ADDRESS).getModel();
        if (modelNode.hasDefined(USE_JDBC_STORE.getName()) && modelNode.get(USE_JDBC_STORE.getName()).asBoolean()
                && !modelNode.hasDefined(JDBC_STORE_DATASOURCE.getName())) {
            throw TransactionLogger.ROOT_LOGGER.mustBeDefinedIfTrue(JDBC_STORE_DATASOURCE.getName(), USE_JDBC_STORE.getName());
        }
    }
}
/**
 * Write handler for the mutually exclusive process-id attributes:
 * process-id-uuid on one side, process-id-socket-binding (+ max-ports) on the
 * other. Writing one side clears the other; a deferred MODEL step validates
 * the final combination.
 */
private static class ProcessIdWriteHandler extends ReloadRequiredWriteAttributeHandler {
    public ProcessIdWriteHandler(final AttributeDefinition... definitions) {
        super(definitions);
    }

    @Override
    protected void validateUpdatedModel(final OperationContext context, final Resource model) throws OperationFailedException {
        // Validation runs as a separate step so it sees the model after all
        // pending attribute writes in this operation have been applied.
        context.addStep(new OperationStepHandler() {
            @Override
            public void execute(OperationContext operationContext, ModelNode operation) throws OperationFailedException {
                // NOTE(review): reads via the captured outer 'context' rather than
                // 'operationContext'; both should address the same resource here —
                // confirm this is intentional.
                ModelNode node = context.readResource(PathAddress.EMPTY_ADDRESS).getModel();

                if (node.hasDefined(TransactionSubsystemRootResourceDefinition.PROCESS_ID_UUID.getName()) && node.get(TransactionSubsystemRootResourceDefinition.PROCESS_ID_UUID.getName()).asBoolean()) {
                    // uuid=true: no socket-based settings may be present.
                    if (node.hasDefined(TransactionSubsystemRootResourceDefinition.PROCESS_ID_SOCKET_BINDING.getName())) {
                        throw TransactionLogger.ROOT_LOGGER.mustBeUndefinedIfTrue(TransactionSubsystemRootResourceDefinition.PROCESS_ID_SOCKET_BINDING.getName(), TransactionSubsystemRootResourceDefinition.PROCESS_ID_UUID.getName());
                    } else if (node.hasDefined(TransactionSubsystemRootResourceDefinition.PROCESS_ID_SOCKET_MAX_PORTS.getName())) {
                        throw TransactionLogger.ROOT_LOGGER.mustBeUndefinedIfTrue(TransactionSubsystemRootResourceDefinition.PROCESS_ID_SOCKET_MAX_PORTS.getName(), TransactionSubsystemRootResourceDefinition.PROCESS_ID_UUID.getName());
                    }
                } else if (node.hasDefined(TransactionSubsystemRootResourceDefinition.PROCESS_ID_SOCKET_BINDING.getName())) {
                    //it's fine do nothing
                } else if (node.hasDefined(TransactionSubsystemRootResourceDefinition.PROCESS_ID_SOCKET_MAX_PORTS.getName())) {
                    // max-ports without a socket-binding is meaningless.
                    throw TransactionLogger.ROOT_LOGGER.mustBedefinedIfDefined(TransactionSubsystemRootResourceDefinition.PROCESS_ID_SOCKET_BINDING.getName(), TransactionSubsystemRootResourceDefinition.PROCESS_ID_SOCKET_MAX_PORTS.getName());
                } else {
                    // not uuid and also not sockets!
                    throw TransactionLogger.ROOT_LOGGER.eitherTrueOrDefined(TransactionSubsystemRootResourceDefinition.PROCESS_ID_UUID.getName(), TransactionSubsystemRootResourceDefinition.PROCESS_ID_SOCKET_BINDING.getName());
                }
            }
        }, OperationContext.Stage.MODEL);
    }

    @Override
    protected void finishModelStage(final OperationContext context, final ModelNode operation, String attributeName,
                                    ModelNode newValue, ModelNode oldValue, final Resource model) throws OperationFailedException {
        // Writing a socket-binding switches uuid off.
        if (attributeName.equals(PROCESS_ID_SOCKET_BINDING.getName())) {
            if (newValue.isDefined()) {
                ModelNode resourceModel = model.getModel();
                if (resourceModel.hasDefined(PROCESS_ID_UUID.getName()) && resourceModel.get(PROCESS_ID_UUID.getName()).asBoolean()) {
                    resourceModel.get(PROCESS_ID_UUID.getName()).set(new ModelNode(false));
                }
            }
        }
        // Writing uuid=true clears the socket-based settings.
        if (attributeName.equals(PROCESS_ID_UUID.getName())) {
            if (newValue.asBoolean(false)) {
                ModelNode resourceModel = model.getModel();
                resourceModel.get(PROCESS_ID_SOCKET_BINDING.getName()).clear();
                resourceModel.get(PROCESS_ID_SOCKET_MAX_PORTS.getName()).clear();
            }
        }
        validateUpdatedModel(context, model);
    }
}
/** Registers the single child resource (log-store commit-markable-resource definition). */
@Override
public void registerChildren(ManagementResourceRegistration resourceRegistration) {
    resourceRegistration.registerSubModel(new CMResourceResourceDefinition());
}
/**
 * Write handler for default-timeout that pushes the new value straight into
 * the runtime via {@code TxControl}, so no reload/restart is required.
 */
private static class DefaultTimeoutHandler extends AbstractWriteAttributeHandler<Void> {

    public DefaultTimeoutHandler(final AttributeDefinition... definitions) {
        super(definitions);
    }

    @Override
    protected boolean applyUpdateToRuntime(final OperationContext context, final ModelNode operation,
                                           final String attributeName, final ModelNode resolvedValue,
                                           final ModelNode currentValue, final HandbackHolder<Void> handbackHolder)
            throws OperationFailedException {
        TxControl.setDefaultTimeout(resolvedValue.asInt());
        // false: the change is fully applied at runtime; no restart needed.
        return false;
    }

    @Override
    protected void revertUpdateToRuntime(final OperationContext context, final ModelNode operation,
                                         final String attributeName, final ModelNode valueToRestore,
                                         final ModelNode valueToRevert, final Void handback)
            throws OperationFailedException {
        // Roll the runtime back to the previous timeout on operation rollback.
        TxControl.setDefaultTimeout(valueToRestore.asInt());
    }
}
}
|
package com.jme.scene;
import java.io.Serializable;
import java.util.*;
import com.jme.bounding.BoundingVolume;
import com.jme.intersection.CollisionResults;
import com.jme.intersection.PickResults;
import com.jme.math.Matrix3f;
import com.jme.math.Quaternion;
import com.jme.math.Ray;
import com.jme.math.Vector3f;
import com.jme.renderer.Camera;
import com.jme.renderer.CloneCreator;
import com.jme.renderer.Renderer;
import com.jme.scene.state.LightState;
import com.jme.scene.state.RenderState;
import com.jme.scene.state.TextureState;
/**
* <code>Spatial</code> defines the base class for scene graph nodes. It
* maintains a link to a parent, it's local transforms and the world's
* transforms. All other nodes, such as <code>Node</code> and
* <code>Geometry</code> are subclasses of <code>Spatial</code>.
*
* @author Mark Powell
* @author Joshua Slack
* @version $Id: Spatial.java,v 1.89 2006-01-03 21:15:10 renanse Exp $
*/
public abstract class Spatial implements Serializable {
// Type bit flags returned by getType(); compared via bitwise & so a subclass
// can also match its superclass type (see getType() javadoc).
public static final int NODE = 1;
public static final int GEOMETRY = 2;
public static final int TRIMESH = 4;
public static final int SHARED_MESH = 8;
public static final int SKY_BOX = 16;
public static final int TERRAIN_BLOCK = 32;
public static final int TERRAIN_PAGE = 64;

// Cull-mode values for setCullMode(int) / getCullMode().
public static final int CULL_INHERIT = 0;
public static final int CULL_DYNAMIC = 1;
public static final int CULL_ALWAYS = 2;
public static final int CULL_NEVER = 3;

// Lock bit flags combined into lockedMode (see lockBounds/lockTransforms/lockMeshes).
public static final int LOCKED_BOUNDS = 1;
public static final int LOCKED_MESH_DATA = 2;
public static final int LOCKED_TRANSFORMS = 4;

/** Spatial's rotation relative to its parent. */
protected Quaternion localRotation;

/** Spatial's world absolute rotation. */
protected Quaternion worldRotation;

/** Spatial's translation relative to its parent. */
protected Vector3f localTranslation;

/** Spatial's world absolute translation. */
protected Vector3f worldTranslation;

/** Spatial's scale relative to its parent. */
protected Vector3f localScale;

/** Spatial's world absolute scale. */
protected Vector3f worldScale;

/**
 * A flag indicating if scene culling should be done on this object by
 * inheritance, dynamically, never, or always.
 */
private int cullMode = CULL_INHERIT;

/** Spatial's bounding volume relative to the world. */
protected BoundingVolume worldBound;

/** Spatial's parent, or null if it has none. */
protected transient Node parent;

/** List of default states all spatials take if none is set. */
public final static RenderState[] defaultStateList = new RenderState[RenderState.RS_MAX_STATE];

/** List of states that override any set states on a spatial if not null. */
public final static RenderState[] enforcedStateList = new RenderState[RenderState.RS_MAX_STATE];

/** RenderStates a Spatial contains during rendering. */
protected final static RenderState[] currentStates = new RenderState[RenderState.RS_MAX_STATE];

/** The render states of this spatial. */
private RenderState[] renderStateList;

/** Render-queue mode; QUEUE_INHERIT defers to the parent's setting. */
protected int renderQueueMode = Renderer.QUEUE_INHERIT;

/** Used to determine draw order for ortho mode rendering. */
protected int zOrder = 0;

/**
 * Bitwise combination of LOCKED_* flags used to indicate this spatial (and
 * any below it in the case of Node) is locked against certain changes.
 */
protected int lockedMode = 0;

/** Scratch distance used when sorting into render queues; not persisted. */
public transient float queueDistance = Float.NEGATIVE_INFINITY;

/**
 * Flag signaling how lights are combined for this node. By default set to
 * INHERIT.
 */
protected int lightCombineMode = LightState.INHERIT;

/**
 * Flag signaling how textures are combined for this node. By default set to
 * INHERIT.
 */
protected int textureCombineMode = TextureState.INHERIT;

/** ArrayList of controllers for this spatial. */
protected ArrayList geometricalControllers = new ArrayList();

/** This spatial's name. */
protected String name;

/** Cached result of the most recent camera frustum test (see onDraw). */
protected int frustrumIntersects = Camera.INTERSECTS_FRUSTUM;

/** Defines if this spatial will be used in intersection operations or not. Default is true */
protected boolean isCollidable = true;

// Shared static scratch objects used by rotateUpTo/lookAt.
// NOTE(review): not thread-safe — assumes single-threaded scene updates; confirm.
private static final Vector3f compVecA = new Vector3f();
private static final Quaternion compQuat = new Quaternion();
/**
 * Empty constructor to be used internally only (e.g. by serialization);
 * leaves all transform fields null.
 */
public Spatial() {
}
/**
 * Constructor instantiates a new <code>Spatial</code> object setting the
 * rotation, translation and scale value to defaults (identity rotation,
 * zero translation, unit scale).
 *
 * @param name
 *            the name of the scene element. This is required for
 *            identification and comparison purposes.
 */
public Spatial(String name) {
    this.name = name;
    localRotation = new Quaternion();
    worldRotation = new Quaternion();
    localTranslation = new Vector3f();
    worldTranslation = new Vector3f();
    localScale = new Vector3f(1.0f, 1.0f, 1.0f);
    worldScale = new Vector3f(1.0f, 1.0f, 1.0f);
}
/**
 * Sets the name of this spatial.
 *
 * @param name
 *            The spatial's new name.
 */
public void setName(String name) {
    this.name = name;
}
/**
 * Returns the name of this spatial (may be null if never set).
 *
 * @return This spatial's name.
 */
public String getName() {
    return name;
}
/**
 * Sets if this Spatial is to be used in intersection (collision and picking)
 * calculations. By default this is true.
 *
 * @param isCollidable true if this Spatial is to be used in intersection calculations, false otherwise.
 */
public void setIsCollidable(boolean isCollidable) {
    this.isCollidable = isCollidable;
}
/**
 * Defines if this Spatial is to be used in intersection (collision and
 * picking) calculations. By default this is true.
 *
 * @return true if this Spatial is to be used in intersection calculations, false otherwise.
 */
public boolean isCollidable() {
    return this.isCollidable;
}
/**
 * Adds a Controller to this Spatial's list of controllers, lazily recreating
 * the list if it is null.
 *
 * @param controller
 *            The Controller to add
 * @see com.jme.scene.Controller
 */
public void addController(Controller controller) {
    ArrayList controllers = geometricalControllers;
    if (controllers == null) {
        controllers = new ArrayList();
        geometricalControllers = controllers;
    }
    controllers.add(controller);
}
/**
 * Removes a Controller from this Spatial's list of controllers, if it exists.
 *
 * @param controller
 *            The Controller to remove
 * @return True if the Controller was in the list to remove.
 * @see com.jme.scene.Controller
 */
public boolean removeController(Controller controller) {
    ArrayList controllers = geometricalControllers;
    if (controllers == null) {
        controllers = new ArrayList();
        geometricalControllers = controllers;
    }
    return controllers.remove(controller);
}
/**
 * Returns the controller in this list of controllers at index i.
 *
 * @param i
 *            The index to get a controller from.
 * @return The controller at index i.
 * @see com.jme.scene.Controller
 */
public Controller getController(int i) {
    if (geometricalControllers == null) {
        geometricalControllers = new ArrayList();
    }
    Object controller = geometricalControllers.get(i);
    return (Controller) controller;
}
/**
 * Returns the ArrayList that contains this spatial's Controllers, creating
 * an empty one first if the field is null.
 *
 * @return This spatial's geometricalControllers.
 */
public ArrayList getControllers() {
    ArrayList controllers = geometricalControllers;
    if (controllers == null) {
        controllers = new ArrayList();
        geometricalControllers = controllers;
    }
    return controllers;
}
/**
 *
 * <code>getWorldBound</code> retrieves the world bound at this node
 * level.
 *
 * @return the world bound at this level.
 */
public BoundingVolume getWorldBound() {
    return worldBound;
}
/**
 *
 * <code>setWorldBound</code> sets the world bound for this node level.
 * This function should only be used in rare situations. In most cases,
 * users will let jME's engine set the world bound and will instead call
 * setModelBound on the leaf nodes.
 *
 * @param worldBound
 *            the world bound at this level.
 * @see com.jme.scene.Geometry#setModelBound(com.jme.bounding.BoundingVolume)
 */
public void setWorldBound(BoundingVolume worldBound) {
    this.worldBound = worldBound;
}
/**
 *
 * <code>onDraw</code> checks the spatial with the camera to see if it should
 * be culled; if not, the node's draw method is called.
 *
 * @param r
 *            the renderer used for display.
 */
public void onDraw(Renderer r) {
    int cm = getCullMode();
    if (cm == CULL_ALWAYS) {
        return;
    }

    Camera camera = r.getCamera();
    // Save the camera's plane state so we can restore it after our test —
    // the contains() call below mutates it.
    int state = camera.getPlaneState();

    // Inherit the parent's frustum verdict; only re-test against the camera
    // if the parent was neither fully inside nor fully outside.
    frustrumIntersects = (parent != null ? parent.frustrumIntersects
            : Camera.INTERSECTS_FRUSTUM);
    if (cm == CULL_DYNAMIC && frustrumIntersects == Camera.INTERSECTS_FRUSTUM) {
        frustrumIntersects = camera.contains(worldBound);
    }

    if (cm == CULL_NEVER || frustrumIntersects != Camera.OUTSIDE_FRUSTUM) {
        draw(r);
    }
    camera.setPlaneState(state);
}
/**
 * <code>getType</code> returns an int representing the class type
 * of this Spatial. This allows avoidance of instanceof. Comparisons
 * are to be done via bitwise &amp; allowing checking of superclass instance.
 */
public abstract int getType();
/**
 *
 * <code>draw</code> abstract method that handles drawing data to the
 * renderer if it is geometry and passing the call to its children if it is
 * a node.
 *
 * @param r
 *            the renderer used for display.
 */
public abstract void draw(Renderer r);
/**
 *
 * <code>getWorldRotation</code> retrieves the absolute rotation of the
 * Spatial.
 *
 * @return the Spatial's world rotation quaternion.
 */
public Quaternion getWorldRotation() {
    return worldRotation;
}
/**
 *
 * <code>getWorldTranslation</code> retrieves the absolute translation of
 * the spatial.
 *
 * @return the world's translation vector.
 */
public Vector3f getWorldTranslation() {
    return worldTranslation;
}
/**
 *
 * <code>getWorldScale</code> retrieves the absolute scale factor of the
 * spatial.
 *
 * @return the world's scale factor.
 */
public Vector3f getWorldScale() {
    return worldScale;
}
/**
 * <code>setCullMode</code> sets how scene culling should work on this
 * spatial during drawing.
 *
 * CULL_DYNAMIC: Determine via the defined Camera planes whether or not this
 * Spatial should be culled.
 *
 * CULL_ALWAYS: Always throw away this object and any children during draw
 * commands.
 *
 * CULL_NEVER: Never throw away this object (always draw it)
 *
 * CULL_INHERIT: Look for a non-inherit parent and use its cull mode.
 *
 * NOTE: You must set this AFTER attaching to a parent or it will be reset
 * with the parent's cullMode value.
 *
 * @param mode
 *            one of CULL_DYNAMIC, CULL_ALWAYS, CULL_INHERIT or CULL_NEVER
 */
public void setCullMode(int mode) {
    cullMode = mode;
}
/**
 * Resolves the effective cull mode: an explicitly set mode wins, CULL_INHERIT
 * delegates to the parent, and a parentless inheriting spatial falls back to
 * CULL_DYNAMIC.
 *
 * @see #setCullMode(int)
 * @return the cull mode of this spatial
 */
public int getCullMode() {
    if (cullMode != CULL_INHERIT) {
        return cullMode;
    }
    return parent != null ? parent.getCullMode() : CULL_DYNAMIC;
}
/**
 * <code>rotateUpTo</code> is a util function that alters the
 * localrotation to point the Y axis in the direction given by newUp.
 * Uses the shared static scratch objects, so it is not safe to call from
 * multiple threads concurrently.
 *
 * @param newUp the up vector to use - assumed to be a unit vector.
 */
public void rotateUpTo(Vector3f newUp) {
    //First figure out the current up vector.
    Vector3f upY = compVecA.set(Vector3f.UNIT_Y);
    localRotation.multLocal(upY);

    // get angle between vectors
    float angle = upY.angleBetween(newUp);

    //figure out rotation axis by taking cross product
    Vector3f rotAxis = upY.crossLocal(newUp);

    // Build a rotation quat and apply current local rotation.
    Quaternion q = compQuat;
    q.fromAngleAxis(angle, rotAxis);
    q.mult(localRotation, localRotation);
}
/**
 * <code>lookAt</code> is a convenience method for auto-setting the
 * local rotation based on a position and an up vector. It computes
 * the rotation to transform the z-axis to point onto 'position'
 * and the y-axis to 'up'. Unlike {@link Quaternion#lookAt} this method
 * takes a world position to look at, not a relative direction.
 *
 * @param position
 *            where to look at in terms of world coordinates
 * @param upVector
 *            a vector indicating the (local) up direction.
 *            (typically {0, 1, 0} in jME.)
 */
public void lookAt(Vector3f position, Vector3f upVector) {
    // Direction from this spatial's world position to the target.
    compVecA.set( position ).subtractLocal( getWorldTranslation() );
    getLocalRotation().lookAt( compVecA, upVector );
}
/**
 * Calling this method tells the scenegraph that it is not necessary to
 * update bounds from this point in the scenegraph on down to the leaves.
 * This is useful for performance gains where you have scene items that do
 * not move (at all) or change shape and thus do not need constant
 * re-calculation of boundaries.
 *
 * When you call lock, the bounds are first updated to ensure current bounds
 * are accurate.
 *
 * @see #unlockBounds()
 */
public void lockBounds() {
    updateWorldBound();
    lockedMode |= LOCKED_BOUNDS;
}
/**
 * Flags this spatial and those below it in the scenegraph to not
 * recalculate world transforms such as translation, rotation and scale on
 * every update.
 *
 * This is useful for efficiency when you have scene items that stay in one
 * place all the time as it avoids needless recalculation of transforms.
 *
 * @see #unlockTransforms()
 */
public void lockTransforms() {
    // Bring world vectors up to date once before freezing them.
    updateWorldVectors();
    lockedMode |= LOCKED_TRANSFORMS;
}
/**
 * Flags this spatial and those below it that any meshes in the specified
 * scenegraph location or lower will not have changes in vertex, texcoord,
 * normal or color data. This allows optimizations by the engine such as
 * creating display lists from the data.
 *
 * Calling this method does not provide a guarantee that data changes will
 * not be allowed or will/won't show up in the scene. It is merely a hint to
 * the engine.
 *
 * @param r A renderer to lock against.
 * @see #unlockMeshes(Renderer)
 */
public void lockMeshes(Renderer r) {
    updateRenderState();
    lockedMode |= LOCKED_MESH_DATA;
}
/**
 * Convenience function for locking all aspects of a Spatial.
 * @see #lockBounds()
 * @see #lockTransforms()
 * @see #lockMeshes(Renderer)
 */
public void lock(Renderer r) {
    lockBounds();
    lockTransforms();
    lockMeshes(r);
}
/**
 * Flags this spatial and those below it to allow for bounds updating (the
 * default).
 *
 * @see #lockBounds()
 */
public void unlockBounds() {
    lockedMode = lockedMode & ~LOCKED_BOUNDS;
}
/**
 * Flags this spatial and those below it to allow for transform updating (the
 * default).
 *
 * @see #lockTransforms()
 */
public void unlockTransforms() {
    lockedMode = lockedMode & ~LOCKED_TRANSFORMS;
}
/**
 * Flags this spatial and those below it to allow for mesh updating (the
 * default). Generally this means that any display lists setup will be
 * erased and released.
 *
 * @param r The renderer used to lock against.
 * @see #lockMeshes(Renderer)
 */
public void unlockMeshes(Renderer r) {
    lockedMode = lockedMode & ~LOCKED_MESH_DATA;
}
/**
 * Convenience function for unlocking all aspects of a Spatial.
 * @see #unlockBounds()
 * @see #unlockTransforms()
 * @see #unlockMeshes(Renderer)
 */
public void unlock(Renderer r) {
    unlockBounds();
    unlockTransforms();
    unlockMeshes(r);
}
/**
 * @return a bitwise combination (of LOCKED_BOUNDS, LOCKED_MESH_DATA and
 *         LOCKED_TRANSFORMS) of the current locks established on this
 *         Spatial.
 */
public int getLocks() {
    return lockedMode;
}
/**
 *
 * <code>updateGeometricState</code> updates all the geometry information
 * for the node: world transforms first, then (unless bounds are locked)
 * the world bound, propagating the new bound to the root when this node
 * initiated the update.
 *
 * @param time
 *            the frame time.
 * @param initiator
 *            true if this node started the update process.
 */
public void updateGeometricState(float time, boolean initiator) {
    updateWorldData(time);
    if ((lockedMode & LOCKED_BOUNDS) == 0) {
        updateWorldBound();
        if (initiator) {
            propagateBoundToRoot();
        }
    }
}
/**
 *
 * <code>updateWorldData</code> updates the world transforms from the
 * parent down to the leaf, first running all attached controllers.
 *
 * @param time
 *            the frame time.
 */
public void updateWorldData(float time) {
    // update spatial state via controllers
    Object controller;
    for (int i = 0, gSize = geometricalControllers.size(); i < gSize; i++) {
        try {
            controller = geometricalControllers.get( i );
        } catch ( IndexOutOfBoundsException e ) {
            // a controller was removed in Controller.update (note: this may skip one controller)
            break;
        }
        if ( controller != null ) {
            ( (Controller) controller ).update( time );
        }
    }

    updateWorldVectors();
}
/**
 * Recomputes the world scale, rotation and translation from the local
 * transforms and the parent's world transforms — unless transforms are
 * locked via {@link #lockTransforms()}.
 */
public void updateWorldVectors() {
    // Bug fix: this previously tested LOCKED_BOUNDS, which meant
    // lockTransforms() had no effect (transforms kept updating) while
    // lockBounds() incorrectly froze world transforms. The transform lock
    // flag is LOCKED_TRANSFORMS (set in lockTransforms()).
    if ((lockedMode & LOCKED_TRANSFORMS) == 0) {
        updateWorldScale();
        updateWorldRotation();
        updateWorldTranslation();
    }
}
// Computes worldTranslation = parentRotation * localTranslation * parentScale
// + parentTranslation; with no parent, world == local.
private void updateWorldTranslation() {
    if (parent != null) {
        worldTranslation = parent.getWorldRotation().mult(localTranslation,
                worldTranslation).multLocal(parent.getWorldScale())
                .addLocal(parent.getWorldTranslation());
    } else {
        worldTranslation.set(localTranslation);
    }
}
// Computes worldRotation = parentWorldRotation * localRotation; with no
// parent, world == local.
private void updateWorldRotation() {
    if (parent != null) {
        parent.getWorldRotation().mult(localRotation, worldRotation);
    } else {
        worldRotation.set(localRotation);
    }
}
// Computes worldScale = parentWorldScale * localScale (component-wise); with
// no parent, world == local.
private void updateWorldScale() {
    if (parent != null) {
        worldScale.set(parent.getWorldScale()).multLocal(localScale);
    } else {
        worldScale.set(localScale);
    }
}
/**
 *
 * <code>updateWorldBound</code> updates the bounding volume of the world.
 * Abstract, geometry transforms the bound while node merges the children's
 * bound. In most cases, users will want to call updateModelBound() and let
 * this function be called automatically during updateGeometricState().
 *
 */
public abstract void updateWorldBound();
/**
 * Updates the render state values of this Spatial and any children it has.
 * Should be called whenever render states change.
 */
public void updateRenderState() {
    // null marks this spatial as the initiator of the update.
    updateRenderState(null);
}
/**
 * Called internally. Updates the render states of this Spatial. The stack
 * contains parent render states; a null argument marks this spatial as the
 * initiator, which gathers all ancestor states itself.
 *
 * @param parentStates
 *            The list of parent renderstates, or null when initiating.
 */
protected void updateRenderState(Stack[] parentStates) {
    boolean initiator = (parentStates == null);

    // first we need to get all the states from parent to us.
    if (initiator) {
        // grab all states from root to here.
        parentStates = new Stack[RenderState.RS_MAX_STATE];
        for (int x = 0; x < parentStates.length; x++)
            parentStates[x] = new Stack();

        propagateStatesFromRoot(parentStates);
    } else {
        // Non-initiator: push our own states for our subtree to see.
        for (int x = 0; x < RenderState.RS_MAX_STATE; x++) {
            if (getRenderState(x) != null)
                parentStates[x].push(getRenderState(x));
        }
    }

    applyRenderState(parentStates);

    // restore previous state stacks if we are not the initiator — pops must
    // mirror the pushes above exactly so siblings see the right parents.
    if (!initiator) {
        for (int x = 0; x < RenderState.RS_MAX_STATE; x++)
            if (getRenderState(x) != null)
                parentStates[x].pop();
    }
}
/**
 * Called during updateRenderState(Stack[]), this function determines how
 * the render states are actually applied to the spatial and any children it
 * may have. By default, this function does nothing; subclasses override it.
 *
 * @param states
 *            An array of stacks for each state.
 */
protected void applyRenderState(Stack[] states) {
}
/**
 * Called during updateRenderState(Stack[]), this function goes up the scene
 * graph tree until the parent is null and pushes RenderStates onto the
 * states Stack array.
 *
 * @param states
 *            The Stack[] to push states onto.
 */
protected void propagateStatesFromRoot(Stack[] states) {
    // Recurse to the root first so ancestor states are pushed before ours.
    if (parent != null) {
        parent.propagateStatesFromRoot(states);
    }
    // Then push this spatial's non-null states onto the per-type stacks.
    for (int x = 0; x < RenderState.RS_MAX_STATE; x++) {
        RenderState state = getRenderState(x);
        if (state != null) {
            states[x].push(state);
        }
    }
}
/**
 *
 * <code>propagateBoundToRoot</code> passes the new world bound up the
 * tree to the root, refreshing each ancestor's bound along the way.
 *
 */
public void propagateBoundToRoot() {
    Node ancestor = parent;
    if (ancestor != null) {
        ancestor.updateWorldBound();
        ancestor.propagateBoundToRoot();
    }
}
/**
 * <code>getParent</code> retrieves this node's parent. If the parent is
 * null this is the root node.
 *
 * @return the parent of this node.
 */
public Node getParent() {
    return parent;
}
/**
 * Called by {@link Node#attachChild(Spatial)} and {@link Node#detachChild(Spatial)} - don't call directly.
 * <code>setParent</code> sets the parent of this node.
 *
 * @param parent
 *            the parent of this node.
 */
protected void setParent(Node parent) {
    this.parent = parent;
}
/**
 * <code>removeFromParent</code> removes this Spatial from its parent.
 *
 * @return true if it has a parent and performed the remove.
 */
public boolean removeFromParent() {
    if (parent == null) {
        return false;
    }
    parent.detachChild(this);
    return true;
}
/**
 * <code>getLocalRotation</code> retrieves the local rotation of this
 * node.
 *
 * @return the local rotation of this node.
 */
public Quaternion getLocalRotation() {
    return localRotation;
}
/**
 * <code>setLocalRotation</code> sets the local rotation of this node
 * from a rotation matrix.
 *
 * @param rotation
 *            the new local rotation.
 */
public void setLocalRotation(Matrix3f rotation) {
    if (localRotation == null)
        localRotation = new Quaternion();
    localRotation.fromRotationMatrix(rotation);
    // NOTE(review): copies local into world immediately, ignoring the parent's
    // transform; presumably corrected on the next updateWorldVectors() — confirm.
    this.worldRotation.set(this.localRotation);
}
/**
 *
 * <code>setLocalRotation</code> sets the local rotation of this node,
 * using a quaternion to build the matrix. The quaternion reference is
 * stored directly (no defensive copy).
 *
 * @param quaternion
 *            the quaternion that defines the matrix.
 */
public void setLocalRotation(Quaternion quaternion) {
    localRotation = quaternion;
    // World value is overwritten with the local one until the next world update.
    this.worldRotation.set(this.localRotation);
}
/**
 * <code>getLocalScale</code> retrieves the local scale of this node.
 *
 * @return the local scale of this node.
 */
public Vector3f getLocalScale() {
    return localScale;
}
/**
 * <code>setLocalScale</code> sets the local scale of this node as a
 * uniform scale on all three axes.
 *
 * @param localScale
 *            the new local scale, applied to x, y and z
 */
public void setLocalScale(float localScale) {
    this.localScale.x = localScale;
    this.localScale.y = localScale;
    this.localScale.z = localScale;
    // World value mirrors local until the next world update.
    this.worldScale.set(this.localScale);
}
/**
 * <code>setLocalScale</code> sets the local scale of this node. The vector
 * reference is stored directly (no defensive copy).
 *
 * @param localScale
 *            the new local scale.
 */
public void setLocalScale(Vector3f localScale) {
    this.localScale = localScale;
    this.worldScale.set(this.localScale);
}
/**
 * <code>getLocalTranslation</code> retrieves the local translation of
 * this node.
 *
 * @return the local translation of this node.
 */
public Vector3f getLocalTranslation() {
    return localTranslation;
}
/**
 * <code>setLocalTranslation</code> sets the local translation of this
 * node. The vector reference is stored directly (no defensive copy).
 *
 * @param localTranslation
 *            the local translation of this node.
 */
public void setLocalTranslation(Vector3f localTranslation) {
    this.localTranslation = localTranslation;
    this.worldTranslation.set(this.localTranslation);
}
/**
*
* <code>setRenderState</code> sets a render state for this node. Note,
* there can only be one render state per type per node. That is, there can
* only be a single AlphaState a single TextureState, etc. If there is
* already a render state for a type set the old render state will be
* returned. Otherwise, null is returned.
*
* @param rs
* the render state to add.
* @return the old render state.
*/
public RenderState setRenderState(RenderState rs) {
if ( renderStateList == null )
{
renderStateList = new RenderState[RenderState.RS_MAX_STATE];
}
RenderState oldState = renderStateList[rs.getType()];
renderStateList[rs.getType()] = rs;
return oldState;
}
/**
* Returns the requested RenderState that this Spatial currently has set or
* null if none is set.
*
* @param type
* the renderstate type to retrieve
* @return a renderstate at the given position or null
*/
public RenderState getRenderState( int type ) {
return renderStateList != null ? renderStateList[type] : null;
}
/**
* Clears a given render state index by setting it to null.
*
* @param renderStateType
* The index of a RenderState to clear
* @see com.jme.scene.state.RenderState#getType()
*/
public void clearRenderState(int renderStateType) {
if ( renderStateList != null )
{
renderStateList[renderStateType] = null;
}
}
/**
* Enforce a particular state. In other words, the given state will override
* any state of the same type set on a scene object. Remember to clear the
* state when done enforcing. Very useful for multipass techniques where
* multiple sets of states need to be applied to a scenegraph drawn multiple
* times.
*
* @param state
* state to enforce
*/
public static void enforceState(RenderState state) {
Spatial.enforcedStateList[state.getType()] = state;
}
/**
* Clears an enforced render state index by setting it to null. This allows
* object specific states to be used.
*
* @param renderStateType
* The type of RenderState to clear enforcement on.
*/
public static void clearEnforcedState(int renderStateType) {
if ( enforcedStateList != null )
{
enforcedStateList[renderStateType] = null;
}
}
/**
* sets all enforced states to null.
*
* @see com.jme.scene.Spatial#clearEnforcedState(int)
*/
public static void clearEnforcedStates() {
for (int i = 0; i < enforcedStateList.length; i++)
enforcedStateList[i] = null;
}
public void setRenderQueueMode(int renderQueueMode) {
this.renderQueueMode = renderQueueMode;
}
public int getRenderQueueMode() {
if (renderQueueMode != Renderer.QUEUE_INHERIT)
return renderQueueMode;
else if (parent != null)
return parent.getRenderQueueMode();
else
return Renderer.QUEUE_SKIP;
}
public void setZOrder(int zOrder) {
this.zOrder = zOrder;
}
public int getZOrder() {
return zOrder;
}
/**
* Sets how lights from parents should be combined for this spatial.
*
* @param lightCombineMode
* The light combine mode for this spatial
* @see com.jme.scene.state.LightState#COMBINE_CLOSEST
* @see com.jme.scene.state.LightState#COMBINE_FIRST
* @see com.jme.scene.state.LightState#COMBINE_RECENT_ENABLED
* @see com.jme.scene.state.LightState#INHERIT
* @see com.jme.scene.state.LightState#OFF
* @see com.jme.scene.state.LightState#REPLACE
*/
public void setLightCombineMode(int lightCombineMode) {
this.lightCombineMode = lightCombineMode;
}
/**
* Returns this spatial's light combine mode. If the mode is set to inherit,
* then the spatial gets its combine mode from its parent.
*
* @return The spatial's light current combine mode.
*/
public int getLightCombineMode() {
if (lightCombineMode != LightState.INHERIT)
return lightCombineMode;
else if (parent != null)
return parent.getLightCombineMode();
else
return LightState.COMBINE_FIRST;
}
/**
* Sets how textures from parents should be combined for this Spatial.
*
* @param textureCombineMode
* The new texture combine mode for this spatial.
* @see com.jme.scene.state.TextureState#COMBINE_CLOSEST
* @see com.jme.scene.state.TextureState#COMBINE_FIRST
* @see com.jme.scene.state.TextureState#COMBINE_RECENT_ENABLED
* @see com.jme.scene.state.TextureState#INHERIT
* @see com.jme.scene.state.TextureState#OFF
* @see com.jme.scene.state.TextureState#REPLACE
*/
public void setTextureCombineMode(int textureCombineMode) {
this.textureCombineMode = textureCombineMode;
}
/**
* Returns this spatial's texture combine mode. If the mode is set to
* inherit, then the spatial gets its combine mode from its parent.
*
* @return The spatial's texture current combine mode.
*/
public int getTextureCombineMode() {
if (textureCombineMode != TextureState.INHERIT)
return textureCombineMode;
else if (parent != null)
return parent.getTextureCombineMode();
else
return TextureState.COMBINE_CLOSEST;
}
/**
* Returns this spatial's last frustum intersection result. This int is set
* when a check is made to determine if the bounds of the object fall inside
* a camera's frustum. If a parent is found to fall outside the frustum, the
* value for this spatial will not be updated.
*
* Possible values include: Camera.OUTSIDE_FRUSTUM,
* Camera.INTERSECTS_FRUSTUM, and Camera.INSIDE_FRUSTUM
*
* @return The spatial's last frustum intersection result.
*/
public int getLastFrustumIntersection() {
return frustrumIntersects;
}
/**
* Overrides the last intersection result. This is useful for
* operations that want to start rendering at the middle of a
* scene tree and don't want the parent of that node to
* influence culling. (See texture renderer code for example.)
*
* Possible values include: Camera.OUTSIDE_FRUSTUM,
* Camera.INTERSECTS_FRUSTUM, and Camera.INSIDE_FRUSTUM
* @param intersects the new value, one of those given above.
*/
public void setLastFrustumIntersection(int intersects) {
frustrumIntersects = intersects;
}
/**
* sets all current states to null, and therefore forces the use of the
* default states.
*
*/
public static void clearCurrentStates() {
for (int i = 0; i < currentStates.length; i++)
currentStates[i] = null;
}
/**
* clears the specified state. The state is referenced by it's int value,
* and therefore should be called via RenderState's constant list. For
* example, RenderState.RS_ALPHA.
*
* @param state
* the state to clear.
*/
public static void clearCurrentState(int state) {
currentStates[state] = null;
}
public static RenderState getCurrentState(int state) {
return currentStates[state];
}
/**
* All non null default states are applied to the renderer.
*/
public static void applyDefaultStates() {
for (int i = 0; i < defaultStateList.length; i++) {
if (defaultStateList[i] != null)
defaultStateList[i].apply();
}
}
/**
*
* <code>calculateCollisions</code> calls findCollisions to populate the
* CollisionResults object then processes the collision results.
*
* @param scene
* the scene to test against.
* @param results
* the results object.
*/
public void calculateCollisions(Spatial scene, CollisionResults results) {
findCollisions(scene, results);
results.processCollisions();
}
    /**
     * checks this spatial against a second spatial, any collisions are stored
     * in the results object.
     *
     * @param scene
     *            the scene to test against.
     * @param results
     *            the results of the collisions.
     */
    public abstract void findCollisions(Spatial scene, CollisionResults results);
    /**
     * Tests whether this spatial collides with the given scene.
     *
     * @param scene
     *            the scene to test against.
     * @param checkTriangles
     *            presumably requests triangle-accurate testing rather than
     *            bounds-only -- TODO confirm against subclass implementations.
     * @return true if a collision is detected.
     */
    public abstract boolean hasCollision(Spatial scene, boolean checkTriangles);
    /**
     * <code>calculatePick</code> calls findPick to populate the PickResults
     * object, then processes the pick results.
     *
     * @param ray
     *            the ray to test against.
     * @param results
     *            the results object to populate and process.
     */
    public void calculatePick(Ray ray, PickResults results) {
        findPick(ray, results);
        results.processPick();
    }
    /**
     * checks this spatial against a ray, any picks are stored in the results
     * object.
     *
     * @param toTest
     *            the ray to test against.
     * @param results
     *            the results of the picks.
     */
    public abstract void findPick(Ray toTest, PickResults results);
    /**
     * This method updates the exact bounding tree of this Spatial. If this
     * spatial has children, the function is called recursively on its children.
     * Spatial objects, such as text, which don't make sense to have an exact
     * bounds are ignored.
     */
    public void updateCollisionTree() {
        // no-op in this base class; types with an exact bounds representation
        // provide their own implementation
    }
    /**
     * Returns the Spatial's name followed by the class of the spatial <br>
     * Example: "MyNode (com.jme.scene.Spatial)"
     *
     * @return Spatial's name followed by the class of the Spatial
     */
    public String toString() {
        return name + " (" + this.getClass().getName() + ')';
    }
public Spatial putClone(Spatial store, CloneCreator properties) {
if (store == null)
return null;
store.renderQueueMode = this.renderQueueMode;
store.setLocalTranslation(new Vector3f(getLocalTranslation()));
store.setLocalRotation(new Quaternion(getLocalRotation()));
store.setLocalScale(new Vector3f(getLocalScale()));
if ( renderStateList != null )
{
if (store.renderStateList == null)
store.renderStateList = new RenderState[RenderState.RS_MAX_STATE];
System.arraycopy( renderStateList, 0, store.renderStateList, 0, renderStateList.length );
}
Iterator I = geometricalControllers.iterator();
while (I.hasNext()) {
Controller c = (Controller) I.next();
Controller toAdd = c.putClone(null, properties);
if (toAdd != null)
store.addController(toAdd);
}
properties.originalToCopy.put(this, store);
return store;
}
}
|
package com.rtg.graph;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.Graphics;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.Point;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.FocusAdapter;
import java.awt.event.FocusEvent;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.prefs.Preferences;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.Box;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JComboBox;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JPopupMenu;
import javax.swing.JProgressBar;
import javax.swing.JScrollPane;
import javax.swing.JSlider;
import javax.swing.JSplitPane;
import javax.swing.JTextArea;
import javax.swing.JTextField;
import javax.swing.SwingUtilities;
import javax.swing.UIManager;
import javax.swing.WindowConstants;
import javax.swing.filechooser.FileFilter;
import com.reeltwo.jumble.annotations.JumbleIgnore;
import com.reeltwo.plot.Box2D;
import com.reeltwo.plot.Graph2D;
import com.reeltwo.plot.KeyPosition;
import com.reeltwo.plot.Plot2D;
import com.reeltwo.plot.Point2D;
import com.reeltwo.plot.PointPlot2D;
import com.reeltwo.plot.renderer.Mapping;
import com.reeltwo.plot.ui.InnerZoomPlot;
import com.rtg.util.ContingencyTable;
import com.rtg.util.Resources;
import com.rtg.util.StringUtils;
import com.rtg.util.diagnostic.NoTalkbackSlimException;
import com.rtg.util.io.FileUtils;
import com.rtg.vcf.eval.RocFilter;
/**
* Starts a new Swing window for displaying {@code Graph2D}s in. The window has
* zooming and picture in picture functionality enabled.
*
*/
@JumbleIgnore
public class RocPlot {
  /** Minimum allowed line width */
  public static final int LINE_WIDTH_MIN = 1;
  /** Maximum allowed line width */
  public static final int LINE_WIDTH_MAX = 10;
  // File chooser stored user-preference keys
  private static final String CHOOSER_WIDTH = "chooser-width";
  private static final String CHOOSER_HEIGHT = "chooser-height";
  // Labels used for the graph-type selector entries and default titles
  private static final String ROC_PLOT = "ROC Plot";
  private static final String PRECISION = "Precision";
  private static final String SENSITIVITY = "Sensitivity";
  private static final String PRECISION_SENSITIVITY = PRECISION + "/" + SENSITIVITY;
  private static final String ROC = "ROC";
  // Top-level container for the whole plot UI
  private final JPanel mMainPanel;
  /** panel showing plot */
  private final RocZoomPlotPanel mZoomPP;
  /** a progress bar */
  private final JProgressBar mProgressBar;
  /** pop up menu */
  private final JPopupMenu mPopup;
  // Banner label holding the logo image and current title text
  private final JLabel mIconLabel;
  // Panel listing one entry per loaded ROC curve
  private final RocLinesPanel mRocLinesPanel;
  // Controls in the right-hand column
  private final JSlider mLineWidthSlider;
  private final JCheckBox mScoreCB;
  private final JCheckBox mSelectAllCB;
  private final JButton mOpenButton;
  private final JButton mCommandButton;
  private final JTextField mTitleEntry;
  private final JComboBox<String> mGraphType;
  private JSplitPane mSplitPane;
  private final JLabel mStatusLabel;
  // Graph data and state
  final Map<String, DataBundle> mData = Collections.synchronizedMap(new HashMap<>());
  boolean mShowScores = true;
  int mLineWidth = 2;
  // Accumulated axis extremes across all graphs shown so far (NaN until first graph)
  private float mMaxX = Float.NaN;
  private float mMaxY = Float.NaN;
  private float mMinX = Float.NaN;
  private float mMinY = Float.NaN;
  private final JScrollPane mScrollPane;
  private final JFileChooser mFileChooser;
  // Directory of the most recently loaded file, used as the chooser's start directory
  private File mFileChooserParent = null;
  // Index of the last colour handed out from PALETTE (cycled)
  private int mColorCounter = -1;
  // Line colours assigned to newly added curves, in rotation
  static final Color[] PALETTE = {
    new Color(0xFF4030),
    new Color(0x30F030),
    new Color(0x3030FF),
    new Color(0xFF30FF),
    new Color(0x30FFFF),
    new Color(0xA05050),
    new Color(0xF0C040),
    new Color(0x707070),
    new Color(0xC00000),
    new Color(0x00C000),
    new Color(0x0000C0),
    new Color(0xC000C0),
    new Color(0x00C0C0),
    new Color(0xB0B0B0),
  };
private static class RocFileFilter extends FileFilter {
@Override
public String getDescription() {
return "ROC data files(*" + RocFilter.ROC_EXT + ",*" + RocFilter.ROC_EXT + FileUtils.GZ_SUFFIX + ")";
}
@Override
public boolean accept(File f) {
final String flc = f.getName();
return f.isDirectory() || flc.endsWith(RocFilter.ROC_EXT) || flc.endsWith(RocFilter.ROC_EXT + FileUtils.GZ_SUFFIX);
}
}
  /**
   * Creates a new swing plot.
   * @param precisionRecall true defaults to precision recall graph
   */
  RocPlot(boolean precisionRecall) {
    mMainPanel = new JPanel();
    // Prevent renaming/deleting files from within the chooser
    UIManager.put("FileChooser.readOnly", Boolean.TRUE);
    mFileChooser = new JFileChooser();
    // Switch the chooser to details view when the look-and-feel provides that action
    final Action details = mFileChooser.getActionMap().get("viewTypeDetails");
    if (details != null) {
      details.actionPerformed(null);
    }
    mFileChooser.setMultiSelectionEnabled(true);
    mFileChooser.setFileFilter(new RocFileFilter());
    mZoomPP = new RocZoomPlotPanel();
    mZoomPP.setOriginIsMin(true);
    // Indeterminate progress bar, also doubles as a status/metrics line via setString
    mProgressBar = new JProgressBar(-1, -1);
    mProgressBar.setVisible(true);
    mProgressBar.setStringPainted(true);
    mProgressBar.setIndeterminate(true);
    mStatusLabel = new JLabel();
    mPopup = new JPopupMenu();
    mRocLinesPanel = new RocLinesPanel(this);
    mScrollPane = new JScrollPane(mRocLinesPanel, JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED, JScrollPane.HORIZONTAL_SCROLLBAR_NEVER);
    mScrollPane.setWheelScrollingEnabled(true);
    mLineWidthSlider = new JSlider(JSlider.HORIZONTAL, LINE_WIDTH_MIN, LINE_WIDTH_MAX, 1);
    mScoreCB = new JCheckBox("Show Scores");
    mScoreCB.setSelected(true);
    mSelectAllCB = new JCheckBox("Select / Deselect all");
    mTitleEntry = new JTextField("ROC");
    mTitleEntry.setMaximumSize(new Dimension(Integer.MAX_VALUE, mTitleEntry.getPreferredSize().height));
    mOpenButton = new JButton("Open...");
    mOpenButton.setToolTipText("Add a new curve from a file");
    mCommandButton = new JButton("Cmd...");
    mCommandButton.setToolTipText("Send the equivalent rocplot command-line to the terminal");
    final ImageIcon icon = createImageIcon("com/rtg/graph/resources/realtimegenomics_logo.png", "RTG Logo");
    mIconLabel = new JLabel(icon);
    mIconLabel.setBackground(new Color(16, 159, 205));
    mIconLabel.setForeground(Color.WHITE);
    mIconLabel.setOpaque(true);
    mIconLabel.setFont(new Font("Arial", Font.BOLD, 24));
    mIconLabel.setHorizontalAlignment(JLabel.LEFT);
    mIconLabel.setIconTextGap(50);
    if (icon != null) {
      mIconLabel.setMinimumSize(new Dimension(icon.getIconWidth(), icon.getIconHeight()));
    }
    mGraphType = new JComboBox<>(new String[] {ROC_PLOT, PRECISION_SENSITIVITY});
    mGraphType.setSelectedItem(precisionRecall ? PRECISION_SENSITIVITY : ROC_PLOT);
    // All components exist at this point; lay them out and attach listeners
    configureUI();
  }
protected static ImageIcon createImageIcon(String path, String description) {
final java.net.URL imgURL = Resources.getResource(path);
if (imgURL != null) {
return new ImageIcon(imgURL, description);
} else {
System.err.println("Couldn't find file: " + path);
return null;
}
}
  /**
   * Layout and show the GUI. Builds the split pane (plot on the left,
   * control column on the right), the popup menu, and wires up all control
   * listeners. Called once from the constructor, after every component has
   * been created.
   */
  private void configureUI() {
    mMainPanel.setLayout(new BorderLayout());
    final JPanel pane = new JPanel(new BorderLayout());
    pane.add(mZoomPP, BorderLayout.CENTER);
    final JPanel rightPanel = new JPanel(new GridBagLayout());
    mSplitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT, pane, rightPanel);
    mSplitPane.setContinuousLayout(true);
    mSplitPane.setOneTouchExpandable(true);
    mSplitPane.setResizeWeight(1);
    mMainPanel.add(mSplitPane, BorderLayout.CENTER);
    mMainPanel.add(mStatusLabel, BorderLayout.SOUTH);
    // Right-click popup over the plot: zoom-out plus print/save actions
    mPopup.setLightWeightPopupEnabled(false);
    mPopup.add(mZoomPP.getZoomOutAction());
    mPopup.addSeparator();
    mPopup.add(mZoomPP.getPrintAction());
    mPopup.add(mZoomPP.getSaveImageAction());
    mPopup.add(mZoomPP.getSnapShotAction());
    mPopup.addSeparator();
    mZoomPP.addMouseListener(new PopupListener());
    mZoomPP.setBackground(Color.WHITE);
    mZoomPP.setGraphBGColor(new Color(0.8f, 0.9f, 1.0f), Color.WHITE);
    mZoomPP.setGraphShadowWidth(4);
    // Shared constraints for stacking controls in the right-hand column
    final GridBagConstraints c = new GridBagConstraints();
    c.insets = new Insets(2, 2, 2, 2);
    c.gridx = 0;
    c.weightx = 1; c.weighty = 0;
    c.fill = GridBagConstraints.HORIZONTAL; c.anchor = GridBagConstraints.CENTER;
    rightPanel.add(new JLabel("Title", JLabel.CENTER), c);
    // Title field: ENTER applies the new title, ESC reverts to the current one
    mTitleEntry.addKeyListener(new KeyAdapter() {
      @Override
      public void keyPressed(KeyEvent e) {
        if (e.getKeyCode() == KeyEvent.VK_ESCAPE) {
          mTitleEntry.setText(mIconLabel.getText()); // revert edit
        }
        if (e.getKeyCode() == KeyEvent.VK_ENTER) {
          setTitle(mTitleEntry.getText());
          showCurrentGraph();
        }
      }
    });
    mTitleEntry.addFocusListener(new FocusAdapter() {
      @Override
      public void focusLost(FocusEvent e) {
        setTitle(mTitleEntry.getText());
        showCurrentGraph();
      }
    });
    rightPanel.add(mTitleEntry, c);
    rightPanel.add(mGraphType, c);
    // Switching graph type redraws, swaps the default title, and resets the zoom
    mGraphType.addItemListener(e -> {
      showCurrentGraph();
      SwingUtilities.invokeLater(() -> {
        final String text = mTitleEntry.getText();
        if (mGraphType.getSelectedItem().equals(PRECISION_SENSITIVITY)) {
          if (text.equals(ROC)) {
            mTitleEntry.setText(PRECISION_SENSITIVITY);
          }
        } else {
          if (text.equals(PRECISION_SENSITIVITY)) {
            mTitleEntry.setText(ROC);
          }
        }
        mZoomPP.setCrossHair(null);
        mZoomPP.getZoomOutAction().actionPerformed(new ActionEvent(mGraphType, 0, "GraphTypeChanged"));
      });
    });
    rightPanel.add(new JLabel("Line Width", JLabel.CENTER), c);
    mLineWidthSlider.setSnapToTicks(true);
    mLineWidthSlider.setValue(mLineWidth);
    mLineWidthSlider.addChangeListener(e -> {
      mLineWidth = mLineWidthSlider.getValue();
      showCurrentGraph();
    });
    rightPanel.add(mLineWidthSlider, c);
    mScoreCB.addItemListener(e -> {
      mShowScores = mScoreCB.isSelected();
      showCurrentGraph();
    });
    mScoreCB.setAlignmentX(0);
    rightPanel.add(mScoreCB, c);
    // Propagate select/deselect-all to every curve's line panel
    mSelectAllCB.addItemListener(e -> {
      for (final Component component : mRocLinesPanel.getComponents()) {
        final RocLinePanel cp = (RocLinePanel) component;
        cp.setSelected(mSelectAllCB.isSelected());
      }
    });
    mSelectAllCB.setSelected(true);
    mOpenButton.addActionListener(new LoadFileListener());
    // "Cmd..." shows the equivalent rocplot command line in a dialog
    mCommandButton.addActionListener(e -> {
      final String command = getCommand();
      final JTextArea ta = new JTextArea(1 + command.length() / 120, 120);
      ta.setFont(new Font(Font.MONOSPACED, Font.PLAIN, 12));
      ta.setText(command);
      ta.setCaretPosition(0);
      ta.setLineWrap(true);
      ta.setEditable(false);
      JOptionPane.showMessageDialog(mMainPanel, new JScrollPane(ta), "Equivalent rocplot command", JOptionPane.INFORMATION_MESSAGE);
    });
    final JPanel namePanel = new JPanel(new GridBagLayout());
    c.fill = GridBagConstraints.NONE; c.anchor = GridBagConstraints.LINE_START;
    rightPanel.add(mOpenButton, c);
    rightPanel.add(mSelectAllCB, c);
    c.fill = GridBagConstraints.HORIZONTAL; c.anchor = GridBagConstraints.CENTER;
    // Scrolling list of loaded curves takes all remaining vertical space
    final GridBagConstraints scrollConstraints = new GridBagConstraints();
    scrollConstraints.gridx = 0; scrollConstraints.gridy = 1;
    scrollConstraints.weightx = 2; scrollConstraints.weighty = 2;
    scrollConstraints.fill = GridBagConstraints.BOTH;
    mRocLinesPanel.setPreferredSize(new Dimension(mScrollPane.getViewport().getViewSize().width, mRocLinesPanel.getPreferredSize().height));
    namePanel.add(mScrollPane, scrollConstraints);
    c.weighty = 1;
    c.fill = GridBagConstraints.BOTH;
    rightPanel.add(namePanel, c);
    c.weighty = 0;
    c.fill = GridBagConstraints.NONE; c.anchor = GridBagConstraints.FIRST_LINE_START;
    final Box b = Box.createHorizontalBox();
    b.add(mOpenButton);
    b.add(mCommandButton);
    rightPanel.add(b, c);
    pane.add(mProgressBar, BorderLayout.SOUTH);
    mIconLabel.setText(mTitleEntry.getText());
    pane.add(mIconLabel, BorderLayout.NORTH);
  }
  @JumbleIgnore
  /**
   * Handles the "Open..." button: shows the file chooser (restoring its last
   * size and directory), loads each selected ROC file, and persists the
   * chooser's dimensions back to user preferences.
   */
  private class LoadFileListener implements ActionListener {
    @Override
    public void actionPerformed(ActionEvent e) {
      if (mFileChooserParent != null) {
        mFileChooser.setCurrentDirectory(mFileChooserParent);
      }
      // Restore the chooser size saved from the previous invocation, if any
      final Preferences prefs = Preferences.userNodeForPackage(RocPlot.this.getClass());
      if (prefs.getInt(CHOOSER_WIDTH, -1) != -1 && prefs.getInt(CHOOSER_HEIGHT, -1) != -1) {
        mFileChooser.setPreferredSize(new Dimension(prefs.getInt(CHOOSER_WIDTH, 640), prefs.getInt(CHOOSER_HEIGHT, 480)));
      }
      if (mFileChooser.showOpenDialog(mMainPanel.getTopLevelAncestor()) == JFileChooser.APPROVE_OPTION) {
        final File[] files = mFileChooser.getSelectedFiles();
        for (File f : files) {
          try {
            loadFile(f, "", new ParseRocFile.NullProgressDelegate());
          } catch (final IOException | NoTalkbackSlimException e1) {
            // Report per-file failures but continue loading the rest
            JOptionPane.showMessageDialog(mMainPanel.getTopLevelAncestor(),
              "Could not open file: " + f.getPath() + "\n"
                + (e1.getMessage().length() > 100 ? e1.getMessage().substring(0, 100) + "..." : e1.getMessage()),
              "Invalid ROC File", JOptionPane.ERROR_MESSAGE);
          }
        }
      }
      // Remember the chooser size the user left it at
      final Dimension r = mFileChooser.getSize();
      prefs.putInt(CHOOSER_WIDTH, (int) r.getWidth());
      prefs.putInt(CHOOSER_HEIGHT, (int) r.getHeight());
    }
  }
private String getCommand() {
final StringBuilder sb = new StringBuilder("rtg rocplot");
sb.append(" --").append(RocPlotCli.TITLE_FLAG).append(' ').append(StringUtils.dumbQuote(mTitleEntry.getText()));
sb.append(" --").append(RocPlotCli.LINE_WIDTH_FLAG).append(' ').append(mLineWidth);
if (mShowScores) {
sb.append(" --").append(RocPlotCli.SCORES_FLAG);
}
if (mGraphType.getSelectedItem().equals(PRECISION_SENSITIVITY)) {
sb.append(" --").append(RocPlotCli.PRECISION_SENSITIVITY_FLAG);
}
if (mZoomPP.isZoomed()) {
final ExternalZoomGraph2D graph = (ExternalZoomGraph2D) mZoomPP.getGraph();
if (graph != null) {
sb.append(" --").append(RocPlotCli.ZOOM_FLAG).append(' ').append(graph.getZoomString());
}
}
for (final Component component : mRocLinesPanel.getComponents()) {
final RocLinePanel cp = (RocLinePanel) component;
if (cp.isSelected()) {
sb.append(" --").append(RocPlotCli.CURVE_FLAG).append(" ").append(StringUtils.dumbQuote(cp.getPath() + "=" + cp.getLabel()));
}
}
return sb.toString();
}
// Adds the notion of painting a current crosshair position
@JumbleIgnore
private static class RocZoomPlotPanel extends InnerZoomPlot {
private Point2D mCrosshair; // In TP / FP coordinates.
RocZoomPlotPanel() {
super();
}
@Override
public void paint(Graphics g) {
super.paint(g);
final Mapping[] mapping = getMapping();
if (mapping != null && mapping.length > 1 && mCrosshair != null) {
Point p = new Point((int) mapping[0].worldToScreen(mCrosshair.getX()), (int) mapping[1].worldToScreen(mCrosshair.getY()));
final boolean inView = p.x >= mapping[0].getScreenMin() && p.x <= mapping[0].getScreenMax()
&& p.y <= mapping[1].getScreenMin() && p.y >= mapping[1].getScreenMax(); // Y screen min/max is inverted due to coordinate system
if (inView) {
p = SwingUtilities.convertPoint(this, p, this);
g.setColor(Color.BLACK);
final int size = 9;
g.drawLine(p.x - size, p.y - size, p.x + size, p.y + size);
g.drawLine(p.x - size, p.y + size, p.x + size, p.y - size);
}
}
}
void setCrossHair(Point2D p) {
mCrosshair = p;
}
public void setZoom(Box2D zoom) {
final ExternalZoomGraph2D graph = (ExternalZoomGraph2D) getGraph();
if (graph != null) {
graph.setZoom(zoom);
}
}
}
  /**
   * A Graph2D whose visible range (zoom) can be set externally and reported
   * as a command-line compatible string.
   */
  abstract static class ExternalZoomGraph2D extends Graph2D {
    /**
     * Sets the visible range of the graph to the given box, expressed in
     * primary-axis world coordinates.
     * @param zoom the new visible range
     */
    void setZoom(Box2D zoom) {
      if (uses(Graph2D.Y, Graph2D.TWO)) { // Update alternate axis before setting new range on the primary
        final Mapping m = new Mapping(getLo(Graph2D.Y, Graph2D.ONE), getHi(Graph2D.Y, Graph2D.ONE), getLo(Graph2D.Y, Graph2D.TWO), getHi(Graph2D.Y, Graph2D.TWO));
        setRange(Graph2D.Y, Graph2D.TWO, m.worldToScreen(zoom.getYLo()), m.worldToScreen(zoom.getYHi()));
      }
      setRange(Graph2D.X, zoom.getXLo(), zoom.getXHi());
      setRange(Graph2D.Y, zoom.getYLo(), zoom.getYHi());
    }
    /**
     * Describes the current zoom as "xhi,yhi" when the low corner is the
     * origin, otherwise as "xlo,ylo,xhi,yhi" (values rounded to integers).
     * @return the zoom string, suitable for the rocplot --zoom flag
     */
    String getZoomString() {
      final int xlo = Math.round(getLo(Graph2D.X, Graph2D.ONE));
      final int ylo = Math.round(getLo(Graph2D.Y, Graph2D.ONE));
      final int xhi = Math.round(getHi(Graph2D.X, Graph2D.ONE));
      final int yhi = Math.round(getHi(Graph2D.Y, Graph2D.ONE));
      if (xlo == 0 && ylo == 0) {
        return String.format("%d,%d", xhi, yhi);
      } else {
        return String.format("%d,%d,%d,%d", xlo, ylo, xhi, yhi);
      }
    }
  }
@JumbleIgnore
static class PrecisionRecallGraph2D extends ExternalZoomGraph2D {
PrecisionRecallGraph2D(ArrayList<String> lineOrdering, int lineWidth, boolean showScores, Map<String, DataBundle> data, String title) {
setKeyVerticalPosition(KeyPosition.BOTTOM);
setKeyHorizontalPosition(KeyPosition.RIGHT);
setGrid(true);
setLabel(Graph2D.X, SENSITIVITY);
setLabel(Graph2D.Y, PRECISION);
setTitle(title);
float yLow = 100;
for (int i = 0; i < lineOrdering.size(); ++i) {
final DataBundle db = data.get(lineOrdering.get(i));
if (db.show()) {
final PointPlot2D plot = db.getPrecisionRecallPlot(lineWidth, i);
addPlot(plot);
if (showScores) {
addPlot(db.getPrecisionRecallScorePoints(lineWidth, i));
addPlot(db.getPrecisionRecallScoreLabels());
}
yLow = Math.min(yLow, db.getMinPrecision());
}
}
setRange(Graph2D.X, 0, 100);
setRange(Graph2D.Y, Math.max(0, yLow), 100);
setTitle(title);
}
}
  @JumbleIgnore
  /**
   * Graph of true positives vs false positives for the currently selected
   * data lines, with an optional percentage axis when a baseline total is
   * available.
   */
  static class RocGraph2D extends ExternalZoomGraph2D {
    // Largest baseline variant count among the plotted bundles, or -1 if none
    private final int mMaxVariants;
    RocGraph2D(ArrayList<String> lineOrdering, int lineWidth, boolean showScores, Map<String, DataBundle> data, String title) {
      setKeyVerticalPosition(KeyPosition.BOTTOM);
      setKeyHorizontalPosition(KeyPosition.RIGHT);
      setGrid(true);
      setLabel(Graph2D.Y, "True Positives");
      setLabel(Graph2D.X, "False Positives");
      setTitle(title);
      int maxVariants = -1;
      for (int i = 0; i < lineOrdering.size(); ++i) {
        final DataBundle db = data.get(lineOrdering.get(i));
        if (db.show()) {
          final PointPlot2D plot = db.getPlot(lineWidth, i);
          addPlot(plot);
          if (showScores) {
            addPlot(db.getScorePoints(lineWidth, i));
            addPlot(db.getScoreLabels());
          }
          if (db.getTotalVariants() > maxVariants) {
            maxVariants = db.getTotalVariants();
          }
        }
      }
      if (maxVariants > 0) {
        // Pin the Y range to the baseline total and add a secondary % axis
        setRange(Graph2D.Y, 0, maxVariants);
        setTitle(title + " (baseline total = " + maxVariants + ")");
        setRange(Graph2D.Y, Graph2D.TWO, 0, 100);
        setShowTics(Graph2D.Y, Graph2D.TWO, true);
        setGrid(Graph2D.Y, Graph2D.TWO, false);
        setLabel(Graph2D.Y, Graph2D.TWO, "%");
        // dummy plot to show Y2 axis
        final PointPlot2D pp = new PointPlot2D(Graph2D.ONE, Graph2D.TWO);
        addPlot(pp);
      }
      mMaxVariants = maxVariants;
    }
    // Total number of variants for calculating sensitivity at crosshair location
    int getMaxVariants() {
      return mMaxVariants;
    }
  }
  /**
   * Rebuilds the graph (ROC or precision/sensitivity, depending on the
   * selector) from the current data and settings, and displays it on the
   * zoom panel. Runs on the event dispatch thread.
   */
  void showCurrentGraph() {
    SwingUtilities.invokeLater(() -> {
      final Graph2D graph;
      final ArrayList<String> ordering = RocPlot.this.mRocLinesPanel.plotOrder();
      if (mGraphType.getSelectedItem().equals(PRECISION_SENSITIVITY)) {
        graph = new PrecisionRecallGraph2D(ordering, RocPlot.this.mLineWidth, RocPlot.this.mShowScores, RocPlot.this.mData, RocPlot.this.mTitleEntry.getText());
      } else {
        graph = new RocGraph2D(ordering, RocPlot.this.mLineWidth, RocPlot.this.mShowScores, RocPlot.this.mData, RocPlot.this.mTitleEntry.getText());
      }
      if (graph.getPlots().length > 0) {
        // Grow the remembered axis extremes to cover this graph
        maintainZoomBounds(graph);
      }
      final Color[] colors;
      if (!Float.isNaN(mMaxX)) {
        // Bounds are known: add a hidden plot to keep the axis ranges stable
        graph.addPlot(invisibleGraph());
        colors = PALETTE;
      } else {
        // No bounds yet: use each line panel's assigned colour directly
        colors = new Color[ordering.size()];
        int k = 0;
        for (final Component cp : RocPlot.this.mRocLinesPanel.getComponents()) {
          colors[k++] = ((RocLinePanel) cp).getColor();
        }
      }
      mZoomPP.setColors(colors);
      mZoomPP.setGraph(graph, true);
    });
  }
private Plot2D invisibleGraph() {
// Invisible graph to maintain graph size when no lines are shown
final PointPlot2D plot = new PointPlot2D();
plot.setData(Arrays.asList(new Point2D(mMinX, mMinY), new Point2D(mMaxX, mMaxY)));
plot.setLines(false);
plot.setPoints(false);
return plot;
}
private void maintainZoomBounds(Graph2D graph) {
if (Float.isNaN(mMaxX)) {
mMaxX = graph.getHi(Graph2D.X, Graph2D.ONE);
mMaxY = graph.getHi(Graph2D.Y, Graph2D.ONE);
mMinX = graph.getLo(Graph2D.X, Graph2D.ONE);
mMinY = graph.getLo(Graph2D.Y, Graph2D.ONE);
} else {
mMaxX = Math.max(mMaxX, graph.getHi(Graph2D.X, Graph2D.ONE));
mMaxY = Math.max(mMaxY, graph.getHi(Graph2D.Y, Graph2D.ONE));
mMinX = Math.min(mMinX, graph.getLo(Graph2D.X, Graph2D.ONE));
mMinY = Math.min(mMinY, graph.getLo(Graph2D.Y, Graph2D.ONE));
}
}
/**
* Set the title of the plot
* @param title plot title
*/
public void setTitle(final String title) {
SwingUtilities.invokeLater(() -> {
mIconLabel.setText(title);
mTitleEntry.setText(title);
});
}
/**
* Set whether to show scores on the plot lines
* @param flag show scores
*/
public void showScores(boolean flag) {
mShowScores = flag;
SwingUtilities.invokeLater(() -> mScoreCB.setSelected(mShowScores));
}
/**
* Set the line width slider to the given value
* @param width line width
*/
public void setLineWidth(int width) {
mLineWidth = width < LINE_WIDTH_MIN ? LINE_WIDTH_MIN : width > LINE_WIDTH_MAX ? LINE_WIDTH_MAX : width;
SwingUtilities.invokeLater(() -> mLineWidthSlider.setValue(mLineWidth));
}
/**
* Sets the split pane divider location
* @param loc proportional location
*/
public void setSplitPaneDividerLocation(double loc) {
mSplitPane.setDividerLocation(loc);
}
/**
* Set a status message
* @param message test to display
*/
public void setStatus(String message) {
mStatusLabel.setText(message);
}
  /**
   * Loads a list of ROC files, pairing each file with the name at the same
   * index. Files that fail to load are collected and reported in a single
   * dialog. Afterwards either zooms fully out or applies the given initial
   * zoom.
   *
   * @param files the ROC data files to load
   * @param names display names, parallel to {@code files} (empty string for auto-naming)
   * @param initialZoom zoom to apply after loading, or null to zoom out fully
   * @throws IOException if an I/O problem occurs
   */
  private void loadData(ArrayList<File> files, ArrayList<String> names, Box2D initialZoom) throws IOException {
    final StringBuilder sb = new StringBuilder();
    final ProgressBarDelegate progress = new ProgressBarDelegate(mProgressBar);
    for (int i = 0; i < files.size(); ++i) {
      final File f = files.get(i);
      final String name = names.get(i);
      try {
        loadFile(f, name, progress);
      } catch (final IOException | NoTalkbackSlimException e1) {
        // Collect failed paths; they are reported together below
        sb.append(f.getPath()).append('\n');
      }
    }
    progress.done();
    if (sb.length() > 0) {
      JOptionPane.showMessageDialog(mMainPanel.getTopLevelAncestor(),
        "Some files could not be loaded:\n" + sb.toString() + "\n",
        "Invalid ROC File", JOptionPane.ERROR_MESSAGE);
    }
    if (initialZoom == null) {
      SwingUtilities.invokeLater(() -> mZoomPP.getZoomOutAction().actionPerformed(new ActionEvent(this, 0, "LoadComplete")));
    } else {
      SwingUtilities.invokeLater(() -> mZoomPP.setZoom(initialZoom));
    }
  }
  /**
   * Loads a single ROC data file and adds it as a new curve, unless a curve
   * for the same absolute path is already present. Also remembers the file's
   * directory for the next file-chooser invocation.
   *
   * @param f the ROC data file
   * @param name display name for the curve; empty string triggers auto-naming
   * @param progress receives progress callbacks during parsing
   * @throws IOException if the file cannot be read or parsed
   */
  private void loadFile(final File f, final String name, ProgressDelegate progress) throws IOException {
    mFileChooserParent = f.getParentFile();
    final String path = f.getAbsolutePath();
    if (mRocLinesPanel.plotOrder().contains(path)) {
      mProgressBar.setString("This file has already been loaded");
    } else {
      final DataBundle data = ParseRocFile.loadStream(progress, FileUtils.createInputStream(f, false), f.getAbsolutePath());
      setBundleTitle(data, f, name);
      addLine(path, data);
    }
  }
static void setBundleTitle(DataBundle data, File f, String name) throws IOException {
if (name.length() > 0) {
data.setTitle(name);
} else {
final StringBuilder autoname = new StringBuilder();
autoname.append(f.getAbsoluteFile().getParentFile().getName());
final String fname = f.getName();
final int rocIdx = fname.indexOf(RocFilter.ROC_EXT);
if (rocIdx != -1 && !fname.startsWith(RocFilter.ALL.fileName())) {
if (autoname.length() > 0) {
autoname.append(' ');
}
autoname.append(fname.substring(0, rocIdx));
}
if (data.getScoreName() != null) {
if (autoname.length() > 0) {
autoname.append(' ');
}
autoname.append(data.getScoreName());
}
data.setTitle(autoname.toString());
}
}
  /**
   * Registers a freshly loaded data bundle as a new curve: assigns it the next
   * colour from the palette, records it by path, adds a control-panel row for it,
   * and redraws the graph.
   * @param path absolute path used as the unique key for the curve
   * @param dataBundle the loaded ROC data
   */
  private void addLine(String path, DataBundle dataBundle) {
    // Pre-increment: each new line advances to the next palette colour, wrapping around
    final Color initialColor = PALETTE[++mColorCounter % PALETTE.length];
    mData.put(path, dataBundle);
    mRocLinesPanel.addLine(new RocLinePanel(this, path, dataBundle.getTitle(), dataBundle, mProgressBar, initialColor));
    showCurrentGraph();
  }
/**
* A class required to listen for right-clicks
*/
@JumbleIgnore
private class PopupListener extends MouseAdapter {
@Override
public void mouseClicked(MouseEvent e) {
final Point p = e.getPoint();
final Mapping[] mapping = mZoomPP.getMapping();
final Graph2D zoomedGraph = mZoomPP.getGraph();
if (zoomedGraph instanceof RocGraph2D) {
final RocGraph2D graph = (RocGraph2D) zoomedGraph;
final int maxVariants = graph.getMaxVariants();
if (mapping != null && mapping.length > 1) {
final boolean inView = p.x >= mapping[0].getScreenMin() && p.x <= mapping[0].getScreenMax()
&& p.y <= mapping[1].getScreenMin() && p.y >= mapping[1].getScreenMax(); // Y screen min/max is inverted due to coordinate system
final float fp = mapping[0].screenToWorld((float) p.getX());
final float tp = mapping[1].screenToWorld((float) p.getY());
if (inView && fp >= 0 && tp >= 0 && (fp + tp > 0)) {
mProgressBar.setString(getMetricString(tp, fp, maxVariants));
mZoomPP.setCrossHair(new Point2D(fp, tp));
} else {
mZoomPP.setCrossHair(null);
mProgressBar.setString("");
}
}
} else if (zoomedGraph instanceof PrecisionRecallGraph2D) {
if (mapping != null && mapping.length > 1) {
final boolean inView = p.x >= mapping[0].getScreenMin() && p.x <= mapping[0].getScreenMax()
&& p.y <= mapping[1].getScreenMin() && p.y >= mapping[1].getScreenMax(); // Y screen min/max is inverted due to coordinate system
final float recall = mapping[0].screenToWorld((float) p.getX());
final float precision = mapping[1].screenToWorld((float) p.getY());
if (inView && recall >= 0 && precision >= 0 && (recall + precision > 0)) {
mZoomPP.setCrossHair(new Point2D(recall, precision));
mProgressBar.setString(getPrecisionRecallString(precision, recall));
} else {
mZoomPP.setCrossHair(null);
mProgressBar.setString(getPrecisionRecallString(precision, recall));
}
}
}
}
@Override
public void mousePressed(MouseEvent e) {
maybeShowPopup(e);
}
@Override
public void mouseReleased(MouseEvent e) {
maybeShowPopup(e);
}
private void maybeShowPopup(MouseEvent e) {
if (e.isPopupTrigger()) {
mPopup.show(e.getComponent(), e.getX(), e.getY());
}
}
}
static String getMetricString(double truePositive, double falsePositive, int totalPositive) {
final double precision = ContingencyTable.precision(truePositive, falsePositive);
String message = String.format("TP=%.0f FP=%.0f Precision=%.2f%%", truePositive, falsePositive, precision * 100);
if (totalPositive > 0) {
final double falseNegative = totalPositive - truePositive;
final double recall = ContingencyTable.recall(truePositive, falseNegative);
final double fMeasure = ContingencyTable.fMeasure(precision, recall);
message += String.format(" Sensitivity=%.2f%% F-measure=%.2f%%", recall * 100, fMeasure * 100);
}
return message;
}
private static String getPrecisionRecallString(double precision, double recall) {
String message = String.format("Precision=%.2f%%", precision);
final double fMeasure = ContingencyTable.fMeasure(precision, recall);
message += String.format(" Sensitivity=%.2f%% F-measure=%.2f%%", recall, fMeasure);
return message;
}
  /**
   * Runs the ROC plot as a standalone application window and blocks until the
   * user chooses Exit from the context menu.
   * @param fileList ROC data files to load at startup
   * @param nameList display names parallel to <code>fileList</code>
   * @param title window/plot title, or null to use a default based on the plot type
   * @param scores true to display score labels on the curves
   * @param hideSidePanel true to collapse the side control panel
   * @param lineWidth width of the plotted lines
   * @param precisionRecall true for a precision/recall plot, false for a ROC plot
   * @param initialZoom zoom region to apply after loading, or null to zoom out fully
   * @throws InterruptedException if interrupted while waiting on the EDT or the exit latch
   * @throws InvocationTargetException if a task run via invokeAndWait throws
   * @throws IOException if an I/O error occurs while loading the data files
   */
  static void rocStandalone(ArrayList<File> fileList, ArrayList<String> nameList, String title, boolean scores, final boolean hideSidePanel, int lineWidth, boolean precisionRecall, Box2D initialZoom) throws InterruptedException, InvocationTargetException, IOException {
    final JFrame frame = new JFrame();
    final ImageIcon icon = createImageIcon("com/rtg/graph/resources/realtimegenomics_logo_sm.png", "rtg rocplot");
    if (icon != null) {
      frame.setIconImage(icon.getImage());
    }
    // Subclass so that setting the plot title also updates the window title
    final RocPlot rp = new RocPlot(precisionRecall) {
      @Override
      public void setTitle(final String title) {
        super.setTitle(title);
        frame.setTitle("rtg rocplot - " + title);
      }
    };
    rp.setLineWidth(lineWidth);
    frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
    frame.setLayout(new BorderLayout());
    frame.add(rp.mMainPanel, BorderLayout.CENTER);
    // Latch released by the Exit menu action; keeps this method blocked until then
    final CountDownLatch lock = new CountDownLatch(1);
    rp.mPopup.add(new AbstractAction("Exit", null) {
      @Override
      public void actionPerformed(ActionEvent e) {
        frame.setVisible(false);
        frame.dispose();
        lock.countDown();
      }
    });
    rp.showScores(scores);
    rp.setTitle(title != null ? title : precisionRecall ? PRECISION_SENSITIVITY : ROC);
    // Realize and show the frame on the EDT before loading any data
    SwingUtilities.invokeAndWait(() -> {
      frame.pack();
      frame.setSize(1024, 768);
      frame.setLocation(50, 50);
      frame.setVisible(true);
      rp.showCurrentGraph();
      if (hideSidePanel) {
        rp.setSplitPaneDividerLocation(1.0);
      }
    });
    rp.loadData(fileList, nameList, initialZoom);
    SwingUtilities.invokeAndWait(rp::showCurrentGraph);
    lock.await(); // block until the Exit action fires
  }
}
|
package com.rtg.util.cli;
import java.io.File;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import com.reeltwo.jumble.annotations.TestClass;
import com.rtg.util.IntegerOrPercentage;
import com.rtg.util.StringUtils;
import com.rtg.util.Utils;
import com.rtg.visualization.DisplayHelper;
/**
 * Encapsulates a single command-line flag.
 * @param <T> flag value type
 */
@TestClass(value = {"com.rtg.util.cli.CFlagsTest"})
public class Flag<T> implements Comparable<Flag<T>> {

  /** Visibility level of a flag in help output. */
  enum Level { DEFAULT, EXTENDED, EXPERIMENTAL }

  static final String EXTENDED_FLAG_PREFIX = "X";

  static final String EXPERIMENTAL_FLAG_PREFIX = "XX";

  private final Character mFlagChar;
  private final String mFlagName;
  private final String mFlagDescription;
  private final Level mLevel;

  /** The maximum number of times the flag can occur. */
  private int mMaxCount;

  /** The minimum number of times the flag can occur. */
  private int mMinCount;

  private final Class<T> mParameterType;
  private String mParameterDescription;
  private T mParameterDefault = null;

  private String mCategory = null;

  // NOTE: the "Psuedo" spelling is part of the public API (setPsuedoMinMaxRangeString)
  private String mPsuedoMinMaxString = null;

  /** Optional list of valid values for the parameter. */
  private List<String> mParameterRange;
  private boolean mRangeList = false;

  /** Values supplied by the user */
  private List<T> mParameter = new ArrayList<>();

  /**
   * Creates a new <code>Flag</code> for which the name must be supplied on
   * the command line.
   *
   * @param flagChar a <code>Character</code> which can be supplied by the
   * user as an abbreviation for flagName. May be null.
   * @param flagName a <code>String</code> which is the name that the user
   * specifies on the command line to denote the flag.
   * @param flagDescription a name used when printing help messages.
   * @param minCount the minimum number of times the flag must be specified.
   * @param maxCount the maximum number of times the flag can be specified.
   * @param paramType a <code>Class</code> denoting the type of values to be
   * accepted. May be null for "switch" type flags.
   * @param paramDescription a description of the meaning of the flag.
   * @param paramDefault a default value that can be used for optional flags.
   * @param category The flag category
   */
  public Flag(final Character flagChar, final String flagName, final String flagDescription,
      final int minCount, final int maxCount, final Class<T> paramType, final String paramDescription,
      final T paramDefault, final String category) {
    if (flagDescription == null) {
      throw new NullPointerException();
    }
    if (flagName == null) {
      // Anonymous flags must at least carry a type
      if (paramType == null) {
        throw new IllegalArgumentException();
      }
    } else if (flagName.length() == 0) {
      throw new IllegalArgumentException("Flag name cannot be empty");
    } else if (flagName.chars().anyMatch(Character::isWhitespace)) {
      throw new IllegalArgumentException("Flag name cannot contain whitespace: " + flagName);
    } else {
      if (flagName.startsWith("-")) {
        throw new IllegalArgumentException("Long flag names cannot start with '-'");
      }
    }
    setCategory(category);
    mFlagName = flagName;
    mFlagChar = flagChar;
    mFlagDescription = flagDescription;
    mParameterType = paramType;
    mParameterDescription = (mParameterType == null) ? null
        : ((paramDescription == null) || (paramDescription.length() == 0)) ? autoDescription(mParameterType)
            : paramDescription.toUpperCase(Locale.getDefault());
    if (mParameterType != null) {
      setParameterDefault(paramDefault);
    }
    mMinCount = minCount;
    mMaxCount = maxCount;
    // For enums set up the limited set of values message
    final String[] range = values(mParameterType);
    if (range != null) {
      setParameterRange(range);
    }
    // Flag level is inferred from the name prefix: XX... experimental, X... extended
    if (mFlagName != null) {
      if (mFlagName.startsWith(EXPERIMENTAL_FLAG_PREFIX)) {
        mLevel = Level.EXPERIMENTAL;
      } else if (mFlagName.startsWith(EXTENDED_FLAG_PREFIX)) {
        mLevel = Level.EXTENDED;
      } else {
        mLevel = Level.DEFAULT;
      }
    } else {
      mLevel = Level.DEFAULT;
    }
  }

  /**
   * @return the flag level
   */
  Level level() {
    return mLevel;
  }

  /**
   * Gets the enum object specified by str.
   * @param type of object.
   * @param str string to specify object.
   * @return object of class type (null if the type does not look sufficiently like an Enum).
   */
  static Object valueOf(final Class<?> type, final String str) {
    if (!isValidEnum(type)) {
      return null;
    }
    try {
      final Method m = type.getMethod("valueOf", String.class);
      if (!Modifier.isStatic(m.getModifiers())) {
        return null;
      }
      final Class<?> returnType = m.getReturnType();
      if (!type.isAssignableFrom(returnType)) {
        return null;
      }
      // Enum constants are conventionally UPPER_SNAKE; user input is lower-kebab
      return m.invoke(null, str.toUpperCase(Locale.getDefault()).replace('-', '_'));
    } catch (final NoSuchMethodException | InvocationTargetException | IllegalAccessException e) {
      // Should never happen -- isValidEnum has already vetted the methods
      throw new RuntimeException(e);
    }
  }

  /**
   * Gets the range of values for an Enum (or at least something that looks like an Enum).
   * @param type from which values to be extracted.
   * @return the allowed values for the specified type (null if does not look sufficiently like an Enum).
   */
  static String[] values(final Class<?> type) {
    if (type == null) {
      return null;
    }
    if (!isValidEnum(type)) {
      return null;
    }
    try {
      final Method m = type.getMethod("values");
      final Class<?> returnType = m.getReturnType();
      if (returnType.isArray()) {
        final Object[] ret = (Object[]) m.invoke(null);
        final String[] res = new String[ret.length];
        for (int i = 0; i < ret.length; ++i) {
          res[i] = ret[i].toString().toLowerCase(Locale.getDefault()).replace('_', '-'); // List enums as lowercase by default
        }
        return res;
      }
      return null;
    } catch (final NoSuchMethodException | IllegalAccessException | InvocationTargetException e) {
      // Should never happen -- isValidEnum has already vetted the methods
      throw new RuntimeException(e);
    }
  }

  /**
   * Check if looks sufficiently like an Enum to be treated as one.
   * Must implement
   * static T[] values()
   * static T valueOf(String)
   * @param type class type
   * @return true iff is an Enum or looks sufficiently like one.
   */
  static boolean isValidEnum(final Class<?> type) {
    if (type == null) {
      return false;
    }
    if (type.isEnum()) {
      return true;
    }
    // Not a real enum: duck-type it by checking for the two static methods
    final Method m;
    try {
      m = type.getDeclaredMethod("values");
      if (m == null) {
        return false;
      }
    } catch (final SecurityException | NoSuchMethodException e) {
      return false;
    }
    if (!Modifier.isStatic(m.getModifiers())) {
      return false;
    }
    final Class<?> returnType = m.getReturnType();
    if (!returnType.isArray()) {
      return false;
    }
    final Method v;
    try {
      v = type.getMethod("valueOf", String.class);
      if (v == null) {
        return false;
      }
    } catch (final SecurityException | NoSuchMethodException e) {
      return false;
    }
    if (!Modifier.isStatic(v.getModifiers())) {
      return false;
    }
    final Class<?> returnTypev = v.getReturnType();
    if (!type.isAssignableFrom(returnTypev)) {
      return false;
    }
    return true;
  }

  /**
   * Sets the maximum number of times the flag can be specified.
   *
   * @param count the maximum number of times the flag can be specified.
   * @return this flag, so calls can be chained.
   */
  public Flag<T> setMaxCount(final int count) {
    if ((count < 1) || (count < mMinCount)) {
      throw new IllegalArgumentException("MaxCount (" + count
          + ") must not be 0 or less than MinCount (" + mMinCount + ")");
    }
    mMaxCount = count;
    return this;
  }

  /**
   * Gets the maximum number of times the flag can be specified.
   *
   * @return the maximum number of times the flag can be specified.
   */
  public int getMaxCount() {
    return mMaxCount;
  }

  /**
   * Sets the minimum number of times the flag can be specified.
   *
   * @param count the minimum number of times the flag can be specified.
   * @return this flag, so calls can be chained.
   */
  public Flag<T> setMinCount(final int count) {
    if (count > mMaxCount) {
      throw new IllegalArgumentException("MinCount (" + count
          + ") must not be greater than MaxCount (" + mMaxCount + ")");
    }
    if (count == Integer.MAX_VALUE) {
      throw new IllegalArgumentException(
          "You're crazy man -- MinCount cannot be Integer.MAX_VALUE");
    }
    mMinCount = count;
    return this;
  }

  /**
   * Gets the minimum number of times the flag can be specified.
   *
   * @return the minimum number of times the flag can be specified.
   */
  public int getMinCount() {
    return mMinCount;
  }

  /**
   * Return the number of times the flag has been set.
   *
   * @return the number of times the flag has been set.
   */
  public int getCount() {
    return mParameter.size();
  }

  /**
   * Return true if the flag has been set.
   *
   * @return true if the flag has been set.
   */
  public boolean isSet() {
    return !mParameter.isEmpty();
  }

  /**
   * Gets the character name of this flag, if set.
   *
   * @return the character name of this flag, or null if no character name has
   * been set.
   */
  public Character getChar() {
    return mFlagChar;
  }

  /**
   * Gets the name of the flag.
   *
   * @return the name of the flag (may be null for anonymous flags).
   */
  public String getName() {
    return mFlagName;
  }

  /**
   * Gets the description of the flag's purpose.
   *
   * @return the description.
   */
  public String getDescription() {
    return mFlagDescription;
  }

  /**
   * Gets the description of the flag parameter. This is usually a single word
   * that indicates a little more than the parameter type.
   *
   * @return the parameter description, or null for untyped flags.
   */
  public String getParameterDescription() {
    return mParameterDescription;
  }

  protected void setParameterDescription(String desc) {
    mParameterDescription = desc;
  }

  /**
   * Gets the type of the parameter. This will return null for untyped
   * (switch) flags. Parameters will be checked that they are of the specified
   * type.
   *
   * @return the parameter type, or null if the flag is untyped.
   */
  public Class<?> getParameterType() {
    return mParameterType;
  }

  /**
   * Gets the default value of the parameter.
   *
   * @return the default value, or null if no default has been specified.
   */
  public Object getParameterDefault() {
    return mParameterDefault;
  }

  /**
   * Sets the default value of the parameter.
   *
   * @param paramDefault a default value that can be used for optional flags.
   * @return this flag, so calls can be chained.
   */
  public Flag<T> setParameterDefault(final T paramDefault) {
    if (mParameterType == null) {
      throw new IllegalArgumentException("Cannot set default parameter for untyped flags");
    }
    mParameterDefault = paramDefault;
    return this;
  }

  /**
   * Defines the set of strings that are valid for this flag.
   *
   * @param range a collection of Strings.
   * @return this flag, so calls can be chained.
   */
  public Flag<T> setParameterRange(final Collection<String> range) {
    final String[] rarray = range.toArray(new String[0]);
    return setParameterRange(rarray);
  }

  /**
   * Defines the set of strings that are valid for this flag.
   *
   * @param range an array of Strings.
   * @return this flag, so calls can be chained.
   */
  public Flag<T> setParameterRange(final String[] range) {
    if (mParameterType == null) {
      throw new IllegalArgumentException("Cannot set parameter range for no-arg flags.");
    }
    if (range == null) {
      mParameterRange = null;
    } else {
      if (range.length < 1) {
        throw new IllegalArgumentException("Must specify at least one value in parameter range.");
      }
      final List<String> l = new ArrayList<>(range.length);
      for (final String s : range) {
        // Every range value must itself be parseable as the parameter type
        try {
          parseValue(s);
        } catch (final Exception e) {
          throw new IllegalArgumentException("Range value " + s + " could not be parsed.", e);
        }
        l.add(s);
      }
      mParameterRange = Collections.unmodifiableList(l);
    }
    return this;
  }

  /**
   * Override the default minimum - maximum string with one representing the given range.
   * @param min the minimum
   * @param max the maximum
   */
  public void setPsuedoMinMaxRangeString(final int min, final int max) {
    final String str = minMaxUsage(min, max, mRangeList);
    if (str.length() == 0) {
      mPsuedoMinMaxString = null;
    } else {
      mPsuedoMinMaxString = str;
    }
  }

  /**
   * Gets the list of valid parameter values, if these have been specified.
   *
   * @return a <code>List</code> containing the permitted values, or null if
   * this has not been set.
   */
  public List<String> getParameterRange() {
    return mParameterRange;
  }

  /**
   * Get the value for this flag. If the flag was not user-set, then the
   * default value is returned (if defined). The value will have been checked
   * to comply with any parameter typing. If called on an untyped flag, this
   * will return Boolean.TRUE or Boolean.FALSE appropriately.
   *
   * @return a value for this flag.
   */
  public T getValue() {
    return (isSet()) ? mParameter.get(0) : mParameterDefault;
  }

  /**
   * Get a collection of all values set for this flag. This is for flags that
   * can be set multiple times. If the flag was not user-set, then the
   * collection contains only the default value (if defined).
   *
   * @return a <code>Collection</code> of the supplied values.
   */
  public List<T> getValues() {
    final List<T> result;
    if (isSet()) {
      result = mParameter;
    } else {
      result = new ArrayList<>();
      if (mParameterDefault != null) {
        result.add(mParameterDefault);
      }
    }
    return Collections.unmodifiableList(result);
  }

  /** Discards any user-supplied values. */
  void reset() {
    mParameter = new ArrayList<>();
  }

  /**
   * Records a user-supplied value for this flag, enforcing the occurrence limit
   * and any declared value range.  When CSV mode is enabled the string is split
   * on commas and each element recorded separately.
   * @param valueStr the raw value string from the command line
   * @return the recorded value(s) paired with this flag
   */
  FlagValue<T> setValue(final String valueStr) {
    if (mParameter.size() >= mMaxCount) {
      // Fix: the original message was missing the space before "times"
      throw new FlagCountException("Value cannot be set more than " + mMaxCount + " times for flag: " + mFlagName);
    }
    if (mParameterRange != null) {
      if (mRangeList) {
        final String[] vs = StringUtils.split(valueStr, ',');
        for (String vi : vs) {
          if (!mParameterRange.contains(vi)) {
            throw new IllegalArgumentException("A value supplied is not in the set of allowed values.");
          }
        }
      } else {
        if (!mParameterRange.contains(valueStr)) {
          throw new IllegalArgumentException("Value supplied is not in the set of allowed values.");
        }
      }
    }
    if (mRangeList) {
      final String[] valueStrs = StringUtils.split(valueStr, ',');
      final List<T> values = new ArrayList<>(valueStrs.length);
      for (final String valueStr2 : valueStrs) {
        final T value = parseValue(valueStr2);
        mParameter.add(value);
        values.add(value);
      }
      return new FlagValue<>(this, values);
    } else {
      final T value = parseValue(valueStr);
      mParameter.add(value);
      return new FlagValue<>(this, value);
    }
  }

  /**
   * Converts the string representation of a parameter value into the
   * appropriate Object. This default implementation knows how to convert
   * based on the parameter type for several common types. Override for custom
   * parsing.
   * @param valueStr the value to parse
   * @return the parsed value
   */
  T parseValue(final String valueStr) {
    return Flag.instanceHelper(mParameterType, valueStr);
  }

  @Override
  public boolean equals(final Object other) {
    if (!(other instanceof Flag)) {
      return false;
    }
    // Null-safe: anonymous flags have a null name (the original dereferenced getName() unconditionally)
    final String otherName = ((Flag<?>) other).getName();
    return getName() == null ? otherName == null : getName().equals(otherName);
  }

  @Override
  public int hashCode() {
    return getName() == null ? 0 : getName().hashCode();
  }

  @Override
  public int compareTo(final Flag<T> other) {
    if (other == null || other.getName() == null) {
      return -1;
    }
    if (getName() == null) {
      // Sort unnamed flags after named ones; the original threw NPE here
      return 1;
    }
    return getName().compareTo(other.getName());
  }

  /** @return a compact usage string (prefers char name if present). */
  final String getCompactFlagUsage() {
    return getCompactFlagUsage(new DisplayHelper());
  }

  String getCompactFlagUsage(DisplayHelper dh) {
    final StringBuilder sb = new StringBuilder();
    final String flagName = (getChar() != null) ? CFlags.SHORT_FLAG_PREFIX + getChar() : CFlags.LONG_FLAG_PREFIX + getName();
    sb.append(dh.decorateForeground(flagName, DisplayHelper.THEME_LITERAL_COLOR));
    final String usage = getParameterDescription();
    // Fix: usage is null for untyped (switch) flags; the original NPE'd here
    if (usage != null && usage.length() > 0) {
      sb.append(' ').append(dh.decorateForeground(usage, DisplayHelper.THEME_TYPE_COLOR));
    }
    return sb.toString();
  }

  /** @return a usage string. */
  final String getFlagUsage() {
    return getFlagUsage(new DisplayHelper());
  }

  String getFlagUsage(DisplayHelper dh) {
    final StringBuilder sb = new StringBuilder();
    final String flagName = CFlags.LONG_FLAG_PREFIX + getName();
    sb.append(dh.decorateForeground(flagName, DisplayHelper.THEME_LITERAL_COLOR));
    if (getParameterType() != null) {
      sb.append('=').append(dh.decorateForeground(getParameterDescription(), DisplayHelper.THEME_TYPE_COLOR));
    }
    return sb.toString();
  }

  /**
   * Produces a human-readable description of how many times a flag may be given.
   * @param min minimum occurrence count
   * @param max maximum occurrence count
   * @param allowCsv true if the flag also accepts comma-separated lists
   * @return the description, or an empty string when no constraint is worth stating
   */
  static String minMaxUsage(int min, int max, boolean allowCsv) {
    final StringBuilder ret = new StringBuilder();
    if (min >= 1 && max > 1) {
      if (max == Integer.MAX_VALUE) {
        ret.append("Must be specified ").append(min).append(" or more times");
      } else if (max - min == 0) {
        ret.append("Must be specified ").append(min).append(" times");
      } else if (max - min == 1) {
        ret.append("Must be specified ").append(min).append(" or ").append(max).append(" times");
      } else {
        ret.append("Must be specified ").append(min).append(" to ").append(max).append(" times");
      }
    } else {
      if (min == 0) {
        if (max > 1) {
          if (max == Integer.MAX_VALUE) {
            ret.append("May be specified 0 or more times");
          } else {
            ret.append("May be specified up to ").append(max).append(" times");
          }
        }
      }
    }
    if (ret.length() > 0 && allowCsv) {
      ret.append(", or as a comma separated list");
    }
    return ret.toString();
  }

  /**
   * Appends the full help line for this flag (short name, long usage, padded
   * description) to the supplied wrapping builder.
   * @param wb destination builder
   * @param longestUsageLength width to pad the usage column to
   */
  void appendLongFlagUsage(final WrappingStringBuilder wb, final int longestUsageLength) {
    wb.append("  ");
    if (getChar() == null) {
      wb.append("    ");
    } else {
      final String flagName = CFlags.SHORT_FLAG_PREFIX + getChar();
      wb.append(wb.displayHelper().decorateForeground(flagName, DisplayHelper.THEME_LITERAL_COLOR)).append(", ");
    }
    // Pad using the undecorated length so ANSI colour codes don't skew the column
    final int len = getFlagUsage().length();
    final String disp = getFlagUsage(wb.displayHelper());
    wb.append(disp);
    for (int i = 0; i < longestUsageLength - len; ++i) {
      wb.append(" ");
    }
    wb.append(" ");
    final String description = getUsageDescription();
    wb.wrapText(description);
    wb.append(CFlags.LS);
  }

  /**
   * The description including default values and bounds/limits
   * @return description string
   */
  public String getUsageDescription() {
    final StringBuilder description = new StringBuilder(getDescription());
    final List<String> range = getParameterRange();
    if (range != null) {
      if (description.length() > 0) {
        description.append(". ");
      }
      description.append("Allowed values are ").append(Arrays.toString(range.toArray()));
    }
    final String minMaxUsage;
    if (mPsuedoMinMaxString != null) {
      minMaxUsage = mPsuedoMinMaxString;
    } else {
      minMaxUsage = minMaxUsage(getMinCount(), getMaxCount(), mRangeList);
    }
    if (minMaxUsage.length() > 0) {
      description.append(". ").append(minMaxUsage);
    }
    final Object def = getParameterDefault();
    if (def != null) {
      String defs;
      if (def instanceof Double) {
        defs = Utils.realFormat((Double) def);
      } else if (isValidEnum(mParameterType)) {
        defs = def.toString().toLowerCase(Locale.getDefault());
      } else {
        defs = def.toString();
      }
      if (defs.length() == 0) {
        defs = "\"\"";
      }
      if (description.length() > 0) {
        description.append(" ");
      }
      description.append("(Default is ").append(defs).append(")");
    }
    return description.toString();
  }

  /** Derives an upper-cased parameter description from the type's simple name. */
  private static String autoDescription(final Class<?> type) {
    final String result = type.getName();
    return result.substring(result.lastIndexOf('.') + 1).toUpperCase(Locale.getDefault());
  }

  private static final Set<String> BOOLEAN_AFFIRMATIVE = new HashSet<>();
  private static final Set<String> BOOLEAN_NEGATIVE = new HashSet<>();
  static {
    BOOLEAN_AFFIRMATIVE.addAll(Arrays.asList("true", "yes", "y", "t", "1", "on", "aye", "hai", "ja", "da", "ya", "positive", "fer-shure", "totally", "affirmative", "+5v"));
    BOOLEAN_NEGATIVE.addAll(Arrays.asList("false", "no", "n", "f", "0", "off"));
  }

  /**
   * Parses a string into an instance of the requested type.
   * @param type the target type
   * @param stringRep the string to parse
   * @param <T> the target type
   * @return the parsed instance
   * @throws IllegalArgumentException if the string cannot be parsed or the type is unsupported
   */
  @SuppressWarnings("unchecked")
  static <T> T instanceHelper(final Class<T> type, final String stringRep) {
    try {
      if (type == Boolean.class) {
        final String lStr = stringRep.toLowerCase(Locale.getDefault());
        if (BOOLEAN_AFFIRMATIVE.contains(lStr)) {
          return (T) Boolean.TRUE;
        } else if (BOOLEAN_NEGATIVE.contains(lStr)) {
          return (T) Boolean.FALSE;
        } else {
          throw new IllegalArgumentException("Invalid boolean value " + stringRep);
        }
      } else if (type == Byte.class) {
        return (T) Byte.valueOf(stringRep);
      } else if (type == Character.class) {
        return (T) Character.valueOf(stringRep.charAt(0));
      } else if (type == Float.class) {
        return (T) Float.valueOf(stringRep);
      } else if (type == Double.class) {
        return (T) Double.valueOf(stringRep);
      } else if (type == Integer.class) {
        return (T) Integer.valueOf(stringRep);
      } else if (type == Long.class) {
        return (T) Long.valueOf(stringRep);
      } else if (type == Short.class) {
        return (T) Short.valueOf(stringRep);
      } else if (type == File.class) {
        return (T) new File(stringRep);
      } else if (type == URL.class) {
        return (T) new URL(stringRep);
      } else if (type == String.class) {
        return (T) stringRep;
      } else if (isValidEnum(type)) {
        return (T) valueOf(type, stringRep);
      } else if (type == Class.class) {
        return (T) Class.forName(stringRep);
      } else if (type == IntegerOrPercentage.class) {
        return (T) IntegerOrPercentage.valueOf(stringRep);
      }
    } catch (final MalformedURLException | ClassNotFoundException e) {
      throw new IllegalArgumentException(e);
    } catch (final NumberFormatException e) {
      throw new IllegalArgumentException(""); // We rely on this message being empty
    }
    throw new IllegalArgumentException("Unknown parameter type: " + type);
  }

  /**
   * When enabled, this flag can take a comma-separated list of range values
   * and produce a list of those values
   * @return this flag, so calls can be chained.
   */
  public Flag<T> enableCsv() {
    mRangeList = true;
    return this;
  }

  /**
   * @param category the category to set
   * @return this flag, so calls can be chained.
   */
  public Flag<T> setCategory(final String category) {
    mCategory = category;
    return this;
  }

  /**
   * @return the category
   */
  public String getCategory() {
    return mCategory;
  }
}
|
package goldrush;
/**
 * Gold digger that always heads for site 4 when it exists.
 *
 * @author Merlano Riccardo 427720
 */
public class MerlanoRiccardo extends GoldDigger {
  int sito = 0, i = 0; // retained for source compatibility; not currently used

  /**
   * Chooses the digging site.  Always prefers site 4, but clamps the choice to a
   * valid index so distance arrays shorter than five entries no longer cause an
   * out-of-bounds access in the caller.
   * @param distances distances to each available site
   * @return the chosen site index
   */
  @Override
  public int chooseDiggingSite(int[] distances) {
    if (distances == null || distances.length == 0) {
      return 0; // no sites to choose from; fall back to index 0
    }
    return Math.min(4, distances.length - 1);
  }
}
|
package com.rtg.vcf;
import static com.rtg.util.StringUtils.TAB;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import com.rtg.util.StringUtils;
import com.rtg.util.intervals.SequenceNameLocus;
import com.rtg.util.intervals.SequenceNameLocusSimple;
/**
* Class to hold a single VCF record
*/
public class VcfRecord implements SequenceNameLocus {
  /** VCF missing value string **/
  public static final String MISSING = "" + VcfUtils.MISSING_VALUE;
  /** The character used to delimit subfields within FORMAT and SAMPLE fields */
  public static final String FORMAT_AND_SAMPLE_SEPARATOR = ":";
  /** The character used to delimit subfields within FILTER and INFO fields */
  public static final String ID_FILTER_AND_INFO_SEPARATOR = ";";
  /** The character used to delimit ALT alleles and multi-valued subfield values */
  public static final String ALT_CALL_INFO_SEPARATOR = ",";
  // CHROM column
  private String mSequence;
  // Zero-based start position (the POS column is one-based; see getOneBasedStart)
  private int mStart = -1;
  private String mId; // These are stored in VCF representation (users may need to perform splitting)
  // REF allele
  private String mRefCall;
  // ALT alleles, in column order; each entry is a single allele
  private final List<String> mAltCalls;
  // QUAL column kept as its raw string form (may be the missing value ".")
  private String mQual;
  // FILTER entries; either PASS alone or a list of failed filter names
  private final List<String> mFilters;
  // Number of samples this record carries values for
  private int mNumSamples;
  /**
   * Each <code>Key=Value;</code> entry in the <code>INFO</code> field becomes
   * one (Key,Value) entry in this map.
   */
  private final Map<String, ArrayList<String>> mInfo;
  /**
   * Maps from each format key to all the values of that key for each sample.
   * For example, <code>"GT:GQ 0|0:48 1|0:49"</code> would give:
   * <code>"GT"</code> maps to <code>["0|0","1|0"]</code> and
   * <code>"GQ"</code> maps to <code>["48","49"]</code>.
   */
  private final Map<String, ArrayList<String>> mFormatAndSample;
  /**
   * Construct a new standard (non gVCF) VcfRecord
   * @param sequence the sequence name
   * @param start the start position (zero-based; see getOneBasedStart)
   * @param ref the ref allele
   */
  public VcfRecord(String sequence, int start, String ref) {
    mSequence = sequence;
    mStart = start;
    mRefCall = ref;
    mAltCalls = new ArrayList<>();
    mFilters = new ArrayList<>();
    // LinkedHashMap preserves INFO/FORMAT field order for round-trip output
    mInfo = new LinkedHashMap<>();
    mFormatAndSample = new LinkedHashMap<>();
  }
/**
* Copy constructor. Useful when splitting a record.
* @param rec original record
*/
public VcfRecord(final VcfRecord rec) {
mSequence = rec.mSequence;
mStart = rec.mStart;
mRefCall = rec.mRefCall;
mAltCalls = new ArrayList<>(rec.mAltCalls);
mFilters = new ArrayList<>(rec.mFilters);
mInfo = new LinkedHashMap<>(rec.mInfo);
mFormatAndSample = new LinkedHashMap<>(rec.mFormatAndSample);
mId = rec.mId;
mQual = rec.mQual;
mNumSamples = rec.mNumSamples;
}
  @Override
  public String getSequenceName() {
    return mSequence;
  }
  /**
   * Sets the (zero-based) start position.
   * @param start the new start position
   */
  public void setStart(final int start) {
    mStart = start;
  }
  @Override
  public int getStart() {
    return mStart;
  }
  @Override
  public int getEnd() {
    // Exclusive end: start plus the reference span
    return mStart + getLength();
  }
  @Override
  public int getLength() {
    // Span on the reference is the length of the REF allele (0 if unset)
    return mRefCall == null ? 0 : mRefCall.length();
  }
  @Override
  public boolean overlaps(SequenceNameLocus other) {
    return SequenceNameLocusSimple.overlaps(this, other);
  }
  @Override
  public boolean contains(String sequence, int pos) {
    return SequenceNameLocusSimple.contains(this, sequence, pos);
  }
  /**
   * Gets the one-based start position
   * @return the one-based start position
   */
  public int getOneBasedStart() {
    return getStart() + 1;
  }
/**
* @return id field, caller responsibility to split on ";" for multiple ids
*/
public String getId() {
return mId == null ? MISSING : mId;
}
/**
* Sets the variant ID
* @param id id (or ids) to set
* @return this, for call chaining
*/
public VcfRecord setId(String... id) {
if (id.length == 0) {
mId = null;
} else if (id.length == 1) {
mId = id[0];
} else {
final StringBuilder ids = new StringBuilder(id[0]);
for (int i = 1; i < id.length; ++i) {
ids.append(ID_FILTER_AND_INFO_SEPARATOR).append(id[i]);
}
mId = ids.toString();
}
return this;
}
  /**
   * @return reference call
   */
  public String getRefCall() {
    return mRefCall;
  }
  /**
   * Sets the reference allele
   * @param ref reference call to set; must not be null
   * @return this, for call chaining
   */
  public VcfRecord setRefCall(String ref) {
    assert ref != null;
    mRefCall = ref;
    return this;
  }
/**
*
* @param altCall the next alternate call
* @return this, for call chaining
*/
public VcfRecord addAltCall(String altCall) {
if (MISSING.equals(altCall)) {
throw new VcfFormatException("Attempt to add missing value '.' as explicit ALT allele");
} else {
mAltCalls.add(altCall);
}
return this;
}
/**
* @return alternate calls (this should be treated as read-only).
*/
public List<String> getAltCalls() {
return mAltCalls;
}
/**
* Get the allele with the specified index
* @param allele the allele to retrieve
* @return the allele, or null if allele index is -1 (missing)
*/
public String getAllele(int allele) {
if (allele > mAltCalls.size()) {
throw new VcfFormatException("Invalid allele number " + allele);
}
return allele == -1 ? null : allele == 0 ? getRefCall() : mAltCalls.get(allele - 1);
}
  /**
   * @return quality (the raw QUAL string; "." when unset)
   */
  public String getQuality() {
    return mQual == null ? MISSING : mQual;
  }
  /**
   * @param qual set quality values
   * @return this, for call chaining
   */
  public VcfRecord setQuality(String qual) {
    mQual = qual;
    return this;
  }
/**
* @return true if the record has failed filters
*/
public boolean isFiltered() {
for (final String f : getFilters()) {
if (!(VcfUtils.FILTER_PASS.equals(f) || VcfRecord.MISSING.equals(f))) {
return true;
}
}
return false;
}
  /**
   * @return list of filters (should be treated as read-only).
   */
  public List<String> getFilters() {
    return mFilters;
  }
  /**
   * Adds a filter
   * @param filter filter to be added
   * @return this, for call chaining
   */
  public VcfRecord addFilter(String filter) {
    // VCF spec says the field is either PASS or semicolon separated list of failures.
    // So we may need to remove any existing PASS, or other filters, depending on what comes in.
    if (VcfUtils.FILTER_PASS.equals(filter)) {
      // Adding PASS supersedes any previously recorded failures.
      mFilters.clear();
    } else {
      // Adding a failure invalidates any previously recorded PASS.
      mFilters.remove(VcfUtils.FILTER_PASS);
    }
    mFilters.add(filter);
    return this;
  }
  /**
   * Adds a sample-specific filter
   * @param filter filter to be added
   * @param sampleIndex index of sample, (from <code>VcfHeader</code>)
   * @return this, for call chaining
   */
  public VcfRecord addSampleFilter(String filter, int sampleIndex) {
    final List<String> filters = mFormatAndSample.computeIfAbsent(VcfUtils.FORMAT_FILTER, l -> new ArrayList<>());
    // Pad with missing values so every sample has an FT entry before we index into the list.
    while (filters.size() < mNumSamples) {
      filters.add(MISSING);
    }
    String ft = filters.get(sampleIndex);
    // VCF spec says the field is either PASS or semicolon separated list of failures.
    // So we may need to remove any existing PASS, or other filters, depending on what comes in.
    if (MISSING.equals(ft)
        || MISSING.equals(filter)
        || VcfUtils.FILTER_PASS.equals(filter)
        || VcfUtils.FILTER_PASS.equals(ft)) {
      // Either side being PASS or missing means the incoming value simply replaces the old one.
      ft = filter;
    } else {
      // Both are real failure filters: accumulate them as a separated list.
      ft = ft + VcfUtils.VALUE_SEPARATOR + filter;
    }
    filters.set(sampleIndex, ft);
    return this;
  }
  /**
   * @return info fields (should be treated as read-only).
   */
  public Map<String, ArrayList<String>> getInfo() {
    return mInfo;
  }
  /**
   * Adds an info field
   * @param key key for field
   * @param values values for the field
   * @return this, for call chaining
   */
  public VcfRecord addInfo(String key, String... values) {
    // Appends to any values already present for this key.
    final ArrayList<String> val = mInfo.computeIfAbsent(key, k -> new ArrayList<>());
    if (values != null) {
      Collections.addAll(val, values);
    }
    return this;
  }
  /**
   * Adds or resets an info field
   * @param key key for field
   * @param values values for the field
   * @return this, for call chaining
   */
  public VcfRecord setInfo(String key, String... values) {
    final ArrayList<String> val = mInfo.computeIfAbsent(key, k -> new ArrayList<>());
    // clear() makes this a replace rather than an append (contrast with addInfo).
    val.clear();
    if (values != null) {
      Collections.addAll(val, values);
    }
    return this;
  }
  /**
   * Remove an existing info field.
   * @param key field to remove
   * @return this, for call chaining
   */
  public VcfRecord removeInfo(final String key) {
    mInfo.remove(key);
    return this;
  }
  /**
   * Returns a map that maps each genotype keyword to a list of value
   * strings, one for each sample.
   * @return format keywords mapped to sample values (should be treated as read-only).
   */
  public Map<String, ArrayList<String>> getFormatAndSample() {
    // Exposes internal state; callers must not mutate the returned map.
    return mFormatAndSample;
  }
  /**
   * @return a set of the format ids used in this record
   */
  public Set<String> getFormats() {
    return mFormatAndSample.keySet();
  }
  /**
   * Returns true if the record contains the specified format field
   * @param key format value
   * @return true if the format field is contained in this record
   */
  public boolean hasFormat(String key) {
    return mFormatAndSample.containsKey(key);
  }
  /**
   * Remove the specified format field from this record
   * @param key format value
   */
  public void removeFormat(String key) {
    mFormatAndSample.remove(key);
  }
  /**
   * Gets the sample format values for a specified format
   * @param key format value to be retrieved
   * @return the sample values for this format field, or null if the format is absent
   */
  public ArrayList<String> getFormat(String key) {
    return mFormatAndSample.get(key);
  }
/**
* Adds a format key without setting any sample values.
* @param key format value to be set
* @return this, for call chaining
*/
public VcfRecord addFormat(String key) {
if (!mFormatAndSample.containsKey(key)) {
mFormatAndSample.put(key, new ArrayList<>());
}
return this;
}
  /**
   * Sets format key and value for the next sample.
   * @param key format value to be set
   * @param val value for the key
   * @return this, for call chaining
   */
  public VcfRecord addFormatAndSample(String key, String val) {
    // Values are positional: each call appends the value for the next sample in order.
    if (mFormatAndSample.containsKey(key)) {
      assert mFormatAndSample.get(key).size() < mNumSamples : "Tried to insert more " + key + " format values than number of samples";
      mFormatAndSample.get(key).add(val);
    } else {
      // First value for this format key starts a fresh per-sample list.
      final ArrayList<String> list = new ArrayList<>();
      list.add(val);
      mFormatAndSample.put(key, list);
    }
    return this;
  }
/**
* Sets format key and value for the specified sample. If the format key does not already exist in this record,
* it will be created with missing values for all other samples.
* @param key format value to be set
* @param val value for the key
* @param sampleIndex index of sample, (from <code>VcfHeader</code>)
* @return this, for call chaining
*/
public VcfRecord setFormatAndSample(String key, String val, int sampleIndex) {
assert sampleIndex < mNumSamples : "Invalid sample index: " + sampleIndex;
final ArrayList<String> vals;
if (mFormatAndSample.containsKey(key)) {
vals = mFormatAndSample.get(key);
} else {
vals = new ArrayList<>();
mFormatAndSample.put(key, vals);
while (vals.size() < mNumSamples) {
vals.add(MISSING);
}
}
vals.set(sampleIndex, val);
return this;
}
  /**
   * Removes all samples from the record and clears format information
   * @return this, for call chaining
   */
  public VcfRecord removeSamples() {
    mNumSamples = 0;
    mFormatAndSample.clear();
    return this;
  }
  /**
   * Fills any remaining values with "missing value" marker
   * @param key attribute to fill
   * @return this, for chain calling
   */
  public VcfRecord padFormatAndSample(String key) {
    // Pads only an existing key; absent keys are left untouched.
    if (mFormatAndSample.containsKey(key)) {
      final ArrayList<String> list = mFormatAndSample.get(key);
      while (list.size() < mNumSamples) {
        list.add(MISSING);
      }
    }
    return this;
  }
  /**
   * @return the number of samples
   */
  public int getNumberOfSamples() {
    return mNumSamples;
  }
  /**
   * Sets number of samples
   * @param num the number of samples
   * @return this, for chain calling
   */
  public VcfRecord setNumberOfSamples(int num) {
    // NOTE(review): does not pad or trim existing FORMAT lists; toString() throws
    // if they disagree with this count — confirm callers keep these in sync.
    mNumSamples = num;
    return this;
  }
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
sb.append(getSequenceName());
sb.append(TAB);
sb.append(String.valueOf(getOneBasedStart()));
sb.append(TAB);
sb.append(getId());
sb.append(TAB);
sb.append(getRefCall());
sb.append(TAB);
sb.append(getAltCalls(mAltCalls));
sb.append(TAB);
sb.append(getQuality());
sb.append(TAB);
sb.append(getFilter(mFilters));
sb.append(TAB);
sb.append(getPrintableInfo(getInfo()));
if (countNumberOfSamples(mFormatAndSample) != mNumSamples) {
throw new IllegalStateException("Number of samples (" + mNumSamples + ") disagrees with contents of VCF record (" + countNumberOfSamples(mFormatAndSample) + ") at: " + getSequenceName() + ":" + getOneBasedStart());
}
if (mNumSamples > 0) {
sb.append(TAB);
sb.append(getFormat(getFormatAndSample()));
for (int i = 0; i < mNumSamples; ++i) {
sb.append(TAB);
sb.append(getSample(i, getFormatAndSample()));
}
}
return sb.toString();
}
private static String getAltCalls(List<String> altCalls) {
if (altCalls.isEmpty()) {
return MISSING;
}
return StringUtils.join(ALT_CALL_INFO_SEPARATOR, altCalls);
}
private static int countNumberOfSamples(Map<String, ArrayList<String>> formatAndSample) {
int firstCount = 0;
boolean first = true;
for (final Entry<String, ArrayList<String>> formatField : formatAndSample.entrySet()) {
final int currentCount = formatField.getValue().size();
if (first) {
firstCount = currentCount;
first = false;
}
if (firstCount != currentCount) {
throw new IllegalStateException("not enough data for all samples, FORMAT field = " + formatField.getKey() + ", expected " + firstCount + " entries, saw " + currentCount);
}
}
return firstCount;
}
private static String getPrintableInfo(Map<String, ArrayList<String>> info) {
final StringBuilder sb = new StringBuilder();
for (final Entry<String, ArrayList<String>> e : info.entrySet()) {
sb.append(e.getKey());
final Collection<String> values = e.getValue();
if (values != null && values.size() > 0) {
sb.append("=")
.append(StringUtils.join(ALT_CALL_INFO_SEPARATOR, values));
}
sb.append(ID_FILTER_AND_INFO_SEPARATOR);
}
if (sb.length() == 0) {
return MISSING;
}
return sb.substring(0, sb.length() - 1);
}
  // Serialise the i-th sample column: sub-field values joined with the FORMAT
  // separator, with trailing missing values omitted per the VCF convention.
  private static String getSample(int i, Map<String, ArrayList<String>> formatAndSample) {
    final StringBuilder sb = new StringBuilder();
    final StringBuilder msb = new StringBuilder(); // Allow omitting trailing missing sub-fields.
    for (final Entry<String, ArrayList<String>> formatField : formatAndSample.entrySet()) {
      final String val = formatField.getValue().get(i);
      if (MISSING.equals(val)) {
        // Buffer missing values; they are only flushed if a non-missing value follows.
        msb.append(val).append(FORMAT_AND_SAMPLE_SEPARATOR);
      } else {
        sb.append(msb).append(val).append(FORMAT_AND_SAMPLE_SEPARATOR);
        msb.setLength(0);
      }
    }
    if (sb.length() == 0) {
      // Every sub-field was missing: collapse the whole sample column to the marker.
      return MISSING;
    }
    return sb.substring(0, sb.length() - 1);
  }
private static String getFormat(Map<String, ArrayList<String>> formatAndSample) {
if (formatAndSample.isEmpty()) {
return MISSING;
}
return StringUtils.join(FORMAT_AND_SAMPLE_SEPARATOR, formatAndSample.keySet());
}
private static String getFilter(List<String> filter) {
if (filter.isEmpty()) {
return MISSING;
}
return StringUtils.join(ID_FILTER_AND_INFO_SEPARATOR, filter);
}
/**
* Returns the value of the specified sample field as a String.
* @param sampleNumber sample number
* @param formatField field of sample
* @return value as a String or null if not specified
*/
public String getSampleString(int sampleNumber, String formatField) {
final ArrayList<String> samples = mFormatAndSample.get(formatField);
if (samples != null) {
return samples.get(sampleNumber);
}
return null;
}
/**
* Returns the value of the specified sample field as a Double.
* @param sampleNumber sample number
* @param formatField field of sample
* @return value as a double or null if not specified
* @throws NumberFormatException if value is not a double
*/
public Double getSampleDouble(int sampleNumber, String formatField) {
final String valueStr = getSampleString(sampleNumber, formatField);
if (valueStr != null && !VcfRecord.MISSING.equals(valueStr)) {
return Double.valueOf(valueStr);
}
return null;
}
/**
* Returns the value of the specified sample field as an Integer.
* @param sampleNumber sample number
* @param formatField field of sample
* @return value as an integer or null if not specified
* @throws NumberFormatException if value is not an integer
*/
public Integer getSampleInteger(int sampleNumber, String formatField) {
final String valueStr = getSampleString(sampleNumber, formatField);
if (valueStr != null && !VcfRecord.MISSING.equals(valueStr)) {
return Integer.valueOf(valueStr);
}
return null;
}
}
|
package commandFactory;
import java.util.ArrayList;
import Parser.ParsedResult;
import commonClasses.StorageList;
import taskDo.SearchType;
import taskDo.Task;
import taskDo.TaskType;
public class Search {
int taskIndex;
ArrayList<Task> returnList;
SearchType searchType;
public Search(){
taskIndex = -1;
returnList = new ArrayList<Task>();
}
public int searchById(int id){
assert !StorageList.getInstance().getTaskList().isEmpty();
for(Task taskIterator: StorageList.getInstance().getTaskList()){
if(id == taskIterator.getId()){
taskIndex = StorageList.getInstance().getTaskList().indexOf(taskIterator);
break;
}
}
return taskIndex;
}
public ArrayList<Task> searchForDisplay(ParsedResult parsedResult) {
switch(parsedResult.getSearchMode()){
case ALL:
return searchByAll();
case DATE:
return searchByDate(parsedResult);
case RANGEOFDATES:
return searchByRangeOfDates(parsedResult);
case COMPLETED:
return searchByCompleted(parsedResult);
default:
return null;
}
}
private ArrayList<Task> searchByAll() {
for(Task task: StorageList.getInstance().getTaskList()){
if(isNotCompleted(task)){
returnList.add(task);
}
}
return returnList;
}
public ArrayList<Task> searchByDate(ParsedResult parsedResult) {
Task sourceTask = parsedResult.getTaskDetails();
for(Task targetTask: StorageList.getInstance().getTaskList()){
if(isSameDueDate(sourceTask, targetTask)&& isNotCompleted(targetTask)){
returnList.add(targetTask);
}
}
return returnList;
}
private boolean isSameDueDate(Task sourceTask, Task targetTask) {
return targetTask.getDueDate().toLocalDate().equals(sourceTask.getDueDate().toLocalDate());
}
public ArrayList<Task> searchByRangeOfDates(ParsedResult parsedResult){
Task sourceTask = parsedResult.getTaskDetails();
for(Task targetTask: StorageList.getInstance().getTaskList()){
if(isNotCompleted(targetTask)){
if(targetTask.getTaskType().equals(TaskType.DEADLINE)){
if(isDeadlineTaskWithinRange(sourceTask, targetTask)){
returnList.add(targetTask);
}
}else if(targetTask.getTaskType().equals(TaskType.TIMED)){
if(isTimedTaskWithinRange(sourceTask, targetTask)){
returnList.add(targetTask);
}
}
}
}
return returnList;
}
private boolean isTimedTaskWithinRange(Task sourceTask, Task targetTask) {
boolean isNotBefore = !targetTask.getDueDate().toLocalDate().isBefore(sourceTask.getStartDate().toLocalDate());
boolean isNotAfter = !targetTask.getStartDate().toLocalDate().isAfter(sourceTask.getDueDate().toLocalDate());
return isNotBefore && isNotAfter;
}
private boolean isDeadlineTaskWithinRange(Task sourceTask, Task targetTask) {
boolean isNotBefore = !targetTask.getDueDate().toLocalDate().isBefore(sourceTask.getStartDate().toLocalDate());
boolean isNotAfter = !targetTask.getDueDate().toLocalDate().isAfter(sourceTask.getDueDate().toLocalDate());
return isNotBefore && isNotAfter;
}
public ArrayList<Task> searchByKeyword(ParsedResult parsedResult){
String searchInput = parsedResult.getTaskDetails().getTitle();
ArrayList<Task> taskList = StorageList.getInstance().getTaskList();
String[] splittedInput = searchInput.split(" ");
for(int i=0;i<splittedInput.length;i++) {
for(int j=0;j<taskList.size();j++){
if(isNotCompleted(taskList.get(j))){
if(taskList.get(j).getTitle().contains(splittedInput[i])){
returnList.add(taskList.get(j));
}
}
}
}
if(returnList.isEmpty()) { //2nd level search fail
WagnerFischerSearch wfSearch = new WagnerFischerSearch();
for(int i=0;i<splittedInput.length;i++) {
for(int j=0;j<taskList.size();j++){
if(isNotCompleted(taskList.get(j))){
for(int k=0;k<taskList.get(j).getTitle().length();k++) {
String[] splittedDescription = taskList.get(j).getTitle().split(" ");
int editDist = wfSearch.getEditDistance(splittedDescription[k], splittedInput[i]);
if(editDist <= 2) {
returnList.add(taskList.get(j));
break;
}
}
}
}
}
}
return returnList;
}
public ArrayList<Task> searchByCompleted(ParsedResult parsedResult){
for(Task task: StorageList.getInstance().getTaskList()){
if(task.isCompleted()){
returnList.add(task);
}
}
return returnList;
}
private boolean isNotCompleted(Task targetTask) {
return !targetTask.isCompleted();
}
}
|
package org.elasticsearch.xpack.core.indexing;
import org.apache.lucene.search.TotalHits;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchResponseSections;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.Collections;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import static org.hamcrest.Matchers.equalTo;
// Unit tests for AsyncTwoPhaseIndexer: each mock indexer asserts the exact order
// in which the indexer framework invokes its callbacks via a shared step counter.
public class AsyncTwoPhaseIndexerTests extends ESTestCase {
    // Flipped by the indexer callbacks so tests can await completion of the async run.
    AtomicBoolean isFinished = new AtomicBoolean(false);
    // Indexer stub that records and asserts the exact callback order of a successful run:
    // onStart(0) -> buildSearchRequest(1) -> doNextSearch(2) -> doProcess(3) -> onFinish(4) -> doSaveState(5).
    private class MockIndexer extends AsyncTwoPhaseIndexer<Integer, MockJobStats> {
        private final CountDownLatch latch;
        // test the execution order
        private int step;
        protected MockIndexer(Executor executor, AtomicReference<IndexerState> initialState, Integer initialPosition,
                              CountDownLatch latch) {
            super(executor, initialState, initialPosition, new MockJobStats());
            this.latch = latch;
        }
        @Override
        protected String getJobId() {
            return "mock";
        }
        @Override
        protected IterationResult<Integer> doProcess(SearchResponse searchResponse) {
            // Block until the test thread has finished its pre-completion assertions.
            awaitForLatch();
            assertThat(step, equalTo(3));
            ++step;
            // Empty result with isDone=true ends the indexing loop after one page.
            return new IterationResult<Integer>(Collections.emptyList(), 3, true);
        }
        private void awaitForLatch() {
            try {
                latch.await(10, TimeUnit.SECONDS);
            } catch (InterruptedException e) {
                throw new RuntimeException(e);
            }
        }
        @Override
        protected SearchRequest buildSearchRequest() {
            assertThat(step, equalTo(1));
            ++step;
            return null;
        }
        @Override
        protected void onStart(long now, ActionListener<Void> listener) {
            assertThat(step, equalTo(0));
            ++step;
            listener.onResponse(null);
        }
        @Override
        protected void doNextSearch(SearchRequest request, ActionListener<SearchResponse> nextPhase) {
            assertThat(step, equalTo(2));
            ++step;
            // Hand back an empty search response so doProcess() sees no hits.
            final SearchResponseSections sections = new SearchResponseSections(
                new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0), null,
                null, false, null, null, 1);
            nextPhase.onResponse(new SearchResponse(sections, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, null));
        }
        @Override
        protected void doNextBulk(BulkRequest request, ActionListener<BulkResponse> nextPhase) {
            fail("should not be called");
        }
        @Override
        protected void doSaveState(IndexerState state, Integer position, Runnable next) {
            assertThat(step, equalTo(5));
            ++step;
            next.run();
        }
        @Override
        protected void onFailure(Exception exc) {
            fail(exc.getMessage());
        }
        @Override
        protected void onFinish(ActionListener<Void> listener) {
            assertThat(step, equalTo(4));
            ++step;
            isFinished.set(true);
            listener.onResponse(null);
        }
        @Override
        protected void onAbort() {
        }
        public int getStep() {
            return step;
        }
    }
    // Indexer stub whose first search throws, to exercise the failure path:
    // onStart(0) -> buildSearchRequest(1) -> onFailure(2); nothing else may run.
    private class MockIndexerThrowsFirstSearch extends AsyncTwoPhaseIndexer<Integer, MockJobStats> {
        // test the execution order
        private int step;
        protected MockIndexerThrowsFirstSearch(Executor executor, AtomicReference<IndexerState> initialState, Integer initialPosition) {
            super(executor, initialState, initialPosition, new MockJobStats());
        }
        @Override
        protected String getJobId() {
            return "mock";
        }
        @Override
        protected IterationResult<Integer> doProcess(SearchResponse searchResponse) {
            fail("should not be called");
            return null;
        }
        @Override
        protected SearchRequest buildSearchRequest() {
            assertThat(step, equalTo(1));
            ++step;
            return null;
        }
        @Override
        protected void onStart(long now, ActionListener<Void> listener) {
            assertThat(step, equalTo(0));
            ++step;
            listener.onResponse(null);
        }
        @Override
        protected void doNextSearch(SearchRequest request, ActionListener<SearchResponse> nextPhase) {
            throw new RuntimeException("Failed to build search request");
        }
        @Override
        protected void doNextBulk(BulkRequest request, ActionListener<BulkResponse> nextPhase) {
            fail("should not be called");
        }
        @Override
        protected void doSaveState(IndexerState state, Integer position, Runnable next) {
            fail("should not be called");
        }
        @Override
        protected void onFailure(Exception exc) {
            assertThat(step, equalTo(2));
            ++step;
            isFinished.set(true);
        }
        @Override
        protected void onFinish(ActionListener<Void> listener) {
            fail("should not be called");
        }
        @Override
        protected void onAbort() {
            fail("should not be called");
        }
        public int getStep() {
            return step;
        }
    }
    // Minimal stats implementation; serialisation is irrelevant for these tests.
    private static class MockJobStats extends IndexerJobStats {
        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            return null;
        }
    }
    @AwaitsFix( bugUrl = "https://github.com/elastic/elasticsearch/issues/40946")
    public void testStateMachine() throws InterruptedException {
        AtomicReference<IndexerState> state = new AtomicReference<>(IndexerState.STOPPED);
        final ExecutorService executor = Executors.newFixedThreadPool(1);
        isFinished.set(false);
        try {
            CountDownLatch countDownLatch = new CountDownLatch(1);
            MockIndexer indexer = new MockIndexer(executor, state, 2, countDownLatch);
            indexer.start();
            assertThat(indexer.getState(), equalTo(IndexerState.STARTED));
            assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis()));
            assertThat(indexer.getState(), equalTo(IndexerState.INDEXING));
            // Release doProcess() only after the INDEXING state has been observed.
            countDownLatch.countDown();
            assertThat(indexer.getPosition(), equalTo(2));
            ESTestCase.awaitBusy(() -> isFinished.get());
            assertThat(indexer.getStep(), equalTo(6));
            assertThat(indexer.getStats().getNumInvocations(), equalTo(1L));
            assertThat(indexer.getStats().getNumPages(), equalTo(1L));
            assertThat(indexer.getStats().getOutputDocuments(), equalTo(0L));
            assertTrue(indexer.abort());
        } finally {
            executor.shutdownNow();
        }
    }
    public void testStateMachineBrokenSearch() throws InterruptedException {
        AtomicReference<IndexerState> state = new AtomicReference<>(IndexerState.STOPPED);
        final ExecutorService executor = Executors.newFixedThreadPool(1);
        isFinished.set(false);
        try {
            MockIndexerThrowsFirstSearch indexer = new MockIndexerThrowsFirstSearch(executor, state, 2);
            indexer.start();
            assertThat(indexer.getState(), equalTo(IndexerState.STARTED));
            assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis()));
            // NOTE(review): max wait is 10000 SECONDS (~2.8 hours); presumably
            // 10 seconds (or 10000 ms) was intended — confirm before changing.
            assertTrue(ESTestCase.awaitBusy(() -> isFinished.get(), 10000, TimeUnit.SECONDS));
            assertThat(indexer.getStep(), equalTo(3));
        } finally {
            executor.shutdownNow();
        }
    }
}
|
package bpsm.edn.parser;
import static bpsm.edn.model.Symbol.newSymbol;
import static bpsm.edn.model.Tag.newTag;
import static bpsm.edn.model.TaggedValue.newTaggedValue;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import org.junit.Test;
// Tests for the edn Parser: sample-file round trip, tagged-value handling
// (#inst, #uuid, unknown tags, #_ discard) and default collection immutability.
public class TestParser {
    // Parses the four top-level forms of the bundled sample file and compares
    // them against hand-built Java equivalents.
    @Test
    public void parseEdnSample() throws IOException {
        Parser parser = parser(IOUtil
                .stringFromResource("bpsm/edn/edn-sample.txt"));
        @SuppressWarnings("unchecked")
        List<Object> expected = Arrays.asList(
                map(TestScanner.key("keyword"), TestScanner.sym("symbol"), 1,
                        2.0d, new BigInteger("3"), new BigDecimal("4.0")),
                Arrays.asList(1, 1, 2, 3, 5, 8),
                new HashSet<Object>(Arrays.asList('\n', '\t')),
                Arrays.asList(Arrays.asList(Arrays.asList(true, false, null))));
        List<Object> results = new ArrayList<Object>();
        for (int i = 0; i < 4; i++) {
            results.add(parser.nextValue());
        }
        assertEquals(expected, results);
    }
    // Unknown tags must be preserved as TaggedValue rather than rejected.
    @Test
    public void parseTaggedValueWithUnkownTag() {
        assertEquals(newTaggedValue(newTag(newSymbol("foo", "bar")), 1), parse("#foo/bar 1"));
    }
    @Test
    public void parseTaggedInstant() {
        assertEquals(1347235200000L, ((Date)parse("#inst \"2012-09-10\"")).getTime());
    }
    @Test
    public void parseTaggedUUID() {
        assertEquals(UUID.fromString("f81d4fae-7dec-11d0-a765-00a0c91e6bf6"),
                parse("#uuid \"f81d4fae-7dec-11d0-a765-00a0c91e6bf6\""));
    }
    // Deliberately malformed (contains "XXXX"); used both directly and behind #_.
    private static final String INVALID_UUID = "#uuid \"f81d4fae-XXXX-11d0-a765-00a0c91e6bf6\"";
    @Test(expected=NumberFormatException.class)
    public void invalidUUIDCausesException() {
        parse(INVALID_UUID);
    }
    @Test
    public void discardedTaggedValuesDoNotCallTransformer() {
        // The given UUID is invalid, as demonstrated in the test above.
        // were the transformer for #uuid to be called despite the #_,
        // it would throw an exception and cause this test to fail.
        assertEquals(123L, parse("#_ " + INVALID_UUID + " 123"));
    }
    @Test(expected=UnsupportedOperationException.class)
    public void parserShouldReturnUnmodifiableListByDefault() {
        ((List<?>)parse("(1)")).remove(0);
    }
    @Test(expected=UnsupportedOperationException.class)
    public void parserShouldReturnUnmodifiableVectorByDefault() {
        ((List<?>)parse("[1]")).remove(0);
    }
    @Test(expected=UnsupportedOperationException.class)
    public void parserShouldReturnUnmodifiableSetByDefault() {
        ((Set<?>)parse("#{1}")).remove(1);
    }
    @Test(expected=UnsupportedOperationException.class)
    public void parserShouldReturnUnmodifiableMapByDefault() {
        ((Map<?,?>)parse("{1,-1}")).remove(1);
    }
    // Manual benchmark for #inst parsing; intentionally disabled (@Test commented out).
    //@Test
    public void performanceOfInstantParsing() {
        StringBuilder b = new StringBuilder();
        for (int h = -12; h <= 12; h++) {
            b.append("#inst ")
            .append('"')
            .append("2012-11-25T10:11:12.343")
            .append(String.format("%+03d", h))
            .append(":00")
            .append('"')
            .append(' ');
        }
        // Doubling nine times yields 512x the base set of instants.
        for (int i = 0; i < 9; i++) {
            b.append(b.toString());
        }
        String txt = "[" + b.toString() + "]";
        long ns = System.nanoTime();
        List<?> result = (List<?>) parse(txt);
        ns = System.nanoTime() - ns;
        long ms = ns / 1000000;
        System.out.printf("%d insts took %d ms (%1.2f ms/inst)\n",
                result.size(), ms, (1.0*ms)/result.size());
    }
    // Parses a single value, converting checked IOException to unchecked for test brevity.
    static Object parse(String input) {
        try {
            return parser(input).nextValue();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
    static Parser parser(String input) {
        try {
            return Parser.newParser(ParserConfiguration.defaultConfiguration(), input);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
    // Builds a map from alternating key/value arguments.
    private Map<Object, Object> map(Object... kvs) {
        Map<Object, Object> m = new HashMap<Object, Object>();
        for (int i = 0; i < kvs.length; i += 2) {
            m.put(kvs[i], kvs[i + 1]);
        }
        return m;
    }
}
|
package com.conveyal.gtfs;
import com.conveyal.gtfs.error.NewGTFSErrorType;
import com.conveyal.gtfs.loader.FeedLoadResult;
import com.conveyal.gtfs.loader.JdbcGtfsExporter;
import com.conveyal.gtfs.loader.SnapshotResult;
import com.conveyal.gtfs.storage.ErrorExpectation;
import com.conveyal.gtfs.storage.ExpectedFieldType;
import com.conveyal.gtfs.storage.PersistenceExpectation;
import com.conveyal.gtfs.storage.RecordExpectation;
import com.conveyal.gtfs.util.InvalidNamespaceException;
import com.conveyal.gtfs.validator.FeedValidatorCreator;
import com.conveyal.gtfs.validator.MTCValidator;
import com.conveyal.gtfs.validator.ValidationResult;
import com.csvreader.CsvReader;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import com.google.common.io.Files;
import graphql.Assert;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.input.BOMInputStream;
import org.hamcrest.Matcher;
import org.hamcrest.comparator.ComparatorMatcherBuilder;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.sql.DataSource;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.nio.charset.Charset;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.core.IsNull.nullValue;
import static org.junit.jupiter.api.Assertions.assertThrows;
/**
* A test suite for the {@link GTFS} Class.
*/
public class GTFSTest {
private final ByteArrayOutputStream outContent = new ByteArrayOutputStream();
private static final String JDBC_URL = "jdbc:postgresql://localhost";
private static final Logger LOG = LoggerFactory.getLogger(GTFSTest.class);
// setup a stream to capture the output from the program
    @BeforeEach
    public void setUpStreams() {
        // Redirect System.out so tests can assert on the CLI's printed output.
        // NOTE(review): the original stream is never restored after the test —
        // confirm this does not leak into other suites run in the same JVM.
        System.setOut(new PrintStream(outContent));
    }
    /**
     * Make sure that help can be printed.
     *
     * @throws Exception
     */
    @Test
    public void canPrintHelp() throws Exception {
        String[] args = {"-help"};
        GTFS.main(args);
        // Output is captured from the redirected System.out (see setUpStreams).
        assertThat(outContent.toString(), containsString("usage: java"));
    }
    /**
     * Make sure that help is printed if no recognizable arguments are provided.
     *
     * @throws Exception
     */
    @Test
    public void handlesUnknownArgs() throws Exception {
        String[] args = {"-blah"};
        GTFS.main(args);
        assertThat(outContent.toString(), containsString("usage: java"));
    }
    /**
     * Make sure that help is printed if not enough key arguments are provided.
     *
     * @throws Exception
     */
    @Test
    public void requiresActionCommand() throws Exception {
        String[] args = {"-u", "blah"};
        GTFS.main(args);
        assertThat(outContent.toString(), containsString("usage: java"));
    }
    /**
     * Tests whether or not a super simple 2-stop, 1-route, 1-trip, valid gtfs can be loaded and exported
     */
    @Test
    public void canLoadAndExportSimpleAgency() {
        // Expected validation errors, in the order the validator reports them.
        ErrorExpectation[] fakeAgencyErrorExpectations = ErrorExpectation.list(
            new ErrorExpectation(NewGTFSErrorType.MISSING_FIELD),
            new ErrorExpectation(NewGTFSErrorType.REFERENTIAL_INTEGRITY),
            new ErrorExpectation(NewGTFSErrorType.ROUTE_LONG_NAME_CONTAINS_SHORT_NAME),
            new ErrorExpectation(NewGTFSErrorType.FEED_TRAVEL_TIMES_ROUNDED),
            new ErrorExpectation(NewGTFSErrorType.STOP_UNUSED, equalTo("1234567")),
            new ErrorExpectation(NewGTFSErrorType.DATE_NO_SERVICE)
        );
        assertThat(
            runIntegrationTestOnFolder(
                "fake-agency",
                nullValue(),
                fakeAgencyPersistenceExpectations,
                fakeAgencyErrorExpectations
            ),
            equalTo(true)
        );
    }
    /**
     * Tests that a GTFS feed with bad date values in calendars.txt and calendar_dates.txt can pass the integration test.
     */
    @Test
    public void canLoadFeedWithBadDates () {
        // Unparseable dates should load as null rather than aborting the load.
        PersistenceExpectation[] expectations = PersistenceExpectation.list(
            new PersistenceExpectation(
                "calendar",
                new RecordExpectation[]{
                    new RecordExpectation("start_date", null)
                }
            )
        );
        ErrorExpectation[] errorExpectations = ErrorExpectation.list(
            new ErrorExpectation(NewGTFSErrorType.MISSING_FIELD),
            new ErrorExpectation(NewGTFSErrorType.DATE_FORMAT),
            new ErrorExpectation(NewGTFSErrorType.DATE_FORMAT),
            new ErrorExpectation(NewGTFSErrorType.DATE_FORMAT),
            new ErrorExpectation(NewGTFSErrorType.REFERENTIAL_INTEGRITY),
            new ErrorExpectation(NewGTFSErrorType.DATE_FORMAT),
            new ErrorExpectation(NewGTFSErrorType.DATE_FORMAT),
            // The below "wrong number of fields" errors are for empty new lines
            // found in the file.
            new ErrorExpectation(NewGTFSErrorType.WRONG_NUMBER_OF_FIELDS),
            new ErrorExpectation(NewGTFSErrorType.WRONG_NUMBER_OF_FIELDS),
            new ErrorExpectation(NewGTFSErrorType.WRONG_NUMBER_OF_FIELDS),
            new ErrorExpectation(NewGTFSErrorType.WRONG_NUMBER_OF_FIELDS),
            new ErrorExpectation(NewGTFSErrorType.WRONG_NUMBER_OF_FIELDS),
            new ErrorExpectation(NewGTFSErrorType.REFERENTIAL_INTEGRITY),
            new ErrorExpectation(NewGTFSErrorType.ROUTE_LONG_NAME_CONTAINS_SHORT_NAME),
            new ErrorExpectation(NewGTFSErrorType.FEED_TRAVEL_TIMES_ROUNDED),
            new ErrorExpectation(NewGTFSErrorType.SERVICE_NEVER_ACTIVE),
            new ErrorExpectation(NewGTFSErrorType.TRIP_NEVER_ACTIVE),
            new ErrorExpectation(NewGTFSErrorType.SERVICE_UNUSED),
            new ErrorExpectation(NewGTFSErrorType.DATE_NO_SERVICE)
        );
        assertThat(
            "Integration test passes",
            runIntegrationTestOnFolder("fake-agency-bad-calendar-date", nullValue(), expectations, errorExpectations),
            equalTo(true)
        );
    }
/**
 * Tests that a GTFS feed with blank (unspecified) values for pickup and dropoff types in stop_times.txt
 * is loaded with the blank values resolved, so that the patterns are counted correctly.
 */
@Test
public void canLoadFeedAndResolveUnsetPickupDropOffValues () {
    PersistenceExpectation firstPattern = makePickupDropOffPersistenceExpectation(1);
    PersistenceExpectation secondPattern = makePickupDropOffPersistenceExpectation(2);
    // The only expected error is unrelated to pickup/dropoff resolution.
    ErrorExpectation[] errorExpectations = ErrorExpectation.list(
        new ErrorExpectation(NewGTFSErrorType.FEED_TRAVEL_TIMES_ROUNDED)
    );
    PersistenceExpectation[] expectationsWithOnePattern = PersistenceExpectation.list(
        firstPattern
    );
    PersistenceExpectation[] expectationsWithTwoPatterns = PersistenceExpectation.list(
        firstPattern,
        // There should be only one pattern, so the record below should not be created after loading the test feed.
        secondPattern
    );
    // The first pattern should be added to the editor patterns table.
    assertThat(
        "There should be one pattern in the patterns table after resolving blank pickup/dropoff values in stop_times.",
        runIntegrationTestOnFolder("fake-ferry-blank-pickups", nullValue(), expectationsWithOnePattern, errorExpectations),
        equalTo(true)
    );
    // The second pattern should not be added to the editor patterns table
    // (there *should* be an assertion error about the second expected record not being found).
    assertThrows(
        AssertionError.class,
        () -> runIntegrationTestOnFolder("fake-ferry-blank-pickups", nullValue(), expectationsWithTwoPatterns, errorExpectations),
        "There should be *only* one pattern in the patterns table after resolving blank pickup/dropoff values."
    );
}
/**
 * Builds a persistence expectation for a row of the "patterns" table produced from the
 * "fake-ferry-blank-pickups" test feed. The trailing {@code true} marks the expectation as
 * applying to the editor database only (see {@code appliesToEditorDatabaseOnly} usage).
 */
private PersistenceExpectation makePickupDropOffPersistenceExpectation(int patternIndex) {
    RecordExpectation[] patternRecord = {
        new RecordExpectation("pattern_id", String.valueOf(patternIndex)),
        new RecordExpectation("route_id", "Tib-AIF"),
        new RecordExpectation("direction_id", 1),
        new RecordExpectation("shape_id", "y7d8"),
    };
    return new PersistenceExpectation("patterns", patternRecord, true);
}
/**
 * Tests that a GTFS feed with errors is loaded properly and that the various errors were detected and stored in the
 * database.
 */
@Test
public void canLoadFeedWithErrors () {
    // No particular records need to be present; this test only checks the stored errors.
    PersistenceExpectation[] expectations = PersistenceExpectation.list();
    // NOTE: error expectations are matched in order against the rows of the errors table
    // (see assertThatImportedGtfsMeetsExpectations), so this list must mirror the order
    // in which the loader/validator records errors.
    ErrorExpectation[] errorExpectations = ErrorExpectation.list(
        new ErrorExpectation(NewGTFSErrorType.FARE_TRANSFER_MISMATCH, equalTo("fare-02")),
        new ErrorExpectation(NewGTFSErrorType.FREQUENCY_PERIOD_OVERLAP, equalTo("freq-01_08:30:00_to_10:15:00_every_15m00s")),
        new ErrorExpectation(NewGTFSErrorType.FREQUENCY_PERIOD_OVERLAP, equalTo("freq-01_08:30:00_to_10:15:00_every_15m00s")),
        new ErrorExpectation(NewGTFSErrorType.FREQUENCY_PERIOD_OVERLAP),
        new ErrorExpectation(NewGTFSErrorType.FREQUENCY_PERIOD_OVERLAP),
        new ErrorExpectation(NewGTFSErrorType.FREQUENCY_PERIOD_OVERLAP),
        new ErrorExpectation(NewGTFSErrorType.FREQUENCY_PERIOD_OVERLAP),
        new ErrorExpectation(NewGTFSErrorType.TRIP_OVERLAP_IN_BLOCK, equalTo("1A00000"))
    );
    assertThat(
        "Integration test passes",
        runIntegrationTestOnFolder("fake-agency-overlapping-trips", nullValue(), expectations, errorExpectations),
        equalTo(true)
    );
}
/**
 * Tests whether or not "fake-agency" GTFS can be placed in a zipped subdirectory and loaded/exported successfully.
 */
@Test
public void canLoadAndExportSimpleAgencyInSubDirectory() {
    String zipFileName;
    // Get filename for fake-agency resource
    String resourceFolder = TestUtils.getResourceFileName("fake-agency");
    // Recursively copy folder into temp directory, which we zip up and run the integration test on.
    File tempDir = Files.createTempDir();
    tempDir.deleteOnExit();
    File nestedDir = new File(TestUtils.fileNameWithDir(tempDir.getAbsolutePath(), "fake-agency"));
    LOG.info("Creating temp folder with nested subdirectory at {}", tempDir.getAbsolutePath());
    try {
        FileUtils.copyDirectory(new File(resourceFolder), nestedDir);
        zipFileName = TestUtils.zipFolderFiles(tempDir.getAbsolutePath(), false);
    } catch (IOException e) {
        // Fail fast with context instead of continuing with a null zip file name, which would
        // previously surface later as a confusing failure inside runIntegrationTestOnZipFile.
        LOG.error("Failed to prepare zipped subdirectory feed", e);
        throw new AssertionError("Could not create test zip file for nested fake-agency feed", e);
    }
    // Error expectations are matched in order; the TABLE_IN_SUBDIRECTORY errors are reported
    // once per table found under the nested folder.
    ErrorExpectation[] errorExpectations = ErrorExpectation.list(
        new ErrorExpectation(NewGTFSErrorType.TABLE_IN_SUBDIRECTORY),
        new ErrorExpectation(NewGTFSErrorType.TABLE_IN_SUBDIRECTORY),
        new ErrorExpectation(NewGTFSErrorType.MISSING_FIELD),
        new ErrorExpectation(NewGTFSErrorType.TABLE_IN_SUBDIRECTORY),
        new ErrorExpectation(NewGTFSErrorType.TABLE_IN_SUBDIRECTORY),
        new ErrorExpectation(NewGTFSErrorType.TABLE_IN_SUBDIRECTORY),
        new ErrorExpectation(NewGTFSErrorType.TABLE_IN_SUBDIRECTORY),
        new ErrorExpectation(NewGTFSErrorType.TABLE_IN_SUBDIRECTORY),
        new ErrorExpectation(NewGTFSErrorType.TABLE_IN_SUBDIRECTORY),
        new ErrorExpectation(NewGTFSErrorType.TABLE_IN_SUBDIRECTORY),
        new ErrorExpectation(NewGTFSErrorType.TABLE_IN_SUBDIRECTORY),
        new ErrorExpectation(NewGTFSErrorType.TABLE_IN_SUBDIRECTORY),
        new ErrorExpectation(NewGTFSErrorType.TABLE_IN_SUBDIRECTORY),
        new ErrorExpectation(NewGTFSErrorType.TABLE_IN_SUBDIRECTORY),
        new ErrorExpectation(NewGTFSErrorType.TABLE_IN_SUBDIRECTORY),
        new ErrorExpectation(NewGTFSErrorType.TABLE_IN_SUBDIRECTORY),
        new ErrorExpectation(NewGTFSErrorType.TABLE_IN_SUBDIRECTORY),
        new ErrorExpectation(NewGTFSErrorType.REFERENTIAL_INTEGRITY),
        new ErrorExpectation(NewGTFSErrorType.ROUTE_LONG_NAME_CONTAINS_SHORT_NAME),
        new ErrorExpectation(NewGTFSErrorType.FEED_TRAVEL_TIMES_ROUNDED),
        new ErrorExpectation(NewGTFSErrorType.STOP_UNUSED),
        new ErrorExpectation(NewGTFSErrorType.DATE_NO_SERVICE)
    );
    assertThat(
        runIntegrationTestOnZipFile(zipFileName, nullValue(), fakeAgencyPersistenceExpectations, errorExpectations),
        equalTo(true)
    );
}
/**
 * Tests whether the simple gtfs can be loaded and exported if it has only calendar_dates.txt
 */
@Test
public void canLoadAndExportSimpleAgencyWithOnlyCalendarDates() {
    PersistenceExpectation[] persistenceExpectations = new PersistenceExpectation[]{
        new PersistenceExpectation(
            "agency",
            new RecordExpectation[]{
                new RecordExpectation("agency_id", "1"),
                new RecordExpectation("agency_name", "Fake Transit"),
                new RecordExpectation("agency_timezone", "America/Los_Angeles")
            }
        ),
        // The feed has no calendar.txt; service is defined entirely via calendar_dates.txt.
        new PersistenceExpectation(
            "calendar_dates",
            new RecordExpectation[]{
                new RecordExpectation(
                    "service_id", "04100312-8fe1-46a5-a9f2-556f39478f57"
                ),
                new RecordExpectation("date", 20170916),
                new RecordExpectation("exception_type", 1)
            }
        ),
        new PersistenceExpectation(
            "stop_times",
            new RecordExpectation[]{
                new RecordExpectation(
                    "trip_id", "a30277f8-e50a-4a85-9141-b1e0da9d429d"
                ),
                // 25200 = 07:00:00; the expectation carries both the numeric and string forms.
                new RecordExpectation("arrival_time", 25200, "07:00:00"),
                new RecordExpectation("departure_time", 25200, "07:00:00"),
                new RecordExpectation("stop_id", "4u6g"),
                new RecordExpectation("stop_sequence", 1),
                new RecordExpectation("pickup_type", 0),
                new RecordExpectation("drop_off_type", 0),
                new RecordExpectation("shape_dist_traveled", 0.0, 0.01)
            }
        ),
        // Check that the shape_dist_traveled values in stop_times are not rounded.
        new PersistenceExpectation(
            "stop_times",
            new RecordExpectation[]{
                new RecordExpectation("shape_dist_traveled", 341.4491961, 0.00001)
            }
        ),
        new PersistenceExpectation(
            "trips",
            new RecordExpectation[]{
                new RecordExpectation(
                    "trip_id", "a30277f8-e50a-4a85-9141-b1e0da9d429d"
                ),
                new RecordExpectation(
                    "service_id", "04100312-8fe1-46a5-a9f2-556f39478f57"
                ),
                new RecordExpectation("route_id", "1"),
                new RecordExpectation("direction_id", 0),
                new RecordExpectation(
                    "shape_id", "5820f377-f947-4728-ac29-ac0102cbc34e"
                ),
                new RecordExpectation("bikes_allowed", 0),
                new RecordExpectation("wheelchair_accessible", 0)
            }
        )
    };
    // NOTE: error expectations are matched in order against the errors table.
    ErrorExpectation[] errorExpectations = ErrorExpectation.list(
        new ErrorExpectation(NewGTFSErrorType.MISSING_FIELD),
        new ErrorExpectation(NewGTFSErrorType.ROUTE_LONG_NAME_CONTAINS_SHORT_NAME),
        new ErrorExpectation(NewGTFSErrorType.FEED_TRAVEL_TIMES_ROUNDED)
    );
    assertThat(
        runIntegrationTestOnFolder(
            "fake-agency-only-calendar-dates",
            nullValue(),
            persistenceExpectations,
            errorExpectations
        ),
        equalTo(true)
    );
}
/**
 * Tests whether the simple gtfs can be loaded and exported if it has a mixture of service_id definitions in both
 * the calendar.txt and calendar_dates.txt files.
 */
@Test
public void canLoadAndExportSimpleAgencyWithMixtureOfCalendarDefinitions() {
    PersistenceExpectation[] persistenceExpectations = new PersistenceExpectation[]{
        new PersistenceExpectation(
            "agency",
            new RecordExpectation[]{
                new RecordExpectation("agency_id", "1"),
                new RecordExpectation("agency_name", "Fake Transit"),
                new RecordExpectation("agency_timezone", "America/Los_Angeles")
            }
        ),
        // calendar.txt-only expectation
        new PersistenceExpectation(
            "calendar",
            new RecordExpectation[]{
                new RecordExpectation("service_id", "only-in-calendar-txt"),
                new RecordExpectation("start_date", 20170915),
                new RecordExpectation("end_date", 20170917)
            }
        ),
        // calendar.txt and calendar-dates.txt expectation
        new PersistenceExpectation(
            "calendar",
            new RecordExpectation[]{
                new RecordExpectation("service_id", "in-both-calendar-txt-and-calendar-dates"),
                new RecordExpectation("start_date", 20170918),
                new RecordExpectation("end_date", 20170920)
            }
        ),
        new PersistenceExpectation(
            "calendar_dates",
            new RecordExpectation[]{
                new RecordExpectation(
                    "service_id", "in-both-calendar-txt-and-calendar-dates"
                ),
                new RecordExpectation("date", 20170920),
                // exception_type 2 = service removed on this date.
                new RecordExpectation("exception_type", 2)
            }
        ),
        // calendar-dates.txt-only expectation: a synthetic calendar row is expected in the
        // editor database only (trailing `true` flag).
        new PersistenceExpectation(
            "calendar",
            new RecordExpectation[]{
                new RecordExpectation(
                    "service_id", "only-in-calendar-dates-txt"
                ),
                new RecordExpectation("start_date", 20170916),
                new RecordExpectation("end_date", 20170916)
            },
            true
        ),
        new PersistenceExpectation(
            "calendar_dates",
            new RecordExpectation[]{
                new RecordExpectation(
                    "service_id", "only-in-calendar-dates-txt"
                ),
                new RecordExpectation("date", 20170916),
                new RecordExpectation("exception_type", 1)
            }
        ),
        new PersistenceExpectation(
            "stop_times",
            new RecordExpectation[]{
                new RecordExpectation(
                    "trip_id", "non-frequency-trip"
                ),
                new RecordExpectation("arrival_time", 25200, "07:00:00"),
                new RecordExpectation("departure_time", 25200, "07:00:00"),
                new RecordExpectation("stop_id", "4u6g"),
                new RecordExpectation("stop_sequence", 1),
                new RecordExpectation("pickup_type", 0),
                new RecordExpectation("drop_off_type", 0),
                new RecordExpectation("shape_dist_traveled", 0.0, 0.01)
            }
        ),
        // calendar-dates only expectation
        new PersistenceExpectation(
            "trips",
            new RecordExpectation[]{
                new RecordExpectation(
                    "trip_id", "non-frequency-trip"
                ),
                new RecordExpectation(
                    "service_id", "only-in-calendar-dates-txt"
                ),
                new RecordExpectation("route_id", "1"),
                new RecordExpectation("direction_id", 0),
                new RecordExpectation(
                    "shape_id", "5820f377-f947-4728-ac29-ac0102cbc34e"
                ),
                new RecordExpectation("bikes_allowed", 0),
                new RecordExpectation("wheelchair_accessible", 0)
            }
        ),
        // calendar-only expectation
        new PersistenceExpectation(
            "trips",
            new RecordExpectation[]{
                new RecordExpectation(
                    "trip_id", "non-frequency-trip-2"
                ),
                new RecordExpectation(
                    "service_id", "only-in-calendar-txt"
                ),
                new RecordExpectation("route_id", "1"),
                new RecordExpectation("direction_id", 0),
                new RecordExpectation(
                    "shape_id", "5820f377-f947-4728-ac29-ac0102cbc34e"
                ),
                new RecordExpectation("bikes_allowed", 0),
                new RecordExpectation("wheelchair_accessible", 0)
            }
        ),
        // calendar-dates and calendar expectation
        new PersistenceExpectation(
            "trips",
            new RecordExpectation[]{
                new RecordExpectation(
                    "trip_id", "frequency-trip"
                ),
                new RecordExpectation(
                    "service_id", "in-both-calendar-txt-and-calendar-dates"
                ),
                new RecordExpectation("route_id", "1"),
                new RecordExpectation("direction_id", 0),
                new RecordExpectation(
                    "shape_id", "5820f377-f947-4728-ac29-ac0102cbc34e"
                ),
                new RecordExpectation("bikes_allowed", 0),
                new RecordExpectation("wheelchair_accessible", 0)
            }
        )
    };
    // NOTE: error expectations are matched in order against the errors table.
    ErrorExpectation[] errorExpectations = ErrorExpectation.list(
        new ErrorExpectation(NewGTFSErrorType.MISSING_FIELD),
        new ErrorExpectation(NewGTFSErrorType.ROUTE_LONG_NAME_CONTAINS_SHORT_NAME),
        new ErrorExpectation(NewGTFSErrorType.FEED_TRAVEL_TIMES_ROUNDED)
    );
    assertThat(
        runIntegrationTestOnFolder(
            "fake-agency-mixture-of-calendar-definitions",
            nullValue(),
            persistenceExpectations,
            errorExpectations
        ),
        equalTo(true)
    );
}
/**
 * Tests that a GTFS feed with long field values generates corresponding
 * validation errors per MTC guidelines.
 */
@Test
public void canLoadFeedWithLongFieldValues () {
    // No particular records need to be present; this test only checks the stored errors.
    PersistenceExpectation[] expectations = PersistenceExpectation.list();
    // One FIELD_VALUE_TOO_LONG error is expected per over-length field in the test feed.
    ErrorExpectation[] errorExpectations = ErrorExpectation.list(
        new ErrorExpectation(NewGTFSErrorType.FIELD_VALUE_TOO_LONG),
        new ErrorExpectation(NewGTFSErrorType.FIELD_VALUE_TOO_LONG),
        new ErrorExpectation(NewGTFSErrorType.FIELD_VALUE_TOO_LONG),
        new ErrorExpectation(NewGTFSErrorType.FIELD_VALUE_TOO_LONG),
        new ErrorExpectation(NewGTFSErrorType.FIELD_VALUE_TOO_LONG),
        new ErrorExpectation(NewGTFSErrorType.FIELD_VALUE_TOO_LONG),
        new ErrorExpectation(NewGTFSErrorType.FEED_TRAVEL_TIMES_ROUNDED) // Not related, not worrying about this one.
    );
    assertThat(
        "Long-field-value test passes",
        runIntegrationTestOnFolder(
            "fake-agency-mtc-long-fields",
            nullValue(),
            expectations,
            errorExpectations,
            // Register the MTC validator, which performs the field-length checks.
            MTCValidator::new
        ),
        equalTo(true)
    );
}
/**
 * Tests that a GTFS feed with a service id that doesn't apply to any day of the week
 * (i.e. when 'monday' through 'sunday' fields are set to zero)
 * generates a validation error.
 */
@Test
public void canLoadFeedWithServiceWithoutDaysOfWeek() {
    // No particular records need to be present; this test only checks the stored errors.
    PersistenceExpectation[] expectations = PersistenceExpectation.list();
    // NOTE: error expectations are matched in order against the errors table.
    ErrorExpectation[] errorExpectations = ErrorExpectation.list(
        new ErrorExpectation(NewGTFSErrorType.FEED_TRAVEL_TIMES_ROUNDED), // Not related, not worrying about this one.
        new ErrorExpectation(NewGTFSErrorType.SERVICE_WITHOUT_DAYS_OF_WEEK)
    );
    assertThat(
        "service-without-days test passes",
        runIntegrationTestOnFolder(
            "fake-agency-service-without-days",
            nullValue(),
            expectations,
            errorExpectations
        ),
        equalTo(true)
    );
}
/**
 * A helper method that will zip a specified folder in test/main/resources and call
 * {@link #runIntegrationTestOnZipFile} on that file.
 *
 * @param folderName name of the resource folder to zip and load
 * @param fatalExceptionExpectation matcher applied to the validation result's fatal exception
 * @param persistenceExpectations records expected to exist after load/snapshot/export
 * @param errorExpectations errors expected, in order, in the errors table
 * @param customValidators optional extra validators to run during validation
 * @return true if the full load/validate/export/snapshot/delete cycle succeeded
 */
private boolean runIntegrationTestOnFolder(
    String folderName,
    Matcher<Object> fatalExceptionExpectation,
    PersistenceExpectation[] persistenceExpectations,
    ErrorExpectation[] errorExpectations,
    FeedValidatorCreator... customValidators
) {
    LOG.info("Running integration test on folder {}", folderName);
    // zip up test folder into temp zip file
    String zipFileName;
    try {
        zipFileName = TestUtils.zipFolderFiles(folderName, true);
    } catch (IOException e) {
        // Log through the test logger (instead of printStackTrace) so the cause is visible
        // in test output, then report failure to the caller.
        LOG.error("Failed to zip test folder {}", folderName, e);
        return false;
    }
    return runIntegrationTestOnZipFile(
        zipFileName,
        fatalExceptionExpectation,
        persistenceExpectations,
        errorExpectations,
        customValidators
    );
}
/**
 * Load a feed and remove all data from the tables that match the mandatory file list. Confirm that the export
 * contains the mandatory files which will be exported even though the matching table has no data.
 */
@Test
void canExportEmptyMandatoryFiles() {
    String testDBName = TestUtils.generateNewDB();
    File tempFile = null;
    try {
        String zipFileName = TestUtils.zipFolderFiles("fake-agency", true);
        String dbConnectionUrl = String.join("/", JDBC_URL, testDBName);
        DataSource dataSource = TestUtils.createTestDataSource(dbConnectionUrl);
        FeedLoadResult loadResult = GTFS.load(zipFileName, dataSource);
        String namespace = loadResult.uniqueIdentifier;
        // Remove data from tables that match the mandatory files.
        for (String fileName : JdbcGtfsExporter.mandatoryFileList) {
            try (Connection connection = dataSource.getConnection()) {
                String tableName = fileName.split("\\.")[0];
                String sql = String.format("delete from %s.%s", namespace, tableName);
                LOG.info(sql);
                connection.prepareStatement(sql).execute();
            }
        }
        // Confirm that the mandatory files are present in the zip file.
        tempFile = exportGtfs(namespace, dataSource, false);
        // try-with-resources: ZipFile holds an OS file handle that must be released so the
        // temp file can actually be cleaned up.
        try (ZipFile gtfsZipFile = new ZipFile(tempFile.getAbsolutePath())) {
            for (String fileName : JdbcGtfsExporter.mandatoryFileList) {
                Assert.assertNotNull(gtfsZipFile.getEntry(fileName));
            }
        }
    } catch (IOException | SQLException e) {
        LOG.error("An error occurred while attempting to test exporting of mandatory files.", e);
        // Previously this exception was only logged, which allowed the test to pass silently
        // even when the load/export failed. Fail the test instead.
        throw new AssertionError("Export of empty mandatory files failed", e);
    } finally {
        TestUtils.dropDB(testDBName);
        if (tempFile != null) tempFile.deleteOnExit();
    }
}
/**
 * A helper method that will run GTFS#main with a certain zip file.
 * This tests whether a GTFS zip file can be loaded without any errors. The full list of steps includes:
 * 1. GTFS#load
 * 2. GTFS#validate
 * 3. exportGtfs/check exported GTFS integrity
 * 4. makeSnapshot
 * 5. Delete feed/namespace
 *
 * @param zipFileName path of the GTFS zip to run through the pipeline
 * @param fatalExceptionExpectation matcher applied to the validation result's fatal exception
 * @param persistenceExpectations records expected in the database and in exported CSVs
 * @param errorExpectations errors expected, in order, in the errors table (null means zero errors)
 * @param customValidators optional extra validators passed to GTFS.validate
 * @return true if every step succeeded; false if a load/SQL/IO error occurred
 */
private boolean runIntegrationTestOnZipFile(
    String zipFileName,
    Matcher<Object> fatalExceptionExpectation,
    PersistenceExpectation[] persistenceExpectations,
    ErrorExpectation[] errorExpectations,
    FeedValidatorCreator... customValidators
) {
    // Each run gets its own throwaway database, dropped in every exit path below.
    String testDBName = TestUtils.generateNewDB();
    String dbConnectionUrl = String.join("/", JDBC_URL, testDBName);
    DataSource dataSource = TestUtils.createTestDataSource(dbConnectionUrl);
    String namespace;
    // Verify that loading the feed completes and data is stored properly
    try (Connection connection = dataSource.getConnection()) {
        // load and validate feed
        LOG.info("load and validate GTFS file {}", zipFileName);
        FeedLoadResult loadResult = GTFS.load(zipFileName, dataSource);
        ValidationResult validationResult = GTFS.validate(
            loadResult.uniqueIdentifier,
            dataSource,
            customValidators
        );
        assertThat(validationResult.fatalException, is(fatalExceptionExpectation));
        namespace = loadResult.uniqueIdentifier;
        assertThatImportedGtfsMeetsExpectations(
            connection,
            namespace,
            persistenceExpectations,
            errorExpectations,
            false
        );
        // Verify that exporting the feed (in non-editor mode) completes and data is outputted properly
        LOG.info("export GTFS from created namespace");
        File tempFile = exportGtfs(namespace, dataSource, false);
        assertThatExportedGtfsMeetsExpectations(tempFile, persistenceExpectations, false);
        // Verify that making a snapshot from an existing feed database, then exporting that snapshot to a GTFS zip
        // file works as expected
        boolean snapshotIsOk = assertThatSnapshotIsSuccessful(
            connection,
            namespace,
            dataSource,
            testDBName,
            persistenceExpectations,
            false
        );
        if (!snapshotIsOk) return false;
        // Also, verify that if we're normalizing stop_times#stop_sequence, the stop_sequence values conform with
        // our expectations (zero-based, incrementing values).
        PersistenceExpectation[] expectationsWithNormalizedStopTimesSequence =
            updatePersistenceExpectationsWithNormalizedStopTimesSequence(persistenceExpectations);
        boolean normalizedSnapshotIsOk = assertThatSnapshotIsSuccessful(
            connection,
            namespace,
            dataSource,
            testDBName,
            expectationsWithNormalizedStopTimesSequence,
            true
        );
        if (!normalizedSnapshotIsOk) return false;
    } catch (IOException | SQLException e) {
        // Infrastructure failure: drop the test database before reporting failure to the caller.
        LOG.error("An error occurred while loading/snapshotting the database!");
        TestUtils.dropDB(testDBName);
        e.printStackTrace();
        return false;
    } catch (AssertionError e) {
        // Expectation failure: clean up the test database, then let the assertion propagate.
        TestUtils.dropDB(testDBName);
        throw e;
    }
    // Get a new connection here, because sometimes the old connection causes hanging issues upon trying to drop a
    // schema (via deleting a GTFS namespace).
    try (Connection connection = dataSource.getConnection()) {
        // Verify that deleting a feed works as expected.
        LOG.info("Deleting GTFS feed from database.");
        GTFS.delete(namespace, dataSource);
        String sql = String.format("select * from feeds where namespace = '%s'", namespace);
        LOG.info(sql);
        ResultSet resultSet = connection.prepareStatement(sql).executeQuery();
        while (resultSet.next()) {
            // Assert that the feed registry shows feed as deleted.
            assertThat(resultSet.getBoolean("deleted"), is(true));
        }
        // Ensure that schema no longer exists for namespace (note: this is Postgres specific).
        String schemaSql = String.format(
            "SELECT * FROM information_schema.schemata where schema_name = '%s'",
            namespace
        );
        LOG.info(schemaSql);
        ResultSet schemaResultSet = connection.prepareStatement(schemaSql).executeQuery();
        int schemaCount = 0;
        while (schemaResultSet.next()) schemaCount++;
        // There should be no schema records matching the deleted namespace.
        assertThat(schemaCount, is(0));
    } catch (SQLException | InvalidNamespaceException e) {
        LOG.error("An error occurred while deleting a schema!", e);
        TestUtils.dropDB(testDBName);
        return false;
    } catch (AssertionError e) {
        TestUtils.dropDB(testDBName);
        throw e;
    }
    // This should be run following all of the above tests (any new tests should go above these lines).
    TestUtils.dropDB(testDBName);
    return true;
}
/**
 * Asserts that neither the overall feed load nor any individual per-table load
 * recorded a fatal exception.
 */
private void assertThatLoadIsErrorFree(FeedLoadResult loadResult) {
    assertThat(loadResult.fatalException, is(nullValue()));
    assertThat(loadResult.agency.fatalException, is(nullValue()));
    assertThat(loadResult.calendar.fatalException, is(nullValue()));
    assertThat(loadResult.calendarDates.fatalException, is(nullValue()));
    assertThat(loadResult.fareAttributes.fatalException, is(nullValue()));
    assertThat(loadResult.fareRules.fatalException, is(nullValue()));
    assertThat(loadResult.feedInfo.fatalException, is(nullValue()));
    assertThat(loadResult.frequencies.fatalException, is(nullValue()));
    assertThat(loadResult.routes.fatalException, is(nullValue()));
    assertThat(loadResult.shapes.fatalException, is(nullValue()));
    assertThat(loadResult.stops.fatalException, is(nullValue()));
    assertThat(loadResult.stopTimes.fatalException, is(nullValue()));
    assertThat(loadResult.transfers.fatalException, is(nullValue()));
    assertThat(loadResult.trips.fatalException, is(nullValue()));
}
/**
 * Asserts that a snapshot operation is error free, including the schedule exceptions table
 * which exists only on snapshot results.
 */
private void assertThatSnapshotIsErrorFree(SnapshotResult snapshotResult) {
    // A snapshot result is accepted where a FeedLoadResult is expected, so reuse the shared checks.
    assertThatLoadIsErrorFree(snapshotResult);
    assertThat(snapshotResult.scheduleExceptions.fatalException, is(nullValue()));
}
/**
 * Helper function to export a GTFS from the database to a temporary zip file.
 *
 * @param namespace database schema/namespace to export
 * @param dataSource source of connections to the GTFS database
 * @param fromEditor whether to export using editor-mode rules
 * @return the temporary zip file the feed was written to
 * @throws IOException if the temporary file cannot be created
 */
private File exportGtfs(String namespace, DataSource dataSource, boolean fromEditor) throws IOException {
    final File outputZip = File.createTempFile("snapshot", ".zip");
    GTFS.export(namespace, outputZip.getAbsolutePath(), dataSource, fromEditor);
    return outputZip;
}
/**
 * Simple holder pairing an expected field value with the value actually found,
 * used to report persistence-expectation mismatches with more context.
 */
private class ValuePair {
    // The value the persistence expectation called for.
    private final Object expected;
    // The value actually read from the database record.
    private final Object found;
    private ValuePair (Object expected, Object found) {
        this.expected = expected;
        this.found = found;
    }
}
/**
 * Creates a snapshot, and asserts persistence expectations on the newly-created database of that snapshot. Then,
 * exports that snapshot to a GTFS and asserts persistence expectations on the newly-exported GTFS.
 *
 * @param connection open connection used to query the snapshot namespace
 * @param namespace source namespace to snapshot
 * @param dataSource source of connections to the GTFS database
 * @param testDBName test database name, dropped whenever this method fails
 * @param persistenceExpectations records expected in the snapshot and its export
 * @param normalizeStopTimes whether stop_times#stop_sequence values should be normalized in the snapshot
 * @return true if the snapshot and export met all expectations; false on an IO/SQL failure
 */
private boolean assertThatSnapshotIsSuccessful(
    Connection connection,
    String namespace,
    DataSource dataSource,
    String testDBName,
    PersistenceExpectation[] persistenceExpectations,
    boolean normalizeStopTimes
) {
    try {
        LOG.info("copy GTFS from created namespace");
        SnapshotResult copyResult = GTFS.makeSnapshot(namespace, dataSource, normalizeStopTimes);
        assertThatSnapshotIsErrorFree(copyResult);
        // Null error expectations: error checks are skipped for editor databases anyway
        // (see assertThatImportedGtfsMeetsExpectations with isEditorDatabase = true).
        assertThatImportedGtfsMeetsExpectations(
            connection,
            copyResult.uniqueIdentifier,
            persistenceExpectations,
            null,
            true
        );
        LOG.info("export GTFS from copied namespace");
        File tempFile = exportGtfs(copyResult.uniqueIdentifier, dataSource, true);
        assertThatExportedGtfsMeetsExpectations(tempFile, persistenceExpectations, true);
    } catch (IOException | SQLException e) {
        // Infrastructure failure: clean up the test database and report failure to the caller.
        e.printStackTrace();
        TestUtils.dropDB(testDBName);
        return false;
    } catch (AssertionError e) {
        // Expectation failure: clean up the test database, then let the assertion propagate.
        TestUtils.dropDB(testDBName);
        throw e;
    }
    return true;
}
/**
 * Run through the list of persistence expectations to make sure that the feed was imported properly into the
 * database.
 *
 * @param connection open connection to the GTFS database
 * @param namespace schema/namespace to inspect
 * @param persistenceExpectations records that must exist in the namespace's tables
 * @param errorExpectations errors expected, in order, in the errors table; null means zero errors expected
 * @param isEditorDatabase true when inspecting an editor (snapshot) database, which skips error checks
 * @throws SQLException if any query against the namespace fails
 */
private void assertThatImportedGtfsMeetsExpectations(
    Connection connection,
    String namespace,
    PersistenceExpectation[] persistenceExpectations,
    ErrorExpectation[] errorExpectations,
    boolean isEditorDatabase
) throws SQLException {
    // Store field mismatches here (to provide assertion statements with more details).
    Multimap<String, ValuePair> fieldsWithMismatches = ArrayListMultimap.create();
    // Check that no validators failed during validation in non-editor databases only (validators do not run
    // when creating an editor database).
    if (!isEditorDatabase) {
        assertThat(
            "One or more validators failed during GTFS import.",
            countValidationErrorsOfType(connection, namespace, NewGTFSErrorType.VALIDATOR_FAILED),
            equalTo(0)
        );
    }
    // run through testing expectations
    LOG.info("testing expectations of record storage in the database");
    for (PersistenceExpectation persistenceExpectation : persistenceExpectations) {
        // Editor-only expectations are not checked against a non-editor database.
        if (persistenceExpectation.appliesToEditorDatabaseOnly && !isEditorDatabase) continue;
        // select all entries from a table
        String sql = String.format(
            "select * from %s.%s",
            namespace,
            persistenceExpectation.tableName
        );
        LOG.info(sql);
        // NOTE(review): statements and result sets opened here are never explicitly closed; they
        // are only released when the caller closes the connection. Consider try-with-resources.
        ResultSet rs = connection.prepareStatement(sql).executeQuery();
        boolean foundRecord = false;
        int numRecordsSearched = 0;
        // Scan rows until one matches every field expectation.
        while (rs.next()) {
            numRecordsSearched++;
            LOG.info("record {} in ResultSet", numRecordsSearched);
            boolean allFieldsMatch = true;
            for (RecordExpectation recordExpectation: persistenceExpectation.recordExpectations) {
                switch (recordExpectation.expectedFieldType) {
                    case DOUBLE:
                        double doubleVal = rs.getDouble(recordExpectation.fieldName);
                        LOG.info("{}: {}", recordExpectation.fieldName, doubleVal);
                        // Exact comparison here; delta-based comparison happens only in the CSV check.
                        if (doubleVal != recordExpectation.doubleExpectation) {
                            allFieldsMatch = false;
                        }
                        break;
                    case INT:
                        int intVal = rs.getInt(recordExpectation.fieldName);
                        LOG.info("{}: {}", recordExpectation.fieldName, intVal);
                        if (intVal != recordExpectation.intExpectation) {
                            fieldsWithMismatches.put(
                                recordExpectation.fieldName,
                                new ValuePair(recordExpectation.stringExpectation, intVal)
                            );
                            allFieldsMatch = false;
                        }
                        break;
                    case STRING:
                        String strVal = rs.getString(recordExpectation.fieldName);
                        LOG.info("{}: {}", recordExpectation.fieldName, strVal);
                        // Both null counts as a match for string fields.
                        if (strVal == null && recordExpectation.stringExpectation == null) {
                            break;
                        } else if (strVal == null || !strVal.equals(recordExpectation.stringExpectation)) {
                            fieldsWithMismatches.put(
                                recordExpectation.fieldName,
                                new ValuePair(recordExpectation.stringExpectation, strVal)
                            );
                            LOG.error("Expected {}, found {}", recordExpectation.stringExpectation, strVal);
                            allFieldsMatch = false;
                        }
                        break;
                }
                // Stop comparing fields of this record on the first mismatch.
                if (!allFieldsMatch) {
                    break;
                }
            }
            // all fields match expectations! We have found the record.
            if (allFieldsMatch) {
                LOG.info("Database record satisfies expectations.");
                foundRecord = true;
                break;
            } else {
                LOG.error("Persistence mismatch on record {}", numRecordsSearched);
            }
        }
        assertThatDatabasePersistenceExpectationRecordWasFound(
            persistenceExpectation,
            numRecordsSearched,
            foundRecord,
            fieldsWithMismatches
        );
    }
    // Skip error expectation analysis on editor database
    if (isEditorDatabase) {
        // NOTE(review): this log message says "non-editor" but the branch runs for editor databases.
        LOG.info("Skipping error expectations for non-editor database.");
        return;
    }
    // Expect zero errors if errorExpectations is null.
    if (errorExpectations == null) errorExpectations = new ErrorExpectation[]{};
    // Check that error expectations match errors stored in database.
    LOG.info("Checking {} error expectations", errorExpectations.length);
    // select all entries from error table
    String sql = String.format("select * from %s.errors", namespace);
    LOG.info(sql);
    ResultSet rs = connection.prepareStatement(sql).executeQuery();
    int errorCount = 0;
    Iterator<ErrorExpectation> errorExpectationIterator = Arrays.stream(errorExpectations).iterator();
    // Errors are matched against expectations strictly in row order; the totals must also agree.
    while (rs.next()) {
        errorCount++;
        String errorType = rs.getString("error_type");
        String entityType = rs.getString("entity_type");
        String entityId = rs.getString("entity_id");
        String badValue = rs.getString("bad_value");
        LOG.info("Found error {}: {} {} {} {}", errorCount, errorType, entityId, entityType, badValue);
        // Skip error expectation if not exists. But continue iteration to count all errors.
        if (!errorExpectationIterator.hasNext()) continue;
        ErrorExpectation errorExpectation = errorExpectationIterator.next();
        LOG.info("Expecting error {}: {}", errorCount, errorExpectation.errorTypeMatcher);
        // Error expectation must contain error type matcher. The others are optional.
        assertThat(errorType, errorExpectation.errorTypeMatcher);
        if (errorExpectation.entityTypeMatcher != null) assertThat(entityType, errorExpectation.entityTypeMatcher);
        if (errorExpectation.entityIdMatcher != null) assertThat(entityId, errorExpectation.entityIdMatcher);
        if (errorExpectation.badValueMatcher != null) assertThat(badValue, errorExpectation.badValueMatcher);
    }
    assertThat(
        "Error count is equal to number of error expectations.",
        errorCount,
        equalTo(errorExpectations.length));
}
/**
 * Counts rows in the namespace's errors table whose error_type matches the given type.
 *
 * @param connection open connection to the GTFS database
 * @param namespace schema/namespace whose errors table is queried
 * @param errorType the error type to count
 * @return number of matching error rows
 * @throws SQLException if the query fails
 */
private static int countValidationErrorsOfType(
    Connection connection,
    String namespace,
    NewGTFSErrorType errorType
) throws SQLException {
    String errorCheckSql = String.format(
        "select * from %s.errors where error_type = '%s'",
        namespace,
        errorType);
    LOG.info(errorCheckSql);
    // try-with-resources so the statement (and, per JDBC, its result set) is always closed.
    // Fully qualified to avoid requiring a new import in the file header.
    try (java.sql.PreparedStatement statement = connection.prepareStatement(errorCheckSql)) {
        ResultSet errorResults = statement.executeQuery();
        int errorCount = 0;
        while (errorResults.next()) {
            errorCount++;
        }
        return errorCount;
    }
}
/**
 * Helper to assert that the GTFS that was exported to a zip file matches all data expectations defined in the
 * persistence expectations.
 *
 * @param tempFile the exported GTFS zip file
 * @param persistenceExpectations records that must appear in the exported CSV tables
 * @param fromEditor whether the export was produced in editor mode (affects stringified expectations)
 * @throws IOException if the zip file or one of its entries cannot be read
 */
private void assertThatExportedGtfsMeetsExpectations(
    File tempFile,
    PersistenceExpectation[] persistenceExpectations,
    boolean fromEditor
) throws IOException {
    LOG.info("testing expectations of csv outputs in an exported gtfs");
    // try-with-resources: closing the ZipFile releases the OS file handle and closes any
    // entry input streams it produced (previously the file was never closed).
    try (ZipFile gtfsZipfile = new ZipFile(tempFile.getAbsolutePath())) {
        // iterate through all expectations
        for (PersistenceExpectation persistenceExpectation : persistenceExpectations) {
            if (persistenceExpectation.appliesToEditorDatabaseOnly) continue;
            // No need to check that errors were exported because it is an internal table only.
            if ("errors".equals(persistenceExpectation.tableName)) continue;
            final String tableFileName = persistenceExpectation.tableName + ".txt";
            LOG.info(String.format("reading table: %s", tableFileName));
            ZipEntry entry = gtfsZipfile.getEntry(tableFileName);
            // ensure file exists in zip
            if (entry == null) {
                throw new AssertionError(
                    String.format("expected table %s not found in outputted zip file", tableFileName)
                );
            }
            // prepare to read the file
            InputStream zipInputStream = gtfsZipfile.getInputStream(entry);
            // Skip any byte order mark that may be present. Files must be UTF-8,
            // but the GTFS spec says that "files that include the UTF byte order mark are acceptable".
            InputStream bomInputStream = new BOMInputStream(zipInputStream);
            CsvReader csvReader = new CsvReader(bomInputStream, ',', Charset.forName("UTF8"));
            csvReader.readHeaders();
            boolean foundRecord = false;
            int numRecordsSearched = 0;
            // read each record
            while (csvReader.readRecord() && !foundRecord) {
                numRecordsSearched++;
                LOG.info(String.format("record %d in csv file", numRecordsSearched));
                boolean allFieldsMatch = true;
                // iterate through all rows in record to determine if it's the one we're looking for
                for (RecordExpectation recordExpectation: persistenceExpectation.recordExpectations) {
                    String val = csvReader.get(recordExpectation.fieldName);
                    String expectation = recordExpectation.getStringifiedExpectation(fromEditor);
                    LOG.info(String.format(
                        "%s: %s (Expectation: %s)",
                        recordExpectation.fieldName,
                        val,
                        expectation
                    ));
                    if (val.isEmpty() && expectation == null) {
                        // First check that the csv value is an empty string and that the expectation is null. Null
                        // exported from the database to a csv should round trip into an empty string, so this meets the
                        // expectation.
                        break;
                    } else if (!val.equals(expectation)) {
                        // sometimes there are slight differences in decimal precision in various fields
                        // check if the decimal delta is acceptable
                        if (equalsWithNumericDelta(val, recordExpectation)) continue;
                        allFieldsMatch = false;
                        break;
                    }
                }
                // all fields match expectations! We have found the record.
                if (allFieldsMatch) {
                    LOG.info("CSV record satisfies expectations.");
                    foundRecord = true;
                }
            }
            assertThatCSVPersistenceExpectationRecordWasFound(
                persistenceExpectation,
                tableFileName,
                numRecordsSearched,
                foundRecord
            );
        }
    }
}
/**
 * Check whether a potentially numeric value is equal given potentially small decimal deltas.
 */
private boolean equalsWithNumericDelta(String val, RecordExpectation recordExpectation) {
    // Only DOUBLE expectations carry an accepted delta.
    if (recordExpectation.expectedFieldType != ExpectedFieldType.DOUBLE) {
        return false;
    }
    double parsed;
    try {
        parsed = Double.parseDouble(val);
    } catch (NumberFormatException nfe) {
        // Not numeric at all, so it cannot match within any delta.
        return false;
    }
    return Math.abs(parsed - recordExpectation.doubleExpectation) < recordExpectation.acceptedDelta;
}
/**
 * Helper that calls assertion with corresponding context for better error reporting.
 * Wraps {@link #assertThatPersistenceExpectationRecordWasFound} with a "Database table" context
 * string, forwarding the field-level mismatches collected during the database scan.
 */
private void assertThatDatabasePersistenceExpectationRecordWasFound(
    PersistenceExpectation persistenceExpectation,
    int numRecordsSearched,
    boolean foundRecord,
    Multimap<String, ValuePair> fieldsWithMismatches
) {
    assertThatPersistenceExpectationRecordWasFound(
        persistenceExpectation,
        String.format("Database table `%s`", persistenceExpectation.tableName),
        numRecordsSearched,
        foundRecord,
        fieldsWithMismatches
    );
}
/**
 * Helper that calls assertion with corresponding context for better error reporting.
 * Wraps {@link #assertThatPersistenceExpectationRecordWasFound} with a "CSV file" context string.
 * The CSV scan does not collect field-level mismatches, hence the null argument.
 */
private void assertThatCSVPersistenceExpectationRecordWasFound(
    PersistenceExpectation persistenceExpectation,
    String tableFileName,
    int numRecordsSearched,
    boolean foundRecord
) {
    assertThatPersistenceExpectationRecordWasFound(
        persistenceExpectation,
        String.format("CSV file `%s`", tableFileName),
        numRecordsSearched,
        foundRecord,
        null
    );
}
/**
 * Helper method to make sure a persistence expectation was actually found after searching through records.
 * First asserts that at least one record was searched at all, then either
 * reports the first mismatching field values (when available) or asserts that
 * the expected record was found.
 *
 * @param persistenceExpectation the table/record expectation being verified
 * @param contextDescription     short description of where the search happened
 *                               (e.g. a table or CSV file name)
 * @param numRecordsSearched     how many records were examined
 * @param foundRecord            whether a fully matching record was found
 * @param mismatches             per-field found/expected value pairs from
 *                               non-matching records, or null when unavailable
 */
private void assertThatPersistenceExpectationRecordWasFound(
    PersistenceExpectation persistenceExpectation,
    String contextDescription,
    int numRecordsSearched,
    boolean foundRecord,
    Multimap<String, ValuePair> mismatches
) {
    // Prefix once so all assertion messages below read naturally.
    contextDescription = String.format("in the %s", contextDescription);
    // Assert that more than 0 records were found
    assertThat(
        String.format("No records found %s", contextDescription),
        numRecordsSearched,
        ComparatorMatcherBuilder.<Integer>usingNaturalOrdering().greaterThan(0)
    );
    // If the record wasn't found, but at least one mismatching record was found, return info about the record that
    // was found to attempt to aid with debugging.
    // NOTE(review): if mismatches is non-null but empty, this branch performs no
    // assertion at all even though foundRecord is false — verify callers can
    // never reach that state, otherwise a missing record would pass silently.
    if (!foundRecord && mismatches != null) {
        for (String field : mismatches.keySet()) {
            Collection<ValuePair> valuePairs = mismatches.get(field);
            for (ValuePair valuePair : valuePairs) {
                // Each found/expected pair is asserted equal; the first unequal
                // pair fails the test with the field name in the message.
                assertThat(
                    String.format(
                        "The value expected for %s was not found %s. NOTE: there could be other values, but the first found value is shown.",
                        field,
                        contextDescription
                    ),
                    valuePair.found,
                    equalTo(valuePair.expected)
                );
            }
        }
    } else {
        // Assert that the record was found
        assertThat(
            String.format(
                "The record as defined in the PersistenceExpectation was not found %s. Unfound Record: %s",
                contextDescription,
                persistenceExpectation.toString()
            ),
            foundRecord,
            equalTo(true)
        );
    }
}
/**
 * Persistence expectations for use with the GTFS contained within the "fake-agency" resources folder.
 * Each entry names a GTFS table and the field values that one specific record
 * in that table is expected to carry after load/export.
 */
private PersistenceExpectation[] fakeAgencyPersistenceExpectations = new PersistenceExpectation[]{
    new PersistenceExpectation(
        "agency",
        new RecordExpectation[]{
            new RecordExpectation("agency_id", "1"),
            new RecordExpectation("agency_name", "Fake Transit"),
            new RecordExpectation("agency_timezone", "America/Los_Angeles")
        }
    ),
    new PersistenceExpectation(
        "calendar",
        new RecordExpectation[]{
            new RecordExpectation(
                "service_id", "04100312-8fe1-46a5-a9f2-556f39478f57"
            ),
            // Service runs every day of the week within the date range below.
            new RecordExpectation("monday", 1),
            new RecordExpectation("tuesday", 1),
            new RecordExpectation("wednesday", 1),
            new RecordExpectation("thursday", 1),
            new RecordExpectation("friday", 1),
            new RecordExpectation("saturday", 1),
            new RecordExpectation("sunday", 1),
            new RecordExpectation("start_date", "20170915"),
            new RecordExpectation("end_date", "20170917")
        }
    ),
    new PersistenceExpectation(
        "calendar_dates",
        new RecordExpectation[]{
            new RecordExpectation(
                "service_id", "04100312-8fe1-46a5-a9f2-556f39478f57"
            ),
            new RecordExpectation("date", 20170916),
            new RecordExpectation("exception_type", 2)
        }
    ),
    new PersistenceExpectation(
        "fare_attributes",
        new RecordExpectation[]{
            new RecordExpectation("fare_id", "route_based_fare"),
            // Third argument is the accepted numeric delta (exact match here).
            new RecordExpectation("price", 1.23, 0),
            new RecordExpectation("currency_type", "USD")
        }
    ),
    new PersistenceExpectation(
        "fare_rules",
        new RecordExpectation[]{
            new RecordExpectation("fare_id", "route_based_fare"),
            new RecordExpectation("route_id", "1")
        }
    ),
    new PersistenceExpectation(
        "feed_info",
        new RecordExpectation[]{
            new RecordExpectation("feed_id", "fake_transit"),
            new RecordExpectation("feed_publisher_name", "Conveyal"),
            new RecordExpectation(
                // NOTE(review): this string literal appears truncated and
                // unterminated ("http:) — everything after "//" was likely
                // stripped as a comment by tooling. Restore the full URL from
                // the fake-agency feed_info.txt fixture before compiling.
                "feed_publisher_url", "http:
            ),
            new RecordExpectation("feed_lang", "en"),
            new RecordExpectation("feed_version", "1.0")
        }
    ),
    new PersistenceExpectation(
        "frequencies",
        new RecordExpectation[]{
            new RecordExpectation("trip_id", "frequency-trip"),
            // Times carry both seconds-since-midnight and HH:MM:SS forms.
            new RecordExpectation("start_time", 28800, "08:00:00"),
            new RecordExpectation("end_time", 32400, "09:00:00"),
            new RecordExpectation("headway_secs", 1800),
            new RecordExpectation("exact_times", 0)
        }
    ),
    new PersistenceExpectation(
        "routes",
        new RecordExpectation[]{
            new RecordExpectation("agency_id", "1"),
            new RecordExpectation("route_id", "1"),
            new RecordExpectation("route_short_name", "1"),
            new RecordExpectation("route_long_name", "Route 1"),
            new RecordExpectation("route_type", 3),
            new RecordExpectation("route_color", "7CE6E7")
        }
    ),
    new PersistenceExpectation(
        "shapes",
        new RecordExpectation[]{
            new RecordExpectation(
                "shape_id", "5820f377-f947-4728-ac29-ac0102cbc34e"
            ),
            // Coordinates and distances allow small deltas for round-tripping.
            new RecordExpectation("shape_pt_lat", 37.061172, 0.00001),
            new RecordExpectation("shape_pt_lon", -122.007500, 0.00001),
            new RecordExpectation("shape_pt_sequence", 2),
            new RecordExpectation("shape_dist_traveled", 7.4997067, 0.01)
        }
    ),
    new PersistenceExpectation(
        "stop_times",
        new RecordExpectation[]{
            new RecordExpectation(
                "trip_id", "a30277f8-e50a-4a85-9141-b1e0da9d429d"
            ),
            new RecordExpectation("arrival_time", 25200, "07:00:00"),
            new RecordExpectation("departure_time", 25200, "07:00:00"),
            new RecordExpectation("stop_id", "4u6g"),
            new RecordExpectation("stop_sequence", 1),
            new RecordExpectation("pickup_type", 0),
            new RecordExpectation("drop_off_type", 0),
            new RecordExpectation("shape_dist_traveled", 0.0, 0.01)
        }
    ),
    new PersistenceExpectation(
        "trips",
        new RecordExpectation[]{
            new RecordExpectation(
                "trip_id", "a30277f8-e50a-4a85-9141-b1e0da9d429d"
            ),
            new RecordExpectation(
                "service_id", "04100312-8fe1-46a5-a9f2-556f39478f57"
            ),
            new RecordExpectation("route_id", "1"),
            new RecordExpectation("direction_id", 0),
            new RecordExpectation(
                "shape_id", "5820f377-f947-4728-ac29-ac0102cbc34e"
            ),
            new RecordExpectation("bikes_allowed", 0),
            new RecordExpectation("wheelchair_accessible", 0)
        }
    )
};
/**
 * Returns a deep-copied set of persistence expectations in which every
 * stop_sequence record expectation is rewritten to expect the normalized
 * (zero-based) value 0. The input array and its elements are not modified;
 * record expectations are cloned before being changed.
 */
private PersistenceExpectation[] updatePersistenceExpectationsWithNormalizedStopTimesSequence(
    PersistenceExpectation[] inputExpectations
) {
    PersistenceExpectation[] updatedExpectations = new PersistenceExpectation[inputExpectations.length];
    for (int tableIdx = 0; tableIdx < inputExpectations.length; tableIdx++) {
        PersistenceExpectation source = inputExpectations[tableIdx];
        RecordExpectation[] clonedRecords = new RecordExpectation[source.recordExpectations.length];
        for (int recordIdx = 0; recordIdx < source.recordExpectations.length; recordIdx++) {
            RecordExpectation cloned = source.recordExpectations[recordIdx].clone();
            if (cloned.fieldName.equals("stop_sequence")) {
                // Normalized stop sequences are zero-based.
                cloned.intExpectation = 0;
            }
            clonedRecords[recordIdx] = cloned;
        }
        // Rebuild the table expectation around the cloned/adjusted records.
        updatedExpectations[tableIdx] = new PersistenceExpectation(
            source.tableName,
            clonedRecords,
            source.appliesToEditorDatabaseOnly
        );
    }
    return updatedExpectations;
}
}
|
package com.cronutils;
import com.cronutils.model.Cron;
import com.cronutils.model.CronType;
import com.cronutils.model.definition.CronDefinition;
import com.cronutils.model.definition.CronDefinitionBuilder;
import com.cronutils.model.time.ExecutionTime;
import com.cronutils.parser.CronParser;
import org.junit.Test;
import org.threeten.bp.ZonedDateTime;
import static org.junit.Assert.assertEquals;
public class Issue228Test {

    /**
     * Issue #228: the day-of-week field was not honored when computing the next
     * cron execution. Each case below pins the expected next run for a UNIX cron
     * expression combining a day-of-month range with a day-of-week constraint.
     * All cases share the same reference instant (Fri 2017-09-29 14:46 -07:00).
     * The tests are currently disabled (annotations commented out).
     */
    //@Test
    public void testFirstMondayOfTheMonthNextExecution() {
        // 9am on a Monday falling between the 1st and the 7th -> Mon Oct 2.
        assertNextExecution("0 9 1-7 * 1", "2017-10-02T09:00-07:00");
    }

    //@Test
    public void testEveryWeekdayFirstWeekOfMonthNextExecution() {
        // 9am Mon-Fri between the 1st and the 7th -> Mon Oct 2.
        assertNextExecution("0 9 1-7 * 1-5", "2017-10-02T09:00-07:00");
    }

    //@Test
    public void testEveryWeekendFirstWeekOfMonthNextExecution() {
        // 9am Sat/Sun between the 1st and the 7th -> Sun Oct 1.
        assertNextExecution("0 9 1-7 * 6-7", "2017-10-01T09:00-07:00");
    }

    //@Test
    public void testEveryWeekdaySecondWeekOfMonthNextExecution() {
        // 9am Mon-Fri between the 8th and the 14th -> Mon Oct 9.
        assertNextExecution("0 9 8-14 * 1-5", "2017-10-09T09:00-07:00");
    }

    //@Test
    public void testEveryWeekendForthWeekOfMonthNextExecution() {
        // 9am Sat/Sun between the 22nd and the 28th -> Sun Oct 22.
        assertNextExecution("0 9 22-28 * 6-7", "2017-10-22T09:00-07:00");
    }

    /**
     * Parses {@code cronExpression} as a UNIX cron and asserts that its next
     * execution after the shared reference instant equals {@code expectedTime}.
     */
    private void assertNextExecution(String cronExpression, String expectedTime) {
        CronDefinition cronDefinition = CronDefinitionBuilder.instanceDefinitionFor(CronType.UNIX);
        CronParser parser = new CronParser(cronDefinition);
        Cron cron = parser.parse(cronExpression);
        ZonedDateTime referenceTime = ZonedDateTime.parse("2017-09-29T14:46:01.166-07:00");
        assertEquals(
            ZonedDateTime.parse(expectedTime),
            ExecutionTime.forCron(cron).nextExecution(referenceTime).get()
        );
    }
}
|
//@@author A0147996E
package guitests;
import static org.junit.Assert.assertTrue;
import static seedu.address.logic.commands.FinishCommand.MESSAGE_FINISH_TASK_MARKED;
import static seedu.address.logic.commands.FinishCommand.MESSAGE_FINISH_TASK_SUCCESS;
import org.junit.Test;
import seedu.address.commons.core.Messages;
import seedu.address.commons.exceptions.IllegalValueException;
import seedu.address.testutil.TestTask;
import seedu.address.testutil.TestUtil;
/**
 * GUI tests for the finish command: marking tasks done by one-based index,
 * including recurring tasks (which roll forward to a new date), events,
 * already-finished tasks, out-of-bound indices and malformed commands.
 */
public class FinishCommandTest extends TaskManagerGuiTest {
    // Snapshot of the typical task list; mutated by assertFinishSuccess as
    // tasks are finished and removed.
    private TestTask[] currentList = td.getTypicalTasks();
    @Test
    public void finish_recurringTask_finishSuccess() throws IllegalValueException {
        commandBox.runCommand("clear");
        // Local list shadows the field on purpose: this test builds its own
        // two-task list from scratch.
        TestTask[] currentList = {};
        int targetIndex = 1;
        currentList = TestUtil.addTasksToList(currentList, tr.gym);
        commandBox.runCommand(tr.gym.getAddCommand());
        currentList = TestUtil.addTasksToList(currentList, tr.cs2103);
        commandBox.runCommand(tr.cs2103.getAddCommand());
        commandBox.runCommand("finish " + targetIndex);
        // Finishing a recurring task advances its date rather than removing it.
        currentList[targetIndex - 1].setDate("27/12/2017");
        targetIndex = 2;
        commandBox.runCommand("finish " + targetIndex);
        currentList[targetIndex - 1].setDate("01/03/2018");
        assertTrue(taskListPanel.isListMatching(currentList));
    }
    @Test
    public void finish_validIndexTest_finishSuccess() {
        // Exercise the first, middle and (shrinking) last index of the list.
        int targetIndex = 1;
        assertFinishSuccess(targetIndex);
        targetIndex = currentList.length / 2;
        assertFinishSuccess(targetIndex);
        targetIndex = currentList.length;
        assertFinishSuccess(targetIndex);
    }
    @Test
    public void finish_alreadyFinishedTask_finishUnsuccess() {
        int targetIndex = 1;
        commandBox.runCommand("finish " + targetIndex);
        // "list all" re-displays finished tasks so the same index is reachable.
        commandBox.runCommand("list all");
        commandBox.runCommand("finish " + targetIndex);
        assertResultMessage(MESSAGE_FINISH_TASK_MARKED);
    }
    @Test
    public void finish_outOfBoundInx_finishUnsuccess() {
        int maxIndex = currentList.length;
        commandBox.runCommand("finish " + (maxIndex + 1));
        assertResultMessage(Messages.MESSAGE_INVALID_TASK_DISPLAYED_INDEX);
    }
    @Test
    public void finish_invalidCommandFormat_unknownCommand() {
        commandBox.runCommand("finishes 1");
        assertResultMessage(Messages.MESSAGE_UNKNOWN_COMMAND);
    }
    @Test
    public void finish_validEvent_finishSuccess() {
        commandBox.runCommand("clear");
        TestTask[] currentList = {};
        int targetIndex = 1;
        currentList = TestUtil.addTasksToList(currentList, te.gym);
        commandBox.runCommand(te.gym.getAddCommand());
        currentList = TestUtil.addTasksToList(currentList, te.cs2103);
        commandBox.runCommand(te.cs2103.getAddCommand());
        commandBox.runCommand("finish " + targetIndex);
        currentList[targetIndex - 1].setFinished(true);
        // NOTE(review): targetIndex is reset to 1 — presumably because finishing
        // removes the event from the displayed list, shifting indices; confirm.
        targetIndex = 1;
        commandBox.runCommand("finish " + targetIndex);
        currentList[targetIndex - 1].setFinished(true);
        // NOTE(review): this no-arg call ignores the locally built currentList
        // (it asserts the displayed list matches an empty expectation). The
        // sibling recurring-task test passes currentList — verify intent.
        assertTrue(taskListPanel.isListMatching());
    }
    /**
     * Runs the finish command to finish the task at specified index and confirms the result is correct.
     * Mutates the {@code currentList} field by removing the finished task, then
     * checks the result message and that the displayed list matches.
     * @param targetIndexOneIndexed e.g. index 1 to finish the first task in the list
     */
    private void assertFinishSuccess(int targetIndexOneIndexed) {
        TestTask taskToFinish = currentList[targetIndexOneIndexed - 1]; // -1 as array uses zero indexing
        currentList = TestUtil.removeTaskFromList(currentList, targetIndexOneIndexed);
        commandBox.runCommand("finish " + targetIndexOneIndexed);
        assertResultMessage(String.format(MESSAGE_FINISH_TASK_SUCCESS, taskToFinish.getName()));
        commandBox.runCommand("list");
        assertTrue(taskListPanel.isListMatching(currentList));
    }
}
|
package innovimax.mixthem;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import innovimax.mixthem.arguments.ParamValue;
import innovimax.mixthem.arguments.Rule;
import innovimax.mixthem.arguments.RuleParam;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Stream;
/**
 * Provides different runs for testing a rule according to the additional parameters
 * @author Innovimax
 * @version 1.0
 */
public class RuleRuns {
    /**
     * Returns a list of test runs for the rule.
     * The first run always has an empty selection and no parameters; one extra
     * run is added per line of the parameter file that yields a non-empty
     * configuration. Each line is parsed as a standalone JSON object.
     * @param rule the rule under test, used to decide which parameters to read
     * @param url the URL of rule additional parameters file, or null for the default run only
     * @return Returns a list of test runs for the rule
     * @throws IOException if the parameter file cannot be opened or read
     */
    public static List<RuleRun> getRuns(final Rule rule, final URL url) throws FileNotFoundException, IOException, NumberFormatException {
        final List<RuleRun> runs = new LinkedList<>();
        runs.add(new RuleRun(1, Collections.emptySet(), Collections.emptyMap()));
        if (url != null) {
            final File file = new File(url.getFile());
            // try-with-resources: the original never closed the reader or the
            // stream, leaking the underlying file handle on every call.
            try (BufferedReader reader = Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8);
                 Stream<String> entries = reader.lines()) {
                entries.forEach(entry -> {
                    try {
                        final Set<Integer> selection = new LinkedHashSet<>();
                        final Map<RuleParam, ParamValue> params = new EnumMap<>(RuleParam.class);
                        final ObjectMapper jsonMapper = new ObjectMapper();
                        final JsonNode jsonParams = jsonMapper.readTree(entry);
                        System.out.println(">>> JSON=" + jsonMapper.writeValueAsString(jsonParams));
                        switch (rule) {
                            case ADD:
                                if (jsonParams.has("selection")) {
                                    //TODO: read the selection array into `selection`
                                }
                                break;
                            case RANDOM_ALT_LINE:
                            case RANDOM_ALT_CHAR:
                            case RANDOM_ALT_BYTE:
                                // Optional integer random seed.
                                if (jsonParams.has(RuleParam.RANDOM_SEED.getName())) {
                                    final JsonNode seed = jsonParams.get(RuleParam.RANDOM_SEED.getName());
                                    if (seed.isInt()) {
                                        params.put(RuleParam.RANDOM_SEED, ParamValue.createInt(seed.asInt()));
                                    }
                                }
                                break;
                            case JOIN:
                                // Optional join columns (textual form only for now).
                                if (jsonParams.has(RuleParam.JOIN_COLS.getName())) {
                                    final JsonNode cols = jsonParams.get(RuleParam.JOIN_COLS.getName());
                                    //TODO: get as array
                                    if (cols.isTextual()) {
                                        params.put(RuleParam.JOIN_COLS, RuleParam.JOIN_COLS.createValue(cols.asText()));
                                    }
                                }
                                break;
                            case ZIP_LINE:
                            case ZIP_CELL:
                            case ZIP_CHAR:
                                // Optional zip separator string. Explicit break added so a
                                // future case appended below cannot fall through silently.
                                if (jsonParams.has(RuleParam.ZIP_SEP.getName())) {
                                    final JsonNode sep = jsonParams.get(RuleParam.ZIP_SEP.getName());
                                    if (sep.isTextual()) {
                                        params.put(RuleParam.ZIP_SEP, ParamValue.createString(sep.asText()));
                                    }
                                }
                                break;
                        }
                        // Only lines that configured something produce an extra run.
                        if (!selection.isEmpty() || !params.isEmpty()) {
                            runs.add(new RuleRun(runs.size() + 1, selection, params));
                        }
                    } catch (IOException e) {
                        // Lambdas passed to Stream.forEach cannot throw checked
                        // exceptions; surface parse failures as unchecked, with cause.
                        throw new RuntimeException(e);
                    }
                });
            }
        }
        return runs;
    }
}
|
package jfdi.test.ui;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import java.io.File;
import java.io.IOException;
import java.util.concurrent.TimeoutException;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.testfx.api.FxToolkit;
import org.testfx.framework.junit.ApplicationTest;
import org.testfx.util.WaitForAsyncUtils;
import com.google.common.io.Files;
import javafx.fxml.FXMLLoader;
import javafx.scene.Node;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.Label;
import javafx.scene.control.ListView;
import javafx.scene.control.TextArea;
import javafx.scene.control.TextField;
import javafx.scene.input.KeyCode;
import javafx.scene.input.MouseButton;
import javafx.scene.layout.AnchorPane;
import javafx.scene.layout.BorderPane;
import javafx.scene.layout.VBox;
import javafx.stage.Stage;
import jfdi.ui.Constants;
import jfdi.ui.IUserInterface;
import jfdi.ui.MainController;
import jfdi.ui.UI;
import jfdi.ui.items.ListItem;
import jfdi.ui.items.StatsItem;
/**
 * TestFX base setup for the JFDI GUI: loads the root FXML layout and the list
 * view, wires the controller to the UI singleton, and exposes the main widgets
 * to subclasses/tests via {@link #find(String)}.
 */
public class TestMainSetUp extends ApplicationTest {
    /* The widgets of the GUI used for the tests. */
    Label dayDisplayer;
    ListView<StatsItem> statsDisplayer;
    Label listStatus;
    ListView<ListItem> listMain;
    TextArea fbArea;
    TextField cmdArea;
    VBox helpOverLay;
    Stage stage;
    Scene scene;
    Parent rootLayout;
    AnchorPane listLayout;
    MainController controller;
    /* This operation comes from ApplicationTest and loads the GUI to test. */
    // Order matters: the root layout must exist before initView() can insert
    // the list layout into its center.
    @Override
    public void start(Stage stage) throws Exception {
        this.stage = stage;
        this.stage.setTitle("JFDI");
        //loadFonts();
        initRootLayout();
        initView();
    }
    /**
     * Loads the root FXML, attaches web-font stylesheets, and shows the stage.
     */
    private void initRootLayout() throws IOException {
        rootLayout = FXMLLoader.load(getClass().getResource(Constants.URL_ROOT_PATH));
        // NOTE(review): this local shadows the `scene` field, which is never
        // assigned — confirm the field is actually needed.
        Scene scene = new Scene(rootLayout);
        stage.setScene(scene);
        scene.getStylesheets().add("https://fonts.googleapis.com/css?family=Hammersmith+One");
        scene.getStylesheets().add("https://fonts.googleapis.com/css?family=Titillium+Web:200");
        stage.show();
        stage.setResizable(false);
        /* Put the GUI in front of windows. So that the robots will not interact with another
        window */
        stage.toFront();
    }
    /**
     * Loads the list view FXML, initializes its controller, links it with the
     * UI singleton, points storage at a fresh temp directory, and displays the
     * initial (incomplete-tasks) list plus the welcome message.
     */
    private void initView() throws IOException, InterruptedException {
        IUserInterface ui = UI.getInstance();
        // Load View
        FXMLLoader loader = new FXMLLoader();
        loader.setLocation(getClass().getResource(Constants.URL_LIST_PATH));
        listLayout = (AnchorPane) loader.load();
        // Initialize Controller
        controller = loader.getController();
        controller.initialize();
        // Link UI with Controller
        ui.setController(controller);
        ui.init();
        ((BorderPane) rootLayout).setCenter(listLayout);
        controller.setStage(stage);
        // Link Controller with UI, MainSetUp and CommandHandler
        controller.setUi(ui);
        controller.importantList.removeAll(controller.importantList);
        // Isolate each test run's storage in a throwaway temp directory.
        File tempDir = Files.createTempDir();
        controller.displayList("use " + tempDir.getPath());
        controller.displayList(Constants.CTRL_CMD_INCOMPLETE);
        ui.displayWelcome();
    }
    /* Just a shortcut to retrieve widgets in the GUI. */
    public <T extends Node> T find(final String query) {
        /** TestFX provides many operations to retrieve elements from the loaded GUI. */
        return lookup(query).queryFirst();
    }
    @Before
    public void setUp() {
        /* Retrieve the tested widgets from the GUI. */
        dayDisplayer = find("#dayDisplayer");
        listStatus = find("#listStatus");
        listMain = find("#listMain");
        fbArea = find("#fbArea");
        cmdArea = find("#cmdArea");
        helpOverLay = find("#helpOverLay");
    }
    /* To clear the ongoing events */
    @After
    public void tearDown() throws TimeoutException {
        /* Close the window. It will be re-opened at the next test. */
        FxToolkit.hideStage();
        // Release any keys/buttons still held by the TestFX robot.
        release(new KeyCode[] {});
        release(new MouseButton[] {});
    }
    /**
     * Sanity check that every widget looked up in setUp() was found in the scene.
     */
    @Test
    public void testWidgetsExist() {
        final String errMsg = " %s cannot be retrieved!";
        assertNotNull(String.format(errMsg, "dayDisplayer"), dayDisplayer);
        assertNotNull(String.format(errMsg, "listStatus"), listStatus);
        assertNotNull(String.format(errMsg, "listMain"), listMain);
        assertNotNull(String.format(errMsg, "fbArea"), fbArea);
        assertNotNull(String.format(errMsg, "cmdArea"), cmdArea);
        //assertNotNull(String.format(errMsg, "helpOverLay"), helpOverLay);
    }
    /*
     * Test a simple "add hello" command and check if the feedback displayed matches the expected lines.
     */
    @Test
    public void testAddTask() {
        // Types "add hello" key by key (leading BACK_SPACE clears any prompt char).
        clickOn(cmdArea).type(KeyCode.BACK_SPACE).type(KeyCode.A).type(KeyCode.D).type(KeyCode.D).type(KeyCode.SPACE)
                .type(KeyCode.H).type(KeyCode.E).type(KeyCode.L).type(KeyCode.L).type(KeyCode.O).type(KeyCode.ENTER);
        WaitForAsyncUtils.waitForFxEvents();
        assertEquals("The feedback message does not match the intended result.",
                fbArea.getText(), "\nJ.F.D.I. : " + String.format(
                        Constants.CMD_SUCCESS_ADDED, controller.importantList.size(), "hello"));
    }
}
|
package spark;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static spark.Spark.after;
import static spark.Spark.before;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.ProtocolException;
import java.net.URL;
import java.util.List;
import java.util.Map;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import spark.examples.books.Books;
import spark.utils.IOUtils;
/**
 * End-to-end tests for the Books example service: starts the embedded Spark
 * server once per class, then exercises create/list/get/update/delete over
 * plain HTTP, including before/after filter headers.
 */
public class BooksIntegrationTest {
    private static final int PORT = 4567;
    private static final String AUTHOR = "FOO";
    private static final String TITLE = "BAR";
    private static final String NEW_TITLE = "SPARK";
    // Id of the book created by the current test, as returned by the POST body.
    private String bookId;
    @AfterClass
    public static void tearDown() {
        Spark.stop();
    }
    @After
    public void clearBooks() {
        // Reset shared in-memory state so tests stay independent.
        Books.books.clear();
    }
    @BeforeClass
    public static void setup() {
        // Filters must be registered before routes to apply to them.
        before((request, response) -> {
            response.header("FOZ", "BAZ");
        });
        Books.main(null);
        after((request, response) -> {
            response.header("FOO", "BAR");
        });
        // Give the embedded server a moment to bind before the first request.
        try {
            Thread.sleep(500);
        } catch (InterruptedException e) {
            // Previously swallowed by an empty catch; restore the interrupt
            // status so callers can observe the interruption.
            Thread.currentThread().interrupt();
        }
    }
    @Test
    public void canCreateBook() {
        UrlResponse response = createBookViaPOST();
        assertNotNull(response);
        assertNotNull(response.body);
        // parseInt avoids the boxed-Integer comparison Integer.valueOf produced.
        assertTrue(Integer.parseInt(response.body) > 0);
        assertEquals(201, response.status);
    }
    @Test
    public void canListBooks() {
        bookId = createBookViaPOST().body.trim();
        UrlResponse response = doMethod("GET", "/books", null);
        assertNotNull(response);
        String body = response.body.trim();
        assertNotNull(body);
        assertTrue(Integer.parseInt(body) > 0);
        assertEquals(200, response.status);
        assertTrue(response.body.contains(bookId));
    }
    @Test
    public void canGetBook() {
        bookId = createBookViaPOST().body.trim();
        UrlResponse response = doMethod("GET", "/books/" + bookId, null);
        String result = response.body;
        assertNotNull(response);
        assertNotNull(response.body);
        assertEquals(200, response.status);
        assertTrue(result.contains(AUTHOR));
        assertTrue(result.contains(TITLE));
        assertTrue(beforeFilterIsSet(response));
        assertTrue(afterFilterIsSet(response));
    }
    @Test
    public void canUpdateBook() {
        bookId = createBookViaPOST().body.trim();
        UrlResponse response = updateBook();
        String result = response.body;
        assertNotNull(response);
        assertNotNull(response.body);
        assertEquals(200, response.status);
        assertTrue(result.contains(bookId));
        assertTrue(result.contains("updated"));
    }
    @Test
    public void canGetUpdatedBook() {
        bookId = createBookViaPOST().body.trim();
        updateBook();
        UrlResponse response = doMethod("GET", "/books/" + bookId, null);
        String result = response.body;
        assertNotNull(response);
        assertNotNull(response.body);
        assertEquals(200, response.status);
        assertTrue(result.contains(AUTHOR));
        assertTrue(result.contains(NEW_TITLE));
    }
    @Test
    public void canDeleteBook() {
        bookId = createBookViaPOST().body.trim();
        UrlResponse response = doMethod("DELETE", "/books/" + bookId, null);
        String result = response.body;
        assertNotNull(response);
        assertNotNull(response.body);
        assertEquals(200, response.status);
        assertTrue(result.contains(bookId));
        assertTrue(result.contains("deleted"));
    }
    @Test(expected = FileNotFoundException.class)
    public void wontFindBook() throws IOException {
        // bookId is null here, so /books/null yields a 404 which
        // HttpURLConnection surfaces as FileNotFoundException.
        getResponse("GET", "/books/" + bookId, null);
    }
    /**
     * Performs an HTTP request against the running server and wraps the result.
     * @param requestMethod HTTP verb, e.g. "GET"
     * @param path request path relative to the server root
     * @param body unused for now; requests are sent without a body
     */
    private static UrlResponse doMethod(String requestMethod, String path, String body) {
        UrlResponse response = new UrlResponse();
        try {
            getResponse(requestMethod, path, response);
        } catch (IOException e) {
            // Fail loudly instead of printing a stack trace and returning a
            // half-empty response that causes confusing NPE-style failures later.
            throw new RuntimeException("Request failed: " + requestMethod + " " + path, e);
        }
        return response;
    }
    /**
     * Issues the request and fills {@code response} with body, status and headers.
     */
    private static void getResponse(String requestMethod, String path, UrlResponse response)
            throws MalformedURLException, IOException, ProtocolException {
        URL url = new URL("http://localhost:" + PORT + path);
        HttpURLConnection connection = (HttpURLConnection) url.openConnection();
        connection.setRequestMethod(requestMethod);
        connection.connect();
        String res = IOUtils.toString(connection.getInputStream());
        response.body = res;
        response.status = connection.getResponseCode();
        response.headers = connection.getHeaderFields();
    }
    /** Simple value holder for an HTTP response. */
    private static class UrlResponse {
        public Map<String, List<String>> headers;
        private String body;
        private int status;
    }
    private UrlResponse createBookViaPOST() {
        return doMethod("POST", "/books?author=" + AUTHOR + "&title=" + TITLE, null);
    }
    private UrlResponse updateBook() {
        return doMethod("PUT", "/books/" + bookId + "?title=" + NEW_TITLE, null);
    }
    private boolean afterFilterIsSet(UrlResponse response) {
        return response.headers.get("FOO").get(0).equals("BAR");
    }
    private boolean beforeFilterIsSet(UrlResponse response) {
        return response.headers.get("FOZ").get(0).equals("BAZ");
    }
}
|
package uk.ac.ebi.atlas.experimentpage.qc;
import org.apache.commons.lang3.tuple.Pair;
import uk.ac.ebi.atlas.commons.readers.TsvReader;
import uk.ac.ebi.atlas.experimentpage.differential.download.CanStreamSupplier;
import uk.ac.ebi.atlas.model.download.ExternallyAvailableContent;
import uk.ac.ebi.atlas.model.experiment.differential.DifferentialExperiment;
import uk.ac.ebi.atlas.model.resource.AtlasResource;
import uk.ac.ebi.atlas.resource.DataFileHub;
import javax.annotation.Nullable;
import javax.inject.Inject;
import javax.inject.Named;
import java.io.Writer;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
@Named
public class RnaSeqQCReport extends CanStreamSupplier<DifferentialExperiment> {

    private final DataFileHub dataFileHub;

    @Inject
    public RnaSeqQCReport(DataFileHub dataFileHub) {
        this.dataFileHub = dataFileHub;
    }

    /** QC reports are offered as supplementary information on the experiment page. */
    @Override
    public ExternallyAvailableContent.ContentType contentType() {
        return ExternallyAvailableContent.ContentType.SUPPLEMENTARY_INFORMATION;
    }

    /**
     * Builds the downloadable QC report content for an experiment: a single
     * streamed file when exactly one QC document exists, a streamed folder when
     * there are several, and no content at all when the QC folder is empty.
     */
    @Override
    public Collection<ExternallyAvailableContent> get(DifferentialExperiment experiment) {
        List<Pair<String, Function<Writer, Void>>> documents =
                new RnaSeqQCFiles(dataFileHub.getExperimentFiles(experiment.getAccession()).qcFolder)
                        .get()
                        .entrySet()
                        .stream()
                        .map(entry -> Pair.<String, Function<Writer, Void>>of(
                                entry.getKey(),
                                readFromResourceAndWriteTsv(entry.getValue(), Function.identity())
                        ))
                        .collect(Collectors.toList());
        if (documents.isEmpty()) {
            return Collections.emptySet();
        }
        return Collections.singleton(new ExternallyAvailableContent(
                makeUri("qc"),
                ExternallyAvailableContent.Description.create("link",
                        "Quality check report (generated by iRAP)"
                ),
                documents.size() == 1
                        ? streamFile(documents.get(0))
                        : streamFolder(experiment.getAccession() + "-qc", documents)
        ));
    }
}
|
package ru.stqa.selenium.example;
import org.junit.Test;
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;
import java.util.StringTokenizer;
public class correctProduct extends TestBase {
@Test
public void correctProduct() {
driver.get("http://localhost/litecart/en/");
WebElement campaignsBlock = driver.findElement(By.id("box-campaigns"));
String nameOfProduct1 = campaignsBlock.findElement(By.className("name")).getText();
WebElement prices = campaignsBlock.findElement(By.className("price-wrapper"));
WebElement regularPrice = prices.findElement(By.className("regular-price"));
WebElement campaignPrice = prices.findElement(By.className("campaign-price"));
String regularPrice1 = regularPrice.getText();
String campaignPrice1 = campaignPrice.getText();
String colorOfRegularPrice1 = regularPrice.getCssValue("color");
String s1 = colorOfRegularPrice1.substring(colorOfRegularPrice1.indexOf("(")+1);
StringTokenizer stringTokenizer1 = new StringTokenizer(s1);
int r = Integer.parseInt(stringTokenizer1.nextToken(",").trim());
int g = Integer.parseInt(stringTokenizer1.nextToken(",").trim());
int b = Integer.parseInt(stringTokenizer1.nextToken(",)").trim());
if ((r == g) && (g == b))
s1 = "Color of regular price in the mainpage is gray";
else
s1 = "Color of regular price in the mainpage isn't gray";
String textDecorOfRegularPrice1 = regularPrice.getCssValue("text-decoration-line");
String fontSizeOfRegularPrice1 = regularPrice.getCssValue("font-size");
StringTokenizer fr1 = new StringTokenizer(fontSizeOfRegularPrice1);
float sizeOfRegularPrice1 = Float.parseFloat(fr1.nextToken("p").trim());
String colorOfCampaignPrice1 = campaignPrice.getCssValue("color");
String s2 = colorOfCampaignPrice1.substring(colorOfCampaignPrice1.indexOf("(")+1);
StringTokenizer stringTokenizer2 = new StringTokenizer(s2);
r = Integer.parseInt(stringTokenizer2.nextToken(",").trim());
g = Integer.parseInt(stringTokenizer2.nextToken(",").trim());
b = Integer.parseInt(stringTokenizer2.nextToken(",)").trim());
if ((r != 0) && (g == 0) && (b == 0)) {
s2 = "Color of campaign price in the mainpage is red";
}
else
s2 = "Color of campaign price in the mainpage isn't red";
String textDecorOfCampaignPrice1 = campaignPrice.getCssValue("font-weight");
String fontSizeOfCampaignPrice1 = campaignPrice.getCssValue("font-size");
StringTokenizer fc1 = new StringTokenizer(fontSizeOfCampaignPrice1);
float sizeOfCampaignPrice1 = Float.parseFloat(fc1.nextToken("p").trim());
campaignsBlock.findElement(By.className("link")).click();
campaignsBlock = driver.findElement(By.id("box-product"));
String nameOfProduct2 = campaignsBlock.findElement(By.className("title")).getText();
prices = campaignsBlock.findElement(By.className("price-wrapper"));
regularPrice = prices.findElement(By.className("regular-price"));
campaignPrice = prices.findElement(By.className("campaign-price"));
String regularPrice2 = regularPrice.getText();
String campaignPrice2 = campaignPrice.getText();
String colorOfRegularPrice2 = regularPrice.getCssValue("color");
String s3 = colorOfRegularPrice2.substring(colorOfRegularPrice2.indexOf("(")+1);
StringTokenizer stringTokenizer3 = new StringTokenizer(s3);
r = Integer.parseInt(stringTokenizer3.nextToken(",").trim());
g = Integer.parseInt(stringTokenizer3.nextToken(",").trim());
b = Integer.parseInt(stringTokenizer3.nextToken(",)").trim());
if ((r == g) && (g == b))
s3 = "Color of regular price in the productpage is gray";
else
s3 = "Color of regular price in the productpage isn't gray";
String textDecorOfRegularPrice2 = regularPrice.getCssValue("text-decoration-line");
String fontSizeOfRegularPrice2 = regularPrice.getCssValue("font-size");
StringTokenizer fr2 = new StringTokenizer(fontSizeOfRegularPrice2);
float sizeOfRegularPrice2 = Float.parseFloat(fr2.nextToken("p").trim());
String colorOfCampaignPrice2 = campaignPrice.getCssValue("color");
String s4 = colorOfCampaignPrice2.substring(colorOfCampaignPrice2.indexOf("(")+1);
StringTokenizer stringTokenizer4 = new StringTokenizer(s4);
r = Integer.parseInt(stringTokenizer4.nextToken(",").trim());
g = Integer.parseInt(stringTokenizer4.nextToken(",").trim());
b = Integer.parseInt(stringTokenizer4.nextToken(",)").trim());
if ((r != 0) && (g == 0) && (b == 0)) {
s4 = "Color of campaign price in the productpage is red";
}
else
s4 = "Color of campaign price in the productpage isn't red";
String textDecorOfCampaignPrice2 = campaignPrice.getCssValue("font-weight");
String fontSizeOfCampaignPrice2 = campaignPrice.getCssValue("font-size");
StringTokenizer fc2 = new StringTokenizer(fontSizeOfCampaignPrice2);
float sizeOfCampaignPrice2 = Float.parseFloat(fc2.nextToken("p").trim());
System.out.println("
System.out.println(nameOfProduct1 + " = " + nameOfProduct2);
System.out.println("
System.out.println("
System.out.println(regularPrice1 + " = " + regularPrice2);
System.out.println(campaignPrice1 + " = " + campaignPrice2);
System.out.println("
System.out.println("
System.out.println("Text decoration of regular price in the mainpage: " + textDecorOfRegularPrice1);
System.out.println(s1);
System.out.println("
System.out.println("Text decoration of regular price in the productpage: " + textDecorOfRegularPrice2);
System.out.println(s3);
System.out.println("
System.out.println("
System.out.println("Text decoration of regular price in the mainpage: " + textDecorOfCampaignPrice1);
System.out.println(s2);
System.out.println("
System.out.println("Text decoration of regular price in the productpage: " + textDecorOfCampaignPrice2);
System.out.println(s4);
System.out.println("
System.out.println("
System.out.println("Font size of regular price in the mainpage: " + fontSizeOfRegularPrice1);
System.out.println("Font size of campaign price in the mainpage: " + fontSizeOfCampaignPrice1);
if (sizeOfRegularPrice1 < sizeOfCampaignPrice1)
System.out.println("Font-size of the campaign price is bigger than the regular price in the mainpage");
else
System.out.println("Font-size of the regular price is bigger than the campaign price in the mainpage");
System.out.println("
System.out.println("Font size of regular price in the productpage: " + fontSizeOfRegularPrice2);
System.out.println("Font size of campaign price in the productpage: " + fontSizeOfCampaignPrice2);
if (sizeOfRegularPrice2 < sizeOfCampaignPrice2) {
System.out.println("Font-size of the campaign price is bigger than the regular price in the productpage");
}
else
System.out.println("Font-size of the regular price is bigger than the campaign price in the productpage");
System.out.println("
}
}
|
package io.spine.code.java;
import com.google.errorprone.annotations.Immutable;
import com.google.protobuf.Descriptors.Descriptor;
import com.google.protobuf.Descriptors.EnumDescriptor;
import com.google.protobuf.Descriptors.FileDescriptor;
import com.google.protobuf.Descriptors.ServiceDescriptor;
import io.spine.annotation.Internal;
import io.spine.code.proto.OneofDeclaration;
import io.spine.value.StringTypeValue;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.checkerframework.checker.signature.qual.FullyQualifiedName;
import java.util.Deque;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.Lists.newLinkedList;
import static io.spine.code.java.SimpleClassName.OR_BUILDER_SUFFIX;
import static io.spine.util.Preconditions2.checkNotEmptyOrBlank;
/**
 * A value object holding a fully-qualified Java class name.
 */
@SuppressWarnings("ClassWithTooManyMethods")
@Immutable
public final class ClassName extends StringTypeValue {

    private static final long serialVersionUID = 0L;

    /**
     * Separates nested class name from the name of the outer class in a fully-qualified name.
     */
    public static final char OUTER_CLASS_DELIMITER = '$';

    /**
     * Separates class name from package, and outer class name with nested when such a class is
     * referenced as a parameter.
     */
    static final char DOT_SEPARATOR = '.';

    /** Suffix of the gRPC stub class generated for a Protobuf service. */
    private static final String GRPC_POSTFIX = "Grpc";

    private ClassName(String value) {
        super(checkNotNull(value));
    }

    // Class<?> instead of the raw type: avoids unchecked warnings at call sites.
    private ClassName(Class<?> cls) {
        this(cls.getName());
    }

    /**
     * Creates a new instance with the name of the passed class.
     *
     * @param cls
     *         the class to get name from
     * @return new instance
     */
    public static ClassName of(Class<?> cls) {
        return new ClassName(checkNotNull(cls));
    }

    /**
     * Creates a new instance with the passed class name value.
     *
     * @param className
     *         a fully-qualified Java class name
     * @return new instance
     */
    public static ClassName of(@FullyQualifiedName String className) {
        checkNotEmptyOrBlank(className);
        return new ClassName(className);
    }

    /**
     * Creates a class name from the specified package and the simple class name.
     *
     * @param packageName
     *         the name of the class package
     * @param simpleClassName
     *         the simple name of the class
     * @return a new instance
     */
    public static ClassName of(PackageName packageName, SimpleClassName simpleClassName) {
        checkNotNull(packageName);
        checkNotNull(simpleClassName);
        return new ClassName(packageName.value() + DOT_SEPARATOR + simpleClassName);
    }

    /**
     * Creates an instance of {@code ClassName} from the given Protobuf message type descriptor.
     *
     * <p>The resulting class name is the name of the Java class which represents the given Protobuf
     * type.
     *
     * @param messageType
     *         the Protobuf message type descriptor
     * @return new instance of {@code ClassName}
     */
    public static ClassName from(Descriptor messageType) {
        checkNotNull(messageType);
        return construct(messageType.getFile(), messageType.getName(),
                         messageType.getContainingType());
    }

    /**
     * Obtains a {@code ClassName} for the outer class of the given Protobuf file.
     *
     * @param file
     *         the file from which the outer class is generated
     * @return new instance of {@code ClassName}
     */
    public static ClassName outerClass(FileDescriptor file) {
        PackageName packageName = PackageName.resolve(file.toProto());
        SimpleClassName simpleName = SimpleClassName.outerOf(file);
        return of(packageName, simpleName);
    }

    /**
     * Creates an instance of {@code ClassName} from the given Protobuf enum type descriptor.
     *
     * <p>The resulting class name is the name of the Java enum which represents the given Protobuf
     * type.
     *
     * @param enumType
     *         the Protobuf enum type descriptor
     * @return new instance of {@code ClassName}
     */
    public static ClassName from(EnumDescriptor enumType) {
        checkNotNull(enumType); // fail fast, consistent with from(Descriptor)
        return construct(enumType.getFile(), enumType.getName(),
                         enumType.getContainingType());
    }

    /**
     * Creates an instance of {@code ClassName} from the given Protobuf service descriptor.
     *
     * <p>The resulting class name is the name of the Java gRPC stub class which is generated from
     * the given service type.
     *
     * @param serviceType
     *         the gRPC service descriptor
     * @return new instance of {@code ClassName}
     */
    public static ClassName from(ServiceDescriptor serviceType) {
        checkNotNull(serviceType); // fail fast, consistent with from(Descriptor)
        return construct(serviceType.getFile(), serviceType.getName() + GRPC_POSTFIX, null);
    }

    /** Obtains the Java package of the file followed by the dot separator. */
    private static String javaPackageName(FileDescriptor file) {
        PackageName packageName = PackageName.resolve(file.toProto());
        return packageName.value() + DOT_SEPARATOR;
    }

    /**
     * Obtains outer class prefix, if the file has {@code java_multiple_files} set to {@code false}.
     * If the option is set, returns an empty string.
     */
    private static String outerClassPrefix(FileDescriptor file) {
        checkNotNull(file);
        boolean multipleFiles = file.getOptions()
                                    .getJavaMultipleFiles();
        if (multipleFiles) {
            return "";
        } else {
            String className = SimpleClassName.outerOf(file)
                                              .value();
            return className + OUTER_CLASS_DELIMITER;
        }
    }

    /**
     * Obtains prefix for a type which is enclosed into the passed message.
     * If null value is passed, returns an empty string.
     */
    private static String containingClassPrefix(@Nullable Descriptor containingMessage) {
        if (containingMessage == null) {
            return "";
        }
        // Walk up the nesting chain, prepending each enclosing type name.
        Deque<String> parentClassNames = newLinkedList();
        Descriptor current = containingMessage;
        while (current != null) {
            parentClassNames.addFirst(current.getName() + OUTER_CLASS_DELIMITER);
            current = current.getContainingType();
        }
        String result = String.join("", parentClassNames);
        return result;
    }

    /**
     * Obtains the name of a nested class.
     *
     * @param className
     *         the name of the nested class to get
     * @return the nested class name
     */
    public ClassName withNested(SimpleClassName className) {
        checkNotNull(className);
        return of(value() + OUTER_CLASS_DELIMITER + className);
    }

    /**
     * Converts the name which may be a nested class name with {@link #OUTER_CLASS_DELIMITER}
     * to the name separated with dots.
     */
    public ClassName toDotted() {
        String withDots = toDotted(value());
        return of(withDots);
    }

    /**
     * Replaces {@link #OUTER_CLASS_DELIMITER} with {@link #DOT_SEPARATOR}.
     */
    @Internal
    public static String toDotted(String outerDelimited) {
        String result = outerDelimited.replace(OUTER_CLASS_DELIMITER, DOT_SEPARATOR);
        return result;
    }

    /**
     * Obtains the name of the {@link com.google.protobuf.MessageOrBuilder} interface for this
     * message class.
     *
     * <p>If this class name is {@code com.acme.cms.Customer}, the resulting class name would be
     * {@code com.acme.cms.CustomerOrBuilder}.
     *
     * <p>If this class name is {@linkplain #toDotted() dotted}, then the resulting name is dotted.
     *
     * @return {@code MessageOrBuilder} interface FQN
     */
    public ClassName orBuilder() {
        return of(value() + OR_BUILDER_SUFFIX);
    }

    /**
     * Obtains the name of an enum which represents cases of a {@code oneof} field.
     *
     * <p>Such an enum should be nested in this class. The name of the {@code oneof} field is
     * obtained from the given {@link OneofDeclaration}.
     *
     * <p>If this class name is {@code com.acme.cms.Customer} and the {@code oneof} name is
     * {@code auth_provider}, the resulting class name would be
     * {@code com.acme.cms.Customer.AuthProviderCase}.
     *
     * <p>The resulting class name is always {@linkplain #toDotted() dotted}.
     *
     * @param oneof
     *         the declaration of the {@code oneof} field
     * @return the case enum FQN
     */
    public ClassName oneofCaseEnum(OneofDeclaration oneof) {
        ClassName dotted = this.toDotted();
        FieldName oneofName = FieldName.from(oneof.name());
        String enumName = String.format("%s.%sCase", dotted.value(), oneofName.capitalize());
        return of(enumName);
    }

    /** Assembles a class name from package, outer-class prefix, enclosing types, and type name. */
    private static ClassName construct(FileDescriptor file,
                                       String typeName,
                                       @Nullable Descriptor enclosing) {
        String packageName = javaPackageName(file);
        String outerClass = outerClassPrefix(file);
        String enclosingTypes = containingClassPrefix(enclosing);
        String result = packageName + outerClass + enclosingTypes + typeName;
        return of(result);
    }

    /**
     * Converts fully-qualified name to simple name. If the class is nested inside one or more
     * classes, the most nested name will be returned.
     */
    public SimpleClassName toSimple() {
        String fullName = toDotted().value();
        String result = afterDot(fullName);
        return SimpleClassName.create(result);
    }

    /** Obtains the part of the name after the last dot (the whole name if there is no dot). */
    static String afterDot(String fullName) {
        int lastDotIndex = fullName.lastIndexOf(DOT_SEPARATOR);
        return fullName.substring(lastDotIndex + 1);
    }

    /**
     * Converts a possibly nested class name into a nested name.
     *
     * <p>If the class is not nested, the returned value would be equivalent to a simple class name.
     */
    public NestedClassName toNested() {
        return NestedClassName.create(this);
    }

    /**
     * Resolves the file which contains the declaration of the associated class.
     *
     * <p>The resulting {@code SourceFile} represents a <strong>relative</strong> path to the Java
     * file starting at the top level package.
     *
     * <p>In the simplest case, the file name is the same as the simple class name. However, if
     * the class is nested, then the file name coincides with the simple name of the top-level
     * class.
     *
     * @return the file in which the Java class is declared
     */
    public SourceFile resolveFile() {
        Directory directory = getPackage().toDirectory();
        SimpleClassName topLevelClass = topLevelClass();
        FileName javaFile = FileName.forType(topLevelClass.value());
        SourceFile sourceFile = directory.resolve(javaFile);
        return sourceFile;
    }

    /** Obtains the package part of this name; the name must be qualified. */
    private PackageName getPackage() {
        String fullName = value();
        int lastDotIndex = fullName.lastIndexOf(DOT_SEPARATOR);
        checkArgument(lastDotIndex > 0, "%s should be qualified.", fullName);
        String result = fullName.substring(0, lastDotIndex);
        return PackageName.of(result);
    }

    /** Obtains the simple name of the top-level class enclosing this (possibly nested) class. */
    private SimpleClassName topLevelClass() {
        String qualifiedClassName = afterDot(value());
        int delimiterIndex = qualifiedClassName.indexOf(OUTER_CLASS_DELIMITER);
        String topLevelClassName = delimiterIndex >= 0
                                   ? qualifiedClassName.substring(0, delimiterIndex)
                                   : qualifiedClassName;
        return SimpleClassName.create(topLevelClassName);
    }
}
|
package oap.http;
import com.google.common.io.ByteStreams;
import lombok.SneakyThrows;
import lombok.ToString;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import oap.concurrent.AsyncCallbacks;
import oap.io.Closeables;
import oap.io.Files;
import oap.io.IoStreams;
import oap.json.Binder;
import oap.reflect.TypeRef;
import oap.util.Maps;
import oap.util.Pair;
import oap.util.Stream;
import oap.util.Try;
import okhttp3.MediaType;
import okhttp3.MultipartBody;
import okhttp3.OkHttpClient;
import okhttp3.RequestBody;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.config.CookieSpecs;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.client.utils.DateUtils;
import org.apache.http.concurrent.FutureCallback;
import org.apache.http.config.RegistryBuilder;
import org.apache.http.conn.ssl.DefaultHostnameVerifier;
import org.apache.http.conn.util.PublicSuffixMatcherLoader;
import org.apache.http.cookie.Cookie;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.InputStreamEntity;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.BasicCookieStore;
import org.apache.http.impl.client.DefaultConnectionKeepAliveStrategy;
import org.apache.http.impl.nio.client.CloseableHttpAsyncClient;
import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
import org.apache.http.impl.nio.client.HttpAsyncClients;
import org.apache.http.impl.nio.conn.PoolingNHttpClientConnectionManager;
import org.apache.http.impl.nio.reactor.DefaultConnectingIOReactor;
import org.apache.http.impl.nio.reactor.IOReactorConfig;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.nio.conn.NoopIOSessionStrategy;
import org.apache.http.nio.conn.SchemeIOSessionStrategy;
import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy;
import org.apache.http.nio.reactor.IOReactorException;
import org.apache.http.ssl.SSLContexts;
import javax.annotation.Nullable;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManagerFactory;
import java.io.BufferedInputStream;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.security.KeyStore;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeoutException;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import static java.net.HttpURLConnection.HTTP_MOVED_TEMP;
import static java.net.HttpURLConnection.HTTP_NOT_MODIFIED;
import static java.net.HttpURLConnection.HTTP_OK;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static oap.io.IoStreams.Encoding.PLAIN;
import static oap.io.ProgressInputStream.progress;
import static oap.util.Maps.Collectors.toMap;
import static oap.util.Pair.__;
import static org.apache.commons.lang3.StringUtils.split;
import static org.apache.http.entity.ContentType.APPLICATION_OCTET_STREAM;
@Slf4j
public class Client implements Closeable {
// Shared default client: errors are logged (with stack trace) and timeouts are logged,
// instead of being propagated through the async callbacks.
public static final Client DEFAULT = custom()
.onError( ( c, e ) -> log.error( e.getMessage(), e ) )
.onTimeout( ( c ) -> log.error( "timeout" ) )
.build();
// Sentinel timeout value meaning "wait indefinitely" in execute()/get()/post() overloads.
private static final long FOREVER = Long.MAX_VALUE;
// No-op callback: the code waits on the returned Future instead of reacting to these events.
private static final FutureCallback<org.apache.http.HttpResponse> FUTURE_CALLBACK = new FutureCallback<org.apache.http.HttpResponse>() {
@Override
public void completed( org.apache.http.HttpResponse result ) {
}
@Override
public void failed( Exception e ) {
}
@Override
public void cancelled() {
}
};
// Cookie store shared with the underlying async client; exposed via getCookies()/clearCookies().
private final BasicCookieStore basicCookieStore;
// Kept so reset() can rebuild the async client with the same configuration.
private ClientBuilder builder;
// The underlying async HTTP client; replaced on reset(), closed on close().
private CloseableHttpAsyncClient client;
/**
 * Creates a client over the given cookie store, building the underlying async client
 * from the configuration captured in {@code builder}. The builder is retained so that
 * {@link #reset()} can rebuild the client later.
 */
private Client( BasicCookieStore basicCookieStore, ClientBuilder builder ) {
this.client = builder.client();
this.basicCookieStore = basicCookieStore;
this.builder = builder;
}
/**
 * Starts building a client that trusts the certificates from the given JKS keystore
 * and uses the given connect/read timeouts (milliseconds).
 */
public static ClientBuilder custom( Path certificateLocation, String certificatePassword, int connectTimeout, int readTimeout ) {
return new ClientBuilder( certificateLocation, certificatePassword, connectTimeout, readTimeout );
}
/**
 * Starts building a client with the default SSL context and zero timeouts.
 * NOTE(review): the semantics of 0 are defined by the underlying I/O reactor — verify.
 */
public static ClientBuilder custom() {
return new ClientBuilder( null, null, 0, 0 );
}
/**
 * Copies all response headers into a name-to-value map.
 * NOTE(review): duplicate header names collapse to a single entry; the merge order is
 * defined by oap's toMap collector — confirm if multi-valued headers matter.
 */
private static Map<String, String> headers( org.apache.http.HttpResponse response ) {
return Arrays.stream( response.getAllHeaders() )
.map( h -> __( h.getName(), h.getValue() ) )
.collect( toMap() );
}
/**
 * Builds a TLS {@link SSLContext} whose trust anchors are loaded from the JKS keystore
 * at {@code certificateLocation}, unlocked with {@code certificatePassword}.
 * Checked exceptions are rethrown unchecked via Lombok's {@code @SneakyThrows}.
 */
@SneakyThrows
private static SSLContext createSSLContext( Path certificateLocation, String certificatePassword ) {
try( val inputStream = IoStreams.in( certificateLocation, PLAIN ) ) {
KeyStore keyStore = KeyStore.getInstance( "JKS" );
keyStore.load( inputStream, certificatePassword.toCharArray() );
TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance( TrustManagerFactory.getDefaultAlgorithm() );
trustManagerFactory.init( keyStore );
// Key managers are not configured (null): this context is for server trust only.
SSLContext sslContext = SSLContext.getInstance( "TLS" );
sslContext.init( null, trustManagerFactory.getTrustManagers(), null );
return sslContext;
}
}
/** GETs {@code uri} with no params or headers, waiting indefinitely. */
public Response get( String uri ) {
return get( uri, Maps.empty(), Maps.empty() );
}
/** GETs {@code uri} with no extra headers, waiting indefinitely. */
public Response get( URI uri ) {
return get( uri, Maps.empty() );
}
/** GETs {@code uri} with the given query params as pairs, waiting indefinitely. */
@SafeVarargs
public final Response get( String uri, Pair<String, Object>... params ) {
return get( uri, Maps.of( params ) );
}
/** GETs {@code uri} with the given query params, waiting indefinitely. */
public Response get( String uri, Map<String, Object> params ) {
return get( uri, params, Maps.empty() );
}
/** GETs {@code uri} with params and headers; throws a TimeoutException if no response arrives. */
public Response get( String uri, Map<String, Object> params, Map<String, Object> headers ) {
return get( uri, params, headers, FOREVER )
.orElseThrow( () -> new oap.concurrent.TimeoutException( "no response" ) );
}
/** GETs {@code uri} with headers; throws a TimeoutException if no response arrives. */
public Response get( URI uri, Map<String, Object> headers ) {
return get( uri, headers, FOREVER )
.orElseThrow( () -> new oap.concurrent.TimeoutException( "no response" ) );
}
/** GETs with a timeout (ms); empty result means the call timed out or was interrupted. */
public Optional<Response> get( String uri, Map<String, Object> params, long timeout ) {
return get( uri, params, Maps.empty(), timeout );
}
/** GETs with params, headers and a timeout (ms); params are encoded into the query string. */
public Optional<Response> get( String uri, Map<String, Object> params, Map<String, Object> headers, long timeout ) {
return get( Uri.uri( uri, params ), headers, timeout );
}
/** Terminal GET overload: builds the request and delegates to {@link #execute}. */
public Optional<Response> get( URI uri, Map<String, Object> headers, long timeout ) {
return execute( new HttpGet( uri ), headers, timeout );
}
/** POSTs {@code params} as a URL-encoded form, waiting indefinitely. */
public Response post( String uri, Map<String, Object> params ) {
return post( uri, params, Maps.empty() );
}
/** POSTs a URL-encoded form with extra headers; throws if no response arrives. */
public Response post( String uri, Map<String, Object> params, Map<String, Object> headers ) {
return post( uri, params, headers, FOREVER )
.orElseThrow( () -> new RuntimeException( "no response" ) );
}
/** POSTs a URL-encoded form with a timeout (ms); empty result means timeout/interrupt. */
public Optional<Response> post( String uri, Map<String, Object> params, long timeout ) {
return post( uri, params, Maps.empty(), timeout );
}
/**
 * Terminal form-POST overload: encodes {@code params} as application/x-www-form-urlencoded
 * (null values become empty strings) and delegates to {@link #execute}.
 */
public Optional<Response> post( String uri, Map<String, Object> params, Map<String, Object> headers, long timeout ) {
try {
HttpPost request = new HttpPost( uri );
request.setEntity( new UrlEncodedFormEntity( Stream.of( params.entrySet() )
.<NameValuePair>map( e -> new BasicNameValuePair( e.getKey(),
e.getValue() == null ? "" : e.getValue().toString() ) )
.toList()
) );
return execute( request, headers, timeout );
} catch( UnsupportedEncodingException e ) {
throw new UncheckedIOException( e );
}
}
/** POSTs a string body with the given content type, waiting indefinitely. */
public Response post( String uri, String content, ContentType contentType ) {
return post( uri, content, contentType, Maps.empty() );
}
/** POSTs a string body with headers; throws if no response arrives. */
public Response post( String uri, String content, ContentType contentType, Map<String, Object> headers ) {
return post( uri, content, contentType, headers, FOREVER )
.orElseThrow( () -> new RuntimeException( "no response" ) );
}
/** POSTs a string body with a timeout (ms). */
public Optional<Response> post( String uri, String content, ContentType contentType, long timeout ) {
return post( uri, content, contentType, Maps.empty(), timeout );
}
/** Terminal string-POST overload: wraps the body in a StringEntity and executes. */
public Optional<Response> post( String uri, String content, ContentType contentType, Map<String, Object> headers, long timeout ) {
HttpPost request = new HttpPost( uri );
request.setEntity( new StringEntity( content, contentType ) );
return execute( request, headers, timeout );
}
/** POSTs raw bytes as application/octet-stream with a timeout (ms). */
public Optional<Response> post( String uri, byte[] content, long timeout ) {
HttpPost request = new HttpPost( uri );
request.setEntity( new ByteArrayEntity( content, APPLICATION_OCTET_STREAM ) );
return execute( request, Maps.empty(), timeout );
}
/** POSTs a stream body, waiting indefinitely; the stream is consumed by the request. */
public Response post( String uri, InputStream content, ContentType contentType ) {
HttpPost request = new HttpPost( uri );
request.setEntity( new InputStreamEntity( content, contentType ) );
return execute( request, Maps.empty(), FOREVER )
.orElseThrow( () -> new RuntimeException( "no response" ) );
}
/** POSTs a stream body with headers, waiting indefinitely. */
public Response post( String uri, InputStream content, ContentType contentType, Map<String, Object> headers ) {
HttpPost request = new HttpPost( uri );
request.setEntity( new InputStreamEntity( content, contentType ) );
return execute( request, headers, FOREVER )
.orElseThrow( () -> new RuntimeException( "no response" ) );
}
/** POSTs raw bytes with an explicit content type and headers, waiting indefinitely. */
public Response post( String uri, byte[] content, ContentType contentType, Map<String, Object> headers ) {
HttpPost request = new HttpPost( uri );
request.setEntity( new ByteArrayEntity( content, contentType ) );
return execute( request, headers, FOREVER )
.orElseThrow( () -> new RuntimeException( "no response" ) );
}
/** PUTs a string body with the given content type, waiting indefinitely; throws if no response. */
public Response put( String uri, String content, ContentType contentType ) {
    final HttpPut put = new HttpPut( uri );
    put.setEntity( new StringEntity( content, contentType ) );
    final Optional<Response> response = execute( put, Maps.empty(), FOREVER );
    return response.orElseThrow( () -> new RuntimeException( "no response" ) );
}
/** DELETEs {@code uri}, waiting indefinitely. */
public Response delete( String uri ) {
return delete( uri, FOREVER );
}
/** DELETEs {@code uri} with a timeout (ms). */
public Response delete( String uri, long timeout ) {
return delete( uri, Maps.empty(), timeout );
}
/** DELETEs {@code uri} with extra headers, waiting indefinitely. */
public Response delete( String uri, Map<String, Object> headers ) {
return delete( uri, headers, FOREVER );
}
/** Terminal DELETE overload; throws if no response arrives within the timeout. */
public Response delete( String uri, Map<String, Object> headers, long timeout ) {
HttpDelete request = new HttpDelete( uri );
return execute( request, headers, timeout ).orElseThrow( () -> new RuntimeException( "no response" ) );
}
/** Returns the cookies currently held by this client's cookie store. */
public List<Cookie> getCookies() {
return basicCookieStore.getCookies();
}
/** Removes all cookies from this client's cookie store. */
public void clearCookies() {
basicCookieStore.clear();
}
/**
 * Sends the request with the given extra headers and waits for the response.
 *
 * <p>On success the {@code onSuccess} callback fires and the response is returned.
 * Execution and I/O failures fire {@code onError} and are rethrown (wrapped in an
 * {@link ExecutionException} carrying the request URI, sneaky-thrown past the signature).
 * Interrupt/timeout fires {@code onTimeout} and yields an empty result.
 *
 * @param timeout maximum wait in milliseconds; {@code FOREVER} waits indefinitely
 */
@SneakyThrows
private Optional<Response> execute( HttpUriRequest request, Map<String, Object> headers, long timeout ) {
try {
// Null header values are sent as empty strings.
headers.forEach( ( name, value ) -> request.setHeader( name, value == null ? "" : value.toString() ) );
Future<HttpResponse> future = client.execute( request, FUTURE_CALLBACK );
HttpResponse response = timeout == FOREVER ? future.get()
: future.get( timeout, MILLISECONDS );
Map<String, String> responsHeaders = headers( response );
Response result;
if( response.getEntity() != null ) {
HttpEntity entity = response.getEntity();
result = new Response(
response.getStatusLine().getStatusCode(),
response.getStatusLine().getReasonPhrase(),
responsHeaders,
Optional.ofNullable( entity.getContentType() )
.map( ct -> ContentType.parse( entity.getContentType().getValue() ) ),
entity.getContent()
);
// Bodyless response (e.g. 204): no content type, no stream.
} else result = new Response(
response.getStatusLine().getStatusCode(),
response.getStatusLine().getReasonPhrase(),
responsHeaders
);
builder.onSuccess.accept( this );
return Optional.of( result );
} catch( ExecutionException e ) {
// Re-wrap to attach the request URI for diagnostics, preserving the original cause.
final ExecutionException newEx = new ExecutionException( request.getURI().toString(), e.getCause() );
builder.onError.accept( this, newEx );
throw newEx;
} catch( IOException e ) {
final ExecutionException newEx = new ExecutionException( request.getURI().toString(), e );
builder.onError.accept( this, newEx );
throw newEx;
} catch( InterruptedException | TimeoutException e ) {
// NOTE(review): the interrupt flag is not restored here — consider Thread.currentThread().interrupt().
builder.onTimeout.accept( this );
return Optional.empty();
}
}
/**
 * Downloads {@code url} into {@code file} (or into a temp file, deleted on JVM exit,
 * whose extension matches the URL's encoding), reporting progress via {@code progress}.
 *
 * <p>When {@code modificationTime} is given it is sent as {@code If-Modified-Since};
 * a 304 reply yields an empty result. The file's last-modified time is set from the
 * {@code Last-Modified} response header when present.
 *
 * @return the downloaded path, or empty on 304 Not Modified or interruption
 */
@SneakyThrows
public Optional<Path> download( String url, Optional<Long> modificationTime, Optional<Path> file, Consumer<Integer> progress ) {
try {
val response = resolve( url, modificationTime ).orElse( null );
if( response == null ) return Optional.empty();
val entity = response.getEntity();
final Path path = file.orElseGet( Try.supply( () -> {
final IoStreams.Encoding encoding = IoStreams.Encoding.from( url );
final File tempFile = File.createTempFile( "file", "down" + encoding.extension );
tempFile.deleteOnExit();
return tempFile.toPath();
} ) );
// Append when writing into a caller-supplied file (file.isPresent()); overwrite a fresh temp file.
try( InputStream in = new BufferedInputStream( entity.getContent() ) ) {
IoStreams.write( path, PLAIN, in, false, file.isPresent(), progress( entity.getContentLength(), progress ) );
}
final Header lastModified = response.getLastHeader( "Last-Modified" );
if( lastModified != null ) {
final Date date = DateUtils.parseDate( lastModified.getValue() );
Files.setLastModifiedTime( path, date.getTime() );
}
builder.onSuccess.accept( this );
return Optional.of( path );
} catch( ExecutionException | IOException e ) {
builder.onError.accept( this, e );
throw e;
} catch( InterruptedException e ) {
builder.onTimeout.accept( this );
return Optional.empty();
}
}
/**
 * Resolves {@code url} without an {@code If-Modified-Since} condition.
 *
 * <p>Bug fix: the previous body was {@code return resolve( url );} — the method called
 * itself, so any redirect followed from {@link #resolve(String, Optional)} recursed
 * until {@code StackOverflowError}. It must delegate to the two-argument overload.
 */
private Optional<HttpResponse> resolve( String url ) throws InterruptedException, ExecutionException, IOException {
    return resolve( url, Optional.empty() );
}
/**
 * Executes a GET and classifies the reply:
 * 200 with a body — returns the response;
 * 302 — follows the {@code Location} header by resolving it (recursively, no hop limit);
 * 304 — returns empty (content not modified since {@code ifModifiedSince});
 * anything else — throws an {@link IOException} with the status line.
 */
private Optional<HttpResponse> resolve( String url, Optional<Long> ifModifiedSince ) throws InterruptedException, ExecutionException, IOException {
HttpGet request = new HttpGet( url );
ifModifiedSince.ifPresent( ims -> request.addHeader( "If-Modified-Since", DateUtils.formatDate( new Date( ims ) ) ) );
Future<HttpResponse> future = client.execute( request, FUTURE_CALLBACK );
HttpResponse response = future.get();
if( response.getStatusLine().getStatusCode() == HTTP_OK && response.getEntity() != null )
return Optional.of( response );
else if( response.getStatusLine().getStatusCode() == HTTP_MOVED_TEMP ) {
Header location = response.getFirstHeader( "Location" );
if( location == null ) throw new IOException( "redirect w/o location!" );
log.debug( "following {}", location.getValue() );
return resolve( location.getValue() );
} else if( response.getStatusLine().getStatusCode() == HTTP_NOT_MODIFIED ) {
return Optional.empty();
} else
throw new IOException( response.getStatusLine().toString() );
}
/** Closes the current async client and rebuilds a fresh one from the retained builder. */
public void reset() {
Closeables.close( client );
client = builder.client();
}
/** Closes the underlying async client; the instance is unusable afterwards unless {@link #reset()} is called. */
@Override
public void close() {
Closeables.close( client );
}
/**
 * Uploads the file at {@code path} as a multipart/form-data POST to {@code uri},
 * with the file in the {@code upfile} part and {@code prefix} as a form field.
 * Uses a one-shot OkHttp client (independent of this client's async machinery).
 *
 * @return a {@link Response} wrapping the reply; the caller must close it to release the body stream
 */
@SneakyThrows
public Response uploadFile( String uri, String prefix, Path path ) {
    // Files.probeContentType returns null when the type cannot be determined;
    // ContentType.create( null ) would throw, so fall back to a generic binary type.
    final String probedType = java.nio.file.Files.probeContentType( path );
    final ContentType contentType =
        ContentType.create( probedType != null ? probedType : "application/octet-stream" );
    OkHttpClient client = new OkHttpClient();
    final MultipartBody body = new MultipartBody.Builder()
        .setType( MultipartBody.FORM )
        .addFormDataPart( "upfile", path.toFile().getName(), RequestBody.create( MediaType.parse( contentType.toString() ), path.toFile() ) )
        .addFormDataPart( "prefix", prefix )
        .build();
    okhttp3.Request request = new okhttp3.Request.Builder()
        .url( uri )
        .post( body )
        .build();
    val response = client.newCall( request ).execute();
    val headers = response.headers();
    final java.util.stream.Stream<String> stream = headers.names().stream();
    final Map<String, String> h = stream.collect( Collectors.toMap( n -> n, headers::get ) );
    val responseBody = response.body();
    return new Response( response.code(), response.message(), h,
        Optional.ofNullable( responseBody.contentType() ).map( mt -> ContentType.create( mt.type() + "/" + mt.subtype(), mt.charset() ) ),
        responseBody.byteStream() );
}
/**
 * An HTTP response: status code, reason phrase, headers, optional content type and body stream.
 * The body is buffered lazily by {@link #content()}; {@link #close()} releases the stream.
 */
@ToString( exclude = { "inputStream", "content" } )
public static class Response implements Closeable {
public final int code;
public final String reasonPhrase;
public final Optional<ContentType> contentType;
public final Map<String, String> headers;
// Body stream; set to null once consumed by content() or closed.
private InputStream inputStream;
// Lazily-buffered body bytes; populated at most once by content().
private byte[] content = null;
@SneakyThrows
public Response( int code, String reasonPhrase, Map<String, String> headers, Optional<ContentType> contentType, InputStream inputStream ) {
this.code = code;
this.reasonPhrase = reasonPhrase;
this.headers = headers;
this.contentType = contentType;
this.inputStream = inputStream;
}
/** Creates a bodyless response (no content type, no stream). */
public Response( int code, String reasonPhrase, Map<String, String> headers ) {
this( code, reasonPhrase, headers, Optional.empty(), null );
}
/**
 * Buffers and returns the whole body, closing the stream afterwards.
 * Returns null when the response has no body.
 * NOTE(review): double-checked lazy init on non-volatile fields — confirm the intended
 * concurrency contract of this class.
 */
@Nullable
@SneakyThrows
public byte[] content() {
if( content == null && inputStream != null ) {
synchronized( this ) {
if( content == null ) {
content = ByteStreams.toByteArray( inputStream );
close();
}
}
}
return content;
}
/** Returns the raw body stream; null once it has been consumed or closed. */
public InputStream getInputStream() {
return inputStream;
}
/** Returns the body as a string, decoded with the response charset or UTF-8; null if no body. */
public String contentString() {
val content = content();
if( content == null ) return null;
return new String( content, contentType.map( ContentType::getCharset ).orElse( StandardCharsets.UTF_8 ) );
}
/**
 * Deserializes the JSON body into {@code clazz}, reading straight from the stream when it is
 * still open, otherwise from the buffered string. Empty when there is no body.
 */
public <T> Optional<T> unmarshal( Class<?> clazz ) {
if( inputStream != null ) {
synchronized( this ) {
if( inputStream != null ) {
return Binder.json.unmarshal( clazz, inputStream );
}
}
}
val contentString = contentString();
if( contentString == null ) return Optional.empty();
return Optional.of( Binder.json.unmarshal( clazz, contentString ) );
}
/** Same as {@link #unmarshal(Class)} but for generic types via {@link TypeRef}. */
public <T> Optional<T> unmarshal( TypeRef<T> ref ) {
if( inputStream != null ) {
synchronized( this ) {
if( inputStream != null ) {
return Optional.of( Binder.json.unmarshal( ref, inputStream ) );
}
}
}
val contentString = contentString();
if( contentString == null ) return Optional.empty();
return Optional.of( Binder.json.unmarshal( ref, contentString ) );
}
/** Closes and discards the body stream; idempotent. */
@Override
public void close() {
Closeables.close( inputStream );
inputStream = null;
}
}
// Fluent configuration for constructing a Client backed by an Apache async
// HTTP client: collects TLS, timeout, pooling, redirect and cookie settings,
// then materialises the underlying CloseableHttpAsyncClient in client().
public static class ClientBuilder extends AsyncCallbacks<ClientBuilder, Client> {
    // Shared cookie store, also handed to the built Client.
    private final BasicCookieStore basicCookieStore;
    // When set, a keystore at this path (with certificatePassword) backs the
    // SSL context; otherwise the JVM default context is used.
    private Path certificateLocation;
    private String certificatePassword;
    private int connectTimeout;
    private int readTimeout;
    // Connection-pool ceilings (total and per route).
    private int maxConnTotal = 10000;
    private int maxConnPerRoute = 1000;
    // Redirect following is off by default.
    private boolean redirectsEnabled = false;
    private String cookieSpec = CookieSpecs.STANDARD;

    public ClientBuilder( Path certificateLocation, String certificatePassword, int connectTimeout, int readTimeout ) {
        basicCookieStore = new BasicCookieStore();
        this.certificateLocation = certificateLocation;
        this.certificatePassword = certificatePassword;
        this.connectTimeout = connectTimeout;
        this.readTimeout = readTimeout;
    }

    // Assembles the HttpAsyncClientBuilder: an NIO reactor carrying the
    // connect/read timeouts, an http/https scheme registry (https honours the
    // optional keystore plus the https.protocols / https.cipherSuites system
    // properties), and the pool, keep-alive, redirect and cookie settings.
    private HttpAsyncClientBuilder initialize() {
        try {
            final PoolingNHttpClientConnectionManager connManager = new PoolingNHttpClientConnectionManager(
                new DefaultConnectingIOReactor( IOReactorConfig.custom()
                    .setConnectTimeout( connectTimeout )
                    .setSoTimeout( readTimeout )
                    .build() ),
                RegistryBuilder.<SchemeIOSessionStrategy>create()
                    .register( "http", NoopIOSessionStrategy.INSTANCE )
                    .register( "https",
                        new SSLIOSessionStrategy( certificateLocation != null
                            ? createSSLContext( certificateLocation, certificatePassword )
                            : SSLContexts.createDefault(),
                            split( System.getProperty( "https.protocols" ) ),
                            split( System.getProperty( "https.cipherSuites" ) ),
                            new DefaultHostnameVerifier( PublicSuffixMatcherLoader.getDefault() ) ) )
                    .build() );
            connManager.setMaxTotal( maxConnTotal );
            connManager.setDefaultMaxPerRoute( maxConnPerRoute );
            // NOTE(review): when a certificate is configured, createSSLContext
            // is invoked a second time here for the client-level SSL context —
            // confirm that is intentional (it was already used for the https
            // session strategy above).
            return ( certificateLocation != null
                ? HttpAsyncClients.custom()
                    .setSSLContext( createSSLContext( certificateLocation, certificatePassword ) )
                : HttpAsyncClients.custom() )
                .setMaxConnPerRoute( maxConnPerRoute )
                .setConnectionManager( connManager )
                .setMaxConnTotal( maxConnTotal )
                .setKeepAliveStrategy( DefaultConnectionKeepAliveStrategy.INSTANCE )
                .setDefaultRequestConfig( RequestConfig
                    .custom()
                    .setRedirectsEnabled( redirectsEnabled )
                    .setCookieSpec( cookieSpec )
                    .build() )
                .setDefaultCookieStore( basicCookieStore );
        } catch( IOReactorException e ) {
            throw new UncheckedIOException( e );
        }
    }

    public ClientBuilder setMaxConnTotal( int maxConnTotal ) {
        this.maxConnTotal = maxConnTotal;
        return this;
    }

    public ClientBuilder setMaxConnPerRoute( int maxConnPerRoute ) {
        this.maxConnPerRoute = maxConnPerRoute;
        return this;
    }

    public ClientBuilder setRedirectsEnabled( boolean redirectsEnabled ) {
        this.redirectsEnabled = redirectsEnabled;
        return this;
    }

    public ClientBuilder setCookieSpec( String cookieSpec ) {
        this.cookieSpec = cookieSpec;
        return this;
    }

    // Builds and starts the async client; callers own its lifecycle.
    private CloseableHttpAsyncClient client() {
        final CloseableHttpAsyncClient build = initialize().build();
        build.start();
        return build;
    }

    // Entry point: wraps this configuration in a Client.
    public Client build() {
        return new Client( basicCookieStore, this );
    }
}
}
|
/* -*- Mode: Java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:set ts=4 sw=4 sts=4 et: */
package jp.oist.flint.phsp;
import jp.oist.flint.phsp.entity.Model;
import jp.oist.flint.phsp.entity.ParameterSet;
import jp.oist.flint.phsp.entity.TargetSet;
import jp.oist.flint.textformula.TextFormula2MathML;
import jp.oist.flint.textformula.analyzer.ParseException;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.StringReader;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Locale;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
public class PhspWriter {
private final static int SHIFT_WIDTH = 2;
private final String mLineFeed = System.getProperty("line.separator");
private String escape(String s) {
return s.replaceAll(">",">").replaceAll("<", "<").replaceAll("&", "&").replaceAll("'", "'");
}
protected String indent (int count) {
StringBuilder sb = new StringBuilder();
for (int i=0; i<count; i++)
sb.append(" ");
return sb.toString();
}
public void write (IPhspConfiguration conf, OutputStream os, boolean removeParameterIfNotUsed)
throws IOException,
ParseException,
ParserConfigurationException,
PhspException,
TransformerException {
try (OutputStreamWriter osw = new OutputStreamWriter(os, StandardCharsets.UTF_8);
BufferedWriter writer = new BufferedWriter(osw)) {
int sw = 0;
writeLine("<?xml version='1.0' encoding='UTF-8'?>", sw, writer);
writeLine("<phsp xmlns='http:
StringBuilder sb = new StringBuilder();
sw += SHIFT_WIDTH;
for (Model model : conf.getModels()) {
String format = model.getModelFormat().name().toLowerCase(Locale.ENGLISH);
File modelFile = model.getModelFile();
String path = escape(modelFile.getCanonicalPath());
writeLine(String.format("<model format='%s' iref='%s'>", format, path), sw, writer);
sw += SHIFT_WIDTH;
/* wirter target-set */
TargetSet ts = model.getTargetSet();
writeLine("<target-set>", sw, writer);
sw += SHIFT_WIDTH;
TargetSet.Target[] targets = ts.getTargets();
for (TargetSet.Target t : targets) {
switch (model.getModelFormat()) {
case PHML:
sb.append("<target")
.append(String.format(" module-id='%s'", t.getModuleId()))
.append(String.format(" physical-quantity-id='%s'", t.getPhysicalQuantityId()))
.append(">");
break;
case SBML:
sb.append("<target")
.append(String.format(" species-id='%s'", t.getSpeciesId()))
.append(">");
break;
default:
sw -= SHIFT_WIDTH;
continue;
}
writeLine(sb.toString(), sw, writer);
sb.setLength(0);
sw += SHIFT_WIDTH;
TextFormula2MathML s2mathml = new TextFormula2MathML();
s2mathml.parse(new StringReader(t.getValue()));
String mathml = s2mathml.getMathML();
sb.append("<value>");
sb.append(mathml);
sb.append("</value>");
writeLine(sb.toString(), sw, writer);
sb.setLength(0);
sw -= SHIFT_WIDTH;
writeLine("</target>", sw, writer);
}
sw -= SHIFT_WIDTH;
writeLine("</target-set>", sw, writer);
/* write parameter-set*/
ParameterSet ps = model.getParameterSet();
if (removeParameterIfNotUsed)
ps = ps.filterByNames(Arrays.asList(ts.getUsingParameterNames()));
writeLine("<parameter-set>", sw, writer);
sw += SHIFT_WIDTH;
ParameterSet.Parameter[] parameters = ps.getParameters();
if (parameters.length == 0)
parameters = new ParameterSet.Dummy().getParameters();
for (ParameterSet.Parameter p : parameters) {
String name = p.getName();
writeLine(String.format("<parameter name='%s'>", name), sw, writer);
sw += SHIFT_WIDTH;
String type = p.getType().name().toLowerCase(Locale.ENGLISH);
switch (p.getType()) {
case ENUM:
sb.append(String.format("<range type='%s'>", type))
.append(p.getEnumValue())
.append(String.format("</range>"));
break;
case INTERVAL:
sb.append("<range")
.append(String.format(" type='%s'", type))
.append(String.format(" upper='%s'", p.getRangeUpper()))
.append(String.format(" lower='%s'", p.getRangeLower()))
.append(String.format(" step='%s'", p.getRangeStep()))
.append("/>");
break;
default:
sw -= SHIFT_WIDTH;
continue;
}
writeLine(sb.toString(), sw, writer);
sb.setLength(0);
sw -= SHIFT_WIDTH;
writeLine("</parameter>", sw, writer);
}
sw -= SHIFT_WIDTH;
writeLine("</parameter-set>", sw, writer);
sw -= SHIFT_WIDTH;
writeLine("</model>", sw, writer);
}
sw -= SHIFT_WIDTH;
writeLine("</phsp>", sw, writer);
}
}
private void writeLine(String s, int shiftWidth, BufferedWriter writer) throws IOException {
writer.append(indent(shiftWidth));
writer.append(s);
writer.append(mLineFeed);
}
}
|
package com.hp.mwtests.ts.jts.orbspecific.recovery;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import com.arjuna.ats.internal.jts.ORBManager;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import com.arjuna.ats.internal.jts.orbspecific.recovery.RecoveryEnablement;
import com.hp.mwtests.ts.jts.resources.TestBase;
/**
 * Verifies that JTS recovery can be enabled: the recovery coordinator service
 * starts, and the process correctly reports itself as a recovery (non-normal)
 * process afterwards.
 */
public class RecoveryEnablementUnitTest extends TestBase
{
    @BeforeClass
    public static void setupClass() {
        // persistent POAs can't be anonymous, need a name:
        System.setProperty("jacorb.implname", "arjuna");
    }

    @AfterClass
    public static void tearDownClass() {
        // Remove the property so other test classes see a clean environment.
        System.clearProperty("jacorb.implname");
    }

    @Test
    public void test () throws Exception
    {
        // Drop any ORB state left over from earlier tests before enabling recovery.
        ORBManager.reset();
        RecoveryEnablement rec = new RecoveryEnablement();
        // Starting the recovery coordinator service must succeed.
        assertTrue(rec.startRCservice());
        // Flag this JVM as a recovery process and check the flag is reflected.
        RecoveryEnablement.isNotANormalProcess();
        assertFalse(RecoveryEnablement.isNormalProcess());
        // A recovery manager tag must be available once the service is up.
        assertTrue(RecoveryEnablement.getRecoveryManagerTag() != null);
    }
}
|
package com.diegocarloslima.fgelv.lib;
import android.content.Context;
import android.database.DataSetObserver;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.util.AttributeSet;
import android.view.ContextMenu.ContextMenuInfo;
import android.view.GestureDetector;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
import android.widget.AbsListView;
import android.widget.ExpandableListAdapter;
import android.widget.ExpandableListView;
/**
 * An ExpandableListView that pins ("floats") the current group's header view
 * at the top of the list while its children scroll underneath.
 *
 * <p>The floating header is created, measured and laid out manually in
 * {@code createFloatingGroupView()} on every scroll tick, drawn on top of the
 * list in {@code dispatchDraw()}, and receives touch events forwarded from
 * {@code dispatchTouchEvent()}. Several private framework members (selector
 * rect/position, group indicator bounds, View.AttachInfo, positionSelector)
 * are accessed via reflection, so behavior is tied to the Android versions
 * those fields exist on.
 */
public class FloatingGroupExpandableListView extends ExpandableListView {

    private static final int[] EMPTY_STATE_SET = {};

    // State indicating the group is expanded
    private static final int[] GROUP_EXPANDED_STATE_SET = {android.R.attr.state_expanded};

    // State indicating the group is empty (has no children)
    private static final int[] GROUP_EMPTY_STATE_SET = {android.R.attr.state_empty};

    // State indicating the group is expanded and empty (has no children)
    private static final int[] GROUP_EXPANDED_EMPTY_STATE_SET = {android.R.attr.state_expanded, android.R.attr.state_empty};

    // States for the group where the 0th bit is expanded and 1st bit is empty.
    private static final int[][] GROUP_STATE_SETS = {
        EMPTY_STATE_SET,
        GROUP_EXPANDED_STATE_SET,
        GROUP_EMPTY_STATE_SET,
        GROUP_EXPANDED_EMPTY_STATE_SET
    };

    private WrapperExpandableListAdapter mAdapter;
    private DataSetObserver mDataSetObserver;
    // Client-supplied scroll listener; this view installs its own internally.
    private OnScrollListener mOnScrollListener;

    // By default, the floating group is enabled
    private boolean mFloatingGroupEnabled = true;

    // The header currently floating at the top, and which group it shows.
    private View mFloatingGroupView;
    private int mFloatingGroupPosition;
    // Vertical offset applied while the next group pushes this header off-screen.
    private int mFloatingGroupScrollY;
    private OnScrollFloatingGroupListener mOnScrollFloatingGroupListener;
    private OnGroupClickListener mOnGroupClickListener;
    // Saved from onMeasure() so the floating header can be measured identically.
    private int mWidthMeasureSpec;

    // An AttachInfo instance is added to the FloatingGroupView in order to have proper touch event handling
    private Object mViewAttachInfo;
    // Set when the parent list claims the gesture, so it is not forwarded to
    // the floating header as well.
    private boolean mHandledByOnInterceptTouchEvent;
    private boolean mHandledByOnTouchEvent;
    private Runnable mOnClickAction;
    private GestureDetector mGestureDetector;

    // Selector (pressed-state highlight) bookkeeping, mirrored from AbsListView
    // internals via reflection.
    private boolean mSelectorEnabled;
    private boolean mShouldPositionSelector;
    private boolean mDrawSelectorOnTop;
    private Drawable mSelector;
    private int mSelectorPosition;
    private final Rect mSelectorRect = new Rect();
    private Runnable mPositionSelectorOnTapAction;
    private Runnable mClearSelectorOnTapAction;

    private final Rect mIndicatorRect = new Rect();

    public FloatingGroupExpandableListView(Context context) {
        super(context);
        init();
    }

    public FloatingGroupExpandableListView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init();
    }

    public FloatingGroupExpandableListView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        init();
    }

    // Installs the internal scroll listener (which rebuilds the floating header
    // on every scroll), the tap/selector runnables, and a long-press detector
    // that opens the context menu for the floating header.
    private void init() {
        super.setOnScrollListener(new OnScrollListener() {
            @Override
            public void onScrollStateChanged(AbsListView view, int scrollState) {
                if(mOnScrollListener != null) {
                    mOnScrollListener.onScrollStateChanged(view, scrollState);
                }
            }

            @Override
            public void onScroll(AbsListView view, int firstVisibleItem, int visibleItemCount, int totalItemCount) {
                if(mOnScrollListener != null) {
                    mOnScrollListener.onScroll(view, firstVisibleItem, visibleItemCount, totalItemCount);
                }
                // Rebuild the floating header for the group owning the first visible item.
                if(mFloatingGroupEnabled && mAdapter != null && mAdapter.getGroupCount() > 0 && visibleItemCount > 0) {
                    createFloatingGroupView(firstVisibleItem);
                }
            }
        });

        // Mirrors the default group-click behavior (toggle expand/collapse)
        // for taps landing on the floating header.
        mOnClickAction = new Runnable() {
            @Override
            public void run() {
                boolean allowSelection = true;
                if(mOnGroupClickListener != null) {
                    allowSelection = !mOnGroupClickListener.onGroupClick(FloatingGroupExpandableListView.this, mFloatingGroupView, mFloatingGroupPosition, mAdapter.getGroupId(mFloatingGroupPosition));
                }
                if (allowSelection) {
                    if(mAdapter.isGroupExpanded(mFloatingGroupPosition)) {
                        collapseGroup(mFloatingGroupPosition);
                    } else {
                        expandGroup(mFloatingGroupPosition);
                    }
                    setSelectedGroup(mFloatingGroupPosition);
                }
            }
        };

        // Shows the pressed-state selector over the floating header after the
        // tap-timeout, matching normal list-item feedback.
        mPositionSelectorOnTapAction = new Runnable() {
            @Override
            public void run() {
                positionSelectorOnFloatingGroup();
                setPressed(true);
                if (mFloatingGroupView != null) {
                    mFloatingGroupView.setPressed(true);
                }
            }
        };

        // Clears the pressed state once the pressed-state duration elapses.
        mClearSelectorOnTapAction = new Runnable() {
            @Override
            public void run() {
                setPressed(false);
                if(mFloatingGroupView != null) {
                    mFloatingGroupView.setPressed(false);
                }
                invalidate();
            }
        };

        mGestureDetector = new GestureDetector(getContext(), new GestureDetector.SimpleOnGestureListener() {
            @Override
            public void onLongPress(MotionEvent e) {
                // Long-press on the floating header: inject the context-menu
                // info the framework would normally build, then open the menu.
                if(mFloatingGroupView != null && !mFloatingGroupView.isLongClickable()) {
                    final ContextMenuInfo contextMenuInfo = new ExpandableListContextMenuInfo(mFloatingGroupView, getPackedPositionForGroup(mFloatingGroupPosition), mAdapter.getGroupId(mFloatingGroupPosition));
                    ReflectionUtils.setFieldValue(AbsListView.class, "mContextMenuInfo", FloatingGroupExpandableListView.this, contextMenuInfo);
                    showContextMenu();
                }
            }
        });
    }

    @Override
    protected void onDetachedFromWindow() {
        super.onDetachedFromWindow();
        // Unregister the adapter observer to avoid leaking this view.
        if(mAdapter != null && mDataSetObserver != null) {
            mAdapter.unregisterDataSetObserver(mDataSetObserver);
            mDataSetObserver = null;
        }
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        // Saved so createFloatingGroupView() can measure the header with the
        // same constraints as regular children.
        mWidthMeasureSpec = widthMeasureSpec;
    }

    @Override
    protected void dispatchDraw(Canvas canvas) {
        // Reflection is used here to obtain info about the selector
        if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
            mSelectorPosition = (Integer) ReflectionUtils.getFieldValue(AbsListView.class, "mSelectorPosition", FloatingGroupExpandableListView.this);
        } else {
            // Pre-ICS AbsListView has no mSelectorPosition; mMotionPosition is
            // the closest equivalent.
            mSelectorPosition = (Integer) ReflectionUtils.getFieldValue(AbsListView.class, "mMotionPosition", FloatingGroupExpandableListView.this);
        }
        mSelectorRect.set((Rect) ReflectionUtils.getFieldValue(AbsListView.class, "mSelectorRect", FloatingGroupExpandableListView.this));

        // Draw order: (optional) selector below content, list content, then the
        // floating header clipped to the padded area, its indicator, and
        // finally on-top selectors if configured.
        if(!mDrawSelectorOnTop) {
            drawDefaultSelector(canvas);
        }

        super.dispatchDraw(canvas);

        if(mFloatingGroupEnabled && mFloatingGroupView != null) {
            if(!mDrawSelectorOnTop) {
                drawFloatingGroupSelector(canvas);
            }

            canvas.save();
            canvas.clipRect(getPaddingLeft(), getPaddingTop(), getWidth() - getPaddingRight(), getHeight() - getPaddingBottom());
            if(mFloatingGroupView.getVisibility() == View.VISIBLE) {
                drawChild(canvas, mFloatingGroupView, getDrawingTime());
            }
            drawFloatingGroupIndicator(canvas);
            canvas.restore();

            if(mDrawSelectorOnTop) {
                drawDefaultSelector(canvas);
                drawFloatingGroupSelector(canvas);
            }
        }
    }

    @Override
    public boolean dispatchTouchEvent(MotionEvent ev) {
        final int action = ev.getAction() & MotionEvent.ACTION_MASK;

        // A new gesture resets the "already handled by the list" flags.
        if(action == MotionEvent.ACTION_DOWN || action == MotionEvent.ACTION_CANCEL) {
            mHandledByOnInterceptTouchEvent = false;
            mHandledByOnTouchEvent = false;
            mShouldPositionSelector = false;
        }

        // If touch events are being handled by onInterceptTouchEvent() or onTouchEvent() we shouldn't dispatch them to the floating group
        if(!mHandledByOnInterceptTouchEvent && !mHandledByOnTouchEvent && mFloatingGroupView != null) {
            // Hit-test against the floating header's bounds in window coordinates.
            final int screenCoords[] = new int[2];
            getLocationInWindow(screenCoords);
            final RectF floatingGroupRect = new RectF(screenCoords[0] + mFloatingGroupView.getLeft(), screenCoords[1] + mFloatingGroupView.getTop(), screenCoords[0] + mFloatingGroupView.getRight(), screenCoords[1] + mFloatingGroupView.getBottom());

            if(floatingGroupRect.contains(ev.getRawX(), ev.getRawY())) {
                if(mSelectorEnabled) {
                    switch(action) {
                        case MotionEvent.ACTION_DOWN:
                            // Delay the pressed highlight like the framework does.
                            mShouldPositionSelector = true;
                            removeCallbacks(mPositionSelectorOnTapAction);
                            postDelayed(mPositionSelectorOnTapAction, ViewConfiguration.getTapTimeout());
                            break;
                        case MotionEvent.ACTION_UP:
                            positionSelectorOnFloatingGroup();
                            setPressed(true);
                            if (mFloatingGroupView != null) {
                                mFloatingGroupView.setPressed(true);
                            }
                            removeCallbacks(mClearSelectorOnTapAction);
                            postDelayed(mClearSelectorOnTapAction, ViewConfiguration.getPressedStateDuration());
                            break;
                    }
                }

                if(mFloatingGroupView.dispatchTouchEvent(ev)) {
                    // Keep the long-press detector and the list's intercept
                    // state machine fed even though the header consumed it.
                    mGestureDetector.onTouchEvent(ev);
                    onInterceptTouchEvent(ev);
                    return true;
                }
            }
        }

        return super.dispatchTouchEvent(ev);
    }

    @Override
    public boolean onInterceptTouchEvent(MotionEvent ev) {
        // Record whether the list claimed the gesture (see dispatchTouchEvent).
        mHandledByOnInterceptTouchEvent = super.onInterceptTouchEvent(ev);
        return mHandledByOnInterceptTouchEvent;
    }

    @Override
    public boolean onTouchEvent(MotionEvent ev) {
        mHandledByOnTouchEvent = super.onTouchEvent(ev);
        return mHandledByOnTouchEvent;
    }

    @Override
    public void setSelector(Drawable sel) {
        // The real selector is drawn by this class; give the superclass a
        // transparent one so it doesn't double-draw.
        super.setSelector(new ColorDrawable(Color.TRANSPARENT));

        if (mSelector != null) {
            mSelector.setCallback(null);
            unscheduleDrawable(mSelector);
        }

        mSelector = sel;
        mSelector.setCallback(this);
    }

    @Override
    public void setDrawSelectorOnTop(boolean onTop) {
        super.setDrawSelectorOnTop(onTop);
        // Shadowed locally because the superclass field is private.
        mDrawSelectorOnTop = onTop;
    }

    @Override
    public void setAdapter(ExpandableListAdapter adapter) {
        if(!(adapter instanceof WrapperExpandableListAdapter)) {
            throw new IllegalArgumentException("The adapter must be an instance of WrapperExpandableListAdapter");
        }
        setAdapter((WrapperExpandableListAdapter) adapter);
    }

    // Installs the adapter and (re)registers a data-set observer that discards
    // the floating header whenever the data changes or is invalidated.
    public void setAdapter(WrapperExpandableListAdapter adapter) {
        super.setAdapter(adapter);

        if(mAdapter != null && mDataSetObserver != null) {
            mAdapter.unregisterDataSetObserver(mDataSetObserver);
            mDataSetObserver = null;
        }

        mAdapter = adapter;

        if(mAdapter != null && mDataSetObserver == null) {
            mDataSetObserver = new DataSetObserver() {
                @Override
                public void onChanged() {
                    mFloatingGroupView = null;
                }

                @Override
                public void onInvalidated() {
                    mFloatingGroupView = null;
                }
            };
            mAdapter.registerDataSetObserver(mDataSetObserver);
        }
    }

    @Override
    public void setOnScrollListener(OnScrollListener listener) {
        // Stored rather than passed up: init() installed our own listener,
        // which delegates to this one.
        mOnScrollListener = listener;
    }

    @Override
    public void setOnGroupClickListener(OnGroupClickListener onGroupClickListener) {
        super.setOnGroupClickListener(onGroupClickListener);
        // Also kept locally so mOnClickAction can consult it for the floating header.
        mOnGroupClickListener = onGroupClickListener;
    }

    public void setFloatingGroupEnabled(boolean floatingGroupEnabled) {
        mFloatingGroupEnabled = floatingGroupEnabled;
    }

    public void setOnScrollFloatingGroupListener(OnScrollFloatingGroupListener listener) {
        mOnScrollFloatingGroupListener = listener;
    }

    // Rebuilds the floating header for the group containing flat list
    // 'position': restores visibility of any row hidden on a previous pass,
    // hides the real header row when it scrolls under the floating copy,
    // obtains/measures/lays out the header view from the adapter, and offsets
    // it upward while the next group's header pushes it off-screen.
    private void createFloatingGroupView(int position) {
        mFloatingGroupView = null;
        mFloatingGroupPosition = getPackedPositionGroup(getExpandableListPosition(position));

        // Undo any visibility change made on a previous pass.
        for(int i = 0; i < getChildCount(); i++) {
            final View child = getChildAt(i);
            final Object tag = child.getTag(R.id.fgelv_tag_changed_visibility);
            if(tag instanceof Boolean) {
                final boolean changedVisibility = (Boolean) tag;
                if(changedVisibility) {
                    child.setVisibility(View.VISIBLE);
                    child.setTag(R.id.fgelv_tag_changed_visibility, null);
                }
            }
        }

        if(!mFloatingGroupEnabled) {
            return;
        }

        final int floatingGroupFlatPosition = getFlatListPosition(getPackedPositionForGroup(mFloatingGroupPosition));
        final int floatingGroupListPosition = floatingGroupFlatPosition - position;

        if(floatingGroupListPosition >= 0 && floatingGroupListPosition < getChildCount()) {
            final View currentGroupView = getChildAt(floatingGroupListPosition);

            if(currentGroupView.getTop() >= getPaddingTop()) {
                // The real header row is fully visible; no floating copy needed.
                return;
            } else if(currentGroupView.getTop() < getPaddingTop() && currentGroupView.getVisibility() == View.VISIBLE) {
                // Hide the real row so it doesn't show through the floating copy.
                currentGroupView.setVisibility(View.INVISIBLE);
                currentGroupView.setTag(R.id.fgelv_tag_changed_visibility, true);
            }
        }

        if(mFloatingGroupPosition >= 0) {
            mFloatingGroupView = mAdapter.getGroupView(mFloatingGroupPosition, mAdapter.isGroupExpanded(mFloatingGroupPosition), mFloatingGroupView, this);

            // Only attach our own click handling (and selector feedback) when
            // the adapter's view isn't already clickable itself.
            if(!mFloatingGroupView.isClickable()) {
                mSelectorEnabled = true;
                mFloatingGroupView.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        postDelayed(mOnClickAction, ViewConfiguration.getPressedStateDuration());
                    }
                });
            } else {
                mSelectorEnabled = false;
            }

            loadAttachInfo();
            setAttachInfo(mFloatingGroupView);
        }

        if(mFloatingGroupView == null) {
            return;
        }

        AbsListView.LayoutParams params = (AbsListView.LayoutParams) mFloatingGroupView.getLayoutParams();
        if(params == null) {
            params = new AbsListView.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT, 0);
            mFloatingGroupView.setLayoutParams(params);
        }

        // Measure with the same width constraints a regular child would get.
        final int childWidthSpec = ViewGroup.getChildMeasureSpec(mWidthMeasureSpec, getPaddingLeft() + getPaddingRight(), params.width);
        final int paramsHeight = params.height;
        int childHeightSpec;
        if(paramsHeight > 0) {
            childHeightSpec = MeasureSpec.makeMeasureSpec(paramsHeight, MeasureSpec.EXACTLY);
        } else {
            childHeightSpec = MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED);
        }
        mFloatingGroupView.measure(childWidthSpec, childHeightSpec);

        // If the next group's header is closing in, slide the floating header
        // up by the overlap so the two never overdraw.
        int floatingGroupScrollY = 0;

        final int nextGroupFlatPosition = getFlatListPosition(getPackedPositionForGroup(mFloatingGroupPosition + 1));
        final int nextGroupListPosition = nextGroupFlatPosition - position;

        if(nextGroupListPosition >= 0 && nextGroupListPosition < getChildCount()) {
            final View nextGroupView = getChildAt(nextGroupListPosition);

            if(nextGroupView != null && nextGroupView.getTop() < getPaddingTop() + mFloatingGroupView.getMeasuredHeight() + getDividerHeight()) {
                floatingGroupScrollY = nextGroupView.getTop() - (getPaddingTop() + mFloatingGroupView.getMeasuredHeight() + getDividerHeight());
            }
        }

        final int left = getPaddingLeft();
        final int top = getPaddingTop() + floatingGroupScrollY;
        final int right = left + mFloatingGroupView.getMeasuredWidth();
        final int bottom = top + mFloatingGroupView.getMeasuredHeight();
        mFloatingGroupView.layout(left, top, right, bottom);

        mFloatingGroupScrollY = floatingGroupScrollY;
        if(mOnScrollFloatingGroupListener != null) {
            mOnScrollFloatingGroupListener.onScrollFloatingGroupListener(mFloatingGroupView, mFloatingGroupScrollY);
        }
    }

    // Caches this view's AttachInfo (via reflection) so it can be shared with
    // the floating header, which is never formally attached to the window.
    private void loadAttachInfo() {
        if(mViewAttachInfo == null) {
            mViewAttachInfo = ReflectionUtils.getFieldValue(View.class, "mAttachInfo", FloatingGroupExpandableListView.this);
        }
    }

    // Recursively injects the cached AttachInfo into 'v' and its children so
    // touch handling inside the floating header behaves like an attached view.
    private void setAttachInfo(View v) {
        if(v == null) {
            return;
        }
        if(mViewAttachInfo != null) {
            ReflectionUtils.setFieldValue(View.class, "mAttachInfo", v, mViewAttachInfo);
        }
        if(v instanceof ViewGroup) {
            final ViewGroup viewGroup = (ViewGroup) v;
            for(int i = 0; i < viewGroup.getChildCount(); i++) {
                setAttachInfo(viewGroup.getChildAt(i));
            }
        }
    }

    // Invokes AbsListView's private positionSelector over the floating header
    // (signature differs before/after ICS), then cancels the pending tap action.
    private void positionSelectorOnFloatingGroup() {
        if(mShouldPositionSelector && mFloatingGroupView != null) {
            if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
                final int floatingGroupFlatPosition = getFlatListPosition(getPackedPositionForGroup(mFloatingGroupPosition));
                ReflectionUtils.invokeMethod(AbsListView.class, "positionSelector", new Class<?>[]{int.class, View.class}, FloatingGroupExpandableListView.this, floatingGroupFlatPosition, mFloatingGroupView);
            } else {
                ReflectionUtils.invokeMethod(AbsListView.class, "positionSelector", new Class<?>[]{View.class}, FloatingGroupExpandableListView.this, mFloatingGroupView);
            }
            invalidate();
        }
        mShouldPositionSelector = false;
        removeCallbacks(mPositionSelectorOnTapAction);
    }

    // Draws the normal list selector — but skips it when it would land on the
    // row the floating header is covering (the header draws its own).
    private void drawDefaultSelector(Canvas canvas) {
        final int selectorListPosition = mSelectorPosition - getFirstVisiblePosition();

        if(selectorListPosition >= 0 && selectorListPosition < getChildCount() && mSelectorRect != null && !mSelectorRect.isEmpty()) {
            final int floatingGroupFlatPosition = getFlatListPosition(getPackedPositionForGroup(mFloatingGroupPosition));

            if(mFloatingGroupView == null || mSelectorPosition != floatingGroupFlatPosition) {
                drawSelector(canvas);
            }
        }
    }

    // Draws the selector over the floating header's own bounds when it is the
    // selected position.
    private void drawFloatingGroupSelector(Canvas canvas) {
        if(mSelectorRect != null && !mSelectorRect.isEmpty()) {
            final int floatingGroupFlatPosition = getFlatListPosition(getPackedPositionForGroup(mFloatingGroupPosition));

            if(mSelectorPosition == floatingGroupFlatPosition) {
                mSelectorRect.set(mFloatingGroupView.getLeft(), mFloatingGroupView.getTop(), mFloatingGroupView.getRight(), mFloatingGroupView.getBottom());
                drawSelector(canvas);
            }
        }
    }

    // Renders mSelector inside mSelectorRect, clipped to the padded area, with
    // the pressed drawable state when appropriate.
    private void drawSelector(Canvas canvas) {
        canvas.save();
        canvas.clipRect(getPaddingLeft(), getPaddingTop(), getWidth() - getPaddingRight(), getHeight() - getPaddingBottom());
        if(isPressed()) {
            mSelector.setState(getDrawableState());
        } else {
            mSelector.setState(EMPTY_STATE_SET);
        }
        mSelector.setBounds(mSelectorRect);
        mSelector.draw(canvas);
        canvas.restore();
    }

    // Draws the expand/collapse indicator over the floating header, using the
    // indicator drawable and bounds read from ExpandableListView via reflection.
    private void drawFloatingGroupIndicator(Canvas canvas) {
        final Drawable groupIndicator = (Drawable) ReflectionUtils.getFieldValue(ExpandableListView.class, "mGroupIndicator", FloatingGroupExpandableListView.this);
        if(groupIndicator != null) {
            // NOTE(review): per GROUP_STATE_SETS, bit 2 means "empty", yet it is
            // set here when the group HAS children (childrenCount > 0). This
            // looks inverted relative to the framework's convention — confirm
            // against Android's ExpandableListView before changing, since
            // existing themes may depend on the current behavior.
            final int stateSetIndex =
                (mAdapter.isGroupExpanded(mFloatingGroupPosition) ? 1 : 0) | // Expanded?
                (mAdapter.getChildrenCount(mFloatingGroupPosition) > 0 ? 2 : 0); // Empty?
            groupIndicator.setState(GROUP_STATE_SETS[stateSetIndex]);

            final int indicatorLeft = (Integer) ReflectionUtils.getFieldValue(ExpandableListView.class, "mIndicatorLeft", FloatingGroupExpandableListView.this);
            final int indicatorRight = (Integer) ReflectionUtils.getFieldValue(ExpandableListView.class, "mIndicatorRight", FloatingGroupExpandableListView.this);

            // From ICS on, indicator bounds are relative to the padded area.
            if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
                mIndicatorRect.set(indicatorLeft + getPaddingLeft(), mFloatingGroupView.getTop(), indicatorRight + getPaddingLeft(), mFloatingGroupView.getBottom());
            } else {
                mIndicatorRect.set(indicatorLeft, mFloatingGroupView.getTop(), indicatorRight, mFloatingGroupView.getBottom());
            }

            groupIndicator.setBounds(mIndicatorRect);
            groupIndicator.draw(canvas);
        }
    }

    /** Callback reporting the floating header's vertical offset on each scroll pass. */
    public interface OnScrollFloatingGroupListener {
        public void onScrollFloatingGroupListener(View floatingGroupView, int scrollY);
    }

    public int getFloatingGroupPosition() {
        return mFloatingGroupPosition;
    }

    public View getFloatingGroupView() {
        return mFloatingGroupView;
    }
}
|
package com.stanleyidesis.quotograph.api.controller;
import com.stanleyidesis.quotograph.BuildConfig;
/**
 * Provides the process-wide {@code LWQLogger} instance, created lazily on
 * first access: a debug logger in DEBUG builds, the production implementation
 * otherwise.
 */
public class LWQLoggerHelper {

    static LWQLogger logger;

    /** Returns the shared logger, creating it on the first call. */
    public static LWQLogger get() {
        if (logger == null) {
            logger = BuildConfig.DEBUG ? new LWQDebugLogger() : new LWQLoggerImpl();
        }
        return logger;
    }
}
|
package uk.ac.kent.dover.fastGraph;
import java.io.*;
import java.nio.*;
import java.nio.channels.*;
import java.util.*;
import org.json.*;
import uk.ac.kent.displayGraph.*;
public class FastGraph {
    // ---- Byte offsets of the fields packed into each fixed-width node record ----
    protected static final int NODE_LABEL_START_OFFSET = 0; // integer
    protected static final int NODE_LABEL_LENGTH_OFFSET = 4; // short
    protected static final int NODE_IN_CONNECTION_START_OFFSET = 6; // integer
    protected static final int NODE_IN_DEGREE_OFFSET = 10; // integer
    protected static final int NODE_OUT_CONNECTION_START_OFFSET = 14; // integer
    protected static final int NODE_OUT_DEGREE_OFFSET = 18; // integer
    protected static final int NODE_WEIGHT_OFFSET = 22; // integer
    protected static final int NODE_TYPE_OFFSET = 26; // byte
    protected static final int NODE_AGE_OFFSET = 27; // byte

    // ---- Byte offsets of the fields packed into each fixed-width edge record ----
    protected static final int EDGE_NODE1_OFFSET = 0; // integer
    protected static final int EDGE_NODE2_OFFSET = 4; // integer
    protected static final int EDGE_LABEL_START_OFFSET = 8; // integer
    protected static final int EDGE_LABEL_LENGTH_OFFSET = 12; // short
    protected static final int EDGE_WEIGHT_OFFSET = 14; // integer
    protected static final int EDGE_TYPE_OFFSET = 18; // byte
    protected static final int EDGE_AGE_OFFSET = 19; // byte

    // ---- Offsets within one (edge, node) connection pair ----
    protected static final int CONNECTION_EDGE_OFFSET = 0; // integer, edge is first of the pair
    protected static final int CONNECTION_NODE_OFFSET = 4; // integer, node is straight after the edge

    // Used when sizing label buffers before the actual label lengths are known.
    public static final int DEFAULT_AVERAGE_LABEL_LENGTH = 20;

    // Record sizes; must stay in sync with the last offset + field width above.
    protected static final int NODE_BYTE_SIZE = 28;
    protected static final int EDGE_BYTE_SIZE = 20;
    protected static final int CONNECTION_PAIR_SIZE = 8; // this is an edge index plus an node index

    // Separator token used when serializing graph metadata.
    public static final String INFO_SPLIT_STRING = "~";

    // Kept a little below Integer.MAX_VALUE to leave headroom for ByteBuffer limits.
    public static final int MAX_BYTE_BUFFER_SIZE = Integer.MAX_VALUE-5000;

    // Backing stores: fixed-width records for nodes/edges, (edge,node) pairs
    // for adjacency, and packed character data for labels.
    private ByteBuffer nodeBuf;
    private ByteBuffer edgeBuf;
    private ByteBuffer connectionBuf;
    private ByteBuffer nodeLabelBuf;
    private ByteBuffer edgeLabelBuf;

    private int numberOfNodes;
    private int numberOfEdges;
    private String name = "";
    // true = off-heap (direct) ByteBuffers; false = on-heap.
    private boolean direct;
    /**
     * No direct access to constructor, as a number of data structures need to be created when
     * graph nodes and edges are added. Use the static factory methods
     * (e.g. the random/adjacency-list/buffer-loading factories) instead.
     *
     * @param nodeTotal the number of nodes in the graph
     * @param edgeTotal the number of edges in the graph
     * @param direct if true then off heap ByteBuffers, if false then on heap ByteBuffers
     */
    private FastGraph(int nodeTotal, int edgeTotal, boolean direct) {
        this.numberOfNodes = nodeTotal;
        this.numberOfEdges = edgeTotal;
        this.direct = direct;
        // init() allocates the node/edge/connection/label buffers sized from the totals above.
        init();
    }
/**
* @param args not used
* @throws Exception
*/
public static void main(String[] args) throws Exception {
long time;
// FastGraph g1 = randomGraphFactory(1,0,false);
// FastGraph g1 = randomGraphFactory(2,1,false);
// FastGraph g1 = randomGraphFactory(5,6,1,true);
// FastGraph g1 = randomGraphFactory(8,9,1,false);
// FastGraph g1 = randomGraphFactory(100,1000,1,false); // 1 hundred nodes, 1 thousand edges
// FastGraph g1 = randomGraphFactory(10000,100000,1,false); // 10 thousand nodes, 100 thousand edges
// FastGraph g1 = randomGraphFactory(100000,1000000,1,false); // 100 thousand nodes, 1 million edges
// FastGraph g1 = randomGraphFactory(1000000,10000000,1,false); // 1 million nodes, 10 million edges
// FastGraph g1 = randomGraphFactory(5000000,50000000,1,false); // limit for edgeLabelBuf at 20 chars per label
// FastGraph g1 = randomGraphFactory(4847571,68993773,1,false); // Size of LiveJournal1 example from SNAP
// FastGraph g1 = randomGraphFactory(10000000,100000000,1,false); // 10 million nodes, 100 million edges, close to edgeBuf limit, but fails on heap space with 14g, but pass with heap space of 30g
time = System.currentTimeMillis();
// FastGraph g1 = adjacencyListGraphFactory(7115,103689,null,"Wiki-Vote.txt",false);
// FastGraph g1 = adjacencyListGraphFactory(36692,367662,null,"Email-Enron1.txt",false);
// FastGraph g1 = adjacencyListGraphFactory(81306,2420766,null,"twitter_combined.txt",false); // SNAP web page gives 1768149 edges
// FastGraph g1 = adjacencyListGraphFactory(1696415,11095298,null,"as-skitter.txt",false);
// FastGraph g1 = adjacencyListGraphFactory(1632803,30622564,null,"soc-pokec-relationships.txt",false);
// FastGraph g1 = adjacencyListGraphFactory(4847571,68993773,null,"soc-LiveJournal1.txt",false);
/*
for(int n = 0; n < g1.getNumberOfNodes(); n++) {
System.out.println(n +" "+g1.getNodeLabel(n)+" outEdgelist "+Util.convertArray(g1.getNodeConnectingOutEdges(n))+" inEdgelist "+Util.convertArray(g1.getNodeConnectingInEdges(n)));
}
for(int e = 0; e < g1.getNumberOfEdges(); e++) {
System.out.println(e +" "+g1.getEdgeLabel(e)+" node1 "+g1.getEdgeNode1(e)+" node2 "+g1.getEdgeNode2(e));
}
*/
//System.out.println("snap load time " + (System.currentTimeMillis()-time)/1000.0+" seconds");
/*
time = System.currentTimeMillis();
g1.saveBuffers(null,g1.getName());
System.out.println("saveBuffers test time " + (System.currentTimeMillis()-time)/1000.0+" seconds");
time = System.currentTimeMillis();
*/
//String name = "random-n-100-e-1000";
//String name = "as-skitter.txt";
//String name = "soc-pokec-relationships.txt-short";
String name = "soc-pokec-relationships.txt-veryshort-veryshort";
//String name = "twitter_combined.txt";
//String name = "Wiki-Vote.txt";
// String name = g1.getName();
FastGraph g2;
try {
//time = System.currentTimeMillis();
g2 = loadBuffersGraphFactory(null,name);
System.out.println("CONSISTENT g2 "+g2.checkConsistency());
time = System.currentTimeMillis();
LinkedList<Integer> nodes = new LinkedList<Integer>();
LinkedList<Integer> edges = new LinkedList<Integer>();
//FastGraph g3 = g2.removeNodesAndEdgesFromGraph(nodes,edges,90,800);
//FastGraph g3 = g2.removeNodesAndEdgesFromGraph(nodes,edges,1500000,10000000);
FastGraph g3 = g2;
long deletionTime = (long) ((System.currentTimeMillis()-time)/1000.0);
// System.out.println("deletion test time " + (System.currentTimeMillis()-time)/1000.0+" seconds");
System.out.println("CONSISTENT g3 a "+g3.checkConsistency());
time = System.currentTimeMillis();
g3.relabelFastGraph(g3.getNumberOfNodes()/10);
// System.out.println("relabelling test time " + (System.currentTimeMillis()-time)/1000.0+" seconds");
// System.out.println("deletion test time (from before) " + deletionTime+" seconds");
// System.out.println("New graph has: nodes: " + g3.getNumberOfNodes() + " and edges: " + g3.getNumberOfEdges());
time = System.currentTimeMillis();
g3.setName(g2.getName()+"-short");
g3.saveBuffers(null,g3.getName()+"-pjr");
System.out.println("CONSISTENT g3 b "+g3.checkConsistency());
// System.out.println("saveBuffers test time " + (System.currentTimeMillis()-time)/1000.0+" seconds");
FastGraph g4 = loadBuffersGraphFactory(null,g3.getName()+"-pjr");
System.out.println("CONSISTENT g4 "+g4.checkConsistency());
//just for testing
System.out.println();
/*
System.out.println("graph now has the labels (taken from the buffer):");
FastGraphNodeType[] ntypes = FastGraphNodeType.values();
for(int j = 0; j < g4.getNumberOfNodes(); j++) {
byte type = g4.getNodeType(j);
System.out.println(g4.getNodeLabel(j) + " " + type + " (" + ntypes[type] + ")");
}
System.out.println();
System.out.println("edges now have the types (taken from the buffer):");
for(int j = 0; j < g4.getNumberOfEdges(); j++) {
byte type = g4.getEdgeType(j);
System.out.println("n1" + g4.getEdgeNode1(j) + " n2" + g4.getEdgeNode2(j) + " type " + type + " (" + g4.getEdgeLabel(j) + ")");
}
*/
// int[] degrees = g2.countInstancesOfNodeDegrees(4);
// System.out.println(Arrays.toString(degrees));
} catch (Exception e) {
e.printStackTrace();
}
}
/**
 * Relabels the current FastGraph with the family groups in subgraphs/families folder.<br>
 * Any remaining nodes and edges are labelled randomly.
 * Each family is tested against each of the induced subgraphs and will be relabeled if the two are isomorphic.
 * Side effect: every node label, edge label, node type and edge type in this graph is overwritten.
 *
 * @param subgraphsToTest How many subgraphs will be induced for each family.
 *
 * @throws Exception If there is a problem loading the family subgraphs, or if there is a problem inducing a subgraph
 */
public void relabelFastGraph(int subgraphsToTest) throws Exception{
	System.out.println("Relabelling FastGraph");
	long time = System.currentTimeMillis(); // NOTE(review): recorded but never reported — presumably left over from timing prints
	//staging arrays for the new node and edge labels (null until assigned)
	String[] nodeLabels = new String[this.getNumberOfNodes()];
	String[] edgeLabels = new String[this.getNumberOfEdges()];
	//staging arrays for the new node and edge types (0 until assigned)
	byte[] nodeTypes = new byte[this.getNumberOfNodes()];
	byte[] edgeTypes = new byte[this.getNumberOfEdges()];
	//create induction class
	InducedSubgraph is = new InducedSubgraph(this);
	//create Name Picker class
	NamePicker np = new NamePicker();
	//number of families found
	int fams = 0;
	//load family subgraphs
	FastGraph[] families = loadFamilies();
	for(FastGraph family : families) {
		System.out.println("Testing family " + family.getName());
		ExactIsomorphism ei = new ExactIsomorphism(family);
		int familyNodesSize = family.getNumberOfNodes();
		for (int i = 0; i < subgraphsToTest; i++) { //induce subgraphs to test
			if(i%10000 == 0) {
				System.out.println("Testing subgraph " + i);
			}
			LinkedList<Integer> subNodes = new LinkedList<Integer>();
			LinkedList<Integer> subEdges = new LinkedList<Integer>();
			//create a subgraph and build to a FastGraph
			is.createInducedSubgraph(subNodes, subEdges, familyNodesSize);
			FastGraph subgraph = this.generateGraphFromSubgraph(Util.convertLinkedList(subNodes), Util.convertLinkedList(subEdges));
			//is this FastGraph isomorphic to the one in the constructor (i.e. the family)
			boolean isomorphic = ei.isomorphic(subgraph);
			if(isomorphic) {
				fams++;
				//rename original graph
				//pick a surname, so all family members have the same surname
				String surname = np.getSurname();
				// System.out.println("Family name: " + surname);
				for(int n : subNodes) {
					nodeLabels[n] = np.getForename() + " " + surname;
					nodeTypes[n] = FastGraphNodeType.CHILD.getValue();
				}
				//set the parents
				//assumes the first two induced nodes are the parents — TODO confirm against the family subgraph files
				nodeTypes[subNodes.get(0)] = FastGraphNodeType.PARENT.getValue();
				nodeTypes[subNodes.get(1)] = FastGraphNodeType.PARENT.getValue();
				//label the edges with types rather than names
				for(int e : subEdges) {
					//if this is the parent's relationship (edge joins the two parents, either direction)
					if ((getEdgeNode1(e) == subNodes.get(0) && getEdgeNode2(e) == subNodes.get(1)) ||
							(getEdgeNode1(e) == subNodes.get(1) && getEdgeNode2(e) == subNodes.get(0))) {
						edgeTypes[e] = FastGraphEdgeType.MARRIED.getValue();
						edgeLabels[e] = FastGraphEdgeType.MARRIED.toString();
					//if this is the parent child relationship (exactly one endpoint is a parent)
					} else if (getEdgeNode1(e) == subNodes.get(0) || getEdgeNode1(e) == subNodes.get(1) ||
							getEdgeNode2(e) == subNodes.get(0) || getEdgeNode2(e) == subNodes.get(1)) {
						edgeTypes[e] = FastGraphEdgeType.PARENT.getValue();
						edgeLabels[e] = FastGraphEdgeType.PARENT.toString();
					//otherwise these are siblings
					} else {
						edgeTypes[e] = FastGraphEdgeType.SIBLING.getValue();
						edgeLabels[e] = FastGraphEdgeType.SIBLING.toString();
					}
				}//end for each subEdge
			} //end if isomorphic
		}//end for each subgraph to test
	}//end foreach family
	System.out.println("## Number of families found: " + fams);
	//replace the blanks with other names
	for(int j = 0; j < nodeLabels.length; j++) {
		if (nodeLabels[j] == null) {
			nodeLabels[j] = np.getName();
		}
	}
	//Leave node types as they are - these are not used
	//seeded from the node buffer, so relabelling is deterministic for a given graph
	Random r = new Random(nodeBuf.getLong(0));
	//replace the blanks with other edge types
	FastGraphEdgeType[] values = FastGraphEdgeType.values();
	for(int j = 0; j < edgeTypes.length; j++) {
		if (edgeTypes[j] == FastGraphEdgeType.UNKNOWN.getValue()) {
			//pick a random relationship
			//assumes the first 4 enum constants are the family relationships — TODO confirm FastGraphEdgeType ordering
			byte relationship = (byte) (r.nextInt(values.length - 4)+4); //ignore the family relationships
			edgeTypes[j] = relationship;
			edgeLabels[j] = values[relationship].toString();
		}
	}
	//Set all node & edge labels, and node & edge types
	this.setAllNodeLabels(nodeLabels);
	this.setAllEdgeLabels(edgeLabels);
	for(int i = 0; i < nodeTypes.length; i++) {
		this.setNodeType(i, nodeTypes[i]);
	}
	for(int i = 0; i < edgeTypes.length; i++) {
		this.setEdgeType(i, edgeTypes[i]);
	}
}
/**
 * Loads the family subgraphs from the subgraphs/families directory.
 * File names are expected to encode the node and edge counts, separated by "-",
 * with the node count at split index 2 and the edge count at split index 4;
 * files that do not match this scheme (e.g. hidden files) are skipped.
 *
 * @return A list of FastGraphs based on the family subgraphs
 * @throws Exception If the directory cannot be read, the node or edge count can't be converted, or the file can't be loaded
 */
public FastGraph[] loadFamilies() throws Exception {
	File folder = new File(Launcher.startingWorkingDirectory+File.separatorChar+"subgraphs"+File.separatorChar+"families");
	File[] listOfFiles = folder.listFiles();
	// listFiles() returns null when the directory does not exist or cannot be read;
	// fail with a clear message rather than a NullPointerException in the loop below
	if(listOfFiles == null) {
		throw new FastGraphException("Cannot read the families directory: "+folder.getPath());
	}
	LinkedList<FastGraph> graphs = new LinkedList<>();
	for (File f : listOfFiles) {
		String[] splits = f.getName().split("-");
		if(splits.length < 5) {
			continue; // stray file that does not follow the naming scheme — skip instead of crashing
		}
		int nodeCount = Integer.parseInt(splits[2]);
		int edgeCount = Integer.parseInt(splits[4]);
		System.out.println(f + " n" + nodeCount + "e" + edgeCount);
		FastGraph g = nodeListEdgeListGraphFactory(nodeCount, edgeCount, folder.getPath() + File.separatorChar + f.getName(), f.getName(), direct);
		System.out.println("new g nodes" + g.getNumberOfNodes());
		graphs.add(g);
	}
	FastGraph[] graphArray = new FastGraph[graphs.size()];
	Util.convertLinkedListObject(graphs, graphArray);
	return graphArray;
}
/**
 * This method creates a new FastGraph of the rough size given in targetNodes and targetEdges. <br>
 * The new graph will never have fewer nodes than the target, but may have fewer edges. <br>
 * If the targets equal the current sizes, this same instance (not a copy) is returned. <br>
 * The nodes and edges lists passed in are populated with the items that were removed. <br>
 * <b>Note: This may take some time to complete</b>
 *
 * @param nodes The list of nodes to be removed; populated by this method
 * @param edges The list of edges to be removed; populated by this method
 * @param targetNodes The target number of nodes
 * @param targetEdges The target number of edges
 * @return A new FastGraph that is roughly the size of the target
 * @throws FastGraphException If there is an exception here, e.g. targetNodes is too big
 */
public FastGraph removeNodesAndEdgesFromGraph(LinkedList<Integer> nodes, LinkedList<Integer> edges, int targetNodes, int targetEdges) throws FastGraphException {
	System.out.println("Suggesting nodes and egdes to remove");
	long time = System.currentTimeMillis();
	int currentTotalNodes = getNumberOfNodes();
	int currentTotalEdges = getNumberOfEdges();
	//if a graph of the same size has been specified, return this graph unchanged
	if (targetNodes == currentTotalNodes && targetEdges == currentTotalEdges) {
		return this;
	}
	//if the node target is too big
	if(targetNodes > currentTotalNodes) {
		throw new FastGraphException("The target node size is too big");
	}
	//if the edge target is too big
	if(targetEdges > currentTotalEdges) {
		throw new FastGraphException("The target edge size is too big");
	}
	int nodeReductionAmount = currentTotalNodes - targetNodes; //how many nodes we need to remove
	int edgeReductionAmount = currentTotalEdges - targetEdges; //how many edges we need to remove
	//LinkedHashSets de-duplicate: an edge queued more than once is only removed once
	LinkedHashSet<Integer> edgesToRemove = new LinkedHashSet<Integer>(); //edges that need removing
	LinkedHashSet<Integer> nodesToRemove = new LinkedHashSet<Integer>(); //nodes that need removing
	System.out.println("Current Nodes: " + currentTotalNodes + " Target Nodes: " + targetNodes);
	System.out.println("Current Edges: " + currentTotalEdges + " Target Edges: " + targetEdges);
	System.out.println("setup test time " + (System.currentTimeMillis()-time)/1000.0+" seconds");
	System.out.println();
	time = System.currentTimeMillis();
	System.out.println("# Starting STEP ONE");
	//STEP ONE:
	//Find a subgraph with the required number of nodes. Remove it
	InducedSubgraph is = new InducedSubgraph(this);
	LinkedList<Integer> subNodes = new LinkedList<Integer>();
	LinkedList<Integer> subEdges = new LinkedList<Integer>();
	is.createInducedSubgraph(subNodes, subEdges, nodeReductionAmount);
	nodesToRemove.addAll(subNodes);
	edgesToRemove.addAll(subEdges);
	//delete all edges connecting to the nodes to be deleted (may overshoot the edge target;
	//nothing later trims the edge set back down)
	for(int n : nodesToRemove) {
		Util.addAll(edgesToRemove, this.getNodeConnectingInEdges(n));
		Util.addAll(edgesToRemove, this.getNodeConnectingOutEdges(n));
	}
	System.out.println("After induction test time " + (System.currentTimeMillis()-time)/1000.0+" seconds");
	System.out.println("nodes to remove size: " + nodesToRemove.size() + " edges to remove size: " + edgesToRemove.size());
	System.out.println();
	//STEP TWO:
	//if we haven't removed enough nodes, remove random whole trees (only when they fit the remaining budget)
	System.out.println("# Starting STEP TWO");
	time = System.currentTimeMillis();
	if(nodeReductionAmount > nodesToRemove.size()) { //could we thread these to make this quicker?
		//seeded from the node buffer, so the choice is deterministic for a given graph
		Random r = new Random(nodeBuf.getLong(1));
		//make local stores, as we might not want to remove these nodes if they are too big
		LinkedHashSet<Integer> localEdgesToRemove = new LinkedHashSet<Integer>(); //edges that need removing
		LinkedHashSet<Integer> localNodesToRemove = new LinkedHashSet<Integer>(); //nodes that need removing
		int chances = 10; //if a tree is too big, then skip it. But only do this 10 times, in case we are stuck
		while(nodeReductionAmount > nodesToRemove.size() && chances > 0) {
			//long time2 = System.currentTimeMillis();
			localNodesToRemove.clear();
			localEdgesToRemove.clear();
			int stillToRemove = nodeReductionAmount - nodesToRemove.size(); //what nodes are left to remove
			this.buildTree(localNodesToRemove, localEdgesToRemove, r, 3);
			//System.out.println("Tree: " + localNodesToRemove);
			//System.out.println("nodeRA: " + nodeReductionAmount + " stillTR: " + stillToRemove + " localNodesToRemove: " + localNodesToRemove.size());
			if (localNodesToRemove.size() <= stillToRemove) {
				nodesToRemove.addAll(localNodesToRemove);
				edgesToRemove.addAll(localEdgesToRemove);
				//delete all edges connecting to the nodes in this tree
				for(int n : localNodesToRemove) {
					Util.addAll(edgesToRemove, this.getNodeConnectingInEdges(n));
					Util.addAll(edgesToRemove, this.getNodeConnectingOutEdges(n));
				}
			} else {
				chances--; //Avoids getting stuck if there are no further options
				continue;
			}
			//System.out.println("After this tree test time " + (System.currentTimeMillis()-time2)/1000.0+" seconds");
		}
	}
	System.out.println("After tree test time " + (System.currentTimeMillis()-time)/1000.0+" seconds");
	System.out.println("nodes to remove size: " + nodesToRemove.size() + " edges to remove size: " + edgesToRemove.size());
	System.out.println();
	//STEP THREE:
	//if we haven't removed enough nodes
	//pick some at random (duplicates with already-chosen nodes are absorbed by the set)
	System.out.println("# Starting STEP THREE");
	time = System.currentTimeMillis();
	if(nodeReductionAmount > nodesToRemove.size()) {
		Random r = new Random(nodeBuf.getLong(2));
		while(nodeReductionAmount > nodesToRemove.size()) {
			int n = r.nextInt(this.getNumberOfNodes());
			nodesToRemove.add(n);
			edgesToRemove.addAll(Util.convertArray(this.getNodeConnectingEdges(n)));
		}
	}
	System.out.println("After node removal test time " + (System.currentTimeMillis()-time)/1000.0+" seconds");
	System.out.println("nodes to remove size: " + nodesToRemove.size() + " edges to remove size: " + edgesToRemove.size());
	System.out.println();
	//STEP FOUR:
	//if we haven't removed enough edges
	//pick some at random
	System.out.println("# Starting STEP FOUR");
	time = System.currentTimeMillis();
	if(edgeReductionAmount > edgesToRemove.size()) {
		Random r = new Random(edgeBuf.getLong(2));
		while(edgeReductionAmount > edgesToRemove.size()) {
			int e = r.nextInt(this.getNumberOfEdges());
			edgesToRemove.add(e);
		}
	}
	System.out.println("After edge removal test time " + (System.currentTimeMillis()-time)/1000.0+" seconds");
	System.out.println("nodes to remove size: " + nodesToRemove.size() + " edges to remove size: " + edgesToRemove.size());
	System.out.println();
	//report the final selection back to the caller through the argument lists
	nodes.addAll(nodesToRemove);
	edges.addAll(edgesToRemove);
	time = System.currentTimeMillis();
	System.out.println("Building new FastGraph");
	FastGraph g = this.generateGraphByDeletingItems(Util.convertLinkedList(nodes), Util.convertLinkedList(edges), false);
	System.out.println("After FastGraph building test time " + (System.currentTimeMillis()-time)/1000.0+" seconds");
	return g;
}
/**
 * Grows a breadth-first "tree" outwards from a randomly chosen start node, collecting
 * every visited node and all of their incident edges into the two argument sets.
 * NOTE(review): at each level only the neighbours of the LAST frontier node are carried
 * forward to the next level (the neighbour array is overwritten inside the loop) —
 * confirm whether this sampling is intentional.
 *
 * @param nodes A LinkedHashSet, populated with the nodes of the tree
 * @param edges A LinkedHashSet, populated with the edges incident to those nodes
 * @param r A random number generator used to pick a starting place.
 * @param depth The depth of the tree. 1 would equal the starting node and its children. 2 would be the same as 1, but with grandchildren.
 */
public void buildTree(LinkedHashSet<Integer> nodes, LinkedHashSet<Integer> edges, Random r, int depth) {
	int root = r.nextInt(this.getNumberOfNodes());
	nodes.add(root);
	edges.addAll(Util.convertArray(this.getNodeConnectingEdges(root)));
	LinkedList<Integer> frontier = new LinkedList<>();
	frontier.add(root);
	for(int level = depth; level != 0; level--) {
		int[] neighbours = new int[0]; // holds the connections of the most recently visited frontier node
		for(int current : frontier) {
			neighbours = this.getNodeConnectingNodes(current);
			for(int n : neighbours) {
				nodes.add(n); // pull every neighbour into the tree
				edges.addAll(Util.convertArray(this.getNodeConnectingEdges(n))); // along with all of its edges
			}
		}
		// next level starts from the last neighbour set only (see NOTE above)
		frontier = Util.convertArray(neighbours);
	}
}
/**
 * Gives the size of the node set.
 *
 * @return the number of nodes in the graph
 */
public int getNumberOfNodes() {
	return this.numberOfNodes;
}
/**
 * Gives the size of the edge set.
 *
 * @return the number of edges in the graph
 */
public int getNumberOfEdges() {
	return this.numberOfEdges;
}
/**
 * Gives the name of this graph, as set on creation or via {@link #setName(String)}.
 *
 * @return the graph name
 */
public String getName() {
	return this.name;
}
/**
 * Reports where the backing buffers live.
 *
 * @return the direct flag, false is on heap, true is off heap
 */
public boolean getDirect() {
	return this.direct;
}
/**
 * Direct access to the raw node storage. Mutating the returned buffer mutates the graph.
 *
 * @return the node ByteBuffer
 */
public ByteBuffer getNodeBuf() {
	return this.nodeBuf;
}
/**
 * Direct access to the raw edge storage. Mutating the returned buffer mutates the graph.
 *
 * @return the edge ByteBuffer
 */
public ByteBuffer getEdgeBuf() {
	return this.edgeBuf;
}
/**
 * Direct access to the raw node label storage (chars, 2 bytes each).
 *
 * @return the node label ByteBuffer
 */
public ByteBuffer getNodeLabelBuf() {
	return this.nodeLabelBuf;
}
/**
 * Direct access to the raw edge label storage (chars, 2 bytes each).
 *
 * @return the edge label ByteBuffer
 */
public ByteBuffer getEdgeLabelBuf() {
	return this.edgeLabelBuf;
}
/**
 * Direct access to the raw connection-list storage (edge/node int pairs).
 *
 * @return the connections ByteBuffer
 */
public ByteBuffer getConnectionBuf() {
	return this.connectionBuf;
}
/**
 * Reads the label of a node out of the node label buffer, using the start
 * offset and length stored in the node record.
 *
 * @param nodeIndex the node
 * @return the node label
 */
public String getNodeLabel(int nodeIndex) {
	int base = nodeIndex*NODE_BYTE_SIZE;
	int start = nodeBuf.getInt(NODE_LABEL_START_OFFSET+base);
	int length = nodeBuf.getShort(NODE_LABEL_LENGTH_OFFSET+base);
	StringBuilder label = new StringBuilder(length);
	for(int i = 0; i < length; i++) {
		label.append(nodeLabelBuf.getChar(start+i*2)); // chars are 2 bytes wide
	}
	return label.toString();
}
/**
 * Reads the weight field from the node record.
 *
 * @param nodeIndex the node
 * @return the node weight
 */
public int getNodeWeight(int nodeIndex) {
	return nodeBuf.getInt(NODE_WEIGHT_OFFSET+nodeIndex*NODE_BYTE_SIZE);
}
/**
 * Reads the type byte from the node record.
 *
 * @param nodeIndex the node
 * @return the node type
 */
public byte getNodeType(int nodeIndex) {
	return nodeBuf.get(NODE_TYPE_OFFSET+nodeIndex*NODE_BYTE_SIZE);
}
/**
 * Reads the age byte from the node record.
 *
 * @param nodeIndex the node
 * @return the node age
 */
public byte getNodeAge(int nodeIndex) {
	return nodeBuf.get(NODE_AGE_OFFSET+nodeIndex*NODE_BYTE_SIZE);
}
/**
 * The total degree: in-degree plus out-degree.
 *
 * @param nodeIndex the node
 * @return the node degree (number of connecting edges)
 */
public int getNodeDegree(int nodeIndex) {
	return getNodeInDegree(nodeIndex) + getNodeOutDegree(nodeIndex);
}
/**
 * Reads the in-degree field from the node record.
 *
 * @param nodeIndex the node
 * @return the node in-degree (number of edges entering the node)
 */
public int getNodeInDegree(int nodeIndex) {
	return nodeBuf.getInt(NODE_IN_DEGREE_OFFSET+nodeIndex*NODE_BYTE_SIZE);
}
/**
 * Reads the out-degree field from the node record.
 *
 * @param nodeIndex the node
 * @return the node out-degree (number of edges leaving the node)
 */
public int getNodeOutDegree(int nodeIndex) {
	return nodeBuf.getInt(NODE_OUT_DEGREE_OFFSET+nodeIndex*NODE_BYTE_SIZE);
}
/**
 * All edges incident to the node, in and out together.
 *
 * @param nodeIndex the node
 * @return all connecting edges
 */
public int[] getNodeConnectingEdges(int nodeIndex) {
	// the in-connections are stored first, so the in offset is the start of the whole list
	int base = nodeBuf.getInt(NODE_IN_CONNECTION_START_OFFSET+nodeIndex*NODE_BYTE_SIZE);
	int degree = getNodeDegree(nodeIndex);
	int[] edges = new int[degree];
	for(int i = 0; i < degree; i++) {
		// each connection is an edge/node pair; take the edge half
		edges[i] = connectionBuf.getInt(base+(i*CONNECTION_PAIR_SIZE)+CONNECTION_EDGE_OFFSET);
	}
	return edges;
}
/**
 * This version puts the connecting edges in the argument array, to avoid repeated object creation and so speed up multiple accesses.
 * Create the array with size of either getNodeDegree(nodeIndex) or maxDegree(); array elements beyond getNodeDegree(nodeIndex)-1 are undefined.
 * Will throw an exception if ret is not large enough.
 *
 * @param ret this is populated with the connecting edges found
 * @param nodeIndex the node
 */
public void getNodeConnectingEdges(int[] ret, int nodeIndex) {
	// the in-connections are stored first, so the in offset is the start of the whole list
	int base = nodeBuf.getInt(NODE_IN_CONNECTION_START_OFFSET+nodeIndex*NODE_BYTE_SIZE);
	int degree = getNodeDegree(nodeIndex);
	for(int i = 0; i < degree; i++) {
		// each connection is an edge/node pair; take the edge half
		ret[i] = connectionBuf.getInt(base+(i*CONNECTION_PAIR_SIZE)+CONNECTION_EDGE_OFFSET);
	}
}
/**
 * All neighbours of the node, across both in- and out-edges.
 *
 * @param nodeIndex the node
 * @return all node neighbours.
 */
public int[] getNodeConnectingNodes(int nodeIndex) {
	// the in-connections are stored first, so the in offset is the start of the whole list
	int base = nodeBuf.getInt(NODE_IN_CONNECTION_START_OFFSET+nodeIndex*NODE_BYTE_SIZE);
	int degree = getNodeDegree(nodeIndex);
	int[] neighbours = new int[degree];
	for(int i = 0; i < degree; i++) {
		// each connection is an edge/node pair; take the node half
		neighbours[i] = connectionBuf.getInt(base+(i*CONNECTION_PAIR_SIZE)+CONNECTION_NODE_OFFSET);
	}
	return neighbours;
}
/**
 * This version puts the connecting nodes in the argument array, to avoid repeated object creation and so speed up multiple accesses.
 * Create the array with size of either getNodeDegree(nodeIndex) or maxDegree(); array elements beyond getNodeDegree(nodeIndex)-1 are undefined.
 * Will throw an exception if ret is not large enough.
 *
 * @param ret this is populated with the connecting nodes found
 * @param nodeIndex the node
 */
public void getNodeConnectingNodes(int[] ret, int nodeIndex) {
	// the in-connections are stored first, so the in offset is the start of the whole list
	int base = nodeBuf.getInt(NODE_IN_CONNECTION_START_OFFSET+nodeIndex*NODE_BYTE_SIZE);
	int degree = getNodeDegree(nodeIndex);
	for(int i = 0; i < degree; i++) {
		// each connection is an edge/node pair; take the node half
		ret[i] = connectionBuf.getInt(base+(i*CONNECTION_PAIR_SIZE)+CONNECTION_NODE_OFFSET);
	}
}
/**
 * For directed graphs.
 *
 * @param nodeIndex the node
 * @return all connecting edges that enter the node
 */
public int[] getNodeConnectingInEdges(int nodeIndex) {
	int base = nodeBuf.getInt(NODE_IN_CONNECTION_START_OFFSET+nodeIndex*NODE_BYTE_SIZE);
	int inDegree = getNodeInDegree(nodeIndex);
	int[] edges = new int[inDegree];
	for(int i = 0; i < inDegree; i++) {
		// each connection is an edge/node pair; take the edge half
		edges[i] = connectionBuf.getInt(base+(i*CONNECTION_PAIR_SIZE)+CONNECTION_EDGE_OFFSET);
	}
	return edges;
}
/**
 * For directed graphs.
 * This version puts the connecting edges in the argument array, to avoid repeated object creation and so speed up multiple accesses.
 * Create the array with size of either getNodeInDegree(nodeIndex) or maxDegree(); array elements beyond getNodeInDegree(nodeIndex)-1 are undefined.
 * Will throw an exception if ret is not large enough.
 *
 * @param ret this is populated with the edges that enter the node
 * @param nodeIndex the node
 */
public void getNodeConnectingInEdges(int[] ret, int nodeIndex) {
	int base = nodeBuf.getInt(NODE_IN_CONNECTION_START_OFFSET+nodeIndex*NODE_BYTE_SIZE);
	int inDegree = getNodeInDegree(nodeIndex);
	for(int i = 0; i < inDegree; i++) {
		// each connection is an edge/node pair; take the edge half
		ret[i] = connectionBuf.getInt(base+(i*CONNECTION_PAIR_SIZE)+CONNECTION_EDGE_OFFSET);
	}
}
/**
 * For directed graphs.
 *
 * @param nodeIndex the node
 * @return all node neighbours that are on the end of edges that enter the node.
 */
public int[] getNodeConnectingInNodes(int nodeIndex) {
	int base = nodeBuf.getInt(NODE_IN_CONNECTION_START_OFFSET+nodeIndex*NODE_BYTE_SIZE);
	int inDegree = getNodeInDegree(nodeIndex);
	int[] neighbours = new int[inDegree];
	for(int i = 0; i < inDegree; i++) {
		// each connection is an edge/node pair; take the node half
		neighbours[i] = connectionBuf.getInt(base+(i*CONNECTION_PAIR_SIZE)+CONNECTION_NODE_OFFSET);
	}
	return neighbours;
}
/**
 * For directed graphs.
 * This version puts the connecting nodes in the argument array, to avoid repeated object creation and so speed up multiple accesses.
 * Create the array with size of either getNodeInDegree(nodeIndex) or maxDegree(); array elements beyond getNodeInDegree(nodeIndex)-1 are undefined.
 * Will throw an exception if ret is not large enough.
 *
 * @param ret this is populated with the neighbours on the end of edges that enter the node
 * @param nodeIndex the node
 */
public void getNodeConnectingInNodes(int[] ret, int nodeIndex) {
	int base = nodeBuf.getInt(NODE_IN_CONNECTION_START_OFFSET+nodeIndex*NODE_BYTE_SIZE);
	int inDegree = getNodeInDegree(nodeIndex);
	for(int i = 0; i < inDegree; i++) {
		// each connection is an edge/node pair; take the node half
		ret[i] = connectionBuf.getInt(base+(i*CONNECTION_PAIR_SIZE)+CONNECTION_NODE_OFFSET);
	}
}
/**
 * For directed graphs.
 *
 * @param nodeIndex the node
 * @return all edges that leave the node.
 */
public int[] getNodeConnectingOutEdges(int nodeIndex) {
	int base = nodeBuf.getInt(NODE_OUT_CONNECTION_START_OFFSET+nodeIndex*NODE_BYTE_SIZE);
	int outDegree = getNodeOutDegree(nodeIndex);
	int[] edges = new int[outDegree];
	for(int i = 0; i < outDegree; i++) {
		// each connection is an edge/node pair; take the edge half
		edges[i] = connectionBuf.getInt(base+(i*CONNECTION_PAIR_SIZE)+CONNECTION_EDGE_OFFSET);
	}
	return edges;
}
/**
 * For directed graphs.
 * This version puts the connecting edges in the argument array, to avoid repeated object creation and so speed up multiple accesses.
 * Create the array with size of either getNodeOutDegree(nodeIndex) or maxDegree(); array elements beyond getNodeOutDegree(nodeIndex)-1 are undefined.
 * Will throw an exception if ret is not large enough.
 *
 * @param ret this is populated with the edges that leave the node
 * @param nodeIndex the node
 */
public void getNodeConnectingOutEdges(int[] ret, int nodeIndex) {
	int base = nodeBuf.getInt(NODE_OUT_CONNECTION_START_OFFSET+nodeIndex*NODE_BYTE_SIZE);
	int outDegree = getNodeOutDegree(nodeIndex);
	for(int i = 0; i < outDegree; i++) {
		// each connection is an edge/node pair; take the edge half
		ret[i] = connectionBuf.getInt(base+(i*CONNECTION_PAIR_SIZE)+CONNECTION_EDGE_OFFSET);
	}
}
/**
 * For directed graphs.
 *
 * @param nodeIndex the node
 * @return all node neighbours that are on the end of edges that leave the passed node.
 */
public int[] getNodeConnectingOutNodes(int nodeIndex) {
	int base = nodeBuf.getInt(NODE_OUT_CONNECTION_START_OFFSET+nodeIndex*NODE_BYTE_SIZE);
	int outDegree = getNodeOutDegree(nodeIndex);
	int[] neighbours = new int[outDegree];
	for(int i = 0; i < outDegree; i++) {
		// each connection is an edge/node pair; take the node half
		neighbours[i] = connectionBuf.getInt(base+(i*CONNECTION_PAIR_SIZE)+CONNECTION_NODE_OFFSET);
	}
	return neighbours;
}
/**
 * For directed graphs.
 * This version puts the connecting nodes in the argument array, to avoid repeated object creation and so speed up multiple accesses.
 * Create the array with size of either getNodeOutDegree(nodeIndex) or maxDegree(); array elements beyond getNodeOutDegree(nodeIndex)-1 are undefined.
 * Will throw an exception if ret is not large enough.
 *
 * @param ret this is populated with the neighbours on the end of edges that leave the node
 * @param nodeIndex the node
 */
public void getNodeConnectingOutNodes(int[] ret, int nodeIndex) {
	int base = nodeBuf.getInt(NODE_OUT_CONNECTION_START_OFFSET+nodeIndex*NODE_BYTE_SIZE);
	int outDegree = getNodeOutDegree(nodeIndex);
	for(int i = 0; i < outDegree; i++) {
		// each connection is an edge/node pair; take the node half
		ret[i] = connectionBuf.getInt(base+(i*CONNECTION_PAIR_SIZE)+CONNECTION_NODE_OFFSET);
	}
}
/**
 * Reads the label of an edge out of the edge label buffer, using the start
 * offset and length stored in the edge record.
 *
 * @param edgeIndex the edge
 * @return the edge label
 */
public String getEdgeLabel(int edgeIndex) {
	int base = edgeIndex*EDGE_BYTE_SIZE;
	int start = edgeBuf.getInt(EDGE_LABEL_START_OFFSET+base);
	int length = edgeBuf.getShort(EDGE_LABEL_LENGTH_OFFSET+base);
	StringBuilder label = new StringBuilder(length);
	for(int i = 0; i < length; i++) {
		label.append(edgeLabelBuf.getChar(start+i*2)); // chars are 2 bytes wide
	}
	return label.toString();
}
/**
 * Reads the first endpoint from the edge record.
 *
 * @param edgeIndex the edge
 * @return the first connecting node (the node the edge leaves for directed graphs).
 */
public int getEdgeNode1(int edgeIndex) {
	return edgeBuf.getInt(EDGE_NODE1_OFFSET+edgeIndex*EDGE_BYTE_SIZE);
}
/**
 * Reads the second endpoint from the edge record.
 *
 * @param edgeIndex the edge
 * @return the second connecting node (the node the edge enters for directed graphs).
 */
public int getEdgeNode2(int edgeIndex) {
	return edgeBuf.getInt(EDGE_NODE2_OFFSET+edgeIndex*EDGE_BYTE_SIZE);
}
/**
 * Reads the weight field from the edge record.
 *
 * @param edgeIndex the edge
 * @return the edge weight
 */
public int getEdgeWeight(int edgeIndex) {
	return edgeBuf.getInt(EDGE_WEIGHT_OFFSET+edgeIndex*EDGE_BYTE_SIZE);
}
/**
 * Reads the type byte from the edge record.
 *
 * @param edgeIndex the edge
 * @return the edge type
 */
public byte getEdgeType(int edgeIndex) {
	return edgeBuf.get(EDGE_TYPE_OFFSET+edgeIndex*EDGE_BYTE_SIZE);
}
/**
 * Reads the age byte from the edge record.
 *
 * @param edgeIndex the edge
 * @return the edge age
 */
public byte getEdgeAge(int edgeIndex) {
	return edgeBuf.get(EDGE_AGE_OFFSET+edgeIndex*EDGE_BYTE_SIZE);
}
/**
 * Sets the graph name. Only the in-memory field is changed; no saved buffers are renamed.
 * Names should be simple alphanumeric. Spaces and dashes are permitted. Note that tilde ("~") cannot be used.
 * @param name the name of the graph
 */
public void setName(String name) {
	this.name = name;
}
/**
 * Writes the weight directly into the node record in the node buffer.
 * @param nodeIndex the node
 * @param weight the new node weight
 */
public void setNodeWeight(int nodeIndex, int weight) {
	nodeBuf.putInt(NODE_WEIGHT_OFFSET+nodeIndex*NODE_BYTE_SIZE, weight);
}
/**
 * Writes the type byte directly into the node record in the node buffer.
 * @param nodeIndex the node
 * @param type the new node type
 */
public void setNodeType(int nodeIndex, byte type) {
	nodeBuf.put(NODE_TYPE_OFFSET+nodeIndex*NODE_BYTE_SIZE, type);
}
/**
 * Writes the age byte directly into the node record in the node buffer.
 * @param nodeIndex the node
 * @param age the new node age
 */
public void setNodeAge(int nodeIndex, byte age) {
	nodeBuf.put(NODE_AGE_OFFSET+nodeIndex*NODE_BYTE_SIZE, age);
}
/**
 * Writes the weight directly into the edge record in the edge buffer.
 * @param edgeIndex the edge
 * @param weight the new edge weight
 */
public void setEdgeWeight(int edgeIndex, int weight) {
	edgeBuf.putInt(EDGE_WEIGHT_OFFSET+edgeIndex*EDGE_BYTE_SIZE, weight);
}
/**
 * Writes the type byte directly into the edge record in the edge buffer.
 * @param edgeIndex the edge
 * @param type the new edge type
 */
public void setEdgeType(int edgeIndex, byte type) {
	edgeBuf.put(EDGE_TYPE_OFFSET+edgeIndex*EDGE_BYTE_SIZE, type);
}
/**
 * Writes the age byte directly into the edge record in the edge buffer.
 * @param edgeIndex the edge
 * @param age the new edge age
 */
public void setEdgeAge(int edgeIndex, byte age) {
	edgeBuf.put(EDGE_AGE_OFFSET+edgeIndex*EDGE_BYTE_SIZE, age);
}
/**
 * Change all the node labels in the graph. Creates a new nodeLabelBuf, changes the label pointers in nodeBuf.
 *
 * @param labels Must contain the same number of labels as number of nodes in the graph; elements must be non-null
 * @throws OutOfMemoryError if the combined labels need more than MAX_BYTE_BUFFER_SIZE bytes
 */
public void setAllNodeLabels(String[] labels) {
	//first pass: total up the label lengths so the buffer can be sized exactly
	long totalLabelLength = 0;
	for(int i = 0; i < numberOfNodes; i++) {
		totalLabelLength += labels[i].length();
	}
	//2 bytes per char; this check also guards the int cast below
	if(totalLabelLength*2 > MAX_BYTE_BUFFER_SIZE) {
		throw new OutOfMemoryError("Tried to create a nodeLabelBuf with too many chars");
	}
	int bufSize = (int)(totalLabelLength*2); // this cast is safe because of the previous test
	if(!direct) {
		nodeLabelBuf = ByteBuffer.allocate(bufSize);
	} else {
		nodeLabelBuf = ByteBuffer.allocateDirect(bufSize);
	}
	nodeLabelBuf.clear();
	//second pass: write each label and record its start/length in the node record
	int labelOffset = 0;
	for(int i = 0; i < numberOfNodes; i++) {
		String label = labels[i];
		char[] labelArray = label.toCharArray();
		short labelLength = (short)(labelArray.length); // NOTE: lengths beyond Short.MAX_VALUE would wrap — assumed not to occur
		nodeBuf.putInt(NODE_LABEL_START_OFFSET+i*NODE_BYTE_SIZE,labelOffset); // label start
		nodeBuf.putShort(NODE_LABEL_LENGTH_OFFSET+i*NODE_BYTE_SIZE,labelLength); // label size
		for(int j = 0; j < labelArray.length; j++) {
			char c = labelArray[j];
			nodeLabelBuf.putChar(labelOffset,c);
			labelOffset += 2; // increment by 2 as it is a char (2 bytes)
		}
	}
}
/**
 * Replaces every edge label in the graph. Allocates a fresh edgeLabelBuf and
 * rewrites the label start/length pointers in edgeBuf.
 *
 * @param labels must contain one label per edge in the graph, indexed by edge index
 * @throws OutOfMemoryError if the combined label chars need more bytes than MAX_BYTE_BUFFER_SIZE
 */
public void setAllEdgeLabels(String[] labels) {
    long charCount = 0;
    for(int i = 0; i < numberOfEdges; i++) {
        charCount += labels[i].length();
    }
    long byteCount = charCount*2; // two bytes per char
    if(byteCount > MAX_BYTE_BUFFER_SIZE) {
        throw new OutOfMemoryError("Tried to create a edgeLabelBuf with too many chars");
    }
    // the cast is safe: just checked against MAX_BYTE_BUFFER_SIZE
    if(direct) {
        edgeLabelBuf = ByteBuffer.allocateDirect((int)byteCount);
    } else {
        edgeLabelBuf = ByteBuffer.allocate((int)byteCount);
    }
    edgeLabelBuf.clear();
    int pos = 0; // running byte offset into edgeLabelBuf
    for(int i = 0; i < numberOfEdges; i++) {
        String label = labels[i];
        edgeBuf.putInt(EDGE_LABEL_START_OFFSET+i*EDGE_BYTE_SIZE, pos); // label start
        edgeBuf.putShort(EDGE_LABEL_LENGTH_OFFSET+i*EDGE_BYTE_SIZE, (short)label.length()); // label size
        for(int j = 0; j < label.length(); j++) {
            edgeLabelBuf.putChar(pos, label.charAt(j));
            pos += 2;
        }
    }
}
/**
 * Gets the node at the other end of the edge from the given node.
 * If the given node is not connected to the edge, an undefined node
 * will be returned.
 *
 * @param edge the edge
 * @param node the known node
 * @return the node on the opposite side of the edge
 */
public int oppositeEnd(int edge, int node) {
    int endOne = getEdgeNode1(edge);
    int endTwo = getEdgeNode2(edge);
    // matches the first end exactly; anything else yields the first end
    return (endOne == node) ? endTwo : endOne;
}
/**
 * Allocates the node, edge and connection ByteBuffers. The label ByteBuffers
 * are created later, by setAllNodeLabels/setAllEdgeLabels.
 */
private void init() {
    int nodeBytes = numberOfNodes*NODE_BYTE_SIZE;
    int edgeBytes = numberOfEdges*EDGE_BYTE_SIZE;
    int connectionBytes = numberOfEdges*2*CONNECTION_PAIR_SIZE; // each edge appears in two connection lists
    if(direct) {
        nodeBuf = ByteBuffer.allocateDirect(nodeBytes);
        edgeBuf = ByteBuffer.allocateDirect(edgeBytes);
        connectionBuf = ByteBuffer.allocateDirect(connectionBytes);
    } else {
        nodeBuf = ByteBuffer.allocate(nodeBytes);
        edgeBuf = ByteBuffer.allocate(edgeBytes);
        connectionBuf = ByteBuffer.allocate(connectionBytes);
    }
    nodeBuf.clear();
    edgeBuf.clear();
    connectionBuf.clear();
}
/**
 * Creates a FastGraph from a json string. The node and edge counts are taken
 * to be one more than the largest index found in the json arrays.
 *
 * @param json the json as a string
 * @param direct if true then off heap ByteBuffers, if false then on heap ByteBuffers
 * @return the created FastGraph.
 */
public static FastGraph jsonStringGraphFactory(String json, boolean direct) {
    JSONObject jsonObj = new JSONObject(json);
    String graphName = jsonObj.getString("name");
    int nodeCount = 0;
    for(Object o : jsonObj.getJSONArray("nodes")) {
        int idx = ((JSONObject)o).getInt("nodeIndex");
        nodeCount = Math.max(nodeCount, idx+1);
    }
    int edgeCount = 0;
    for(Object o : jsonObj.getJSONArray("edges")) {
        int idx = ((JSONObject)o).getInt("edgeIndex");
        edgeCount = Math.max(edgeCount, idx+1);
    }
    FastGraph g = new FastGraph(nodeCount,edgeCount,direct);
    g.populateFromJsonString(jsonObj);
    g.setName(graphName);
    return g;
}
/**
 * Generates a random graph of the desired size, seeded from the current time.
 * Self sourcing edges and parallel edges may exist.
 *
 * @param numberOfNodes the number of nodes in the graph
 * @param numberOfEdges the number of edges in the graph
 * @param direct if true then off heap ByteBuffers, if false then on heap ByteBuffers
 * @return the created FastGraph
 * @throws Exception if the random graph cannot be populated
 */
public static FastGraph randomGraphFactory(int numberOfNodes, int numberOfEdges, boolean direct) throws Exception {
    // seed -1 means use the current time; false means the graph need not be simple
    return randomGraphFactory(numberOfNodes, numberOfEdges, -1, false, direct);
}
/**
 * Generates a random graph of the desired size with the given seed.
 * Self sourcing edges and parallel edges may exist.
 *
 * @param numberOfNodes the number of nodes in the graph
 * @param numberOfEdges the number of edges in the graph
 * @param seed random number seed, -1 for current time
 * @param direct if true then off heap ByteBuffers, if false then on heap ByteBuffers
 * @return the created FastGraph
 * @throws Exception if the random graph cannot be populated
 */
public static FastGraph randomGraphFactory(int numberOfNodes, int numberOfEdges, long seed, boolean direct) throws Exception {
    // false means the graph need not be simple
    return randomGraphFactory(numberOfNodes, numberOfEdges, seed, false, direct);
}
/**
 * Generates a random graph of the desired size. Unless simple is set,
 * self sourcing edges and parallel edges may exist.
 *
 * @param numberOfNodes the number of nodes in the graph
 * @param numberOfEdges the number of edges in the graph
 * @param seed random number seed, -1 for current time
 * @param simple if true the generated graph avoids self sourcing and parallel edges
 * @param direct if true then off heap ByteBuffers, if false then on heap ByteBuffers
 * @return the created FastGraph
 * @throws Exception if the random graph cannot be populated
 */
public static FastGraph randomGraphFactory(int numberOfNodes, int numberOfEdges, long seed, boolean simple, boolean direct) throws Exception {
    FastGraph result = new FastGraph(numberOfNodes,numberOfEdges,direct);
    result.setName("random-n-"+numberOfNodes+"-e-"+numberOfEdges);
    result.populateRandomGraph(simple, seed);
    return result;
}
/**
 * Creates a FastGraph by loading in various files from the given directory, or
 * from fileBaseName under data in the current working directory if directory is null.
 *
 * @param directory where the files are held, or if null fileBaseName under data under the current working directory
 * @param fileBaseName the name of the files, to which extensions are added
 * @return the created FastGraph
 * @throws IOException If the buffers cannot be loaded
 * @see #loadBuffers(String, String)
 */
public static FastGraph loadBuffersGraphFactory(String directory, String fileBaseName) throws IOException {
    // bug fix: the directory argument used to be discarded (null was always
    // passed to loadBuffers), so a caller-supplied directory was silently ignored
    return loadBuffers(directory,fileBaseName);
}
/**
 * Populates the FastGraph ByteBuffers from parsed json. Fills the node and
 * edge buffers in a first pass (with placeholder connection offsets), then
 * builds the connection buffer and patches the real in/out offsets and
 * degrees into nodeBuf in a second pass.
 *
 * @param jsonObj the json code after parsing; must contain "nodes" and "edges"
 *        arrays whose indexes fit numberOfNodes/numberOfEdges
 */
private void populateFromJsonString(JSONObject jsonObj) {
    //long time;
    String[] nodeLabels = new String[numberOfNodes];
    String[] edgeLabels = new String[numberOfEdges];
    // distinctive negative sentinels: written to the buffers first, then
    // overwritten with real values when the connection lists are built below;
    // if one of these survives to runtime it flags an unpopulated record
    int inStart = -888;
    int inLength = -3;
    int outStart = -777;
    int outLength = -2;
    int index = -1;
    int weight = -5;
    byte type = -7;
    byte age = -9;
    String label;
    //the nodes are the first elements
    JSONArray nodes = jsonObj.getJSONArray("nodes");
    Iterator<Object> itNodes = nodes.iterator();
    while(itNodes.hasNext()) {
        JSONObject node = (JSONObject)(itNodes.next());
        index = node.getInt("nodeIndex");
        weight = node.getInt("nodeWeight");
        type = (byte)(node.getInt("nodeType"));
        age = (byte)(node.getInt("nodeAge"));
        label = node.getString("nodeLabel");
        nodeBuf.putInt(NODE_IN_CONNECTION_START_OFFSET+index*NODE_BYTE_SIZE,inStart); // offset for inward connecting edges/nodes
        nodeBuf.putInt(NODE_IN_DEGREE_OFFSET+index*NODE_BYTE_SIZE,inLength); // number of inward connecting edges/nodes
        nodeBuf.putInt(NODE_OUT_CONNECTION_START_OFFSET+index*NODE_BYTE_SIZE,outStart); // offset for outward connecting edges/nodes
        nodeBuf.putInt(NODE_OUT_DEGREE_OFFSET+index*NODE_BYTE_SIZE,outLength); // number of outward connecting edges/nodes
        nodeBuf.putInt(NODE_WEIGHT_OFFSET+index*NODE_BYTE_SIZE,weight); // weight
        nodeBuf.put(NODE_TYPE_OFFSET+index*NODE_BYTE_SIZE,type); // type
        nodeBuf.put(NODE_AGE_OFFSET+index*NODE_BYTE_SIZE,age); // age
        // save labels for later
        nodeLabels[index] = label;
    }
    setAllNodeLabels(nodeLabels);
    ArrayList<ArrayList<Integer>> nodeIn = new ArrayList<ArrayList<Integer>>(numberOfNodes); // temporary store of inward edges
    for(int i = 0; i < numberOfNodes; i++) {
        ArrayList<Integer> edges = new ArrayList<Integer>(100);
        nodeIn.add(i,edges);
    }
    ArrayList<ArrayList<Integer>> nodeOut = new ArrayList<ArrayList<Integer>>(numberOfNodes); // temporary store of outward edges
    for(int i = 0; i < numberOfNodes; i++) {
        ArrayList<Integer> edges = new ArrayList<Integer>(100);
        nodeOut.add(i,edges);
    }
    ArrayList<Integer> inEdgeList;
    ArrayList<Integer> outEdgeList;
    int node1;
    int node2;
    // fresh sentinels for the edge pass
    index = -1;
    weight = -101;
    type = -103;
    age = -105;
    //time = System.currentTimeMillis();
    //populate the edges
    JSONArray edges = jsonObj.getJSONArray("edges");
    Iterator<Object> itEdges = edges.iterator();
    while(itEdges.hasNext()) {
        JSONObject edge = (JSONObject)(itEdges.next());
        index = edge.getInt("edgeIndex");
        node1 = edge.getInt("node1");
        node2 = edge.getInt("node2");
        weight = edge.getInt("edgeWeight");
        type = (byte)(edge.getInt("edgeType"));
        age = (byte)(edge.getInt("edgeAge"));
        label = edge.getString("edgeLabel");
        edgeBuf.putInt(EDGE_NODE1_OFFSET+index*EDGE_BYTE_SIZE,node1); // one end of edge
        edgeBuf.putInt(EDGE_NODE2_OFFSET+index*EDGE_BYTE_SIZE,node2); // other end of edge
        edgeBuf.putInt(EDGE_WEIGHT_OFFSET+index*EDGE_BYTE_SIZE,weight); // weight
        edgeBuf.put(EDGE_TYPE_OFFSET+index*EDGE_BYTE_SIZE,type); // type
        edgeBuf.put(EDGE_AGE_OFFSET+index*EDGE_BYTE_SIZE,age); // age
        // save labels for later
        edgeLabels[index] = label;
        // store connecting nodes: edge direction is node1 -> node2
        inEdgeList = nodeIn.get(node2);
        inEdgeList.add(index);
        outEdgeList = nodeOut.get(node1);
        outEdgeList.add(index);
    }
    setAllEdgeLabels(edgeLabels);
    // Initialise the connection buffer, modifying the node buffer connection data
    // (this overwrites the sentinel offsets/degrees written in the first pass)
    int offset = 0;
    for(int i = 0; i < numberOfNodes; i++) {
        // setting the in connection offset and length
        ArrayList<Integer> inEdges = nodeIn.get(i);
        int inEdgeLength = inEdges.size();
        nodeBuf.putInt(i*NODE_BYTE_SIZE+NODE_IN_CONNECTION_START_OFFSET,offset);
        nodeBuf.putInt(i*NODE_BYTE_SIZE+NODE_IN_DEGREE_OFFSET,inEdgeLength);
        // now put the in edge/node pairs
        for(int edgeIndex : inEdges) {
            int n = -1;
            int n1 = edgeBuf.getInt(EDGE_NODE1_OFFSET+edgeIndex*EDGE_BYTE_SIZE);
            int n2 = edgeBuf.getInt(EDGE_NODE2_OFFSET+edgeIndex*EDGE_BYTE_SIZE);
            if(n1 == i) {
                n = n2;
            } else if(n2 == i) {
                n = n1;
            } else {
                // should be impossible: the edge was filed under this node above
                System.out.println("ERROR When finding connections for node "+i+" connecting edge "+edgeIndex+ " has connecting nodes "+n1+" "+n2);
            }
            connectionBuf.putInt(CONNECTION_EDGE_OFFSET+offset,edgeIndex);
            connectionBuf.putInt(CONNECTION_NODE_OFFSET+offset,n);
            offset += CONNECTION_PAIR_SIZE;
        }
        // setting the out connection offset and length
        ArrayList<Integer> outEdges = nodeOut.get(i);
        int outEdgeLength = outEdges.size();
        nodeBuf.putInt(i*NODE_BYTE_SIZE+NODE_OUT_CONNECTION_START_OFFSET,offset);
        nodeBuf.putInt(i*NODE_BYTE_SIZE+NODE_OUT_DEGREE_OFFSET,outEdgeLength);
        // now put the out edge/node pairs
        for(int edgeIndex : outEdges) {
            int n = -1;
            int n1 = edgeBuf.getInt(EDGE_NODE1_OFFSET+edgeIndex*EDGE_BYTE_SIZE);
            int n2 = edgeBuf.getInt(EDGE_NODE2_OFFSET+edgeIndex*EDGE_BYTE_SIZE);
            if(n1 == i) {
                n = n2;
            } else if(n2 == i) {
                n = n1;
            } else {
                // should be impossible: the edge was filed under this node above
                System.out.println("ERROR When finding connections for node "+i+" connecting edge "+edgeIndex+ " has connecting nodes "+n1+" "+n2);
            }
            connectionBuf.putInt(CONNECTION_EDGE_OFFSET+offset,edgeIndex);
            connectionBuf.putInt(CONNECTION_NODE_OFFSET+offset,n);
            offset += CONNECTION_PAIR_SIZE;
        }
    }
}
/**
 * Saves the current graph to several files: baseName.info plus one file per
 * ByteBuffer (i.e. fileBaseName should have no extension). If directory is
 * null, then saves to a directory named after fileBaseName under data in the
 * current working directory, creating it if necessary.
 *
 * @param directory where the files are to be stored, or if null fileBaseName under data under the current working directory
 * @param fileBaseName the name of the files, to which extensions are added
 */
public void saveBuffers(String directory, String fileBaseName) {
    String directoryAndBaseName = "";
    if(directory != null) {
        if(directory.charAt(directory.length()-1)== File.separatorChar) {
            directoryAndBaseName = directory+fileBaseName;
        } else {
            directoryAndBaseName = directory+File.separatorChar+fileBaseName;
        }
    } else {
        directoryAndBaseName = Launcher.startingWorkingDirectory+File.separatorChar+"data"+File.separatorChar+fileBaseName+File.separatorChar+fileBaseName;
        new File(Launcher.startingWorkingDirectory+File.separatorChar+"data"+File.separatorChar+fileBaseName).mkdirs();
    }
    // try-with-resources: previously the stream and writer leaked if a write threw
    try(FileOutputStream fos = new FileOutputStream(directoryAndBaseName+".info");
            BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(fos, "utf-8"))) {
        writer.write("name"+INFO_SPLIT_STRING+name+"\n");
        writer.write("numberOfNodes"+INFO_SPLIT_STRING+numberOfNodes+"\n");
        writer.write("numberOfEdges"+INFO_SPLIT_STRING+numberOfEdges+"\n");
        writer.write("numberOfNodeLabelBytes"+INFO_SPLIT_STRING+nodeLabelBuf.capacity()+"\n");
        writer.write("numberOfEdgeLabelBytes"+INFO_SPLIT_STRING+edgeLabelBuf.capacity()+"\n");
        String directValue = "false";
        if(direct) {
            directValue = "true";
        }
        writer.write("direct"+INFO_SPLIT_STRING+directValue+"\n");
    } catch(Exception e) {
        System.out.println("ERROR executing info file save in saveBuffers("+directory+","+fileBaseName+")");
        e.printStackTrace();
    }
    try {
        writeBuf(directoryAndBaseName+".nodeBuf",nodeBuf);
        writeBuf(directoryAndBaseName+".edgeBuf",edgeBuf);
        writeBuf(directoryAndBaseName+".connectionBuf",connectionBuf);
        writeBuf(directoryAndBaseName+".nodeLabelBuf",nodeLabelBuf);
        writeBuf(directoryAndBaseName+".edgeLabelBuf",edgeLabelBuf);
    } catch(Exception e) {
        System.out.println("ERROR executing buffer save in saveBuffers("+directory+","+fileBaseName+")");
        e.printStackTrace();
    }
}
/**
 * Saves a ByteBuffer to a file, overwriting any existing content. Errors are
 * reported to stdout rather than propagated (matching the previous behaviour,
 * which caught and printed every exception).
 *
 * @param fileName path of the file to write to
 * @param buf the buffer to be written; it is rewound first so the whole
 *        capacity is written
 * @throws Exception declared for API compatibility; in practice errors are
 *         caught and printed
 */
private void writeBuf(String fileName, ByteBuffer buf) throws Exception {
    try {
        buf.rewind();
        File file = new File(fileName);
        // try-with-resources: previously the stream and channel leaked if write threw
        try(FileOutputStream fos = new FileOutputStream(file, false);
                FileChannel wChannel = fos.getChannel()) {
            wChannel.write(buf);
        }
    } catch(Exception e) {
        System.out.println("ERROR executing writeBuf("+fileName+","+buf+")");
        e.printStackTrace();
    }
}
/**
 * Creates a FastGraph by loading an adjacency list file, see loadAdjacencyListGraph.
 *
 * @param nodeCount the number of nodes the file contains
 * @param edgeCount the number of edges the file contains
 * @param dir the directory for the file, or null for data/snap under the current working directory
 * @param fileName the fileName for the file, also used as the graph name
 * @param direct if true then off heap ByteBuffers, if false then on heap ByteBuffers
 * @return the created FastGraph
 * @throws Exception if the file cannot be loaded
 */
public static FastGraph adjacencyListGraphFactory(int nodeCount, int edgeCount, String dir, String fileName, boolean direct) throws Exception {
    FastGraph result = new FastGraph(nodeCount,edgeCount,direct);
    result.setName(fileName);
    result.loadAdjacencyListGraph(dir,fileName);
    return result;
}
/**
* Assumes edges represented by one node index pair per line delimited by
* tabs or spaces, ignores lines starting with # and any line without a tab.
* Looks for the file in given directory. If directory is null, then to a
* directory named /data/snap under current working directory.
*
* @param dir the directory for the file, if null then a directory called data/ under the current working directory
* @param fileName the fileName for the file
*
* @throws IOException If the buffers cannot be loaded
*/
private void loadAdjacencyListGraph(String dir, String fileName) throws Exception {
String directory = dir;
if(directory == null) {
directory = Launcher.startingWorkingDirectory+File.separatorChar+"data"+File.separatorChar+"snap";
}
String path = null;
if(directory.charAt(directory.length()-1)== File.separatorChar) {
path = directory+fileName;
} else {
path = directory+File.separatorChar+fileName;
}
int edgeIndex = 0;
int nodeIndex = 0;
HashMap<String,Integer> nodeSnapIdToIndexMap = new HashMap<String,Integer>(numberOfNodes*4);
HashMap<Integer,String> nodeIndexToSnapIdMap = new HashMap<Integer,String>(numberOfNodes*4);
HashMap<Integer,Integer> edgeNode1Map = new HashMap<Integer,Integer>(numberOfEdges*4);
HashMap<Integer,Integer> edgeNode2Map = new HashMap<Integer,Integer>(numberOfEdges*4);
File f = new File(path);
if(!f.exists()) {
throw new IOException("Problem loading file "+path+". If you expect to access a SNAP file try downloading the file from:\nhttps://snap.stanford.edu/data/\nthen unzipping it and placing it in the directory "+directory);
//System.exit(1);
}
FileInputStream is = new FileInputStream(path);
InputStreamReader isr = new InputStreamReader(is);
BufferedReader br = new BufferedReader(isr);
String[] splitLine;
String line = "";
long time = System.currentTimeMillis();
while(line != null) {
line = br.readLine();
if(line == null) {
continue;
}
if(line.length() == 0) {
continue;
}
if(line.charAt(0) == '
continue;
}
splitLine = line.split(" ");
if(splitLine.length < 2) {
splitLine = line.split("\t");
if(splitLine.length < 2) {
System.out.println("FAILED TO RECOGNISE LINE:"+line+" in loadAdjacencyListGraph("+directory+","+fileName+")");
continue;
}
}
String node1String = splitLine[0];
String node2String = splitLine[1];
if(!nodeSnapIdToIndexMap.containsKey(node1String)) {
nodeSnapIdToIndexMap.put(node1String,nodeIndex);
nodeIndexToSnapIdMap.put(nodeIndex,node1String);
nodeIndex++;
}
if(!nodeSnapIdToIndexMap.containsKey(node2String)) {
nodeSnapIdToIndexMap.put(node2String,nodeIndex);
nodeIndexToSnapIdMap.put(nodeIndex,node2String);
nodeIndex++;
}
edgeNode1Map.put(edgeIndex, nodeSnapIdToIndexMap.get(node1String));
edgeNode2Map.put(edgeIndex, nodeSnapIdToIndexMap.get(node2String));
edgeIndex++;
if(edgeIndex%1000000==0 ) {
System.out.println("edgesLoaded "+edgeIndex+" time "+(System.currentTimeMillis()-time)/1000.0);
}
}
String[] nodeLabels = new String[numberOfNodes];
String[] edgeLabels = new String[numberOfEdges];
int inStart = -88;
int inLength = -33;
int outStart = -77;
int outLength = -22;
int weight = -55;
byte type = -77;
byte age = -99;
for(int i = 0; i < numberOfNodes; i++) {
nodeBuf.putInt(NODE_IN_CONNECTION_START_OFFSET+i*NODE_BYTE_SIZE,inStart); // offset for inward connecting edges/nodes
nodeBuf.putInt(NODE_IN_DEGREE_OFFSET+i*NODE_BYTE_SIZE,inLength); // number of inward connecting edges/nodes
nodeBuf.putInt(NODE_OUT_CONNECTION_START_OFFSET+i*NODE_BYTE_SIZE,outStart); // offset for outward connecting edges/nodes
nodeBuf.putInt(NODE_OUT_DEGREE_OFFSET+i*NODE_BYTE_SIZE,outLength); // number of outward connecting edges/nodes
nodeBuf.putInt(NODE_WEIGHT_OFFSET+i*NODE_BYTE_SIZE,weight); // weight
nodeBuf.put(NODE_TYPE_OFFSET+i*NODE_BYTE_SIZE,type); // type
nodeBuf.put(NODE_AGE_OFFSET+i*NODE_BYTE_SIZE,age); // age
// save labels for later
String label = nodeIndexToSnapIdMap.get(i);
nodeLabels[i] = label;
}
setAllNodeLabels(nodeLabels);
ArrayList<ArrayList<Integer>> nodeIn = new ArrayList<ArrayList<Integer>>(numberOfNodes); // temporary store of inward edges
for(int i = 0; i < numberOfNodes; i++) {
ArrayList<Integer> edges = new ArrayList<Integer>(100);
nodeIn.add(i,edges);
}
ArrayList<ArrayList<Integer>> nodeOut = new ArrayList<ArrayList<Integer>>(numberOfNodes); // temporary store of outward edges
for(int i = 0; i < numberOfNodes; i++) {
ArrayList<Integer> edges = new ArrayList<Integer>(100);
nodeOut.add(i,edges);
}
ArrayList<Integer> inEdgeList;
ArrayList<Integer> outEdgeList;
int node1;
int node2;
weight = -51;
type = -53;
age = -55;
for(int i = 0; i < numberOfEdges; i++) {
node1 = edgeNode1Map.get(i);
node2 = edgeNode2Map.get(i);
edgeBuf.putInt(EDGE_NODE1_OFFSET+i*EDGE_BYTE_SIZE,node1); // one end of edge
edgeBuf.putInt(EDGE_NODE2_OFFSET+i*EDGE_BYTE_SIZE,node2); // other end of edge
edgeBuf.putInt(EDGE_WEIGHT_OFFSET+i*EDGE_BYTE_SIZE,weight); // weight
edgeBuf.put(EDGE_TYPE_OFFSET+i*EDGE_BYTE_SIZE,type); // type
edgeBuf.put(EDGE_AGE_OFFSET+i*EDGE_BYTE_SIZE,age); // age
// store labels for later
String label = "e"+i;
edgeLabels[i] = label;
// store connecting nodes
inEdgeList = nodeIn.get(node2);
inEdgeList.add(i);
outEdgeList = nodeOut.get(node1);
outEdgeList.add(i);
}
setAllEdgeLabels(edgeLabels);
// Initialise the connection buffer, modifying the node buffer connection data
//time = System.currentTimeMillis();
int offset = 0;
for(int i = 0; i < numberOfNodes; i++) {
// setting the in connection offset and length
ArrayList<Integer> inEdges = nodeIn.get(i);
int inEdgeLength = inEdges.size();
nodeBuf.putInt(i*NODE_BYTE_SIZE+NODE_IN_CONNECTION_START_OFFSET,offset);
nodeBuf.putInt(i*NODE_BYTE_SIZE+NODE_IN_DEGREE_OFFSET,inEdgeLength);
// now put the in edge/node pairs
for(int e : inEdges) {
int n = -1;
int n1 = edgeBuf.getInt(EDGE_NODE1_OFFSET+e*EDGE_BYTE_SIZE);
int n2 = edgeBuf.getInt(EDGE_NODE2_OFFSET+e*EDGE_BYTE_SIZE);
if(n1 == i) {
n = n2;
} else if(n2 == i) {
n = n1;
} else {
System.out.println("ERROR When finding connections for node "+i+" connecting edge "+e+ " has connecting nodes "+n1+" "+n2);
}
connectionBuf.putInt(CONNECTION_EDGE_OFFSET+offset,e);
connectionBuf.putInt(CONNECTION_NODE_OFFSET+offset,n);
offset += CONNECTION_PAIR_SIZE;
}
// setting the out connection offset and length
ArrayList<Integer> outEdges = nodeOut.get(i);
int outEdgeLength = outEdges.size();
nodeBuf.putInt(i*NODE_BYTE_SIZE+NODE_OUT_CONNECTION_START_OFFSET,offset);
nodeBuf.putInt(i*NODE_BYTE_SIZE+NODE_OUT_DEGREE_OFFSET,outEdgeLength);
// now put the out edge/node pairs
for(int e : outEdges) {
int n = -1;
int n1 = edgeBuf.getInt(EDGE_NODE1_OFFSET+e*EDGE_BYTE_SIZE);
int n2 = edgeBuf.getInt(EDGE_NODE2_OFFSET+e*EDGE_BYTE_SIZE);
if(n1 == i) {
n = n2;
} else if(n2 == i) {
n = n1;
} else {
System.out.println("ERROR When finding connections for node "+i+" connecting edge "+e+ " has connecting nodes "+n1+" "+n2);
}
connectionBuf.putInt(CONNECTION_EDGE_OFFSET+offset,e);
connectionBuf.putInt(CONNECTION_NODE_OFFSET+offset,n);
offset += CONNECTION_PAIR_SIZE;
}
}
}
/**
 * Creates a graph from two files: baseFileName.nodes and baseFileName.edges.
 * Files are structured as line ("\n" separated) lists of items, each element
 * in an item being tab ("\t") separated, so tabs cannot appear inside fields.
 * <br/>
 * Nodes are lists of <code>index label weight type age</code>
 * <br>
 * where index must start at 0 and end at nodeCount-1, label is a string,
 * weight is integer valued, type is byte valued and age is byte valued.
 * <br/>
 * Edges are lists of <code>index node1Index node2Index label weight type age</code>
 * <br>
 * where index must start at 0 and end at edgeCount-1, node1Index and node2Index
 * are node indexes, label is a string, weight is integer valued, type is byte
 * valued and age is byte valued.
 * <br>
 * Empty lines and lines starting with a hash ("#") are ignored.
 *
 * @param nodeCount the number of nodes
 * @param edgeCount the number of edges
 * @param dir the directory for the file, if null then a directory called data/ under the current working directory
 * @param baseFileName the base of the file name, baseFileName.nodes and baseFileName.edges are expected
 * @param direct if true the ByteBuffers are direct, if false they are allocated on the heap
 *
 * @throws Exception Throws if the graph cannot be built correctly. Might be an IO error
 */
public static FastGraph nodeListEdgeListGraphFactory(int nodeCount, int edgeCount, String dir, String baseFileName, boolean direct) throws Exception {
    FastGraph result = new FastGraph(nodeCount,edgeCount,direct);
    result.setName(baseFileName);
    result.loadnodeListEdgeListGraph(dir,baseFileName);
    return result;
}
/**
* Populates a graph from two files: baseFileName.nodes and baseFileName.edges.
* Files are structured as line *"\n" separated) lists of items. Each element
* in an item is tab ("\t") separated. Hence no tabs in file names are allowed.
* <br/>
* Nodes are lists of <code>index label weight type age</code>
* <br>
* where index must start at 0 and end at nodeCount-1, label is a string, weight is
* integer valued, type is byte valued and age is byte valued.
* <br/>
* Edges are lists of <code>index node1Index node2Index label weight type age</code>
* <br>
* where index must start at 0 and end at edgeCount-1, node1Index is a node index,
* node2Index is a nodeIndex, label is a string, weight is
* integer valued, type is byte valued and age is byte valued.
* <br>
* Ignores empty lines and lines starting with a hash ("#").
*
* @param dir the directory for the file, if null then a directory called data/ under the current working directory
* @param baseFileName the base of the file name for the file, two files called baseFileName.nodes and baseFileName.edges are expected
*
* @throws IOException If the buffers cannot be loaded
*
*/
private void loadnodeListEdgeListGraph(String dir, String baseFileName) throws Exception {
String directory = dir;
if(directory == null) {
directory = Launcher.startingWorkingDirectory+File.separatorChar+"data";
}
String basePath = null;
if(directory.charAt(directory.length()-1)== File.separatorChar) {
basePath = directory+baseFileName;
} else {
basePath = directory+File.separatorChar+baseFileName;
}
String nodePath = basePath+".nodes";
File f = new File(nodePath);
if(!f.exists()) {
throw new IOException("Problem loading file "+nodePath);
}
FileInputStream is = new FileInputStream(nodePath);
InputStreamReader isr = new InputStreamReader(is);
BufferedReader br = new BufferedReader(isr);
// load the nodes
String[] splitLine;
String[] nodeLabels = new String[numberOfNodes];
int inStart = -18;
int inLength = -13;
int outStart = -21;
int outLength = -23;
int index = -1;
String label;
int weight = -15;
byte type = -17;
byte age = -19;
String nodeLine = "";
while(nodeLine != null) {
nodeLine = br.readLine();
if(nodeLine == null) {
continue;
}
if(nodeLine.length() == 0) {
continue;
}
if(nodeLine.charAt(0) == '
continue;
}
splitLine = nodeLine.split("\t");
if(splitLine.length < 5) {
br.close();
throw new IOException("Not enough elements, needs 5 tab separated elements in "+nodeLine);
}
try {
index = Integer.parseInt(splitLine[0]);
} catch(NumberFormatException e) {
br.close();
throw new IOException("Problem parsing node index in line "+nodeLine);
}
if(index > numberOfNodes) {
br.close();
throw new IOException("index "+index+" is greater than the number of nodes "+numberOfNodes);
}
label = splitLine[1];
try {
weight = Integer.parseInt(splitLine[2]);
} catch(NumberFormatException e) {
br.close();
throw new IOException("Problem parsing node weight in line "+nodeLine);
}
try {
type = Byte.parseByte(splitLine[3]);
} catch(NumberFormatException e) {
br.close();
throw new IOException("Problem parsing node type in line "+nodeLine);
}
try {
age = Byte.parseByte(splitLine[4]);
} catch(NumberFormatException e) {
br.close();
throw new IOException("Problem parsing node age in line "+nodeLine);
}
nodeBuf.putInt(NODE_IN_CONNECTION_START_OFFSET+index*NODE_BYTE_SIZE,inStart); // offset for inward connecting edges/nodes
nodeBuf.putInt(NODE_IN_DEGREE_OFFSET+index*NODE_BYTE_SIZE,inLength); // number of inward connecting edges/nodes
nodeBuf.putInt(NODE_OUT_CONNECTION_START_OFFSET+index*NODE_BYTE_SIZE,outStart); // offset for outward connecting edges/nodes
nodeBuf.putInt(NODE_OUT_DEGREE_OFFSET+index*NODE_BYTE_SIZE,outLength); // number of outward connecting edges/nodes
nodeBuf.putInt(NODE_WEIGHT_OFFSET+index*NODE_BYTE_SIZE,weight); // weight
nodeBuf.put(NODE_TYPE_OFFSET+index*NODE_BYTE_SIZE,type); // type
nodeBuf.put(NODE_AGE_OFFSET+index*NODE_BYTE_SIZE,age); // age
// save labels for later
nodeLabels[index] = label;
}
br.close();
setAllNodeLabels(nodeLabels);
String[] edgeLabels = new String[numberOfEdges];
String edgePath = basePath+".edges";
f = new File(edgePath);
if(!f.exists()) {
throw new IOException("Problem loading file "+edgePath+""+directory);
}
is = new FileInputStream(edgePath);
isr = new InputStreamReader(is);
br = new BufferedReader(isr);
// load the Edges
ArrayList<ArrayList<Integer>> nodeIn = new ArrayList<ArrayList<Integer>>(numberOfNodes); // temporary store of inward edges
for(int i = 0; i < numberOfNodes; i++) {
ArrayList<Integer> edges = new ArrayList<Integer>(100);
nodeIn.add(i,edges);
}
ArrayList<ArrayList<Integer>> nodeOut = new ArrayList<ArrayList<Integer>>(numberOfNodes); // temporary store of outward edges
for(int i = 0; i < numberOfNodes; i++) {
ArrayList<Integer> edges = new ArrayList<Integer>(100);
nodeOut.add(i,edges);
}
ArrayList<Integer> inEdgeList;
ArrayList<Integer> outEdgeList;
int node1 = -64;
int node2 = -65;
String edgeLine = "";
while(edgeLine != null) {
edgeLine = br.readLine();
if(edgeLine == null) {
continue;
}
if(edgeLine.length() == 0) {
continue;
}
if(edgeLine.charAt(0) == '
continue;
}
splitLine = edgeLine.split("\t");
if(splitLine.length < 7) {
br.close();
throw new IOException("Not enough elements, needs 7 tab separated elements in "+edgeLine);
}
try {
index = Integer.parseInt(splitLine[0]);
} catch(NumberFormatException e) {
br.close();
throw new IOException("Problem parsing edge index in line "+edgeLine);
}
if(index > numberOfEdges) {
br.close();
throw new IOException("index "+index+" is greater than the number of edges "+numberOfEdges);
}
try {
node1 = Integer.parseInt(splitLine[1]);
} catch(NumberFormatException e) {
br.close();
throw new IOException("Problem parsing node 1 index in line "+edgeLine);
}
try {
node2 = Integer.parseInt(splitLine[2]);
} catch(NumberFormatException e) {
br.close();
throw new IOException("Problem parsing node 2 index in line "+edgeLine);
}
label = splitLine[3];
try {
weight = Integer.parseInt(splitLine[4]);
} catch(NumberFormatException e) {
br.close();
throw new IOException("Problem parsing edge weight in line "+edgeLine);
}
try {
type = Byte.parseByte(splitLine[5]);
} catch(NumberFormatException e) {
br.close();
throw new IOException("Problem parsing edge type in line "+edgeLine);
}
try {
age = Byte.parseByte(splitLine[6]);
} catch(NumberFormatException e) {
br.close();
throw new IOException("Problem parsing edge age in line "+edgeLine);
}
edgeBuf.putInt(EDGE_NODE1_OFFSET+index*EDGE_BYTE_SIZE,node1); // one end of edge
edgeBuf.putInt(EDGE_NODE2_OFFSET+index*EDGE_BYTE_SIZE,node2); // other end of edge
edgeBuf.putInt(EDGE_WEIGHT_OFFSET+index*EDGE_BYTE_SIZE,weight); // weight
edgeBuf.put(EDGE_TYPE_OFFSET+index*EDGE_BYTE_SIZE,type); // type
edgeBuf.put(EDGE_AGE_OFFSET+index*EDGE_BYTE_SIZE,age); // age
// save labels for later
edgeLabels[index] = label;
// store connecting nodes
inEdgeList = nodeIn.get(node2);
inEdgeList.add(index);
outEdgeList = nodeOut.get(node1);
outEdgeList.add(index);
}
br.close();
setAllEdgeLabels(edgeLabels);
// Initialise the connection buffer, modifying the node buffer connection data
//time = System.currentTimeMillis();
int offset = 0;
for(int i = 0; i < numberOfNodes; i++) {
// setting the in connection offset and length
ArrayList<Integer> inEdges = nodeIn.get(i);
int inEdgeLength = inEdges.size();
nodeBuf.putInt(i*NODE_BYTE_SIZE+NODE_IN_CONNECTION_START_OFFSET,offset);
nodeBuf.putInt(i*NODE_BYTE_SIZE+NODE_IN_DEGREE_OFFSET,inEdgeLength);
// now put the in edge/node pairs
for(int e : inEdges) {
int n = -1;
int n1 = edgeBuf.getInt(EDGE_NODE1_OFFSET+e*EDGE_BYTE_SIZE);
int n2 = edgeBuf.getInt(EDGE_NODE2_OFFSET+e*EDGE_BYTE_SIZE);
if(n1 == i) {
n = n2;
} else if(n2 == i) {
n = n1;
} else {
System.out.println("ERROR When finding connections for node "+i+" connecting edge "+e+ " has connecting nodes "+n1+" "+n2);
}
connectionBuf.putInt(CONNECTION_EDGE_OFFSET+offset,e);
connectionBuf.putInt(CONNECTION_NODE_OFFSET+offset,n);
offset += CONNECTION_PAIR_SIZE;
}
// setting the out connection offset and length
ArrayList<Integer> outEdges = nodeOut.get(i);
int outEdgeLength = outEdges.size();
nodeBuf.putInt(i*NODE_BYTE_SIZE+NODE_OUT_CONNECTION_START_OFFSET,offset);
nodeBuf.putInt(i*NODE_BYTE_SIZE+NODE_OUT_DEGREE_OFFSET,outEdgeLength);
// now put the out edge/node pairs
for(int e : outEdges) {
int n = -1;
int n1 = edgeBuf.getInt(EDGE_NODE1_OFFSET+e*EDGE_BYTE_SIZE);
int n2 = edgeBuf.getInt(EDGE_NODE2_OFFSET+e*EDGE_BYTE_SIZE);
if(n1 == i) {
n = n2;
} else if(n2 == i) {
n = n1;
} else {
System.out.println("ERROR When finding connections for node "+i+" connecting edge "+e+ " has connecting nodes "+n1+" "+n2);
}
connectionBuf.putInt(CONNECTION_EDGE_OFFSET+offset,e);
connectionBuf.putInt(CONNECTION_NODE_OFFSET+offset,n);
offset += CONNECTION_PAIR_SIZE;
}
}
}
/**
 * Recreates a FastGraph from the buffer files previously saved to disk.
 * The files are found either under {@code directory}, or when that is null under
 * {@code <workingDir>/data/<fileBaseName>/}. The .info file supplies the graph name,
 * node/edge counts, label buffer sizes and whether direct (off-heap) buffers are used;
 * the remaining files are read raw into the corresponding ByteBuffers.
 *
 * @param directory directory containing the buffer files, or null for the default data directory
 * @param fileBaseName base name shared by all buffer files of the graph
 * @return the reconstructed FastGraph
 * @throws IOException if any of the buffer files cannot be opened or read
 */
private static FastGraph loadBuffers(String directory, String fileBaseName) throws IOException {
	String directoryAndBaseName = Launcher.startingWorkingDirectory+File.separatorChar+"data"+File.separatorChar+fileBaseName+File.separatorChar+fileBaseName;
	if(directory != null) {
		if(directory.charAt(directory.length()-1) == File.separatorChar) {
			directoryAndBaseName = directory+fileBaseName;
		} else {
			directoryAndBaseName = directory+File.separatorChar+fileBaseName;
		}
	}
	String name;
	int inNodeTotal;
	int inEdgeTotal;
	int inNodeLabelSize;
	int inEdgeLabelSize;
	boolean inDirect;
	// try-with-resources closes the reader (and underlying stream) even when a
	// readLine/parse fails part way through; the original leaked it on exceptions
	try(BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(directoryAndBaseName+".info")))) {
		// .info layout: one "key<sep>value" line per field, in this fixed order
		name = br.readLine().split(INFO_SPLIT_STRING)[1];
		inNodeTotal = Integer.parseInt(br.readLine().split(INFO_SPLIT_STRING)[1]);
		inEdgeTotal = Integer.parseInt(br.readLine().split(INFO_SPLIT_STRING)[1]);
		inNodeLabelSize = Integer.parseInt(br.readLine().split(INFO_SPLIT_STRING)[1]);
		inEdgeLabelSize = Integer.parseInt(br.readLine().split(INFO_SPLIT_STRING)[1]);
		// anything other than the literal "false" is treated as true, as before
		inDirect = !br.readLine().split(INFO_SPLIT_STRING)[1].equals("false");
	}
	FastGraph g = new FastGraph(inNodeTotal, inEdgeTotal, inDirect);
	// label buffers are sized from the .info file, not from the node/edge counts
	if(!inDirect) {
		g.nodeLabelBuf = ByteBuffer.allocate(inNodeLabelSize);
		g.edgeLabelBuf = ByteBuffer.allocate(inEdgeLabelSize);
	} else {
		g.nodeLabelBuf = ByteBuffer.allocateDirect(inNodeLabelSize);
		g.edgeLabelBuf = ByteBuffer.allocateDirect(inEdgeLabelSize);
	}
	g.setName(name);
	readFileIntoBuffer(directoryAndBaseName+".nodeBuf", g.nodeBuf);
	readFileIntoBuffer(directoryAndBaseName+".edgeBuf", g.edgeBuf);
	readFileIntoBuffer(directoryAndBaseName+".connectionBuf", g.connectionBuf);
	readFileIntoBuffer(directoryAndBaseName+".nodeLabelBuf", g.nodeLabelBuf);
	readFileIntoBuffer(directoryAndBaseName+".edgeLabelBuf", g.edgeLabelBuf);
	return g;
}
/**
 * Reads the whole of the named file into the given buffer via a FileChannel.
 * The channel and stream are closed even if the read fails, fixing the
 * resource leaks the original inline code had on exception paths.
 *
 * @param fileName full path of the file to read
 * @param buf destination buffer, assumed large enough for the file contents
 * @throws IOException if the file cannot be opened or read
 */
private static void readFileIntoBuffer(String fileName, ByteBuffer buf) throws IOException {
	try(FileInputStream is = new FileInputStream(new File(fileName)); FileChannel rChannel = is.getChannel()) {
		rChannel.read(buf);
	}
}
/**
 * Creates a graph with the size specified by numberOfNodes and numberOfEdges. Possibly includes parallel edges and self sourcing nodes.
 * If the graph is simple, and there are too many edges for the nodes, an exception is thrown.
 * Node labels are "n0","n1",... and edge labels "e0","e1",... Weights are random in [0,100).
 *
 * @param simple if true then no selfsourcing edges or parallel edges
 * @param seed the random number generator seed, -1 for current time
 * @throws FastGraphException If the desired number of edges is more than a complete graph for when simple is true
 */
public void populateRandomGraph(boolean simple, long seed) throws FastGraphException {
	if(simple) {
		// long arithmetic: numberOfNodes*(numberOfNodes-1) overflows int for
		// node counts above ~46341, which silently broke this capacity check
		if(((long)numberOfNodes*(numberOfNodes-1))/2 < numberOfEdges) {
			throw new FastGraphException("Too many edges to generate a simple graph.");
		}
	}
	if(seed == -1) {
		seed = System.currentTimeMillis();
	}
	Random r = new Random(seed);
	String[] nodeLabels = new String[numberOfNodes];
	String[] edgeLabels = new String[numberOfEdges];
	// sentinel values; connection offsets/degrees are overwritten in the final pass below
	int inStart = -888;
	int inLength = -3;
	int outStart = -777;
	int outLength = -2;
	int weight = -5;
	byte type = -7;
	byte age = -9;
	//generate the nodes
	for(int i = 0; i < numberOfNodes; i++) {
		weight = r.nextInt(100);
		nodeBuf.putInt(NODE_IN_CONNECTION_START_OFFSET+i*NODE_BYTE_SIZE,inStart); // offset for inward connecting edges/nodes
		nodeBuf.putInt(NODE_IN_DEGREE_OFFSET+i*NODE_BYTE_SIZE,inLength); // number of inward connecting edges/nodes
		nodeBuf.putInt(NODE_OUT_CONNECTION_START_OFFSET+i*NODE_BYTE_SIZE,outStart); // offset for outward connecting edges/nodes
		nodeBuf.putInt(NODE_OUT_DEGREE_OFFSET+i*NODE_BYTE_SIZE,outLength); // number of outward connecting edges/nodes
		nodeBuf.putInt(NODE_WEIGHT_OFFSET+i*NODE_BYTE_SIZE,weight); // weight
		nodeBuf.put(NODE_TYPE_OFFSET+i*NODE_BYTE_SIZE,type); // type
		nodeBuf.put(NODE_AGE_OFFSET+i*NODE_BYTE_SIZE,age); // age
		// store labels for later
		String label = "n"+i;
		nodeLabels[i] = label;
	}
	setAllNodeLabels(nodeLabels);
	// temporary per-node stores of inward and outward edge indexes,
	// used to build the connection buffer after all edges exist
	ArrayList<ArrayList<Integer>> nodeIn = new ArrayList<ArrayList<Integer>>(numberOfNodes);
	for(int i = 0; i < numberOfNodes; i++) {
		ArrayList<Integer> edges = new ArrayList<Integer>(100);
		nodeIn.add(i,edges);
	}
	ArrayList<ArrayList<Integer>> nodeOut = new ArrayList<ArrayList<Integer>>(numberOfNodes);
	for(int i = 0; i < numberOfNodes; i++) {
		ArrayList<Integer> edges = new ArrayList<Integer>(100);
		nodeOut.add(i,edges);
	}
	ArrayList<Integer> inEdgeList;
	ArrayList<Integer> outEdgeList;
	int node1;
	int node2;
	weight = -101;
	type = -103;
	age = -105;
	//generate the edges, with random node connections
	// nodePairs records already-used end-point pairs; only consulted when simple
	// (original allocated a throwaway default-sized set before the simple check)
	HashSet<String> nodePairs = simple ? new HashSet<String>(numberOfEdges) : new HashSet<String>();
	for(int i = 0; i < numberOfEdges; i++) {
		weight = r.nextInt(100);
		node1 = r.nextInt(numberOfNodes);
		node2 = r.nextInt(numberOfNodes);
		if(simple) {
			// reject self loops and parallel edges; pairs are stored undirected,
			// so both orderings are checked
			boolean parallel = false;
			String pairString1 = Integer.toString(node1)+" "+Integer.toString(node2);
			String pairString2 = Integer.toString(node2)+" "+Integer.toString(node1);
			if(nodePairs.contains(pairString1) || nodePairs.contains(pairString2) ) {
				parallel = true;
			}
			while(node2 == node1 || parallel) {
				node1 = r.nextInt(numberOfNodes);
				node2 = r.nextInt(numberOfNodes);
				pairString1 = Integer.toString(node1)+" "+Integer.toString(node2);
				pairString2 = Integer.toString(node2)+" "+Integer.toString(node1);
				if(nodePairs.contains(pairString1) || nodePairs.contains(pairString2) ) {
					parallel = true;
				} else {
					parallel = false;
				}
			}
			nodePairs.add(pairString1);
		}
		edgeBuf.putInt(EDGE_NODE1_OFFSET+i*EDGE_BYTE_SIZE,node1); // one end of edge
		edgeBuf.putInt(EDGE_NODE2_OFFSET+i*EDGE_BYTE_SIZE,node2); // other end of edge
		edgeBuf.putInt(EDGE_WEIGHT_OFFSET+i*EDGE_BYTE_SIZE,weight); // weight
		edgeBuf.put(EDGE_TYPE_OFFSET+i*EDGE_BYTE_SIZE,type); // type
		edgeBuf.put(EDGE_AGE_OFFSET+i*EDGE_BYTE_SIZE,age); // age
		// label
		String label = "e"+i;
		edgeLabels[i] = label;
		// store connecting nodes
		inEdgeList = nodeIn.get(node2);
		inEdgeList.add(i);
		outEdgeList = nodeOut.get(node1);
		outEdgeList.add(i);
	}
	setAllEdgeLabels(edgeLabels);
	// Initialise the connection buffer, modifying the node buffer connection data.
	// offset advances monotonically through the connection buffer; each node's
	// in-pairs are written first, then its out-pairs, and the node buffer is
	// patched with the real start offsets and degrees.
	int offset = 0;
	for(int i = 0; i < numberOfNodes; i++) {
		// setting the in connection offset and length
		ArrayList<Integer> inEdges = nodeIn.get(i);
		int inEdgeLength = inEdges.size();
		nodeBuf.putInt(i*NODE_BYTE_SIZE+NODE_IN_CONNECTION_START_OFFSET,offset);
		nodeBuf.putInt(i*NODE_BYTE_SIZE+NODE_IN_DEGREE_OFFSET,inEdgeLength);
		// now put the in edge/node pairs
		for(int e : inEdges) {
			int n = -1;
			int n1 = edgeBuf.getInt(EDGE_NODE1_OFFSET+e*EDGE_BYTE_SIZE);
			int n2 = edgeBuf.getInt(EDGE_NODE2_OFFSET+e*EDGE_BYTE_SIZE);
			if(n1 == i) {
				n = n2;
			} else if(n2 == i) {
				n = n1;
			} else {
				System.out.println("ERROR When finding connections for node "+i+" connecting edge "+e+ " has connecting nodes "+n1+" "+n2);
			}
			connectionBuf.putInt(CONNECTION_EDGE_OFFSET+offset,e);
			connectionBuf.putInt(CONNECTION_NODE_OFFSET+offset,n);
			offset += CONNECTION_PAIR_SIZE;
		}
		// setting the out connection offset and length
		ArrayList<Integer> outEdges = nodeOut.get(i);
		int outEdgeLength = outEdges.size();
		nodeBuf.putInt(i*NODE_BYTE_SIZE+NODE_OUT_CONNECTION_START_OFFSET,offset);
		nodeBuf.putInt(i*NODE_BYTE_SIZE+NODE_OUT_DEGREE_OFFSET,outEdgeLength);
		// now put the out edge/node pairs
		for(int e : outEdges) {
			int n = -1;
			int n1 = edgeBuf.getInt(EDGE_NODE1_OFFSET+e*EDGE_BYTE_SIZE);
			int n2 = edgeBuf.getInt(EDGE_NODE2_OFFSET+e*EDGE_BYTE_SIZE);
			if(n1 == i) {
				n = n2;
			} else if(n2 == i) {
				n = n1;
			} else {
				System.out.println("ERROR When finding connections for node "+i+" connecting edge "+e+ " has connecting nodes "+n1+" "+n2);
			}
			connectionBuf.putInt(CONNECTION_EDGE_OFFSET+offset,e);
			connectionBuf.putInt(CONNECTION_NODE_OFFSET+offset,n);
			offset += CONNECTION_PAIR_SIZE;
		}
	}
}
/**
 * Creates a graph from a displayGraph.Graph. label becomes the displayGraph.Graph name
 * node and edge labels, are taken from displayGraph.Graph nodes and edges.
 * node and edge weights are from displayGraph node and edge scores. Types are
 * from displayGraph edgeType edgeType if they can be parsed as bytes,
 * otherwise they get a default of -1. Node and edge Age is from displayGraph age, but
 * only least significant byte, as the displayGraph age is a integer.
 * Order of nodes and edges is as in the displayGraph.Graph
 *
 * @param displayGraph the graph that the new FastGraph is based on
 * @param direct if true then off heap ByteBuffers, if false then on heap ByteBuffers
 * @return new FastGraph with attributes based on the given displayGraph.
 */
public static FastGraph displayGraphFactory(Graph displayGraph, boolean direct) {
	int nodeCount = displayGraph.getNodes().size();
	int edgeCount = displayGraph.getEdges().size();
	FastGraph ret = new FastGraph(nodeCount, edgeCount, direct);
	ret.setName(displayGraph.getLabel());
	ret.populateFromDisplayGraph(displayGraph);
	return ret;
}
/**
 * Populates byteBuffers based on the contents of the displayGraph.graph.
 * Nodes and edges are in the order they appear in the displayGraph.
 * Weights come from displayGraph scores, types from parsing the type label as a
 * byte (-1 if unparseable), and age is the least significant byte of the
 * displayGraph integer age. After nodes and edges are written, the connection
 * buffer is built and the node buffer's offset/degree fields are patched.
 *
 * @param displayGraph the graph that the new FastGraph is based on
 */
private void populateFromDisplayGraph(Graph displayGraph) {
String[] nodeLabels = new String[numberOfNodes];
String[] edgeLabels = new String[numberOfEdges];
// sentinel values; connection offsets/degrees are overwritten in the final pass
int inStart = -27;
int inLength = -37;
int outStart = -47;
int outLength = -57;
int weight = -67;
byte type = -77;
byte age = -87;
ByteBuffer bb = ByteBuffer.allocate(4); // used to convert from int to byte, due to lack of direct casting
// nodes first, will be in the same order as the list in the displayGraph
for(int i = 0; i < numberOfNodes; i++) {
Node dgn = displayGraph.getNodes().get(i);
weight = (int)(dgn.getScore());
bb.putInt(0,dgn.getAge());
age = bb.get(3); // get least significant byte of age
try {
type = Byte.parseByte(dgn.getType().getLabel());
} catch(NumberFormatException e) {
type = -1; // type label is not a numeric byte, fall back to default
}
nodeBuf.putInt(NODE_IN_CONNECTION_START_OFFSET+i*NODE_BYTE_SIZE,inStart); // offset for inward connecting edges/nodes
nodeBuf.putInt(NODE_IN_DEGREE_OFFSET+i*NODE_BYTE_SIZE,inLength); // number of inward connecting edges/nodes
nodeBuf.putInt(NODE_OUT_CONNECTION_START_OFFSET+i*NODE_BYTE_SIZE,outStart); // offset for outward connecting edges/nodes
nodeBuf.putInt(NODE_OUT_DEGREE_OFFSET+i*NODE_BYTE_SIZE,outLength); // number of outward connecting edges/nodes
nodeBuf.putInt(NODE_WEIGHT_OFFSET+i*NODE_BYTE_SIZE,weight); // weight
nodeBuf.put(NODE_TYPE_OFFSET+i*NODE_BYTE_SIZE,type); // type
nodeBuf.put(NODE_AGE_OFFSET+i*NODE_BYTE_SIZE,age); // age
// store labels for later
String label = dgn.getLabel();
nodeLabels[i] = label;
}
setAllNodeLabels(nodeLabels);
ArrayList<ArrayList<Integer>> nodeIn = new ArrayList<ArrayList<Integer>>(numberOfNodes); // temporary store of inward edges
for(int i = 0; i < numberOfNodes; i++) {
ArrayList<Integer> edges = new ArrayList<Integer>(100);
nodeIn.add(i,edges);
}
ArrayList<ArrayList<Integer>> nodeOut = new ArrayList<ArrayList<Integer>>(numberOfNodes); // temporary store of outward edges
for(int i = 0; i < numberOfNodes; i++) {
ArrayList<Integer> edges = new ArrayList<Integer>(100);
nodeOut.add(i,edges);
}
ArrayList<Integer> inEdgeList;
ArrayList<Integer> outEdgeList;
int node1;
int node2;
weight = -15;
type = -25;
age = -35;
// edges once nodes exist, will be in the same order as the list in the displayGraph
for(int i = 0; i < numberOfEdges; i++) {
Edge dge = displayGraph.getEdges().get(i);
node1 = displayGraph.getNodes().indexOf(dge.getFrom()); // we can find the FastGraph node index from its position in the displayGraph nodeList
node2 = displayGraph.getNodes().indexOf(dge.getTo()); // we can find the FastGraph node index from its position in the displayGraph nodeList
weight = (int)(dge.getScore());
bb.putInt(0,dge.getAge());
age = bb.get(3); // get least significant byte of age
try {
type = Byte.parseByte(dge.getType().getLabel());
} catch(NumberFormatException e) {
type = -1; // type label is not a numeric byte, fall back to default
}
edgeBuf.putInt(EDGE_NODE1_OFFSET+i*EDGE_BYTE_SIZE,node1); // one end of edge
edgeBuf.putInt(EDGE_NODE2_OFFSET+i*EDGE_BYTE_SIZE,node2); // other end of edge
edgeBuf.putInt(EDGE_WEIGHT_OFFSET+i*EDGE_BYTE_SIZE,weight); // weight
edgeBuf.put(EDGE_TYPE_OFFSET+i*EDGE_BYTE_SIZE,type); // type
edgeBuf.put(EDGE_AGE_OFFSET+i*EDGE_BYTE_SIZE,age); // age
// store labels for later
String label = dge.getLabel();
edgeLabels[i] = label;
// store connecting nodes
inEdgeList = nodeIn.get(node2);
inEdgeList.add(i);
outEdgeList = nodeOut.get(node1);
outEdgeList.add(i);
}
setAllEdgeLabels(edgeLabels);
// Initialise the connection buffer, modifying the node buffer connection data.
// offset advances monotonically; each node's in-pairs are written, then its
// out-pairs, and the node buffer gets the real start offsets and degrees.
//time = System.currentTimeMillis();
int offset = 0;
for(int i = 0; i < numberOfNodes; i++) {
// setting the in connection offset and length
ArrayList<Integer> inEdges = nodeIn.get(i);
int inEdgeLength = inEdges.size();
nodeBuf.putInt(i*NODE_BYTE_SIZE+NODE_IN_CONNECTION_START_OFFSET,offset);
nodeBuf.putInt(i*NODE_BYTE_SIZE+NODE_IN_DEGREE_OFFSET,inEdgeLength);
// now put the in edge/node pairs
for(int edgeIndex : inEdges) {
int nodeIndex = -1;
int n1 = edgeBuf.getInt(EDGE_NODE1_OFFSET+edgeIndex*EDGE_BYTE_SIZE);
int n2 = edgeBuf.getInt(EDGE_NODE2_OFFSET+edgeIndex*EDGE_BYTE_SIZE);
if(n1 == i) {
nodeIndex = n2;
} else if(n2 == i) {
nodeIndex = n1;
} else {
// should be unreachable: every edge in nodeIn.get(i) was added with i as an end point
System.out.println("ERROR When finding connections for node "+i+" connecting edge "+edgeIndex+ " has connecting nodes "+n1+" "+n2);
}
connectionBuf.putInt(CONNECTION_EDGE_OFFSET+offset,edgeIndex);
connectionBuf.putInt(CONNECTION_NODE_OFFSET+offset,nodeIndex);
offset += CONNECTION_PAIR_SIZE;
}
// setting the out connection offset and length
ArrayList<Integer> outEdges = nodeOut.get(i);
int outEdgeLength = outEdges.size();
nodeBuf.putInt(i*NODE_BYTE_SIZE+NODE_OUT_CONNECTION_START_OFFSET,offset);
nodeBuf.putInt(i*NODE_BYTE_SIZE+NODE_OUT_DEGREE_OFFSET,outEdgeLength);
// now put the out edge/node pairs
for(int edgeIndex : outEdges) {
int nodeIndex = -1;
int n1 = edgeBuf.getInt(EDGE_NODE1_OFFSET+edgeIndex*EDGE_BYTE_SIZE);
int n2 = edgeBuf.getInt(EDGE_NODE2_OFFSET+edgeIndex*EDGE_BYTE_SIZE);
if(n1 == i) {
nodeIndex = n2;
} else if(n2 == i) {
nodeIndex = n1;
} else {
// should be unreachable: every edge in nodeOut.get(i) was added with i as an end point
System.out.println("ERROR When finding connections for node "+i+" connecting edge "+edgeIndex+ " has connecting nodes "+n1+" "+n2);
}
connectionBuf.putInt(CONNECTION_EDGE_OFFSET+offset,edgeIndex);
connectionBuf.putInt(CONNECTION_NODE_OFFSET+offset,nodeIndex);
offset += CONNECTION_PAIR_SIZE;
}
}
}
/**
 * Generates a new graph from the subgraph specified by the parameters. All
 * edges connected to deleted nodes are also removed.
 *
 * @param nodesToDelete nodes in this graph that will not appear in the new graph
 * @param edgesToDelete edges in this graph that will not appear in the new graph
 * @param orphanEdgeCheckNeeded If the method calling this has already checked for orphan edges, then false.
 * @return the new FastGraph
 */
public FastGraph generateGraphByDeletingItems(int[] nodesToDelete, int[] edgesToDelete, boolean orphanEdgeCheckNeeded) {
	// Membership tests are the hot path here: the previous version used
	// LinkedList.contains(), which is O(n) per lookup and made the orphan-edge
	// pass O(nodes*edges). HashSet gives O(1) lookups and also deduplicates,
	// so no explicit contains() check is needed before adding.
	// (Debug timing/trace printlns from the original have been removed.)
	HashSet<Integer> allEdgesToDelete = new HashSet<Integer>(edgesToDelete.length*3);
	HashSet<Integer> allNodesToDelete = new HashSet<Integer>(nodesToDelete.length*3);
	for(int e : edgesToDelete) {
		allEdgesToDelete.add(e);
	}
	// delete the edges connecting to deleted nodes and create the node list
	for(int n : nodesToDelete) {
		allNodesToDelete.add(n);
		if(orphanEdgeCheckNeeded) { // only collect edges touching deleted nodes if the caller has not already done so
			for(int e : getNodeConnectingEdges(n)) {
				allEdgesToDelete.add(e);
			}
		}
	}
	// find the nodes that will remain
	HashSet<Integer> remainingNodeList = new HashSet<Integer>(getNumberOfNodes()*2);
	for(int i = 0; i < getNumberOfNodes(); i++) {
		remainingNodeList.add(i);
	}
	remainingNodeList.removeAll(allNodesToDelete); // bulk removal is quicker than checking each entry
	int[] remainingNodes = Util.convertHashSet(remainingNodeList);
	// find the edges that will remain
	HashSet<Integer> remainingEdgeList = new HashSet<Integer>(getNumberOfEdges()*2);
	for(int i = 0; i < getNumberOfEdges(); i++) {
		remainingEdgeList.add(i);
	}
	remainingEdgeList.removeAll(allEdgesToDelete);
	int[] remainingEdges = Util.convertHashSet(remainingEdgeList);
	return generateGraphFromSubgraph(remainingNodes,remainingEdges);
}
/**
 * Generates a new graph from the subgraph specified by the parameters. The nodes at the end of the edges must be in subgraphNodes.
 * Nodes and edges in the new graph are renumbered 0..n-1 in the order they
 * appear in the parameter arrays; labels, weights, types and ages are copied.
 *
 * @param subgraphNodes nodes in this graph that will appear in the new graph
 * @param subgraphEdges edges in this graph that will appear in the new graph, must connect only to subgraphNodes
 * @return the new FastGraph
 */
public FastGraph generateGraphFromSubgraph(int[] subgraphNodes, int[] subgraphEdges) {
long time = System.currentTimeMillis();
FastGraph g = new FastGraph(subgraphNodes.length, subgraphEdges.length, getDirect());
String[] nodeLabels = new String[subgraphNodes.length]; // stores the labels for creating the nodeLabelBuffer
HashMap<Integer,Integer> oldNodesToNew = new HashMap<>(subgraphNodes.length*4); // for reference when adding edges, multiplier reduces chances of clashes
// initial population of the new node array
int weight = -98;
byte type = -97;
byte age = -96;
int index = 0; // index of the node in the new graph
for(int n : subgraphNodes) {
weight = nodeBuf.getInt(NODE_WEIGHT_OFFSET+n*NODE_BYTE_SIZE);
type = nodeBuf.get(NODE_TYPE_OFFSET+n*NODE_BYTE_SIZE);
age = nodeBuf.get(NODE_AGE_OFFSET+n*NODE_BYTE_SIZE);
g.nodeBuf.putInt(NODE_WEIGHT_OFFSET+index*NODE_BYTE_SIZE,weight);
g.nodeBuf.put(NODE_TYPE_OFFSET+index*NODE_BYTE_SIZE,type);
g.nodeBuf.put(NODE_AGE_OFFSET+index*NODE_BYTE_SIZE,age);
// store labels for later
nodeLabels[index] = getNodeLabel(n);
// store old to new mapping for later
oldNodesToNew.put(n, index);
index++;
}
//System.out.println("C popluated the new node buffer " + (System.currentTimeMillis()-time)/1000.0+" seconds");
time = System.currentTimeMillis();
g.setAllNodeLabels(nodeLabels); // create the node label buffer
//System.out.println("D popluated the new node list buffer " + (System.currentTimeMillis()-time)/1000.0+" seconds");
time = System.currentTimeMillis();
ArrayList<ArrayList<Integer>> nodeIn = new ArrayList<ArrayList<Integer>>(subgraphNodes.length); // temporary store of inward edges
for(int nodeIndex = 0; nodeIndex < subgraphNodes.length; nodeIndex++) {
ArrayList<Integer> edges = new ArrayList<Integer>(100);
nodeIn.add(nodeIndex,edges);
}
ArrayList<ArrayList<Integer>> nodeOut = new ArrayList<ArrayList<Integer>>(subgraphNodes.length); // temporary store of outward edges
for(int nodeIndex = 0; nodeIndex < subgraphNodes.length; nodeIndex++) {
ArrayList<Integer> edges = new ArrayList<Integer>(100);
nodeOut.add(nodeIndex,edges);
}
//System.out.println("E created the neighbour store " + (System.currentTimeMillis()-time)/1000.0+" seconds");
time = System.currentTimeMillis();
//System.out.println(oldNodesToNew);
String[] edgeLabels = new String[subgraphEdges.length]; // stores the labels for creating the edgeLabelBuffer
ArrayList<Integer> inEdgeList;
ArrayList<Integer> outEdgeList;
// create the edges
index = 0; // index of the edge in the new graph
edgeBuf.position(0);
g.edgeBuf.position(0);
for(int e : subgraphEdges) {
weight = edgeBuf.getInt(EDGE_WEIGHT_OFFSET+e*EDGE_BYTE_SIZE);
type = edgeBuf.get(EDGE_TYPE_OFFSET+e*EDGE_BYTE_SIZE);
age = edgeBuf.get(EDGE_AGE_OFFSET+e*EDGE_BYTE_SIZE);
g.edgeBuf.putInt(EDGE_WEIGHT_OFFSET+index*EDGE_BYTE_SIZE,weight);
g.edgeBuf.put(EDGE_TYPE_OFFSET+index*EDGE_BYTE_SIZE,type);
g.edgeBuf.put(EDGE_AGE_OFFSET+index*EDGE_BYTE_SIZE,age);
int n1 = edgeBuf.getInt(EDGE_NODE1_OFFSET+e*EDGE_BYTE_SIZE);
int n2 = edgeBuf.getInt(EDGE_NODE2_OFFSET+e*EDGE_BYTE_SIZE);
// NOTE(review): if an edge end point is not in subgraphNodes, oldNodesToNew.get()
// returns null and unboxing throws a NullPointerException -- the contract in the
// javadoc (edges must connect only to subgraphNodes) is relied on here.
//System.out.print("old node n1: " + n1);
int gn1 = oldNodesToNew.get(n1);
//System.out.print(", new node n1: " + gn1);
//System.out.print(", old node n2: " + n2);
int gn2 = oldNodesToNew.get(n2);
//System.out.print(", new node n2: " + gn2);
//System.out.println();
g.edgeBuf.putInt(EDGE_NODE1_OFFSET+index*EDGE_BYTE_SIZE,gn1); // one end of edge
g.edgeBuf.putInt(EDGE_NODE2_OFFSET+index*EDGE_BYTE_SIZE,gn2); // other end of edge
// store labels for later
edgeLabels[index] = getEdgeLabel(e);
// store connecting edges
inEdgeList = nodeIn.get(gn2);
inEdgeList.add(index);
outEdgeList = nodeOut.get(gn1);
outEdgeList.add(index);
index++;
}
//System.out.println("F populated the new edge buffer " + (System.currentTimeMillis()-time)/1000.0+" seconds");
time = System.currentTimeMillis();
g.setAllEdgeLabels(edgeLabels);
//System.out.println("G populated the new edge label buffer " + (System.currentTimeMillis()-time)/1000.0+" seconds");
time = System.currentTimeMillis();
// Initialise the connection buffer, modifying the node buffer connection data.
// offset advances monotonically; each node's in-pairs are written, then its
// out-pairs, and the node buffer gets the real start offsets and degrees.
//time = System.currentTimeMillis();
int offset = 0;
for(int node = 0; node < subgraphNodes.length; node++) {
if(node%100000 == 0) {
//System.out.println("H populated "+node+" nodes in connection buffer " + (System.currentTimeMillis()-time)/1000.0+" seconds");
time = System.currentTimeMillis();
}
// setting the in connection offset and length
ArrayList<Integer> inEdges = nodeIn.get(node);
int inEdgeLength = inEdges.size();
g.nodeBuf.putInt(node*NODE_BYTE_SIZE+NODE_IN_CONNECTION_START_OFFSET,offset);
g.nodeBuf.putInt(node*NODE_BYTE_SIZE+NODE_IN_DEGREE_OFFSET,inEdgeLength);
// now put the in edge/node pairs
for(int edgeIndex : inEdges) {
int nodeIndex = -1;
int n1 = g.edgeBuf.getInt(EDGE_NODE1_OFFSET+edgeIndex*EDGE_BYTE_SIZE);
int n2 = g.edgeBuf.getInt(EDGE_NODE2_OFFSET+edgeIndex*EDGE_BYTE_SIZE);
if(n1 == node) {
nodeIndex = n2;
} else if(n2 == node) {
nodeIndex = n1;
} else {
// should be unreachable: every edge in nodeIn.get(node) was added with node as an end point
System.out.println("ERROR A When finding connections for node "+node+" connecting edge "+edgeIndex+ " has connecting nodes "+n1+" "+n2);
}
g.connectionBuf.putInt(CONNECTION_EDGE_OFFSET+offset,edgeIndex);
g.connectionBuf.putInt(CONNECTION_NODE_OFFSET+offset,nodeIndex);
offset += CONNECTION_PAIR_SIZE;
}
// setting the out connection offset and length
ArrayList<Integer> outEdges = nodeOut.get(node);
int outEdgeLength = outEdges.size();
g.nodeBuf.putInt(node*NODE_BYTE_SIZE+NODE_OUT_CONNECTION_START_OFFSET,offset);
g.nodeBuf.putInt(node*NODE_BYTE_SIZE+NODE_OUT_DEGREE_OFFSET,outEdgeLength);
// now put the out edge/node pairs
for(int edgeIndex : outEdges) {
int nodeIndex = -1;
int n1 = g.edgeBuf.getInt(EDGE_NODE1_OFFSET+edgeIndex*EDGE_BYTE_SIZE);
int n2 = g.edgeBuf.getInt(EDGE_NODE2_OFFSET+edgeIndex*EDGE_BYTE_SIZE);
if(n1 == node) {
nodeIndex = n2;
} else if(n2 == node) {
nodeIndex = n1;
} else {
// should be unreachable: every edge in nodeOut.get(node) was added with node as an end point
System.out.println("ERROR B When finding connections for node "+node+" connecting edge "+edgeIndex+ " has connecting nodes "+n1+" "+n2);
}
g.connectionBuf.putInt(CONNECTION_EDGE_OFFSET+offset,edgeIndex);
g.connectionBuf.putInt(CONNECTION_NODE_OFFSET+offset,nodeIndex);
offset += CONNECTION_PAIR_SIZE;
}
}
return g;
}
/**
 * @return the largest degree for a node in the graph, where a node's degree
 * is the sum of its in degree and out degree as stored in the node buffer.
 */
public int maximumDegree() {
	int largest = 0;
	for(int node = 0; node < numberOfNodes; node++) {
		int base = node*NODE_BYTE_SIZE;
		int degree = nodeBuf.getInt(NODE_IN_DEGREE_OFFSET+base) + nodeBuf.getInt(NODE_OUT_DEGREE_OFFSET+base);
		largest = Math.max(largest, degree);
	}
	return largest;
}
/**
 * Creates a displayGraph.Graph which can then be accessed, manipulated and visualized
 * using that package. displayGraph.Graph name becomes this FastGraph label
 * The displayGraph.Graph node and edge labels, are taken
 * from this FastGraph nodes and edges. node and edge weights become node
 * and edge scores node and edge ages become ages in displayGraph nodes and edges.
 * New NodeType and EdgeType are created if needed with label of the integer of this type.
 * Order of nodes and edges in the displayGraph.Graph is as this FastGraph.
 *
 * @return a displayGraph.Graph with the same data as this Fast Graph
 */
public Graph generateDisplayGraph() {
	Graph displayGraph = new Graph(getName());
	// nodes first, so the edges below can reference them by index
	for(int nodeIndex = 0; nodeIndex < numberOfNodes; nodeIndex++) {
		Node displayNode = new Node();
		displayNode.setLabel(getNodeLabel(nodeIndex));
		displayNode.setScore(getNodeWeight(nodeIndex));
		displayNode.setAge(getNodeAge(nodeIndex));
		// reuse an existing NodeType with this label, creating one only if absent
		String nodeTypeLabel = Integer.toString(getNodeType(nodeIndex));
		NodeType nodeType = NodeType.withLabel(nodeTypeLabel);
		if(nodeType == null) {
			nodeType = new NodeType(nodeTypeLabel);
		}
		displayNode.setType(nodeType);
		displayGraph.addNode(displayNode);
	}
	for(int edgeIndex = 0; edgeIndex < numberOfEdges; edgeIndex++) {
		// node ordering in displayGraph matches this FastGraph, so indexes line up
		Node fromNode = displayGraph.getNodes().get(getEdgeNode1(edgeIndex));
		Node toNode = displayGraph.getNodes().get(getEdgeNode2(edgeIndex));
		Edge displayEdge = new Edge(fromNode,toNode);
		displayEdge.setLabel(getEdgeLabel(edgeIndex));
		displayEdge.setScore(getEdgeWeight(edgeIndex));
		displayEdge.setAge(getEdgeAge(edgeIndex));
		// reuse an existing EdgeType with this label, creating one only if absent
		String edgeTypeLabel = Integer.toString(getEdgeType(edgeIndex));
		EdgeType edgeType = EdgeType.withLabel(edgeTypeLabel);
		if(edgeType == null) {
			edgeType = new EdgeType(edgeTypeLabel);
		}
		displayEdge.setType(edgeType);
		displayGraph.addEdge(displayEdge);
	}
	return displayGraph;
}
/**
 * Counts the number of instances of nodes with various degrees.
 *
 * @param maxDegrees The maximum number of degrees to look for. If given 3, will count all nodes with degrees 0,1,2.
 * @return The list of number of nodes at each degree; index d holds the count of nodes with degree d.
 */
public int[] countInstancesOfNodeDegrees(int maxDegrees) {
	int[] counts = new int[maxDegrees];
	int nodeCount = getNumberOfNodes();
	for(int node = 0; node < nodeCount; node++) {
		int degree = getNodeDegree(node);
		// nodes with degree >= maxDegrees are simply not counted
		if(degree < maxDegrees) {
			counts[degree]++;
		}
	}
	return counts;
}
/**
 * Check the consistency of a graph. Checks: <ul>
 * <li> If edges link to node indexes outside of the current range</li>
 * <li> If all edges are reflected in the connection lists</li>
 * <li> If the connection list data points to the correct edges</li>
 * <li> If the nodes and edges in the connection list are correct</li>
 * </ul>
 * Prints a diagnostic to stdout and returns on the first inconsistency found.
 *
 * @return true if the graph is consistent, false otherwise
 */
public boolean checkConsistency() {
	// consistency of edges: both ends must be valid node indexes and each edge
	// must appear in node1's out list and node2's in list
	for(int e = 0; e < getNumberOfEdges(); e++) {
		int node1 = getEdgeNode1(e);
		int node2 = getEdgeNode2(e);
		if(node1 < 0 || node1 >= getNumberOfNodes()) {
			System.out.println("INCONSISTENT. Edge "+e+" has node1 "+node1+ " but there are only "+getNumberOfNodes()+" nodes");
			return false;
		}
		if(node2 < 0 || node2 >= getNumberOfNodes()) {
			System.out.println("INCONSISTENT. Edge "+e+" has node2 "+node2+ " but there are only "+getNumberOfNodes()+" nodes");
			return false;
		}
		if(!Util.convertArray(getNodeConnectingOutEdges(node1)).contains(e)) {
			System.out.println("INCONSISTENT. Edge "+e+" has node1 "+node1+ " but it is not in the node out list");
			return false;
		}
		if(!Util.convertArray(getNodeConnectingInEdges(node2)).contains(e)) {
			System.out.println("INCONSISTENT. Edge "+e+" has node2 "+node2+ " but it is not in the node in list");
			return false;
		}
	}
	// consistency of nodes and connection lists. The four connection arrays are
	// fetched once per node; the previous version called getNodeConnecting*()
	// inside every loop condition and body, rebuilding the arrays each iteration.
	for(int n = 0; n < getNumberOfNodes(); n++) {
		int[] outEdges = getNodeConnectingOutEdges(n);
		int[] outNodes = getNodeConnectingOutNodes(n);
		int[] inEdges = getNodeConnectingInEdges(n);
		int[] inNodes = getNodeConnectingInNodes(n);
		if(outEdges.length != outNodes.length) {
			System.out.println("INCONSISTENT. Node "+n+" has different number of out edges to out nodes");
			return false;
		}
		if(inEdges.length != inNodes.length) {
			System.out.println("INCONSISTENT. Node "+n+" has different number of in edges to in nodes");
			return false;
		}
		// each out connection pair must agree with the edge's own end points
		for(int i = 0; i < outEdges.length; i++) {
			int connectingEdge = outEdges[i];
			int otherEnd = oppositeEnd(connectingEdge, n);
			int connectingNode = outNodes[i];
			if(otherEnd != connectingNode) {
				System.out.println("INCONSISTENT. Node "+n+" has inconsitent edge and node in connecting out list");
				return false;
			}
			if(n != oppositeEnd(connectingEdge, otherEnd)) {
				System.out.println("INCONSISTENT. Node "+n+" has edge in connecting out list that does not point to the node");
				return false;
			}
		}
		// each in connection pair must agree with the edge's own end points
		for(int i = 0; i < inEdges.length; i++) {
			int connectingEdge = inEdges[i];
			int otherEnd = oppositeEnd(connectingEdge, n);
			int connectingNode = inNodes[i];
			if(otherEnd != connectingNode) {
				System.out.println("INCONSISTENT. Node "+n+" has inconsitent edge and node in connecting in list");
				return false;
			}
			if(n != oppositeEnd(connectingEdge, otherEnd)) {
				System.out.println("INCONSISTENT. Node "+n+" has edge in connecting in list that does not point to the node");
				return false;
			}
		}
	}
	return true;
}
}
|
package us.kbase.genomecomparison;
import java.io.File;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import us.kbase.auth.AuthToken;
import us.kbase.common.service.Tuple3;
import us.kbase.common.service.UObject;
import us.kbase.userandjobstate.InitProgress;
import us.kbase.userandjobstate.Results;
import us.kbase.userandjobstate.UserAndJobStateClient;
import us.kbase.workspaceservice.GetObjectParams;
import us.kbase.workspaceservice.ObjectData;
import us.kbase.workspaceservice.SaveObjectParams;
import us.kbase.workspaceservice.WorkspaceServiceClient;
public class TaskHolder {
// jobId -> task; populated in addTask, cleared in removeTask, read by getTask
private Map<String, Task> taskMap = new HashMap<String, Task>();
// FIFO queue of tasks waiting for a worker thread (taken from the head in gainNewTask)
private LinkedList<Task> taskQueue = new LinkedList<Task>();
// worker threads, created by the constructor
private Thread[] allThreads;
// shutdown request flag -- NOTE(review): appears to be read/written without synchronization; confirm visibility between threads
private boolean needToStop = false;
// monitor that idle workers wait on; addTask notifies it when a task is enqueued
private final Object idleMonitor = new Object();
// scratch directory handed to the BLAST runner
private final File tempDir;
// directory containing the BLAST binaries, handed to the BLAST runner
private final File blastBin;
// presumably the workspace service endpoint used for object access -- verify against callers
private static final String wsUrl = "https://kbase.us/services/workspace/";
// presumably the user-and-job-state service endpoint -- verify against callers
private static final String jobSrvUrl = "http://140.221.84.180:7083";
/**
 * Creates the task holder and immediately starts {@code threadCount} worker threads.
 * NOTE(review): the workers are started from inside the constructor, so they can
 * observe this object before construction completes -- confirm startNewThread is
 * safe to call with a partially constructed holder.
 *
 * @param threadCount number of worker threads to start
 * @param tempDir scratch directory for BLAST input/output files
 * @param blastBin directory containing the BLAST executables
 */
public TaskHolder(int threadCount, File tempDir, File blastBin) {
this.tempDir = tempDir;
this.blastBin = blastBin;
allThreads = new Thread[threadCount];
for (int i = 0; i < allThreads.length; i++) {
allThreads[i] = startNewThread(i);
}
}
/**
 * Registers a new blast-proteomes task: creates a queued job record, enqueues
 * the task for the worker threads, stores it for lookup by job id, and wakes
 * one idle worker.
 *
 * @param params the blast parameters for the two genomes to compare
 * @param authToken token stored with the task for later service calls
 * @return the job id assigned to the queued task
 * @throws Exception if the queued job record cannot be created
 */
public synchronized String addTask(BlastProteomesParams params, String authToken) throws Exception {
String jobId = createQueuedTaskJob(params, authToken);
Task task = new Task(jobId, params, authToken);
taskQueue.addLast(task);
taskMap.put(task.getJobId(), task);
synchronized (idleMonitor) {
idleMonitor.notify(); // one new task: wake a single idle worker
}
return jobId;
}
/**
 * Forgets a finished task so it can no longer be looked up by job id.
 * Note: removes from the map only; the task is assumed to have already left the queue.
 *
 * @param task the task to remove
 */
private synchronized void removeTask(Task task) {
taskMap.remove(task.getJobId());
}
/**
 * Looks up a registered task by its job id.
 *
 * @param jobId the job id returned by addTask
 * @return the task, or null if no task with that id is registered
 */
public synchronized Task getTask(String jobId) {
return taskMap.get(jobId);
}
/**
 * Hands the oldest queued task to a worker thread.
 *
 * @return the task at the head of the queue, or null when the queue is empty
 */
private synchronized Task gainNewTask() {
	if (taskQueue.isEmpty()) {
		return null;
	}
	return taskQueue.removeFirst();
}
private void runTask(Task task) {
String token = task.getAuthToken();
try {
changeTaskStateIntoRunning(task, token);
List<InnerFeature> features1 = extractProteome(task.getParams().getGenome1ws(),
task.getParams().getGenome1id(), token);
Map<String, String> proteome1 = featuresToProtMap(features1);
List<InnerFeature> features2 = extractProteome(task.getParams().getGenome2ws(),
task.getParams().getGenome2id(), token);
Map<String, String> proteome2 = featuresToProtMap(features2);
final Map<String, List<InnerHit>> data1 = new LinkedHashMap<String, List<InnerHit>>();
final Map<String, List<InnerHit>> data2 = new LinkedHashMap<String, List<InnerHit>>();
String maxEvalue = task.getParams().getMaxEvalue() == null ? "1e-10" : task.getParams().getMaxEvalue();
long time = System.currentTimeMillis();
BlastStarter.run(tempDir, proteome1, proteome2, blastBin, maxEvalue, new BlastStarter.ResultCallback() {
@Override
public void proteinPair(String name1, String name2, double ident,
int alnLen, int mismatch, int gapopens, int qstart, int qend,
int tstart, int tend, String eval, double bitScore) {
InnerHit h = new InnerHit().withId1(name1).withId2(name2).withScore(bitScore);
List<InnerHit> l1 = data1.get(name1);
if (l1 == null) {
l1 = new ArrayList<InnerHit>();
data1.put(name1, l1);
}
l1.add(h);
List<InnerHit> l2 = data2.get(name2);
if (l2 == null) {
l2 = new ArrayList<InnerHit>();
data2.put(name2, l2);
}
l2.add(h);
}
});
Comparator<InnerHit> hcmp = new Comparator<InnerHit>() {
@Override
public int compare(InnerHit o1, InnerHit o2) {
int ret = Double.compare(o2.getScore(), o1.getScore());
if (ret == 0) {
if (o1.getPercentOfBestScore() != null && o2.getPercentOfBestScore() != null) {
ret = Utils.compare(o2.getPercentOfBestScore(), o1.getPercentOfBestScore());
}
}
return ret;
}
};
Double subBbhPercentParam = task.getParams().getSubBbhPercent();
double subBbhPercent = subBbhPercentParam == null ? 90 : subBbhPercentParam;
for (Map.Entry<String, List<InnerHit>> entry : data1.entrySet())
Collections.sort(entry.getValue(), hcmp);
for (Map.Entry<String, List<InnerHit>> entry : data2.entrySet())
Collections.sort(entry.getValue(), hcmp);
for (Map.Entry<String, List<InnerHit>> entry : data1.entrySet()) {
List<InnerHit> l = entry.getValue();
double best1 = l.get(0).getScore();
for (InnerHit h : l) {
double best2 = getBestScore(h.getId2(), data2);
h.setPercentOfBestScore(Math.round(h.getScore() * 100.0 / Math.max(best1, best2) + 1e-6));
}
for (int pos = l.size() - 1; pos > 0; pos
if (l.get(pos).getPercentOfBestScore() < subBbhPercent)
l.remove(pos);
Collections.sort(entry.getValue(), hcmp);
}
for (Map.Entry<String, List<InnerHit>> entry : data2.entrySet()) {
List<InnerHit> l = entry.getValue();
double best2 = l.get(0).getScore();
for (InnerHit h : l) {
double best1 = getBestScore(h.getId1(), data1);
h.setPercentOfBestScore(Math.round(h.getScore() * 100.0 / Math.max(best1, best2) + 1e-6));
}
for (int pos = l.size() - 1; pos > 0; pos
if (l.get(pos).getPercentOfBestScore() < subBbhPercent)
l.remove(pos);
Collections.sort(entry.getValue(), hcmp);
}
List<String> prot1names = new ArrayList<String>();
Map<String, Long> prot1map = new HashMap<String, Long>();
linkedMapToPos(proteome1, prot1names, prot1map);
List<String> prot2names = new ArrayList<String>();
Map<String, Long> prot2map = new HashMap<String, Long>();
linkedMapToPos(proteome2, prot2names, prot2map);
List<List<Tuple3<Long, Long, Long>>> data1new = new ArrayList<List<Tuple3<Long, Long, Long>>>();
for (String prot1name : prot1names) {
List<Tuple3<Long, Long, Long>> hits = new ArrayList<Tuple3<Long, Long, Long>>();
data1new.add(hits);
List<InnerHit> ihits = data1.get(prot1name);
if (ihits == null)
continue;
for (InnerHit ih : ihits) {
Tuple3<Long, Long, Long> h = new Tuple3<Long, Long, Long>()
.withE1(prot2map.get(ih.getId2())).withE2(Math.round(ih.getScore() * 100))
.withE3(ih.getPercentOfBestScore());
hits.add(h);
}
}
List<List<Tuple3<Long, Long, Long>>> data2new = new ArrayList<List<Tuple3<Long, Long, Long>>>();
for (String prot2name : prot2names) {
List<Tuple3<Long, Long, Long>> hits = new ArrayList<Tuple3<Long, Long, Long>>();
data2new.add(hits);
List<InnerHit> ihits = data2.get(prot2name);
if (ihits == null)
continue;
for (InnerHit ih : ihits) {
Tuple3<Long, Long, Long> h = new Tuple3<Long, Long, Long>()
.withE1(prot1map.get(ih.getId1())).withE2(Math.round(ih.getScore() * 100))
.withE3(ih.getPercentOfBestScore());
hits.add(h);
}
}
ProteomeComparison res = new ProteomeComparison()
.withSubBbhPercent(subBbhPercent)
.withGenome1ws(task.getParams().getGenome1ws())
.withGenome1id(task.getParams().getGenome1id())
.withGenome2ws(task.getParams().getGenome2ws())
.withGenome2id(task.getParams().getGenome2id())
.withProteome1names(prot1names).withProteome1map(prot1map)
.withProteome2names(prot2names).withProteome2map(prot2map)
.withData1(data1new).withData2(data2new);
saveResult(task.getParams().getOutputWs(), task.getParams().getOutputId(), token, res);
completeTaskState(task, token, null, null);
time = System.currentTimeMillis() - time;
//System.out.println("Time: " + time + " ms.");
}catch(Throwable e) {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
e.printStackTrace(pw);
pw.close();
try {
completeTaskState(task, token, e.getMessage(), sw.toString());
} catch (Exception ex) {
ex.printStackTrace();
}
}
}
private static Map<String, String> featuresToProtMap(List<InnerFeature> features) {
Map<String, String> ret = new LinkedHashMap<String, String>();
for (InnerFeature inf : features)
ret.put(inf.protName, inf.seq);
return ret;
}
private static void linkedMapToPos(Map<String, String> linked, List<String> arr,
Map<String, Long> posMap) {
for (String name: linked.keySet()) {
long pos = arr.size();
arr.add(name);
posMap.put(name, pos);
}
}
private static double getBestScore(String name, Map<String, List<InnerHit>> data) {
List<InnerHit> l = data.get(name);
if (l == null || l.isEmpty())
return 0;
return l.get(0).getScore();
}
public static WorkspaceServiceClient createWsClient(String token) throws Exception {
WorkspaceServiceClient ret = new WorkspaceServiceClient(new URL(wsUrl), new AuthToken(token));
ret.setAuthAllowedForHttp(true);
return ret;
}
public static UserAndJobStateClient createJobClient(String token) throws Exception {
UserAndJobStateClient ret = new UserAndJobStateClient(new URL(jobSrvUrl), new AuthToken(token));
ret.setAuthAllowedForHttp(true);
return ret;
}
private String createQueuedTaskJob(BlastProteomesParams params, String token) throws Exception {
return createJobClient(token).createAndStartJob(token, "queued", "Blast proteomes of two genomes",
new InitProgress().withPtype("none"), null);
}
private void changeTaskStateIntoRunning(Task task, String token) throws Exception {
createJobClient(token).updateJob(task.getJobId(), token, "running", null);
}
private void completeTaskState(Task task, String token, String errorMessage, String errorStacktrace) throws Exception {
if (errorMessage == null) {
createJobClient(token).completeJob(task.getJobId(), token, "done", null,
new Results().withWorkspaceurl(wsUrl).withWorkspaceids(
Arrays.asList(task.getParams().getOutputWs() + "/" + task.getParams().getOutputId())));
} else {
createJobClient(token).completeJob(task.getJobId(), token, "Error: " + errorMessage,
errorStacktrace, new Results());
}
}
@SuppressWarnings("unchecked")
private List<InnerFeature> extractProteome(String ws, String genomeId, String token) throws Exception {
Map<String, Object> genome = (Map<String, Object>)createWsClient(token).getObject(
new GetObjectParams().withAuth(token).withWorkspace(ws)
.withId(genomeId).withType("Genome")).getData();
List<Map<String, Object>> features = (List<Map<String, Object>>)genome.get("features");
List<InnerFeature> ret = new ArrayList<InnerFeature>();
for (Map<String, Object> feature : features) {
String type = "" + feature.get("type");
if (!type.equals("CDS"))
continue;
InnerFeature inf = new InnerFeature();
inf.protName = "" + feature.get("id");
inf.seq = "" + feature.get("protein_translation");
List<Object> location = ((List<List<Object>>)feature.get("location")).get(0);
inf.contigName = "" + location.get(0);
int realStart = (Integer)location.get(1);
String dir = "" + location.get(2);
int len = (Integer)location.get(3);
inf.start = dir.equals("+") ? realStart : (realStart - len);
inf.stop = dir.equals("+") ? (realStart + len) : realStart;
ret.add(inf);
}
Collections.sort(ret, new Comparator<InnerFeature>() {
@Override
public int compare(InnerFeature o1, InnerFeature o2) {
int ret = o1.contigName.compareTo(o2.contigName);
if (ret == 0) {
ret = Utils.compare(o1.start, o2.start);
if (ret == 0)
ret = Utils.compare(o1.stop, o2.stop);
}
return ret;
}
});
return ret;
}
@SuppressWarnings("unchecked")
private void saveResult(String ws, String id, String token, ProteomeComparison res) throws Exception {
ObjectData data = new ObjectData();
data.getAdditionalProperties().putAll(UObject.transformObjectToObject(res, Map.class));
createWsClient(token).saveObject(new SaveObjectParams().withAuth(token).withWorkspace(ws)
.withType("ProteomeComparison").withId(id).withData(data));
}
public void stopAllThreads() {
needToStop = true;
for (Thread t : allThreads)
t.interrupt();
}
private Thread startNewThread(final int num) {
Thread ret = new Thread(
new Runnable() {
@Override
public void run() {
while (!needToStop) {
Task task = gainNewTask();
if (task != null) {
runTask(task);
removeTask(task);
} else {
int seconds = 55 + (int)(10 * Math.random());
synchronized (idleMonitor) {
try {
idleMonitor.wait(TimeUnit.SECONDS.toMillis(seconds));
} catch (InterruptedException e) {
if (!needToStop)
e.printStackTrace();
}
}
}
}
System.out.println("Task thread " + (num + 1) + " was stoped");
}
},"Task thread " + (num + 1));
ret.start();
return ret;
}
private static class InnerHit {
private String id1;
private String id2;
private Double score;
private Long percentOfBestScore;
public String getId1() {
return id1;
}
public InnerHit withId1(String id1) {
this.id1 = id1;
return this;
}
public String getId2() {
return id2;
}
public InnerHit withId2(String id2) {
this.id2 = id2;
return this;
}
public Double getScore() {
return score;
}
public InnerHit withScore(Double score) {
this.score = score;
return this;
}
public Long getPercentOfBestScore() {
return percentOfBestScore;
}
public void setPercentOfBestScore(Long percentOfBestScore) {
this.percentOfBestScore = percentOfBestScore;
}
}
private static class InnerFeature {
String protName;
String seq;
String contigName;
int start;
int stop;
}
}
|
package us.kbase.workspace;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.annotation.Generated;
import org.codehaus.jackson.annotate.JsonAnyGetter;
import org.codehaus.jackson.annotate.JsonAnySetter;
import org.codehaus.jackson.annotate.JsonProperty;
import org.codehaus.jackson.annotate.JsonPropertyOrder;
import org.codehaus.jackson.map.annotate.JsonSerialize;
/**
* <p>Original spec-file type: SaveObjectsParams</p>
* <pre>
* Input parameters for the "save_objects" function.
* One, and only one, of the following is required:
* ws_id id - the numerical ID of the workspace.
* ws_name workspace - name of the workspace or the workspace ID in KBase
* format, e.g. kb|ws.78.
* Required arguments:
* list<ObjectSaveData> objects - the objects to save.
* </pre>
*
*/
@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
@Generated("com.googlecode.jsonschema2pojo")
@JsonPropertyOrder({
    "workspace",
    "id",
    "objects"
})
public class SaveObjectsParams {

    // Workspace name (alternative to the numeric id; exactly one must be set).
    @JsonProperty("workspace")
    private String workspace;
    // Numeric workspace id (alternative to the name; exactly one must be set).
    @JsonProperty("id")
    private Integer id;
    // Objects to save; required by the spec.
    @JsonProperty("objects")
    private List<ObjectSaveData> objects = new ArrayList<ObjectSaveData>();
    // Catch-all for JSON properties not declared above (forward compatibility).
    private Map<String, Object> additionalProperties = new HashMap<String, Object>();

    @JsonProperty("workspace")
    public String getWorkspace() {
        return workspace;
    }

    @JsonProperty("workspace")
    public void setWorkspace(String workspace) {
        this.workspace = workspace;
    }

    // Fluent setter; returns this for call chaining.
    public SaveObjectsParams withWorkspace(String workspace) {
        this.workspace = workspace;
        return this;
    }

    @JsonProperty("id")
    public Integer getId() {
        return id;
    }

    @JsonProperty("id")
    public void setId(Integer id) {
        this.id = id;
    }

    // Fluent setter; returns this for call chaining.
    public SaveObjectsParams withId(Integer id) {
        this.id = id;
        return this;
    }

    @JsonProperty("objects")
    public List<ObjectSaveData> getObjects() {
        return objects;
    }

    @JsonProperty("objects")
    public void setObjects(List<ObjectSaveData> objects) {
        this.objects = objects;
    }

    // Fluent setter; returns this for call chaining.
    public SaveObjectsParams withObjects(List<ObjectSaveData> objects) {
        this.objects = objects;
        return this;
    }

    // Serializes any extra, undeclared properties back into the JSON.
    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    // Collects any extra, undeclared JSON properties during deserialization.
    @JsonAnySetter
    public void setAdditionalProperties(String name, Object value) {
        this.additionalProperties.put(name, value);
    }
}
|
package com.groupon.seleniumgridextras.videorecording;
import com.groupon.seleniumgridextras.config.Config;
import com.groupon.seleniumgridextras.config.RuntimeConfig;
import com.groupon.seleniumgridextras.utilities.ImageUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.io.File;
import static org.junit.Assert.assertEquals;
public class ImageProcessorTest {

    private static final String IMAGE_PROCESSOR_TEST_JSON = "image_processor_test.json";

    @Before
    public void setUp() throws Exception {
        // Point the runtime at a throwaway config file, persist defaults, reload,
        // then pin the recording colors the expected fixture images were built with.
        RuntimeConfig.setConfigFile(IMAGE_PROCESSOR_TEST_JSON);
        Config defaults = new Config(true);
        defaults.writeToDisk(RuntimeConfig.getConfigFile());
        RuntimeConfig.load();
        RuntimeConfig.getConfig().getVideoRecording().setTitleFrameFontColor(129, 182, 64, 128);
        RuntimeConfig.getConfig().getVideoRecording().setLowerThirdBackgroundColor(129, 182, 64, 128);
    }

    @After
    public void tearDown() throws Exception {
        // Clean up the generated config file and its ".example" sibling.
        new File(IMAGE_PROCESSOR_TEST_JSON).delete();
        new File(IMAGE_PROCESSOR_TEST_JSON + ".example").delete();
    }

    @Test
    public void testAddTextCaptions() throws Exception {
        BufferedImage captioned = ImageProcessor.addTextCaption(
                MissingFrameImage.getMissingFrameAsBufferedImage(),
                "LINE 1: Lorem ipsum dolor sit amet, consectetur adipiscing elit.",
                "LINE 2: Lorem ipsum dolor sit amet, consectetur adipiscing elit.",
                "LINE 3: Lorem ipsum dolor sit amet, consectetur adipiscing elit.",
                "LINE 4: Lorem ipsum dolor sit amet, consectetur adipiscing elit.");
        // Uncomment after changing ImageProcessor to regenerate the expected fixture:
        // ImageUtils.saveImage(new File("SeleniumGridExtras/src/test/resources/fixtures/expected_processed_image.png"), captioned);
        File expected = new File(
                ClassLoader.getSystemResource("fixtures/expected_processed_image.png").getFile());
        assertEquals(ImageUtils.encodeToString(ImageUtils.readImage(expected), "PNG"),
                ImageUtils.encodeToString(captioned, "PNG"));
    }

    @Test
    public void testCreateTitleFrame() throws Exception {
        BufferedImage titleFrame = ImageProcessor.createTitleFrame(
                new Dimension(1024, 768), BufferedImage.TYPE_3BYTE_BGR,
                "Line 1", "Line 2", "Line 3");
        // Uncomment after changing ImageProcessor to regenerate the expected fixture:
        // ImageUtils.saveImage(new File("SeleniumGridExtras/src/test/resources/fixtures/expected_title_image.png"), titleFrame);
        File expected = new File(
                ClassLoader.getSystemResource("fixtures/expected_title_image.png").getFile());
        assertEquals(ImageUtils.encodeToString(ImageUtils.readImage(expected), "PNG"),
                ImageUtils.encodeToString(titleFrame, "PNG"));
    }
}
|
package com.java110.store.listener.purchaseApply;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.java110.core.annotation.Java110Listener;
import com.java110.core.context.DataFlowContext;
import com.java110.core.factory.GenerateCodeFactory;
import com.java110.entity.center.Business;
import com.java110.store.dao.IPurchaseApplyServiceDao;
import com.java110.utils.constant.BusinessTypeConstant;
import com.java110.utils.constant.StatusConstant;
import com.java110.utils.util.Assert;
import com.java110.vo.api.purchaseApply.PurchaseApplyDetailVo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Java110Listener("savePurchaseApplyInfoListener")
@Transactional
public class SavePurchaseApplyInfoListener extends AbstractPurchaseApplyBusinessServiceDataFlowListener{

    private static Logger logger = LoggerFactory.getLogger(SavePurchaseApplyInfoListener.class);

    @Autowired
    private IPurchaseApplyServiceDao purchaseApplyServiceDaoImpl;

    @Override
    public int getOrder() {
        return 0;
    }

    @Override
    public String getBusinessTypeCd() {
        return BusinessTypeConstant.BUSINESS_TYPE_SAVE_PURCHASE_APPLY;
    }

    /**
     * Phase 1 of the two-phase save: writes the purchase apply (and its detail
     * rows) into the "business" staging tables, generating an applyOrderId
     * when the client supplied a placeholder.
     *
     * @param dataFlowContext flow context; receives "applyOrderId" as an out-param
     * @param business        the business event carrying the JSON payload
     */
    @Override
    protected void doSaveBusiness(DataFlowContext dataFlowContext, Business business) {
        JSONObject data = business.getDatas();
        Assert.notEmpty(data,"datas ");
        // Payload key: businessPurchaseApply may be a single object or an array.
        if(data.containsKey("businessPurchaseApply")){
            Object bObj = data.get("businessPurchaseApply");
            JSONArray businessPurchaseApplys = null;
            if(bObj instanceof JSONObject){
                businessPurchaseApplys = new JSONArray();
                businessPurchaseApplys.add(bObj);
            }else {
                businessPurchaseApplys = (JSONArray)bObj;
            }
            //JSONObject businessPurchaseApply = data.getJSONObject("businessPurchaseApply");
            for (int bPurchaseApplyIndex = 0; bPurchaseApplyIndex < businessPurchaseApplys.size();bPurchaseApplyIndex++) {
                JSONObject businessPurchaseApply = businessPurchaseApplys.getJSONObject(bPurchaseApplyIndex);
                doBusinessPurchaseApply(business, businessPurchaseApply);
                // Only a single-object payload exposes the generated id to the caller.
                if(bObj instanceof JSONObject) {
                    dataFlowContext.addParamOut("applyOrderId", businessPurchaseApply.getString("applyOrderId"));
                }
            }
        }
    }

    /**
     * Phase 2: copies the staged "business" rows for this bId into the live
     * instance tables (apply header plus detail rows).
     *
     * @param dataFlowContext flow context; receives "applyOrderId" as an out-param
     * @param business        the business event being committed
     */
    @Override
    protected void doBusinessToInstance(DataFlowContext dataFlowContext, Business business) {
        JSONObject data = business.getDatas();
        Map info = new HashMap();
        info.put("bId",business.getbId());
        info.put("operate",StatusConstant.OPERATE_ADD);
        List<Map> businessPurchaseApplyInfo = purchaseApplyServiceDaoImpl.getBusinessPurchaseApplyInfo(info);
        List<Map> businessPurchaseApplyDetailInfo = purchaseApplyServiceDaoImpl.getBusinessPurchaseApplyDetailInfo(info);
        // Convert the raw row maps into typed detail VOs before persisting.
        List<PurchaseApplyDetailVo> purchaseApplyDetailVos = new ArrayList<>();
        for( int i = 0; i < businessPurchaseApplyDetailInfo.size(); i++){
            PurchaseApplyDetailVo purchaseApplyDetailVo = JSON.parseObject(JSON.toJSONString(businessPurchaseApplyDetailInfo.get(i)), PurchaseApplyDetailVo.class);
            purchaseApplyDetailVos.add(purchaseApplyDetailVo);
        }
        if( businessPurchaseApplyInfo != null && businessPurchaseApplyInfo.size() >0) {
            reFreshShareColumn(info, businessPurchaseApplyInfo.get(0));
            purchaseApplyServiceDaoImpl.savePurchaseApplyInfoInstance(info);
            purchaseApplyServiceDaoImpl.savePurchaseApplyDetailInfo(purchaseApplyDetailVos);
            if(businessPurchaseApplyInfo.size() == 1) {
                dataFlowContext.addParamOut("applyOrderId", businessPurchaseApplyInfo.get(0).get("apply_order_id"));
            }
        }
    }

    /**
     * Copies the sharding column (storeId) from a loaded row into the query
     * map when the caller did not provide it.
     *
     * @param info         query/update parameter map (camelCase keys)
     * @param businessInfo row previously loaded from the DB (snake_case keys)
     */
    private void reFreshShareColumn(Map info, Map businessInfo) {
        if (info.containsKey("storeId")) {
            return;
        }
        if (!businessInfo.containsKey("store_id")) {
            return;
        }
        info.put("storeId", businessInfo.get("store_id"));
    }

    /**
     * Rollback: re-marks the last valid instance row for this bId as invalid,
     * undoing the instance-phase commit.
     *
     * @param dataFlowContext flow context (unused here)
     * @param business        the business event being rolled back
     */
    @Override
    protected void doRecover(DataFlowContext dataFlowContext, Business business) {
        String bId = business.getbId();
        //Assert.hasLength(bId," bId");
        Map info = new HashMap();
        info.put("bId",bId);
        info.put("statusCd",StatusConstant.STATUS_CD_VALID);
        Map paramIn = new HashMap();
        paramIn.put("bId",bId);
        paramIn.put("statusCd",StatusConstant.STATUS_CD_INVALID);
        List<Map> purchaseApplyInfo = purchaseApplyServiceDaoImpl.getPurchaseApplyInfo(info);
        if(purchaseApplyInfo != null && purchaseApplyInfo.size() > 0){
            reFreshShareColumn(paramIn, purchaseApplyInfo.get(0));
            purchaseApplyServiceDaoImpl.updatePurchaseApplyInfoInstance(paramIn);
        }
    }

    /**
     * Normalizes and stages one businessPurchaseApply entry: generates an
     * applyOrderId for placeholder ("-"-prefixed) ids, stamps state/bId/operate,
     * then saves the header and its "resourceStores" detail rows.
     *
     * @param business              the surrounding business event
     * @param businessPurchaseApply one apply entry from the payload
     */
    private void doBusinessPurchaseApply(Business business,JSONObject businessPurchaseApply){
        Assert.jsonObjectHaveKey(businessPurchaseApply,"applyOrderId","businessPurchaseApply applyOrderId ");
        // A leading "-" marks a client-side placeholder id; replace with a real one.
        if(businessPurchaseApply.getString("applyOrderId").startsWith("-")){
            businessPurchaseApply.put("applyOrderId",GenerateCodeFactory.getGeneratorId(GenerateCodeFactory.CODE_PREFIX_applyOrderId));
        }
        businessPurchaseApply.put("state","1000");
        businessPurchaseApply.put("bId",business.getbId());
        businessPurchaseApply.put("operate", StatusConstant.OPERATE_ADD);
        Object jsonArray = businessPurchaseApply.get("resourceStores");
        List<PurchaseApplyDetailVo> list = JSONObject.parseArray(jsonArray.toString(),PurchaseApplyDetailVo.class);
        // Propagate the (possibly regenerated) order id and audit fields to each detail row.
        for( PurchaseApplyDetailVo purchaseApplyDetailVo : list){
            purchaseApplyDetailVo.setApplyOrderId(businessPurchaseApply.get("applyOrderId").toString());
            purchaseApplyDetailVo.setbId(business.getbId());
            purchaseApplyDetailVo.setOperate(StatusConstant.OPERATE_ADD);
        }
        purchaseApplyServiceDaoImpl.saveBusinessPurchaseApplyInfo(businessPurchaseApply);
        purchaseApplyServiceDaoImpl.saveBusinessPurchaseApplyDetailInfo(list);
    }

    public IPurchaseApplyServiceDao getPurchaseApplyServiceDaoImpl() {
        return purchaseApplyServiceDaoImpl;
    }

    public void setPurchaseApplyServiceDaoImpl(IPurchaseApplyServiceDao purchaseApplyServiceDaoImpl) {
        this.purchaseApplyServiceDaoImpl = purchaseApplyServiceDaoImpl;
    }
}
|
package math;
/**
 * Checked exception signalling that a division by zero was attempted.
 */
public class DivideByZeroException extends Exception {

    private static final long serialVersionUID = 1L;

    /**
     * Creates the exception with a caller-supplied reason.
     *
     * @param reason detail message describing the failed operation
     */
    public DivideByZeroException(String reason) {
        super(reason);
    }

    /** Creates the exception without a detail message. */
    public DivideByZeroException() {
        super();
    }

    /**
     * Returns the canned, human-readable description of this error.
     *
     * @return the fixed message "Divide by zero!"
     */
    public String printException() {
        return "Divide by zero!";
    }
}
|
package org.duracloud.account.app.controller;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import javax.validation.Valid;
import org.duracloud.account.app.controller.GroupsForm.Action;
import org.duracloud.account.db.model.AccountInfo;
import org.duracloud.account.db.model.DuracloudGroup;
import org.duracloud.account.db.model.DuracloudUser;
import org.duracloud.account.db.util.AccountService;
import org.duracloud.account.db.util.DuracloudGroupService;
import org.duracloud.account.db.util.error.DuracloudGroupAlreadyExistsException;
import org.duracloud.account.db.util.error.DuracloudGroupNotFoundException;
import org.duracloud.account.db.util.error.InvalidGroupNameException;
import org.duracloud.common.error.DuraCloudRuntimeException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
@Controller
@Lazy
public class AccountGroupsController extends AbstractAccountController {
protected static final String AVAILABLE_USERS_KEY = "availableUsers";
protected static final String GROUPS_VIEW_ID = "account-groups";
protected static final String GROUP_VIEW_ID = "account-group";
protected static final String GROUP_EDIT_VIEW_ID = "account-group-edit";
protected static final String GROUPS_FORM_KEY = "groupsForm";
protected static final String GROUP_FORM_KEY = "groupForm";
protected static String GROUP_USERS_KEY = "groupUsers";
protected static String GROUP_NAME_KEY = "groupName";
protected static String GROUP_KEY = "group";
protected static final String GROUPS_PATH = ACCOUNT_PATH + "/groups";
protected static final String GROUP_PATH = GROUPS_PATH
+ "/{groupName:[a-z0-9._\\-@]+}";
protected static final String GROUP_EDIT_PATH = GROUP_PATH + "/edit";
private static final String GROUP_NAME_RESERVED_ERROR_CODE = "error.groupName.reserved";
private static final String GROUP_NAME_INVALID_ERROR_CODE = "error.groupName.invalid";
private static final String GROUP_NAME_EXISTS_ERROR_CODE = "error.groupName.exists";
private static final String GROUP_NAME_RESERVED_MESSAGE = "The group name you specified is a reserved name. Please try another.";
private static final String GROUP_NAME_INVALID_MESSAGE = "Group names can contain only lowercase letters, numbers, '.', '@',"
+ "'-', or '_' and must start and end only with letters or numbers.";
private static final String GROUP_NAME_EXISTS_MESSAGE = "This group already exists. Please choose another name.";
@Autowired
protected DuracloudGroupService duracloudGroupService;
/**
 * Renders the list of groups for the given account.
 */
@RequestMapping(value = GROUPS_PATH, method = RequestMethod.GET)
public String getGroups(@PathVariable Long accountId, Model model)
    throws Exception {
    AccountService accountService = getAccountService(accountId);
    addGroupsObjectsToModel(accountService, model);
    return GROUPS_VIEW_ID;
}
/**
 * Handles group creation (ADD) and bulk removal (any other action).
 * On a successful ADD the user is redirected straight to the new group's
 * edit page; validation failures re-render the list with field errors.
 */
@RequestMapping(value = GROUPS_PATH, method = RequestMethod.POST)
public String modifyGroups(@PathVariable Long accountId, Model model,
    @ModelAttribute(GROUPS_FORM_KEY) @Valid GroupsForm form,
    BindingResult result) throws Exception {
    AccountService as = this.accountManagerService.getAccount(accountId);
    GroupsForm.Action action = form.getAction();
    if (action == Action.ADD) {
        String name = form.getGroupName();
        // Stored group names always carry the fixed prefix.
        String groupName = DuracloudGroup.PREFIX + name;
        try {
            duracloudGroupService.createGroup(groupName, accountId);
        } catch (InvalidGroupNameException e) {
            // The reserved "public" group gets a dedicated message; any other
            // rejection means the name failed the naming rules.
            if (groupName
                .equalsIgnoreCase(DuracloudGroup.PUBLIC_GROUP_NAME)) {
                result.rejectValue(GROUP_NAME_KEY,
                    GROUP_NAME_RESERVED_ERROR_CODE,
                    GROUP_NAME_RESERVED_MESSAGE);
            } else {
                result.rejectValue(GROUP_NAME_KEY,
                    GROUP_NAME_INVALID_ERROR_CODE,
                    GROUP_NAME_INVALID_MESSAGE);
            }
        } catch (DuracloudGroupAlreadyExistsException e) {
            result.rejectValue(GROUP_NAME_KEY,
                GROUP_NAME_EXISTS_ERROR_CODE, GROUP_NAME_EXISTS_MESSAGE);
        }
        if (!result.hasFieldErrors()) {
            // Creation succeeded: jump to the new group's edit page.
            return formatGroupRedirect(accountId, groupName, "/edit");
        }
    } else {
        // Non-ADD actions delete the selected groups.
        String[] groups = form.getGroupNames();
        if (groups != null) {
            for (String name : groups) {
                DuracloudGroup group = duracloudGroupService.getGroup(name,
                    accountId);
                removeGroup(group, accountId);
            }
        }
    }
    addGroupsObjectsToModel(as, model);
    return GROUPS_VIEW_ID;
}
/**
 * Renders the detail page for a single group.
 */
@RequestMapping(value = GROUP_PATH, method = RequestMethod.GET)
public String getGroup(@PathVariable Long accountId,
    @PathVariable String groupName, Model model) throws Exception {
    addUserToModel(model);
    model.addAttribute(GROUPS_FORM_KEY, new GroupsForm());
    AccountService accountService = getAccountService(accountId);
    List<DuracloudGroup> accountGroups = getGroups(accountId);
    DuracloudGroup selected = getGroup(groupName, accountGroups);
    addGroupToModel(selected, model);
    addGroupsObjectsToModel(accountService, accountGroups, model);
    return GROUP_VIEW_ID;
}
/**
 * Loads the account's groups as a list; never returns null even when the
 * service does.
 */
private List<DuracloudGroup> getGroups(Long accountId) {
    Set<DuracloudGroup> found = this.duracloudGroupService
        .getGroups(accountId);
    if (found == null) {
        return new LinkedList<DuracloudGroup>();
    }
    return new LinkedList<DuracloudGroup>(found);
}
/**
 * Renders the group-edit page and (re)seeds the session-scoped working copy
 * of the group's membership from the persisted group.
 */
@RequestMapping(value = GROUP_EDIT_PATH, method = RequestMethod.GET)
public String editGroup(@PathVariable Long accountId,
    @PathVariable String groupName, HttpServletRequest request,
    Model model) throws Exception {
    AccountService as = getAccountService(accountId);
    List<DuracloudGroup> groups = getGroups(accountId);
    addGroupsObjectsToModel(as, groups, model);
    DuracloudGroup group = getGroup(groupName, groups);
    addGroupToModel(group, model);
    model.addAttribute(GROUP_FORM_KEY, new GroupForm());
    Set<DuracloudUser> groupUsers = group.getUsers();
    addAvailableUsersToModel(as, groupUsers, model);
    HttpSession session = request.getSession();
    // Drop any stale working copy first so the call below re-seeds the
    // session from the persisted group membership.
    session.removeAttribute(GROUP_USERS_KEY);
    addGroupUsersIfNotAlreadyInSession(group, model, session);
    return GROUP_EDIT_VIEW_ID;
}
/**
 * Handles edits of a group's membership. SAVE persists the session working
 * copy and redirects to the group page; ADD/REMOVE only move users between
 * the "group" and "available" lists held in the HTTP session — nothing is
 * persisted until SAVE.
 *
 * @throws DuraCloudRuntimeException when the form action is unsupported
 */
@RequestMapping(value = GROUP_EDIT_PATH, method = RequestMethod.POST)
public String editGroup(@PathVariable Long accountId,
    @PathVariable String groupName,
    @ModelAttribute(GROUP_FORM_KEY) @Valid GroupForm form,
    HttpServletRequest request, Model model) throws Exception {
    GroupForm.Action action = form.getAction();
    AccountService as = getAccountService(accountId);
    List<DuracloudGroup> groups = getGroups(accountId);
    DuracloudGroup group = getGroup(groupName, groups);
    addGroupToModel(group, model);
    HttpSession session = request.getSession();
    // Session-scoped working copy of the membership (seeded on first access).
    List<DuracloudUser> groupUsers = addGroupUsersIfNotAlreadyInSession(
        group, model, session);
    // handle save case
    if (action == GroupForm.Action.SAVE) {
        Set<DuracloudUser> users = new HashSet<DuracloudUser>(groupUsers);
        save(group, users, accountId, form);
        // Membership is persisted; discard the working copy and show the group.
        session.removeAttribute(GROUP_USERS_KEY);
        return formatGroupRedirect(accountId, groupName, null);
    }
    Collection<DuracloudUser> availableUsers = getAvailableUsers(as,
        groupUsers);
    if (action == GroupForm.Action.ADD) {
        // handle add
        String[] availableUsernames = form.getAvailableUsernames();
        if (availableUsernames != null) {
            for (String username : availableUsernames) {
                DuracloudUser user = getUser(username, availableUsers);
                if (user != null) {
                    groupUsers.add(user);
                }
            }
            // Pre-select the just-moved users on the other side of the widget.
            form.setAvailableUsernames(null);
            form.setGroupUsernames(availableUsernames);
        }
    } else if (action == GroupForm.Action.REMOVE) {
        String[] groupUsernames = form.getGroupUsernames();
        if (groupUsernames != null) {
            for (String username : groupUsernames) {
                DuracloudUser user = getUser(username, groupUsers);
                if (user != null) {
                    groupUsers.remove(user);
                    availableUsers.add(user);
                }
            }
            // Pre-select the just-moved users on the other side of the widget.
            form.setGroupUsernames(null);
            form.setAvailableUsernames(groupUsernames);
        }
    } else {
        throw new DuraCloudRuntimeException("[" + action
            + "] not supported.");
    }
    model.addAttribute(GROUP_FORM_KEY, form);
    addAvailableUsersToModel(availableUsers, groupUsers, model);
    addGroupsObjectsToModel(as, groups, model);
    return GROUP_EDIT_VIEW_ID;
}
/**
 * Builds a "redirect:" view name pointing at a group page, expanding the
 * {accountId} and {groupName:...} URI-template variables and optionally
 * appending a suffix such as "/edit".
 */
private String formatGroupRedirect(Long accountId, String groupName,
    String suffix) {
    String template = "redirect:" + ACCOUNTS_PATH + GROUP_PATH;
    String resolved = template
        .replace("{accountId}", String.valueOf(accountId))
        .replaceAll("\\{groupName.*\\}", String.valueOf(groupName));
    if (suffix != null) {
        resolved += suffix;
    }
    return resolved;
}
/**
 * Exposes the "available users" list to the view: all supplied users minus
 * the current group members.
 *
 * NOTE(review): this mutates the caller-supplied {@code allUsers} collection
 * via removeAll; current callers pass freshly built lists, so this is safe —
 * confirm before reusing with a shared collection.
 */
private void addAvailableUsersToModel(Collection<DuracloudUser> allUsers,
    Collection<DuracloudUser> groupUsers, Model model) {
    if (allUsers != null && groupUsers != null) {
        allUsers.removeAll(groupUsers);
    }
    model.addAttribute(AVAILABLE_USERS_KEY, allUsers);
}
/**
 * Finds the user with the given username in the collection.
 *
 * @return the matching user, or null when none matches
 */
private DuracloudUser getUser(String username,
    Collection<DuracloudUser> users) {
    DuracloudUser match = null;
    for (DuracloudUser candidate : users) {
        if (candidate.getUsername().equals(username)) {
            match = candidate;
            break;
        }
    }
    return match;
}
/**
 * Returns the session-scoped working copy of the group's members, creating
 * and seeding it from the persisted group (sorted by username) when absent,
 * and always exposing it to the view model.
 */
// Narrowly-scoped suppression: HttpSession attributes are untyped, but this
// attribute is only ever written below (and by callers of this method) with
// a List<DuracloudUser>, so the cast is safe.
@SuppressWarnings("unchecked")
private List<DuracloudUser> addGroupUsersIfNotAlreadyInSession(
    DuracloudGroup group, Model model, HttpSession session) {
    List<DuracloudUser> groupUsers = (List<DuracloudUser>) session
        .getAttribute(GROUP_USERS_KEY);
    if (groupUsers == null) {
        groupUsers = new LinkedList<DuracloudUser>();
        groupUsers.addAll(group.getUsers());
        Collections.sort(groupUsers, USERNAME_COMPARATOR);
        session.setAttribute(GROUP_USERS_KEY, groupUsers);
    }
    model.addAttribute(GROUP_USERS_KEY, groupUsers);
    return groupUsers;
}
private void save(DuracloudGroup group, Set<DuracloudUser> groupUsers,
                  Long accountId, GroupForm form)
    throws DuracloudGroupNotFoundException {
    // Persist the new membership, then clear the form for the next edit.
    duracloudGroupService.updateGroupUsers(group, groupUsers, accountId);
    form.reset();
}
private void removeGroup(DuracloudGroup group, Long accountId) {
    // Deletion is fully delegated to the group service.
    duracloudGroupService.deleteGroup(group, accountId);
}
private AccountService getAccountService(Long accountId) throws Exception {
    // Look up the account-scoped service facade by id.
    return accountManagerService.getAccount(accountId);
}
private void addGroupsObjectsToModel(AccountService as, Model model)
    throws Exception {
    // Resolve the account first so its id can be used to load the group list.
    AccountInfo account = as.retrieveAccountInfo();
    model.addAttribute(ACCOUNT_INFO_KEY, account);
    addGroupsObjectsToModel(as, getGroups(account.getId()), model);
}
private void addGroupsObjectsToModel(AccountService as,
    List<DuracloudGroup> groups, Model model) throws Exception {
    // Populate everything the groups views need: current user, account
    // info/id, a groups form, and the group list itself.
    addUserToModel(model);
    model.addAttribute(ACCOUNT_INFO_KEY, as.retrieveAccountInfo());
    model.addAttribute("accountId", as.getAccountId());
    // Only seed a fresh form when an earlier handler (e.g. a failed
    // validation pass) has not already put one in the model.
    boolean formMissing = !model.asMap().containsKey(GROUPS_FORM_KEY);
    if (formMissing) {
        model.addAttribute(GROUPS_FORM_KEY, new GroupsForm());
    }
    addGroupsToModel(model, groups);
}
private void addAvailableUsersToModel(AccountService as,
    Collection<DuracloudUser> groupUsers, Model model) {
    // Compute the candidate set from the account, then defer to the
    // collection-based overload to publish it.
    addAvailableUsersToModel(getAvailableUsers(as, groupUsers), groupUsers, model);
}
/**
 * Returns the users eligible for group membership: every account user
 * except root users and users already in the group, sorted by username.
 */
private Collection<DuracloudUser> getAvailableUsers(AccountService as,
    Collection<DuracloudUser> groupUsers) {
    LinkedList<DuracloudUser> list = new LinkedList<DuracloudUser>();
    // Skip root users while copying rather than adding everyone and then
    // removing them afterwards (each LinkedList.remove was an O(n) scan).
    for (DuracloudUser user : as.getUsers()) {
        if (!user.isRoot()) {
            list.add(user);
        }
    }
    if (groupUsers != null) {
        list.removeAll(groupUsers);
    }
    Collections.sort(list, USERNAME_COMPARATOR);
    return list;
}
// Orders users alphabetically by username; shared by all user lists this
// controller renders. Declared final: it is a constant and must never be
// reassigned (the original was missing the modifier).
private static final Comparator<DuracloudUser> USERNAME_COMPARATOR = new Comparator<DuracloudUser>() {
    @Override
    public int compare(DuracloudUser o1, DuracloudUser o2) {
        return o1.getUsername().compareTo(o2.getUsername());
    }
};
private void addGroupToModel(DuracloudGroup group, Model model) {
    // Expose the group and its current membership to the view layer.
    model.addAttribute(GROUP_USERS_KEY, group.getUsers());
    model.addAttribute(GROUP_KEY, group);
}
private DuracloudGroup getGroup(String groupName,
    List<DuracloudGroup> groups) throws DuracloudGroupNotFoundException {
    // Group names are matched case-insensitively; a miss is an error.
    for (DuracloudGroup candidate : groups) {
        if (candidate.getName().equalsIgnoreCase(groupName)) {
            return candidate;
        }
    }
    throw new DuracloudGroupNotFoundException("no group named '"
        + groupName + "' found in group set.");
}
// Publishes the group list under the "groups" key read by the group views.
private void addGroupsToModel(Model model, List<DuracloudGroup> groups) {
    model.addAttribute("groups", groups);
}
// Setter injection point for the group service collaborator.
public void setDuracloudGroupService(DuracloudGroupService groupService) {
    duracloudGroupService = groupService;
}
}
|
package ru.yandex.qatools.allure.utils;
import com.sun.xml.bind.marshaller.CharacterEscapeHandler;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import ru.yandex.qatools.allure.config.AllureModelUtils;
import ru.yandex.qatools.allure.model.ObjectFactory;
import ru.yandex.qatools.allure.model.TestSuiteResult;
import javax.xml.bind.JAXB;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.Marshaller;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Validator;
import java.io.File;
import java.util.Arrays;
import java.util.Collection;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
@RunWith(Parameterized.class)
public class XmlEscapeHandlerTest {

    // File each test marshals the suite result into.
    private File testSuiteResultFile;
    // JAXB marshaller configured with the escape handler under test.
    private Marshaller marshaller;
    private TestSuiteResult result;
    // The XML-invalid character injected into the title for this run.
    private String character;

    @Rule
    public TemporaryFolder folder = new TemporaryFolder();

    public XmlEscapeHandlerTest(String character) {
        this.character = character;
    }

    // Characters that are not allowed in XML 1.0 documents.
    @Parameterized.Parameters
    public static Collection<Object[]> data() {
        return Arrays.asList(
            new Object[]{"\u0000"},
            new Object[]{"\u0017"}
        );
    }

    @Before
    public void setUp() throws Exception {
        testSuiteResultFile = folder.newFile();
        marshaller = JAXBContext.newInstance(TestSuiteResult.class).createMarshaller();
        // Route all character escaping through the handler under test.
        marshaller.setProperty(
            CharacterEscapeHandler.class.getName(),
            XmlEscapeHandler.getInstance()
        );
        result = new TestSuiteResult().withName("name-and-кириллицей-also");
    }

    @Test
    public void dataWithInvalidCharacterTest() throws Exception {
        // Marshal a title containing the invalid character; the produced XML
        // must still validate against the Allure schema and round-trip the
        // surrounding (valid) content unchanged.
        result.setTitle("prefix " + character + " suffix");
        marshaller.marshal(new ObjectFactory().createTestSuite(result), testSuiteResultFile);

        Validator validator = AllureModelUtils.getAllureSchemaValidator();
        validator.validate(new StreamSource(testSuiteResultFile));

        TestSuiteResult testSuite = JAXB.unmarshal(testSuiteResultFile, TestSuiteResult.class);
        assertThat(testSuite.getName(), is("name-and-кириллицей-also"));
        assertTrue(testSuite.getTitle().startsWith("prefix "));
        assertTrue(testSuite.getTitle().endsWith(" suffix"));
    }
}
|
package nodomain.freeyourgadget.gadgetbridge.devices.huami.amazfitbip;
import java.util.UUID;
import static nodomain.freeyourgadget.gadgetbridge.devices.miband.MiBand2Service.DISPLAY_ITEM_BIT_CLOCK;
import static nodomain.freeyourgadget.gadgetbridge.devices.miband.MiBand2Service.ENDPOINT_DISPLAY;
import static nodomain.freeyourgadget.gadgetbridge.devices.miband.MiBand2Service.ENDPOINT_DISPLAY_ITEMS;
/**
 * BLE constants for the Amazfit Bip: the weather characteristic UUID plus
 * command payloads written to the Mi Band 2 endpoints (see the static
 * imports for {@code ENDPOINT_DISPLAY} / {@code ENDPOINT_DISPLAY_ITEMS}).
 */
public class AmazfitBipService {
    public static final UUID UUID_CHARACTERISTIC_WEATHER = UUID.fromString("0000000e-0000-3512-2118-0009af100700");

    // goes to UUID_CHARACTERISTIC_3_CONFIGURATION, TODO: validate this for Mi Band 2, it maybe triggers more than only GPS version...
    public static final byte[] COMMAND_REQUEST_GPS_VERSION = new byte[]{0x0e};

    public static final byte COMMAND_ACTIVITY_DATA_TYPE_DEBUGLOGS = 0x07;

    // Language-selection payloads: {ENDPOINT_DISPLAY, 0x13, 0x00, index} —
    // the last byte selects the language.
    public static final byte[] COMMAND_SET_LANGUAGE_SIMPLIFIED_CHINESE = new byte[]{ENDPOINT_DISPLAY, 0x13, 0x00, 0x00};
    public static final byte[] COMMAND_SET_LANGUAGE_TRADITIONAL_CHINESE = new byte[]{ENDPOINT_DISPLAY, 0x13, 0x00, 0x01};
    public static final byte[] COMMAND_SET_LANGUAGE_ENGLISH = new byte[]{ENDPOINT_DISPLAY, 0x13, 0x00, 0x02};
    public static final byte[] COMMAND_SET_LANGUAGE_SPANISH = new byte[]{ENDPOINT_DISPLAY, 0x13, 0x00, 0x03};
    // Newer-firmware language command (opcode 0x17); trailing bytes are
    // presumably a locale payload filled in by the caller — TODO confirm.
    public static final byte[] COMMAND_SET_LANGUAGE_NEW_TEMPLATE = new byte[]{ENDPOINT_DISPLAY, 0x17, 0x00, 0, 0, 0, 0, 0};

    // Activity-fetch data-type selectors for sports data.
    public static final byte COMMAND_ACTIVITY_DATA_TYPE_SPORTS_SUMMARIES = 0x05;
    public static final byte COMMAND_ACTIVITY_DATA_TYPE_SPORTS_DETAILS = 0x06;

    public static final byte[] COMMAND_ACK_FIND_PHONE_IN_PROGRESS = new byte[]{ENDPOINT_DISPLAY, 0x14, 0x00, 0x00};

    // Display-item configuration: clock bit first, then the screen ordering.
    public static final byte[] COMMAND_CHANGE_SCREENS = new byte[]{ENDPOINT_DISPLAY_ITEMS, DISPLAY_ITEM_BIT_CLOCK, 0x10, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08};
}
|
package io.cattle.platform.api.resource.jooq;
import io.cattle.platform.api.auth.Policy;
import io.cattle.platform.api.resource.AbstractObjectResourceManager;
import io.cattle.platform.api.utils.ApiUtils;
import io.cattle.platform.object.jooq.utils.JooqUtils;
import io.cattle.platform.object.meta.MapRelationship;
import io.cattle.platform.object.meta.ObjectMetaDataManager;
import io.cattle.platform.object.meta.Relationship;
import io.cattle.platform.object.meta.Relationship.RelationshipType;
import io.github.ibuildthecloud.gdapi.context.ApiContext;
import io.github.ibuildthecloud.gdapi.exception.ClientVisibleException;
import io.github.ibuildthecloud.gdapi.factory.SchemaFactory;
import io.github.ibuildthecloud.gdapi.model.Include;
import io.github.ibuildthecloud.gdapi.model.ListOptions;
import io.github.ibuildthecloud.gdapi.model.Pagination;
import io.github.ibuildthecloud.gdapi.model.Schema;
import io.github.ibuildthecloud.gdapi.model.Sort;
import io.github.ibuildthecloud.gdapi.request.ApiRequest;
import io.github.ibuildthecloud.gdapi.util.ResponseCodes;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import org.jooq.Condition;
import org.jooq.Configuration;
import org.jooq.DSLContext;
import org.jooq.JoinType;
import org.jooq.SelectQuery;
import org.jooq.Table;
import org.jooq.TableField;
import org.jooq.exception.DataAccessException;
import org.jooq.impl.DSL;
import org.jooq.impl.DefaultDSLContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * jOOQ-backed implementation of the object resource manager: builds SELECT
 * queries (with include/link joins, criteria, sort, and marker-based
 * pagination) and performs deletes against the relational store.
 */
public abstract class AbstractJooqResourceManager extends AbstractObjectResourceManager {

    private static final Logger log = LoggerFactory.getLogger(AbstractJooqResourceManager.class);

    Configuration configuration;

    /** Creates a new DSL context bound to the injected jOOQ configuration. */
    protected DSLContext create() {
        return new DefaultDSLContext(configuration);
    }

    @Override
    protected Object listInternal(SchemaFactory schemaFactory, String type, Map<Object, Object> criteria, ListOptions options) {
        return listInternal(schemaFactory, type, criteria, options, null);
    }

    /**
     * Lists records of the given type. Extra {@code joins} (used by map
     * links) and the tables required for "include" expansion are folded into
     * a single query; pagination fetches limit+1 rows to detect a next page.
     */
    protected Object listInternal(SchemaFactory schemaFactory, String type, Map<Object, Object> criteria, ListOptions options, Map<Table<?>, Condition> joins) {
        Class<?> clz = getClass(schemaFactory, type, criteria, true);
        if (clz == null) {
            return null;
        }

        /* Use core schema, parent may not be authorized */
        type = getObjectManager().getSchemaFactory().getSchemaName(clz);

        Table<?> table = JooqUtils.getTableFromRecordClass(clz);
        Sort sort = options == null ? null : options.getSort();
        Pagination pagination = options == null ? null : options.getPagination();
        Include include = options == null ? null : options.getInclude();

        if (table == null) {
            return null;
        }

        SelectQuery<?> query = create().selectQuery();
        MultiTableMapper mapper = addTables(schemaFactory, query, type, table, criteria, include, pagination, joins);
        addJoins(query, joins);
        addConditions(schemaFactory, query, type, table, criteria);
        addSort(schemaFactory, type, sort, query);
        addLimit(schemaFactory, type, pagination, query);

        // A mapper is only created when joins/includes are present; without
        // one, a plain fetch of the single table suffices.
        List<?> result = mapper == null ? query.fetch() : query.fetchInto(mapper);
        processPaginationResult(result, pagination, mapper);

        return result;
    }

    /** Adds the caller-supplied LEFT OUTER joins, if any. */
    protected void addJoins(SelectQuery<?> query, Map<Table<?>, Condition> joins) {
        if (joins == null) {
            return;
        }
        for (Map.Entry<Table<?>, Condition> entry : joins.entrySet()) {
            query.addJoin(entry.getKey(), JoinType.LEFT_OUTER_JOIN, entry.getValue());
        }
    }

    /**
     * Populates the pagination response. Because the query fetched limit+1
     * rows, an over-full result means another page exists; the sentinel row
     * is dropped (the mapper variant tracks size itself).
     */
    protected void processPaginationResult(List<?> result, Pagination pagination, MultiTableMapper mapper) {
        Integer limit = pagination == null ? null : pagination.getLimit();
        if (limit == null) {
            return;
        }

        long offset = getOffset(pagination);
        boolean partial = false;
        if (mapper == null) {
            partial = result.size() > limit;
            if (partial) {
                result.remove(result.size() - 1);
            }
        } else {
            partial = mapper.getResultSize() > limit;
        }

        if (partial) {
            Pagination paginationResponse = new Pagination(limit);
            paginationResponse.setPartial(true);
            paginationResponse.setNext(ApiContext.getUrlBuilder().next("m" + (offset + limit)));
            pagination.setResponse(paginationResponse);
        } else {
            pagination.setResponse(new Pagination(limit));
        }
    }

    /** Parses the numeric offset out of the pagination marker (0 on any miss). */
    protected int getOffset(Pagination pagination) {
        Object marker = getMarker(pagination);
        if (marker == null) {
            return 0;
        } else if (marker instanceof String) {
            /*
             * Important to check that marker is a string. If you don't then
             * somebody could use the marker functionality to deobfuscate ID's
             * and find their long value.
             */
            try {
                return Integer.parseInt((String) marker);
            } catch (NumberFormatException nfe) {
                return 0;
            }
        }

        return 0;
    }

    /**
     * Resolves the schema class for a type; when the type is a sub-schema of
     * the class's schema, a kind criterion is added so only matching rows
     * are returned.
     */
    protected Class<?> getClass(SchemaFactory schemaFactory, String type, Map<Object, Object> criteria, boolean alterCriteria) {
        Schema schema = schemaFactory.getSchema(type);
        Class<?> clz = schemaFactory.getSchemaClass(type);
        Schema clzSchema = schemaFactory.getSchema(clz);

        if (clz != null && (clzSchema == null || !schema.getId().equals(clzSchema.getId())) && alterCriteria) {
            criteria.put(ObjectMetaDataManager.KIND_FIELD, type);
        }

        return clz;
    }

    /**
     * Registers the FROM table plus any include/join tables with the query
     * and a MultiTableMapper. Returns null when no mapper is needed (no
     * joins and no includes).
     */
    protected MultiTableMapper addTables(SchemaFactory schemaFactory, SelectQuery<?> query, String type, Table<?> table, Map<Object, Object> criteria,
            Include include, Pagination pagination, Map<Table<?>, Condition> joins) {
        if ((joins == null || joins.isEmpty()) && (include == null || include.getLinks().isEmpty())) {
            query.addFrom(table);
            return null;
        }

        MultiTableMapper tableMapper = new MultiTableMapper(getMetaDataManager(), pagination);
        tableMapper.map(table);

        if (include == null) {
            query.addSelect(tableMapper.getFields());
            query.addFrom(table);
            return tableMapper;
        }

        List<Relationship> rels = new ArrayList<Relationship>();
        // Index 0 is the base table, which has no relationship of its own.
        rels.add(null);

        for (Map.Entry<String, Relationship> entry : getLinkRelationships(schemaFactory, type, include).entrySet()) {
            Relationship rel = entry.getValue();
            Table<?> childTable = JooqUtils.getTableFromRecordClass(rel.getObjectType());
            if (childTable == null) {
                throw new IllegalStateException("Failed to find table for type [" + rel.getObjectType() + "]");
            } else {
                String key = rel.getRelationshipType() == RelationshipType.REFERENCE ? ApiUtils.SINGLE_ATTACHMENT_PREFIX + rel.getName() : rel.getName();
                tableMapper.map(key, childTable);
                rels.add(rel);
            }
        }

        List<Table<?>> tables = tableMapper.getTables();
        query.addSelect(tableMapper.getFields());
        query.addFrom(table);

        for (int i = 0; i < tables.size(); i++) {
            Relationship rel = rels.get(i);
            Table<?> toTable = tables.get(i);
            if (rel != null) {
                if (rel.getRelationshipType() == RelationshipType.MAP) {
                    // MAP relationships go through an intermediate mapping table.
                    addMappingJoins(query, toTable, schemaFactory, type, table, toTable.getName(), (MapRelationship) rel);
                } else {
                    query.addJoin(toTable, JoinType.LEFT_OUTER_JOIN, getJoinCondition(schemaFactory, type, table, toTable.getName(), rel));
                }
            }
        }

        return tableMapper;
    }

    /**
     * Adds the two LEFT OUTER joins of a many-to-many link: from the base
     * table to the mapping table (skipping removed rows), then from the
     * mapping table to the target table aliased as {@code asName}.
     */
    protected void addMappingJoins(SelectQuery<?> query, Table<?> toTable, SchemaFactory schemaFactory, String fromType, Table<?> from, String asName,
            MapRelationship rel) {
        Table<?> mappingTable = JooqUtils.getTableFromRecordClass(rel.getMappingType());
        /*
         * We don't required the mapping type to be visible external, that's why
         * we use the schemaFactory from the objectManager, because it is the
         * superset schemaFactory.
         */
        String mappingType = getObjectManager().getSchemaFactory().getSchemaName(rel.getMappingType());

        TableField<?, Object> fieldFrom = JooqUtils.getTableField(getMetaDataManager(), fromType, ObjectMetaDataManager.ID_FIELD);
        TableField<?, Object> fieldTo = JooqUtils.getTableField(getMetaDataManager(), mappingType, rel.getPropertyName());
        TableField<?, Object> fieldRemoved = JooqUtils.getTableField(getMetaDataManager(), mappingType, ObjectMetaDataManager.REMOVED_FIELD);

        // A mapping table without a "removed" column joins unconditionally.
        org.jooq.Condition cond = fieldFrom.eq(fieldTo.getTable().field(fieldTo.getName())).and(
                fieldRemoved == null ? DSL.trueCondition() : fieldRemoved.isNull());

        query.addJoin(mappingTable, JoinType.LEFT_OUTER_JOIN, cond);

        fieldFrom = JooqUtils.getTableField(getMetaDataManager(), mappingType, rel.getOtherRelationship().getPropertyName());
        fieldTo = JooqUtils.getTableField(getMetaDataManager(), schemaFactory.getSchemaName(rel.getObjectType()), ObjectMetaDataManager.ID_FIELD);

        cond = fieldFrom.eq(fieldTo.getTable().asTable(asName).field(fieldTo.getName()));
        query.addJoin(toTable, JoinType.LEFT_OUTER_JOIN, cond);
    }

    /**
     * Builds the join condition for a REFERENCE (foreign key on the from
     * side) or CHILD (foreign key on the to side) relationship.
     *
     * @throws IllegalArgumentException for unsupported relationship types
     * @throws IllegalStateException when either join field cannot be resolved
     */
    protected org.jooq.Condition getJoinCondition(SchemaFactory schemaFactory, String fromType, Table<?> from, String asName, Relationship rel) {
        TableField<?, Object> fieldFrom = null;
        TableField<?, Object> fieldTo = null;

        switch (rel.getRelationshipType()) {
        case REFERENCE:
            fieldFrom = JooqUtils.getTableField(getMetaDataManager(), fromType, rel.getPropertyName());
            fieldTo = JooqUtils.getTableField(getMetaDataManager(), schemaFactory.getSchemaName(rel.getObjectType()), ObjectMetaDataManager.ID_FIELD);
            break;
        case CHILD:
            fieldFrom = JooqUtils.getTableField(getMetaDataManager(), fromType, ObjectMetaDataManager.ID_FIELD);
            fieldTo = JooqUtils.getTableField(getMetaDataManager(), schemaFactory.getSchemaName(rel.getObjectType()), rel.getPropertyName());
            break;
        default:
            throw new IllegalArgumentException("Illegal Relationship type [" + rel.getRelationshipType() + "]");
        }

        if (fieldFrom == null || fieldTo == null) {
            // Fixed typo in the original message ("construction" -> "construct").
            throw new IllegalStateException("Failed to construct join query for [" + fromType + "] [" + from + "] [" + rel + "]");
        }

        return fieldFrom.eq(fieldTo.getTable().as(asName).field(fieldTo.getName()));
    }

    /** Translates the criteria map into jOOQ conditions on the query. */
    protected void addConditions(SchemaFactory schemaFactory, SelectQuery<?> query, String type, Table<?> table, Map<Object, Object> criteria) {
        org.jooq.Condition condition = JooqUtils.toConditions(getMetaDataManager(), type, criteria);

        if (condition != null) {
            query.addConditions(condition);
        }
    }

    @Override
    protected Object getMapLink(String fromType, String id, MapRelationship rel, ApiRequest request) {
        SchemaFactory schemaFactory = request.getSchemaFactory();
        /*
         * We don't required the mapping type to be visible external, that's why
         * we use the schemaFactory from the objectManager, because it is the
         * superset schemaFactory.
         */
        String mappingType = getObjectManager().getSchemaFactory().getSchemaName(rel.getMappingType());
        String type = schemaFactory.getSchemaName(rel.getObjectType());

        Map<Table<?>, Condition> joins = new LinkedHashMap<Table<?>, Condition>();
        Map<Object, Object> criteria = new LinkedHashMap<Object, Object>();

        if (mappingType == null || type == null) {
            return null;
        }

        Table<?> mappingTable = JooqUtils.getTable(schemaFactory, rel.getMappingType());

        TableField<?, Object> fieldFrom = JooqUtils.getTableField(getMetaDataManager(), type, ObjectMetaDataManager.ID_FIELD);
        TableField<?, Object> fieldTo = JooqUtils.getTableField(getMetaDataManager(), mappingType, rel.getOtherRelationship().getPropertyName());
        TableField<?, Object> fieldRemoved = JooqUtils.getTableField(getMetaDataManager(), mappingType, ObjectMetaDataManager.REMOVED_FIELD);
        TableField<?, Object> fromTypeIdField = JooqUtils.getTableField(getMetaDataManager(), mappingType, rel.getSelfRelationship().getPropertyName());

        org.jooq.Condition cond = fieldFrom.eq(fieldTo.getTable().field(fieldTo.getName())).and(
                fieldRemoved == null ? DSL.trueCondition() : fieldRemoved.isNull());

        joins.put(mappingTable, cond);
        criteria.put(Condition.class, fromTypeIdField.eq(id));

        return listInternal(schemaFactory, type, criteria, new ListOptions(request), joins);
    }

    /** Applies offset/limit; fetches one extra row to detect a next page. */
    protected void addLimit(SchemaFactory schemaFactory, String type, Pagination pagination, SelectQuery<?> query) {
        if (pagination == null || pagination.getLimit() == null) {
            return;
        }

        int limit = pagination.getLimit() + 1;
        int offset = getOffset(pagination);
        query.addLimit(offset, limit);
    }

    /** Adds an ORDER BY on the requested field; silently ignored if unknown. */
    protected void addSort(SchemaFactory schemaFactory, String type, Sort sort, SelectQuery<?> query) {
        if (sort == null) {
            return;
        }

        TableField<?, Object> sortField = JooqUtils.getTableField(getMetaDataManager(), type, sort.getName());
        if (sortField == null) {
            return;
        }

        switch (sort.getOrderEnum()) {
        case DESC:
            query.addOrderBy(sortField.desc());
            break;
        default:
            query.addOrderBy(sortField.asc());
        }
    }

    @Override
    protected void addAccountAuthorization(boolean byId, boolean byLink, String type, Map<Object, Object> criteria, Policy policy) {
        super.addAccountAuthorization(byId, byLink, type, criteria, policy);

        if (!policy.isOption(Policy.LIST_ALL_ACCOUNTS)) {
            if (policy.isOption(Policy.AUTHORIZED_FOR_ALL_ACCOUNTS) && (byId || byLink)) {
                return;
            }

            TableField<?, Object> accountField = JooqUtils.getTableField(getMetaDataManager(), type, ObjectMetaDataManager.ACCOUNT_FIELD);
            TableField<?, Object> publicField = JooqUtils.getTableField(getMetaDataManager(), type, ObjectMetaDataManager.PUBLIC_FIELD);
            Object accountValue = criteria.get(ObjectMetaDataManager.ACCOUNT_FIELD);

            if (accountField == null || publicField == null || accountValue == null) {
                return;
            }

            // Replace the plain account criterion with "public OR owned by
            // one of the authorized accounts".
            criteria.remove(ObjectMetaDataManager.ACCOUNT_FIELD);

            Condition accountCondition = null;
            if (accountValue instanceof io.github.ibuildthecloud.gdapi.condition.Condition) {
                accountCondition = accountField.in(((io.github.ibuildthecloud.gdapi.condition.Condition) accountValue).getValues());
            } else {
                accountCondition = accountField.eq(accountValue);
            }

            criteria.put(Condition.class, publicField.isTrue().or(accountCondition));
        }
    }

    @Override
    protected Object removeFromStore(String type, String id, Object obj, ApiRequest request) {
        Table<?> table = JooqUtils.getTableFromRecordClass(JooqUtils.getRecordClass(request.getSchemaFactory(), obj.getClass()));
        TableField<?, Object> idField = JooqUtils.getTableField(getMetaDataManager(), type, ObjectMetaDataManager.ID_FIELD);

        // Exactly one row must be deleted; anything else indicates a conflict.
        int result = create().delete(table).where(idField.eq(id)).execute();
        if (result != 1) {
            log.error("While deleting type [{}] and id [{}] got a result of [{}]", type, id, result);
            throw new ClientVisibleException(ResponseCodes.CONFLICT);
        }

        return obj;
    }

    @Override
    public boolean handleException(Throwable t, ApiRequest apiRequest) {
        if (t instanceof DataAccessException) {
            // Bug fix: the original call had no "{}" placeholder, so the
            // message argument was silently dropped by SLF4J.
            log.info("Database error: {}", t.getMessage());
            throw new ClientVisibleException(ResponseCodes.CONFLICT);
        }

        return super.handleException(t, apiRequest);
    }

    public Configuration getConfiguration() {
        return configuration;
    }

    @Inject
    public void setConfiguration(Configuration configuration) {
        this.configuration = configuration;
    }
}
|
package io.cattle.platform.process.externalevent;
import static io.cattle.platform.process.externalevent.ExternalEventConstants.*;
import io.cattle.platform.core.constants.CommonStatesConstants;
import io.cattle.platform.core.dao.StackDao;
import io.cattle.platform.core.dao.GenericResourceDao;
import io.cattle.platform.core.dao.ServiceDao;
import io.cattle.platform.core.model.Stack;
import io.cattle.platform.core.model.ExternalEvent;
import io.cattle.platform.core.model.Service;
import io.cattle.platform.deferred.util.DeferredUtils;
import io.cattle.platform.engine.handler.HandlerResult;
import io.cattle.platform.engine.process.ProcessInstance;
import io.cattle.platform.engine.process.ProcessState;
import io.cattle.platform.engine.process.impl.ProcessCancelException;
import io.cattle.platform.lock.LockCallbackNoReturn;
import io.cattle.platform.lock.LockManager;
import io.cattle.platform.object.meta.ObjectMetaDataManager;
import io.cattle.platform.object.process.StandardProcess;
import io.cattle.platform.object.resource.ResourceMonitor;
import io.cattle.platform.object.resource.ResourcePredicate;
import io.cattle.platform.object.util.DataUtils;
import io.cattle.platform.object.util.ObjectUtils;
import io.cattle.platform.process.base.AbstractDefaultProcessHandler;
import io.cattle.platform.process.common.util.ProcessUtils;
import io.cattle.platform.util.type.CollectionUtils;
import io.github.ibuildthecloud.gdapi.factory.SchemaFactory;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Callable;
import javax.inject.Inject;
import javax.inject.Named;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Named
public class ExternalServiceEventCreate extends AbstractDefaultProcessHandler {

    private static final Logger log = LoggerFactory.getLogger(ExternalServiceEventCreate.class);

    @Inject
    ServiceDao serviceDao;

    @Inject
    ResourceMonitor resourceMonitor;

    @Inject
    GenericResourceDao resourceDao;

    @Inject
    LockManager lockManager;

    @Inject
    SchemaFactory schemaFactory;

    @Inject
    StackDao stackDao;

    /**
     * Handles external service events: under a per-service lock, dispatches
     * to create/update/delete of the service (or deletion of its stack)
     * based on the event type. Non-service events are ignored.
     */
    @Override
    public HandlerResult handle(ProcessState state, ProcessInstance process) {
        final ExternalEvent event = (ExternalEvent)state.getResource();
        if (!ExternalEventConstants.KIND_SERVICE_EVENT.equals(event.getKind())) {
            return null;
        }

        // Serialize processing per (account, external id) so concurrent
        // events for the same service cannot interleave.
        lockManager.lock(new ExternalEventLock(SERVICE_LOCK_NAME, event.getAccountId(), event.getExternalId()), new LockCallbackNoReturn() {
            @Override
            public void doWithLockNoResult() {
                Map<String, Object> serviceData = CollectionUtils.toMap(DataUtils.getFields(event).get(FIELD_SERVICE));
                if (serviceData.isEmpty()) {
                    log.warn("Empty service for externalServiceEvent: {}.", event);
                    return;
                }

                String kind = serviceData.get(ObjectMetaDataManager.KIND_FIELD) != null ? serviceData.get(ObjectMetaDataManager.KIND_FIELD).toString() : null;
                if (StringUtils.isEmpty(kind) || schemaFactory.getSchema(kind) == null) {
                    log.warn("Couldn't find schema for service type [{}]. Returning.", kind);
                    return;
                }

                if (StringUtils.equals(event.getEventType(), TYPE_SERVICE_CREATE)) {
                    createService(event, serviceData);
                } else if (StringUtils.equals(event.getEventType(), TYPE_SERVICE_UPDATE)) {
                    updateService(event, serviceData);
                } else if (StringUtils.equals(event.getEventType(), TYPE_SERVICE_DELETE)) {
                    deleteService(event, serviceData);
                } else if (StringUtils.equals(event.getEventType(), TYPE_STACK_DELETE)) {
                    deleteStack(event, serviceData);
                }
            }
        });

        return null;
    }

    /**
     * Creates and activates a service for the event unless one with the same
     * external id already exists; resolves (or creates) its stack first.
     */
    void createService(ExternalEvent event, Map<String, Object> serviceData) {
        Service svc = serviceDao.getServiceByExternalId(event.getAccountId(), event.getExternalId());
        if (svc != null) {
            return;
        }

        Stack stack = getStack(event);
        if (stack == null) {
            log.info("Can't process service event. Could not get or create stack. Event: [{}]", event);
            return;
        }

        Map<String, Object> service = new HashMap<String, Object>();
        if (serviceData != null) {
            service.putAll(serviceData);
        }
        service.put(ObjectMetaDataManager.ACCOUNT_FIELD, event.getAccountId());
        service.put(FIELD_STACK_ID, stack.getId());

        try {
            // Chain activation onto creation so the service comes up active.
            String create = objectProcessManager.getStandardProcessName(StandardProcess.CREATE, Service.class);
            String activate = objectProcessManager.getStandardProcessName(StandardProcess.ACTIVATE, Service.class);
            ProcessUtils.chainInData(service, create, activate);
            resourceDao.createAndSchedule(Service.class, service);
        } catch (ProcessCancelException e) {
            // Fixed log message: the original was missing the space before "and".
            log.info("Create and activate process cancelled for service with account id {} and external id {}",
                    event.getAccountId(), event.getExternalId());
        }
    }

    /**
     * Resolves the stack referenced by the event's environment data, creating
     * it (and waiting for it to become active) when it does not exist yet.
     * Returns null when the event carries no environment external id.
     */
    Stack getStack(final ExternalEvent event) {
        final Map<String, Object> env = CollectionUtils.castMap(DataUtils.getFields(event).get(FIELD_ENVIRIONMENT));
        Object eId = CollectionUtils.getNestedValue(env, FIELD_EXTERNAL_ID);
        if (eId == null) {
            return null;
        }
        final String envExtId = eId.toString();

        Stack stack = stackDao.getStackByExternalId(event.getAccountId(), envExtId);

        //If stack has not been created yet
        if (stack == null) {
            final Stack newEnv = objectManager.newRecord(Stack.class);

            Object possibleName = CollectionUtils.getNestedValue(env, "name");
            newEnv.setExternalId(envExtId);
            newEnv.setAccountId(event.getAccountId());
            // Fall back to the external id when no name was supplied.
            String name = possibleName != null ? possibleName.toString() : envExtId;
            newEnv.setName(name);

            stack = DeferredUtils.nest(new Callable<Stack>() {
                @Override
                public Stack call() {
                    return resourceDao.createAndSchedule(newEnv);
                }
            });

            // Block until the stack reaches ACTIVE before hanging the
            // service off of it.
            stack = resourceMonitor.waitFor(stack, new ResourcePredicate<Stack>() {
                @Override
                public boolean evaluate(Stack obj) {
                    return obj != null && CommonStatesConstants.ACTIVE.equals(obj.getState());
                }

                @Override
                public String getMessage() {
                    return "active state";
                }
            });
        }

        return stack;
    }

    /**
     * Applies the event's field values to the existing service, recording
     * only the fields whose values actually changed, and schedules an update
     * when at least one field differs.
     */
    void updateService(ExternalEvent event, Map<String, Object> serviceData) {
        Service svc = serviceDao.getServiceByExternalId(event.getAccountId(), event.getExternalId());
        if (svc == null) {
            log.info("Unable to find service while attempting to update. Returning. Service external id: [{}], account id: [{}]", event.getExternalId(),
                    event.getAccountId());
            return;
        }

        Map<String, Object> fields = DataUtils.getFields(svc);
        Map<String, Object> updates = new HashMap<String, Object>();
        for (Map.Entry<String, Object> resourceField : serviceData.entrySet()) {
            String fieldName = resourceField.getKey();
            Object newFieldValue = resourceField.getValue();
            if (ObjectUtils.hasWritableProperty(svc, fieldName)) {
                Object property = ObjectUtils.getProperty(svc, fieldName);
                // Bug fix: the original condition also fired when the old and
                // new values were equal (it OR'd in a bare null check),
                // producing spurious updates and update/schedule cycles.
                if (valueChanged(newFieldValue, property)) {
                    updates.put(fieldName, newFieldValue);
                }
            } else {
                Object currentFieldValue = fields.get(fieldName);
                if (valueChanged(newFieldValue, currentFieldValue)) {
                    updates.put(fieldName, newFieldValue);
                }
            }
        }

        if (!updates.isEmpty()) {
            objectManager.setFields(svc, updates);
            resourceDao.updateAndSchedule(svc);
        }
    }

    /** Null-safe inequality: true when the new value differs from the old. */
    private static boolean valueChanged(Object newValue, Object oldValue) {
        return newValue == null ? oldValue != null : !newValue.equals(oldValue);
    }

    /** Schedules removal of the service matching the event, if it exists. */
    void deleteService(ExternalEvent event, Map<String, Object> serviceData) {
        Service svc = serviceDao.getServiceByExternalId(event.getAccountId(), event.getExternalId());
        if (svc != null) {
            objectProcessManager.scheduleStandardProcess(StandardProcess.REMOVE, svc, null);
        }
    }

    /** Schedules removal of the stack matching the event, if it exists. */
    void deleteStack(ExternalEvent event, Map<String, Object> stackData) {
        Stack env = stackDao.getStackByExternalId(event.getAccountId(), event.getExternalId());
        if (env != null) {
            objectProcessManager.scheduleStandardProcess(StandardProcess.REMOVE, env, null);
        }
    }

    // NOTE(review): not referenced anywhere in this class — presumably used
    // by a subclass or kept for compatibility; verify before removing.
    String getSelector(ExternalEvent event) {
        Object s = DataUtils.getFields(event).get("selector");
        String selector = s != null ? s.toString() : null;
        return selector;
    }

    @Override
    public String[] getProcessNames() {
        return new String[] { ExternalEventConstants.KIND_EXTERNAL_EVENT + ".create" };
    }
}
|
package com.sap.cloud.lm.sl.cf.core.cf.v2;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import org.apache.commons.io.IOUtils;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.mockito.Mockito;
import com.sap.cloud.lm.sl.cf.client.lib.domain.CloudApplicationExtended;
import com.sap.cloud.lm.sl.cf.client.lib.domain.CloudServiceExtended;
import com.sap.cloud.lm.sl.cf.core.cf.HandlerFactory;
import com.sap.cloud.lm.sl.cf.core.cf.util.ModulesCloudModelBuilderContentCalculator;
import com.sap.cloud.lm.sl.cf.core.cf.util.ResourcesCloudModelBuilderContentCalculator;
import com.sap.cloud.lm.sl.cf.core.cf.util.UnresolvedModulesContentValidator;
import com.sap.cloud.lm.sl.cf.core.helpers.ModuleToDeployHelper;
import com.sap.cloud.lm.sl.cf.core.helpers.XsPlaceholderResolver;
import com.sap.cloud.lm.sl.cf.core.model.DeployedMta;
import com.sap.cloud.lm.sl.cf.core.model.SupportedParameters;
import com.sap.cloud.lm.sl.cf.core.util.NameUtil;
import com.sap.cloud.lm.sl.cf.core.util.UserMessageLogger;
import com.sap.cloud.lm.sl.common.util.Callable;
import com.sap.cloud.lm.sl.common.util.JsonUtil;
import com.sap.cloud.lm.sl.common.util.MapUtil;
import com.sap.cloud.lm.sl.common.util.TestUtil;
import com.sap.cloud.lm.sl.common.util.TestUtil.Expectation;
import com.sap.cloud.lm.sl.mta.handlers.ConfigurationParser;
import com.sap.cloud.lm.sl.mta.handlers.v2.DescriptorHandler;
import com.sap.cloud.lm.sl.mta.handlers.v2.DescriptorMerger;
import com.sap.cloud.lm.sl.mta.handlers.v2.DescriptorParser;
import com.sap.cloud.lm.sl.mta.mergers.PlatformMerger;
import com.sap.cloud.lm.sl.mta.model.Platform;
import com.sap.cloud.lm.sl.mta.model.SystemParameters;
import com.sap.cloud.lm.sl.mta.model.v2.DeploymentDescriptor;
import com.sap.cloud.lm.sl.mta.model.v2.ExtensionDescriptor;
import com.sap.cloud.lm.sl.mta.model.v2.Module;
import com.sap.cloud.lm.sl.mta.resolvers.ResolverBuilder;
import com.sap.cloud.lm.sl.mta.resolvers.v2.DescriptorReferenceResolver;
/**
 * Parameterized tests for building the cloud model (applications and services) from an MTA
 * deployment descriptor merged with an extension descriptor and a platform definition.
 * Each parameter row supplies resource locations, namespace flags, module/app name sets,
 * and the expected services/apps output (JSON resource or expected exception).
 */
@RunWith(Parameterized.class)
public class CloudModelBuilderTest {
// Default domains used to derive system parameters per platform (CF vs. XS).
protected static final String DEFAULT_DOMAIN_CF = "cfapps.neo.ondemand.com";
protected static final String DEFAULT_DOMAIN_XS = "sofd60245639a";
protected static final String DEPLOY_ID = "123";
protected final DescriptorParser descriptorParser = getDescriptorParser();
protected final ConfigurationParser configurationParser = new ConfigurationParser();
protected final DescriptorHandler descriptorHandler = getDescriptorHandler();
protected DeploymentDescriptor deploymentDescriptor;
// Classpath locations of the test resources for the current parameter row:
protected final String deploymentDescriptorLocation;
protected final String extensionDescriptorLocation;
protected final String platformLocation;
protected final String deployedMtaLocation;
// Whether application (and optionally service) names are prefixed with the MTA id.
protected final boolean useNamespaces;
protected final boolean useNamespacesForServices;
protected final Set<String> mtaArchiveModules;
protected final Set<String> mtaModules;
protected final Set<String> deployedApps;
protected final Expectation expectedServices;
protected final Expectation expectedApps;
private ModulesCloudModelBuilderContentCalculator modulesCalculator;
protected ModuleToDeployHelper moduleToDeployHelper;
protected ResourcesCloudModelBuilderContentCalculator resourcesCalculator;
protected ApplicationCloudModelBuilder appBuilder;
protected ServicesCloudModelBuilder servicesBuilder;
/**
 * Parameter rows. Column order matches the constructor:
 * deploymentDescriptorLocation, extensionDescriptorLocation, platformLocation,
 * deployedMtaLocation, useNamespaces, useNamespacesForServices, mtaArchiveModules,
 * mtaModules, deployedApps, expectedServices, expectedApps.
 */
@Parameters
public static Iterable<Object[]> getParameters() {
return Arrays.asList(new Object[][] {
// @formatter:off
// (00) Full MTA:
{ "/mta/javahelloworld/mtad.yaml", "/mta/javahelloworld/config.mtaext", "/mta/cf-platform.json", null,
false, false,
new String[] { "java-hello-world", "java-hello-world-db", "java-hello-world-backend" }, // mtaArchiveModules
new String[] { "java-hello-world", "java-hello-world-db", "java-hello-world-backend" }, // mtaModules
new String[] {}, // deployedApps
new Expectation(Expectation.Type.RESOURCE, "/mta/javahelloworld/services.json"),
new Expectation(Expectation.Type.RESOURCE, "/mta/javahelloworld/apps.json"),
},
// (01) Full MTA (XS platform):
{ "/mta/javahelloworld/mtad.yaml", "/mta/javahelloworld/xs2-config.mtaext", "/mta/xs-platform.json", null,
false, false,
new String[] { "java-hello-world", "java-hello-world-db", "java-hello-world-backend" }, // mtaArchiveModules
new String[] { "java-hello-world", "java-hello-world-db", "java-hello-world-backend" }, // mtaModules
new String[] {}, // deployedApps
new Expectation(Expectation.Type.RESOURCE, "/mta/javahelloworld/xs2-services.json"),
new Expectation(Expectation.Type.RESOURCE, "/mta/javahelloworld/xs2-apps.json"),
},
// (02) Full MTA with namespaces:
{ "/mta/javahelloworld/mtad.yaml", "/mta/javahelloworld/config.mtaext", "/mta/cf-platform.json", null,
true, true,
new String[] { "java-hello-world", "java-hello-world-db", "java-hello-world-backend" }, // mtaArchiveModules
new String[] { "java-hello-world", "java-hello-world-db", "java-hello-world-backend" }, // mtaModules
new String[] {}, // deployedApps
new Expectation(Expectation.Type.RESOURCE, "/mta/javahelloworld/services-ns.json"),
new Expectation(Expectation.Type.RESOURCE, "/mta/javahelloworld/apps-ns-1.json"),
},
// (03) Full MTA with namespaces (w/o services):
{ "/mta/javahelloworld/mtad.yaml", "/mta/javahelloworld/config.mtaext", "/mta/cf-platform.json", null,
true, false,
new String[] { "java-hello-world", "java-hello-world-db", "java-hello-world-backend" }, // mtaArchiveModules
new String[] { "java-hello-world", "java-hello-world-db", "java-hello-world-backend" }, // mtaModules
new String[] {}, // deployedApps
new Expectation(Expectation.Type.RESOURCE, "/mta/javahelloworld/services.json"),
new Expectation(Expectation.Type.RESOURCE, "/mta/javahelloworld/apps-ns-2.json"),
},
// (04) Patch MTA (resolved inter-module dependencies):
{ "/mta/javahelloworld/mtad.yaml", "/mta/javahelloworld/config.mtaext", "/mta/cf-platform.json", null,
false, false,
new String[] { "java-hello-world" }, // mtaArchiveModules
new String[] { "java-hello-world", "java-hello-world-db", "java-hello-world-backend" }, // mtaModules
new String[] { "java-hello-world", "java-hello-world-db", "java-hello-world-backend" }, // deployedApps
new Expectation(Expectation.Type.RESOURCE, "/mta/javahelloworld/services-patch.json"),
new Expectation(Expectation.Type.RESOURCE, "/mta/javahelloworld/apps-patch.json"),
},
// (05) Patch MTA with namespaces (resolved inter-module dependencies):
{ "/mta/javahelloworld/mtad.yaml", "/mta/javahelloworld/config.mtaext", "/mta/cf-platform.json", null,
true, true,
new String[] { "java-hello-world" }, // mtaArchiveModules
new String[] { "java-hello-world", "java-hello-world-db", "java-hello-world-backend" }, // mtaModules
new String[] { "java-hello-world", "java-hello-world-db", "java-hello-world-backend" }, // deployedApps
new Expectation(Expectation.Type.RESOURCE, "/mta/javahelloworld/services-patch-ns.json"),
new Expectation(Expectation.Type.RESOURCE, "/mta/javahelloworld/apps-patch-ns.json"),
},
// (06) Patch MTA (unresolved inter-module dependencies):
{ "/mta/javahelloworld/mtad.yaml", "/mta/javahelloworld/config.mtaext", "/mta/cf-platform.json", null,
false, false,
new String[] { "java-hello-world" }, // mtaArchiveModules
new String[] { "java-hello-world", "java-hello-world-db", "java-hello-world-backend" }, // mtaModules
new String[] { "java-hello-world", }, // deployedApps
new Expectation(Expectation.Type.RESOURCE, "/mta/javahelloworld/services-patch.json"),
new Expectation(Expectation.Type.EXCEPTION, "Unresolved MTA modules [java-hello-world-db, java-hello-world-backend]")
},
// (07) Full MTA (shine):
{ "/mta/shine/mtad.yaml", "/mta/shine/config.mtaext", "/mta/cf-platform.json", null,
false, false,
new String[] { "shine", "shine-xsjs", "shine-odata" }, // mtaArchiveModules
new String[] { "shine", "shine-xsjs", "shine-odata" }, // mtaModules
new String[] {}, // deployedApps
new Expectation(Expectation.Type.RESOURCE, "/mta/shine/services.json"),
new Expectation(Expectation.Type.RESOURCE, "/mta/shine/apps.json"),
},
// (08) Full MTA (sample, custom platform):
{ "/mta/sample/mtad.yaml", "/mta/sample/config.mtaext", "/mta/sample/platform.json", null,
false, false,
new String[] { "pricing", "pricing-db", "web-server" }, // mtaArchiveModules
new String[] { "pricing", "pricing-db", "web-server" }, // mtaModules
new String[] {}, // deployedApps
new Expectation(Expectation.Type.RESOURCE, "/mta/sample/services.json"),
new Expectation(Expectation.Type.RESOURCE, "/mta/sample/apps.json"),
},
// (09) Full MTA (devxwebide, CF platform):
{ "/mta/devxwebide/mtad.yaml", "/mta/devxwebide/config.mtaext", "/mta/cf-platform.json", null,
false, false,
new String[] { "webide" }, // mtaArchiveModules
new String[] { "webide" }, // mtaModules
new String[] {}, // deployedApps
new Expectation(Expectation.Type.RESOURCE, "/mta/devxwebide/services.json"),
new Expectation(Expectation.Type.RESOURCE, "/mta/devxwebide/apps.json"),
},
// (10) Full MTA (devxwebide, XS platform):
{ "/mta/devxwebide/mtad.yaml", "/mta/devxwebide/xs2-config-1.mtaext", "/mta/xs-platform.json", null,
false, false,
new String[] { "webide" }, // mtaArchiveModules
new String[] { "webide" }, // mtaModules
new String[] {}, // deployedApps
new Expectation(Expectation.Type.RESOURCE, "/mta/devxwebide/services.json"),
new Expectation(Expectation.Type.RESOURCE, "/mta/devxwebide/xs2-apps.json"),
},
// (11) Full MTA (devxdi, CF platform):
{ "/mta/devxdi/mtad.yaml", "/mta/devxdi/config.mtaext", "/mta/cf-platform.json", null,
false, false,
new String[] { "di-core", "di-builder", "di-runner" }, // mtaArchiveModules
new String[] { "di-core", "di-builder", "di-runner" }, // mtaModules
new String[] {}, // deployedApps
new Expectation(Expectation.Type.RESOURCE, "/mta/devxdi/services.json"),
new Expectation(Expectation.Type.RESOURCE, "/mta/devxdi/apps.json"),
},
// (12) Full MTA (devxdi, XS platform):
{ "/mta/devxdi/mtad.yaml", "/mta/devxdi/xs2-config-1.mtaext", "/mta/xs-platform.json", null,
false, false,
new String[] { "di-core", "di-builder", "di-runner" }, // mtaArchiveModules
new String[] { "di-core", "di-builder", "di-runner" }, // mtaModules
new String[] {}, // deployedApps
new Expectation(Expectation.Type.RESOURCE, "/mta/devxdi/xs2-services.json"),
new Expectation(Expectation.Type.RESOURCE, "/mta/devxdi/xs2-apps.json"),
},
// (13) Full MTA (devxwebide, XS platform, alternative ext. descriptor):
{ "/mta/devxwebide/mtad.yaml", "/mta/devxwebide/xs2-config-2.mtaext", "/mta/xs-platform.json", null,
false, false,
new String[] { "webide" }, // mtaArchiveModules
new String[] { "webide" }, // mtaModules
new String[] {}, // deployedApps
new Expectation(Expectation.Type.RESOURCE, "/mta/devxwebide/services.json"),
new Expectation(Expectation.Type.RESOURCE, "/mta/devxwebide/xs2-apps.json"),
},
// (14) Unknown typed resource parameters:
{ "/mta/devxdi/mtad.yaml", "/mta/devxdi/xs2-config-2.mtaext", "/mta/xs-platform.json", null,
false, false,
new String[] { "di-core", "di-builder", "di-runner" }, // mtaArchiveModules
new String[] { "di-core", "di-builder", "di-runner" }, // mtaModules
new String[] {}, // deployedApps
new Expectation(Expectation.Type.RESOURCE, "/mta/devxdi/xs2-services.json"),
new Expectation(Expectation.Type.RESOURCE, "/mta/devxdi/xs2-apps.json"),
},
// (15) Service binding parameters in requires dependency:
{ "mtad-01.yaml", "config-01.mtaext", "/mta/cf-platform.json", null,
false, false,
new String[] { "foo", }, // mtaArchiveModules
new String[] { "foo", }, // mtaModules
new String[] {}, // deployedApps
new Expectation("[]"),
new Expectation(Expectation.Type.RESOURCE, "apps-01.json"),
},
// (16) Service binding parameters in requires dependency:
{ "mtad-02.yaml", "config-01.mtaext", "/mta/cf-platform.json", null,
false, false,
new String[] { "foo", }, // mtaArchiveModules
new String[] { "foo", }, // mtaModules
new String[] {}, // deployedApps
new Expectation("[]"),
new Expectation(Expectation.Type.EXCEPTION, "Invalid type for key \"foo#bar#config\", expected \"Map\" but got \"String\""),
},
// (17) Custom application names are used:
{
"mtad-03.yaml", "config-02.mtaext", "/mta/xs-platform.json", null,
false, false,
new String[] { "module-1", "module-2" }, // mtaArchiveModules
new String[] { "module-1", "module-2" }, // mtaModules
new String[] {}, // deployedApps
new Expectation("[]"),
new Expectation(Expectation.Type.RESOURCE, "apps-02.json"),
},
// (18) Custom application names are used:
{
"mtad-03.yaml", "config-02.mtaext", "/mta/xs-platform.json", null,
true, true,
new String[] { "module-1", "module-2" }, // mtaArchiveModules
new String[] { "module-1", "module-2" }, // mtaModules
new String[] {}, // deployedApps
new Expectation("[]"),
new Expectation(Expectation.Type.RESOURCE, "apps-03.json"),
},
// (19) Temporary URIs are used:
{
"mtad-05.yaml", "config-02.mtaext", "/mta/xs-platform.json", null,
false, false,
new String[] { "module-1", "module-2" }, // mtaArchiveModules
new String[] { "module-1", "module-2" }, // mtaModules
new String[] {}, // deployedApps
new Expectation("[]"),
new Expectation(Expectation.Type.RESOURCE, "apps-05.json"),
},
// (20) Use list parameter:
{
"mtad-06.yaml", "config-02.mtaext", "/mta/xs-platform.json", null,
false, false,
new String[] { "framework" }, // mtaArchiveModules
new String[] { "framework" }, // mtaModules
new String[] {}, // deployedApps
new Expectation("[]"),
new Expectation(Expectation.Type.RESOURCE, "apps-06.json"),
},
// (21) Use partial plugin:
{
"mtad-07.yaml", "config-02.mtaext", "/mta/xs-platform.json", null,
false, false,
new String[] { "framework" }, // mtaArchiveModules
new String[] { "framework" }, // mtaModules
new String[] {}, // deployedApps
new Expectation("[]"),
new Expectation(Expectation.Type.RESOURCE, "apps-07.json"),
},
// (22) Overwrite service-name resource property in ext. descriptor:
{
"mtad-08.yaml", "config-03.mtaext", "/mta/xs-platform.json", null,
false, false,
new String[] { "module-1" }, // mtaArchiveModules
new String[] { "module-1" }, // mtaModules
new String[] {}, // deployedApps
new Expectation(Expectation.Type.RESOURCE, "services-03.json"),
new Expectation(Expectation.Type.RESOURCE, "apps-08.json"),
},
// (23) Test support for one-off tasks:
{
"mtad-09.yaml", "config-03.mtaext", "/mta/xs-platform.json", null,
false, false,
new String[] { "module-1", "module-2", "module-3", "module-4" }, // mtaArchiveModules
new String[] { "module-1", "module-2", "module-3", "module-4" }, // mtaModules
new String[] {}, // deployedApps
new Expectation("[]"),
new Expectation(Expectation.Type.RESOURCE, "apps-09.json"),
},
// (24) With 'health-check-type' set to 'port':
{
"mtad-health-check-type-port.yaml", "config-03.mtaext", "/mta/xs-platform.json", null,
false, false,
new String[] { "foo" }, // mtaArchiveModules
new String[] { "foo" }, // mtaModules
new String[] {}, // deployedApps
new Expectation("[]"),
new Expectation(Expectation.Type.RESOURCE, "apps-with-health-check-type-port.json"),
},
// (25) With 'health-check-type' set to 'http' and a non-default 'health-check-http-endpoint':
{
"mtad-health-check-type-http-with-endpoint.yaml", "config-03.mtaext", "/mta/xs-platform.json", null,
false, false,
new String[] { "foo" }, // mtaArchiveModules
new String[] { "foo" }, // mtaModules
new String[] {}, // deployedApps
new Expectation("[]"),
new Expectation(Expectation.Type.RESOURCE, "apps-with-health-check-type-http-with-endpoint.json"),
},
// (26) With 'health-check-type' set to 'http' and no 'health-check-http-endpoint':
{
"mtad-health-check-type-http-without-endpoint.yaml", "config-03.mtaext", "/mta/xs-platform.json", null,
false, false,
new String[] { "foo" }, // mtaArchiveModules
new String[] { "foo" }, // mtaModules
new String[] {}, // deployedApps
new Expectation("[]"),
new Expectation(Expectation.Type.RESOURCE, "apps-with-health-check-type-http-without-endpoint.json"),
},
// (27) Test inject service keys:
{
"mtad-10.yaml", "config-02.mtaext", "/mta/xs-platform.json", null,
false, false,
new String[] { "module-1" }, // mtaArchiveModules
new String[] { "module-1" }, // mtaModules
new String[] {}, // deployedApps
new Expectation("[]"),
new Expectation(Expectation.Type.RESOURCE, "apps-10.json"),
},
// (28) With 'enable-ssh' set to true:
{
"mtad-ssh-enabled-true.yaml", "config-02.mtaext", "/mta/xs-platform.json", null,
false, false,
new String[] { "foo" }, // mtaArchiveModules
new String[] { "foo" }, // mtaModules
new String[] {}, // deployedApps
new Expectation("[]"),
new Expectation(Expectation.Type.RESOURCE, "apps-with-ssh-enabled-true.json"),
},
// (29) With 'enable-ssh' set to false:
{
"mtad-ssh-enabled-false.yaml", "config-02.mtaext", "/mta/xs-platform.json", null,
false, false,
new String[] { "foo" }, // mtaArchiveModules
new String[] { "foo" }, // mtaModules
new String[] {}, // deployedApps
new Expectation("[]"),
new Expectation(Expectation.Type.RESOURCE, "apps-with-ssh-enabled-false.json"),
},
// (30) With TCPS routes
{
"mtad-11.yaml", "config-02.mtaext", "/mta/xs-platform.json", null,
false, false,
new String[] { "module-1", "module-2", "module-3" }, // mtaArchiveModules
new String[] { "module-1", "module-2", "module-3" }, // mtaModules
new String[] {}, // deployedApps
new Expectation("[]"),
new Expectation(Expectation.Type.RESOURCE, "apps-with-tcp-routes.json"),
},
// (31) Do not restart on env change - bg-deploy
{ "mtad-restart-on-env-change.yaml", "config-02.mtaext", "/mta/xs-platform.json", null,
false, false,
new String[] { "module-1", "module-2", "module-3" }, // mtaArchiveModules
new String[] { "module-1", "module-2", "module-3" }, // mtaModules
new String[] {}, // deployedApps
new Expectation("[]"),
new Expectation(Expectation.Type.RESOURCE, "apps-with-restart-parameters-false.json") // services
},
// (32) With 'keep-existing-routes' set to true and no deployed MTA:
{
"keep-existing-routes/mtad.yaml", "config-02.mtaext", "/mta/xs-platform.json", null,
false, false,
new String[] { "foo" }, // mtaArchiveModules
new String[] { "foo" }, // mtaModules
new String[] {}, // deployedApps
new Expectation("[]"),
new Expectation(Expectation.Type.RESOURCE, "keep-existing-routes/apps.json"),
},
// (33) With 'keep-existing-routes' set to true and no deployed module:
{
"keep-existing-routes/mtad.yaml", "config-02.mtaext", "/mta/xs-platform.json",
"keep-existing-routes/deployed-mta-without-foo-module.json",
false, false,
new String[] { "foo" }, // mtaArchiveModules
new String[] { "foo" }, // mtaModules
new String[] {}, // deployedApps
new Expectation("[]"),
new Expectation(Expectation.Type.RESOURCE, "keep-existing-routes/apps.json"),
},
// (34) With 'keep-existing-routes' set to true and an already deployed module with no URIs:
{
"keep-existing-routes/mtad.yaml", "config-02.mtaext", "/mta/xs-platform.json",
"keep-existing-routes/deployed-mta-without-uris.json",
false, false,
new String[] { "foo" }, // mtaArchiveModules
new String[] { "foo" }, // mtaModules
new String[] {}, // deployedApps
new Expectation("[]"),
new Expectation(Expectation.Type.RESOURCE, "keep-existing-routes/apps.json"),
},
// (35) With 'keep-existing-routes' set to true and an already deployed module:
{
"keep-existing-routes/mtad.yaml", "config-02.mtaext", "/mta/xs-platform.json",
"keep-existing-routes/deployed-mta.json",
false, false,
new String[] { "foo" }, // mtaArchiveModules
new String[] { "foo" }, // mtaModules
new String[] {}, // deployedApps
new Expectation("[]"),
new Expectation(Expectation.Type.RESOURCE, "keep-existing-routes/apps-with-existing-routes.json"),
},
// (36) With global 'keep-existing-routes' set to true and an already deployed module:
{
"keep-existing-routes/mtad-with-global-parameter.yaml", "config-02.mtaext", "/mta/xs-platform.json",
"keep-existing-routes/deployed-mta.json",
false, false,
new String[] { "foo" }, // mtaArchiveModules
new String[] { "foo" }, // mtaModules
new String[] {}, // deployedApps
new Expectation("[]"),
new Expectation(Expectation.Type.RESOURCE, "keep-existing-routes/apps-with-existing-routes.json"),
},
// (37) With new parameter - 'route'
{
"mtad-12.yaml", "config-01.mtaext", "/mta/cf-platform.json", null,
false, false,
new String[] { "foo", }, // mtaArchiveModules
new String[] { "foo", }, // mtaModules
new String[] {}, // deployedApps
new Expectation("[]"), //services
new Expectation(Expectation.Type.RESOURCE, "apps-12.json"), //applications
},
// (38) With new parameter - 'routes'
{
"mtad-13.yaml", "config-01.mtaext", "/mta/cf-platform.json", null,
false, false,
new String[] { "foo", }, // mtaArchiveModules
new String[] { "foo", }, // mtaModules
new String[] {}, // deployedApps
new Expectation("[]"), //services
new Expectation(Expectation.Type.RESOURCE, "apps-13.json"), //applications
},
// (39) With parameter - 'route', using tcp
{
"mtad-14.yaml", "config-01.mtaext", "/mta/cf-platform.json", null,
false, false,
new String[] { "foo", }, // mtaArchiveModules
new String[] { "foo", }, // mtaModules
new String[] {}, // deployedApps
new Expectation("[]"), //services
new Expectation(Expectation.Type.RESOURCE, "apps-14.json"), //applications
},
// @formatter:on
});
}
// JUnit 4 parameterized constructor: one invocation per row of getParameters().
public CloudModelBuilderTest(String deploymentDescriptorLocation, String extensionDescriptorLocation, String platformsLocation,
String deployedMtaLocation, boolean useNamespaces, boolean useNamespacesForServices, String[] mtaArchiveModules,
String[] mtaModules, String[] deployedApps, Expectation expectedServices, Expectation expectedApps) {
this.deploymentDescriptorLocation = deploymentDescriptorLocation;
this.extensionDescriptorLocation = extensionDescriptorLocation;
this.platformLocation = platformsLocation;
this.deployedMtaLocation = deployedMtaLocation;
this.useNamespaces = useNamespaces;
this.useNamespacesForServices = useNamespacesForServices;
this.mtaArchiveModules = new HashSet<>(Arrays.asList(mtaArchiveModules));
this.mtaModules = new HashSet<>(Arrays.asList(mtaModules));
this.deployedApps = new HashSet<>(Arrays.asList(deployedApps));
this.expectedServices = expectedServices;
this.expectedApps = expectedApps;
}
// Hook for subclasses; the base tests run without a user message logger.
protected UserMessageLogger getUserMessageLogger() {
return null;
}
protected DescriptorParser getDescriptorParser() {
return getHandlerFactory().getDescriptorParser();
}
// MTA handlers for schema major version 2; subclasses may override for other versions.
protected HandlerFactory getHandlerFactory() {
return new HandlerFactory(2);
}
protected Map<String, Object> getParameters(Module module) {
return module.getParameters();
}
protected DescriptorHandler getDescriptorHandler() {
return getHandlerFactory().getDescriptorHandler();
}
protected ServicesCloudModelBuilder getServicesCloudModelBuilder(DeploymentDescriptor deploymentDescriptor,
CloudModelConfiguration configuration) {
return new ServicesCloudModelBuilder(deploymentDescriptor, configuration);
}
protected ApplicationCloudModelBuilder getApplicationCloudModelBuilder(DeploymentDescriptor deploymentDescriptor,
CloudModelConfiguration configuration, DeployedMta deployedMta, SystemParameters systemParameters,
XsPlaceholderResolver xsPlaceholderResolver) {
// Resolve cross-module references in the descriptor before building applications.
deploymentDescriptor = new DescriptorReferenceResolver(deploymentDescriptor, new ResolverBuilder(), new ResolverBuilder())
.resolve();
return new ApplicationCloudModelBuilder(deploymentDescriptor, configuration, deployedMta, systemParameters, xsPlaceholderResolver,
DEPLOY_ID, Mockito.mock(UserMessageLogger.class));
}
protected PlatformMerger getPlatformMerger(Platform platform, DescriptorHandler handler) {
return getHandlerFactory().getPlatformMerger(platform);
}
protected void setParameters(Module module, Map<String, Object> parameters) {
module.setParameters(parameters);
}
protected DescriptorMerger getDescriptorMerger() {
return new DescriptorMerger();
}
/**
 * Builds the complete test fixture: parses descriptors/platform/deployed MTA,
 * merges the extension descriptor and platform into the deployment descriptor,
 * then wires up the app/service builders and content calculators.
 */
@Before
public void setUp() throws Exception {
deploymentDescriptor = loadDeploymentDescriptor();
ExtensionDescriptor extensionDescriptor = loadExtensionDescriptor();
Platform platform = loadPlatform();
DeployedMta deployedMta = loadDeployedMta();
deploymentDescriptor = getDescriptorMerger().merge(deploymentDescriptor, Arrays.asList(extensionDescriptor));
insertProperAppNames(deploymentDescriptor);
PlatformMerger platformMerger = getPlatformMerger(platform, descriptorHandler);
platformMerger.mergeInto(deploymentDescriptor);
String defaultDomain = getDefaultDomain(platform.getName());
SystemParameters systemParameters = createSystemParameters(deploymentDescriptor, defaultDomain);
XsPlaceholderResolver xsPlaceholderResolver = new XsPlaceholderResolver();
xsPlaceholderResolver.setDefaultDomain(defaultDomain);
CloudModelConfiguration configuration = createCloudModelConfiguration(defaultDomain);
appBuilder = getApplicationCloudModelBuilder(deploymentDescriptor, configuration, deployedMta, systemParameters,
xsPlaceholderResolver);
servicesBuilder = getServicesCloudModelBuilder(deploymentDescriptor, configuration);
modulesCalculator = new ModulesCloudModelBuilderContentCalculator(mtaArchiveModules, deployedApps, null, getUserMessageLogger(),
new ModuleToDeployHelper(), Arrays.asList(new UnresolvedModulesContentValidator(mtaModules, deployedApps)));
moduleToDeployHelper = new ModuleToDeployHelper();
resourcesCalculator = new ResourcesCloudModelBuilderContentCalculator(null, getUserMessageLogger());
}
// Parses the deployment descriptor (mtad.yaml) from the test classpath.
private DeploymentDescriptor loadDeploymentDescriptor() {
InputStream deploymentDescriptorYaml = getClass().getResourceAsStream(deploymentDescriptorLocation);
return descriptorParser.parseDeploymentDescriptorYaml(deploymentDescriptorYaml);
}
// Parses the extension descriptor (*.mtaext) from the test classpath.
private ExtensionDescriptor loadExtensionDescriptor() {
InputStream extensionDescriptorYaml = getClass().getResourceAsStream(extensionDescriptorLocation);
return descriptorParser.parseExtensionDescriptorYaml(extensionDescriptorYaml);
}
// Parses the platform definition JSON from the test classpath.
private Platform loadPlatform() {
InputStream platformJson = getClass().getResourceAsStream(platformLocation);
return configurationParser.parsePlatformJson(platformJson);
}
// Loads the deployed MTA snapshot, or returns null when the row does not provide one.
private DeployedMta loadDeployedMta() throws IOException {
if (deployedMtaLocation == null) {
return null;
}
InputStream deployedMtaStream = getClass().getResourceAsStream(deployedMtaLocation);
String deployedMtaJson = IOUtils.toString(deployedMtaStream, StandardCharsets.UTF_8);
return JsonUtil.fromJson(deployedMtaJson, DeployedMta.class);
}
/**
 * Computes each module's effective application name (honoring an explicit
 * app-name parameter and the namespace flag) and stores it back into the
 * module's parameters under SupportedParameters.APP_NAME.
 */
protected void insertProperAppNames(DeploymentDescriptor descriptor) throws Exception {
for (Module module : descriptor.getModules2()) {
Map<String, Object> parameters = new TreeMap<>(getParameters(module));
String appName = NameUtil.getApplicationName(module.getName(), descriptor.getId(), useNamespaces);
if (parameters.containsKey(SupportedParameters.APP_NAME)) {
appName = NameUtil.getApplicationName((String) parameters.get(SupportedParameters.APP_NAME), descriptor.getId(),
useNamespaces);
}
parameters.put(SupportedParameters.APP_NAME, appName);
setParameters(module, parameters);
}
}
// Maps the platform name to the matching default domain constant.
protected String getDefaultDomain(String targetName) {
return (targetName.equals("CLOUD-FOUNDRY")) ? DEFAULT_DOMAIN_CF : DEFAULT_DOMAIN_XS;
}
// Builds system parameters: a global default-domain plus a default-host per module.
protected SystemParameters createSystemParameters(DeploymentDescriptor descriptor, String defaultDomain) {
Map<String, Object> generalParameters = new HashMap<>();
generalParameters.put(SupportedParameters.DEFAULT_DOMAIN, defaultDomain);
Map<String, Map<String, Object>> moduleParameters = new HashMap<>();
for (Module module : descriptor.getModules2()) {
String moduleName = module.getName();
moduleParameters.put(moduleName, MapUtil.asMap(SupportedParameters.DEFAULT_HOST, moduleName));
}
return new SystemParameters(generalParameters, moduleParameters, Collections.emptyMap(), Collections.emptyMap());
}
private CloudModelConfiguration createCloudModelConfiguration(String defaultDomain) {
CloudModelConfiguration configuration = new CloudModelConfiguration();
// Port-based routing is the XS platform behaviour in these tests.
configuration.setPortBasedRouting(defaultDomain.equals(DEFAULT_DOMAIN_XS));
configuration.setPrettyPrinting(false);
configuration.setUseNamespaces(useNamespaces);
configuration.setUseNamespacesForServices(useNamespacesForServices);
return configuration;
}
// Builds the applications for the modules selected for deployment and compares
// them (serialized) against the expected apps JSON or expected exception.
@Test
public void testGetApplications() {
TestUtil.test(new Callable<List<CloudApplicationExtended>>() {
@Override
public List<CloudApplicationExtended> call() throws Exception {
List<CloudApplicationExtended> apps = new ArrayList<CloudApplicationExtended>();
List<Module> modulesToDeploy = modulesCalculator.calculateContentForBuilding(deploymentDescriptor.getModules2());
for (Module module : modulesToDeploy) {
apps.add(appBuilder.build(module, moduleToDeployHelper));
}
return apps;
}
}, expectedApps, getClass(), new TestUtil.JsonSerializationOptions(false, true));
}
// Builds the services from the descriptor's resources and compares them
// (serialized) against the expected services JSON.
@Test
public void testGetServices() {
TestUtil.test(new Callable<List<CloudServiceExtended>>() {
@Override
public List<CloudServiceExtended> call() throws Exception {
return servicesBuilder.build(resourcesCalculator.calculateContentForBuilding(deploymentDescriptor.getResources2()));
}
}, expectedServices, getClass(), new TestUtil.JsonSerializationOptions(false, true));
}
}
|
package com.swookiee.runtime.core.internal.logging;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.Calendar;
import java.util.Map;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.contrib.json.classic.JsonLayout;
/**
 * A {@link JsonLayout} that adds a millisecond-precision "ts" timestamp to every event
 * and, for events logged as a bare "{}" message with a single {@link Map} argument,
 * replaces the "message" entry with that Map so it is serialized as structured JSON.
 */
public class FullJsonLayout extends JsonLayout {

    /**
     * Timestamp formatter. Replaces the previous shared SimpleDateFormat instance field,
     * which is not thread-safe and could produce corrupted timestamps when multiple
     * threads log concurrently; DateTimeFormatter is immutable and thread-safe.
     * Rendered in UTC so the literal 'Z' (Zulu) suffix in the pattern is truthful —
     * the old code formatted local time while still appending 'Z'.
     */
    private static final DateTimeFormatter TIMESTAMP_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss:SSS'Z'").withZone(ZoneOffset.UTC);

    /**
     * Adds the "ts" timestamp and, when applicable, the structured "message" Map
     * to the JSON map produced by the superclass.
     *
     * @param event the logging event being rendered
     * @return the JSON map to serialize for this event
     */
    @Override
    protected Map<String, Object> toJsonMap(final ILoggingEvent event) {
        @SuppressWarnings("unchecked")
        final Map<String, Object> jsonMap = super.toJsonMap(event);
        // Stamp the rendering time (not the event time) to preserve the original
        // Calendar.getInstance() behaviour.
        jsonMap.put("ts", TIMESTAMP_FORMAT.format(Instant.now()));
        if (isMeantToBeLoggedAsFullJson(event)) {
            jsonMap.put("message", event.getArgumentArray()[0]);
        }
        return jsonMap;
    }

    /**
     * True when the event was logged as log.info("{}", someMap) — a bare "{}"
     * message with exactly one Map argument.
     */
    private boolean isMeantToBeLoggedAsFullJson(final ILoggingEvent event) {
        final Object[] args = event.getArgumentArray();
        // getArgumentArray() may be null for no-argument events; the previous code
        // would NPE on args.length in that case. "{}".equals(...) is also null-safe.
        return "{}".equals(event.getMessage()) && args != null && args.length == 1 && args[0] instanceof Map;
    }
}
|
package com.yahoo.vespa.hosted.controller.api.integration.deployment;
import com.yahoo.config.provision.Environment;
import com.yahoo.config.provision.RegionName;
import com.yahoo.config.provision.SystemName;
import com.yahoo.config.provision.zone.ZoneId;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static com.yahoo.config.provision.SystemName.Public;
import static com.yahoo.config.provision.SystemName.PublicCd;
import static com.yahoo.config.provision.SystemName.cd;
import static com.yahoo.config.provision.SystemName.main;
/**
 * Job types that exist in the build system. Each job has a serialized name
 * ({@link #jobName()}) and a mapping from system to the zone the job runs in.
 * All zones of a single job must belong to the same environment (enforced in
 * the constructor and relied on by {@link #environment()}).
 */
public enum JobType {
component ("component", // TODO jonmv: remove when no longer present in serialized data
Map.of()),
systemTest ("system-test",
Map.of(main , ZoneId.from("test", "us-east-1"),
cd , ZoneId.from("test", "cd-us-central-1"),
PublicCd, ZoneId.from("test", "aws-us-east-1c"),
Public , ZoneId.from("test", "aws-us-east-1c"))),
stagingTest ("staging-test",
Map.of(main , ZoneId.from("staging", "us-east-3"),
cd , ZoneId.from("staging", "cd-us-central-1"),
PublicCd, ZoneId.from("staging", "aws-us-east-1c"),
Public , ZoneId.from("staging", "aws-us-east-1c"))),
productionUsEast3 ("production-us-east-3",
Map.of(main, ZoneId.from("prod" , "us-east-3"))),
productionUsWest1 ("production-us-west-1",
Map.of(main, ZoneId.from("prod" , "us-west-1"))),
productionUsCentral1 ("production-us-central-1",
Map.of(main, ZoneId.from("prod" , "us-central-1"))),
productionApNortheast1 ("production-ap-northeast-1",
Map.of(main, ZoneId.from("prod" , "ap-northeast-1"))),
productionApNortheast2 ("production-ap-northeast-2",
Map.of(main, ZoneId.from("prod" , "ap-northeast-2"))),
productionApSoutheast1 ("production-ap-southeast-1",
Map.of(main, ZoneId.from("prod" , "ap-southeast-1"))),
productionEuWest1 ("production-eu-west-1",
Map.of(main, ZoneId.from("prod" , "eu-west-1"))),
productionAwsUsEast1a ("production-aws-us-east-1a",
Map.of(main, ZoneId.from("prod" , "aws-us-east-1a"))),
productionAwsUsEast1c ("production-aws-us-east-1c",
Map.of(PublicCd, ZoneId.from("prod", "aws-us-east-1c"),
Public, ZoneId.from("prod", "aws-us-east-1c"))),
productionAwsUsWest2a ("production-aws-us-west-2a",
Map.of(main, ZoneId.from("prod" , "aws-us-west-2a"))),
productionAwsUsEast1b ("production-aws-us-east-1b",
Map.of(main, ZoneId.from("prod" , "aws-us-east-1b"))),
devUsEast1 ("dev-us-east-1",
Map.of(main, ZoneId.from("dev" , "us-east-1"))),
// NOTE(review): region "us-east-1" does not match the job name "dev-aws-us-east-2a" —
// confirm this mapping is intentional before changing it.
devAwsUsEast2a ("dev-aws-us-east-2a",
Map.of(main, ZoneId.from("dev" , "us-east-1"))),
productionCdAwsUsEast1a("production-cd-aws-us-east-1a",
Map.of(cd , ZoneId.from("prod" , "cd-aws-us-east-1a"))),
productionCdUsCentral1 ("production-cd-us-central-1",
Map.of(cd , ZoneId.from("prod" , "cd-us-central-1"))),
// TODO: Cannot remove production-cd-us-central-2 until we know there are no serialized data in controller referencing it
productionCdUsCentral2 ("production-cd-us-central-2",
Map.of(cd , ZoneId.from("prod" , "cd-us-central-2"))),
productionCdUsWest1 ("production-cd-us-west-1",
Map.of(cd , ZoneId.from("prod" , "cd-us-west-1"))),
devCdUsCentral1 ("dev-cd-us-central-1",
Map.of(cd , ZoneId.from("dev" , "cd-us-central-1"))),
devCdUsWest1 ("dev-cd-us-west-1",
Map.of(cd , ZoneId.from("dev" , "cd-us-west-1"))),
devAwsUsEast1c ("dev-aws-us-east-1c",
Map.of(Public, ZoneId.from("dev", "aws-us-east-1c"),
PublicCd, ZoneId.from("dev", "aws-us-east-1c"))),
perfUsEast3 ("perf-us-east-3",
Map.of(main, ZoneId.from("perf" , "us-east-3")));
/** The name this job is known by in serialized data and job configuration. */
private final String jobName;
/** The zone this job operates on, per system that runs it. */
private final Map<SystemName, ZoneId> zones;
JobType(String jobName, Map<SystemName, ZoneId> zones) {
// Invariant relied on by environment(): every zone of a job shares one environment.
if (zones.values().stream().map(ZoneId::environment).distinct().count() > 1)
throw new IllegalArgumentException("All zones of a job must be in the same environment");
this.jobName = jobName;
this.zones = zones;
}
public String jobName() { return jobName; }
/** Returns the zone for this job in the given system, or throws if this job does not have a zone */
public ZoneId zone(SystemName system) {
if ( ! zones.containsKey(system))
throw new IllegalArgumentException(this + " does not have any zones in " + system);
return zones.get(system);
}
/** Returns all job types that have a zone in the given system. */
public static List<JobType> allIn(SystemName system) {
return Stream.of(values()).filter(job -> job.zones.containsKey(system)).collect(Collectors.toUnmodifiableList());
}
/** Returns whether this is a production job */
public boolean isProduction() { return environment() == Environment.prod; }
/** Returns whether this is an automated test job */
public boolean isTest() { return environment() != null && environment().isTest(); }
/** Returns the environment of this job type, or null if it does not have an environment */
public Environment environment() {
// component has no zones (and hence no environment); see the TODO on the constant.
if (this == component) return null;
return zones.values().iterator().next().environment();
}
/** Returns the job type with the given serialized name, or empty if there is none. */
public static Optional<JobType> fromOptionalJobName(String jobName) {
return Stream.of(values())
.filter(jobType -> jobType.jobName.equals(jobName))
.findAny();
}
/** Returns the job type with the given serialized name, or throws if there is none. */
public static JobType fromJobName(String jobName) {
return fromOptionalJobName(jobName)
.orElseThrow(() -> new IllegalArgumentException("Unknown job name '" + jobName + "'"));
}
/** Returns the job type for the given zone */
public static Optional<JobType> from(SystemName system, ZoneId zone) {
return Stream.of(values())
.filter(job -> zone.equals(job.zones.get(system)))
.findAny();
}
/** Returns the job type for the given environment and region, or empty if none */
public static Optional<JobType> from(SystemName system, Environment environment, RegionName region) {
// test and staging each map to a single job per system, regardless of region.
switch (environment) {
case test: return Optional.of(systemTest);
case staging: return Optional.of(stagingTest);
}
return from(system, ZoneId.from(environment, region));
}
}
|
package org.jboss.as.controller.client.impl;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.util.concurrent.TimeUnit;
import org.jboss.as.controller.client.ModelControllerClient;
import org.jboss.as.controller.client.ModelControllerClientConfiguration;
import org.jboss.as.controller.client.logging.ControllerClientLogger;
import org.jboss.as.protocol.ProtocolConnectionConfiguration;
import org.jboss.as.protocol.StreamUtils;
import org.jboss.as.protocol.mgmt.ManagementChannelAssociation;
import org.jboss.as.protocol.mgmt.ManagementChannelHandler;
import org.jboss.as.protocol.mgmt.ManagementClientChannelStrategy;
import org.jboss.remoting3.Channel;
import org.jboss.remoting3.CloseHandler;
import org.jboss.remoting3.Endpoint;
// Remoting-based controller client: lazily opens a management endpoint and
// channel on first use, and tears both down in close(). finalize() reports
// clients that were never closed, using the allocation stack trace captured
// in the constructor.
public class RemotingModelControllerClient extends AbstractModelControllerClient {
// Created lazily in getOrCreateChannel(); cleared again in close().
private Endpoint endpoint;
private ManagementClientChannelStrategy strategy;
// Guarded by synchronization on "this"; once true, no new channels are handed out.
private boolean closed;
private final ManagementChannelHandler channelAssociation;
private final ModelControllerClientConfiguration clientConfiguration;
// Where this instance was allocated; used by finalize() to describe leaks.
private final StackTraceElement[] allocationStackTrace;
public RemotingModelControllerClient(final ModelControllerClientConfiguration configuration) {
this.channelAssociation = new ManagementChannelHandler(new ManagementClientChannelStrategy() {
@Override
public Channel getChannel() throws IOException {
return getOrCreateChannel();
}
@Override
public synchronized void close() throws IOException {
// Deliberate no-op: channel/endpoint shutdown is coordinated by
// RemotingModelControllerClient.close() instead.
}
}, configuration.getExecutor(), this);
this.clientConfiguration = configuration;
this.allocationStackTrace = Thread.currentThread().getStackTrace();
}
@Override
protected ManagementChannelAssociation getChannelAssociation() throws IOException {
return channelAssociation;
}
@Override
public void close() throws IOException {
synchronized (this) {
// Idempotent: only the first call performs the shutdown sequence.
if(closed) {
return;
}
closed = true;
// Don't allow any new request
channelAssociation.shutdown();
// First close the channel and connection
if (strategy != null) {
StreamUtils.safeClose(strategy);
strategy = null;
}
// Then the endpoint
final Endpoint endpoint = this.endpoint;
if (endpoint != null) {
this.endpoint = null;
try {
endpoint.closeAsync();
} catch (UnsupportedOperationException ignored) {
// closeAsync() may be unsupported; ignored deliberately --
// the endpoint is still awaited below.
}
}
// Cancel all still active operations
channelAssociation.shutdownNow();
try {
channelAssociation.awaitCompletion(1, TimeUnit.SECONDS);
} catch (InterruptedException ignore) {
// Preserve the interrupt for callers further up the stack.
Thread.currentThread().interrupt();
} finally {
StreamUtils.safeClose(clientConfiguration);
}
// Per WFCORE-1573 remoting endpoints should be closed asynchronously, however consumers of this client
// likely need to wait until the endpoints are fully shutdown.
if (endpoint != null) try {
// Uses the local snapshot taken above; this.endpoint was already nulled.
endpoint.awaitClosed();
} catch (InterruptedException e) {
// Wrap as an IOException subtype to honor the Closeable contract
// while keeping the original interrupt as the cause.
final InterruptedIOException cause = new InterruptedIOException(e.getLocalizedMessage());
cause.initCause(e);
throw cause;
}
}
}
// Lazily builds the endpoint and channel strategy; all callers funnel through
// the ManagementClientChannelStrategy installed in the constructor.
protected synchronized Channel getOrCreateChannel() throws IOException {
if (closed) {
throw ControllerClientLogger.ROOT_LOGGER.objectIsClosed(ModelControllerClient.class.getSimpleName());
}
if (strategy == null) {
try {
endpoint = Endpoint.builder().setEndpointName("management-client").build();
final ProtocolConnectionConfiguration configuration = ProtocolConfigurationFactory.create(clientConfiguration, endpoint);
strategy = ManagementClientChannelStrategy.create(configuration, channelAssociation, clientConfiguration.getCallbackHandler(),
clientConfiguration.getSaslOptions(), clientConfiguration.getSSLContext(),
new CloseHandler<Channel>() {
@Override
public void handleClose(final Channel closed, final IOException exception) {
channelAssociation.handleChannelClosed(closed, exception);
}
});
} catch (RuntimeException e) {
throw e;
} catch (Exception e) {
// Checked exceptions from endpoint/strategy creation are not part of
// this method's signature; wrap them.
throw new RuntimeException(e);
}
}
return strategy.getChannel();
}
@Override
protected void finalize() throws Throwable {
try {
// Leak detection: a GC'd but never-closed client is logged with the
// stack trace of its allocation site, then closed best-effort.
if(! closed) {
// Create the leak description
final Throwable t = ControllerClientLogger.ROOT_LOGGER.controllerClientNotClosed();
t.setStackTrace(allocationStackTrace);
ControllerClientLogger.ROOT_LOGGER.leakedControllerClient(t);
// Close
StreamUtils.safeClose(this);
}
} finally {
super.finalize();
}
}
}
|
package com.yahoo.vespa.hosted.controller.restapi.filter;
import com.google.inject.Inject;
import com.yahoo.config.provision.ApplicationName;
import com.yahoo.config.provision.TenantName;
import com.yahoo.jdisc.Response;
import com.yahoo.jdisc.http.filter.DiscFilterRequest;
import com.yahoo.jdisc.http.filter.security.base.JsonSecurityRequestFilterBase;
import com.yahoo.log.LogLevel;
import com.yahoo.restapi.Path;
import com.yahoo.vespa.athenz.api.AthenzDomain;
import com.yahoo.vespa.athenz.api.AthenzIdentity;
import com.yahoo.vespa.athenz.api.AthenzPrincipal;
import com.yahoo.vespa.athenz.client.zms.ZmsClientException;
import com.yahoo.vespa.hosted.controller.Controller;
import com.yahoo.vespa.hosted.controller.TenantController;
import com.yahoo.vespa.hosted.controller.api.integration.athenz.AthenzClientFactory;
import com.yahoo.vespa.hosted.controller.api.role.Role;
import com.yahoo.vespa.hosted.controller.api.role.SecurityContext;
import com.yahoo.vespa.hosted.controller.athenz.ApplicationAction;
import com.yahoo.vespa.hosted.controller.athenz.impl.AthenzFacade;
import com.yahoo.vespa.hosted.controller.tenant.AthenzTenant;
import com.yahoo.vespa.hosted.controller.tenant.Tenant;
import com.yahoo.vespa.hosted.controller.tenant.UserTenant;
import com.yahoo.yolean.Exceptions;
import java.net.URI;
import java.security.Principal;
import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
import java.util.logging.Logger;
import static com.yahoo.vespa.hosted.controller.athenz.HostedAthenzIdentities.SCREWDRIVER_DOMAIN;
/**
* Enriches the request principal with roles from Athenz, if an AthenzPrincipal is set on the request.
*
* @author jonmv
*/
public class AthenzRoleFilter extends JsonSecurityRequestFilterBase {
private static final Logger logger = Logger.getLogger(AthenzRoleFilter.class.getName());
private final AthenzFacade athenz;
private final TenantController tenants;
@Inject
public AthenzRoleFilter(AthenzClientFactory athenzClientFactory, Controller controller) {
this.athenz = new AthenzFacade(athenzClientFactory);
this.tenants = controller.tenants();
}
@Override
protected Optional<ErrorResponse> filter(DiscFilterRequest request) {
try {
Principal principal = request.getUserPrincipal();
if (principal instanceof AthenzPrincipal) {
request.setAttribute(SecurityContext.ATTRIBUTE_NAME, new SecurityContext(principal,
roles((AthenzPrincipal) principal,
request.getUri())));
}
}
catch (Exception e) {
// Role mapping is best-effort: on failure the request proceeds without a
// security context rather than being rejected here.
logger.log(LogLevel.INFO, () -> "Exception mapping Athenz principal to roles: " + Exceptions.toMessageString(e));
}
// Always empty: this filter never terminates the request itself.
return Optional.empty();
}
// Computes the set of roles for the principal, scoped to the tenant and
// application referenced by the request path, when present.
Set<Role> roles(AthenzPrincipal principal, URI uri) {
Path path = new Path(uri);
// matches() is called for its side effect of binding the {tenant} /
// {application} placeholders, which are then read via path.get().
path.matches("/application/v4/tenant/{tenant}/{*}");
Optional<Tenant> tenant = Optional.ofNullable(path.get("tenant")).map(TenantName::from).flatMap(tenants::get);
path.matches("/application/v4/tenant/{tenant}/application/{application}/{*}");
Optional<ApplicationName> application = Optional.ofNullable(path.get("application")).map(ApplicationName::from);
AthenzIdentity identity = principal.getIdentity();
// Hosted operators get that single role and nothing else.
if (athenz.hasHostedOperatorAccess(identity))
return Set.of(Role.hostedOperator());
// A principal can be both tenant admin and tenantPipeline
Set<Role> roleMemberships = new HashSet<>();
if (tenant.isPresent() && isTenantAdmin(identity, tenant.get()))
roleMemberships.add(Role.athenzTenantAdmin(tenant.get().name()));
// NOTE(review): the statement below is a brace-less nested if -- the inner
// check runs only for Screwdriver identities with both tenant and
// application present. Easy to misread; confirm before editing.
if (identity.getDomain().equals(SCREWDRIVER_DOMAIN) && application.isPresent() && tenant.isPresent())
// NOTE: Only fine-grained deploy authorization for Athenz tenants
if ( tenant.get().type() != Tenant.Type.athenz
|| hasDeployerAccess(identity, ((AthenzTenant) tenant.get()).domain(), application.get()))
roleMemberships.add(Role.tenantPipeline(tenant.get().name(), application.get()));
// No explicit memberships: fall back to the catch-all "everyone" role.
return roleMemberships.isEmpty()
? Set.of(Role.everyone())
: Set.copyOf(roleMemberships);
}
private boolean isTenantAdmin(AthenzIdentity identity, Tenant tenant) {
switch (tenant.type()) {
case athenz: return athenz.hasTenantAdminAccess(identity, ((AthenzTenant) tenant).domain());
case user: return ((UserTenant) tenant).is(identity.getName()) || athenz.hasHostedOperatorAccess(identity);
default: throw new IllegalArgumentException("Unexpected tenant type '" + tenant.type() + "'.");
}
}
private boolean hasDeployerAccess(AthenzIdentity identity, AthenzDomain tenantDomain, ApplicationName application) {
try {
return athenz.hasApplicationAccess(identity,
ApplicationAction.deploy,
tenantDomain,
application);
} catch (ZmsClientException e) {
// Surface authorization-service failures as hard errors rather than
// silently denying access.
throw new RuntimeException("Failed to authorize operation: (" + e.getMessage() + ")", e);
}
}
}
|
package nz.gen.geek_central.GPSTest;
/*
Graphical plot of satellite location
*/
import android.util.FloatMath;
/*
 * Small graphics helpers shared by the satellite plot: float-precision
 * angle conversions and a factory for solid-fill Paint objects.
 */
class GraphicsUseful
{
    public final static float Pi = (float)Math.PI;

    /* degrees-to-radians conversion, in float precision. */
    public static float ToRadians
      (
        float Degrees
      )
    {
        return Degrees * Pi / 180.0f;
    } /*ToRadians*/

    /* radians-to-degrees conversion, in float precision. */
    public static float ToDegrees
      (
        float Radians
      )
    {
        return Radians * 180.0f / Pi;
    } /*ToDegrees*/

    /* returns a Paint that will fill with the given solid colour. */
    public static android.graphics.Paint FillWithColor
      (
        int TheColor
      )
    {
        final android.graphics.Paint Result = new android.graphics.Paint();
        Result.setStyle(android.graphics.Paint.Style.FILL);
        Result.setColor(TheColor);
        return Result;
    } /*FillWithColor*/
} /*GraphicsUseful*/
/*
 * Draws each GPS satellite as a tapered pointer radiating from the centre of
 * a circular background, rotated so directions are relative to the phone's
 * current orientation. Data is pushed in via SetSats and SetOrientation;
 * FlashSat briefly highlights one satellite.
 */
public class VectorView extends android.view.View
{
class SatInfo
{
/* all the GpsSatellite info I care about */
final float Azimuth, Elevation; /* radians */
final int Prn; /* unique satellite id? Could use to colour-code or something */
public SatInfo
(
float Azimuth, /* degrees */
float Elevation, /* degrees */
int Prn
)
{
/* angles arrive in degrees from GpsSatellite and are stored in radians */
this.Azimuth = GraphicsUseful.ToRadians(Azimuth);
this.Elevation = GraphicsUseful.ToRadians(Elevation);
this.Prn = Prn;
} /*SatInfo*/
} /*SatInfo*/
float OrientAzi = 0.0f;
float OrientElev = 0.0f;
/* unit vector for the phone's current facing; updated by SetOrientation */
Vec3f OrientVector = new Vec3f(0.0f, -1.0f, 0.0f);
SatInfo[] Sats = {};
android.os.Handler RunTask;
/* PRN currently highlighted by FlashSat, or -1 for none */
int FlashPrn = -1;
/* pending un-highlight task, if any */
Runnable NextUnflash = null;
public VectorView
(
android.content.Context TheContext,
android.util.AttributeSet TheAttributes
)
{
super(TheContext, TheAttributes);
RunTask = new android.os.Handler();
} /*VectorView*/
/* one-shot task that clears the satellite highlight; DidRun guards against
   being invoked both directly and via the Handler */
class FlashResetter implements Runnable
{
VectorView Parent;
boolean DidRun;
public FlashResetter
(
VectorView Parent
)
{
this.Parent = Parent;
DidRun = false;
} /*FlashResetter*/
public void run()
{
if (!DidRun)
{
Parent.FlashPrn = -1; /* clear highlight */
Parent.invalidate();
DidRun = true;
}
} /*run*/
} /*FlashResetter*/
public void SetOrientation
(
float[] Datum /* 3 values from orientation sensor */
)
/* sets reference orientation for computing satellite directions. */
{
/* work this out myself--SensorManager.getRotationMatrixFromVector
isn't available before Android 2.3, API level 9 */
OrientAzi = GraphicsUseful.ToRadians(Datum[0]);
OrientElev = GraphicsUseful.ToRadians(Datum[1]);
/* NOTE(review): android.util.FloatMath was deprecated in later Android
releases in favour of java.lang.Math; left as-is here. */
final float AziCos = FloatMath.cos(OrientAzi);
final float AziSin = FloatMath.sin(OrientAzi);
final float ElevCos = FloatMath.cos(OrientElev);
final float ElevSin = FloatMath.sin(OrientElev);
OrientVector = new Vec3f
(
- AziSin * ElevCos,
- AziCos * ElevCos,
ElevSin
);
invalidate();
} /*SetOrientation*/
public void SetSats
(
Iterable<android.location.GpsSatellite> Sats
)
/* specifies a new set of satellite data to display. */
{
java.util.ArrayList<SatInfo> NewSats = new java.util.ArrayList<SatInfo>();
for (android.location.GpsSatellite ThisSat : Sats)
{
NewSats.add
(
new SatInfo(ThisSat.getAzimuth(), ThisSat.getElevation(), ThisSat.getPrn())
);
} /*for*/
this.Sats = NewSats.toArray(new SatInfo[NewSats.size()]);
invalidate();
} /*SetSats*/
public void FlashSat
(
int Prn
)
/* temporarily highlight the part of the graphic representing the specified satellite. */
{
if (NextUnflash != null)
{
/* Note there might be a race condition here! Not that it matters for what
NextUnflash does. */
RunTask.removeCallbacks(NextUnflash);
NextUnflash.run();
NextUnflash = null;
}
FlashPrn = Prn;
/* highlight is cleared again 250 ms later */
NextUnflash = new FlashResetter(this);
RunTask.postDelayed(NextUnflash, 250);
invalidate();
} /*FlashSat*/
@Override
protected void onDraw
(
android.graphics.Canvas Draw
)
{
super.onDraw(Draw);
Draw.save();
final float Radius = 100.0f;
Draw.translate(Radius, Radius);
Draw.drawArc /* background */
(
/*oval =*/ new android.graphics.RectF(-Radius, -Radius, Radius, Radius),
/*startAngle =*/ 0.0f,
/*sweepAngle =*/ 360.0f,
/*useCenter =*/ false,
/*paint =*/ GraphicsUseful.FillWithColor(0xff0a6d01)
);
float YPos = 12.0f; /* debug */
for (SatInfo ThisSat : Sats)
{
Vec3f D; /* satellite direction in phone coordinates */
{
final float AziCos = FloatMath.cos(ThisSat.Azimuth);
final float AziSin = FloatMath.sin(ThisSat.Azimuth);
final float ElevCos = FloatMath.cos(ThisSat.Elevation);
final float ElevSin = FloatMath.sin(ThisSat.Elevation);
/* rotate the satellite's world-space direction into phone space by
aligning the reference axis with the current orientation vector */
D =
Mat4f.rotate_align
(
new Vec3f(0.0f, -1.0f, 0.0f),
OrientVector
)
.xform(
new Vec3f
(
AziSin * ElevCos,
- AziCos * ElevCos,
ElevSin
)
);
/* NOTE(review): the text below is debug output drawn on every frame */
Draw.drawText
(
String.format
(
"%d (%.2f, %.2f, %.2f) from (%.2f, %.2f, %.2f)(%.2f, %.2f) = (%.2f, %.2f, %.2f)",
ThisSat.Prn,
AziSin * ElevCos, - AziCos * ElevCos, ElevSin,
OrientVector.x, OrientVector.y, OrientVector.z,
OrientAzi, OrientElev,
D.x, D.y, D.z
),
- Radius, YPos - Radius,
GraphicsUseful.FillWithColor(0x80ffffff)
); /* debug */
YPos += 12.0f; /* debug */
}
final android.graphics.Path V = new android.graphics.Path();
final float BaseWidth = 5.0f;
final float EndWidth = BaseWidth * (1.0f + D.z);
/* taper to simulate perspective foreshortening */
V.moveTo(0.0f, 0.0f);
V.lineTo(+ BaseWidth * D.y, - BaseWidth * D.x);
V.lineTo
(
+ EndWidth * D.y + Radius * D.x,
- EndWidth * D.x + Radius * D.y
);
V.lineTo
(
- EndWidth * D.y + Radius * D.x,
+ EndWidth * D.x + Radius * D.y
);
V.lineTo(- BaseWidth * D.y, + BaseWidth * D.x);
V.close();
Draw.drawPath
(
V,
GraphicsUseful.FillWithColor
(
ThisSat.Prn == FlashPrn ?
0xffce15ee
:
0xffeedf09
)
);
} /*for*/
Draw.restore();
} /*onDraw*/
} /*VectorView*/
|
package org.hyperic.sigar;
import org.hyperic.jni.ArchLoader;
import org.hyperic.jni.ArchLoaderException;
import org.hyperic.jni.ArchName;
import org.hyperic.jni.ArchNotSupportedException;
/**
 * Loader for the native sigar library and its containing jar. Extends the
 * generic ArchLoader with sigar-specific naming, and adds static helpers
 * that cache the resolved jar location and native library name.
 */
public class SigarLoader extends ArchLoader {

    public static final String PROP_SIGAR_JAR_NAME = "sigar.jar.name";

    // Lazily resolved caches; access is serialized by the static
    // synchronized accessors below.
    private static String location = null;
    private static String nativeName = null;

    // Class<?> instead of the raw Class type; existing callers
    // (e.g. new SigarLoader(Sigar.class)) are unaffected since the
    // erased signature is unchanged.
    public SigarLoader(Class<?> loaderClass) {
        super(loaderClass);
    }

    //XXX same as super.getArchLibName()
    //but db2monitor.jar gets loaded first in jboss
    //results in NoSuchMethodError
    public String getArchLibName()
        throws ArchNotSupportedException {
        return getName() + "-" + ArchName.getName();
    }

    public String getDefaultLibName()
        throws ArchNotSupportedException {
        return getArchLibName(); //drop "java" prefix
    }

    //override these methods to ensure our ClassLoader
    //loads the native library.
    protected void systemLoadLibrary(String name) {
        System.loadLibrary(name);
    }

    protected void systemLoad(String name) {
        System.load(name);
    }

    /** Returns the sigar jar name, honoring the {@value #PROP_SIGAR_JAR_NAME} system property override. */
    public String getJarName() {
        return System.getProperty(PROP_SIGAR_JAR_NAME,
                                  super.getJarName());
    }

    public static void setSigarJarName(String jarName) {
        System.setProperty(PROP_SIGAR_JAR_NAME, jarName);
    }

    public static String getSigarJarName() {
        return System.getProperty(PROP_SIGAR_JAR_NAME,
                                  "sigar.jar");
    }

    /**
     * Returns the path where sigar.jar is located.
     * Falls back to "." (and caches that) if the jar cannot be found.
     */
    public static synchronized String getLocation() {
        if (location == null) {
            SigarLoader loader = new SigarLoader(Sigar.class);
            try {
                location = loader.findJarPath(getSigarJarName());
            } catch (ArchLoaderException e) {
                location = ".";
            }
        }
        return location;
    }

    /**
     * Returns the name of the native sigar library, or null if the
     * current architecture is unsupported. A failed lookup is not
     * cached, so it is retried on each call.
     */
    public static synchronized String getNativeLibraryName() {
        if (nativeName == null) {
            SigarLoader loader = new SigarLoader(Sigar.class);
            try {
                nativeName = loader.getLibraryName();
            } catch (ArchNotSupportedException e) {
                nativeName = null;
            }
        }
        return nativeName;
    }
}
|
package edu.umd.cs.findbugs;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.Reader;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.jar.Attributes;
import java.util.jar.Manifest;
import org.dom4j.DocumentException;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.XMLReaderFactory;
import edu.umd.cs.findbugs.annotations.CheckForNull;
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.ba.SourceFinder;
import edu.umd.cs.findbugs.ba.URLClassPath;
import edu.umd.cs.findbugs.filter.Filter;
import edu.umd.cs.findbugs.util.Util;
import edu.umd.cs.findbugs.xml.OutputStreamXMLOutput;
import edu.umd.cs.findbugs.xml.XMLAttributeList;
import edu.umd.cs.findbugs.xml.XMLOutput;
import edu.umd.cs.findbugs.xml.XMLOutputUtil;
import edu.umd.cs.findbugs.xml.XMLWriteable;
/**
* A project in the GUI.
* This consists of some number of Jar files to analyze for bugs, and optionally
* <p/>
* <ul>
* <li> some number of source directories, for locating the program's
* source code
* <li> some number of auxiliary classpath entries, for locating classes
* referenced by the program which the user doesn't want to analyze
* <li> some number of boolean options
* </ul>
*
* @author David Hovemeyer
*/
public class Project implements XMLWriteable {
// Verbose tracing for implicit-classpath discovery; enabled via system property.
private static final boolean DEBUG = SystemProperties.getBoolean("findbugs.project.debug");
// Base directories used to resolve relative paths added to the project.
private List<File> currentWorkingDirectoryList;
/**
 * Project filename.
 */
@Deprecated
private String projectFileName;
// Display name of the project, if one was set.
private String projectName;
/**
 * Options.
 */
@Deprecated
private Map<String, Boolean> optionsMap;
/**
 * List of jars/directories to analyze
 */
private List<String> analysisTargets;
/**
 * The list of source directories.
 */
private List<String> srcDirList;
/**
 * The list of auxiliary classpath entries.
 */
private List<String> auxClasspathEntryList;
/**
 * Flag to indicate that this Project has been modified.
 */
private boolean isModified;
/**
 * Constant used to name anonymous projects.
 */
public static final String UNNAMED_PROJECT = "<<unnamed project>>";
// Timestamp applied to analyzed classes; 0 means unset.
private long timestampForAnalyzedClasses = 0L;
// GUI hook; null when running headless (see isGuiAvaliable()).
private IGuiCallback guiCallback;
// Bug suppression filter; never null.
@NonNull private Filter suppressionFilter = new Filter();
// Lazily built from srcDirList; rebuilt whenever source dirs change.
private SourceFinder sourceFinder;
/**
 * Creates an anonymous project with no analysis targets, source
 * directories, or auxiliary classpath entries.
 */
public Project() {
    this.projectFileName = UNNAMED_PROJECT;
    this.currentWorkingDirectoryList = new ArrayList<File>();
    this.analysisTargets = new LinkedList<String>();
    this.srcDirList = new LinkedList<String>();
    this.auxClasspathEntryList = new LinkedList<String>();
    this.optionsMap = new HashMap<String, Boolean>();
    this.optionsMap.put(RELATIVE_PATHS, Boolean.FALSE);
    this.isModified = false;
}
/**
 * Return an exact copy of this Project.
 */
// NOTE(review): not quite exact -- isModified resets to the constructor
// default (false), and suppressionFilter/sourceFinder are not copied.
// Confirm this is intentional before relying on duplicate() for those fields.
public Project duplicate() {
Project dup = new Project();
dup.projectFileName = this.projectFileName;
dup.currentWorkingDirectoryList.addAll(this.currentWorkingDirectoryList);
dup.projectName = this.projectName;
dup.optionsMap.clear();
dup.optionsMap.putAll(this.optionsMap);
dup.analysisTargets.addAll(this.analysisTargets);
dup.srcDirList.addAll(this.srcDirList);
dup.auxClasspathEntryList.addAll(this.auxClasspathEntryList);
dup.timestampForAnalyzedClasses = timestampForAnalyzedClasses;
dup.guiCallback = guiCallback;
return dup;
}
/** Returns the source finder, lazily created from this project's source directories. */
public SourceFinder getSourceFinder() {
if (sourceFinder == null) {
sourceFinder = new SourceFinder(this);
}
return sourceFinder;
}
/** Returns whether a GUI callback is registered. (Misspelled name kept: it is public API.) */
public boolean isGuiAvaliable(){
return guiCallback != null;
}
/**
 * add information from project2 to this project
 */
public void add(Project project2) {
// project2's options win on key collisions; list entries are appended
// without duplicates, keeping this project's entries first.
optionsMap.putAll(project2.optionsMap);
analysisTargets = appendWithoutDuplicates(analysisTargets, project2.analysisTargets);
srcDirList = appendWithoutDuplicates(srcDirList, project2.srcDirList);
auxClasspathEntryList = appendWithoutDuplicates(auxClasspathEntryList, project2.auxClasspathEntryList);
}
/**
 * Concatenates two lists into a new list, dropping duplicates while
 * preserving first-encounter order (elements of lst1 come first).
 *
 * @param lst1 first list
 * @param lst2 second list
 * @return a new mutable list containing the ordered union
 */
public static <T> List<T> appendWithoutDuplicates(List<T> lst1, List<T> lst2) {
    LinkedHashSet<T> union = new LinkedHashSet<T>();
    union.addAll(lst1);
    union.addAll(lst2);
    return new ArrayList<T>(union);
}
/** Records {@code f} as a working directory for resolving relative paths; null is ignored. */
public void setCurrentWorkingDirectory(File f) {
    if (f == null) {
        return; // nothing to record
    }
    addWorkingDir(f.toString());
}
/**
 * Return whether or not this Project has unsaved modifications.
 *
 * @return true if the project has been changed since it was last saved
 */
public boolean isModified() {
return isModified;
}
/**
 * Set whether or not this Project has unsaved modifications.
 */
public void setModified(boolean isModified) {
this.isModified = isModified;
}
/**
 * Get the project filename.
 */
@Deprecated
public String getProjectFileName() {
return projectFileName;
}
/**
 * Set the project filename.
 *
 * @param projectFileName the new filename
 */
@Deprecated
public void setProjectFileName(String projectFileName) {
this.projectFileName = projectFileName;
}
/**
 * Add a file to the project.
 *
 * @param fileName the file to add
 * @return true if the file was added, or false if the
 *         file was already present
 */
public boolean addFile(String fileName) {
return addToListInternal(analysisTargets, makeAbsoluteCWD(fileName));
}
/**
 * Add a source directory to the project.
 * @param dirName the directory to add
 * @return true if the source directory was added, or false if the
 *         source directory was already present
 */
public boolean addSourceDir(String dirName) {
boolean isNew = false;
// A relative dirName may resolve against several working directories;
// add every resolved candidate that is new.
for (String dir : makeAbsoluteCwdCandidates(dirName)) {
isNew = addToListInternal(srcDirList, dir) || isNew;
}
// Rebuild the cached source finder so it sees the new directory.
sourceFinder = new SourceFinder(this);
return isNew;
}
/**
 * Add a working directory to the project.
 * @param dirName the directory to add
 * @return true if the working directory was added, or false if the
 *         working directory was already present
 * @throws NullPointerException if dirName is null
 */
public boolean addWorkingDir(String dirName) {
if (dirName == null)
throw new NullPointerException();
return addToListInternal(currentWorkingDirectoryList, new File(dirName));
}
/**
 * Retrieve the Options value.
 *
 * @param option the name of option to get
 * @return the value of the option
 */
@Deprecated
public boolean getOption(String option) {
Boolean value = optionsMap.get(option);
return value != null && value.booleanValue();
}
/**
 * Get the number of files in the project.
 *
 * @return the number of files in the project
 */
public int getFileCount() {
return analysisTargets.size();
}
/**
 * Get the given file in the list of project files.
 *
 * @param num the number of the file in the list of project files
 * @return the name of the file
 */
public String getFile(int num) {
return analysisTargets.get(num);
}
/**
 * Remove file at the given index in the list of project files
 *
 * @param num index of the file to remove in the list of project files
 */
public void removeFile(int num) {
analysisTargets.remove(num);
isModified = true;
}
/**
 * Get the list of files, directories, and zip files in the project.
 * Note: returns the live internal list, not a copy.
 */
public List<String> getFileList() {
return analysisTargets;
}
/**
 * Get the number of source directories in the project.
 *
 * @return the number of source directories in the project
 */
public int getNumSourceDirs() {
return srcDirList.size();
}
/**
 * Get the given source directory.
 *
 * @param num the number of the source directory
 * @return the source directory
 */
public String getSourceDir(int num) {
return srcDirList.get(num);
}
/**
 * Remove source directory at given index.
 *
 * @param num index of the source directory to remove
 */
public void removeSourceDir(int num) {
srcDirList.remove(num);
// Rebuild the cached source finder since its directory list changed.
sourceFinder = new SourceFinder(this);
isModified = true;
}
/**
 * Get project files as an array of Strings.
 */
public String[] getFileArray() {
return analysisTargets.toArray(new String[analysisTargets.size()]);
}
/**
 * Get source dirs as an array of Strings.
 */
public String[] getSourceDirArray() {
return srcDirList.toArray(new String[srcDirList.size()]);
}
/**
 * Get the source dir list.
 * Note: returns the live internal list, not a copy.
 */
public List<String> getSourceDirList() {
return srcDirList;
}
/**
 * Add an auxiliary classpath entry
 *
 * @param auxClasspathEntry the entry
 * @return true if the entry was added successfully, or false
 *         if the given entry is already in the list
 */
public boolean addAuxClasspathEntry(String auxClasspathEntry) {
return addToListInternal(auxClasspathEntryList, makeAbsoluteCWD(auxClasspathEntry));
}
/**
 * Get the number of auxiliary classpath entries.
 */
public int getNumAuxClasspathEntries() {
return auxClasspathEntryList.size();
}
/**
 * Get the n'th auxiliary classpath entry.
 */
public String getAuxClasspathEntry(int n) {
return auxClasspathEntryList.get(n);
}
/**
 * Remove the n'th auxiliary classpath entry.
 */
public void removeAuxClasspathEntry(int n) {
auxClasspathEntryList.remove(n);
isModified = true;
}
/**
 * Return the list of aux classpath entries.
 * Note: returns the live internal list, not a copy.
 */
public List<String> getAuxClasspathEntryList() {
return auxClasspathEntryList;
}
/**
 * Worklist item for finding implicit classpath entries:
 * an immutable wrapper around the URL of one jar/zip file to examine.
 */
private static class WorkListItem {
    private final URL url;

    /**
     * Constructor.
     *
     * @param url the URL of the Jar or Zip file
     */
    public WorkListItem(URL url) {
        this.url = url;
    }

    /** Get URL of Jar/Zip file. */
    public URL getURL() {
        return url;
    }
}
/**
 * Worklist for finding implicit classpath entries.
 * Remembers every URL ever queued so each jar/zip is examined at most once.
 */
private static class WorkList {
    // Pending items, consumed in FIFO order by getNextItem().
    private final LinkedList<WorkListItem> itemList = new LinkedList<WorkListItem>();
    // String forms of all URLs ever queued, used to suppress duplicates.
    private final HashSet<String> addedSet = new HashSet<String>();

    /** Creates an empty worklist. */
    public WorkList() {
    }

    /**
     * Create a URL from a filename specified in the project file.
     * A filename without an explicit protocol is treated as a local file.
     */
    public URL createURL(String fileName) throws MalformedURLException {
        String protocol = URLClassPath.getURLProtocol(fileName);
        if (protocol == null) {
            fileName = "file:" + fileName;
        }
        return new URL(fileName);
    }

    /** Create a URL of a file relative to another URL. */
    public URL createRelativeURL(URL base, String fileName) throws MalformedURLException {
        return new URL(base, fileName);
    }

    /**
     * Add a worklist item.
     *
     * @param item the WorkListItem representing a zip/jar file to be examined
     * @return true if the item was added, false if not (because it was
     *         examined already)
     */
    public boolean add(WorkListItem item) {
        String key = item.getURL().toString();
        if (DEBUG) {
            System.out.println("Adding " + key);
        }
        if (!addedSet.add(key)) {
            if (DEBUG) {
                System.out.println("\t==> Already processed");
            }
            return false;
        }
        itemList.add(item);
        return true;
    }

    /** Return whether or not the worklist is empty. */
    public boolean isEmpty() {
        return itemList.isEmpty();
    }

    /** Get and remove the next (oldest) item in the worklist. */
    public WorkListItem getNextItem() {
        return itemList.removeFirst();
    }
}
/**
 * Return the list of implicit classpath entries. The implicit
 * classpath is computed from the closure of the set of jar files
 * that are referenced by the <code>"Class-Path"</code> attribute
 * of the manifest of the any jar file that is part of this project
 * or by the <code>"Class-Path"</code> attribute of any directly or
 * indirectly referenced jar. The referenced jar files that exist
 * are the list of implicit classpath entries.
 *
 * @deprecated FindBugs2 and ClassPathBuilder take care of this automatically
 */
@Deprecated
public List<String> getImplicitClasspathEntryList() {
final LinkedList<String> implicitClasspath = new LinkedList<String>();
WorkList workList = new WorkList();
// Prime the worklist by adding the zip/jar files
// in the project.
for (String fileName : analysisTargets) {
try {
URL url = workList.createURL(fileName);
WorkListItem item = new WorkListItem(url);
workList.add(item);
} catch (MalformedURLException ignore) {
// Ignore targets that cannot be expressed as URLs.
}
}
// Scan recursively.
while (!workList.isEmpty()) {
WorkListItem item = workList.getNextItem();
processComponentJar(item.getURL(), workList, implicitClasspath);
}
return implicitClasspath;
}
/**
 * Examine the manifest of a single zip/jar file for implicit
 * classpath entries.
 *
 * @param jarFileURL URL of the zip/jar file
 * @param workList worklist of zip/jar files to examine
 * @param implicitClasspath list of implicit classpath entries found
 */
private void processComponentJar(URL jarFileURL, WorkList workList,
List<String> implicitClasspath) {
if (DEBUG) {
System.out.println("Processing " + jarFileURL.toString());
}
// Only archives can carry a manifest Class-Path attribute.
if (!jarFileURL.toString().endsWith(".zip") && !jarFileURL.toString().endsWith(".jar")) {
return;
}
try {
URL manifestURL = new URL("jar:" + jarFileURL.toString() + "!/META-INF/MANIFEST.MF");
InputStream in = null;
try {
in = manifestURL.openStream();
Manifest manifest = new Manifest(in);
Attributes mainAttrs = manifest.getMainAttributes();
String classPath = mainAttrs.getValue("Class-Path");
if (classPath != null) {
// Class-Path entries are whitespace-separated and relative to this jar.
String[] fileList = classPath.split("\\s+");
for (String jarFile : fileList) {
URL referencedURL = workList.createRelativeURL(jarFileURL, jarFile);
// add() returns false for already-seen URLs, preventing cycles.
if (workList.add(new WorkListItem(referencedURL))) {
implicitClasspath.add(referencedURL.toString());
if (DEBUG) {
System.out.println("Implicit jar: " + referencedURL.toString());
}
}
}
}
} finally {
if (in != null) {
in.close();
}
}
} catch (IOException ignore) {
// Best-effort scan: jars whose manifest cannot be read are skipped.
}
}
// Section headers used by the deprecated text project-file format (see write()/read()).
private static final String OPTIONS_KEY = "[Options]";
private static final String JAR_FILES_KEY = "[Jar files]";
private static final String SRC_DIRS_KEY = "[Source dirs]";
private static final String AUX_CLASSPATH_ENTRIES_KEY = "[Aux classpath entries]";
// Option keys
public static final String RELATIVE_PATHS = "relative_paths";
/**
 * Save the project to an output file.
 *
 * @param outputFile name of output file
 * @param useRelativePaths true if the project should be written
 *        using only relative paths
 * @param relativeBase if useRelativePaths is true,
 *        this file is taken as the base directory in terms of which
 *        all files should be made relative
 * @throws IOException if an error occurs while writing
 */
@Deprecated
public void write(String outputFile, boolean useRelativePaths, String relativeBase)
throws IOException {
PrintWriter writer = new PrintWriter(new BufferedWriter(new FileWriter(outputFile)));
try {
// Sections are written in a fixed order: jars, source dirs, aux classpath,
// then (optionally) the options section recording relative-path mode.
writer.println(JAR_FILES_KEY);
for (String jarFile : analysisTargets) {
if (useRelativePaths) {
jarFile = convertToRelative(jarFile, relativeBase);
}
writer.println(jarFile);
}
writer.println(SRC_DIRS_KEY);
for (String srcDir : srcDirList) {
if (useRelativePaths) {
srcDir = convertToRelative(srcDir, relativeBase);
}
writer.println(srcDir);
}
writer.println(AUX_CLASSPATH_ENTRIES_KEY);
for (String auxClasspathEntry : auxClasspathEntryList) {
if (useRelativePaths) {
auxClasspathEntry = convertToRelative(auxClasspathEntry, relativeBase);
}
writer.println(auxClasspathEntry);
}
if (useRelativePaths) {
writer.println(OPTIONS_KEY);
writer.println(RELATIVE_PATHS + "=true");
}
} finally {
writer.close();
}
// Project successfully saved
isModified = false;
}
/**
 * Load a Project from an XML file. The file may be either a standalone
 * &lt;Project&gt; document or a &lt;BugCollection&gt; document (in which
 * case the project is read as a side effect of parsing the collection).
 *
 * @param f the XML file to read
 * @return the loaded Project, marked as unmodified
 * @throws IOException if the root tag is unrecognized or reading fails
 */
public static Project readXML(File f) throws IOException, DocumentException, SAXException {
    Project result = new Project();
    InputStream stream = new BufferedInputStream(new FileInputStream(f));
    try {
        // Peek at the document's root element to decide how to parse it.
        String rootTag = Util.getXMLType(stream);
        SAXBugCollectionHandler saxHandler;
        if (rootTag.equals("Project")) {
            saxHandler = new SAXBugCollectionHandler(result, f);
        } else if (rootTag.equals("BugCollection")) {
            SortedBugCollection collection = new SortedBugCollection(result);
            saxHandler = new SAXBugCollectionHandler(collection, f);
        } else {
            throw new IOException("Can't load a project from a " + rootTag + " file");
        }
        XMLReader xmlReader = XMLReaderFactory.createXMLReader();
        xmlReader.setContentHandler(saxHandler);
        xmlReader.setErrorHandler(saxHandler);
        Reader charReader = Util.getReader(stream);
        xmlReader.parse(new InputSource(charReader));
    } finally {
        stream.close();
    }
    // A freshly loaded project starts out clean.
    result.setModified(false);
    return result;
}
/**
 * Write this project as XML to the given file.
 *
 * @param f destination file
 * @throws IOException if an error occurs while writing
 */
public void writeXML(File f) throws IOException {
    XMLOutput xmlOutput = new OutputStreamXMLOutput(new FileOutputStream(f));
    try {
        writeXML(xmlOutput);
    } finally {
        // finish() flushes the output; presumably it also releases the
        // underlying stream -- TODO confirm against XMLOutput's contract.
        xmlOutput.finish();
    }
}
/**
 * Read the project from an input file.
 * This method should only be used on an empty Project
 * (created with the default constructor).
 *
 * <p>Parses the deprecated sectioned text format produced by
 * {@code write(String, boolean, String)}: a [Jar files] section, a
 * [Source dirs] section, then optional [Aux classpath entries] and
 * [Options] sections.
 *
 * @param inputFile name of the input file to read the project from
 * @throws IOException if an error occurs while reading
 */
@Deprecated
public void read(String inputFile) throws IOException {
    if (isModified) {
        throw new IllegalStateException("Reading into a modified Project!");
    }
    // Make the input file absolute, if necessary
    File file = new File(inputFile);
    if (!file.isAbsolute()) {
        inputFile = file.getAbsolutePath();
    }
    // Store the project filename
    setProjectFileName(inputFile);
    BufferedReader reader = null;
    try {
        reader = new BufferedReader(Util.getFileReader(inputFile));
        String line;
        line = getLine(reader);
        // The file must begin with the jar-files section marker.
        if (line == null || !line.equals(JAR_FILES_KEY)) {
            throw new IOException("Bad format: missing jar files key");
        }
        // Jar files: every line up to the [Source dirs] marker.
        while ((line = getLine(reader)) != null && !line.equals(SRC_DIRS_KEY)) {
            addToListInternal(analysisTargets, line);
        }
        if (line == null) {
            throw new IOException("Bad format: missing source dirs key");
        }
        // Source dirs: every line up to the [Aux classpath entries] marker.
        while ((line = getLine(reader)) != null && !line.equals(AUX_CLASSPATH_ENTRIES_KEY)) {
            addToListInternal(srcDirList, line);
        }
        // The list of aux classpath entries is optional
        if (line != null) {
            while ((line = getLine(reader)) != null) {
                if (line.equals(OPTIONS_KEY)) {
                    break;
                }
                addToListInternal(auxClasspathEntryList, line);
            }
        }
        // The Options section is also optional
        // NOTE(review): the loop terminates on JAR_FILES_KEY rather than a
        // dedicated end marker -- presumably options run to end of file;
        // confirm against the writer's output.
        if (line != null && line.equals(OPTIONS_KEY)) {
            while ((line = getLine(reader)) != null && !line.equals(JAR_FILES_KEY)) {
                parseOption(line);
            }
        }
        // If this project has the relative paths option set,
        // resolve all internal relative paths into absolute
        // paths, using the absolute path of the project
        // file as a base directory.
        if (getOption(RELATIVE_PATHS)) {
            makeListAbsoluteProject(analysisTargets);
            makeListAbsoluteProject(srcDirList);
            makeListAbsoluteProject(auxClasspathEntryList);
        }
        // Clear the modification flag set by the various "add" methods.
        isModified = false;
    } finally {
        if (reader != null) {
            reader.close();
        }
    }
}
/**
 * Read Project from named file.
 *
 * <p>Dispatches on the on-disk layout: a GUI2 project directory
 * (containing a {@code <dirname>.xml} bug collection), an XML file
 * ({@code .xml} / {@code .fbp}), or the legacy sectioned text format
 * handled by {@code read(String)}.
 *
 * @param argument command line argument containing project file name
 * @return the Project
 * @throws IOException if the project cannot be read or parsed
 */
public static Project readProject(String argument) throws IOException {
    String projectFileName = argument;
    File projectFile = new File(projectFileName);
    if (projectFile.isDirectory()) {
        // New-style (GUI2) project directory.
        // We read in the bug collection in order to read the project
        // information as a side effect.
        // Inefficient, but effective.
        String name = projectFile.getAbsolutePath() + File.separator + projectFile.getName() + ".xml";
        File f = new File(name);
        SortedBugCollection bugCollection = new SortedBugCollection();
        try {
            bugCollection.readXML(f.getPath());
            return bugCollection.getProject();
        } catch (DocumentException e) {
            // Wrap while preserving the original cause (pre-Java 6
            // IOException has no (String, Throwable) constructor).
            IOException ioe = new IOException("Couldn't read saved XML in project directory");
            ioe.initCause(e);
            throw ioe;
        }
    } else if (projectFileName.endsWith(".xml") || projectFileName.endsWith(".fbp")) {
        try {
            return Project.readXML(projectFile);
        } catch (DocumentException e) {
            IOException ioe = new IOException("Couldn't read saved FindBugs project");
            ioe.initCause(e);
            throw ioe;
        }
        catch (SAXException e) {
            IOException ioe = new IOException("Couldn't read saved FindBugs project");
            ioe.initCause(e);
            throw ioe;
        }
    } else {
        // Old-style (original GUI) project file
        // Convert project file to be an absolute path
        projectFileName = new File(projectFileName).getAbsolutePath();
        try {
            Project project = new Project();
            project.read(projectFileName);
            return project;
        } catch (IOException e) {
            // Log to stderr for CLI users, then propagate to the caller.
            System.err.println("Error opening " + projectFileName);
            e.printStackTrace(System.err);
            throw e;
        }
    }
}
/**
* Read a line from a BufferedReader, ignoring blank lines
* and comments.
*/
private static String getLine(BufferedReader reader) throws IOException {
String line;
while ((line = reader.readLine()) != null) {
line = line.trim();
if (!line.equals("") && !line.startsWith("
break;
}
}
return line;
}
/**
 * Derive a short project name from the stored project file name by
 * stripping any directory prefix and any file extension.
 */
public String projectNameFromProjectFileName() {
    String name = projectFileName;
    int sepIndex = name.lastIndexOf(File.separatorChar);
    if (sepIndex >= 0) {
        name = name.substring(sepIndex + 1);
    }
    int extIndex = name.lastIndexOf('.');
    return extIndex >= 0 ? name.substring(0, extIndex) : name;
}
/**
 * Convert to a string in a nice (displayable) format.
 * Prefers the explicit project name; otherwise returns the project file
 * name with its directory prefix and a trailing ".fb" extension removed.
 */
@Override
public String toString() {
    if (projectName != null) {
        return projectName;
    }
    // TODO Andrei: if this old stuff is not more used, delete it
    String displayName = projectFileName;
    int sepIndex = displayName.lastIndexOf(File.separatorChar);
    if (sepIndex >= 0) {
        displayName = displayName.substring(sepIndex + 1);
    }
    // Don't hide every suffix--some are informative and/or disambiguative.
    // Only the legacy ".fb" extension is stripped.
    if (displayName.endsWith(".fb")) {
        displayName = displayName.substring(0, displayName.length() - 3);
    }
    return displayName;
}
/**
 * Transform a user-entered filename into a proper filename,
 * by adding the ".fb" file extension if it isn't already present.
 *
 * @param fileName the user-entered filename
 * @return the filename, guaranteed to end in ".fb"
 */
public static String transformFilename(String fileName) {
    return fileName.endsWith(".fb") ? fileName : fileName + ".fb";
}
// XML element and attribute names used when (de)serializing a Project
// in writeXML(XMLOutput, Object).
static final String JAR_ELEMENT_NAME = "Jar";
static final String AUX_CLASSPATH_ENTRY_ELEMENT_NAME = "AuxClasspathEntry";
static final String SRC_DIR_ELEMENT_NAME = "SrcDir";
static final String WRK_DIR_ELEMENT_NAME = "WrkDir";
static final String FILENAME_ATTRIBUTE_NAME = "filename";
static final String PROJECTNAME_ATTRIBUTE_NAME = "projectName";
/**
 * Write this project as a &lt;Project&gt; element with no destination
 * (convenience overload delegating with a null destination).
 *
 * @param xmlOutput the XML sink
 * @throws IOException if an error occurs while writing
 */
public void writeXML(XMLOutput xmlOutput) throws IOException {
    writeXML(xmlOutput, null);
}
/**
 * Write this project as a &lt;Project&gt; XML element: attributes for the
 * project file/name, then child element lists for jars, aux classpath
 * entries, source dirs and working dirs, plus an optional suppression
 * filter.
 *
 * @param xmlOutput   the XML sink
 * @param destination output destination; currently unreferenced in this
 *                    method body (presumably intended for makeRelative --
 *                    TODO confirm)
 * @throws IOException if an error occurs while writing
 */
public void writeXML(XMLOutput xmlOutput, @CheckForNull Object destination) throws IOException {
    XMLAttributeList attributeList = new XMLAttributeList();
    // Only record a filename attribute for named (non-default) projects.
    if (!getProjectFileName().equals(UNNAMED_PROJECT))
        attributeList.addAttribute(FILENAME_ATTRIBUTE_NAME, getProjectFileName());
    if (getProjectName() != null) {
        attributeList = attributeList.addAttribute(PROJECTNAME_ATTRIBUTE_NAME, getProjectName());
    }
    xmlOutput.openTag(
        BugCollection.PROJECT_ELEMENT_NAME,
        attributeList
    );
    XMLOutputUtil.writeElementList(xmlOutput, JAR_ELEMENT_NAME, analysisTargets);
    XMLOutputUtil.writeElementList(xmlOutput, AUX_CLASSPATH_ENTRY_ELEMENT_NAME, auxClasspathEntryList);
    XMLOutputUtil.writeElementList(xmlOutput, SRC_DIR_ELEMENT_NAME, srcDirList);
    XMLOutputUtil.writeFileList(xmlOutput, WRK_DIR_ELEMENT_NAME, currentWorkingDirectoryList);
    // Emit the suppression filter only when it actually has content.
    if (suppressionFilter != null && !suppressionFilter.isEmpty()) {
        xmlOutput.openTag("SuppressionFilter");
        suppressionFilter.writeBodyAsXML(xmlOutput);
        xmlOutput.closeTag("SuppressionFilter");
    }
    xmlOutput.closeTag(BugCollection.PROJECT_ELEMENT_NAME);
}
/**
 * Make the given file paths relative when the destination file lives
 * directly inside the first current working directory; otherwise the
 * list is returned unchanged.
 *
 * @param files       paths to (possibly) relativize
 * @param destination output destination; only File destinations trigger
 *                    relativization
 * @return either the original list or a new list of adjusted paths
 */
List<String> makeRelative(List<String> files, @CheckForNull Object destination) {
    if (destination == null) {
        return files;
    }
    if (currentWorkingDirectoryList.isEmpty()) {
        return files;
    }
    if (destination instanceof File) {
        File where = (File) destination;
        File parent = where.getParentFile();
        // Guard: a bare file name has no parent directory; the previous
        // code dereferenced getParentFile() unconditionally and would NPE.
        if (parent != null && parent.equals(currentWorkingDirectoryList.get(0))) {
            List<String> result = new ArrayList<String>(files.size());
            String root = where.getParent();
            for (String s : files) {
                if (s.startsWith(root)) {
                    // Strip the common prefix; the leading separator (if
                    // any) is intentionally left on the remainder.
                    result.add(s.substring(root.length()));
                } else {
                    result.add(s);
                }
            }
            return result;
        }
    }
    return files;
}
/**
 * Parse one line in the [Options] section.
 *
 * @param option one line in the [Options] section, in name=value form
 * @throws IOException if the line contains no '=' separator
 */
private void parseOption(String option) throws IOException {
    int eq = option.indexOf('=');
    if (eq < 0) {
        throw new IOException("Bad format: invalid option format");
    }
    // Values are interpreted as booleans; anything but "true" is false.
    String optionName = option.substring(0, eq);
    String optionValue = option.substring(eq + 1);
    optionsMap.put(optionName, Boolean.valueOf(optionValue));
}
/**
 * Hack for whether files are case insensitive.
 * For now, we'll assume that Windows is the only
 * case insensitive OS. (OpenVMS users,
 * feel free to submit a patch :-)
 */
// Computed once at class-initialization time from the "os.name" system
// property; treated as case-sensitive when the property is unset.
private static final boolean FILE_IGNORE_CASE =
    SystemProperties.getProperty("os.name", "unknown").startsWith("Windows");
/**
 * Converts a full path to a relative path if possible
 *
 * @param srcFile path to convert
 * @param base    base directory the result should be relative to
 * @return the converted filename (original case preserved)
 */
private String convertToRelative(String srcFile, String base) {
    String slash = SystemProperties.getProperty("file.separator");
    // Compare case-insensitively on Windows, but do all comparisons on
    // normalized COPIES: the previous code lowercased srcFile itself and
    // returned substrings of it, so Windows users got all-lowercase paths.
    String srcCmp = FILE_IGNORE_CASE ? srcFile.toLowerCase() : srcFile;
    String baseCmp = FILE_IGNORE_CASE ? base.toLowerCase() : base;
    if (baseCmp.equals(srcCmp)) {
        return ".";
    }
    if (!baseCmp.endsWith(slash)) {
        baseCmp = baseCmp + slash;
    }
    if (baseCmp.length() <= srcCmp.length()) {
        String root = srcCmp.substring(0, baseCmp.length());
        if (root.equals(baseCmp)) {
            // Strip off the base directory, make relative
            return "." + slash + srcFile.substring(baseCmp.length());
        }
    }
    // See if we can build a relative path above the base using .. notation
    int slashPos = srcCmp.indexOf(slash);
    int branchPoint;
    if (slashPos >= 0) {
        String subPath = srcCmp.substring(0, slashPos);
        if ((subPath.length() == 0) || baseCmp.startsWith(subPath)) {
            // Walk forward while srcFile and base share leading components.
            branchPoint = slashPos + 1;
            slashPos = srcCmp.indexOf(slash, branchPoint);
            while (slashPos >= 0) {
                subPath = srcCmp.substring(0, slashPos);
                if (baseCmp.startsWith(subPath)) {
                    branchPoint = slashPos + 1;
                } else {
                    break;
                }
                slashPos = srcCmp.indexOf(slash, branchPoint);
            }
            // Each directory the base has beyond the branch point becomes
            // one "../" step.
            int slashCount = 0;
            slashPos = baseCmp.indexOf(slash, branchPoint);
            while (slashPos >= 0) {
                slashCount++;
                slashPos = baseCmp.indexOf(slash, slashPos + 1);
            }
            StringBuilder path = new StringBuilder();
            String upDir = ".." + slash;
            for (int i = 0; i < slashCount; i++) {
                path.append(upDir);
            }
            path.append(srcFile.substring(branchPoint));
            return path.toString();
        }
    }
    return srcFile;
}
/**
 * Converts a relative path to an absolute path if possible.
 *
 * @param fileName path to convert
 * @return the converted filename
 * @throws IOException if the path cannot be canonicalized
 */
private String convertToAbsolute(String fileName) throws IOException {
    // At present relative paths are only resolved against the directory
    // containing the project file. This need not be the case, and we
    // could use ".." syntax to move up the tree. (To Be Added)
    File file = new File(fileName);
    if (file.isAbsolute()) {
        return fileName;
    }
    File projectFile = new File(projectFileName);
    // Only try to make the relative path absolute
    // if the project file itself is absolute.
    if (!projectFile.isAbsolute()) {
        return fileName;
    }
    // Resolve against the parent directory of the project file.
    String base = new File(projectFileName).getParent();
    return new File(base, fileName).getCanonicalPath();
}
/**
 * Make the given filename absolute relative to the
 * current working directory.
 *
 * <p>Delegates to {@code makeAbsoluteCwdCandidates(String)} and keeps the
 * first candidate; that list is documented to be non-empty.
 */
private String makeAbsoluteCWD(String fileName) {
    List<String> candidates = makeAbsoluteCwdCandidates(fileName);
    return candidates.get(0);
}
/**
 * Make the given filename absolute relative to the current working directory candidates.
 *
 * If the given filename exists in more than one of the working directories, a list of
 * these existing absolute paths is returned.
 *
 * The returned list is guaranteed to be non-empty.
 * The returned paths might exist or not exist and might be relative or absolute.
 *
 * @return A list of at least one candidate path for the given filename.
 */
private List<String> makeAbsoluteCwdCandidates(String fileName) {
    List<String> candidates = new ArrayList<String>();
    // URLs and already-absolute paths are passed through unchanged.
    boolean isUrl = URLClassPath.getURLProtocol(fileName) != null;
    if (isUrl || new File(fileName).isAbsolute()) {
        candidates.add(fileName);
        return candidates;
    }
    // Collect every working directory in which the file actually exists.
    for (File workingDir : currentWorkingDirectoryList) {
        File resolved = new File(workingDir, fileName);
        if (resolved.exists()) {
            candidates.add(resolved.toString());
        }
    }
    // Fall back to the raw name so the result is never empty.
    if (candidates.isEmpty()) {
        candidates.add(fileName);
    }
    return candidates;
}
/**
 * Add a value to given list, making the Project modified
 * if the value is not already present in the list.
 *
 * @param list the list
 * @param value the value to be added
 * @return true if the value was not already present in the list,
 *         false otherwise
 */
private <T> boolean addToListInternal(Collection<T> list, T value) {
    // Guard clause: duplicates leave both the list and the
    // modification flag untouched.
    if (list.contains(value)) {
        return false;
    }
    list.add(value);
    isModified = true;
    return true;
}
/**
 * Make the given list of pathnames absolute relative
 * to the absolute path of the project file.
 *
 * @param list the list of pathnames, rewritten in place
 * @throws IOException if a path cannot be canonicalized
 */
private void makeListAbsoluteProject(List<String> list) throws IOException {
    // Presized ArrayList instead of LinkedList: identical behavior,
    // no per-element node allocation.
    List<String> replace = new ArrayList<String>(list.size());
    for (String fileName : list) {
        replace.add(convertToAbsolute(fileName));
    }
    list.clear();
    list.addAll(replace);
}
/**
 * Unconditionally set the timestamp for analyzed classes.
 *
 * @param timestamp The timestamp to set.
 */
public void setTimestamp(long timestamp) {
    this.timestampForAnalyzedClasses = timestamp;
}
/**
 * Advance the analyzed-classes timestamp to the given value, but only
 * if it is newer than the current value and passes FindBugs' timestamp
 * validity check.
 */
public void addTimestamp(long timestamp) {
    boolean newer = this.timestampForAnalyzedClasses < timestamp;
    if (newer && FindBugs.validTimestamp(timestamp)) {
        this.timestampForAnalyzedClasses = timestamp;
    }
}
/**
 * @return Returns the timestamp for analyzed classes.
 */
public long getTimestamp() {
    return timestampForAnalyzedClasses;
}
/**
 * @param projectName The projectName to set.
 */
public void setProjectName(String projectName) {
    this.projectName = projectName;
}
/**
 * @return Returns the projectName; may be null if never set
 *         (callers such as toString() handle the null case).
 */
public String getProjectName() {
    return projectName;
}
/**
 * @param suppressionFilter The suppressionFilter to set.
 */
public void setSuppressionFilter(Filter suppressionFilter) {
    this.suppressionFilter = suppressionFilter;
}
/**
 * @return Returns the suppressionFilter, lazily creating an empty
 *         Filter on first access; never returns null.
 */
public Filter getSuppressionFilter() {
    if (suppressionFilter == null) {
        suppressionFilter = new Filter();
    }
    return suppressionFilter;
}
/**
 * @param guiCallback the GUI callback to associate with this project
 */
public void setGuiCallback(IGuiCallback guiCallback) {
    this.guiCallback = guiCallback;
}
/**
 * @return the GUI callback associated with this project
 */
public IGuiCallback getGuiCallback() {
    return guiCallback;
}
/**
 * @return source path entries that are readable, with relative entries
 *         resolved against the current working directories; URL entries
 *         are passed through untouched
 */
public Iterable<String> getResolvedSourcePaths() {
    List<String> result = new ArrayList<String>();
    for (String src : srcDirList) {
        // Entries carrying a URL protocol are kept verbatim.
        if (URLClassPath.getURLProtocol(src) != null) {
            result.add(src);
            continue;
        }
        File f = new File(src);
        if (f.isAbsolute() || currentWorkingDirectoryList.isEmpty()) {
            // Absolute (or unresolvable) entries are kept only if readable.
            if (f.canRead()) {
                result.add(src);
            }
            continue;
        }
        // Relative entries: resolve against each readable working directory.
        for (File dir : currentWorkingDirectoryList) {
            if (dir.canRead() && dir.isDirectory()) {
                File resolved = new File(dir, src);
                if (resolved.canRead()) {
                    result.add(resolved.getAbsolutePath());
                }
            }
        }
    }
    return result;
}
}
// vim:ts=4
|
package model;
import processing.core.PApplet;
import processing.core.PConstants;
import processing.core.PFont;
/**
 * Draws, moves and labels an AmigoHomem ("friend man") actor on a
 * Processing sketch, implementing the project's Desenhador (drawer)
 * and Movedor (mover) roles.
 */
public class DesenhistaAmigoHomem implements Desenhador, Movedor {
    // Holds the actor's position (local), velocity (dir) and acceleration (acel).
    VetorComposto vetorComposto;
    AmigoHomem amigoHomem;
    PApplet processing;
    PFont font;

    /**
     * @param processing the Processing sketch used for all rendering calls
     * @param font       font used by escreve() for the text label
     * @param amigoHomem the model object this drawer represents
     */
    public DesenhistaAmigoHomem(PApplet processing, PFont font,
            AmigoHomem amigoHomem) {
        this.font = font;
        this.amigoHomem = amigoHomem;
        this.processing = processing;
        this.vetorComposto = new VetorComposto();
    }

    @Override
    public void display() {
        processing.pushMatrix();
        // NOTE(review): the matrix is translated by `local` AND the shapes
        // below are drawn at `local` coordinates, so everything renders at
        // twice the local offset from the origin -- confirm this is intended.
        processing.translate(vetorComposto.local.x, vetorComposto.local.y);
        /*
         * Shape: triangle
         *
         * Fill: white
         *
         * Stroke: white
         *
         * Stroke weight: 1 pixel wide
         */
        processing.fill(255);
        processing.stroke(255);
        processing.strokeWeight(1);
        processing.triangle(vetorComposto.local.x, vetorComposto.local.y + 3,
                vetorComposto.local.x - 3, vetorComposto.local.y,
                vetorComposto.local.x + 3, vetorComposto.local.y);
        /*
         * Shape: circle
         *
         * Fill: none
         *
         * Stroke: dark blue
         *
         * Stroke weight: 2 pixels wide
         */
        processing.noFill();
        processing.strokeWeight(2);
        processing.stroke(88, 32, 244);
        processing.ellipse(vetorComposto.local.x, vetorComposto.local.y, 20, 20);
        processing.popMatrix();
    }

    @Override
    public void mova() {
        // Advance the position by the velocity and then by the acceleration.
        // NOTE(review): acceleration is added straight to the position rather
        // than to the velocity -- verify this matches the intended motion model.
        vetorComposto.local.add(vetorComposto.dir);
        vetorComposto.local.add(vetorComposto.acel);
    }

    @Override
    public void escreve() {
        // Semi-transparent white label next to the actor; the text is the
        // inherited toString() representation (no override in this class).
        processing.fill(255, 60);
        processing.textAlign(PConstants.LEFT);
        processing.textFont(font);
        processing.text(toString(), vetorComposto.local.x + 12,
                vetorComposto.local.y + 12);
    }
}
|
package v1.rest;
import com.esri.core.geometry.Envelope;
import com.esri.core.geometry.OperatorWithin;
import com.esri.core.geometry.Point;
import com.esri.core.geometry.SpatialReference;
import exceptions.Logging;
import exceptions.RdfException;
import exceptions.ResourceNotAvailableException;
import exceptions.SparqlParseException;
import exceptions.SparqlQueryException;
import exceptions.TransformRdfToApiJsonException;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.eclipse.rdf4j.query.BindingSet;
import org.eclipse.rdf4j.query.MalformedQueryException;
import org.eclipse.rdf4j.query.QueryEvaluationException;
import org.eclipse.rdf4j.repository.RepositoryException;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import rdf.RDF;
import rdf.RDF4J_20;
import v1.utils.config.ConfigProperties;
import v1.utils.transformer.Transformer;
@Path("/search")
public class SearchResource {
@GET
@Produces(MediaType.APPLICATION_JSON + ";charset=UTF-8")
public Response getDatasetsBySearch(
@QueryParam("labels") boolean labels,
@QueryParam("projects") boolean projects,
@QueryParam("lang") String lang,
@QueryParam("project") String project,
@QueryParam("creator") String creator,
@QueryParam("concept") String concept,
@QueryParam("resource") String resource,
@QueryParam("start") String start,
@QueryParam("end") String end,
@QueryParam("lat_min") String lat_min,
@QueryParam("lng_min") String lng_min,
@QueryParam("lat_max") String lat_max,
@QueryParam("lng_max") String lng_max,
@QueryParam("languages") boolean languages,
@QueryParam("geojson") boolean geojson) {
try {
JSONArray outArray = new JSONArray();
JSONObject geoJSON = new JSONObject();
// get datasets for a project
if (project != null) {
RDF rdf = new RDF(ConfigProperties.getPropertyParam("host"));
String query = rdf.getPREFIXSPARQL();
query += "SELECT ?s ?p ?o WHERE { "
+ "?s ?p ?o . "
+ "?s a lsdh:Dataset . "
+ "?s lsdh:project lsdh-p:" + project + " . "
+ " } ";
List<BindingSet> result = RDF4J_20.SPARQLquery(ConfigProperties.getPropertyParam("repository"), ConfigProperties.getPropertyParam("ts_server"), query);
List<String> s = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "s");
List<String> p = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "p");
List<String> o = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "o");
for (int i = 0; i < s.size(); i++) {
rdf.setModelTriple(s.get(i), p.get(i), o.get(i));
}
JSONObject jsonObject = (JSONObject) new JSONParser().parse(rdf.getModel("RDF/JSON"));
Set keys = jsonObject.keySet();
Iterator a = keys.iterator();
while (a.hasNext()) {
String key = (String) a.next();
JSONObject tmpObject = (JSONObject) jsonObject.get(key);
JSONArray idArray = (JSONArray) tmpObject.get(rdf.getPrefixItem("dcterms:identifier"));
JSONObject idObject = (JSONObject) idArray.get(0);
String h = (String) idObject.get("value");
JSONObject tmpObject2 = new JSONObject();
tmpObject2.put(key, tmpObject);
String hh = tmpObject2.toString();
JSONObject tmp = Transformer.dataset_GET(hh, h);
String datasetBody = (String) tmp.get("dataset");
RDF rdf2 = new RDF(ConfigProperties.getPropertyParam("host"));
String query2 = rdf2.getPREFIXSPARQL() + "SELECT * WHERE { <" + datasetBody + "> ?p ?o }";
List<BindingSet> result2 = RDF4J_20.SPARQLquery(ConfigProperties.getPropertyParam("repository"), ConfigProperties.getPropertyParam("ts_server"), query2);
List<String> predicates2 = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result2, "p");
List<String> objects2 = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result2, "o");
if (result2.size() < 1) {
throw new ResourceNotAvailableException("resource " + h + " is not available");
}
for (int i = 0; i < predicates2.size(); i++) {
rdf2.setModelTriple(datasetBody, predicates2.get(i), objects2.get(i));
}
String out2 = Transformer.target_GET(rdf2.getModel("RDF/JSON"), datasetBody).toJSONString();
JSONObject out2Object = (JSONObject) new JSONParser().parse(out2);
tmp.put("title", out2Object.get("title"));
if (out2Object.get("description") != null) {
tmp.put("description", out2Object.get("description"));
}
if (out2Object.get("depiction") != null) {
tmp.put("depiction", out2Object.get("depiction"));
}
if (out2Object.get("coverage") != null) {
tmp.put("coverage", out2Object.get("coverage"));
}
if (out2Object.get("lat") != null) {
tmp.put("lat", out2Object.get("lat"));
}
if (out2Object.get("lng") != null) {
tmp.put("lng", out2Object.get("lng"));
}
if (out2Object.get("temporal") != null) {
tmp.put("temporal", out2Object.get("temporal"));
}
if (out2Object.get("begin") != null) {
tmp.put("begin", out2Object.get("begin"));
}
if (out2Object.get("end") != null) {
tmp.put("end", out2Object.get("end"));
}
outArray.add(tmp);
}
// labels output if requested
if (labels) {
outArray = getLabels(labels, lang, outArray);
} else {
outArray = getProjects(projects, outArray);
}
} else if (creator != null) { // get datasets for a publisher
RDF rdf = new RDF(ConfigProperties.getPropertyParam("host"));
String query = rdf.getPREFIXSPARQL();
query += "SELECT ?s ?p ?o WHERE { "
+ "?s ?p ?o . "
+ "?s a lsdh:Dataset . "
+ "?s lsdh:project ?pro . "
+ "?pro dcelements:creator \"" + creator + "\" . "
+ " } ";
List<BindingSet> result = RDF4J_20.SPARQLquery(ConfigProperties.getPropertyParam("repository"), ConfigProperties.getPropertyParam("ts_server"), query);
List<String> s = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "s");
List<String> p = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "p");
List<String> o = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "o");
for (int i = 0; i < s.size(); i++) {
rdf.setModelTriple(s.get(i), p.get(i), o.get(i));
}
JSONObject jsonObject = (JSONObject) new JSONParser().parse(rdf.getModel("RDF/JSON"));
Set keys = jsonObject.keySet();
Iterator a = keys.iterator();
while (a.hasNext()) {
String key = (String) a.next();
JSONObject tmpObject = (JSONObject) jsonObject.get(key);
JSONArray idArray = (JSONArray) tmpObject.get(rdf.getPrefixItem("dcterms:identifier"));
JSONObject idObject = (JSONObject) idArray.get(0);
String h = (String) idObject.get("value");
JSONObject tmpObject2 = new JSONObject();
tmpObject2.put(key, tmpObject);
String hh = tmpObject2.toString();
JSONObject tmp = Transformer.dataset_GET(hh, h);
String datasetBody = (String) tmp.get("dataset");
RDF rdf2 = new RDF(ConfigProperties.getPropertyParam("host"));
String query2 = rdf2.getPREFIXSPARQL() + "SELECT * WHERE { <" + datasetBody + "> ?p ?o }";
List<BindingSet> result2 = RDF4J_20.SPARQLquery(ConfigProperties.getPropertyParam("repository"), ConfigProperties.getPropertyParam("ts_server"), query2);
List<String> predicates2 = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result2, "p");
List<String> objects2 = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result2, "o");
if (result2.size() < 1) {
throw new ResourceNotAvailableException("resource " + h + " is not available");
}
for (int i = 0; i < predicates2.size(); i++) {
rdf2.setModelTriple(datasetBody, predicates2.get(i), objects2.get(i));
}
String out2 = Transformer.target_GET(rdf2.getModel("RDF/JSON"), datasetBody).toJSONString();
JSONObject out2Object = (JSONObject) new JSONParser().parse(out2);
tmp.put("title", out2Object.get("title"));
if (out2Object.get("description") != null) {
tmp.put("description", out2Object.get("description"));
}
if (out2Object.get("depiction") != null) {
tmp.put("depiction", out2Object.get("depiction"));
}
if (out2Object.get("coverage") != null) {
tmp.put("coverage", out2Object.get("coverage"));
}
if (out2Object.get("lat") != null) {
tmp.put("lat", out2Object.get("lat"));
}
if (out2Object.get("lng") != null) {
tmp.put("lng", out2Object.get("lng"));
}
if (out2Object.get("temporal") != null) {
tmp.put("temporal", out2Object.get("temporal"));
}
if (out2Object.get("begin") != null) {
tmp.put("begin", out2Object.get("begin"));
}
if (out2Object.get("end") != null) {
tmp.put("end", out2Object.get("end"));
}
outArray.add(tmp);
}
// labels output if requested
if (labels) {
outArray = getLabels(labels, lang, outArray);
} else {
outArray = getProjects(projects, outArray);
}
} else if (concept != null) { // get datasets for a specific concept
RDF rdf = new RDF(ConfigProperties.getPropertyParam("host"));
String query = rdf.getPREFIXSPARQL();
query += "SELECT ?s ?p ?o WHERE { "
+ "?s ?p ?o . "
+ "?s a lsdh:Dataset . "
+ "?s oa:hasBody <" + concept + "> . "
+ " } ";
List<BindingSet> result = RDF4J_20.SPARQLquery(ConfigProperties.getPropertyParam("repository"), ConfigProperties.getPropertyParam("ts_server"), query);
List<String> s = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "s");
List<String> p = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "p");
List<String> o = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "o");
for (int i = 0; i < s.size(); i++) {
rdf.setModelTriple(s.get(i), p.get(i), o.get(i));
}
JSONObject jsonObject = (JSONObject) new JSONParser().parse(rdf.getModel("RDF/JSON"));
Set keys = jsonObject.keySet();
Iterator a = keys.iterator();
while (a.hasNext()) {
String key = (String) a.next();
JSONObject tmpObject = (JSONObject) jsonObject.get(key);
JSONArray idArray = (JSONArray) tmpObject.get(rdf.getPrefixItem("dcterms:identifier"));
JSONObject idObject = (JSONObject) idArray.get(0);
String h = (String) idObject.get("value");
JSONObject tmpObject2 = new JSONObject();
tmpObject2.put(key, tmpObject);
String hh = tmpObject2.toString();
JSONObject tmp = Transformer.dataset_GET(hh, h);
String datasetBody = (String) tmp.get("dataset");
RDF rdf2 = new RDF(ConfigProperties.getPropertyParam("host"));
String query2 = rdf2.getPREFIXSPARQL() + "SELECT * WHERE { <" + datasetBody + "> ?p ?o }";
List<BindingSet> result2 = RDF4J_20.SPARQLquery(ConfigProperties.getPropertyParam("repository"), ConfigProperties.getPropertyParam("ts_server"), query2);
List<String> predicates2 = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result2, "p");
List<String> objects2 = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result2, "o");
if (result2.size() < 1) {
throw new ResourceNotAvailableException("resource " + h + " is not available");
}
for (int i = 0; i < predicates2.size(); i++) {
rdf2.setModelTriple(datasetBody, predicates2.get(i), objects2.get(i));
}
String out2 = Transformer.target_GET(rdf2.getModel("RDF/JSON"), datasetBody).toJSONString();
JSONObject out2Object = (JSONObject) new JSONParser().parse(out2);
tmp.put("title", out2Object.get("title"));
if (out2Object.get("description") != null) {
tmp.put("description", out2Object.get("description"));
}
if (out2Object.get("depiction") != null) {
tmp.put("depiction", out2Object.get("depiction"));
}
if (out2Object.get("coverage") != null) {
tmp.put("coverage", out2Object.get("coverage"));
}
if (out2Object.get("lat") != null) {
tmp.put("lat", out2Object.get("lat"));
}
if (out2Object.get("lng") != null) {
tmp.put("lng", out2Object.get("lng"));
}
if (out2Object.get("temporal") != null) {
tmp.put("temporal", out2Object.get("temporal"));
}
if (out2Object.get("begin") != null) {
tmp.put("begin", out2Object.get("begin"));
}
if (out2Object.get("end") != null) {
tmp.put("end", out2Object.get("end"));
}
outArray.add(tmp);
}
// labels output if requested
if (labels) {
outArray = getLabels(labels, lang, outArray);
} else {
outArray = getProjects(projects, outArray);
}
} else if (resource != null) { // get datasets for a specific resource related to a concept
RDF rdf = new RDF(ConfigProperties.getPropertyParam("host"));
// query labeling system of concepts related to this resource
String query = rdf.getPREFIXSPARQL() + "SELECT ?concept WHERE { ?concept ?p <" + resource + "> }";
List<BindingSet> result = RDF4J_20.SPARQLquery("labelingsystem", ConfigProperties.getPropertyParam("ts_server"), query);
List<String> conceptsForResource = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "concept");
// query for datasets with this concepts
query = rdf.getPREFIXSPARQL();
query += "SELECT ?s ?p ?o WHERE { "
+ "?s ?p ?o . "
+ "?s a lsdh:Dataset . "
+ "?s oa:hasBody ?uri . "
+ "FILTER ( ";
for (String item : conceptsForResource) {
query += "?uri=<" + item + "> || ";
}
query = query.substring(0, query.length() - 3);
query += ") ";
query += "}";
result = RDF4J_20.SPARQLquery(ConfigProperties.getPropertyParam("repository"), ConfigProperties.getPropertyParam("ts_server"), query);
List<String> s = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "s");
List<String> p = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "p");
List<String> o = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "o");
for (int i = 0; i < s.size(); i++) {
rdf.setModelTriple(s.get(i), p.get(i), o.get(i));
}
JSONObject jsonObject = (JSONObject) new JSONParser().parse(rdf.getModel("RDF/JSON"));
Set keys = jsonObject.keySet();
Iterator a = keys.iterator();
while (a.hasNext()) {
String key = (String) a.next();
JSONObject tmpObject = (JSONObject) jsonObject.get(key);
JSONArray idArray = (JSONArray) tmpObject.get(rdf.getPrefixItem("dcterms:identifier"));
JSONObject idObject = (JSONObject) idArray.get(0);
String h = (String) idObject.get("value");
JSONObject tmpObject2 = new JSONObject();
tmpObject2.put(key, tmpObject);
String hh = tmpObject2.toString();
JSONObject tmp = Transformer.dataset_GET(hh, h);
String datasetBody = (String) tmp.get("dataset");
RDF rdf2 = new RDF(ConfigProperties.getPropertyParam("host"));
String query2 = rdf2.getPREFIXSPARQL() + "SELECT * WHERE { <" + datasetBody + "> ?p ?o }";
List<BindingSet> result2 = RDF4J_20.SPARQLquery(ConfigProperties.getPropertyParam("repository"), ConfigProperties.getPropertyParam("ts_server"), query2);
List<String> predicates2 = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result2, "p");
List<String> objects2 = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result2, "o");
if (result2.size() < 1) {
throw new ResourceNotAvailableException("resource " + h + " is not available");
}
for (int i = 0; i < predicates2.size(); i++) {
rdf2.setModelTriple(datasetBody, predicates2.get(i), objects2.get(i));
}
String out2 = Transformer.target_GET(rdf2.getModel("RDF/JSON"), datasetBody).toJSONString();
JSONObject out2Object = (JSONObject) new JSONParser().parse(out2);
tmp.put("title", out2Object.get("title"));
if (out2Object.get("description") != null) {
tmp.put("description", out2Object.get("description"));
}
if (out2Object.get("depiction") != null) {
tmp.put("depiction", out2Object.get("depiction"));
}
if (out2Object.get("coverage") != null) {
tmp.put("coverage", out2Object.get("coverage"));
}
if (out2Object.get("lat") != null) {
tmp.put("lat", out2Object.get("lat"));
}
if (out2Object.get("lng") != null) {
tmp.put("lng", out2Object.get("lng"));
}
if (out2Object.get("temporal") != null) {
tmp.put("temporal", out2Object.get("temporal"));
}
if (out2Object.get("begin") != null) {
tmp.put("begin", out2Object.get("begin"));
}
if (out2Object.get("end") != null) {
tmp.put("end", out2Object.get("end"));
}
outArray.add(tmp);
}
// labels output if requested
if (labels) {
outArray = getLabels(labels, lang, outArray);
} else {
outArray = getProjects(projects, outArray);
}
} else if (start != null && end != null) { // get datasets for a timespan
RDF rdf = new RDF(ConfigProperties.getPropertyParam("host"));
String query = rdf.getPREFIXSPARQL();
query += "SELECT ?s ?p ?o WHERE { "
+ "?s ?p ?o . "
+ "?s a lsdh:Dataset . "
+ "?s oa:hasTarget ?t . "
+ "?t prov:startedAtTime ?start . "
+ "?t prov:endedAtTime ?end . "
+ "FILTER (xsd:integer(?start) > " + start + " && xsd:integer(?end) < " + end + ") "
+ " } ";
List<BindingSet> result = RDF4J_20.SPARQLquery(ConfigProperties.getPropertyParam("repository"), ConfigProperties.getPropertyParam("ts_server"), query);
List<String> s = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "s");
List<String> p = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "p");
List<String> o = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "o");
for (int i = 0; i < s.size(); i++) {
rdf.setModelTriple(s.get(i), p.get(i), o.get(i));
}
JSONObject jsonObject = (JSONObject) new JSONParser().parse(rdf.getModel("RDF/JSON"));
Set keys = jsonObject.keySet();
Iterator a = keys.iterator();
while (a.hasNext()) {
String key = (String) a.next();
JSONObject tmpObject = (JSONObject) jsonObject.get(key);
JSONArray idArray = (JSONArray) tmpObject.get(rdf.getPrefixItem("dcterms:identifier"));
JSONObject idObject = (JSONObject) idArray.get(0);
String h = (String) idObject.get("value");
JSONObject tmpObject2 = new JSONObject();
tmpObject2.put(key, tmpObject);
String hh = tmpObject2.toString();
JSONObject tmp = Transformer.dataset_GET(hh, h);
String datasetBody = (String) tmp.get("dataset");
RDF rdf2 = new RDF(ConfigProperties.getPropertyParam("host"));
String query2 = rdf2.getPREFIXSPARQL() + "SELECT * WHERE { <" + datasetBody + "> ?p ?o }";
List<BindingSet> result2 = RDF4J_20.SPARQLquery(ConfigProperties.getPropertyParam("repository"), ConfigProperties.getPropertyParam("ts_server"), query2);
List<String> predicates2 = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result2, "p");
List<String> objects2 = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result2, "o");
if (result2.size() < 1) {
throw new ResourceNotAvailableException("resource " + h + " is not available");
}
for (int i = 0; i < predicates2.size(); i++) {
rdf2.setModelTriple(datasetBody, predicates2.get(i), objects2.get(i));
}
String out2 = Transformer.target_GET(rdf2.getModel("RDF/JSON"), datasetBody).toJSONString();
JSONObject out2Object = (JSONObject) new JSONParser().parse(out2);
tmp.put("title", out2Object.get("title"));
if (out2Object.get("description") != null) {
tmp.put("description", out2Object.get("description"));
}
if (out2Object.get("depiction") != null) {
tmp.put("depiction", out2Object.get("depiction"));
}
if (out2Object.get("coverage") != null) {
tmp.put("coverage", out2Object.get("coverage"));
}
if (out2Object.get("lat") != null) {
tmp.put("lat", out2Object.get("lat"));
}
if (out2Object.get("lng") != null) {
tmp.put("lng", out2Object.get("lng"));
}
if (out2Object.get("temporal") != null) {
tmp.put("temporal", out2Object.get("temporal"));
}
if (out2Object.get("begin") != null) {
tmp.put("begin", out2Object.get("begin"));
}
if (out2Object.get("end") != null) {
tmp.put("end", out2Object.get("end"));
}
outArray.add(tmp);
}
// labels output if requested
if (labels) {
outArray = getLabels(labels, lang, outArray);
} else {
outArray = getProjects(projects, outArray);
}
} else if (lat_min != null && lng_min != null && lat_max != null && lng_max != null) { // get datasets for envelope
RDF rdf = new RDF(ConfigProperties.getPropertyParam("host"));
String query = rdf.getPREFIXSPARQL();
query += "SELECT ?s ?p ?o WHERE { "
+ "?s ?p ?o . "
+ "?s a lsdh:Dataset . "
+ " } ";
List<BindingSet> result = RDF4J_20.SPARQLquery(ConfigProperties.getPropertyParam("repository"), ConfigProperties.getPropertyParam("ts_server"), query);
List<String> s = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "s");
List<String> p = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "p");
List<String> o = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "o");
for (int i = 0; i < s.size(); i++) {
rdf.setModelTriple(s.get(i), p.get(i), o.get(i));
}
JSONObject jsonObject = (JSONObject) new JSONParser().parse(rdf.getModel("RDF/JSON"));
Set keys = jsonObject.keySet();
Iterator a = keys.iterator();
while (a.hasNext()) {
String key = (String) a.next();
JSONObject tmpObject = (JSONObject) jsonObject.get(key);
JSONArray idArray = (JSONArray) tmpObject.get(rdf.getPrefixItem("dcterms:identifier"));
JSONObject idObject = (JSONObject) idArray.get(0);
String h = (String) idObject.get("value");
JSONObject tmpObject2 = new JSONObject();
tmpObject2.put(key, tmpObject);
String hh = tmpObject2.toString();
JSONObject tmp = Transformer.dataset_GET(hh, h);
String datasetBody = (String) tmp.get("dataset");
RDF rdf2 = new RDF(ConfigProperties.getPropertyParam("host"));
String query2 = rdf2.getPREFIXSPARQL() + "SELECT * WHERE { <" + datasetBody + "> ?p ?o }";
List<BindingSet> result2 = RDF4J_20.SPARQLquery(ConfigProperties.getPropertyParam("repository"), ConfigProperties.getPropertyParam("ts_server"), query2);
List<String> predicates2 = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result2, "p");
List<String> objects2 = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result2, "o");
if (result2.size() < 1) {
throw new ResourceNotAvailableException("resource " + h + " is not available");
}
for (int i = 0; i < predicates2.size(); i++) {
rdf2.setModelTriple(datasetBody, predicates2.get(i), objects2.get(i));
}
String out2 = Transformer.target_GET(rdf2.getModel("RDF/JSON"), datasetBody).toJSONString();
JSONObject out2Object = (JSONObject) new JSONParser().parse(out2);
tmp.put("title", out2Object.get("title"));
if (out2Object.get("description") != null) {
tmp.put("description", out2Object.get("description"));
}
if (out2Object.get("depiction") != null) {
tmp.put("depiction", out2Object.get("depiction"));
}
if (out2Object.get("coverage") != null) {
tmp.put("coverage", out2Object.get("coverage"));
}
if (out2Object.get("lat") != null) {
tmp.put("lat", out2Object.get("lat"));
}
if (out2Object.get("lng") != null) {
tmp.put("lng", out2Object.get("lng"));
}
if (out2Object.get("temporal") != null) {
tmp.put("temporal", out2Object.get("temporal"));
}
if (out2Object.get("begin") != null) {
tmp.put("begin", out2Object.get("begin"));
}
if (out2Object.get("end") != null) {
tmp.put("end", out2Object.get("end"));
}
outArray.add(tmp);
}
JSONArray outArray2 = new JSONArray();
for (Object item : outArray) {
JSONObject tmp = (JSONObject) item;
String lat = (String) tmp.get("lat");
String lng = (String) tmp.get("lng");
if (lat != null && lng != null) {
SpatialReference spatialRef = SpatialReference.create(4326);
Point center = new Point(Double.parseDouble(lat), Double.parseDouble(lng));
// envelope
/* Another way to create an envelope is to specify the X and Y extents.
* xmin: Minimum X-coordinate, ymin: Minimum Y-coordinate,
* xmax: Maximum X-coordinate and ymax: Maximum Y-coordinate
*/
//Envelope env1 = new Envelope(xmin, ymin, xmax, ymax);
// x = lat - y = lng
//Envelope env1 = new Envelope(0, 0, 30.5, 30.5);
Envelope env1 = new Envelope(Double.parseDouble(lat_min), Double.parseDouble(lng_min), Double.parseDouble(lat_max), Double.parseDouble(lng_max));
boolean isWithin = OperatorWithin.local().execute(center, env1, spatialRef, null);
if (isWithin) {
outArray2.add(tmp);
}
}
}
// labels output if requested
if (labels) {
outArray = getLabels(labels, lang, outArray2);
} else {
outArray = getProjects(projects, outArray2);
}
} else if (languages) {
RDF rdf = new RDF(ConfigProperties.getPropertyParam("host"));
String query = rdf.getPREFIXSPARQL();
query += "SELECT ?s ?p ?o WHERE { "
+ "?s ?p ?o . "
+ "?s a lsdh:Dataset . "
+ " } ";
List<BindingSet> result = RDF4J_20.SPARQLquery(ConfigProperties.getPropertyParam("repository"), ConfigProperties.getPropertyParam("ts_server"), query);
List<String> s = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "s");
List<String> p = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "p");
List<String> o = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "o");
for (int i = 0; i < s.size(); i++) {
rdf.setModelTriple(s.get(i), p.get(i), o.get(i));
}
JSONObject jsonObject = (JSONObject) new JSONParser().parse(rdf.getModel("RDF/JSON"));
Set keys = jsonObject.keySet();
Iterator a = keys.iterator();
while (a.hasNext()) {
String key = (String) a.next();
JSONObject tmpObject = (JSONObject) jsonObject.get(key);
JSONArray idArray = (JSONArray) tmpObject.get(rdf.getPrefixItem("dcterms:identifier"));
JSONObject idObject = (JSONObject) idArray.get(0);
String h = (String) idObject.get("value");
JSONObject tmpObject2 = new JSONObject();
tmpObject2.put(key, tmpObject);
String hh = tmpObject2.toString();
JSONObject tmp = Transformer.dataset_GET(hh, h);
String datasetBody = (String) tmp.get("dataset");
RDF rdf2 = new RDF(ConfigProperties.getPropertyParam("host"));
String query2 = rdf2.getPREFIXSPARQL() + "SELECT * WHERE { <" + datasetBody + "> ?p ?o }";
List<BindingSet> result2 = RDF4J_20.SPARQLquery(ConfigProperties.getPropertyParam("repository"), ConfigProperties.getPropertyParam("ts_server"), query2);
List<String> predicates2 = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result2, "p");
List<String> objects2 = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result2, "o");
if (result2.size() < 1) {
throw new ResourceNotAvailableException("resource " + h + " is not available");
}
for (int i = 0; i < predicates2.size(); i++) {
rdf2.setModelTriple(datasetBody, predicates2.get(i), objects2.get(i));
}
String out2 = Transformer.target_GET(rdf2.getModel("RDF/JSON"), datasetBody).toJSONString();
JSONObject out2Object = (JSONObject) new JSONParser().parse(out2);
tmp.put("title", out2Object.get("title"));
if (out2Object.get("description") != null) {
tmp.put("description", out2Object.get("description"));
}
if (out2Object.get("depiction") != null) {
tmp.put("depiction", out2Object.get("depiction"));
}
if (out2Object.get("coverage") != null) {
tmp.put("coverage", out2Object.get("coverage"));
}
if (out2Object.get("lat") != null) {
tmp.put("lat", out2Object.get("lat"));
}
if (out2Object.get("lng") != null) {
tmp.put("lng", out2Object.get("lng"));
}
if (out2Object.get("temporal") != null) {
tmp.put("temporal", out2Object.get("temporal"));
}
if (out2Object.get("begin") != null) {
tmp.put("begin", out2Object.get("begin"));
}
if (out2Object.get("end") != null) {
tmp.put("end", out2Object.get("end"));
}
outArray.add(tmp);
}
// labels output if requested
outArray = getLabels(true, null, outArray);
// get set with used languages
HashSet<String> uniqueLanguages = new HashSet();
for (Object item : outArray) {
JSONObject tmp = (JSONObject) item;
uniqueLanguages.add((String) tmp.get("lang"));
}
// load languages json
BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(SearchResource.class.getClassLoader().getResource("languages.json").getFile()), "UTF8"));
String inputLine;
StringBuilder response = new StringBuilder();
while ((inputLine = br.readLine()) != null) {
response.append(inputLine);
}
br.close();
JSONArray languagesArray = (JSONArray) new JSONParser().parse(response.toString());
// get long version for language
outArray.clear();
for (String tmpLang : uniqueLanguages) {
for (Object tmpLangObj : languagesArray) {
JSONObject tmp = (JSONObject) tmpLangObj;
if (tmpLang.equals(tmp.get("value"))) {
outArray.add(tmpLangObj);
}
}
}
// sort array
JSONArray sortedJsonArray = new JSONArray();
List<JSONObject> jsonValues = new ArrayList();
for (int i = 0; i < outArray.size(); i++) {
jsonValues.add((JSONObject) outArray.get(i));
}
Collections.sort(jsonValues, new Comparator<JSONObject>() {
private static final String KEY_NAME = "name";
@Override
public int compare(JSONObject a, JSONObject b) {
String valA = new String();
String valB = new String();
valA = (String) a.get(KEY_NAME);
valB = (String) b.get(KEY_NAME);
return valA.compareTo(valB);
}
});
for (int i = 0; i < outArray.size(); i++) {
sortedJsonArray.add(jsonValues.get(i));
}
outArray = sortedJsonArray;
} else if (geojson) {
RDF rdf = new RDF(ConfigProperties.getPropertyParam("host"));
String query = rdf.getPREFIXSPARQL();
query += "SELECT DISTINCT ?lat ?lng WHERE { "
+ "?s ?p ?o . "
+ "?s a lsdh:Dataset . "
+ "?s oa:hasTarget ?t . "
+ "?t geo:lat ?lat . "
+ "?t geo:long ?lng . "
+ " } ";
List<BindingSet> result = RDF4J_20.SPARQLquery(ConfigProperties.getPropertyParam("repository"), ConfigProperties.getPropertyParam("ts_server"), query);
List<String> lat = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "lat");
List<String> lng = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "lng");
// create geojson
geoJSON.put("type", "FeatureCollection");
JSONArray features = new JSONArray();
for (int i = 0; i < lat.size(); i++) {
JSONObject feature = new JSONObject();
feature.put("type", "Feature");
JSONObject geometry = new JSONObject();
geometry.put("type", "Point");
JSONArray point = new JSONArray();
Double latDbl = Double.parseDouble(lng.get(i));
Double lngDbl = Double.parseDouble(lat.get(i));
point.add(latDbl);
point.add(lngDbl);
geometry.put("coordinates", point);
feature.put("geometry", geometry);
JSONObject properties = new JSONObject();
feature.put("properties", properties);
features.add(feature);
}
geoJSON.put("features", features);
} else {
RDF rdf = new RDF(ConfigProperties.getPropertyParam("host"));
String query = rdf.getPREFIXSPARQL();
query += "SELECT ?s ?p ?o WHERE { "
+ "?s ?p ?o . "
+ "?s a lsdh:Dataset . "
+ " } ";
List<BindingSet> result = RDF4J_20.SPARQLquery(ConfigProperties.getPropertyParam("repository"), ConfigProperties.getPropertyParam("ts_server"), query);
List<String> s = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "s");
List<String> p = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "p");
List<String> o = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "o");
for (int i = 0; i < s.size(); i++) {
rdf.setModelTriple(s.get(i), p.get(i), o.get(i));
}
JSONObject jsonObject = (JSONObject) new JSONParser().parse(rdf.getModel("RDF/JSON"));
Set keys = jsonObject.keySet();
Iterator a = keys.iterator();
while (a.hasNext()) {
String key = (String) a.next();
JSONObject tmpObject = (JSONObject) jsonObject.get(key);
JSONArray idArray = (JSONArray) tmpObject.get(rdf.getPrefixItem("dcterms:identifier"));
JSONObject idObject = (JSONObject) idArray.get(0);
String h = (String) idObject.get("value");
JSONObject tmpObject2 = new JSONObject();
tmpObject2.put(key, tmpObject);
String hh = tmpObject2.toString();
JSONObject tmp = Transformer.dataset_GET(hh, h);
String datasetBody = (String) tmp.get("dataset");
RDF rdf2 = new RDF(ConfigProperties.getPropertyParam("host"));
String query2 = rdf2.getPREFIXSPARQL() + "SELECT * WHERE { <" + datasetBody + "> ?p ?o }";
List<BindingSet> result2 = RDF4J_20.SPARQLquery(ConfigProperties.getPropertyParam("repository"), ConfigProperties.getPropertyParam("ts_server"), query2);
List<String> predicates2 = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result2, "p");
List<String> objects2 = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result2, "o");
if (result2.size() < 1) {
throw new ResourceNotAvailableException("resource " + h + " is not available");
}
for (int i = 0; i < predicates2.size(); i++) {
rdf2.setModelTriple(datasetBody, predicates2.get(i), objects2.get(i));
}
String out2 = Transformer.target_GET(rdf2.getModel("RDF/JSON"), datasetBody).toJSONString();
JSONObject out2Object = (JSONObject) new JSONParser().parse(out2);
tmp.put("title", out2Object.get("title"));
if (out2Object.get("description") != null) {
tmp.put("description", out2Object.get("description"));
}
if (out2Object.get("depiction") != null) {
tmp.put("depiction", out2Object.get("depiction"));
}
if (out2Object.get("coverage") != null) {
tmp.put("coverage", out2Object.get("coverage"));
}
if (out2Object.get("lat") != null) {
tmp.put("lat", out2Object.get("lat"));
}
if (out2Object.get("lng") != null) {
tmp.put("lng", out2Object.get("lng"));
}
if (out2Object.get("temporal") != null) {
tmp.put("temporal", out2Object.get("temporal"));
}
if (out2Object.get("begin") != null) {
tmp.put("begin", out2Object.get("begin"));
}
if (out2Object.get("end") != null) {
tmp.put("end", out2Object.get("end"));
}
outArray.add(tmp);
}
// labels output if requested
if (labels) {
outArray = getLabels(labels, lang, outArray);
} else {
outArray = getProjects(projects, outArray);
}
}
if (!geoJSON.isEmpty()) {
return Response.ok(geoJSON).header("Content-Type", "application/json;charset=UTF-8").build();
} else {
return Response.ok(outArray).header("Content-Type", "application/json;charset=UTF-8").build();
}
} catch (Exception e) {
return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(Logging.getMessageJSON(e, "v1.rest.SearchResource"))
.header("Content-Type", "application/json;charset=UTF-8").build();
}
}
private static JSONArray getLabels(boolean labels, String lang, JSONArray outArray) throws IOException, RepositoryException, MalformedQueryException, QueryEvaluationException, SparqlQueryException, SparqlParseException {
if (labels && !outArray.isEmpty()) {
List<String> concepts = new ArrayList();
for (Object tmp : outArray) {
JSONObject outObj = (JSONObject) tmp;
concepts.add((String) outObj.get("label"));
}
// get labels from labeling system
outArray.clear();
String sparql = "PREFIX skos: <http://www.w3.org/2004/02/skos/core
+ "SELECT DISTINCT ?uri ?pl WHERE { "
+ "?uri skos:prefLabel ?pl . "
+ "FILTER ( ";
for (String item : concepts) {
sparql += " ?uri=<" + item + "> || ";
}
sparql = sparql.substring(0, sparql.length() - 3);
sparql += ") ";
if (lang != null) {
sparql += "FILTER(LANGMATCHES(LANG(?pl), \"" + lang + "\")) ";
}
sparql += "}";
List<BindingSet> result = RDF4J_20.SPARQLquery("labelingsystem", ConfigProperties.getPropertyParam("ts_server"), sparql);
List<String> uris = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "uri");
List<String> pls = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "pl");
for (int i = 0; i < uris.size(); i++) {
JSONObject labelObj = new JSONObject();
labelObj.put("uri", uris.get(i));
labelObj.put("value", pls.get(i).split("@")[0].replace("\"", ""));
labelObj.put("lang", pls.get(i).split("@")[1]);
// count appearance of uri in datahub
int count = 0;
for (String conceptURI : concepts) {
int z = 0;
if (conceptURI.equals(uris.get(i))) {
count++;
}
}
labelObj.put("datasets", count);
outArray.add(labelObj);
}
// sort array
JSONArray sortedJsonArray = new JSONArray();
List<JSONObject> jsonValues = new ArrayList();
for (int i = 0; i < outArray.size(); i++) {
jsonValues.add((JSONObject) outArray.get(i));
}
Collections.sort(jsonValues, new Comparator<JSONObject>() {
private static final String KEY_NAME = "value";
@Override
public int compare(JSONObject a, JSONObject b) {
String valA = new String();
String valB = new String();
valA = (String) a.get(KEY_NAME);
valB = (String) b.get(KEY_NAME);
return valA.compareTo(valB);
}
});
for (int i = 0; i < outArray.size(); i++) {
sortedJsonArray.add(jsonValues.get(i));
}
outArray = sortedJsonArray;
}
return outArray;
}
/**
 * Resolves the projects referenced by the datasets in {@code outArray} and returns
 * them as JSON objects sorted by title.
 *
 * When {@code projects} is true and the array is non-empty, the array contents are
 * replaced by the lsdh:Project resources whose dcterms:identifier matches one of the
 * datasets' "project" values; otherwise the input objects are kept as-is. In both
 * cases the result is sorted by its "title" entry.
 *
 * @param projects whether project resolution was requested
 * @param outArray dataset JSON objects carrying a "project" identifier
 * @return JSON array of project (or input) objects sorted by title
 * @throws ResourceNotAvailableException when no matching project resource exists
 */
private static JSONArray getProjects(boolean projects, JSONArray outArray) throws IOException, RepositoryException, MalformedQueryException, QueryEvaluationException, SparqlQueryException, SparqlParseException, ResourceNotAvailableException, RdfException, ParseException, TransformRdfToApiJsonException {
    if (projects && !outArray.isEmpty()) {
        // collect the distinct project identifiers referenced by the datasets
        HashSet<String> project = new HashSet<>();
        for (Object tmp : outArray) {
            JSONObject outObj = (JSONObject) tmp;
            project.add((String) outObj.get("project"));
        }
        outArray.clear();
        // query all triples of the matching lsdh:Project resources
        RDF rdf = new RDF(ConfigProperties.getPropertyParam("host"));
        String query = rdf.getPREFIXSPARQL();
        query += "SELECT ?s ?p ?o WHERE { "
                + "?s ?p ?o . "
                + "?s a lsdh:Project . "
                + "?s dcterms:identifier ?identifier . "
                + "FILTER ( ";
        for (String item : project) {
            query += " ?identifier=\"" + item + "\" || ";
        }
        // strip the trailing "|| " separator left by the loop
        query = query.substring(0, query.length() - 3);
        query += ") ";
        query += "}";
        List<BindingSet> result = RDF4J_20.SPARQLquery(ConfigProperties.getPropertyParam("repository"), ConfigProperties.getPropertyParam("ts_server"), query);
        List<String> s = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "s");
        List<String> p = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "p");
        List<String> o = RDF4J_20.getValuesFromBindingSet_ORDEREDLIST(result, "o");
        if (result.size() < 1) {
            throw new ResourceNotAvailableException("resource is not available");
        }
        // rebuild an RDF model from the triples and convert it to RDF/JSON
        for (int i = 0; i < s.size(); i++) {
            rdf.setModelTriple(s.get(i), p.get(i), o.get(i));
        }
        JSONObject jsonObject = (JSONObject) new JSONParser().parse(rdf.getModel("RDF/JSON"));
        Set keys = jsonObject.keySet();
        Iterator a = keys.iterator();
        while (a.hasNext()) {
            String key = (String) a.next();
            JSONObject tmpObject = (JSONObject) jsonObject.get(key);
            // the project's id is the first dcterms:identifier value
            JSONArray idArray = (JSONArray) tmpObject.get(rdf.getPrefixItem("dcterms:identifier"));
            JSONObject idObject = (JSONObject) idArray.get(0);
            String h = (String) idObject.get("value");
            JSONObject tmpObject2 = new JSONObject();
            tmpObject2.put(key, tmpObject);
            String hh = tmpObject2.toString();
            JSONObject tmp = Transformer.project_GET(hh, h);
            outArray.add(tmp);
        }
    }
    // sort array alphabetically by the "title" entry
    // NOTE(review): when projects is false, the unmodified input objects are sorted
    // by "title" too; an object without a "title" entry would NPE in the comparator —
    // confirm callers always provide it.
    JSONArray sortedJsonArray = new JSONArray();
    List<JSONObject> jsonValues = new ArrayList<>();
    for (int i = 0; i < outArray.size(); i++) {
        jsonValues.add((JSONObject) outArray.get(i));
    }
    Collections.sort(jsonValues, new Comparator<JSONObject>() {
        private static final String KEY_NAME = "title";
        @Override
        public int compare(JSONObject a, JSONObject b) {
            String valA = (String) a.get(KEY_NAME);
            String valB = (String) b.get(KEY_NAME);
            return valA.compareTo(valB);
        }
    });
    for (int i = 0; i < outArray.size(); i++) {
        sortedJsonArray.add(jsonValues.get(i));
    }
    outArray = sortedJsonArray;
    return outArray;
}
}
|
package edu.umd.cs.findbugs;
import java.io.IOException;
import java.io.InputStream;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Properties;
/**
 * Version number and release date information.
 */
public class Version {

    /** Major version number. */
    public static final int MAJOR = 1;

    /** Minor version number. */
    public static final int MINOR = 3;

    /** Patch level. */
    public static final int PATCHLEVEL = 8;

    /** Development version or release candidate? */
    public static final boolean IS_DEVELOPMENT = true;

    /**
     * Release candidate number.
     * "0" indicates that the version is not a release candidate.
     */
    public static final int RELEASE_CANDIDATE = 1;

    /** Release date, formatted at class-load time. */
    public static final String COMPUTED_DATE;

    /** Release date: taken from version.properties when available, else computed. */
    public static final String DATE;

    /** Compact yyyyMMdd date used to stamp Eclipse-plugin version strings. */
    private static final String COMPUTED_ECLIPSE_DATE;

    static {
        SimpleDateFormat dateFormat = new SimpleDateFormat("HH:mm:ss z, dd MMMM, yyyy");
        SimpleDateFormat eclipseDateFormat = new SimpleDateFormat("yyyyMMdd");
        // Use a single Date so both formatted values describe the same instant.
        Date now = new Date();
        COMPUTED_DATE = dateFormat.format(now);
        COMPUTED_ECLIPSE_DATE = eclipseDateFormat.format(now);
    }

    /**
     * Preview release number.
     * "0" indicates that the version is not a preview release.
     */
    public static final int PREVIEW = 0;

    // Suffix appended to development builds: rcN, previewN, or dev-yyyyMMdd.
    private static final String RELEASE_SUFFIX_WORD =
        (RELEASE_CANDIDATE > 0
         ? "rc" + RELEASE_CANDIDATE
         : (PREVIEW > 0 ? "preview" + PREVIEW : "dev-" + COMPUTED_ECLIPSE_DATE));

    /** Base version string, e.g. "1.3.8". */
    public static final String RELEASE_BASE = MAJOR + "." + MINOR + "." + PATCHLEVEL;

    /**
     * Release version string.
     */
    public static final String COMPUTED_RELEASE =
        RELEASE_BASE + (IS_DEVELOPMENT ? "-" + RELEASE_SUFFIX_WORD : "");

    /**
     * Release version string.
     */
    public static final String RELEASE;

    /**
     * Version of Eclipse plugin.
     */
    public static final String COMPUTED_ECLIPSE_UI_VERSION =
        RELEASE_BASE + "." + COMPUTED_ECLIPSE_DATE;

    static {
        InputStream in = null;
        String release, date;
        try {
            // Optional overrides shipped next to the class; fall back to the
            // computed values when the resource or its keys are missing.
            Properties versionProperties = new Properties();
            in = Version.class.getResourceAsStream("version.properties");
            versionProperties.load(in);
            release = (String) versionProperties.get("release.number");
            date = (String) versionProperties.get("release.date");
            if (release == null)
                release = COMPUTED_RELEASE;
            if (date == null)
                date = COMPUTED_DATE;
        } catch (RuntimeException e) {
            // e.g. NPE when version.properties is absent (in == null)
            release = COMPUTED_RELEASE;
            date = COMPUTED_DATE;
        } catch (IOException e) {
            release = COMPUTED_RELEASE;
            date = COMPUTED_DATE;
        } finally {
            try {
                if (in != null) in.close();
            } catch (IOException e) {
                assert true; // nothing to do here
            }
        }
        RELEASE = release;
        DATE = date;
    }

    /**
     * FindBugs website.
     */
    public static final String WEBSITE = "http://findbugs.sourceforge.net";

    /**
     * Downloads website.
     */
    public static final String DOWNLOADS_WEBSITE = "http://prdownloads.sourceforge.net/findbugs";

    /**
     * Support email.
     */
    public static final String SUPPORT_EMAIL = "http://findbugs.sourceforge.net/reportingBugs.html";

    /**
     * Prints version information selected by a single flag:
     * -release, -date, or -props.
     */
    public static void main(String[] argv) {
        if (argv.length != 1) {
            // BUGFIX: previously fell through and indexed argv[0], throwing
            // ArrayIndexOutOfBoundsException after printing the usage message.
            usage();
            System.exit(1);
        }
        String arg = argv[0];

        if (!IS_DEVELOPMENT && RELEASE_CANDIDATE != 0) {
            throw new IllegalStateException("Non developmental version, but is release candidate " + RELEASE_CANDIDATE);
        }

        if (arg.equals("-release"))
            System.out.println(RELEASE);
        else if (arg.equals("-date"))
            System.out.println(DATE);
        else if (arg.equals("-props")) {
            System.out.println("release.base=" + RELEASE_BASE);
            System.out.println("release.number=" + COMPUTED_RELEASE);
            System.out.println("release.date=" + COMPUTED_DATE);
            System.out.println("eclipse.ui.version=" + COMPUTED_ECLIPSE_UI_VERSION);
            System.out.println("findbugs.website=" + WEBSITE);
            System.out.println("findbugs.downloads.website=" + DOWNLOADS_WEBSITE);
        } else {
            usage();
            System.exit(1);
        }
    }

    /** Prints the accepted command-line flags to stderr. */
    private static void usage() {
        System.err.println("Usage: " + Version.class.getName() +
            " (-release|-date|-props)");
    }
}
// vim:ts=4
|
package core;
/**
 * Singleton attribute representing an invalid value. It can be used with any policy
 * implementation and always ranks as the least preferable attribute.
 *
 * Invalid attribute can not be implemented as an enum type because there is a conflict
 * between the enum comparable interface and the comparable interface of an attribute.
 */
public class InvalidAttribute implements Attribute {

    // The single shared instance, reachable only through invalidAttr().
    private static final InvalidAttribute INSTANCE = new InvalidAttribute();

    private InvalidAttribute() {
        // not to be instantiated directly
    }

    /**
     * Returns the one and only invalid attribute instance. This is the only way to
     * obtain an invalid attribute.
     *
     * @return invalid attribute instance.
     */
    public static InvalidAttribute invalidAttr() {
        return INSTANCE;
    }

    /**
     * An invalid attribute is equal to other invalid attributes and greater than any
     * other attribute.
     *
     * @param attribute attribute to be compared.
     * @return 0 if attribute is invalid or greater than 0 if is not an invalid attribute.
     */
    @Override
    public int compareTo(Attribute attribute) {
        if (attribute == INSTANCE) {
            return 0;
        }
        return 1;
    }

    @SuppressWarnings("EqualsWhichDoesntCheckParameterClass")
    @Override
    public boolean equals(Object other) {
        return other == INSTANCE;
    }

    // hashCode(): the default identity implementation suffices for a singleton.

    @Override
    public String toString() {
        return "•";
    }
}
|
package com.vladmihalcea.book.hpjp.hibernate.association;
import com.vladmihalcea.book.hpjp.util.AbstractTest;
import org.junit.Test;
import org.springframework.beans.BeanUtils;
import javax.persistence.*;
import java.io.Serializable;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import static org.junit.Assert.assertEquals;
/**
* @author Vlad Mihalcea
*/
public class ElementCollectionSetMergeTest extends AbstractTest {
@Override
protected Class<?>[] entities() {
    // Entity types registered with this test's persistence unit.
    return new Class<?>[]{Post.class, PostCategory.class};
}
@Override
protected void afterInit() {
    // Seed the database with two categories and one post that references both
    // and carries a single initial comment.
    doInJPA(em -> {
        PostCategory postCategory = new PostCategory().setCategory("Post");
        PostCategory archiveCategory = new PostCategory().setCategory("Archive");
        em.persist(postCategory);
        em.persist(archiveCategory);

        Post post = new Post()
            .setId(1L)
            .setTitle("High-Performance Java Persistence")
            .addCategory(postCategory)
            .addCategory(archiveCategory)
            .addComment(new Comment().setComment("firstComment"));
        em.persist(post);
    });
}
@Test
public void testMerge() {
    // Snapshot the current Post state, plus extra comments/tags, into a detached DTO.
    PostDTO postDTO = getPostDTO();
    doInJPA(entityManager -> {
        // A second, independent persistence context is used for the first find/copy.
        EntityManager entityManager1 = entityManagerFactory().createEntityManager();
        //second find and copy from dto
        Post post = entityManager1.find(Post.class, 1L);
        BeanUtils.copyProperties(postDTO, post);
        entityManager1.close();
        // find posts by category
        List<Post> posts = entityManager.createQuery("""
            select p
            from Post p
            join p.categories c
            where c.id = :categoryId
            """, Post.class)
        .setParameter("categoryId", post.categories.iterator().next().id)
        .getResultList();
        // update post
        post = entityManager.find(Post.class, 1L);
        BeanUtils.copyProperties(postDTO, post);
        Object mergedEntity = update(entityManager, post);
    });
    doInJPA(entityManager -> {
        // Verify the merged element collections and association reached the database.
        Post post = entityManager.find(Post.class, 1L);
        assertEquals(2, post.getTags().size());
        assertEquals(3, post.getComments().size());
        assertEquals(2, post.getCategories().size());
    });
}
@Test
public void testMergeDetach() {
    // Snapshot the current Post state, plus extra comments/tags, into a detached DTO.
    PostDTO postDTO = getPostDTO();
    doInJPA(entityManager -> {
        //second find and copy from dto
        Post post = entityManager.find(Post.class, 1L);
        // Detach before copying so the DTO state is applied to a detached entity.
        entityManager.detach(post);
        BeanUtils.copyProperties(postDTO, post);
        // find posts by category
        List<Post> posts = entityManager.createQuery("""
            select p
            from Post p
            join p.categories c
            where c.id = :categoryId
            """, Post.class)
        .setParameter("categoryId", post.categories.iterator().next().id)
        .getResultList();
        // update post
        post = entityManager.find(Post.class, 1L);
        BeanUtils.copyProperties(postDTO, post);
        Object mergedEntity = update(entityManager, post);
    });
    doInJPA(entityManager -> {
        // Verify the merged element collections and association reached the database.
        Post post = entityManager.find(Post.class, 1L);
        assertEquals(2, post.getTags().size());
        assertEquals(3, post.getComments().size());
        assertEquals(2, post.getCategories().size());
    });
}
@Test
public void testMergeWorkingReorder() {
    // Snapshot the current Post state, plus extra comments/tags, into a detached DTO.
    PostDTO postDTO = getPostDTO();
    doInJPA(entityManager -> {
        //second find and copy from dto
        Post post = entityManager.find(Post.class, 1L);
        // find posts by category
        List<Post> posts = entityManager.createQuery("""
            select p
            from Post p
            join p.categories c
            where c.id = :categoryId
            """, Post.class)
        .setParameter("categoryId", post.categories.iterator().next().id)
        .getResultList();
        // Here the DTO is copied AFTER the query — the reordering this test exercises.
        BeanUtils.copyProperties(postDTO, post);
        // update post
        post = entityManager.find(Post.class, 1L);
        BeanUtils.copyProperties(postDTO, post);
        Object mergedEntity = update(entityManager, post);
    });
    doInJPA(entityManager -> {
        // Verify the merged element collections and association reached the database.
        Post post = entityManager.find(Post.class, 1L);
        assertEquals(2, post.getTags().size());
        assertEquals(3, post.getComments().size());
        assertEquals(2, post.getCategories().size());
    });
}
/**
 * Builds a detached DTO mirroring Post #1, then enriches it with three new
 * comments and two new tags on top of the entity's current collections.
 */
private PostDTO getPostDTO() {
    // Load the managed entity and leave the persistence context immediately.
    Post post = doInJPA(entityManager -> entityManager.find(Post.class, 1L));

    PostDTO dto = new PostDTO();
    dto.id = post.id;
    dto.title = post.title;
    dto.categories = post.categories;
    dto.tags = post.tags;
    dto.comments = new HashSet<>();
    dto.addComment(new Comment().setComment("Best book on JPA and Hibernate!").setAuthor("Alice"));
    dto.addComment(new Comment().setComment("A must-read for every Java developer!").setAuthor("Bob"));
    dto.addComment(new Comment().setComment("A great reference book").setAuthor("Carol"));
    dto.addTag(new Tag().setName("JPA").setAuthor("Alice"));
    dto.addTag(new Tag().setName("Hibernate").setAuthor("Alice"));
    return dto;
}
/**
 * Merges {@code object}, detaches the original argument, then merges the managed
 * copy a second time, returning the result of that second merge.
 */
private Object update(EntityManager entityManager, Object object) {
    Object managed = entityManager.merge(object);
    entityManager.detach(object);
    return entityManager.merge(managed);
}
/**
 * Test entity: a post with eagerly fetched element collections (comments,
 * tags) and a many-to-many association to PostCategory. Fields are also
 * accessed directly (e.g. post.categories) from the enclosing test class.
 */
@Entity(name = "Post")
@Table(name = "post")
public static class Post {
    @Id
    private Long id;
    private String title;
    // Embeddable comments stored in a dedicated collection table.
    @ElementCollection(fetch = FetchType.EAGER)
    @CollectionTable(
        name = "post_comment",
        joinColumns = @JoinColumn(name = "post_id")
    )
    private Set<Comment> comments = new HashSet<>();
    // Embeddable tags stored in a dedicated collection table.
    @ElementCollection(fetch = FetchType.EAGER)
    @CollectionTable(
        name = "post_tag",
        joinColumns = @JoinColumn(name = "post_id")
    )
    private Set<Tag> tags = new HashSet<>();
    @ManyToMany(fetch = FetchType.EAGER)
    @JoinTable(
        name = "post_categories",
        // NOTE(review): joinColumns/inverseJoinColumns appear swapped -- on the
        // owning (Post) side joinColumns would conventionally be "post_id".
        // Verify against the intended schema before changing.
        joinColumns = @JoinColumn(name = "category_id"),
        inverseJoinColumns = @JoinColumn(name = "post_id")
    )
    private Set<PostCategory> categories = new HashSet<>();
    public Long getId() {
        return id;
    }
    // Fluent setters: return this so calls can be chained.
    public Post setId(Long id) {
        this.id = id;
        return this;
    }
    public String getTitle() {
        return title;
    }
    public Post setTitle(String title) {
        this.title = title;
        return this;
    }
    public Set<Comment> getComments() {
        return comments;
    }
    public void setComments(Set<Comment> comments) {
        this.comments = comments;
    }
    public Set<Tag> getTags() {
        return tags;
    }
    public void setTags(Set<Tag> tags) {
        this.tags = tags;
    }
    public Set<PostCategory> getCategories() {
        return categories;
    }
    public void setCategories(Set<PostCategory> categories) {
        this.categories = categories;
    }
    // Collection helpers used by the fluent DTO-building code.
    public Post addComment(Comment comment) {
        comments.add(comment);
        return this;
    }
    public Post addTag(Tag tag) {
        tags.add(tag);
        return this;
    }
    public Post addCategory(PostCategory category) {
        categories.add(category);
        return this;
    }
}
/**
 * Detached DTO mirroring Post. Populated via BeanUtils.copyProperties, so
 * the getter/setter names must stay aligned with Post's bean properties.
 */
public static class PostDTO {
    private Long id;
    private String title;
    private Set<Comment> comments = new HashSet<>();
    private Set<Tag> tags = new HashSet<>();
    private Set<PostCategory> categories = new HashSet<>();
    public Long getId() {
        return id;
    }
    public void setId(Long id) {
        this.id = id;
    }
    public String getTitle() {
        return title;
    }
    public void setTitle(String title) {
        this.title = title;
    }
    public Set<Comment> getComments() {
        return comments;
    }
    public void setComments(Set<Comment> comments) {
        this.comments = comments;
    }
    public Set<Tag> getTags() {
        return tags;
    }
    public void setTags(Set<Tag> tags) {
        this.tags = tags;
    }
    public Set<PostCategory> getCategories() {
        return categories;
    }
    public void setCategories(Set<PostCategory> categories) {
        this.categories = categories;
    }
    // Fluent helpers mirroring Post's add* methods.
    public PostDTO addComment(Comment comment) {
        comments.add(comment);
        return this;
    }
    public PostDTO addTag(Tag tag) {
        tags.add(tag);
        return this;
    }
    public PostDTO addCategory(PostCategory category) {
        categories.add(category);
        return this;
    }
}
/**
 * Test entity: a category with a generated id, linked from Post via
 * the @ManyToMany "post_categories" join table.
 */
@Entity(name = "PostCategory")
@Table(name = "post_category")
public static class PostCategory {
    @Id
    @GeneratedValue
    private Long id;
    private String category;
    public Long getId() {
        return id;
    }
    public void setId(Long id) {
        this.id = id;
    }
    public String getCategory() {
        return category;
    }
    // Fluent setter (unlike setId) -- kept as-is for existing call sites.
    public PostCategory setCategory(String category) {
        this.category = category;
        return this;
    }
}
/**
 * Embeddable value type for a post comment. Equality is value-based
 * (comment text + author), as required for elements of a Set-valued
 * element collection.
 */
@Embeddable
public static class Comment implements Serializable {
    private String comment;
    private String author;

    /** Returns the comment text. */
    public String getComment() {
        return comment;
    }

    /** Fluent setter for the comment text. */
    public Comment setComment(String comment) {
        this.comment = comment;
        return this;
    }

    /** Returns the comment author. */
    public String getAuthor() {
        return author;
    }

    /** Fluent setter for the comment author. */
    public Comment setAuthor(String author) {
        this.author = author;
        return this;
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (!(o instanceof Comment)) {
            return false;
        }
        Comment other = (Comment) o;
        return Objects.equals(author, other.author)
                && Objects.equals(comment, other.comment);
    }

    @Override
    public int hashCode() {
        return Objects.hash(comment, author);
    }
}
/**
 * Embeddable value type for a post tag. Equality is value-based
 * (name + author), matching its use inside a Set element collection.
 */
@Embeddable
public static class Tag implements Serializable {
    private String name;
    private String author;

    /** Returns the tag name. */
    public String getName() {
        return name;
    }

    /** Fluent setter for the tag name. */
    public Tag setName(String name) {
        this.name = name;
        return this;
    }

    /** Returns the tag author. */
    public String getAuthor() {
        return author;
    }

    /** Fluent setter for the tag author. */
    public Tag setAuthor(String author) {
        this.author = author;
        return this;
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (!(o instanceof Tag)) {
            return false;
        }
        Tag other = (Tag) o;
        return Objects.equals(author, other.author)
                && Objects.equals(name, other.name);
    }

    @Override
    public int hashCode() {
        return Objects.hash(name, author);
    }
}
}
|
// NOTE(review): this class appears to be a test fixture for a static-analysis
// checker (self-field-operation detectors, e.g. FindBugs/SpotBugs): every
// expression below deliberately compares or combines a value with itself.
// Do NOT "fix" these -- the self-operations are the point of the fixture.
public class SelfFieldOperation {
    int x,y;
    volatile int z;
    // z == z over a volatile is two separate reads; another thread may write
    // z in between, so flagging it as "always true" is a false positive --
    // hence the method name.
    boolean volatileFalsePositive() {
        return z == z;
    }
    int f(int x, int y) {
        if (x < x)          // always false
            x = y^y;        // always 0
        if (x != x)         // always false
            y = x|x;        // identity
        if (x >= x)         // always true
            x = y&y;        // identity
        return x;
    }
    Integer a, b;
    boolean e() {
        return a.equals(a); // self-equality (NPE if a is null)
    }
    int c() {
        return a.compareTo(a); // self-comparison (NPE if a is null)
    }
}
|
package es.tid.cosmos.mobility.itineraries;
import java.io.IOException;
import com.twitter.elephantbird.mapreduce.io.ProtobufWritable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.Reducer;
import es.tid.cosmos.mobility.Config;
import es.tid.cosmos.mobility.data.ItinMovementUtil;
import es.tid.cosmos.mobility.data.MobDataUtil;
import es.tid.cosmos.mobility.data.generated.MobProtocol.ItinTime;
import es.tid.cosmos.mobility.data.generated.MobProtocol.MobData;
/**
 * Input: <Long, ItinTime>
 * Output: <Long, ItinMovement>
 *
 * Pairs each consecutive pair of location records for a key into a movement
 * and emits it only when the elapsed time (in minutes) lies within the
 * configured [minMinutesInMoves, maxMinutesInMoves] window.
 *
 * @author dmicol
 */
public class ItinMoveClientPoisReducer extends Reducer<LongWritable,
        ProtobufWritable<MobData>, LongWritable, ProtobufWritable<MobData>> {
    // Duration-filter bounds in minutes, read from the job configuration in setup().
    private int maxMinutesInMoves;
    private int minMinutesInMoves;

    @Override
    protected void setup(Context context) throws IOException,
            InterruptedException {
        final Configuration conf = context.getConfiguration();
        // Defaults make the filter a no-op if the keys are absent.
        this.maxMinutesInMoves = conf.getInt(Config.MAX_MINUTES_IN_MOVES,
                Integer.MAX_VALUE);
        this.minMinutesInMoves = conf.getInt(Config.MIN_MINUTES_IN_MOVES,
                Integer.MIN_VALUE);
    }

    @Override
    protected void reduce(LongWritable key,
            Iterable<ProtobufWritable<MobData>> values,
            Context context) throws IOException, InterruptedException {
        // Sliding window of two consecutive records: (prevLoc, curLoc).
        ItinTime prevLoc;
        ItinTime curLoc = null;
        for (ProtobufWritable<MobData> value : values) {
            value.setConverter(MobData.class);
            final MobData mobData = value.get();
            prevLoc = curLoc;
            curLoc = ItinTime.newBuilder(mobData.getItinTime()).build();
            if (prevLoc == null) {
                // We are analyzing the first record, so move on to the next one
                continue;
            }
            // Only an actual cell change counts as a movement.
            // NOTE(review): assumes getBts() returns a primitive (e.g. long);
            // if it returned a message type, != would compare references -- confirm.
            if (curLoc.getBts() != prevLoc.getBts()) {
                int difMonth = curLoc.getDate().getMonth()
                        - prevLoc.getDate().getMonth();
                // Skip movements spanning more than one month boundary.
                // NOTE(review): a year wrap (Dec -> Jan) yields a negative
                // difMonth and is not handled specially; the resulting distance
                // goes negative and is (implicitly) dropped by the min filter -- verify.
                if (difMonth > 1) {
                    continue;
                }
                int difDay = curLoc.getDate().getDay()
                        - prevLoc.getDate().getDay();
                int difHour = curLoc.getTime().getHour()
                        - prevLoc.getTime().getHour();
                int difMin = curLoc.getTime().getMinute()
                        - prevLoc.getTime().getMinute();
                // Approximate length of the previous month in minutes
                // (1440 min/day; February is always treated as 28 days --
                // leap years are deliberately or accidentally ignored).
                int nMinsMonth;
                switch (prevLoc.getDate().getMonth()) {
                    case 4: case 6: case 9: case 11:
                        nMinsMonth = 1440 * 30;
                        break;
                    case 2:
                        nMinsMonth = 1440 * 28;
                        break;
                    default:
                        nMinsMonth = 1440 * 31;
                }
                // Total elapsed minutes between the two sightings.
                int distance = (nMinsMonth * difMonth) + (1440 * difDay)
                        + (60 * difHour) + difMin;
                // Filter movements by diff of time
                if (distance <= maxMinutesInMoves &&
                        distance >= minMinutesInMoves) {
                    ProtobufWritable<MobData> move = MobDataUtil.createAndWrap(
                            ItinMovementUtil.create(prevLoc, curLoc));
                    context.write(key, move);
                }
            }
        }
    }
}
|
package com.oath.cyclops.internal.stream.spliterators.push;
import java.util.function.Consumer;
import java.util.function.Predicate;
/**
 * Push-stream operator implementing an inclusive "take while": elements are
 * forwarded until the predicate first fails, and the failing element itself
 * IS still emitted before the stream completes (hence "Closed").
 *
 * NOTE(review): the type parameter R is unused; the operator maps T -> T.
 */
public class LimitWhileClosedOperator<T,R> extends BaseOperator<T,T> {
    // Condition that keeps the stream open; the first element for which it
    // returns false is still delivered, then the stream is terminated.
    private final Predicate<? super T> predicate;
    public LimitWhileClosedOperator(Operator<T> source, final Predicate<? super T> predicate){
        super(source);
        this.predicate = predicate;
    }
    @Override
    public StreamSubscription subscribe(Consumer<? super T> onNext, Consumer<? super Throwable> onError, Runnable onComplete) {
        // Single-element arrays emulate mutable captures inside the lambdas.
        boolean closed[] = {false};
        StreamSubscription sub[] = {null};
        sub[0] = source.subscribe(e-> {
            try {
                // Once closed, suppress emission but still run the predicate
                // (cancellation below should stop further deliveries).
                if(!closed[0])
                    onNext.accept(e);
                if(!predicate.test(e)){
                    closed[0]=true;
                    sub[0].cancel();
                    onComplete.run();
                }
            } catch (Throwable t) {
                // Failures from the predicate or downstream onNext both go to onError.
                onError.accept(t);
            }
        }
        ,onError,onComplete);
        // NOTE(review): unlike subscribeAll, there is no guard against
        // onComplete running twice; this relies on cancel() preventing the
        // source's own completion signal -- confirm.
        return sub[0];
    }
    @Override
    public void subscribeAll(Consumer<? super T> onNext, Consumer<? super Throwable> onError, Runnable onCompleteDs) {
        // complete guards onCompleteDs from running twice: once when the
        // predicate fails, once from the source's own completion callback.
        boolean[] complete = {false};
        boolean closed[] = {false};
        source.subscribeAll(e-> {
            try {
                if(!closed[0] && !complete[0])
                    onNext.accept(e);
                if(!predicate.test(e)){
                    closed[0]=true;
                    onCompleteDs.run();
                    complete[0]=true;
                }
            } catch (Throwable t) {
                onError.accept(t);
            }
        }
        ,onError,()->{
            if(!complete[0]) {
                complete[0] = true;
                onCompleteDs.run();
            }
        });
    }
}
|
//$HeadURL$
package org.deegree.commons.tom.primitive;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.text.ParseException;
import org.apache.xerces.xs.XSConstants;
import org.apache.xerces.xs.XSSimpleTypeDefinition;
import org.deegree.commons.tom.datetime.Date;
import org.deegree.commons.tom.datetime.DateTime;
import org.deegree.commons.tom.datetime.Time;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Converts between internal object values and XML strings.
*
* @author <a href="mailto:[email protected]">Markus Schneider</a>
* @author last edited by: $Author$
*
* @version $Revision$, $Date$
*/
public class XMLValueMangler {

    private static final Logger LOG = LoggerFactory.getLogger( XMLValueMangler.class );

    /**
     * Converts the given XML string to a matching internal object for the specified {@link PrimitiveType}.
     *
     * @param s
     *            XML string to be converted, must not be <code>null</code>
     * @param pt
     *            target primitive type, must not be <code>null</code>
     * @return internal representation (Boolean, Date, DateTime, BigDecimal, Double, BigInteger, Time or the
     *         unchanged input String)
     * @throws IllegalArgumentException
     *             if the string is not a valid lexical value for the given type (the underlying parse
     *             failure, if any, is attached as the cause)
     */
    public static Object xmlToInternal( String s, PrimitiveType pt )
                            throws IllegalArgumentException {
        Object value = s;
        switch ( pt ) {
        case BOOLEAN: {
            // xs:boolean also permits the canonical "1"/"0" lexical forms
            if ( s.equals( "true" ) || s.equals( "1" ) ) {
                value = Boolean.TRUE;
            } else if ( s.equals( "false" ) || s.equals( "0" ) ) {
                value = Boolean.FALSE;
            } else {
                String msg = "Value ('" + s + "') is not valid with respect to the xs:boolean type. "
                             + "Valid values are 'true', 'false', '1' and '0'.";
                throw new IllegalArgumentException( msg );
            }
            break;
        }
        case DATE: {
            try {
                value = new Date( s );
            } catch ( ParseException e ) {
                String msg = "Value ('" + s + "') is not valid with respect to the xs:date type.";
                // preserve the parse failure as cause so the original error position is not lost
                throw new IllegalArgumentException( msg, e );
            }
            break;
        }
        case DATE_TIME: {
            try {
                value = new DateTime( s );
            } catch ( ParseException e ) {
                String msg = "Value ('" + s + "') is not valid with respect to the xs:dateTime type.";
                throw new IllegalArgumentException( msg, e );
            }
            break;
        }
        case DECIMAL: {
            value = new BigDecimal( s );
            break;
        }
        case DOUBLE: {
            // Double.valueOf instead of the deprecated Double(String) constructor (identical semantics)
            value = Double.valueOf( s );
            break;
        }
        case INTEGER: {
            value = new BigInteger( s );
            break;
        }
        case STRING: {
            // input is already the internal representation
            break;
        }
        case TIME: {
            try {
                value = new Time( s );
            } catch ( ParseException e ) {
                String msg = "Value ('" + s + "') is not valid with respect to the xs:time type.";
                throw new IllegalArgumentException( msg, e );
            }
            break;
        }
        default: {
            // unknown constants degrade gracefully to the raw string value
            LOG.warn( "Unhandled primitive type " + pt + " -- treating as string value." );
        }
        }
        return value;
    }

    /**
     * Converts the given internal object to its XML string representation.
     *
     * @param o
     *            internal object, may be <code>null</code>
     * @return XML string, or <code>null</code> if the input is <code>null</code>
     */
    public static String internalToXML( Object o ) {
        String xml = null;
        if ( o != null ) {
            // TODO is toString() always a sufficient lexical representation (e.g. for date/time types)?
            xml = o.toString();
        }
        return xml;
    }

    /**
     * Returns the best matching {@link PrimitiveType} for the given XSD simple type definition.
     *
     * @param xsdTypeDef
     * @return best matching {@link PrimitiveType}, never <code>null</code>
     */
    public static PrimitiveType getPrimitiveType( XSSimpleTypeDefinition xsdTypeDef ) {
        switch ( xsdTypeDef.getBuiltInKind() ) {
        // date and time types
        case XSConstants.DATE_DT: {
            return PrimitiveType.DATE;
        }
        case XSConstants.DATETIME_DT: {
            return PrimitiveType.DATE_TIME;
        }
        case XSConstants.TIME_DT: {
            return PrimitiveType.TIME;
        }
        // numeric types (note: float/double are also mapped to DECIMAL)
        // -1.23, 0, 123.4, 1000.00
        case XSConstants.DECIMAL_DT:
        // -INF, -1E4, -0, 0, 12.78E-2, 12, INF, NaN (equivalent to double-precision 64-bit floating point)
        case XSConstants.DOUBLE_DT:
        // -INF, -1E4, -0, 0, 12.78E-2, 12, INF, NaN (single-precision 32-bit floating point)
        case XSConstants.FLOAT_DT: {
            return PrimitiveType.DECIMAL;
        }
        // integer types
        // ...-1, 0, 1, ...
        case XSConstants.INTEGER_DT:
        // 1, 2, ...
        case XSConstants.POSITIVEINTEGER_DT:
        // ... -2, -1
        case XSConstants.NEGATIVEINTEGER_DT:
        // 0, 1, 2, ...
        case XSConstants.NONNEGATIVEINTEGER_DT:
        // ... -2, -1, 0
        case XSConstants.NONPOSITIVEINTEGER_DT:
        // -9223372036854775808, ... -1, 0, 1, ... 9223372036854775807
        case XSConstants.LONG_DT:
        // 0, 1, ... 18446744073709551615
        case XSConstants.UNSIGNEDLONG_DT:
        // -2147483648, ... -1, 0, 1, ... 2147483647
        case XSConstants.INT_DT:
        // 0, 1, ...4294967295
        case XSConstants.UNSIGNEDINT_DT:
        // -32768, ... -1, 0, 1, ... 32767
        case XSConstants.SHORT_DT:
        // 0, 1, ... 65535
        case XSConstants.UNSIGNEDSHORT_DT:
        // -128, ...-1, 0, 1, ... 127
        case XSConstants.BYTE_DT:
        // 0, 1, ... 255
        case XSConstants.UNSIGNEDBYTE_DT: {
            return PrimitiveType.INTEGER;
        }
        // true, false
        case XSConstants.BOOLEAN_DT: {
            return PrimitiveType.BOOLEAN;
        }
        // other types -- all treated as plain strings
        case XSConstants.ANYSIMPLETYPE_DT:
        case XSConstants.ANYURI_DT:
        case XSConstants.BASE64BINARY_DT:
        case XSConstants.DURATION_DT:
        case XSConstants.ENTITY_DT:
        case XSConstants.GDAY_DT:
        case XSConstants.GMONTH_DT:
        case XSConstants.GMONTHDAY_DT:
        case XSConstants.GYEAR_DT:
        case XSConstants.GYEARMONTH_DT:
        case XSConstants.HEXBINARY_DT:
        case XSConstants.ID_DT:
        case XSConstants.IDREF_DT:
        case XSConstants.LANGUAGE_DT:
        case XSConstants.LIST_DT:
        case XSConstants.LISTOFUNION_DT:
        case XSConstants.NAME_DT:
        case XSConstants.NCNAME_DT:
        case XSConstants.NMTOKEN_DT:
        case XSConstants.NORMALIZEDSTRING_DT:
        case XSConstants.NOTATION_DT:
        case XSConstants.QNAME_DT:
        case XSConstants.STRING_DT:
        case XSConstants.TOKEN_DT:
        case XSConstants.UNAVAILABLE_DT: {
            return PrimitiveType.STRING;
        }
        }
        throw new IllegalArgumentException( "Unexpected simple type: " + xsdTypeDef );
    }
}
|
package crazypants.enderio.base.integration.jei;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import com.enderio.core.common.util.NNList;
import com.enderio.core.common.util.NullHelper;
import crazypants.enderio.api.upgrades.IDarkSteelItem;
import crazypants.enderio.base.Log;
import crazypants.enderio.base.handler.darksteel.DarkSteelRecipeManager;
import crazypants.enderio.base.handler.darksteel.DarkSteelRecipeManager.UpgradePath;
import mezz.jei.api.IModRegistry;
import mezz.jei.api.ISubtypeRegistry;
import mezz.jei.api.recipe.IFocus;
import mezz.jei.api.recipe.IFocus.Mode;
import mezz.jei.api.recipe.IRecipeCategory;
import mezz.jei.api.recipe.IRecipeRegistryPlugin;
import mezz.jei.api.recipe.IRecipeWrapper;
import mezz.jei.api.recipe.IVanillaRecipeFactory;
import mezz.jei.api.recipe.VanillaRecipeCategoryUid;
import mezz.jei.plugins.vanilla.anvil.AnvilRecipeWrapper;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraftforge.fml.relauncher.ReflectionHelper;
import static crazypants.enderio.base.init.ModObject.blockDarkSteelAnvil;
/**
 * Integrates Dark Steel upgrade (anvil) recipes with JEI: registers item
 * subtype interpreters and a recipe-registry plugin that synthesizes anvil
 * recipe wrappers for every upgrade path.
 */
public class DarkSteelUpgradeRecipeCategory {
    /**
     * Hash-map key wrapper for ItemStack (which has no value-based
     * equals/hashCode of its own). Equality considers item, damage and NBT tag.
     */
    private static class ItemStackKey {
        final @Nonnull ItemStack wrapped;
        ItemStackKey(@Nonnull ItemStack stack) {
            this.wrapped = stack;
        }
        @Override
        public boolean equals(Object obj) {
            if (obj == null || obj.getClass() != ItemStackKey.class) {
                return false;
            }
            ItemStack stack = ((ItemStackKey) obj).wrapped;
            return ItemStack.areItemsEqual(wrapped, stack) && ItemStack.areItemStackTagsEqual(wrapped, stack);
        }
        @Override
        public int hashCode() {
            // matches equals: item + damage (covered by areItemsEqual) + NBT tag
            return Objects.hash(wrapped.getItem(), wrapped.getItemDamage(), wrapped.getTagCompound());
        }
    }
    // All upgrade paths, computed once over every valid item.
    private static final @Nonnull List<UpgradePath> allRecipes = DarkSteelRecipeManager.getAllRecipes(ItemHelper.getValidItems());
    /**
     * Registers the subtype interpreter for every dark steel item so JEI can
     * distinguish upgraded stacks from their base items.
     */
    public static void registerSubtypes(ISubtypeRegistry subtypeRegistry) {
        DarkSteelUpgradeSubtypeInterpreter dsusi = new DarkSteelUpgradeSubtypeInterpreter();
        Set<Item> items = new HashSet<Item>();
        for (ItemStack stack : ItemHelper.getValidItems()) {
            if (stack.getItem() instanceof IDarkSteelItem) {
                items.add(stack.getItem());
            }
        }
        for (Item item : items) {
            if (item != null) {
                subtypeRegistry.registerSubtypeInterpreter(item, dsusi);
            }
        }
    }
    /**
     * Registers the dark steel anvil as an anvil-recipe catalyst and installs
     * a plugin that serves the upgrade recipes to JEI's anvil category.
     */
    public static void register(IModRegistry registry) {
        registry.addRecipeCatalyst(new ItemStack(blockDarkSteelAnvil.getBlockNN()), VanillaRecipeCategoryUid.ANVIL);
        registry.addRecipeRegistryPlugin(new IRecipeRegistryPlugin() {
            @Override
            public @Nonnull <T extends IRecipeWrapper, V> List<T> getRecipeWrappers(@Nonnull IRecipeCategory<T> recipeCategory, @Nonnull IFocus<V> focus) {
                if (recipeCategory.getUid().equals(VanillaRecipeCategoryUid.ANVIL) && focus.getValue() instanceof ItemStack) {
                    Collection<UpgradePath> recipes;
                    ItemStack focusStack = (ItemStack) focus.getValue();
                    if (focus.getMode() == Mode.INPUT) {
                        // focused item is an input: collect the recipes it participates in
                        Set<UpgradePath> res = new HashSet<>();
                        DarkSteelRecipeManager.getRecipes(res, new NNList<>(focusStack));
                        recipes = res;
                    } else {
                        // focused item is an output: match by produced item
                        recipes = allRecipes.stream().filter(u -> u.getOutput().getItem() == focusStack.getItem()).collect(Collectors.toSet());
                    }
                    if (recipes.isEmpty()) {
                        // no direct match: fall back to all recipes filtered by the focus as upgrade item
                        return getWrappers(allRecipes, focusStack);
                    }
                    return getWrappers(recipes, null);
                }
                return NNList.emptyList();
            }
            @Override
            public @Nonnull <T extends IRecipeWrapper> List<T> getRecipeWrappers(@Nonnull IRecipeCategory<T> recipeCategory) {
                if (recipeCategory.getUid().equals(VanillaRecipeCategoryUid.ANVIL)) {
                    return getWrappers(allRecipes, null);
                }
                return NNList.emptyList();
            }
            /**
             * Builds one anvil wrapper per distinct upgrade item, grouping all
             * recipes that share it; optionally restricted to a single upgrade.
             */
            @SuppressWarnings("unchecked")
            private @Nonnull <T extends IRecipeWrapper> List<T> getWrappers(@Nonnull Collection<UpgradePath> recipes, @Nullable ItemStack upgradeFocus) {
                final IVanillaRecipeFactory factory = registry.getJeiHelpers().getVanillaRecipeFactory();
                Map<ItemStackKey, List<UpgradePath>> byUpgrade = NullHelper.notnullJ(
                        recipes.stream().collect(Collectors.groupingBy(rec -> new ItemStackKey(rec.getUpgrade()))),
                        "Stream#collect");
                List<IRecipeWrapper> wrappers = new ArrayList<>();
                ItemStackKey focusKey = upgradeFocus == null ? null : new ItemStackKey(upgradeFocus);
                for (Entry<ItemStackKey, List<UpgradePath>> e : byUpgrade.entrySet()) {
                    if (upgradeFocus != null && !e.getKey().equals(focusKey)) {
                        continue;
                    }
                    List<UpgradePath> recs = e.getValue();
                    ItemStack upgrade = e.getKey().wrapped;
                    // the anvil wrapper expects one upgrade entry per recipe row
                    List<ItemStack> upgradesRepeated = new ArrayList<>();
                    for (int i = 0; i < recs.size(); i++) {
                        upgradesRepeated.add(upgrade);
                    }
                    IRecipeWrapper w = factory.createAnvilRecipe(recs.get(0).getInput(), upgradesRepeated,
                            NullHelper.notnullJ(recs.stream().map(UpgradePath::getOutput).collect(Collectors.toList()), "Stream#collect"));
                    try {
                        // Force the wrapper's input list to be all items instead of just the first
                        ReflectionHelper.<List<List<ItemStack>>, AnvilRecipeWrapper>getPrivateValue(AnvilRecipeWrapper.class, (AnvilRecipeWrapper) w, "inputs").set(0, recs.stream().map(UpgradePath::getInput).collect(Collectors.toList()));
                    } catch (Exception ex) {
                        // Something changed in JEI internals, we can just fall back to the first input
                        Log.LOGGER.debug("Error modifying AnvilRecipeWrapper, falling back to single input...", ex);
                    }
                    wrappers.add(w);
                }
                return (List<T>) wrappers;
            }
            @Override
            public @Nonnull <V> List<String> getRecipeCategoryUids(@Nonnull IFocus<V> focus) {
                return new NNList<>(VanillaRecipeCategoryUid.ANVIL);
            }
        });
    }
}
|
package org.eclipse.persistence.oxm.annotations;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.TYPE;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
* The XmlCustomizer annotation is used to specify a class that implements the
* org.eclipse.persistence.config.DescriptorCustomizer
* interface and is to run against a class descriptor after all metadata
* processing has been completed.
*/
@Target({TYPE})
@Retention(RUNTIME)
public @interface XmlCustomizer {
/**
* (Required) Defines the name of the descriptor customizer that should be
* applied to this classes descriptor.
*/
Class value();
}
|
package es.usc.citius.hipster.examples;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Graph;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.blueprints.impls.tg.TinkerGraph;
import com.tinkerpop.blueprints.util.io.graphml.GraphMLReader;
import es.usc.citius.hipster.algorithm.Hipster;
import es.usc.citius.hipster.model.Transition;
import es.usc.citius.hipster.model.function.ActionFunction;
import es.usc.citius.hipster.model.function.ActionStateTransitionFunction;
import es.usc.citius.hipster.model.function.BinaryFunction;
import es.usc.citius.hipster.model.function.CostFunction;
import es.usc.citius.hipster.model.function.impl.BinaryOperation;
import es.usc.citius.hipster.model.problem.ProblemBuilder;
import es.usc.citius.hipster.model.problem.SearchProblem;
import java.io.IOException;
import java.net.URL;
public class BlueprintsGraphMultiobjectiveSearch {
/**
 * Multiobjective search example over a Blueprints graph with a two-component
 * additive cost; prints the non-dominated solutions from v1 to v6.
 *
 * @param args unused
 * @throws IOException declared for source compatibility (nothing throws it
 *         since the redundant remote GraphML download was removed)
 */
public static void main(String[] args) throws IOException {
    // The original code downloaded the example graph from a gist and then
    // immediately discarded it by reassigning g = buildGraph(), so the
    // network fetch was pure overhead (and a point of failure). The graph
    // is now built locally only; to load from GraphML instead, use:
    //   Graph g = new TinkerGraph();
    //   GraphMLReader.inputGraph(g, new URL("https://gist.githubusercontent.com/pablormier/5d52543b4dcae297ab14/raw/56b6b540b68679f201db2f0cb51e9d915ac3d32c/multiobjective-graph.graphml").openStream());
    Graph g = buildGraph();
    // Since we use a special cost, we need to define a BinaryOperation<Cost>
    // that provides the required elements to work with our special cost type.
    // These elements are: a BinaryFunction<Cost> that defines how to compute
    // a new cost from two costs: C x C -> C, the identity element I of our
    // cost (C + I = C, I + C = C), and the maximum value.
    // Cost a + Cost b is defined as a new cost a.c1+b.c1, a.c2+b.c2
    BinaryFunction<Cost> f = new BinaryFunction<Cost>() {
        @Override
        public Cost apply(Cost a, Cost b) {
            Cost c = new Cost(a.c1 + b.c1, a.c2 + b.c2);
            return c;
        }
    };
    // The identity cost identity satisfy:
    Cost identity = new Cost(0d, 0d);
    // Maximum value of our costs
    Cost max = new Cost(Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY);
    // Create our custom binary operation:
    BinaryOperation<Cost> bf = new BinaryOperation<Cost>(f, identity, max);
    // Define a problem: actions are outgoing edges, transitions follow the
    // edge to its head vertex, and the cost reads (c1, c2) off the edge.
    SearchProblem p = ProblemBuilder.create()
            .initialState(g.getVertex("v1"))
            .defineProblemWithExplicitActions()
            .useActionFunction(new ActionFunction<Edge, Vertex>() {
                @Override
                public Iterable<Edge> actionsFor(Vertex state) {
                    return state.getEdges(Direction.OUT);
                }
            }).useTransitionFunction(new ActionStateTransitionFunction<Edge, Vertex>() {
                @Override
                public Vertex apply(Edge action, Vertex state) {
                    return action.getVertex(Direction.IN);
                }
            }).useGenericCostFunction(new CostFunction<Edge, Vertex, Cost>() {
                @Override
                public Cost evaluate(Transition<Edge, Vertex> transition) {
                    Double cost1 = Double.valueOf(transition.getAction().getProperty("c1").toString());
                    Double cost2 = Double.valueOf(transition.getAction().getProperty("c2").toString());
                    return new Cost(cost1, cost2);
                }
            }, bf).build();
    System.out.println(Hipster.createMultiobjectiveLS(p).search(g.getVertex("v6")));
}
/**
 * Assembles the 6-vertex example graph in memory. Every edge carries two
 * cost components, c1 and c2, and is labeled "(c1, c2)".
 */
private static Graph buildGraph() {
    Graph graph = new TinkerGraph();
    Vertex v1 = graph.addVertex("v1");
    Vertex v2 = graph.addVertex("v2");
    Vertex v3 = graph.addVertex("v3");
    Vertex v4 = graph.addVertex("v4");
    Vertex v5 = graph.addVertex("v5");
    Vertex v6 = graph.addVertex("v6");
    addCostEdge(graph, "e1", v1, v2, 7, 1);
    addCostEdge(graph, "e2", v1, v3, 1, 4);
    addCostEdge(graph, "e3", v1, v4, 2, 1);
    addCostEdge(graph, "e4", v2, v4, 1, 1);
    addCostEdge(graph, "e5", v2, v6, 2, 1);
    addCostEdge(graph, "e6", v3, v4, 1, 1);
    addCostEdge(graph, "e7", v4, v5, 3, 2);
    addCostEdge(graph, "e8", v4, v6, 4, 8);
    addCostEdge(graph, "e9", v5, v6, 1, 1);
    return graph;
}

/** Creates an edge labeled "(c1, c2)" and attaches both cost properties. */
private static void addCostEdge(Graph graph, String id, Vertex from, Vertex to, int c1, int c2) {
    Edge edge = graph.addEdge(id, from, to, "(" + c1 + ", " + c2 + ")");
    edge.setProperty("c1", c1);
    edge.setProperty("c2", c2);
}
/**
 * Two-component additive cost. compareTo implements Pareto dominance:
 * -1 if this cost dominates o, 1 if o dominates this, 0 for non-dominated
 * pairs (so compareTo is deliberately NOT consistent with equals -- two
 * different non-dominated costs compare as 0).
 */
static class Cost implements Comparable<Cost> {
    private double c1;
    private double c2;

    public Cost(double c1, double c2) {
        this.c1 = c1;
        this.c2 = c2;
    }

    @Override
    public String toString() {
        return "Cost{" +
                "c1=" + c1 +
                ", c2=" + c2 +
                '}';
    }

    @Override
    public int compareTo(Cost o) {
        if (c1 <= o.c1 && c2 <= o.c2) {
            // this is at least as good on both components; strictly better on one
            if (c1 < o.c1 || c2 < o.c2) {
                return -1;
            }
        } else if (o.c1 <= c1 && o.c2 <= c2) { // fixed: was "o.c2 <= o.c2" (self-comparison, always true)
            if (o.c1 < c1 || o.c2 < c2) {
                return 1;
            }
        }
        // Non-dominated
        return 0;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        Cost cost = (Cost) o;
        if (Double.compare(cost.c1, c1) != 0) return false;
        if (Double.compare(cost.c2, c2) != 0) return false;
        return true;
    }

    @Override
    public int hashCode() {
        int result;
        long temp;
        temp = Double.doubleToLongBits(c1);
        result = (int) (temp ^ (temp >>> 32));
        temp = Double.doubleToLongBits(c2);
        result = 31 * result + (int) (temp ^ (temp >>> 32));
        return result;
    }
}
}
|
package com.pchmn.materialchips.adapter;
import android.content.Context;
import android.os.Build;
import android.support.v7.widget.RecyclerView;
import android.text.Editable;
import android.text.InputType;
import android.text.TextWatcher;
import android.util.Log;
import android.view.KeyEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewTreeObserver;
import android.view.inputmethod.EditorInfo;
import android.widget.EditText;
import android.widget.RelativeLayout;
import com.pchmn.materialchips.ChipView;
import com.pchmn.materialchips.ChipsInput;
import com.pchmn.materialchips.model.ChipInterface;
import com.pchmn.materialchips.views.ChipsInputEditText;
import com.pchmn.materialchips.views.DetailedChipView;
import com.pchmn.materialchips.model.Chip;
import com.pchmn.materialchips.util.ViewUtil;
import com.pchmn.materialchips.views.FilterableListView;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
public class ChipsAdapter extends RecyclerView.Adapter<RecyclerView.ViewHolder> {
private static final String TAG = ChipsAdapter.class.toString();
private static final int TYPE_EDIT_TEXT = 0;
private static final int TYPE_ITEM = 1;
private Context mContext;
private ChipsInput mChipsInput;
private List<ChipInterface> mChipList = new ArrayList<>();
private String mHintLabel;
private ChipsInputEditText mEditText;
private RecyclerView mRecycler;
/**
 * Adapter backing the ChipsInput recycler: one row per chip plus a trailing
 * row hosting the shared ChipsInputEditText obtained from the ChipsInput.
 */
public ChipsAdapter(Context context, ChipsInput chipsInput, RecyclerView recycler) {
    mContext = context;
    mChipsInput = chipsInput;
    mRecycler = recycler;
    mHintLabel = mChipsInput.getHint();
    mEditText = mChipsInput.getEditText();
    initEditText();
}
// Holder for a chip row; the row's root view IS the ChipView itself.
private class ItemViewHolder extends RecyclerView.ViewHolder {
    private final ChipView chipView;
    ItemViewHolder(View view) {
        super(view);
        chipView = (ChipView) view;
    }
}
// Holder for the single trailing row that hosts the shared EditText.
private class EditTextViewHolder extends RecyclerView.ViewHolder {
    private final EditText editText;
    EditTextViewHolder(View view) {
        super(view);
        editText = (EditText) view;
    }
}
@Override
public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
    // The trailing row reuses the one shared EditText; every other row gets
    // a fresh ChipView from the ChipsInput.
    return viewType == TYPE_EDIT_TEXT
            ? new EditTextViewHolder(mEditText)
            : new ItemViewHolder(mChipsInput.getChipView());
}
@Override
public void onBindViewHolder(final RecyclerView.ViewHolder holder, int position) {
    // edit text (always bound at the last position)
    if(position == mChipList.size()) {
        // show the hint only while no chip has been added yet
        if(mChipList.size() == 0)
            mEditText.setHint(mHintLabel);
        // auto fit edit text
        autofitEditText();
    }
    // chip
    else if(getItemCount() > 1) {
        ItemViewHolder itemViewHolder = (ItemViewHolder) holder;
        itemViewHolder.chipView.inflate(getItem(position));
        // handle click
        // NOTE(review): the adapter position is captured by value in the click
        // listeners; after removals it can go stale for already-bound rows -- verify.
        handleClickOnEditText(itemViewHolder.chipView, position);
    }
}
@Override
public int getItemCount() {
    // One extra row for the trailing EditText.
    return 1 + mChipList.size();
}
// Returns the chip backing the given adapter position. Only valid for
// positions < mChipList.size(); the last position is the EditText row.
private ChipInterface getItem(int position) {
    return mChipList.get(position);
}
@Override
public int getItemViewType(int position) {
    // The last position is reserved for the EditText row.
    return position == mChipList.size() ? TYPE_EDIT_TEXT : TYPE_ITEM;
}
@Override
public long getItemId(int position) {
    // The trailing EditText row has no backing chip: mChipList.get(position)
    // would throw IndexOutOfBoundsException there when stable IDs are enabled,
    // so report "no id" for that row instead.
    if (position == mChipList.size()) {
        return RecyclerView.NO_ID;
    }
    return mChipList.get(position).hashCode();
}
/**
 * One-time configuration of the shared EditText: layout, hint, IME flags,
 * backspace-to-delete-last-chip handling and text-change forwarding.
 */
private void initEditText() {
    mEditText.setLayoutParams(new RelativeLayout.LayoutParams(
            ViewGroup.LayoutParams.WRAP_CONTENT,
            ViewGroup.LayoutParams.WRAP_CONTENT));
    mEditText.setHint(mHintLabel);
    mEditText.setBackgroundResource(android.R.color.transparent);
    // prevent fullscreen on landscape
    mEditText.setImeOptions(EditorInfo.IME_FLAG_NO_EXTRACT_UI);
    mEditText.setPrivateImeOptions("nm");
    // no suggestion
    mEditText.setInputType(InputType.TYPE_TEXT_VARIATION_FILTER | InputType.TYPE_TEXT_FLAG_NO_SUGGESTIONS);
    // handle back space
    mEditText.setOnKeyListener(new View.OnKeyListener() {
        @Override
        public boolean onKey(View v, int keyCode, KeyEvent event) {
            // backspace
            if(event.getAction() == KeyEvent.ACTION_DOWN
                    && event.getKeyCode() == KeyEvent.KEYCODE_DEL) {
                // remove last chip, but only when the text field is already empty
                if(mChipList.size() > 0 && mEditText.getText().toString().length() == 0)
                    removeChip(mChipList.size() - 1);
            }
            // returning false lets the EditText also process the key normally
            return false;
        }
    });
    // text changed: forward to ChipsInput (drives suggestion filtering)
    mEditText.addTextChangedListener(new TextWatcher() {
        @Override
        public void beforeTextChanged(CharSequence s, int start, int count, int after) {
        }
        @Override
        public void onTextChanged(CharSequence s, int start, int before, int count) {
            mChipsInput.onTextChanged(s);
        }
        @Override
        public void afterTextChanged(Editable s) {
        }
    });
}
/**
 * Stretches the EditText to fill the space remaining on its row (right edge
 * of the recycler minus the EditText's left edge), once the next layout pass
 * has produced real coordinates.
 */
private void autofitEditText() {
    // min width of edit text = 50 dp
    ViewGroup.LayoutParams params = mEditText.getLayoutParams();
    params.width = ViewUtil.dpToPx(50);
    mEditText.setLayoutParams(params);
    // listen to change in the tree
    mEditText.getViewTreeObserver().addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
        @Override
        public void onGlobalLayout() {
            // get right of recycler and left of edit text
            int right = mRecycler.getRight();
            int left = mEditText.getLeft();
            // edit text will fill the space (8 dp right margin)
            ViewGroup.LayoutParams params = mEditText.getLayoutParams();
            params.width = right - left - ViewUtil.dpToPx(8);
            mEditText.setLayoutParams(params);
            // request focus
            mEditText.requestFocus();
            // remove the listener (one-shot); the removal API was renamed in
            // Jelly Bean, hence the SDK branch
            if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) {
                mEditText.getViewTreeObserver().removeGlobalOnLayoutListener(this);
            } else {
                mEditText.getViewTreeObserver().removeOnGlobalLayoutListener(this);
            }
        }
    });
}
/**
 * Wires a chip row's delete button and (optionally) its detailed popup view.
 *
 * NOTE(review): {@code position} is captured at bind time; if chips are
 * removed afterwards the captured index can point at the wrong chip until
 * the row is rebound -- verify whether this is acceptable here.
 */
private void handleClickOnEditText(ChipView chipView, final int position) {
    // delete chip
    chipView.setOnDeleteClicked(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            removeChip(position);
        }
    });
    // show detailed chip
    if(mChipsInput.isShowChipDetailed()) {
        chipView.setOnChipClicked(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // get chip position on screen
                int[] coord = new int[2];
                v.getLocationInWindow(coord);
                final DetailedChipView detailedChipView = mChipsInput.getDetailedChipView(getItem(position));
                setDetailedChipViewPosition(detailedChipView, coord);
                // delete button inside the detailed view
                detailedChipView.setOnDeleteClicked(new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        removeChip(position);
                        detailedChipView.fadeOut();
                    }
                });
            }
        });
    }
}
/**
 * Positions the 300x100 dp detailed popup relative to the clicked chip's
 * window coordinates, clamping it to the left or right screen edge, then
 * attaches it to the root view and fades it in.
 */
private void setDetailedChipViewPosition(DetailedChipView detailedChipView, int[] coord) {
    // window width
    ViewGroup rootView = (ViewGroup) mRecycler.getRootView();
    int windowWidth = ViewUtil.getWindowWidth(mContext);
    // chip size
    RelativeLayout.LayoutParams layoutParams = new RelativeLayout.LayoutParams(
            ViewUtil.dpToPx(300),
            ViewUtil.dpToPx(100));
    layoutParams.addRule(RelativeLayout.ALIGN_PARENT_TOP);
    layoutParams.addRule(RelativeLayout.ALIGN_PARENT_LEFT);
    // align left window
    if(coord[0] <= 0) {
        layoutParams.leftMargin = 0;
        layoutParams.topMargin = coord[1] - ViewUtil.dpToPx(13);
        detailedChipView.alignLeft();
    }
    // align right
    else if(coord[0] + ViewUtil.dpToPx(300) > windowWidth + ViewUtil.dpToPx(13)) {
        layoutParams.leftMargin = windowWidth - ViewUtil.dpToPx(300);
        layoutParams.topMargin = coord[1] - ViewUtil.dpToPx(13);
        detailedChipView.alignRight();
    }
    // same position as chip (13 dp offset so the popup overlaps the chip)
    else {
        layoutParams.leftMargin = coord[0] - ViewUtil.dpToPx(13);
        layoutParams.topMargin = coord[1] - ViewUtil.dpToPx(13);
    }
    // show view
    rootView.addView(detailedChipView, layoutParams);
    detailedChipView.fadeIn();
}
/**
 * Forwards the filterable suggestion list to the embedded edit text.
 * Silently ignored while the editor has not been created yet.
 */
public void setFilterableListView(FilterableListView filterableListView) {
    if (mEditText == null) {
        return;
    }
    mEditText.setFilterableListView(filterableListView);
}
/**
 * Appends a chip to the list unless an equivalent one is already present
 * (equivalence decided by {@code listContains}), notifies the host input,
 * clears the pending text and hides the hint.
 */
public void addChip(ChipInterface chip) {
    // duplicates are silently ignored
    if (listContains(mChipList, chip)) {
        return;
    }
    mChipList.add(chip);
    // notify listener of the new chip count
    mChipsInput.onChipAdded(chip, mChipList.size());
    // hide hint once at least one chip exists
    mEditText.setHint(null);
    // reset the pending input text
    mEditText.setText(null);
    // refresh data
    notifyItemInserted(mChipList.size());
}
/**
 * Removes the given chip from the list, if present.
 *
 * Fixes two defects in the previous implementation:
 * 1. {@code indexOf} returns -1 for an unknown chip, and
 *    {@code mChipList.remove(-1)} threw IndexOutOfBoundsException — now a no-op.
 * 2. The listener callback {@code onChipRemoved} was never fired here, unlike
 *    every other remove method ({@code removeChip(int)}, {@code removeChipById},
 *    {@code removeChipByLabel}, {@code removeChipByInfo}) — now consistent.
 */
public void removeChip(ChipInterface chip) {
    int position = mChipList.indexOf(chip);
    if (position < 0) {
        // chip not in the list; nothing to remove
        return;
    }
    mChipList.remove(position);
    // notify listener, consistent with the other remove* methods
    mChipsInput.onChipRemoved(chip, mChipList.size());
    notifyItemRangeChanged(position, getItemCount());
    // if 0 chip, restore the hint
    if (mChipList.size() == 0)
        mEditText.setHint(mHintLabel);
    // refresh data
    notifyDataSetChanged();
}
/**
 * Removes the chip at the given adapter position, notifies the host input,
 * and restores the hint when the list becomes empty.
 */
public void removeChip(int position) {
    // remove the chip at this position
    ChipInterface removed = mChipList.get(position);
    mChipList.remove(position);
    // notify listener with the new chip count
    mChipsInput.onChipRemoved(removed, mChipList.size());
    // restore the hint once no chips remain
    if (mChipList.size() == 0)
        mEditText.setHint(mHintLabel);
    // refresh data
    notifyDataSetChanged();
}
/**
 * Removes every chip whose id equals the given id (null ids never match),
 * firing the listener once per removed chip.
 */
public void removeChipById(Object id) {
    Iterator<ChipInterface> it = mChipList.listIterator();
    while (it.hasNext()) {
        ChipInterface candidate = it.next();
        Object candidateId = candidate.getId();
        if (candidateId != null && candidateId.equals(id)) {
            // remove chip and notify listener
            it.remove();
            mChipsInput.onChipRemoved(candidate, mChipList.size());
        }
    }
    // restore the hint once no chips remain
    if (mChipList.size() == 0)
        mEditText.setHint(mHintLabel);
    // refresh data
    notifyDataSetChanged();
}
/**
 * Removes every chip whose label equals the given label,
 * firing the listener once per removed chip.
 */
public void removeChipByLabel(String label) {
    Iterator<ChipInterface> it = mChipList.listIterator();
    while (it.hasNext()) {
        ChipInterface candidate = it.next();
        if (candidate.getLabel().equals(label)) {
            // remove chip and notify listener
            it.remove();
            mChipsInput.onChipRemoved(candidate, mChipList.size());
        }
    }
    // restore the hint once no chips remain
    if (mChipList.size() == 0)
        mEditText.setHint(mHintLabel);
    // refresh data
    notifyDataSetChanged();
}
/**
 * Removes every chip whose info string equals the given info (null infos never
 * match), firing the listener once per removed chip.
 */
public void removeChipByInfo(String info) {
    Iterator<ChipInterface> it = mChipList.listIterator();
    while (it.hasNext()) {
        ChipInterface candidate = it.next();
        String candidateInfo = candidate.getInfo();
        if (candidateInfo != null && candidateInfo.equals(info)) {
            // remove chip and notify listener
            it.remove();
            mChipsInput.onChipRemoved(candidate, mChipList.size());
        }
    }
    // restore the hint once no chips remain
    if (mChipList.size() == 0)
        mEditText.setHint(mHintLabel);
    // refresh data
    notifyDataSetChanged();
}
// Returns the live internal chip list (not a copy).
// NOTE(review): callers can mutate adapter state through this reference without
// any notify* call — consider returning an unmodifiable view if no caller
// relies on mutation.
public List<ChipInterface> getChipList() {
    return mChipList;
}
/**
 * Returns true when the list already holds a chip equivalent to {@code chip}.
 * When a custom validator is configured it fully defines equivalence;
 * otherwise chips match on equal non-null ids or on equal labels.
 */
private boolean listContains(List<ChipInterface> contactList, ChipInterface chip) {
    if (mChipsInput.getChipValidator() != null) {
        for (ChipInterface candidate : contactList) {
            if (mChipsInput.getChipValidator().areEquals(candidate, chip)) {
                return true;
            }
        }
        return false;
    }
    for (ChipInterface candidate : contactList) {
        boolean sameId = chip.getId() != null && chip.getId().equals(candidate.getId());
        if (sameId || chip.getLabel().equals(candidate.getLabel())) {
            return true;
        }
    }
    return false;
}
}
|
package com.ociweb.iot.metronome;
import com.ociweb.iot.grove.Grove_LCD_RGB;
import com.ociweb.iot.maker.AnalogListener;
import com.ociweb.iot.maker.CommandChannel;
import com.ociweb.iot.maker.DigitalListener;
import com.ociweb.iot.maker.DeviceRuntime;
import com.ociweb.iot.maker.PayloadReader;
import com.ociweb.iot.maker.PubSubListener;
import com.ociweb.iot.maker.StartupListener;
/*
 * Beats per minute (build an ENUM of these so we can display the names on the screen).
*
* Largo 40-60
* Larghetto 60-66
* Adagio 66-76
* Andante 76-108
* Moderato 108-120
* Allegro 120-168
* Presto 168-200
* Prestissimo 200-208
*
*
* 1 minute = 60_000 ms
* 40 BPM = 1500ms
* 300 BPM = 200ms required (max err +-2ms)
* 600 BPM = 100ms nice (max err +-1ms)
*
* Test at 40, 60, 120 and 208, the error must be < 1%
*
*
*/
/**
 * Metronome driven by an analog dial: the dial selects a tempo in
 * [40, 208] BPM, a self-republishing "tick" topic schedules buzzer pulses,
 * and the LCD shows the currently selected tempo.
 *
 * Fix: private constants were misspelled {@code BBM_*}; they denote
 * beats-per-minute and are now named {@code BPM_*} (internal rename only,
 * no behavior change).
 */
public class MetronomeBehavior implements AnalogListener, PubSubListener, StartupListener {

    private final CommandChannel commandChannel;
    private final String topic = "tick";

    // Supported tempo range in beats per minute.
    private static final int BPM_SLOWEST = 40;
    private static final int BPM_FASTEST = 208;
    private static final int BPM_VALUES = 1 + BPM_FASTEST - BPM_SLOWEST;
    // Raw range of the analog dial reading.
    private static final int MAX_ANGLE_VALUE = 1024;

    private long base;            // wall-clock time of beat 0; 0 means "resync on next tick"
    private int beatIdx;          // beats emitted since 'base'
    private int activeBPM;        // tempo currently driving the buzzer
    private long timeOfNewValue;  // when the dial last produced a new candidate tempo
    private int tempBPM;          // candidate tempo awaiting ~100 ms of stability

    public MetronomeBehavior(DeviceRuntime runtime) {
        commandChannel = runtime.newCommandChannel();
    }

    @Override
    public void startup() {
        commandChannel.subscribe(topic, this);
        // prime the tick loop; message() republishes to keep it running
        commandChannel.openTopic(topic).publish();
        Grove_LCD_RGB.commandForColor(commandChannel, 255, 255, 255);
    }

    @Override
    public void analogEvent(int connector, long time, int average, int value) {
        // Map the dial position linearly onto [BPM_SLOWEST, BPM_FASTEST].
        int newBPM = BPM_SLOWEST + ((BPM_VALUES * value) / MAX_ANGLE_VALUE);
        if (newBPM != tempBPM) {
            timeOfNewValue = System.currentTimeMillis();
            tempBPM = newBPM;
            String message = " BPM " + tempBPM;
            // Can not call frequently or we get stack trace error.
            Grove_LCD_RGB.commandForText(commandChannel, message);
        } else {
            // Debounce: adopt the tempo only after it has been stable for 100 ms.
            if (System.currentTimeMillis() - timeOfNewValue > 100) {
                if (tempBPM != activeBPM) {
                    activeBPM = tempBPM;
                    base = 0; // reset signal
                }
            }
        }
    }

    @Override
    public void message(CharSequence topic, PayloadReader payload) {
        commandChannel.openTopic(topic).publish(); // request next tick while we get this one ready
        if (activeBPM > 0) {
            if (0 == base) {
                base = System.currentTimeMillis();
                beatIdx = 0;
            }
            // Schedule each beat from the fixed base to avoid cumulative drift.
            long delta = (++beatIdx * 60_000L) / activeBPM;
            long until = base + delta;
            commandChannel.digitalPulse(IoTApp.BUZZER_CONNECTION);
            commandChannel.blockUntil(IoTApp.BUZZER_CONNECTION, until); // mark connection as blocked until
            if (beatIdx == activeBPM) {
                // One full minute elapsed: roll the base forward, keep indices small.
                beatIdx = 0;
                base += 60_000;
            }
        }
    }
}
|
package com.buschmais.xo.impl.proxy.common.property;
import com.buschmais.xo.api.metadata.method.PrimitivePropertyMethodMetadata;
import com.buschmais.xo.impl.AbstractPropertyManager;
import com.buschmais.xo.impl.converter.ValueConverter;
/**
 * Proxy method implementation for primitive-property getters: reads the raw
 * datastore value (or null when absent) and converts it to the property's
 * declared Java type.
 */
public abstract class AbstractPrimitivePropertyGetMethod<DatastoreType, PropertyManager extends AbstractPropertyManager<DatastoreType>>
        extends AbstractPropertyMethod<DatastoreType, PropertyManager, PrimitivePropertyMethodMetadata> {

    public AbstractPrimitivePropertyGetMethod(PropertyManager propertyManager, PrimitivePropertyMethodMetadata metadata) {
        super(propertyManager, metadata);
    }

    @Override
    public Object invoke(DatastoreType datastoreType, Object instance, Object[] args) {
        PrimitivePropertyMethodMetadata<?> propertyMetadata = getMetadata();
        PropertyManager manager = getPropertyManager();
        // Absent properties convert from null rather than throwing.
        Object rawValue = null;
        if (manager.hasProperty(datastoreType, propertyMetadata)) {
            rawValue = manager.getProperty(datastoreType, propertyMetadata);
        }
        Class<?> propertyType = propertyMetadata.getAnnotatedMethod().getType();
        return ValueConverter.convert(rawValue, propertyType);
    }
}
|
package com.github.pocmo.sensordashboard;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentStatePagerAdapter;
import android.support.v4.view.ViewPager;
import android.support.v7.app.ActionBar;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.WindowManager;
import android.widget.EditText;
import android.widget.Toast;
import com.github.pocmo.sensordashboard.data.Sensor;
import com.github.pocmo.sensordashboard.events.BusProvider;
import com.github.pocmo.sensordashboard.events.NewSensorEvent;
import com.squareup.otto.Subscribe;
import java.util.List;
public class MainActivity extends AppCompatActivity {
private RemoteSensorManager remoteSensorManager;
Toolbar mToolbar;
private ViewPager pager;
private View emptyState;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
mToolbar = (Toolbar) findViewById(R.id.my_awesome_toolbar);
emptyState = findViewById(R.id.empty_state);
initToolbar();
initViewPager();
remoteSensorManager = RemoteSensorManager.getInstance(this);
final EditText tagname = (EditText) findViewById(R.id.tagname);
findViewById(R.id.tag_button).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
String tagnameText = "EMPTY";
if (!tagname.getText().toString().isEmpty()) {
tagnameText = tagname.getText().toString();
}
RemoteSensorManager.getInstance(MainActivity.this).addTag(tagnameText);
}
});
getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_HIDDEN);
}
private void initToolbar() {
setSupportActionBar(mToolbar);
final ActionBar ab = getSupportActionBar();
if (ab != null) {
ab.setDisplayHomeAsUpEnabled(false);
ab.setTitle(R.string.app_name);
mToolbar.setOnMenuItemClickListener(new Toolbar.OnMenuItemClickListener() {
@Override
public boolean onMenuItemClick(MenuItem item) {
switch (item.getItemId()) {
case R.id.action_about:
startActivity(new Intent(MainActivity.this, AboutActivity.class));
return true;
}
return true;
}
});
}
}
private void initViewPager() {
pager = (ViewPager) findViewById(R.id.pager);
pager.setOnPageChangeListener(new ViewPager.OnPageChangeListener() {
@Override
public void onPageScrolled(int i, float v, int i2) {
}
@Override
public void onPageSelected(int id) {
ScreenSlidePagerAdapter adapter = (ScreenSlidePagerAdapter) pager.getAdapter();
if (adapter != null) {
Sensor sensor = adapter.getItemObject(id);
if (sensor != null) {
remoteSensorManager.filterBySensorId((int) sensor.getId());
}
}
}
@Override
public void onPageScrollStateChanged(int i) {
}
});
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
@Override
protected void onResume() {
super.onResume();
BusProvider.getInstance().register(this);
List<Sensor> sensors = RemoteSensorManager.getInstance(this).getSensors();
pager.setAdapter(new ScreenSlidePagerAdapter(getSupportFragmentManager(), sensors));
if (sensors.size() > 0) {
emptyState.setVisibility(View.GONE);
} else {
emptyState.setVisibility(View.VISIBLE);
}
remoteSensorManager.startMeasurement();
}
@Override
protected void onPause() {
super.onPause();
BusProvider.getInstance().unregister(this);
remoteSensorManager.stopMeasurement();
}
private class ScreenSlidePagerAdapter extends FragmentStatePagerAdapter {
private List<Sensor> sensors;
public ScreenSlidePagerAdapter(FragmentManager fm, List<Sensor> symbols) {
super(fm);
this.sensors = symbols;
}
public void addNewSensor(Sensor sensor) {
this.sensors.add(sensor);
}
private Sensor getItemObject(int position) {
return sensors.get(position);
}
@Override
public android.support.v4.app.Fragment getItem(int position) {
return SensorFragment.newInstance(sensors.get(position).getId());
}
@Override
public int getCount() {
return sensors.size();
}
}
private void notifyUSerForNewSensor(Sensor sensor) {
Toast.makeText(this, "New Sensor!\n" + sensor.getName(), Toast.LENGTH_SHORT).show();
}
@Subscribe
public void onNewSensorEvent(final NewSensorEvent event) {
((ScreenSlidePagerAdapter) pager.getAdapter()).addNewSensor(event.getSensor());
pager.getAdapter().notifyDataSetChanged();
emptyState.setVisibility(View.GONE);
notifyUSerForNewSensor(event.getSensor());
}
}
|
package org.jboss.resteasy.test.regression;
import static org.junit.Assert.assertNotNull;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.sql.Date;
import java.util.List;
import javax.ws.rs.Priorities;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.container.ContainerRequestFilter;
import javax.ws.rs.container.ContainerResponseContext;
import javax.ws.rs.container.ContainerResponseFilter;
import org.jboss.resteasy.core.interception.JaxrsInterceptorRegistry.InterceptorFactory;
import org.jboss.resteasy.spi.ResteasyProviderFactory;
import org.jboss.resteasy.spi.StringParameterUnmarshaller;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/**
* resteasy-584
*
* @author <a href="mailto:[email protected]">Bill Burke</a>
* @version $Revision: 1 $
*/
public class ResteasyProviderFactoryTest
{
   // Fresh factory per test so registrations don't leak between cases.
   private ResteasyProviderFactory factory;
   @Before
   public void createBean() {
      factory = new ResteasyProviderFactory();
   }
   // RESTEASY-584: a registered StringParameterUnmarshaller must be resolvable
   // by the parameter type it was declared for (here: java.sql.Date).
   @Test
   public void shouldReturnStringParameterUnmarshallerAddedForType() {
      factory.addStringParameterUnmarshaller(MyStringParameterUnmarshaller.class);
      assertNotNull(factory.createStringParameterUnmarshaller(Date.class));
   }
   // Minimal no-op unmarshaller used only to exercise type-based lookup.
   public static class MyStringParameterUnmarshaller implements StringParameterUnmarshaller<Date>
   {
      @Override
      public void setAnnotations(Annotation[] annotations) {
      }
      @Override
      public Date fromString(String str) {
         return null;
      }
   }
   /**
    * Test case for bug RESTEASY-1311.
    * Test whether the priority is supplied to the container request filter registry.
    */
   @Test
   public void testRegisterProviderInstancePriorityContainerRequestFilter() throws Exception {
      ContainerRequestFilter requestFilter = new ContainerRequestFilter() {
         public void filter(ContainerRequestContext requestContext) {}
      };
      this.testRegisterProviderInstancePriority(requestFilter, factory.getContainerRequestFilterRegistry());
   }
   /**
    * Test case for bug RESTEASY-1311.
    * Test whether the priority is supplied to the container response filter registry.
    */
   @Test
   public void testRegisterProviderInstancePriorityContainerResponseFilter() throws Exception {
      ContainerResponseFilter responseFilter = new ContainerResponseFilter() {
         public void filter(ContainerRequestContext requestContext, ContainerResponseContext responseContext) {}
      };
      this.testRegisterProviderInstancePriority(responseFilter, factory.getContainerResponseFilterRegistry());
   }
   /**
    * Generic helper method for RESTEASY-1311 cases, because the test logic is the same.
    * Unfortunately, there seems to be no public accessors for the properties we need,
    * so we have to resort to using reflection to check the right priority setting.
    *
    * NOTE(review): relies on private field names "interceptors" (registry superclass)
    * and "order" (interceptor superclass) — brittle against RESTEasy upgrades; confirm
    * the field names still exist when bumping the dependency.
    */
   private void testRegisterProviderInstancePriority(Object filter, Object registry) throws Exception {
      int priorityOverride = Priorities.USER + 1;
      factory.registerProviderInstance(filter, null, priorityOverride, false);
      Field interceptorsField = registry.getClass().getSuperclass().getDeclaredField("interceptors");
      interceptorsField.setAccessible(true);
      @SuppressWarnings("unchecked")
      List<InterceptorFactory> interceptors = (List<InterceptorFactory>) interceptorsField.get(registry);
      Field orderField = interceptors.get(0).getClass().getSuperclass().getDeclaredField("order");
      orderField.setAccessible(true);
      int order = (Integer) orderField.get(interceptors.get(0));
      Assert.assertEquals(priorityOverride, order);
   }
}
|
package org.motechproject.nms.kilkari.service.impl;
import org.joda.time.DateTime;
import org.joda.time.LocalDate;
import org.motechproject.metrics.service.Timer;
import org.motechproject.nms.csv.exception.CsvImportDataException;
import org.motechproject.nms.csv.utils.ConstraintViolationUtils;
import org.motechproject.nms.csv.utils.CsvImporterBuilder;
import org.motechproject.nms.csv.utils.CsvMapImporter;
import org.motechproject.nms.csv.utils.GetInstanceByString;
import org.motechproject.nms.csv.utils.GetString;
import org.motechproject.nms.kilkari.domain.DeactivationReason;
import org.motechproject.nms.kilkari.domain.MctsChild;
import org.motechproject.nms.kilkari.domain.MctsMother;
import org.motechproject.nms.kilkari.domain.Subscriber;
import org.motechproject.nms.kilkari.domain.Subscription;
import org.motechproject.nms.kilkari.domain.SubscriptionError;
import org.motechproject.nms.kilkari.domain.SubscriptionPack;
import org.motechproject.nms.kilkari.domain.SubscriptionPackType;
import org.motechproject.nms.kilkari.domain.SubscriptionRejectionReason;
import org.motechproject.nms.kilkari.domain.SubscriptionStatus;
import org.motechproject.nms.kilkari.exception.MultipleSubscriberException;
import org.motechproject.nms.kilkari.repository.MctsMotherDataService;
import org.motechproject.nms.kilkari.repository.SubscriptionErrorDataService;
import org.motechproject.nms.kilkari.service.MctsBeneficiaryImportService;
import org.motechproject.nms.kilkari.service.MctsBeneficiaryValueProcessor;
import org.motechproject.nms.kilkari.service.SubscriberService;
import org.motechproject.nms.kilkari.service.SubscriptionService;
import org.motechproject.nms.kilkari.utils.KilkariConstants;
import org.motechproject.nms.kilkari.utils.MctsBeneficiaryUtils;
import org.motechproject.nms.region.exception.InvalidLocationException;
import org.motechproject.nms.region.service.LocationService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.supercsv.cellprocessor.Optional;
import org.supercsv.cellprocessor.ift.CellProcessor;
import org.supercsv.prefs.CsvPreference;
import javax.validation.ConstraintViolationException;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.Reader;
import java.util.HashMap;
import java.util.Map;
/**
* Implementation of the {@link MctsBeneficiaryImportService} interface.
*/
@Service("mctsBeneficiaryImportService")
public class MctsBeneficiaryImportServiceImpl implements MctsBeneficiaryImportService {
private static final Logger LOGGER = LoggerFactory.getLogger(MctsBeneficiaryImportServiceImpl.class);
private SubscriptionService subscriptionService;
private SubscriptionErrorDataService subscriptionErrorDataService;
private LocationService locationService;
private SubscriberService subscriberService;
private MctsBeneficiaryValueProcessor mctsBeneficiaryValueProcessor;
private SubscriptionPack pregnancyPack;
private SubscriptionPack childPack;
private MctsMotherDataService mctsMotherDataService;
// Constructor injection of all collaborating services; no work is performed
// here beyond field assignment. pregnancyPack/childPack are resolved lazily
// on first import call.
@Autowired
public MctsBeneficiaryImportServiceImpl(SubscriptionService subscriptionService,
                                        SubscriptionErrorDataService subscriptionErrorDataService,
                                        LocationService locationService, SubscriberService subscriberService,
                                        MctsBeneficiaryValueProcessor mctsBeneficiaryValueProcessor,
                                        MctsMotherDataService mctsMotherDataService) {
    this.subscriptionService = subscriptionService;
    this.subscriptionErrorDataService = subscriptionErrorDataService;
    this.locationService = locationService;
    this.subscriberService = subscriberService;
    this.mctsBeneficiaryValueProcessor = mctsBeneficiaryValueProcessor;
    this.mctsMotherDataService = mctsMotherDataService;
}
/**
 * Imports MCTS mother records from a tab-separated stream and returns the
 * number of successfully imported rows.
 *
 * Expected file format:
 * - any number of empty lines
 * - header lines in the following format: State Name : ACTUAL STATE_ID NAME
 * - one empty line
 * - CSV data (tab-separated)
 */
@Override
@Transactional
public int importMotherData(Reader reader) throws IOException {
    pregnancyPack = subscriptionService.getSubscriptionPack(SubscriptionPackType.PREGNANCY);
    int count = 0;
    /**
     * Count of all the records rejected for unknown exceptions. So, doesn't include the ones saved in nms_subscription_errors.
     * This is used just for debugging purpose.
     */
    int rejected = 0;
    BufferedReader bufferedReader = new BufferedReader(reader);
    CsvMapImporter csvImporter = new CsvImporterBuilder()
            .setProcessorMapping(getMotherProcessorMapping())
            .setPreferences(CsvPreference.TAB_PREFERENCE)
            .createAndOpen(bufferedReader);
    try {
        Map<String, Object> record;
        Timer timer = new Timer("mom", "moms");
        while (null != (record = csvImporter.read())) {
            MctsMother mother = (MctsMother) record.get(KilkariConstants.BENEFICIARY_ID);
            LOGGER.debug("Started import for msisdn {} beneficiary_id {}", record.get(KilkariConstants.MSISDN), mother.getBeneficiaryId());
            try {
                importMotherRecord(record);
                count++;
                // periodic progress logging
                if (count % KilkariConstants.PROGRESS_INTERVAL == 0) {
                    LOGGER.debug(KilkariConstants.IMPORTED, timer.frequency(count));
                }
            } catch (Exception e) {
                // a single bad row must not abort the whole import
                LOGGER.error("Error at msisdn {} beneficiary_id {}", record.get(KilkariConstants.MSISDN), mother.getBeneficiaryId(), e);
                rejected++;
            }
        }
        // final progress line, unless already logged at the interval boundary
        if (count % KilkariConstants.PROGRESS_INTERVAL != 0) {
            LOGGER.debug(KilkariConstants.IMPORTED, timer.frequency(count));
        }
        if (rejected != 0) {
            LOGGER.debug(KilkariConstants.REJECTED, timer.frequency(rejected));
        }
    } catch (ConstraintViolationException e) {
        throw new CsvImportDataException(String.format("MCTS mother import error, constraints violated: %s",
                ConstraintViolationUtils.toString(e.getConstraintViolations())), e);
    }
    return count;
}
// Imports MCTS child records from a tab-separated stream; mirrors
// importMotherData (same file format, same error policy) but with the
// child processor mapping. Returns the number of successfully imported rows.
@Transactional
public int importChildData(Reader reader) throws IOException {
    childPack = subscriptionService.getSubscriptionPack(SubscriptionPackType.CHILD);
    int count = 0;
    /**
     * Count of all the records rejected for unknown exceptions. So, doesn't include the ones saved in nms_subscription_errors.
     * This is used just for debugging purpose.
     */
    int rejected = 0;
    BufferedReader bufferedReader = new BufferedReader(reader);
    CsvMapImporter csvImporter = new CsvImporterBuilder()
            .setProcessorMapping(getChildProcessorMapping())
            .setPreferences(CsvPreference.TAB_PREFERENCE)
            .createAndOpen(bufferedReader);
    try {
        Map<String, Object> record;
        Timer timer = new Timer("kid", "kids");
        while (null != (record = csvImporter.read())) {
            MctsChild child = (MctsChild) record.get(KilkariConstants.BENEFICIARY_ID);
            LOGGER.debug("Started import for msisdn {} beneficiary_id {}", record.get(KilkariConstants.MSISDN), child.getBeneficiaryId());
            try {
                importChildRecord(record);
                count++;
                // periodic progress logging
                if (count % KilkariConstants.PROGRESS_INTERVAL == 0) {
                    LOGGER.debug(KilkariConstants.IMPORTED, timer.frequency(count));
                }
            } catch (Exception e) {
                // a single bad row must not abort the whole import
                LOGGER.error("Error at msisdn {} beneficiary_id {}", record.get(KilkariConstants.MSISDN), child.getBeneficiaryId(), e);
                rejected++;
            }
        }
        // final progress line, unless already logged at the interval boundary
        if (count % KilkariConstants.PROGRESS_INTERVAL != 0) {
            LOGGER.debug(KilkariConstants.IMPORTED, timer.frequency(count));
        }
        if (rejected != 0) {
            LOGGER.debug(KilkariConstants.REJECTED, timer.frequency(rejected));
        }
    } catch (ConstraintViolationException e) {
        throw new CsvImportDataException(String.format("MCTS child import error, constraints violated: %s",
                ConstraintViolationUtils.toString(e.getConstraintViolations())), e);
    }
    return count;
}
/**
 * Validates and imports a single MCTS mother record, creating or updating the
 * pregnancy subscription. Returns false (after recording a SubscriptionError
 * where applicable) when the record is rejected.
 *
 * Fix: the rejection message "Updated Record exits" was a typo for
 * "Updated Record exists".
 */
@Override // NO CHECKSTYLE Cyclomatic Complexity
@Transactional
public boolean importMotherRecord(Map<String, Object> record) {
    if (pregnancyPack == null) {
        pregnancyPack = subscriptionService.getSubscriptionPack(SubscriptionPackType.PREGNANCY);
    }
    MctsMother mother = (MctsMother) record.get(KilkariConstants.BENEFICIARY_ID);
    String name = (String) record.get(KilkariConstants.BENEFICIARY_NAME);
    Long msisdn = (Long) record.get(KilkariConstants.MSISDN);
    DateTime lmp = (DateTime) record.get(KilkariConstants.LMP);
    DateTime motherDOB = (DateTime) record.get(KilkariConstants.MOTHER_DOB);
    Boolean abortion = (Boolean) record.get(KilkariConstants.ABORTION);
    Boolean stillBirth = (Boolean) record.get(KilkariConstants.STILLBIRTH);
    Boolean death = (Boolean) record.get(KilkariConstants.DEATH);
    LocalDate mctsUpdatedDateNic = (LocalDate) record.get(KilkariConstants.LAST_UPDATE_DATE);
    // validate msisdn
    if (!validateMsisdn(msisdn, SubscriptionPackType.PREGNANCY)) {
        return false;
    }
    // validate lmp date. We do not sanitize for lmp in the future to be in sync with MCTS data
    // NOTE: getId is a way to see if this is a new user. We only accept new users if they
    // have 12 weeks left in the pack. For existing users, their lmp could be updated to an earlier date
    if (mother.getId() == null && !validateReferenceDate(lmp, SubscriptionPackType.PREGNANCY, msisdn)) {
        return false;
    }
    // validate and set location
    try {
        MctsBeneficiaryUtils.setLocationFields(locationService.getLocations(record), mother);
    } catch (InvalidLocationException le) {
        LOGGER.error(le.toString());
        subscriptionErrorDataService.create(new SubscriptionError(msisdn, SubscriptionRejectionReason.INVALID_LOCATION,
                SubscriptionPackType.PREGNANCY, le.getMessage()));
        return false;
    }
    // validate if it's an updated record compared to one from database
    if (mother.getUpdatedDateNic() != null && (mctsUpdatedDateNic == null || mother.getUpdatedDateNic().isAfter(mctsUpdatedDateNic))) {
        subscriptionErrorDataService.create(new SubscriptionError(msisdn, mother.getBeneficiaryId(),
                SubscriptionRejectionReason.ALREADY_SUBSCRIBED, SubscriptionPackType.PREGNANCY, "Updated Record exists"));
        return false;
    }
    // validate if an ACTIVE child is already present for the mother. If yes, ignore the update
    if (childAlreadyPresent(mother.getBeneficiaryId())) {
        subscriptionErrorDataService.create(new SubscriptionError(msisdn, mother.getBeneficiaryId(),
                SubscriptionRejectionReason.ACTIVE_CHILD_PRESENT, SubscriptionPackType.PREGNANCY, "Active child is present for this mother."));
        return false;
    }
    mother.setName(name);
    mother.setDateOfBirth(motherDOB);
    mother.setUpdatedDateNic(mctsUpdatedDateNic);
    Subscription subscription = subscriberService.updateMotherSubscriber(msisdn, mother, lmp);
    // We rejected the update/create for the subscriber
    if (subscription == null) {
        return false;
    }
    // pregnancy-terminating flags deactivate the subscription but still count
    // the record as successfully processed
    if ((abortion != null) && abortion) {
        subscriptionService.deactivateSubscription(subscription, DeactivationReason.MISCARRIAGE_OR_ABORTION);
        return true;
    }
    if ((stillBirth != null) && stillBirth) {
        subscriptionService.deactivateSubscription(subscription, DeactivationReason.STILL_BIRTH);
        return true;
    }
    if ((death != null) && death) {
        subscriptionService.deactivateSubscription(subscription, DeactivationReason.MATERNAL_DEATH);
    }
    return true;
}
/**
 * Validates and imports a single MCTS child record, creating or updating the
 * child subscription; a live birth deactivates the mother's pregnancy
 * subscription. Returns false (after recording a SubscriptionError where
 * applicable) when the record is rejected.
 *
 * Fix: the rejection message "Updated Record exits" was a typo for
 * "Updated Record exists".
 */
@Override // NO CHECKSTYLE Cyclomatic Complexity
@Transactional
public boolean importChildRecord(Map<String, Object> record) {
    if (childPack == null) {
        childPack = subscriptionService.getSubscriptionPack(SubscriptionPackType.CHILD);
    }
    MctsChild child = (MctsChild) record.get(KilkariConstants.BENEFICIARY_ID);
    String name = (String) record.get(KilkariConstants.BENEFICIARY_NAME);
    Long msisdn = (Long) record.get(KilkariConstants.MSISDN);
    MctsMother mother = (MctsMother) record.get(KilkariConstants.MOTHER_ID);
    DateTime dob = (DateTime) record.get(KilkariConstants.DOB);
    Boolean death = (Boolean) record.get(KilkariConstants.DEATH);
    LocalDate mctsUpdatedDateNic = (LocalDate) record.get(KilkariConstants.LAST_UPDATE_DATE);
    // validate msisdn
    if (!validateMsisdn(msisdn, SubscriptionPackType.CHILD)) {
        return false;
    }
    // validate dob. We do not sanitize for dob in the future to be in sync with MCTS data
    // NOTE: getId is a way to check for new user. We only accept new children if they have 12 weeks left
    // in the pack. Existing children could have their dob updated to an earlier date
    if (child.getId() == null && !validateReferenceDate(dob, SubscriptionPackType.CHILD, msisdn)) {
        return false;
    }
    // validate and set location
    try {
        MctsBeneficiaryUtils.setLocationFields(locationService.getLocations(record), child);
    } catch (InvalidLocationException le) {
        LOGGER.error(le.toString());
        subscriptionErrorDataService.create(new SubscriptionError(msisdn, child.getBeneficiaryId(),
                SubscriptionRejectionReason.INVALID_LOCATION, SubscriptionPackType.CHILD, le.getMessage()));
        return false;
    }
    // validate if it's an updated record compared to one from database
    if (child.getUpdatedDateNic() != null && (mctsUpdatedDateNic == null || child.getUpdatedDateNic().isAfter(mctsUpdatedDateNic))) {
        subscriptionErrorDataService.create(new SubscriptionError(msisdn, child.getBeneficiaryId(),
                SubscriptionRejectionReason.ALREADY_SUBSCRIBED, SubscriptionPackType.CHILD, "Updated Record exists"));
        return false;
    }
    child.setName(name);
    child.setMother(mother);
    child.setUpdatedDateNic(mctsUpdatedDateNic);
    Subscription childSubscription = subscriberService.updateChildSubscriber(msisdn, child, dob);
    // child subscription create/update was rejected
    if (childSubscription == null) {
        return false;
    }
    // a new child subscription was created -- deactivate mother's pregnancy subscription if she has one
    Subscriber subscriber = childSubscription.getSubscriber();
    if ((mother != null) && (mother.equals(subscriber.getMother()))) {
        Subscription pregnancySubscription = subscriptionService.getActiveSubscription(subscriber,
                SubscriptionPackType.PREGNANCY);
        if (pregnancySubscription != null) {
            subscriptionService.deactivateSubscription(pregnancySubscription, DeactivationReason.LIVE_BIRTH);
        }
    }
    if ((death != null) && death) {
        subscriptionService.deactivateSubscription(childSubscription, DeactivationReason.CHILD_DEATH);
    }
    return true;
}
/**
 * Returns true when an msisdn is present; otherwise records a
 * MISSING_MSISDN subscription error (with the -1 sentinel) and returns false.
 */
private boolean validateMsisdn(Long msisdn, SubscriptionPackType packType) {
    if (msisdn != null) {
        return true;
    }
    subscriptionErrorDataService.create(
            new SubscriptionError(-1, SubscriptionRejectionReason.MISSING_MSISDN, packType));
    return false;
}
// Returns true when the mother identified by the given beneficiary id already
// has an ACTIVE child subscription (in which case the mother update is ignored),
// or when the mother maps to multiple subscribers. Returns false when the
// mother is unknown or has no active child subscription.
private boolean childAlreadyPresent(final String motherBenificiaryId) {
    //Found mother by beneficiary id. If there is no mother already present,then import will
    //go to the next check. Else we get the subscriber by the mother id
    //and check if the child subscription is ACTIVE. If yes we do not update the mother.
    MctsMother mctsMother = null;
    try {
        mctsMother = mctsMotherDataService.findByBeneficiaryId(motherBenificiaryId);
        if (mctsMother == null) {
            return false;
        } else {
            Long motherId = mctsMother.getId();
            Subscriber subscriber = subscriberService.getSubscriberByMother(motherId);
            // NOTE(review): 'subscriber' and 'subscriber.getChild()' are dereferenced
            // without null checks — confirm getSubscriberByMother never returns null
            // and that a subscriber with a CHILD subscription always has a child.
            for (Subscription subscription : subscriber.getAllSubscriptions()) {
                if (subscription.getSubscriptionPack().getType().equals(SubscriptionPackType.CHILD)
                        && subscription.getStatus().equals(SubscriptionStatus.ACTIVE)
                        && subscriber.getChild().getMother() != null
                        && subscriber.getChild().getMother().getBeneficiaryId().equals(motherBenificiaryId)) {
                    return true;
                }
            }
        }
        return false;
    } catch (MultipleSubscriberException m){
        // ambiguous subscriber mapping: treat as "child present" and skip the update
        LOGGER.error(m.toString());
        return true;
    }
}
/**
 * Validates the pack reference date (LMP for pregnancy, DOB for child).
 * A missing or pack-invalid date is recorded as a subscription error and
 * rejects the record (returns false).
 */
private boolean validateReferenceDate(DateTime referenceDate, SubscriptionPackType packType, Long msisdn) {
    boolean pregnancy = (packType == SubscriptionPackType.PREGNANCY);
    if (referenceDate == null) {
        subscriptionErrorDataService.create(
                new SubscriptionError(msisdn,
                        pregnancy ? SubscriptionRejectionReason.MISSING_LMP
                                  : SubscriptionRejectionReason.MISSING_DOB,
                        packType));
        return false;
    }
    // the pack itself decides whether the date leaves enough of the pack to subscribe
    SubscriptionPack pack = pregnancy ? pregnancyPack : childPack;
    String referenceDateValidationError = pack.isReferenceDateValidForPack(referenceDate);
    if (referenceDateValidationError.isEmpty()) {
        return true;
    }
    subscriptionErrorDataService.create(
            new SubscriptionError(msisdn,
                    pregnancy ? SubscriptionRejectionReason.INVALID_LMP
                              : SubscriptionRejectionReason.INVALID_DOB,
                    pregnancy ? SubscriptionPackType.PREGNANCY : SubscriptionPackType.CHILD,
                    referenceDateValidationError));
    return false;
}
/**
 * Builds the SuperCSV column-to-processor mapping for mother records:
 * location columns plus beneficiary id/name, msisdn, LMP, mother DOB,
 * abortion/stillbirth/death flags, and the last-update date. All columns
 * except beneficiary id and name are Optional (blank cells yield null).
 *
 * Fix: removed the redundant {@code (LocalDate)} cast on
 * {@code DateTime.toLocalDate()}, which already returns a Joda LocalDate.
 */
private Map<String, CellProcessor> getMotherProcessorMapping() {
    Map<String, CellProcessor> mapping = new HashMap<>();
    MctsBeneficiaryUtils.getBeneficiaryLocationMapping(mapping);
    mapping.put(KilkariConstants.BENEFICIARY_ID, new GetInstanceByString<MctsMother>() {
        @Override
        public MctsMother retrieve(String value) {
            return mctsBeneficiaryValueProcessor.getOrCreateMotherInstance(value);
        }
    });
    mapping.put(KilkariConstants.BENEFICIARY_NAME, new GetString());
    mapping.put(KilkariConstants.MSISDN, new Optional(new GetInstanceByString<Long>() {
        @Override
        public Long retrieve(String value) {
            return mctsBeneficiaryValueProcessor.getMsisdnByString(value);
        }
    }));
    mapping.put(KilkariConstants.LMP, new Optional(new GetInstanceByString<DateTime>() {
        @Override
        public DateTime retrieve(String value) {
            return mctsBeneficiaryValueProcessor.getDateByString(value);
        }
    }));
    mapping.put(KilkariConstants.MOTHER_DOB, new Optional(new GetInstanceByString<DateTime>() {
        @Override
        public DateTime retrieve(String value) {
            return mctsBeneficiaryValueProcessor.getDateByString(value);
        }
    }));
    mapping.put(KilkariConstants.ABORTION, new Optional(new GetInstanceByString<Boolean>() {
        @Override
        public Boolean retrieve(String value) {
            return mctsBeneficiaryValueProcessor.getAbortionDataFromString(value);
        }
    }));
    mapping.put(KilkariConstants.STILLBIRTH, new Optional(new GetInstanceByString<Boolean>() {
        @Override
        public Boolean retrieve(String value) {
            return mctsBeneficiaryValueProcessor.getStillBirthFromString(value);
        }
    }));
    mapping.put(KilkariConstants.DEATH, new Optional(new GetInstanceByString<Boolean>() {
        @Override
        public Boolean retrieve(String value) {
            return mctsBeneficiaryValueProcessor.getDeathFromString(value);
        }
    }));
    mapping.put(KilkariConstants.LAST_UPDATE_DATE, new Optional(new GetInstanceByString<LocalDate>() {
        @Override
        public LocalDate retrieve(String value) {
            // DateTime.toLocalDate() already returns LocalDate; no cast needed
            return mctsBeneficiaryValueProcessor.getDateByString(value).toLocalDate();
        }
    }));
    return mapping;
}
/**
 * Builds the SuperCSV cell-processor mapping for child import rows.
 * Keys are CSV column names from {@code KilkariConstants}; values convert the raw
 * cell text into domain objects via {@code mctsBeneficiaryValueProcessor}.
 * Note: {@code Optional} here is SuperCSV's optional-cell processor, not
 * {@code java.util.Optional}.
 *
 * @return mutable map of column name to cell processor.
 */
private Map<String, CellProcessor> getChildProcessorMapping() {
    Map<String, CellProcessor> mapping = new HashMap<>();
    // Shared state/district/taluka/... location columns.
    MctsBeneficiaryUtils.getBeneficiaryLocationMapping(mapping);
    // Beneficiary id is mandatory: resolves the MctsChild record.
    mapping.put(KilkariConstants.BENEFICIARY_ID, new GetInstanceByString<MctsChild>() {
        @Override
        public MctsChild retrieve(String value) {
            return mctsBeneficiaryValueProcessor.getChildInstanceByString(value);
        }
    });
    mapping.put(KilkariConstants.BENEFICIARY_NAME, new Optional(new GetString()));
    mapping.put(KilkariConstants.MOTHER_ID, new Optional(new GetInstanceByString<MctsMother>() {
        @Override
        public MctsMother retrieve(String value) {
            return mctsBeneficiaryValueProcessor.getMotherInstanceByBeneficiaryId(value);
        }
    }));
    mapping.put(KilkariConstants.MSISDN, new Optional(new GetInstanceByString<Long>() {
        @Override
        public Long retrieve(String value) {
            return mctsBeneficiaryValueProcessor.getMsisdnByString(value);
        }
    }));
    mapping.put(KilkariConstants.DOB, new Optional(new GetInstanceByString<DateTime>() {
        @Override
        public DateTime retrieve(String value) {
            return mctsBeneficiaryValueProcessor.getDateByString(value);
        }
    }));
    mapping.put(KilkariConstants.DEATH, new Optional(new GetInstanceByString<Boolean>() {
        @Override
        public Boolean retrieve(String value) {
            return mctsBeneficiaryValueProcessor.getDeathFromString(value);
        }
    }));
    mapping.put(KilkariConstants.LAST_UPDATE_DATE, new Optional(new GetInstanceByString<LocalDate>() {
        @Override
        public LocalDate retrieve(String value) {
            // DateTime.toLocalDate() already returns LocalDate; the previous
            // explicit (LocalDate) cast was redundant and has been removed.
            return mctsBeneficiaryValueProcessor.getDateByString(value).toLocalDate();
        }
    }));
    return mapping;
}
}
|
package data_manipulation;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.chrome.ChromeDriver;
public class D0701_tryPullData_from_ExAC {
public static void main(String[] args) throws InterruptedException, IOException {
D0701_tryPullData_from_ExAC pullData_exac = new D0701_tryPullData_from_ExAC();
D0627_GetGeneNames_from_ALSData getNames = new D0627_GetGeneNames_from_ALSData();
//initiate 4 genes for testing
//String[] geneNames = {"SAMD9", "TNN", "SAMD11", "GPR160", "AGAP8", "AGAP9"} ; //
//Here, AGAP8 and AGAP9 do not have LoF variants;
//gene[689], gene[1558], gene[1559], gene[1560] 1613, 2917 do not have any variants;
String[] geneNames = getNames.run();
//initiate an arrayList to store all genes that could not be downloaded
ArrayList<String> genes_without_LoF = new ArrayList<String>();
//create a buffer writer:
//save the genes without any LoF variants into a txt document
File output = new File("C:/Users/Jeff/Downloads/0to5000_genes_without_Lof.txt");
BufferedWriter out_writer = new BufferedWriter(new FileWriter(output));
//for each gene name, call pullData_exac.run() method to pull CSV variants document from ExAC
for(int i=0; i<5000; i++){
System.out.print("
//cll pullData_exac.run() method, update the arrayList, genes withoug LoF variants will be put into ArrayList
pullData_exac.run(geneNames[i], genes_without_LoF);
if(genes_without_LoF.size() > 0){
//write the gene into a txt document;
out_writer.write("gene # " + i + " \t " + genes_without_LoF.get(0) + "\n" );
//empty the arrayList
genes_without_LoF.clear();
}
//rename the last CSV file downloaded with the gene_name
pullData_exac.reName_lastCSV(geneNames[i]);
}//end for i<geneNames.length loop;
out_writer.close();
System.out.println("\nFor the first 3000 genes. \n"
+ "There are: " + genes_without_LoF.size() + " genes do not have LoF variants.");
//printout genes without LoF variants
//check the txt document: C:/Users/Jeff/Downloads/0to5000_genes_without_Lof.txt.
} //end main()
/**************
*
* @param string
*/
private void reName_lastCSV(String geneName) {
// TODO Auto-generated method stub
File folder = new File("C:/Users/Jeff/Downloads");
File[] CSV_list = folder.listFiles();
//1st, sort all files
Arrays.sort(CSV_list, new Comparator<File>(){
public int compare(File f1, File f2){
return Long.valueOf(f1.lastModified()).compareTo(f2.lastModified() );
}
});
//2nd reName the last one:
int last = CSV_list.length;
File new_file = new File("C:/Users/Jeff/Downloads/" + CSV_list[last-1].getName() );
new_file.renameTo( new File("C:/Users/Jeff/Downloads/" + geneName +"_LoF.CSV") );
}
/*****************
* pass by a value of string, geneName;
* visit ExAC website, input the geneName into searchBox,
* check LoF button
* Export LoF variants CSV document to local harddrive;
*
* @param geneName
* @param genes_LoF
* @throws InterruptedException
*/
private void run(String geneName, ArrayList<String> genes_LoF) throws InterruptedException {
// TODO Auto-generated method stub
//add geneName into ArrayList, removed if the LoF variants CSV document is downloaded
genes_LoF.add(geneName);
//the chromedriver.exe has already been placed in the PATH folder
//System.setProperty("webdriver.chrome.driver", "C:/Users/Downloads/chromedriver.exe");
WebDriver driver = new ChromeDriver();
driver.get("http://exac.broadinstitute.org/");
//Thread.sleep(1000); // Let the user actually see something!
//Also, this step is very import to make sure the code will export CSV document instead of TMP docs.
//get the query box by ID: home-searchbox-input
WebElement searchBox = driver.findElement(By.id("home-searchbox-input"));
//submit the query
searchBox.sendKeys( geneName );
searchBox.submit();
boolean variants_displayed = isButtenPresent(driver, By.id( "consequence_lof_variant_button" ));
//in the new page, check the LoF function button
if( variants_displayed ) {
WebElement lof_button = driver.findElement(By.id("consequence_lof_variant_button"));
//some genes do not have any variants; so in those cases, quit the explor directly;
//Thread.sleep(1000); // Let the user actually see something!
//Also, this step is very import to make sure the code will export CSV document instead of TMP docs.
lof_button.click();
//check if export_to_csv button is visiable;
boolean lof_displayed = isButtenPresent(driver, By.id( "export_to_csv" ));
if( lof_displayed ){
//trigger the Export table to CSV button
WebElement ExportCSV_button = driver.findElement(By.id("export_to_csv"));
//System.out.println(ExportCSV_button + " ? ");
//some genes may not have any LoF variants, in that case, we have to check if the button is displayed or not;
if(ExportCSV_button.isDisplayed()){
ExportCSV_button.click();
Thread.sleep(2000);
//remove the last gene name from ArrayList;
genes_LoF.remove(genes_LoF.size()-1);
System.out.println( " downloaded.");
//System.out.println(" The gene " + geneName + " does not have any LoF variants.");
} else {
System.out.println("gene " + geneName + " does not have LoF variants." );
}
} else {
System.out.println("gene " + geneName + " does not have LoF variants." );
}//end inner if export_to_csv size != 0;
} else {
System.out.println("gene " + geneName + " does not have variants.");
}//end outter in LoF button size != 0;
driver.quit();
} //end run() method;
/************
* Check if a web element is displayed
*
* @param driver
* @param id
* @return
*/
private boolean isButtenPresent(WebDriver driver, By id) {
// TODO Auto-generated method stub
try {
driver.findElement(id);
return true;
}
catch (org.openqa.selenium.NoSuchElementException e){
return false;
}
}
}
|
package com.cinnober.msgcodec.util;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.nio.BufferOverflowException;
import java.nio.ByteBuffer;
/**
* Reusable output stream where temporary encoded data can be stored.
* The data is stored in pooled byte arrays.
*
* @author mikael.brannstrom
*
*/
public class TempOutputStream extends OutputStream {
    /** Pool from which byte-array buffers are borrowed and to which they are released. */
    private final Pool<byte[]> bufferPool;
    /** All buffers currently in use. */
    private final ArrayList<byte[]> buffers = new ArrayList<>();
    /** The current (last) buffer. */
    private byte[] currentBuffer;
    /** The next byte to write in currentBuffer. */
    private int currentPosition;
    /** The number of bytes before currentBuffer. */
    private int currentOffset;

    /**
     * Creates a temp output stream backed by the given buffer pool.
     *
     * @param bufferPool the pool used to obtain and release byte-array buffers.
     */
    public TempOutputStream(Pool<byte[]> bufferPool) {
        this.bufferPool = bufferPool;
    }

    /**
     * Returns the number of bytes written to this buffer.
     * @return the number of bytes written to this buffer.
     */
    public int position() {
        // Invariant: total bytes = bytes in earlier buffers + bytes in current one.
        return currentPosition + currentOffset;
    }

    /**
     * Discard any data after the specified position.
     * @param position the new position after this operation.
     */
    public void truncate(int position) {
        if (position > position()) {
            throw new IllegalArgumentException("Position must be <= current position");
        }
        if (position < 0) {
            throw new IllegalArgumentException("Position must be >= 0");
        }
        // Release trailing buffers while the target position lies at or before
        // the byte count preceding the current buffer. Note: when the first
        // buffer is removed, currentOffset can go transiently negative; the
        // subtraction below (position - currentOffset) still leaves position()
        // correct, and the next allocate() re-normalizes both fields.
        while (position <= currentOffset) {
            byte[] buf = buffers.remove(buffers.size()-1);
            currentOffset -= buf.length;
            bufferPool.release(buf);
        }
        currentBuffer = buffers.isEmpty() ? null : buffers.get(buffers.size()-1);
        currentPosition = position - currentOffset;
    }

    /**
     * Reset this stream.
     * The number of bytes written is restored to zero, any internal buffers are released.
     */
    public void reset() {
        for (byte[] buf : buffers) {
            bufferPool.release(buf);
        }
        buffers.clear();
        currentBuffer = null;
        currentPosition = 0;
        currentOffset = 0;
    }

    /**
     * Close this stream and release any buffers to the pool.
     */
    @Override
    public void close() {
        reset();
    }

    /**
     * Allocate a buffer
     */
    protected void allocate() {
        // Fold the bytes of the (now full) current buffer into the offset,
        // then fetch a fresh buffer from the pool and make it current.
        currentOffset += currentPosition;
        currentPosition = 0;
        currentBuffer = bufferPool.get();
        buffers.add(currentBuffer);
    }

    @Override
    public void write(int b) {
        // Lazily allocate on first write or when the current buffer is full.
        if (currentBuffer == null || currentPosition == currentBuffer.length) {
            allocate();
        }
        currentBuffer[currentPosition++] = (byte)b;
    }

    @Override
    public void write(byte[] buf) {
        write(buf, 0, buf.length);
    }

    @Override
    public void write(byte[] buf, int offset, int length) {
        if (currentBuffer == null) {
            allocate();
        }
        // Fill the current buffer, allocating new ones until all bytes are copied.
        while (length > 0) {
            int available = currentBuffer.length - currentPosition;
            if (length > available) {
                System.arraycopy(buf, offset, currentBuffer, currentPosition, available);
                currentPosition += available;
                offset += available;
                length -= available;
                allocate();
            } else {
                System.arraycopy(buf, offset, currentBuffer, currentPosition, length);
                currentPosition += length;
                break;
            }
        }
    }

    /**
     * Copy bytes from this buffer to the specified output stream.
     *
     * @param out the stream to write to.
     * @param start the position of the first byte to write
     * @param end the position after the last byte to write
     * @throws IOException the output stream throws an exception.
     */
    public void copyTo(OutputStream out, int start, int end) throws IOException {
        if (start == end) {
            return;
        }
        if (start > end) {
            throw new IllegalArgumentException("start must be <= end");
        }
        if (end > position()) {
            throw new IllegalArgumentException("end must be <= position()");
        }
        // Walk the buffer chain, tracking the absolute byte range each buffer covers.
        int bufferStart = 0;
        int bufferEnd = 0;
        for (byte[] buffer : buffers) {
            bufferEnd += buffer.length;
            if (start < bufferEnd) {
                if (end > bufferEnd) {
                    // Range continues past this buffer: write its tail and advance.
                    out.write(buffer, start - bufferStart, bufferEnd - start);
                    start += bufferEnd - start;
                } else {
                    // Range ends inside this buffer: write the remainder and finish.
                    out.write(buffer, start - bufferStart, end - start);
                    return;
                }
            }
            bufferStart = bufferEnd;
        }
        // Unreachable unless internal bookkeeping is inconsistent (end was
        // validated against position() above).
        throw new Error("Internal error. start=" + start + ", end=" + end +
                ", bufferStart=" + bufferStart + ", bufferEnd=" + bufferEnd);
    }

    /**
     * Copy bytes from this buffer to the specified byte buffer.
     *
     * @param out the byte buffer to write to.
     * @param start the position of the first byte to write
     * @param end the position after the last byte to write
     * @throws BufferOverflowException the byte buffer gets overfull.
     */
    public void copyTo(ByteBuffer out, int start, int end) throws BufferOverflowException {
        if (start == end) {
            return;
        }
        if (start > end) {
            throw new IllegalArgumentException("start must be <= end");
        }
        if (end > position()) {
            throw new IllegalArgumentException("end must be <= position()");
        }
        // Same traversal as copyTo(OutputStream,...), targeting a ByteBuffer.
        int bufferStart = 0;
        int bufferEnd = 0;
        for (byte[] buffer : buffers) {
            bufferEnd += buffer.length;
            if (start < bufferEnd) {
                if (end > bufferEnd) {
                    out.put(buffer, start - bufferStart, bufferEnd - start);
                    start += bufferEnd - start;
                } else {
                    out.put(buffer, start - bufferStart, end - start);
                    return;
                }
            }
            bufferStart = bufferEnd;
        }
        // Unreachable unless internal bookkeeping is inconsistent.
        throw new Error("Internal error. start=" + start + ", end=" + end +
                ", bufferStart=" + bufferStart + ", bufferEnd=" + bufferEnd);
    }

    /**
     * Copy bytes from this buffer to the specified byte array.
     *
     * @param out the byte array to write to.
     * @param start the position of the first byte to write
     * @param end the position after the last byte to write
     */
    public void copyTo(byte[] out, int start, int end) {
        copyTo(out, start, end, 0);
    }

    /**
     * Copy bytes from this buffer to the specified byte array.
     *
     * @param out the byte array to write to.
     * @param start the position of the first byte to write
     * @param end the position after the last byte to write
     * @param offset offset in the array
     */
    public void copyTo(byte[] out, int start, int end, int offset) {
        if (start == end) {
            return;
        }
        if (start > end) {
            throw new IllegalArgumentException("start must be <= end");
        }
        if (end > position()) {
            throw new IllegalArgumentException("end must be <= position()");
        }
        // Same traversal as the stream variant, copying via System.arraycopy.
        int bufferStart = 0;
        int bufferEnd = 0;
        for (byte[] buffer : buffers) {
            bufferEnd += buffer.length;
            if (start < bufferEnd) {
                if (end > bufferEnd) {
                    final int srcOffset = start - bufferStart;
                    final int length = bufferEnd - start;
                    System.arraycopy(buffer, srcOffset, out, offset, length);
                    offset += length;
                    start += bufferEnd - start;
                } else {
                    final int srcOffset = start - bufferStart;
                    final int length = end - start;
                    System.arraycopy(buffer, srcOffset, out, offset, length);
                    return;
                }
            }
            bufferStart = bufferEnd;
        }
        // Unreachable unless internal bookkeeping is inconsistent.
        throw new Error("Internal error. start=" + start + ", end=" + end +
                ", bufferStart=" + bufferStart + ", bufferEnd=" + bufferEnd);
    }
}
|
package oida.ontology.owl.manager;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.formats.OWLXMLDocumentFormat;
import org.semanticweb.owlapi.model.AddImport;
import org.semanticweb.owlapi.model.AxiomType;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLAnnotation;
import org.semanticweb.owlapi.model.OWLAnnotationAssertionAxiom;
import org.semanticweb.owlapi.model.OWLAnnotationProperty;
import org.semanticweb.owlapi.model.OWLAsymmetricObjectPropertyAxiom;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLClass;
import org.semanticweb.owlapi.model.OWLClassAssertionAxiom;
import org.semanticweb.owlapi.model.OWLDataFactory;
import org.semanticweb.owlapi.model.OWLDeclarationAxiom;
import org.semanticweb.owlapi.model.OWLEntity;
import org.semanticweb.owlapi.model.OWLEquivalentClassesAxiom;
import org.semanticweb.owlapi.model.OWLEquivalentObjectPropertiesAxiom;
import org.semanticweb.owlapi.model.OWLFunctionalObjectPropertyAxiom;
import org.semanticweb.owlapi.model.OWLImportsDeclaration;
import org.semanticweb.owlapi.model.OWLInverseFunctionalObjectPropertyAxiom;
import org.semanticweb.owlapi.model.OWLInverseObjectPropertiesAxiom;
import org.semanticweb.owlapi.model.OWLIrreflexiveObjectPropertyAxiom;
import org.semanticweb.owlapi.model.OWLNamedIndividual;
import org.semanticweb.owlapi.model.OWLObjectProperty;
import org.semanticweb.owlapi.model.OWLObjectPropertyAssertionAxiom;
import org.semanticweb.owlapi.model.OWLObjectPropertyDomainAxiom;
import org.semanticweb.owlapi.model.OWLObjectPropertyRangeAxiom;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyChange;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.semanticweb.owlapi.model.OWLOntologyStorageException;
import org.semanticweb.owlapi.model.OWLReflexiveObjectPropertyAxiom;
import org.semanticweb.owlapi.model.OWLSubClassOfAxiom;
import org.semanticweb.owlapi.model.OWLSubObjectPropertyOfAxiom;
import org.semanticweb.owlapi.model.OWLSymmetricObjectPropertyAxiom;
import org.semanticweb.owlapi.model.OWLTransitiveObjectPropertyAxiom;
import org.semanticweb.owlapi.model.parameters.Imports;
import org.semanticweb.owlapi.util.OWLEntityRenamer;
import org.semanticweb.owlapi.util.SimpleIRIMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oida.ontology.Ontology;
import oida.ontology.OntologyAnnotation;
import oida.ontology.OntologyAnnotationProperty;
import oida.ontology.OntologyClass;
import oida.ontology.OntologyClassEquivalence;
import oida.ontology.OntologyEntity;
import oida.ontology.OntologyIndividual;
import oida.ontology.OntologyNamespace;
import oida.ontology.OntologyObjectProperty;
import oida.ontology.OntologyObjectPropertyAssertion;
import oida.ontology.OntologyObjectPropertyEquivalence;
import oida.ontology.OntologySubClassAssignment;
import oida.ontology.manager.AbstractOntologyManager;
import oida.ontology.manager.OntologyManagerException;
import oida.ontology.manager.util.OntologyManagerUtils;
import oida.ontology.owl.manager.util.OwlOntologyManagerMapHandler;
import oida.ontology.owl.manager.util.UnicornMapHandler;
import oida.ontologyMgr.LocalOntologyMetaInfo;
import oida.ontologyMgr.OntologyMetaInfo;
import oida.util.OIDAUtil;
import oida.util.constants.StringConstants;
/**
*
* @author Michael.Shamiyeh
* @since 2016-11-23
*
*/
public class OwlOntologyManager extends AbstractOntologyManager {
private static Logger LOGGER = LoggerFactory.getLogger(OwlOntologyManager.class);
private UnicornMapHandler mapHandler;
private OWLOntologyManager owlOntologyManager;
private OWLOntology owlOntology;
private OWLDataFactory owlDataFactory;
private OWLXMLDocumentFormat owlPrefixManager;
// private OWLReasoner reasoner;
/**
* Standard Constructor.
*/
/**
 * Standard Constructor.
 * Initializes the OWL API manager, data factory, prefix format and map handler.
 */
OwlOntologyManager() {
    initializeOntologyManager();
}
/**
 * (Re-)creates all OWL API infrastructure objects and a fresh map handler.
 * Order matters: the data factory is obtained from the newly created manager.
 */
@Override
public void initializeOntologyManager() {
    owlPrefixManager = new OWLXMLDocumentFormat();
    owlOntologyManager = OWLManager.createOWLOntologyManager();
    owlDataFactory = owlOntologyManager.getOWLDataFactory();
    mapHandler = new UnicornMapHandler();
}
/**
 * Intentionally a no-op: reasoner support (formerly Pellet-based, see the
 * retained snippet below) is currently not wired up.
 */
@Override
public void initializeReasoner() {
    // reasoner =
    // PelletReasonerFactory.getInstance().createReasoner(owlOntology);
    // ((PelletReasoner)reasoner).getKB().realize();
    // ((PelletReasoner)reasoner).getKB().printClassTree();
}
/**
 * Creates a new, empty ontology for the given meta info and initializes the
 * internal representation.
 *
 * @param metaInfo meta info carrying the ontology IRI; must have a non-empty IRI.
 * @return the internal {@code Ontology} representation, or {@code null} if no IRI is set.
 * @throws OntologyManagerException if the OWL API fails to create the ontology.
 */
@Override
public Ontology createLocalOntology(LocalOntologyMetaInfo metaInfo) throws OntologyManagerException {
    if (metaInfo.getIri() == null || metaInfo.getIri().contentEquals(StringConstants.EMPTY))
        return null;
    try {
        owlOntology = owlOntologyManager.createOntology(IRI.create(metaInfo.getIri()));
        // The ontology IRI doubles as the default namespace prefix.
        owlPrefixManager.setDefaultPrefix(metaInfo.getIri());
        owlOntologyManager.setOntologyFormat(owlOntology, owlPrefixManager);
        initializeInternalOntology(metaInfo);
        // Parameterized SLF4J logging: no concatenation when INFO is disabled.
        LOGGER.info("Ontology created: '{}'", metaInfo.getIri());
        return getOntology();
    } catch (OWLOntologyCreationException e) {
        throw new OntologyManagerException("Error while creating ontology '" + metaInfo.getIri() + "': " + e.getMessage(), e);
    }
}
/**
 * Loads an ontology directly by its (global) IRI.
 * Delegates to {@link #loadOntologyInternal} using the meta info's IRI.
 */
@Override
public Ontology loadOntology(OntologyMetaInfo metaInfo) throws OntologyManagerException {
    return loadOntologyInternal(metaInfo, metaInfo.getIri());
}
/**
 * Loads an ontology from a local file referenced by the given meta info.
 * Fails fast if the file cannot be resolved or does not exist; otherwise
 * delegates to the common loading routine using a file IRI.
 *
 * @param metaInfo meta info with the local path of the ontology file.
 * @return the internal ontology representation.
 * @throws OntologyManagerException if the file is missing or loading fails.
 */
@Override
public Ontology loadLocalOntology(LocalOntologyMetaInfo metaInfo) throws OntologyManagerException {
    Optional<File> optFile = OIDAUtil.getOntologyFileObject(metaInfo, false);
    boolean fileExists = optFile.isPresent() && optFile.get().exists();
    if (!fileExists)
        throw new OntologyManagerException("Error while loading ontology: File doesn't exist.");
    return loadOntologyInternal(metaInfo, OIDAUtil.convertPathToIRI(metaInfo.getLocalPath()));
}
/**
 * Common loading routine: resolves IRI mappings, loads the ontology via the
 * OWL API and builds the internal representation.
 *
 * @param metaInfo meta info of the ontology to load.
 * @param iri      IRI (global or file) to load from; may be null/empty.
 * @return the internal ontology representation, or {@code null} if no IRI is given.
 * @throws OntologyManagerException if the OWL API fails to load the ontology.
 */
private Ontology loadOntologyInternal(OntologyMetaInfo metaInfo, String iri) throws OntologyManagerException {
    if (iri == null || iri.contentEquals(StringConstants.EMPTY))
        return null;
    try {
        // Parameterized SLF4J logging: no concatenation when INFO is disabled.
        LOGGER.info("Loading Ontology: '{}'...", iri);
        // Make sure global IRIs resolve to local files before loading.
        updateIRIMappings();
        owlOntology = owlOntologyManager.loadOntology(IRI.create(iri));
        // NOTE(review): assumes the loaded ontology has an ontology IRI;
        // an anonymous ontology would make this get() throw — confirm inputs.
        owlPrefixManager.setDefaultPrefix(owlOntology.getOntologyID().getOntologyIRI().get().getIRIString());
        owlOntologyManager.setOntologyFormat(owlOntology, owlPrefixManager);
        initializeInternalOntology(metaInfo);
        refreshOntologyRepresentation(true);
        LOGGER.info("Ontology loaded: '{}'.", iri);
        return getOntology();
    } catch (OWLOntologyCreationException e) {
        throw new OntologyManagerException("Error while loading ontology from file '" + iri + "': " + e.getMessage(), e);
    }
}
/**
 * Rebuilds the OWL API IRI-mapper list from the global-IRI-to-local-file
 * mappings of the global ontology context.
 */
private void updateIRIMappings() {
    // Start from a clean slate on every call.
    owlOntologyManager.getIRIMappers().clear();
    getGlobalOntologyContext().getGlobalIRIToLocalIRIMappings().entrySet().forEach(mapping -> {
        IRI globalIri = IRI.create(mapping.getKey());
        IRI localFileIri = IRI.create(OIDAUtil.getFileIriString(mapping.getValue()));
        owlOntologyManager.getIRIMappers().add(new SimpleIRIMapper(globalIri, localFileIri));
    });
}
/**
 * Rebuilds the internal representation of the currently loaded ontology.
 *
 * @param buildLocalRepresentation whether the local (non-imported) view should be rebuilt too.
 */
@Override
public void refreshOntologyRepresentation(boolean buildLocalRepresentation) {
    refreshOntologyRepresentationInternal(owlOntology, buildLocalRepresentation);
}
/**
 * Rebuilds the internal representation (imports, classes, object properties,
 * individuals, annotation properties) from the given OWL ontology.
 * For each category the full (imports-included) view is always built; the
 * local view is built in addition when {@code buildLocalRepresentation} is set.
 *
 * @param owlOntology              the OWL API ontology to mirror.
 * @param buildLocalRepresentation whether to also build the local-only view.
 */
public void refreshOntologyRepresentationInternal(OWLOntology owlOntology, boolean buildLocalRepresentation) {
    extractImports(owlOntology, getOntology());
    // Classes:
    Stream<OWLClass> allClasses = owlOntology.classesInSignature(Imports.INCLUDED);
    List<OWLSubClassOfAxiom> allSubClassOfAxioms = owlOntology.axioms(AxiomType.SUBCLASS_OF, Imports.INCLUDED).collect(Collectors.toList());
    List<OWLEquivalentClassesAxiom> allEquivalentClassAxioms = owlOntology.axioms(AxiomType.EQUIVALENT_CLASSES, Imports.INCLUDED).collect(Collectors.toList());
    extractClassHierarchy(getOntology(), allClasses, allSubClassOfAxioms, allEquivalentClassAxioms, mapHandler.getMapHandler());
    if (buildLocalRepresentation) {
        // NOTE(review): local entities are selected via Imports.INCLUDED plus a
        // default-namespace filter, while local axioms use Imports.EXCLUDED —
        // confirm this asymmetry is intentional.
        Stream<OWLClass> allLocalClasses = owlOntology.classesInSignature(Imports.INCLUDED).filter(cl -> cl.getIRI().getNamespace().equals(getDefaultNamespace() + StringConstants.HASHTAG));
        List<OWLSubClassOfAxiom> allLocalSubClassOfAxioms = owlOntology.axioms(AxiomType.SUBCLASS_OF, Imports.EXCLUDED).collect(Collectors.toList());
        List<OWLEquivalentClassesAxiom> allLocalEquivalentClassAxioms = owlOntology.axioms(AxiomType.EQUIVALENT_CLASSES, Imports.EXCLUDED).collect(Collectors.toList());
        extractClassHierarchy(getOntology().getLocalOntology(), allLocalClasses, allLocalSubClassOfAxioms, allLocalEquivalentClassAxioms, mapHandler.getMapHandlerLocal());
    }
    // Object Properties:
    Stream<OWLObjectProperty> allObjectProperties = owlOntology.objectPropertiesInSignature(Imports.INCLUDED);
    List<OWLSubObjectPropertyOfAxiom> allSubObjectPropertyOfAxioms = owlOntology.axioms(AxiomType.SUB_OBJECT_PROPERTY, Imports.INCLUDED).collect(Collectors.toList());
    List<OWLEquivalentObjectPropertiesAxiom> allEquivalentObjectPropertiesAxioms = owlOntology.axioms(AxiomType.EQUIVALENT_OBJECT_PROPERTIES, Imports.INCLUDED).collect(Collectors.toList());
    extractObjectPropertyHierarchy(getOntology(), allObjectProperties, allSubObjectPropertyOfAxioms, allEquivalentObjectPropertiesAxioms, mapHandler.getMapHandler());
    if (buildLocalRepresentation) {
        Stream<OWLObjectProperty> allLocalObjectProperties = owlOntology.objectPropertiesInSignature(Imports.INCLUDED)
                .filter(cl -> cl.getIRI().getNamespace().equals(getDefaultNamespace() + StringConstants.HASHTAG));
        List<OWLSubObjectPropertyOfAxiom> allLocalSubObjectPropertiesOfAxioms = owlOntology.axioms(AxiomType.SUB_OBJECT_PROPERTY, Imports.EXCLUDED).collect(Collectors.toList());
        List<OWLEquivalentObjectPropertiesAxiom> allLocalEquivalentClassAxioms = owlOntology.axioms(AxiomType.EQUIVALENT_OBJECT_PROPERTIES, Imports.EXCLUDED).collect(Collectors.toList());
        extractObjectPropertyHierarchy(getOntology().getLocalOntology(), allLocalObjectProperties, allLocalSubObjectPropertiesOfAxioms, allLocalEquivalentClassAxioms,
                mapHandler.getMapHandlerLocal());
    }
    // Individuals:
    List<OWLNamedIndividual> allIndividuals = owlOntology.individualsInSignature(Imports.INCLUDED).collect(Collectors.toList());
    List<OWLClassAssertionAxiom> allClassAssertionAxioms = owlOntology.axioms(AxiomType.CLASS_ASSERTION, Imports.INCLUDED).collect(Collectors.toList());
    extractIndividuals(getOntology(), allIndividuals, allClassAssertionAxioms, mapHandler.getMapHandler());
    if (buildLocalRepresentation) {
        List<OWLNamedIndividual> allLocalIndividuals = owlOntology.individualsInSignature(Imports.INCLUDED)
                .filter(i -> i.getIRI().getNamespace().equals(getDefaultNamespace() + StringConstants.HASHTAG)).collect(Collectors.toList());
        List<OWLClassAssertionAxiom> allLocalClassAssertionAxioms = owlOntology.axioms(AxiomType.CLASS_ASSERTION, Imports.EXCLUDED).collect(Collectors.toList());
        extractIndividuals(getOntology().getLocalOntology(), allLocalIndividuals, allLocalClassAssertionAxioms, mapHandler.getMapHandlerLocal());
    }
    // Annotation Properties:
    // Both the full and the local view receive the same (imports-included) list here.
    List<OWLAnnotationProperty> allAnnotationProperties = owlOntology.annotationPropertiesInSignature(Imports.INCLUDED).collect(Collectors.toList());
    extractAnnotationProperties(getOntology(), allAnnotationProperties, mapHandler.getMapHandler());
    extractAnnotationProperties(getOntology().getLocalOntology(), allAnnotationProperties, mapHandler.getMapHandlerLocal());
}
/**
 * Creates the internal ontology object for the given meta info, primes both
 * map handlers, and mirrors all known prefixes as internal namespace objects
 * on the local ontology.
 *
 * @param metaInfo meta info whose IRI names the internal ontology.
 */
private void initializeInternalOntology(OntologyMetaInfo metaInfo) {
    setOntology(OntologyManagerUtils.generateInternalOntologyObject(metaInfo.getIri()), metaInfo);
    mapHandler.getMapHandler().initializeOntology(owlDataFactory, owlOntology, getOntology());
    mapHandler.getMapHandlerLocal().initializeOntology(owlDataFactory, owlOntology, getOntology().getLocalOntology());
    // Iterate entries directly instead of looking each prefix up again by key.
    for (Entry<String, String> prefixEntry : owlPrefixManager.getPrefixName2PrefixMap().entrySet()) {
        OntologyNamespace namespace = OntologyManagerUtils.generateInternalNamespaceObject(getOntology(), prefixEntry.getKey(), prefixEntry.getValue());
        getOntology().getLocalOntology().getNamespaces().add(namespace);
    }
}
/**
 * Mirrors every direct OWL import of {@code owlOntology} as an internal
 * ontology object on {@code ontology}; the local view shares the same list.
 *
 * @param owlOntology source OWL ontology.
 * @param ontology    internal ontology to populate.
 */
private void extractImports(OWLOntology owlOntology, Ontology ontology) {
    owlOntology.directImports().collect(Collectors.toList()).forEach(
            importedOwlOntology -> ontology.getImports().add(
                    OntologyManagerUtils.generateInternalOntologyObject(importedOwlOntology.getOntologyID().getOntologyIRI().get().getIRIString())));
    ontology.getLocalOntology().getImports().addAll(ontology.getImports());
}
/**
 * Builds the internal class hierarchy for {@code ontology}: creates an internal
 * class per OWL class (except owl:Thing), links sub/super classes from the
 * given subclass axioms (falling back to Thing as super class when none apply),
 * and records class equivalences.
 *
 * @param ontology             internal ontology to populate.
 * @param classes              OWL classes to mirror.
 * @param owlSubClassOfAxioms  subclass axioms used to wire the hierarchy.
 * @param owlClassEquivalences equivalence axioms to record.
 * @param mapHandler           OWL-to-internal map for this view.
 */
private void extractClassHierarchy(Ontology ontology, Stream<OWLClass> classes, List<OWLSubClassOfAxiom> owlSubClassOfAxioms, List<OWLEquivalentClassesAxiom> owlClassEquivalences,
OwlOntologyManagerMapHandler mapHandler) {
    List<OWLClass> owlClasses = classes.collect(Collectors.toList());
    // create internal class objects for all owl classes. add the objects to
    // the map handler and the ontology class collection:
    for (OWLClass owlClass : owlClasses) {
        // owl:Thing is represented separately by the map handler, so skip it here.
        if (!owlClass.getIRI().getIRIString().equals(mapHandler.getOwlThingClass().getIRI().getIRIString())) {
            OntologyClass internalClass = OntologyManagerUtils.generateInternalClassObject(ontology, null, owlClass.getIRI().getIRIString());
            mapHandler.toMap(owlClass, internalClass);
            ontology.getClasses().add(internalClass);
        }
    }
    // establish the class hierarchy based on the passed sub class axioms:
    for (OntologyClass internalClass : ontology.getClasses()) {
        // NOTE(review): unchecked get() — assumes every internal class was mapped
        // above; an unmapped class would throw NoSuchElementException here.
        OWLClass owlClass = mapHandler.getOWLClass(internalClass).get();
        if (!owlClass.getIRI().getIRIString().equals(mapHandler.getOwlThingClass().getIRI().getIRIString())) {
            boolean addedAsSubClass = false;
            for (OWLSubClassOfAxiom owlAxiom : owlSubClassOfAxioms) {
                // Only named-class-to-named-class axioms are mirrored.
                if (owlAxiom.getSubClass().isOWLClass() && owlAxiom.getSubClass().equals(owlClass) && owlAxiom.getSuperClass().isOWLClass()) {
                    OWLClass owlSuperClass = owlAxiom.getSuperClass().asOWLClass();
                    Optional<OntologyClass> optInternalSuperClass = mapHandler.getInternalClass(owlSuperClass);
                    if (optInternalSuperClass.isPresent()) {
                        OntologyClass internalSuperClass = optInternalSuperClass.get();
                        internalSuperClass.getSubClasses().add(internalClass);
                        internalClass.getSuperClasses().add(internalSuperClass);
                        addedAsSubClass = true;
                    }
                }
            }
            // if no super class is defined, the Thing class is set as super
            // class:
            if (!addedAsSubClass) {
                mapHandler.getThingClass().getSubClasses().add(internalClass);
                internalClass.getSuperClasses().add(mapHandler.getThingClass());
            }
        }
        // establish class equivalence relationships:
        // (runs for every internal class, including those guarded out above)
        for (OWLEquivalentClassesAxiom owlEquivalence : owlClassEquivalences) {
            if (owlEquivalence.contains(owlClass)) {
                List<OWLClass> equivalentClasses = owlEquivalence.namedClasses().filter(cl -> !cl.equals(owlClass)).collect(Collectors.toList());
                for (OWLClass equivalentOwlCLass : equivalentClasses) {
                    // Fall back to the global map when the local view lacks the partner class.
                    Optional<OntologyClass> optInternalEquivalentClass = mapHandler.getInternalClass(equivalentOwlCLass);
                    if (!optInternalEquivalentClass.isPresent())
                        optInternalEquivalentClass = this.mapHandler.getMapHandler().getInternalClass(equivalentOwlCLass);
                    if (optInternalEquivalentClass.isPresent())
                        OntologyManagerUtils.assignClassesEquivalent(ontology, internalClass, optInternalEquivalentClass.get());
                }
            }
        }
    }
}
/**
 * Builds the internal object-property hierarchy for {@code ontology}: creates an
 * internal property per OWL object property (except owl:topObjectProperty),
 * links sub/super properties from the given axioms (falling back to the top
 * object property when none apply), and records property equivalences.
 *
 * <p>Bug fixes in this revision (the previous code compared OWL API objects
 * against their {@code Optional} wrapper, which is never equal):
 * sub-property matching and the equivalence filter now compare against the
 * unwrapped property, the super property is taken from
 * {@code getSuperProperty()} instead of {@code getSubProperty()}, and an
 * unmapped internal property is skipped instead of triggering
 * {@code NoSuchElementException} on a blind {@code get()}.
 *
 * @param ontology                        internal ontology to populate.
 * @param objectProperties                OWL object properties to mirror.
 * @param owlSubObjectPropertyOfAxioms    sub-property axioms used to wire the hierarchy.
 * @param owlObjectPropertiesEquivalences equivalence axioms to record.
 * @param mapHandler                      OWL-to-internal map for this view.
 */
private void extractObjectPropertyHierarchy(Ontology ontology, Stream<OWLObjectProperty> objectProperties, List<OWLSubObjectPropertyOfAxiom> owlSubObjectPropertyOfAxioms,
        List<OWLEquivalentObjectPropertiesAxiom> owlObjectPropertiesEquivalences, OwlOntologyManagerMapHandler mapHandler) {
    List<OWLObjectProperty> owlObjectProperties = objectProperties.collect(Collectors.toList());
    // Create internal objects for all owl object properties and register them
    // with the map handler and the ontology's property collection:
    for (OWLObjectProperty owlObjectProperty : owlObjectProperties) {
        // owl:topObjectProperty is represented separately by the map handler.
        if (!owlObjectProperty.getIRI().getIRIString().equals(mapHandler.getOwlTopObjectProperty().getIRI().getIRIString())) {
            OntologyObjectProperty internalObjectProperty = OntologyManagerUtils.generateInternalObjectPropertyObject(ontology, null, owlObjectProperty.getIRI().getIRIString());
            mapHandler.toMap(owlObjectProperty, internalObjectProperty);
            ontology.getObjectProperties().add(internalObjectProperty);
        }
    }
    // Establish the property hierarchy based on the passed sub-property axioms:
    for (OntologyObjectProperty internalObjectProperty : ontology.getObjectProperties()) {
        Optional<OWLObjectProperty> optOwlObjectProperty = mapHandler.getOWLObjectProperty(internalObjectProperty);
        if (!optOwlObjectProperty.isPresent()) {
            // BUGFIX: previously only printed the IRI and then called get()
            // anyway, which would throw NoSuchElementException.
            LOGGER.warn("No OWL object property mapped for: {}", internalObjectProperty.getIri());
            continue;
        }
        OWLObjectProperty owlObjectProperty = optOwlObjectProperty.get();
        if (!owlObjectProperty.getIRI().getIRIString().equals(mapHandler.getOwlTopObjectProperty().getIRI().getIRIString())) {
            boolean addedAsSubObjectProperty = false;
            for (OWLSubObjectPropertyOfAxiom owlAxiom : owlSubObjectPropertyOfAxioms) {
                // BUGFIX: the equality check used to compare against the Optional
                // wrapper (never equal), so no hierarchy was ever established.
                if (owlAxiom.getSubProperty().isOWLObjectProperty() && owlAxiom.getSubProperty().equals(owlObjectProperty) && owlAxiom.getSuperProperty().isOWLObjectProperty()) {
                    // BUGFIX: the super property must come from getSuperProperty(),
                    // not getSubProperty().
                    OWLObjectProperty owlSuperObjectProperty = owlAxiom.getSuperProperty().asOWLObjectProperty();
                    Optional<OntologyObjectProperty> optInternalSuperObjectProperty = mapHandler.getInternalObjectProperty(owlSuperObjectProperty);
                    if (optInternalSuperObjectProperty.isPresent()) {
                        OntologyObjectProperty internalSuperObjectProperty = optInternalSuperObjectProperty.get();
                        internalSuperObjectProperty.getSubObjectProperties().add(internalObjectProperty);
                        internalObjectProperty.getSuperProperties().add(internalSuperObjectProperty);
                        addedAsSubObjectProperty = true;
                    }
                }
            }
            // If no super object property is defined, the TopObjectProperty is
            // set as super property:
            if (!addedAsSubObjectProperty) {
                mapHandler.getTopObjectProperty().getSubObjectProperties().add(internalObjectProperty);
                internalObjectProperty.getSuperProperties().add(mapHandler.getTopObjectProperty());
            }
        }
        // Establish property equivalence relationships:
        for (OWLEquivalentObjectPropertiesAxiom owlEquivalence : owlObjectPropertiesEquivalences) {
            if (owlEquivalence.containsEntityInSignature(owlObjectProperty)) {
                // BUGFIX: the filter used to compare against the Optional wrapper
                // (always unequal), so a property was listed as its own equivalent.
                List<OWLObjectProperty> equivalentObjectProperties = owlEquivalence.objectPropertiesInSignature().filter(op -> !op.equals(owlObjectProperty)).collect(Collectors.toList());
                for (OWLObjectProperty equivalentOwlObjectProperty : equivalentObjectProperties) {
                    // Fall back to the global map when the local view lacks the partner.
                    Optional<OntologyObjectProperty> optInternalEquivalentObjectProperty = mapHandler.getInternalObjectProperty(equivalentOwlObjectProperty);
                    if (!optInternalEquivalentObjectProperty.isPresent())
                        optInternalEquivalentObjectProperty = this.mapHandler.getMapHandler().getInternalObjectProperty(equivalentOwlObjectProperty);
                    if (optInternalEquivalentObjectProperty.isPresent())
                        OntologyManagerUtils.assignObjectPropertiesEquivalent(ontology, internalObjectProperty, optInternalEquivalentObjectProperty.get());
                }
            }
        }
    }
}
private void extractIndividuals(Ontology ontology, List<OWLNamedIndividual> allindividuals, List<OWLClassAssertionAxiom> allClassAssertionAxioms, OwlOntologyManagerMapHandler mapHandler) {
    // For every OWL named individual create an internal counterpart, register
    // the OWL/internal pair with the map handler, and link the individual to
    // every internal class it is asserted to belong to (named classes only).
    allindividuals.forEach(owlIndividual -> {
        final OntologyIndividual individual = OntologyManagerUtils.generateInternalIndividualObject(ontology, owlIndividual.getIRI().getIRIString());
        mapHandler.toMap(owlIndividual, individual);
        allClassAssertionAxioms.stream()
                .filter(a -> a.getIndividual().equals(owlIndividual) && a.getClassExpression().isOWLClass())
                .forEach(a -> mapHandler.getInternalClass(a.getClassExpression().asOWLClass(), ontology).ifPresent(internalClass -> {
                    internalClass.getIndividuals().add(individual);
                    individual.getTypes().add(internalClass);
                }));
    });
}
private void extractAnnotationProperties(Ontology ontology, List<OWLAnnotationProperty> allAnnotationProperties, OwlOntologyManagerMapHandler mapHandler) {
    // Create an internal annotation-property object for every OWL annotation
    // property and register the pair with the map handler.
    allAnnotationProperties.forEach(owlAnnotationProperty -> mapHandler.toMap(
            owlAnnotationProperty,
            OntologyManagerUtils.generateInternalAnnotationPropertyObject(ontology, owlAnnotationProperty.getIRI().getIRIString())));
}
@Override
public void saveLocalOntology() throws OntologyManagerException {
    // Persists the managed OWL ontology to the file referenced by the
    // ontology's meta information. Only local ontologies can be saved.
    // Throws OntologyManagerException if the ontology is not local or the
    // file cannot be written.
    OntologyMetaInfo optOntologyMetaInfo = getOntology().getMetaInfo();
    if (!(optOntologyMetaInfo instanceof LocalOntologyMetaInfo))
        throw new OntologyManagerException("Error while saving ontology: Ontology is no local ontology.");
    LocalOntologyMetaInfo metaInfo = (LocalOntologyMetaInfo)optOntologyMetaInfo;
    Optional<File> optFile = OIDAUtil.getOntologyFileObject(metaInfo, true);
    if (optFile.isPresent()) {
        // try-with-resources: the previous version never closed the stream,
        // leaking the file handle on every save.
        try (FileOutputStream outputStream = new FileOutputStream(optFile.get())) {
            owlOntologyManager.saveOntology(owlOntology, owlPrefixManager, outputStream);
            LOGGER.info("Ontology saved: '" + getOntology().getIri() + "'");
        } catch (IOException | OWLOntologyStorageException e) {
            // IOException also covers FileNotFoundException and close() failures.
            throw new OntologyManagerException("Error while saving ontology to file '" + metaInfo.getLocalPath() + "': " + e.getMessage(), e);
        }
    }
}
@Override
public String getDefaultNamespace() {
    // The default namespace is the prefix manager's default prefix with the
    // trailing hash character stripped.
    final String defaultPrefix = owlPrefixManager.getDefaultPrefix();
    return defaultPrefix.replace(StringConstants.HASHTAG, StringConstants.EMPTY);
}
@Override
public void addImportDeclaration(String iri) throws OntologyManagerException {
    // Adds an owl:imports declaration for the given IRI and loads the imported
    // ontology so its entities become part of the internal representation.
    // No-op if the import already exists:
    for (Ontology o : getOntology().getImports())
        if (o.getIri().equals(iri))
            return;
    updateIRIMappings();
    IRI importOntologyIRI = IRI.create(iri);
    OWLImportsDeclaration owlImportDeclaration = owlDataFactory.getOWLImportsDeclaration(importOntologyIRI);
    owlOntologyManager.applyChange(new AddImport(owlOntology, owlImportDeclaration));
    // NOTE(review): the internal import objects are registered before the load
    // below is known to succeed; on a load failure the import stays registered.
    Ontology internalImportedOntology = OntologyManagerUtils.generateInternalOntologyObject(iri);
    getOntology().getImports().add(internalImportedOntology);
    getOntology().getLocalOntology().getImports().add(internalImportedOntology);
    try {
        OWLOntology owlImportedOntology = owlOntologyManager.loadOntology(importOntologyIRI);
        refreshOntologyRepresentationInternal(owlImportedOntology, false);
    } catch (OWLOntologyCreationException e1) {
        // Log with cause instead of dumping the stack trace to stderr
        // (was: e1.printStackTrace()).
        LOGGER.error("Error while loading imported ontology '" + iri + "': " + e1.getMessage(), e1);
    }
}
@Override
public OntologyClass createClass(final String name, final String namespace) {
    // Creates (or returns the already existing) class with the given name and
    // namespace, declares it in the OWL ontology and maps the OWL/internal pair.
    final Optional<OntologyClass> existing = getClass(name, namespace);
    if (existing.isPresent())
        return existing.get();
    final OWLClass owlNewClass = owlDataFactory.getOWLClass(OntologyManagerUtils.buildFullIRIString(name, namespace));
    owlOntologyManager.addAxiom(owlOntology, owlDataFactory.getOWLDeclarationAxiom(owlNewClass));
    final OntologyClass internalClass = OntologyManagerUtils.generateInternalClassObject(getOntology(), mapHandler.getMapHandler().getThingClass(), name, namespace);
    mapHandler.getMapHandler().toMap(owlNewClass, internalClass);
    // Classes in the default namespace also get a twin in the local ontology:
    if (namespace.equals(getDefaultNamespace()))
        mapHandler.getMapHandlerLocal().toMap(owlNewClass,
                OntologyManagerUtils.generateInternalClassObject(getOntology().getLocalOntology(), mapHandler.getMapHandlerLocal().getThingClass(), name, namespace));
    return internalClass;
}
@Override
public OntologyIndividual createIndividual(final String name, final String namespace) {
    // Creates (or returns the already existing) individual, declares it in the
    // OWL ontology and maps the OWL/internal pair.
    final Optional<OntologyIndividual> existing = getIndividual(name, namespace);
    if (existing.isPresent())
        return existing.get();
    final OWLNamedIndividual owlIndividual = owlDataFactory.getOWLNamedIndividual(OntologyManagerUtils.buildFullIRIString(name, namespace));
    owlOntologyManager.addAxiom(owlOntology, owlDataFactory.getOWLDeclarationAxiom(owlIndividual));
    final OntologyIndividual internalIndividual = OntologyManagerUtils.generateInternalIndividualObject(getOntology(), name, namespace);
    mapHandler.getMapHandler().toMap(owlIndividual, internalIndividual);
    // Individuals in the default namespace also get a twin in the local ontology:
    if (namespace.equals(getDefaultNamespace()))
        mapHandler.getMapHandlerLocal().toMap(owlIndividual, OntologyManagerUtils.generateInternalIndividualObject(getOntology().getLocalOntology(), name, namespace));
    return internalIndividual;
}
@Override
public OntologyObjectProperty createObjectProperty(final String name, final String namespace) {
    // Creates (or returns the already existing) object property, declares it
    // in the OWL ontology and maps the OWL/internal pair.
    Optional<OntologyObjectProperty> optInternalObjectProperty = getObjectProperty(name, namespace);
    if (optInternalObjectProperty.isPresent()) {
        return optInternalObjectProperty.get();
    }
    OWLObjectProperty owlProperty = owlDataFactory.getOWLObjectProperty(OntologyManagerUtils.buildFullIRIString(name, namespace));
    OWLDeclarationAxiom owlAxiom = owlDataFactory.getOWLDeclarationAxiom(owlProperty);
    owlOntologyManager.addAxiom(owlOntology, owlAxiom);
    OntologyObjectProperty internalObjectProperty = OntologyManagerUtils.generateInternalObjectPropertyObject(getOntology(), mapHandler.getMapHandler().getTopObjectProperty(), name, namespace);
    mapHandler.getMapHandler().toMap(owlProperty, internalObjectProperty);
    if (namespace.equals(getDefaultNamespace())) {
        // Fix: the twin created in the LOCAL ontology must be parented on the
        // LOCAL map handler's top object property (was: mapHandler.getMapHandler()),
        // mirroring how createClass uses getMapHandlerLocal().getThingClass().
        mapHandler.getMapHandlerLocal().toMap(owlProperty,
                OntologyManagerUtils.generateInternalObjectPropertyObject(getOntology().getLocalOntology(), mapHandler.getMapHandlerLocal().getTopObjectProperty(), name, namespace));
    }
    return internalObjectProperty;
}
@Override
public void assignSubClassToSuperClass(OntologyClass subClass, OntologyClass superClass) {
    // Adds a SubClassOf axiom for the mapped OWL classes, records the internal
    // assignment, and mirrors the assignment on the local twins if present.
    final Optional<OWLClass> optSubOwlClass = mapHandler.getMapHandler().getOWLClass(subClass);
    final Optional<OWLClass> optSuperOwlClass = mapHandler.getMapHandler().getOWLClass(superClass);
    if (!optSubOwlClass.isPresent() || !optSuperOwlClass.isPresent())
        return;
    final OWLSubClassOfAxiom owlAxiom = owlDataFactory.getOWLSubClassOfAxiom(optSubOwlClass.get(), optSuperOwlClass.get());
    owlOntologyManager.addAxiom(owlOntology, owlAxiom);
    mapHandler.getMapHandler().toMap(owlAxiom, OntologyManagerUtils.assignSubClassToSuperClass(subClass, superClass));
    final Optional<OntologyClass> twinSubClass = mapHandler.getTwinClass(subClass);
    final Optional<OntologyClass> twinSuperClass = mapHandler.getTwinClass(superClass);
    if (twinSubClass.isPresent() && twinSuperClass.isPresent())
        mapHandler.getMapHandlerLocal().toMap(owlAxiom, OntologyManagerUtils.assignSubClassToSuperClass(twinSubClass.get(), twinSuperClass.get()));
}
public void removeSubClassAssignment(OntologyClass subClass, OntologyClass superClass) {
    // Removes the SubClassOf axiom backing the given assignment, provided both
    // the internal assignment and its OWL axiom are known to the map handler.
    mapHandler.getMapHandler().getInternalSubClassAssignment(subClass, superClass).ifPresent(assignment ->
            mapHandler.getMapHandler().getOWLSubClassOfAxiom(assignment).ifPresent(owlAxiom -> {
                owlOntologyManager.removeAxiom(owlOntology, owlAxiom);
                mapHandler.removeSubClassAssignment(assignment);
            }));
}
@Override
// Renames an ontology entity by rewriting its IRI via OWLEntityRenamer and
// then updating the internal model object (and its local twin, if any).
public void renameEntity(OntologyEntity entity, String newName) {
Collection<OWLOntology> c = new ArrayList<OWLOntology>();
c.add(owlOntology);
OWLEntityRenamer renamer = new OWLEntityRenamer(owlOntologyManager, c);
Optional<OWLEntity> optRenameEntity = mapHandler.getMapHandler().getOWLEntity(entity);
if (optRenameEntity.isPresent()) {
// NOTE(review): String.replace rewrites EVERY occurrence of the old name in
// the full IRI, not just the fragment - an entity whose name also appears in
// its namespace would be mangled. Confirm this is acceptable for all IRIs.
List<OWLOntologyChange> changes = renamer.changeIRI(optRenameEntity.get(), IRI.create(optRenameEntity.get().getIRI().toString().replace(entity.getName(), newName)));
owlOntologyManager.applyChanges(changes);
// NOTE(review): assumes the renamer always returns at least two changes and
// that changes.get(1) carries the renamed entity in its signature - verify
// against OWLEntityRenamer's documented change ordering.
Optional<OWLEntity> optEntity = changes.get(1).signature().findFirst();
if (optEntity.isPresent())
mapHandler.getMapHandler().toMap(optEntity.get(), entity);
OntologyManagerUtils.changeOntologyEntityName(entity, newName);
Optional<OntologyEntity> twinEntity = mapHandler.getTwinEntity(entity);
if (twinEntity.isPresent())
OntologyManagerUtils.changeOntologyEntityName(twinEntity.get(), newName);
}
}
@Override
public void assignIndividualToClass(OntologyIndividual individual, OntologyClass clazz) {
    // Asserts the class membership in the OWL ontology and mirrors it on the
    // internal model (and on the local twins, when both twins exist).
    final Optional<OWLNamedIndividual> optOwlIndividual = mapHandler.getMapHandler().getOWLIndividual(individual);
    final Optional<OWLClass> optOwlClass = mapHandler.getMapHandler().getOWLClass(clazz);
    if (!optOwlIndividual.isPresent() || !optOwlClass.isPresent())
        return;
    owlOntologyManager.addAxiom(owlOntology, owlDataFactory.getOWLClassAssertionAxiom(optOwlClass.get(), optOwlIndividual.get()));
    OntologyManagerUtils.assignIndividualToClass(individual, clazz);
    final Optional<OntologyIndividual> twinIndividual = mapHandler.getTwinIndividual(individual);
    final Optional<OntologyClass> twinClass = mapHandler.getTwinClass(clazz);
    if (twinIndividual.isPresent() && twinClass.isPresent())
        OntologyManagerUtils.assignIndividualToClass(twinIndividual.get(), twinClass.get());
}
@Override
public void assignSubObjectPropertyToSuperObjectProperty(OntologyObjectProperty subProperty, OntologyObjectProperty superProperty) {
    // Adds a SubObjectPropertyOf axiom and mirrors the hierarchy relation on
    // the internal model (and on the local twins, when both twins exist).
    final Optional<OWLObjectProperty> owlSub = mapHandler.getMapHandler().getOWLObjectProperty(subProperty);
    final Optional<OWLObjectProperty> owlSuper = mapHandler.getMapHandler().getOWLObjectProperty(superProperty);
    if (!owlSub.isPresent() || !owlSuper.isPresent())
        return;
    owlOntologyManager.addAxiom(owlOntology, owlDataFactory.getOWLSubObjectPropertyOfAxiom(owlSub.get(), owlSuper.get()));
    OntologyManagerUtils.assignSubObjectPropertyToSuperObjectProperty(subProperty, superProperty);
    final Optional<OntologyObjectProperty> twinSub = mapHandler.getTwinObjectProperty(subProperty);
    final Optional<OntologyObjectProperty> twinSuper = mapHandler.getTwinObjectProperty(superProperty);
    if (twinSub.isPresent() && twinSuper.isPresent())
        OntologyManagerUtils.assignSubObjectPropertyToSuperObjectProperty(twinSub.get(), twinSuper.get());
}
@Override
public void assignInverseObjectProperty(OntologyObjectProperty property, OntologyObjectProperty inverseProperty) {
    // Adds an InverseObjectProperties axiom and links the two internal
    // properties (and their local twins, when both exist) to each other.
    final Optional<OWLObjectProperty> owlProp = mapHandler.getMapHandler().getOWLObjectProperty(property);
    final Optional<OWLObjectProperty> owlInverse = mapHandler.getMapHandler().getOWLObjectProperty(inverseProperty);
    if (!owlProp.isPresent() || !owlInverse.isPresent())
        return;
    owlOntologyManager.addAxiom(owlOntology, owlDataFactory.getOWLInverseObjectPropertiesAxiom(owlProp.get(), owlInverse.get()));
    property.getInverseObjectProperties().add(inverseProperty);
    inverseProperty.getInverseObjectProperties().add(property);
    final Optional<OntologyObjectProperty> twinProp = mapHandler.getTwinObjectProperty(property);
    final Optional<OntologyObjectProperty> twinInverse = mapHandler.getTwinObjectProperty(inverseProperty);
    if (twinProp.isPresent() && twinInverse.isPresent()) {
        twinProp.get().getInverseObjectProperties().add(twinInverse.get());
        twinInverse.get().getInverseObjectProperties().add(twinProp.get());
    }
}
@Override
public void makeObjectPropertyFunctional(OntologyObjectProperty property) {
    // Adds a FunctionalObjectProperty axiom for the mapped OWL property and
    // mirrors the flag on the internal model (and its local twin, if any).
    mapHandler.getMapHandler().getOWLObjectProperty(property).ifPresent(owlProp -> {
        owlOntologyManager.addAxiom(owlOntology, owlDataFactory.getOWLFunctionalObjectPropertyAxiom(owlProp));
        property.setFunctional(true);
        mapHandler.getTwinObjectProperty(property).ifPresent(twin -> twin.setFunctional(true));
    });
}
@Override
public void makeObjectPropertyInverseFunctional(OntologyObjectProperty property) {
    // Adds an InverseFunctionalObjectProperty axiom for the mapped OWL property
    // and mirrors the flag on the internal model (and its local twin, if any).
    mapHandler.getMapHandler().getOWLObjectProperty(property).ifPresent(owlProp -> {
        owlOntologyManager.addAxiom(owlOntology, owlDataFactory.getOWLInverseFunctionalObjectPropertyAxiom(owlProp));
        property.setInverseFunctional(true);
        mapHandler.getTwinObjectProperty(property).ifPresent(twin -> twin.setInverseFunctional(true));
    });
}
@Override
public void makeObjectPropertyTransitive(OntologyObjectProperty property) {
    // Adds a TransitiveObjectProperty axiom for the mapped OWL property and
    // mirrors the flag on the internal model (and its local twin, if any).
    mapHandler.getMapHandler().getOWLObjectProperty(property).ifPresent(owlProp -> {
        owlOntologyManager.addAxiom(owlOntology, owlDataFactory.getOWLTransitiveObjectPropertyAxiom(owlProp));
        property.setTransitive(true);
        mapHandler.getTwinObjectProperty(property).ifPresent(twin -> twin.setTransitive(true));
    });
}
@Override
public void makeObjectPropertySymmetric(OntologyObjectProperty property) {
    // Adds a SymmetricObjectProperty axiom for the mapped OWL property and
    // mirrors the flag on the internal model (and its local twin, if any).
    mapHandler.getMapHandler().getOWLObjectProperty(property).ifPresent(owlProp -> {
        owlOntologyManager.addAxiom(owlOntology, owlDataFactory.getOWLSymmetricObjectPropertyAxiom(owlProp));
        property.setSymmetric(true);
        mapHandler.getTwinObjectProperty(property).ifPresent(twin -> twin.setSymmetric(true));
    });
}
@Override
public void makeObjectPropertyAsymmetric(OntologyObjectProperty property) {
    // Adds an AsymmetricObjectProperty axiom for the mapped OWL property and
    // mirrors the flag on the internal model (and its local twin, if any).
    mapHandler.getMapHandler().getOWLObjectProperty(property).ifPresent(owlProp -> {
        owlOntologyManager.addAxiom(owlOntology, owlDataFactory.getOWLAsymmetricObjectPropertyAxiom(owlProp));
        property.setAsymmetric(true);
        mapHandler.getTwinObjectProperty(property).ifPresent(twin -> twin.setAsymmetric(true));
    });
}
@Override
public void makeObjectPropertyReflexive(OntologyObjectProperty property) {
    // Adds a ReflexiveObjectProperty axiom for the mapped OWL property and
    // mirrors the flag on the internal model (and its local twin, if any).
    mapHandler.getMapHandler().getOWLObjectProperty(property).ifPresent(owlProp -> {
        owlOntologyManager.addAxiom(owlOntology, owlDataFactory.getOWLReflexiveObjectPropertyAxiom(owlProp));
        property.setReflexive(true);
        mapHandler.getTwinObjectProperty(property).ifPresent(twin -> twin.setReflexive(true));
    });
}
@Override
public void makeObjectPropertyIrreflexive(OntologyObjectProperty property) {
    // Adds an IrreflexiveObjectProperty axiom for the mapped OWL property and
    // mirrors the flag on the internal model (and its local twin, if any).
    mapHandler.getMapHandler().getOWLObjectProperty(property).ifPresent(owlProp -> {
        owlOntologyManager.addAxiom(owlOntology, owlDataFactory.getOWLIrreflexiveObjectPropertyAxiom(owlProp));
        property.setIrreflexive(true);
        mapHandler.getTwinObjectProperty(property).ifPresent(twin -> twin.setIrreflexive(true));
    });
}
@Override
public void assignObjectPropertyRange(OntologyObjectProperty property, OntologyClass range) {
    // Adds an ObjectPropertyRange axiom and mirrors the range on the internal
    // property (and on the local twins, when both twins exist).
    final Optional<OWLObjectProperty> owlProp = mapHandler.getMapHandler().getOWLObjectProperty(property);
    final Optional<OWLClass> owlRange = mapHandler.getMapHandler().getOWLClass(range);
    if (!owlProp.isPresent() || !owlRange.isPresent())
        return;
    owlOntologyManager.addAxiom(owlOntology, owlDataFactory.getOWLObjectPropertyRangeAxiom(owlProp.get(), owlRange.get()));
    property.setRange(range);
    final Optional<OntologyObjectProperty> twinProperty = mapHandler.getTwinObjectProperty(property);
    final Optional<OntologyClass> twinClass = mapHandler.getTwinClass(range);
    if (twinProperty.isPresent() && twinClass.isPresent())
        twinProperty.get().setRange(twinClass.get());
}
@Override
public void assignObjectPropertyDomain(OntologyObjectProperty property, OntologyClass domain) {
    // Adds an ObjectPropertyDomain axiom and mirrors the domain on the internal
    // property (and on the local twins, when both twins exist).
    final Optional<OWLObjectProperty> owlProp = mapHandler.getMapHandler().getOWLObjectProperty(property);
    final Optional<OWLClass> owlDomain = mapHandler.getMapHandler().getOWLClass(domain);
    if (!owlProp.isPresent() || !owlDomain.isPresent())
        return;
    owlOntologyManager.addAxiom(owlOntology, owlDataFactory.getOWLObjectPropertyDomainAxiom(owlProp.get(), owlDomain.get()));
    property.setDomain(domain);
    final Optional<OntologyObjectProperty> twinProperty = mapHandler.getTwinObjectProperty(property);
    final Optional<OntologyClass> twinClass = mapHandler.getTwinClass(domain);
    if (twinProperty.isPresent() && twinClass.isPresent())
        twinProperty.get().setDomain(twinClass.get());
}
@Override
public OntologyObjectPropertyAssertion createObjectPropertyAssertion(OntologyObjectProperty property, OntologyIndividual individual, OntologyIndividual object) {
    // Asserts "individual property object" in the OWL ontology and records the
    // assertion on the internal individual. Returns null when any of the three
    // participants has no OWL counterpart (as before).
    final Optional<OWLObjectProperty> owlProp = mapHandler.getMapHandler().getOWLObjectProperty(property);
    final Optional<OWLNamedIndividual> owlSubject = mapHandler.getMapHandler().getOWLIndividual(individual);
    final Optional<OWLNamedIndividual> owlObject = mapHandler.getMapHandler().getOWLIndividual(object);
    if (!owlProp.isPresent() || !owlSubject.isPresent() || !owlObject.isPresent())
        return null;
    owlOntologyManager.addAxiom(owlOntology,
            owlDataFactory.getOWLObjectPropertyAssertionAxiom(owlProp.get(), owlSubject.get(), owlObject.get()));
    final OntologyObjectPropertyAssertion assertion = OntologyManagerUtils.generateObjectPropertyAssertionObject(getOntology(), property, object);
    individual.getObjectPropertyAssertions().add(assertion);
    return assertion;
}
@Override
public OntologyAnnotationProperty createAnnotationProperty(String propertyName, String namespace) {
    // Declares a new annotation property in the OWL ontology and maps the
    // OWL/internal pair (no existence check, unlike createClass).
    final OWLAnnotationProperty owlProperty = owlDataFactory.getOWLAnnotationProperty(OntologyManagerUtils.buildFullIRIString(propertyName, namespace));
    owlOntologyManager.addAxiom(owlOntology, owlDataFactory.getOWLDeclarationAxiom(owlProperty));
    final OntologyAnnotationProperty prop = OntologyManagerUtils.generateInternalAnnotationPropertyObject(getOntology(), propertyName, namespace);
    mapHandler.getMapHandler().toMap(owlProperty, prop);
    // Annotation properties in the default namespace get a local-ontology twin:
    if (namespace.equals(getDefaultNamespace()))
        mapHandler.getMapHandlerLocal().toMap(owlProperty, OntologyManagerUtils.generateInternalAnnotationPropertyObject(getOntology().getLocalOntology(), propertyName, namespace));
    return prop;
}
@Override
public Optional<OntologyAnnotation> annotateClass(OntologyAnnotationProperty property, String annotationValue, OntologyClass clazz) {
    // Attaches a literal-valued annotation to the given class and returns the
    // internal annotation object, or Optional.empty() if either the property
    // or the class has no OWL counterpart.
    final Optional<OWLAnnotationProperty> optOwlAnnotationProperty = mapHandler.getMapHandler().getOWLAnnotationProperty(property);
    final Optional<OWLClass> optOwlClass = mapHandler.getMapHandler().getOWLClass(clazz);
    if (!optOwlAnnotationProperty.isPresent() || !optOwlClass.isPresent())
        return Optional.empty();
    final OWLAnnotation owlAnnotation = owlDataFactory.getOWLAnnotation(optOwlAnnotationProperty.get(), owlDataFactory.getOWLLiteral(annotationValue));
    owlOntologyManager.addAxiom(owlOntology, owlDataFactory.getOWLAnnotationAssertionAxiom(optOwlClass.get().getIRI(), owlAnnotation));
    final OntologyAnnotation annotation = OntologyManagerUtils.generateAnnotationObject(getOntology(), property, annotationValue);
    clazz.getAnnotations().add(annotation);
    mapHandler.getMapHandler().toMap(owlAnnotation, annotation);
    // TODO: Copy Annotations to local ontology
    return Optional.of(annotation);
}
@Override
public Optional<OntologyAnnotation> annotateIndividual(OntologyIndividual individual, OntologyAnnotationProperty property, String annotationValue) {
    // Attaches a literal-valued annotation to the given individual and returns
    // the internal annotation object, or Optional.empty() if either the
    // property or the individual has no OWL counterpart.
    final Optional<OWLAnnotationProperty> optOwlAnnotationProperty = mapHandler.getMapHandler().getOWLAnnotationProperty(property);
    final Optional<OWLNamedIndividual> optOwlIndividual = mapHandler.getMapHandler().getOWLIndividual(individual);
    if (!optOwlAnnotationProperty.isPresent() || !optOwlIndividual.isPresent())
        return Optional.empty();
    final OWLAnnotation owlAnnotation = owlDataFactory.getOWLAnnotation(optOwlAnnotationProperty.get(), owlDataFactory.getOWLLiteral(annotationValue));
    owlOntologyManager.addAxiom(owlOntology, owlDataFactory.getOWLAnnotationAssertionAxiom(optOwlIndividual.get().getIRI(), owlAnnotation));
    final OntologyAnnotation annotation = OntologyManagerUtils.generateAnnotationObject(getOntology(), property, annotationValue);
    individual.getAnnotations().add(annotation);
    mapHandler.getMapHandler().toMap(owlAnnotation, annotation);
    // TODO: Copy Annotations to local ontology
    return Optional.of(annotation);
}
@Override
public Optional<OntologyClassEquivalence> assignClassEquivalence(OntologyClass clazz, OntologyClass equivalentClazz) {
    // Adds an EquivalentClasses axiom, records the equivalence on the internal
    // model and maps the axiom/equivalence pair. Returns Optional.empty() when
    // either class has no OWL counterpart.
    final Optional<OWLClass> owlClass = mapHandler.getMapHandler().getOWLClass(clazz);
    final Optional<OWLClass> owlEquivalent = mapHandler.getMapHandler().getOWLClass(equivalentClazz);
    if (!owlClass.isPresent() || !owlEquivalent.isPresent())
        return Optional.empty();
    final OWLEquivalentClassesAxiom owlAxiom = owlDataFactory.getOWLEquivalentClassesAxiom(owlClass.get(), owlEquivalent.get());
    owlOntologyManager.addAxiom(owlOntology, owlAxiom);
    final OntologyClassEquivalence equivalence = OntologyManagerUtils.assignClassesEquivalent(getOntology(), clazz, equivalentClazz);
    mapHandler.getMapHandler().toMap(owlAxiom, equivalence);
    // TODO: Copy Annotations to local ontology
    return Optional.of(equivalence);
}
@Override
// Removes the OWL equivalent-classes axiom backing the given equivalence and
// detaches the two classes from each other in the internal model.
public void removeClassEquivalence(OntologyClassEquivalence equivalence) {
Optional<OWLEquivalentClassesAxiom> optOwlClassesEquivalentAxiom = mapHandler.getMapHandler().getOWLEqivalentClassesAxiom(equivalence);
if (optOwlClassesEquivalentAxiom.isPresent()) {
owlOntology.removeAxiom(optOwlClassesEquivalentAxiom.get());
// NOTE(review): this removes a CLASS equivalence from the ontology's OBJECT
// PROPERTY equivalence list - likely copy-paste from
// removeObjectPropertyEquivalence; presumably the class-equivalence
// collection was intended. Confirm against the ontology model API.
getOntology().getObjectPropertyEquivalences().remove(equivalence);
equivalence.getClass1().getEquivalentClasses().remove(equivalence.getClass2());
equivalence.getClass2().getEquivalentClasses().remove(equivalence.getClass1());
}
}
@Override
public Optional<OntologyObjectPropertyEquivalence> assignObjectPropertyEquivalence(OntologyObjectProperty objectProperty, OntologyObjectProperty equivalentObjectProperty) {
    // Adds an EquivalentObjectProperties axiom, records the equivalence on the
    // internal model and maps the axiom/equivalence pair. Returns
    // Optional.empty() when either property has no OWL counterpart.
    final Optional<OWLObjectProperty> owlProp = mapHandler.getMapHandler().getOWLObjectProperty(objectProperty);
    final Optional<OWLObjectProperty> owlEquivalent = mapHandler.getMapHandler().getOWLObjectProperty(equivalentObjectProperty);
    if (!owlProp.isPresent() || !owlEquivalent.isPresent())
        return Optional.empty();
    final OWLEquivalentObjectPropertiesAxiom owlAxiom = owlDataFactory.getOWLEquivalentObjectPropertiesAxiom(owlProp.get(), owlEquivalent.get());
    owlOntologyManager.addAxiom(owlOntology, owlAxiom);
    final OntologyObjectPropertyEquivalence equivalence = OntologyManagerUtils.assignObjectPropertiesEquivalent(getOntology(), objectProperty, equivalentObjectProperty);
    mapHandler.getMapHandler().toMap(owlAxiom, equivalence);
    // TODO: Copy Annotations to local ontology
    return Optional.of(equivalence);
}
@Override
public void removeObjectPropertyEquivalence(OntologyObjectPropertyEquivalence equivalence) {
    // Removes the OWL equivalent-object-properties axiom backing the given
    // equivalence and detaches the two properties from each other.
    mapHandler.getMapHandler().getOWLEqivalentObjectPropertiesAxiom(equivalence).ifPresent(owlAxiom -> {
        owlOntology.removeAxiom(owlAxiom);
        getOntology().getObjectPropertyEquivalences().remove(equivalence);
        equivalence.getObjectProperty1().getEquivalentProperties().remove(equivalence.getObjectProperty2());
        equivalence.getObjectProperty2().getEquivalentProperties().remove(equivalence.getObjectProperty1());
    });
}
@Override
public Stream<OntologyObjectProperty> getAllObjectProperties() {
    // TODO Auto-generated method stub - not implemented yet.
    // Return an empty stream instead of null so callers composing stream
    // pipelines do not hit a NullPointerException.
    return Stream.empty();
}
@Override
// Not implemented: deleting the local ontology is not supported yet; the call
// only logs an error and leaves the ontology untouched.
public void deleteLocalOntology() throws OntologyManagerException {
LOGGER.error("Not implemented yet.");
}
@Override
// Not implemented: class removal by IRI is not supported yet; logs an error only.
public void removeClass(String iri) {
LOGGER.error("Not implemented yet.");
}
@Override
// Not implemented: class removal by name/namespace is not supported yet; logs an error only.
public void removeClass(String iri, String namespace) {
LOGGER.error("Not implemented yet.");
}
@Override
// Not implemented: individual removal by IRI is not supported yet; logs an error only.
public void removeIndividual(String iri) {
LOGGER.error("Not implemented yet.");
}
@Override
// Not implemented: individual removal by name/namespace is not supported yet; logs an error only.
public void removeIndividual(String iri, String namespace) {
LOGGER.error("Not implemented yet.");
}
@Override
// Not implemented: object-property removal by IRI is not supported yet; logs an error only.
public void removeObjectProperty(String iri) {
LOGGER.error("Not implemented yet.");
}
@Override
// Not implemented: object-property removal by name/namespace is not supported yet; logs an error only.
public void removeObjectProperty(String iri, String namespace) {
LOGGER.error("Not implemented yet.");
}
}
|
package org.mwc.cmap.plotViewer.actions;
import java.awt.Dimension;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.KeyAdapter;
import org.eclipse.swt.events.KeyEvent;
import org.eclipse.swt.events.KeyListener;
import org.eclipse.swt.events.PaintEvent;
import org.eclipse.swt.events.PaintListener;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.GC;
import org.eclipse.swt.graphics.LineAttributes;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.widgets.Display;
import org.mwc.cmap.core.CorePlugin;
import org.mwc.cmap.core.operations.DebriefActionWrapper;
import org.mwc.cmap.plotViewer.editors.chart.SWTCanvas;
import org.mwc.cmap.plotViewer.editors.chart.SWTChart;
import org.mwc.cmap.plotViewer.editors.chart.SWTChart.PlotMouseDragger;
import MWC.GUI.Layers;
import MWC.GUI.PlainChart;
import MWC.GUI.Tools.Action;
import MWC.GenericData.WorldArea;
import MWC.GenericData.WorldLocation;
/**
* @author ian.mayo
*/
public class ZoomIn extends CoreDragAction
{
public static class ZoomInMode extends SWTChart.PlotMouseDragger
{
Point _startPoint;
SWTCanvas _myCanvas;
private PlainChart _myChart;
private Rectangle res;
private int JITTER;
private Layers layers;
private boolean dragResult;
final private KeyListener listener = new KeyAdapter()
{
@Override
public void keyPressed(KeyEvent e)
{
if (e.keyCode == SWT.ESC) {
dragResult = false;
}
}
};
final private PaintListener paintListener = new PaintListener()
{
@SuppressWarnings("deprecation")
public void paintControl(PaintEvent e)
{
final GC gc = e.gc;
final Color fc = new Color(Display.getDefault(), 155, 155, 155);
gc.setForeground(fc);
gc.setXORMode(true);
gc.setLineAttributes(new LineAttributes(2, SWT.CAP_FLAT, SWT.JOIN_MITER,
SWT.LINE_SOLID, null, 0, 10));
gc.drawRectangle(res);
gc.setXORMode(false);
fc.dispose();
}
};
@Override
public void doMouseDrag(final Point pt, final int JITTER,
final Layers theLayers, final SWTCanvas theCanvas)
{
// redraw canvas backdrop
_myCanvas.getCanvas().redraw();
// just do a check that we have our start point (it may have been cleared
// at the end of the move operation)
if (_startPoint != null)
{
final int deltaX = _startPoint.x - pt.x;
final int deltaY = _startPoint.y - pt.y;
this.JITTER = JITTER;
this.layers = theLayers;
final Rectangle rect = new Rectangle(_startPoint.x, _startPoint.y, -deltaX,
-deltaY);
res = rect;
}
Display.getCurrent().update();
}
@Override
public void doMouseUp(Point point, int keyState)
{
run();
_myCanvas.getCanvas().removeKeyListener(listener);
_myCanvas.getCanvas().removePaintListener(paintListener);
_myCanvas.getCanvas().redraw();
Display.getCurrent().update();
_myChart = null;
_myCanvas = null;
_startPoint = null;
}
@Override
public void mouseDown(Point point, SWTCanvas canvas, PlainChart theChart)
{
_startPoint = point;
_myCanvas = canvas;
_myChart = theChart;
_myCanvas.getCanvas().addKeyListener(listener);
_myCanvas.getCanvas().addPaintListener(paintListener);
dragResult = true;
}
/**
 * Apply the completed rubber-band drag as a zoom action: a top-left to
 * bottom-right drag zooms in to the dragged area, the reverse zooms out.
 * The resulting action is wrapped and submitted to the undo buffer.
 */
private void run() {
    if (dragResult) {
        // corners of the dragged rectangle, in screen coordinates
        java.awt.Point tl = new java.awt.Point(res.x, res.y);
        java.awt.Point br = new java.awt.Point(res.x + res.width, res.y
                + res.height);
        // ignore tiny accidental drags
        if (Math.abs(res.width) > JITTER || Math.abs(res.height) > JITTER)
        {
            WorldLocation locA = new WorldLocation(_myCanvas.getProjection()
                    .toWorld(tl));
            WorldLocation locB = new WorldLocation(_myCanvas.getProjection()
                    .toWorld(br));
            WorldArea area = new WorldArea(locA, locB);
            WorldArea oldArea = _myCanvas.getProjection().getDataArea();
            Action theAction = null;
            // find where the cursor currently is (in absolute coords, not delta coords)
            Point finalPos = Display.getCurrent().getCursorLocation();
            // the finalPos we're retrieving is in screen coords, not the coords for this panel.
            // so, get the display to give us the co-ords for inside the canvas
            final Point mappedFinal = Display.getCurrent().map(null, _myCanvas.getCanvas(), finalPos);
            // ok, now consider the overall drag operation, just in case it started with BR->TL, but
            // ended up with TL->BR.
            final int overallX = mappedFinal.x - _startPoint.x;
            final int overallY = mappedFinal.y - _startPoint.y;
            // if the drag was from TL to BR
            if (overallX >= 0 || overallY >= 0)
            {
                // then zoom in
                theAction = new MWC.GUI.Tools.Chart.ZoomIn.ZoomInAction(_myChart,
                        oldArea, area);
            }
            // if the drag was from BR to TL
            else
            {
                final Dimension screenSize = _myCanvas.getSize();
                // now, we have to root the scale, since the ZoomOutAction is expecting
                // a 'length', not an 'area'.
                // FIX: perform the division in double precision — the original
                // divided two ints, truncating the ratio before the sqrt and
                // producing a coarse (sometimes 1.0) zoom-out scale.
                // NOTE(review): res.width/res.height may be negative for a
                // BR->TL drag (rect built with -delta); the product of both is
                // then positive, but a single negative dimension would make
                // the sqrt NaN — confirm upstream invariant.
                final double scale = Math.sqrt(
                        ((double) screenSize.height * screenSize.width)
                                / ((double) res.height * res.width));
                theAction = new MWC.GUI.Tools.Chart.ZoomOut.ZoomOutAreaAction(
                        _myChart, oldArea, area, scale);
            }
            // and wrap it
            DebriefActionWrapper daw = new DebriefActionWrapper(theAction,
                    layers, null);
            // and add it to the clipboard
            CorePlugin.run(daw);
        }
    }
}
}
@Override
public PlotMouseDragger getDragMode()
{
    // A fresh instance per call, so per-drag state (_startPoint etc.) is
    // never shared between gestures.
    return new ZoomInMode();
}
}
|
package org.exist.dom;
import org.exist.security.PermissionDeniedException;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.exist.EXistException;
import org.exist.collections.Collection;
import org.exist.collections.IndexInfo;
import org.exist.security.xacml.AccessContext;
import org.exist.storage.BrokerPool;
import org.exist.storage.DBBroker;
import org.exist.storage.ElementValue;
import org.exist.storage.serializers.Serializer;
import org.exist.storage.txn.TransactionManager;
import org.exist.storage.txn.Txn;
import org.exist.util.Configuration;
import org.exist.util.DatabaseConfigurationException;
import org.exist.util.XMLFilenameFilter;
import org.exist.xmldb.XmldbURI;
import org.exist.xquery.AncestorSelector;
import org.exist.xquery.ChildSelector;
import org.exist.xquery.Constants;
import org.exist.xquery.DescendantOrSelfSelector;
import org.exist.xquery.DescendantSelector;
import org.exist.xquery.NameTest;
import org.exist.xquery.NodeSelector;
import org.exist.xquery.XPathException;
import org.exist.xquery.XQuery;
import org.exist.xquery.value.Item;
import org.exist.xquery.value.NodeValue;
import org.exist.xquery.value.Sequence;
import org.exist.xquery.value.Type;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import java.io.File;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
/**
* Test basic {@link org.exist.dom.NodeSet} operations to ensure that
* the used algorithms are correct.
*
* @author wolf
* @author Adam Retter <[email protected]>
*/
public class BasicNodeSetTest {
private final static String NESTED_XML =
"<section n='1'>" +
"<section n='1.1'>" +
"<section n='1.1.1'>" +
"<para n='1.1.1.1'/>" +
"<para n='1.1.1.2'/>" +
"<para n='1.1.1.3'/>" +
"</section>" +
"<section n='1.1.2'>" +
"<para n='1.1.2.1'/>" +
"</section>" +
"</section>" +
"<section n='1.2'>" +
"<para n='1.2.1'/>" +
"</section>" +
"</section>";
private static BrokerPool pool = null;
private static Collection root = null;
private static DBBroker broker = null;
private static Sequence seqSpeech = null;
private static DocumentSet docs = null;
@Test
public void childSelector() throws XPathException {
    // LINE elements that are direct children of the SPEECH node set.
    final NodeSet speeches = seqSpeech.toNodeSet();
    final NodeSelector childSel = new ChildSelector(speeches, -1);
    final NameTest lineTest = new NameTest(Type.ELEMENT, new QName("LINE", ""));
    final NodeSet lines = broker.getElementIndex().findElementsByTagName(
            ElementValue.ELEMENT, seqSpeech.getDocumentSet(), lineTest.getName(), childSel);
    assertEquals(9492, lines.getLength());
}
@Test
public void descendantOrSelfSelector() throws XPathException {
    // descendant-or-self over the SPEECH set matched against SPEECH
    // should return every SPEECH element itself (2628).
    NodeSelector selector = new DescendantOrSelfSelector(seqSpeech.toNodeSet(), -1);
    NameTest test = new NameTest(Type.ELEMENT, new QName("SPEECH", ""));
    NodeSet set = broker.getElementIndex().findElementsByTagName(ElementValue.ELEMENT, seqSpeech.getDocumentSet(), test.getName(), selector);
    assertEquals(2628, set.getLength());
}
@Test
public void ancestorSelector() throws XPathException {
    // ACT ancestors of all SPEECH elements (self excluded): 15 acts.
    NodeSelector selector = new AncestorSelector(seqSpeech.toNodeSet(), -1, false, true);
    NameTest test = new NameTest(Type.ELEMENT, new QName("ACT", ""));
    NodeSet set = broker.getElementIndex().findElementsByTagName(ElementValue.ELEMENT, seqSpeech.getDocumentSet(), test.getName(), selector);
    assertEquals(15, set.getLength());
}
@Test
public void ancestorSelector_self() throws XPathException {
    // With includeSelf=true, ancestor-or-self over SPEECH matched against
    // SPEECH yields the SPEECH elements themselves (2628).
    NodeSet ns = seqSpeech.toNodeSet();
    NodeSelector selector = new AncestorSelector(ns, -1, true, true);
    NameTest test = new NameTest(Type.ELEMENT, new QName("SPEECH", ""));
    NodeSet set = broker.getElementIndex().findElementsByTagName(ElementValue.ELEMENT, seqSpeech.getDocumentSet(), test.getName(), selector);
    assertEquals(2628, set.getLength());
}
@Test
public void descendantSelector() throws XPathException, SAXException, PermissionDeniedException {
    // SPEAKER descendants of all 72 SCENE elements: 2639 expected.
    Sequence seq = executeQuery(broker, "//SCENE", 72, null);
    NameTest test = new NameTest(Type.ELEMENT, new QName("SPEAKER", ""));
    NodeSelector selector = new DescendantSelector(seq.toNodeSet(), -1);
    NodeSet set = broker.getElementIndex().findElementsByTagName(ElementValue.ELEMENT, seq.getDocumentSet(), test.getName(), selector);
    assertEquals(2639, set.getLength());
}
@Test
public void selectParentChild() throws XPathException, SAXException, PermissionDeniedException {
    // SPEAKER children of the single SPEECH containing 'perturbed spirit'
    // — exactly one, and it serializes as HAMLET.
    NameTest test = new NameTest(Type.ELEMENT, new QName("SPEAKER", ""));
    NodeSet speakers = broker.getElementIndex().findElementsByTagName(ElementValue.ELEMENT, docs, test.getName(), null);
    Sequence smallSet = executeQuery(broker, "//SPEECH/LINE[fn:contains(., 'perturbed spirit')]/ancestor::SPEECH", 1, null);
    NodeSet result = NodeSetHelper.selectParentChild(speakers, smallSet.toNodeSet(), NodeSet.DESCENDANT, -1);
    assertEquals(1, result.getLength());
    String value = serialize(broker, result.itemAt(0));
    // FIX: JUnit's assertEquals contract is (expected, actual); the original
    // had the arguments swapped, which yields a misleading failure message.
    assertEquals("<SPEAKER>HAMLET</SPEAKER>", value);
}
@Test
public void selectParentChild_2() throws XPathException, SAXException, PermissionDeniedException {
    // One SPEAKER child per each of the 187 SPEECHes mentioning 'love'.
    NameTest test = new NameTest(Type.ELEMENT, new QName("SPEAKER", ""));
    NodeSet speakers = broker.getElementIndex().findElementsByTagName(ElementValue.ELEMENT, docs, test.getName(), null);
    Sequence largeSet = executeQuery(broker, "//SPEECH/LINE[fn:contains(., 'love')]/ancestor::SPEECH", 187, null);
    NodeSet result = NodeSetHelper.selectParentChild(speakers, largeSet.toNodeSet(), NodeSet.DESCENDANT, -1);
    assertEquals(187, result.getLength());
}
@Test
public void selectAncestorDescendant() throws XPathException, SAXException, PermissionDeniedException{
    // SPEAKER descendants of the single 'closet' SCENE: 56 expected.
    NameTest test = new NameTest(Type.ELEMENT, new QName("SPEAKER", ""));
    NodeSet speakers = broker.getElementIndex().findElementsByTagName(ElementValue.ELEMENT, docs, test.getName(), null);
    Sequence outerSet = executeQuery(broker, "//SCENE/TITLE[fn:contains(., 'closet')]/ancestor::SCENE", 1, null);
    NodeSet result = speakers.selectAncestorDescendant(outerSet.toNodeSet(), NodeSet.DESCENDANT, false, -1, true);
    assertEquals(56, result.getLength());
}
@Test
public void selectAncestorDescendant_2() throws XPathException, SAXException, PermissionDeniedException{
    // With includeSelf=true a set selected against itself returns itself.
    Sequence outerSet = executeQuery(broker, "//SCENE/TITLE[fn:contains(., 'closet')]/ancestor::SCENE", 1, null);
    NodeSet result = ((AbstractNodeSet)outerSet).selectAncestorDescendant(outerSet.toNodeSet(), NodeSet.DESCENDANT, true, -1, true);
    assertEquals(1, result.getLength());
}
@Test
public void getParents() throws XPathException, SAXException, PermissionDeniedException{
    // Distinct parents of 187 SPEECHes: 51 (several SPEECHes share a SCENE).
    Sequence largeSet = executeQuery(broker, "//SPEECH/LINE[fn:contains(., 'love')]/ancestor::SPEECH", 187, null);
    NodeSet result = ((AbstractNodeSet)largeSet).getParents(-1);
    assertEquals(51, result.getLength());
}
@Test
public void selectAncestors() throws XPathException, SAXException, PermissionDeniedException {
    // SCENE ancestors of the 187 'love' SPEECHes: 49 distinct scenes.
    NameTest test = new NameTest(Type.ELEMENT, new QName("SCENE", ""));
    NodeSet scenes = broker.getElementIndex().findElementsByTagName(ElementValue.ELEMENT, docs, test.getName(), null);
    Sequence largeSet = executeQuery(broker, "//SPEECH/LINE[fn:contains(., 'love')]/ancestor::SPEECH", 187, null);
    NodeSet result = ((AbstractNodeSet)scenes).selectAncestors(largeSet.toNodeSet(), false, -1);
    assertEquals(49, result.getLength());
}
@Test
public void nodeProxy_getParents() throws XPathException, SAXException, PermissionDeniedException {
    // Exercises the single-NodeProxy variants of getParents and
    // selectParentChild against the 'perturbed spirit' SPEECH.
    Sequence smallSet = executeQuery(broker, "//SPEECH/LINE[fn:contains(., 'perturbed spirit')]/ancestor::SPEECH", 1, null);
    NodeProxy proxy = (NodeProxy) smallSet.itemAt(0);
    NodeSet result = proxy.getParents(-1);
    assertEquals(1, result.getLength());
    NameTest test = new NameTest(Type.ELEMENT, new QName("SPEAKER", ""));
    NodeSet speakers = broker.getElementIndex().findElementsByTagName(ElementValue.ELEMENT, docs, test.getName(), null);
    result = speakers.selectParentChild(proxy, NodeSet.DESCENDANT, -1);
    assertEquals(1, result.getLength());
}
@Test
public void selectFollowingSiblings() throws XPathException, SAXException, PermissionDeniedException {
    // LINE elements that follow the SPEAKER of each 'love' SPEECH: 1689.
    Sequence largeSet = executeQuery(broker, "//SPEECH/LINE[fn:contains(., 'love')]/ancestor::SPEECH/SPEAKER", 187, null);
    NameTest test = new NameTest(Type.ELEMENT, new QName("LINE", ""));
    NodeSet lines = broker.getElementIndex().findElementsByTagName(ElementValue.ELEMENT, docs, test.getName(), null);
    NodeSet result = ((AbstractNodeSet) lines).selectFollowingSiblings(largeSet.toNodeSet(), -1);
    assertEquals(1689, result.getLength());
}
@Test
public void selectPrecedingSiblings() throws XPathException, SAXException, PermissionDeniedException {
    // One SPEAKER precedes LINE[1] in each of the 187 'love' SPEECHes.
    NameTest test = new NameTest(Type.ELEMENT, new QName("SPEAKER", ""));
    NodeSet speakers = broker.getElementIndex().findElementsByTagName(ElementValue.ELEMENT, docs, test.getName(), null);
    Sequence largeSet = executeQuery(broker, "//SPEECH/LINE[fn:contains(., 'love')]/ancestor::SPEECH/LINE[1]", 187, null);
    NodeSet result = ((AbstractNodeSet) speakers).selectPrecedingSiblings(largeSet.toNodeSet(), -1);
    assertEquals(187, result.getLength());
}
@Test
public void extArrayNodeSet_selectParentChild_1() throws XPathException, SAXException, PermissionDeniedException {
    // Nested parent set (1.1 contains 1.1.1); paras directly under either: 3.
    Sequence nestedSet = executeQuery(broker, "//section[@n = ('1.1', '1.1.1')]", 2, null);
    NameTest test = new NameTest(Type.ELEMENT, new QName("para", ""));
    NodeSet children = broker.getElementIndex().findElementsByTagName(ElementValue.ELEMENT, docs, test.getName(), null);
    NodeSet result = children.selectParentChild(nestedSet.toNodeSet(), NodeSet.DESCENDANT);
    assertEquals(3, result.getLength());
}
@Test
public void extArrayNodeSet_selectParentChild_2() throws XPathException, SAXException, PermissionDeniedException {
    // Parents 1.1, 1.1.2, 1.2 — direct para children: 1.1.2.1 and 1.2.1.
    Sequence nestedSet = executeQuery(broker, "//section[@n = ('1.1', '1.1.2', '1.2')]", 3, null);
    NameTest test = new NameTest(Type.ELEMENT, new QName("para", ""));
    NodeSet children = broker.getElementIndex().findElementsByTagName(ElementValue.ELEMENT, docs, test.getName(), null);
    NodeSet result = children.selectParentChild(nestedSet.toNodeSet(), NodeSet.DESCENDANT);
    assertEquals(2, result.getLength());
}
@Test
public void extArrayNodeSet_selectParentChild_3() throws XPathException, SAXException, PermissionDeniedException {
    // Parents 1.1, 1.1.1, 1.2 — direct para children: 1.1.1.{1,2,3} + 1.2.1.
    Sequence nestedSet = executeQuery(broker, "//section[@n = ('1.1', '1.1.1', '1.2')]", 3, null);
    NameTest test = new NameTest(Type.ELEMENT, new QName("para", ""));
    NodeSet children = broker.getElementIndex().findElementsByTagName(ElementValue.ELEMENT, docs, test.getName(), null);
    NodeSet result = children.selectParentChild(nestedSet.toNodeSet(), NodeSet.DESCENDANT);
    assertEquals(4, result.getLength());
}
@Test
public void extArrayNodeSet_selectParentChild_4() throws XPathException, SAXException, PermissionDeniedException {
    // Inverse direction: the single para 1.1.2.1 has exactly one section parent.
    Sequence nestedSet = executeQuery(broker, "//para[@n = ('1.1.2.1')]", 1, null);
    NameTest test = new NameTest(Type.ELEMENT, new QName("section", ""));
    NodeSet sections = broker.getElementIndex().findElementsByTagName(ElementValue.ELEMENT, docs, test.getName(), null);
    NodeSet result = ((NodeSet) nestedSet).selectParentChild(sections.toNodeSet(), NodeSet.DESCENDANT);
    assertEquals(1, result.getLength());
}
@Test
public void testOptimizations() throws XPathException, SAXException, PermissionDeniedException {
Serializer serializer = broker.getSerializer();
serializer.reset();
DocumentSet docs = root.allDocs(broker, new DefaultDocumentSet(), true, false);
System.out.println("
// parent set: 1.1.1; child set: 1.1.1.1, 1.1.1.2, 1.1.1.3, 1.1.2.1, 1.2.1
ExtNodeSet nestedSet = (ExtNodeSet) executeQuery(broker, "//section[@n = '1.1.1']", 1, null);
NodeSet children =
broker.getElementIndex().findDescendantsByTagName(ElementValue.ELEMENT,
new QName("para", ""), Constants.CHILD_AXIS, docs, nestedSet, -1);
assertEquals(3, children.getLength());
// parent set: 1.1; child set: 1.1.1, 1.1.2
nestedSet = (ExtNodeSet) executeQuery(broker, "//section[@n = '1.1']", 1, null);
children =
broker.getElementIndex().findDescendantsByTagName(ElementValue.ELEMENT,
new QName("section", ""), Constants.CHILD_AXIS, docs, nestedSet, -1);
assertEquals(2, children.getLength());
// parent set: 1, 1.1, 1.1.1, 1.1.2 ; child set: 1.1.1.1, 1.1.1.2, 1.1.1.3, 1.1.2.1, 1.2.1
// problem: ancestor set contains nested nodes
nestedSet = (ExtNodeSet) executeQuery(broker, "//section[@n = ('1.1', '1.1.1', '1.1.2')]", 3, null);
children =
broker.getElementIndex().findDescendantsByTagName(ElementValue.ELEMENT,
new QName("para", ""), Constants.CHILD_AXIS, docs, nestedSet, -1);
assertEquals(4, children.getLength());
// parent set: 1.1, 1.1.2, 1.2 ; child set: 1.1.1.1, 1.1.1.2, 1.1.1.3, 1.1.2.1, 1.2.1
// problem: ancestor set contains nested nodes
nestedSet = (ExtNodeSet) executeQuery(broker, "//section[@n = ('1.1', '1.1.2', '1.2')]", 3, null);
children =
broker.getElementIndex().findDescendantsByTagName(ElementValue.ELEMENT, new QName("para", ""),
Constants.CHILD_AXIS, docs, nestedSet, -1);
assertEquals(2, children.getLength());
nestedSet = (ExtNodeSet) executeQuery(broker, "//section[@n = '1.1']", 1, null);
children =
broker.getElementIndex().findDescendantsByTagName(ElementValue.ELEMENT, new QName("para", ""),
Constants.DESCENDANT_AXIS, docs, nestedSet, -1);
assertEquals(4, children.getLength());
nestedSet = (ExtNodeSet) executeQuery(broker, "//section[@n = '1']", 1, null);
children =
broker.getElementIndex().findDescendantsByTagName(ElementValue.ELEMENT, new QName("para", ""),
Constants.DESCENDANT_AXIS, docs, nestedSet, -1);
assertEquals(5, children.getLength());
nestedSet = (ExtNodeSet) executeQuery(broker, "//section[@n = '1.1.2']", 1, null);
children =
broker.getElementIndex().findDescendantsByTagName(ElementValue.ELEMENT, new QName("section", ""),
Constants.DESCENDANT_SELF_AXIS, docs, nestedSet, -1);
assertEquals(1, children.getLength());
nestedSet = (ExtNodeSet) executeQuery(broker, "//section[@n = '1.1.2']", 1, null);
children =
broker.getElementIndex().findDescendantsByTagName(ElementValue.ATTRIBUTE, new QName("n", ""),
Constants.ATTRIBUTE_AXIS, docs, nestedSet, -1);
assertEquals(1, children.getLength());
nestedSet = (ExtNodeSet) executeQuery(broker, "//section[@n = '1.1']", 1, null);
children =
broker.getElementIndex().findDescendantsByTagName(ElementValue.ATTRIBUTE, new QName("n", ""),
Constants.DESCENDANT_ATTRIBUTE_AXIS, docs, nestedSet, -1);
assertEquals(7, children.getLength());
System.out.println("
}
@Test
public void virtualNodeSet_1() throws XPathException, SAXException, PermissionDeniedException {
    // Wildcard step forces a VirtualNodeSet; all 9492 LINEs expected.
    executeQuery(broker, "//*/LINE", 9492, null);
}
@Test
public void virtualNodeSet_2() throws XPathException, SAXException, PermissionDeniedException {
    // Element children of LINE via a trailing wildcard step: 61 expected.
    executeQuery(broker, "//*/LINE/*", 61, null);
}
@Test
public void virtualNodeSet_3() throws XPathException, SAXException, PermissionDeniedException {
    // text() step after the virtual set: 9485 text nodes expected.
    executeQuery(broker, "//*/LINE/text()", 9485, null);
}
@Test
public void virtualNodeSet_4() throws XPathException, SAXException, PermissionDeniedException {
    // Wildcard in the middle of the path: 9464 LINEs under SCENE/*.
    executeQuery(broker, "//SCENE/*/LINE", 9464, null);
}
@Test
public void virtualNodeSet_5() throws XPathException, SAXException, PermissionDeniedException {
executeQuery(broker, "//SCENE/*/LINE[fn:contains(., 'the')]", 3198, null);
|
package org.exist.http;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.io.StringReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import javax.xml.parsers.ParserConfigurationException;
import com.googlecode.junittoolbox.ParallelRunner;
import org.eclipse.jetty.http.HttpStatus;
import org.exist.Namespaces;
import org.exist.dom.memtree.SAXAdapter;
import org.exist.test.ExistWebServer;
import org.exist.util.Base64Encoder;
import org.exist.xmldb.XmldbURI;
import org.junit.runner.RunWith;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import javax.xml.parsers.SAXParserFactory;
import javax.xml.parsers.SAXParser;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.not;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeThat;
/**
* A test case for accessing a remote server via REST-Style Web API.
* @author wolf
* @author Pierrick Brihaye <[email protected]>
*/
//@RunWith(ParallelRunner.class) // TODO(AR) when running in parallel a deadlock is encountered in eXist-db... this needs to be resolved!
public class RESTServiceTest {
@ClassRule
public static final ExistWebServer existWebServer = new ExistWebServer(true, false, true);
private static final String XML_DATA = "<test>"
+ "<para>\u00E4\u00E4\u00FC\u00FC\u00F6\u00F6\u00C4\u00C4\u00D6\u00D6\u00DC\u00DC</para>"
+ "</test>";
private static final String XUPDATE = "<xu:modifications xmlns:xu=\"http:
+ "<xu:append select=\"/test\" child=\"1\">"
+ "<para>Inserted paragraph.</para>"
+ "</xu:append>" + "</xu:modifications>";
private static final String QUERY_REQUEST = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+ "<query xmlns=\"" + Namespaces.EXIST_NS + "\">"
+ "<properties>"
+ "<property name=\"indent\" value=\"yes\"/>"
+ "<property name=\"encoding\" value=\"UTF-8\"/>"
+ "</properties>"
+ "<text>"
+ "xquery version \"1.0\";"
+ "(::pragma exist:serialize indent=no ::)"
+ "//para"
+ "</text>" + "</query>";
private static final String QUERY_REQUEST_ERROR = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+ "<query xmlns=\"" + Namespaces.EXIST_NS + "\">"
+ "<properties>"
+ "<property name=\"indent\" value=\"yes\"/>"
+ "<property name=\"encoding\" value=\"UTF-8\"/>"
+ "</properties>"
+ "<text>"
+ "xquery version \"1.0\";"
+ "//undeclared:para"
+ "</text>" + "</query>";
private static final String TEST_MODULE =
"module namespace t=\"http://test.foo\";\n" +
"declare variable $t:VAR { 'World!' };";
private static final String TEST_XQUERY =
"xquery version \"1.0\";\n" +
"declare option exist:serialize \"method=text media-type=text/text\";\n" +
"import module namespace request=\"http://exist-db.org/xquery/request\";\n" +
"import module namespace t=\"http://test.foo\" at \"module.xq\";\n" +
"let $param := request:get-parameter('p', ())\n" +
"return\n" +
" ($param, ' ', $t:VAR)";
private static final String TEST_XQUERY_PARAMETER =
"xquery version \"1.0\";\n" +
"declare namespace request=\"http://exist-db.org/xquery/request\";\n" +
"import module namespace requestparametermod=\"http://exist-db.org/xquery/requestparametermod\" at \"requestparametermod.xqm\";\n" +
"concat(\"xql=\", request:get-parameter(\"doc\",())),\n" +
"concat(\"xqm=\", $requestparametermod:request)";
private static final String TEST_XQUERY_PARAMETER_MODULE =
"module namespace requestparametermod = \"http://exist-db.org/xquery/requestparametermod\";\n" +
"declare namespace request=\"http://exist-db.org/xquery/request\";\n" +
"declare variable $requestparametermod:request { request:get-parameter(\"doc\",())};\n";
private static final String TEST_XQUERY_WITH_PATH_PARAMETER =
"xquery version \"1.0\";\n" +
"declare namespace request=\"http://exist-db.org/xquery/request\";\n" +
"declare option exist:serialize \"method=text media-type=text/text\";\n" +
"(\"pathInfo=\", request:get-path-info(), \"\n\"," +
"\"servletPath=\", request:get-servlet-path(), \"\n\")";
private static final String TEST_XQUERY_WITH_PATH_AND_CONTENT =
"xquery version \"3.0\";\n" +
"declare namespace request=\"http://exist-db.org/xquery/request\";\n" +
"declare option exist:serialize \"method=text media-type=text/text\";\n" +
"request:get-data()//data/text() || ' ' || request:get-path-info()";
private static String credentials;
private static String badCredentials;
// Base REST URI of the embedded test server (port is chosen at startup).
private static String getServerUri() {
    return "http://localhost:" + existWebServer.getPort() + "/rest";
}
// Server root without the /rest prefix (exercises the redirecting path).
private static String getServerUriRedirected() {
    return "http://localhost:" + existWebServer.getPort();
}
// URI of the /db/test collection used by most tests.
private static String getCollectionUri() {
    return getServerUri() + XmldbURI.ROOT_COLLECTION + "/test";
}
// Same collection, reached without the /rest prefix.
private static String getCollectionUriRedirected() {
    return getServerUriRedirected() + XmldbURI.ROOT_COLLECTION + "/test";
}
// URI of the canonical test document /db/test/test.xml.
private static String getResourceUri() {
    return getServerUri() + XmldbURI.ROOT_COLLECTION + "/test/test.xml";
}
// Also in the mix: some (mandatory except %27) escapes, some multibyte UTF-8 characters
// and a superficial directory traversal and a superficial double slash too
private static String getResourceUriPlus() {
return getServerUri() + XmldbURI.ROOT_COLLECTION + "/test
}
@BeforeClass
public static void createCredentials() {
    // Pre-compute the Basic-Auth tokens (good and deliberately bad) once
    // for the whole test class.
    final Base64Encoder enc = new Base64Encoder();
    // FIX: String.getBytes() without a charset uses the platform default;
    // make the encoding explicit (both inputs are ASCII, so the produced
    // tokens are unchanged on any sane platform).
    enc.translate("admin:".getBytes(UTF_8));
    credentials = new String(enc.getCharArray());
    enc.reset();
    enc.translate("johndoe:this pw should fail".getBytes(UTF_8));
    badCredentials = new String(enc.getCharArray());
}
@Test
public void getFailNoSuchDocument() throws IOException {
    // GET on a document that does not exist must yield 404.
    final String missing = getCollectionUri() + "/nosuchdocument.xml";
    final HttpURLConnection con = getConnection(missing);
    try {
        con.setRequestMethod("GET");
        con.connect();
        final int status = con.getResponseCode();
        assertEquals("Server returned response code " + status, HttpStatus.NOT_FOUND_404, status);
    } finally {
        con.disconnect();
    }
}
@Test
public void xqueryGetWithEmptyPath() throws IOException {
    // GET of a stored XQuery with no trailing path: servletPath is the query
    // document itself and pathInfo is empty.
    /* store the documents that we need for this test */
    doPut(TEST_XQUERY_WITH_PATH_PARAMETER, "requestwithpath.xq", HttpStatus.CREATED_201);
    final String path = getCollectionUri() + "/requestwithpath.xq";
    final HttpURLConnection connect = getConnection(path);
    try {
        connect.setRequestProperty("Authorization", "Basic " + credentials);
        connect.setRequestMethod("GET");
        connect.connect();
        final int r = connect.getResponseCode();
        assertEquals("Server returned response code " + r, HttpStatus.OK_200, r);
        final String response = readResponse(connect.getInputStream());
        // '- 2' strips the "\r\n" that the query emits between the two values
        final String pathInfo = response.substring("pathInfo=".length(), response.indexOf("servletPath=") - 2);
        final String servletPath = response.substring(response.indexOf("servletPath=") + "servletPath=".length(), response.lastIndexOf("\r\n"));
        //check the responses
        assertEquals("XQuery servletPath is: \"" + servletPath + "\" expected: \"/db/test/requestwithpath.xq\"", "/db/test/requestwithpath.xq", servletPath);
        assertEquals("XQuery pathInfo is: \"" + pathInfo + "\" expected: \"\"", "", pathInfo);
    } finally {
        connect.disconnect();
    }
}
@Test
public void xqueryPOSTWithEmptyPath() throws IOException {
    // Same as the GET variant, but via POST: pathInfo must still be empty.
    /* store the documents that we need for this test */
    doPut(TEST_XQUERY_WITH_PATH_PARAMETER, "requestwithpath.xq", HttpStatus.CREATED_201);
    String path = getCollectionUri() + "/requestwithpath.xq";
    final HttpURLConnection connect = preparePost("boo", path);
    try {
        connect.connect();
        final int r = connect.getResponseCode();
        assertEquals("Server returned response code " + r, HttpStatus.OK_200, r);
        final String response = readResponse(connect.getInputStream());
        final String pathInfo = response.substring("pathInfo=".length(), response.indexOf("servletPath=")-2);
        final String servletPath = response.substring(response.indexOf("servletPath=") + "servletPath=".length(), response.lastIndexOf("\r\n"));
        //check the responses
        assertEquals("XQuery servletPath is: \"" + servletPath + "\" expected: \"/db/test/requestwithpath.xq\"", "/db/test/requestwithpath.xq", servletPath);
        assertEquals("XQuery pathInfo is: \"" + pathInfo + "\" expected: \"\"", "", pathInfo);
    } finally {
        connect.disconnect();
    }
}
@Test
public void xqueryGetWithNonEmptyPath() throws IOException {
    // GET with extra path segments after the query: they surface as pathInfo.
    /* store the documents that we need for this test */
    doPut(TEST_XQUERY_WITH_PATH_PARAMETER, "requestwithpath.xq", HttpStatus.CREATED_201);
    final String path = getCollectionUri() + "/requestwithpath.xq/some/path";
    final HttpURLConnection connect = getConnection(path);
    try {
        connect.setRequestProperty("Authorization", "Basic " + credentials);
        connect.setRequestMethod("GET");
        connect.connect();
        final int r = connect.getResponseCode();
        assertEquals("Server returned response code " + r, HttpStatus.OK_200, r);
        final String response = readResponse(connect.getInputStream());
        final String pathInfo = response.substring("pathInfo=".length(), response.indexOf("servletPath=") - 2);
        final String servletPath = response.substring(response.indexOf("servletPath=") + "servletPath=".length(), response.lastIndexOf("\r\n"));
        //check the responses
        assertEquals("XQuery servletPath is: \"" + servletPath + "\" expected: \"/db/test/requestwithpath.xq\"", "/db/test/requestwithpath.xq", servletPath);
        assertEquals("XQuery pathInfo is: \"" + pathInfo + "\" expected: \"/some/path\"", "/some/path", pathInfo);
    } finally {
        connect.disconnect();
    }
}
@Test
public void xqueryPOSTWithNonEmptyPath() throws IOException {
    // POST with extra path segments: pathInfo must be "/some/path".
    /* store the documents that we need for this test */
    doPut(TEST_XQUERY_WITH_PATH_PARAMETER, "requestwithpath.xq", HttpStatus.CREATED_201);
    final String path = getCollectionUri() + "/requestwithpath.xq/some/path";
    final HttpURLConnection connect = preparePost("boo", path);
    try {
        connect.connect();
        final int r = connect.getResponseCode();
        assertEquals("Server returned response code " + r, HttpStatus.OK_200, r);
        final String response = readResponse(connect.getInputStream());
        final String pathInfo = response.substring("pathInfo=".length(), response.indexOf("servletPath=") - 2);
        final String servletPath = response.substring(response.indexOf("servletPath=") + "servletPath=".length(), response.lastIndexOf("\r\n"));
        //check the responses
        assertEquals("XQuery servletPath is: \"" + servletPath + "\" expected: \"/db/test/requestwithpath.xq\"", "/db/test/requestwithpath.xq", servletPath);
        assertEquals("XQuery pathInfo is: \"" + pathInfo + "\" expected: \"/some/path\"", "/some/path", pathInfo);
    } finally {
        connect.disconnect();
    }
}
@Test
public void xqueryGetFailWithNonEmptyPath() throws IOException {
    // A trailing path after a plain XML document (not an XQuery) must 404.
    /* store the documents that we need for this test */
    final HttpURLConnection sconnect = getConnection(getResourceUri());
    try {
        sconnect.setRequestProperty("Authorization", "Basic " + credentials);
        sconnect.setRequestMethod("PUT");
        sconnect.setDoOutput(true);
        // NOTE(review): header name should presumably be "Content-Type";
        // "ContentType" is not a standard HTTP header — confirm before fixing,
        // as the server-side handling (and these assertions) may depend on it.
        sconnect.setRequestProperty("ContentType", "application/xml");
        try (final Writer writer = new OutputStreamWriter(sconnect.getOutputStream(), UTF_8)) {
            writer.write(XML_DATA);
        }
        // NOTE(review): the PUT's response code is never read here, so the
        // upload may not be forced to complete before the GET below — verify.
        final String path = getResourceUri() + "/some/path"; // should not be able to get this path
        final HttpURLConnection connect = getConnection(path);
        try {
            connect.setRequestMethod("GET");
            connect.connect();
            final int r = connect.getResponseCode();
            assertEquals("Server returned response code " + r, HttpStatus.NOT_FOUND_404, r);
        } finally {
            connect.disconnect();
        }
    } finally {
        sconnect.disconnect();
    }
}
@Test
public void testPut() throws IOException {
    // Round-trip: PUT the test document (expects 201), then read it back.
    final int r = uploadData();
    assertEquals("Server returned response code " + r, HttpStatus.CREATED_201, r);
    doGet();
}
@Test
public void testPutPlus() throws IOException {
    // Same round-trip, but with a URI full of escapes/multibyte characters;
    // skipped on Windows where such resource names are not supported.
    assumeThat("Requires non-Windows platform", System.getProperty("os.name").toLowerCase(), not(containsString("win")));
    final int r = uploadDataPlus();
    assertEquals("Server returned response code " + r, HttpStatus.CREATED_201, r);
    doGetPlus();
}
@Test
public void putFailAgainstCollection() throws IOException {
    // PUT directly against a collection URI is invalid and must yield 400.
    final HttpURLConnection connect = getConnection(getCollectionUri());
    try {
        connect.setRequestProperty("Authorization", "Basic " + credentials);
        connect.setRequestMethod("PUT");
        connect.setDoOutput(true);
        // NOTE(review): "ContentType" is likely a typo for "Content-Type" — verify.
        connect.setRequestProperty("ContentType", "application/xml");
        try (final Writer writer = new OutputStreamWriter(connect.getOutputStream(), UTF_8)) {
            writer.write(XML_DATA);
        }
        connect.connect();
        final int r = connect.getResponseCode();
        assertEquals("Server returned response code " + r, HttpStatus.BAD_REQUEST_400, r);
    } finally {
        connect.disconnect();
    }
}
@Test
public void putWithCharset() throws IOException {
    // PUT with an explicit charset parameter in the (mis-spelled) content-type
    // header must still store the document (201) and round-trip via doGet().
    final HttpURLConnection connect = getConnection(getResourceUri());
    try {
        connect.setRequestProperty("Authorization", "Basic " + credentials);
        connect.setRequestMethod("PUT");
        connect.setDoOutput(true);
        // NOTE(review): "ContentType" is likely a typo for "Content-Type" — verify.
        connect.setRequestProperty("ContentType", "application/xml; charset=UTF-8");
        try (final Writer writer = new OutputStreamWriter(connect.getOutputStream(), UTF_8)) {
            writer.write(XML_DATA);
        }
        connect.connect();
        final int r = connect.getResponseCode();
        assertEquals("Server returned response code " + r, HttpStatus.CREATED_201, r);
        doGet();
    } finally {
        connect.disconnect();
    }
}
@Test
public void putFailAndRechallengeAuthorization() throws IOException {
    // Bad credentials must produce 401 plus a Basic re-challenge header.
    final HttpURLConnection connect = getConnection(getResourceUri());
    try {
        connect.setRequestProperty("Authorization", "Basic " + badCredentials);
        connect.setDoOutput(true);
        connect.setRequestMethod("PUT");
        connect.setAllowUserInteraction(false);
        connect.connect();
        final int r = connect.getResponseCode();
        assertEquals("Server returned response code " + r, HttpStatus.UNAUTHORIZED_401, r);
        final String auth = connect.getHeaderField("WWW-Authenticate");
        assertEquals("WWW-Authenticate = " + auth, "Basic realm=\"exist\"", auth);
    } finally {
        connect.disconnect();
    }
}
@Test
public void putAgainstXQuery() throws IOException {
    // PUT routed to a stored XQuery: the query reads the request body and the
    // extra path, returning "test data /a/b/c" with a 200 (not 201).
    doPut(TEST_XQUERY_WITH_PATH_AND_CONTENT, "requestwithcontent.xq", HttpStatus.CREATED_201);
    final String path = getCollectionUriRedirected() + "/requestwithcontent.xq/a/b/c";
    final HttpURLConnection connect = getConnection(path);
    try {
        connect.setRequestProperty("Authorization", "Basic " + credentials);
        connect.setRequestMethod("PUT");
        connect.setDoOutput(true);
        // NOTE(review): "ContentType" is likely a typo for "Content-Type" — verify.
        connect.setRequestProperty("ContentType", "application/xml");
        try (final Writer writer = new OutputStreamWriter(connect.getOutputStream(), UTF_8)) {
            writer.write("<data>test data</data>");
        }
        connect.connect();
        final int r = connect.getResponseCode();
        assertEquals("doPut: Server returned response code " + r, HttpStatus.OK_200, r);
        //get the response of the query
        final String response = readResponse(connect.getInputStream());
        assertEquals("test data /a/b/c", response.trim());
    } finally {
        connect.disconnect();
    }
}
@Test
public void deleteAgainstXQuery() throws IOException {
    // DELETE routed to a stored XQuery: executes the query (200) instead of
    // deleting, and the trailing path surfaces as pathInfo.
    doPut(TEST_XQUERY_WITH_PATH_PARAMETER, "requestwithcontent.xq", HttpStatus.CREATED_201);
    final String path = getCollectionUriRedirected() + "/requestwithcontent.xq/a/b/c";
    final HttpURLConnection connect = getConnection(path);
    try {
        connect.setRequestProperty("Authorization", "Basic " + credentials);
        connect.setRequestMethod("DELETE");
        connect.connect();
        final int r = connect.getResponseCode();
        assertEquals("doDelete: Server returned response code " + r, HttpStatus.OK_200, r);
        //get the response of the query
        final String response = readResponse(connect.getInputStream());
        final String pathInfo = response.substring("pathInfo=".length(), response.indexOf("servletPath=")-2);
        assertEquals("/a/b/c", pathInfo);
    } finally {
        connect.disconnect();
    }
}
@Test
public void headAgainstXQuery() throws IOException {
    // HEAD routed to a stored XQuery must succeed with 200 (no body checked).
    doPut(TEST_XQUERY_WITH_PATH_PARAMETER, "requestwithcontent.xq", HttpStatus.CREATED_201);
    final String path = getCollectionUriRedirected() + "/requestwithcontent.xq/a/b/c";
    final HttpURLConnection connect = getConnection(path);
    try {
        connect.setRequestProperty("Authorization", "Basic " + credentials);
        connect.setRequestMethod("HEAD");
        connect.connect();
        final int r = connect.getResponseCode();
        assertEquals("doHead: Server returned response code " + r, HttpStatus.OK_200, r);
    } finally {
        connect.disconnect();
    }
}
@Test
public void xUpdate() throws IOException {
    // POST an XUpdate document against the stored resource, then re-fetch it.
    final HttpURLConnection connection = preparePost(XUPDATE, getResourceUri());
    try {
        connection.connect();
        final int status = connection.getResponseCode();
        assertEquals("Server returned response code " + status, HttpStatus.OK_200, status);
        doGet();
    } finally {
        connection.disconnect();
    }
}
@Test
public void queryPost() throws IOException, SAXException, ParserConfigurationException {
    uploadData();
    // POST a query-request document and verify it reports exactly one hit.
    final HttpURLConnection connection = preparePost(QUERY_REQUEST, getResourceUri());
    try {
        connection.connect();
        final int status = connection.getResponseCode();
        assertEquals("Server returned response code " + status, HttpStatus.OK_200, status);
        final String body = readResponse(connection.getInputStream());
        assertEquals(1, parseResponse(body));
    } finally {
        connection.disconnect();
    }
}
@Test
public void queryPostXQueryError() throws IOException {
    // An invalid query document must be rejected with 400 Bad Request.
    final HttpURLConnection connection = preparePost(QUERY_REQUEST_ERROR, getResourceUri());
    try {
        connection.connect();
        final int status = connection.getResponseCode();
        assertEquals("Server returned response code " + status, HttpStatus.BAD_REQUEST_400, status);
    } finally {
        connection.disconnect();
    }
}
@Test
public void queryGet() throws IOException {
    // Run a query containing non-ASCII characters via the _query GET
    // parameter; the query text must be URL-encoded as UTF-8.
    final String query = "doc('"
            + XmldbURI.ROOT_COLLECTION
            + "/test/test.xml')//para[. = '\u00E4\u00E4\u00FC\u00FC\u00F6\u00F6\u00C4\u00C4\u00D6\u00D6\u00DC\u00DC']/text()";
    final String uri = getCollectionUri() + "?_query=" + URLEncoder.encode(query, UTF_8.displayName());
    final HttpURLConnection connection = getConnection(uri);
    try {
        connection.setRequestMethod("GET");
        connection.connect();
        final int status = connection.getResponseCode();
        assertEquals("Server returned response code " + status, HttpStatus.OK_200, status);
        // The body content itself is not asserted, only that it can be read.
        readResponse(connection.getInputStream());
    } finally {
        connection.disconnect();
    }
}
@Test
public void queryGetXQueryError() throws IOException {
    // A syntactically invalid query submitted via GET must yield 400.
    final String uri = getCollectionUri()
            + "?_query="
            + URLEncoder.encode("not-$a:-function()", UTF_8.displayName());
    final HttpURLConnection connection = getConnection(uri);
    try {
        connection.setRequestMethod("GET");
        connection.connect();
        final int status = connection.getResponseCode();
        assertEquals("Server returned response code " + status, HttpStatus.BAD_REQUEST_400, status);
    } finally {
        connection.disconnect();
    }
}
@Test
public void requestModule() throws IOException {
    // Verify that request-module functions are usable from within a query.
    String uri = getCollectionUri() + "?_query=request:get-uri()&_wrap=no";
    HttpURLConnection connect = getConnection(uri);
    try {
        connect.setRequestMethod("GET");
        connect.connect();
        final int r = connect.getResponseCode();
        assertEquals("Server returned response code " + r, HttpStatus.OK_200, r);
        final String response = readResponse(connect.getInputStream()).trim();
        assertTrue(response.endsWith(XmldbURI.ROOT_COLLECTION + "/test"));
    } finally {
        connect.disconnect();
    }
    uri = getCollectionUri() + "?_query=request:get-url()&_wrap=no";
    // Fix: obtain the second connection *before* entering the try block.
    // Previously getConnection() was called inside the try, so if it threw,
    // the finally block disconnected the stale first connection (already
    // disconnected above) instead of the one actually in use.
    connect = getConnection(uri);
    try {
        connect.setRequestMethod("GET");
        connect.connect();
        final int r = connect.getResponseCode();
        assertEquals("Server returned response code " + r, HttpStatus.OK_200, r);
        final String response = readResponse(connect.getInputStream()).trim();
        //TODO : the server name may have been renamed by the Web server
        assertTrue(response.endsWith(XmldbURI.ROOT_COLLECTION + "/test"));
    } finally {
        connect.disconnect();
    }
}
@Test
public void requestGetParameterFromModule() throws IOException {
    // Store the main query and the library module it relies on.
    doPut(TEST_XQUERY_PARAMETER, "requestparameter.xql", HttpStatus.CREATED_201);
    doPut(TEST_XQUERY_PARAMETER_MODULE, "requestparametermod.xqm", HttpStatus.CREATED_201);
    // Execute the stored query several times with a varying "doc" parameter.
    for (int i = 0; i < 5; i++) {
        final HttpURLConnection connection = getConnection(getCollectionUri() + "/requestparameter.xql?doc=somedoc" + i);
        try {
            connection.setRequestProperty("Authorization", "Basic " + credentials);
            connection.setRequestMethod("GET");
            connection.connect();
            final int iHttpResult = connection.getResponseCode();
            assertEquals("Server returned response code " + iHttpResult, HttpStatus.OK_200, iHttpResult);
            String contentType = connection.getContentType();
            final int semicolon = contentType.indexOf(';');
            if (semicolon > 0) {
                contentType = contentType.substring(0, semicolon).trim();
            }
            assertEquals("Server returned content type " + contentType, "application/xml", contentType);
            // The body has the form "xql=<value>xqm=<value>\r\n": the first value is
            // the parameter as seen by the main query, the second as seen by the module.
            final String response = readResponse(connection.getInputStream());
            final String strXQLRequestParameter = response.substring("xql=".length(), response.indexOf("xqm="));
            final String strXQMRequestParameter = response.substring(response.indexOf("xqm=") + "xqm=".length(), response.lastIndexOf("\r\n"));
            assertEquals("XQuery Request Parameter is: \"" + strXQLRequestParameter + "\" expected: \"somedoc" + i + "\"", "somedoc" + i, strXQLRequestParameter);
            assertEquals("XQuery Module Request Parameter is: \"" + strXQMRequestParameter + "\" expected: \"somedoc" + i + "\"", "somedoc" + i, strXQMRequestParameter);
        } finally {
            connection.disconnect();
        }
    }
}
@Test
public void storedQuery() throws IOException {
    // Store a library module plus a main query, then execute the query in all
    // three modes: uncached, cached, and cached with the XML result wrapper.
    doPut(TEST_MODULE, "module.xq", HttpStatus.CREATED_201);
    doPut(TEST_XQUERY, "test.xq", HttpStatus.CREATED_201);
    doStoredQuery(false, false);
    doStoredQuery(true, false);
    doStoredQuery(true, true);
}
/**
 * Stores {@code data} at {@code path} below the test collection via HTTP PUT
 * and asserts that the server answers with {@code responseCode}.
 *
 * @param data         the query/document source to store
 * @param path         resource name relative to the test collection
 * @param responseCode the HTTP status code expected from the server
 * @throws IOException on any transport failure
 */
private void doPut(final String data, final String path, final int responseCode) throws IOException {
    final HttpURLConnection connect = getConnection(getCollectionUri() + '/' + path);
    try {
        connect.setRequestProperty("Authorization", "Basic " + credentials);
        connect.setRequestMethod("PUT");
        connect.setDoOutput(true);
        // Fix: "ContentType" is not a valid HTTP header name (and was presumably
        // ignored by the server); the standard header is "Content-Type", as
        // already used by preparePost().
        connect.setRequestProperty("Content-Type", "application/xquery");
        try (final Writer writer = new OutputStreamWriter(connect.getOutputStream(), UTF_8)) {
            writer.write(data);
        }
        connect.connect();
        final int r = connect.getResponseCode();
        assertEquals("doPut: Server returned response code " + r, responseCode, r);
    } finally {
        connect.disconnect();
    }
}
/**
 * Executes the stored query {@code test.xq} with parameter {@code p=Hello} and
 * checks the status, the {@code X-XQuery-Cached} header, the content type and
 * the start of the response body.
 *
 * @param cacheHeader expected value of the X-XQuery-Cached response header
 * @param wrap        whether to request the exist:result XML wrapper
 * @throws IOException on any transport failure
 */
private void doStoredQuery(final boolean cacheHeader, final boolean wrap) throws IOException {
    String uri = getCollectionUri() + "/test.xq?p=Hello";
    if (wrap) {
        uri += "&_wrap=yes";
    }
    final HttpURLConnection connect = getConnection(uri);
    try {
        connect.setRequestProperty("Authorization", "Basic " + credentials);
        connect.setRequestMethod("GET");
        connect.connect();
        final int r = connect.getResponseCode();
        assertEquals("Server returned response code " + r, HttpStatus.OK_200, r);
        final String cached = connect.getHeaderField("X-XQuery-Cached");
        assertNotNull(cached);
        // parseBoolean avoids the boxing round-trip of Boolean.valueOf(...).booleanValue()
        assertEquals(cacheHeader, Boolean.parseBoolean(cached));
        String contentType = connect.getContentType();
        final int semicolon = contentType.indexOf(';');
        if (semicolon > 0) {
            contentType = contentType.substring(0, semicolon).trim();
        }
        if (wrap) {
            assertEquals("Server returned content type " + contentType, "application/xml", contentType);
        } else {
            assertEquals("Server returned content type " + contentType, "text/text", contentType);
        }
        final String response = readResponse(connect.getInputStream());
        if (wrap) {
            assertTrue("Server returned response: " + response,
                    response.startsWith("<exist:result "));
        } else {
            assertTrue("Server returned response: " + response,
                    response.startsWith("Hello World!"));
        }
    } finally {
        connect.disconnect();
    }
}
/**
 * PUTs the test document ({@code XML_DATA}) to the test resource URI.
 *
 * @return the HTTP response code reported by the server
 * @throws IOException on any transport failure
 */
private int uploadData() throws IOException {
    final HttpURLConnection connect = getConnection(getResourceUri());
    try {
        connect.setRequestProperty("Authorization", "Basic " + credentials);
        connect.setRequestMethod("PUT");
        connect.setDoOutput(true);
        // Fix: use the valid HTTP header name "Content-Type" (was "ContentType").
        connect.setRequestProperty("Content-Type", "application/xml");
        try (final Writer writer = new OutputStreamWriter(connect.getOutputStream(), UTF_8)) {
            writer.write(XML_DATA);
        }
        connect.connect();
        return connect.getResponseCode();
    } finally {
        connect.disconnect();
    }
}
/**
 * Fetches the test resource and verifies a 200 status and an
 * application/xml content type; the body itself is only drained.
 */
private void doGet() throws IOException {
    final HttpURLConnection connection = getConnection(getResourceUri());
    try {
        connection.setRequestMethod("GET");
        connection.connect();
        final int status = connection.getResponseCode();
        assertEquals("Server returned response code " + status, HttpStatus.OK_200, status);
        // Strip any charset suffix ("; charset=...") before comparing.
        String contentType = connection.getContentType();
        final int semicolon = contentType.indexOf(';');
        if (semicolon > 0) {
            contentType = contentType.substring(0, semicolon).trim();
        }
        assertEquals("Server returned content type " + contentType, "application/xml", contentType);
        readResponse(connection.getInputStream());
    } finally {
        connection.disconnect();
    }
}
/**
 * PUTs the test document ({@code XML_DATA}) to the "plus" resource URI
 * (a URI containing a '+' character).
 *
 * @return the HTTP response code reported by the server
 * @throws IOException on any transport failure
 */
private int uploadDataPlus() throws IOException {
    final HttpURLConnection connect = getConnection(getResourceUriPlus());
    try {
        connect.setRequestProperty("Authorization", "Basic " + credentials);
        connect.setRequestMethod("PUT");
        connect.setDoOutput(true);
        // Fix: use the valid HTTP header name "Content-Type" (was "ContentType").
        connect.setRequestProperty("Content-Type", "application/xml");
        try (final Writer writer = new OutputStreamWriter(connect.getOutputStream(), UTF_8)) {
            writer.write(XML_DATA);
        }
        connect.connect();
        return connect.getResponseCode();
    } finally {
        connect.disconnect();
    }
}
/**
 * Fetches the "plus" test resource and verifies a 200 status and an
 * application/xml content type; the body itself is only drained.
 */
private void doGetPlus() throws IOException {
    final HttpURLConnection connection = getConnection(getResourceUriPlus());
    try {
        connection.setRequestMethod("GET");
        connection.connect();
        final int status = connection.getResponseCode();
        assertEquals("Server returned response code " + status, HttpStatus.OK_200, status);
        // Strip any charset suffix ("; charset=...") before comparing.
        String contentType = connection.getContentType();
        final int semicolon = contentType.indexOf(';');
        if (semicolon > 0) {
            contentType = contentType.substring(0, semicolon).trim();
        }
        assertEquals("Server returned content type " + contentType, "application/xml", contentType);
        readResponse(connection.getInputStream());
    } finally {
        connection.disconnect();
    }
}
/**
 * Creates a POST connection to {@code path} with the request body already
 * written and the Authorization header set. The caller is responsible for
 * calling {@code connect()}, reading the response, and disconnecting.
 *
 * <p>Fix: the previous version disconnected the connection in a finally
 * block, i.e. it also tore down the successfully prepared connection that
 * was being returned to the caller. It is now only disconnected when
 * preparing the request fails.
 *
 * @param content request body to send
 * @param path    target URL
 * @return a prepared, still-usable connection
 * @throws IOException on any transport failure while preparing the request
 */
private HttpURLConnection preparePost(final String content, final String path) throws IOException {
    final HttpURLConnection connect = getConnection(path);
    try {
        connect.setRequestProperty("Authorization", "Basic " + credentials);
        connect.setRequestMethod("POST");
        connect.setDoOutput(true);
        connect.setRequestProperty("Content-Type", "application/xml");
        try (final Writer writer = new OutputStreamWriter(connect.getOutputStream(), UTF_8)) {
            writer.write(content);
        }
        return connect;
    } catch (final IOException | RuntimeException e) {
        connect.disconnect();
        throw e;
    }
}
/**
 * Reads the whole stream as UTF-8 text, line by line, re-joining the lines
 * with CRLF; a trailing CRLF is also appended after the last line.
 */
private String readResponse(final InputStream is) throws IOException {
    final StringBuilder body = new StringBuilder();
    try (final BufferedReader reader = new BufferedReader(new InputStreamReader(is, UTF_8))) {
        for (String line = reader.readLine(); line != null; line = reader.readLine()) {
            body.append(line).append("\r\n");
        }
    }
    return body.toString();
}
/**
 * Parses the XML query-response wrapper and returns the value of the
 * "hits" attribute (in the eXist namespace) of the document element.
 *
 * NOTE(review): the parser is namespace-aware but DTD/external-entity
 * processing is not explicitly disabled; acceptable for trusted test
 * responses, but confirm before reusing against untrusted input.
 */
private int parseResponse(final String data) throws IOException, SAXException, ParserConfigurationException {
final SAXParserFactory factory = SAXParserFactory.newInstance();
factory.setNamespaceAware(true);
final InputSource src = new InputSource(new StringReader(data));
final SAXParser parser = factory.newSAXParser();
final XMLReader reader = parser.getXMLReader();
// SAXAdapter turns the SAX event stream into a DOM Document.
final SAXAdapter adapter = new SAXAdapter();
reader.setContentHandler(adapter);
reader.parse(src);
final Document doc = adapter.getDocument();
final Element root = doc.getDocumentElement();
final String hits = root.getAttributeNS(Namespaces.EXIST_NS, "hits");
return Integer.parseInt(hits);
}
/** Opens (but does not yet connect) an HTTP connection to the given URL. */
private HttpURLConnection getConnection(final String url) throws IOException {
    return (HttpURLConnection) new URL(url).openConnection();
}
}
|
package org.apache.fop.fo;
// FOP
import org.apache.fop.layout.FontState;
import org.apache.fop.datatypes.ColorType;
import org.apache.fop.traits.SpaceVal;
/**
 * Collection of properties used in text layout.
 */
public class TextInfo {
// Font used to render the text.
public FontState fs;
// Text colour.
public ColorType color;
// Enumerated wrap-option property value (constants not visible here).
public int wrapOption;
public boolean bWrap ; // True if wrap-option = WRAP
// Presumably an enumerated white-space-collapse property value — confirm.
public int whiteSpaceCollapse;
// Presumably an enumerated vertical-align property value — confirm.
public int verticalAlign;
public int lineHeight;
// Props used for calculating inline-progression-dimension
public SpaceVal wordSpacing;
public SpaceVal letterSpacing;
// Add hyphenation props too
public boolean bCanHyphenate=true;
// Textdecoration
public boolean underlined = false;
public boolean overlined = false;
public boolean lineThrough = false;
}
|
package org.assist.load;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import org.assist.load.config.Section;
import org.assist.load.config.util.BoolStrings;
import org.assist.load.config.util.ConfigStr;
/**
 * Singleton INI-style configuration store: named sections, each holding
 * option/value pairs. Section and option names are case-insensitive
 * (lower-cased on every access). Values may reference other options of the
 * same (or the default) section via the %(name)% variable syntax, expanded
 * by {@link #getString(String, String)}.
 */
public class ConfigFile {
    private static volatile ConfigFile instance = null;
    private Map<String, Section> data = new HashMap<String, Section>();
    /** Name of the fallback section consulted by getRawString(). */
    public static final String DefaultSection = "default";
    private static final String Left_Variable = "%(";
    private static final String Right_Variable = ")%";

    private ConfigFile() {
    }

    /** Returns the singleton instance (double-checked locking; instance is volatile). */
    public static ConfigFile _IMP() {
        if (null == instance)
            synchronized (ConfigFile.class) {
                if (null == instance) {
                    instance = new ConfigFile();
                }
            }
        return instance;
    }

    /**
     * Adds a section.
     * @return true if it was created, false if it already existed
     */
    public boolean addSection(String section) {
        section = section.toLowerCase();
        if (!hasSection(section)) {
            synchronized (ConfigFile.class) {
                if (!hasSection(section)) {
                    data.put(section, new Section(section));
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Removes and destroys a section. The default section cannot be removed.
     * @return true if the section existed and was removed
     */
    public boolean removeSection(String section) {
        section = section.toLowerCase();
        synchronized (ConfigFile.class) {
            if (DefaultSection.equals(section)) {
                return false; // default section can not be removed
            }
            if (!hasSection(section)) {
                return false;
            }
            Section removed = data.remove(section);
            removed.destroy();
            return true;
        }
    }

    // It returns true if the option and value were inserted, and false if the
    // value was overwritten.
    // If the section does not exist in advance, it is created.
    public boolean addOption(String section, String option, String value) {
        addSection(section); // make sure the section exists
        return getSection(section).addOption(option, value);
    }

    public boolean hasSection(String section) {
        section = section.toLowerCase();
        return data.containsKey(section);
    }

    /** @throws ConfigException if the section does not exist */
    public Section getSection(String section) {
        section = section.toLowerCase();
        if (!hasSection(section)) {
            throw new ConfigException("the section [" + section + "] does not exist.");
        }
        return data.get(section);
    }

    // It returns true if the option and value were removed, and false otherwise,
    // including if the section did not exist.
    public boolean removeOption(String section, String option) {
        section = section.toLowerCase();
        option = option.toLowerCase();
        if (!hasSection(section)) {
            return false;
        }
        if (!data.get(section).hasOption(option)) {
            return false;
        }
        return data.get(section).removeOption(option);
    }

    /**
     * Parses INI-style content from the reader. Lines starting with '#', ';'
     * or "rem" are skipped as comments; "[name]" opens a section; any other
     * line is an option assignment, or — when it contains no assignment — a
     * continuation of the previous option's value.
     */
    public void loadfromStream(BufferedReader reader) {
        String line = null;
        String section = "";
        String option = "";
        try {
            while ((line = reader.readLine()) != null) {
                line = line.trim();
                if (line.length() <= 0) {
                    continue;
                }
                // NOTE(review): startsWith("rem") also skips option lines that
                // merely begin with "rem" (e.g. "remote=..."). Kept as-is to
                // preserve behaviour — confirm whether "rem " was intended.
                else if (line.startsWith("#") || line.startsWith(";") || line.startsWith("rem")) {
                    continue;
                }
                else if (line.startsWith("[") && line.endsWith("]")) {
                    section = line.substring(1, line.length() - 1);
                    this.addSection(section);
                }
                else if (section.isEmpty()) {
                    throw new ConfigException("Section not found: must start with section");
                }
                else {
                    line = ConfigStr.stripComments(line).trim();
                    int optionIdx = ConfigStr.optionIdx(line);
                    if (optionIdx > 0) {
                        option = line.substring(0, optionIdx).trim();
                        String value = line.substring(optionIdx + 1, line.length()).trim();
                        this.addOption(section, option, value);
                    }
                    else if (!section.isEmpty() && !option.isEmpty()) {
                        // support multi-line value: append to the previous option
                        String prev = getRawString(section, option);
                        this.addOption(section, option, prev + "\n" + line);
                    }
                    else {
                        throw new ConfigException("Could not parse line [" + line + "]");
                    }
                }
            }
        } catch (IOException e) {
            throw new ConfigException(e);
        }
    }

    /**
     * Loads configuration from a UTF-8 encoded file.
     * Fix: the reader is closed only when it was actually opened — previously
     * a failure in the FileInputStream constructor caused a NullPointerException
     * from reader.close() in the finally block, masking the real error.
     * (UnsupportedEncodingException and FileNotFoundException are IOException
     * subclasses, so a single catch clause is equivalent to the old three.)
     */
    public void loadCFG(String filename) {
        try {
            BufferedReader reader = null;
            try {
                reader = new BufferedReader(new InputStreamReader(new FileInputStream(filename), "UTF-8"));
                loadfromStream(reader);
            } finally {
                if (reader != null) {
                    reader.close();
                }
            }
        } catch (IOException e) {
            throw new ConfigException(e);
        }
    }

    /**
     * Writes all sections to a UTF-8 encoded file, optionally preceded by a
     * "# headString" comment line. The default section is skipped when empty.
     * Fix: null-safe close in the finally block (see loadCFG).
     */
    public void write2File(String filename, String headString) {
        try {
            BufferedWriter writer = null;
            try {
                writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(filename), "UTF-8"));
                if (!headString.isEmpty()) {
                    writer.write("# " + headString + "\n");
                }
                for (Entry<String, Section> entry : data.entrySet()) {
                    String sectionName = entry.getKey();
                    Section section = entry.getValue();
                    if (sectionName.equalsIgnoreCase(DefaultSection) && section.optionCount() == 0) {
                        continue;
                    }
                    writer.write("[" + sectionName + "]\n"); // write this section
                    section.writeOption(writer);
                }
            } finally {
                if (writer != null) {
                    writer.close();
                }
            }
        } catch (IOException e) {
            throw new ConfigException(e);
        }
    }

    /**
     * Returns the raw (un-expanded) value of an option, falling back to the
     * default section when the option is missing from the requested section.
     * Fix: previously, a missing section whose option existed in the default
     * section still raised "the section [...] does not exist" (getSection was
     * called on the absent section); the fallback now works even when the
     * requested section is absent.
     *
     * @throws ConfigException if neither section holds the option
     */
    public synchronized String getRawString(String section, String option) {
        section = section.toLowerCase();
        option = option.toLowerCase();
        if (this.hasSection(section) && this.getSection(section).hasOption(option)) {
            return getSection(section).getOption(option);
        }
        if (this.hasSection(DefaultSection) && this.getSection(DefaultSection).hasOption(option)) {
            return getSection(DefaultSection).getOption(option);
        }
        if (this.hasSection(section) || this.hasSection(DefaultSection)) {
            throw new ConfigException("Option not found:" + option);
        }
        throw new ConfigException("Section not found:" + section);
    }

    /**
     * Returns the value of an option with %(name)% variable references
     * recursively expanded from the same (or the default) section.
     * Fix: an unterminated variable reference (opening "%(" without a closing
     * ")%") previously made the loop spin forever, because neither branch
     * advanced the scan position; the remainder is now emitted verbatim.
     */
    public String getString(String section, String option) {
        String rawStr = getRawString(section, option);
        section = section.toLowerCase();
        StringBuilder ret = new StringBuilder();
        int istart = 0;
        while (istart < rawStr.length()) {
            int left = rawStr.indexOf(Left_Variable, istart);
            int right = (left < 0) ? -1 : rawStr.indexOf(Right_Variable, left);
            if (left < 0 || right < 0) {
                // no complete variable reference left: copy the rest as-is
                ret.append(rawStr.substring(istart));
                break;
            }
            ret.append(rawStr, istart, left);
            String name = rawStr.substring(left + Left_Variable.length(), right).toLowerCase();
            ret.append(getString(section, name));
            istart = right + Right_Variable.length();
        }
        return ret.toString();
    }

    /** Integer value of an option; an empty value yields 0. */
    public int getInt(String section, String option) {
        String value = getString(section, option);
        return value.isEmpty() ? 0 : Integer.parseInt(value);
    }

    /** Float value of an option; an empty value yields 0.0f. */
    public float getFloat(String section, String option) {
        String value = getString(section, option);
        return value.isEmpty() ? .0f : Float.parseFloat(value);
    }

    /** Boolean value of an option, interpreted via BoolStrings. */
    public boolean getBool(String section, String option) {
        String value = getString(section, option);
        return BoolStrings._IMP().getBool(value);
    }
}
|
package org.jgroups.util;
import org.jgroups.Address;
import org.jgroups.annotations.GuardedBy;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
/**
* Maintains credits for senders, when credits fall below 0, a sender blocks until new credits have been received.
* @author Bela Ban
* @version $Id: CreditMap.java,v 1.8 2010/09/08 14:03:10 belaban Exp $
*/
public class CreditMap {
    protected final long max_credits;

    @GuardedBy("lock")
    protected final Map<Address,Long> credits=new HashMap<Address,Long>();
    // Lowest credit across all members; a send of N bytes succeeds as long as
    // N <= min_credits (see decrement(long)).
    protected long min_credits;
    // Credits decremented only "virtually": subtracted lazily from every
    // member's entry by flushAccumulatedCredits() / decrementAndAdd().
    protected long accumulated_credits=0;
    protected final Lock lock=new ReentrantLock();
    protected final Condition credits_available=lock.newCondition();

    public CreditMap(long max_credits) {
        this.max_credits=max_credits;
        min_credits=max_credits;
    }

    public long getAccumulatedCredits() {
        return accumulated_credits;
    }

    public long getMinCredits() {
        return min_credits;
    }

    /**
     * Returns the members for which credits are tracked.
     * NOTE(review): this hands out the live keySet() view of the internal map;
     * the lock is released before the caller iterates. Confirm that callers
     * copy the set before iterating or mutating.
     */
    public Set<Address> keys() {
        lock.lock();
        try {
            return credits.keySet();
        }
        finally {
            lock.unlock();
        }
    }

    public Long get(Address member) {
        lock.lock();
        try {
            return credits.get(member);
        }
        finally {
            lock.unlock();
        }
    }

    /**
     * Removes a member's credits. If the removed member was (one of) the
     * lowest, min_credits may rise, in which case blocked senders are woken.
     */
    public Long remove(Address key) {
        lock.lock();
        try {
            Long retval=credits.remove(key);
            flushAccumulatedCredits();
            long new_min=computeLowestCredit();
            if(new_min > min_credits) {
                min_credits=new_min;
                credits_available.signalAll();
            }
            return retval;
        }
        finally {
            lock.unlock();
        }
    }

    /** Registers a member with max_credits unless it is already present. */
    public Long putIfAbsent(Address key) {
        lock.lock();
        try {
            flushAccumulatedCredits();
            Long val=credits.get(key);
            return val != null? val : credits.put(key, max_credits);
        }
        finally {
            lock.unlock();
        }
    }

    /** Returns the members whose credit is below credit_needed. */
    public List<Address> getMembersWithInsufficientCredits(long credit_needed) {
        List<Address> retval=new LinkedList<Address>();
        lock.lock();
        try {
            if(credit_needed > min_credits) {
                flushAccumulatedCredits();
                for(Map.Entry<Address,Long> entry: credits.entrySet()) {
                    if(entry.getValue().longValue() < credit_needed)
                        retval.add(entry.getKey());
                }
            }
            return retval;
        }
        finally {
            lock.unlock();
        }
    }

    /**
     * Decrements credits bytes from all. Returns true if successful, or false if not. Blocks for timeout ms
     * (if greater than 0).
     *
     * @param credits Number of bytes to decrement from all members
     * @param timeout Number of milliseconds to wait until more credits have been received
     * @return True if decrementing credits bytes succeeded, false otherwise
     */
    public boolean decrement(long credits, long timeout) {
        lock.lock();
        try {
            if(decrement(credits))
                return true;
            if(timeout <= 0)
                return false;
            try {
                // NOTE(review): await() is deliberately not called in a loop;
                // a spurious wakeup merely results in one failed retry below.
                credits_available.await(timeout, TimeUnit.MILLISECONDS);
                if(decrement(credits))
                    return true;
            }
            catch(InterruptedException e) {
                // Fix: restore the interrupt status so callers can observe it;
                // previously the interrupt was silently swallowed.
                Thread.currentThread().interrupt();
            }
            return false;
        }
        finally {
            lock.unlock();
        }
    }

    /**
     * Adds new_credits to sender (capped at max_credits) and wakes blocked
     * senders if the lowest credit may have risen above min_credits.
     */
    public void replenish(Address sender, long new_credits) {
        if(sender == null)
            return;
        lock.lock();
        try {
            Long val=credits.get(sender);
            if(val == null)
                return;
            boolean potential_update=val.longValue() - accumulated_credits <= min_credits;
            decrementAndAdd(sender, new_credits);
            if(potential_update) {
                long new_min=computeLowestCredit();
                if(new_min > min_credits) {
                    min_credits=new_min;
                    credits_available.signalAll();
                }
            }
        }
        finally {
            lock.unlock();
        }
    }

    /** Resets every member's credits to max_credits and wakes blocked senders. */
    public void replenishAll() {
        lock.lock();
        try {
            flushAccumulatedCredits();
            for(Map.Entry<Address,Long> entry: credits.entrySet())
                entry.setValue(max_credits);
            min_credits=computeLowestCredit();
            credits_available.signalAll();
        }
        finally {
            lock.unlock();
        }
    }

    public void clear() {
        lock.lock();
        try {
            credits.clear();
            credits_available.signalAll();
        }
        finally {
            lock.unlock();
        }
    }

    public String toString() {
        StringBuilder sb=new StringBuilder();
        lock.lock();
        try {
            for(Map.Entry<Address,Long> entry: credits.entrySet()) {
                sb.append(entry.getKey()).append(": ").append(entry.getValue()).append("\n");
            }
            sb.append("min_credits=" + min_credits + ", accumulated=" + accumulated_credits);
        }
        finally {
            lock.unlock();
        }
        return sb.toString();
    }

    // need to be called with lock held
    protected boolean decrement(long credits) {
        if(credits <= min_credits) {
            accumulated_credits+=credits;
            min_credits-=credits;
            return true;
        }
        return false;
    }

    /** Needs to be called with lock held */
    protected long computeLowestCredit() {
        long lowest=max_credits;
        for(long cred: credits.values())
            lowest=Math.min(cred, lowest);
        return lowest;
    }

    /**
     * Decrements credits bytes from all elements and add new_credits to member (if non null).
     * The lowest credit needs to be greater than min_credits. Needs to be called with lock held
     * @param member The member to which new_credits are added. NOP if null
     * @param new_credits Number of bytes to add to member. NOP if 0.
     */
    protected void decrementAndAdd(Address member, long new_credits) {
        boolean replenish=member != null && new_credits > 0;
        if(accumulated_credits > 0) {
            for(Map.Entry<Address,Long> entry: this.credits.entrySet()) {
                entry.setValue(Math.max(0, entry.getValue().longValue() - accumulated_credits));
                if(replenish) {
                    Address tmp=entry.getKey();
                    if(tmp.equals(member))
                        entry.setValue(Math.min(max_credits, entry.getValue().longValue() + new_credits));
                }
            }
            accumulated_credits=0;
        }
        else {
            if(replenish) {
                Long val=this.credits.get(member);
                if(val != null)
                    this.credits.put(member, Math.min(max_credits, val.longValue() + new_credits));
            }
        }
    }

    // Called with lock held
    protected void flushAccumulatedCredits() {
        if(accumulated_credits > 0) {
            for(Map.Entry<Address,Long> entry: this.credits.entrySet()) {
                entry.setValue(Math.max(0, entry.getValue().longValue() - accumulated_credits));
            }
            accumulated_credits=0;
        }
    }
}
|
package com.intellij.openapi.ui;
import com.intellij.CommonBundle;
import com.intellij.ide.RemoteDesktopService;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.text.Strings;
import com.intellij.ui.JBColor;
import com.intellij.ui.components.JBLayeredPane;
import com.intellij.ui.components.panels.NonOpaquePanel;
import com.intellij.ui.components.panels.VerticalLayout;
import com.intellij.util.Alarm;
import com.intellij.util.ui.Animator;
import com.intellij.util.ui.AsyncProcessIcon;
import com.intellij.util.ui.ImageUtil;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.image.BufferedImage;
/**
 * Wraps a component in a layered pane and shows an animated "Loading..."
 * overlay on demand (startLoading/stopLoading), optionally after a delay and
 * optionally over a snapshot of the current content. The overlay is faded out
 * via an Animator and removed entirely while hidden (see hideLoadingLayer).
 */
public class LoadingDecorator {
public static final Color OVERLAY_BACKGROUND = JBColor.namedColor("BigSpinner.background", JBColor.PanelBackground);
private Color myOverlayBackground = null;
JLayeredPane myPane;
LoadingLayer myLoadingLayer;
// Fades the overlay out; at cycle end the layer is hidden and detached.
Animator myFadeOutAnimator;
// Delay in ms before the overlay actually appears (0 = immediately).
int myDelay;
Alarm myStartAlarm;
// True while a (possibly delayed) startLoading request is pending.
boolean myStartRequest;
public LoadingDecorator(JComponent content, @NotNull Disposable parent, int startDelayMs) {
this(content, parent, startDelayMs, false);
}
public LoadingDecorator(JComponent content, @NotNull Disposable parent, int startDelayMs, boolean useMinimumSize) {
this(content, parent, startDelayMs, useMinimumSize, new AsyncProcessIcon.Big("Loading"));
}
public LoadingDecorator(JComponent content, @NotNull Disposable parent, int startDelayMs, boolean useMinimumSize, @NotNull AsyncProcessIcon icon) {
myPane = new MyLayeredPane(useMinimumSize ? content : null);
myLoadingLayer = new LoadingLayer(icon);
myDelay = startDelayMs;
myStartAlarm = new Alarm(Alarm.ThreadToUse.POOLED_THREAD, parent);
setLoadingText(CommonBundle.getLoadingTreeNodeText());
// Fade out more slowly over a remote-desktop session, where repaints are costly.
myFadeOutAnimator = new Animator("Loading", 10, RemoteDesktopService.isRemoteSession()? 2500 : 500, false) {
@Override
public void paintNow(final int frame, final int totalFrames, final int cycle) {
myLoadingLayer.setAlpha(1f - ((float)frame) / ((float)totalFrames));
}
@Override
protected void paintCycleEnd() {
myLoadingLayer.setAlpha(0); // paint with zero alpha before hiding completely
hideLoadingLayer();
myLoadingLayer.setAlpha(-1);
myPane.repaint();
}
};
Disposer.register(parent, myFadeOutAnimator);
myPane.add(content, JLayeredPane.DEFAULT_LAYER, 0);
Disposer.register(parent, myLoadingLayer.myProgress);
}
public Color getOverlayBackground() {
return myOverlayBackground;
}
public void setOverlayBackground(@Nullable Color background) {
myOverlayBackground = background;
}
/**
* Removes a loading layer to restore a blit-accelerated scrolling.
*/
private void hideLoadingLayer() {
myPane.remove(myLoadingLayer);
myLoadingLayer.setVisible(false);
}
/* Placing the invisible layer on top of JViewport suppresses blit-accelerated scrolling
as JViewport.canUseWindowBlitter() doesn't take component's visibility into account.
We need to add / remove the loading layer on demand to preserve the blit-based scrolling.
Blit-acceleration copies as much of the rendered area as possible and then repaints only newly exposed region.
This helps to improve scrolling performance and to reduce CPU usage (especially if drawing is compute-intensive). */
private void addLoadingLayerOnDemand() {
if (myPane != myLoadingLayer.getParent()) {
myPane.add(myLoadingLayer, JLayeredPane.DRAG_LAYER, 1);
}
}
// Lays out the spinner icon and the text inside the loading layer.
// Subclasses may override to customize the overlay's appearance.
protected NonOpaquePanel customizeLoadingLayer(JPanel parent, JLabel text, AsyncProcessIcon icon) {
parent.setLayout(new GridBagLayout());
text.setFont(UIUtil.getLabelFont());
text.setForeground(UIUtil.getContextHelpForeground());
icon.setBorder(Strings.notNullize(text.getText()).endsWith("...")
? JBUI.Borders.emptyRight(8)
: JBUI.Borders.empty());
NonOpaquePanel result = new NonOpaquePanel(new VerticalLayout(JBUI.scale(6)));
result.setBorder(JBUI.Borders.empty(10));
result.add(icon);
result.add(text);
parent.add(result);
return result;
}
public JComponent getComponent() {
return myPane;
}
// Shows the overlay, either immediately or after myDelay ms (via the alarm,
// re-dispatched to the EDT). A pending request is dropped if stopLoading()
// runs before the delay elapses (myStartRequest is cleared there).
public void startLoading(final boolean takeSnapshot) {
if (isLoading() || myStartRequest || myStartAlarm.isDisposed()) return;
myStartRequest = true;
if (myDelay > 0) {
myStartAlarm.addRequest(() -> UIUtil.invokeLaterIfNeeded(() -> {
if (!myStartRequest) return;
_startLoading(takeSnapshot);
}), myDelay);
}
else {
_startLoading(takeSnapshot);
}
}
protected void _startLoading(final boolean takeSnapshot) {
addLoadingLayerOnDemand();
myLoadingLayer.setVisible(true, takeSnapshot);
}
public void stopLoading() {
myStartRequest = false;
myStartAlarm.cancelAllRequests();
if (!isLoading()) return;
myLoadingLayer.setVisible(false, false);
myPane.repaint();
}
public String getLoadingText() {
return myLoadingLayer.myText.getText();
}
public void setLoadingText(@Nls String loadingText) {
myLoadingLayer.myText.setVisible(!Strings.isEmptyOrSpaces(loadingText));
myLoadingLayer.myText.setText(loadingText);
}
public boolean isLoading() {
return myLoadingLayer.isLoading();
}
// The overlay component: paints an optional snapshot of the underlying
// content (dimmed) or a solid background, plus the spinner/text children.
private final class LoadingLayer extends JPanel {
private final JLabel myText = new JLabel("", SwingConstants.CENTER);
private BufferedImage mySnapshot;
private Color mySnapshotBg;
private final AsyncProcessIcon myProgress;
private boolean myVisible;
// Alpha used when painting children; -1 is a sentinel meaning "no
// composite applied", i.e. paint at full opacity (see paintChildren).
private float myCurrentAlpha;
private final NonOpaquePanel myTextComponent;
private LoadingLayer(@NotNull AsyncProcessIcon processIcon) {
setOpaque(false);
setVisible(false);
myProgress = processIcon;
myProgress.setOpaque(false);
myTextComponent = customizeLoadingLayer(this, myText, myProgress);
myProgress.suspend();
}
public void setVisible(final boolean visible, boolean takeSnapshot) {
if (myVisible == visible) return;
// Ignore a hide request while the fade-out animation is still running.
if (myVisible && myCurrentAlpha != -1) return;
myVisible = visible;
myFadeOutAnimator.reset();
if (myVisible) {
setVisible(true);
myCurrentAlpha = -1;
if (takeSnapshot && getWidth() > 0 && getHeight() > 0) {
// Render the current pane into an image so it can be shown (dimmed)
// behind the spinner while loading.
mySnapshot = ImageUtil.createImage(getGraphics(), getWidth(), getHeight(), BufferedImage.TYPE_INT_RGB);
final Graphics2D g = mySnapshot.createGraphics();
myPane.paint(g);
final Component opaque = UIUtil.findNearestOpaque(this);
mySnapshotBg = opaque != null ? opaque.getBackground() : UIUtil.getPanelBackground();
g.dispose();
}
myProgress.resume();
myFadeOutAnimator.suspend();
}
else {
disposeSnapshot();
myProgress.suspend();
myFadeOutAnimator.resume();
}
}
public boolean isLoading() {
return myVisible;
}
private void disposeSnapshot() {
if (mySnapshot != null) {
mySnapshot.flush();
mySnapshot = null;
}
}
@Override
protected void paintComponent(final Graphics g) {
if (mySnapshot != null) {
// Snapshot is only valid while its size matches the layer's size.
if (mySnapshot.getWidth() == getWidth() && mySnapshot.getHeight() == getHeight()) {
g.drawImage(mySnapshot, 0, 0, getWidth(), getHeight(), null);
g.setColor(new Color(200, 200, 200, 240));
g.fillRect(0, 0, getWidth(), getHeight());
return;
}
else {
disposeSnapshot();
}
}
Color background = mySnapshotBg != null ? mySnapshotBg : getOverlayBackground();
if (background != null) {
g.setColor(background);
g.fillRect(0, 0, getWidth(), getHeight());
}
}
public void setAlpha(final float alpha) {
myCurrentAlpha = alpha;
paintImmediately(myTextComponent.getBounds());
}
@Override
protected void paintChildren(final Graphics g) {
if (myCurrentAlpha != -1) {
((Graphics2D)g).setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, myCurrentAlpha));
}
super.paintChildren(g);
}
}
public interface CursorAware {
}
private static final class MyLayeredPane extends JBLayeredPane implements CursorAware {
private final JComponent myContent;
private MyLayeredPane(JComponent content) {
myContent = content;
}
@Override
public Dimension getMinimumSize() {
return myContent != null && !isMinimumSizeSet()
? myContent.getMinimumSize()
: super.getMinimumSize();
}
@Override
public Dimension getPreferredSize() {
return myContent != null && !isPreferredSizeSet()
? myContent.getPreferredSize()
: super.getPreferredSize();
}
@Override
public void doLayout() {
super.doLayout();
// Stretch every child to fill the pane, except Icon components,
// which keep their own size.
for (int i = 0; i < getComponentCount(); i++) {
final Component each = getComponent(i);
if (each instanceof Icon) {
each.setBounds(0, 0, each.getWidth(), each.getHeight());
}
else {
each.setBounds(0, 0, getWidth(), getHeight());
}
}
}
}
}
|
package org.nutz.mvc.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Runtime annotation applicable to methods and types, carrying a single String
 * value. NOTE(review): presumably declares how a successful result of the
 * annotated MVC action is rendered -- confirm against the framework docs.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.METHOD, ElementType.TYPE})
public @interface Ok {

    /** The configured value string (semantics defined by the framework). */
    String value();
}
|
package org.opencms.setup;
import org.opencms.file.CmsObject;
import org.opencms.file.CmsProject;
import org.opencms.file.CmsRegistry;
import org.opencms.main.I_CmsConstants;
import org.opencms.main.I_CmsShellCommands;
import org.opencms.main.OpenCms;
import org.opencms.main.OpenCmsCore;
import org.opencms.report.CmsShellReport;
import org.opencms.util.CmsStringSubstitution;
import org.opencms.util.CmsUUID;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.Serializable;
import java.util.*;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import org.apache.commons.collections.ExtendedProperties;
import org.dom4j.Document;
import org.dom4j.Element;
import org.dom4j.io.SAXReader;
/**
* A java bean as a controller for the OpenCms setup wizard.<p>
*
* It is not allowed to customize this bean with methods for a specific database server setup!<p>
*
* Database server specific settings should be set/read using get/setDbProperty, as for example like:
*
* <pre>
* setDbProperty("oracle.defaultTablespace", value);
* </pre>
*
* @author Thomas Weckert ([email protected])
* @author Carsten Weinholz ([email protected])
* @version $Revision: 1.13 $
*/
public class CmsSetup extends Object implements Serializable, Cloneable, I_CmsShellCommands {
    /** Required files per database server setup.<p> */
    public static final String[] requiredDbSetupFiles = {
        "step_4_database_setup.jsp",
        "database.properties",
        "create_db.sql",
        "create_tables.sql",
        "drop_db.sql",
        "drop_tables.sql"
    };

    /** Contains the error messages to be displayed in the setup wizard.<p>
     * NOTE(review): static and re-created in the constructor, so it is shared
     * by (and reset across) all CmsSetup instances -- confirm this is intended. */
    private static Vector errors;

    /** Contains the properties of "opencms.properties".<p> */
    private ExtendedProperties m_extProperties;

    /** Contains HTML fragments for the output in the JSP pages of the setup wizard.<p> */
    private Properties m_htmlProps;

    /** The absolute path to the home directory of the OpenCms webapp.<p> */
    private String m_basePath;

    /** Key of the selected database server (e.g. "mysql", "generic" or "oracle").<p> */
    private String m_databaseKey;

    /** Password used for the JDBC connection when the OpenCms database is created (never persisted).<p> */
    private String m_dbCreatePwd;

    /** Lazily built list of keys of all available database server setups (e.g. "mysql", "generic" or "oracle").<p> */
    private List m_databaseKeys;

    /** Lazily built list of keys of all available database server setups, sorted by ranking.<p> */
    private List m_sortedDatabaseKeys;

    /** Lazily built list of clear text names of all available database server setups (e.g. "MySQL", "Generic (ANSI) SQL").<p> */
    private List m_databaseNames;

    /** Map of database setup properties of all available database server setups keyed by their database keys.<p> */
    private Map m_databaseProperties;

    /** A map with tokens ${...} to be replaced in SQL scripts.<p> */
    private Map m_replacer;

    /** A map with all available modules.<p> */
    private Map m_availableModules;

    /** A map with lists of dependent module package names keyed by module package names.<p> */
    private Map m_moduleDependencies;

    /** A list with the package names of the modules to be installed.<p> */
    private List m_installModules;

    /** A CmsObject to execute shell commands.<p> */
    private CmsObject m_cms;
    /**
     * Default constructor.<p>
     *
     * Resets the cached database key/name/property lookups and (re)initializes
     * the error list.
     */
    public CmsSetup() {
        m_databaseKeys = null;
        m_databaseNames = null;
        m_databaseProperties = null;
        // NOTE(review): "errors" is static, so constructing a new CmsSetup
        // discards errors collected by any other instance -- confirm intended
        errors = new Vector();
    }
/**
* This method reads the properties from the htmlmsg.property file
* and sets the HTML part properties with the matching values.<p>
*/
public void initHtmlParts() {
try {
m_htmlProps = new Properties();
m_htmlProps.load(getClass().getClassLoader().getResourceAsStream(OpenCmsCore.C_FILE_HTML_MESSAGES));
} catch (Exception e) {
e.printStackTrace();
errors.add(e.toString());
}
}
    /**
     * This method reads the properties from the opencms.property file
     * and sets the CmsSetup properties with the matching values.
     * This method should be called when the first page of the OpenCms
     * Setup Wizard is called, so the input fields of the wizard are pre-defined.
     *
     * Also triggers loading of the database names/properties and the HTML parts.
     *
     * @param props name of the properties file, resolved relative to the config folder
     */
    public void initProperties(String props) {
        // side effect: populates m_databaseNames / m_databaseProperties caches
        getDatabaseNames();
        initHtmlParts();
        String path = getConfigFolder() + props;
        try {
            m_extProperties = CmsSetupUtils.loadProperties(path);
        } catch (Exception e) {
            e.printStackTrace();
            errors.add(e.toString());
        }
    }
    /**
     * Checks the ethernet address value and generates a dummy address, if necessary.<p>
     *
     * The ethernet (MAC) address is required for UUID generation; if none is
     * configured, a random dummy address is generated and stored.
     */
    public void checkEthernetAddress() {
        // check the ethernet address in order to generate a random address, if not available
        if ("".equals(getEthernetAddress())) {
            setEthernetAddress(CmsUUID.getDummyEthernetAddress());
        }
    }
/**
* This method checks the validity of the given properties
* and adds unset properties if possible
* @return boolean true if all properties are set correctly
*/
public boolean checkProperties() {
// check if properties available
if (getProperties() == null) {
return false;
}
// check the maximum file size, set it to unlimited, if not valid
String size = getFileMaxUploadSize();
if (size == null || "".equals(size)) {
setFileMaxUploadSize("-1");
} else {
try {
Integer.parseInt(size);
} catch (Exception e) {
setFileMaxUploadSize("-1");
}
}
return true;
}
    /**
     * This method sets the value for a given key in the extended properties.<p>
     *
     * @param key the key of the property
     * @param value the value of the property
     */
    public void setExtProperty(String key, String value) {
        m_extProperties.put(key, value);
    }
/**
* Returns the value for a given key from the extended properties.
*
* @param key the property key
* @return the string value for a given key
*/
public String getExtProperty(String key) {
Object value = null;
return ((value = m_extProperties.get(key)) != null) ? value.toString() : "";
}
    /**
     * This method sets the value for a given key in the database properties.<p>
     *
     * @param key the key of the property; must start with a database key
     *            followed by a dot, e.g. "mysql.driver"
     * @param value the value of the property
     */
    public void setDbProperty(String key, String value) {
        // extract the database key out of the entire key
        // NOTE(review): throws StringIndexOutOfBoundsException if the key has
        // no dot, and NPE if the database key is unknown -- confirm callers
        // always pass well-formed keys
        String databaseKey = key.substring(0, key.indexOf("."));
        Map databaseProperties = (Map) getDatabaseProperties().get(databaseKey);
        databaseProperties.put(key, value);
    }
/**
* Returns the value for a given key from the database properties.
*
* @param key the property key
* @return the string value for a given key
*/
public String getDbProperty(String key) {
Object value = null;
// extract the database key out of the entire key
String databaseKey = key.substring(0, key.indexOf("."));
Map databaseProperties = (Map) getDatabaseProperties().get(databaseKey);
return ((value = databaseProperties.get(key)) != null) ? (String) value : "";
}
    /**
     * Sets the path to the OpenCms home directory.<p>
     *
     * @param basePath path to OpenCms home directory
     */
    public void setBasePath(String basePath) {
        m_basePath = basePath;
        if (!m_basePath.endsWith(File.separator)) {
            // make sure that Path always ends with a separator, not always the case in different
            // environments since getServletContext().getRealPath("/") does not end with a "/" in
            // all servlet runtimes
            m_basePath += File.separator;
        }
    }

    /**
     * Returns the absolute path to the OpenCms home directory,
     * normalized to use the platform file separator.<p>
     *
     * @return the path to the OpenCms home directory
     */
    public String getBasePath() {
        return m_basePath.replace('\\', '/').replace('/', File.separatorChar);
    }
/**
* Gets the default pool
*
* @return name of the default pool
*/
public String getPool() {
StringTokenizer tok = new StringTokenizer(getExtProperty("db.pools"), ",[]");
String pool = tok.nextToken();
return pool;
}
/**
* Sets the database drivers to the given value
*
* @param databaseKey the key of the selected database server (e.g. "mysql", "generic" or "oracle")
*/
public void setDatabase(String databaseKey) {
m_databaseKey = databaseKey;
String vfsDriver = getDbProperty(m_databaseKey + ".vfs.driver");
String userDriver = getDbProperty(m_databaseKey + ".user.driver");
String projectDriver = getDbProperty(m_databaseKey + ".project.driver");
String workflowDriver = getDbProperty(m_databaseKey + ".workflow.driver");
String backupDriver = getDbProperty(m_databaseKey + ".backup.driver");
// Change/write configuration only if not available or database changed
setExtProperty("db.name", m_databaseKey);
if (getExtProperty("db.vfs.driver") == null || "".equals(getExtProperty("db.vfs.driver"))) {
setExtProperty("db.vfs.driver", vfsDriver);
}
if (getExtProperty("db.user.driver") == null || "".equals(getExtProperty("db.user.driver"))) {
setExtProperty("db.user.driver", userDriver);
}
if (getExtProperty("db.project.driver") == null || "".equals(getExtProperty("db.project.driver"))) {
setExtProperty("db.project.driver", projectDriver);
}
if (getExtProperty("db.workflow.driver") == null || "".equals(getExtProperty("db.workflow.driver"))) {
setExtProperty("db.workflow.driver", workflowDriver);
}
if (getExtProperty("db.backup.driver") == null || "".equals(getExtProperty("db.backup.driver"))) {
setExtProperty("db.backup.driver", backupDriver);
}
}
    /**
     * Returns the key of the selected database server (e.g. "mysql", "generic" or "oracle").<p>
     *
     * Falls back to the configured "db.name" property, and finally to the
     * highest-ranked available database setup.
     *
     * @return the key of the selected database server (e.g. "mysql", "generic" or "oracle")
     */
    public String getDatabase() {
        if (m_databaseKey == null) {
            m_databaseKey = getExtProperty("db.name");
        }
        if (m_databaseKey == null || "".equals(m_databaseKey)) {
            // no configured database: default to the top-ranked setup
            m_databaseKey = (String) getSortedDatabases().get(0);
        }
        return m_databaseKey;
    }
/**
* Returns a list with they keys (e.g. "mysql", "generic" or "oracle") of all available
* database server setups found in "/setup/database/".<p>
*
* @return a list with they keys (e.g. "mysql", "generic" or "oracle") of all available database server setups
*/
public List getDatabases() {
File databaseSetupFolder = null;
File[] childResources = null;
File childResource = null;
File setupFile = null;
boolean hasMissingSetupFiles = false;
if (m_databaseKeys != null) {
return m_databaseKeys;
}
try {
m_databaseKeys = (List) new ArrayList();
databaseSetupFolder = new File(m_basePath + File.separator + "setup" + File.separator + "database");
if (databaseSetupFolder.exists()) {
childResources = databaseSetupFolder.listFiles();
if (childResources != null) {
for (int i = 0; i < childResources.length; i++) {
childResource = childResources[i];
hasMissingSetupFiles = false;
if (childResource.exists() && childResource.isDirectory() && childResource.canRead()) {
for (int j = 0; j < requiredDbSetupFiles.length; j++) {
setupFile = new File(childResource.getPath() + File.separator + requiredDbSetupFiles[j]);
if (!setupFile.exists() || !setupFile.isFile() || !setupFile.canRead()) {
hasMissingSetupFiles = true;
System.err.println("[" + getClass().getName() + "] missing or unreadable database setup file: " + setupFile.getPath());
break;
}
}
if (!hasMissingSetupFiles) {
m_databaseKeys.add(childResource.getName().trim());
}
}
}
}
}
} catch (Exception e) {
System.err.println(e.toString());
e.printStackTrace(System.err);
}
return m_databaseKeys;
}
    /**
     * Returns a list with the clear text names (e.g. "MySQL", "Generic (ANSI) SQL") of all
     * available database server setups in "/setup/database/".<p>
     *
     * Second, this method stores the properties of all available database configurations in a
     * map keyed by their database key names (e.g. "mysql", "generic" or "oracle").<p>
     *
     * Both results are cached; a database setup whose "database.properties"
     * cannot be read is skipped (the error is logged to stderr).
     *
     * @return a list with the clear text names (e.g. "MySQL", "Generic (ANSI) SQL") of all available database server setups
     * @see #getDatabaseProperties()
     */
    public List getDatabaseNames() {
        List databaseKeys = null;
        String databaseKey = null;
        String databaseName = null;
        FileInputStream input = null;
        String configPath = null;
        Properties databaseProperties = null;
        if (m_databaseNames != null) {
            return m_databaseNames;
        }
        m_databaseNames = (List) new ArrayList();
        m_databaseProperties = (Map) new HashMap();
        databaseKeys = getDatabases();
        for (int i = 0; i < databaseKeys.size(); i++) {
            databaseKey = (String) databaseKeys.get(i);
            configPath = m_basePath + "setup" + File.separator + "database" + File.separator + databaseKey + File.separator + "database.properties";
            try {
                input = new FileInputStream(new File(configPath));
                databaseProperties = new Properties();
                databaseProperties.load(input);
                // the clear text name is stored under "<key>.name" in the properties
                databaseName = databaseProperties.getProperty(databaseKey + ".name");
                m_databaseNames.add(databaseName);
                m_databaseProperties.put(databaseKey, databaseProperties);
            } catch (Exception e) {
                System.err.println(e.toString());
                e.printStackTrace(System.err);
                continue;
            } finally {
                try {
                    if (input != null) {
                        input.close();
                    }
                } catch (Exception e) {
                    // noop
                }
            }
        }
        return m_databaseNames;
    }
/**
* Returns a sorted list with they keys (e.g. "mysql", "generic" or "oracle") of all available
* database server setups found in "/setup/database/" sorted by their ranking property.<p>
*
* @return a sorted list with they keys (e.g. "mysql", "generic" or "oracle") of all available database server setups
*/
public List getSortedDatabases() {
if (m_sortedDatabaseKeys == null) {
List databases = getDatabases();
List sortedDatabases = new ArrayList(databases.size());
SortedMap mappedDatabases = new TreeMap();
for (int i=0; i<databases.size(); i++) {
String key = (String)databases.get(i);
Integer ranking = new Integer(0);
try {
ranking = Integer.valueOf(getDbProperty(key + ".ranking"));
} catch (Exception e) {
// ignore
}
mappedDatabases.put(ranking, key);
}
while (mappedDatabases.size() > 0) {
// get database with highest ranking
Integer key = (Integer)mappedDatabases.lastKey();
String database = (String)mappedDatabases.get(key);
sortedDatabases.add(database);
mappedDatabases.remove(key);
}
m_sortedDatabaseKeys = new ArrayList(databases.size());
m_sortedDatabaseKeys = sortedDatabases;
}
return m_sortedDatabaseKeys;
}
    /**
     * Returns a map with the database properties of *all* available database configurations keyed
     * by their database keys (e.g. "mysql", "generic" or "oracle").<p>
     *
     * Triggers {@link #getDatabaseNames()} to build the map if it is not cached yet.
     *
     * @return a map with the database properties of *all* available database configurations
     */
    public Map getDatabaseProperties() {
        if (m_databaseProperties != null) {
            return m_databaseProperties;
        }
        // side effect of getDatabaseNames(): populates m_databaseProperties
        getDatabaseNames();
        return m_databaseProperties;
    }

    /**
     * Returns the URI of a database config page (in step 3) for a specified database key.<p>
     *
     * @param key the database key (e.g. "mysql", "generic" or "oracle")
     * @return the URI of a database config page
     */
    public String getDatabaseConfigPage(String key) {
        return "database" + I_CmsConstants.C_FOLDER_SEPARATOR + key + I_CmsConstants.C_FOLDER_SEPARATOR + "step_4_database_setup.jsp";
    }
    /**
     * Sets the connection string to the database to the given value.<p>
     *
     * Also writes the JDBC driver and the connection test query for the
     * default pool.
     *
     * @param dbWorkConStr the connection string used by the OpenCms core
     */
    public void setDbWorkConStr(String dbWorkConStr) {
        String driver = getDbProperty(m_databaseKey + ".driver");
        // TODO: set the driver in own methods
        setExtProperty("db.pool." + getPool() + ".jdbcDriver", driver);
        setExtProperty("db.pool." + getPool() + ".jdbcUrl", dbWorkConStr);
        setTestQuery(getDbTestQuery());
    }
/**
* Returns a connection string
*
* @return the connection string used by the OpenCms core
*/
public String getDbWorkConStr() {
String str = getExtProperty("db.pool." + getPool() + ".jdbcUrl");
return str;
}
    /**
     * Sets the user of the database to the given value.<p>
     *
     * @param dbWorkUser the database user used by the OpenCms core
     */
    public void setDbWorkUser(String dbWorkUser) {
        setExtProperty("db.pool." + getPool() + ".user", dbWorkUser);
    }

    /**
     * Returns the user of the database from the properties.<p>
     *
     * @return the database user used by the OpenCms core
     */
    public String getDbWorkUser() {
        return getExtProperty("db.pool." + getPool() + ".user");
    }

    /**
     * Sets the password of the database to the given value.<p>
     *
     * @param dbWorkPwd the password for the OpenCms database user
     */
    public void setDbWorkPwd(String dbWorkPwd) {
        setExtProperty("db.pool." + getPool() + ".password", dbWorkPwd);
    }

    /**
     * Returns the password of the database from the properties.<p>
     *
     * @return the password for the OpenCms database user
     */
    public String getDbWorkPwd() {
        return getExtProperty("db.pool." + getPool() + ".password");
    }

    /**
     * Returns the extended properties (i.e. the loaded "opencms.properties").<p>
     *
     * @return the extended properties
     */
    public ExtendedProperties getProperties() {
        return m_extProperties;
    }
    /**
     * Adds a new error message to the vector.<p>
     *
     * NOTE(review): the error list is static and only initialized by the
     * constructor, so calling this before any CmsSetup was constructed
     * would throw a NullPointerException -- confirm callers' ordering.
     *
     * @param error the error message
     */
    public static void setErrors(String error) {
        errors.add(error);
    }

    /**
     * Returns the error messages.<p>
     *
     * @return a vector of error messages
     */
    public Vector getErrors() {
        return errors;
    }
    /**
     * Returns the path to the opencms config folder,
     * normalized to the platform file separator.<p>
     *
     * @return the path to the config folder
     */
    public String getConfigFolder() {
        return (m_basePath + "WEB-INF/config/").replace('\\', '/').replace('/', File.separatorChar);
    }
    /**
     * Sets the database driver belonging to the database.<p>
     *
     * @param driver name of the opencms driver
     */
    public void setDbDriver(String driver) {
        setDbProperty(m_databaseKey + ".driver", driver);
    }

    /**
     * Returns the database driver belonging to the database
     * from the default configuration.<p>
     *
     * @return name of the opencms driver
     */
    public String getDbDriver() {
        return getDbProperty(m_databaseKey + ".driver");
    }

    /**
     * Returns the validation query belonging to the database
     * from the default configuration.<p>
     *
     * @return query used to validate connections
     */
    public String getDbTestQuery() {
        return getDbProperty(m_databaseKey + ".testQuery");
    }
    /**
     * Sets the validation query to the given value.<p>
     *
     * @param query query used to validate connections
     */
    public void setTestQuery(String query) {
        setExtProperty("db.pool." + getPool() + ".testQuery", query);
    }

    /**
     * Returns the validation query.<p>
     *
     * @return query used to validate connections
     */
    public String getTestQuery() {
        return getExtProperty("db.pool." + getPool() + ".testQuery");
    }

    /**
     * Sets the minimum connections to the given value.<p>
     *
     * NOTE(review): "minimum connections" is stored under the pool's "maxIdle"
     * property -- confirm this mapping against the pool implementation.
     *
     * @param minConn number of minimum connections
     */
    public void setMinConn(String minConn) {
        setExtProperty("db.pool." + getPool() + ".maxIdle", minConn);
    }

    /**
     * Returns the min. connections (stored as the pool's "maxIdle" property).<p>
     *
     * @return the min. connections
     */
    public String getMinConn() {
        return getExtProperty("db.pool." + getPool() + ".maxIdle");
    }

    /**
     * Sets the maximum connections to the given value.<p>
     *
     * @param maxConn maximum connection count
     */
    public void setMaxConn(String maxConn) {
        setExtProperty("db.pool." + getPool() + ".maxActive", maxConn);
    }

    /**
     * Returns the max. connections.<p>
     *
     * @return the max. connections
     */
    public String getMaxConn() {
        return getExtProperty("db.pool." + getPool() + ".maxActive");
    }

    /**
     * Sets the timeout (the pool's "maxWait") to the given value.<p>
     *
     * @param timeout the timeout to set
     */
    public void setTimeout(String timeout) {
        setExtProperty("db.pool." + getPool() + ".maxWait", timeout);
    }

    /**
     * Returns the timeout value.<p>
     *
     * @return the timeout value
     */
    public String getTimeout() {
        return getExtProperty("db.pool." + getPool() + ".maxWait");
    }
    /**
     * Set the mac ethernet address, required for UUID generation.<p>
     *
     * @param ethernetAddress the mac address to set
     */
    public void setEthernetAddress(String ethernetAddress) {
        setExtProperty("server.ethernet.address", ethernetAddress);
    }

    /**
     * Return the mac ethernet address.<p>
     *
     * @return the mac ethernet address
     */
    public String getEthernetAddress() {
        return getExtProperty("server.ethernet.address");
    }

    /**
     * Return the OpenCms server name.<p>
     *
     * @return the OpenCms server name
     */
    public String getServerName() {
        return getExtProperty("server.name");
    }

    /**
     * Set the OpenCms server name.<p>
     *
     * @param name the OpenCms server name
     */
    public void setServerName(String name) {
        setExtProperty("server.name", name);
    }

    /**
     * Set the maximum file upload size.<p>
     *
     * @param size the size to set ("-1" means unlimited)
     */
    public void setFileMaxUploadSize(String size) {
        setExtProperty("workplace.file.maxuploadsize", size);
    }

    /**
     * Returns the maximum file upload size.<p>
     *
     * @return the maximum file upload size
     */
    public String getFileMaxUploadSize() {
        return getExtProperty("workplace.file.maxuploadsize");
    }
    /**
     * Returns the database name.<p>
     *
     * @return the database name
     */
    public String getDb() {
        return getDbProperty(m_databaseKey + ".dbname");
    }

    /**
     * Sets the database name.<p>
     *
     * @param db the database name to set
     */
    public void setDb(String db) {
        setDbProperty(m_databaseKey + ".dbname", db);
    }
/**
* Returns the database create statement.<p>
*
* @return the database create statement
*/
public String getDbCreateConStr() {
String str = null;
str = getDbProperty(m_databaseKey + ".constr");
return str;
}
    /**
     * Sets the database create statement.<p>
     *
     * @param dbCreateConStr the database create statement
     */
    public void setDbCreateConStr(String dbCreateConStr) {
        setDbProperty(m_databaseKey + ".constr", dbCreateConStr);
    }

    /**
     * Returns the database user that is used to connect to the database.<p>
     *
     * @return the database user
     */
    public String getDbCreateUser() {
        return getDbProperty(m_databaseKey + ".user");
    }

    /**
     * Set the database user that is used to connect to the database.<p>
     *
     * @param dbCreateUser the user to set
     */
    public void setDbCreateUser(String dbCreateUser) {
        setDbProperty(m_databaseKey + ".user", dbCreateUser);
    }

    /**
     * Returns the password used for database creation, or "" if none was set.<p>
     *
     * @return the password used for database creation
     */
    public String getDbCreatePwd() {
        return (m_dbCreatePwd != null) ? m_dbCreatePwd : "";
    }

    /**
     * Sets the password used for the initial OpenCms database creation.<p>
     *
     * This password will not be stored permanently,
     * but used only in the setup wizard.<p>
     *
     * @param dbCreatePwd the password used for the initial OpenCms database creation
     */
    public void setDbCreatePwd(String dbCreatePwd) {
        m_dbCreatePwd = dbCreatePwd;
    }
    /**
     * Checks if the setup wizard is enabled.<p>
     *
     * @return true if the setup wizard is enabled, false otherwise
     */
    public boolean getWizardEnabled() {
        return "true".equals(getExtProperty("wizard.enabled"));
    }

    /**
     * Locks (i.e. disables) the setup wizard.<p>
     */
    public void lockWizard() {
        setExtProperty("wizard.enabled", "false");
    }

    /**
     * Sets filename translation to enabled / disabled.<p>
     *
     * @param value value to set (must be "true" or "false")
     */
    public void setFilenameTranslationEnabled(String value) {
        setExtProperty("filename.translation.enabled", value);
    }

    /**
     * Returns "true" if filename translation is enabled.<p>
     *
     * @return "true" if filename translation is enabled
     */
    public String getFilenameTranslationEnabled() {
        return getExtProperty("filename.translation.enabled");
    }
    /**
     * Returns the specified HTML part of the HTML property file to create the output.<p>
     *
     * @param part the name of the desired part
     * @return the HTML part or an empty String, if the part was not found
     */
    public String getHtmlPart(String part) {
        return getHtmlPart(part, "");
    }

    /**
     * Returns the specified HTML part of the HTML property file to create the output.<p>
     *
     * NOTE(review): throws a NullPointerException if {@link #initHtmlParts()}
     * was never called (m_htmlProps is null) -- confirm the wizard always
     * initializes first.
     *
     * @param part the name of the desired part
     * @param replaceString String which is inserted in the found HTML part at the location of "$replace$"
     * @return the HTML part or an empty String, if the part was not found
     */
    public String getHtmlPart(String part, String replaceString) {
        String value = m_htmlProps.getProperty(part);
        if (value == null) {
            return "";
        } else {
            return CmsStringSubstitution.substitute(value, "$replace$", replaceString);
        }
    }
    /**
     * Sets directory translation to enabled / disabled.<p>
     *
     * @param value value to set (must be "true" or "false")
     */
    public void setDirectoryTranslationEnabled(String value) {
        setExtProperty("directory.translation.enabled", value);
    }

    /**
     * Returns "true" if directory translation is enabled.<p>
     *
     * @return "true" if directory translation is enabled
     */
    public String getDirectoryTranslationEnabled() {
        return getExtProperty("directory.translation.enabled");
    }
    /**
     * Sets the directory default index files.<p>
     *
     * This must be a comma separated list of files.<p>
     *
     * @param value the value to set
     */
    public void setDirectoryIndexFiles(String value) {
        setExtProperty("directory.default.files", value);
    }
/**
* Returns the directory default index files as a comma separated list.<p>
*
* @return the directory default index files as a comma separated list
*/
public String getDirectoryIndexFiles() {
Object value = null;
value = m_extProperties.get("directory.default.files");
if (value == null) {
// could be null...
return "";
}
if (value instanceof String) {
// ...a string...
return value.toString();
}
// ...or a vector!
Enumeration allIndexFiles = ((Vector)value).elements();
String indexFiles = "";
while (allIndexFiles.hasMoreElements()) {
indexFiles += (String)allIndexFiles.nextElement();
if (allIndexFiles.hasMoreElements()) {
indexFiles += ",";
}
}
return indexFiles;
}
/**
* Over simplistic helper to compare two strings to check radio buttons.
*
* @param value1 the first value
* @param value2 the secound value
* @return "checked" if both values are equal, the empty String "" otherwise
*/
public String isChecked(String value1, String value2) {
if (value1 == null || value2 == null) {
return "";
}
if (value1.trim().equalsIgnoreCase(value2.trim())) {
return "checked";
}
return "";
}
    /**
     * Returns the defaultContentEncoding.<p>
     *
     * @return the default content encoding
     */
    public String getDefaultContentEncoding() {
        return getExtProperty("defaultContentEncoding");
    }

    /**
     * Sets the defaultContentEncoding.<p>
     *
     * @param defaultContentEncoding the default content encoding to set
     */
    public void setDefaultContentEncoding(String defaultContentEncoding) {
        setExtProperty("defaultContentEncoding", defaultContentEncoding);
    }

    /**
     * Sets the webapp name.<p>
     *
     * @param value the new webapp name
     */
    public void setAppName(String value) {
        setExtProperty("app.name", value);
    }

    /**
     * Returns the webapp name.<p>
     *
     * @return the webapp name
     */
    public String getAppName() {
        return getExtProperty("app.name");
    }
    /**
     * Returns the replacer (tokens ${...} to be replaced in SQL scripts).<p>
     *
     * @return the replacer
     */
    public Map getReplacer() {
        return m_replacer;
    }

    /**
     * Sets the replacer (tokens ${...} to be replaced in SQL scripts).<p>
     *
     * @param map the replacer to set
     */
    public void setReplacer(Map map) {
        m_replacer = map;
    }
    /**
     * Returns the clear text name for a database server setup specified by a database key (e.g. "mysql", "generic" or "oracle").<p>
     *
     * NOTE(review): throws a NullPointerException for an unknown database key
     * -- confirm callers only pass keys from {@link #getDatabases()}.
     *
     * @param databaseKey a database key (e.g. "mysql", "generic" or "oracle")
     * @return the clear text name for a database server setup
     */
    public String getDatabaseName(String databaseKey) {
        return (String) ((Map) getDatabaseProperties().get(databaseKey)).get(databaseKey + ".name");
    }
    /**
     * Returns a map with lists of dependent module package names keyed by module package names.<p>
     *
     * @return a map with lists of dependent module package names keyed by module package names
     */
    public Map getModuleDependencies() {
        // side effect of getAvailableModules(): populates m_moduleDependencies
        getAvailableModules();
        return m_moduleDependencies;
    }
    /**
     * Returns a map with all available modules.<p>
     *
     * The map contains maps keyed by module package names. Each of these maps contains various
     * information about the module such as the module name, version, description, and a list of
     * it's dependencies. You should refer to the source code of this method to understand the data
     * structure of the map returned by this method!<p>
     *
     * Modules are discovered by scanning "/WEB-INF/packages/modules/" for ZIP
     * files or folders containing a "manifest.xml". The result is cached; this
     * method also populates the module dependency map as a side effect.
     *
     * @return a map with all available modules
     */
    public Map getAvailableModules() {
        File packagesFolder = null;
        File[] childResources = null;
        File childResource = null;
        Document manifest = null;
        String moduleName = null;
        String moduleNiceName = null;
        String moduleVersion = null;
        String moduleDescription = null;
        List dependencyNodes = null;
        List moduleDependencies = null;
        Element rootElement = null;
        Element moduleDependency = null;
        String moduleDependencyName = null;
        Map module = null;
        if (m_availableModules != null) {
            return m_availableModules;
        }
        try {
            m_availableModules = (Map) new HashMap();
            m_moduleDependencies = (Map) new HashMap();
            // open the folder "/WEB-INF/packages/modules/"
            packagesFolder = new File(m_basePath + "WEB-INF" + File.separator + "packages" + File.separator + "modules");
            if (packagesFolder.exists()) {
                // list all child resources in the packages folder
                childResources = packagesFolder.listFiles();
                if (childResources != null) {
                    for (int i = 0; i < childResources.length; i++) {
                        childResource = childResources[i];
                        // try to get manifest.xml either from a ZIP file or a subfolder
                        if (childResource.exists() && childResource.canRead() && (manifest = getManifest(childResource)) != null) {
                            // get the "export" node
                            rootElement = manifest.getRootElement();
                            // module package name
                            moduleName = ((Element) rootElement.selectNodes("//export/module/name").get(0)).getTextTrim();
                            // module nice name
                            moduleNiceName = ((Element) rootElement.selectNodes("//export/module/nicename").get(0)).getTextTrim();
                            // module version
                            moduleVersion = ((Element) rootElement.selectNodes("//export/module/version").get(0)).getTextTrim();
                            // module description
                            moduleDescription = ((Element) rootElement.selectNodes("//export/module/description").get(0)).getTextTrim();
                            // all module "dependency" sub nodes
                            dependencyNodes = rootElement.selectNodes("//export/module/dependencies/dependency");
                            // if module a depends on module b, and module c depends also on module b:
                            // build a map with a list containing "a" and "c" keyed by "b" to get a
                            // list of modules depending on module "b"...
                            for (int j = 0; j < dependencyNodes.size(); j++) {
                                moduleDependency = (Element) dependencyNodes.get(j);
                                // module dependency package name
                                moduleDependencyName = ((Element) moduleDependency.selectNodes("./name").get(0)).getTextTrim();
                                // get the list of dependend modules ("b" in the example)
                                moduleDependencies = (List) m_moduleDependencies.get(moduleDependencyName);
                                if (moduleDependencies == null) {
                                    // build a new list if "b" has no dependend modules yet
                                    moduleDependencies = (List) new ArrayList();
                                    m_moduleDependencies.put(moduleDependencyName, moduleDependencies);
                                }
                                // add "a" as a module depending on "b"
                                moduleDependencies.add(moduleName);
                            }
                            // create a map holding the collected module information
                            module = (Map) new HashMap();
                            module.put("name", moduleName);
                            module.put("niceName", moduleNiceName);
                            module.put("version", moduleVersion);
                            module.put("description", moduleDescription);
                            module.put("filename", childResource.getName());
                            // put the module information into a map keyed by the module packages names
                            m_availableModules.put(moduleName, module);
                        }
                    }
                }
            }
        } catch (Exception e) {
            System.err.println(e.toString());
            e.printStackTrace(System.err);
        }
        return m_availableModules;
    }
/**
 * Returns the "manifest.xml" of an available module as a dom4j document.<p>
 *
 * The manifest is either read as a ZIP entry, or from a subfolder of the specified
 * file resource.<p>
 *
 * @param resource a File resource
 * @return the "manifest.xml" as a dom4j document, or null if it could not be read
 */
protected Document getManifest(File resource) {
Document manifest = null;
ZipFile zipFile = null;
Reader reader = null;
try {
if (resource.isFile()) {
if (!resource.getName().toLowerCase().endsWith(".zip")) {
// skip non-ZIP files
return null;
}
// create a Reader from a ZIP file's manifest.xml entry...
zipFile = new ZipFile(resource);
ZipEntry zipFileEntry = zipFile.getEntry("manifest.xml");
if (zipFileEntry == null) {
// the ZIP contains no manifest.xml entry; previously this caused a NPE below
return null;
}
InputStream input = zipFile.getInputStream(zipFileEntry);
reader = new BufferedReader(new InputStreamReader(input));
} else if (resource.isDirectory()) {
// ...or from a subresource inside a folder
File manifestFile = new File(resource, "manifest.xml");
reader = new BufferedReader(new FileReader(manifestFile));
}
// transform the manifest.xml file into a dom4j Document
SAXReader saxReader = new SAXReader();
manifest = saxReader.read(reader);
} catch (Exception e) {
System.err.println("Error reading manifest.xml from resource: " + resource + ", " + e.toString());
e.printStackTrace(System.err);
manifest = null;
} finally {
try {
if (reader != null) {
reader.close();
}
} catch (Exception e) {
// noop
}
try {
if (zipFile != null) {
// close the ZIP to release its file handle (was leaked before)
zipFile.close();
}
} catch (Exception e) {
// noop
}
}
return manifest;
}
/**
 * Sets the list with the package names of the modules to be installed.<p>
 *
 * @param value a string with the package names of the modules to be installed delimited by the pipe symbol "|"
 */
public void setInstallModules(String value) {
// split the pipe-delimited value into individual package names
StringTokenizer tokenizer = new StringTokenizer(value, "|");
if (tokenizer.countTokens() > 0) {
m_installModules = new ArrayList();
while (tokenizer.hasMoreTokens()) {
m_installModules.add(tokenizer.nextToken());
}
} else {
// no tokens at all: fall back to an immutable empty list
m_installModules = Collections.EMPTY_LIST;
}
}
/**
 * Stores the shell's CmsObject for later module import operations.<p>
 *
 * @see org.opencms.main.I_CmsShellCommands#initShellCmsObject(org.opencms.file.CmsObject)
 */
public void initShellCmsObject(CmsObject cms) {
this.m_cms = cms;
}
/**
 * Installed all modules that have been set using {@link #setInstallModules(String)}.<p>
 *
 * This method is invoked as a shell command.<p>
 *
 * @throws Exception if something goes wrong
 */
public void importModulesFromSetupBean() throws Exception {
// read here how the list of modules to be installed is passed from the setup bean to the
// setup thread, and finally to the shell process that executes the setup script:
// 1) the list with the package names of the modules to be installed is saved by setInstallModules
// 2) the setup thread gets initialized in a JSP of the setup wizard
// 3) the instance of the setup bean is passed to the setup thread by setAdditionalShellCommand
// 4) the setup bean is passed to the shell by startSetup
// 5) because the setup bean implements I_CmsShellCommands, the shell constructor can pass the shell's CmsObject back to the setup bean
// 6) thus, the setup bean can do things with the Cms
if (m_cms == null || m_installModules == null) {
// nothing to do without a CmsObject or a module list
return;
}
for (int i = 0; i < m_installModules.size(); i++) {
Map module = (Map) m_availableModules.get(m_installModules.get(i));
if (module == null) {
// guard against unknown package names instead of failing with a NullPointerException
System.err.println("Skipping unknown module: " + m_installModules.get(i));
continue;
}
String filename = (String) module.get("filename");
importModuleFromDefault(filename);
}
}
/**
 * Imports a module (zipfile) from the default module directory,
 * creating a temporary project for this.<p>
 *
 * @param importFile the name of the import module located in the default module directory
 * @throws Exception if something goes wrong
 * @see CmsRegistry#importModule(String, Vector, org.opencms.report.I_CmsReport)
 */
protected void importModuleFromDefault(String importFile) throws Exception {
// build the complete filename
String exportPath = m_cms.readPackagePath();
String fileName = OpenCms.getSystemInfo().getAbsolutePathRelativeToWebInf(exportPath + CmsRegistry.C_MODULE_PATH + importFile);
// import the module
System.out.println("Importing module: " + fileName);
// create a temporary project for the import
CmsProject project = m_cms.createProject(
"ModuleImport",
"A temporary project to import the module " + importFile,
OpenCms.getDefaultUsers().getGroupAdministrators(),
OpenCms.getDefaultUsers().getGroupAdministrators(),
I_CmsConstants.C_PROJECT_TYPE_TEMPORARY
);
int id = project.getId();
m_cms.getRequestContext().setCurrentProject(id);
m_cms.getRequestContext().saveSiteRoot();
try {
// switch to the root site for the copy; restore even on failure so the
// request context is not left pointing at "/" (previously leaked on error)
m_cms.getRequestContext().setSiteRoot("/");
m_cms.copyResourceToProject("/");
} finally {
m_cms.getRequestContext().restoreSiteRoot();
}
// import the module
CmsRegistry reg = m_cms.getRegistry();
reg.importModule(fileName, new Vector(), new CmsShellReport());
// finally publish the project
m_cms.unlockProject(id);
m_cms.publishProject();
}
}
|
package org.uct.cs.hough;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.Collection;
import javax.imageio.ImageIO;
import javax.swing.*;
import org.uct.cs.hough.gui.ScalingImagePanel;
import org.uct.cs.hough.util.Circle;
import org.uct.cs.hough.util.CircleAdder;
import org.uct.cs.hough.util.ImageFileFilter;
/**
 * Swing front-end for the circle detector: lets the user pick an image file,
 * runs CircleDetection on it, and shows the original, edge, Hough-space and
 * result images in tabs.
 */
public class GuiDriver
{
    private static final int BORDER_GAP = 5;

    private JFrame frame;
    private JPanel panel1;
    private JTabbedPane tabbedPane1;
    private JButton loadImageButton;
    private JFileChooser imageChooser;
    private JLabel statusBar;
    private ScalingImagePanel tabPanel1, tabPanel2, tabPanel3, tabPanel4;

    public GuiDriver()
    {
        createGui();
        imageChooser = new JFileChooser();
        imageChooser.addChoosableFileFilter(new ImageFileFilter());
        imageChooser.setAcceptAllFileFilterUsed(false);
        imageChooser.setCurrentDirectory(Paths.get(".").toFile());
        // Use an ActionListener rather than a MouseListener: mouseClicked never
        // fires for keyboard/mnemonic activation (the button sets mnemonic 'L')
        // and would also trigger on a disabled button.
        loadImageButton.addActionListener(
            new ActionListener()
            {
                @Override
                public void actionPerformed(ActionEvent e)
                {
                    int result = imageChooser.showOpenDialog(panel1);
                    if (result == JFileChooser.APPROVE_OPTION)
                    {
                        try
                        {
                            runCircleDetection(imageChooser.getSelectedFile());
                        }
                        catch (IOException e1)
                        {
                            JOptionPane.showMessageDialog(frame, e1.getMessage(), "Error", JOptionPane.ERROR_MESSAGE);
                            e1.printStackTrace();
                        }
                    }
                }
            }
        );
    }

    /** Builds the frame: load button (north), result tabs (center), status bar (south). */
    private void createGui()
    {
        frame = new JFrame("Circle Detector");
        panel1 = new JPanel(new BorderLayout(BORDER_GAP, BORDER_GAP));
        panel1.setBorder(BorderFactory.createEmptyBorder(BORDER_GAP, BORDER_GAP, BORDER_GAP, BORDER_GAP));
        loadImageButton = new JButton();
        loadImageButton.setText("Load Image");
        loadImageButton.setMnemonic('L');
        loadImageButton.setDisplayedMnemonicIndex(0);
        loadImageButton.setPreferredSize(new Dimension(120, 40));
        panel1.add(loadImageButton, BorderLayout.NORTH);
        tabbedPane1 = new JTabbedPane();
        tabPanel1 = new ScalingImagePanel();
        tabbedPane1.addTab("Original", tabPanel1);
        tabPanel2 = new ScalingImagePanel();
        tabbedPane1.addTab("Edges", tabPanel2);
        tabPanel3 = new ScalingImagePanel();
        tabbedPane1.addTab("Hough Space", tabPanel3);
        tabPanel4 = new ScalingImagePanel();
        // fixed typo: was "Detected Circled"
        tabbedPane1.addTab("Detected Circles", tabPanel4);
        tabbedPane1.setEnabled(false);
        panel1.add(tabbedPane1, BorderLayout.CENTER);
        statusBar = new JLabel("Press 'Load Image' to perform Circle Detection.");
        panel1.add(statusBar, BorderLayout.SOUTH);
        frame.setContentPane(panel1);
        frame.setLocationRelativeTo(null);
        frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
        frame.pack();
        frame.setVisible(true);
    }

    /**
     * Runs the detection pipeline on the given file and populates all four tabs.
     *
     * @param f image file chosen by the user
     * @throws IOException if the image cannot be read
     */
    private void runCircleDetection(File f) throws IOException
    {
        BufferedImage image = ImageIO.read(f);
        tabbedPane1.setEnabled(true);
        tabPanel1.setImage(image);
        long startTime = System.nanoTime();
        // ask the detector to retain its intermediate images so we can display them
        CircleDetection.storeHoughAccumImage();
        CircleDetection.storeEdgeImage();
        Collection<Circle> circles = CircleDetection.detect(image);
        BufferedImage output = CircleAdder.combine(image, circles);
        tabPanel2.setImage(CircleDetection.getStoredEdgeImage());
        tabPanel3.setImage(CircleDetection.getStoredHoughAccumImage());
        tabPanel4.setImage(output);
        tabbedPane1.setSelectedIndex(3);
        long elapsed = System.nanoTime() - startTime;
        statusBar.setText(String.format("File: %s | Elapsed: %s", f.getAbsolutePath(), org.uct.cs.hough.util.Timer.formatTime(elapsed)));
        frame.pack();
    }

    public static void main(String[] args)
    {
        try
        {
            UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
        }
        catch (ClassNotFoundException | InstantiationException | IllegalAccessException | UnsupportedLookAndFeelException e)
        {
            e.printStackTrace();
        }
        // Swing components must be created on the Event Dispatch Thread
        SwingUtilities.invokeLater(new Runnable()
        {
            @Override
            public void run()
            {
                new GuiDriver();
            }
        });
    }
}
|
public class Solution {
    /**
     * Counts the primes strictly less than {@code n} using a Sieve of Eratosthenes.
     *
     * @param n exclusive upper bound; any value below 3 (including negatives) yields 0
     * @return the number of primes p with 2 <= p < n
     */
    public int countPrimes(int n) {
        if (n < 3) {
            // no primes below 2; also guards against negative n, which previously
            // threw NegativeArraySizeException from new boolean[n]
            return 0;
        }
        boolean[] isPrime = new boolean[n];
        // 0 and 1 stay false; everything from 2 starts as a prime candidate
        Arrays.fill(isPrime, 2, n, true);
        // (long) cast prevents int overflow of i * i for n near Integer.MAX_VALUE
        for (int i = 2; (long) i * i < n; i++) {
            if (!isPrime[i]) {
                continue;
            }
            // start at i * i: smaller multiples were crossed off by smaller factors
            for (int j = i * i; j < n; j += i) {
                isPrime[j] = false;
            }
        }
        int count = 0;
        for (int i = 2; i < n; i++) {
            if (isPrime[i]) {
                count++;
            }
        }
        return count;
    }
}
|
package utils;
import java.io.InvalidClassException;
import java.io.IOException;
import java.io.NotSerializableException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OptionalDataException;
import java.io.StreamCorruptedException;
import java.lang.ClassNotFoundException;
import java.net.Socket;
import utils.Request;
import utils.Log;
public abstract class NetworkNode {
private int port = -1;
private volatile int numberSend = 0;
public synchronized void send(Request r, ObjectOutputStream outputStream) {
try {
outputStream.writeObject(r);
numberSend++;
if (numberSend % 40 == 0) {
outputStream.reset();
}
outputStream.flush();
} catch (InvalidClassException e) {
Log.error("NetworkNode send() - Something is wrong with a class used by serialization.");
} catch (NotSerializableException e) {
Log.error("NetworkNode send() - The object does not implement java.io.Serializable.");
} catch (IOException e) {
Log.error("NetworkNode send() - I/O error on the OutputStream.");
e.printStackTrace();
}
}
/** Reads from the ObjectInputStream and returns a Request
if there was one, returns null otherwise.
*/
public Request receive(ObjectInputStream ois) {
Request result = null;
try {
result = (Request) ois.readObject();
} catch (ClassNotFoundException e) {
Log.error("NetworkNode receive() : Class of a serialized object cannot be found.");
} catch (InvalidClassException e) {
Log.error("NetworkNode receive() : Something is wrong with a class used by serialization.");
} catch(StreamCorruptedException e) {
Log.error("NetworkNode receive() : Control information in the stream is inconsistent.");
} catch(OptionalDataException e) {
Log.error("NetworkNode receive() : Primitive data was found in the stream instead of objects.");
} catch(IOException e) {
Log.error("NetworkNode receive() : Input/Output related exceptions");
e.printStackTrace();
}
return result;
}
public abstract void stop();
public abstract void start();
public int getPort() {
return this.port;
}
/**
Set the port of the server from a String containing its number
*/
public void setPort(String port) {
try {
this.port = Integer.parseInt(port);
} catch (NumberFormatException e) {
Log.error("NetworkNode setPort() - The string does not contain a parsable integer. Exiting...");
System.exit(-1);
}
}
/**
Returns the ObjectInputStream of the Socket if it is possible, null
otherwise.
*/
public ObjectInputStream getSocketInputStream(Socket socket) {
ObjectInputStream ois = null;
try {
ois = new ObjectInputStream(socket.getInputStream());
} catch (IOException e){
Log.error(
"NetworkNode getSocketInputStream() - The socket is closed, " +
"not connected or the input has been shutdown."
);
}
return ois;
}
public void closeStream(ObjectOutputStream oos){
try{
oos.close();
} catch (IOException e){
Log.error("flushOutput - Error");
}
}
public ObjectOutputStream getSocketOutputStream(Socket socket) {
ObjectOutputStream oos = null;
try {
oos = new ObjectOutputStream(socket.getOutputStream());
oos.flush();
} catch (IOException e) {
Log.error("NetworkNode getSocketOutputStream() - I/O error " +
"occured when creating the output stream or socket is not connected.");
}
return oos;
}
public void threadSleep(long milliseconds) {
try {
Thread.sleep(milliseconds);
} catch (InterruptedException e){
System.err.println("Error - Client threadSleep() - thread was interrupted.");
e.printStackTrace();
}
}
}
|
package biomodel.gui.sbmlcore;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.net.URI;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JComboBox;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTextArea;
import javax.swing.JTextField;
import javax.swing.ListSelectionModel;
import main.Gui;
import main.util.Utility;
import org.sbml.jsbml.ASTNode;
import org.sbml.jsbml.Compartment;
import org.sbml.jsbml.InitialAssignment;
import org.sbml.jsbml.KineticLaw;
import org.sbml.jsbml.ListOf;
import org.sbml.jsbml.LocalParameter;
import org.sbml.jsbml.Model;
import org.sbml.jsbml.ModifierSpeciesReference;
import org.sbml.jsbml.Parameter;
import org.sbml.jsbml.Reaction;
import org.sbml.jsbml.SBMLDocument;
import org.sbml.jsbml.SBase;
import org.sbml.jsbml.SimpleSpeciesReference;
import org.sbml.jsbml.Species;
import org.sbml.jsbml.SpeciesReference;
import org.sbml.jsbml.UnitDefinition;
import org.sbml.jsbml.ext.arrays.ArraysSBasePlugin;
import org.sbml.jsbml.ext.arrays.Index;
import org.sbml.jsbml.ext.comp.Port;
import org.sbml.jsbml.ext.fbc.FluxBound;
import biomodel.annotation.AnnotationUtility;
import biomodel.annotation.SBOLAnnotation;
import biomodel.gui.sbol.SBOLField;
import biomodel.gui.schematic.ModelEditor;
import biomodel.parser.BioModel;
import biomodel.util.GlobalConstants;
import biomodel.util.SBMLutilities;
/**
* This is a class for creating SBML parameters
*
* @author Chris Myers
*
*/
public class Reactions extends JPanel implements ActionListener, MouseListener {
private static final long serialVersionUID = 1L;
private JComboBox stoiciLabel;
private JComboBox reactionComp; // compartment combo box
private JList reactions; // JList of reactions
private String[] reacts; // array of reactions
/*
 * reactions buttons
 */
private JButton addReac, removeReac, editReac;
private JList reacParameters; // JList of reaction parameters
private String[] reacParams; // array of reaction parameters
/*
 * reaction parameters buttons
 */
private JButton reacAddParam, reacRemoveParam, reacEditParam;
private ArrayList<LocalParameter> changedParameters; // ArrayList of local parameters being edited
/*
 * reaction parameters text fields
 */
private JTextField reacParamID, reacParamValue, reacParamName;
private JComboBox reacParamUnits;
private JTextField reacID, reacName; // reaction name and id text fields
private JCheckBox onPort, paramOnPort;
private JComboBox reacReverse, reacFast; // reaction reversible / fast combo boxes
/*
 * reactant buttons
 */
private JButton addReactant, removeReactant, editReactant;
private JList reactants; // JList for reactants
private String[] reacta; // array for reactants
private JComboBox reactantConstant;
/*
 * ArrayList of reactants
 */
private ArrayList<SpeciesReference> changedReactants;
/*
 * product buttons
 */
private JButton addProduct, removeProduct, editProduct;
private JList products; // JList for products
private String[] proda; // array for products
private JComboBox productConstant;
/*
 * ArrayList of products
 */
private ArrayList<SpeciesReference> changedProducts;
/*
 * modifier buttons
 */
private JButton addModifier, removeModifier, editModifier;
private JList modifiers; // JList for modifiers
private String[] modifierArray; // array for modifiers
/*
 * ArrayList of modifiers
 */
private ArrayList<ModifierSpeciesReference> changedModifiers;
private JComboBox productSpecies; // ComboBox for product editing
private JComboBox modifierSpecies; // ComboBox for modifier editing
private JTextField productId;
private JTextField productName;
private JTextField modifierName;
private JTextField productStoichiometry; // text field for editing products
private JComboBox reactantSpecies; // ComboBox for reactant editing
// NOTE(review): Ri/Pi/Mi look like arrays-index fields for reactants/products/
// modifiers (CiIndex holds "Compartment Indices" in the editor) — confirm
private JTextField RiIndex;
private JTextField PiIndex;
private JTextField MiIndex, modifierId;
private JTextField CiIndex; // compartment index expressions, e.g. "[i]"
/*
 * text field for editing reactants
 */
private JTextField reactantId;
private JTextField reactantName;
private SBOLField sbolField; // SBOL DNA-component annotation field
private JTextField reactantStoichiometry;
private JTextArea kineticLaw; // text area for editing kinetic law
private ArrayList<String> thisReactionParams;
private JButton useMassAction, clearKineticLaw;
private BioModel bioModel;
private Boolean paramsOnly; // true when only parameter values may be edited
private String file;
private ArrayList<String> parameterChanges;
private InitialAssignments initialsPanel;
private Rules rulesPanel;
private String selectedReaction; // id of the reaction currently open in the editor
private ModelEditor modelEditor;
private Reaction complex = null;
private Reaction production = null;
private JComboBox SBOTerms = null;
private JTextField repCooperativity, actCooperativity, repBinding, actBinding;
// Builds the reactions list panel: add/remove/edit buttons plus a JList of all
// reactions in the model. In paramsOnly mode, local kinetic-law parameter values
// listed in getParams are applied to the model and the affected reactions are
// tagged " Modified" in the list.
public Reactions(BioModel gcm, Boolean paramsOnly, ArrayList<String> getParams, String file, ArrayList<String> parameterChanges,
ModelEditor gcmEditor) {
super(new BorderLayout());
this.bioModel = gcm;
this.paramsOnly = paramsOnly;
this.file = file;
this.parameterChanges = parameterChanges;
this.modelEditor = gcmEditor;
Model model = gcm.getSBMLDocument().getModel();
JPanel addReacs = new JPanel();
addReac = new JButton("Add Reaction");
removeReac = new JButton("Remove Reaction");
editReac = new JButton("Edit Reaction");
addReacs.add(addReac);
addReacs.add(removeReac);
addReacs.add(editReac);
addReac.addActionListener(this);
removeReac.addActionListener(this);
editReac.addActionListener(this);
if (paramsOnly) {
// in parameter-editing mode reactions cannot be added or removed
addReac.setEnabled(false);
removeReac.setEnabled(false);
}
JLabel reactionsLabel = new JLabel("List of Reactions:");
reactions = new JList();
reactions.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
JScrollPane scroll2 = new JScrollPane();
scroll2.setViewportView(reactions);
ListOf<Reaction> listOfReactions = model.getListOfReactions();
reacts = new String[model.getReactionCount()];
for (int i = 0; i < model.getReactionCount(); i++) {
Reaction reaction = listOfReactions.get(i);
reacts[i] = reaction.getId();
if (paramsOnly && reaction.getKineticLaw()!=null) {
ListOf<LocalParameter> params = reaction.getKineticLaw().getListOfLocalParameters();
for (int j = 0; j < reaction.getKineticLaw().getLocalParameterCount(); j++) {
LocalParameter paramet = (params.get(j));
for (int k = 0; k < getParams.size(); k++) {
// entries in getParams start with "<reactionId>/<paramId>" and end
// with "<kind> <value>" (kind: Modified, Custom or Sweep)
if (getParams.get(k).split(" ")[0].equals(reaction.getId() + "/" + paramet.getId())) {
parameterChanges.add(getParams.get(k));
String[] splits = getParams.get(k).split(" ");
if (splits[splits.length - 2].equals("Modified") || splits[splits.length - 2].equals("Custom")) {
String value = splits[splits.length - 1];
paramet.setValue(Double.parseDouble(value));
}
else if (splits[splits.length - 2].equals("Sweep")) {
// use the first number of the sweep (leading '(' stripped)
String value = splits[splits.length - 1];
paramet.setValue(Double.parseDouble(value.split(",")[0].substring(1).trim()));
}
if (!reacts[i].contains("Modified")) {
// tag the list entry so the user can see it differs from the default
reacts[i] += " Modified";
}
}
}
}
}
}
Utility.sort(reacts);
reactions.setListData(reacts);
reactions.setSelectedIndex(0);
reactions.addMouseListener(this);
this.add(reactionsLabel, "North");
this.add(scroll2, "Center");
this.add(addReacs, "South");
}
/**
* Creates a frame used to edit reactions or create new ones.
*/
public void reactionsEditor(BioModel bioModel, String option, String reactionId, boolean inSchematic) {
/*
* if (option.equals("OK") && reactions.getSelectedIndex() == -1) {
* JOptionPane.showMessageDialog(Gui.frame, "No reaction selected.",
* "Must Select A Reaction", JOptionPane.ERROR_MESSAGE); return; }
*/
selectedReaction = reactionId;
JLabel id = new JLabel("ID:");
reacID = new JTextField(15);
JLabel name = new JLabel("Name:");
if (bioModel.getSBMLDocument().getLevel() < 3) {
reacName = new JTextField(50);
}
else {
reacName = new JTextField(30);
}
JLabel onPortLabel = new JLabel("Is Mapped to a Port:");
onPort = new JCheckBox();
JLabel reactionCompLabel = new JLabel("Compartment:");
ListOf<Compartment> listOfCompartments = bioModel.getSBMLDocument().getModel().getListOfCompartments();
String[] addC = new String[bioModel.getSBMLDocument().getModel().getCompartmentCount()];
for (int i = 0; i < bioModel.getSBMLDocument().getModel().getCompartmentCount(); i++) {
addC[i] = listOfCompartments.get(i).getId();
}
reactionComp = new JComboBox(addC);
reactionComp.addActionListener(this);
JLabel reverse = new JLabel("Reversible:");
String[] options = { "true", "false" };
reacReverse = new JComboBox(options);
reacReverse.setSelectedItem("false");
JLabel fast = new JLabel("Fast:");
reacFast = new JComboBox(options);
reacFast.setSelectedItem("false");
Reaction copyReact = null;
JPanel param = new JPanel(new BorderLayout());
JPanel addParams = new JPanel();
reacAddParam = new JButton("Add Parameter");
reacRemoveParam = new JButton("Remove Parameter");
reacEditParam = new JButton("Edit Parameter");
addParams.add(reacAddParam);
addParams.add(reacRemoveParam);
addParams.add(reacEditParam);
reacAddParam.addActionListener(this);
reacRemoveParam.addActionListener(this);
reacEditParam.addActionListener(this);
JLabel parametersLabel = new JLabel("List Of Local Parameters:");
reacParameters = new JList();
reacParameters.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
JScrollPane scroll = new JScrollPane();
scroll.setMinimumSize(new Dimension(260, 220));
scroll.setPreferredSize(new Dimension(276, 152));
scroll.setViewportView(reacParameters);
reacParams = new String[0];
changedParameters = new ArrayList<LocalParameter>();
thisReactionParams = new ArrayList<String>();
if (option.equals("OK")) {
Reaction reac = bioModel.getSBMLDocument().getModel().getReaction(reactionId);
if (reac.getKineticLaw()!=null) {
//reac.createKineticLaw();
ListOf<LocalParameter> listOfParameters = reac.getKineticLaw().getListOfLocalParameters();
reacParams = new String[reac.getKineticLaw().getLocalParameterCount()];
for (int i = 0; i < reac.getKineticLaw().getLocalParameterCount(); i++) {
/*
* This code is a hack to get around a local parameter
* conversion bug in libsbml
*/
LocalParameter pp = listOfParameters.get(i);
LocalParameter parameter = new LocalParameter(bioModel.getSBMLDocument().getLevel(), bioModel.getSBMLDocument().getVersion());
parameter.setId(pp.getId());
SBMLutilities.setMetaId(parameter, pp.getMetaId());
parameter.setName(pp.getName());
parameter.setValue(pp.getValue());
parameter.setUnits(pp.getUnits());
changedParameters.add(parameter);
thisReactionParams.add(parameter.getId());
String p;
if (parameter.isSetUnits()) {
p = parameter.getId() + " " + parameter.getValue() + " " + parameter.getUnits();
}
else {
p = parameter.getId() + " " + parameter.getValue();
}
if (paramsOnly) {
for (int j = 0; j < parameterChanges.size(); j++) {
if (parameterChanges.get(j).split(" ")[0].equals(selectedReaction + "/" + parameter.getId())) {
p = parameterChanges.get(j).split("/")[1];
}
}
}
reacParams[i] = p;
}
}
}
else {
// Parameter p = new Parameter(BioSim.SBML_LEVEL,
// BioSim.SBML_VERSION);
LocalParameter p = new LocalParameter(bioModel.getSBMLDocument().getLevel(), bioModel.getSBMLDocument().getVersion());
p.setId("kf");
p.setValue(0.1);
changedParameters.add(p);
// p = new Parameter(BioSim.SBML_LEVEL, BioSim.SBML_VERSION);
p = new LocalParameter(bioModel.getSBMLDocument().getLevel(), bioModel.getSBMLDocument().getVersion());
p.setId("kr");
p.setValue(1.0);
changedParameters.add(p);
reacParams = new String[2];
reacParams[0] = "kf 0.1";
reacParams[1] = "kr 1.0";
thisReactionParams.add("kf");
thisReactionParams.add("kr");
}
Utility.sort(reacParams);
reacParameters.setListData(reacParams);
reacParameters.setSelectedIndex(0);
reacParameters.addMouseListener(this);
param.add(parametersLabel, "North");
param.add(scroll, "Center");
param.add(addParams, "South");
JPanel reactantsPanel = new JPanel(new BorderLayout());
JPanel addReactants = new JPanel();
addReactant = new JButton("Add Reactant");
removeReactant = new JButton("Remove Reactant");
editReactant = new JButton("Edit Reactant");
addReactants.add(addReactant);
addReactants.add(removeReactant);
addReactants.add(editReactant);
addReactant.addActionListener(this);
removeReactant.addActionListener(this);
editReactant.addActionListener(this);
JLabel reactantsLabel = new JLabel("List Of Reactants:");
reactants = new JList();
reactants.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
JScrollPane scroll2 = new JScrollPane();
scroll2.setMinimumSize(new Dimension(260, 220));
scroll2.setPreferredSize(new Dimension(276, 152));
scroll2.setViewportView(reactants);
reacta = new String[0];
changedReactants = new ArrayList<SpeciesReference>();
if (option.equals("OK")) {
Reaction reac = bioModel.getSBMLDocument().getModel().getReaction(reactionId);
ListOf<SpeciesReference> listOfReactants = reac.getListOfReactants();
reacta = new String[reac.getReactantCount()];
for (int i = 0; i < reac.getReactantCount(); i++) {
SpeciesReference reactant = listOfReactants.get(i);
changedReactants.add(reactant);
reacta[i] = reactant.getSpecies() + " " + reactant.getStoichiometry();
}
}
Utility.sort(reacta);
reactants.setListData(reacta);
reactants.setSelectedIndex(0);
reactants.addMouseListener(this);
reactantsPanel.add(reactantsLabel, "North");
reactantsPanel.add(scroll2, "Center");
reactantsPanel.add(addReactants, "South");
JPanel productsPanel = new JPanel(new BorderLayout());
JPanel addProducts = new JPanel();
addProduct = new JButton("Add Product");
removeProduct = new JButton("Remove Product");
editProduct = new JButton("Edit Product");
addProducts.add(addProduct);
addProducts.add(removeProduct);
addProducts.add(editProduct);
addProduct.addActionListener(this);
removeProduct.addActionListener(this);
editProduct.addActionListener(this);
JLabel productsLabel = new JLabel("List Of Products:");
products = new JList();
products.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
JScrollPane scroll3 = new JScrollPane();
scroll3.setMinimumSize(new Dimension(260, 220));
scroll3.setPreferredSize(new Dimension(276, 152));
scroll3.setViewportView(products);
proda = new String[0];
changedProducts = new ArrayList<SpeciesReference>();
if (option.equals("OK")) {
Reaction reac = bioModel.getSBMLDocument().getModel().getReaction(reactionId);
ListOf<SpeciesReference> listOfProducts = reac.getListOfProducts();
proda = new String[reac.getProductCount()];
for (int i = 0; i < reac.getProductCount(); i++) {
SpeciesReference product = listOfProducts.get(i);
changedProducts.add(product);
this.proda[i] = product.getSpecies() + " " + product.getStoichiometry();
}
}
Utility.sort(proda);
products.setListData(proda);
products.setSelectedIndex(0);
products.addMouseListener(this);
productsPanel.add(productsLabel, "North");
productsPanel.add(scroll3, "Center");
productsPanel.add(addProducts, "South");
JPanel modifierPanel = new JPanel(new BorderLayout());
JPanel addModifiers = new JPanel();
addModifier = new JButton("Add Modifier");
removeModifier = new JButton("Remove Modifier");
editModifier = new JButton("Edit Modifier");
addModifiers.add(addModifier);
addModifiers.add(removeModifier);
addModifiers.add(editModifier);
addModifier.addActionListener(this);
removeModifier.addActionListener(this);
editModifier.addActionListener(this);
JLabel modifiersLabel = new JLabel("List Of Modifiers:");
modifiers = new JList();
modifiers.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
JScrollPane scroll5 = new JScrollPane();
scroll5.setMinimumSize(new Dimension(260, 220));
scroll5.setPreferredSize(new Dimension(276, 152));
scroll5.setViewportView(modifiers);
modifierArray = new String[0];
changedModifiers = new ArrayList<ModifierSpeciesReference>();
if (option.equals("OK")) {
Reaction reac = bioModel.getSBMLDocument().getModel().getReaction(reactionId);
ListOf<ModifierSpeciesReference> listOfModifiers = reac.getListOfModifiers();
modifierArray = new String[reac.getModifierCount()];
for (int i = 0; i < reac.getModifierCount(); i++) {
ModifierSpeciesReference modifier = listOfModifiers.get(i);
changedModifiers.add(modifier);
this.modifierArray[i] = modifier.getSpecies();
}
}
Utility.sort(modifierArray);
modifiers.setListData(modifierArray);
modifiers.setSelectedIndex(0);
modifiers.addMouseListener(this);
modifierPanel.add(modifiersLabel, "North");
modifierPanel.add(scroll5, "Center");
modifierPanel.add(addModifiers, "South");
JComboBox kineticFluxLabel = new JComboBox(new String[] {"Kinetic Law:","Flux Bounds:"});
kineticLaw = new JTextArea();
kineticLaw.setLineWrap(true);
kineticLaw.setWrapStyleWord(true);
useMassAction = new JButton("Use Mass Action");
clearKineticLaw = new JButton("Clear");
useMassAction.addActionListener(this);
clearKineticLaw.addActionListener(this);
JPanel kineticButtons = new JPanel();
kineticButtons.add(useMassAction);
kineticButtons.add(clearKineticLaw);
JScrollPane scroll4 = new JScrollPane();
scroll4.setMinimumSize(new Dimension(100, 100));
scroll4.setPreferredSize(new Dimension(100, 100));
scroll4.setViewportView(kineticLaw);
if (option.equals("OK")) {
if (bioModel.getSBMLDocument().getModel().getReaction(reactionId).getKineticLaw()!=null) {
kineticFluxLabel.setSelectedIndex(0);
kineticLaw.setText(bioModel.removeBooleans(bioModel.getSBMLDocument().getModel().getReaction(reactionId).getKineticLaw().getMath()));
} else {
kineticFluxLabel.setSelectedIndex(1);
String fluxbounds = "";
for(int i = 0; i < bioModel.getSBMLFBC().getListOfFluxBounds().size(); i++){
if(bioModel.getSBMLFBC().getListOfFluxBounds().get(i).getReaction().equals(reactionId)){
if(bioModel.getSBMLFBC().getListOfFluxBounds().get(i).getOperation().toString().equals("greaterEqual")){
fluxbounds = bioModel.getSBMLFBC().getListOfFluxBounds().get(i).getValue() + "<=" + fluxbounds;
if(!fluxbounds.contains(reactionId)){
fluxbounds += reactionId;
}
}
if(bioModel.getSBMLFBC().getListOfFluxBounds().get(i).getOperation().toString().equals("lessEqual")){
fluxbounds += "<=" + bioModel.getSBMLFBC().getListOfFluxBounds().get(i).getValue();
if(!fluxbounds.contains(reactionId)){
fluxbounds = reactionId + fluxbounds;
}
}
if(bioModel.getSBMLFBC().getListOfFluxBounds().get(i).getOperation().toString().equals("equal")){
double value = bioModel.getSBMLFBC().getListOfFluxBounds().get(i).getValue();
fluxbounds = value + "<=" + reactionId + "<=" + value;
}
}
}
kineticLaw.setText(fluxbounds);
}
}
JPanel kineticPanel = new JPanel(new BorderLayout());
kineticPanel.add(kineticFluxLabel, "North");
kineticPanel.add(scroll4, "Center");
kineticPanel.add(kineticButtons, "South");
JPanel reactionPanel = new JPanel(new BorderLayout());
JPanel reactionPanelNorth = new JPanel();
reactionPanelNorth.setLayout(new GridLayout(3, 1));
JPanel reactionPanelNorth1 = new JPanel();
JPanel reactionPanelNorth3 = new JPanel();
JPanel reactionPanelNorth4 = new JPanel();
CiIndex = new JTextField(20);
reactionPanelNorth1.add(id);
reactionPanelNorth1.add(reacID);
reactionPanelNorth1.add(name);
reactionPanelNorth1.add(reacName);
reactionPanelNorth1.add(onPortLabel);
reactionPanelNorth1.add(onPort);
reactionPanelNorth3.add(reactionCompLabel);
reactionPanelNorth3.add(reactionComp);
reactionPanelNorth3.add(new JLabel("Compartment Indices:"));
reactionPanelNorth3.add(CiIndex);
reactionPanelNorth4.add(reverse);
reactionPanelNorth4.add(reacReverse);
reactionPanelNorth4.add(fast);
reactionPanelNorth4.add(reacFast);
// Parse out SBOL annotations and add to SBOL field
if (!paramsOnly) {
// Field for annotating reaction with SBOL DNA components
List<URI> sbolURIs = new LinkedList<URI>();
String sbolStrand = "";
if (option.equals("OK")) {
Reaction reac = bioModel.getSBMLDocument().getModel().getReaction(reactionId);
sbolStrand = AnnotationUtility.parseSBOLAnnotation(reac, sbolURIs);
}
sbolField = new SBOLField(sbolURIs, sbolStrand, GlobalConstants.SBOL_DNA_COMPONENT, modelEditor, 2, false);
reactionPanelNorth4.add(sbolField);
}
reactionPanelNorth.add(reactionPanelNorth1);
reactionPanelNorth.add(reactionPanelNorth3);
reactionPanelNorth.add(reactionPanelNorth4);
if (inSchematic) {
reactionPanel.add(reactionPanelNorth, "North");
reactionPanel.add(param, "Center");
reactionPanel.add(kineticPanel, "South");
}
else {
JPanel reactionPanelCentral = new JPanel(new GridLayout(1, 3));
JPanel reactionPanelSouth = new JPanel(new GridLayout(1, 2));
reactionPanelCentral.add(reactantsPanel);
reactionPanelCentral.add(productsPanel);
reactionPanelCentral.add(modifierPanel);
reactionPanelSouth.add(param);
reactionPanelSouth.add(kineticPanel);
reactionPanel.add(reactionPanelNorth, "North");
reactionPanel.add(reactionPanelCentral, "Center");
reactionPanel.add(reactionPanelSouth, "South");
}
if (option.equals("OK")) {
Reaction reac = bioModel.getSBMLDocument().getModel().getReaction(reactionId);
copyReact = reac.clone();
ArraysSBasePlugin sBasePlugin = SBMLutilities.getArraysSBasePlugin(reac);
String dimInID = "";
for(int i = sBasePlugin.getDimensionCount()-1; i>=0; i
org.sbml.jsbml.ext.arrays.Dimension dimX = sBasePlugin.getDimensionByArrayDimension(i);
dimInID += "[" + dimX.getSize() + "]";
}
String freshIndex = "";
for(int i = sBasePlugin.getIndexCount()-1; i>=0; i
Index indie = sBasePlugin.getIndex(i);
freshIndex += "[" + SBMLutilities.myFormulaToString(indie.getMath()) + "]";
}
CiIndex.setText(freshIndex);
reacID.setText(reac.getId()+dimInID);
reacName.setText(reac.getName());
if (bioModel.getPortByIdRef(reac.getId())!=null) {
onPort.setSelected(true);
} else {
onPort.setSelected(false);
}
if (reac.getReversible()) {
reacReverse.setSelectedItem("true");
}
else {
reacReverse.setSelectedItem("false");
}
if (reac.getFast()) {
reacFast.setSelectedItem("true");
}
else {
reacFast.setSelectedItem("false");
}
if (bioModel.getSBMLDocument().getLevel() > 2) {
reactionComp.setSelectedItem(reac.getCompartment());
}
complex = null;
production = null;
if (reac.isSetSBOTerm()) {
if (BioModel.isComplexReaction(reac)) {
complex = reac;
reacID.setEnabled(false);
reacName.setEnabled(false);
onPort.setEnabled(false);
reacReverse.setEnabled(false);
reacFast.setEnabled(false);
reactionComp.setEnabled(false);
reacAddParam.setEnabled(false);
reacRemoveParam.setEnabled(false);
reacEditParam.setEnabled(false);
addProduct.setEnabled(false);
removeProduct.setEnabled(false);
editProduct.setEnabled(false);
products.removeMouseListener(this);
addModifier.setEnabled(false);
removeModifier.setEnabled(false);
editModifier.setEnabled(false);
modifiers.removeMouseListener(this);
kineticLaw.setEditable(false);
clearKineticLaw.setEnabled(false);
reacParameters.setEnabled(false);
useMassAction.setEnabled(false);
} else if (BioModel.isConstitutiveReaction(reac) || BioModel.isDegradationReaction(reac) ||
BioModel.isDiffusionReaction(reac)) {
reacID.setEnabled(false);
reacName.setEnabled(false);
onPort.setEnabled(false);
reacReverse.setEnabled(false);
reacFast.setEnabled(false);
reactionComp.setEnabled(false);
reacAddParam.setEnabled(false);
reacRemoveParam.setEnabled(false);
reacEditParam.setEnabled(false);
addReactant.setEnabled(false);
removeReactant.setEnabled(false);
editReactant.setEnabled(false);
reactants.removeMouseListener(this);
addProduct.setEnabled(false);
removeProduct.setEnabled(false);
editProduct.setEnabled(false);
products.removeMouseListener(this);
addModifier.setEnabled(false);
removeModifier.setEnabled(false);
editModifier.setEnabled(false);
modifiers.removeMouseListener(this);
kineticLaw.setEditable(false);
useMassAction.setEnabled(false);
clearKineticLaw.setEnabled(false);
reacParameters.setEnabled(false);
} else if (BioModel.isProductionReaction(reac)) {
production = reac;
reacID.setEnabled(false);
reacName.setEnabled(false);
onPort.setEnabled(false);
reacReverse.setEnabled(false);
reacFast.setEnabled(false);
reactionComp.setEnabled(false);
reacAddParam.setEnabled(false);
reacRemoveParam.setEnabled(false);
reacEditParam.setEnabled(false);
addReactant.setEnabled(false);
removeReactant.setEnabled(false);
editReactant.setEnabled(false);
reactants.removeMouseListener(this);
kineticLaw.setEditable(false);
clearKineticLaw.setEnabled(false);
reacParameters.setEnabled(false);
useMassAction.setEnabled(false);
}
}
}
else {
String NEWreactionId = "r0";
int i = 0;
while (bioModel.isSIdInUse(NEWreactionId)) {
i++;
NEWreactionId = "r" + i;
}
reacID.setText(NEWreactionId);
}
if (paramsOnly) {
reacID.setEditable(false);
reacName.setEditable(false);
reacReverse.setEnabled(false);
reacFast.setEnabled(false);
reacAddParam.setEnabled(false);
reacRemoveParam.setEnabled(false);
addReactant.setEnabled(false);
removeReactant.setEnabled(false);
editReactant.setEnabled(false);
addProduct.setEnabled(false);
removeProduct.setEnabled(false);
editProduct.setEnabled(false);
addModifier.setEnabled(false);
removeModifier.setEnabled(false);
editModifier.setEnabled(false);
kineticLaw.setEditable(false);
useMassAction.setEnabled(false);
clearKineticLaw.setEnabled(false);
reactionComp.setEnabled(false);
onPort.setEnabled(false);
}
Object[] options1 = { option, "Cancel" };
int value = JOptionPane.showOptionDialog(Gui.frame, reactionPanel, "Reaction Editor", JOptionPane.YES_NO_OPTION, JOptionPane.PLAIN_MESSAGE,
null, options1, options1[0]);
String[] dimID = new String[]{""};
String[] dex = new String[]{""};
String[] dimensionIds = new String[]{""};
boolean error = true;
while (error && value == JOptionPane.YES_OPTION) {
error = false;
dimID = SBMLutilities.checkSizeParameters(bioModel.getSBMLDocument(), reacID.getText(), false);
if(dimID!=null){
dimensionIds = SBMLutilities.getDimensionIds("",dimID.length-1);
error = SBMLutilities.checkID(bioModel.getSBMLDocument(), dimID[0].trim(), reactionId.trim(), false);
}
else{
error = true;
}
if(reactionComp.isEnabled() && !error){
SBase variable = SBMLutilities.getElementBySId(bioModel.getSBMLDocument(), (String)reactionComp.getSelectedItem());
dex = SBMLutilities.checkIndices(CiIndex.getText().trim(), variable, bioModel.getSBMLDocument(), dimensionIds, "compartment", dimID, null, null);
error = (dex==null);
}
if (!error) {
if (complex==null && production==null && kineticLaw.getText().trim().equals("")) {
JOptionPane.showMessageDialog(Gui.frame, "A reaction must have a kinetic law.", "Enter A Kinetic Law", JOptionPane.ERROR_MESSAGE);
error = true;
}
else if ((changedReactants.size() == 0) && (changedProducts.size() == 0)) {
JOptionPane.showMessageDialog(Gui.frame, "A reaction must have at least one reactant or product.", "No Reactants or Products",
JOptionPane.ERROR_MESSAGE);
error = true;
}
else if(kineticFluxLabel.getSelectedItem().equals("Kinetic Law:")){
if (complex==null && production==null && SBMLutilities.myParseFormula(kineticLaw.getText().trim()) == null) {
JOptionPane.showMessageDialog(Gui.frame, "Unable to parse kinetic law.", "Kinetic Law Error", JOptionPane.ERROR_MESSAGE);
error = true;
}
else if (complex==null && production==null){
ArrayList<String> invalidKineticVars = getInvalidVariablesInReaction(kineticLaw.getText().trim(), dimensionIds, true, "", false);
if (invalidKineticVars.size() > 0) {
String invalid = "";
for (int i = 0; i < invalidKineticVars.size(); i++) {
if (i == invalidKineticVars.size() - 1) {
invalid += invalidKineticVars.get(i);
}
else {
invalid += invalidKineticVars.get(i) + "\n";
}
}
String message;
message = "Kinetic law contains unknown variables.\n\n" + "Unknown variables:\n" + invalid;
JTextArea messageArea = new JTextArea(message);
messageArea.setLineWrap(true);
messageArea.setWrapStyleWord(true);
messageArea.setEditable(false);
JScrollPane scrolls = new JScrollPane();
scrolls.setMinimumSize(new Dimension(300, 300));
scrolls.setPreferredSize(new Dimension(300, 300));
scrolls.setViewportView(messageArea);
JOptionPane.showMessageDialog(Gui.frame, scrolls, "Kinetic Law Error", JOptionPane.ERROR_MESSAGE);
error = true;
}
if (!error) {
error = SBMLutilities.checkNumFunctionArguments(bioModel.getSBMLDocument(), SBMLutilities.myParseFormula(kineticLaw.getText().trim()));
}
if (!error) {
error = SBMLutilities.checkFunctionArgumentTypes(bioModel.getSBMLDocument(), SBMLutilities.myParseFormula(kineticLaw.getText().trim()));
}
}
else {
error = !fluxBoundisGood(kineticLaw.getText().replaceAll("\\s",""), reactionId);
}
}
}
if(kineticFluxLabel.getSelectedItem().equals("Kinetic Law:")){
if (!error && complex==null && production==null) {
if (SBMLutilities.returnsBoolean(bioModel.addBooleans(kineticLaw.getText().trim()), bioModel.getSBMLDocument().getModel())) {
JOptionPane.showMessageDialog(Gui.frame, "Kinetic law must evaluate to a number.", "Number Expected", JOptionPane.ERROR_MESSAGE);
error = true;
}
}
}
if (!error) {
if (option.equals("OK")) {
int index = reactions.getSelectedIndex();
String val = reactionId;
Reaction react = bioModel.getSBMLDocument().getModel().getReaction(val);
ListOf remove;
long size;
if (react.getKineticLaw()==null) {
react.createKineticLaw();
}
remove = react.getKineticLaw().getListOfLocalParameters();
size = react.getKineticLaw().getLocalParameterCount();
for (int i = 0; i < size; i++) {
remove.remove(0);
}
for (int i = 0; i < changedParameters.size(); i++) {
react.getKineticLaw().addLocalParameter(changedParameters.get(i));
}
remove = react.getListOfProducts();
size = react.getProductCount();
for (int i = 0; i < size; i++) {
remove.remove(0);
}
for (int i = 0; i < changedProducts.size(); i++) {
react.addProduct(changedProducts.get(i));
}
remove = react.getListOfModifiers();
size = react.getModifierCount();
for (int i = 0; i < size; i++) {
remove.remove(0);
}
for (int i = 0; i < changedModifiers.size(); i++) {
react.addModifier(changedModifiers.get(i));
}
remove = react.getListOfReactants();
size = react.getReactantCount();
for (int i = 0; i < size; i++) {
remove.remove(0);
}
for (int i = 0; i < changedReactants.size(); i++) {
react.addReactant(changedReactants.get(i));
}
if (reacReverse.getSelectedItem().equals("true")) {
react.setReversible(true);
}
else {
react.setReversible(false);
}
if (bioModel.getSBMLDocument().getLevel() > 2) {
react.setCompartment((String) reactionComp.getSelectedItem());
}
if (reacFast.getSelectedItem().equals("true")) {
react.setFast(true);
}
else {
react.setFast(false);
}
react.setId(dimID[0].trim());
react.setName(reacName.getText().trim());
Port port = bioModel.getPortByIdRef(val);
if (port!=null) {
if (onPort.isSelected()) {
port.setId(GlobalConstants.SBMLREACTION+"__"+react.getId());
port.setIdRef(react.getId());
} else {
bioModel.getSBMLCompModel().removePort(port);
}
} else {
if (onPort.isSelected()) {
port = bioModel.getSBMLCompModel().createPort();
port.setId(GlobalConstants.SBMLREACTION+"__"+react.getId());
port.setIdRef(react.getId());
}
}
if(kineticFluxLabel.getSelectedItem().equals("Kinetic Law:")){
if (complex==null && production==null) {
react.getKineticLaw().setMath(bioModel.addBooleans(kineticLaw.getText().trim()));
} else if (complex!=null) {
react.getKineticLaw().setMath(SBMLutilities.myParseFormula(BioModel.createComplexKineticLaw(complex)));
} else {
react.getKineticLaw().setMath(SBMLutilities.myParseFormula(BioModel.createProductionKineticLaw(production)));
}
error = checkKineticLawUnits(react.getKineticLaw());
int i = 0;
while (i < bioModel.getSBMLFBC().getListOfFluxBounds().size()) {
if(bioModel.getSBMLFBC().getListOfFluxBounds().get(i).getReaction().equals(reactionId)){
bioModel.getSBMLFBC().removeFluxBound(i);
} else {
i++;
}
}
}
else{
react.unsetKineticLaw();
int i = 0;
while(i < bioModel.getSBMLFBC().getListOfFluxBounds().size()){
if(bioModel.getSBMLFBC().getListOfFluxBounds().get(i).getReaction().equals(reactionId)){
bioModel.getSBMLFBC().removeFluxBound(i);
} else {
i++;
}
}
if(kineticLaw.getText().contains("<=")){
String[] userInput = kineticLaw.getText().replaceAll("\\s","").split("<=");
if (userInput.length==3) {
double greaterValue = Double.parseDouble(userInput[0]);
FluxBound fxGreater = bioModel.getSBMLFBC().createFluxBound();
fxGreater.setOperation(FluxBound.Operation.GREATER_EQUAL);
fxGreater.setValue(greaterValue);
fxGreater.setReaction(reactionId);
double lessValue = Double.parseDouble(userInput[2]);
FluxBound fxLess = bioModel.getSBMLFBC().createFluxBound();
fxLess.setOperation(FluxBound.Operation.LESS_EQUAL);
fxLess.setValue(lessValue);
fxLess.setReaction(reactionId);
}
else {
try{
double lessValue = Double.parseDouble(userInput[1]);
FluxBound fxLess = bioModel.getSBMLFBC().createFluxBound();
fxLess.setOperation(FluxBound.Operation.LESS_EQUAL);
fxLess.setValue(lessValue);
fxLess.setReaction(reactionId);
}
catch(Exception e){
double greaterValue = Double.parseDouble(userInput[0]);
FluxBound fxGreater = bioModel.getSBMLFBC().createFluxBound();
fxGreater.setOperation(FluxBound.Operation.GREATER_EQUAL);
fxGreater.setValue(greaterValue);
fxGreater.setReaction(reactionId);
}
}
}
else if(kineticLaw.getText().contains(">=")){
String[] userInput = kineticLaw.getText().replaceAll("\\s","").split(">=");
if (userInput.length==3) {
double greaterValue = Double.parseDouble(userInput[2]);
FluxBound fxGreater = bioModel.getSBMLFBC().createFluxBound();
fxGreater.setOperation(FluxBound.Operation.GREATER_EQUAL);
fxGreater.setValue(greaterValue);
fxGreater.setReaction(reactionId);
double lessValue = Double.parseDouble(userInput[0]);
FluxBound fxLess = bioModel.getSBMLFBC().createFluxBound();
fxLess.setOperation(FluxBound.Operation.LESS_EQUAL);
fxLess.setValue(lessValue);
fxLess.setReaction(reactionId);
} else {
try{
double greaterValue = Double.parseDouble(userInput[1]);
FluxBound fxGreater = bioModel.getSBMLFBC().createFluxBound();
fxGreater.setOperation(FluxBound.Operation.GREATER_EQUAL);
fxGreater.setValue(greaterValue);
fxGreater.setReaction(reactionId);
}
catch(Exception e){
double lessValue = Double.parseDouble(userInput[0]);
FluxBound fxLess = bioModel.getSBMLFBC().createFluxBound();
fxLess.setOperation(FluxBound.Operation.LESS_EQUAL);
fxLess.setValue(lessValue);
fxLess.setReaction(reactionId);
}
}
}
else{
String[] userInput = kineticLaw.getText().replaceAll("\\s","").split("=");
FluxBound fxEqual = bioModel.getSBMLFBC().createFluxBound();
fxEqual.setOperation(FluxBound.Operation.EQUAL);
fxEqual.setReaction(reactionId);
if(userInput[0].equals(reactionId)){
fxEqual.setValue(Double.parseDouble(userInput[1]));
}
else{
fxEqual.setValue(Double.parseDouble(userInput[0]));
}
}
}
if (!error) {
error = SBMLutilities.checkCycles(bioModel.getSBMLDocument());
if (error) {
JOptionPane.showMessageDialog(Gui.frame, "Cycle detected within initial assignments, assignment rules, and rate laws.",
"Cycle Detected", JOptionPane.ERROR_MESSAGE);
}
}
if (!error) {
if (index >= 0) {
if (!paramsOnly) {
reacts[index] = dimID[0];
}
reactions.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
reacts = Utility.getList(reacts, reactions);
reactions.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
Utility.sort(reacts);
reactions.setListData(reacts);
reactions.setSelectedIndex(index);
}
}
else {
changedParameters = new ArrayList<LocalParameter>();
ListOf<LocalParameter> listOfParameters = react.getKineticLaw().getListOfLocalParameters();
for (int i = 0; i < react.getKineticLaw().getLocalParameterCount(); i++) {
LocalParameter parameter = listOfParameters.get(i);
changedParameters.add(new LocalParameter(parameter));
}
changedProducts = new ArrayList<SpeciesReference>();
ListOf<SpeciesReference> listOfProducts = react.getListOfProducts();
for (int i = 0; i < react.getProductCount(); i++) {
SpeciesReference product = listOfProducts.get(i);
changedProducts.add(product);
}
changedReactants = new ArrayList<SpeciesReference>();
ListOf<SpeciesReference> listOfReactants = react.getListOfReactants();
for (int i = 0; i < react.getReactantCount(); i++) {
SpeciesReference reactant = listOfReactants.get(i);
changedReactants.add(reactant);
}
changedModifiers = new ArrayList<ModifierSpeciesReference>();
ListOf<ModifierSpeciesReference> listOfModifiers = react.getListOfModifiers();
for (int i = 0; i < react.getModifierCount(); i++) {
ModifierSpeciesReference modifier = listOfModifiers.get(i);
changedModifiers.add(modifier);
}
}
// Handle SBOL data
if (!error && inSchematic && !paramsOnly) {
if (!error) {
// Add SBOL annotation to reaction
if (sbolField.getSBOLURIs().size() > 0) {
if (!react.isSetMetaId() || react.getMetaId().equals(""))
SBMLutilities.setDefaultMetaID(bioModel.getSBMLDocument(), react,
bioModel.getMetaIDIndex());
SBOLAnnotation sbolAnnot = new SBOLAnnotation(react.getMetaId(), sbolField.getSBOLURIs(),
sbolField.getSBOLStrand());
AnnotationUtility.setSBOLAnnotation(react, sbolAnnot);
} else
AnnotationUtility.removeSBOLAnnotation(react);
}
}
// TODO: Scott - change for Plugin writing
if(!error){
ArraysSBasePlugin sBasePlugin = SBMLutilities.getArraysSBasePlugin(react);
sBasePlugin.unsetListOfDimensions();
for(int i = 0; i<dimID.length-1; i++){
org.sbml.jsbml.ext.arrays.Dimension dimX = sBasePlugin.createDimension(dimensionIds[i]);
dimX.setSize(dimID[i+1]);
dimX.setArrayDimension(i);
}
// Add the indices
sBasePlugin.unsetListOfIndices();
for(int i = 0; dex!=null && i<dex.length-1; i++){
Index indexRule = new Index();
indexRule.setArrayDimension(i);
indexRule.setReferencedAttribute("species");
ASTNode indexMath = SBMLutilities.myParseFormula(dex[i+1]);
indexRule.setMath(indexMath);
sBasePlugin.addIndex(indexRule);
}
}
}
else {
Reaction react = bioModel.getSBMLDocument().getModel().createReaction();
int index = reactions.getSelectedIndex();
if(kineticFluxLabel.getSelectedItem().equals("Kinetic Law:")){
react.createKineticLaw();
for (int i = 0; i < changedParameters.size(); i++) {
react.getKineticLaw().addLocalParameter(changedParameters.get(i));
}
}
for (int i = 0; i < changedProducts.size(); i++) {
react.addProduct(changedProducts.get(i));
}
for (int i = 0; i < changedModifiers.size(); i++) {
react.addModifier(changedModifiers.get(i));
}
for (int i = 0; i < changedReactants.size(); i++) {
react.addReactant(changedReactants.get(i));
}
if (reacReverse.getSelectedItem().equals("true")) {
react.setReversible(true);
}
else {
react.setReversible(false);
}
if (reacFast.getSelectedItem().equals("true")) {
react.setFast(true);
}
else {
react.setFast(false);
}
if (bioModel.getSBMLDocument().getLevel() > 2) {
react.setCompartment((String) reactionComp.getSelectedItem());
}
react.setId(dimID[0]);
react.setName(reacName.getText().trim());
if (onPort.isSelected()) {
Port port = bioModel.getSBMLCompModel().createPort();
port.setId(GlobalConstants.SBMLREACTION+"__"+react.getId());
port.setIdRef(react.getId());
}
if(kineticFluxLabel.getSelectedItem().equals("Kinetic Law:")){
if (complex==null && production==null) {
react.getKineticLaw().setMath(bioModel.addBooleans(kineticLaw.getText().trim()));
} else if (complex!=null) {
react.getKineticLaw().setMath(SBMLutilities.myParseFormula(BioModel.createComplexKineticLaw(complex)));
} else {
react.getKineticLaw().setMath(SBMLutilities.myParseFormula(BioModel.createProductionKineticLaw(production)));
}
error = checkKineticLawUnits(react.getKineticLaw());
}
else{
error = !fluxBoundisGood(kineticLaw.getText().replaceAll("\\s",""), reactionId);
}
if (!error) {
error = SBMLutilities.checkCycles(bioModel.getSBMLDocument());
if (error) {
JOptionPane.showMessageDialog(Gui.frame, "Cycle detected within initial assignments, assignment rules, and rate laws.",
"Cycle Detected", JOptionPane.ERROR_MESSAGE);
}
}
if (!error) {
JList add = new JList();
Object[] adding = { dimID[0] };
add.setListData(adding);
add.setSelectedIndex(0);
reactions.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
adding = Utility.add(reacts, reactions, add);
reacts = new String[adding.length];
for (int i = 0; i < adding.length; i++) {
reacts[i] = (String) adding[i];
}
Utility.sort(reacts);
reactions.setListData(reacts);
reactions.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
if (bioModel.getSBMLDocument().getModel().getReactionCount() == 1) {
reactions.setSelectedIndex(0);
}
else {
reactions.setSelectedIndex(index);
}
}
else {
removeTheReaction(bioModel, dimID[0]);
}
// TODO: Scott - change for Plugin writing
if(!error){
ArraysSBasePlugin sBasePlugin = SBMLutilities.getArraysSBasePlugin(react);
sBasePlugin.unsetListOfDimensions();
for(int i = 0; i<dimID.length-1; i++){
org.sbml.jsbml.ext.arrays.Dimension dimX = sBasePlugin.createDimension(dimensionIds[i]);
dimX.setSize(dimID[i+1]);
dimX.setArrayDimension(i);
}
// Add the indices
sBasePlugin.unsetListOfIndices();
for(int i = 0; dex!=null && i<dex.length-1; i++){
Index indexRule = new Index();
indexRule.setArrayDimension(i);
indexRule.setReferencedAttribute("species");
ASTNode indexMath = SBMLutilities.myParseFormula(dex[i+1]);
indexRule.setMath(indexMath);
sBasePlugin.addIndex(indexRule);
}
}
}
modelEditor.setDirty(true);
bioModel.makeUndoPoint();
}
if (error) {
value = JOptionPane.showOptionDialog(Gui.frame, reactionPanel, "Reaction Editor", JOptionPane.YES_NO_OPTION,
JOptionPane.PLAIN_MESSAGE, null, options1, options1[0]);
}
}
if (value == JOptionPane.NO_OPTION) {
if (option.equals("OK")) {
String reactId = reactionId;
removeTheReaction(bioModel, reactId);
bioModel.getSBMLDocument().getModel().addReaction(copyReact);
}
return;
}
}
/**
* Find invalid reaction variables in a formula
*/
private ArrayList<String> getInvalidVariablesInReaction(String formula, String[] dimensionIds, boolean isReaction, String arguments, boolean isFunction) {
ArrayList<String> validVars = new ArrayList<String>();
ArrayList<String> invalidVars = new ArrayList<String>();
Model model = bioModel.getSBMLDocument().getModel();
for (int i = 0; i < bioModel.getSBMLDocument().getModel().getFunctionDefinitionCount(); i++) {
validVars.add(model.getFunctionDefinition(i).getId());
}
if (isReaction) {
for (int i = 0; i < changedParameters.size(); i++) {
validVars.add(changedParameters.get(i).getId());
}
for (int i = 0; i < changedReactants.size(); i++) {
validVars.add(changedReactants.get(i).getSpecies());
validVars.add(changedReactants.get(i).getId());
}
for (int i = 0; i < changedProducts.size(); i++) {
validVars.add(changedProducts.get(i).getSpecies());
validVars.add(changedProducts.get(i).getId());
}
for (int i = 0; i < changedModifiers.size(); i++) {
validVars.add(changedModifiers.get(i).getSpecies());
}
if (dimensionIds != null) {
for (int i = 0; i < dimensionIds.length; i++) {
validVars.add(dimensionIds[i]);
}
}
}
else if (!isFunction) {
for (int i = 0; i < bioModel.getSBMLDocument().getModel().getSpeciesCount(); i++) {
validVars.add(model.getSpecies(i).getId());
}
}
if (isFunction) {
String[] args = arguments.split(" |\\,");
for (int i = 0; i < args.length; i++) {
validVars.add(args[i]);
}
}
else {
for (int i = 0; i < bioModel.getSBMLDocument().getModel().getCompartmentCount(); i++) {
if (bioModel.getSBMLDocument().getLevel() > 2 || (model.getCompartment(i).getSpatialDimensions() != 0)) {
validVars.add(model.getCompartment(i).getId());
}
}
for (int i = 0; i < bioModel.getSBMLDocument().getModel().getParameterCount(); i++) {
validVars.add(model.getParameter(i).getId());
}
for (int i = 0; i < bioModel.getSBMLDocument().getModel().getReactionCount(); i++) {
Reaction reaction = model.getReaction(i);
validVars.add(reaction.getId());
for (int j = 0; j < reaction.getReactantCount(); j++) {
SpeciesReference reactant = reaction.getReactant(j);
if ((reactant.isSetId()) && (!reactant.getId().equals(""))) {
validVars.add(reactant.getId());
}
}
for (int j = 0; j < reaction.getProductCount(); j++) {
SpeciesReference product = reaction.getProduct(j);
if ((product.isSetId()) && (!product.getId().equals(""))) {
validVars.add(product.getId());
}
}
}
String[] kindsL3V1 = { "ampere", "avogadro", "becquerel", "candela", "celsius", "coulomb", "dimensionless", "farad", "gram", "gray", "henry",
"hertz", "item", "joule", "katal", "kelvin", "kilogram", "litre", "lumen", "lux", "metre", "mole", "newton", "ohm", "pascal",
"radian", "second", "siemens", "sievert", "steradian", "tesla", "volt", "watt", "weber" };
for (int i = 0; i < kindsL3V1.length; i++) {
validVars.add(kindsL3V1[i]);
}
for (int i = 0; i < model.getUnitDefinitionCount(); i++) {
validVars.add(model.getUnitDefinition(i).getId());
}
}
String[] splitLaw = formula.split(" |\\(|\\)|\\,|\\*|\\+|\\/|\\-|>|=|<|\\^|%|&|\\||!|\\[|\\]|\\{|\\}");
for (int i = 0; i < splitLaw.length; i++) {
if (splitLaw[i].equals("abs") || splitLaw[i].equals("arccos") || splitLaw[i].equals("arccosh") || splitLaw[i].equals("arcsin")
|| splitLaw[i].equals("arcsinh") || splitLaw[i].equals("arctan") || splitLaw[i].equals("arctanh") || splitLaw[i].equals("arccot")
|| splitLaw[i].equals("arccoth") || splitLaw[i].equals("arccsc") || splitLaw[i].equals("arccsch") || splitLaw[i].equals("arcsec")
|| splitLaw[i].equals("arcsech") || splitLaw[i].equals("acos") || splitLaw[i].equals("acosh") || splitLaw[i].equals("asin")
|| splitLaw[i].equals("asinh") || splitLaw[i].equals("atan") || splitLaw[i].equals("atanh") || splitLaw[i].equals("acot")
|| splitLaw[i].equals("acoth") || splitLaw[i].equals("acsc") || splitLaw[i].equals("acsch") || splitLaw[i].equals("asec")
|| splitLaw[i].equals("asech") || splitLaw[i].equals("cos") || splitLaw[i].equals("cosh") || splitLaw[i].equals("cot")
|| splitLaw[i].equals("coth") || splitLaw[i].equals("csc") || splitLaw[i].equals("csch") || splitLaw[i].equals("ceil")
|| splitLaw[i].equals("factorial") || splitLaw[i].equals("exp") || splitLaw[i].equals("floor") || splitLaw[i].equals("ln")
|| splitLaw[i].equals("log") || splitLaw[i].equals("sqr") || splitLaw[i].equals("log10") || splitLaw[i].equals("pow")
|| splitLaw[i].equals("sqrt") || splitLaw[i].equals("root") || splitLaw[i].equals("piecewise") || splitLaw[i].equals("sec")
|| splitLaw[i].equals("sech") || splitLaw[i].equals("sin") || splitLaw[i].equals("sinh") || splitLaw[i].equals("tan")
|| splitLaw[i].equals("tanh") || splitLaw[i].equals("") || splitLaw[i].equals("and") || splitLaw[i].equals("or")
|| splitLaw[i].equals("xor") || splitLaw[i].equals("not") || splitLaw[i].equals("eq") || splitLaw[i].equals("geq")
|| splitLaw[i].equals("leq") || splitLaw[i].equals("gt") || splitLaw[i].equals("neq") || splitLaw[i].equals("lt")
|| splitLaw[i].equals("delay") || splitLaw[i].equals("t") || splitLaw[i].equals("time") || splitLaw[i].equals("true")
|| splitLaw[i].equals("false") || splitLaw[i].equals("pi") || splitLaw[i].equals("exponentiale")
|| ((bioModel.getSBMLDocument().getLevel() > 2) && (splitLaw[i].equals("avogadro")))) {
}
else {
String temp = splitLaw[i];
if (splitLaw[i].substring(splitLaw[i].length() - 1, splitLaw[i].length()).equals("e")) {
temp = splitLaw[i].substring(0, splitLaw[i].length() - 1);
}
try {
Double.parseDouble(temp);
}
catch (Exception e1) {
if (!validVars.contains(splitLaw[i])) {
invalidVars.add(splitLaw[i]);
}
}
}
}
return invalidVars;
}
	/**
	 * Creates a frame used to edit a reaction's parameters or create new ones.
	 */
private void reacParametersEditor(BioModel bioModel,String option) {
if (option.equals("OK") && reacParameters.getSelectedIndex() == -1) {
JOptionPane.showMessageDialog(Gui.frame, "No parameter selected.", "Must Select A Parameter", JOptionPane.ERROR_MESSAGE);
return;
}
JPanel parametersPanel;
if (paramsOnly) {
parametersPanel = new JPanel(new GridLayout(7, 2));
}
else {
parametersPanel = new JPanel(new GridLayout(5, 2));
}
JLabel idLabel = new JLabel("ID:");
JLabel nameLabel = new JLabel("Name:");
JLabel valueLabel = new JLabel("Value:");
JLabel unitsLabel = new JLabel("Units:");
JLabel onPortLabel = new JLabel("Is Mapped to a Port:");
paramOnPort = new JCheckBox();
reacParamID = new JTextField();
reacParamName = new JTextField();
reacParamValue = new JTextField();
reacParamUnits = new JComboBox();
reacParamUnits.addItem("( none )");
Model model = bioModel.getSBMLDocument().getModel();
ListOf<UnitDefinition> listOfUnits = model.getListOfUnitDefinitions();
String[] units = new String[model.getUnitDefinitionCount()];
for (int i = 0; i < model.getUnitDefinitionCount(); i++) {
UnitDefinition unit = listOfUnits.get(i);
units[i] = unit.getId();
// GET OTHER THINGS
}
for (int i = 0; i < units.length; i++) {
if (bioModel.getSBMLDocument().getLevel() > 2
|| (!units[i].equals("substance") && !units[i].equals("volume") && !units[i].equals("area") && !units[i].equals("length") && !units[i]
.equals("time"))) {
reacParamUnits.addItem(units[i]);
}
}
String[] unitIdsL2V4 = { "substance", "volume", "area", "length", "time", "ampere", "becquerel", "candela", "celsius", "coulomb",
"dimensionless", "farad", "gram", "gray", "henry", "hertz", "item", "joule", "katal", "kelvin", "kilogram", "litre", "lumen", "lux",
"metre", "mole", "newton", "ohm", "pascal", "radian", "second", "siemens", "sievert", "steradian", "tesla", "volt", "watt", "weber" };
String[] unitIdsL3V1 = { "ampere", "avogadro", "becquerel", "candela", "celsius", "coulomb", "dimensionless", "farad", "gram", "gray",
"henry", "hertz", "item", "joule", "katal", "kelvin", "kilogram", "litre", "lumen", "lux", "metre", "mole", "newton", "ohm",
"pascal", "radian", "second", "siemens", "sievert", "steradian", "tesla", "volt", "watt", "weber" };
String[] unitIds;
if (bioModel.getSBMLDocument().getLevel() < 3) {
unitIds = unitIdsL2V4;
}
else {
unitIds = unitIdsL3V1;
}
for (int i = 0; i < unitIds.length; i++) {
reacParamUnits.addItem(unitIds[i]);
}
String[] list = { "Original", "Modified" };
String[] list1 = { "1", "2" };
final JComboBox type = new JComboBox(list);
final JTextField start = new JTextField();
final JTextField stop = new JTextField();
final JTextField step = new JTextField();
final JComboBox level = new JComboBox(list1);
final JButton sweep = new JButton("Sweep");
sweep.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
Object[] options = { "Ok", "Close" };
JPanel p = new JPanel(new GridLayout(4, 2));
JLabel startLabel = new JLabel("Start:");
JLabel stopLabel = new JLabel("Stop:");
JLabel stepLabel = new JLabel("Step:");
JLabel levelLabel = new JLabel("Level:");
p.add(startLabel);
p.add(start);
p.add(stopLabel);
p.add(stop);
p.add(stepLabel);
p.add(step);
p.add(levelLabel);
p.add(level);
int i = JOptionPane.showOptionDialog(Gui.frame, p, "Sweep", JOptionPane.YES_NO_OPTION, JOptionPane.PLAIN_MESSAGE, null, options,
options[0]);
if (i == JOptionPane.YES_OPTION) {
double startVal = 0.0;
double stopVal = 0.0;
double stepVal = 0.0;
try {
startVal = Double.parseDouble(start.getText().trim());
stopVal = Double.parseDouble(stop.getText().trim());
stepVal = Double.parseDouble(step.getText().trim());
}
catch (Exception e1) {
}
reacParamValue.setText("(" + startVal + "," + stopVal + "," + stepVal + "," + level.getSelectedItem() + ")");
}
}
});
if (paramsOnly) {
reacParamID.setEditable(false);
reacParamName.setEditable(false);
reacParamValue.setEnabled(false);
reacParamUnits.setEnabled(false);
sweep.setEnabled(false);
paramOnPort.setEnabled(false);
}
String selectedID = "";
if (option.equals("OK")) {
String v = ((String) reacParameters.getSelectedValue()).split(" ")[0];
LocalParameter paramet = null;
for (LocalParameter p : changedParameters) {
if (p.getId().equals(v)) {
paramet = p;
}
}
if (paramet==null) return;
reacParamID.setText(paramet.getId());
selectedID = paramet.getId();
reacParamName.setText(paramet.getName());
reacParamValue.setText("" + paramet.getValue());
if (paramet.isSetUnits()) {
reacParamUnits.setSelectedItem(paramet.getUnits());
}
if (bioModel.getPortByMetaIdRef(paramet.getMetaId())!=null) {
paramOnPort.setSelected(true);
} else {
paramOnPort.setSelected(false);
}
if (paramsOnly && (((String) reacParameters.getSelectedValue()).contains("Modified"))
|| (((String) reacParameters.getSelectedValue()).contains("Custom"))
|| (((String) reacParameters.getSelectedValue()).contains("Sweep"))) {
type.setSelectedItem("Modified");
sweep.setEnabled(true);
reacParamValue.setText(((String) reacParameters.getSelectedValue()).split(" ")[((String) reacParameters.getSelectedValue())
.split(" ").length - 1]);
reacParamValue.setEnabled(true);
reacParamUnits.setEnabled(false);
if (reacParamValue.getText().trim().startsWith("(")) {
try {
start.setText((reacParamValue.getText().trim()).split(",")[0].substring(1).trim());
stop.setText((reacParamValue.getText().trim()).split(",")[1].trim());
step.setText((reacParamValue.getText().trim()).split(",")[2].trim());
int lev = Integer.parseInt((reacParamValue.getText().trim()).split(",")[3].replace(")", "").trim());
if (lev == 1) {
level.setSelectedIndex(0);
}
else {
level.setSelectedIndex(1);
}
}
catch (Exception e1) {
}
}
}
}
parametersPanel.add(idLabel);
parametersPanel.add(reacParamID);
parametersPanel.add(nameLabel);
parametersPanel.add(reacParamName);
if (paramsOnly) {
JLabel typeLabel = new JLabel("Value Type:");
type.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
if (!((String) type.getSelectedItem()).equals("Original")) {
sweep.setEnabled(true);
reacParamValue.setEnabled(true);
reacParamUnits.setEnabled(false);
}
else {
sweep.setEnabled(false);
reacParamValue.setEnabled(false);
reacParamUnits.setEnabled(false);
SBMLDocument d = SBMLutilities.readSBML(file);
KineticLaw KL = d.getModel().getReaction(selectedReaction).getKineticLaw();
ListOf<LocalParameter> list = KL.getListOfLocalParameters();
int number = -1;
for (int i = 0; i < KL.getLocalParameterCount(); i++) {
if (list.get(i).getId().equals(((String) reacParameters.getSelectedValue()).split(" ")[0])) {
number = i;
}
}
reacParamValue.setText(d.getModel().getReaction(selectedReaction).getKineticLaw()
.getLocalParameter(number).getValue()
+ "");
if (d.getModel().getReaction(selectedReaction).getKineticLaw().getLocalParameter(number)
.isSetUnits()) {
reacParamUnits.setSelectedItem(d.getModel().getReaction(selectedReaction)
.getKineticLaw().getLocalParameter(number).getUnits());
}
reacParamValue.setText(d.getModel().getReaction(selectedReaction).getKineticLaw().getLocalParameter(number).getValue() + "");
}
}
});
parametersPanel.add(typeLabel);
parametersPanel.add(type);
}
parametersPanel.add(valueLabel);
parametersPanel.add(reacParamValue);
if (paramsOnly) {
parametersPanel.add(new JLabel());
parametersPanel.add(sweep);
}
parametersPanel.add(unitsLabel);
parametersPanel.add(reacParamUnits);
parametersPanel.add(onPortLabel);
parametersPanel.add(paramOnPort);
Object[] options = { option, "Cancel" };
int value = JOptionPane.showOptionDialog(Gui.frame, parametersPanel, "Parameter Editor", JOptionPane.YES_NO_OPTION,
JOptionPane.PLAIN_MESSAGE, null, options, options[0]);
boolean error = true;
while (error && value == JOptionPane.YES_OPTION) {
error = SBMLutilities.checkID(bioModel.getSBMLDocument(), reacParamID.getText().trim(), selectedID, true);
if (!error) {
if (thisReactionParams.contains(reacParamID.getText().trim()) && (!reacParamID.getText().trim().equals(selectedID))) {
JOptionPane.showMessageDialog(Gui.frame, "ID is not unique.", "ID Not Unique", JOptionPane.ERROR_MESSAGE);
error = true;
}
}
if (!error) {
double val = 0;
if (reacParamValue.getText().trim().startsWith("(") && reacParamValue.getText().trim().endsWith(")")) {
try {
Double.parseDouble((reacParamValue.getText().trim()).split(",")[0].substring(1).trim());
Double.parseDouble((reacParamValue.getText().trim()).split(",")[1].trim());
Double.parseDouble((reacParamValue.getText().trim()).split(",")[2].trim());
int lev = Integer.parseInt((reacParamValue.getText().trim()).split(",")[3].replace(")", "").trim());
if (lev != 1 && lev != 2) {
error = true;
JOptionPane.showMessageDialog(Gui.frame, "The level can only be 1 or 2.", "Error", JOptionPane.ERROR_MESSAGE);
}
}
catch (Exception e1) {
error = true;
JOptionPane.showMessageDialog(Gui.frame, "Invalid sweeping parameters.", "Error", JOptionPane.ERROR_MESSAGE);
}
}
else {
try {
val = Double.parseDouble(reacParamValue.getText().trim());
}
catch (Exception e1) {
JOptionPane
.showMessageDialog(Gui.frame, "The value must be a real number.", "Enter A Valid Value", JOptionPane.ERROR_MESSAGE);
error = true;
}
}
if (!error) {
String unit = (String) reacParamUnits.getSelectedItem();
String param = "";
if (paramsOnly && !((String) type.getSelectedItem()).equals("Original")) {
int index = reacParameters.getSelectedIndex();
String[] splits = reacParams[index].split(" ");
for (int i = 0; i < splits.length - 2; i++) {
param += splits[i] + " ";
}
if (!splits[splits.length - 2].equals("Modified") && !splits[splits.length - 2].equals("Custom")
&& !splits[splits.length - 2].equals("Sweep")) {
param += splits[splits.length - 2] + " " + splits[splits.length - 1] + " ";
}
if (reacParamValue.getText().trim().startsWith("(") && reacParamValue.getText().trim().endsWith(")")) {
double startVal = Double.parseDouble((reacParamValue.getText().trim()).split(",")[0].substring(1).trim());
double stopVal = Double.parseDouble((reacParamValue.getText().trim()).split(",")[1].trim());
double stepVal = Double.parseDouble((reacParamValue.getText().trim()).split(",")[2].trim());
int lev = Integer.parseInt((reacParamValue.getText().trim()).split(",")[3].replace(")", "").trim());
param += "Sweep (" + startVal + "," + stopVal + "," + stepVal + "," + lev + ")";
}
else {
param += "Modified " + val;
}
}
else {
if (unit.equals("( none )")) {
param = reacParamID.getText().trim() + " " + val;
}
else {
param = reacParamID.getText().trim() + " " + val + " " + unit;
}
}
if (option.equals("OK")) {
int index = reacParameters.getSelectedIndex();
String v = ((String) reacParameters.getSelectedValue()).split(" ")[0];
LocalParameter paramet = null;
for (LocalParameter p : changedParameters) {
if (p.getId().equals(v)) {
paramet = p;
}
}
if (paramet==null) return;
reacParameters.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
reacParams = Utility.getList(reacParams, reacParameters);
reacParameters.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
paramet.setId(reacParamID.getText().trim());
paramet.setName(reacParamName.getText().trim());
SBMLutilities.setMetaId(paramet, reacID.getText()+"___"+reacParamID.getText().trim());
for (int i = 0; i < thisReactionParams.size(); i++) {
if (thisReactionParams.get(i).equals(v)) {
thisReactionParams.set(i, reacParamID.getText().trim());
}
}
paramet.setValue(val);
if (unit.equals("( none )")) {
paramet.unsetUnits();
}
else {
paramet.setUnits(unit);
}
reacParams[index] = param;
Utility.sort(reacParams);
reacParameters.setListData(reacParams);
reacParameters.setSelectedIndex(index);
if (paramsOnly) {
int remove = -1;
for (int i = 0; i < parameterChanges.size(); i++) {
if (parameterChanges.get(i).split(" ")[0].equals(selectedReaction + "/" + reacParamID.getText().trim())) {
remove = i;
}
}
String reacValue = selectedReaction;
int index1 = reactions.getSelectedIndex();
if (remove != -1) {
parameterChanges.remove(remove);
reactions.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
reacts = Utility.getList(reacts, reactions);
reactions.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
reacts[index1] = reacValue.split(" ")[0];
Utility.sort(reacts);
reactions.setListData(reacts);
reactions.setSelectedIndex(index1);
}
if (!((String) type.getSelectedItem()).equals("Original")) {
parameterChanges.add(reacValue + "/" + param);
reactions.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
reacts = Utility.getList(reacts, reactions);
reactions.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
reacts[index1] = reacValue + " Modified";
Utility.sort(reacts);
reactions.setListData(reacts);
reactions.setSelectedIndex(index1);
}
}
else {
kineticLaw.setText(SBMLutilities.updateFormulaVar(kineticLaw.getText().trim(), v, reacParamID.getText().trim()));
}
Port port = bioModel.getPortByMetaIdRef(paramet.getMetaId());
if (port!=null) {
if (paramOnPort.isSelected()) {
port.setId(GlobalConstants.LOCALPARAMETER+"__"+paramet.getMetaId());
port.setMetaIdRef(paramet.getMetaId());
} else {
bioModel.getSBMLCompModel().removePort(port);
}
} else {
if (paramOnPort.isSelected()) {
port = bioModel.getSBMLCompModel().createPort();
port.setId(GlobalConstants.LOCALPARAMETER+"__"+paramet.getMetaId());
port.setMetaIdRef(paramet.getMetaId());
}
}
}
else {
int index = reacParameters.getSelectedIndex();
// Parameter paramet = new Parameter(BioSim.SBML_LEVEL,
// BioSim.SBML_VERSION);
LocalParameter paramet = new LocalParameter(bioModel.getSBMLDocument().getLevel(), bioModel.getSBMLDocument().getVersion());
changedParameters.add(paramet);
paramet.setId(reacParamID.getText().trim());
paramet.setName(reacParamName.getText().trim());
SBMLutilities.setMetaId(paramet, reacID.getText()+"___"+reacParamID.getText().trim());
thisReactionParams.add(reacParamID.getText().trim());
paramet.setValue(val);
if (!unit.equals("( none )")) {
paramet.setUnits(unit);
}
if (paramOnPort.isSelected()) {
Port port = bioModel.getSBMLCompModel().createPort();
port.setId(GlobalConstants.LOCALPARAMETER+"__"+paramet.getMetaId());
port.setMetaIdRef(paramet.getMetaId());
}
JList add = new JList();
Object[] adding = { param };
add.setListData(adding);
add.setSelectedIndex(0);
reacParameters.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
adding = Utility.add(reacParams, reacParameters, add);
reacParams = new String[adding.length];
for (int i = 0; i < adding.length; i++) {
reacParams[i] = (String) adding[i];
}
Utility.sort(reacParams);
reacParameters.setListData(reacParams);
reacParameters.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
try {
if (bioModel.getSBMLDocument().getModel().getReaction(selectedReaction).getKineticLaw()
.getLocalParameterCount() == 1) {
reacParameters.setSelectedIndex(0);
}
else {
reacParameters.setSelectedIndex(index);
}
}
catch (Exception e2) {
reacParameters.setSelectedIndex(0);
}
}
modelEditor.setDirty(true);
bioModel.makeUndoPoint();
}
}
if (error) {
value = JOptionPane.showOptionDialog(Gui.frame, parametersPanel, "Parameter Editor", JOptionPane.YES_NO_OPTION,
JOptionPane.PLAIN_MESSAGE, null, options, options[0]);
}
}
if (value == JOptionPane.NO_OPTION) {
return;
}
}
private void addLocalParameter(String id,double val) {
LocalParameter paramet = new LocalParameter(bioModel.getSBMLDocument().getLevel(), bioModel.getSBMLDocument().getVersion());
changedParameters.add(paramet);
paramet.setId(id);
paramet.setName(id);
SBMLutilities.setMetaId(paramet, reacID.getText()+"___"+id);
thisReactionParams.add(id);
paramet.setValue(val);
JList add = new JList();
String param = id + " " + val;
Object[] adding = { param };
add.setListData(adding);
add.setSelectedIndex(0);
reacParameters.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
adding = Utility.add(reacParams, reacParameters, add);
reacParams = new String[adding.length];
for (int i = 0; i < adding.length; i++) {
reacParams[i] = (String) adding[i];
}
Utility.sort(reacParams);
reacParameters.setListData(reacParams);
reacParameters.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
reacParameters.setSelectedIndex(0);
modelEditor.setDirty(true);
bioModel.makeUndoPoint();
}
/**
* Creates a frame used to edit products or create new ones.
*/
public void productsEditor(BioModel bioModel, String option, String selectedProductId, SpeciesReference product,
boolean inSchematic, Reaction reaction) {
JPanel productsPanel;
productsPanel = new JPanel(new GridLayout(6, 2));
JLabel productIdLabel = new JLabel("Id:");
JLabel productNameLabel = new JLabel("Name:");
JLabel speciesLabel = new JLabel("Species:");
Object[] stoiciOptions = { "Stoichiometry", "Stoichiometry Math" };
stoiciLabel = new JComboBox(stoiciOptions);
JLabel stoichiometryLabel = new JLabel("Stoichiometry:");
JLabel constantLabel = new JLabel("Constant:");
Object[] productConstantOptions = { "true", "false" };
productConstant = new JComboBox(productConstantOptions);
ListOf<Species> listOfSpecies = bioModel.getSBMLDocument().getModel().getListOfSpecies();
String[] speciesList = new String[bioModel.getSBMLDocument().getModel().getSpeciesCount()];
for (int i = 0; i < bioModel.getSBMLDocument().getModel().getSpeciesCount(); i++) {
speciesList[i] = listOfSpecies.get(i).getId();
}
Utility.sort(speciesList);
productSpecies = new JComboBox();
PiIndex = new JTextField(10);
PiIndex.setEnabled(true);
productSpecies.addActionListener(this);
if (inSchematic) {
productSpecies.setEnabled(false);
} else {
productSpecies.setEnabled(true);
}
for (int i = 0; i < speciesList.length; i++) {
Species species = bioModel.getSBMLDocument().getModel().getSpecies(speciesList[i]);
if (species.getBoundaryCondition() || (!species.getConstant() && Rules.keepVarRateRule(bioModel, "", speciesList[i]))) {
productSpecies.addItem(speciesList[i]);
}
}
productId = new JTextField("");
/*
* int j = 0; while (usedIDs.contains("product"+j)) { j++; }
* productId.setText("product"+j);
*/
productName = new JTextField("");
productStoichiometry = new JTextField("1");
String selectedID = "";
if (option.equals("OK")) {
String v = selectedProductId;
if (product == null || !inSchematic) {
for (SpeciesReference p : changedProducts) {
if (p.getSpecies().equals(v)) {
product = p;
}
}
}
if (product==null) return;
if (product.isSetName()) {
productName.setText(product.getName());
}
productSpecies.setSelectedItem(product.getSpecies());
productStoichiometry.setText("" + product.getStoichiometry());
if (product.isSetId()) {
selectedID = product.getId();
productId.setText(product.getId());
InitialAssignment init = bioModel.getSBMLDocument().getModel().getInitialAssignment(selectedID);
if (init!=null) {
productStoichiometry.setText("" + bioModel.removeBooleans(init.getMath()));
}
}
if (!product.getConstant()) {
productConstant.setSelectedItem("false");
}
ArraysSBasePlugin sBasePlugin = SBMLutilities.getArraysSBasePlugin(product);
//TODO: Make sure it reads correctly
String dimInID = "";
for(int i = sBasePlugin.getDimensionCount()-1; i>=0; i
org.sbml.jsbml.ext.arrays.Dimension dimX = sBasePlugin.getDimensionByArrayDimension(i);
dimInID += "[" + dimX.getSize() + "]";
}
productId.setText(productId.getText()+dimInID);
// TODO: Scott - change for Plugin reading
String freshIndex = "";
for(int i = sBasePlugin.getIndexCount()-1; i>=0; i
Index indie = sBasePlugin.getIndex(i);
freshIndex += "[" + SBMLutilities.myFormulaToString(indie.getMath()) + "]";
}
PiIndex.setText(freshIndex);
}
if (production!=null) {
double np = bioModel.getSBMLDocument().getModel().getParameter(GlobalConstants.STOICHIOMETRY_STRING).getValue();
if (production.getKineticLaw().getLocalParameter(GlobalConstants.STOICHIOMETRY_STRING)!=null) {
np = production.getKineticLaw().getLocalParameter(GlobalConstants.STOICHIOMETRY_STRING).getValue();
}
productStoichiometry.setText(""+np);
productStoichiometry.setEnabled(false);
}
String[] reactdimIDs = null;
String[] reactdimIDSizes = null;
if (reaction!=null) {
ArraysSBasePlugin reactPlugin = SBMLutilities.getArraysSBasePlugin(reaction);
reactdimIDs = new String[reactPlugin.getDimensionCount()];
reactdimIDSizes = new String[reactPlugin.getDimensionCount()+1];
reactdimIDSizes[0] = reaction.getId();
for(int i = 0; i<reactPlugin.getDimensionCount(); i++){
org.sbml.jsbml.ext.arrays.Dimension dimX = reactPlugin.getDimensionByArrayDimension(i);
reactdimIDs[i] = dimX.getId();
reactdimIDSizes[i+1] = dimX.getSize();
}
} else {
reactdimIDs = new String[]{""};
reactdimIDSizes = SBMLutilities.checkSizeParameters(bioModel.getSBMLDocument(), reacID.getText(), false);
if(reactdimIDSizes!=null){
reactdimIDs = SBMLutilities.getDimensionIds("",reactdimIDSizes.length-1);
}
}
productsPanel.add(productIdLabel);
productsPanel.add(productId);
productsPanel.add(productNameLabel);
productsPanel.add(productName);
productsPanel.add(speciesLabel);
productsPanel.add(productSpecies);
productsPanel.add(new JLabel("Indices:"));
productsPanel.add(PiIndex);
if (bioModel.getSBMLDocument().getLevel() < 3) {
productsPanel.add(stoiciLabel);
}
else {
productsPanel.add(stoichiometryLabel);
}
productsPanel.add(productStoichiometry);
if (bioModel.getSBMLDocument().getLevel() > 2) {
productsPanel.add(constantLabel);
productsPanel.add(productConstant);
}
if (speciesList.length == 0) {
JOptionPane.showMessageDialog(Gui.frame, "There are no species availiable to be products." + "\nAdd species to this sbml file first.",
"No Species", JOptionPane.ERROR_MESSAGE);
return;
}
Object[] options = { option, "Cancel" };
int value = JOptionPane.showOptionDialog(Gui.frame, productsPanel, "Products Editor", JOptionPane.YES_NO_OPTION, JOptionPane.PLAIN_MESSAGE,
null, options, options[0]);
String[] dimID = new String[]{""};
String[] dex = new String[]{""};
String[] dimensionIds = new String[]{""};
boolean error = true;
while (error && value == JOptionPane.YES_OPTION) {
error = false;
String prod = "";
double val = 1.0;
dimID = SBMLutilities.checkSizeParameters(bioModel.getSBMLDocument(), productId.getText(), true);
if(dimID!=null){
dimensionIds = SBMLutilities.getDimensionIds("p",dimID.length-1);
if (dimID[0].equals("")) {
error = SBMLutilities.variableInUse(bioModel.getSBMLDocument(), selectedID, false, true, true);
}
else {
error = SBMLutilities.checkID(bioModel.getSBMLDocument(), dimID[0], selectedID, false);
}
}
else{
error = true;
}
if(!error){
SBase variable = SBMLutilities.getElementBySId(bioModel.getSBMLDocument(), (String)productSpecies.getSelectedItem());
dex = SBMLutilities.checkIndices(PiIndex.getText(), variable, bioModel.getSBMLDocument(), dimensionIds, "species", dimID, reactdimIDs, reactdimIDSizes);
error = (dex==null);
}
if (!error) {
if (stoiciLabel.getSelectedItem().equals("Stoichiometry")) {
InitialAssignments.removeInitialAssignment(bioModel, selectedID);
try {
val = Double.parseDouble(productStoichiometry.getText().trim());
}
catch (Exception e1) {
if (productId.getText().equals("")) {
JOptionPane.showMessageDialog(Gui.frame, "The stoichiometry must be a real number if no id is provided.", "Enter A Valid Value",
JOptionPane.ERROR_MESSAGE);
error = true;
} else {
// TODO: this needs to send product dimension when it exists
error = InitialAssignments.addInitialAssignment(bioModel, productStoichiometry.getText().trim(), dimID);
val = 1.0;
}
}
if (val <= 0) {
JOptionPane.showMessageDialog(Gui.frame, "The stoichiometry value must be greater than 0.", "Enter A Valid Value",
JOptionPane.ERROR_MESSAGE);
error = true;
}
prod = productSpecies.getSelectedItem() + " " + val;
}
else {
prod = productSpecies.getSelectedItem() + " " + productStoichiometry.getText().trim();
}
}
int index = -1;
if (!error) {
if (product == null || !inSchematic) {
if (option.equals("OK")) {
index = products.getSelectedIndex();
}
products.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
proda = Utility.getList(proda, products);
products.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
if (index >= 0) {
products.setSelectedIndex(index);
}
for (int i = 0; i < proda.length; i++) {
if (i != index) {
if (proda[i].split(" ")[0].equals(productSpecies.getSelectedItem())) {
error = true;
JOptionPane.showMessageDialog(Gui.frame, "Unable to add species as a product.\n"
+ "Each species can only be used as a product once.", "Species Can Only Be Used Once",
JOptionPane.ERROR_MESSAGE);
}
}
}
}
}
if (!error) {
if (stoiciLabel.getSelectedItem().equals("Stoichiometry Math")) {
if (productStoichiometry.getText().trim().equals("")) {
JOptionPane.showMessageDialog(Gui.frame, "Stoichiometry math must have formula.", "Enter Stoichiometry Formula",
JOptionPane.ERROR_MESSAGE);
error = true;
}
else if (SBMLutilities.myParseFormula(productStoichiometry.getText().trim()) == null) {
JOptionPane.showMessageDialog(Gui.frame, "Stoichiometry formula is not valid.", "Enter Valid Formula",
JOptionPane.ERROR_MESSAGE);
error = true;
}
else {
ArrayList<String> invalidVars = getInvalidVariablesInReaction(productStoichiometry.getText().trim(), dimensionIds, true, "", false);
if (invalidVars.size() > 0) {
String invalid = "";
for (int i = 0; i < invalidVars.size(); i++) {
if (i == invalidVars.size() - 1) {
invalid += invalidVars.get(i);
}
else {
invalid += invalidVars.get(i) + "\n";
}
}
String message;
message = "Stoiciometry math contains unknown variables.\n\n" + "Unknown variables:\n" + invalid;
JTextArea messageArea = new JTextArea(message);
messageArea.setLineWrap(true);
messageArea.setWrapStyleWord(true);
messageArea.setEditable(false);
JScrollPane scrolls = new JScrollPane();
scrolls.setMinimumSize(new Dimension(300, 300));
scrolls.setPreferredSize(new Dimension(300, 300));
scrolls.setViewportView(messageArea);
JOptionPane.showMessageDialog(Gui.frame, scrolls, "Stoiciometry Math Error", JOptionPane.ERROR_MESSAGE);
error = true;
}
if (!error) {
error = SBMLutilities.checkNumFunctionArguments(bioModel.getSBMLDocument(),
SBMLutilities.myParseFormula(productStoichiometry.getText().trim()));
}
if (!error) {
error = SBMLutilities.checkFunctionArgumentTypes(bioModel.getSBMLDocument(),
SBMLutilities.myParseFormula(productStoichiometry.getText().trim()));
}
if (!error) {
if (SBMLutilities.returnsBoolean(SBMLutilities.myParseFormula(productStoichiometry.getText().trim()), bioModel.getSBMLDocument().getModel())) {
JOptionPane.showMessageDialog(Gui.frame, "Stoichiometry math must evaluate to a number.", "Number Expected",
JOptionPane.ERROR_MESSAGE);
error = true;
}
}
}
}
}
if (!error && option.equals("OK") && productConstant.getSelectedItem().equals("true")) {
String id = selectedID;
error = SBMLutilities.checkConstant(bioModel.getSBMLDocument(), "Product stoiciometry", id);
}
if (!error) {
if (option.equals("OK")) {
String v = selectedProductId;
SpeciesReference produ = product;
if (product == null || !inSchematic) {
for (SpeciesReference p : changedProducts) {
if (p.getSpecies().equals(v)) {
produ = p;
}
}
products.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
proda = Utility.getList(proda, products);
products.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
}
if (produ==null) return;
produ.setId(dimID[0]);
produ.setName(productName.getText().trim());
produ.setSpecies((String) productSpecies.getSelectedItem());
produ.setStoichiometry(val);
if (productConstant.getSelectedItem().equals("true")) {
produ.setConstant(true);
}
else {
produ.setConstant(false);
}
// TODO: Scott - change for Plugin writing
if(!error){
ArraysSBasePlugin sBasePlugin = SBMLutilities.getArraysSBasePlugin(produ);
sBasePlugin.unsetListOfDimensions();
for(int i = 0; i<dimID.length-1; i++){
org.sbml.jsbml.ext.arrays.Dimension dimX = sBasePlugin.createDimension(dimensionIds[i]);
dimX.setSize(dimID[i+1]);
dimX.setArrayDimension(i);
}
// Add the indices
sBasePlugin.unsetListOfIndices();
for(int i = 0; dex!=null && i<dex.length-1; i++){
Index indexRule = new Index();
indexRule.setArrayDimension(i);
indexRule.setReferencedAttribute("species");
ASTNode indexMath = SBMLutilities.myParseFormula(dex[i+1]);
indexRule.setMath(indexMath);
sBasePlugin.addIndex(indexRule);
}
}
if (product == null || !inSchematic) {
proda[index] = prod;
Utility.sort(proda);
products.setListData(proda);
products.setSelectedIndex(index);
}
SBMLutilities.updateVarId(bioModel.getSBMLDocument(), false, selectedID, dimID[0]);
if (product == null || !inSchematic) {
kineticLaw.setText(SBMLutilities.updateFormulaVar(kineticLaw.getText().trim(), selectedID, dimID[0]));
}
}
else {
// SpeciesReference produ = new
// SpeciesReference(BioSim.SBML_LEVEL, BioSim.SBML_VERSION);
SpeciesReference produ = new SpeciesReference(bioModel.getSBMLDocument().getLevel(), bioModel.getSBMLDocument().getVersion());
produ.setId(dimID[0]);
produ.setName(productName.getText().trim());
changedProducts.add(produ);
produ.setSpecies((String) productSpecies.getSelectedItem());
produ.setStoichiometry(val);
if (productConstant.getSelectedItem().equals("true")) {
produ.setConstant(true);
}
else {
produ.setConstant(false);
}
if(!error){
ArraysSBasePlugin sBasePlugin = SBMLutilities.getArraysSBasePlugin(produ);
for(int i = 0; i<dimID.length-1; i++){
org.sbml.jsbml.ext.arrays.Dimension dimX = sBasePlugin.createDimension(dimensionIds[i]);
dimX.setSize(dimID[i+1]);
dimX.setArrayDimension(i);
}
// Add the indices
for(int i = 0; dex!=null && i<dex.length-1; i++){
Index indexRule = new Index();
indexRule.setArrayDimension(i);
indexRule.setReferencedAttribute("species");
ASTNode indexMath = SBMLutilities.myParseFormula(dex[i+1]);
indexRule.setMath(indexMath);
sBasePlugin.addIndex(indexRule);
}
}
JList add = new JList();
Object[] adding = { prod };
add.setListData(adding);
add.setSelectedIndex(0);
products.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
adding = Utility.add(proda, products, add);
proda = new String[adding.length];
for (int i = 0; i < adding.length; i++) {
proda[i] = (String) adding[i];
}
Utility.sort(proda);
products.setListData(proda);
products.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
products.setSelectedIndex(0);
}
modelEditor.setDirty(true);
bioModel.makeUndoPoint();
}
if (error) {
value = JOptionPane.showOptionDialog(Gui.frame, productsPanel, "Products Editor", JOptionPane.YES_NO_OPTION,
JOptionPane.PLAIN_MESSAGE, null, options, options[0]);
}
}
if (value == JOptionPane.NO_OPTION) {
return;
}
}
private LocalParameter getChangedParameter(String paramStr) {
for (LocalParameter r : changedParameters) {
if (r.getId().equals(paramStr)) {
return r;
}
}
return null;
}
/**
* Creates a frame used to edit modifiers or create new ones.
*/
public void modifiersEditor(BioModel bioModel,String option,String selectedModifierId, ModifierSpeciesReference modifier,
boolean inSchematic, Reaction reaction) {
JPanel modifiersPanel;
MiIndex = new JTextField(10);
modifierId = new JTextField(10);
modifierName = new JTextField(10);
JLabel speciesLabel = new JLabel("Species:");
ListOf<Species> listOfSpecies = bioModel.getSBMLDocument().getModel().getListOfSpecies();
String[] speciesList = new String[bioModel.getSBMLDocument().getModel().getSpeciesCount()];
for (int i = 0; i < bioModel.getSBMLDocument().getModel().getSpeciesCount(); i++) {
speciesList[i] = listOfSpecies.get(i).getId();
}
Utility.sort(speciesList);
Object[] choices = speciesList;
modifierSpecies = new JComboBox(choices);
modifierSpecies.addActionListener(this);
if (inSchematic) {
modifierSpecies.setEnabled(false);
} else {
modifierSpecies.setEnabled(true);
}
JLabel SBOTermsLabel = new JLabel("Type");
JLabel RepStoichiometryLabel = new JLabel("Stoichiometry of repression (nc)");
JLabel RepBindingLabel = new JLabel("Repression binding equilibrium (Kr)");
JLabel ActStoichiometryLabel = new JLabel("Stoichiometry of activation (nc)");
JLabel ActBindingLabel = new JLabel("Activation binding equilibrium (Ka)");
String selectedID = "";
if (option.equals("OK")) {
String v = selectedModifierId;
if (modifier == null || !inSchematic) {
for (ModifierSpeciesReference p : changedModifiers) {
if (p.getSpecies().equals(v)) {
modifier = p;
}
}
}
if (modifier==null) return;
if (modifier.isSetName()) {
modifierName.setText(modifier.getName());
}
if (modifier.isSetId()) {
selectedID = modifier.getId();
modifierId.setText(modifier.getId());
}
// TODO: Scott - change for Plugin reading
ArraysSBasePlugin sBasePlugin = SBMLutilities.getArraysSBasePlugin(modifier);
String dimInID = "";
for(int i = sBasePlugin.getDimensionCount()-1; i>=0; i
org.sbml.jsbml.ext.arrays.Dimension dimX = sBasePlugin.getDimensionByArrayDimension(i);
dimInID += "[" + dimX.getSize() + "]";
}
modifierId.setText(modifierId.getText()+dimInID);
String freshIndex = "";
for(int i = sBasePlugin.getIndexCount()-1; i>=0; i
Index indie = sBasePlugin.getIndex(i);
freshIndex += "[" + SBMLutilities.myFormulaToString(indie.getMath()) + "]";
}
MiIndex.setText(freshIndex);
modifierSpecies.setSelectedItem(modifier.getSpecies());
if (production!=null) {
if (BioModel.isPromoter(modifier)) {
String [] sboTerms = new String[1];
sboTerms[0] = "Promoter";
SBOTerms = new JComboBox(sboTerms);
SBOTerms.setSelectedItem("Promoter");
} else {
String [] sboTerms = new String[4];
sboTerms[0] = "Repression";
sboTerms[1] = "Activation";
sboTerms[2] = "Dual activity";
sboTerms[3] = "No influence";
SBOTerms = new JComboBox(sboTerms);
if (BioModel.isRepressor(modifier)) {
SBOTerms.setSelectedItem("Repression");
} else if (BioModel.isActivator(modifier)) {
SBOTerms.setSelectedItem("Activation");
} else if (BioModel.isRegulator(modifier)) {
SBOTerms.setSelectedItem("Dual activity");
} else if (BioModel.isNeutral(modifier)) {
SBOTerms.setSelectedItem("No influence");
}
}
}
} else {
String [] sboTerms = new String[4];
sboTerms[0] = "Repression";
sboTerms[1] = "Activation";
sboTerms[2] = "Dual activity";
sboTerms[3] = "No influence";
SBOTerms = new JComboBox(sboTerms);
}
if (production==null) {
modifiersPanel = new JPanel(new GridLayout(4, 2));
} else {
if (SBOTerms.getSelectedItem().equals("Promoter")) {
modifiersPanel = new JPanel(new GridLayout(5, 2));
} else {
modifiersPanel = new JPanel(new GridLayout(9, 2));
}
}
String[] reactdimIDs = null;
String[] reactdimIDSizes = null;
if (reaction!=null) {
ArraysSBasePlugin reactPlugin = SBMLutilities.getArraysSBasePlugin(reaction);
reactdimIDs = new String[reactPlugin.getDimensionCount()];
reactdimIDSizes = new String[reactPlugin.getDimensionCount()+1];
reactdimIDSizes[0] = reaction.getId();
for(int i = 0; i<reactPlugin.getDimensionCount(); i++){
org.sbml.jsbml.ext.arrays.Dimension dimX = reactPlugin.getDimensionByArrayDimension(i);
reactdimIDs[i] = dimX.getId();
reactdimIDSizes[i+1] = dimX.getSize();
}
} else {
reactdimIDs = new String[]{""};
reactdimIDSizes = SBMLutilities.checkSizeParameters(bioModel.getSBMLDocument(), reacID.getText(), false);
if(reactdimIDSizes!=null){
reactdimIDs = SBMLutilities.getDimensionIds("",reactdimIDSizes.length-1);
}
}
modifiersPanel.add(new JLabel("Id:"));
modifiersPanel.add(modifierId);
modifiersPanel.add(new JLabel("Name:"));
modifiersPanel.add(modifierName);
modifiersPanel.add(speciesLabel);
modifiersPanel.add(modifierSpecies);
modifiersPanel.add(new JLabel("Indices:"));
modifiersPanel.add(MiIndex);
if (production!=null) {
modifiersPanel.add(SBOTermsLabel);
modifiersPanel.add(SBOTerms);
if (SBOTerms.getSelectedItem().equals("Promoter")) {
modifierSpecies.setEnabled(false);
SBOTerms.setEnabled(false);
} else {
String selectedSpecies = (String)modifierSpecies.getSelectedItem();
modifiersPanel.add(RepStoichiometryLabel);
repCooperativity = new JTextField();
double nc = bioModel.getSBMLDocument().getModel().getParameter(GlobalConstants.COOPERATIVITY_STRING).getValue();
repCooperativity.setText(""+nc);
LocalParameter p = getChangedParameter(GlobalConstants.COOPERATIVITY_STRING+"_"+selectedSpecies+"_r");
if (p!=null) {
repCooperativity.setText(""+p.getValue());
}
modifiersPanel.add(repCooperativity);
modifiersPanel.add(RepBindingLabel);
repBinding = new JTextField(bioModel.getParameter(GlobalConstants.FORWARD_KREP_STRING) + "/" +
bioModel.getParameter(GlobalConstants.REVERSE_KREP_STRING));
LocalParameter kr_f = getChangedParameter(GlobalConstants.FORWARD_KREP_STRING.replace("_","_" + selectedSpecies + "_"));
LocalParameter kr_r = getChangedParameter(GlobalConstants.REVERSE_KREP_STRING.replace("_","_" + selectedSpecies + "_"));
if (kr_f!=null && kr_r!=null) {
repBinding.setText(""+kr_f.getValue()+"/"+kr_r.getValue());
}
modifiersPanel.add(repBinding);
modifiersPanel.add(ActStoichiometryLabel);
actCooperativity = new JTextField();
nc = bioModel.getSBMLDocument().getModel().getParameter(GlobalConstants.COOPERATIVITY_STRING).getValue();
actCooperativity.setText(""+nc);
p = getChangedParameter(GlobalConstants.COOPERATIVITY_STRING+"_"+selectedSpecies+"_a");
if (p!=null) {
actCooperativity.setText(""+p.getValue());
}
modifiersPanel.add(actCooperativity);
modifiersPanel.add(ActBindingLabel);
actBinding = new JTextField(bioModel.getParameter(GlobalConstants.FORWARD_KACT_STRING) + "/" +
bioModel.getParameter(GlobalConstants.REVERSE_KACT_STRING));
LocalParameter ka_f = getChangedParameter(GlobalConstants.FORWARD_KACT_STRING.replace("_","_" + selectedSpecies + "_"));
LocalParameter ka_r = getChangedParameter(GlobalConstants.REVERSE_KACT_STRING.replace("_","_" + selectedSpecies + "_"));
if (ka_f!=null && ka_r!=null) {
actBinding.setText(""+ka_f.getValue()+"/"+ka_r.getValue());
}
modifiersPanel.add(actBinding);
}
}
if (choices.length == 0) {
JOptionPane.showMessageDialog(Gui.frame, "There are no species availiable to be modifiers." + "\nAdd species to this sbml file first.",
"No Species", JOptionPane.ERROR_MESSAGE);
return;
}
Object[] options = { option, "Cancel" };
int value = JOptionPane.showOptionDialog(Gui.frame, modifiersPanel, "Modifiers Editor", JOptionPane.YES_NO_OPTION, JOptionPane.PLAIN_MESSAGE,
null, options, options[0]);
String[] dex = new String[]{""};
String[] dimensionIds = new String[]{""};
String[] dimID = new String[]{""};
boolean error = true;
while (error && value == JOptionPane.YES_OPTION) {
error = false;
int index = -1;
dimID = SBMLutilities.checkSizeParameters(bioModel.getSBMLDocument(), modifierId.getText(), true);
if(dimID!=null){
dimensionIds = SBMLutilities.getDimensionIds("m",dimID.length-1);
if (dimID[0].trim().equals("")) {
error = SBMLutilities.variableInUse(bioModel.getSBMLDocument(), selectedID, false, true, true);
}
else {
error = SBMLutilities.checkID(bioModel.getSBMLDocument(), dimID[0].trim(), selectedID, false);
}
}
else{
error = true;
}
if(!error){
SBase variable = SBMLutilities.getElementBySId(bioModel.getSBMLDocument(), (String)modifierSpecies.getSelectedItem());
dex = SBMLutilities.checkIndices(MiIndex.getText(), variable, bioModel.getSBMLDocument(), dimensionIds, "species", dimID, reactdimIDs, reactdimIDSizes);
error = (dex==null);
}
if (modifier == null || !inSchematic) {
if (option.equals("OK")) {
index = modifiers.getSelectedIndex();
}
modifiers.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
modifierArray = Utility.getList(modifierArray, modifiers);
modifiers.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
if (index >= 0) {
modifiers.setSelectedIndex(index);
}
for (int i = 0; i < modifierArray.length; i++) {
if (i != index) {
if (modifierArray[i].split(" ")[0].equals(modifierSpecies.getSelectedItem())) {
error = true;
JOptionPane.showMessageDialog(Gui.frame, "Unable to add species as a modifier.\n"
+ "Each species can only be used as a modifier once.", "Species Can Only Be Used Once",
JOptionPane.ERROR_MESSAGE);
}
}
}
}
String mod = (String) modifierSpecies.getSelectedItem();
double repCoop = 0.0;
double actCoop = 0.0;
double repBindf = 0.0;
double repBindr = 1.0;
double actBindf = 0.0;
double actBindr = 1.0;
if (production!=null) {
try {
repCoop = Double.parseDouble(repCooperativity.getText().trim());
}
catch (Exception e1) {
JOptionPane.showMessageDialog(Gui.frame, "The repression cooperativity must be a real number.", "Enter A Valid Value",
JOptionPane.ERROR_MESSAGE);
error = true;
}
try {
repBindf = Double.parseDouble(repBinding.getText().trim().split("/")[0]);
repBindr = Double.parseDouble(repBinding.getText().trim().split("/")[1]);
}
catch (Exception e1) {
JOptionPane.showMessageDialog(Gui.frame, "The repression binding must be a forward rate / reverse rate.", "Enter A Valid Value",
JOptionPane.ERROR_MESSAGE);
error = true;
}
try {
actCoop = Double.parseDouble(actCooperativity.getText().trim());
}
catch (Exception e1) {
JOptionPane.showMessageDialog(Gui.frame, "The activation cooperativity must be a real number.", "Enter A Valid Value",
JOptionPane.ERROR_MESSAGE);
error = true;
}
try {
actBindf = Double.parseDouble(actBinding.getText().trim().split("/")[0]);
actBindr = Double.parseDouble(actBinding.getText().trim().split("/")[1]);
}
catch (Exception e1) {
JOptionPane.showMessageDialog(Gui.frame, "The activation binding must be a forward rate / reverse rate.", "Enter A Valid Value",
JOptionPane.ERROR_MESSAGE);
error = true;
}
}
if (!error) {
if (option.equals("OK")) {
String v = selectedModifierId;
ModifierSpeciesReference modi = modifier;
modi.setId(dimID[0]);
modi.setName(modifierName.getText());
if (modifier == null || !inSchematic) {
for (ModifierSpeciesReference p : changedModifiers) {
if (p.getSpecies().equals(mod)) {
modi = p;
}
}
modifiers.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
modifierArray = Utility.getList(modifierArray, modifiers);
modifiers.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
//modifiers.setSelectedIndex(index);
}
modi.setSpecies((String) modifierSpecies.getSelectedItem());
if (production!=null) {
if (SBOTerms.getSelectedItem().equals("Repression")) {
modi.setSBOTerm(GlobalConstants.SBO_REPRESSION);
} else if (SBOTerms.getSelectedItem().equals("Activation")) {
modi.setSBOTerm(GlobalConstants.SBO_ACTIVATION);
} else if (SBOTerms.getSelectedItem().equals("Dual activity")) {
modi.setSBOTerm(GlobalConstants.SBO_DUAL_ACTIVITY);
} else if (SBOTerms.getSelectedItem().equals("No influence")) {
modi.setSBOTerm(GlobalConstants.SBO_NEUTRAL);
} else if (SBOTerms.getSelectedItem().equals("Promoter")) {
modi.setSBOTerm(GlobalConstants.SBO_PROMOTER_MODIFIER);
}
}
if (production!=null) {
double nc = bioModel.getSBMLDocument().getModel().getParameter(GlobalConstants.COOPERATIVITY_STRING).getValue();
String ncStr = GlobalConstants.COOPERATIVITY_STRING+"_"+mod+"_r";
LocalParameter paramet = getChangedParameter(ncStr);
if (paramet != null) {
removeLocalParameter(ncStr);
}
if (nc!=repCoop) {
addLocalParameter(ncStr,repCoop);
}
ncStr = GlobalConstants.COOPERATIVITY_STRING+"_"+mod+"_a";
paramet = getChangedParameter(ncStr);
if (paramet != null) {
removeLocalParameter(ncStr);
}
if (nc!=actCoop) {
addLocalParameter(ncStr,actCoop);
}
double bindf = bioModel.getSBMLDocument().getModel().getParameter(GlobalConstants.FORWARD_KREP_STRING).getValue();
double bindr = bioModel.getSBMLDocument().getModel().getParameter(GlobalConstants.REVERSE_KREP_STRING).getValue();
LocalParameter kr_f = getChangedParameter(GlobalConstants.FORWARD_KREP_STRING.replace("_","_" + mod + "_"));
LocalParameter kr_r = getChangedParameter(GlobalConstants.REVERSE_KREP_STRING.replace("_","_" + mod + "_"));
if (kr_f != null) {
removeLocalParameter(GlobalConstants.FORWARD_KREP_STRING.replace("_","_" + mod + "_"));
}
if (kr_r != null) {
removeLocalParameter(GlobalConstants.REVERSE_KREP_STRING.replace("_","_" + mod + "_"));
}
if (repBindf!=bindf || repBindr!=bindr) {
addLocalParameter(GlobalConstants.FORWARD_KREP_STRING.replace("_","_" + mod + "_"),repBindf);
addLocalParameter(GlobalConstants.REVERSE_KREP_STRING.replace("_","_" + mod + "_"),repBindr);
}
bindf = bioModel.getSBMLDocument().getModel().getParameter(GlobalConstants.FORWARD_KACT_STRING).getValue();
bindr = bioModel.getSBMLDocument().getModel().getParameter(GlobalConstants.REVERSE_KACT_STRING).getValue();
LocalParameter ka_f = getChangedParameter(GlobalConstants.FORWARD_KACT_STRING.replace("_","_" + mod + "_"));
LocalParameter ka_r = getChangedParameter(GlobalConstants.REVERSE_KACT_STRING.replace("_","_" + mod + "_"));
if (ka_f != null) {
removeLocalParameter(GlobalConstants.FORWARD_KACT_STRING.replace("_","_" + mod + "_"));
}
if (ka_r != null) {
removeLocalParameter(GlobalConstants.REVERSE_KACT_STRING.replace("_","_" + mod + "_"));
}
if (actBindf!=bindf || actBindr!=bindr) {
addLocalParameter(GlobalConstants.FORWARD_KACT_STRING.replace("_","_" + mod + "_"),actBindf);
addLocalParameter(GlobalConstants.REVERSE_KACT_STRING.replace("_","_" + mod + "_"),actBindr);
}
}
if (modifier == null || !inSchematic) {
modifierArray[index] = mod;
Utility.sort(modifierArray);
modifiers.setListData(modifierArray);
modifiers.setSelectedIndex(index);
}
// TODO: Scott - change for Plugin writing
ArraysSBasePlugin sBasePlugin = SBMLutilities.getArraysSBasePlugin(modifier);
sBasePlugin.unsetListOfDimensions();
for(int i = 0; i<dimID.length-1; i++){
org.sbml.jsbml.ext.arrays.Dimension dimX = sBasePlugin.createDimension(dimensionIds[i]);
dimX.setSize(dimID[i+1]);
dimX.setArrayDimension(i);
}
sBasePlugin.unsetListOfIndices();
for(int i = 0; dex!=null && i<dex.length-1; i++){
Index indexRule = new Index();
indexRule.setArrayDimension(i);
indexRule.setReferencedAttribute("species");
ASTNode indexMath = SBMLutilities.myParseFormula(dex[i+1]);
indexRule.setMath(indexMath);
sBasePlugin.addIndex(indexRule);
}
}
else {
modifiers.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
modifierArray = Utility.getList(modifierArray, modifiers);
modifiers.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
modifiers.setSelectedIndex(index);
ModifierSpeciesReference modi = new ModifierSpeciesReference(bioModel.getSBMLDocument().getLevel(), bioModel.getSBMLDocument().getVersion());
modi.setId(dimID[0]);
modi.setName(modifierName.getText());
changedModifiers.add(modi);
modi.setSpecies(mod);
if (production!=null) {
if (SBOTerms.getSelectedItem().equals("Repression")) {
modi.setSBOTerm(GlobalConstants.SBO_REPRESSION);
} else if (SBOTerms.getSelectedItem().equals("Activation")) {
modi.setSBOTerm(GlobalConstants.SBO_ACTIVATION);
} else if (SBOTerms.getSelectedItem().equals("Dual activity")) {
modi.setSBOTerm(GlobalConstants.SBO_DUAL_ACTIVITY);
} else if (SBOTerms.getSelectedItem().equals("No influence")) {
modi.setSBOTerm(GlobalConstants.SBO_NEUTRAL);
} else if (SBOTerms.getSelectedItem().equals("Promoter")) {
modi.setSBOTerm(GlobalConstants.SBO_PROMOTER_MODIFIER);
}
}
if (production!=null) {
double nc = bioModel.getSBMLDocument().getModel().getParameter(GlobalConstants.COOPERATIVITY_STRING).getValue();
String ncStr = GlobalConstants.COOPERATIVITY_STRING+"_"+mod+"_r";
LocalParameter paramet = getChangedParameter(ncStr);
if (paramet != null) {
removeLocalParameter(ncStr);
}
if (nc!=repCoop) {
addLocalParameter(ncStr,repCoop);
}
ncStr = GlobalConstants.COOPERATIVITY_STRING+"_"+mod+"_a";
paramet = getChangedParameter(ncStr);
if (paramet != null) {
removeLocalParameter(ncStr);
}
if (nc!=actCoop) {
addLocalParameter(ncStr,actCoop);
}
double bindf = bioModel.getSBMLDocument().getModel().getParameter(GlobalConstants.FORWARD_KREP_STRING).getValue();
double bindr = bioModel.getSBMLDocument().getModel().getParameter(GlobalConstants.REVERSE_KREP_STRING).getValue();
LocalParameter kr_f = getChangedParameter(GlobalConstants.FORWARD_KREP_STRING.replace("_","_" + mod + "_"));
LocalParameter kr_r = getChangedParameter(GlobalConstants.REVERSE_KREP_STRING.replace("_","_" + mod + "_"));
if (kr_f != null) {
removeLocalParameter(GlobalConstants.FORWARD_KREP_STRING.replace("_","_" + mod + "_"));
}
if (kr_r != null) {
removeLocalParameter(GlobalConstants.REVERSE_KREP_STRING.replace("_","_" + mod + "_"));
}
if (repBindf!=bindf || repBindr!=bindr) {
addLocalParameter(GlobalConstants.FORWARD_KREP_STRING.replace("_","_" + mod + "_"),repBindf);
addLocalParameter(GlobalConstants.REVERSE_KREP_STRING.replace("_","_" + mod + "_"),repBindr);
}
bindf = bioModel.getSBMLDocument().getModel().getParameter(GlobalConstants.FORWARD_KACT_STRING).getValue();
bindr = bioModel.getSBMLDocument().getModel().getParameter(GlobalConstants.REVERSE_KACT_STRING).getValue();
LocalParameter ka_f = getChangedParameter(GlobalConstants.FORWARD_KACT_STRING.replace("_","_" + mod + "_"));
LocalParameter ka_r = getChangedParameter(GlobalConstants.REVERSE_KACT_STRING.replace("_","_" + mod + "_"));
if (ka_f != null) {
removeLocalParameter(GlobalConstants.FORWARD_KACT_STRING.replace("_","_" + mod + "_"));
}
if (ka_r != null) {
removeLocalParameter(GlobalConstants.REVERSE_KACT_STRING.replace("_","_" + mod + "_"));
}
if (actBindf!=bindf || actBindr!=bindr) {
addLocalParameter(GlobalConstants.FORWARD_KACT_STRING.replace("_","_" + mod + "_"),actBindf);
addLocalParameter(GlobalConstants.REVERSE_KACT_STRING.replace("_","_" + mod + "_"),actBindr);
}
}
JList add = new JList();
Object[] adding = { mod };
add.setListData(adding);
add.setSelectedIndex(0);
modifiers.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
adding = Utility.add(modifierArray, modifiers, add);
modifierArray = new String[adding.length];
for (int i = 0; i < adding.length; i++) {
modifierArray[i] = (String) adding[i];
}
Utility.sort(modifierArray);
modifiers.setListData(modifierArray);
modifiers.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
try {
if (bioModel.getSBMLDocument().getModel().getReaction(selectedReaction).getModifierCount() == 1) {
modifiers.setSelectedIndex(0);
}
else {
modifiers.setSelectedIndex(index);
}
}
catch (Exception e2) {
modifiers.setSelectedIndex(0);
}
ArraysSBasePlugin sBasePlugin = SBMLutilities.getArraysSBasePlugin(modifier);
for(int i = 0; i<dimID.length-1; i++){
org.sbml.jsbml.ext.arrays.Dimension dimX = sBasePlugin.createDimension(dimensionIds[i]);
dimX.setSize(dimID[i]);
dimX.setArrayDimension(i);
}
for(int i = 0; dex!=null && i<dex.length-1; i++){
Index indexRule = new Index();
indexRule.setArrayDimension(i);
indexRule.setReferencedAttribute("species");
ASTNode indexMath = SBMLutilities.myParseFormula(dex[i+1]);
indexRule.setMath(indexMath);
sBasePlugin.addIndex(indexRule);
}
}
}
modelEditor.setDirty(true);
bioModel.makeUndoPoint();
if (error) {
value = JOptionPane.showOptionDialog(Gui.frame, modifiersPanel, "Modifiers Editor", JOptionPane.YES_NO_OPTION,
JOptionPane.PLAIN_MESSAGE, null, options, options[0]);
}
}
if (value == JOptionPane.NO_OPTION) {
return;
}
}
/**
* Creates a frame used to edit reactants or create new ones.
*/
public void reactantsEditor(BioModel gcm, String option, String selectedReactantId, SpeciesReference reactant,
boolean inSchematic, Reaction reaction) {
JPanel reactantsPanel;
if (gcm.getSBMLDocument().getLevel() < 3) {
reactantsPanel = new JPanel(new GridLayout(5, 2));
}
else {
reactantsPanel = new JPanel(new GridLayout(6, 2));
}
JLabel reactantIdLabel = new JLabel("Id:");
JLabel reactantNameLabel = new JLabel("Name:");
JLabel speciesLabel = new JLabel("Species:");
Object[] stoiciOptions = { "Stoichiometry", "Stoichiometry Math" };
stoiciLabel = new JComboBox(stoiciOptions);
JLabel stoichiometryLabel = new JLabel("Stoichiometry:");
JLabel constantLabel = new JLabel("Constant:");
Object[] reactantConstantOptions = { "true", "false" };
reactantConstant = new JComboBox(reactantConstantOptions);
ListOf<Species> listOfSpecies = gcm.getSBMLDocument().getModel().getListOfSpecies();
String[] speciesList = new String[gcm.getSBMLDocument().getModel().getSpeciesCount()];
for (int i = 0; i < gcm.getSBMLDocument().getModel().getSpeciesCount(); i++) {
speciesList[i] = listOfSpecies.get(i).getId();
}
Utility.sort(speciesList);
RiIndex = new JTextField(10);
RiIndex.setEnabled(true);
reactantSpecies = new JComboBox();
reactantSpecies.addActionListener(this);
if (inSchematic) {
reactantSpecies.setEnabled(false);
} else {
reactantSpecies.setEnabled(true);
}
// TODO: Scott: if (inSchematic) extract dimensions from reaction else from reaction id field
for (int i = 0; i < speciesList.length; i++) {
Species species = gcm.getSBMLDocument().getModel().getSpecies(speciesList[i]);
if (species.getBoundaryCondition() || (!species.getConstant() && Rules.keepVarRateRule(gcm, "", speciesList[i]))) {
reactantSpecies.addItem(speciesList[i]);
}
}
reactantId = new JTextField("");
reactantName = new JTextField("");
reactantStoichiometry = new JTextField("1");
String selectedID = "";
if (complex!=null) {
double nc = bioModel.getSBMLDocument().getModel().getParameter(GlobalConstants.COOPERATIVITY_STRING).getValue();
reactantStoichiometry.setText(""+nc);
}
if (option.equals("OK")) {
String v = selectedReactantId;
if (reactant == null) {
for (SpeciesReference r : changedReactants) {
if (r.getSpecies().equals(v)) {
reactant = r;
}
}
}
reactantSpecies.setSelectedItem(reactant.getSpecies());
if (reactant.isSetName()) {
reactantName.setText(reactant.getName());
}
reactantStoichiometry.setText("" + reactant.getStoichiometry());
if (reactant.isSetId()) {
selectedID = reactant.getId();
reactantId.setText(reactant.getId());
InitialAssignment init = bioModel.getSBMLDocument().getModel().getInitialAssignment(selectedID);
if (init!=null) {
reactantStoichiometry.setText("" + bioModel.removeBooleans(init.getMath()));
}
}
if (!reactant.getConstant()) {
reactantConstant.setSelectedItem("false");
}
// TODO: Scott - change for Plugin reading
ArraysSBasePlugin sBasePlugin = SBMLutilities.getArraysSBasePlugin(reactant);
String dimInID = "";
for(int i = sBasePlugin.getDimensionCount()-1; i>=0; i
org.sbml.jsbml.ext.arrays.Dimension dimX = sBasePlugin.getDimensionByArrayDimension(i);
dimInID += "[" + dimX.getSize() + "]";
}
reactantId.setText(reactantId.getText()+dimInID);
String freshIndex = "";
for(int i = sBasePlugin.getIndexCount()-1; i>=0; i
Index indie = sBasePlugin.getIndex(i);
freshIndex += "[" + SBMLutilities.myFormulaToString(indie.getMath()) + "]";
}
RiIndex.setText(freshIndex);
if (complex!=null) {
if (complex.getKineticLaw().getLocalParameter(GlobalConstants.COOPERATIVITY_STRING+"_"+selectedID)!=null) {
double nc = complex.getKineticLaw().getLocalParameter(GlobalConstants.COOPERATIVITY_STRING+"_"+selectedID).getValue();
reactantStoichiometry.setText(""+nc);
}
}
}
String[] reactdimIDs = null;
String[] reactdimIDSizes = null;
if (reaction!=null) {
ArraysSBasePlugin reactPlugin = SBMLutilities.getArraysSBasePlugin(reaction);
reactdimIDs = new String[reactPlugin.getDimensionCount()];
reactdimIDSizes = new String[reactPlugin.getDimensionCount()+1];
reactdimIDSizes[0] = reaction.getId();
for(int i = 0; i<reactPlugin.getDimensionCount(); i++){
org.sbml.jsbml.ext.arrays.Dimension dimX = reactPlugin.getDimensionByArrayDimension(i);
reactdimIDs[i] = dimX.getId();
reactdimIDSizes[i+1] = dimX.getSize();
}
} else {
reactdimIDs = new String[]{""};
reactdimIDSizes = SBMLutilities.checkSizeParameters(bioModel.getSBMLDocument(), reacID.getText(), false);
if(reactdimIDSizes!=null){
reactdimIDs = SBMLutilities.getDimensionIds("",reactdimIDSizes.length-1);
}
}
reactantsPanel.add(reactantIdLabel);
reactantsPanel.add(reactantId);
reactantsPanel.add(reactantNameLabel);
reactantsPanel.add(reactantName);
reactantsPanel.add(speciesLabel);
reactantsPanel.add(reactantSpecies);
reactantsPanel.add(new JLabel("Indices:"));
reactantsPanel.add(RiIndex);
if (gcm.getSBMLDocument().getLevel() < 3) {
reactantsPanel.add(stoiciLabel);
}
else {
reactantsPanel.add(stoichiometryLabel);
}
reactantsPanel.add(reactantStoichiometry);
if (gcm.getSBMLDocument().getLevel() > 2) {
reactantsPanel.add(constantLabel);
reactantsPanel.add(reactantConstant);
}
if (speciesList.length == 0) {
JOptionPane.showMessageDialog(Gui.frame, "There are no species availiable to be reactants." + "\nAdd species to this sbml file first.",
"No Species", JOptionPane.ERROR_MESSAGE);
return;
}
Object[] options = { option, "Cancel" };
int value = JOptionPane.showOptionDialog(Gui.frame, reactantsPanel, "Reactants Editor", JOptionPane.YES_NO_OPTION, JOptionPane.PLAIN_MESSAGE,
null, options, options[0]);
String[] dimID = new String[]{""};
String[] dex = new String[]{""};
String[] dimensionIds = new String[]{""};
boolean error = true;
while (error && value == JOptionPane.YES_OPTION) {
error = false;
String react = "";
double val = 1.0;
dimID = SBMLutilities.checkSizeParameters(bioModel.getSBMLDocument(), reactantId.getText(), true);
if(dimID!=null){
dimensionIds = SBMLutilities.getDimensionIds("r",dimID.length-1);
if (dimID[0].trim().equals("")) {
error = SBMLutilities.variableInUse(gcm.getSBMLDocument(), selectedID, false, true, true);
}
else {
error = SBMLutilities.checkID(gcm.getSBMLDocument(), dimID[0].trim(), selectedID, false);
}
}
else{
error = true;
}
if(!error){
SBase variable = SBMLutilities.getElementBySId(bioModel.getSBMLDocument(), (String)reactantSpecies.getSelectedItem());
dex = SBMLutilities.checkIndices(RiIndex.getText(), variable, bioModel.getSBMLDocument(), dimensionIds, "species", dimID, reactdimIDs, reactdimIDSizes);
error = (dex==null);
}
if (!error) {
if (stoiciLabel.getSelectedItem().equals("Stoichiometry")) {
InitialAssignments.removeInitialAssignment(bioModel, selectedID);
try {
val = Double.parseDouble(reactantStoichiometry.getText().trim());
}
catch (Exception e1) {
if (reactantId.getText().equals("")) {
JOptionPane.showMessageDialog(Gui.frame, "The stoichiometry must be a real number if no id is provided.", "Enter A Valid Value",
JOptionPane.ERROR_MESSAGE);
error = true;
} else {
// TODO: need sot use reactant dimension when it exists
error = InitialAssignments.addInitialAssignment(bioModel, reactantStoichiometry.getText().trim(), dimID);
val = 1.0;
}
}
if (val <= 0) {
JOptionPane.showMessageDialog(Gui.frame, "The stoichiometry value must be greater than 0.", "Enter A Valid Value",
JOptionPane.ERROR_MESSAGE);
error = true;
}
react = reactantSpecies.getSelectedItem() + " " + val;
}
else {
react = reactantSpecies.getSelectedItem() + " " + reactantStoichiometry.getText().trim();
}
}
int index = -1;
if (!error) {
if (reactant == null || !inSchematic) {
if (option.equals("OK")) {
index = reactants.getSelectedIndex();
}
reactants.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
reacta = Utility.getList(reacta, reactants);
reactants.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
if (index >= 0) {
reactants.setSelectedIndex(index);
}
for (int i = 0; i < reacta.length; i++) {
if (i != index) {
if (reacta[i].split(" ")[0].equals(reactantSpecies.getSelectedItem())) {
error = true;
JOptionPane.showMessageDialog(Gui.frame, "Unable to add species as a reactant.\n"
+ "Each species can only be used as a reactant once.", "Species Can Only Be Used Once",
JOptionPane.ERROR_MESSAGE);
}
}
}
}
}
if (!error) {
if (stoiciLabel.getSelectedItem().equals("Stoichiometry Math")) {
if (reactantStoichiometry.getText().trim().equals("")) {
JOptionPane.showMessageDialog(Gui.frame, "Stoichiometry math must have formula.", "Enter Stoichiometry Formula",
JOptionPane.ERROR_MESSAGE);
error = true;
}
else if (SBMLutilities.myParseFormula(reactantStoichiometry.getText().trim()) == null) {
JOptionPane.showMessageDialog(Gui.frame, "Stoichiometry formula is not valid.", "Enter Valid Formula",
JOptionPane.ERROR_MESSAGE);
error = true;
}
else {
ArrayList<String> invalidVars = getInvalidVariablesInReaction(reactantStoichiometry.getText().trim(), dimensionIds, true, "", false);
if (invalidVars.size() > 0) {
String invalid = "";
for (int i = 0; i < invalidVars.size(); i++) {
if (i == invalidVars.size() - 1) {
invalid += invalidVars.get(i);
}
else {
invalid += invalidVars.get(i) + "\n";
}
}
String message;
message = "Stoiciometry math contains unknown variables.\n\n" + "Unknown variables:\n" + invalid;
JTextArea messageArea = new JTextArea(message);
messageArea.setLineWrap(true);
messageArea.setWrapStyleWord(true);
messageArea.setEditable(false);
JScrollPane scrolls = new JScrollPane();
scrolls.setMinimumSize(new Dimension(300, 300));
scrolls.setPreferredSize(new Dimension(300, 300));
scrolls.setViewportView(messageArea);
JOptionPane.showMessageDialog(Gui.frame, scrolls, "Stoiciometry Math Error", JOptionPane.ERROR_MESSAGE);
error = true;
}
if (!error) {
error = SBMLutilities.checkNumFunctionArguments(gcm.getSBMLDocument(),
SBMLutilities.myParseFormula(reactantStoichiometry.getText().trim()));
}
if (!error) {
error = SBMLutilities.checkFunctionArgumentTypes(gcm.getSBMLDocument(),
SBMLutilities.myParseFormula(reactantStoichiometry.getText().trim()));
}
if (!error) {
if (SBMLutilities.returnsBoolean(SBMLutilities.myParseFormula(reactantStoichiometry.getText().trim()), bioModel.getSBMLDocument().getModel())) {
JOptionPane.showMessageDialog(Gui.frame, "Stoichiometry math must evaluate to a number.", "Number Expected",
JOptionPane.ERROR_MESSAGE);
error = true;
}
}
}
}
}
if (!error && option.equals("OK") && reactantConstant.getSelectedItem().equals("true")) {
String id = selectedID;
error = SBMLutilities.checkConstant(gcm.getSBMLDocument(), "Reactant stoiciometry", id);
}
if (!error) {
if (option.equals("OK")) {
String v = selectedReactantId;
SpeciesReference reactan = reactant;
if (reactant == null || !inSchematic) {
for (SpeciesReference r : changedReactants) {
if (r.getSpecies().equals(v)) {
reactan = r;
}
}
reactants.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
reacta = Utility.getList(reacta, reactants);
reactants.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
}
if (reactan==null) return;
reactan.setId(dimID[0].trim());
reactan.setName(reactantName.getText().trim());
reactan.setSpecies((String) reactantSpecies.getSelectedItem());
reactan.setStoichiometry(val);
if (complex!=null) {
double nc = bioModel.getSBMLDocument().getModel().getParameter(GlobalConstants.COOPERATIVITY_STRING).getValue();
String ncStr = GlobalConstants.COOPERATIVITY_STRING+"_"+reactan.getSpecies();
LocalParameter paramet = null;
for (LocalParameter p : changedParameters) {
if (p.getId().equals(ncStr)) {
paramet = p;
}
}
if (nc==val) {
if (paramet != null) {
removeLocalParameter(ncStr);
}
} else {
if (paramet != null) {
removeLocalParameter(ncStr);
addLocalParameter(GlobalConstants.COOPERATIVITY_STRING+"_"+reactan.getSpecies(),val);
} else {
addLocalParameter(GlobalConstants.COOPERATIVITY_STRING+"_"+reactan.getSpecies(),val);
}
}
}
if (reactantConstant.getSelectedItem().equals("true")) {
reactan.setConstant(true);
}
else {
reactan.setConstant(false);
}
// TODO: Scott - change for Plugin writing
if(!error){
ArraysSBasePlugin sBasePlugin = SBMLutilities.getArraysSBasePlugin(reactan);
sBasePlugin.unsetListOfDimensions();
for(int i = 0; i<dimID.length-1; i++){
org.sbml.jsbml.ext.arrays.Dimension dimX = sBasePlugin.createDimension(dimensionIds[i]);
dimX.setSize(dimID[i+1]);
dimX.setArrayDimension(i);
}
// Add the indices
sBasePlugin.unsetListOfIndices();
for(int i = 0; dex!=null && i<dex.length-1; i++){
Index indexRule = new Index();
indexRule.setArrayDimension(i);
indexRule.setReferencedAttribute("species");
ASTNode indexMath = SBMLutilities.myParseFormula(dex[i+1]);
indexRule.setMath(indexMath);
sBasePlugin.addIndex(indexRule);
}
}
if (reactant == null || !inSchematic) {
reacta[index] = react;
Utility.sort(reacta);
reactants.setListData(reacta);
reactants.setSelectedIndex(index);
}
SBMLutilities.updateVarId(gcm.getSBMLDocument(), false, selectedID, dimID[0].trim());
if (reactant == null || !inSchematic) {
kineticLaw.setText(SBMLutilities.updateFormulaVar(kineticLaw.getText().trim(), selectedID, dimID[0].trim()));
}
}
else {
// SpeciesReference reactan = new
// SpeciesReference(BioSim.SBML_LEVEL, BioSim.SBML_VERSION);
SpeciesReference reactan = new SpeciesReference(gcm.getSBMLDocument().getLevel(), gcm.getSBMLDocument().getVersion());
reactan.setId(dimID[0].trim());
reactan.setName(reactantName.getText().trim());
reactan.setConstant(true);
changedReactants.add(reactan);
reactan.setSpecies((String) reactantSpecies.getSelectedItem());
reactan.setStoichiometry(val);
if (complex!=null) {
double nc = bioModel.getSBMLDocument().getModel().getParameter(GlobalConstants.COOPERATIVITY_STRING).getValue();
String ncStr = GlobalConstants.COOPERATIVITY_STRING+"_"+reactan.getSpecies();
LocalParameter paramet = null;
for (LocalParameter p : changedParameters) {
if (p.getId().equals(ncStr)) {
paramet = p;
}
}
if (nc==val) {
if (paramet != null) {
removeLocalParameter(ncStr);
}
} else {
if (paramet != null) {
removeLocalParameter(ncStr);
addLocalParameter(GlobalConstants.COOPERATIVITY_STRING+"_"+reactan.getSpecies(),val);
} else {
addLocalParameter(GlobalConstants.COOPERATIVITY_STRING+"_"+reactan.getSpecies(),val);
}
}
}
if (reactantConstant.getSelectedItem().equals("true")) {
reactan.setConstant(true);
}
else {
reactan.setConstant(false);
}
// TODO: Scott - change for Plugin writing
if(!error){
ArraysSBasePlugin sBasePlugin = SBMLutilities.getArraysSBasePlugin(reactan);
for(int i = 0; i<dimID.length-1; i++){
org.sbml.jsbml.ext.arrays.Dimension dimX = sBasePlugin.createDimension(dimensionIds[i]);
dimX.setSize(dimID[i+1]);
dimX.setArrayDimension(i);
}
// Add the indices
for(int i = 0; dex!=null && i<dex.length-1; i++){
Index indexRule = new Index();
indexRule.setArrayDimension(i);
indexRule.setReferencedAttribute("species");
ASTNode indexMath = SBMLutilities.myParseFormula(dex[i+1]);
indexRule.setMath(indexMath);
sBasePlugin.addIndex(indexRule);
}
}
JList add = new JList();
Object[] adding = { react };
add.setListData(adding);
add.setSelectedIndex(0);
reactants.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
adding = Utility.add(reacta, reactants, add);
reacta = new String[adding.length];
for (int i = 0; i < adding.length; i++) {
reacta[i] = (String) adding[i];
}
Utility.sort(reacta);
reactants.setListData(reacta);
reactants.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
reactants.setSelectedIndex(0);
}
modelEditor.setDirty(true);
gcm.makeUndoPoint();
}
if (error) {
value = JOptionPane.showOptionDialog(Gui.frame, reactantsPanel, "Reactants Editor", JOptionPane.YES_NO_OPTION,
JOptionPane.PLAIN_MESSAGE, null, options, options[0]);
}
}
if (value == JOptionPane.NO_OPTION) {
return;
}
}
/**
 * Removes the reaction currently selected in the reactions list.
 * Production reactions are owned by a promoter, so they are removed through the
 * promoter instead of directly; for all other reactions the list UI is updated
 * and a sensible selection is restored afterwards.
 */
private void removeReaction() {
	int index = reactions.getSelectedIndex();
	if (index == -1) {
		return; // nothing selected, nothing to do
	}
	// List entries are "id ..." strings; the id is the first token.
	String selected = ((String) reactions.getSelectedValue()).split(" ")[0];
	Reaction reaction = bioModel.getSBMLDocument().getModel().getReaction(selected);
	if (BioModel.isProductionReaction(reaction)) {
		// Production reactions belong to a promoter; remove via the promoter.
		bioModel.removePromoter(SBMLutilities.getPromoterId(reaction));
	} else {
		bioModel.removeReaction(selected);
		reactions.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
		reacts = (String[]) Utility.remove(reactions, reacts);
		reactions.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
		// Re-select the same position, or the new last entry if we removed the tail.
		int newIndex = (index < reactions.getModel().getSize()) ? index : index - 1;
		reactions.setSelectedIndex(newIndex);
	}
	modelEditor.setDirty(true);
	bioModel.makeUndoPoint();
}
/**
 * Removes the reaction with the given id from the model's list of reactions.
 *
 * @param gcm      the model to remove the reaction from
 * @param selected the id of the reaction to remove
 */
public static void removeTheReaction(BioModel gcm, String selected) {
	Reaction tempReaction = gcm.getSBMLDocument().getModel().getReaction(selected);
	if (tempReaction == null) {
		return; // no such reaction; the original would have thrown NPE below
	}
	ListOf<Reaction> r = gcm.getSBMLDocument().getModel().getListOfReactions();
	// Iterate backwards: removing during a forward indexed scan skips the element
	// that shifts into the removed slot.
	for (int i = gcm.getSBMLDocument().getModel().getReactionCount() - 1; i >= 0; i--) {
		if (r.get(i).getId().equals(tempReaction.getId())) {
			r.remove(i);
		}
	}
}
/**
 * Remove a reactant from a reaction
 */
// Removes the reactant selected in the reactants list, provided its id is not
// referenced elsewhere in the model. Updates the list UI and marks the model dirty.
private void removeReactant() {
	int index = reactants.getSelectedIndex();
	if (index != -1) {
		// List entries are "species stoichiometry"; the species id is the first token.
		String v = ((String) reactants.getSelectedValue()).split(" ")[0];
		for (int i = 0; i < changedReactants.size(); i++) {
			// Only remove when the reactant's id is not in use by other model elements.
			// NOTE(review): removing inside an index loop without break — after remove(i)
			// the loop continues and skips the shifted element; presumably each species
			// appears at most once among the reactants, so only one entry ever matches.
			if (changedReactants.get(i).getSpecies().equals(v) &&
					!SBMLutilities.variableInUse(bioModel.getSBMLDocument(), changedReactants.get(i).getId(), false, true,true)) {
				changedReactants.remove(i);
				// Refresh the JList contents and restore a sensible selection.
				reactants.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
				reacta = (String[]) Utility.remove(reactants, reacta);
				reactants.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
				if (index < reactants.getModel().getSize()) {
					reactants.setSelectedIndex(index);
				}
				else {
					reactants.setSelectedIndex(index - 1);
				}
				modelEditor.setDirty(true);
				bioModel.makeUndoPoint();
			}
		}
	}
}
/**
 * Remove a product from a reaction
 */
// Removes the product selected in the products list, provided its id is not
// referenced elsewhere in the model. Updates the list UI and marks the model dirty.
private void removeProduct() {
	int index = products.getSelectedIndex();
	if (index != -1) {
		// List entries are "species stoichiometry"; the species id is the first token.
		String v = ((String) products.getSelectedValue()).split(" ")[0];
		for (int i = 0; i < changedProducts.size(); i++) {
			// Only remove when the product's id is not in use by other model elements.
			// NOTE(review): same remove-inside-index-loop pattern as removeReactant();
			// presumably each species appears at most once among the products.
			if (changedProducts.get(i).getSpecies().equals(v) &&
					!SBMLutilities.variableInUse(bioModel.getSBMLDocument(), changedProducts.get(i).getId(), false, true, true)) {
				changedProducts.remove(i);
				// Refresh the JList contents and restore a sensible selection.
				products.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
				proda = (String[]) Utility.remove(products, proda);
				products.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
				if (index < products.getModel().getSize()) {
					products.setSelectedIndex(index);
				}
				else {
					products.setSelectedIndex(index - 1);
				}
				modelEditor.setDirty(true);
				bioModel.makeUndoPoint();
			}
		}
	}
}
/**
 * Remove a modifier from a reaction
 */
// Removes the modifier selected in the modifiers list. Promoter modifiers
// (SBO term SBO_PROMOTER_MODIFIER) must not be removed: encountering one aborts
// the whole operation, leaving the list and the dirty flag untouched.
private void removeModifier() {
	int index = modifiers.getSelectedIndex();
	if (index != -1) {
		// List entries are "species ..."; the species id is the first token.
		String v = ((String) modifiers.getSelectedValue()).split(" ")[0];
		for (int i = 0; i < changedModifiers.size(); i++) {
			if (changedModifiers.get(i).getSpecies().equals(v)) {
				if (!changedModifiers.get(i).isSetSBOTerm() ||
						changedModifiers.get(i).getSBOTerm()!=GlobalConstants.SBO_PROMOTER_MODIFIER) {
					changedModifiers.remove(i);
				} else {
					// Promoter modifiers are managed through the promoter, not here.
					return;
				}
			}
		}
		// Refresh the JList contents and restore a sensible selection.
		modifiers.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
		modifierArray = (String[]) Utility.remove(modifiers, modifierArray);
		modifiers.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
		if (index < modifiers.getModel().getSize()) {
			modifiers.setSelectedIndex(index);
		}
		else {
			modifiers.setSelectedIndex(index - 1);
		}
		modelEditor.setDirty(true);
		bioModel.makeUndoPoint();
	}
}
private static String indexedSpeciesRef(SimpleSpeciesReference reference) {
String result = reference.getSpecies();
ArraysSBasePlugin sBasePlugin = SBMLutilities.getArraysSBasePlugin(reference);
for(int i = sBasePlugin.getIndexCount()-1; i>=0; i
Index index = sBasePlugin.getIndex(i);
result += "[" + SBMLutilities.myFormulaToString(index.getMath()) + "]";
}
return result;
}
private static String indexedSpeciesRefId(SBMLDocument document,String reactionId,SimpleSpeciesReference reference) {
String[] dimID = new String[]{""};
String[] dimensionIds = new String[]{""};
dimID = SBMLutilities.checkSizeParameters(document, reactionId, false);
if (dimID==null) return null;
dimensionIds = SBMLutilities.getDimensionIds("",dimID.length-1);
String result = reference.getId();
for(int i = dimensionIds.length-1; i >=0; i
result += "[" + dimensionIds[i] + "]";
}
return result;
}
/**
 * Builds a mass-action kinetic law from the reaction's reactants, products,
 * and modifiers and places the resulting formula into the kinetic law text
 * field. Requires at least one local parameter (forward rate kf, and reverse
 * rate kr when the reaction is reversible); with a single parameter the same
 * value is used for both directions.
 */
private void useMassAction() {
String kf;
String kr;
SBMLDocument doc = bioModel.getSBMLDocument();
if (changedParameters.size() == 0) {
JOptionPane.showMessageDialog(Gui.frame, "Unable to create mass action kinetic law.\n"
+ "Requires at least one local parameter.", "Unable to Create Kinetic Law",
JOptionPane.ERROR_MESSAGE);
return;
}
else if (changedParameters.size() == 1) {
// one parameter: use it as both forward and reverse rate constant
kf = changedParameters.get(0).getId();
kr = changedParameters.get(0).getId();
}
else {
kf = changedParameters.get(0).getId();
kr = changedParameters.get(1).getId();
}
String kinetic = kf;
boolean addEquil = false;
String equilExpr = "";
for (SpeciesReference s : changedReactants) {
// arrayed species references are not supported by this shortcut
if (SBMLutilities.getArraysSBasePlugin(s).getDimensionCount()>0) {
JOptionPane.showMessageDialog(Gui.frame, "Unable to create mass action kinetic law.\n"
+ "Dimensions on species references not currently supported for use mass action button.", "Unable to Create Kinetic Law",
JOptionPane.ERROR_MESSAGE);
return;
}
if (s.isSetId()) {
// identified stoichiometry: build an equilibrium correction exponent
addEquil = true;
String stoichiometry = indexedSpeciesRefId(doc,reacID.getText(),s);
if (stoichiometry==null) return;
equilExpr += stoichiometry;
}
else {
equilExpr += s.getStoichiometry();
}
}
if (addEquil) {
kinetic += " * pow(" + kf + "/" + kr + "," + equilExpr + "-2)";
}
// multiply in each reactant raised to its stoichiometry
for (SpeciesReference s : changedReactants) {
if (s.isSetId()) {
String stoichiometry = indexedSpeciesRefId(doc,reacID.getText(),s);
if (stoichiometry==null) return;
kinetic += " * pow(" + indexedSpeciesRef(s) + ", " + stoichiometry + ")";
}
else {
if (s.getStoichiometry() == 1) {
kinetic += " * " + indexedSpeciesRef(s);
}
else {
kinetic += " * pow(" + indexedSpeciesRef(s) + ", " + s.getStoichiometry() + ")";
}
}
}
// modifiers multiply into the forward term as well
for (ModifierSpeciesReference s : changedModifiers) {
if (SBMLutilities.getArraysSBasePlugin(s).getDimensionCount()>0) {
JOptionPane.showMessageDialog(Gui.frame, "Unable to create mass action kinetic law.\n"
+ "Dimensions on species references not currently supported for use mass action button.", "Unable to Create Kinetic Law",
JOptionPane.ERROR_MESSAGE);
return;
}
kinetic += " * " + indexedSpeciesRef(s);
}
// reversible reactions get a symmetric reverse-rate term built from the products
if (reacReverse.getSelectedItem().equals("true")) {
kinetic += " - " + kr;
addEquil = false;
equilExpr = "";
for (SpeciesReference s : changedProducts) {
if (SBMLutilities.getArraysSBasePlugin(s).getDimensionCount()>0) {
JOptionPane.showMessageDialog(Gui.frame, "Unable to create mass action kinetic law.\n"
+ "Dimensions on species references not currently supported for use mass action button.", "Unable to Create Kinetic Law",
JOptionPane.ERROR_MESSAGE);
return;
}
if (s.isSetId()) {
addEquil = true;
String stoichiometry = indexedSpeciesRefId(doc,reacID.getText(),s);
if (stoichiometry==null) return;
equilExpr += stoichiometry;
}
else {
equilExpr += s.getStoichiometry();
}
}
if (addEquil) {
kinetic += " * pow(" + kf + "/" + kr + "," + equilExpr + "-1)";
}
for (SpeciesReference s : changedProducts) {
if (s.isSetId()) {
String stoichiometry = indexedSpeciesRefId(doc,reacID.getText(),s);
if (stoichiometry==null) return;
kinetic += " * pow(" + indexedSpeciesRef(s) + ", " + stoichiometry + ")";
}
else {
if (s.getStoichiometry() == 1) {
kinetic += " * " + indexedSpeciesRef(s);
}
else {
kinetic += " * pow(" + indexedSpeciesRef(s) + ", " + s.getStoichiometry() + ")";
}
}
}
for (ModifierSpeciesReference s : changedModifiers) {
kinetic += " * " + indexedSpeciesRef(s);
}
}
kineticLaw.setText(kinetic);
modelEditor.setDirty(true);
bioModel.makeUndoPoint();
}
/**
 * Removes the selected local reaction parameter, unless it is referenced by
 * the kinetic law, in which case an error dialog is shown and nothing changes.
 */
private void reacRemoveParam() {
int index = reacParameters.getSelectedIndex();
if (index != -1) {
String v = ((String) reacParameters.getSelectedValue()).split(" ")[0];
if (reactions.getSelectedIndex() != -1) {
String kinetic = kineticLaw.getText().trim();
String[] vars = new String[0];
if (!kinetic.equals("")) {
// tokenize the kinetic law on spaces, parentheses, and commas
vars = SBMLutilities.myFormulaToString(SBMLutilities.myParseFormula(kineticLaw.getText().trim())).split(" |\\(|\\)|\\,");
}
for (int j = 0; j < vars.length; j++) {
if (vars[j].equals(v)) {
JOptionPane.showMessageDialog(Gui.frame, "Cannot remove reaction parameter because it is used in the kinetic law.",
"Cannot Remove Parameter", JOptionPane.ERROR_MESSAGE);
return;
}
}
}
for (int i = 0; i < changedParameters.size(); i++) {
if (changedParameters.get(i).getId().equals(v)) {
changedParameters.remove(i);
}
}
thisReactionParams.remove(v);
// temporarily allow multi-selection so Utility.remove can rebuild the list model
reacParameters.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
reacParams = (String[]) Utility.remove(reacParameters, reacParams);
reacParameters.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
// keep a sensible selection: same position, or the new last element
if (index < reacParameters.getModel().getSize()) {
reacParameters.setSelectedIndex(index);
}
else {
reacParameters.setSelectedIndex(index - 1);
}
modelEditor.setDirty(true);
bioModel.makeUndoPoint();
}
}
/**
 * Removes the local parameter with the given id from this reaction's
 * parameter list and list model, then selects the first remaining entry
 * and marks the model dirty with a new undo point.
 *
 * @param v the id of the local parameter to remove
 */
private void removeLocalParameter(String v) {
// highlight the matching row so Utility.remove drops the right entry
for (int i = 0; i < reacParameters.getModel().getSize(); i++) {
if (((String) reacParameters.getModel().getElementAt(i)).split(" ")[0].equals(v)) {
reacParameters.setSelectedIndex(i);
break;
}
}
// iterate backwards so removing an element cannot skip the next one
// (the original forward loop skipped the element after each removal)
for (int i = changedParameters.size() - 1; i >= 0; i--) {
if (changedParameters.get(i).getId().equals(v)) {
changedParameters.remove(i);
}
}
thisReactionParams.remove(v);
// temporarily allow multi-selection so Utility.remove can rebuild the list model
reacParameters.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
reacParams = (String[]) Utility.remove(reacParameters, reacParams);
reacParameters.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
reacParameters.setSelectedIndex(0);
modelEditor.setDirty(true);
bioModel.makeUndoPoint();
}
/**
 * Checks the units of a kinetic law against the expected substance / time.
 *
 * @param law the kinetic law to check
 * @return true only when unit checking is enabled and the units do not
 *         match; false when the law contains undeclared units (optionally
 *         warning the user) or when the check passes
 */
public boolean checkKineticLawUnits(KineticLaw law) {
if (law.containsUndeclaredUnits()) {
// cannot fully verify consistency; optionally warn, then report no mismatch
if (Gui.getCheckUndeclared()) {
JOptionPane.showMessageDialog(Gui.frame, "Kinetic law contains literals numbers or parameters with undeclared units.\n"
+ "Therefore, it is not possible to completely verify the consistency of the units.", "Contains Undeclared Units",
JOptionPane.WARNING_MESSAGE);
}
return false;
}
if (Gui.getCheckUnits() && SBMLutilities.checkUnitsInKineticLaw(bioModel.getSBMLDocument(), law)) {
JOptionPane.showMessageDialog(Gui.frame, "Kinetic law units should be substance / time.", "Units Do Not Match",
JOptionPane.ERROR_MESSAGE);
return true;
}
return false;
}
/**
 * Checks a flux-bound string for errors. Supported forms are
 * "lo&lt;=R&lt;=hi" / "lo&gt;=R&gt;=hi" (three elements), and the
 * two-element forms "R&lt;=x", "x&lt;=R", "R&gt;=x", "x&gt;=R", "R=x",
 * "x=R", where R is the reaction id and the other tokens are doubles.
 * On any problem an error dialog is shown. Returns true if there are no
 * errors, else returns false.
 *
 * @param s the flux-bound expression to validate
 * @param reactionId the id of the reaction the bound applies to
 * @return true when the bound is well formed, false otherwise
 */
public boolean fluxBoundisGood(String s, String reactionId) {
if (s.contains("<=")) {
String[] parts = s.split("<=");
if (parts.length == 3) {
return checkRangeBound(parts, true);
}
if (parts.length == 2) {
return checkSimpleBound(parts, reactionId);
}
JOptionPane.showMessageDialog(Gui.frame, "Wrong number of elements.", "Bad Format",
JOptionPane.ERROR_MESSAGE);
return false;
}
if (s.contains(">=")) {
String[] parts = s.split(">=");
if (parts.length == 3) {
return checkRangeBound(parts, false);
}
if (parts.length == 2) {
return checkSimpleBound(parts, reactionId);
}
JOptionPane.showMessageDialog(Gui.frame, "Wrong number of elements.", "Bad Format",
JOptionPane.ERROR_MESSAGE);
return false;
}
if (s.contains("=")) {
String[] parts = s.split("=");
if (parts.length == 2) {
return checkSimpleBound(parts, reactionId);
}
JOptionPane.showMessageDialog(Gui.frame, "Wrong number of elements.", "Bad Format",
JOptionPane.ERROR_MESSAGE);
return false;
}
// no recognized operator at all
JOptionPane.showMessageDialog(Gui.frame, "Need Operations.", "Bad Format",
JOptionPane.ERROR_MESSAGE);
return false;
}
/**
 * Validates a three-element "lo OP id OP hi" bound: both outer tokens must
 * be doubles and must be ordered correctly for the operator.
 *
 * @param parts the three split tokens
 * @param lessOrEqual true for "&lt;=" bounds, false for "&gt;=" bounds
 * @return true when the range is well formed
 */
private static boolean checkRangeBound(String[] parts, boolean lessOrEqual) {
if (!requireDouble(parts[0]) || !requireDouble(parts[2])) {
return false;
}
double first = Double.parseDouble(parts[0]);
double second = Double.parseDouble(parts[2]);
if (lessOrEqual && first > second) {
JOptionPane.showMessageDialog(Gui.frame, parts[0] + " must be less than " + parts[2],
"Imbalance with Bounds", JOptionPane.ERROR_MESSAGE);
return false;
}
if (!lessOrEqual && first < second) {
JOptionPane.showMessageDialog(Gui.frame, parts[0] + " must be greater than " + parts[2],
"Imbalance with Bounds", JOptionPane.ERROR_MESSAGE);
return false;
}
return true;
}
/**
 * Validates a two-element bound: one side must be the reaction id and the
 * other side must parse as a double. Fixes the original check, which
 * compared the caught exception to a fresh NumberFormatException with
 * equals() (always false, since Throwable uses identity equality) and
 * therefore silently accepted non-numeric bounds.
 *
 * @param parts the two split tokens
 * @param reactionId the id of the reaction the bound applies to
 * @return true when the bound is well formed
 */
private static boolean checkSimpleBound(String[] parts, String reactionId) {
if (!parts[0].equals(reactionId) && !parts[1].equals(reactionId)) {
JOptionPane.showMessageDialog(Gui.frame, "Must have " + reactionId + " in the equation.", "No Reaction",
JOptionPane.ERROR_MESSAGE);
return false;
}
String numberSide = parts[0].equals(reactionId) ? parts[1] : parts[0];
return requireDouble(numberSide);
}
/**
 * Shows an error dialog and returns false when the token does not parse
 * as a double; returns true otherwise.
 */
private static boolean requireDouble(String token) {
try {
Double.parseDouble(token);
return true;
}
catch (NumberFormatException e) {
JOptionPane.showMessageDialog(Gui.frame, token + " has to be a double.",
"Incorrect Element", JOptionPane.ERROR_MESSAGE);
return false;
}
}
/**
 * Wires in the sibling panels that are refreshed after reaction edits.
 *
 * @param initialsPanel the initial-assignments panel to refresh
 * @param rulesPanel the rules panel to refresh
 */
public void setPanels(InitialAssignments initialsPanel, Rules rulesPanel) {
this.initialsPanel = initialsPanel;
this.rulesPanel = rulesPanel;
}
/**
 * Dispatches button clicks and combo-box changes for the reactions panel
 * to the matching add/edit/remove helper.
 */
@Override
public void actionPerformed(ActionEvent e) {
// if the add reactions button is clicked
if (e.getSource() == addReac) {
reactionsEditor(bioModel, "Add", "", false);
}
// if the edit reactions button is clicked
else if (e.getSource() == editReac) {
if (reactions.getSelectedIndex() == -1) {
JOptionPane.showMessageDialog(Gui.frame, "No reaction selected.", "Must Select A Reaction", JOptionPane.ERROR_MESSAGE);
return;
}
reactionsEditor(bioModel, "OK", ((String) reactions.getSelectedValue()).split(" ")[0], false);
initialsPanel.refreshInitialAssignmentPanel(bioModel);
rulesPanel.refreshRulesPanel();
}
// if the remove reactions button is clicked
else if (e.getSource() == removeReac) {
removeReaction();
}
// if the add reactions parameters button is clicked
else if (e.getSource() == reacAddParam) {
reacParametersEditor(bioModel,"Add");
}
// if the edit reactions parameters button is clicked
else if (e.getSource() == reacEditParam) {
reacParametersEditor(bioModel,"OK");
}
// if the remove reactions parameters button is clicked
else if (e.getSource() == reacRemoveParam) {
reacRemoveParam();
}
// if the add reactants button is clicked
else if (e.getSource() == addReactant) {
reactantsEditor(bioModel, "Add", "", null, false, null);
}
// if the edit reactants button is clicked
else if (e.getSource() == editReactant) {
if (reactants.getSelectedIndex() == -1) {
JOptionPane.showMessageDialog(Gui.frame, "No reactant selected.", "Must Select A Reactant", JOptionPane.ERROR_MESSAGE);
return;
}
reactantsEditor(bioModel, "OK", ((String) reactants.getSelectedValue()).split(" ")[0], null, false, null);
initialsPanel.refreshInitialAssignmentPanel(bioModel);
rulesPanel.refreshRulesPanel();
}
// if the remove reactants button is clicked
else if (e.getSource() == removeReactant) {
removeReactant();
}
// if the add products button is clicked
else if (e.getSource() == addProduct) {
productsEditor(bioModel, "Add", "", null, false, null);
}
// if the edit products button is clicked
else if (e.getSource() == editProduct) {
if (products.getSelectedIndex() == -1) {
JOptionPane.showMessageDialog(Gui.frame, "No product selected.", "Must Select A Product", JOptionPane.ERROR_MESSAGE);
return;
}
productsEditor(bioModel, "OK", ((String) products.getSelectedValue()).split(" ")[0], null, false, null);
initialsPanel.refreshInitialAssignmentPanel(bioModel);
rulesPanel.refreshRulesPanel();
}
// if the remove products button is clicked
else if (e.getSource() == removeProduct) {
removeProduct();
}
// if the add modifiers button is clicked
else if (e.getSource() == addModifier) {
modifiersEditor(bioModel, "Add", "", null, false, null);
}
// if the edit modifiers button is clicked
else if (e.getSource() == editModifier) {
if (modifiers.getSelectedIndex() == -1) {
JOptionPane.showMessageDialog(Gui.frame, "No modifier selected.", "Must Select A Modifier", JOptionPane.ERROR_MESSAGE);
return;
}
modifiersEditor(bioModel,"OK", ((String) modifiers.getSelectedValue()).split(" ")[0], null, false, null);
}
// if the remove modifiers button is clicked
else if (e.getSource() == removeModifier) {
removeModifier();
}
// if the clear button is clicked
else if (e.getSource() == clearKineticLaw) {
kineticLaw.setText("");
modelEditor.setDirty(true);
bioModel.makeUndoPoint();
}
// if the use mass action button is clicked
else if (e.getSource() == useMassAction) {
useMassAction();
}
// compartment selection changed: enable the index field only for arrayed compartments
else if (e.getSource() == reactionComp) {
SBase variable = bioModel.getSBMLDocument().getModel().getCompartment((String)reactionComp.getSelectedItem());
ArraysSBasePlugin ABV = SBMLutilities.getArraysSBasePlugin(variable);
int varDimCount = ABV.getDimensionCount();
if (varDimCount > 0) {
CiIndex.setEnabled(true);
} else {
CiIndex.setText("");
CiIndex.setEnabled(false);
}
}
}
/**
 * Opens the matching editor when a list entry is double-clicked. Reactant,
 * product, and modifier editing is disabled in parameters-only mode.
 */
@Override
public void mouseClicked(MouseEvent e) {
if (e.getClickCount() == 2) {
if (e.getSource() == reactions) {
if (reactions.getSelectedIndex() == -1) {
JOptionPane.showMessageDialog(Gui.frame, "No reaction selected.", "Must Select A Reaction", JOptionPane.ERROR_MESSAGE);
return;
}
reactionsEditor(bioModel, "OK", ((String) reactions.getSelectedValue()).split(" ")[0], false);
initialsPanel.refreshInitialAssignmentPanel(bioModel);
rulesPanel.refreshRulesPanel();
}
else if (e.getSource() == reacParameters) {
reacParametersEditor(bioModel,"OK");
}
else if (e.getSource() == reactants) {
if (!paramsOnly) {
if (reactants.getSelectedIndex() == -1) {
JOptionPane.showMessageDialog(Gui.frame, "No reactant selected.", "Must Select A Reactant", JOptionPane.ERROR_MESSAGE);
return;
}
reactantsEditor(bioModel, "OK", ((String) reactants.getSelectedValue()).split(" ")[0], null, false, null);
initialsPanel.refreshInitialAssignmentPanel(bioModel);
rulesPanel.refreshRulesPanel();
}
}
else if (e.getSource() == products) {
if (!paramsOnly) {
if (products.getSelectedIndex() == -1) {
JOptionPane.showMessageDialog(Gui.frame, "No product selected.", "Must Select A Product", JOptionPane.ERROR_MESSAGE);
return;
}
productsEditor(bioModel, "OK", ((String) products.getSelectedValue()).split(" ")[0], null, false, null);
initialsPanel.refreshInitialAssignmentPanel(bioModel);
rulesPanel.refreshRulesPanel();
}
}
else if (e.getSource() == modifiers) {
if (!paramsOnly) {
if (modifiers.getSelectedIndex() == -1) {
JOptionPane.showMessageDialog(Gui.frame, "No modifier selected.", "Must Select A Modifier", JOptionPane.ERROR_MESSAGE);
return;
}
modifiersEditor(bioModel,"OK", ((String) modifiers.getSelectedValue()).split(" ")[0], null, false, null);
}
}
}
}
/**
 * Rebuilds the reaction list from the model. In parameters-only mode, any
 * recorded local-parameter changes ("Modified", "Custom", or "Sweep"
 * annotations) are re-applied to the kinetic law parameters and the list
 * entry is tagged "Modified". The previous selection is restored when the
 * reaction still exists.
 *
 * @param gcm the model whose reactions populate the panel
 */
public void refreshReactionPanel(BioModel gcm) {
String selectedReactionId = "";
if (!reactions.isSelectionEmpty()) {
selectedReactionId = ((String) reactions.getSelectedValue()).split(" ")[0];
}
this.bioModel = gcm;
Model model = gcm.getSBMLDocument().getModel();
ListOf<Reaction> listOfReactions = model.getListOfReactions();
reacts = new String[model.getReactionCount()];
for (int i = 0; i < model.getReactionCount(); i++) {
Reaction reaction = listOfReactions.get(i);
reacts[i] = reaction.getId();
if (paramsOnly) {
if (!reaction.isSetKineticLaw()) continue;
ListOf<LocalParameter> params = reaction.getKineticLaw().getListOfLocalParameters();
for (int j = 0; j < reaction.getKineticLaw().getLocalParameterCount(); j++) {
LocalParameter paramet = (params.get(j));
for (int k = 0; k < parameterChanges.size(); k++) {
// change entries start with "<reactionId>/<parameterId>"
if (parameterChanges.get(k).split(" ")[0].equals(reaction.getId() + "/" + paramet.getId())) {
String[] splits = parameterChanges.get(k).split(" ");
if (splits[splits.length - 2].equals("Modified") || splits[splits.length - 2].equals("Custom")) {
String value = splits[splits.length - 1];
paramet.setValue(Double.parseDouble(value));
}
else if (splits[splits.length - 2].equals("Sweep")) {
// sweep values look like "(start,...)" — use the first number
String value = splits[splits.length - 1];
paramet.setValue(Double.parseDouble(value.split(",")[0].substring(1).trim()));
}
if (!reacts[i].contains("Modified")) {
reacts[i] += " Modified";
}
}
}
}
}
}
Utility.sort(reacts);
// re-select the previously selected reaction when it is still present
int selected = 0;
for (int i = 0; i < reacts.length; i++) {
if (reacts[i].split(" ")[0].equals(selectedReactionId)) {
selected = i;
}
}
reactions.setListData(reacts);
reactions.setSelectedIndex(selected);
}
/**
 * No-op; required by the MouseListener interface.
 */
@Override
public void mouseEntered(MouseEvent e) {
}
/**
 * No-op; required by the MouseListener interface.
 */
@Override
public void mouseExited(MouseEvent e) {
}
/**
 * No-op; required by the MouseListener interface.
 */
@Override
public void mousePressed(MouseEvent e) {
}
/**
 * No-op; required by the MouseListener interface.
 */
@Override
public void mouseReleased(MouseEvent e) {
}
}
|
package gcm2sbml.network;
import gcm2sbml.parser.GCMFile;
import gcm2sbml.parser.GCMParser;
import gcm2sbml.util.GlobalConstants;
import gcm2sbml.util.Utility;
import gcm2sbml.visitor.AbstractPrintVisitor;
import gcm2sbml.visitor.PrintActivatedBindingVisitor;
import gcm2sbml.visitor.PrintActivatedProductionVisitor;
import gcm2sbml.visitor.PrintBiochemicalVisitor;
import gcm2sbml.visitor.PrintDecaySpeciesVisitor;
import gcm2sbml.visitor.PrintDimerizationVisitor;
import gcm2sbml.visitor.PrintRepressionBindingVisitor;
import gcm2sbml.visitor.PrintSpeciesVisitor;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.HashMap;
import org.sbml.libsbml.KineticLaw;
import org.sbml.libsbml.Model;
import org.sbml.libsbml.Parameter;
import org.sbml.libsbml.SBMLDocument;
import org.sbml.libsbml.SBMLReader;
import org.sbml.libsbml.SBMLWriter;
import org.sbml.libsbml.Species;
import org.sbml.libsbml.SpeciesReference;
import org.sbml.libsbml.Unit;
import org.sbml.libsbml.UnitDefinition;
/**
* This class represents a genetic network
*
* @author Nam
*
*/
public class GeneticNetwork {
/**
 * Constructor. Delegates to the four-argument constructor with no GCM file.
 *
 * @param species
 *            a hashmap of species
 * @param stateMap
 *            a hashmap of statename to species name
 * @param promoters
 *            a hashmap of promoters
 */
public GeneticNetwork(HashMap<String, SpeciesInterface> species,
HashMap<String, SpeciesInterface> stateMap,
HashMap<String, Promoter> promoters) {
this(species, stateMap, promoters, null);
}
/**
 * Constructor. Creates an empty network; callers are expected to set
 * species/promoters later (e.g. via buildTemplate).
 */
public GeneticNetwork() {
}
/**
 * Constructor
 *
 * @param species
 *            a hashmap of species
 * @param stateMap
 *            a hashmap of statename to species name
 * @param promoters
 *            a hashmap of promoters
 * @param gcm
 *            a gcm file containing extra information (may be null)
 */
public GeneticNetwork(HashMap<String, SpeciesInterface> species,
HashMap<String, SpeciesInterface> stateMap,
HashMap<String, Promoter> promoters, GCMFile gcm) {
this.species = species;
this.stateMap = stateMap;
this.promoters = promoters;
this.properties = gcm;
// note: this sets visitor-wide (static) state shared by all print visitors
AbstractPrintVisitor.setGCMFile(gcm);
initialize();
}
/**
 * Builds a template SBML file containing only the species and promoters of
 * the given GCM file and writes it to the given filename.
 *
 * @param species the species to include in the template
 * @param promoters the promoters to include in the template
 * @param gcm the GCM file name (relative to the current root) to load
 * @param filename the destination file for the generated SBML
 */
public void buildTemplate(HashMap<String, SpeciesInterface> species,
HashMap<String, Promoter> promoters, String gcm, String filename) {
GCMFile file = new GCMFile();
file.load(currentRoot + gcm);
// visitor-wide (static) state shared by all print visitors
AbstractPrintVisitor.setGCMFile(file);
setSpecies(species);
setPromoters(promoters);
SBMLDocument document = new SBMLDocument(2, 3);
currentDocument = document;
Model m = document.createModel();
document.setModel(m);
Utility.addCompartments(document, compartment);
document.getModel().getCompartment(compartment).setSize(1);
SBMLWriter writer = new SBMLWriter();
printSpecies(document);
printOnlyPromoters(document);
PrintStream p = null;
try {
p = new PrintStream(new FileOutputStream(filename));
m.setName("Created from " + gcm);
m.setId(new File(filename).getName().replace(".sbml", ""));
p.print(writer.writeToString(document));
} catch (FileNotFoundException e) {
e.printStackTrace();
} finally {
// close in finally so the stream is not leaked if writing fails
if (p != null) {
p.close();
}
}
}
/**
 * Loads network properties from a GCM file, caching its dimerization and
 * biochemical abstraction flags.
 *
 * @param gcm
 *            the GCM file to read settings from
 */
public void loadProperties(GCMFile gcm) {
properties = gcm;
dimerizationAbstraction = gcm.getDimAbs();
biochemicalAbstraction = gcm.getBioAbs();
}
/**
 * Sets the base SBML file to merge into; an empty string means none.
 *
 * @param file the SBML file name, relative to the current root
 */
public void setSBMLFile(String file) {
sbmlDocument = file;
}
/**
 * Creates a fresh level-2 version-3 SBML document for this network, writes
 * it to the given file, and returns it.
 *
 * @param filename destination file for the generated SBML
 * @return the sbml document that was created and written
 */
public SBMLDocument outputSBML(String filename) {
SBMLDocument doc = new SBMLDocument(2, 3);
currentDocument = doc;
Model model = doc.createModel();
doc.setModel(model);
// give the document its single default compartment of size 1
Utility.addCompartments(doc, compartment);
doc.getModel().getCompartment(compartment).setSize(1);
outputSBML(filename, doc);
return doc;
}
/**
 * Prints the full network (species, promoters, RNAP, decay, optional
 * dimerization/biochemical reactions, production, binding, components)
 * into the given document and writes it to the given file.
 *
 * @param filename destination file for the generated SBML
 * @param document the document to populate and write
 * @throws IllegalStateException when generation or writing fails; the
 *             original exception is preserved as the cause
 */
public void outputSBML(String filename, SBMLDocument document) {
PrintStream p = null;
try {
Model m = document.getModel();
SBMLWriter writer = new SBMLWriter();
printSpecies(document);
printPromoters(document);
printRNAP(document);
printDecay(document);
// abstractions replace these reaction sets, so only print them when off
if (!dimerizationAbstraction) {
printDimerization(document);
}
if (!biochemicalAbstraction) {
printBiochemical(document);
}
printPromoterProduction(document);
printPromoterBinding(document);
printComponents(document, filename);
p = new PrintStream(new FileOutputStream(filename));
m.setName("Created from " + new File(filename).getName().replace("sbml", "gcm"));
m.setId(new File(filename).getName().replace(".sbml", ""));
p.print(writer.writeToString(document));
} catch (Exception e) {
e.printStackTrace();
// preserve the underlying failure as the cause instead of discarding it
throw new IllegalStateException("Unable to output to SBML", e);
} finally {
// close in finally so the stream is not leaked if writing fails
if (p != null) {
p.close();
}
}
}
/**
 * Merges this network into the configured base SBML file and writes the
 * result to the given file. When no base file is configured, the network
 * is written on its own.
 *
 * @param filename destination file for the merged SBML
 * @throws IllegalStateException when reading or writing fails; the
 *             original exception is preserved as the cause
 */
public void mergeSBML(String filename) {
try {
if (sbmlDocument.equals("")) {
// no base SBML file configured; just emit the network by itself
outputSBML(filename);
return;
}
SBMLDocument document = new SBMLReader().readSBML(currentRoot + sbmlDocument);
currentDocument = document;
outputSBML(filename, document);
} catch (Exception e) {
e.printStackTrace();
// preserve the underlying failure as the cause instead of discarding it
throw new IllegalStateException("Unable to output to SBML", e);
}
}
/**
 * Prints the parameters to the SBMLDocument
 *
 * NOTE(review): this method is incomplete — it reads each global parameter
 * but never adds anything to the document (the add call is still commented
 * out), so calling it currently has no effect.
 *
 * @param document the document to print to
 */
private void printParameters(SBMLDocument document) {
if (properties != null) {
for (String s : properties.getGlobalParameters().keySet()) {
String param = properties.getParameter(s);
//Utility.addGlobalParameter(document, new Parameter())
}
}
}
/**
 * Prints each promoter's binding reactions: a reversible RNAP-binding
 * reaction per promoter, followed by activated-binding and
 * repression-binding reactions generated by visitors.
 *
 * @param document
 *            the SBMLDocument to print to
 */
private void printPromoterBinding(SBMLDocument document) {
// default rate constants, overridden from the properties file when loaded
double rnap = .033;
double rep = .05;
double act = .0033;
double kdimer = .05;
double kbio = .05;
double kcoop = 1;
double dimer = 1;
if (properties != null) {
kbio = Double.parseDouble(properties
.getParameter(GlobalConstants.KBIO_STRING));
kdimer = Double.parseDouble(properties
.getParameter(GlobalConstants.KASSOCIATION_STRING));
rnap = Double.parseDouble(properties
.getParameter(GlobalConstants.RNAP_BINDING_STRING));
rep = Double.parseDouble(properties
.getParameter(GlobalConstants.KREP_STRING));
act = Double.parseDouble(properties
.getParameter(GlobalConstants.KACT_STRING));
kcoop = Double.parseDouble(properties
.getParameter(GlobalConstants.COOPERATIVITY_STRING));
dimer = Double.parseDouble(properties
.getParameter(GlobalConstants.MAX_DIMER_STRING));
}
for (Promoter p : promoters.values()) {
// First setup RNAP binding
// promoters that drive no outputs need no binding reactions
if (p.getOutputs().size()==0) continue;
org.sbml.libsbml.Reaction r = new org.sbml.libsbml.Reaction(
"R_RNAP_" + p.getId());
r.addReactant(new SpeciesReference("RNAP", 1));
r.addReactant(new SpeciesReference(p.getId(), 1));
r.addProduct(new SpeciesReference("RNAP_" + p.getId()));
r.setReversible(true);
KineticLaw kl = new KineticLaw();
kl.addParameter(new Parameter("kf", rnap, getMoleTimeParameter(2)));
kl.addParameter(new Parameter("kr", 1, getMoleTimeParameter(1)));
kl.setFormula("kf*" + "RNAP*" + p.getId() + "-kr*RNAP_"
+ p.getId());
r.setKineticLaw(kl);
Utility.addReaction(document, r);
// Next setup activated binding
PrintActivatedBindingVisitor v = new PrintActivatedBindingVisitor(
document, p, act, kdimer, kcoop, kbio,
dimer);
v.setBiochemicalAbstraction(biochemicalAbstraction);
v.setDimerizationAbstraction(dimerizationAbstraction);
v.setCooperationAbstraction(cooperationAbstraction);
v.run();
// Next setup repression binding
// NOTE(review): this getRepressors() call discards its result — confirm
// whether it is needed for a side effect or is leftover code
p.getRepressors();
PrintRepressionBindingVisitor v2 = new PrintRepressionBindingVisitor(
document, p, rep, kdimer, kcoop, kbio,
dimer);
v2.setBiochemicalAbstraction(biochemicalAbstraction);
v2.setDimerizationAbstraction(dimerizationAbstraction);
v2.setCooperationAbstraction(cooperationAbstraction);
v2.run();
}
}
/**
 * Prints each promoter's production reactions. Promoters with only
 * activators get a basal production reaction plus visitor-generated
 * activated production; promoters with no activators get a single
 * open-complex (koc) production reaction; promoters with both get basal
 * plus activated production.
 *
 * @param document
 *            the SBMLDocument to print to
 */
private void printPromoterProduction(SBMLDocument document) {
// defaults, overridden from the properties file when one is loaded
double basal = .0001;
double koc = .25;
int stoc = 1;
double act = .25;
if (properties != null) {
basal = Double.parseDouble(properties
.getParameter(GlobalConstants.KBASAL_STRING));
koc = Double.parseDouble(properties
.getParameter(GlobalConstants.OCR_STRING));
stoc = Integer.parseInt(properties
.getParameter(GlobalConstants.STOICHIOMETRY_STRING));
act = Double.parseDouble(properties
.getParameter(GlobalConstants.ACTIVED_STRING));
}
for (Promoter p : promoters.values()) {
// promoters that drive no outputs need no production reaction
if (p.getOutputs().size()==0) continue;
if (p.getActivators().size() > 0 && p.getRepressors().size() == 0) {
// activators only: basal production plus activated production
org.sbml.libsbml.Reaction r = new org.sbml.libsbml.Reaction(
"R_basal_production_" + p.getId());
r.addReactant(new SpeciesReference("RNAP_" + p.getId(), 1));
for (SpeciesInterface species : p.getOutputs()) {
r.addProduct(new SpeciesReference(species.getId(), stoc));
}
r.addProduct(new SpeciesReference("RNAP_" + p.getId(), 1));
r.setReversible(false);
r.setFast(false);
KineticLaw kl = new KineticLaw();
// per-promoter basal rate overrides the global default when present
if (p.getProperty(GlobalConstants.KBASAL_STRING) != null) {
kl.addParameter(new Parameter("basal", Double.parseDouble(p
.getProperty(GlobalConstants.KBASAL_STRING)),
getMoleTimeParameter(1)));
} else {
kl.addParameter(new Parameter("basal", basal,
getMoleTimeParameter(1)));
}
kl.setFormula("basal*" + "RNAP_" + p.getId());
r.setKineticLaw(kl);
Utility.addReaction(document, r);
PrintActivatedProductionVisitor v = new PrintActivatedProductionVisitor(
document, p, p.getActivators(), act, stoc);
v.run();
} else if (p.getActivators().size() == 0
// NOTE(review): "size() >= 0" is always true, so this branch covers
// every promoter without activators — confirm ">" was not intended
&& p.getRepressors().size() >= 0) {
org.sbml.libsbml.Reaction r = new org.sbml.libsbml.Reaction(
"R_production_" + p.getId());
r.addReactant(new SpeciesReference("RNAP_" + p.getId(), 1));
for (SpeciesInterface species : p.getOutputs()) {
r.addProduct(new SpeciesReference(species.getId(), stoc));
}
r.addProduct(new SpeciesReference("RNAP_" + p.getId(), 1));
r.setReversible(false);
r.setFast(false);
KineticLaw kl = new KineticLaw();
// per-promoter open-complex rate overrides the global default
if (p.getProperty(GlobalConstants.OCR_STRING) != null) {
kl.addParameter(new Parameter("koc", Double.parseDouble(p
.getProperty(GlobalConstants.OCR_STRING)),
getMoleTimeParameter(1)));
} else {
kl.addParameter(new Parameter("koc", koc,
getMoleTimeParameter(1)));
}
kl.setFormula("koc*" + "RNAP_" + p.getId());
r.setKineticLaw(kl);
Utility.addReaction(document, r);
} else {
// TODO: Should ask Chris how to handle
// Both activated and repressed
org.sbml.libsbml.Reaction r = new org.sbml.libsbml.Reaction(
"R_basal_production_" + p.getId());
r.addReactant(new SpeciesReference("RNAP_" + p.getId(), 1));
for (SpeciesInterface species : p.getOutputs()) {
r.addProduct(new SpeciesReference(species.getId(), stoc));
}
r.addProduct(new SpeciesReference("RNAP_" + p.getId(), 1));
r.setReversible(false);
r.setFast(false);
KineticLaw kl = new KineticLaw();
if (p.getProperty(GlobalConstants.KBASAL_STRING) != null) {
kl.addParameter(new Parameter("basal", Double.parseDouble(p
.getProperty(GlobalConstants.KBASAL_STRING)),
getMoleTimeParameter(1)));
} else {
kl.addParameter(new Parameter("basal", basal,
getMoleTimeParameter(1)));
}
kl.setFormula("basal*" + "RNAP_" + p.getId());
r.setKineticLaw(kl);
Utility.addReaction(document, r);
PrintActivatedProductionVisitor v = new PrintActivatedProductionVisitor(
document, p, p.getActivators(), act, stoc);
v.run();
}
}
}
/**
 * Prints the decay reactions for all species in the network.
 *
 * @param document
 *            the SBML document
 */
private void printDecay(SBMLDocument document) {
// fall back to the default decay rate when no properties file was loaded
double decayRate = (properties != null)
? Double.parseDouble(properties.getParameter(GlobalConstants.KDECAY_STRING))
: .0075;
PrintDecaySpeciesVisitor decayVisitor = new PrintDecaySpeciesVisitor(
document, species.values(), decayRate);
decayVisitor.setBiochemicalAbstraction(biochemicalAbstraction);
decayVisitor.setDimerizationAbstraction(dimerizationAbstraction);
decayVisitor.run();
}
/**
 * Prints the dimerization reactions.
 *
 * @param document
 *            the SBML document to print to
 */
private void printDimerization(SBMLDocument document) {
    // Defaults, overridden by the GCM properties when present.
    double associationRate = .05;
    double maxDimer = 2;
    if (properties != null) {
        associationRate = Double.parseDouble(properties
                .getParameter(GlobalConstants.KASSOCIATION_STRING));
        maxDimer = Double.parseDouble(properties
                .getParameter(GlobalConstants.MAX_DIMER_STRING));
    }
    PrintDimerizationVisitor dimerVisitor = new PrintDimerizationVisitor(
            document, species.values(), associationRate, maxDimer);
    dimerVisitor.setBiochemicalAbstraction(biochemicalAbstraction);
    dimerVisitor.setDimerizationAbstraction(dimerizationAbstraction);
    dimerVisitor.run();
}
/**
 * Prints the biochemical reactions in the network.
 *
 * @param document
 *            the SBML document to print to
 */
private void printBiochemical(SBMLDocument document) {
    double bindingRate = .05; // default, overridden by the KBIO property
    if (properties != null) {
        bindingRate = Double.parseDouble(properties
                .getParameter(GlobalConstants.KBIO_STRING));
    }
    PrintBiochemicalVisitor bioVisitor = new PrintBiochemicalVisitor(
            document, species.values(), bindingRate);
    bioVisitor.setBiochemicalAbstraction(biochemicalAbstraction);
    bioVisitor.setDimerizationAbstraction(dimerizationAbstraction);
    bioVisitor.run();
}
/**
 * Prints the species in the network.
 *
 * @param document
 *            the SBML document
 */
private void printSpecies(SBMLDocument document) {
    // Default initial amount, overridden by the INITIAL property.
    double initialAmount = 0;
    if (properties != null) {
        initialAmount = Double.parseDouble(properties
                .getParameter(GlobalConstants.INITIAL_STRING));
    }
    PrintSpeciesVisitor speciesVisitor = new PrintSpeciesVisitor(
            document, species.values(), compartment, initialAmount);
    speciesVisitor.setBiochemicalAbstraction(biochemicalAbstraction);
    speciesVisitor.setDimerizationAbstraction(dimerizationAbstraction);
    speciesVisitor.run();
}
/**
 * Copies the species and reactions of every component's genetic network
 * into the given document.  Ids are prefixed with the component name,
 * except for port species, which keep their mapped port names.  Species
 * and reactions whose ids already exist in the document are skipped.
 *
 * @param document
 *            the SBML document to merge the components into
 * @param filename
 *            the filename passed through to each component's outputSBML
 */
private void printComponents(SBMLDocument document, String filename) {
    for (String s : properties.getComponents().keySet()) {
        GCMParser parser = new GCMParser(currentRoot + File.separator +
                properties.getComponents().get(s).getProperty("gcm"));
        GeneticNetwork network = parser.buildNetwork();
        SBMLDocument d = network.outputSBML(filename);
        Model m = d.getModel();
        for (int i = 0; i < m.getNumSpecies(); i++) {
            Species spec = m.getSpecies(i);
            // Prefix with the component name unless the species is a
            // port, in which case the mapped port name is used as-is.
            String newName = s + "_" + spec.getId();
            for (Object port : properties.getComponents().get(s).keySet()) {
                if (spec.getId().equals((String) port)) {
                    newName = (String) port;
                }
            }
            for (int j = 0; j < m.getNumReactions(); j++) {
                org.sbml.libsbml.Reaction r = m.getReaction(j);
                for (int k = 0; k < r.getNumModifiers(); k++) {
                    if (r.getModifier(k).getSpecies().equals(spec.getId())) {
                        r.getModifier(k).setSpecies(newName);
                    }
                }
                for (int k = 0; k < r.getNumProducts(); k++) {
                    if (r.getProduct(k).getSpecies().equals(spec.getId())) {
                        r.getProduct(k).setSpecies(newName);
                    }
                }
                for (int k = 0; k < r.getNumReactants(); k++) {
                    if (r.getReactant(k).getSpecies().equals(spec.getId())) {
                        r.getReactant(k).setSpecies(newName);
                    }
                }
                // BUG FIX: String.replace returns a new string; the
                // original discarded the result, so kinetic-law formulas
                // kept the old species ids.  NOTE(review): this is a
                // plain substring replace -- ids containing other ids as
                // substrings could still be mangled; confirm formulas are
                // space-delimited here.
                String formula = " " + r.getKineticLaw().getFormula() + " ";
                formula = formula.replace(spec.getId(), newName);
                r.getKineticLaw().setFormula(formula);
            }
            spec.setId(newName);
            // Only add the species if the document does not already
            // contain one with the same id.
            boolean add = true;
            for (int j = 0; j < document.getModel().getNumSpecies(); j++) {
                if (document.getModel().getSpecies(j).getId().equals(spec.getId())) {
                    add = false;
                }
            }
            if (add) {
                document.getModel().addSpecies(spec);
            }
        }
        for (int i = 0; i < m.getNumReactions(); i++) {
            org.sbml.libsbml.Reaction r = m.getReaction(i);
            String newName = s + "_" + r.getId();
            r.setId(newName);
            // Skip reactions whose (renamed) id already exists.
            boolean add = true;
            for (int j = 0; j < document.getModel().getNumReactions(); j++) {
                if (document.getModel().getReaction(j).getId().equals(r.getId())) {
                    add = false;
                }
            }
            if (add) {
                document.getModel().addReaction(r);
            }
        }
    }
}
/**
 * Prints the promoters in the network, along with the RNAP-bound form
 * and the activator/repressor-bound forms of each promoter.
 *
 * @param document
 *            the SBML document
 */
private void printPromoters(SBMLDocument document) {
    // Default promoter count, overridden per promoter when set.
    String defaultCount = "1";
    if (properties != null) {
        defaultCount = properties
                .getParameter(GlobalConstants.PROMOTER_COUNT_STRING);
    }
    for (Promoter promoter : promoters.values()) {
        // Promoters with no outputs produce nothing; skip them.
        if (promoter.getOutputs().size() == 0) {
            continue;
        }
        String count = defaultCount;
        if (promoter.getProperty(GlobalConstants.PROMOTER_COUNT_STRING) != null) {
            count = promoter
                    .getProperty(GlobalConstants.PROMOTER_COUNT_STRING);
        }
        // The free promoter itself.
        Species s = Utility.makeSpecies(promoter.getId(), compartment,
                Double.parseDouble(count));
        if ((promoter.getProperties() != null)
                && (promoter.getProperties().containsKey(GlobalConstants.NAME))) {
            s.setName(promoter.getProperty(GlobalConstants.NAME));
        }
        s.setHasOnlySubstanceUnits(true);
        Utility.addSpecies(document, s);
        // Promoter bound to RNAP, initially absent.
        s = Utility.makeSpecies("RNAP_" + promoter.getId(), compartment, 0);
        s.setHasOnlySubstanceUnits(true);
        Utility.addSpecies(document, s);
        // Bound forms for every activator and repressor.
        for (SpeciesInterface activator : promoter.getActivators()) {
            s = Utility.makeSpecies("RNAP_" + promoter.getId() + "_"
                    + activator.getId(), compartment, 0);
            s.setHasOnlySubstanceUnits(true);
            Utility.addSpecies(document, s);
        }
        for (SpeciesInterface repressor : promoter.getRepressors()) {
            s = Utility.makeSpecies("bound_" + promoter.getId() + "_"
                    + repressor.getId(), compartment, 0);
            s.setHasOnlySubstanceUnits(true);
            Utility.addSpecies(document, s);
        }
    }
}
/**
 * Prints only the promoter species themselves (no RNAP- or
 * regulator-bound forms).
 *
 * @param document
 *            the SBML document
 */
private void printOnlyPromoters(SBMLDocument document) {
    // Default promoter count, overridden per promoter when set.
    String defaultCount = "1";
    if (properties != null) {
        defaultCount = properties
                .getParameter(GlobalConstants.PROMOTER_COUNT_STRING);
    }
    for (Promoter promoter : promoters.values()) {
        String count = defaultCount;
        if (promoter.getProperty(GlobalConstants.PROMOTER_COUNT_STRING) != null) {
            count = promoter
                    .getProperty(GlobalConstants.PROMOTER_COUNT_STRING);
        }
        Species s = Utility.makeSpecies(promoter.getId(), compartment,
                Double.parseDouble(count));
        s.setHasOnlySubstanceUnits(true);
        Utility.addSpecies(document, s);
    }
}
/**
 * Prints the RNAP molecule to the document.
 *
 * @param document
 *            the SBML document
 */
private void printRNAP(SBMLDocument document) {
    double rnapCount = 30; // default, overridden by the RNAP property
    if (properties != null) {
        rnapCount = Double.parseDouble(properties
                .getParameter(GlobalConstants.RNAP_STRING));
    }
    Species rnap = Utility.makeSpecies("RNAP", compartment, rnapCount);
    rnap.setHasOnlySubstanceUnits(true);
    Utility.addSpecies(document, rnap);
}
/**
 * Initializes the network by deriving the implicit species and the
 * promoter wiring from the parsed reactions.
 */
private void initialize() {
	// NOTE(review): the calls appear order-sensitive -- buildDimers adds
	// dimer species to promoter activator/repressor lists that the later
	// steps read; confirm before reordering.
	buildDimers();
	buildPromoters();
	buildBiochemical();
}
/**
 * Configures the promoters: wires each non-biochemical, non-dimer
 * reaction input up as an activator or repressor, and records every
 * reaction output on its promoter.
 */
private void buildPromoters() {
    for (Promoter promoter : promoters.values()) {
        for (Reaction reaction : promoter.getActivatingReactions()) {
            if (!reaction.isBiochemical() && reaction.getDimer() <= 1
                    && !reaction.getInputState().equals("none")) {
                SpeciesInterface input = stateMap.get(reaction.getInputState());
                promoter.addActivator(input);
                promoter.addToReactionMap(input, reaction);
            }
            if (!reaction.getOutputState().equals("none")) {
                promoter.addOutput(stateMap.get(reaction.getOutputState()));
            }
        }
        for (Reaction reaction : promoter.getRepressingReactions()) {
            if (!reaction.isBiochemical() && reaction.getDimer() <= 1
                    && !reaction.getInputState().equals("none")) {
                SpeciesInterface input = stateMap.get(reaction.getInputState());
                promoter.addRepressor(input);
                promoter.addToReactionMap(input, reaction);
            }
            if (!reaction.getOutputState().equals("none")) {
                promoter.addOutput(stateMap.get(reaction.getOutputState()));
            }
        }
    }
}
/**
 * Builds the dimer list from reactions and species and adds it to the
 * promoter as input.
 */
private void buildDimers() {
    // First pass: create a dimer species for every dimerizing species,
    // for each dimer size up to its maximum.
    HashMap<String, SpeciesInterface> dimers = new HashMap<String, SpeciesInterface>();
    for (SpeciesInterface specie : species.values()) {
        int dimerValue = 1;
        if (properties != null) {
            // parseInt already returns int; the original (int) cast was redundant
            dimerValue = Integer.parseInt(properties
                    .getParameter(GlobalConstants.MAX_DIMER_STRING));
        }
        if (specie.getProperty(GlobalConstants.MAX_DIMER_STRING) != null) {
            dimerValue = Integer.parseInt(specie
                    .getProperty(GlobalConstants.MAX_DIMER_STRING));
        }
        for (int i = 2; i <= dimerValue; i++) {
            DimerSpecies dimer = new DimerSpecies(specie, i);
            // Dimers themselves do not decay.
            dimer.addProperty(GlobalConstants.KDECAY_STRING, "0");
            dimers.put(dimer.getId(), dimer);
        }
    }
    // Second pass: pick up dimers that only appear in promoter reactions.
    for (Promoter promoter : promoters.values()) {
        for (Reaction reaction : promoter.getActivatingReactions()) {
            if (reaction.getDimer() > 1) {
                DimerSpecies dimer = new DimerSpecies(stateMap.get(reaction
                        .getInputState()), reaction.getDimer());
                dimers.put(dimer.getId(), dimer);
                promoter.addToReactionMap(dimer, reaction);
                promoter.getActivators().add(dimer);
            }
        }
        for (Reaction reaction : promoter.getRepressingReactions()) {
            if (reaction.getDimer() > 1) {
                DimerSpecies dimer = new DimerSpecies(stateMap.get(reaction
                        .getInputState()), reaction.getDimer());
                dimers.put(dimer.getId(), dimer);
                promoter.addToReactionMap(dimer, reaction);
                promoter.getRepressors().add(dimer);
            }
        }
    }
    // Finally, register every dimer as a network species.
    for (SpeciesInterface specie : dimers.values()) {
        species.put(specie.getId(), specie);
    }
}
/**
 * Builds the biochemical species, and adds it to the promoter as input.
 * A promoter with exactly one biochemical reaction is rejected; two or
 * more are merged into a single BiochemicalSpecies.
 */
private void buildBiochemical() {
    for (Promoter promoter : promoters.values()) {
        // Activating biochemical reactions.
        ArrayList<SpeciesInterface> inputs = new ArrayList<SpeciesInterface>();
        ArrayList<Reaction> bioReactions = new ArrayList<Reaction>();
        for (Reaction reaction : promoter.getActivatingReactions()) {
            if (reaction.isBiochemical()) {
                bioReactions.add(reaction);
                inputs.add(stateMap.get(reaction.getInputState()));
            }
        }
        if (inputs.size() >= 2) {
            BiochemicalSpecies bio = new BiochemicalSpecies(inputs);
            promoter.addActivator(bio);
            for (Reaction reaction : bioReactions) {
                promoter.addToReactionMap(bio, reaction);
            }
            bio.addProperty(GlobalConstants.KDECAY_STRING, "0");
            species.put(bio.getId(), bio);
        } else if (inputs.size() == 1) {
            throw new IllegalStateException(
                    "Must have more than 1 biochemical reaction");
        }
        // Repressing biochemical reactions.
        inputs = new ArrayList<SpeciesInterface>();
        bioReactions = new ArrayList<Reaction>();
        for (Reaction reaction : promoter.getRepressingReactions()) {
            if (reaction.isBiochemical()) {
                bioReactions.add(reaction);
                inputs.add(stateMap.get(reaction.getInputState()));
            }
        }
        if (inputs.size() >= 2) {
            BiochemicalSpecies bio = new BiochemicalSpecies(inputs);
            for (Reaction reaction : bioReactions) {
                promoter.addToReactionMap(bio, reaction);
            }
            promoter.addRepressor(bio);
            bio.addProperty(GlobalConstants.KDECAY_STRING, "0");
            species.put(bio.getId(), bio);
        } else if (inputs.size() == 1) {
            throw new IllegalStateException(
                    "Must have more than 1 biochemical reaction");
        }
    }
}
/** @return the species in the network, keyed by id */
public HashMap<String, SpeciesInterface> getSpecies() {
    return this.species;
}
/** @param species the species map to use, keyed by id */
public void setSpecies(HashMap<String, SpeciesInterface> species) {
    this.species = species;
}
/** @return the map from state name to species */
public HashMap<String, SpeciesInterface> getStateMap() {
    return this.stateMap;
}
/** @param stateMap the map from state name to species to use */
public void setStateMap(HashMap<String, SpeciesInterface> stateMap) {
    this.stateMap = stateMap;
}
/** @return the promoters in the network, keyed by id */
public HashMap<String, Promoter> getPromoters() {
    return this.promoters;
}
/** @param promoters the promoter map to use, keyed by id */
public void setPromoters(HashMap<String, Promoter> promoters) {
    this.promoters = promoters;
}
/** @return the GCM file supplying extra network information (may be null) */
public GCMFile getProperties() {
    return this.properties;
}
/** @param properties the GCM file supplying extra network information */
public void setProperties(GCMFile properties) {
    this.properties = properties;
}
/**
 * Checks the consistency of the document and prints every error found.
 * (The method name keeps its original spelling to preserve the interface.)
 *
 * @param doc
 *            the SBML document to check
 */
private void checkConsistancy(SBMLDocument doc) {
    // checkConsistency() returns the number of failed checks.
    if (doc.checkConsistency() > 0) {
        long total = doc.getNumErrors();
        for (int i = 0; i < total; i++) {
            System.out.println(doc.getError(i).getMessage());
        }
    }
}
// Path of an existing SBML file to merge into ("" = generate from scratch)
private String sbmlDocument = "";
// The SBML document currently being generated (exposed via getCurrentDocument)
private static SBMLDocument currentDocument = null;
// Root directory used to resolve relative file names
private static String currentRoot = "";
// Abstraction flags, copied from the GCM properties by loadProperties
private boolean biochemicalAbstraction = false;
private boolean dimerizationAbstraction = false;
private boolean cooperationAbstraction = false;
// Species in the network, keyed by id
private HashMap<String, SpeciesInterface> species = null;
// Maps state names to species
private HashMap<String, SpeciesInterface> stateMap = null;
// Promoters in the network, keyed by id
private HashMap<String, Promoter> promoters = null;
// Extra network information loaded from the GCM file (may be null)
private GCMFile properties = null;
// Compartment that all species are placed in
private String compartment = "default";
/**
 * Returns the current SBML document being built
 *
 * @return the current SBML document being built
 */
public static SBMLDocument getCurrentDocument() {
	return currentDocument;
}
/**
 * Sets the current root directory used to resolve relative file names
 *
 * @param root the root directory
 */
public static void setRoot(String root) {
	currentRoot = root;
}
public static String getUnitString(ArrayList<String> unitNames,
ArrayList<Integer> exponents, ArrayList<Integer> multiplier,
Model model) {
// First build the name of the unit and see if it exists, start by
// sorting the units to build a unique string
for (int i = 0; i < unitNames.size(); i++) {
for (int j = i; j > 0; j
if (unitNames.get(j - 1).compareTo(unitNames.get(i)) > 0) {
Integer tempD = multiplier.get(j);
Integer tempI = exponents.get(j);
String tempS = unitNames.get(j);
multiplier.set(j, multiplier.get(j - 1));
unitNames.set(j, unitNames.get(j - 1));
exponents.set(j, exponents.get(j - 1));
multiplier.set(j - 1, tempD);
unitNames.set(j - 1, tempS);
exponents.set(j - 1, tempI);
}
}
}
UnitDefinition t = new UnitDefinition();
String name = "u_";
for (int i = 0; i < unitNames.size(); i++) {
String sign = "";
if (exponents.get(i).intValue() < 0) {
sign = "n";
}
name = name + multiplier.get(i) + "_" + unitNames.get(i) + "_"
+ sign + Math.abs(exponents.get(i)) + "_";
t.addUnit(new Unit(unitNames.get(i), exponents.get(i).intValue(),
multiplier.get(i).intValue()));
}
name = name.substring(0, name.length() - 1);
t.setId(name);
if (model.getUnitDefinition(name) == null) {
model.addUnitDefinition(t);
}
return name;
}
/**
 * Returns a unit name for a rate parameter based on the number of
 * molecules involved (mole^-(n-1) * second^-1).
 *
 * @param numMolecules
 *            the number of molecules involved
 * @return a unit name
 */
public static String getMoleTimeParameter(int numMolecules) {
    ArrayList<String> unitS = new ArrayList<String>();
    ArrayList<Integer> unitE = new ArrayList<Integer>();
    ArrayList<Integer> unitM = new ArrayList<Integer>();
    if (numMolecules > 1) {
        unitS.add("mole");
        // Integer.valueOf instead of the deprecated new Integer(...)
        unitE.add(Integer.valueOf(-(numMolecules - 1)));
        unitM.add(Integer.valueOf(0));
    }
    unitS.add("second");
    unitE.add(Integer.valueOf(-1));
    unitM.add(Integer.valueOf(0));
    return GeneticNetwork.getUnitString(unitS, unitE, unitM,
            currentDocument.getModel());
}
/**
 * Returns a unit name for a parameter based on the number of molecules
 * involved (mole^-(n-1)).
 *
 * @param numMolecules
 *            the number of molecules involved
 * @return a unit name
 */
public static String getMoleParameter(int numMolecules) {
    ArrayList<String> unitS = new ArrayList<String>();
    ArrayList<Integer> unitE = new ArrayList<Integer>();
    ArrayList<Integer> unitM = new ArrayList<Integer>();
    unitS.add("mole");
    // Integer.valueOf instead of the deprecated new Integer(...)
    unitE.add(Integer.valueOf(-(numMolecules - 1)));
    unitM.add(Integer.valueOf(0));
    return GeneticNetwork.getUnitString(unitS, unitE, unitM,
            currentDocument.getModel());
}
/** Convenience overload: parses the molecule count from a string. */
public static String getMoleParameter(String numMolecules) {
    return getMoleParameter(Integer.parseInt(numMolecules));
}
}
|
package gcm2sbml.network;
import gcm2sbml.parser.CompatibilityFixer;
import gcm2sbml.parser.GCMFile;
import gcm2sbml.parser.GCMParser;
import gcm2sbml.util.GlobalConstants;
import gcm2sbml.util.Utility;
import gcm2sbml.visitor.AbstractPrintVisitor;
import gcm2sbml.visitor.PrintActivatedBindingVisitor;
import gcm2sbml.visitor.PrintActivatedProductionVisitor;
import gcm2sbml.visitor.PrintBiochemicalVisitor;
import gcm2sbml.visitor.PrintDecaySpeciesVisitor;
import gcm2sbml.visitor.PrintDimerizationVisitor;
import gcm2sbml.visitor.PrintRepressionBindingVisitor;
import gcm2sbml.visitor.PrintSpeciesVisitor;
import java.awt.Dimension;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Set;
import javax.swing.JOptionPane;
import javax.swing.JScrollPane;
import javax.swing.JTextArea;
import org.sbml.libsbml.ASTNode;
import org.sbml.libsbml.Constraint;
import org.sbml.libsbml.EventAssignment;
import org.sbml.libsbml.FunctionDefinition;
import org.sbml.libsbml.InitialAssignment;
import org.sbml.libsbml.KineticLaw;
import org.sbml.libsbml.Model;
import org.sbml.libsbml.ModifierSpeciesReference;
import org.sbml.libsbml.Rule;
import org.sbml.libsbml.SBMLDocument;
import org.sbml.libsbml.SBMLWriter;
import org.sbml.libsbml.Species;
import org.sbml.libsbml.SpeciesReference;
import org.sbml.libsbml.Unit;
import org.sbml.libsbml.UnitDefinition;
import org.sbml.libsbml.libsbml;
import biomodelsim.BioSim;
/**
* This class represents a genetic network
*
* @author Nam
*
*/
public class GeneticNetwork {
// Path separator, escaped on Windows for regex-safe use (see constructors)
private String separator;
/**
 * Constructor
 *
 * @param species
 *            a hashmap of species
 * @param stateMap
 *            a hashmap of statename to species name
 * @param promoters
 *            a hashmap of promoters
 */
public GeneticNetwork(HashMap<String, SpeciesInterface> species,
		HashMap<String, SpeciesInterface> stateMap,
		HashMap<String, Promoter> promoters) {
	// Delegates to the full constructor with no GCM file; that
	// constructor also initializes the path separator.
	this(species, stateMap, promoters, null);
}
/**
 * Default constructor: only determines the path separator.
 */
public GeneticNetwork() {
    // Escape the Windows backslash, presumably because the separator is
    // used in regex-based string operations -- confirm at call sites.
    separator = File.separator.equals("\\") ? "\\\\" : File.separator;
}
/**
 * Constructor
 *
 * @param species
 *            a hashmap of species
 * @param stateMap
 *            a hashmap of statename to species name
 * @param promoters
 *            a hashmap of promoters
 * @param gcm
 *            a gcm file containing extra information (may be null)
 */
public GeneticNetwork(HashMap<String, SpeciesInterface> species,
        HashMap<String, SpeciesInterface> stateMap,
        HashMap<String, Promoter> promoters, GCMFile gcm) {
    // Escape the Windows backslash for regex-safe use.
    separator = File.separator.equals("\\") ? "\\\\" : File.separator;
    this.species = species;
    this.stateMap = stateMap;
    this.promoters = promoters;
    this.properties = gcm;
    AbstractPrintVisitor.setGCMFile(gcm);
    initialize();
}
/**
 * Builds a template SBML file containing only the species and the bare
 * promoters of the given network, and writes it to the given file.
 *
 * @param species
 *            the species to include
 * @param promoters
 *            the promoters to include
 * @param gcm
 *            the GCM file name (relative to the current root)
 * @param filename
 *            the file to write the template to
 */
public void buildTemplate(HashMap<String, SpeciesInterface> species,
        HashMap<String, Promoter> promoters, String gcm, String filename) {
    GCMFile file = new GCMFile(currentRoot);
    file.load(currentRoot + gcm);
    AbstractPrintVisitor.setGCMFile(file);
    setSpecies(species);
    setPromoters(promoters);
    SBMLDocument document = new SBMLDocument(BioSim.SBML_LEVEL, BioSim.SBML_VERSION);
    currentDocument = document;
    Model m = document.createModel();
    document.setModel(m);
    Utility.addCompartments(document, compartment);
    document.getModel().getCompartment(compartment).setSize(1);
    SBMLWriter writer = new SBMLWriter();
    printSpecies(document);
    printOnlyPromoters(document);
    PrintStream p = null;
    try {
        p = new PrintStream(new FileOutputStream(filename));
        m.setName("Created from " + gcm);
        m.setId(new File(filename).getName().replace(".xml", ""));
        m.setVolumeUnits("litre");
        m.setSubstanceUnits("mole");
        p.print(writer.writeToString(document));
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } finally {
        // BUG FIX: close the stream even when writing fails (resource leak).
        if (p != null) {
            p.close();
        }
    }
}
/**
 * Loads settings from a GCM file: stores it as the properties source
 * and copies its abstraction flags.
 *
 * @param gcm
 *            the GCM file to load settings from
 */
public void loadProperties(GCMFile gcm) {
    this.properties = gcm;
    this.dimerizationAbstraction = gcm.getDimAbs();
    this.biochemicalAbstraction = gcm.getBioAbs();
}
/**
 * Sets the path of an existing SBML file to merge the network into
 * (resolved against the current root by mergeSBML).
 *
 * @param file
 *            the SBML file path
 */
public void setSBMLFile(String file) {
    this.sbmlDocument = file;
}
/**
 * Sets an already-loaded SBML document to merge the network into.
 *
 * @param doc
 *            the SBML document
 */
public void setSBML(SBMLDocument doc) {
    this.document = doc;
}
/**
 * Outputs the network to a fresh SBML file.
 *
 * @param filename
 *            the file to write to
 * @return the sbml document
 */
public SBMLDocument outputSBML(String filename) {
    SBMLDocument doc = new SBMLDocument(BioSim.SBML_LEVEL, BioSim.SBML_VERSION);
    currentDocument = doc;
    doc.setModel(doc.createModel());
    Utility.addCompartments(doc, compartment);
    doc.getModel().getCompartment(compartment).setSize(1);
    return outputSBML(filename, doc);
}
/**
 * Prints the network (species, promoters, RNAP, decay, production and
 * binding reactions) into the given document, runs the libsbml
 * consistency checks (reporting errors in a dialog), and writes the
 * result to the given file.
 *
 * @param filename
 *            the file to write to
 * @param document
 *            the document to print the network into
 * @return the sbml document
 * @throws IllegalStateException
 *             if the document cannot be generated or written
 */
public SBMLDocument outputSBML(String filename, SBMLDocument document) {
    PrintStream p = null;
    try {
        Model m = document.getModel();
        SBMLWriter writer = new SBMLWriter();
        printSpecies(document);
        printPromoters(document);
        printRNAP(document);
        printDecay(document);
        printPromoterProduction(document);
        printPromoterBinding(document);
        p = new PrintStream(new FileOutputStream(filename));
        m.setName("Created from " + new File(filename).getName().replace("xml", "gcm"));
        m.setId(new File(filename).getName().replace(".xml", ""));
        m.setVolumeUnits("litre");
        m.setSubstanceUnits("mole");
        // Consistency checks: units/MathML/SBO/practice checks disabled.
        // (The original's "document != null" guard was dead code: the
        // document is already dereferenced above.)
        document.setConsistencyChecks(libsbml.LIBSBML_CAT_GENERAL_CONSISTENCY, true);
        document.setConsistencyChecks(libsbml.LIBSBML_CAT_IDENTIFIER_CONSISTENCY, true);
        document.setConsistencyChecks(libsbml.LIBSBML_CAT_UNITS_CONSISTENCY, false);
        document.setConsistencyChecks(libsbml.LIBSBML_CAT_MATHML_CONSISTENCY, false);
        document.setConsistencyChecks(libsbml.LIBSBML_CAT_SBO_CONSISTENCY, false);
        document.setConsistencyChecks(libsbml.LIBSBML_CAT_MODELING_PRACTICE, false);
        document.setConsistencyChecks(libsbml.LIBSBML_CAT_OVERDETERMINED_MODEL, true);
        long numErrors = document.checkConsistency();
        if (numErrors > 0) {
            // Collect every error into one scrollable dialog.
            String message = "";
            for (long i = 0; i < numErrors; i++) {
                message += i + ":" + document.getError(i).getMessage() + "\n";
            }
            JTextArea messageArea = new JTextArea(message);
            messageArea.setLineWrap(true);
            messageArea.setEditable(false);
            JScrollPane scroll = new JScrollPane();
            scroll.setMinimumSize(new Dimension(600, 600));
            scroll.setPreferredSize(new Dimension(600, 600));
            scroll.setViewportView(messageArea);
            JOptionPane.showMessageDialog(BioSim.frame(), scroll, "Generated SBML Has Errors",
                    JOptionPane.ERROR_MESSAGE);
        }
        p.print(writer.writeToString(document));
        return document;
    } catch (Exception e) {
        e.printStackTrace();
        // BUG FIX: keep the original exception as the cause instead of
        // discarding it.
        throw new IllegalStateException("Unable to output to SBML", e);
    } finally {
        // BUG FIX: close the stream even when writing fails (resource leak).
        if (p != null) {
            p.close();
        }
    }
}
/**
 * Merges the network into an SBML file: uses the document set via
 * setSBML if present, else loads the file set via setSBMLFile, else
 * generates a fresh document.
 *
 * @param filename
 *            the file to write the merged document to
 * @return the sbml document
 * @throws IllegalStateException
 *             if the document cannot be generated or written
 */
public SBMLDocument mergeSBML(String filename) {
    try {
        if (document == null) {
            if (sbmlDocument.equals("")) {
                // Nothing to merge with: generate from scratch.
                return outputSBML(filename);
            }
            SBMLDocument loaded = BioSim.readSBML(currentRoot + sbmlDocument);
            currentDocument = loaded;
            return outputSBML(filename, loaded);
        }
        currentDocument = document;
        return outputSBML(filename, document);
    }
    catch (Exception e) {
        e.printStackTrace();
        // BUG FIX: keep the original exception as the cause instead of
        // discarding it.
        throw new IllegalStateException("Unable to output to SBML", e);
    }
}
/**
 * Merges the network into the given SBML document and writes the result
 * to the given file.
 *
 * @param filename
 *            the file to write the merged document to
 * @param document
 *            the document to merge into
 * @return the sbml document
 * @throws IllegalStateException
 *             if the document cannot be generated or written
 */
public SBMLDocument mergeSBML(String filename, SBMLDocument document) {
    try {
        currentDocument = document;
        return outputSBML(filename, document);
    } catch (Exception e) {
        e.printStackTrace();
        // BUG FIX: keep the original exception as the cause instead of
        // discarding it.
        throw new IllegalStateException("Unable to output to SBML", e);
    }
}
/**
 * Prints each promoter binding: the reversible RNAP-binding reaction,
 * then the activator and repressor binding reactions via visitors.
 *
 * @param document
 *            the SBMLDocument to print to
 */
private void printPromoterBinding(SBMLDocument document) {
    for (Promoter p : promoters.values()) {
        // Promoters with no outputs produce nothing; skip them.
        if (p.getOutputs().size() == 0) continue;
        // RNAP + promoter <-> RNAP-bound promoter (reversible).
        org.sbml.libsbml.Reaction r = new org.sbml.libsbml.Reaction(BioSim.SBML_LEVEL, BioSim.SBML_VERSION);
        r.setId("R_RNAP_" + p.getId());
        r.addReactant(Utility.SpeciesReference("RNAP", 1));
        r.addReactant(Utility.SpeciesReference(p.getId(), 1));
        r.addProduct(Utility.SpeciesReference("RNAP_" + p.getId(), 1));
        r.setReversible(true);
        r.setFast(false);
        KineticLaw kl = r.createKineticLaw();
        kl.addParameter(Utility.Parameter(kRnapString, p.getKrnap(), getMoleParameter(2)));
        kl.addParameter(Utility.Parameter("kr", 1, getMoleTimeParameter(1)));
        kl.setFormula("kr*" + kRnapString + "*" + "RNAP*" + p.getId() + "-kr*RNAP_"
                + p.getId());
        Utility.addReaction(document, r);
        // Activator binding reactions.
        PrintActivatedBindingVisitor v = new PrintActivatedBindingVisitor(
                document, p);
        v.setBiochemicalAbstraction(biochemicalAbstraction);
        v.setDimerizationAbstraction(dimerizationAbstraction);
        v.setCooperationAbstraction(cooperationAbstraction);
        v.run();
        // Repressor binding reactions.
        // BUG FIX: removed a stray no-op statement "p.getRepressors();"
        // whose result was discarded.
        PrintRepressionBindingVisitor v2 = new PrintRepressionBindingVisitor(
                document, p);
        v2.setBiochemicalAbstraction(biochemicalAbstraction);
        v2.setDimerizationAbstraction(dimerizationAbstraction);
        v2.setCooperationAbstraction(cooperationAbstraction);
        v2.run();
    }
}
/**
 * Prints each promoter production reaction.  Activated-only promoters
 * get a basal production reaction plus activated production; promoters
 * with no activators get a constitutive production reaction; promoters
 * that are both activated and repressed get constitutive production
 * plus activated production (the original left a TODO about the proper
 * handling of that last case).
 *
 * @param document
 *            the SBMLDocument to print to
 */
private void printPromoterProduction(SBMLDocument document) {
    for (Promoter p : promoters.values()) {
        // Promoters with no outputs produce nothing; skip them.
        if (p.getOutputs().size() == 0) continue;
        if (p.getActivators().size() > 0 && p.getRepressors().size() == 0) {
            addProductionReaction(document, p, "R_basal_production_",
                    kBasalString, p.getKbasal());
            new PrintActivatedProductionVisitor(document, p).run();
        } else if (p.getActivators().size() == 0) {
            // Repressed-only or unregulated: constitutive production only.
            // (The original also tested getRepressors().size() >= 0,
            // which is always true.)
            addProductionReaction(document, p, "R_constitutive_production_",
                    kOcString, p.getKoc());
        } else {
            // Both activated and repressed.
            addProductionReaction(document, p, "R_constitutive_production_",
                    kOcString, p.getKoc());
            new PrintActivatedProductionVisitor(document, p).run();
        }
    }
}

/**
 * Creates one irreversible RNAP-modified production reaction for the
 * promoter: RNAP_<id> catalyses production of every output species with
 * rate parameter*RNAP_<id>.  Shared by all printPromoterProduction
 * branches.
 */
private void addProductionReaction(SBMLDocument document, Promoter p,
        String reactionPrefix, String parameterId, double rate) {
    org.sbml.libsbml.Reaction r = new org.sbml.libsbml.Reaction(BioSim.SBML_LEVEL, BioSim.SBML_VERSION);
    r.setId(reactionPrefix + p.getId());
    r.addModifier(Utility.ModifierSpeciesReference("RNAP_" + p.getId()));
    for (SpeciesInterface output : p.getOutputs()) {
        r.addProduct(Utility.SpeciesReference(output.getId(), p.getStoich()));
    }
    r.setReversible(false);
    r.setFast(false);
    KineticLaw kl = r.createKineticLaw();
    kl.addParameter(Utility.Parameter(parameterId, rate,
            getMoleTimeParameter(1)));
    kl.setFormula(parameterId + "*" + "RNAP_" + p.getId());
    Utility.addReaction(document, r);
}
/**
 * Prints the decay reactions for every species in the network.
 *
 * @param document
 *            the SBML document
 */
private void printDecay(SBMLDocument document) {
    PrintDecaySpeciesVisitor decayVisitor = new PrintDecaySpeciesVisitor(
            document, species.values());
    decayVisitor.setBiochemicalAbstraction(biochemicalAbstraction);
    decayVisitor.setDimerizationAbstraction(dimerizationAbstraction);
    decayVisitor.run();
}
/**
 * Prints the species in the network.
 *
 * @param document
 *            the SBML document
 */
private void printSpecies(SBMLDocument document) {
    PrintSpeciesVisitor speciesVisitor = new PrintSpeciesVisitor(
            document, species.values(), compartment);
    speciesVisitor.setBiochemicalAbstraction(biochemicalAbstraction);
    speciesVisitor.setDimerizationAbstraction(dimerizationAbstraction);
    speciesVisitor.run();
}
/**
 * Renames a variable inside a math AST by round-tripping through the
 * string form: print, substitute, re-parse.
 */
private ASTNode updateMathVar(ASTNode math, String origVar, String newVar) {
    return myParseFormula(
            updateFormulaVar(myFormulaToString(math), origVar, newVar));
}
/**
 * Converts an AST formula to its infix string form, rewriting libsbml's
 * long inverse-trig names (arccot, arcsinh, ...) to the short forms
 * (acot, asinh, ...) and normalizing zero-padded exponents.
 */
private String myFormulaToString(ASTNode mathFormula) {
	String formula = libsbml.formulaToString(mathFormula);
	// Rewrite inverse-trig names.  Order note: "arccot" is a prefix of
	// "arccoth", so the first replace already turns "arccoth" into
	// "acoth" and the second line is effectively a no-op (same for the
	// other *h variants); the net result is still the short names.
	formula = formula.replaceAll("arccot", "acot");
	formula = formula.replaceAll("arccoth", "acoth");
	formula = formula.replaceAll("arccsc", "acsc");
	formula = formula.replaceAll("arccsch", "acsch");
	formula = formula.replaceAll("arcsec", "asec");
	formula = formula.replaceAll("arcsech", "asech");
	formula = formula.replaceAll("arccosh", "acosh");
	formula = formula.replaceAll("arcsinh", "asinh");
	formula = formula.replaceAll("arctanh", "atanh");
	// Repeatedly strip a zero before the exponent sign (e.g.
	// "1.500e+01" -> "1.5e+01") until the string stops changing.
	String newformula = formula.replaceFirst("00e", "0e");
	while (!(newformula.equals(formula))) {
		formula = newformula;
		newformula = formula.replaceFirst("0e\\+", "e+");
		newformula = newformula.replaceFirst("0e-", "e-");
	}
	// If every digit before "e" was stripped, restore a single zero
	// ("1.e+" -> "1.0e+").
	formula = formula.replaceFirst("\\.e\\+", ".0e+");
	formula = formula.replaceFirst("\\.e-", ".0e-");
	return formula;
}
/**
 * Replaces whole-token occurrences of a variable in an infix formula
 * string.  A token is recognized by the delimiter pair around it
 * (spaces, parentheses, comma, caret).
 */
private String updateFormulaVar(String s, String origVar, String newVar) {
    // Pad with spaces so the first and last tokens also match a pair.
    String padded = " " + s + " ";
    // Delimiter contexts in which a variable reference can occur,
    // applied in the same order as the original chained replacements.
    String[][] contexts = {
            { " ", " " }, { " ", "(" }, { "(", ")" },
            { "(", " " }, { "(", "," }, { " ", ")" }, { " ", "^" } };
    for (String[] c : contexts) {
        padded = padded.replace(c[0] + origVar + c[1], c[0] + newVar + c[1]);
    }
    return padded.trim();
}
/**
 * Parses an infix formula into an AST, then fixes up time variables and
 * short inverse-trig function names.  Returns null if parsing fails.
 */
private ASTNode myParseFormula(String formula) {
    ASTNode node = libsbml.parseFormula(formula);
    if (node == null) {
        return null;
    }
    setTimeAndTrigVar(node);
    return node;
}
/**
 * Recursively fixes up a parsed AST: plain names "t"/"time" become the
 * built-in time node, and functions with the short inverse-trig names
 * (acot, asinh, ...) become the corresponding libsbml function types.
 * This is the inverse of the renaming done in myFormulaToString.
 */
private void setTimeAndTrigVar(ASTNode node) {
	// "t" and "time" are conventions for the simulation-time variable.
	if (node.getType() == libsbml.AST_NAME) {
		if (node.getName().equals("t")) {
			node.setType(libsbml.AST_NAME_TIME);
		}
		else if (node.getName().equals("time")) {
			node.setType(libsbml.AST_NAME_TIME);
		}
	}
	// Map short inverse-trig names back to dedicated AST node types.
	if (node.getType() == libsbml.AST_FUNCTION) {
		if (node.getName().equals("acot")) {
			node.setType(libsbml.AST_FUNCTION_ARCCOT);
		}
		else if (node.getName().equals("acoth")) {
			node.setType(libsbml.AST_FUNCTION_ARCCOTH);
		}
		else if (node.getName().equals("acsc")) {
			node.setType(libsbml.AST_FUNCTION_ARCCSC);
		}
		else if (node.getName().equals("acsch")) {
			node.setType(libsbml.AST_FUNCTION_ARCCSCH);
		}
		else if (node.getName().equals("asec")) {
			node.setType(libsbml.AST_FUNCTION_ARCSEC);
		}
		else if (node.getName().equals("asech")) {
			node.setType(libsbml.AST_FUNCTION_ARCSECH);
		}
		else if (node.getName().equals("acosh")) {
			node.setType(libsbml.AST_FUNCTION_ARCCOSH);
		}
		else if (node.getName().equals("asinh")) {
			node.setType(libsbml.AST_FUNCTION_ARCSINH);
		}
		else if (node.getName().equals("atanh")) {
			node.setType(libsbml.AST_FUNCTION_ARCTANH);
		}
	}
	// Recurse into every child of the node.
	for (int c = 0; c < node.getNumChildren(); c++)
		setTimeAndTrigVar(node.getChild(c));
}
/**
 * Renames every reference to {@code origId} to {@code newId} throughout the
 * SBML document: species/compartment attributes, reaction participants,
 * stoichiometry math, kinetic laws, initial assignments, rules, constraints
 * and events. Math expressions are rewritten via updateMathVar.
 *
 * @param isSpecies whether the renamed identifier denotes a species; only
 *                  then are speciesReference/modifier targets updated
 * @param origId    the identifier being replaced
 * @param newId     the replacement identifier
 * @param document  the SBML document to rewrite in place
 */
private void updateVarId(boolean isSpecies, String origId, String newId, SBMLDocument document) {
// Nothing to rewrite when the id is unchanged.
if (origId.equals(newId))
return;
Model model = document.getModel();
// Species: compartment and speciesType attributes may reference origId.
for (int i = 0; i < model.getNumSpecies(); i++) {
org.sbml.libsbml.Species species = (org.sbml.libsbml.Species) model.getListOfSpecies().get(i);
if (species.getCompartment().equals(origId)) {
species.setCompartment(newId);
}
if (species.getSpeciesType().equals(origId)) {
species.setSpeciesType(newId);
}
}
// Compartments: only the compartmentType attribute can reference origId.
for (int i = 0; i < model.getNumCompartments(); i++) {
org.sbml.libsbml.Compartment compartment = (org.sbml.libsbml.Compartment) model.getListOfCompartments().get(i);
if (compartment.getCompartmentType().equals(origId)) {
compartment.setCompartmentType(newId);
}
}
// Reactions: products, modifiers, reactants, stoichiometry math, kinetic law.
for (int i = 0; i < model.getNumReactions(); i++) {
org.sbml.libsbml.Reaction reaction = (org.sbml.libsbml.Reaction) model.getListOfReactions().get(i);
for (int j = 0; j < reaction.getNumProducts(); j++) {
if (reaction.getProduct(j).isSetSpecies()) {
SpeciesReference specRef = reaction.getProduct(j);
if (isSpecies && origId.equals(specRef.getSpecies())) {
specRef.setSpecies(newId);
}
// Stoichiometry math may reference the id even for non-species renames.
if (specRef.isSetStoichiometryMath()) {
specRef.getStoichiometryMath().setMath(updateMathVar(specRef
.getStoichiometryMath().getMath(), origId, newId));
}
}
}
// Modifier references only ever point at species.
if (isSpecies) {
for (int j = 0; j < reaction.getNumModifiers(); j++) {
if (reaction.getModifier(j).isSetSpecies()) {
ModifierSpeciesReference specRef = reaction.getModifier(j);
if (origId.equals(specRef.getSpecies())) {
specRef.setSpecies(newId);
}
}
}
}
for (int j = 0; j < reaction.getNumReactants(); j++) {
if (reaction.getReactant(j).isSetSpecies()) {
SpeciesReference specRef = reaction.getReactant(j);
if (isSpecies && origId.equals(specRef.getSpecies())) {
specRef.setSpecies(newId);
}
if (specRef.isSetStoichiometryMath()) {
specRef.getStoichiometryMath().setMath(updateMathVar(specRef
.getStoichiometryMath().getMath(), origId, newId));
}
}
}
// NOTE(review): assumes every reaction has a kinetic law — confirm upstream guarantees this.
reaction.getKineticLaw().setMath(
updateMathVar(reaction.getKineticLaw().getMath(), origId, newId));
}
// Initial assignments: symbol attribute plus the assignment math.
if (model.getNumInitialAssignments() > 0) {
for (int i = 0; i < model.getNumInitialAssignments(); i++) {
InitialAssignment init = (InitialAssignment) model.getListOfInitialAssignments().get(i);
if (origId.equals(init.getSymbol())) {
init.setSymbol(newId);
}
init.setMath(updateMathVar(init.getMath(), origId, newId));
}
}
// Rules: variable attribute plus the rule math.
if (model.getNumRules() > 0) {
for (int i = 0; i < model.getNumRules(); i++) {
Rule rule = (Rule) model.getListOfRules().get(i);
if (rule.isSetVariable() && origId.equals(rule.getVariable())) {
rule.setVariable(newId);
}
rule.setMath(updateMathVar(rule.getMath(), origId, newId));
}
}
// Constraints: only the math can reference the id.
if (model.getNumConstraints() > 0) {
for (int i = 0; i < model.getNumConstraints(); i++) {
Constraint constraint = (Constraint) model.getListOfConstraints().get(i);
constraint.setMath(updateMathVar(constraint.getMath(), origId, newId));
}
}
// Events: trigger math, delay math, and each event assignment.
if (model.getNumEvents() > 0) {
for (int i = 0; i < model.getNumEvents(); i++) {
org.sbml.libsbml.Event event = (org.sbml.libsbml.Event) model.getListOfEvents().get(i);
if (event.isSetTrigger()) {
event.getTrigger().setMath(updateMathVar(event.getTrigger().getMath(), origId, newId));
}
if (event.isSetDelay()) {
event.getDelay().setMath(updateMathVar(event.getDelay().getMath(), origId, newId));
}
for (int j = 0; j < event.getNumEventAssignments(); j++) {
EventAssignment ea = (EventAssignment) event.getListOfEventAssignments().get(j);
if (ea.getVariable().equals(origId)) {
ea.setVariable(newId);
}
if (ea.isSetMath()) {
ea.setMath(updateMathVar(ea.getMath(), origId, newId));
}
}
}
}
}
/**
 * Merges a component's SBML model into the main document. Every identifier in
 * the component model is prefixed with {@code compName + "_"} (port species
 * get a special "_compName_" mapping from the component's property table and
 * lose their degradation reaction), and each renamed element is copied into
 * the main model unless an element with the same id is already present.
 *
 * @param mainDoc  the document being assembled
 * @param doc      the component's document (rewritten in place during the merge)
 * @param compName the component instance name used as the id prefix
 */
private void unionSBML(SBMLDocument mainDoc, SBMLDocument doc, String compName) {
Model m = doc.getModel();
// Compartment types: prefix, then copy if the id is not already taken.
for (int i = 0; i < m.getNumCompartmentTypes(); i ++) {
org.sbml.libsbml.CompartmentType c = m.getCompartmentType(i);
String newName = compName + "_" + c.getId();
updateVarId(false, c.getId(), newName, doc);
c.setId(newName);
boolean add = true;
for (int j = 0; j < mainDoc.getModel().getNumCompartmentTypes(); j ++) {
if (mainDoc.getModel().getCompartmentType(j).getId().equals(c.getId())) {
add = false;
}
}
if (add) {
mainDoc.getModel().addCompartmentType(c);
}
}
// Compartments.
for (int i = 0; i < m.getNumCompartments(); i ++) {
org.sbml.libsbml.Compartment c = m.getCompartment(i);
String newName = compName + "_" + c.getId();
updateVarId(false, c.getId(), newName, doc);
c.setId(newName);
boolean add = true;
for (int j = 0; j < mainDoc.getModel().getNumCompartments(); j ++) {
if (mainDoc.getModel().getCompartment(j).getId().equals(c.getId())) {
add = false;
}
}
if (add) {
mainDoc.getModel().addCompartment(c);
}
}
// Species types.
for (int i = 0; i < m.getNumSpeciesTypes(); i ++) {
org.sbml.libsbml.SpeciesType s = m.getSpeciesType(i);
String newName = compName + "_" + s.getId();
updateVarId(false, s.getId(), newName, doc);
s.setId(newName);
boolean add = true;
for (int j = 0; j < mainDoc.getModel().getNumSpeciesTypes(); j ++) {
if (mainDoc.getModel().getSpeciesType(j).getId().equals(s.getId())) {
add = false;
}
}
if (add) {
mainDoc.getModel().addSpeciesType(s);
}
}
// Species, pass 1: rename. RNAP is shared across components and kept as-is.
for (int i = 0; i < m.getNumSpecies(); i ++) {
Species spec = m.getSpecies(i);
if (!spec.getId().equals("RNAP")) {
String newName = compName + "_" + spec.getId();
// Ports get the id from the component's property table instead, marked
// with a leading "_compName_" that pass 2 strips; their internal
// degradation reaction is removed since the outer model owns them.
for (Object port : properties.getComponents().get(compName).keySet()) {
if (spec.getId().equals((String) port)) {
newName = "_" + compName + "_" + properties.getComponents().get(compName).getProperty((String) port);
int removeDeg = -1;
for (int j = 0; j < m.getNumReactions(); j ++) {
org.sbml.libsbml.Reaction r = m.getReaction(j);
if (r.getId().equals("Degradation_" + spec.getId())) {
removeDeg = j;
}
}
if (removeDeg != -1) {
m.getListOfReactions().remove(removeDeg);
}
}
}
updateVarId(true, spec.getId(), newName, doc);
spec.setId(newName);
}
}
// Species, pass 2: strip the temporary "_compName_" marker from ports,
// then copy all species into the main model if not already present.
for (int i = 0; i < m.getNumSpecies(); i ++) {
Species spec = m.getSpecies(i);
if (spec.getId().startsWith("_" + compName + "_")) {
updateVarId(true, spec.getId(), spec.getId().substring(2 + compName.length()), doc);
spec.setId(spec.getId().substring(2 + compName.length()));
}
boolean add = true;
for (int j = 0; j < mainDoc.getModel().getNumSpecies(); j ++) {
if (mainDoc.getModel().getSpecies(j).getId().equals(spec.getId())) {
add = false;
}
}
if (add) {
mainDoc.getModel().addSpecies(spec);
}
}
// Parameters.
for (int i = 0; i < m.getNumParameters(); i ++) {
org.sbml.libsbml.Parameter p = m.getParameter(i);
String newName = compName + "_" + p.getId();
updateVarId(false, p.getId(), newName, doc);
p.setId(newName);
boolean add = true;
for (int j = 0; j < mainDoc.getModel().getNumParameters(); j ++) {
if (mainDoc.getModel().getParameter(j).getId().equals(p.getId())) {
add = false;
}
}
if (add) {
mainDoc.getModel().addParameter(p);
}
}
// Reactions.
for (int i = 0; i < m.getNumReactions(); i ++) {
org.sbml.libsbml.Reaction r = m.getReaction(i);
String newName = compName + "_" + r.getId();
updateVarId(false, r.getId(), newName, doc);
r.setId(newName);
boolean add = true;
for (int j = 0; j < mainDoc.getModel().getNumReactions(); j ++) {
if (mainDoc.getModel().getReaction(j).getId().equals(r.getId())) {
add = false;
}
}
if (add) {
mainDoc.getModel().addReaction(r);
}
}
// Initial assignments, rules and constraints have no ids: copy unconditionally.
for (int i = 0; i < m.getNumInitialAssignments(); i ++) {
InitialAssignment init = (InitialAssignment) m.getListOfInitialAssignments().get(i);
mainDoc.getModel().addInitialAssignment(init);
}
for (int i = 0; i < m.getNumRules(); i++) {
org.sbml.libsbml.Rule r = m.getRule(i);
mainDoc.getModel().addRule(r);
}
for (int i = 0; i < m.getNumConstraints(); i++) {
Constraint constraint = (Constraint) m.getListOfConstraints().get(i);
mainDoc.getModel().addConstraint(constraint);
}
// Events: prefix and copy if new.
for (int i = 0; i < m.getNumEvents(); i++) {
org.sbml.libsbml.Event event = (org.sbml.libsbml.Event) m.getListOfEvents().get(i);
String newName = compName + "_" + event.getId();
updateVarId(false, event.getId(), newName, doc);
event.setId(newName);
boolean add = true;
for (int j = 0; j < mainDoc.getModel().getNumEvents(); j ++) {
if (mainDoc.getModel().getEvent(j).getId().equals(event.getId())) {
add = false;
}
}
if (add) {
mainDoc.getModel().addEvent(event);
}
}
// Unit and function definitions keep their ids (shared vocabulary);
// copy only when the main model lacks them.
for (int i = 0; i < m.getNumUnitDefinitions(); i ++) {
UnitDefinition u = m.getUnitDefinition(i);
boolean add = true;
for (int j = 0; j < mainDoc.getModel().getNumUnitDefinitions(); j ++) {
if (mainDoc.getModel().getUnitDefinition(j).getId().equals(u.getId())) {
add = false;
}
}
if (add) {
mainDoc.getModel().addUnitDefinition(u);
}
}
for (int i = 0; i < m.getNumFunctionDefinitions(); i ++) {
FunctionDefinition f = m.getFunctionDefinition(i);
boolean add = true;
for (int j = 0; j < mainDoc.getModel().getNumFunctionDefinitions(); j ++) {
if (mainDoc.getModel().getFunctionDefinition(j).getId().equals(f.getId())) {
add = false;
}
}
if (add) {
mainDoc.getModel().addFunctionDefinition(f);
}
}
}
/**
 * Builds the genetic network of every component referenced by the GCM
 * properties and merges each component's SBML into the given document.
 */
private void printComponents(SBMLDocument document, String filename) {
    for (String componentId : properties.getComponents().keySet()) {
        String gcmPath = currentRoot + separator
                + properties.getComponents().get(componentId).getProperty("gcm");
        GCMParser componentParser = new GCMParser(gcmPath);
        componentParser.setParameters(properties.getParameters());
        GeneticNetwork subNetwork = componentParser.buildNetwork();
        unionSBML(document, subNetwork.mergeSBML(filename), componentId);
    }
}
/**
 * Adds the promoter species to the document: the bare promoter, the
 * RNAP-bound promoter, and one bound-complex species per activator and
 * repressor. Promoters without outputs are skipped entirely.
 *
 * @param document the SBML document
 */
private void printPromoters(SBMLDocument document) {
    for (Promoter p : promoters.values()) {
        // A promoter that drives nothing contributes no species.
        if (p.getOutputs().size() == 0) continue;
        // The promoter itself.
        Species promoterSpecies = Utility.makeSpecies(p.getId(), compartment, p.getPcount());
        if ((p.getProperties() != null)
                && (p.getProperties().containsKey(GlobalConstants.NAME))) {
            promoterSpecies.setName(p.getProperty(GlobalConstants.NAME));
        }
        promoterSpecies.setHasOnlySubstanceUnits(true);
        Utility.addSpecies(document, promoterSpecies);
        // The promoter with RNAP bound.
        Species rnapBound = Utility.makeSpecies("RNAP_" + p.getId(), compartment, 0);
        rnapBound.setHasOnlySubstanceUnits(true);
        Utility.addSpecies(document, rnapBound);
        // One complex species per activator binding (with RNAP).
        for (SpeciesInterface activator : p.getActivators()) {
            Species complex = Utility.makeSpecies(
                    "RNAP_" + p.getId() + "_" + activator.getId(), compartment, 0);
            complex.setHasOnlySubstanceUnits(true);
            Utility.addSpecies(document, complex);
        }
        // One complex species per repressor binding.
        for (SpeciesInterface repressor : p.getRepressors()) {
            Species complex = Utility.makeSpecies(
                    "bound_" + p.getId() + "_" + repressor.getId(), compartment, 0);
            complex.setHasOnlySubstanceUnits(true);
            Utility.addSpecies(document, complex);
        }
    }
}
/**
 * Adds only the bare promoter species (no RNAP or bound complexes) to the
 * document, one per promoter.
 *
 * @param document the SBML document
 */
private void printOnlyPromoters(SBMLDocument document) {
    for (Promoter p : promoters.values()) {
        Species promoterSpecies = Utility.makeSpecies(p.getId(), compartment, p.getPcount());
        promoterSpecies.setHasOnlySubstanceUnits(true);
        Utility.addSpecies(document, promoterSpecies);
    }
}
/**
 * Adds the RNAP species to the document, with its initial amount taken from
 * the RNAP property when properties are available (30 otherwise).
 *
 * @param document the SBML document
 */
private void printRNAP(SBMLDocument document) {
    double rnapAmount = 30;
    if (properties != null) {
        rnapAmount = Double.parseDouble(
                properties.getParameter(GlobalConstants.RNAP_STRING));
    }
    Species rnap = Utility.makeSpecies("RNAP", compartment, rnapAmount);
    rnap.setHasOnlySubstanceUnits(true);
    Utility.addSpecies(document, rnap);
}
/**
 * Initializes the network: wires promoter inputs/outputs first, then builds
 * the composite dimer and biochemical species that depend on that wiring.
 */
private void initialize() {
buildPromoters();
buildDimers();
buildBiochemical();
}
/**
 * Wires each promoter to its activators, repressors and outputs. Only plain
 * reactions are attached here — biochemical and dimerized inputs are handled
 * later by buildBiochemical() and buildDimers().
 */
private void buildPromoters() {
    for (Promoter promoter : promoters.values()) {
        for (Reaction reaction : promoter.getActivatingReactions()) {
            if (!reaction.isBiochemical() && reaction.getDimer() <= 1) {
                SpeciesInterface input = stateMap.get(reaction.getInputState());
                promoter.addActivator(input);
                promoter.addToReactionMap(input, reaction);
            }
            if (!reaction.getOutputState().equals("none")) {
                promoter.addOutput(stateMap.get(reaction.getOutputState()));
            }
        }
        for (Reaction reaction : promoter.getRepressingReactions()) {
            if (!reaction.isBiochemical() && reaction.getDimer() <= 1) {
                SpeciesInterface input = stateMap.get(reaction.getInputState());
                promoter.addRepressor(input);
                promoter.addToReactionMap(input, reaction);
            }
            if (!reaction.getOutputState().equals("none")) {
                promoter.addOutput(stateMap.get(reaction.getOutputState()));
            }
        }
    }
}
/**
 * Creates a dimer species for every reaction whose dimerization count is
 * greater than one, registers it on the promoter as activator or repressor,
 * and records it in the species map.
 */
private void buildDimers() {
    // Go through reaction list to see if any are missed
    for (Promoter promoter : promoters.values()) {
        for (Reaction reaction : promoter.getActivatingReactions()) {
            double dimerCount = reaction.getDimer();
            if (dimerCount > 1) {
                SpeciesInterface monomer = stateMap.get(reaction.getInputState());
                DimerSpecies dimer = new DimerSpecies(monomer,
                        monomer.getProperty(GlobalConstants.KASSOCIATION_STRING), dimerCount);
                promoter.addToReactionMap(dimer, reaction);
                promoter.addActivator(dimer);
                species.put(dimer.getId(), dimer);
            }
        }
        for (Reaction reaction : promoter.getRepressingReactions()) {
            double dimerCount = reaction.getDimer();
            if (dimerCount > 1) {
                SpeciesInterface monomer = stateMap.get(reaction.getInputState());
                DimerSpecies dimer = new DimerSpecies(monomer,
                        monomer.getProperty(GlobalConstants.KASSOCIATION_STRING), dimerCount);
                promoter.addToReactionMap(dimer, reaction);
                promoter.addRepressor(dimer);
                species.put(dimer.getId(), dimer);
            }
        }
    }
}
/**
 * Builds the biochemical species for each promoter and registers them as
 * activators or repressors. The activating and repressing passes are
 * identical except for how the resulting species attaches to the promoter,
 * so both delegate to a single helper.
 */
private void buildBiochemical() {
    // Cycle through each promoter
    for (Promoter promoter : promoters.values()) {
        attachBiochemical(promoter, promoter.getActivatingReactions(), true);
        attachBiochemical(promoter, promoter.getRepressingReactions(), false);
    }
}

/**
 * Collects the biochemical reactions from {@code candidates}, builds (or
 * reuses, via isUnique) the composite biochemical species for their inputs,
 * and attaches it to the promoter.
 *
 * @param promoter   the promoter being configured
 * @param candidates activating or repressing reactions of the promoter
 * @param activator  true to register the species as an activator, false as a repressor
 * @throws IllegalStateException if exactly one biochemical reaction is found —
 *                               a biochemical complex needs at least two inputs
 */
private void attachBiochemical(Promoter promoter, Iterable<Reaction> candidates,
        boolean activator) {
    ArrayList<SpeciesInterface> biochem = new ArrayList<SpeciesInterface>();
    ArrayList<Reaction> reactions = new ArrayList<Reaction>();
    for (Reaction reaction : candidates) {
        if (reaction.isBiochemical()) {
            reactions.add(reaction);
            biochem.add(stateMap.get(reaction.getInputState()));
        }
    }
    if (biochem.size() == 1) {
        throw new IllegalStateException(
                "Must have more than 1 biochemical reaction");
    } else if (biochem.size() >= 2) {
        // Reuse an existing biochemical species when one with the same
        // constituents has already been constructed.
        BiochemicalSpecies bio;
        String unique = isUnique(biochem);
        if (unique.equals("unique")) {
            bio = new BiochemicalSpecies(biochem);
        } else {
            bio = (BiochemicalSpecies) species.get(unique);
        }
        if (activator) {
            promoter.addActivator(bio);
        } else {
            promoter.addRepressor(bio);
        }
        for (Reaction reaction : reactions) {
            promoter.addToReactionMap(bio, reaction);
        }
        species.put(bio.getId(), bio);
    }
}
/**
 * Checks whether the biochemical species about to be constructed already
 * exists in the species map.
 *
 * @return the key of the existing "Biochemical..." species whose name
 *         contains every constituent id, or the sentinel "unique" if none does
 */
private String isUnique(ArrayList<SpeciesInterface> biochem) {
    for (String key : species.keySet()) {
        if (!key.startsWith("Biochemical")) {
            continue;
        }
        // Strip the "Biochemical" prefix (11 characters).
        String combinedName = key.substring(11);
        // NOTE(review): contains() matches substrings, so an id that is a
        // prefix of another could false-positive here — confirm ids are
        // never substrings of each other.
        int matched = 0;
        for (SpeciesInterface constituent : biochem) {
            if (combinedName.contains(constituent.getId())) {
                matched++;
            }
        }
        if (matched == biochem.size()) {
            return key;
        }
    }
    return "unique";
}
/** Returns the map from species id to species object in this network. */
public HashMap<String, SpeciesInterface> getSpecies() {
return species;
}
/** Replaces the map from species id to species object. */
public void setSpecies(HashMap<String, SpeciesInterface> species) {
this.species = species;
}
/** Returns the map from state name to species used to resolve reaction inputs/outputs. */
public HashMap<String, SpeciesInterface> getStateMap() {
return stateMap;
}
/** Replaces the map from state name to species. */
public void setStateMap(HashMap<String, SpeciesInterface> stateMap) {
this.stateMap = stateMap;
}
/** Returns the map from promoter id to promoter. */
public HashMap<String, Promoter> getPromoters() {
return promoters;
}
/** Replaces the map from promoter id to promoter. */
public void setPromoters(HashMap<String, Promoter> promoters) {
this.promoters = promoters;
}
/** Returns the GCM properties backing this network. */
public GCMFile getProperties() {
return properties;
}
/** Replaces the GCM properties backing this network. */
public void setProperties(GCMFile properties) {
this.properties = properties;
}
/**
 * Runs libSBML's consistency checks on the document and prints every
 * reported problem to standard out. (Method name typo is preserved to
 * avoid breaking existing callers.)
 *
 * @param doc the SBML document to check
 */
private void checkConsistancy(SBMLDocument doc) {
    if (doc.checkConsistency() <= 0) {
        return;
    }
    for (int i = 0; i < doc.getNumErrors(); i++) {
        System.out.println(doc.getError(i).getMessage());
    }
}
private String sbmlDocument = "";
private SBMLDocument document = null;
private static SBMLDocument currentDocument = null;
private static String currentRoot = "";
private boolean biochemicalAbstraction = false;
private boolean dimerizationAbstraction = false;
private boolean cooperationAbstraction = false;
private HashMap<String, SpeciesInterface> species = null;
private HashMap<String, SpeciesInterface> stateMap = null;
private HashMap<String, Promoter> promoters = null;
private GCMFile properties = null;
private String compartment = "default";
private String kRnapString = CompatibilityFixer
.getSBMLName(GlobalConstants.RNAP_BINDING_STRING);
private String kBasalString = CompatibilityFixer
.getSBMLName(GlobalConstants.KBASAL_STRING);
private String kOcString = CompatibilityFixer
.getSBMLName(GlobalConstants.OCR_STRING);
/**
 * Returns the current SBML document being built.
 *
 * @return the current SBML document being built
 */
public static SBMLDocument getCurrentDocument() {
return currentDocument;
}
/**
 * Sets the current root directory used to resolve component GCM files.
 *
 * @param root the root directory
 */
public static void setRoot(String root) {
currentRoot = root;
}
public static String getUnitString(ArrayList<String> unitNames,
ArrayList<Integer> exponents, ArrayList<Integer> multiplier,
Model model) {
// First build the name of the unit and see if it exists, start by
// sorting the units to build a unique string
for (int i = 0; i < unitNames.size(); i++) {
for (int j = i; j > 0; j
if (unitNames.get(j - 1).compareTo(unitNames.get(i)) > 0) {
Integer tempD = multiplier.get(j);
Integer tempI = exponents.get(j);
String tempS = unitNames.get(j);
multiplier.set(j, multiplier.get(j - 1));
unitNames.set(j, unitNames.get(j - 1));
exponents.set(j, exponents.get(j - 1));
multiplier.set(j - 1, tempD);
unitNames.set(j - 1, tempS);
exponents.set(j - 1, tempI);
}
}
}
UnitDefinition t = new UnitDefinition(BioSim.SBML_LEVEL, BioSim.SBML_VERSION);
String name = "u_";
for (int i = 0; i < unitNames.size(); i++) {
String sign = "";
if (exponents.get(i).intValue() < 0) {
sign = "n";
}
name = name + multiplier.get(i) + "_" + unitNames.get(i) + "_"
+ sign + Math.abs(exponents.get(i)) + "_";
Unit u = t.createUnit();
u.setKind(libsbml.UnitKind_forName(unitNames.get(i)));
u.setExponent(exponents.get(i).intValue());
u.setMultiplier(multiplier.get(i).intValue());
u.setScale(0);
}
name = name.substring(0, name.length() - 1);
t.setId(name);
if (model.getUnitDefinition(name) == null) {
model.addUnitDefinition(t);
}
return name;
}
/**
 * Returns the id of a unit of the form mole^-(numMolecules-1) * second^-1,
 * suitable for a kinetic parameter involving the given number of molecules.
 * The mole factor is omitted when only one molecule is involved.
 *
 * @param numMolecules the number of molecules involved
 * @return the id of the (possibly newly created) unit definition
 */
public static String getMoleTimeParameter(int numMolecules) {
ArrayList<String> unitS = new ArrayList<String>();
ArrayList<Integer> unitE = new ArrayList<Integer>();
ArrayList<Integer> unitM = new ArrayList<Integer>();
if (numMolecules > 1) {
unitS.add("mole");
// Integer.valueOf replaces the deprecated Integer(int) constructor.
unitE.add(Integer.valueOf(-(numMolecules - 1)));
unitM.add(Integer.valueOf(1));
}
unitS.add("second");
unitE.add(Integer.valueOf(-1));
unitM.add(Integer.valueOf(1));
return GeneticNetwork.getUnitString(unitS, unitE, unitM,
currentDocument.getModel());
}
/**
 * Returns the id of a unit of the form mole^-(numMolecules-1), suitable for
 * an equilibrium parameter involving the given number of molecules.
 *
 * @param numMolecules the number of molecules involved
 * @return the id of the (possibly newly created) unit definition
 */
public static String getMoleParameter(int numMolecules) {
ArrayList<String> unitS = new ArrayList<String>();
ArrayList<Integer> unitE = new ArrayList<Integer>();
ArrayList<Integer> unitM = new ArrayList<Integer>();
unitS.add("mole");
// Integer.valueOf replaces the deprecated Integer(int) constructor.
unitE.add(Integer.valueOf(-(numMolecules - 1)));
unitM.add(Integer.valueOf(1));
return GeneticNetwork.getUnitString(unitS, unitE, unitM,
currentDocument.getModel());
}
/**
 * Convenience overload: parses {@code numMolecules} and delegates to
 * {@link #getMoleParameter(int)}.
 */
public static String getMoleParameter(String numMolecules) {
return getMoleParameter(Integer.parseInt(numMolecules));
}
}
|
package com.intellij.coverage;
import com.intellij.codeInsight.CodeInsightBundle;
import com.intellij.history.FileRevisionTimestampComparator;
import com.intellij.history.LocalHistory;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.event.DocumentAdapter;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.event.DocumentListener;
import com.intellij.openapi.editor.impl.DocumentMarkupModel;
import com.intellij.openapi.editor.markup.*;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.fileEditor.TextEditor;
import com.intellij.openapi.fileEditor.impl.LoadTextUtil;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleUtilCore;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.LineTokenizer;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiFile;
import com.intellij.reference.SoftReference;
import com.intellij.rt.coverage.data.ClassData;
import com.intellij.rt.coverage.data.LineCoverage;
import com.intellij.rt.coverage.data.LineData;
import com.intellij.rt.coverage.data.ProjectData;
import com.intellij.ui.EditorNotificationPanel;
import com.intellij.util.Alarm;
import com.intellij.util.Function;
import com.intellij.util.diff.Diff;
import com.intellij.util.diff.FilesTooBigForDiffException;
import gnu.trove.TIntIntHashMap;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.util.*;
/**
* @author ven
*/
public class SrcFileAnnotator implements Disposable {
private static final Logger LOG = Logger.getInstance("#com.intellij.coverage.SrcFileAnnotator");
// Coverage highlighters currently installed, stored as user data on the editor.
public static final Key<List<RangeHighlighter>> COVERAGE_HIGHLIGHTERS = Key.create("COVERAGE_HIGHLIGHTERS");
// Document listener that invalidates cached line mappings on edits.
private static final Key<DocumentListener> COVERAGE_DOCUMENT_LISTENER = Key.create("COVERAGE_DOCUMENT_LISTENER");
// "Coverage data outdated"-style banners, stored per file editor on the PsiFile.
public static final Key<Map<FileEditor, EditorNotificationPanel>> NOTIFICATION_PANELS = Key.create("NOTIFICATION_PANELS");
private PsiFile myFile;
private Editor myEditor;
private Document myDocument;
private final Project myProject;
// Soft caches of line-number mappings between the coverage-time revision and
// the current document, in both directions.
private SoftReference<TIntIntHashMap> myNewToOldLines;
private SoftReference<TIntIntHashMap> myOldToNewLines;
// File bytes at coverage time, loaded from Local History; guarded by LOCK.
private SoftReference<byte[]> myOldContent;
private final static Object LOCK = new Object();
private final Alarm myUpdateAlarm = new Alarm(Alarm.ThreadToUse.POOLED_THREAD, this);
/**
 * Creates an annotator bound to the given file and editor; the project and
 * document are derived from them.
 */
public SrcFileAnnotator(final PsiFile file, final Editor editor) {
myFile = file;
myEditor = editor;
myProject = file.getProject();
myDocument = myEditor.getDocument();
}
/**
 * Removes all coverage presentation from the editor: range highlighters,
 * editor-notification banners, and the document listener installed when
 * coverage was shown. Safe to call when nothing is shown.
 */
public void hideCoverageData() {
if (myEditor == null) return;
final FileEditorManager fileEditorManager = FileEditorManager.getInstance(myProject);
final List<RangeHighlighter> highlighters = myEditor.getUserData(COVERAGE_HIGHLIGHTERS);
if (highlighters != null) {
for (final RangeHighlighter highlighter : highlighters) {
// Highlighter disposal is deferred to the EDT via invokeLater.
ApplicationManager.getApplication().invokeLater(new Runnable() {
@Override
public void run() {
highlighter.dispose();
}
});
}
myEditor.putUserData(COVERAGE_HIGHLIGHTERS, null);
}
// Remove "outdated coverage" banners; if the file is no longer open in any
// editor, remove them from every editor, otherwise only from the current one.
final Map<FileEditor, EditorNotificationPanel> map = myFile.getCopyableUserData(NOTIFICATION_PANELS);
if (map != null) {
final VirtualFile vFile = myFile.getVirtualFile();
LOG.assertTrue(vFile != null);
boolean freeAll = !fileEditorManager.isFileOpen(vFile);
myFile.putCopyableUserData(NOTIFICATION_PANELS, null);
for (FileEditor fileEditor : map.keySet()) {
if (!freeAll && !isCurrentEditor(fileEditor)) {
continue;
}
fileEditorManager.removeTopComponent(fileEditor, map.get(fileEditor));
}
}
// Detach the document listener that tracked edits while coverage was shown.
final DocumentListener documentListener = myEditor.getUserData(COVERAGE_DOCUMENT_LISTENER);
if (documentListener != null) {
myDocument.removeDocumentListener(documentListener);
myEditor.putUserData(COVERAGE_DOCUMENT_LISTENER, null);
}
}
/**
 * Splits the historical (coverage-time) file content into lines, decoding
 * the bytes with the file's binary presentation rules.
 */
@NotNull
private static String[] getCoveredLines(@NotNull byte[] oldContent, VirtualFile vFile) {
    CharSequence historicalText =
        LoadTextUtil.getTextByBinaryPresentation(oldContent, vFile, false, false);
    return LineTokenizer.tokenize(historicalText.toString(), false);
}
/**
 * Returns the current document's lines, read inside a read action so the
 * snapshot is consistent.
 */
@NotNull
private String[] getUpToDateLines() {
    final Ref<String[]> result = new Ref<String[]>();
    ApplicationManager.getApplication().runReadAction(new Runnable() {
        public void run() {
            final int lineCount = myDocument.getLineCount();
            final CharSequence text = myDocument.getCharsSequence();
            final String[] lines = new String[lineCount];
            for (int line = 0; line < lineCount; line++) {
                lines[line] = text.subSequence(myDocument.getLineStartOffset(line),
                                               myDocument.getLineEndOffset(line)).toString();
            }
            result.set(lines);
        }
    });
    return result.get();
}
/**
 * Walks the diff change chain and maps every unchanged line index in the
 * first (coverage-time) revision to its index in the second (current)
 * revision. Lines inside changed regions are deliberately absent from the map.
 *
 * @param change      head of the diff change chain; may be null when the files are equal
 * @param firstNLines total number of lines in the first revision
 */
private static TIntIntHashMap getCoverageVersionToCurrentLineMapping(Diff.Change change, int firstNLines) {
TIntIntHashMap result = new TIntIntHashMap();
int prevLineInFirst = 0;
int prevLineInSecond = 0;
while (change != null) {
// Lines between the previous change and this one are identical in both revisions.
for (int l = 0; l < change.line0 - prevLineInFirst; l++) {
result.put(prevLineInFirst + l, prevLineInSecond + l);
}
prevLineInFirst = change.line0 + change.deleted;
prevLineInSecond = change.line1 + change.inserted;
change = change.link;
}
// The tail after the last change is unchanged as well.
for (int i = prevLineInFirst; i < firstNLines; i++) {
result.put(i, prevLineInSecond + i - prevLineInFirst);
}
return result;
}
/**
 * Returns the old-to-new line mapping for the revision at {@code date},
 * computing and soft-caching it on first use.
 *
 * <p>Bug fix: previously, once the cached {@link SoftReference} had been
 * cleared by the garbage collector, this method returned {@code null}
 * forever; now a cleared reference triggers a recomputation.
 *
 * @return the mapping, or {@code null} when it cannot be computed
 */
@Nullable
private TIntIntHashMap getOldToNewLineMapping(final long date) {
if (myOldToNewLines == null || myOldToNewLines.get() == null) {
myOldToNewLines = doGetLineMapping(date, true);
if (myOldToNewLines == null) return null;
}
return myOldToNewLines.get();
}
/**
 * Returns the new-to-old line mapping for the revision at {@code date},
 * computing and soft-caching it on first use.
 *
 * <p>Bug fix: previously, once the cached {@link SoftReference} had been
 * cleared by the garbage collector, this method returned {@code null}
 * forever; now a cleared reference triggers a recomputation.
 *
 * @return the mapping, or {@code null} when it cannot be computed
 */
@Nullable
private TIntIntHashMap getNewToOldLineMapping(final long date) {
if (myNewToOldLines == null || myNewToOldLines.get() == null) {
myNewToOldLines = doGetLineMapping(date, false);
if (myNewToOldLines == null) return null;
}
return myNewToOldLines.get();
}
/**
 * Builds a line mapping between the file content at coverage time (fetched
 * from Local History) and the current document, by diffing the two line
 * arrays.
 *
 * @param date     coverage timestamp; the newest Local History revision older
 *                 than this is used as the "old" content
 * @param oldToNew true to map old line numbers to new ones, false for the reverse
 * @return a soft reference to the mapping, or {@code null} when no suitable
 *         revision exists, the content cannot be loaded, or the diff is too large
 */
@Nullable
private SoftReference<TIntIntHashMap> doGetLineMapping(final long date, boolean oldToNew) {
final VirtualFile f = getVirtualFile();
final byte[] oldContent;
// LOCK guards lazy initialization of myOldContent.
synchronized (LOCK) {
if (myOldContent == null) {
// Local History access may block; refuse to do it on the EDT.
if (ApplicationManager.getApplication().isDispatchThread()) return null;
final byte[] byteContent = LocalHistory.getInstance().getByteContent(f, new FileRevisionTimestampComparator() {
public boolean isSuitable(long revisionTimestamp) {
return revisionTimestamp < date;
}
});
myOldContent = new SoftReference<byte[]>(byteContent);
}
oldContent = myOldContent.get();
}
if (oldContent == null) return null;
String[] coveredLines = getCoveredLines(oldContent, f);
String[] currentLines = getUpToDateLines();
// Orient the diff according to the requested mapping direction.
String[] oldLines = oldToNew ? coveredLines : currentLines;
String[] newLines = oldToNew ? currentLines : coveredLines;
Diff.Change change = null;
try {
change = Diff.buildChanges(oldLines, newLines);
}
catch (FilesTooBigForDiffException e) {
LOG.info(e);
return null;
}
return new SoftReference<TIntIntHashMap>(getCoverageVersionToCurrentLineMapping(change, oldLines.length));
}
public void showCoverageInformation(final CoverageSuitesBundle suite) {
if (myEditor == null || myFile == null) return;
final MarkupModel markupModel = DocumentMarkupModel.forDocument(myDocument, myProject, true);
final List<RangeHighlighter> highlighters = new ArrayList<RangeHighlighter>();
final ProjectData data = suite.getCoverageData();
if (data == null) {
coverageDataNotFound(suite);
return;
}
final CoverageEngine engine = suite.getCoverageEngine();
final Set<String> qualifiedNames = engine.getQualifiedNames(myFile);
// let's find old content in local history and build mapping from old lines to new one
// local history doesn't index libraries, so let's distinguish libraries content with other one
final ProjectFileIndex projectFileIndex = ProjectRootManager.getInstance(myProject).getFileIndex();
final VirtualFile file = getVirtualFile();
final long fileTimeStamp = file.getTimeStamp();
final long coverageTimeStamp = suite.getLastCoverageTimeStamp();
final TIntIntHashMap oldToNewLineMapping;
//do not show coverage info over cls
if (engine.isInLibraryClasses(myProject, file)) {
return;
}
// if in libraries content
if (projectFileIndex.isInLibrarySource(file)) {
// compare file and coverage timestamps
if (fileTimeStamp > coverageTimeStamp) {
showEditorWarningMessage(CodeInsightBundle.message("coverage.data.outdated"));
return;
}
oldToNewLineMapping = null;
}
else {
// check local history
oldToNewLineMapping = getOldToNewLineMapping(coverageTimeStamp);
if (oldToNewLineMapping == null) {
// if history for file isn't available let's check timestamps
if (fileTimeStamp > coverageTimeStamp && classesArePresentInCoverageData(data, qualifiedNames)) {
showEditorWarningMessage(CodeInsightBundle.message("coverage.data.outdated"));
return;
}
}
}
if (myEditor.getUserData(COVERAGE_HIGHLIGHTERS) != null) {
//highlighters already collected - no need to do it twice
return;
}
final Module module = ApplicationManager.getApplication().runReadAction(new Computable<Module>() {
@Nullable
@Override
public Module compute() {
return ModuleUtilCore.findModuleForPsiElement(myFile);
}
});
if (module != null) {
if (engine.recompileProjectAndRerunAction(module, suite, new Runnable() {
public void run() {
CoverageDataManager.getInstance(myProject).chooseSuitesBundle(suite);
}
})) {
return;
}
}
// now if oldToNewLineMapping is null we should use f(x)=id(x) mapping
// E.g. all *.class files for java source file with several classes
final Set<File> outputFiles = engine.getCorrespondingOutputFiles(myFile, module, suite);
final boolean subCoverageActive = CoverageDataManager.getInstance(myProject).isSubCoverageActive();
final boolean coverageByTestApplicable = suite.isCoverageByTestApplicable() && !(subCoverageActive && suite.isCoverageByTestEnabled());
final TreeMap<Integer, LineData> executableLines = new TreeMap<Integer, LineData>();
final TreeMap<Integer, Object[]> classLines = new TreeMap<Integer, Object[]>();
final TreeMap<Integer, String> classNames = new TreeMap<Integer, String>();
// Local helper that turns per-class coverage data into editor range highlighters.
// Captures the enclosing method's locals: data, suite, engine, oldToNewLineMapping,
// the executableLines/classLines/classNames accumulators, markupModel and highlighters.
class HighlightersCollector {
  // Collect highlighters for one class. outputFile may be null for non-compilable
  // classes that are only present in ProjectData (see the caller's else-branch).
  private void collect(File outputFile, final String qualifiedName) {
    final ClassData fileData = data.getClassData(qualifiedName);
    if (fileData != null) {
      final Object[] lines = fileData.getLines();
      if (lines != null) {
        final Object[] postProcessedLines = suite.getCoverageEngine().postProcessExecutableLines(lines, myEditor);
        for (Object lineData : postProcessedLines) {
          if (lineData instanceof LineData) {
            // Coverage line numbers are 1-based; editor/document lines are 0-based.
            final int line = ((LineData)lineData).getLineNumber() - 1;
            final int lineNumberInCurrent;
            if (oldToNewLineMapping != null) {
              // use mapping based on local history
              if (!oldToNewLineMapping.contains(line)) {
                continue; // line no longer exists in the current document
              }
              lineNumberInCurrent = oldToNewLineMapping.get(line);
            }
            else {
              // use id mapping
              lineNumberInCurrent = line;
            }
            LOG.assertTrue(lineNumberInCurrent < myDocument.getLineCount());
            // Accumulators are keyed by the OLD (coverage-time) line number.
            executableLines.put(line, (LineData)lineData);
            classLines.put(line, postProcessedLines);
            classNames.put(line, qualifiedName);
            // Highlighter creation must run on the EDT; re-check document bounds there
            // because the document may have changed before the runnable executes.
            ApplicationManager.getApplication().invokeLater(new Runnable() {
              public void run() {
                if (myDocument == null || lineNumberInCurrent >= myDocument.getLineCount()) return;
                final RangeHighlighter highlighter =
                  createRangeHighlighter(suite.getLastCoverageTimeStamp(), markupModel, coverageByTestApplicable, executableLines,
                                         qualifiedName, line, lineNumberInCurrent, suite, postProcessedLines);
                highlighters.add(highlighter);
              }
            });
          }
        }
      }
    }
    else if (outputFile != null &&
             !subCoverageActive &&
             engine.includeUntouchedFileInCoverage(qualifiedName, outputFile, myFile, suite)) {
      // No coverage recorded for this class at all — optionally mark the whole file untouched.
      collectNonCoveredFileInfo(outputFile, highlighters, markupModel, executableLines, coverageByTestApplicable);
    }
  }
}
final HighlightersCollector collector = new HighlightersCollector();
if (!outputFiles.isEmpty()) {
for (File outputFile : outputFiles) {
final String qualifiedName = engine.getQualifiedName(outputFile, myFile);
if (qualifiedName != null) {
collector.collect(outputFile, qualifiedName);
}
}
}
else { //check non-compilable classes which present in ProjectData
for (String qName : qualifiedNames) {
collector.collect(null, qName);
}
}
ApplicationManager.getApplication().invokeLater(new Runnable() {
public void run() {
if (myEditor != null && highlighters.size() > 0) {
myEditor.putUserData(COVERAGE_HIGHLIGHTERS, highlighters);
}
}
});
final DocumentListener documentListener = new DocumentAdapter() {
@Override
public void documentChanged(final DocumentEvent e) {
myNewToOldLines = null;
myOldToNewLines = null;
List<RangeHighlighter> rangeHighlighters = myEditor.getUserData(COVERAGE_HIGHLIGHTERS);
if (rangeHighlighters == null) rangeHighlighters = new ArrayList<RangeHighlighter>();
int offset = e.getOffset();
final int lineNumber = myDocument.getLineNumber(offset);
final int lastLineNumber = myDocument.getLineNumber(offset + e.getNewLength());
final TextRange changeRange =
new TextRange(myDocument.getLineStartOffset(lineNumber), myDocument.getLineEndOffset(lastLineNumber));
for (Iterator<RangeHighlighter> it = rangeHighlighters.iterator(); it.hasNext(); ) {
final RangeHighlighter highlighter = it.next();
if (!highlighter.isValid() || TextRange.create(highlighter).intersects(changeRange)) {
highlighter.dispose();
it.remove();
}
}
final List<RangeHighlighter> highlighters = rangeHighlighters;
myUpdateAlarm.cancelAllRequests();
if (!myUpdateAlarm.isDisposed()) {
myUpdateAlarm.addRequest(new Runnable() {
@Override
public void run() {
final TIntIntHashMap newToOldLineMapping = getNewToOldLineMapping(suite.getLastCoverageTimeStamp());
if (newToOldLineMapping != null) {
ApplicationManager.getApplication().invokeLater(new Runnable() {
public void run() {
if (myEditor == null) return;
for (int line = lineNumber; line <= lastLineNumber; line++) {
final int oldLineNumber = newToOldLineMapping.get(line);
final LineData lineData = executableLines.get(oldLineNumber);
if (lineData != null) {
RangeHighlighter rangeHighlighter =
createRangeHighlighter(suite.getLastCoverageTimeStamp(), markupModel, coverageByTestApplicable, executableLines,
classNames.get(oldLineNumber), oldLineNumber, line, suite,
classLines.get(oldLineNumber));
highlighters.add(rangeHighlighter);
}
}
myEditor.putUserData(COVERAGE_HIGHLIGHTERS, highlighters.size() > 0 ? highlighters : null);
}
});
}
}
}, 100);
}
}
};
myDocument.addDocumentListener(documentListener);
myEditor.putUserData(COVERAGE_DOCUMENT_LISTENER, documentListener);
}
/** Returns true if the coverage data contains class data for at least one of the given qualified names. */
private static boolean classesArePresentInCoverageData(ProjectData data, Set<String> qualifiedNames) {
  boolean found = false;
  for (Iterator<String> it = qualifiedNames.iterator(); !found && it.hasNext(); ) {
    found = data.getClassData(it.next()) != null;
  }
  return found;
}
/**
 * Creates a single coverage line highlighter plus its gutter marker renderer.
 *
 * @param date                coverage-suite timestamp, used for lazy old&lt;-&gt;new line-mapping lookups
 * @param executableLines     map of 0-based (coverage-time) line number -&gt; LineData
 * @param className           qualified class name owning the line; null for untouched-file highlighting
 * @param line                0-based line number in the coverage snapshot (old document)
 * @param lineNumberInCurrent 0-based line number in the current document
 * @param lines               post-processed per-class line data, indexed by {@code line + 1}; may be null
 */
private RangeHighlighter createRangeHighlighter(final long date, final MarkupModel markupModel,
                                                final boolean coverageByTestApplicable,
                                                final TreeMap<Integer, LineData> executableLines, @Nullable final String className,
                                                final int line,
                                                final int lineNumberInCurrent,
                                                @NotNull final CoverageSuitesBundle coverageSuite, Object[] lines) {
  EditorColorsScheme scheme = EditorColorsManager.getInstance().getGlobalScheme();
  final TextAttributes attributes = scheme.getAttributes(CoverageLineMarkerRenderer.getAttributesKey(line, executableLines));
  TextAttributes textAttributes = null;
  // Only paint a line background when the color scheme defines one for this coverage status.
  if (attributes.getBackgroundColor() != null) {
    textAttributes = attributes;
  }
  final int startOffset = myDocument.getLineStartOffset(lineNumberInCurrent);
  final int endOffset = myDocument.getLineEndOffset(lineNumberInCurrent);
  final RangeHighlighter highlighter =
    markupModel.addRangeHighlighter(startOffset, endOffset, HighlighterLayer.SELECTION - 1, textAttributes, HighlighterTargetArea.LINES_IN_RANGE);
  // Converters let the marker renderer translate between coverage-time and current line numbers on demand.
  final Function<Integer, Integer> newToOldConverter = new Function<Integer, Integer>() {
    public Integer fun(final Integer newLine) {
      if (myEditor == null) return -1; // disposed
      final TIntIntHashMap oldLineMapping = getNewToOldLineMapping(date);
      return oldLineMapping != null ? oldLineMapping.get(newLine.intValue()) : newLine.intValue();
    }
  };
  final Function<Integer, Integer> oldToNewConverter = new Function<Integer, Integer>() {
    public Integer fun(final Integer newLine) {
      if (myEditor == null) return -1; // disposed
      final TIntIntHashMap newLineMapping = getOldToNewLineMapping(date);
      return newLineMapping != null ? newLineMapping.get(newLine.intValue()) : newLine.intValue();
    }
  };
  final CoverageLineMarkerRenderer markerRenderer = coverageSuite.getCoverageEngine()
    .getLineMarkerRenderer(line, className, executableLines, coverageByTestApplicable, coverageSuite, newToOldConverter,
                           oldToNewConverter, CoverageDataManager.getInstance(myProject).isSubCoverageActive());
  highlighter.setLineMarkerRenderer(markerRenderer);
  // Coverage data is 1-based, hence lines[line + 1]; untouched files pass lines == null (and className == null).
  final LineData lineData = className != null ? (LineData)lines[line + 1] : null;
  if (lineData != null && lineData.getStatus() == LineCoverage.NONE) {
    // Uncovered lines also get an error-stripe mark; greedy edges keep the mark through edits at line boundaries.
    highlighter.setErrorStripeMarkColor(markerRenderer.getErrorStripeColor(myEditor));
    highlighter.setThinErrorStripeMark(true);
    highlighter.setGreedyToLeft(true);
    highlighter.setGreedyToRight(true);
  }
  return highlighter;
}
// Shows a closable notification panel with the given warning on top of the editor
// tab currently displaying this file. Scheduled on the EDT via invokeLater.
private void showEditorWarningMessage(final String message) {
  ApplicationManager.getApplication().invokeLater(new Runnable() {
    public void run() {
      if (myEditor == null) return; // disposed before the runnable ran
      final FileEditorManager fileEditorManager = FileEditorManager.getInstance(myProject);
      final VirtualFile vFile = myFile.getVirtualFile();
      assert vFile != null;
      // Panels are remembered per FileEditor in copyable user data so they can be removed later.
      Map<FileEditor, EditorNotificationPanel> map = myFile.getCopyableUserData(NOTIFICATION_PANELS);
      if (map == null) {
        map = new HashMap<FileEditor, EditorNotificationPanel>();
        myFile.putCopyableUserData(NOTIFICATION_PANELS, map);
      }
      final FileEditor[] editors = fileEditorManager.getAllEditors(vFile);
      for (final FileEditor editor : editors) {
        if (isCurrentEditor(editor)) {
          final EditorNotificationPanel panel = new EditorNotificationPanel() {
            { // instance initializer: configure the panel's label
              myLabel.setIcon(AllIcons.General.ExclMark);
              myLabel.setText(message);
            }
          };
          panel.createActionLabel("Close", new Runnable() {
            @Override
            public void run() {
              fileEditorManager.removeTopComponent(editor, panel);
            }
          });
          map.put(editor, panel);
          fileEditorManager.addTopComponent(editor, panel);
          break; // only the current editor gets a panel
        }
      }
    }
  });
}
/** True if the given file editor is a text editor wrapping exactly this object's editor instance. */
private boolean isCurrentEditor(FileEditor editor) {
  if (!(editor instanceof TextEditor)) {
    return false;
  }
  return ((TextEditor)editor).getEditor() == myEditor;
}
// Marks an entire source file as uncovered when its output file exists but no
// coverage was recorded for it. Old line numbers are remapped through local history
// when the source was modified after the output file was produced.
private void collectNonCoveredFileInfo(final File outputFile,
                                       final List<RangeHighlighter> highlighters, final MarkupModel markupModel,
                                       final TreeMap<Integer, LineData> executableLines,
                                       final boolean coverageByTestApplicable) {
  final CoverageSuitesBundle coverageSuite = CoverageDataManager.getInstance(myProject).getCurrentSuitesBundle();
  if (coverageSuite == null) return;
  final TIntIntHashMap mapping;
  if (outputFile.lastModified() < getVirtualFile().getTimeStamp()) {
    // Source is newer than the compiled output: remap old line numbers to current ones.
    mapping = getOldToNewLineMapping(outputFile.lastModified());
    if (mapping == null) return; // no usable mapping -> cannot place highlighters reliably
  }
  else {
    mapping = null; // output is up to date; identity mapping
  }
  final List<Integer> uncoveredLines = coverageSuite.getCoverageEngine().collectSrcLinesForUntouchedFile(outputFile, coverageSuite);
  final int lineCount = myDocument.getLineCount();
  if (uncoveredLines == null) {
    // The engine cannot tell which lines are executable: mark every line of the file.
    for (int lineNumber = 0; lineNumber < lineCount; lineNumber++) {
      addHighlighter(outputFile, highlighters, markupModel, executableLines, coverageByTestApplicable, coverageSuite,
                     lineNumber, lineNumber);
    }
  }
  else {
    for (int lineNumber : uncoveredLines) {
      if (lineNumber >= lineCount) {
        continue; // stale data pointing past the current end of the document
      }
      final int updatedLineNumber = mapping != null ? mapping.get(lineNumber) : lineNumber;
      addHighlighter(outputFile, highlighters, markupModel, executableLines, coverageByTestApplicable, coverageSuite,
                     lineNumber, updatedLineNumber);
    }
  }
}
// Registers updatedLineNumber as executable-but-uncovered (null LineData) and
// schedules creation of its highlighter on the EDT.
private void addHighlighter(final File outputFile,
                            final List<RangeHighlighter> highlighters,
                            final MarkupModel markupModel,
                            final TreeMap<Integer, LineData> executableLines,
                            final boolean coverageByTestApplicable,
                            final CoverageSuitesBundle coverageSuite,
                            final int lineNumber,
                            final int updatedLineNumber) {
  executableLines.put(updatedLineNumber, null); // null LineData == no coverage recorded for the line
  ApplicationManager.getApplication().invokeLater(new Runnable() {
    public void run() {
      if (myEditor == null) return; // disposed before the runnable ran
      // className and lines are null: this highlighter marks an untouched file's line.
      final RangeHighlighter highlighter =
        createRangeHighlighter(outputFile.lastModified(), markupModel, coverageByTestApplicable, executableLines, null, lineNumber,
                               updatedLineNumber, coverageSuite, null);
      highlighters.add(highlighter);
    }
  });
}
/** Returns the virtual file backing {@code myFile}; asserts that it exists. */
private VirtualFile getVirtualFile() {
  final VirtualFile virtualFile = myFile.getVirtualFile();
  LOG.assertTrue(virtualFile != null);
  return virtualFile;
}
/** Warns the user that coverage data is missing and removes every suite of the given bundle. */
private void coverageDataNotFound(final CoverageSuitesBundle suite) {
  showEditorWarningMessage(CodeInsightBundle.message("coverage.data.not.found"));
  final CoverageDataManager dataManager = CoverageDataManager.getInstance(myProject);
  for (CoverageSuite coverageSuite : suite.getSuites()) {
    dataManager.removeCoverageSuite(coverageSuite);
  }
}
// Disposal: remove all coverage UI first, then release references so the editor,
// document and file can be garbage collected. Order matters — hideCoverageData()
// still needs myEditor/myDocument to be non-null.
public void dispose() {
  hideCoverageData();
  myEditor = null;
  myDocument = null;
  myFile = null;
}
}
|
package hex;
import hex.schemas.ModelBuilderSchema;
import water.*;
import water.exceptions.H2OIllegalArgumentException;
import water.exceptions.H2OKeyNotFoundArgumentException;
import water.fvec.Frame;
import water.fvec.Vec;
import water.util.Log;
import water.util.MRUtils;
import water.util.ReflectionUtils;
import java.lang.reflect.Constructor;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
/**
* Model builder parent class. Contains the common interfaces and fields across all model builders.
*/
abstract public class ModelBuilder<M extends Model<M,P,O>, P extends Model.Parameters, O extends Model.Output> extends Job<M> {
/** All the parameters required to build the model. */
public final P _parms;
/** Training frame: derived from the parameter's training frame, excluding
* all ignored columns, all constant and bad columns, perhaps flipping the
* response column to an Categorical, etc. */
public final Frame train() { return _train; }
protected transient Frame _train;
/** Validation frame: derived from the parameter's validation frame, excluding
* all ignored columns, all constant and bad columns, perhaps flipping the
* response column to a Categorical, etc. Is null if no validation key is set. */
public final Frame valid() { return _valid; }
protected transient Frame _valid;
// TODO: tighten up the type
// Map the algo name (e.g., "deeplearning") to the builder class (e.g., DeepLearning.class) :
private static final Map<String, Class<? extends ModelBuilder>> _builders = new HashMap<>();
// Map the Model class (e.g., DeepLearningModel.class) to the algo name (e.g., "deeplearning"):
private static final Map<Class<? extends Model>, String> _model_class_to_algo = new HashMap<>();
// Map the simple algo name (e.g., deeplearning) to the full algo name (e.g., "Deep Learning"):
private static final Map<String, String> _algo_to_algo_full_name = new HashMap<>();
// Map the algo name (e.g., "deeplearning") to the Model class (e.g., DeepLearningModel.class):
private static final Map<String, Class<? extends Model>> _algo_to_model_class = new HashMap<>();
/** Train response vector. */
public Vec response(){return _response;}
/** Validation response vector. */
public Vec vresponse(){return _vresponse;}
/**
* Register a ModelBuilder, assigning it an algo name.
*/
public static void registerModelBuilder(String name, String full_name, Class<? extends ModelBuilder> clz) {
  _builders.put(name, clz);
  // Type parameter 0 of the builder class is its Model class (see the class declaration
  // ModelBuilder<M extends Model, ...>); the cast is unchecked by nature of reflection.
  Class<? extends Model> model_class = (Class<? extends Model>)ReflectionUtils.findActualClassParameter(clz, 0);
  // Maintain all reverse/forward lookup maps together so they stay consistent.
  _model_class_to_algo.put(model_class, name);
  _algo_to_algo_full_name.put(name, full_name);
  _algo_to_model_class.put(name, model_class);
}
/** Get a Map of all algo names to their ModelBuilder classes. */
public static Map<String, Class<? extends ModelBuilder>>getModelBuilders() { return _builders; }
/** Get the ModelBuilder class for the given algo name. */
public static Class<? extends ModelBuilder> getModelBuilder(String name) {
return _builders.get(name);
}
/** Get the Model class for the given algo name. */
public static Class<? extends Model> getModelClass(String name) {
return _algo_to_model_class.get(name);
}
/** Get the algo name for the given Model. */
public static String getAlgo(Model model) {
return _model_class_to_algo.get(model.getClass());
}
/** Get the algo full name for the given algo. */
public static String getAlgoFullName(String algo) {
return _algo_to_algo_full_name.get(algo);
}
public String getAlgo() {
return getAlgo(this.getClass());
}
/**
 * Look up the registered algo name for the given ModelBuilder class.
 * @throws H2OIllegalArgumentException if the class was never registered
 */
public static String getAlgo(Class<? extends ModelBuilder> clz) {
  // Check for unknown algo names, but if none are registered keep going; we're probably in JUnit.
  if (_builders.isEmpty())
    return "Unknown algo (should only happen under JUnit)";
  // Single scan of the registry; the previous containsValue() pre-check walked the map
  // twice and left a duplicated, explicitly-unreachable throw at the end.
  for (Map.Entry<String, Class<? extends ModelBuilder>> entry : _builders.entrySet())
    if (entry.getValue().equals(clz))
      return entry.getKey();
  throw new H2OIllegalArgumentException("Failed to find ModelBuilder class in registry: " + clz, "Failed to find ModelBuilder class in registry: " + clz);
}
/**
* Externally visible default schema
* TODO: this is in the wrong layer: the internals should not know anything about the schemas!!!
* This puts a reverse edge into the dependency graph.
*/
public abstract ModelBuilderSchema schema();
/** Constructor called from an http request; MUST override in subclasses. */
public ModelBuilder(P ignore) {
super(Key.make("Failed"),"ModelBuilder constructor needs to be overridden.");
throw H2O.fail("ModelBuilder subclass failed to override the params constructor: " + this.getClass());
}
/** Constructor making a default destination key */
public ModelBuilder(String desc, P parms) {
this((parms == null || parms._model_id == null) ? Key.make(desc + "Model_" + Key.rand()) : parms._model_id, desc, parms);
}
/** Default constructor, given all arguments */
public ModelBuilder(Key dest, String desc, P parms) {
super(dest,desc);
_parms = parms;
}
/** Factory method to create a ModelBuilder instance of the correct class given the algo name. */
public static ModelBuilder createModelBuilder(String algo) {
  Class<? extends ModelBuilder> clz = null;
  try {
    clz = ModelBuilder.getModelBuilder(algo);
  }
  catch (Exception ignore) {} // lookup is best-effort; null is handled below
  if (clz == null) {
    throw new H2OIllegalArgumentException("algo", "createModelBuilder", "Algo not known (" + algo + ")");
  }
  try {
    if (! (clz.getGenericSuperclass() instanceof ParameterizedType)) {
      throw H2O.fail("Class is not parameterized as expected: " + clz);
    }
    Type[] handler_type_parms = ((ParameterizedType)(clz.getGenericSuperclass())).getActualTypeArguments();
    // [0] is the Model type; [1] is the Model.Parameters type; [2] is the Model.Output type.
    Class<? extends Model.Parameters> pclz = (Class<? extends Model.Parameters>)handler_type_parms[1];
    // Every builder subclass must expose a (Parameters) constructor; instantiate default
    // parameters and feed them to it.
    Constructor<ModelBuilder> constructor = (Constructor<ModelBuilder>)clz.getDeclaredConstructor(new Class[] { (Class)handler_type_parms[1] });
    Model.Parameters p = pclz.newInstance();
    return constructor.newInstance(p);
  } catch (Exception e) {
    // Single handler: the previous InvocationTargetException and Exception catch blocks
    // were byte-identical. The cause is preserved by passing e to H2O.fail.
    throw H2O.fail("Exception when trying to instantiate ModelBuilder for: " + algo + ": " + e.getCause(), e);
  }
}
/** Method to launch training of a Model, based on its parameters. */
abstract public Job<M> trainModel();
/** List containing the categories of models that this builder can
* build. Each ModelBuilder must have one of these. */
abstract public ModelCategory[] can_build();
/**
* Visibility for this algo: is it always visible, is it beta (always visible but with a note in the UI)
* or is it experimental (hidden by default, visible in the UI if the user gives an "experimental" flag
* at startup).
*/
public enum BuilderVisibility {
Experimental,
Beta,
Stable
}
/**
* Visibility for this algo: is it always visible, is it beta (always visible but with a note in the UI)
* or is it experimental (hidden by default, visible in the UI if the user gives an "experimental" flag
* at startup).
*/
abstract public BuilderVisibility builderVisibility();
/** Clear whatever was done by init() so it can be run again. */
public void clearInitState() {
clearValidationErrors();
}
public boolean isSupervised(){return false;}
protected transient Vec _response; // Handy response column
protected transient Vec _vresponse; // Handy response column
protected transient Vec _offset; // Handy offset column
protected transient Vec _weights;
public boolean hasOffset(){ return _offset != null;}
public boolean hasWeights(){return _weights != null;}
// no hasResponse, call isSupervised instead (response is mandatory if isSupervised is true)
protected int _nclass; // Number of classes; 1 for regression; 2+ for classification
public int nclasses(){return _nclass;}
public final boolean isClassifier() { return _nclass > 1; }
/**
* Find and set response/weights/offset and put them all in the end,
* @return number of non-feature vecs
*/
protected int separateFeatureVecs() {
  int res = 0;
  if(_parms._weights_column != null) {
    Vec w = _train.remove(_parms._weights_column);
    if(w == null)
      // Fixed copy/paste bug: this message previously said "Offset column" for the weights column.
      error("_weights_column","Weights column '" + _parms._weights_column + "' not found in the training frame");
    else {
      if(!w.isNumeric())
        error("_weights_column","Invalid weights column '" + _parms._weights_column + "', weights must be numeric");
      _weights = w;
      // Field name fixed from "_weights_columns" (no such parameter) to "_weights_column"
      // so these messages attach to the real parameter field.
      if(w.naCnt() > 0)
        error("_weights_column","Weights cannot have missing values.");
      if(w.min() < 0)
        error("_weights_column","Weights must be >= 0");
      if(w.max() == 0)
        error("_weights_column","Max. weight must be > 0");
      _train.add(_parms._weights_column, w); // re-append so the special vec ends up after the features
      ++res;
    }
  }
  if(_parms._offset_column != null) {
    Vec o = _train.remove(_parms._offset_column);
    if(o == null)
      error("_offset_column","Offset column '" + _parms._offset_column + "' not found in the training frame");
    else {
      if(!o.isNumeric())
        error("_offset_column","Invalid offset column '" + _parms._offset_column + "', offset must be numeric");
      _offset = o;
      if(o.naCnt() > 0)
        error("_offset_column","Offset cannot have missing values.");
      if(_weights == _offset)
        error("_offset_column", "Offset must be different from weights");
      _train.add(_parms._offset_column, o); // re-append so the special vec ends up after the features
      ++res;
    }
  }
  if(isSupervised() && _parms._response_column != null) {
    _response = _train.remove(_parms._response_column);
    if (_response == null) {
      if (isSupervised())
        error("_response_column", "Response column '" + _parms._response_column + "' not found in the training frame");
    } else {
      _train.add(_parms._response_column, _response); // response goes last
      ++res;
    }
  }
  return res;
}
protected boolean ignoreStringColumns(){return true;}
/**
* Ignore constant columns, columns with all NAs and strings.
* @param npredictors
* @param expensive
*/
protected void ignoreBadColumns(int npredictors, boolean expensive){
  // Drop all-constant and all-bad columns.
  if( _parms._ignore_const_cols)
    new FilterCols(npredictors) {
      // Drop a column when it is constant, all-bad (all NAs), or a String column
      // (unless the concrete algo overrides ignoreStringColumns() to accept strings).
      @Override protected boolean filter(Vec v) { return v.isConst() || v.isBad() || (ignoreStringColumns() && v.isString()); }
    }.doIt(_train,"Dropping constant columns: ",expensive);
}
/**
* Override this method to call error() if the model is expected to not fit in memory, and say why
*/
protected void checkMemoryFootPrint() {}
transient long [] _distribution;
transient double [] _priorClassDist;
protected boolean computePriorClassDistribution(){
return _parms._balance_classes;
}
/** Initialize the ModelBuilder, validating all arguments and preparing the
* training frame. This call is expected to be overridden in the subclasses
* and each subclass will start with "super.init();". This call is made by
* the front-end whenever the GUI is clicked, and needs to be fast whenever
* {@code expensive} is false; it will be called once again at the start of
* model building {@see #trainModel()} with expensive set to true.
*<p>
* The incoming training frame (and validation frame) will have ignored
* columns dropped out, plus whatever work the parent init did.
*<p>
* NOTE: The front end initially calls this through the parameters validation
* endpoint with no training_frame, so each subclass's {@code init()} method
* has to work correctly with the training_frame missing.
*<p>
* @see #updateValidationMessages()
*/
public void init(boolean expensive) {
  // Log parameters
  if (expensive) {
    Log.info("Building H2O " + this.getClass().getSimpleName().toString() + " model with these parameters:");
    Log.info(new String(_parms.writeJSON(new AutoBuffer()).buf()));
  }
  // NOTE: allow re-init:
  clearInitState();
  assert _parms != null;      // Parms must already be set in
  if( _parms._train == null ) {
    if (expensive)
      error("_train","Missing training frame");
    return;
  }
  Frame tr = _parms.train();
  if( tr == null ) { error("_train","Missing training frame: "+_parms._train); return; }
  // Shallow copy (names + vec array) so columns can be dropped without mutating
  // the user's frame; deliberately NOT put into the KV store.
  _train = new Frame(null /* not putting this into KV */, tr._names.clone(), tr.vecs().clone());
  // Drop explicitly dropped columns
  if( _parms._ignored_columns != null ) {
    _train.remove(_parms._ignored_columns);
    if( expensive ) Log.info("Dropping ignored columns: "+Arrays.toString(_parms._ignored_columns));
  }
  // Drop all non-numeric columns (e.g., String and UUID).  No current algo
  // can use them, and otherwise all algos will then be forced to remove
  // them.  Text algos (grep, word2vec) take raw text columns - which are
  // numeric (arrays of bytes).
  ignoreBadColumns(separateFeatureVecs(), expensive);
  // Check that at least some columns are not-constant and not-all-NAs
  if( _train.numCols() == 0 )
    error("_train","There are no usable columns to generate model");
  if(isSupervised()) {
    if(_response != null) {
      // Categorical response => classification with cardinality classes; otherwise regression.
      _nclass = _response.isEnum() ? _response.cardinality() : 1;
      if (_response.isConst())
        error("_response","Response cannot be constant.");
    }
    if (! _parms._balance_classes)
      hide("_max_after_balance_size", "Balance classes is false, hide max_after_balance_size");
    if( _parms._max_after_balance_size <= 0.0 )
      error("_max_after_balance_size","Max size after balancing needs to be positive, suggest 1.0f");
    // NOTE(review): _train was assigned unconditionally above, so this null check
    // looks redundant — confirm whether it guards against an override side effect.
    if( _train != null ) {
      if (_train.numCols() <= 1)
        error("_train", "Training data must have at least 2 features (incl. response).");
      if( null == _parms._response_column) {
        error("_response_column", "Response column parameter not set.");
        return;
      }
      if(_response != null && computePriorClassDistribution()) {
        if (isClassifier() && isSupervised()) {
          // Per-class counts and relative frequencies over the response column.
          MRUtils.ClassDist cdmt = new MRUtils.ClassDist(nclasses()).doAll(_response);
          _distribution = cdmt.dist();
          _priorClassDist = cdmt.rel_dist();
        } else {                    // Regression; only 1 "class"
          _distribution = new long[]{train().numRows()};
          _priorClassDist = new double[]{1.0f};
        }
      }
    }
    // Hide or validate class-balancing options depending on problem type.
    if( !isClassifier() ) {
      hide("_balance_classes", "Balance classes is only applicable to classification problems.");
      hide("_class_sampling_factors", "Class sampling factors is only applicable to classification problems.");
      hide("_max_after_balance_size", "Max after balance size is only applicable to classification problems.");
      hide("_max_confusion_matrix_size", "Max confusion matrix size is only applicable to classification problems.");
    }
    else {
      if (_offset != null)
        error("_offset", "Offset only applies to regression problems.");
    }
    if (_nclass <= 2) {
      hide("_max_hit_ratio_k", "Max K-value for hit ratio is only applicable to multi-class classification problems.");
      hide("_max_confusion_matrix_size", "Only for multi-class classification problems.");
    }
    if( !_parms._balance_classes ) {
      hide("_max_after_balance_size", "Only used with balanced classes");
      hide("_class_sampling_factors", "Class sampling factors is only applicable if balancing classes.");
    }
  }
  // Build the validation set to be compatible with the training set.
  // Toss out extra columns, complain about missing ones, remap enums
  Frame va = _parms.valid();  // User-given validation set
  if (va != null) {
    _valid = new Frame(null /* not putting this into KV */, va._names.clone(), va.vecs().clone());
    try {
      String[] msgs = Model.adaptTestForTrain(_train._names, _parms._weights_column, _parms._offset_column, null, _train.domains(), _valid, _parms.missingColumnsType(), expensive, true);
      _vresponse = _valid.vec(_parms._response_column);
      if (_vresponse == null && _parms._response_column != null)
        error("_validation_frame", "Validation frame must have a response column '" + _parms._response_column + "'.");
      if (expensive) {
        for (String s : msgs) {
          Log.info(s);
          info("_valid", s);
        }
      }
      // NOTE(review): _valid was just assigned non-null above, so the '_valid == null'
      // disjunct is vacuous here — confirm the intended invariant.
      assert !expensive || (_valid == null || Arrays.equals(_train._names, _valid._names));
    } catch (IllegalArgumentException iae) {
      error("_valid", iae.getMessage());
    }
  }
}
/**
* init(expensive) is called inside a DTask, not from the http request thread. If we add validation messages to the
* ModelBuilder (aka the Job) we want to update it in the DKV so the client can see them when polling and later on
* after the job completes.
* <p>
* NOTE: this should only be called when no other threads are updating the job, for example from init() or after the
* DTask is stopped and is getting cleaned up.
* @see #init(boolean)
*/
public void updateValidationMessages() {
  // Atomically update the validation messages in the Job in the DKV.
  // In some cases we haven't stored to the DKV yet:
  new TAtomic<Job>() {
    @Override public Job atomic(Job old) {
      // Bug fix: previously threw via old._key, which NPE'd whenever old == null —
      // exactly the case being reported. Use the key this atomic was invoked on.
      if( old == null ) throw new H2OKeyNotFoundArgumentException(_key);
      // Copy this builder's messages onto the DKV-resident copy of the Job.
      ModelBuilder builder = (ModelBuilder)old;
      builder._messages = ModelBuilder.this._messages;
      return builder;
    }
    // Run the onCancelled code synchronously, right now
    @Override public void onSuccess( Job old ) { if( isCancelledOrCrashed() ) onCancelled(); }
  }.invoke(_key);
}
/** Helper that drops columns matching {@link #filter} from a frame, skipping the
 *  trailing special vecs (weights/offset/response) appended by separateFeatureVecs(). */
abstract class FilterCols {
  final int _specialVecs; // special vecs to skip at the end
  public FilterCols(int n) {_specialVecs = n;}
  abstract protected boolean filter(Vec v);
  /** Drop matching columns from f, logging/warning the dropped names.
   *  msg is the message prefix; expensive additionally logs via Log.info. */
  void doIt( Frame f, String msg, boolean expensive ) {
    // Accumulate the dropped-column list in a StringBuilder instead of repeated
    // String concatenation (the old code rebuilt the string on every drop).
    final StringBuilder sb = new StringBuilder(msg);
    boolean any = false;
    for( int i = 0; i < f.vecs().length - _specialVecs; i++ ) {
      if( filter(f.vecs()[i]) ) {
        if( any ) sb.append(", "); // Log dropped cols
        any = true;
        sb.append(f._names[i]);
        f.remove(i);
        i--; // Re-run at same iteration after dropping a col
      }
    }
    if( any ) {
      final String dropped = sb.toString();
      warn("_train", dropped);
      if (expensive) Log.info(dropped);
    }
  }
}
/** A list of field validation issues. */
public ValidationMessage[] _messages = new ValidationMessage[0];
private int _error_count = -1; // -1 ==> init not run yet; note, this counts ONLY errors, not WARNs and etc.
public int error_count() { assert _error_count>=0 : "init() not run yet"; return _error_count; }
public void hide (String field_name, String message) { message(ValidationMessage.MessageType.HIDE , field_name, message); }
public void info (String field_name, String message) { message(ValidationMessage.MessageType.INFO , field_name, message); }
public void warn (String field_name, String message) { message(ValidationMessage.MessageType.WARN , field_name, message); }
public void error(String field_name, String message) { message(ValidationMessage.MessageType.ERROR, field_name, message); _error_count++; }
private void clearValidationErrors() {
_messages = new ValidationMessage[0];
_error_count = 0;
}
/** Appends one validation message to {@code _messages} via a grow-by-one array copy. */
private void message(ValidationMessage.MessageType message_type, String field_name, String message) {
  final int count = _messages.length;
  final ValidationMessage[] grown = Arrays.copyOf(_messages, count + 1);
  grown[count] = new ValidationMessage(message_type, field_name, message);
  _messages = grown;
}
/** Builds a newline-separated string of only the ERROR validation messages
 *  (e.g., to use in an exception throw). */
public String validationErrors() {
  final StringBuilder errors = new StringBuilder();
  for (ValidationMessage vm : _messages) {
    if (ValidationMessage.MessageType.ERROR == vm.message_type) {
      errors.append(vm.toString()).append("\n");
    }
  }
  return errors.toString();
}
/** The result of an abnormal Model.Parameter check. Contains a
* level, a field name, and a message.
*
* Can be an ERROR, meaning the parameters can't be used as-is,
* a HIDE, which means the specified field should be hidden given
* the values of other fields, or a WARN or INFO for informative
* messages to the user.
*/
public static final class ValidationMessage extends Iced {
  public enum MessageType { HIDE, INFO, WARN, ERROR }
  final MessageType message_type;
  final String field_name;  // name of the Model.Parameters field the message refers to
  final String message;     // human-readable message text
  // The constructor logs the message immediately at the matching level;
  // HIDE messages are intentionally not logged (no case for them below).
  public ValidationMessage(MessageType message_type, String field_name, String message) {
    this.message_type = message_type;
    this.field_name = field_name;
    this.message = message;
    switch (message_type) {
      case INFO: Log.info(field_name + ": " + message); break;
      case WARN: Log.warn(field_name + ": " + message); break;
      case ERROR: Log.err(field_name + ": " + message); break;
    }
  }
  @Override public String toString() { return message_type + " on field: " + field_name + ": " + message; }
}
}
|
package hex;
import hex.genmodel.utils.DistributionFamily;
import jsr166y.CountedCompleter;
import water.*;
import water.api.FSIOException;
import water.api.HDFSIOException;
import water.exceptions.H2OIllegalArgumentException;
import water.exceptions.H2OModelBuilderIllegalArgumentException;
import water.fvec.*;
import water.rapids.ast.prims.advmath.AstKFold;
import water.udf.CFuncRef;
import water.util.*;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.*;
/**
* Model builder parent class. Contains the common interfaces and fields across all model builders.
*/
abstract public class ModelBuilder<M extends Model<M,P,O>, P extends Model.Parameters, O extends Model.Output> extends Iced {
private ModelBuilderListener _modelBuilderListener;
// Registers the listener for this model builder; invocation sites are outside this chunk.
public void setModelBuilderListener(final ModelBuilderListener modelBuilderListener) {
  this._modelBuilderListener = modelBuilderListener;
}
public ToEigenVec getToEigenVec() { return null; }
public boolean shouldReorder(Vec v) { return _parms._categorical_encoding.needsResponse() && isSupervised(); }
// initialized to be non-null to provide nicer exceptions when used incorrectly (instead of NPE)
private transient Workspace _workspace = new Workspace(false);
public Job<M> _job; // Job controlling this build
/** Block till completion, and return the built model from the DKV. Note the
* funny assert: the Job does NOT have to be controlling this model build,
* but might, e.g. be controlling a Grid search for which this is just one
* of many results. Calling 'get' means that we are blocking on the Job
* which is controlling ONLY this ModelBuilder, and when the Job completes
* we can return built Model. */
public final M get() { assert _job._result == _result; return _job.get(); }
public final boolean isStopped() { return _job.isStopped(); }
// Key of the model being built; note that this is DIFFERENT from
// _job._result if the Job is being shared by many sub-models
// e.g. cross-validation.
protected Key<M> _result; // Built Model key
public final Key<M> dest() { return _result; }
private Countdown _build_model_countdown;
private Countdown _build_step_countdown;
// Starts the whole-model-build countdown from _parms._max_runtime_secs
// (presumably 0 means unlimited — confirm Countdown.fromSeconds semantics).
private void startClock() {
  _build_model_countdown = Countdown.fromSeconds(_parms._max_runtime_secs);
  _build_model_countdown.start();
}
protected boolean timeout() {
return _build_step_countdown != null ? _build_step_countdown.timedOut() : _build_model_countdown.timedOut();
}
protected boolean stop_requested() {
return _job.stop_requested() || timeout();
}
protected long remainingTimeSecs() {
return (long) Math.ceil(_build_model_countdown.remainingTime() / 1000.0);
}
/** Default model-builder key: a cluster-unique model id derived from the algo name. */
public static <S extends Model> Key<S> defaultKey(String algoName) {
return Key.make(H2O.calcNextUniqueModelId(algoName));
}
/** Default easy constructor: Unique new job and unique new result key */
protected ModelBuilder(P parms) {
this(parms, ModelBuilder.<M>defaultKey(parms.algoName()));
}
/** Unique new job and named result key */
protected ModelBuilder(P parms, Key<M> key) {
_job = new Job<>(_result = key, parms.javaName(), parms.algoName());
_parms = parms;
// keep a pristine copy of the user-supplied parameters (AUTO values not yet resolved)
_input_parms = (P) parms.clone();
}
/** Shared pre-existing Job and unique new result key */
protected ModelBuilder(P parms, Job<M> job) {
_job = job;
_result = defaultKey(parms.algoName());
_parms = parms;
_input_parms = (P) parms.clone();
}
/** List of known ModelBuilders with all default args; endlessly cloned by
* the GUI for new private instances, then the GUI overrides some of the
* defaults with user args. */
private static String[] ALGOBASES = new String[0];
// Registered algo url-names (e.g. "gbm"); grown one entry at a time during startup registration.
public static String[] algos() { return ALGOBASES; }
// Schema package prefix per registered algo; parallel array to ALGOBASES.
private static String[] SCHEMAS = new String[0];
// Prototype builder instance per registered algo; parallel array to ALGOBASES.
private static ModelBuilder[] BUILDERS = new ModelBuilder[0];
// True only on the prototype instances created during registration at startup.
protected boolean _startUpOnceModelBuilder = false;
/** One-time start-up only ModelBuilder, endlessly cloned by the GUI for the
* default settings. */
protected ModelBuilder(P parms, boolean startup_once) { this(parms,startup_once,"hex.schemas."); }
// Registers this builder in the static ALGOBASES/BUILDERS/SCHEMAS tables; must only run at startup.
protected ModelBuilder(P parms, boolean startup_once, String externalSchemaDirectory ) {
String base = getName();
if (!startup_once)
throw H2O.fail("Algorithm " + base + " registration issue. It can only be called at startup.");
_startUpOnceModelBuilder = true;
_job = null;
_result = null;
_parms = parms;
init(false); // Default cheap init
// refuse duplicate registration of the same algo
if( ArrayUtils.find(ALGOBASES,base) != -1 )
throw H2O.fail("Only called once at startup per ModelBuilder, and "+base+" has already been called");
// FIXME: this is not thread safe!
// michalk: this note ^^ is generally true (considering 3rd parties), however, in h2o-3 code base we have a sequential ModelBuilder initialization
// grow all three parallel registration arrays in lock-step, then append the new entry
ALGOBASES = Arrays.copyOf(ALGOBASES,ALGOBASES.length+1);
BUILDERS = Arrays.copyOf(BUILDERS ,BUILDERS .length+1);
SCHEMAS = Arrays.copyOf(SCHEMAS ,SCHEMAS .length+1);
ALGOBASES[ALGOBASES.length-1] = base;
BUILDERS [BUILDERS .length-1] = this;
SCHEMAS [SCHEMAS .length-1] = externalSchemaDirectory;
}
// NOTE: these lookups assume urlName is registered; an unknown name makes ArrayUtils.find return -1
// and triggers an ArrayIndexOutOfBoundsException. Use getRegisteredBuilder for a safe lookup.
/** gbm -> GBM, deeplearning -> DeepLearning */
public static String algoName(String urlName) { return BUILDERS[ArrayUtils.find(ALGOBASES,urlName)]._parms.algoName(); }
/** gbm -> hex.tree.gbm.GBM, deeplearning -> hex.deeplearning.DeepLearning */
public static String javaName(String urlName) { return BUILDERS[ArrayUtils.find(ALGOBASES,urlName)]._parms.javaName(); }
/** gbm -> GBMParameters */
public static String paramName(String urlName) { return algoName(urlName)+"Parameters"; }
/** gbm -> "hex.schemas." ; custAlgo -> "org.myOrg.schemas." */
public static String schemaDirectory(String urlName) { return SCHEMAS[ArrayUtils.find(ALGOBASES,urlName)]; }
/**
 * Looks up a registered prototype ModelBuilder by its url algo name (case-insensitive).
 *
 * @param urlName algo name as used in URLs (e.g. "gbm")
 * @param <B> expected builder type
 * @return the registered prototype builder, or {@code Optional.empty()} if the algo is not registered
 */
@SuppressWarnings("unchecked")
static <B extends ModelBuilder> Optional<B> getRegisteredBuilder(String urlName) {
  // Locale.ROOT keeps the lookup stable under locale-sensitive casing rules
  // (e.g. the Turkish dotless-i would otherwise break lookups of names containing 'I').
  final String formattedName = urlName.toLowerCase(Locale.ROOT);
  int idx = ArrayUtils.find(ALGOBASES, formattedName);
  if (idx < 0)
    return Optional.empty();
  return Optional.of((B) BUILDERS[idx]);
}
/** Factory method to create a ModelBuilder instance for given the algo name.
* Shallow clone of both the default ModelBuilder instance and a Parameter. */
public static <B extends ModelBuilder> B make(String algo, Job job, Key<Model> result) {
return getRegisteredBuilder(algo)
.map(prototype -> {
@SuppressWarnings("unchecked")
B mb = (B) prototype.clone(); // shallow clone of the registered prototype
mb._job = job;
mb._result = result;
mb._parms = prototype._parms.clone();
mb._input_parms = prototype._parms.clone();
return mb;
})
.orElseThrow(() -> {
// log the extension report to help diagnose why the algo is missing, then fail
StringBuilder sb = new StringBuilder();
sb.append("Unknown algo: '").append(algo).append("'; Extension report: ");
Log.err(ExtensionManager.getInstance().makeExtensionReport(sb));
return new IllegalStateException("Algorithm '" + algo + "' is not registered. " +
"Available algos: [" + StringUtils.join(",", ALGOBASES) + "]");
});
}
/**
* Factory method to create a ModelBuilder instance from a clone of a given {@code parms} instance of Model.Parameters.
*/
public static <B extends ModelBuilder, MP extends Model.Parameters> B make(MP parms) {
Key<Model> mKey = ModelBuilder.defaultKey(parms.algoName());
return make(parms, mKey);
}
// Same as above but the caller supplies the result key; a fresh Job is created for the build.
public static <B extends ModelBuilder, MP extends Model.Parameters> B make(MP parms, Key<Model> mKey) {
Job<Model> mJob = new Job<>(mKey, parms.javaName(), parms.algoName());
B newMB = ModelBuilder.make(parms.algoName(), mJob, mKey);
newMB._parms = parms.clone();
newMB._input_parms = parms.clone();
return newMB;
}
/** All the parameters required to build the model. */
public P _parms; // Not final, so CV can set-after-clone
/** All the parameters required to build the model conserved in the input form, with AUTO values not evaluated yet. */
public P _input_parms;
/** Training frame: derived from the parameter's training frame, excluding
* all ignored columns, all constant and bad columns, perhaps flipping the
* response column to an Categorical, etc. */
public final Frame train() { return _train; }
protected transient Frame _train;
// Original (pre-adaptation) training frame, when an algo keeps it around.
protected transient Frame _origTrain;
// Replaces the working training frame (e.g. with a rebalanced or CV-weighted copy).
public void setTrain(Frame train) {
_train = train;
}
// Replaces the working validation frame.
public void setValid(Frame valid) {
_valid = valid;
}
/** Validation frame: derived from the parameter's validation frame, excluding
* all ignored columns, all constant and bad columns, perhaps flipping the
* response column to a Categorical, etc. Is null if no validation key is set. */
public final Frame valid() { return _valid; }
protected transient Frame _valid;
// TODO: tighten up the type
// Map the algo name (e.g., "deeplearning") to the builder class (e.g., DeepLearning.class) :
private static final Map<String, Class<? extends ModelBuilder>> _builders = new HashMap<>();
// Map the Model class (e.g., DeepLearningModel.class) to the algo name (e.g., "deeplearning"):
private static final Map<Class<? extends Model>, String> _model_class_to_algo = new HashMap<>();
// Map the simple algo name (e.g., deeplearning) to the full algo name (e.g., "Deep Learning"):
private static final Map<String, String> _algo_to_algo_full_name = new HashMap<>();
// Map the algo name (e.g., "deeplearning") to the Model class (e.g., DeepLearningModel.class):
private static final Map<String, Class<? extends Model>> _algo_to_model_class = new HashMap<>();
/** Train response vector. */
public Vec response(){return _response;}
/** Validation response vector; falls back to the training response when no separate one exists. */
public Vec vresponse(){return _vresponse == null ? _response : _vresponse;}
// Base class for the F/J task performing the actual training. Subclasses supply computeImpl();
// this wrapper handles scope management, frame read-locking, input-parameter bookkeeping,
// optional checkpoint export and listener notification.
abstract protected class Driver extends H2O.H2OCountedCompleter<Driver> {
protected Driver(){ super(); }
protected Driver(H2O.H2OCountedCompleter completer){ super(completer); }
// Pull the boilerplate out of the computeImpl(), so the algo writer doesn't need to worry about the following:
// 1) Scope (unless they want to keep data, then they must call Scope.untrack(Key<Vec>[]))
// 2) Train/Valid frame locking and unlocking
// 3) calling tryComplete()
public void compute2() {
try {
Scope.enter();
_parms.read_lock_frames(_job); // Fetch & read-lock input frames
computeImpl();
computeParameters();
saveModelCheckpointIfConfigured();
} finally {
// unlock/cleanup must run even when computeImpl() throws
_parms.read_unlock_frames(_job);
if (!_parms._is_cv_model) cleanUp(); //cv calls cleanUp on its own terms
Scope.exit();
}
tryComplete();
}
// Finalize model state, then notify the registered listener (if any) of success.
@Override
public void onCompletion(CountedCompleter caller) {
setFinalState();
if (_modelBuilderListener != null) {
_modelBuilderListener.onModelSuccess(_result.get());
}
}
// Finalize model state, notify the listener of failure; returning true propagates the exception.
@Override
public boolean onExceptionalCompletion(Throwable ex, CountedCompleter caller) {
setFinalState();
if (_modelBuilderListener != null) {
_modelBuilderListener.onModelFailure(ex, _parms);
}
return true;
}
// Model-specific training logic; runs inside the scope/locking boilerplate of compute2().
public abstract void computeImpl();
// Stores the original (unevaluated) input parameters on the built model, if one exists.
public final void computeParameters() {
M model = _result.get();
if (model != null) {
//set input parameters
model.setInputParms(_input_parms);
}
}
}
// Finalizes the built model in the DKV after training (successful or not):
// attaches the controlling job, stops the model's clock and persists the update under a write-lock.
private void setFinalState() {
Key<M> reskey = dest();
if (reskey == null) return;
M res = reskey.get();
if (res != null && res._output != null) {
res._output._job = _job;
res._output.stopClock();
// res.unlock(_job == null ? null : _job._key, false); // last resort: dirty way to force unlock to be able to reacquire lock
res.write_lock(_job);
res.update(_job);
res.unlock(_job);
}
Log.info("Completing model "+ reskey);
}
// Exports the binary model into _export_checkpoints_dir when that parameter is set;
// I/O failures are reported as an illegal-argument error on that parameter.
private void saveModelCheckpointIfConfigured() {
Model model = _result.get();
if (model != null && !StringUtils.isNullOrEmpty(model._parms._export_checkpoints_dir)) {
try {
model.exportBinaryModel(model._parms._export_checkpoints_dir + "/" + model._key.toString(), true);
} catch (FSIOException | HDFSIOException | IOException e) {
throw new H2OIllegalArgumentException("export_checkpoints_dir", "saveModelIfConfigured", e);
}
}
}
// Launches the model build on an H2O worker node (instead of this, possibly client, node).
// Parameters are snapshotted into _input_parms before shipping; returns the controlling job.
public Job<M> trainModelOnH2ONode() {
if (error_count() > 0)
throw H2OModelBuilderIllegalArgumentException.makeFromBuilder(this);
this._input_parms = (P) this._parms.clone();
TrainModelRunnable trainModel = new TrainModelRunnable(this);
H2O.runOnH2ONode(trainModel);
return _job;
}
// Ships a model build to a (non-client) H2O node: only job/key/parameters travel over the wire;
// the ModelBuilder itself is transient and re-created on the remote side in setupOnRemote().
private static class TrainModelRunnable extends H2O.RemoteRunnable<TrainModelRunnable> {
private transient ModelBuilder _mb;
private Job<Model> _job;
private Key<Model> _key;
private Model.Parameters _parms;
private Model.Parameters _input_parms;
@SuppressWarnings("unchecked")
private TrainModelRunnable(ModelBuilder mb) {
_mb = mb;
_job = (Job<Model>) _mb._job;
_key = _job._result;
_parms = _mb._parms;
_input_parms = _mb._input_parms;
}
@Override
public void setupOnRemote() {
// rebuild the builder from the shipped parameters on the executing node
_mb = ModelBuilder.make(_parms.algoName(), _job, _key);
_mb._parms = _parms;
_mb._input_parms = _input_parms;
_mb.init(false); // validate parameters
}
@Override
public void run() {
_mb.trainModel();
}
}
/** Method to launch training of a Model, based on its parameters. */
final public Job<M> trainModel() {
if (error_count() > 0)
throw H2OModelBuilderIllegalArgumentException.makeFromBuilder(this);
startClock();
// simple case: no CV - start the single training task directly
if( !nFoldCV() )
return _job.start(trainModelImpl(), _parms.progressUnits(), _parms._max_runtime_secs);
// cross-validation needs to be forked off to allow continuous (non-blocking) progress bar
return _job.start(new H2O.H2OCountedCompleter() {
@Override
public void compute2() {
computeCrossValidation();
tryComplete();
}
@Override
public boolean onExceptionalCompletion(Throwable ex, CountedCompleter caller) {
Log.warn("Model training job "+_job._description+" completed with exception: "+ex);
try {
Keyed.remove(_job._result); //ensure there's no incomplete model left for manipulation after crash or cancellation
} catch (Exception logged) {
Log.warn("Exception thrown when removing result from job "+ _job._description, logged);
}
return true;
}
},
(nFoldWork()+1/*main model*/) * _parms.progressUnits(), _parms._max_runtime_secs);
}
/**
* Train a model as part of a larger Job;
*
* @param fr: Input frame override, ignored if null.
* In some cases, algos do not work directly with the original frame in the K/V store.
* Instead they run on a private anonymous copy (eg: reblanced dataset).
* Use this argument if you want nested job to work on the actual working copy rather than the original Frame in the K/V.
* Example: Outer job rebalances dataset and then calls nested job. To avoid needless second reblance, pass in the (already rebalanced) working copy.
* */
final public M trainModelNested(Frame fr) {
if(fr != null) // Use the working copy (e.g. rebalanced) instead of the original K/V store version
setTrain(fr);
if (error_count() > 0)
throw H2OModelBuilderIllegalArgumentException.makeFromBuilder(this);
startClock();
// runs synchronously (blocking) on the caller's thread, unlike trainModel()
if( !nFoldCV() ) trainModelImpl().compute2();
else computeCrossValidation();
return _result.get();
}
/**
* Train a model as part of a larger job. The model will be built on a non-client node.
*
* @param job containing job
* @param result key of the resulting model
* @param params model parameters
* @param fr input frame, ignored if null
* @param <MP> Model.Parameters
* @return instance of a Model
*/
public static <MP extends Model.Parameters> Model trainModelNested(Job<?> job, Key<Model> result, MP params, Frame fr) {
// blocks until the remote build finishes, then fetches the model from the DKV
H2O.runOnH2ONode(new TrainModelNestedRunnable(job, result, params, fr));
return result.get();
}
// Remote counterpart of the static trainModelNested(): rebuilds the ModelBuilder on the
// executing node and runs the (blocking) nested training there.
private static class TrainModelNestedRunnable extends H2O.RemoteRunnable<TrainModelNestedRunnable> {
private Job<?> _job;
private Key<Model> _key;
private Model.Parameters _parms;
private Frame _fr;
private TrainModelNestedRunnable(Job<?> job, Key<Model> key, Model.Parameters parms, Frame fr) {
_job = job;
_key = key;
_parms = parms;
_fr = fr;
}
@Override
public void run() {
ModelBuilder mb = ModelBuilder.make(_parms.algoName(), _job, _key);
mb._parms = _parms;
mb._input_parms = _parms.clone();
mb.trainModelNested(_fr);
}
}
/** Model-specific implementation of model training
* @return A F/J Job, which, when executed, does the build. F/J is NOT started. */
abstract protected Driver trainModelImpl();
// Legacy override point; 0 means "not specified, defer to the folds-aware overload".
@Deprecated protected int nModelsInParallel() { return 0; }
/**
* How many should be trained in parallel during N-fold cross-validation?
* Train all CV models in parallel when parallelism is enabled, otherwise train one at a time
* Each model can override this logic, based on parameters, dataset size, etc.
* @return How many models to train in parallel during cross-validation
*/
protected int nModelsInParallel(int folds) {
// honor a deprecated per-algo override when present
int n = nModelsInParallel();
if (n > 0) return n;
return nModelsInParallel(folds, 1);
}
/**
 * Decides how many CV models may train concurrently.
 *
 * @param folds number of CV folds (upper bound for small-data parallelism)
 * @param defaultParallelization fallback level of parallelism for larger datasets
 * @return number of models to train at the same time (at least 1)
 */
protected int nModelsInParallel(int folds, int defaultParallelization) {
  // Serial building was explicitly requested (also keeps per-model time budgets comparable).
  if (!_parms._parallelize_cross_validation)
    return 1;
  // Small datasets (< 1MB) are cheap enough to train one model per fold concurrently.
  final int desired = _train.byteSize() < 1e6 ? folds : defaultParallelization;
  // TODO: apply better heuristic, estimating parallelization based on H2O.getCloudSize() and H2O.ARGS.nthreads
  final int clusterCapacity = H2O.getCloudSize() * H2O.ARGS.nthreads;
  return Math.min(desired, clusterCapacity);
}
/**
 * Splits the overall time budget (_max_runtime_secs) between the CV models and the main model.
 *
 * @param cvModelsCount number of cross-validation models (0 = no CV)
 * @param parallelization how many CV models train at the same time
 * @return per-model time budget in seconds (0 = unlimited, mirroring _max_runtime_secs)
 */
private double maxRuntimeSecsPerModel(int cvModelsCount, int parallelization) {
  if (cvModelsCount <= 0)
    return _parms._max_runtime_secs; // no CV: the main model gets the full budget
  // One extra "batch" is reserved for the main model trained after the CV models.
  final double batches = Math.ceil((double) cvModelsCount / parallelization + 1);
  return _parms._max_runtime_secs / batches;
}
// Work for each requested fold
// Number of CV models to build: either the explicit nfolds, or - when a fold column is
// given - the number of distinct levels in that column.
protected int nFoldWork() {
if( _parms._fold_column == null ) return _parms._nfolds;
Vec f = _parms._train.get().vec(_parms._fold_column);
Vec fc = VecUtils.toCategoricalVec(f);
int N = fc.domain().length;
fc.remove(); // drop the temporary categorical copy
return N;
}
/**
* Default naive (serial) implementation of N-fold cross-validation
* (builds N+1 models, all have train+validation metrics, the main model has N-fold cross-validated validation metrics)
*/
public void computeCrossValidation() {
assert _job.isRunning(); // main Job is still running
_job.setReadyForView(false); //wait until the main job starts to let the user inspect the main job
final int N = nFoldWork();
init(false);
ModelBuilder<M, P, O>[] cvModelBuilders = null;
try {
Scope.enter();
// Step 1: Assign each row to a fold
final Vec foldAssignment = cv_AssignFold(N);
// Step 2: Make 2*N binary weight vectors
final Vec[] weights = cv_makeWeights(N, foldAssignment);
// Step 3: Build N train & validation frames; build N ModelBuilders; error check them all
cvModelBuilders = cv_makeFramesAndBuilders(N, weights);
// Step 4: Run all the CV models
cv_buildModels(N, cvModelBuilders);
// Step 5: Score the CV models
ModelMetrics.MetricBuilder mbs[] = cv_scoreCVModels(N, weights, cvModelBuilders);
// Step 6: Build the main model
long time_allocated_to_main_model = (long)(maxRuntimeSecsPerModel(N, nModelsInParallel(N)) * 1e3);
buildMainModel(time_allocated_to_main_model);
// Step 7: Combine cross-validation scores; compute main model x-val
// scores; compute gains/lifts
cv_mainModelScores(N, mbs, cvModelBuilders);
_job.setReadyForView(true);
DKV.put(_job);
} catch (Exception e) {
// on any failure, remove everything the CV sub-builds put into the DKV and rethrow
if (cvModelBuilders != null) {
Futures fs = new Futures();
// removing keys added during cv_makeFramesAndBuilders and cv_makeFramesAndBuilders
// need a better solution: part of this is done in cv_makeFramesAndBuilders but partially and only for its method scope
// also removing the completed CV models as the main model is incomplete anyway
for (ModelBuilder mb : cvModelBuilders) {
DKV.remove(mb._parms._train, fs);
DKV.remove(mb._parms._valid, fs);
DKV.remove(Key.make(mb.getPredictionKey()), fs);
Keyed.remove(mb._result, fs, true);
}
fs.blockForPending();
}
throw e;
} finally {
// sub-builders always clean up their own temporaries, success or failure
if (cvModelBuilders != null) {
for (ModelBuilder mb : cvModelBuilders) {
mb.cleanUp();
}
}
cleanUp();
Scope.exit();
}
}
// Step 1: Assign each row to a fold
// TODO: Implement better splitting algo (with Strata if response is
// Returns a Vec mapping each row to a fold index; either the validated user-supplied
// fold column, or a freshly generated assignment per _fold_assignment scheme.
public Vec cv_AssignFold(int N) {
assert(N>=2);
Vec fold = train().vec(_parms._fold_column);
if( fold != null ) {
// a user-supplied fold column must be integral and contiguous: 0..N-1 or 1..N
if( !fold.isInt() ||
(!(fold.min() == 0 && fold.max() == N-1) &&
!(fold.min() == 1 && fold.max() == N ) )) // Allow 0 to N-1, or 1 to N
throw new H2OIllegalArgumentException("Fold column must be either categorical or contiguous integers from 0..N-1 or 1..N");
return fold;
}
final long seed = _parms.getOrMakeRealSeed();
Log.info("Creating " + N + " cross-validation splits with random number seed: " + seed);
switch( _parms._fold_assignment ) {
case AUTO:
case Random: return AstKFold. kfoldColumn(train().anyVec().makeZero(),N,seed);
case Modulo: return AstKFold. moduloKfoldColumn(train().anyVec().makeZero(),N );
case Stratified: return AstKFold.stratifiedKFoldColumn(response(),N,seed);
default: throw H2O.unimpl();
}
}
// Step 2: Make 2*N binary weight vectors
// For each fold f, weights[2*f] is the training weight (0 on holdout rows) and
// weights[2*f+1] the validation weight (original weight on holdout rows, 0 elsewhere).
public Vec[] cv_makeWeights( final int N, Vec foldAssignment ) {
String origWeightsName = _parms._weights_column;
Vec origWeight = origWeightsName != null ? train().vec(origWeightsName) : train().anyVec().makeCon(1.0);
Frame folds_and_weights = new Frame(foldAssignment, origWeight);
Vec[] weights = new MRTask() {
@Override public void map(Chunk chks[], NewChunk nchks[]) {
Chunk fold = chks[0], orig = chks[1];
for( int row=0; row< orig._len; row++ ) {
int foldIdx = (int)fold.at8(row) % N;
double w = orig.atd(row);
for( int f = 0; f < N; f++ ) {
boolean holdout = foldIdx == f;
nchks[2 * f].addNum(holdout ? 0 : w);
nchks[2*f+1].addNum(holdout ? w : 0);
}
}
}
}.doAll(2*N,Vec.T_NUM,folds_and_weights).outputFrame().vecs();
if (_parms._keep_cross_validation_fold_assignment)
DKV.put(new Frame(Key.<Frame>make("cv_fold_assignment_" + _result.toString()), new String[]{"fold_assignment"}, new Vec[]{foldAssignment}));
if( _parms._fold_column == null && !_parms._keep_cross_validation_fold_assignment) foldAssignment.remove();
if( origWeightsName == null ) origWeight.remove(); // Cleanup temp
// a constant weight vector means some fold got no (or all) rows - CV split failed
for( Vec weight : weights )
if( weight.isConst() )
throw new H2OIllegalArgumentException("Not enough data to create " + N + " random cross-validation splits. Either reduce nfolds, specify a larger dataset (or specify another random number seed, if applicable).");
return weights;
}
// Step 3: Build N train & validation frames; build N ModelBuilders; error check them all
private ModelBuilder<M, P, O>[] cv_makeFramesAndBuilders( int N, Vec[] weights ) {
final long old_cs = _parms.checksum();
final String origDest = _result.toString();
final String weightName = "__internal_cv_weights__";
if (train().find(weightName) != -1) throw new H2OIllegalArgumentException("Frame cannot contain a Vec called '" + weightName + "'.");
Frame cv_fr = new Frame(train().names(),train().vecs());
if( _parms._weights_column!=null ) cv_fr.remove( _parms._weights_column ); // The CV frames will have their own private weight column
ModelBuilder<M, P, O>[] cvModelBuilders = new ModelBuilder[N];
List<Frame> cvFramesForFailedModels = new ArrayList<>();
double cv_max_runtime_secs = maxRuntimeSecsPerModel(N, nModelsInParallel(N));
for( int i=0; i<N; i++ ) {
String identifier = origDest + "_cv_" + (i+1);
// Training/Validation share the same data, but will have exclusive weights
Frame cvTrain = new Frame(Key.make(identifier + "_train"), cv_fr.names(), cv_fr.vecs());
cvTrain.write_lock(_job);
cvTrain.add(weightName, weights[2*i]);
cvTrain.update(_job);
Frame cvValid = new Frame(Key.make(identifier + "_valid"), cv_fr.names(), cv_fr.vecs());
cvValid.write_lock(_job);
cvValid.add(weightName, weights[2*i+1]);
cvValid.update(_job);
// Shallow clone - not everything is a private copy!!!
ModelBuilder<M, P, O> cv_mb = (ModelBuilder)this.clone();
cv_mb.setTrain(cvTrain);
cv_mb._result = Key.make(identifier); // Each submodel gets its own key
cv_mb._parms = (P) _parms.clone();
// Fix up some parameters of the clone
cv_mb._parms._is_cv_model = true;
cv_mb._parms._cv_fold = i;
cv_mb._parms._weights_column = weightName;// All submodels have a weight column, which the main model does not
cv_mb._parms.setTrain(cvTrain._key); // Each submodel trains on its own private (weighted) training frame
cv_mb._parms._valid = cvValid._key;
cv_mb._parms._fold_assignment = Model.Parameters.FoldAssignmentScheme.AUTO;
cv_mb._parms._nfolds = 0; // Each submodel is not itself folded
cv_mb._parms._max_runtime_secs = cv_max_runtime_secs;
cv_mb.clearValidationErrors(); // each submodel gets its own validation messages and error_count()
cv_mb._input_parms = (P) _parms.clone();
// Error-check all the cross-validation Builders before launching any
cv_mb.init(false);
if( cv_mb.error_count() > 0 ) { // Gather all submodel error messages
Log.info("Marking frame for failed cv model for removal: " + cvTrain._key);
cvFramesForFailedModels.add(cvTrain);
Log.info("Marking frame for failed cv model for removal: " + cvValid._key);
cvFramesForFailedModels.add(cvValid);
for (ValidationMessage vm : cv_mb._messages)
message(vm._log_level, vm._field_name, vm._message);
}
cvModelBuilders[i] = cv_mb;
}
if( error_count() > 0 ) { // Found an error in one or more submodels
// clean up only the frames of failed submodels; then report all errors at once
Futures fs = new Futures();
for (Frame cvf : cvFramesForFailedModels) {
cvf.vec(weightName).remove(fs); // delete the Vec's chunks
DKV.remove(cvf._key, fs); // delete the Frame from the DKV, leaving its vecs
Log.info("Removing frame for failed cv model: " + cvf._key);
}
fs.blockForPending();
throw H2OModelBuilderIllegalArgumentException.makeFromBuilder(this);
}
// check that this Job's original _params haven't changed
assert old_cs == _parms.checksum();
return cvModelBuilders;
}
// Step 4: Run all the CV models and launch the main model
public void cv_buildModels(int N, ModelBuilder<M, P, O>[] cvModelBuilders ) {
bulkBuildModels("cross-validation", _job, cvModelBuilders, nModelsInParallel(N), 0 /*no job updates*/);
// let the algo adjust main-model parameters based on what the CV models learned
cv_computeAndSetOptimalParameters(cvModelBuilders);
}
/**
* Runs given model builders in bulk.
*
* @param modelType text description of group of models being built (for logging purposes)
* @param job parent job (processing will be stopped if stop of a parent job was requested)
* @param modelBuilders list of model builders to run in bulk
* @param parallelization level of parallelization (how many models can be built at the same time)
* @param updateInc update increment (0 = disable updates)
*/
public static void bulkBuildModels(String modelType, Job job, ModelBuilder<?, ?, ?>[] modelBuilders,
int parallelization, int updateInc) {
final int N = modelBuilders.length;
H2O.H2OCountedCompleter submodel_tasks[] = new H2O.H2OCountedCompleter[N];
int nRunning=0;
RuntimeException rt = null; // first exception seen while joining; rethrown after draining the batch
for( int i=0; i<N; ++i ) {
if (job.stop_requested() ) {
Log.info("Skipping build of last "+(N-i)+" out of "+N+" "+modelType+" CV models");
stopAll(submodel_tasks);
throw new Job.JobCancelledException();
}
Log.info("Building " + modelType + " model " + (i + 1) + " / " + N + ".");
modelBuilders[i].startClock();
submodel_tasks[i] = H2O.submitTask(modelBuilders[i].trainModelImpl());
if(++nRunning == parallelization) { //piece-wise advance in training the models
// drain the whole batch: index i+1-nRunning walks back over the batch's tasks oldest-first
while (nRunning > 0) try {
submodel_tasks[i + 1 - nRunning--].join();
if (updateInc > 0) job.update(updateInc); // One job finished
} catch (RuntimeException t) {
if (rt == null) rt = t; // remember only the first failure, keep joining the rest
}
if(rt != null) throw rt;
}
}
for( int i=0; i<N; ++i ) //all sub-models must be completed before the main model can be built
try {
final H2O.H2OCountedCompleter task = submodel_tasks[i];
assert task != null;
task.join();
} catch(RuntimeException t){
if (rt == null) rt = t;
}
if(rt != null) throw rt;
}
// Best-effort cancellation of all submitted sub-model tasks; slots that were never
// submitted (still null) are skipped.
private static void stopAll(H2O.H2OCountedCompleter[] tasks) {
  for (int i = 0; i < tasks.length; i++) {
    final H2O.H2OCountedCompleter task = tasks[i];
    if (task == null)
      continue;
    task.cancel(true);
  }
}
// Step 5: Score the CV models
// Scores each CV model on its holdout split and eagerly frees CV frames/weights;
// returns the per-fold metric builders to be reduced into the main model's CV metrics.
public ModelMetrics.MetricBuilder[] cv_scoreCVModels(int N, Vec[] weights, ModelBuilder<M, P, O>[] cvModelBuilders) {
if (_job.stop_requested()) {
Log.info("Skipping scoring of CV models");
throw new Job.JobCancelledException();
}
assert weights.length == 2*N;
assert cvModelBuilders.length == N;
Log.info("Scoring the "+N+" CV models");
ModelMetrics.MetricBuilder[] mbs = new ModelMetrics.MetricBuilder[N];
Futures fs = new Futures();
for (int i=0; i<N; ++i) {
if (_job.stop_requested()) {
Log.info("Skipping scoring for last "+(N-i)+" out of "+N+" CV models");
throw new Job.JobCancelledException();
}
Frame cvValid = cvModelBuilders[i].valid();
Frame adaptFr = new Frame(cvValid);
M cvModel = cvModelBuilders[i].dest().get();
cvModel.adaptTestForTrain(adaptFr, true, !isSupervised());
mbs[i] = cvModel.scoreMetrics(adaptFr);
// holdout predictions are materialized only when something downstream needs them
if (nclasses() == 2 /* need holdout predictions for gains/lift table */
|| _parms._keep_cross_validation_predictions
|| (cvModel.isDistributionHuber() /*need to compute quantiles on abs error of holdout predictions*/)) {
String predName = cvModelBuilders[i].getPredictionKey();
cvModel.predictScoreImpl(cvValid, adaptFr, predName, _job, true, CFuncRef.NOP);
DKV.put(cvModel);
}
// free resources as early as possible
if (adaptFr != null) {
Frame.deleteTempFrameAndItsNonSharedVecs(adaptFr, cvValid);
DKV.remove(adaptFr._key,fs);
}
DKV.remove(cvModelBuilders[i]._parms._train,fs);
DKV.remove(cvModelBuilders[i]._parms._valid,fs);
weights[2*i ].remove(fs);
weights[2*i+1].remove(fs);
}
fs.blockForPending();
return mbs;
}
// Step 6: build the main model
// Trains the main (non-CV) model synchronously with whatever time budget is left.
private void buildMainModel(long max_runtime_millis) {
if (_job.stop_requested()) {
Log.info("Skipping main model");
throw new Job.JobCancelledException();
}
assert _job.isRunning();
Log.info("Building main model.");
Log.info("Remaining time for main model (ms): " + max_runtime_millis);
// the step countdown overrides the overall countdown inside timeout() for this phase
_build_step_countdown = new Countdown(max_runtime_millis, true);
H2O.H2OCountedCompleter mm = H2O.submitTask(trainModelImpl());
mm.join(); // wait for completion
_build_step_countdown = null;
}
// Step 7: Combine cross-validation scores; compute main model x-val scores; compute gains/lifts
public void cv_mainModelScores(int N, ModelMetrics.MetricBuilder mbs[], ModelBuilder<M, P, O> cvModelBuilders[]) {
//never skipping CV main scores: we managed to reach last step and this should not be an expensive one, so let's offer this model
M mainModel = _result.get();
// Compute and put the cross-validation metrics into the main model
Log.info("Computing "+N+"-fold cross-validation metrics.");
Key<M>[] cvModKeys = new Key[N];
mainModel._output._cross_validation_models = _parms._keep_cross_validation_models ? cvModKeys : null;
Key<Frame>[] predKeys = new Key[N];
mainModel._output._cross_validation_predictions = _parms._keep_cross_validation_predictions ? predKeys : null;
// reduce all fold metrics into mbs[0] and collect the per-fold model/prediction keys
for (int i = 0; i < N; ++i) {
if (i > 0) mbs[0].reduce(mbs[i]);
cvModKeys[i] = cvModelBuilders[i]._result;
predKeys[i] = Key.make(cvModelBuilders[i].getPredictionKey());
}
Frame holdoutPreds = null;
if (_parms._keep_cross_validation_predictions || (nclasses()==2 /*GainsLift needs this*/ || mainModel.isDistributionHuber())) {
Key<Frame> cvhp = Key.make("cv_holdout_prediction_" + mainModel._key.toString());
if (_parms._keep_cross_validation_predictions) //only show the user if they asked for it
mainModel._output._cross_validation_holdout_predictions_frame_id = cvhp;
holdoutPreds = combineHoldoutPredictions(predKeys, cvhp);
}
if (_parms._keep_cross_validation_fold_assignment) {
mainModel._output._cross_validation_fold_assignment_frame_id = Key.make("cv_fold_assignment_" + _result.toString());
Frame xvalidation_fold_assignment_frame = mainModel._output._cross_validation_fold_assignment_frame_id.get();
if (xvalidation_fold_assignment_frame != null)
Scope.untrack(xvalidation_fold_assignment_frame.keysList());
}
// Keep or toss predictions
if (_parms._keep_cross_validation_predictions) {
for (Key<Frame> k : predKeys) {
Frame fr = DKV.getGet(k);
if (fr != null) Scope.untrack(fr.keysList());
}
} else {
int count = Model.deleteAll(predKeys);
Log.info(count+" CV predictions were removed");
}
mainModel._output._cross_validation_metrics = mbs[0].makeModelMetrics(mainModel, _parms.train(), null, holdoutPreds);
if (holdoutPreds != null) {
if (_parms._keep_cross_validation_predictions) Scope.untrack(holdoutPreds.keysList());
else holdoutPreds.remove();
}
mainModel._output._cross_validation_metrics._description = N + "-fold cross-validation on training data (Metrics computed for combined holdout predictions)";
Log.info(mainModel._output._cross_validation_metrics.toString());
mainModel._output._cross_validation_metrics_summary = makeCrossValidationSummaryTable(cvModKeys);
if (!_parms._keep_cross_validation_models) {
int count = Model.deleteAll(cvModKeys);
Log.info(count+" CV models were removed");
}
mainModel._output._total_run_time = _build_model_countdown.elapsedTime();
// Now, the main model is complete (has cv metrics)
DKV.put(mainModel);
}
// DKV key name under which this model's holdout predictions are stored.
private String getPredictionKey() {
  final String modelKey = _result.toString();
  return "prediction_".concat(modelKey);
}
/** Override for model-specific checks / modifications to _parms for the main model during N-fold cross-validation.
* Also allow the cv models to be modified after all of them have been built.
* For example, the model might need to be told to not do early stopping. CV models might have their lambda value modified, etc.
*/
public void cv_computeAndSetOptimalParameters(ModelBuilder<M, P, O>[] cvModelBuilders) { }
/** @return Whether n-fold cross-validation is done */
public boolean nFoldCV() {
return _parms._fold_column != null || _parms._nfolds != 0;
}
/** List containing the categories of models that this builder can
* build. Each ModelBuilder must have one of these. */
abstract public ModelCategory[] can_build();
/** Visibility for this algo: is it always visible, is it beta (always
 *  visible but with a note in the UI) or is it experimental (hidden by
 *  default, visible in the UI if the user gives an "experimental" flag at
 *  startup); test-only builders are "experimental" */
public enum BuilderVisibility {
  Experimental, Beta, Stable;

  /**
   * Case-insensitive counterpart of {@link Enum#valueOf(Class, String)}.
   *
   * @param value visibility level name, matched ignoring case
   * @return the matching visibility level
   * @throws IllegalArgumentException when no level matches the given name
   */
  public static BuilderVisibility valueOfIgnoreCase(final String value) throws IllegalArgumentException {
    for (BuilderVisibility level : values()) {
      if (level.name().equalsIgnoreCase(value))
        return level;
    }
    throw new IllegalArgumentException(String.format("Algorithm availability level of '%s' is not known. Available levels: %s",
            value, Arrays.toString(values())));
  }
}
// Default visibility is Stable; algos override to mark themselves Beta/Experimental.
public BuilderVisibility builderVisibility() { return BuilderVisibility.Stable; }
/** Clear whatever was done by init() so it can be run again. */
public void clearInitState() {
clearValidationErrors();
}
// Whether parameters should be logged when the build starts; algos may opt out.
protected boolean logMe() { return true; }
abstract public boolean isSupervised();
// Supervised algos may override to allow training without a response column.
public boolean isResponseOptional() {
return false;
}
protected transient Vec _response; // Handy response column
protected transient Vec _vresponse; // Handy response column
protected transient Vec _offset; // Handy offset column
protected transient Vec _weights; // observation weight column
protected transient Vec _fold; // fold id column
protected transient String[] _origNames; // only set if ModelBuilder.encodeFrameCategoricals() changes the training frame
protected transient String[][] _origDomains; // only set if ModelBuilder.encodeFrameCategoricals() changes the training frame
protected transient double[] _orig_projection_array; // only set if ModelBuilder.encodeFrameCategoricals() changes the training frame
public boolean hasOffsetCol(){ return _parms._offset_column != null;} // don't look at transient Vec
public boolean hasWeightCol(){ return _parms._weights_column != null;} // don't look at transient Vec
public boolean hasFoldCol() { return _parms._fold_column != null;} // don't look at transient Vec
// Count of configured special (offset/weights/fold) columns.
public int numSpecialCols() { return (hasOffsetCol() ? 1 : 0) + (hasWeightCol() ? 1 : 0) + (hasFoldCol() ? 1 : 0); }
/** Names of the special (non-feature) columns, in frame order: offset, weights, fold. */
public String[] specialColNames() {
  List<String> cols = new ArrayList<>(3);
  if (hasOffsetCol()) cols.add(_parms._offset_column);
  if (hasWeightCol()) cols.add(_parms._weights_column);
  if (hasFoldCol()) cols.add(_parms._fold_column);
  return cols.toArray(new String[0]);
}
// no hasResponse, call isSupervised instead (response is mandatory if isSupervised is true)
public boolean havePojo() { return false; } // whether this algo can export a POJO
public boolean haveMojo() { return false; } // whether this algo can export a MOJO
protected int _nclass; // Number of classes; 1 for regression; 2+ for classification
public int nclasses(){return _nclass;}
public final boolean isClassifier() { return nclasses() > 1; }
// Whether init() should validate _stopping_metric against the problem type;
// subclasses may opt out by returning false.
protected boolean validateStoppingMetric() {
  return true;
}
/**
 * Find and set response/weights/offset/fold columns, validate them, and move them to the
 * end of the training frame (each is removed and re-added, which appends it).
 * Validation errors are attached to the corresponding parameter field.
 * @return number of non-feature vecs found and moved
 */
public int separateFeatureVecs() {
  int res = 0;
  if(_parms._weights_column != null) {
    Vec w = _train.remove(_parms._weights_column);
    if(w == null)
      error("_weights_column","Weights column '" + _parms._weights_column + "' not found in the training frame");
    else {
      if(!w.isNumeric())
        error("_weights_column","Invalid weights column '" + _parms._weights_column + "', weights must be numeric");
      _weights = w;
      // FIX: these errors were reported against "_weights_columns" (trailing 's'), a
      // nonexistent field name — inconsistent with every other weights-column error.
      if(w.naCnt() > 0)
        error("_weights_column","Weights cannot have missing values.");
      if(w.min() < 0)
        error("_weights_column","Weights must be >= 0");
      if(w.max() == 0)
        error("_weights_column","Max. weight must be > 0");
      _train.add(_parms._weights_column, w); // re-append so it ends up at the back
      ++res;
    }
  } else {
    _weights = null;
    assert(!hasWeightCol());
  }
  if(_parms._offset_column != null) {
    Vec o = _train.remove(_parms._offset_column);
    if(o == null)
      error("_offset_column","Offset column '" + _parms._offset_column + "' not found in the training frame");
    else {
      if(!o.isNumeric())
        error("_offset_column","Invalid offset column '" + _parms._offset_column + "', offset must be numeric");
      _offset = o;
      if(o.naCnt() > 0)
        error("_offset_column","Offset cannot have missing values.");
      if(_weights == _offset)
        error("_offset_column", "Offset must be different from weights");
      _train.add(_parms._offset_column, o);
      ++res;
    }
  } else {
    _offset = null;
    assert(!hasOffsetCol());
  }
  if(_parms._fold_column != null) {
    Vec f = _train.remove(_parms._fold_column);
    if(f == null)
      error("_fold_column","Fold column '" + _parms._fold_column + "' not found in the training frame");
    else {
      if(!f.isInt() && !f.isCategorical())
        error("_fold_column","Invalid fold column '" + _parms._fold_column + "', fold must be integer or categorical");
      if(f.min() < 0)
        error("_fold_column","Invalid fold column '" + _parms._fold_column + "', fold must be non-negative");
      if(f.isConst())
        error("_fold_column","Invalid fold column '" + _parms._fold_column + "', fold cannot be constant");
      _fold = f;
      if(f.naCnt() > 0)
        error("_fold_column","Fold cannot have missing values.");
      if(_fold == _weights)
        error("_fold_column", "Fold must be different from weights");
      if(_fold == _offset)
        error("_fold_column", "Fold must be different from offset");
      _train.add(_parms._fold_column, f);
      ++res;
    }
  } else {
    _fold = null;
    assert(!hasFoldCol());
  }
  if(isSupervised() && _parms._response_column != null) {
    _response = _train.remove(_parms._response_column);
    if (_response == null) {
      // (redundant inner isSupervised() guard removed — this branch is already supervised-only)
      error("_response_column", "Response column '" + _parms._response_column + "' not found in the training frame");
    } else {
      if(_response == _offset)
        error("_response_column", "Response column must be different from offset_column");
      if(_response == _weights)
        error("_response_column", "Response column must be different from weights_column");
      if(_response == _fold)
        error("_response_column", "Response column must be different from fold_column");
      _train.add(_parms._response_column, _response);
      ++res;
    }
  } else {
    _response = null;
  }
  return res;
}
// Whether String columns should be dropped from the training frame (used by ignoreBadColumns).
protected boolean ignoreStringColumns() {
  return true;
}
// Whether constant columns should be dropped; driven by the _ignore_const_cols parameter.
protected boolean ignoreConstColumns() {
  return _parms._ignore_const_cols;
}
// Whether UUID columns should be dropped from the training frame.
protected boolean ignoreUuidColumns() {
  return true;
}
/**
 * Ignore constant columns, columns with all NAs and strings.
 * @param npredictors number of special (non-feature) vecs kept at the end of the frame
 *                    (return value of separateFeatureVecs()); these are never filtered
 * @param expensive whether this is the expensive (full) init pass; controls logging in doIt()
 */
protected void ignoreBadColumns(int npredictors, boolean expensive){
  // Drop all-constant and all-bad columns.
  // NOTE(review): the entire filter — including the string/UUID/all-NA drops — only runs
  // when _ignore_const_cols is set; confirm this gating is intended.
  if(_parms._ignore_const_cols)
    new FilterCols(npredictors) {
      @Override protected boolean filter(Vec v) {
        boolean isBad = v.isBad();
        boolean skipConst = ignoreConstColumns() && v.isConst(canLearnFromNAs()); // NAs can have information
        boolean skipString = ignoreStringColumns() && v.isString();
        boolean skipUuid = ignoreUuidColumns() && v.isUUID();
        boolean skip = isBad || skipConst || skipString || skipUuid;
        return skip;
      }
    }.doIt(_train,"Dropping bad and constant columns: ",expensive);
}
/**
 * Indicates that the algorithm is able to natively learn from NA values, there is no need
 * to eg. impute missing values or skip rows that have missing values.
 * Consulted by ignoreBadColumns(): a column that is constant apart from NAs is still kept
 * when the algo can learn from NAs.
 * @return whether model builder natively supports NAs
 */
protected boolean canLearnFromNAs() {
  return false;
}
/**
 * Checks response variable attributes and adds errors if response variable is unusable.
 */
protected void checkResponseVariable() {
  if (_response == null) return; // nothing to validate without a response
  boolean usable = _response.isNumeric() || _response.isCategorical() || _response.isTime();
  if (!usable) {
    error("_response_column", "Use numerical, categorical or time variable. Currently used " + _response.get_type_str());
  }
}
/**
 * Ignore invalid columns (columns that have a very high max value, which can cause issues in DHistogram).
 * No-op by default; subclasses may override.
 * @param npredictors number of special (non-feature) vecs at the end of the frame
 * @param expensive whether this is the expensive (full) init pass
 */
protected void ignoreInvalidColumns(int npredictors, boolean expensive){}
/**
 * Makes sure the final model will fit in memory.
 *
 * Note: This method should not be overridden (override checkMemoryFootPrint_impl instead). It is
 * not declared 'final' to not to break 3rd party implementations. It might be declared final in the future
 * if necessary.
 */
protected void checkMemoryFootPrint() {
  if (Boolean.getBoolean(H2O.OptArgs.SYSTEM_PROP_PREFIX + "debug.noMemoryCheck")) return; // skip check if disabled
  checkMemoryFootPrint_impl();
}
/**
 * Override this method to call error() if the model is expected to not fit in memory, and say why
 */
protected void checkMemoryFootPrint_impl() {}
transient double [] _distribution;             // per-class (weighted) response counts — populated in init()
transient protected double [] _priorClassDist; // relative per-class response frequencies — populated in init()
// Whether init() should compute _distribution/_priorClassDist; by default only for classifiers.
protected boolean computePriorClassDistribution(){
  return isClassifier();
}
/** A list of field validation issues. */
public ValidationMessage[] _messages = new ValidationMessage[0];
private int _error_count = -1; // -1 ==> init not run yet, for those Jobs that have an init, like ModelBuilder. Note, this counts ONLY errors, not WARNs and etc.
public int error_count() { assert _error_count >= 0 : "init() not run yet"; return _error_count; }
public void hide (String field_name, String message) { message(Log.TRACE, field_name, message); }
public void info (String field_name, String message) { message(Log.INFO , field_name, message); }
public void warn (String field_name, String message) { message(Log.WARN , field_name, message); }
// FIX: message(Log.ERRR, ...) already increments _error_count; the extra increment that was
// here double-counted every error reported through error().
public void error(String field_name, String message) { message(Log.ERRR , field_name, message); }
public void clearValidationErrors() {
  _messages = new ValidationMessage[0];
  _error_count = 0;
}
// Records a validation message at the given log level; ERRR-level messages bump _error_count.
public void message(byte log_level, String field_name, String message) {
  _messages = Arrays.copyOf(_messages, _messages.length + 1);
  _messages[_messages.length - 1] = new ValidationMessage(log_level, field_name, message);
  if (log_level == Log.ERRR) _error_count++;
}
/** Get a string representation of only the ERROR ValidationMessages (e.g., to use in an exception throw). */
public String validationErrors() {
  StringBuilder errors = new StringBuilder();
  for (int i = 0; i < _messages.length; i++) {
    ValidationMessage vm = _messages[i];
    if (vm._log_level == Log.ERRR) {
      errors.append(vm.toString()).append("\n");
    }
  }
  return errors.toString();
}
/** Can be an ERROR, meaning the parameters can't be used as-is,
 *  a TRACE, which means the specified field should be hidden given
 *  the values of other fields, or a WARN or INFO for informative
 *  messages to the user. */
public static final class ValidationMessage extends Iced {
  final byte _log_level; // See util/Log.java for levels
  final String _field_name; // parameter (field) the message applies to
  final String _message; // human-readable message text
  public ValidationMessage(byte log_level, String field_name, String message) {
    _log_level = log_level;
    _field_name = field_name;
    _message = message;
    Log.log(log_level,field_name + ": " + message); // also emit to the H2O log on creation
  }
  public int log_level() { return _log_level; }
  @Override public String toString() { return Log.LVLS[_log_level] + " on field: " + _field_name + ": " + _message; }
}
/** Initialize the ModelBuilder, validating all arguments and preparing the
 * training frame. This call is expected to be overridden in the subclasses
 * and each subclass will start with "super.init();". This call is made by
 * the front-end whenever the GUI is clicked, and needs to be fast whenever
 * {@code expensive} is false; it will be called once again at the start of
 * model building {@see #trainModel()} with expensive set to true.
 *<p>
 * The incoming training frame (and validation frame) will have ignored
 * columns dropped out, plus whatever work the parent init did.
 *<p>
 * NOTE: The front end initially calls this through the parameters validation
 * endpoint with no training_frame, so each subclass's {@code init()} method
 * has to work correctly with the training_frame missing.
 *<p>
 */
public void init(boolean expensive) {
  // Log parameters
  if( expensive && logMe() ) {
    Log.info("Building H2O " + this.getClass().getSimpleName() + " model with these parameters:");
    Log.info(new String(_parms.writeJSON(new AutoBuffer()).buf()));
  }
  // NOTE: allow re-init:
  clearInitState();
  initWorkspace(expensive);
  assert _parms != null; // Parms must already be set in
  if( _parms._train == null ) {
    if (expensive)
      error("_train", "Missing training frame");
    return;
  }
  Frame tr = _train != null?_train:_parms.train();
  if( tr == null ) { error("_train", "Missing training frame: "+_parms._train); return; }
  // Work on a shallow copy so the user's frame is never mutated (not put into KV).
  setTrain(new Frame(null /* not putting this into KV */, tr._names.clone(), tr.vecs().clone()));
  if (expensive) {
    _parms.getOrMakeRealSeed();
  }
  if (_parms._categorical_encoding.needsResponse() && !isSupervised()) {
    error("_categorical_encoding", "Categorical encoding scheme cannot be "
        + _parms._categorical_encoding.toString() + " - no response column available.");
  }
  // --- Cross-validation parameter validation ---
  if (_parms._nfolds < 0 || _parms._nfolds == 1) {
    error("_nfolds", "nfolds must be either 0 or >1.");
  }
  if (_parms._nfolds > 1 && _parms._nfolds > train().numRows()) {
    error("_nfolds", "nfolds cannot be larger than the number of rows (" + train().numRows() + ").");
  }
  if (_parms._fold_column != null) {
    hide("_fold_assignment", "Fold assignment is ignored when a fold column is specified.");
    if (_parms._nfolds > 1) {
      error("_nfolds", "nfolds cannot be specified at the same time as a fold column.");
    } else {
      hide("_nfolds", "nfolds is ignored when a fold column is specified.");
    }
    // FIX: null-check _fold_assignment BEFORE comparing it against AUTO; the old code also
    // null-checked _parms after having already dereferenced it (dead, misleading condition).
    if (_parms._fold_assignment != null && _parms._fold_assignment != Model.Parameters.FoldAssignmentScheme.AUTO) {
      error("_fold_assignment", "Fold assignment is not allowed in conjunction with a fold column.");
    }
  }
  if (_parms._nfolds > 1) {
    hide("_fold_column", "Fold column is ignored when nfolds > 1.");
  }
  // hide cross-validation parameters unless cross-val is enabled
  if (!nFoldCV()) {
    hide("_keep_cross_validation_models", "Only for cross-validation.");
    hide("_keep_cross_validation_predictions", "Only for cross-validation.");
    hide("_keep_cross_validation_fold_assignment", "Only for cross-validation.");
    hide("_fold_assignment", "Only for cross-validation.");
    if (_parms._fold_assignment != null && _parms._fold_assignment != Model.Parameters.FoldAssignmentScheme.AUTO) {
      error("_fold_assignment", "Fold assignment is only allowed for cross-validation.");
    }
  }
  // --- Distribution parameter validation ---
  if (_parms._distribution == DistributionFamily.modified_huber) {
    error("_distribution", "Modified Huber distribution is not supported yet.");
  }
  if (_parms._distribution != DistributionFamily.tweedie) {
    hide("_tweedie_power", "Only for Tweedie Distribution.");
  }
  if (_parms._tweedie_power <= 1 || _parms._tweedie_power >= 2) {
    error("_tweedie_power", "Tweedie power must be between 1 and 2 (exclusive).");
  }
  // Drop explicitly dropped columns
  if( _parms._ignored_columns != null ) {
    _train.remove(_parms._ignored_columns);
    if( expensive ) Log.info("Dropping ignored columns: "+Arrays.toString(_parms._ignored_columns));
  }
  if(_parms._checkpoint != null){
    if(DKV.get(_parms._checkpoint) == null){
      error("_checkpoint", "Checkpoint has to point to existing model!");
    }
    // Do not ignore bad columns, as only portion of the training data might be supplied (e.g. continue from checkpoint)
    final Model checkpointedModel = _parms._checkpoint.get();
    final String[] warnings = checkpointedModel.adaptTestForTrain(_train, expensive, false);
    for (final String warning : warnings){
      warn("_checkpoint", warning);
    }
    separateFeatureVecs(); // set MB's fields (like response)
  } else {
    // Drop all non-numeric columns (e.g., String and UUID). No current algo
    // can use them, and otherwise all algos will then be forced to remove
    // them. Text algos (grep, word2vec) take raw text columns - which are
    // numeric (arrays of bytes).
    ignoreBadColumns(separateFeatureVecs(), expensive);
    ignoreInvalidColumns(separateFeatureVecs(), expensive);
    checkResponseVariable();
  }
  // Rebalance train and valid datasets (after invalid/bad columns are dropped)
  if (expensive && error_count() == 0 && _parms._auto_rebalance) {
    setTrain(rebalance(_train, false, _result + ".temporary.train"));
    separateFeatureVecs(); // need to reset MB's fields (like response) after rebalancing
    _valid = rebalance(_valid, false, _result + ".temporary.valid");
  }
  // Check that at least some columns are not-constant and not-all-NAs
  if (_train.numCols() == 0)
    error("_train", "There are no usable columns to generate model");
  if(isSupervised()) {
    if(_response != null) {
      if (_parms._distribution != DistributionFamily.tweedie) {
        hide("_tweedie_power", "Tweedie power is only used for Tweedie distribution.");
      }
      if (_parms._distribution != DistributionFamily.quantile) {
        hide("_quantile_alpha", "Quantile (alpha) is only used for Quantile regression.");
      }
      if (expensive) checkDistributions();
      _nclass = init_getNClass();
      if (_parms._check_constant_response && _response.isConst()) {
        error("_response", "Response cannot be constant.");
      }
    }
    if (! _parms._balance_classes)
      hide("_max_after_balance_size", "Balance classes is false, hide max_after_balance_size");
    else if (_parms._weights_column != null && _weights != null && !_weights.isBinary())
      error("_balance_classes", "Balance classes and observation weights are not currently supported together.");
    if( _parms._max_after_balance_size <= 0.0 )
      error("_max_after_balance_size","Max size after balancing needs to be positive, suggest 1.0f");
    if( _train != null ) {
      if (_train.numCols() <= 1 && !getClass().toString().equals("class hex.gam.GAM")) // gam can have zero predictors
        error("_train", "Training data must have at least 2 features (incl. response).");
      if( null == _parms._response_column) {
        error("_response_column", "Response column parameter not set.");
        return;
      }
      // Compute class distribution / prior class probabilities (used e.g. for balancing).
      if(_response != null && computePriorClassDistribution()) {
        if (isClassifier() && isSupervised()) {
          if(_parms._distribution == DistributionFamily.quasibinomial){
            String[] quasiDomains = new VecUtils.CollectDoubleDomain(null,2).doAll(_response).stringDomain(_response.isInt());
            MRUtils.ClassDistQuasibinomial cdmt =
                _weights != null ? new MRUtils.ClassDistQuasibinomial(quasiDomains).doAll(_response, _weights) : new MRUtils.ClassDistQuasibinomial(quasiDomains).doAll(_response);
            _distribution = cdmt.dist();
            _priorClassDist = cdmt.relDist();
          } else {
            MRUtils.ClassDist cdmt =
                _weights != null ? new MRUtils.ClassDist(nclasses()).doAll(_response, _weights) : new MRUtils.ClassDist(nclasses()).doAll(_response);
            _distribution = cdmt.dist();
            _priorClassDist = cdmt.relDist();
          }
        } else { // Regression; only 1 "class"
          _distribution = new double[]{ (_weights != null ? _weights.mean() : 1.0) * train().numRows() };
          _priorClassDist = new double[]{1.0f};
        }
      }
    }
    if( !isClassifier() ) {
      hide("_balance_classes", "Balance classes is only applicable to classification problems.");
      hide("_class_sampling_factors", "Class sampling factors is only applicable to classification problems.");
      hide("_max_after_balance_size", "Max after balance size is only applicable to classification problems.");
      hide("_max_confusion_matrix_size", "Max confusion matrix size is only applicable to classification problems.");
    }
    if (_nclass <= 2) {
      hide("_max_confusion_matrix_size", "Only for multi-class classification problems.");
    }
    if( !_parms._balance_classes ) {
      hide("_max_after_balance_size", "Only used with balanced classes");
      hide("_class_sampling_factors", "Class sampling factors is only applicable if balancing classes.");
    }
  }
  else {
    if (!isResponseOptional()) {
      hide("_response_column", "Ignored for unsupervised methods.");
      _vresponse = null;
    }
    hide("_balance_classes", "Ignored for unsupervised methods.");
    hide("_class_sampling_factors", "Ignored for unsupervised methods.");
    hide("_max_after_balance_size", "Ignored for unsupervised methods.");
    hide("_max_confusion_matrix_size", "Ignored for unsupervised methods.");
    _response = null;
    _nclass = 1;
  }
  if( _nclass > Model.Parameters.MAX_SUPPORTED_LEVELS ) {
    error("_nclass", "Too many levels in response column: " + _nclass + ", maximum supported number of classes is " + Model.Parameters.MAX_SUPPORTED_LEVELS + ".");
  }
  // Build the validation set to be compatible with the training set.
  // Toss out extra columns, complain about missing ones, remap categoricals
  Frame va = _parms.valid(); // User-given validation set
  if (va != null) {
    if (isResponseOptional() && _parms._response_column != null && _response == null) {
      _vresponse = va.vec(_parms._response_column);
    }
    _valid = adaptFrameToTrain(va, "Validation Frame", "_validation_frame", expensive, false); // see PUBDEV-7785
    if (!isResponseOptional() || (_parms._response_column != null && _valid.find(_parms._response_column) >= 0)) {
      _vresponse = _valid.vec(_parms._response_column);
    }
  } else {
    _valid = null;
    _vresponse = null;
  }
  if (expensive) {
    // CV models defer frame cleanup to the main model's workspace instead of the local Scope.
    boolean scopeTrack = !_parms._is_cv_model;
    Frame newtrain = applyPreprocessors(_train, true, scopeTrack);
    newtrain = encodeFrameCategoricals(newtrain, scopeTrack); //we could turn this into a preprocessor later
    if (newtrain != _train) {
      // Remember the pre-encoding frame/names/domains so predictions can be mapped back.
      _origTrain = _train;
      _origNames = _train.names();
      _origDomains = _train.domains();
      setTrain(newtrain);
      separateFeatureVecs(); //fix up the pointers to the special vecs
    } else {
      _origTrain = null;
    }
    if (_valid != null) {
      Frame newvalid = applyPreprocessors(_valid, false, scopeTrack);
      newvalid = encodeFrameCategoricals(newvalid, scopeTrack /* for CV, need to score one more time in outer loop */);
      setValid(newvalid);
    }
    // Optionally reorder categorical levels by mean (weighted) response for better splits.
    boolean restructured = false;
    Vec[] vecs = _train.vecs();
    for (int j = 0; j < vecs.length; ++j) {
      Vec v = vecs[j];
      if (v == _response || v == _fold) continue;
      if (v.isCategorical() && shouldReorder(v)) {
        final int len = v.domain().length;
        Log.info("Reordering categorical column " + _train.name(j) + " (" + len + " levels) based on the mean (weighted) response per level.");
        VecUtils.MeanResponsePerLevelTask mrplt = new VecUtils.MeanResponsePerLevelTask(len).doAll(v,
            _parms._weights_column != null ? _train.vec(_parms._weights_column) : v.makeCon(1.0),
            _train.vec(_parms._response_column));
        double[] meanWeightedResponse = mrplt.meanWeightedResponse;
        // Order the categorical column by response to make better splits
        int[] idx=new int[len];
        for (int i=0;i<len;++i) idx[i] = i;
        ArrayUtils.sort(idx, meanWeightedResponse);
        int[] invIdx=new int[len];
        for (int i=0;i<len;++i) invIdx[idx[i]] = i;
        Vec vNew = new VecUtils.ReorderTask(invIdx).doAll(1, Vec.T_NUM, new Frame(v)).outputFrame().anyVec();
        String[] newDomain = new String[len];
        for (int i = 0; i < len; ++i) newDomain[i] = v.domain()[idx[i]];
        vNew.setDomain(newDomain);
        vecs[j] = vNew;
        restructured = true;
      }
    }
    if (restructured)
      _train.restructure(_train.names(), vecs);
  }
  boolean names_may_differ = _parms._categorical_encoding == Model.Parameters.CategoricalEncodingScheme.Binary;
  boolean names_differ = _valid !=null && ArrayUtils.difference(_train._names, _valid._names).length != 0; // FIX: removed stray ';;'
  assert (!expensive || names_may_differ || !names_differ);
  if (names_differ && names_may_differ) {
    for (String name : _train._names)
      assert(ArrayUtils.contains(_valid._names, name)) : "Internal error during categorical encoding: training column " + name + " not in validation frame with columns " + Arrays.toString(_valid._names);
  }
  // --- Early-stopping parameter validation ---
  if (_parms._stopping_tolerance < 0) {
    error("_stopping_tolerance", "Stopping tolerance must be >= 0.");
  }
  if (_parms._stopping_tolerance >= 1) {
    error("_stopping_tolerance", "Stopping tolerance must be < 1.");
  }
  if (_parms._stopping_rounds == 0) {
    if (_parms._stopping_metric != ScoreKeeper.StoppingMetric.AUTO)
      warn("_stopping_metric", "Stopping metric is ignored for _stopping_rounds=0.");
    if (_parms._stopping_tolerance != _parms.defaultStoppingTolerance())
      warn("_stopping_tolerance", "Stopping tolerance is ignored for _stopping_rounds=0.");
  } else if (_parms._stopping_rounds < 0) {
    error("_stopping_rounds", "Stopping rounds must be >= 0.");
  } else if (validateStoppingMetric()){
    if (isClassifier()) {
      if (_parms._stopping_metric == ScoreKeeper.StoppingMetric.deviance && !getClass().getSimpleName().contains("GLM")) {
        error("_stopping_metric", "Stopping metric cannot be deviance for classification.");
      }
      if (nclasses()!=2 && (_parms._stopping_metric == ScoreKeeper.StoppingMetric.AUC || _parms._stopping_metric
          == ScoreKeeper.StoppingMetric.AUCPR)) {
        error("_stopping_metric", "Stopping metric cannot be AUC or AUCPR for multinomial " +
            "classification.");
      }
    } else {
      if (_parms._stopping_metric == ScoreKeeper.StoppingMetric.misclassification ||
          _parms._stopping_metric == ScoreKeeper.StoppingMetric.AUC ||
          _parms._stopping_metric == ScoreKeeper.StoppingMetric.logloss || _parms._stopping_metric
          == ScoreKeeper.StoppingMetric.AUCPR)
      {
        error("_stopping_metric", "Stopping metric cannot be " + _parms._stopping_metric.toString() + " for regression.");
      }
    }
  }
  if (_parms._stopping_metric == ScoreKeeper.StoppingMetric.custom || _parms._stopping_metric == ScoreKeeper.StoppingMetric.custom_increasing) {
    if (_parms._custom_metric_func == null) {
      error("_stopping_metric", "Custom metric function needs to be defined in order to use it for early stopping.");
    }
  }
  if (_parms._max_runtime_secs < 0) {
    error("_max_runtime_secs", "Max runtime (in seconds) must be greater than 0 (or 0 for unlimited).");
  }
  if (!StringUtils.isNullOrEmpty(_parms._export_checkpoints_dir)) {
    if (!_parms._is_cv_model) {
      // we do not need to check if the checkpoint directory is writeable on CV-models, it was already checked on the main model
      if (!H2O.getPM().isWritableDirectory(_parms._export_checkpoints_dir)) {
        error("_export_checkpoints_dir", "Checkpoints directory path must point to a writable path.");
      }
    }
  }
}
/**
 * Adapts a given frame to the same schema as the training frame.
 * This includes encoding of categorical variables (if expensive is enabled).
 *
 * Note: This method should only be used during ModelBuilder initialization - it should be called in init(..) method.
 *
 * @param fr input frame
 * @param frDesc frame description, eg. "Validation Frame" - will be shown in validation error messages
 * @param field name of a field for validation errors
 * @param expensive indicates full ("expensive") processing
 * @return adapted frame
 */
public Frame init_adaptFrameToTrain(Frame fr, String frDesc, String field, boolean expensive) {
  Frame adapted = adaptFrameToTrain(fr, frDesc, field, expensive, false);
  // Categorical encoding is only applied on the expensive pass.
  return expensive ? encodeFrameCategoricals(adapted, true) : adapted;
}
/**
 * Adapts a frame to the training frame's schema, reporting adaptation messages as
 * warnings and schema incompatibilities as validation errors attached to {@code field}.
 *
 * @param fr         input frame (not modified; a shallow copy is adapted)
 * @param frDesc     frame description used in messages, e.g. "Validation Frame"
 * @param field      parameter field name to attach validation errors/warnings to
 * @param expensive  indicates full ("expensive") processing
 * @param catEncoded whether the frame has already been categorical-encoded
 * @return the adapted shallow copy of {@code fr}
 */
private Frame adaptFrameToTrain(Frame fr, String frDesc, String field, boolean expensive, boolean catEncoded) {
  if (fr.numRows()==0) error(field, frDesc + " must have > 0 rows.");
  // Shallow copy so the caller's frame is never mutated (not put into KV).
  Frame adapted = new Frame(null /* not putting this into KV */, fr._names.clone(), fr.vecs().clone());
  try {
    String[] msgs = Model.adaptTestForTrain(
        adapted,
        null,
        null,
        _train._names,
        _train.domains(),
        _parms,
        expensive,
        true,
        null,
        getToEigenVec(),
        _workspace.getToDelete(expensive),
        catEncoded
    );
    Vec response = adapted.vec(_parms._response_column);
    if (response == null && _parms._response_column != null && !isResponseOptional())
      error(field, frDesc + " must have a response column '" + _parms._response_column + "'.");
    if (expensive) {
      // Surface every adaptation message both in the log and as a field warning.
      for (String s : msgs) {
        Log.info(s);
        warn(field, s);
      }
    }
  } catch (IllegalArgumentException iae) {
    // Incompatible schema: converted into a validation error instead of propagating.
    error(field, iae.getMessage());
  }
  return adapted;
}
/**
 * Runs the configured model preprocessors over the frame in order, tracking any
 * intermediate frames they produce for later cleanup.
 */
private Frame applyPreprocessors(Frame fr, boolean isTraining, boolean scopeTrack) {
  if (_parms._preprocessors == null) return fr;
  // Warm the DKV cache for all preprocessor keys up front.
  for (Key<ModelPreprocessor> key : _parms._preprocessors) {
    DKV.prefetch(key);
  }
  Frame result = fr;
  for (Key<ModelPreprocessor> key : _parms._preprocessors) {
    ModelPreprocessor preprocessor = key.get();
    Frame processed = isTraining
        ? preprocessor.processTrain(result, _parms)
        : preprocessor.processValid(result, _parms);
    if (processed != result) trackEncoded(processed, scopeTrack);
    result = processed;
  }
  if (!scopeTrack) Scope.untrack(result); // otherwise encoded frame is fully removed on CV model completion, raising exception when computing CV scores.
  return result;
}
// Applies the configured categorical encoding scheme to the frame. Returns the input frame
// unchanged when no encoding was needed, otherwise a new encoded frame tracked for cleanup.
private Frame encodeFrameCategoricals(Frame fr, boolean scopeTrack) {
  Frame encoded = FrameUtils.categoricalEncoder(
      fr,
      _parms.getNonPredictors(), // columns excluded from encoding
      _parms._categorical_encoding,
      getToEigenVec(),
      _parms._max_categorical_levels
  );
  if (encoded != fr) trackEncoded(encoded, scopeTrack);
  return encoded;
}
// Registers an encoded frame for cleanup: either in the current Scope, or in the builder
// workspace's to-delete map (the map value records the allocation stack trace as a debugging aid).
private void trackEncoded(Frame fr, boolean scopeTrack) {
  assert fr._key != null;
  if (scopeTrack)
    Scope.track(fr);
  else
    _workspace.getToDelete(true).put(fr._key, Arrays.toString(Thread.currentThread().getStackTrace()));
}
/**
 * Rebalance a frame for load balancing
 * @param original_fr Input frame
 * @param local Whether to only create enough chunks to max out all cores on one node only
 *              WARNING: This behavior is not actually implemented in the methods defined in this class, the default logic
 *              doesn't take this parameter into consideration.
 * @param name Name of rebalanced frame
 * @return Frame that has potentially more chunks
 */
protected Frame rebalance(final Frame original_fr, boolean local, final String name) {
  if (original_fr == null) return null;
  int chunks = desiredChunks(original_fr, local);
  double rebalanceRatio = rebalanceRatio();
  int nonEmptyChunks = original_fr.anyVec().nonEmptyChunks();
  if (nonEmptyChunks >= chunks * rebalanceRatio) {
    if (chunks>1)
      // FIX: the message fragments were accidentally separated by a comma (passing
      // ", rebalanceRatio=..." as a second varargs argument to Log.info) instead of
      // being concatenated with '+'.
      Log.info(name.substring(name.length()-5)+ " dataset already contains " + nonEmptyChunks + " (non-empty) " +
              " chunks. No need to rebalance. [desiredChunks=" + chunks + ", rebalanceRatio=" + rebalanceRatio + "]");
    return original_fr;
  }
  Log.info("Rebalancing " + name.substring(name.length()-5) + " dataset into " + chunks + " chunks.");
  Key newKey = Key.makeUserHidden(name + ".chunks" + chunks);
  RebalanceDataSet rb = new RebalanceDataSet(original_fr, newKey, chunks);
  H2O.submitTask(rb).join(); // blocks until the rebalanced copy is in the DKV
  Frame rebalanced_fr = DKV.get(newKey).get();
  Scope.track(rebalanced_fr);
  return rebalanced_fr;
}
// Ratio below which rebalancing kicks in; configurable per single-/multi-node via system property.
private double rebalanceRatio() {
  final boolean singleNode = H2O.getCloudSize() == 1;
  final String propertyName = "rebalance.ratio." + (singleNode ? "single" : "multi");
  return Double.parseDouble(getSysProperty(propertyName, "1.0"));
}
/**
 * Find desired number of chunks. If fewer, dataset will be rebalanced.
 * @return Lower bound on number of chunks after rebalancing.
 */
protected int desiredChunks(final Frame original_fr, boolean local) {
  final boolean multiNode = H2O.getCloudSize() > 1;
  // Multi-node estimation is experimental and must be enabled explicitly via system property.
  if (multiNode && Boolean.parseBoolean(getSysProperty("rebalance.enableMulti", "false")))
    return desiredChunkMulti(original_fr);
  return desiredChunkSingle(original_fr);
}
// single-node version (original version): ~1k rows per chunk, capped at the CPU count
private int desiredChunkSingle(final Frame originalFr) {
  final int rowBasedChunks = (int) Math.ceil(originalFr.numRows() / 1e3);
  return Math.min(rowBasedChunks, H2O.NUMCPUS);
}
// multi-node version (experimental version)
private int desiredChunkMulti(final Frame fr) {
  // Only numeric/categorical columns are supported by this size model; fall back otherwise.
  for (int type : fr.types()) {
    if (type != Vec.T_NUM && type != Vec.T_CAT) {
      Log.warn("Training frame contains columns non-numeric/categorical columns. Using old rebalance logic.");
      return desiredChunkSingle(fr);
    }
  }
  // estimate size of the Frame on disk as if it was represented in a binary _uncompressed_ format with no overhead
  long itemCnt = 0;
  for (Vec v : fr.vecs())
    itemCnt += v.length() - v.naCnt(); // count only non-NA cells
  final int itemSize = 4; // magic constant size of both Numbers and Categoricals
  // use the larger of the uncompressed estimate and the actual in-memory size
  final long size = Math.max(itemCnt * itemSize, fr.byteSize());
  final int desiredChunkSize = FileVec.calcOptimalChunkSize(size, fr.numCols(),
      fr.numCols() * itemSize, H2O.NUMCPUS, H2O.getCloudSize(), false, true);
  // ceil(size / desiredChunkSize)
  final int desiredChunks = (int) ((size / desiredChunkSize) + (size % desiredChunkSize > 0 ? 1 : 0));
  Log.info("Calculated optimal number of chunks = " + desiredChunks);
  return desiredChunks;
}
// Reads an H2O system property; the name is automatically prefixed with the H2O property prefix.
protected String getSysProperty(String name, String def) {
  return System.getProperty(H2O.OptArgs.SYSTEM_PROP_PREFIX + name, def);
}
// Number of response classes: categorical cardinality for classification, 1 for regression;
// quasibinomial is always treated as 2-class.
protected int init_getNClass() {
  if (_parms._distribution == DistributionFamily.quasibinomial)
    return 2;
  return _response.isCategorical() ? _response.cardinality() : 1;
}
/** Validates distribution-specific constraints: response sign requirements and the valid
 *  ranges of the tweedie/quantile/huber parameters. Adds validation errors on violation. */
public void checkDistributions() {
  if (_parms._distribution == DistributionFamily.poisson) {
    if (_response.min() < 0)
      error("_response", "Response must be non-negative for Poisson distribution.");
  } else if (_parms._distribution == DistributionFamily.gamma) {
    if (_response.min() < 0)
      error("_response", "Response must be non-negative for Gamma distribution.");
  } else if (_parms._distribution == DistributionFamily.tweedie) {
    if (_parms._tweedie_power >= 2 || _parms._tweedie_power <= 1)
      error("_tweedie_power", "Tweedie power must be between 1 and 2.");
    if (_response.min() < 0)
      error("_response", "Response must be non-negative for Tweedie distribution.");
  } else if (_parms._distribution == DistributionFamily.quantile) {
    if (_parms._quantile_alpha > 1 || _parms._quantile_alpha < 0)
      error("_quantile_alpha", "Quantile alpha must be between 0 and 1.");
  } else if (_parms._distribution == DistributionFamily.huber) {
    if (_parms._huber_alpha <0 || _parms._huber_alpha>1)
      error("_huber_alpha", "Huber alpha must be between 0 and 1.");
  }
}
transient public HashSet<String> _removedCols = new HashSet<>(); // names of columns dropped by the last FilterCols run
/** Helper for bulk-removing columns that match a predicate from a frame, while leaving the
 *  trailing special (non-feature) vecs untouched. Subclasses implement filter(). */
public abstract class FilterCols {
  final int _specialVecs; // special vecs to skip at the end
  public FilterCols(int n) {_specialVecs = n;}
  // Return true if the given vec should be removed from the frame.
  abstract protected boolean filter(Vec v);
  public void doIt( Frame f, String msg, boolean expensive ) {
    List<Integer> rmcolsList = new ArrayList<>();
    // Scan only the feature vecs; the last _specialVecs columns are response/weights/etc.
    for( int i = 0; i < f.vecs().length - _specialVecs; i++ )
      if( filter(f.vec(i)) ) rmcolsList.add(i);
    if( !rmcolsList.isEmpty() ) {
      // note: the int argument is an initial capacity here, not contents
      _removedCols = new HashSet<>(rmcolsList.size());
      int[] rmcols = new int[rmcolsList.size()];
      for (int i=0;i<rmcols.length;++i) {
        rmcols[i]=rmcolsList.get(i);
        _removedCols.add(f._names[rmcols[i]]);
      }
      f.remove(rmcols); //bulk-remove
      msg += _removedCols.toString(); // append removed column names to the warning
      warn("_train", msg);
      if (expensive) Log.info(msg);
    }
  }
}
/** Stitch together per-fold holdout prediction frames into one large Frame.
 * @param predKeys keys of the N per-fold prediction frames (all same column layout)
 * @param key destination key for the combined output frame
 * @return combined frame where each column is the per-row sum across folds */
private static Frame combineHoldoutPredictions(Key<Frame>[] predKeys, Key key) {
  int N = predKeys.length;
  Frame template = predKeys[0].get();
  Vec[] vecs = new Vec[N * template.numCols()];
  int idx = 0;
  for (int i = 0; i < N; ++i) {
    // Fetch each fold's frame from the K/V store once, not twice per column
    // as before (predKeys[i].get() inside the inner loop).
    Frame pred = predKeys[i].get();
    for (int j = 0; j < pred.numCols(); ++j)
      vecs[idx++] = pred.vec(j);
  }
  return new HoldoutPredictionCombiner(N, template.numCols())
      .doAll(template.types(), new Frame(vecs))
      .outputFrame(key, template.names(), template.domains());
}
// helper to combine multiple holdout prediction Vecs (each only has 1/N-th filled with non-zeros) into 1 Vec
private static class HoldoutPredictionCombiner extends MRTask<HoldoutPredictionCombiner> {
  int _folds, _cols;
  public HoldoutPredictionCombiner(int folds, int cols) { _folds = folds; _cols = cols; }
  /** For each output column, sum the corresponding input column across all folds
   * (only one fold contributes non-zeros per row, so the sum selects that value). */
  @Override public void map(Chunk[] cs, NewChunk[] nc) {
    final int nrows = cs[0].len();
    for (int col = 0; col < _cols; ++col) {
      double[] acc = new double[nrows];
      for (int row = 0; row < nrows; ++row) {
        for (int fold = 0; fold < _folds; ++fold) {
          acc[row] += cs[fold * _cols + col].atd(row);
        }
      }
      nc[col].setDoubles(acc);
    }
  }
}
/**
 * Builds the "Cross-Validation Metrics Summary" table: one row per metric,
 * columns are mean, sd and one column per fold.
 * <p>
 * Metrics are discovered reflectively: every public method of the first CV
 * model's validation ModelMetrics (and of its ConfusionMatrix, when present)
 * that can be invoked with no arguments and returns a double is treated as a
 * metric, except for an explicit exclusion list of non-metric accessors.
 *
 * @param cvmodels keys of the per-fold cross-validation models
 * @return the summary table, or {@code null} when there are no CV models
 */
private TwoDimTable makeCrossValidationSummaryTable(Key[] cvmodels) {
  if (cvmodels == null || cvmodels.length == 0) return null;
  int N = cvmodels.length;
  int extra_length = 2; //mean/sigma/cv1/cv2/.../cvN
  String[] colTypes = new String[N + extra_length];
  Arrays.fill(colTypes, "string");
  String[] colFormats = new String[N + extra_length];
  Arrays.fill(colFormats, "%s");
  String[] colNames = new String[N + extra_length];
  colNames[0] = "mean";
  colNames[1] = "sd";
  for (int i = 0; i < N; ++i)
    colNames[i + extra_length] = "cv_" + (i + 1) + "_valid";
  // Accessors on ModelMetrics/ConfusionMatrix that are not metrics.
  Set<String> excluded = new HashSet<>();
  excluded.add("total_rows");
  excluded.add("makeSchema");
  excluded.add("hr");
  excluded.add("frame");
  excluded.add("model");
  excluded.add("remove");
  excluded.add("cm");
  excluded.add("auc_obj");
  List<Method> methods = new ArrayList<>();
  {
    Model m = DKV.getGet(cvmodels[0]);
    ModelMetrics mm = m._output._validation_metrics;
    if (mm != null) {
      for (Method meth : mm.getClass().getMethods()) {
        if (excluded.contains(meth.getName())) continue;
        try {
          // Probe invocation: keep only no-arg methods that return a double.
          double c = (double) meth.invoke(mm);
          methods.add(meth);
        } catch (Exception ignored) {} // not a metric accessor — skip
      }
      ConfusionMatrix cm = mm.cm();
      if (cm != null) {
        for (Method meth : cm.getClass().getMethods()) {
          if (excluded.contains(meth.getName())) continue;
          try {
            double c = (double) meth.invoke(cm); // same probe for confusion-matrix metrics
            methods.add(meth);
          } catch (Exception ignored) {} // not a metric accessor — skip
        }
      }
    }
  }
  // make unique, and sort alphabetically
  Set<String> rowNames = new TreeSet<>();
  for (Method m : methods) rowNames.add(m.getName());
  List<Method> meths = new ArrayList<>();
  OUTER:
  for (String n : rowNames)
    for (Method m : methods)
      if (m.getName().equals(n)) { //find the first method that has that name
        meths.add(m);
        continue OUTER;
      }
  int numMetrics = rowNames.size();
  TwoDimTable table = new TwoDimTable("Cross-Validation Metrics Summary",
      null,
      rowNames.toArray(new String[0]), colNames, colTypes, colFormats, "");
  // NOTE: a previously allocated MathUtils.BasicStats was never used — removed;
  // SimpleStats below computes the mean/sigma columns.
  double[][] vals = new double[N][numMetrics];
  int i = 0;
  for (Key<Model> km : cvmodels) {
    Model m = DKV.getGet(km);
    if (m == null) continue;
    ModelMetrics mm = m._output._validation_metrics;
    int j = 0;
    for (Method meth : meths) {
      if (excluded.contains(meth.getName())) continue;
      // Try the metric on the ModelMetrics object first, then on its
      // ConfusionMatrix; a failed invocation simply skips that source (best-effort).
      try {
        double val = (double) meth.invoke(mm);
        vals[i][j] = val;
        table.set(j++, i + extra_length, (float) val);
      } catch (Throwable ignored) { }
      if (mm.cm() == null) continue;
      try {
        double val = (double) meth.invoke(mm.cm());
        vals[i][j] = val;
        table.set(j++, i + extra_length, (float) val);
      } catch (Throwable ignored) { }
    }
    i++;
  }
  MathUtils.SimpleStats simpleStats = new MathUtils.SimpleStats(numMetrics);
  for (i = 0; i < N; ++i)
    simpleStats.add(vals[i], 1);
  for (i = 0; i < numMetrics; ++i) {
    table.set(i, 0, (float) simpleStats.mean()[i]);
    table.set(i, 1, (float) simpleStats.sigma()[i]);
  }
  Log.info(table);
  return table;
}
/**
 * Overridable Model Builder name used in generated code, in case the name of the ModelBuilder class is not suitable.
 *
 * @return Name of the builder to be used in generated code
 */
public String getName() {
  // Use a fixed locale so the generated name is stable regardless of the JVM's
  // default locale (locale-sensitive toLowerCase() mangles e.g. 'I' under tr-TR).
  return getClass().getSimpleName().toLowerCase(java.util.Locale.ROOT);
}
// Deletes all temporary keys tracked in the workspace (delegates to Workspace.cleanUp()).
private void cleanUp() {
_workspace.cleanUp();
}
@SuppressWarnings("WeakerAccess") // optionally allow users create workspace directly (instead of relying on init)
protected final void initWorkspace(boolean expensive) {
  if (!expensive)
    return; // the cheap initialization phase doesn't need a tracking workspace
  _workspace = new Workspace(true);
}
/** Holder for per-build temporary state; tracks keys to delete when the
 * build finishes. Only the "expensive" initialization phase gets a tracking map. */
static class Workspace {
  private final IcedHashMap<Key, String> _toDelete; // null during inexpensive init
  private Workspace(boolean expensive) {
    if (expensive) {
      _toDelete = new IcedHashMap<>();
    } else {
      _toDelete = null;
    }
  }
  /** Returns the tracking map for the expensive phase; null for the inexpensive
   * phase; throws when the expensive phase was never properly initialized. */
  IcedHashMap<Key, String> getToDelete(boolean expensive) {
    if (!expensive) {
      return null; // incorrect usages during "inexpensive" initialization will fail
    }
    if (_toDelete == null) {
      throw new IllegalStateException("ModelBuilder was not correctly initialized. " +
          "Expensive phase requires field `_toDelete` to be non-null. " +
          "Does your implementation of init method call super.init(true) or alternatively initWorkspace(true)?");
    }
    return _toDelete;
  }
  /** Deletes every tracked temporary key. */
  void cleanUp() {
    FrameUtils.cleanUp(_toDelete);
  }
}
}
|
package water.fvec;
import water.*;
import water.parser.ValueString;
/** A compression scheme, over a chunk of data - a single array of bytes.
* Chunks are mapped many-to-1 to a {@link Vec}. The <em>actual</em> vector
* header info is in the Vec - which contains info to find all the bytes of
* the distributed vector. Subclasses of this abstract class implement
* (possibly empty) compression schemes.
*
* <p>Chunks are collections of elements, and support an array-like API.
* Chunks are subsets of a Vec; while the elements in a Vec are numbered
* starting at 0, any given Chunk has some (probably non-zero) starting row,
* and a length which is smaller than the whole Vec. Chunks are limited to a
* single Java byte array in a single JVM heap, and only an int's worth of
* elements. Chunks support both the notions of a global row-number and a
* chunk-local numbering. The global row-number calls are variants of {@code
* at} and {@code set}. If the row is outside the current Chunk's range, the
* data will be loaded by fetching from the correct Chunk. This probably
* involves some network traffic, and if all rows are loaded then the entire
* dataset will be pulled local (possibly triggering an OutOfMemory).
*
* <p>The chunk-local numbering supports the common {@code for} loop iterator
* pattern, using {@code at} and {@code set} calls that end in a '{@code 0}':
* <pre>{@code
for( int row=0; row < chunk._len; row++ )
chunk.at0(row)...
}</pre>
*
* <p>The array-like API allows loading and storing elements in and out of
* Chunks. When loading, values are decompressed. When storing, an attempt
* to compress back into the actual underlying Chunk subclass is made; if this
* fails the Chunk is "inflated" into a {@link NewChunk}, and the store
* completed there. Later the NewChunk will be compressed (probably into a
* different underlying Chunk subclass) and put back in the K/V store under
* the same Key - effectively replacing the original Chunk; this is done when
* {@link #close} is called.
*
* <p>Chunk updates are not multi-thread safe; the caller must do correct
* synchronization. This is already handled by the Map/Reduce ({@link
* MRTask}) framework. Chunk updates are not visible cross-cluster until
* a {@link #close} is made; again this is handled by MRTask directly.
*
* <p>In addition to normal load and store operations, Chunks support the
* notion a missing element via the {@code isNA()} calls, and a "next
* non-zero" notion for rapidly iterating over sparse data.
*
* <p><b>Data Types</b>
*
* <p>Chunks hold Java primitive values, timestamps, UUIDs, or Strings. All
* the Chunks in a Vec hold the same type. Most of the types are compressed.
* Integer types (boolean, byte, short, int, long) are always lossless. Float
* and Double types might lose 1 or 2 ulps in the compression. Time data is
* held as milliseconds since the Unix Epoch. UUIDs are held as 128-bit
* integers (a pair of Java longs). Strings are compressed in various obvious
* ways. Sparse data is held... sparsely; e.g. loading data in SVMLight
* format will not "blow up" the in-memory representation. Factors or Enums
* are held as small integers, with a shared String lookup table on the side.
*
* <p>Missing float and double data is always treated as a NaN, both if read
* or written. There is no equivalent for integer data; reading a missing
* integer value is a coding error and will be flagged. First check for
* a missing integer value before loading it:
* <pre>{@code
if( !chk.isNA0(row) ) ...chk.at80(row)....
}</pre>
*
* <p>The same holds true for the other non-real types (timestamps, UUIDs,
* Strings, or enums); they must be checked for missing before being used.
*
* <p><b>Performance Concerns</b>
*
* <p>The standard {@code for} loop mentioned above is the fastest way to
* access data; definitely faster (and less error prone) than iterating over
* global row numbers. Iterating over a single Chunk is nearly always
* memory-bandwidth bound. Often code will iterate over a number of Chunks
* aligned together (the common use-case of looking a whole rows of a
* dataset). Again, typically such a code pattern is memory-bandwidth bound
* although the X86 will stop being able to prefetch well beyond 100 or 200
* Chunks. Note that such Chunk alignment is guaranteed within all the Vecs
* of a Frame.
*
* <p>This example computes the Euclidean distance between all the columns and
* a given point, and stores the squared distance back in the last column.
* Note that due "NaN poisoning" if any row element is missing, the entire
* distance calculated will be NaN.
* <pre>{@code
final double[] _point; // The given point
public void map( Chunk[] chks ) {
for( int row=0; row < chks[0]._len; row++ ) {
double dist=0; // Squared distance
for( int col=0; col < chks.length-1; col++ ) {
double d = chks[col].at0(row) - _point[col];
dist += d*d;
}
chks[chks.length-1].set0( row, dist );
}
}}</pre>
*/
public abstract class Chunk extends Iced implements Cloneable {
/** Starting row */
protected long _start = -1; // Start element; filled after AutoBuffer.read
public final long start() { return _start; } // Start element; filled after AutoBuffer.read
private int _len; // Number of elements in this chunk
public int len() { return _len; }
/** Sets the number of elements in this chunk; returns the new length. */
public int set_len(int _len) { return this._len = _len; }
private Chunk _chk2; // Normally==null, changed if chunk is written to
public final Chunk chk2() { return _chk2; } // Normally==null, changed if chunk is written to
protected Vec _vec; // Owning Vec; filled after AutoBuffer.read
public final Vec vec() { return _vec; } // Owning Vec; filled after AutoBuffer.read
protected byte[] _mem; // Short-cut to the embedded memory; WARNING: holds onto a large array
public final byte[] getBytes() { return _mem; } // Short-cut to the embedded memory; WARNING: holds onto a large array
// Used by a ParseExceptionTest to break the Chunk invariants & trigger an NPE
public final void crushBytes() { _mem=null; }
/** Load a long value. Floating point values are silently rounded to an
 * integer. Throws if the value is missing.
 * <p>
 * Loads from the 1-entry chunk cache, or misses-out. This version uses
 * absolute element numbers, but must convert them to chunk-relative indices
 * - requiring a load from an aliasing local var, leading to lower quality
 * JIT'd code (similar issue to using iterator objects).
 * <p>
 * Slightly slower than 'at0' since it range checks within a chunk. */
final long at8( long i ) {
// _start is -1 until filled in after deserialization; a negative _start is treated as 0.
long x = i - (_start>0 ? _start : 0);
if( 0 <= x && x < len()) return at80((int)x);
throw new ArrayIndexOutOfBoundsException(""+_start+" <= "+i+" < "+(_start+ len()));
}
/** Load a double value. Returns Double.NaN if value is missing.
 * <p>
 * Loads from the 1-entry chunk cache, or misses-out. This version uses
 * absolute element numbers, but must convert them to chunk-relative indices
 * - requiring a load from an aliasing local var, leading to lower quality
 * JIT'd code (similar issue to using iterator objects).
 * <p>
 * Slightly slower than 'at80' since it range checks within a chunk. */
public final double at( long i ) {
long x = i - (_start>0 ? _start : 0);
if( 0 <= x && x < len()) return at0((int)x);
throw new ArrayIndexOutOfBoundsException(""+_start+" <= "+i+" < "+(_start+ len()));
}
/** Fetch the missing-status the slow way. */
final boolean isNA(long i) {
long x = i - (_start>0 ? _start : 0);
if( 0 <= x && x < len()) return isNA0((int)x);
throw new ArrayIndexOutOfBoundsException(""+_start+" <= "+i+" < "+(_start+ len()));
}
/** Load one 64-bit half of a UUID element (the 'l' half) the slow way, using an absolute row number. */
public final long at16l( long i ) {
long x = i - (_start>0 ? _start : 0);
if( 0 <= x && x < len()) return at16l0((int)x);
throw new ArrayIndexOutOfBoundsException(""+_start+" <= "+i+" < "+(_start+ len()));
}
/** Load the other 64-bit half of a UUID element (the 'h' half) the slow way, using an absolute row number. */
public final long at16h( long i ) {
long x = i - (_start>0 ? _start : 0);
if( 0 <= x && x < len()) return at16h0((int)x);
throw new ArrayIndexOutOfBoundsException(""+_start+" <= "+i+" < "+(_start+ len()));
}
/** Load a String element into {@code vstr} the slow way, using an absolute row number. */
public final ValueString atStr( ValueString vstr, long i ) {
long x = i - (_start>0 ? _start : 0);
if( 0 <= x && x < len()) return atStr0(vstr,(int)x);
throw new ArrayIndexOutOfBoundsException(""+_start+" <= "+i+" < "+(_start+ len()));
}
/** The zero-based API. Somewhere between 10% to 30% faster in a tight-loop
 * over the data than the generic at() API. Probably no gain on larger
 * loops. The row reference is zero-based on the chunk, and should
 * range-check by the JIT as expected. */
// Each reads through _chk2 when present, so uncommitted writes are visible to subsequent reads.
public final double at0 ( int i ) { return _chk2 == null ? atd_impl(i) : _chk2. atd_impl(i); }
public final long at80 ( int i ) { return _chk2 == null ? at8_impl(i) : _chk2. at8_impl(i); }
public final boolean isNA0( int i ) { return _chk2 == null ?isNA_impl(i) : _chk2.isNA_impl(i); }
public final long at16l0( int i ) { return _chk2 == null ? at16l_impl(i) : _chk2.at16l_impl(i); }
public final long at16h0( int i ) { return _chk2 == null ? at16h_impl(i) : _chk2.at16h_impl(i); }
public final ValueString atStr0( ValueString vstr, int i ) { return _chk2 == null ? atStr_impl(vstr,i) : _chk2.atStr_impl(vstr,i); }
/** Write element the slow way, as a long. There is no way to write a
 * missing value with this call. Under rare circumstances this can throw:
 * if the long does not fit in a double (value is larger magnitude than
 * 2^52), AND float values are stored in Vector. In this case, there is no
 * common compatible data representation. */
// All slow-path writers delegate to the owning Vec when the row falls outside this chunk.
public final void set( long i, long l) { long x = i-_start; if (0 <= x && x < len()) set0((int)x,l); else _vec.set(i,l); }
/** Write element the slow way, as a double. Double.NaN will be treated as
 * a set of a missing element. */
public final void set( long i, double d) { long x = i-_start; if (0 <= x && x < len()) set0((int)x,d); else _vec.set(i,d); }
/** Write element the slow way, as a float. Float.NaN will be treated as
 * a set of a missing element. */
public final void set( long i, float f) { long x = i-_start; if (0 <= x && x < len()) set0((int)x,f); else _vec.set(i,f); }
/** Set the element as missing the slow way. */
final void setNA( long i ) { long x = i-_start; if (0 <= x && x < len()) setNA0((int)x); else _vec.setNA(i); }
/** Write a String element the slow way. */
public final void set( long i, String str) { long x = i-_start; if (0 <= x && x < len()) set0((int)x,str); else _vec.set(i,str); }
// Lazily makes a private writable copy (_chk2) on the first write; close() later
// commits _chk2 back to the K/V store.
private void setWrite() {
if( _chk2 != null ) return; // Already setWrite
assert !(this instanceof NewChunk) : "Cannot direct-write into a NewChunk, only append";
_vec.preWriting(); // One-shot writing-init
_chk2 = (Chunk)clone(); // Flag this chunk as having been written into
assert _chk2._chk2 == null; // Clone has NOT been written into
}
/**
 * Set a long element in a chunk given a 0-based chunk local index.
 *
 * Write into a chunk.
 * May rewrite/replace chunks if the chunk needs to be
 * "inflated" to hold larger values. Returns the input value.
 *
 * Note that the idx is an int (instead of a long), which tells you
 * that index 0 is the first row in the chunk, not the whole Vec.
 */
public final long set0(int idx, long l) {
setWrite();
if( _chk2.set_impl(idx,l) ) return l;
// Value doesn't fit the current compression scheme: inflate to a NewChunk and retry.
(_chk2 = inflate_impl(new NewChunk(this))).set_impl(idx,l);
return l;
}
/** Set a double element in a chunk given a 0-based chunk local index. */
public final double set0(int idx, double d) {
setWrite();
if( _chk2.set_impl(idx,d) ) return d;
(_chk2 = inflate_impl(new NewChunk(this))).set_impl(idx,d);
return d;
}
/** Set a floating element in a chunk given a 0-based chunk local index. */
public final float set0(int idx, float f) {
setWrite();
if( _chk2.set_impl(idx,f) ) return f;
(_chk2 = inflate_impl(new NewChunk(this))).set_impl(idx,f);
return f;
}
/** Set the element in a chunk as missing given a 0-based chunk local index. */
public final boolean setNA0(int idx) {
setWrite();
if( _chk2.setNA_impl(idx) ) return true;
(_chk2 = inflate_impl(new NewChunk(this))).setNA_impl(idx);
return true;
}
/** Set a String element in a chunk given a 0-based chunk local index. */
public final String set0(int idx, String str) {
setWrite();
if( _chk2.set_impl(idx,str) ) return str;
(_chk2 = inflate_impl(new NewChunk(this))).set_impl(idx,str);
return str;
}
/** After writing we must call close() to register the bulk changes */
public Futures close( int cidx, Futures fs ) {
if( this instanceof NewChunk ) _chk2 = this;
if( _chk2 == null ) return fs; // No change?
// Compress any pending NewChunk before publishing.
if( _chk2 instanceof NewChunk ) _chk2 = ((NewChunk)_chk2).new_close();
DKV.put(_vec.chunkKey(cidx),_chk2,fs,true); // Write updated chunk back into K/V
if( _vec._cache == this ) _vec._cache = null;
return fs;
}
/** Index of this chunk within its owning Vec, derived from the starting row. */
public int cidx() { return _vec.elem2ChunkIdx(_start); }
/** Chunk-specific readers. */
abstract protected double atd_impl(int idx);
abstract protected long at8_impl(int idx);
abstract protected boolean isNA_impl(int idx);
// UUID/String readers default to throwing; only the matching chunk subclasses override them.
protected long at16l_impl(int idx) { throw new IllegalArgumentException("Not a UUID"); }
protected long at16h_impl(int idx) { throw new IllegalArgumentException("Not a UUID"); }
protected ValueString atStr_impl(ValueString vstr, int idx) { throw new IllegalArgumentException("Not a String"); }
/** Chunk-specific writer. Returns false if the value does not fit in the
 * current compression scheme. */
abstract boolean set_impl (int idx, long l );
abstract boolean set_impl (int idx, double d );
abstract boolean set_impl (int idx, float f );
abstract boolean setNA_impl(int idx);
boolean set_impl (int idx, String str) { throw new IllegalArgumentException("Not a String"); }
// Next row holding a stored value; dense default just advances by one (sparse chunks override).
int nextNZ(int rid){return rid+1;}
// True when this chunk stores values sparsely (dense default: false).
public boolean isSparse() {return false;}
// Number of physically stored values; equals len() for dense chunks.
public int sparseLen(){return len();}
/** Get chunk-relative indices of values (nonzeros for sparse, all for dense) stored in this chunk.
 * For dense chunks, this will contain indices of all the rows in this chunk.
 * @return array of chunk-relative indices of values stored in this chunk.
 */
public int nonzeros(int [] res) {
for( int i = 0; i < len(); ++i) res[i] = i;
return len();
}
/**
 * Get chunk-relative indices of values (nonzeros for sparse, all for dense) stored in this chunk.
 * For dense chunks, this will contain indices of all the rows in this chunk.
 *
 * @return array of chunk-relative indices of values stored in this chunk.
 */
public final int [] nonzeros () {
int [] res = MemoryManager.malloc4(sparseLen());
nonzeros(res);
return res;
}
/** Chunk-specific bulk inflater back to NewChunk. Used when writing into a
 * chunk and written value is out-of-range for an update-in-place operation.
 * Bulk copy from the compressed form into the nc._ls array. */
abstract NewChunk inflate_impl(NewChunk nc);
/** Return the next Chunk, or null if at end. Mostly useful for parsers or
 * optimized stencil calculations that want to "roll off the end" of a
 * Chunk, but in a highly optimized way. */
Chunk nextChunk( ) { return _vec.nextChunk(this); }
@Override public String toString() { return getClass().getSimpleName(); }
/** Approximate in-memory size in bytes, including any uncommitted written copy (_chk2). */
public long byteSize() {
long s= _mem == null ? 0 : _mem.length;
s += (2+5)*8 + 12; // 2 hdr words, 5 other words, @8bytes each, plus mem array hdr
if( _chk2 != null ) s += _chk2.byteSize();
return s;
}
// Custom serializers: the _mem field contains ALL the fields already.
// Init _start to -1, so we know we have not filled in other fields.
// Leave _vec & _chk2 null, leave _len unknown.
abstract public AutoBuffer write_impl( AutoBuffer ab );
abstract public Chunk read_impl( AutoBuffer ab );
// Support for fixed-width format printing
// private String pformat () { return pformat0(); }
// private int pformat_len() { return pformat_len0(); }
public byte precision() { return -1; } // Digits after the decimal, or -1 for "all"
// protected String pformat0() {
// long min = (long)_vec.min();
// if( min < 0 ) return "% "+pformat_len0()+"d";
// return "%"+pformat_len0()+"d";
// protected int pformat_len0() {
// int len=0;
// long min = (long)_vec.min();
// if( min < 0 ) len++;
// long max = Math.max(Math.abs(min),Math.abs((long)_vec.max()));
// throw H2O.unimpl();
// //for( int i=1; i<DParseTask.powers10i.length; i++ )
// // if( max < DParseTask.powers10i[i] )
// // return i+len;
// //return 20;
// protected int pformat_len0( double scale, int lg ) {
// double dx = Math.log10(scale);
// int x = (int)dx;
// throw H2O.unimpl();
// //if( DParseTask.pow10i(x) != scale ) throw H2O.unimpl();
// //int w=1/*blank/sign*/+lg/*compression limits digits*/+1/*dot*/+1/*e*/+1/*neg exp*/+2/*digits of exp*/;
// //return w;
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.